text stringlengths 1 1.05M |
|---|
def calculateSum(numbers):
    """Return the sum of an iterable of numbers.

    Uses the built-in sum(), which runs in C and avoids the explicit
    Python-level accumulation loop. Returns 0 for empty input, matching
    the original behavior.

    Args:
        numbers: Any iterable of numeric values.

    Returns:
        The arithmetic sum of the values (0 if the iterable is empty).
    """
    return sum(numbers)
#!/bin/bash
# Starts kcp plus the cluster controller for the demo, waits until the
# API server reports it is serving, then blocks until interrupted.

CURRENT_DIR="$(pwd)"
# Quote ${BASH_SOURCE[0]} and all path expansions so the script works
# when checked out under a path containing spaces.
DEMO_ROOT="$(dirname "${BASH_SOURCE[0]}")"
KCP_ROOT="$(cd "${DEMO_ROOT}/../.." && pwd)"
export KCP_DATA_ROOT=${KCP_DATA_ROOT:-$KCP_ROOT}

# shellcheck source=/dev/null
source "${DEMO_ROOT}/.startUtils"
setupTraps "$0"

KUBECONFIG="${KCP_DATA_ROOT}/.kcp/data/admin.kubeconfig"
export KCP_LISTEN_ADDR="127.0.0.1:6443"

"${DEMO_ROOT}/startKcpAndClusterController.sh" -auto_publish_apis=false deployments.apps &

# Block until the server log shows it is accepting connections.
wait_command "grep 'Serving securely' ${CURRENT_DIR}/kcp.log"
echo ""
echo "Use ctrl-C to stop all components"
echo ""
wait
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): emitted by the Flutter tool during the iOS build; the
# absolute paths below are machine-specific and regenerated per build.
export "FLUTTER_ROOT=/Users/panglin/.fvm/versions/253"
export "FLUTTER_APPLICATION_PATH=/Users/panglin/FlutterProjects/fix_dev/jmessage-flutter-plugin/example"
export "COCOAPODS_PARALLEL_CODE_SIGN=true"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=false"
export "TREE_SHAKE_ICONS=false"
export "PACKAGE_CONFIG=.packages"
# Lookup table: food category -> {specific item -> [a, b]}.
# The two floats appear to be coefficients consumed downstream
# (presumably an offset and a density/calorie factor per item) —
# TODO(review): confirm the exact semantics against the consumer code.
density_library = {
    'Bagel': {
        'Plain Bagel': [0.0, 424.4507]
    },
    'Burger': {
        'Beef Burger': [0.0, 391.7372]
    },
    'French Fries': {
        'French Fries': [0.0, 328.3566]
    },
    'Fried Chicken': {
        'Chicken Nugget': [0.0, 538.0302]
    },
    'Donut': {
        'Glazed Donut': [0.0, 328.1879],
        'Chocolate Donut': [0.0, 308.5272]
    }
}
|
module Gbase
  # Devise-backed application user with role-based access (super/admin/user)
  # and an admin-approval gate: unapproved accounts cannot authenticate.
  class User < ActiveRecord::Base
    #include PublicActivity::Model
    #tracked owner: Proc.new { |controller, model| controller.current_user ? controller.current_user : nil },
    #name: proc {|controller, model| model.name }
    #has_paper_trail :only => [:name, :email]
    # Include default devise modules. Others available are:
    # :confirmable, :lockable, :timeoutable and :omniauthable
    has_many :permissions, :dependent => :destroy
    has_many :modulos, through: :permissions
    has_many :menus, through: :permissions
    devise :database_authenticatable, :registerable,
           :recoverable, :rememberable, :trackable, :validatable, :timeoutable

    # role is stored as an integer column: 0 => :super, 1 => :admin, 2 => :user
    enum role: [:super, :admin, :user]
    after_initialize :set_default_role, :if => :new_record?

    # New records default to the least-privileged :user role.
    def set_default_role
      self.role ||= :user
    end

    # Devise hook: only approved users may sign in.
    # NOTE(review): approved? is presumably a boolean column — confirm in schema.
    def active_for_authentication?
      super && approved?
    end

    # Devise hook: message key shown when sign-in is rejected.
    def inactive_message
      if !approved?
        :not_approved
      else
        super # Use whatever other message
      end
    end

    validates :name, presence: true
  end
end
|
<gh_stars>1-10
import { IGridSeparator } from '../../typings/interfaces'
import { IRowOperationFactory } from '../../typings/interfaces/grid-row-operation-factory.interface'
import { TPrimaryKey } from '../../typings/types'
import { Operation } from '../operation.abstract'
/**
 * Grid operation that returns the separators attached to a row.
 */
export class GetRowSeparators extends Operation {
  constructor(factory: IRowOperationFactory) { super(factory.gridController) }

  /**
   * Looks up the row meta for the given primary key and returns its
   * separators; yields an empty array when the row has no meta or no
   * separators.
   */
  public run(primaryKey: TPrimaryKey): IGridSeparator[] {
    return this.rowOperations.GetRowMeta.run(primaryKey)?.separators || []
  }
}
|
<reponame>henvo/railsful<filename>lib/railsful/interceptors/errors.rb
# frozen_string_literal: true
require 'active_model/errors'
module Railsful
  module Interceptors
    # This interceptor checks the given json object for an 'errors' array
    # and checks if any errors are available.
    module Errors
      # Render hook: rewrites the render options so ActiveModel errors are
      # serialized in a jsonapi-like shape before delegating to super.
      def render(options)
        super(errors_options(options))
      end

      # Returns the options untouched unless :json holds an
      # ActiveModel::Errors; otherwise replaces :json with the
      # formatted errors array.
      def errors_options(options)
        return options unless errors?(options)

        # Fetch all the errors from the passed json value.
        errors = errors(options.fetch(:json))

        # Overwrite the json value and set the errors array.
        options.merge(json: { errors: errors })
      end

      # Transform error output format into more "jsonapi" like.
      def errors(raw_errors)
        errors = []
        raw_errors.details.each do |field, array|
          errors += field_errors(field, array)
        end
        errors
      end

      # Builds one formatted error hash per detail entry of a field.
      def field_errors(field, array)
        array.map do |hash|
          formatted_error(hash, field)
        end
      end

      # Format the error by adding additional status and field information.
      #
      # :reek:UtilityFunction
      def formatted_error(hash, field)
        {
          status: '422',
          field: field
        }.merge(hash)
      end

      # Checks if given renderable is an ActiveModel::Error
      #
      # :reek:UtilityFunction
      def errors?(options)
        return false unless options

        options.fetch(:json, nil).is_a?(ActiveModel::Errors)
      end
    end
  end
end
|
#!/usr/bin/env bats

load test_helpers

# Create a fresh asdf directory before each test case.
setup() {
  setup_asdf_dir
}

# Remove the asdf directory after each test case.
teardown() {
  clean_asdf_dir
}

# Both missing-argument variants must fail with the same usage error.
@test "plugin_test_command with no URL specified prints an error" {
  run asdf plugin-test "elixir"
  [ "$status" -eq 1 ]
  [ "$output" = "FAILED: please provide a plugin name and url" ]
}

@test "plugin_test_command with no name or URL specified prints an error" {
  run asdf plugin-test
  [ "$status" -eq 1 ]
  [ "$output" = "FAILED: please provide a plugin name and url" ]
}
|
<gh_stars>0
//@ts-check
import { Client, CommandInteraction, MessageEmbed } from "discord.js";
import inventory from "../Schemas/inventory";
// Slash command: /inventory — "list" shows the user's aggregated items,
// "use" is a stub for consuming an item.
module.exports = {
  name: "inventory",
  description: "Inventory",
  // Two subcommands (type 1); "use" takes a string option (type 3).
  options: [
    { name: "list", description: "list your inventory", type: 1 },
    {
      name: "use",
      description: "Use an item",
      type: 1,
      options: [{ name: "item", description: "Name of the item", type: 3 }],
    },
  ],
  run: async (client: Client, interaction: CommandInteraction) => {
    const embed = new MessageEmbed();
    if (interaction.options.getSubcommand() === "list") {
      embed
        .setTitle("Inventory of " + interaction.user.tag)
        .setColor("ORANGE")
        .setTimestamp(Date.now());
      // Aggregate duplicate item names into {name, count} pairs.
      const extractedItems: { name: string; count: number }[] = [];
      const inv = await inventory.findOne({ userId: interaction.user.id });
      // @ts-ignore
      if (!inv) return interaction.reply("Get An Inventory By Running b!bal");
      if (inv.inventory.length < 1)
        return interaction.reply(
          "There is literally Nothing In Your Inventory!"
        );
      for (const item of inv.inventory) {
        // Case-insensitive match so differently-cased duplicates merge.
        const isExist = extractedItems.find(
          (i) => i.name.toLowerCase() === item.toLowerCase()
        );
        if (isExist) {
          isExist.count = isExist.count + 1;
        } else {
          extractedItems.push({ name: item, count: 1 });
        }
        //typeof isExist==='object'?extractedItems[index]={name:item.toLowerCase(), count:isExist.count++}:extractedItems.push({name:item.toLowerCase(), count:1})
      }
      // One embed field per distinct item, count in parentheses.
      for (const item of extractedItems) {
        embed.addField(item.name + `(${item.count.toString()})`, "\u200b");
      }
      // Pad so the embed never renders with a single lonely field.
      embed.fields.length < 2
        ? embed.addField("Literally Nothing Else", "\u200b")
        : null;
      // console.log(extractedItems)
      interaction
        .deferReply()
        .then(() => interaction.editReply({ embeds: [embed] }));
    } else if (interaction.options.getSubcommand() == "use") {
      // TODO(review): "use" subcommand is currently a placeholder.
      interaction.reply("test");
    }
  },
};
|
// Redux-style action type constants for the tweet feed.
export const GET_ALL_TWEET = 'GET_ALL/TWEET'; // fetch the full tweet list
export const SET_HAS_NEXT = 'SET/HAS_NEXT'; // presumably marks whether another page exists — confirm against reducer
|
<filename>src/com/namelessmc/java_api/exception/ReportUserBannedException.java
package com.namelessmc.java_api.exception;

import com.namelessmc.java_api.ApiError;

/**
 * Thrown when a user attempts to create a report while banned from doing
 * so; wraps {@link ApiError#USER_CREATING_REPORT_BANNED}.
 */
public class ReportUserBannedException extends ApiErrorException {

	private static final long serialVersionUID = 1L;

	public ReportUserBannedException() {
		super(ApiError.USER_CREATING_REPORT_BANNED);
	}
}
|
-- Top 10 customers ranked by total amount spent, highest first.
SELECT *
FROM customers
ORDER BY amount_spent DESC
LIMIT 10;
#!/bin/bash
# Script to create latest swagger spec.
set -o errexit
set -o nounset
set -o pipefail

OS_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${OS_ROOT}/hack/util.sh"
source "${OS_ROOT}/hack/lib/util/environment.sh"
os::log::install_errexit

# On exit, tear down the openshift server; on failure, dump the log tail.
function cleanup()
{
    out=$?
    cleanup_openshift
    if [ $out -ne 0 ]; then
        echo "[FAIL] !!!!! Generate Failed !!!!"
        echo
        tail -100 "${LOG_DIR}/openshift.log"
        echo
        echo -------------------------------------
        echo
    fi
    exit $out
}

trap "exit" INT TERM
trap "cleanup" EXIT

export ALL_IP_ADDRESSES=127.0.0.1
export SERVER_HOSTNAME_LIST=127.0.0.1
export API_BIND_HOST=127.0.0.1
export API_PORT=38443
export ETCD_PORT=34001
export ETCD_PEER_PORT=37001
os::util::environment::setup_all_server_vars "generate-swagger-spec/"
reset_tmp_dir
configure_os_server

SWAGGER_SPEC_REL_DIR=${1:-""}
SWAGGER_SPEC_OUT_DIR="${OS_ROOT}/${SWAGGER_SPEC_REL_DIR}/api/swagger-spec"
mkdir -p "${SWAGGER_SPEC_OUT_DIR}" || true
SWAGGER_API_PATH="${MASTER_ADDR}/swaggerapi/"

# Start openshift
start_os_master

echo "Updating ${SWAGGER_SPEC_OUT_DIR}:"

# Use arrays with "${arr[@]}" expansion everywhere: the old
# `for endpoint in $ENDPOINTS` expands only the FIRST element of a bash
# array, silently dropping any endpoint added after v1.
ENDPOINT_TYPES=(oapi api)
for type in "${ENDPOINT_TYPES[@]}"
do
    ENDPOINTS=(v1)
    for endpoint in "${ENDPOINTS[@]}"
    do
        echo "Updating ${SWAGGER_SPEC_OUT_DIR}/${type}-${endpoint}.json from ${SWAGGER_API_PATH}${type}/${endpoint}..."
        curl -w "\n" "${SWAGGER_API_PATH}${type}/${endpoint}" > "${SWAGGER_SPEC_OUT_DIR}/${type}-${endpoint}.json"
        # Rewrite the test port back to the default port in the spec.
        os::util::sed 's|https://127.0.0.1:38443|https://127.0.0.1:8443|g' "${SWAGGER_SPEC_OUT_DIR}/${type}-${endpoint}.json"
    done
done
echo "SUCCESS"
|
<html>
<head>
  <title>Zodiac Finder</title>
  <script>
    // Returns the western zodiac sign for an ISO "YYYY-MM-DD" date string
    // (the value format produced by <input type="date">). The name
    // parameter is unused in the calculation but kept for compatibility.
    // Fixes the original stub, which returned an undeclared `zodiacSign`
    // variable and therefore threw a ReferenceError.
    const calculateZodiacSign = (name, dob) => {
      if (!dob) return "Unknown";
      const [, month, day] = dob.split("-").map(Number);
      // Last day of the sign that carries over into each month (Jan..Dec).
      const cutoffs = [19, 18, 20, 19, 20, 20, 22, 22, 22, 22, 21, 21];
      // Sign in effect at the start of each month (Jan..Dec).
      const signs = [
        "Capricorn", "Aquarius", "Pisces", "Aries", "Taurus", "Gemini",
        "Cancer", "Leo", "Virgo", "Libra", "Scorpio", "Sagittarius"
      ];
      // Past the cutoff the date belongs to the next sign; December wraps
      // around to Capricorn via month % 12.
      return day > cutoffs[month - 1] ? signs[month % 12] : signs[month - 1];
    }
    // Reads the form inputs, computes the sign and renders the result.
    const getInputs = () => {
      const name = document.getElementById('name').value;
      const dob = document.getElementById('dob').value;
      const sign = calculateZodiacSign(name, dob);
      document.getElementById('zodiac-sign').innerHTML = `Your zodiac sign is ${sign}`;
    }
  </script>
</head>
<body>
  <h1>Zodiac Finder</h1>
  <label>Name:
    <input type="text" id="name" name="name"/>
  </label>
  <label>Date of Birth:
    <input type="date" id="dob" name="dob"/>
  </label>
  <button onclick="getInputs()">Submit</button>
  <h2 id="zodiac-sign"></h2>
</body>
</html>
# Build the Cython/C extensions for the post-processing modules in place.
# Abort on the first failure: without this, a failed `cd` would run the
# next build step in the wrong directory.
set -e

cd ./models/post_processing/pa/
python setup.py build_ext --inplace
cd ../pse/
python setup.py build_ext --inplace
# pse_v2 build is currently disabled.
#cd ../pse_v2/
#python setup.py build_ext --inplace
cd ../../../
#!/usr/bin/bash
# Copyright (c) 2021. Huawei Technologies Co.,Ltd.ALL rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more detaitest -f.
# #############################################
# @Author : huangrong
# @Contact : 1820463064@qq.com
# @Date : 2020/10/23
# @License : Mulan PSL v2
# @Desc : Test man-db-cache-update.service restart
# #############################################
source "../common/common_lib.sh"

# Preparation: start the service once so its state is known.
function pre_test() {
    LOG_INFO "Start environmental preparation."
    systemctl start man-db-cache-update.service
    LOG_INFO "End of environmental preparation!"
}

# Verify the oneshot service ends up 'inactive (dead)' after it runs.
function run_test() {
    LOG_INFO "Start testing..."
    test_oneshot man-db-cache-update.service 'inactive (dead)'
    LOG_INFO "Finish test!"
}

main "$@"
|
"use strict";

// Application singleton for the recipe-converter page: caches DOM nodes,
// wires up event handlers, and calls the backend conversion endpoints.
var App = {
  // DOM elements looked up once at script load; all later access goes
  // through App.components.
  components: {
    btnConvert: document.getElementById("btn-convert"),
    btnClear: document.getElementById("btn-clear"),
    btnCopy: document.getElementById("btn-copy"),
    btnHelp: document.getElementById("btn-help"),
    btnHelpClose: document.querySelectorAll(".btn-help-close"),
    btnReaderView: document.getElementById("btn-reader-view"),
    btnReaderViewClose: document.querySelectorAll(".btn-reader-view-close"),
    txtInputRecipe: document.getElementById("input-recipe"),
    txtInputMultiplier: document.getElementById("input-multiplier"),
    txtReaderView: document.getElementById("reader-view-content"),
    txtOutputRecipe: document.getElementById("output-recipe"),
    modalReaderView: document.getElementById("reader-view"),
    modalHelp: document.getElementById("modal-help")
  },
  // Relative backend endpoint paths.
  routes: {
    convert: "convert",
    ingredientFromUrl: "ingredients_from_url"
  },
  init: function() {
    App.addEventListeners();
  },
  // Attach all click handlers once at startup.
  addEventListeners: function() {
    App.components.btnConvert.addEventListener("click", App.onConvertClicked);
    App.components.btnClear.addEventListener("click", App.onClearClicked);
    App.components.btnCopy.addEventListener("click", App.onCopyClicked);
    App.components.btnHelp.addEventListener("click", App.onHelpClicked);
    App.components.btnReaderView.addEventListener("click", App.onReaderViewClicked);
    App.components.btnHelpClose.forEach(item => {
      item.addEventListener("click", App.onHelpCloseClicked)
    });
    App.components.btnReaderViewClose.forEach(item => {
      item.addEventListener("click", App.onReaderViewCloseClicked)
    });
  },
  // POSTs the recipe text and multiplier to the backend and writes the
  // converted recipe into the output textarea.
  convertRecipe: function(recipe, multiplier) {
    return fetch(
      App.routes.convert,
      {
        headers: {'Content-Type': 'application/json'},
        method: "POST",
        body: JSON.stringify({ "data": recipe, "multiplier": multiplier })
      }
    )
    .then( response => response.json() )
    .then( function(response) {
      App.components.txtOutputRecipe.value = response["data"];
    });
  },
  // Appends instruction text after the converted ingredients.
  appendInstructions: function(instructions) {
    App.components.txtOutputRecipe.value = App.components.txtOutputRecipe.value.concat(
      "\n\n", instructions
    );
  },
  // Convert button: if the input looks like a URL, scrape ingredients
  // server-side first; otherwise treat the input as pasted recipe text.
  onConvertClicked: function() {
    if (App.components.txtInputRecipe.value.includes("http")) {
      // Disable buttons while the async fetch chain is in flight.
      App.components.btnConvert.disabled = true;
      App.components.btnClear.disabled = true;
      // Get ingredients from URL, then convert
      fetch(
        App.routes.ingredientFromUrl,
        {
          headers: {'Content-Type': 'application/json'},
          method: "POST",
          body: JSON.stringify({ "url": App.components.txtInputRecipe.value })
        }
      )
      .then( response => response.json() )
      .then(function (data) {
        App.components.txtInputRecipe.value = data["ingredients"] + "\n\n\n" + data["instructions"];
        App.convertRecipe(
          data["ingredients"], App.components.txtInputMultiplier.value
        ).then(function() {
          App.appendInstructions(data["instructions"])
        })
      }).catch(function () {
        alert("Parsing error: Website not supported for " + App.components.txtInputRecipe.value + "\n\nPlease manually copy the recipe into the input box.")
      }).then(function () {
        // Re-enable buttons whether the fetch succeeded or failed.
        App.components.btnConvert.disabled = false;
        App.components.btnClear.disabled = false;
      })
    }
    else {
      // Manual input convention: ingredients and instructions are
      // separated by a blank line pair (three newlines).
      var split_str = App.components.txtInputRecipe.value.split("\n\n\n")
      var ingredients = split_str[0]
      var instructions = split_str[1]
      console.debug(ingredients)
      console.debug(instructions)
      // Ingredients entered manually
      App.convertRecipe(
        ingredients, App.components.txtInputMultiplier.value
      )
      .then(function() {
        // Do this double replacement for formatting in case the final output has too many newlines
        // Happens when instructions already have two newlines in between them
        App.appendInstructions(instructions.replace(/\n/g, "\n\n").replace(/\n\s*\n/g, '\n\n'))
      })
    }
  },
  onClearClicked: function() {
    App.components.txtInputRecipe.value = "";
    App.components.txtInputRecipe.focus();
  },
  // Copy button: copy output, then flash "Copied!" for two seconds.
  onCopyClicked: function() {
    copyToClipboard(App.components.txtOutputRecipe.value);
    // Change button text for user feedback
    const originalText = App.components.btnCopy.innerHTML;
    App.components.btnCopy.innerHTML = "Copied!";
    setTimeout(function() {
      App.components.btnCopy.innerHTML = originalText;
    }, 2000)
  },
  onHelpClicked: function() {
    App.components.modalHelp.classList.add("active");
  },
  onHelpCloseClicked: function() {
    App.components.modalHelp.classList.remove("active");
  },
  // Reader view: mirror the output text into the reader-view modal.
  onReaderViewClicked: function() {
    App.components.txtReaderView.innerHTML = App.components.txtOutputRecipe.value;
    App.components.modalReaderView.classList.add("active")
  },
  onReaderViewCloseClicked: function() {
    App.components.modalReaderView.classList.remove("active")
  },
}
// Copies a string to the clipboard. Must be called from within an
// event handler such as click. May return false if it failed, but
// this is not always possible. Browser support for Chrome 43+,
// Firefox 42+, Safari 10+, Edge and Internet Explorer 10+.
// Internet Explorer: The clipboard feature may be disabled by
// an administrator. By default a prompt is shown the first
// time the clipboard is used (per session).
function copyToClipboard(text) {
    if (window.clipboardData && window.clipboardData.setData) {
        // Internet Explorer-specific code path to prevent textarea being shown while dialog is visible.
        return clipboardData.setData("Text", text);
    }
    else if (document.queryCommandSupported && document.queryCommandSupported("copy")) {
        // Fallback path: select the text inside a hidden textarea and use
        // the (legacy) execCommand("copy") API.
        var textarea = document.createElement("textarea");
        textarea.textContent = text;
        textarea.style.position = "fixed"; // Prevent scrolling to bottom of page in Microsoft Edge.
        document.body.appendChild(textarea);
        textarea.select();
        try {
            return document.execCommand("copy"); // Security exception may be thrown by some browsers.
        }
        catch (ex) {
            console.warn("Copy to clipboard failed.", ex);
            return false;
        }
        finally {
            // Always remove the temporary textarea, even on failure.
            document.body.removeChild(textarea);
        }
    }
}
// Bootstrap the app once the page has fully loaded.
window.addEventListener("load", function () {
    App.init();
});
|
import Foundation
typealias FHIRJSON = [String: Any]
/// Minimal FHIR Account model carrying only the fields exercised here.
class SwiftFHIRAccount {
    let id: String
    let status: String
    let type: String

    init(id: String, status: String, type: String) {
        self.id = id
        self.status = status
        self.type = type
    }

    /// Builds an account from a JSON dictionary.
    /// - Throws: `FHIRAccountError.invalidJSON` when any required key is
    ///   missing or is not a String.
    convenience init(json: FHIRJSON) throws {
        guard let id = json["id"] as? String,
            let status = json["status"] as? String,
            let type = json["type"] as? String else {
            throw FHIRAccountError.invalidJSON
        }
        self.init(id: id, status: status, type: type)
    }
}
/// Errors thrown while decoding FHIR resources from JSON.
enum FHIRAccountError: Error {
    case invalidJSON
}
/// Reads and parses a JSON file from disk into a FHIRJSON dictionary.
/// Replaces the previous placeholder that always returned an empty
/// dictionary with the implementation the original comment sketched.
///
/// - Parameter filename: Path to the JSON file on disk.
/// - Returns: The decoded top-level JSON object.
/// - Throws: Any file-reading/parsing error, or
///   `FHIRAccountError.invalidJSON` when the top-level JSON value is
///   not a dictionary.
func readJSONFile(_ filename: String) throws -> FHIRJSON {
    let data = try Data(contentsOf: URL(fileURLWithPath: filename))
    guard let json = try JSONSerialization.jsonObject(with: data, options: []) as? FHIRJSON else {
        throw FHIRAccountError.invalidJSON
    }
    return json
}
/// Convenience: reads a JSON file and decodes it into an account.
func instantiateFrom(filename: String) throws -> SwiftFHIRAccount {
    return try instantiateFrom(json: try readJSONFile(filename))
}

/// Decodes a JSON dictionary into an account.
func instantiateFrom(json: FHIRJSON) throws -> SwiftFHIRAccount {
    return try SwiftFHIRAccount(json: json)
}
/// Placeholder test entry point — TODO(review): add assertions.
func testAccount1() {
    // Test the implementation here
}
/// <summary>
/// Sorts the given list of integers in place, in ascending order, using
/// the list's default comparer.
/// </summary>
/// <param name="numbers">The list to sort; it is modified in place.</param>
public void SortListNumberAscending(List<int> numbers)
{
    numbers.Sort();
}
//#####################################################################
// Copyright 2002-2005, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform_Boundaries/BOUNDARY_UNIFORM.h>
#include <PhysBAM_Tools/Ordinary_Differential_Equations/RUNGEKUTTA.h>
#include <PhysBAM_Geometry/Level_Sets/LEVELSET_UTILITIES.h>
#include <PhysBAM_Dynamics/Level_Sets/LEVELSET_ADVECTION_3D.h>
using namespace PhysBAM;
//#####################################################################
// Function Euler_Step
//#####################################################################
// Advances phi by one Euler step over the entire grid interior by
// delegating to Euler_Step_Subset with the full index range.
template<class T> void LEVELSET_ADVECTION_3D<T>::
Euler_Step(const ARRAY<T,FACE_INDEX<TV::dimension> >& face_velocity,const T dt,const T time,const int number_of_ghost_cells)
{
    GRID<TV>& grid=levelset->grid;
    Euler_Step_Subset(face_velocity,1,grid.counts.x,1,grid.counts.y,1,grid.counts.z,dt,time,number_of_ghost_cells);
}
//#####################################################################
// Function Euler_Step_Cell
//#####################################################################
// Advances phi by one Euler step on a single cell (i,j,ij), expressed
// as a degenerate one-cell subset.
template<class T> void LEVELSET_ADVECTION_3D<T>::
Euler_Step_Cell(const ARRAY<T,FACE_INDEX<TV::dimension> >& face_velocity,int i,int j,int ij,const T dt,const T time,const int number_of_ghost_cells)
{
    Euler_Step_Subset(face_velocity,i,i,j,j,ij,ij,dt,time,number_of_ghost_cells);
}
//#####################################################################
// Function Euler_Step_Subset
//#####################################################################
// Advances phi by one Euler step on the cell subset
// [m_start,m_end] x [n_start,n_end] x [mn_start,mn_end]: optional
// curvature motion first (based on phi^n), then the advection term,
// then the boundary condition at time+dt.
// Changes vs. original: removed a stray DEBUG_UTILITIES::Debug_Breakpoint()
// left in the hot path, and a duplicated assert(grid.Is_MAC_Grid()).
template<class T> void LEVELSET_ADVECTION_3D<T>::
Euler_Step_Subset(const ARRAY<T,FACE_INDEX<TV::dimension> >& face_velocity,int m_start,int m_end,int n_start,int n_end,int mn_start,int mn_end,const T dt,const T time,const int number_of_ghost_cells)
{
    GRID<TV>& grid=levelset->grid;
    T_BOUNDARY_SCALAR* boundary=levelset->boundary;
    T_ARRAYS_SCALAR& phi=levelset->phi;
    assert(grid.Is_MAC_Grid() && advection); // advection scheme must be configured
    ARRAY<T,VECTOR<int,3> > phi_ghost(grid.Domain_Indices(number_of_ghost_cells));boundary->Fill_Ghost_Cells(grid,phi,phi_ghost,dt,time,number_of_ghost_cells);
    // Flat-index strides: +yz steps x, +z steps y, +1 steps z.
    int z=phi_ghost.counts.z,yz=phi_ghost.counts.y*z;
    if(levelset->curvature_motion){ // do curvature first - based on phi^n
        T one_over_two_dx=1/(2*grid.dX.x),one_over_two_dy=1/(2*grid.dX.y),one_over_two_dz=1/(2*grid.dX.z);
        bool curvature_defined=levelset->curvature!=0;levelset->Compute_Curvature(time);
        TV_INT i;
        // Central-difference gradient; shrink phi along sigma*kappa*|grad phi|.
        for(i.x=m_start;i.x<=m_end;i.x++) for(i.y=n_start;i.y<=n_end;i.y++) for(i.z=mn_start;i.z<=mn_end;i.z++){
            int index=phi_ghost.Standard_Index(i);
            T phix=(phi_ghost.array(index+yz)-phi_ghost.array(index-yz))*one_over_two_dx,
                phiy=(phi_ghost.array(index+z)-phi_ghost.array(index-z))*one_over_two_dy,
                phiz=(phi_ghost.array(index+1)-phi_ghost.array(index-1))*one_over_two_dz;
            phi(i)-=dt*levelset->sigma*(*levelset->curvature)(i)*sqrt(sqr(phix)+sqr(phiy)+sqr(phiz));}
        boundary->Fill_Ghost_Cells(grid,phi,phi_ghost,dt,time,number_of_ghost_cells);
        // Free the curvature field only if this call allocated it.
        if(!curvature_defined){delete levelset->curvature;levelset->curvature=0;}}
    advection->Update_Advection_Equation_Cell(grid,phi,phi_ghost,face_velocity,*boundary,dt,time);
    boundary->Apply_Boundary_Condition(grid,phi,time+dt);
}
//#####################################################################
// Functions Reinitialize
//#####################################################################
// Reinitializes phi toward a signed distance function by integrating the
// reinitialization equation in pseudo-time with Runge-Kutta, using a
// smeared sign of the initial phi as the propagation speed.
template<class T> void LEVELSET_ADVECTION_3D<T>::
Reinitialize(const int time_steps,const T time)
{
    GRID<TV>& grid=levelset->grid;
    T_ARRAYS_SCALAR& phi=levelset->phi;
    int m=grid.counts.x,n=grid.counts.y,mn=grid.counts.z;
    ARRAY<T,VECTOR<int,3> > sign_phi(1,m,1,n,1,mn); // smeared out sign function
    T epsilon=sqr(grid.dX.Max()); // smoothing scale: S = phi/sqrt(phi^2+eps)
    TV_INT i;
    for(i.x=1;i.x<=m;i.x++) for(i.y=1;i.y<=n;i.y++) for(i.z=1;i.z<=mn;i.z++) sign_phi(i)=phi(i)/sqrt(sqr(phi(i))+epsilon);
    T dt=reinitialization_cfl*grid.min_dX;
    RUNGEKUTTA<ARRAY<T,VECTOR<int,3> > > rungekutta(phi);
    rungekutta.Set_Grid_And_Boundary_Condition(grid,*levelset->boundary);
    rungekutta.Set_Order(reinitialization_runge_kutta_order);
    rungekutta.Set_Time(time);
    rungekutta.Pseudo_Time();
    // Each outer step runs one full RK cycle of Euler substeps.
    for(int k=1;k<=time_steps;k++){
        rungekutta.Start(dt);
        for(int kk=1;kk<=rungekutta.order;kk++){Euler_Step_Of_Reinitialization(sign_phi,dt,time);rungekutta.Main();}
    }
}
//#####################################################################
// Functions Euler_Step_Of_Reinitialization
//#####################################################################
// One pseudo-time Euler step of the reinitialization equation
//   phi_t + S(phi0) * (|grad phi| - 1) = 0
// using one-sided HJ-WENO (spatial order 5) or HJ-ENO (orders 1-3)
// derivatives with Godunov upwinding per axis.
template<class T> void LEVELSET_ADVECTION_3D<T>::
Euler_Step_Of_Reinitialization(const ARRAY<T,VECTOR<int,3> >& sign_phi,const T dt,const T time)
{
    GRID<TV>& grid=levelset->grid;
    T_BOUNDARY_SCALAR* boundary=levelset->boundary;
    T_ARRAYS_SCALAR& phi=levelset->phi;
    TV_INT i;int m=grid.counts.x,n=grid.counts.y,mn=grid.counts.z;T dx=grid.dX.x,dy=grid.dX.y,dz=grid.dX.z;
    int ghost_cells=3;
    ARRAY<T,VECTOR<int,3> > phi_ghost(grid.Domain_Indices(ghost_cells));boundary->Fill_Ghost_Cells(grid,phi,phi_ghost,dt,time,ghost_cells);
    ARRAY<T,VECTOR<int,3> > rhs(1,m,1,n,1,mn);
    // x-direction: gather 1D lines, compute one-sided derivatives, upwind.
    ARRAY<T,VECTOR<int,1> > phi_1d_x(1-ghost_cells,m+ghost_cells),phix_minus(1,m),phix_plus(1,m);
    for(i.y=1;i.y<=n;i.y++) for(i.z=1;i.z<=mn;i.z++){
        for(i.x=1-ghost_cells;i.x<=m+ghost_cells;i.x++) phi_1d_x(i.x)=phi_ghost(i);
        if(reinitialization_spatial_order == 5) HJ_WENO(m,dx,phi_1d_x,phix_minus,phix_plus);
        else HJ_ENO(reinitialization_spatial_order,m,dx,phi_1d_x,phix_minus,phix_plus);
        for(i.x=1;i.x<=m;i.x++)
            if(LEVELSET_UTILITIES<T>::Sign(phi(i)) < 0) rhs(i)=sqr(max(-phix_minus(i.x),phix_plus(i.x),(T)0));
            else rhs(i)=sqr(max(phix_minus(i.x),-phix_plus(i.x),(T)0));}
    // y-direction: accumulate into rhs.
    ARRAY<T,VECTOR<int,1> > phi_1d_y(1-ghost_cells,n+ghost_cells),phiy_minus(1,n),phiy_plus(1,n);
    for(i.x=1;i.x<=m;i.x++) for(i.z=1;i.z<=mn;i.z++){
        for(i.y=1-ghost_cells;i.y<=n+ghost_cells;i.y++) phi_1d_y(i.y)=phi_ghost(i);
        if(reinitialization_spatial_order == 5) HJ_WENO(n,dy,phi_1d_y,phiy_minus,phiy_plus);
        else HJ_ENO(reinitialization_spatial_order,n,dy,phi_1d_y,phiy_minus,phiy_plus);
        for(i.y=1;i.y<=n;i.y++)
            if(LEVELSET_UTILITIES<T>::Sign(phi(i)) < 0) rhs(i)+=sqr(max(-phiy_minus(i.y),phiy_plus(i.y),(T)0));
            else rhs(i)+=sqr(max(phiy_minus(i.y),-phiy_plus(i.y),(T)0));}
    // z-direction: accumulate into rhs.
    ARRAY<T,VECTOR<int,1> > phi_1d_z(1-ghost_cells,mn+ghost_cells),phiz_minus(1,mn),phiz_plus(1,mn);
    for(i.x=1;i.x<=m;i.x++) for(i.y=1;i.y<=n;i.y++){
        for(i.z=1-ghost_cells;i.z<=mn+ghost_cells;i.z++) phi_1d_z(i.z)=phi_ghost(i);
        if(reinitialization_spatial_order == 5) HJ_WENO(mn,dz,phi_1d_z,phiz_minus,phiz_plus);
        else HJ_ENO(reinitialization_spatial_order,mn,dz,phi_1d_z,phiz_minus,phiz_plus);
        for(i.z=1;i.z<=mn;i.z++)
            if(LEVELSET_UTILITIES<T>::Sign(phi(i)) < 0) rhs(i)+=sqr(max(-phiz_minus(i.z),phiz_plus(i.z),(T)0));
            else rhs(i)+=sqr(max(phiz_minus(i.z),-phiz_plus(i.z),(T)0));}
    // Update phi; if a cell's sign would flip across the interface, clamp it
    // to a small signed value instead of letting the interface move.
    for(i.x=1;i.x<=m;i.x++) for(i.y=1;i.y<=n;i.y++) for(i.z=1;i.z<=mn;i.z++){
        phi(i)-=dt*sign_phi(i)*(sqrt(rhs(i))-1);
        if(LEVELSET_UTILITIES<T>::Interface(phi_ghost(i),phi(i))) phi(i)=LEVELSET_UTILITIES<T>::Sign(phi_ghost(i))*levelset->small_number*grid.min_dX;}
    boundary->Apply_Boundary_Condition(grid,phi,time); // time not incremented, pseudo-time
}
//#####################################################################
template class LEVELSET_ADVECTION_3D<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class LEVELSET_ADVECTION_3D<double>;
#endif
|
<reponame>osak/mikutterd<filename>core/mui/gtk_webicon.rb<gh_stars>0
# -*- coding: utf-8 -*-
# /(^o^)\
require File.expand_path(File.dirname(__FILE__+'/utils'))
miquire :core, 'environment', 'serialthread', 'skin'
miquire :mui, 'web_image_loader'
require 'gtk2'
require 'observer'
# Web上の画像をレンダリングできる。
# レンダリング中は読み込み中の代替イメージが表示され、ロードが終了したら指定された画像が表示される。
# メモリキャッシュ、ストレージキャッシュがついてる。
module Gtk
  # An image widget that can render an image loaded from the web.
  # While the download is in progress a placeholder image is shown; once
  # loading finishes the requested image replaces it. Backed by in-memory
  # and on-disk caches (via Gdk::WebImageLoader).
  class WebIcon < Image
    DEFAULT_RECTANGLE = Gdk::Rectangle.new(0, 0, 48, 48)

    include Observable

    # ==== Args
    # [url] URL or local path of the image (String)
    # [rect] image size (Gdk::Rectangle) or width in px
    # [height] image height in px (only used when rect is a width)
    def initialize(url, rect = DEFAULT_RECTANGLE, height = nil)
      rect = Gdk::Rectangle.new(0, 0, rect, height) if height
      if(Gdk::WebImageLoader.is_local_path?(url))
        url = File.expand_path(url)
        if FileTest.exist?(url)
          # Local file: load directly; fall back to the not-found image on error.
          super begin
            Gdk::Pixbuf.new(url, rect.width, rect.height)
          rescue
            Gdk::WebImageLoader.notfound_pixbuf(rect.width, rect.height) end
        else
          super(Gdk::WebImageLoader.notfound_pixbuf(rect.width, rect.height)) end
      else
        # Remote URL: show the loader's placeholder now; the block fires when
        # the download completes and swaps in the real pixbuf (unless the
        # widget was destroyed in the meantime), notifying observers.
        super(Gdk::WebImageLoader.pixbuf(url, rect.width, rect.height) { |pixbuf, success|
          unless destroyed?
            self.pixbuf = pixbuf
            self.changed
            self.notify_observers end }) end end
  end
end
|
#!/bin/bash
set -e
TEST_RUN=ALL
PAUSE=no
COVERAGE=yes
CPUPROFILE=no
MEMPROFILE=no
MICRO_OSD_PATH="/micro-osd.sh"
BUILD_TAGS=""
RESULTS_DIR=/results
CEPH_CONF=/tmp/ceph/ceph.conf
# Default env vars that are not currently changed by this script
# but can be used to change the test behavior:
# GO_CEPH_TEST_MDS_NAME
CLI="$(getopt -o h --long test-run:,test-pkg:,pause,cpuprofile,memprofile,no-cover,micro-osd:,wait-for:,results:,ceph-conf:,mirror:,help -n "${0}" -- "$@")"
eval set -- "${CLI}"
while true ; do
case "${1}" in
--test-pkg)
TEST_PKG="${2}"
shift
shift
;;
--test-run)
TEST_RUN="${2}"
shift
shift
;;
--pause)
PAUSE=yes
shift
;;
--micro-osd)
MICRO_OSD_PATH="${2}"
shift
shift
;;
--wait-for)
WAIT_FILES="${2}"
shift
shift
;;
--results)
RESULTS_DIR="${2}"
shift
shift
;;
--ceph-conf)
CEPH_CONF="${2}"
shift
shift
;;
--mirror)
MIRROR_CONF="${2}"
shift
shift
;;
--cpuprofile)
CPUPROFILE=yes
shift
;;
--memprofile)
MEMPROFILE=yes
shift
;;
--no-cover)
COVERAGE=no
shift
;;
-h|--help)
echo "Options:"
echo " --test-run=VALUE Run selected test or ALL, NONE"
echo " ALL is the default"
echo " --test-pkg=PKG Run only tests from PKG"
echo " --pause Sleep forever after tests execute"
echo " --micro-osd Specify path to micro-osd script"
echo " --wait-for=FILES Wait for files before starting tests"
echo " (colon separated, disables micro-osd)"
echo " --results=PATH Specify path to store test results"
echo " --ceph-conf=PATH Specify path to ceph configuration"
echo " --mirror=PATH Specify path to ceph conf of mirror"
echo " --cpuprofile Run tests with cpu profiling"
echo " --memprofile Run tests with mem profiling"
echo " --no-cover Disable code coverage profiling"
echo " -h|--help Display help text"
echo ""
exit 0
;;
--)
shift
break
;;
*)
echo "unknown option" >&2
exit 2
;;
esac
done
if [ -n "${CEPH_VERSION}" ]; then
BUILD_TAGS="${CEPH_VERSION}"
fi
if [ -n "${USE_PTRGUARD}" ]; then
BUILD_TAGS+=",ptrguard"
fi
if [ -n "${BUILD_TAGS}" ]; then
BUILD_TAGS="-tags ${BUILD_TAGS}"
fi
show() {
echo "*** running:" "$@"
"$@"
}
wait_for_files() {
for file in "$@" ; do
echo -n "*** waiting for $file ..."
while ! [[ -f $file ]] ; do
sleep 1
done
echo "done"
done
}
test_failed() {
local pkg="${1}"
echo "*** ERROR: ${pkg} tests failed"
pause_if_needed
return 1
}
setup_mirroring() {
echo "Setting up mirroring..."
local CONF_A=${CEPH_CONF}
local CONF_B=${MIRROR_CONF}
ceph -c $CONF_A osd pool create rbd 8
ceph -c $CONF_B osd pool create rbd 8
rbd -c $CONF_A pool init
rbd -c $CONF_B pool init
rbd -c $CONF_A mirror pool enable rbd image
rbd -c $CONF_B mirror pool enable rbd image
rbd -c $CONF_A mirror pool peer bootstrap create --site-name ceph_a rbd > token
rbd -c $CONF_B mirror pool peer bootstrap import --site-name ceph_b rbd token
rbd -c $CONF_A rm mirror_test 2>/dev/null || true
rbd -c $CONF_B rm mirror_test 2>/dev/null || true
(echo "Mirror Test"; dd if=/dev/zero bs=1 count=500K) | rbd -c $CONF_A import - mirror_test
rbd -c $CONF_A mirror image enable mirror_test snapshot
echo -n "Waiting for mirroring activation..."
while ! rbd -c $CONF_A mirror image status mirror_test \
| grep -q "state: \+up+replaying" ; do
sleep 1
done
echo "done"
rbd -c $CONF_A mirror image snapshot mirror_test
echo -n "Waiting for mirror sync..."
while ! rbd -c $CONF_B export mirror_test - 2>/dev/null | grep -q "Mirror Test" ; do
sleep 1
done
echo " mirroring functional!"
}
# Runs `go vet` and the test suite for one package, honoring the
# --test-pkg / --test-run / coverage / profiling settings. Returns
# non-zero if either vet or the tests failed.
test_pkg() {
    local pkg="${1}"
    # Honor --test-pkg filtering: skip everything but the selected package.
    if [[ "${TEST_PKG}" && "${TEST_PKG}" != "${pkg}" ]]; then
        return 0
    fi
    # run go vet and capture the result for the package, but still execute the
    # test suite anyway
    show go vet ${BUILD_TAGS} "./${pkg}"
    ret=$?
    # disable caching of tests results
    testargs=("-count=1"\
        ${BUILD_TAGS})
    if [[ ${TEST_RUN} != ALL ]]; then
        testargs+=("-run" "${TEST_RUN}")
    fi
    if [[ ${COVERAGE} = yes ]]; then
        testargs+=(\
            "-covermode=count" \
            "-coverprofile=${pkg}.cover.out" \
            "-coverpkg=${P}/${pkg}")
    fi
    if [[ ${CPUPROFILE} = yes ]]; then
        testargs+=("-cpuprofile" "${pkg}.cpu.out")
    fi
    if [[ ${MEMPROFILE} = yes ]]; then
        testargs+=("-memprofile" "${pkg}.mem.out")
    fi
    show go test -v "${testargs[@]}" "./${pkg}"
    ret=$(($?+${ret}))
    # Merge the per-package profile into the whole-module coverage file only
    # when coverage was collected: with --no-cover the .cover.out file is
    # never written, and the unconditional grep failed on the missing file.
    if [[ ${COVERAGE} = yes ]]; then
        grep -v "^mode: count" "${pkg}.cover.out" >> "cover.out"
    fi
    return ${ret}
}
pre_all_tests() {
# Prepare Go code
go get -t -v ${BUILD_TAGS} ./...
diff -u <(echo -n) <(gofmt -d -s .)
make implements
# Reset whole-module coverage file
echo "mode: count" > "cover.out"
}
post_all_tests() {
if [[ ${COVERAGE} = yes ]]; then
mkdir -p "${RESULTS_DIR}/coverage"
show go tool cover -html=cover.out -o "${RESULTS_DIR}/coverage/go-ceph.html"
fi
if [[ ${COVERAGE} = yes ]] && command -v castxml ; then
mkdir -p "${RESULTS_DIR}/coverage"
show ./implements --list \
--report-json "${RESULTS_DIR}/implements.json" \
--report-text "${RESULTS_DIR}/implements.txt" \
cephfs rados rbd
# output the brief summary info onto stdout
grep '^[A-Z]' "${RESULTS_DIR}/implements.txt"
fi
}
test_go_ceph() {
mkdir -p /tmp/ceph
if ! [[ ${WAIT_FILES} ]]; then
show "${MICRO_OSD_PATH}" /tmp/ceph
fi
export CEPH_CONF
if [[ ${TEST_RUN} == NONE ]]; then
echo "skipping test execution"
return 0
fi
P=github.com/ceph/go-ceph
pkgs=(\
"cephfs" \
"cephfs/admin" \
"internal/callbacks" \
"internal/cutil" \
"internal/errutil" \
"internal/retry" \
"rados" \
"rbd" \
)
pre_all_tests
if [[ ${WAIT_FILES} ]]; then
wait_for_files ${WAIT_FILES//:/ }
fi
if [[ ${MIRROR_CONF} && ${CEPH_VERSION} != nautilus ]]; then
setup_mirroring
export MIRROR_CONF
fi
for pkg in "${pkgs[@]}"; do
test_pkg "${pkg}" || test_failed "${pkg}"
done
post_all_tests
}
pause_if_needed() {
if [[ ${PAUSE} = yes ]]; then
echo "*** pausing execution"
sleep infinity
fi
}
test_go_ceph
pause_if_needed
|
#ifndef _WKT_ENTITY_MANAGER_H
#define _WKT_ENTITY_MANAGER_H
#include "ecs/Entity.h"
#include <unordered_map>
#include <vector>
namespace wkt {
namespace managers
{
// Owns all live entities, keyed by their unique id. Deletion is deferred:
// kill() only records the id and clean() performs the actual removal, so
// entities can be killed safely while iterating.
class EntityManager final
{
public:
    using iterator = std::unordered_map<wkt::ecs::Entity::EntityUniqueID, wkt::ecs::Entity>::iterator;

private:
    using Entity = wkt::ecs::Entity;

public:
    EntityManager() = default;
    // Non-copyable (single owner of the entity map), but movable.
    EntityManager(const EntityManager&) = delete;
    EntityManager(EntityManager&&) = default;
    ~EntityManager() = default;

    EntityManager& operator=(const EntityManager&) = delete;
    EntityManager& operator=(EntityManager&&) = default;

public:
    // Create a new entity in place and return a reference to it.
    Entity& make();
    // Schedule the entity with the given id for removal on the next clean().
    void kill(Entity::EntityUniqueID id);
    void kill(const Entity& en);
    // Take ownership of an externally constructed entity.
    void gain(Entity&& en);
    // Lookup by id; presumably returns nullptr when absent — confirm in the
    // implementation file.
    Entity* operator[](Entity::EntityUniqueID id);
    // Erase all entities previously passed to kill().
    void clean();

    iterator begin() { return this->entities.begin(); }
    iterator end() { return this->entities.end(); }

private:
    std::vector<Entity::EntityUniqueID> toBeKilled;
    std::unordered_map<Entity::EntityUniqueID, Entity> entities;
};
}}
#endif |
<reponame>chrstnbwnkl/arcgis_batch_geocoder
import json
from requests import get, post
from dotenv import dotenv_values
class Geocoder:
    """Batch geocoder for the ArcGIS World Geocoding REST API.

    Loads distinct user locations from a PostgreSQL ``users`` table, skips
    the ones already present in ``user_locations``, sends the remainder in
    batches to the ArcGIS ``geocodeAddresses`` endpoint and writes the
    results back to ``user_locations``.

    API docs: https://developers.arcgis.com/rest/geocode/api-reference/geocoding-geocode-addresses.htm

    Attributes
    ----------
    conn : connection
        psycopg2-style connection used for writes and existence checks.
    iter_conn : connection
        separate connection used to iterate over the source rows.
    """

    def __init__(self, conn=None, iter_conn=None, debug=True):
        self._conn = conn
        self._iter_conn = iter_conn
        if conn:
            self._cur = conn.cursor()
        if iter_conn:
            self._iter_cur = iter_conn.cursor()
        # Debug mode only loads a small sample of addresses (LIMIT 230).
        self._debug = debug
        # ArcGIS credentials come from a .env file (ArcGIS_user / ArcGIS_secret).
        self._env = dotenv_values()

    @property
    def conn(self):
        return self._conn

    @conn.setter
    def conn(self, value):
        self._conn = value
        self._cur = value.cursor()

    @property
    def iter_conn(self):
        return self._iter_conn

    @iter_conn.setter
    def iter_conn(self, value):
        self._iter_conn = value
        self._iter_cur = value.cursor()

    def _load_addresses(self):
        """Execute the source query on the iterator cursor.

        Selects distinct (long address name, location id) pairs from the
        JSONB ``location`` column of ``users``; rows are fetched lazily by
        the caller via ``self._iter_cur``.
        """
        querystring = """
        SELECT DISTINCT location->'location'->'additionalNames'->>'long'::varchar, (location->'location'->>'locationId')::int
        FROM users
        WHERE location->'location'->'additionalNames'->>'long'::varchar IS NOT null
        """
        # Idiomatic truthiness check instead of `== True`.
        if self._debug:
            querystring += " LIMIT 230"
        query = self._iter_cur.mogrify(querystring)
        return self._iter_cur.execute(query)

    def _filter_duplicates(self):
        """Return the source rows that have not been geocoded yet.

        Returns
        -------
        list
            (address string, location id) tuples absent from ``user_locations``.
        """
        filtered_locations = []
        self._load_addresses()
        while True:
            row = self._iter_cur.fetchone()
            # `is None` instead of `== None` (identity check for sentinel).
            if row is None:
                break
            if not self._exists(row):
                filtered_locations.append(row)
        return filtered_locations

    def _exists(self, row):
        """Return True if the location id in ``row`` is already geocoded."""
        # Fix: parameterized query instead of f-string interpolation —
        # SQL-injection safe and handles quoting/typing correctly.
        query = self._cur.mogrify(
            "SELECT exists (SELECT 1 FROM user_locations WHERE id = %s LIMIT 1);",
            (row[1],),
        )
        self._cur.execute(query)
        value = self._cur.fetchone()
        return value[0]

    def _make_location_objects(self, locations):
        """Turn (address, id) tuples into ArcGIS address record dicts.

        Parameters
        ----------
        locations : list
            (address string, location id) tuples

        Returns
        -------
        list of dict
            records in the shape the geocodeAddresses endpoint expects
        """
        return [{"attributes": {"OBJECTID": l_id, "SingleLine": l_str}} for l_str, l_id in locations]

    def _make_addresses(self, location_objects, max_batch_size):
        """Chunk address records into request payloads for the REST API.

        Parameters
        ----------
        location_objects : list of dict
            address records produced by ``_make_location_objects``
        max_batch_size : int
            maximum number of addresses per geocoding request

        Returns
        -------
        list of dict
            one ``{'addresses': <json string>, 'f': 'json'}`` payload per chunk
        """
        chunked = []
        for loc in self._split_list(location_objects, max_batch_size):
            data = {'addresses': {}, 'f': 'json'}
            records = {"records": loc}
            data["addresses"] = json.dumps(records)
            chunked.append(data)
        return chunked

    def _split_list(self, lst, n):
        """Yield successive n-sized chunks from lst."""
        for i in range(0, len(lst), n):
            yield lst[i:i + n]

    def _get_token(self, user, secret):
        """Generate an ArcGIS access token from app credentials.

        Parameters
        ----------
        user : str
            client id from the ArcGIS dev portal
        secret : str
            client secret from the ArcGIS dev portal

        Returns
        -------
        str
            token for the World Geocoding REST endpoint
        """
        # NOTE(review): the secret travels in the query string of a GET
        # request; it may end up in proxy/server logs — consider POSTing it.
        token_creator = """
        https://www.arcgis.com/sharing/oauth2/token?client_id={}&grant_type=client_credentials&client_secret={}&f=pjson
        """
        res = get(token_creator.format(user, secret))
        token_data = json.loads(res.content)
        return token_data["access_token"]

    def _make_request(self, payload):
        """POST one batch payload to the geocodeAddresses endpoint."""
        token = self._get_token(self._env["ArcGIS_user"], self._env["ArcGIS_secret"])
        params = {"token": token}
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        api_url = "https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/geocodeAddresses"
        r = post(url=api_url, params=params, data=payload, headers=headers)
        return r

    def _insert_user_location(self, ul):
        """Insert one geocoded location into the ``user_locations`` table.

        Parameters
        ----------
        ul : UserLocation
            a populated UserLocation instance
        """
        querystring_template = """
        INSERT INTO user_locations (location, geom, score, type, id) VALUES (%s, ST_SetSRID(ST_MakePoint(%s, %s),4326), %s, %s, %s)
        """
        query = self._cur.mogrify(querystring_template, (ul.loc_str, ul.geom[0], ul.geom[1], ul.score, ul.loc_type, ul.loc_id))
        self._cur.execute(query)
        self._conn.commit()
        return

    def geocode(self):
        """Run the full pipeline: filter, batch, geocode and persist."""
        locations = self._filter_duplicates()
        location_objects = self._make_location_objects(locations)
        payloads = self._make_addresses(location_objects, 200)
        for payload in payloads:
            res = self._make_request(payload)
            if res.status_code != 200:
                # Fix: the original printed an f-string with no placeholder;
                # include the status code so failures are diagnosable.
                print(f"Something went wrong! Status code: {res.status_code}")
                break
            data = json.loads(res.content)
            for geocoded_location in data["locations"]:
                u_loc = UserLocation()
                u_loc.loc_str = geocoded_location["attributes"]["LongLabel"]
                u_loc.geom = [geocoded_location["location"]["x"], geocoded_location["location"]["y"]]
                u_loc.score = geocoded_location["score"]
                u_loc.loc_type = geocoded_location["attributes"]["Type"]
                u_loc.loc_id = geocoded_location["attributes"]["ResultID"]
                self._insert_user_location(u_loc)
                print(f"Updated user location {u_loc.loc_str}")
class UserLocation:
    """Plain data holder for one geocoded user location.

    Attributes
    ----------
    loc_str : str
        human-readable address label
    geom : list
        [x, y] (longitude, latitude) coordinate pair
    score : number
        geocoder match score
    loc_type : str
        ArcGIS location type
    loc_id : int
        id of the source row in the users table
    """

    # The original class wrapped every field in a trivial property
    # getter/setter pair; plain attributes are the idiomatic equivalent
    # and keep the exact same read/write interface for callers.
    def __init__(self, loc_str=None, geom=None, score=None, loc_type=None, loc_id=None):
        self.loc_str = loc_str
        self.geom = geom
        self.score = score
        self.loc_type = loc_type
        self.loc_id = loc_id
import React from 'react';
import { graphql } from 'gatsby';
import Fuse from 'fuse.js';
import { useQueryParam } from 'gatsby-query-params';
import { useSiteMetadata } from '../hooks';
import Layout from '../components/Layout';
import Sidebar from '../components/Sidebar';
import Feed from '../components/Feed';
import Page from '../components/Page';
import SearchBox from '../components/SearchBox';
// Fuse.js configuration for the article search.
// threshold 0.3 => fairly strict fuzzy matching (0 = exact, 1 = anything).
// NOTE(review): recent Fuse.js versions spell the option `isCaseSensitive`;
// `caseSensitive` may be silently ignored — confirm against the installed
// version.
const SEARCH_OPTIONS = {
  threshold: 0.3,
  caseSensitive: false,
  // Fields of each GraphQL edge that are searchable.
  keys: [
    'node.title',
    'node.content',
    'node.published_at',
    'node.created_at',
    'node.updated_at',
    'node.slug',
    'node.category.name',
    'node.tags.name'
  ]
};
const SearchTemplate = ({ data }) => {
const { title: siteTitle, subtitle: siteSubtitle, socialMediaCard } = useSiteMetadata();
const { edges } = data.allStrapiArticle;
console.log(edges);
const q = useQueryParam('q', '');
const fuse = new Fuse(edges, SEARCH_OPTIONS);
const result = q ? fuse.search(q) : edges.map((e) => ({ item: e }));
const mainPage = (
<Page content={
<div>
<div className={'flex justify-center ml-0 mb-6'}>
<SearchBox q={q} />
</div>
{result.length === 0 && (
<>
<div className='flex justify-center'>
<img src={'/media/no_result.svg'} className={'w-32'}/>
</div>
<div className='my-4 text-center'>
記事が見つかりませんでした。
</div>
<Feed edges={edges} />
</>
)}
<Feed edges={result.map((r) => r.item)} />
</div>
}/>
);
const side = <Sidebar />;
return (
<Layout main={mainPage}
side={side}
socialImage={socialMediaCard.image}
title={siteTitle}
description={siteSubtitle} />
);
};
// Page query: all articles, newest-updated first, plus a per-tag count
// (group) used elsewhere in the layout. Gatsby injects the result as the
// `data` prop of SearchTemplate.
export const query = graphql`
  query SearchTemplate {
    allStrapiArticle
    (
      sort: { fields: updated_at, order: DESC }
    )
    {
      group(field: tags___name) {
        fieldValue
        totalCount
      }
      edges {
        node {
          title
          content
          published_at
          created_at
          updated_at
          slug
          socialImage {
            publicURL
          }
          category {
            id
            name
          }
          tags {
            id
            name
          }
        }
      }
    }
  }
`;

export default SearchTemplate;
|
<filename>frontend/src/routes.js<gh_stars>0
import React from 'react';
import { BrowserRouter, Route, Switch } from 'react-router-dom';
import {
Logon,
Register,
Profile,
NewIncidents
} from './pages';
const Routes = () => {
return (
<BrowserRouter>
<Switch>
<Route
path="/"
exact
component={Logon} />
<Route
path="/register"
exact
component={Register} />
<Route
path="/profile"
exact
component={Profile} />
<Route
path="/incidents/new"
exact
component={NewIncidents} />
</Switch>
</BrowserRouter>
);
};
export default Routes;
|
<gh_stars>1-10
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import { Base } from "./base.js";
import { DoubleSide } from "three/src/constants.js";
import { Mesh } from "three/src/objects/Mesh.js";
import { SpriteGeometry } from "../geometry/spritegeometry.js";
/**
 * Renders screen-aligned sprites. Builds one shared vertex/fragment shader
 * pair and splits the fragment stage into an edge (transparent) pass and a
 * fill (solid) pass for better z layering; both passes share the geometry.
 */
export class Sprite extends Base {
  constructor(renderer, shaders, options) {
    let f;
    super(renderer, shaders, options);
    let { uniforms } = options;
    const { material, position, sprite, map, combine, linear, color, mask, stpq, } = options;
    if (uniforms == null) {
      uniforms = {};
    }
    const hasStyle = uniforms.styleColor != null;

    this.geometry = new SpriteGeometry({
      items: options.items,
      width: options.width,
      height: options.height,
      depth: options.depth,
    });

    this._adopt(uniforms);
    this._adopt(this.geometry.uniforms);

    // Shared vertex shader
    const factory = shaders.material();
    const v = factory.vertex;
    v.pipe(this._vertexColor(color, mask));
    v.require(this._vertexPosition(position, material, map, 2, stpq));
    v.require(sprite);
    v.pipe("sprite.position", this.uniforms);
    v.pipe("project.position", this.uniforms);

    // Shared fragment shader
    factory.fragment = f = this._fragmentColor(hasStyle, material, color, mask, map, 2, stpq, combine, linear);

    // Split fragment into edge and fill pass for better z layering
    const edgeFactory = shaders.material();
    edgeFactory.vertex.pipe(v);
    edgeFactory.fragment.pipe(f);
    edgeFactory.fragment.pipe("fragment.transparent", this.uniforms);

    const fillFactory = shaders.material();
    fillFactory.vertex.pipe(v);
    fillFactory.fragment.pipe(f);
    fillFactory.fragment.pipe("fragment.solid", this.uniforms);

    const fillOpts = fillFactory.link({
      side: DoubleSide,
    });
    this.fillMaterial = this._material(fillOpts);

    const edgeOpts = edgeFactory.link({
      side: DoubleSide,
    });
    this.edgeMaterial = this._material(edgeOpts);

    this.fillObject = new Mesh(this.geometry, this.fillMaterial);
    this.edgeObject = new Mesh(this.geometry, this.edgeMaterial);

    this._raw(this.fillObject);
    this.fillObject.userData = fillOpts;
    this._raw(this.edgeObject);
    this.edgeObject.userData = edgeOpts;

    this.renders = [this.fillObject, this.edgeObject];
  }

  show(transparent, blending, order, depth) {
    // Edge pass always renders transparent; fill pass follows the caller.
    this._show(this.edgeObject, true, blending, order, depth);
    return this._show(this.fillObject, transparent, blending, order, depth);
  }

  dispose() {
    this.geometry.dispose();
    this.edgeMaterial.dispose();
    this.fillMaterial.dispose();
    // Fix: the original assigned `this.nreders` (typo), creating a junk
    // property and leaving `this.renders` pointing at disposed objects.
    this.renders =
      this.geometry =
      this.edgeMaterial =
      this.fillMaterial =
      this.edgeObject =
      this.fillObject =
        null;
    return super.dispose();
  }
}
|
<gh_stars>1-10
/*
* Copyright 2021 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.lib.nextstep.model.request;
import io.getlime.security.powerauth.lib.nextstep.model.entity.enumeration.CredentialAuthenticationMode;
import io.getlime.security.powerauth.lib.nextstep.model.enumeration.AuthMethod;
import lombok.Data;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import java.util.ArrayList;
import java.util.List;
/**
* Request object used for authenticating using a credential and an OTP.
*
* @author <NAME>, <EMAIL>
*/
@Data
public class CombinedAuthenticationRequest {

    /** Name of the credential definition to authenticate against. */
    @NotBlank
    @Size(min = 2, max = 256)
    private String credentialName;

    /** Identifier of the user being authenticated. */
    @NotBlank
    @Size(min = 1, max = 256)
    private String userId;

    /** The credential (e.g. password) value to verify. */
    @NotBlank
    @Size(min = 1, max = 256)
    private String credentialValue;

    // Null value is allowed, defaults to MATCH_EXACT
    private CredentialAuthenticationMode authenticationMode;

    /** Character positions to verify when using partial credential matching. */
    private List<Integer> credentialPositionsToVerify = new ArrayList<>();

    // Either otpId or operationId should be present
    @Size(min = 36, max = 36)
    private String otpId;

    @Size(min = 1, max = 256)
    private String operationId;

    /** The one-time password value to verify. */
    @NotBlank
    @Size(min = 1, max = 256)
    private String otpValue;

    // Operation ID is extracted from OTP record in case that otpId is sent
    private boolean updateOperation;

    // Authentication method is required only in case multiple methods are defined in Next Steps
    private AuthMethod authMethod;
}
|
package com.java.study.offer.chapter5;
import com.alibaba.fastjson.JSONArray;
/**
 * Counting sort specialised for ages in the range [0, 99]
 * (classic "sort employee ages" interview problem, O(n) time).
 */
public class AgeSort {

    public static void main(String[] args) {
        int[] age = new int[]{50, 24, 21, 23, 45, 7, 32, 23, 45, 65};
        int[] sortAge = sortAge(age);
        System.out.println(JSONArray.toJSONString(sortAge));
    }

    /**
     * Sorts the given ages in place using a counting sort.
     *
     * @param ageArray ages to sort; each value must be within [0, 99]
     * @return the same array, sorted ascending
     * @throws RuntimeException if any age is outside [0, 99]
     */
    private static int[] sortAge(int[] ageArray) {
        int oldestAge = 99;
        // Java zero-initialises int arrays, so no explicit fill loop needed.
        int[] timesOfAge = new int[oldestAge + 1];

        // Count occurrences of each age, validating the range.
        for (int i : ageArray) {
            if (i < 0 || i > oldestAge) {
                throw new RuntimeException("年龄超出范围");
            }
            timesOfAge[i] = timesOfAge[i] + 1;
        }

        // Write the ages back in ascending order.
        // Fix: the loop must include oldestAge itself (i <= oldestAge);
        // the original `i < oldestAge` silently dropped all 99-year-olds.
        int index = 0;
        for (int i = 0; i <= oldestAge; i++) {
            for (int j = 0; j < timesOfAge[i]; j++) {
                ageArray[index] = i;
                index++;
            }
        }
        return ageArray;
    }
}
|
#!/usr/bin/env bash
set -euo pipefail
cd `dirname $0`

# Decrypt a base64-encoded AES-256-CBC value using $PASSWORD as the raw key.
# NOTE(review): the IV is hard-coded and the key is the password's literal
# bytes rather than a derived key — this is obfuscation, not strong secrecy.
function dec(){
    echo $1 | openssl enc -a -d -aes-256-cbc -K $(printf "${PASSWORD}" | od -A n -t x1 | tr -d '\040\011\012\015') -iv $(printf "0937465827384759" | od -A n -t x1 | tr -d '\040\011\012\015')
}

# Encrypt a value with the same parameters (inverse of dec).
function enc(){
    echo $1 | openssl enc -a -e -aes-256-cbc -K $(printf "${PASSWORD}" | od -A n -t x1 | tr -d '\040\011\012\015') -iv $(printf "0937465827384759" | od -A n -t x1 | tr -d '\040\011\012\015')
}
# Defaults for all CLI-tunable settings.
DEBUG=""
FORCE_DEPLOY=false
APP_NAME="myproject"
ONLY_APP_NAME=""
BASE_PATH=""
BUILD_NUMBER=""
PLAYBOOK_ENV=prod
TAGS=""
# Fix: under `set -u` the later `[ -z "$PASSWORD" ]` aborted with an
# "unbound variable" error whenever --pass was not supplied; initialise it
# so the interactive prompt can be reached.
PASSWORD=""

# Parse --key=value style arguments.
while [ $# -gt 0 ]; do
    case "$1" in
        --pass=*)
            PASSWORD="${1#*=}"
            ;;
        --env=*)
            PLAYBOOK_ENV="${1#*=}"
            ;;
        --debug=*)
            DEBUG="-vvv"
            ;;
        --tags=*)
            TAGS="${1#*=}"
            ;;
        --app_name=*)
            APP_NAME="${1#*=}"
            ;;
        --only_app_name=*)
            ONLY_APP_NAME="only_app_name=${1#*=}"
            ;;
        --base_path=*)
            BASE_PATH="base_path=${1#*=}"
            ;;
        --force_deploy=*)
            FORCE_DEPLOY="${1#*=}"
            ;;
        --build_number=*)
            BUILD_NUMBER="build_number=${1#*=}"
            ;;
        *)
            printf "***************************\n"
            printf "* Error: Invalid argument.*\n"
            printf "***************************\n"
            exit 1
    esac
    shift
done

# Prompt for the password when it was not passed on the command line.
if [ -z "$PASSWORD" ]; then
    read -s -p "Enter Password: " PASSWORD
fi
if [ -z "$TAGS" ]; then
    echo ERROR: Please add some tags
    exit 1
fi
# Refresh the EC2 dynamic inventory cache, except for artifact-only runs
# which need no AWS inventory.
BOTO_SCRIPT="./inventory/boto/ec2.py --refresh-cache > /dev/null "
if [ "$TAGS" == "artifact" ]; then
    BOTO_SCRIPT="echo - No boto"
fi
echo
#Uncomment these lines to generate the decrypted keys
#echo $(enc "AKIAIOSFODNN7EXAMPLE")
#echo $(enc "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY")
#exit
# AWS credentials are stored encrypted in this file and decrypted with the
# deploy password via dec().
AWS_ACCESS_KEY_ID=$(dec "+Shz32R4hY0XusaYzVjht1CTiuXbBV+TKPazf04lCrI=")
AWS_SECRET_ACCESS_KEY=$(dec "h9tn4+S4Cj4d1dvuXZfNypses1yt9rb8jUA8sHKLLzQ4gGbdtR6H3h+FjcRC73Xb")
#echo $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY && exit
VAULT_FILE=vault_key

# Run the playbook with the given tags, exporting AWS credentials and
# passing all collected settings via --extra-vars.
function run-ansible(){
    export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} && \
    export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} && \
    eval $BOTO_SCRIPT && \
    ansible-playbook aws-playbook.yml -i inventory/boto/ec2.py \
    --extra-vars "env=${PLAYBOOK_ENV} encryption_pass=${PASSWORD} release=8 force_deploy=${FORCE_DEPLOY} aws_key_id=${AWS_ACCESS_KEY_ID} aws_secret_key=${AWS_SECRET_ACCESS_KEY} app_name=${APP_NAME} ${ONLY_APP_NAME} ansible_ssh_common_args='-o StrictHostKeyChecking=no' ${BASE_PATH} ${BUILD_NUMBER} " \
    $DEBUG --tags $1 --vault-password-file $VAULT_FILE
}

# NOTE(review): the password is written to disk in plain text for the
# duration of the run; it is removed below, but an aborted run leaves it.
echo $PASSWORD > $VAULT_FILE
printf "***************************\n"
printf "* Starting tags: $TAGS app_name: ${APP_NAME} \n"
printf "***************************\n"
run-ansible $TAGS
rm -rf $VAULT_FILE *-playbook.retry
|
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
echo "=============================================================================================================="
# Fix: "scipt" -> "script" in the usage banner.
echo "Please run the script as: "
echo "bash run_eval_cpu.sh ACLIMDB_DIR GLOVE_DIR CKPT_FILE"
echo "for example: bash run_eval_cpu.sh ./aclimdb ./glove_dir lstm-20_390.ckpt"
echo "=============================================================================================================="

# Positional arguments: dataset dir, glove dir, checkpoint file.
ACLIMDB_DIR=$1
GLOVE_DIR=$2
CKPT_FILE=$3

# Route MindSpore glog output into ./ms_log instead of stderr.
mkdir -p ms_log
CUR_DIR=`pwd`
export GLOG_log_dir=${CUR_DIR}/ms_log
export GLOG_logtostderr=0

# Run evaluation on CPU in the background; all output goes to log.txt.
python eval.py \
    --device_target="CPU" \
    --aclimdb_path=$ACLIMDB_DIR \
    --glove_path=$GLOVE_DIR \
    --preprocess=false \
    --preprocess_path=./preprocess \
    --ckpt_path=$CKPT_FILE > log.txt 2>&1 &
|
#!/bin/bash
# Run the resilience test suite via the shared pipeline include
# (arguments select: foxx=yes, storage engine=mmfiles, os=linux).
. Installation/Pipeline/include/test_resilience_FOXX_ENGINE_OS.inc yes mmfiles linux
|
#!/bin/bash
# Provision a classic (SoftLayer) VSI with Terraform and verify that its
# public HTTP endpoint responds, as a precondition for image capture.
set -e
set -o pipefail

# Terraform provider timeout and TF_VAR_* inputs, all sourced from the
# pipeline environment.
export IC_TIMEOUT=900
export TF_VAR_ibmcloud_api_key=$IBMCLOUD_API_KEY
export TF_VAR_softlayer_username=$SOFTLAYER_USERNAME
export TF_VAR_softlayer_api_key=$SOFTLAYER_API_KEY
export TF_VAR_ssh_public_key_file=$SSH_PUBLIC_KEY
export TF_VAR_ssh_private_key_file=$SSH_PRIVATE_KEY
export TF_VAR_classic_datacenter=$DATACENTER
export TF_VAR_prefix=$PREFIX

my_dir=$(dirname "$0")

# cleanup previous run
# (cd $my_dir/create-classic && rm -rf .terraform terraform.tfstate terraform.tfstate.backup)

# create VSI
(cd $my_dir/create-classic && terraform init && terraform apply --auto-approve)

# NOTE(review): without `terraform output -raw` the value may include
# surrounding quotes depending on the Terraform version — confirm.
CLASSIC_IP_ADDRESS=$(cd $my_dir/create-classic && terraform output CLASSIC_IP_ADDRESS)

# Smoke-test the VM over HTTP before capturing an image from it.
if curl --connect-timeout 10 http://$CLASSIC_IP_ADDRESS; then
    echo "Classic VM is ready to be captured"
else
    echo "Can't reach the classic VM public IP address"
    exit 1
fi
|
<filename>src/Thermostat.js
'use strict';
/**
 * A simple thermostat: starts at 20°C, cannot go below MIN_TEMP, and has a
 * power-saving mode that caps the maximum temperature at 25 (32 when off).
 */
function Thermostat() {
  this._temperature = 20;
  this.MIN_TEMP = 10;
  this.powerSavingMode = true;
}

/** Current temperature in °C. */
Thermostat.prototype.returnTemperature = function() {
  return this._temperature;
};

/** Raise the temperature by one degree. */
Thermostat.prototype.up = function() {
  this._temperature += 1;
};

/** True when the temperature sits at the lower bound. */
Thermostat.prototype.isMinTemp = function() {
  return this._temperature === this.MIN_TEMP;
};

/** Lower the temperature by one degree; refuses to go below MIN_TEMP. */
Thermostat.prototype.down = function() {
  if (this.isMinTemp()) {
    throw new Error("Minimum temperature reached");
  }
  this._temperature -= 1;
};

/** True when power-saving mode is active. */
Thermostat.prototype.isPSMon = function() {
  return this.powerSavingMode;
};

/** Flip power-saving mode on/off. */
Thermostat.prototype.togglePSM = function() {
  this.powerSavingMode = !this.powerSavingMode;
};

/** Maximum allowed temperature under the current mode. */
Thermostat.prototype.maxTempValue = function() {
  if (this.isPSMon()) {
    return 25;
  }
  return 32;
};

/** Restore the default temperature and return it. */
Thermostat.prototype.reset = function() {
  this._temperature = 20;
  return this._temperature;
};

/**
 * Classify current energy usage. Temperatures only ever change in whole
 * degrees, so ascending threshold checks cover every reachable value.
 */
Thermostat.prototype.checkEnergyUsage = function() {
  var t = this._temperature;
  if (t <= 15) return "very-low";
  if (t <= 18) return "low";
  if (t <= 20) return "low-to-med";
  if (t <= 22) return "medium";
  if (t <= 24) return "med-to-high";
  if (t <= 26) return "high";
  return "super-high";
};
<filename>chapter_002/src/test/java/ru/job4j/strategy/TriangleTest.java
package ru.job4j.strategy;

import static org.junit.Assert.*;

/**
 * Placeholder test class for the Triangle shape.
 * NOTE(review): contains no test methods yet — TODO add coverage
 * (the static Assert import is currently unused).
 */
public class TriangleTest {
}
import gym
import numpy as np
# Fix: Sequential and Dense were used below but never imported, so the
# script crashed with a NameError before training started.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

env = gym.make('MountainCar-v0')

# Define the Q-Learning model: state -> Q-value per action.
model = Sequential()
model.add(Dense(24, input_shape=(env.observation_space.shape[0],), activation='relu'))
model.add(Dense(24, activation='relu'))
model.add(Dense(env.action_space.n, activation='linear'))
model.compile(loss='mse', optimizer='adam')

# Train the model with epsilon-greedy exploration.
episodes = 1000
epsilon = 0.5          # exploration rate, decayed each episode
gamma = 0.95           # discount factor
epsilon_min = 0.01
epsilon_decrement = 0.995

for e in range(episodes):
    # Fix: Keras predict expects a batch dimension; reshape (2,) -> (1, 2).
    # NOTE(review): assumes the classic gym API where reset() returns only
    # the observation — newer gym returns (obs, info); confirm the version.
    state = np.reshape(env.reset(), (1, env.observation_space.shape[0]))
    done = False
    while not done:
        # Choose an action: explore with probability epsilon, else greedy.
        if np.random.random() < epsilon:
            action = env.action_space.sample()
        else:
            action = np.argmax(model.predict(state))
        # Take the action.
        next_state, reward, done, _ = env.step(action)
        next_state = np.reshape(next_state, (1, env.observation_space.shape[0]))
        # Bellman target for the taken action.
        target = reward + gamma * np.amax(model.predict(next_state))
        target_vec = model.predict(state)[0]
        target_vec[action] = target
        # Fix: use the true action count instead of a hard-coded 3.
        model.fit(state, target_vec.reshape(-1, env.action_space.n), epochs=1, verbose=0)
        state = next_state
    # Decay epsilon down to its floor.
    if epsilon > epsilon_min:
        epsilon *= epsilon_decrement
#!/bin/bash
# Create the reddit-app VM in Yandex Cloud.
# Fix: the flag lines were missing trailing backslashes, so bash executed
# `yc compute instance create` with no arguments and then tried to run
# each `--flag ...` line as its own command.
yc compute instance create \
    --name reddit-app-auto \
    --hostname reddit-app-auto \
    --memory=2 \
    --create-boot-disk image-folder-id=standard-images,image-family=ubuntu-1604,size=10GB \
    --network-interface subnet-name=default-ru-central1-a,nat-ip-version=ipv4 \
    --metadata serial-port-enable=1 \
    --metadata-from-file user-data=./metadata_app.yaml
|
#!/bin/sh
# Fix: the shebang and `set` lines were duplicated (copy-paste); one
# header is sufficient.
set -eo pipefail -o nounset

## Get .genome file
genome=https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/genomes/Homo_sapiens/hg38/hg38.genome

## Get the chromomsome mapping file (Ensembl -> UCSC names)
chr_mapping=$(ggd get-files hg38-chrom-mapping-ensembl2ucsc-ncbi-v1 --pattern "*.txt")

## Process GTF file: keep header + gene-only records, remap chromosome
## names, sort, bgzip and index.
wget --quiet ftp://ftp.ensembl.org/pub/release-99/gtf/homo_sapiens/Homo_sapiens.GRCh38.99.gtf.gz

cat <(gzip -dc Homo_sapiens.GRCh38.99.gtf.gz | grep "^#") <(gzip -dc Homo_sapiens.GRCh38.99.gtf.gz | grep -v "^#") \
    | awk -v OFS="\t" 'BEGIN{print "#chrom\tsource\tfeature\tstart\tend\tscore\tstrand\tframe\tattribute"} { if ( $3 == "gene") print $0}' \
    | gsort --chromosomemappings $chr_mapping /dev/stdin $genome \
    | bgzip -c > hg38-gene-only-features-ensembl-v1.gtf.gz

tabix hg38-gene-only-features-ensembl-v1.gtf.gz

rm Homo_sapiens.GRCh38.99.gtf.gz
|
<reponame>luanlazz/barbecue-app-back
import { DbAddAccount } from './db-add-account'
import { mockAddAccountRepository, mockHasher, mockLoadAccountByEmailRepository } from '@/data/test'
import { AddAccountRepository, LoadAccountByEmailRepository } from '@/data/protocols/db'
import { Hasher } from '@/data/protocols/cryptography'
import { throwError, mockAccountModel, mockAddAccountParams } from '@/domain/test'
// Bundle of the system under test plus every stub it depends on.
type SutTypes = {
  sut: DbAddAccount
  hasherStub: Hasher
  addAccountRepositoryStub: AddAccountRepository
  loadAccountByEmailRepositoryStub: LoadAccountByEmailRepository
}

// Factory: builds a DbAddAccount wired to fresh mocks for each test.
const makeSut = (): SutTypes => {
  const hasherStub = mockHasher()
  const addAccountRepositoryStub = mockAddAccountRepository()
  const loadAccountByEmailRepositoryStub = mockLoadAccountByEmailRepository()
  // Default: no existing account is found for the email.
  // NOTE(review): `mockImplementation(null)` is unusual — jest treats a
  // null implementation like none at all; confirm `mockResolvedValue(null)`
  // was not intended.
  jest.spyOn(loadAccountByEmailRepositoryStub, 'loadByEmail').mockImplementation(null)
  const sut = new DbAddAccount(hasherStub, addAccountRepositoryStub, loadAccountByEmailRepositoryStub)
  return {
    sut,
    hasherStub,
    addAccountRepositoryStub,
    loadAccountByEmailRepositoryStub
  }
}

describe('DbAddAccount use case', () => {
  test('Should call Hasher with correct value', async () => {
    const { sut, hasherStub } = makeSut()
    const hashSpy = jest.spyOn(hasherStub, 'hash')
    await sut.add(mockAddAccountParams())
    expect(hashSpy).toHaveBeenCalledWith('any_password')
  })

  test('Should throw if Hasher throws', async () => {
    const { sut, hasherStub } = makeSut()
    jest.spyOn(hasherStub, 'hash').mockImplementation(throwError)
    const promise = sut.add(mockAddAccountParams())
    await expect(promise).rejects.toThrow()
  })

  test('Should call AddAccountRepository with correct value', async () => {
    const { sut, addAccountRepositoryStub } = makeSut()
    const addSpy = jest.spyOn(addAccountRepositoryStub, 'add')
    await sut.add(mockAddAccountParams())
    // The password forwarded to the repository is the hashed value.
    expect(addSpy).toHaveBeenCalledWith({
      name: 'any_name',
      email: '<EMAIL>',
      password: '<PASSWORD>'
    })
  })

  test('Should throw if AddAccountRepository throws', async () => {
    const { sut, addAccountRepositoryStub } = makeSut()
    jest.spyOn(addAccountRepositoryStub, 'add').mockImplementation(throwError)
    const promise = sut.add(mockAddAccountParams())
    await expect(promise).rejects.toThrow()
  })

  test('Should call LoadAccountByEmailRepository with correct email', async () => {
    const { sut, loadAccountByEmailRepositoryStub } = makeSut()
    const loadSpy = jest.spyOn(loadAccountByEmailRepositoryStub, 'loadByEmail')
    await sut.add(mockAddAccountParams())
    expect(loadSpy).toHaveBeenCalledWith('<EMAIL>')
  })

  test('Should throw if LoadAccountByEmailRepository throws', async () => {
    const { sut, loadAccountByEmailRepositoryStub } = makeSut()
    jest.spyOn(loadAccountByEmailRepositoryStub, 'loadByEmail').mockImplementation(throwError)
    const promise = sut.add(mockAddAccountParams())
    await expect(promise).rejects.toThrow()
  })

  // When an account with the same email already exists, add() returns null.
  test('Should return null if LoadAccountByEmailRepository find any account with email', async () => {
    const { sut, loadAccountByEmailRepositoryStub } = makeSut()
    jest.spyOn(loadAccountByEmailRepositoryStub, 'loadByEmail').mockReturnValueOnce(Promise.resolve(mockAccountModel()))
    const account = await sut.add(mockAddAccountParams())
    expect(account).toBeNull()
  })

  test('Should return an account on success', async () => {
    const { sut } = makeSut()
    const account = await sut.add(mockAddAccountParams())
    expect(account).toEqual(mockAccountModel())
  })
})
|
from typing import List, Dict
def find_aliases(original_name: str, aliases: List[str]) -> Dict[str, str]:
    """Map aliases back to the original name, matching case-insensitively.

    An alias matches when, lowercased, it consists entirely of the
    lowercased original name (the containment + replace-to-empty test, so
    exact matches and whole-string repetitions like "abcabc" both qualify).

    :param original_name: canonical name to match against
    :param aliases: candidate alias strings
    :return: dict of matching alias -> original_name
    """
    target = original_name.lower()
    return {
        candidate: original_name
        for candidate in aliases
        if target in candidate.lower() and candidate.lower().replace(target, '') == ''
    }
#!/bin/bash
# Build a transposed FRiP summary table: one header row followed by one
# sorted row per replicate, assembled from per-replicate part files.
####################################################################
#functions
####################################################################

# Emit an 8-character pseudo-random string from a fixed alphabet (used to
# name scratch files).
function randomtext() {
    chars=abcd1234ABCD
    for i in {1..8} ; do
        echo -n "${chars:RANDOM%${#chars}:1}"
    done
    echo
}

# Write the two-line label column for one replicate.
function create_part1() {
    local replicateName=$1
    echo -ne "replicateName\n$replicateName\n" > ${replicateName}.part1
}

# Extract columns 2-3 for this replicate from FRIPFILE and transpose them.
function create_part2() {
    local replicateName=$1
    cat $FRIPFILE|grep "^${replicateName}\b"|cut -f2,3|bash $SCRIPTSDIR/_transpose.sh > ${replicateName}.part2
}

####################################################################
#scripts starts here
####################################################################

SCRIPTSDIR=$(dirname $0)
SCRIPTSDIR=$(readlink -f $SCRIPTSDIR)
RANDOMTXT=$(randomtext)
FRIPFILE=$1

replicates=$(cut -f1 $FRIPFILE|sort|uniq)
for r in $replicates;do
    create_part1 $r
    create_part2 $r
    # Fix: use the loop variable consistently. The original pasted/removed
    # "${replicateName}.part*", which only worked because the helper
    # functions leaked replicateName into the global scope (now prevented
    # with `local`).
    paste ${r}.part1 ${r}.part2
    rm -f ${r}.part*
done > $RANDOMTXT

# Keep the single header line first, sort the data rows, then clean up.
grep -m1 "replicateName" $RANDOMTXT > ${RANDOMTXT}.header
grep -v "replicateName" $RANDOMTXT | sort > ${RANDOMTXT}.body
cat ${RANDOMTXT}.header ${RANDOMTXT}.body
rm -f ${RANDOMTXT}*
|
#!/usr/bin/env bash
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "${DIR}/.."

# Preserve the previous recording before a new run overwrites it.
TIME=`date +%F_%H_%M`
if [ -e data/log/garage.csv ]; then
    cp data/log/garage.csv data/log/garage-${TIME}.csv
fi

set -x
# Start the recorder only when none is already running.
# Fix: the original pgrep pattern "rtk_recorderpy" was missing the "." and
# never matched the running "rtk_recorder.py" process, so duplicate
# recorders could be launched.
NUM_PROCESSES="$(pgrep -c -f "record_play/rtk_recorder.py")"
if [ "${NUM_PROCESSES}" -eq 0 ]; then
    python modules/tools/record_play/rtk_recorder.py
fi
|
package berlin.yuna.tinkerforgesensor.model.sensor;
import berlin.yuna.tinkerforgesensor.model.exception.NetworkConnectionException;
import berlin.yuna.tinkerforgesensor.model.type.ValueType;
import com.tinkerforge.BrickletSoundIntensity;
import com.tinkerforge.Device;
import com.tinkerforge.TinkerforgeException;
import static berlin.yuna.tinkerforgesensor.model.sensor.Sensor.LedStatusType.LED_NONE;
import static berlin.yuna.tinkerforgesensor.model.type.ValueType.DEVICE_TIMEOUT;
import static berlin.yuna.tinkerforgesensor.model.type.ValueType.SOUND_INTENSITY;
/**
* <h3>{@link SoundIntensity}</h3><br />
* <i>Measures sound intensity</i><br />
*
* <h3>Values</h3>
* <ul>
* <li>{@link ValueType#SOUND_INTENSITY} [x / 10 = db]</li>
* </ul>
* <h3>Technical Info</h3>
* <ul>
* <li><a href="https://www.tinkerforge.com/de/doc/Hardware/Bricklets/Sound_Intensity.html">Official documentation</a></li>
* </ul>
* <h6>Getting sound intensity example</h6>
* <code>stack.values().soundIntensity();</code>
*/
public class SoundIntensity extends Sensor<BrickletSoundIntensity> {

    public SoundIntensity(final Device device, final String uid) throws NetworkConnectionException {
        super((BrickletSoundIntensity) device, uid);
    }

    /** Subscribes to intensity callbacks and starts a 1 ms refresh period. */
    @Override
    protected Sensor<BrickletSoundIntensity> initListener() {
        device.addIntensityListener(value -> sendEvent(SOUND_INTENSITY, (long) value));
        refreshPeriod(1);
        return this;
    }

    /** No-op: this bricklet accepts no outgoing values. */
    @Override
    public Sensor<BrickletSoundIntensity> send(final Object value) {
        return this;
    }

    /** No-op: the bricklet has no controllable status LED. */
    @Override
    public Sensor<BrickletSoundIntensity> setLedStatus(final Integer value) {
        return this;
    }

    /** No-op: the bricklet has no additional LED. */
    @Override
    public Sensor<BrickletSoundIntensity> ledAdditional(final Integer value) {
        return this;
    }

    @Override
    public Sensor<BrickletSoundIntensity> initLedConfig() {
        ledStatus = LED_NONE;
        ledAdditional = LED_NONE;
        return this;
    }

    /**
     * Sets the intensity callback period and emits one immediate reading;
     * values below 1 ms disable the periodic callback (period 0). A
     * communication failure is reported as a DEVICE_TIMEOUT event rather
     * than thrown.
     */
    @Override
    public Sensor<BrickletSoundIntensity> refreshPeriod(final int milliseconds) {
        try {
            if (milliseconds < 1) {
                device.setIntensityCallbackPeriod(0);
            } else {
                device.setIntensityCallbackPeriod(milliseconds);
            }

            sendEvent(SOUND_INTENSITY, (long) device.getIntensity());
        } catch (TinkerforgeException ignored) {
            sendEvent(DEVICE_TIMEOUT, 404L);
        }
        return this;
    }
}
|
'use strict'
import { Application } from '../core'
/**
 * Describes the constructor shape of a service provider class: any class
 * that can be instantiated with the application instance and yields a
 * {@link ServiceProvider}. Used where provider classes (not instances)
 * are registered.
 */
export type ServiceProviderCtor =
  /**
   * Create a new service provider instance.
   *
   * @param {Application} app - the application instance
   */
  new(app: Application) => ServiceProvider
/**
 * A service provider encapsulates the two-phase startup of a group of
 * application services: `register` binds services into the container,
 * then `boot` runs after all providers have registered. `booting`/`booted`
 * callbacks hook around the boot phase.
 */
export interface ServiceProvider {
  /**
   * Returns the application instance.
   *
   * @returns {Application}
   */
  app (): Application

  /**
   * Register application services to the container.
   *
   * @param {Application} app - the application instance
   */
  register (app: Application): void

  /**
   * Boot application services. Optional; may be synchronous or async.
   *
   * @param {Application} app - the application instance
   */
  boot? (app: Application): void | Promise<void>

  /**
   * Register a booting callback that runs before the `boot` method is called.
   *
   * @param {Function} callback - invoked before `boot`
   * @returns {this}
   */
  booting (callback: () => void): this

  /**
   * Register a booted callback that runs after the `boot` method was called.
   *
   * @param {Function} callback - invoked after `boot`
   * @returns {this}
   */
  booted (callback: () => void): this

  /**
   * Call the registered booting callbacks.
   */
  callBootingCallbacks (): void

  /**
   * Call the registered booted callbacks.
   */
  callBootedCallbacks (): void

  /**
   * Merge the content of the configuration file located at the
   * given `path` with the existing app configuration under `key`.
   */
  mergeConfigFrom (path: string, key: string): this
}
|
<filename>frontend/app/scripts/services/blogservice.js
/**
* @ngdoc service
* @name foodCircle.BlogService
* @description
* # BlogService
* Service in the foodCircle.
*/
/*global
angular
*/
(function () {
    'use strict';
    // BlogService: thin CRUD wrapper around the "Blog" sailsResource.
    // All getters return promises that resolve (never reject -- see notes).
    angular.module('foodCircle').service('BlogService', ['$auth', 'sailsResource', '$log', '$q', function ($auth, sailsResource, $log, $q) {
        var BlogService = {},
            sailsResourceName = 'Blog',
            // Normalizes an optional query object; defaults to "match all".
            createQueryDto = function (query) {
                return query || {};
            },
            // Builds the object handed to $save(), attaching the auth token.
            // NOTE(review): when data.id is set, getBlogById() returns a $q
            // promise, not a resource instance; extending the promise and
            // later calling $save() on it looks broken. Verify the update
            // path against sailsResource's semantics.
            createDto = function (data) {
                var Resource = sailsResource(sailsResourceName),
                    BlogDto;
                if (data.id) {
                    BlogDto = BlogService.getBlogById(data.id);
                    angular.extend(BlogDto, data);
                    BlogDto.token = $auth.getToken();
                } else {
                    BlogDto = new Resource();
                    angular.extend(BlogDto, data);
                    BlogDto.token = $auth.getToken();
                }
                return BlogDto;
            };
        // Fetches a single blog matching `query`.
        // NOTE(review): the error callback resolves with {} instead of
        // rejecting, so callers cannot distinguish "not found" from errors.
        BlogService.getBlog = function (query) {
            var dfd = $q.defer(),
                blog;
            sailsResource(sailsResourceName).get(createQueryDto(query),
                function (blog) {
                    dfd.resolve(blog);
                },
                function (response) {
                    dfd.resolve({});
                });
            return dfd.promise;
        };
        // Fetches a blog by its id; returns [] (not a promise) when id is
        // missing -- callers must tolerate both shapes.
        BlogService.getBlogById = function (id) {
            if (!id) {
                $log.error('Id missing');
                return [];
            }
            return BlogService.getBlog({
                where: {
                    id: id
                }
            });
        };
        // Fetches a blog by its name; same [] fallback as getBlogById.
        BlogService.getBlogByName = function (name) {
            if (!name) {
                $log.error('Name missing');
                return [];
            }
            return BlogService.getBlog({
                where: {
                    name: name
                }
            });
        };
        // Creates a new blog or updates an existing one (when data.id set).
        // NOTE(review): local name "recipeDto" appears copy-pasted from a
        // recipe service; it holds a blog DTO.
        BlogService.createOrUpdate = function (data) {
            var recipeDto = createDto(data);
            return recipeDto.$save();
        };
        // Fetches a list of blogs matching `query`; resolves [] on error.
        BlogService.getBlogList = function (query) {
            var dfd = $q.defer();
            sailsResource(sailsResourceName).query(createQueryDto(query), function (bloglist) {
                dfd.resolve(bloglist);
            }, function (error) {
                dfd.resolve([]);
            });
            return dfd.promise;
        };
        return BlogService;
    }]);
}());
|
#include "option_parser.hpp"
#include "RAJA/RAJA.hpp"
#include "TOML_Reader/toml.hpp"
#include "mfem.hpp"
#include "ECMech_cases.h"
#include "ECMech_evptnWrap.h"
#include "ECMech_const.h"
#include <iostream>
#include <fstream>
// Returns true when the file at `name` exists and can be opened for reading.
inline bool if_file_exists (const std::string& name) {
   return std::ifstream(name.c_str()).good();
}
namespace {
   // Local ExaCMech model instantiations for BCC crystals with Voce-style
   // hardening (linear and nonlinear variants).
   // NOTE(review): both typedefs pair BCC slip geometry (SlipGeom_BCC_A)
   // with FCC kinetics types (Kin_FCC_A / Kin_FCC_AH) -- confirm this
   // pairing is intentional for the 12-slip-system BCC case.
   typedef ecmech::evptn::matModel<ecmech::SlipGeom_BCC_A, ecmech::Kin_FCC_A,
                                   ecmech::evptn::ThermoElastNCubic, ecmech::EosModelConst<false>>
           VoceBCCModel;
   typedef ecmech::evptn::matModel<ecmech::SlipGeom_BCC_A, ecmech::Kin_FCC_AH,
                                   ecmech::evptn::ThermoElastNCubic, ecmech::EosModelConst<false>>
           VoceNLBCCModel;
}
// Top-level driver for option parsing: reads every section of the TOML
// file at `floc` into this ExaOptions instance.
//
// `my_id` is the processor (MPI rank) id; only rank 0 echoes the options.
//
// NOTE: the call order matters -- get_model() validates values that
// get_properties() must read first (ori_type, nProps, numStateVars).
void ExaOptions::parse_options(int my_id)
{
   // From the toml file it finds all the values related to state and mat'l
   // properties
   get_properties();
   // From the toml file it finds all the values related to the BCs
   get_bcs();
   // From the toml file it finds all the values related to the model
   get_model();
   // From the toml file it finds all the values related to the time
   get_time_steps();
   // From the toml file it finds all the values related to the visualizations
   get_visualizations();
   // From the toml file it finds all the values related to the Solvers
   get_solvers();
   // From the toml file it finds all the values related to the mesh
   get_mesh();
   // If the processor is set 0 then the options are printed out.
   if (my_id == 0) {
      print_options();
   }
}
// Parses the [Properties] table: temperature, material-property file,
// state-variable file, and the optional [Properties.Grain] sub-table
// (orientation file/type, grain map, grain count).
// Aborts when a required sub-table or input file is missing.
void ExaOptions::get_properties()
{
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "Properties");
   double _temp_k = toml::find_or<double>(table, "temperature", 298.0);
   // Temperature is absolute (Kelvin), so it must be strictly positive.
   if (_temp_k <= 0.0) {
      MFEM_ABORT("Properties.temperature is given in Kelvins and therefore can't be less than 0");
   }
   temp_k = _temp_k;
   // Check to see if our table exists
   if (table.contains("Matl_Props")) {
      // Material properties are obtained first
      const auto& prop_table = toml::find(table, "Matl_Props");
      std::string _props_file = toml::find_or<std::string>(prop_table, "floc", "props.txt");
      props_file = _props_file;
      if (!if_file_exists(props_file))
      {
         MFEM_ABORT("Property file does not exist");
      }
      nProps = toml::find_or<int>(prop_table, "num_props", 1);
   }
   else {
      MFEM_ABORT("Properties.Matl_Props table was not provided in toml file");
   }
   // Check to see if our table exists
   if (table.contains("State_Vars")) {
      // State variable properties are now obtained
      const auto& state_table = toml::find(table, "State_Vars");
      numStateVars = toml::find_or<int>(state_table, "num_vars", 1);
      std::string _state_file = toml::find_or<std::string>(state_table, "floc", "state.txt");
      state_file = _state_file;
      if (!if_file_exists(state_file))
      {
         MFEM_ABORT("State file does not exist");
      }
   }
   else {
      MFEM_ABORT("Properties.State_Vars table was not provided in toml file");
   }
   // Check to see if our table exists. The Grain table is optional --
   // no abort when absent (non-crystal-plasticity runs don't need it).
   if (table.contains("Grain")) {
      // Grain related properties are now obtained
      const auto& grain_table = toml::find(table, "Grain");
      grain_statevar_offset = toml::find_or<int>(grain_table, "ori_state_var_loc", -1);
      grain_custom_stride = toml::find_or<int>(grain_table, "ori_stride", 0);
      std::string _ori_type = toml::find_or<std::string>(grain_table, "ori_type", "euler");
      ngrains = toml::find_or<int>(grain_table, "num_grains", 0);
      std::string _ori_file = toml::find_or<std::string>(grain_table, "ori_floc", "ori.txt");
      ori_file = _ori_file;
      std::string _grain_map = toml::find_or<std::string>(grain_table, "grain_floc", "grain_map.txt");
      grain_map = _grain_map;
      // I still can't believe C++ doesn't allow strings to be used in switch statements...
      if ((_ori_type == "euler") || _ori_type == "Euler" || (_ori_type == "EULER")) {
         ori_type = OriType::EULER;
      }
      else if ((_ori_type == "quat") || (_ori_type == "Quat") || (_ori_type == "quaternion") || (_ori_type == "Quaternion")) {
         ori_type = OriType::QUAT;
      }
      else if ((_ori_type == "custom") || (_ori_type == "Custom") || (_ori_type == "CUSTOM")) {
         ori_type = OriType::CUSTOM;
      }
      else {
         MFEM_ABORT("Properties.Grain.ori_type was not provided a valid type.");
         ori_type = OriType::NOTYPE;
      }
   } // end of if statement for grain data
} // End of propert parsing
// Parses the [BCs] table describing the essential (Dirichlet) boundary
// conditions. Two layouts are supported:
//   * constant BCs (changing_ess_bcs = false): flat arrays of ids /
//     components / values applied from step 1 on;
//   * time-varying BCs: nested arrays, one inner array per entry of
//     BCs.update_steps, stored in the map_ess_* members keyed by step.
// In both layouts key 0 is kept as an empty placeholder.
void ExaOptions::get_bcs()
{
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "BCs");
   changing_bcs = toml::find_or<bool>(table, "changing_ess_bcs", false);
   if (!changing_bcs) {
      std::vector<int> _essential_ids = toml::find<std::vector<int>>(table, "essential_ids");
      if (_essential_ids.empty()) {
         MFEM_ABORT("BCs.essential_ids was not provided any values.");
      }
      map_ess_id[0] = std::vector<int>();
      map_ess_id[1] = _essential_ids;
      std::vector<int> _essential_comp = toml::find<std::vector<int>>(table, "essential_comps");
      if (_essential_comp.empty()) {
         MFEM_ABORT("BCs.essential_comps was not provided any values.");
      }
      map_ess_comp[0] = std::vector<int>();
      map_ess_comp[1] = _essential_comp;
      // Getting out arrays of values isn't always the simplest thing to do using
      // this TOML library.
      std::vector<double> _essential_vals = toml::find<std::vector<double>>(table, "essential_vals");
      if (_essential_vals.empty()) {
         MFEM_ABORT("BCs.essential_vals was not provided any values.");
      }
      map_ess_vel[0] = std::vector<double>();
      map_ess_vel[1] = _essential_vals;
      updateStep.push_back(1);
   }
   else {
      updateStep = toml::find<std::vector<int>>(table, "update_steps");
      if (updateStep.empty()) {
         MFEM_ABORT("BCs.update_steps was not provided any values.");
      }
      // Step 1 must be present so BCs are defined from the very start.
      if (std::find(updateStep.begin(), updateStep.end(), 1) == updateStep.end()) {
         MFEM_ABORT("BCs.update_steps must contain 1 in the array");
      }
      int size = updateStep.size();
      std::vector<std::vector<int>> nested_ess_ids = toml::find<std::vector<std::vector<int>>>(table, "essential_ids");
      // ilength doubles as the index into updateStep and, at the end, as
      // the count of inner arrays seen (must equal updateStep.size()).
      int ilength = 0;
      map_ess_id[0] = std::vector<int>();
      for (const auto &vec : nested_ess_ids) {
         int key = updateStep.at(ilength);
         map_ess_id[key] = std::vector<int>();
         for (const auto &val : vec) {
            map_ess_id[key].push_back(val);
         }
         if (map_ess_id[key].empty()) {
            MFEM_ABORT("BCs.essential_ids contains empty array.");
         }
         ilength += 1;
      }
      if (ilength != size) {
         MFEM_ABORT("BCs.essential_ids did not contain the same number of arrays as number of update steps");
      }
      std::vector<std::vector<int>> nested_ess_comps = toml::find<std::vector<std::vector<int>>>(table, "essential_comps");
      ilength = 0;
      map_ess_comp[0] = std::vector<int>();
      for (const auto &vec : nested_ess_comps) {
         int key = updateStep.at(ilength);
         map_ess_comp[key] = std::vector<int>();
         for (const auto &val : vec) {
            map_ess_comp[key].push_back(val);
         }
         if (map_ess_comp[key].empty()) {
            MFEM_ABORT("BCs.essential_comps contains empty array.");
         }
         ilength += 1;
      }
      if (ilength != size) {
         MFEM_ABORT("BCs.essential_comps did not contain the same number of arrays as number of update steps");
      }
      std::vector<std::vector<double>> nested_ess_vals = toml::find<std::vector<std::vector<double>>>(table, "essential_vals");
      ilength = 0;
      map_ess_vel[0] = std::vector<double>();
      for (const auto &vec : nested_ess_vals) {
         int key = updateStep.at(ilength);
         map_ess_vel[key] = std::vector<double>();
         for (const auto &val : vec) {
            map_ess_vel[key].push_back(val);
         }
         if (map_ess_vel[key].empty()) {
            MFEM_ABORT("BCs.essential_vals contains empty array.");
         }
         ilength += 1;
      }
      if (ilength != size) {
         MFEM_ABORT("BCs.essential_vals did not contain the same number of arrays as number of update steps");
      }
   }
} // end of parsing BCs
// Parses the [Model] table: which material-model library to use (UMAT or
// ExaCMech) and, for ExaCMech, the crystal symmetry (xtal_type) plus the
// slip-kinetics / hardening pairing (slip_type). Cross-checks nProps and
// numStateVars (parsed earlier in get_properties) against the counts the
// selected ExaCMech model expects.
void ExaOptions::get_model()
{
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "Model");
   std::string _mech_type = toml::find_or<std::string>(table, "mech_type", "");
   // I still can't believe C++ doesn't allow strings to be used in switch statements...
   if ((_mech_type == "umat") || (_mech_type == "Umat") || (_mech_type == "UMAT") || (_mech_type == "UMat")) {
      mech_type = MechType::UMAT;
   }
   else if ((_mech_type == "exacmech") || (_mech_type == "Exacmech") || (_mech_type == "ExaCMech") || (_mech_type == "EXACMECH")) {
      mech_type = MechType::EXACMECH;
   }
   else {
      MFEM_ABORT("Model.mech_type was not provided a valid type.");
      mech_type = MechType::NOTYPE;
   }
   cp = toml::find_or<bool>(table, "cp", false);
   if (mech_type == MechType::EXACMECH) {
      if (!cp) {
         MFEM_ABORT("Model.cp needs to be set to true when using ExaCMech based models.");
      }
      if (ori_type != OriType::QUAT) {
         MFEM_ABORT("Properties.Grain.ori_type is not set to quaternion for use with an ExaCMech model.");
         xtal_type = XtalType::NOTYPE;
      }
      // ExaCMech stores the lattice-orientation quaternion at a fixed
      // offset in its history vector; override whatever was parsed.
      grain_statevar_offset = ecmech::evptn::iHistLbQ;
      if (table.contains("ExaCMech")) {
         const auto& exacmech_table = toml::find(table, "ExaCMech");
         std::string _xtal_type = toml::find_or<std::string>(exacmech_table, "xtal_type", "");
         std::string _slip_type = toml::find_or<std::string>(exacmech_table, "slip_type", "");
         if ((_xtal_type == "fcc") || (_xtal_type == "FCC")) {
            xtal_type = XtalType::FCC;
         }
         else if ((_xtal_type == "bcc") || (_xtal_type == "BCC")) {
            xtal_type = XtalType::BCC;
         }
         else if ((_xtal_type == "hcp") || (_xtal_type == "HCP")) {
            xtal_type = XtalType::HCP;
         }
         else {
            MFEM_ABORT("Model.ExaCMech.xtal_type was not provided a valid type.");
            xtal_type = XtalType::NOTYPE;
         }
         if ((_slip_type == "mts") || (_slip_type == "MTS") || (_slip_type == "mtsdd") || (_slip_type == "MTSDD")) {
            slip_type = SlipType::MTSDD;
            if (xtal_type == XtalType::FCC) {
               if (nProps != ecmech::matModelEvptn_FCC_B::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << ecmech::matModelEvptn_FCC_B::nParams <<
                             " values for the MTSDD option and FCC option");
               }
            }
            else if (xtal_type == XtalType::BCC) {
               if (nProps != ecmech::matModelEvptn_BCC_A::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << ecmech::matModelEvptn_BCC_A::nParams <<
                             " values for the MTSDD option and BCC option");
               }
            }
            else if (xtal_type == XtalType::HCP) {
               if (nProps != ecmech::matModelEvptn_HCP_A::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << ecmech::matModelEvptn_HCP_A::nParams <<
                             " values for the MTSDD option and HCP option");
               }
            }
            else {
               // Reachable only when xtal_type failed to parse above; FCC,
               // BCC, and HCP are all handled for MTSDD. (The previous
               // message wrongly claimed MTS was unsupported for BCC.)
               MFEM_ABORT("Model.ExaCMech.xtal_type was not provided a valid type for the MTSDD slip_type.")
            }
         }
         else if ((_slip_type == "powervoce") || (_slip_type == "PowerVoce") || (_slip_type == "POWERVOCE")) {
            slip_type = SlipType::POWERVOCE;
            if (xtal_type == XtalType::FCC) {
               if (nProps != ecmech::matModelEvptn_FCC_A::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << ecmech::matModelEvptn_FCC_A::nParams <<
                             " values for the PowerVoce option and FCC option");
               }
            }
            else if (xtal_type == XtalType::BCC) {
               if (nProps != VoceBCCModel::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << VoceBCCModel::nParams <<
                             " values for the PowerVoce option and BCC option");
               }
            }
            else {
               MFEM_ABORT("Model.ExaCMech.slip_type can not be PowerVoce for HCP or BCC_112 materials.")
            }
         }
         else if ((_slip_type == "powervocenl") || (_slip_type == "PowerVoceNL") || (_slip_type == "POWERVOCENL")) {
            slip_type = SlipType::POWERVOCENL;
            if (xtal_type == XtalType::FCC) {
               if (nProps != ecmech::matModelEvptn_FCC_AH::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << ecmech::matModelEvptn_FCC_AH::nParams <<
                             " values for the PowerVoceNL option and FCC option");
               }
            }
            else if (xtal_type == XtalType::BCC) {
               if (nProps != VoceNLBCCModel::nParams) {
                  MFEM_ABORT("Properties.Matl_Props.num_props needs " << VoceNLBCCModel::nParams <<
                             " values for the PowerVoceNL option and BCC option");
               }
            }
            else {
               MFEM_ABORT("Model.ExaCMech.slip_type can not be PowerVoceNL for HCP or BCC_112 materials.")
            }
         }
         else {
            MFEM_ABORT("Model.ExaCMech.slip_type was not provided a valid type.");
            slip_type = SlipType::NOTYPE;
         }
         if (slip_type != SlipType::NOTYPE) {
            // Expected state-variable counts: model history length plus
            // elastic strain (ne) plus one, minus the 4 quaternion slots
            // (the quaternion is counted separately).
            if (xtal_type == XtalType::FCC) {
               int num_state_vars_check = ecmech::matModelEvptn_FCC_A::numHist + ecmech::ne + 1 - 4;
               if (numStateVars != num_state_vars_check) {
                  MFEM_ABORT("Properties.State_Vars.num_vars needs " << num_state_vars_check << " values for a "
                             "face cubic material when using an ExaCMech model. Note: the number of values for a quaternion "
                             "are not included in this count.");
               }
            }
            else if (xtal_type == XtalType::BCC) {
               // We'll probably need to modify this whenever we add support for the other BCC variations in
               // here due to the change in number of slip systems.
               int num_state_vars_check = ecmech::matModelEvptn_BCC_A::numHist + ecmech::ne + 1 - 4;
               if (numStateVars != num_state_vars_check) {
                  MFEM_ABORT("Properties.State_Vars.num_vars needs " << num_state_vars_check << " values for a "
                             "body center cubic material when using an ExaCMech model. Note: the number of values for a quaternion "
                             "are not included in this count.");
               }
            }
            else if (xtal_type == XtalType::HCP) {
               int num_state_vars_check = ecmech::matModelEvptn_HCP_A::numHist + ecmech::ne + 1 - 4;
               if (numStateVars != num_state_vars_check) {
                  MFEM_ABORT("Properties.State_Vars.num_vars needs " << num_state_vars_check << " values for a "
                             "hexagonal material when using an ExaCMech model. Note: the number of values for a quaternion "
                             "are not included in this count.");
               }
            }
         }
      }
      else {
         MFEM_ABORT("The table Model.ExaCMech does not exist, but the model being used is ExaCMech.");
      } // End if ExaCMech Table Exists
   }
} // end of model parsing
// From the toml file it finds all the values related to the time
void ExaOptions::get_time_steps()
{
const auto data = toml::parse(floc);
const auto& table = toml::find(data, "Time");
// First look at the fixed time stuff
// check to see if our table exists
if (table.contains("Fixed")) {
const auto& fixed_table = toml::find(table, "Fixed");
dt_cust = false;
dt = toml::find_or<double>(fixed_table, "dt", 1.0);
t_final = toml::find_or<double>(fixed_table, "t_final", 1.0);
}
// Time to look at our custom time table stuff
// check to see if our table exists
if (table.contains("Custom")) {
const auto& cust_table = toml::find(table, "Custom");
dt_cust = true;
nsteps = toml::find_or<int>(cust_table, "nsteps", 1);
std::string _dt_file = toml::find_or<std::string>(cust_table, "floc", "custom_dt.txt");
dt_file = _dt_file;
}
} // end of time step parsing
// Parses the [Visualizations] table: output cadence, which visualization
// backends to enable (VisIt / Conduit / ParaView / ADIOS2), and the file
// names for the averaged-quantity outputs.
void ExaOptions::get_visualizations()
{
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "Visualizations");
   vis_steps = toml::find_or<int>(table, "steps", 1);
   visit = toml::find_or<bool>(table, "visit", false);
   conduit = toml::find_or<bool>(table, "conduit", false);
   paraview = toml::find_or<bool>(table, "paraview", false);
   adios2 = toml::find_or<bool>(table, "adios2", false);
   // Verify each requested backend was compiled into MFEM. The two checks
   // are independent so requesting both conduit and adios2 validates both
   // (previously the adios2 check was skipped whenever conduit was set).
   if (conduit) {
#ifndef MFEM_USE_CONDUIT
      MFEM_ABORT("MFEM was not built with conduit.")
#endif
   }
   if (adios2) {
#ifndef MFEM_USE_ADIOS2
      MFEM_ABORT("MFEM was not built with ADIOS2");
#endif
   }
   std::string _basename = toml::find_or<std::string>(table, "floc", "results/exaconstit");
   basename = _basename;
   std::string _avg_stress_fname = toml::find_or<std::string>(table, "avg_stress_fname", "avg_stress.txt");
   avg_stress_fname = _avg_stress_fname;
   // Optional extra volume-averaged outputs (deformation gradient,
   // plastic work, plastic strain rate tensor).
   bool _additional_avgs = toml::find_or<bool>(table, "additional_avgs", false);
   additional_avgs = _additional_avgs;
   std::string _avg_def_grad_fname = toml::find_or<std::string>(table, "avg_def_grad_fname", "avg_def_grad.txt");
   avg_def_grad_fname = _avg_def_grad_fname;
   std::string _avg_pl_work_fname = toml::find_or<std::string>(table, "avg_pl_work_fname", "avg_pl_work.txt");
   avg_pl_work_fname = _avg_pl_work_fname;
   std::string _avg_dp_tensor_fname = toml::find_or<std::string>(table, "avg_dp_tensor_fname", "avg_dp_tensor.txt");
   avg_dp_tensor_fname = _avg_dp_tensor_fname;
} // end of visualization parsing
// Parses the [Solvers] table: matrix assembly strategy, runtime model
// (CPU / OpenMP / CUDA, gated on how RAJA was built), the nonlinear
// Newton-Raphson settings ([Solvers.NR]), the integration formulation,
// and the Krylov linear-solver settings ([Solvers.Krylov]).
void ExaOptions::get_solvers()
{
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "Solvers");
   std::string _assembly = toml::find_or<std::string>(table, "assembly", "FULL");
   if ((_assembly == "FULL") || (_assembly == "full")) {
      assembly = Assembly::FULL;
   }
   else if ((_assembly == "PA") || (_assembly == "pa")) {
      assembly = Assembly::PA;
   }
   else if ((_assembly == "EA") || (_assembly == "ea")) {
      assembly = Assembly::EA;
   }
   else {
      MFEM_ABORT("Solvers.assembly was not provided a valid type.");
      assembly = Assembly::NOTYPE;
   }
   std::string _rtmodel = toml::find_or<std::string>(table, "rtmodel", "CPU");
   if ((_rtmodel == "CPU") || (_rtmodel == "cpu")) {
      rtmodel = RTModel::CPU;
   }
#if defined(RAJA_ENABLE_OPENMP)
   else if ((_rtmodel == "OPENMP") || (_rtmodel == "OpenMP") || (_rtmodel == "openmp")) {
      rtmodel = RTModel::OPENMP;
   }
#endif
#if defined(RAJA_ENABLE_CUDA)
   else if ((_rtmodel == "CUDA") || (_rtmodel == "cuda")) {
      // CUDA requires partial or element assembly.
      // (Message fixed: the incompatible option is Solvers.assembly,
      // not Solvers.rtmodel itself.)
      if (assembly == Assembly::FULL) {
         MFEM_ABORT("Solvers.rtmodel can't be CUDA if Solvers.assembly is FULL.");
      }
      rtmodel = RTModel::CUDA;
   }
#endif
   else {
      MFEM_ABORT("Solvers.rtmodel was not provided a valid type.");
      rtmodel = RTModel::NOTYPE;
   }
   if (table.contains("NR")) {
      // Obtaining information related to the newton raphson solver
      const auto& nr_table = toml::find(table, "NR");
      std::string _solver = toml::find_or<std::string>(nr_table, "nl_solver", "NR");
      if ((_solver == "nr") || (_solver == "NR")) {
         nl_solver = NLSolver::NR;
      }
      else if ((_solver == "nrls") || (_solver == "NRLS")) {
         nl_solver = NLSolver::NRLS;
      }
      else {
         MFEM_ABORT("Solvers.NR.nl_solver was not provided a valid type.");
         nl_solver = NLSolver::NOTYPE;
      }
      newton_iter = toml::find_or<int>(nr_table, "iter", 25);
      newton_rel_tol = toml::find_or<double>(nr_table, "rel_tol", 1e-5);
      newton_abs_tol = toml::find_or<double>(nr_table, "abs_tol", 1e-10);
   } // end of NR info
   std::string _integ_model = toml::find_or<std::string>(table, "integ_model", "FULL");
   if ((_integ_model == "FULL") || (_integ_model == "full")) {
      integ_type = IntegrationType::FULL;
   }
   else if ((_integ_model == "BBAR") || (_integ_model == "bbar")) {
      integ_type = IntegrationType::BBAR;
      // NOTE(review): nl_solver is only assigned when [Solvers.NR] exists;
      // confirm ExaOptions gives it a sane default for this comparison.
      if (nl_solver == NLSolver::NR) {
         std::cout << "BBar method performs better when paired with a NR solver with line search" << std::endl;
      }
   }
   // NOTE(review): unlike the other option parsers, an invalid integ_model
   // string is silently ignored here (no abort / NOTYPE assignment).
   if (table.contains("Krylov")) {
      // Now getting information about the Krylov solvers used to the linearized
      // system of equations of the nonlinear problem.
      auto iter_table = toml::find(table, "Krylov");
      krylov_iter = toml::find_or<int>(iter_table, "iter", 200);
      krylov_rel_tol = toml::find_or<double>(iter_table, "rel_tol", 1e-10);
      krylov_abs_tol = toml::find_or<double>(iter_table, "abs_tol", 1e-30);
      std::string _solver = toml::find_or<std::string>(iter_table, "solver", "GMRES");
      if ((_solver == "GMRES") || (_solver == "gmres")) {
         solver = KrylovSolver::GMRES;
      }
      else if ((_solver == "PCG") || (_solver == "pcg")) {
         solver = KrylovSolver::PCG;
      }
      else if ((_solver == "MINRES") || (_solver == "minres")) {
         solver = KrylovSolver::MINRES;
      }
      else {
         MFEM_ABORT("Solvers.Krylov.solver was not provided a valid type.");
         solver = KrylovSolver::NOTYPE;
      }
   } // end of krylov solver info
} // end of solver parsing
// Parses the [Mesh] table: refinement levels, polynomial order, the mesh
// file location, and the mesh source type. For type "auto" the physical
// edge lengths (mxyz) and cell counts (nxyz) come from [Mesh.Auto]; for
// "cubit"/"other" the mesh file must exist on disk.
void ExaOptions::get_mesh()
{
   // Refinement of the mesh and element order
   const auto data = toml::parse(floc);
   const auto& table = toml::find(data, "Mesh");
   ser_ref_levels = toml::find_or<int>(table, "ref_ser", 0);
   par_ref_levels = toml::find_or<int>(table, "ref_par", 0);
   order = toml::find_or<int>(table, "p_refinement", 1);
   // file location of the mesh
   std::string _mesh_file = toml::find_or<std::string>(table, "floc", "../../data/cube-hex-ro.mesh");
   mesh_file = _mesh_file;
   // Type of mesh that we're reading/going to generate
   std::string mtype = toml::find_or<std::string>(table, "type", "other");
   if ((mtype == "cubit") || (mtype == "Cubit") || (mtype == "CUBIT")) {
      mesh_type = MeshType::CUBIT;
   }
   else if ((mtype == "auto") || (mtype == "Auto") || (mtype == "AUTO")) {
      mesh_type = MeshType::AUTO;
      if (table.contains("Auto")) {
         auto auto_table = toml::find(table, "Auto");
         // Physical dimensions of the generated box (x, y, z).
         std::vector<double> _mxyz = toml::find<std::vector<double>>(auto_table, "length");
         if (_mxyz.size() != 3) {
            MFEM_ABORT("Mesh.Auto.length was not provided a valid array of size 3.");
         }
         mxyz[0] = _mxyz[0];
         mxyz[1] = _mxyz[1];
         mxyz[2] = _mxyz[2];
         // Number of cells along each edge of the generated box.
         std::vector<int> _nxyz = toml::find<std::vector<int>>(auto_table, "ncuts");
         if (_nxyz.size() != 3) {
            MFEM_ABORT("Mesh.Auto.ncuts was not provided a valid array of size 3.");
         }
         nxyz[0] = _nxyz[0];
         nxyz[1] = _nxyz[1];
         nxyz[2] = _nxyz[2];
      }
      else {
         MFEM_ABORT("Mesh.type was set to Auto but Mesh.Auto does not exist");
      }
   }
   else if ((mtype == "other") || (mtype == "Other") || (mtype == "OTHER")) {
      mesh_type = MeshType::OTHER;
   }
   else {
      MFEM_ABORT("Mesh.type was not provided a valid type.");
      mesh_type = MeshType::NOTYPE;
   } // end of mesh type parsing
   // Auto-generated meshes need no file on disk; the others do.
   if (mesh_type == MeshType::OTHER || mesh_type == MeshType::CUBIT) {
      if (!if_file_exists(mesh_file))
      {
         MFEM_ABORT("Mesh file does not exist");
      }
   }
} // End of mesh parsing
// Echoes every parsed option to stdout; called from parse_options() on
// MPI rank 0 only. Fix: the average-stress filename was printed twice
// (once before and once after the additional-averages section); the
// duplicate has been removed.
void ExaOptions::print_options()
{
   std::cout << "Mesh file location: " << mesh_file << "\n";
   std::cout << "Mesh type: ";
   if (mesh_type == MeshType::OTHER) {
      std::cout << "other";
   }
   else if (mesh_type == MeshType::CUBIT) {
      std::cout << "cubit";
   }
   else {
      std::cout << "auto";
   }
   std::cout << "\n";
   std::cout << "Edge dimensions (mx, my, mz): " << mxyz[0] << " " << mxyz[1] << " " << mxyz[2] << "\n";
   std::cout << "Number of cells on an edge (nx, ny, nz): " << nxyz[0] << " " << nxyz[1] << " " << nxyz[2] << "\n";
   std::cout << "Serial Refinement level: " << ser_ref_levels << "\n";
   std::cout << "Parallel Refinement level: " << par_ref_levels << "\n";
   std::cout << "P-refinement level: " << order << "\n";
   std::cout << std::boolalpha;
   std::cout << "Custom dt flag (dt_cust): " << dt_cust << "\n";
   if (dt_cust) {
      std::cout << "Number of time steps (nsteps): " << nsteps << "\n";
      std::cout << "Custom time file loc (dt_file): " << dt_file << "\n";
   }
   else {
      std::cout << "Constant time stepping on \n";
      std::cout << "Final time (t_final): " << t_final << "\n";
      std::cout << "Time step (dt): " << dt << "\n";
   }
   std::cout << "Visit flag: " << visit << "\n";
   std::cout << "Conduit flag: " << conduit << "\n";
   std::cout << "Paraview flag: " << paraview << "\n";
   std::cout << "ADIOS2 flag: " << adios2 << "\n";
   std::cout << "Visualization steps: " << vis_steps << "\n";
   std::cout << "Visualization directory: " << basename << "\n";
   std::cout << "Average stress filename: " << avg_stress_fname << std::endl;
   if (additional_avgs)
   {
      std::cout << "Additional averages being computed" << std::endl;
      std::cout << "Average deformation gradient filename: " << avg_def_grad_fname << std::endl;
      std::cout << "Average plastic work filename: " << avg_pl_work_fname << std::endl;
      std::cout << "Average plastic strain rate tensor filename: " << avg_dp_tensor_fname << std::endl;
   }
   else
   {
      std::cout << "No additional averages being computed" << std::endl;
   }
   if (nl_solver == NLSolver::NR) {
      std::cout << "Nonlinear Solver is Newton Raphson \n";
   }
   else if (nl_solver == NLSolver::NRLS) {
      std::cout << "Nonlinear Solver is Newton Raphson with a line search\n";
   }
   std::cout << "Newton Raphson rel. tol.: " << newton_rel_tol << "\n";
   std::cout << "Newton Raphson abs. tol.: " << newton_abs_tol << "\n";
   std::cout << "Newton Raphson # of iter.: " << newton_iter << "\n";
   std::cout << "Newton Raphson grad debug: " << grad_debug << "\n";
   if (integ_type == IntegrationType::FULL) {
      std::cout << "Integration Type: Full \n";
   }
   else if (integ_type == IntegrationType::BBAR) {
      std::cout << "Integration Type: BBar \n";
   }
   std::cout << "Krylov solver: ";
   if (solver == KrylovSolver::GMRES) {
      std::cout << "GMRES";
   }
   else if (solver == KrylovSolver::PCG) {
      std::cout << "PCG";
   }
   else {
      std::cout << "MINRES";
   }
   std::cout << "\n";
   std::cout << "Krylov solver rel. tol.: " << krylov_rel_tol << "\n";
   std::cout << "Krylov solver abs. tol.: " << krylov_abs_tol << "\n";
   std::cout << "Krylov solver # of iter.: " << krylov_iter << "\n";
   std::cout << "Matrix Assembly is: ";
   if (assembly == Assembly::FULL) {
      std::cout << "Full Assembly\n";
   }
   else if (assembly == Assembly::PA) {
      std::cout << "Partial Assembly\n";
   }
   else {
      std::cout << "Element Assembly\n";
   }
   std::cout << "Runtime model is: ";
   if (rtmodel == RTModel::CPU) {
      std::cout << "CPU\n";
   }
   else if (rtmodel == RTModel::CUDA) {
      std::cout << "CUDA\n";
   }
   else if (rtmodel == RTModel::OPENMP) {
      std::cout << "OpenMP\n";
   }
   std::cout << "Mechanical model library being used ";
   if (mech_type == MechType::UMAT) {
      std::cout << "UMAT\n";
   }
   else if (mech_type == MechType::EXACMECH) {
      std::cout << "ExaCMech\n";
      std::cout << "Crystal symmetry group is ";
      if (xtal_type == XtalType::FCC) {
         std::cout << "FCC\n";
      }
      else if (xtal_type == XtalType::BCC) {
         std::cout << "BCC\n";
      }
      else if (xtal_type == XtalType::HCP) {
         std::cout << "HCP\n";
      }
      std::cout << "Slip system and hardening model being used is ";
      if (slip_type == SlipType::MTSDD) {
         std::cout << "MTS slip like kinetics with dislocation density based hardening\n";
      }
      else if (slip_type == SlipType::POWERVOCE) {
         std::cout << "Power law slip kinetics with a linear Voce hardening law\n";
      }
      else if (slip_type == SlipType::POWERVOCENL) {
         std::cout << "Power law slip kinetics with a nonlinear Voce hardening law\n";
      }
   }
   // Grain/orientation data is echoed unconditionally, even when the
   // [Properties.Grain] table was absent (defaults are shown in that case).
   std::cout << "Xtal Plasticity being used: " << cp << "\n";
   std::cout << "Orientation file location: " << ori_file << "\n";
   std::cout << "Grain map file location: " << grain_map << "\n";
   std::cout << "Number of grains: " << ngrains << "\n";
   std::cout << "Orientation type: ";
   if (ori_type == OriType::EULER) {
      std::cout << "euler";
   }
   else if (ori_type == OriType::QUAT) {
      std::cout << "quaternion";
   }
   else {
      std::cout << "custom";
   }
   std::cout << "\n";
   std::cout << "Custom stride to read grain map file: " << grain_custom_stride << "\n";
   std::cout << "Orientation offset in state variable file: " << grain_statevar_offset << "\n";
   std::cout << "Number of properties: " << nProps << "\n";
   std::cout << "Property file location: " << props_file << "\n";
   std::cout << "Number of state variables: " << numStateVars << "\n";
   std::cout << "State variable file location: " << state_file << "\n";
   // One section per BC update step, using the maps filled in get_bcs().
   for (const auto key: updateStep)
   {
      std::cout << "Starting on step " << key << " essential BCs values are:" << std::endl;
      std::cout << "Essential ids are set as: ";
      for (const auto & val: map_ess_id.at(key)) {
         std::cout << val << " ";
      }
      std::cout << std::endl << "Essential components are set as: ";
      for (const auto & val: map_ess_comp.at(key)) {
         std::cout << val << " ";
      }
      std::cout << std::endl << "Essential boundary values are set as: ";
      for (const auto & val: map_ess_vel.at(key)) {
         std::cout << val << " ";
      }
      std::cout << std::endl;
   }
} // End of printing out options
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.template;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import org.junit.Test;
public class TemplateConstructorsTest {
    // Template source fed to the Reader-based constructors under test.
    private static final String READER_CONTENT = "From a reader...";
    // Same source but with an <#ftl> header that forces UTF-8; used to
    // provoke Template.WrongEncodingException against a conflicting encoding.
    private static final String READER_CONTENT_FORCE_UTF8 = "<#ftl encoding='utf-8'>From a reader...";
    @Test
    public void test() throws IOException {
        final Configuration cfg = new Configuration(Configuration.VERSION_2_3_22);
        //cfg.setDefaultEncoding("ISO-8859-1");
        final String name = "foo/bar.ftl";
        final String sourceName = "foo/bar_de.ftl";
        final String content = "From a String...";
        final String encoding = "UTF-16LE";
        // Each block below exercises one Template constructor variant and
        // checks name, sourceName, parsed content, and encoding.
        {
            // Reader, no Configuration: sourceName defaults to name,
            // encoding stays null.
            Template t = new Template(name, createReader());
            assertEquals(name, t.getName());
            assertEquals(name, t.getSourceName());
            assertEquals(READER_CONTENT, t.toString());
            assertNull(t.getEncoding());
        }
        {
            // Reader + Configuration.
            Template t = new Template(name, createReader(), cfg);
            assertEquals(name, t.getName());
            assertEquals(name, t.getSourceName());
            assertEquals(READER_CONTENT, t.toString());
            assertNull(t.getEncoding());
        }
        {
            // String source + Configuration.
            Template t = new Template(name, content, cfg);
            assertEquals(name, t.getName());
            assertEquals(name, t.getSourceName());
            assertEquals(content, t.toString());
            assertNull(t.getEncoding());
        }
        {
            // Explicit encoding is recorded on the template.
            Template t = new Template(name, createReader(), cfg, encoding);
            assertEquals(name, t.getName());
            assertEquals(name, t.getSourceName());
            assertEquals(READER_CONTENT, t.toString());
            assertEquals("UTF-16LE", t.getEncoding());
        }
        {
            // Distinct sourceName is kept separate from name.
            Template t = new Template(name, sourceName, createReader(), cfg);
            assertEquals(name, t.getName());
            assertEquals(sourceName, t.getSourceName());
            assertEquals(READER_CONTENT, t.toString());
            assertNull(t.getEncoding());
        }
        {
            Template t = new Template(name, sourceName, createReader(), cfg, encoding);
            assertEquals(name, t.getName());
            assertEquals(sourceName, t.getSourceName());
            assertEquals(READER_CONTENT, t.toString());
            assertEquals("UTF-16LE", t.getEncoding());
        }
        {
            // Plain-text factory: content is not parsed as FTL.
            Template t = Template.getPlainTextTemplate(name, content, cfg);
            assertEquals(name, t.getName());
            assertEquals(name, t.getSourceName());
            assertEquals(content, t.toString());
            assertNull(t.getEncoding());
        }
        {
            // An <#ftl encoding=...> header conflicting with the declared
            // encoding must raise WrongEncodingException naming both.
            try {
                new Template(name, sourceName, createReaderForceUTF8(), cfg, encoding);
                fail();
            } catch (Template.WrongEncodingException e) {
                assertThat(e.getMessage(), containsString("utf-8"));
                assertThat(e.getMessage(), containsString(encoding));
            }
        }
    }
private final Reader createReader() {
return new StringReader(READER_CONTENT);
}
private final Reader createReaderForceUTF8() {
return new StringReader(READER_CONTENT_FORCE_UTF8);
}
}
|
import sys
import pandas as pd
import numpy as np
import itertools
from sklearn.preprocessing import RobustScaler
from sklearn.linear_model import SGDClassifier
from evaluate_model import evaluate_model

# Dataset path/name is supplied as the first CLI argument.
dataset = sys.argv[1]

# Fixed two-step pipeline: robust feature scaling, then an SGD classifier.
pipeline_components = [RobustScaler, SGDClassifier]
pipeline_parameters = {}

# Hyper-parameter grid for the SGD classifier.
loss_values = ['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron']
penalty_values = ['l2', 'l1', 'elasticnet']
alpha_values = [0.000001, 0.00001, 0.0001, 0.001, 0.01]
learning_rate_values = ['constant', 'optimal', 'invscaling']
fit_intercept_values = [True, False]
l1_ratio_values = [0., 0.1, 0.15, 0.25, 0.5, 0.75, 0.9, 1.]
eta0_values = [0.0, 0.01, 0.1, 0.5, 1., 10., 50., 100.]
power_t_values = [0., 0.1, 0.5, 1., 10., 50., 100.]
random_state = [324089]

param_dicts = []
for (loss, penalty, alpha, learning_rate, fit_intercept,
     l1_ratio, eta0, power_t, seed) in itertools.product(
        loss_values, penalty_values, alpha_values, learning_rate_values,
        fit_intercept_values, l1_ratio_values, eta0_values, power_t_values,
        random_state):
    # Only vary l1_ratio when the penalty actually uses it (elasticnet);
    # otherwise keep it pinned at sklearn's default of 0.15.
    if penalty != 'elasticnet' and l1_ratio != 0.15:
        continue
    # eta0 only matters for the 'constant'/'invscaling' schedules.
    if learning_rate not in ['constant', 'invscaling'] and eta0 != 0.0:
        continue
    # power_t only matters for 'invscaling'; pin it to the default 0.5 elsewhere.
    if learning_rate != 'invscaling' and power_t != 0.5:
        continue
    param_dicts.append({'loss': loss, 'penalty': penalty, 'alpha': alpha,
                        'learning_rate': learning_rate,
                        'fit_intercept': fit_intercept, 'l1_ratio': l1_ratio,
                        'eta0': eta0, 'power_t': power_t,
                        'random_state': seed})
pipeline_parameters[SGDClassifier] = param_dicts

evaluate_model(dataset, pipeline_components, pipeline_parameters)
#!/usr/bin/expect
# Batch telnet login check: attempts to log into a fixed list of hosts with
# one shared username/password.  Each expect pattern below is order-sensitive:
# a "Username:" prompt drives the login dialogue, while connection failures
# just wait for EOF and return.
set timeout 2
# NOTE(review): placeholder credentials -- fill in real values before use.
set username xxx
set password xxx

# Logs into `host` over telnet with the given credentials, then waits for the
# remote side to close the session (expect eof).
proc login { host username password } {
    spawn telnet $host
    expect {
        "Username:" {
            send "$username\r"
            expect "Password:"
            send "$password\r";
            expect eof
        }
        "Unable to connect to remote host" {
            expect eof
        }
        "Connection refused" {
            expect eof
        }
    }
}

# Hosts to probe; the return value of login is not meaningful, so `running`
# is effectively unused.
set running [login "9.111.250.2" $username $password]
set running [login "9.21.63.1" $username $password]
set running [login "9.111.143.1" $username $password]
|
import my_keras.input_manipulation as to_test
import numpy as np
def test_rotate_array():
    """rotate() shifts elements left by the given offset, wrapping to the end."""
    original = [1, 2, 3, 4, 5]
    expected = np.asarray([3, 4, 5, 1, 2])
    assert np.array_equal(to_test.rotate(original, 2), expected)
def test_rotate_array_wrap():
    """An offset larger than the array length wraps around (7 % 5 == 2)."""
    original = [1, 2, 3, 4, 5]
    expected = np.asarray([3, 4, 5, 1, 2])
    assert np.array_equal(to_test.rotate(original, 7), expected)
|
<filename>app/components/LinksApp.js
var React = require('react/addons');
var _ = require('lodash');
var LinkList = require('./LinkList');
var App = React.createClass({
getInitialState: function () {
console.log('this.props.links.length', this.props.links.length);
return {
links: this.props.links,
allLinks: _.cloneDeep(this.props.links),
updatedLink: null,
count: 0,
currentUser: this.props.currentUser
};
},
componentDidMount: function () {
console.log('this componentDidMount', this.state.links);
var that = this;
var socket = io.connect();
socket.on('linkSaved', function (data) {
// console.log('data socket' , data);
if(!data.installationId){
//no installationId means it is new
that.addLinkViaSocket(data);
}
});
},
addLinkViaSocket: function (link) {
// Get current application state
var updatedLinks = this.state.links;
var count = this.state.count;
// Add link to the beginning of array
updatedLinks.unshift(link.object);
// Set application state
this.setState({
links: updatedLinks,
count: ++count,
updatedLink: link.object
});
},
dismissNew: function () {
console.log('dismissed');
this.setState({count: 0})
},
updatePostsAfterUpvote: function (channel, isFiltered) {
var notFIltered = _.sortByOrder(this.props.links, ['upvotes', 'createdAt'], ['desc', 'desc']);
var all;
if(isFiltered){
all = _.filter(notFIltered, function (n) {
return n.channel_id === channel;
});
}
console.log('all', all);
this.setState({
links: isFiltered ? all : notFIltered,
isFiltered: isFiltered
});
},
updateAfterUpvote: function (id, count) {
var updatedCount = +(++count);
var parseObj = new Parse.Query('Links');
var filter = null;
var channel = null;
parseObj.get(id)
.then(function (linkObj) {
linkObj.increment('upvotes');
linkObj.save();
return linkObj;
}, function (object, error) {
console.log('error ', error);
if (error.code === 100) {
window.alert('There was an error with your request. Please try again.');
}
});
//todo -- does not respect if no filter is chosen
if(this.state.isFiltered){
channel = _.findWhere(this.props.links, {objectId: id}).channel_id;
filter = true;
}
_.extend(_.findWhere(this.props.links, {objectId: id}), {
upvotes: updatedCount
});
this.updatePostsAfterUpvote(channel, filter);
},
handleUpvote: function (postId, count, e) {
//todo filter should be here
e.preventDefault();
this.updateAfterUpvote(postId, count);
},
sortFilter: function (channelId, e) {
var all = _.filter(this.props.links, function (n) {
return n.channel_id === channelId;
});
this.setState({
links: all,
isFiltered: true
});
},
clearFilter: function () {
this.setState({
links: this.state.allLinks,
isFiltered: true
});
},
doFilterByVotes: function () {
var all = _.sortByOrder(this.props.links, ['upvotes'], ['desc']);
this.setState({
links: all,
isFiltered: false
});
},
doFilterByDate: function () {
var all = _.sortByOrder(this.props.links, ['createdAt'], ['desc']);
this.setState({
links: all,
isFiltered: false
});
},
render: function () {
return (
<div className="posts-app">
<LinkList
handleUpvote={this.handleUpvote}
sortFilter={this.sortFilter}
clearFilter={this.clearFilter}
doFilterByVotes={this.doFilterByVotes}
doFilterByDate={this.doFilterByDate}
links={this.state.links}
channels={this.props.channels}
users={this.props.users}
count={this.state.count}
dismissNew={this.dismissNew}
updatedLink={this.state.updatedLink}
currentUser={this.state.currentUser}
/>
</div>
);
}
});
module.exports = App; |
#!/usr/bin/env python3.7
import common
import unittest
# Expected-pass cases: brace-expansion input -> the space-separated expansion
# that must appear on stdout.
pass_examples = {
    '{A,B,C}': 'A B C',
    '{A,B}{C,D}': 'AC AD BC BD',
    '{A,B{C,D}}': 'A BC BD',
    '{ABC}': 'ABC',
    'ABC': 'ABC'
}
# Malformed inputs that must be rejected: unbalanced braces, bare commas,
# empty groups and trailing commas.
fail_examples = [
    '}ABC',
    '{ABC',
    '}{',
    '{}',
    'A,B,C',
    '{A{B,C}',
    '{A,}'
]
class TestSpecification(unittest.TestCase):
    """End-to-end checks of the brex binary against the specification examples."""

    def test_pass_examples(self):
        for source, expansion in pass_examples.items():
            status, stdout, stderr = common.brex(source + '\n')
            self.assertEqual(status, 0)  # brex must exit successfully
            self.assertEqual(stderr, '')  # nothing on stderr without --verbose
            self.assertEqual(stdout, expansion + '\n')

    def test_fail_examples(self):
        for source in fail_examples:
            status, stdout, stderr = common.brex(source + '\n')
            self.assertNotEqual(status, 0)  # malformed input must be rejected
            self.assertEqual(stderr, '')  # nothing on stderr without --verbose
            self.assertEqual(stdout, '')  # a rejected input produces no output

if __name__ == '__main__':
    unittest.main()
|
<filename>pipeline/plot/__init__.py
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
import itertools
import pandas as pd
from scipy import ndimage
from pipeline import experiment, ephys, psth
from pipeline import smooth_psth
# ---------- PLOTTING HELPER FUNCTIONS --------------
def _plot_avg_psth(ipsi_psth, contra_psth, vlines={}, ax=None, title=''):
    """Plot unit-averaged contra (blue) and ipsi (red) PSTHs on `ax`.

    ipsi_psth / contra_psth: per-unit sequences of (psth_values, edges)
    pairs; the PSTH traces are stacked and averaged across units, then
    smoothed with `smooth_psth` before plotting.
    vlines: x positions marked with dashed vertical lines (event times).
    NOTE(review): `ax=None` has no fallback -- callers must supply an axis.
    `vlines={}` is a mutable default, but it is only iterated, never mutated.
    """
    avg_contra_psth = np.vstack(
        np.array([i[0] for i in contra_psth])).mean(axis=0)
    # Drop the final bin edge so edges align one-to-one with PSTH bins.
    contra_edges = contra_psth[0][1][:-1]
    avg_ipsi_psth = np.vstack(
        np.array([i[0] for i in ipsi_psth])).mean(axis=0)
    ipsi_edges = ipsi_psth[0][1][:-1]
    ax.plot(contra_edges, smooth_psth(avg_contra_psth), 'b', label='contra')
    ax.plot(ipsi_edges, smooth_psth(avg_ipsi_psth), 'r', label='ipsi')
    for x in vlines:
        ax.axvline(x=x, linestyle='--', color='k')
    # cosmetic
    ax.legend()
    ax.set_title(title)
    ax.set_ylabel('Firing Rate (spike/s)')
    ax.set_xlabel('Time (s)')
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
def _plot_stacked_psth_diff(psth_a, psth_b, vlines=[], ax=None, flip=False, plot=True):
    """
    Heatmap of (psth_a - psth_b)
    psth_a, psth_b are the unit_psth(s) resulted from psth.UnitPSTH.fetch()

    Each unit's difference trace is normalized by its own maximum,
    optionally sign-flipped, and smoothed with a moving average.  The
    processed (units x time) array is returned whether or not it is plotted.
    """
    # Fixed display window in seconds (relative to the alignment event).
    plt_xmin, plt_xmax = -3, 3
    assert len(psth_a) == len(psth_b)
    nunits = len(psth_a)
    aspect = 4.5 / nunits  # 4:3 aspect ratio
    extent = [plt_xmin, plt_xmax, 0, nunits]
    a_data = np.array([r[0] for r in psth_a['unit_psth']])
    b_data = np.array([r[0] for r in psth_b['unit_psth']])
    result = a_data - b_data
    # Per-unit normalization by the row maximum.
    # NOTE(review): a row whose max is 0 would divide by zero -- confirm
    # upstream guarantees a positive peak per unit.
    result = result / np.repeat(result.max(axis=1)[:, None], result.shape[1], axis=1)
    # color flip
    result = result * -1 if flip else result
    # moving average
    result = np.array([_movmean(i) for i in result])
    if plot:
        if ax is None:
            fig, ax = plt.subplots(1, 1)
        # ax.set_axis_off()
        ax.set_xlim([plt_xmin, plt_xmax])
        for x in vlines:
            ax.axvline(x=x, linestyle='--', color='k')
        im = ax.imshow(result, cmap=plt.cm.bwr, aspect=aspect, extent=extent)
        im.set_clim((-1, 1))
    return result
def _plot_with_sem(data, t_vec, ax, c='k'):
    """Plot the across-row mean of `data` against `t_vec` with a shaded
    +/- SEM band in color `c`; NaNs are ignored in both mean and std."""
    v_mean = np.nanmean(data, axis=0)
    # SEM uses the total row count, not the per-column non-NaN count.
    v_sem = np.nanstd(data, axis=0) / np.sqrt(data.shape[0])
    ax.plot(t_vec, v_mean, c)
    ax.fill_between(t_vec, v_mean - v_sem, v_mean + v_sem, alpha=0.25, facecolor=c)
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
def _movmean(data, nsamp=5):
ret = np.cumsum(data, dtype=float)
ret[nsamp:] = ret[nsamp:] - ret[:-nsamp]
return ret[nsamp - 1:] / nsamp
def _extract_one_stim_dur(stim_durs):
"""
In case of multiple photostim durations - pick the shortest duration
In case of no photostim durations - return the default of 0.5s
"""
default_stim_dur = 0.5
if len(stim_durs) == 0:
return default_stim_dur
elif len(stim_durs) > 1:
print(f'Found multiple stim durations: {stim_durs} - select {min(stim_durs)}')
return float(min(stim_durs))
else:
return float(stim_durs[0]) if len(stim_durs) == 1 and stim_durs[0] else default_stim_dur
def _get_photostim_time_and_duration(units, trials):
    """Return (stim_time, stim_dur) for the given units/trials.

    stim_time: mean photostim onset relative to the go cue, in the time
        units of the underlying event-time fields (presumably seconds --
        TODO confirm against the experiment schema).
    stim_dur: one representative duration, reduced from all observed
        durations via `_extract_one_stim_dur`.
    """
    # get photostim duration and stim time (relative to go-cue)
    stim_times, stim_durs = (experiment.PhotostimEvent
                             * (experiment.TrialEvent & 'trial_event_type = "go"').proj(..., '-duration')
                             * trials
                             & units).proj('duration', stim_time='photostim_event_time - trial_event_time').fetch(
        'stim_time', 'duration')
    stim_dur = _extract_one_stim_dur(np.unique(stim_durs))
    stim_time = np.nanmean(stim_times.astype(np.float))
    return stim_time, stim_dur
def _get_trial_event_times(events, units, trial_cond_name):
    """
    Get median event start times from all unit-trials from the specified "trial_cond_name" and "units" - aligned to GO CUE
    :param events: list of events
    :return: list of median start times (relative to the go cue), one per
        event in `events`, in the order given
    """
    # The go cue is appended so every event time can be re-referenced to it;
    # it is excluded from the returned list (events[:-1]).
    events = list(events) + ['go']
    event_types, event_times = (psth.TrialCondition().get_trials(trial_cond_name)
                                * (experiment.TrialEvent & [{'trial_event_type': eve} for eve in events])
                                & units).fetch('trial_event_type', 'trial_event_time')
    period_starts = [np.nanmedian((event_times[event_types == event_type] - event_times[event_types == 'go']).astype(float))
                     for event_type in events[:-1]]
    return period_starts
def _get_units_hemisphere(units):
    """Return the single hemisphere label shared by all given units.

    Raises if the units span both hemispheres.  The label values come from
    `experiment.BrainLocation` -- presumably 'left'/'right'; confirm
    against the schema.
    """
    hemispheres = np.unique((ephys.ProbeInsertion.InsertionLocation
                             * experiment.BrainLocation & units).fetch('hemisphere'))
    if len(hemispheres) > 1:
        raise Exception('Error! The specified units belongs to both hemispheres...')
    return hemispheres[0]
def jointplot_w_hue(data, x, y, hue=None, colormap=None,
                    figsize=None, fig=None, scatter_kws=None):
    """
    __author__ = "<EMAIL>"
    __copyright__ = "Copyright 2018, github.com/ruxi"
    __license__ = "MIT"
    __version__ = "0.0.1"
    # update: Mar 5 , 2018
    # created: Feb 19, 2018
    # desc: seaborn jointplot with 'hue'
    # prepared for issue: https://github.com/mwaskom/seaborn/issues/365
    jointplots with hue groupings.
    minimum working example
    -----------------------
    iris = sns.load_dataset("iris")
    jointplot_w_hue(data=iris, x = 'sepal_length', y = 'sepal_width', hue = 'species')['fig']
    changelog
    ---------
    2018 Mar 5: added legends and colormap
    2018 Feb 19: gist made

    Returns dict(fig=..., gridspec=...); returns a string if `hue` is None.
    """
    import matplotlib.gridspec as gridspec
    import matplotlib.patches as mpatches
    # defaults
    if colormap is None:
        colormap = sns.color_palette()  # ['blue','orange']
    if figsize is None:
        figsize = (5, 5)
    if fig is None:
        fig = plt.figure(figsize=figsize)
    if scatter_kws is None:
        scatter_kws = dict(alpha=0.4, lw=1)
    # derived variables
    if hue is None:
        return "use normal sns.jointplot"
    hue_groups = data[hue].unique()
    subdata = dict()
    colors = dict()
    # One color per hue group, in first-appearance order.
    active_colormap = colormap[0: len(hue_groups)]
    legend_mapping = []
    for hue_grp, color in zip(hue_groups, active_colormap):
        legend_entry = mpatches.Patch(color=color, label=hue_grp)
        legend_mapping.append(legend_entry)
        subdata[hue_grp] = data[data[hue] == hue_grp]
        colors[hue_grp] = color
    # canvas setup: 2x2 grid -- top-left x-histogram, bottom-left main
    # scatter, bottom-right y-histogram, top-right legend.
    grid = gridspec.GridSpec(2, 2,
                             width_ratios=[4, 1],
                             height_ratios=[1, 4],
                             hspace=0, wspace=0
                             )
    ax_main = plt.subplot(grid[1, 0])
    ax_xhist = plt.subplot(grid[0, 0], sharex = ax_main)
    ax_yhist = plt.subplot(grid[1, 1])  # , sharey=ax_main)
    ## plotting
    # histplot x-axis
    for hue_grp in hue_groups:
        sns.distplot(subdata[hue_grp][x], color=colors[hue_grp]
                     , ax=ax_xhist)
    # histplot y-axis
    for hue_grp in hue_groups:
        sns.distplot(subdata[hue_grp][y], color=colors[hue_grp]
                     , ax=ax_yhist, vertical=True)
    # main scatterplot
    # note: must be after the histplots else ax_yhist messes up
    for hue_grp in hue_groups:
        sns.regplot(data=subdata[hue_grp], fit_reg=True,
                    x=x, y=y, ax=ax_main, color=colors[hue_grp]
                    , line_kws={'alpha': 0.5}, scatter_kws=scatter_kws
                    )
    # despine
    for myax in [ax_yhist, ax_xhist]:
        sns.despine(ax=myax, bottom=False, top=True, left=False, right=True
                    , trim=False)
        plt.setp(myax.get_xticklabels(), visible=False)
        plt.setp(myax.get_yticklabels(), visible=False)
    # topright
    ax_legend = plt.subplot(grid[0, 1])  # , sharey=ax_main)
    plt.setp(ax_legend.get_xticklabels(), visible=False)
    plt.setp(ax_legend.get_yticklabels(), visible=False)
    ax_legend.legend(handles=legend_mapping)
    return dict(fig=fig, gridspec=grid)
|
<reponame>ch1huizong/learning
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
from shutil import *
from commands import *
print'BEFORE:'
print getoutput('ls -rlast /tmp/example')
copytree('../shutil','/tmp/example')
print'\nAFTER:'
print getoutput('ls -rlast /tmp/example')
|
java -Dlogger.level=2 -classpath $GITDIR/tf/target/classes:$GITDIR/tf/libs/* -Djava.net.preferIPv4Stack=true com.pointr.tensorflow.DirectSubmitter $GITDIR/tf/src/main/resources/submitter.local.yml
|
<reponame>jacbop/brewery<filename>src/test/java/com/wilberding/brewery/data/VolBetaCurveTest.java
/**
* Copyright (C) 2017 - present by wilberding.com
*
* Please see distribution for license.
*/
package com.wilberding.brewery.data;
import com.wilberding.brewery.lib.Curve;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class VolBetaCurveTest {
    // Reference data: http://www.engineeringtoolbox.com/water-thermal-properties-d_162.html

    /**
     * Samples the volumetric-beta curve at a handful of temperatures and
     * pins the exact interpolated values (including the 149/150 boundary).
     */
    @Test
    public void testBasics() {
        Curve curve = VolBetaCurve.INSTANCE.getCurve();
        assertThat(curve.y(32.0)).isEqualTo(-5.0E-5);
        assertThat(curve.y(100.0)).isEqualTo(3.6677777777777777E-4);
        assertThat(curve.y(149.0)).isEqualTo(5.52E-4);
        assertThat(curve.y(150.0)).isEqualTo(5.553333333333334E-4);
        assertThat(curve.y(212.0)).isEqualTo(6.95E-4);
    }
}
|
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the blog dashboard page"""
from __future__ import absolute_import
from __future__ import unicode_literals
from core import feconf
from core import utils
from core.controllers import acl_decorators
from core.controllers import base
from core.controllers import domain_objects_validator as validation_method
from core.domain import blog_domain
from core.domain import blog_services
from core.domain import config_domain
from core.domain import fs_services
from core.domain import image_validation_services
from core.domain import user_services
from typing import Any, Dict, List
# Here we are using Dict[str, Any] for the return value `summary_dicts` since
# we have to return a list with each element being domain object converted to
# a dictionary.
def _get_blog_card_summary_dicts_for_dashboard(
        summaries: List[blog_domain.BlogPostSummary]) -> List[Dict[str, Any]]:
    """Creates summary dicts for use in blog dashboard.

    Args:
        summaries: list(BlogPostSummary). List of blog post summary
            domain objects.

    Returns:
        list(Dict(str, *)). The list of blog post summary dicts, each with
        the internal author_id field stripped out.
    """
    def _to_dashboard_dict(
            summary: blog_domain.BlogPostSummary) -> Dict[str, Any]:
        # author_id is an internal identifier and must not leak to clients.
        dashboard_dict = summary.to_dict()
        del dashboard_dict['author_id']
        return dashboard_dict

    return [_to_dashboard_dict(summary) for summary in summaries]
class BlogDashboardPage(base.BaseHandler):
    """Blog Dashboard Page Handler to render the frontend template."""

    # No URL path arguments and no GET payload are expected.
    URL_PATH_ARGS_SCHEMAS = {}
    HANDLER_ARGS_SCHEMAS = {
        'GET': {}
    }

    @acl_decorators.can_access_blog_dashboard
    def get(self) -> None:
        """Handles GET requests by serving the dashboard's HTML entry point."""
        self.render_template('blog-dashboard-page.mainpage.html')
class BlogDashboardDataHandler(base.BaseHandler):
    """Provides user data for the blog dashboard."""

    GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON

    # Neither endpoint takes URL path arguments or payload fields.
    URL_PATH_ARGS_SCHEMAS = {}
    HANDLER_ARGS_SCHEMAS = {
        'GET': {},
        'POST': {},
    }

    @acl_decorators.can_access_blog_dashboard
    def get(self) -> None:
        """Handles GET requests.

        Responds with the user's display data plus counts and summary dicts
        for both published and draft blog posts.
        """
        user_settings = user_services.get_user_settings(self.user_id)
        no_of_published_blog_posts = 0
        published_post_summary_dicts = []
        no_of_draft_blog_posts = 0
        draft_blog_post_summary_dicts = []
        # The boolean argument selects published (True) vs draft (False).
        published_post_summaries = (
            blog_services.get_blog_post_summary_models_list_by_user_id(
                self.user_id, True))
        if published_post_summaries:
            no_of_published_blog_posts = len(published_post_summaries)
            published_post_summary_dicts = (
                _get_blog_card_summary_dicts_for_dashboard(
                    published_post_summaries))
        draft_blog_post_summaries = (
            blog_services.get_blog_post_summary_models_list_by_user_id(
                self.user_id, False))
        if draft_blog_post_summaries:
            no_of_draft_blog_posts = len(draft_blog_post_summaries)
            draft_blog_post_summary_dicts = (
                _get_blog_card_summary_dicts_for_dashboard(
                    draft_blog_post_summaries))
        self.values.update({
            'username': user_settings.username,
            'profile_picture_data_url': user_settings.profile_picture_data_url,
            'no_of_published_blog_posts': no_of_published_blog_posts,
            'no_of_draft_blog_posts': no_of_draft_blog_posts,
            'published_blog_post_summary_dicts': published_post_summary_dicts,
            'draft_blog_post_summary_dicts': draft_blog_post_summary_dicts
        })
        self.render_json(self.values)

    @acl_decorators.can_access_blog_dashboard
    def post(self) -> None:
        """Handles POST requests to create a new blog post draft."""
        new_blog_post = blog_services.create_new_blog_post(self.user_id)
        self.render_json({'blog_post_id': new_blog_post.id})
class BlogPostHandler(base.BaseHandler):
    """Handler for blog dashboard editor: CRUD operations on one blog post."""

    GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON

    URL_PATH_ARGS_SCHEMAS = {
        'blog_post_id': {
            'schema': {
                'type': 'basestring'
            }
        }
    }
    HANDLER_ARGS_SCHEMAS = {
        'GET': {},
        'PUT': {
            'new_publish_status': {
                'schema': {
                    'type': 'bool',
                }
            },
            'change_dict': {
                'schema': {
                    'type': 'object_dict',
                    'validation_method': (
                        validation_method.validate_change_dict_for_blog_post),
                },
                'default_value': None
            },
        },
        'POST': {
            'thumbnail_filename': {
                'schema': {
                    'type': 'basestring'
                }
            },
            'image': {
                'schema': {
                    'type': 'basestring'
                }
            },
        },
        'DELETE': {}
    }

    @acl_decorators.can_access_blog_dashboard
    def get(self, blog_post_id: str) -> None:
        """Populates the data on the blog dashboard editor page.

        Responds with the blog post dict (author_id replaced by the author's
        username) plus the tag-related config values the editor UI needs.
        """
        blog_domain.BlogPost.require_valid_blog_post_id(blog_post_id)
        blog_post = (
            blog_services.get_blog_post_by_id(blog_post_id, strict=False))
        if blog_post is None:
            raise self.PageNotFoundException(
                'The blog post with the given id or url doesn\'t exist.')
        # include_marked_deleted so posts by since-deleted authors still load.
        user_settings = user_services.get_users_settings(
            [blog_post.author_id], strict=False, include_marked_deleted=True)
        username = user_settings[0].username
        max_no_of_tags = config_domain.Registry.get_config_property(
            'max_number_of_tags_assigned_to_blog_post').value
        list_of_default_tags = config_domain.Registry.get_config_property(
            'list_of_default_tags_for_blog_post').value
        # Never expose the internal author_id to the client.
        blog_post_dict = blog_post.to_dict()
        del blog_post_dict['author_id']
        blog_post_dict['author_username'] = username
        self.values.update({
            'blog_post_dict': blog_post_dict,
            'username': username,
            'profile_picture_data_url': (
                user_settings[0].profile_picture_data_url),
            'max_no_of_tags': max_no_of_tags,
            'list_of_default_tags': list_of_default_tags
        })
        self.render_json(self.values)

    @acl_decorators.can_edit_blog_post
    def put(self, blog_post_id: str) -> None:
        """Updates properties of the given blog post.

        Applies change_dict first, then reconciles publish state: publishes
        when new_publish_status is truthy, otherwise unpublishes only if the
        post was previously published.
        """
        blog_domain.BlogPost.require_valid_blog_post_id(blog_post_id)
        blog_post_rights = (
            blog_services.get_blog_post_rights(blog_post_id, strict=False))
        blog_post_currently_published = blog_post_rights.blog_post_is_published
        change_dict = self.normalized_payload.get('change_dict')
        blog_services.update_blog_post(blog_post_id, change_dict)
        new_publish_status = self.normalized_payload.get('new_publish_status')
        if new_publish_status:
            blog_services.publish_blog_post(blog_post_id)
        elif blog_post_currently_published:
            blog_services.unpublish_blog_post(blog_post_id)
        blog_post_dict = (
            blog_services.get_blog_post_by_id(blog_post_id).to_dict())
        self.values.update({
            'blog_post': blog_post_dict
        })
        self.render_json(self.values)

    @acl_decorators.can_edit_blog_post
    def post(self, blog_post_id: str) -> None:
        """Stores thumbnail of the blog post in the datastore.

        Validates the uploaded image, then saves both the original and a
        compressed version under the blog post entity.
        """
        blog_domain.BlogPost.require_valid_blog_post_id(blog_post_id)
        # NOTE(review): the image comes from normalized_request while the
        # filename comes from normalized_payload -- confirm this split is
        # intentional on the frontend.
        raw_image = self.normalized_request.get('image')
        thumbnail_filename = self.normalized_payload.get('thumbnail_filename')
        try:
            file_format = image_validation_services.validate_image_and_filename(
                raw_image, thumbnail_filename)
        except utils.ValidationError as e:
            raise self.InvalidInputException(e)
        entity_id = blog_post_id
        filename_prefix = 'thumbnail'
        image_is_compressible = (
            file_format in feconf.COMPRESSIBLE_IMAGE_FORMATS)
        fs_services.save_original_and_compressed_versions_of_image(
            thumbnail_filename, feconf.ENTITY_TYPE_BLOG_POST, entity_id,
            raw_image, filename_prefix, image_is_compressible)
        self.render_json(self.values)

    @acl_decorators.can_delete_blog_post
    def delete(self, blog_post_id: str) -> None:
        """Handles Delete requests by removing the blog post entirely."""
        blog_domain.BlogPost.require_valid_blog_post_id(blog_post_id)
        blog_services.delete_blog_post(blog_post_id)
        self.render_json(self.values)
|
package com.infamous.framework.sensitive.core;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Optional;
public enum MessageDigestAlgorithm {

    MD5("MD5"),
    SHA_1("SHA-1"),
    SHA_256("SHA-256"),
    SHA_512("SHA-512");

    /** JCA algorithm name as understood by {@code MessageDigest.getInstance}. */
    private final String m_name;

    MessageDigestAlgorithm(String name) {
        this.m_name = name;
    }

    public String getName() {
        return m_name;
    }

    /**
     * Hashes {@code source} with this algorithm via {@code HashStringUtils}.
     *
     * @throws IllegalStateException if the JVM does not provide the algorithm
     */
    public String hash(String source) {
        Optional<MessageDigest> digest = getMessageDigest(getName());
        return digest
            .map(md -> HashStringUtils.hash(md, source))
            .orElseThrow(() -> new IllegalStateException("Not supported message digest algorithm"));
    }

    /** Looks up a MessageDigest instance; empty when the algorithm is unknown. */
    private Optional<MessageDigest> getMessageDigest(String algorithmName) {
        try {
            return Optional.of(MessageDigest.getInstance(algorithmName));
        } catch (NoSuchAlgorithmException e) {
            return Optional.empty();
        }
    }
}
|
/**
* Lambda function to be triggered by a Contentful webhook if changes are applied to
* an Entry through Contentful's management API.
*
* Event flow:
*
* 1. Parse the incoming data and headers from Contentful
* 2. Use Contentful's management API to resolve a user id to a user name
* 3. Format a message compatible with Slack's incoming webhooks
* 4. Post to Slack
* @param event AWS event data.
* @param context AWS Lambda uses this parameter to provide your handler the runtime
* information of the Lambda function that is executing.
* @param callback Optional callback to return information to the caller.
*/
exports.handler = function (event, context, callback) {
  // The body of the entity (Entry / ContentType).
  const body = event;
  const id = body.sys.id;
  const spaceId = body.sys.space.sys.id;
  // The type of the entity (Entry / ContentType).
  const type = body.sys.type;
  // We only want to post to slack if an entry is changed.
  if (type != 'Entry') {
    callback(null, "I am only posting entries to slack!");
    return;
  }
  // The URL of your incoming webhook at slack,
  const slackURL = process.env.SLACK_URL;
  // URL pattern of the CMA endpoint.
  const cmaURL = `https://api.contentful.com/spaces/${spaceId}/`;
  // A valid content management token.
  const cmaToken = process.env.CMA_TOKEN;
  // Maps URL fragments to types.
  const typeUrlMap = {
    "Entry": "entries",
    "Asset": "assets",
    "ContentType": "content_types"
  };
  // Path to deeplink to the entity changed.
  const contentfulDeeplinkURL = `https://app.contentful.com/spaces/${spaceId}/${typeUrlMap[type]}/${id}`;
  // Message template following the slack specification:
  // https://api.slack.com/docs/formatting
  var slackMessageTemplate = {
    "username": "Webhook: Glossier",
    "icon_emoji": ":contentful:",
    "attachments": [{
      "fallback": "",
      "pretext": "",
      "color": "#000000",
      "fields": [
        {
          "title": "Action applied to Entry",
          "value": event.topic,
          "short": false
        }
      ]
    }]
  };
  // Load the axios http lib.
  var axios = require('axios');
  // Promise lib.
  var Promise = require('promise');
  /*
   * 1. get the userId
   * 2. getTheName of the user
   * 3. post final message to slack
   */
  getUserId().
  then(getUserName).
  then(postToSlack).
  then(function () {
    callback(null, 'Webhook adaptor complete!');
  }).catch(function (error) {
    console.log('error', error);
    callback(error);
  });
  /**
   * The structure of the body of the payload follows th convention of our APIs. That means
   * if an action does affect the Delivery API (e.g. publish), the body will not contain the
   * id of the user who applied the change. If an action does affect the Management API
   * (e.g. auto_save), the body will contain the id of the user who applied the change.
   *
   * Falls back to a CMA lookup of the entity when the id is absent.
   * @returns {Promise<string>} Resolves with the updating user's id.
   */
  function getUserId() {
    // Do not ask the CMA for user id if already present in body.
    if (body.sys.updatedBy !== undefined) {
      return Promise.resolve(body.sys.updatedBy.sys.id);
    }
    return axios({
      url: cmaURL + typeUrlMap[type] + '/' + id,
      method: 'GET',
      headers: {
        'Authorization': 'Bearer ' + cmaToken
      }
    }).then(response => {
      return response.data.sys.updatedBy.sys.id;
    }).catch(error => {
      throw error;
    });
  }
  /**
   * Request the user name from CMA.
   * NOTE(review): this fetches the space's whole user list and filters
   * client-side; a direct users/{id} lookup may be possible.
   * @param userId Id of the user.
   * @returns {axios.Promise} Resolves with "First Last".
   */
  function getUserName(userId) {
    return axios({
      url: cmaURL + 'users/',
      method: 'GET',
      headers: {
        'Authorization': 'Bearer ' + cmaToken
      }
    }).then(response => {
      var users = response.data.items.filter(user => {
        return user.sys.id == userId;
      });
      var userName = users[0].firstName + ' ' + users[0].lastName;
      console.log('getUserName', userName);
      return userName;
    }).catch(function (error) {
      throw error;
    })
  }
  /**
   * Post final message to slack webhook.
   * NOTE(review): field values are read from the 'en-US' locale only.
   * @param userName Username.
   * @returns {*|axios.Promise}
   */
  function postToSlack(userName) {
    console.log('postToSlack');
    var message = `An Entry has just been changed by ${userName}. The full Entry is below in the fields. Here is the link to the entry: <${contentfulDeeplinkURL}|Link to ${body.sys.type}>`;
    // Append all fields to post.
    for (var key in body.fields) {
      slackMessageTemplate.attachments[0].fields.push({
        "title": "field." + key,
        "value": body.fields[key]['en-US'],
        short: false
      });
    }
    slackMessageTemplate.attachments[0].fallback = message;
    slackMessageTemplate.attachments[0].pretext = message;
    console.log(JSON.stringify(slackMessageTemplate));
    return axios({
      url: slackURL,
      method: 'POST',
      data: JSON.stringify(slackMessageTemplate)
    });
  }
};
|
/*
* Copyright 2015 Shorindo, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shorindo.dbtools;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
/**
*
*/
public class DatabaseIO extends Connector {
    public static final int HEADER_ROW = 0;             // sheet row holding column names
    public static final int START_ROW = HEADER_ROW + 1; // first data row
    public static final int START_COL = 0;              // first column to read
    public static final String NULL_EXPR = "{NULL}";    // cell marker meaning SQL NULL
    Properties props = new Properties();
    // Table name -> metadata, kept sorted by name.
    Map<String,TableMeta> tableMap = new TreeMap<String,TableMeta>();
    /**
     * Ad-hoc smoke test: exports the database to one workbook, then imports
     * another.  Paths are hard-coded to the data/ directory.
     */
    public static void main(String args[]) {
        try {
            DatabaseIO dbio = new DatabaseIO();
            dbio.exportToXls(new File("data/sample_XXX.xls"));
            dbio.importFromXls(new File("data/sample.xls"));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
        }
    }
private static void trace(Object msg) {
System.out.println("[T] " + msg);
}
private static void debug(Object msg) {
System.out.println("[D] " + msg);
}
private static void info(Object msg) {
System.out.println("[I] " + msg);
}
    /**
     * Loads table metadata on construction.
     * NOTE(review): failures in loadTableMeta() are swallowed after a stack
     * trace, leaving the instance half-initialized -- confirm this is
     * intentional for this tooling context.
     */
    public DatabaseIO() {
        try {
            loadTableMeta();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
public void importFromXls(File xls)throws IOException, SQLException {
Connection conn = getConnection();
info("File:" + xls.getName());
FileInputStream is = new FileInputStream(xls);
HSSFWorkbook book = new HSSFWorkbook(is);
for (int i = 0; i < book.getNumberOfSheets(); i++) {
HSSFSheet sheet = book.getSheetAt(i);
String tableName = sheet.getSheetName();
info("Sheet:" + tableName);
TableMeta tableMeta = new TableMeta(conn, tableName);
HSSFRow row = sheet.getRow(HEADER_ROW);
int colNum = START_COL;
boolean hasCol = true;
List<String> colNames = new ArrayList<String>();
while (hasCol) {
//debug("[" + HEADER_ROW + "," + colNum + "]");
HSSFCell cell = row.getCell(colNum);
if (cell == null) {
break;
}
String columnName = cell.getStringCellValue();
if (columnName == null || columnName.length() == 0) {
hasCol = false;
} else {
colNames.add(columnName);
}
colNum++;
}
StringBuffer sb = new StringBuffer("INSERT INTO " + tableName + " (");
String sep = "";
for (String colName : colNames) {
sb.append(sep + colName);
sep = ",";
}
sb.append(") VALUES (");
sep = "";
for (String colName : colNames) {
sb.append(sep + "?");
sep = ",";
}
sb.append(")");
//debug(sb.toString());
for (int rowNum = START_ROW; rowNum <= sheet.getLastRowNum(); rowNum++) {
trace("Row:" + String.valueOf(rowNum));
row = sheet.getRow(rowNum);
PreparedStatement stmt = conn.prepareStatement(sb.toString());
for (int j = 0; j < colNames.size(); j++) {
HSSFCell cell = row.getCell(j);
//println(cell.getStringCellValue());
switch (cell.getCellType()) {
case HSSFCell.CELL_TYPE_NUMERIC:
stmt.setDouble(j + 1, row.getCell(j).getNumericCellValue());
break;
default:
String cellValue = row.getCell(j).getStringCellValue();
if (NULL_EXPR.equals(cellValue)) {
cellValue = null;
}
stmt.setString(j + 1, cellValue);
}
}
stmt.executeUpdate();
stmt.close();
}
}
if (conn != null)
try {
conn.close();
} catch(SQLException e) {
e.printStackTrace();
}
}
public void exportToXls(File xls) throws Exception {
Connection conn = getConnection();
HSSFWorkbook book = new HSSFWorkbook();
for (String key : tableMap.keySet()) {
TableMeta tableMeta = tableMap.get(key);
List<ColumnMeta> columnList = tableMeta.getColumnList();
trace("Table:" + tableMeta.getName());
HSSFSheet sheet = book.createSheet(tableMeta.getName());
int rowNum = HEADER_ROW;
HSSFRow row = sheet.createRow(rowNum);
int colNum = START_COL;
for (ColumnMeta columnMeta : tableMeta.getColumnList()) {
HSSFCell cell = row.createCell(colNum);
cell.setCellType(Cell.CELL_TYPE_STRING);
cell.setCellValue(columnMeta.getName());
colNum++;
}
PreparedStatement stmt = conn.prepareStatement("SELECT * FROM " + tableMeta.getName());
ResultSet rset = stmt.executeQuery();
while (rset.next()) {
row = sheet.createRow(++rowNum);
colNum = START_COL;
for (ColumnMeta columnMeta : columnList) {
Object value = rset.getObject(columnMeta.getName());
HSSFCell cell = row.createCell(colNum++);
cell.setCellValue(value == null ? NULL_EXPR : value.toString());
}
}
rset.close();
}
conn.close();
book.write(new FileOutputStream(xls));
}
private void loadTableMeta() throws Exception {
Connection conn = null;
try {
conn = getConnection();
ResultSet rset = conn.getMetaData().getTables(null, null, "%", new String[]{ "TABLE" });
while (rset.next()) {
String tableName = rset.getString("TABLE_NAME");
tableMap.put(tableName.toUpperCase(), new TableMeta(conn, tableName));
}
rset.close();
} finally {
if (conn != null)
try {
conn.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
public static class TableMeta {
private String name;
private List<ColumnMeta> columnList = new ArrayList<ColumnMeta>();
private List<ColumnMeta> pkList = new ArrayList<ColumnMeta>();
public TableMeta(Connection conn, String tableName) throws SQLException {
this.name = tableName;
DatabaseMetaData meta = conn.getMetaData();
ResultSet rset = meta.getColumns(null, null, tableName, null);
//debug(tableName);
while (rset.next()) {
ColumnMeta columnMeta = new ColumnMeta(
rset.getString("COLUMN_NAME"),
rset.getString("TYPE_NAME"));
columnList.add(columnMeta);
}
rset.close();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<ColumnMeta> getColumnList() {
return columnList;
}
public void setColumnList(List<ColumnMeta> columnList) {
this.columnList = columnList;
}
public List<ColumnMeta> getPkList() {
return pkList;
}
public void setPkList(List<ColumnMeta> pkList) {
this.pkList = pkList;
}
}
public static class ColumnMeta {
private String name;
private Class<?> type;
public ColumnMeta(String columnName, String typeName) {
this.name = columnName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Class<?> getType() {
return type;
}
public void setType(Class<?> type) {
this.type = type;
}
}
}
|
#!/usr/bin/env bash
# Acceptance test: `curlman add --help` must exit 0 and print exactly the
# canonical usage text in src/main/docs/add-usage.txt.

if [[ -z "$curlman_dev_home" ]]; then
    echo "[$(basename "$0")]: ERROR: curlman_dev_home variable not set. Set it to the absolute path of your local working tree."
    exit 1
fi

# Quote "$0" and all path expansions so paths containing spaces don't split.
tmpDir=$("$curlman_dev_home/src/test-utils/init-tmp-dir.sh" "$0")

# ARRANGE

# ACT
"$curlman_dev_home/src/main/curlman.sh" add --help > "$tmpDir/out.txt"
exitCode=$?

# ASSERT
if [[ $exitCode -ne 0 ]]; then
    echo "[$(basename "$0")]: FAIL: curlman ended abnormally: $exitCode"
    exit 1
fi

diff "$tmpDir/out.txt" "$curlman_dev_home/src/main/docs/add-usage.txt"
exitCode=$?
if [[ $exitCode -ne 0 ]]; then
    # Message format normalized to match the other FAIL branch.
    echo "[$(basename "$0")]: FAIL: Output differs from expectation."
    exit 1
fi

echo "[$(basename "$0")]: PASS"
exit 0
|
# Requirements:
# jsonscheme2md installed
# referrals-api pulled to latest master next to slate project
# Generate Markdown docs from the JSON schemas of each product vertical.
echo "(Re)generating documentation"
jsonschema2md -e json -d ../referrals-api/src/utils/referral/flights/schemas -o ../referrals-api/src/utils/referral/flights/docs
jsonschema2md -e json -d ../referrals-api/src/utils/referral/hotels/schemas -o ../referrals-api/src/utils/referral/hotels/docs
jsonschema2md -e json -d ../referrals-api/src/utils/referral/cars/schemas -o ../referrals-api/src/utils/referral/cars/docs
echo "Parsing ..."
# Parsing the generated MD files and
# * Fixing the headings;
# * Removing the json files links
# Each pipeline demotes every heading one level (s/^#/##/) and strips the
# "[...json]"/"(...json)" link fragments, writing the result into the slate
# includes folder as an .erb partial.
# NOTE(review): the sed expressions are unquoted, so the shell consumes one
# level of backslashes before sed sees them — quote the patterns before
# changing anything here.
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/dayView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_flights_dayView.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/browseView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_flights_browseView.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/calendarMonthView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_flights_calendarMonthView.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/multiCity.md | sed s/[\\[\|\(][a-zA-Z]*City\.json[\]\|\)]//g > source/includes/_flights_multiCity.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/flightsHomePage.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_flights_homeView.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/cheapFlightsTo.md | sed s/[\\[\|\(][a-zA-Z]*City\.json[\]\|\)]//g > source/includes/_flights_cheapFlightsTo.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/flights/docs/flightsAirline.md | sed s/[\\[\|\(][a-zA-Z]*Airline\.json[\]\|\)]//g > source/includes/_flights_airline.md.erb
#sed s/^#/##/g ../referrals-api/src/utils/referral/hotels/docs/dayView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_hotels_dayView.md.erb
#sed s/^#/##/g ../referrals-api/src/utils/referral/hotels/docs/homeView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_hotels_homeView.md.erb
#sed s/^#/##/g ../referrals-api/src/utils/referral/hotels/docs/hotelDetails.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_hotels_hotelDetails.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/cars/docs/dayView.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_cars_dayView.md.erb
sed s/^#/##/g ../referrals-api/src/utils/referral/cars/docs/carsHome.md | sed s/[\\[\|\(][a-zA-Z]*View\.json[\]\|\)]//g > source/includes/_cars_carsHome.md.erb
|
#!/usr/bin/env sh
# Replace this shell with Apache httpd running in the foreground, so the
# process receives signals directly (typical container entrypoint pattern —
# confirm against the image's Dockerfile).
exec /usr/sbin/httpd -D FOREGROUND
|
<reponame>easel/opentelemetry-java-instrumentation<filename>instrumentation/redisson-3.0/src/main/java/io/opentelemetry/instrumentation/auto/redisson/RedissonClientTracer.java
/*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opentelemetry.instrumentation.auto.redisson;
import io.netty.channel.Channel;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.instrumentation.auto.api.jdbc.DbSystem;
import java.net.InetSocketAddress;
import java.util.List;
import org.redisson.client.RedisConnection;
import org.redisson.client.protocol.CommandData;
import org.redisson.client.protocol.CommandsData;
/**
 * Database-client tracer for Redisson connections. The "query" reported on a
 * span is the Redis command name, or the ';'-joined names for a batch.
 */
public class RedissonClientTracer extends DatabaseClientTracer<RedisConnection, Object> {
  public static final RedissonClientTracer TRACER = new RedissonClientTracer();

  @Override
  protected String getInstrumentationName() {
    return "io.opentelemetry.auto.redisson-3.0";
  }

  /**
   * Renders the command payload: a batch becomes the command names joined
   * with ';', a single command becomes its name, anything else falls back to
   * the generic "Redis Command" label.
   */
  @Override
  protected String normalizeQuery(Object args) {
    if (args instanceof CommandsData) {
      StringBuilder joined = new StringBuilder();
      String separator = "";
      for (CommandData<?, ?> command : ((CommandsData) args).getCommands()) {
        joined.append(separator).append(command.getCommand().getName());
        separator = ";";
      }
      return joined.toString();
    }
    if (args instanceof CommandData) {
      return ((CommandData<?, ?>) args).getCommand().getName();
    }
    return "Redis Command";
  }

  @Override
  protected String dbSystem(RedisConnection connection) {
    return DbSystem.REDIS;
  }

  /** Redis connections carry no user name here. */
  @Override
  protected String dbUser(RedisConnection connection) {
    return null;
  }

  /** No named database is reported for Redis. */
  @Override
  protected String dbName(RedisConnection connection) {
    return null;
  }

  @Override
  protected InetSocketAddress peerAddress(RedisConnection connection) {
    return (InetSocketAddress) connection.getChannel().remoteAddress();
  }

  @Override
  protected String dbConnectionString(RedisConnection connection) {
    InetSocketAddress peer = (InetSocketAddress) connection.getChannel().remoteAddress();
    return peer.getHostString() + ":" + peer.getPort();
  }
}
|
#include <stdio.h>
/* Reverse arr[start..end] in place by swapping symmetric pairs,
 * walking the two indices toward each other. */
void reverseArray(int arr[], int start, int end)
{
    for (; start < end; start++, end--)
    {
        int tmp = arr[start];
        arr[start] = arr[end];
        arr[end] = tmp;
    }
}
/* Demo driver: reverse a sample array and print it space-separated. */
int main(void)
{
    int values[] = {1, 2, 3, 4, 5};
    int count = (int)(sizeof(values) / sizeof(values[0]));

    reverseArray(values, 0, count - 1);

    for (int i = 0; i < count; i++)
        printf("%d ", values[i]);
    return 0;
}
<reponame>HISPSA/data-visualizer-app
import {
TITLE_PROP,
SUBTITLE_PROP,
SERIES_PROP,
Y_AXIS_PROP,
X_AXIS_PROP,
LEGEND_PROP,
PLOT_LINES_PROP,
LABELS_PROP,
} from './config'
const CONFIG_PROP = '$config'

// Asserts that a top-level chart-config property deep-equals `value`.
const expectConfigPropToBeValue = (prop, value) =>
    cy.window().its(CONFIG_PROP).its(prop).should('eql', value)

export const expectWindowConfigTitleToBeValue = value =>
    expectConfigPropToBeValue(TITLE_PROP, value)

export const expectWindowConfigSubtitleToBeValue = value =>
    expectConfigPropToBeValue(SUBTITLE_PROP, value)

export const expectWindowConfigLegendToBeValue = value =>
    expectConfigPropToBeValue(LEGEND_PROP, value)
// Asserts that plot line `lineIndex` of the given axis deep-equals `value`.
export const expectWindowConfigAxisPlotLinesToBeValue = ({
    axisType,
    axisIndex,
    lineIndex,
    value,
}) => {
    const axis = cy.window().its(CONFIG_PROP).its(axisType).its(axisIndex)
    return axis.its(PLOT_LINES_PROP).its(lineIndex).should('eql', value)
}
// Asserts that a property of axis `axisIndex` deep-equals `value`.
const expectAxisPropToBeValue = (axisType, axisIndex, prop, value) =>
    cy
        .window()
        .its(CONFIG_PROP)
        .its(axisType)
        .its(axisIndex)
        .its(prop)
        .should('eql', value)

export const expectWindowConfigAxisTitleToBeValue = (
    axisType,
    axisIndex,
    value
) => expectAxisPropToBeValue(axisType, axisIndex, TITLE_PROP, value)

export const expectWindowConfigAxisLabelsToBeValue = (
    axisType,
    axisIndex,
    value
) => expectAxisPropToBeValue(axisType, axisIndex, LABELS_PROP, value)
// Trendlines are rendered as 'line' or 'spline' series.
const isTrendlineType = item => item.type === 'line' || item.type === 'spline'

export const expectWindowConfigSeriesToNotHaveTrendline = () =>
    cy
        .window()
        .its(CONFIG_PROP)
        .its(SERIES_PROP)
        .then(series => {
            expect(series.filter(isTrendlineType)).to.have.lengthOf(0)
        })

export const expectWindowConfigSeriesToHaveTrendline = expectedTL =>
    cy
        .window()
        .its(CONFIG_PROP)
        .its(SERIES_PROP)
        .then(series => {
            // Locate the trendline series by name, then compare each field.
            const actualTL = series.find(
                item => isTrendlineType(item) && item.name === expectedTL.name
            )
            ;['name', 'type', 'dashStyle', 'lineWidth'].forEach(key =>
                expect(actualTL[key]).to.eq(expectedTL[key])
            )
            expect(actualTL.marker).to.eql(expectedTL.marker)
            expect(actualTL.zIndex).to.eq(expectedTL.zIndex)
        })
// Asserts that `getter(firstAxis)` strictly equals `expected` for the first
// axis of the given kind (Y_AXIS_PROP / X_AXIS_PROP).
const expectFirstAxisValue = (axisProp, getter, expected) =>
    cy
        .window()
        .its(CONFIG_PROP)
        .its(axisProp)
        .then(axes => {
            expect(getter(axes[0])).to.eq(expected)
        })

export const expectWindowConfigYAxisToHaveTitleText = text =>
    expectFirstAxisValue(Y_AXIS_PROP, axis => axis.title.text, text)

export const expectWindowConfigXAxisToHaveTitleText = text =>
    expectFirstAxisValue(X_AXIS_PROP, axis => axis.title.text, text)

export const expectWindowConfigYAxisToHaveRangeMinValue = value =>
    expectFirstAxisValue(Y_AXIS_PROP, axis => axis.min, value)

export const expectWindowConfigYAxisToHaveRangeMaxValue = value =>
    expectFirstAxisValue(Y_AXIS_PROP, axis => axis.max, value)

export const expectWindowConfigXAxisToHaveRangeMinValue = value =>
    expectFirstAxisValue(X_AXIS_PROP, axis => axis.min, value)

export const expectWindowConfigXAxisToHaveRangeMaxValue = value =>
    expectFirstAxisValue(X_AXIS_PROP, axis => axis.max, value)
|
package net.exkazuu.battle2048.game;
/**
 * Immutable summary of a finished game: the winner, the turn on which the
 * game ended, and why the loser was defeated.
 */
public class GameResult {
  /** Identifier of the winning player (semantics defined by the game engine). */
  public final int winner;
  /** Turn number at which the game ended. */
  public final int turn;
  /** Reason the losing player was defeated. */
  public final DefeatReason defeatReason;

  public GameResult(final int winner, final int turn, final DefeatReason defeatReason) {
    this.winner = winner;
    this.turn = turn;
    this.defeatReason = defeatReason;
  }
}
|
# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is meant to be sourced in a .bashrc file to add useful
# bash functions to an interactive shell
# Bash function to run vpp 'make test' testcases
# repeatedly, stopping on test failure or when
# a test log contains the optionally specified text
# Run a vpp 'make test' testcase (or feature set) repeatedly, stopping on
# test failure or when a test log matches the optionally specified text.
vpp-make-test()
{
    local options
    local usage
    local all
    local debug
    local grep_for
    local show_grep
    local run_make_test
    local old_pwd
    local test_desc
    local is_feature="false"
    local retry_count=100
    local tester=${GERRIT_USER:-$USER}
    local jobs="auto"

    if [ -z "$WS_ROOT" ] ; then
        echo "ERROR: WS_ROOT is not set!"
        return
    elif [ ! -d "$WS_ROOT/src/vppinfra" ] ; then
        echo "ERROR: WS_ROOT is not set to a VPP workspace!"
        return
    fi
    options=$(getopt -o "adfg:j:r:" -- "$@")
    if [ $? -eq 1 ] ; then
        usage=true
    else
        eval set -- $options
    fi
    while [ -z "$usage" ] ; do
        case "$1" in
            -a)
                all="-all"
                ;;
            -d)
                debug="-debug"
                ;;
            -f)
                is_feature="true"
                retry_count=1
                ;;
            -g)
                shift
                show_grep=$1
                # Escape dashes so the text is not parsed as grep options.
                grep_for="${1//-/\\-}"
                ;;
            -j)
                shift
                jobs=$1
                if [ $((jobs)) != $jobs ] ; then
                    echo "ERROR: Invalid option value for -j option ($jobs)!"
                    usage=true;
                fi
                ;;
            -r)
                shift
                retry_count=$1
                if [ $((retry_count)) != $retry_count ] ; then
                    echo "ERROR: Invalid option value for -r option ($retry_count)!"
                    usage=true;
                fi
                ;;
            --)
                shift
                break
                ;;
        esac
        shift
    done
    if [ -n "$usage" ] || [ -z "$1" ] ; then
        if [ -z "$1" ] ; then
            echo "ERROR: no testcase specified!"
        fi
        echo "Usage: vpp-make-test [-a][-d][-f][-g <text>][-j <jobs>][-r <retry count>] <testcase> [<retry_count>]"
        echo "  -a                Run extended tests"
        echo "  -d                Run vpp debug image (i.e. with ASSERTS)"
        echo "  -f                Testcase is a feature set (e.g. tcp)"
        echo "  -g <text>        Text to grep for in log, FAIL on match."
        echo "                    Enclose <text> in single quotes when it contains any dashes:"
        echo "                    e.g.  vpp-make-test -g 'goof-bad-' test_xyz"
        echo "  -j <# jobs>      Set TEST_JOBS (default = auto) for feature set"
        echo "  -r <retry count> Retry Count (default = 100 for individual test | 1 for feature set)"
        return
    fi
    if [ $retry_count -le 0 ] ; then
        retry_count=1
    fi
    if [ "$is_feature" == "true" ] ; then
        run_make_test="make test$all$debug TEST=$1 SANITY=no TEST_JOBS=$jobs"
    else
        run_make_test="make test$all$debug TEST=*.*.$1 SANITY=no"
    fi

    old_pwd=$(pwd)
    cd $WS_ROOT
    line="------------------------------------------------------------------------------"
    test_desc="'$run_make_test'"
    if [ -n "$grep_for" ] ; then
        test_desc="$test_desc [grep '$show_grep']"
    fi
    for ((i=1; i<=retry_count; i++)) ; do
        echo -e "\n$line"
        echo -e "ITERATION [$i/$retry_count]: $test_desc\n$line"
        result=$($run_make_test)
        # '[ ! -d /tmp/vpp-unittest* ]' breaks when the glob expands to more
        # than one directory; use compgen to test whether the glob matches.
        if ! compgen -G "/tmp/vpp-unittest*" > /dev/null ; then
            echo -e "\nERROR: No testcase(s) executed!\n"
            cd $old_pwd    # restore the caller's directory on every exit path
            return
        fi
        echo "$result"
        if [ -n "$grep_for" ] ; then
            grep_results=$(grep -sHn "$grep_for" /tmp/vpp-u*/log.txt)
        fi
        if [ -n "$(echo $result | grep FAILURE)" ] || [ -n "$grep_results" ] ; then
            if [ -n "$grep_results" ] ; then
                fail="FAIL (grep)"
            else
                fail="FAIL"
            fi
            echo -e "\n$line\n$fail [$i/$retry_count]: $test_desc\n$line\n"
            cd $old_pwd    # restore the caller's directory on every exit path
            return
        fi
    done

    echo -e "\n$line\nPASS [$((i-1))/$retry_count]: $test_desc\n$line\n"
    echo -e "Hey $tester, Life is good!!! :D\n"
    cd $old_pwd
}
# bash function to set up csit python virtual environment
csit-env()
{
    # Sanity-check that WS_ROOT points at a CSIT tree (these two files are
    # CSIT-specific markers).
    if [ -f "$WS_ROOT/VPP_REPO_URL" ] && [ -f "$WS_ROOT/requirements.txt" ]; then
        if [ -n "$(declare -f deactivate)" ]; then
            echo "Deactivating Python Virtualenv!"
            deactivate
        fi
        local PIP=pip
        local setup_framework=$WS_ROOT/resources/libraries/python/SetupFramework.py
        # Branches whose SetupFramework.py mentions pip3 need a python3 venv.
        if [ -n "$(grep pip3 $setup_framework)" ]; then
            PIP=pip3
            local VENV_OPTS="-p python3"
        fi
        export CSIT_DIR=$WS_ROOT
        export PYTHONPATH=$CSIT_DIR
        # Recreate the virtualenv from scratch and install CSIT requirements.
        rm -rf $PYTHONPATH/env && virtualenv $VENV_OPTS $PYTHONPATH/env \
            && source $PYTHONPATH/env/bin/activate \
            && $PIP install --upgrade -r $PYTHONPATH/requirements.txt \
            && $PIP install --upgrade -r $PYTHONPATH/tox-requirements.txt
    else
        echo "ERROR: WS_ROOT not set to a CSIT workspace!"
    fi
}
# bash function to set up jenkins sandbox environment
#
# See LF Sandbox documentation:
# https://docs.releng.linuxfoundation.org/en/latest/jenkins-sandbox.html
#
# Prerequisites:
# 1. Create jenkins sandbox token and add it to your local jenkins.ini file
# Either specify the location of the init file in $JENKINS_INI or
# JENKINS_INI will be initialized to either
# ~/.config/jenkins_jobs/jenkins.ini
# $WS_ROOT/jenkins.ini
# 2. Clone ci-management workspace from gerrit.fd.io
# 3. export WS_ROOT=<local ci-management workspace>
jjb-sandbox-env()
{
    # Require WS_ROOT to point at a ci-management checkout (jjb/ marker dir).
    if [ -z "$WS_ROOT" ] ; then
        echo "ERROR: WS_ROOT is not set!"
        return
    elif [ ! -d "$WS_ROOT/jjb" ] ; then
        echo "ERROR: WS_ROOT is not set to a ci-management workspace!"
        return
    fi
    if [ -n "$(declare -f deactivate)" ]; then
        echo "Deactivating Python Virtualenv!"
        deactivate
    fi
    # Locate the jenkins.ini with the sandbox API token unless the caller
    # already exported JENKINS_INI (see prerequisite 1 above).
    if [ -z "$JENKINS_INI" ] ; then
        local user_jenkins_ini="/home/$USER/.config/jenkins_jobs/jenkins.ini"
        if [ -f "$user_jenkins_ini" ] ; then
            export JENKINS_INI=$user_jenkins_ini
        elif [ -f "$WS_ROOT/jenkins.ini" ] ; then
            export JENKINS_INI="$WS_ROOT/jenkins.ini"
        else
            echo "ERROR: Unable to find 'jenkins.ini'!"
            return
        fi
        echo "Exporting JENKINS_INI=$JENKINS_INI"
    elif [ ! -f "$JENKINS_INI" ] ; then
        echo "ERROR: file specified in JENKINS_INI ($JENKINS_INI) not found!"
        return
    fi
    # NOTE(review): this deactivate check duplicates the identical one above —
    # probably only one of the two is needed.
    if [ -n "$(declare -f deactivate)" ]; then
        echo "Deactivating Python Virtualenv!"
        deactivate
    fi
    cd $WS_ROOT
    git submodule update --init --recursive
    # Rebuild the venv and pin jenkins-job-builder to a known-good version.
    local VENV_DIR=$WS_ROOT/venv
    rm -rf $VENV_DIR \
        && python3 -m venv $VENV_DIR \
        && source $VENV_DIR/bin/activate \
        && pip3 install wheel \
        && pip3 install jenkins-job-builder==3.0.2
    # Convenience alias/functions wrapping jenkins-jobs with the sandbox conf.
    alias jjsb='jenkins-jobs --conf $JENKINS_INI'
    # Dry-run a job definition against the sandbox configuration.
    function jjsb-test() {
        if [ -z "$(which jenkins-jobs 2>&1)" ] ; then
            echo "jenkins-jobs not found!  Run jjb-sandbox-env to activate."
            return
        fi
        if [ -z "$1" ] ; then
            echo "Usage: $FUNCNAME <jenkins-job-name>"
            return
        fi
        which jenkins-jobs \
            && jenkins-jobs --conf $JENKINS_INI test $WS_ROOT/jjb $@
    }
    # Push a job definition to the sandbox Jenkins instance.
    function jjsb-update() {
        if [ -z "$(which jenkins-jobs 2>&1)" ] ; then
            echo "jenkins-jobs not found!  Run jjb-sandbox-env to activate."
            return
        fi
        if [ -z "$1" ] ; then
            echo "Usage: $FUNCNAME <jenkins-job-name>"
            return
        fi
        which jenkins-jobs \
            && jenkins-jobs --conf $JENKINS_INI update $WS_ROOT/jjb $@
    }
    jenkins-jobs --version
}
|
import json
import re
from pathlib import Path
from typing import Union
import albumentations as A
import cv2
import numpy as np
import pandas as pd
import torch
from torch.utils.data import SubsetRandomSampler
from src.utils import Constants
class FoodVisorDataset(torch.utils.data.Dataset):
    """
    Custom ``Dataset`` adapted to the FoodVisor data convention.

    To build this dataset, an img_annotations file and a csv label mapping
    are needed.

    Arguments:
    ----------
    - json_annotations (str): path to the img_annotations.json file
    - csv_mapping (str): path of the label_mapping.csv file
    - imgs_folder (str): folder where all images are located
    - regex_aliment (str): regex used to build the two classes. Example: the
      regex r"[Tt]omate(s)?" builds two classes, one containing only images
      with tomatoes, and one with everything else.
    - transforms (albumentations transform, default=None): augmentations
      applied jointly to image, bboxes and labels
    - lang (str, default="fr"): language of the labels ("fr" and "en" only)
    """

    def __init__(
            self,
            json_annotations: str,
            csv_mapping: str,
            imgs_folder: str,
            regex_aliment: str,
            transforms: A = None,
            lang: str = "fr"):
        self.imgs_folder = Path(imgs_folder)
        with open(Path(json_annotations).as_posix()) as f:
            self.img_annotations = json.load(f)
        self.csv_mapping = pd.read_csv(csv_mapping)
        self.transforms = transforms
        self.__regex_aliment = regex_aliment
        if lang in Constants.LANG_LABEL:
            self.__lang = lang
        else:
            print("lang parameter should be one of the following :")
            for l in Constants.LANG_LABEL:
                print(" - {:s}".format(l))
            raise ValueError

    def __getitem__(self, index: int):
        """Return ``(image, target)`` for the sample at ``index``.

        ``image`` is an RGB array (optionally transformed); ``target`` is a
        dict with ``boxes`` (pascal_voc, float32, shape (N, 4)), ``labels``
        (int64), ``area`` and ``image_id`` tensors.
        """
        img_id = list(self.img_annotations.keys())[index]
        img_name = self.imgs_folder / img_id
        objs = self.img_annotations[img_id]
        boxes = []
        labels = []
        for obj in objs:
            if not obj["is_background"]:
                boxes.append(coco_to_pascalvoc(obj["box"]))
                label_str = self.__get_label_for_id(obj["id"])
                labels.append(self.__is_aliment_present(label_str))
        img = cv2.imread(img_name.as_posix())
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        if self.transforms:
            data = {
                "image": img,
                "bboxes": boxes,
                "labels": labels}
            res = self.transforms(**data)
            img = res["image"]
            boxes = res["bboxes"]
        # reshape(-1, 4) keeps a valid (0, 4) shape when the image has no
        # boxes, so the slicing below cannot fail on empty annotations.
        boxes = torch.as_tensor(boxes, dtype=torch.float32).reshape(-1, 4)
        labels = torch.as_tensor(labels, dtype=torch.int64)
        # pascal_voc area is (xmax - xmin) * (ymax - ymin); the previous
        # `boxes[:, 3] * boxes[:, 2]` multiplied the max-corner coordinates.
        area = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
        image_id = torch.tensor([index])
        target = {
            "boxes": boxes,
            "labels": labels,
            "area": area,
            "image_id": image_id,
            # "image_filename": img_id
        }
        return img, target

    def __len__(self) -> int:
        """Number of annotated images."""
        return len(self.img_annotations.keys())

    def __get_label_for_id(self, label_id: str) -> str:
        """
        Method to get the label from a label id using the label mapping

        Argument:
        ---------
        - label_id (str): id of the label

        Return:
        -------
        - label (str), localized according to ``lang``
        """
        return self.csv_mapping[self.csv_mapping[Constants.COL_LABEL_ID]
                                == label_id][Constants.COL_LABEL_NAME + self.__lang].values[0]

    def __is_aliment_present(self, label: str) -> int:
        """
        Method to check if the target aliment matches a label.

        Argument:
        ---------
        - label (str): label text to test against the aliment regex

        Return:
        -------
        - int: 1 if the regex matches the label, 0 otherwise
        """
        if bool(re.search(self.__regex_aliment, label)):
            return 1
        else:
            return 0
def coco_to_pascalvoc(bbox: Union[list, tuple, np.ndarray]) -> tuple:
    """Convert a COCO box [x, y, width, height] to pascal_voc (xmin, ymin, xmax, ymax).

    The max corner is inclusive, hence the ``- 1`` on width/height. min/max
    keep the corners ordered even if width or height happens to be negative.
    (Annotation fixed: ``np.array`` is a function; ``np.ndarray`` is the type.)
    """
    # Coco dataset format : [x, y, width, height]
    x, y, w, h = bbox[0], bbox[1], bbox[2] - 1, bbox[3] - 1
    # Transform into pascal_voc format : [xmin, ymin, xmax, ymax]
    x0, y0 = min(x, x + w), min(y, y + h)
    x1, y1 = max(x, x + w), max(y, y + h)
    return x0, y0, x1, y1
|
# Smoke-test the deployed fashion-MNIST prediction Cloud Function by POSTing
# the URL of a sample image for it to classify.
curl -X POST https://asia-northeast1-mlops-basic.cloudfunctions.net/predict_fashion_mnist \
  -H 'Content-Type: application/json' \
  -d '{"url":"https://raw.githubusercontent.com/ryfeus/gcf-packs/master/tensorflow2.0/example/test.png"}'
#!/usr/bin/env bash
# Create an Elastic Beanstalk environment named after the first argument
# (default: "demo"), using the saved configuration of the same name.
env=${1:-demo}
# Quote expansions so an environment name cannot word-split the command line.
eb create "${env}" \
    --cfg "${env}" \
    --cname "dasniko-kc-${env}"
|
def bubbleSort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    Returns None; the input list is mutated. A pass that performs no swaps
    means the list is already sorted, so the loop exits early (turns the
    best case from O(n^2) into O(n)).
    """
    n = len(arr)
    # Traverse through all array elements
    for i in range(n):
        swapped = False
        # Last i elements are already in place
        for j in range(0, n - i - 1):
            # Swap if the element found is greater than the next element
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            break
# Demo: sort a sample list and print the result.
# The original ended with `print ("%d" %arr[i]),` — a Python 2 trailing-comma
# print; under Python 3 it built a useless (None,) tuple after printing.
arr = [64, 34, 25, 12, 22, 11, 90]
bubbleSort(arr)
print("Sorted array is:")
for i in range(len(arr)):
    print("%d" % arr[i])
import * as numerals from '.';
// Snapshot every exported numeral system so additions or changes to the
// exported data must be reviewed via snapshot updates.
describe('Numerals', () => {
  Object.keys(numerals).forEach(numeralSystem => {
    it(`should return ${numeralSystem} numerals`, () => {
      expect(numerals[numeralSystem]).toMatchSnapshot();
    });
  });
});
|
/*
* express-mfs
*
* Copyright(c) 2020 <NAME>
* MIT Licensed
*/
"use strict";
// ==========================================================================================
// ping middleware
//
// mfs.ping:
// The middleware provides an API-friendly json response ("pong"). Every API needs a simple
// "I'm Alive" endpoint.
//
// ==========================================================================================
// Minimal express-mfs example: a JSON "pong" liveness endpoint plus the
// shared unknown-route and error-handling middleware.
const express = require("express");
const mfs = require("../lib");

// Port the example API listens on.
const API_PORT = 3000;

const app = express();

// GET /ping -> API-friendly json response ("pong").
app.get("/ping", mfs.ping);

// handle unknown routes
app.use(mfs.unknown);

// handle all errors
app.use(mfs.error);

app.listen(API_PORT);
console.log(`API Service Listening On Port: ${API_PORT}`);
import React, { Component } from "react";
import { StyleSheet, Text, View } from "react-native";
class TicTacToe extends Component { state = {
board: Array(9).fill(null),
player1: true,
};
handlePress = (index) => {
const { player1, board } = this.state;
const newBoard = board;
if (player1) {
newBoard[index] = "X";
} else {
newBoard[index] = "O";
}
this.setState({
board: newBoard,
player1: !player1,
});
};
render() {
return (
<View style={styles.container}>
{this.state.board.map((box, index) => (
<View key={index} style={styles.boxes}>
// handle each button click.
<Text onPress={() => this.handlePress(index)}
style={styles.text}>{box}</Text>
</View>
))}
</View>
);
}
}
const styles = StyleSheet.create({
  // Wrapping row layout: nine 100px boxes flow into a 3-wide grid
  // (assuming the screen fits three boxes per row — confirm on device).
  container: {
    flexWrap: "wrap",
    flexDirection: "row"
  },
  // One bordered square with its mark centered.
  boxes: {
    width: 100,
    height: 100,
    borderWidth: 2,
    borderColor: "black",
    justifyContent: "center",
    alignItems: "center"
  },
  text: {
    fontSize: 40
  }
});
export default TicTacToe;
#!/bin/bash -e
# Xcode build-phase helper: source the script Xcode passes via
# SCRIPT_INPUT_FILE_0. -e aborts the build phase if that script fails.
# NOTE(review): presumably this wraps a Flutter/CocoaPods build script —
# confirm in the build-phase settings.
. "${SCRIPT_INPUT_FILE_0}"
|
<reponame>AY1920S1-CS2113T-W17-3/main
package owlmoney.logic.command.bond;
import static owlmoney.commons.log.LogsCenter.getLogger;
import java.util.logging.Logger;
import owlmoney.logic.command.Command;
import owlmoney.model.bank.exception.BankException;
import owlmoney.model.bond.exception.BondException;
import owlmoney.model.profile.Profile;
import owlmoney.ui.Ui;
/**
* Executes DeleteBondCommand and prints the results.
*/
/**
 * Executes DeleteBondCommand and prints the results.
 */
public class DeleteBondCommand extends Command {
    // Name of the (investment) bank account that holds the bond.
    private final String bankName;
    // Name of the bond to delete from that account.
    private final String bondName;
    private static final Logger logger = getLogger(DeleteBondCommand.class);

    /**
     * Creates an instance of DeleteInvestmentCommand.
     *
     * @param bankName The name of the bank account.
     * @param bondName The name of the bond to be deleted.
     */
    public DeleteBondCommand(String bankName, String bondName) {
        this.bankName = bankName;
        this.bondName = bondName;
    }

    /**
     * Executes the function to delete an investment account from the profile.
     *
     * @param profile Profile of the user.
     * @param ui      Ui of OwlMoney.
     * @return false so OwlMoney will not terminate yet.
     * @throws BankException If used on savings or bank does not exist.
     * @throws BondException If there are no bonds.
     */
    @Override
    public boolean execute(Profile profile, Ui ui) throws BondException, BankException {
        profile.profileDeleteBond(this.bankName, this.bondName, ui);
        logger.info("Successful execution of DeleteBondCommand");
        return this.isExit;
    }
}
|
#! /bin/sh
# oomph-lib self-test driver: run the 2D unstructured adaptive Navier-Stokes
# ALE demo, compare its norms against reference data, and report OK counts.

# Get the OOPMH-LIB root directory from a makefile
OOMPH_ROOT_DIR=$(make -s --no-print-directory print-top_builddir)

#Set the number of tests to be checked
NUM_TESTS=1

# Setup validation directory
#---------------------------
touch Validation
rm -r -f Validation
mkdir Validation

# Validation for unstructured fluid
#----------------------------------
cd Validation

echo "Running 2D unstructured adaptive Navier Stokes ALE validation "
mkdir RESLT
../unstructured_adaptive_ALE --validation > OUTPUT
echo "done"
echo " " >> validation.log
echo "2D unstructured adaptive Navier Stokes ALE validation" >> validation.log
echo "-----------------------------------------------------" >> validation.log
echo " " >> validation.log
echo "Validation directory: " >> validation.log
echo " " >> validation.log
echo " " `pwd` >> validation.log
echo " " >> validation.log
cat RESLT/norm.dat > results.dat

# Compare against reference data unless fpdiff is unavailable.
if test "$1" = "no_fpdiff"; then
  echo "dummy [OK] -- Can't run fpdiff.py because we don't have python or validata" >> validation.log
else
../../../../bin/fpdiff.py ../validata/results.dat.gz \
results.dat >> validation.log
fi

# Append log to main validation log
cat validation.log >> ../../../../validation.log

cd ..

#######################################################################

#Check that we get the correct number of OKs
# validate_ok_count will exit with status
# 0 if all tests has passed.
# 1 if some tests failed.
# 2 if there are more 'OK' than expected.
. $OOMPH_ROOT_DIR/bin/validate_ok_count

# Never get here
exit 10
|
var React = require('react'),
Link = require('react-router').Link,
ProfImg = require('../prof_img.js');
// NOTE(review): this module-level `guest` appears unused — getGuest() stores
// the user in component state and render() declares its own local `guest`.
var guest;
var NavBar = React.createClass({
  // Before the first render: if a session cookie exists, load the signed-in
  // user into state. (The console.log calls look like leftover debug output.)
  componentWillMount: function() {
    console.log("componentWillMount");
    // No session cookie means nobody is signed in; skip the API call.
    if (!Cookies.get("session"))
      return;
    console.log("Made it past cookies check")
    this.getGuest();
  },
  // Re-fetch the user if a session cookie appeared after mount.
  // NOTE(review): getGuest() calls setState, and triggering setState from
  // componentWillUpdate can cause repeated update cycles — confirm this
  // cannot loop when the API keeps succeeding.
  componentWillUpdate: function() {
    console.log("componentWillUpdate");
    if (!this.state.guest && Cookies.get("session"))
      this.getGuest();
  },
  // Fetch the current user for the stored session and cache it in state.
  // On failure the session cookie is assumed stale: drop it and reload.
  // NOTE(review): api_call appears to be a synchronous global helper (its
  // result is used immediately) — confirm.
  getGuest: function() {
    var api_resp =
      api_call('kitchenuser', {method: 'Get', session: Cookies.get("session")});
    if (api_resp.Success)
      this.setState({guest: api_resp.Return});
    else {
      Cookies.remove('session');
      location.reload();
    }
  },
  // Clear the session cookie and reload so the UI returns to signed-out state.
  signout: function() {
    Cookies.remove('session');
    location.reload();
  },
  // No user is loaded until getGuest() succeeds.
  getInitialState: function() {
    return ({guest: null});
  },
render: function() {
var guest = this.state.guest;
var right_nav,
user_tab,
host_tab;
var host_tab =
<li className="nav-item">
<Link className="nav-item-content"
to={(Cookies.get("session"))? "/edit_host_info" : "/why_chakula"}
id="be-a-chef">
<button className="c-blue-bg">Become a Chef</button>
</Link>
</li>,
user_tab =
<li id="signin">
<Link to="login?fwd=/">
<span className="nav-text">Sign In</span>
<img className="img-responsive nav-icon" alt="Brand" src="/img/user-icon.svg" align="right" />
</Link>
</li>;
if (guest) {
user_tab =
(<li id="user" className="dropdown">
<button className="btn dropdown-toggle" type="button" id="user-dropdown" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
<span className="nav-text">{guest.First_name}</span>
<ProfImg className="img-responsive img-circle" alt="Brand" src={guest.Prof_pic || "/img/user-icon.svg"} align="right" />
</button>
<ul className="dropdown-menu" aria-labelledby="dropdownMenu1">
<li><Link to="/edit_guest_info">My Profile</Link></li>
<li><a onClick={this.signout}>Signout</a></li>
</ul>
</li>);
if (guest.Is_host) {
host_tab =
(<li className="nav-item">
<Link className="nav-item-content" to="/my_meals">
<span>Host</span>
<img className="img-responsive nav-icon" src="/img/host-icon.svg"/>
</Link>
</li>);
}
}
right_nav =
(<ul className="nav navbar-right">
{host_tab}
{user_tab}
</ul>);
return(
<nav className="navbar navbar-default navbar-static-top">
<div className="container-fluid">
<div className="navbar-header">
<Link className="navbar-brand" id="navbar-brand" to="/">
<img className="img-responsive nav-icon" alt="Brand" src="/img/chakula_icon.svg" align="left" />
</Link>
<button type="button" className="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false" align="right">
<span className="sr-only">Toggle navigation</span>
<span className="icon-bar"></span>
<span className="icon-bar"></span>
<span className="icon-bar"></span>
</button>
</div>
<div className="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
{right_nav}
</div>
</div>
</nav>
);
}
});
module.exports = React.createClass({
renderFooter: function() {
return(
<div className="row">
<footer className="col-xs-12">
<div className="row">
<div className="col-xs-6 col-xs-offset-3">
<p><Link to="/about">About</Link></p>
<p><a href="/privacy.html">Privacy</a></p>
<div className="text-center">
<p>Copyright © Chakula 2015</p>
<p>717 Newton Pl NW, Washington DC</p>
</div>
</div>
</div>
</footer>
</div>);
},
render: function() {
return(
<div className="container-fluid">
<div className="row" id="app-body">
<NavBar/>
{this.props.children}
</div>
<div id="js-working">
</div>
</div>);
}
});
|
// Count how many times each keyword occurs in str (overlapping matches
// included: the scan resumes one character past each hit, not past the
// whole match). Returns a Map keyword -> count, in the keywords' order.
const searchString = (str, keywords) => {
  const counts = new Map();
  keywords.forEach((keyword) => {
    let occurrences = 0;
    let position = str.indexOf(keyword);
    while (position !== -1) {
      occurrences += 1;
      position = str.indexOf(keyword, position + 1);
    }
    counts.set(keyword, occurrences);
  });
  return counts;
};

console.log(searchString('The quick brown fox jumps over the lazy dog', ['quick', 'fox', 'dog']));
// Output: Map { 'quick' => 1, 'fox' => 1, 'dog' => 1 }
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <time.h>
#include <limits.h>
#include <netdb.h>
#include "net.h"
#include "timer.h"
#include "data.h"
#include "packet.h"
/*
 * Reliable-transfer receiver over UDP (stop-and-wait style).
 *
 * Usage: receiver port_no [loss_rate]
 *
 * Receives data packets, ACKs each one, copies in-order payloads into a
 * local buffer, then performs a timed tear-down handshake. The framing
 * helpers (recv_packet/send_ack/make_ack) and g_buffer come from the
 * project headers (net.h, packet.h, data.h, timer.h).
 */
int main(int argc, char **argv)
{
    if (argc < 2) {
        printf("Usage:\n");
        printf("  %s port_no [loss_rate]\n", argv[0]);
        exit(1);
    }
    // Seed the RNG from is_lost()
    // (fixed seed => reproducible simulated-loss runs)
    srand48(12345);
    // Parse/validate args
    char *port = argv[1];
    printf("port = %s\n", port);
    // Ports are 16-bit; strtol lets us range-check before use.
    long int x = strtol(argv[1], NULL, 10);
    if (x < 0 || x > USHRT_MAX) {
        fprintf(stderr, "[error]: port %ld is invalid\n", x);
        exit(1);
    }
    // Optional simulated packet-loss probability in [0, 1]; 0 = deliver all.
    double loss_rate = 0.0;
    if (argc == 3) {
        loss_rate = strtof(argv[2], NULL);
        if (loss_rate < 0.0 || loss_rate > 1.0) {
            fprintf(stderr, "[error]: loss_rate must be between 0 and 1\n");
            exit(1);
        }
    }
    // Get a socket to connect to
    int sock;
    struct sockaddr their_addr;
    if (get_addr_sock(&their_addr, &sock, NULL, port) == -1) {
        fprintf(stderr, "[error]: unable to get socket\n");
        exit(1);
    }
    // Len of connecting address
    socklen_t addrlen = (socklen_t) sizeof(their_addr);
    // Last packet received
    // (-1 so the first expected sequence number is 0)
    int packet_received = -1;
    // Buffer to store data in
    // NOTE(review): sized from strlen(g_buffer) — assumes the sender
    // transmits exactly g_buffer's contents; more data would overflow
    // the strcpy below. TODO confirm against the sender.
    char *buf = malloc((strlen(g_buffer) + 1) * sizeof(char));
    char *bufstart = buf;
    // Main loop of execution - runs until we get an error or tear-down msg
    while (true) {
        // Receive a packet
        struct packet_t pkt;
        if (recv_packet(&pkt, sock, &their_addr, &addrlen, loss_rate) == -1) {
            fprintf(stderr, "[receiver]: couldn't receive packet\n");
            exit(1);
        }
        // Check if this is the tear-down message. If so, get out of loop.
        if (pkt.type == 4) {
            printf("RECEIVED TEAR-DOWN PACKET\n");
            break;
        }
        // Check if this is the next packet in the sequence. If so, adjust
        // packet_received appropriately and copy data to the buffer
        // (out-of-order packets are not buffered; the cumulative ACK below
        // makes the sender retransmit them)
        else if (pkt.seq_no == (packet_received + 1)) {
            packet_received++;
            strcpy(bufstart, pkt.data);
            bufstart += pkt.len;
        }
        printf("RECEIVED PACKET %d\n", pkt.seq_no);
        // Send ACK
        // (cumulative: always ACKs the highest in-order seq_no received)
        struct ack_t ack;
        if (make_ack(&ack, 2, packet_received) == -1) {
            fprintf(stderr, "[receiver]: couldn't construct ACK\n");
            exit(1);
        }
        if (send_ack(&ack, sock, &their_addr) == -1) {
            fprintf(stderr, "[receiver]: couldn't send ACK %d\n", ack.ack_no);
            exit(1);
        }
        // NOTE(review): logs ack_no + 1 while the ACK carries ack_no —
        // presumably a 1-based display; confirm it matches the sender's logs.
        printf("--------SEND ACK %d\n", ack.ack_no + 1);
        printf("\n");
    }
    // Construct ACK to tear-down message and send
    struct ack_t tear_down_ack;
    if (make_ack(&tear_down_ack, 8, 0) == -1) {
        fprintf(stderr, "[receiver]: couldn't construct tear-down ACK\n");
        exit(1);
    }
    if (send_ack(&tear_down_ack, sock, &their_addr) == -1) {
        fprintf(stderr, "[receiver]: couldn't send tear-down ACK\n");
        exit(1);
    }
    printf("--------SEND TEAR-DOWN ACK\n");
    // Timer for 7 seconds. Additionally, set a timeout on the socket so that
    // we don't exceed the timeout by not receiving any packets
    if (set_timeout(sock, 7) == -1) {
        fprintf(stderr, "[receiver]: unable to set timeout\n");
        exit(1);
    }
    // NOTE(review): clock() measures CPU time, not wall time; blocking in
    // recv consumes little CPU, so this window can last longer than 7 wall
    // seconds — the socket timeout above is what actually bounds each wait.
    clock_t start = clock();
    int msec = (clock() - start) * 1000 / CLOCKS_PER_SEC;
    while (msec < 7000) {
        struct packet_t pkt;
        if (recv_packet(&pkt, sock, &their_addr, &addrlen, loss_rate) == -1) {
            break;
        }
        print_packet(pkt);
        if (pkt.type == 4) {
            printf("RECEIVED TEAR-DOWN PACKET\n");
        } else {
            printf("RECEIVED PACKET %d\n", pkt.seq_no);
        }
        // Only ACK if it's a tear-down packet
        // (re-ACK in case our tear-down ACK was lost)
        if (pkt.type == 4) {
            if (send_ack(&tear_down_ack, sock, &their_addr) == -1) {
                fprintf(stderr, "[receiver]: couldn't send tear-down ACK\n");
                break;
            }
            printf("--------SEND TEAR-DOWN ACK\n");
        }
    }
    free(buf);
    return 0;
}
|
#include <stdio.h>
#include <stdlib.h>
typedef struct {
int num;
float saldo;
} Conta;
typedef struct {
Conta *pc;
int indice;
} TConta;
/* Reset the account container to an empty state: zero accounts stored and
 * no backing storage allocated yet. */
void iniciarConta(TConta *p) {
    p->indice = 0;
    p->pc = NULL;
}
/* Print a formatted report of every stored account (number and balance). */
void mostrarTodas(TConta *p) {
    puts("");
    puts("===========================");
    puts("=== RELATORIO DE CONTAS ===");
    puts("===========================");
    int i;
    for (i = 0; i < p->indice; i++) {
        Conta *atual = &p->pc[i];
        printf("Numero: %d", atual->num);
        printf("\nSaldo.: %.2f\n", atual->saldo);
        puts("===========================");
    }
}
/* Credit c.saldo to every stored account whose number equals c.num.
 * (No early exit: duplicate account numbers are all credited, matching
 * the original behavior.) */
void creditarConta (TConta *p, Conta c) {
    for (int i = 0; i < p->indice; i++) {
        Conta *alvo = &p->pc[i];
        if (alvo->num == c.num)
            alvo->saldo += c.saldo;
    }
}
/* Append account c, growing the dynamic array by one slot.
 *
 * Fixes two issues in the original:
 *  - the realloc result was assigned straight to p->pc, so a failed realloc
 *    overwrote the pointer with NULL (leaking the old block) and then wrote
 *    through it; a temporary pointer keeps the container intact on failure.
 *  - realloc(NULL, size) behaves like malloc, so the separate first-call
 *    malloc branch was redundant.
 * On allocation failure the container is left unchanged and the insert is
 * silently dropped (the interface has no error channel). */
void inserirConta (TConta *p, Conta c) {
    Conta *novo = (Conta*)realloc(p->pc, (p->indice + 1) * sizeof(Conta));
    if (novo == NULL)
        return; /* out of memory: keep the previously stored accounts */
    p->pc = novo;
    p->pc[p->indice] = c;
    p->indice++;
}
/* Interactive menu loop for the bank-account demo.
 *
 * Fixes over the original:
 *  - the scanf return value was ignored, so non-numeric input left `opcao`
 *    unchanged and re-ran the menu forever on the same bad token; the input
 *    line is now drained and the menu re-shown (EOF exits cleanly).
 *  - choosing 0 (Sair) fell into the default branch and printed
 *    "Opcao invalida!!" before exiting; it now exits silently. */
int main () {
    TConta conta;
    Conta c;
    unsigned opcao = 0;
    iniciarConta(&conta);
    do {
        puts("");
        puts("==========================================");
        puts("====== Controle de Contas Bancarias ======");
        puts("1. Cadastrar Conta     2. Mostrar Saldo");
        puts("3. Credito em Conta    4. Debito da Conta");
        puts("5. Transferir Valor    6. Mostrar Contas");
        puts("7. Remover Posicao     8. Remover Numero");
        puts("==========================================");
        puts("0. Sair");
        printf("Informe sua opcao: ");
        if (scanf("%u", &opcao) != 1) {
            /* Bad token: drain the rest of the line so we don't spin on it. */
            int ch = getchar();
            if (ch == EOF)
                break; /* stdin closed: exit the menu cleanly */
            while (ch != '\n' && ch != EOF)
                ch = getchar();
            puts("\nOpcao invalida!!");
            opcao = 1; /* any non-zero value keeps the menu loop running */
            continue;
        }
        switch (opcao) {
            case 0:
                /* Sair: handled by the loop condition below. */
                break;
            case 1:
                /* Demo data: seed accounts 12..19 with saldo = num * 10. */
                for (int i = 12; i < 20; i++) {
                    c.num = i;
                    c.saldo = i * 10.00;
                    inserirConta(&conta, c);
                }
                break;
            case 3:
                printf("Informe a conta: ");
                scanf("%d", &c.num);
                printf("Informe o valor: ");
                scanf("%f", &c.saldo);
                creditarConta(&conta, c);
                break;
            case 6:
                mostrarTodas(&conta);
                break;
            default:
                /* Menu items 2, 4, 5, 7, 8 are not implemented yet. */
                puts("\nOpcao invalida!!");
        }
    } while (opcao > 0);
    free(conta.pc);
    return 0;
}
|
<reponame>astrex1969/astrobot16<gh_stars>0
const { MessageAttachment } = require("discord.js");
module.exports = {
name: "supreme",
description: "Display custom text as the Supreme logo",
category: "image",
async execute(bot, message, args) {
const text = args.join(" ");
if (!text) {
return message.channel.send("Please provide text!");
}
const image = await bot.alexClient.image.supreme({
text: encodeURIComponent(text),
});
const att = new MessageAttachment(image, "supreme.png");
message.channel.send(att);
},
};
|
<gh_stars>100-1000
package dev.fiki.forgehax.main.mods.player;
import dev.fiki.forgehax.api.event.SubscribeListener;
import dev.fiki.forgehax.api.events.entity.LocalPlayerUpdateEvent;
import dev.fiki.forgehax.api.mod.Category;
import dev.fiki.forgehax.api.mod.ToggleMod;
import dev.fiki.forgehax.api.modloader.RegisterMod;
import dev.fiki.forgehax.asm.events.packet.PacketInboundEvent;
import net.minecraft.network.play.client.CEntityActionPacket;
import net.minecraft.network.play.client.CPlayerPacket;
import net.minecraft.network.play.server.SPlayerPositionLookPacket;
import net.minecraft.util.math.vector.Vector3d;
import java.util.Objects;
import static dev.fiki.forgehax.main.Common.*;
// Flies by hand-crafting movement packets every client tick instead of using
// normal client physics, and by discarding the server's position corrections.
// NOTE(review): the offsets and the duplicate-packet trick below look tuned
// against server-side movement checks; verify against the target server
// version before changing any constant.
@RegisterMod(
    name = "PacketFly",
    description = "Enables flying",
    category = Category.PLAYER
)
@SuppressWarnings("MethodCallSideOnly")
public class PacketFlyMod extends ToggleMod {
  // Flipped every tick; alternates the vertical offset between 0.0625 and
  // 0.0624 — presumably so consecutive packets are never identical and don't
  // trip static-position detection. TODO confirm.
  private boolean zoomies = true;
  @Override
  public void onDisabled() {
    // Re-enable collision when the mod is switched off (it is disabled at the
    // end of every tick while active).
    if (Objects.nonNull(getLocalPlayer())) {
      getLocalPlayer().noPhysics = false;
    }
  }
  @SubscribeListener
  public void onLocalPlayerUpdate(LocalPlayerUpdateEvent event) {
    // Horizontal steering. NOTE(review): moveLooking() reduces to {yRot, 0}
    // (see below), so xDir is the raw yaw value rather than a unit vector.
    double[] dir = moveLooking(0);
    double xDir = dir[0];
    double zDir = dir[1];
    // Apply horizontal motion only while a movement key (but not jump) is held.
    if ((getGameSettings().keyUp.isDown()
        || getGameSettings().keyLeft.isDown()
        || getGameSettings().keyRight.isDown()
        || getGameSettings().keyDown.isDown())
        && !getGameSettings().keyJump.isDown()) {
      Vector3d vel = getLocalPlayer().getDeltaMovement();
      getLocalPlayer().setDeltaMovement(xDir * 0.26, vel.y(), zDir * 0.26);
    }
    // Target position for this tick: jump/sneak move up/down by ~0.0625,
    // otherwise tiny asymmetric offsets keep the y-delta non-zero.
    double posX = getLocalPlayer().getX() + getLocalPlayer().getDeltaMovement().x();
    double posY =
        getLocalPlayer().getY()
            + (getGameSettings().keyJump.isDown() ? (zoomies ? 0.0625 : 0.0624) : 0.00000001)
            - (getGameSettings().keyShift.isDown()
                ? (zoomies ? 0.0625 : 0.0624)
                : 0.00000002);
    double posZ = getLocalPlayer().getZ() + getLocalPlayer().getDeltaMovement().z();
    // First packet: the position the client actually wants to occupy
    // (onGround = false). Recomputes the same expressions as posX/posY/posZ.
    getNetworkManager()
        .send(
            new CPlayerPacket.PositionRotationPacket(
                getLocalPlayer().getX() + getLocalPlayer().getDeltaMovement().x(),
                getLocalPlayer().getY()
                    + (getGameSettings().keyJump.isDown()
                        ? (zoomies ? 0.0625 : 0.0624)
                        : 0.00000001)
                    - (getGameSettings().keyShift.isDown()
                        ? (zoomies ? 0.0625 : 0.0624)
                        : 0.00000002),
                getLocalPlayer().getZ() + getLocalPlayer().getDeltaMovement().z(),
                getLocalPlayer().yRot,
                getLocalPlayer().xRot,
                false));
    // Second packet: same x/z but y offset by +1337 with onGround = true —
    // presumably to route the move through the server's teleport/illegal-move
    // handling so the first position is accepted. TODO confirm.
    getNetworkManager()
        .send(
            new CPlayerPacket.PositionRotationPacket(
                getLocalPlayer().getX() + getLocalPlayer().getDeltaMovement().x(),
                1337 + getLocalPlayer().getY(),
                getLocalPlayer().getZ() + getLocalPlayer().getDeltaMovement().z(),
                getLocalPlayer().yRot,
                getLocalPlayer().xRot,
                true));
    getNetworkManager().send(new CEntityActionPacket(getLocalPlayer(), CEntityActionPacket.Action.START_FALL_FLYING));
    // Move the client to match what we just told the server, zero residual
    // motion, and disable collision so the client can't drift or clip.
    getLocalPlayer().moveTo(posX, posY, posZ);
    zoomies = !zoomies;
    getLocalPlayer().setDeltaMovement(0.D, 0.D, 0.D);
    getLocalPlayer().noPhysics = true;
  }
  // NOTE(review): the arithmetic is a no-op (x * 360 / 360 * 180 / 180 == x),
  // so this returns {yaw, 0} rather than a normalized direction vector, and
  // the parameter is unused. Looks like a stub/simplified helper — confirm.
  public double[] moveLooking(int ignored) {
    return new double[]{getLocalPlayer().yRot * 360 / 360 * 180 / 180, 0};
  }
  // Despite the name, this listens to INBOUND packets: it drops the server's
  // position-correction (teleport) packet so rubber-banding can't undo the fly.
  @SubscribeListener
  public void onOutgoingPacketSent(PacketInboundEvent event) {
    if (event.getPacket() instanceof SPlayerPositionLookPacket) {
      event.setCanceled(true);
    }
  }
}
|
<reponame>sinkosi/red_tetris
// A lobby participant: identity (alias + socket) comes from the caller,
// gameplay state starts at its defaults (no score, outside any game,
// not an administrator).
function User(alias, socketId) {
  this.alias = alias;
  this.socketId = socketId;
  Object.assign(this, {
    score: 0,
    status: "not-in-game",
    isadmin: false,
  });
}

export default User;
|
# platform = Red Hat Enterprise Linux 8,multi_platform_fedora
# Remediation: force the system crypto policy's sshd backend to advertise
# only the approved MACs. The {{{ ... }}} call is a Jinja macro expanded by
# the content build system; it defines the sshd_approved_macs variable used
# below.
{{{ bash_instantiate_variables("sshd_approved_macs") }}}
CONF_FILE=/etc/crypto-policies/back-ends/opensshserver.config
correct_value="-oMACs=${sshd_approved_macs}"
# Test if file exists
test -f ${CONF_FILE} || touch ${CONF_FILE}
# Ensure CRYPTO_POLICY is not commented out
sed -i 's/#CRYPTO_POLICY=/CRYPTO_POLICY=/' ${CONF_FILE}
# Nothing to do if the approved-MACs option is already present (quoted form).
grep -q "'${correct_value}'" ${CONF_FILE}
if [[ $? -ne 0 ]]; then
    # We need to get the existing value, using PCRE to maintain same regex
    existing_value=$(grep -Po '(-oMACs=\S+)' ${CONF_FILE})
    if [[ ! -z ${existing_value} ]]; then
        # replace existing_value with correct_value
        # NOTE(review): the values are interpolated straight into the sed
        # expression; MAC lists containing '/' or '&' would break it — the
        # expected "-oMACs=alg1,alg2" form contains neither. Confirm.
        sed -i "s/${existing_value}/${correct_value}/g" ${CONF_FILE}
    else
        # ***NOTE*** #
        # This probably means this file is not here or it's been modified
        # unintentionally.
        # ********** #
        # echo correct_value to end
        echo "CRYPTO_POLICY='${correct_value}'" >> ${CONF_FILE}
    fi
fi
|
<filename>app/src/main/java/com/example/hammedopejin/breakoutgame/Brick.java
package com.example.hammedopejin.breakoutgame;
import android.graphics.RectF;
/**
* Created by hammedopejin on 3/18/17.
*/
/**
 * One destructible brick on the Breakout play field.
 * Holds its screen-space bounds and whether it has been knocked out.
 */
public class Brick {
    private RectF rect;
    private boolean isVisible;

    public Brick(int row, int column, int width, int height) {
        isVisible = true;
        // 1px inset on every side so adjacent bricks render with a seam.
        int padding = 1;
        int left = column * width + padding;
        int top = row * height + padding;
        int right = column * width + width - padding;
        int bottom = row * height + height - padding;
        rect = new RectF(left, top, right, bottom);
    }

    /** Bounds used for drawing and ball collision. */
    public RectF getRect() {
        return this.rect;
    }

    /** Marks the brick as destroyed; it will no longer be drawn. */
    public void setInvisible() {
        isVisible = false;
    }

    public boolean getVisibility() {
        return isVisible;
    }
}
|
def find_palindromes(string):
    """Return every palindromic substring of ``string`` with length >= 2.

    Substrings are emitted in scan order (by start index, then by length),
    and duplicates are kept — e.g. ``"aaa"`` yields ``["aa", "aaa", "aa"]``.
    Single characters are deliberately excluded.
    """
    return [
        string[start:end]
        for start in range(len(string) - 1)
        for end in range(start + 2, len(string) + 1)
        if string[start:end] == string[start:end][::-1]
    ]
#!/bin/bash -e
# Build the ascii-art addon image, push it to GCR, and deploy it to Cloud Run.
#
# Fix: the original first line was "#/bin/bash -e" — missing the "!", so it
# was an ordinary comment, the script ran under whatever shell invoked it,
# and -e (exit on first error) never took effect. The variable is also
# quoted now as a matter of hygiene.
IMAGE_ID=gcr.io/bitrise-platform-staging/ascii-art:latest
docker build . -t "$IMAGE_ID"
docker push "$IMAGE_ID"
gcloud run deploy addons-ascii-art --image="$IMAGE_ID" --allow-unauthenticated --project bitrise-platform-staging --platform=managed --region=us-central1
#!/bin/bash
#
# This script releases a new version of the Go Agent.
#
# Usage:
#   ./release.sh <version_number>
set -e
MAIN_BRANCH="main"
VERSION_FILE="./version/version.go"
# Writes the generated Go version file. $1 = version string, $2 = target path.
function write_version_file {
cat > $2 <<EOL
// Code generated by ./release.sh. DO NOT EDIT.
package version
const Version = "$1"
EOL
}
# Undoes the two release commits and the local tag after a failed push.
# Fix: the tag is created below as "v$VERSION", so it must be deleted as
# "v$1" (the original deleted "$1", leaving the stale tag behind).
function rollback_changes {
  git reset HEAD~2 --soft # reverts the last two commits
  git checkout .          # drop all the changes
  git tag -d "v$1"        # removes local tag (created with a "v" prefix)
}
if [[ -z $1 || "$1" == "--help" ]]; then
  echo "Usage: $0 <version_number>"
  exit 0
fi
VERSION=$1
# Fix: anchor the pattern with "$" so trailing junk (e.g. "1.2.3rc") cannot
# pass validation and later break the PATCH arithmetic below.
if [[ ! $VERSION =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Invalid version \"$VERSION\". It should follow semver."
  exit 1
fi
MAJOR="$(cut -d'.' -f1 <<<"$VERSION")"
MINOR="$(cut -d'.' -f2 <<<"$VERSION")"
PATCH="$(cut -d'.' -f3 <<<"$VERSION")"
if [[ "$MAJOR" == "0" && "$MINOR" == "0" && "$PATCH" == "0" ]]; then
  echo "Version cannot be \"0.0.0\"."
  exit 1
fi
# TODO: add a check for making sure incremental version.
if [ ! -z "$(git status --porcelain)" ]; then
  echo "You have uncommitted files. Commit or stash them first."
  exit 1
fi
echo "Fetching remote tags..."
git fetch --tags
# Fix: tags are created as "v<version>" below, so check for that exact name
# (the original checked the un-prefixed name and never matched).
if [ ! -z "$(git tag -l "v$VERSION")" ]; then
  echo "Version \"$VERSION\" already exists."
  exit 1
fi
git checkout $MAIN_BRANCH
echo "Fetching latest $MAIN_BRANCH..."
git pull origin $MAIN_BRANCH
# Commit 1: pin the released version.
write_version_file $VERSION $VERSION_FILE
git add $VERSION_FILE
git commit -m "chore(version): changes version to $VERSION"
git tag -a "v$VERSION" -m "Version $VERSION"
# Commit 2: immediately move to the next "-dev" pre-release version.
NEW_VERSION="$MAJOR.$MINOR.$(($PATCH+1))-dev"
write_version_file $NEW_VERSION $VERSION_FILE
git add $VERSION_FILE
git commit -m "chore(version): prepares for next version $NEW_VERSION."
# Push with set +e so a failure triggers a rollback instead of a hard exit.
set +e
git push origin $MAIN_BRANCH
PUSH_RESULT_CODE=$?
set -e
if [ "$PUSH_RESULT_CODE" != "0" ]; then
  rollback_changes $VERSION
  echo "Failed to push to $MAIN_BRANCH"
  exit 1
fi
git push --tags
|
#!/bin/bash
# Refresh the local copy of elf-contracts and stage its sources under src/.
# Pass "local" as $1 to symlink a sibling checkout instead of cloning.
rm -rf elf-contracts
echo "Downloading contracts..."
# link/clone and build contracts
# Fix: the original test was `[ $1="local" ]`, which concatenates into one
# non-empty word and is therefore ALWAYS true (the clone branch could never
# run); quote and space the operands so the comparison actually happens.
if [ "$1" = "local" ]; then
    ln -sf ../../elf-contracts .
else
    # Fix: `cd-elf-contracts` was a typo for `cd elf-contracts` (so npm never
    # ran). The build now happens in a subshell so the rest of the script
    # keeps executing from the repository root, which the `cp` below relies on.
    git clone git@github.com:element-fi/elf-contracts.git
    (cd elf-contracts && npm ci && npm run load-contracts)
fi
# blow away old-contracts
rm -rf src/contracts
mkdir src/contracts
# TODO: copy the contracts to the src folder so hardhat will pick up on them.
# A couple of things are out of sync so I need to get those fixed first. I made WETH and USDC
# contracts and I think elf-contracts is using AToken. I also added all the balancer V1 stuff and
# now elf-contracts has balancer V2 stuff in it. I need to wait for some PRs to land in
# elf-contracts before I can sync up.
echo "Copying latest contracts..."
cp -R elf-contracts/contracts src
echo "Done!"
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: '<Region>'});
exports.handler = async (event) => {
const params = {
TableName: '<TableName>',
Key: {
'ID': event.id,
}
};
const data = await docClient.get(params).promise();
return {
statusCode: 200,
body: JSON.stringify({
Title: data.Item.Title,
Author: data.Item.Author
})
};
}; |
#!/usr/bin/env bash
# CI script: boots an Android emulator, builds the Uno sample app and its
# UI-test assembly, runs the NUnit UI tests against the emulator, and
# collects screenshots + device logs as build artifacts.
# Relies on Azure Pipelines variables (BUILD_SOURCESDIRECTORY,
# BUILD_ARTIFACTSTAGINGDIRECTORY) and ANDROID_HOME.
export BUILDCONFIGURATION=Release
cd $BUILD_SOURCESDIRECTORY/build
# uncomment the following lines to override the installed Xamarin.Android SDK
# wget -nv https://jenkins.mono-project.com/view/Xamarin.Android/job/xamarin-android-d16-2/49/Azure/processDownloadRequest/xamarin-android/xamarin-android/bin/BuildRelease/Xamarin.Android.Sdk-OSS-9.4.0.59_d16-2_6d9b105.pkg
# sudo installer -verbose -pkg Xamarin.Android.Sdk-OSS-9.4.0.59_d16-2_6d9b105.pkg -target /
# Install AVD files
# ("y" answers the sdkmanager license prompt)
echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install 'system-images;android-28;google_apis;x86'
# Create emulator
# ("no" declines the custom hardware profile prompt)
echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd -n xamarin_android_emulator -k 'system-images;android-28;google_apis;x86' --sdcard 128M --force
echo $ANDROID_HOME/emulator/emulator -list-avds
echo "Starting emulator"
# Start emulator in background
nohup $ANDROID_HOME/emulator/emulator -avd xamarin_android_emulator -skin 1280x800 -memory 2048 -no-audio -no-snapshot -netfast > /dev/null 2>&1 &
export IsUiAutomationMappingEnabled=true
# build the sample and tests, while the emulator is starting
msbuild /r /p:Configuration=$BUILDCONFIGURATION $BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.Droid/SamplesApp.Droid.csproj
msbuild /r /p:Configuration=$BUILDCONFIGURATION $BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.UITests/SamplesApp.UITests.csproj
# Wait for the emulator to finish booting
chmod +x $BUILD_SOURCESDIRECTORY/build/android-uitest-wait-systemui.sh
$BUILD_SOURCESDIRECTORY/build/android-uitest-wait-systemui.sh
$ANDROID_HOME/platform-tools/adb devices
echo "Emulator started"
# Environment consumed by the UI-test assembly (Uno.UITest conventions)
export UNO_UITEST_SCREENSHOT_PATH=$BUILD_ARTIFACTSTAGINGDIRECTORY/screenshots/android
export UNO_UITEST_PLATFORM=Android
export UNO_UITEST_ANDROIDAPK_PATH=$BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.Droid/bin/$BUILDCONFIGURATION/uno.platform.unosampleapp.apk
cd $BUILD_SOURCESDIRECTORY/build
# Fetch the NUnit console runner and execute the UI-test assembly
mono nuget/NuGet.exe install NUnit.ConsoleRunner -Version 3.10.0
mkdir -p $UNO_UITEST_SCREENSHOT_PATH
mono $BUILD_SOURCESDIRECTORY/build/NUnit.ConsoleRunner.3.10.0/tools/nunit3-console.exe $BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.UITests/bin/$BUILDCONFIGURATION/net47/SamplesApp.UITests.dll
# Preserve the device log and the APK as build artifacts for diagnostics
$ANDROID_HOME/platform-tools/adb shell logcat -d > $BUILD_ARTIFACTSTAGINGDIRECTORY/android-device-log.txt
cp $UNO_UITEST_ANDROIDAPK_PATH $BUILD_ARTIFACTSTAGINGDIRECTORY
<filename>src/main/java/io/github/orlouge/amphitritecoffer/rei/WaterConversionDisplay.java
package io.github.orlouge.amphitritecoffer.rei;
import io.github.orlouge.amphitritecoffer.WaterConversionRecipe;
import me.shedaniel.rei.api.common.category.CategoryIdentifier;
import me.shedaniel.rei.api.common.display.SimpleGridMenuDisplay;
import me.shedaniel.rei.api.common.display.basic.BasicDisplay;
import me.shedaniel.rei.api.common.entry.EntryIngredient;
import me.shedaniel.rei.api.common.util.EntryIngredients;
import java.util.List;
import java.util.Optional;
/**
 * REI display for a water-conversion recipe: one input ingredient, one sample
 * output, plus the Amphitrite charge cost and an optional extra output.
 */
public class WaterConversionDisplay extends BasicDisplay implements SimpleGridMenuDisplay {
    /** Charge consumed by this conversion. */
    private final int charge;
    /** Extra output entry, when the recipe yields one. */
    private final Optional<EntryIngredient> additionalOutput;

    public WaterConversionDisplay(WaterConversionRecipe recipe) {
        super(
            List.of(EntryIngredients.ofItemStacks(recipe.getInput())),
            List.of(EntryIngredients.of(recipe.getSampleOutput()))
        );
        charge = recipe.getCost();
        additionalOutput = recipe.getAdditionalOutput().map(EntryIngredients::of);
    }

    @Override
    public CategoryIdentifier<?> getCategoryIdentifier() {
        return AmphitriteCofferREIClientPlugin.WATER_CONVERSION;
    }

    // The recipe grid for this category is 2 slots wide by 1 slot tall.
    @Override
    public int getWidth() {
        return 2;
    }

    @Override
    public int getHeight() {
        return 1;
    }

    public int getCharge() {
        return charge;
    }

    public Optional<EntryIngredient> getAdditionalOutput() {
        return additionalOutput;
    }
}
|
<filename>back/src/main/java/com/java110/things/entity/accessControl/SyncGetTaskResultDto.java
package com.java110.things.entity.accessControl;
import com.java110.things.entity.community.CommunityDto;
import com.java110.things.entity.machine.MachineDto;
import java.io.Serializable;
/**
* @ClassName SyncGetTaskResultDto
* @Description TODO
* @Author wuxw
* @Date 2020/6/6 17:39
* @Version 1.0
* add by wuxw 2020/6/6
**/
/**
 * Plain serializable DTO carrying the result of a "get task" sync call:
 * the command name, task id/details, and the related community, machine,
 * and user-face records.
 */
public class SyncGetTaskResultDto implements Serializable {

    private String cmd;
    private String taskId;
    private String taskInfo;
    private CommunityDto communityDto;
    private MachineDto machineDto;
    private UserFaceDto userFaceDto;

    public String getCmd() {
        return cmd;
    }

    public void setCmd(String cmd) {
        this.cmd = cmd;
    }

    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getTaskInfo() {
        return taskInfo;
    }

    public void setTaskInfo(String taskInfo) {
        this.taskInfo = taskInfo;
    }

    public CommunityDto getCommunityDto() {
        return communityDto;
    }

    public void setCommunityDto(CommunityDto communityDto) {
        this.communityDto = communityDto;
    }

    public MachineDto getMachineDto() {
        return machineDto;
    }

    public void setMachineDto(MachineDto machineDto) {
        this.machineDto = machineDto;
    }

    public UserFaceDto getUserFaceDto() {
        return userFaceDto;
    }

    public void setUserFaceDto(UserFaceDto userFaceDto) {
        this.userFaceDto = userFaceDto;
    }
}
|
'use strict'
const url = require('url')
const Dropbox = require('dropbox').Dropbox
const fetch = require('isomorphic-fetch')
const dropBoxResultUrlRegularExpression = /www.dropbox.com/;
const dropBoxDownloadUrl = 'dl.dropboxusercontent.com'
module.exports = {
provider: 'dropbox',
name: 'Dropbox',
auth: {
accessToken: {
label: 'Access Token',
type: 'text'
}
},
init: config => {
const dbx = new Dropbox({ accessToken: config.accessToken, fetch })
return {
upload: file => {
return new Promise((resolve, reject) => {
dbx.filesUpload({ path: `/uploads/${file.hash}${file.ext}`, contents: Buffer.from(file.buffer, 'binary') })
.then(uploadedFile => dbx.sharingCreateSharedLinkWithSettings({ path: uploadedFile.path_display }))
.then(fileUrl => {
const { protocol, hostname, pathname } = url.parse(fileUrl.url)
file.public_id = fileUrl.id
file.url = url.format({
protocol,
hostname: hostname.replace(dropBoxResultUrlRegularExpression,dropBoxDownloadUrl),
pathname,
query: {
raw: 1
}
})
return resolve()
})
.catch(function (err) {
return reject(err)
})
})
},
delete: file => {
return new Promise((resolve, reject) => {
dbx.filesDelete({path: `/uploads/${file.hash}${file.ext}`})
.then(() => resolve())
.catch(err => reject(err))
})
}
}
}
} |
<reponame>sleepepi/sleepinnovate.org<gh_stars>1-10
# frozen_string_literal: true
# Allows admins to edit user accounts.
class Admin::UsersController < Admin::AdminController
  # Load the target user (scoped to User.current) before every member action;
  # unknown ids are handled inside find_user_or_redirect.
  before_action :find_user_or_redirect, only: [
    :assign_subject, :unrevoke, :show, :edit, :update, :destroy
  ]

  # GET /admin/users
  # Lists users in User.current scope, newest first, 40 per page (Kaminari).
  def index
    @users = User.current.order(id: :desc).page(params[:page]).per(40)
  end

  # # GET /admin/users/1
  # def show
  # end

  # # GET /admin/users/1/edit
  # def edit
  # end

  # PATCH /users/1
  # Applies the whitelisted attributes; re-renders the edit form on failure.
  def update
    if @user.update(user_params)
      redirect_to admin_user_path(@user), notice: "User was successfully updated."
    else
      render :edit
    end
  end

  # POST /admin/users/1/assign-subject
  def assign_subject
    @user.assign_subject!
    redirect_to admin_user_path(@user)
  end

  # POST /admin/users/1/unrevoke
  # Restores a previously revoked consent on the user's record.
  def unrevoke
    @user.unrevoke_consent!
    redirect_to admin_user_path(@user)
  end

  # DELETE /admin/users/1
  def destroy
    @user.destroy
    redirect_to admin_users_path, notice: "User was successfully deleted."
  end

  protected

  # Looks up the user within the User.current scope only; when the id is
  # unknown (or outside the scope) the shared helper renders an empty
  # response or redirects back to the index.
  def find_user_or_redirect
    @user = User.current.find_by(id: params[:id])
    empty_response_or_root_path(admin_users_path) unless @user
  end

  # Strong parameters: only these admin-editable flags may be mass-assigned.
  def user_params
    params.require(:user).permit(
      :biobank_status, :clinic, :emails_enabled, :tester
    )
  end
end
|
#include "dialog.h"
#include <QApplication>
#include <QSurfaceFormat>
// Application entry point: configure the default OpenGL surface format
// before showing the main dialog (Qt widgets pick up the default format
// set here for any GL surfaces they create).
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    // Force OpenGL 2.1
    // NOTE(review): CoreProfile is only meaningful for OpenGL >= 3.2, so
    // requesting CoreProfile together with version (2, 1) is contradictory;
    // the context actually obtained is driver/platform dependent (macOS in
    // particular maps core-profile requests to 3.2+). Confirm whether the
    // intent was a plain 2.1 (NoProfile/Compatibility) context.
    QSurfaceFormat format = QSurfaceFormat::defaultFormat();
    format.setRenderableType(QSurfaceFormat::OpenGL);
    format.setProfile(QSurfaceFormat::CoreProfile);
    format.setVersion(2, 1);
    QSurfaceFormat::setDefaultFormat(format);
    Dialog w;
    w.show();
    // Enter the Qt event loop; returns the application's exit code.
    return a.exec();
}
|
<filename>demo/src/export-named.ts<gh_stars>1-10
// Shared logger so both demo exports go through the same call site.
const log = (label: string): void => {
  console.log(label);
};

// Default export: entry point that announces itself on the console.
export default function main(): void {
  log("main");
}

// Named export alongside the default, for import-style demos.
export function named(): void {
  log("named");
}
|
time CUDA_VISIBLE_DEVICES=$1 python main.py --cuda \
--env-name Walker2d-v3 \
--id $0 \
--seed $2 \
--switching none |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.