text stringlengths 1 1.05M |
|---|
import { Injectable } from '@nestjs/common';
import {InjectRepository} from "@nestjs/typeorm";
import {Repository} from "typeorm";
import {Filter} from "./filter.entity";
@Injectable()
export class FiltersService {
    constructor(
        @InjectRepository(Filter) private filterRepository: Repository<Filter>,
    ) { }

    /** Look up a filter by its icon id; resolves to undefined when absent. */
    async findFilter(icon_id: number): Promise<Filter> {
        // An async function already returns a Promise; the previous
        // `new Promise(resolve => resolve(filter))` wrapper was redundant.
        return this.filterRepository.findOne({ icon_id });
    }

    /** Look up a filter by primary key. */
    async findById(id: number): Promise<Filter> {
        return this.filterRepository.findOne({ id });
    }

    /** Return all filters. */
    async getFilters(): Promise<Filter[]> {
        return this.filterRepository.find();
    }

    /** Return a single filter by primary key. */
    async getFilter(id: number): Promise<Filter> {
        return this.filterRepository.findOne(id);
    }

    /**
     * Create and persist a new filter from the given ids.
     *
     * NOTE: the trailing `filter` parameter was never used by the original
     * implementation; it is kept only for call-site compatibility.
     */
    async createFilter(icon_id: number, xml_filter_id: number, filter: Filter) {
        const newFilter = new Filter();
        newFilter.icon_id = icon_id;
        newFilter.xml_filter_id = xml_filter_id;
        return this.filterRepository.save(newFilter);
    }

    /** Update a filter's icon/xml ids and return the refreshed entity. */
    async updateFilter(id: number, icon_id: number, xml_filter_id: number) {
        // Bug fix: the update was previously not awaited, so the subsequent
        // findOne could observe the row before the update was applied, and
        // update failures were silently dropped.
        await this.filterRepository.update({ id }, { icon_id, xml_filter_id });
        return this.filterRepository.findOne(id);
    }

    /** Delete a filter by primary key. */
    async deleteFilter(id: number) {
        // Bug fix: await the delete so failures reject instead of being lost.
        await this.filterRepository.delete(id);
        return "SUCCESS!!! Filter was deleted";
    }
}
|
def get_integer_input(msg: str) -> int:
    """Prompt with ``msg`` repeatedly until the user types a valid integer.

    Prints an error message on each invalid entry and asks again.
    """
    while True:
        raw = input(msg)
        try:
            return int(raw)
        except ValueError:
            print("Invalid input! Please enter an integer.")
# Habitat plan for Redis 4.x; build logic is inherited from the core redis plan.
source '../redis/plan.sh'
pkg_name=redis4
pkg_origin=core
pkg_version="4.0.10"
pkg_description="Persistent key-value database, with built-in net interface"
pkg_upstream_url="http://redis.io"
pkg_license=("BSD-3-Clause")
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
# NOTE(review): pkg_dist_name is not set in this file — presumably defined by
# the sourced ../redis/plan.sh; confirm before changing the source URL.
pkg_source="http://download.redis.io/releases/${pkg_dist_name}-${pkg_version}.tar.gz"
pkg_shasum="1db67435a704f8d18aec9b9637b373c34aa233d65b6e174bdac4c1b161f38ca4"
pkg_dirname="${pkg_dist_name}-${pkg_version}"
|
// UI test: opens a popup window and verifies that clicking the window's
// close tool removes its DOM element.
StartTest({
    // popups can be slow to load, so allow a generous timeout
    defaultTimeout : 90000
}, function (t) {
    var popup = window.open("html-page/popup-content.html", '_blank', "left=10,top=10,width=500,height=500")
    // in our experience, IE sometimes fails to open a popup. This happens sporadically even if popups are enabled
    // in the browser, need to take into account such possibility, in which we just skip the rest of the test
    if (!popup) return
    // switch the test context into the popup before interacting with it
    t.switchTo({ url : /popup/ }, function () {
        t.chain(
            { action : 'click', target : '>> [foo] tool[type=close]' },
            function() {
                t.notOk(t.global.win.el, 'The dom element of the window is gone');
            }
        );
    })
});
def expression(x):
    """Evaluate the linear expression 3*x + 5."""
    return x * 3 + 5


print(expression(7))
package table
import (
"bytes"
"fmt"
"strings"
"github.com/elgopher/noteo/date"
"github.com/elgopher/noteo/notes"
"github.com/elgopher/noteo/output"
"github.com/juju/ansiterm"
"golang.org/x/crypto/ssh/terminal"
)
// mapping associates each supported (upper-case) column name with the
// column implementation that prints its header and per-note value.
var mapping = map[string]column{
	"FILE":      fileColumn{},
	"BEGINNING": beginningColumn{},
	"MODIFIED":  modifiedColumn{},
	"CREATED":   createdColumn{},
	"TAGS":      tagsColumn{},
}
// NewFormatter builds a table Formatter for the given column names.
// Column names are matched case-insensitively; an unknown name yields an
// error. If the terminal size cannot be determined, an 80x25 fallback is
// used.
func NewFormatter(columns []string, dateFormat date.Format) (*Formatter, error) {
	width, height, err := terminal.GetSize(0)
	if err != nil {
		width, height = 80, 25
	}
	normalized := make([]string, 0, len(columns))
	for _, name := range columns {
		upper := strings.ToUpper(name)
		if _, known := mapping[upper]; !known {
			return nil, fmt.Errorf("unsupported output column: %s", upper)
		}
		normalized = append(normalized, upper)
	}
	buf := bytes.NewBuffer([]byte{})
	tw := ansiterm.NewTabWriter(buf, 0, 8, 1, '\t', 0)
	tw.SetColorCapable(true)
	formatter := &Formatter{
		columns:    normalized,
		dateFormat: dateFormat,
		width:      width,
		height:     height,
		buffer:     buf,
		writer:     tw,
	}
	return formatter, nil
}
// Formatter renders notes as a tab-aligned, ANSI-colored table sized to
// the terminal.
type Formatter struct {
	columns    []string    // upper-cased, validated column names
	dateFormat date.Format // rendering style for date columns
	width      int         // terminal width (not used in the visible code)
	height     int         // rows per page; Note flushes when this is reached
	line       int         // rows buffered since the last flush
	writer     *ansiterm.TabWriter
	buffer     *bytes.Buffer
}
// flush pushes pending tab-writer output into the buffer, returns the
// buffered text and resets both the buffer and the line counter.
func (o *Formatter) flush() string {
	_ = o.writer.Flush()
	out := o.buffer.String()
	o.buffer.Reset()
	o.line = 0
	return out
}
// Header queues the header row for every configured column. Output is
// buffered (emitted later via Note/Footer flushes), hence the empty
// return value.
func (o *Formatter) Header() string {
	o.line++
	for _, c := range o.columns {
		options := opts{dateFormat: o.dateFormat}
		column := mapping[c]
		column.printHeader(options, o.writer)
		_, _ = o.writer.Write([]byte("\t"))
	}
	_, _ = o.writer.Write([]byte("\n"))
	return ""
}
// Footer flushes and returns whatever output is still buffered.
func (o *Formatter) Footer() string {
	return o.flush()
}
// Note queues one table row for the given note. When a full page of rows
// has been buffered (o.height), the buffer is flushed first and the
// flushed text is returned so the caller can emit output page by page.
func (o *Formatter) Note(note notes.Note) string {
	out := ""
	if o.line == o.height {
		out = o.flush()
	}
	o.line++
	for _, c := range o.columns {
		options := opts{dateFormat: o.dateFormat}
		column := mapping[c]
		column.printValue(note, options, o.writer)
		_, _ = o.writer.Write([]byte("\t"))
	}
	_, _ = o.writer.Write([]byte("\n"))
	return out
}
// format truncates text to at most limit runes — appending an ellipsis
// when truncation occurs — and left-pads the result to a field of limit
// characters (fmt width counts runes for strings).
func format(text string, limit int) string {
	runes := []rune(text) // rune conversion so multi-byte characters count as one
	if len(runes) > limit {
		// Bug fix: the appended rune literal had been corrupted by an
		// encoding round-trip ('โฆ', two runes, not a valid rune literal);
		// restore the intended single ellipsis character.
		runes = append(runes[:limit-1], '…')
	}
	return fmt.Sprintf("%-*s", limit, string(runes))
}
// beginning extracts a one-line preview from note text: the first line,
// with markdown heading ('#') and list ('*') markers stripped, tabs
// replaced by spaces and carriage returns removed.
func beginning(text string) string {
	t := strings.Trim(text, "\n")
	if strings.Contains(t, "\n") {
		t = t[:strings.IndexRune(t, '\n')]
	}
	t = strings.ReplaceAll(t, "\t", " ")
	// Bug fix: markdown headings use up to six '#' characters; the
	// original loop stopped at five, leaving the sixth '#' of an h6.
	for i := 0; i < 6; i++ {
		t = strings.TrimPrefix(t, "#")
	}
	t = strings.TrimPrefix(t, "*")
	t = strings.ReplaceAll(t, "\r", "")
	t = strings.Trim(t, " ")
	return t
}
// column is implemented by every printable table column.
type column interface {
	printHeader(opts opts, writer *ansiterm.TabWriter)
	printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter)
}

// opts carries per-render options passed to column implementations.
type opts struct {
	dateFormat date.Format
}
// fileColumn prints the note's path, colored bright blue.
type fileColumn struct{}

func (f fileColumn) printHeader(opts opts, writer *ansiterm.TabWriter) {
	_, _ = writer.Write([]byte("FILE"))
}

func (f fileColumn) printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter) {
	writer.SetForeground(ansiterm.BrightBlue)
	defer writer.Reset() // restore default styling after this cell
	_, _ = fmt.Fprint(writer, note.Path())
}
// beginningColumn prints the first line of the note body in bold,
// truncated/padded to 34 characters (see format and beginning).
type beginningColumn struct{}

func (s beginningColumn) printHeader(opts opts, writer *ansiterm.TabWriter) {
	_, _ = fmt.Fprint(writer, format("BEGINNING", 34))
}

func (s beginningColumn) printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter) {
	body, _ := note.Body() // read errors are ignored; the cell just stays empty
	writer.SetStyle(ansiterm.Bold)
	defer writer.Reset()
	_, _ = fmt.Fprint(writer, format(beginning(body), 34))
}
// modifiedColumn prints the note's modification date formatted per
// opts.dateFormat; on error the error text is printed instead.
type modifiedColumn struct{}

func (m modifiedColumn) printHeader(_ opts, writer *ansiterm.TabWriter) {
	_, _ = writer.Write([]byte("MODIFIED"))
}

func (m modifiedColumn) printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter) {
	modified, err := note.Modified()
	if err != nil {
		writeError(err, writer)
		return
	}
	formatted := date.FormatWithType(modified, opts.dateFormat)
	_, _ = fmt.Fprint(writer, formatted)
}
// writeError writes err's message into the current cell, replacing tabs
// with spaces so the message cannot break the tab-separated layout.
func writeError(err error, writer *ansiterm.TabWriter) {
	errString := err.Error()
	errString = strings.ReplaceAll(errString, "\t", " ")
	_, _ = fmt.Fprint(writer, errString)
}
// createdColumn prints the note's creation date formatted per
// opts.dateFormat; on error the error text is printed instead.
type createdColumn struct{}

func (c createdColumn) printHeader(_ opts, writer *ansiterm.TabWriter) {
	_, _ = writer.Write([]byte("CREATED"))
}

func (c createdColumn) printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter) {
	created, err := note.Created()
	if err != nil {
		writeError(err, writer)
		return
	}
	// Consistency fix: the local was misleadingly named "modified" even
	// though it holds the formatted creation date (cf. modifiedColumn).
	formatted := date.FormatWithType(created, opts.dateFormat)
	_, _ = fmt.Fprint(writer, formatted)
}
// tagsColumn prints the note's tags as a space-separated list.
type tagsColumn struct{}

func (t tagsColumn) printHeader(_ opts, writer *ansiterm.TabWriter) {
	_, _ = writer.Write([]byte("TAGS"))
}

func (t tagsColumn) printValue(note notes.Note, opts opts, writer *ansiterm.TabWriter) {
	tags, _ := output.StringTags(note) // tag-parse errors ignored; cell stays empty
	tagsString := strings.Join(tags, " ")
	_, _ = fmt.Fprint(writer, tagsString)
}
|
<reponame>tarachandverma/ngx-openidc
/* Helpers for resolving document-parser resource paths. */
#ifndef DOC_PARSER_UTILS_H_
#define DOC_PARSER_UTILS_H_
#include <config-core/config_bindings_shm.h>
/* Resolve `resource` against the remote service described by `rs`.
 * On failure *details receives an explanation. Results are presumably
 * allocated from pool `p` (apr-style) — confirm against the implementation. */
char* docp_getRemoteResourcePath(pool* p, char* resource,cbs_service_descriptor *rs,char* homeDir,char**details);
/* Resolve `resource` relative to `homeDir` on the local filesystem. */
char* docp_getLocalResourcePath(pool*p,char* resource,char* homeDir);
/* Like docp_getRemoteResourcePath but takes a full resource URI directly. */
char* docp_getRemoteResourcePathEx(pool* p, char* resourceUri, char* homeDir,char**details);
#endif /*DOC_PARSER_UTILS_H_*/
|
// Fix: identifiers containing double underscores (__WPA_COMMAND_BSS_HPP__)
// are reserved for the implementation in C++; use a non-reserved guard.
#ifndef WPA_COMMAND_BSS_HPP
#define WPA_COMMAND_BSS_HPP

#include <cinttypes>
#include <memory>      // std::shared_ptr / std::unique_ptr (previously missing)
#include <string>
#include <string_view> // std::string_view (previously missing)
#include <vector>

#include "wifi-telemetry/wifi/wifi_80211.hpp"
#include "wifi-telemetry/wpa/wpa_command.hpp"
#include "wifi-telemetry/wpa/wpa_command_response_parser.hpp"

/**
 * Parser for the payload of a wpa_supplicant "BSS" control-interface reply.
 */
struct WpaCommandBssResponseParser : public WpaCommandResponseParser
{
    WpaCommandBssResponseParser(const std::string_view payload);

    std::shared_ptr<WpaCommandResponse>
    parse_payload(void) const override;
};

/**
 * Decoded fields of a single BSS entry reported by wpa_supplicant.
 */
struct WpaCommandBssResponse : public WpaCommandResponse
{
    WpaCommandBssResponse();

    wifi_80211_mac bssid;
    uint32_t id;
    uint32_t capabilities;
    uint32_t update_index;
    uint64_t tsf;
    int32_t frequency;
    int32_t beacon_int;
    int32_t quality;
    int32_t snr;
    int32_t estimated_throughput;
    int32_t age;
    std::string flags;
    std::string ssid;
    std::vector<uint8_t> ie;
    std::vector<uint8_t> beacon_ie;
};

/**
 * The "BSS <bssid>" control-interface command.
 */
struct WpaCommandBss : public WpaCommand
{
    WpaCommandBss(std::string bssid_);
    WpaCommandBss(const wifi_80211_mac& bssid_);

    std::string bssid;

private:
    std::unique_ptr<WpaCommandResponseParser>
    create_response_parser(const std::string_view payload) const override;
};

#endif // WPA_COMMAND_BSS_HPP
|
<filename>src/state/standard-request/auth.js
// TODO: set this token on login
// instead of using local storage
export const authToken = () =>
  JSON.parse(JSON.parse(window.localStorage.getItem("persist:root")).auth).token

// Returns a thunk producing auth headers. `getToken` is any zero-argument
// function returning a bearer token (typically the `authToken` helper).
// Fix: the parameter was previously also named `authToken`, shadowing the
// module-level helper — confusing, though behaviorally identical.
export const authHeaders = getToken => () => {
  return {
    Authorization: `Bearer ${getToken()}`,
  }
}
|
<filename>packages/server/http-server/prisma/migrations/20210616232339_alter_map_cancellation_created_at_column/migration.sql
/*
  Warnings:
  - You are about to drop the column `canceled_at` on the `cancellations` table. All the data in the column will be lost.
*/
-- AlterTable: replace `canceled_at` with `created_at`; existing rows are
-- backfilled with the migration timestamp via the CURRENT_TIMESTAMP default.
ALTER TABLE "cancellations" DROP COLUMN "canceled_at",
ADD COLUMN     "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;
// Define a trait for middleware: each middleware may inspect and mutate
// both the request and the response as it passes through the chain.
trait Middleware {
    fn handle(&mut self, req: &mut Request, res: &mut Response);
}
// Implement the middleware system: an ordered chain of boxed middlewares
// applied in insertion order.
struct MiddlewareSystem {
    middlewares: Vec<Box<dyn Middleware>>,
}
impl MiddlewareSystem {
    /// Create an empty middleware chain.
    fn new() -> Self {
        Self { middlewares: Vec::new() }
    }

    /// Append a middleware to the end of the chain.
    fn add_middleware<T: 'static + Middleware>(&mut self, middleware: T) {
        self.middlewares.push(Box::new(middleware));
    }

    /// Run every middleware over the request/response pair, in the order
    /// they were added.
    fn apply_middleware(&mut self, req: &mut Request, res: &mut Response) {
        self.middlewares
            .iter_mut()
            .for_each(|m| m.handle(req, res));
    }
}
// Create a middleware for logging: prints each request and stamps the
// response with a marker header.
struct LoggerMiddleware;
impl Middleware for LoggerMiddleware {
    fn handle(&mut self, req: &mut Request, res: &mut Response) {
        // Log the incoming request
        println!("Incoming request: {:?}", req);
        // Modify the outgoing response
        res.add_header("X-Custom-Header", "Middleware-Modified");
    }
}
// Example usage: build a chain with the logger and run one request
// through it.
fn main() {
    let mut middleware_system = MiddlewareSystem::new();
    middleware_system.add_middleware(LoggerMiddleware);
    let mut request = Request::new();
    let mut response = Response::new();
    // Apply the middleware system to the request and response
    middleware_system.apply_middleware(&mut request, &mut response);
}
import React, {Component} from 'react'
import { hot } from 'react-hot-loader'
import Root from './Root'
// Development entry component: renders the app Root, forwarding all props.
// Wrapped with react-hot-loader's hot() at export time for hot reloading.
class Dev extends Component {
    render() {
        return (<Root {...this.props} />)
    }
}
export default hot(module)(Dev)
|
//===----------------------------------------------------------------------===//
// DuckDB
//
// duckdb/common/types/string_type.hpp
//
//
//===----------------------------------------------------------------------===//
#pragma once
#include "duckdb/common/constants.hpp"
#include <cstring>
#include <cassert>
namespace duckdb {
// string_t: DuckDB's 16-byte string value. Short strings (< INLINE_LENGTH
// characters) are stored entirely inline; longer strings keep a 4-byte
// prefix inline plus a pointer to the full data. The inline layout relies
// on `prefix` and the `value_` union being contiguous in memory — inlined
// writes deliberately spill past prefix[4] into value_.inlined.
struct string_t {
	friend struct StringComparisonOperators;
	friend class StringSegment;

public:
	// Number of bytes kept inline for fast prefix comparisons.
	static constexpr idx_t PREFIX_LENGTH = 4 * sizeof(char);
	// Strings strictly shorter than this are stored fully inline.
	static constexpr idx_t INLINE_LENGTH = 12;

	string_t() = default;

	// Construct with a known length but no data yet; prefix is zeroed.
	string_t(uint32_t len) : length(len) {
		memset(prefix, 0, PREFIX_LENGTH);
		value_.data = nullptr;
	}

	// Construct from a (data, len) pair. Inlined strings are copied;
	// large strings store the pointer without copying (caller keeps
	// ownership of `data`).
	string_t(const char *data, uint32_t len) : length(len) {
		assert(data || length == 0);
		if (IsInlined()) {
			// zero initialize the prefix first
			// this makes sure that strings with length smaller than 4 still have an equal prefix
			memset(prefix, 0, PREFIX_LENGTH);
			if (length == 0) {
				return;
			}
			// small string: inlined
			// NOTE: copies into prefix and, for length > 4, past it into
			// the union's inlined bytes (contiguous-layout trick).
			memcpy(prefix, data, length);
			prefix[length] = '\0';
		} else {
			// large string: store pointer
			memcpy(prefix, data, PREFIX_LENGTH);
			value_.data = (char *)data;
		}
	}

	// NOTE(review): strlen returns size_t, narrowed to uint32_t here —
	// strings longer than 4 GiB would be truncated.
	string_t(const char *data) : string_t(data, strlen(data)) {
	}

	string_t(const string &value) : string_t(value.c_str(), value.size()) {
	}

	// True when the payload is stored inline (no heap pointer).
	bool IsInlined() const {
		return length < INLINE_LENGTH;
	}

	// Pointer to the character data, inline or out-of-line.
	char *GetData() {
		return IsInlined() ? (char *)prefix : value_.data;
	}
	const char *GetData() const {
		return IsInlined() ? (const char *)prefix : value_.data;
	}

	// Length in bytes (excluding any trailing NUL).
	idx_t GetSize() const {
		return length;
	}

	// Copy out as an owning string.
	string GetString() const {
		return string(GetData(), GetSize());
	}

	// Finalize after the payload has been written: NUL-terminate and make
	// the prefix consistent with the data so prefix comparisons are valid.
	void Finalize() {
		// set trailing NULL byte
		auto dataptr = (char *)GetData();
		dataptr[length] = '\0';
		if (length < INLINE_LENGTH) {
			// fill prefix with zeros if the length is smaller than the prefix length
			for (idx_t i = length; i < PREFIX_LENGTH; i++) {
				prefix[i] = '\0';
			}
		} else {
			// copy the data into the prefix
			memcpy(prefix, dataptr, PREFIX_LENGTH);
		}
	}

	void Verify();

private:
	uint32_t length;    // payload length in bytes
	char prefix[4];     // first bytes of the string, for fast comparison
	union {
		char inlined[8]; // tail of an inlined string (after prefix)
		char *data;      // pointer to out-of-line data
	} value_;
};
}; // namespace duckdb
|
#!/bin/sh
# Tests for option-decompose.sh: verifies that combined short options,
# --long=value options and arguments containing quotes, spaces and
# newlines are split into separately quoted words suitable for
# `eval set --`. Some tests deliberately omit the outer quotes around
# the command substitution to exercise raw word splitting.
prefix="$(dirname "$0")"
test_decompose_short_options() {
	eval set -- "$("$prefix"/option-decompose.sh -abc)"
	assert x"$1" = x"-a"
	assert x"$2" = x"-b"
	assert x"$3" = x"-c"
}
test_decompose_short_options_and_an_argument() {
	eval set -- "$("$prefix"/option-decompose.sh -abc arg)"
	assert x"$1" = x"-a"
	assert x"$2" = x"-b"
	assert x"$3" = x"-c"
	assert x"$4" = x"arg"
}
test_decompose_long_option() {
	eval set -- "$("$prefix"/option-decompose.sh --long-opt)"
	assert x"$1" = x"--long-opt"
}
test_decompose_long_option_eq_argument() {
	eval set -- $("$prefix"/option-decompose.sh --long-opt=arg)
	assert x"$1" = x"--long-opt"
	assert x"$2" = x"arg"
}
test_decompose_long_option_eq_argument_containing_eq() {
	# only the first '=' separates option from value
	eval set -- $("$prefix"/option-decompose.sh --long-opt=flag=on)
	assert x"$1" = x"--long-opt"
	assert x"$2" = x"flag=on"
}
test_decompose_long_option_eq_argument_containing_single_quote() {
	eval set -- $("$prefix"/option-decompose.sh --long-opt="arg'n")
	assert x"$1" = x"--long-opt"
	assert x"$2" = x"arg'n"
}
test_decompose_long_option_eq_argument_containing_double_quote() {
	eval set -- $("$prefix"/option-decompose.sh --long-opt=\")
	assert x"$1" = x"--long-opt"
	assert x"$2" = x\"
}
test_decompose_long_option_eq_newline_argument_containing_eq() {
	eval set -- "$("$prefix"/option-decompose.sh --long-opt='fl
ag=on')"
	assert x"$1" = x"--long-opt"
	assert x"$2" = x"fl
ag=on"
}
test_argument_includes_space() {
	eval set -- $("$prefix"/option-decompose.sh "hello world")
	assert x"$1" = x"hello world"
}
test_raw_argument_includes_newline() {
	# raw output: the newline-containing word must come back single-quoted
	arg="$("$prefix"/option-decompose.sh "hello
world")"
	assert x"$arg" = x"'hello
world'"
}
test_argument_includes_newline() {
	eval set -- "$("$prefix"/option-decompose.sh "hello
world" "_")"
	assert x"$1" = x"hello
world"
	assert x"$2" = x"_"
}
test_setopt_includes_newline() {
	# sanity check: `eval set --` itself preserves embedded newlines
	eval set -- "'a
b'"
	assert x"$1" = x'a
b'
}
test_argument_containing_escape() {
	eval set -- "$("$prefix"/option-decompose.sh "\"hello world\"")"
	assert x"$1" = x"\"hello world\""
}
test_argument_not_expanded() {
	# output must be quoted so that $0 is not expanded by the eval
	eval set -- "$("$prefix"/option-decompose.sh '$0')"
	assert x"$1" = x'$0'
}
. "$prefix"/../tester/tester.sh
|
import random
class Person:
    """A person with a randomly chosen age and a caller-assigned gender."""

    def __init__(self):
        # Age is drawn uniformly from 1..100; gender starts unset and is
        # expected to be assigned by the caller.
        self.age = random.randint(1, 100)
        self.gender = None

    def generate_greeting(self, lastname):
        """Return a greeting for ``lastname`` with a gender-based title.

        Any gender other than "male" (including the unset None) yields "Ms.".
        """
        if self.gender == "male":
            title = "Mr."
        else:
            title = "Ms."
        return f"Hello, {title} {lastname}! You are {self.age} years old."
# Example usage — ages are random, so the printed age differs per run.
person1 = Person()
person1.gender = "male"
print(person1.generate_greeting("Smith"))  # e.g. "Hello, Mr. Smith! You are 42 years old."
person2 = Person()
person2.gender = "female"
print(person2.generate_greeting("Johnson"))  # e.g. "Hello, Ms. Johnson! You are 76 years old."
<reponame>FunMusicalIdeas/zdaubyaos
// Barrel file: re-export the thirty-five-segments thunk and its types
// for package consumers.
export { thunkThirtyfiveSegments } from './segments'
export {
    ThirtyfiveSegments,
} from './types'
|
#!/bin/bash
#
# pkgver.sh - Check the 'pkgver' variable conforms to requirements.
#
# Copyright (c) 2014-2021 Pacman Development Team <pacman-dev@archlinux.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Include guard: bail out if this lint module was already sourced.
[[ -n "$LIBMAKEPKG_LINT_PKGBUILD_PKGVER_SH" ]] && return
LIBMAKEPKG_LINT_PKGBUILD_PKGVER_SH=1

LIBRARY=${LIBRARY:-'/usr/share/makepkg'}

source "$LIBRARY/util/message.sh"

# Register this module's entry point with the PKGBUILD lint driver.
lint_pkgbuild_functions+=('lint_pkgver')
# check_pkgver: validate a pkgver value.
#   $1 - version string to check
#   $2 - optional context (e.g. a package name) interpolated into messages
# Rules enforced: non-empty, only [a-z0-9.+~], and first character a digit.
# Returns 1 (with an error message) on the first violated rule.
check_pkgver() {
	local ver=$1 type=$2

	if [[ -z $ver ]]; then
		error "$(gettext "%s is not allowed to be empty.")" "pkgver${type:+ in $type}"
		return 1
	fi

	# Strip every allowed character; anything left over is invalid.
	invalid_characters="$(echo "${ver}" | sed 's|[a-z0-9.+~]||g')"
	if [[ "${invalid_characters:+x}" == "x" ]]; then
		error "$(gettext "%s contains invalid characters.")" "pkgver${type:+ in $type}"
		return 1
	fi

	# First character (awk with empty field separator) must be a digit.
	if ! echo "$ver" | awk -F '' '{print $1}' | grep -q '[0-9]'; then
		error "$(gettext "%s must start with a number.")" "pkgver${type:+ in $type}"
		return 1
	fi
}
# lint_pkgver: lint driver entry point — checks the global $pkgver.
lint_pkgver() {
	check_pkgver "$pkgver"
}
|
package dolstats
import (
"testing"
"time"
)
// TestUnmarshalFullMine verifies that the full-mine mock JSON unmarshals
// into the expected records with correctly typed fields.
func TestUnmarshalFullMine(t *testing.T) {
	mock, err := loadMock("./testdata/full_mine_information/msha_mines.json")
	if err != nil {
		// Fatal (was t.Error): without the fixture every later check is
		// meaningless and would panic on nil data.
		t.Fatal("Error loading FullMine mock data. Error was:", err)
	}
	a, err := unmarshalFullMine(mock)
	if err != nil {
		t.Fatal("Error unmarshaling FullMine. Error was:", err)
	}
	if len(a.Data) != 2 {
		// Fatal (was t.Error): indexing a.Data[0] below would panic on an
		// empty slice.
		t.Fatal("Invalid Data length. Length is:", len(a.Data))
	}
	resOne := a.Data[0]
	if resOne.ID != 1 {
		t.Error("Invalid ID value returned. Value was:", resOne.ID)
	}
	d := time.Time(resOne.CurrentStatusDate)
	if d.Day() != 25 {
		t.Error("Invalid day value returned. Value was:", d.Day())
	}
	if d.Month() != time.June {
		t.Error("Invalid month value returned. Value was:", d.Month())
	}
	if d.Year() != 1987 {
		t.Error("Invalid year value returned. Value was:", d.Year())
	}
	if resOne.PortableOperation {
		t.Error("Invalid PortableOperation value returned. Should have been false.")
	}
}
|
#!/bin/sh
# Stand-in "colorize" step: copies INPUT to OUTPUT, flips it horizontally
# with sips (macOS), then removes INPUT.
#
# Usage: colorize.sh INPUT OUTPUT
INPUT=$1
OUTPUT=$2

# Fix: validate arguments before doing anything destructive — with the
# old unchecked, unquoted code an empty/whitespace path could make cp/rm
# misbehave.
if [ -z "$INPUT" ] || [ -z "$OUTPUT" ]; then
	echo "usage: $0 INPUT OUTPUT" >&2
	exit 1
fi

#insert colorize script here
sleep 4
cp "$INPUT" "$OUTPUT"
sips -f horizontal "$OUTPUT"
rm "$INPUT"
public static void moveFirstToEnd(int[] arr) {
int first = arr[0];
for (int i = 0; i < arr.length - 1; i++) {
arr[i] = arr[i + 1];
}
arr[arr.length - 1] = first;
} |
/**
* Graphology Cycle Creation Checker
* ==================================
*
* Function returning whether adding the given directed edge to a DAG will
* create a cycle.
*
* Note that this function requires the given graph to be a valid DAG forest
* and will not check it beforehand for performance reasons.
*/
const isGraph = require('graphology-utils/is-graph');

/**
 * Returns whether adding the directed edge (source, target) to the given
 * DAG would create a cycle. The graph is assumed to already be a valid
 * DAG forest (not checked, for performance).
 *
 * @param {Graph} graph - graphology instance (must be a valid DAG)
 * @param {any} source - source node key (coerced to string)
 * @param {any} target - target node key (coerced to string)
 * @returns {boolean}
 */
module.exports = function willCreateCycle(graph, source, target) {
  if (!isGraph(graph))
    throw new Error(
      'graphology-dag/will-create-cycle: the given graph is not a valid graphology instance.'
    );

  // Node keys are strings internally; normalize before comparisons.
  source = '' + source;
  target = '' + target;

  // If the edge is a self loop, it will obviously add a cycle
  if (source === target) return true;

  // If any of the pointed nodes isn't in the graph yet,
  // then no cycle can be created by adding this edge
  if (!graph.hasNode(source) || !graph.hasNode(target)) return false;

  // Early exit for existing edge or mutual one
  if (graph.hasDirectedEdge(source, target)) return false;
  if (graph.hasDirectedEdge(target, source)) return true;

  // Else, we need to assess whether a directed path between target and source
  // can be found. We will use DFS traversal because it is usually less
  // costly than BFS (stack vs. queue).
  const stack = graph.outNeighbors(target);

  function push(neighbor) {
    // NOTE: we don't check whether pushed neighbors have not been seen
    // because this is not necessary in a DAG. This could result in
    // undefined behavior for cyclic graphs, ranging from infinite loop to
    // overkill memory usage.
    stack.push(neighbor);
  }

  while (stack.length !== 0) {
    const node = stack.pop();

    // Reaching source from target means the new edge closes a cycle.
    if (node === source) return true;

    graph.forEachOutNeighbor(node, push);
  }

  return false;
};
// Property bag for the Starter web part.
export interface IStarterWpWebPartPropspropertiesStarter {
  // Maximum number of items to display. Typed as string — presumably
  // because property-pane text fields persist values as strings; confirm
  // before changing to number.
  maxitems: string;
}
<filename>docs/next.config.js
const prod = process.env.NODE_ENV === "production"
const withNextra = require('nextra')({
theme: 'nextra-theme-docs',
themeConfig: './theme.config.js',
// optional: add `unstable_staticImage: true` to enable Nextra's auto image import
})
module.exports = withNextra({
// Gavin, 20220208: Updated for Netlify
basePath: '/docs',
})
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Ensure the global SITE namespace exists before attaching to it.
if (!window.SITE)
    window.SITE = {};

// SITE.PartEdit — the score ("partitura") editor screen. Wires together
// the ABC text editor, the accordion keyboard preview, the MIDI player
// and the rendered-score canvas inside a draggable window.
//   mapa            - owning controller (loaders, print preview, open/close map)
//   interfaceParams - DOM element ids and options used to build the UI
SITE.PartEdit = function( mapa, interfaceParams ) {
    this.mapa = mapa;
    var that = this;

    // Main editor window filling the whole viewport area.
    this.Div = new DRAGGABLE.ui.Window(
        interfaceParams.partEditDiv
        , ['help']
        , {translator: SITE.translator, statusbar: false, draggable: false, top: "3px", left: "1px", width: '100%', height: "100%", title: 'PartEditTitle'}
        , {listener: this, method: 'a2pCallback'}
    );
    this.Div.setVisible(true);
    this.Div.dataDiv.style.overflow = 'hidden';

    // MIDI parsing/playback engines.
    this.midiParser = new ABCXJS.midi.Parse();
    this.midiPlayer = new ABCXJS.midi.Player(this);

    var canvas_id = 'a2pCanvasDiv';
    var warnings_id = 'a2pWarningsDiv';

    // Cache of the currently rendered tune and its artifacts.
    this.renderedTune = {text:undefined, abc:undefined, title:undefined
        ,tab: undefined, div: undefined ,selector: undefined };

    // Optional tablature support; only 'accordion' is implemented.
    if (interfaceParams.generate_tablature) {
        if (interfaceParams.generate_tablature === 'accordion') {
            this.accordion = new window.ABCXJS.tablature.Accordion(
                interfaceParams.accordion_options
                , SITE.properties.options.tabFormat
                ,!SITE.properties.options.tabShowOnlyNumbers );
            if (interfaceParams.accordionNameSpan) {
                this.accordionNameSpan = document.getElementById(interfaceParams.accordionNameSpan);
                this.accordionNameSpan.innerHTML = this.accordion.getFullName();
            }
        } else {
            // NOTE(review): this message text is mojibake of Portuguese
            // "não suportada" (encoding corruption in the source file).
            throw new Error('Tablatura para ' + interfaceParams.generate_tablature + ' nรฃo suportada!');
        }
    }

    // ABC text editor sub-window.
    this.editorWindow = new ABCXJS.edit.EditArea(
        this.Div.dataDiv
        ,{listener : this, method: 'editorCallback' }
        ,{ draggable:SITE.properties.partEdit.editor.floating
            ,toolbar: true, statusbar:true, translator: SITE.translator
            ,title: 'PartEditEditorTitle'
            ,compileOnChange: false /*SITE.properties.options.autoRefresh*/
        }
    );
    this.editorWindow.setVisible(false);

    // Toolbar: moved from the template element into this window.
    this.controlDiv = document.createElement("DIV");
    this.controlDiv.setAttribute("id", 'a2pcontrolDiv' );
    this.controlDiv.setAttribute("class", 'controlDiv btn-group' );
    this.Div.dataDiv.appendChild(this.controlDiv);
    this.controlDiv.innerHTML = document.getElementById(interfaceParams.controlDiv).innerHTML;
    document.getElementById(interfaceParams.controlDiv).innerHTML = "";

    this.media = new SITE.Media( this.Div.dataDiv, interfaceParams.btShowMedia, SITE.properties.partEdit.media );

    // Floating window hosting the accordion keyboard preview.
    this.keyboardWindow = new DRAGGABLE.ui.Window(
        this.Div.dataDiv
        ,[ 'move', 'rotate', 'zoom', 'globe']
        ,{title: '', translator: SITE.translator, statusbar: false
            , top: SITE.properties.partEdit.keyboard.top
            , left: SITE.properties.partEdit.keyboard.left
        }
        ,{listener: this, method: 'keyboardCallback'}
    );
    this.accordion.setRenderOptions({
        draggable: true
        ,show: SITE.properties.partEdit.keyboard.visible
        ,transpose: SITE.properties.partEdit.keyboard.transpose
        ,mirror: SITE.properties.partEdit.keyboard.mirror
        ,scale: SITE.properties.partEdit.keyboard.scale
        ,label: SITE.properties.partEdit.keyboard.label
    });

    // Area for compiler/player warnings.
    this.warningsDiv = document.createElement("DIV");
    this.warningsDiv.setAttribute("id", warnings_id);
    this.warningsDiv.setAttribute("class", "warningsDiv" );
    this.Div.dataDiv.appendChild(this.warningsDiv);

    // Scrollable canvas holding the rendered score.
    this.studioCanvasDiv = document.createElement("DIV");
    this.studioCanvasDiv.setAttribute("id", 'a2pStudioCanvasDiv' );
    this.studioCanvasDiv.setAttribute("class", "studioCanvasDiv" );
    this.canvasDiv = document.createElement("DIV");
    this.canvasDiv.setAttribute("id", canvas_id);
    this.canvasDiv.setAttribute("class", "canvasDiv" );
    this.studioCanvasDiv.appendChild(this.canvasDiv);
    this.renderedTune.div = this.canvasDiv;
    this.Div.dataDiv.appendChild(this.studioCanvasDiv);

    // Recreate the custom scrollbar on (re)construction.
    if( this.ps )
        this.ps.destroy();
    this.ps = new PerfectScrollbar( this.studioCanvasDiv, {
        handlers: ['click-rail', 'drag-thumb', 'keyboard', 'wheel', 'touch'],
        wheelSpeed: 1,
        wheelPropagation: false,
        suppressScrollX: false,
        minScrollbarLength: 100,
        swipeEasing: true,
        scrollingThreshold: 500
    });

    // Hidden file input used by the load button.
    this.fileLoadABC = document.getElementById('fileLoadABC');
    this.fileLoadABC.addEventListener('change', function(event) { that.carregaPartitura(event); }, false);

    // Toolbar buttons and player controls.
    this.showEditorButton = document.getElementById(interfaceParams.showEditorBtn);
    this.showMapButton = document.getElementById(interfaceParams.showMapBtn);
    this.updateButton = document.getElementById(interfaceParams.updateBtn);
    this.loadButton = document.getElementById(interfaceParams.loadBtn);
    this.saveButton = document.getElementById(interfaceParams.saveBtn);
    this.printButton = document.getElementById(interfaceParams.printBtn);
    // player control
    this.playButton = document.getElementById(interfaceParams.playBtn);
    this.stopButton = document.getElementById(interfaceParams.stopBtn);
    this.currentPlayTimeLabel = document.getElementById(interfaceParams.currentPlayTimeLabel);

    this.showEditorButton.addEventListener("click", function (evt) {
        evt.preventDefault();
        this.blur();
        that.showEditor();
    }, false);
    this.showMapButton.addEventListener("click", function (evt) {
        evt.preventDefault();
        this.blur();
        that.showKeyboard();
    }, false);
    this.updateButton.addEventListener("click", function() {
        that.fireChanged();
    }, false);
    this.loadButton.addEventListener("click", function() {
        that.fileLoadABC.click();
    }, false);
    this.saveButton.addEventListener("click", function() {
        that.salvaPartitura();
    }, false);
    this.printButton.addEventListener("click", function(evt) {
        evt.preventDefault();
        this.blur();
        that.mapa.printPreview(that.renderedTune.div.innerHTML, ["#topBar","#mapaDiv","#partEditDiv"], that.renderedTune.abc.formatting.landscape);
    }, false);

    // Player callbacks: scrolling, time display and end-of-play cleanup.
    this.playerCallBackOnScroll = function( player ) {
        that.setScrolling(player);
    };
    this.playerCallBackOnPlay = function( player ) {
        var strTime = player.getTime().cTime;
        if(that.gotoMeasureButton)
            that.gotoMeasureButton.value = player.currentMeasure;
        if(that.currentPlayTimeLabel)
            that.currentPlayTimeLabel.innerHTML = strTime;
    };
    this.playerCallBackOnEnd = function( player ) {
        var warns = that.midiPlayer.getWarnings();
        that.playButton.title = SITE.translator.getResource("playBtn");
        that.playButton.innerHTML = ' <i class="ico-play"></i> ';
        that.renderedTune.printer.clearSelection();
        that.accordion.clearKeyboard(true);
        that.blockEdition(false);
        if( warns ) {
            var txt = "";
            warns.forEach(function(msg){ txt += msg + '<br>'; });
            that.warningsDiv.style.color = 'blue';
            that.warningsDiv.innerHTML = '<hr>'+txt+'<hr>';
        }
    };
    this.playButton.addEventListener("click", function() {
        window.setTimeout(function(){ that.startPlay( 'normal' );}, 0 );
    }, false);
    this.stopButton.addEventListener("click", function(evt) {
        evt.preventDefault();
        this.blur();
        that.blockEdition(false);
        if(that.currentPlayTimeLabel)
            that.currentPlayTimeLabel.innerHTML = "00:00";
        that.midiPlayer.stopPlay();
    }, false);
    this.midiPlayer.defineCallbackOnPlay( that.playerCallBackOnPlay );
    this.midiPlayer.defineCallbackOnEnd( that.playerCallBackOnEnd );
    this.midiPlayer.defineCallbackOnScroll( that.playerCallBackOnScroll );
};
// setup: open the editor for the accordion given by options.accordionId.
// Restores the last edited score from local storage (or demo text),
// compiles it, and applies persisted editor/keyboard layout preferences.
SITE.PartEdit.prototype.setup = function(options) {
    this.mapa.closeMapa();
    this.accordion.loadById(options.accordionId);
    this.setVisible(true);
    if( this.editorWindow.getString() === "" ) {
        var text = FILEMANAGER.loadLocal("ultimaPartituraEditada");
        if( ! text ) {
            text = this.getDemoText();
        }
        this.editorWindow.setString(text);
    }
    this.warningsDiv.style.display = SITE.properties.options.showWarnings? 'block':'none';
    this.fireChanged();
    this.editorWindow.restartUndoManager();
    this.Div.setSubTitle( '- ' + this.accordion.getTxtModel() );
    this.showEditor(SITE.properties.partEdit.editor.visible);
    if(SITE.properties.partEdit.editor.floating) {
        if( SITE.properties.partEdit.editor.maximized ) {
            this.editorWindow.container.dispatchAction('MAXIMIZE');
        } else {
            this.editorWindow.container.dispatchAction('POPOUT');
        }
    } else {
        this.editorWindow.container.dispatchAction('POPIN');
    }
    this.showKeyboard(SITE.properties.partEdit.keyboard.visible);
    this.keyboardWindow.setTitle(this.accordion.getTxtTuning() + ' - ' + this.accordion.getTxtNumButtons() );
    this.resize();
};
// resize: fit the workspace to the browser window and redistribute the
// vertical space between toolbar, warnings, editor and score canvas.
SITE.PartEdit.prototype.resize = function( ) {
    // redimensiona a workspace (resize the workspace)
    var winH = window.innerHeight
        || document.documentElement.clientHeight
        || document.body.clientHeight;
    var winW = window.innerWidth
        || document.documentElement.clientWidth
        || document.body.clientWidth;
    // -paddingTop 78
    var h = (winH -78 - 10 );
    var w = (winW - 8 );
    this.Div.topDiv.style.left = "3px";
    this.Div.topDiv.style.top = "82px";
    this.Div.topDiv.style.height = Math.max(h,200) +"px";
    this.Div.topDiv.style.width = Math.max(w,400) +"px";
    // NOTE(review): `w` is re-declared here, discarding the width above;
    // from this point on w/e track warnings/editor heights.
    var w = 0, e = 0;
    var c = this.controlDiv.clientHeight;
    var t = this.Div.dataDiv.clientHeight;
    // NOTE(review): checks SITE.properties.showWarnings, while setup()
    // reads SITE.properties.options.showWarnings — possibly inconsistent.
    if(! SITE.properties.showWarnings) {
        w = this.warningsDiv.clientHeight;
    }
    if(! SITE.properties.partEdit.editor.floating) {
        e = this.editorWindow.container.topDiv.clientHeight+4;
    }
    this.studioCanvasDiv.style.height = t-(w+e+c+6) +"px";
    this.posicionaTeclado();
    this.editorWindow.resize();
    (this.ps) && this.ps.update();
};
// posicionaTeclado ("position keyboard"): keep the floating keyboard
// window inside the viewport after a resize.
SITE.PartEdit.prototype.posicionaTeclado = function() {
    if( ! SITE.properties.partEdit.keyboard.visible ) return;
    var w = window.innerWidth;
    var k = this.keyboardWindow.topDiv;
    var x = parseInt(k.style.left.replace('px', ''));
    // pull the window back in when it sticks out on the right
    if( x + k.offsetWidth > w ) {
        x = (w - (k.offsetWidth + 50));
    }
    if(x < 0) x = 10;
    k.style.left = x+"px";
};
// Leaves the part editor and returns to the map view.
// Runs under a loader overlay; optionally persists properties, always backs
// up a non-empty score to local storage before clearing the editor.
SITE.PartEdit.prototype.closePartEdit = function(save) {
    var self = this;
    var loader = this.mapa.startLoader( "ClosePartEdit" );
    loader.start( function() {
        var text = self.editorWindow.getString();
        self.setVisible(false);
        self.editorWindow.setString("");
        self.midiPlayer.stopPlay();
        // persist SITE.properties only when the caller asked for it
        (save) && SITE.SaveProperties();
        if(text !== "" )
            FILEMANAGER.saveLocal( 'ultimaPartituraEditada', text );
        self.mapa.openMapa();
        loader.stop();
    }, '<br/>   '+SITE.translator.getResource('wait')+'<br/><br/>' );
};
SITE.PartEdit.prototype.showEditor = function(show) {
    // Show/hide the editor window; with no argument, toggle the current state.
    // The new state is persisted in SITE.properties and mirrored on the icon.
    var editorProps = SITE.properties.partEdit.editor;
    editorProps.visible = (typeof show === 'undefined') ? !editorProps.visible : show;
    var toggleIcon = document.getElementById('a2pI_showEditor');
    if (editorProps.visible) {
        this.editorWindow.setVisible(true);
        toggleIcon.setAttribute('class', 'ico-folder-open');
    } else {
        toggleIcon.setAttribute('class', 'ico-folder');
        this.editorWindow.setVisible(false);
    }
    this.resize();
};
// Shows/hides the floating keyboard window; with no argument, toggles it.
// Persists the new visibility and re-renders the keyboard when shown.
SITE.PartEdit.prototype.showKeyboard = function(show) {
    SITE.properties.partEdit.keyboard.visible =
        (typeof show === 'undefined'? ! SITE.properties.partEdit.keyboard.visible : show );
    this.accordion.render_opts.show = SITE.properties.partEdit.keyboard.visible;
    if(SITE.properties.partEdit.keyboard.visible) {
        this.keyboardWindow.setVisible(true);
        this.accordion.printKeyboard(this.keyboardWindow.dataDiv);
        document.getElementById('a2pI_showMap').setAttribute('class', 'ico-folder-open' );
        // re-clamp the window position in case the viewport changed
        this.posicionaTeclado();
    } else {
        // redundant with the assignment above, kept as-is
        this.accordion.render_opts.show = false;
        this.keyboardWindow.setVisible(false);
        document.getElementById('a2pI_showMap').setAttribute('class', 'ico-folder' );
    }
};
// Dispatches actions fired by the editor window's toolbar.
// Numeric action strings ('-11'..'11') transpose by that many semitones;
// the remaining actions manage the editor window itself.
SITE.PartEdit.prototype.editorCallback = function (action, elem) {
    switch(action) {
        case '0':
            // transpose by zero: nothing to do
            break;
        case '1': case '2': case '3': case '4': case '5': case '6':
        case '7': case '8': case '9': case '10': case '11':
        case '-1': case '-2': case '-3': case '-4': case '-5': case '-6':
        case '-7': case '-8': case '-9': case '-10': case '-11':
            this.fireChanged( parseInt(action), {force:true} );
            break;
        case 'OCTAVEUP':
            this.fireChanged(12, {force:true} );
            break;
        case 'OCTAVEDOWN':
            this.fireChanged(-12, {force:true} );
            break;
        case 'REFRESH':
            // re-parse/re-render without transposing
            this.fireChanged();
            break;
        case 'DOWNLOAD':
            this.salvaPartitura();
            break;
        case 'MAXIMIZE':
            this.editorWindow.maximizeWindow( true, SITE.properties.partEdit.editor );
            break;
        case 'RESTORE':
            this.editorWindow.maximizeWindow( false, SITE.properties.partEdit.editor );
            break;
        case 'POPIN':
            // dock the editor into the workspace, full width, 200px tall
            this.editorWindow.dockWindow(true, SITE.properties.partEdit.editor, 0, 0, "calc(100% - 5px)", "200px" );
            this.resize();
            break;
        case 'POPOUT':
            this.editorWindow.dockWindow(false, SITE.properties.partEdit.editor );
            this.resize();
            break;
        case 'RESIZE':
        case 'MOVE':
            // persist the window geometry into SITE.properties
            this.editorWindow.retrieveProps( SITE.properties.partEdit.editor );
            break;
        case 'CLOSE':
            this.showEditor(false);
            break;
    }
};
SITE.PartEdit.prototype.a2pCallback = function( e ) {
    // Top-level panel callback: 'CLOSE' saves and leaves, 'HELP' is a stub.
    if (e === 'CLOSE') {
        this.closePartEdit(true);
    } else if (e === 'HELP') {
        // Help dialog currently disabled:
        //this.mapa.showHelp('HelpTitle', 'PartEditTitle', '/html/geradorPartitura.pt_BR.html', { width: '1024', height: '600' } );
        alert( 'Not implemented yet!' );
    }
};
SITE.PartEdit.prototype.setVisible = function( visible ) {
    // Toggle the whole PartEdit panel via its container's display property.
    var display = visible ? 'block' : 'none';
    this.Div.parent.style.display = display;
};
// Re-parses and re-renders the score after the editor text changed.
// 'transpose' (optional int) shifts the tune by that many semitones.
// NOTE(review): several callers pass a second options argument
// ({force:true}) that this signature ignores — confirm whether intentional.
SITE.PartEdit.prototype.fireChanged = function(transpose) {
    var text = this.editorWindow.getString();
    if(text !== "" ) {
        // keep a local backup of the last edited score before parsing
        FILEMANAGER.saveLocal( 'ultimaPartituraEditada', text );
        this.parseABC(text, transpose);
        this.printABC();
    } else {
        // empty editor: clear subtitle, warnings and the rendered output
        this.editorWindow.container.setSubTitle( "" );
        this.warningsDiv.innerHTML = "";
        this.renderedTune.div.innerHTML = "";
        delete this.renderedTune.abc.midi;
    }
    this.resize();
};
// Parses ABC text into this.renderedTune (optionally transposed), updating
// the editor text, subtitle, key selector, MIDI data and the warnings panel.
SITE.PartEdit.prototype.parseABC = function(text, transpose) {
    transpose = transpose || 0;
    var transposer = new ABCXJS.parse.Transposer(transpose);
    var abcParser = new ABCXJS.parse.Parse( transposer, this.accordion );
    try {
        abcParser.parse(text, this.parserparams );
        this.renderedTune.text = abcParser.getStrTune();
        // the parser may normalize/transpose the text; reflect that in the editor
        if( this.renderedTune.text !== text ) {
            this.editorWindow.setString(this.renderedTune.text);
            //FILEMANAGER.saveLocal( 'ultimaPartituraEditada', this.renderedTune.text );
        }
    } catch(e) {
        waterbug.log( 'Could not parse ABC.' );
        waterbug.show();
    }
    this.renderedTune.abc = abcParser.getTune();
    this.renderedTune.title = this.renderedTune.abc.metaText.title ;
    if (this.editorWindow.keySelector) {
        // sync the key drop-down with the (possibly transposed) first voice
        this.editorWindow.keySelector.populate(transposer.keyToNumber(transposer.getKeyVoice(0)));
    }
    var warnings = abcParser.getWarnings() || [];
    if(this.renderedTune.title) {
        this.editorWindow.container.setSubTitle('- ' + this.renderedTune.abc.metaText.title );
        // report each newly edited title to analytics only once
        if( ! this.GApartEdit || this.GApartEdit !== this.renderedTune.abc.metaText.title ) {
            this.GApartEdit = this.renderedTune.abc.metaText.title;
            SITE.ga('send', 'event', 'Mapa5', 'partEdit', this.GApartEdit );
        }
    }else
        this.editorWindow.container.setSubTitle( "" );
    if ( this.midiParser ) {
        this.midiParser.parse( this.renderedTune.abc, this.accordion.loadedKeyboard );
        warnings = warnings.concat(this.midiParser.getWarnings() );
    }
    // red warnings on problems, green success message otherwise
    if(warnings.length>0) {
        this.warningsDiv.innerHTML = warnings.join('<br>');
        this.warningsDiv.style.color = 'red';
    } else {
        this.warningsDiv.innerHTML = 'Partitura gerada com sucesso!';
        this.warningsDiv.style.color = 'green';
    }
};
SITE.PartEdit.prototype.printABC = function() {
    // Render the parsed tune as SVG and hook up selection/media handling.
    var tune = this.renderedTune;
    tune.div.innerHTML = "";
    var svgOut = new SVG.Printer(tune.div);
    tune.printer = new ABCXJS.write.Printer(svgOut, {}, this.accordion.loadedKeyboard);
    tune.printer.printABC(tune.abc);
    tune.printer.addSelectListener(this);
    this.media.show(tune);
};
SITE.PartEdit.prototype.highlight = function(abcelem) {
    // During playback the player drives highlighting itself; do nothing.
    if (this.midiPlayer.playing) {
        return;
    }
    var partEditProps = SITE.properties.partEdit;
    if (partEditProps.keyboard.visible) {
        this.accordion.clearKeyboard(true);
        this.midiParser.setSelection(abcelem);
    }
    if (partEditProps.editor.visible) {
        this.editorWindow.setSelection(abcelem);
    }
};
// Clears only the text-window selection; the other elements are timing-driven.
SITE.PartEdit.prototype.unhighlight = function(abcelem) {
    if (SITE.properties.partEdit.editor.visible) {
        this.editorWindow.clearSelection(abcelem);
    }
};
// Mirrors the editor's text selection onto the rendered score.
// Un-forced calls debounce themselves: the first one sets 'updating' and
// schedules the real work 300ms later; repeats within that window no-op.
SITE.PartEdit.prototype.updateSelection = function (force) {
    var that = this;
    if (force) {
        var selection = that.editorWindow.getSelection();
        try {
            that.renderedTune.printer.rangeHighlight(selection);
        } catch (e) {
        } // maybe printer isn't defined yet?
        delete this.updating;
    } else {
        if (this.updating) return;
        this.updating = true;
        // Fixed: the original called that.updateSelection(true) immediately
        // and handed its (undefined) return value to setTimeout, so the 300ms
        // debounce never actually happened. Pass a callback instead.
        setTimeout(function () {
            that.updateSelection(true);
        }, 300);
    }
};
// Handles actions from the keyboard window's title-bar buttons, persisting
// each visual option back into SITE.properties so it survives reloads.
SITE.PartEdit.prototype.keyboardCallback = function( e ) {
    switch(e) {
        case 'MOVE':
            // remember where the user dragged the keyboard window
            var k = this.keyboardWindow.topDiv.style;
            SITE.properties.partEdit.keyboard.left = k.left;
            SITE.properties.partEdit.keyboard.top = k.top;
            break;
        case 'ROTATE':
            this.accordion.rotateKeyboard(this.keyboardWindow.dataDiv);
            SITE.properties.partEdit.keyboard.transpose = this.accordion.render_opts.transpose;
            SITE.properties.partEdit.keyboard.mirror = this.accordion.render_opts.mirror;
            break;
        case 'ZOOM':
            this.accordion.scaleKeyboard(this.keyboardWindow.dataDiv);
            SITE.properties.partEdit.keyboard.scale = this.accordion.render_opts.scale;
            break;
        case 'GLOBE':
            // switch the note-name notation (labels) on the keyboard
            this.accordion.changeNotation();
            SITE.properties.partEdit.keyboard.label = this.accordion.render_opts.label;
            break;
        case 'CLOSE':
            this.showKeyboard(false);
            break;
    }
};
SITE.PartEdit.prototype.salvaPartitura = function() {
    // Download the current tune as an .abcx file, if the browser supports it.
    if (!FILEMANAGER.requiredFeaturesAvailable()) {
        alert(SITE.translator.getResource("err_saving"));
        return;
    }
    var fileName = this.renderedTune.abc.metaText.title + ".abcx";
    FILEMANAGER.download(fileName, this.renderedTune.text);
};
SITE.PartEdit.prototype.carregaPartitura = function(evt) {
    // Load the picked file(s), then reset the input element so the same
    // file can be selected again later.
    var self = this;
    FILEMANAGER.loadLocalFiles(evt, function() {
        self.doCarregaPartitura(FILEMANAGER.files);
        evt.target.value = "";
    });
};
SITE.PartEdit.prototype.doCarregaPartitura = function(file) {
    // Put the first loaded file's content into the editor and re-render.
    var content = file[0].content;
    this.editorWindow.setString(content);
    this.fireChanged();
};
// Locks/unlocks editing while playback is active.
// NOTE(review): setReadOnly(!block) looks inverted relative to the name
// (blockEdition(true) -> setReadOnly(false)); confirm the editor API's
// setReadOnly polarity before touching this.
SITE.PartEdit.prototype.blockEdition = function( block ) {
    this.editorWindow.setReadOnly(!block);
    this.editorWindow.container.dispatchAction('READONLY');
    if( block ) {
        this.editorWindow.setEditorHighLightStyle();
    } else {
        this.editorWindow.clearEditorHighLightStyle();
        // give the cursor back to the user for typing
        this.editorWindow.aceEditor.focus();
    }
};
// Starts, pauses or resumes playback of the rendered tune.
// type === "normal" is regular playback (toggles the play/pause button);
// any other type starts a "didactic" play mode parameterized by 'value'.
SITE.PartEdit.prototype.startPlay = function( type, value ) {
    // remember the scroll position so setScrolling() can follow the music
    this.ypos = this.studioCanvasDiv.scrollTop;
    this.lastStaffGroup = -1;
    if( this.midiPlayer.playing) {
        // currently playing -> pause
        if (type === "normal" ) {
            this.playButton.title = SITE.translator.getResource("playBtn");
            this.playButton.innerHTML = ' <i class="ico-play"></i> ';
            this.midiPlayer.pausePlay();
        } else {
            this.midiPlayer.pausePlay(true);
        }
        // re-enable editing while paused
        this.blockEdition(false);
    } else {
        this.accordion.clearKeyboard();
        if(type==="normal") {
            this.blockEdition(true);
            if( this.midiPlayer.startPlay(this.renderedTune.abc.midi) ) {
                this.playButton.title = SITE.translator.getResource("pause");
                this.playButton.innerHTML = ' <i class="ico-pause"></i> ';
            }
        } else {
            if( this.midiPlayer.startDidacticPlay(this.renderedTune.abc.midi, type, value ) ) {
            }
        }
    }
};
// Auto-scrolls the score canvas so the staff group being played stays in
// view. Called by the player; only reacts when the staff group changes.
SITE.PartEdit.prototype.setScrolling = function(player) {
    if( !this.studioCanvasDiv || player.currAbsElem.staffGroup === this.lastStaffGroup ) return;
    this.lastStaffGroup = player.currAbsElem.staffGroup;
    // the first group's top acts as a fixed header offset
    var fixedTop = player.printer.staffgroups[0].top;
    var vp = this.studioCanvasDiv.clientHeight - fixedTop;
    var top = player.printer.staffgroups[player.currAbsElem.staffGroup].top-12;
    var bottom = top + player.printer.staffgroups[player.currAbsElem.staffGroup].height;
    // scroll only when the current group leaves the visible window
    if( bottom > vp+this.ypos || this.ypos > top-fixedTop ) {
        this.ypos = top;
        this.studioCanvasDiv.scrollTop = this.ypos;
    }
};
// Returns the demo score ("Oh! Susannah") in ABC notation, used to seed the
// editor. The literal uses '\'-continuations; ABC field lines must start at
// column 0, so the string body is intentionally not indented.
SITE.PartEdit.prototype.getDemoText = function() {
    return '\
X: 1\n\
T:Oh! Susannah\n\
F:/images/susannah.tablatura.png\n\
M:2/4\n\
L:1/4\n\
Q:100\n\
K:G\n\
V:melodia treble\n\
|"C"c c|e/2e/2 -e/2e/2|"G"d/2d/2 B/2G/2|"D7"A3/2G/4A/4|\\\n\
"G"B/2d/2 d3/4e/4|d/2B/2 G/2A/2|"G"B/2B/2 "D7"A/2A/2|"G"G2 |\n\
V:baixos bass\n\
| C, [C,E,G,] | C, z | G,, z | D, z | G,, z | G,, z | D, [D,^F,A,] | G,, z |\n\
V:tablatura accordionTab';
};
|
import { Express } from 'express';
import cookieSession from 'cookie-session';
import unless, { RequestHandler } from 'express-unless';
import * as config from '../config';
import authorizeRouter from './router.authorize';
import explorerRouter from './router.explorer';
import articleRouter from './router.article';
import userRouter from './router.user';
import mailRouter from './router.mail';
import toolsRouter from './router.tools';
import pictureRouter from './router.picture';
import draftRouter from './router.draft';
import articleTagRouter from './router.articleTag';
// CORS
const corsHandler: RequestHandler = (req, res, next) => {
const origin = req.headers.origin;
if (origin) {
//ๅฆๆๆฏๅๆบ็๏ผๅๆฒกๆoriginๅญๆฎต
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Methods', 'POST,GET,PUT,DELETE');
res.setHeader('Access-Control-Allow-Headers', 'Content-Type,Authorization');
res.setHeader('Access-Control-Max-Age', 604800);
res.setHeader('Access-Control-Allow-Credentials', 'true'); //ๅ
่ฎธ่ทจๅๅ่ฎพ็ฝฎcookie
}
next();
};
// Session cookie: signed, stores the logged-in user's id under 'uid'.
const cookieHandler = cookieSession({
  name: 'uid',
  secret: config.COOKIE_SECRET,
  maxAge: config.COOKIE_MAX_AGE,
});
// Auth guard: requests without a session uid get a 401.
const cookieChecker: RequestHandler = (req, res, next) => {
  const uid = req.session?.uid;
  if (uid) {
    return next();
  }
  res.status(401).send({
    status: 'error',
    message: 'ๆช็ปๅฝ',
  });
};
cookieChecker.unless = unless;
// Same guard, but skipped for whitelisted paths (e.g. login/registration).
// NOTE(review): the whitelist lives in config.DO_NOT_CHECK_REQUEST_PATH.
const cookieCheckerUnless = cookieChecker.unless({
  path: config.DO_NOT_CHECK_REQUEST_PATH,
});
// Wires global middleware and feature routers onto the app.
// Order matters: CORS first, then the session cookie, then the auth check
// (with its path whitelist), and only then the feature routers.
export default (app: Express) => {
  app.use(corsHandler);
  app.use(cookieHandler);
  app.use(cookieCheckerUnless);
  app.use(authorizeRouter);
  app.use(explorerRouter);
  app.use(articleRouter);
  app.use(userRouter);
  app.use(mailRouter);
  app.use(toolsRouter);
  app.use(pictureRouter);
  app.use(draftRouter);
  app.use(articleTagRouter);
};
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
import { AssessmentsProvider } from 'assessments/types/assessments-provider';
import { FeatureFlags } from 'common/feature-flags';
import { AssessmentStoreData } from 'common/types/store-data/assessment-result-data';
import { FeatureFlagStoreData } from 'common/types/store-data/feature-flag-store-data';
import { ScanMetadata, ToolData } from 'common/types/store-data/unified-data-interface';
import { TabStopRequirementState } from 'common/types/store-data/visualization-scan-result-data';
import { AssessmentJsonExportGenerator } from 'reports/assessment-json-export-generator';
import { AssessmentReportHtmlGenerator } from 'reports/assessment-report-html-generator';
import {
FastPassReportHtmlGenerator,
FastPassReportModel,
} from 'reports/fast-pass-report-html-generator';
import { ReportGenerator } from 'reports/report-generator';
import { ReportHtmlGenerator } from 'reports/report-html-generator';
import { IMock, Mock, MockBehavior, Times } from 'typemoq';
import { exampleUnifiedStatusResults } from '../common/components/cards/sample-view-model-data';
describe('ReportGenerator', () => {
    // Shared fixture data used across the tests below.
    const title = 'title';
    const url = 'http://url/';
    const description = 'description';
    const cardsViewDataStub = {
        cards: exampleUnifiedStatusResults,
        visualHelperEnabled: true,
        allCardsCollapsed: true,
    };
    const tabStopRequirementStateStub = {} as TabStopRequirementState;
    const toolDataStub: ToolData = {
        applicationProperties: { name: 'some app' },
    } as ToolData;
    const targetPage = {
        name: title,
        url: url,
    };
    const scanMetadataStub: ScanMetadata = {
        toolData: toolDataStub,
        targetAppInfo: targetPage,
    } as ScanMetadata;
    const featureFlagStoreDataStub: FeatureFlagStoreData = { stub: 'featureFlagStoreData' } as any;
    const fastPassReportModelStub: FastPassReportModel = {
        description,
        results: {
            automatedChecks: cardsViewDataStub,
            tabStops: tabStopRequirementStateStub,
        },
        targetPage: targetPage,
    };
    let fastPassReportHtmlGeneratorMock: IMock<FastPassReportHtmlGenerator>;
    let automatedChecksReportHtmlGeneratorMock: IMock<ReportHtmlGenerator>;
    let assessmentReportHtmlGeneratorMock: IMock<AssessmentReportHtmlGenerator>;
    let assessmentJsonExportGeneratorMock: IMock<AssessmentJsonExportGenerator>;
    let testSubject: ReportGenerator;
    beforeEach(() => {
        // Strict mocks: any call that was not explicitly set up will throw,
        // so each test pins exactly which generator the subject delegates to.
        automatedChecksReportHtmlGeneratorMock = Mock.ofType<ReportHtmlGenerator>(
            undefined,
            MockBehavior.Strict,
        );
        fastPassReportHtmlGeneratorMock = Mock.ofType<FastPassReportHtmlGenerator>(
            undefined,
            MockBehavior.Strict,
        );
        assessmentReportHtmlGeneratorMock = Mock.ofType(
            AssessmentReportHtmlGenerator,
            MockBehavior.Strict,
        );
        assessmentJsonExportGeneratorMock = Mock.ofType(
            AssessmentJsonExportGenerator,
            MockBehavior.Strict,
        );
        testSubject = new ReportGenerator(
            automatedChecksReportHtmlGeneratorMock.object,
            fastPassReportHtmlGeneratorMock.object,
            assessmentReportHtmlGeneratorMock.object,
            assessmentJsonExportGeneratorMock.object,
        );
    });
    // The FastPass report picks its generator based on the
    // newTabStopsDetailsView feature flag.
    describe('generateFastPassHtmlReport', () => {
        it('uses fastPassReportHtmlGenerator with FeatureFlags.newTabStopsDetailsView', () => {
            const featureFlagStoreData = { [FeatureFlags.newTabStopsDetailsView]: true };
            fastPassReportHtmlGeneratorMock
                .setup(m => m.generateHtml(fastPassReportModelStub))
                .returns(() => 'stub FastPass report');
            const actual = testSubject.generateFastPassHtmlReport(
                fastPassReportModelStub,
                scanMetadataStub,
                featureFlagStoreData,
            );
            expect(actual).toEqual('stub FastPass report');
        });
        it('uses automatedChecksReportHtmlGenerator without FeatureFlags.newTabStopsDetailsView', () => {
            const featureFlagStoreData = { [FeatureFlags.newTabStopsDetailsView]: false };
            automatedChecksReportHtmlGeneratorMock
                .setup(m => m.generateHtml(description, cardsViewDataStub, scanMetadataStub))
                .returns(() => 'stub automated checks report');
            const actual = testSubject.generateFastPassHtmlReport(
                fastPassReportModelStub,
                scanMetadataStub,
                featureFlagStoreData,
            );
            expect(actual).toEqual('stub automated checks report');
        });
    });
    test('generateAssessmentHtmlReport', () => {
        const assessmentStoreData: AssessmentStoreData = { stub: 'assessmentStoreData' } as any;
        const assessmentsProvider: AssessmentsProvider = { stub: 'assessmentsProvider' } as any;
        const assessmentDescription = 'generateAssessmentHtml-description';
        assessmentReportHtmlGeneratorMock
            .setup(builder =>
                builder.generateHtml(
                    assessmentStoreData,
                    assessmentsProvider,
                    featureFlagStoreDataStub,
                    targetPage,
                    assessmentDescription,
                ),
            )
            .returns(() => 'generated-assessment-html')
            .verifiable(Times.once());
        const actual = testSubject.generateAssessmentHtmlReport(
            assessmentStoreData,
            assessmentsProvider,
            featureFlagStoreDataStub,
            targetPage,
            assessmentDescription,
        );
        const expected = 'generated-assessment-html';
        expect(actual).toEqual(expected);
    });
});
|
import logging
class StreamManager:
    """Tracks a stream and a single connect callback, with error plumbing."""

    def __init__(self):
        # The underlying stream object; set by connect().
        self._stream = None
        # Callable invoked with the stream by execute_callbacks(), or None.
        self._connect_callbacks = None

    def connect(self, stream):
        """Remember the stream that will be handed to the connect callback."""
        self._stream = stream

    def disconnect(self):
        """Drop any pending connect callback."""
        if self._connect_callbacks:
            self._connect_callbacks = None

    def raise_error(self, error):
        """Raise *error* if a callback is pending; otherwise log the stream's error."""
        if self._connect_callbacks:
            self._connect_callbacks = None
            raise error
        logging.error(self._stream.error)

    def set_callbacks(self, callback):
        """Register the callable to run on connect."""
        self._connect_callbacks = callback

    def execute_callbacks(self):
        """Invoke the registered callback with the current stream, if any."""
        if self._connect_callbacks:
            self._connect_callbacks(self._stream)

    def handle_error(self, error):
        """Log *error* and clear the callback — only if one was registered."""
        if self._connect_callbacks:
            self._connect_callbacks = None
            logging.error(error)
def my_function(N):
    """Return an N x N matrix whose entry at (row, col) equals row + col."""
    result = []
    for row in range(N):
        result.append([row + col for col in range(N)])
    return result
#!/usr/bin/env bash
echo_time_step "[TODO] golang code style..."
# Fixed: quote the directory variable so paths with spaces/globs don't
# word-split (the trailing glob stays unquoted so it still expands).
gofmt "${gitlab_project_dir}"/*.go
|
#!/bin/bash
# Function to insert a comma after every occurrence of a specific word in a file
insert_comma_after_word() {
  # Fixed: locals declared, and $file_path quoted so filenames with spaces
  # or glob characters don't word-split.
  local file_path=$1
  local word_to_modify=$2
  # NOTE: the word is interpreted as a sed regex; callers must not pass
  # unescaped sed metacharacters or the '/' delimiter.
  sed -i "s/${word_to_modify}/${word_to_modify},/g" "$file_path"
}
# Example usage
insert_comma_after_word "input.txt" "<MOOS_BINARY>"
<gh_stars>1-10
from argparse import ArgumentParser
from runners.utils import load_yaml
import inspect
import textwrap
import os
from src import logging
def build_parser_for_yml_script():
    """
    Builds an ArgumentParser with a common setup. Used in the scripts.
    """
    # add_help=False: we register our own -h flag (store_true) so scripts can
    # print both this YML usage and their own command-line usage.
    common_parser = ArgumentParser(add_help=False)
    common_parser.add_argument(
        '-h',
        '--help',
        action='store_true',
        dest='help',
        help="""
        show this help message and exit.
        """
    )
    common_parser.add_argument(
        '-y',
        '--yml',
        type=str,
        help="""
        Path to .yml file containing command line arguments for the script. If the only key
        is 'jobs', then we assume it points to a list of jobs with parameters
        input_path and output_path. Each job is executed one after the other. The
        structure of each .yml file is up to you. If this is given, all other arguments
        to the script are not required (as they should be defined solely within the
        .yml).
        """
    )
    return common_parser
def sequential_job_execution(script_func, jobs):
    """
    Execute jobs one by one with a simple for loop.
    Args:
        script_func (function): Function to run.
        jobs (list): List of dictionaries containing arguments for function.
    """
    for job_kwargs in jobs:
        script_func(**job_kwargs)
def document_parser(script_name, reference):
    """
    Fancy function for documenting a parser easily. Runs the function to build
    the parser, then gets the parsers help texts and formats it into the
    function's doc string for sphinx. A bit hacky but works great!
    Args:
        script_name (str): Name of the script.
        reference (str): Where to point the reference function for the script
            (e.g. the script that it runs).
    """
    def decorator(parser_func):
        # Build the parser once, render its help, and rewrite the program
        # name sphinx would report into the real invocation.
        built_parser = parser_func()
        usage = built_parser.format_help()
        usage = usage.replace('sphinx-build', f'python -m scripts.{script_name} ')
        usage = textwrap.indent(usage, '\t\t ')
        usage = ' .. code-block:: none\n\n' + usage
        parser_func.__doc__ = (
            f"""
        Builds the parser for :py:mod:`scripts.{script_name}`.
        {usage}
        Also see the arguments to :py:func:`{reference}`.
        Returns:
            :class:`argparse.ArgumentParser`: The parser for this script.
        """
        )
        return parser_func
    return decorator
def cmd(script_func, parser_func, exec_func=sequential_job_execution):
    """
    Builds a parser for any script in the scripts/ directory. Scripts should have two
    main functions: 1) a function that actually runs the script and 2) a build_parser
    function that builds up an ArgumentParser with informative help text for the script.
    This function allows the command line arguments to be passed to the script either
    through the command line as normal or through a YAML file which has matching keyword
    arguments for the script. Positional arguments are discouraged.
    The arguments in the YAML file are checked by passing them back into the command
    line parser function before giving them to the script. This also allows for default
    values to be defined in the script argument parser.
    A script can be called multiple times using a YAML file by having a top-level key
    called 'jobs'. 'jobs' should contain a list where each item in the list is a
    set of arguments to be passed to the script one by one.
    For each script, simply add this like so::
        if __name__ == "__main__":
            cmd(script_func, parser_func)
    Then to run a script, simply do::
        python -m scripts.[script_name] --yml [path_to_yml_file] # for yml
        python -m scripts.[script_name] [--arg val] # for cmd line
    Arguments:
        script_func (function): A function that will take in the arguments as keyword
            arguments and perform some action.
        parser_func (function): A function that will build up the argument parser for
            the script.
    """
    # first check if environment variables exist
    if not os.getenv('DATA_DIRECTORY'):
        logging.info(
            """
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            |    It doesn't look like you sourced your environment variables! Make sure to   |
            |    run 'source setup/environment/[machine_name]_local.sh' before running scripts, |
            |    as the scripts depend on the environment variables.                         |
            ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            """
        )
        return
    jobs = []
    yml_parser = build_parser_for_yml_script()
    cmd_parser = parser_func()
    # Only the common (-h/-y) flags are parsed here; everything else is left
    # for the script's own parser.
    args = vars(yml_parser.parse_known_args()[0])
    if args['help']:
        # Show both usages, then bail out without running anything.
        print('Usage via YML file.')
        yml_parser_help = yml_parser.print_help()
        if cmd_parser:
            print('\nDirect usage via command line arguments.')
            cmd_parser_help = cmd_parser.print_help()
        return
    extra_args = {}
    if args['yml'] is None:
        # Plain command-line invocation: one job built from argv.
        args, unknown_args = cmd_parser.parse_known_args()
        unknown_args = [u.replace('--', '') for u in unknown_args]
        unknown_args = dict(zip(unknown_args[:-1:2], unknown_args[1::2]))
        args = vars(args)
        # Pass unknown flags through only if the script accepts **kwargs.
        script_signature = inspect.getfullargspec(script_func)
        if script_signature.varkw is not None:
            args.update(unknown_args)
        jobs.append(args)
    else:
        # YML invocation: either a single job dict, or a 'jobs' list plus
        # leftover top-level keys forwarded to exec_func.
        _args = load_yaml(args['yml'])
        _jobs = []
        if 'jobs' in _args:
            _jobs = _args.pop('jobs')
            extra_args = _args
        else:
            _jobs.append(_args)
        for job in _jobs:
            if cmd_parser:
                # Round-trip each job through the script's parser so YML
                # values are validated and defaults get filled in.
                args = []
                for key, val in job.items():
                    if isinstance(val, bool):
                        # booleans become bare --flags (only when True)
                        if val:
                            args.append(f'--{key}')
                    else:
                        args.append(f'--{key}')
                        args.append(str(val))
                args, unknown_args = cmd_parser.parse_known_args(args)
                unknown_args = [u.replace('--', '') for u in unknown_args]
                unknown_args = dict(zip(unknown_args[:-1:2], unknown_args[1::2]))
                args = vars(args)
                script_signature = inspect.getfullargspec(script_func)
                if script_signature.varkw is not None:
                    args.update(unknown_args)
                # Prefer the raw YML values over the stringified parsed ones.
                [job.pop(k) for k in args if k in job]
                args.update(job)
            else:
                args = job
            jobs.append(args)
    # Forward only the extra YML keys that exec_func actually declares.
    exec_args = inspect.getfullargspec(exec_func)
    for key in extra_args.copy():
        if key not in exec_args.args:
            extra_args.pop(key)
    exec_func(script_func, jobs, **extra_args)
package com.semmle.js.ast;
/** The common interface implemented by all AST node types. */
public interface INode extends ISourceElement {
  /**
   * Accept a visitor object, dispatching to the visit method for this node's
   * concrete type.
   *
   * @param v the visitor to dispatch to
   * @param c an arbitrary context value passed through to the visitor
   * @return whatever the visitor's visit method returns
   */
  public <C, R> R accept(Visitor<C, R> v, C c);

  /** Return the node's type tag. */
  public String getType();
}
|
<filename>resolver-audioplayer/src/main/java/com/iflytek/cyber/resolver/audioplayer/service/model/ProgressReport.java
package com.iflytek.cyber.resolver.audioplayer.service.model;
import android.os.Parcel;
import android.os.Parcelable;
public class ProgressReport implements Parcelable {
public long progressReportDelayInMilliseconds;
public long progressReportIntervalInMilliseconds;
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(this.progressReportDelayInMilliseconds);
dest.writeLong(this.progressReportIntervalInMilliseconds);
}
public ProgressReport() {
}
protected ProgressReport(Parcel in) {
this.progressReportDelayInMilliseconds = in.readLong();
this.progressReportIntervalInMilliseconds = in.readLong();
}
public static Creator<ProgressReport> CREATOR = new Creator<ProgressReport>() {
@Override
public ProgressReport createFromParcel(Parcel source) {
return new ProgressReport(source);
}
@Override
public ProgressReport[] newArray(int size) {
return new ProgressReport[size];
}
};
} |
#!/usr/bin/env bash
# Render kube assets (kubeconfig, installer, cluster-check) into KUBE_ASSETS_DIR.
set -eu
export LC_ALL=C

# Fixed: use BASH_SOURCE[0] explicitly and quote all expansions so paths with
# spaces don't word-split. KUBE_ASSETS_DIR must be exported by the caller
# ('set -u' makes a missing value fail fast).
ROOT=$(dirname "${BASH_SOURCE[0]}")
mkdir -p "${KUBE_ASSETS_DIR}"
source "${ROOT}/render-kubeconfig.sh"
source "${ROOT}/render-installer.sh"
source "${ROOT}/render-cluster-check.sh"
|
<filename>src/interface/presentation/batatinha/BatatinhaRoutes.js
module.exports = ({ batatinhaSchema, batatinhaController }) => {
return [
{
httpMethod: 'post',
routePath: '/',
schemaValidation: {
headers: batatinhaSchema.common.headers,
body: batatinhaSchema.createBatatinha.body
},
handler: batatinhaController.createBatatinha
},
{
httpMethod: 'get',
routePath: '/',
schemaValidation: {
headers: batatinhaSchema.common.headers,
query: batatinhaSchema.listBatatinha.query,
},
handler: batatinhaController.listBatatinha
},
{
httpMethod: 'get',
routePath: '/:batatinha_id',
schemaValidation: {
headers: batatinhaSchema.common.headers,
params: batatinhaSchema.getBatatinha.params
},
handler: batatinhaController.getBatatinha
},
{
httpMethod: 'patch',
routePath: '/:batatinha_id',
schemaValidation: {
headers: batatinhaSchema.common.headers,
params: batatinhaSchema.updateBatatinha.params,
body: batatinhaSchema.updateBatatinha.body
},
handler: batatinhaController.updateBatatinha
},
{
httpMethod: 'delete',
routePath: '/:batatinha_id',
schemaValidation: {
headers: batatinhaSchema.common.headers,
params: batatinhaSchema.deleteBatatinha.params,
},
handler: batatinhaController.deleteBatatinha
}
];
};
|
##########################################################################################
#### CHECK THE SEASONAL FORECAST EXECUTION ###############################################
#### ENSEMBLE MEMBER IS ALWAYS 1 FOR HINDCAST ############################################
##########################################################################################
# assumptions: 10 months , 9 collections:
# assumptions: 10 months ,19 collections:
# available on tape: 9 monthly (10 months) 9 daily (10 months) 2 diurnal (10 months)
# available on tape: 19 monthly (10 months) 19 daily (10 months) 2 diurnal (10 months)
# available on disk: 19 monthly (10 months) 2 daily (10 months) 2 diurnal (10 months) 2 daily (3 months jul25)
############################################################################################
# PROVIDE YEAR AND EXPERIMENT ID
# YEAR: YYYY (1999-2015)
# ID : MMMDD (jan-dec 1-31)
############################################################################################
# NOTE: this is a csh/tcsh script ('set', '@', 'endif' syntax).
set YEAR=$1
set ID=$2
# day-of-month taken from chars 4-5 of the MMMDD experiment id
set DD = `echo $ID | cut -c4-5`
set ENSN=1
set JOBNAME = "${ID}_${YEAR}R$ENSN"
# expected file counts on tape/disk for each collection layout
set NCOLLS=19
if ( $NCOLLS == 9 ) set ntape=200
if ( $NCOLLS == 9 ) set ndisk=114
if ( $NCOLLS == 14 ) set ntape=300
if ( $NCOLLS == 14 ) set ndisk=164
if ( $NCOLLS == 19 ) set ntape=400
if ( $NCOLLS == 19 ) set ndisk=236
set MM = 2
# experiments started after mid-month carry one extra subx month
if ( $DD > 15 ) then
    set ndisk = 239
    set MM = 3
endif
# bail out early when the experiment was never submitted
if ( -e $GEOSS2S/runh/$YEAR/${ID}/ens$ENSN/cap_restartIC ) then
    set cap_sta1 = `cat $GEOSS2S/runh/$YEAR/${ID}/ens$ENSN/cap_restartIC`
else
    echo "NO EXPERIMENT SUBMITTED: $YEAR $ID $ENSN "
    exit
endif
echo
echo "________________________________________________________________________________________"
echo JOB QUEUE STATUS:
qme | grep ${JOBNAME}
set cap_sta2 = `cat $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/cap_restart`
set cap_sta3 = `cat $GEOSS2S/runh/$YEAR/${ID}/ens$ENSN/CAP.rc | grep END_DATE | cut -c15-29`
echo
echo "_______________________________________________"
echo "____________DATE $YEAR $ID $ENSN"
echo "____________DATE START ${cap_sta1}"
echo "____________DATE DONE  ${cap_sta2}"
echo "____________DATE LAST  ${cap_sta3}"
echo "_______________________________________________"
# nn counts how many of the four cleanup conditions are met
@ nn = 0
@ afn = 0
echo 1- CHECK:
echo 1- IF ARCHIVE FILE NUMBER = 400: monthly 19x10+ diurnal 2x10 + daily 19x10
set afn1 = `ls -1 $ARCHIVE/GEOS_S2S/seasonal/Y$YEAR/${ID}/ens$ENSN/geosgcm_*/*nc4 | wc -l`
set afn2 = `ls -1 $ARCHIVE/GEOS_S2S/seasonal/Y$YEAR/${ID}/ens$ENSN/geosgcm*/*tar | wc -l`
@ afnn = $afn1 + $afn2
if ($afnn == $ntape) then
    @ nn = $nn + 1
    echo OK
else
    echo "   NOT READY: NUMBER FILES IN ARCHIVE IS: $afnn = $afn1 + $afn2"
endif
echo 2- CHECK:
echo "2- IF DISK FILE NUMBER == ${ndisk}: monthly 19x10+ diurnal 2x10 + daily subx 3x${MM}+ daily vis 2x10"
set mfnu1 = `ls -1 $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/geosgcm_*/*monthly*nc4 | wc -l`
set mfnu2 = `ls -1 $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/geosgcm_*/diurnal/*diurnal*nc4 | wc -l`
set mfnu3 = `ls -1 $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/geosgcm_*/*tar | wc -l`
@ mfnu = $mfnu1 + $mfnu2
@ mfnu = $mfnu + $mfnu3
if ($mfnu == $ndisk) then
    @ nn = $nn + 1
    echo OK
else
    echo "   NOT READY: NUMBER FILES ON DISK IS: $mfnu = $mfnu1 + $mfnu2 + $mfnu3"
endif
echo 3- CHECK:
echo 3- IF RUN IS FINISHED BASED ON cap_restart
set lcap = `cat $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/cap_restart | cut -c1-8`
set endd = `cat $GEOSS2S/runh/$YEAR/${ID}/ens$ENSN/CAP.rc | grep END_DATE | cut -c15-22`
set endh = `cat $GEOSS2S/runh/$YEAR/${ID}/ens$ENSN/CAP.rc | grep END_DATE | cut -c15-20`
if ($lcap == $endd) then
    @ nn = $nn + 1
    echo OK
else
    echo "   NOT READY: cap_restart VALUE NOT YET LAST: $lcap VS $endd"
    set runtag="${ID}_${YEAR}R$ENSN"
    qme | grep $runtag
    set qstat = $status
    if ($qstat != 0) echo "   JOB $runtag IS NOT RUNNING"
endif
echo 4- CHECK:
echo "4- IF HOLDING CONTENT IS ONLY IN $endh"
# NOTE(review): the holding directory is listed before the -e existence test
# below; order kept as-is — confirm whether the early exit should come first.
set nhld1 = `ls -1 $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/holding/geos*/*/*nc4 | wc -l`
set qstat = $status
set nhld2 = `ls -1 $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/holding/geos*/$endh/*nc4 | wc -l`
if ( ! -e $GEOSS2S/runx/$YEAR/${ID}/ens$ENSN/holding) then
    echo "   NO ACTION NECESSARY"
    exit
endif
if ( ($qstat == 0) & ($nhld1 == $nhld2) ) then
    @ nn = $nn + 1
    echo OK
else
    echo "   NOT READY: HOLDING CONTENT: $nhld1 $nhld2"
endif
echo SUMMARY:
if ($nn == 4) then
    echo "   COMPLETED: ALL CONDITIONS MET TO CLEAN DISK"
else
    echo "   NOT READY: $nn CONDITIONS MET FOR CLEANING"
endif
|
<reponame>vaniot-s/sentry
import React from 'react';
import styled from '@emotion/styled';
import * as Sentry from '@sentry/browser';
import * as ReactRouter from 'react-router';
import {Location} from 'history';
import omit from 'lodash/omit';
import isEqual from 'lodash/isEqual';
import {Organization, GlobalSelection} from 'app/types';
import {PageContent} from 'app/styles/organization';
import {Client} from 'app/api';
import {getParams} from 'app/components/organizations/globalSelectionHeader/getParams';
import {fetchTotalCount} from 'app/actionCreators/events';
import {loadOrganizationTags} from 'app/actionCreators/tags';
import Alert from 'app/components/alert';
import GlobalSelectionHeader from 'app/components/organizations/globalSelectionHeader';
import LightWeightNoProjectMessage from 'app/components/lightWeightNoProjectMessage';
import SentryDocumentTitle from 'app/components/sentryDocumentTitle';
import space from 'app/styles/space';
import SearchBar from 'app/views/events/searchBar';
import {trackAnalyticsEvent} from 'app/utils/analytics';
import withApi from 'app/utils/withApi';
import withOrganization from 'app/utils/withOrganization';
import withGlobalSelection from 'app/utils/withGlobalSelection';
import EventView, {isAPIPayloadSimilar} from 'app/utils/discover/eventView';
import {ContentBox, Main, Side} from 'app/utils/discover/styles';
import {generateQueryWithTag} from 'app/utils';
import localStorage from 'app/utils/localStorage';
import {decodeScalar} from 'app/utils/queryString';
import {DEFAULT_EVENT_VIEW} from './data';
import Table from './table';
import Tags from './tags';
import ResultsHeader from './resultsHeader';
import ResultsChart from './resultsChart';
import {generateTitle} from './utils';
// Props injected by the withApi/withOrganization/withGlobalSelection HOCs
// (see the default export at the bottom of this file) and by react-router.
type Props = {
  api: Client;
  router: ReactRouter.InjectedRouter;
  location: Location;
  organization: Organization;
  selection: GlobalSelection;
};
type State = {
  // Current query/view, re-derived from the URL in getDerivedStateFromProps.
  eventView: EventView;
  // Error message rendered above the results; '' when there is none.
  error: string;
  // Error code reported via setError (e.g. by <Table>); starts at 200.
  errorCode: number;
  // Total event count for the current query; null until fetchTotalCount resolves.
  totalValues: null | number;
  // Whether the tag-facets side panel is visible (persisted in localStorage).
  showTags: boolean;
};
const SHOW_TAGS_STORAGE_KEY = 'discover2:show-tags';

/**
 * Reads the persisted tag-facet visibility preference.
 * Only the exact stored string '1' counts as enabled.
 */
function readShowTagsState(): boolean {
  return localStorage.getItem(SHOW_TAGS_STORAGE_KEY) === '1';
}
/**
 * Discover v2 results view: search bar, chart, results table and optional
 * tag-facets panel for the EventView encoded in the current URL.
 */
class Results extends React.Component<Props, State> {
  // Re-derive the EventView from the URL on every navigation so the rendered
  // results always reflect the current query string.
  static getDerivedStateFromProps(nextProps: Props, prevState: State): State {
    const eventView = EventView.fromLocation(nextProps.location);
    return {...prevState, eventView};
  }

  state = {
    eventView: EventView.fromLocation(this.props.location),
    error: '',
    errorCode: 200,
    totalValues: null,
    showTags: readShowTagsState(),
  };

  componentDidMount() {
    const {api, organization, selection} = this.props;
    loadOrganizationTags(api, organization.slug, selection);
    this.checkEventView();
    this.fetchTotalCount();
  }

  componentDidUpdate(prevProps: Props, prevState: State) {
    const {api, location, organization, selection} = this.props;
    const {eventView} = this.state;
    this.checkEventView();
    const currentQuery = eventView.getEventsAPIPayload(location);
    const prevQuery = prevState.eventView.getEventsAPIPayload(prevProps.location);
    // Only refetch when the API payload meaningfully changed; cancel any
    // in-flight requests first so stale responses can't win.
    if (!isAPIPayloadSimilar(currentQuery, prevQuery)) {
      api.clear();
      this.fetchTotalCount();
      // Tags only need reloading when the time range or project set changed,
      // not on every query-text edit.
      if (
        !isEqual(prevQuery.statsPeriod, currentQuery.statsPeriod) ||
        !isEqual(prevQuery.start, currentQuery.start) ||
        !isEqual(prevQuery.end, currentQuery.end) ||
        !isEqual(prevQuery.project, currentQuery.project)
      ) {
        loadOrganizationTags(api, organization.slug, selection);
      }
    }
  }

  // Fetches the total event count for the current view; errors are reported
  // to Sentry rather than surfaced to the user.
  async fetchTotalCount() {
    const {api, organization, location} = this.props;
    const {eventView} = this.state;
    if (!eventView.isValid()) {
      return;
    }
    try {
      const totals = await fetchTotalCount(
        api,
        organization.slug,
        eventView.getEventsAPIPayload(location)
      );
      this.setState({totalValues: totals});
    } catch (err) {
      Sentry.captureException(err);
    }
  }

  checkEventView() {
    const {eventView} = this.state;
    if (eventView.isValid()) {
      return;
    }
    // If the view is not valid, redirect to a known valid state.
    const {location, organization, selection} = this.props;
    const nextEventView = EventView.fromNewQueryWithLocation(
      DEFAULT_EVENT_VIEW,
      location
    );
    // Carry over the global project selection when the default view has none.
    if (nextEventView.project.length === 0 && selection.projects) {
      nextEventView.project = selection.projects;
    }
    ReactRouter.browserHistory.replace(
      nextEventView.getResultsViewUrlTarget(organization.slug)
    );
  }

  // Toggles the tag-facets panel, persisting the choice and recording an
  // analytics event.
  handleChangeShowTags = () => {
    const {organization} = this.props;
    trackAnalyticsEvent({
      eventKey: 'discover_v2.results.toggle_tag_facets',
      eventName: 'Discoverv2: Toggle Tag Facets',
      organization_id: parseInt(organization.id, 10),
    });
    this.setState(state => {
      const newValue = !state.showTags;
      localStorage.setItem(SHOW_TAGS_STORAGE_KEY, newValue ? '1' : '0');
      return {...state, showTags: newValue};
    });
  };

  handleSearch = (query: string) => {
    const {router, location} = this.props;
    const queryParams = getParams({
      ...(location.query || {}),
      query,
    });
    // do not propagate pagination when making a new search
    const searchQueryParams = omit(queryParams, 'cursor');
    router.push({
      pathname: location.pathname,
      query: searchQueryParams,
    });
  };

  // Pushes the chosen chart y-axis into the URL and records analytics.
  handleYAxisChange = (value: string) => {
    const {router, location} = this.props;
    const newQuery = {
      ...location.query,
      yAxis: value,
    };
    router.push({
      pathname: location.pathname,
      query: newQuery,
    });
    trackAnalyticsEvent({
      eventKey: 'discover_v2.y_axis_change',
      eventName: "Discoverv2: Change chart's y axis",
      organization_id: parseInt(this.props.organization.id, 10),
      y_axis_value: value,
    });
  };

  // Pushes the chosen chart display mode into the URL.
  handleDisplayChange = (value: string) => {
    const {router, location} = this.props;
    const newQuery = {
      ...location.query,
      display: value,
    };
    router.push({
      pathname: location.pathname,
      query: newQuery,
    });
  };

  getDocumentTitle(): string {
    const {eventView} = this.state;
    if (!eventView) {
      return '';
    }
    return generateTitle({eventView});
  }

  renderTagsTable() {
    const {organization, location} = this.props;
    const {eventView, totalValues} = this.state;
    return (
      <Side>
        <Tags
          generateUrl={this.generateTagUrl}
          totalValues={totalValues}
          eventView={eventView}
          organization={organization}
          location={location}
        />
      </Side>
    );
  }

  // Builds the link target used when a tag value in the facets panel is
  // clicked: the current view narrowed by that tag key/value.
  generateTagUrl = (key: string, value: string) => {
    const {organization} = this.props;
    const {eventView} = this.state;
    const url = eventView.getResultsViewUrlTarget(organization.slug);
    url.query = generateQueryWithTag(url.query, {
      key,
      value,
    });
    return url;
  };

  renderError(error: string) {
    if (!error) {
      return null;
    }
    return (
      <Alert type="error" icon="icon-circle-exclamation">
        {error}
      </Alert>
    );
  }

  // Passed down to <Table> so child components can surface request errors.
  setError = (error: string, errorCode: number) => {
    this.setState({error, errorCode});
  };

  render() {
    const {organization, location, router} = this.props;
    const {eventView, error, errorCode, totalValues, showTags} = this.state;
    const query = decodeScalar(location.query.query) || '';
    const title = this.getDocumentTitle();
    return (
      <SentryDocumentTitle title={title} objSlug={organization.slug}>
        <StyledPageContent>
          <LightWeightNoProjectMessage organization={organization}>
            <ResultsHeader
              errorCode={errorCode}
              organization={organization}
              location={location}
              eventView={eventView}
            />
            <ContentBox>
              <Top>
                {this.renderError(error)}
                <StyledSearchBar
                  organization={organization}
                  projectIds={eventView.project}
                  query={query}
                  fields={eventView.fields}
                  onSearch={this.handleSearch}
                />
                <ResultsChart
                  router={router}
                  organization={organization}
                  eventView={eventView}
                  location={location}
                  onAxisChange={this.handleYAxisChange}
                  onDisplayChange={this.handleDisplayChange}
                  total={totalValues}
                />
              </Top>
              {/* The table collapses to a single grid column when the tags
                  panel is shown next to it. */}
              <StyledMain isCollapsed={!!showTags}>
                <Table
                  organization={organization}
                  eventView={eventView}
                  location={location}
                  title={title}
                  setError={this.setError}
                  onChangeShowTags={this.handleChangeShowTags}
                  showTags={showTags}
                />
              </StyledMain>
              {showTags ? this.renderTagsTable() : null}
            </ContentBox>
          </LightWeightNoProjectMessage>
        </StyledPageContent>
      </SentryDocumentTitle>
    );
  }
}
// These styled components are used in getsentry to create a paywall page.
// Be careful changing their interfaces.

// Page wrapper with the default PageContent padding removed.
export const StyledPageContent = styled(PageContent)`
  padding: 0;
`;

export const StyledSearchBar = styled(SearchBar)`
  margin-bottom: ${space(2)};
`;

// Header area (error, search, chart); spans both grid columns.
export const Top = styled('div')`
  grid-column: 1/3;
  flex-grow: 0;
`;

// Results table column; shrinks to one column when the tags panel is open.
export const StyledMain = styled(Main)<{isCollapsed: boolean}>`
  grid-column: ${p => (p.isCollapsed ? '1/2' : '1/3')};
`;
/**
 * Defers mounting `<Results>` until the global selection header (GSH) is
 * ready, since `<Results>` issues API requests on mount.
 *
 * When the organization has the `global-views` feature we skip loading the
 * last-used projects: an empty project selection is valid there, and saved
 * queries may carry their own project filter.
 */
function ResultsContainer(props: Props) {
  const skipLoadLastUsed = props.organization.features.includes('global-views');

  return (
    <GlobalSelectionHeader skipLoadLastUsed={skipLoadLastUsed}>
      <Results {...props} />
    </GlobalSelectionHeader>
  );
}

export default withApi(withOrganization(withGlobalSelection(ResultsContainer)));
|
<reponame>AltschulerWu-Lab/EnteroidSeg<filename>enteroidseg/segmentation.py
"""
Cell-type specific segmentation pipelines
"""
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
plt.rcParams['image.cmap'] = u'Greys_r'
import numpy as np
import os
from scipy import ndimage as ndi
from skimage import color, filters, io, measure, morphology, restoration, segmentation
import warnings
from utils import imfuns, setting
class Segmentor:
    """
    General segmentation class; subclasses implement preprocess()/segment().

    Attributes:
        C (dict): segmentation parameters (looked up from setting.seg_params)
        im (float ndarray): image to be segmented
        im_smooth (float ndarray): smoothed image
        im_segmented (label ndarray): labeled objects from segmentation
        object_type: type of object being segmented (dna, crypt, edu, goblet, stem)
    """

    def __init__(self, im, object_type=None):
        """
        Loads attributes

        Args:
            im (float ndarray): image
            object_type (str): type of object to be segmented; selects the
                parameter set loaded by get_params()
        """
        self.im = imfuns.check_im(im)
        self.object_type = object_type
        # store image processing output
        self.im_smooth = []
        self.im_segmented = []
        self.get_params()

    def check_path(self, path):
        """Check if path directory exists; raises ValueError if not"""
        prefix = os.path.dirname(path)
        if not os.path.exists(prefix):
            raise ValueError('Output directory does not exist.')

    def denoise_image(self):
        """
        Abstracted version of denoise_bilateral function. Runs function on raw
        image using the BILATERAL_* constants from self.C
        """
        return restoration.denoise_bilateral(self.im, sigma_color=self.C['BILATERAL_SIGMA_COLOR'],
            sigma_spatial=self.C['BILATERAL_SIGMA_SPATIAL'], multichannel=False)

    def get_params(self):
        """Get segmentation parameter setting based on object type"""
        self.C = setting.seg_params[self.object_type]

    def label2rgb(self, im_labeled):
        """
        Abstracted version of label2rgb

        Args:
            im_labeled (labeled ndarray): regions to false color

        Returns:
            rgb ndarray: colored regions overlay on image (bg_label=0 treated
                as background)
        """
        return color.label2rgb(im_labeled, image=self.im, bg_label=0)

    def preprocess(self):
        """Runs preprocessing steps (e.g. smooth, threshold); overridden by subclasses"""
        pass

    def plot_results(self, save=False, show=True):
        """
        Plots results of main steps in pipeline; overridden by subclasses

        Args:
            save (bool): if True, saves output
            show (bool): if True, shows output
        """
        pass

    def run(self, plot=False, save=True):
        """
        Runs and saves segmentation pipeline. Optionally, save results of main pipeline steps

        Args:
            plot (bool): if True, saves results of main pipeline steps
            save (bool): if True, saves output of segmentation
        """
        self.preprocess()
        self.segment()
        if save:
            self.save()
        if plot:
            self.plot_results(save=True, show=False)

    def save(self):
        """
        Saves the segmentation (labeled ndarray) image and segmentation overlay (rgb ndarray) image
        """
        outpath_seg = setting.paths['segmentation'].format(object_type=self.object_type)
        outpath_overlay = setting.paths['overlay'].format(object_type=self.object_type)
        self.check_path(outpath_seg)
        self.check_path(outpath_overlay)
        with warnings.catch_warnings():
            # catches warnings (e.g. low contrast image)
            warnings.simplefilter("ignore")
            io.imsave(outpath_seg, np.array(self.im_segmented).astype(np.uint16))
            io.imsave(outpath_overlay, self.label2rgb(self.im_segmented))

    def segment(self):
        """Runs segmentation step of pipeline; overridden by subclasses"""
        pass

    def segment_watershed(self, im, im_thresh, params, compact=True, line=False):
        """
        Segmentation by first detecting cell locations using scale-space Laplacian of Gaussian blob
        detection. Cell boundaries are determined using watershed

        Args:
            im (ndarray): raw image
            im_thresh (bool ndarray): thresholded image
            params (dict): segmentation parameters
            compact (bool): use compact parameter for watershed
            line (bool): if True, draw separating lines in output

        Returns:
            tuple: (labeled ndarray of markers/seeds, labeled ndarray of
                segmented objects)
        """
        blobs = imfuns.find_blobs(im_thresh, params)
        markers = imfuns.blobs_to_markers(im.shape, blobs)
        im_segmented = self.watershed(im, markers, im_thresh, line=line, compact=compact)
        return markers, im_segmented

    def thresh_otsu(self, im):
        """
        Otsu thresholding modified by a factor (THRESHOLD_FACTOR). Also if the image is blank, the
        'threshold' is greater than the image max intensity (so nothing is selected)

        Args:
            im (ndarray): image to threshold

        Returns:
            float: adjusted Otsu threshold
        """
        try:
            otsu_thresh = filters.threshold_otsu(im)
            modified_otsu = self.C['THRESHOLD_FACTOR']*otsu_thresh
        except ValueError:
            # threshold_otsu raises on constant (blank) images
            modified_otsu = np.max(im) + 1
        return modified_otsu

    def watershed(self, im, markers, im_thresh, compact=True, line=False):
        """
        Slightly more abstracted watershed function call

        Args:
            im (ndarray): raw image
            markers (labeled ndarray): labeled seeds
            im_thresh (ndarray): is 0 at not-cell pixels
            compact (bool): if True, use given constant. Else, use 0
            line (bool): if True, draw separating lines in output

        Returns:
            labeled ndarray: segmented image, with objects below
                WATERSHED_MIN_SZ removed
        """
        if compact:
            compactness = self.C['WATERSHED_COMPACTNESS']
        else:
            compactness = 0
        # invert intensities so bright cells become basins for watershed
        im_inverted = ((1-im)*setting.max_px_val).astype(int)
        im_watershed = segmentation.watershed(im_inverted, markers, compactness=compactness,
            connectivity=self.C['WATERSHED_CONN'], mask=im_thresh!=0, watershed_line=line)
        return imfuns.remove_small_objects(im_watershed, self.C['WATERSHED_MIN_SZ'])
class Crypt_Finder(Segmentor):
    """
    Crypt Segmentation

    Attributes:
        im_dna (ndarray): dna stain image, used to remove nuclear bleed-through
        im_threshed (float ndarray): thresholded image (0 below threshold,
            original intensity above), produced by threshold()
    """

    def __init__(self, im, im_dna=None):
        Segmentor.__init__(self, im, object_type='crypt')
        self.im_dna = imfuns.check_im(im_dna)
        # storing images
        # Fix: this was previously initialized as `im_thresh`, but
        # threshold()/segment()/plot_results() all use `im_threshed`; the
        # initialized attribute was dead and the used one did not exist
        # before threshold() ran.
        self.im_threshed = []

    def preprocess(self):
        self.threshold()

    def segment(self):
        # connected components of the thresholded mask become crypt objects
        self.im_segmented = measure.label(self.im_threshed>0)

    def threshold(self):
        """Threshold by first removing nuclear stain bleed through"""
        im_subtracted = imfuns.subtract(self.im, self.C['DNA_FACTOR']*self.im_dna)
        im_mask = imfuns.imthresh(im_subtracted, self.C['THRESH']) > 0
        # close small gaps, then open to remove thin spurs
        im_closed = morphology.binary_closing(im_mask, selem=morphology.disk(self.C['MORPH_CLOSING_SZ']))
        im_opened = morphology.binary_opening(im_closed, selem=morphology.disk(self.C['MORPH_OPENING_SZ']))
        self.im_threshed = imfuns.remove_small_objects(im_opened, self.C['MIN_SZ'])
        self.im_threshed = imfuns.mask_im(self.im, self.im_threshed)

    def plot_results(self, save=False, show=True):
        """
        Plots results of main steps in pipeline

        Args:
            save (bool): if True, saves output
            show (bool): if True, shows output
        """
        fig, axes = plt.subplots(2, 2, figsize=(15,15), sharex=True, sharey=True)
        ax = axes.ravel()
        ax[0].imshow(self.im)
        ax[0].set_title('Raw Image')
        ax[1].imshow(self.im_threshed)
        ax[1].set_title('Thresholded Image')
        ax[2].imshow(self.label2rgb(self.im_segmented))
        ax[2].set_title('Segmentation')
        plt.suptitle('{object_type:s} segmentation'.format(object_type=self.object_type))
        if save:
            outpath = setting.paths['result'].format(object_type=self.object_type)
            fig.savefig(outpath)
            plt.close()
        if show:
            plt.show()
class Goblet_Segmentor(Segmentor):
    """
    Goblet Segmentation

    Attributes:
        im_thresh (float ndarray): threshed image where under threshold has value 0, over threshold has
            original value
    """

    def __init__(self, im):
        """
        See superclass Segmentor
        """
        Segmentor.__init__(self, im, object_type='goblet')
        # storing images
        self.im_thresh = []

    def preprocess(self):
        self.smooth()
        self.threshold()

    def segment(self):
        markers, self.im_segmented = self.segment_watershed(self.im, self.im_thresh, self.C['LOG_BLOB'])

    def smooth(self):
        """
        Smooths image using median filtering
        """
        self.im_smooth = imfuns.filter_median(self.im, self.C['MEDIAN_FILTER_SZ'])

    def threshold(self):
        """
        Thresholds by first finding the Otsu threshold. Holes are removed from Otsu thresholded image and
        convex hulls are created for foreground objects. Foreground objects are expanded to fill the convex
        hulls.

        Returns:
            float: the Otsu threshold value used
        """
        thresh_val = self.thresh_otsu(self.im_smooth)
        # convex hull threshold result
        thresh_mask = self.im_smooth > thresh_val
        thresh_mask = ndi.binary_fill_holes(thresh_mask)
        thresh_mask = morphology.convex_hull_object(thresh_mask)
        # Fix: copy before masking. Previously im_thresh aliased im_smooth, so
        # the in-place zeroing below silently clobbered the smoothed image
        # (Nuclear_Segmentor.threshold copies explicitly for the same reason).
        self.im_thresh = self.im_smooth.copy()
        self.im_thresh[~thresh_mask] = 0
        return thresh_val

    def plot_results(self, save=False, show=True):
        """
        Plots results of main steps in pipeline

        Args:
            save (bool): if True, saves output
            show (bool): if True, shows output
        """
        fig, axes = plt.subplots(1, 3, figsize=(21,7), sharex=True, sharey=True)
        ax = axes.ravel()
        ax[0].imshow(self.im)
        ax[0].set_title('Raw Image')
        ax[1].imshow(self.im_thresh)
        ax[1].set_title('Smoothed & Thresholded Image')
        ax[2].imshow(self.label2rgb(self.im_segmented))
        ax[2].set_title('Goblet Segmentation')
        plt.suptitle('goblet segmentation')
        if save:
            outpath = setting.paths['result'].format(object_type=self.object_type)
            fig.savefig(outpath)
            plt.close()
        if show:
            plt.show()
class Nuclear_Segmentor(Segmentor):
    """
    Nuclear Segmentation: two-pass (sparse then dense) watershed segmentation
    of a nuclear stain image.

    Attributes:
        im_thresh (float ndarray): threshed image where under threshold has value 0, over threshold has
            original value
        im_clumps (float ndarray): image of nuclear stain showing only clump regions
        seg_firstpass (labeled ndarray): first pass segmentation prior to filtering out clumps
        seg_dense (labeled ndarray): dense segmentation result
        seg_sparse (labeled ndarray): sparse segmentation result
    """

    def __init__(self, im):
        """
        See superclass Segmentor
        """
        Segmentor.__init__(self, im, object_type='dna')
        # storing images
        self.im_thresh = []
        self.seg_firstpass = []
        self.seg_sparse = []
        self.im_clumps = []
        self.seg_dense = []

    def find_clumps(self):
        """
        Find clumps in segmentation image. Clumps include: all objects larger than SEG_SINGLE_MAX_SZ and
        objects between SEG_SINGLE_MIN_SZ and SEG_SINGLE_MAX_SZ that are irregular. Creates a raw image
        masked to show only clumped regions. Filters clumps from sparse segmentation
        """
        seg_large_clumps = imfuns.remove_small_objects(self.seg_firstpass, self.C['SEG_SINGLE_MAX_SZ'])
        seg_mixed = imfuns.remove_small_objects(self.seg_firstpass, self.C['SEG_SINGLE_MIN_SZ'])
        # keep only the mid-sized objects (large clumps handled separately)
        seg_mixed[seg_large_clumps!=0]=0
        seg_mixed_irregular = self.find_irregular_objects(seg_mixed, self.C['SEG_CLUMP_SOLIDITY'])
        seg_clumps = np.maximum(seg_mixed_irregular, seg_large_clumps)
        # NOTE(review): the `~seg_clumps` indexing below assumes
        # imfuns.remove_small_holes returns a boolean mask — confirm in imfuns.
        seg_clumps = imfuns.remove_small_holes(seg_clumps, self.C['SEG_CLOSE_HOLES'])
        self.im_clumps = np.copy(self.im)
        self.im_clumps[~seg_clumps]=0
        self.seg_sparse = np.copy(self.seg_firstpass)
        self.seg_sparse[self.im_clumps!=0] = 0

    def find_irregular_objects(self, im, solidity_thresh):
        """
        Identifies irregular objects in image

        Args:
            im (labeled ndarray): image of objects
            solidity_thresh (float): cutoff of solidity value below which an object is irregular

        Returns:
            labeled ndarray: Containing only irregular objects in image
        """
        irregular_labels = [x.label for x in measure.regionprops(im) if x.solidity < solidity_thresh]
        im_irregular = np.copy(im)
        im_irregular[~np.isin(im, irregular_labels)] = 0
        return im_irregular

    def preprocess(self):
        self.threshold()

    def segment(self):
        """
        Runs first sparse segmentation, then dense segmentation. Final output combines the two results
        """
        self.segment_sparse()
        self.find_clumps()
        self.segment_dense()
        self.segment_combine()

    def segment_combine(self):
        """
        Combine sparse and dense seg. Dense labels are offset so they do not
        collide with sparse labels; the offset background is reset to 0.
        """
        label_add = np.max(self.seg_sparse)+1
        seg_dense_relabeled = self.seg_dense + label_add
        seg_dense_relabeled[seg_dense_relabeled == label_add] = 0
        self.im_segmented = np.maximum(seg_dense_relabeled, self.seg_sparse)

    def segment_dense(self):
        """
        Performs dense segmentation by segmenting the clumps using dense seg parameters
        """
        markers, self.seg_dense = self.segment_watershed(self.im_clumps, self.im_clumps,
            self.C['LOG_DENSE'], line=True)

    def segment_sparse(self):
        """
        Performs sparse segmentation
        """
        markers, self.seg_firstpass = self.segment_watershed(self.im, self.im_thresh,
            self.C['LOG_SPARSE'], compact=False)

    def threshold(self):
        """
        Threshold using Otsu (applied to the bilateral-denoised image)

        Returns:
            float: the Otsu threshold value used
        """
        self.im_smooth = self.denoise_image()
        thresh_val = self.thresh_otsu(self.im_smooth)
        self.im_thresh = np.copy(self.im_smooth)
        self.im_thresh[self.im_smooth < thresh_val] = 0
        return thresh_val

    def plot_results(self, save=False, show=True):
        """
        Plots results of main steps in pipeline

        Args:
            save (bool): if True, saves output
            show (bool): if True, shows output
        """
        fig, axes = plt.subplots(2, 4, figsize=(28,14), sharex=True, sharey=True)
        ax = axes.ravel()
        ax[0].set_title('Max projected image')
        ax[0].imshow(self.im)
        ax[1].set_title('Sparse segmentation')
        ax[1].imshow(self.label2rgb(self.seg_sparse))
        ax[4].set_title('Thresholded image')
        ax[4].imshow(self.im_thresh)
        ax[5].set_title('Dense segmentation')
        ax[5].imshow(self.label2rgb(self.seg_dense))
        # final result occupies the right half of the figure
        plt.subplot(1,2,2, sharex=ax[0], sharey=ax[0])
        plt.title('Final Segmentation')
        plt.imshow(self.label2rgb(self.im_segmented))
        plt.suptitle('{object_type:s} segmentation'.format(object_type=self.object_type))
        if save:
            outpath = setting.paths['result'].format(object_type=self.object_type)
            fig.savefig(outpath)
            plt.close()
        if show:
            plt.show()
class EdU_Segmentor(Nuclear_Segmentor):
    """
    EdU segmentation.

    Runs the identical two-pass pipeline as Nuclear_Segmentor; only the
    parameter set differs, selected by switching object_type to 'edu' and
    reloading parameters.
    """

    def __init__(self, im):
        # Full nuclear initialization first (this loads the 'dna' parameter
        # set), then swap in the EdU-specific parameters.
        super(EdU_Segmentor, self).__init__(im)
        self.object_type = 'edu'
        self.get_params()
class Stem_Segmentor(Crypt_Finder):
    """
    Stem cell segmentation: identifies stem nuclei as nuclei inside crypts,
    excluding Paneth nuclei and nuclei only partially inside a crypt.

    Attributes:
        im_dna (ndarray): dna stain image
        objects_paneth: pre-segmented Paneth objects, or None
        objects_crypt (labeled ndarray): crypt segmentation of self.im
        objects_dna (labeled ndarray): nuclear segmentation of self.im_dna
    """

    def __init__(self, im, im_dna, objects_paneth=None):
        """
        See superclass Crypt_Finder

        Args:
            im: crypt marker image
            im_dna: dna stain image
            objects_paneth: pre-segmented Paneth objects, or None to skip
                Paneth filtering
        """
        # NOTE(review): Crypt_Finder.__init__ is called without im_dna, so the
        # base class runs imfuns.check_im(None) before self.im_dna is set
        # below — confirm check_im(None) is safe.
        Crypt_Finder.__init__(self, im)
        self.object_type = 'stem'
        self.get_params()
        self.im_dna = imfuns.check_im(im_dna)
        self.objects_paneth = objects_paneth
        # input segmentations are computed eagerly at construction time
        self.objects_crypt = self.segment_crypt()
        self.objects_dna = self.segment_dna()

    def segment_crypt(self):
        """Segment required input objects (crypts from self.im)"""
        crypt_seg = Crypt_Finder(self.im, im_dna=self.im_dna)
        crypt_seg.run(save=False)
        return crypt_seg.im_segmented

    def segment_dna(self):
        """Segment required input objects (nuclei from self.im_dna)"""
        nuclei_seg = Nuclear_Segmentor(self.im_dna)
        nuclei_seg.run(save=False)
        return nuclei_seg.im_segmented

    def filter_paneth(self):
        """
        Filter out Paneth nuclei (assigned as nuclei closest to centroid of Paneth objects)
        """
        paneth_labels = imfuns.assign_centroids(self.im_segmented, self.objects_paneth)
        self.im_segmented = imfuns.remove_regions(self.im_segmented, paneth_labels)

    def filter_partial(self):
        """
        Filter out nuclei partially in the crypt. Partial nuclei are defined as nuclei
        where ratio of the area outside the crypt to the area inside the crypt > PARTIAL_RATIO.
        Overwrites self.im_segmented from the precomputed dna/crypt objects.
        """
        self.im_segmented = imfuns.overlap_regions(self.objects_dna,
            self.objects_crypt, self.C['PARTIAL_RATIO'])

    def preprocess(self):
        # inputs are already segmented in __init__; nothing to preprocess
        pass

    def segment(self):
        """
        Identify stem nuclei in crypts (filter out Paneth and partial nuclei)
        """
        self.filter_partial()
        if self.objects_paneth is not None:
            self.filter_paneth()

    def plot_results(self, save=False, show=True):
        """
        Plots results of main steps in pipeline

        Args:
            save (bool): if True, saves output
            show (bool): if True, shows output
        """
        fig, axes = plt.subplots(2, 2, figsize=(15,15), sharex=True, sharey=True)
        ax = axes.ravel()
        ax[0].imshow(self.im)
        ax[0].set_title('Raw Image')
        ax[1].imshow(self.label2rgb(self.objects_crypt))
        ax[1].set_title('Crypt Objects')
        ax[2].imshow(self.label2rgb(self.objects_dna))
        ax[2].set_title('Nuclear Objects')
        ax[3].imshow(self.label2rgb(self.im_segmented))
        ax[3].set_title('Segmentation')
        plt.suptitle('{object_type:s} segmentation'.format(object_type=self.object_type))
        if save:
            outpath = setting.paths['result'].format(object_type=self.object_type)
            fig.savefig(outpath)
            plt.close()
        if show:
            plt.show()
|
<gh_stars>1-10
//
// WavefieldScanning.cpp
// AxiSEM3D
//
// Created by <NAME> on 5/28/20.
//  Copyright © 2020 <NAME>. All rights reserved.
//
// wavefield scanning
#include "WavefieldScanning.hpp"
#include "SE_Model.hpp"
#include "Domain.hpp"
#include "inparam.hpp"
#include "timer.hpp"
#include "io.hpp"
#include "bstring.hpp"
// setup
// Builds a WavefieldScanning object from the inparam configuration, prepares
// the GLL points for scanning, reports the settings, and hands the object to
// the domain.
//   dt            - simulation time step
//   period        - mesh period used to derive the scanning interval
//   numTotalSteps - total number of time steps (only used for verbose output)
void WavefieldScanning::setup(double dt, double period, int numTotalSteps,
                              const SE_Model &sem, Domain &domain) {
    // create
    std::unique_ptr<WavefieldScanning> ws =
    std::make_unique<WavefieldScanning>();

    // inparam
    timer::gPreloopTimer.begin("Building from inparam");
    const InparamYAML &gm = inparam::gInparamNr;
    const std::string &rt = "wavefield_scanning";
    // file
    ws->mFileName = gm.get<std::string>(rt + ":output_file");
    // tolerance
    // H1 values are user-facing amplitudes; they are squared into the H2
    // members so runtime comparisons can avoid square roots.
    double tolFourierH1 =
    gm.getWithBounds(rt + ":threshold_Fourier_convergence", 1e-4, 1e-1);
    double relTolH1 =
    gm.getWithBounds(rt + ":relative_amplitude_skipped", 0., 1.);
    double absTolH1 =
    gm.getWithBounds(rt + ":advanced:absolute_amplitude_skipped", 1e-14, 1e-10);
    ws->mTolFourierH2 = tolFourierH1 * tolFourierH1;
    ws->mRelTolH2 = relTolH1 * relTolH1;
    ws->mAbsTolH2 = absTolH1 * absTolH1;
    ws->mMaxNumPeaks = gm.getWithBounds(rt + ":advanced:max_num_peaks", 1);
    // vertex
    bool vertexOnly = gm.get<bool>(rt + ":advanced:vertex_only");
    timer::gPreloopTimer.ended("Building from inparam");

    // time step
    // scan every (period / nStepsPerPeriod) of simulated time, but never
    // less than one time step apart
    int nStepsPerPeriod =
    gm.getWithBounds(rt + ":advanced:num_steps_per_mesh_period", 4);
    ws->mScanningInterval =
    std::max((int)round(period / nStepsPerPeriod / dt), 1);

    // prepare points for scanning
    timer::gPreloopTimer.begin("Initializing scanning on GLL points");
    sem.initScanningOnPoints(vertexOnly);
    timer::gPreloopTimer.ended("Initializing scanning on GLL points");

    // verbose
    if (io::gVerbose != io::VerboseLevel::None) {
        std::stringstream ss;
        using namespace bstring;
        ss << boxTitle("Wavefield Scanning");
        ss << boxEquals(0, 40, "output file for scanning result",
                        ws->mFileName);
        ss << boxEquals(0, 40, "threshold for Fourier series convergence",
                        tolFourierH1);
        ss << boxEquals(0, 40, "relative amplitude skipped for scanning",
                        relTolH1);
        ss << boxEquals(0, 40, "absolute amplitude skipped for scanning",
                        absTolH1);
        ss << boxEquals(0, 40, "maximum number of energy peaks",
                        ws->mMaxNumPeaks);
        ss << boxEquals(0, 40, "perform scanning only on vertex points",
                        vertexOnly);
        ss << boxEquals(0, 40, "# time steps scanned per mesh period",
                        nStepsPerPeriod);
        ss << boxEquals(0, 40, "# time steps between two scanning steps",
                        ws->mScanningInterval);
        // round up: a trailing partial interval still gets one scan
        ss << boxEquals(0, 40, "# time steps scanned in total",
                        numTotalSteps / ws->mScanningInterval +
                        (numTotalSteps % ws->mScanningInterval > 0));
        ss << boxBaseline() << "\n\n";
        io::cout << ss.str();
    }

    // release scanning to domain
    domain.setWavefieldScanning(ws);
}
|
<filename>belvo/resources/__init__.py
from belvo.resources.accounts import Accounts # noqa
from belvo.resources.balances import Balances # noqa
from belvo.resources.incomes import Incomes # noqa
from belvo.resources.institutions import Institutions # noqa
from belvo.resources.invoices import Invoices # noqa
from belvo.resources.links import Links # noqa
from belvo.resources.owners import Owners # noqa
from belvo.resources.recurring_expenses import RecurringExpenses # noqa
from belvo.resources.risk_insights import RiskInsights # noqa
from belvo.resources.statements import Statements # noqa
from belvo.resources.tax_compliance_status import TaxComplianceStatus # noqa
from belvo.resources.tax_returns import TaxReturns # noqa
from belvo.resources.tax_status import TaxStatus # noqa
from belvo.resources.transactions import Transactions # noqa
from belvo.resources.widget_token import WidgetToken # noqa
|
"""Exports AWS Inspector findings (High severity by default) to InspectorFindings.csv."""
import boto3
import csv
import os

# Remove any report left by a previous run so rows are not appended to stale data.
if os.path.exists('InspectorFindings.csv'):
    os.remove('InspectorFindings.csv')
    print("Running Cleanup... Removing 'InspectorFindings.csv'...")
else:
    print("File 'InspectorFindings.csv' does not exist to be removed")

# AssessmentRunArn = 'fill in and uncomment this (and its use in paginate() below) to limit to a specific assessment run.'
# RulesPackageArn = "Fill in and uncomment this (and its use in finding_filter below) to limit to a specific rules package arn"
AWS_Region = "us-west-2"
max_results = 250000

inspector = boto3.client('inspector', region_name=AWS_Region)
paginator = inspector.get_paginator('list_findings')

finding_filter = {
    'severities': [  # Uncomment the severities you want in the report. As is, only High findings are exported.
        'High',
        # 'Medium',
        # 'Low',
        # 'Informational',
    ],
    'rulesPackageArns': [
        # RulesPackageArn,
    ],
}

# Open the report once and stream rows into it (previously the file was
# re-opened in append mode for every single finding).
with open('InspectorFindings.csv', 'a', newline='') as file:
    writer = csv.writer(file)
    for findings in paginator.paginate(
        maxResults=max_results,
        assessmentRunArns=[
            # AssessmentRunArn,
        ],
        filter=finding_filter
    ):
        for finding_arn in findings['findingArns']:
            response = inspector.describe_findings(
                findingArns=[
                    finding_arn,
                ],
                locale='EN_US'
            )
            # describe_findings was called with one ARN, so take the first entry
            finding = response['findings'][0]
            row = (
                finding['title'],
                finding['assetAttributes']['hostname'],
                finding['assetAttributes']['amiId'],
                finding['assetType'],
                finding['id'],
                finding['numericSeverity'],
                finding['recommendation'],
            )
            writer.writerow(row)
|
#!/bin/sh
#
# Copyright (c) 2016 Marcin Rataj
# MIT Licensed; see the LICENSE file in this repository.
#

test_description="Test HTTP Gateway CORS Support"

# Configure permissive CORS headers on both the Gateway and the API.
test_config_ipfs_cors_headers() {
  ipfs config --json Gateway.HTTPHeaders.Access-Control-Allow-Origin '["*"]'
  ipfs config --json Gateway.HTTPHeaders.Access-Control-Allow-Methods '["PUT", "GET", "POST"]'
  ipfs config --json Gateway.HTTPHeaders.Access-Control-Allow-Headers '["X-Requested-With"]'
  ipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["*"]'
  ipfs config --json API.HTTPHeaders.Access-Control-Allow-Methods '["PUT", "GET", "POST"]'
  ipfs config --json API.HTTPHeaders.Access-Control-Allow-Headers '["X-Requested-With"]'
}

. lib/test-lib.sh

test_init_ipfs
test_config_ipfs_cors_headers
test_launch_ipfs_daemon

gwport=$GWAY_PORT
apiport=$API_PORT
thash='QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'

# Gateway

# HTTP GET Request
test_expect_success "GET to Gateway succeeds" '
  curl -svX GET "http://127.0.0.1:$gwport/ipfs/$thash" 2>curl_output
'

# GET Response from Gateway should contain CORS headers
test_expect_success "GET response for Gateway resource looks good" '
  grep "Access-Control-Allow-Origin:" curl_output &&
  grep "Access-Control-Allow-Methods:" curl_output &&
  grep "Access-Control-Allow-Headers:" curl_output
'

# HTTP OPTIONS Request
test_expect_success "OPTIONS to Gateway succeeds" '
  curl -svX OPTIONS "http://127.0.0.1:$gwport/ipfs/$thash" 2>curl_output
'

# OPTION Response from Gateway should contain CORS headers
test_expect_success "OPTIONS response for Gateway resource looks good" '
  grep "Access-Control-Allow-Origin:" curl_output &&
  grep "Access-Control-Allow-Methods:" curl_output &&
  grep "Access-Control-Allow-Headers:" curl_output
'

# Read-Only API (at the Gateway Port)

# HTTP GET Request
test_expect_success "GET to API succeeds" '
  curl -svX GET "http://127.0.0.1:$gwport/api/v0/cat?arg=$thash" 2>curl_output
'

# GET Response from the API should NOT contain CORS headers
# Blacklisting: https://git.io/vzaj2
# Rationale: https://git.io/vzajX
# Fix: the previous `grep ... && false || true` always exited 0, making this
# assertion vacuous; negate the grep so the test actually fails when CORS
# headers are present. Also renamed: this checks the GET response, not OPTIONS.
test_expect_success "GET response for API looks good" '
  ! grep -q "Access-Control-Allow-" curl_output
'

# HTTP OPTIONS Request
test_expect_success "OPTIONS to API succeeds" '
  curl -svX OPTIONS "http://127.0.0.1:$gwport/api/v0/cat?arg=$thash" 2>curl_output
'

# OPTIONS Response from the API should NOT contain CORS headers
test_expect_success "OPTIONS response for API looks good" '
  ! grep -q "Access-Control-Allow-" curl_output
'

test_kill_ipfs_daemon

test_done
|
require File.dirname(__FILE__) + '/../spec_helper'

# Specs for the Eye configuration DSL: configs are given as heredoc strings
# and parsed with Eye::Dsl.parse / parse_apps into plain hashes.
describe "Eye::Dsl" do
  it "fully empty config" do
    conf = <<-E
      # haha
    E
    Eye::Dsl.parse(conf).to_h.should == {:applications => {}, :settings => {}, :defaults => {}}
    Eye::Dsl.parse_apps(conf).should == {}
  end

  it "empty config" do
    conf = <<-E
      Eye.application("bla") do
      end
    E
    Eye::Dsl.parse_apps(conf).should == {'bla' => {:name => "bla"}}
  end

  it "should set param " do
    conf = <<-E
      Eye.application("bla") do
        start_timeout 10.seconds
      end
    E
    Eye::Dsl.parse_apps(conf).should == {"bla"=>{:start_timeout => 10.seconds, :name => "bla"}}
  end

  # setter-style assignment should behave the same as the bare method call
  it "should set param, with self and =" do
    conf = <<-E
      Eye.application("bla") do
        self.start_timeout = 10.seconds
      end
    E
    Eye::Dsl.parse_apps(conf).should == {"bla"=>{:start_timeout => 10.seconds, :name => "bla"}}
  end

  it "another block syntax" do
    conf = <<-E
      Eye.application("bla"){ start_timeout 10.seconds }
    E
    Eye::Dsl.parse_apps(conf).should == {"bla"=>{:start_timeout => 10.seconds, :name => "bla"}}
  end

  it "should raise on unknown option" do
    conf = <<-E
      Eye.application("bla") do
        pid_file "11"
        hoho 10
      end
    E
    expect{Eye::Dsl.parse_apps(conf)}.to raise_error(Eye::Dsl::Error)
  end

  # accessing unset keys at any nesting level should return nil, not defaults
  it "hash should not be with defaults" do
    conf = <<-E
      Eye.application("bla") do
        start_timeout 10.seconds
        process("11") do
          pid_file "1"
        end
      end
    E
    cfg = Eye::Dsl.parse_apps(conf)
    cfg[:something].should == nil
    cfg['bla'][:something].should == nil
    cfg['bla'][:groups]['__default__'][:some].should == nil
    cfg['bla'][:groups]['__default__'][:processes][:some].should == nil
  end

  # the special '__default__' application (string or symbol) merges its
  # settings into every other application and its processes
  it "should set application defaults" do
    conf = <<-E
      Eye.application('__default__'){ env "A" => "B" }
      Eye.application(:__default__){ env "B" => "C" }
      Eye.application("bla") do
        process("11") do
          pid_file "1"
        end
      end
    E
    cfg = Eye::Dsl.parse_apps(conf)
    cfg['bla'][:environment].should == {"A"=>"B", "B"=>"C"}
    cfg['bla'][:groups]['__default__'][:processes]['11'][:environment].should == {"A"=>"B", "B"=>"C"}
  end

  # NOTE(review): uid support appears to depend on the Ruby version — on
  # Ruby < 2.0 parsing is expected to raise; confirm against Eye's docs.
  it "set uid option" do
    conf = <<-E
      Eye.application("bla") do
        uid "vasya"
        process("11") do
          pid_file "1"
        end
      end
    E
    if RUBY_VERSION >= '2.0'
      expect{Eye::Dsl.parse_apps(conf)}.not_to raise_error
    else
      expect{Eye::Dsl.parse_apps(conf)}.to raise_error(Eye::Dsl::Error)
    end
  end

  # clear_bundler_env nils out Bundler/RubyGems variables but keeps
  # explicitly-set env entries
  it "should set clear_bundler_env" do
    conf = " Eye.app(:bla){ clear_bundler_env; env 'A' => 1 }"
    cfg = Eye::Dsl.parse_apps(conf)
    cfg['bla'][:environment].should == {"GEM_PATH"=>nil, "GEM_HOME"=>nil, "RUBYOPT"=>nil, "BUNDLE_BIN_PATH"=>nil, "BUNDLE_GEMFILE"=>nil, "A" => 1}
  end
end
|
<reponame>tekton/icewall
package main
import (
"fmt"
// "net"
"net/http"
"net/url"
"net/http/httputil"
"encoding/json"
"time"
"os"
"io/ioutil"
// "context"
//
"github.com/rs/xid"
"github.com/spf13/viper"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
//
// "github.com/go-redis/redis/v8"
)
// var ctx = context.Background()
// var redisConn *redis.Client
// handler reverse-proxies each request to the upstream named in the
// "x-iw-fwd" request header (falling back to the configured "default_host").
// Before proxying it:
//   - answers the configured load-balancer health-check path directly,
//   - tags the request with a unique id ("x-iw-id") for log correlation,
//   - logs request headers, response headers and total latency.
func handler(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	// Load balancer health short-circuit rule check.
	if r.URL.Path == viper.GetString("health_check.path") {
		log.Debug().Msg("health check")
		// we have a health check to perform! Now, which kind?
		if viper.GetString("health_check.action") == "file" {
			// A file - serve it back and exit early.
			w.Header().Set("Content-Type", viper.GetString("health_check.type"))
			// Optional maintenance mode: a sentinel file whose content is
			// compared against a configured value.
			if viper.GetString("health_check.maintenance.file") != "" {
				maintenanceFile, err := os.Open(viper.GetString("health_check.maintenance.file"))
				// BUG FIX: zerolog events are only emitted once Msg()/Send()
				// terminates the chain - the bare Err(...) calls here never
				// produced any output.
				if err != nil {
					log.Error().Err(err).Msg("could not open maintenance file")
				} else {
					defer maintenanceFile.Close()
					maintenanceData, readErr := ioutil.ReadAll(maintenanceFile)
					if readErr != nil {
						// not setting a 5XX status code because it isn't that opinionated
						log.Error().Err(readErr).Msg("could not read maintenance file")
					} else if string(maintenanceData) == viper.GetString("health_check.maintenance.check_val") {
						log.Info().Msg("maintenance mode")
						// set the status code!
						w.WriteHeader(viper.GetInt("health_check.maintenance.status_code"))
					}
				}
			}
			http.ServeFile(w, r, viper.GetString("health_check.file"))
			return
		}
	}
	// Tag the request so upstream/downstream log lines can be correlated.
	id := xid.New()
	r.Header.Set("x-iw-id", id.String())
	host := r.Header.Get("x-iw-fwd")
	if host == "" {
		host = viper.GetString("default_host")
	}
	if viper.GetBool("rules_enabled") {
		if viper.GetBool("global_throttle.enabled") {
			// general check first!
		}
	}
	// header rules
	// path rules
	// query string rules
	// headers to look for as an array?

	// BUG FIX: the url.Parse error was silently discarded; a malformed
	// upstream host now fails loudly with 502 instead of proxying nowhere.
	// (Also renamed the variable so it no longer shadows package net/url.)
	target, err := url.Parse(host)
	if err != nil {
		log.Error().Err(err).Str("req", id.String()).Str("host", host).Msg("invalid upstream host")
		http.Error(w, "bad gateway", http.StatusBadGateway)
		return
	}
	proxy := httputil.NewSingleHostReverseProxy(target)
	// TODO: add transport timeout settings with defaults (http.Transport /
	// net.Dialer DialContext); the base versions are fine for now.

	// BUG FIX: net/http promotes the Host header into r.Host and removes it
	// from r.Header, so r.Header.Get("Host") was always empty. Capture the
	// real inbound host before overwriting r.Host below.
	originalHost := r.Host
	r.URL.Host = target.Host
	r.URL.Scheme = target.Scheme
	r.Header.Set("X-Forwarded-Host", originalHost)
	r.Host = target.Host
	headers, hErr := json.Marshal(r.Header)
	if hErr != nil {
		log.Error().Err(hErr).Str("req", id.String()).Msg("Could not Marshal Req Headers")
	}
	// it's nice to see when a request effectively starts- just in case something happens...
	log.Info().RawJSON("headers", headers).Str("uri", r.URL.String()).Str("req", id.String()).Msg("req")
	// Intercept the response only to log it, not to change it.
	proxy.ModifyResponse = func(res *http.Response) error {
		resHeaders, resErr := json.Marshal(res.Header)
		if resErr != nil {
			// BUG FIX: message previously said "Req" for the response side.
			log.Error().Err(resErr).Str("req", id.String()).Msg("Could not Marshal Res Headers")
		}
		// TODecide readd x-iw-id just in case it got dropped?
		log.Info().RawJSON("headers", resHeaders).Str("req", id.String()).Msg("res")
		return nil
	}
	proxy.ServeHTTP(w, r)
	latency := time.Since(start).Seconds()
	log.Info().Float64("latency", latency).Str("req", id.String()).Msg("")
}
// init loads the configuration file and wires up structured logging before
// main runs; a missing/unreadable config is fatal.
func init() {
	// The config file "icewall.*" is searched in the working directory first,
	// then in the system-wide locations.
	viper.AddConfigPath(".")
	viper.AddConfigPath("/opt/icewall/config/")
	viper.AddConfigPath("/etc/icewall/config/")
	viper.SetConfigName("icewall")
	viper.SetDefault("log_level", "info")
	viper.SetDefault("default_host", "http://localhost")
	if err := viper.ReadInConfig(); err != nil { // Find config, read config, or else...
		panic(fmt.Errorf("Fatal error config file: %s \n", err))
	}
	// Info level by default; the config may opt into debug output.
	level := zerolog.InfoLevel
	if viper.GetString("log_level") == "debug" {
		level = zerolog.DebugLevel
	}
	zerolog.SetGlobalLevel(level)
	// Short field names keep the JSON log lines compact.
	zerolog.TimestampFieldName = "t"
	zerolog.LevelFieldName = "l"
	zerolog.MessageFieldName = "m"
	log.Info().Msg("starting icewall")
	// read basic rules from files on disk? badger?
	// subscribe to "ticker" for new rules - how does base get made? does it reset the ticker data?
}
// main registers the catch-all proxy handler and blocks serving HTTP on the
// configured port.
func main() {
	http.HandleFunc("/", handler)
	// http.HandleFunc("/__iw__/api/add_rule", handler)
	// redisConn = redis.NewClient(&redis.Options{
	//	Network: "tcp",
	//	Addr: "127.0.0.1:6379",
	// })
	// defer redisConn.Close()
	// val := redisConn.Ping(ctx)
	// log.Info().Str("val", val.String()).Msg("ping")
	port := fmt.Sprintf(":%s", viper.GetString("port"))
	// BUG FIX: a zerolog event is only emitted - and Fatal() only calls
	// os.Exit(1) - once Msg()/Send() terminates the chain. The original chain
	// ended at Err(...), so a ListenAndServe failure was never logged and the
	// process just fell off main with exit status 0.
	log.Fatal().Err(http.ListenAndServe(port, nil)).Msg("server stopped")
}
|
TERMUX_PKG_HOMEPAGE=https://nim-lang.org/
TERMUX_PKG_DESCRIPTION="Nim programming language compiler"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_VERSION=0.20.0
# Release tarball from the Nim project; the checksum pins the exact archive.
TERMUX_PKG_SRCURL=https://nim-lang.org/download/nim-$TERMUX_PKG_VERSION.tar.xz
TERMUX_PKG_SHA256=51f479b831e87b9539f7264082bb6a64641802b54d2691b3c6e68ac7e2699a90
# Runtime/build dependencies declared for the resulting package.
TERMUX_PKG_DEPENDS="clang, git, libandroid-glob"
# A host-side compiler is bootstrapped first (see termux_step_host_build) and
# the package builds inside the extracted source tree.
TERMUX_PKG_HOSTBUILD=yes
TERMUX_PKG_BUILD_IN_SRC=yes
# Build Nim for the build host first; the resulting bin/ directory is later
# put on PATH by termux_step_make so the host compiler can cross-compile.
termux_step_host_build() {
	cp -r ../src/* ./
	make -j $TERMUX_MAKE_PROCESSES CC=gcc LD=gcc
}
# Cross-compile Nim for the target architecture using the host-built
# toolchain, then build koch and nimble with the freshly built compiler.
termux_step_make() {
	# Map Termux architecture names onto Nim's cpu identifiers.
	if [ $TERMUX_ARCH = "x86_64" ]; then
		export NIM_ARCH=amd64
	elif [ $TERMUX_ARCH = "i686" ]; then
		export NIM_ARCH=i386
	elif [ $TERMUX_ARCH = "aarch64" ]; then
		# -Oz breaks aarch64 build
		CFLAGS+=" -Os"
		export NIM_ARCH=arm64
	else
		export NIM_ARCH=arm
	fi
	LDFLAGS+=" -landroid-glob"
	# Substitute the cross-toolchain settings into Nim's build configuration.
	sed -i "s%\@CC\@%${CC}%g" config/nim.cfg
	sed -i "s%\@CFLAGS\@%${CFLAGS}%g" config/nim.cfg
	sed -i "s%\@LDFLAGS\@%${LDFLAGS}%g" config/nim.cfg
	sed -i "s%\@CPPFLAGS\@%${CPPFLAGS}%g" config/nim.cfg
	# Patch the generated C sources to use Termux's sh instead of
	# /system/bin/sh (the second number is the new string length).
	find -name "stdlib_osproc.nim.c" | xargs -n 1 sed -i 's',"/system/bin/sh\"\,\ 14","/data/data/com.termux/files/usr/bin/sh\"\,\ 38",'g'
	# Use the host-built nim for the bootstrap steps below.
	PATH=$TERMUX_PKG_HOSTBUILD_DIR/bin:$PATH
	if [ $NIM_ARCH = "amd64" ]; then
		sed -i 's/arm64/amd64/g' makefile
	fi
	export CFLAGS=" $CPPFLAGS $CFLAGS -w -fno-strict-aliasing"
	make LD=$CC uos=linux mycpu=$NIM_ARCH myos=android -j $TERMUX_MAKE_PROCESSES useShPath=$TERMUX_PREFIX/bin/sh
	cp config/nim.cfg ../host-build/config
	# Build koch and nimble against Termux's include/lib paths.
	nim --opt:size --define:termux -d:release --os:android --cpu:$NIM_ARCH -t:-I/data/data/com.termux/files/usr/include -l:"-L/data/data/com.termux/files/usr/lib -landroid-glob" c koch.nim
	cd dist/nimble/src
	nim --define:termux -d:release --os:android --cpu:$NIM_ARCH -t:-I/data/data/com.termux/files/usr/include -l:"-L/data/data/com.termux/files/usr/lib -landroid-glob" c nimble.nim
}
# Install the compiler under $TERMUX_PREFIX/lib/nim and expose the main
# binaries (nim, koch, nimble) on PATH via relative symlinks.
termux_step_make_install() {
	./install.sh $TERMUX_PREFIX/lib
	cp koch $TERMUX_PREFIX/lib/nim/bin/
	cp dist/nimble/src/nimble $TERMUX_PREFIX/lib/nim/bin/
	ln -sfr $TERMUX_PREFIX/lib/nim/bin/nim $TERMUX_PREFIX/bin/
	ln -sfr $TERMUX_PREFIX/lib/nim/bin/koch $TERMUX_PREFIX/bin/
	ln -sfr $TERMUX_PREFIX/lib/nim/bin/nimble $TERMUX_PREFIX/bin/
}
|
<filename>timwang-algorithm-leetcode/src/main/java/com/timwang/algorithm/leetcode/stack/MergeNumbers.java
package com.timwang.algorithm.leetcode.stack;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
/**
* @author wangjun
* @date 2020-03-17
*/
/**
 * Reads lines from standard input in pairs; each line holds space-separated
 * integers assumed to be sorted ascending. Every pair is merged into one
 * sorted sequence which is printed on its own line.
 */
public class MergeNumbers {

    public static void main(String[] args) {
        // try-with-resources: the reader is now closed even when an
        // exception escapes the loop (the original leaked it on error).
        try (BufferedReader in = new BufferedReader(new InputStreamReader(System.in))) {
            List<String> inputStrs = new ArrayList<>(2);
            String s;
            while ((s = in.readLine()) != null) {
                inputStrs.add(s);
                // Process complete pairs; a trailing odd line is ignored,
                // matching the original behavior.
                if (inputStrs.size() == 2) {
                    int[] first = getNumbers(inputStrs.get(0));
                    int[] second = getNumbers(inputStrs.get(1));
                    int[] merged = merge(first, first.length, second, second.length);
                    System.out.println(changeResult(merged));
                    inputStrs.clear();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Formats the merged numbers as a single space-separated line.
     */
    private static String changeResult(int[] merge) {
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 0; i < merge.length; i++) {
            if (i > 0) {
                stringBuilder.append(' ');
            }
            stringBuilder.append(merge[i]);
        }
        return stringBuilder.toString();
    }

    /**
     * Parses a line of space-separated integers into an int array.
     * (The original routed the values through a boxed List and also built an
     * unused scratch array; parse directly instead.)
     */
    private static int[] getNumbers(String string) {
        String[] strArr = string.split(" ");
        int[] numbers = new int[strArr.length];
        for (int i = 0; i < strArr.length; i++) {
            numbers[i] = Integer.parseInt(strArr[i]);
        }
        return numbers;
    }

    /**
     * Classic two-pointer merge of two sorted arrays into a new array.
     * Unlike the original, the input arrays are no longer mutated (the old
     * version copied part of the result back into {@code firstArr}, which no
     * caller relied on).
     */
    private static int[] merge(int[] firstArr, int firstArrLen, int[] secondArr, int secondArrLen) {
        int[] result = new int[firstArrLen + secondArrLen];
        int i = 0, j = 0, k = 0;
        while (i < firstArrLen && j < secondArrLen) {
            result[k++] = firstArr[i] < secondArr[j] ? firstArr[i++] : secondArr[j++];
        }
        while (i < firstArrLen) {
            result[k++] = firstArr[i++];
        }
        while (j < secondArrLen) {
            result[k++] = secondArr[j++];
        }
        return result;
    }
}
|
<reponame>TeKraft/smle<gh_stars>10-100
import { DynamicElementComponent } from './base/dynamic-element.component';
import { HostDirective } from './base/host.directive';
/**
 * Low-level building blocks of the dynamic-element infrastructure, exported
 * as one array so feature modules can declare/export them in bulk.
 */
export const BASE_COMPONENTS = [
  DynamicElementComponent,
  HostDirective
];
|
def longest_substring(s):
    '''Return the length of the longest substring of ``s`` that contains no
    repeated characters.

    Sliding-window scan: ``window_start`` is the left edge of the current
    duplicate-free window and ``last_seen`` maps each character to the index
    of its most recent occurrence.
    '''
    last_seen = {}
    window_start = 0
    best = 0
    for index, character in enumerate(s):
        previous = last_seen.get(character, -1)
        if previous >= window_start:
            # Duplicate inside the window: slide the left edge past it.
            window_start = previous + 1
        else:
            # Window grew by one character; record its length.
            best = max(best, index - window_start + 1)
        last_seen[character] = index
    return best
#!/bin/sh
# Minimal SLURM example job: requests 512 MB on the "general" partition and
# prints a greeting. "%j" in the output path expands to the job id; replace
# the placeholder path before submitting with `sbatch`.
#SBATCH --partition general
#SBATCH --mem 512
#SBATCH --job-name hello_world
#SBATCH --output /replace/by/path/to/your/scratch/space/hello_world.%j.out
echo Hello World from the cluster!
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.brooklyn.camp.spi;
import io.brooklyn.camp.commontypes.RepresentationSkew;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.text.Identifiers;
import brooklyn.util.time.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
/** Superclass of CAMP resource implementation objects.
* Typically used to hold common state of implementation objects
* and to populate the DTO's used by the REST API.
* <p>
* These class instances are typically created using the
* static {@link #builder()} methods they contain.
* The resulting instances are typically immutable,
* so where fields can change callers should use a new builder
* (or update an underlying data store).
* <p>
* This class is not meant to be instantiated directly, as
* CAMP only uses defined subclasses (ie containing these fields).
* It is instantiable for testing.
*/
public class AbstractResource {

    public static final String CAMP_TYPE = "Resource";

    // Identity and descriptive fields; only reachable for mutation through
    // the private setters below, i.e. via the Builder.
    private String id = Identifiers.makeRandomId(8);
    private String name;
    private String type;
    private String description;
    private String sourceCode;
    // Millisecond precision is deliberately dropped (see setCreated).
    private Date created = Time.dropMilliseconds(new Date());
    private List<String> tags = Collections.emptyList();
    private RepresentationSkew representationSkew;
    private Map<String,Object> customAttributes = new MutableMap<String, Object>();

    /** Use {@link #builder()} to create */
    protected AbstractResource() {}

    // getters

    public String getId() {
        return id;
    }
    public String getName() {
        return name;
    }
    public String getType() {
        return type;
    }
    public String getDescription() {
        return description;
    }
    public String getSourceCode() {
        return sourceCode;
    }
    public Date getCreated() {
        return created;
    }
    public List<String> getTags() {
        return tags;
    }
    public RepresentationSkew getRepresentationSkew() {
        return representationSkew;
    }
    /** Returns an immutable snapshot; use {@link #setCustomAttribute(String, Object)} to modify. */
    public Map<String, Object> getCustomAttributes() {
        return ImmutableMap.copyOf(customAttributes);
    }

    // setters - private so built instances stay effectively immutable

    private void setId(String id) {
        this.id = id;
    }
    private void setName(String name) {
        this.name = name;
    }
    private void setDescription(String description) {
        this.description = description;
    }
    private void setSourceCode(String sourceCode) {
        this.sourceCode = sourceCode;
    }
    private void setCreated(Date created) {
        // precision beyond seconds breaks equals check
        this.created = Time.dropMilliseconds(created);
    }
    private void setTags(List<String> tags) {
        this.tags = ImmutableList.copyOf(tags);
    }
    private void setType(String type) {
        this.type = type;
    }
    private void setRepresentationSkew(RepresentationSkew representationSkew) {
        this.representationSkew = representationSkew;
    }
    // Intentionally public: custom attributes remain mutable after build.
    public void setCustomAttribute(String key, Object value) {
        this.customAttributes.put(key, value);
    }

    // builder

    @SuppressWarnings("rawtypes")
    public static Builder<? extends AbstractResource,? extends Builder> builder() {
        return new AbstractResourceBuilder(CAMP_TYPE);
    }

    /** Builder creates the instance up front to avoid repetition of fields in the builder;
     * but prevents object leakage until build and prevents changes after build,
     * so effectively immutable.
     * <p>
     * Similarly setters in the class are private so those objects are also typically effectively immutable. */
    public abstract static class Builder<T extends AbstractResource,U extends Builder<T,U>> {

        private boolean built = false;
        private String type = null;
        private T instance = null;

        protected Builder(String type) {
            this.type = type;
        }
        /** Subclasses override to instantiate the concrete resource type. */
        @SuppressWarnings("unchecked")
        protected T createResource() {
            return (T) new AbstractResource();
        }
        // Lazily creates the instance; fails once build() has been called.
        protected synchronized T instance() {
            if (built)
                throw new IllegalStateException("Builder instance from "+this+" cannot be access after build");
            if (instance==null) {
                instance = createResource();
                initialize();
            }
            return instance;
        }
        protected void initialize() {
            if (type!=null) type(type);
        }
        public synchronized T build() {
            T result = instance();
            built = true;
            return result;
        }
        @SuppressWarnings("unchecked")
        protected U thisBuilder() { return (U)this; }

        // Fluent field setters - each touches the pre-built instance and
        // returns this builder (typed as the concrete subclass U).
        public U type(String x) { instance().setType(x); return thisBuilder(); }
        public U id(String x) { instance().setId(x); return thisBuilder(); }
        public U name(String x) { instance().setName(x); return thisBuilder(); }
        public U description(String x) { instance().setDescription(x); return thisBuilder(); }
        public U created(Date x) { instance().setCreated(x); return thisBuilder(); }
        public U tags(List<String> x) { instance().setTags(x); return thisBuilder(); }
        public U representationSkew(RepresentationSkew x) { instance().setRepresentationSkew(x); return thisBuilder(); }
        public U customAttribute(String key, Object value) { instance().setCustomAttribute(key, value); return thisBuilder(); }
        public U sourceCode(String x) { instance().setSourceCode(x); return thisBuilder(); }

        // public String type() { return instance().type; }
    }

    @VisibleForTesting
    protected static class AbstractResourceBuilder extends Builder<AbstractResource,AbstractResourceBuilder> {
        protected AbstractResourceBuilder(String type) {
            super(type);
        }
    }

    @Override
    public String toString() {
        return super.toString()+"[id="+getId()+"; type="+getType()+"]";
    }
}
|
const express = require("express");
const { d0_da_g3t } = require("./func");
const app = express();
const port = 8569;
app.use(express.json());
app.post("/back_off_b1tch_u_dont_wanna_t3st_me", async (req, res) => {
let resData = await d0_da_g3t(req.body);
console.log(resData);
res.json({
stats: resData
});
});
app.post("/test", (req, res) => res.json(req.body));
app.listen(port, () => console.log(`app listening on port ${port}!`));
|
<reponame>kully-hmrc/cc-calculator
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import akka.stream.Materializer
import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
import org.scalatest.Suite
import org.scalatestplus.play.OneAppPerSuite
import play.api.Application
import play.api.i18n.Messages
import play.api.i18n.Messages.Implicits._
import play.api.inject.guice.GuiceApplicationBuilder
import uk.gov.hmrc.play.test.UnitSpec
/**
 * Mixin giving calculator test suites a fake Play application: a Guice-built
 * app with test configuration overrides, plus commonly needed implicits
 * (Materializer, Messages) and a yyyy-MM-dd date parser.
 */
trait FakeCCCalculatorApplication extends UnitSpec with OneAppPerSuite {
  this: Suite =>

  // Overrides applied to the fake application: CSRF token signing disabled
  // and the contact-frontend service pointed at localhost for tests.
  val config: Map[String, _] = Map(
    "csrf.sign.tokens" -> false,
    "govuk-tax.Test.services.contact-frontend.host" -> "localhost",
    "govuk-tax.Test.services.contact-frontend.port" -> "9250"
  )

  val formatter = DateTimeFormat.forPattern("yyyy-MM-dd")

  // Parses an ISO-style date string (e.g. "2017-08-31") into a Joda LocalDate.
  def parseDate(date: String): LocalDate = LocalDate.parse(date, formatter)

  implicit override lazy val app: Application = new GuiceApplicationBuilder()
    .configure(config)
    .build()

  implicit lazy val mat: Materializer = app.materializer
  implicit lazy val messages: Messages = applicationMessages
}
|
<filename>Ch09/ex09-04.sql
-- Builds "<title> <first> [<middle initial>] <surname>" using the ANSI ||
-- concatenation operator (in MySQL this requires sql_mode PIPES_AS_CONCAT).
CREATE FUNCTION concat_example_ansi(
    in_title VARCHAR(4),
    in_gender CHAR(1),
    in_firstname VARCHAR(20),
    in_middle_initial CHAR(1),
    in_surname VARCHAR(20))
    RETURNS VARCHAR(60)
BEGIN
    DECLARE l_title VARCHAR(4);
    DECLARE l_name_string VARCHAR(60);

    -- Derive a default title from gender when none was supplied; otherwise
    -- keep the caller's title. BUG FIX: the original had no ELSE branch, so
    -- l_title stayed NULL whenever in_title was non-NULL, which made the
    -- whole concatenated result NULL.
    IF ISNULL(in_title) THEN
        IF in_gender='M' THEN
            SET l_title='Mr';
        ELSE
            SET l_title='Ms';
        END IF;
    ELSE
        SET l_title=in_title;
    END IF;

    -- Include the middle initial only when one was provided.
    IF ISNULL(in_middle_initial) THEN
        SET l_name_string=l_title||' '||in_firstname||' '||in_surname;
    ELSE
        SET l_name_string=l_title||' '||in_firstname||' '||
            in_middle_initial||' '||in_surname;
    END IF;
    RETURN(l_name_string);
END;
|
<filename>verification/src/main/java/nl/littlerobots/squadleader/verification/MyActivity.java
package nl.littlerobots.squadleader.verification;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.webkit.JavascriptInterface;
import com.f2prateek.dart.InjectExtra;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.otto.Produce;
import com.squareup.otto.Subscribe;
import butterknife.InjectView;
import butterknife.OnClick;
import icepick.Icicle;
import nl.littlerobots.squadleader.Keep;
import retrofit.RestAdapter.Builder;
// Verification fixture: exercises the annotation-driven libraries whose
// members must survive ProGuard; presumably built to check the keep rules.
public class MyActivity extends Activity {
    // icepick: annotated field used for instance-state saving.
    @Icicle
    String icepickString;
    // Dart: extra injected from the launching intent under key "test".
    @InjectExtra("test")
    String dartString;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_my);
        // need a reference because otherwise ProGuard will strip out KeepFieldsTester
        KeepFieldsTester object = new KeepFieldsTester();
        OkHttpClient client = new OkHttpClient();
        RetrofitTester service = new Builder().setEndpoint("http://www.test.com").build().create(RetrofitTester.class);
    }

    // Otto event-bus annotated methods.
    @Subscribe
    public void testOttoSubscribeAnnotation() {
    }
    @Produce
    public void testOttoProduceAnnotation() {
    }
    // Project's own @Keep annotation.
    @Keep
    public void testKeepMethod() {
    }
    // Convention-named handler (onEvent*) - presumably kept by a name-based
    // rule rather than an annotation.
    public void onEvent(String dummy) {
    }

    // Butter Knife view/click bindings.
    static final class ButterknifeTarget {
        @InjectView(R.id.test)
        View mTestView;
        @OnClick(R.id.test)
        public void testAction() {
        }
    }

    // WebView JavaScript bridge; the method name must survive obfuscation.
    static final class DummyJavaScriptInterface {
        @JavascriptInterface
        public void dummyJsMethod() {
        }
    }
}
|
<reponame>metaring/spring-boot-app-example<filename>src/main/java/com/metaring/springbootappexample/configuration/FF4JConfiguration.java
package com.metaring.springbootappexample.configuration;
import java.util.HashMap;
import java.util.Map;
import org.ff4j.FF4j;
import org.ff4j.cache.InMemoryCacheManager;
import org.ff4j.mongo.store.EventRepositoryMongo;
import org.ff4j.mongo.store.FeatureStoreMongo;
import org.ff4j.mongo.store.PropertyStoreMongo;
import org.ff4j.property.Property;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;
@Configuration
public class FF4JConfiguration {

    // Snapshot of the properties read from the XML config at startup,
    // exposed for global lookup by the rest of the application.
    public static final Map<String, Property<?>> GLOBAL_PROPERTIES = new HashMap<>();

    @Value("${spring.data.mongodb.host}")
    private String databaseConnectionHost;
    @Value("${spring.data.mongodb.port}")
    private Integer databaseConnectionPort;
    // Base name (without ".xml") of the FF4J configuration file.
    @Value("${spring.ff4j.config.name}")
    private String ff4jConfigurationFileName;

    /**
     * Bootstraps FF4J from "&lt;config name&gt;.xml", then switches its
     * feature and property stores to MongoDB (database "ff4j"), enables
     * Mongo-backed audit and wraps the stores in an in-memory cache.
     */
    @Bean
    public FF4j getFF4j() {
        FF4j ff4j = new FF4j(ff4jConfigurationFileName + ".xml").autoCreate(true);
        GLOBAL_PROPERTIES.putAll(ff4j.getProperties());
        // Define Connectivity to DB (see with authorization - Using Mongo Driver
        MongoClient mongoClient = new MongoClient(databaseConnectionHost, databaseConnectionPort);
        // Using Spring-data-mongodb
        MongoDatabase mongoDatabase = mongoClient.getDatabase("ff4j");
        ff4j.setFeatureStore(new FeatureStoreMongo(mongoDatabase.getCollection("ff4j_features"), ff4jConfigurationFileName + ".xml"));
        ff4j.setPropertiesStore(new PropertyStoreMongo(mongoDatabase.getCollection("ff4j_properties"), ff4jConfigurationFileName + ".xml"));
        // Enable audit
        ff4j.setEventRepository(new EventRepositoryMongo(mongoClient, "ff4j"));
        ff4j.audit(true);
        // Enable Cache Proxy
        ff4j.cache(new InMemoryCacheManager());
        return ff4j;
    }
}
<filename>artifacts/adm/charts/src/main/java/net/community/apps/tools/adm/charts/MainFrame.java
/*
*
*/
package net.community.apps.tools.adm.charts;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyListener;
import java.io.File;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.AbstractButton;
import javax.swing.JComboBox;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JToolBar;
import net.community.apps.tools.adm.AbstractAccessMainFrame;
import net.community.apps.tools.adm.DBConnectDialog;
import net.community.apps.tools.adm.charts.resources.ResourcesAnchor;
import net.community.chest.Triplet;
import net.community.chest.awt.attributes.Titled;
import net.community.chest.awt.window.EscapeKeyWindowCloser;
import net.community.chest.db.DBAccessConfig;
import net.community.chest.dom.DOMUtils;
import net.community.chest.lang.ExceptionUtil;
import net.community.chest.swing.component.menu.MenuItemExplorer;
import net.community.chest.swing.options.BaseOptionPane;
import net.community.chest.ui.helpers.combobox.TypedComboBox;
import net.community.chest.ui.helpers.combobox.TypedComboBoxActionListener;
import net.community.chest.util.logging.LoggerWrapper;
import net.community.chest.util.logging.factory.WrapperFactoryManager;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.labels.StandardPieSectionLabelGenerator;
import org.jfree.chart.plot.PiePlot;
import org.jfree.chart.plot.Plot;
import org.jfree.data.category.CategoryDataset;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.data.general.DatasetUtilities;
import org.jfree.data.general.PieDataset;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* <P>Copyright 2010 as per GPLv2</P>
*
* @author <NAME>.
* @since Jun 23, 2010 1:58:40 PM
*/
final class MainFrame extends AbstractAccessMainFrame<ResourcesAnchor> {
    // Class-wide logger shared by all frame instances.
    private static final LoggerWrapper _logger=WrapperFactoryManager.getLogger(MainFrame.class);
    /*
     * @see net.community.apps.common.BaseMainFrame#getLogger()
     */
    @Override
    protected LoggerWrapper getLogger ()
    {
        return _logger;
    }
    /*
     * @see net.community.apps.common.MainComponent#getResourcesAnchor()
     */
    @Override
    public ResourcesAnchor getResourcesAnchor ()
    {
        return ResourcesAnchor.getInstance();
    }
    // Active JDBC connection; null while disconnected.
    private Connection _dbConn /* =null */;
    public Connection getConnection ()
    {
        return _dbConn;
    }
    public boolean isConnected ()
    {
        return (getConnection() != null);
    }
    // Menu items / toolbar buttons whose enabled state follows the connection.
    private JMenuItem _discMenuItem, _refreshMenuItem;
    private AbstractButton _discBtn, _refreshBtn;
    // Enables/disables the disconnect and refresh controls to match `connected`.
    protected void updateButtonsState (final boolean connected)
    {
        updateButtonsState(connected, _discBtn, _refreshBtn, _discMenuItem, _refreshMenuItem);
    }
    // Last successfully used DB access configuration (kept for logging/reuse).
    private DBAccessConfig _connCfg;
    // Validates `cfg`, opens a new DB connection from it and refreshes the
    // chart; any failure is logged and reported to the user in a dialog.
    protected void doConnect (final DBAccessConfig cfg)
    {
        try
        {
            final int nErr=DBAccessConfig.checkDBAccessConfig(cfg);
            if (nErr != 0)
                throw new IllegalStateException("Bad (" + nErr + ") DB access configuration");
            if (isConnected())
                throw new IllegalStateException("Previous connection still active: " + _connCfg);

            final Triplet<?,?,? extends Connection> cRes=cfg.createConnection();
            if (null == (_dbConn=cRes.getV3()))
                throw new IllegalStateException("No connection generated");
            // Remember the configuration that produced this connection.
            if (null == _connCfg)
                _connCfg = new DBAccessConfig(cfg);
            else
                _connCfg.update(cfg);
            _logger.info("doConnect(" + _connCfg + ") connected");
            doRefresh();
        }
        catch(Exception e)
        {
            _logger.error("doConnect(" + cfg + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            BaseOptionPane.showMessageDialog(this, e);
        }
    }
    // Working configuration edited through the connect dialog.
    private final DBAccessConfig _dbAccess=fillDefaults(new DBAccessConfig());
    public DBAccessConfig getDBAccessConfig ()
    {
        return _dbAccess;
    }
    // XML element describing the connect dialog layout; set elsewhere.
    private Element _connDlgElem /* =null */;
    // Shows the connect dialog and (re-)connects when the user confirmed a
    // new or changed configuration.
    protected void doConnect ()
    {
        if (null == _connDlgElem)
        {
            JOptionPane.showMessageDialog(this, "Missing configuration element", "Cannot show dialog", JOptionPane.ERROR_MESSAGE);
            return;
        }

        final DBAccessConfig cfg=getDBAccessConfig();
        final DBConnectDialog dlg=new DBConnectDialog(this, cfg, _connDlgElem, true);
        dlg.setVisible(true);
        if (!dlg.isOkExit())
            return;    // debug breakpoint
        // Nothing to do if already connected with an unchanged configuration.
        if (isConnected() && (!dlg.isChangedConfig()))
            return;    // debug breakpoint

        doDisconnect();    // disconnect from previous instance
        doConnect(cfg);
    }
    // Subclass hook - return false to veto an impending disconnect.
    protected boolean okToDisconnect ()
    {
        return true;
    }
    // Closes the current DB connection (if any). Close errors are logged and
    // shown to the user, but the connection reference is always cleared.
    protected void doDisconnect ()
    {
        if (!okToDisconnect())
            return;

        if (isConnected())
        {
            _logger.info("Disconnect from " + _connCfg);
            try
            {
                _dbConn.close();
            }
            catch(Exception e)
            {
                _logger.error("doDisconnect(" + _connCfg + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
                BaseOptionPane.showMessageDialog(this, e);
            }
            finally
            {
                _dbConn = null;
            }
        }
    }
private static final String cleanStringValue (final String s)
{
final int sLen=(null == s) ? 0 : s.length();
if (sLen <= 0)
return s;
StringBuilder sb=null;
int lastPos=0;
for (int curPos=0; curPos < sLen; curPos++)
{
final char ch=s.charAt(curPos);
if ((ch >= ' ') && (ch < 0x007F))
continue;
if (null == sb)
sb = new StringBuilder(sLen - 1);
if (curPos > lastPos)
{
final String subText=s.substring(lastPos, curPos);
sb.append(subText);
}
if ((lastPos=curPos+1) >= sLen)
break;
}
if (null == sb)
return s;
if (lastPos < sLen)
sb.append(s.substring(lastPos));
return sb.toString();
}
    /**
     * Runs the given SQL query - expected to return (label,count) rows with a
     * string in column 1 and an integer in column 2 - and collects the rows
     * into a case-insensitive label-to-value map.
     * @param title chart title, used only for logging/error messages
     * @param qry the SQL text; null/empty (or no active connection) yields null
     * @return the collected values (possibly empty), or null as above
     * @throws SQLException on JDBC failures
     * @throws IllegalStateException if the same (case-insensitive) label occurs twice
     */
    private final Map<String,? extends Number> runPieChartQuery (
            final String title, final String qry)
        throws SQLException
    {
        if ((null == qry) || (qry.length() <= 0) || (!isConnected()))
            return null;
        if (_logger.isDebugEnabled())
            _logger.debug("runPieChartQuery(" + title + ") " + qry);

        final Connection c=getConnection();
        Statement s=c.createStatement();
        try
        {
            final Map<String,Integer> valsMap=
                new TreeMap<String,Integer>(String.CASE_INSENSITIVE_ORDER);
            for (final ResultSet rs=s.executeQuery(qry);
                 rs != null && rs.next();
                 )
            {
                // Labels are scrubbed of non-printable characters first.
                final String name=cleanStringValue(rs.getString(1));
                final int value=rs.getInt(2);
                final Number prev=valsMap.put(name, Integer.valueOf(value));
                if (prev != null)
                    throw new IllegalStateException("runPieChartQuery(" + title + ") multiple values for name=" + name);
                if (_logger.isDebugEnabled())
                    _logger.debug("runPieChartQuery(" + title + ") " + name + "=" + value);
            }

            return valsMap;
        }
        finally
        {
            // Closing the statement also releases its result set.
            if (s != null)
                s.close();
        }
    }
    /**
     * Wraps the (label,value) map into a single-row {@link CategoryDataset}
     * whose row key is the chart title; entries with empty labels or null
     * values are skipped.
     * @return the dataset, or null when there is nothing to display
     */
    private static final CategoryDataset populateCategoryDataset (
        final String title, final Map<String,? extends Number> res)
    {
        final Collection<? extends Map.Entry<String,? extends Number>> vl=
            ((null == res) || (res.size() <= 0)) ? null : res.entrySet();
        if ((null == vl) || (vl.size() <= 0))
            return null;

        final DefaultCategoryDataset ds=new DefaultCategoryDataset();
        for (final Map.Entry<String,? extends Number> vp : vl)
        {
            final String vn=(null == vp) ? null : vp.getKey();
            final Number vv=(null == vp) ? null : vp.getValue();
            if ((null == vn) || (vn.length() <= 0) || (null == vv))
                continue;
            ds.addValue(vv, title, vn);
        }

        return ds;
    }
    // Lazily created chart; built on first populate call, updated afterwards.
    private JFreeChart _chart;
    /**
     * Creates (first call) or updates (subsequent calls) the 3D pie chart in
     * the frame's content pane from the given query results.
     * @return the shared chart instance (may be null if creation failed)
     */
    protected JFreeChart populatePieChart (final String title, final Map<String,? extends Number> res)
    {
        final CategoryDataset ds=populateCategoryDataset(title, res);
        final PieDataset pds=
            ((null == title) || (title.length() <= 0) || (null == ds)) ? null : DatasetUtilities.createPieDatasetForRow(ds, title);
        if (null == _chart)
        {
            _chart = ChartFactory.createPieChart3D(title,    // chart title
                        pds,    // dataset
                        false,    // legend
                        true,    // tooltips
                        false    // url(s)
                    );
            final Plot p=(null == _chart) ? null : _chart.getPlot();
            if (p instanceof PiePlot)
                ((PiePlot) p).setLabelGenerator(new StandardPieSectionLabelGenerator("{0} ({1} / {3}) {2}"));
            if (_chart != null)
            {
                final Container ctPane=getContentPane();
                ctPane.add(new ChartPanel(_chart), BorderLayout.CENTER);
            }
        }
        else
        {
            // Chart already on display - just swap the dataset and title.
            final Plot p=_chart.getPlot();
            if (p instanceof PiePlot)
                ((PiePlot) p).setDataset(pds);
            _chart.setTitle(title);
        }

        return _chart;
    }
    /**
     * Convenience wrapper: runs the query and feeds its results into
     * {@link #populatePieChart(String, Map)}. Returns null (after reporting
     * the error, if any) when the query is empty, there is no active
     * connection or the query failed.
     */
    protected JFreeChart populatePieChart (final String title, final String qry)
    {
        if ((null == qry) || (qry.length() <= 0))
            return null;
        if (!isConnected())
            return null;

        try
        {
            final Map<String,? extends Number> res=runPieChartQuery(title, qry);
            return populatePieChart(title, res);
        }
        catch(Exception e)
        {
            _logger.error("populatePieChart(" + title + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            BaseOptionPane.showMessageDialog(this, e);
            return null;
        }
    }
    // Re-runs whatever query is currently selected in the combo-box.
    protected void doRefresh ()
    {
        populatePieChart((null == _pieQrySelector) ? null : _pieQrySelector.getSelectedText(),
                         (null == _pieQrySelector) ? null : _pieQrySelector.getSelectedValue());
    }
private static final Map<String,String> extractQueriesMap (
final Collection<? extends Element> el)
{
if ((null == el) || (el.size() <= 0))
return null;
Map<String,String> ret=null;
for (final Element elem : el)
{
final String key=(null == elem) ? null : elem.getAttribute(Titled.ATTR_NAME),
val=DOMUtils.getElementStringValue(elem);
if ((null == key) || (key.length() <= 0)
|| (null == val) || (val.length() <= 0))
continue;
if (null == ret)
ret = new TreeMap<String,String>(String.CASE_INSENSITIVE_ORDER);
final String qry=val.replace('\n', ' ')
.replace('\t', ' ')
.replaceAll("[ ]+", " ")
.trim(),
prev=ret.put(key, qry);
if ((prev != null) && (prev.length() > 0))
throw new IllegalStateException("Duplicat item for title=" + key);
}
return ret;
}
    // Convenience overloads: extract queries from an element's child
    // elements, or from a document's root element.
    private static final Map<String,String> extractQueriesMap (final Element elem)
    {
        return extractQueriesMap(DOMUtils.extractAllNodes(Element.class, elem, Node.ELEMENT_NODE));
    }
    private static final Map<String,String> extractQueriesMap (final Document doc)
    {
        return extractQueriesMap((null == doc) ? null : doc.getDocumentElement());
    }
/**
 * Loads the built-in pie-chart queries from the bundled XML resource.
 * Returns null (after logging and showing an error dialog) on failure.
 */
private Map<String,String> loadDefaultQueriesMap ()
{
try
{
final ResourcesAnchor ra=getResourcesAnchor();
final Document doc=
(null == ra) ? null : ra.getDocument("built-in-piechart-queries.xml");
return extractQueriesMap(doc);
}
catch(Exception e)
{
_logger.error("Failed (" + e.getClass().getName() + ") to load default piechart queries: " + e.getMessage(), e);
BaseOptionPane.showMessageDialog(this, e);
return null;
}
}
/**
 * Replaces the combo-box content with the given (title -&gt; value) entries.
 * Returns the combo-box unchanged when the map is null/empty, or null when
 * no combo-box was supplied.
 */
private static <V, C extends TypedComboBox<V>> C createQueriesSelection (
final C cb, final Map<String,? extends V> qrysMap)
{
if (null == cb)
return null;
final Collection<? extends Map.Entry<String,? extends V>> ql=
((null == qrysMap) || (qrysMap.size() <= 0)) ? null : qrysMap.entrySet();
if ((null == ql) || (ql.size() <= 0))
return cb;
// Clear any previous items before repopulating.
final int numItems=cb.getItemCount();
if (numItems > 0)
cb.removeAllElements();
for (final Map.Entry<String,? extends V> qe : ql)
{
if (null == qe)
continue;
cb.addItem(qe.getKey(), qe.getValue());
}
return cb;
}
// Lazily-initialized (title -> SQL) map backing the pie-chart query selector.
private Map<String,String> _pieQueriesMap;
// Returns the queries map, loading the built-in defaults on first access.
private Map<String,String> getPieQueriesMap ()
{
if (null == _pieQueriesMap)
_pieQueriesMap = loadDefaultQueriesMap();
return _pieQueriesMap;
}
// Populates the given selector with the current queries map.
private TypedComboBox<String> createPieQueriesSelection (TypedComboBox<String> cb)
{
return createQueriesSelection(cb, getPieQueriesMap());
}
// Combo-box pairing a query title (text) with its SQL (value).
private TypedComboBox<String> _pieQrySelector;
/*
 * @see net.community.apps.common.BaseMainFrame#loadFile(java.io.File, org.w3c.dom.Element)
 */
@Override
public void loadFile (File f, Element dlgElement)
{
// Merges the queries found in the chosen XML file into the current map
// (same-titled entries are overwritten), rebuilds the selector, selects
// the first entry and refreshes the chart.
try
{
final Document doc=DOMUtils.loadDocument(f);
final Map<String,String> newMap=extractQueriesMap(doc),
curMap=getPieQueriesMap();
if ((null == newMap) || (newMap.size() <= 0))
return;
final JComboBox cb;
if (curMap != null)
{
curMap.putAll(newMap);
cb = createQueriesSelection(_pieQrySelector, curMap);
}
else
cb = createQueriesSelection(_pieQrySelector, newMap);
if (cb != null)
cb.setSelectedIndex(0);
doRefresh();
}
catch(Exception e)
{
_logger.error("loadFile(" + f + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
BaseOptionPane.showMessageDialog(this, e);
}
}
/*
 * @see net.community.apps.common.BaseMainFrame#layoutSection(java.lang.String, org.w3c.dom.Element)
 */
@Override
public void layoutSection (String name, Element elem)
throws RuntimeException
{
// Captures the DB-connect dialog definition; all other sections are
// delegated to the base class. Re-specification is rejected.
if ("db-connect-dialog".equalsIgnoreCase(name))
{
if (_connDlgElem != null)
throw new IllegalStateException("layoutSection(" + name + ") re-specified");
_connDlgElem = elem;
}
else
super.layoutSection(name, elem);
}
// Action-command identifiers shared by the menu items and toolbar buttons.
private static final String CONNECT_CMD="connect",
DISCONNECT_CMD="disconnect",
REFRESH_CMD="refresh";
/*
 * @see net.community.apps.common.BaseMainFrame#getActionListenersMap(boolean)
 */
@Override
protected Map<String,? extends ActionListener> getActionListenersMap (boolean createIfNotExist)
{
// Builds (once) the command -> listener map: the inherited exit/about/load
// handlers plus this frame's connect/disconnect/refresh actions.
final Map<String,? extends ActionListener> org=super.getActionListenersMap(createIfNotExist);
if (((org != null) && (org.size() > 0)) || (!createIfNotExist))
return org;
final Map<String,ActionListener> lm=new TreeMap<String,ActionListener>(String.CASE_INSENSITIVE_ORDER);
lm.put(EXIT_CMD, getExitActionListener());
lm.put(ABOUT_CMD, getShowManifestActionListener());
lm.put(LOAD_CMD, getLoadFileListener());
lm.put(CONNECT_CMD, new ActionListener() {
/*
 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
 */
@Override
public void actionPerformed (ActionEvent e)
{
doConnect();
}
});
lm.put(DISCONNECT_CMD, new ActionListener() {
/*
 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
 */
@Override
public void actionPerformed (ActionEvent e)
{
doDisconnect();
}
});
lm.put(REFRESH_CMD, new ActionListener() {
/*
 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
 */
@Override
public void actionPerformed (ActionEvent e)
{
doRefresh();
}
});
// Cache the map so subsequent calls return it via the super implementation.
setActionListenersMap(lm);
return lm;
}
/*
 * @see net.community.apps.common.BaseMainFrame#setMainMenuItemsActionHandlers(net.community.chest.swing.component.menu.MenuItemExplorer)
 */
@Override
protected Map<String,JMenuItem> setMainMenuItemsActionHandlers (MenuItemExplorer ie)
{
// Keep references to the disconnect/refresh items so their enabled state
// can be toggled when the connection state changes.
final Map<String,JMenuItem> im=super.setMainMenuItemsActionHandlers(ie);
_discMenuItem = (null == im) ? null : im.get(DISCONNECT_CMD);
_refreshMenuItem = (null == im) ? null : im.get(REFRESH_CMD);
return im;
}
/*
 * @see net.community.apps.common.BaseMainFrame#layoutComponent()
 */
@Override
public void layoutComponent () throws RuntimeException
{
super.layoutComponent();
final Container ctPane=getContentPane();
// ESC anywhere in the frame closes the window.
final KeyListener kl=new EscapeKeyWindowCloser(this);
addKeyListener(kl);
try
{
final JToolBar b=getMainToolBar();
final Map<String,? extends AbstractButton> hm=setToolBarHandlers(b);
if ((hm != null) && (hm.size() > 0))
{
// Keep references so connect/disconnect can toggle the buttons' state.
_discBtn = hm.get(DISCONNECT_CMD);
_refreshBtn = hm.get(REFRESH_CMD);
}
if (null == _pieQrySelector)
{
// Build the query selector once and redraw the chart on every selection.
_pieQrySelector = createPieQueriesSelection(new TypedComboBox<String>(String.class));
_pieQrySelector.addActionListener(
new TypedComboBoxActionListener<String,TypedComboBox<String>>() {
/*
 * @see net.community.chest.ui.helpers.combobox.TypedComboBoxActionListener#handleSelectedItem(java.awt.event.ActionEvent, net.community.chest.ui.helpers.combobox.TypedComboBox, java.lang.String, java.lang.Object)
 */
@Override
public void handleSelectedItem (ActionEvent e,
TypedComboBox<String> cb,
String text,
String value)
{
populatePieChart(text, value);
}
});
_pieQrySelector.setSelectedIndex(0);
b.add(_pieQrySelector);
}
ctPane.add(b, BorderLayout.NORTH);
}
catch(Exception e)
{
throw ExceptionUtil.toRuntimeException(e);
}
}
/**
 * Parses the command line as a sequence of ("-option" value) pairs.
 * "-file" names an XML file with extra queries (loaded immediately); every
 * other option is fed into the DB access configuration. If the resulting
 * configuration is complete, a connection is attempted right away.
 * @param args initial arguments as received by the (@link #main(String[]))
 * @throws Exception if illegal option(s) encountered
 */
private void processMainArgs (final String... args) throws Exception
{
final int numArgs=
(null == args) ? 0 : args.length;
Map<String,String> valsMap=null;
DBAccessConfig cfg=null;
File extFile=null;
for (int aIndex=0; aIndex < numArgs; aIndex++)
{
final String arg=args[aIndex];
if ((null == arg) || (arg.length() <= 1) || (arg.charAt(0) != '-'))
throw new IllegalArgumentException("Malformed option: " + arg);
aIndex++;	// advance to the option's value
if (aIndex >= numArgs)
throw new IllegalArgumentException("No value provided for option=" + arg);
final String val=args[aIndex];
if ((null == val) || (val.length() <= 0))
throw new IllegalArgumentException("Null/empty value provided for option=" + arg);
if (null == valsMap)
valsMap = new TreeMap<String,String>(String.CASE_INSENSITIVE_ORDER);
// Track every option (case-insensitively) to reject duplicates.
final String prev=valsMap.put(arg, val);
if ((prev != null) && (prev.length() > 0))
throw new IllegalArgumentException("Option=" + arg + " value re-specified");
if ("-file".equalsIgnoreCase(arg))
{
extFile = new File(val);
}
else
{
cfg = getDBAccessConfig();
if (!processDBAccessConfigParameter(cfg, valsMap, arg, val))
throw new IllegalArgumentException("Unknown option: " + arg);
}
}
if (extFile != null)
loadFile(extFile, null);
if (0 == DBAccessConfig.checkDBAccessConfig(cfg))
doConnect(cfg);
}
/**
 * Creates the frame and applies any command-line options.
 * @param args original arguments as received by <I>main</I> entry point
 * @throws Exception if unable to start main frame and application
 */
MainFrame (final String ... args) throws Exception
{
super(args);
processMainArgs(args);
}
}
|
// integer/digitsProduct_cf-45/betterSolutions.js
/**
 * Returns the smallest positive integer whose decimal digits multiply to
 * `product`, or -1 when no such integer exists.
 * Greedy: peel off the largest single-digit factors (9..2) and emit them as
 * the low-order digits, so bigger digits land in less significant places.
 */
function digitsProduct(product) {
  // Special cases: product 0 -> 10 (1*0), product 1 -> 1.
  if (product === 0) return 10;
  if (product === 1) return 1;
  let result = 0;
  let placeValue = 1;
  let digit = 9;
  while (product > 1) {
    if (product % digit === 0) {
      product /= digit;
      result += digit * placeValue;
      placeValue *= 10;
    } else if (--digit === 1) {
      // A prime factor larger than 9 remains: impossible.
      return -1;
    }
  }
  return result;
}
//////////////////////////////////
/**
 * Brute-force variant: scan candidates 1..359999 and return the first whose
 * decimal digits multiply to `product`; -1 when none is found in range.
 */
function digitsProduct(product) {
  "use strict";
  const LIMIT = 600 * 600;
  for (let candidate = 1; candidate < LIMIT; candidate++) {
    let digitsProd = 1;
    for (const ch of String(candidate)) {
      digitsProd *= Number(ch);
    }
    if (digitsProd == product) {
      return candidate;
    }
  }
  return -1;
}
//////////////////////////////////
|
package org.psem2m.isolates.loggers.impl;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import org.psem2m.isolates.loggers.ILoggingCondition;
import org.psem2m.utilities.CXStringUtils;
import org.psem2m.utilities.json.JSONException;
import org.psem2m.utilities.json.JSONObject;
/**
* CASE 0000222: Logging conditions to control the content of the trace log
*
* Creation of a logging condition
*
* <pre>
 * // declare a Logging condition identified by "ISubSystemStuff"
* pLoggingConditions.newLoggingCondition(ISubSystemStuff.class,"SubSystem Stuff logging condition");
*
* </pre>
*
 * Usage of the logging condition using a method "isLoggingConditionDebugOn()"
 * which retrieves the ILoggingConditions service.
*
* The method "logActivityDebug()" is also a method implemented in an Abstract
* class to simplify the usage.
*
* <pre>
* if (isLoggingConditionDebugOn(ISubSystemA.class){
*
* logActivityDebug(this, "myMethod", "My message with an arg [%s]", aArg);
* }
* </pre>
*
* @author ogattaz
*
*/
public class CLoggingCondition implements ILoggingCondition {
private static final Level DEFAULT_LEVEL = Level.ALL;
private static final String PROP_CMT = "cmt";
private static final String PROP_ID = "id";
private static final String PROP_LEVEL = "level";
private final String pComment;
private final AtomicReference<Level> pConfitionLevel = new AtomicReference<Level>(
DEFAULT_LEVEL);
private final String pId;
/**
* @param aDef
* @throws JSONException
*/
public CLoggingCondition(final JSONObject aDef) throws JSONException {
this(aDef.getString(PROP_ID), aDef.getString(PROP_CMT));
setLevel(aDef.getString(PROP_LEVEL));
}
/**
* @param aId
* @param aComments
*/
public CLoggingCondition(final String aId, final String... aComments) {
super();
pId = aId;
// convert to a sString even the aComments is null or empty
pComment = CXStringUtils.stringTableToString(aComments, "\n");
}
/*
* (non-Javadoc)
*
* @see
* org.psem2m.utilities.IXDescriber#addDescriptionInBuffer(java.lang.Appendable
* )
*/
@Override
public Appendable addDescriptionInBuffer(Appendable aBuffer) {
CXStringUtils.appendKeyValInBuff(aBuffer, PROP_ID, getId());
CXStringUtils.appendKeyValInBuff(aBuffer, PROP_LEVEL, getLevel()
.getName());
CXStringUtils.appendKeyValInBuff(aBuffer, PROP_CMT, getComment()
.replace('\n', 'ยง'));
return aBuffer;
}
/**
* @return
*/
CLoggingCondition cloneMe() {
final CLoggingCondition wNewLC = new CLoggingCondition(pId,
pComment.split("\n"));
wNewLC.setLevel(getLevel());
return wNewLC;
}
@Override
public String getComment() {
return pComment;
}
@Override
public String getId() {
return pId;
}
/*
* (non-Javadoc)
*
* @see org.psem2m.isolates.loggers.ILoggingCondition#getLevel()
*/
@Override
public Level getLevel() {
return pConfitionLevel.get();
}
/*
* (non-Javadoc)
*
* @see
* org.psem2m.isolates.loggers.ILoggingCondition#isOn(java.util.logging.
* Level)
*/
@Override
public boolean isOn(final Level aLevel) {
final Level wCurrentLevel = pConfitionLevel.get();
if (wCurrentLevel == Level.OFF) {
return false;
}
// eg FINE (500) <= INFO (800)
return wCurrentLevel.intValue() <= aLevel.intValue();
}
/*
* (non-Javadoc)
*
* @see
* org.psem2m.isolates.loggers.ILoggingCondition#setConditionLevel(java.
* util.logging.Level)
*/
@Override
public Level setLevel(final Level aLevel) {
return pConfitionLevel.getAndSet(aLevel);
}
/**
* @param aLevelName
* @return
*/
private Level setLevel(final String aLevelName) {
return setLevel(Level.parse(aLevelName));
}
/*
* (non-Javadoc)
*
* @see org.psem2m.utilities.IXDescriber#toDescription()
*/
@Override
public String toDescription() {
return addDescriptionInBuffer(new StringBuilder()).toString();
}
/*
* (non-Javadoc)
*
* @see org.psem2m.isolates.loggers.ILoggingCondition#toJson()
*/
@Override
public JSONObject toJson() throws JSONException {
final JSONObject wObj = new JSONObject();
wObj.put(PROP_ID, getId());
wObj.put(PROP_CMT, getComment());
wObj.put(PROP_LEVEL, getLevel().getName());
return wObj;
}
}
|
#!/usr/bin/env bash
# Runs the test suites, optionally under coverage (COVERAGE=1).
set -e

PYCMD=${PYCMD:="python"}
if [[ $COVERAGE -eq 1 ]]; then
  coverage erase
  # Note: trailing space is intentional -- the script name is appended below.
  PYCMD="coverage run --parallel-mode --source torch "
  echo "coverage flag found. Setting python command to: \"$PYCMD\""
fi

pushd "$(dirname "$0")"

echo "Running core tests"
$PYCMD test_core.py "$@"

echo "Running utils tests"
# BUG FIX: this step previously re-ran test_core.py a second time.
$PYCMD test_utils.py "$@"

popd
|
/**
 * Immutable wrapper around an integer persistent key for a value dictionary.
 * Two instances compare equal exactly when their wrapped keys are equal.
 */
class ValueDictionaryPersistentKey {
 public:
  // Wraps the given integer key; the value never changes afterwards.
  ValueDictionaryPersistentKey(int key) : key_(key) {}

  // Returns the wrapped persistent key.
  int getKey() const { return key_; }

  // Equality: compare the wrapped integers.
  bool operator==(const ValueDictionaryPersistentKey& other) const {
    return key_ == other.key_;
  }

 private:
  int key_;  // immutable after construction (no setter is exposed)
};
def non_negative_sum_of_squares(arr):
    """Return the sum of squares of the non-negative numbers in ``arr``.

    Negative values are skipped; an empty iterable yields 0.
    """
    return sum(value * value for value in arr if value >= 0)
# Demo: squares of 1..5 sum to 55.
arr = [1,2,3,4,5]
print(non_negative_sum_of_squares(arr)) # 55
// ODFAEG/extlibs/headers/MySQL/conncpp/PreparedStatement.hpp
/************************************************************************************
Copyright (C) 2020 MariaDB Corporation AB
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not see <http://www.gnu.org/licenses>
or write to the Free Software Foundation, Inc.,
51 Franklin St., Fifth Floor, Boston, MA 02110, USA
*************************************************************************************/
#ifndef _PREPARESTATEMENT_H_
#define _PREPARESTATEMENT_H_
#include "buildconf.hpp"
#include "SQLString.hpp"
#include "ParameterMetaData.hpp"
#include "Statement.hpp"
namespace sql
{
/**
 * JDBC-like abstract prepared statement: a precompiled SQL command whose
 * '?' placeholders are bound via the typed setters below before execution.
 * Concrete implementations are connector-specific; instances are not copyable.
 * Parameter indexes are 1-based, matching JDBC conventions.
 */
class MARIADB_EXPORTED PreparedStatement: virtual public Statement {

  // Non-copyable: a statement owns server-side state.
  PreparedStatement(const PreparedStatement &);
  void operator=(PreparedStatement &);

public:
  PreparedStatement() {}
  virtual ~PreparedStatement(){}

  /* Execution */
  virtual bool execute()=0;
  virtual bool execute(const SQLString& sql)=0;
  virtual ParameterMetaData* getParameterMetaData()=0;
  virtual int32_t executeUpdate()=0;
  virtual int64_t executeLargeUpdate()=0;
  virtual ResultSet* executeQuery()=0;
  virtual void addBatch()=0;
  virtual void clearParameters()=0;

  /* Parameter setters */
  virtual void setNull(int32_t parameterIndex,int32_t sqlType)=0;
  virtual void setNull(int32_t parameterIndex,int32_t sqlType,const SQLString& typeName)=0;
  virtual void setBoolean(int32_t parameterIndex, bool value)=0;
  virtual void setByte(int32_t parameterIndex, int8_t bit)=0;
  virtual void setShort(int32_t parameterIndex, int16_t value)=0;
  virtual void setString(int32_t parameterIndex, const SQLString& str)=0;
  /* We need either array length passed along with pointer, or make it a vector. Passing vector doesn't feel good */
  virtual void setBytes(int32_t parameterIndex, sql::bytes* bytes)=0;
  virtual void setInt(int32_t column, int32_t value)=0;
  virtual void setLong(int32_t parameterIndex, int64_t value)=0;
  virtual void setInt64(int32_t parameterIndex, int64_t value)=0;
  virtual void setUInt64(int32_t parameterIndex, uint64_t value)=0;
  virtual void setUInt(int32_t parameterIndex, uint32_t value)=0;
  virtual void setFloat(int32_t parameterIndex, float value)=0;
  virtual void setDouble(int32_t parameterIndex, double value)=0;
  virtual void setBigInt(int32_t column, const SQLString& value)=0;
  virtual void setBlob(int32_t parameterIndex, std::istream* inputStream,const int64_t length)=0;
  virtual void setBlob(int32_t parameterIndex, std::istream* inputStream)=0;
  virtual void setDateTime(int32_t parameterIndex, const SQLString& dt)=0;

#ifdef MAKES_SENSE_TO_ADD_TO_EASE_SETTING_NULL_AND_COPY_JDBC_BEHAVIOR
  virtual void setBoolean(int32_t parameterIndex, bool *value)=0;
  virtual void setByte(int32_t parameterIndex, int8_t* bit)=0;
  virtual void setShort(int32_t parameterIndex, const int16_t* value)=0;
  virtual void setString(int32_t parameterIndex, const SQLString* str)=0;
  /* We need either array length passed along with pointer, or make it a vector. Passing vector doesn't feel good */
  virtual void setInt(int32_t column, int32_t* value)=0;
  virtual void setLong(int32_t parameterIndex, int64_t* value)=0;
  virtual void setFloat(int32_t parameterIndex, float* value)=0;
  virtual void setDouble(int32_t parameterIndex, double* value)=0;
#endif

#ifdef MAYBE_IN_NEXT_VERSION
  virtual void setRowId(int32_t parameterIndex, const RowId* rowid)=0;
  /* Pass refs or pointers? */
  virtual void setCharacterStream(int32_t parameterIndex, const std::istringstream& reader, int32_t length)=0;
  virtual void setCharacterStream(int32_t parameterIndex, const std::istringstream& reader, int64_t length)=0;
  virtual void setCharacterStream(int32_t parameterIndex, const std::istringstream& reader)=0;
  virtual void setBinaryStream(int32_t parameterIndex, std::istream& stream, int64_t length)=0;
  virtual void setBinaryStream(int32_t parameterIndex, std::istream& stream)=0;
  virtual void setBinaryStream(int32_t parameterIndex, std::istream& stream, int32_t length)=0;
  virtual void setBlob(int32_t parameterIndex,const Blob& blob)=0;
  virtual void setClob(int32_t parameterIndex,const Clob& clob)=0;
  virtual void setClob(int32_t parameterIndex,const std::istringstream& reader,const int64_t length)=0;
  virtual void setClob(int32_t parameterIndex,const std::istringstream& reader)=0;
  virtual void setNString(int32_t parameterIndex,const SQLString& value)=0;
  virtual void setAsciiStream(int32_t parameterIndex, std::istream& stream,const int64_t length)=0;
  virtual void setAsciiStream(int32_t parameterIndex, std::istream& stream)=0;
  virtual void setAsciiStream(int32_t parameterIndex, std::istream& stream, int32_t length)=0;
  virtual void setDate(int32_t parameterIndex,const Date& date)=0;
  virtual void setTime(int32_t parameterIndex,const Time time)=0;
  virtual void setTimestamp(int32_t parameterIndex,const Timestamp timestamp)=0;
  /* BUG FIX: an '#endif' was glued onto the end of the next declaration,
   * which would unbalance the conditional blocks if this section were ever
   * enabled; the section is closed by the single '#endif' below. */
  virtual void setNCharacterStream(int32_t parameterIndex,const std::istringstream& value,const int64_t length)=0;
  virtual void setNCharacterStream(int32_t parameterIndex,const std::istringstream& value)=0;
  virtual void setNClob(int32_t parameterIndex, const NClob& value)=0;
  virtual void setNClob(int32_t parameterIndex, const std::istringstream& reader, const int64_t length)=0;
  virtual void setNClob(int32_t parameterIndex, const std::istringstream& reader)=0;
#endif

#ifdef JDBC_SPECIFIC_TYPES_IMPLEMENTED
  virtual void setBigDecimal(int32_t parameterIndex, const BigDecimal& bigDecimal)=0;
  virtual void setRef(int32_t parameterIndex, const Ref& ref)=0;
  virtual void setArray(int32_t parameterIndex,const sql::Array& array)=0;
  virtual void setDate(int32_t parameterIndex,const Date date,const Calendar& cal)=0;
  virtual void setTime(int32_t parameterIndex,const Time time,const Calendar& cal)=0;
  virtual void setTimestamp(int32_t parameterIndex, const Timestamp timestamp, const Calendar& cal)=0;
  virtual void setURL(int32_t parameterIndex, const URL& url)=0;
  virtual void setSQLXML(int32_t parameterIndex, const SQLXML& xmlObject)=0;
  virtual void setObject(int32_t parameterIndex, const sql::Object* obj, int32_t targetSqlType, int32_t scaleOrLength)=0;
  virtual void setObject(int32_t parameterIndex, const sql::Object* obj, int32_t targetSqlType)=0;
  virtual void setObject(int32_t parameterIndex, const sql::Object* obj)=0;
  virtual void setObject(int32_t parameterIndex, sql::Object* obj, SQLType* targetSqlType, int32_t scaleOrLength)=0;
  virtual void setObject(int32_t parameterIndex, sql::Object* obj, SQLType* targetSqlType)=0;
#endif
};
}
#endif
|
/**
* @fileoverview This file is generated by the Angular 2 template compiler.
* Do not edit.
* @suppress {suspiciousCode,uselessCode,missingProperties}
*/
/* tslint:disable */
import * as import0 from '@angular/core/src/linker/ng_module_factory';
import * as import1 from '../../../app/home/home.module';
import * as import2 from '@angular/router/src/router_module';
import * as import3 from '../../../app/home/home-routing.module';
import * as import4 from '@angular/core/src/di/injector';
import * as import5 from './home.component.ngfactory';
import * as import6 from '../../../app/home/home.component';
import * as import7 from '@angular/router/src/router_config_loader';
// Compiler-generated DI container for HomeModule: caches a single instance
// of each provider/module and resolves DI tokens in getInternal().
class HomeModuleInjector extends import0.NgModuleInjector<import1.HomeModule> {
_RouterModule_0:import2.RouterModule;
_HomeRoutingModule_1:import3.HomeRoutingModule;
_HomeModule_2:import1.HomeModule;
__ROUTES_3:any[];
constructor(parent:import4.Injector) {
super(parent,[import5.HomeComponentNgFactory],([] as any[]));
}
// Lazily-built ROUTES provider value: '/home' -> HomeComponent.
get _ROUTES_3():any[] {
if ((this.__ROUTES_3 == null)) { (this.__ROUTES_3 = [[{
path: 'home',
component: import6.HomeComponent
}
]]); }
return this.__ROUTES_3;
}
// Eagerly instantiates the modules and returns the HomeModule instance.
createInternal():import1.HomeModule {
this._RouterModule_0 = new import2.RouterModule(this.parent.get(import2.ROUTER_FORROOT_GUARD,(null as any)));
this._HomeRoutingModule_1 = new import3.HomeRoutingModule();
this._HomeModule_2 = new import1.HomeModule();
return this._HomeModule_2;
}
// Token lookup: returns the cached instance or notFoundResult.
getInternal(token:any,notFoundResult:any):any {
if ((token === import2.RouterModule)) { return this._RouterModule_0; }
if ((token === import3.HomeRoutingModule)) { return this._HomeRoutingModule_1; }
if ((token === import1.HomeModule)) { return this._HomeModule_2; }
if ((token === import7.ROUTES)) { return this._ROUTES_3; }
return notFoundResult;
}
destroyInternal():void {
}
}
export const HomeModuleNgFactory:import0.NgModuleFactory<import1.HomeModule> = new import0.NgModuleFactory(HomeModuleInjector,import1.HomeModule);
#!/bin/bash
# Stage, commit and push everything in the working tree.
# Usage: ./push.sh [commit-message]   (defaults to "Kommentar")
#set /p ccomment="Enter Comment: "
#echo "git"
#echo %ccomment%
git add .
#git commit -m %ccomment%
# Generalized: take the message from the first argument when given,
# keeping the original "Kommentar" as the default.
git commit -m "${1:-Kommentar}"
git push
|
from typing import List
def generate_for_loop_command(items: List[str]) -> str:
    """Return the shellpen DSL commands that open a ``for`` loop.

    NOTE(review): ``items`` is currently unused -- the emitted DSL always
    iterates over ``$*``. Confirm whether the item list was meant to be
    interpolated into the ``for`` line.
    """
    commands = (
        "!fn --shellpen-private writeDSL writeln 'for $*'",
        "!fn --shellpen-private writeDSL writeln 'do'",
        "!fn --shellpen-private writeDSL --push 'done'",
    )
    return "\n".join(commands)
import { defaultReactVersion } from './shared/constants.ts'
/** `VERSION` managed by https://deno.land/x/publish */
export const VERSION = '0.3.0-alpha.32'
/** `prepublish` will be invoked before publish */
export async function prepublish(version: string) {
  // Build the compiler first; only on success rewrite import_map.json so
  // the aleph/* entries point at the release tag being published.
  // NOTE(review): Deno.run is deprecated in newer Deno releases in favor of
  // Deno.Command -- confirm the target Deno version before migrating.
  const p = Deno.run({
    cmd: ['deno', 'run', '-A', 'build.ts'],
    cwd: './compiler',
    stdout: 'inherit',
    stderr: 'inherit',
  })
  const { success } = await p.status()
  p.close()
  if (success) {
    const data = await Deno.readTextFile('./import_map.json')
    const importMap = JSON.parse(data)
    // Pin framework and react entries to the versions being released.
    Object.assign(importMap.imports, {
      'aleph/': `https://deno.land/x/aleph@v${version}/`,
      'framework': `https://deno.land/x/aleph@v${version}/framework/core/mod.ts`,
      'framework/react': `https://deno.land/x/aleph@v${version}/framework/react/mod.ts`,
      'react': `https://esm.sh/react@${defaultReactVersion}`,
      'react-dom': `https://esm.sh/react-dom@${defaultReactVersion}`,
    })
    await Deno.writeTextFile(
      './import_map.json',
      JSON.stringify(importMap, undefined, 2)
    )
  }
  return success
}
|
/**
* https://github.com/larryli/u8g2_wqy
*/
#ifndef _U8G2_WQY_H
#define _U8G2_WQY_H
#include <stdint.h>
#include <stddef.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifndef U8G2_USE_LARGE_FONTS
#define U8G2_USE_LARGE_FONTS
#endif
#ifndef U8X8_FONT_SECTION
#ifdef __GNUC__
# define U8X8_SECTION(name) __attribute__ ((section (name)))
#else
# define U8X8_SECTION(name)
#endif
#if defined(__GNUC__) && defined(__AVR__)
# define U8X8_FONT_SECTION(name) U8X8_SECTION(".progmem." name)
#endif
#if defined(ESP8266)
# define U8X8_FONT_SECTION(name) __attribute__((section(".text." name)))
#endif
#ifndef U8X8_FONT_SECTION
# define U8X8_FONT_SECTION(name)
#endif
#endif
#ifndef U8G2_FONT_SECTION
#define U8G2_FONT_SECTION(name) U8X8_FONT_SECTION(name)
#endif
extern const uint8_t u8g2_font_wqy12_t_chinese1[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_chinese1");
extern const uint8_t u8g2_font_wqy12_t_chinese2[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_chinese2");
extern const uint8_t u8g2_font_wqy12_t_chinese3[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_chinese3");
extern const uint8_t u8g2_font_wqy12_t_gb2312[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_gb2312");
extern const uint8_t u8g2_font_wqy12_t_gb2312a[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_gb2312a");
extern const uint8_t u8g2_font_wqy12_t_gb2312b[] U8G2_FONT_SECTION("u8g2_font_wqy12_t_gb2312b");
extern const uint8_t u8g2_font_wqy13_t_chinese1[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_chinese1");
extern const uint8_t u8g2_font_wqy13_t_chinese2[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_chinese2");
extern const uint8_t u8g2_font_wqy13_t_chinese3[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_chinese3");
extern const uint8_t u8g2_font_wqy13_t_gb2312[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_gb2312");
extern const uint8_t u8g2_font_wqy13_t_gb2312a[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_gb2312a");
extern const uint8_t u8g2_font_wqy13_t_gb2312b[] U8G2_FONT_SECTION("u8g2_font_wqy13_t_gb2312b");
extern const uint8_t u8g2_font_wqy14_t_chinese1[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_chinese1");
extern const uint8_t u8g2_font_wqy14_t_chinese2[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_chinese2");
extern const uint8_t u8g2_font_wqy14_t_chinese3[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_chinese3");
extern const uint8_t u8g2_font_wqy14_t_gb2312[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_gb2312");
extern const uint8_t u8g2_font_wqy14_t_gb2312a[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_gb2312a");
extern const uint8_t u8g2_font_wqy14_t_gb2312b[] U8G2_FONT_SECTION("u8g2_font_wqy14_t_gb2312b");
extern const uint8_t u8g2_font_wqy15_t_chinese1[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_chinese1");
extern const uint8_t u8g2_font_wqy15_t_chinese2[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_chinese2");
extern const uint8_t u8g2_font_wqy15_t_chinese3[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_chinese3");
extern const uint8_t u8g2_font_wqy15_t_gb2312[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_gb2312");
extern const uint8_t u8g2_font_wqy15_t_gb2312a[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_gb2312a");
extern const uint8_t u8g2_font_wqy15_t_gb2312b[] U8G2_FONT_SECTION("u8g2_font_wqy15_t_gb2312b");
extern const uint8_t u8g2_font_wqy16_t_chinese1[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_chinese1");
extern const uint8_t u8g2_font_wqy16_t_chinese2[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_chinese2");
extern const uint8_t u8g2_font_wqy16_t_chinese3[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_chinese3");
extern const uint8_t u8g2_font_wqy16_t_gb2312[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_gb2312");
extern const uint8_t u8g2_font_wqy16_t_gb2312a[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_gb2312a");
extern const uint8_t u8g2_font_wqy16_t_gb2312b[] U8G2_FONT_SECTION("u8g2_font_wqy16_t_gb2312b");
#ifdef __cplusplus
}
#endif
#endif
|
<?php
// Print the first element of the list.
// array_shift() also removes it from the array, exactly as before.
$numbers = [1, 2, 3, 4, 5];
$head = array_shift($numbers);
echo $head;
?>
cleaned_data = ["John 340 12th Street", "James 900 5th Avenue"] |
"""Plugins for starting Vumi workers from twistd."""
from vumi.servicemaker import (VumiWorkerServiceMaker,
DeprecatedStartWorkerServiceMaker)
# Having instances of IServiceMaker present magically announces the
# service makers to twistd.
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
vumi_worker = VumiWorkerServiceMaker()
start_worker = DeprecatedStartWorkerServiceMaker()
|
#!/bin/bash
# Copyright (C) Microsoft Corporation. All rights reserved.
#
# Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
# royalty-free right to use, copy, and modify the software code provided by us
# ('Software Code'). You may not sublicense the Software Code or any use of it
# (except to your affiliates and to vendors to perform work on your behalf)
# through distribution, network access, service agreement, lease, rental, or
# otherwise. This license does not purport to express any claim of ownership over
# data you may have shared with Microsoft in the creation of the Software Code.
# Unless applicable law gives you more rights, Microsoft reserves all other
# rights not expressly granted herein, whether by implication, estoppel or
# otherwise.
#
# THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
set -eux

# Create the CI conda environment from the checked-in dependency spec.
conda env create -f lmtmlops/ci_dependencies.yml
# NOTE(review): 'conda activate' normally requires 'conda init' / sourcing
# conda.sh in non-interactive shells -- confirm the CI shell is initialized.
conda activate mlopspython_ci
|
import ast
def extract_dependencies(setup_content):
    """Parse ``setup.py`` source text and collect the declared dependencies.

    Looks for module-level ``install_requires = [...]`` and
    ``extras_require = {...}`` assignments.

    Returns a dict with at least the keys ``'regular'`` (install_requires),
    ``'testing'`` and ``'dev'``. Any other ``extras_require`` category is
    added under its own key (BUG FIX: previously an unknown category such
    as ``'docs'`` raised ``KeyError``). Uses ``ast.Constant`` instead of
    the removed-in-3.12 ``ast.Str``.
    """
    dependencies = {
        'regular': [],
        'testing': [],
        'dev': []
    }

    def _string_items(list_node):
        # Yield the literal strings of an ast.List node, skipping non-strings.
        for item in list_node.elts:
            if isinstance(item, ast.Constant) and isinstance(item.value, str):
                yield item.value

    # Single pass over the module picks up both assignments of interest.
    for node in ast.walk(ast.parse(setup_content)):
        if not (isinstance(node, ast.Assign)
                and len(node.targets) == 1
                and isinstance(node.targets[0], ast.Name)):
            continue
        target = node.targets[0].id
        if target == 'install_requires' and isinstance(node.value, ast.List):
            dependencies['regular'].extend(_string_items(node.value))
        elif target == 'extras_require' and isinstance(node.value, ast.Dict):
            for key, value in zip(node.value.keys, node.value.values):
                if (isinstance(key, ast.Constant)
                        and isinstance(key.value, str)
                        and isinstance(value, ast.List)):
                    dependencies.setdefault(key.value, []).extend(
                        _string_items(value))
    return dependencies
from django.conf import settings # import the settings file
def meta(request):
    """Context processor exposing branding/scan settings to all templates."""
    names = ('COMPANY_NAME', 'PROJECT_NAME', 'MALICIOUS', 'SUSPICIOUS')
    return {name: getattr(settings, name) for name in names}
|
clj-kondo --lint "$(lein classpath)" | grep "dev.russell.batboy"
|
// thomastay/collectable
import { SortedMapStructure } from '../internals';
import { has as _has } from '@collectable/map';
/**
 * Returns true when `key` is present in the sorted map, delegating to the
 * underlying indexed `@collectable/map` structure (`map._indexed`).
 */
export function has<K, V, U = any> (key: K, map: SortedMapStructure<K, V, U>): boolean {
  return _has(key, map._indexed);
}
const test = require('ava');
const cloudinaryResizeImage = require('./cloudinary-resize-image');
// NOTE(review): the base64 PNG fixture was redacted to '<KEY>' in this
// snippet -- restore the real data URI before running.
const redPng = '<KEY>';
// Verifies the wrapper rejects when no Cloudinary API key is configured.
test('no Cloudinary API key ', t=> {
t.plan(1)
return cloudinaryResizeImage('png',redPng,500)
.then(results => {
t.fail('should not succeed without API key')
})
.catch(error => {
t.truthy(error.message.match(/Must supply api_key/))
});
});
package com.roadrover.sdk.utils;
import android.content.Context;
import android.text.TextUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* ๆไปถ็ๅทฅๅ
ท็ฑป
* ๅฎ็ฐๅๆไปถ็ธๅ
ณ็ๆนๆณ๏ผ็ฎๅไธป่ฆๅชๅฎ็ฐไบๆไปถๆท่ด็ๆนๆณ
* @author bin.xie
* @date 2016/4/25
*/
public class FileUtils {
public FileUtils() {
// TODO Auto-generated constructor stub
}
/** Callback reporting copy progress: total bytes expected vs. bytes done so far. */
public interface FileOptionProgressCallback {
void onOptionProgress(long totalSize, long proSizes);
}
/**
 * Copies a file bundled under assets/ to local storage; the destination
 * directory is /data/data/&lt;package&gt;/files/assets/. If the destination
 * file already exists it is NOT copied again.
 * @param context context used to resolve the files dir and the assets
 * @param assetsFileName name of the source file under assets/
 * @param destFileName name of the destination file
 * @return the destination path on success, empty string on failure
 */
public static String copyAssetsFileToLocal(Context context, String assetsFileName, String destFileName) {
if (context == null) {
return "";
}
String dir = context.getFilesDir().toString() + "/assets/";
File dirFile = new File(dir);
if (!dirFile.exists()) {
dirFile.mkdirs();
}
String destFilePath = dir + destFileName;
if (!(new File(destFilePath)).exists()) { // destination missing: copy it over from assets
try {
if (fileChannelCopy(context.getAssets().open(assetsFileName), destFilePath)) {
return destFilePath;
} else {
return "";
}
} catch (IOException e) {
e.printStackTrace();
return "";
}
}
return destFilePath;
}
/**
* ไฝฟ็จๆไปถ้้็ๆนๅผๅคๅถๆไปถ
* @param in
* @param destPath
* @return
*/
public static boolean fileChannelCopy(InputStream in, String destPath) {
if (in == null || TextUtils.isEmpty(destPath)) {
return false;
}
OutputStream os = null;
try {
os = new FileOutputStream(destPath);
byte[] buffer = new byte[4096];
int len;
while((len = in.read(buffer)) != -1) {
os.write(buffer, 0, len);
}
return true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (os != null) {
os.flush();
os.close();
}
} catch (IOException e) {
e.printStackTrace();
}
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return false;
}
/**
* ๆท่ดๆไปถ
* @param srcPath
* @param destPath
* @return
*/
public static boolean copyFile(String srcPath, String destPath) {
return copyFile(srcPath, destPath, true);
}
public static boolean copyFile(String srcPath, String destPath, boolean overlay) {
return copyFile(srcPath, destPath, overlay, null);
}
/**
* ๆท่ดๆไปถ
* @param srcPath ๅ็ฎๅฝ
* @param destPath ็ฎๆ ็ฎๅฝ
* @param overlay ่ฆ็ๆไปถ
* @throws Exception
*/
public static boolean copyFile(String srcPath, String destPath, boolean overlay, FileOptionProgressCallback callback) {
if (TextUtils.isEmpty(srcPath) || TextUtils.isEmpty(destPath)) {
return false;
}
File srcFile = new File(srcPath);
// ๅคๆญๆบๆไปถๆฏๅฆๅญๅจ
if (!srcFile.exists()) {
Logcat.d("srcFile:" + srcPath + " not exists!");
return false;
} else if (!srcFile.isFile()) {
Logcat.d("srcFile:" + srcPath + " not file!");
return false;
}
// ๅคๆญ็ฎๆ ๆไปถๆฏๅฆๅญๅจ
File destFile = new File(destPath);
if (destFile.exists()) {
// ๅฆๆ็ฎๆ ๆไปถๅญๅจๅนถๅ
่ฎธ่ฆ็
if (overlay) {
// ๅ ้คๅทฒ็ปๅญๅจ็็ฎๆ ๆไปถ๏ผๆ ่ฎบ็ฎๆ ๆไปถๆฏ็ฎๅฝ่ฟๆฏๅไธชๆไปถ
if (TextUtils.equals(srcPath, destPath)) { // ๆ้คๅ็ฎๅฝไธๅคๅถ
return true;
}
new File(destPath).delete();
}
} else {
// ๅฆๆ็ฎๆ ๆไปถๆๅจ็ฎๅฝไธๅญๅจ๏ผๅๅๅปบ็ฎๅฝ
if (!destFile.getParentFile().exists()) {
// ็ฎๆ ๆไปถๆๅจ็ฎๅฝไธๅญๅจ
if (!destFile.getParentFile().mkdirs()) {
// ๅคๅถๆไปถๅคฑ่ดฅ๏ผๅๅปบ็ฎๆ ๆไปถๆๅจ็ฎๅฝๅคฑ่ดฅ
return false;
}
}
}
long total = srcFile.length();
long pro = 0;
FileInputStream in = null;
FileOutputStream out = null;
int byteread = 0; // ่ฏปๅ็ๅญ่ๆฐ
try {
in = new FileInputStream(srcPath);
out = new FileOutputStream(destPath);
byte[] buffer = new byte[8192];
while ((byteread = in.read(buffer)) != -1) {
out.write(buffer, 0, byteread);
if (callback != null) {
pro += byteread;
callback.onOptionProgress(total, pro);
}
}
return true;
} catch (FileNotFoundException e) {
return false;
} catch (IOException e) {
return false;
} finally {
try {
if (out != null)
out.close();
} catch (IOException e) {
e.printStackTrace();
}
try {
if (in != null)
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* ๅ ้คๆไปถ
* @param file
* @return
*/
public static boolean deleteFile(File file) {
if (file != null) {
if (file.isDirectory()) {
File[] childs = file.listFiles();
for (File child : childs) {
deleteFile(child);
}
}
return file.delete();
}
return false;
}
/**
* ๅคๆญๆไปถๆฏๅฆๅญๅจ
* @param strFile
* @return
*/
public static boolean fileIsExists(String strFile) {
try {
File f = new File(strFile);
if(!f.exists()){
return false;
}
} catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
/**
* ไปๆไปถ้้ข่ฏปๅ readSize ๅคงๅฐ็ๅญ็ฌฆ
* @param fileName ๆไปถๅ
* @param readSize ่ฏปๅๅคงๅฐ
* @return ่ฟๅ่ฏปๅๅ
ๅฎน
*/
public static String readFile(String fileName, int readSize) {
if (TextUtils.isEmpty(fileName) || readSize < 0) {
Logcat.w("fileName:" + fileName + " readSize:" + readSize);
return "";
}
File file = new File(fileName);
if (!file.exists() || !file.canRead()) {
// Logcat.w("file:" + fileName + " not exists or not canRead!");
return "";
}
InputStream in = null;
try {
in = new FileInputStream(file);
byte[] buffer = new byte[readSize];
in.read(buffer);
String str = new String(buffer);
// Logcat.d(fileName + ": " + str);
return str;
} catch (Exception e){
Logcat.e("exception at read file " + fileName);
e.printStackTrace();
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return "";
}
/**
* ๅพๆไปถ้้ขๅไธไธชๅญ็ฌฆ
* @param filePath ๆๅฎ่ทฏๅพ
* @param value ๅ็ๅญ็ฌฆ
*/
public static void writeFile(String filePath, String value) {
if (TextUtils.isEmpty(filePath) || value == null) {
Logcat.w("filePath:" + filePath + " value:" + value);
return ;
}
OutputStream out = null;
try {
out = new FileOutputStream(filePath);
byte[] bytes = value.getBytes();
out.write(bytes);
} catch (IOException e) {
Logcat.e("exception at write register file");
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
|
<filename>eventuate-tram-messaging-proxy-service/src/main/java/io/eventuate/tram/messaging/proxy/service/SubscriptionRequestManager.java
package io.eventuate.tram.messaging.proxy.service;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.function.Consumer;
/**
 * Manages subscription-request nodes in ZooKeeper via Curator.
 *
 * Each subscription instance is stored as a TTL node under {@code path};
 * {@link #touch} refreshes a node's TTL, {@link #subscribe} watches the
 * tree for nodes being added or removed.
 */
public class SubscriptionRequestManager {
  private String path;

  private Logger logger = LoggerFactory.getLogger(getClass());

  private CuratorFramework curatorFramework;
  private TreeCache treeCache;
  private int ttl;

  public SubscriptionRequestManager(CuratorFramework curatorFramework, String path, int ttl) {
    this.path = path;
    this.curatorFramework = curatorFramework;
    this.ttl = ttl;
  }

  /**
   * Start watching the subscription tree. Any previously started watcher is
   * closed first, so repeated calls are safe.
   *
   * @param nodeAddedCallback   invoked with the deserialized info of an added node
   * @param nodeRemovedCallback invoked with the deserialized info of a removed node
   */
  public void subscribe(Consumer<SubscriptionInfo> nodeAddedCallback, Consumer<SubscriptionInfo> nodeRemovedCallback) {
    if (treeCache != null) {
      treeCache.close();
    }

    treeCache = new TreeCache(curatorFramework, path);

    treeCache.getListenable().addListener((client, event) -> {
      Consumer<SubscriptionInfo> callback;

      switch (event.getType()) {
        case NODE_ADDED: {
          callback = nodeAddedCallback;
          break;
        }
        case NODE_REMOVED: {
          callback = nodeRemovedCallback;
          break;
        }
        default: return; // connection/other events carry no subscription data
      }

      // Skip structural nodes that carry no payload (e.g. parent containers).
      if (event.getData().getData() != null && event.getData().getData().length != 0) {
        callback.accept(SubscriptionUtils.deserializeSubscriptionInfo(event.getData().getData()));
      }
    });

    try {
      treeCache.start();
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
      throw new RuntimeException(e);
    }
  }

  /**
   * Refresh the TTL of an existing subscription node by rewriting its own
   * data. A missing node is silently ignored.
   */
  public void touch(String subscriptionInstanceId) {
    String path = pathForSubscriptionRequest(subscriptionInstanceId);

    try {
      curatorFramework
              .create()
              .orSetData()
              .withTtl(ttl)
              .creatingParentContainersIfNeeded()
              .withMode(CreateMode.PERSISTENT_WITH_TTL)
              .forPath(path, curatorFramework.getData().forPath(path));
    }
    catch (KeeperException.NoNodeException e) {
      //ignore
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Create (or update) the TTL node for a subscription instance. */
  public void createSubscriptionRequest(SubscriptionInfo subscriptionInfo) {
    try {
      curatorFramework
              .create()
              .orSetData()
              .withTtl(ttl)
              .creatingParentContainersIfNeeded()
              .withMode(CreateMode.PERSISTENT_WITH_TTL)
              .forPath(pathForSubscriptionRequest(subscriptionInfo.getSubscriptionInstanceId()), SubscriptionUtils.serializeSubscriptionInfo(subscriptionInfo));
    }
    catch (KeeperException.NodeExistsException e) {
      //ignore
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Delete the node for a subscription instance; a missing node is ignored. */
  public void removeSubscriptionRequest(String subscriptionInstanceId) {
    try {
      curatorFramework
              .delete()
              .forPath(pathForSubscriptionRequest(subscriptionInstanceId));
    }
    catch (KeeperException.NoNodeException e) {
      //ignore
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Stop watching.
   * FIX: guarded against a null treeCache — the original threw a
   * NullPointerException when stop() ran before subscribe().
   */
  public void stop() {
    if (treeCache != null) {
      treeCache.close();
    }
  }

  /** Build the ZooKeeper path for a subscription instance node. */
  public String pathForSubscriptionRequest(String subscriptionInstanceId) {
    return String.format("%s/%s", path, subscriptionInstanceId);
  }
}
|
# Deployment: five replicas of a plain nginx container labelled app=web-app.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: web-app-deployment
spec:
  replicas: 5
  selector:
    matchLabels:
      app: web-app
  template:
    metadata:
      labels:
        app: web-app
    spec:
      containers:
      - name: web-app
        image: nginx
---
# Service: cluster-internal TCP port 80 routed to the pods selected above.
apiVersion: v1
kind: Service
metadata:
  name: web-app-service
spec:
  selector:
    app: web-app
  ports:
    - protocol: TCP
      port: 80
      targetPort: 80
  type: ClusterIP
def stringToUpper(text):
    """Return *text* converted to upper case.

    The parameter was renamed from ``str`` to stop shadowing the builtin.
    """
    return text.upper()


inputStr = "hello world"
outputStr = stringToUpper(inputStr)
print(outputStr)
#!/bin/bash
# Script to configuring an ispconfig3 server in a Debian VPS
# by calocen [at] gmail [dot] com

# getting some enviromment values
myhostname=`hostname -f`
mydomain=`hostname -d`
myip=`hostname -i`
# geoiplookup is used below to guess country/province for the SSL template
[ ! -x /usr/bin/geoiplookup ] && apt-get --assume-yes install geoip-bin
mycountry=`geoiplookup $myip | cut -f4 -d" " | cut -f1 -d","`
myprovince=`geoiplookup $myip | cut -f5 -d" "`

# reconfiguring webalizer, postfix
# could be cool to modify here webalizer values
dpkg-reconfigure -u webalizer
postconf -e "myhostname = $myhostname"
postconf -e "mydestination = $myhostname, localhost"
echo $myhostname > /etc/mailname
dpkg-reconfigure -u postfix

# request new password
# the current password is read from /root/.my.cnf; empty input keeps it
oldpwd=`grep password /root/.my.cnf | tr "\t" " " | tr -s " " | cut -f3 -d" "`
read -p "mysql password: [$oldpwd] " mysqlpwd
[ -z $mysqlpwd ] && mysqlpwd=$oldpwd
echo $mysqlpwd
#read -p "Are you sure? (y/n) " sure

## who said fear ##
set -x
# change the MySQL root password for both entry points, then persist it
mysqladmin -u root -p$oldpwd password $mysqlpwd
mysqladmin -u root -p$mysqlpwd -h localhost password $mysqlpwd
cat << EOF > /root/.my.cnf
[client]
password = $mysqlpwd
EOF
chmod 600 /root/.my.cnf

# changing mydns password
mysql -e "SET PASSWORD FOR 'mydns'@'%' = PASSWORD( '$mysqlpwd' )"
mysql -e "SET PASSWORD FOR 'mydns'@'localhost' = PASSWORD( '$mysqlpwd' )"
cp -ax /etc/mydns.conf /etc/mydns.conf~
sed s/$oldpwd/$mysqlpwd/g < /etc/mydns.conf~ > /etc/mydns.conf

# enabling mydns
mydns --create-tables > /tmp/mydns.sql
mysql -e "CREATE DATABASE IF NOT EXISTS mydns ; USE mydns ; SOURCE /tmp/mydns.sql;"
rm /tmp/mydns.*
invoke-rc.d mydns restart

# preparing server installation
# fill the openssl.cnf template with the values detected above
mv /etc/ssl/openssl.cnf /etc/ssl/openssl.cnf~
sed s/"YOURHOSTNAME"/"$myhostname"/g < /usr/local/bin/openssl.cnf |
sed s/"YOURDOMAIN"/"$mydomain"/g | \
sed s/"YOURCOUNTRY"/"$mycountry"/g | \
sed s/"YOURPROVINCE"/"$myprovince"/g > /etc/ssl/openssl.cnf
tar xfz /root/downloads/ISPConfig-3.0.0.7-beta.tar.gz -C /usr/local/src
# here would be some stuff to update from SVN
cd /usr/local/src/ispconfig3_install/install/
php -q install.php
|
#! @shell@
# pkg-config wrapper template; @...@ placeholders are substituted by Nix.
set -eu -o pipefail +o posix
shopt -s nullglob

# Trace execution when the Nix debug level is high enough.
if (( "${NIX_DEBUG:-0}" >= 7 )); then
    set -x
fi

source @out@/nix-support/utils.bash

# Guard variable ensures the Nix flag setup runs at most once.
if [ -z "${NIX_PKG_CONFIG_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
    source @out@/nix-support/add-flags.sh
fi

if (( ${#role_suffixes[@]} > 0 )); then
    # replace env var with nix-modified one
    PKG_CONFIG_PATH=$PKG_CONFIG_PATH_@suffixSalt@ exec @prog@ "$@"
else
    # pkg-config isn't a bonafied dependency so ignore setup hook entirely
    exec @prog@ "$@"
fi
<filename>src/network_flow/Boj11375.java
package network_flow;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
 * Baekjoon 11375: job assignment via maximum bipartite matching.
 *
 * @author minchoba
 * @see https://www.acmicpc.net/problem/11375/
 */
public class Boj11375 {
	private static boolean[][] connected;
	private static int[] emp, work;
	
	public static void main(String[] args) throws Exception{
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringTokenizer st = new StringTokenizer(br.readLine());
		
		int N = Integer.parseInt(st.nextToken());
		int M = Integer.parseInt(st.nextToken());
		
		connected = new boolean[N][M];
		for(int emp = 0; emp < N; emp++) {
			st = new StringTokenizer(br.readLine());
			int count = Integer.parseInt(st.nextToken());
			
			while(count-- > 0) {
				int work = Integer.parseInt(st.nextToken()) - 1;
				connected[emp][work] = true; // employee 'emp' is able to do job 'work'
			}
		}
		
		System.out.println(bipartiteMatch(N, M));
	}
	
	private static int bipartiteMatch(int n, int m) {
		int count = 0;
		emp = new int[n]; // emp[i] = job assigned to employee i (-1 = unassigned)
		Arrays.fill(emp, -1);
		work = new int[m]; // work[j] = employee holding job j (-1 = unassigned)
		Arrays.fill(work, -1);
		
		int[] visit = new int[n];
		int visitCnt = 0;
		
		// One augmenting-path attempt per employee; visitCnt tags each pass.
		for(int start = 0; start < n; start++) {
			visitCnt++;
			if(dfs(n, m, visit, start, visitCnt)) count++;
		}
		
		return count;
	}
	
	private static boolean dfs(int n, int m, int[] visit, int current, int count) {
		if(visit[current] == count) return false; // employee already explored in this pass
		visit[current] = count;
		
		for(int next = 0; next < m; next++) {
			if(!connected[current][next]) continue;
			
			if(work[next] == -1 || dfs(n, m, visit, work[next], count)) { // job free, or its holder can be reassigned
				emp[current] = next; // match current employee with job 'next'
				work[next] = current;
				return true;
			}
		}
		return false;
	}
}
|
<reponame>faizanu94/repeat-element
'use strict';

/**
 * Build an array containing `num` copies of `val`.
 * Fills from the highest index down, matching the original loop.
 */
function repeat(val, num) {
  var out = [];
  for (var i = num - 1; i >= 0; i--) {
    out[i] = val;
  }
  return out;
}

// Guarded so the function also loads in non-CommonJS environments;
// under CommonJS this is identical to assigning module.exports directly.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = repeat;
}
|
#!/bin/bash

# Detect the platform family and the matching python2.7 package directory.
DEBTEST=$(lsb_release -a 2> /dev/null | grep Distributor | awk '{print $3}')

if [[ "$DEBTEST" == "Ubuntu" ]]; then
    TYPE="debs"
    PYTHONPACK="/usr/lib/python2.7/dist-packages"
elif [[ -f "/etc/redhat-release" ]]; then
    TYPE="rpms"
    PYTHONPACK="/usr/lib/python2.7/site-packages"
else
    echo "Unknown Operating System"
    exit 2
fi

# Run the st2 examples installer, echo its output, and propagate its status.
INSTALL=$(${PYTHONPACK}/st2common/bin/st2-setup-examples)
EXITCODE=$?
echo $INSTALL
exit $EXITCODE
import { IAction, IPlainAction, IPlainFailAction } from 'shared/types/redux';
import { IAuthData, IChatMessage, IRoom } from './chatApi/namespace';
// Kinds of entries rendered in the chat log.
export type MessageType = 'message' | 'user_joined' | 'user_left' | 'unknown';

// Messages keyed by the room they belong to.
export interface IMessagesState {
  [roomId: string]: IChatMessage[];
}

// Redux slice for the chat feature.
export interface IReduxState {
  data: {
    status: ChatStatus;
    userId: string | null;
    error: string | null;
    rooms: IRoom[];
    roomId: string | null;
    firstIndexOfToday: number | null;
    messages: IMessagesState;
    currentMessageId: string | null;
    isCacheValid: boolean;
  };
  edit: {
    messageFilter: string;
  };
}

export type DateFormating = 'full' | 'short';

// Connection lifecycle of the chat transport.
export type ChatStatus = 'offline' | 'connecting' | 'online';

// Form model for composing a message.
export interface IMessageForm {
  text: string;
}

// Form model for the message search box.
export interface ISearchForm {
  text: string;
}

// A batch of historical messages for one room.
export interface IHistory {
  roomId: string;
  messages: IChatMessage[];
}

// ---- Action types ------------------------------------------------------

// Initialization lifecycle.
export type IInitChat = IPlainAction<'CHAT:INIT'>;
export type IInitChatSuccess = IPlainAction<'CHAT:INIT_SUCCESS'>;
export type IInitChatFail = IPlainFailAction<'CHAT:INIT_FAIL'>;

// Authentication.
export type ILogin = IPlainAction<'CHAT:LOGIN'>;
export type ILoginSuccess = IAction<'CHAT:LOGIN_SUCCESS', IAuthData>;
export type ILoginFail = IPlainFailAction<'CHAT:LOGIN_FAIL'>;

// Transport connectivity.
export type IChatConnected = IPlainAction<'CHAT:CONNECTED'>;
export type IChatDisconnected = IPlainAction<'CHAT:DISCONNECTED'>;

// Server-pushed message events.
export type IMessageReceived = IAction<'CHAT:MESSAGE_RECEIVED', IChatMessage>;
export type IHistoryReceived = IAction<'CHAT:HISTORY_RECEIVED', IHistory>;
export type IMessageDeleted = IAction<'CHAT:MESSAGE_DELETED', {messageId: string, roomId: string}>;

// Outgoing message lifecycle.
export type ISendMessage = IPlainAction<'CHAT:SEND_MESSAGE'>;
export type ISendMessageSuccess = IPlainAction<'CHAT:SEND_MESSAGE_SUCCESS'>;
export type ISendMessageFail = IPlainFailAction<'CHAT:SEND_MESSAGE_FAIL'>;

export type IChatError = IAction<'CHAT:ERROR', string>;

// Room management.
export type IJoinRoom = IAction<'CHAT:JOIN_ROOM', string>;
export type IFetchRooms = IPlainAction<'CHAT:FETCH_ROOMS'>;
export type IFetchRoomsSuccess = IAction<'CHAT:FETCH_ROOMS_SUCCESS', IRoom[]>;
export type IFetchRoomsFail = IPlainFailAction<'CHAT:FETCH_ROOMS_FAIL'>;
export type ISubmitSearchForm = IAction<'CHAT:SUBMIT_SEARCH_FORM', ISearchForm>;
export type ISwitchRoom = IAction<'CHAT:SWITCH_ROOM', string>;
export type ISwitchRoomSuccess = IAction<'CHAT:SET_CURRENT_ROOM_ID', string>;

// Local editing helpers.
export type IEditMessage = IAction<'CHAT:EDIT_MESSAGE', IChatMessage>;
export type ICopyToMessage = IAction<'CHAT:COPY_TO_MESSAGE', string>;
export type ISetCacheValidity = IAction<'CHAT:SET_CACHE_VALIDITY', boolean>;

// Union of every chat action, for reducer exhaustiveness.
export type Action =
  | IChatError
  | IInitChat
  | IInitChatSuccess
  | IInitChatFail
  | IChatConnected
  | IChatDisconnected
  | ILogin
  | ILoginSuccess
  | ILoginFail
  | IFetchRooms
  | IFetchRoomsSuccess
  | IFetchRoomsFail
  | IJoinRoom
  | IEditMessage
  | ICopyToMessage
  | ISetCacheValidity
  | ISendMessage
  | ISendMessageSuccess
  | ISendMessageFail
  | ISubmitSearchForm
  | ISwitchRoom
  | ISwitchRoomSuccess
  | IMessageReceived
  | IHistoryReceived
  | IMessageDeleted;
|
def calc_perimeter(width, height):
    """Return the perimeter of a rectangle with the given side lengths."""
    return 2 * (width + height)


# FIX: the original referenced `width`/`height` without defining them
# (NameError at runtime); provide concrete demo dimensions.
width, height = 4, 6
perimeter = calc_perimeter(width, height)
print("Perimeter of rectangle:", perimeter)
#include <vector>
#include <queue>
#include <algorithm>
// A weighted edge between two vertex indices.
struct Edge {
    int src, dest, weight;
};

// Edge-list graph: V vertices, E edges.
struct Graph {
    int V, E;
    std::vector<Edge> edges;
};
// Build an empty min-heap of edges ordered by ascending weight
// (priority_queue is a max-heap, so the comparator is inverted).
auto get_heap() noexcept {
    const auto byWeightAsc = [](const Edge& a, const Edge& b) noexcept {
        return a.weight > b.weight;
    };
    return std::priority_queue<Edge, std::vector<Edge>, decltype(byWeightAsc)>{byWeightAsc};
}
// Follow the parent chain until a root (marked -1) is reached.
// Iterative form of the original recursion; no path compression either way.
int findParent(std::vector<int>& parent, int i) {
    while (parent[i] != -1) {
        i = parent[i];
    }
    return i;
}
// Merge the sets containing x and y by re-rooting x's root onto y's root.
void unionSets(std::vector<int>& parent, int x, int y) {
    const int rootX = findParent(parent, x);
    const int rootY = findParent(parent, y);
    parent[rootX] = rootY;
}
std::vector<Edge> findMST(const Graph& graph) {
std::vector<Edge> result;
std::vector<int> parent(graph.V, -1);
auto pq = get_heap();
for (const auto& edge : graph.edges) {
pq.push(edge);
}
while (!pq.empty() && result.size() < graph.V - 1) {
Edge nextEdge = pq.top();
pq.pop();
int x = findParent(parent, nextEdge.src);
int y = findParent(parent, nextEdge.dest);
if (x != y) {
result.push_back(nextEdge);
unionSets(parent, x, y);
}
}
return result;
} |
<gh_stars>0
import React from 'react';
import { mount } from 'enzyme';

import List from '../index';

// Smoke test: mounting <List /> with no props must not throw.
describe('<List />', () => {
  // eslint-disable-next-line jest/expect-expect
  it('It should not crash', () => {
    mount(<List />);
  });
});
|
<reponame>MAMOUN-kamal-alshisani/horned-gallary
import React from "react";
import 'bootstrap/dist/css/bootstrap.min.css';
import Card from 'react-bootstrap/Card'
import Button from 'react-bootstrap/Button'
import Imgselect from "./Imgselect";
class Beast extends React.Component{
constructor(props) {
super(props);
this.wrapper = React.createRef();
this.state = {
numberOFvotes: 0,
show: false
}
}
increaseVotes = () => {
this.setState({
numberOFvotes: this.state.numberOFvotes + 1
})
}
changeState = () => {
let currentState = this.state.show;
if (currentState === false) {
this.setState({
show: true
})
} else if (currentState === true) {
this.setState({
show: false
})
}
}
render() {
return (
<>
<Imgselect show={this.state.show} changeState={this.changeState} title={this.props.title} description={this.props.description} image_url={this.props.image_url}></Imgselect>
<Card style={{ width: '18rem' }} onClick={this.changeState} ref={this.wrapper}>{this.props.children}
<Card.Img variant="top" src={this.props.image_url} />
<Card.Body>
<Card.Title>{this.props.title}</Card.Title>
<Card.Text>
{this.props.description}
</Card.Text>
<Card.Text>
{this.state.numberOFvotes}
</Card.Text>
</Card.Body>
</Card>
<Button onClick={this.increaseVotes}>Vote</Button>
</>
)
}
}
// render(){
// return(
// <div>
// <h3>{this.props.title}</h3>
// <img src={this.props.image_url} alt="" title=""></img>
// <p>{this.props.description}</p>
// <h3>{this.props.keyword}</h3>
// <h3>{this.props.horns}</h3>
// </div>
// )
// }
// }
export default Beast; |
#!/bin/bash -e
#
# Build a Docker image and push it to Google Container Registry.
# Usage: <script> <oauth2-access-token> <tag> <gcp-project> <build-context>
TOKEN=${1}
TAG=${2}
PROJECT=${3}
CONTEXT=${4}

# Expansions are quoted so tokens/paths containing spaces or glob
# characters survive word splitting (the original left them bare).
echo "${TOKEN}" | docker login -u oauth2accesstoken --password-stdin https://gcr.io
docker build -t "gcr.io/${PROJECT}/${TAG}" "${CONTEXT}"
docker push "gcr.io/${PROJECT}/${TAG}"
import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core';
@Component({
selector: 'ngx-add-new-compte',
templateUrl: './add-new-compte.component.html',
styleUrls: ['./add-new-compte.component.scss'],
})
export class AddNewCompteComponent implements OnInit {
  /** Account being created; bound to the template form. */
  @Input() account = {
    accountId: null,
    accountLabel : '',
    accountBank: '',
    accountBankAdress : '',
    accountAgency: '',
    accountAgencyAdress: '',
    accountChargeCustomerName: '',
    accountChargeCustomerPhoneNumber: '',
    accountChargeCustomerEmail: '',
    accountNumber: '',
    accountRIB: '',
    accountCurrency: '',
    accountContacts: [],
    accountInitialAmount: null,
  };

  /** Emits the filled account when the user saves. */
  @Output() addNewAccountEvent = new EventEmitter();
  /** Emits when the user cancels the form. */
  @Output() cancelEvent = new EventEmitter();

  constructor() { }

  ngOnInit(): void {
  }

  /** Append an empty contact row to the account. */
  addNewContact() {
    this.account.accountContacts.push({
      contactName : '',
      contactPost : '',
      contactTel : '',
      contactEmail : '',
    });
  }

  /**
   * NOTE: despite its name, this returns `true` when the account is
   * INVALID (a mandatory field missing or RIB not exactly 20 characters);
   * the name is kept for template compatibility.
   *
   * FIX: the original dropped the `||` before the accountLabel line, so
   * automatic semicolon insertion ended the `return` early and the label
   * check was dead code.
   */
  checkAccountValid (): boolean {
    return this.account.accountAgency == null || this.account.accountAgency === '' ||
      this.account.accountBank == null || this.account.accountBank === '' ||
      this.account.accountCurrency == null || this.account.accountCurrency === '' ||
      this.account.accountNumber == null || this.account.accountNumber === '' ||
      this.account.accountRIB == null || this.account.accountRIB === '' || this.account.accountRIB.length !== 20 ||
      this.account.accountLabel == null || this.account.accountLabel === '';
  }

  /** Forward the account to the parent component. */
  saveAccount() {
    this.addNewAccountEvent.emit(this.account);
  }

  /** Notify the parent that the user aborted. */
  cancel() {
    this.cancelEvent.emit();
  }
}
|
<reponame>waricoma/my-first-zoom-app
// Ambient declaration: 'vuejs-dialog' ships no type definitions, so imports
// of it are typed as `any`.
declare module 'vuejs-dialog'; // the vuetify.js has dialog feature. It's for my learning and memo. ( How to use plugin/outside plugin? )
|
<reponame>fanx-dev/fanx
//
// Copyright (c) 2009, <NAME> and <NAME>
// Licensed under the Academic Free License version 3.0
//
// History:
// 24 Mar 09 <NAME> Creation
// 20 May 09 <NAME> Refactor to new OO model
//
/**
 * Slot: base type for reflective Field/Method metadata.
 */
fan.std.Slot = fan.sys.Obj.$extend(fan.sys.Obj);

//////////////////////////////////////////////////////////////////////////
// Constructor
//////////////////////////////////////////////////////////////////////////

// Fields are populated by the type loader after construction.
fan.std.Slot.prototype.$ctor = function()
{
  this.m_parent = null; // declaring Type
  this.m_qname = null;  // qualified name ("<pod>::<type>.<slot>")
  this.m_name = null;   // simple slot name
  this.m_flags = null;  // FConst bit mask (see flag accessors below)
  this.m_facets = null; // fan.std.Facets container
}

// Slots are metadata and therefore always immutable.
fan.std.Slot.prototype.isImmutable = function()
{
  return true;
}

fan.std.Slot.prototype.toImmutable = function()
{
  return this;
}

//////////////////////////////////////////////////////////////////////////
// Identity
//////////////////////////////////////////////////////////////////////////

fan.std.Slot.prototype.$typeof = function() { return fan.std.Slot.$type; }
fan.std.Slot.prototype.toStr = function() { return this.m_qname; }

// Literal encoding: parent type's literal followed by the slot name.
fan.std.Slot.prototype.$literalEncode = function(out)
{
  this.m_parent.$literalEncode(out);
  out.w(this.m_name);
}
//////////////////////////////////////////////////////////////////////////
// Management
//////////////////////////////////////////////////////////////////////////
// Find a method by qualified name. When checked (default), a missing slot
// or a non-method slot raises via coerce; otherwise null is returned.
fan.std.Slot.findMethod = function(qname, checked)
{
  if (checked === undefined) checked = true;
  var slot = fan.std.Slot.find(qname, checked);
  if (slot instanceof fan.std.Method || checked)
    return fan.sys.ObjUtil.coerce(slot, fan.std.Method.$type);
  return null;
}

// Find a field by qualified name; same contract as findMethod.
fan.std.Slot.findField = function(qname, checked)
{
  if (checked === undefined) checked = true;
  var slot = fan.std.Slot.find(qname, checked);
  if (slot instanceof fan.std.Field || checked)
    return fan.sys.ObjUtil.coerce(slot, fan.std.Field.$type);
  return null;
}

// Find any slot by qualified name "<pod>::<type>.<slot>".
fan.std.Slot.find = function(qname, checked)
{
  if (checked === undefined) checked = true;
  var typeName, slotName;
  try
  {
    var dot = qname.indexOf('.');
    typeName = qname.substring(0, dot);
    slotName = qname.substring(dot+1);
  }
  catch (e)
  {
    throw fan.sys.Err.make("Invalid slot qname \"" + qname + "\", use <pod>::<type>.<slot>");
  }
  var type = fan.std.Type.find(typeName, checked);
  if (type == null) return null;
  return type.slot(slotName, checked);
}

// Find a method by qualified name and return its function object.
fan.std.Slot.findFunc = function(qname, checked)
{
  if (checked === undefined) checked = true;
  var m = fan.std.Slot.find(qname, checked);
  if (m == null) return null;
  return m.m_func;
}
//////////////////////////////////////////////////////////////////////////
// Methods
//////////////////////////////////////////////////////////////////////////
fan.std.Slot.prototype.parent = function() { return this.m_parent; }
fan.std.Slot.prototype.qname = function() { return this.m_qname; }
fan.std.Slot.prototype.$name = function() { return this.m_name; }
fan.std.Slot.prototype.isField = function() { return this instanceof fan.std.Field; }
fan.std.Slot.prototype.isMethod = function() { return this instanceof fan.std.Method; }

//////////////////////////////////////////////////////////////////////////
// Flags
//////////////////////////////////////////////////////////////////////////

// Each accessor tests one FConst bit in m_flags.
fan.std.Slot.prototype.isAbstract = function() { return (this.m_flags & fan.sys.FConst.Abstract) != 0; }
fan.std.Slot.prototype.isConst = function() { return (this.m_flags & fan.sys.FConst.Const) != 0; }
fan.std.Slot.prototype.isCtor = function() { return (this.m_flags & fan.sys.FConst.Ctor) != 0; }
fan.std.Slot.prototype.isInternal = function() { return (this.m_flags & fan.sys.FConst.Internal) != 0; }
fan.std.Slot.prototype.isNative = function() { return (this.m_flags & fan.sys.FConst.Native) != 0; }
fan.std.Slot.prototype.isOverride = function() { return (this.m_flags & fan.sys.FConst.Override) != 0; }
fan.std.Slot.prototype.isPrivate = function() { return (this.m_flags & fan.sys.FConst.Private) != 0; }
fan.std.Slot.prototype.isProtected = function() { return (this.m_flags & fan.sys.FConst.Protected) != 0; }
fan.std.Slot.prototype.isPublic = function() { return (this.m_flags & fan.sys.FConst.Public) != 0; }
fan.std.Slot.prototype.isStatic = function() { return (this.m_flags & fan.sys.FConst.Static) != 0; }
fan.std.Slot.prototype.isSynthetic = function() { return (this.m_flags & fan.sys.FConst.Synthetic) != 0; }
fan.std.Slot.prototype.isVirtual = function() { return (this.m_flags & fan.sys.FConst.Virtual) != 0; }

//////////////////////////////////////////////////////////////////////////
// Facets
//////////////////////////////////////////////////////////////////////////

fan.std.Slot.prototype.facets = function() { return this.m_facets.list(); }
fan.std.Slot.prototype.hasFacet = function(type) { return this.facet(type, false) != null; }

// Lookup a facet by type; when checked (default), missing facets throw.
fan.std.Slot.prototype.facet = function(type, checked)
{
  if (checked === undefined) checked = true;
  return this.m_facets.get(type, checked);
}
//////////////////////////////////////////////////////////////////////////
// Util
//////////////////////////////////////////////////////////////////////////
// Escape identifiers that collide with JS reserved words / runtime names
// by prefixing '$'. Table-driven form of the original switch.
fan.std.Slot.prototype.$$name = function(n)
{
  // must keep in sync with compilerJs::JsNode
  var reserved = {
    "char": "$char", "delete": "$delete", "enum": "$enum",
    "export": "$export", "fan": "$fan", "float": "$float",
    "import": "$import", "in": "$in", "int": "$int",
    "name": "$name", "typeof": "$typeof", "var": "$var",
    "with": "$with"
  };
  // hasOwnProperty guard: a plain lookup would wrongly hit inherited
  // Object.prototype members such as "constructor".
  if (Object.prototype.hasOwnProperty.call(reserved, n)) return reserved[n];
  return n;
}
//
// Copyright (c) 2011, <NAME> and <NAME>
// Licensed under the Academic Free License version 3.0
//
// History:
// 31 May 2011 <NAME> Creation
//
/**
 * Facets manages facet meta-data as a Str:Obj map.
 */
fan.std.Facets = fan.sys.Obj.$extend(fan.sys.Obj);

fan.std.Facets.prototype.$ctor = function(map)
{
  this.m_map = map;   // qname -> facet instance, or its encoded string (decoded lazily)
  this.m_list = null; // cached immutable list built by list()
}

// Shared empty singleton, created on first use.
fan.std.Facets.empty = function()
{
  var x = fan.std.Facets.m_emptyVal;
  if (x == null) x = fan.std.Facets.m_emptyVal = new fan.std.Facets({});
  return x;
}

// Singleton containing only the @Transient facet.
fan.std.Facets.makeTransient = function()
{
  var x = fan.std.Facets.m_transientVal;
  if (x == null)
  {
    var m = {};
    m[fan.sys.Transient.$type.qname()] = "";
    x = fan.std.Facets.m_transientVal = new fan.std.Facets(m);
  }
  return x;
}
// Build (and cache) the immutable list of decoded facets.
fan.std.Facets.prototype.list = function()
{
  if (this.m_list == null)
  {
    this.m_list = fan.sys.List.make(8, fan.sys.Facet.$type);
    for (var key in this.m_map)
    {
      var type = fan.std.Type.find(key);
      this.m_list.add(this.get(type, true));
    }
    this.m_list = this.m_list.toImmutable();
  }
  return this.m_list;
}

// Get a facet by type; string entries are lazily decoded and cached
// back into the map.
fan.std.Facets.prototype.get = function(type, checked)
{
  var val = this.m_map[type.qname()];
  if (typeof val == "string")
  {
    var f = this.decode(type, val);
    this.m_map[type.qname()] = f;
    return f;
  }
  //if (val instanceof fan.sys.Facet)
  if (val != null) return val;
  if (checked) throw fan.sys.UnknownFacetErr.make(type.qname());
  return null;
}

// Decode a serialized facet string; "" means default construction.
fan.std.Facets.prototype.decode = function(type, s)
{
  try
  {
    // if no string use make/defVal
    if (s.length == 0) return type.make();

    // decode using normal Fantom serialization
    return fan.std.ObjDecoder.decode(s);
  }
  catch (e)
  {
    var msg = "ERROR: Cannot decode facet " + type + ": " + s;
    fan.sys.ObjUtil.echo(msg);
    // drop the bad entry so the decode is not re-attempted
    delete this.m_map[type.qname()];
    throw fan.sys.IOErr.make(msg);
  }
}
// Shallow copy of the facet map (entries may still be encoded strings).
fan.std.Facets.prototype.dup = function()
{
  var dup = {};
  // FIX: 'var' added — the original iterated an undeclared 'key', leaking
  // a global (and a ReferenceError in strict mode).
  for (var key in this.m_map) dup[key] = this.m_map[key];
  return new fan.std.Facets(dup);
}
// Copy facets marked @FacetMeta{inherited=true} from 'facets' that this
// instance does not already define.
fan.std.Facets.prototype.inherit = function(facets)
{
  var keys = [];
  // FIX: 'var' added — the original iterated an undeclared 'key', leaking
  // a global (and a ReferenceError in strict mode).
  for (var key in facets.m_map) keys.push(key);
  if (keys.length == 0) return;

  this.m_list = null; // invalidate the cached decoded list
  for (var i=0; i<keys.length; i++)
  {
    var k = keys[i];

    // if already mapped skip it
    if (this.m_map[k] != null) continue;

    // if not an inherited facet skip it
    var type = fan.std.Type.find(k);
    var meta = type.facet(fan.sys.FacetMeta.$type, false);
    if (meta == null || !meta.m_inherited) continue;

    // inherit
    this.m_map[k] = facets.m_map[k];
  }
}
// Lazily-initialized singletons used by empty() and makeTransient().
fan.std.Facets.m_emptyVal = null;
fan.std.Facets.m_transientVal = null;
|
<filename>src/redux/reducers/userReducer.ts
import { AnyAction } from "typescript-fsa";
import { addUser, deleteUser, editUser } from "../actions";
// const initState = [
// {
// firstname: 'Gago',
// lastname: 'Ka',
// email: '<EMAIL>',
// password: '<PASSWORD>',
// avatar: '',
// banner: '',
// dateJoined: 0
// }
// ];
// Shape of a single stored user record.
export interface User {
  id: string;
  firstname: string;
  lastname: string;
  email: string;
  password: string;
  avatar: string;     // presumably an image URL; empty in the sample data — TODO confirm
  banner: string;     // presumably an image URL; empty in the sample data — TODO confirm
  dateJoined: number; // numeric timestamp; unit (s vs ms) not evident here — TODO confirm
}

// The user slice is simply a list of users.
export type UserState = User[];
/**
 * Reducer for the list of users.
 *
 * - addUser: appends the payload.
 * - editUser: shallow-merges the payload into the user with the same id.
 * - deleteUser: removes the user whose id equals the payload.
 * - anything else: state unchanged.
 */
export function userReducer(state: UserState = [], action: AnyAction): UserState {
  // NOTE: the original also re-checked `state === undefined` here; the
  // default parameter above already guarantees an array, so that branch
  // was unreachable and has been removed.
  if (addUser.match(action)) {
    return [...state, action.payload];
  }

  if (editUser.match(action)) {
    return state.map((user) =>
      user.id === action.payload.id ? { ...user, ...action.payload } : user,
    );
  }

  if (deleteUser.match(action)) {
    return state.filter((user) => user.id !== action.payload);
  }

  return state;
}
|
<filename>packages/web/modules/withEvent.js
// @flow
import { graphql, type OptionProps } from 'react-apollo';
import { gql } from 'graphql.macro';
import { EventDetailFragment } from './fragments';
import { type HOC, type Event } from '../utils/type.flow';
/**
 * Type (additional props)
 */
export type Response = OptionProps<*, { event: Event }, {}>;
export type InjectedProps = {
  event: { event: Event, loading: boolean, error: ?Object },
};

/**
 * GraphQL query
 * Fetches a single event by id using the shared EventDetailFragment.
 */
export const GRAPHQL_TAG: Object = gql`
  query event($eventId: ID) {
    event(eventId: $eventId) {
      ...EventDetailFragment
    }
  }
  ${EventDetailFragment}
`;

/**
 * HOC
 * Maps the wrapped component's `eventId` prop into the query variables and
 * injects the result as an `event` prop ({ event, loading, error }).
 */
const withEvent: HOC<*, InjectedProps> = (graphql: any)(GRAPHQL_TAG, {
  alias: 'event',
  options: ({ eventId }) => ({
    variables: {
      eventId,
    },
  }),
  props: ({ data: { event, loading, error } }: Response): InjectedProps => ({
    event: { event, loading, error },
  }),
});

export default withEvent;
|
import React, { Component, PropTypes } from 'react';
var ReactPlayer = require('react-player')
import { DropTarget } from 'react-dnd';
import HTML5Backend from 'react-dnd-html5-backend';
import { ItemTypes } from './Constants';
var Social = require('./Social');
import PlayByPlay from './PlayByPlay';
var PlayersOnField = require('./PlayersOnField');
var Fantasy = require('./Fantasy');
import DragButton from './DragButton';
// react-dnd drop-target spec for a grid Spot.
// See: https://gaearon.github.io/react-dnd/docs-drop-target.html
const spotTarget = {
  // Spot 0 is reserved for the video player, so it never accepts drops.
  canDrop(props) {
    return props.id != 0; // ID 0 is the player position; loose != also matches the string '0'
  },
  drop(props, monitor, component) {
    // Obtain the dragged item (a module id coming from a DragButton).
    const item = monitor.getItem();
    // Swap this spot's rendered content to the dropped module.
    component.setState({id: item.id});
    // Returning a drop result makes it available as monitor.getDropResult()
    // in the drag source's endDrag() method.
    return { id:item.id };
  }
  // TODO: Make a drop method to swap around modules
};
// Props injected into Spot by react-dnd.
// See: https://gaearon.github.io/react-dnd/docs-drop-target-connector.html
function collect(connect, monitor) {
  const connectDropTarget = connect.dropTarget(); // wraps the rendered node
  const canDrop = monitor.canDrop();              // true while a valid drag is in flight
  const result = monitor.getDropResult();         // whatever drop() returned, if anything
  return { connectDropTarget, canDrop, result };
}
function fillSpot(i) {
console.log(i);
if (i==0) { return (<div>
<ReactPlayer style={{background:'black'}} url='https://www.youtube.com/watch?v=mkumGfsv-H4' playing width='100%' height='600px' />
<div style={{diplay:'inline-block', background:'#CFD8DC'}}>
<img src="./img/logo_game_grid.png" height="34px" style={{paddingBottom:'12px', paddingLeft:'10px'}} />
<img src="./img/nfl_logo.png" height="60px" />
<DragButton id='0' text='Player' />
<DragButton id='1' text='Twitter' />
<DragButton id='2' text='PlayByPlay' />
<DragButton id='3' text='PlayersOnField' />
<DragButton id='4' text='Fantasy' />
</div>
</div>);}
if (i==1) { return (<Social />);}
if (i==2) { return (<PlayByPlay />);}
if (i==3) { return (<PlayersOnField />);}
if (i==4) { return (<Fantasy />)};
return (<div height="100%"><p style={{height:'100%',
textAlign: 'center',
verticalAalign: 'middle',
lineHeight:'25',
border: '1px solid #FFFFFF'}}>
Drag A Module Here</p>
</div>);
}
class Spot extends React.Component {
constructor(props) {
super(props);
this.state = {id: props.id};
}
clearChild() {
this.child = (<div></div>);
}
render() {
const { id, connectDropTarget, canDrop, result} = this.props;
var useNew = false;
//console.log("ID:"+id);
//console.log("RESULT:"+result);
// var toUse = (useNew) ? result.id : id;
let backgroundColor = "#222";
if (canDrop) {
backgroundColor = 'darkgreen';
} else {
backgroundColor = 'darkred';
}
return connectDropTarget(
<div style={{display:'inline', backgroundColor}}>
{fillSpot(this.state.id)}
</div>
);
}
}
export default DropTarget(ItemTypes.MODULE, spotTarget, collect)(Spot);
|
import { Injectable } from '@nestjs/common';
import { CommandHelper } from 'src/shared/helpers/command.helper';
import { environment } from 'src/config/environments/environment';
import { TechnicalError } from 'src/shared/errors/technical.error';
import { ArgumentError } from 'src/shared/errors/argument.error';
import { NetworksDto } from '../dtos/networks.dto'
@Injectable()
export class NetworksService {
  /**
   * List all registered chains from the local chain registry.
   *
   * Shells out to `curl` against localhost:8081 and parses the JSON body.
   * @returns the wrapped command result containing the parsed chain list
   * @throws TechnicalError when the command fails or the output is not valid JSON
   */
  async listNetworks(): Promise<any> {
    const command = `curl -X GET localhost:8081/chains`;
    try {
      const exec: any = await CommandHelper.run(command);
      const response = exec.stdout;
      return CommandHelper.wrap(command, exec, JSON.parse(response));
    } catch (e) {
      console.log(e);
      throw new TechnicalError(e.message);
    }
  }

  // async getNetwork(name): Promise<any> {
  //   const command = `orchestrate networks network -e localhost:8020 -n ${name}`;
  //   try {
  //     const exec: any = await CommandHelper.run(command);
  //     let response = exec.stdout;
  //     response = response.replace(/'/g, '"');
  //     response = response.replace(/\[Array\]/g, '"[Array]"');
  //     response = response.replace(/[a-zA-Z]+:+/g, matchKey =>
  //       matchKey.replace(/[a-zA-Z]+/g, matchWord => `"${matchWord}"`)
  //     );
  //     response = JSON.parse(response);
  //     return CommandHelper.wrap(command, exec, response);
  //   } catch (e) {
  //     console.log(e);
  //     throw new TechnicalError(e.message);
  //   }
  // }

  /**
   * Register a new chain in the registry.
   *
   * @param networksDto payload with the chain `name` and its RPC `urls`
   * @returns the wrapped command result containing the registry's response
   * @throws ArgumentError when the payload is missing, has an empty name,
   *         or has no urls (a missing urls array now also raises ArgumentError
   *         instead of a raw TypeError)
   * @throws TechnicalError when the command fails or the output is not valid JSON
   */
  async registerNetwork(networksDto?: NetworksDto): Promise<any> {
    if (!networksDto) {
      throw new ArgumentError(`Invalid JSON`);
    }
    if (!networksDto.name || networksDto.name === '') {
      throw new ArgumentError(`Invalid name`);
    }
    if (!networksDto.urls || networksDto.urls.length === 0) {
      throw new ArgumentError(`Invalid urls`);
    }
    // SECURITY(review): name/urls are interpolated into a shell command string —
    // a value containing quotes or backticks could inject arbitrary shell.
    // Prefer an HTTP client (e.g. Nest HttpService) or argv-based spawning.
    const command = `curl -X POST --data '{"name": "${networksDto.name}", "urls":${JSON.stringify(networksDto.urls)}}' localhost:8081/chains`;
    try {
      const exec: any = await CommandHelper.run(command);
      let response = exec.stdout;
      response = JSON.parse(response);
      return CommandHelper.wrap(command, exec, response);
    } catch (e) {
      console.log(e);
      throw new TechnicalError(e.message);
    }
  }
}
|
<gh_stars>0
# -*- coding: utf-8 -*-
""" S3 Logging Facility
@copyright: (c) 2014 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
import sys
from gluon import current
# =============================================================================
class S3Log(object):
    """
        Simple global logging facility, called like:
            current.log.error("Something went wrong", value="Example")
        gives:
            2014-02-16 11:58:41 S3LOG ERROR: Something went wrong: Example
        Configurable in 000_config.py (set up in models/00_db.py)
        - to include caller details (file name, line number, function name):
            2014-02-16 11:58:23 (applications/eden/modules/s3/s3rest.py 477 __init__)
            ERROR: Something went wrong: Example
        - to write to console (sys.stderr), to a log file, or both.
        Configuration see modules/s3cfg.py.
    """

    def __init__(self):
        """
            Constructor

            Binds each severity method (critical/error/warning/info/debug)
            either to its real implementation or to the no-op `ignore`,
            depending on the configured log level, so that suppressed
            severities cost nothing at call time.
        """
        settings = current.deployment_settings
        log_level = settings.get_log_level()
        if log_level is None:
            # Logging disabled entirely: every severity becomes a no-op,
            # and 100 is above logging.CRITICAL (50) so nothing ever passes.
            self.critical = \
            self.error = \
            self.warning = \
            self.info = \
            self.debug = self.ignore
            self.log_level = 100
        else:
            try:
                # Map the configured name (e.g. "debug") to the numeric
                # constant (logging.DEBUG etc.).
                level = getattr(logging, log_level.upper())
            except AttributeError:
                raise SyntaxError("Invalid settings.log.level: %s" % log_level)
            self.log_level = level
            # Lower numeric level = more verbose; only severities at or
            # above the threshold get a real handler method.
            self.critical = self._critical \
                            if level <= logging.CRITICAL else self.ignore
            self.error = self._error \
                         if level <= logging.ERROR else self.ignore
            self.warning = self._warning \
                           if level <= logging.WARNING else self.ignore
            self.info = self._info \
                        if level <= logging.INFO else self.ignore
            self.debug = self._debug \
                         if level <= logging.DEBUG else self.ignore
        self.configure_logger()

    # -------------------------------------------------------------------------
    @classmethod
    def setup(cls):
        """
            Set up current.log (idempotent: does nothing if current.log
            already exists for this request)
        """
        if hasattr(current, "log"):
            return
        current.log = cls()
        return

    # -------------------------------------------------------------------------
    def configure_logger(self):
        """
            Configure output handlers (console and/or rotating log file)
        """
        # NOTE(review): if current.log already exists, handlers are assumed
        # to be configured by that earlier instance and this is skipped.
        if hasattr(current, "log"):
            return
        settings = current.deployment_settings
        console = settings.get_log_console()
        logfile = settings.get_log_logfile()
        if not console and not logfile:
            # No point to log without output channel
            self.critical = \
            self.error = \
            self.warning = \
            self.info = \
            self.debug = self.ignore
            return
        logger = logging.getLogger(__name__)
        # Keep messages out of the root logger's handlers; start from a
        # clean handler list so repeated configuration doesn't duplicate output.
        logger.propagate = False
        logger.setLevel(self.log_level)
        logger.handlers = []
        # "%(caller)s" is filled via the `extra` dict in _log() below.
        m_format = "%(asctime)s %(caller)s %(levelname)s: %(message)s"
        d_format = "%Y-%m-%d %H:%M:%S"
        formatter = logging.Formatter(m_format, d_format)
        # Set up console handler
        if console:
            console_handler = logging.StreamHandler(sys.stderr)
            console_handler.setFormatter(formatter)
            console_handler.setLevel(self.log_level)
            logger.addHandler(console_handler)
        # Set up log file handler
        if logfile:
            from logging.handlers import RotatingFileHandler
            MAXBYTES = 1048576  # rotate at 1 MiB, keeping 3 backups
            logfile_handler = RotatingFileHandler(logfile,
                                                  maxBytes = MAXBYTES,
                                                  backupCount = 3)
            logfile_handler.setFormatter(formatter)
            logfile_handler.setLevel(self.log_level)
            logger.addHandler(logfile_handler)
        return

    # -------------------------------------------------------------------------
    @staticmethod
    def ignore(message, value=None):
        """
            Dummy to ignore messages below minimum severity level
        """
        return

    # -------------------------------------------------------------------------
    @staticmethod
    def _log(severity, message, value=None):
        """
            Log a message

            @param severity: the severity of the message
            @param message: the message
            @param value: message suffix (optional)
        """
        logger = logging.getLogger(__name__)
        logger.propagate = False
        msg = "%s: %s" % (message, value) if value else message
        extra = {"caller": "S3LOG"}
        if current.deployment_settings.get_log_caller_info():
            # NOTE(review): logger.findCaller() returns a 3-tuple on Python 2
            # but a 4-tuple on Python 3, and the "(%s %s %s)" format below
            # assumes exactly three items — confirm before running on Python 3.
            caller = logger.findCaller()
            if caller:
                extra = {"caller": "(%s %s %s)" % caller}
        logger.log(severity, msg, extra=extra)
        return

    # -------------------------------------------------------------------------
    @classmethod
    def _critical(cls, message, value=None):
        """
            Log a critical message (highest severity level),
            called via current.log.critical()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.CRITICAL, message, value=value)

    # -------------------------------------------------------------------------
    @classmethod
    def _error(cls, message, value=None):
        """
            Log an error message,
            called via current.log.error()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.ERROR, message, value=value)

    # -------------------------------------------------------------------------
    @classmethod
    def _warning(cls, message, value=None):
        """
            Log a warning message,
            called via current.log.warning()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.WARNING, message, value=value)

    # -------------------------------------------------------------------------
    @classmethod
    def _info(cls, message, value=None):
        """
            Log an general info message,
            called via current.log.info()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.INFO, message, value=value)

    # -------------------------------------------------------------------------
    @classmethod
    def _debug(cls, message, value=None):
        """
            Log a detailed debug message (lowest severity level),
            called via current.log.debug()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.DEBUG, message, value=value)
# END =========================================================================
|
<filename>sql/updates/18000_01_scripted_event_id.sql<gh_stars>0
-- Drop the misleading "_id" suffix from the scripted event table name.
RENAME TABLE scripted_event_id TO scripted_event;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.