text stringlengths 1 1.05M |
|---|
/**
 * HIRSystemParameter.js
 *
 * Copyright © 2018 daisuke.t.
 */

// Constructor for use with the `new` operator.
// (The original first assigned `{}` and immediately overwrote it with this
// function — the dead object literal is removed.)
var HIRSystemParameter = function() {};

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Enumerations / constants
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HIRSystemParameter.FPS_DEFAULT = 60;
HIRSystemParameter.CANVAS_ID_PRIMARY_DEFAULT = "#canvas";
HIRSystemParameter.CANVAS_ID_BACK_DEFAULT = "#canvas_back";
HIRSystemParameter.CANVAS_WIDTH_DEFAULT = 800;
HIRSystemParameter.CANVAS_HEIGHT_DEFAULT = 600;
HIRSystemParameter.CANVAS_COLOR_DEFAULT = HIRColor.White();

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Members
// The expressions below are no-ops at runtime; they only document the
// per-instance fields that Default() populates.
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HIRSystemParameter.mDebug;          // debug-mode flag
HIRSystemParameter.mFPS;            // target frames per second
HIRSystemParameter.mCanvasPrimaryID; // CSS selector of the visible canvas
HIRSystemParameter.mCanvasBackID;   // CSS selector of the back buffer canvas
HIRSystemParameter.mCanvasWidth;    // canvas width in pixels
HIRSystemParameter.mCanvasHeight;   // canvas height in pixels
HIRSystemParameter.mCanvasColor;    // canvas clear color
HIRSystemParameter.mCallbackUpdate; // per-frame update callback (or null)

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Methods
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
/**
 * Build a parameter object populated with the default settings.
 * @returns {HIRSystemParameter} a fresh instance with every field set.
 */
HIRSystemParameter.Default = function() {
  var res = new HIRSystemParameter();

  res.mDebug = false;
  res.mFPS = HIRSystemParameter.FPS_DEFAULT;
  res.mCanvasPrimaryID = HIRSystemParameter.CANVAS_ID_PRIMARY_DEFAULT;
  res.mCanvasBackID = HIRSystemParameter.CANVAS_ID_BACK_DEFAULT;
  res.mCanvasWidth = HIRSystemParameter.CANVAS_WIDTH_DEFAULT;
  res.mCanvasHeight = HIRSystemParameter.CANVAS_HEIGHT_DEFAULT;
  res.mCanvasColor = HIRSystemParameter.CANVAS_COLOR_DEFAULT;
  res.mCallbackUpdate = null;

  return res;
};
|
#!/bin/bash
# Installs the Annularis shop on Ubuntu: LAMP stack with PHP 7.0,
# Bitcoin Core 0.15.1 (run on testnet), a MySQL database/user, and the
# application itself under /var/www/shop.
echo "== Ubuntu installer =="
#Check if root
if [ "$EUID" -ne 0 ]
then echo "Please run as root"
exit 1
fi
#Repo
# NOTE(review): the script already requires root, so the `sudo` prefixes on
# the next three commands are redundant (harmless) — confirm before removing.
sudo apt-get install python-software-properties
# PPA providing the php7.0 packages installed below.
sudo add-apt-repository ppa:ondrej/php
sudo apt-get update
#Install Dependency
echo "Installing packages.."
apt-get update
apt-get install apache2 mysql-server mysql-client libapache2-mod-php7.0 php7.0 php7.0-mcrypt php7.0-mbstring php7.0-gnupg php7.0-mysql php7.0-gmp php7.0-curl php7.0-bcmath php7.0-gd git mcrypt curl unzip atool git mcrypt curl unzip atool
#Check apt exit status
if [ $? -ne 0 ]; then
exit 1
fi
# Enable Mod Rewrite
a2enmod rewrite
# Install bitcoin
# Skipped entirely when a bitcoind binary is already installed.
if [ ! -f /usr/bin/bitcoind ]; then
echo "Installing bitcoin.."
cd /tmp
wget https://bitcoin.org/bin/bitcoin-core-0.15.1/bitcoin-0.15.1-x86_64-linux-gnu.tar.gz # or 32bit
aunpack bitcoin-0.15.1-x86_64-linux-gnu.tar.gz
cd bitcoin-0.15.1
cp bin/* /usr/bin/
fi
# Start bitcoind (testnet)
if ! pgrep "bitcoind" >/dev/null 2>&1 ; then
echo "Starting bitcoind (testnet).."
# NOTE(review): hard-coded RPC credentials; the install page below expects
# these exact values.
bitcoind -daemon -testnet -rpcport="7530" -rpcuser="bitcoinuser" -rpcpassword="bitcoinpass"
fi
# Enable "AllowOverride All (/var/www)
echo "Setting up 'AllowOverride All' for /var/www"
sed -i '/<Directory \/var\/www\/>/,/<\/Directory>/ s/AllowOverride None/AllowOverride all/' /etc/apache2/apache2.conf
# Change DocumentRoot to /var/www/html/shop
echo "Setting up Apache2 DocumentRoot.."
# Idempotence guard: only rewrite the vhost once.
if ! grep -q "shop" /etc/apache2/sites-enabled/000-default.conf; then
sed -i 's#/var/www/html#/var/www/html/shop#' /etc/apache2/sites-enabled/000-default.conf
fi
# Add mysql user and add database
echo "Setting up MySQL.."
# NOTE(review): CREATE DATABASE is not idempotent (fails on re-run), and the
# `GRANT ... IDENTIFIED BY` form is removed in MySQL 8 — confirm target version.
mysql -e "CREATE DATABASE annularis;"
mysql -e "CREATE USER IF NOT EXISTS annularis IDENTIFIED BY 'password';"
mysql -e "GRANT ALL PRIVILEGES ON annularis.* TO annularis@localhost IDENTIFIED BY 'password';"
# Install annularis source (from git)
echo "Cloning Annularis.."
cd /var/www/
git clone https://github.com/annularis/shop
chown www-data:www-data -Rv /var/www/shop
# Restart Apache
systemctl restart apache2.service
# Info
echo "Open your browser to http://your-ip/install and fill as follow:"
echo "---------------------------------------------------------------"
echo "DB User / DB Name: annularis"
echo "DB Pass: password"
echo "Bitcoin port: 7530"
echo "Bitcoin user: bitcoinuser"
echo "Bitcoin pass: bitcoinpass"
echo "For 'Bip32 public key' see 'http://bip32.org'"
echo "Set 'Tidy URL' to 'No'"
echo "Set 'Force Vendor to use PGP' to 'No'"
echo "--------------------------------------------------------"
|
class Company:
    """Holds a company name, truncated to at most 128 characters."""

    def __init__(self):
        # No name until one is assigned via set_company_name().
        self.company_name = None

    def set_company_name(self, name):
        """Store `name`, keeping only the first 128 characters.

        Slicing is a no-op for strings that already fit, so the original's
        length check and both assignment branches collapse into one line.
        """
        self.company_name = name[:128]

    def get_company_name(self):
        """Return the stored company name (None if never set)."""
        return self.company_name
<filename>pinax/apps/tasks/fields.py<gh_stars>1-10
from django.conf import settings
from django.db import models
MARKUP_DEFAULT_FILTER = getattr(settings, "MARKUP_DEFAULT_FILTER", None)
MARKUP_CHOICES = getattr(settings, "MARKUP_CHOICES", [])
# @@@ the behavior here should be available in django-markup
class MarkupField(models.CharField):
    """CharField recording which markup filter a piece of text uses.

    If a default filter is configured — via the field's ``default`` kwarg
    or the ``MARKUP_DEFAULT_FILTER`` setting — the field is pinned to that
    filter and hidden from forms; otherwise the user picks one from
    ``MARKUP_CHOICES``.
    """

    def __init__(self, *args, **kwargs):
        # Cap the stored filter name at 20 chars unless the caller overrides.
        kwargs["max_length"] = kwargs.get("max_length", 20)
        # Explicit `default` kwarg wins over the project-wide setting.
        self.markup_default_filter = kwargs.get("default") or MARKUP_DEFAULT_FILTER
        if self.markup_default_filter:
            kwargs["default"] = self.markup_default_filter
        else:
            kwargs["choices"] = kwargs.get("choices", MARKUP_CHOICES)
        super(MarkupField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # A fixed default filter means there is nothing for the user to
        # choose: render no form field at all.
        if self.markup_default_filter:
            return None
        return super(MarkupField, self).formfield(**kwargs)
|
/*
* options.h -- compiler configuration options set at compile time
* Copyright (C) Acorn Computers Ltd. 1988
* SPDX-Licence-Identifier: Apache-2.0
*/
/*
* RCS $Revision$
* Checkin $Date$
* Revising $Author$
*/
#ifndef _options_LOADED
#define _options_LOADED
/*
* The following conditional settings allow the produced compiler (TARGET)
* to depend on the HOST (COMPILING_ON) environment.
* Note that we choose to treat this independently of the target-machine /
* host-machine issue.
*/
#define CALLABLE_COMPILER 1
#define CPLUSPLUS 1
#define USE_PP
#define NO_LISTING_OUTPUT 1
#define NO_OBJECT_OUTPUT 1
#define NO_DEBUGGER 1
#define NO_ASSEMBLER_OUTPUT 1
#define NO_CONFIG 1
#define NO_DUMP_STATE 1
#define NO_INSTORE_FILES 1
#define HOST_CANNOT_INVOKE_LINKER 1
#define HOST_CANNOT_INVOKE_ASSEMBLER 1
#include "toolver.h"
#define NON_RELEASE_VSN TOOLVER_ARMCPP
/* Expire this version at 00:00:01 on Saturday 01 Oct 94 */
/*#define UNIX_TIME_LIMIT 780969601 */
#define DISABLE_ERRORS 1 /* -- to enable -Exyz... error suppression */
#define EXTENSION_SYSV 1 /* -- to allow #ident ... */
#ifndef __acorn
# ifdef TARGET_IS_NEWTON
# define TARGET_MACHINE "Newton"
# define TARGET_SYSTEM "Newton OS"
# define TARGET_IS_NEWTONOS 1
# ifndef VERSIONSTRING
# define VERSIONSTRING "0.11/C5.00"
# endif
# define TARGET_DEFAULT_BIGENDIAN 1
# define NO_INSTORE_FILES 1 /* no in-store headers for Newton. */
# define PCS_DEFAULTS (PCS_CALLCHANGESPSR + PCS_NOSTACKCHECK)
# else /* NOT TARGET_IS_NEWTON */
/* usual cross-development case... */
# define TARGET_SYSTEM ""
# define TARGET_IS_RISC_OS 1
# define TARGET_DEFAULT_BIGENDIAN 0
# define PCS_DEFAULTS (PCS_CALLCHANGESPSR | PCS_FPE3 | PCS_SOFTFP)
/* | PCS_NOSTACKCHECK */
/* | PCS_REENTRANT */
/* | PCS_FPREGARGS */
# endif
# define HOST_WANTS_NO_BANNER 1
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {NULL}
# endif
#else /* __acorn is defined */
# ifdef __unix
# define TARGET_SYSTEM "RISCiX"
# define TARGET_IS_UNIX 1
# define NO_INSTORE_FILES 1 /* no in-store headers under Unix. */
# define HOST_WANTS_NO_BANNER 1
/* #define TARGET_HAS_DIVREM_FUNCTION 1 -- divide fn also returns remainder.*/
/* #define TARGET_HAS_DIV_10_FUNCTION 1 -- fast divide by 10 */
/* but not under ARM Unix... */
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {"-zps1", "-D__unix", "-D__acorn", NULL}
# endif
# endif
# ifdef __riscos
# define TARGET_SYSTEM "RISC OS"
# define TARGET_IS_RISC_OS 1
# define TARGET_HAS_DIVREM_FUNCTION 1 /* divide fn also returns remainder.*/
# define TARGET_HAS_DIV_10_FUNCTION 1 /* fast divide by 10 */
/* the last two would be in target.h*/
/* but are OS-dependent too. */
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {"-D__riscos", "-D__acorn", NULL}
# endif
# endif
#endif /* defined(__acorn) */
#ifndef TARGET_IS_NEWTON
# define PROFILE_COUNTS_INLINE 1
/* to avoid conflict with host compilers */
# define C_INC_VAR "ARMINC"
# define C_LIB_VAR "ARMLIB"
#else
# define PROFILE_DISABLES_TAILCALL 1
# define C_INC_VAR "ARMCIncludes"
# define C_LIB_VAR NULL
#endif
/* #define DO_NOT_EXPLOIT_REGISTERS_PRESERVED_BY_CALLEE 1 */
#define MOVC_KILLS_REGISTER_PRESERVED_BY_CALLEE_EXPLOITATION 1
/* #define TARGET_STACK_MOVES_ONCE 1 / * Experimental option */
#ifndef RELEASE_VSN
# define ENABLE_ALL 1 /* -- to enable all debugging options */
#endif
/* mac-specific options - find a better home for these sometime! */
#ifdef macintosh
/* The origin of time is 0th Jan 1904... */
# ifdef UNIX_TIME_LIMIT
# define TIME_LIMIT (UNIX_TIME_LIMIT+(66*365+16)*24*3600)
# endif
#ifdef applec
/* work-around for MPW C */
# define NO_STATIC_BANNER 1
#endif
pascal void SpinCursor(short increment); /* copied from CursorCtl.h */
# define ExecuteOnSourceBufferFill() SpinCursor(1)
#else /* NOT macintosh */
# ifdef UNIX_TIME_LIMIT
# define TIME_LIMIT UNIX_TIME_LIMIT
# endif
#endif
#ifdef TARGET_IS_NEWTON
# define HOST_OBJECT_INCLUDES_SOURCE_EXTN 1 /* .c -> .c.o */
# define EXTENSION_COUNTED_STRINGS 1 /* to enable Pascal-style strings */
# define EXTENSION_UNSIGNED_STRINGS 1 /* and they are unsigned */
# define ALLOW_WHITESPACE_IN_FILENAMES 1 /* to allow as it says... */
# define ONLY_WARN_ON_NONPRINTING_CHAR 1 /* to do as it says... */
# define HOST_DOES_NOT_FORCE_TRAILING_NL 1
# define HOST_WANTS_NO_BANNER 1 /* no routine banner output */
# define DISABLE_ERRORS 1
# define TARGET_WANTS_LINKER_TO_RESOLVE_FUNCTION_REFERENCES 1
# define HOST_CANNOT_INVOKE_ASSEMBLER 1
# define HOST_CANNOT_INVOKE_LINKER 1
# define PUT_FILE_NAME_IN_AREA_NAME 1
# define CHAR_NL '\n'
# define CHAR_CR '\r'
# define CFRONT_MODE_WARN_LACKS_STORAGE_TYPE 0
# define HOST_DOESNT_WANT_FP_OFFSET_TABLES 1
#ifdef MAKE_WCHAR_T_UNSIGNED_SHORT
/* make wchar_t be unsigned short */
/* maybe this should be cfe somewhere */
# define sizeof_wchar sizeof_short
# define te_wchar te_ushort /* for sem.c */
# define NUM_WCHAR (NUM_INT|NUM_SHORT|NUM_UNSIGN) /* for lex.c */
#endif
#endif
#ifdef TIME_LIMIT
# define VENDOR_NAME "Advanced RISC Machines Limited"
#endif
#ifdef CPLUSPLUS
# ifndef CFRONT_MODE_WARN_LACKS_STORAGE_TYPE
# define CFRONT_MODE_WARN_LACKS_STORAGE_TYPE 1
# endif
#endif
#define MSG_TOOL_NAME "npp.a" /* used to load correct NLS message file */
#endif
/* end of clbcomp/options.h */
|
from django.db import models
class Interface:
    """Placeholder base class for hardware interfaces."""
    pass


class UartInterface(Interface):
    """UART variant of Interface, identified by a connection URI.

    NOTE(review): `Interface` is not a models.Model subclass, so this
    CharField is a plain class attribute here, not a database column —
    confirm the intended base class. A stray `|` artifact that followed the
    field (and broke the syntax) has been removed.
    """
    uri = models.CharField(max_length=30)
#!/usr/bin/env bash
set -ex
SHARD=$1
pushd $GOPATH/src/k8s.io/kubernetes/
export KUBECONFIG=${HOME}/admin.conf
export MASTER_NAME=${KIND_CLUSTER_NAME}-control-plane
export NODE_NAMES=${MASTER_NAME}
groomTestList() {
echo $(echo "${1}" | sed -e '/^\($\|#\)/d' -e 's/ /\\s/g' | tr '\n' '|' | sed -e 's/|$//')
}
SKIPPED_TESTS="
# PERFORMANCE TESTS: NOT WANTED FOR CI
Networking IPerf IPv[46]
\[Feature:PerformanceDNS\]
# FEATURES NOT AVAILABLE IN OUR CI ENVIRONMENT
\[Feature:Federation\]
should have ipv4 and ipv6 internal node ip
# TESTS THAT ASSUME KUBE-PROXY
kube-proxy
should set TCP CLOSE_WAIT timeout
# TO BE IMPLEMENTED: https://github.com/ovn-org/ovn-kubernetes/issues/819
Services.+session affinity
# TO BE IMPLEMENTED: https://github.com/ovn-org/ovn-kubernetes/issues/1116
EndpointSlices
# TO BE IMPLEMENTED: https://github.com/ovn-org/ovn-kubernetes/issues/1664
should be able to preserve UDP traffic when server pod cycles for a NodePort service
# NOT IMPLEMENTED; SEE DISCUSSION IN https://github.com/ovn-org/ovn-kubernetes/pull/1225
named port.+\[Feature:NetworkPolicy\]
# TO BE FIXED BY https://github.com/kubernetes/kubernetes/pull/93119
GCE
# ???
\[Feature:NoSNAT\]
Services.+(ESIPP|cleanup finalizer)
configMap nameserver
ClusterDns \[Feature:Example\]
should set default value on new IngressClass
# RACE CONDITION IN TEST, SEE https://github.com/kubernetes/kubernetes/pull/90254
should prevent Ingress creation if more than 1 IngressClass marked as default
"
IPV4_ONLY_TESTS="
# Limit the IPv4 related test to IPv4 only deployments
# See: https://github.com/leblancd/kube-v6-test
\[Feature:Networking-IPv4\]
# The following tests currently fail for IPv6 only, but should be passing.
# They will be removed as they are resolved.
# See: https://github.com/ovn-org/ovn-kubernetes/issues/1683
IPBlock.CIDR and IPBlock.Except
# shard-n Tests
# See: https://github.com/kubernetes/kubernetes/pull/94136
Network.+should resolve connection reset issue
# shard-np Tests
# See: https://github.com/ovn-org/ovn-kubernetes/issues/1517
NetworkPolicy.+should allow egress access to server in CIDR block
"
IPV6_ONLY_TESTS="
# Limit the IPv6 related tests to IPv6 only deployments
# See: https://github.com/leblancd/kube-v6-test
\[Feature:Networking-IPv6\]
"
DUALSTACK_ONLY_TESTS="
\[Feature:.*DualStack.*\]
"
# Github CI doesn´t offer IPv6 connectivity, so always skip IPv6 only tests.
# See: https://github.com/ovn-org/ovn-kubernetes/issues/1522
SKIPPED_TESTS=$SKIPPED_TESTS$IPV6_ONLY_TESTS
# IPv6 Only, skip any IPv4 Only Tests
if [ "$KIND_IPV4_SUPPORT" == false ] && [ "$KIND_IPV6_SUPPORT" == true ]; then
echo "IPv6 Only"
SKIPPED_TESTS=$SKIPPED_TESTS$IPV4_ONLY_TESTS
fi
# If not DualStack, skip DualStack tests
if [ "$KIND_IPV4_SUPPORT" == false ] || [ "$KIND_IPV6_SUPPORT" == false ]; then
SKIPPED_TESTS=$SKIPPED_TESTS$DUALSTACK_ONLY_TESTS
fi
SKIPPED_TESTS="$(groomTestList "${SKIPPED_TESTS}")"
# if we set PARALLEL=true, skip serial test
if [ "${PARALLEL:-false}" = "true" ]; then
export GINKGO_PARALLEL=y
export GINKGO_PARALLEL_NODES=20
SKIPPED_TESTS="${SKIPPED_TESTS}|\\[Serial\\]"
fi
case "$SHARD" in
shard-network)
FOCUS="\\[sig-network\\]"
;;
shard-conformance)
FOCUS="\\[Conformance\\]|\\[sig-network\\]"
;;
shard-test)
FOCUS=$(echo ${@:2} | sed 's/ /\\s/g')
;;
*)
echo "unknown shard"
exit 1
;;
esac
# setting this env prevents ginkgo e2e from trying to run provider setup
export KUBERNETES_CONFORMANCE_TEST='y'
# setting these is required to make RuntimeClass tests work ... :/
export KUBE_CONTAINER_RUNTIME=remote
export KUBE_CONTAINER_RUNTIME_ENDPOINT=unix:///run/containerd/containerd.sock
export KUBE_CONTAINER_RUNTIME_NAME=containerd
# FIXME we should not tolerate flakes
# but until then, we retry the test in the same job
# to stop PR retriggers for totally broken code
export GINKGO_TOLERATE_FLAKES='y'
export FLAKE_ATTEMPTS=2
NUM_NODES=2
./hack/ginkgo-e2e.sh \
'--provider=skeleton' "--num-nodes=${NUM_NODES}" \
"--ginkgo.focus=${FOCUS}" "--ginkgo.skip=${SKIPPED_TESTS}" \
"--report-dir=${E2E_REPORT_DIR}" '--disable-log-dump=true'
|
#! /usr/bin/env bash
# Launches a php-cgi FastCGI server with the Zephir cblock extension loaded,
# then runs the benchmark script ($1) several times to warm the server up.
echo "Starting FCGI process"
# -n skips php.ini, -d loads the extension, -b binds a unix socket.
php-cgi -n -d "extension=../cli-php-zephir-cblock/treffynnoncblock/ext/modules/treffynnoncblock.so" -b /tmp/treffynnon_bench.socket &
echo "Sleep for a second to allow process to ready itself"
sleep 1
echo "Attempting to warm up the server"
for ((n=0;n<15;n++)); do
# Using a low number for the seed to make the warm up faster
./"$1" 100 > /dev/null 2>&1
done
<filename>src/com/balceda/archj/service/interfaces/CategoryService.java
package com.balceda.archj.service.interfaces;
import com.balceda.archj.dao.interfaces.CategoryDAO;
import com.balceda.archj.model.Category;
/**
 * Service-layer contract for Category entities. Extends the generic CRUD
 * service (keyed by String) and adds accessors for the backing DAO.
 * Redundant {@code public} modifiers on the members were dropped —
 * interface methods are implicitly public.
 */
public interface CategoryService extends GenericService<Category, String> {

    /** Inject the DAO this service delegates persistence to. */
    void setCategoryDAO(CategoryDAO categoryDAO);

    /** @return the DAO currently backing this service. */
    CategoryDAO getCategoryDAO();
}
|
<filename>build/modules/users/routes/users-get.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAllUsers = (req, res) => {
res.status(200).json({ hello: 'world' });
};
|
#!/bin/bash
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# directories to run against
DIRS="src/core/lib src/core/tsi src/core/ext src/cpp test/core test/cpp include src/compiler src/csharp src/ruby third_party/address_sorting src/objective-c"
# file matching patterns to check
GLOB="*.h *.c *.cc *.m *.mm"
# clang format command
CLANG_FORMAT=${CLANG_FORMAT:-clang-format-5.0}
files=
for dir in $DIRS
do
for glob in $GLOB
do
files="$files `find ${CLANG_FORMAT_ROOT}/$dir -name $glob -and -not -name '*.generated.*' -and -not -name '*.upb.h' -and -not -name '*.upb.c' -and -not -name '*.pb.h' -and -not -name '*.pb.c' -and -not -name '*.pb.cc' -and -not -name '*.pbobjc.h' -and -not -name '*.pbobjc.m' -and -not -name '*.pbrpc.h' -and -not -name '*.pbrpc.m' -and -not -name end2end_tests.cc -and -not -name end2end_nosec_tests.cc -and -not -name public_headers_must_be_c89.c -and -not -name grpc_shadow_boringssl.h`"
done
done
# The CHANGED_FILES variable is used to restrict the set of files to check.
# Here we set files to the intersection of files and CHANGED_FILES
if [ -n "$CHANGED_FILES" ]; then
files=$(comm -12 <(echo $files | tr ' ' '\n' | sort -u) <(echo $CHANGED_FILES | tr ' ' '\n' | sort -u))
fi
if [ "$TEST" == "" ]
then
echo $files | xargs $CLANG_FORMAT -i
else
ok=yes
for file in $files
do
tmp=`mktemp`
$CLANG_FORMAT $file > $tmp
diff -u $file $tmp || ok=no
rm $tmp
done
if [ $ok == no ]
then
false
fi
fi
|
/**
* @flow
*/
import React from 'react'
import styled from 'styled-components'
import { darken } from 'polished'
import { Icon } from '../../../common/components'
import { colors } from '../../../common/theme'
const Wrapper = styled.div`
width: calc(100% - 2px);
display: flex;
background: ${darken(0.02, colors.background)};
justify-content: space-between;
align-items: center;
padding: 10px 0;
border-bottom: 1px solid ${colors.borders};
border-radius: 2px;
user-select: none;
@media screen and (max-width: 850px) {
flex-direction: column;
}
`
// One third of the toolbar; the first group is left-aligned, the last
// right-aligned, and on small screens every group is centered (and can be
// hidden entirely via the `hideOnMobile` prop).
const ButtonGroup = styled.div`
  width: calc(100% / 3);
  display: flex;
  justify-content: center;
  align-items: center;
  &:first-child {
    justify-content: flex-start;
  }
  &:last-child {
    justify-content: flex-end;
  }
  @media screen and (max-width: 850px) {
    width: 100%;
    justify-content: center;
    align-items: center;
    ${props => (props.hideOnMobile ? 'display: none;' : '')}
    /* Fix: the original wrote "last-child" without "&:", which parses as a
       type selector and never matched, so the last group stayed
       right-aligned on mobile. */
    &:first-child,
    &:last-child {
      justify-content: center;
    }
  }
`
const Button = styled.a`
text-align: center;
text-decoration: none;
font-size: 12px;
display: inline-flex;
justify-content: center;
align-items: center;
height: 35px;
min-width: 65px;
background: transparent;
border-radius: 2px;
color: #ddd;
padding: 0 2px;
i {
margin-right: 5px;
color: #ddd;
font-size: 20px;
}
&:hover {
color: ${colors.primary};
cursor: pointer;
i {
color: ${colors.primary};
}
}
`
const ToolButton = Button.extend`
border-radius: 2px;
margin: 0 2px;
@media screen and (max-width: 850px) {
border: 1px solid ${colors.borders};
i {
display: none;
}
}
`
const Pagination = styled.div`
width: calc(100% / 3);
display: flex;
justify-content: center;
align-items: center;
border-radius: 2px;
@media screen and (max-width: 850px) {
width: 100%;
margin-top: 10px;
}
`
const PageNumber = styled.span`
font-size: 12px;
height: 35px;
color: #ddd;
display: flex;
justify-content: center;
align-items: center;
text-align: center;
`
const PageButton = Button.extend`
margin: 0;
padding: 0;
i {
margin: 0;
padding: 0;
}
&:first-of-type {
border-right: none;
}
&:last-of-type {
border-left: none;
}
`
type Props = {
currPage: number,
resumePdfURL: string,
resumeDocXURL: string,
jsonURL?: string,
prevPage: () => void,
nextPage: () => void,
zoomIn: () => void,
zoomOut: () => void,
print: (url: string) => void
}
function Toolbar({
resumePdfURL,
resumeDocXURL,
jsonURL,
currPage,
prevPage,
nextPage,
print,
zoomIn,
zoomOut
}: Props) {
return (
<Wrapper>
<ButtonGroup>
<ToolButton href={resumePdfURL} download="resume.pdf">
<Icon type="file_download" /> PDF
</ToolButton>
<ToolButton href={resumeDocXURL} download="resume.docx">
<Icon type="file_download" /> DOCX
</ToolButton>
<ToolButton href={jsonURL} download="resume.json">
<Icon type="file_download" /> JSON
</ToolButton>
</ButtonGroup>
<Pagination>
<PageButton onClick={prevPage}>
<Icon type="arrow_back" />
</PageButton>
<PageNumber>Page {currPage}</PageNumber>
<PageButton onClick={nextPage}>
<Icon type="arrow_forward" />
</PageButton>
</Pagination>
<ButtonGroup hideOnMobile>
<ToolButton onClick={zoomOut}>
<Icon type="zoom_out" />
</ToolButton>
<ToolButton onClick={zoomIn}>
<Icon type="zoom_in" />
</ToolButton>
<ToolButton onClick={() => print(resumePdfURL)}>
<Icon type="print" />
</ToolButton>
<ToolButton onClick={() => print(resumeDocXURL)}>
<Icon type="print" />
</ToolButton>
</ButtonGroup>
</Wrapper>
)
}
export default Toolbar
|
<reponame>gongdongho12/kmufood-android
package net.sproutlab.kmufood.api;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* Created by kde713 on 2016. 9. 10..
*/
public class APIGlobal {

    // Shared Retrofit instance pointed at the KMU co-op food service;
    // responses are decoded with Gson.
    private static final Retrofit retrofit = new Retrofit.Builder()
            .baseUrl("https://kmucoop.kookmin.ac.kr/")
            .addConverterFactory(GsonConverterFactory.create())
            .build();

    // Single app-wide API entry point built from the Retrofit instance.
    public static final CallInterface callInterface = retrofit.create(CallInterface.class);
}
|
def delete_first_group(self):
    """Remove the first group reported by get_groups().

    Raises:
        Exception: if there are no groups at all.
    """
    groups = self.get_groups()
    # Guard clause instead of if/else: bail out early when nothing exists.
    if not groups:
        raise Exception("No groups found to delete")
    first = groups[0]
    self.delete_group_by_id(first.id)
def delete_group_by_id(self, group_id):
    """Delegate removal of the group identified by ``group_id``."""
    # Thin pass-through to the underlying delete primitive.
    self.delete_group(group_id)
<gh_stars>0
// Solution
/**
 * Insert a new node carrying {@code data} at zero-based {@code position}
 * and return the (possibly new) head of the list.
 */
public static SinglyLinkedListNode insertNodeAtPosition(SinglyLinkedListNode head, int data, int position) {
    SinglyLinkedListNode inserted = new SinglyLinkedListNode(data);
    if (position == 0) {
        // The new node becomes the head.
        inserted.next = head;
        return inserted;
    }
    // Walk position-1 links to reach the node just before the insertion point.
    SinglyLinkedListNode before = head;
    for (int step = 1; step < position; step++) {
        before = before.next;
    }
    // Splice the new node in between `before` and its current successor.
    inserted.next = before.next;
    before.next = inserted;
    return head;
}
|
import numpy as np
from sklearn.cluster import KMeans
def kmeans_clustering(n, k, points):
    """Cluster `points` into `k` groups with k-means.

    Args:
        n: number of points; kept for interface compatibility, the length
           of `points` is what scikit-learn actually uses.
        k: number of clusters.
        points: sequence of coordinate pairs.

    Returns:
        Array of cluster labels, one per input point.
    """
    data = np.array(points)
    # fit() is unsupervised: the original passed `points` a second time as
    # the supervised `y` argument, which KMeans silently ignores.
    kmeans = KMeans(n_clusters=k).fit(data)
    # labels_ already holds the assignment of the training points, so the
    # original's extra predict() pass over the same data is unnecessary.
    return kmeans.labels_
n=6; k=3; points=[[2.5,3.5],[3.,3.5],[3,2],[2.5,2.5],[3.5,2.5],[3,3]]
print(kmeans_clustering(n,k,points)) |
package client
import (
"bytes"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"sync/atomic"
"github.com/alikarimi999/shitcoin/core/types"
netype "github.com/alikarimi999/shitcoin/network/types"
)
// BroadBlock relays a mined block to every known peer except the block's
// original sender and this node itself.
func (c *Client) BroadBlock(mb *netype.MsgBlock) {
	c.PeerSet.Mu.Lock()
	defer c.PeerSet.Mu.Unlock()

	// Capture the origin before overwriting the sender field. The original
	// re-read mb.Sender inside the loop after setting it to our own ID, so
	// the origin peer was only skipped if it happened to be visited first,
	// and later log lines reported the wrong source.
	origSender := mb.Sender

	// Stamp ourselves as the new sender for the relayed message; the
	// payload is identical for every peer, so marshal it once.
	mb.Sender = c.Ch.Node.ID
	b, err := json.Marshal(mb)
	if err != nil {
		log.Printf("BroadBlock: marshal failed: %v", err)
		return
	}

	for _, node := range c.PeerSet.Peers {
		// dont send to miner of block or sender
		if origSender == node.ID || c.Ch.Node.ID == node.ID {
			continue
		}
		fmt.Printf("sending block %d: %x to %s which received from %s\n", mb.Block.BH.BlockIndex, mb.Block.BH.BlockHash, node.ID, origSender)
		resp, err := c.Cl.Post(fmt.Sprintf("%s/minedblock", node.FullAdd), "application/json", bytes.NewReader(b))
		if err != nil {
			// Best-effort broadcast: log and keep going; the original
			// silently dropped both the error and the response body.
			log.Printf("BroadBlock: send to %s failed: %v", node.ID, err)
			continue
		}
		resp.Body.Close() // release the connection for reuse
	}
}
// getBlock fetches the block identified by hash from syncAddress on behalf
// of node nid. On any failure it logs the error and returns the (empty)
// freshly-allocated MsgBlock.
func getBlock(hash []byte, nid string, syncAddress string, cl http.Client) *netype.MsgBlock {
	data := netype.GetBlock{
		Node:      nid,
		BlockHash: hash,
	}

	mb := netype.NewMsgBlock()

	msg, err := json.Marshal(data)
	if err != nil {
		fmt.Println(err.Error())
		return mb
	}

	resp, err := cl.Post(fmt.Sprintf("%s/getblock", syncAddress), "application/json", bytes.NewReader(msg))
	if err != nil {
		fmt.Println(err.Error())
		return mb
	}
	// Fix: the original never closed the response body, leaking the
	// underlying connection on every call.
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err.Error())
		return mb
	}

	if err := json.Unmarshal(body, mb); err != nil {
		fmt.Println(err.Error())
		return mb
	}
	return mb
}
// Downloading genesis block
// getGen bootstraps an empty chain (height 0) by downloading the genesis
// block from peer n, applying it to local pool/state, and persisting it.
// A non-zero chain height means we already have genesis: no-op.
func (c *Client) getGen(n *types.Node) {
	if atomic.LoadUint64(&c.Ch.ChainHeight) == 0 {
		mb := getBlock(n.GenesisHash, n.ID, n.FullAdd, c.Cl)
		c.Ch.LastBlock = *mb.Block
		atomic.AddUint64(&c.Ch.ChainHeight, 1)
		// update Node
		c.Ch.Node.GenesisHash = mb.Block.BH.BlockHash
		c.Ch.Node.LastHash = mb.Block.BH.BlockHash
		// NOTE(review): the `false` flags presumably mean "not a rollback/
		// mined-locally" — confirm against UpdatePool/StateTransition.
		c.Ch.TxPool.UpdatePool(mb.Block, false)
		c.Ch.ChainState.StateTransition(mb.Block, false)
		// Save Genesis block in database
		err := c.Ch.DB.SaveBlock(mb.Block, mb.Sender, mb.Miner, nil)
		if err != nil {
			// Without genesis the node cannot proceed at all.
			log.Fatalln(err)
		}
		fmt.Printf("genesis block added to database\n")
	}
}
|
import { put, takeEvery, call } from 'redux-saga/effects';
// import { uniqWith, isEqual } from 'lodash';
// import { REHYDRATE } from 'redux-persist';
import { SUCCESS_GET_TOKEN } from '../auth/types';
import { errorMessage } from '../error/actions';
import { getList, postArtist, deleteArtist } from './utilities/artistsUser';
import { successGetUserArtists, successFollowArtist, successUnfollowArtist } from './actions';
import {
FOLLOW_ARTIST, UNFOLLOW_ARTIST, SUCCESS_FOLLOW_ARTIST,
SUCCESS_UNFOLLOW_ARTIST
} from './types';
// Fetch the user's followed + recommended artists and store them in state;
// on failure dispatch an error message instead.
export function* getListArtistsUser() {
  try {
    const { artists, recommended } = yield call(getList);
    yield put(successGetUserArtists(artists, recommended));
  } catch (e) {
    yield put(errorMessage(e.message));
  }
}
// Follow the artist named in the action payload, then signal success
// (which in turn triggers a list refresh — see sagasArtists).
export function* postFollowArtist({ payload }) {
  const { name } = payload;
  try {
    yield call(postArtist, name);
    yield put(successFollowArtist());
  } catch (e) {
    yield put(errorMessage(e.message));
  }
}
// Unfollow the artist named in the action payload, then signal success
// (which in turn triggers a list refresh — see sagasArtists).
export function* deleteFollowArtist({ payload }) {
  const { name } = payload;
  try {
    yield call(deleteArtist, name);
    yield put(successUnfollowArtist());
  } catch (e) {
    yield put(errorMessage(e.message));
  }
}
// Root saga: refresh the artist list on login and after any follow /
// unfollow succeeds, and route follow/unfollow actions to their workers.
export function* sagasArtists() {
  yield takeEvery([SUCCESS_GET_TOKEN, SUCCESS_FOLLOW_ARTIST, SUCCESS_UNFOLLOW_ARTIST], getListArtistsUser);
  yield takeEvery(FOLLOW_ARTIST, postFollowArtist);
  yield takeEvery(UNFOLLOW_ARTIST, deleteFollowArtist);
}
|
# Unzip source package. Edit this script to change directories as your env.
# In terminal window, enter path "MyBox/pack/linux", and run this script.
# Builds the MyBox jar with Maven, wraps it into a Linux app image with
# jpackage, and leaves MyBox-<version>-linux.tar.gz one directory up.
version=6.5.1
jpackagePath=/usr/java/jdk-17/bin
jdkPath=/usr/java/jdk-17
# Start from clean staging directories.
rm -rf app/*
rm -rf jar/*
mkdir app
mkdir jar
cd ../..
mvn clean
mvn -P linux package
cd pack/linux
# Only run jpackage if the built jar was moved into jar/ successfully.
mv ../../target/*.jar jar/ && \
$jpackagePath/jpackage --type app-image --app-version $version --vendor Mara --verbose --runtime-image $jdkPath --dest app --name MyBox --input jar --main-jar MyBox-$version.jar --icon res/MyBox.png
cd app
tar cfz MyBox-$version-linux.tar.gz MyBox
mv MyBox*.gz ..
cd ..
mv jar/*.jar .
# Drop the Maven build tree to save space.
rm -rf ../../target
|
#!/bin/bash
# Vendor the sqlsn-core module at a pinned release for reproducible builds.
git clone https://github.com/s-oravec/sqlsn-core.git oradb_modules/sqlsn-core
pushd oradb_modules/sqlsn-core
# Detached-HEAD checkout of the release tag.
git checkout tags/0.1.1
popd
#!/bin/bash
# Supervisor loop for the monitor agent on a Termux/Android device:
# exports the runtime configuration, then keeps relaunching
# monitor/main.bin.js forever, waiting 30s between exits.
export FLOWER_WS="ws://127.0.0.1:5556/api/task/events/task-succeeded/"
export UUID_FILE='/data/data/com.termux/files/home/.ro_serialno'
export GROUP_ID='/data/data/com.termux/files/home/.groupid.txt'
export VERSION_FILE='/.cacheresource/version'
export AUTO_UPDATE_FILE='/.cacheresource/workaipython/wtconf/enableWT'
export DOCKER_COMPOSE_YML='/.cacheresource/docker-compose.yml'
export HOST_ADDRESS="workaihost.tiegushi.com"
export HOST_PORT=80
export RUNTIME_DIR='/.cacheresource'
export RESTART_TIMEOUT=20
while true; do
echo "entering index.js"
#pushd /.cacheresource/monitor
#disable watchtower before startup index.js
#rm -rf ${AUTO_UPDATE_FILE}
pushd monitor
node main.bin.js
popd
#popd
echo "exit index.js"
# Back off before relaunching so a crash loop doesn't spin hot.
sleep 30
done
|
<reponame>sullivanpt/creep-em-out-server
'use strict'

// Test-fixture helpers wrapping the server's data models.
const models = require('../../server/models')
exports.models = models

// All fixture articles are filed under the 'flame' topic.
// (`let` → `const`: the binding is never reassigned.)
const topic = models.Topic.getById('flame')

// Create a member; a tracker is generated when none is supplied.
exports.newMember = (trackerAndHandle) => models.Member.insert(trackerAndHandle || models.Member.generateTracker(), trackerAndHandle)

// Create an article authored by `member`, defaulting the text when absent.
exports.newArticle = (member, text) => models.Article.insert(topic, { text: text || `message by ${member.handle}` }, member)
|
#!/bin/sh -x
#
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# @APPLE_OSREFERENCE_LICENSE_HEADER_START@
#
# This file contains Original Code and/or Modifications of Original Code
# as defined in and that are subject to the Apple Public Source License
# Version 2.0 (the 'License'). You may not use this file except in
# compliance with the License. The rights granted to you under the License
# may not be used to create, or enable the creation or redistribution of,
# unlawful or unlicensed copies of an Apple operating system, or to
# circumvent, violate, or enable the circumvention or violation of, any
# terms of an Apple operating system software license agreement.
#
# Please obtain a copy of the License at
# http://www.opensource.apple.com/apsl/ and read it before using this file.
#
# The Original Code and all software distributed under the License are
# distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
# INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
# Please see the License for the specific language governing rights and
# limitations under the License.
#
# @APPLE_OSREFERENCE_LICENSE_HEADER_END@
#
# build inside OBJROOT
cd $OBJROOT
MIG=`xcrun -sdk "$SDKROOT" -find mig`
MIGCC=`xcrun -sdk "$SDKROOT" -find cc`
export MIGCC
MIG_DEFINES="-DLIBSYSCALL_INTERFACE"
MIG_HEADER_DST="$BUILT_PRODUCTS_DIR/mig_hdr/include/mach"
MIG_PRIVATE_HEADER_DST="$BUILT_PRODUCTS_DIR/mig_hdr/local/include/mach"
SERVER_HEADER_DST="$BUILT_PRODUCTS_DIR/mig_hdr/include/servers"
MACH_HEADER_DST="$BUILT_PRODUCTS_DIR/mig_hdr/include/mach"
# from old Libsystem makefiles
MACHINE_ARCH=`echo $ARCHS | cut -d' ' -f 1`
if [[ ( "$MACHINE_ARCH" = "arm64" || "$MACHINE_ARCH" = "x86_64" || "$MACHINE_ARCH" = "x86_64h" ) && `echo $ARCHS | wc -w` -gt 1 ]]
then
# MACHINE_ARCH needs to be a 32-bit arch to generate vm_map_internal.h correctly.
MACHINE_ARCH=`echo $ARCHS | cut -d' ' -f 2`
if [[ ( "$MACHINE_ARCH" = "arm64" || "$MACHINE_ARCH" = "x86_64" || "$MACHINE_ARCH" = "x86_64h" ) && `echo $ARCHS | wc -w` -gt 1 ]]
then
# MACHINE_ARCH needs to be a 32-bit arch to generate vm_map_internal.h correctly.
MACHINE_ARCH=`echo $ARCHS | cut -d' ' -f 3`
fi
fi
SRC="$SRCROOT/mach"
MIG_INTERNAL_HEADER_DST="$BUILT_PRODUCTS_DIR/internal_hdr/include/mach"
MIG_PRIVATE_DEFS_INCFLAGS="-I${SDKROOT}/System/Library/Frameworks/System.framework/PrivateHeaders"
MIGS="clock.defs
clock_priv.defs
clock_reply.defs
exc.defs
host_priv.defs
host_security.defs
lock_set.defs
mach_host.defs
mach_port.defs
mach_voucher.defs
processor.defs
processor_set.defs
task.defs
thread_act.defs
vm_map.defs"
MIGS_PRIVATE=""
MIGS_DUAL_PUBLIC_PRIVATE=""
if [[ "$PLATFORM_NAME" = "iphoneos" || "$PLATFORM_NAME" = "iphonesimulator" || "$PLATFORM_NAME" = "iphoneosnano" || "$PLATFORM_NAME" = "iphonenanosimulator" ]]
then
MIGS_PRIVATE="mach_vm.defs"
else
MIGS+=" mach_vm.defs"
fi
MIGS_INTERNAL="mach_port.defs
mach_vm.defs
thread_act.defs
vm_map.defs"
SERVER_HDRS="key_defs.h
ls_defs.h
netname_defs.h
nm_defs.h"
MACH_HDRS="mach.h
mach_error.h
mach_init.h
mach_interface.h
port_obj.h
sync.h
vm_task.h
vm_page_size.h"
# install /usr/include/server headers
mkdir -p $SERVER_HEADER_DST
for hdr in $SERVER_HDRS; do
install -o 0 -c -m 444 $SRC/servers/$hdr $SERVER_HEADER_DST
done
# install /usr/include/mach headers
mkdir -p $MACH_HEADER_DST
for hdr in $MACH_HDRS; do
install -o 0 -c -m 444 $SRC/mach/$hdr $MACH_HEADER_DST
done
# special case because we only have one to do here
$MIG -novouchers -arch $MACHINE_ARCH -header "$SERVER_HEADER_DST/netname.h" $SRC/servers/netname.defs
# install /usr/include/mach mig headers
mkdir -p $MIG_HEADER_DST
for mig in $MIGS $MIGS_DUAL_PUBLIC_PRIVATE; do
MIG_NAME=`basename $mig .defs`
$MIG -novouchers -arch $MACHINE_ARCH -cc $MIGCC -header "$MIG_HEADER_DST/$MIG_NAME.h" $MIG_DEFINES $SRC/$mig
done
mkdir -p $MIG_PRIVATE_HEADER_DST
for mig in $MIGS_PRIVATE $MIGS_DUAL_PUBLIC_PRIVATE; do
MIG_NAME=`basename $mig .defs`
$MIG -novouchers -arch $MACHINE_ARCH -cc $MIGCC -header "$MIG_PRIVATE_HEADER_DST/$MIG_NAME.h" $MIG_DEFINES $MIG_PRIVATE_DEFS_INCFLAGS $SRC/$mig
if [ ! -e "$MIG_HEADER_DST/$MIG_NAME.h" ]; then
echo "#error $MIG_NAME.h unsupported." > "$MIG_HEADER_DST/$MIG_NAME.h"
fi
done
# special headers used just for building Libsyscall
# Note: not including -DLIBSYSCALL_INTERFACE to mig so we'll get the proper
# 'internal' version of the headers being built
mkdir -p $MIG_INTERNAL_HEADER_DST
for mig in $MIGS_INTERNAL; do
MIG_NAME=`basename $mig .defs`
$MIG -novouchers -arch $MACHINE_ARCH -cc $MIGCC -header "$MIG_INTERNAL_HEADER_DST/${MIG_NAME}_internal.h" $SRC/$mig
done
|
from decimal import Decimal
def process_transactions(transactions):
    """Aggregate summary statistics over a list of transaction dicts.

    Each transaction is expected to carry 'amount', 'payer', 'iban',
    'bic' and a comma-separated 'id' string.

    Returns a dict with the transaction count, summed amount, and
    de-duplicated payers / IBANs / BICs / ids.
    """
    payers = set()
    ibans = set()
    bics = set()
    ids = set()
    total = 0
    for tx in transactions:
        total += tx['amount']
        payers.add(tx['payer'])
        ibans.add(tx['iban'])
        bics.add(tx['bic'])
        # The 'id' field may hold several comma-separated identifiers.
        for raw_id in tx['id'].split(','):
            ids.add(raw_id.strip())
    return {
        'total_transactions': len(transactions),
        'total_amount': total,
        'unique_payers': list(payers),
        'unique_iban': list(ibans),
        'unique_bic': list(bics),
        'unique_ids': list(ids),
    }
<reponame>darleilippi/node-typeorm-crud<filename>src/routes/categories.routes.ts<gh_stars>0
import { Router } from "express";
import { CreateCategoryController } from "../controllers/Category/CreateCategoryController";
import { DeleteCategoryController } from "../controllers/Category/DeleteCategoryController";
import { GetAllCategoryController } from "../controllers/Category/GetAllCategoryController";
import { UpdateCategoryController } from "../controllers/Category/UpdateCategoryController";
// Express router exposing CRUD endpoints for the Category resource.
const categoriesRouter = Router();

// Create a new category.
categoriesRouter.post("/", new CreateCategoryController().handle);
// List all categories.
categoriesRouter.get("/", new GetAllCategoryController().handle);
// Delete the category with the given id.
categoriesRouter.delete("/:id", new DeleteCategoryController().handle);
// Update the category with the given id.
categoriesRouter.put("/:id", new UpdateCategoryController().handle);

export { categoriesRouter };
#!/usr/bin/env sh
set -e
# Evaluate the trained LeNet MNIST model on GPU 0 for 100 test iterations,
# using the snapshot produced after 10000 training iterations.
./build/tools/caffe test -gpu 0 -model examples/mnist/lenet_train_test.prototxt -weights examples/mnist/lenet_iter_10000.caffemodel -iterations 100
|
def searchInsert(nums: list, target: int) -> int:
    """Return the index of ``target`` in sorted ``nums``, or the index at
    which it would be inserted to keep ``nums`` sorted (binary search).

    Args:
        nums: A sorted list of comparable values.
        target: The value to locate.

    Returns:
        The index of ``target`` if present, otherwise its insertion point.
    """
    # Fix: the original annotated nums as ``list()`` (an empty list
    # instance) instead of the ``list`` type.
    # An empty list always inserts at position 0.
    if len(nums) == 0:
        return 0
    # Half-open search interval [start, end).
    start, end = 0, len(nums)
    while start < end:
        mid = (start + end) // 2
        if target == nums[mid]:
            return mid
        elif target < nums[mid]:
            end = mid          # search the left half
        else:
            start = mid + 1    # search the right half
    # start == end is the insertion point.
    return start
# Quick manual checks: expect 4 (insert past the end) and 0 (insert at front).
print(searchInsert([1, 3, 5, 6], 7))
print(searchInsert([1, 3, 5, 6], 0))
|
<gh_stars>0
// View listing the games the signed-in user has published (bought games are
// currently only logged). Built on an UPPERCASE-style UI framework:
// CLASS/VIEW/DIV/NEXT/EACH/CLEAR_BOTH are framework globals.
DPlayGames.MyGames = CLASS({

	preset : () => {
		return VIEW;
	},

	init : (inner, self) => {

		let publishedGameList;

		// Page container; published-game panels float inside it.
		let content = DIV({
			style : {
				paddingTop : 120,
				margin : 'auto',
				width : 800
			},
			c : [publishedGameList = DIV(), CLEAR_BOTH()]
		});

		NEXT([
		(next) => {
			DPlayInventory.getAccountId((accountId) => {
				// If the account ID could not be fetched, the user is not logged in.
				if (accountId === undefined) {
					DPlayInventory.login(next);
				} else {
					next();
				}
			});
		},

		(next) => {
			// NOTE(review): this step returns a function rather than calling
			// getAccountId directly -- presumably the NEXT framework invokes
			// the returned function; confirm against the framework contract.
			return () => {
				DPlayInventory.getAccountId(next);
			};
		},

		() => {
			return (walletAddress) => {

				// Appends one info panel for a published game. The detail
				// parameters beyond gameId are currently unused by the panel.
				let createPublishedGamePanel = (gameId, title, summary, description, titleImageURL, bannerImageURL) => {

					if (title === '') {
						title = '지정되지 않음';
					}

					publishedGameList.append(DPlayGames.GameInfoPanel({
						style : {
							flt : 'left',
							marginRight : 10,
							marginBottom : 30
						},
						gameId : gameId
					}));
				};

				DPlayStoreContract.getPublishedGameIds(walletAddress, (gameIds) => {
					EACH(gameIds, (gameId) => {
						DPlayStoreContract.getGameDetails({
							gameId : gameId,
							language : INFO.getLang()
						}, (title, summary, description, titleImageURL, bannerImageURL) => {

							// If there is no title for the current language,
							// fall back to the game's default language.
							if (title === '') {
								DPlayStoreContract.getGameInfo(gameId, (publisher, isReleased, price, gameURL, isWebGame, defaultLanguage, createTime, lastUpdateTime) => {
									DPlayStoreContract.getGameDetails({
										gameId : gameId,
										language : defaultLanguage
									}, (title, summary, description, titleImageURL, bannerImageURL) => {
										createPublishedGamePanel(gameId, title, summary, description, titleImageURL, bannerImageURL);
									});
								});
							}

							else {
								createPublishedGamePanel(gameId, title, summary, description, titleImageURL, bannerImageURL);
							}
						});
					});
				});

				// Bought games: fetched but only logged for now.
				DPlayStoreContract.getBoughtGameIds(walletAddress, (boughtGameIds) => {
					console.log(boughtGameIds);
				});
			};
		}]);

		DPlayGames.Layout.setContent(content);

		// Tear the view down when it is closed.
		inner.on('close', () => {
			content.remove();
		});
	}
});
#!/bin/bash
set -eo pipefail

# Run update-license over every non-vendored Go file, forwarding any extra
# flags (e.g. --dry) to the tool.
run_update_license() {
    # doing this because of SC2046 warning
    for file in $(find . -name '*.go' | grep -v \.\/vendor); do
        # Fix: quote "$@" (SC2068) so forwarded flags are not word-split
        # or glob-expanded.
        update-license "$@" "${file}"
    done
}

if [ -z "${DRY_RUN}" ]; then
    # Normal mode: rewrite license headers in place.
    run_update_license
else
    # Dry-run mode: collect the tool's report and fail if any file has a
    # wrong or missing header.
    DRY_OUTPUT="$(run_update_license --dry)"
    if [ -n "${DRY_OUTPUT}" ]; then
        echo "The following files do not have correct license headers."
        echo "Please run make license and amend your commit."
        echo
        echo "${DRY_OUTPUT}"
        exit 1
    fi
fi
class SignalProcessingModule:
    """Holds demodulation state and computes a simple noise-level estimate."""

    def __init__(self, samplesBefore, samplesAfter, size, noiseThreshold, preamble, samplesPerSymbol):
        # Number of samples retained before/after an event of interest.
        self.samplesBefore = samplesBefore
        self.samplesAfter = samplesAfter
        self.size = size
        # Amplitude below which a sample counts as noise.
        self.noiseThreshold = noiseThreshold
        self.preamble = preamble
        # One demodulation buffer per sample offset within a symbol.
        self.numberOfBuffers = samplesPerSymbol
        self.noiseLevel = 0
        self.noiseState = []
        self.demodBuffer = ["" for _ in range(samplesPerSymbol)]

    def calculateNoiseLevel(self, samples):
        """Set ``self.noiseLevel`` to the fraction of ``samples`` whose
        value is below ``self.noiseThreshold``.

        Fix: an empty input now leaves the level at 0 instead of raising
        ZeroDivisionError.
        """
        if not samples:
            self.noiseLevel = 0
            return
        noiseCount = sum(1 for sample in samples if sample < self.noiseThreshold)
        self.noiseLevel = noiseCount / len(samples)
// Barrel file: re-export the RadioGroup component and its props type.
export {RadioGroup} from './RadioGroup';
export type {RadioGroupProps} from './RadioGroup';
|
#!/usr/bin/env bash
set -eux
set -o pipefail

# to script directory
cd "$(dirname "$0")"

# import functions
. ../setup-scripts/common/color-print.sh

# import local nameserver config
. ../local-nameserver.sh
echo "\"${LOCAL_ENV_NS}\" will be added to docker run command for ZMS container"

#################################################
### ZMS Deploy
#################################################
cat <<'EOF' | colored_cat c
#################################################
### ZMS Deploy
#################################################
EOF

# set up env.
export BASE_DIR=${SD_DIND_SHARE_PATH}/terraform-provider-athenz
export DOCKER_DIR=${BASE_DIR}/docker
echo "Setup environment : Setting BASE_DIR to : ${BASE_DIR}, DOCKER_DIR: ${DOCKER_DIR}"
. "${BASE_DIR}/docker/env.sh"
echo "Done loading ENV. from ${BASE_DIR}/docker/env.sh" | colored_cat p
if [ -f "${DOCKER_DIR}/setup-scripts/dev-env-exports.sh" ]; then
    . "${DOCKER_DIR}/setup-scripts/dev-env-exports.sh"
    echo 'NOTE: You are using the DEV settings in dev-env-exports.sh !!!' | colored_cat p
fi

### ----------------------------------------------------------------
# check password
# Fix: under `set -u` a plain "$VAR" reference aborts with an "unbound
# variable" error before the friendly message can be printed; use the
# ${VAR:-} default expansion so the check itself reports the problem.
[ -z "${ZMS_DB_ROOT_PASS:-}" ] && echo '$ZMS_DB_ROOT_PASS not set' | colored_cat r && exit 1
[ -z "${ZMS_DB_ADMIN_PASS:-}" ] && echo '$ZMS_DB_ADMIN_PASS not set' | colored_cat r && exit 1

### ----------------------------------------------------------------

echo ''
echo '# Deploy ZMS' | colored_cat r

echo '1. create docker network' | colored_cat g
if ! docker network inspect "${DOCKER_NETWORK}" > /dev/null 2>&1; then
    docker network create --subnet "${DOCKER_NETWORK_SUBNET}" "${DOCKER_NETWORK}";
fi

echo '2. start ZMS DB' | colored_cat g
docker run -d -h "${ZMS_DB_HOST}" \
    -p "${ZMS_DB_PORT}:3306" \
    --network="${DOCKER_NETWORK}" \
    --user mysql:mysql \
    -v "${DOCKER_DIR}/db/zms/zms-db.cnf:/etc/mysql/conf.d/zms-db.cnf" \
    -e "MYSQL_ROOT_PASSWORD=${ZMS_DB_ROOT_PASS}" \
    --name "${ZMS_DB_HOST}" athenz/athenz-zms-db:latest
echo "wait for ZMS DB to be ready, DOCKER_DIR: ${DOCKER_DIR}"
docker run --rm -it \
    --network="${DOCKER_NETWORK}" \
    --user mysql:mysql \
    -v "${DOCKER_DIR}/deploy-scripts/common/wait-for-mysql/wait-for-mysql.sh:/bin/wait-for-mysql.sh" \
    -v "${DOCKER_DIR}/db/zms/zms-db.cnf:/etc/my.cnf" \
    -e "MYSQL_PWD=${ZMS_DB_ROOT_PASS}" \
    --entrypoint '/bin/wait-for-mysql.sh' \
    --name wait-for-mysql athenz/athenz-zms-db:latest \
    --user='root' \
    --host="${ZMS_DB_HOST}" \
    --port=3306

echo '3. add zms_admin to ZMS DB' | colored_cat g
# also, remove root user with wildcard host
docker exec --user mysql:mysql \
    "${ZMS_DB_HOST}" mysql \
    --database=zms_server \
    --user=root --password="${ZMS_DB_ROOT_PASS}" \
    --execute="CREATE USER 'zms_admin'@'%' IDENTIFIED BY '${ZMS_DB_ADMIN_PASS}'; GRANT ALL PRIVILEGES ON zms_server.* TO 'zms_admin'@'%'; FLUSH PRIVILEGES;"
docker exec --user mysql:mysql \
    "${ZMS_DB_HOST}" mysql \
    --database=mysql \
    --user=root --password="${ZMS_DB_ROOT_PASS}" \
    --execute="DELETE FROM user WHERE user = 'root' AND host = '%';"
docker exec --user mysql:mysql \
    "${ZMS_DB_HOST}" mysql \
    --database=mysql \
    --user=root --password="${ZMS_DB_ROOT_PASS}" \
    --execute="SELECT user, host FROM user;"

echo "4. start ZMS ZMS_HOST : ${ZMS_HOST}, ZMS_PORT: ${ZMS_PORT}, LOCAL_ENV_NS: ${LOCAL_ENV_NS}, DOCKER_NETWORK: ${DOCKER_NETWORK}, DOCKER_DNS: ${DOCKER_DNS}" | colored_cat g
# NOTE: ${LOCAL_ENV_NS} is intentionally unquoted so it can expand to
# multiple docker arguments (or to nothing).
docker run -t -h "${ZMS_HOST}" \
    -p "${ZMS_PORT}:${ZMS_PORT}" \
    --dns="${DOCKER_DNS}" \
    --network="${DOCKER_NETWORK}" \
    ${LOCAL_ENV_NS} \
    --user "$(id -u):$(id -g)" \
    -v "${DOCKER_DIR}/zms/var:/opt/athenz/zms/var" \
    -v "${DOCKER_DIR}/zms/conf:/opt/athenz/zms/conf/zms_server" \
    -v "${DOCKER_DIR}/logs/zms:/opt/athenz/zms/logs/zms_server" \
    -v "${DOCKER_DIR}/jars:/usr/lib/jars" \
    -e "JAVA_OPTS=${ZMS_JAVA_OPTS}" \
    -e "ZMS_DB_ADMIN_PASS=${ZMS_DB_ADMIN_PASS}" \
    -e "ZMS_RODB_ADMIN_PASS=${ZMS_RODB_ADMIN_PASS}" \
    -e "ZMS_KEYSTORE_PASS=${ZMS_KEYSTORE_PASS}" \
    -e "ZMS_TRUSTSTORE_PASS=${ZMS_TRUSTSTORE_PASS}" \
    -e "ZMS_PORT=${ZMS_PORT}" \
    --name "${ZMS_HOST}" athenz/athenz-zms-server:latest \
    2>&1 | sed 's/^/ZMS-DOCKER: /' &
echo "wait for ZMS to be ready ZMS_HOST: ${ZMS_HOST} : "

# wait for ZMS to be ready
until docker run --rm --entrypoint curl \
    --network="${DOCKER_NETWORK}" \
    --user "$(id -u):$(id -g)" \
    --name athenz-curl athenz/athenz-setup-env:latest \
    -k -vvv "https://${ZMS_HOST}:${ZMS_PORT}/zms/v1/status" \
    ; do
    echo 'ZMS is unavailable - will sleep 3s...'
    sleep 3
done

echo 'ZMS is up!' | colored_cat g
|
<gh_stars>0
// Prints each script argument, demonstrating Ruby-style iteration
// (foreach with a block) in a Scala script.
// NOTE(review): this uses Scala's deprecated "procedure syntax" (no '='
// before the body) and assumes `args` is provided by the enclosing
// script context -- confirm before migrating to Scala 3.
def rubyStyleForLoop {
  println ("for loop using ruby-style iteration")
  args.foreach { arg =>
    println(arg)
  }
}
rubyStyleForLoop
|
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.mips.instruction.execution;
import net.jamsimulator.jams.mips.architecture.MultiCycleArchitecture;
import net.jamsimulator.jams.mips.instruction.assembled.AssembledInstruction;
import net.jamsimulator.jams.mips.instruction.basic.ControlTransferInstruction;
import net.jamsimulator.jams.mips.register.Register;
import net.jamsimulator.jams.mips.simulation.MIPSSimulation;
import net.jamsimulator.jams.mips.simulation.multicycle.MultiCycleStep;
import net.jamsimulator.jams.mips.simulation.pipelined.ForwardingSupporter;
import net.jamsimulator.jams.mips.simulation.pipelined.PipelinedSimulation;
import net.jamsimulator.jams.mips.simulation.pipelined.exception.RAWHazardException;
/**
 * Base execution for instructions on multi-cycle (and pipelined) MIPS
 * architectures. Splits execution into decode / execute / memory /
 * write-back steps and provides register lock/unlock/forward helpers used
 * to model hazards on pipelined simulations.
 *
 * @param <Inst> the assembled instruction type this execution handles.
 */
public abstract class MultiCycleExecution<Inst extends AssembledInstruction> extends InstructionExecution<MultiCycleArchitecture, Inst> {

    // Intermediate results carried between pipeline steps.
    protected int[] decodeResult;
    protected int[] executionResult;
    protected int[] memoryResult;

    // Simulation-assigned id, used by the flow section (see getInstructionId).
    protected long instructionId;

    // Whether this instruction participates in the memory / write-back steps.
    protected boolean executesMemory, executesWriteBack;
    // Whether this instruction runs in a branch delay slot.
    protected boolean inDelaySlot;

    /**
     * Creates the execution.
     *
     * @param simulation        the owning simulation.
     * @param instruction       the assembled instruction to execute.
     * @param address           the instruction's address.
     * @param executesMemory    whether the instruction uses the memory step.
     * @param executesWriteBack whether the instruction uses the write-back step.
     */
    public MultiCycleExecution(MIPSSimulation<? extends MultiCycleArchitecture> simulation, Inst instruction, int address,
                               boolean executesMemory, boolean executesWriteBack) {
        super(simulation, instruction, address);
        this.executesMemory = executesMemory;
        this.executesWriteBack = executesWriteBack;
        this.inDelaySlot = false;
    }

    /**
     * Returns the multi-cycle id of this instruction.
     * This is used by the flow section.
     *
     * @return the id.
     */
    public long getInstructionId() {
        return instructionId;
    }

    /**
     * THIS METHOD CAN BE USED ONLY BY SIMULATIONS!
     * <p>
     * Sets the multi-cycle id of this instruction.
     *
     * @param instructionId the id.
     */
    public void setInstructionId(long instructionId) {
        this.instructionId = instructionId;
    }

    /**
     * Returns whether this instruction is being executed in a delay slot.
     *
     * @return whether this instruction is being executed in a delay slot.
     */
    public boolean isInDelaySlot() {
        return inDelaySlot;
    }

    /**
     * Sets whether this instruction is being executed in a delay slot.
     *
     * @param inDelaySlot whether this instruction is being executed in a delay slot.
     */
    public void setInDelaySlot(boolean inDelaySlot) {
        this.inDelaySlot = inDelaySlot;
    }

    /** @return whether this instruction uses the memory step. */
    public boolean executesMemory() {
        return executesMemory;
    }

    /** @return whether this instruction uses the write-back step. */
    public boolean executesWriteBack() {
        return executesWriteBack;
    }

    /** @return whether branches should be resolved during the decode step. */
    public boolean solveBranchOnDecode() {
        return simulation.getData().shouldSolveBranchesOnDecode();
    }

    //region requires
    // The requires* methods check a source register for a RAW hazard.

    public void requires(int identifier) {
        requires(register(identifier));
    }

    public void requiresCOP0(int identifier) {
        requires(registerCop0(identifier));
    }

    public void requiresCOP0(int identifier, int sel) {
        requires(registerCop0(identifier, sel));
    }

    public void requiresCOP1(int identifier) {
        requires(registerCop1(identifier));
    }

    /**
     * Throws a {@link RAWHazardException} if the register is locked and the
     * simulation cannot (or is configured not to) forward values.
     */
    public void requires(Register register) {
        var supportsForwarding = simulation instanceof ForwardingSupporter
                && simulation.getData().isForwardingEnabled();
        if (register.isLocked() && !supportsForwarding) {
            throw new RAWHazardException(register);
        }
    }

    //endregion

    //region value
    // The value* methods read a register, using the forwarding unit when the
    // register is still locked by an earlier instruction.

    public int value(int identifier) {
        return value(register(identifier));
    }

    public int valueCOP0(int identifier) {
        return value(registerCop0(identifier));
    }

    public int valueCOP0(int identifier, int sel) {
        return value(registerCop0(identifier, sel));
    }

    public int valueCOP1(int identifier) {
        return value(registerCop1(identifier));
    }

    /**
     * Reads the register's value, falling back to the forwarding unit when it
     * is locked by another instruction.
     *
     * @throws RAWHazardException if the value is locked and not forwardable.
     */
    public int value(Register register) {
        if (!register.isLocked(this)) {
            return register.getValue();
        }
        if (simulation instanceof ForwardingSupporter) {
            var optional = ((ForwardingSupporter) simulation).getForwarding().get(register);
            if (optional.isPresent()) return optional.getAsInt();
        }
        throw new RAWHazardException(register);
    }

    //endregion value

    //region lock
    // Lock a destination register so later instructions see the hazard.

    public void lock(int identifier) {
        register(identifier).lock(this);
    }

    public void lockCOP0(int identifier) {
        registerCop0(identifier).lock(this);
    }

    public void lockCOP0(int identifier, int sel) {
        registerCop0(identifier, sel).lock(this);
    }

    public void lockCOP1(int identifier) {
        registerCop1(identifier).lock(this);
    }

    public void lock(Register register) {
        register.lock(this);
    }

    //endregion

    //region unlock
    // Release a previously-acquired register lock without writing a value.

    public void unlock(int identifier) {
        register(identifier).unlock(this);
    }

    public void unlockCOP0(int identifier) {
        registerCop0(identifier).unlock(this);
    }

    public void unlockCOP0(int identifier, int sel) {
        registerCop0(identifier, sel).unlock(this);
    }

    public void unlockCOP1(int identifier) {
        registerCop1(identifier).unlock(this);
    }

    public void unlock(Register register) {
        register.unlock(this);
    }

    //endregion

    //region set and unlock
    // Write the result into a register and release its lock in one step.

    public void setAndUnlock(int identifier, int value) {
        setAndUnlock(register(identifier), value);
    }

    public void setAndUnlockCOP0(int identifier, int value) {
        setAndUnlock(registerCop0(identifier), value);
    }

    public void setAndUnlockCOP0(int identifier, int sel, int value) {
        setAndUnlock(registerCop0(identifier, sel), value);
    }

    public void setAndUnlockCOP1(int identifier, int value) {
        setAndUnlock(registerCop1(identifier), value);
    }

    public void setAndUnlock(Register register, int value) {
        register.setValue(value);
        register.unlock(this);
    }

    //endregion

    //region forward
    // Publish a computed value to the forwarding unit (pipelined simulations only).

    public void forward(int identifier, int value, boolean memory) {
        forward(register(identifier), value, memory);
    }

    public void forwardCOP0(int identifier, int value, boolean memory) {
        forward(registerCop0(identifier), value, memory);
    }

    public void forwardCOP0(int identifier, int sel, int value, boolean memory) {
        forward(registerCop0(identifier, sel), value, memory);
    }

    public void forwardCOP1(int identifier, int value, boolean memory) {
        forward(registerCop1(identifier), value, memory);
    }

    public void forward(Register register, int value, boolean memory) {
        if (simulation instanceof ForwardingSupporter) {
            ((ForwardingSupporter) simulation).getForwarding().forward(register, value, memory);
        }
    }

    //endregion

    /**
     * Performs a jump to the given address, adjusting the pipeline's fetch
     * stage according to the delay-slot configuration.
     *
     * @param address the target address.
     * @throws IllegalStateException if this instruction is not a control
     *                               transfer instruction.
     */
    public void jump(int address) {
        //The instruction should be a control transfer instruction to perform a jump.
        if (!(instruction.getBasicOrigin() instanceof ControlTransferInstruction))
            throw new IllegalStateException("The instruction " + instruction.getBasicOrigin() + " is not a control transfer instruction!");

        if (simulation instanceof PipelinedSimulation) {
            if (!simulation.getData().areDelaySlotsEnabled() || ((ControlTransferInstruction) instruction.getBasicOrigin()).isCompact()) {
                ((PipelinedSimulation) simulation).getPipeline().removeFetch();
                setAndUnlock(pc(), address);
            } else {
                //The fetch is not cancelled. If there's an instruction to fetch,
                //the next one will be fetched at address + 4. We do not want that!
                //The instruction at the fetch slot will always be null, so we check its PC instead.
                boolean willFetch = ((PipelinedSimulation) simulation).getPipeline().getPc(MultiCycleStep.FETCH) != 0;
                setAndUnlock(pc(), willFetch ? address - 4 : address);
            }
        } else {
            setAndUnlock(pc(), address);
        }
    }

    // The four pipeline steps every concrete instruction must implement.
    public abstract void decode();

    public abstract void execute();

    public abstract void memory();

    public abstract void writeBack();
}
|
/// Converts a hexadecimal string into a byte vector, two characters per byte.
///
/// Quirks of this implementation (document, do not "fix" without checking
/// callers):
/// - a non-hex character in the high-nibble position is skipped entirely;
/// - a non-hex character in the low-nibble position is treated as 0 and
///   still consumed;
/// - an odd-length input pads the final nibble with a low 0
///   (e.g. "abc" -> [0xab, 0xc0]).
pub fn hex_to_vec(hex: &str) -> Vec<u8> {
    let mut vec = Vec::with_capacity((hex.len() + 1) / 2); // Capacity based on the number of bytes
    let mut iter = hex.chars().peekable(); // Create a peekable iterator over the characters

    while let Some(c) = iter.next() {
        let byte = match c.to_digit(16) {
            Some(n) => n as u8, // Convert hexadecimal character to u8
            None => continue, // Skip non-hexadecimal characters
        };

        let next_byte = match iter.peek() {
            Some(&next) => match next.to_digit(16) {
                Some(n) => n as u8, // Convert next character to u8
                None => 0, // Treat non-hexadecimal character as 0
            },
            None => 0, // Treat last character as 0 if odd-length string
        };

        vec.push((byte << 4) | next_byte); // Combine current and next byte and push to vector

        if iter.peek().is_some() {
            iter.next(); // Move to the next character if available
        }
    }

    vec
}
#!/bin/bash
# Export placeholder git identity values for scripts run in CI/automation.
export GIT_EMAIL="hogehoge@users.noreply.github.com"
export GIT_USER="hogehoge"
|
def modify_dictionary(input_lines):
    # Polymer-insertion puzzle solver (pair-counting approach).
    # input_lines[0] is the template string; lines from index 2 on are
    # rules of the form "AB -> C". Pairs are tracked as counts in p.
    # NOTE(review): the caller is expected to seed p via apply_rules on an
    # initial pair-count dict; here p starts empty, so the result depends
    # on how d's rules populate it -- confirm against the calling code.
    # Extracting the first line and constructing the dictionary d
    t, d, p = input_lines[0], dict([l.strip().split(' -> ') for l in input_lines[2:]]), {}

    # Function to increment the value at index i in dictionary d
    def increment_value(d, i, v):
        d[i] = d.setdefault(i, 0) + v

    # Function to apply the rules and modify the dictionary p
    def apply_rules(P):
        # Snapshot P so each rule application reads pre-step counts.
        o = dict(P)
        for p, n in d.items():
            if p in o.keys():
                # Pair "XY" with rule "XY -> Z" becomes pairs "XZ" and "ZY".
                increment_value(P, p, -o[p])
                increment_value(P, p[0] + n, o[p])
                increment_value(P, n + p[1], o[p])
        return P

    # Function to create a new dictionary with aggregated values
    def create_aggregated_dictionary(P):
        e = {}
        for p, v in P.items():
            for c in p:
                increment_value(e, c, v)
        # Each character is shared by two pairs; halve with ceiling rounding.
        return {x: -int(e[x] / 2 // -1) for x in e}

    # Applying the rules 40 times and calculating the difference between the maximum and minimum values
    # result[9] is the answer after 10 steps, result[-1] after 40 steps.
    result = [max(e := create_aggregated_dictionary(apply_rules(p)).values()) - min(e) for i in range(40)]
    return result[9], result[-1]
// Group-edit form: validate name / grade / category, normalize the grade
// value, then POST the update and redirect to the group-management page.
// (User-facing messages are in Vietnamese and must stay as-is.)
$(document).ready(function(){
    $('#btnCapnhat_group').on('click',function(e){
        var sg_id = $("#sg_id").val();
        var social_name = $("#socialname").val();
        var categ_group = $("[name=categ_group]").val();
        var grade2 = $("#grade2").val();
        var description = $("[name=description2]").val();
        var status = $("[name=status]").val();

        // Validation: each check appends an inline error element once and
        // removes it again when the field becomes valid.
        if(social_name==""){
            if(!$(".group_name_error2").length){
                $("#socialname").parent().append("<div class='text-do group_name_error2'>Tên nhóm không được để trống</div>");
            }
        }else{
            if($(".group_name_error2").length)
                $(".group_name_error2").remove();
            if(grade2==""){
                if(!$(".level_error2").length)
                    $("#grade2").parent().append("<div class='text-do level_error2'>Vui lòng chọn một cấp độ</div>");
            }else{
                if($(".level_error2").length)
                    $(".level_error2").remove();
                if(categ_group==""){
                    if(!$(".categ_error2 ").length)
                        $("#slct2").parent().append("<div class='text-do categ_error2'>Vui lòng chọn một môn học</div>");
                }else{
                    // Grade "0" means "all grades"; a range like "1 - 3" is
                    // expanded into a comma-separated list offset by +2.
                    if(grade2=="0") grade2="3,4,5,6,7,8,9,10,11,12,13,14";
                    if(grade2.indexOf("-")!=-1){
                        var ar = grade2.split(" - ");
                        grade2 = parseInt(ar[0])+2;
                        for(i=parseInt(ar[0])+3; i<=parseInt(ar[1])+2; i++){
                            grade2+=","+i;
                        }
                    }
                    dt = JSON.stringify({
                        'sg_id'        :sg_id,
                        'social_name'  :social_name,
                        'categ_group'  :categ_group,
                        'grade2'       :grade2,
                        'description'  :description,
                        'status'       :status,
                    });
                    // Submit the update, then redirect to group management.
                    $.ajax({
                        type: "POST",
                        data: dt,
                        url: site_url+"/social_group/edit",
                        success: function(data){
                            alert("Sửa thông tin nhóm thành công!");
                            window.location.href=site_url+"/home_user/manage_group";
                        },
                        error: function(data){
                        }
                    })
                }
            }
        }
    })
})
/*function adduser(event, e, sg_id) {
if (e.keyCode == '13') {
search = $(event).val();
add(search, sg_id);
}
}
function adduser_click(sg_id) {
search = $('#name_user').val();
add(search, sg_id);
}
function add(search, sg_id) {
console.log(site_url);
dt = JSON.stringify({
'email': search,
'sg_id': sg_id
});
$.ajax({
type: 'POST',
url: site_url + "/social_group/get_data_add_user/",
data: dt,
contentType: 'application/json',
success: function (data) {
if (data.user.stt == 0) {
$("#error_email").empty();
$("#error_email").append(data.user.error);
} else {
$('#adduser').modal('hide');
alert(data.user.error);
window.location.href = site_url + '/social_group/edit_groupp/' + data['sg_id'];
}
},
error: function (data) {
console.log(data);
}
})
}*/
// Re-render the user list at the given page, keeping the current search term.
function drawpage(page) {
    // Fix: 'search' was an implicit global (no declaration); the DOM input
    // #search carries the persistent value, so a local is sufficient.
    var search = $('#search').val();
    redraws(search, page);
}
// On Enter in the search box, re-run the search from page 0.
// 'event' is the input element, 'e' the keyboard event.
function drawsearch(event, e) {
    if (e.keyCode === 13) {
        // Fix: 'search' was an implicit global; declare it locally.
        var search = $(event).val();
        redraws(search, 0);
    }
}
// Search-button click: re-run the search from page 0.
function drawsearch_btn() {
    // Fix: 'search' was an implicit global; declare it locally.
    var search = $('#searchuser').val();
    redraws(search, 0);
}
// Rebuild the group-member table inside #users for the given search term
// and page, including a windowed pagination bar.
// Relies on globals: site_url, gid (group id), cr (current user id), and
// implicitly-global work variables (dt, u, index, html, i) -- NOTE(review):
// those should ideally be declared locally.
function redraws(search, page) {
    $('#search').val(search);
    dt = JSON.stringify({
        'search': search,
        'page': page,
        'gid': gid,
    });
    // Show a loading spinner while the request is in flight.
    $("#users").empty();
    $("#users").append('<div id="circularG"><div id="circularG_1" class="circularG"></div><div id="circularG_2" class="circularG"></div><div id="circularG_3" class="circularG"></div><div id="circularG_4" class="circularG"></div><div id="circularG_5" class="circularG"></div><div id="circularG_6" class="circularG"></div><div id="circularG_7" class="circularG"></div><div id="circularG_8" class="circularG"></div></div>');
    $.ajax({
        type: 'POST',
        data: dt,
        url: site_url + '/social_group/get_data_edit_group',
        contentType: 'application/json',
        success: function (data) {
            u = data.users;
            index = 0;
            // Build the search box + member table markup.
            html = '<span style="float:right; margin-bottom:10px;">'
            html += '<input id="searchuser" style="min-width:250px;margin-top:5%" placeholder="Tìm kiếm" onkeyup="drawsearch(this,event)" value="' + data['search'] + '">'
            html += `<i class="pointer fas fa-search" onclick="drawsearch_btn()"></i></span>
<table class="table table-bordered">
<thead>
<tr style="background-color: rgb(233, 235, 238);">
<td>Index</td>
<td>Email</td>
<td>Contact_no</td>
<td>Action</td>
</tr>
</thead>
<tbody>
`;
            // One row per member; the group owner (cr) gets an "add member"
            // action, everyone else gets a delete action.
            for (i = 0; i < u.length; i++) {
                html += '<tr>';
                html += '<td> ' + ++index + ' </td>';
                html += '<td> ' + u[i]['email'] + ' </td>';
                html += '<td> ';
                if (u[i]['contact_no'] == null) {
                    html += ' '
                } else {
                    html += u[i]['contact_no'];
                }
                html += '</td>'
                html += '<td>';
                if (cr == u[i]['uid']) {
                    html += '<a onclick = "add_member('+ data['gid'] +')"><i title="Thêm thành viên" class="fa fa-user-plus" aria-hidden="true"></i></a>';
                } else {
                    html += '<a onclick="del(' + data['gid'] + ',' + u[i]['uid'] + ')"><i class="pointer fas fa-trash-alt" title="Xóa"></i></a>'
                }
                html += '</td>';
            }
            html += '</tbody>';
            html += '</table>';
            // Pagination: show all pages when there are fewer than 7,
            // otherwise a sliding window with "..." gaps.
            html += `<center>
<ul class="pagination listpage pageqt">`;
            if (data.num_page < 7) {
                for (i = 0; i < data.num_page; i++) {
                    html += '<li class="page-item';
                    if (i == page) {
                        html += ' active';
                    }
                    html += '" onclick="drawpage(' + i + ')"><a class="page-link">' + (i + 1) + '</a></li>'
                }
            } else {
                if (page <= 3) {
                    // Near the start: first 5 pages, ellipsis, last page.
                    for (i = 0; i < 5; i++) {
                        html += '<li class="page-item';
                        if (i == page) {
                            html += ' active';
                        }
                        html += '" onclick="drawpage(' + i + ')"><a class="page-link">' + (i + 1) + '</a></li>'
                    }
                    html += '<li class="page-item"><a class="page-link">...</a></li>';
                    html += '<li class="page-item" onclick="drawpage(' + (data.num_page - 1) + ')"><a class="page-link">' + data.num_page + '</a></li>';
                }
                else {
                    html += '<li class="page-item" onclick="drawpage(0)"><a class="page-link">1</a></li>';
                    html += '<li class="page-item"><a class="page-link">...</a></li>';
                    if (page < data.num_page - 4) {
                        // Middle: current page with one neighbor either side.
                        html += '<li class="page-item" onclick="drawpage(' + (page - 1) + ')"><a class="page-link">' + page + '</a></li>';
                        html += '<li class="page-item active" onclick="drawpage(' + page + ')"><a class="page-link">' + (page + 1) + '</a></li>';
                        html += '<li class="page-item" onclick="drawpage(' + (page + 1) + ')"><a class="page-link">' + (page + 2) + '</a></li>';
                        html += '<li class="page-item"><a class="page-link">...</a></li>';
                        html += '<li class="page-item" onclick="drawpage(' + (data.num_page - 1) + ')"><a class="page-link">' + data.num_page + '</a></li>';
                    }
                    else {
                        // Near the end: from two before the current page onwards.
                        for (i = page - 2; i < data.num_page; i++) {
                            html += '<li class="page-item';
                            if (i == page) {
                                html += " active";
                            }
                            html += '" onclick="drawpage(' + i + ')"><a class="page-link">' + (i + 1) + '</a></li>';
                        }
                    }
                }
            }
            // Swap the spinner for the rendered table.
            $("#circularG").remove();
            $("#users").append(html);
        },
        error: function (data) {
        }
    })
}
// Switch to grade-editing mode: reveal the grade selector and hide the
// read-only grade text and its edit link.
function show_grade() {
    $('#grade_one_choice2').removeAttr('style');
    $('#txtgrade').css({ display: 'none' });
    $('#link_edit_grade').css({ display: 'none' });
}
// Leave grade-editing mode: restore the read-only grade text and edit
// link, and hide the grade selector again.
function cancel_edit_grade() {
    $('#txtgrade').removeAttr('style');
    $('#link_edit_grade').removeAttr('style');
    $('#grade_one_choice2').css({ display: 'none' });
}
// Open the "add member" modal and fill it with a DataTable of candidate
// users for group `sg_id`. Clicking a row toggles membership: a row that
// is already "shown" removes the user, otherwise the user is added.
function add_member(sg_id){
    $("#add_member").modal();
    $("#bodyadd_member").empty();
    $.ajax({
        type:"POST",
        data: {},
        url: site_url + '/social_group/get_member/'+sg_id,
        success: function(result){
            $("#bodyadd_member").append('<table id="tbmb" class="display" style="width:100%"></table>');
            // Build the DataTable from the returned user records.
            var tbl = $("#tbmb").DataTable({
                reponsive:true,
                data: result,
                columns: [
                    // Toggle column (empty cell, styled by .details-control).
                    {"data":null,"title":"","class":"details-control","render":function(data,type,row){
                        return "";
                    }},
                    {"data":null,"title":"<NAME>","render":function(data,type,row){
                        return data.first_name+' '+data.last_name;
                    }},
                    { "data": "email", "title": "Email" },
                    // { "data": "birthdate", "title": "Ngày sinh" },
                    { "data": "user_code", "title": "Mã người dùng" }
                ],
                language: langs,
                order: [[ 0, "desc" ]]
            });
            // Row-click toggles membership for that user.
            $("#tbmb tbody").on('click','td.details-control',function(){
                var tr = $(this).closest('tr');
                var row = tbl.row(tr);
                if (tr.hasClass('shown')){
                    removemb(sg_id, row.data().uid);
                    row.child.hide();
                    tr.removeClass('shown');
                }else{
                    addmb(sg_id, row.data().uid);
                    row.child.show();
                    tr.addClass('shown');
                }
            });
        },
        error: function(xhr,status,strErr){
            console.log(xhr);
            console.log(status);
            console.log(strErr);
        }
    })
}
// Ask the server to remove user `uid` from group `sg_id`.
// Success is silent; failures are logged to the console.
function removemb(sg_id, uid) {
    $.ajax({
        type: "POST",
        url: site_url + '/social_group/out_group/' + sg_id + '/' + uid,
        data: {},
        success: function (data) {
        },
        error: function (xhr, status, sttErr) {
            console.log(xhr);
            console.log(status);
            console.log(sttErr);
        },
    });
}
// Ask the server to add user `uid` to group `sg_id`.
// Success is silent; failures are logged to the console.
function addmb(sg_id, uid) {
    $.ajax({
        type: "POST",
        url: site_url + '/social_group/add_group_member/' + sg_id + '/' + uid,
        data: {},
        success: function (data) {
        },
        error: function (xhr, status, sttErr) {
            console.log(xhr);
            console.log(status);
            console.log(sttErr);
        },
    });
}
#!/bin/bash
# Management helper for a local rtsp-simple-server instance.
config='rtsp-simple-server.yml'
# NOTE(review): credentials are read from fixed line numbers of the YAML
# config; this silently breaks if the file layout changes -- confirm the
# line numbers against the shipped rtsp-simple-server.yml.
admin_username=$(cat $config | awk 'NR==36 {print $2}')
admin_password=$(cat $config | awk 'NR==38 {print $2}')
user_username=$(cat $config | awk 'NR==43 {print $2}')
user_password=$(cat $config | awk 'NR==45 {print $2}')
end_point='anrp_stream'
host='0.0.0.0'
port='8554'
# PID of whatever currently listens on $port (column 2 of lsof's first
# data row); empty when nothing is listening.
running_pid=$(sudo lsof -i:$port | awk 'NR==2 {print $2}')
serve_server(){
    # Start the RTSP server in the background; record "<epoch> <pid>".
    ./rtsp-simple-server & echo $(date +%s) $! > rtsp.pid
    echo "server is started."
    # NOTE(review): the published path 'stream_anpr' does not match
    # $end_point ('anrp_stream') -- confirm which one is intended.
    ffmpeg -f v4l2 -i /dev/video0 -f rtsp rtsp://admin:adminpassword@0.0.0.0:8554/stream_anpr
    # Fix: the original echoed the undefined variable $host_port,
    # producing "http:///...". Use the host and port variables.
    echo "webcam is published on: http://${host}:${port}/${end_point}"
}
terminate_server(){
    # The pid file stores "<epoch> <pid>".
    # Fixes: the original compared $running_pid against the WHOLE file
    # contents (timestamp + pid), so the warning always fired, and the
    # construct $(<rtsp.pid | awk ...) is not valid bash.
    local stored_pid
    stored_pid=$(awk '{print $2}' rtsp.pid)
    if [[ $running_pid != "$stored_pid" ]];then
        echo "confused process id(s). running pid:'"$running_pid "'& stored pid:'"$stored_pid"'"
    fi
    case "$1" in
        -2) kill -2 $running_pid ;;  # SIGINT (interrupt)
        -9) kill -9 $running_pid ;;  # SIGKILL (force kill)
        *) kill -15 $running_pid ;;  # SIGTERM (default, graceful exit)
    esac
    # Blank out the pid file.
    echo " ">rtsp.pid
}
check_server(){
    # Report whether the RTSP server is running, using the recorded
    # "<epoch> <pid>" pid file.
    # Fix: $(<rtsp.pid | awk ...) is not valid bash, and $(<rtsp.pid)
    # printed the whole file where a single field was meant; extract the
    # fields with awk instead.
    if [[ -z $running_pid ]];then echo "no such process on port: "$port " or with pid: "$(awk '{print $2}' rtsp.pid)
    else
        echo "server was started at $(awk '{print $1}' rtsp.pid)"
        echo "server is running at: "$port "with pid: "$running_pid
    fi
}
configure_server(){
    # Update the RTSP user/admin credentials in the YAML config.
    # Fix: the original tested `-z $running_pid` (pid EMPTY) and then
    # claimed the server was running -- the check was inverted. Refuse to
    # edit the config only while the server IS running.
    if [[ -n $running_pid ]]; then
        echo "server is running, stop first! "
    else
        echo "current_admin: "$admin_username
        echo "current_admin_password: "$admin_password
        echo "current_user: "$user_username
        echo "current_user_password: "$user_password
        read -p "update (u)ser/(a)dmin ?:" choice
        case "$choice" in
            [uU]*)
                read -p "update user name: " changedUserName
                read -p "update user password: " changedUserPassword
                sed -i "s/readUser: [^ ]*/readUser: $changedUserName/" $config
                sed -i "s/readPass: [^ ]*/readPass: $changedUserPassword/" $config
                ;;
            [aA]*)
                read -p "update admin name: " changedAdminName
                read -p "update admin password: " changedAdminPassword
                sed -i "s/publishUser: [^ ]*/publishUser: $changedAdminName/" $config
                sed -i "s/publishPassword: [^ ]*/publishPassword: $changedAdminPassword/" $config
                ;;
            *) echo "invalid, break.";;
        esac
    fi
}
help_you(){
    # Print usage for all supported options.
    echo "This is super helpful."
    echo "./god.sh -s || --serve     start server and publish webcam stream"
    echo "./god.sh -c || --check     check server status"
    echo "./god.sh -t || --terminate terminate the server"
    # Fix: corrected "use whe error" typo in the help text.
    echo "./god.sh -t --confused     use when error terminating the server"
    echo "./god.sh --config          configure rtsp server' user & admin"
}
stVar=$1 #**variable handling**
ndVar=$2
# Dispatch on the first CLI argument; the second argument is forwarded to
# terminate_server (e.g. a signal selector such as -9).
case "$stVar" in
    -s | --serve) serve_server ;;
    -t | --terminate) terminate_server $ndVar;;
    -c | --check) check_server ;;
    --config) configure_server ;;
    --[hH]*) help_you ;;
    *) printf "[--help] for usage.\n" ;;
esac
|
<reponame>kotik-coder/PULsE
package pulse.baseline;
import static pulse.properties.NumericProperties.derive;
import static pulse.properties.NumericProperty.requireType;
import static pulse.properties.NumericPropertyKeyword.BASELINE_INTERCEPT;
import java.util.List;
import java.util.Set;
import pulse.math.ParameterVector;
import pulse.math.Segment;
import pulse.properties.Flag;
import pulse.properties.NumericProperty;
import pulse.properties.NumericPropertyKeyword;
import pulse.util.PropertyHolder;
/**
* A baseline that can shift in the vertical direction.
*
* @author <NAME> <<EMAIL>>
*/
public abstract class AdjustableBaseline extends Baseline {

    // Vertical offset of the baseline.
    private double intercept;

    /**
     * Creates a flat baseline equal to the argument.
     *
     * @param intercept the constant baseline value.
     */
    public AdjustableBaseline(double intercept) {
        this.intercept = intercept;
    }

    /**
     * @return the constant value of this {@code FlatBaseline}
     */
    @Override
    public double valueAt(double x) {
        return intercept;
    }

    /**
     * Arithmetic mean of the given values.
     * NOTE(review): throws NoSuchElementException on an empty list
     * (reduce(...).get()) -- callers are presumed to pass non-empty data.
     */
    protected double mean(List<Double> x) {
        double sum = x.stream().reduce((a, b) -> a + b).get();
        return sum / x.size();
    }

    /**
     * Provides getter accessibility to the intercept as a NumericProperty
     *
     * @return a NumericProperty derived from
     * NumericPropertyKeyword.BASELINE_INTERCEPT where the value is set to that
     * of {@code slope}
     */
    public NumericProperty getIntercept() {
        return derive(BASELINE_INTERCEPT, intercept);
    }

    /**
     * Checks whether {@code intercept} is a baseline intercept property and
     * updates the respective value of this baseline.
     *
     * @param intercept a {@code NumericProperty} of the
     * {@code BASELINE_INTERCEPT} type
     * @see set
     */
    public void setIntercept(NumericProperty intercept) {
        requireType(intercept, BASELINE_INTERCEPT);
        this.intercept = (double) intercept.getValue();
        firePropertyChanged(this, intercept);
    }

    /**
     * Lists the {@code intercept} as accessible property for this
     * {@code FlatBaseline}.
     *
     * @see PropertyHolder
     */
    @Override
    public Set<NumericPropertyKeyword> listedKeywords() {
        var set = super.listedKeywords();
        set.add(BASELINE_INTERCEPT);
        return set;
    }

    /**
     * Routes a {@code BASELINE_INTERCEPT} update to {@link #setIntercept}.
     * NOTE(review): setIntercept already fires a property-changed event, so
     * the extra firePropertyChanged here emits the event twice -- confirm
     * whether listeners rely on that.
     */
    @Override
    public void set(NumericPropertyKeyword type, NumericProperty property) {
        if (type == BASELINE_INTERCEPT) {
            setIntercept(property);
            this.firePropertyChanged(this, property);
        }
    }

    /**
     * Writes the current intercept into every {@code BASELINE_INTERCEPT}
     * slot of the optimisation vector.
     */
    @Override
    public void optimisationVector(ParameterVector output, List<Flag> flags) {
        for (int i = 0, size = output.dimension(); i < size; i++) {
            var key = output.getIndex(i);

            if (key == BASELINE_INTERCEPT) {
                output.set(i, intercept, key);
            }
        }
    }

    /**
     * Reads the intercept back from an optimisation result vector.
     */
    @Override
    public void assign(ParameterVector params) {
        for (int i = 0, size = params.dimension(); i < size; i++) {
            if (params.getIndex(i) == BASELINE_INTERCEPT) {
                setIntercept(derive(BASELINE_INTERCEPT, params.get(i)));
            }
        }
    }

}
|
import tensorflow as tf
import numpy as np
import random
import matplotlib.pyplot as plt
from zipfile import ZipFile

# Unpack the dataset archive into the working directory.
# NOTE(review): 'zip' shadows the built-in of the same name for the rest of
# the module.
with ZipFile("archive.zip","r") as zip:
    zip.extractall()

# Fix all RNG seeds for reproducible shuffling/augmentation.
random.seed(42)
np.random.seed(42)
tf.random.set_seed(42)
batch_size = 32
img_size = (160,160)
from tensorflow.keras.preprocessing import image_dataset_from_directory
from tensorflow.data.experimental import cardinality

# 80/20 train/validation split of the same directory, same seed on both calls
# so the two subsets are disjoint.
train_ds = image_dataset_from_directory(
    "train/train",
    validation_split = 0.2,
    subset = "training",
    seed = 42,
    shuffle = True,
    image_size = img_size,
    batch_size = batch_size,
)
val_ds = image_dataset_from_directory(
    "train/train",
    validation_split = 0.2,
    subset = "validation",
    seed = 42,
    shuffle = True,
    image_size = img_size,
    batch_size = batch_size,
)
class_names = train_ds.class_names

# Carve a test split out of the validation set (first 1/5 of its batches).
val_batches = cardinality(val_ds)
test_ds = val_ds.take(val_batches//5)
val_ds = val_ds.skip(val_batches//5)

# Preview a 3x3 grid of raw training images.
plt.figure(figsize=(10,10))
for images,labels in train_ds.take(1):
    for i in range(9):
        ax = plt.subplot(3,3,i+1)
        plt.imshow(images[i].numpy().astype("uint8"))
        plt.title(class_names[labels[i]])
        plt.axis("off")
plt.show()

# Overlap data loading with training.
AUTOTUNE = tf.data.AUTOTUNE
train_ds = train_ds.prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.prefetch(buffer_size=AUTOTUNE)
test_ds = test_ds.prefetch(buffer_size=AUTOTUNE)

from tensorflow.keras import Sequential
from tensorflow.keras.layers.experimental.preprocessing import RandomFlip, RandomZoom, RandomRotation

# Augmentation applied only at train time (see training=False notes below).
# NOTE(review): RandomRotation(10) means a factor of 10 * 2*pi, i.e. full
# arbitrary rotations — confirm 0.1 was not intended.
data_augmentation = Sequential([
    RandomFlip("horizontal"),
    RandomZoom(0.2),
    RandomRotation(10),
])

# Preview the augmentation on one image.
plt.figure(figsize=(10,10))
for images,_ in train_ds.take(1):
    for i in range(9):
        ax = plt.subplot(3,3,i+1)
        augmented_images = data_augmentation(images)
        plt.imshow(augmented_images[0].numpy().astype("uint8"))
        plt.axis("off")
plt.show()

from tensorflow.keras import Input, Model
from tensorflow.keras.applications import mobilenet_v2, MobileNetV2
from tensorflow.keras.layers import Dense, Dropout, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import SparseCategoricalCrossentropy

# MobileNetV2 expects inputs scaled to [-1, 1]; preprocess_input does that.
rescale = mobilenet_v2.preprocess_input
img_shape = img_size+(3,)
# ImageNet-pretrained feature extractor without its classification head.
base_model = MobileNetV2(input_shape=img_shape,include_top=False,weights="imagenet")
image_batch,label_batch = next(iter(train_ds))
feature_batch = base_model(image_batch)
# Freeze the backbone: only the new head is trained.
base_model.trainable = False
base_model.summary()

class Transfer_MobileNetV2():
    # Builds the transfer-learning model: rescale -> augment -> frozen
    # MobileNetV2 -> pooling -> dropout -> 33-way softmax head.
    def model(self,input):
        self.x = rescale(input)
        self.x = data_augmentation(self.x)
        # training=False keeps BatchNorm layers in inference mode even
        # after unfreezing.
        self.x = base_model(self.x,training=False)
        self.x = GlobalAveragePooling2D()(self.x)
        self.x = Dropout(0.2,seed=42)(self.x)
        self.output = Dense(33,activation="softmax")(self.x)
        # NOTE(review): this assignment shadows the model() method on the
        # instance after the first call — presumably it is only called once.
        self.model = Model(input,self.output,name="Transfer_MobileNetV2")
        return self.model

TMBN2 = Transfer_MobileNetV2()
model = TMBN2.model(Input(shape=img_size+(3,)))
model.compile(Adam(),SparseCategoricalCrossentropy(),metrics=["accuracy"])
model.summary()

from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.models import load_model

if __name__=="__main__":
    # Keep only the checkpoint with the best validation accuracy.
    checkpoint = ModelCheckpoint("fruit.h5",save_weights_only=False,monitor="val_accuracy",save_best_only=True)
    model.fit(train_ds,epochs=2,validation_data=val_ds,callbacks=[checkpoint])
    best = load_model("fruit.h5")
    val_loss,val_acc = best.evaluate(val_ds)
    print("\nAccuracy: {:.2f} %".format(100*val_acc))
    print("Loss: {:.2f} %".format(100*val_loss))
<gh_stars>0
import os
import cv2
import sys
import time
import collections
import torch
import argparse
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils import data
from dataset import TestLoader
import models
import util
# c++ version pse based on opencv 3+
from pse import pse
# python pse
#from pypse import pse as pypse
import glob
import shutil
def extend_3c(img):
    """Replicate a single-channel image into three identical channels.

    Args:
        img: array of shape (H, W) (or any array reshapable to (H, W, 1)).

    Returns:
        Array of shape (H, W, 3) with the input copied into every channel.
    """
    channel = img.reshape(img.shape[0], img.shape[1], 1)
    return np.repeat(channel, 3, axis=2)
def debug(idx, img_paths, imgs, output_root):
    """Tile images into one canvas and save it for visual inspection.

    Args:
        idx: index of the current image in img_paths (used for naming/logging).
        img_paths: list of source image paths.
        imgs: 2-D list (rows x cols) of images; each inner list is
            concatenated horizontally, then the rows are stacked vertically.
        output_root: directory the composite image is written into.
    """
    if not os.path.exists(output_root):
        os.makedirs(output_root)
    col = []
    for i in range(len(imgs)):
        row = []
        for j in range(len(imgs[i])):
            # img = cv2.copyMakeBorder(imgs[i][j], 3, 3, 3, 3, cv2.BORDER_CONSTANT, value=[255, 0, 0])
            row.append(imgs[i][j])
        res = np.concatenate(row, axis=1)
        col.append(res)
    res = np.concatenate(col, axis=0)
    # Output file is named after the source image.
    img_name = img_paths[idx].split('/')[-1]
    print (idx, '/', len(img_paths), img_name)
    # NOTE(review): assumes output_root ends with a path separator — the name
    # is appended by plain string concatenation.
    cv2.imwrite(output_root + img_name, res)
def write_result_as_txt(image_name, bboxes, path):
    """Write one detection-result file named res_<image_name>.txt.

    Each output line holds the eight integer coordinates of one
    quadrilateral bounding box, comma-separated.
    """
    filename = util.io.join_path(path, 'res_%s.txt'%(image_name))
    lines = [
        "%d, %d, %d, %d, %d, %d, %d, %d\n" % tuple(int(v) for v in bbox)
        for bbox in bboxes
    ]
    util.io.write_lines(filename, lines)
def polygon_from_points(points):
    """
    Returns a Polygon object to use with the Polygon2 class from a list of 8 points: x1,y1,x2,y2,x3,y3,x4,y4
    """
    # FIXME(review): 'plg' is never imported in this file (Polygon2/Polygon3 is
    # conventionally imported as 'import Polygon as plg'); calling this
    # function as-is raises NameError. Confirm against the original repo.
    resBoxes=np.empty([1, 8],dtype='int32')
    # Columns 0-3 receive the x coordinates and columns 4-7 the y coordinates,
    # so the reshape([2, 4]).T below yields four (x, y) rows.
    resBoxes[0, 0] = int(points[0])
    resBoxes[0, 4] = int(points[1])
    resBoxes[0, 1] = int(points[2])
    resBoxes[0, 5] = int(points[3])
    resBoxes[0, 2] = int(points[4])
    resBoxes[0, 6] = int(points[5])
    resBoxes[0, 3] = int(points[6])
    resBoxes[0, 7] = int(points[7])
    pointMat = resBoxes[0].reshape([2, 4]).T
    return plg.Polygon(pointMat)
def test(args):
    """Run text-detection inference over the TestLoader dataset.

    Loads the checkpoint given by args.resume, runs the network on every
    image, post-processes the kernel maps with PSE, and writes
    visualisations of the detected boxes to ./TestResult/.
    """
    # Start from a clean output directory on every run.
    if os.path.exists("./TestResult/"):
        shutil.rmtree("./TestResult/")
    os.makedirs("./TestResult/")
    data_loader = TestLoader(long_size=args.long_size)
    test_loader = torch.utils.data.DataLoader(
        data_loader,
        batch_size=1,
        shuffle=False,
        num_workers=2,
        drop_last=True)
    # Setup Model
    if args.arch == "resnet50":
        model = models.resnet50(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet101":
        model = models.resnet101(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet152":
        model = models.resnet152(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet34":
        model = models.resnet34(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet18":
        model = models.resnet18(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "se_resnet_50":
        model = models.se_resnet_50(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "se_resnext_50":
        model = models.se_resnext_50(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "dcn_resnet50":
        model = models.dcn_resnet50(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet50_lstm":
        model = models.resnet50_lstm(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet50_aspp":
        model = models.resnet50_aspp(pretrained=True, num_classes=7, scale=args.scale)
    elif args.arch == "resnet50_psp":
        model = models.resnet50_psp(pretrained=True, num_classes=7, scale=args.scale)
    # Inference only: freeze every weight.
    for param in model.parameters():
        param.requires_grad = False
    if torch.cuda.is_available():
        model = model.cuda()
    else:
        model = model.cpu()
    if args.resume is not None:
        if os.path.isfile(args.resume):
            print("Loading model and optimizer from checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume, map_location=lambda storage, loc: storage)
            # model.load_state_dict(checkpoint['state_dict'])
            # key[7:] strips the 'module.' prefix that nn.DataParallel adds to
            # parameter names when a model is saved from a parallel wrapper.
            d = collections.OrderedDict()
            for key, value in checkpoint['state_dict'].items():
                tmp = key[7:]
                d[tmp] = value
            model.load_state_dict(d)
            print("Loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
            sys.stdout.flush()
        else:
            print("No checkpoint found at '{}'".format(args.resume))
            sys.stdout.flush()
    model.eval()
    #example = torch.rand(1, 3, 800, 800)
    #example = Variable(example.cuda())
    #traced_script_module = torch.jit.trace(model, (example))
    #traced_script_module.save("./model.pt")
    #torch.onnx.export(model, example, "model.onnx", verbose=True)
    total_frame = 0.0
    total_time = 0.0
    for idx, (org_img, img, scale) in enumerate(test_loader):
        print('progress: %d / %d'%(idx, len(test_loader)))
        sys.stdout.flush()
        if torch.cuda.is_available():
            img = Variable(img.cuda(), volatile=True)
        else:
            img = Variable(img.cpu(), volatile=True)
        org_img = org_img.numpy().astype('uint8')[0]
        text_box = org_img.copy()
        # Synchronize before timing so GPU work is not still queued.
        if torch.cuda.is_available():
            torch.cuda.synchronize()
        start = time.time()
        outputs = model(img)
        # Channel 0 is the full-text probability map; the remaining channels
        # are the progressively shrunk kernels.
        score = torch.sigmoid(outputs[:, 0, :, :])
        # Binarize: sign(x - th) maps to {-1, +1}; (+1)/2 turns that into {0, 1}.
        outputs = (torch.sign(outputs - args.binary_th) + 1) / 2
        text = outputs[:, 0, :, :]
        kernels = outputs[:, 0:args.kernel_num, :, :] * text
        score = score.data.cpu().numpy()[0].astype(np.float32)
        text = text.data.cpu().numpy()[0].astype(np.uint8)
        kernels = kernels.data.cpu().numpy()[0].astype(np.uint8)
        #cv2.imwrite("./6.jpg", kernels[0]*255)
        #cv2.imwrite("./5.jpg", kernels[1]*255)
        #cv2.imwrite("./4.jpg", kernels[2]*255)
        #cv2.imwrite("./3.jpg", kernels[3]*255)
        #cv2.imwrite("./2.jpg", kernels[4]*255)
        #cv2.imwrite("./1.jpg", kernels[5]*255)
        #cv2.imwrite("./0.jpg", kernels[6]*255)
        # c++ version pse
        #kernels = kernels[:-1]
        pred = pse(kernels, args.min_kernel_area / (args.scale * args.scale))
        # python version pse
        #pred = pypse(kernels, args.min_kernel_area / (args.scale * args.scale))
        if(len(pred) == 0):
            continue
        #scale = (org_img.shape[0] * 1.0 / pred.shape[0], org_img.shape[1] * 1.0 / pred.shape[1])
        #print(org_img.shape, pred.shape, scale)
        # pred is a label map: one integer id per connected text instance.
        label = pred
        label_num = np.max(label) + 1
        bboxes = []
        for i in range(1, label_num):
            points = np.array(np.where(label == i)).transpose((1, 0))[:, ::-1]
            # Drop instances that are too small or too low-confidence.
            if points.shape[0] < args.min_area / (args.scale * args.scale):
                continue
            score_i = np.mean(score[label == i])
            if score_i < args.min_score:
                continue
            # Minimum-area rotated rectangle, mapped back to original image
            # coordinates via the loader's resize scale.
            rect = cv2.minAreaRect(points)
            bbox = cv2.boxPoints(rect) / (scale, scale)
            bbox = bbox.astype('int32')
            bboxes.append(bbox.reshape(-1))
        if torch.cuda.is_available():
            torch.cuda.synchronize()
        end = time.time()
        total_frame += 1
        total_time += (end - start)
        print('fps: %.2f'%(total_frame / total_time))
        sys.stdout.flush()
        for bbox in bboxes:
            cv2.drawContours(text_box, [bbox.reshape(4, 2)], -1, (0, 0, 255), 4)
        image_name = data_loader.img_paths[idx].split('/')[-1].split('.')[0]
        #write_result_as_txt(image_name, bboxes, './TestResult/')
        #text_box = cv2.resize(text_box, (text.shape[1], text.shape[0]))
        debug(idx, data_loader.img_paths, [[text_box]], './TestResult/')
if __name__ == '__main__':
    # CLI entry point: parse hyperparameters and run inference.
    # The help strings below were previously all copy-pasted from --resume
    # ("Path to previous saved model to restart from"); each option now
    # documents what it actually controls.
    parser = argparse.ArgumentParser(description='Hyperparams')
    parser.add_argument('--arch', nargs='?', type=str, default='resnet50',
                        help='backbone architecture name')
    parser.add_argument('--resume', nargs='?', type=str, default='./checkpoints/Total_ic19_resnet50_bs_16_ep_600_pretrain_ic17/checkpoint.pth.tar',
                        help='Path to previous saved model to restart from')
    parser.add_argument('--binary_th', nargs='?', type=float, default=0.5,
                        help='threshold used to binarize the network output maps')
    parser.add_argument('--kernel_num', nargs='?', type=int, default=7,
                        help='number of kernel channels taken from the network output')
    parser.add_argument('--scale', nargs='?', type=int, default=1,
                        help='downscale factor of the prediction maps')
    parser.add_argument('--long_size', nargs='?', type=int, default=800,
                        help='length of the longer image side after resizing')
    parser.add_argument('--min_kernel_area', nargs='?', type=float, default=5.0,
                        help='min kernel area')
    parser.add_argument('--min_area', nargs='?', type=float, default=50.0,
                        help='min area')
    parser.add_argument('--min_score', nargs='?', type=float, default=0.60,
                        help='min score')
    args = parser.parse_args()
    test(args)
|
def generate_primes(n):
    """Return all prime numbers p with 2 <= p <= n, in ascending order.

    Improvement over plain trial division by every smaller integer: each
    candidate is divided only by the primes already found, and the divisor
    loop stops once p * p exceeds the candidate — any composite number must
    have a prime factor no greater than its square root.

    Args:
        n: inclusive upper bound. Values below 2 yield an empty list.

    Returns:
        list[int]: the primes up to and including n.
    """
    primes = []
    for number in range(2, n + 1):
        is_prime = True
        for p in primes:
            if p * p > number:
                # No prime factor <= sqrt(number) exists: number is prime.
                break
            if number % p == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(number)
    return primes
// ESLint configuration for a TypeScript project with Prettier handling
// formatting. The 'prettier' entry appears last in `extends` so that it can
// disable any formatting rules from the earlier presets that would conflict
// with Prettier's output.
module.exports = {
  parserOptions: {
    // Enables type-aware linting rules from @typescript-eslint.
    project: './tsconfig.json',
  },
  parser: '@typescript-eslint/parser',
  plugins: ['prettier', '@typescript-eslint'],
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'prettier',
  ],
  rules: {
    // Permit the `{}` type, which the recommended preset bans by default.
    '@typescript-eslint/ban-types': ['error', { types: { '{}': false } }],
    '@typescript-eslint/explicit-module-boundary-types': 'off',
    '@typescript-eslint/no-namespace': 'off',
    '@typescript-eslint/no-unused-vars': ['error'],
    // NOTE(review): 'jsx-a11y' is not listed in `plugins` above — confirm
    // this rule reference is resolvable in this setup.
    'jsx-a11y/anchor-is-valid': 'off',
    'no-extra-boolean-cast': 'off',
    'no-useless-escape': 'off',
    'object-shorthand': 'error',
  },
};
|
import React from "react";
import { Route, Switch, withRouter } from "react-router-dom";
import clsx from "clsx";
import { Box, Link, Button } from "@material-ui/core";
//import Icon from "@mdi/react";
//icons
/*
import {
mdiFacebook as FacebookIcon,
mdiTwitter as TwitterIcon,
mdiGithub as GithubIcon,
} from "@mdi/js";
*/
// styles
import useStyles from "./styles";
// components
import Navbar from "../components/Navbar";
import Copyright from "../components/Copyright";
import Sidebar from "../components/Sidebar";
import Settingbar from "../components/Settingbar";
// pages
import Dashboard from "../views/dashboard";
// context
function Layout(props) {
var classes = useStyles();
const [settingsOpen, setSettingsOpen] = React.useState(false);
const handleSettingsDrawerOpen = () => {
setSettingsOpen(true);
};
const handleSettingsDrawerClose = React.useCallback(() => {
setSettingsOpen(false);
}, []);
const [sidebarOpen, setSidebarOpen] = React.useState(true);
const toggleSidebar = () => {
setSidebarOpen(!sidebarOpen);
};
const handleSidebarOpen = () => setSidebarOpen(true);
const handleSidebarClose = () => setSidebarOpen(false);
//handleSettingsDrawerOpen();
return (
<div className={classes.root}>
<Navbar
history={props.history}
onOpenSettingBar={handleSettingsDrawerOpen}
onToggleSidebar={toggleSidebar}
/>
<Sidebar isSidebarOpened={sidebarOpen} onToggleSidebar={toggleSidebar} />
<div className={clsx(classes.content)}>
<div className={classes.fakeToolbar} />
<Switch>
<Route path="/app/dashboard" component={Dashboard} />
</Switch>
<Box
mt={5}
width={"100%"}
display={"flex"}
alignItems={"center"}
justifyContent="center"
>
<Box>
<Copyright />
</Box>
<Box ml={5}>
<Link
color={"primary"}
href={"https://github.com/freeadmin/react-materialui-admin"}
target={"_blank"}
className={classes.link}
>
About Us
</Link>
</Box>
<Box ml={5}>
<Link
color={"primary"}
href={"https://github.com/freeadmin/react-materialui-admin"}
target={"_blank"}
className={classes.link}
>
Blog
</Link>
</Box>
</Box>
</div>
<Settingbar onClose={handleSettingsDrawerClose} open={settingsOpen} />
</div>
);
}
export default withRouter(Layout);
|
<gh_stars>10-100
package io.opensphere.analysis.base.model;
import javafx.application.Platform;
import javafx.beans.property.ObjectProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import net.jcip.annotations.NotThreadSafe;
import io.opensphere.core.util.javafx.ConcurrentObjectProperty;
import io.opensphere.mantle.data.DataTypeInfo;
/** Model common to various tools containing mostly data type information. */
@NotThreadSafe
public class CommonSettingsModel
{
    /** The currently active layer. */
    private final ObjectProperty<DataTypeInfo> myCurrentLayer = new ConcurrentObjectProperty<>(this, "currentLayer");

    /** The available layers. */
    private final ObservableList<DataTypeInfo> myAvailableLayers = FXCollections.observableArrayList();

    /**
     * Gets the current layer.
     *
     * @return the current layer
     */
    public final DataTypeInfo getCurrentLayer()
    {
        return myCurrentLayer.get();
    }

    /**
     * Sets the current layer. Must be called on the JavaFX application thread
     * (enforced by assertion only, consistent with the class being
     * {@code @NotThreadSafe}).
     *
     * @param currentLayer the current layer
     */
    public final void setCurrentLayer(DataTypeInfo currentLayer)
    {
        assert Platform.isFxApplicationThread();
        myCurrentLayer.set(currentLayer);
    }

    /**
     * Gets the current layer property.
     *
     * @return the current layer property
     */
    public ObjectProperty<DataTypeInfo> currentLayerProperty()
    {
        return myCurrentLayer;
    }

    /**
     * Gets the available layers property. The returned list is the live
     * backing list, so mutations by the caller are visible to all observers.
     *
     * @return the available layers property
     */
    public ObservableList<DataTypeInfo> availableLayersProperty()
    {
        return myAvailableLayers;
    }
}
|
#!/bin/bash
# Docker entrypoint helper: optionally install and select a Node.js version
# via nvm, then hand control to the container CMD.
source /root/.nvm/nvm.sh

# ${NODE_VERSION+x} expands to "x" only when NODE_VERSION is set, so this
# runs whenever the variable is set (even to the empty string), matching the
# original `! -z` test. Expansions are quoted against word splitting.
if [ -n "${NODE_VERSION+x}" ]
then
    nvm install "$NODE_VERSION"
    nvm alias default "$NODE_VERSION"
fi

# exec CMD — exec replaces this shell with the CMD process so that signals
# (e.g. docker stop's SIGTERM) reach the command directly, which the previous
# plain invocation did not guarantee.
echo ">> exec docker CMD"
echo "$@"
exec "$@"
|
#include "arch.h"
#include "fp_NIST521.h"
namespace NIST521 {

/* Curve NIST521 */
// Field constants for the NIST P-521 curve, selected by limb width.
// The all-ones limb pattern spans 521 bits — presumably the Mersenne prime
// 2^521 - 1 used by P-521 (TODO confirm against the library's curve data).

// 16-bit chunks are not provided for this field size.
#if CHUNK==16
#error Not supported
#endif

#if CHUNK==32
using namespace B528_28;

// Base Bits= 28
const BIG Modulus= {0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0xFFFFFFF,0x1FFFF};
// R^2 mod p, used to convert values into Montgomery representation.
const BIG R2modp= {0x400000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0};
// Montgomery reduction constant.
const chunk MConst= 0x1;
#endif

#if CHUNK==64
using namespace B528_56;

// Base Bits= 60
const BIG Modulus= {0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFFL,0x1FFFFFFFFFFL};
const BIG R2modp= {0x4000000000L,0x0L,0x0L,0x0L,0x0L,0x0L,0x0L,0x0L,0x0L};
const chunk MConst= 0x1L;
#endif

}
|
#!/bin/bash
# Minimal module loader plus demo: `require <name>` sources deps/<name>.sh
# relative to this script's own directory.
require() {
    SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    # Quoted so a script directory containing spaces still resolves.
    source "${SCRIPTDIR}/deps/$1.sh"
}
require term/term

# Print a colored greeting.
#   $1 - name to greet (default "partner")
#   $2 - color passed to term_color (default "blue")
howdy() {
    name=${1:-"partner"}
    color=${2:-"blue"}
    term_color "${color}"
    echo "Howdy ${name}!"
}

# When sourced, only export the function; when executed directly, run it
# with the CLI arguments and propagate its exit status.
if [[ ${BASH_SOURCE[0]} != $0 ]]; then
    export -f howdy
else
    howdy "${@}"
    exit $?
fi
# Silently create the Oracle Enterprise Manager DB Control repository,
# reading all prompts' answers from the EMCASilentCreate.rsp response file.
emca -config dbcontrol db -repos create -silent -respFile ./EMCASilentCreate.rsp
<filename>scs-web/src/main/java/com/zhcs/controller/ClassesController.java<gh_stars>0
package com.zhcs.controller;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.zhcs.entity.ClassesEntity;
import com.zhcs.service.ClassesService;
import com.zhcs.utils.BeanUtil;
import com.zhcs.utils.PageUtils;
import com.zhcs.utils.R;
//*****************************************************************************
/**
* <p>Title:ClassesController</p>
* <p>Description: 班次管理</p>
* <p>Copyright: Copyright (c) 2017</p>
* <p>Company: 深圳市智慧城市管家信息科技有限公司 </p>
* @author 刘晓东 - Alter
* @version v1.0 2017年2月23日
*/
//*****************************************************************************
@Controller
@RequestMapping("classes")
public class ClassesController extends AbstractController {

	@Autowired
	private ClassesService classesService;

	/**
	 * Returns the view name of the classes management page.
	 */
	@RequestMapping("/classes.html")
	public String list(){
		return "classes/classes.html";
	}

	/**
	 * Paged list of class (shift) records.
	 *
	 * @param sidx  sort column
	 * @param name  optional name filter
	 * @param order sort direction
	 * @param page  1-based page number; defaults to 1 when absent
	 * @param limit page size; defaults to 10 when absent
	 * @return paged result wrapped in {@code R}
	 */
	@ResponseBody
	@RequestMapping("/list")
	@RequiresPermissions("classes:list")
	public R list(String sidx, String name, String order, Integer page, Integer limit){
		// Guard against missing/invalid paging parameters: the offset
		// computation below previously threw a NullPointerException when
		// either request parameter was absent.
		int pageNum = (page == null || page < 1) ? 1 : page.intValue();
		int pageSize = (limit == null || limit < 1) ? 10 : limit.intValue();
		Map<String, Object> map = new HashMap<String, Object>();
		map.put("name", name);
		map.put("sidx", sidx);
		map.put("order", order);
		map.put("offset", (pageNum - 1) * pageSize);
		map.put("limit", pageSize);
		// Query the page of records and the total count for pagination.
		List<Map<String, Object>> classesList = classesService.queryListMap1(map);
		int total = classesService.queryTotal(map);
		PageUtils pageUtil = new PageUtils(classesList, total, pageSize, pageNum);
		return R.ok().put("page", pageUtil);
	}

	/**
	 * Detail of a single record by id.
	 */
	@ResponseBody
	@RequestMapping("/info/{id}")
	@RequiresPermissions("classes:info")
	public R info(@PathVariable("id") Long id){
		List<Map<String, Object>> classes = classesService.queryObjectMap(id);
		return R.ok().put("classes", classes);
	}

	/**
	 * Saves a new record, stamping creator and updater ids.
	 */
	@ResponseBody
	@RequestMapping("/save")
	@RequiresPermissions("classes:save")
	public R save(@RequestBody ClassesEntity classes){
		BeanUtil.fillCCUUD(classes, getUserId(), getUserId());
		classesService.save(classes);
		return R.ok();
	}

	/**
	 * Updates an existing record. The creator id ("crtid") is immutable, so
	 * it is removed from the incoming map before bean conversion.
	 */
	@ResponseBody
	@RequestMapping("/update")
	@RequiresPermissions("classes:update")
	public R update(@RequestBody Map<String, Object> classes){
		classes.remove("crtid");
		ClassesEntity classesEntity = BeanUtil.map2Bean(classes, ClassesEntity.class);
		BeanUtil.fillCCUUD(classesEntity,getUserId());
		classesService.update(classesEntity);
		return R.ok();
	}

	/**
	 * Deletes the record with the given id.
	 */
	@ResponseBody
	@RequestMapping("/delete/{id}")
	@RequiresPermissions("classes:delete")
	public R delete(@PathVariable("id") Long id){
		classesService.delete(id);
		return R.ok();
	}
}
|
#!/bin/bash
# BombBIT launcher. First run installs the required packages and drops a
# marker file (update.speedx); subsequent runs skip installation because the
# marker file is non-empty (-s).
clear
echo -e "\e[4;31m LotsOf Productions !!! \e[0m"
echo -e "\e[1;34m Presents \e[0m"
echo -e "\e[1;32m BombBIT \e[0m"
echo "Press Enter To Continue"
read a1
if [[ -s update.speedx ]];then
echo "All Requirements Found...."
else
echo 'Installing Requirements....'
echo .
echo .
apt install figlet toilet python curl -y
apt install python3-pip
pip install -r requirements.txt
# Writing this marker file is what marks installation as complete.
echo This Script Was Made By SpeedX >update.speedx
echo Requirements Installed....
echo Press Enter To Continue...
read upd
fi
# Main menu loop: redraw the banner, read a choice, dispatch.
while :
do
# Clean up temp marker files from a previous iteration.
rm *.xxx >/dev/null 2>&1
clear
echo -e "\e[1;31m"
figlet BombBIT
echo -e "\e[1;34m Created By \e[1;32m"
toilet -f mono12 -F border LotsOf
echo -e "\e[4;34m This Bomber Was Created By Yash Navadiya \e[0m"
echo -e "\e[1;34m For Any Queries Mail Me!!!\e[0m"
echo -e "\e[1;32m Mail: williamwillson4321@gmail.com \e[0m"
echo -e "\e[4;32m YouTube Page: https://www.youtube.com/c/GyanaTech \e[0m"
echo " "
echo -e "\e[4;31m Please Read Instruction Carefully !!! \e[0m"
echo " "
echo "Press 1 To Start SMS Bomber "
echo "Press 2 To Start Call Bomber "
echo "Press 3 To Update (Works On Linux And Linux Emulators) "
echo "Press 4 To View Features "
echo "Press 5 To Exit "
read ch
# $ch is quoted in the tests below so an empty input does not collapse
# the expression into invalid `[ -eq 1 ]` syntax.
if [ "$ch" -eq 1 ];then
clear
echo -e "\e[1;32m"
rm *.xxx >/dev/null 2>&1
python3 bomber.py
rm *.xxx >/dev/null 2>&1
exit 0
elif [ "$ch" -eq 2 ];then
clear
echo -e "\e[1;32m"
# The call.xxx marker tells bomber.py to run in call mode.
echo 'Call Bomb By Yash Navadiya'> call.xxx
python3 bomber.py call
rm *.xxx >/dev/null 2>&1
python3 bomber.py call
exit 0
elif [ "$ch" -eq 3 ];then
clear
apt install git -y
echo -e "\e[1;34m Downloading Latest Files..."
# Fixed URL: the original "https:github.com/..." lacked "//", so the clone
# always failed and self-update silently did nothing.
git clone https://github.com/YashkumarNavadiya/BombBIT/
if [[ -s BombBIT/BombBIT.sh ]];then
cd BombBIT
cp -r -f * .. > temp
cd ..
rm -rf BombBIT >> temp
rm update.speedx >> temp
rm temp
chmod +x BombBIT.sh
fi
echo -e "\e[1;32m BombBIT Will Restart Now..."
echo -e "\e[1;32m All The Required Packages Will Be Installed..."
echo -e "\e[1;34m Press Enter To Proceed To Restart..."
read a6
./BombBIT.sh
exit
elif [ "$ch" -eq 4 ];then
clear
echo -e "\e[1;33m"
figlet BombBIT
echo -e "\e[1;34mCreated By \e[1;34m"
toilet -f mono12 -F border LotsOf
echo " "
echo -e "\e[1;32m Features\e[1;34m"
echo " [+] Unlimited And Super-Fast Bombing"
echo " [+] International Bombing"
echo " [+] Call Bombing "
echo " [+] Protection List"
echo " [+] Automated Future Updates"
echo " [+] Easy To Use And Embed in Code"
echo -e "\e[1;32m Contributors\e[1;33m"
echo -e "\e[1;33m [*] SpeedX \e[1;31m"
echo " [-] Mail At: williamwillson4321@gmail.com"
echo -e "\e[1;33m [*] The Black Hacker Roxstar \e[1;31m"
echo " [-] Ping At: http://wa.me/917600140353"
echo -e "\e[1;33m [*] Rieltar \e[1;31m"
echo " [-] Ping At: https://t.me/Rieltar"
echo -e "\e[1;33m [*] 0n1cOn3 (Stefan) \e[1;31m"
echo " [-] Mail At: 0n1cOn3@gmx.ch"
echo ""
echo ""
echo -e "\e[1;31m This Script is Only For Educational Purposes or To Prank.\e[0m"
echo -e "\e[1;31m Do not Use This To Harm Others. \e[0m"
echo -e "\e[1;31m I Am Not Responsible For The Misuse Of The Script. \e[0m"
echo -e "\e[1;32m Make Sure To Update it If It Does not Work.\e[0m"
echo " "
echo -e "\e[4;31m That's All !!!\e[0m"
echo -e "\e[1;34m For Any Queries Mail Me!!!\e[0m"
echo -e "\e[1;32m Mail: williamwillson4321@gmail.com \e[0m"
echo -e "\e[1;32m Whatsapp: https://bit.do/speedxgit \e[0m"
echo -e "\e[4;32m YouTube Page: https://www.youtube.com/c/GyanaTech \e[0m"
echo "Press Enter To Go Home"
read a3
clear
elif [ "$ch" -eq 5 ];then
clear
echo -e "\e[1;31m"
figlet BombBIT
echo -e "\e[1;34m Created By \e[1;32m"
toilet -f mono12 -F border LotsOf
echo -e "\e[1;34m For Any Queries Mail Me!!!\e[0m"
echo -e "\e[1;32m Mail: williamwillson4321@gmail.com \e[0m"
echo -e "\e[1;32m Whatsapp: https://bit.do/speedxgit \e[0m"
echo -e "\e[4;32m YouTube Page: https://www.youtube.com/c/GyanaTech \e[0m"
echo " "
exit 0
else
echo -e "\e[4;32m Invalid Input !!! \e[0m"
echo "Press Enter To Go Home"
read a3
clear
fi
done
# The scp variant is kept for reference but is now fully commented out:
# commenting only the first line of a backslash-continued command (as the
# original did) left the remaining lines to execute as bogus standalone
# commands, ending with an attempt to run "makalu:/tmp/lab2b/".
# scp \
#     lab2_list-steal \
#     lab2_list-malloc \
#     lab2_list-biglock \
#     lab2_list-steal2-naivepadding \
#     lab2_list-steal-padding \
#     *.sh \
#     makalu:/tmp/lab2b/
rsync -avxP \
    list \
    list-m \
    list-p \
    list-pm \
    list-pml \
    list-pmla \
    *.sh \
    makalu:/tmp/lab2b/
|
<reponame>soheil555/fairOS-js<gh_stars>1-10
// Package entry point (barrel file): re-exports every public module so
// consumers can import from the package root instead of deep paths.
export * from "./utils";
export * from "./request";
// Model layer.
export * from "./models/document-db";
export * from "./models/fs";
export * from "./models/kv-store";
export * from "./models/pod";
export * from "./models/user";
// Client layer.
export * from "./client/document-db/table";
export * from "./client/fair-os";
export * from "./client/fs/directory";
export * from "./client/fs/file";
export * from "./client/kv-store/table";
export * from "./client/pod";
export * from "./client/user";
// Shared type definitions.
export * from "./types/base";
export * from "./types/document-db";
export * from "./types/fs";
export * from "./types/kv-store";
export * from "./types/pod";
export * from "./types/user";
|
/////////////////////////////////////////////////////////////////////////////
// Name: about.cpp
// Purpose: wxHtml sample: about dialog test
// Author: ?
// Modified by:
// Created: ?
// Copyright: (c) wxWidgets team
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"
// for all others, include the necessary headers (this file is usually all you
// need because it includes almost all "standard" wxWidgets headers
#ifndef WX_PRECOMP
#include "wx/wx.h"
#endif
#include "wx/image.h"
#include "wx/imagpng.h"
#include "wx/wxhtml.h"
#include "wx/statline.h"
#ifndef wxHAS_IMAGES_IN_RESOURCES
#include "../../sample.xpm"
#endif
// ----------------------------------------------------------------------------
// private classes
// ----------------------------------------------------------------------------
// Define a new application type, each program should derive a class from wxApp
class MyApp : public wxApp
{
public:
    // override base class virtuals
    // ----------------------------

    // this one is called on application startup and is a good place for the app
    // initialization (doing it here and not in the ctor allows to have an error
    // return: if OnInit() returns false, the application terminates)
    virtual bool OnInit() wxOVERRIDE;
};
// Define a new frame type: this is going to be our main frame
// Main application frame: a menu bar with About/Exit and the handlers below.
class MyFrame : public wxFrame
{
public:
    // ctor(s)
    MyFrame(const wxString& title);

    // event handlers (these functions should _not_ be virtual)
    void OnQuit(wxCommandEvent& event);
    void OnAbout(wxCommandEvent& event);

private:
    // any class wishing to process wxWidgets events must use this macro
    wxDECLARE_EVENT_TABLE();
};
// ----------------------------------------------------------------------------
// event tables and other macros for wxWidgets
// ----------------------------------------------------------------------------
// the event tables connect the wxWidgets events with the functions (event
// handlers) which process them. It can be also done at run-time, but for the
// simple menu events like this the static method is much simpler.
wxBEGIN_EVENT_TABLE(MyFrame, wxFrame)
EVT_MENU(wxID_ABOUT, MyFrame::OnAbout)
EVT_MENU(wxID_EXIT, MyFrame::OnQuit)
wxEND_EVENT_TABLE()
// Create a new application object: this macro will allow wxWidgets to create
// the application object during program execution (it's better than using a
// static object for many reasons) and also declares the accessor function
// wxGetApp() which will return the reference of the right type (i.e. MyApp and
// not wxApp)
wxIMPLEMENT_APP(MyApp);
// ============================================================================
// implementation
// ============================================================================
// ----------------------------------------------------------------------------
// the application class
// ----------------------------------------------------------------------------
// `Main program' equivalent: the program execution "starts" here
bool MyApp::OnInit()
{
    // Let the base class handle standard command-line options first.
    if ( !wxApp::OnInit() )
        return false;

    // we use a PNG image in our HTML page
    wxImage::AddHandler(new wxPNGHandler);

    // create and show the main application window
    // (top-level windows are owned and destroyed by wxWidgets itself)
    MyFrame *frame = new MyFrame(_("wxHtmlWindow testing application"));
    frame->Show();

    // success: wxApp::OnRun() will be called which will enter the main message
    // loop and the application will run. If we returned false here, the
    // application would exit immediately.
    return true;
}
// ----------------------------------------------------------------------------
// main frame
// ----------------------------------------------------------------------------
// frame constructor
// frame constructor: sets the icon and builds the File menu (About/Exit).
MyFrame::MyFrame(const wxString& title)
    : wxFrame((wxFrame *)NULL, wxID_ANY, title)
{
    SetIcon(wxICON(sample));

    // create a menu bar
    wxMenu *menuFile = new wxMenu;
    menuFile->Append(wxID_ABOUT);
    menuFile->Append(wxID_EXIT);

    // now append the freshly created menu to the menu bar...
    wxMenuBar *menuBar = new wxMenuBar;
    menuBar->Append(menuFile, _("&File"));

    // ... and attach this menu bar to the frame (the frame takes ownership)
    SetMenuBar(menuBar);
}
// event handlers
// Handler for wxID_EXIT: closes the main frame, ending the application.
void MyFrame::OnQuit(wxCommandEvent& WXUNUSED(event))
{
    // true is to force the frame to close
    Close(true);
}
// Handler for wxID_ABOUT: shows a modal dialog whose body is an HTML page.
// All widgets created here are parented to the stack-allocated dialog and
// are destroyed with it when the handler returns.
void MyFrame::OnAbout(wxCommandEvent& WXUNUSED(event))
{
    wxBoxSizer *topsizer;
    wxHtmlWindow *html;
    wxDialog dlg(this, wxID_ANY, wxString(_("About")));

    topsizer = new wxBoxSizer(wxVERTICAL);

    html = new wxHtmlWindow(&dlg, wxID_ANY, wxDefaultPosition, wxSize(380, 160), wxHW_SCROLLBAR_NEVER);
    html -> SetBorders(0);
    html -> LoadPage("data/about.htm");
    // Size the window to the HTML content so no scrollbars are needed.
    html -> SetInitialSize(wxSize(html -> GetInternalRepresentation() -> GetWidth(),
                                  html -> GetInternalRepresentation() -> GetHeight()));

    topsizer -> Add(html, 1, wxALL, 10);

#if wxUSE_STATLINE
    topsizer -> Add(new wxStaticLine(&dlg, wxID_ANY), 0, wxEXPAND | wxLEFT | wxRIGHT, 10);
#endif // wxUSE_STATLINE

    wxButton *bu1 = new wxButton(&dlg, wxID_OK, _("OK"));
    bu1 -> SetDefault();

    topsizer -> Add(bu1, 0, wxALL | wxALIGN_RIGHT, 15);

    dlg.SetSizer(topsizer);
    topsizer -> Fit(&dlg);

    dlg.ShowModal();
}
|
package io.opensphere.core.model.time;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.RandomAccess;
import io.opensphere.core.util.collections.New;
/**
* An unmodifiable array list of non-overlapping {@link TimeSpan}s.
*/
public class TimeSpanArrayList extends TimeSpanList implements RandomAccess
{
    /** Serial version UID. */
    private static final long serialVersionUID = 1L;

    /** My time spans. Unmodifiable after construction. */
    private final List<TimeSpan> myTimeSpans;

    /**
     * Constructor. Overlapping input spans are merged (via the inherited
     * {@code mergeOverlaps}) so the stored list is non-overlapping.
     *
     * @param timeSpans The list of time spans.
     */
    public TimeSpanArrayList(Collection<? extends TimeSpan> timeSpans)
    {
        if (timeSpans.isEmpty())
        {
            myTimeSpans = Collections.emptyList();
        }
        else
        {
            ArrayList<TimeSpan> list = new ArrayList<>(timeSpans);
            mergeOverlaps(list);
            myTimeSpans = Collections.unmodifiableList(list);
        }
    }

    @Override
    public TimeSpanList clone(Collection<? extends TimeSpan> spans)
    {
        return new TimeSpanArrayList(spans);
    }

    @Override
    public TimeSpan get(int index)
    {
        return myTimeSpans.get(index);
    }

    /**
     * Get my time spans.
     *
     * @return The (unmodifiable) list of time spans.
     */
    public List<TimeSpan> getTimeSpans()
    {
        return myTimeSpans;
    }

    @Override
    public TimeSpanList intersection(TimeSpan ts)
    {
        // Collect the overlap of each stored span with the argument.
        List<TimeSpan> intersectList = New.list();
        for (TimeSpan part : this)
        {
            if (part != null && part.overlaps(ts))
            {
                TimeSpan intersection = part.getIntersection(ts);
                if (intersection != null)
                {
                    intersectList.add(intersection);
                }
            }
        }
        if (intersectList.isEmpty())
        {
            return emptyList();
        }
        return new TimeSpanArrayList(intersectList);
    }

    @Override
    public TimeSpanList intersection(TimeSpanList other)
    {
        // Pairwise overlap of each span in this list with each span in the
        // other list (O(n*m), acceptable for typical small span lists).
        List<TimeSpan> intersectList = New.list();
        for (TimeSpan aTS : this)
        {
            if (aTS != null)
            {
                for (TimeSpan oTS : other)
                {
                    if (oTS != null && aTS.overlaps(oTS))
                    {
                        TimeSpan intersection = aTS.getIntersection(oTS);
                        if (intersection != null)
                        {
                            intersectList.add(intersection);
                        }
                    }
                }
            }
        }
        if (intersectList.isEmpty())
        {
            return emptyList();
        }
        return new TimeSpanArrayList(intersectList);
    }

    @Override
    public int size()
    {
        return myTimeSpans.size();
    }

    @Override
    public TimeSpanList union(TimeSpan ts)
    {
        // The constructor merges any overlaps introduced by the addition.
        List<TimeSpan> unionList = New.list(this);
        unionList.add(ts);
        return new TimeSpanArrayList(unionList);
    }

    @Override
    public TimeSpanList union(TimeSpanList other)
    {
        // The constructor merges any overlaps introduced by the addition.
        List<TimeSpan> unionList = New.list(this);
        unionList.addAll(other);
        return new TimeSpanArrayList(unionList);
    }
}
|
<gh_stars>0
from afqmctools.utils.qmcpack_utils import write_xml_input

# AFQMC driver options, passed straight through to the QMCPACK XML writer.
# Runs 10 walkers for 1000 blocks at timestep 0.01 with a back-propagation
# estimator that accumulates the one-particle reduced density matrix (OneRDM).
options = {
    "execute": {
        "nWalkers": 10,
        "blocks": 1000,
        "timestep": 0.01,
        "Estimator": {
            "back_propagation": {
                "ortho": 1,
                "naverages": 4,
                "obs": {
                    "OneRDM": {}
                },
                "block_size": 2,
                "nsteps": 200
            }
        }
    }
}

# Emit afqmc.xml referencing afqmc.h5 for both Hamiltonian and wavefunction;
# the RNG seed is fixed for reproducibility.
write_xml_input("afqmc.xml", "afqmc.h5", "afqmc.h5",
                options=options, rng_seed=7)
|
# Apply do_something to every (key, value) pair of every record.
# NOTE(review): `records` and `do_something` are defined elsewhere — confirm.
for row in records:
    for key, value in row.items():
        try:
            do_something(key, value)
        except Exception:
            # NOTE(review): all failures are swallowed silently; consider
            # logging the exception so data problems are not hidden.
            pass
<gh_stars>0
#include <fstream>
#include <sstream>
#include <iostream>
#include <string>
#include <cassert>
#include <cstdint>
#include <winsock.h>
#include "PngFileHeader.h"
using namespace std;
// Пример функции, использующей работу с двоичными файлами для чтения информации о PNG-изображении (PngFileHeader.h)
bool GetPngFileInfo(const wstring & fileName, PNGInfo & pngInfo);
// Demonstrates text-file I/O, binary PNG header parsing, and string streams.
// Fixed: the original declared `void main()`, which is non-standard C++ —
// main must return int; the bare `return;` inside it is now `return 1;`.
int main()
{
    // Writing a text file
    {
        // Open text.txt for writing
        ofstream outputFile("text.txt");
        if (outputFile.is_open())
        {
            outputFile << "Hello, world: " << 10 << " " << 42 << endl; // formatted output
            outputFile.put('!'); // write a single character
            // flush() pushes buffered data from memory out to the file
            outputFile.flush();
            // Concise way to check the stream for an error state
            if (!outputFile)
            {
                cout << "An error occurred when writing outputFile" << endl;
            }
        }
        else
        {
            cout << "Failed to open text.txt for writing" << endl;
        }
    }
    // Reading the text file back
    {
        // Open a read stream on the text.txt file just written
        ifstream inputFile("text.txt");
        if (inputFile.is_open())
        {
            string line1;
            if (!getline(inputFile, line1))
            {
                cout << "Failed to read a line from file" << endl;
                return 1; // was a bare `return;` in the non-standard void main()
            }
            assert(line1 == "Hello, world: 10 42");
        }
        else
        {
            cout << "Failed to open text.txt for reading" << endl;
        }
    }
    // Extracting information from a binary PNG file
    {
        PNGInfo pngInfo;
        if (GetPngFileInfo(L"test.png", pngInfo))
        {
            auto & hdr = pngInfo.imageHeader;
            cout << "test.png file size is " << hdr.width << "x" << hdr.height << "pixels" << endl;
        }
        else
        {
            cout << "test.png is not a valid PNG image" << endl;
        }
    }
    // Working with string streams
    {
        // Writing
        {
            ostringstream strm;
            strm << "Hello, " << 42;
            assert(strm.str() == "Hello, 42");
        }
        // Reading
        {
            istringstream strm("Hello world 42");
            string hello;
            string world;
            int answerToTheUltimateQuestionOfLifeTheUniverseAndEverything;
            strm >> hello >> world >> answerToTheUltimateQuestionOfLifeTheUniverseAndEverything;
            assert(hello == "Hello");
            assert(world == "world");
            assert(answerToTheUltimateQuestionOfLifeTheUniverseAndEverything == 42);
        }
        // Reading and writing on the same stream
        {
            stringstream strm;
            strm << "Hello! How are you?";
            string hello, how, are, you;
            strm >> hello >> how >> are >> you;
            assert(hello == "Hello!");
            assert(how == "How");
            assert(are == "are");
            assert(you == "you?");
        }
    }
    return 0;
}
/**
 * Reads the header of a PNG file and validates it against the PNG format.
 * Returns true and fills pngInfo on success; false on any I/O or format error.
 * NOTE(review): memcmp/ntohl appear to come in transitively via the existing
 * includes (<winsock.h>); consider adding <cstring> explicitly — confirm.
 */
bool GetPngFileInfo(const wstring & fileName, PNGInfo & pngInfo)
{
    // Open in binary mode so no newline translation corrupts the data.
    ifstream pngFile(fileName, ios_base::binary | ios_base::in);
    if (!pngFile.is_open())
    {
        return false;
    }
    if (!pngFile.read(reinterpret_cast<char*>(&pngInfo), sizeof(pngInfo)))
    {
        // An error occurred while reading the beginning of the PNG file
        return false;
    }
    if (memcmp(pngInfo.pngSignature, PNG_SIGNATURE, sizeof(PNG_SIGNATURE)) != 0)
    {
        // The signature at the start of the file does not match the PNG specification
        return false;
    }
    auto & hdr = pngInfo.imageHeader;
    // PNG stores numbers larger than one byte in network byte order (big-endian),
    // so they must be converted to the host representation.
    // ntohl = network to host (long).
    hdr.chunkSize = ntohl(hdr.chunkSize);
    if (hdr.chunkSize != (sizeof(IHDRChunk) - sizeof(ChunkHeader)))
    {
        // The data block length does not match the IHDR chunk specification
        return false;
    }
    // The first chunk in a PNG file must be the IHDR chunk
    if (memcmp(hdr.signature, IHDR_SIGNATURE, sizeof(IHDR_SIGNATURE)) != 0)
    {
        return false;
    }
    // Convert width and height from network to host byte order
    hdr.width = ntohl(hdr.width);
    hdr.height = ntohl(hdr.height);
    if (hdr.width == 0 || hdr.height == 0)
    {
        // Neither the width nor the height of a PNG image may be zero
        return false;
    }
    return true;
}
|
<reponame>hapramp/1Rramp-Android
package com.hapramp.viewmodel.common;
import android.app.Application;
import android.arch.lifecycle.AndroidViewModel;
import android.arch.lifecycle.MutableLiveData;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.support.annotation.NonNull;
import com.hapramp.utils.ConnectionUtils;
/**
* Created by Ankit on 5/6/2018.
*/
/**
 * Exposes network connectivity changes as LiveData.
 *
 * Fixed: the original registered an anonymous BroadcastReceiver in the
 * constructor and never unregistered it, leaking the receiver for the
 * lifetime of the Application; it also relied on catching NullPointerException
 * when a broadcast arrived before the LiveData was created.
 */
public class ConnectivityViewModel extends AndroidViewModel {
  MutableLiveData<Boolean> connectivity;

  /** Kept so the receiver can be unregistered in {@link #onCleared()}. */
  private final NetworkChangeReceiver networkChangeReceiver;

  public ConnectivityViewModel(@NonNull Application application) {
    super(application);
    networkChangeReceiver = new NetworkChangeReceiver();
    application.registerReceiver(networkChangeReceiver, new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
  }

  /**
   * @return live connectivity state (true = connected), created lazily.
   */
  public MutableLiveData<Boolean> getConnectivityState() {
    if (connectivity == null) {
      connectivity = new MutableLiveData<>();
    }
    return connectivity;
  }

  @Override
  protected void onCleared() {
    // Unregister to avoid leaking the receiver once the ViewModel is destroyed.
    getApplication().unregisterReceiver(networkChangeReceiver);
    super.onCleared();
  }

  class NetworkChangeReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
      // Explicit null check instead of catching NullPointerException:
      // a broadcast may arrive before getConnectivityState() is first called.
      if (connectivity != null) {
        connectivity.setValue(ConnectionUtils.isConnected(context));
      }
    }
  }
}
|
<reponame>oozy/automated-flows-chrome-extension
// Inject the content-script bundle into the current tab when the
// "clickme" element is clicked.
const hello = () => {
  chrome.tabs.executeScript({ file: 'contentScript.bundle.js' });
};

document.getElementById('clickme').addEventListener('click', hello);
// function reddenPage() {
// console.log('document', document);
// document.body.style.backgroundColor = 'red';
// }
// chrome.action.onClicked.addListener((tab) => {
// chrome.scripting.executeScript({
// target: { tabId: tab.id },
// function: reddenPage,
// });
// });
// chrome.tabs.onUpdated.addListener(function (tabId, changeInfo, tab) {
// if (changeInfo.status == 'complete') {
// chrome.tabs.executeScript(null, {
// code: "document.body.style.backgroundColor='red'",
// });
// // chrome.tabs.executeScript(tabId, { file: '../Content/index.js' });
// }
// });
|
#!/bin/bash
# Summarize a git history for the path/revision given in $1:
# commit dates, abbreviated hashes, and per-commit line-change totals
# (insertions + deletions) derived from --numstat output.
#
# Fixed: $1 and echoed variables are now quoted, and the `echo … | bc`
# pipelines are replaced with native shell arithmetic $(( )).
date=$(git log --pretty=format:"%cd" --date=short "$1")
hash=$(git log --pretty=format:"%h" "$1")
changes=$(git log --pretty=format:"" --numstat "$1")
counter=1
line_changes=0

for i in $date
do
  echo "$i"
done

echo '****************'

for i in $hash
do
  echo "$i"
done

echo '****************'

# --numstat emits triples: <added> <deleted> <path>; sum the first two
# fields of each triple and print the total when the path field arrives.
for i in $changes
do
  if [ $((counter % 3)) -eq 1 ]; then
    line_changes=$i
    counter=$((counter+1))
  elif [ $((counter % 3)) -eq 2 ]; then
    line_changes=$((line_changes+i))
    counter=$((counter+1))
  else
    echo "$line_changes"
    counter=1
  fi
done
|
def Fibonacci(n):
    """Return the n-th Fibonacci number, 1-indexed: F(1)=0, F(2)=1, F(3)=1, ...

    Prints "Incorrect input" and returns None for n < 1. This also fixes the
    original handling of n == 0, which recursed into negative arguments and
    raised ``TypeError: unsupported operand`` on ``None + None``.

    The naive double recursion (O(2**n)) is replaced by an O(n) iteration.
    """
    if n < 1:
        print("Incorrect input")
        return None
    # Iteratively advance the pair (F(k), F(k+1)) starting from (0, 1).
    a, b = 0, 1
    for _ in range(n - 1):
        a, b = b, a + b
    return a
# Driver Program: collect the first nine Fibonacci numbers and print them.
result = [Fibonacci(i) for i in range(1, 10)]
print(result)
<gh_stars>0
/**
* Copyright (c) 2018-2019 Mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author <NAME> <<EMAIL>>
* @author <NAME> <<EMAIL>>
*/
import { Type } from '../type';
import * as Core from './core';
import { Arguments, Argument } from '../symbol';
import { symbol } from '../helpers';
// Structure-specific value and flag types used by the symbol tables below.
export namespace Types {
    export const ElementSymbol = Type.Value('Structure', 'ElementSymbol');
    export const AtomName = Type.Value('Structure', 'AtomName');

    export const BondFlag = Type.OneOf('Structure', 'BondFlag', Type.Str, ['covalent', 'metallic', 'ion', 'hydrogen', 'sulfide', 'computed', 'aromatic']);
    export const BondFlags = Core.Types.Flags(BondFlag, 'BondFlags');

    export const SecondaryStructureFlag = Type.OneOf('Structure', 'SecondaryStructureFlag', Type.Str, ['alpha', 'beta', '3-10', 'pi', 'sheet', 'strand', 'helix', 'turn', 'none']);
    export const SecondaryStructureFlags = Core.Types.Flags(SecondaryStructureFlag, 'SecondaryStructureFlag');

    export const RingFingerprint = Type.Value('Structure', 'RingFingerprint');
    export const EntityType = Type.OneOf('Structure', 'EntityType', Type.Str, ['polymer', 'non-polymer', 'water', 'branched']);
    export const EntitySubtype = Type.OneOf('Structure', 'EntitySubtype', Type.Str, ['other', 'polypeptide(D)', 'polypeptide(L)', 'polydeoxyribonucleotide', 'polyribonucleotide', 'polydeoxyribonucleotide/polyribonucleotide hybrid', 'cyclic-pseudo-peptide', 'peptide nucleic acid', 'oligosaccharide']);
    export const ObjectPrimitive = Type.OneOf('Structure', 'ObjectPrimitive', Type.Str, ['atomistic', 'sphere', 'gaussian', 'other']);
    export const ResidueId = Type.Value('Structure', 'ResidueId');

    export const ElementSet = Type.Value('Structure', 'ElementSet');
    export const ElementSelection = Type.Value('Structure', 'ElementSelection');
    export const ElementReference = Type.Value('Structure', 'ElementReference');

    // A query is a function producing an ElementSelection.
    export const ElementSelectionQuery = Core.Types.Fn(ElementSelection, 'ElementSelectionQuery');
}
// Constructors for structure-specific values (element symbols, atom names,
// flag sets, residue identifiers).
const type = {
    '@header': 'Types',
    elementSymbol: symbol(
        Arguments.Dictionary({ 0: Argument(Type.Str) }),
        Types.ElementSymbol, 'Create element symbol representation from a string value.'),

    atomName: symbol(
        Arguments.Dictionary({ 0: Argument(Type.AnyValue) }), Types.AtomName, 'Convert a value to an atom name.'),

    entityType: symbol(
        Arguments.Dictionary({ 0: Argument(Types.EntityType) }),
        Types.EntityType,
        `Create normalized representation of entity type: ${Type.oneOfValues(Types.EntityType).join(', ')}.`),

    bondFlags: symbol(
        Arguments.List(Types.BondFlag),
        Types.BondFlags,
        `Create bond flags representation from a list of strings. Allowed flags: ${Type.oneOfValues(Types.BondFlag).join(', ')}.`),

    ringFingerprint: symbol(
        Arguments.List(Types.ElementSymbol, { nonEmpty: true }),
        Types.RingFingerprint,
        'Create ring fingerprint from the supplied atom element list.'),

    secondaryStructureFlags: symbol(
        Arguments.List(Types.SecondaryStructureFlag),
        Types.SecondaryStructureFlags,
        `Create secondary structure flags representation from a list of strings. Allowed flags: ${Type.oneOfValues(Types.SecondaryStructureFlag).join(', ')}.`),

    authResidueId: symbol(Arguments.Dictionary({
        0: Argument(Type.Str, { description: 'auth_asym_id' }),
        1: Argument(Type.Num, { description: 'auth_seq_id' }),
        2: Argument(Type.Str, { description: 'pdbx_PDB_ins_code', isOptional: true })
    }), Types.ResidueId, `Residue identifier based on "auth_" annotation.`),

    labelResidueId: symbol(Arguments.Dictionary({
        0: Argument(Type.Str, { description: 'label_entity_id' }),
        1: Argument(Type.Str, { description: 'label_asym_id' }),
        2: Argument(Type.Num, { description: 'label_seq_id' }),
        3: Argument(Type.Str, { description: 'pdbx_PDB_ins_code', isOptional: true })
    }), Types.ResidueId, `Residue identifier based on mmCIF's "label_" annotation.`)
};
// Implicit values available while iterating over atoms/element sets.
const slot = {
    '@header': 'Iteration Slots',
    element: symbol(Arguments.None, Types.ElementReference, 'A reference to the current element.'),
    elementSetReduce: symbol(Arguments.None, Type.Variable('a', Type.AnyValue, true), 'Current value of the element set reducer.')
};
// Symbols that generate an initial element selection from the structure.
const generator = {
    '@header': 'Generators',
    all: symbol(Arguments.None, Types.ElementSelectionQuery, 'The entire structure.'),

    atomGroups: symbol(Arguments.Dictionary({
        'entity-test': Argument(Type.Bool, { isOptional: true, defaultValue: true, description: 'Test for the 1st atom of every entity' }),
        'chain-test': Argument(Type.Bool, { isOptional: true, defaultValue: true, description: 'Test for the 1st atom of every chain' }),
        'residue-test': Argument(Type.Bool, { isOptional: true, defaultValue: true, description: 'Test for the 1st atom every residue' }),
        'atom-test': Argument(Type.Bool, { isOptional: true, defaultValue: true }),
        'group-by': Argument(Type.Any, { isOptional: true, defaultValue: `atom-key`, description: 'Group atoms to sets based on this property. Default: each atom has its own set' }),
    }), Types.ElementSelectionQuery, 'Return all atoms for which the tests are satisfied, grouped into sets.'),

    bondedAtomicPairs: symbol(Arguments.Dictionary({
        0: Argument(Type.Bool, { isOptional: true, defaultValue: 'true for covalent bonds' as any, description: 'Test each bond with this predicate. Each bond is visited twice with swapped atom order.' }),
        // TODO: should we support this or just use queryEach to get similar behavior
        // 'group-by': Argument(Type.Any, { isOptional: true, defaultValue: ``, description: 'Group the bonds using the provided value' }),
    }), Types.ElementSelectionQuery, 'Return all pairs of atoms for which the test is satisfied.'),

    rings: symbol(Arguments.Dictionary({
        'fingerprint': Argument(Types.RingFingerprint, { isOptional: true }),
        'only-aromatic': Argument(Type.Bool, { isOptional: true, defaultValue: false }),
    }), Types.ElementSelectionQuery, 'Return all rings or those with the specified fingerprint and/or only aromatic rings.'),

    queryInSelection: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        query: Argument(Types.ElementSelectionQuery),
        'in-complement': Argument(Type.Bool, { isOptional: true, defaultValue: false })
    }), Types.ElementSelectionQuery, 'Executes query only on atoms that are in the source selection.'),

    empty: symbol(Arguments.None, Types.ElementSelectionQuery, 'Nada.'),
};
// Symbols that transform an existing selection into a new one.
// Fixed typos in user-facing descriptions: 'orginal' -> 'original',
// 'surrouding' -> 'surrounding'.
const modifier = {
    '@header': 'Selection Modifications',
    queryEach: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        query: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'Query every atom set in the input selection separately.'),

    intersectBy: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        by: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'Intersect each atom set from the first sequence from atoms in the second one.'),

    exceptBy: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        by: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, `Remove all atoms from 'selection' that occur in 'by'.`),

    unionBy: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        by: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'For each atom set A in the original sequence, combine all atoms sets in the target selection that intersect with A.'),

    union: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'Collects all atom sets in the sequence into a single atom set.'),

    cluster: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        'min-distance': Argument(Type.Num, { isOptional: true, defaultValue: 0 }),
        'max-distance': Argument(Type.Num),
        'min-size': Argument(Type.Num, { description: 'Minimal number of sets to merge, must be at least 2', isOptional: true, defaultValue: 2 }),
        'max-size': Argument(Type.Num, { description: 'Maximal number of sets to merge, if not set, no limit', isOptional: true }),
    }), Types.ElementSelectionQuery, 'Combines atom sets that have mutual distance in the interval [min-radius, max-radius]. Minimum/maximum size determines how many atom sets can be combined.'),

    includeSurroundings: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        radius: Argument(Type.Num),
        'atom-radius': Argument(Type.Num, { isOptional: true, defaultValue: 0, description: 'Value added to each atom before the distance check, for example VDW radius. Using this argument is computationally demanding.' }),
        'as-whole-residues': Argument(Type.Bool, { isOptional: true })
    }), Types.ElementSelectionQuery, 'For each atom set in the selection, include all surrounding atoms/residues that are within the specified radius.'),

    surroundingLigands: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        radius: Argument(Type.Num),
        'include-water': Argument(Type.Bool, { isOptional: true, defaultValue: true })
    }), Types.ElementSelectionQuery, 'Find all ligands components around the source query.'),

    includeConnected: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        'bond-test': Argument(Type.Bool, { isOptional: true, defaultValue: 'true for covalent bonds' as any }),
        'layer-count': Argument(Type.Num, { isOptional: true, defaultValue: 1, description: 'Number of bonded layers to include.' }),
        'fixed-point': Argument(Type.Bool, { isOptional: true, defaultValue: false, description: 'Continue adding layers as long as new connections exist.' }),
        'as-whole-residues': Argument(Type.Bool, { isOptional: true })
    }), Types.ElementSelectionQuery, 'Pick all atom sets that are connected to the target.'),

    wholeResidues: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
    }), Types.ElementSelectionQuery, 'Expand the selection to whole residues.'),

    expandProperty: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        property: Argument(Type.AnyValue)
    }), Types.ElementSelectionQuery, 'To each atom set in the selection, add all atoms that have the same property value that was already present in the set.')
};
// Symbols that keep or drop whole atom sets from a selection.
const filter = {
    '@header': 'Selection Filters',
    pick: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        test: Argument(Type.Bool)
    }), Types.ElementSelectionQuery, 'Pick all atom sets that satisfy the test.'),

    first: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'Take the 1st atom set in the sequence.'),

    withSameAtomProperties: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        source: Argument(Types.ElementSelectionQuery),
        property: Argument(Type.Any)
    }), Types.ElementSelectionQuery, 'Pick all atom sets for which the set of given atom properties is a subset of the source properties.'),

    intersectedBy: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        by: Argument(Types.ElementSelectionQuery)
    }), Types.ElementSelectionQuery, 'Pick all atom sets that have non-zero intersection with the target.'),

    within: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        target: Argument(Types.ElementSelectionQuery),
        'min-radius': Argument(Type.Num, { isOptional: true, defaultValue: 0 }),
        'max-radius': Argument(Type.Num),
        'atom-radius': Argument(Type.Num, { isOptional: true, defaultValue: 0, description: 'Value added to each atom before the distance check, for example VDW radius. Using this argument is computationally demanding.' }),
        invert: Argument(Type.Bool, { isOptional: true, defaultValue: false, description: 'If true, pick only atom sets that are further than the specified radius.' }),
    }), Types.ElementSelectionQuery, 'Pick all atom sets from selection that have any atom within the radius of any atom from target.'),

    isConnectedTo: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery),
        target: Argument(Types.ElementSelectionQuery),
        'bond-test': Argument(Type.Bool, { isOptional: true, defaultValue: 'true for covalent bonds' as any }),
        disjunct: Argument(Type.Bool, { isOptional: true, defaultValue: true, description: 'If true, there must exist a bond to an atom that lies outside the given atom set to pass test.' }),
        invert: Argument(Type.Bool, { isOptional: true, defaultValue: false, description: 'If true, return atom sets that are not connected.' })
    }), Types.ElementSelectionQuery, 'Pick all atom sets that are connected to the target.'),
};
// Symbols that combine multiple selections into one.
const combinator = {
    '@header': 'Selection Combinators',
    intersect: symbol(Arguments.List(Types.ElementSelectionQuery), Types.ElementSelectionQuery, 'Return all unique atom sets that appear in all of the source selections.'),

    merge: symbol(Arguments.List(Types.ElementSelectionQuery), Types.ElementSelectionQuery, 'Merges multiple selections into a single one. Only unique atom sets are kept.'),

    distanceCluster: symbol(Arguments.Dictionary({
        matrix: Argument(Core.Types.List(Core.Types.List(Type.Num)), { description: 'Distance matrix, represented as list of rows (num[][])). Lower triangle is min distance, upper triangle is max distance.' }),
        selections: Argument(Core.Types.List(Types.ElementSelectionQuery), { description: 'A list of held selections.' })
    }), Types.ElementSelectionQuery, 'Pick combinations of atom sets from the source sequences that are mutually within distances specified by a matrix.')
};
// Symbols evaluated in the context of the current atom set.
// Fixed typo in user-facing description: 'occurences' -> 'occurrences'.
const atomSet = {
    '@header': 'Atom Sets',

    atomCount: symbol(Arguments.None, Type.Num),

    countQuery: symbol(Arguments.Dictionary({
        0: Argument(Types.ElementSelectionQuery)
    }), Type.Num, 'Counts the number of occurrences of a specific query inside the current atom set.'),

    reduce: symbol(Arguments.Dictionary({
        initial: Argument(Type.Variable('a', Type.AnyValue, true), { description: 'Initial value assigned to slot.atom-set-reduce. Current atom is set to the 1st atom of the current set for this.' }),
        value: Argument(Type.Variable('a', Type.AnyValue, true), { description: 'Expression executed for each atom in the set' })
    }), Type.Variable('a', Type.AnyValue, true), 'Execute the value expression for each atom in the current atom set and return the result. Works the same way as Array.reduce in JavaScript (``result = value(value(...value(initial)))``)'),

    propertySet: symbol(Arguments.Dictionary({
        0: Argument(Core.Types.ConstrainedVar),
    }), Core.Types.Set(Core.Types.ConstrainedVar), 'Returns a set with all values of the given property in the current atom set.'),
};
// Per-atom property accessors, grouped into core, topology, and mmCIF-derived
// macromolecular properties.
// Fixed typo in user-facing description: 'modifed' -> 'modified'.
const atomProperty = {
    '@header': 'Atom Properties',
    core: {
        '@header': 'Core Properties',
        elementSymbol: atomProp(Types.ElementSymbol),
        vdw: atomProp(Type.Num, 'Van der Waals radius'),
        mass: atomProp(Type.Num, 'Atomic weight'),
        atomicNumber: atomProp(Type.Num, 'Atomic number'),
        x: atomProp(Type.Num, 'Cartesian X coordinate'),
        y: atomProp(Type.Num, 'Cartesian Y coordinate'),
        z: atomProp(Type.Num, 'Cartesian Z coordinate'),
        atomKey: atomProp(Type.AnyValue, 'Unique value for each atom. Main use case is grouping of atoms.'),

        bondCount: symbol(Arguments.Dictionary({
            0: Argument(Types.ElementReference, { isOptional: true, defaultValue: 'slot.current-atom' }),
            flags: Argument(Types.BondFlags, { isOptional: true, defaultValue: 'covalent' as any }),
        }), Type.Num, 'Number of bonds (by default only covalent bonds are counted).'),

        sourceIndex: atomProp(Type.Num, 'Index of the atom/element in the input file.'),
        operatorName: atomProp(Type.Str, 'Name of the symmetry operator applied to this element.'),
        modelIndex: atomProp(Type.Num, 'Index of the model in the input file.'),
        modelLabel: atomProp(Type.Str, 'Label/header of the model in the input file.')
    },
    topology: {
        connectedComponentKey: atomProp(Type.AnyValue, 'Unique value for each connected component.')
    },
    macromolecular: {
        '@header': 'Macromolecular Properties (derived from the mmCIF format)',
        authResidueId: atomProp(Types.ResidueId, `type.auth-residue-id symbol executed on current atom's residue`),
        labelResidueId: atomProp(Types.ResidueId, `type.label-residue-id symbol executed on current atom's residue`),

        residueKey: atomProp(Type.AnyValue, 'Unique value for each tuple ``(label_entity_id,auth_asym_id, auth_seq_id, pdbx_PDB_ins_code)``, main use case is grouping of atoms'),
        chainKey: atomProp(Type.AnyValue, 'Unique value for each tuple ``(label_entity_id, auth_asym_id)``, main use case is grouping of atoms'),
        entityKey: atomProp(Type.AnyValue, 'Unique value for each tuple ``label_entity_id``, main use case is grouping of atoms'),

        isHet: atomProp(Type.Bool, 'Equivalent to atom_site.group_PDB !== ATOM'),
        id: atomProp(Type.Num, '_atom_site.id'),

        label_atom_id: atomProp(Types.AtomName),
        label_alt_id: atomProp(Type.Str),
        label_comp_id: atomProp(Type.Str),
        label_asym_id: atomProp(Type.Str),
        label_entity_id: atomProp(Type.Str),
        label_seq_id: atomProp(Type.Num),

        auth_atom_id: atomProp(Types.AtomName),
        auth_comp_id: atomProp(Type.Str),
        auth_asym_id: atomProp(Type.Str),
        auth_seq_id: atomProp(Type.Num),

        pdbx_PDB_ins_code: atomProp(Type.Str),
        pdbx_formal_charge: atomProp(Type.Num),
        occupancy: atomProp(Type.Num),
        B_iso_or_equiv: atomProp(Type.Num),

        entityType: atomProp(Types.EntityType, 'Type of the entity as defined in mmCIF (polymer, non-polymer, branched, water)'),
        entitySubtype: atomProp(Types.EntitySubtype, 'Subtype of the entity as defined in mmCIF _entity_poly.type and _pdbx_entity_branch.type (other, polypeptide(D), polypeptide(L), polydeoxyribonucleotide, polyribonucleotide, polydeoxyribonucleotide/polyribonucleotide hybrid, cyclic-pseudo-peptide, peptide nucleic acid, oligosaccharide)'),
        entityPrdId: atomProp(Type.Str, `The PRD ID of the entity.`),
        entityDescription: atomProp(Core.Types.List(Type.Str)),
        objectPrimitive: atomProp(Types.ObjectPrimitive, 'Type of the primitive object used to model this segment as defined in mmCIF/IHM (atomistic, sphere, gaussian, other)'),

        secondaryStructureKey: atomProp(Type.AnyValue, 'Unique value for each secondary structure element.'),
        secondaryStructureFlags: atomProp(Types.SecondaryStructureFlags),
        isModified: atomProp(Type.Bool, 'True if the atom belongs to modification of a standard residue.'),
        modifiedParentName: atomProp(Type.Str, `'3-letter' code of the modified parent residue.`),
        isNonStandard: atomProp(Type.Bool, 'True if this is a non-standard residue.'),
        chemCompType: atomProp(Type.Str, `Type of the chemical component as defined in mmCIF.`),
    }
};
// Per-bond property accessors (evaluated in a bond context).
const bondProperty = {
    '@header': 'Bond Properties',
    flags: bondProp(Types.BondFlags),
    order: bondProp(Type.Num),
    length: bondProp(Type.Num),
    atomA: bondProp(Types.ElementReference),
    atomB: bondProp(Types.ElementReference)
};
/**
 * Builds an atom-property symbol: accepts an optional element reference
 * (defaulting to the current-atom slot) and yields a value of `type`.
 */
function atomProp(type: Type, description?: string) {
    return symbol(Arguments.Dictionary({ 0: Argument(Types.ElementReference, { isOptional: true, defaultValue: 'slot.current-atom' }) }), type, description);
}
/**
 * Builds a bond-property symbol: takes no arguments and yields a value of
 * `type` for the current bond.
 */
function bondProp(type: Type, description?: string) {
    return symbol(Arguments.None, type, description);
}
export const structureQuery = {
'@header': 'Structure Queries',
type,
slot,
generator,
modifier,
filter,
combinator,
atomSet,
atomProperty,
bondProperty: bondProperty
}; |
def sort_dict_by_value(d):
    """Return a new dict whose items are ordered by ascending value."""
    ordered_items = sorted(d.items(), key=lambda item: item[1])
    return dict(ordered_items)
# Demonstrate value-ordered printing on a small sample dict.
sample = {"A": 2, "B": 5, "C": 3, "D": 1}
print(sort_dict_by_value(sample))
#!/bin/bash
# Reformat all TypeScript sources under ./src in place using the
# project's Prettier configuration.
prettier \
  --config ./.prettierrc.js \
  --write "./src/**/*.{ts,tsx}"
/*
* Copyright 2018 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.abris.avro.read.confluent
import io.confluent.common.config.ConfigException
import org.apache.avro.Schema
import org.scalatest.{BeforeAndAfter, FlatSpec}
import za.co.absa.abris.avro.parsing.utils.AvroSchemaUtils
/**
 * Tests for SchemaManager: subject-name resolution under the three
 * schema-storage naming strategies, and behaviour when the Schema Registry
 * client has not been configured.
 */
class SchemaManagerSpec extends FlatSpec with BeforeAndAfter {

  // Minimal Avro record schema used by the registration test.
  private val schema = AvroSchemaUtils.parse("{\"type\": \"record\", \"name\": \"Blah\", \"fields\": [{ \"name\": \"name\", \"type\": \"string\" }]}")

  behavior of "SchemaManager"

  // Every test starts from an unconfigured SchemaManager.
  before {
    SchemaManager.reset()
    assertResult(false) {SchemaManager.isSchemaRegistryConfigured}
  }

  it should "throw if no strategy is specified" in {
    val topic = "a_subject"
    val conf = Map[String,String]()
    val message1 = intercept[IllegalArgumentException] {SchemaManager.getSubjectName(topic, isKey = false, (null, null), conf)}
    val message2 = intercept[IllegalArgumentException] {SchemaManager.getSubjectName(topic, isKey = true, (null, null), conf)}
    assert(message1.getMessage.contains("not specified"))
    assert(message2.getMessage.contains("not specified"))
  }

  // TopicName strategy: subject is "<topic>-value" / "<topic>-key".
  it should "retrieve the correct subject name for TopicName strategy" in {
    val subject = "a_subject"
    val conf = Map(
      SchemaManager.PARAM_VALUE_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_NAME,
      SchemaManager.PARAM_KEY_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_NAME
    )
    assert(subject + "-value" == SchemaManager.getSubjectName(subject, isKey = false, (null, null), conf).get)
    assert(subject + "-key" == SchemaManager.getSubjectName(subject, isKey = true, (null, null), conf).get)
  }

  // RecordName strategy: subject is "<namespace>.<record name>".
  it should "retrieve the correct subject name for RecordName strategy" in {
    val subject = "a_subject"
    val conf = Map(
      SchemaManager.PARAM_VALUE_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.RECORD_NAME,
      SchemaManager.PARAM_KEY_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.RECORD_NAME
    )
    val schemaName = "schema_name"
    val schemaNamespace = "schema_namespace"
    assert(s"$schemaNamespace.$schemaName" == SchemaManager.getSubjectName(subject, isKey = false, (schemaName, schemaNamespace), conf).get)
    assert(s"$schemaNamespace.$schemaName" == SchemaManager.getSubjectName(subject, isKey = true, (schemaName, schemaNamespace), conf).get)
  }

  it should "retrieve None for RecordName strategy if schema is null" in {
    val subject = "a_subject"
    val conf = Map(
      SchemaManager.PARAM_VALUE_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.RECORD_NAME,
      SchemaManager.PARAM_KEY_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.RECORD_NAME
    )
    val schemaName = null
    val schemaNamespace = "namespace"
    assert(SchemaManager.getSubjectName(subject, isKey = false, (schemaName, schemaNamespace), conf).isEmpty)
    assert(SchemaManager.getSubjectName(subject, isKey = true, (schemaName, schemaNamespace), conf).isEmpty)
  }

  // TopicRecordName strategy: subject is "<topic>-<namespace>.<record name>".
  it should "retrieve the correct subject name for TopicRecordName strategy" in {
    val topic = "a_subject"
    val conf = Map(
      SchemaManager.PARAM_VALUE_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_RECORD_NAME,
      SchemaManager.PARAM_KEY_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_RECORD_NAME
    )
    val schemaName = "schema_name"
    val schemaNamespace = "schema_namespace"
    assert(s"$topic-$schemaNamespace.$schemaName" == SchemaManager.getSubjectName(topic, isKey = false, (schemaName, schemaNamespace), conf).get)
    assert(s"$topic-$schemaNamespace.$schemaName" == SchemaManager.getSubjectName(topic, isKey = true, (schemaName, schemaNamespace), conf).get)
  }

  it should "retrieve None for TopicRecordName strategy if schema is null" in {
    val subject = "a_subject"
    val conf = Map(
      SchemaManager.PARAM_VALUE_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_RECORD_NAME,
      SchemaManager.PARAM_KEY_SCHEMA_NAMING_STRATEGY -> SchemaManager.SchemaStorageNamingStrategies.TOPIC_RECORD_NAME
    )
    val schemaName = null
    val schemaNamespace = "namespace"
    assert(SchemaManager.getSubjectName(subject, isKey = false, (schemaName, schemaNamespace), conf).isEmpty)
    assert(SchemaManager.getSubjectName(subject, isKey = true, (schemaName, schemaNamespace), conf).isEmpty)
  }

  it should "not try to configure Schema Registry client if parameters are empty" in {
    SchemaManager.configureSchemaRegistry(Map[String,String]())
    assertResult(false) {SchemaManager.isSchemaRegistryConfigured} // should still be unconfigured
  }

  // The next three tests document the None-returning fallback behaviour of an
  // unconfigured client.
  it should "return None as schema if Schema Registry client is not configured" in {
    assertResult(None) {SchemaManager.getBySubjectAndId("subject", 1)}
  }

  it should "return None as latest version if Schema Registry client is not configured" in {
    assertResult(None) {SchemaManager.getLatestVersion("subject")}
  }

  it should "return None as registered schema id if Schema Registry client is not configured" in {
    assertResult(None) {SchemaManager.register(schema, "subject")}
  }

  // NOTE(review): the test name promises IllegalArgumentException for both
  // cases, but the null-URL case asserts ConfigException — consider renaming
  // the test or aligning the expected exception types.
  it should "throw IllegalArgumentException if cluster address is empty or null" in {
    val config1 = Map(SchemaManager.PARAM_SCHEMA_REGISTRY_URL -> "")
    val config2 = Map(SchemaManager.PARAM_SCHEMA_REGISTRY_URL -> null)
    assertThrows[IllegalArgumentException] {SchemaManager.configureSchemaRegistry(config1)}
    assertThrows[ConfigException] {SchemaManager.configureSchemaRegistry(config2)}
  }
}
#!/bin/bash
# Initialize PostgreSQL: enable uuid-ossp and create the dev/test/prod
# databases if they do not already exist.
#
# Fixed: array and variable expansions are now quoted so database names
# survive word splitting and globbing.
set -e

psql -U postgres -c "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\""

# NOTE(review): POSTGRES is assigned but unused in this visible portion of the
# script — presumably used further on; confirm before removing.
POSTGRES="psql --username ${POSTGRES_USER}"

echo "DB Setup"

DATABASES=("$POSTGRES_DEV_DB" "$POSTGRES_TEST_DB" "$POSTGRES_PROD_DB")
for i in "${DATABASES[@]}"; do
  echo "Creating database: ${i}"
  # Create the database only if it is absent (SELECT returns no row).
  psql -U postgres -tc "SELECT 1 FROM pg_database WHERE datname = '${i}'" | grep -q 1 || psql -U postgres -c "CREATE DATABASE \"${i}\""
done
|
<gh_stars>0
<<<<<<< HEAD
/*global describe,it */

// Shared fixtures and helpers for the util test suites below.
var fs = require('fs');
var assert = require('chai').assert;

var common = require('./helpers/common');
var adjustDateByOffset = common.adjustDateByOffset;
var binaryBuffer = common.binaryBuffer;
var BinaryStream = common.BinaryStream;
var DeadEndStream = common.DeadEndStream;

var ChecksumStream = require('../lib/util/ChecksumStream');
var crc32 = require('../lib/util/crc32');
var utils = require('../lib/util');

// 20000 bytes of deterministic binary data plus a fixed reference date.
var testBuffer = binaryBuffer(20000);

var testDate = new Date('Jan 03 2013 14:26:38 GMT');
var testDateEpoch = 1357223198;
var testDateOctal = 12071312436;
var testTimezoneOffset = testDate.getTimezoneOffset();
describe('utils', function() {
describe('ChecksumStream', function() {
it('should checksum data while transforming data', function(done) {
var binary = new BinaryStream(20000);
var checksum = new ChecksumStream();
var deadend = new DeadEndStream();
checksum.on('end', function() {
assert.equal(checksum.digest, -270675091);
done();
});
checksum.pipe(deadend);
binary.pipe(checksum);
});
it('should calculate data size while transforming data', function(done) {
var binary = new BinaryStream(20000);
var checksum = new ChecksumStream();
var deadend = new DeadEndStream();
checksum.on('end', function() {
assert.equal(checksum.rawSize, 20000);
done();
});
checksum.pipe(deadend);
binary.pipe(checksum);
});
});
describe('crc32', function() {
describe('crc32(data)', function() {
it('should initialize CRC32 instance based on data', function() {
var actual = crc32('testing checksum');
assert.equal(actual.crc, 323269802);
});
});
describe('#update(data)', function() {
it('should update CRC32 based on data', function() {
var actual = crc32().update('testing checksum');
assert.equal(actual.crc, 323269802);
});
});
describe('#digest()', function() {
it('should return digest of CRC32', function() {
var actual = crc32().update('testing checksum').digest();
assert.equal(actual, -323269803);
});
});
});
describe('index', function() {
describe('cleanBuffer(size)', function() {
var actual = utils.cleanBuffer(5);
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should have a length of size', function() {
assert.lengthOf(actual, 5);
});
it('should be filled with zeros', function() {
var actualArray = [];
for (var i = 0; i < actual.length ; i++) {
actualArray.push(actual[i]);
}
assert.deepEqual(actualArray, [0, 0, 0, 0, 0]);
});
});
describe('convertDateTimeEpoch(input)', function() {
it('should convert epoch input into an instance of Date', function() {
assert.deepEqual(utils.convertDateTimeEpoch(testDateEpoch), testDate);
});
});
describe('convertDateTimeOctal(input)', function() {
it('should convert octal input into an instance of Date', function() {
assert.deepEqual(utils.convertDateTimeOctal(testDateOctal), testDate);
});
});
describe('dateify(dateish)', function() {
it('should return an instance of Date', function() {
assert.instanceOf(utils.dateify(testDate), Date);
assert.instanceOf(utils.dateify('Jan 03 2013 14:26:38 GMT'), Date);
assert.instanceOf(utils.dateify(null), Date);
});
it('should passthrough an instance of Date', function() {
assert.deepEqual(utils.dateify(testDate), testDate);
});
it('should convert dateish string to an instance of Date', function() {
assert.deepEqual(utils.dateify('Jan 03 2013 14:26:38 GMT'), testDate);
});
});
describe('defaults(object, source, guard)', function() {
it('should default when object key is missing', function() {
var actual = utils.defaults({ value1: true }, {
value2: true
});
assert.deepEqual(actual, {
value1: true,
value2: true
});
});
});
describe('epochDateTime(date)', function() {
it('should convert date into its epoch representation', function() {
assert.equal(utils.epochDateTime(testDate), testDateEpoch);
});
});
describe('isStream(source)', function() {
it('should return true if source is a stream', function() {
assert.ok(utils.isStream(new DeadEndStream()));
});
});
describe('octalDateTime(date)', function() {
it('should convert date into its octal representation', function() {
assert.equal(utils.octalDateTime(testDate), testDateOctal);
});
});
describe('padNumber(number, bytes, base)', function() {
it('should pad number to specified bytes', function() {
assert.equal(utils.padNumber(0, 7), '0000000');
});
});
describe('repeat(pattern, count)', function() {
it('should repeat pattern by count', function() {
assert.equal(utils.repeat('x', 4), 'xxxx');
});
});
describe('sanitizePath(filepath)', function() {
it('should sanitize filepath', function() {
assert.equal(utils.sanitizePath('\\this/path//file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('/this/path/file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('c:\\this\\path\\file.txt'), 'c/this/path/file.txt');
});
});
describe('unixifyPath(filepath)', function() {
it('should unixify filepath', function() {
assert.equal(utils.unixifyPath('this\\path\\file.txt'), 'this/path/file.txt');
});
});
});
=======
/*global describe,it */
var fs = require('fs');
var assert = require('chai').assert;
var common = require('./helpers/common');
var adjustDateByOffset = common.adjustDateByOffset;
var binaryBuffer = common.binaryBuffer;
var BinaryStream = common.BinaryStream;
var DeadEndStream = common.DeadEndStream;
var ChecksumStream = require('../lib/util/ChecksumStream');
var crc32 = require('../lib/util/crc32');
var utils = require('../lib/util');
var testBuffer = binaryBuffer(20000);
var testDate = new Date('Jan 03 2013 14:26:38 GMT');
var testDateEpoch = 1357223198;
var testDateOctal = 12071312436;
var testTimezoneOffset = testDate.getTimezoneOffset();
describe('utils', function() {
describe('ChecksumStream', function() {
it('should checksum data while transforming data', function(done) {
var binary = new BinaryStream(20000);
var checksum = new ChecksumStream();
var deadend = new DeadEndStream();
checksum.on('end', function() {
assert.equal(checksum.digest, -270675091);
done();
});
checksum.pipe(deadend);
binary.pipe(checksum);
});
it('should calculate data size while transforming data', function(done) {
var binary = new BinaryStream(20000);
var checksum = new ChecksumStream();
var deadend = new DeadEndStream();
checksum.on('end', function() {
assert.equal(checksum.rawSize, 20000);
done();
});
checksum.pipe(deadend);
binary.pipe(checksum);
});
});
describe('crc32', function() {
describe('crc32(data)', function() {
it('should initialize CRC32 instance based on data', function() {
var actual = crc32('testing checksum');
assert.equal(actual.crc, 323269802);
});
});
describe('#update(data)', function() {
it('should update CRC32 based on data', function() {
var actual = crc32().update('testing checksum');
assert.equal(actual.crc, 323269802);
});
});
describe('#digest()', function() {
it('should return digest of CRC32', function() {
var actual = crc32().update('testing checksum').digest();
assert.equal(actual, -323269803);
});
});
});
describe('index', function() {
describe('cleanBuffer(size)', function() {
var actual = utils.cleanBuffer(5);
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should have a length of size', function() {
assert.lengthOf(actual, 5);
});
it('should be filled with zeros', function() {
var actualArray = [];
for (var i = 0; i < actual.length ; i++) {
actualArray.push(actual[i]);
}
assert.deepEqual(actualArray, [0, 0, 0, 0, 0]);
});
});
describe('convertDateTimeEpoch(input)', function() {
it('should convert epoch input into an instance of Date', function() {
assert.deepEqual(utils.convertDateTimeEpoch(testDateEpoch), testDate);
});
});
describe('convertDateTimeOctal(input)', function() {
it('should convert octal input into an instance of Date', function() {
assert.deepEqual(utils.convertDateTimeOctal(testDateOctal), testDate);
});
});
describe('dateify(dateish)', function() {
it('should return an instance of Date', function() {
assert.instanceOf(utils.dateify(testDate), Date);
assert.instanceOf(utils.dateify('Jan 03 2013 14:26:38 GMT'), Date);
assert.instanceOf(utils.dateify(null), Date);
});
it('should passthrough an instance of Date', function() {
assert.deepEqual(utils.dateify(testDate), testDate);
});
it('should convert dateish string to an instance of Date', function() {
assert.deepEqual(utils.dateify('Jan 03 2013 14:26:38 GMT'), testDate);
});
});
describe('defaults(object, source, guard)', function() {
it('should default when object key is missing', function() {
var actual = utils.defaults({ value1: true }, {
value2: true
});
assert.deepEqual(actual, {
value1: true,
value2: true
});
});
});
describe('epochDateTime(date)', function() {
it('should convert date into its epoch representation', function() {
assert.equal(utils.epochDateTime(testDate), testDateEpoch);
});
});
describe('isStream(source)', function() {
it('should return true if source is a stream', function() {
assert.ok(utils.isStream(new DeadEndStream()));
});
});
describe('octalDateTime(date)', function() {
it('should convert date into its octal representation', function() {
assert.equal(utils.octalDateTime(testDate), testDateOctal);
});
});
describe('padNumber(number, bytes, base)', function() {
it('should pad number to specified bytes', function() {
assert.equal(utils.padNumber(0, 7), '0000000');
});
});
describe('repeat(pattern, count)', function() {
it('should repeat pattern by count', function() {
assert.equal(utils.repeat('x', 4), 'xxxx');
});
});
describe('sanitizePath(filepath)', function() {
it('should sanitize filepath', function() {
assert.equal(utils.sanitizePath('\\this/path//file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('/this/path/file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('c:\\this\\path\\file.txt'), 'c/this/path/file.txt');
});
});
describe('unixifyPath(filepath)', function() {
it('should unixify filepath', function() {
assert.equal(utils.unixifyPath('this\\path\\file.txt'), 'this/path/file.txt');
});
});
});
>>>>>>> master
}); |
import { StatusBar } from 'expo-status-bar';
import React, {useState} from 'react';
import Home from './components/Home';
import { Container } from "./styles/appStyles";
import AsyncStorage from '@react-native-async-storage/async-storage';
import AppLoading from 'expo-app-loading';
export default function App() {
const [ready, setReady] = useState(false);
const initialWorkRequired = []
const [study, setStudy] = useState(initialWorkRequired);
const LoadStudy = () => {
AsyncStorage.getItem("storedStudy").then(data => {
if (data !== null){
setStudy(JSON.parse(data))
}
}).catch((error) => (console.log(error)));
}
if (!ready) {
return(
<AppLoading
startAsync = {LoadStudy}
onFinish = {() => setReady(true)}
onError = {console.warn}
/>
)
}
return (
<Container>
<Home
study = {study}
setStudy = {setStudy}
/>
<StatusBar style="light" />
</Container>
);
}
|
// Code generated from e:\source\prosr\src\parser\Prosr1.g4 by ANTLR 4.8. DO NOT EDIT.
package parser
import (
"fmt"
"unicode"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// Suppress unused import error
var _ = fmt.Printf
var _ = unicode.IsLetter
var serializedLexerAtn = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 30, 191,
8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7,
9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12,
4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4,
18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23,
9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9,
28, 4, 29, 9, 29, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3,
3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7,
3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 9,
3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3,
12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 15,
3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 17, 3,
17, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18,
3, 19, 3, 19, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3,
23, 3, 23, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24,
3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3,
25, 5, 25, 167, 10, 25, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 27, 3, 27,
6, 27, 176, 10, 27, 13, 27, 14, 27, 177, 3, 28, 6, 28, 181, 10, 28, 13,
28, 14, 28, 182, 3, 29, 6, 29, 186, 10, 29, 13, 29, 14, 29, 187, 3, 29,
3, 29, 2, 2, 30, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19,
11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37,
20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55,
29, 57, 30, 3, 2, 6, 4, 2, 67, 92, 99, 124, 6, 2, 50, 59, 67, 92, 97, 97,
99, 124, 3, 2, 50, 59, 5, 2, 11, 12, 15, 15, 34, 34, 2, 194, 2, 3, 3, 2,
2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2,
2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3,
2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27,
3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2,
35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2,
2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2,
2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2,
2, 2, 3, 59, 3, 2, 2, 2, 5, 66, 3, 2, 2, 2, 7, 68, 3, 2, 2, 2, 9, 70, 3,
2, 2, 2, 11, 77, 3, 2, 2, 2, 13, 79, 3, 2, 2, 2, 15, 87, 3, 2, 2, 2, 17,
91, 3, 2, 2, 2, 19, 93, 3, 2, 2, 2, 21, 95, 3, 2, 2, 2, 23, 102, 3, 2,
2, 2, 25, 104, 3, 2, 2, 2, 27, 106, 3, 2, 2, 2, 29, 112, 3, 2, 2, 2, 31,
115, 3, 2, 2, 2, 33, 122, 3, 2, 2, 2, 35, 126, 3, 2, 2, 2, 37, 134, 3,
2, 2, 2, 39, 138, 3, 2, 2, 2, 41, 140, 3, 2, 2, 2, 43, 142, 3, 2, 2, 2,
45, 144, 3, 2, 2, 2, 47, 146, 3, 2, 2, 2, 49, 166, 3, 2, 2, 2, 51, 168,
3, 2, 2, 2, 53, 173, 3, 2, 2, 2, 55, 180, 3, 2, 2, 2, 57, 185, 3, 2, 2,
2, 59, 60, 7, 117, 2, 2, 60, 61, 7, 123, 2, 2, 61, 62, 7, 112, 2, 2, 62,
63, 7, 118, 2, 2, 63, 64, 7, 99, 2, 2, 64, 65, 7, 122, 2, 2, 65, 4, 3,
2, 2, 2, 66, 67, 7, 63, 2, 2, 67, 6, 3, 2, 2, 2, 68, 69, 7, 36, 2, 2, 69,
8, 3, 2, 2, 2, 70, 71, 7, 114, 2, 2, 71, 72, 7, 116, 2, 2, 72, 73, 7, 113,
2, 2, 73, 74, 7, 117, 2, 2, 74, 75, 7, 116, 2, 2, 75, 76, 7, 51, 2, 2,
76, 10, 3, 2, 2, 2, 77, 78, 7, 61, 2, 2, 78, 12, 3, 2, 2, 2, 79, 80, 7,
114, 2, 2, 80, 81, 7, 99, 2, 2, 81, 82, 7, 101, 2, 2, 82, 83, 7, 109, 2,
2, 83, 84, 7, 99, 2, 2, 84, 85, 7, 105, 2, 2, 85, 86, 7, 103, 2, 2, 86,
14, 3, 2, 2, 2, 87, 88, 7, 106, 2, 2, 88, 89, 7, 119, 2, 2, 89, 90, 7,
100, 2, 2, 90, 16, 3, 2, 2, 2, 91, 92, 7, 125, 2, 2, 92, 18, 3, 2, 2, 2,
93, 94, 7, 127, 2, 2, 94, 20, 3, 2, 2, 2, 95, 96, 7, 99, 2, 2, 96, 97,
7, 101, 2, 2, 97, 98, 7, 118, 2, 2, 98, 99, 7, 107, 2, 2, 99, 100, 7, 113,
2, 2, 100, 101, 7, 112, 2, 2, 101, 22, 3, 2, 2, 2, 102, 103, 7, 42, 2,
2, 103, 24, 3, 2, 2, 2, 104, 105, 7, 43, 2, 2, 105, 26, 3, 2, 2, 2, 106,
107, 7, 101, 2, 2, 107, 108, 7, 99, 2, 2, 108, 109, 7, 110, 2, 2, 109,
110, 7, 110, 2, 2, 110, 111, 7, 117, 2, 2, 111, 28, 3, 2, 2, 2, 112, 113,
7, 113, 2, 2, 113, 114, 7, 112, 2, 2, 114, 30, 3, 2, 2, 2, 115, 116, 7,
101, 2, 2, 116, 117, 7, 99, 2, 2, 117, 118, 7, 110, 2, 2, 118, 119, 7,
110, 2, 2, 119, 120, 7, 103, 2, 2, 120, 121, 7, 116, 2, 2, 121, 32, 3,
2, 2, 2, 122, 123, 7, 99, 2, 2, 123, 124, 7, 110, 2, 2, 124, 125, 7, 110,
2, 2, 125, 34, 3, 2, 2, 2, 126, 127, 7, 111, 2, 2, 127, 128, 7, 103, 2,
2, 128, 129, 7, 117, 2, 2, 129, 130, 7, 117, 2, 2, 130, 131, 7, 99, 2,
2, 131, 132, 7, 105, 2, 2, 132, 133, 7, 103, 2, 2, 133, 36, 3, 2, 2, 2,
134, 135, 7, 111, 2, 2, 135, 136, 7, 99, 2, 2, 136, 137, 7, 114, 2, 2,
137, 38, 3, 2, 2, 2, 138, 139, 7, 62, 2, 2, 139, 40, 3, 2, 2, 2, 140, 141,
7, 46, 2, 2, 141, 42, 3, 2, 2, 2, 142, 143, 7, 64, 2, 2, 143, 44, 3, 2,
2, 2, 144, 145, 7, 48, 2, 2, 145, 46, 3, 2, 2, 2, 146, 147, 7, 116, 2,
2, 147, 148, 7, 103, 2, 2, 148, 149, 7, 114, 2, 2, 149, 150, 7, 103, 2,
2, 150, 151, 7, 99, 2, 2, 151, 152, 7, 118, 2, 2, 152, 153, 7, 103, 2,
2, 153, 154, 7, 102, 2, 2, 154, 48, 3, 2, 2, 2, 155, 156, 7, 107, 2, 2,
156, 157, 7, 112, 2, 2, 157, 158, 7, 118, 2, 2, 158, 159, 7, 53, 2, 2,
159, 167, 7, 52, 2, 2, 160, 161, 7, 117, 2, 2, 161, 162, 7, 118, 2, 2,
162, 163, 7, 116, 2, 2, 163, 164, 7, 107, 2, 2, 164, 165, 7, 112, 2, 2,
165, 167, 7, 105, 2, 2, 166, 155, 3, 2, 2, 2, 166, 160, 3, 2, 2, 2, 167,
50, 3, 2, 2, 2, 168, 169, 7, 100, 2, 2, 169, 170, 7, 113, 2, 2, 170, 171,
7, 113, 2, 2, 171, 172, 7, 110, 2, 2, 172, 52, 3, 2, 2, 2, 173, 175, 9,
2, 2, 2, 174, 176, 9, 3, 2, 2, 175, 174, 3, 2, 2, 2, 176, 177, 3, 2, 2,
2, 177, 175, 3, 2, 2, 2, 177, 178, 3, 2, 2, 2, 178, 54, 3, 2, 2, 2, 179,
181, 9, 4, 2, 2, 180, 179, 3, 2, 2, 2, 181, 182, 3, 2, 2, 2, 182, 180,
3, 2, 2, 2, 182, 183, 3, 2, 2, 2, 183, 56, 3, 2, 2, 2, 184, 186, 9, 5,
2, 2, 185, 184, 3, 2, 2, 2, 186, 187, 3, 2, 2, 2, 187, 185, 3, 2, 2, 2,
187, 188, 3, 2, 2, 2, 188, 189, 3, 2, 2, 2, 189, 190, 8, 29, 2, 2, 190,
58, 3, 2, 2, 2, 7, 2, 166, 177, 182, 187, 3, 8, 2, 2,
}
var lexerDeserializer = antlr.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
var lexerChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
var lexerModeNames = []string{
"DEFAULT_MODE",
}
var lexerLiteralNames = []string{
"", "'syntax'", "'='", "'\"'", "'prosr1'", "';'", "'package'", "'hub'",
"'{'", "'}'", "'action'", "'('", "')'", "'calls'", "'on'", "'caller'",
"'all'", "'message'", "'map'", "'<'", "','", "'>'", "'.'", "'repeated'",
"", "'bool'",
}
var lexerSymbolicNames = []string{
"", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "",
"", "", "", "", "", "REPEATED", "KEYTYPE", "TYPE", "IDENT", "NUMBER", "WHITESPACE",
}
var lexerRuleNames = []string{
"T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8",
"T__9", "T__10", "T__11", "T__12", "T__13", "T__14", "T__15", "T__16",
"T__17", "T__18", "T__19", "T__20", "T__21", "REPEATED", "KEYTYPE", "TYPE",
"IDENT", "NUMBER", "WHITESPACE",
}
// Prosr1Lexer is the lexer generated by ANTLR 4.8 from Prosr1.g4.
// NOTE(review): this is generated code ("DO NOT EDIT" header) — prefer
// regenerating from the grammar over hand-editing.
type Prosr1Lexer struct {
	*antlr.BaseLexer
	channelNames []string
	modeNames    []string
	// TODO: EOF string
}

// lexerDecisionToDFA holds one DFA per ATN decision state; it is shared by
// every lexer instance and populated once by init below.
var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState))

// init eagerly builds the DFA cache for every decision in the lexer ATN.
func init() {
	for index, ds := range lexerAtn.DecisionToState {
		lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
	}
}

// NewProsr1Lexer returns a lexer for the Prosr1 grammar reading from input,
// wiring the shared ATN/DFA tables and name metadata into a BaseLexer.
func NewProsr1Lexer(input antlr.CharStream) *Prosr1Lexer {
	l := new(Prosr1Lexer)

	l.BaseLexer = antlr.NewBaseLexer(input)
	l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())

	l.channelNames = lexerChannelNames
	l.modeNames = lexerModeNames
	l.RuleNames = lexerRuleNames
	l.LiteralNames = lexerLiteralNames
	l.SymbolicNames = lexerSymbolicNames
	l.GrammarFileName = "Prosr1.g4"
	// TODO: l.EOF = antlr.TokenEOF

	return l
}
// Prosr1Lexer tokens.
const (
Prosr1LexerT__0 = 1
Prosr1LexerT__1 = 2
Prosr1LexerT__2 = 3
Prosr1LexerT__3 = 4
Prosr1LexerT__4 = 5
Prosr1LexerT__5 = 6
Prosr1LexerT__6 = 7
Prosr1LexerT__7 = 8
Prosr1LexerT__8 = 9
Prosr1LexerT__9 = 10
Prosr1LexerT__10 = 11
Prosr1LexerT__11 = 12
Prosr1LexerT__12 = 13
Prosr1LexerT__13 = 14
Prosr1LexerT__14 = 15
Prosr1LexerT__15 = 16
Prosr1LexerT__16 = 17
Prosr1LexerT__17 = 18
Prosr1LexerT__18 = 19
Prosr1LexerT__19 = 20
Prosr1LexerT__20 = 21
Prosr1LexerT__21 = 22
Prosr1LexerREPEATED = 23
Prosr1LexerKEYTYPE = 24
Prosr1LexerTYPE = 25
Prosr1LexerIDENT = 26
Prosr1LexerNUMBER = 27
Prosr1LexerWHITESPACE = 28
)
|
package no.mnemonic.commons.container;
/**
 * Interface for plugins which need to be notified when a container is starting or stopping.
 * Any component defined in a container implementing this interface will be picked up before the container initializes.
 */
public interface ContainerListener {

  /**
   * Notifies listeners that this container has been started.
   * It is invoked after all components in this container are started.
   *
   * @param container the container which has started
   */
  void notifyContainerStarted(ComponentContainer container);

  /**
   * Notifies listeners that this container will be destroyed. It is invoked before any
   * components in this container are shut down.
   * If this operation blocks, it will block the progress of shutting down the components.
   *
   * @param container the container which will be destroyed
   */
  void notifyContainerDestroying(ComponentContainer container);

  /**
   * Notifies listeners that this container has been destroyed.
   * It is invoked after all components in this container are shut down.
   *
   * @param container the container which has been destroyed
   */
  void notifyContainerDestroyed(ComponentContainer container);

  /**
   * Notifies listeners that a new subcontainer is about to initialize.
   *
   * @param parent       reference to the calling container
   * @param subcontainer reference to the initializing subcontainer
   */
  void notifyInitializingSubcontainer(ComponentContainer parent, ComponentContainer subcontainer);

}
|
import math


def circle_area(radius: float) -> float:
    """Return the area of a circle with the given radius (pi * r**2)."""
    return math.pi * radius ** 2


def main() -> None:
    """Prompt for a radius and print the area with 4 decimal places."""
    radius = float(input("Enter the radius of the circle: "))
    print('A={:.4f}'.format(circle_area(radius)))


# Guard the interactive part so importing this module has no side effects;
# running it as a script behaves exactly as before.
if __name__ == '__main__':
    main()
package org.jeecg.modules.tenant.service.impl;
import org.jeecg.modules.tenant.entity.TenantProfile;
import org.jeecg.modules.tenant.mapper.TenantProfileMapper;
import org.jeecg.modules.tenant.service.ITenantProfileService;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
/**
 * @Description: Tenant profile service implementation (original: 住户配置,
 *               "resident/tenant configuration"). All CRUD behaviour is
 *               inherited from the MyBatis-Plus {@code ServiceImpl} base class.
 * @Author: jeecg-boot
 * @Date:   2022-01-21
 * @Version: V1.0
 */
@Service
public class TenantProfileServiceImpl extends ServiceImpl<TenantProfileMapper, TenantProfile> implements ITenantProfileService {

}
|
from django.utils.safestring import mark_safe
class Game:
    """Lightweight helper wrapping a game's id and name for admin display.

    Used to render a link to the game's Django admin change page.
    """

    def __init__(self, id, name):
        # `id` shadows the builtin, but the parameter name is kept for
        # interface compatibility with existing callers.
        self.id = id
        self.name = name

    def generate_game_link(self):
        """Return an HTML anchor linking to this game's admin change page.

        SECURITY FIX: the name is HTML-escaped before the string is marked
        safe, so an untrusted game name can no longer inject markup (XSS)
        into the rendered page. `self.id` is interpolated unescaped —
        presumably a numeric primary key; confirm against the model.
        """
        from html import escape  # stdlib; local import keeps module deps unchanged

        url = f'../../../game/{self.id}/change'
        return mark_safe(f'<a href="{url}">{escape(self.name)}</a>')
#!/bin/bash -e
# Install Clojure 1.3.0 and a `clj` wrapper script.
echo "================= Installing Clojure-1.3.0 ==================="

# Use https: repo1.maven.org redirects/requires TLS.
sudo wget https://repo1.maven.org/maven2/org/clojure/clojure/1.3.0/clojure-1.3.0.jar
# -p: do not fail if ~/lib already exists (the bare mkdir aborted the
# install under `bash -e` on re-runs).
mkdir -p "$HOME/lib" && mv clojure-1.3.0.jar "$HOME/lib/"

# BUG FIX: quote the heredoc delimiter. With an unquoted EOF, $#, $HOME,
# $1 and $@ were expanded by THIS installer at write time (baking the
# installer's own arguments into the wrapper); quoting defers expansion
# until `clj` itself runs.
cat >/usr/local/bin/clj <<'EOF'
#!/bin/bash
if [[ $# -eq 0 ]]; then
java -server -cp $HOME/lib/clojure-1.3.0.jar clojure.main
else
java -server -cp $HOME/lib/clojure-1.3.0.jar clojure.main $1 -- "$@"
fi
EOF
sudo chmod a+x /usr/local/bin/clj
|
import { TestBed } from '@angular/core/testing';
import { TrainingRoomService } from './training-room.service';
// Unit tests for TrainingRoomService.
describe('TrainingRoomService', () => {
  let service: TrainingRoomService;

  beforeEach(() => {
    // Fresh testing module per spec; no providers listed here — presumably
    // the service uses providedIn: 'root', verify in the service decorator.
    TestBed.configureTestingModule({});
    service = TestBed.inject(TrainingRoomService);
  });

  // Smoke test: the service can be constructed by the injector.
  it('should be created', () => {
    expect(service).toBeTruthy();
  });
});
|
'use strict'
// Express + Passport (local strategy) authentication server backed by MongoDB.
// NOTE: a stray "<gh_stars>0" artifact line (invalid JS) was removed.

const port = process.env.PORT || 8000

const express = require('express')
const bodyParser = require('body-parser')
const cookieParser = require('cookie-parser')
const expressSession = require('express-session')
const passport = require('passport')
const mongoose = require('mongoose')
const path = require('path')
const http = require('http')

const auth = require('./auth')
const config = require('./config')
const lib = require('./lib')

const app = express()
const server = http.createServer(app)

// NOTE(review): no connection error handling here; consider listening for
// mongoose connection 'error' events.
mongoose.connect('mongodb://localhost/passportauthentication')

app.use(express.static('public'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({ extended: true }))
app.use(cookieParser())
app.use(expressSession({
  secret: config.secret,
  resave: true,
  saveUninitialized: true
}))
app.use(passport.initialize())
app.use(passport.session())

passport.use(auth.localStrategy)
passport.serializeUser(auth.serializeUser)
passport.deserializeUser(auth.deserializeUser)

// Register a new user.
app.post('/signup', (req, res) => {
  lib.saveUser(req.body, (err, created) => {
    if (err) return res.status(500).send(err)
    res.status(200).send(created)
  })
})

// Authenticate with the local strategy; bounce back to the login page on failure.
app.post('/login', passport.authenticate('local', {
  failureRedirect: '/login.html'
}), (req, res) => {
  console.log('login:req.isAuthenticated()', req.isAuthenticated())
  res.redirect('/home')
})

app.get('/home', (req, res) => {
  console.log('home:req.isAuthenticated()', req.isAuthenticated())
  // res.sendfile() is deprecated in Express 4; sendFile requires an absolute
  // path, which path.join(__dirname, ...) provides.
  res.sendFile(path.join(__dirname, 'public', 'home.html'))
})

// The 'listening' event carries no arguments, so the original
// onListening(err) error branch was dead code; listen failures are delivered
// via the 'error' event instead.
server.on('listening', () => {
  console.log(`Server running on port ${port}`)
})
server.on('error', (err) => {
  console.error(err)
  process.exit(1)
})

server.listen(port)
#!/bin/bash
# Watchdog for its own process group: once the process-group leader
# (PID == PGID) disappears, kill every remaining member of the group —
# first with the default signal, then with SIGKILL on the next pass.

# Ignore SIGHUP so the watchdog survives its controlling terminal going away.
trap "" SIGHUP
# Run kill_and_exit on any exit path so group members are not left behind.
trap kill_and_exit EXIT

# This script's process-group id; the nested echo trims ps's padding.
PGID=$(echo $(ps -p $$ o pgid h))
# Set to true after the first (graceful) kill pass.
KILL=false

# EXIT trap: force-kill remaining group members, then exit.
kill_and_exit()
{
	local ret=$?
	echo Caught EXIT
	do_kill -9
	exit $?
}

# Show the ps header line plus every process in our group.
print_ps()
{
	ps -eO pgid | grep -E 'PGID|'" $PGID"
}

# Kill all live members of the group except this script itself.
# Extra arguments (e.g. -9) are forwarded verbatim to kill.
do_kill()
{
	print_ps
	PIDS=
	# Select processes whose PGID column matches ours, excluding $$.
	for p in $(ps -e h o pid,pgid | grep -E ' +'${PGID}'$' | awk '{print $1}' | grep -v $$); do
		if [ -e /proc/$p ]; then
			PIDS="$PIDS $p"
		fi
	done
	if [ -n "$PIDS" ]; then
		print_ps
		echo PID $PGID has died doing kill "$@" $PIDS
		kill "$@" $PIDS
	fi
}

# Poll every 2 seconds until the group leader's /proc entry vanishes, then
# run one graceful kill pass followed by a SIGKILL pass on the iteration after.
main()
{
	while sleep 2; do
		if [ -e /proc/${PGID} ]; then
			#print_ps
			continue
		fi
		if [ "$KILL" = "false" ]; then
			do_kill
			KILL=true
		else
			do_kill -9
			break
		fi
	done
}

main
|
# DO NOT use these commands, the .s file is manually modified
# because the syscall() method has a different calling convention than the syscall itself
#gcc -S exploit_basic.c -o exploit_basic.s

# fix syscall
#sed -i 's/call.*syscall@PLT/syscall/' exploit_basic.s

# assemble
as exploit_basic.s -c -o exploit_basic.o

# extract code: copy only the .text section out as raw machine-code bytes
objcopy -O binary -j .text exploit_basic.o exploit_basic.bin

# generate image: P5 is the binary PGM header (200x200 pixels, maxval 255);
# the shellcode bytes then become the start of the pixel data
echo -en "P5\n200 200 255\n" > exploit_basic.ppm
cat exploit_basic.bin >> exploit_basic.ppm

# fill pixels (more than we need, but doesnt matter)
dd if=/dev/zero bs=1 count=40000 >> exploit_basic.ppm
|
<gh_stars>0
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load, load_module, reload_module, get_stdout
from functools import reduce
import os
import textwrap
from random import choice, randint
exercise = 'src.city_bikes'
def f(d):
    """Render an iterable of strings as one newline-separated string."""
    joined = "\n".join(d)
    return joined
function1 = "get_station_data"
function2 = "greatest_distance"
import os
from shutil import copyfile
testdata = [f"stations{i}.csv" for i in range(1,10)]
def close(a, b):
    """Return True when a and b differ by less than the 1e-3 tolerance."""
    difference = abs(a - b)
    return difference < 0.001
@points('6.city_bikes_part_2')
class CityBikesPart2Test(unittest.TestCase):
    """TMC checks for the student's greatest_distance(stations) function."""

    @classmethod
    def setUpClass(cls):
        # Any input() call while the student module loads is an error here.
        with patch('builtins.input', side_effect=[AssertionError("Input was not expected")]):
            # Copy CSV fixtures from test/ into the working directory so the
            # student code can open them by bare filename.
            for filename in testdata:
                data_file = os.path.join('test', filename)
                copyfile(data_file, filename)
            cls.module = load_module(exercise, 'fi')

    @classmethod
    def tearDownClass(cls):
        # Remove the copied fixtures.
        for filename in testdata:
            os.remove(filename)

    def test_1_greatest_distance_exicsts(self):
        # NOTE(review): method name has a typo ("exicsts"); renaming would
        # change the test id TMC reports, so it is only flagged here.
        try:
            from src.city_bikes import greatest_distance
        except:
            self.fail('Your code should contain function greatest_distance(stations: dict)')
        try:
            code = """stations = get_station_data("stations1.csv")
greatest_distance(stations)"""
            get_station_data = load(exercise, function1, 'fi')
            stations = get_station_data("stations1.csv")
            val = greatest_distance(stations)
        except Exception as ioe:
            self.assertTrue(False, f'Function {code} threw an error\n{ioe}')

    def test_2_greatest_distance_return_type(self):
        # The function must return a (station_name, station_name, distance) tuple.
        code = """stations = get_station_data("stations1.csv")
greatest_distance(stations)"""
        get_station_data = load(exercise, function1, 'fi')
        greatest_distance = load(exercise, function2, 'fi')
        stations = get_station_data("stations1.csv")
        val = greatest_distance(stations)
        # `taip` is the type name with the "<class '...'>" wrapper stripped,
        # used only for readable failure messages.
        taip = str(type(val)).replace("<class '", '').replace("'>","")
        self.assertTrue(type(val) == tuple, f"Function {function2} should return a tuple, now it returns {val} which is of type {taip}.")
        taip = str(type(val[0])).replace("<class '", '').replace("'>","")
        self.assertTrue(type(val[0]) == str, f"The first item in a tuple returned by {function2} should be a string, now the type is {taip}\nReturn value was {val}")
        taip = str(type(val[1])).replace("<class '", '').replace("'>","")
        self.assertTrue(type(val[1]) == str, f"The second item in a tuple returned by {function2} should be a string, now the type is {taip}\nReturn value was {val}")
        taip = str(type(val[2])).replace("<class '", '').replace("'>","")
        self.assertTrue(type(val[2]) == float or type(val[2]) == int , f"The third item in a tuple returned by {function2} should be a float, now the type is {taip}\nReturn value was {val}")

    def test_3_greatest_distance_works(self):
        # Expected (station_a, station_b, distance) per fixture file; the
        # student's implementation may return the two stations in either order.
        for filename, answer in [
            ("stations1.csv", ("Laivasillankatu", "Hietalahdentori", 1.478708873076181)),
            ("stations2.csv", ("Puistokaari", "Karhulantie", 14.817410024304905)),
            ("stations3.csv", ("Puotinkylan kartano", "Friisilanaukio", 21.971314423058754)),
            ("stations4.csv", ("Kaivopuisto", "Linnuntie", 11.569340603194116)),
            ("stations5.csv", ("Puotinkylan kartano", "Etuniementie", 21.8490934564622)),
            ("stations6.csv", ("Karhulantie", "Haukilahdenranta", 19.566890288851994)),
            ("stations7.csv", ("Karhulantie", "Tiistinkallio", 21.848686409979116)),
            ("stations8.csv", ("<NAME>", "Etuniementie", 21.8490934564622)),
            ("stations9.csv", ("Voikukantie", "Friisilanaukio", 20.834906297083204)),
        ]:
            # NOTE(review): "(unknown)" below looks like a redaction artifact —
            # the message presumably meant to interpolate {filename}; confirm.
            code = f'stations = get_station_data("(unknown)")\ngreatest_distance(stations)'
            get_station_data = load(exercise, function1, 'fi')
            greatest_distance = load(exercise, function2, 'fi')
            stations = get_station_data(filename)
            a1, a2, et = greatest_distance(stations)
            pal = (a1, a2, et)
            ma1, ma2, met = answer
            self.assertTrue((a1 == ma1 and a2 == ma2) or (a2 == ma1 and a1 == ma2), f'Answer is wrong when executed code is \n{code}\nLongest distance is between stations {ma1} and {ma2}\nYour function returns {pal}')
            self.assertTrue(close(et, met), f'Answer is wrong when executed code is\n{code}\nLongest distance is {met}\nYour function returns {pal}')
if __name__ == '__main__':
unittest.main()
|
package parser
import (
"excelc/errors"
"strings"
)
// cfgFilter is the XML-mapped filter configuration: which sheet to read and
// which "makers" the filter applies to.
type cfgFilter struct {
	Makers string `xml:"makers,attr"`
	Sheet  string `xml:"sheet,attr"`
}
// parse validates the filter configuration in place. It trims and requires a
// non-empty Sheet, then reports whether the (trimmed) Makers value names an
// effective maker. The error is non-nil only when Sheet is empty.
func (cfg *cfgFilter) parse() (bool, error) {
	cfg.Sheet = strings.TrimSpace(cfg.Sheet)
	if len(cfg.Sheet) == 0 {
		return false, errors.ErrConfigFieldEmpty(cfg, "sheet")
	}

	effective := isEffectMaker(strings.TrimSpace(cfg.Makers))
	return effective, nil
}
|
#!/bin/bash
# Free disk space on / and report how many MB were reclaimed.

# Free space on / in MB before cleanup (4th column of df -Pm, data row).
before=$(df / -Pm | awk 'NR==2{print $4}')

# Clear the local APT package cache: removes everything but the lock file
# from /var/cache/apt/archives/ and /var/cache/apt/archives/partial.
apt-get clean

rm -rf /tmp/*

# Rotate and drop journald logs, if journald is available.
# FIX: redirect the command -v probe — it printed the binary path to stdout,
# polluting the script's output.
if command -v journalctl >/dev/null 2>&1; then
    journalctl --rotate
    journalctl --vacuum-time=1s
fi

# Delete all compressed (.gz) and rotated (.N) log files.
find /var/log -type f -regex ".*\.gz$" -delete
find /var/log -type f -regex ".*\.[0-9]$" -delete

# Truncate remaining log files in place (preserves ownership/permissions).
find /var/log/ -type f -exec cp /dev/null {} \;

# Free space after cleanup.
after=$(df / -Pm | awk 'NR==2{print $4}')

# Report: a positive Delta means space was freed.
echo "Before: $before MB"
echo "After : $after MB"
echo "Delta : $((after - before)) MB"
#!/bin/bash

# Recreate every directory passed as an argument: remove it if it already
# exists, then create it fresh, reporting per-directory progress. Failures
# are reported and the remaining directories are still processed.
manage_directories() {
    local target
    while [ "$#" -gt 0 ]; do
        target="$1"
        shift
        if [ -d "$target" ]; then
            rm -rf "$target" || { echo "Error: Failed to remove $target directory"; continue; }
        fi
        mkdir "$target" || { echo "Error: Failed to create $target directory"; continue; }
        echo "Directory $target managed successfully"
    done
}

# Directories to (re)create
directories=("dir1" "dir2" "dir3")

manage_directories "${directories[@]}"
# Spec bootstrap: loads RSpec and wires coverage reporting to Coveralls.
require 'rspec'
require 'coveralls'
# Start collecting coverage and submit it to Coveralls after the run.
Coveralls.wear!
# Global RSpec configuration.
RSpec.configure do |config|
  config.color = true # colourised spec output
end
|
import os.path
import typing
import subprocess
import base64
from django.conf import settings
from django.core.files.storage import default_storage
from django.http import HttpResponse
from django.urls.base import resolve
from django.views.decorators.csrf import csrf_exempt
from drf_yasg.openapi import Parameter
from drf_yasg.utils import swagger_auto_schema
from rest_framework.filters import BaseFilterBackend
from rest_framework.response import Response
from rest_framework.schemas import coreapi
from rest_framework.views import APIView, Request
from rest_framework import status
from rest_framework.exceptions import ValidationError
from ..drf_auth_override import CsrfExemptSessionAuthentication
from ..utils import xresponse, get_pretty_logger, file_hash, ErrorCode, source_hash, encode_base64
from ..exceptions import ParamError
from ..serializers import ImgSerializer
from ..views import schema_utils
# Module-level logger shared by every view in this file.
logger = get_pretty_logger('api:views')
class RequestImgFilterBackend(BaseFilterBackend):
    """Filter backend stub: exposes no filter parameters in the schema."""

    def get_schema_fields(self, view):
        # No query-parameter fields are contributed for image requests.
        return []
def validate_payload(serializer_class, payload: dict) -> dict:
    """Validate an upload payload and persist its file to default storage.

    Returns the serializer's validated data augmented with:
      * ``filename`` -- the name the file was actually stored under
      * ``storage``  -- the storage backend's base location

    Raises the serializer's validation error when the payload is invalid
    (``is_valid(raise_exception=True)``).
    """
    img_serializer = serializer_class(data=payload)
    img_serializer.is_valid(raise_exception=True)
    clean_data = img_serializer.validated_data

    # Split the upload's name on dots. The previous implementation produced a
    # hidden-file name ('.name') for uploads without an extension because the
    # stem collapsed to '' and the whole name became the suffix.
    parts = clean_data['source'].name.split('.')
    if len(parts) > 1:
        # Historical naming scheme: interior dots collapse away, spaces
        # become underscores, and the final segment is kept as the extension.
        name = ''.join(parts[:-1]).replace(' ', '_')
        suffix = parts[-1]
        target_name = f'{name}.{suffix}'
    else:
        # No extension at all: store under the sanitized name itself.
        target_name = parts[0].replace(' ', '_')

    filename = default_storage.save(target_name, clean_data['source'])
    clean_data['filename'] = filename
    clean_data['storage'] = default_storage.location
    return clean_data
class ImgProcessAPIView(APIView):
    """Base class for one-shot image conversion endpoints.

    Subclasses implement ``process_request`` (the conversion itself) and
    ``return_format`` (the image subtype used in the Content-Type header).
    """

    filter_backends = (RequestImgFilterBackend,)
    serializer_class = ImgSerializer
    # Session authentication with CSRF enforcement disabled so that
    # non-browser API clients can POST directly.
    authentication_classes = (CsrfExemptSessionAuthentication,)

    def process_request(self, clean_data, request):
        # Subclass hook; must return (output_filepath, output_filename).
        raise NotImplementedError('not implemented')

    @property
    def return_format(self):
        # Subclass hook; image subtype for the response ('png', 'tiff', ...).
        return ''

    @swagger_auto_schema(operation_description="",
                         manual_parameters=[Parameter('output', in_='query', required=True, type='string')],
                         request_body=serializer_class,
                         responses={200: schema_utils.xresponse_ok(),
                                    400: schema_utils.xresponse_nok()})
    def post(self, request):
        """Validate the upload, run the subclass conversion, and respond with
        either the converted image bytes (``output=image``, the default) or a
        303 redirect to the stored file (``output=url``).
        """
        # 'output' query parameter selects the response style.
        if 'output' not in request.query_params:
            output = 'image'
        else:
            output = str(request.query_params['output']).lower()
        supported_output_formats = ['image', 'url']
        if output not in supported_output_formats:
            return xresponse(
                status=status.HTTP_400_BAD_REQUEST,
                error_code=ErrorCode.InvalidParams,
                msg=f'Unhandled output format. Selected: {output} available: [{", ".join(supported_output_formats)}]'
            )
        try:
            clean_data = validate_payload(self.serializer_class, request.data)
        except ParamError as e:
            # NOTE(review): the serializer's ValidationError is not caught
            # here -- presumably DRF's default handler converts it to a 400;
            # confirm against the project's exception handler config.
            return xresponse(status.HTTP_400_BAD_REQUEST, e.error_code, e.msg)
        try:
            output_filepath, output_filename = self.process_request(clean_data, request)
            if output == 'image':
                # Stream the converted file back inline.
                with open(output_filepath, 'rb') as file:
                    return HttpResponse(content=file.read(), content_type=f'image/{self.return_format}')
            else:
                # Point the client at the stored result under MEDIA_URL.
                return HttpResponse(
                    status=status.HTTP_303_SEE_OTHER,
                    headers={
                        'Location': request.build_absolute_uri(f'{settings.MEDIA_URL}{output_filename}')
                    },
                )
        except Exception as e:
            # NOTE(review): broad catch -- every failure is mapped to NotFound
            # and the exception object itself is passed as the message; worth
            # narrowing and formatting explicitly.
            return xresponse(status.HTTP_400_BAD_REQUEST, ErrorCode.NotFound, e)
class Png2Tiff(ImgProcessAPIView):
    """Convert an uploaded PNG to TIFF, carrying the PNG's transparency over
    as a clipping path applied via a GIMP batch script.
    """

    @property
    def return_format(self):
        # MIME subtype for the HTTP response when output=image.
        return 'tiff'

    def process_request(self, clean_data, request):
        # Four-step external-tool pipeline:
        # convert easy.png -set colorspace RGB -alpha extract easy_alpha.png
        # convert easy_alpha.png easy_alpha.svg
        # convert png to tiff
        # gimp tiff with alpha.svg
        input_filepath = os.path.join(clean_data['storage'], clean_data['filename'])
        # Final output keeps the upload's stem (dots collapsed) plus .tiff.
        output_filename = f"{''.join(clean_data['filename'].split('.')[:-1])}.tiff"
        output_filepath = os.path.join(clean_data['storage'], output_filename)
        # Intermediate artifacts live next to the upload.
        output_alpha_filepath = os.path.join(clean_data['storage'], f"{''.join(clean_data['filename'].split('.')[:-1])}_alpha.png")
        command_extract_alpha = f'convert {input_filepath} -set colorspace RGB -alpha extract {output_alpha_filepath}'
        output_svg_filepath = f'{"".join(output_alpha_filepath.split(".")[:-1])}.svg'
        command_alpha_svg = f'convert {output_alpha_filepath} {output_svg_filepath}'
        output_tiff_tmp_filepath = os.path.join(clean_data['storage'], f"{''.join(clean_data['filename'].split('.')[:-1])}_tmp.tiff")
        command_png_to_tiff = f'convert {input_filepath} {output_tiff_tmp_filepath}'
        # Step 1: extract the alpha channel into a grayscale PNG mask.
        logger.info(f'command: {command_extract_alpha}')
        # NOTE(review): return codes below are logged but never checked;
        # Popen(...).wait() with PIPE can deadlock on large tool output; and
        # splitting command strings on ' ' breaks for paths with spaces.
        process = subprocess.Popen(
            command_extract_alpha.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 2: convert the alpha mask PNG into an SVG.
        logger.info(f'command: {command_alpha_svg}')
        process = subprocess.Popen(
            command_alpha_svg.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 3: plain PNG -> TIFF conversion (no clipping applied yet).
        logger.info(f'command: {command_png_to_tiff}')
        process = subprocess.Popen(
            command_png_to_tiff.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 4: apply the SVG as a clip path through GIMP's svg-clip-path
        # script-fu (assumed to be installed in GIMP's scripts dir -- TODO confirm).
        gimp_command = f"gimp -i -b '(svg-clip-path \"{output_tiff_tmp_filepath}\" \"{output_svg_filepath}\" \"{output_filepath}\" )' -b '(gimp-quit 0)'"
        logger.info(f'command: {gimp_command}')
        process = subprocess.Popen(
            gimp_command,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(20)
        logger.info(f'process resultcode: {process.returncode}')
        # Remove the upload and all intermediates; only the final TIFF remains.
        os.remove(input_filepath)
        os.remove(output_alpha_filepath)
        os.remove(output_svg_filepath)
        os.remove(output_tiff_tmp_filepath)
        return output_filepath, output_filename
class Tiff2Png(ImgProcessAPIView):
    """Convert an uploaded TIFF to PNG, clipping to the embedded path."""

    @property
    def return_format(self):
        # MIME subtype for the HTTP response when output=image.
        return 'png'

    def process_request(self, clean_data, request):
        """Run ImageMagick ``convert`` on the stored upload.

        Returns (output_filepath, output_filename).
        Raises RuntimeError when the conversion exits non-zero.
        """
        input_filepath = os.path.join(clean_data['storage'], clean_data['filename'])
        output_filename = f"{''.join(clean_data['filename'].split('.')[:-1])}.png"
        output_filepath = os.path.join(clean_data['storage'], output_filename)
        # Build the argument list directly: splitting a command string on ' '
        # breaks for paths that contain spaces.
        command = [
            'convert', input_filepath,
            '-alpha', 'transparent', '-clip', '-alpha', 'opaque',
            output_filepath,
        ]
        logger.info(f'command: {" ".join(command)}')
        try:
            # subprocess.run drains the pipes itself, so a chatty `convert`
            # cannot deadlock the way Popen(...).wait() with PIPE could.
            process = subprocess.run(command, capture_output=True, timeout=10)
            logger.info(f'process resultcode: {process.returncode}')
            if process.returncode != 0:
                # Surface the failure instead of silently returning a path to
                # a file that was never produced.
                raise RuntimeError(
                    f'convert failed ({process.returncode}): '
                    f'{process.stderr.decode(errors="replace")}'
                )
        finally:
            # The uploaded source is no longer needed either way.
            os.remove(input_filepath)
        return output_filepath, output_filename
class Eps2Png(ImgProcessAPIView):
    """Convert an uploaded EPS to PNG, clipping to the embedded path.

    Note: the conversion command is identical to Tiff2Png's; only the input
    format handled by ImageMagick differs.
    """

    @property
    def return_format(self):
        # MIME subtype for the HTTP response when output=image.
        return 'png'

    def process_request(self, clean_data, request):
        """Run ImageMagick ``convert`` on the stored upload.

        Returns (output_filepath, output_filename).
        Raises RuntimeError when the conversion exits non-zero.
        """
        input_filepath = os.path.join(clean_data['storage'], clean_data['filename'])
        output_filename = f"{''.join(clean_data['filename'].split('.')[:-1])}.png"
        output_filepath = os.path.join(clean_data['storage'], output_filename)
        # Argument list built directly: splitting a command string on ' '
        # breaks for paths that contain spaces.
        command = [
            'convert', input_filepath,
            '-alpha', 'transparent', '-clip', '-alpha', 'opaque',
            output_filepath,
        ]
        logger.info(f'command: {" ".join(command)}')
        try:
            # subprocess.run drains the pipes itself, avoiding the deadlock
            # risk of Popen(...).wait() with PIPE on large tool output.
            process = subprocess.run(command, capture_output=True, timeout=10)
            logger.info(f'process resultcode: {process.returncode}')
            if process.returncode != 0:
                # Fail loudly rather than returning a path to a file that
                # was never produced.
                raise RuntimeError(
                    f'convert failed ({process.returncode}): '
                    f'{process.stderr.decode(errors="replace")}'
                )
        finally:
            # The uploaded source is no longer needed either way.
            os.remove(input_filepath)
        return output_filepath, output_filename
class Png2Eps(ImgProcessAPIView):
    """Convert an uploaded PNG to EPS, preserving transparency as a clipping
    path (alpha -> SVG -> GIMP clip into TIFF -> EPS).
    """

    @property
    def return_format(self):
        # MIME subtype for the HTTP response when output=image.
        return 'postscript'

    def process_request(self, clean_data, request):
        # TODO: convert png-alpha to svg
        # convert easy.png -set colorspace RGB -alpha extract easy_alpha.png
        # convert easy_alpha.png easy_alpha.svg
        # convert png to tiff
        # gimp tiff with alpha.svg
        input_filepath = os.path.join(clean_data['storage'], clean_data['filename'])
        # Final EPS keeps the upload's stem (dots collapsed).
        output_filename = f"{''.join(clean_data['filename'].split('.')[:-1])}.eps"
        output_filepath = os.path.join(clean_data['storage'], output_filename)
        # Intermediates: alpha-mask PNG, traced SVG, raw TIFF, clipped TIFF.
        output_alpha_filepath = os.path.join(clean_data['storage'], f"{''.join(clean_data['filename'].split('.')[:-1])}_alpha.png")
        command_extract_alpha = f'convert {input_filepath} -set colorspace RGB -alpha extract {output_alpha_filepath}'
        output_svg_filepath = f'{"".join(output_alpha_filepath.split(".")[:-1])}.svg'
        command_alpha_svg = f'convert {output_alpha_filepath} {output_svg_filepath}'
        output_tiff_tmp_filepath = os.path.join(clean_data['storage'], f"{''.join(clean_data['filename'].split('.')[:-1])}_tmp.tiff")
        output_filepath_tiff = os.path.join(clean_data['storage'], f"{''.join(clean_data['filename'].split('.')[:-1])}_final.tiff")
        command_png_to_tiff = f'convert {input_filepath} {output_tiff_tmp_filepath}'
        command_tiff_to_eps = f'convert {output_filepath_tiff} {output_filepath}'
        # Step 1: extract the alpha channel into a grayscale PNG mask.
        logger.info(f'command: {command_extract_alpha}')
        # NOTE(review): as in Png2Tiff, return codes are logged but unchecked,
        # Popen(...).wait() with PIPE risks deadlock on chatty output, and
        # splitting on ' ' breaks for paths containing spaces.
        process = subprocess.Popen(
            command_extract_alpha.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 2: convert the alpha mask PNG into an SVG.
        logger.info(f'command: {command_alpha_svg}')
        process = subprocess.Popen(
            command_alpha_svg.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 3: plain PNG -> TIFF conversion (no clipping applied yet).
        logger.info(f'command: {command_png_to_tiff}')
        process = subprocess.Popen(
            command_png_to_tiff.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 4: apply the SVG as a clip path via GIMP's svg-clip-path
        # script-fu (assumed installed -- TODO confirm), producing the final TIFF.
        gimp_command = f"gimp -i -b '(svg-clip-path \"{output_tiff_tmp_filepath}\" \"{output_svg_filepath}\" \"{output_filepath_tiff}\" )' -b '(gimp-quit 0)'"
        logger.info(f'command: {gimp_command}')
        process = subprocess.Popen(
            gimp_command,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(20)
        logger.info(f'process resultcode: {process.returncode}')
        # Step 5: clipped TIFF -> EPS.
        logger.info(f'command: {command_tiff_to_eps}')
        process = subprocess.Popen(
            command_tiff_to_eps.split(' '),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        process.wait(10)
        logger.info(f'process resultcode: {process.returncode}')
        # Remove the upload and every intermediate; only the EPS remains.
        os.remove(input_filepath)
        os.remove(output_alpha_filepath)
        os.remove(output_svg_filepath)
        os.remove(output_tiff_tmp_filepath)
        os.remove(output_filepath_tiff)
        return output_filepath, output_filename
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.dataimport;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import com.opensourcestrategies.crmsfa.party.PartyHelper;
import javolution.util.FastList;
import javolution.util.FastMap;
import org.apache.commons.validator.GenericValidator;
import org.ofbiz.base.util.*;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.util.EntityUtil;
import org.ofbiz.service.LocalDispatcher;
import org.opentaps.common.util.UtilCommon;
/**
* maps DataImportCustomer into a set of opentaps entities that describes the Customer
* TODO: break this up big method into several logical steps, each implemented in class methods.
* This will allow for much easier re-use for custom imports.
*/
public class CustomerDecoder implements ImportDecoder {
    private static final String MODULE = CustomerDecoder.class.getName();
    // Party made RESPONSIBLE_FOR every imported customer (see decode()).
    protected String initialResponsiblePartyId;
    // CRM role of the responsible party; re-resolved in validate().
    protected String initialResponsibleRoleTypeId;
    // Internal organization the customers are imported into.
    protected String organizationPartyId;
    // Accounts Receivable GL account; its GlAccountOrganization entry is checked in validate().
    protected String arGlAccountId;
    // Offsetting GL account; its GlAccountOrganization entry is checked in validate().
    protected String offsettingGlAccountId;
    // Login of the user running the import; also supplies the delegator.
    protected GenericValue userLogin;
    /**
     * Creates a customer decoder from an input context.
     * This will automatically extract the class variables it needs and
     * validate for the existence of GL accounts and CRMSFA roles.
     * If there is a problem, a GeneralException is thrown.
     *
     * @param context input context supplying initialResponsiblePartyId,
     *        initialResponsibleRoleTypeId, organizationPartyId, arGlAccountId,
     *        offsettingGlAccountId and userLogin
     * @throws GeneralException if a configured GL account has no
     *         GlAccountOrganization entry for the organization, or the
     *         responsible party has no CRM role (see {@link #validate()})
     */
    public CustomerDecoder(Map<String, ?> context) throws GeneralException {
        this.initialResponsiblePartyId = (String) context.get("initialResponsiblePartyId");
        this.initialResponsibleRoleTypeId = (String) context.get("initialResponsibleRoleTypeId");
        this.organizationPartyId = (String) context.get("organizationPartyId");
        this.arGlAccountId = (String) context.get("arGlAccountId");
        this.offsettingGlAccountId = (String) context.get("offsettingGlAccountId");
        this.userLogin = (GenericValue) context.get("userLogin");
        validate();
    }
// validates the accounts and ensures the initial responsible party has a CRMSFA role
public void validate() throws GeneralException {
Delegator delegator = userLogin.getDelegator();
// first validate the existence of the accounts
GenericValue glAccountOrganization = null;
if (UtilValidate.isNotEmpty(arGlAccountId)) {
glAccountOrganization = delegator.findByPrimaryKey("GlAccountOrganization", UtilMisc.toMap("glAccountId", arGlAccountId, "organizationPartyId", organizationPartyId));
if (glAccountOrganization == null) {
throw new GeneralException("Cannot import: organization [" + organizationPartyId + "] does not have Accounts Receivable General Ledger account [" + arGlAccountId + "] defined in GlAccountOrganization.");
}
}
if (UtilValidate.isNotEmpty(offsettingGlAccountId)) {
glAccountOrganization = delegator.findByPrimaryKey("GlAccountOrganization", UtilMisc.toMap("glAccountId", offsettingGlAccountId, "organizationPartyId", organizationPartyId));
if (glAccountOrganization == null) {
throw new GeneralException("Cannot import: organization [" + organizationPartyId + "] does not have offsetting General Ledger account [" + offsettingGlAccountId + "] defined in GlAccountOrganization.");
}
}
// next ensure the role of the initial responsible party
this.initialResponsibleRoleTypeId = PartyHelper.getFirstValidTeamMemberRoleTypeId(initialResponsiblePartyId, delegator);
if (initialResponsibleRoleTypeId == null) {
throw new GeneralException("Cannot import customers: No internal CRM role found for party [" + initialResponsiblePartyId + "]");
}
}
public List<GenericValue> decode(GenericValue entry, Timestamp importTimestamp, Delegator delegator, LocalDispatcher dispatcher, Object... args) throws Exception {
List<GenericValue> toBeStored = FastList.newInstance();
String baseCurrencyUomId = UtilCommon.getOrgBaseCurrency(organizationPartyId, delegator);
/***********************/
/** Import Party data **/
/***********************/
// create the Person and Party with the roles for each depending on whether companyName or lastName is present
String companyPartyId = null;
String personPartyId = null;
String primaryPartyId = null; // this will be the partyId most other artifacts are associated with. If company is present, it will be company, otherwise person
String primaryPartyName = null;
GenericValue partySupplementalData = null; // this will be the party Supplemental Data to keep the primary contact mech
if ((entry.get("lastName") != null) && !("".equals(entry.getString("lastName")))) {
personPartyId = delegator.getNextSeqId("Party");
toBeStored.addAll(UtilImport.makePartyWithRoles(personPartyId, "PERSON", UtilMisc.toList("CONTACT", "BILL_TO_CUSTOMER"), delegator));
GenericValue person = delegator.makeValue("Person", UtilMisc.toMap("partyId", personPartyId, "firstName", entry.getString("firstName"), "lastName", entry.getString("lastName")));
toBeStored.add(person);
Map<String, Object> partyRelationship = UtilMisc.toMap("partyIdTo", initialResponsiblePartyId, "roleTypeIdTo", initialResponsibleRoleTypeId, "partyIdFrom", personPartyId, "roleTypeIdFrom", "CONTACT", "partyRelationshipTypeId", "RESPONSIBLE_FOR", "fromDate", importTimestamp);
partyRelationship.put("securityGroupId", "CONTACT_OWNER");
toBeStored.add(delegator.makeValue("PartyRelationship", partyRelationship));
primaryPartyId = personPartyId;
primaryPartyName = org.ofbiz.party.party.PartyHelper.getPartyName(person);
Debug.logInfo("Creating Person [" + personPartyId + "] for Customer [" + entry.get("customerId") + "].", MODULE);
}
if ((entry.get("companyName") != null) && !("".equals(entry.getString("companyName")))) {
companyPartyId = delegator.getNextSeqId("Party");
toBeStored.addAll(UtilImport.makePartyWithRoles(companyPartyId, "PARTY_GROUP", UtilMisc.toList("ACCOUNT", "BILL_TO_CUSTOMER"), delegator));
GenericValue partyGroup = delegator.makeValue("PartyGroup", UtilMisc.toMap("partyId", companyPartyId, "groupName", entry.getString("companyName")));
toBeStored.add(partyGroup);
Map<String, Object> partyRelationship = UtilMisc.toMap("partyIdTo", initialResponsiblePartyId, "roleTypeIdTo", initialResponsibleRoleTypeId, "partyIdFrom", companyPartyId, "roleTypeIdFrom", "ACCOUNT", "partyRelationshipTypeId", "RESPONSIBLE_FOR", "fromDate", importTimestamp);
partyRelationship.put("securityGroupId", "ACCOUNT_OWNER");
toBeStored.add(delegator.makeValue("PartyRelationship", partyRelationship));
// make the person a Contact of the company Account
if (UtilValidate.isNotEmpty(personPartyId)) {
partyRelationship = UtilMisc.toMap("partyIdFrom", personPartyId, "roleTypeIdFrom", "CONTACT", "partyRelationshipTypeId", "CONTACT_REL_INV",
"partyIdTo", companyPartyId, "roleTypeIdTo", "ACCOUNT", "fromDate", importTimestamp);
toBeStored.add(delegator.makeValue("PartyRelationship", partyRelationship));
}
primaryPartyId = companyPartyId;
primaryPartyName = org.ofbiz.party.party.PartyHelper.getPartyName(partyGroup);
Debug.logInfo("Creating PartyGroup [" + companyPartyId + "] for Customer [" + entry.get("customerId") + "].", MODULE);
}
if (primaryPartyId == null) {
Debug.logWarning("No person or company associated with customer [" + entry.get("customerId") + "]", MODULE);
return null;
}
// associate person with company
if ((companyPartyId != null) && (personPartyId != null)) {
Map<String, Object> partyRelationship = UtilMisc.toMap("partyIdTo", companyPartyId, "roleTypeIdTo", "ACCOUNT", "partyIdFrom", personPartyId, "roleTypeIdFrom", "CONTACT", "partyRelationshipTypeId", "CONTACT_REL_INV", "fromDate", importTimestamp);
toBeStored.add(delegator.makeValue("PartyRelationship", partyRelationship));
}
/*******************************************************************************************************/
/** Import contact mechs. Note that each contact mech will be associated with the company and person. */
/*******************************************************************************************************/
String billingContactMechId = null; // for later use with things that need billing address
if (!UtilValidate.isEmpty(entry.getString("address1"))) {
// associate this as the GENERAL_LOCATION and BILLING_LOCATION
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "POSTAL_ADDRESS"));
GenericValue mainPostalAddress = UtilImport.makePostalAddress(contactMech, entry.getString("companyName"), entry.getString("firstName"), entry.getString("lastName"), entry.getString("attnName"), entry.getString("address1"), entry.getString("address2"), entry.getString("city"), entry.getString("stateProvinceGeoId"), entry.getString("postalCode"), entry.getString("postalCodeExt"), entry.getString("countryGeoId"), delegator);
toBeStored.add(contactMech);
toBeStored.add(mainPostalAddress);
if (personPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("GENERAL_LOCATION", mainPostalAddress, personPartyId, importTimestamp, delegator));
toBeStored.add(UtilImport.makeContactMechPurpose("BILLING_LOCATION", mainPostalAddress, personPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, personPartyId, "primaryPostalAddressId", mainPostalAddress, delegator));
}
if (companyPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("GENERAL_LOCATION", mainPostalAddress, companyPartyId, importTimestamp, delegator));
toBeStored.add(UtilImport.makeContactMechPurpose("BILLING_LOCATION", mainPostalAddress, companyPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, companyPartyId, "primaryPostalAddressId", mainPostalAddress, delegator));
}
billingContactMechId = contactMech.getString("contactMechId");
}
if (!UtilValidate.isEmpty(entry.getString("shipToAddress1"))) {
// associate this as SHIPPING_LOCATION
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "POSTAL_ADDRESS"));
GenericValue secondaryPostalAddress = UtilImport.makePostalAddress(contactMech, entry.getString("shipToCompanyName"), entry.getString("shipToFirstName"), entry.getString("shipToLastName"), entry.getString("shipToAttnName"), entry.getString("shipToAddress1"), entry.getString("shipToAddress2"), entry.getString("shipToCity"), entry.getString("shipToStateProvinceGeoId"), entry.getString("shipToPostalCode"), entry.getString("shipToPostalCodeExt"), entry.getString("shipToCountryGeoId"), delegator);
toBeStored.add(contactMech);
toBeStored.add(secondaryPostalAddress);
if (personPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("SHIPPING_LOCATION", secondaryPostalAddress, personPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp)));
}
if (companyPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("SHIPPING_LOCATION", secondaryPostalAddress, companyPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp)));
}
}
if (!UtilValidate.isEmpty(entry.getString("primaryPhoneNumber"))) {
// associate this as PRIMARY_PHONE
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "TELECOM_NUMBER"));
GenericValue primaryNumber = UtilImport.makeTelecomNumber(contactMech, entry.getString("primaryPhoneCountryCode"), entry.getString("primaryPhoneAreaCode"), entry.getString("primaryPhoneNumber"), delegator);
toBeStored.add(contactMech);
toBeStored.add(primaryNumber);
if (personPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_PHONE", primaryNumber, personPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp, "extension", entry.getString("primaryPhoneExtension"))));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, personPartyId, "primaryTelecomNumberId", primaryNumber, delegator));
}
if (companyPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_PHONE", primaryNumber, companyPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp, "extension", entry.getString("primaryPhoneExtension"))));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, companyPartyId, "primaryTelecomNumberId", primaryNumber, delegator));
}
}
if (!UtilValidate.isEmpty(entry.getString("secondaryPhoneNumber"))) {
// this one has no contactmech purpose type
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "TELECOM_NUMBER"));
GenericValue secondaryNumber = UtilImport.makeTelecomNumber(contactMech, entry.getString("secondaryPhoneCountryCode"), entry.getString("secondaryPhoneAreaCode"), entry.getString("secondaryPhoneNumber"), delegator);
toBeStored.add(contactMech);
toBeStored.add(secondaryNumber);
if (personPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp, "extension", entry.getString("secondaryPhoneExtension"))));
}
if (companyPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp, "extension", entry.getString("secondaryPhoneExtension"))));
}
}
if (!UtilValidate.isEmpty(entry.getString("faxNumber"))) {
// associate this as FAX_NUMBER
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "TELECOM_NUMBER"));
GenericValue faxNumber = UtilImport.makeTelecomNumber(contactMech, entry.getString("faxCountryCode"), entry.getString("faxAreaCode"), entry.getString("faxNumber"), delegator);
toBeStored.add(contactMech);
toBeStored.add(faxNumber);
if (personPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("FAX_NUMBER", faxNumber, personPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp)));
}
if (companyPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("FAX_NUMBER", faxNumber, companyPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp)));
}
}
if (!UtilValidate.isEmpty(entry.getString("didNumber"))) {
// associate this as PHONE_DID
GenericValue contactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "TELECOM_NUMBER"));
GenericValue didNumber = UtilImport.makeTelecomNumber(contactMech, entry.getString("didCountryCode"), entry.getString("didAreaCode"), entry.getString("didNumber"), delegator);
toBeStored.add(contactMech);
toBeStored.add(didNumber);
if (personPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("PHONE_DID", didNumber, personPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp, "extension", entry.getString("didExtension"))));
}
if (companyPartyId != null) {
toBeStored.add(UtilImport.makeContactMechPurpose("PHONE_DID", didNumber, companyPartyId, importTimestamp, delegator));
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", contactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp, "extension", entry.getString("didExtension"))));
}
}
if (!UtilValidate.isEmpty(entry.getString("emailAddress"))) {
// make the email address
GenericValue emailContactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "EMAIL_ADDRESS", "infoString", entry.getString("emailAddress")));
toBeStored.add(emailContactMech);
if (personPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", emailContactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_EMAIL", emailContactMech, personPartyId, importTimestamp, delegator));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, personPartyId, "primaryEmailId", emailContactMech, delegator));
}
if (companyPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", emailContactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_EMAIL", emailContactMech, companyPartyId, importTimestamp, delegator));
toBeStored.add(UtilImport.makePartySupplementalData(partySupplementalData, companyPartyId, "primaryEmailId", emailContactMech, delegator));
}
}
if (!UtilValidate.isEmpty(entry.getString("webAddress"))) {
// make the web address
GenericValue webContactMech = delegator.makeValue("ContactMech", UtilMisc.toMap("contactMechId", delegator.getNextSeqId("ContactMech"), "contactMechTypeId", "WEB_ADDRESS", "infoString", entry.getString("webAddress")));
toBeStored.add(webContactMech);
if (personPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", webContactMech.get("contactMechId"), "partyId", personPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_WEB_URL", webContactMech, personPartyId, importTimestamp, delegator));
}
if (companyPartyId != null) {
toBeStored.add(delegator.makeValue("PartyContactMech", UtilMisc.toMap("contactMechId", webContactMech.get("contactMechId"), "partyId", companyPartyId, "fromDate", importTimestamp)));
toBeStored.add(UtilImport.makeContactMechPurpose("PRIMARY_WEB_URL", webContactMech, companyPartyId, importTimestamp, delegator));
}
}
/*****************************/
/** Import Party notes. **/
/*****************************/
if (!UtilValidate.isEmpty(entry.getString("note"))) {
// make the party note
if (personPartyId != null) {
GenericValue noteData = delegator.makeValue("NoteData", UtilMisc.toMap("noteId", delegator.getNextSeqId("NoteData"), "noteInfo", entry.getString("note"), "noteParty", initialResponsiblePartyId, "noteDateTime", importTimestamp));
toBeStored.add(noteData);
toBeStored.add(delegator.makeValue("PartyNote", UtilMisc.toMap("noteId", noteData.get("noteId"), "partyId", personPartyId)));
}
if (companyPartyId != null) {
GenericValue noteData = delegator.makeValue("NoteData", UtilMisc.toMap("noteId", delegator.getNextSeqId("NoteData"), "noteInfo", entry.getString("note"), "noteParty", initialResponsiblePartyId, "noteDateTime", importTimestamp));
toBeStored.add(noteData);
toBeStored.add(delegator.makeValue("PartyNote", UtilMisc.toMap("noteId", noteData.get("noteId"), "partyId", companyPartyId)));
}
}
/*****************************/
/** Import Pricing data. **/
/*****************************/
if (!UtilValidate.isEmpty(entry.getString("discount"))) {
BigDecimal discount = entry.getBigDecimal("discount").abs().negate();
discount = discount.movePointRight(2);
// Apply price rule only to company
if (companyPartyId != null) {
// productPriceRule
String productPriceRuleId = delegator.getNextSeqId("ProductPriceRule");
String priceRuleName = "Imported rule for ";
priceRuleName += UtilValidate.isEmpty(entry.get("companyName")) ? "partyId: " + companyPartyId : entry.getString("companyName");
toBeStored.add(delegator.makeValue("ProductPriceRule", UtilMisc.toMap("productPriceRuleId", productPriceRuleId, "ruleName", priceRuleName, "isSale", "N", "fromDate", importTimestamp)));
// productPriceCond
toBeStored.add(delegator.makeValue("ProductPriceCond", UtilMisc.toMap("productPriceRuleId", productPriceRuleId, "productPriceCondSeqId", UtilFormatOut.formatPaddedNumber(1, 2), "inputParamEnumId", "PRIP_PARTY_ID", "operatorEnumId", "PRC_EQ", "condValue", companyPartyId)));
// productPriceAction
toBeStored.add(delegator.makeValue("ProductPriceAction", UtilMisc.toMap("productPriceRuleId", productPriceRuleId, "productPriceActionSeqId", UtilFormatOut.formatPaddedNumber(1, 2), "productPriceActionTypeId", "PRICE_POL", "amount", discount)));
}
}
/***********************************/
/** Import Party Classifications. **/
/***********************************/
if (!UtilValidate.isEmpty(entry.getString("partyClassificationTypeId"))) {
// Apply classification only to partyGroup
if (companyPartyId != null) {
String partyClassificationTypeId = entry.getString("partyClassificationTypeId");
if (delegator.findByPrimaryKey("PartyClassificationType", UtilMisc.toMap("partyClassificationTypeId", partyClassificationTypeId)) != null) {
GenericValue partyClassificationGroup = EntityUtil.getFirst(delegator.findByAnd("PartyClassificationGroup", UtilMisc.toMap("partyClassificationTypeId", partyClassificationTypeId)));
if (partyClassificationGroup != null) {
toBeStored.add(delegator.makeValue("PartyClassification", UtilMisc.toMap("partyId", companyPartyId, "partyClassificationGroupId", partyClassificationGroup.getString("partyClassificationGroupId"), "fromDate", importTimestamp)));
} else {
Debug.logInfo("No partyClassificationGroups exist for partyClassificationId" + partyClassificationTypeId + ", ignoring for customerId " + entry.getString("customerId"), MODULE);
}
} else {
Debug.logInfo("partyClassificationTypeId" + partyClassificationTypeId + "does not exist, ignoring for customerId " + entry.getString("customerId"), MODULE);
}
}
}
// associate party with DONOTSHIP_CUSTOMERS classification group if disableShipping is set.
String disableShipping = entry.getString("disableShipping");
if (UtilValidate.isNotEmpty(disableShipping) && "Y".equals(disableShipping)) {
Map<String, Object> partyClassification = null;
if (UtilValidate.isNotEmpty(companyPartyId)) {
partyClassification = UtilMisc.toMap("partyId", companyPartyId, "partyClassificationGroupId", "DONOTSHIP_CUSTOMERS", "fromDate", importTimestamp);
toBeStored.add(delegator.makeValue("PartyClassification", partyClassification));
}
if (UtilValidate.isNotEmpty(personPartyId)) {
partyClassification = UtilMisc.toMap("partyId", personPartyId, "partyClassificationGroupId", "DONOTSHIP_CUSTOMERS", "fromDate", importTimestamp);
toBeStored.add(delegator.makeValue("PartyClassification", partyClassification));
}
}
/*****************************/
/** Import Accounting data. **/
/*****************************/
if (!UtilValidate.isEmpty(entry.getString("creditCardNumber"))) {
// we need a person with a first and last name, otherwise the import data is malformed
if (personPartyId == null && UtilValidate.isEmpty(entry.getString("firstName")) && UtilValidate.isEmpty(entry.getString("lastName"))) {
Debug.logWarning("Failed to import Credit Card for Party ["+primaryPartyId+"]: First and Last name missing for customer ["+entry.get("customerId")+"].", MODULE);
} else {
// associate this with primaryPartyId as a PaymentMethod of CREDIT_CARD type
GenericValue paymentMethod = delegator.makeValue("PaymentMethod", UtilMisc.toMap("paymentMethodId", delegator.getNextSeqId("PaymentMethod"), "paymentMethodTypeId", "CREDIT_CARD", "partyId", primaryPartyId, "fromDate", importTimestamp));
toBeStored.add(paymentMethod);
// translate the credit card data into a form acceptable to CreditCard
String cardNumber = UtilValidate.stripCharsInBag(entry.getString("creditCardNumber"), UtilValidate.creditCardDelimiters);
String cardType = UtilValidate.getCardType(cardNumber);
String expireDate = UtilImport.decodeExpireDate(entry.getString("creditCardExpDate"));
if (expireDate == null) {
Debug.logWarning("Failed to decode creditCardExpDate ["+entry.getString("creditCardExpDate")+"] into form MM/YYYY for customer ["+entry.get("customerId")+"].", MODULE);
} else {
Map<String, Object> input = UtilMisc.<String, Object>toMap("paymentMethodId", paymentMethod.get("paymentMethodId"), "cardNumber", cardNumber, "cardType", cardType, "expireDate", expireDate);
input.put("firstNameOnCard", entry.get("firstName"));
input.put("lastNameOnCard", entry.get("lastName"));
input.put("companyNameOnCard", entry.get("companyName"));
input.put("contactMechId", billingContactMechId);
toBeStored.add(delegator.makeValue("CreditCard", input));
}
}
}
toBeStored.addAll(createBalances(primaryPartyId, entry.getBigDecimal("outstandingBalance"), importTimestamp, baseCurrencyUomId, delegator));
toBeStored.addAll(createSalesAgreement(entry, primaryPartyId, primaryPartyName, importTimestamp, delegator));
// save the primary party Id
entry.put("primaryPartyId", primaryPartyId);
toBeStored.add(entry);
return toBeStored;
}
/**
 * Checks whether an AR balance can be created for the given amount.
 * A balance is only created for a non-null, non-zero amount, and only when
 * both the AR and offsetting GL accounts have been configured.
 *
 * @param balance the outstanding balance from the import entry, may be null
 * @return true if balance entities should be created
 */
public boolean canCreateBalance(BigDecimal balance) {
    // Nothing to post for a missing or zero balance.
    if (balance == null) {
        return false;
    }
    if (balance.signum() == 0) {
        return false;
    }
    // Both sides of the DR/CR pair need a configured GL account.
    boolean hasArAccount = !UtilValidate.isEmpty(arGlAccountId);
    boolean hasOffsettingAccount = !UtilValidate.isEmpty(offsettingGlAccountId);
    return hasArAccount && hasOffsettingAccount;
}
/**
 * Creates AR balances if a balance exists and the accounts are specified.
 * Builds one unposted AcctgTrans plus a balanced pair of AcctgTransEntry
 * values: a debit to the AR account and a credit to the offsetting account.
 *
 * @param partyId the customer party the transaction is recorded against
 * @param balance the outstanding balance amount (checked by canCreateBalance)
 * @param importTimestamp transaction date to record
 * @param currencyUomId currency of the balance
 * @param delegator the delegator used to create values and sequence ids
 * @return List containing the balance entities or an empty list if no balances are to be created.
 */
public List<GenericValue> createBalances(String partyId, BigDecimal balance, Timestamp importTimestamp, String currencyUomId, Delegator delegator) {
    List<GenericValue> toBeStored = new FastList<GenericValue>();
    if (! canCreateBalance(balance)) return toBeStored;
    // create an AcctgTrans, DR arGlAccountId, CR offsettingGlAccountId for the amount of outstandingBalance
    Map<String, Object> input = UtilMisc.toMap("acctgTransTypeId", "INTERNAL_ACCTG_TRANS", "glFiscalTypeId", "ACTUAL",
            "transactionDate", importTimestamp, "partyId", partyId);
    input.put("acctgTransId", delegator.getNextSeqId("AcctgTrans"));
    // transaction is created unposted
    input.put("isPosted", "N");
    input.put("createdByUserLogin", userLogin.get("userLoginId"));
    input.put("lastModifiedByUserLogin", userLogin.get("userLoginId"));
    GenericValue acctgTrans = delegator.makeValue("AcctgTrans", input);
    toBeStored.add(acctgTrans);
    // acctg trans entry input data for both DR and CR
    // NOTE(review): this map is mutated and reused for the credit entry below;
    // assumes delegator.makeValue copies the field map — verify against the
    // entity engine version in use.
    Map<String, Object> acctgTransEntryInput = FastMap.newInstance();
    acctgTransEntryInput.put("acctgTransId", acctgTrans.get("acctgTransId"));
    acctgTransEntryInput.put("amount", balance);
    acctgTransEntryInput.put("partyId", partyId);
    acctgTransEntryInput.put("organizationPartyId", organizationPartyId);
    acctgTransEntryInput.put("currencyUomId", currencyUomId);
    acctgTransEntryInput.put("reconcileStatusId", "AES_NOT_RECONCILED");
    // DR arGlAccountId
    acctgTransEntryInput.put("acctgTransEntrySeqId", UtilFormatOut.formatPaddedNumber(1, 6));
    acctgTransEntryInput.put("glAccountId", arGlAccountId);
    acctgTransEntryInput.put("debitCreditFlag", "D");
    toBeStored.add(delegator.makeValue("AcctgTransEntry", acctgTransEntryInput));
    // CR offsettingGlAccountId
    acctgTransEntryInput.put("acctgTransEntrySeqId", UtilFormatOut.formatPaddedNumber(2, 6));
    acctgTransEntryInput.put("glAccountId", offsettingGlAccountId);
    acctgTransEntryInput.put("debitCreditFlag", "C");
    toBeStored.add(delegator.makeValue("AcctgTransEntry", acctgTransEntryInput));
    return toBeStored;
}
/**
 * Whether we should create a sales agreement for this record. Overload if the details vary.
 * For vanilla importCustomers, an agreement is warranted by a positive
 * credit limit, a positive net payment days term, or both.
 *
 * @param entry the import record, read for "creditLimit" and "netPaymentDays"
 * @return true if createSalesAgreement should produce an agreement
 */
public boolean canCreateSalesAgreement(GenericValue entry) {
    // Normalize missing values to zero so both checks below are uniform.
    BigDecimal creditLimit = entry.getBigDecimal("creditLimit");
    if (creditLimit == null) {
        creditLimit = BigDecimal.ZERO;
    }
    Long netPaymentDays = entry.getLong("netPaymentDays");
    long days = (netPaymentDays == null) ? 0L : netPaymentDays.longValue();
    return creditLimit.signum() > 0 || days > 0;
}
/**
 * Create the sales agreement and terms between the given partyId (with a partyName) and the organization.
 * Entry point which should be called from decode() method. To customize the way agreements are generated
 * due to field and data differences, overload canCreateSalesAgreement() and createSalesAgreementTerms().
 *
 * @param entry the import record being decoded
 * @param partyId the customer party the agreement is made with
 * @param partyName display name used in the description, may be blank or null
 * @param importTimestamp used for both agreementDate and fromDate
 * @param delegator the delegator
 * @return list of Agreement and term values to store, empty when no agreement is warranted
 * @throws GenericEntityException if term creation fails
 */
public List<GenericValue> createSalesAgreement(GenericValue entry, String partyId, String partyName, Timestamp importTimestamp, Delegator delegator) throws GenericEntityException {
    List<GenericValue> toBeStored = new FastList<GenericValue>();
    if (! canCreateSalesAgreement(entry)) return toBeStored;
    String agreementId = delegator.getNextSeqId("Agreement");
    GenericValue agreement = delegator.makeValue("Agreement");
    agreement.put("agreementId", agreementId);
    agreement.put("partyIdFrom", organizationPartyId);
    agreement.put("partyIdTo", partyId);
    agreement.put("agreementTypeId", "SALES_AGREEMENT");
    agreement.put("agreementDate", importTimestamp);
    agreement.put("fromDate", importTimestamp);
    agreement.put("statusId", "AGR_ACTIVE");
    // Only mention the party when a name is available. Previously partyName was
    // appended outside the ternary, so a blank/null name still produced
    // descriptions like "Sales agreementnull".
    agreement.put("description", "Sales agreement" + (GenericValidator.isBlankOrNull(partyName) ? "" : " for " + partyName));
    toBeStored.add(agreement);
    toBeStored.addAll(createSalesAgreementTerms(entry, agreementId, delegator));
    return toBeStored;
}
/**
 * Invoked from createSalesAgreement(), generates the terms of the agreement. Overload if details vary.
 * For vanilla importCustomers this yields a credit limit term when creditLimit
 * is positive, and a net payment days term when netPaymentDays is positive.
 *
 * @param entry the import record supplying creditLimit, netPaymentDays and currencyUomId
 * @param agreementId the agreement the terms attach to
 * @param delegator the delegator
 * @return the term/item values to store, possibly empty
 * @throws GenericEntityException on entity errors from the helpers
 */
public List<GenericValue> createSalesAgreementTerms(GenericValue entry, String agreementId, Delegator delegator) throws GenericEntityException {
    List<GenericValue> terms = new FastList<GenericValue>();
    String customerCurrencyUomId = entry.getString("currencyUomId");
    int seqId = 1;
    // Credit limit term is sequenced first, then the net payment days term.
    terms.addAll(createAgreementCreditLimitTerm(agreementId, customerCurrencyUomId, seqId++, delegator, entry.getBigDecimal("creditLimit")));
    terms.addAll(createAgreementNetPaymentDaysTerm(agreementId, customerCurrencyUomId, seqId++, delegator, entry.getLong("netPaymentDays")));
    return terms;
}
/**
 * Simplifies the creation of a term/item combination. Specify the agreement type, term type, term value, term days and currency.
 * Prefer the specific helpers such as createAgreementCreditLimitTerm() to minimize errors.
 *
 * @param agreementId agreement the item/term pair belongs to
 * @param agreementTypeId type stored on the AgreementItem
 * @param termTypeId type stored on the AgreementTerm
 * @param termValue numeric term value, may be null
 * @param termDays day count for the term, may be null
 * @param currencyUomId currency of the term
 * @param seqId item sequence number shared by item and term
 * @param delegator the delegator
 * @return list with the AgreementItem followed by its AgreementTerm
 */
public List<GenericValue> createAgreementTerm(String agreementId, String agreementTypeId, String termTypeId, BigDecimal termValue, Long termDays, String currencyUomId, int seqId, Delegator delegator) {
    List<GenericValue> values = new FastList<GenericValue>();
    // The term references its item through the shared sequence id.
    String itemSeqId = String.valueOf(seqId);
    GenericValue item = delegator.makeValue("AgreementItem");
    item.put("agreementId", agreementId);
    item.put("agreementItemSeqId", itemSeqId);
    item.put("agreementItemTypeId", agreementTypeId);
    values.add(item);
    GenericValue term = delegator.makeValue("AgreementTerm");
    term.put("agreementTermId", delegator.getNextSeqId("AgreementTerm"));
    term.put("agreementId", agreementId);
    term.put("termTypeId", termTypeId);
    term.put("agreementItemSeqId", itemSeqId);
    term.put("termValue", termValue);
    term.put("termDays", termDays);
    term.put("currencyUomId", currencyUomId);
    values.add(term);
    return values;
}
/**
 * Helper function to generate a credit limit term. Only creates a term if the credit limit is positive.
 * Used by createSalesAgreementTerms().
 *
 * @param agreementId agreement the term attaches to
 * @param customerCurrencyUomId customer currency; falls back to the opentaps default when empty
 * @param seqId item sequence number for the term
 * @param delegator the delegator
 * @param creditLimit the credit limit, may be null
 * @return the item/term values, or an empty list when no term is warranted
 */
public List<GenericValue> createAgreementCreditLimitTerm(String agreementId, String customerCurrencyUomId, int seqId, Delegator delegator, BigDecimal creditLimit) {
    // Bail out first: previously the currency fallback (and its warning log)
    // ran even when no term would be created, producing spurious warnings.
    if (creditLimit == null || creditLimit.signum() <= 0) {
        return new FastList<GenericValue>();
    }
    // get currency for customer record or from opentaps.properties
    // TODO why not just throw an illegal argument exception and have the importer fix the data?
    if (UtilValidate.isEmpty(customerCurrencyUomId)) {
        customerCurrencyUomId = UtilProperties.getPropertyValue("opentaps", "defaultCurrencyUomId");
        Debug.logWarning("No currency specified for credit limit of agreement [" + agreementId + "], using [" + customerCurrencyUomId + "] from opentaps.properties", MODULE);
    }
    return createAgreementTerm(agreementId, "AGREEMENT_CREDIT", "CREDIT_LIMIT", creditLimit, null, customerCurrencyUomId, seqId, delegator);
}
/**
 * Helper function to generate a net payment days term. Only creates a term
 * if there is a positive number of days.
 * Used by createSalesAgreementTerms().
 *
 * @param agreementId agreement the term attaches to
 * @param customerCurrencyUomId currency for the term
 * @param seqId item sequence number for the term
 * @param delegator the delegator
 * @param netPaymentDays number of days, may be null
 * @return the item/term values, or an empty list when days is null or non-positive
 */
public List<GenericValue> createAgreementNetPaymentDaysTerm(String agreementId, String customerCurrencyUomId, int seqId, Delegator delegator, Long netPaymentDays) {
    boolean hasPositiveDays = (netPaymentDays != null) && (netPaymentDays.longValue() > 0);
    if (!hasPositiveDays) {
        return new FastList<GenericValue>();
    }
    return createAgreementTerm(agreementId, "AGREEMENT_PAYMENT", "FIN_PAYMENT_TERM", null, netPaymentDays, customerCurrencyUomId, seqId, delegator);
}
/**
 * Helper function to generate a percentage discount term. Make sure that the percentage is represented as a decimal number
 * and not a whole number.
 * Used by createSalesAgreementTerms().
 * TODO: This term isn't really used anywhere.
 * TODO: In validating discount rate, throw illegal argument if it's not valid
 *
 * @param agreementId agreement the term attaches to
 * @param customerCurrencyUomId currency for the term
 * @param seqId item sequence number for the term
 * @param delegator the delegator
 * @param discountRate discount as a decimal fraction, may be null
 * @param discountDays days the discount applies for
 * @return the item/term values, or an empty list when the rate is null or non-positive
 */
public List<GenericValue> createAgreementDiscountTerm(String agreementId, String customerCurrencyUomId, int seqId, Delegator delegator, BigDecimal discountRate, Long discountDays) {
    if (discountRate == null || discountRate.signum() <= 0) {
        return new FastList<GenericValue>();
    }
    return createAgreementTerm(agreementId, "AGREEMENT_PAYMENT", "FIN_PAYMENT_DISC", discountRate, discountDays, customerCurrencyUomId, seqId, delegator);
}
}
|
#!/bin/bash
# Tears down the lxcfs admission webhook from the "lxcfs" namespace.
# Select which webhook variant to remove; the CA-bundle manifest filename
# depends on the list type.
#LIST_TYPE="blacklist"
LIST_TYPE="whitelist"
kubectl -n lxcfs delete -f deployment/mutatingwebhook-ca-bundle-${LIST_TYPE}.yaml
kubectl -n lxcfs delete -f deployment/service.yaml
kubectl -n lxcfs delete -f deployment/deployment.yaml
# Also remove the TLS secret the webhook deployment used.
kubectl -n lxcfs delete secret lxcfs-admission-webhook-certs
|
<filename>src/views/Home/index.view.tsx
import * as React from "react";
import Navbar from "components/Navbar/index.view";
import MLHBanner from "components/MLHBanner/index.view";
import Background from "components/Background/index.view";
import Hero from "./components/Hero/index.view";
import Button from "components/Button/index.view";
import EmailSubscriptionForm from "components/EmailSubscription/index.view";
import Mission from "./components/Mission/index.view";
import MilestonesComponent from "./components/Milestones/index.view";
import Schedule from "./components/Schedule/index.view";
import PrizesComponent from "./components/Prizes/index.view";
import SpeakersComponent from "./components/Speakers/index.view";
import FAQComponent from "./components/FAQ/index.view";
import SponsorsComponent from "./components/Sponsors/index.view";
import FooterComponent from "./components/Footer/index.view";
import { navbarProps } from "./props/navbar.js";
import * as heroProps from "./props/hero.json";
import * as missionProps from "./props/mission.json";
import * as milestoneProps from "./props/milestones.json";
import { topics } from "./props/prizes";
import { speakers } from "./props/speakers";
import * as FAQprops from "./props/faq.json";
import { sponsors } from "./props/sponsors.js";
import { footerProps } from "./props/footer";
import { organizerFormProps } from "./props/organizerForm";
import "./Home.scss";
/**
 * Landing page view. Composes the navbar, hero (with email-subscription form
 * and organizer-application button as hero children), the content sections
 * (mission, milestones, prizes, speakers, schedule, FAQ, sponsors), and the
 * footer. All section content is supplied by the static files under ./props.
 */
const HomepageView: React.FC = () => {
  return (
    <>
      <Navbar {...navbarProps} />
      <MLHBanner />
      <div className="Homepage">
        <Background>
          <Hero
            pageName={"Home"}
            title={heroProps.title}
            description={heroProps.description}
          >
            {/* Hero children render below the title/description. */}
            <div className="Homepage__emailSubscriptionContainer">
              <EmailSubscriptionForm />
            </div>
            <div className="Homepage__organizerApplication">
              <Button {...organizerFormProps} />
            </div>
          </Hero>
          <Mission
            about_text={missionProps.about}
            mission_text={missionProps.mission}
          />
          <MilestonesComponent milestones={milestoneProps.milestones} />
          <PrizesComponent topics={topics} />
          <SpeakersComponent speakers={speakers} />
          <Schedule />
          <FAQComponent
            questionAnswersColumnLeft={FAQprops.questionAnswersColumnLeft}
            questionAnswersColumnRight={FAQprops.questionAnswersColumnRight}
            questionAnswersColumnMiddle={FAQprops.questionAnswersColumnMiddle}
          />
          <SponsorsComponent sponsors={sponsors} />
          <FooterComponent links={footerProps} />
        </Background>
      </div>
    </>
  );
};

export default HomepageView;
|
import { PrismaClientKnownRequestError } from '@prisma/client/runtime';
import { PrismaError } from './PrismaError';
/**
 * Wraps Prisma's P2017 error in the application's PrismaError type with a
 * fixed, human-readable message.
 */
export class PrismaP2017Error extends PrismaError {
  constructor(error: PrismaClientKnownRequestError) {
    super(error, 'The records for the relation are not connected');
  }
}
|
<gh_stars>0
package library
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"io/ioutil"
"net/http"
"strconv"
)
// Quote this is our data structure.
// The struct tags control the JSON property names used by the REST endpoints.
type Quote struct {
	Id     int    `json:"id"`
	Author string `json:"author"`
	Text   string `json:"text"`
}

// our slice collection for the filled Quotes (package-level shared state;
// mutated by the CRUD handlers below)
var data []Quote
// findHighestId returns the largest quote id in the collection, or 0 when the
// collection is empty.
// this is a private function to this file because it is not Capitalized
func findHighestId() int {
	// Guard the empty case: indexing data[0] previously panicked when no
	// quotes had been loaded yet.
	if len(data) == 0 {
		return 0
	}
	maxId := data[0].Id
	for _, v := range data {
		if v.Id > maxId {
			maxId = v.Id
		}
	}
	return maxId
}
// LoadData loads the JSON data file for usage, replacing the in-memory
// collection. Errors are reported on stdout and leave the collection empty.
func LoadData() {
	content, err := ioutil.ReadFile("./api/data.json")
	if err != nil {
		fmt.Println(err.Error())
		// Previously execution fell through and Unmarshal ran on nil content;
		// bail out instead.
		return
	}
	// Report (rather than silently drop) malformed JSON.
	if err := json.Unmarshal(content, &data); err != nil {
		fmt.Println(err.Error())
	}
}
// GetQuote writes the entire quote collection as a JSON array.
func GetQuote(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	encoder := json.NewEncoder(w)
	encoder.Encode(data)
}
// GetQuoteById writes the quote whose id matches the "id" URL parameter, or a
// zero-valued Quote when no match exists.
func GetQuoteById(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	// Atoi yields 0 on parse failure, preserving the original lenient
	// behavior for non-numeric ids.
	requestId, _ := strconv.Atoi(mux.Vars(r)["id"])
	encoder := json.NewEncoder(w)
	// Linear scan over the collection for the requested id.
	for _, quote := range data {
		if quote.Id != requestId {
			continue
		}
		encoder.Encode(quote)
		return
	}
	// No match: respond with an empty quote object.
	encoder.Encode(&Quote{})
}
func CreateQuote(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
quote := Quote{}
_ = json.NewDecoder(r.Body).Decode("e)
quote.Id = findHighestId() + 1
data = append(data, quote)
json.NewEncoder(w).Encode(quote)
}
func UpdateQuote(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
quote := Quote{}
_ = json.NewDecoder(r.Body).Decode("e)
// when you have the `index` defined, you have the actual index of the item from the splice
for index, item := range data {
if item.Id == quote.Id {
// this is very similar to a splice in JavaScript (same idea)
data = append(data[:index], data[index+1:]...)
data = append(data, quote)
json.NewEncoder(w).Encode(quote)
return
}
}
}
// DeleteQuote removes the quote matching the "id" URL parameter (if any) and
// writes the remaining collection as JSON.
func DeleteQuote(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	requestId, _ := strconv.Atoi(mux.Vars(r)["id"])
	// Splice out the first (and only) entry with a matching id.
	for i, quote := range data {
		if quote.Id == requestId {
			data = append(data[:i], data[i+1:]...)
			break
		}
	}
	json.NewEncoder(w).Encode(data)
}
|
#!/bin/bash
############################################################################
############################################################################
##
## Copyright 2016,2017 International Business Machines
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
##     http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions AND
## limitations under the License.
##
############################################################################
############################################################################
# Patches the VHDL constants in the file given as $1 based on the FPGACARD,
# SDRAM_USED, BRAM_USED and NUM_OF_ACTIONS environment variables.
set -e
# Default VHDL constant values (hex string literals); refined per card below.
DMA_XFER_SIZE="x\"0\""
DMA_ALIGNMENT="x\"6\""
SDRAM_SIZE="x\"0000\""
# Base card type code and SDRAM size per FPGA card.
if [ "$FPGACARD" == "ADKU3" ]; then
CARD_TYPE="x\"00\""
if [ "${SDRAM_USED^^}" == "TRUE" ]; then
SDRAM_SIZE="x\"2000\""
fi
elif [ "$FPGACARD" == "N250S" ]; then
CARD_TYPE="x\"01\""
if [ "${SDRAM_USED^^}" == "TRUE" ]; then
SDRAM_SIZE="x\"1000\""
fi
elif [ "$FPGACARD" == "S121B" ]; then
CARD_TYPE="x\"02\""
if [ "${SDRAM_USED^^}" == "TRUE" ]; then
SDRAM_SIZE="x\"2000\""
fi
else
CARD_TYPE="x\"03\""
if [ "${SDRAM_USED^^}" == "TRUE" ]; then
SDRAM_SIZE="x\"2000\""
fi
fi
# Newer cards get their own type codes and a larger DMA transfer size.
# NOTE(review): these cards first take the else branch above (CARD_TYPE
# x"03", SDRAM_SIZE from that branch) before CARD_TYPE is overridden here;
# SDRAM_SIZE keeps the else-branch value — confirm this is intended.
if [ "$FPGACARD" == "N250SP" ]; then
DMA_XFER_SIZE="x\"6\""
CARD_TYPE="x\"10\""
elif [ "$FPGACARD" == "RCXVUP" ]; then
DMA_XFER_SIZE="x\"6\""
CARD_TYPE="x\"11\""
elif [ "$FPGACARD" == "FX609" ]; then
DMA_XFER_SIZE="x\"6\""
CARD_TYPE="x\"12\""
elif [ "$FPGACARD" == "S241" ]; then
DMA_XFER_SIZE="x\"6\""
CARD_TYPE="x\"13\""
elif [ "$FPGACARD" == "AD9V3" ]; then
DMA_XFER_SIZE="x\"6\""
CARD_TYPE="x\"14\""
fi
# BRAM builds override the SDRAM size with a token value of 1.
if [ "${BRAM_USED^^}" == "TRUE" ]; then
SDRAM_SIZE="x\"0001\""
fi
NAME=`basename $1`
echo -e "                        configuring $NAME"
# Rewrite each CONSTANT declaration in place with the values computed above.
sed -i 's/CONSTANT[ ^I]*NUM_OF_ACTIONS[ ^I]*:[ ^I]*integer.*;/CONSTANT NUM_OF_ACTIONS : integer RANGE 0 TO 16 := '$NUM_OF_ACTIONS'; /' $1
sed -i 's/CONSTANT[ ^I]*DMA_XFER_SIZE[ ^I]*:[ ^I]std_logic_vector(3 DOWNTO 0).*;/CONSTANT DMA_XFER_SIZE : std_logic_vector(3 DOWNTO 0) := '$DMA_XFER_SIZE'; /' $1
sed -i 's/CONSTANT[ ^I]*DMA_ALIGNMENT[ ^I]*:[ ^I]std_logic_vector(3 DOWNTO 0).*;/CONSTANT DMA_ALIGNMENT : std_logic_vector(3 DOWNTO 0) := '$DMA_ALIGNMENT'; /' $1
sed -i 's/CONSTANT[ ^I]*SDRAM_SIZE[ ^I]*:[ ^I]std_logic_vector(15 DOWNTO 0).*;/CONSTANT SDRAM_SIZE : std_logic_vector(15 DOWNTO 0) := '$SDRAM_SIZE'; /' $1
sed -i 's/CONSTANT[ ^I]*CARD_TYPE[ ^I]*:[ ^I]std_logic_vector(7 DOWNTO 0).*;/CONSTANT CARD_TYPE : std_logic_vector(7 DOWNTO 0) := '$CARD_TYPE'; /' $1
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "hermes/modem_qrtr.h"
#include <algorithm>
#include <array>
#include <utility>
#include <base/bind.h>
#include <base/logging.h>
#include <base/strings/string_number_conversions.h>
#include <libqrtr.h>
#include "hermes/apdu.h"
#include "hermes/euicc_manager_interface.h"
#include "hermes/qmi_uim.h"
#include "hermes/sgp_22.h"
#include "hermes/socket_qrtr.h"
namespace {
// As per QMI UIM spec section 2.2
constexpr uint8_t kQmiUimService = 0xB;
// This represents the default logical slot that we want our eSIM to be
// assigned. For dual sim - single standby modems, this will always work. For
// other multi-sim modems, get the first active slot and store it as a ModemQrtr
// field.
constexpr uint8_t kDefaultLogicalSlot = 0x01;
// Sentinel meaning "no logical channel open". Note that -1 converts to 255
// for uint8_t.
constexpr uint8_t kInvalidChannel = -1;
// Delay between SwitchSlot and the next QMI message
constexpr auto kSwitchSlotDelay = base::TimeDelta::FromSeconds(1);
// How long to wait before re-running initialization after a failure.
constexpr auto kInitRetryDelay = base::TimeDelta::FromSeconds(10);
// Returns true iff |qmi_result| reports success; logs the QMI error code for
// |cmd| otherwise.
bool CheckMessageSuccess(QmiUimCommand cmd, const uim_qmi_result& qmi_result) {
  if (qmi_result.result == 0) {
    return true;
  }
  LOG(ERROR) << cmd.ToString()
             << " response contained error: " << qmi_result.error;
  return false;
}
}  // namespace
namespace hermes {
// Bundles an outgoing APDU with the (optional) callback to invoke once its
// response arrives.
struct ApduTxInfo : public ModemQrtr::TxInfo {
  explicit ApduTxInfo(CommandApdu apdu,
                      ModemQrtr::ResponseCallback cb = nullptr)
      // Move |cb| into place; it was previously copied needlessly.
      : apdu_(std::move(apdu)), callback_(std::move(cb)) {}
  CommandApdu apdu_;
  ModemQrtr::ResponseCallback callback_;
};
// Carries the target physical/logical slot pair for a queued SwitchSlot
// transaction.
struct SwitchSlotTxInfo : public ModemQrtr::TxInfo {
  explicit SwitchSlotTxInfo(const uint32_t physical_slot,
                            const uint8_t logical_slot)
      : physical_slot_(physical_slot), logical_slot_(logical_slot) {}
  const uint32_t physical_slot_;
  const uint8_t logical_slot_;
};
// Factory: validates and opens |socket| before construction so a ModemQrtr
// instance always owns a usable socket. Returns nullptr if the socket is
// missing or fails to open.
std::unique_ptr<ModemQrtr> ModemQrtr::Create(
    std::unique_ptr<SocketInterface> socket,
    Logger* logger,
    Executor* executor) {
  // Open the socket prior to passing to ModemQrtr, such that it always has a
  // valid socket to write to.
  if (!socket || !socket->Open()) {
    LOG(ERROR) << "Failed to open socket";
    return nullptr;
  }
  return std::unique_ptr<ModemQrtr>(
      new ModemQrtr(std::move(socket), logger, executor));
}
// Takes ownership of an already-open socket (see Create()) and registers the
// data-available callback.
ModemQrtr::ModemQrtr(std::unique_ptr<SocketInterface> socket,
                     Logger* logger,
                     Executor* executor)
    : qmi_disabled_(false),
      extended_apdu_supported_(false),
      // Start at max uint16 so the first AllocateId() (+2, wrapping) yields 1:
      // an odd, non-zero transaction id (see AllocateId()).
      current_transaction_id_(static_cast<uint16_t>(-1)),
      channel_(kInvalidChannel),
      logical_slot_(kDefaultLogicalSlot),
      socket_(std::move(socket)),
      buffer_(4096),
      euicc_manager_(nullptr),
      logger_(logger),
      executor_(executor) {
  CHECK(socket_);
  CHECK(socket_->IsValid());
  socket_->SetDataAvailableCallback(
      base::Bind(&ModemQrtr::OnDataAvailable, base::Unretained(this)));
  // Set SGP.22 specification version supported by this implementation (this is
  // not currently constrained by the eUICC we use).
  spec_version_.set_major(2);
  spec_version_.set_minor(2);
  spec_version_.set_revision(0);
}
// Stops the UIM service (if running) before releasing the socket.
ModemQrtr::~ModemQrtr() {
  Shutdown();
  socket_->Close();
}
// Queues a switch to |physical_slot| followed by a reset and a logical channel
// reopen, since the previously opened channel is no longer usable.
void ModemQrtr::SetActiveSlot(const uint32_t physical_slot) {
  tx_queue_.push_back(
      {std::make_unique<SwitchSlotTxInfo>(physical_slot, logical_slot_),
       AllocateId(), QmiUimCommand::kSwitchSlot});
  // Drop back to the started state and forget the now-stale channel.
  current_state_.Transition(State::kUimStarted);
  channel_ = kInvalidChannel;
  tx_queue_.push_back(
      {std::unique_ptr<TxInfo>(), AllocateId(), QmiUimCommand::kReset});
  tx_queue_.push_back({std::unique_ptr<TxInfo>(), AllocateId(),
                       QmiUimCommand::kOpenLogicalChannel});
}
// Like SetActiveSlot(), but first queues a kGetSlots request so the currently
// active slot can be recorded and later restored via RestoreActiveSlot().
void ModemQrtr::StoreAndSetActiveSlot(const uint32_t physical_slot) {
  tx_queue_.push_back(
      {std::make_unique<TxInfo>(), AllocateId(), QmiUimCommand::kGetSlots});
  SetActiveSlot(physical_slot);
}
// Queues a switch back to the slot recorded by StoreAndSetActiveSlot(), then
// clears the stored value. Logs an error when nothing was stored.
void ModemQrtr::RestoreActiveSlot() {
  if (!stored_active_slot_) {
    LOG(ERROR) << "Attempted to restore active slot when none was stored";
    return;
  }
  tx_queue_.push_back({std::make_unique<SwitchSlotTxInfo>(
                           stored_active_slot_.value(), logical_slot_),
                       AllocateId(), QmiUimCommand::kSwitchSlot});
  stored_active_slot_.reset();
}
// Queues one kSendApdu transaction per APDU in |apdus|. Only the final APDU's
// transaction carries |cb|; earlier ones get a null callback.
void ModemQrtr::SendApdus(std::vector<lpa::card::Apdu> apdus,
                          ResponseCallback cb) {
  for (size_t i = 0; i < apdus.size(); ++i) {
    // Attach the caller's callback to the last APDU only.
    ResponseCallback callback =
        (i == apdus.size() - 1 ? std::move(cb) : nullptr);
    CommandApdu apdu(static_cast<ApduClass>(apdus[i].cla()),
                     static_cast<ApduInstruction>(apdus[i].ins()),
                     extended_apdu_supported_);
    apdu.AddData(apdus[i].data());
    tx_queue_.push_back(
        {std::make_unique<ApduTxInfo>(std::move(apdu), std::move(callback)),
         AllocateId(), QmiUimCommand::kSendApdu});
  }
  // Begin transmitting if we are not already processing a transaction.
  if (!pending_response_type) {
    TransmitFromQueue();
  }
}
// Always reports the SIM as valid after a profile enable; reacquiring the
// logical channel happens as a side effect.
bool ModemQrtr::IsSimValidAfterEnable() {
  // This function is called by the lpa after profile enable.
  ReacquireChannel();
  return true;
}
// Always reports the SIM as valid after a profile disable; reacquiring the
// logical channel happens as a side effect.
bool ModemQrtr::IsSimValidAfterDisable() {
  // This function is called by the lpa after profile disable.
  ReacquireChannel();
  return true;
}
// Starts the QMI UIM service and queues the initial reset / slot query /
// channel-open sequence. On StartService failure, schedules a retry.
void ModemQrtr::Initialize(EuiccManagerInterface* euicc_manager) {
  CHECK(current_state_ == State::kUninitialized);
  // Initialization succeeds only if our active sim slot has an esim
  VLOG(1) << "Trying to initialize channel to eSIM";
  euicc_manager_ = euicc_manager;
  // StartService should result in a received QRTR_TYPE_NEW_SERVER
  // packet. Don't send other packets until that occurs.
  if (!socket_->StartService(kQmiUimService, 1, 0)) {
    LOG(ERROR) << "Failed starting UIM service during ModemQrtr initialization";
    RetryInitialization();
    return;
  }
  current_state_.Transition(State::kInitializeStarted);
  // Note: we use push_front so that SendApdus could be called prior to a
  // successful initialization.
  tx_queue_.push_front({std::unique_ptr<TxInfo>(), AllocateId(),
                        QmiUimCommand::kOpenLogicalChannel});
  // Request initial info about SIM slots.
  // TODO(crbug.com/1085825) Add support for getting indications so that this
  // info can get updated.
  tx_queue_.push_front(
      {std::make_unique<TxInfo>(), AllocateId(), QmiUimCommand::kGetSlots});
  // Pushed last so it runs first: reset precedes the slot query and open.
  tx_queue_.push_front(
      {std::unique_ptr<TxInfo>(), AllocateId(), QmiUimCommand::kReset});
}
// Re-opens the logical channel after a profile state change. No-op unless we
// are currently in the ready-to-send state.
void ModemQrtr::ReacquireChannel() {
  if (current_state_ != State::kSendApduReady) {
    return;
  }
  LOG(INFO) << "Reacquiring Channel";
  current_state_.Transition(State::kUimStarted);
  channel_ = kInvalidChannel;
  // push_front so these run before any queued APDUs; reset precedes the open.
  tx_queue_.push_front({std::unique_ptr<TxInfo>(), AllocateId(),
                        QmiUimCommand::kOpenLogicalChannel});
  tx_queue_.push_front(
      {std::unique_ptr<TxInfo>(), AllocateId(), QmiUimCommand::kReset});
}
// Schedules another Initialize() attempt after kInitRetryDelay.
void ModemQrtr::RetryInitialization() {
  VLOG(1) << "Reprobing for eSIM in " << kInitRetryDelay.InSeconds()
          << " seconds";
  base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
      FROM_HERE,
      base::Bind(&ModemQrtr::Initialize, base::Unretained(this),
                 euicc_manager_),
      kInitRetryDelay);
}
// Completes initialization: requires that the logical channel was opened,
// otherwise shuts down and schedules a retry.
void ModemQrtr::FinalizeInitialization() {
  if (current_state_ != State::kLogicalChannelOpened) {
    VLOG(1) << "Could not open logical channel to eSIM";
    Shutdown();
    RetryInitialization();
    return;
  }
  LOG(INFO) << "ModemQrtr initialization successful. eSIM found.";
  current_state_.Transition(State::kSendApduReady);
  // TODO(crbug.com/1117582) Set this based on whether or not Extended Length
  // APDU is supported.
  extended_apdu_supported_ = false;
}
// Stops the UIM service unless initialization never progressed past start,
// then returns to the uninitialized state.
void ModemQrtr::Shutdown() {
  if (current_state_ != State::kUninitialized &&
      current_state_ != State::kInitializeStarted) {
    socket_->StopService(kQmiUimService, 1, 0);
  }
  current_state_.Transition(State::kUninitialized);
}
// Returns the next QMI transaction id; always odd and never 0.
uint16_t ModemQrtr::AllocateId() {
  // transaction id cannot be 0, but when incrementing by 1, an overflow will
  // result in this method at some point returning 0. Incrementing by 2 when
  // transaction_id is initialized as an odd number guarantees us that this
  // method will never return 0 without special-casing the overflow.
  current_transaction_id_ += 2;
  return current_transaction_id_;
}
/////////////////////////////////////
// Transmit method implementations //
/////////////////////////////////////
// Transmits the front element of |tx_queue_|, dispatching on its command type.
// Does nothing while a response is pending, QMI is disabled, or the queue is
// empty. Most elements are popped immediately; kSwitchSlot and kSendApdu are
// popped later by their response handling.
void ModemQrtr::TransmitFromQueue() {
  if (tx_queue_.empty() || pending_response_type || qmi_disabled_) {
    return;
  }
  bool should_pop = true;
  switch (tx_queue_[0].uim_type_) {
    case QmiUimCommand::kReset:
      uim_reset_req reset_request;
      SendCommand(QmiUimCommand::kReset, tx_queue_[0].id_, &reset_request,
                  uim_reset_req_ei);
      break;
    case QmiUimCommand::kSwitchSlot:
      // Don't pop since we need to update the inactive euicc if SwitchSlot
      // succeeds
      should_pop = false;
      TransmitQmiSwitchSlot(&tx_queue_[0]);
      break;
    case QmiUimCommand::kGetSlots:
      uim_get_slots_req slots_request;
      SendCommand(QmiUimCommand::kGetSlots, tx_queue_[0].id_, &slots_request,
                  uim_get_slots_req_ei);
      break;
    case QmiUimCommand::kOpenLogicalChannel:
      TransmitQmiOpenLogicalChannel(&tx_queue_[0]);
      current_state_.Transition(State::kLogicalChannelPending);
      break;
    case QmiUimCommand::kSendApdu:
      // kSendApdu element will be popped off the queue after the response has
      // been entirely received. This occurs within |ReceiveQmiSendApdu|.
      should_pop = false;
      TransmitQmiSendApdu(&tx_queue_[0]);
      break;
    default:
      LOG(ERROR) << "Unexpected QMI UIM type in ModemQrtr tx queue";
  }
  if (should_pop) {
    tx_queue_.pop_front();
  }
}
// Sends the QMI SwitchSlot request described by |tx_element|. If the requested
// physical slot is already active, drops the request and transmits the next
// queued transaction instead.
void ModemQrtr::TransmitQmiSwitchSlot(TxElement* tx_element) {
  DCHECK(tx_element && tx_element->uim_type_ == QmiUimCommand::kSwitchSlot);
  // Use the passed element (always the queue front) for consistency with the
  // other Transmit* helpers; previously tx_queue_[0] was read directly and
  // the parameter was ignored.
  auto switch_slot_tx_info =
      dynamic_cast<SwitchSlotTxInfo*>(tx_element->info_.get());
  // Slot switching takes time, thus switch slots only when absolutely necessary
  if (!stored_active_slot_ ||
      stored_active_slot_.value() != switch_slot_tx_info->physical_slot_) {
    uim_switch_slot_req switch_slot_request;
    switch_slot_request.physical_slot = switch_slot_tx_info->physical_slot_;
    switch_slot_request.logical_slot = switch_slot_tx_info->logical_slot_;
    SendCommand(QmiUimCommand::kSwitchSlot, tx_element->id_,
                &switch_slot_request, uim_switch_slot_req_ei);
  } else {
    LOG(INFO) << "Requested slot is already active";
    tx_queue_.pop_front();
    TransmitFromQueue();
  }
}
// Sends a QMI OpenLogicalChannel request for the ISD-R application (kAidIsdr)
// on the current logical slot.
void ModemQrtr::TransmitQmiOpenLogicalChannel(TxElement* tx_element) {
  DCHECK(tx_element &&
         tx_element->uim_type_ == QmiUimCommand::kOpenLogicalChannel);
  uim_open_logical_channel_req request;
  request.slot = logical_slot_;
  request.aid_valid = true;
  request.aid_len = kAidIsdr.size();
  std::copy(kAidIsdr.begin(), kAidIsdr.end(), request.aid);
  SendCommand(QmiUimCommand::kOpenLogicalChannel, tx_element->id_, &request,
              uim_open_logical_channel_req_ei);
}
// Sends the next fragment of the queued APDU over the previously opened
// logical channel (|channel_|). Fragmentation state lives in the ApduTxInfo
// attached to the queue element; GetNextFragment presumably advances an
// internal cursor so repeated calls emit successive fragments — confirm in
// the Apdu implementation.
void ModemQrtr::TransmitQmiSendApdu(TxElement* tx_element) {
  DCHECK(tx_element && tx_element->uim_type_ == QmiUimCommand::kSendApdu);
  uim_send_apdu_req request;
  request.slot = logical_slot_;
  request.channel_id_valid = true;
  request.channel_id = channel_;
  uint8_t* fragment;
  ApduTxInfo* apdu = static_cast<ApduTxInfo*>(tx_element->info_.get());
  size_t fragment_size = apdu->apdu_.GetNextFragment(&fragment);
  request.apdu_len = fragment_size;
  std::copy(fragment, fragment + fragment_size, request.apdu);
  SendCommand(QmiUimCommand::kSendApdu, tx_element->id_, &request,
              uim_send_apdu_req_ei);
}
// Encodes |c_struct| (described by |ei|) as a QMI request of the given
// |type|/|id| and sends it over the QRTR socket. Returns true on success.
// Refuses to send while a response is outstanding, while the state machine
// is in a non-sending state, or (for APDUs) before kSendApduReady.
//
// Fixes:
//  - qmi_encode_message reports failure with a negative value; the result
//    was stored in a size_t, making `len < 0` always false (dead error
//    check). Use a signed ssize_t.
//  - pending_response_type was set before encoding/sending, so a failed
//    encode or send left a phantom "pending response" that permanently
//    blocked the tx queue. Record it only after the send succeeds.
bool ModemQrtr::SendCommand(QmiUimCommand type,
                            uint16_t id,
                            void* c_struct,
                            qmi_elem_info* ei) {
  if (!socket_->IsValid()) {
    LOG(ERROR) << "ModemQrtr socket is invalid!";
    return false;
  }
  if (pending_response_type) {
    LOG(ERROR) << "QRTR tried to send buffer while awaiting a qmi response";
    return false;
  }
  if (!current_state_.CanSend()) {
    LOG(ERROR) << "QRTR tried to send buffer in a non-sending state: "
               << current_state_;
    return false;
  }
  if (type == QmiUimCommand::kSendApdu &&
      current_state_ != State::kSendApduReady) {
    LOG(ERROR) << "QRTR tried to send apdu in state: " << current_state_;
    return false;
  }
  std::vector<uint8_t> encoded_buffer(kBufferDataSize * 2, 0);
  qrtr_packet packet;
  packet.data = encoded_buffer.data();
  packet.data_len = encoded_buffer.size();
  // Signed so the failure check below is meaningful.
  ssize_t len = qmi_encode_message(
      &packet, QMI_REQUEST, static_cast<uint16_t>(type), id, c_struct, ei);
  if (len < 0) {
    LOG(ERROR) << "Failed to encode QMI UIM request: "
               << static_cast<uint16_t>(type);
    return false;
  }
  LOG(INFO) << "ModemQrtr sending transaction type "
            << static_cast<uint16_t>(type)
            << " with data (size : " << packet.data_len
            << ") : " << base::HexEncode(packet.data, packet.data_len);
  int success = socket_->Send(packet.data, packet.data_len,
                              reinterpret_cast<void*>(&metadata_));
  if (success < 0) {
    LOG(ERROR) << "qrtr_sendto failed";
    return false;
  }
  // All hermes initiated qmi messages expect a response; record the
  // outstanding type only once the request is actually on the wire.
  pending_response_type = type;
  return true;
}
////////////////////////////////////
// Receive method implementations //
////////////////////////////////////
// Decodes one raw QRTR packet out of |buffer_| and dispatches it by packet
// type: NEW_SERVER packets for the UIM service latch the destination
// node/port into |metadata_|; DATA packets carry QMI payloads and are
// forwarded to ProcessQmiPacket; all other control packets are ignored.
// Afterwards, if no QMI response is outstanding, the next queued request is
// transmitted.
void ModemQrtr::ProcessQrtrPacket(uint32_t node, uint32_t port, int size) {
  sockaddr_qrtr qrtr_sock;
  qrtr_sock.sq_family = AF_QIPCRTR;
  qrtr_sock.sq_node = node;
  qrtr_sock.sq_port = port;
  qrtr_packet pkt;
  int ret = qrtr_decode(&pkt, buffer_.data(), size, &qrtr_sock);
  if (ret < 0) {
    LOG(ERROR) << "qrtr_decode failed";
    return;
  }
  switch (pkt.type) {
    case QRTR_TYPE_NEW_SERVER:
      VLOG(1) << "Received NEW_SERVER QRTR packet";
      // Only latch onto the UIM service while no logical channel is open
      // (channel_ == kInvalidChannel).
      if (pkt.service == kQmiUimService && channel_ == kInvalidChannel) {
        current_state_.Transition(State::kUimStarted);
        metadata_.node = pkt.node;
        metadata_.port = pkt.port;
      }
      break;
    case QRTR_TYPE_DATA:
      VLOG(1) << "Received data QRTR packet";
      ProcessQmiPacket(pkt);
      break;
    case QRTR_TYPE_DEL_SERVER:
    case QRTR_TYPE_HELLO:
    case QRTR_TYPE_BYE:
    case QRTR_TYPE_DEL_CLIENT:
    case QRTR_TYPE_RESUME_TX:
    case QRTR_TYPE_EXIT:
    case QRTR_TYPE_PING:
    case QRTR_TYPE_NEW_LOOKUP:
    case QRTR_TYPE_DEL_LOOKUP:
      LOG(INFO) << "Received QRTR packet of type " << pkt.type << ". Ignoring.";
      break;
    default:
      LOG(WARNING) << "Received QRTR packet but did not recognize packet type "
                   << pkt.type << ".";
  }
  // If we cannot yet send another request, it is because we are waiting for a
  // response. After the response is received and processed, the next request
  // will be sent.
  if (!pending_response_type) {
    TransmitFromQueue();
  }
}
// Decodes the QMI header from |packet| and routes the message to the
// matching Receive* handler. After handling, verifies the message matches
// the outstanding request type recorded by SendCommand, then clears
// |pending_response_type| so the tx queue can make progress.
void ModemQrtr::ProcessQmiPacket(const qrtr_packet& packet) {
  uint32_t qmi_type;
  if (qmi_decode_header(&packet, &qmi_type) < 0) {
    LOG(ERROR) << "QRTR received invalid QMI packet";
    return;
  }
  VLOG(1) << "Received QMI message of type: " << qmi_type;
  switch (qmi_type) {
    case QmiUimCommand::kReset:
      VLOG(1) << "Ignoring received RESET packet";
      break;
    case QmiUimCommand::kSwitchSlot:
      ReceiveQmiSwitchSlot(packet);
      break;
    case QmiUimCommand::kGetSlots:
      ReceiveQmiGetSlots(packet);
      break;
    case QmiUimCommand::kOpenLogicalChannel:
      ReceiveQmiOpenLogicalChannel(packet);
      // A completed open-channel exchange is the final step of setup.
      if (!current_state_.IsInitialized()) {
        FinalizeInitialization();
      }
      break;
    case QmiUimCommand::kSendApdu:
      ReceiveQmiSendApdu(packet);
      break;
    default:
      // Unknown type: return early so |pending_response_type| stays set for
      // the response we are actually waiting for.
      LOG(WARNING) << "Received QMI packet of unknown type: " << qmi_type;
      return;
  }
  if (!pending_response_type) {
    LOG(ERROR) << "Received unexpected QMI response. No pending response.";
    return;
  }
  // Mismatches are logged but the pending marker is cleared regardless so
  // the queue does not stall.
  if (pending_response_type != qmi_type)
    LOG(ERROR) << "Received unexpected QMI response. Expected: "
               << pending_response_type->ToString();
  pending_response_type.reset();
}
// Handles a GET_SLOTS response. For every reported physical slot it records
// presence/eUICC/active status and notifies |euicc_manager_| so its slot
// bookkeeping matches the modem. The active physical slot (1-indexed) is
// cached in |stored_active_slot_| and its logical slot id in
// |logical_slot_| for later switch-slot requests.
void ModemQrtr::ReceiveQmiGetSlots(const qrtr_packet& packet) {
  QmiUimCommand cmd(QmiUimCommand::kGetSlots);
  uim_get_slots_resp resp;
  unsigned int id;
  if (qmi_decode_message(&resp, &id, &packet, QMI_RESPONSE, cmd,
                         uim_get_slots_resp_ei) < 0) {
    LOG(ERROR) << "Failed to decode QMI UIM response: " << cmd.ToString();
    return;
  } else if (!CheckMessageSuccess(cmd, resp.result)) {
    return;
  }
  if (!resp.status_valid || !resp.info_valid) {
    LOG(ERROR) << "QMI UIM response for " << cmd.ToString()
               << " contained invalid slot info";
    return;
  }
  CHECK(euicc_manager_);
  bool logical_slot_found = false;
  // NOTE(review): iterating up to max(status_len, info_len) reads
  // resp.status[i] / resp.info[i] past the shorter of the two arrays when
  // the lengths differ — confirm the modem guarantees equal lengths, or
  // this should arguably be std::min.
  uint8_t max_len = std::max(resp.status_len, resp.info_len);
  for (uint8_t i = 0; i < max_len; ++i) {
    bool is_present = (resp.status[i].physical_card_status ==
                       uim_physical_slot_status::kCardPresent);
    bool is_euicc = resp.info[i].is_euicc;
    bool is_active = (resp.status[i].physical_slot_state ==
                      uim_physical_slot_status::kSlotActive);
    if (is_active) {
      // Physical slots are addressed 1-indexed elsewhere (see switch-slot).
      stored_active_slot_ = i + 1;
      if (!logical_slot_found) {
        // This is the logical slot we grab when we perform a switch slot
        logical_slot_ = resp.status[i].logical_slot;
        logical_slot_found = true;
      }
    }
    if (!is_present || !is_euicc)
      euicc_manager_->OnEuiccRemoved(i + 1);
    else
      euicc_manager_->OnEuiccUpdated(
          i + 1, is_active ? EuiccSlotInfo(resp.status[i].logical_slot)
                           : EuiccSlotInfo());
  }
}
// Handles a SWITCH_SLOT response. On success, updates |euicc_manager_|'s
// bookkeeping for the newly activated slot and clears the logical-slot info
// of the previously active one, pops the switch-slot element that
// TransmitFromQueue intentionally left at the head of the queue, and
// temporarily disables QMI while the modem completes the slot switch.
void ModemQrtr::ReceiveQmiSwitchSlot(const qrtr_packet& packet) {
  QmiUimCommand cmd(QmiUimCommand::kSwitchSlot);
  uim_switch_slot_resp resp;
  unsigned int id;
  if (qmi_decode_message(&resp, &id, &packet, QMI_RESPONSE, cmd,
                         uim_switch_slot_resp_ei) < 0) {
    LOG(ERROR) << "Failed to decode QMI UIM response: " << cmd.ToString();
    return;
  }
  if (!CheckMessageSuccess(cmd, resp.result)) {
    return;
  }
  // NOTE(review): assumes the queue head is still the switch-slot element
  // and that the dynamic_cast cannot yield nullptr — confirm before relying
  // on this invariant elsewhere.
  auto switch_slot_tx_info =
      dynamic_cast<SwitchSlotTxInfo*>(tx_queue_.front().info_.get());
  euicc_manager_->OnEuiccUpdated(
      switch_slot_tx_info->physical_slot_,
      EuiccSlotInfo(switch_slot_tx_info->logical_slot_));
  // The slot that was active before the switch loses its logical slot.
  if (stored_active_slot_)
    euicc_manager_->OnEuiccUpdated(stored_active_slot_.value(),
                                   EuiccSlotInfo());
  tx_queue_.pop_front();
  // Sending QMI messages immediately after switch slot leads to QMI errors
  // since slot switching takes time. If channel reacquisition fails despite
  // this delay, we retry after kInitRetryDelay.
  DisableQmi(kSwitchSlotDelay);
}
// Handles an OPEN_LOGICAL_CHANNEL response. On success, stores the returned
// channel id in |channel_| for subsequent APDU traffic and advances the
// state machine to kLogicalChannelOpened. A QMI-level error is only VLOG'd
// because it is expected when the active slot is not an eSIM.
void ModemQrtr::ReceiveQmiOpenLogicalChannel(const qrtr_packet& packet) {
  QmiUimCommand cmd(QmiUimCommand::kOpenLogicalChannel);
  if (current_state_ != State::kLogicalChannelPending) {
    LOG(ERROR) << "Received unexpected QMI UIM response: " << cmd.ToString()
               << " in state " << current_state_;
    return;
  }
  uim_open_logical_channel_resp resp;
  unsigned int id;
  if (qmi_decode_message(&resp, &id, &packet, QMI_RESPONSE, cmd,
                         uim_open_logical_channel_resp_ei) < 0) {
    LOG(ERROR) << "Failed to decode QMI UIM response: " << cmd.ToString();
    return;
  }
  if (resp.result.result != 0) {
    VLOG(1) << cmd.ToString()
            << " Could not open channel to eSIM. This is expected if the "
               "active sim slot is not an eSIM. QMI response contained error: "
            << resp.result.error;
    return;
  }
  if (!resp.channel_id_valid) {
    LOG(ERROR) << "QMI UIM response for " << cmd.ToString()
               << " contained an invalid channel id";
    return;
  }
  channel_ = resp.channel_id;
  current_state_.Transition(State::kLogicalChannelOpened);
}
// Handles a SEND_APDU response: accumulates the (possibly fragmented)
// response payload and, once complete, hands ownership of |responses_| to
// the stored callback. On a QMI-level failure the offending APDU is dropped
// and the logical channel is reacquired.
//
// Fix: qmi_decode_message signals failure with a negative return value —
// every other decode call in this file checks `< 0`, but this one used
// `!qmi_decode_message(...)`, which treats a zero (success) result as an
// error and a negative (failure) result as success.
void ModemQrtr::ReceiveQmiSendApdu(const qrtr_packet& packet) {
  QmiUimCommand cmd(QmiUimCommand::kSendApdu);
  CHECK(tx_queue_.size());
  // Ensure that the queued element is for a kSendApdu command
  TxInfo* base_info = tx_queue_[0].info_.get();
  CHECK(base_info);
  CHECK(dynamic_cast<ApduTxInfo*>(base_info));
  // Static: accumulates response fragments across repeated invocations of
  // this handler until the full payload has arrived.
  static ResponseApdu payload;
  uim_send_apdu_resp resp;
  unsigned int id;
  ApduTxInfo* info = static_cast<ApduTxInfo*>(base_info);
  if (qmi_decode_message(&resp, &id, &packet, QMI_RESPONSE, cmd,
                         uim_send_apdu_resp_ei) < 0) {
    LOG(ERROR) << "Failed to decode QMI UIM response: " << cmd.ToString();
    return;
  }
  if (!CheckMessageSuccess(cmd, resp.result)) {
    if (info->callback_) {
      info->callback_(responses_, lpa::card::EuiccCard::kSendApduError);
      // ResponseCallback interface does not indicate a change in ownership of
      // |responses_|, but all callbacks should transfer ownership. Check for
      // sanity.
      // TODO(pholla) : Make ResponseCallback interface accept const responses_&
      // and clear responses_.
      CHECK(responses_.empty());
    }
    // Pop the apdu that caused the error.
    tx_queue_.pop_front();
    ReacquireChannel();
    return;
  }
  VLOG(2) << "Adding to payload from APDU response ("
          << resp.apdu_response_len - 2 << " bytes): "
          << base::HexEncode(resp.apdu_response, resp.apdu_response_len - 2);
  payload.AddData(resp.apdu_response, resp.apdu_response_len);
  if (payload.MorePayloadIncoming()) {
    // Make the next transmit operation be a request for more APDU data
    info->apdu_ = payload.CreateGetMoreCommand(false);
    return;
  } else if (info->apdu_.HasMoreFragments()) {
    // Send next fragment of APDU
    VLOG(1) << "Sending next APDU fragment...";
    TransmitFromQueue();
    return;
  }
  if (tx_queue_.empty() || static_cast<uint16_t>(id) != tx_queue_[0].id_) {
    LOG(ERROR) << "ModemQrtr received APDU from modem with unrecognized "
               << "transaction ID";
    return;
  }
  VLOG(1) << "Finished transaction " << tx_queue_[0].id_ / 2
          << " (id: " << tx_queue_[0].id_ << ")";
  responses_.push_back(payload.Release());
  if (info->callback_) {
    info->callback_(responses_, lpa::card::EuiccCard::kNoError);
    // ResponseCallback interface does not indicate a change in ownership of
    // |responses_|, but all callbacks should transfer ownership. Check for
    // sanity.
    CHECK(responses_.empty());
  }
  tx_queue_.pop_front();
}
// Reads one packet's worth of data from |socket| into |buffer_| and
// forwards it (with QRTR node/port metadata, when the socket is a QRTR
// socket) to ProcessQrtrPacket. Fixes the "recevied" typo in the log line.
void ModemQrtr::OnDataAvailable(SocketInterface* socket) {
  CHECK(socket == socket_.get());
  void* metadata = nullptr;
  SocketQrtr::PacketMetadata data = {0, 0};
  if (socket->GetType() == SocketInterface::Type::kQrtr) {
    metadata = reinterpret_cast<void*>(&data);
  }
  int bytes_received = socket->Recv(buffer_.data(), buffer_.size(), metadata);
  if (bytes_received < 0) {
    LOG(ERROR) << "Socket recv failed";
    return;
  }
  LOG(INFO) << "ModemQrtr received raw data (" << bytes_received
            << " bytes): " << base::HexEncode(buffer_.data(), bytes_received);
  ProcessQrtrPacket(data.node, data.port, bytes_received);
}
// Returns the stored eUICC specification version (|spec_version_|).
const lpa::proto::EuiccSpecVersion& ModemQrtr::GetCardVersion() {
  return spec_version_;
}
// Attempts to move the state machine to |value| and returns whether the
// transition was performed. Most states may only be entered from their
// immediate predecessor (value_ + 1); the exceptions are kUninitialized
// (reachable from anywhere) and kUimStarted (also reachable from
// kSendApduReady, used when the channel is reacquired after a profile
// enable/disable). Invalid transitions are logged and leave the state
// unchanged.
bool ModemQrtr::State::Transition(ModemQrtr::State::Value value) {
  bool valid_transition = false;
  switch (value) {
    case kUninitialized:
      valid_transition = true;
      break;
    case kUimStarted:
      // we reacquire the channel from kSendApduReady after profile (en/dis)able
      valid_transition =
          (value_ == kSendApduReady || value_ == kInitializeStarted);
      break;
    default:
      // Most states can only transition from the previous state.
      valid_transition = (value == value_ + 1);
  }
  if (valid_transition) {
    value_ = value;
  } else {
    LOG(ERROR) << "Cannot transition from state " << *this << " to state "
               << State(value);
  }
  return valid_transition;
}
// Temporarily blocks outgoing QMI traffic for |duration|; EnableQmi is
// scheduled to lift the block and drain the tx queue. (Log message fixed:
// the missing space made it read e.g. "for 5sseconds".)
void ModemQrtr::DisableQmi(base::TimeDelta duration) {
  qmi_disabled_ = true;
  VLOG(1) << "Blocking QMI messages for " << duration << " seconds";
  executor_->PostDelayedTask(
      FROM_HERE, base::BindOnce(&ModemQrtr::EnableQmi, base::Unretained(this)),
      duration);
}
// Re-enables QMI transmission after DisableQmi's delay and immediately
// tries to drain the tx queue.
void ModemQrtr::EnableQmi() {
  qmi_disabled_ = false;
  TransmitFromQueue();
}
} // namespace hermes
|
<gh_stars>0
package com.cjy.flb.dao;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.DaoConfig;
import com.cjy.flb.db.Eat;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
 * DAO for table "EAT".
 *
 * <p>Generated by greenDAO (see the header above — do not edit by hand).
 * Maps the {@code Eat} entity onto the "EAT" table and implements the
 * statement/cursor marshalling used by the greenDAO runtime.
 */
public class EatDao extends AbstractDao<Eat, Long> {

    public static final String TABLENAME = "EAT";

    /**
     * Properties of entity Eat.<br/>
     * Can be used for QueryBuilder and for referencing column names.
     */
    public static class Properties {
        // Arguments: ordinal (column index), Java type, property name,
        // is-primary-key, column name.
        public final static Property Id = new Property(0, Long.class, "id", true, "_id");
        public final static Property Device_uid = new Property(1, String.class, "device_uid", false, "DEVICE_UID");
        public final static Property Eat_medicine_time = new Property(2, String.class, "eat_medicine_time", false, "EAT_MEDICINE_TIME");
        public final static Property Number = new Property(3, Integer.class, "number", false, "NUMBER");
        public final static Property Taken = new Property(4, Boolean.class, "taken", false, "TAKEN");
    };

    public EatDao(DaoConfig config) {
        super(config);
    }

    public EatDao(DaoConfig config, DaoSession daoSession) {
        super(config, daoSession);
    }

    /** Creates the underlying database table. */
    public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
        String constraint = ifNotExists? "IF NOT EXISTS ": "";
        db.execSQL("CREATE TABLE " + constraint + "\"EAT\" (" + //
                "\"_id\" INTEGER PRIMARY KEY AUTOINCREMENT ," + // 0: id
                "\"DEVICE_UID\" TEXT NOT NULL ," + // 1: device_uid
                "\"EAT_MEDICINE_TIME\" TEXT," + // 2: eat_medicine_time
                "\"NUMBER\" INTEGER," + // 3: number
                "\"TAKEN\" INTEGER);"); // 4: taken
    }

    /** Drops the underlying database table. */
    public static void dropTable(SQLiteDatabase db, boolean ifExists) {
        String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"EAT\"";
        db.execSQL(sql);
    }

    /** @inheritdoc */
    @Override
    protected void bindValues(SQLiteStatement stmt, Eat entity) {
        stmt.clearBindings();
        // Bind indices are 1-based (SQLiteStatement convention); nullable
        // columns are only bound when the entity holds a value.
        Long id = entity.getId();
        if (id != null) {
            stmt.bindLong(1, id);
        }
        stmt.bindString(2, entity.getDevice_uid());
        String eat_medicine_time = entity.getEat_medicine_time();
        if (eat_medicine_time != null) {
            stmt.bindString(3, eat_medicine_time);
        }
        Integer number = entity.getNumber();
        if (number != null) {
            stmt.bindLong(4, number);
        }
        Boolean taken = entity.getTaken();
        if (taken != null) {
            // Booleans are stored as INTEGER 0/1.
            stmt.bindLong(5, taken ? 1L: 0L);
        }
    }

    /** @inheritdoc */
    @Override
    public Long readKey(Cursor cursor, int offset) {
        // Primary key column may be NULL in joined/partial result sets.
        return cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0);
    }

    /** @inheritdoc */
    @Override
    public Eat readEntity(Cursor cursor, int offset) {
        Eat entity = new Eat( //
            cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0), // id
            cursor.getString(offset + 1), // device_uid
            cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // eat_medicine_time
            cursor.isNull(offset + 3) ? null : cursor.getInt(offset + 3), // number
            cursor.isNull(offset + 4) ? null : cursor.getShort(offset + 4) != 0 // taken
        );
        return entity;
    }

    /** @inheritdoc */
    @Override
    public void readEntity(Cursor cursor, Eat entity, int offset) {
        entity.setId(cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0));
        entity.setDevice_uid(cursor.getString(offset + 1));
        entity.setEat_medicine_time(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
        entity.setNumber(cursor.isNull(offset + 3) ? null : cursor.getInt(offset + 3));
        entity.setTaken(cursor.isNull(offset + 4) ? null : cursor.getShort(offset + 4) != 0);
    }

    /** @inheritdoc */
    @Override
    protected Long updateKeyAfterInsert(Eat entity, long rowId) {
        // Propagate the auto-generated row id back onto the entity.
        entity.setId(rowId);
        return rowId;
    }

    /** @inheritdoc */
    @Override
    public Long getKey(Eat entity) {
        if(entity != null) {
            return entity.getId();
        } else {
            return null;
        }
    }

    /** @inheritdoc */
    @Override
    protected boolean isEntityUpdateable() {
        return true;
    }

}
|
-- Drop every table belonging to the security module, if present.
DROP TABLE IF EXISTS b_sec_filter_mask;
DROP TABLE IF EXISTS b_sec_iprule_incl_ip;
DROP TABLE IF EXISTS b_sec_iprule_excl_ip;
DROP TABLE IF EXISTS b_sec_iprule_incl_mask;
DROP TABLE IF EXISTS b_sec_iprule_excl_mask;
DROP TABLE IF EXISTS b_sec_iprule;
DROP TABLE IF EXISTS b_sec_session;
DROP TABLE IF EXISTS b_sec_user;
DROP TABLE IF EXISTS b_sec_redirect_url;
DROP TABLE IF EXISTS b_sec_white_list;
DROP TABLE IF EXISTS b_sec_virus;
DROP TABLE IF EXISTS b_sec_frame_mask;
DROP TABLE IF EXISTS b_security_sitecheck;
import * as React from 'react'
import { makeStyles } from '@material-ui/core/styles'
import {
List,
ListItem,
ListItemIcon,
ListItemSecondaryAction,
ListItemText,
ListSubheader,
Typography,
} from '@material-ui/core'
import { Emoji } from '@popup/components/Emoji'
import { effectLayoutStyles } from '@popup/styles/reactCss'
import { BlurSlider } from '@popup/components/BlurSlider'
import { ConfigContext } from '@popup/reducers/contexts'
import { EnumConfigAction } from '@popup/reducers/configReducer'
const useStyles = makeStyles(effectLayoutStyles)

/**
 * Popup tab with visual-effect settings. Currently exposes a blur slider
 * wired to the config reducer; the second list item is a placeholder.
 *
 * Fixes: `dispatch` added to the useCallback dependency list (the memoized
 * handler previously closed over a potentially stale dispatch), and the
 * user-facing typo "Comming..." corrected to "Coming...".
 */
export const EffectTab: React.FC<{}> = () => {
  const classes = useStyles()
  const [config, dispatch] = React.useContext(ConfigContext)
  // Memoized so BlurSlider does not re-render on every parent render.
  const changeHandler = React.useCallback((_e, value) => dispatch({
    type: EnumConfigAction.SET_BLUR,
    payload: { blur: value },
  }), [dispatch])
  return <>
    <List subheader={<ListSubheader>Settings</ListSubheader>} className={classes.root}>
      <ListItem>
        <Typography gutterBottom className={classes.text}>Blur: </Typography>
        <BlurSlider value={config?.params?.blur ?? 0} onChange={changeHandler} />
      </ListItem>
      <ListItem>
        <ListItemIcon>
          <Emoji symbol='🚧' label='building' />
        </ListItemIcon>
        <ListItemText id="switch-list-label-bluetooth" primary="Coming..." />
        <ListItemSecondaryAction>
        </ListItemSecondaryAction>
      </ListItem>
    </List>
  </>
}
# Module-level sample inputs. Note: the __main__ block at the bottom of the
# file redefines identical values before calling extract_words, so these are
# effectively unused when the file is run as a script.
input_list = ["hello", "how", "are", "you", "world"]
input_characters = "hwyl"
def extract_words(input_list, input_characters):
    """Return the words in ``input_list`` containing every character of
    ``input_characters``.

    Args:
        input_list: iterable of words to filter.
        input_characters: string whose characters must all appear in a word
            for that word to be kept. An empty string keeps every word.

    Returns:
        List of matching words, in their original order.
    """
    required = set(input_characters)
    # set.issubset accepts any iterable, so each word can be tested directly
    # without first collecting its matching characters into a list.
    return [word for word in input_list if required.issubset(word)]
if __name__ == "__main__":
    # Demo run: list the words containing all of the characters h, w, y, l.
    sample_words = ["hello", "how", "are", "you", "world"]
    sample_characters = "hwyl"
    print(extract_words(sample_words, sample_characters))
import { entityPolicy } from "plumier"
import { getRepository } from "typeorm"
import { ShopUser } from "./shops-users-entity"
// Shared predicate for both shop policies: true when the (non-deleted)
// ShopUser row `id` belongs to the authenticated user and carries `role`.
// Extracted because the two register() callbacks were identical except for
// the role string.
const checkShopRole = async (user: any, id: any, role: string) => {
    const shopUser = await getRepository(ShopUser)
        .findOne(id, { relations: ["user"], cache: true })
    return shopUser?.deleted === false && shopUser?.user.id === user?.userId && shopUser?.role === role
}

entityPolicy(ShopUser)
    .register("ShopOwner", ({ user }, id) => checkShopRole(user, id, "ShopOwner"))
    .register("ShopStaff", ({ user }, id) => checkShopRole(user, id, "ShopStaff"))
|
/* GENERATED FILE */
import { html, svg, define } from "hybrids";
// Hybrids custom-element definition for the Phosphor "calendar-x" icon
// (generated file — see header). Each property below is a reactive hybrids
// property; render() emits one <svg> whose content is selected by `weight`.
const PhCalendarX = {
  color: "currentColor",
  size: "1em",
  weight: "regular",
  mirrored: false,
  // Exactly one of the ${weight === ...} branches renders; the others
  // evaluate to false and contribute nothing to the template.
  render: ({ color, size, weight, mirrored }) => html`
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="${size}"
      height="${size}"
      fill="${color}"
      viewBox="0 0 256 256"
      transform=${mirrored ? "scale(-1, 1)" : null}
    >
      ${weight === "bold" &&
      svg`<line x1="152" y1="128" x2="104" y2="176" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
      <line x1="152" y1="176" x2="104" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
      <rect x="40" y="40" width="176" height="176" rx="8" stroke-width="24" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
      <line x1="176" y1="20" x2="176" y2="40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
      <line x1="80" y1="20" x2="80" y2="40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
      <line x1="40" y1="88" x2="216" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
      ${weight === "duotone" &&
      svg`<path d="M40,88H216V48a8,8,0,0,0-8-8H48a8,8,0,0,0-8,8Z" opacity="0.2"/>
      <rect x="40" y="40" width="176" height="176" rx="8" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
      <line x1="176" y1="24" x2="176" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="80" y1="24" x2="80" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="40" y1="88" x2="216" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="152" y1="128" x2="104" y2="176" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="152" y1="176" x2="104" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
      ${weight === "fill" &&
      svg`<path d="M207.99414,31.9971h-24v-8a8,8,0,1,0-16,0v8h-80v-8a8,8,0,0,0-16,0v8h-24a16.01583,16.01583,0,0,0-16,16v160a16.01582,16.01582,0,0,0,16,16h160a16.01581,16.01581,0,0,0,16-16v-160A16.01582,16.01582,0,0,0,207.99414,31.9971ZM157.65039,170.34085a7.99915,7.99915,0,1,1-11.3125,11.3125L127.99414,163.3096l-18.34375,18.34375a7.99915,7.99915,0,0,1-11.3125-11.3125l18.34375-18.34375L98.33789,133.65335a7.99915,7.99915,0,1,1,11.3125-11.3125l18.34375,18.34375,18.34375-18.34375a7.99915,7.99915,0,1,1,11.3125,11.3125L139.30664,151.9971ZM207.99414,79.9971h-160v-32h24v8a8,8,0,0,0,16,0v-8h80v8a8,8,0,1,0,16,0v-8h24Z"/>`}
      ${weight === "light" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="8" stroke-width="12" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
      <line x1="176" y1="24" x2="176" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
      <line x1="80" y1="24" x2="80" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
      <line x1="40" y1="88" x2="216" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
      <line x1="152" y1="128" x2="104" y2="176" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
      <line x1="152" y1="176" x2="104" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
      ${weight === "thin" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="8" stroke-width="8" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
      <line x1="176" y1="24" x2="176" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
      <line x1="80" y1="24" x2="80" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
      <line x1="40" y1="88" x2="216" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
      <line x1="152" y1="128" x2="104" y2="176" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
      <line x1="152" y1="176" x2="104" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
      ${weight === "regular" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="8" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
      <line x1="176" y1="24" x2="176" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="80" y1="24" x2="80" y2="56" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="40" y1="88" x2="216" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="152" y1="128" x2="104" y2="176" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <line x1="152" y1="176" x2="104" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
    </svg>
  `,
};

// Register the custom element tag and export the definition object.
define("ph-calendar-x", PhCalendarX);
export default PhCalendarX;
|
def isPalindrome(string):
    """Return True if `string` reads the same forwards and backwards.

    An empty or single-character string is considered a palindrome.
    """
    # Comparing against the reversed slice is equivalent to the classic
    # two-pointer scan: both report the first mismatched pair, if any.
    return string == string[::-1]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.