#!/bin/sh
echo "Compile class Calculator.java"
javac com/manning/sdmia/osgi/nativecode/Calculator.java
echo "Create native interface for Calculator"
javah -jni com.manning.sdmia.osgi.nativecode.Calculator
echo "Compile native library code"
gcc -I/usr/lib/jvm/java-6-sun/include -I/usr/lib/jvm/java-6-sun/include/linux -c -o Calculator.o Calculator.c
echo "Create native library"
gcc -I/usr/lib/jvm/java-6-sun/include -I/usr/lib/jvm/java-6-sun/include/linux -shared -o libmathematical.so Calculator.o
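# At runtime the JVM loads libmathematical.so via System.loadLibrary("mathematical")
# (illustrative note, not part of the original build: the directory holding the
# .so must be on java.library.path for the load to succeed).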
|
#!/bin/sh
set -eu
F="${0%.sh}.go"
trap "rm -f '$F~'" EXIT
exec > "$F~"
cat <<EOT
package errors
//go:generate $0
import (
"go.sancus.dev/core/errors"
)
// Code generated by $0. DO NOT EDIT.
EOT
for x in \
InvalidValue \
InvalidArgument \
MissingField \
NotImplemented \
; do
cat <<EOT
func Err$x(s string, args ...interface{}) error {
return errors.Err$x(s, args...)
}
func As${x}Error(err error, s string, args ...interface{}) error {
return errors.As${x}Error(err, s, args...)
}
EOT
done
if ! diff -u "$F" "$F~" >&2; then
mv "$F~" "$F"
fi
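# Note (illustrative): for each name in the list above, the heredoc emits a
# matching ErrX/AsXError wrapper pair (e.g. ErrInvalidValue); the script can be
# run directly or through `go generate`, and the diff/mv at the end only
# replaces the generated .go file when the output actually changed.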
|
class ChargebeeParse
TIMESTAMP_FIELDS = [
'started_at', 'activated_at', 'next_billing_at',
'created_at', 'updated_at', 'current_term_start',
'current_term_end', 'cancelled_at', 'trial_start', 'trial_end', 'pause_date'
]
attr_accessor :subscription, :customer, :event_type, :message # :plan reader is defined as a memoized method below
def initialize(params)
self.subscription = params.dig("content", "subscription") || {}
self.message = []
parse_subscription_timestamps
self.customer = params.dig("content", "customer") || {}
self.event_type = params["event_type"]
end
def maybe_update_subscription_and_customer
if subscription_stale?
self.message << "#{self.class}: Retrieved subscription and customer via ChargeBee gem."
self.subscription = ChargeBee::Subscription.retrieve(self.subscription["id"]).subscription.as_json
parse_subscription_timestamps
self.customer = ChargeBee::Customer.retrieve(self.subscription["customer_id"]).customer.as_json
else
self.message << "#{self.class}: Subscription updated within the last 3 seconds; skipping subscription & customer fetching."
end
end
def licensed_domains
domains = self.subscription["cf_licensed_domains"] || ""
domains.split(" ")
end
def id
@id ||= Digest::FNV.calculate([created_from_ip,subscription["id"]].join(''), 30).to_s.rjust(8,"0")
end
def license_secret(seed)
Digest::SHA256.hexdigest([self.id, seed].join(''))
end
def created_from_ip
IPAddr.new(subscription["created_from_ip"].split(",").first).to_i if subscription["created_from_ip"]
end
def plan_id
subscription["plan_id"]
end
def subscription_quantity
subscription["plan_quantity"]
end
def coupon_strings
(subscription["coupons"] || []).map{|v| [v["coupon_id"], v["coupon_code"]]}.flatten.compact.uniq
end
def imageflow_addon_present?
(subscription["addons"] || []).map{|v| v["id"]}.include?("promotional-imageflow-license")
end
def resizer_present?
features.include?("R_Performance") || features.include?("R4Performance")
end
def imageflow_present?
imageflow_addon_present? || features.include?("Imageflow")
end
def extended_features
if features.kind_of?(Array)
features + (imageflow_addon_present? ? ["Imageflow"] : [])
else
features + (imageflow_addon_present? ? " Imageflow" : "")
end
end
def plan
@plan ||= ChargeBee::Plan.retrieve(plan_id).plan
end
def plan_cores
plan.meta_data.fetch(:cores)
end
def restrictions
plan.meta_data[:restrictions]
end
def kind
plan.meta_data.fetch(:kind)
end
def features
plan.meta_data.fetch(:features)
end
def network_grace_minutes
plan.meta_data.fetch(:network_grace_minutes)
end
def listed_domains_min
plan.meta_data.fetch(:listed_domains_min)
end
def listed_domains_max
plan.meta_data.fetch(:listed_domains_max)
end
def product
plan.invoice_name
end
def is_public
plan.meta_data.fetch(:is_public)
end
def subscription_metadata
subscription["meta_data"]
end
def subscription_grace_minutes
plan.meta_data.fetch(:subscription_grace_minutes, 20160)
end
# for License Text
def issued
subscription["started_at"]
end
def term_end_guess
return subscription["cancelled_at"] if subscription["cancelled_at"]
return subscription["current_term_end"] if subscription["current_term_end"]
return subscription["trial_end"] if subscription["trial_end"]
end
def resizer_expires_on
return if cancelled_after_3_years? || has_perpetual_addon?
term_end_guess.advance(minutes: subscription_grace_minutes)
end
def imageflow_expires_on
term_end_guess.advance(minutes: subscription_grace_minutes)
end
def customer_email
customer["email"]
end
def subscription_expiration_date
subscription["current_term_end"]
end
def owner
customer["company"] || [
customer["first_name"],
customer["last_name"]
].join(" ")
end
def subscription_updated_at
subscription["updated_at"]
end
def domains_required?
kind == 'per-core-domain'
end
def domains_under_min?
licensed_domains.length < listed_domains_min
end
def domains_over_max?
licensed_domains.length > listed_domains_max
end
def cancelled_after_3_years?
three_years = subscription['created_at'] + 3.years
subscription['status'] == 'cancelled' && subscription['cancelled_at'] > three_years
end
def has_perpetual_addon?
subscription["cf_perpetual"].to_s.strip.downcase == "true"
end
private
def parse_subscription_timestamps
TIMESTAMP_FIELDS.each do |field|
subscription[field] = parse_date(subscription[field])
end
end
def parse_date(object)
object.present? ? Time.zone.at(Integer(object)).to_datetime : nil
end
def subscription_stale?
subscription_updated_at < Time.zone.now - 3.seconds
end
end
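# A minimal usage sketch (hypothetical webhook payload; assumes the ChargeBee,
# FNV digest, and ActiveSupport dependencies used above are loaded):
#
#   params = {
#     "event_type" => "subscription_created",
#     "content" => {
#       "subscription" => { "id" => "sub_123", "updated_at" => Time.now.to_i },
#       "customer" => { "email" => "user@example.com" }
#     }
#   }
#   parsed = ChargebeeParse.new(params)
#   parsed.maybe_update_subscription_and_customer
#   parsed.licensed_domains # => []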
|
import React from 'react'
import styled from 'styled-components'
const StyledText = styled.blockquote`
margin: 20px;
p {
margin-top: 0;
border-left: 5px solid ${({ theme }) => theme.colours.grey[700]};
padding-left: 15px;
font-style: italic;
font-size: ${({ theme }) => theme.fontSize['2xl']};
color: ${({ theme }) => theme.colours.grey[700]};
word-break: break-word;
}
`
export const Blockquote = ({ children }) => {
return <StyledText>{children}</StyledText>
}
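// A minimal usage sketch (hypothetical copy; assumes a styled-components
// ThemeProvider supplying theme.colours.grey and theme.fontSize['2xl'] is mounted above):
export const ExampleQuote = () => (
  <Blockquote>
    <p>Long quotations wrap and break words instead of overflowing their container.</p>
  </Blockquote>
)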
|
def sort_sequence(lst):
    # Bubble sort: each pass bubbles the largest remaining value up to index i.
    for i in range(len(lst)-1, 0, -1):
        for j in range(i):
            if lst[j] > lst[j+1]:
                # Swap adjacent out-of-order elements (tuple assignment).
                lst[j], lst[j+1] = lst[j+1], lst[j]
    return lst
print(sort_sequence([5, 1, 9, 2, 6, 3])) |
public static void sortIntegers(int[] list) {
int temp;
// outer loop: each pass bubbles the largest remaining value to the end
for (int i = 0; i < list.length-1; i++) {
// inner loop: walk only the still-unsorted prefix
for (int j = 0; j < list.length-1-i; j++) {
// check if current number is bigger than next
if (list[j] > list[j+1]) {
// swap the numbers
temp = list[j];
list[j] = list[j+1];
list[j+1] = temp;
}
}
}
} |
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.domain.crmsfa.teams;
import org.opentaps.base.entities.SalesTeamRoleSecurity;
import org.opentaps.base.entities.SecurityGroup;
import org.opentaps.foundation.entity.EntityNotFoundException;
import org.opentaps.foundation.repository.RepositoryException;
/**
* A <code>TeamMember</code> with attribute from a specific <code>Team</code>.
*/
public class TeamMemberInTeam extends TeamMember {
private String teamPartyId;
private String securityGroupId;
private Team team;
private SecurityGroup securityGroup;
private SalesTeamRoleSecurity salesTeamRoleSecurity;
/**
* Sets the <code>Team</code> ID where the TeamMember is in.
* @param teamPartyId a <code>String</code> value
*/
public void setTeamPartyId(String teamPartyId) {
this.teamPartyId = teamPartyId;
}
/**
* Gets the <code>Team</code> ID where the TeamMember is in.
* @return a <code>String</code> value
*/
public String getTeamPartyId() {
return teamPartyId;
}
/**
* Sets the <code>SecurityGroup</code> ID of the TeamMember in the Team.
* @param securityGroupId a <code>String</code> value
*/
public void setSecurityGroupId(String securityGroupId) {
this.securityGroupId = securityGroupId;
}
/**
* Gets the <code>SecurityGroup</code> ID of the TeamMember in the Team.
* @return a <code>String</code> value
*/
public String getSecurityGroupId() {
return securityGroupId;
}
/**
* Gets the <code>Team</code> where the TeamMember is in.
* @return a <code>Team</code> value
* @exception RepositoryException if an error occurs
* @exception EntityNotFoundException if an error occurs
*/
public Team getTeam() throws RepositoryException, EntityNotFoundException {
if (team == null) {
team = getRepository().getTeamById(teamPartyId);
}
return team;
}
/**
* Gets the <code>SecurityGroup</code> related to this member in the team.
* @return a <code>SecurityGroup</code> value
* @exception RepositoryException if an error occurs
*/
public SecurityGroup getSecurityGroup() throws RepositoryException {
if (securityGroup == null && securityGroupId != null) {
securityGroup = getRepository().getSecurityGroup(this);
}
return securityGroup;
}
/**
* Gets the <code>SalesTeamRoleSecurity</code> related to this member in the team.
* @return a <code>SalesTeamRoleSecurity</code> value
* @exception RepositoryException if an error occurs
*/
public SalesTeamRoleSecurity getSalesTeamRoleSecurity() throws RepositoryException {
if (salesTeamRoleSecurity == null && securityGroupId != null) {
salesTeamRoleSecurity = getRepository().getSalesTeamRoleSecurity(this);
}
return salesTeamRoleSecurity;
}
}
|
#!/bin/sh
cd /opt/code/ || exit 1
export PYTHONPATH=/opt/code
CMD="drum server -cd . --address 0.0.0.0:8080 --with-error-server"
if [ -n "${POSITIVE_CLASS_LABEL}" ]; then
CMD="${CMD} --positive-class-label ${POSITIVE_CLASS_LABEL}"
fi
if [ -n "${NEGATIVE_CLASS_LABEL}" ]; then
CMD="${CMD} --negative-class-label ${NEGATIVE_CLASS_LABEL}"
fi
exec ${CMD}
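# Example invocation (hypothetical image name and label values, for illustration only):
#   docker run -e POSITIVE_CLASS_LABEL=yes -e NEGATIVE_CLASS_LABEL=no -p 8080:8080 my-drum-image
# Note: ${CMD} is word-split by the shell, so label values must not contain spaces.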
|
#!/bin/bash
rustup target add x86_64-unknown-linux-musl
brew install filosottile/musl-cross/musl-cross
mkdir -p .cargo
echo '[target.x86_64-unknown-linux-musl]
linker = "x86_64-linux-musl-gcc"' > .cargo/config |
#!/bin/bash
set -Eeo pipefail
cd "$YAK_WORKSPACE"
version=3.1
echo "$version" > "$YAK_WORKSPACE/version"
url="https://ftp.gnu.org/gnu/gperf/gperf-${version}.tar.gz"
wget "$url"
tar -xf *.tar.*
cd *-*/
./configure --prefix=/usr
make
|
def reverse_list(items):
    # list.reverse() reverses in place; return the list for convenience
    items.reverse()
    return items
fruits = ["Apple", "Banana", "Carrot", "Durian"]
print(reverse_list(fruits)) |
// client/src/js/view/index.js
import { h } from 'hyperapp';
import { Rotate } from './rotate';
import { SplashScreen } from './splashscreen';
import { TopBar } from './components/topbar';
import { Stream } from './components/stream';
import { Settings } from './components/settings';
import { Manipulator } from './components/manipulator';
import { Joystick } from './components/joystick';
import { Gripper } from './components/gripper';
import { ModeChooser } from './components/modechooser';
import { Clupi } from './components/clupi';
const view = (state, actions) => (
<main>
<Rotate />
<SplashScreen state={state.showSplashScreen} />
<div id="wrapper" class="wrapper">
<TopBar state={state.telemetry} switchSettings={actions.settings.setVisibility} />
<Settings state={state} actions={actions} />
<div class="crosshair" />
<div class="dots" />
<div class="controls-box-right">
<Manipulator mode={state.mode} state={state.manipulator} action={actions.manipulator} />
<Joystick mode={state.mode} joystick={actions.joystick} motors={actions.motors} />
</div>
<div class="controls-box-left">
<Clupi state={state} actions={actions} />
<Gripper mode={state.mode} state={state.manipulator.gripper} action={actions.manipulator} />
<ModeChooser mode={state.mode} setMode={actions.setMode} />
</div>
<Stream stream={actions.stream} mode={state.mode} />
</div>
</main>
);
export default view;
|
package com.schibsted.account.example;
import android.app.Application;
import android.support.annotation.NonNull;
import com.schibsted.account.common.util.Logger;
import com.schibsted.account.session.User;
import com.schibsted.account.ui.AccountUiHook;
public class App extends Application implements AccountUiHook {
@Override
public void onLoginCompleted(@NonNull User user, @NonNull OnProceedListener onProceedListener) {
Logger.debug("You can see the UIs are closing! " + user.getUserId().getId());
onProceedListener.proceed();
}
@Override
public void onLoginAborted(OnProceedListener onProceedListener) {
onProceedListener.proceed();
}
}
|
package command
import (
"encoding/json"
"errors"
"github.com/didi/sharingan/replayer-agent/utils/protocol/pmysql/common"
"github.com/modern-go/parse"
"github.com/modern-go/parse/model"
)
// ConnectReq connect req
type ConnectReq struct {
}
func (q *ConnectReq) String() string {
data, err := json.Marshal(q)
if nil != err {
return err.Error()
}
return string(data)
}
// Map converts the receiver into a model.Map object
func (q *ConnectReq) Map() model.Map {
r := make(model.Map)
return r
}
// DecodeConnectReq decodes a com_connect request
// doc: https://dev.mysql.com/doc/internals/en/com-connect.html
func DecodeConnectReq(src *parse.Source) (*ConnectReq, error) {
common.GetPacketHeader(src)
b := src.Read1()
if b != 0x0b {
return nil, errors.New("packet isn't a connect request")
}
return &ConnectReq{}, nil
}
|
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
public class BuildingList
{
public static void main(String[] args)
{
Scanner scanner = new Scanner(System.in);
int testCases = Integer.parseInt(scanner.next());
for (int i = 0; i < testCases; i++)
{
int length = Integer.parseInt(scanner.next());
String input = scanner.next();
List<String> result = new ArrayList<String>();
combine(result, input, 0, "");
Collections.sort(result);
for (String s : result)
{
System.out.println(s);
}
}
scanner.close();
}
private static void combine(List<String> result, String input, int start,
String actual)
{
for (int i = start; i < input.length(); i++)
{
String out = actual;
actual += input.charAt(i);
result.add(actual);
// the former guard `i < input.length()` always held inside this loop, so recurse directly
combine(result, input, i + 1, out);
}
}
}
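// Sample walk-through (not part of the original): for the input string "ab",
// combine() collects every subsequence, so the program prints "a", "ab", "b"
// in sorted order.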
|
#!/bin/bash
while true
do
echo "Hello changing world"
sleep 10
done |
// Repository: Majored/mcm-js-api-wrapper
// Copyright (c) 2021 MC-Market (Mick Capital Pty. Ltd.)
// MIT License (https://github.com/MC-Market-org/js-api-wrapper/blob/main/LICENSE)
const wrapper = require("./mcm-js-api-wrapper");
const token = {
type: "Private",
value: "Find API Key @ https://www.mc-market.org/account/api",
};
// We're only listening for a specific resource in this example, but this could be expanded to cover multiple.
let resource_id = 0;
// We're only keeping a store of the last purchase ID in memory for this example, but it's likely you'd want to
// read/write this to a secondary data store (ie. a file or database).
let last_purchase_id = 0;
let pm_title = "Your recent purchase!";
let pm_message = `Hi there,
Thank you for your recent purchase of my resource.
If you need any assistance, don't hesitate to contact me.
Thanks,
- Author`;
async function init() {
// Initialise wrapper and exit if a failure occurs.
let init = await wrapper.init(token);
if (init.result === "error") {
console.log(init.error);
process.exit(0);
}
// Poll once every hour.
task();
setInterval(task, 60 * 60 * 1000);
}
async function task() {
let purchases = await wrapper.resources.purchases.list_until(resource_id, (purchase) => {
return purchase.purchase_id > last_purchase_id;
});
if (purchases.result === "error") {
console.log(purchases.error);
return;
}
if (purchases.data.length > 0) {
last_purchase_id = purchases.data[0].purchase_id;
for (const index in purchases.data) {
await on_purchase(purchases.data[index]);
}
}
}
async function on_purchase(purchase) {
let response = await wrapper.conversations.create(pm_title, pm_message, purchase.purchaser_id);
if (response.result === "success") {
console.log(`A PM has been sent to user ${purchase.purchaser_id}.`);
} else {
console.log(`Unable to create a conversation with user ${purchase.purchaser_id}.`);
console.log(response.error);
}
}
init();
|
#!/bin/sh
GREP="grep"
# Extract the directory and the program name
# takes care of symlinks
PRG="$0"
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG="`dirname "$PRG"`/$link"
fi
done
DIRNAME=`dirname "$PRG"`
PROGNAME=`basename "$PRG"`
# OS specific support (must be 'true' or 'false').
cygwin=false;
case "`uname`" in
CYGWIN*)
cygwin=true
;;
esac
# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
[ -n "$JBOSS_HOME" ] &&
JBOSS_HOME=`cygpath --unix "$JBOSS_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
# Setup JBOSS_HOME
RESOLVED_JBOSS_HOME=`cd "$DIRNAME/.."; pwd`
if [ "x$JBOSS_HOME" = "x" ]; then
# get the full path (without any relative bits)
JBOSS_HOME=$RESOLVED_JBOSS_HOME
else
SANITIZED_JBOSS_HOME=`cd "$JBOSS_HOME"; pwd`
if [ "$RESOLVED_JBOSS_HOME" != "$SANITIZED_JBOSS_HOME" ]; then
echo "WARNING JBOSS_HOME may be pointing to a different installation - unpredictable results may occur."
echo ""
fi
fi
export JBOSS_HOME
# Setup the JVM
if [ "x$JAVA" = "x" ]; then
if [ "x$JAVA_HOME" != "x" ]; then
JAVA="$JAVA_HOME/bin/java"
else
JAVA="java"
fi
fi
#JPDA options. Uncomment and modify as appropriate to enable remote debugging .
#JAVA_OPTS="-classic -Xdebug -Xnoagent -Djava.compiler=NONE -agentlib:jdwp=transport=dt_socket,address=8787,server=y,suspend=y $JAVA_OPTS"
# Setup JBoss specific properties
JAVA_OPTS="$JAVA_OPTS -Dprogram.name=wsconsume.sh"
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
JBOSS_HOME=`cygpath --path --windows "$JBOSS_HOME"`
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
fi
if [ "x$JBOSS_MODULEPATH" = "x" ]; then
JBOSS_MODULEPATH="$JBOSS_HOME/modules"
fi
# Process the JAVA_OPTS and fail the script if a java.security.manager was found
SECURITY_MANAGER_SET=`echo $JAVA_OPTS | $GREP "java\.security\.manager"`
if [ "x$SECURITY_MANAGER_SET" != "x" ]; then
echo "ERROR: The use of -Djava.security.manager has been removed. Please use the -secmgr command line argument or SECMGR=true environment variable."
exit 1
fi
# remove -secmgr from JAVA_OPTS. This flag must reside in a different location
NEW_SECURITY_MANAGER_SET=`echo $JAVA_OPTS | $GREP "-secmgr"`
if [ "x$NEW_SECURITY_MANAGER_SET" != "x" ]; then
SECMGR="true"
JAVA_OPTS=`echo $JAVA_OPTS | sed "s/-secmgr//" `
fi
# Set up the module arguments
MODULE_OPTS=""
if [ "$SECMGR" = "true" ]; then
MODULE_OPTS="-secmgr";
fi
# Execute the command
eval \"$JAVA\" $JAVA_OPTS \
-jar \""$JBOSS_HOME"/jboss-modules.jar\" \
$MODULE_OPTS \
-mp \""${JBOSS_MODULEPATH}"\" \
org.jboss.ws.tools.wsconsume \
'"$@"'
|
#!/bin/sh
# Copyright 2016-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
echo "[Configuration Deployment] Executed after instance bootstrapping"
sudo sh -c 'cat /home/ubuntu/.ssh/authorized_keys > /home/ec2-user/.ssh/authorized_keys'
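# Illustrative follow-up (an assumption, not in the original script): sshd's
# StrictModes rejects an authorized_keys file not owned by the login user,
# so an ownership fix may also be needed:
#   sudo chown ec2-user:ec2-user /home/ec2-user/.ssh/authorized_keys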
|
// Repository: wzel/GNSS_Compare
/*
* Copyright (c) 2010, <NAME>, <NAME>, Cryms sagl - Switzerland. All Rights Reserved.
*
* This file is part of goGPS Project (goGPS).
*
* goGPS is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* goGPS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with goGPS. If not, see <http://www.gnu.org/licenses/>.
*
*
*/
package com.galfins.gogpsextracts;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.TimeZone;
/**
* <p>
* Observations class
* </p>
*
* @author <NAME>, Cryms.com
*/
public class Observations implements Streamable {
SimpleDateFormat sdfHeader = getGMTdf();
DecimalFormat dfX4 = new DecimalFormat("0.0000");
private final static int STREAM_V = 1;
private Time refTime; /* Reference time of the dataset */
private int eventFlag; /* Event flag */
private ArrayList<ObservationSet> obsSet; /* sets of observations */
private int issueOfData = -1;
public int index;
/**
* The Rinex filename
*/
public String rinexFileName;
public static SimpleDateFormat getGMTdf(){
SimpleDateFormat sdfHeader = new SimpleDateFormat("dd-MMM-yy HH:mm:ss");
sdfHeader.setTimeZone( TimeZone.getTimeZone("GMT"));
return sdfHeader;
}
public Object clone(){
try{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
this.write(new DataOutputStream(baos));
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
baos.reset();
dis.readUTF();
return new Observations(dis, false);
}catch(IOException ioe){
ioe.printStackTrace();
}
return null;
}
public Observations(Time time, int flag){
this.refTime = time;
this.eventFlag = flag;
}
public Observations(DataInputStream dai, boolean oldVersion) throws IOException{
read(dai, oldVersion);
}
public void cleanObservations(){
if(obsSet != null)
for (int i=obsSet.size()-1;i>=0;i--)
if(obsSet.get(i)==null || Double.isNaN(obsSet.get(i).getPseudorange(0)))
obsSet.remove(i);
}
public int getNumSat(){
if(obsSet == null) return 0;
int nsat = 0;
for(int i=0;i<obsSet.size();i++)
if(obsSet.get(i)!=null) nsat++;
return nsat; // obsSet was already null-checked above
}
public ObservationSet getSatByIdx(int idx){
return obsSet.get(idx);
}
public ObservationSet getSatByID(Integer satID){
if(obsSet == null || satID==null) return null;
for(int i=0;i<obsSet.size();i++)
if(obsSet.get(i)!=null && obsSet.get(i).getSatID()==satID.intValue()) return obsSet.get(i);
return null;
}
public ObservationSet getSatByIDType(Integer satID, char satType){
if(obsSet == null || satID==null) return null;
for(int i=0;i<obsSet.size();i++)
if(obsSet.get(i)!=null && obsSet.get(i).getSatID()==satID.intValue() && obsSet.get(i).getSatType()==satType) return obsSet.get(i);
return null;
}
// public ObservationSet getGpsByID(char satGnss){
// String sub = String.valueOf(satGnss);
// String str = sub.substring(0, 1);
// char satType = str.charAt(0);
// sub = sub.substring(1, 3);
// Integer satID = Integer.parseInt(sub);
//
// if(gps == null || satID==null) return null;
// for(int i=0;i<gps.size();i++)
// if(gps.get(i)!=null && gps.get(i).getSatID()==satID.intValue() && gps.get(i).getSatType()==satType) return gps.get(i);
// return null;
// }
public Integer getSatID(int idx){
return getSatByIdx(idx).getSatID();
}
public char getGnssType(int idx){
return getSatByIdx(idx).getSatType();
}
public boolean containsSatID(Integer id){
return getSatByID(id) != null;
}
public boolean containsSatIDType(Integer id, Character satType){
return getSatByIDType(id, satType) != null;
}
/**
* @return the refTime
*/
public Time getRefTime() {
return refTime;
}
/**
* @param refTime the refTime to set
*/
public void setRefTime(Time refTime) {
this.refTime = refTime;
}
/**
* Epoch flag
* 0: OK
* 1: power failure between previous and current epoch
* >1: Special event
* 2: start moving antenna
* 3: new site occupation
* (end of kinem. data)
* (at least MARKER NAME record
* follows)
* 4: header information follows
* 5: external event (epoch is significant)
* 6: cycle slip records follow
* to optionally report detected
* and repaired cycle slips
* (same format as OBSERVATIONS
* records; slip instead of observation;
* LLI and signal strength blank)
*
* @return the eventFlag
*/
public int getEventFlag() {
return eventFlag;
}
/**
* @param eventFlag the eventFlag to set
*/
public void setEventFlag(int eventFlag) {
this.eventFlag = eventFlag;
}
// public void init(int nGps, int nGlo, int nSbs){
// gpsSat = new ArrayList<Integer>(nGps);
// gloSat = new ArrayList<Integer>(nGlo);
// sbsSat = new ArrayList<Integer>(nSbs);
//
// // Allocate array of observation objects
// if (nGps > 0) gps = new ObservationSet[nGps];
// if (nGlo > 0) glo = new ObservationSet[nGlo];
// if (nSbs > 0) sbs = new ObservationSet[nSbs];
// }
public void setGps(int i, ObservationSet os ){
if(obsSet==null) obsSet = new ArrayList<ObservationSet>(i+1);
if(i==obsSet.size()){
obsSet.add(os);
}else{
int c=obsSet.size();
while(c++<=i) obsSet.add(null);
obsSet.set(i,os);
}
//gps[i] = os;
//gpsSat.add(os.getSatID());
}
public int write(DataOutputStream dos) throws IOException{
dos.writeUTF(MESSAGE_OBSERVATIONS); // 5
dos.writeInt(STREAM_V); // 4
dos.writeLong(refTime==null?-1:refTime.getMsec()); // 13
dos.writeDouble(refTime==null?-1:refTime.getFraction());
dos.write(eventFlag); // 14
dos.write(obsSet==null?0:obsSet.size()); // 15
int size=19;
if(obsSet!=null){
for(int i=0;i<obsSet.size();i++){
size += ((ObservationSet)obsSet.get(i)).write(dos);
}
}
return size;
}
public String toString(){
String lineBreak = System.getProperty("line.separator");
String out= " GPS Time:"+getRefTime().getGpsTime()+" "+sdfHeader.format(new Date(getRefTime().getMsec()))+" evt:"+eventFlag+lineBreak;
for(int i=0;i<getNumSat();i++){
ObservationSet os = getSatByIdx(i);
out+="satType:"+ os.getSatType() +" satID:"+os.getSatID()+"\tC:"+fd(os.getCodeC(0))
+" cP:"+fd(os.getCodeP(0))
+" Ph:"+fd(os.getPhaseCycles(0))
+" Dp:"+fd(os.getDoppler(0))
+" Ss:"+fd(os.getSignalStrength(0))
+" LL:"+fd(os.getLossLockInd(0))
+" LL2:"+fd(os.getLossLockInd(1))
+lineBreak;
}
return out;
}
private String fd(double n){
return Double.isNaN(n)?"NaN":dfX4.format(n);
}
/* (non-Javadoc)
* @see org.gogpsproject.Streamable#read(java.io.DataInputStream)
*/
@Override
public void read(DataInputStream dai, boolean oldVersion) throws IOException {
int v=1;
if(!oldVersion) v=dai.readInt();
if(v==1){
refTime = new Time(dai.readLong(), dai.readDouble());
eventFlag = dai.read();
int size = dai.read();
obsSet = new ArrayList<ObservationSet>(size);
for(int i=0;i<size;i++){
if(!oldVersion) dai.readUTF();
ObservationSet os = new ObservationSet(dai, oldVersion);
obsSet.add(os);
}
}else{
throw new IOException("Unknown format version:"+v);
}
}
public void setIssueOfData(int iOD) {
this.issueOfData = iOD;
}
public int getIssueOfData() {
return this.issueOfData;
}
}
|
#! /bin/bash
# Copyright Kuei-chun Chen, 2022-present. All rights reserved.
go test -v -coverprofile cover.out .
go tool cover -html=cover.out -o cover.html |
#!/bin/bash
set -e
info() {
echo -e "\033[34m$@\033[m"
}
warn() {
echo -e "\033[33m$@\033[m"
}
error() {
echo -e "\033[31m$@\033[m"
}
DIR=$(pwd)
if [[ ! -L $HOME/.dotfiles ]]; then
info "---> Link dotfiles into HOME directory"
ln -s $DIR $HOME/.dotfiles
fi
info "---> Install Homebrew Packages"
./homebrew/install.sh
info "---> Enable dotfile, make symbolic link to '${HOME}' directory"
rake clean && rake setup
info "---> Install go tools"
./go/install.sh
./go/gets.sh
info "---> Install git contrib/completion scripts"
if [[ ! -d ~/.gitcontrib ]]; then
curl -L --create-dirs -o ~/.gitcontrib/git-completion.zsh \
"https://raw.github.com/git/git/master/contrib/completion/git-completion.zsh"
curl -L --create-dirs -o ~/.gitcontrib/git-prompt.sh \
"https://raw.github.com/git/git/master/contrib/completion/git-prompt.sh"
fi
# Tmux plugins
# if [[ ! -d ~/.tmux/plugins/tpm ]]; then
# info "Install tmux-plugin manager"
# git clone https://github.com/tmux-plugins/tpm ~/.tmux/plugins/tpm
# fi
|
# data_prep/wham/scripts/create_wav_2speakers.py
import os
import numpy as np
import pandas as pd
import argparse
from utils import wavwrite, read_scaled_wav, fix_length
S1_DIR = 's1'
S2_DIR = 's2'
MIX_DIR = 'mix'
FILELIST_STUB = os.path.join('data', 'mix_2_spk_filenames_{}.csv')
def main(wsj_root, wham_noise_root, output_root):
scaling_npz_stub = os.path.join(wham_noise_root, 'metadata', 'scaling_{}.npz')
for sr_str in ['16k', '8k']:
wav_dir = 'wav' + sr_str
if sr_str == '8k':
sr = 8000
downsample = True
else:
sr = 16000
downsample = False
for datalen_dir in ['max', 'min']:
for splt in ['tr', 'cv', 'tt']:
output_path = os.path.join(output_root, wav_dir, datalen_dir, splt)
s1_output_dir = os.path.join(output_path, S1_DIR)
os.makedirs(s1_output_dir, exist_ok=True)
s2_output_dir = os.path.join(output_path, S2_DIR)
os.makedirs(s2_output_dir, exist_ok=True)
mix_output_dir = os.path.join(output_path, MIX_DIR)
os.makedirs(mix_output_dir, exist_ok=True)
print('{} {} dataset, {} split'.format(wav_dir, datalen_dir, splt))
# read filenames
wsjmix_path = FILELIST_STUB.format(splt)
wsjmix_df = pd.read_csv(wsjmix_path)
# read scaling file
scaling_path = scaling_npz_stub.format(splt)
scaling_npz = np.load(scaling_path)
wsjmix_key = 'scaling_wsjmix_{}_{}'.format(sr_str, datalen_dir)
scaling_mat = scaling_npz[wsjmix_key]
for i_utt, (output_name, s1_path, s2_path) in enumerate(wsjmix_df.itertuples(index=False, name=None)):
s1 = read_scaled_wav(os.path.join(wsj_root, s1_path), scaling_mat[i_utt][0], downsample)
s2 = read_scaled_wav(os.path.join(wsj_root, s2_path), scaling_mat[i_utt][1], downsample)
s1, s2 = fix_length(s1, s2, datalen_dir)
mix = s1 + s2
wavwrite(os.path.join(mix_output_dir, output_name), mix, sr)
wavwrite(os.path.join(s1_output_dir, output_name), s1, sr)
wavwrite(os.path.join(s2_output_dir, output_name), s2, sr)
if (i_utt + 1) % 500 == 0:
print('Completed {} of {} utterances'.format(i_utt + 1, len(wsjmix_df)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--output-dir', type=str,
help='Output directory for writing wsj0-2mix 8 kHz and 16 kHz datasets.')
parser.add_argument('--wsj0-root', type=str,
help='Path to the folder containing wsj0/')
parser.add_argument('--wham-noise-root', type=str,
help='Path to the downloaded and unzipped wham folder containing metadata/')
args = parser.parse_args()
main(args.wsj0_root, args.wham_noise_root, args.output_dir)
|
// Repository: CoderXGC/learn
package com.ylesb.service.Impl;
/**
* @title: OrderServiceImpl4Listener
* @projectName springcloud-alibaba
* @description: TODO
* @author White
* @site : [www.ylesb.com]
* @date 2022/1/13 17:05
*/
import com.ylesb.dao.TxLogDao;
import com.ylesb.domain.Order;
import com.ylesb.domain.TxLog;
import org.apache.rocketmq.spring.annotation.RocketMQMessageListener;
import org.apache.rocketmq.spring.annotation.RocketMQTransactionListener;
import org.apache.rocketmq.spring.core.RocketMQLocalTransactionListener;
import org.apache.rocketmq.spring.core.RocketMQLocalTransactionState;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Service;
/**
* @className : OrderServiceImpl4Listener
* @description : [Describe the function of this class]
* @author : [XuGuangchao]
* @site : [www.ylesb.com]
* @version : [v1.0]
* @createTime : [2022/1/13 17:05]
* @updateUser : [XuGuangchao]
* @updateTime : [2022/1/13 17:05]
* @updateRemark : [Describe the changes made in this update]
*/
@Service
@RocketMQTransactionListener(txProducerGroup = "tx_producer_group")
public class OrderServiceImpl4Listener implements RocketMQLocalTransactionListener {
@Autowired
private OrderServiceImpl4 orderServiceImpl4;
@Autowired
private TxLogDao txLogDao;
// Execute the local transaction
@Override
public RocketMQLocalTransactionState executeLocalTransaction(Message message, Object o) {
String txId =(String) message.getHeaders().get("txId");
try {
Order order =(Order) o;
orderServiceImpl4.createOrder( txId,order);
return RocketMQLocalTransactionState.COMMIT;
}catch (Exception e) {
// Execution failed; roll back
return RocketMQLocalTransactionState.ROLLBACK;
}
}
// Used for transaction message check-back
@Override
public RocketMQLocalTransactionState checkLocalTransaction(Message message) {
String txId =(String) message.getHeaders().get("txId");
TxLog txLog = txLogDao.findById(txId).orElse(null);
if (txLog != null) {
// The local transaction committed successfully (createOrder persisted the TxLog)
return RocketMQLocalTransactionState.COMMIT;
}else{
return RocketMQLocalTransactionState.ROLLBACK;
}
}
}
|
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/FunkyScrollPicView/FunkyScrollPicView.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/FunkyScrollPicView/FunkyScrollPicView.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
// part2/courseinfo/src/components/Total.js
import React from "react";
const Total = ({ parts }) => {
const total = parts.reduce((sum, part) => sum + part.exercises, 0);
return (
<p>
<strong>total of {total} exercises</strong>
</p>
);
};
export default Total;
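// A minimal usage sketch (hypothetical course data shaped like the exercise):
// <Total parts={[
//   { name: 'Fundamentals of React', exercises: 10 },
//   { name: 'Using props to pass data', exercises: 7 },
// ]} />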
|
function withHost(url) {
return `//${window.location.host}${url}`;
}
function credentials() {
return "same-origin";
}
export async function fetch(url, options) {
try {
const response = await window.fetch(withHost(url), {
...options,
headers: {
"Content-Type": "application/json",
Accept: "application/json",
...options?.headers,
},
credentials: credentials(),
});
return {
response,
unauthorized: response.status === 401,
serverError: response.status >= 500,
data: response.ok ? await response.json() : null,
error:
!response.ok || response.status >= 400
? new Error("Error in response")
: null,
};
} catch (error) {
return {
error,
serverError: true,
};
}
}
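// A minimal usage sketch (hypothetical endpoint and payload, not part of the original API):
export async function createItem(name) {
  // The wrapper above adds JSON headers and same-origin credentials automatically.
  const { data, error, unauthorized, serverError } = await fetch("/api/items", {
    method: "POST",
    body: JSON.stringify({ name }),
  });
  if (error) {
    console.warn("Request failed", { unauthorized, serverError });
    return null;
  }
  return data;
}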
|
#!/bin/bash -
# Usage: ./jib-core/scripts/prepare_release.sh <release version>
set -o errexit
EchoRed() {
echo "$(tput setaf 1; tput bold)$1$(tput sgr0)"
}
EchoGreen() {
echo "$(tput setaf 2; tput bold)$1$(tput sgr0)"
}
Die() {
EchoRed "$1"
exit 1
}
DieUsage() {
Die "Usage: ./jib-core/scripts/prepare_release.sh <release version> [<post-release-version>]"
}
# Usage: CheckVersion <version>
CheckVersion() {
[[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z]+)?$ ]] || Die "Version: $1 not in ###.###.###[-XXX] format."
}
[ $# -ne 1 ] && [ $# -ne 2 ] && DieUsage
EchoGreen '===== RELEASE SETUP SCRIPT ====='
VERSION=$1
CheckVersion ${VERSION}
if [ -n "$2" ]; then
POST_RELEASE_VERSION=$2
CheckVersion ${POST_RELEASE_VERSION}
fi
if [[ $(git status -uno --porcelain) ]]; then
Die 'There are uncommitted changes.'
fi
# Runs checks and integration tests.
./gradlew :jib-core:check :jib-core:integrationTest --info --stacktrace
# Checks out a new branch for this version release (eg. 1.5.7).
BRANCH=core_release_v${VERSION}
git checkout -b ${BRANCH}
# Changes the version for release and creates the commits/tags.
echo | ./gradlew :jib-core:release -Prelease.releaseVersion=${VERSION} ${POST_RELEASE_VERSION:+"-Prelease.newVersion=${POST_RELEASE_VERSION}"}
# Pushes the release branch and tag to Github.
git push origin ${BRANCH}
git push origin v${VERSION}-core
# File a PR on Github for the new branch. Have someone LGTM it, which gives you permission to continue.
EchoGreen 'File a PR for the new release branch:'
echo https://github.com/GoogleContainerTools/jib/pull/new/${BRANCH}
EchoGreen "Merge the PR after the library is released."
|
/**
* @file
* @brief SPI execute in place driver.
*/
/* *****************************************************************************
* Copyright (C) 2016 Maxim Integrated Products, Inc., All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES
* OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Maxim Integrated
* Products, Inc. shall not be used except as stated in the Maxim Integrated
* Products, Inc. Branding Policy.
*
* The mere transfer of this software does not imply any licenses
* of trade secrets, proprietary technology, copyrights, patents,
* trademarks, maskwork rights, or any other form of intellectual
* property whatsoever. Maxim Integrated Products, Inc. retains all
* ownership rights.
*
* $Date: 2016-09-08 17:31:41 -0500 (Thu, 08 Sep 2016) $
* $Revision: 24323 $
*
**************************************************************************** */
/* **** Includes **** */
#include <stddef.h>
#include "mxc_config.h"
#include "mxc_assert.h"
#include "spix.h"
#include "spix_regs.h"
/**
* @ingroup spix
* @{
*/
/* **** Definitions **** */
#define CMD_CLOCKS 8
#define ADDR_3BYTE_CLOCKS 24
#define ADDR_4BYTE_CLOCKS 32
/***** Globals *****/
/***** Functions *****/
/******************************************************************************/
#if defined ( __GNUC__ )
#undef IAR_SPIX_PRAGMA //Make sure this is not defined for GCC
#endif
#if IAR_SPIX_PRAGMA
// IAR memory section declaration for the SPIX functions to be loaded in RAM.
#pragma section=".spix_config"
#endif
#if(MXC_SPIX_REV == 0)
#if defined ( __GNUC__ )
__attribute__ ((section(".spix_config"), noinline))
#endif /* __GNUC */
#if IAR_SPIX_PRAGMA
#pragma location=".spix_config" // IAR locate function in RAM section .spix_config
#pragma optimize=no_inline // IAR no inline optimization on this function
#endif /* IAR_PRAGMA */
static void SPIX_UpdateFBIgnore(void)
{
// Update the feedback ignore clocks
uint8_t clocks = 0;
uint8_t no_cmd_clocks = 0;
// Adjust the clocks for the command
if((MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_CMD_WIDTH) ==
MXC_S_SPIX_FETCH_CTRL_CMD_WIDTH_QUAD_IO) {
clocks += CMD_CLOCKS/4;
} else if((MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_CMD_WIDTH) ==
MXC_S_SPIX_FETCH_CTRL_CMD_WIDTH_DUAL_IO) {
clocks += CMD_CLOCKS/2;
} else {
clocks += CMD_CLOCKS;
}
// Adjust the clocks for the address
if((MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_ADDR_WIDTH) ==
MXC_S_SPIX_FETCH_CTRL_ADDR_WIDTH_QUAD_IO) {
if(MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_FOUR_BYTE_ADDR) {
clocks += ADDR_4BYTE_CLOCKS/4;
no_cmd_clocks += ADDR_4BYTE_CLOCKS/4;
} else {
clocks += ADDR_3BYTE_CLOCKS/4;
no_cmd_clocks += ADDR_3BYTE_CLOCKS/4;
}
} else if((MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_ADDR_WIDTH) ==
MXC_S_SPIX_FETCH_CTRL_ADDR_WIDTH_DUAL_IO) {
if(MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_FOUR_BYTE_ADDR) {
clocks += ADDR_4BYTE_CLOCKS/2;
no_cmd_clocks += ADDR_4BYTE_CLOCKS/2;
} else {
clocks += ADDR_3BYTE_CLOCKS/2;
no_cmd_clocks += ADDR_3BYTE_CLOCKS/2;
}
} else {
if(MXC_SPIX->fetch_ctrl & MXC_F_SPIX_FETCH_CTRL_FOUR_BYTE_ADDR) {
clocks += ADDR_4BYTE_CLOCKS;
no_cmd_clocks += ADDR_4BYTE_CLOCKS;
} else {
clocks += ADDR_3BYTE_CLOCKS;
no_cmd_clocks += ADDR_3BYTE_CLOCKS;
}
}
// Adjust for the mode clocks
clocks += ((MXC_SPIX->mode_ctrl & MXC_F_SPIX_MODE_CTRL_MODE_CLOCKS) >>
MXC_F_SPIX_MODE_CTRL_MODE_CLOCKS_POS);
// Set the FB Ignore clocks
MXC_SPIX->sck_fb_ctrl = ((MXC_SPIX->sck_fb_ctrl & ~MXC_F_SPIX_SCK_FB_CTRL_IGNORE_CLKS) |
(clocks << MXC_F_SPIX_SCK_FB_CTRL_IGNORE_CLKS_POS));
MXC_SPIX->sck_fb_ctrl = ((MXC_SPIX->sck_fb_ctrl & ~MXC_F_SPIX_SCK_FB_CTRL_IGNORE_CLKS_NO_CMD) |
(no_cmd_clocks << MXC_F_SPIX_SCK_FB_CTRL_IGNORE_CLKS_NO_CMD_POS));
}
#endif /* MXC_SPIX_REV==0 */
/******************************************************************************/
#if defined ( __GNUC__ )
__attribute__ ((section(".spix_config"), noinline))
#endif /* __GNUC */
#if IAR_SPIX_PRAGMA
#pragma location=".spix_config" // IAR locate function in RAM section .spix_config
#pragma optimize=no_inline // IAR no inline optimization on this function
#endif /* IAR_SPIX_PRAGMA */
int SPIX_ConfigClock(const sys_cfg_spix_t *sys_cfg, uint32_t baud, uint8_t sample)
{
int err;
uint32_t spix_clk, clocks;
// Check the input parameters
if(sys_cfg == NULL) {
return E_NULL_PTR;
}
// Set system level configurations
if ((err = SYS_SPIX_Init(sys_cfg, baud)) != E_NO_ERROR) {
return err;
}
// Configure the mode and baud
spix_clk = SYS_SPIX_GetFreq();
if(spix_clk <= 0) {
return E_UNINITIALIZED;
}
// Make sure that we can generate this frequency
clocks = (spix_clk / (2*baud));
if((clocks <= 0) || (clocks >= 0x10)) {
return E_BAD_PARAM;
}
// Set the baud
MXC_SPIX->master_cfg = ((MXC_SPIX->master_cfg &
~(MXC_F_SPIX_MASTER_CFG_SCK_HI_CLK | MXC_F_SPIX_MASTER_CFG_SCK_LO_CLK)) |
(clocks << MXC_F_SPIX_MASTER_CFG_SCK_HI_CLK_POS) |
(clocks << MXC_F_SPIX_MASTER_CFG_SCK_LO_CLK_POS));
if(sample != 0) {
// Use sample mode
MXC_SPIX->master_cfg = ((MXC_SPIX->master_cfg & ~MXC_F_SPIX_MASTER_CFG_SDIO_SAMPLE_POINT) |
(sample << MXC_F_SPIX_MASTER_CFG_SDIO_SAMPLE_POINT_POS));
MXC_SPIX->sck_fb_ctrl &= ~(MXC_F_SPIX_SCK_FB_CTRL_ENABLE_SCK_FB_MODE |
MXC_F_SPIX_SCK_FB_CTRL_INVERT_SCK_FB_CLK);
} else {
// Use Feedback mode
MXC_SPIX->master_cfg &= ~(MXC_F_SPIX_MASTER_CFG_SDIO_SAMPLE_POINT);
MXC_SPIX->sck_fb_ctrl |= (MXC_F_SPIX_SCK_FB_CTRL_ENABLE_SCK_FB_MODE |
MXC_F_SPIX_SCK_FB_CTRL_INVERT_SCK_FB_CLK);
#if(MXC_SPIX_REV == 0)
SPIX_UpdateFBIgnore();
#endif
}
return E_NO_ERROR;
}
/******************************************************************************/
#if defined ( __GNUC__ )
__attribute__ ((section(".spix_config"), noinline))
#endif /* __GNUC */
#if IAR_SPIX_PRAGMA
#pragma location=".spix_config" // IAR locate function in RAM section .spix_config
#pragma optimize=no_inline // IAR no inline optimization on this function
#endif /* IAR_SPIX_PRAGMA */
void SPIX_ConfigSlave(uint8_t ssel, uint8_t pol, uint8_t act_delay, uint8_t inact_delay)
{
// Set the slave select
MXC_SPIX->master_cfg = ((MXC_SPIX->master_cfg & ~MXC_F_SPIX_MASTER_CFG_SLAVE_SEL) |
(ssel << MXC_F_SPIX_MASTER_CFG_SLAVE_SEL_POS));
if(pol != 0) {
// Active high
MXC_SPIX->master_cfg &= ~(MXC_F_SPIX_MASTER_CFG_SS_ACT_LO);
} else {
// Active low
MXC_SPIX->master_cfg |= MXC_F_SPIX_MASTER_CFG_SS_ACT_LO;
}
// Set the delays
MXC_SPIX->master_cfg = ((MXC_SPIX->master_cfg & ~(MXC_F_SPIX_MASTER_CFG_ACT_DELAY |
MXC_F_SPIX_MASTER_CFG_INACT_DELAY)) |
(act_delay << MXC_F_SPIX_MASTER_CFG_ACT_DELAY_POS) |
(inact_delay << MXC_F_SPIX_MASTER_CFG_INACT_DELAY_POS));
}
/******************************************************************************/
#if defined ( __GNUC__ )
__attribute__ ((section(".spix_config"), noinline))
#endif /* __GNUC */
#if IAR_SPIX_PRAGMA
#pragma location=".spix_config" // IAR locate function in RAM section .spix_config
#pragma optimize=no_inline // IAR no inline optimization on this function
#endif /* IAR_SPIX_PRAGMA */
void SPIX_ConfigFetch(const spix_fetch_t *fetch)
{
// Configure how the SPIX fetches data
MXC_SPIX->fetch_ctrl = (((fetch->cmd << MXC_F_SPIX_FETCH_CTRL_CMD_VALUE_POS) & MXC_F_SPIX_FETCH_CTRL_CMD_VALUE) |
((fetch->cmd_width << MXC_F_SPIX_FETCH_CTRL_CMD_WIDTH_POS) & MXC_F_SPIX_FETCH_CTRL_CMD_WIDTH) |
((fetch->addr_width << MXC_F_SPIX_FETCH_CTRL_ADDR_WIDTH_POS) & MXC_F_SPIX_FETCH_CTRL_ADDR_WIDTH) |
((fetch->data_width << MXC_F_SPIX_FETCH_CTRL_DATA_WIDTH_POS) & MXC_F_SPIX_FETCH_CTRL_DATA_WIDTH) |
((fetch->addr_size << MXC_F_SPIX_FETCH_CTRL_FOUR_BYTE_ADDR_POS) & MXC_F_SPIX_FETCH_CTRL_FOUR_BYTE_ADDR));
// Set the command mode and clocks
MXC_SPIX->mode_ctrl = (((fetch->mode_clocks << MXC_F_SPIX_MODE_CTRL_MODE_CLOCKS_POS) & MXC_F_SPIX_MODE_CTRL_MODE_CLOCKS) |
(!!fetch->no_cmd_mode << MXC_F_SPIX_MODE_CTRL_NO_CMD_MODE_POS));
MXC_SPIX->mode_data = (((fetch->mode_data << MXC_F_SPIX_MODE_DATA_MODE_DATA_BITS_POS) & MXC_F_SPIX_MODE_DATA_MODE_DATA_BITS) |
MXC_F_SPIX_MODE_DATA_MODE_DATA_OE);
#if(MXC_SPIX_REV == 0)
SPIX_UpdateFBIgnore();
#endif
}
/******************************************************************************/
#if defined ( __GNUC__ )
__attribute__ ((section(".spix_config"), noinline))
#endif /* __GNUC */
#if IAR_SPIX_PRAGMA
#pragma location=".spix_config" // IAR locate function in RAM section .spix_config
#pragma optimize=no_inline // IAR no inline optimization on this function
#endif /* IAR_SPIX_PRAGMA */
int SPIX_Shutdown(mxc_spix_regs_t *spix)
{
int err;
// Clear system level configurations
if ((err = SYS_SPIX_Shutdown()) != E_NO_ERROR) {
return err;
}
return E_NO_ERROR;
}
|
// Repository: davidyu62/egovframe-runtime
/*
* Copyright 2002-2008 MOPAS(Ministry of Public Administration and Security).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.rte.fdl.filehandling;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.io.FileSystemUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.vfs2.CacheStrategy;
import org.apache.commons.vfs2.FileContent;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.FilesCache;
import org.apache.commons.vfs2.Selectors;
import org.apache.commons.vfs2.VFS;
import org.apache.commons.vfs2.auth.StaticUserAuthenticator;
import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
import org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
/**
* FilehandlingServiceTest is a test case for the File Handling Service
* @author <NAME>
*/
//@RunWith(SpringJUnit4ClassRunner.class)
//@ContextConfiguration(locations = {"classpath*:spring/context-*.xml" })
public class FilehandlingServiceTest {
private static final Logger LOGGER = LoggerFactory.getLogger(FilehandlingServiceTest.class);
private String filename = "";
private String text = "";
private String tmppath = "";
private String absoluteFilePath = "";
@Before
public void onSetUp() throws Exception {
filename = "test.txt";
text = "test입니다.";
tmppath = "tmp";
LOGGER.debug("System's temporary directory : {}", EgovFileUtil.getTmpDirectory());
absoluteFilePath = EgovFileUtil.getTmpDirectory() + "/testing.txt";
EgovFileUtil.cd(System.getProperty("user.dir"));
}
/**
* Creates a file at a specific location and, if needed, caches the created file.
*
* @throws Exception
*/
@Test
public void testCreateFile() throws Exception {
FileSystemManager manager = VFS.getManager();
FileObject baseDir = manager.resolveFile(System.getProperty("user.dir"));
final FileObject file = manager.resolveFile(baseDir, "testfolder/file1.txt");
// Delete all files
file.delete(Selectors.SELECT_FILES);
assertFalse(file.exists());
// Create the file
file.createFile();
assertTrue(file.exists());
}
/**
* Accesses an existing file at a specific location and modifies its contents.
* The file location may be given in various forms, such as an absolute or relative path.
*
* @throws Exception
*/
@Test
public void testAccessFile() throws Exception {
FileSystemManager manager = VFS.getManager();
FileObject baseDir = manager.resolveFile(System.getProperty("user.dir"));
FileObject file = manager.resolveFile(baseDir, "testfolder/file1.txt");
// Delete all files
file.delete(Selectors.SELECT_FILES);
assertFalse(file.exists());
// Create the file
file.createFile();
assertTrue(file.exists());
FileContent fileContent = file.getContent();
assertEquals(0, fileContent.getSize());
// Write to the file
String string = "test입니다.";
OutputStream os = fileContent.getOutputStream();
try {
os.write(string.getBytes());
os.flush();
} finally {
if (os != null) {
try {
os.close();
} catch (Exception ignore) {
// no-op
}
}
}
assertNotSame(0, fileContent.getSize());
// Read the file back
StringBuffer sb = new StringBuffer();
FileObject writtenFile = manager.resolveFile(baseDir, "testfolder/file1.txt");
FileContent writtenContents = writtenFile.getContent();
InputStream is = writtenContents.getInputStream();
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
String line = "";
while ((line = reader.readLine()) != null) {
sb.append(line);
}
} finally {
if (is != null) {
try {
is.close();
} catch (Exception ignore) {
// no-op
}
}
}
// Verify the file contents
assertEquals(sb.toString(), string);
}
/**
* Uses caching to load files that will be created or modified into memory,
* shortening the time needed to access them.
*
* @throws Exception
*/
@Test
public void testCaching() throws Exception {
String path = FilehandlingServiceTest.class.getResource("").getPath();
String testFolder = path + "/testfolder";
FileSystemManager manager = VFS.getManager();
FileObject scratchFolder = manager.resolveFile(testFolder);
// Delete all files under testfolder
scratchFolder.delete(Selectors.EXCLUDE_SELF);
FileObject file = manager.resolveFile(path + "/testfolder/dummy.txt");
file.createFile();
// Create a caching file system manager
DefaultFileSystemManager fs = new DefaultFileSystemManager();
fs.setFilesCache(manager.getFilesCache());
// zip, jar, tgz, tar, tbz2, file
if (!fs.hasProvider("file")) {
fs.addProvider("file", new DefaultLocalFileProvider());
}
fs.setCacheStrategy(CacheStrategy.ON_RESOLVE);
fs.init();
// Create a cached folder object
FileObject foBase2 = fs.resolveFile(testFolder);
LOGGER.debug("## scratchFolder.getName().getPath() : {}", scratchFolder.getName().getPath());
FileObject cachedFolder = foBase2.resolveFile(scratchFolder.getName().getPath());
// The file does not exist yet
FileObject[] fos = cachedFolder.getChildren();
assertFalse(contains(fos, "file1.txt"));
// Create the file
scratchFolder.resolveFile("file1.txt").createFile();
// The file now exists,
// BUT it is not yet visible through cachedFolder
fos = cachedFolder.getChildren();
assertFalse(contains(fos, "file1.txt"));
// Refresh the cached view
cachedFolder.refresh();
// Now the file is visible
fos = cachedFolder.getChildren();
assertTrue(contains(fos, "file1.txt"));
}
/**
* File creation test.
*
* @throws Exception
*/
@Test
public void testWriteFile() throws Exception {
// delete file
if (EgovFileUtil.isExistsFile(filename)) {
EgovFileUtil.delete(new File(filename));
}
EgovFileUtil.writeFile(filename, text, "UTF-8");
assertTrue(EgovFileUtil.isExistsFile(filename));
}
/**
* File creation test.
*
* @throws Exception
*/
@Test
public void testWriteFileWithAbsolutePath() throws Exception {
// delete file
if (EgovFileUtil.isExistsFile(absoluteFilePath)) {
EgovFileUtil.delete(new File(absoluteFilePath));
}
EgovFileUtil.writeFile(absoluteFilePath, text, "UTF-8");
assertTrue(EgovFileUtil.isExistsFile(absoluteFilePath));
}
/**
* File read test.
*
* @throws Exception
*/
@Test
public void testReadFile() throws Exception {
if (!EgovFileUtil.isExistsFile(filename)) {
EgovFileUtil.writeFile(filename, text, "UTF-8");
}
assertEquals(EgovFileUtil.readFile(new File(filename), "UTF-8"), text);
//LOGGER.debug(EgovFileUtil.readTextFile(filename, false));
List<String> lines = FileUtils.readLines(new File(filename), "UTF-8");
LOGGER.debug(lines.toString());
String string = lines.get(0);
assertEquals(text, string);
}
/**
* File read test.
*
* @throws Exception
*/
@Test
public void testReadFileWithAbsolutePath() throws Exception {
if (!EgovFileUtil.isExistsFile(absoluteFilePath)) {
EgovFileUtil.writeFile(absoluteFilePath, text, "UTF-8");
}
assertEquals(EgovFileUtil.readFile(new File(absoluteFilePath), "UTF-8"), text);
//LOGGER.debug(EgovFileUtil.readTextFile(filename, false));
List<String> lines = FileUtils.readLines(new File(absoluteFilePath), "UTF-8");
LOGGER.debug(lines.toString());
String string = lines.get(0);
assertEquals(text, string);
}
/**
* File copy test.
*
* @throws Exception
*/
@Test
public void testCp() throws Exception {
if (!EgovFileUtil.isExistsFile(filename)) {
EgovFileUtil.writeFile(filename, text);
}
EgovFileUtil.cp(filename, tmppath + "/" + filename);
assertEquals(
EgovFileUtil.readFile(new File(filename), "UTF-8"),
EgovFileUtil.readFile(new File(tmppath + "/" + filename), "UTF-8")
);
}
/**
* File copy test.
*
* @throws Exception
*/
@Test
public void testCpWithAbsolutePath() throws Exception {
if (!EgovFileUtil.isExistsFile(absoluteFilePath)) {
EgovFileUtil.writeFile(absoluteFilePath, text);
}
EgovFileUtil.cp(absoluteFilePath, tmppath + "/" + filename);
assertEquals(
EgovFileUtil.readFile(new File(absoluteFilePath), "UTF-8"),
EgovFileUtil.readFile(new File(tmppath + "/" + filename), "UTF-8")
);
}
/**
* File move test.
*
* @throws Exception
*/
@Test
public void testMv() throws Exception {
if (!EgovFileUtil.isExistsFile(tmppath + "/" + filename)) {
EgovFileUtil.writeFile(tmppath + "/" + filename, text);
}
EgovFileUtil.mv(tmppath + "/" + filename, tmppath + "/movedfile.txt");
assertEquals(
EgovFileUtil.readFile(new File(filename), "UTF-8"),
EgovFileUtil.readFile(new File(tmppath + "/movedfile.txt"), "UTF-8")
);
}
/**
* File touch test.
*
* @throws Exception
*/
@Test
public void testTouch() throws Exception {
String path = tmppath + "/movedfile.txt";
FileObject file = EgovFileUtil.getFileObject(path);
long setTime = EgovFileUtil.touch(path);
assertNotEquals(file.getContent().getLastModifiedTime(), setTime);
}
/**
* File extension handling test.
*
* @throws Exception
*/
@Test
public void testGetFileExtension() throws Exception {
assertTrue(EgovFileUtil.isExistsFile(filename));
assertEquals(EgovFileUtil.getFileExtension(filename), "txt");
}
/**
* File existence test.
*
* @throws Exception
*/
@Test
public void testIsExistsFile() throws Exception {
assertTrue(EgovFileUtil.isExistsFile(filename));
}
/**
* Filename stripping test.
*
* @throws Exception
*/
@Test
public void testStripFilename() throws Exception {
assertTrue(EgovFileUtil.isExistsFile(filename));
assertEquals("test", EgovFileUtil.stripFilename(filename));
}
/**
* File deletion test.
*
* @throws Exception
*/
@Test
public void testRm() throws Exception {
String tmptarget = tmppath;
if (!EgovFileUtil.isExistsFile(tmptarget)) {
EgovFileUtil.writeFile(tmptarget, text);
}
int result = EgovFileUtil.rm(tmptarget);
assertTrue(result > 0);
assertFalse(EgovFileUtil.isExistsFile(tmptarget));
}
/**
* Directory change test.
*
* @throws Exception
*/
@Test
public void testCd() throws Exception {
String path = "/Users/EGOV/";
FileName foldername = EgovFileUtil.getFileObject(path).getName();
EgovFileUtil.cd("");
String uri = EgovFileUtil.pwd().getURI();
LOGGER.debug("EgovFileUtil.pwd().getURI() : {}", uri);
LOGGER.debug("foldername.getURI() : {}", foldername.getURI());
assertFalse(foldername.getURI().equals(uri));
EgovFileUtil.cd(path);
uri = EgovFileUtil.pwd().getURI();
LOGGER.debug("EgovFileUtil.pwd().getURI() : {}", uri);
LOGGER.debug("foldername.getURI() : {}", foldername.getURI());
assertEquals(foldername.getURI(), EgovFileUtil.pwd().getURI());
}
/**
* Stream test.
*
* @throws Exception
*/
@Test
public void testIOUtils() throws Exception {
InputStream in = new URL("http://jakarta.apache.org").openStream();
try {
assertFalse(IOUtils.toString(in).equals(""));
} finally {
IOUtils.closeQuietly(in);
}
}
/**
* FileSystemUtils test.
*
* @throws Exception
*/
@Test
public void testFileSystemUtils() throws Exception {
try {
long freeSpace = FileSystemUtils.freeSpaceKb("C:/");
assertTrue(freeSpace > 0);
} catch (Exception e) {
LOGGER.error("error: {}", e.getCause());
}
}
/**
* Grep test.
*
* @throws Exception
*/
@Test
public void testGrep() throws Exception {
try {
String[] search = {"abcdefg", "efghijklmn", "12", "3"};
List<String> lists = EgovFileUtil.grep(search, "\\d{1,2}");
for (Iterator<String> it = lists.iterator(); it.hasNext();) {
LOGGER.info(it.next());
}
lists = EgovFileUtil.grep(new File("pom.xml"), "org.egovframe.rte");
for (Iterator<String> it = lists.iterator(); it.hasNext();) {
LOGGER.info(it.next());
}
} catch (Exception e) {
LOGGER.error("error: {}", e.getCause());
}
}
/**
* Line iterator test.
*
* @throws Exception
*/
@Test
public void testLineIterator() throws Exception {
String[] string = {
"<project xmlns=\"http://maven.apache.org/POM/4.0.0\"",
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"",
" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">",
" <modelVersion>4.0.0</modelVersion>",
" <groupId>org.egovframe.rte</groupId>",
" <artifactId>org.egovframe.rte.fdl.filehandling</artifactId>",
" <packaging>jar</packaging>",
" <version>4.0.0</version>",
" <name>org.egovframe.rte.fdl.filehandling</name>"
};
try {
File file = new File("pom.xml");
LineIterator it = FileUtils.lineIterator(file, "UTF-8");
try {
LOGGER.debug("############################# LineIterator ###############################");
for (int i = 0; i < string.length && it.hasNext(); i++) {
String line = it.nextLine();
LOGGER.info(line);
assertEquals(string[i], line);
}
} finally {
LineIterator.closeQuietly(it);
}
} catch (Exception e) {
LOGGER.error("error: {}", e.getCause());
}
}
@Test
public void testUserAuthentication() throws Exception {
StaticUserAuthenticator auth = new StaticUserAuthenticator(null, "username", "password");
FileSystemOptions opts = new FileSystemOptions();
DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(opts, auth);
FileSystemManager manager = VFS.getManager();
FileObject baseDir = manager.resolveFile(System.getProperty("user.dir"));
FileObject file = manager.resolveFile(baseDir, "testfolder/file1.txt");
FileObject fo = manager.resolveFile(file.getName().getPath(), opts);
fo.createFile();
}
@Test
public void testCaching1() throws Exception {
String testFolder = FilehandlingServiceTest.class.getResource(".").getPath();
LOGGER.debug("testFolder = {}", testFolder);
FileSystemManager manager = VFS.getManager();
EgovFileUtil.writeFile(testFolder + "/file1.txt", text, "UTF-8");
/*
* Create a caching manager
* CacheStrategy.MANUAL : Deal with cached data manually. Call FileObject.refresh() to refresh the object data.
* CacheStrategy.ON_RESOLVE : Refresh the data every time you request a file from FileSystemManager.resolveFile
* CacheStrategy.ON_CALL : Refresh the data every time you call a method on the fileObject.
* You'll use this only if you really need the latest info as this setting is a major performance loss.
*/
DefaultFileSystemManager fs = new DefaultFileSystemManager();
fs.setFilesCache(manager.getFilesCache());
// zip, jar, tgz, tar, tbz2, file
if (!fs.hasProvider("file")) {
fs.addProvider("file", new DefaultLocalFileProvider());
}
// StandardFileSystemManager fs = new StandardFileSystemManager();
fs.setCacheStrategy(CacheStrategy.ON_RESOLVE);
fs.init();
// Create a cached object
//FileObject foBase2 = fs.resolveFile(testFolder);
LOGGER.debug("####1");
FileObject cachedFile = fs.toFileObject(new File(testFolder + "/file1.txt"));
LOGGER.debug("####2");
FilesCache filesCache = fs.getFilesCache();
LOGGER.debug("####3");
filesCache.putFile(cachedFile);
FileObject obj = filesCache.getFile(cachedFile.getFileSystem(), cachedFile.getName());
// FileObject baseFile = fs.getBaseFile();
// LOGGER.debug("### cachedFile.getContent().getSize() is {}", cachedFile.getContent().getSize());
// long fileSize = cachedFile.getContent().getSize();
// LOGGER.debug("#########size is {}", fileSize);
// FileObject cachedFile1 = cachedFile.resolveFile("file2.txt");
// FileObject scratchFolder = manager.resolveFile(testFolder);
// scratchFolder.delete(Selectors.EXCLUDE_SELF);
EgovFileUtil.delete(new File(testFolder + "/file1.txt"));
// obj.createFile();
// LOGGER.debug("#########obj is {}", obj.toString());
// LOGGER.debug("#########size is {}", obj.getContent().getSize());
LOGGER.debug("#########file is {}", obj.exists());
fs.close();
}
@Test
public void testCaching3() throws Exception {
FileSystemManager manager = VFS.getManager();
String testFolder = FilehandlingServiceTest.class.getResource(".").getPath();
FileObject scratchFolder = manager.resolveFile(testFolder + "/testfolder");
// releasable
FileObject dir1 = scratchFolder.resolveFile("file1.txt");
// avoid cache removal
FileObject dir2 = scratchFolder.resolveFile("file2.txt");
dir2.getContent();
// check if the cache still holds the right instance
FileObject dir22 = scratchFolder.resolveFile("file2.txt");
assertTrue(dir2 == dir22);
// check if the cache still holds the right instance
/* FileObject dir1_2 = scratchFolder.resolveFile("file1.txt");
assertFalse(dir1 == dir1_2);*/
}
private boolean contains(FileObject[] fos, String string) {
for (int i = 0; i < fos.length; i++) {
if (string.equals(fos[i].getName().getBaseName())) {
LOGGER.debug("# {}", string);
return true;
}
}
LOGGER.debug("# {} should be seen", string);
return false;
}
}
|
package com.stylefeng.guns.rest.persistence.dao;
import com.stylefeng.guns.rest.persistence.model.User;
import com.baomidou.mybatisplus.mapper.BaseMapper;
/**
* <p>
* User table Mapper interface
* </p>
*
* @author 夏先鹏
* @since 2019-09-11
*/
public interface UserMapper extends BaseMapper<User> {
}
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var renderMergedProps = function renderMergedProps(component) {
for (var _len = arguments.length, rest = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
rest[_key - 1] = arguments[_key];
}
var theProps = Object.assign.apply(Object, [{}].concat(rest));
return _react2.default.createElement(component, theProps);
};
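// Usage sketch (MyComponent and the prop values are hypothetical): Object.assign merges
// left to right, so later prop objects override earlier ones.
// renderMergedProps(MyComponent, { a: 1 }, { a: 2, b: 3 }) renders <MyComponent a={2} b={3} />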
exports.default = renderMergedProps; |
<gh_stars>1-10
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lista;
import java.io.File;
/**
*
* @author Administrator
*/
public class Cancion {
private String nombre;
private File archivo;
public Cancion() {
}
public Cancion(String nombre, File archivo) {
this.nombre = nombre;
this.archivo = archivo;
}
public Cancion(File archivo) {
this.nombre = archivo.getName();
this.archivo = archivo;
}
public String getNombre() {
return nombre;
}
public void setNombre(String nombre) {
this.nombre = nombre;
}
public File getArchivo() {
return archivo;
}
public void setArchivo(File archivo) {
this.archivo = archivo;
}
@Override
public String toString() {
return nombre;
}
}
|
# Import decision tree classifier from the sklearn library
from sklearn import tree
# Input data to the model
X = [[0, 0], [1, 1]]
y = [0, 1]
# Create a decision tree classifier object
clf = tree.DecisionTreeClassifier()
# Train the decision tree classifier
clf = clf.fit(X, y)
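# Predict on an unseen sample (a usage sketch; predict is the standard
# scikit-learn API for a fitted estimator)
print(clf.predict([[2., 2.]]))  # expected output: [1]
|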
BINDIR="$DIR/../../../../external/local/bin"
CONFIGDIR="$DIR/../config"
REPO_PATH="$DIR/gov2_indri"
DUMP_PATH="$DIR/gov2_dump"
LINKS_PATH="$DIR/gov2_links"
WAND_PATH="$DIR/gov2_wand"
BIGRAM_POSTINGS=$DIR/gov2_bigram_u8.inv
UNIGRAM_FILE=$DIR/gov2_unigram.txt
BIGRAM_FILE=$DIR/gov2_bigram.txt
TERMFEAT_FILE=$DIR/gov2_termfeat.csv
DOCFEAT_FILE=$DIR/gov2_docfeat.csv
QRY=$DIR/gov2-all-kstem.qry
STAGE0_RUN=$DIR/stage0.run
PAGERANK_PRIOR=$DIR/pagerank.prior
|
<gh_stars>0
package lab7_stephaniemartinez;
public class Violacion extends Delitos {
int edad;
public Violacion() {
super();
}
public Violacion(int edad, String descripcion, String lgm, String culpable, String fin) {
super(descripcion, lgm, culpable, fin);
this.edad = edad;
}
public int getEdad() {
return edad;
}
public void setEdad(int edad) {
this.edad = edad;
}
@Override
public String toString() {
return "Violacion: " +edad ;
}
}
|
<filename>src/config.rb<gh_stars>0
#!/usr/bin/env ruby
class Config
def self.build(base_config, box_config, user_config)
if not File.exist?(user_config)
puts "The file #{user_config} does not exist, it is needed for provisioning. Check the readme file for more information about the setup of this project.".red
end
if not File.exist?(box_config)
puts "The file #{box_config} does not exist, it is needed for provisioning. Check the readme file for more information about the setup of this project.".red
end
if not File.exist?(user_config) or not File.exist?(box_config)
exit
end
base_config = YAML.load_file(base_config)
box_config = YAML.load_file(box_config)
user_config = YAML.load_file(user_config)
base_config = base_config['config'].deep_merge(box_config['config']).deep_merge(user_config['config'])
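# Merge precedence sketch (illustrative values): with base {'a'=>1, 'b'=>2},
# box {'b'=>3} and user {'a'=>9}, the merged result is {'a'=>9, 'b'=>3};
# user settings override box settings, which override the base defaults.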
return base_config
end
end
|
#ifndef HARDWARE_H
#define HARDWARE_H
#include <types.h>
#include <mm/mmu.h>
extern char _fb_start[];
extern void hw_init();
extern uint32_t get_mmio_base_phy();
extern uint32_t get_mmio_mem_size();
extern uint32_t get_phy_ram_size();
extern void arch_set_kernel_vm(page_dir_entry_t* vm);
#endif
|
<gh_stars>0
// Copyright 2022 @nepoche/
// SPDX-License-Identifier: Apache-2.0
import { ApiInitHandler, AppConfig, NotificationHandler, ProvideCapabilities, WasmFactory, WebbApiProvider, WebbMethods, WebbProviderEvents, WebbRelayerBuilder } from '@nepoche/api-providers/index.js';
import { EventBus } from '@nepoche/app-util/index.js';
import { ApiPromise } from '@polkadot/api';
import { InjectedAccount, InjectedExtension } from '@polkadot/extension-inject/types';
import { AccountsAdapter } from '../account/Accounts.adapter.js';
import { PolkadotProvider } from '../ext-providers/index.js';
import { ActionsBuilder, InteractiveFeedback, WebbError, WebbErrorCodes } from '../webb-error/index.js';
import { PolkadotAnchorApi } from './anchor-api.js';
import { PolkadotBridgeDeposit } from './anchor-deposit.js';
import { PolkadotAnchorWithdraw } from './anchor-withdraw.js';
import { PolkadotChainQuery } from './chain-query.js';
import { PolkadotMixerDeposit } from './mixer-deposit.js';
import { PolkadotMixerWithdraw } from './mixer-withdraw.js';
import { PolkaTXBuilder } from './transaction.js';
import { PolkadotWrapUnwrap } from './wrap-unwrap.js';
export class WebbPolkadot extends EventBus<WebbProviderEvents> implements WebbApiProvider<WebbPolkadot> {
readonly methods: WebbMethods<WebbPolkadot>;
readonly api: ApiPromise;
readonly txBuilder: PolkaTXBuilder;
private constructor (
apiPromise: ApiPromise,
injectedExtension: InjectedExtension,
readonly relayingManager: WebbRelayerBuilder,
public readonly config: AppConfig,
readonly notificationHandler: NotificationHandler,
private readonly provider: PolkadotProvider,
readonly accounts: AccountsAdapter<InjectedExtension, InjectedAccount>,
readonly wasmFactory: WasmFactory
) {
super();
this.provider = new PolkadotProvider(
apiPromise,
injectedExtension,
new PolkaTXBuilder(apiPromise, notificationHandler)
);
this.accounts = this.provider.accounts;
this.api = this.provider.api;
this.txBuilder = this.provider.txBuilder;
this.methods = {
anchor: {
core: null,
deposit: {
enabled: true,
inner: new PolkadotBridgeDeposit(this)
},
withdraw: {
enabled: true,
inner: new PolkadotAnchorWithdraw(this)
}
},
anchorApi: new PolkadotAnchorApi(this, this.config.bridgeByAsset),
chainQuery: new PolkadotChainQuery(this),
mixer: {
deposit: {
enabled: true,
inner: new PolkadotMixerDeposit(this)
},
withdraw: {
enabled: true,
inner: new PolkadotMixerWithdraw(this)
}
},
wrapUnwrap: {
core: {
enabled: false,
inner: new PolkadotWrapUnwrap(this)
}
}
};
}
capabilities?: ProvideCapabilities | undefined;
getProvider () {
return this.provider;
}
async awaitMetaDataCheck () {
/// Delay until the UI is instantiated, then check whether the dApp needs to update the extension metadata
await new Promise((resolve) => setTimeout(resolve, 3000));
const metaData = await this.provider.checkMetaDataUpdate();
if (metaData) {
/// feedback body
const feedbackEntries = InteractiveFeedback.feedbackEntries([
{
header: 'Update Polkadot MetaData'
}
]);
/// feedback actions
const actions = ActionsBuilder.init()
/// update extension metadata
.action('Update MetaData', () => this.provider.updateMetaData(metaData), 'success')
.actions();
const feedback = new InteractiveFeedback(
'info',
actions,
() => {
return null;
},
feedbackEntries
);
/// emit the feedback object
this.emit('interactiveFeedback', feedback);
}
}
private async insureApiInterface () {
// check for RPC
console.log(this.api, 'api');
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const merkleRPC = Boolean(this.api.rpc.mt.getLeaves);
// merkle rpc
const merklePallet = this.api.query.merkleTreeBn254;
const mixerPallet = this.api.query.mixerBn254;
if (!merklePallet || !merkleRPC || !mixerPallet) {
throw WebbError.from(WebbErrorCodes.InsufficientProviderInterface);
}
}
static async init (
appName: string, // Arbitrary application name
endpoints: string[], // Endpoints of the substrate node
errorHandler: ApiInitHandler, // Error handler that will be used to catch errors while initializing the provider
relayerBuilder: WebbRelayerBuilder, // Webb Relayer builder for relaying withdraw
appConfig: AppConfig, // The whole and current app configuration
notification: NotificationHandler, // Notification handler that will be used for the provider
wasmFactory: WasmFactory // A factory fn that will return a wasm worker, eventually supplied to the `sdk-core`
): Promise<WebbPolkadot> {
const [apiPromise, injectedExtension] = await PolkadotProvider.getParams(appName, endpoints, errorHandler.onError);
const provider = new PolkadotProvider(apiPromise, injectedExtension, new PolkaTXBuilder(apiPromise, notification));
const accounts = provider.accounts;
const instance = new WebbPolkadot(
apiPromise,
injectedExtension,
relayerBuilder,
appConfig,
notification,
provider,
accounts,
wasmFactory
);
await instance.insureApiInterface();
/// check metadata update
await instance.awaitMetaDataCheck();
await apiPromise.isReady;
return instance;
}
static async initWithCustomAccountsAdapter (
appName: string, // Arbitrary application name
endpoints: string[], // Endpoints of the substrate node
errorHandler: ApiInitHandler, // Error handler that will be used to catch errors while initializing the provider
relayerBuilder: WebbRelayerBuilder, // Webb Relayer builder for relaying withdraw
appConfig: AppConfig, // The whole and current app configuration
notification: NotificationHandler, // Notification handler that will be used for the provider
accounts: AccountsAdapter<InjectedExtension, InjectedAccount>,
apiPromise: ApiPromise,
injectedExtension: InjectedExtension,
wasmFactory: WasmFactory
): Promise<WebbPolkadot> {
const provider = new PolkadotProvider(apiPromise, injectedExtension, new PolkaTXBuilder(apiPromise, notification));
const instance = new WebbPolkadot(
apiPromise,
injectedExtension,
relayerBuilder,
appConfig,
notification,
provider,
accounts,
wasmFactory
);
await instance.insureApiInterface();
/// check metadata update
await instance.awaitMetaDataCheck();
await apiPromise.isReady;
return instance;
}
async destroy (): Promise<void> {
await this.provider.destroy();
}
}
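// Usage sketch (endpoint and handler names are illustrative; wiring of the other
// dependencies is omitted):
// const provider = await WebbPolkadot.init('my-dapp', ['wss://rpc.example.org'],
//   apiInitHandler, relayerBuilder, appConfig, notificationHandler, wasmFactory);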
|
<reponame>Luohuayu/CatAntiCheat-Public<filename>CatAntiCheat-1.7.10/src/main/java/luohuayu/anticheat/OpenGLHelper.java
package luohuayu.anticheat;
import cpw.mods.fml.relauncher.ReflectionHelper;
import net.minecraft.client.renderer.OpenGlHelper;
import net.minecraft.client.renderer.texture.TextureUtil;
import net.minecraft.client.shader.Framebuffer;
import net.minecraft.util.ScreenShotHelper;
import java.awt.image.BufferedImage;
import java.nio.IntBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
public class OpenGLHelper {
public static BufferedImage createScreenshot(int width, int height, Framebuffer framebufferIn) {
IntBuffer field_74293_b = ReflectionHelper.getPrivateValue(ScreenShotHelper.class, null, "field_74293_b");
int[] field_74294_c = ReflectionHelper.getPrivateValue(ScreenShotHelper.class, null, "field_74294_c");
if (OpenGlHelper.func_148822_b()) {
width = framebufferIn.field_147622_a;
height = framebufferIn.field_147620_b;
}
int k = width * height;
if (field_74293_b == null || field_74293_b.capacity() < k) {
field_74293_b = BufferUtils.createIntBuffer(k);
ReflectionHelper.setPrivateValue(ScreenShotHelper.class, null, new int[k], "field_74294_c");
}
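// GL constants used below, spelled out for readability: 3333 = GL_PACK_ALIGNMENT,
// 3317 = GL_UNPACK_ALIGNMENT, 3553 = GL_TEXTURE_2D, 32993 = GL_BGRA,
// 33639 = GL_UNSIGNED_INT_8_8_8_8_REV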
GL11.glPixelStorei(3333, 1);
GL11.glPixelStorei(3317, 1);
field_74293_b.clear();
if (OpenGlHelper.func_148822_b()) {
GL11.glBindTexture(3553, framebufferIn.field_147617_g);
GL11.glGetTexImage(3553, 0, 32993, 33639, field_74293_b);
} else {
GL11.glReadPixels(0, 0, width, height, 32993, 33639, field_74293_b);
}
field_74293_b.get(field_74294_c);
TextureUtil.func_147953_a(field_74294_c, width, height);
BufferedImage bufferedimage = null;
if (OpenGlHelper.func_148822_b()) {
bufferedimage = new BufferedImage(framebufferIn.field_147621_c, framebufferIn.field_147618_d, 1);
int l = framebufferIn.field_147620_b - framebufferIn.field_147618_d;
for(int i1 = l; i1 < framebufferIn.field_147620_b; ++i1) {
for(int j1 = 0; j1 < framebufferIn.field_147621_c; ++j1) {
bufferedimage.setRGB(j1, i1 - l, field_74294_c[i1 * framebufferIn.field_147622_a + j1]);
}
}
} else {
bufferedimage = new BufferedImage(width, height, 1);
bufferedimage.setRGB(0, 0, width, height, field_74294_c, 0, width);
}
return bufferedimage;
}
}
|
<filename>spec/metrics/timings_spec.rb<gh_stars>10-100
require 'rails_helper'
require_relative 'shared_examples_for_metrics'
RSpec.describe Timings do
before do
book_a_luna_visit_late
book_a_luna_visit_on_time
cancel_a_luna_visit_late
cancel_a_luna_visit_on_time
withdraw_a_luna_visit_late
withdraw_a_luna_visit_on_time
reject_a_luna_visit_late
reject_a_luna_visit_on_time
book_a_mars_visit_late
book_a_mars_visit_on_time
reject_a_mars_visit_late
reject_a_mars_visit_on_time
end
include_examples 'when creating visits with dates'
describe Timings::TimelyAndOverdue do
context 'when they are not organized by date' do
it 'counts all overdue visits and group by prison' do
expect(described_class.fetch_and_format).to be ==
{
'Lunar Penal Colony' => { 'overdue' => 4 },
'Martian Penal Colony' => { 'overdue' => 2 }
}
end
end
context 'when they are organized by date' do
describe Timings::TimelyAndOverdueByCalendarWeek do
it 'counts visits and groups by prison, year, calendar week and visit state' do
expect(described_class.fetch_and_format).to be ==
{ 'Lunar Penal Colony' =>
{ 2016 =>
{ 5 => {
'overdue' => {
'rejected' => 1,
'booked' => 1,
'cancelled' => 1,
'withdrawn' => 1
},
'timely' => {
'rejected' => 1,
'booked' => 1,
'cancelled' => 1,
'withdrawn' => 1
}
}
}
},
'Martian Penal Colony' =>
{ 2016 =>
{ 5 => {
'overdue' => { 'booked' => 1, 'rejected' => 1 },
'timely' => { 'booked' => 1, 'rejected' => 1 }
}
}
}
}
end
end
end
end
end
|
<gh_stars>0
export const base = [
{
type: 'heading_open',
tag: 'h1',
map: [0, 1],
nesting: 1,
level: 0,
children: null,
content: '',
markup: '#',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: [0, 1],
nesting: 0,
level: 1,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Create a folder',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'Create a folder',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'heading_close',
tag: 'h1',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '#',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tabs_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-list_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: null,
nesting: 0,
level: 0,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Python',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: '',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
{
type: 'tab_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: null,
nesting: 0,
level: 0,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Tab with list',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: '',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
{
type: 'tab_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: null,
nesting: 0,
level: 0,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Tab with list',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: '',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
{
type: 'tab_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-list_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [6, 7],
nesting: 1,
level: 2,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: [6, 7],
nesting: 0,
level: 3,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'About python',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'About python',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 2,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'bullet_list_open',
tag: 'ul',
map: [9, 12],
nesting: 1,
level: 2,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'list_item_open',
tag: 'li',
map: [9, 10],
nesting: 1,
level: 3,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [9, 10],
nesting: 1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'inline',
tag: '',
map: [9, 10],
nesting: 0,
level: 5,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'One',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'One',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'list_item_close',
tag: 'li',
map: null,
nesting: -1,
level: 3,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'list_item_open',
tag: 'li',
map: [10, 12],
nesting: 1,
level: 3,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [10, 11],
nesting: 1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'inline',
tag: '',
map: [10, 11],
nesting: 0,
level: 5,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Two',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'Two',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'list_item_close',
tag: 'li',
map: null,
nesting: -1,
level: 3,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'bullet_list_close',
tag: 'ul',
map: null,
nesting: -1,
level: 2,
children: null,
content: '',
markup: '-',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_open',
tag: 'div',
map: null,
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'ordered_list_open',
tag: 'ol',
map: [13, 16],
nesting: 1,
level: 2,
children: null,
content: '',
markup: '.',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'list_item_open',
tag: 'li',
map: [13, 14],
nesting: 1,
level: 3,
children: null,
content: '',
markup: '.',
info: '1',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [13, 14],
nesting: 1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'inline',
tag: '',
map: [13, 14],
nesting: 0,
level: 5,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'One',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'One',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'list_item_close',
tag: 'li',
map: null,
nesting: -1,
level: 3,
children: null,
content: '',
markup: '.',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'list_item_open',
tag: 'li',
map: [14, 16],
nesting: 1,
level: 3,
children: null,
content: '',
markup: '.',
info: '2',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [14, 15],
nesting: 1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'inline',
tag: '',
map: [14, 15],
nesting: 0,
level: 5,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'Two',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'Two',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 4,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: true,
},
{
type: 'list_item_close',
tag: 'li',
map: null,
nesting: -1,
level: 3,
children: null,
content: '',
markup: '.',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'ordered_list_close',
tag: 'ol',
map: null,
nesting: -1,
level: 2,
children: null,
content: '',
markup: '.',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tab-panel_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'tabs_close',
tag: 'div',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_open',
tag: 'p',
map: [18, 19],
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
map: [18, 19],
nesting: 0,
level: 1,
children: [
{
type: 'text',
tag: '',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: 'After tabs',
markup: '',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: 'After tabs',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
];
export const escaped = [
{
type: 'paragraph_open',
tag: 'p',
attrs: null,
map: [0, 1],
nesting: 1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'inline',
tag: '',
attrs: null,
map: [0, 1],
nesting: 0,
level: 1,
children: [
{
type: 'code_inline',
tag: 'code',
attrs: null,
map: null,
nesting: 0,
level: 0,
children: null,
content: '{% list tabs %}',
markup: '`',
info: '',
meta: null,
block: false,
hidden: false,
},
],
content: '`{% list tabs %}`',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
{
type: 'paragraph_close',
tag: 'p',
attrs: null,
map: null,
nesting: -1,
level: 0,
children: null,
content: '',
markup: '',
info: '',
meta: null,
block: true,
hidden: false,
},
];
|
import threading
import evdev
class EventReader:
def __init__(self):
self.thread = None
self.running = False
self.callback = None
def start(self, callback):
self.thread = threading.Thread(target=self.__loop)
self.running = True
self.callback = callback
self.thread.start()
def join(self):
if self.thread:
self.running = False
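# Note: device.read_loop() blocks until the next event, so the reader thread
# only observes this flag once another input event arrives.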
self.thread.join()
def __loop(self):
try:
device = evdev.InputDevice('/dev/input/event0')
for event in device.read_loop():
if not self.running:
break
if self.callback:
self.callback(event)
except Exception as e:
print(f"An error occurred: {e}")
# Example usage
def event_callback(event):
print(f"Received event: {event}")
reader = EventReader()
reader.start(event_callback)
# Simulate some activity
import time
time.sleep(5)
reader.join() |
#!/bin/bash -x
function start_all() {
start_worker
start_scheduler
start_webserver
setup_crons
}
function start_scheduler() {
sudo chown -R airflow ${AIRFLOW_LOG_DIR}/scheduler_task_logs/
nohup airflow scheduler &>> ${AIRFLOW_LOG_DIR}/scheduler.log & echo $! > ${AIRFLOW_SCHEDULER_PID}
}
function start_webserver() {
# Start the webserver in debug mode to enable auto-refresh of updated/new dags
nohup airflow webserver -p8080 &>> ${AIRFLOW_LOG_DIR}/webserver.log &
# Fetch the child process id: the child is the actual web app running on the machine,
# so we must monitor the child rather than the parent, which is just a shell wrapper;
# even if the parent dies, the child keeps running and logging to the log file.
# We also have to wait for the child process to start.
pid=$(echo $!)
echo "$pid" > ${AIRFLOW_PARENT_WEBSERVER_PID}
cpid=""
i="0"
echo "Parent Process Id of Webserver" "$pid"
while [ $i -lt 20 ]
do
cpid=$(echo $(pgrep -P $pid))
echo "Child process id" "$cpid"
if [[ ! -z $cpid ]]; then
break
fi
# Generally it takes 3 seconds
sleep 1
i=$[$i+1]
done
echo "$cpid" > ${AIRFLOW_WEBSERVER_PID}
if [[ ! -z $cpid ]]; then
echo "Webserver startup successful"
else
echo "Webserver startup failed, child process is empty"
fi
}
function start_worker() {
if [[ "$use_celery_airflow" == "True" ]]; then
FLOWER_URL_PREFIX=airflow-celery-${CLUSTER_ID}
export FLOWER_URL_PREFIX=$FLOWER_URL_PREFIX
# Fork these commands as child processes (using & at the end) because these services run
# indefinitely; the parent process must not wait for them, or subsequent commands would
# never be triggered.
# This is not done for the initdb command because subsequent commands must not start
# until the tables have been created, otherwise they may fail.
nohup airflow celery worker &>> ${AIRFLOW_LOG_DIR}/celery-worker.log & echo $! > ${AIRFLOW_WORKER_PID}
nohup airflow celery flower &>> ${AIRFLOW_LOG_DIR}/flower.log &
fi
}
function setup_crons() {
# Setup a cron on hourly basis
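# The `crontab -l | { cat; echo "..."; } | sort -u | crontab -` idiom appends an entry
# idempotently: sort -u drops duplicate lines (and sorts them) before reinstalling the crontab.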
crontab -l | { cat; echo "0 * * * * /bin/bash ${airflow_source_dir}/../setup_crons.sh setup_upload_log_cron"; } | sort -u | crontab -
crontab -l | { cat; echo "4 0 * * * /bin/bash ${airflow_source_dir}/../setup_crons.sh setup_delete_scheduler_process_log_cron"; } | sort -u | crontab -
# crontab -l | { cat; echo "4 0 * * * source ${airflow_source_dir}/setup_crons.sh airflow_idle_time_check"; } | sort -u | crontab -
crontab -l | { cat; echo "*/5 * * * * /bin/bash ${airflow_source_dir}/../setup_crons.sh airflow_sync_dags_from_remote"; } | sort -u | crontab -
crontab -l | { cat; echo "*/5 * * * * /bin/bash ${airflow_source_dir}/../setup_crons.sh airflow_sync_plugins_from_remote"; } | sort -u | crontab -
crontab -l | { cat; echo "*/5 * * * * /bin/bash ${airflow_source_dir}/../setup_crons.sh update_sync_location"; } | sort -u | crontab -
}
function activate_virtualenv(){
source ${BASEDIR}/virtualenv.sh activate
}
function deactivate_virtualenv(){
source ${BASEDIR}/virtualenv.sh deactivate
}
SERVICE=$1
BASEDIR="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
activate_virtualenv
source /usr/lib/hustler/bin/qubole-bash-lib.sh
source /etc/profile.d/airflow.sh
AIRFLOW_HOME=$(echo $AIRFLOW_HOME)
AIRFLOW_SCHEDULER_PID=${AIRFLOW_HOME}/scheduler.pid
AIRFLOW_WEBSERVER_PID=${AIRFLOW_HOME}/webserver.pid
AIRFLOW_PARENT_WEBSERVER_PID=${AIRFLOW_HOME}/parent_webserver.pid
AIRFLOW_RABBITMQ_PID=${AIRFLOW_HOME}/rabbitmq.pid
AIRFLOW_WORKER_PID=${AIRFLOW_HOME}/worker.pid
AIRFLOW_LOG_DIR=/media/ephemeral0/logs/airflow/
CLUSTER_ID=$(echo $CLUSTER_ID)
qubole_base_url=$(echo $QUBOLE_BASE_URL)
use_celery_airflow=$(echo $USE_CELERY_AIRFLOW)
use_cluster_broker_airflow=$(echo $USE_CLUSTER_BROKER_AIRFLOW)
airflow_source_dir="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
worker_child_pids=
case "${SERVICE}" in
"all" )
start_all
;;
"scheduler" )
start_scheduler
;;
"webserver" )
start_webserver
;;
"worker" )
start_worker
;;
* )
echo "only all, scheduler, webserver are supported"
exit 1
;;
esac
deactivate_virtualenv
|
<reponame>addcolouragency/craft_storefront
export default TransactionService;
declare const TransactionService_base: any;
declare class TransactionService extends TransactionService_base {
[x: string]: any;
createSession(): Promise<import("mongodb").ClientSession>;
}
|
#!/bin/sh
make -C /Users/lbajo/ros2_mod_ws/build/rmw_implementation_cmake -f /Users/lbajo/ros2_mod_ws/build/rmw_implementation_cmake/CMakeScripts/ALL_BUILD_cmakeRulesBuildPhase.make$CONFIGURATION all
|
<gh_stars>10-100
require 'test_helper'
class ExtracurricularActivitiesControllerTest < ActionController::TestCase
include Devise::Test::ControllerHelpers
setup do
@extracurricular_activity = extracurricular_activities(:one)
sign_in users(:one)
end
test "should get index" do
get :index
assert_response :success
assert_not_nil assigns(:extracurricular_activities)
end
test "should get new" do
get :new
assert_response :success
end
test "should create extracurricular_activity" do
assert_difference('ExtracurricularActivity.count') do
post :create, params: { extracurricular_activity: { name: 'MyStringThree'} }
end
assert_redirected_to extracurricular_activity_path(assigns(:extracurricular_activity))
end
test "should show extracurricular_activity" do
get :show, params: { id: @extracurricular_activity }
assert_response :success
end
test "should get edit" do
get :edit, params: { id: @extracurricular_activity }
assert_response :success
end
test "should update extracurricular_activity" do
patch :update, params: { id: @extracurricular_activity, extracurricular_activity: { name: 'MyStringFour'} }
assert_redirected_to extracurricular_activity_path(assigns(:extracurricular_activity))
end
test "should destroy extracurricular_activity" do
assert_difference('ExtracurricularActivity.count', -1) do
delete :destroy, params: { id: @extracurricular_activity }
end
assert_redirected_to extracurricular_activities_path
end
test "staff user shouldn't be able to delete extracurricular_activities" do
user = User.create!(email: '<EMAIL>', staff: true, password: '<PASSWORD>')
ability = Ability.new(user)
assert ability.cannot? :delete, @extracurricular_activity
end
end
|
package com.professorvennie.bronzeage.common.containers;
import com.professorvennie.bronzeage.tileentitys.TileEntitySteamWashPlant;
import net.minecraft.entity.player.InventoryPlayer;
/**
* Created by ProfessorVennie on 3/8/2015 at 4:14 PM.
*/
public class ContainerSteamWashPlant extends ContainerBasicMachine {
public ContainerSteamWashPlant(InventoryPlayer inventory, TileEntitySteamWashPlant tileEntity) {
super(tileEntity);
addPlayersInv(inventory);
}
}
|
!
! MPI_COMM_WORLD
!
INTEGER MPI_COMM_WORLD
parameter (mpi_comm_world=1)
!
!
!
integer MPI_BOTTOM
parameter (MPI_BOTTOM=0)
!
! source,tag
!
integer MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_TAG_UB
parameter (mpi_any_source=-1, mpi_any_tag= -1, mpi_tag_ub=1681915906)
integer MPI_PROC_NULL, MPI_ROOT
parameter (MPI_PROC_NULL=-2, MPI_ROOT=-3)
integer MPI_COMM_NULL, MPI_REQUEST_NULL
parameter (MPI_COMM_NULL=0, MPI_REQUEST_NULL=0)
integer MPI_GROUP_NULL, MPI_GROUP_EMPTY
parameter (MPI_GROUP_NULL=0, MPI_GROUP_EMPTY= -1)
integer MPI_MAX_ERROR_STRING
parameter (MPI_MAX_ERROR_STRING=128)
integer MPI_MAX_PROCESSOR_NAME
parameter (MPI_MAX_PROCESSOR_NAME=128)
!
! Return codes
!
integer MPI_SUCCESS
parameter (MPI_SUCCESS=0)
integer MPI_ERR_BUFFER
parameter (MPI_ERR_BUFFER= -1)
integer MPI_ERR_COUNT
parameter (MPI_ERR_COUNT= -1)
integer MPI_ERR_TYPE
parameter (MPI_ERR_TYPE= -1)
integer MPI_ERR_TAG
parameter (MPI_ERR_TAG= -1)
integer MPI_ERR_COMM
parameter (MPI_ERR_COMM= -1)
integer MPI_ERR_RANK
parameter (MPI_ERR_RANK= -1)
integer MPI_ERR_REQUEST
parameter (MPI_ERR_REQUEST= -1)
integer MPI_ERR_ROOT
parameter (MPI_ERR_ROOT= -1)
integer MPI_ERR_GROUP
parameter (MPI_ERR_GROUP= -1)
integer MPI_ERR_OP
parameter (MPI_ERR_OP= -1)
integer MPI_ERR_TOPOLOGY
parameter (MPI_ERR_TOPOLOGY= -1)
integer MPI_ERR_DIMS
parameter (MPI_ERR_DIMS= -1)
integer MPI_ERR_ARG
parameter (MPI_ERR_ARG= -1)
integer MPI_ERR_UNKNOWN
parameter (MPI_ERR_UNKNOWN= -1)
integer MPI_ERR_TRUNCATE
parameter (MPI_ERR_TRUNCATE= -1)
integer MPI_ERR_OTHER
parameter (MPI_ERR_OTHER= -1)
integer MPI_ERR_INTERN
parameter (MPI_ERR_INTERN= -1)
integer MPI_PENDING
parameter (MPI_PENDING= -1)
integer MPI_ERR_IN_STATUS
parameter (MPI_ERR_IN_STATUS= -1)
integer MPI_ERR_LASTCODE
parameter (MPI_ERR_LASTCODE= -1)
integer MPI_ERRORS_RETURN
parameter (MPI_ERRORS_RETURN= -1)
!
!
integer MPI_UNDEFINED
parameter (MPI_UNDEFINED= -1)
!
! MPI_Status
!
! The values in this section MUST match the struct definition
! in mpi.h
!
INTEGER MPI_STATUS_SIZE
PARAMETER (MPI_STATUS_SIZE=4)
INTEGER MPI_SOURCE, MPI_TAG, MPI_ERROR
PARAMETER(MPI_SOURCE=1, MPI_TAG=2, MPI_ERROR=3)
! There is a 4th value only used internally
INTEGER MPI_STATUS_IGNORE(MPI_STATUS_SIZE)
INTEGER MPI_STATUSES_IGNORE(MPI_STATUS_SIZE,1)
COMMON /MPISERIAL/ MPI_STATUS_IGNORE
COMMON /MPISERIAL/ MPI_STATUSES_IGNORE
!
! MPI_IN_PLACE
!
INTEGER MPI_IN_PLACE
COMMON /MPISERIAL/ MPI_IN_PLACE
SAVE /MPISERIAL/ ! Technically needed in case the common block goes out of scope
!
! MPI_Datatype values
!
! New datatype values
! Type constants are integer handles; the absolute value of a constant minus one
! gives the corresponding index in the type array. For example, MPI_BYTE=-12
! corresponds to type index 11.
! (Array in type_const.c)
!
INTEGER MPI_DATATYPE_NULL
PARAMETER (MPI_DATATYPE_NULL=0)
INTEGER MPI_BYTE
PARAMETER (MPI_BYTE=-12)
INTEGER MPI_PACKED
PARAMETER (MPI_PACKED=-13)
INTEGER MPI_LB
PARAMETER (MPI_LB=-14)
INTEGER MPI_UB
PARAMETER (MPI_UB=-15)
INTEGER MPI_INTEGER
PARAMETER (MPI_INTEGER=-16)
INTEGER MPI_REAL
PARAMETER (MPI_REAL=-17)
INTEGER MPI_DOUBLE_PRECISION
PARAMETER (MPI_DOUBLE_PRECISION=-18)
INTEGER MPI_COMPLEX
PARAMETER (MPI_COMPLEX=-19)
INTEGER MPI_DOUBLE_COMPLEX
PARAMETER (MPI_DOUBLE_COMPLEX=-20)
INTEGER MPI_LOGICAL
PARAMETER (MPI_LOGICAL=-21)
INTEGER MPI_CHARACTER
PARAMETER (MPI_CHARACTER=-22)
integer MPI_2REAL
parameter (MPI_2REAL= -23)
integer MPI_2DOUBLE_PRECISION
parameter (MPI_2DOUBLE_PRECISION= -24)
integer MPI_2INTEGER
parameter (MPI_2INTEGER= -25)
!
! Size-specific types
!
INTEGER MPI_INTEGER1
PARAMETER (MPI_INTEGER1= -32 )
INTEGER MPI_INTEGER2
PARAMETER (MPI_INTEGER2= -33 )
INTEGER MPI_INTEGER4
PARAMETER (MPI_INTEGER4= -34 )
INTEGER MPI_INTEGER8
PARAMETER (MPI_INTEGER8= -35 )
INTEGER MPI_INTEGER16
PARAMETER (MPI_INTEGER16= -36 )
INTEGER MPI_REAL4
PARAMETER (MPI_REAL4= -37 )
INTEGER MPI_REAL8
PARAMETER (MPI_REAL8= -38 )
INTEGER MPI_REAL16
PARAMETER (MPI_REAL16= -39 )
integer MPI_COMPLEX8
parameter (MPI_COMPLEX8= -40 )
integer MPI_COMPLEX16
parameter (MPI_COMPLEX16= -41 )
integer MPI_COMPLEX32
parameter (MPI_COMPLEX32= -42 )
integer MPI_LONG_LONG_INT
parameter (MPI_LONG_LONG_INT= -43)
integer MPI_LONG_LONG
parameter (MPI_LONG_LONG= MPI_LONG_LONG_INT)
integer MPI_UNSIGNED_LONG_LONG
parameter (MPI_UNSIGNED_LONG_LONG= -44)
integer MPI_OFFSET
parameter (MPI_OFFSET= -45)
!
! MPI_Op values
!
! (All are handled as no-op so no value is necessary; but provide one
! anyway just in case.)
!
INTEGER MPI_SUM
PARAMETER (MPI_SUM=0)
INTEGER MPI_MAX
PARAMETER (MPI_MAX=0)
INTEGER MPI_MIN
PARAMETER (MPI_MIN=0)
INTEGER MPI_PROD
PARAMETER (MPI_PROD=0)
INTEGER MPI_LAND
PARAMETER (MPI_LAND=0)
INTEGER MPI_BAND
PARAMETER (MPI_BAND=0)
INTEGER MPI_LOR
PARAMETER (MPI_LOR=0)
INTEGER MPI_BOR
PARAMETER (MPI_BOR=0)
INTEGER MPI_LXOR
PARAMETER (MPI_LXOR=0)
INTEGER MPI_BXOR
PARAMETER (MPI_BXOR=0)
INTEGER MPI_MINLOC
PARAMETER (MPI_MINLOC=0)
INTEGER MPI_MAXLOC
PARAMETER (MPI_MAXLOC=0)
INTEGER MPI_OP_NULL
PARAMETER (MPI_OP_NULL=0)
!
! MPI_Wtime
!
DOUBLE PRECISION MPI_WTIME
EXTERNAL MPI_WTIME
!
! Kinds
!
INTEGER MPI_OFFSET_KIND
PARAMETER (MPI_OFFSET_KIND=selected_int_kind(13))
INTEGER MPI_MODE_RDONLY
PARAMETER (MPI_MODE_RDONLY=0)
INTEGER MPI_MODE_CREATE
PARAMETER (MPI_MODE_CREATE=1)
INTEGER MPI_MODE_RDWR
PARAMETER (MPI_MODE_RDWR=2)
!
! Info
!
INTEGER MPI_INFO_NULL
PARAMETER (MPI_INFO_NULL=0)
!
! Library version string (must match C value)
!
INTEGER MPI_MAX_LIBRARY_VERSION_STRING
PARAMETER (MPI_MAX_LIBRARY_VERSION_STRING=80)
!
! MPI Version
!
INTEGER MPI_VERSION
PARAMETER (MPI_VERSION=1)
INTEGER MPI_SUBVERSION
PARAMETER (MPI_SUBVERSION=0)
|
from django import forms
MAILING_LIST_CHOICES = [
('yes', 'Yes!'),
('no', 'No thanks')
]
class SignInForm(forms.Form):
email = forms.EmailField(label='What is your RIT (or preferred) email address?')
mailinglist = forms.ChoiceField(choices=MAILING_LIST_CHOICES, initial='yes', widget=forms.RadioSelect, label='Would you like to be added to our announcements mailing list?')
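# Usage sketch (hypothetical view code; the names are illustrative):
# form = SignInForm(request.POST)
# if form.is_valid():
#     email = form.cleaned_data['email']
#     wants_mail = form.cleaned_data['mailinglist'] == 'yes'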
|
# -*- coding: utf-8 -*-
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Non-complete JSON Schema for validating IP addresses.
# Use it for better readability in the main schema.
_IP_ADDRESS_SCHEMA = {
'type': 'string',
'format': 'ipv4',
}
# Non-complete JSON schema for validating NET addresses.
# Use it for better readability in the main schema.
_NET_ADDRESS_SCHEMA = {
'type': 'string',
# check for valid ip address and route prefix
# e.g: 192.168.0.0/24
'pattern': '^(({octet}\.){{3}}{octet})({prefix})?$'.format(
octet='(2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?)',
prefix='/(3[012]|[12]?[0-9])'
),
}
# Non-complete JSON Schema for validating MAC addresses.
# Use it for better readability in the main schema.
_MAC_ADDRESS_SCHEMA = {
'type': 'string',
'pattern': '^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$',
}
# TODO(@ikalnitsky): add `required` properties to all needed objects
node_format_schema = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'title': 'JSONized Node object',
'description': 'Object with node description',
'type': 'object',
'properties': {
'mac': _MAC_ADDRESS_SCHEMA,
'ip': _IP_ADDRESS_SCHEMA,
'meta': {
'type': 'object',
'properties': {
# I guess the format schema below will be used somewhere else,
# so it would be great to move it out in the future.
'interfaces': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'ip': _IP_ADDRESS_SCHEMA,
'netmask': _NET_ADDRESS_SCHEMA,
'mac': _MAC_ADDRESS_SCHEMA,
'state': {'type': 'string'},
'name': {'type': 'string'},
}
}
},
# I guess the format schema below will be used somewhere else,
# so it would be great to move it out in the future.
'disks': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'model': {'type': ['string', 'null']},
'disk': {'type': 'string'},
'size': {'type': 'number'},
'name': {'type': 'string'},
}
}
},
'memory': {
'type': 'object',
'properties': {
'total': {'type': 'number'}
}
},
'cpu': {
'type': 'object',
'properties': {
'spec': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'model': {'type': 'string'},
'frequency': {'type': 'number'}
}
}
},
'total': {'type': 'integer'},
'real': {'type': 'integer'},
}
},
'system': {
'type': 'object',
'properties': {
'manufacturer': {'type': 'string'},
'version': {'type': 'string'},
'serial': {'type': 'string'},
'family': {'type': 'string'},
'fqdn': {'type': 'string'},
}
},
}
},
'id': {'type': 'string'},
'manufacturer': {'type': 'string'},
'os_platform': {'type': 'string'},
'is_agent': {'type': 'boolean'},
'platform_name': {'type': ['string', 'null']},
},
}
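# Validation sketch (assumes the `jsonschema` package; the data values are illustrative):
# import jsonschema
# jsonschema.validate(
#     {'mac': '00:1B:44:11:3A:B7', 'ip': '10.0.0.1', 'id': 'node-1'},
#     node_format_schema,
# )  # raises jsonschema.ValidationError on mismatch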
|
<gh_stars>0
package com.coding.bat.warmup1;
public class PosNeg {
/*
* Given 2 int values, return true if one is negative and one is positive.
* Except if the parameter "negative" is true, then return true only if both are negative.
*/
public static void main(String[] args) {
System.out.println(posNeg(1, 2, false));
System.out.println(posNeg(1, -2, true));
System.out.println(posNeg(-1, -2, false));
System.out.println(posNeg(-4, -5, true));
System.out.println(posNeg(-5, 6, true));
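// Expected output (one value per line): false, false, false, true, false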
}
public static boolean posNeg(int a, int b, boolean negative) {
if (negative) {
return (a < 0 && b < 0);
} else {
return ((a > 0 && b < 0) || (a < 0 && b > 0));
}
}
}
|
<reponame>mercedes-benz/sechub
// SPDX-License-Identifier: MIT
package cli
import (
"fmt"
"os"
"path/filepath"
"testing"
. "mercedes-benz.com/sechub/testutil"
)
func Test_fillTemplate_without_data_keeps_data_as_is(t *testing.T) {
jStr := `
{
"apiVersion" : "1.2.3",
"codeScan":{
"fileSystem": {
"folders": ["1111","2222"]
}
}
}
`
data := map[string]string{}
result := fillTemplate(jStr, data)
AssertJSONEquals(string(result), jStr, t)
}
func Test_fillTemplate_with_data_changes_template_content(t *testing.T) {
jStrA := `
{
"apiVersion" : "1.2.3",
"codeScan":{
"fileSystem": {
"folders": ["{{ .DATA1 }}","{{ .DATA2 }}"]
}
}
}
`
jStrB := `
{
"apiVersion" : "1.2.3",
"codeScan":{
"fileSystem": {
"folders": ["12345","67890"]
}
}
}
`
data := map[string]string{
"DATA1": "12345",
"DATA2": "67890",
}
result := fillTemplate(jStrA, data)
AssertJSONEquals(string(result), jStrB, t)
}
func Test_envToMap_works_as_expected(t *testing.T) {
/* execute */
data, _ := envToMap()
/* test */
AssertEquals("", data["THIS_VARIABLE_SHOULD_NEVER_EXIST_12345"], t)
AssertNotEquals("", data["GOPATH"], t) // must exist, we need GOPATH to run test...
}
func Test_newSecHubConfigFromFile_does_resolve_map_entries(t *testing.T) {
configPtr := NewConfigByFlags()
context := NewContext(configPtr)
os.Setenv("SHTEST_VERSION", "1.0")
os.Setenv("SHTEST_FOLDERS1", "testProject1/src/java")
path := filepath.Join("testdata", "sechub-testfile1.json") // relative path
var config SecHubConfig
config, _ = newSecHubConfigurationFromFile(context, path)
fmt.Printf("Loaded config: %s", config)
AssertEquals("1.0", config.APIVersion, t)
AssertEquals("testProject1/src/java", config.CodeScan.FileSystem.Folders[0], t)
}
func Example_newSecHubConfigFromFile_parses_data_sources_section_correctly() {
/* prepare */
sechubJSON := `
{
"data": {
"sources": [
{
"name": "reference-name-sources-1",
"fileSystem": {
"files": [
"somewhere/file1.txt",
"somewhere/file2.txt"
],
"folders": [
"somewhere/subfolder1",
"somewhere/subfolder2"
]
},
"excludes": [
"**/mytestcode/**",
"**/documentation/**"
],
"additionalFilenameExtensions": [
".cplusplus",
".py9"
]
}
]
}
}
`
/* execute */
result := newSecHubConfigFromBytes([]byte(sechubJSON))
/* test */
fmt.Printf("%+v\n", result.Data.Sources)
// Output:
// [{Name:reference-name-sources-1 FileSystem:{Files:[somewhere/file1.txt somewhere/file2.txt] Folders:[somewhere/subfolder1 somewhere/subfolder2]} Excludes:[**/mytestcode/** **/documentation/**] SourceCodePatterns:[.cplusplus .py9]}]
}
func Example_newSecHubConfigFromFile_parses_data_binary_section_correctly() {
/* prepare */
sechubJSON := `
{
"data": {
"binaries": [
{
"name": "reference-name-binaries-1",
"fileSystem": {
"files": [ "somewhere/file1.dll", "somewhere/file2.a" ],
"folders": [ "somewhere/bin/subfolder1", "somewhere/bin/subfolder2" ]
},
"excludes": [ "*.test" ]
},
{
"name": "reference-name-binaries-2",
"fileSystem": {
"files": [ "somewhere-else/mylib.so" ],
"folders": [ "somewhere-else/lib" ]
}
}
]
}
}
`
/* execute */
result := newSecHubConfigFromBytes([]byte(sechubJSON))
/* test */
fmt.Printf("%+v\n", result.Data.Binaries)
// Output:
// [{Name:reference-name-binaries-1 FileSystem:{Files:[somewhere/file1.dll somewhere/file2.a] Folders:[somewhere/bin/subfolder1 somewhere/bin/subfolder2]} Excludes:[*.test]} {Name:reference-name-binaries-2 FileSystem:{Files:[somewhere-else/mylib.so] Folders:[somewhere-else/lib]} Excludes:[]}]
}
func Example_newSecHubConfigFromFile_parses_codeScan_use_correctly() {
/* prepare */
sechubJSON := `
{
"data": {
"sources": [
{
"name": "mysources-1",
"fileSystem": { "folders": [ "src1/" ] }
},
{
"name": "mysources-2",
"fileSystem": { "folders": [ "src2/" ] }
}
]
},
"codeScan": {
"use": [ "mysources-1", "mysources-2" ]
}
}
`
/* execute */
result := newSecHubConfigFromBytes([]byte(sechubJSON))
/* test */
fmt.Printf("%+v\n", result.CodeScan.Use)
// Output:
// [mysources-1 mysources-2]
}
func Example_adjustSourceCodePatterns_respects_whitelistAll_false() {
/* prepare */
var context Context
var config Config
context.config = &config
config.whitelistAll = false
// Override global DefaultSourceCodeAllowedFilePatterns to get reproducible results
DefaultSourceCodeAllowedFilePatterns = []string{".c", ".d", ".e"}
sechubJSON := `
{
"data": {
"sources": [
{
"name": "sources-1",
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".a",
".b"
]
},
{
"name": "sources-2",
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".m",
".n"
]
}
]
},
"codeScan":{
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".y",
".z"
]
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.SourceCodePatterns)
}
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.SourceCodePatterns)
// Output:
// [.a .b .c .d .e]
// [.m .n .c .d .e]
// [.y .z .c .d .e]
}
func Example_adjustSourceCodePatterns_respects_whitelistAll_true() {
/* prepare */
var context Context
var config Config
context.config = &config
config.whitelistAll = true
// Override global DefaultSourceCodeAllowedFilePatterns to get reproducible results
DefaultSourceCodeAllowedFilePatterns = []string{".c", ".d", ".e"}
sechubJSON := `
{
"data": {
"sources": [
{
"name": "sources-1",
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".a",
".b"
]
},
{
"name": "sources-2",
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".m",
".n"
]
}
]
},
"codeScan":{
"fileSystem": { "folders": [ "." ] },
"additionalFilenameExtensions": [
".y",
".z"
]
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.SourceCodePatterns)
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.SourceCodePatterns)
}
// Output:
// []
// []
// []
}
func Example_adjustSourceCodePatterns_respects_Excludes() {
/* prepare */
var context Context
var config Config
context.config = &config
config.ignoreDefaultExcludes = false
// Override global DefaultSourceCodeExcludeDirPatterns to get reproducible results
DefaultSourceCodeExcludeDirPatterns = []string{"**/default-exclude1/**", "**/default-exclude2/**"}
sechubJSON := `
{
"data": {
"sources": [
{
"name": "sources-1",
"fileSystem": { "folders": [ "." ] },
"excludes": [
"**/data-exclude1/**",
"**/data-exclude2/**"
]
}
]
},
"codeScan":{
"fileSystem": { "folders": [ "." ] },
"excludes": [
"**/old-exclude1/**",
"**/old-exclude2/**"
]
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.Excludes)
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.Excludes)
}
// Output:
// [**/old-exclude1/** **/old-exclude2/** **/default-exclude1/** **/default-exclude2/**]
// [**/data-exclude1/** **/data-exclude2/** **/default-exclude1/** **/default-exclude2/**]
}
func Example_adjustSourceCodePatterns_respects_Excludes_no_default() {
/* prepare */
var context Context
var config Config
context.config = &config
config.ignoreDefaultExcludes = true
// Override global DefaultSourceCodeExcludeDirPatterns to get reproducible results
DefaultSourceCodeExcludeDirPatterns = []string{"**/default-exclude1/**", "**/default-exclude2/**"}
sechubJSON := `
{
"data": {
"sources": [
{
"name": "sources-1",
"fileSystem": { "folders": [ "." ] },
"excludes": [
"**/data-exclude1/**",
"**/data-exclude2/**"
]
}
]
},
"codeScan":{
"fileSystem": { "folders": [ "." ] },
"excludes": [
"**/old-exclude1/**",
"**/old-exclude2/**"
]
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.Excludes)
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.Excludes)
}
// Output:
// [**/old-exclude1/** **/old-exclude2/**]
// [**/data-exclude1/** **/data-exclude2/**]
}
func Example_adjustSourceCodePatterns_works_with_old_format() {
/* prepare */
var context Context
var config Config
context.config = &config
config.whitelistAll = false
// Override global DefaultSourceCodeAllowedFilePatterns to get reproducible results
DefaultSourceCodeAllowedFilePatterns = []string{".c", ".d", ".e"}
sechubJSON := `
{
"codeScan":{
"fileSystem": { "folders": [ "." ] }
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.SourceCodePatterns)
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.SourceCodePatterns)
}
// Output:
// [.c .d .e]
}
func Example_adjustSourceCodePatterns_works_with_data_section_format() {
/* prepare */
var context Context
var config Config
context.config = &config
config.whitelistAll = false
// Override global DefaultSourceCodeAllowedFilePatterns to get reproducible results
DefaultSourceCodeAllowedFilePatterns = []string{".c", ".d", ".e"}
sechubJSON := `
{
"data": {
"sources": [
{
"name": "mysources-1",
"fileSystem": { "folders": [ "src1/" ] }
}
]
},
"codeScan": {
"use": [ "mysources-1" ]
}
}
`
sechubConfig := newSecHubConfigFromBytes([]byte(sechubJSON))
context.sechubConfig = &sechubConfig
/* execute */
adjustSourceCodePatterns(&context)
/* test */
fmt.Printf("%+v\n", context.sechubConfig.CodeScan.SourceCodePatterns)
for _, i := range context.sechubConfig.Data.Sources {
fmt.Println(i.SourceCodePatterns)
}
// Output:
// []
// [.c .d .e]
}
|
$(aws ecr get-login --no-include-email --region us-west-1)
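# Note: `aws ecr get-login` was removed in AWS CLI v2; the equivalent there is:
#   aws ecr get-login-password --region us-west-1 | \
#     docker login --username AWS --password-stdin 879970359011.dkr.ecr.us-west-1.amazonaws.com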
docker build -t presence-bot/presence-bot-web-app .
docker tag presence-bot/presence-bot-web-app:latest 879970359011.dkr.ecr.us-west-1.amazonaws.com/presence-bot/presence-bot-web-app:latest
docker push 879970359011.dkr.ecr.us-west-1.amazonaws.com/presence-bot/presence-bot-web-app:latest |
brew tap mongodb/brew
brew install mongodb-community
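# Optionally run mongod as a background service afterwards:
#   brew services start mongodb-community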
|
#!/bin/bash
set -e
source hack/common.sh
CSV_CHECKSUM="tools/csv-checksum/csv-checksum"
(cd tools/csv-checksum/ && go build)
export CSV_CHECKSUM_OUTFILE="hack/latest-csv-checksum.md5"
export SKIP_MINIMUM="0.0.1"
export SKIP_RANGE=">=${SKIP_MINIMUM} <${CSV_VERSION}"
# Current dependency images our DEV CSV is pinned to
export ROOK_IMAGE=${ROOK_IMAGE:-"rook/ceph:v1.4.0-28.g3c00330"}
export NOOBAA_IMAGE=${NOOBAA_IMAGE:-"noobaa/noobaa-operator:2.3.0"}
export NOOBAA_CORE_IMAGE=${NOOBAA_CORE_IMAGE:-"noobaa/noobaa-core:5.5.0"}
export NOOBAA_DB_IMAGE=${NOOBAA_DB_IMAGE:-"centos/mongodb-36-centos7"}
export CEPH_IMAGE=${CEPH_IMAGE:-"ceph/ceph:v14.2"}
export OCS_IMAGE=${OCS_IMAGE:-"${IMAGE_REGISTRY}/${REGISTRY_NAMESPACE}/${OPERATOR_IMAGE_NAME}:${IMAGE_TAG}"}
export OCS_MUST_GATHER_IMAGE=${OCS_MUST_GATHER_IMAGE:-"${MUST_GATHER_FULL_IMAGE_NAME}"}
echo "=== Generating DEV CSV with the following vars ==="
echo -e "\tCSV_VERSION=$CSV_VERSION"
echo -e "\tROOK_IMAGE=$ROOK_IMAGE"
echo -e "\tNOOBAA_IMAGE=$NOOBAA_IMAGE"
echo -e "\tNOOBAA_CORE_IMAGE=$NOOBAA_CORE_IMAGE"
echo -e "\tNOOBAA_DB_IMAGE=$NOOBAA_DB_IMAGE"
echo -e "\tCEPH_IMAGE=$CEPH_IMAGE"
echo -e "\tOCS_IMAGE=$OCS_IMAGE"
if [ -z "${CSV_CHECKSUM_ONLY}" ]; then
hack/generate-unified-csv.sh
fi
echo "Generating MD5 Checksum for CSV with version $CSV_VERSION"
$CSV_CHECKSUM \
--csv-version="$CSV_VERSION" \
--replaces-csv-version="$REPLACES_CSV_VERSION" \
--rook-image="$ROOK_IMAGE" \
--ceph-image="$CEPH_IMAGE" \
--rook-csi-ceph-image="$ROOK_CSI_CEPH_IMAGE" \
--rook-csi-registrar-image="$ROOK_CSI_REGISTRAR_IMAGE" \
--rook-csi-resizer-image="$ROOK_CSI_RESIZER_IMAGE" \
--rook-csi-provisioner-image="$ROOK_CSI_PROVISIONER_IMAGE" \
--rook-csi-snapshotter-image="$ROOK_CSI_SNAPSHOTTER_IMAGE" \
--rook-csi-attacher-image="$ROOK_CSI_ATTACHER_IMAGE" \
--noobaa-image="$NOOBAA_IMAGE" \
--noobaa-core-image="$NOOBAA_CORE_IMAGE" \
--noobaa-db-image="$NOOBAA_DB_IMAGE" \
--ocs-image="$OCS_IMAGE" \
--ocs-must-gather-image="$OCS_MUST_GATHER_IMAGE" \
--checksum-outfile="$CSV_CHECKSUM_OUTFILE"
|
#!/usr/bin/python3
"""An app that only says hello to an authorized person."""
name = input("Name: ")
pswd = input("Pswd: ")
if name == "Dylan" and pswd == "cheetah":
print("Hello {}!".format(name))
else:
print("***Access Denied***")
|
let arr = [[3, 7, 8],
[2, 9, 5],
[1, 3, 6]];
function sumMatrix(arr) {
let total = 0;
for (let i = 0; i < arr.length; i++) {
for (let j = 0; j < arr[i].length; j++) {
total += arr[i][j];
}
}
return total;
}
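// Equivalent one-liner for reference: arr.flat().reduce((a, b) => a + b, 0)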
console.log(sumMatrix(arr)); // 44 |
package transmission
// SetSessionArgs arguments for Session.Set
type SetSessionArgs struct {
AltSpeedDown int `json:"alt-speed-down,omitempty"`
AltSpeedEnabled bool `json:"alt-speed-enabled,omitempty"`
AltSpeedTimeBegin int `json:"alt-speed-time-begin,omitempty"`
AltSpeedTimeEnabled bool `json:"alt-speed-time-enabled,omitempty"`
AltSpeedTimeEnd int `json:"alt-speed-time-end,omitempty"`
AltSpeedTimeDay int `json:"alt-speed-time-day,omitempty"`
AltSpeedUp int `json:"alt-speed-up,omitempty"`
BlocklistURL string `json:"blocklist-url,omitempty"`
BlocklistEnabled bool `json:"blocklist-enabled,omitempty"`
CacheSizeMb int `json:"cache-size-mb,omitempty"`
DownloadDir string `json:"download-dir,omitempty"`
DownloadQueueSize int `json:"download-queue-size,omitempty"`
DownloadQueueEnabled bool `json:"download-queue-enabled,omitempty"`
DhtEnabled bool `json:"dht-enabled,omitempty"`
Encryption string `json:"encryption,omitempty"`
IdleSeedingLimit int `json:"idle-seeding-limit,omitempty"`
IdleSeedingLimitEnabled bool `json:"idle-seeding-limit-enabled,omitempty"`
IncompleteDir string `json:"incomplete-dir,omitempty"`
IncompleteDirEnabled bool `json:"incomplete-dir-enabled,omitempty"`
LpdEnabled bool `json:"lpd-enabled,omitempty"`
PeerLimitGlobal int `json:"peer-limit-global,omitempty"`
PeerLimitPerTorrent int `json:"peer-limit-per-torrent,omitempty"`
PexEnabled bool `json:"pex-enabled,omitempty"`
PeerPort int `json:"peer-port,omitempty"`
PeerPortRandomOnStart bool `json:"peer-port-random-on-start,omitempty"`
PortForwardingEnabled bool `json:"port-forwarding-enabled,omitempty"`
QueueStalledEnabled bool `json:"queue-stalled-enabled,omitempty"`
QueueStalledMinutes int `json:"queue-stalled-minutes,omitempty"`
RenamePartialFiles bool `json:"rename-partial-files,omitempty"`
ScriptTorrentDoneFilename string `json:"script-torrent-done-filename,omitempty"`
ScriptTorrentDoneEnabled bool `json:"script-torrent-done-enabled,omitempty"`
SeedRatioLimit float64 `json:"seedRatioLimit,omitempty"`
SeedRatioLimited bool `json:"seedRatioLimited,omitempty"`
SeedQueueSize int `json:"seed-queue-size,omitempty"`
SeedQueueEnabled bool `json:"seed-queue-enabled,omitempty"`
SpeedLimitDown int `json:"speed-limit-down,omitempty"`
SpeedLimitDownEnabled bool `json:"speed-limit-down-enabled,omitempty"`
SpeedLimitUp int `json:"speed-limit-up,omitempty"`
SpeedLimitUpEnabled bool `json:"speed-limit-up-enabled,omitempty"`
StartAddedTorrents bool `json:"start-added-torrents,omitempty"`
TrashOriginalTorrentFiles bool `json:"trash-original-torrent-files,omitempty"`
Units *Units `json:"units,omitempty"`
UtpEnabled bool `json:"utp-enabled,omitempty"`
}
// Session contains information about the transmission
// session and interacts with it.
type Session struct {
Client *Client `json:"-"`
AltSpeedDown              int     `json:"alt-speed-down"`
AltSpeedEnabled bool `json:"alt-speed-enabled"`
AltSpeedTimeBegin int `json:"alt-speed-time-begin"`
AltSpeedTimeEnabled bool `json:"alt-speed-time-enabled"`
AltSpeedTimeEnd int `json:"alt-speed-time-end"`
AltSpeedTimeDay int `json:"alt-speed-time-day"`
AltSpeedUp int `json:"alt-speed-up"`
BlocklistURL string `json:"blocklist-url"`
BlocklistEnabled bool `json:"blocklist-enabled"`
BlocklistSize int `json:"blocklist-size"`
CacheSizeMb int `json:"cache-size-mb"`
ConfigDir string `json:"config-dir"`
DownloadDir string `json:"download-dir"`
DownloadQueueSize int `json:"download-queue-size"`
DownloadQueueEnabled bool `json:"download-queue-enabled"`
DhtEnabled bool `json:"dht-enabled"`
Encryption string `json:"encryption"`
IdleSeedingLimit int `json:"idle-seeding-limit"`
IdleSeedingLimitEnabled bool `json:"idle-seeding-limit-enabled"`
IncompleteDir string `json:"incomplete-dir"`
IncompleteDirEnabled bool `json:"incomplete-dir-enabled"`
LpdEnabled bool `json:"lpd-enabled"`
PeerLimitGlobal int `json:"peer-limit-global"`
PeerLimitPerTorrent int `json:"peer-limit-per-torrent"`
PexEnabled bool `json:"pex-enabled"`
PeerPort int `json:"peer-port"`
PeerPortRandomOnStart bool `json:"peer-port-random-on-start"`
PortForwardingEnabled bool `json:"port-forwarding-enabled"`
QueueStalledEnabled bool `json:"queue-stalled-enabled"`
QueueStalledMinutes int `json:"queue-stalled-minutes"`
RenamePartialFiles bool `json:"rename-partial-files"`
RPCVersion int `json:"rpc-version"`
RPCVersionMinimum int `json:"rpc-version-minimum"`
ScriptTorrentDoneFilename string `json:"script-torrent-done-filename"`
ScriptTorrentDoneEnabled bool `json:"script-torrent-done-enabled"`
SeedRatioLimit float64 `json:"seedRatioLimit"`
SeedRatioLimited bool `json:"seedRatioLimited"`
SeedQueueSize int `json:"seed-queue-size"`
SeedQueueEnabled bool `json:"seed-queue-enabled"`
SpeedLimitDown int `json:"speed-limit-down"`
SpeedLimitDownEnabled bool `json:"speed-limit-down-enabled"`
SpeedLimitUp int `json:"speed-limit-up"`
SpeedLimitUpEnabled bool `json:"speed-limit-up-enabled"`
StartAddedTorrents bool `json:"start-added-torrents"`
TrashOriginalTorrentFiles bool `json:"trash-original-torrent-files"`
Units *Units `json:"units"`
UtpEnabled bool `json:"utp-enabled"`
Version string `json:"version"`
}
// Statictics represents session statistics
type Statictics struct {
ActiveTorrentCount int
DownloadSpeed int
PausedTorrentCount int
TorrentCount int
UploadSpeed int
CumulativeStats *StaticticDetail `json:"cumulative-stats"`
CurrentStats *StaticticDetail `json:"current-stats"`
}
// StaticticDetail represents statistics details
type StaticticDetail struct {
UploadedBytes int
DownloadedBytes int
FilesAdded int
SessionCount int
SecondsActive int
}
// Units in session
type Units struct {
SpeedUnits []string `json:"speed-units"`
SpeedBytes int `json:"speed-bytes"`
SizeUnits []string `json:"size-units"`
SizeBytes int `json:"size-bytes"`
MemoryUnits []string `json:"memory-units"`
MemoryBytes int `json:"memory-bytes"`
}
// Set sets session parameters; see SetSessionArgs
func (s *Session) Set(args SetSessionArgs) error {
tReq := &Request{
Arguments: args,
Method: "session-set",
}
r := &Response{}
err := s.Client.request(tReq, r)
if err != nil {
return err
}
return nil
}
// Update session information from transmission
func (s *Session) Update() error {
tReq := &Request{
Method: "session-get",
}
r := &Response{Arguments: s}
err := s.Client.request(tReq, r)
if err != nil {
return err
}
return nil
}
// Stats returns session statistics
func (s *Session) Stats() (Statictics, error) {
tReq := &Request{
Method: "session-stats",
}
stat := Statictics{}
r := &Response{Arguments: &stat}
err := s.Client.request(tReq, r)
if err != nil {
return Statictics{}, err
}
return stat, nil
}
// Close tells the transmission session to shut down.
func (s *Session) Close() error {
tReq := &Request{
Method: "session-close",
}
r := &Response{}
err := s.Client.request(tReq, r)
if err != nil {
return err
}
return nil
}
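// Usage sketch (assumes a configured *Client from elsewhere in this package):
//
//	session := &Session{Client: client}
//	if err := session.Update(); err != nil {
//		// handle error
//	}
//	_ = session.Set(SetSessionArgs{SpeedLimitDown: 1000, SpeedLimitDownEnabled: true})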
|
#ifndef REDIS_ROARING_TYPE_H
#define REDIS_ROARING_TYPE_H
#define BITMAP_ENCODING_VERSION 1
void BitmapRdbSave(RedisModuleIO* rdb, void* value);
void* BitmapRdbLoad(RedisModuleIO* rdb, int encver);
void BitmapAofRewrite(RedisModuleIO* aof, RedisModuleString* key, void* value);
size_t BitmapMemUsage(const void* value);
void BitmapFree(void* value);
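/* Registration sketch (assumes a module OnLoad, which provides ctx; the
 * 9-character type name below is illustrative):
 *
 *   RedisModuleTypeMethods tm = {
 *       .version = REDISMODULE_TYPE_METHOD_VERSION,
 *       .rdb_load = BitmapRdbLoad,
 *       .rdb_save = BitmapRdbSave,
 *       .aof_rewrite = BitmapAofRewrite,
 *       .mem_usage = BitmapMemUsage,
 *       .free = BitmapFree,
 *   };
 *   RedisModule_CreateDataType(ctx, "reroaring", BITMAP_ENCODING_VERSION, &tm);
 */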
#endif
|
#!/sbin/sh
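# Locate the recovery UI's output pipe fd (OUTFD) so ui_print messages below
# reach the flashing screen; a common idiom in update-binary scripts.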
for FD in `ls /proc/$$/fd`; do
readlink /proc/$$/fd/$FD 2>/dev/null | grep pipe >/dev/null
if [ "$?" -eq "0" ]; then
ps | grep " 3 $FD " | grep -v grep >/dev/null
if [ "$?" -eq "0" ]; then
OUTFD=$FD
break
fi
fi
done
ui_print() {
echo -n -e "ui_print $1\n" >> /proc/self/fd/$OUTFD
echo -n -e "ui_print\n" >> /proc/self/fd/$OUTFD
}
safe_mount() {
IS_MOUNTED=$(cat /proc/mounts | grep "$1")
if [ "$IS_MOUNTED" ]; then
mount -o rw,remount $1
else
mount $1
fi
}
#kernel_repo=/data/media/0/opoverlay
#safe_mount /data
#if [ ! -d $kernel_repo ]; then
# mkdir -p $kernel_repo
# cp -rf /data/media/opoverlay/* $kernel_repo/
#rm -rf /data/media/opoverlay
#cp -rf /data/media/TWRP/* /data/media/0/TWRP/
#fi
|
#!/bin/bash
$CK_ENV_COMPILER_PYTHON_FILE $PACKAGE_DIR/preprocess_image_dataset_using_tensorflow.py "$CK_ENV_DATASET_IMAGENET_VAL" "$INSTALL_DIR"
|
#include <stdio.h>
int main()
{
int n, c;
printf("Enter an integer to print it's multiplication table: ");
scanf("%d", &n);
for (c = 1; c <= 10; c++)
{
printf("%d * %d = %d\n", n, c, n*c);
}
return 0;
} |
import { WGSLParticleCommon } from "./WGSLParticleCommon";
import { BindGroupInfo, WGSL, WGSLCommonFrag, WGSLEncoder } from "../../shaderlib";
import { ShaderMacroCollection } from "../../shader";
export class WGSLParticleDraw extends WGSL {
protected _drawFunction: string;
protected _drawStruct: string;
constructor() {
super();
this._drawFunction =
"// Map a range from [edge0, edge1] to [0, 1].\n" +
"fn maprange(edge0: f32, edge1: f32, x: f32) -> f32 {\n" +
" return clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);\n" +
"}\n" +
"\n" +
"// Map a value in [0, 1] to peak at edge.\n" +
"fn curve_inout(x: f32, edge: f32) -> f32 {\n" +
" // Coefficient for sub range.\n" +
" let a = maprange(0.0, edge, x);\n" +
" let b = maprange(edge, 1.0, x);\n" +
" \n" +
" // Quadratic ease-in / quadratic ease-out.\n" +
" let easein = a * (2.0 - a); // a * a;\n" +
" let easeout = b*b - 2.0 * b + 1.0; // 1.0f - b * b;\n" +
" \n" +
" // chose between easin / easout function.\n" +
" let result = mix(easein, easeout, step(edge, x));\n" +
" \n" +
" // Makes particles fade-in and out of existence\n" +
" return result;\n" +
"}\n" +
"\n" +
"fn compute_size(z: f32, decay: f32, uMinParticleSize: f32, uMaxParticleSize: f32) -> f32 {\n" +
" let min_size = uMinParticleSize;\n" +
" let max_size = uMaxParticleSize;\n" +
" \n" +
" // tricks to 'zoom-in' the pointsprite, just set to 1 to have normal size.\n" +
" let depth = (max_size-min_size) / (z);\n" +
" \n" +
" return mix(min_size, max_size, decay * depth);\n" +
"}\n" +
"\n" +
"\n" +
"fn base_color(position: vec3<f32>, decay: f32, uColorMode: u32, uBirthGradient: vec3<f32>, uDeathGradient: vec3<f32>) -> vec3<f32> {\n" +
" // Gradient mode\n" +
" if (uColorMode == 1u) {\n" +
" return mix(uBirthGradient, uDeathGradient, decay);\n" +
" }\n" +
" // Default mode\n" +
" return 0.5 * (normalize(position) + 1.0);\n" +
"}\n" +
"\n" +
"fn compute_color(base_color: vec3<f32>, decay: f32, texcoord: vec2<f32>, uFadeCoefficient: f32, uDebugDraw: bool) -> vec4<f32> {\n" +
" if (uDebugDraw) {\n" +
" return vec4<f32>(1.0);\n" +
" }\n" +
" \n" +
" var color = vec4<f32>(base_color, 1.0);\n" +
" \n" +
" // Centered coordinates.\n" +
" let p = 2.0 * (texcoord - 0.5);\n" +
" // Pixel intensity depends on its distance from center.\n" +
" let d = 1.0 - abs(dot(p, p));\n" +
" \n" +
" // Alpha coefficient.\n" +
" let alpha = smoothStep(0.0, 1.0, d);\n" +
" \n" +
" //color = texture(uSpriteSampler2d, texcoord).rrrr;\n" +
" color = color * alpha * decay * uFadeCoefficient;\n" +
" \n" +
" return color;\n" +
"}";
this._drawStruct =
"struct ParticleData {\n" +
" birthGradient: vec3<f32>;\n" +
" minParticleSize: f32;\n" +
" deathGradient: vec3<f32>;\n" +
" maxParticleSize: f32;\n" +
" colorMode: u32;\n" +
" fadeCoefficient: f32;\n" +
" debugDraw: f32;\n" +
" _pad: f32;\n" +
"};\n";
}
}
//------------------------------------------------------------------------------
export class WGSLParticleVertex extends WGSLParticleDraw {
private _particleCommon: WGSLParticleCommon;
private _commonFrag: WGSLCommonFrag;
constructor() {
super();
this._particleCommon = new WGSLParticleCommon();
this._commonFrag = new WGSLCommonFrag("VertexIn");
}
compile(macros: ShaderMacroCollection): [string, BindGroupInfo] {
this._source = "";
this._bindGroupInfo.clear();
const inputStructCounter = WGSLEncoder.startCounter();
const outputStructCounter = WGSLEncoder.startCounter(0);
{
const encoder = this.createSourceEncoder(GPUShaderStage.VERTEX);
this._particleCommon.execute(encoder, macros);
encoder.addFunction(this._drawFunction);
encoder.addStruct(
"var<private> pos : array<vec2<f32>, 4> = array<vec2<f32>, 4>(\n" +
" vec2<f32>(-1.0, 1.0), vec2<f32>(-1.0, -1.0), vec2<f32>(1.0, 1.0), vec2<f32>(1.0, -1.0)\n" +
");\n"
);
encoder.addStruct(this._drawStruct);
encoder.addUniformBinding("u_particleData", "ParticleData");
this._commonFrag.execute(encoder, macros);
encoder.addInoutType("VertexIn", 0, "position", "vec4<f32>");
encoder.addInoutType("VertexIn", 1, "velocity", "vec4<f32>");
encoder.addInoutType("VertexIn", 2, "simulation", "vec4<f32>");
encoder.addBuiltInoutType("VertexIn", "vertex_index", "vertexIndex", "u32");
encoder.addBuiltInoutType("VertexOut", "position", "position", "vec4<f32>");
encoder.addInoutType("VertexOut", 0, "uv", "vec2<f32>");
encoder.addInoutType("VertexOut", 1, "color", "vec3<f32>");
encoder.addInoutType("VertexOut", 2, "decay", "f32");
encoder.addRenderEntry([["in", "VertexIn"]], ["out", "VertexOut"], () => {
let source: string = "";
source +=
" // Time alived in [0, 1].\n" +
" let dAge = 1.0 - maprange(0.0, in.simulation.x, in.simulation.y);\n" +
" let decay = curve_inout(dAge, 0.55);\n" +
" \n" +
" out.uv = pos[in.vertexIndex];\n" +
" let worldPosApprox = u_cameraData.u_projMat * u_cameraData.u_viewMat * vec4<f32>(in.position.xyz, 1.0);\n" +
" let worldPos = vec3<f32>(out.uv, 0.0) * compute_size(worldPosApprox.z/worldPosApprox.w, decay,\n" +
" u_particleData.minParticleSize, u_particleData.maxParticleSize) * 0.025;\n" +
" \n" +
" // Generate a billboarded model view matrix\n" +
" var bbModelViewMatrix:mat4x4<f32> = mat4x4<f32>(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0);\n" +
" bbModelViewMatrix[3] = vec4<f32>(in.position.xyz, 1.0);\n" +
" bbModelViewMatrix = u_cameraData.u_viewMat * bbModelViewMatrix;\n" +
" bbModelViewMatrix[0][0] = 1.0;\n" +
" bbModelViewMatrix[0][1] = 0.0;\n" +
" bbModelViewMatrix[0][2] = 0.0;\n" +
"\n" +
" bbModelViewMatrix[1][0] = 0.0;\n" +
" bbModelViewMatrix[1][1] = 1.0;\n" +
" bbModelViewMatrix[1][2] = 0.0;\n" +
"\n" +
" bbModelViewMatrix[2][0] = 0.0;\n" +
" bbModelViewMatrix[2][1] = 0.0;\n" +
" bbModelViewMatrix[2][2] = 1.0;\n" +
" out.position = u_cameraData.u_projMat * bbModelViewMatrix * vec4<f32>(worldPos, 1.0);\n" +
" \n" +
" // Output parameters.\n" +
" out.color = base_color(in.position.xyz, decay,\n" +
" u_particleData.colorMode, u_particleData.birthGradient, u_particleData.deathGradient);\n" +
" out.decay = decay;\n";
return source;
});
encoder.flush();
}
WGSLEncoder.endCounter(inputStructCounter);
WGSLEncoder.endCounter(outputStructCounter);
return [this._source, this._bindGroupInfo];
}
}
//------------------------------------------------------------------------------
export class WGSLParticleFragment extends WGSLParticleDraw {
constructor() {
super();
}
compile(macros: ShaderMacroCollection): [string, BindGroupInfo] {
this._source = "";
this._bindGroupInfo.clear();
const inputStructCounter = WGSLEncoder.startCounter(0);
{
const encoder = this.createSourceEncoder(GPUShaderStage.FRAGMENT);
encoder.addFunction(this._drawFunction);
encoder.addStruct(this._drawStruct);
encoder.addUniformBinding("u_particleData", "ParticleData");
encoder.addInoutType("VertexOut", 0, "uv", "vec2<f32>");
encoder.addInoutType("VertexOut", 1, "color", "vec3<f32>");
encoder.addInoutType("VertexOut", 2, "decay", "f32");
encoder.addInoutType("Output", 0, "finalColor", "vec4<f32>");
encoder.addRenderEntry([["in", "VertexOut"]], ["out", "Output"], () => {
return "out.finalColor = compute_color(in.color, in.decay, in.uv, u_particleData.fadeCoefficient, bool(u_particleData.debugDraw));\n";
});
encoder.flush();
}
WGSLEncoder.endCounter(inputStructCounter);
return [this._source, this._bindGroupInfo];
}
}
|
### FUNCTIONS
upinfo ()
{
echo -ne "${green}$HOSTNAME ${red}uptime is ${cyan} \t ";uptime | awk /'up/ {print $3,$4,$5,$6,$7,$8,$9,$10}'
}
# Check command exists
function _command_exists() {
type "$1" &> /dev/null ;
}
# Show ruby version
function prompt_rvm {
if _command_exists rvm-prompt; then
rbv=`rvm-prompt`
fi
if _command_exists rbenv; then
eval "$(rbenv init -)"
rbv=`rbenv version-name`
fi
if [[ -n "${rbv/[ ]*\n/}" ]]; then
#rbv=${rbv#ruby-}
[[ $rbv == *"@"* ]] || rbv="${rbv}@default"
echo "["$rbv"]"
else
echo ""
fi
}
# Helper function loading various enable-able files
function load_bash_files() {
subdirectory="$1"
if [ -d "$HOME/.bash/${subdirectory}/enable" ]
then
FILES="$HOME/.bash/${subdirectory}/enable/*.bash"
for config_file in $FILES
do
if [ -e "${config_file}" ]; then
source $config_file
fi
done
fi
}
function load_colors_16() {
for fgbg in 38 48 ; do # Foreground / Background
for color in {0..15} ; do # Colors
# Display the color
printf "\e[${fgbg};5;%sm %3s \e[0m" $color $color
# Display 8 colors per lines
if [ $((($color + 1) % 8)) == 0 ] ; then
echo # New line
fi
done
echo # New line
done
}
function load_colors_256() {
for fgbg in 38 48 ; do # Foreground / Background
for color in {0..255} ; do # Colors
# Display the color
printf "\e[${fgbg};5;%sm %3s \e[0m" $color $color
# Display 6 colors per lines
if [ $((($color + 1) % 6)) == 4 ] ; then
echo # New line
fi
done
echo # New line
done
}
if ! type pathmunge > /dev/null 2>&1
then
function pathmunge () {
about 'prevent duplicate directories in your PATH variable'
group 'helpers'
example 'pathmunge /path/to/dir is equivalent to PATH=/path/to/dir:$PATH'
example 'pathmunge /path/to/dir after is equivalent to PATH=$PATH:/path/to/dir'
if ! [[ $PATH =~ (^|:)$1($|:) ]] ; then
if [ "$2" = "after" ] ; then
export PATH=$PATH:$1
else
export PATH=$1:$PATH
fi
fi
}
fi
function bash_prompt() {
case $TERM in
xterm*|rxvt*)
local TITLEBAR='\[\033]0;\u@\h:$PWD\007\]'
;;
*)
local TITLEBAR=""
;;
esac
local NONE="\[\033[0m\]" # unsets color to term's fg color
# regular colors
local K="\[\033[0;30m\]" # black
local R="\[\033[0;31m\]" # red
local G="\[\033[0;32m\]" # green
local Y="\[\033[0;33m\]" # yellow
local B="\[\033[0;34m\]" # blue
local M="\[\033[0;35m\]" # magenta
local C="\[\033[0;36m\]" # cyan
local W="\[\033[0;37m\]" # white
# emphasized (bolded) colors
local EMK="\[\033[1;30m\]"
local EMR="\[\033[1;31m\]"
local EMG="\[\033[1;32m\]"
local EMY="\[\033[1;33m\]"
local EMB="\[\033[1;34m\]"
local EMM="\[\033[1;35m\]"
local EMC="\[\033[1;36m\]"
local EMW="\[\033[1;37m\]"
# background colors
local BGK="\[\033[40m\]"
local BGR="\[\033[41m\]"
local BGG="\[\033[42m\]"
local BGY="\[\033[43m\]"
local BGB="\[\033[44m\]"
local BGM="\[\033[45m\]"
local BGC="\[\033[46m\]"
local BGW="\[\033[47m\]"
local UC=$EMY # user's color
[ $UID -eq "0" ] && UC=$EMR # root's color
# extra backslash in front of \$ to make bash colorize the prompt
# single line
# PS1="$TITLEBAR${R}[${C}\t${R}]${UC}\u${EMR}@${EMY}\H${W}\w\[\033[m\]\$(prompt_rvm)${EMB}\$(__git_ps1)${EMG}\\$ "
# new line
PS1="$TITLEBAR${R}[${C}\t${R}]${UC}\u${EMR}@${EMY}\H${W}\w\[\033[m\]\$(prompt_rvm)${EMB}\$(__git_ps1 ' (%s)')\n${EMG}\$ "
}
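# Note: __git_ps1 used in the prompts here comes from git's git-prompt.sh,
# which must be sourced for the branch segment to render.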
function bash_prompt_powerline() {
case $TERM in
linux|xterm*|rxvt*|screen*)
local TITLEBAR='\[\033]0;\u@\h:$PWD\007\]'
;;
*)
local TITLEBAR=""
;;
esac
local TIME_COLOR="141m"
local INFO_COLOR="61m"
local DIRECTORY_COLOR="59m"
local GIT_COLOR="117m"
local LOCK_COLOR="203m"
local PROMPT_COLOR="84m"
local TEXT_COLOR="00m"
local FOREGROUND_COLOR="231m"
local BACKGROUND_COLOR="17m"
# Powerline glyphs (require a powerline-patched font)
local POWERLINE_SEPARATOR_RIGHT=""
local POWERLINE_LOCK_ICON=""
local POWERLINE_BRANCH_ICON=""
local NONE="\[\e[0m\]"
local GIT_TEXT="\[\e[38;5;$GIT_COLOR\]"
local NORMAL_TEXT="\[\033[$TEXT_COLOR\]"
local TIME_START="\[\e[48;5;$TIME_COLOR\]\[\e[38;5;$BACKGROUND_COLOR\]"
local TIME_END_INFO="\[\e[48;5;$INFO_COLOR\]\[\e[38;5;$TIME_COLOR\]$POWERLINE_SEPARATOR_RIGHT"
local TIME_END_DIRECTORY="\[\e[48;5;$DIRECTORY_COLOR\]\[\e[38;5;$TIME_COLOR\]$POWERLINE_SEPARATOR_RIGHT"
local INFO_START="\[\e[48;5;$INFO_COLOR\]\[\e[38;5;$FOREGROUND_COLOR\]"
local INFO_END="\[\e[48;5;$DIRECTORY_COLOR\]\[\e[38;5;$INFO_COLOR\]$POWERLINE_SEPARATOR_RIGHT"
local DIRECTORY_START="\[\e[48;5;$DIRECTORY_COLOR\]\[\e[38;5;$FOREGROUND_COLOR\]"
local DIRECTORY_END="$NONE\[\e[38;5;$DIRECTORY_COLOR\]$POWERLINE_SEPARATOR_RIGHT" # when next segment is none
if [ ! -w "$PWD" ]; then
# Current directory is not writable
DIRECTORY_END="\[\e[48;5;$LOCK_COLOR\]\[\e[38;5;$DIRECTORY_COLOR\]$POWERLINE_SEPARATOR_RIGHT\[\e[38;5;$FOREGROUND_COLOR\] $POWERLINE_LOCK_ICON $NONE\[\e[38;5;$LOCK_COLOR\]$POWERLINE_SEPARATOR_RIGHT"
fi
PS1="$GIT_TEXT\$(__git_ps1 '$POWERLINE_BRANCH_ICON (%s)')\n\[\e[38;5;$PROMPT_COLOR\]\$ $NORMAL_TEXT"
PS1="$DIRECTORY_START \W $DIRECTORY_END $PS1"
if [[ `tput cols` -gt 50 ]]; then
if [[ `tput cols` -lt 80 ]]; then
PS1="$TIME_START \t $TIME_END_DIRECTORY$PS1"
else
PS1="$TIME_START \t $TIME_END_INFO$INFO_START \u@\h $INFO_END$PS1"
fi
fi
PS1="$TITLEBAR$PS1"
}
#------------------------------------------////
# System Information:
#------------------------------------------////
# clear
# echo -e "${LIGHTGRAY}";figlet "Terminal Fu";
# echo -ne "${red}Today is:\t\t${cyan}" `date`; echo ""
# echo -e "${red}Kernel Information: \t${cyan}" `uname -smr`
# echo -ne "${cyan}";upinfo;echo ""
# echo -e "${cyan}"; cal -3
|
#include <stdio.h>
#include <math.h>
int main() {
int num, i, flag = 0;
// input from the user
printf("enter number: ");
scanf("%d", &num);
    // Primes are >= 2, so start the search no lower than 2
    int nearestPrime = (num < 2) ? 2 : num;
    // step upward until a prime is found
    int increment = 1;
while (1) {
// Flag to determine whether the number is prime.
flag = 0;
// check if nearest prime number is divisible by any number.
for (i = 2; i <= sqrt(nearestPrime); i++) {
if (nearestPrime % i == 0) {
flag = 1;
break;
}
}
// If the number is not divisible by any number,
// then it is a prime number
if (flag == 0) {
break;
}
nearestPrime += increment;
}
printf("Nearest prime number is %d", nearestPrime);
return 0;
} |
#!@RCD_SCRIPTS_SHELL@
# PROVIDE: miniircd
# REQUIRE: DAEMON
if [ -f /etc/rc.subr ]; then
. /etc/rc.subr
fi
name="miniircd"
rcvar=$name
command="@PREFIX@/sbin/miniircd"
command_interpreter="@PYTHONBIN@"
command_args="-d"
miniircd_flags=${miniircd_flags-"--setuid @MINIIRCD_USER@:@MINIIRCD_GROUP@"}
if [ -f /etc/rc.subr ]; then
load_rc_config $name
run_rc_command "$1"
else
echo -n "${name}"
${command} ${miniircd_flags} ${command_args}
fi
|
// lib/merge-less/index.js
'use strict';
var _regenerator = require('babel-runtime/regenerator');
var _regenerator2 = _interopRequireDefault(_regenerator);
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; }
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
/** @format */
var fs = require('fs');
var path = require('path');
var less = require('less');
var rimraf = require('rimraf');
var uglifycss = require('uglifycss');
var defaultDarkTheme = require('@ant-design/dark-theme');
var _require = require('umi-utils'),
winPath = _require.winPath;
var genModuleLess = require('./genModuleLess');
var getVariable = require('./getVariable');
var loopAllLess = require('./loopAllLess');
var _require2 = require('../util'),
genHashCode = _require2.genHashCode;
var darkTheme = _extends({}, defaultDarkTheme.default, {
dark: true,
'@white': '#fff',
'@light': '#fff',
'@text-color': 'fade(@white, 65%)',
'@heading-color': 'fade(@white, 85%)',
// Mobile
'@screen-sm': '767.9px',
// Extra-small screens
'@screen-xs': '375px',
// Official site
'@site-text-color': '@text-color',
'@site-border-color-split': 'fade(@light, 5)',
'@site-heading-color': '@heading-color',
'@site-header-box-shadow': '0 0.3px 0.9px rgba(0, 0, 0, 0.12), 0 1.6px 3.6px rgba(0, 0, 0, 0.12)',
'@home-text-color': '@text-color',
// Custom values; check with a designer
'@gray-8': '@text-color',
'@background-color-base': '#555',
// pro
'@pro-header-box-shadow': '@site-header-box-shadow'
});
var tempPath = winPath(path.join(__dirname, './.temp/'));
var loadAntd = function () {
var _ref = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee(ignoreAntd, _ref2) {
var _ref2$dark = _ref2.dark,
dark = _ref2$dark === undefined ? false : _ref2$dark,
_ref2$compact = _ref2.compact,
compact = _ref2$compact === undefined ? false : _ref2$compact;
var ignoreFiles, antdPath;
return _regenerator2.default.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.prev = 0;
if (!ignoreAntd) {
_context.next = 4;
break;
}
fs.writeFileSync(path.join(tempPath, './antd.less'), '@import \'../color/bezierEasing\';\n @import \'../color/colorPalette\';\n @import "../color/tinyColor";\n ');
return _context.abrupt('return', false);
case 4:
ignoreFiles = [];
if (!dark) {
ignoreFiles.push('themes/dark.less');
}
if (!compact) {
ignoreFiles.push('themes/compact.less');
}
antdPath = require.resolve('antd');
if (!fs.existsSync(antdPath)) {
_context.next = 12;
break;
}
_context.next = 11;
return loopAllLess(path.resolve(path.join(antdPath, '../../es/')), ignoreFiles).then(function (content) {
fs.writeFileSync(path.join(tempPath, './antd.less'), '@import \'../color/bezierEasing\';\n@import \'../color/colorPalette\';\n@import "../color/tinyColor";\n ' + content + '\n ');
});
case 11:
return _context.abrupt('return', true);
case 12:
_context.next = 17;
break;
case 14:
_context.prev = 14;
_context.t0 = _context['catch'](0);
console.log(_context.t0);
case 17:
fs.writeFileSync(path.join(tempPath, './antd.less'), '@import \'../color/bezierEasing\';\n@import \'../color/colorPalette\';\n@import "../color/tinyColor";\n ');
return _context.abrupt('return', false);
case 19:
case 'end':
return _context.stop();
}
}
}, _callee, undefined, [[0, 14]]);
}));
return function loadAntd(_x, _x2) {
return _ref.apply(this, arguments);
};
}();
var loadLibraryComponents = function () {
var _ref3 = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee2(_ref4) {
var filterFileLess = _ref4.filterFileLess,
_ref4$extraLibraries = _ref4.extraLibraries,
extraLibraries = _ref4$extraLibraries === undefined ? [] : _ref4$extraLibraries;
var components, jobs, contentList;
return _regenerator2.default.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
components = ['@ant-design/pro-layout', '@ant-design/pro-table'].concat(_toConsumableArray(extraLibraries));
_context2.prev = 1;
if (!components) {
_context2.next = 9;
break;
}
jobs = [];
components.forEach(function (item) {
if (filterFileLess && !filterFileLess(item)) {
return;
}
var componentPath = require.resolve(item);
if (fs.existsSync(componentPath)) {
jobs.push(loopAllLess(path.resolve(path.join(componentPath, '../../es/')), []));
}
});
_context2.next = 7;
return Promise.all(jobs);
case 7:
contentList = _context2.sent;
fs.writeFileSync(path.join(tempPath, '/components.less'), '@import \'./antd\';\n' + contentList.join('\n') + '\n ');
case 9:
_context2.next = 14;
break;
case 11:
_context2.prev = 11;
_context2.t0 = _context2['catch'](1);
fs.writeFileSync(path.join(tempPath, '/components.less'), "@import './antd';");
case 14:
fs.writeFileSync(path.join(tempPath, '/layout.less'), "@import './antd';");
return _context2.abrupt('return', false);
case 16:
case 'end':
return _context2.stop();
}
}
}, _callee2, undefined, [[1, 11]]);
}));
return function loadLibraryComponents(_x3) {
return _ref3.apply(this, arguments);
};
}();
var getModifyVars = function getModifyVars() {
var theme = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'light';
var modifyVars = arguments[1];
var disableExtendsDark = arguments[2];
try {
if (theme === 'dark') {
return _extends({}, disableExtendsDark ? {} : darkTheme, modifyVars);
}
return _extends({ dark: false }, modifyVars);
} catch (error) {
throw error;
}
};
var getOldFile = function getOldFile(filePath) {
if (fs.existsSync(filePath)) {
return fs.readFileSync(filePath);
}
return false;
};
var isEqual = false;
var genProjectLess = function genProjectLess(filePath, _ref5) {
var isModule = _ref5.isModule,
publicLessPath = _ref5.publicLessPath,
loadAny = _ref5.loadAny,
cache = _ref5.cache,
ignoreAntd = _ref5.ignoreAntd,
ignoreProLayout = _ref5.ignoreProLayout,
rest = _objectWithoutProperties(_ref5, ['isModule', 'publicLessPath', 'loadAny', 'cache', 'ignoreAntd', 'ignoreProLayout']);
return genModuleLess(filePath, _extends({ isModule: isModule, publicLessPath: publicLessPath }, rest)).then(function () {
var _ref6 = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee3(content) {
var tempFilePath, lessContent;
return _regenerator2.default.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
if (cache === false) {
rimraf.sync(tempPath);
}
if (!fs.existsSync(tempPath)) {
fs.mkdirSync(tempPath);
}
tempFilePath = winPath(path.join(tempPath, 'temp.less'));
// Get hashes of the old and new files
// const newFileHash = genHashCode(content);
//
// const oldFileHash = genHashCode(getOldFile(tempFilePath));
// if (newFileHash === oldFileHash) {
// isEqual = true;
// // no need to regenerate
// return false;
// }
fs.writeFileSync(tempFilePath, content);
_context3.prev = 4;
if (!loadAny) {
_context3.next = 9;
break;
}
fs.writeFileSync(winPath(path.join(tempPath, 'pro.less')), '@import \'./components\';\n ' + content);
_context3.next = 13;
break;
case 9:
_context3.next = 11;
return getVariable(filePath, tempFilePath, fs.readFileSync(tempFilePath), loadAny).then(function (result) {
return result.content.toString();
});
case 11:
lessContent = _context3.sent;
fs.writeFileSync(winPath(path.join(tempPath, 'pro.less')), '@import \'./components\';\n ' + lessContent);
case 13:
_context3.next = 18;
break;
case 15:
_context3.prev = 15;
_context3.t0 = _context3['catch'](4);
console.log(_context3.t0.name, _context3.t0.file, 'line: ' + _context3.t0.line);
case 18:
_context3.next = 20;
return loadLibraryComponents(rest);
case 20:
return _context3.abrupt('return', true);
case 21:
case 'end':
return _context3.stop();
}
}
}, _callee3, undefined, [[4, 15]]);
}));
return function (_x5) {
return _ref6.apply(this, arguments);
};
}());
};
var modifyVarsArrayPath = path.join(tempPath, 'modifyVarsArray.json');
var modifyVarsIsEqual = function modifyVarsIsEqual() {
var modifyVarsArray = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
var modifyVarsArrayString = JSON.stringify(modifyVarsArray);
var old = getOldFile(modifyVarsArrayPath);
if (old && genHashCode(old) === genHashCode(modifyVarsArrayString) && isEqual) {
console.log('📸 less and modifyVarsArray is equal!');
return true;
}
return false;
};
var renderLess = function () {
var _ref7 = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee4() {
var theme = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'light';
var modifyVars = arguments[1];
var _ref8 = arguments[2];
var _ref8$min = _ref8.min,
min = _ref8$min === undefined ? true : _ref8$min,
_ref8$ignoreAntd = _ref8.ignoreAntd,
ignoreAntd = _ref8$ignoreAntd === undefined ? false : _ref8$ignoreAntd,
_ref8$disableExtendsD = _ref8.disableExtendsDark,
disableExtendsDark = _ref8$disableExtendsD === undefined ? false : _ref8$disableExtendsD;
var proLess, myModifyVars;
return _regenerator2.default.wrap(function _callee4$(_context4) {
while (1) {
switch (_context4.prev = _context4.next) {
case 0:
proLess = winPath(path.join(tempPath, './pro.less'));
if (fs.existsSync(proLess)) {
_context4.next = 3;
break;
}
return _context4.abrupt('return', '');
case 3:
myModifyVars = getModifyVars(theme || 'light', modifyVars, disableExtendsDark);
_context4.next = 6;
return loadAntd(ignoreAntd, {
dark: myModifyVars.dark,
compact: myModifyVars.compact
});
case 6:
return _context4.abrupt('return', less.render(fs.readFileSync(proLess, 'utf-8'), {
modifyVars: myModifyVars,
javascriptEnabled: true,
filename: path.resolve(proLess)
})
// Minify only if requested; minification is enabled by default
.then(function (out) {
return min ? uglifycss.processString(out.css) : out.css;
}).catch(function (e) {
console.log(e);
}));
case 7:
case 'end':
return _context4.stop();
}
}
}, _callee4, undefined);
}));
return function renderLess() {
return _ref7.apply(this, arguments);
};
}();
var build = function () {
var _ref9 = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee6(cwd, modifyVarsArray) {
var propsOption = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : { isModule: true, loadAny: false, cache: true };
var defaultOption, option, needBuild, loop;
return _regenerator2.default.wrap(function _callee6$(_context6) {
while (1) {
switch (_context6.prev = _context6.next) {
case 0:
console.log('🔩 less render start!');
isEqual = false;
defaultOption = { isModule: true, cache: true };
option = _extends({}, defaultOption, propsOption);
_context6.prev = 4;
_context6.next = 7;
return genProjectLess(cwd, option);
case 7:
needBuild = _context6.sent;
if (!(!needBuild && modifyVarsIsEqual(modifyVarsArray))) {
_context6.next = 11;
break;
}
console.log('🎩 less render end!');
return _context6.abrupt('return');
case 11:
loop = function () {
var _ref10 = _asyncToGenerator( /*#__PURE__*/_regenerator2.default.mark(function _callee5(index) {
var _modifyVarsArray$inde, theme, modifyVars, fileName, disableExtendsDark, css;
return _regenerator2.default.wrap(function _callee5$(_context5) {
while (1) {
switch (_context5.prev = _context5.next) {
case 0:
if (modifyVarsArray[index]) {
_context5.next = 2;
break;
}
return _context5.abrupt('return', false);
case 2:
_modifyVarsArray$inde = modifyVarsArray[index], theme = _modifyVarsArray$inde.theme, modifyVars = _modifyVarsArray$inde.modifyVars, fileName = _modifyVarsArray$inde.fileName, disableExtendsDark = _modifyVarsArray$inde.disableExtendsDark;
_context5.prev = 3;
_context5.next = 6;
return renderLess(theme, modifyVars, _extends({}, option, {
disableExtendsDark: disableExtendsDark
}));
case 6:
css = _context5.sent;
fs.writeFileSync(fileName, css);
// Write the cached modifyVars settings
fs.writeFileSync(modifyVarsArrayPath, JSON.stringify(modifyVars || {}));
_context5.next = 14;
break;
case 11:
_context5.prev = 11;
_context5.t0 = _context5['catch'](3);
console.log(_context5.t0);
case 14:
if (!(index < modifyVarsArray.length)) {
_context5.next = 18;
break;
}
_context5.next = 17;
return loop(index + 1);
case 17:
return _context5.abrupt('return', true);
case 18:
return _context5.abrupt('return', true);
case 19:
case 'end':
return _context5.stop();
}
}
}, _callee5, undefined, [[3, 11]]);
}));
return function loop(_x11) {
return _ref10.apply(this, arguments);
};
}();
// Write the cached modifyVars settings
fs.writeFileSync(modifyVarsArrayPath, JSON.stringify(modifyVarsArray));
_context6.next = 15;
return loop(0);
case 15:
console.log('🎩 less render end!');
_context6.next = 21;
break;
case 18:
_context6.prev = 18;
_context6.t0 = _context6['catch'](4);
console.log(_context6.t0);
case 21:
case 'end':
return _context6.stop();
}
}
}, _callee6, undefined, [[4, 18]]);
}));
return function build(_x8, _x9) {
return _ref9.apply(this, arguments);
};
}();
module.exports = build; |
set -o noglob
# These variables are evaluated so that the config file can contain environment variables and pass them through as parameters.
ECS_PARAM_FAMILY=$(eval echo "$ECS_PARAM_FAMILY")
ECS_PARAM_CONTAINER_IMAGE_NAME_UPDATES=$(eval echo "$ECS_PARAM_CONTAINER_IMAGE_NAME_UPDATES")
ECS_PARAM_CONTAINER_ENV_VAR_UPDATES=$(eval echo "$ECS_PARAM_CONTAINER_ENV_VAR_UPDATES")
# shellcheck disable=SC2034
PREVIOUS_TASK_DEFINITION=$(aws ecs describe-task-definition --task-definition "$ECS_PARAM_FAMILY" --include TAGS)
# Prepare script for updating container definitions
UPDATE_CONTAINER_DEFS_SCRIPT_FILE=$(mktemp _update_container_defs.py.XXXXXX)
chmod +x "$UPDATE_CONTAINER_DEFS_SCRIPT_FILE"
cat <<< "$ECS_SCRIPT_UPDATE_CONTAINER_DEFS" > "$UPDATE_CONTAINER_DEFS_SCRIPT_FILE"
# Prepare container definitions
CONTAINER_DEFS=$(python "$UPDATE_CONTAINER_DEFS_SCRIPT_FILE" "$PREVIOUS_TASK_DEFINITION" "$ECS_PARAM_CONTAINER_IMAGE_NAME_UPDATES" "$ECS_PARAM_CONTAINER_ENV_VAR_UPDATES")
# Escape single quotes from environment variables for BASH_ENV
CLEANED_CONTAINER_DEFS=$(echo "$CONTAINER_DEFS" | sed -E "s:':'\\\'':g")
# Prepare script for getting task definition values
GET_TASK_DFN_VAL_SCRIPT_FILE=$(mktemp _get_task_def_value.py.XXXXXX)
chmod +x "$GET_TASK_DFN_VAL_SCRIPT_FILE"
cat <<< "$ECS_SCRIPT_GET_TASK_DFN_VAL" > "$GET_TASK_DFN_VAL_SCRIPT_FILE"
# Get other task definition values
TASK_ROLE=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'taskRoleArn' "$PREVIOUS_TASK_DEFINITION")
EXECUTION_ROLE=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'executionRoleArn' "$PREVIOUS_TASK_DEFINITION")
NETWORK_MODE=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'networkMode' "$PREVIOUS_TASK_DEFINITION")
VOLUMES=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'volumes' "$PREVIOUS_TASK_DEFINITION")
PLACEMENT_CONSTRAINTS=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'placementConstraints' "$PREVIOUS_TASK_DEFINITION")
REQ_COMP=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'requiresCompatibilities' "$PREVIOUS_TASK_DEFINITION")
TASK_CPU=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'cpu' "$PREVIOUS_TASK_DEFINITION")
TASK_MEMORY=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'memory' "$PREVIOUS_TASK_DEFINITION")
PID_MODE=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'pidMode' "$PREVIOUS_TASK_DEFINITION")
IPC_MODE=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'ipcMode' "$PREVIOUS_TASK_DEFINITION")
TAGS=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'tags' "$PREVIOUS_TASK_DEFINITION")
PROXY_CONFIGURATION=$(python "$GET_TASK_DFN_VAL_SCRIPT_FILE" 'proxyConfiguration' "$PREVIOUS_TASK_DEFINITION")
# Make task definition values available as env variables
# shellcheck disable=SC2129
echo "export CCI_ORB_AWS_ECS_TASK_ROLE='${TASK_ROLE}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_EXECUTION_ROLE='${EXECUTION_ROLE}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_NETWORK_MODE='${NETWORK_MODE}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_CONTAINER_DEFS='${CLEANED_CONTAINER_DEFS}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_VOLUMES='${VOLUMES}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_PLACEMENT_CONSTRAINTS='${PLACEMENT_CONSTRAINTS}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_REQ_COMP='${REQ_COMP}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_TASK_CPU='${TASK_CPU}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_TASK_MEMORY='${TASK_MEMORY}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_PID_MODE='${PID_MODE}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_IPC_MODE='${IPC_MODE}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_TAGS='${TAGS}'" >> "$BASH_ENV"
echo "export CCI_ORB_AWS_ECS_PROXY_CONFIGURATION='${PROXY_CONFIGURATION}'" >> "$BASH_ENV"
rm "$UPDATE_CONTAINER_DEFS_SCRIPT_FILE" "$GET_TASK_DFN_VAL_SCRIPT_FILE" |
#!/bin/dumb-init /bin/sh
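# dumb-init runs as PID 1 to forward signals and reap zombies; gosu below
# drops root privileges to the ecs_console user before exec'ing the command.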
set -e
if [ -n "$USE_BUNDLE_EXEC" ]; then
BINARY="bundle exec ecs_console"
else
BINARY=ecs_console
fi
if ${BINARY} help "$1" 2>&1 | grep -q "ecs_console $1"; then
set -- gosu ecs_console ${BINARY} "$@"
if [ -n "$FOG_LOCAL" ]; then
chown -R ecs_console:ecs_console /fog
fi
fi
exec "$@"
|
from collections import Counter
import re
from typing import List, Tuple
def top_n_words(file_path: str, N: int) -> List[Tuple[str, int]]:
with open(file_path, 'r') as file:
text = file.read().lower()
words = re.findall(r'\b\w+\b', text)
stop_words = set(['the', 'and', 'is', 'for', 'a', 'in', 'of', 'on', 'to', 'this', 'it', 'with', 'be', 'or', 'are', 'an', 'as', 'by', 'at', 'file'])
filtered_words = [word for word in words if word not in stop_words]
word_counts = Counter(filtered_words)
top_n = word_counts.most_common(N)
    sorted_top_n = sorted(top_n, key=lambda x: (x[1], x[0]))  # re-sorts ascending by (count, word)
return sorted_top_n |
# airbyte-integrations/connectors/source-mongodb/lib/airbyte_logger.rb
require_relative './airbyte_protocol.rb'
class AirbyteLogger
def self.format_log(text, log_level=Level::Info)
alm = AirbyteLogMessage.from_dynamic!({
'level' => log_level,
'message' => text
})
AirbyteMessage.from_dynamic!({
'type' => Type::Log,
'log' => alm.to_dynamic
}).to_json
end
def self.logger_formatter
proc { |severity, datetime, progname, msg|
format_log("[#{datetime}] #{severity} : #{progname} | #{msg.dump}\n\n")
}
end
def self.log(text, log_level=Level::Info)
    message = format_log(text, log_level)
puts message
end
end
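# Usage sketch: AirbyteLogger.log("Sync started") emits a single-line JSON
# AirbyteMessage of type LOG on stdout for the Airbyte worker to consume.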
|
// src/legacy/ui/public/vis/editors/default/fancy_forms/kbn_model_controller.js
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export function decorateModelController($delegate, $injector) {
const [directive] = $delegate;
const ModelController = directive.controller;
class KbnModelController extends ModelController {
// prevent inheriting ModelController's static $inject property
// which is angular's cache of the DI arguments for a function
static $inject = ['$scope', '$element'];
constructor($scope, $element, ...superArgs) {
super(...superArgs);
const onInvalid = () => {
this.$setTouched();
};
// the browser emits an "invalid" event when browser supplied
// validation fails, which implies that the user has indirectly
// interacted with the control and it should be treated as "touched"
$element.on('invalid', onInvalid);
$scope.$on('$destroy', () => {
$element.off('invalid', onInvalid);
});
}
}
// replace controller with our wrapper
directive.controller = [
...$injector.annotate(KbnModelController),
...$injector.annotate(ModelController),
(...args) => new KbnModelController(...args),
];
return $delegate;
}
|
// Largely based on: https://github.com/Uniswap/interface/blob/main/cypress/support/commands.js
import { Eip1193Bridge } from '@ethersproject/experimental/lib/eip1193-bridge'
import { JsonRpcProvider } from '@ethersproject/providers'
import { Wallet } from '@ethersproject/wallet'
const TEST_PRIVATE_KEY =
'0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
const provider = new JsonRpcProvider()
const signer = new Wallet(TEST_PRIVATE_KEY, provider)
const TEST_ADDRESS_NEVER_USE = signer.address
class MockProvider extends Eip1193Bridge {
chainId = 1
async sendAsync(...args) {
console.log('sendAsync called', ...args)
return this.send(...args)
}
async send(...args) {
console.log('send called', ...args)
const isCallbackForm =
typeof args[0] === 'object' && typeof args[1] === 'function'
let callback
let method
let params
if (isCallbackForm) {
callback = args[1]
method = args[0].method
params = args[0].params
} else {
method = args[0]
params = args[1]
}
    if (method === 'eth_requestAccounts' || method === 'eth_accounts') {
      if (isCallbackForm) {
        // return so we don't fall through and invoke the callback a second time below
        return callback({ result: [TEST_ADDRESS_NEVER_USE] })
      } else {
        return Promise.resolve([TEST_ADDRESS_NEVER_USE])
      }
    }
    if (method === 'eth_chainId') {
      if (isCallbackForm) {
        return callback(null, { result: '0x1' })
      } else {
        return Promise.resolve('0x1')
      }
    }
try {
const result = await super.send(method, params)
console.debug('result received', method, params, result)
if (isCallbackForm) {
callback(null, { result })
} else {
return result
}
} catch (error) {
if (isCallbackForm) {
callback(error, null)
} else {
throw error
}
}
}
}
Cypress.Commands.add('injectWeb3Provider', (isONTO = false) => {
cy.on('window:before:load', (win) => {
win.localStorage.clear()
win.ethereum = new MockProvider(signer, provider)
if (isONTO) {
win.ethereum.isONTO = isONTO
}
})
})
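// Usage sketch in a spec (hypothetical test): call cy.injectWeb3Provider()
// before cy.visit(...) so window.ethereum is mocked when the app loads.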
|
import datetime
from dataclasses import dataclass
from logging import getLogger
from typing import Any, Callable
from dhis2 import Api
from django.utils import dateparse, timezone
logger = getLogger(__name__)
@dataclass(frozen=True)
class Dhis2Relation:
"""
describe a m2m relation from an object model to some related items
"""
# iterable field containing a list of related item
dhis2_field_name: str
# callable to extract DHIS2 id from each item of said iterable
dhis2_extract_id: Callable[[Any], str]
# OpenHexa model of the related item
model_name: str
# M2M field name in the object model
model_field: str
class Dhis2Result:
"""Base class for DHIS2 result items - handles translations"""
# Mapping dhis2 field name -> field type, field default
FIELD_SPECS = {
"id": (str, None),
"name": (str, None),
"shortName": (str, ""),
"description": (str, ""),
"externalAccess": (bool, None),
"favorite": (bool, None),
"created": (datetime.datetime, None),
"lastUpdated": (datetime.datetime, None),
}
# Mapping dhis2 field name -> (openhexa model, target field, dhis field name of model)
RELATIONS: list[Dhis2Relation] = []
def __init__(self, data):
self._data = data
@property
def fields(self):
return {**Dhis2Result.FIELD_SPECS, **self.FIELD_SPECS}
def get_relations(self) -> dict[Dhis2Relation, list]:
relations = {}
for relation in self.RELATIONS:
links = [
relation.dhis2_extract_id(x)
for x in self._data.get(relation.dhis2_field_name)
]
relations[relation] = links
return relations
def get_values(self, locale=None):
return {
field_name: self.get_value(field_name, locale) for field_name in self.fields
}
def get_value(self, field_name, locale=None):
try:
field_type, field_default = self.fields[field_name]
except KeyError:
raise ValueError(
f'The "{field_name}" field does not exist in {self.__class__.__name__}'
)
# If "dict" type, references another record - return as is (or default)
if field_type is dict:
return self._data.get(field_name, {"id": field_default})
# If "datetime" type, convert to time-aware datetime
if field_type is datetime.datetime:
return timezone.make_aware(
dateparse.parse_datetime(self._data.get(field_name, field_default))
)
# If not a translated property (or no translations), early return
if "translations" not in self._data or not any(
p for p in self._data["translations"] if p["property"] == field_name.upper()
):
return self._data.get(field_name, field_default)
try:
# Attempt to extract the translated value for the provided locale (which can be None)
return next(
p
for p in self._data["translations"]
if p["property"] == field_name.upper()
# If locale is None, the first description will be returned
and (locale is None or locale in p["locale"])
)["value"]
except StopIteration:
if (
locale is None
): # Locale is None: if no description at all, return the default
return field_default
# Could not find a description for the provided locale, find any description
return self.get_value(field_name, None)
class DataElementResult(Dhis2Result):
FIELD_SPECS = {
"code": (str, ""),
"domainType": (str, None),
"valueType": (str, None),
"aggregationType": (str, None),
}
class DataSetResult(Dhis2Result):
FIELD_SPECS = {
"code": (str, ""),
}
RELATIONS = [
Dhis2Relation(
dhis2_field_name="dataSetElements",
dhis2_extract_id=lambda e: e["dataElement"]["id"],
model_name="DataElement",
model_field="data_elements",
),
]
class IndicatorTypeResult(Dhis2Result):
FIELD_SPECS = {
"number": (bool, None),
"factor": (int, None),
}
class OrganisationUnitResult(Dhis2Result):
FIELD_SPECS = {
"code": (str, ""),
"path": (str, "/"),
"leaf": (bool, None),
}
RELATIONS = [
Dhis2Relation(
dhis2_field_name="dataSets",
dhis2_extract_id=lambda e: e["id"],
model_name="DataSet",
model_field="datasets",
),
]
class IndicatorResult(Dhis2Result):
FIELD_SPECS = {
"code": (str, ""),
"indicatorType": (dict, None),
"annualized": (bool, None),
}
class Dhis2Client:
def __init__(self, *, url, username, password, verbose=False):
self._api = Api(url, username, password)
self.name = url
self.verbose = verbose
def fetch_info(self):
info = self._api.get_info()
self.name = info["systemName"]
return info
def fetch_data_elements(self):
for page in self._api.get_paged(
"dataElements", params={"fields": ":all"}, page_size=100
):
if self.verbose:
logger.info(
"sync_log %s: page from data_elements %s",
self.name,
page.get("pager"),
)
yield [DataElementResult(data) for data in page["dataElements"]]
def fetch_datasets(self):
for page in self._api.get_paged(
"dataSets", params={"fields": ":all"}, page_size=100
):
if self.verbose:
logger.info(
"sync_log %s: page from datasets %s", self.name, page.get("pager")
)
yield [DataSetResult(data) for data in page["dataSets"]]
def fetch_indicator_types(self):
for page in self._api.get_paged(
"indicatorTypes", params={"fields": ":all"}, page_size=100
):
if self.verbose:
logger.info(
"sync_log %s: page from indicator_types %s",
self.name,
page.get("pager"),
)
yield [IndicatorTypeResult(data) for data in page["indicatorTypes"]]
def fetch_indicators(self):
for page in self._api.get_paged(
"indicators", params={"fields": ":all"}, page_size=100
):
if self.verbose:
logger.info(
"sync_log %s: page from indicators %s", self.name, page.get("pager")
)
yield [IndicatorResult(data) for data in page["indicators"]]
def fetch_organisation_units(self):
for page in self._api.get_paged(
"organisationUnits", params={"fields": ":all"}, page_size=100
):
if self.verbose:
logger.info(
"sync_log %s: page from organisation_units %s",
self.name,
page.get("pager"),
)
# rewrite path -> replace "/" by "." for correct ltree path
# warning: edits elements in place, which can side-effect tests
for element in page["organisationUnits"]:
if "path" in element:
element["path"] = element["path"].replace("/", ".").strip(".")
yield [OrganisationUnitResult(data) for data in page["organisationUnits"]]
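# Usage sketch (hypothetical server URL and credentials):
#   client = Dhis2Client(url="https://dhis2.example.org", username="admin", password="secret", verbose=True)
#   for page in client.fetch_data_elements():
#       for element in page:
#           print(element.get_value("name"))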
|
class GermanWordProcessor:
def __init__(self, phon, orth):
self.phon = phon # phonetic representation of the word
self.orth = orth # orthographic representation of the word
def extract_stem(self):
# Check if the word ends with a schwa sound represented by 'e'
if self.orth.endswith('e'):
# Remove the schwa sound from the end of the word
return self.orth[:-1]
else:
return self.orth # Return the original word if no schwa sound is found |
require 'rails_helper'
RSpec.describe 'Sessions', type: :request do
# blank
end
|
#include <bits/stdc++.h>
using namespace std;
class Solution {
public:
vector<int> sortedSquares(vector<int>& nums) {
vector<int> ans;
for(auto it: nums){
ans.push_back(it*it);
}
sort(ans.begin(), ans.end());
return ans;
}
};
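// Usage sketch: Solution().sortedSquares(v) with v = {-4, -1, 0, 3, 10}
// returns {0, 1, 9, 16, 100}.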
int main()
{
return 0;
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.gui;
import java.util.ArrayList;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import org.fhwa.c2cri.testmodel.TestCaseResults;
import org.fhwa.c2cri.testmodel.TestStepResult;
import org.fhwa.c2cri.utilities.DateUtils;
/**
* The Class TestStepResultsTableModel.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public class TestStepResultsTableModel extends AbstractTableModel implements ListSelectionListener {
/** The Constant TimeStamp_Col. */
public static final int TimeStamp_Col = 0;
/** The Constant Description_Col. */
public static final int Description_Col = 1;
/** The Constant Result_Col. */
public static final int Result_Col = 2;
/** The tc results. */
private TestCaseResults tcResults;
/** The test case results table. */
private JTable testCaseResultsTable;
/** The test step list. */
private ArrayList<TestStepResult> testStepList = new ArrayList<TestStepResult>();
/** The test case description Text Area Reference */
private JTextArea testDescriptionText;
/** The column names. */
private String[] columnNames = {"TimeStamp",
"Description",
"Result"};
/**
* Instantiates a new test step results table model.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*/
private TestStepResultsTableModel() {
}
/**
* Instantiates a new test step results table model.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @param tcResults the tc results
*/
public TestStepResultsTableModel(TestCaseResults tcResults) {
super();
this.tcResults = tcResults;
}
/* (non-Javadoc)
* @see javax.swing.table.TableModel#getColumnCount()
*/
public int getColumnCount() {
return columnNames.length;
}
/* (non-Javadoc)
* @see javax.swing.table.TableModel#getRowCount()
*/
    public int getRowCount() {
        if (testCaseResultsTable.getSelectedRow() < 0) { // Not called based on a selection event.
            int row = testCaseResultsTable.getRowCount() - 1;
            if (row >= 0) {
                testStepList = tcResults.getFullResultsList().get(row).getTestStepResults();
                testDescriptionText.setText(tcResults.getFullResultsList().get(row).getTestCaseDescription());
            } else {
                testStepList = new ArrayList<TestStepResult>();
                testDescriptionText.setText("");
            }
        }
        return testStepList.size();
    }
/* (non-Javadoc)
* @see javax.swing.table.AbstractTableModel#getColumnName(int)
*/
public String getColumnName(int col) {
return columnNames[col];
}
/* (non-Javadoc)
* @see javax.swing.table.TableModel#getValueAt(int, int)
*/
public Object getValueAt(int row, int col) {
TestStepResult testStepResult = testStepList.get(row);
switch (col) {
case TimeStamp_Col:
return DateUtils.millisecondToDate(testStepResult.getTimeStamp());
case Description_Col:
return testStepResult.getTestStepDescription();
case Result_Col:
return testStepResult.getResult().equals("FAILED")? testStepResult.getResult() + " - " + testStepResult.getErrorDescription():testStepResult.getResult();
}
throw new IllegalArgumentException("Illegal column: "
+ col);
}
/*
* JTable uses this method to determine the default renderer/
* editor for each cell. If we didn't implement this method,
* then the last column would contain text ("true"/"false"),
* rather than a check box.
*/
/* (non-Javadoc)
* @see javax.swing.table.AbstractTableModel#getColumnClass(int)
*/
public Class getColumnClass(int c) {
return getValueAt(0, c).getClass();
}
/*
* Don't need to implement this method unless your table's
* editable.
*/
/* (non-Javadoc)
* @see javax.swing.table.AbstractTableModel#isCellEditable(int, int)
*/
    public boolean isCellEditable(int row, int col) {
        //Note that the data/cell address is constant,
        //no matter where the cell appears onscreen.
        return col == 3;
    }
/*
* Don't need to implement this method unless your table's
* data can change.
*/
    /*
     * public void setValueAt(Object value, int row, int col) {
     *     if ((row > -1) && (col == FlagVal_Col)) {
     *         requirementList.get(row).setFlagValue((Boolean) value);
     *         System.out.println(" Firing Row " + row);
     *         fireTableCellUpdated(row, col);
     *     }
     * }
     */
    /**
     * Sets the test case results table whose row selection drives this model.
     *
     * @param resultsTable the new need list selection table
     */
public void setNeedListSelectionTable(JTable resultsTable) {
this.testCaseResultsTable = resultsTable;
}
/** set the reference to the Test Case Description Text Area */
public void setTestDescriptionText(JTextArea testDescriptionText) {
this.testDescriptionText = testDescriptionText;
}
/* (non-Javadoc)
* @see javax.swing.event.ListSelectionListener#valueChanged(javax.swing.event.ListSelectionEvent)
*/
@Override
public void valueChanged(ListSelectionEvent e) {
// Row selection changed
// int row = e.getLastIndex();
System.out.println("***TestStepResultsTableModel valueChanged: Started");
int row = testCaseResultsTable.getSelectedRow();
if (row > -1) {
if (!e.getValueIsAdjusting()) {
row = testCaseResultsTable.getSelectedRow();
System.out.println("***TestStepResultsTableModel valueChanged: Row " + row + " Selected");
if (row >= 0) {
testStepList = tcResults.getFullResultsList().get(row).getTestStepResults();
testDescriptionText.setText(tcResults.getFullResultsList().get(row).getTestCaseDescription());
this.fireTableDataChanged();
} else {
ArrayList<TestStepResult> blankList = new ArrayList<TestStepResult>();
testStepList = blankList;
testDescriptionText.setText("");
this.fireTableDataChanged();
}
}
}
}
}
|
#include "oneapi/dal/backend/common.hpp"
namespace oneapi::dal::backend {
void memcpy(void* dest, const void* src, std::int64_t size);
template <typename T>
inline void copy(T* dest, const T* src, std::int64_t count) {
    // sizeof(T) is unsigned, so it can never be negative, and a signed
    // overflow of sizeof(T) * count is undefined behavior rather than a
    // testable condition; only the element count needs validating here.
    if (count < 0) {
        throw std::overflow_error("Memory copy element count must be non-negative");
    }
    // Assert that sizeof(T) * count fits in std::int64_t, then copy.
    ONEDAL_ASSERT_MUL_OVERFLOW(std::int64_t, sizeof(T), count);
    memcpy(dest, src, sizeof(T) * count);
}
} // namespace oneapi::dal::backend
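/*
 * Usage sketch (hypothetical caller; assumes linking against the backend):
 *
 *   std::vector<float> src(n), dst(n);
 *   oneapi::dal::backend::copy(dst.data(), src.data(), n);
 */
|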
def insertionSort(arr):
for i in range(1, len(arr)):
curr = arr[i]
j = i - 1
while j >= 0 and curr < arr[j]:
arr[j+1] = arr[j]
j -= 1
arr[j+1] = curr
    return arr
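# A quick sanity check of the insertion sort above:
if __name__ == "__main__":
    print(insertionSort([5, 2, 4, 6, 1, 3]))  # [1, 2, 3, 4, 5, 6]
|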
Tinytest.add('example', function (test) {
var schema= {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Example",
// "description": "A product from Acme's catalog",
"type": "object",
"properties": {
"id": {
// "description": "The unique identifier for a product",
"type": "integer"
},
"name": {
// "description": "Name of the product",
"type": "string"
}
},
"required": ["id", "name"]
};
var obj = new ReactiveObject(schema , { x: 1 } );
var changed = false;
var handle = Tracker.autorun(function () {
    changed = obj.x == 2;
  });
  obj.x = 2;
handle.stop();
test.equal(true, changed);
test.equal(JSON.stringify({x:2}), JSON.stringify(obj.get()));
});
ReactiveObject = function (schema, obj, dep) {
  if (!dep)
    dep = new Tracker.Dependency;
  var self = this;
  // Define a reactive accessor on the instance for each property of obj,
  // wrapping each one in its own closure so the loop variable is captured
  // correctly; reads register a dependency and writes invalidate it.
  for (var prop in obj) {
    (function (p) {
      Object.defineProperty(self, p, {
        get: function () {
          dep.depend();
          return obj[p];
        },
        set: function (value) {
          obj[p] = value;
          dep.changed();
        }
      });
    })(prop);
  }
  self.get = function () {
    dep.depend();
    return obj;
  };
  self.set = function (newValue) {
    obj = newValue;
    dep.changed();
  };
  self.set(obj)
} |
module.exports = function PgOrderByMultiColumnIndexPlugin(
builder,
{ orderByNullsLast }
) {
builder.hook("build", build => {
const pkg = require("./package.json");
// Check dependencies
if (!build.versions) {
throw new Error(
`Plugin ${pkg.name}@${pkg.version} requires graphile-build@^4.1.0 in order to check dependencies (current version: ${build.graphileBuildVersion})`
);
}
const depends = (name, range) => {
if (!build.hasVersion(name, range)) {
throw new Error(
`Plugin ${pkg.name}@${pkg.version} requires ${name}@${range} (${
build.versions[name]
? `current version: ${build.versions[name]}`
: "not found"
})`
);
}
};
depends("graphile-build-pg", "^4.1.0");
// Register this plugin
build.versions = build.extend(build.versions, { [pkg.name]: pkg.version });
return build;
});
builder.hook("inflection", inflection => {
return Object.assign(inflection, {
orderByMultiColumnIndexEnum(specs) {
const nullOrderingSuffix = (ascending, nullsFirst) => {
// Only include a null ordering suffix if it differs
// from the application default (specified using
// `graphileBuildOptions.orderByNullsLast`).
          if (orderByNullsLast === true) {
            // Defaults are ASC NULLS LAST and DESC NULLS LAST, so any
            // NULLS FIRST ordering (ASC or DESC) needs a suffix:
            return nullsFirst === true ? "-nulls-first" : "";
} else {
// Defaults are ASC NULLS LAST and DESC NULLS FIRST, so
// ASC NULLS FIRST and DESC NULLS LAST need a suffix:
if (ascending === true && nullsFirst === true) {
return "-nulls-first";
} else if (ascending === false && nullsFirst === false) {
return "-nulls-last";
} else {
return "";
}
}
};
return `${specs
.map(([attr, ascending, nullsFirst]) =>
this.constantCase(
`${this.orderByColumnEnum(attr, ascending)}${nullOrderingSuffix(
ascending,
nullsFirst
)}`
)
)
.join("__")}`;
},
});
});
builder.hook("GraphQLEnumType:values", (values, build, context) => {
const {
extend,
inflection,
pgIntrospectionResultsByKind: introspectionResultsByKind,
describePgEntity,
} = build;
const {
scope: { isPgRowSortEnum, pgIntrospection: table },
} = context;
if (!isPgRowSortEnum || !table || table.kind !== "class") {
return values;
}
return extend(
values,
introspectionResultsByKind.index
.filter(index => index.class.id === table.id)
.reduce((memo, index) => {
const attributes = index.attributeNums.map(nr =>
index.class.attributes.find(attr => attr.num === nr)
);
if (attributes.length <= 1 || attributes.includes(undefined)) {
// Not a multi-column index
return memo;
}
// Specs for scanning the index forward
const forwardSpecs = attributes.map((attr, idx) => [
attr,
index.attributePropertiesAsc[idx],
index.attributePropertiesNullsFirst[idx],
]);
// Specs for scanning the index backward (flip asc/desc and nulls first/last)
const backwardSpecs = attributes.map((attr, idx) => [
attr,
!index.attributePropertiesAsc[idx],
!index.attributePropertiesNullsFirst[idx],
]);
const forwardEnumName = inflection.orderByMultiColumnIndexEnum(
forwardSpecs
);
const backwardEnumName = inflection.orderByMultiColumnIndexEnum(
backwardSpecs
);
memo = extend(
memo,
{
[forwardEnumName]: {
value: {
alias: forwardEnumName.toLowerCase(),
specs: forwardSpecs.map(([attr, ascending, nullsFirst]) => [
attr.name,
ascending,
nullsFirst,
]),
},
},
},
`Adding multi-column index forward orderBy enum value for ${attributes
.map(attr => describePgEntity(attr))
.join(", ")}.`
);
memo = extend(
memo,
{
[backwardEnumName]: {
value: {
alias: backwardEnumName.toLowerCase(),
specs: backwardSpecs.map(([attr, ascending, nullsFirst]) => [
attr.name,
ascending,
nullsFirst,
]),
},
},
},
`Adding multi-column index backward orderBy enum value for ${attributes
.map(attr => describePgEntity(attr))
.join(", ")}.`
);
return memo;
}, {}),
`Adding multi-column index order values for table '${table.name}'`
);
});
};
|
package com.whoisxmlapi.whoisapi.model;
public class TechnicalContact extends BaseContact{
}
|
#!/usr/bin/env bash
# Is the year a leap year?
main() {
    echo "Enter a year:"
    read rok
    if [[ $(($rok % 4)) -eq 0 ]]; then
        if [[ $(($rok % 100)) -eq 0 ]]; then
            if [[ $(($rok % 400)) -eq 0 ]]; then
                echo "the year is a leap year"
            else
                echo "the year is not a leap year"
            fi
        else
            echo "the year is a leap year"
        fi
    else
        echo "the year is not a leap year"
    fi
}
main "$@"
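# Example session (assuming the script is saved as leapyear.sh):
#   $ ./leapyear.sh
#   Enter a year:
#   2000
#   the year is a leap year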
|
def count_num(arr, num):
first = first_occurence(arr, num)
last = last_occurence(arr, num)
if first != -1 and last != -1:
return (last - first + 1)
else:
return 0
def first_occurence(arr, num):
start = 0
end = len(arr) - 1
result = -1
while start <= end:
mid = (start + end) // 2
if arr[mid] == num:
result = mid
end = mid - 1
elif arr[mid] > num:
end = mid - 1
else:
start = mid + 1
return result
def last_occurence(arr, num):
start = 0
end = len(arr) - 1
result = -1
while start <= end:
mid = (start + end) // 2
if arr[mid] == num:
result = mid
start = mid + 1
elif arr[mid] > num:
end = mid - 1
else:
start = mid + 1
    return result
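# Usage sketch: counting occurrences in a sorted array via the two binary
# searches above.
if __name__ == "__main__":
    print(count_num([1, 2, 2, 2, 3, 5], 2))  # 3
    print(count_num([1, 2, 2, 2, 3, 5], 4))  # 0
|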
import React from 'react';
import { Card } from '@material-ui/core';
import AccountBalanceWalletOutlinedIcon from '@material-ui/icons/AccountBalanceWalletOutlined';
import AlarmAddOutlinedIcon from '@material-ui/icons/AlarmAddOutlined';
import CakeOutlinedIcon from '@material-ui/icons/CakeOutlined';
import ContactsOutlinedIcon from '@material-ui/icons/ContactsOutlined';
import Brightness7TwoToneIcon from '@material-ui/icons/Brightness7TwoTone';
import DirectionsBoatTwoToneIcon from '@material-ui/icons/DirectionsBoatTwoTone';
import EventAvailableTwoToneIcon from '@material-ui/icons/EventAvailableTwoTone';
import HomeWorkTwoToneIcon from '@material-ui/icons/HomeWorkTwoTone';
import AirportShuttleIcon from '@material-ui/icons/AirportShuttle';
import CheckCircleOutlineIcon from '@material-ui/icons/CheckCircleOutline';
import DeveloperBoardIcon from '@material-ui/icons/DeveloperBoard';
import TuneIcon from '@material-ui/icons/Tune';
export default function LivePreviewExample() {
return (
<>
<div className="icon-demo-box">
<Card className="p-2 text-primary">
<AccountBalanceWalletOutlinedIcon />
</Card>
<Card className="p-2 text-success">
<AlarmAddOutlinedIcon />
</Card>
<Card className="p-2 text-warning">
<CakeOutlinedIcon />
</Card>
<Card className="p-2 text-danger">
<ContactsOutlinedIcon />
</Card>
<Card className="p-2 text-dark">
<Brightness7TwoToneIcon />
</Card>
<Card className="p-2 text-info">
<DirectionsBoatTwoToneIcon />
</Card>
<Card className="p-2 text-first">
<EventAvailableTwoToneIcon />
</Card>
<Card className="p-2 text-second">
<HomeWorkTwoToneIcon />
</Card>
<Card className="p-2">
<AirportShuttleIcon />
</Card>
<Card className="p-2">
<CheckCircleOutlineIcon />
</Card>
<Card className="p-2">
<DeveloperBoardIcon />
</Card>
<Card className="p-2">
<TuneIcon />
</Card>
</div>
</>
);
}
|
SCRIPTPATH=$(dirname "$0")
# deploy etcd to kubernetes
helm install etcd bitnami/etcd --set auth.rbac.enabled=false,statefulset.replicaCount=6,nodeSelector.etcdnodetype=etcd,persistence.enabled=false;
|
package reactivefeign.jetty.h2c;
import com.github.tomakehurst.wiremock.common.JettySettings;
import com.github.tomakehurst.wiremock.core.Options;
import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
import com.github.tomakehurst.wiremock.http.AdminRequestHandler;
import com.github.tomakehurst.wiremock.http.StubRequestHandler;
import com.github.tomakehurst.wiremock.jetty9.JettyHttpServer;
import org.eclipse.jetty.http2.server.HTTP2CServerConnectionFactory;
import org.eclipse.jetty.io.NetworkTrafficListener;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.ServerConnector;
public class Http2cServerConfig {
public static WireMockConfiguration wireMockConfig(){
return WireMockConfiguration.wireMockConfig()
.httpServerFactory((options, adminRequestHandler, stubRequestHandler) ->
new JettyHttpServer(options, adminRequestHandler, stubRequestHandler) {
@Override
protected ServerConnector createHttpConnector(
String bindAddress,
int port,
JettySettings jettySettings,
NetworkTrafficListener listener) {
HttpConfiguration httpConfig = createHttpConfig(jettySettings);
return createServerConnector(
bindAddress,
jettySettings,
port,
listener,
new HTTP2CServerConnectionFactory(httpConfig)
);
}
});
}
}
|
#!/bin/bash
find ./include -iname '*.hpp' -o -iname '*.cpp' -o -iname '*.inl' -o -iname '*.hxx' | xargs clang-format -i
find ./examples -maxdepth 1 -iname '*.hpp' -o -iname '*.cpp' -o -iname '*.inl' -o -iname '*.hxx' | xargs clang-format -i
find ./examples/lowlevel -iname '*.hpp' -o -iname '*.cpp' -o -iname '*.inl' -o -iname '*.hxx' | xargs clang-format -i
find ./test -iname '*.hpp' -o -iname '*.cpp' -o -iname '*.inl' -o -iname '*.hxx' | xargs clang-format -i
|
toString.length = {};
toString.name = {};
|
package controller
import (
"net/http"
routing "github.com/go-ozzo/ozzo-routing/v2"
"github.com/minipkg/log"
"redditclone/internal/domain/comment"
"redditclone/internal/domain/post"
"redditclone/internal/pkg/apperror"
"redditclone/internal/pkg/auth"
"redditclone/internal/pkg/errorshandler"
)
type commentController struct {
Service comment.IService
PostService post.IService
Logger log.ILogger
}
// POST /api/post/{POST_ID} - add a comment
// DELETE /api/post/{POST_ID}/{COMMENT_ID} - delete a comment
func RegisterCommentHandlers(r *routing.RouteGroup, service comment.IService, postService post.IService, logger log.ILogger, authHandler routing.Handler) {
c := commentController{
Service: service,
PostService: postService,
Logger: logger,
}
r.Use(authHandler)
r.Post(`/post/<postId>`, c.create)
r.Delete(`/post/<postId>/<id>`, c.delete)
}
func (c *commentController) create(ctx *routing.Context) error {
postId := ctx.Param("postId")
entity := c.Service.NewEntity()
if err := ctx.Read(entity); err != nil {
c.Logger.With(ctx.Request.Context()).Info(err)
return errorshandler.BadRequest(err.Error())
}
if err := entity.Validate(); err != nil {
return errorshandler.BadRequest(err.Error())
}
session := auth.CurrentSession(ctx.Request.Context())
entity.PostID = postId
entity.UserID = session.UserID
entity.User = session.User
if err := c.Service.Create(ctx.Request.Context(), entity); err != nil {
c.Logger.With(ctx.Request.Context()).Info(err)
return errorshandler.BadRequest(err.Error())
}
post, err := c.PostService.Get(ctx.Request.Context(), postId)
if err != nil {
if err == apperror.ErrNotFound {
c.Logger.With(ctx.Request.Context()).Info(err)
return errorshandler.NotFound("")
}
c.Logger.With(ctx.Request.Context()).Error(err)
return errorshandler.InternalServerError("")
}
ctx.Response.Header().Set("Content-Type", "application/json; charset=UTF-8")
return ctx.WriteWithStatus(post, http.StatusCreated)
}
func (c *commentController) delete(ctx *routing.Context) error {
postId := ctx.Param("postId")
id := ctx.Param("id")
if err := c.Service.Delete(ctx.Request.Context(), id); err != nil {
if err == apperror.ErrNotFound {
c.Logger.With(ctx.Request.Context()).Info(err)
return errorshandler.NotFound("")
}
c.Logger.With(ctx.Request.Context()).Error(err)
return errorshandler.InternalServerError("")
}
post, err := c.PostService.Get(ctx.Request.Context(), postId)
if err != nil {
if err == apperror.ErrNotFound {
c.Logger.With(ctx.Request.Context()).Info(err)
return errorshandler.NotFound("")
}
c.Logger.With(ctx.Request.Context()).Error(err)
return errorshandler.InternalServerError("")
}
ctx.Response.Header().Set("Content-Type", "application/json; charset=UTF-8")
return ctx.WriteWithStatus(post, http.StatusOK)
}
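// Example requests against the routes registered above (IDs are placeholders;
// the request body shape depends on the comment entity defined elsewhere):
//   POST   /api/post/42    -> 201 Created with the updated post as JSON
//   DELETE /api/post/42/7  -> 200 OK with the updated post as JSON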
|
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moduliths.test;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import org.junit.jupiter.api.extension.ExtendWith;
import org.moduliths.model.Module.DependencyDepth;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.test.autoconfigure.filter.TypeExcludeFilters;
import org.springframework.boot.test.context.SpringBootTestContextBootstrapper;
import org.springframework.core.annotation.AliasFor;
import org.springframework.test.context.BootstrapWith;
import org.springframework.test.context.junit.jupiter.SpringExtension;
/**
* Bootstraps the module containing the package of the test class annotated with {@link ModuleTest}. Will apply the
* following modifications to the Spring Boot configuration:
 * <ul>
 * <li>Restricts the component scanning to the module's package.</li>
 * <li>Sets the module's package as the only auto-configuration and entity scan package.</li>
 * </ul>
*
* @author <NAME>
*/
@Retention(RetentionPolicy.RUNTIME)
@BootstrapWith(SpringBootTestContextBootstrapper.class)
@TypeExcludeFilters(ModuleTypeExcludeFilter.class)
@ImportAutoConfiguration(ModuleTestAutoConfiguration.class)
@ExtendWith(SpringExtension.class)
public @interface ModuleTest {
@AliasFor("mode")
BootstrapMode value() default BootstrapMode.STANDALONE;
@AliasFor("value")
BootstrapMode mode() default BootstrapMode.STANDALONE;
/**
* Whether to automatically verify the module structure for validity.
*
* @return
*/
boolean verifyAutomatically() default true;
/**
* Module names of modules to be included in the test run independent of what the {@link #mode()} defines.
*
* @return
*/
String[] extraIncludes() default {};
@RequiredArgsConstructor
public enum BootstrapMode {
/**
 * Bootstraps the current module only.
*/
STANDALONE(DependencyDepth.NONE),
/**
* Bootstraps the current module as well as its direct dependencies.
*/
DIRECT_DEPENDENCIES(DependencyDepth.IMMEDIATE),
/**
 * Bootstraps the current module as well as all upstream dependencies (including transitive ones).
*/
ALL_DEPENDENCIES(DependencyDepth.ALL);
private final @Getter DependencyDepth depth;
}
}
|
#!/usr/bin/env bash
{ # this ensures the entire script is downloaded #
nvm_has() {
type "$1" > /dev/null 2>&1
}
nvm_install_dir() {
if [ ! -z "$NVM_DIR" ]; then
printf %s "${NVM_DIR}"
elif [ ! -z "$XDG_CONFIG_HOME" ]; then
printf %s "${XDG_CONFIG_HOME/nvm}"
else
printf %s "$HOME/.nvm"
fi
}
nvm_latest_version() {
echo "v0.33.11"
}
nvm_profile_is_bash_or_zsh() {
local TEST_PROFILE
TEST_PROFILE="${1-}"
case "${TEST_PROFILE-}" in
*"/.bashrc" | *"/.bash_profile" | *"/.zshrc")
return
;;
*)
return 1
;;
esac
}
#
# Outputs the location to NVM depending on:
# * The availability of $NVM_SOURCE
# * The method used ("script" or "git" in the script, defaults to "git")
# NVM_SOURCE always takes precedence unless the method is "script-nvm-exec"
#
nvm_source() {
local NVM_METHOD
NVM_METHOD="$1"
local NVM_SOURCE_URL
NVM_SOURCE_URL="$NVM_SOURCE"
if [ "_$NVM_METHOD" = "_script-nvm-exec" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/nvm-exec"
elif [ "_$NVM_METHOD" = "_script-nvm-bash-completion" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/bash_completion"
elif [ -z "$NVM_SOURCE_URL" ]; then
if [ "_$NVM_METHOD" = "_script" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/nvm.sh"
elif [ "_$NVM_METHOD" = "_git" ] || [ -z "$NVM_METHOD" ]; then
NVM_SOURCE_URL="https://github.com/creationix/nvm.git"
else
echo >&2 "Unexpected value \"$NVM_METHOD\" for \$NVM_METHOD"
return 1
fi
fi
echo "$NVM_SOURCE_URL"
}
#
# Node.js version to install
#
nvm_node_version() {
echo "$NODE_VERSION"
}
nvm_download() {
if nvm_has "curl"; then
curl --compressed -q "$@"
elif nvm_has "wget"; then
# Emulate curl with wget
ARGS=$(echo "$*" | command sed -e 's/--progress-bar /--progress=bar /' \
-e 's/-L //' \
-e 's/--compressed //' \
-e 's/-I /--server-response /' \
-e 's/-s /-q /' \
-e 's/-o /-O /' \
-e 's/-C - /-c /')
# shellcheck disable=SC2086
eval wget $ARGS
fi
}
install_nvm_from_git() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
if [ -d "$INSTALL_DIR/.git" ]; then
echo "=> nvm is already installed in $INSTALL_DIR, trying to update using git"
command printf '\r=> '
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" fetch origin tag "$(nvm_latest_version)" --depth=1 2> /dev/null || {
echo >&2 "Failed to update nvm, run 'git fetch' in $INSTALL_DIR yourself."
exit 1
}
else
# Cloning to $INSTALL_DIR
echo "=> Downloading nvm from git to '$INSTALL_DIR'"
command printf '\r=> '
mkdir -p "${INSTALL_DIR}"
if [ "$(ls -A "${INSTALL_DIR}")" ]; then
command git init "${INSTALL_DIR}" || {
echo >&2 'Failed to initialize nvm repo. Please report this!'
exit 2
}
command git --git-dir="${INSTALL_DIR}/.git" remote add origin "$(nvm_source)" 2> /dev/null \
|| command git --git-dir="${INSTALL_DIR}/.git" remote set-url origin "$(nvm_source)" || {
echo >&2 'Failed to add remote "origin" (or set the URL). Please report this!'
exit 2
}
command git --git-dir="${INSTALL_DIR}/.git" fetch origin tag "$(nvm_latest_version)" --depth=1 || {
echo >&2 'Failed to fetch origin with tags. Please report this!'
exit 2
}
else
command git -c advice.detachedHead=false clone "$(nvm_source)" -b "$(nvm_latest_version)" --depth=1 "${INSTALL_DIR}" || {
echo >&2 'Failed to clone nvm repo. Please report this!'
exit 2
}
fi
fi
command git -c advice.detachedHead=false --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" checkout -f --quiet "$(nvm_latest_version)"
if [ ! -z "$(command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" show-ref refs/heads/master)" ]; then
if command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet 2>/dev/null; then
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet -D master >/dev/null 2>&1
else
echo >&2 "Your version of git is out of date. Please update it!"
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch -D master >/dev/null 2>&1
fi
fi
echo "=> Compressing and cleaning up git repository"
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" reflog expire --expire=now --all; then
echo >&2 "Your version of git is out of date. Please update it!"
fi
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" gc --auto --aggressive --prune=now ; then
echo >&2 "Your version of git is out of date. Please update it!"
fi
return
}
#
# Automatically install Node.js
#
nvm_install_node() {
local NODE_VERSION_LOCAL
NODE_VERSION_LOCAL="$(nvm_node_version)"
if [ -z "$NODE_VERSION_LOCAL" ]; then
return 0
fi
echo "=> Installing Node.js version $NODE_VERSION_LOCAL"
nvm install "$NODE_VERSION_LOCAL"
local CURRENT_NVM_NODE
CURRENT_NVM_NODE="$(nvm_version current)"
if [ "$(nvm_version "$NODE_VERSION_LOCAL")" == "$CURRENT_NVM_NODE" ]; then
echo "=> Node.js version $NODE_VERSION_LOCAL has been successfully installed"
else
echo >&2 "Failed to install Node.js $NODE_VERSION_LOCAL"
fi
}
install_nvm_as_script() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
local NVM_SOURCE_LOCAL
NVM_SOURCE_LOCAL="$(nvm_source script)"
local NVM_EXEC_SOURCE
NVM_EXEC_SOURCE="$(nvm_source script-nvm-exec)"
local NVM_BASH_COMPLETION_SOURCE
NVM_BASH_COMPLETION_SOURCE="$(nvm_source script-nvm-bash-completion)"
# Downloading to $INSTALL_DIR
mkdir -p "$INSTALL_DIR"
if [ -f "$INSTALL_DIR/nvm.sh" ]; then
echo "=> nvm is already installed in $INSTALL_DIR, trying to update the script"
else
echo "=> Downloading nvm as script to '$INSTALL_DIR'"
fi
nvm_download -s "$NVM_SOURCE_LOCAL" -o "$INSTALL_DIR/nvm.sh" || {
echo >&2 "Failed to download '$NVM_SOURCE_LOCAL'"
return 1
} &
nvm_download -s "$NVM_EXEC_SOURCE" -o "$INSTALL_DIR/nvm-exec" || {
echo >&2 "Failed to download '$NVM_EXEC_SOURCE'"
return 2
} &
nvm_download -s "$NVM_BASH_COMPLETION_SOURCE" -o "$INSTALL_DIR/bash_completion" || {
echo >&2 "Failed to download '$NVM_BASH_COMPLETION_SOURCE'"
return 2
} &
for job in $(jobs -p | command sort)
do
wait "$job" || return $?
done
chmod a+x "$INSTALL_DIR/nvm-exec" || {
echo >&2 "Failed to mark '$INSTALL_DIR/nvm-exec' as executable"
return 3
}
}
nvm_try_profile() {
if [ -z "${1-}" ] || [ ! -f "${1}" ]; then
return 1
fi
echo "${1}"
}
#
# Detect profile file if not specified as environment variable
# (eg: PROFILE=~/.myprofile)
# The echo'ed path is guaranteed to be an existing file
# Otherwise, an empty string is returned
#
nvm_detect_profile() {
if [ "${PROFILE-}" = '/dev/null' ]; then
# the user has specifically requested NOT to have nvm touch their profile
return
fi
if [ -n "${PROFILE}" ] && [ -f "${PROFILE}" ]; then
echo "${PROFILE}"
return
fi
local DETECTED_PROFILE
DETECTED_PROFILE=''
if [ -n "${BASH_VERSION-}" ]; then
if [ -f "$HOME/.bashrc" ]; then
DETECTED_PROFILE="$HOME/.bashrc"
elif [ -f "$HOME/.bash_profile" ]; then
DETECTED_PROFILE="$HOME/.bash_profile"
fi
elif [ -n "${ZSH_VERSION-}" ]; then
DETECTED_PROFILE="$HOME/.zshrc"
fi
if [ -z "$DETECTED_PROFILE" ]; then
for EACH_PROFILE in ".profile" ".bashrc" ".bash_profile" ".zshrc"
do
if DETECTED_PROFILE="$(nvm_try_profile "${HOME}/${EACH_PROFILE}")"; then
break
fi
done
fi
if [ ! -z "$DETECTED_PROFILE" ]; then
echo "$DETECTED_PROFILE"
fi
}
#
# Check whether the user has any globally-installed npm modules in their system
# Node, and warn them if so.
#
nvm_check_global_modules() {
command -v npm >/dev/null 2>&1 || return 0
local NPM_VERSION
NPM_VERSION="$(npm --version)"
NPM_VERSION="${NPM_VERSION:--1}"
[ "${NPM_VERSION%%[!-0-9]*}" -gt 0 ] || return 0
local NPM_GLOBAL_MODULES
NPM_GLOBAL_MODULES="$(
npm list -g --depth=0 |
command sed -e '/ npm@/d' -e '/ (empty)$/d'
)"
local MODULE_COUNT
MODULE_COUNT="$(
command printf %s\\n "$NPM_GLOBAL_MODULES" |
command sed -ne '1!p' | # Remove the first line
wc -l | command tr -d ' ' # Count entries
)"
if [ "${MODULE_COUNT}" != '0' ]; then
# shellcheck disable=SC2016
echo '=> You currently have modules installed globally with `npm`. These will no'
# shellcheck disable=SC2016
echo '=> longer be linked to the active version of Node when you install a new node'
# shellcheck disable=SC2016
echo '=> with `nvm`; and they may (depending on how you construct your `$PATH`)'
# shellcheck disable=SC2016
echo '=> override the binaries of modules installed with `nvm`:'
echo
command printf %s\\n "$NPM_GLOBAL_MODULES"
echo '=> If you wish to uninstall them at a later point (or re-install them under your'
# shellcheck disable=SC2016
echo '=> `nvm` Nodes), you can remove them from the system Node as follows:'
echo
echo ' $ nvm use system'
echo ' $ npm uninstall -g a_module'
echo
fi
}
nvm_do_install() {
if [ -n "${NVM_DIR-}" ] && ! [ -d "${NVM_DIR}" ]; then
echo >&2 "You have \$NVM_DIR set to \"${NVM_DIR}\", but that directory does not exist. Check your profile files and environment."
exit 1
fi
if [ -z "${METHOD}" ]; then
# Autodetect install method
if nvm_has git; then
install_nvm_from_git
elif nvm_has nvm_download; then
install_nvm_as_script
else
echo >&2 'You need git, curl, or wget to install nvm'
exit 1
fi
elif [ "${METHOD}" = 'git' ]; then
if ! nvm_has git; then
echo >&2 "You need git to install nvm"
exit 1
fi
install_nvm_from_git
elif [ "${METHOD}" = 'script' ]; then
if ! nvm_has nvm_download; then
echo >&2 "You need curl or wget to install nvm"
exit 1
fi
install_nvm_as_script
else
echo >&2 "The environment variable \$METHOD is set to \"${METHOD}\", which is not recognized as a valid installation method."
exit 1
fi
echo
local NVM_PROFILE
NVM_PROFILE="$(nvm_detect_profile)"
local PROFILE_INSTALL_DIR
PROFILE_INSTALL_DIR="$(nvm_install_dir | command sed "s:^$HOME:\$HOME:")"
SOURCE_STR="\\nexport NVM_DIR=\"${PROFILE_INSTALL_DIR}\"\\n[ -s \"\$NVM_DIR/nvm.sh\" ] && \\. \"\$NVM_DIR/nvm.sh\" # This loads nvm\\n"
# shellcheck disable=SC2016
COMPLETION_STR='[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion\n'
BASH_OR_ZSH=false
if [ -z "${NVM_PROFILE-}" ] ; then
local TRIED_PROFILE
if [ -n "${PROFILE}" ]; then
      TRIED_PROFILE="${PROFILE} (as defined in \$PROFILE), "
fi
echo "=> Profile not found. Tried ${TRIED_PROFILE-}~/.bashrc, ~/.bash_profile, ~/.zshrc, and ~/.profile."
echo "=> Create one of them and run this script again"
echo " OR"
echo "=> Append the following lines to the correct file yourself:"
command printf "${SOURCE_STR}"
echo
else
if nvm_profile_is_bash_or_zsh "${NVM_PROFILE-}"; then
BASH_OR_ZSH=true
fi
if ! command grep -qc '/nvm.sh' "$NVM_PROFILE"; then
echo "=> Appending nvm source string to $NVM_PROFILE"
command printf "${SOURCE_STR}" >> "$NVM_PROFILE"
else
echo "=> nvm source string already in ${NVM_PROFILE}"
fi
# shellcheck disable=SC2016
if ${BASH_OR_ZSH} && ! command grep -qc '$NVM_DIR/bash_completion' "$NVM_PROFILE"; then
echo "=> Appending bash_completion source string to $NVM_PROFILE"
command printf "$COMPLETION_STR" >> "$NVM_PROFILE"
else
echo "=> bash_completion source string already in ${NVM_PROFILE}"
fi
fi
if ${BASH_OR_ZSH} && [ -z "${NVM_PROFILE-}" ] ; then
echo "=> Please also append the following lines to the if you are using bash/zsh shell:"
command printf "${COMPLETION_STR}"
fi
# Source nvm
# shellcheck source=/dev/null
\. "$(nvm_install_dir)/nvm.sh"
nvm_check_global_modules
nvm_install_node
nvm_reset
echo "=> Close and reopen your terminal to start using nvm or run the following to use it now:"
command printf "${SOURCE_STR}"
if ${BASH_OR_ZSH} ; then
command printf "${COMPLETION_STR}"
fi
}
#
# Unsets the various functions defined
# during the execution of the install script
#
nvm_reset() {
unset -f nvm_has nvm_install_dir nvm_latest_version nvm_profile_is_bash_or_zsh \
nvm_source nvm_node_version nvm_download install_nvm_from_git nvm_install_node \
install_nvm_as_script nvm_try_profile nvm_detect_profile nvm_check_global_modules \
nvm_do_install nvm_reset
}
[ "_$NVM_ENV" = "_testing" ] || nvm_do_install
} # this ensures the entire script is downloaded #
|
#!/bin/bash
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes
# If $1 set to 'all' then take the latest complete backup
BACKUP_TYPE=$1
BACKUP_FILENAME=$2
BACKUP_FILENAME_LATEST="backup-latest.tar"
if [ -z "$BACKUP_FILENAME" ]; then
  BACKUP_FILENAME=$BACKUP_FILENAME_LATEST
fi
MONGO_DB_CONTAINER=$(docker ps -aqf name=learn-with-jesus_mongo_1)
if [ "$BACKUP_TYPE" = "all" ]; then
  BACKUP_DIR=$(pwd)/../data/backup-all
else
  BACKUP_DIR=$(pwd)/../data/backup
fi
# BACKUP_DIR is set above so the message prints the real path
echo "Restoring from backup $BACKUP_DIR/$BACKUP_FILENAME"
# DELETE EXISTING DB AND COPY BACKUP
if [ "$BACKUP_TYPE" = "all" ]; then
  docker run --rm \
    --volumes-from ${MONGO_DB_CONTAINER} \
    -v "$BACKUP_DIR":/backup \
    ubuntu bash -c "cd data/db && rm -rf * && cd ../../ && tar xvf backup/${BACKUP_FILENAME}"
else
  docker run --rm \
    --volumes-from ${MONGO_DB_CONTAINER} \
    -v "$BACKUP_DIR":/backup \
    ubuntu bash -c "cd data/db && rm -rf * && cd ../../ && tar xvf backup/${BACKUP_FILENAME} \
    && cp -r /backup-tmp/** /data/db"
fi
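# Example invocations (filenames are placeholders):
#   ./restore.sh all backup-2020-01-01.tar   # restore from the complete backup set
#   ./restore.sh                             # restore backup-latest.tar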
|
import { GameObject, SpritePainter, Timer } from '../core';
import { GameUpdateArgs } from '../game';
import { PointsValue } from '../points';
import * as config from '../config';
const SPRITE_POINTS_PREFIX = 'points';
const SPRITE_ID_SEPARATOR = '.';
export class Points extends GameObject {
public zIndex = config.POINTS_Z_INDEX;
public readonly value: PointsValue;
public readonly painter = new SpritePainter();
private readonly timer = new Timer();
constructor(value: PointsValue, duration: number) {
super(56, 28);
this.value = value;
this.timer.reset(duration);
this.timer.done.addListener(this.handleTimer);
}
protected setup({ spriteLoader }: GameUpdateArgs): void {
const spriteId = this.getSpriteId(this.value);
this.painter.sprite = spriteLoader.load(spriteId);
}
protected update(updateArgs: GameUpdateArgs): void {
this.timer.update(updateArgs.deltaTime);
}
private handleTimer = (): void => {
this.dirtyPaintBox();
this.removeSelf();
};
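  // e.g. a PointsValue of 100 yields the sprite id "points.100"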
private getSpriteId(value: PointsValue): string {
const parts = [SPRITE_POINTS_PREFIX, value.toString()];
const spriteId = parts.join(SPRITE_ID_SEPARATOR);
return spriteId;
}
}
|
import { forEach } from "sage-library";
import { templateTextNode } from "./template-textnode.service";
import { templateComponent } from "./template-component.service";
import { templateHtmlElement } from "./template-html-element.service";
import { site } from "../../data/site.data";
/*
Dill works in two parts.
- Create templates that represent HTML and data.
- Render templates.
The following function generates Templates.
    A Dill Template contains both the structure of what is on the page and the data that reflects what should be shown on the page.
The templating process is also the way we add the Elements to the page.
A render only updates the Elements already on the page.
*/
export const generateDillTemplate = (
parentTemplate,
rootElement,
parentData,
dillElements,
isSvgOrChildOfSVG = false
) => {
/*
    We only allow Arrays. This makes the code easier to predict.
    We force the value to be an Array below.
*/
if (!(dillElements instanceof Array)) {
dillElements = [dillElements];
}
/*
There are three types of template we can create, each one is handled below.
- Textnode. A simple HTML Element that is used to put text inside other HTML Elements.
- A Dill Component. This contains child templates but does not render anything itself.
- A single HTML Element. Can have child templates.
*/
return forEach(dillElements, dillElement => {
if (typeof dillElement === "string") {
return templateTextNode(rootElement,
parentData,
dillElement
);
}
else if (!!dillElement.Component) {
return templateComponent(parentTemplate,
rootElement,
parentData,
dillElement,
isSvgOrChildOfSVG,
// (a,b,c,d,e) => generateDillTemplate(a,b,c,d,e)
);
}
else if (!!dillElement.nodeName) {
return templateHtmlElement(parentTemplate,
rootElement,
parentData,
dillElement,
isSvgOrChildOfSVG,
// (a,b,c,d,e) => generateDillTemplate(a,b,c,d,e)
);
}
});
}
site.generateDillTemplate = generateDillTemplate;
|
def word_frequency(text):
# Remove punctuation and convert text to lowercase
text = text.lower()
text = ''.join(char for char in text if char.isalnum() or char.isspace())
# Split the text into words
words = text.split()
# Create a dictionary to store word frequencies
word_freq = {}
for word in words:
if word in word_freq:
word_freq[word] += 1
else:
word_freq[word] = 1
# Sort the dictionary by frequency in descending order
sorted_word_freq = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)
# Print the sorted word frequencies
for word, freq in sorted_word_freq:
print(f"{word}: {freq}")
# Test the function with the given example
text = "The quick brown fox jumps over the lazy dog. The dog barks, and the fox runs away."
word_frequency(text) |
def filter_importable_modules(module_names):
importable_modules = []
for module_name in module_names:
try:
__import__(module_name)
importable_modules.append(module_name)
except ImportError:
pass
    return importable_modules
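# Usage sketch: only modules that import cleanly are kept
# ("definitely_missing_mod" is a deliberately nonexistent name).
if __name__ == "__main__":
    print(filter_importable_modules(["json", "os", "definitely_missing_mod"]))
    # ['json', 'os']
|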
#!/bin/bash
output=$(python tests/integration/my_program.py --database.port 5001)
if [ "$output" != "name='Database Application' database=DatabaseConfig(host='localhost', port=5001, user='any-db-user')" ]; then
echo "$output does not match";
exit 1;
fi |