text
stringlengths 1
1.05M
|
|---|
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression

# Load the dataset: every column except the last is a feature,
# the last column is the target.
data = pd.read_csv('stockdata.csv')
X = data.iloc[:, :-1].values
y = data.iloc[:, -1].values

# Fit an ordinary least-squares model and produce in-sample predictions.
regressor = LinearRegression().fit(X, y)
y_pred = regressor.predict(X)
|
package com.ajou.kickshare.initial;
import androidx.appcompat.app.AppCompatActivity;
import androidx.viewpager.widget.ViewPager;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.Button;
import com.ajou.kickshare.R;
import com.google.android.material.tabs.TabLayout;
import java.util.ArrayList;
/**
 * Intro screen: shows a swipeable {@link ViewPager} of usage images with a
 * pulsing "start" button that navigates to {@code LoginActivity}.
 */
public class MethodActivity extends AppCompatActivity {
    // NOTE(review): a static reference to an Activity outlives the Activity and
    // leaks its Context; confirm callers need this and consider clearing it in
    // onDestroy().
    public static MethodActivity _MethodActivity;
    private ArrayList<Integer> imageList;
    /** Horizontal page padding, in dp. */
    private static final int DP = 24;
    private Button mStartButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_method);
        _MethodActivity = MethodActivity.this;
        this.initializeData();
        // Pad the pager instead of clipping so neighbouring pages peek in.
        ViewPager viewPager = findViewById(R.id.view_pager);
        viewPager.setClipToPadding(false);
        float density = getResources().getDisplayMetrics().density;
        int margin = (int) (DP * density);
        viewPager.setPadding(margin, 0, margin, 0);
        viewPager.setPageMargin(margin / 2);
        viewPager.setAdapter(new ViewPagerAdapter(this, imageList));
        TabLayout tabLayout = findViewById(R.id.tab_layout); // was a redundant cast
        tabLayout.setupWithViewPager(viewPager, true);
        mStartButton = findViewById(R.id.btn_start);
        // Endless fade in/out to draw attention to the start button.
        Animation mAnimation = new AlphaAnimation(1, 0);
        mAnimation.setDuration(800);
        mAnimation.setInterpolator(new AccelerateInterpolator());
        mAnimation.setRepeatCount(Animation.INFINITE);
        mAnimation.setRepeatMode(Animation.REVERSE);
        mStartButton.startAnimation(mAnimation);
        mStartButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(MethodActivity.this, LoginActivity.class));
            }
        });
    }

    /** Fills {@code imageList} with the drawable ids shown by the pager. */
    public void initializeData() {
        imageList = new ArrayList<>(); // was a raw-type `new ArrayList()`
        imageList.add(R.drawable.ks_1);
        imageList.add(R.drawable.ks_2);
        imageList.add(R.drawable.ks_3);
        imageList.add(R.drawable.ks_4);
    }
}
|
<html>
<head>
  <title>Chart Example</title>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.9.3/Chart.min.js"></script>
</head>
<body>
  <canvas id="myChart" width="400" height="400"></canvas>
  <script>
    var data = {
      labels: ['1', '3', '4', '5', '7'],
      datasets: [{
        label: 'Data',
        data: [2, 4, 5, 6, 8],
        backgroundColor: 'rgba(255, 159, 64, 0.2)',
        // BUG FIX: was 'rgba(255, 159, 64.1)' — rgba() needs four arguments,
        // so the invalid color made the border fall back to the default.
        borderColor: 'rgba(255, 159, 64, 1)',
        borderWidth: 1
      }]
    };
    var options = {
      scales: {
        yAxes: [{
          ticks: {
            beginAtZero: true
          }
        }]
      }
    };
    var ctx = document.getElementById('myChart').getContext('2d');
    var chart = new Chart(ctx, {
      type: 'line',
      data: data,
      options: options
    });
  </script>
</body>
</html>
|
package model
// BriefContent is a trimmed-down content payload (cover, view count, owner)
// — presumably serialized for list endpoints; see DetailedContent for the
// full single-item shape.
type BriefContent struct {
	ContentID int       `json:"contentID"`
	Title     string    `json:"title"`
	Duration  int       `json:"duration"`
	CoverURL  string    `json:"cover"`
	Time      int64     `json:"createTime"` // creation time, Unix epoch (units not shown here — confirm)
	ViewNum   int       `json:"viewNum"`
	User      *MiniUser `json:"user"` // content owner; nil serializes as JSON null
}
// DetailedContent is the full single-item content payload, including the
// video URL, engagement counters, tags, and the viewer-specific Liked flag.
type DetailedContent struct {
	ContentID   int       `json:"contentID"`
	Title       string    `json:"title"`
	Description string    `json:"description"`
	Duration    int       `json:"duration"`
	Time        int64     `json:"createTime"` // creation time, Unix epoch (units not shown here — confirm)
	VideoURL    string    `json:"video"`
	User        *MiniUser `json:"user"`  // content owner; nil serializes as JSON null
	Liked       bool      `json:"liked"` // whether the requesting user liked this content
	ViewNum     int       `json:"viewNum"`
	CommentNum  int       `json:"commentNum"`
	LikeNum     int       `json:"likeNum"`
	Tags        []string  `json:"tags"`
}
|
/* global CreateMethodProperty, Reflect, Type, ToPropertyKey */
// 26.1.6 Reflect.getOwnPropertyDescriptor ( target, propertyKey )
// Installs Reflect.getOwnPropertyDescriptor per ES spec step numbering.
// CreateMethodProperty / Type / ToPropertyKey are polyfill-library globals
// (declared in the header comment above), not standard built-ins.
CreateMethodProperty(Reflect, 'getOwnPropertyDescriptor', function getOwnPropertyDescriptor(target, propertyKey) {
	// 1. If Type(target) is not Object, throw a TypeError exception.
	if (Type(target) !== "object") {
		throw new TypeError(Object.prototype.toString.call(target) + ' is not an Object');
	}
	// 2. Let key be ? ToPropertyKey(propertyKey).
	var key = ToPropertyKey(propertyKey);
	// Polyfill-library - these steps are handled by Object.getOwnPropertyDescriptor
	// 3. Let desc be ? target.[[GetOwnProperty]](key).
	// 4. Return FromPropertyDescriptor(desc).
	return Object.getOwnPropertyDescriptor(target, key);
});
|
#!/bin/bash
# Bump the mountaintools package version with bumpversion.
#   $1 - bump type: patch, minor, or major
#   $2 - literal "go" to actually apply; anything else does a dry run
# Refuses to run when the git working tree is dirty.
cd "$(dirname "$0")/../../mountaintools" || exit 1
if output=$(git status --porcelain) && [ -z "$output" ]; then
    if [ -z "$1" ]; then
        echo "You must supply an option, e.g., patch, minor, major"
        exit 1 # BUG FIX: was `exit 0` — a missing required argument is an error
    fi
    if [ "$2" == "go" ]; then
        bumpversion "$1" --verbose
        echo "Now you should push via 'git push && git push --tags' and replace the explicit version in all the docs."
    else
        bumpversion "$1" --dry-run --verbose
        echo "That was a dry run. If it looks okay, then add the 'go' argument"
    fi
else
    echo "Working directory is not clean:"
    echo "$output"
    exit 1
fi
|
(function (designWidth) {
  // Mobile browsers fire orientationchange; fall back to resize elsewhere.
  const resizeEvt = 'orientationchange' in window ? 'orientationchange' : 'resize'
  // Scale the root font-size proportionally to the viewport width, capped
  // at 625% once the viewport reaches the design mock-up width.
  function recalc () {
    const clientWidth = document.documentElement.clientWidth
    const percent = clientWidth >= designWidth ? 625 : (clientWidth / designWidth) * 625
    document.documentElement.style.fontSize = percent + "%"
  }
  window.addEventListener(resizeEvt, recalc, false)
  document.addEventListener('DOMContentLoaded', recalc, false)
})(375) // design mock-up width in px
|
#!/bin/bash
# OpenDKIM
# --------
#
# OpenDKIM provides a service that puts a DKIM signature on outbound mail.
#
# The DNS configuration for DKIM is done in the management daemon.
source setup/functions.sh # load our functions
source /etc/mailinabox.conf # load global vars
# Install DKIM...
echo Installing OpenDKIM/OpenDMARC...
apt_install opendkim opendkim-tools opendmarc
# Make sure configuration directories exist.
# ($STORAGE_ROOT is quoted everywhere below so a path containing spaces
# or glob characters cannot be split/expanded by the shell.)
mkdir -p /etc/opendkim
mkdir -p "$STORAGE_ROOT/mail/dkim"
# Used in InternalHosts and ExternalIgnoreList configuration directives.
# Not quite sure why.
echo "127.0.0.1" > /etc/opendkim/TrustedHosts
# We need to at least create these files, since we reference them later.
# Otherwise, opendkim startup will fail
touch /etc/opendkim/KeyTable
touch /etc/opendkim/SigningTable
if grep -q "ExternalIgnoreList" /etc/opendkim.conf; then
	true # already done #NODOC
else
	# Add various configuration options to the end of `opendkim.conf`.
	cat >> /etc/opendkim.conf << EOF
Canonicalization relaxed/simple
MinimumKeyBits 1024
ExternalIgnoreList refile:/etc/opendkim/TrustedHosts
InternalHosts refile:/etc/opendkim/TrustedHosts
KeyTable refile:/etc/opendkim/KeyTable
SigningTable refile:/etc/opendkim/SigningTable
Socket inet:8891@127.0.0.1
RequireSafeKeys false
EOF
fi
# Create a new DKIM key. This creates mail.private and mail.txt
# in $STORAGE_ROOT/mail/dkim. The former is the private key and
# the latter is the suggested DNS TXT entry which we'll include
# in our DNS setup. Note that the files are named after the
# 'selector' of the key, which we can change later on to support
# key rotation.
#
# A 1024-bit key is seen as a minimum standard by several providers
# such as Google. But they and others use a 2048 bit key, so we'll
# do the same. Keys beyond 2048 bits may exceed DNS record limits.
if [ ! -f "$STORAGE_ROOT/mail/dkim/mail.private" ]; then
	opendkim-genkey -b 2048 -r -s mail -D "$STORAGE_ROOT/mail/dkim"
fi
# Ensure files are owned by the opendkim user and are private otherwise.
chown -R opendkim:opendkim "$STORAGE_ROOT/mail/dkim"
chmod go-rwx "$STORAGE_ROOT/mail/dkim"
management/editconf.py /etc/opendmarc.conf -s \
	"Syslog=true" \
	"Socket=inet:8893@[127.0.0.1]"
# SPFIgnoreResults causes the filter to ignore any SPF results in the header
# of the message. This is useful if you want the filter to perform SPF checks
# itself, or because you don't trust the arriving header. This added header is
# used by spamassassin to evaluate the mail for spamminess.
management/editconf.py /etc/opendmarc.conf -s \
	"SPFIgnoreResults=true"
# SPFSelfValidate causes the filter to perform a fallback SPF check itself
# when it can find no SPF results in the message header. If SPFIgnoreResults
# is also set, it never looks for SPF results in headers and always performs
# the SPF check itself when this is set. This added header is used by
# spamassassin to evaluate the mail for spamminess.
management/editconf.py /etc/opendmarc.conf -s \
	"SPFSelfValidate=true"
# AlwaysAddARHeader Adds an "Authentication-Results:" header field even to
# unsigned messages from domains with no "signs all" policy. The reported DKIM
# result will be "none" in such cases. Normally unsigned mail from non-strict
# domains does not cause the results header field to be added. This added header
# is used by spamassassin to evaluate the mail for spamminess.
management/editconf.py /etc/opendkim.conf -s \
	"AlwaysAddARHeader=true"
# Add OpenDKIM and OpenDMARC as milters to postfix, which is how OpenDKIM
# intercepts outgoing mail to perform the signing (by adding a mail header)
# and how they both intercept incoming mail to add Authentication-Results
# headers. The order possibly/probably matters: OpenDMARC relies on the
# OpenDKIM Authentication-Results header already being present.
#
# Be careful. If we add other milters later, this needs to be concatenated
# on the smtpd_milters line.
#
# The OpenDMARC milter is skipped in the SMTP submission listener by
# configuring smtpd_milters there to only list the OpenDKIM milter
# (see mail-postfix.sh).
management/editconf.py /etc/postfix/main.cf \
	"smtpd_milters=inet:127.0.0.1:8891 inet:127.0.0.1:8893" \
	non_smtpd_milters=\$smtpd_milters \
	milter_default_action=accept
# We need to explicitly enable the opendmarc service, or it will not start
hide_output systemctl enable opendmarc
# Restart services.
restart_service opendkim
restart_service opendmarc
restart_service postfix
|
#! /bin/bash
# Launch the master SAT solver on the hanoi4 benchmark:
#   -cnfFile        CNF formula to solve
#   -assumptionFile assumption set (empty here)
#   -nsolvers       number of solver workers to spawn
java -jar master/target/master-rx.jar \
-cnfFile ./cnfs/hanoi4.cnf \
-assumptionFile ./cnfs/empty.assumptions \
-nsolvers 1
|
package demo._40.lazy;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Stopwatch;
import demo.AbstractTest;
/**
* Created by nlabrot on 01/09/15.
*/
public class LazyApiTest extends AbstractTest {

    /**
     * Resolving the lazily-initialized {@code LazyApi} bean for the first
     * time should block while it starts up — apparently a sleep of at least
     * two seconds (confirm in LazyApi itself).
     * NOTE(review): wall-clock based assertion; may be flaky on slow CI.
     */
    @Test
    public void sleepTest(){
        Stopwatch stopwatch = Stopwatch.createStarted();
        applicationContext.getBean(LazyApi.class);
        Assert.assertTrue(stopwatch.stop().elapsed(TimeUnit.SECONDS) >= 2);
    }
}
|
package Atom.Utility;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.HashMap;
import java.util.WeakHashMap;
/**
 * JSON helpers backed by a single shared {@link Gson} instance and a
 * weak-keyed cache of parsed strings.
 */
public class EncoderJson {
    public static Gson gson = new Gson();
    // Weak keys: a cached parse is dropped once the source String is no longer
    // strongly referenced elsewhere.
    // NOTE(review): WeakHashMap is not thread-safe — confirm single-threaded use.
    public static WeakHashMap<String, JsonElement> cache = new WeakHashMap<>();

    /**
     * Parses {@code s} into a {@link JsonElement}, reusing a previous parse of
     * the same string when one is still cached.
     */
    public static JsonElement parseJson(String s) {
        // computeIfAbsent replaces the original containsKey/put/get triple:
        // one lookup instead of three, with no gap between check and insert.
        return cache.computeIfAbsent(s, JsonParser::parseString);
    }

    /** Converts a string-to-string map into its {@link JsonObject} form. */
    public static JsonObject mapToJson(HashMap<String, String> h) {
        return gson.toJsonTree(h).getAsJsonObject();
    }
}
|
# File: C (Python 2.4)
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.showbase import DirectObject
from direct.interval.IntervalGlobal import *
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesbase import PiratesGlobals
class ComboMeter(DirectObject.DirectObject):
    """HUD readout of the player's current hit combo and total damage.

    Shows a 'Hit Combo' label with a growing counter and a damage subtitle,
    plus a separate 'Backstab!' flash. Driven by the 'trackCombo' messenger
    event; resets and fades out after COMBO_METER_RESET seconds of
    inactivity via taskMgr timers.

    Decompiled source: the explicit ``_ComboMeter__*`` names are the
    decompiler's spelling of Python name-mangled private methods.
    """

    # Seconds of inactivity before the meter/backstab text resets and fades.
    COMBO_METER_RESET = 2.0
    COMBO_NUM_SCALE = 0.14000000000000001
    BACKSTAB_SCALE = 0.089999999999999997
    TEXT_COLOR = PiratesGuiGlobals.TextFG1
    TEAM_COMBO_TEXT_COLOR = PiratesGuiGlobals.TextFG4
    SUB_TEXT_COLOR = PiratesGuiGlobals.TextFG2
    NUMBER_COLOR = PiratesGuiGlobals.TextFG1
    BACKSTAB_COLOR = Vec4(0.80000000000000004, 0.40000000000000002, 0.20000000000000001, 1)

    def __init__(self):
        DirectObject.DirectObject.__init__(self)
        self.combo = 0        # highest combo value seen since the last reset
        self.totalDamage = 0  # largest |total damage| seen since the last reset
        # Main 'Hit Combo' label anchored to the top-left of the screen; the
        # subtitle and counter below are parented to it.
        self.text = DirectLabel(parent = base.a2dTopLeft, relief = None, text = PLocalizer.HitCombo, text_align = TextNode.ALeft, text_scale = PiratesGuiGlobals.TextScaleTitleLarge, text_fg = self.TEXT_COLOR, text_shadow = PiratesGuiGlobals.TextShadow, textMayChange = 1, pos = (0.5, 0, -0.5), text_font = PiratesGlobals.getPirateOutlineFont())
        self.text.setTransparency(1)
        self.subText = DirectLabel(parent = self.text, relief = None, text = PLocalizer.Damage, text_align = TextNode.ALeft, text_scale = PiratesGuiGlobals.TextScaleTitleSmall, text_fg = self.SUB_TEXT_COLOR, text_shadow = PiratesGuiGlobals.TextShadow, textMayChange = 1, pos = (0.089999999999999997, 0, -0.070000000000000007), text_font = PiratesGlobals.getPirateOutlineFont())
        self.comboCounter = DirectLabel(parent = self.text, relief = None, text = '', text_align = TextNode.ARight, text_scale = self.COMBO_NUM_SCALE, text_fg = self.NUMBER_COLOR, text_shadow = PiratesGuiGlobals.TextShadow, textMayChange = 1, pos = (-0.025999999999999999, 0, -0.01), text_font = PiratesGlobals.getPirateOutlineFont())
        self.backstabText = DirectLabel(parent = base.a2dTopLeft, relief = None, text = 'Backstab!', text_align = TextNode.ALeft, text_scale = self.BACKSTAB_SCALE, text_fg = self.BACKSTAB_COLOR, text_shadow = PiratesGuiGlobals.TextShadow, textMayChange = 1, pos = (1.165, 0, -0.96999999999999997), text_font = PiratesGlobals.getPirateOutlineFont())
        self.text.hide()
        self.backstabText.hide()
        # Interval handles; created lazily and tracked so they can be paused.
        self.faderIn = None
        self.faderOut = None
        self.animIval = None
        self.backstabFaderIn = None
        self.backstabFaderOut = None
        self.backstabAnimIval = None
        self.accept('trackCombo', self.newHit)

    def destroy(self):
        """Pause all intervals, destroy the labels, and stop listening."""
        if self.animIval:
            self.animIval.pause()
            self.animIval = None
        if self.faderIn:
            self.faderIn.pause()
            self.faderIn = None
        if self.faderOut:
            self.faderOut.pause()
            self.faderOut = None
        if self.backstabAnimIval:
            self.backstabAnimIval.pause()
            self.backstabAnimIval = None
        if self.backstabFaderIn:
            self.backstabFaderIn.pause()
            self.backstabFaderIn = None
        if self.backstabFaderOut:
            self.backstabFaderOut.pause()
            self.backstabFaderOut = None
        if self.text:
            self.text.destroy()
            self.text = None
        if self.backstabText:
            self.backstabText.destroy()
            self.backstabText = None
        self.ignoreAll()
        # NOTE(review): only the combo-reset task is removed here; the
        # 'resetBackstab' doMethodLater task can outlive destroy() — confirm.
        taskMgr.remove(self._ComboMeter__getResetComboMeter())

    def newHit(self, value, numAttackers, totalDamage):
        """'trackCombo' handler.

        An all-zero event (0, 0, 0) is the reset sentinel. Combos of 1 or
        less are not shown. The displayed combo/damage only ever grow until
        the next reset, and the (re)arming of the reset timer happens on
        every qualifying hit.
        """
        if value == 0 and numAttackers == 0 and totalDamage == 0:
            self.resetMeter()
        if value <= 1:
            return None
        self._ComboMeter__showMeter()
        if self.combo < value:
            self.combo = value
            self.comboCounter['text'] = str(self.combo)
            # Shift the counter color from green toward red as the combo rises.
            color = Vec4(0.59999999999999998 + value * 0.10000000000000001, 1.0 - value * 0.10000000000000001, 0, 1)
            self.comboCounter['text_fg'] = color
            if self.animIval:
                self.animIval.finish()
                self.animIval = None
            # Pop effect: the counter scales down from 2x to normal size.
            scaleIval = self.comboCounter.scaleInterval(0.20000000000000001, 1.0, startScale = 2.0, blendType = 'easeIn')
            self.animIval = Parallel(scaleIval)
            self.animIval.start()
        if numAttackers > 1:
            self.text['text'] = PLocalizer.TeamCombo
            self.text['text_fg'] = self.TEAM_COMBO_TEXT_COLOR
        if abs(self.totalDamage) < abs(totalDamage):
            self.totalDamage = totalDamage
            self.subText['text'] = str(abs(self.totalDamage)) + ' ' + PLocalizer.Damage
        # Re-arm the inactivity timer on every hit.
        taskMgr.remove(self._ComboMeter__getResetComboMeter())
        taskMgr.doMethodLater(self.COMBO_METER_RESET, self.resetMeter, self._ComboMeter__getResetComboMeter())

    def newBackstab(self):
        """Flash the 'Backstab!' text with the same pop/auto-reset pattern."""
        self._ComboMeter__showBackstab()
        if self.backstabAnimIval:
            self.backstabAnimIval.finish()
            self.backstabAnimIval = None
        scaleIval = self.backstabText.scaleInterval(0.20000000000000001, 1.0, startScale = 2.0, blendType = 'easeIn')
        self.backstabAnimIval = Parallel(scaleIval)
        self.backstabAnimIval.start()
        taskMgr.remove(self._ComboMeter__getResetBackstab())
        taskMgr.doMethodLater(self.COMBO_METER_RESET, self.resetBackstab, self._ComboMeter__getResetBackstab())

    def resetMeter(self, args = None):
        # `args` accepts the Task object passed by taskMgr.doMethodLater.
        self._ComboMeter__fadeOutMeter()
        self.combo = 0
        self.totalDamage = 0

    def resetBackstab(self, args = None):
        # `args` accepts the Task object passed by taskMgr.doMethodLater.
        self._ComboMeter__fadeOutBackstab()

    def _ComboMeter__getResetComboMeter(self):
        # Task name for the combo-reset timer.
        return 'resetComboMeter'

    def _ComboMeter__getResetBackstab(self):
        # Task name for the backstab-reset timer.
        return 'resetBackstab'

    def _ComboMeter__hideMeter(self):
        """Immediately hide both texts, cancelling any in-flight fades."""
        if self.faderIn:
            self.faderIn.pause()
            self.faderIn = None
        if self.faderOut:
            self.faderOut.pause()
            self.faderOut = None
        self.text.hide()
        self.backstabText.hide()

    def _ComboMeter__showMeter(self):
        """Show the meter at full opacity with the default (solo) color."""
        if self.faderIn:
            self.faderIn.pause()
            self.faderIn = None
        if self.faderOut:
            self.faderOut.pause()
            self.faderOut = None
        self.text.show()
        self.text.setAlphaScale(1.0)
        self.text['text_fg'] = self.TEXT_COLOR

    def _ComboMeter__showBackstab(self):
        """Show the backstab text at full opacity."""
        if self.backstabFaderIn:
            self.backstabFaderIn.pause()
            self.backstabFaderIn = None
        if self.backstabFaderOut:
            self.backstabFaderOut.pause()
            self.backstabFaderOut = None
        self.backstabText.show()
        self.backstabText.setAlphaScale(1.0)

    def _ComboMeter__fadeInMeter(self):
        """Fade the meter in over one second (no-op if already fading in)."""
        self.text.show()
        if self.faderOut:
            self.faderOut.pause()
            self.faderOut = None
        if self.faderIn:
            return None
        self.faderIn = LerpFunctionInterval(self.text.setAlphaScale, fromData = 0, toData = 1, duration = 1.0)
        self.faderIn.start()

    def _ComboMeter__fadeOutMeter(self):
        """Fade the meter out, then restore its default text/color state."""
        if self.faderIn:
            self.faderIn.pause()
            self.faderIn = None
        if self.faderOut:
            return None

        def restoreColor():
            # Runs after the fade completes: reset label text, colors, and
            # alpha so the next combo starts from a clean state.
            self.text['text_fg'] = self.TEXT_COLOR
            self.text['text'] = PLocalizer.HitCombo
            self.subText['text'] = str(0) + ' ' + PLocalizer.Damage
            self.comboCounter['text'] = str(0)
            self.text.setAlphaScale(1.0)

        fadeOut = LerpFunctionInterval(self.text.setAlphaScale, fromData = 1, toData = 0, duration = 1.0)
        self.faderOut = Sequence(fadeOut, Func(self.text.hide), Func(restoreColor))
        self.faderOut.start()

    def _ComboMeter__fadeOutBackstab(self):
        """Fade the backstab text out and hide it."""
        if self.backstabFaderIn:
            self.backstabFaderIn.pause()
            self.backstabFaderIn = None
        if self.backstabFaderOut:
            return None
        self.backstabText.setAlphaScale(1.0)
        fadeOut = LerpFunctionInterval(self.backstabText.setAlphaScale, fromData = 1, toData = 0, duration = 1.0)
        self.backstabFaderOut = Sequence(fadeOut, Func(self.backstabText.hide))
        self.backstabFaderOut.start()
|
<gh_stars>1-10
module Common
  # Grape entities (serialization presenters): each class whitelists the
  # attributes exposed for a given model and audience. Subclassing extends
  # the parent's exposure list (e.g. User adds phone_number to PublicUser).
  module Entities
    class Address < Grape::Entity
      expose :id, documentation: { type: Integer }
      expose :street_name
      expose :city
      expose :county
      expose :postal_code
      expose :coordinates
      expose :details
    end

    # Publicly visible user fields — no contact details.
    class PublicUser < Grape::Entity
      root :users, :user
      expose :id, documentation: { type: Integer }
      expose :first_name
      expose :last_name
      expose :role
      expose :description
      expose :received_rating, as: :rating, documentation: { type: Float }
      expose :address, using: Address, expose_nil: true
    end

    # PublicUser plus the phone number (for authenticated consumers).
    class User < PublicUser
      expose :phone_number
    end

    # The current user's own profile; volunteer-only flags are conditional.
    class Profile < User
      root :profiles, :profile
      expose :email
      expose :questionnaire_completed, if: ->(user, _) { user.volunteer? }, documentation: { type: 'boolean' }
      expose :trusted_volunteer, if: ->(user, _) { user.volunteer? }, documentation: { type: 'boolean' }
    end

    class BasicNeed < Grape::Entity
      root :needs, :need
      expose :id, documentation: { type: Integer }
      expose :description
      expose :category
      expose :status
      expose :status_updated_at, documentation: { type: DateTime }
    end

    # BasicNeed plus who last updated it.
    class Need < BasicNeed
      expose :updated_by, using: User, expose_nil: true
    end

    class PublicReview < Grape::Entity
      root :reviews, :review
      expose :id, documentation: { type: Integer }
      expose :stars, documentation: { type: Integer }
      expose :comment
      expose :created_at, documentation: { type: DateTime }
    end

    # PublicReview plus reviewer/reviewee identities.
    class Review < PublicReview
      expose :provided_by, using: User, expose_nil: true
      expose :given_to, using: User, expose_nil: true
    end

    class Suggestion < Grape::Entity
      expose :id, documentation: { type: Integer }
      expose :email
      expose :name
      expose :message
    end

    class Testimonial < Grape::Entity
      expose :id, documentation: { type: Integer }
      expose :user, using: PublicUser
      expose :message
    end

    # A public user together with the reviews they have received.
    class Volunteer < PublicUser
      expose :received_reviews, using: PublicReview, as: :reviews
    end

    class SpecialCase < Grape::Entity
      expose :id, documentation: { type: Integer }
      expose :description
      expose :status
      expose :added_by, using: PublicUser
    end

    class Device < Grape::Entity
      expose :signal_id
    end
  end
end
|
/* eslint-disable no-path-concat */
/* jslint node: true */
/* jshint -W097 */
/* jshint esversion: 6 */
'use strict';
const express = require('express');
const router = express.Router();
var bodyParser = require("body-parser");
// NOTE(review): bcrypt is required but never used in this file — presumably
// hashing happens in the users controller; confirm and drop if so.
var bcrypt = require('bcryptjs');
// Parse URL-encoded and JSON request bodies for every route on this router.
router.use(bodyParser.urlencoded({ extended: false }));
router.use(bodyParser.json());
const usersCtrl = require('../controllers/users_controller');
const tokenize = require('../middleware/middleware');
// Login is the only route without the JWT-verification middleware.
router.post('/login', usersCtrl.login);
// NOTE(review): logout presumably mutates session state but is exposed as
// GET — confirm whether POST would be more appropriate.
router.get('/logout', tokenize.verifyJWT, usersCtrl.logout);
// User CRUD — all guarded by JWT verification.
router.get('/fetchAll', tokenize.verifyJWT, usersCtrl.fetchAll);
router.get('/getUser/:id', tokenize.verifyJWT, usersCtrl.getUser);
router.post('/addUser', tokenize.verifyJWT, usersCtrl.addUser);
router.delete('/deleteUser/:id', tokenize.verifyJWT, usersCtrl.deleteUser);
router.patch('/updateUser', tokenize.verifyJWT, usersCtrl.updateUser);
module.exports = router;
|
package com.googlecode.junittoolbox;
import java.lang.annotation.*;
/**
* This annotation can be used with the {@link WildcardPatternSuite}
* and the {@link ParallelSuite} runner. It allows you to specify
* the children classes of a test suite class with a
* <a href="http://ant.apache.org/manual/dirtasks.html#patterns" target="_blank">wildcard pattern</a>.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Inherited
public @interface SuiteClasses {
    /**
     * <a href="http://ant.apache.org/manual/dirtasks.html#patterns" target="_blank">Wildcard pattern(s)</a>
     * relative to the directory containing the actual test suite class
     * annotated with <code>@RunWith(WildcardPatternSuite.class)</code> or
     * <code>@RunWith(ParallelSuite.class)</code>,
     * a wildcard pattern <strong>must not</strong> start with a <code>'/'</code> character,
     * and <strong>must</strong> end with <code>".class"</code> (unless it starts
     * with a <code>'!'</code> character, which means matching class files are excluded).
     */
    String[] value(); // `public` dropped: redundant on annotation members
}
|
from textblob import TextBlob

# Analyze the sample sentence and report its sentiment.
text = "The service was excellent"
sentiment = TextBlob(text).sentiment
print("Sentiment: {}".format(sentiment))

# Classify by the sign of the polarity score: >0 positive, 0 neutral, <0 negative.
if sentiment.polarity > 0:
    label = "Positive"
elif sentiment.polarity == 0:
    label = "Neutral"
else:
    label = "Negative"
print(label)
|
package com.wargod.interceptor;
import com.wargod.util.Commons;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.Nullable;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* 页面拦截器
*
* @author tangj
*/
@Component
public class BaseInterceptor implements HandlerInterceptor{

    @Autowired
    private Commons commons;

    /**
     * After handler execution, exposes the shared {@code Commons} helper to
     * the view layer under the request attribute name {@code "commons"}.
     */
    @Override
    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, @Nullable ModelAndView modelAndView) throws Exception {
        request.setAttribute("commons", commons);
    }
}
|
#!/bin/bash
# Build the threax/build-dotnet:5.0 image using this script's directory as the
# build context, resolving symlinks so the context is always the real location.
# Thanks to https://stackoverflow.com/questions/59895/how-to-get-the-source-directory-of-a-bash-script-from-within-the-script-itself?page=1&tab=votes#tab-top
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
scriptPath="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
# BUG FIX: quote the context path so a directory containing spaces works.
docker build --pull -t threax/build-dotnet:5.0 "$scriptPath"
|
#!/bin/sh
# PURPOSE: simple wrapper script for running tests
# -- auto: path variables (all expansions quoted so paths with spaces work)
scriptSelf=$0;
scriptName=$(basename "$scriptSelf")
scriptCallDir=$(dirname "$scriptSelf")
scriptFullDir=$(cd "$scriptCallDir"; echo "$PWD")
scriptFullPath=$scriptFullDir/$scriptName;
scriptParentDir=$(dirname "$scriptFullDir")
# -- /auto: path variables
# list of enabled tests
TESTS_ENABLED="ansible goss"
# Run each enabled test: expected at <scriptdir>/<name>/<name>.sh, invoked
# with this script's arguments passed through unchanged.
for TESTNAME in ${TESTS_ENABLED}
do
    TESTPATH=${scriptFullDir}/${TESTNAME}/${TESTNAME}.sh
    if [ -f "${TESTPATH}" ];then
        echo "[>] running ${TESTNAME} tests (${TESTPATH})"
        /bin/sh "${TESTPATH}" "$@" # BUG FIX: was $* — "$@" preserves argument word boundaries
    fi
done
# eof
|
/**
 * An immutable book value: a title and an author, fixed at construction.
 */
public class Book {
    private final String title;  // final: never reassigned after construction
    private final String author;

    /**
     * @param title  the book's title
     * @param author the book's author
     */
    public Book(String title, String author) {
        this.title = title;
        this.author = author;
    }

    /** @return the book's title */
    public String getTitle() {
        return title;
    }

    /** @return the book's author */
    public String getAuthor() {
        return author;
    }
}
|
#include<stdio.h>

/*
 * Returns the maximum sum of any k consecutive elements of arr[0..n-1]
 * using a sliding window. Prints "Invalid" and returns -1 when n < k.
 */
int maxSum(int arr[], int n, int k)
{
    if (n < k)
    {
        printf("Invalid");
        return -1;
    }
    /* Sum of the first window of k elements. */
    int res = 0;
    for (int i = 0; i < k; i++)
        res += arr[i];
    /* Slide the window one step at a time: add the element entering the
     * window and subtract the one leaving it. */
    int curr_sum = res;
    for (int i = k; i < n; i++)
    {
        curr_sum += arr[i] - arr[i - k];
        /* BUG FIX: the original called max(), which is not defined in C
         * (no <algorithm>/macro in scope) and failed to compile. */
        if (curr_sum > res)
            res = curr_sum;
    }
    return res;
}
|
#!/usr/bin/env bash
# Start SQL Server in the background, then retry the init script until the
# instance accepts connections (startup time is indeterminate).
/opt/mssql/bin/sqlservr &
echo "Waiting for server to start...."
#do this in a loop because the timing for when the SQL instance is ready is indeterminate
for i in {1..50};
do
    # Password quoted: it may contain spaces or shell metacharacters.
    # Testing the command directly replaces the fragile `$?` check.
    if /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${SA_PASSWORD}" -d master -i /docker-entrypoint-initdb.d/mssql-init.sql
    then
        echo "mssql-init.sh completed"
        break
    else
        echo "not ready yet..."
        sleep 5
    fi
done
# Keep the container's foreground process alive after initialization.
sleep infinity
|
import React, { useMemo } from "react";
import { useSelector, shallowEqual } from "react-redux";
import { FEATURES } from "../../Modules/features";
import { getFeatureDetails } from "../../Redux/eventSession";
import { VERTICAL_NAV_OPTIONS } from "../../Contexts/VerticalNavBarContext";
import { makeStyles, Typography } from "@material-ui/core";
import NoVideoImage from "../../Assets/illustrations/undraw_video_call_kxyp.svg";
// Styles for the backstage area: a full-size white video container and a
// bottom-anchored fallback illustration shown when no stream is configured.
const useStyles = makeStyles((theme) => ({
  videoContainer: {
    width: "100%",
    height: "100%",
    backgroundColor: "white"
  },
  noVideoImage: {
    maxWidth: "100%",
    maxHeight: "100%",
    position: "absolute",
    bottom: 0,
    margin: "auto",
    width: "100%",
    height: "40%"
  }
}));
const BackstageContainer = () => {
const classes = useStyles();
const customNavBarFeature = useSelector(
getFeatureDetails(FEATURES.CUSTOM_NAV_BAR),
shallowEqual
);
const backstageFeature = useSelector(
getFeatureDetails(FEATURES.STREAMYARD_BACKSTAGE),
shallowEqual
);
const backstageStageTitle = useMemo(() => {
if (
customNavBarFeature &&
customNavBarFeature[VERTICAL_NAV_OPTIONS.backstage]
) {
return customNavBarFeature[VERTICAL_NAV_OPTIONS.backstage].label;
}
return "Backstage";
}, [customNavBarFeature]);
const streamYardLink = useMemo(
() =>
backstageFeature &&
backstageFeature.enabled &&
backstageFeature.streamYardLink.trim() !== ""
? backstageFeature.streamYardLink.trim()
: null,
[backstageFeature]
);
if (!backstageFeature || !backstageFeature.enabled) {
return (
<div className={classes.videoContainer}>
<Typography align="center" gutterBottom style={{ paddingTop: 100 }}>
{backstageStageTitle} not configured correctly...
</Typography>
<Typography variant="caption" display="block" align="center">
Please contact the event organizer or Veertly team
</Typography>
<img
alt="Not available"
src={NoVideoImage}
className={classes.noVideoImage}
/>
</div>
);
}
return (
<div className={classes.videoContainer}>
<iframe
src={streamYardLink}
width="100%"
height="100%"
frameBorder="0"
allow="camera;microphone"
title={backstageStageTitle}
/>
</div>
);
};
export default BackstageContainer;
|
#!/bin/sh
# Regenerate the gettext translation template (.pot) from the Qt sources:
# collect source files, run lupdate to build a .ts, then convert to .pot.
set -e
ROOT=$(pwd)
SRC="${ROOT}/src"
TEMPLATE_PO="$ROOT/template.pot"
TEMPLATE_TS="$ROOT/template.ts"
BASE_LST_FILE="$ROOT/base_lst_file"
LCONVERT_BIN=${LCONVERT_BIN:-lconvert}
LRELEASE_BIN=${LRELEASE_BIN:-lrelease}
LUPDATE_BIN=${LUPDATE_BIN:-lupdate}
###############################################################################
echo "Writing lst file..."
cd "$SRC"
find -type f \( -iname \*.h -o -iname \*.cpp -o -iname \*.ui \) > "$BASE_LST_FILE"
cd "$ROOT"
echo " $(wc -l < "$BASE_LST_FILE") files found"
echo "Generating new template..."
if [ -f "$TEMPLATE_PO" ]
then
    echo " Converting .pot to .ts"
    $LCONVERT_BIN -locations relative "$TEMPLATE_PO" -o "$TEMPLATE_TS"
fi
echo " Generating .ts"
# NOTE(review): the .ts generated from the .pot above is deleted here before
# lupdate recreates it — confirm whether the conversion step is still needed.
rm -f "$TEMPLATE_TS"
cd "$SRC"
$LUPDATE_BIN "@$BASE_LST_FILE" -ts "$TEMPLATE_TS"
cd "$ROOT"
echo " Converting .ts to .pot"
$LCONVERT_BIN -locations relative "$TEMPLATE_TS" -o "$TEMPLATE_PO"
|
#!/bin/bash
# Commit a single file using the Beagle bot identity.
#   $1 - commit message
#   $2 - path of the file to stage
# Does nothing when the working tree is already clean.
if [[ -n $(git status --porcelain) ]]; then
    git config user.email 'beagle@zup.com.br'
    git config user.name 'Beagle'
    git add "$2"
    # -s adds a Signed-off-by trailer; -m uses $1 as the message.
    git commit -sm "$1"
fi
|
<reponame>zrwusa/expo-bunny<filename>src/components/VividAlgorithm/index.tsx
export * from './VividAlgorithm';
|
/** \addtogroup frameworks */
/** @{*/
/****************************************************************************
* Copyright (c) 2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************
*/
#ifndef UTEST_STACK_TRACE_H
#define UTEST_STACK_TRACE_H

#ifdef UTEST_STACK_TRACE
#include <string>

/* Fixed-size buffer of function names recorded by UTEST_LOG_FUNCTION(). */
#define UTEST_MAX_BACKTRACE 100
extern std::string utest_trace[UTEST_MAX_BACKTRACE];
extern void utest_trace_initialise();
extern void utest_add_to_trace(char *func_name);
extern void utest_dump_trace();

/* NOTE(review): the replacement text starts with ';', so each expansion
 * begins with an empty statement. It works, but looks accidental — confirm
 * upstream before cleaning up. */
#define UTEST_LOG_FUNCTION(); utest_add_to_trace((char *)__func__);
#define UTEST_TRACE_START utest_trace_initialise();
#define UTEST_DUMP_TRACE utest_dump_trace();
#else
/* Tracing disabled: all three macros compile away to nothing. */
#define UTEST_LOG_FUNCTION();
#define UTEST_TRACE_START
#define UTEST_DUMP_TRACE
#endif // UTEST_STACK_TRACE

#endif // UTEST_STACK_TRACE_H
/** @}*/
|
#ifndef PARSER_H
#define PARSER_H

#include "tokens.h"
#include "nodes.h"

/* Parser state threaded through every parsing function. */
typedef struct Parser {
    Tokens *tokens;  /* token stream being consumed */
    Token *current;  /* presumably the token at `index` — confirm in the .c file */
    char error[50];  /* error message buffer (fixed 50 bytes) */
    int index;       /* position within the token stream */
} Parser;

/* Entry point: parse `tokens` into a Node tree using `parser` for state. */
Node *parser_parse(Parser *parser, Tokens **tokens);
/* Advance to the next token. */
void parser_advance(Parser *parser);
/* Grammar productions — apparently precedence levels from loosest to
 * tightest binding: expr -> term -> exponent -> factor (confirm against
 * the implementation). */
Node *parser_expr(Parser *parser);
Node *parser_term(Parser *parser);
Node *parser_exponent(Parser *parser);
Node *parser_factor(Parser *parser);

#endif // PARSER_H
|
// Load the standings shortly after the page is ready.
document.addEventListener('DOMContentLoaded', function () {
    setTimeout(function () {
        classementDisplay();
    }, 1000)
})
// Refresh the standings when a .classementBtn element is clicked.
// BUG FIX: the handler takes `event` as a parameter — the original read the
// deprecated global `window.event`, which is absent in Firefox.
document.addEventListener('click', function (event) {
    if (event.target.classList.contains('classementBtn')) {
        setTimeout(function () {
            classementDisplay();
        }, 1000)
    }
})
// display list team: fetch the Premier League standings and render them
function classementDisplay() {
    const url = "https://api.football-data.org/v2/competitions/2021/standings";
    // BUG FIX: removed `withCredentials: true` — that is an XMLHttpRequest
    // option and is silently ignored by fetch(); authentication is done via
    // the X-Auth-Token header instead.
    fetch(url, {
        method: "GET",
        headers: {
            "X-Auth-Token": "1<PASSWORD>",
        }
    })
        .then(resp => resp.json())
        .then(function (data) {
            endSeason(data);
            cardClassement(data.standings[0].table)
        })
        .catch(function (error) {
            console.log(error);
        });
}
// display end season: writes the season end date into the '.league p' element
function endSeason(data){
const endSeason=document.querySelector('.league p');
endSeason.innerHTML=`Ends:${data.season.endDate}`;
}
//display team classement on card
// Builds one Materialize "card" per standings row (crest, name, won/draw/lost
// counts) and replaces the contents of the '.classement-items' container.
function cardClassement(dataItems){
const classementItems=document.querySelector('.classement-items');
let card='';
// Accumulate markup for every team, then write it to the DOM once.
dataItems.forEach(data => {
card+=`
<div class="card horizontal teamItems" data-id=${data.team.id}>
<div class="card-image">
<img src=${data.team.crestUrl} alt="icon of team">
</div>
<div class="card-stacked">
<div class="card-content ">
<p>${data.team.name}</p>
<span class="won">won : ${data.won}</span>
<span class="draw">draw : ${data.draw}</span>
<span class="lost">lost : ${data.lost}</span>
</div>
</div>
</div>`
});
classementItems.innerHTML=card;
}
|
<filename>stack_array.c<gh_stars>0
#include <stdio.h>
// #include<conio.h>
#define N 5 /* maximum stack capacity */
int stack[N]; /* storage for the stack elements */
int top = -1; /* index of the top element; -1 means the stack is empty */
/* Read one integer from stdin and push it onto the stack,
 * printing an overflow message when the stack is already full. */
void push()
{
    int value;

    printf("Enter an integer value : ");
    scanf("%d", &value);

    if (top < N - 1)
    {
        stack[++top] = value; /* place the new element on top */
    }
    else
    {
        printf("Overflow !!, integer can not be inserted.\n");
    }
}
/* Remove the top element and print it, or report underflow when empty. */
void pop()
{
    if (top == -1)
    {
        printf("Underflow!! ,integer can not be poped.\n");
        return;
    }
    /* stack[top--] reads the current top, then shrinks the stack. */
    printf("The poped number is : %d\n", stack[top--]);
}
/* Print the top element without removing it; report when the stack is empty. */
void peek()
{
    if (top >= 0)
    {
        printf("The top most element is %d\n", stack[top]);
    }
    else
    {
        printf("Stack is empty\n");
    }
}
/* Print every stack element from top to bottom, one per line. */
void display()
{
    int i;

    for (i = top; i >= 0; i--)
    {
        printf("%d \n", stack[i]);
    }
}
/*
 * Menu loop: read a choice and dispatch to the stack operation; 0 exits.
 *
 * Bug fixes versus the original:
 *  - clrscn() was undeclared (conio.h is commented out) and would not even
 *    link; the call is removed.
 *  - the choice was read only once BEFORE the loop, so any non-zero input
 *    spun forever re-running the same operation; it is now re-read on
 *    every iteration, giving the user a way to exit with 0.
 *  - a redundant inner brace pair was removed and main now returns 0.
 */
int main()
{
    int choice = 0;

    printf("Enter your choice : 1 for push ,2 for pop ,3 for peek and 4 for display: \n");
    scanf("%d", &choice);
    while (choice != 0)
    {
        switch (choice)
        {
        case 1:
            push();
            break;
        case 2:
            pop();
            break;
        case 3:
            peek();
            break;
        case 4:
            display();
            break;
        default:
            printf("Invalid Choice\n");
        }
        /* Prompt again so the loop can terminate on 0. */
        printf("Enter your choice : 1 for push ,2 for pop ,3 for peek and 4 for display: \n");
        scanf("%d", &choice);
    }
    return 0;
}
|
#!/bin/bash
# Run from package root dir!
# Prints (but does not execute) the `npm deprecate` command that marks every
# published version in [pkg_min_version, current) of this package deprecated.
>&2 echo "For safety reasons command is only echoed and not executed"
>&2 echo "To execute command:"
>&2 echo "./scripts/npm_deprecate.sh | bash"
>&2 echo ""
pkg_min_version="0.5.0-alpha.1"
# Extract the package name from package.json. NOTE(review): this grep/awk/sed
# pipeline assumes the first "name" line is the package name — a JSON-aware
# tool (jq, `node -p`) would be more robust; confirm before changing.
pkg_name=$(cat package.json \
| grep name \
| head -1 \
| awk -F: '{ print $2 }' \
| sed 's/[",]//g' \
| tr -d '[[:space:]]')
# Latest published version, used as the (exclusive) upper bound.
pkg_version=$(npm view $pkg_name version)
echo npm deprecate $pkg_name@"\">=${pkg_min_version} <${pkg_version}\"" "\"Using deprecated version! Please upgrade package.\""
|
<filename>commands/stringtohex.js
const { Message, MessageEmbed } = require("discord.js")
module.exports = {
name : 'stringtohex',
description : 'converts a string into hexadecimal',
execute(client, message, args) {
function stringToHex(str) {
//converting string into buffer
let bufStr = Buffer.from(str, 'utf8');
//with buffer, you can convert it into hex with following code
message.channel.send("Hexadecimal output: `" + bufStr.toString('hex') + "`");
}
if(args[1]){
var sentence = args.join(" ");
stringToHex(sentence);
}
else{
stringToHex(args[0]);
}
}
}
|
// Copyright 2020 Samsung Electronics Co., Ltd. All rights reserved.
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef EMBEDDER_TIZEN_EMBEDDER_ENGINE_H_
#define EMBEDDER_TIZEN_EMBEDDER_ENGINE_H_
#include <memory>
#include "flutter/shell/platform/common/cpp/client_wrapper/include/flutter/plugin_registrar.h"
#include "flutter/shell/platform/common/cpp/incoming_message_dispatcher.h"
#include "flutter/shell/platform/tizen/channels/key_event_channel.h"
#include "flutter/shell/platform/tizen/channels/lifecycle_channel.h"
#include "flutter/shell/platform/tizen/channels/localization_channel.h"
#include "flutter/shell/platform/tizen/channels/navigation_channel.h"
#include "flutter/shell/platform/tizen/channels/platform_channel.h"
#include "flutter/shell/platform/tizen/channels/platform_view_channel.h"
#include "flutter/shell/platform/tizen/channels/settings_channel.h"
#include "flutter/shell/platform/tizen/channels/text_input_channel.h"
#include "flutter/shell/platform/tizen/external_texture_gl.h"
#include "flutter/shell/platform/tizen/key_event_handler.h"
#include "flutter/shell/platform/tizen/public/flutter_tizen.h"
#include "flutter/shell/platform/tizen/public/flutter_tizen_texture_registrar.h"
#include "flutter/shell/platform/tizen/tizen_event_loop.h"
#include "flutter/shell/platform/tizen/tizen_renderer.h"
#ifdef FLUTTER_TIZEN_4
#include "flutter/shell/platform/tizen/tizen_renderer_ecore_wl.h"
#else
#include "flutter/shell/platform/tizen/tizen_renderer_ecore_wl2.h"
#endif
#include "flutter/shell/platform/tizen/tizen_vsync_waiter.h"
#include "flutter/shell/platform/tizen/touch_event_handler.h"
// State associated with the plugin registrar.
struct FlutterDesktopPluginRegistrar {
// The engine that owns this state object.
TizenEmbedderEngine* engine;
// The plugin texture registrar handle given to API clients.
std::unique_ptr<FlutterTextureRegistrar> texture_registrar;
};
// State associated with the messenger used to communicate with the engine.
struct FlutterDesktopMessenger {
// The engine that owns this state object.
TizenEmbedderEngine* engine = nullptr;
};
// Custom deleter for FlutterEngineAOTData.
// Lets unique_ptr release the AOT snapshot through the embedder API.
struct AOTDataDeleter {
void operator()(FlutterEngineAOTData aot_data) {
FlutterEngineCollectAOTData(aot_data);
}
};
// State associated with the texture registrar.
struct FlutterTextureRegistrar {
FLUTTER_API_SYMBOL(FlutterEngine) flutter_engine;
// The texture registrar managing external texture adapters, keyed by texture id.
std::map<int64_t, std::unique_ptr<ExternalTextureGL>> textures;
};
// Owning pointer for engine AOT data; collected via AOTDataDeleter.
using UniqueAotDataPtr = std::unique_ptr<_FlutterEngineAOTData, AOTDataDeleter>;
// Tizen device profile this engine instance is running on.
enum DeviceProfile { kUnknown, kMobile, kWearable, kTV };
// Manages state associated with the underlying FlutterEngine: renderer,
// platform channels, event handlers and the engine lifecycle.
class TizenEmbedderEngine : public TizenRenderer::Delegate {
 public:
explicit TizenEmbedderEngine(
const FlutterWindowProperties& window_properties);
virtual ~TizenEmbedderEngine();
// Starts/stops the underlying Flutter engine with the given properties.
bool RunEngine(const FlutterEngineProperties& engine_properties);
bool StopEngine();
// Returns the currently configured Plugin Registrar.
FlutterDesktopPluginRegistrarRef GetPluginRegistrar();
// Sets |callback| to be called when the plugin registrar is destroyed.
void SetPluginRegistrarDestructionCallback(
FlutterDesktopOnPluginRegistrarDestroyed callback);
// Notifies the engine of window metrics / orientation / locale changes.
void SendWindowMetrics(int32_t width, int32_t height, double pixel_ratio);
void SetWindowOrientation(int32_t degree);
void SendLocales();
// Application lifecycle notifications forwarded to the lifecycle channel.
void AppIsInactive();
void AppIsResumed();
void AppIsPaused();
void AppIsDetached();
// TizenRenderer::Delegate:
void OnRotationChange(int degree) override;
// The Flutter engine instance.
FLUTTER_API_SYMBOL(FlutterEngine) flutter_engine;
// The plugin messenger handle given to API clients.
std::unique_ptr<FlutterDesktopMessenger> messenger;
// Message dispatch manager for messages from the Flutter engine.
std::unique_ptr<flutter::IncomingMessageDispatcher> message_dispatcher;
// The interface between the Flutter rasterizer and the platform.
std::unique_ptr<TizenRenderer> tizen_renderer;
// The system channels for communicating between Flutter and the platform.
std::unique_ptr<KeyEventChannel> key_event_channel;
std::unique_ptr<LifecycleChannel> lifecycle_channel;
std::unique_ptr<LocalizationChannel> localization_channel;
std::unique_ptr<NavigationChannel> navigation_channel;
std::unique_ptr<PlatformChannel> platform_channel;
std::unique_ptr<SettingsChannel> settings_channel;
std::unique_ptr<TextInputChannel> text_input_channel;
std::unique_ptr<PlatformViewChannel> platform_view_channel;
// Device characteristics, fixed at construction.
const DeviceProfile device_profile;
const double device_dpi;
 private:
// Embedder OpenGL callbacks handed to the Flutter engine (user_data is the
// TizenEmbedderEngine instance).
static bool MakeContextCurrent(void* user_data);
static bool ClearContext(void* user_data);
static bool Present(void* user_data);
static bool MakeResourceCurrent(void* user_data);
static uint32_t GetActiveFbo(void* user_data);
static FlutterTransformation Transformation(void* user_data);
static void* GlProcResolver(void* user_data, const char* name);
static void OnFlutterPlatformMessage(
const FlutterPlatformMessage* engine_message, void* user_data);
static void OnVsyncCallback(void* user_data, intptr_t baton);
FlutterDesktopMessage ConvertToDesktopMessage(
const FlutterPlatformMessage& engine_message);
static bool OnAcquireExternalTexture(void* user_data, int64_t texture_id,
size_t width, size_t height,
FlutterOpenGLTexture* texture);
// The handlers listening to platform events.
std::unique_ptr<KeyEventHandler> key_event_handler_;
std::unique_ptr<TouchEventHandler> touch_event_handler_;
// The plugin registrar handle given to API clients.
std::unique_ptr<FlutterDesktopPluginRegistrar> plugin_registrar_;
// A callback to be called when the engine (and thus the plugin registrar)
// is being destroyed.
FlutterDesktopOnPluginRegistrarDestroyed
plugin_registrar_destruction_callback_{nullptr};
// The plugin registrar managing internal plugins.
std::unique_ptr<flutter::PluginRegistrar> internal_plugin_registrar_;
// The event loop for the main thread that allows for delayed task execution.
std::unique_ptr<TizenEventLoop> event_loop_;
// The vsync waiter for the embedder.
std::unique_ptr<TizenVsyncWaiter> tizen_vsync_waiter_;
// AOT data for this engine instance, if applicable.
UniqueAotDataPtr aot_data_;
// The current renderer transformation (e.g. applied on rotation change).
FlutterTransformation transformation_;
};
#endif // EMBEDDER_TIZEN_EMBEDDER_ENGINE_H_
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Interactive demo: run a pre-trained FER (facial expression recognition)
# model on one image and print the predicted emotion, with a fallback to the
# second-best guess if the user rejects the first.
from keras.models import load_model
import numpy as np
from img2str import create_data, get_face
# Class-index -> emotion-label mapping for the 7-class FER model.
emotion_code = {0:'Angry', 1:'Disgust', 2:'Fear', 3:'Happy', 4:'Sad', 5:'Surprise', 6:'Neutral'}
model = load_model("FER Train-72 Test-45.hdf5")
# create_data(get_face(...)) appears to yield space-separated pixel values for
# a 48x48 grayscale face crop — TODO confirm against img2str.
x = create_data(get_face("3.jpg"))
# NOTE(review): np.fromstring is deprecated for text parsing; consider
# np.array(x.split(), dtype=int) when this is next touched.
data = np.fromstring(x, dtype=int, sep=" ").reshape(1, 1, 48, 48)
prediction = model.predict(data)
p = prediction[0]  # per-class probabilities for the single input image
print(p)
m = max(p)
# Print the label of the highest-probability class.
print(emotion_code.get(np.where(p==m)[0][0]))
correction = input("Was my prediction correct?[y/n] ")
if correction == 'y':
    print("Done")
else:
    print("Trying again....")
    # Remove the top score, then look the runner-up's value back up in the
    # original array to recover its class index.
    new_arr = np.delete(p, np.where(p==m))
    nm = max(new_arr)
    print(emotion_code.get(np.where(p==nm)[0][0]))
|
#!/bin/bash -e
set -x
cd "`dirname "${BASH_SOURCE[0]}"`"
cd ..
GITENV_ROOT="`pwd`"
BUILD_DIR=_build_libcxx
# http://libcxx.llvm.org/
git submodule update --init llvm/libcxx
cd llvm/libcxx
git checkout master
git pull
cd "${GITENV_ROOT}/llvm"

# build_libcxx <build-type> <shared ON|OFF> [extra cmake args...]
# Configures, builds and installs one libc++ variant into ./_install.
# Refactor: the original repeated this configure+build block four times
# verbatim; the variants differ only in build type, shared flag and one
# extra cmake definition.
build_libcxx() {
    local build_type=$1
    local shared=$2
    shift 2
    rm -rf "${BUILD_DIR}"
    cmake -DCMAKE_CXX_COMPILER=clang++ \
        -DCMAKE_BUILD_TYPE="${build_type}" \
        -DCMAKE_VERBOSE_MAKEFILE=ON \
        -DLIBCXX_ENABLE_SHARED="${shared}" \
        "$@" \
        -DCMAKE_INSTALL_PREFIX="`pwd`/_install" \
        -Hlibcxx "-B${BUILD_DIR}"
    cmake --build "${BUILD_DIR}" --target install
}

# Build static release
build_libcxx Release OFF -DLIBCXX_ENABLE_ASSERTIONS=OFF
# Build static debug
build_libcxx Debug OFF -DCMAKE_DEBUG_POSTFIX=d
# Build shared release
build_libcxx Release ON -DLIBCXX_ENABLE_ASSERTIONS=OFF
# Build shared debug
build_libcxx Debug ON -DCMAKE_DEBUG_POSTFIX=d
|
// Cypress end-to-end test of the OAuth2 authorization-code flow:
// login -> consent (offline + offline_access scopes) -> redirect back to the
// client, which prints the token response inside a <pre> element.
describe('Authorization Code Test', () => {
  it('It should be redirected to the login page', () => {
    cy.visit({url: '/test-connect/authorization-code'} )
    cy.url().should('contains', '/login?login_challenge');
    // Placeholder credentials — injected at build time, not real values.
    cy.get('#email').type('<EMAIL>');
    cy.get('#password').type('<PASSWORD>');
    cy.get('#accept').click();
    cy.url().should('contains', '/consent?consent_challenge');
    // Grant both requested scopes on the consent screen.
    cy.get('#offline').click();
    cy.get('#offline_access').click();
    cy.get('#accept').click();
    cy.url().should('contains', '/test-connect/authorization-code');
    // The client page renders the JSON token response; verify its shape.
    cy.get('pre').then(($pre) => {
      const json = JSON.parse($pre.text());
      expect(json).to.have.property('accessToken');
      expect(json.accessToken).to.have.property('scope', 'offline offline_access') // true
      expect(json.accessToken).to.have.property('token_type', 'bearer') // true
      expect(json.accessToken).to.have.property('access_token') // true
      expect(json.accessToken.access_token).to.be.a('string')
      expect(json.accessToken).to.have.property('expires') // true
      //expect(json.accessToken.expires).to.be.a('string')
      expect(json).to.have.property('resourceOwner');
    })
  });
})
|
<gh_stars>0
package main
import (
"bytes"
"flag"
"io/ioutil"
"net/http"
"os"
"os/signal"
"text/template"
"time"
"github.com/odwrtw/transmission"
"github.com/rs/xlog"
)
var (
transmissionURL = flag.String("transmission-url", "http://localhost:9091/transmission/rpc", "The URL of the transmission RPC client")
removeStalled   = flag.Bool("remove-stalled", false, "Remove stalled torrents")
removeFinished  = flag.Bool("remove-finished", false, "Remove finished torrents")
removeTemplate  = flag.String("remove-template", "", "A text/template file that is passed a torrent `t` and if evaluates to `true` will remove the torrent")
cycles          = flag.Int("cycles", 5, "How many cycles a torrent should maintain stalled or finished cycle before being removed?")
timeout         = flag.Duration("timeout", 30*time.Second, "How long to wait between cycles")
// tconn is the shared transmission RPC client, set up in main.
tconn *transmission.Client
// tstrikes counts, per torrent ID, how many consecutive-ish cycles the
// torrent was seen "finished" or "stalled" (keys of the inner map).
tstrikes = make(map[int]map[string]int)
// rt is the optional user-supplied removal template (nil when unset).
rt *template.Template
// l is the process-wide logger.
l xlog.Logger
)
// main parses flags, loads the optional removal template, connects to the
// transmission RPC endpoint, then runs cleanup cycles on a ticker until the
// process receives an interrupt signal.
func main() {
	var err error
	flag.Parse()
	l = xlog.New(xlog.Config{
		Output: xlog.NewConsoleOutput(),
	})
	if *removeTemplate != "" {
		rtc, err := ioutil.ReadFile(*removeTemplate)
		if err != nil {
			// Typo fix: "filea" -> "file".
			l.Fatalf("error reading the file %q: %s", *removeTemplate, err)
		}
		rt = template.Must(template.New("remove-template").Parse(string(rtc)))
	}
	tconn, err = transmission.New(transmission.Config{
		Address:    *transmissionURL,
		HTTPClient: &http.Client{Timeout: 10 * time.Second},
	})
	if err != nil {
		// Grammar fix: "error connection" -> "error connecting".
		l.Fatalf("error connecting to transmission: %s", err)
	}
	// Run a cycle every *timeout until interrupted.
	signalC := make(chan os.Signal, 1)
	signal.Notify(signalC, os.Interrupt)
	timeoutTicker := time.NewTicker(*timeout)
	for {
		select {
		case <-signalC:
			timeoutTicker.Stop()
			return
		case <-timeoutTicker.C:
			cycle()
		}
	}
}
// cycle fetches the torrent list once, increments per-torrent "strike"
// counters for finished/stalled torrents, removes any torrent that has kept a
// state for *cycles cycles (when the matching flag or template allows it),
// and garbage-collects counters for torrents that no longer exist.
func cycle() {
	tseen := make(map[int]bool)
	ts, err := tconn.GetTorrents()
	// Bug fix: the error was previously inspected only AFTER ts had already
	// been used for logging; check it immediately after the RPC call.
	if err != nil {
		l.Errorf("error getting the torrents: %s", err)
		return
	}
	l.SetField("tstrikes-map-length", len(tstrikes))
	l.SetField("transmission-num-torrents", len(ts))
	l.Info("running a cycle")
	// search and remove finished and stalled torrents
	for _, t := range ts {
		// mark the torrent as seen
		tseen[t.ID] = true
		// make sure we have it in the strikes map
		if tstrikes[t.ID] == nil {
			tstrikes[t.ID] = make(map[string]int)
		}
		// is it finished?
		if t.IsFinished {
			tstrikes[t.ID]["finished"]++
		}
		// is it stalled?
		if t.IsStalled {
			tstrikes[t.ID]["stalled"]++
		}
		// has this torrent been marked as finished for *cycles?
		if tstrikes[t.ID]["finished"] >= *cycles && (*removeFinished || removeTemplateTrue(t)) {
			l.Infof("The torrent %s has been finished for %d cycles and will be removed. Data removed: %t", t.Name, tstrikes[t.ID]["finished"], t.LeftUntilDone > 0)
			// NOTE(review): RemoveTorrents' result is ignored here, as in the
			// original; consider logging a returned error.
			tconn.RemoveTorrents([]*transmission.Torrent{t}, t.LeftUntilDone > 0)
		}
		// has this torrent been marked as stalled for *cycles?
		if tstrikes[t.ID]["stalled"] >= *cycles && (*removeStalled || removeTemplateTrue(t)) {
			l.Infof("The torrent %s has been stalled for %d cycles and will be removed. Data removed: %t", t.Name, tstrikes[t.ID]["stalled"], t.LeftUntilDone > 0)
			tconn.RemoveTorrents([]*transmission.Torrent{t}, t.LeftUntilDone > 0)
		}
	}
	// Garbage-collect strike entries for torrents that disappeared.
	for id := range tstrikes {
		if !tseen[id] {
			delete(tstrikes, id)
		}
	}
}
// removeTemplateTrue reports whether the user-supplied removal template
// evaluates to the literal string "true" for the given torrent. It returns
// false when no template is configured or when execution fails.
func removeTemplateTrue(t *transmission.Torrent) bool {
	if rt == nil {
		return false
	}
	var out bytes.Buffer
	if err := rt.Execute(&out, t); err != nil {
		l.Errorf("error executing the remove template: %s", err)
		return false
	}
	matched := out.String() == "true"
	if matched {
		l.Infof("The template has evaluated to true for the torrent %q", t.Name)
	}
	return matched
}
|
<filename>servico/api/usuarioAutenticacao/usuarioServico.js
var passwordHash = require('password-hash'),
jwt = require('jsonwebtoken');
exports.gerarToken = function (configuracao, usuario) {
var usuarioCopia = {
nome: usuario.nome,
email: usuario.email,
linguagem: usuario.linguagem,
privilegio: usuario.privilegio
};
return jwt.sign(usuarioCopia, configuracao.SECRET, {
expiteInMinutes: 60 *5
});
};
exports.validarSenha = function (senha, senhaInformada) {
return passwordHash.verify(senhaInformada, senha);
};
exports.gerarSenha = function (senha) {
return passwordHash.generate(senha);
};
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# Expects PLACEHOLDER, WIDTH, HEIGHT and PREFIX to be set in the environment
# before running (unset variables expand to empty strings in the page).
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. subhra.
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
#!/usr/bin/env bash
# CI task: derive BOSH Acceptance Test (BATs) settings from an AWS
# CloudFormation stack, generate the deployment spec, run the BATs suite and
# clean the director up afterwards.
set -e
# Shared helpers: check_param / get_stack_info / get_stack_info_of.
source bosh-src/ci/pipelines/aws-bats/tasks/utils.sh
# Fail fast when a required pipeline parameter is missing.
check_param base_os
check_param aws_access_key_id
check_param aws_secret_access_key
check_param region_name
check_param stack_name
check_param BAT_VCAP_PASSWORD
check_param BAT_STEMCELL_NAME
source /etc/profile.d/chruby.sh
chruby 2.1.2
export AWS_ACCESS_KEY_ID=${aws_access_key_id}
export AWS_SECRET_ACCESS_KEY=${aws_secret_access_key}
export AWS_DEFAULT_REGION=${region_name}
# Pull network/host facts out of the CloudFormation stack outputs, keyed by
# the OS-specific prefix.
stack_info=$(get_stack_info $stack_name)
stack_prefix=${base_os}
DIRECTOR=$(get_stack_info_of "${stack_info}" "${stack_prefix}DirectorEIP")
VIP=$(get_stack_info_of "${stack_info}" "${stack_prefix}BATsEIP")
SUBNET_ID=$(get_stack_info_of "${stack_info}" "${stack_prefix}SubnetID")
sg_id=$(get_stack_info_of "${stack_info}" "${stack_prefix}SecurityGroupID")
SECURITY_GROUP_NAME=$(aws ec2 describe-security-groups --group-ids ${sg_id} | jq -r '.SecurityGroups[] .GroupName')
AVAILABILITY_ZONE=$(get_stack_info_of "${stack_info}" "${stack_prefix}AvailabilityZone")
BAT_NETWORK_CIDR=$(get_stack_info_of "${stack_info}" "${stack_prefix}CIDR")
BAT_NETWORK_GATEWAY=$(get_stack_info_of "${stack_info}" "${stack_prefix}Gateway")
BAT_NETWORK_RESERVED_RANGE=$(get_stack_info_of "${stack_info}" "${stack_prefix}ReservedRange")
BAT_NETWORK_STATIC_RANGE=$(get_stack_info_of "${stack_info}" "${stack_prefix}StaticRange")
BAT_NETWORK_STATIC_IP=$(get_stack_info_of "${stack_info}" "${stack_prefix}StaticIP1")
BAT_SECOND_STATIC_IP=$(get_stack_info_of "${stack_info}" "${stack_prefix}StaticIP2")
# Load the deployment key into an ssh agent so BATs can reach the VMs.
eval $(ssh-agent)
private_key=${PWD}/setup-director-output/deployment/bats.pem
ssh-add ${private_key}
# Environment consumed by the BATs suite.
export BAT_DIRECTOR=$DIRECTOR
export BAT_DNS_HOST=$DIRECTOR
export BAT_STEMCELL="${PWD}/stemcell/stemcell.tgz"
export BAT_DEPLOYMENT_SPEC="${PWD}/${base_os}-bats-config.yml"
export BAT_INFRASTRUCTURE=aws
export BAT_NETWORKING=manual
export BAT_VIP=$VIP
export BAT_SUBNET_ID=$SUBNET_ID
export BAT_SECURITY_GROUP_NAME=$SECURITY_GROUP_NAME
export BAT_VCAP_PRIVATE_KEY=${private_key}
bosh -n target $BAT_DIRECTOR
# Render the BATs deployment spec from the values gathered above.
cat > "${BAT_DEPLOYMENT_SPEC}" <<EOF
---
cpi: aws
properties:
  vip: $BAT_VIP
  second_static_ip: $BAT_SECOND_STATIC_IP
  uuid: $(bosh status --uuid)
  pool_size: 1
  stemcell:
    name: ${BAT_STEMCELL_NAME}
    version: latest
  instances: 1
  key_name: bats
  networks:
  - name: default
    static_ip: $BAT_NETWORK_STATIC_IP
    type: manual
    cidr: $BAT_NETWORK_CIDR
    reserved: [$BAT_NETWORK_RESERVED_RANGE]
    static: [$BAT_NETWORK_STATIC_RANGE]
    gateway: $BAT_NETWORK_GATEWAY
    subnet: $BAT_SUBNET_ID
    security_groups: [$BAT_SECURITY_GROUP_NAME]
EOF
# Run the suite, then log in and clean up everything on the director.
cd bats
./write_gemfile
bundle install
bundle exec rspec spec
bosh -t $BAT_DIRECTOR login admin admin
bosh -n -t $BAT_DIRECTOR cleanup --all
|
def is_prime(n):
    """Return True when ``n`` is prime, using 6k±1 trial division.

    Every prime greater than 3 has the form 6k-1 or 6k+1, so after ruling
    out multiples of 2 and 3 only candidates ``d`` and ``d + 2`` for
    d = 5, 11, 17, ... need testing, up to sqrt(n).
    """
    if n <= 1:
        return False
    if n <= 3:
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    divisor = 5
    while divisor * divisor <= n:
        if n % divisor == 0:
            return False
        if n % (divisor + 2) == 0:
            return False
        divisor += 6
    return True
|
# Download one family's BAM/BAI files from S3, symlink them under mapped
# sample IDs, then run the wham SV-calling snakemake pipeline.
family=$1
#family='11388'
echo "$family" > FAMILY
aws s3 sync s3://simonsphase3/software/bitbucket/ .
mkdir bam
mkdir family_bam/
cd bam
# List the whole bucket once, then build per-file download commands for this
# family's BAM and BAI objects (skipping md5 sidecar files).
aws s3 ls --recursive s3://sscwgs > bucket_contents
grep "$family"/BAM bucket_contents | grep 'bam' | grep -v 'md5' | awk '{FS=" "; print "aws s3 cp s3://sscwgs/"$4" ."}' > download.sh
grep "$family"/BAM bucket_contents | grep 'bai' | grep -v 'md5' | awk '{FS=" "; print "aws s3 cp s3://sscwgs/"$4" ."}' >> download.sh
# One download per background job; NOTE(review): the fixed 2h sleep assumes
# all transfers finish in time — `wait` would be more reliable.
split -l1 download.sh
for i in x*; do nohup sh "$i" & done
sleep 7200
cd ../family_bam/
# Symlink downloaded files from internal IDs (col 2) to sample IDs (col 1).
awk '{FS="\t"; print "ln -s ../bam/"$2".final.bam "$1".final.bam"}' ../genus/manifests/aws_bucket_stats/sample_file/540families_idmapping | sh
awk '{FS="\t"; print "ln -s ../bam/"$2".final.bai "$1".final.bai"}' ../genus/manifests/aws_bucket_stats/sample_file/540families_idmapping | sh
# Remove dangling symlinks (targets that were never downloaded).
find -L . -name . -o -type d -prune -o -type l -exec rm {} +
cd ../genus/aws/wham_auto/
sed -i 's:/mnt/family_bam:/home/ec2-user/family_bam:g' config.json
snakemake -s Snakefile_no_genotyping -j 8 -k -w 30 --rerun-incomplete
echo 'ALL DONE NOW!'
|
module Clarke
  module Slack
    # Immutable value object for a Slack interactive-message action:
    # a simple name/value pair taken from an incoming payload.
    class Action
      attr_reader :name, :value

      def initialize(name, value)
        @name = name
        @value = value
      end
    end
  end
end
|
#!/usr/bin/python -w
import datetime
import math
import sys
import os
def format_convert(phaseinput,phaseoutput,nrms,ngap,nres,maxdep,maxdeperr,maxdiserr):
    """Convert a hypoinverse archive file to hypoDD phase-input format.

    Event header lines (fixed-width, 180 chars incl. newline — confirm against
    the hypoinverse archive spec) are filtered by rms, azimuthal gap, depth and
    location-uncertainty thresholds; pick lines (121 chars) following an
    accepted event are written as P/S picks when their residual is within
    ``nres`` times the event rms.

    Parameters (all thresholds are coerced to numbers below):
        phaseinput  -- path to the hypoinverse archive file (e.g. hypoOut.arc)
        phaseoutput -- path of the hypoDD phase file to write (e.g. hypoDD.pha)
        nrms        -- max event rms (s)
        ngap        -- max azimuthal gap (deg)
        nres        -- pick residual limit, in multiples of the event rms
        maxdep      -- max event depth (km)
        maxdeperr   -- max vertical uncertainty (km)
        maxdiserr   -- max horizontal uncertainty (km)
    """
    g = open(phaseoutput, 'w')
    nn = 0  # running event counter, written as the hypoDD event ID
    nres = float(nres)
    nrms = float(nrms)
    ngap = int(ngap)
    maxdep = float(maxdep)
    maxdeperr = float(maxdeperr)
    maxdiserr = float(maxdiserr)
    # Bug fix: iok must exist before the loop — if the first line of the file
    # is not a 180-char event header, the pick branch read iok before any
    # assignment and raised NameError. 0 means "no accepted event yet".
    iok = 0
    with open(phaseinput, "r") as f:
        for line in f:
            if (len(line) == 180):
                iok = 0
                RMS = float(line[48:52]) / 100
                gap = int(line[42:45])
                dep = float(line[31:36])/100
                EZ = float(line[89:93])/100  # vertical uncertainty (km)
                EH = float(line[85:89])/100  # horizontal uncertainty (km)
                if RMS <= nrms and gap <= ngap and dep <= maxdep and EZ <= maxdeperr and EH <= maxdiserr:
                    nn = nn + 1
                    year = int(line[0:4])
                    mon = int(line[4:6])
                    day = int(line[6:8])
                    hour = int(line[8:10])
                    min = int(line[10:12])  # NOTE: shadows builtin min() inside this function
                    sec = int(line[12:16])/100
                    # Lat/lon are stored as degrees + decimal minutes; the
                    # hemisphere column selects the sign.
                    if line[18] == ' ': #N
                        lat = (float(line[16:18]) + float(line[19:23]) / 6000)
                    else:
                        lat = float(line[16:18]) + float(line[19:23])/6000 * (-1)
                    if line[26] == 'E':
                        lon = (float(line[23:26]) + float(line[27:31]) / 6000)
                    else:
                        lon = (float(line[23:26]) + float(line[27:31]) / 6000) * (-1)
                    mag = float(line[123:126])/100
                    g.write(
                        '# {:4d} {:2d} {:2d} {:2d} {:2d} {:5.2f} {:7.4f} {:9.4f} {:5.2f} {:5.2f} {:5.2f} {:5.2f} {:5.2f} {:9d}\n'.format(
                            year, mon, day, hour, min, sec, lat, lon, dep, mag, EH, EZ, RMS, nn))
                    iok = 1
            else:
                # Pick line belonging to the most recent accepted event.
                if (iok == 1 and len(line) == 121):
                    station = line[0:5]
                    net = line[5:7]
                    year1 = int(line[17:21])
                    mon1 = int(line[21:23])
                    day1 = int(line[23:25])
                    hour1 = int(line[25:27])
                    min1 = int(line[27:29])
                    # Only accept picks stamped in the same minute as the event.
                    if year1 == year and mon1 == mon and day1 == day and hour1 == hour and min1 == min:
                        sec_p = sec  # fallback so the S check below is defined
                        if line[13:15] == ' P' or line[13:15] == 'IP':
                            P_residual = abs(int(line[34:38]) / 100)
                            sec_p = int(line[29:34]) / 100
                            if sec_p > sec and P_residual <= nres*RMS:
                                ppick = sec_p-sec
                                g.write('{:<5s} {:8.3f} 1.000 P\n'.format(station, ppick))
                                # g.write('{}.{} {:8.3f} 1.000 P\n'.format(net, station, ppick))
                        if line[46:48] == ' S' or line[46:48] == 'ES':
                            S_residual = abs(int(line[50:54]) / 100)
                            sec_s = int(line[41:46]) / 100
                            # S must arrive after P (or after origin when no P).
                            if sec_s > sec_p and S_residual <= nres * RMS:
                                spick = sec_s-sec
                                g.write('{:<5s} {:8.3f} 1.000 S\n'.format(station, spick))
                                # g.write('{}.{} {:8.3f} 1.000 S\n'.format(net, station, spick))
    g.close()
if __name__ == '__main__':
    # CLI mode is currently disabled in favor of hard-coded parameters below.
    # if len(sys.argv) != 9:
    #     print('hypoinverse2hypoDD.py hypoOut.arc hypoDD.pha rms_threshold gap_threshold pick_rms maxdep maxdeperr maxdiserr')
    #     sys.exit()
    # format_convert(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4],sys.argv[5],sys.argv[6],sys.argv[7],sys.argv[8])
    input_file = 'hypoOut.arc'   # phase file output by hypoinverse
    output_file = 'hypoDD.pha'   # input phase file for hypoDD
    rms_threshold=0.5 # in sec, events with rms larger than this will not be used
    gap_threshold=300 # in deg., events with station gap larger than this will not be used
    pick_nres=3 # if pick's residual larger than nres times event's rms
                # the pick will not be used
    maxdep=20 # in km, events with larger depth will not be used (< dep in the timetable)
    maxdep_err=5 # in km, events with larger depth uncertainty will not be used
    maxdis_err=5 # in km, events with larger horizontal uncertainty will not be used
    format_convert(input_file, output_file, rms_threshold, gap_threshold, pick_nres, maxdep, maxdep_err, maxdis_err)
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver import ActionChains as Action
from selenium.webdriver.common.by import By
import time
|
<gh_stars>100-1000
# Test vector script for binascii.hexlify (MicroPython test-suite style).
# Prefer MicroPython's ubinascii; fall back to CPython's binascii.
try:
    try:
        import ubinascii as binascii
    except ImportError:
        import binascii
except ImportError:
    # Neither module exists on this port: tell the test runner to skip.
    print("SKIP")
    raise SystemExit
# Exercise hexlify over low bytes, high bytes and plain ASCII.
print(binascii.hexlify(b'\x00\x01\x02\x03\x04\x05\x06\x07'))
print(binascii.hexlify(b'\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'))
print(binascii.hexlify(b'\x7f\x80\xff'))
print(binascii.hexlify(b'1234ABCDabcd'))
|
<reponame>wade-r/nerf<gh_stars>0
package com.ireul.nerf.schedule;
import com.ireul.nerf.application.Application;
import org.quartz.*;
import org.quartz.impl.DirectSchedulerFactory;
import org.quartz.impl.StdSchedulerFactory;
import org.quartz.utils.Key;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.HashMap;
import static org.quartz.CronScheduleBuilder.cronSchedule;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;
/**
 * Wires the Quartz scheduler into an {@link Application}: creates a scheduler
 * (an in-memory volatile one by default, or one built from a Quartz properties
 * file), scans the application for {@link Schedule}-annotated jobs and
 * registers them with interval- or cron-based triggers.
 *
 * <p><b>Most people may subclass {@link BaseJob} rather than implements {@link Job}</b></p>
 */
public class ScheduleContext {

    /** Setup option key naming an optional Quartz properties file. */
    private static final String OPTION_QUARTZ_CONFIG = "quartz-config";

    private final Logger logger = LoggerFactory.getLogger(ScheduleContext.class);

    private Application application;

    private Scheduler scheduler;

    public ScheduleContext(Application application) {
        this.application = application;
    }

    /**
     * Creates the scheduler, installs the injection listener and schedules all
     * annotated jobs. Exits the process on unrecoverable scheduler errors.
     *
     * @param options setup options; {@code quartz-config} selects a Quartz
     *                properties file (omit it for an in-memory volatile scheduler)
     */
    public void setup(HashMap<String, String> options) {
        String configFile = options.get(OPTION_QUARTZ_CONFIG);
        // setup scheduler
        if (configFile == null) {
            try {
                DirectSchedulerFactory.getInstance().createVolatileScheduler(10);
                this.scheduler = DirectSchedulerFactory.getInstance().getScheduler();
            } catch (SchedulerException e) {
                logger.error("Cannot create simple scheduler", e);
                System.exit(1);
            }
        } else {
            try {
                this.scheduler = new StdSchedulerFactory(configFile).getScheduler();
            } catch (SchedulerException e) {
                logger.error("Cannot create standard scheduler", e);
                System.exit(1);
            }
        }
        // add InjectionListener so each job receives the Application instance
        try {
            this.scheduler.getListenerManager().addJobListener(new InjectionListener());
        } catch (SchedulerException e) {
            // Bug fix: the exception was previously dropped from this log call.
            logger.error("Cannot add InjectionListener to scheduler", e);
            System.exit(1);
        }
        // scan for @Schedule-annotated job classes and schedule each of them
        ScheduleUtils.findJobs(this.application.getClass()).forEach(caa -> {
            Schedule schedule = caa.getAnnotation();
            JobDetail job = newJob(caa.getClassType())
                    .withIdentity(Key.createUniqueName(schedule.group()))
                    .requestRecovery(schedule.recovery())
                    .withDescription(schedule.desc())
                    .build();
            Trigger trigger = null;
            if (schedule.interval() > 0) {
                if (schedule.cron().length() > 0) {
                    // Message fix: interval() wins when both are set; the old
                    // text misleadingly said "schedule() will be used".
                    logger.error(
                            "Both interval() and cron() found from @Schedule, interval() will be used: "
                                    + caa.getClassType().getCanonicalName()
                    );
                }
                trigger = newTrigger()
                        .startAt(new Date(System.currentTimeMillis() + schedule.delay() * 1000))
                        .withSchedule(
                                simpleSchedule()
                                        .withIntervalInSeconds((int) schedule.interval())
                                        .repeatForever()
                        )
                        .build();
            } else if (schedule.cron().length() > 0) {
                trigger = newTrigger()
                        .startNow()
                        .withSchedule(cronSchedule(schedule.cron()))
                        .build();
            } else {
                logger.error(
                        "No interval() and cron() found from @Schedule on class "
                                + caa.getClassType().getCanonicalName()
                );
            }
            if (trigger != null) {
                try {
                    this.scheduler.scheduleJob(job, trigger);
                } catch (SchedulerException e) {
                    logger.error("Cannot add Job for " + caa.getClassType().getCanonicalName(), e);
                }
            }
        });
    }

    /** Starts the scheduler; exits the process on failure. */
    public void start() {
        try {
            this.scheduler.start();
        } catch (SchedulerException e) {
            // Fix: log instead of printStackTrace so the failure reaches the logs.
            logger.error("Cannot start scheduler", e);
            System.exit(1);
        }
    }

    /** Shuts the scheduler down if it was created; exits the process on failure. */
    public void stop() {
        if (this.scheduler != null) {
            try {
                this.scheduler.shutdown();
            } catch (SchedulerException e) {
                // Fix: log instead of printStackTrace so the failure reaches the logs.
                logger.error("Cannot shut scheduler down", e);
                System.exit(1);
            }
        }
    }

    /** @return the owning application. */
    public Application application() {
        return this.application;
    }

    /**
     * Puts the owning {@link Application} into every job's execution context
     * (under {@link BaseJob#kINJECTOR}) just before the job runs.
     */
    private class InjectionListener implements JobListener {
        @Override
        public String getName() {
            return "InjectionListener";
        }

        @Override
        public void jobToBeExecuted(JobExecutionContext context) {
            context.put(BaseJob.kINJECTOR, application());
        }

        @Override
        public void jobExecutionVetoed(JobExecutionContext context) {
        }

        @Override
        public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
        }
    }
}
|
<gh_stars>10-100
# frozen_string_literal: true

module Neo4j
  module Driver
    module Internal
      # Immutable host/port pair identifying a Bolt server endpoint.
      class BoltServerAddress
        include Net::ServerAddress

        attr_reader :host, :port

        def initialize(host, port)
          @host = host
          @port = port
        end
      end
    end
  end
end
|
<filename>template/packages.go
/*
* Bulldozer Framework
* Copyright (C) DesertBit
*/
package template
import (
"github.com/desertbit/bulldozer/log"
"reflect"
"strings"
)
const (
	// MustMethodPrefix marks package methods exposed to templates as "must" functions.
	MustMethodPrefix = "Must"
)

const (
	// Outcome of a must-function run: keep going, fail, or redirect.
	actionContinue int = 1 << iota
	actionError    int = 1 << iota
	actionRedirect int = 1 << iota
)

var (
	// packages maps package name -> registered package value.
	packages map[string]interface{} = make(map[string]interface{})
	// mustFuncs maps "package.FuncName" -> prepared reflection call.
	mustFuncs map[string]*mustFunc = make(map[string]*mustFunc)
)
//#############//
//### Types ###//
//#############//

// mustFunc holds a reflected method plus its receiver so it can be invoked later.
type mustFunc struct {
	receiver reflect.Value
	method   reflect.Value
}

// Action carries the outcome of a must-function call: one of the action*
// constants plus an optional payload (error message or redirect URL).
type Action struct {
	action int
	data   string
}
// newAction returns an Action initialized to "continue processing".
func newAction() *Action {
	a := &Action{}
	a.action = actionContinue
	return a
}
// Error aborts processing and records the given error message.
func (a *Action) Error(errorMessage string) {
	a.action = actionError
	a.data = errorMessage
}

// Redirect aborts processing and requests a redirect to the given URL.
func (a *Action) Redirect(url string) {
	a.action = actionRedirect
	a.data = url
}
//##############//
//### Public ###//
//##############//

// RegisterPackage registeres a new template package.
// This call is not thread-safe! Register packages during program initialization.
// A template package function has the following syntax:
// func (p *Package) MustIsAuth(a *template.Action, c *template.Context) {}
func RegisterPackage(name string, i interface{}) {
	// Log an error message if a package is overwritten,
	_, ok := packages[name]
	if ok {
		// Typo fix: "overwritting" -> "overwriting".
		log.L.Error("template: RegisterPackage: overwriting already present package: '%s'", name)
	}
	// Add the package to the packages map.
	packages[name] = i
	// Dummy values, used only for reflection type comparisons below.
	dummyAction := new(Action)
	dummyContext := new(Context)
	// Find and register all must functions.
	iType := reflect.TypeOf(i)
	for x := 0; x < iType.NumMethod(); x++ {
		method := iType.Method(x)
		funcName := method.Name
		// Skip this method if it does not start with the method prefix.
		if !strings.HasPrefix(funcName, MustMethodPrefix) {
			continue
		}
		// Trim the prefix from the name.
		funcName = strings.TrimPrefix(funcName, MustMethodPrefix)
		// Create the function key, e.g. "auth.IsAuth".
		key := name + "." + funcName
		// Get the function and the type of the function.
		f := method.Func
		t := f.Type()
		// Check the signature: (receiver, *Action, *Context).
		if t.NumIn() != 3 ||
			reflect.TypeOf(i) != t.In(0) ||
			reflect.TypeOf(dummyAction) != t.In(1) ||
			reflect.TypeOf(dummyContext) != t.In(2) {
			// Message fix: the two parameters AFTER the receiver must be the
			// Action and Context pointers (the old text said "first two").
			log.L.Error("must function '%s': invalid function parameters! The parameters after the receiver have to be an Action and Context pointer.", key)
			continue
		}
		// Create the mustFunc value.
		m := &mustFunc{
			receiver: reflect.ValueOf(i),
			method:   f,
		}
		// Add it to the map.
		mustFuncs[key] = m
	}
}
//###############//
//### Private ###//
//###############//

// callMustFuncs invokes every must function attached to the template with the
// given context. It returns nil when the template has no must functions, and
// otherwise the resulting Action (stopping early on the first non-continue).
func (t *Template) callMustFuncs(c *Context) (action *Action) {
	if t.mustFuncs == nil || len(t.mustFuncs) == 0 {
		return nil
	}
	// Create a new action.
	action = newAction()
	// Create the parameters slice: receiver, *Action, *Context.
	in := make([]reflect.Value, 3)
	// Fill the parameters slice.
	in[1] = reflect.ValueOf(action)
	in[2] = reflect.ValueOf(c)
	// Iterate through all must functions.
	for _, f := range t.mustFuncs {
		// Set the receiver.
		in[0] = f.receiver
		// Call the method.
		f.method.Call(in)
		// Check if a stop is requested.
		if action.action != actionContinue {
			return
		}
	}
	return
}
|
<gh_stars>1-10
// Barrel module: re-export the public API of every geometry/plot sub-module
// so consumers can import everything from this single entry point.
export * from "./axis.js";
export * from "./face.js";
export * from "./grid.js";
export * from "./line.js";
export * from "./point.js";
export * from "./strip.js";
export * from "./surface.js";
export * from "./ticks.js";
export * from "./vector.js";
|
#!/bin/bash
set -x
CLOUD_PLATFORM="AZURE"
START_LABEL=98
PLATFORM_DISK_PREFIX=sd
# Appends a temporary SSH public key to the cloudbreak user's authorized_keys,
# delimited by #tmpssh_start/#tmpssh_end markers so the entry can be located
# and removed later.
setup_tmp_ssh() {
  echo "#tmpssh_start" >> /home/cloudbreak/.ssh/authorized_keys
  echo "ssh-rsa test" >> /home/cloudbreak/.ssh/authorized_keys
  echo "#tmpssh_end" >> /home/cloudbreak/.ssh/authorized_keys
}
# Prints the IPv4 address of eth0.
# NOTE(review): the awk pattern '/inet addr/' matches legacy net-tools
# ifconfig output; newer ifconfig prints 'inet X.X.X.X' without 'addr:' —
# confirm the base image ships the legacy tool.
get_ip() {
  ifconfig eth0 | awk '/inet addr/{print substr($2,6)}'
}
# Removes any /etc/hosts entry containing this host's eth0 IP so the hostname
# is resolved dynamically instead of via a stale static entry.
fix_hostname() {
  # Capture the IP once instead of invoking get_ip twice, and quote it so an
  # empty result cannot collapse the grep/sed argument lists.
  local ip
  ip=$(get_ip)
  if grep -q "$ip" /etc/hosts ;then
    sed -i "/$ip/d" /etc/hosts
  else
    echo OK
  fi
}
# Grows the root partition and XFS filesystem to fill the disk.
extend_rootfs() {
  # Usable on GCP, does not harm anywhere else
  # Derive the disk device from the root mount by stripping the partition
  # number, then grow partition 1 and the filesystem on /.
  root_fs_device=$(mount | grep ' / ' | cut -d' ' -f 1 | sed s/1//g)
  growpart $root_fs_device 1
  xfs_growfs /
}
# On Azure hosts that have an ephemeral volume mounted at /mnt, moves the
# docker data directory onto it, leaving a symlink plus a marker file behind.
relocate_docker() {
  if [[ $CLOUD_PLATFORM == AZURE* ]] && [ -n "$(mount | grep ' /mnt ')" ]; then
    touch /var/docker-relocate
    mv /var/lib/docker /mnt/docker
    ln -s /mnt/docker /var/lib/docker
  fi
}
# Formats and mounts every attached data disk (up to 24) as ext4 under
# /hadoopfs/fs<i>, replacing any previous fstab entry for the device, then
# pre-creates the log directories on the first filesystem.
format_disks() {
  # -p makes the function safe to re-run if the directory already exists.
  mkdir -p /hadoopfs
  for (( i=1; i<=24; i++ )); do
    # START_LABEL=98 is ASCII 'b'; disk i therefore maps to sd{c,d,e,...}.
    LABEL=$(printf "\x$(printf %x $((START_LABEL+i)))")
    DEVICE=/dev/${PLATFORM_DISK_PREFIX}${LABEL}
    if [ -e "$DEVICE" ]; then
      # If the device already has an fstab entry, unmount it and drop the
      # entry before reformatting.
      MOUNTPOINT=$(grep "$DEVICE" /etc/fstab | tr -s ' \t' ' ' | cut -d' ' -f 2)
      if [ -n "$MOUNTPOINT" ]; then
        umount "$MOUNTPOINT"
        sed -i "\|^$DEVICE|d" /etc/fstab
      fi
      mkfs -E lazy_itable_init=1 -O uninit_bg -F -t ext4 "$DEVICE"
      mkdir -p /hadoopfs/fs${i}
      echo $DEVICE /hadoopfs/fs${i} ext4 defaults,noatime 0 2 >> /etc/fstab
      mount /hadoopfs/fs${i}
      chmod 777 /hadoopfs/fs${i}
    fi
  done
  cd /hadoopfs/fs1 && mkdir -p logs logs/ambari-server logs/ambari-agent logs/consul-watch logs/kerberos
}
# Works around stuck LVM/udev cookies: writes a watcher script to
# /tmp/cookie.sh and launches it in the background. Until the final container
# (logrotate) shows up in 'docker ps', the watcher releases any udev cookie
# older than TIMEOUT minutes via 'dmsetup udevcomplete'.
# The heredoc is quoted ("EOF") so no expansion happens here; the script body
# below is written to disk verbatim.
release_udev_cookie() {
cat>/tmp/cookie.sh<<"EOF"
: ${LOGFILE:=/var/log/cookie.log}
: ${LAST_CONTAINER:=logrotate}
: ${TIMEOUT:=2}
echo "Cookie script started at $(date)" >> $LOGFILE
while [ $(docker ps 2>/dev/null | grep $LAST_CONTAINER -c) -eq 0 ]; do
dmsetup udevcookies | grep -v Semid | while read line; do
COOKIE=$(echo $line|cut -f 1 -d ' ')
COOKIE_UPDATE=$(echo $line | awk '{print $4,$5,$6,$7,$8}')
ELAPSED_SEC=$((`date +%s`-`date -d "$COOKIE_UPDATE" +%s`))
ELAPSED_MIN=$((ELAPSED_SEC/60))
echo "Elapsed time for cookie: $COOKIE is: $ELAPSED_MIN min" >> $LOGFILE
if [ $ELAPSED_MIN -gt $TIMEOUT ]; then
echo "Cookie ($COOKIE) stuck, release it" >> $LOGFILE
dmsetup udevcomplete $COOKIE
fi
done
sleep 65
done
echo "Cookie script finished at $(date)" >> $LOGFILE
EOF
chmod +x /tmp/cookie.sh
nohup /tmp/cookie.sh &
}
# Re-applies kernel parameters from /etc/sysctl.conf.
reload_sysconf() {
  sysctl -p
}
# Entry point. With '::' as the first argument, evaluates the remaining
# arguments as a command (maintenance/test hook). Otherwise performs the
# one-time first-boot initialization, guarded by the /var/cb-init-executed
# marker file so it never runs twice.
main() {
  reload_sysconf
  if [[ "$1" == "::" ]]; then
    shift
    eval "$@"
  elif [ ! -f "/var/cb-init-executed" ]; then
    setup_tmp_ssh
    relocate_docker
    extend_rootfs
    format_disks
    fix_hostname
    release_udev_cookie
    echo $(date +%Y-%m-%d:%H:%M:%S) >> /var/cb-init-executed
  fi
  # Optional convenience tool; only invoked when present on the image.
  [ -e /usr/bin/ssh-aliases ] && /usr/bin/ssh-aliases create
}
[[ "$0" == "$BASH_SOURCE" ]] && main "$@"
|
// Alias for the activated-rule action type exposed by the aws namespace.
type WafRuleGroupActivatedRuleAction = aws.WafRuleGroupActivatedRuleAction;

// Holds the list of actions attached to an activated WAF rule-group rule.
// The constructor parameter property declares and assigns the public
// `action` field in one step.
class WafRuleGroupActivatedRule {
  constructor(public action: WafRuleGroupActivatedRuleAction[]) {}
}
|
<reponame>CrystalBotDevelopment/command-handler
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CommandHandler = exports.defaultCommandHandlerOptions = void 0;
const objectCompare_1 = require("../functions/objectCompare");
const BaseCommandHandler_1 = require("./BaseCommandHandler");
const CommandError_1 = require("./CommandError");
exports.defaultCommandHandlerOptions = {
deleteCommands: false,
updateCommands: false,
createCommands: false,
autoDefer: false,
guildId: undefined,
handleError: async (err, interaction) => {
console.log(err);
if (interaction.replied)
await interaction.editReply('An error happened while executing this command!' + '\n```\n' + err.message + '\n```');
else
await interaction.reply('An error happened while executing this command!' + '\n```\n' + err.message + '\n```');
}
};
Object.freeze(exports.defaultCommandHandlerOptions);
/**
 * Slash-command handler: synchronizes local command definitions with the
 * Discord API (create/update/delete according to the configured options)
 * and dispatches incoming interactions to the matching command object.
 */
class CommandHandler extends BaseCommandHandler_1.BaseCommandHandler {
    // Discord client this handler is attached to.
    client;
    // Effective options: defaults merged with the constructor overrides.
    options;
    // Maps Discord application-command ids to local command objects.
    commandById = new Map();
    /**
     * @param client Discord client (must expose `token`, and `user.id` by
     *               the time loadCommands runs).
     * @param options Partial overrides of defaultCommandHandlerOptions.
     */
    constructor(client, options = {}) {
        // Merge onto a fresh object; the defaults themselves are frozen.
        const parsedOptions = Object.assign({}, exports.defaultCommandHandlerOptions, options);
        super(client.token, undefined, parsedOptions.guildId);
        this.client = client;
        this.options = parsedOptions;
    }
    /**
     * Dispatches an incoming interaction to its registered command.
     * Returns false when the interaction's command id is unknown to this
     * handler, true otherwise. Errors raised by the command are routed to
     * CommandError.handleError or the configurable options.handleError.
     */
    async runCommand(interaction) {
        const command = this.commandById.get(interaction.commandId);
        if (!command)
            return false;
        try {
            if (this.options.autoDefer)
                await interaction.deferReply();
            await command.run(interaction);
        }
        catch (err) {
            // CommandError types carry their own reporting; everything else
            // goes through the handler supplied in the options.
            if (err instanceof CommandError_1.CommandError)
                await CommandError_1.CommandError.handleError(err, interaction);
            else
                await this.options.handleError(err, interaction);
        }
        return true;
    }
    /**
     * Synchronizes local command definitions with the Discord API: creates
     * or updates commands per the options, then (optionally) deletes remote
     * commands that have no local counterpart. Returns this handler.
     */
    async loadCommands() {
        this.clientId = this.client.user.id;
        this._setToken(this.client.token);
        const applicationCommands = await this.getApplicationCommands();
        for (const command of this.commands) {
            let applicationCommand = applicationCommands.get(command.name.toLowerCase());
            // Remove matched commands from the map so only orphans remain
            // for the delete pass below.
            applicationCommands.delete(applicationCommand?.name ?? '');
            const rawCmd = command.toJSON();
            if (!applicationCommand) {
                if (!this.options.createCommands)
                    continue;
                applicationCommand = await this._createCommand(command);
            }
            else if (!objectCompare_1.objectCompare(rawCmd, this._transformApplicationCommand(applicationCommand))) {
                if (!this.options.updateCommands)
                    continue;
                applicationCommand = await this._updateCommand(command, applicationCommand);
            }
            this.commandById.set(applicationCommand.id, command);
            command.load(applicationCommand);
        }
        if (this.options.deleteCommands) {
            // Anything left in the map has no local definition.
            for (const [key, value] of applicationCommands) {
                key;
                await this._deleteCommand(value);
            }
        }
        return this;
    }
    /** Deletes every command currently registered with the Discord API. */
    async deleteAllCommands() {
        for (const [_, applicationCommand] of await this.getApplicationCommands()) {
            _;
            await this._deleteCommand(applicationCommand);
        }
    }
    /** Instantiates a command class and invokes its onStart hook. */
    _startCommand(constructor) {
        const c = new constructor();
        c.onStart();
        return c;
    }
}
exports.CommandHandler = CommandHandler;
|
/*
* SPDX-License-Identifier: BSD-3-Clause
*
* Copyright (c) 2016-2021, <NAME> <<EMAIL>>
*/
#include <errno.h>
#include <string.h>
#include <unistd.h>

#include <gio/gunixfdlist.h>

#include "gattlib_internal.h"
#if BLUEZ_VERSION < BLUEZ_VERSIONS(5, 48)
/*
 * BlueZ before 5.48 does not expose AcquireWrite, so the whole
 * acquired-write stream API is stubbed out on this branch: every entry
 * point simply reports GATTLIB_NOT_SUPPORTED.
 */
int gattlib_write_char_by_uuid_stream_open(gatt_connection_t* connection, uuid_t* uuid, gatt_stream_t **stream, uint16_t *mtu)
{
	return GATTLIB_NOT_SUPPORTED;
}

int gattlib_write_char_stream_write(gatt_stream_t *stream, const void *buffer, size_t buffer_len)
{
	return GATTLIB_NOT_SUPPORTED;
}

int gattlib_write_char_stream_close(gatt_stream_t *stream)
{
	return GATTLIB_NOT_SUPPORTED;
}
#else
/*
 * Opens an "acquired write" stream to the characteristic matching 'uuid' by
 * calling the BlueZ AcquireWrite D-Bus method. On success the socket file
 * descriptor is smuggled back to the caller through the 'stream' pointer
 * (see the cast at the end) and the negotiated MTU is stored in *mtu.
 * Returns GATTLIB_SUCCESS or GATTLIB_ERROR_DBUS.
 */
int gattlib_write_char_by_uuid_stream_open(gatt_connection_t* connection, uuid_t* uuid, gatt_stream_t **stream, uint16_t *mtu)
{
	struct dbus_characteristic dbus_characteristic = get_characteristic_from_uuid(connection, uuid);
	GError *error = NULL;
	GUnixFDList *fd_list;
	GVariant *out_fd;
	int fd;

	/* AcquireWrite takes an (empty here) a{sv} option dictionary. */
	GVariantBuilder *variant_options = g_variant_builder_new(G_VARIANT_TYPE("a{sv}"));

	org_bluez_gatt_characteristic1_call_acquire_write_sync(
		dbus_characteristic.gatt,
		g_variant_builder_end(variant_options),
		NULL /* fd_list */,
		&out_fd, mtu,
		&fd_list,
		NULL /* cancellable */, &error);

	g_variant_builder_unref(variant_options);

	if (error != NULL) {
		GATTLIB_LOG(GATTLIB_ERROR, "Failed to acquired write DBus GATT characteristic: %s", error->message);
		g_error_free(error);
		return GATTLIB_ERROR_DBUS;
	}

	error = NULL;
	/* out_fd holds an index into fd_list; resolve it to a real descriptor. */
	fd = g_unix_fd_list_get(fd_list, g_variant_get_handle(out_fd), &error);
	if (error != NULL) {
		GATTLIB_LOG(GATTLIB_ERROR, "Failed to retrieve Unix File Descriptor: %s", error->message);
		g_error_free(error);
		return GATTLIB_ERROR_DBUS;
	}
	/* NOTE(review): fd_list and out_fd look caller-owned here and are never
	 * unreffed - possible reference leak; confirm against the
	 * gdbus-codegen ownership rules for this generated call. */

	// We abuse the pointer 'stream' to pass the 'File Descriptor'
	*stream = (gatt_stream_t*)(unsigned long)fd;
	return GATTLIB_SUCCESS;
}
/*
 * Writes buffer_len bytes from buffer to the acquired-write stream.
 * The 'stream' pointer actually carries the Unix file descriptor returned
 * by gattlib_write_char_by_uuid_stream_open (see the cast there).
 *
 * The original implementation ignored the return value of write(), so a
 * short or failed write silently dropped data. Loop until the whole buffer
 * has been written, retrying on EINTR, and report hard failures.
 */
int gattlib_write_char_stream_write(gatt_stream_t *stream, const void *buffer, size_t buffer_len)
{
	int fd = (int)(unsigned long)stream;
	const char *p = buffer;
	size_t remaining = buffer_len;

	while (remaining > 0) {
		ssize_t written = write(fd, p, remaining);
		if (written < 0) {
			if (errno == EINTR) {
				continue; /* interrupted before any data was written - retry */
			}
			GATTLIB_LOG(GATTLIB_ERROR, "Failed to write to GATT stream: %s", strerror(errno));
			/* NOTE(review): no more specific generic error code is visible
			 * in this translation unit - confirm whether a dedicated one
			 * exists. */
			return GATTLIB_ERROR_DBUS;
		}
		p += written;
		remaining -= (size_t)written;
	}
	return GATTLIB_SUCCESS;
}
/*
 * Closes the acquired-write stream by closing the Unix file descriptor that
 * the 'stream' pointer carries (see gattlib_write_char_by_uuid_stream_open).
 * Always reports success; the close() result is intentionally ignored.
 */
int gattlib_write_char_stream_close(gatt_stream_t *stream)
{
	close((unsigned long)stream);
	return GATTLIB_SUCCESS;
}
#endif /* #if BLUEZ_VERSION < BLUEZ_VERSIONS(5, 48) */
|
import React, { useState } from "react";
import { Switch, Route, BrowserRouter } from "react-router-dom";
import PrivateRoute from "./Components/Common/PrivateRoute";
import Container from "@material-ui/core/Container";
import mainPage from "./Components/MaterialTuto/mainPage";
import Navbar from "./Components/Common/Navbar";
import Library from "./Components/Library/libraryView";
import Login from "./Components/Login/login";
import PracticeView from "./Components/Practice/PractiveView";
import Register from "./Components/Login/register";
import SecretPage from "./Components/MaterialTuto/secretPage";
import { AuthContext } from "./Context/auth";
const App = () => {
const existingTokens = JSON.parse(localStorage.getItem("tokens"));
const currentUserLocalStorage = JSON.parse(localStorage.getItem("user"));
const [authTokens, setAuthTokens] = useState(existingTokens);
const [currentUser, setCurrentUser] = useState(currentUserLocalStorage);
const setTokens = (token, user) => {
localStorage.setItem("tokens", JSON.stringify(token));
localStorage.setItem("user", JSON.stringify(user));
setAuthTokens(token);
setCurrentUser(user);
};
return (
<AuthContext.Provider
value={{ authTokens, currentUser, setAuthTokens: setTokens }}
>
<BrowserRouter>
<div id="app">
<Navbar />
<Container>
<Switch>
<PrivateRoute path="/practice" component={PracticeView} />
<PrivateRoute path="/library" component={Library} />
<PrivateRoute path="/secret" component={SecretPage} />
<Route path="/login" component={Login} />
<Route path="/register" component={Register} />
<Route path="/" exact component={mainPage} />
</Switch>
</Container>
</div>
</BrowserRouter>
</AuthContext.Provider>
);
};
export default App;
|
<reponame>unixing/springboot_chowder
package com.oven.vo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Builder;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Simple user value object. Lombok generates the getters/setters
 * ({@code @Data}), the no-args and all-args constructors, and a builder
 * at compile time.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class User {
    private String name;
    // Gender enum declared elsewhere in this package.
    private Gender gender;
    private int age;
    // Free-form list of hobby names.
    private List<String> hobby;
}
|
<filename>Src/InputBindings.h
// InputBindings.h
//
// Allows you to set key bindings for all TacentView operations.
//
// Copyright (c) 2022 <NAME>.
// Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby
// granted, provided that the above copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
// AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#pragma once
#include <Foundation/tString.h>
#include <System/tScript.h>
#include <GLFW/glfw3.h>
#include "Profile.h"
namespace Viewer {
namespace Bindings
{
// Add new operations to the bottom of this enum. If you add in the middle or reorder,
// it will invalidate the key bindings in previous version's config save files.
enum class Operation
{
	None,

	// Image navigation.
	NextImage, First = NextImage,
	PrevImage,
	SkipToLastImage,
	SkipToFirstImage,
	NextImageFrame,
	PrevImageFrame,

	// Single-pixel movement.
	PixelRight,
	PixelLeft,
	PixelDown,
	PixelUp,

	// Zoom and pan.
	ZoomIn,
	ZoomOut,
	ZoomFit,
	ZoomDownscaleOnly,
	ZoomOneToOne,
	ResetPan,

	// Image transforms and editing.
	FlipVertically,
	FlipHorizontally,
	Rotate90Anticlockwise,
	Rotate90Clockwise,
	RotateImage,
	Crop,
	ResizeImage,
	ResizeCanvas,
	PixelEdit,
	PropertyEdit,

	// Channel display.
	ChannelFilter,
	RedChannel,
	GreenChannel,
	BlueChannel,
	AlphaChannel,
	ChannelAsIntensity,

	// Views, file operations, and UI toggles.
	Details,
	Tile,
	Undo,
	Redo,
	Refresh,
	Rename,
	Delete,
	DeletePermanent,
	SaveAs,
	SaveAll,
	SaveMultiFrameImage,
	SaveContactSheet,
	MenuBar,
	NavBar,
	Thumbnails,
	FileBrowser,
	SlideshowTimer,
	CheatSheet,
	DebugLog,
	Profile,
	Preferences,
	KeyBindings,
	Fullscreen,
	Escape,					// Exit-Fullscreen | Exit-Basic-Profile.
	EscapeSupportingQuit,	// Exit-Fullscreen | Exit-Basic-Profile | Quit.
	Quit,
	OpenFile,
	OpenDir,

	// Add new operations here. Adding above will invalidate save file keybindings.
	NumOperations
};
// Bitfield of modifier keys. A combined modifier value in
// [0, Modifier_NumCombinations) indexes the per-key Operations array in KeyOps.
enum Modifier
{
	Modifier_None,
	Modifier_Ctrl				= 1 << 0,
	Modifier_Alt				= 1 << 1,
	Modifier_Shift				= 1 << 2,
	Modifier_NumCombinations	= 1 << 3
};
const char* GetModifiersText(uint32 modifiers);
// Gets the combined (modifiers plus key) text. ex. "Ctrl-Alt-Shift Space" or "Ctrl A" or "A".
// Returns empty string if the key is not supported.
tString GetModKeyText(int glfwkey, uint32 modifiers);
// Specifies, for a single key, what operations it performs. The same key can map to multiple operations if modifier
// keys are used.
struct KeyOps
{
KeyOps() { Clear(); }
KeyOps(const KeyOps& src) { Set(src); }
void Set(const KeyOps& src) { if (&src == this) return; for (int m = 0; m < Modifier_NumCombinations; m++) Operations[m] = src.Operations[m]; }
void Clear() { for (int m = 0; m < Modifier_NumCombinations; m++) Operations[m] = Operation::None; }
bool IsAnythingAssigned() const { for (int m = 0; m < Modifier_NumCombinations; m++) if (Operations[m] != Operation::None) return true; return false; }
int GetAssignedCount() const { int count = 0; for (int m = 0; m < Modifier_NumCombinations; m++) if (Operations[m] != Operation::None) count++; return count; }
// Finds the mods for the first (there may be more than one) operation matching the search operation.
// Returns false if no such operation was found.
bool FindOperationMods(uint32& mods, Operation searchOp) const { for (uint32 m = 0; m < Modifier_NumCombinations; m++) { if (Operations[m] == searchOp) { mods = m; return true; } } return false; }
KeyOps& operator=(const KeyOps& src) { Set(src); return *this; }
Operation Operations[Modifier_NumCombinations];
};
// Maps every GLFW key to its per-modifier-combination operation bindings and
// provides lookup, assignment, and (de)serialization of the whole key map.
class InputMap
{
public:
	InputMap()						{ Clear(); }
	InputMap(const InputMap& src)	{ Set(src); }

	// Deep-copies the entire key table. Self-assignment is a no-op.
	void Set(const InputMap& src) { if (&src == this) return; for (int k = 0; k <= GLFW_KEY_LAST; k++) KeyTable[k] = src.KeyTable[k]; }
	void Clear() /* Unassigns all keys. */ { for (int k = 0; k <= GLFW_KEY_LAST; k++) KeyTable[k].Clear(); }

	// Sets all keys to their default operations. Some keys have different operations depending on the profile so we
	// need to pass that in. If onlyIfUnassigned is true, the reset will only apply to keys that are not already
	// bound to something. A 'key' here refers to the actual key plus any modifiers (ctrl, alt, shift).
	void Reset(Viewer::Profile, bool onlyIfUnassigned = false);

	// Returns the operation assigned to a particular key and set of modifiers. This can also be used before an
	// assign call to see what a current key is bound to so an already-assigned message can be dislayed if needed.
	Operation GetOperation(int glfwKey, uint32 modifiers) { return KeyTable[glfwKey].Operations[modifiers]; }
	KeyOps& GetKeyOps(int glfwKey) { return KeyTable[glfwKey]; }

	// Assigns the operation to the key and modifiers specified. Returns true is there was a previous assignment
	// that needed to be replaced. Pass in onlyIfUnassigned if you only want assignment to happen if the key is
	// currently unassigned.
	bool AssignKey(int glfwkey, uint32 modifiers, Operation, bool onlyIfUnassigned = false);
	void ClearKey(int glfwkey, uint32 modifiers) { KeyTable[glfwkey].Operations[modifiers] = Operation::None; }

	// Total number of (key, modifier) pairs currently bound to any operation.
	int GetTotalAssigned() const { int count = 0; for (int k = 0; k <= GLFW_KEY_LAST; k++) count += KeyTable[k].GetAssignedCount(); return count; }

	// Searches for the first occurrence of the supplied operation and returns the key and modifiers that are bound
	// to it. Note that since there may be more than one key bound to the same operation, this function returns the
	// first one found. Returns true if something found.
	bool FindModKey(int& key, uint32& mods, Operation);

	// Convenience. Basically calls FindModKey on the supplied operation and then calls GetModKeyText on the result.
	// The returned string will be empty if nothing is bound to the operation.
	tString FindModKeyText(Operation op) { int key = 0; uint32 mods = 0; if (FindModKey(key, mods, op)) return GetModKeyText(key, mods); else return tString(); }

	// Deserializes / serializes the key map from/to the config script format.
	void Read(tExpression);
	void Write(tScriptWriter&);

	InputMap& operator=(const InputMap& src) { Set(src); return *this; }

private:
	// One KeyOps slot per GLFW key code.
	KeyOps KeyTable[GLFW_KEY_LAST+1];
};
// Some descriptions may change based on the current config, for example, what the esc key does
const char* GetOperationDesc(Operation);
// Converts from GLFW modifiers to viewer modifiers.
uint32 TranslateModifiers(int glfwModifiers);
void ShowBindingsWindow(bool* popen, bool justOpened);
void ShowCheatSheetWindow(bool* popen);
}
}
|
<reponame>lexfaraday/hamburgo
package wearable.hotelbeds.shared.price;
import android.content.Context;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Zavierazo on 12/10/2015.
*/
/**
 * Serializable container for confirmed bookings, persisted to the app's
 * internal storage as a serialized object file.
 * <p>
 * Created by Zavierazo on 12/10/2015.
 */
public class BookingsBean implements Serializable {
    /** Name of the file in internal storage holding the serialized bean. */
    private static final String FILE_NAME = "bookings.obj";

    private List<ConfirmDataBean> confirmDataBeans;

    public BookingsBean() {
        confirmDataBeans = new ArrayList<>();
    }

    public List<ConfirmDataBean> getConfirmDataBeans() {
        return confirmDataBeans;
    }

    public void setConfirmDataBeans(List<ConfirmDataBean> confirmDataBeans) {
        this.confirmDataBeans = confirmDataBeans;
    }

    /**
     * Returns true if a booking with the same token as {@code conf} is
     * already stored in this bean.
     */
    public boolean containsBooking(ConfirmDataBean conf) {
        for (ConfirmDataBean bean : confirmDataBeans) {
            if (bean.getToken().equals(conf.getToken())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Loads the persisted bean from internal storage. Best-effort: any
     * failure (missing file, corrupt data, ...) is logged and a fresh empty
     * bean is returned instead of propagating the error.
     */
    public static BookingsBean load(Context context) {
        // try-with-resources replaces the original manual finally-close
        // blocks; both streams are closed automatically in reverse order.
        try (FileInputStream fis = context.openFileInput(FILE_NAME);
             ObjectInputStream is = new ObjectInputStream(fis)) {
            return (BookingsBean) is.readObject();
        } catch (Exception e) {
            // Keep the original best-effort behavior: log and fall through.
            e.printStackTrace();
        }
        return new BookingsBean();
    }

    /**
     * Serializes this bean to internal storage, overwriting any previous
     * file. Failures are logged and swallowed (best-effort persistence,
     * matching the original behavior).
     */
    public void save(Context context) {
        try (FileOutputStream fos = context.openFileOutput(FILE_NAME, Context.MODE_PRIVATE);
             ObjectOutputStream os = new ObjectOutputStream(fos)) {
            os.writeObject(this);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
<reponame>vaniot-s/sentry
import React from 'react';
import SvgIcon from './svgIcon';
type Props = React.ComponentProps<typeof SvgIcon>;
// Sentry logo icon. Wrapped in forwardRef so parents can reach the underlying
// <svg> element; all SvgIcon props (size, color, ...) are passed through.
const IconSentry = React.forwardRef(function IconSentry(
  props: Props,
  ref: React.Ref<SVGSVGElement>
) {
  return (
    <SvgIcon {...props} ref={ref}>
      <path d="M15.8,14.57a1.53,1.53,0,0,0,0-1.52L9.28,1.43a1.46,1.46,0,0,0-2.56,0L4.61,5.18l.54.32A10.43,10.43,0,0,1,8.92,9.39a10.84,10.84,0,0,1,1.37,4.67H8.81a9.29,9.29,0,0,0-1.16-3.91A9,9,0,0,0,4.41,6.81L3.88,6.5,1.91,10l.53.32a5.12,5.12,0,0,1,2.42,3.73H1.48a.25.25,0,0,1-.21-.12.24.24,0,0,1,0-.25L2.21,12a3.32,3.32,0,0,0-1.07-.63L.2,13.05a1.53,1.53,0,0,0,0,1.52,1.46,1.46,0,0,0,1.28.76H6.13V14.7a6.55,6.55,0,0,0-.82-3.16,6.31,6.31,0,0,0-1.73-2l.74-1.32a7.85,7.85,0,0,1,2.26,2.53,8,8,0,0,1,1,3.92v.63h3.94V14.7A12.14,12.14,0,0,0,10,8.75a11.8,11.8,0,0,0-3.7-4l1.5-2.67a.24.24,0,0,1,.42,0l6.52,11.63a.24.24,0,0,1,0,.25.24.24,0,0,1-.21.12H13c0,.43,0,.85,0,1.27h1.53a1.46,1.46,0,0,0,1.28-.76" />
    </SvgIcon>
  );
});
IconSentry.displayName = 'IconSentry';
export {IconSentry};
|
<reponame>YaroShkvorets/ant-design-vue
import PropTypes from '../../_util/vue-types';
import type { PropType } from 'vue';
export type IPlacement = 'left' | 'top' | 'right' | 'bottom';
type ILevelMove = number | [number, number];
// Props shared by the drawer wrapper and the drawer child/panel component.
const props = () => ({
  prefixCls: PropTypes.string,
  width: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
  height: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
  style: PropTypes.style,
  class: PropTypes.string,
  // Which edge of the screen the drawer slides in from.
  placement: {
    type: String as PropType<IPlacement>,
  },
  wrapperClassName: PropTypes.string,
  // Selector(s) of sibling elements affected when the drawer opens
  // (rc-drawer "level" behavior - confirm against rc-drawer docs).
  level: { type: [String, Array] as PropType<string | string[]> },
  // Distance the leveled elements move, or a callback computing it.
  levelMove: {
    type: [Number, Function, Array] as PropType<
      ILevelMove | ((e: { target: HTMLElement; open: boolean }) => ILevelMove)
    >,
  },
  duration: PropTypes.string,
  ease: PropTypes.string,
  showMask: PropTypes.looseBool,
  maskClosable: PropTypes.looseBool,
  maskStyle: PropTypes.style,
  afterVisibleChange: PropTypes.func,
  keyboard: PropTypes.looseBool,
  contentWrapperStyle: PropTypes.style,
  autofocus: PropTypes.looseBool,
  open: PropTypes.looseBool,
});
// Props for the top-level drawer component: the shared set plus mounting
// control (forceRender) and the portal target (getContainer).
const drawerProps = () => ({
  ...props(),
  forceRender: PropTypes.looseBool,
  getContainer: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.func,
    PropTypes.object,
    PropTypes.looseBool,
  ]),
});
// Props for the inner drawer panel: the shared set plus hooks the wrapper
// passes down (container lookup, open-count, scroll locking).
const drawerChildProps = () => ({
  ...props(),
  getContainer: PropTypes.func,
  getOpenCount: PropTypes.func,
  scrollLocker: PropTypes.any,
  switchScrollingEffect: PropTypes.func,
});
export { drawerProps, drawerChildProps };
|
# Copyright (c) 2019 <NAME> and contributors
#
# This file is part of the adb-shell package. It incorporates work
# covered by the following license notice:
#
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants used throughout the code.
"""
import stat
import struct
#: From adb.h
CLASS = 0xFF
#: From adb.h
SUBCLASS = 0x42
#: From adb.h
PROTOCOL = 0x01
#: ADB protocol version.
VERSION = 0x01000000
#: Maximum amount of data in an ADB packet.
MAX_ADB_DATA = 4096
#: Maximum size of a filesync DATA packet.
MAX_PUSH_DATA = 2048
#: Default mode for pushed files.
DEFAULT_PUSH_MODE = stat.S_IFREG | stat.S_IRWXU | stat.S_IRWXG
#: AUTH constant for ``arg0``
AUTH_TOKEN = 1
#: AUTH constant for ``arg0``
AUTH_SIGNATURE = 2
#: AUTH constant for ``arg0``
AUTH_RSAPUBLICKEY = 3
AUTH = b'AUTH'
CLSE = b'CLSE'
CNXN = b'CNXN'
FAIL = b'FAIL'
OKAY = b'OKAY'
OPEN = b'OPEN'
SYNC = b'SYNC'
WRTE = b'WRTE'
DATA = b'DATA'
DENT = b'DENT'
DONE = b'DONE'
LIST = b'LIST'
QUIT = b'QUIT'
RECV = b'RECV'
SEND = b'SEND'
STAT = b'STAT'
#: Commands that are recognized by :meth:`adb_shell.adb_device.AdbDevice._read`
IDS = (AUTH, CLSE, CNXN, OKAY, OPEN, SYNC, WRTE)
#: A dictionary where the keys are the commands in :const:`IDS` and the values are the keys converted to integers
ID_TO_WIRE = {cmd_id: sum(c << (i * 8) for i, c in enumerate(bytearray(cmd_id))) for cmd_id in IDS}
#: A dictionary where the keys are integers and the values are their corresponding commands (type = bytes) from :const:`IDS`
WIRE_TO_ID = {wire: cmd_id for cmd_id, wire in ID_TO_WIRE.items()}
#: Commands that are recognized by :meth:`adb_shell.adb_device.AdbDevice._filesync_read`
FILESYNC_IDS = (DATA, DENT, DONE, FAIL, LIST, OKAY, QUIT, RECV, SEND, STAT)
#: A dictionary where the keys are the commands in :const:`FILESYNC_IDS` and the values are the keys converted to integers
FILESYNC_ID_TO_WIRE = {cmd_id: sum(c << (i * 8) for i, c in enumerate(bytearray(cmd_id))) for cmd_id in FILESYNC_IDS}
#: A dictionary where the keys are integers and the values are their corresponding commands (type = bytes) from :const:`FILESYNC_IDS`
FILESYNC_WIRE_TO_ID = {wire: cmd_id for cmd_id, wire in FILESYNC_ID_TO_WIRE.items()}
#: An ADB message is 6 words in little-endian.
MESSAGE_FORMAT = b'<6I'
#: The format for FileSync "list" messages
FILESYNC_LIST_FORMAT = b'<5I'
#: The format for FileSync "pull" messages
FILESYNC_PULL_FORMAT = b'<2I'
#: The format for FileSync "push" messages
FILESYNC_PUSH_FORMAT = b'<2I'
#: The format for FileSync "stat" messages
FILESYNC_STAT_FORMAT = b'<4I'
#: The size of an ADB message
MESSAGE_SIZE = struct.calcsize(MESSAGE_FORMAT)
#: Default authentication timeout (in s) for :meth:`adb_shell.tcp_handle.TcpHandle.connect`
DEFAULT_AUTH_TIMEOUT_S = 10.
#: Default total timeout (in s) for :meth:`adb_shell.adb_device.AdbDevice._read`
DEFAULT_TOTAL_TIMEOUT_S = 10.
|
#!/bin/bash
# Bioconda post-link script: downloads the annotation tarball from the first
# mirror whose md5 checksum matches, installs it into the conda R library,
# then removes the staging area.

FN="MeSH.Eco.55989.eg.db_1.13.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.12/data/annotation/src/contrib/MeSH.Eco.55989.eg.db_1.13.0.tar.gz"
  "https://bioarchive.galaxyproject.org/MeSH.Eco.55989.eg.db_1.13.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-mesh.eco.55989.eg.db/bioconductor-mesh.eco.55989.eg.db_1.13.0_src_all.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-mesh.eco.55989.eg.db/bioconductor-mesh.eco.55989.eg.db_1.13.0_src_all.tar.gz"
)
MD5="37249fa8652fe39cb7f9dbdea8205c67"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p "$STAGING"
TARBALL=$STAGING/$FN

SUCCESS=0
for URL in "${URLS[@]}"; do
  curl "$URL" > "$TARBALL"
  [[ $? == 0 ]] || continue
  # Platform-specific md5sum checks: GNU md5sum on Linux, BSD md5 on macOS.
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    # 'elif' replaces the original nested 'else if ... fi fi' construct;
    # behavior is identical.
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
class ReferenceX64:
    """A base+offset register reference used by the x64 emulation layer."""

    def __init__(self, base, offset):
        self.base = base
        self.offset = offset

    def calculate_address(self, memory):
        """Return memory[base] + memory[offset], or -1 if either key is absent."""
        if self.base not in memory or self.offset not in memory:
            return -1
        return memory[self.base] + memory[self.offset]
def execute_instruction(cpu, instruction, memory):
    """Dispatch a single pseudo-instruction against the emulated CPU state.

    Only 'x64'-type instructions are handled; any other type is silently
    ignored.  The 'reference' opcode stores a ReferenceX64 built from the
    base/offset operands into the destination register.  Any other x64
    opcode raises (after dumping the instruction when debugging is on).
    """
    if instruction.type != 'x64':
        return
    if instruction.opcode == 'reference':
        operands = instruction.operands
        cpu.reg[operands[0]] = ReferenceX64(operands[1], operands[2])
        return
    if ONE_GADGET_LIB_DEBUG:
        _print_instruction(instruction)
    raise Exception('Unknown instruction found')
<reponame>ben-abraham/electrum-ravencoin
import time
from abc import abstractmethod
from enum import IntEnum
from typing import Dict, List, Optional
from PyQt5.QtGui import QPixmap, QKeySequence, QIcon, QCursor, QFont, QRegExpValidator
from PyQt5.QtCore import Qt, QRect, QStringListModel, QSize, pyqtSignal, QPoint
from PyQt5.QtCore import QTimer, QRegExp
from PyQt5.QtWidgets import (QMessageBox, QComboBox, QSystemTrayIcon, QTabWidget,
QMenuBar, QFileDialog, QCheckBox, QLabel,
QVBoxLayout, QGridLayout, QLineEdit,
QHBoxLayout, QPushButton, QScrollArea, QTextEdit,
QShortcut, QMainWindow, QCompleter, QInputDialog,
QWidget, QSizePolicy, QStatusBar, QToolTip, QDialog,
QMenu, QAction, QStackedWidget, QToolButton)
from electrum import constants
from electrum.assets import is_main_asset_name_good, is_unique_asset_name_good, is_sub_asset_name_good, \
create_transfer_asset_script, create_new_asset_script, create_owner_asset_script, create_reissue_asset_script
from electrum.gui.qt.amountedit import FreezableLineEdit
from electrum.gui.qt.util import ComplexLineEdit, HelpLabel, EnterButton, ColorScheme, ChoicesLayout, HelpButtonURL
from electrum.i18n import _
from electrum.logging import get_logger
from electrum.ravencoin import TOTAL_COIN_SUPPLY_LIMIT_IN_BTC, base_decode, address_to_script, COIN, is_address
from electrum.transaction import RavenValue, PartialTxOutput, AssetMeta
from electrum.util import Satoshis, bfh
_logger = get_logger(__name__)
class InterpretType(IntEnum):
    """How the user-supplied associated data should be interpreted
    (stored in AssetCreateWorkspace.associated_data_interpret)."""
    NO_DATA = 0   # no associated data provided
    IPFS = 1      # data is an IPFS hash
    HEX = 2       # data is raw hexadecimal
    LATIN = 3     # data is latin-1 text
# TODO: Clean up these classes
class AssetCreateWorkspace(QWidget):
def __init__(self, parent, create_asset_callable):
super().__init__()
self.parent = parent
self.aval_owner_combo = QComboBox()
self.aval_owner_combo.setCurrentIndex(0)
self.aval_owner_combo.setVisible(False)
c_grid = QGridLayout()
c_grid.setSpacing(4)
self.asset_name = ComplexLineEdit()
self.asset_name.lineEdit.setMaxLength(30)
self.asset_name.setPrefixStyle(ColorScheme.GRAY.as_stylesheet())
self.asset_availability_text = QLabel()
self.asset_availability_text.setAlignment(Qt.AlignCenter)
self.divisions = FreezableLineEdit()
self.asset_amount = FreezableLineEdit()
self.reissuable = QCheckBox()
self.cost_label = QLabel('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.IssueAssetBurnAmount))
msg = _('Reissuability') + '\n\n' \
+ _('This lets the asset be edited in the future.')
self.reissue_label = HelpLabel(_('Reissuable'), msg)
def on_type_click(clayout_obj):
self.asset_availability_text.setText('')
self.divisions.setFrozen(False)
self.asset_amount.setFrozen(False)
i = clayout_obj.selected_index()
i2 = self.aval_owner_combo.currentIndex()
self.aval_owner_combo.setVisible(i != 0)
if i == 0:
self.cost_label.setText('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.IssueAssetBurnAmount))
elif i == 1:
self.cost_label.setText('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.IssueSubAssetBurnAmount))
elif i == 2:
self.cost_label.setText('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.IssueUniqueAssetBurnAmount))
if i == 2:
self.divisions.setFrozen(True)
self.divisions.setText('0')
self.asset_amount.setFrozen(True)
self.asset_amount.setText('1')
self.reissuable.setCheckState(False)
self.reissuable.setEnabled(False)
self.reissue_label.setStyleSheet(ColorScheme.GRAY.as_stylesheet())
else:
self.reissuable.setCheckState(True)
self.reissuable.setEnabled(True)
self.reissuable.setTristate(False)
self.reissue_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
if i == 0 or i2 == 0:
self.asset_name.lineEdit.setMaxLength(30)
self.asset_name.set_prefix('')
return
text = self.aval_owner_options[i2]
self.asset_name.lineEdit.setMaxLength(30 - len(text) - 1)
if i == 1:
self.asset_name.set_prefix(text + '/')
else:
self.asset_name.set_prefix(text + '#')
create_asset_options = ['Main', 'Sub', 'Unique']
self.create_options_layout = ChoicesLayout('Select an asset type', create_asset_options, on_type_click,
horizontal=True)
def on_combo_change():
self.asset_availability_text.setText('')
i = self.create_options_layout.selected_index()
i2 = self.aval_owner_combo.currentIndex()
self.aval_owner_combo.setVisible(i != 0)
if i == 0 or i2 == 0:
self.asset_name.set_prefix('')
self.asset_name.lineEdit.setMaxLength(30)
return
text = self.aval_owner_options[i2]
self.asset_name.lineEdit.setMaxLength(30 - len(text) - 1)
if i == 1:
self.asset_name.set_prefix(text + '/')
else:
self.asset_name.set_prefix(text + '#')
self.aval_owner_combo.currentIndexChanged.connect(on_combo_change)
msg = _('The asset name.') + '\n\n' \
+ _(
'This name must be unique.')
name_label = HelpLabel(_('Asset Name'), msg)
c_grid.addWidget(name_label, 2, 0)
c_grid.addWidget(self.aval_owner_combo, 2, 1)
c_grid.addWidget(self.asset_name, 2, 2)
self.asset_name_error_message = QLabel()
self.asset_name_error_message.setStyleSheet(ColorScheme.RED.as_stylesheet())
self.asset_name_error_message.setAlignment(Qt.AlignCenter)
self.check_button = EnterButton(_("Check Availability"), self._check_availability)
c_grid.addWidget(self.check_button, 2, 3)
self.asset_name.lineEdit.textChanged.connect(self._check_asset_name)
c_grid.addWidget(self.asset_name_error_message, 3, 2)
c_grid.addWidget(self.asset_availability_text, 3, 3)
c_grid_b = QGridLayout()
c_grid_b.setColumnStretch(2, 1)
c_grid_b.setHorizontalSpacing(10)
def update_amount_line_edit():
t = self.divisions.text()
if not t:
return
split_amt = self.asset_amount.text().split('.')
divs = int(t)
# Update amount
if len(split_amt) == 2:
pre, post = split_amt
post = post[:divs]
if post:
self.asset_amount.setText(pre + '.' + post)
else:
self.asset_amount.setText(pre)
else:
self.asset_amount.setText(split_amt[0])
# Update regex
if divs == 0:
reg = QRegExp('^[1-9][0-9]{1,10}$')
else:
reg = QRegExp('^[0-9]{1,11}\\.([0-9]{1,' + str(divs) + '})$')
validator = QRegExpValidator(reg)
self.asset_amount.setValidator(validator)
msg = _('Asset Divisions') + '\n\n' \
+ _('Asset divisions are a number from 0 to 8. They dictate how much an asset can be divided. '
'The minimum asset amount is 10^-d where d is the division amount. Once an asset is issued, you cannot decrease this number.')
divisions_label = HelpLabel(_('Divisions'), msg)
reg = QRegExp('^[012345678]{1}$')
validator = QRegExpValidator(reg)
self.divisions.setValidator(validator)
self.divisions.setFixedWidth(25)
self.divisions.setText('0')
self.divisions.textChanged.connect(update_amount_line_edit)
divisions_grid = QHBoxLayout()
divisions_grid.setSpacing(0)
divisions_grid.setContentsMargins(0, 0, 0, 0)
divisions_grid.addWidget(divisions_label)
divisions_grid.addWidget(self.divisions)
divisions_w = QWidget()
divisions_w.setLayout(divisions_grid)
c_grid_b.addWidget(divisions_w, 0, 0)
self.reissuable.setCheckState(True)
self.reissuable.setTristate(False)
reissue_grid = QHBoxLayout()
reissue_grid.setSpacing(0)
reissue_grid.setContentsMargins(0, 0, 0, 0)
reissue_grid.addWidget(self.reissue_label)
reissue_grid.addWidget(self.reissuable)
reissue_w = QWidget()
reissue_w.setLayout(reissue_grid)
c_grid_b.addWidget(reissue_w, 0, 1)
self.associated_data_info = QLabel()
self.associated_data_info.setAlignment(Qt.AlignCenter)
self.associated_data_interpret = InterpretType.NO_DATA
msg = _('Associated Data') + '\n\n' \
+ _('Data to associate with this asset.')
data_label = HelpLabel(_('Associated Data'), msg)
self.associated_data = QLineEdit()
self.associated_data.textChanged.connect(self._check_associated_data)
self.associated_data_interpret_override = QComboBox()
self.associated_data_interpret_override.addItems(['AUTO', 'IPFS', 'HEX', 'LATIN-1'])
self.associated_data_interpret_override.currentIndexChanged.connect(self._check_associated_data)
self.associated_data_interpret_override.setVisible(self.parent.config.get('advanced_asset_functions', False))
data_grid = QHBoxLayout()
data_grid.setSpacing(0)
data_grid.setContentsMargins(0, 0, 0, 0)
data_grid.addWidget(data_label)
data_grid.addWidget(self.associated_data)
data_w = QWidget()
data_w.setLayout(data_grid)
c_grid_b.addWidget(data_w, 0, 2)
c_grid_b.addWidget(self.associated_data_info, 1, 2)
c_grid_b.addWidget(self.associated_data_interpret_override, 0, 3)
c_grid_c = QGridLayout()
c_grid_c.setColumnStretch(4, 1)
c_grid_c.setHorizontalSpacing(10)
msg = _('Asset Amount') + '\n\n' \
+ _('The amount of an asset to create')
amount_label = HelpLabel(_('Amount'), msg)
reg = QRegExp('^[1-9][0-9]{0,10}$')
validator = QRegExpValidator(reg)
self.asset_amount.setValidator(validator)
amount_grid = QHBoxLayout()
amount_grid.setSpacing(0)
amount_grid.setContentsMargins(0, 0, 0, 0)
amount_grid.addWidget(amount_label)
amount_grid.addWidget(self.asset_amount)
amount_w = QWidget()
amount_w.setLayout(amount_grid)
c_grid_c.addWidget(amount_w, 0, 0)
self.asset_amount_warning = QLabel()
self.asset_amount_warning.setStyleSheet(ColorScheme.RED.as_stylesheet())
self.asset_amount.textChanged.connect(self._check_amount)
c_grid_c.addWidget(self.asset_amount_warning, 1, 0)
self.change_addrs = None # type: Optional[List[str]]
self.refresh_change_addrs()
self.send_asset_address_error = QLabel()
self.send_asset_address_error.setStyleSheet(ColorScheme.RED.as_stylesheet())
self.send_asset_address = QLineEdit()
self.send_asset_address.textChanged.connect(self._check_asset_addr)
if len(self.change_addrs) > 1:
self.send_asset_address.setText(self.change_addrs[1])
else:
async def delayed_address_update():
time.sleep(5)
self.refresh_change_addrs()
self.send_asset_address.setText(self.change_addrs[1])
self.parent.run_coroutine_from_thread(delayed_address_update())
asset_h = QHBoxLayout()
asset_h.addWidget(QLabel(_('New asset address:')))
asset_h.addWidget(self.send_asset_address)
asset_h.addWidget(self.send_asset_address_error)
self.asset_addr_w = QWidget()
self.asset_addr_w.setLayout(asset_h)
self.asset_addr_w.setVisible(self.parent.config.get('advanced_asset_functions', False))
bottom_buttons = QGridLayout()
bottom_buttons.setColumnStretch(1, 2)
self.exec_asset_b = EnterButton(_("Create Asset"), create_asset_callable)
bottom_buttons.addWidget(self.exec_asset_b, 1, 0)
bottom_buttons.addWidget(self.cost_label, 1, 1)
self.reset_create_b = EnterButton(_("Reset"), self.reset_workspace)
bottom_buttons.addWidget(self.reset_create_b, 1, 3)
top_layout = QHBoxLayout()
top_layout.addLayout(self.create_options_layout.layout())
top_layout.addWidget(HelpButtonURL("https://ravencoin.org/assets/"))
widgetA = QWidget()
widgetA.setLayout(top_layout)
widgetB = QWidget()
widgetB.setLayout(c_grid)
widgetC = QWidget()
widgetC.setLayout(c_grid_b)
widgetD = QWidget()
widgetD.setLayout(c_grid_c)
widgetF = QWidget()
widgetF.setLayout(bottom_buttons)
create_l = QVBoxLayout()
create_l.addWidget(widgetA)
create_l.addWidget(widgetB)
create_l.addWidget(widgetC)
create_l.addWidget(widgetD)
create_l.addWidget(self.asset_addr_w)
create_l.addWidget(widgetF)
self.setLayout(create_l)
self.aval_owner_options = [] # type: List[str]
self.last_checked = None # type: Optional[str]
def _check_asset_addr(self):
addr = self.send_asset_address.text()
if not is_address(addr):
self.send_asset_address_error.setText(_('Invalid Ravencoin Address'))
return False
else:
self.send_asset_address_error.setText('')
return True
def _check_asset_name(self):
self.asset_availability_text.setText('')
name = self.asset_name.text()
if not name:
self.asset_name_error_message.setText('')
return
pre = self.asset_name.get_prefix()
i = self.create_options_layout.selected_index()
if i == 0:
error = is_main_asset_name_good(name)
if error == 'SIZE':
if len(name) < 3:
error = None
else:
error = "Main assets may only use capital letters, numbers, '_', and '.'"
elif i == 1:
error = is_sub_asset_name_good(name)
else:
error = is_unique_asset_name_good(name)
if len(pre + name) > 30:
error = 'Asset name must be less than 31 characters (Including the parent).'
if error:
self.asset_name_error_message.setText(error)
return False
else:
self.asset_name_error_message.setText('')
return True
def _check_availability(self):
asset = self.asset_name.get_prefix() + self.asset_name.text()
if self.create_options_layout.selected_index() == 0:
if len(asset) < 3:
self.asset_name_error_message.setText('Main assets must be more than 3 characters.')
return
elif self.aval_owner_combo.currentIndex() == 0:
self.asset_name_error_message.setText('Please select a parent asset!')
return
if not self._check_asset_name():
return
self.check_asset_availability(asset)
    def _check_associated_data(self) -> bool:
        """Validate the associated-data field and record how to interpret it.

        The override combo forces IPFS (1), hex (2) or latin-1 (3); index 0
        auto-detects. Auto-detection accepts only an exact 34-byte IPFS hash
        in normal mode; in advanced mode it tries IPFS ('Qm' prefix), then
        hex, then latin-1. The chosen interpretation is stored in
        self.associated_data_interpret and the info label is updated.
        Returns True when the data is acceptable.
        """
        text = self.associated_data.text()
        i = self.associated_data_interpret_override.currentIndex()
        if len(text) == 0:
            # Empty field: no associated data at all.
            self.associated_data_info.setText('')
            self.associated_data_interpret = InterpretType.NO_DATA
            return True
        if i != 0:
            # Explicit interpretation chosen by the user.
            if i == 1:
                self.associated_data_interpret = InterpretType.IPFS
                try:
                    if len(base_decode(text, base=58)) > 34:
                        self.associated_data_info.setText('Too much data in IPFS hash!')
                        self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                        return False
                    else:
                        self.associated_data_info.setText('Reading as IPFS')
                        self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                        return True
                except:
                    self.associated_data_info.setText('Invalid base 58 encoding!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
            if i == 2:
                self.associated_data_interpret = InterpretType.HEX
                try:
                    bfh(text)
                except:
                    self.associated_data_info.setText('Not a valid hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                # 34 bytes of payload == 68 hex characters max.
                if len(text) > 34 * 2:
                    self.associated_data_info.setText('Too much data in hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as hex string')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
            else:
                self.associated_data_interpret = InterpretType.LATIN
                try:
                    text.encode('latin-1')
                except:
                    self.associated_data_info.setText('Text not compatible with latin-1!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                if len(text) > 34:
                    self.associated_data_info.setText('Too much data in latin-1 string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as latin-1 string')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
        if self.parent.config.get('advanced_asset_functions', False):
            # Auto-detect, advanced mode: IPFS ('Qm' prefix) -> hex -> latin-1.
            if text[:2] == 'Qm':
                try:
                    if len(base_decode(text, base=58)) == 34:
                        self.associated_data_info.setText('Reading as IPFS')
                        self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                        self.associated_data_interpret = InterpretType.IPFS
                        return True
                except:
                    pass
            try:
                if len(text) == 1:
                    # A single character can never be a full hex byte; fall
                    # through to the latin-1 branch.
                    raise Exception()
                bytes.fromhex(text)
                self.associated_data_info.setText('Reading as hex string')
                self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                self.associated_data_interpret = InterpretType.HEX
                if len(text) > 34 * 2:
                    self.associated_data_info.setText('Too much data in hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                return True
            except:
                self.associated_data_info.setText('Reading as latin-1 string')
                self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                self.associated_data_interpret = InterpretType.LATIN
                try:
                    text.encode('latin-1')
                except:
                    self.associated_data_info.setText('Text not compatible with latin-1!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                if len(text) > 34:
                    self.associated_data_info.setText('Too much data in latin-1 string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                return True
        else:
            # Auto-detect, normal mode: must be exactly a 34-byte IPFS hash.
            self.associated_data_interpret = InterpretType.IPFS
            try:
                raw = base_decode(text, base=58)
                if len(raw) > 34:
                    self.associated_data_info.setText('Too much data in IPFS hash!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                elif len(raw) < 34:
                    self.associated_data_info.setText('Too little data in IPFS hash!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as IPFS')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
            except:
                self.associated_data_info.setText('Invalid IPFS hash!')
                self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                return False
def _check_amount(self) -> bool:
t = self.asset_amount.text()
if not t:
self.asset_amount_warning.setText('')
return False
v = float(t)
if v > TOTAL_COIN_SUPPLY_LIMIT_IN_BTC:
self.asset_amount_warning.setText(
_('More than the maximum amount ({})').format(TOTAL_COIN_SUPPLY_LIMIT_IN_BTC))
return False
elif v == 0:
self.asset_amount_warning.setText(
_('The amount cannot be 0.')
)
return False
else:
self.asset_amount_warning.setText('')
return True
def check_asset_availability(self, asset):
def x(result):
self.update_screen_based_on_asset_result(asset, result)
self.parent.run_coroutine_from_thread(self.parent.network.get_meta_for_asset(asset),
x)
def update_screen_based_on_asset_result(self, asset, result):
if result:
self.last_checked = None
self.asset_availability_text.setText('Asset Unavailable')
self.asset_availability_text.setStyleSheet(ColorScheme.RED.as_stylesheet())
else:
self.last_checked = asset
self.asset_availability_text.setText('Asset Available')
self.asset_availability_text.setStyleSheet(ColorScheme.GREEN.as_stylesheet())
def refresh_owners(self):
confirmed, unconfirmed, _ = self.parent.wallet.get_balance()
owned_assets = confirmed.assets
in_mempool = unconfirmed.assets
owners = [n for n in owned_assets.keys() if
n[-1] == '!' and owned_assets.get(n, 0) != 0]
indexes_in_mempool = set()
new_aval_owner_options = ['Select a parent'] + \
sorted([n[:-1] for n in owners])
for i in range(len(new_aval_owner_options)):
if i == 0:
continue
a = new_aval_owner_options[i]
if (a + '!') in in_mempool:
indexes_in_mempool.add(i)
new_aval_owner_options[i] = a + ' (Mempool)'
diff = set(new_aval_owner_options) - set(self.aval_owner_options)
if self.aval_owner_options and not diff:
return
self.aval_owner_options = new_aval_owner_options
self.aval_owner_combo.clear()
self.aval_owner_combo.addItems(self.aval_owner_options)
for i in indexes_in_mempool:
self.aval_owner_combo.model().item(i).setEnabled(False)
def refresh_change_addrs(self):
# We just want addresses to send the newly created assets to
# Is there a way to improve this?
addrs = self.parent.wallet.get_change_addresses_for_new_transaction(extra_addresses=3)
if not addrs:
addrs = self.parent.wallet.get_change_addresses_for_new_transaction(allow_reusing_used_change_addrs=True, extra_addresses=3)
if not addrs:
addrs = self.parent.wallet.get_change_addresses(slice_stop=4)
if not addrs:
addrs = self.parent.wallet.get_receiving_addresses(slice_stop=4)
if len(addrs) < 4:
assert len(addrs) > 0
addr = addrs[0]
for _ in range(4 - len(addrs)):
addrs.append(addr)
self.change_addrs = addrs
def verify_valid(self) -> Optional[str]:
asset = self.asset_name.get_prefix() + self.asset_name.text()
if asset != self.last_checked:
self.asset_availability_text.setText('Check if available')
self.asset_availability_text.setStyleSheet(ColorScheme.RED.as_stylesheet())
return 'Check if your asset is available first'
if not self._check_amount():
return 'Invalid amount'
if not self._check_asset_addr():
return 'Invalid address'
if self.create_options_layout.selected_index() == 0:
if len(asset) < 3:
self.asset_name_error_message.setText('Main assets must be more than 3 characters.')
return 'Check name'
elif self.aval_owner_combo.currentIndex() == 0:
self.asset_name_error_message.setText('Please select a parent asset!')
return 'No parent asset'
if not self._check_asset_name():
return 'Check name'
if not self._check_associated_data():
return 'Invalid associated data'
return None
def should_warn_associated_data(self):
text = self.associated_data.text() # type: str
i = self.associated_data_interpret
if i == InterpretType.NO_DATA:
return False
elif i == InterpretType.IPFS:
b = base_decode(text, base=58)
elif i == InterpretType.HEX:
b = bfh(text)
else:
b = text.encode('latin-1')
if len(b) < 34 and self.parent.config.get('warn_asset_small_associated', True):
return True
return False
def should_warn_on_non_reissuable(self):
is_unique = self.create_options_layout.selected_index() == 2
c = self.reissuable.isChecked()
if not c and not is_unique and self.parent.config.get('warn_asset_non_reissuable', True):
return True
return False
    def reset_workspace(self):
        """Return the create-asset tab to its pristine state."""
        # Back to the 'Main' asset type.
        self.create_options_layout.group.buttons()[0].setChecked(True)
        self.asset_name.lineEdit.setText('')
        self.asset_name.lineEdit.setMaxLength(30)
        self.asset_name.set_prefix('')
        self.divisions.setFrozen(False)
        self.divisions.setText('0')
        # NOTE(review): setCheckState(True) coerces True to 1, which is
        # Qt.PartiallyChecked; setTristate(False) then normalizes the box —
        # confirm Qt.Checked was not intended here.
        self.reissuable.setCheckState(True)
        self.reissuable.setEnabled(True)
        self.reissuable.setTristate(False)
        self.aval_owner_combo.setVisible(False)
        # Clear every inline message and input field.
        self.asset_name_error_message.setText('')
        self.asset_availability_text.setText('')
        self.associated_data_info.setText('')
        self.asset_amount_warning.setText('')
        self.associated_data.setText('')
        self.asset_amount.setText('')
        # Restore the integer-amount validator (divisions == 0).
        reg = QRegExp('^[1-9][0-9]{0,10}$')
        validator = QRegExpValidator(reg)
        self.asset_amount.setValidator(validator)
        self.reissue_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
        self.last_checked = None
        self.associated_data_interpret = InterpretType.NO_DATA
        self.associated_data_interpret_override.setCurrentIndex(0)
        self.refresh_change_addrs()
        self.send_asset_address.setText(self.change_addrs[1])
        self.send_asset_address_error.setText('')
        self.cost_label.setText('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.IssueAssetBurnAmount))
        self.refresh_owners()
def get_owner(self):
i = self.aval_owner_combo.currentIndex()
if i == 0:
return None
return self.aval_owner_options[i] + '!'
def get_output(self):
i = self.create_options_layout.selected_index()
if i == 0:
addr = constants.net.BURN_ADDRESSES.IssueAssetBurnAddress
amt = constants.net.BURN_AMOUNTS.IssueAssetBurnAmount
elif i == 1:
addr = constants.net.BURN_ADDRESSES.IssueSubAssetBurnAddress
amt = constants.net.BURN_AMOUNTS.IssueSubAssetBurnAmount
elif i == 2:
addr = constants.net.BURN_ADDRESSES.IssueUniqueAssetBurnAddress
amt = constants.net.BURN_AMOUNTS.IssueUniqueAssetBurnAmount
else:
NotImplementedError()
burn = PartialTxOutput(
scriptpubkey=bfh(address_to_script(addr)),
value=Satoshis(amt * COIN)
)
norm = [burn]
o = self.get_owner()
if o:
script = bfh(address_to_script(self.change_addrs[0]))
norm.append(PartialTxOutput(
scriptpubkey=create_transfer_asset_script(script, o, COIN),
value=Satoshis(COIN),
asset=o
))
asset = self.asset_name.get_prefix() + self.asset_name.text()
is_unique = self.create_options_layout.selected_index() == 2
amt = int(float(self.asset_amount.text()) * COIN)
d = self.associated_data.text() # type: str
i = self.associated_data_interpret
if i == InterpretType.NO_DATA:
data = None
else:
if i == InterpretType.IPFS:
data = base_decode(d, base=58)
elif i == InterpretType.HEX:
data = bfh(d)
else:
data = d.encode('latin-1')
data = data.rjust(34, b'\0')
new = [
PartialTxOutput(
scriptpubkey=create_new_asset_script(bfh(address_to_script(self.send_asset_address.text())),
asset,
amt,
int(self.divisions.text()),
self.reissuable.isChecked(),
data),
value=Satoshis(amt),
asset=asset)
]
if not is_unique:
new.append(
PartialTxOutput(
scriptpubkey=create_owner_asset_script(bfh(address_to_script(self.send_asset_address.text())),
asset + '!'),
value=Satoshis(COIN),
asset=asset + '!'
)
)
return norm, new, self.change_addrs[2]
class AssetReissueWorkspace(QWidget):
    def __init__(self, parent, reissue_asset_callable):
        """Build the reissue-asset tab.

        :param parent: the main window (provides wallet, network, config).
        :param reissue_asset_callable: callback wired to the 'Reissue Asset' button.
        """
        super().__init__()
        self.parent = parent
        # Metadata of the currently selected asset, or None before selection.
        self.current_asset_meta = None
        self.aval_owner_combo = QComboBox()
        self.aval_owner_combo.setCurrentIndex(0)
        self.divisions = FreezableLineEdit()
        self.asset_amount = FreezableLineEdit()
        self.reissuable = QCheckBox()
        self.associated_data = FreezableLineEdit()
        self.current_sats = QLabel('')
        self.associated_data_info = QLabel()
        self.associated_data_info.setAlignment(Qt.AlignCenter)
        self.associated_data_interpret_override = QComboBox()
        self.associated_data_interpret = InterpretType.NO_DATA
        self.cost_label = QLabel('Cost: {} RVN'.format(constants.net.BURN_AMOUNTS.ReissueAssetBurnAmount))
        msg = _('Reissuability') + '\n\n' \
              + _('This lets the asset be edited in the future.')
        self.reissue_label = HelpLabel(_('Reissuable'), msg)

        def on_combo_change():
            # Load the selected asset's metadata and freeze/unfreeze the
            # editors according to its reissuability.
            i = self.aval_owner_combo.currentIndex()
            if i == 0:
                self.reset_gui()
            else:
                asset = self.aval_owner_options[i]
                m = self.current_asset_meta = self.parent.wallet.get_asset_meta(asset)  # type: AssetMeta
                if not m:
                    # Edge case where we have the ownership asset, but not the normal asset
                    async def async_data_get():
                        # We will trust what the server sends us, since this is just used for GUI and locking out
                        # invalid options which would be caught in a node broadcast
                        m = await self.parent.network.get_meta_for_asset(asset)
                        if not m:
                            # Dummy data
                            _logger.warning("Couldn't query asset meta!")
                            divs = 0
                            reis = True
                            data = None
                            circulation = 0
                        else:
                            divs = m['divisions']
                            reis = False if m['reissuable'] == 0 else True
                            data = m.get('ipfs', None)
                            circulation = m['sats_in_circulation']
                        self.current_asset_meta = AssetMeta(asset, circulation, False, reis, divs, bool(data), data, -1, '', None, None)
                        r = reis
                        d = divs
                        if d < 8:
                            # Divisions can only grow: allow digits d..8 only.
                            reg_base = '012345678'
                            reg = QRegExp('^[' + reg_base[d:] + ']{1}$')
                            validator = QRegExpValidator(reg)
                            self.divisions.setValidator(validator)
                            self.divisions.setFrozen(not r)
                        self.divisions.setText(str(d))
                        self.reissuable.setCheckState(r)
                        if r:
                            self.reissuable.setEnabled(r)
                        self.reissuable.setTristate(False)
                        i = data
                        if i:
                            self.associated_data.setFrozen(not r)
                            self.associated_data.setText(i)
                            self.associated_data_interpret_override.setCurrentIndex(0)
                            self._check_associated_data()
                        else:
                            self.associated_data.setFrozen(not r)
                            self.associated_data.setText('')
                            self._check_associated_data()
                        self.asset_amount.setFrozen(not r)
                        self.asset_amount.setText('0')
                        self.current_sats.setText(
                            _("({} {} currently in circulation)").format(Satoshis(circulation), asset))
                        # Amount validator matches the asset's precision.
                        if d == 0:
                            reg = QRegExp('^[0-9]{1,11}$')
                        else:
                            reg = QRegExp('^[0-9]{1,11}\\.([0-9]{1,' + str(d) + '})$')
                        validator = QRegExpValidator(reg)
                        self.asset_amount.setValidator(validator)
                        if r:
                            self.reissue_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                            self.divisions_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                            self.data_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                            self.amount_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                        self.exec_asset_b.setEnabled(r)
                    self.parent.run_coroutine_from_thread(async_data_get())
                    return
                # Metadata available locally: same GUI update, synchronously.
                r = m.is_reissuable
                d = m.divisions
                if d < 8:
                    reg_base = '012345678'
                    reg = QRegExp('^[' + reg_base[d:] + ']{1}$')
                    validator = QRegExpValidator(reg)
                    self.divisions.setValidator(validator)
                    self.divisions.setFrozen(not r)
                else:
                    # Already at the maximum division count; nothing to edit.
                    self.divisions.setFrozen(True)
                self.divisions.setText(str(d))
                self.reissuable.setCheckState(r)
                if r:
                    self.reissuable.setEnabled(r)
                self.reissuable.setTristate(False)
                i = m.ipfs_str
                if i:
                    self.associated_data.setFrozen(not r)
                    self.associated_data.setText(m.ipfs_str)
                    self.associated_data_interpret_override.setCurrentIndex(0)
                    self._check_associated_data()
                else:
                    self.associated_data.setFrozen(not r)
                    self.associated_data.setText('')
                    self._check_associated_data()
                self.asset_amount.setFrozen(not r)
                self.asset_amount.setText('0')
                self.current_sats.setText(_("({} {} currently in circulation)").format(Satoshis(m.circulation), m.name))
                if d == 0:
                    reg = QRegExp('^[0-9]{1,11}$')
                else:
                    reg = QRegExp('^[0-9]{1,11}\\.([0-9]{1,' + str(d) + '})$')
                validator = QRegExpValidator(reg)
                self.asset_amount.setValidator(validator)
                if r:
                    self.reissue_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    self.divisions_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    self.data_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    self.amount_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())

        self.aval_owner_combo.currentIndexChanged.connect(on_combo_change)
        # Row: divisions | reissuable | associated data (+ interpret override).
        c_grid_b = QGridLayout()
        c_grid_b.setColumnStretch(2, 1)
        c_grid_b.setHorizontalSpacing(10)
        msg = _('Asset Divisions') + '\n\n' \
              + _('Asset divisions are a number from 0 to 8. They dictate how much an asset can be divided. '
                  'The minimum asset amount is 10^-d where d is the division amount. Once an asset is issued, you cannot decrease this number.')
        self.divisions_label = HelpLabel(_('Divisions'), msg)
        self.divisions.setText('')
        self.divisions.setFixedWidth(25)
        self.divisions.setFrozen(True)
        divisions_grid = QHBoxLayout()
        divisions_grid.setSpacing(0)
        divisions_grid.setContentsMargins(0, 0, 0, 0)
        divisions_grid.addWidget(self.divisions_label)
        divisions_grid.addWidget(self.divisions)
        divisions_w = QWidget()
        divisions_w.setLayout(divisions_grid)
        c_grid_b.addWidget(divisions_w, 0, 0)
        self.reissuable.setCheckState(True)
        self.reissuable.setEnabled(False)
        reissue_grid = QHBoxLayout()
        reissue_grid.setSpacing(0)
        reissue_grid.setContentsMargins(0, 0, 0, 0)
        reissue_grid.addWidget(self.reissue_label)
        reissue_grid.addWidget(self.reissuable)
        reissue_w = QWidget()
        reissue_w.setLayout(reissue_grid)
        c_grid_b.addWidget(reissue_w, 0, 1)
        msg = _('Associated Data') + '\n\n' \
              + _('Data to associate with this asset.')
        self.data_label = HelpLabel(_('Associated Data'), msg)
        self.associated_data.setFrozen(True)
        self.associated_data.textChanged.connect(self._check_associated_data)
        self.associated_data_interpret_override.addItems(['AUTO', 'IPFS', 'HEX', 'LATIN-1'])
        self.associated_data_interpret_override.currentIndexChanged.connect(self._check_associated_data)
        # Interpret override only shown in advanced mode.
        self.associated_data_interpret_override.setVisible(self.parent.config.get('advanced_asset_functions', False))
        data_grid = QHBoxLayout()
        data_grid.setSpacing(0)
        data_grid.setContentsMargins(0, 0, 0, 0)
        data_grid.addWidget(self.data_label)
        data_grid.addWidget(self.associated_data)
        data_w = QWidget()
        data_w.setLayout(data_grid)
        c_grid_b.addWidget(data_w, 0, 2)
        c_grid_b.addWidget(self.associated_data_info, 1, 2)
        c_grid_b.addWidget(self.associated_data_interpret_override, 0, 3)
        # Row: additional amount + current circulation.
        c_grid_c = QGridLayout()
        c_grid_c.setColumnStretch(4, 1)
        c_grid_c.setHorizontalSpacing(10)
        msg = _('Amount to Add') + '\n\n' \
              + _('The amount of an asset to add to circulation')
        self.amount_label = HelpLabel(_('Additional Amount'), msg)
        amount_grid = QHBoxLayout()
        amount_grid.setSpacing(0)
        amount_grid.setContentsMargins(0, 0, 0, 0)
        amount_grid.addWidget(self.amount_label)
        amount_grid.addWidget(self.asset_amount)
        amount_grid.addWidget(self.current_sats)
        amount_w = QWidget()
        amount_w.setLayout(amount_grid)
        c_grid_c.addWidget(amount_w, 0, 0)
        self.asset_amount_warning = QLabel()
        self.asset_amount_warning.setStyleSheet(ColorScheme.RED.as_stylesheet())
        self.asset_amount.textChanged.connect(self._check_amount)
        c_grid_c.addWidget(self.asset_amount_warning, 1, 0)
        self.change_addrs = None  # type: Optional[List[str]]
        self.refresh_change_addrs()
        self.send_asset_address_error = QLabel()
        self.send_asset_address_error.setStyleSheet(ColorScheme.RED.as_stylesheet())
        self.send_asset_address = QLineEdit()
        self.send_asset_address.textChanged.connect(self._check_asset_addr)
        #self.send_asset_address.setText(self.change_addrs[1])
        if len(self.change_addrs) > 1:
            self.send_asset_address.setText(self.change_addrs[1])
        else:
            # Not enough addresses yet; retry once after a short delay.
            # NOTE(review): time.sleep in an async def blocks the event loop —
            # presumably intended as a crude delay; confirm.
            async def delayed_address_update():
                time.sleep(5)
                self.refresh_change_addrs()
                self.send_asset_address.setText(self.change_addrs[1])
            self.parent.run_coroutine_from_thread(delayed_address_update())
        asset_h = QHBoxLayout()
        asset_h.addWidget(QLabel(_('New asset address:')))
        asset_h.addWidget(self.send_asset_address)
        asset_h.addWidget(self.send_asset_address_error)
        self.asset_addr_w = QWidget()
        self.asset_addr_w.setLayout(asset_h)
        self.asset_addr_w.setVisible(self.parent.config.get('advanced_asset_functions', False))
        bottom_buttons = QGridLayout()
        bottom_buttons.setColumnStretch(1, 2)
        self.exec_asset_b = EnterButton(_("Reissue Asset"), reissue_asset_callable)
        bottom_buttons.addWidget(self.exec_asset_b, 1, 0)
        bottom_buttons.addWidget(self.cost_label, 1, 1)

        def hard_reset():
            self.reset_workspace()

        self.reset_create_b = EnterButton(_("Reset"), hard_reset)
        bottom_buttons.addWidget(self.reset_create_b, 1, 3)
        top_layout = QHBoxLayout()
        top_layout.addWidget(self.aval_owner_combo)
        top_layout.addWidget(HelpButtonURL("https://ravencoin.org/assets/"))
        widgetA = QWidget()
        widgetA.setLayout(top_layout)
        widgetC = QWidget()
        widgetC.setLayout(c_grid_b)
        widgetD = QWidget()
        widgetD.setLayout(c_grid_c)
        widgetF = QWidget()
        widgetF.setLayout(bottom_buttons)
        create_l = QVBoxLayout()
        create_l.addWidget(widgetA)
        create_l.addWidget(widgetC)
        create_l.addWidget(widgetD)
        create_l.addWidget(self.asset_addr_w)
        create_l.addWidget(widgetF)
        self.setLayout(create_l)
        self.aval_owner_options = []  # type: List[str]
def _check_asset_addr(self):
addr = self.send_asset_address.text()
if not is_address(addr):
self.send_asset_address_error.setText(_('Invalid Ravencoin Address'))
return False
else:
self.send_asset_address_error.setText('')
return True
    def _check_associated_data(self) -> bool:
        """Validate the associated-data field and record how to interpret it.

        The override combo forces IPFS (1), hex (2) or latin-1 (3); index 0
        auto-detects. Auto-detection accepts only an exact 34-byte IPFS hash
        in normal mode; in advanced mode it tries IPFS ('Qm' prefix), then
        hex, then latin-1. The chosen interpretation is stored in
        self.associated_data_interpret and the info label is updated.
        Returns True when the data is acceptable.
        """
        text = self.associated_data.text()
        i = self.associated_data_interpret_override.currentIndex()
        if len(text) == 0:
            # Empty field: no associated data at all.
            self.associated_data_info.setText('')
            self.associated_data_interpret = InterpretType.NO_DATA
            return True
        if i != 0:
            # Explicit interpretation chosen by the user.
            if i == 1:
                self.associated_data_interpret = InterpretType.IPFS
                try:
                    if len(base_decode(text, base=58)) > 34:
                        self.associated_data_info.setText('Too much data in IPFS hash!')
                        self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                        return False
                    else:
                        self.associated_data_info.setText('Reading as IPFS')
                        self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                        return True
                except:
                    self.associated_data_info.setText('Invalid base 58 encoding!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
            if i == 2:
                self.associated_data_interpret = InterpretType.HEX
                try:
                    bfh(text)
                except:
                    self.associated_data_info.setText('Not a valid hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                # 34 bytes of payload == 68 hex characters max.
                if len(text) > 34 * 2:
                    self.associated_data_info.setText('Too much data in hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as hex string')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
            else:
                self.associated_data_interpret = InterpretType.LATIN
                try:
                    text.encode('latin-1')
                except:
                    self.associated_data_info.setText('Text not compatible with latin-1!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                if len(text) > 34:
                    self.associated_data_info.setText('Too much data in latin-1 string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as latin-1 string')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
        if self.parent.config.get('advanced_asset_functions', False):
            # Auto-detect, advanced mode: IPFS ('Qm' prefix) -> hex -> latin-1.
            if text[:2] == 'Qm':
                try:
                    if len(base_decode(text, base=58)) == 34:
                        self.associated_data_info.setText('Reading as IPFS')
                        self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                        self.associated_data_interpret = InterpretType.IPFS
                        return True
                except:
                    pass
            try:
                if len(text) == 1:
                    # A single character can never be a full hex byte; fall
                    # through to the latin-1 branch.
                    raise Exception()
                bytes.fromhex(text)
                self.associated_data_info.setText('Reading as hex string')
                self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                self.associated_data_interpret = InterpretType.HEX
                if len(text) > 34 * 2:
                    self.associated_data_info.setText('Too much data in hex string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                return True
            except:
                self.associated_data_info.setText('Reading as latin-1 string')
                self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                self.associated_data_interpret = InterpretType.LATIN
                try:
                    text.encode('latin-1')
                except:
                    self.associated_data_info.setText('Text not compatible with latin-1!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                if len(text) > 34:
                    self.associated_data_info.setText('Too much data in latin-1 string!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                return True
        else:
            # Auto-detect, normal mode: must be exactly a 34-byte IPFS hash.
            self.associated_data_interpret = InterpretType.IPFS
            try:
                raw = base_decode(text, base=58)
                if len(raw) > 34:
                    self.associated_data_info.setText('Too much data in IPFS hash!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                elif len(raw) < 34:
                    self.associated_data_info.setText('Too little data in IPFS hash!')
                    self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                    return False
                else:
                    self.associated_data_info.setText('Reading as IPFS')
                    self.associated_data_info.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
                    return True
            except:
                self.associated_data_info.setText('Invalid IPFS hash!')
                self.associated_data_info.setStyleSheet(ColorScheme.RED.as_stylesheet())
                return False
def _check_amount(self) -> bool:
t = self.asset_amount.text()
if not t:
self.asset_amount_warning.setText('')
return False
v = float(t) + self.current_asset_meta.circulation / 100_000_000
if v > TOTAL_COIN_SUPPLY_LIMIT_IN_BTC:
self.asset_amount_warning.setText(
_('More than the maximum amount ({})').format(TOTAL_COIN_SUPPLY_LIMIT_IN_BTC))
return False
else:
self.asset_amount_warning.setText('')
return True
def refresh_owners(self):
confirmed, unconfirmed, _ = self.parent.wallet.get_balance()
owned_assets = confirmed.assets
in_mempool = unconfirmed.assets
owners = [n for n in owned_assets.keys() if
n[-1] == '!' and owned_assets.get(n, 0) != 0]
new_aval_owner_options = ['Select an asset'] + \
sorted([n[:-1] for n in owners])
disabled_indexes = set()
for i in range(len(new_aval_owner_options)):
if i == 0:
continue
asset = new_aval_owner_options[i]
meta = self.parent.wallet.get_asset_meta(asset) # type: AssetMeta
if not meta:
continue
else:
if not meta.is_reissuable:
disabled_indexes.add(i)
new_aval_owner_options[i] = asset + ' (Non-reissuable)'
if (asset + '!') in in_mempool:
disabled_indexes.add(i)
new_aval_owner_options[i] = asset + ' (Mempool)'
diff = set(new_aval_owner_options) - set(self.aval_owner_options)
if self.aval_owner_options and not diff:
return
self.aval_owner_options = new_aval_owner_options
self.aval_owner_combo.clear()
self.aval_owner_combo.addItems(self.aval_owner_options)
for i in disabled_indexes:
self.aval_owner_combo.model().item(i).setEnabled(False)
def refresh_change_addrs(self):
# We just want addresses to send the newly created assets to
# Is there a way to improve this?
addrs = self.parent.wallet.get_change_addresses_for_new_transaction(extra_addresses=3)
if not addrs:
addrs = self.parent.wallet.get_change_addresses_for_new_transaction(allow_reusing_used_change_addrs=True, extra_addresses=3)
if not addrs:
addrs = self.parent.wallet.get_change_addresses(slice_stop=4)
if not addrs:
addrs = self.parent.wallet.get_receiving_addresses(slice_stop=4)
if len(addrs) < 4:
assert len(addrs) > 0
addr = addrs[0]
for _ in range(4 - len(addrs)):
addrs.append(addr)
self.change_addrs = addrs
def verify_valid(self) -> Optional[str]:
if not self._check_amount():
return 'Invalid amount'
if not self._check_asset_addr():
return 'Invalid address'
if not self._check_associated_data():
return 'Invalid associated data'
return None
def should_warn_associated_data(self):
text = self.associated_data.text() # type: str
i = self.associated_data_interpret
if i == InterpretType.NO_DATA:
return False
elif i == InterpretType.IPFS:
b = base_decode(text, base=58)
elif i == InterpretType.HEX:
b = bfh(text)
else:
b = text.encode('latin-1')
if len(b) < 34 and self.parent.config.get('warn_asset_small_associated', True):
return True
return False
def should_warn_on_non_reissuable(self):
c = self.reissuable.isChecked()
if not c and self.parent.config.get('warn_asset_non_reissuable', True):
return True
return False
    def reset_gui(self):
        """Return every asset-reissue widget to its initial blank, frozen state."""
        self.aval_owner_combo.setCurrentIndex(0)
        self.current_asset_meta = None
        # Freeze and blank all editable fields until an asset is selected.
        self.divisions.setFrozen(True)
        self.divisions.setText('')
        self.reissuable.setEnabled(False)
        # NOTE(review): setCheckState normally takes a Qt.CheckState; False maps
        # to Unchecked (0) here -- confirm against the widget's API.
        self.reissuable.setCheckState(False)
        self.associated_data.setFrozen(True)
        self.associated_data.setText('')
        self.associated_data_info.setText('')
        self.associated_data_interpret = InterpretType.NO_DATA
        self.associated_data_interpret_override.setCurrentIndex(0)
        self.asset_amount.setFrozen(True)
        self.asset_amount.setText('')
        self.current_sats.setText('')
        self.asset_amount_warning.setText('')
        # Grey out the field labels while no asset is selected.
        self.reissue_label.setStyleSheet(ColorScheme.GRAY.as_stylesheet())
        self.divisions_label.setStyleSheet(ColorScheme.GRAY.as_stylesheet())
        self.data_label.setStyleSheet(ColorScheme.GRAY.as_stylesheet())
        self.amount_label.setStyleSheet(ColorScheme.GRAY.as_stylesheet())
    def reset_workspace(self):
        """Reset the GUI and refresh wallet-derived state for a fresh reissue."""
        self.reset_gui()
        # Must run before self.change_addrs is read below.
        self.refresh_change_addrs()
        self.refresh_owners()
        self.send_asset_address.setText(self.change_addrs[1])
def get_owner(self):
i = self.aval_owner_combo.currentIndex()
if i == 0:
return None
return self.aval_owner_options[i] + '!'
    def get_output(self):
        """Build the transaction outputs for an asset reissue.

        Returns a tuple ``(norm, new, change_addr)``: ``norm`` holds the burn
        output plus the ownership-asset transfer output, ``new`` holds the
        reissued-asset output, and ``change_addr`` is an address for change.
        """
        # Protocol-required burn output for a reissue operation.
        burn = PartialTxOutput(
            scriptpubkey=bfh(address_to_script(
                constants.net.BURN_ADDRESSES.ReissueAssetBurnAddress
            )),
            value=Satoshis(constants.net.BURN_AMOUNTS.ReissueAssetBurnAmount * COIN)
        )
        o = self.get_owner()
        # Send the ownership asset ('name!') back to one of our own addresses.
        script = bfh(address_to_script(self.change_addrs[0]))
        ownr = PartialTxOutput(
            scriptpubkey=create_transfer_asset_script(script, o, COIN),
            value=Satoshis(COIN),
            asset=o
        )
        norm = [burn, ownr]
        asset = o[:-1]  # strip the trailing '!' to get the plain asset name
        amt = int(float(self.asset_amount.text()) * COIN)
        d = self.associated_data.text()  # type: str
        i = self.associated_data_interpret
        if i == InterpretType.NO_DATA:
            data = None
        else:
            if i == InterpretType.IPFS:
                data = base_decode(d, base=58)
            elif i == InterpretType.HEX:
                data = bfh(d)
            else:
                data = d.encode('latin-1')
            # Associated data is a fixed 34-byte field; pad on the left with NULs.
            data = data.rjust(34, b'\0')
        divs = int(self.divisions.text())
        new = [
            PartialTxOutput(
                # b'\xff' presumably signals "divisions unchanged" to the script
                # builder -- TODO confirm against create_reissue_asset_script.
                scriptpubkey=create_reissue_asset_script(bfh(address_to_script(self.send_asset_address.text())),
                                                        asset,
                                                        amt,
                                                        bytes([divs]) if divs != self.current_asset_meta.divisions else b'\xff',
                                                        self.reissuable.isChecked(),
                                                        data),
                value=Satoshis(amt),
                asset=asset)
        ]
        return norm, new, self.change_addrs[2]
|
<filename>hub-detect/src/main/groovy/com/blackducksoftware/integration/hub/detect/detector/gradle/GradleReportParser.java
/**
* hub-detect
*
* Copyright (C) 2018 Black Duck Software, Inc.
* http://www.blackducksoftware.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.blackducksoftware.integration.hub.detect.detector.gradle;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.blackducksoftware.integration.hub.detect.util.DependencyHistory;
import com.blackducksoftware.integration.hub.detect.workflow.codelocation.DetectCodeLocation;
import com.blackducksoftware.integration.hub.detect.workflow.codelocation.DetectCodeLocationType;
import com.synopsys.integration.hub.bdio.graph.MutableDependencyGraph;
import com.synopsys.integration.hub.bdio.graph.MutableMapDependencyGraph;
import com.synopsys.integration.hub.bdio.model.dependency.Dependency;
import com.synopsys.integration.hub.bdio.model.externalid.ExternalId;
import com.synopsys.integration.hub.bdio.model.externalid.ExternalIdFactory;
import com.synopsys.integration.util.NameVersion;
public class GradleReportParser {
    private final Logger logger = LoggerFactory.getLogger(GradleReportParser.class);

    public static final String PROJECT_PATH_PREFIX = "projectPath:";
    public static final String PROJECT_GROUP_PREFIX = "projectGroup:";
    public static final String PROJECT_NAME_PREFIX = "projectName:";
    public static final String PROJECT_VERSION_PREFIX = "projectVersion:";
    public static final String ROOT_PROJECT_NAME_PREFIX = "rootProjectName:";
    public static final String ROOT_PROJECT_VERSION_PREFIX = "rootProjectVersion:";
    public static final String DETECT_META_DATA_HEADER = "DETECT META DATA START";
    public static final String DETECT_META_DATA_FOOTER = "DETECT META DATA END";

    private final ExternalIdFactory externalIdFactory;
    // Re-created per configuration section (on blank lines) while parsing.
    private GradleReportConfigurationParser gradleReportConfigurationParser = new GradleReportConfigurationParser();

    public GradleReportParser(final ExternalIdFactory externalIdFactory) {
        this.externalIdFactory = externalIdFactory;
    }

    /**
     * Parses a Detect Gradle report file: the "gradle dependencies" tree output
     * followed by a metadata trailer holding the project's coordinates.
     *
     * @param codeLocationFile the report file written by the Detect Gradle task
     * @return the parsed code location, or empty when the file could not be read
     */
    public Optional<DetectCodeLocation> parseDependencies(final File codeLocationFile) {
        DetectCodeLocation codeLocation = null;
        String projectSourcePath = "";
        String projectGroup = "";
        String projectName = "";
        String projectVersionName = "";
        boolean processingMetaData = false;
        final MutableDependencyGraph graph = new MutableMapDependencyGraph();
        final DependencyHistory history = new DependencyHistory();
        try (FileInputStream dependenciesInputStream = new FileInputStream(codeLocationFile); BufferedReader reader = new BufferedReader(new InputStreamReader(dependenciesInputStream, StandardCharsets.UTF_8))) {
            String line;
            // Read until end-of-stream. The previous reader.ready() loop could
            // terminate early: ready() only reports whether a read would block,
            // not whether more data remains.
            while ((line = reader.readLine()) != null) {
                /*
                 * The meta data section will be at the end of the file after all of the "gradle dependencies" output
                 */
                if (line.startsWith(DETECT_META_DATA_HEADER)) {
                    processingMetaData = true;
                    continue;
                }
                if (line.startsWith(DETECT_META_DATA_FOOTER)) {
                    processingMetaData = false;
                    continue;
                }
                if (processingMetaData) {
                    if (line.startsWith(PROJECT_PATH_PREFIX)) {
                        projectSourcePath = line.substring(PROJECT_PATH_PREFIX.length()).trim();
                    } else if (line.startsWith(PROJECT_GROUP_PREFIX)) {
                        projectGroup = line.substring(PROJECT_GROUP_PREFIX.length()).trim();
                    } else if (line.startsWith(PROJECT_NAME_PREFIX)) {
                        projectName = line.substring(PROJECT_NAME_PREFIX.length()).trim();
                    } else if (line.startsWith(PROJECT_VERSION_PREFIX)) {
                        projectVersionName = line.substring(PROJECT_VERSION_PREFIX.length()).trim();
                    }
                    continue;
                }
                // Blank lines separate gradle configurations; reset tree state.
                if (StringUtils.isBlank(line)) {
                    history.clear();
                    gradleReportConfigurationParser = new GradleReportConfigurationParser();
                    continue;
                }
                final Dependency dependency = gradleReportConfigurationParser.parseDependency(externalIdFactory, line);
                if (dependency == null) {
                    continue;
                }
                final int lineTreeLevel = gradleReportConfigurationParser.getTreeLevel();
                try {
                    history.clearDependenciesDeeperThan(lineTreeLevel);
                } catch (final IllegalStateException e) {
                    logger.warn(String.format("Problem parsing line '%s': %s", line, e.getMessage()));
                }
                if (history.isEmpty()) {
                    graph.addChildToRoot(dependency);
                } else {
                    graph.addChildWithParents(dependency, history.getLastDependency());
                }
                history.add(dependency);
            }
            final ExternalId id = externalIdFactory.createMavenExternalId(projectGroup, projectName, projectVersionName);
            codeLocation = new DetectCodeLocation.Builder(DetectCodeLocationType.GRADLE, projectSourcePath, id, graph).build();
        } catch (final IOException e) {
            // Previously swallowed silently; log so a missing/unreadable report
            // is diagnosable. Callers still receive Optional.empty().
            logger.error(String.format("Failed to read gradle dependency report %s: %s", codeLocationFile.getAbsolutePath(), e.getMessage()), e);
            codeLocation = null;
        }
        return Optional.ofNullable(codeLocation);
    }

    /**
     * Reads just the root project's name and version from the metadata section
     * of the root project report file.
     *
     * @param rootProjectMetadataFile the metadata report file
     * @return the root project name/version, or empty when the file could not be read
     */
    public Optional<NameVersion> parseRootProjectNameVersion(final File rootProjectMetadataFile) {
        NameVersion nameVersion = null;
        String rootProjectName = null;
        String rootProjectVersionName = null;
        boolean processingMetaData = false;
        try (FileInputStream dependenciesInputStream = new FileInputStream(rootProjectMetadataFile); BufferedReader reader = new BufferedReader(new InputStreamReader(dependenciesInputStream, StandardCharsets.UTF_8))) {
            String line;
            // Same end-of-stream loop fix as parseDependencies.
            while ((line = reader.readLine()) != null) {
                if (line.startsWith(DETECT_META_DATA_HEADER)) {
                    processingMetaData = true;
                    continue;
                }
                if (line.startsWith(DETECT_META_DATA_FOOTER)) {
                    processingMetaData = false;
                    continue;
                }
                if (processingMetaData) {
                    if (line.startsWith(ROOT_PROJECT_NAME_PREFIX)) {
                        rootProjectName = line.substring(ROOT_PROJECT_NAME_PREFIX.length()).trim();
                    } else if (line.startsWith(ROOT_PROJECT_VERSION_PREFIX)) {
                        rootProjectVersionName = line.substring(ROOT_PROJECT_VERSION_PREFIX.length()).trim();
                    }
                    continue;
                }
            }
            nameVersion = new NameVersion(rootProjectName, rootProjectVersionName);
        } catch (final IOException e) {
            logger.error(String.format("Failed to read gradle metadata report %s: %s", rootProjectMetadataFile.getAbsolutePath(), e.getMessage()), e);
            nameVersion = null;
        }
        return Optional.ofNullable(nameVersion);
    }
}
|
#!/usr/bin/env bats
# Integration tests for the calc.d example grammar: generate the parser,
# compile it, then run it against the sample input file.
load "$TESTDIR/utils.sh"

@test "Testing calc.d - generation" {
    test_generate "$ROOTDIR/examples/calc.peg"
}

@test "Testing calc.d - compilation" {
    # Use $CC when set, falling back to the system cc.
    ${CC:-cc} calc.d/parser.c -o calc.d/parser
}

@test "Testing calc.d - run" {
    run_for_input calc.d/input.txt
}
|
package me.insidezhou.southernquiet.filesystem;
/**
 * Which piece of path metadata to sort by.
 * Entries with the {@code Desc} suffix sort in descending order.
 */
public enum PathMetaSort {
    Name,
    IsDirectory,
    CreationTime,
    LastModifiedTime,
    LastAccessTime,
    Size,
    NameDesc,
    IsDirectoryDesc,
    CreationTimeDesc,
    LastModifiedTimeDesc,
    LastAccessTimeDesc,
    SizeDesc
}
|
/*
Convert a Wavefront OBJ file to a binary glTF (.glb).
Requires: npm install obj2gltf
Usage: node <script> <input.obj> <output.glb>
*/
const myArgs = process.argv.slice(2);
// Previously assigned as implicit globals; declared properly now.
const objName = myArgs[0];
const glbName = myArgs[1];
if (!objName || !glbName) {
    console.error('Usage: node <script> <input.obj> <output.glb>');
    process.exit(1);
}
const obj2gltf = require('obj2gltf');
const fs = require('fs');
const options = {
    binary: true  // produce a single self-contained .glb
};
obj2gltf(objName, options)
    .then(function (glb) {
        fs.writeFileSync(glbName, glb);
    })
    .catch(function (err) {
        // Without this the script exited 0 on failure with an
        // unhandled-rejection warning.
        console.error('Conversion failed:', err);
        process.exit(1);
    });
|
echo "Downloading stb_image.h..."
# -p: succeed even if the directory already exists (plain mkdir would fail
# on a re-run and the previous version ignored that failure).
mkdir -p stb
# Fetch quietly, but fail loudly if the download did not work instead of
# discarding wget's errors.
if wget -q https://raw.githubusercontent.com/nothings/stb/master/stb_image.h; then
    mv stb_image.h stb
else
    echo "Error: failed to download stb_image.h" >&2
    exit 1
fi
|
<reponame>m-nakagawa/sample
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot.lang;
import java.io.ByteArrayInputStream ;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.atlas.lib.StrUtils ;
import org.apache.jena.riot.ErrorHandlerTestLib.ErrorHandlerEx;
import org.apache.jena.riot.ErrorHandlerTestLib.ExError;
import org.apache.jena.riot.ErrorHandlerTestLib.ExFatal;
import org.apache.jena.riot.ErrorHandlerTestLib.ExWarning;
import org.apache.jena.riot.Lang ;
import org.apache.jena.riot.out.CharSpace;
import org.apache.jena.riot.system.ErrorHandler ;
import org.apache.jena.riot.system.RiotLib ;
import org.apache.jena.riot.system.StreamRDFLib ;
import org.apache.jena.riot.tokens.Tokenizer ;
import org.apache.jena.riot.tokens.TokenizerFactory ;
import org.junit.AfterClass ;
import org.junit.BeforeClass ;
import org.junit.Test ;
import static org.apache.jena.riot.system.ErrorHandlerFactory.errorHandlerNoLogging ;
import static org.apache.jena.riot.system.ErrorHandlerFactory.getDefaultErrorHandler ;
import static org.apache.jena.riot.system.ErrorHandlerFactory.setDefaultErrorHandler ;
/** Test of syntax by a tuples parser (does not include node validity checking) */
abstract public class TestLangNTuples extends BaseTest
{
    // Test streaming interface.
    private static ErrorHandler errorhandler = null ;

    // Silence error-handler logging for the duration of this test class;
    // the previous default handler is restored in afterClass().
    @BeforeClass public static void beforeClass()
    {
        errorhandler = getDefaultErrorHandler() ;
        setDefaultErrorHandler(errorHandlerNoLogging) ;
    }

    @AfterClass public static void afterClass()
    {
        setDefaultErrorHandler(errorhandler) ;
    }

    // Well-formed inputs: each parse should yield the expected tuple count.
    @Test public void tuple_0()
    {
        long count = parseCount("") ;
        assertEquals(0, count) ;
    }

    @Test public void tuple_1()
    {
        long count = parseCount("<x> <y> <z>.") ;
        assertEquals(1, count) ;
    }

    @Test public void tuple_2()
    {
        long count = parseCount("<x> <y> \"z\".") ;
        assertEquals(1, count) ;
    }

    @Test public void tuple_3()
    {
        long count = parseCount("<x> <y> <z>. <x> <y> <z>.") ;
        assertEquals(2, count) ;
    }

    @Test public void tuple_4()
    {
        long count = parseCount("<x> <y> \"123\"^^<int>.") ;
        assertEquals(1, count) ;
    }

    @Test public void tuple_5()
    {
        long count = parseCount("<x> <y> \"123\"@lang.") ;
        assertEquals(1,count) ;
    }

    // Test iterator interface.

    // Test parse errors interface.
    // Malformed inputs: each parse is expected to raise a fatal error.
    @Test(expected=ExFatal.class)
    public void tuple_bad_01()
    {
        parseCount("<x> <y> <z>") ; // No DOT
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_02()
    {
        parseCount("<x> _:a <z> .") ; // Bad predicate
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_03()
    {
        parseCount("<x> \"p\" <z> .") ; // Bad predicate
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_4()
    {
        parseCount("\"x\" <p> <z> .") ; // Bad subject
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_5()
    {
        parseCount("<x> <p> ?var .") ; // No variables
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_6()
    {
        parseCount("<x> <p> 123 .") ; // No abbreviations.
    }

    @Test(expected=ExFatal.class)
    public void tuple_bad_7()
    {
        parseCount("<x> <p> x:y .") ; // No prefixed names
    }

    // Bad terms - but accepted by default.
    @Test(expected=ExFatal.class)
    public void tuple_bad_10() { parseCount("<x> <p> <bad uri> .") ; }

    // Bad terms (value range) - but legal syntax
    @Test
    public void tuple_bad_11() { parseCount("<x> <p> \"9000\"^^<http://www.w3.org/2001/XMLSchema#byte> .") ; }

    // Bad - relative URI.
    @Test(expected=ExError.class)
    public void tuple_bad_21() { parseCheck("<x> <p> <z> .") ; }

    // Bad terms
    @Test(expected=ExFatal.class)
    public void tuple_bad_22() { parseCheck("<http://example/x> <http://example/p> \"abc\"^^<http://example/bad uri> .") ; }

    @Test(expected=ExWarning.class)
    public void tuple_bad_23() { parseCheck("<http://example/x> <http://example/p> \"9000\"^^<http://www.w3.org/2001/XMLSchema#byte> .") ; }

    // ASCII vs UTF-8
    @Test
    public void tuple_charset_1()
    {
        // E9 is e-acute
        parseCheck("<http://example/x\\u00E9> <http://example/p> <http://example/s> .") ;
    }

    @Test
    public void tuple_charset_2()
    {
        parseCheck("<http://example/é> <http://example/p> \"é\" .") ;
    }

    // Build a tokenizer over the UTF-8 bytes of the string, honouring the
    // requested character-space restriction (ASCII or full UTF-8).
    static protected Tokenizer tokenizer(CharSpace charSpace, String string)
    {
        byte b[] = StrUtils.asUTF8bytes(string) ;
        ByteArrayInputStream in = new ByteArrayInputStream(b) ;
        Tokenizer tokenizer = charSpace == CharSpace.ASCII ? TokenizerFactory.makeTokenizerASCII(in) : TokenizerFactory.makeTokenizerUTF8(in) ;
        return tokenizer ;
    }

    //
    static protected Tokenizer tokenizer(String string)
    {
        // UTF-8
        byte b[] = StrUtils.asUTF8bytes(string) ;
        ByteArrayInputStream in = new ByteArrayInputStream(b) ;
        Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in) ;
        return tokenizer ;
    }

    // Parse with strict checking (counts discarded); used by tests that only
    // care about which error/warning is raised.
    final protected void parseCheck(String... strings)
    {
        String string = StrUtils.strjoin("\n", strings) ;
        Tokenizer tokenizer = tokenizer(string) ;
        StreamRDFCounting sink = StreamRDFLib.count() ;
        @SuppressWarnings("deprecation")
        LangRIOT x = RiotParsers.createParserNQuads(tokenizer, sink) ;
        x.setProfile(RiotLib.profile(null, false, true, new ErrorHandlerEx())) ;
        x.parse() ;
    }

    //
    // The concrete tuple language under test (N-Triples or N-Quads).
    protected abstract Lang getLang() ;

    protected long parseCount(String... strings) {
        return ParserTestBaseLib.parseCount(getLang(), strings) ;
    }

    // protected void parseCheck(String... strings) {
    // ParserTestBaseLib.parseCount(getLang(), strings) ;
    // }
}
|
#!/bin/sh
# Container entrypoint: in development, wait for the postgres container to
# accept connections; then create the database tables and hand control to
# the container's main command.
if [ "$FLASK_ENV" = "development" ]
then
    echo "Waiting for postgres container to build..."
    # Poll until the postgres port accepts TCP connections. Variables are
    # quoted so empty/whitespace values fail visibly instead of mangling
    # the nc argument list.
    while ! nc -z "$POSTGRES_CONTAINER_NAME_APP" "$POSTGRES_PORT_APP"; do
        sleep 0.1
    done
    echo "PostgreSQL container started"
else
    echo "PostgreSQL database is already running in Heroku"
fi
echo "Creating the database tables..."
# gets executed at /app directory in container
python3 appserver.py create-db
echo "Tables created"
# Replace this shell with the command passed to the container (keeps PID 1).
exec "$@"
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.option.definition;
import org.apache.commons.lang.Validate;
import com.opengamma.util.time.Expiry;
/**
* Class defining a gap option.
* <p>
* Gap options have European-style exercise with payoff
* $$
* \begin{align*}
* \mathrm{payoff} =
* \begin{cases}
* 0 \quad & \mathrm{if} \quad S \leq K_1\\
* S - K_2 \quad & \mathrm{otherwise}
* \end{cases}
* \end{align*}
* $$
* for a call and
* $$
* \begin{align*}
* \mathrm{payoff} =
* \begin{cases}
* 0 \quad & \mathrm{if} \quad S \geq K_1\\
* K_2 - S \quad & \mathrm{otherwise}
* \end{cases}
* \end{align*}
* $$
* for a put, where $K_1$ is the strike, $K_2$ is the payoff strike and $S$ is
* the spot.
*/
public class GapOptionDefinition extends OptionDefinition {
  /** Gap options are European-style: exercisable only at expiry. */
  private final OptionExerciseFunction<StandardOptionDataBundle> _exerciseFunction = new EuropeanExerciseFunction<StandardOptionDataBundle>();
  private final OptionPayoffFunction<StandardOptionDataBundle> _payoffFunction = new OptionPayoffFunction<StandardOptionDataBundle>() {

    @Override
    public double getPayoff(final StandardOptionDataBundle data, final Double optionPrice) {
      Validate.notNull(data);
      final double s = data.getSpot();
      final double k = getStrike();
      final double x = getPayoffStrike();
      // Call: 0 if S <= K, otherwise S - K2. Put: 0 if S >= K, otherwise K2 - S.
      return isCall() ? s <= k ? 0 : s - x : s >= k ? 0 : x - s;
    }
  };
  private final double _payoffStrike;

  /**
   * @param strike The strike
   * @param expiry The expiry
   * @param isCall Is the option a call or put
   * @param payoffStrike The payoff strike of the option, not negative
   */
  public GapOptionDefinition(final double strike, final Expiry expiry, final boolean isCall, final double payoffStrike) {
    super(strike, expiry, isCall);
    // The check accepts zero; the javadoc previously claimed "greater than
    // zero". Both now state the actual constraint, and the failure message
    // says what went wrong.
    Validate.isTrue(payoffStrike >= 0, "payoff strike must not be negative");
    _payoffStrike = payoffStrike;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public OptionExerciseFunction<StandardOptionDataBundle> getExerciseFunction() {
    return _exerciseFunction;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public OptionPayoffFunction<StandardOptionDataBundle> getPayoffFunction() {
    return _payoffFunction;
  }

  /**
   * @return The payoff strike ($K_2$) of the option
   */
  public double getPayoffStrike() {
    return _payoffStrike;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    long temp;
    temp = Double.doubleToLongBits(_payoffStrike);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final GapOptionDefinition other = (GapOptionDefinition) obj;
    if (Double.doubleToLongBits(_payoffStrike) != Double.doubleToLongBits(other._payoffStrike)) {
      return false;
    }
    return true;
  }
}
|
#! /bin/bash
# Smoke-test db_encode over the four supported input combinations:
# {uva, mva} analyses x {.nii volume, .roi.h5 ROI} responses.
TMP_DIR="$HOME/.dnnbrain_tmp"
mkdir -p "$TMP_DIR"

# Shared inputs, factored out of the (previously copy-pasted) command lines
# and quoted so paths with spaces do not word-split.
ACT="$DNNBRAIN_DATA/test/image/sub-CSI1_ses-01_imagenet.act.h5"
DMASK="$DNNBRAIN_DATA/test/alexnet.dmask.csv"
RESP_NII="$DNNBRAIN_DATA/test/sub-CSI1_ses-01_imagenet_beta_L.nii.gz"
BMASK="$DNNBRAIN_DATA/test/PHA1_L.nii.gz"
RESP_ROI="$DNNBRAIN_DATA/test/PHA1.roi.h5"

# do uva with .nii
db_encode -anal uva -act "$ACT" -dmask "$DMASK" -iteraxis channel -resp "$RESP_NII" -bmask "$BMASK" -model glm -out "$TMP_DIR"

# do mva with .nii
db_encode -anal mva -act "$ACT" -dmask "$DMASK" -iteraxis channel -resp "$RESP_NII" -bmask "$BMASK" -model glm -out "$TMP_DIR"

# do uva with .roi.h5
db_encode -anal uva -act "$ACT" -dmask "$DMASK" -iteraxis channel -resp "$RESP_ROI" -roi PHA1_R -model glm -out "$TMP_DIR"

# do mva with .roi.h5
db_encode -anal mva -act "$ACT" -dmask "$DMASK" -iteraxis channel -resp "$RESP_ROI" -roi PHA1_R -model glm -out "$TMP_DIR"
|
<gh_stars>10-100
// Header names used to carry the Function Compute event and context through
// an Express-style request.
export const EVENT_HEADER_NAME = 'x-fc-express-event';
export const CONTEXT_HEADER_NAME = 'x-fc-express-context';

/** Node-style completion callback: error first, then result data. */
export type Callback = (err?: Error, data?: any) => void;

/** Resolves a pending invocation with its result data. */
export type Resolver = (data?: any) => void;

/** Base shape shared by all invocation contexts. */
export interface Context {
    context: any;
}

/** Context for invocations arriving through an API Gateway trigger. */
export interface ApiGatewayContext extends Context {
    event: any;
    callback: Callback;
}

/** Context for invocations arriving through an HTTP trigger. */
export interface HttpTriggerContext extends Context {
    request: any;
    response: any;
}

/** Adapts a specific context type onto the underlying request handler. */
export interface Proxy<T extends Context> {
    handle(ctx: T): void;
}
|
def simulate_robot(commands):
    """Simulate a simple robot on a 2D grid.

    Supported commands:
      * ``enc_tgt(x, y, angle)`` -- teleport to (x, y) and set the heading
        (degrees, normalized mod 360).
      * ``fwd()`` -- move one unit in the current heading: 0 -> +y,
        90 -> +x, 180 -> -y, 270 -> -x.

    Returns the final ``(x, y)`` position.

    Raises ValueError for a malformed ``enc_tgt`` command.
    """
    import re

    x, y = 0, 0
    angle = 90  # Initial heading: 90 degrees, i.e. facing the positive x-axis.
    for command in commands:
        if command.startswith('enc_tgt'):
            # Regex parse instead of str.strip('enc_tgt()'), which removes a
            # *set* of characters from both ends and is fragile; this also
            # supports negative coordinates and optional spaces.
            match = re.match(r'enc_tgt\((-?\d+),\s*(-?\d+),\s*(-?\d+)\)\s*$', command)
            if match is None:
                raise ValueError('malformed enc_tgt command: %r' % (command,))
            x, y = int(match.group(1)), int(match.group(2))
            # Normalize so e.g. 450 behaves like 90 (generalization; the old
            # code silently ignored fwd() for out-of-range angles).
            angle = int(match.group(3)) % 360
        elif command == 'fwd()':
            if angle == 0:
                y += 1
            elif angle == 90:
                x += 1
            elif angle == 180:
                y -= 1
            elif angle == 270:
                x -= 1
    return x, y
|
import React from 'react';
const UserProfile = ({name, age, location, handleFollow}) => (
<div>
<h3>{name}</h3>
<p>Age: {age}</p>
<p>Location: {location}</p>
<button onClick={handleFollow}>Follow</button>
</div>
);
export default UserProfile;
|
<filename>FPSLighting/Dependencies/DIRECTX/Samples/C++/Direct3D10/Tutorials/Tutorial09/Tutorial09.cpp
//--------------------------------------------------------------------------------------
// File: Tutorial09.cpp
//
// Mesh loading through DXUT
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
#include "DXUT.h"
#include "DXUTmisc.h"
#include "SDKmisc.h"
#include "SDKmesh.h"
#define DEG2RAD( a ) ( a * D3DX_PI / 180.f )
//--------------------------------------------------------------------------------------
// Global Variables
//--------------------------------------------------------------------------------------
ID3D10Effect* g_pEffect = NULL;                                  // Compiled Tutorial09.fx effect
ID3D10InputLayout* g_pVertexLayout = NULL;                       // Vertex layout matching the effect's input signature
ID3D10EffectTechnique* g_pTechnique = NULL;                      // "Render" technique from the effect
CDXUTSDKMesh g_Mesh;                                             // Mesh loaded from tiny.sdkmesh
ID3D10EffectShaderResourceVariable* g_ptxDiffuseVariable = NULL; // Binding for the g_txDiffuse texture
ID3D10EffectMatrixVariable* g_pWorldVariable = NULL;             // Binding for the World matrix
ID3D10EffectMatrixVariable* g_pViewVariable = NULL;              // Binding for the View matrix
ID3D10EffectMatrixVariable* g_pProjectionVariable = NULL;        // Binding for the Projection matrix
D3DXMATRIX g_World;                                              // World transform (identity here)
D3DXMATRIX g_View;                                               // Camera view transform
D3DXMATRIX g_Projection;                                         // Perspective projection transform

//--------------------------------------------------------------------------------------
// Forward declarations
//--------------------------------------------------------------------------------------
// DXUT callback signatures; implementations follow later in the file.
bool CALLBACK IsD3D10DeviceAcceptable( UINT Adapter, UINT Output, D3D10_DRIVER_TYPE DeviceType,
DXGI_FORMAT BufferFormat, bool bWindowed, void* pUserContext );
HRESULT CALLBACK OnD3D10CreateDevice( ID3D10Device* pd3dDevice, const DXGI_SURFACE_DESC* pBufferSurfaceDesc,
void* pUserContext );
HRESULT CALLBACK OnD3D10ResizedSwapChain( ID3D10Device* pd3dDevice, IDXGISwapChain* pSwapChain,
const DXGI_SURFACE_DESC* pBufferSurfaceDesc, void* pUserContext );
void CALLBACK OnD3D10ReleasingSwapChain( void* pUserContext );
void CALLBACK OnD3D10DestroyDevice( void* pUserContext );
void CALLBACK OnD3D10FrameRender( ID3D10Device* pd3dDevice, double fTime, float fElapsedTime, void* pUserContext );
LRESULT CALLBACK MsgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam, bool* pbNoFurtherProcessing,
void* pUserContext );
void CALLBACK OnKeyboard( UINT nChar, bool bKeyDown, bool bAltDown, void* pUserContext );
void CALLBACK OnFrameMove( double fTime, float fElapsedTime, void* pUserContext );
bool CALLBACK ModifyDeviceSettings( DXUTDeviceSettings* pDeviceSettings, void* pUserContext );
//--------------------------------------------------------------------------------------
// Entry point to the program. Initializes everything and goes into a message processing
// loop. Idle time is used to render the scene.
//--------------------------------------------------------------------------------------
INT WINAPI wWinMain( HINSTANCE, HINSTANCE, LPWSTR, int )
{
    // Enable run-time memory check for debug builds.
#if defined(DEBUG) | defined(_DEBUG)
    // Note: bitwise '|' as shipped in the SDK sample; both operands are 0/1
    // so it evaluates the same as '||' here.
    _CrtSetDbgFlag( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF );
#endif

    // DXUT will create and use the best device (either D3D9 or D3D10)
    // that is available on the system depending on which D3D callbacks are set below

    // Set DXUT callbacks
    DXUTSetCallbackD3D10DeviceAcceptable( IsD3D10DeviceAcceptable );
    DXUTSetCallbackD3D10DeviceCreated( OnD3D10CreateDevice );
    DXUTSetCallbackD3D10SwapChainResized( OnD3D10ResizedSwapChain );
    DXUTSetCallbackD3D10SwapChainReleasing( OnD3D10ReleasingSwapChain );
    DXUTSetCallbackD3D10DeviceDestroyed( OnD3D10DestroyDevice );
    DXUTSetCallbackD3D10FrameRender( OnD3D10FrameRender );
    DXUTSetCallbackMsgProc( MsgProc );
    DXUTSetCallbackKeyboard( OnKeyboard );
    DXUTSetCallbackFrameMove( OnFrameMove );
    DXUTSetCallbackDeviceChanging( ModifyDeviceSettings );

    DXUTInit( true, true, NULL ); // Parse the command line, show msgboxes on error, no extra command line params
    DXUTSetCursorSettings( true, true ); // Show the cursor and clip it when in full screen
    DXUTCreateWindow( L"Tutorial09" );
    DXUTCreateDevice( true, 640, 480 );
    DXUTMainLoop(); // Enter into the DXUT render loop

    return DXUTGetExitCode();
}
//--------------------------------------------------------------------------------------
// Reject any D3D10 devices that aren't acceptable by returning false
//--------------------------------------------------------------------------------------
bool CALLBACK IsD3D10DeviceAcceptable( UINT Adapter, UINT Output, D3D10_DRIVER_TYPE DeviceType,
                                       DXGI_FORMAT BufferFormat, bool bWindowed, void* pUserContext )
{
    // This sample accepts every enumerated adapter/output/format combination.
    return true;
}
//--------------------------------------------------------------------------------------
// Create any D3D10 resources that aren't dependant on the back buffer
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D10CreateDevice( ID3D10Device* pd3dDevice, const DXGI_SURFACE_DESC* pBufferSurfaceDesc,
                                      void* pUserContext )
{
    HRESULT hr;

    // Find the D3DX effect file
    // (V_RETURN propagates a failed HRESULT straight back to the caller.)
    WCHAR str[MAX_PATH];
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"Tutorial09.fx" ) );
    DWORD dwShaderFlags = D3D10_SHADER_ENABLE_STRICTNESS;
#if defined( DEBUG ) || defined( _DEBUG )
    // Set the D3D10_SHADER_DEBUG flag to embed debug information in the shaders.
    // Setting this flag improves the shader debugging experience, but still allows
    // the shaders to be optimized and to run exactly the way they will run in
    // the release configuration of this program.
    dwShaderFlags |= D3D10_SHADER_DEBUG;
#endif
    V_RETURN( D3DX10CreateEffectFromFile( str, NULL, NULL, "fx_4_0", dwShaderFlags, 0, pd3dDevice, NULL,
                                          NULL, &g_pEffect, NULL, NULL ) );

    // Obtain the technique and cache handles to the effect's variables so they
    // can be updated each frame without name lookups.
    g_pTechnique = g_pEffect->GetTechniqueByName( "Render" );
    g_ptxDiffuseVariable = g_pEffect->GetVariableByName( "g_txDiffuse" )->AsShaderResource();
    g_pWorldVariable = g_pEffect->GetVariableByName( "World" )->AsMatrix();
    g_pViewVariable = g_pEffect->GetVariableByName( "View" )->AsMatrix();
    g_pProjectionVariable = g_pEffect->GetVariableByName( "Projection" )->AsMatrix();

    // Define the input layout (position / normal / texcoord, matching the mesh
    // vertex format; offsets are in bytes).
    const D3D10_INPUT_ELEMENT_DESC layout[] =
    {
        { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D10_INPUT_PER_VERTEX_DATA, 0 },
        { "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D10_INPUT_PER_VERTEX_DATA, 0 },
        { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D10_INPUT_PER_VERTEX_DATA, 0 },
    };
    UINT numElements = sizeof( layout ) / sizeof( layout[0] );

    // Create the input layout
    D3D10_PASS_DESC PassDesc;
    g_pTechnique->GetPassByIndex( 0 )->GetDesc( &PassDesc );
    V_RETURN( pd3dDevice->CreateInputLayout( layout, numElements, PassDesc.pIAInputSignature,
                                             PassDesc.IAInputSignatureSize, &g_pVertexLayout ) );

    // Set the input layout
    pd3dDevice->IASetInputLayout( g_pVertexLayout );

    // Load the mesh
    V_RETURN( g_Mesh.Create( pd3dDevice, L"Tiny\\tiny.sdkmesh", true ) );

    // Initialize the world matrices
    D3DXMatrixIdentity( &g_World );

    // Initialize the view matrix
    D3DXVECTOR3 Eye( 0.0f, 3.0f, -500.0f );
    D3DXVECTOR3 At( 0.0f, 1.0f, 0.0f );
    D3DXVECTOR3 Up( 0.0f, 1.0f, 0.0f );
    D3DXMatrixLookAtLH( &g_View, &Eye, &At, &Up );

    // Update Variables that never change
    g_pViewVariable->SetMatrix( ( float* )&g_View );

    return S_OK;
}
//--------------------------------------------------------------------------------------
// Create any D3D10 resources that depend on the back buffer
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D10ResizedSwapChain( ID3D10Device* pd3dDevice, IDXGISwapChain* pSwapChain,
                                          const DXGI_SURFACE_DESC* pBufferSurfaceDesc, void* pUserContext )
{
    // Setup the projection parameters again: the aspect ratio depends on the
    // new back-buffer size, so the projection matrix must be rebuilt and
    // pushed to the effect on every resize.
    float fAspect = static_cast<float>( pBufferSurfaceDesc->Width ) / static_cast<float>( pBufferSurfaceDesc->Height );
    D3DXMatrixPerspectiveFovLH( &g_Projection, D3DX_PI * 0.25f, fAspect, 0.5f, 1000.0f );
    g_pProjectionVariable->SetMatrix( ( float* )&g_Projection );

    return S_OK;
}
//--------------------------------------------------------------------------------------
// Render the scene using the D3D10 device
//--------------------------------------------------------------------------------------
// Per-frame render callback: clears the render target and depth buffer, then
// draws every subset of g_Mesh once for each pass of the "Render" technique.
void CALLBACK OnD3D10FrameRender( ID3D10Device* pd3dDevice, double fTime, float fElapsedTime, void* pUserContext )
{
    //
    // Clear the back buffer
    //
    float ClearColor[4] = { 0.0f, 0.125f, 0.3f, 1.0f }; // red, green, blue, alpha
    ID3D10RenderTargetView* pRTV = DXUTGetD3D10RenderTargetView();
    pd3dDevice->ClearRenderTargetView( pRTV, ClearColor );
    //
    // Clear the depth stencil
    //
    ID3D10DepthStencilView* pDSV = DXUTGetD3D10DepthStencilView();
    pd3dDevice->ClearDepthStencilView( pDSV, D3D10_CLEAR_DEPTH, 1.0, 0 );
    //
    // Update variables that change once per frame
    // (g_World is animated every tick in OnFrameMove)
    //
    g_pWorldVariable->SetMatrix( ( float* )&g_World );
    //
    // Set the Vertex Layout
    //
    pd3dDevice->IASetInputLayout( g_pVertexLayout );
    //
    // Render the mesh
    //
    // Bind the mesh's first vertex buffer and its index buffer to slot 0.
    UINT Strides[1];
    UINT Offsets[1];
    ID3D10Buffer* pVB[1];
    pVB[0] = g_Mesh.GetVB10( 0, 0 );
    Strides[0] = ( UINT )g_Mesh.GetVertexStride( 0, 0 );
    Offsets[0] = 0;
    pd3dDevice->IASetVertexBuffers( 0, 1, pVB, Strides, Offsets );
    pd3dDevice->IASetIndexBuffer( g_Mesh.GetIB10( 0 ), g_Mesh.GetIBFormat10( 0 ), 0 );
    D3D10_TECHNIQUE_DESC techDesc;
    g_pTechnique->GetDesc( &techDesc );
    SDKMESH_SUBSET* pSubset = NULL;
    ID3D10ShaderResourceView* pDiffuseRV = NULL;
    D3D10_PRIMITIVE_TOPOLOGY PrimType;
    // Draw every mesh subset once per technique pass.
    for( UINT p = 0; p < techDesc.Passes; ++p )
    {
        for( UINT subset = 0; subset < g_Mesh.GetNumSubsets( 0 ); ++subset )
        {
            pSubset = g_Mesh.GetSubset( 0, subset );
            PrimType = g_Mesh.GetPrimitiveType10( ( SDKMESH_PRIMITIVE_TYPE )pSubset->PrimitiveType );
            pd3dDevice->IASetPrimitiveTopology( PrimType );
            // Bind the subset's diffuse texture *before* Apply() so the pass
            // picks up the new resource binding.
            pDiffuseRV = g_Mesh.GetMaterial( pSubset->MaterialID )->pDiffuseRV10;
            g_ptxDiffuseVariable->SetResource( pDiffuseRV );
            g_pTechnique->GetPassByIndex( p )->Apply( 0 );
            pd3dDevice->DrawIndexed( ( UINT )pSubset->IndexCount, 0, ( UINT )pSubset->VertexStart );
        }
    }
    //the mesh class also had a render method that allows rendering the mesh with the most common options
    //g_Mesh.Render( pd3dDevice, g_pTechnique, g_ptxDiffuseVariable );
}
//--------------------------------------------------------------------------------------
// Release D3D10 resources created in OnD3D10ResizedSwapChain
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D10ReleasingSwapChain( void* pUserContext )
{
    // Intentionally empty: OnD3D10ResizedSwapChain only recomputes matrices
    // and allocates no back-buffer-dependent resources, so there is nothing
    // to release here.
}
//--------------------------------------------------------------------------------------
// Release D3D10 resources created in OnD3D10CreateDevice
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D10DestroyDevice( void* pUserContext )
{
    // Tear down everything created in OnD3D10CreateDevice: DXUT's cached
    // resources first, then the input layout, the effect, and the mesh.
    DXUTGetGlobalResourceCache().OnDestroyDevice();
    SAFE_RELEASE( g_pVertexLayout );
    SAFE_RELEASE( g_pEffect );
    g_Mesh.Destroy();
}
//--------------------------------------------------------------------------------------
// Called right before creating a D3D9 or D3D10 device, allowing the app to modify the device settings as needed
//--------------------------------------------------------------------------------------
bool CALLBACK ModifyDeviceSettings( DXUTDeviceSettings* pDeviceSettings, void* pUserContext )
{
    // No overrides needed; returning true accepts the proposed device settings.
    return true;
}
//--------------------------------------------------------------------------------------
// Handle updates to the scene. This is called regardless of which D3D API is used
//--------------------------------------------------------------------------------------
void CALLBACK OnFrameMove( double fTime, float fElapsedTime, void* pUserContext )
{
    // Rotate cube around the origin.
    // Spins the world matrix around Y as a function of total app time
    // (60 units of fTime per revolution factor; assumes DEG2RAD converts
    // degrees to radians -- macro not visible in this file chunk, confirm).
    D3DXMatrixRotationY( &g_World, 60.0f * DEG2RAD((float)fTime) );
}
//--------------------------------------------------------------------------------------
// Handle messages to the application
//--------------------------------------------------------------------------------------
LRESULT CALLBACK MsgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam, bool* pbNoFurtherProcessing,
                          void* pUserContext )
{
    // No app-specific window message handling; DXUT's default processing runs.
    return 0;
}
//--------------------------------------------------------------------------------------
// Handle key presses
//--------------------------------------------------------------------------------------
// Keyboard handler stub: reacts only to key-down events; currently no key is
// bound to any action.
void CALLBACK OnKeyboard( UINT nChar, bool bKeyDown, bool bAltDown, void* pUserContext )
{
    // Ignore key releases.
    if( !bKeyDown )
        return;

    switch( nChar )
    {
        case VK_F1: // Change as needed
            break;
    }
}
|
#!/usr/bin/env bash
# Build the project's native extension plugins with Defold's bob.
# Run from the project folder (containing the game.project)
set -e
PROJECT=defold-spine
# Default to a local bob.jar with a pinned SDK unless BOB was provided by the caller.
if [ "" == "${BOB}" ]; then
    BOB=./bob.jar
    # comment out when you want to use the bob version instead!
    DEFOLDSDK="--defoldsdk=eb061db73144081bd125b4a028a5ae9a180fc9b6"
fi
#BOB=~/work/defold/tmp/dynamo_home/share/java/bob.jar
echo "Using BOB=${BOB}"
echo "Using DEFOLDSDK=${DEFOLDSDK}"
# Build server to use (override by exporting SERVER before running).
if [ "" == "${SERVER}" ]; then
    SERVER=https://build.defold.com
fi
#SERVER=http://localhost:9000
echo "Using SERVER=${SERVER}"
# Build variant (override by exporting VARIANT, e.g. debug).
if [ "" == "${VARIANT}" ]; then
    VARIANT=release
fi
echo "Using VARIANT=${VARIANT}"
# All built plugin artifacts are collected under this folder.
TARGET_DIR=./$PROJECT/plugins
mkdir -p $TARGET_DIR
# copyfile <path> <folder>
# Copy a single file into <folder>, creating the folder first if needed.
# Silently does nothing when <path> is not a regular file (e.g. an unmatched glob).
function copyfile() {
    local path=$1
    local folder=$2
    if [ -f "$path" ]; then
        if [ ! -d "$folder" ]; then
            # Quoted so paths containing spaces do not word-split.
            mkdir -v -p "$folder"
        fi
        cp -v "$path" "$folder"
    fi
}
# copy_results <bob-platform> <ne-platform>
# Collect built artifacts for one platform into TARGET_DIR:
#   *.jar            -> TARGET_DIR/share        (shared between platforms)
#   *.dylib/.so/.dll -> TARGET_DIR/lib/<ne-platform>
function copy_results() {
    local platform=$1
    local platform_ne=$2
    # Copy the .jar files
    for path in ./build/$platform_ne/$PROJECT/*.jar; do
        copyfile "$path" "$TARGET_DIR/share"
    done
    # Copy the native shared libraries; one loop per library suffix merged
    # into a single glob list. Arguments are quoted so copyfile receives
    # each path intact even if it contains spaces.
    for path in ./build/$platform_ne/$PROJECT/*.dylib \
                ./build/$platform_ne/$PROJECT/*.so \
                ./build/$platform_ne/$PROJECT/*.dll; do
        copyfile "$path" "$TARGET_DIR/lib/$platform_ne"
    done
}
# build_plugin <bob-platform> <ne-platform>
# Run bob to build the plugin artifacts for one platform, then collect them.
# NOTE(review): $DEFOLDSDK is intentionally left unquoted so it expands to
# nothing when unset; the remaining variables are assumed space-free.
function build_plugin() {
    local platform=$1
    local platform_ne=$2
    java -jar $BOB --platform=$platform build --build-artifacts=plugins --variant $VARIANT --build-server=$SERVER $DEFOLDSDK
    copy_results $platform $platform_ne
}
# Build for each supported desktop platform: <bob platform> <NE platform name>.
build_plugin "x86_64-darwin" "x86_64-osx"
build_plugin "x86_64-linux" "x86_64-linux"
build_plugin "x86_64-win32" "x86_64-win32"
# Show what ended up in the plugins folder.
tree $TARGET_DIR
|
<reponame>CyberFlameGO/tamperchrome<filename>v2/ui/e2e/src/app.e2e-spec.ts
import { AppPage } from './app.po';
import { by, logging, Key, browser } from 'protractor';
// Type keystrokes into whichever element currently has focus, once Angular is
// stable. The lookup runs inside the WebDriver control flow so the active
// element is resolved at execution time, not at call time.
const sendKeysToActiveElement = async (...keys) => {
  await browser.waitForAngular();
  await browser.controlFlow().execute(() =>
    browser.switchTo().activeElement().sendKeys(...keys));
};
// Count how many elements matching `css` are currently displayed on the page.
const numberOfVisibleElements = async (css) => {
  await browser.waitForAngular();
  const elements = await browser.findElements(by.css(css));
  const visibility = await Promise.all(elements.map((element) => element.isDisplayed()));
  return visibility.reduce((count, isVisible) => count + (isVisible ? 1 : 0), 0);
};
// Keyboard-driven end-to-end tests. Backend traffic is faked by posting
// `onRequest` / `onResponse` messages carrying MessageChannel ports, which
// stand in for what the extension background page would send.
// NOTE(review): the Key.TAB/SHIFT+TAB sequences mirror the app's focus
// order; if the template's tab order changes these tests must be updated.
describe('workspace-project App', () => {
  let page: AppPage;
  beforeEach(() => {
    page = new AppPage();
  });
  it('should have basic filter at boot', async () => {
    await page.navigateTo();
    await page.snap('boot-empty');
    // Add two filters by typing into the initially focused filter input.
    await sendKeysToActiveElement('testFilter');
    await sendKeysToActiveElement(Key.ENTER);
    await page.snap('boot-filter-added');
    await sendKeysToActiveElement('anotherTestFilter');
    await sendKeysToActiveElement(Key.ENTER);
    await page.snap('boot-filter-added-again');
    // Move focus back to the previous filter and delete it.
    await sendKeysToActiveElement(Key.chord(Key.SHIFT, Key.TAB));
    await sendKeysToActiveElement(Key.DELETE);
    await page.snap('boot-filter-deleted');
    // Tab over to the intercept toggle and enable it.
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.SPACE);
    await page.snap('boot-intercept-switch-enabled');
  });
  it('should filter requests properly', async () => {
    await page.navigateTo();
    // Seed three requests with differing hosts, paths and methods.
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil1',
        method: 'POST',
        url: 'https://example.com/foo?fuu',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil2',
        method: 'POST',
        url: 'https://example.net/fuu?fuu',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil3',
        method: 'PUT',
        url: 'https://example.org/foo?fuu',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    // All three requests visible before any filter is applied.
    expect(numberOfVisibleElements('[appRequestListItem]')).toBe(3);
    await page.snap('filter-unfiltered');
    // Pick an autocomplete suggestion starting with 'f' -> matches /foo URLs.
    await sendKeysToActiveElement('f');
    await sendKeysToActiveElement(Key.DOWN);
    await sendKeysToActiveElement(Key.ENTER);
    expect(numberOfVisibleElements('[appRequestListItem]')).toBe(2);
    await page.snap('filter-filtered-foo');
    // Add a second filter (second suggestion for 'P') -> narrows to PUT.
    await sendKeysToActiveElement('P');
    await sendKeysToActiveElement(Key.DOWN);
    await sendKeysToActiveElement(Key.DOWN);
    await sendKeysToActiveElement(Key.ENTER);
    expect(numberOfVisibleElements('[appRequestListItem]')).toBe(1);
    await page.snap('filter-filtered-foo-put');
    // A new request matching both active filters should appear...
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil4',
        method: 'PUT',
        url: 'https://qux/foo',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    expect(numberOfVisibleElements('[appRequestListItem]')).toBe(2);
    await page.snap('filter-filtered-foo-put-extra');
    // ...while requests matching only one (or neither) filter stay hidden.
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil5',
        method: 'HEAD',
        url: 'https://foo/foo?foo',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'fil6',
        method: 'PUT',
        url: 'https://bar/bar?bar',
        requestHeaders: [],
        requestBody: undefined
      }
    }, [
      (await page.createMessageChannel())[0]
    ]);
    expect(numberOfVisibleElements('[appRequestListItem]')).toBe(2);
    await page.snap('filter-filtered-foo-put-extra-nomatch');
  });
  it('should capture and respond to request', async () => {
    await page.navigateTo();
    // enable interception
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.SPACE);
    // send a request; port2 stays on the test side to read the reply
    const [port1, port2] = await page.createMessageChannel();
    await page.postMessage({
      event: 'onRequest',
      request: {
        id: 'req1',
        method: 'GET',
        url: 'https://example.com/',
        requestHeaders: [{name: 'Host', value: 'example.com'}],
        requestBody: undefined
      }
    }, [
      port1
    ]);
    // wait for the request to show up in the list
    await page.snap('capture-request');
    // tab to the first element
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await page.snap('capture-request-list-focused');
    // tab to request editor
    await sendKeysToActiveElement(Key.TAB);
    // modify the method
    await sendKeysToActiveElement('HEAD');
    await page.snap('capture-request-method');
    // dont modify the url
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    // modify the host header
    await sendKeysToActiveElement('Original-Host');
    await page.snap('capture-request-host-changed');
    // skip header value and send header checkbox
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    // add new header
    await sendKeysToActiveElement(Key.ENTER);
    await sendKeysToActiveElement('New-Header');
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement('NewHeader Value!');
    await page.snap('capture-request-add-header');
    // tab past the remaining controls and send the modified request
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.ENTER);
    await page.snap('capture-request-send');
    // the app must post the edited request back over the channel
    const modifiedRequest = (await page.waitForMessageToPort(port2)).data.request;
    expect(modifiedRequest).toEqual({
      method: 'HEAD',
      url: 'https://example.com/',
      requestHeaders: [
        {name: 'Original-Host', value: 'example.com'},
        {name: 'New-Header', value: 'NewHeader Value!'},
      ],
      requestBody: null
    });
    // send a response
    const [port3, port4] = await page.createMessageChannel();
    await page.postMessage({
      event: 'onResponse',
      response: {
        id: 'req1',
        status: 200,
        responseHeaders: [
          {name: 'Server', value: 'fake'},
          {name: 'X-XSS-Protection', value: '1'},
        ],
      }
    }, [
      port3
    ]);
    // wait for the response to arrive
    await page.snap('capture-response-arrived');
    // change the status code
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement('302');
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    // replace the Server header's value
    await sendKeysToActiveElement('Server-Header-Value');
    await page.snap('capture-response-edit-server');
    // move to the X-XSS-Protection header and untick its "send" checkbox
    await sendKeysToActiveElement(Key.ARROW_DOWN);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.SPACE);
    await page.snap('capture-response-disable-xss');
    // add a brand new response header
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.ENTER);
    await sendKeysToActiveElement('AnotherNewHeader');
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement('NewValue');
    await page.snap('capture-response-add-header');
    // send the modified response
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.TAB);
    await sendKeysToActiveElement(Key.ENTER);
    const modifiedResponse = (await page.waitForMessageToPort(port4)).data.response;
    expect(modifiedResponse).toEqual({
      status: 302,
      responseHeaders: [
        {name: 'Server', value: 'Server-Header-Value'},
        {name: 'AnotherNewHeader', value: 'NewValue'}
      ],
      responseBody: null
    });
  });
  afterEach(async () => {
    // Assert that there are no errors emitted from the browser
    const logs = await browser.manage().logs().get(logging.Type.BROWSER);
    expect(logs).not.toContain(jasmine.objectContaining({
      level: logging.Level.SEVERE,
    } as logging.Entry));
    // Assert that there are no snapshot differences
    if(page.getDiffs().length) {
      fail('Difference in golden files found. If this is expected, run make e2e-goldens\n\n');
      page.getDiffs().forEach(diff=>{
        expect(diff).toEqual({});
      });
    }
  });
});
|
<filename>lib/shared/addon/utils/percent-gauge.js
import { select, event, svg } from 'd3';
// Build a percentage gauge inside `options.el` and return an API for
// updating it in place. Options: { el, value, ticks, title, subtitle };
// all sizes are derived from the parent element's width (see getConfig).
export default function initGraph(options) {
  const {
    el, width, height, margin, thickness, fontSize
  } = getConfig(options);
  // NOTE(review): this local `svg` selection shadows the `svg` namespace
  // imported from d3 (still used by createArc at module level).
  const svg = select(el).append('svg')
    .attr('width', width).attr('height', height);
  let value = options.value;
  let ticks = options.ticks;
  const { valuePath, maxPath } = addArcValue(svg, width, margin, thickness, options.value);
  let tooltip = addTooltip();
  addTicks(svg, tooltip, width, height, margin, ticks, options.value, thickness);
  const {
    valueLabel, titleLabel, subtitleLabel
  } = addLabels(svg, options, width, height, fontSize);
  return {
    // Replace the title text.
    updateTitle(text) {
      titleLabel.text(text);
    },
    // Replace the subtitle text.
    updateSubTitle(text) {
      subtitleLabel.text(text);
    },
    // Set a new percentage. A falsy value clears the label and leaves the
    // arc unchanged.
    updateValue(text) {
      text = text ? `${ text }%` : '';
      valueLabel.text(text);
      if (text) {
        value = parseInt(text, 10);
        const r = calcR(width, margin);
        valuePath.attr('d', createArc(-135, value, r, thickness));
      }
    },
    // Replace the tick markers and redraw them.
    updateTicks(t) {
      ticks = t;
      repaintTicks(svg, tooltip, width, height, margin, ticks, value, thickness);
    },
    // Re-measure the container and redraw everything at the new size.
    fit() {
      fit(svg, el, value, ticks, tooltip, valuePath,
        maxPath, valueLabel, titleLabel, subtitleLabel);
    },
  };
}
// Recompute all sizes from the container's current width and repaint the
// arcs, labels and ticks. (Invoked through the `fit()` method returned by
// initGraph, which resolves to this module-level function.)
function fit(svg, el, value, ticks, tooltip, valuePath, maxPath, valueLabel, titleLabel, subtitleLabel) {
  const {
    width, height, margin, thickness, fontSize
  } = getConfig({ el });
  svg.attr('width', width).attr('height', height);
  repaintArc(width, margin, value, thickness, valuePath, maxPath);
  repaintLabels(valueLabel, titleLabel, subtitleLabel, width, height, fontSize);
  repaintTicks(svg, tooltip, width, height, margin, ticks, value, thickness);
}
// Remove every previously drawn custom tick marker, then draw the current set.
function repaintTicks(svg, tooltip, width, height, margin, ticks, value, thickness) {
  for (const shape of ['path', 'circle']) {
    svg.selectAll(`${ shape }[tick = "custom"]`).remove();
  }
  addTicks(svg, tooltip, width, height, margin, ticks, value, thickness);
}
// Redraw the value arc and the full-scale (100%) background arc for a new
// gauge size, re-centering both inside the margins.
function repaintArc(width, margin, value, thickness, valuePath, maxPath) {
  const r = calcR(width, margin);
  valuePath.attr('d', createArc(-135, value, r, thickness))
    .attr('transform', `translate(${ margin + r },${ margin + r }), scale(1, 1)`);
  maxPath.attr('d', createArc(-135, 100, r, thickness))
    .attr('transform', `translate(${ margin + r },${ margin + r }), scale(1, 1)`);
}
// Return the shared tooltip div, creating it on first use. All gauges on the
// page reuse the same #percent-gauge-tooltip element.
function addTooltip() {
  let tooltip = select('#percent-gauge-tooltip');
  if (tooltip.empty()) {
    // The original code chained .attr('class', 'hover-label') immediately
    // followed by .attr('class', 'percent-gauge-tooltip'); the first call was
    // dead (overwritten), so only the surviving class is set here.
    tooltip = select('body').append('div')
      .attr('class', 'percent-gauge-tooltip')
      .attr('id', 'percent-gauge-tooltip')
      .style('opacity', 0);
  }
  return tooltip;
}
// Draw the 100% background arc plus the current-value arc (clamped at 100)
// and return both paths so callers can update them later.
function addArcValue(svg, width, margin, thickness, value) {
  const maxPath = addArc(svg, width, margin, thickness, 'gauge-max-path', 100);
  const valuePath = addArc(svg, width, margin, thickness, 'gauge-value-path', value > 100 ? 100 : value);
  return {
    valuePath,
    maxPath,
  };
}
// Y coordinate (px) of the big percentage value label.
function getValueLabelY(height, fontSize) {
  const topOffset = height / 5;
  return topOffset + fontSize / 1.2;
}
// Y coordinate (px) of the title label, below the value label.
function getTitleLabelY(height, fontSize) {
  const topOffset = height / 5;
  return topOffset + 1.9 * fontSize;
}
// Y coordinate (px) of the subtitle label, below the title label.
function getSubtitleLabelY(height, fontSize) {
  const topOffset = height / 5;
  return topOffset + 2.5 * fontSize;
}
// Create the three centered text labels (value, title, subtitle). The value
// label falls back to '0%' when options.value is falsy and is drawn bolder.
function addLabels(svg, options, width, height, fontSize) {
  return {
    valueLabel: addText(options.value ? `${ options.value }%` : '0%', svg, width / 2, getValueLabelY(height, fontSize), fontSize, 'value', 3.5),
    titleLabel: addText(options.title, svg, width / 2, getTitleLabelY(height, fontSize), fontSize / 3, 'title'),
    subtitleLabel: addText(options.subtitle, svg, width / 2, getSubtitleLabelY(height, fontSize), fontSize / 3, 'subtitle'),
  }
}
// Reposition and rescale the three labels after a resize; mirrors the
// geometry used when the labels were first created in addLabels/addText.
function repaintLabels(valueLabel, titleLabel, subtitleLabel, width, height, fontSize) {
  valueLabel.attr('x', width / 2)
    .attr('y', getValueLabelY(height, fontSize))
    .attr('dy', fontSize / 2)
    .style('font-size', `${ fontSize }px`);
  titleLabel.attr('x', width / 2)
    .attr('y', getTitleLabelY(height, fontSize))
    .attr('dy', fontSize / 6)
    .style('font-size', `${ fontSize / 3 }px`);
  subtitleLabel.attr('x', width / 2)
    .attr('y', getSubtitleLabelY(height, fontSize))
    .attr('dy', fontSize / 6)
    .style('font-size', `${ fontSize / 3 }px`);
}
// Draw one dot per tick (with a hover tooltip), a highlighted marker at the
// current value, and a thin arc spanning the min..max tick values. Nothing
// is drawn unless there is more than one tick.
// NOTE(review): tick.labels appear to be node names (clusterDashboard.node
// intl key) -- confirm against the callers.
function addTicks(svg, tooltip, width, height, margin, ticks, currentValue, thickness) {
  let max;
  let min;
  (ticks || []).forEach((tick) => {
    // Skip ticks without a usable value (null/undefined/'') but keep 0.
    if (tick.value !== 0 && !tick.value) {
      return
    }
    const value = parseInt(tick.value, 10);
    // Track the overall tick range for the min..max arc drawn below.
    max = (max === undefined || value > max) ? value : max;
    min = (min === undefined || value < min) ? value : min;
    const point = valueToPoint(width, height, margin, value, thickness);
    if (ticks.length > 1) {
      // One tooltip table row per label at this tick.
      let tr = '';
      tick.labels.forEach((label) => {
        tr += `<tr>
<td>${ label }</td>
<td>${ tick.value }%</td>
</tr>`;
      });
      // Small visible dot at the tick position...
      svg.append('circle').attr('tick', 'custom').attr('class', `gauge-circle-fill`).attr('cx', point.x)
        .attr('cy', point.y)
        .attr('r', 3);
      // ...plus a large invisible circle acting as the hover hit area.
      const tp = svg.append('circle').attr('tick', 'custom').attr('class', `gauge-none-fill`).attr('cx', point.x)
        .attr('cy', point.y)
        .attr('r', 30);
      tp.on('mouseover', () => {
        tooltip.transition()
          .duration(200)
          .style('opacity', .9);
        tooltip.html(`<table>
<tbody>
<tr>
<th>${ window.l('service:intl').t('clusterDashboard.node') }</th>
<th>${ window.l('service:intl').t('clusterDashboard.value') }</th>
</tr>
${ tr }
</tbody>
</table>`)
          .style('left', `${ (event.pageX) - 30 }px`)
          .style('top', `${ (event.pageY) - 30 * (tick.labels.length + 1) }px`);
      }).on('mouseout', () => {
        tooltip.transition()
          .duration(500)
          .style('opacity', 0);
      });
    }
  });
  if (ticks && ticks.length > 1) {
    // Highlight the current value: a filled dot inside a stroked ring.
    const point = valueToPoint(width, height, margin, currentValue, thickness);
    svg.append('circle').attr('tick', 'custom').attr('class', `gauge-circle-fill`).attr('cx', point.x)
      .attr('cy', point.y)
      .attr('r', 5);
    svg.append('circle').attr('tick', 'custom').attr('class', `gauge-tick-path`).style('stroke-width', '2')
      .attr('cx', point.x)
      .attr('cy', point.y)
      .attr('r', 8);
    // Thin arc covering the span between the smallest and largest tick.
    const rangePath = addArc(svg, width, margin, thickness, 'gauge-tick-path', max, min, 2);
    rangePath.attr('tick', 'custom');
  }
}
// Append a horizontally centered text element. `bold` sets the stroke width,
// making the text appear heavier (used for the big value label).
function addText(text, svg, x, y, fontSize, labelType, bold = 0) {
  return svg.append('svg:text')
    .attr('x', x)
    .attr('y', y)
    .attr('dy', fontSize / 2)
    .attr('text-anchor', 'middle')
    .text(text)
    .style('font-size', `${ fontSize }px`)
    .attr('class', `gauge-${ labelType }-fill`)
    .style('stroke-width', `${ bold }px`);
}
// Append an arc path sweeping from `start` to `value` (both in percent of the
// gauge scale), styled with the given class, centered inside the margins.
function addArc(svg, width, margin, thickness, gaugeColor, value, start = 0, strokeWidth = 1) {
  value = value || 0;
  const r = calcR(width, margin);
  return svg.append('path')
    .attr('d', createArc(-135, value, r, thickness, start))
    .style('stroke-width', strokeWidth)
    .attr('class', `gauge-text-stroke ${ gaugeColor }`)
    .attr('transform', `translate(${ margin + r },${ margin + r }), scale(1, 1)`);
}
// Build a d3 arc generator for the gauge. Percent values 0-100 map linearly
// onto a -135°..+135° sweep (2.7° per unit); `start` lets range arcs begin
// mid-scale.
// NOTE(review): the incoming `sa` parameter (always -135 at call sites) is
// immediately overwritten and therefore unused.
function createArc(sa, ea, r, thickness, start = 0) {
  ea = 2.7 * parseInt(ea, 10) - 135;
  sa = 2.7 * parseInt(start, 10) - 135;
  return svg.arc()
    .outerRadius(r)
    .innerRadius(r - thickness)
    .startAngle(d2r(sa))
    .endAngle(d2r(ea));
}
// Convert degrees to radians.
function d2r(d) {
  const RADIANS_PER_DEGREE = Math.PI / 180;
  return d * RADIANS_PER_DEGREE;
}
// Sine of the gauge angle for a 0-100 value (45° start, 2.7° per unit).
function sin(value) {
  const degrees = 45 - (2.7 * value);
  return Math.sin(degrees * Math.PI / 180);
}
// Cosine of the gauge angle for a 0-100 value (45° start, 2.7° per unit).
function cos(value) {
  const degrees = 45 - (2.7 * value);
  return Math.cos(degrees * Math.PI / 180);
}
// Radius of the gauge arc: half of the width left after both side margins.
function calcR(width, margin) {
  const innerWidth = width - 2 * margin;
  return innerWidth / 2;
}
// Convert a 0-100 gauge value to the x/y position of a point on the arc's
// centerline (radius shrunk by half the thickness), relative to the svg.
function valueToPoint(width, height, margin, value, thickness) {
  const r = calcR(width, margin) - (thickness / 2);
  return {
    x: width - r * cos(value) - r - margin - (thickness / 2),
    y: height - r - margin + r * sin(value) - (thickness / 2),
  };
}
// Usable gauge width: 90% of the parent node's width, never negative.
function getWidth(el) {
  const scaled = el.parentNode.offsetWidth * 0.9;
  return scaled > 0 ? scaled : 0;
}
// Derive every layout metric from the container's width; the gauge canvas is
// square (height === width) and font/margin/thickness scale proportionally.
function getConfig(options) {
  const width = getWidth(options.el);
  return {
    el: options.el,
    fontSize: width / 7,
    margin: width / 22,
    width,
    height: width,
    thickness: width / 12,
  };
}
|
/**
 * Computes and prints the population standard deviation of a fixed sample.
 */
public class StandardDeviationCalculator {
    public static void main(String[] args) {
        double[] x = {1.1, 0.8, 0.9, 1.2, 0.7};
        // Same output as before; the math now lives in a testable helper.
        System.out.printf("Standard deviation: %f", standardDeviation(x));
    }

    /**
     * Returns the population standard deviation of the given values
     * (square root of the mean squared deviation from the mean).
     *
     * @param values the sample; must be non-empty
     * @return the population standard deviation
     * @throws IllegalArgumentException if {@code values} is empty
     */
    static double standardDeviation(double[] values) {
        if (values.length == 0) {
            throw new IllegalArgumentException("values must not be empty");
        }
        double sum = 0;
        for (double v : values) {
            sum += v;
        }
        double mean = sum / values.length;
        double squaredDeviation = 0;
        for (double v : values) {
            squaredDeviation += (v - mean) * (v - mean);
        }
        return Math.sqrt(squaredDeviation / values.length);
    }
}
|
import os
import random
import shutil

src_dir = 'test2017/'  # Source directory containing image files
dst_dir = 'total_bg/'  # Destination directory to move selected images

image_list = os.listdir(src_dir)  # Entry names in the source directory
target_cnt = 6000                 # Number of images to select and move

# Fail with a clear message instead of random.sample's generic
# "Sample larger than population" ValueError.
if len(image_list) < target_cnt:
    raise ValueError(
        f'need {target_cnt} images but {src_dir} only contains {len(image_list)}'
    )

# Make sure the destination exists so shutil.move places files inside it
# rather than failing (or renaming to the literal path).
os.makedirs(dst_dir, exist_ok=True)

# Randomly select the images and move each one to the destination directory.
for image_item in random.sample(image_list, target_cnt):
    src_path = os.path.join(src_dir, image_item)
    dst_path = os.path.join(dst_dir, image_item)
    shutil.move(src_path, dst_path)
|
<filename>packages/eslint-plugin/src/rules/object-curly-spacing.ts
import {
AST_NODE_TYPES,
AST_TOKEN_TYPES,
TSESTree,
} from '@typescript-eslint/experimental-utils';
import baseRule from 'eslint/lib/rules/object-curly-spacing';
import {
createRule,
InferMessageIdsTypeFromRule,
InferOptionsTypeFromRule,
isClosingBraceToken,
isClosingBracketToken,
isTokenOnSameLine,
} from '../util';
// Option and message-id types are inferred from the base ESLint rule's schema
// so this extension stays in sync with upstream `object-curly-spacing`.
export type Options = InferOptionsTypeFromRule<typeof baseRule>;
export type MessageIds = InferMessageIdsTypeFromRule<typeof baseRule>;
// TypeScript extension of ESLint's `object-curly-spacing`: reuses the base
// rule for plain object literals/patterns and adds identical spacing checks
// for TS mapped types (`{ [K in T]: U }`) and type literals (`{ a: string }`).
export default createRule<Options, MessageIds>({
  name: 'object-curly-spacing',
  meta: {
    ...baseRule.meta,
    docs: {
      description: 'Enforce consistent spacing inside braces',
      category: 'Stylistic Issues',
      recommended: false,
      extendsBaseRule: true,
    },
  },
  // Mirrors the base rule's default: no spaces inside braces.
  defaultOptions: ['never'],
  create(context) {
    const spaced = context.options[0] === 'always';
    const sourceCode = context.getSourceCode();
    /**
     * Determines whether an option is set, relative to the spacing option.
     * If spaced is "always", then check whether option is set to false.
     * If spaced is "never", then check whether option is set to true.
     * @param option The option to exclude.
     * @returns Whether or not the property is excluded.
     */
    function isOptionSet(
      option: 'arraysInObjects' | 'objectsInObjects',
    ): boolean {
      return context.options[1]
        ? context.options[1][option] === !spaced
        : false;
    }
    // Resolved configuration: the two exceptions invert `spaced` for the
    // specific penultimate/second token types handled below.
    const options = {
      spaced,
      arraysInObjectsException: isOptionSet('arraysInObjects'),
      objectsInObjectsException: isOptionSet('objectsInObjects'),
    };
    //--------------------------------------------------------------------------
    // Helpers
    //--------------------------------------------------------------------------
    /**
     * Reports that there shouldn't be a space after the first token
     * @param node The node to report in the event of an error.
     * @param token The token to use for the report.
     */
    function reportNoBeginningSpace(
      node: TSESTree.TSMappedType | TSESTree.TSTypeLiteral,
      token: TSESTree.Token,
    ): void {
      const nextToken = context
        .getSourceCode()
        .getTokenAfter(token, { includeComments: true })!;
      context.report({
        node,
        loc: { start: token.loc.end, end: nextToken.loc.start },
        messageId: 'unexpectedSpaceAfter',
        data: {
          token: token.value,
        },
        fix(fixer) {
          return fixer.removeRange([token.range[1], nextToken.range[0]]);
        },
      });
    }
    /**
     * Reports that there shouldn't be a space before the last token
     * @param node The node to report in the event of an error.
     * @param token The token to use for the report.
     */
    function reportNoEndingSpace(
      node: TSESTree.TSMappedType | TSESTree.TSTypeLiteral,
      token: TSESTree.Token,
    ): void {
      const previousToken = context
        .getSourceCode()
        .getTokenBefore(token, { includeComments: true })!;
      context.report({
        node,
        loc: { start: previousToken.loc.end, end: token.loc.start },
        messageId: 'unexpectedSpaceBefore',
        data: {
          token: token.value,
        },
        fix(fixer) {
          return fixer.removeRange([previousToken.range[1], token.range[0]]);
        },
      });
    }
    /**
     * Reports that there should be a space after the first token
     * @param node The node to report in the event of an error.
     * @param token The token to use for the report.
     */
    function reportRequiredBeginningSpace(
      node: TSESTree.TSMappedType | TSESTree.TSTypeLiteral,
      token: TSESTree.Token,
    ): void {
      context.report({
        node,
        loc: token.loc,
        messageId: 'requireSpaceAfter',
        data: {
          token: token.value,
        },
        fix(fixer) {
          return fixer.insertTextAfter(token, ' ');
        },
      });
    }
    /**
     * Reports that there should be a space before the last token
     * @param node The node to report in the event of an error.
     * @param token The token to use for the report.
     */
    function reportRequiredEndingSpace(
      node: TSESTree.TSMappedType | TSESTree.TSTypeLiteral,
      token: TSESTree.Token,
    ): void {
      context.report({
        node,
        loc: token.loc,
        messageId: 'requireSpaceBefore',
        data: {
          token: token.value,
        },
        fix(fixer) {
          return fixer.insertTextBefore(token, ' ');
        },
      });
    }
    /**
     * Determines if spacing in curly braces is valid.
     * @param node The AST node to check.
     * @param first The first token to check (should be the opening brace)
     * @param second The second token to check (should be first after the opening brace)
     * @param penultimate The penultimate token to check (should be last before closing brace)
     * @param last The last token to check (should be closing brace)
     */
    function validateBraceSpacing(
      node: TSESTree.TSMappedType | TSESTree.TSTypeLiteral,
      first: TSESTree.Token,
      second: TSESTree.Token | TSESTree.Comment,
      penultimate: TSESTree.Token | TSESTree.Comment,
      last: TSESTree.Token,
    ): void {
      // Opening brace: only checked when `{` and the next token share a line.
      if (isTokenOnSameLine(first, second)) {
        const firstSpaced = sourceCode.isSpaceBetween!(first, second);
        const secondType = sourceCode.getNodeByRangeIndex(second.range[0])!
          .type;
        const openingCurlyBraceMustBeSpaced =
          options.arraysInObjectsException &&
          [
            AST_NODE_TYPES.TSMappedType,
            AST_NODE_TYPES.TSIndexSignature,
          ].includes(secondType)
            ? !options.spaced
            : options.spaced;
        if (openingCurlyBraceMustBeSpaced && !firstSpaced) {
          reportRequiredBeginningSpace(node, first);
        }
        if (
          !openingCurlyBraceMustBeSpaced &&
          firstSpaced &&
          // A line comment after `{` owns the rest of the line; removing the
          // space would be meaningless, so it is tolerated.
          second.type !== AST_TOKEN_TYPES.Line
        ) {
          reportNoBeginningSpace(node, first);
        }
      }
      // Closing brace: same-line check against the token preceding `}`.
      if (isTokenOnSameLine(penultimate, last)) {
        const shouldCheckPenultimate =
          (options.arraysInObjectsException &&
            isClosingBracketToken(penultimate)) ||
          (options.objectsInObjectsException &&
            isClosingBraceToken(penultimate));
        const penultimateType = shouldCheckPenultimate
          ? sourceCode.getNodeByRangeIndex(penultimate.range[0])!.type
          : undefined;
        const closingCurlyBraceMustBeSpaced =
          (options.arraysInObjectsException &&
            penultimateType === AST_NODE_TYPES.TSTupleType) ||
          (options.objectsInObjectsException &&
            penultimateType !== undefined &&
            [
              AST_NODE_TYPES.TSMappedType,
              AST_NODE_TYPES.TSTypeLiteral,
            ].includes(penultimateType))
            ? !options.spaced
            : options.spaced;
        const lastSpaced = sourceCode.isSpaceBetween!(penultimate, last);
        if (closingCurlyBraceMustBeSpaced && !lastSpaced) {
          reportRequiredEndingSpace(node, last);
        }
        if (!closingCurlyBraceMustBeSpaced && lastSpaced) {
          reportNoEndingSpace(node, last);
        }
      }
    }
    /**
     * Gets '}' token of an object node.
     *
     * Because the last token of object patterns might be a type annotation,
     * this traverses tokens preceded by the last property, then returns the
     * first '}' token.
     * @param node The node to get. This node is an
     * ObjectExpression or an ObjectPattern. And this node has one or
     * more properties.
     * @returns '}' token.
     */
    function getClosingBraceOfObject(
      node: TSESTree.TSTypeLiteral,
    ): TSESTree.Token | null {
      const lastProperty = node.members[node.members.length - 1];
      return sourceCode.getTokenAfter(lastProperty, isClosingBraceToken);
    }
    //--------------------------------------------------------------------------
    // Public
    //--------------------------------------------------------------------------
    // Delegate plain object handling to the base rule; only add the
    // TS-specific brace constructs on top.
    const rules = baseRule.create(context);
    return {
      ...rules,
      // Mapped types: `{ [K in T]: U }`.
      TSMappedType(node: TSESTree.TSMappedType): void {
        const first = sourceCode.getFirstToken(node)!;
        const last = sourceCode.getLastToken(node)!;
        const second = sourceCode.getTokenAfter(first, {
          includeComments: true,
        })!;
        const penultimate = sourceCode.getTokenBefore(last, {
          includeComments: true,
        })!;
        validateBraceSpacing(node, first, second, penultimate, last);
      },
      // Type literals: `{ a: string }`; empty literals have no inner tokens
      // to check.
      TSTypeLiteral(node: TSESTree.TSTypeLiteral): void {
        if (node.members.length === 0) {
          return;
        }
        const first = sourceCode.getFirstToken(node)!;
        const last = getClosingBraceOfObject(node)!;
        const second = sourceCode.getTokenAfter(first, {
          includeComments: true,
        })!;
        const penultimate = sourceCode.getTokenBefore(last, {
          includeComments: true,
        })!;
        validateBraceSpacing(node, first, second, penultimate, last);
      },
    };
  },
});
|
<gh_stars>0
package gittest
import (
"io"
"os"
"os/exec"
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
)
// Exec runs a git command without stdin and returns its standard output.
// Any failure aborts the calling test via t.Fatal.
func Exec(t testing.TB, cfg config.Cfg, args ...string) []byte {
	t.Helper()
	return run(t, nil, cfg, args...)
}
// ExecStream runs a git command with the given reader as stdin and returns
// its standard output. Any failure aborts the calling test via t.Fatal.
func ExecStream(t testing.TB, cfg config.Cfg, stream io.Reader, args ...string) []byte {
	t.Helper()
	return run(t, stream, cfg, args...)
}
// run executes the configured Git binary with the given arguments and an
// optional stdin stream. On failure it logs the command (and, for exit
// errors, the captured stderr and output) and fails the test fatally.
func run(t testing.TB, stdin io.Reader, cfg config.Cfg, args ...string) []byte {
	t.Helper()

	cmd := exec.Command(cfg.Git.BinPath, args...)

	// Gitaly's Git environment comes first, then the process environment,
	// then the fixed values that make test repositories deterministic.
	env := append(command.GitEnv, os.Environ()...)
	env = append(env,
		"GIT_AUTHOR_DATE=1572776879 +0100",
		"GIT_COMMITTER_DATE=1572776879 +0100",
		"GIT_CONFIG_COUNT=1",
		"GIT_CONFIG_KEY_0=init.defaultBranch",
		"GIT_CONFIG_VALUE_0=master",
	)
	cmd.Env = env

	if stdin != nil {
		cmd.Stdin = stdin
	}

	out, err := cmd.Output()
	if err != nil {
		t.Log(cfg.Git.BinPath, args)
		if exitErr, ok := err.(*exec.ExitError); ok {
			t.Logf("%s: %s\n", exitErr.Stderr, out)
		}
		t.Fatal(err)
	}
	return out
}
|
<reponame>matheus2x/covid-quiz
"use strict";
module.exports = {
up: (queryInterface, Sequelize) => {
return queryInterface.bulkInsert(
"tips",
[
{
image: "instrument.svg",
text: "Que tal usar o tempo livre para aprender um novo instrumento?",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "lotus-position.svg",
text: "Cuidado com ansiedade! Por que não meditar?",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "mask.svg",
text:
"Se sair, não esqueça de vestir uma máscara descartável ou de tecido.",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "no-handshake.svg",
text: "Evite ao máximo qualquer contato físico direto.",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "reading.svg",
text: "Leia um bom livro.",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "travel.svg",
text: "Evite e se possível, cancele todas suas viagens.",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "wash-hands.svg",
text:
"Lave as mãos por cerca de 20 segundos, com água e sabão, após tocar em superfícies e animais.",
created_at: new Date(),
updated_at: new Date(),
},
{
image: "watching-tv.svg",
text: "Aproveite para colocar suas séries favoritas em dia!",
created_at: new Date(),
updated_at: new Date(),
},
],
{}
);
},
down: (queryInterface, Sequelize) => {
return queryInterface.bulkDelete("tips", null, {});
},
};
|
import random
def shuffle_list(lst):
    """Shuffle ``lst`` in place and return the same list object.

    ``random.shuffle`` is already a no-op for lists of length 0 or 1, so the
    original explicit ``len(lst) > 1`` guard was redundant and has been
    removed.

    Args:
        lst: The list to shuffle (mutated in place).

    Returns:
        The same list object, pseudo-randomly reordered.
    """
    random.shuffle(lst)
    return lst
|
class ArrayProcessor:
    """Aggregate helpers over a fixed sequence of numbers."""

    def __init__(self, input_array):
        # Stored as-is; the methods below iterate without mutating it.
        self.input_array = input_array

    def sum_divisible_by(self, divisor):
        """Return the sum of elements evenly divisible by ``divisor``."""
        total = 0
        for value in self.input_array:
            if value % divisor == 0:
                total += value
        return total

    def product_greater_than(self, threshold):
        """Return the product of elements strictly greater than ``threshold``.

        Yields 1 (the empty product) when no element exceeds the threshold.
        """
        result = 1
        for value in (v for v in self.input_array if v > threshold):
            result *= value
        return result
# Example usage
input_array = [10, 5, 8, 12, 7, 15, 20]
processor = ArrayProcessor(input_array)
print(processor.sum_divisible_by(5))  # Output: 50 (10 + 5 + 15 + 20)
print(processor.product_greater_than(10))  # Output: 3600 (12 * 15 * 20)
|
<gh_stars>10-100
package io.opensphere.mantle.data.geom.style;
/**
 * Listener interface for receiving notification that the parameters of a
 * {@link VisualizationStyle} have changed.
 */
@FunctionalInterface
public interface StyleChangeListener
{
    /**
     * Called when the parameters of a style have changed.
     *
     * @param dataTypeKey the key identifying the data type whose style changed
     * @param style the style whose parameters changed
     */
    void styleParametersChanged(String dataTypeKey, VisualizationStyle style);
}
|
#!/usr/bin/env bash
#
# Copyright (C) 2017 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#include <service.sh>

# Post-remove package hook: deregister the module's systemd unit.
#
# $1 - number of installations of the module currently present.
NUMBER_OF_INSTALLATIONS=$1
MODULE_NAME=easy-bag-index
PHASE="POST-REMOVE"

echo "$PHASE: START (Number of current installations: $NUMBER_OF_INSTALLATIONS)"
# Quote the arguments so an empty or whitespace-containing value cannot be
# split into extra words when handed to the helper from service.sh.
service_remove_systemd_unit "$MODULE_NAME" "$NUMBER_OF_INSTALLATIONS"
echo "$PHASE: DONE"
|
<reponame>andromeda/mir
// Overwrite the standard function properties of the DTRACE_HTTP_CLIENT_RESPONSE
// probe with empty objects, then invoke it with no arguments.
// NOTE(review): presumably a generated stub for static/dynamic analysis —
// DTRACE_HTTP_CLIENT_RESPONSE itself is defined by the Node.js host runtime,
// not in this file; confirm against the analysis harness before reuse.
DTRACE_HTTP_CLIENT_RESPONSE.length = {};
DTRACE_HTTP_CLIENT_RESPONSE.name = {};
DTRACE_HTTP_CLIENT_RESPONSE.arguments = {};
DTRACE_HTTP_CLIENT_RESPONSE.caller = {};
DTRACE_HTTP_CLIENT_RESPONSE.prototype = {};
DTRACE_HTTP_CLIENT_RESPONSE();
|
package com.bot.db.mappers;
import com.bot.models.RssChannelSubscription;
import com.bot.models.RssSubscription;
import java.sql.ResultSet;
import java.sql.SQLException;
public class RssChannelSubscriptionMapper {

    /**
     * Builds an {@link RssChannelSubscription} from the current row of the
     * given result set, attaching the already-loaded parent subscription.
     *
     * @param set result set positioned on the row to map
     * @param subscription parent RSS subscription this channel entry belongs to
     * @return the mapped channel subscription
     * @throws SQLException if a column cannot be read from the result set
     */
    public static RssChannelSubscription mapToRssSubscription(ResultSet set, RssSubscription subscription) throws SQLException {
        final int id = set.getInt("id");
        final String textChannelId = set.getString("text_channel_id");
        final String author = set.getString("author");
        return new RssChannelSubscription(id, subscription, textChannelId, author);
    }
}
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internal.bean.validation;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import javax.validation.executable.ExecutableValidator;
import javax.validation.metadata.BeanDescriptor;
/**
 * Bean-validation {@link Validator} test double: every method returns
 * {@code null}.
 *
 * <p>NOTE(review): returning {@code null} (rather than an empty set) from the
 * validate methods technically violates the {@code Validator} contract;
 * presumably the tests using this fake never inspect the result — confirm
 * before reusing it elsewhere.
 */
public class FakeValidator implements Validator {
    @Override
    public <T> Set<ConstraintViolation<T>> validateValue(Class<T> beanType, String propertyName, Object value,
            Class<?>... groups) {
        return null;
    }

    @Override
    public <T> Set<ConstraintViolation<T>> validateProperty(T object, String propertyName, Class<?>... groups) {
        return null;
    }

    @Override
    public <T> Set<ConstraintViolation<T>> validate(T object, Class<?>... groups) {
        return null;
    }

    @Override
    public <T> T unwrap(Class<T> type) {
        return null;
    }

    @Override
    public BeanDescriptor getConstraintsForClass(Class<?> clazz) {
        return null;
    }

    @Override
    public ExecutableValidator forExecutables() {
        return null;
    }
}
|
package ormx;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
/**
* @author jesus
*/
/**
 * Generic data-access object mapping a single table to objects of type
 * {@code T} keyed by {@code ID}.
 *
 * <p>Instances are created through {@link Builder}; queries are delegated to
 * the underlying {@link OrmDataBase} and rows are converted with an
 * {@link OrmObjectAdapter}.
 *
 * @author jesus
 */
public class OrmDao<T, ID> implements AutoCloseable {

    final OrmDataBase db;
    final OrmObjectAdapter<T> adapter;
    final String table;
    // Primary-key field; may be null when the table has no declared key.
    final OrmField<ID> key;
    // True when the key value is generated by the database on insert.
    final boolean autoIncrement;

    OrmDao(Builder<T, ID> builder) {
        this.db = builder.db;
        this.adapter = builder.adapter;
        this.table = builder.table;
        this.key = builder.key;
        this.autoIncrement = builder.autoIncrement;
    }

    /** @return the underlying database handle. */
    public OrmDataBase db() {
        return db;
    }

    /** @return the adapter used to convert rows to {@code T} instances. */
    public OrmObjectAdapter<T> adapter() {
        return adapter;
    }

    /** @return the table name this DAO operates on. */
    public String table() {
        return table;
    }

    /** @return a query builder preselecting all mapped fields from the table. */
    public QueryBuilder queryBuilder() {
        return db().queryBuilder().select(adapter.fields()).from(table);
    }

    /**
     * Writes the primary-key value into {@code obj}.
     *
     * @return true when written; false when there is no key field or the
     *         write failed (failures are deliberately swallowed — this is a
     *         best-effort setter used after inserts).
     */
    public boolean setId(T obj, ID id) {
        if (key == null)
            return false;
        try {
            key.set(obj, id);
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Reads the primary-key value from {@code obj}.
     *
     * @return the key value, or null when there is no key field or it could
     *         not be read.
     */
    public ID getId(T obj) {
        if (key == null)
            return null;
        try {
            return key.get(obj);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Maps {@code object} to column/value pairs, dropping the key column on
     * auto-increment tables so the database can generate it.
     */
    public Map<String, Object> vars(T object) {
        final Map<String, Object> vars = adapter.map(object);
        if (autoIncrement) vars.remove(key.name);
        return vars;
    }

    /** Inserts {@code obj}; generated keys are written back on auto-increment tables. */
    public boolean insert(T obj) throws SQLException {
        return insert(queryBuilder(), obj);
    }

    boolean insert(QueryBuilder builder, T obj) throws SQLException {
        final Map<String, Object> vars = vars(obj);
        OrmResult rs = null;
        try {
            rs = builder.executeInsert(vars);
            if (autoIncrement) {
                if (rs.next()) {
                    // Read the database-generated key and push it back into the object.
                    ID id = key.type.fromResult(rs, OrmDataBase.GENERATED_KEY, 1);
                    setId(obj, id);
                }
            }
            return true;
        } finally {
            OrmUtils.close(rs);
        }
    }

    /**
     * Updates the row whose key matches {@code obj}'s id.
     *
     * NOTE(review): assumes a key field exists — on a keyless table this
     * dereferences {@code key} and throws; confirm callers only use it on
     * keyed tables.
     *
     * @return the number of rows updated
     */
    public int update(T obj) throws SQLException {
        final ID id = getId(obj);
        final Map<String, Object> vars = vars(obj);
        return queryBuilder().where(key.name, id).update(vars);
    }

    /** Updates the row when its id already exists, otherwise inserts it. */
    public boolean save(T obj) throws SQLException {
        final ID id = getId(obj);
        if (id != null && idExists(id)) {
            return update(obj) > 0;
        } else {
            return insert(obj);
        }
    }

    /** Deletes the row whose key matches {@code obj}'s id. */
    public int delete(T obj) throws SQLException {
        ID id = getId(obj);
        return deleteById(id);
    }

    /** Deletes the row with the given key. */
    public int deleteById(ID id) throws SQLException {
        return queryBuilder().where(key.name, id).delete();
    }

    /** Deletes every row in the table. */
    public int deleteAll() throws SQLException {
        return queryBuilder().delete();
    }

    /** Deletes all rows whose keys appear in {@code ids}. */
    public int deleteIds(ID... ids) throws SQLException {
        return queryBuilder().where_in(key.name, ids).delete();
    }

    /** @return the number of rows in the table (counted on the key column). */
    public long count() throws SQLException {
        return queryBuilder().get_select_count(key.name);
    }

    /** @return the number of rows matching the given WHERE clause. */
    public long count(String whereClause, Object... whereArgs) throws SQLException {
        return db.count(table, key.name, whereClause, whereArgs);
    }

    /** @return true when a row with the given key exists. */
    public boolean idExists(ID id) throws SQLException {
        return queryBuilder().where(key.name, id).get_select_count(key.name) > 0;
    }

    /** @return the row with the given key mapped to {@code T}, as produced by the adapter. */
    public T findById(ID id) throws SQLException {
        return findByField(key.name, id);
    }

    /** @return the first row where {@code column} equals {@code value}. */
    public T findByField(String column, Object value) throws SQLException {
        return queryBuilder().where(column, value).get().row(adapter);
    }

    /** Runs a prepared query builder and maps every row. */
    public List<T> query(QueryBuilder query) throws SQLException {
        return query.get().list(adapter);
    }

    /** Runs raw SQL with positional parameters and maps every row. */
    public List<T> query(String sql, Object... params) throws SQLException {
        return db.query(sql, params).list(adapter);
    }

    /** @return all rows where {@code column} equals {@code value}. */
    public List<T> queryForEq(String column, Object value) throws SQLException {
        return queryBuilder().where(column, value).get().list(adapter);
    }

    /** @return every row in the table. */
    public List<T> queryForAll() throws SQLException {
        return queryBuilder().get().list(adapter);
    }

    /** @return a lazy iterator over the rows matched by {@code query}. */
    public OrmIterator<T> iterator(QueryBuilder query) throws SQLException {
        return query.get().it(adapter);
    }

    /** @return a lazy iterator over the rows matched by the raw SQL. */
    public OrmIterator<T> iterator(String sql, Object... params) throws SQLException {
        return db.query(sql, params).it(adapter);
    }

    /** @return a lazy iterator over every row in the table. */
    public OrmIterator<T> iterator() throws SQLException {
        return queryBuilder().get().it(adapter);
    }

    /** Truncates the table (faster than deleteAll, not transactional on some engines). */
    public void truncate() throws SQLException {
        exec("TRUNCATE TABLE " + table);
    }

    /** Executes arbitrary SQL with positional parameters. */
    public boolean exec(String sql, Object... params) throws SQLException{
        return db().execute(sql, params);
    }

    public void beginTransaction() throws SQLException {
        db().beginTransaction();
    }

    public void commit() throws SQLException {
        db().commit();
    }

    public void rollback() throws SQLException {
        db().rollback();
    }

    public void endTransaction() throws SQLException {
        db().endTransaction();
    }

    /** Closes the underlying database handle. */
    @Override public void close() {
        db.close();
    }

    /**
     * Fluent builder for {@link OrmDao} instances.
     */
    public static class Builder<R, ID>
    {
        final OrmDataBase db;
        Class<R> classOf;
        OrmObjectAdapter<R> adapter;
        String table;
        OrmField<ID> key;
        boolean autoIncrement;

        public Builder(OrmDataBase db) {
            this.db = db;
        }

        /**
         * Sets the entity class to build the DAO for.
         *
         * <p>Fixed to return the generic {@code Builder<R, ID>} — the original
         * returned the raw {@code Builder} type, losing the type arguments on
         * chained calls such as {@code builder.setClassOf(c).build()}.
         */
        public Builder<R, ID> setClassOf(Class<R> classOf) {
            this.classOf = classOf;
            return this;
        }

        /** Derives adapter, table name, and key metadata, then builds the DAO. */
        public OrmDao<R, ID> build() {
            adapter = OrmObjectAdapter.of(classOf);
            table = OrmUtils.tableName(classOf);
            key = OrmUtils.primaryKey(adapter);
            if (key != null) {
                autoIncrement = key.info.autoIncrement();
            }
            return new OrmDao<>(this);
        }
    }
}
|
#!/usr/bin/env bash
# Tail the systemd journal for the ip_responder service: start from now
# (--lines 0 suppresses history) and follow new entries as they arrive.
journalctl --lines 0 --follow _SYSTEMD_UNIT=ip_responder.service
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
import torchvision.datasets as datasets
# Define the custom neural network module
class CustomLayer(nn.Module):
    """A single fully connected layer followed by a ReLU activation."""

    def __init__(self, input_dim, output_dim):
        super(CustomLayer, self).__init__()
        self.linear = nn.Linear(input_dim, output_dim)

    def forward(self, x):
        # Affine transform, then clamp negative activations to zero.
        pre_activation = self.linear(x)
        return F.relu(pre_activation)
class CustomModel(nn.Module):
    """Two-layer MLP built from CustomLayer blocks (ReLU after each layer)."""

    def __init__(self, input_dim, hidden_dim, output_dim):
        super(CustomModel, self).__init__()
        self.layer1 = CustomLayer(input_dim, hidden_dim)
        self.layer2 = CustomLayer(hidden_dim, output_dim)

    def forward(self, x):
        # Feed the input through both layers in sequence.
        return self.layer2(self.layer1(x))
# Define the custom loss function
class CustomLoss(nn.Module):
    """Mean-squared-error loss that also accepts class-index targets.

    ``F.mse_loss`` requires prediction and target to share a shape.  The
    MNIST loader yields integer class labels of shape ``(batch,)`` while the
    model emits ``(batch, num_classes)`` scores, so index targets are one-hot
    encoded before the MSE is computed.  (The original passed the raw indices
    straight through, which broadcast to ``(batch, batch)`` and produced a
    meaningless loss value.)
    """

    def __init__(self):
        super(CustomLoss, self).__init__()

    def forward(self, prediction, target):
        if target.dim() == prediction.dim() - 1:
            # Class-index targets: expand to a float one-hot matching the
            # prediction's shape and dtype.
            target = F.one_hot(target.long(), num_classes=prediction.size(-1))
            target = target.to(prediction.dtype)
        return F.mse_loss(prediction, target)
# Load the MNIST dataset
# ToTensor scales pixels to [0, 1]; Normalize((0.5,), (0.5,)) then maps the
# single grayscale channel to roughly [-1, 1].
# NOTE(review): download=True fetches MNIST on first run — requires network access.
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
train_dataset = datasets.MNIST(root='./data', train=True, transform=transform, download=True)
test_dataset = datasets.MNIST(root='./data', train=False, transform=transform, download=True)

# Initialize the custom model and loss function
input_dim = 28 * 28  # MNIST image size
hidden_dim = 128
output_dim = 10  # 10 classes for digits 0-9
model = CustomModel(input_dim, hidden_dim, output_dim)
criterion = CustomLoss()

# Define the optimizer and other training parameters
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
batch_size = 64
train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)

# Training loop
for epoch in range(5):  # 5 epochs for demonstration
    for batch_idx, (data, target) in enumerate(train_loader):
        optimizer.zero_grad()
        data = data.view(-1, 28 * 28)  # Flatten the input data
        output = model(data)
        # NOTE(review): target holds integer class labels of shape (batch,);
        # confirm the criterion accepts index targets against (batch, 10) scores.
        loss = criterion(output, target)
        loss.backward()
        optimizer.step()
|
// Log a fixed greeting every five seconds, indefinitely (the interval is
// never cleared, so the process stays alive).
var message = "Hello world!";
setInterval(() => {
  console.log(message);
}, 5000);
|
#!/bin/bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script assumes the standard setup on tensorflow Jenkins windows machines.
# It is NOT guaranteed to work on any other machine. Use at your own risk!
#
# REQUIREMENTS:
# * All installed in standard locations:
#   - JDK8, and JAVA_HOME set.
#   - Microsoft Visual Studio 2015 Community Edition
#   - Msys2
#   - Anaconda3
# * Bazel windows executable copied as "bazel.exe" and included in PATH.

# All commands shall pass, and all should be visible.
set -x
set -e

# This script is under <repo_root>/tensorflow/tools/ci_build/windows/cpu/pip/
# Change into repository root.
script_dir=$(dirname $0)
cd ${script_dir%%tensorflow/tools/ci_build/windows/cpu/pip}.

# Setting up the environment variables Bazel and ./configure needs
source "tensorflow/tools/ci_build/windows/bazel/common_env.sh" \
  || { echo "Failed to source common_env.sh" >&2; exit 1; }

# load bazel_test_lib.sh
source "tensorflow/tools/ci_build/windows/bazel/bazel_test_lib.sh" \
  || { echo "Failed to source bazel_test_lib.sh" >&2; exit 1; }

# Recreate an empty bazelrc file under source root
export TMP_BAZELRC=.tmp.bazelrc
rm -f "${TMP_BAZELRC}"
touch "${TMP_BAZELRC}"

function cleanup {
  # Remove all options in .tmp.bazelrc
  echo "" > "${TMP_BAZELRC}"
}
trap cleanup EXIT

PY_TEST_DIR="py_test_dir"

SKIP_TEST=0
RELEASE_BUILD=0
TEST_TARGET="//${PY_TEST_DIR}/tensorflow/python/..."
PROJECT_NAME=""
EXTRA_BUILD_FLAGS=""

# --skip_test            Skip running tests
# --enable_remote_cache  Add options to enable remote cache for build and test
# --release_build        Build for release, compilation time will be longer to
#                        ensure performance
# --test_core_only       Use tensorflow/python/... as test target
# --test_contrib_only    Use tensorflow/contrib/... as test target
#for ARG in "$@"; do
while [[ $# -gt 0 ]]; do
  case "$1" in
    --tf_nightly) TF_NIGHTLY=1 ;;
    --skip_test) SKIP_TEST=1 ;;
    --enable_remote_cache) set_remote_cache_options ;;
    --release_build) RELEASE_BUILD=1 ;;
    --test_core_only) TEST_TARGET="//${PY_TEST_DIR}/tensorflow/python/..." ;;
    --test_contrib_only) TEST_TARGET="//${PY_TEST_DIR}/tensorflow/contrib/..." ;;
    --extra_build_flags)
      shift
      if [[ -z "$1" ]]; then
        break
      fi
      EXTRA_BUILD_FLAGS="$1"
      ;;
    --project_name)
      shift
      if [[ -z "$1" ]]; then
        break
      fi
      PROJECT_NAME="$1"
      ;;
    *)
  esac
  shift
done

if [[ "$RELEASE_BUILD" == 1 ]]; then
  # Overriding eigen strong inline speeds up the compiling of conv_grad_ops_3d.cc and conv_ops_3d.cc
  # by 20 minutes. See https://github.com/tensorflow/tensorflow/issues/10521
  # Because this hurts the performance of TF, we don't override it in release build.
  export TF_OVERRIDE_EIGEN_STRONG_INLINE=0
else
  export TF_OVERRIDE_EIGEN_STRONG_INLINE=1
fi

if [[ "$TF_NIGHTLY" == 1 ]]; then
  python tensorflow/tools/ci_build/update_version.py --nightly
  # Quote the expansion: an unquoted [ -z ${PROJECT_NAME} ] breaks on values
  # containing whitespace (and only happens to work for the empty string).
  if [ -z "${PROJECT_NAME}" ]; then
    EXTRA_PIP_FLAGS="--nightly_flag"
  else
    EXTRA_PIP_FLAGS="--project_name ${PROJECT_NAME} --nightly_flag"
  fi
fi

# Enable short object file path to avoid long path issue on Windows.
echo "startup --output_user_root=${TMPDIR}" >> "${TMP_BAZELRC}"

if ! grep -q "import %workspace%/${TMP_BAZELRC}" .bazelrc; then
  echo "import %workspace%/${TMP_BAZELRC}" >> .bazelrc
fi

run_configure_for_cpu_build

bazel build --announce_rc --config=opt ${EXTRA_BUILD_FLAGS} \
  tensorflow/tools/pip_package:build_pip_package || exit $?

if [[ "$SKIP_TEST" == 1 ]]; then
  exit 0
fi

# Create a python test directory to avoid package name conflict
create_python_test_dir "${PY_TEST_DIR}"

./bazel-bin/tensorflow/tools/pip_package/build_pip_package "$PWD/${PY_TEST_DIR}" "${EXTRA_PIP_FLAGS}"

if [[ "$TF_NIGHTLY" == 1 ]]; then
  exit 0
fi

# Running python tests on Windows needs pip package installed
PIP_NAME=$(ls ${PY_TEST_DIR}/tensorflow-*.whl)
reinstall_tensorflow_pip ${PIP_NAME}

# NUMBER_OF_PROCESSORS is predefined on Windows
N_JOBS="${NUMBER_OF_PROCESSORS}"

# Define no_tensorflow_py_deps=true so that every py_test has no deps anymore,
# which will result testing system installed tensorflow
bazel test --announce_rc --config=opt -k --test_output=errors \
  --define=no_tensorflow_py_deps=true --test_lang_filters=py \
  --test_tag_filters=-no_pip,-no_windows,-no_oss,-gpu \
  --build_tag_filters=-no_pip,-no_windows,-no_oss,-gpu --build_tests_only \
  --test_size_filters=small,medium \
  --jobs="${N_JOBS}" --test_timeout="300,450,1200,3600" \
  --flaky_test_attempts=3 \
  ${TEST_TARGET}
|
<filename>app/containers/Lounge/mainFunctions/isNewSubtitle.js
import { threshSubSubtitle } from '../constants';
export default function isNewSubtitle(prev, next) {
  // With either frame missing there is nothing to compare: treat it as new.
  if (!prev || !next) {
    return true;
  }
  // Count pixels that differ between the two frames; a count above the
  // threshold means the subtitle region has changed.
  const changedPixels = prev.sub(next).countNonZero();
  return changedPixels > threshSubSubtitle;
}
|
<reponame>github-clonner/chef-patissier
const gulp = require('gulp');
const revReplace = require('gulp-rev-replace');
const path = require('path');
const config = require('@dameblanche/core/lib/configLoader');
let revConfig = config.getTaskConfig('rev');
let templatesConfig = config.getTaskConfig(revConfig.htmlTask);
const createIgnoredList = require('./create-ignored-list');
// 5) Update asset references in HTML
const revUpdateHTMLTask = () => {
  // The rev manifest maps original asset paths to their hashed filenames.
  const manifest = gulp.src(path.join(config.root.dest, '/rev-manifest.json'));
  const htmlDest = templatesConfig ? templatesConfig.dest : './';
  const htmlGlobs = [
    path.join(config.root.dest, htmlDest, '/**/*.html'),
    ...createIgnoredList(),
  ];
  return gulp
    .src(htmlGlobs)
    .pipe(revReplace({ manifest }))
    .pipe(gulp.dest(path.join(config.root.dest, htmlDest)));
};

module.exports = revUpdateHTMLTask;
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.test.integration.entity;
import static info.archinnov.achilles.test.integration.entity.EntityWithWriteOneAndReadThreeConsistency
.TABLE_NAME;
import static info.archinnov.achilles.type.ConsistencyLevel.ONE;
import static info.archinnov.achilles.type.ConsistencyLevel.THREE;
import info.archinnov.achilles.annotations.Column;
import info.archinnov.achilles.annotations.Consistency;
import info.archinnov.achilles.annotations.Entity;
import info.archinnov.achilles.annotations.Id;
/**
 * Integration-test entity configured with asymmetric consistency levels:
 * reads at THREE, writes at ONE (see the {@code @Consistency} annotation).
 */
@Entity(table = TABLE_NAME)
@Consistency(read = THREE, write = ONE)
public class EntityWithWriteOneAndReadThreeConsistency {

    /** Table name, also referenced from the {@code @Entity} annotation. */
    public static final String TABLE_NAME = "consistency_test2";

    @Id
    private Long id;

    @Column
    private String firstname;

    @Column
    private String lastname;

    /** No-arg constructor required by the mapping framework. */
    public EntityWithWriteOneAndReadThreeConsistency() {
    }

    public EntityWithWriteOneAndReadThreeConsistency(Long id, String firstname, String lastname) {
        this.id = id;
        this.firstname = firstname;
        this.lastname = lastname;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getFirstname() {
        return firstname;
    }

    public void setFirstname(String firstname) {
        this.firstname = firstname;
    }

    public String getLastname() {
        return lastname;
    }

    public void setLastname(String lastname) {
        this.lastname = lastname;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.