text stringlengths 1 1.05M |
|---|
from sys import setprofile
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from .drawers import Drawer
from .positioners import Limiter, ChartPositioner
from .models import Line, Candle
from .factories import VertexesFactory
from PyQt5.QtGui import QBrush, QColor, QPainter
from PyQt5.QtWidgets import QWidget
from .themes import ThemeHolder
class Chartilo(QWidget):
    """Chart widget that renders parsed vertex data through pluggable drawers.

    Data is stored on the class itself, so every Chartilo instance shares the
    same data set (presumably intentional — confirm if independent charts are
    ever needed).
    """

    # Raw input data shared by all instances.
    data = None
    # Vertex objects produced from `data` by VertexesFactory.
    parsedData = None

    def __init__(self) -> None:
        super(Chartilo, self).__init__()

    def resizeEvent(self, event):
        """Clamp the scroll offset so a resize never scrolls past the data."""
        try:
            amountOnScreen = Limiter.getVertexesAmount(
                Chartilo.parsedData, VertexesFactory.Type.width,
                self.painter.device().width(),
                ChartPositioner.paddingHorizontal) + 1
            if (Limiter.vertexesOffset // VertexesFactory.Type.width
                    + amountOnScreen > len(Chartilo.data)):
                Limiter.vertexesOffset -= (
                    (Limiter.vertexesOffset // VertexesFactory.Type.width
                     + amountOnScreen) - len(Chartilo.data)
                ) * VertexesFactory.Type.width
        except Exception:
            # Best effort: self.painter does not exist before the first
            # paintEvent and data may not be set yet.
            pass

    def paintEvent(self, event) -> None:
        """Paint the chart: background first, then every configured drawer."""
        self.painter = QPainter()
        self.painter.begin(self)
        if not Chartilo.parsedData or not Chartilo.data:
            print("There is no data to draw")
            self.painter.end()
            return
        vertexesAmount = Limiter.getVertexesAmount(
            Chartilo.parsedData, VertexesFactory.Type.width,
            self.painter.device().width(),
            ChartPositioner.paddingHorizontal
        ) + Limiter.vertexesOffset // VertexesFactory.Type.width
        Limiter.setDrawableData(Limiter.calculateDrawableData(
            Chartilo.parsedData,
            Limiter.vertexesOffset // VertexesFactory.Type.width,
            vertexesAmount))
        if not Limiter.drawableData:
            print("There is no data to draw")
            # Bug fix: the painter must also be ended on this early exit,
            # otherwise the QPainter stays active on the widget.
            self.painter.end()
            return
        Drawer.setMaxMinValue(Limiter.drawableData)
        if self.states.get("positions") is not None:
            # Apply caller-supplied positioning overrides to ChartPositioner.
            for position in self.states["positions"]:
                if position not in ChartPositioner.__dict__:
                    raise Exception("Unexpected field: " + position)
                setattr(ChartPositioner, position,
                        self.states["positions"][position])
        try:
            self.states["drawers"]
        except Exception as e:
            print(str(e) + "\nThere is not enough state: drawers")
        try:
            if self.states.get("theme") is not None:
                ThemeHolder.theme = self.states["theme"]()
        except Exception:
            print("Тема не найдена")
        try:
            self.painter.setBrush(QBrush(QColor(ThemeHolder.theme.backgroundColor)))
            self.painter.drawRect(0, 0, self.painter.device().width(),
                                  self.painter.device().height())
        except Exception:
            print("Не удалось поменять фон, убедиться что в вашей теме он прописан")
        for drawer in self.states["drawers"]:
            try:
                self.states["drawers"][drawer](self.painter).draw(Limiter.drawableData)
            except Exception as e:
                print(str(e) + "\nThere is unexpected drawer: " + str(drawer))
        self.painter.end()

    def updateCanvas(self):
        """Re-parse raw data into vertexes for the configured type, repaint."""
        try:
            vertexType = self.states["type"]
            VertexesFactory.Type = vertexType
        except Exception as e:
            print(str(e))
            return
        Chartilo.parsedData = VertexesFactory().createVertexes(Chartilo.data)
        self.update()

    def setData(self, data):
        """Install the raw chart data (shared by all Chartilo instances)."""
        Chartilo.data = data

    def setStates(self, states):
        """Install the state dict (keys seen here: type, drawers, positions, theme)."""
        self.states = states

    def mousePressEvent(self, event):
        """Remember the x coordinate where a drag started."""
        self.beginPosition = int(event.x())
        self.previousMove = self.beginPosition

    def mouseMoveEvent(self, event):
        """Scroll the chart horizontally while the mouse is dragged."""
        motion = int(event.x())
        amountOnScreen = Limiter.getVertexesAmount(
            Chartilo.parsedData, VertexesFactory.Type.width,
            self.painter.device().width(),
            ChartPositioner.paddingHorizontal) + 1
        speed = int(2 * VertexesFactory.Type.width)
        if motion > self.previousMove:
            # Consume horizontal padding first; then move the vertex offset,
            # but never beyond the end of the data.
            if not ChartPositioner.paddingHorizontal < 0 and Limiter.vertexesOffset == 0:
                ChartPositioner.paddingHorizontal -= speed
            else:
                if not len(Chartilo.data) < (Limiter.vertexesOffset + speed) // VertexesFactory.Type.width + amountOnScreen:
                    Limiter.vertexesOffset += speed
        if motion < self.previousMove:
            if ChartPositioner.paddingHorizontal < ChartPositioner.maximalHorizontalPadding and Limiter.vertexesOffset == 0:
                ChartPositioner.paddingHorizontal += speed
            else:
                # Never scroll to a negative offset.
                if Limiter.vertexesOffset - speed < 0:
                    Limiter.vertexesOffset = 0
                else:
                    Limiter.vertexesOffset -= speed
        self.previousMove = motion
        self.updateCanvas()

    def mouseReleaseEvent(self, event):
        """Record where the drag ended."""
        self.endPosition = event.x()
|
<reponame>phamvinhphat/SOS-BE-
const mongoose = require('mongoose');;
const {createServer} = require('http')
const express = require('express');
const server = express();
const httpServer = createServer(server);
const { Server } = require("socket.io");
// Create and configure a Socket.IO server bound to the given HTTP server.
// Bug fix: the io instance was previously discarded, so callers could not
// register further handlers; it is now returned (backward compatible).
const createSocketIO = (httpServer) => {
  const io = new Server(httpServer, {
    serveClient: false,
    pingInterval: 10000,
    pingTimeout: 30000,
    cookie: false
  });

  io.on("connection", (socket) => {
    console.log("connection socket");
  });

  // Log low-level engine connection failures for diagnostics.
  io.engine.on("connection_error", (err) => {
    console.log(err.req);     // the request object
    console.log(err.code);    // the error code, for example 1
    console.log(err.message); // the error message, e.g. "Session ID unknown"
    console.log(err.context); // some additional error context
  });

  return io;
};
module.exports = {
createSocketIO,
};
// const mongoose = require('mongoose');
// const app = require('./app');
// const {createServer} = require('http')
// const config = require('./config/config');
// const logger = require('./config/logger');
// const express = require('express');
// const server = express();
// const httpServer = createServer(server);
// const { Server } = require("socket.io");
// const io = new Server(httpServer);
//
//
// mongoose.connect(config.mongoose.url, config.mongoose.options).then(() => {
// logger.info('Connected to MongoDB');
//
// httpServer.listen(config.port,() => {
// console.log(`Server is running at PORT ${config.port}`);
// });
// // io.on('connection', (socket) => {
// // console.log('a user connected');
// // });
// //
// // server.listen(3000, () => {
// // console.log('listening on *:3000');
// // });
// });
|
#include <stdio.h>
/* Print all prime numbers less than or equal to n (sieve of Eratosthenes).
 * Bug fix: the original allocated int arr[n] but indexed arr[i] for i <= n,
 * writing and reading one element past the end of the array. The sieve
 * needs n + 1 entries so that index n itself is valid. */
void printPrimes(int n)
{
    if (n < 2) /* no primes below 2 */
        return;

    /* arr[i] != 0 marks i as composite; valid indices are 0..n inclusive. */
    int arr[n + 1];
    for (int i = 0; i <= n; i++)
        arr[i] = 0;

    /* Mark every multiple of each still-unmarked number as composite. */
    for (int i = 2; i <= n; i++) {
        if (arr[i] == 0) {
            for (int j = 2 * i; j <= n; j += i)
                arr[j] = 1;
        }
    }

    /* Whatever remains unmarked is prime. */
    for (int i = 2; i <= n; i++)
        if (arr[i] == 0)
            printf("%d ", i);
}
/* Driver: print every prime up to the chosen limit. */
int main(void)
{
    const int limit = 10;
    printPrimes(limit);
    return 0;
}
words = ['This', 'is', 'sentence', 'with', 'many', 'words'] |
#!/bin/bash
# Dump environment on to file so that we can load it up on the crontab
# (cron jobs run with a minimal environment, so the container env is saved).
printenv > /etc/docker-env
# Run cron & tail logs
cron
touch /var/log/cert-update.log
# tail -f keeps a foreground process alive (required for the container)
# while streaming the cert-update log.
tail -f /var/log/cert-update.log
|
#!/bin/bash
# Collect every plugin's assets into templates/plugins/ with one glob copy.
mkdir -p templates/plugins
cp ../../*/assets/* templates/plugins/
# The per-plugin copies below are superseded by the glob above and kept
# only as a record of which plugins ship assets.
# cp ../../automod/assets/* templates/plugins/
# cp ../../automod_legacy/assets/* templates/plugins/
# cp ../../autorole/assets/* templates/plugins/
# cp ../../commands/assets/* templates/plugins/
# cp ../../customcommands/assets/* templates/plugins/
# cp ../../logs/assets/* templates/plugins/
# cp ../../moderation/assets/* templates/plugins/
# cp ../../notifications/assets/* templates/plugins/
# cp ../../reddit/assets/* templates/plugins/
# cp ../../reputation/assets/* templates/plugins/
# cp ../../rolecommands/assets/* templates/plugins/
# cp ../../serverstats/assets/* templates/plugins/
# cp ../../soundboard/assets/* templates/plugins/
# cp ../../streaming/assets/* templates/plugins/
# cp ../../youtube/assets/* templates/plugins/
# cp ../../premium/assets/* templates/plugins/
<reponame>go-xe2/xfw<gh_stars>0
// 表单过滤库
package xfilter
import (
"encoding/json"
"github.com/gogf/gf/g/os/glog"
"github.com/gogf/gf/g/text/gstr"
"github.com/gogf/gf/g/util/gconv"
"github.com/gogf/gf/g/util/gvalid"
"reflect"
"strings"
)
const XFilterTagName = "filter"
const XOrmTagName = "orm"
// hasField reports whether fieldName is selected. With no selects argument
// every field is selected; otherwise the first selects map is consulted and
// its value is converted to bool.
func hasField(fieldName string, selects ...map[string]interface{}) bool {
	if len(selects) == 0 {
		return true
	}
	v, ok := selects[0][fieldName]
	if !ok {
		return false
	}
	return gconv.Bool(v)
}
// GetFilterFromMap builds the filter-tag list from a map of
// fieldName -> tag-definition string, honouring the optional field selection.
func GetFilterFromMap(mp map[string]string, selects ...map[string]interface{}) []*FilterTag {
	var tags []*FilterTag
	for name, def := range mp {
		if !hasField(name, selects...) {
			continue
		}
		tag := NewTag(def)
		tag.SetFieldName(FieldName(name))
		tags = append(tags, tag)
	}
	return tags
}
// GetFilterFromArray builds the filter-tag list from a slice of
// tag-definition strings, honouring the optional field selection.
func GetFilterFromArray(arr []string, selects ...map[string]interface{}) []*FilterTag {
	var tags []*FilterTag
	for _, def := range arr {
		tag := NewTag(def)
		if !hasField(string(tag.fieldName), selects...) {
			continue
		}
		tags = append(tags, tag)
	}
	return tags
}
// GetFilterFromJson parses a JSON document of fieldName -> rule-object and
// builds the corresponding filter-tag list. On a parse error an empty list
// is returned and the error is logged.
func GetFilterFromJson(js string, selects ...map[string]interface{}) []*FilterTag {
	var doc map[string]interface{}
	if err := json.Unmarshal([]byte(js), &doc); err != nil {
		glog.Error("GetFilterFromJson parse json error:", err)
		return []*FilterTag{}
	}
	var tags []*FilterTag
	for name, raw := range doc {
		rule, ok := raw.(map[string]interface{})
		if !ok {
			// Non-object entries carry no rule definition; skip them.
			continue
		}
		if !hasField(name, selects...) {
			continue
		}
		tag := NewTag()
		tag.FromJson(rule)
		tag.SetFieldName(FieldName(name))
		tags = append(tags, tag)
	}
	return tags
}
// GetFilterFromStruct builds the filter-tag list from the `filter` struct
// tags of the struct pointed to by obj. A missing field name falls back to
// the `orm` tag and then to the Go field name; a missing data type is
// derived from the field's kind. obj must be a pointer to a struct.
//
// Fix: removed the no-op `break` statements — Go switch cases do not fall
// through, so they had no effect.
func GetFilterFromStruct(obj interface{}, selects ...map[string]interface{}) []*FilterTag {
	if reflect.TypeOf(obj).Kind() != reflect.Ptr {
		glog.Error("GetFilterFromStruct fail, obj is not struct ptr, obj kind is:", reflect.TypeOf(obj).Kind())
		return []*FilterTag{}
	}
	var result []*FilterTag
	elem := reflect.TypeOf(obj).Elem()
	for i := 0; i < elem.NumField(); i++ {
		fieldName := elem.Field(i).Name
		if !hasField(fieldName, selects...) {
			continue
		}
		tag := elem.Field(i).Tag.Get(XFilterTagName)
		if tag == "" {
			continue
		}
		item := NewTag(tag)
		if item.fieldName == "" {
			// Prefer the orm tag as the storage field name.
			if ormTag := elem.Field(i).Tag.Get(XOrmTagName); ormTag != "" {
				item.fieldName = FieldName(ormTag)
			} else {
				item.fieldName = FieldName(fieldName)
			}
		}
		if item.dataType == "" {
			switch elem.Field(i).Type.Kind() {
			case reflect.String:
				item.dataType = "string"
			case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
				item.dataType = "int"
			case reflect.Float32, reflect.Float64:
				item.dataType = "float"
			case reflect.Bool:
				item.dataType = "bool"
			}
		}
		result = append(result, item)
	}
	return result
}
// parseDefaultValue resolves a default value. A string of the form ":fn" is
// treated as a registered function name whose return value is used; any
// other value (or an unregistered/failing function) is returned unchanged.
func parseDefaultValue(value interface{}) interface{} {
	expr, ok := value.(string)
	if !ok || expr == "" {
		return value
	}
	if !strings.HasPrefix(expr, ":") {
		return value
	}
	fnName := gstr.SubStr(expr, 1)
	if FuncManager().HasFunc(fnName) {
		if v, err := FuncManager().Call(fnName); err == nil {
			return v
		}
	}
	return value
}
// convertToRuleDataType coerces value to the Go type matching the declared
// field data type; unknown types fall back to string.
func convertToRuleDataType(dateType FieldDataType, value interface{}) interface{} {
	switch dateType {
	case "int":
		return gconv.Int64(value)
	case "float":
		return gconv.Float64(value)
	case "bool":
		return gconv.Bool(value)
	case "time":
		return gconv.Time(value)
	case "string":
		return gconv.String(value)
	default:
		return gconv.String(value)
	}
}
// doFilter filters src according to the filter-tag list and validates the
// result with gvalid.
// onlyParams: when true, only fields actually present in src are returned
// (default values are not applied).
func doFilter(src map[string]interface{}, filters []*FilterTag, onlyParams ...bool) (map[string]interface{}, *gvalid.Error) {
	var result = make(map[string]interface{})
	var validRuleMap []string
	var only = false
	if len(onlyParams) > 0 {
		only = onlyParams[0]
	}
	for _, filter := range filters {
		fieldName := string(filter.fieldName)
		paramName := filter.paramName
		if paramName == "" {
			paramName = fieldName
		}
		if filter.rules != "" {
			validRuleMap = append(validRuleMap, filter.ToRuleTag())
		}
		// A parameter name of "-" means: never read from src, always use the
		// default value (when it resolves to non-nil).
		if paramName == "-" {
			v := parseDefaultValue(filter.defValue)
			if v != nil {
				result[fieldName] = v
			}
			continue
		}
		fnMgr := FuncManager()
		if v, ok := src[paramName]; ok && v != nil {
			if fnMgr.HasFormatter(filter.formatter) {
				if v, err := fnMgr.Format(filter.formatter, convertToRuleDataType(filter.dataType, v)); err == nil {
					result[fieldName] = v
				}
			} else {
				result[fieldName] = convertToRuleDataType(filter.dataType, v)
			}
		} else {
			if !only && filter.defValue != nil {
				v := parseDefaultValue(filter.defValue)
				if v == nil {
					continue
				}
				if fnMgr.HasFormatter(filter.formatter) {
					// Bug fix: the formatted default was previously stored
					// only when Format FAILED (err != nil). Store it on
					// success instead, matching the src-value branch above.
					if v1, err := fnMgr.Format(filter.formatter, v); err == nil {
						result[fieldName] = v1
					}
				} else {
					result[fieldName] = convertToRuleDataType(filter.dataType, v)
				}
			}
		}
	}
	// Validate the filtered result against the collected rule tags.
	err := gvalid.CheckMap(result, validRuleMap)
	if err != nil {
		return result, err
	}
	return result, nil
}
// FilterMap filters src using a map of fieldName -> rule-definition string.
func FilterMap(src map[string]interface{}, rules map[string]string, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromMap(rules, selects...)
	return doFilter(src, filters)
}

// Filter filters src using a slice of rule-definition strings.
func Filter(src map[string]interface{}, rules []string, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromArray(rules, selects...)
	return doFilter(src, filters)
}

// FilterOnly filters src using a slice of rule-definition strings and
// returns only the fields present in the input parameters.
func FilterOnly(src map[string]interface{}, rules []string, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromArray(rules, selects...)
	return doFilter(src, filters, true)
}

// FilterStruct filters src using the struct-tag definitions of rules.
func FilterStruct(src map[string]interface{}, rules interface{}, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromStruct(rules, selects...)
	return doFilter(src, filters)
}

// FilterStructOnly filters src using the struct-tag definitions of rules and
// returns only the fields present in src.
func FilterStructOnly(src map[string]interface{}, rules interface{}, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromStruct(rules, selects...)
	return doFilter(src, filters, true)
}

// FilterJson filters src using JSON-encoded validation rules.
func FilterJson(src map[string]interface{}, jsonRule string, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromJson(jsonRule, selects...)
	return doFilter(src, filters)
}

// FilterJsonOnly filters src using JSON-encoded validation rules and returns
// only the fields present in src.
func FilterJsonOnly(src map[string]interface{}, jsonRule string, selects ...map[string]interface{}) (map[string]interface{}, *gvalid.Error) {
	filters := GetFilterFromJson(jsonRule, selects...)
	return doFilter(src, filters, true)
}
|
# Run link prediction for every type and rate on each dataset.
# Fix: removed the dead initial `declare -a array=()` (immediately
# overwritten) and de-duplicated the two identical loops, which differed
# only in the --dataset argument. Execution order is preserved: all types
# and rates for cora first, then for citeseer.
declare -a array=("uniform" "bias" "struct")

for dataset in cora citeseer; do
  for ((i = 0; i < ${#array[@]}; i++)); do
    echo "type = ${array[i]}" >> log.txt
    a=0
    while [ $a -lt 10 ]
    do
      # Rate sweeps 0.0, 0.1, ..., 0.9 (one decimal place via bc).
      b=`echo "scale=1; $a / 10 " | bc`
      echo $b >> log.txt
      python run_link_pred.py --rate $b --type ${array[i]} --dataset $dataset --rec 30 --lr 0.01 --epoch 200
      a=`expr $a + 1`
    done
  done
done
<filename>public/gulp/routers/a.js<gh_stars>1-10
var _ = require('underscore');
var fs = require('fs');
module.exports = function(router){ |
import React, {useState} from 'react';
const QuoteList = () => {
const [quotes, setQuotes] = useState([
{ name: 'Albert Einstein', text: 'Life is like riding a bicycle. To keep your balance, you must keep moving.'},
{ name: 'Oscar Wilde', text: 'Be yourself; everyone else is already taken.'}
]);
const addQuote = (name, text) => {
setQuotes([...quotes, {name, text}]);
}
const editQuote = (name, text, index) => {
const newQuotes = quotes.map((quote, i) => {
if(i === index) {
return {name, text};
}
return quote;
});
setQuotes(newQuotes);
}
const deleteQuote = index => {
const newQuotes = [...quotes];
newQuotes.splice(index, 1);
setQuotes(newQuotes);
}
return (
<div>
QuoteList
{quotes.map((quote, i) => (
<div key={i}>
<p>{quote.name}: {quote.text}</p>
<button onClick={() => editQuote(quote.name, quote.text, i)}>
Edit
</button>
<button onClick={() => deleteQuote(i)}>
Delete
</button>
</div>
))}
<form>
<input type="text" name="name" placeholder="Name" />
<input type="text" name="text" placeholder="Quote" />
<button onClick={e => {
e.preventDefault();
const name = e.target.name.value;
const text = e.target.text.value;
addQuote(name, text);
}}>
Add
</button>
</form>
</div>
);
}
export default QuoteList; |
import { writable } from 'svelte/store';

// Writable store holding the list of rates; starts empty.
export default writable([]);
|
<reponame>pradeep-gr/mbed-os5-onsemi<filename>features/storage/FEATURE_STORAGE/flash-journal/flash-journal-strategy-sequential/flash_journal_private.h<gh_stars>10-100
/*
* Copyright (c) 2006-2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __FLASH_JOURNAL_PRIVATE_H__
#define __FLASH_JOURNAL_PRIVATE_H__
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#include "flash-journal/flash_journal.h"
/* Round N up to the nearest multiple of BOUNDARY (BOUNDARY must be > 0). */
static inline uint32_t roundUp_uint32(uint32_t N, uint32_t BOUNDARY) {
    uint32_t quotient = (N + BOUNDARY - 1) / BOUNDARY;
    return quotient * BOUNDARY;
}
/* Round N down to the nearest multiple of BOUNDARY (BOUNDARY must be > 0). */
static inline uint32_t roundDown_uint32(uint32_t N, uint32_t BOUNDARY) {
    return N - (N % BOUNDARY);
}
#define LCM_OF_ALL_ERASE_UNITS 4096 /* Assume an LCM of erase_units for now. This will be generalized later. */
static const uint32_t SEQUENTIAL_FLASH_JOURNAL_INVALD_NEXT_SEQUENCE_NUMBER = 0xFFFFFFFFUL;
static const uint32_t SEQUENTIAL_FLASH_JOURNAL_MAGIC = 0xCE02102AUL;
static const uint32_t SEQUENTIAL_FLASH_JOURNAL_VERSION = 1;
static const uint32_t SEQUENTIAL_FLASH_JOURNAL_HEADER_MAGIC = 0xCEA00AEEUL;
static const uint32_t SEQUENTIAL_FLASH_JOURNAL_HEADER_VERSION = 1;
/* Lifecycle states of a sequential flash journal (see the state field of
 * SequentialFlashJournal_t). State names are taken literally; the LOGGING_*
 * states track the phases of writing one log entry (erase, head, body, tail). */
typedef enum {
    SEQUENTIAL_JOURNAL_STATE_NOT_INITIALIZED,           /* journal not yet set up. */
    SEQUENTIAL_JOURNAL_STATE_INIT_SCANNING_LOG_HEADERS, /* initialization is scanning slot headers. */
    SEQUENTIAL_JOURNAL_STATE_INITIALIZED,               /* ready to accept commands. */
    SEQUENTIAL_JOURNAL_STATE_RESETING,                  /* reset in progress. */
    SEQUENTIAL_JOURNAL_STATE_LOGGING_ERASE,             /* erasing the target slot. */
    SEQUENTIAL_JOURNAL_STATE_LOGGING_HEAD,              /* writing the log-entry head. */
    SEQUENTIAL_JOURNAL_STATE_LOGGING_BODY,              /* writing the blob payload. */
    SEQUENTIAL_JOURNAL_STATE_LOGGING_TAIL,              /* writing the log-entry tail. */
    SEQUENTIAL_JOURNAL_STATE_READING,                   /* a read is in progress. */
} SequentialFlashJournalState_t;
/**
* Meta-data placed at the head of a Journal. The actual header would be an
* extension of this generic header, and would depend on the implementation
* strategy. Initialization algorithms can expect to find this generic header at
* the start of every Journal.
*/
typedef struct _SequentialFlashJournalHeader {
FlashJournalHeader_t genericHeader; /** Generic meta-data placed at the head of a Journal; common to all journal types. */
uint32_t magic; /** Sequential journal header specific magic code. */
uint32_t version; /** Revision number for this sequential journal header. */
uint32_t numSlots; /** Maximum number of logged blobs; i.e. maximum number of versions of the journaled payload. */
uint32_t sizeofSlot; /** Slot size. Each slot holds a header, blob-payload, and a tail. */
} SequentialFlashJournalHeader_t;
/**
* Meta-data placed at the head of a sequential-log entry.
*/
typedef struct _SequentialFlashJournalLogHead {
uint32_t version;
uint32_t magic;
uint32_t sequenceNumber;
uint32_t reserved;
} SequentialFlashJournalLogHead_t;
#define SEQUENTIAL_JOURNAL_VALID_HEAD(PTR) \
(((PTR)->version == SEQUENTIAL_FLASH_JOURNAL_VERSION) && ((PTR)->magic == SEQUENTIAL_FLASH_JOURNAL_MAGIC))
/**
* Meta-data placed at the tail of a sequential-log entry.
*
* @note the most crucial items (the ones which play a role in the validation of
* the log-entry) are placed at the end of this structure; this ensures that
* a partially written log-entry-tail won't be accepted as valid.
*/
typedef struct _SequentialFlashJournalLogTail {
uint32_t sizeofBlob; /**< the size of the payload in this blob. */
uint32_t magic;
uint32_t sequenceNumber;
uint32_t crc32; /**< This field contains the CRC of the header, body (only including logged data),
* and the tail. The 'CRC32' field is assumed to hold 0x0 for the purpose of
* computing the CRC */
} SequentialFlashJournalLogTail_t;
#define SEQUENTIAL_JOURNAL_VALID_TAIL(TAIL_PTR) ((TAIL_PTR)->magic == SEQUENTIAL_FLASH_JOURNAL_MAGIC)
typedef struct _SequentialFlashJournal_t {
FlashJournal_Ops_t ops; /**< the mandatory OPS table defining the strategy. */
FlashJournal_Callback_t callback; /**< command completion callback. */
FlashJournal_Info_t info; /**< the info structure returned from GetInfo(). */
ARM_DRIVER_STORAGE *mtd; /**< The underlying Memory-Technology-Device. */
ARM_STORAGE_CAPABILITIES mtdCapabilities; /**< the return from mtd->GetCapabilities(); held for quick reference. */
uint64_t mtdStartOffset; /**< the start of the address range maintained by the underlying MTD. */
uint32_t firstSlotOffset; /** Offset from the start of the journal header to the actual logged journal. */
uint32_t numSlots; /** Maximum number of logged blobs; i.e. maximum number of versions of the journaled payload. */
uint32_t sizeofSlot; /**< size of the log stride. */
uint32_t nextSequenceNumber; /**< the next valid sequence number to be used when logging the next blob. */
uint32_t currentBlobIndex; /**< index of the most recently written blob. */
SequentialFlashJournalState_t state; /**< state of the journal. SEQUENTIAL_JOURNAL_STATE_INITIALIZED being the default. */
FlashJournal_OpCode_t prevCommand; /**< the last command issued to the journal. */
/**
* The following is a union of sub-structures meant to keep state relevant
* to the commands during their execution.
*/
union {
/** state relevant to initialization. */
struct {
uint64_t currentOffset;
struct {
uint32_t headSequenceNumber;
SequentialFlashJournalLogTail_t tail;
};
} initScan;
/** state relevant to logging of data. */
struct {
const uint8_t *blob; /**< the original buffer holding source data. */
size_t sizeofBlob;
union {
struct {
uint64_t mtdEraseOffset;
};
struct {
uint64_t mtdOffset; /**< the current Storage offset at which data will be written. */
uint64_t mtdTailOffset; /**< Storage offset at which the SequentialFlashJournalLogTail_t will be logged for this log-entry. */
const uint8_t *dataBeingLogged; /**< temporary pointer aimed at the next data to be logged. */
size_t amountLeftToLog;
union {
SequentialFlashJournalLogHead_t head;
SequentialFlashJournalLogTail_t tail;
};
};
};
} log;
/** state relevant to read-back of data. */
struct {
const uint8_t *blob; /**< the original buffer holding source data. */
size_t sizeofBlob;
uint64_t mtdOffset; /**< the current Storage offset from which data is being read. */
uint8_t *dataBeingRead; /**< temporary pointer aimed at the next data to be read-into. */
size_t amountLeftToRead;
size_t logicalOffset; /**< the logical offset within the blob at which the next read will occur. */
} read;
};
} SequentialFlashJournal_t;
/**<
* A static assert to ensure that the size of SequentialJournal is smaller than
* FlashJournal_t. The caller will only allocate a FlashJournal_t and expect the
* Sequential Strategy to reuse that space for a SequentialFlashJournal_t.
*/
typedef char AssertSequentialJournalSizeLessThanOrEqualToGenericJournal[sizeof(SequentialFlashJournal_t)<=sizeof(FlashJournal_t)?1:-1];
#define SLOT_ADDRESS(JOURNAL, INDEX) ((JOURNAL)->mtdStartOffset + (JOURNAL)->firstSlotOffset + ((INDEX) * (JOURNAL)->sizeofSlot))
#ifdef __cplusplus
}
#endif // __cplusplus
#endif /* __FLASH_JOURNAL_PRIVATE_H__ */
|
class StringList:
    """Holds a list of strings and can report each one."""

    def __init__(self, string):
        """Seed the internal list with a single string."""
        self.stringlist = [string]

    def printString(self):
        """Print every stored string followed by ' was stored'."""
        for item in self.stringlist:
            print(item + ' was stored')


mylist = StringList('Hello World')
mylist.printString()
<reponame>zonesgame/StendhalArcClient<gh_stars>1-10
package z.system;
import arc.Core;
import arc.Events;
import arc.math.Mathf;
import arc.struct.ObjectMap;
import arc.util.serialization.XmlReader;
import mindustry.game.EventType;
import mindustry.world.Tile;
import static mindustry.Vars.world;
/**
 * Works around the stock Floor tiles not carrying a TileEntity, which makes
 * per-tile texture-variant data unavailable; this system stores it instead.
 */
public class GroundSystem {
    /** Ground texture variant index per tile, indexed as [x][y]. */
    private int[][] variants;
    /** Block name -> "center" offsets loaded from the ground-system XML. */
    private ObjectMap<String, int[]> regionCenter = new ObjectMap<>();

    public GroundSystem() {
        // Re-roll the variant table whenever the ground system is (re)initialized.
        Events.on(EventType.GroundSystemInitEvent.class, event -> init());

        // Load the XML configuration: each child node provides a block name
        // and a comma-separated list of integer center offsets.
        XmlReader.Element groundRoot = new XmlReader().parse(Core.files.internal("debug/xml/groundSystem.xml"));
        for (int i = 0; i < groundRoot.getChildCount(); i++) {
            XmlReader.Element node = groundRoot.getChild(i);
            String name = node.getAttribute("name");
            String[] tmp = node.getChildByName("center").getText().split(",");
            int[] offset = new int[tmp.length];
            for (int j = 0; j < offset.length; j++) {
                offset[j] = Integer.parseInt(tmp[j]);
            }
            regionCenter.put(name, offset);
        }
    }

    /** Picks a random texture variant for every tile whose floor has variants. */
    private void init() {
        variants = new int[world.width()][world.height()];
        for (int y = 0, height = world.height(); y < height; y++) {
            for (int x = 0, width = world.width(); x < width; x++) {
                Tile tile = world.tile(x, y);
                if (tile.floor().variants > 0) {
                    variants[x][y] = Mathf.random(tile.floor().variants - 1);
                }
            }
        }
    }

    /**
     * Returns the stored variant index for the tile at (x, y).
     * NOTE(review): called before init() this throws a NullPointerException
     * (the commented-out null guard suggests this was considered) — confirm
     * callers only run after GroundSystemInitEvent.
     */
    public int getVariants(int x, int y) {
        // if (variants == null) return 0;
        return variants[x][y];
    }

    /** Returns the configured center offsets for the named block, or null. */
    public int[] getOffset(String blockName) {
        return regionCenter.get(blockName);
    }
}
|
#!/bin/bash
# Resolve the functional-test directory relative to this script.
FUNC_TEST_DIR=$(dirname $0)/../ironicclient/tests/functional/
CONFIG_FILE=$FUNC_TEST_DIR/test.conf
# Prefer token-based (noauth) configuration when both OS_AUTH_TOKEN and
# IRONIC_URL are set; otherwise fall back to password credentials.
if [[ -n "$OS_AUTH_TOKEN" ]] && [[ -n "$IRONIC_URL" ]]; then
cat <<END >$CONFIG_FILE
[functional]
api_version = 1
auth_strategy=noauth
os_auth_token=$OS_AUTH_TOKEN
ironic_url=$IRONIC_URL
END
else
cat <<END >$CONFIG_FILE
[functional]
api_version = 1
os_auth_url=$OS_AUTH_URL
os_username=$OS_USERNAME
os_password=$OS_PASSWORD
os_tenant_name=$OS_TENANT_NAME
os_service_type=baremetal
os_endpoint_type=public
END
fi
# Run the functional test suite.
tox -e functional
|
#!/bin/bash
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
export JAVA_HOME=
export PATH=$JAVA_HOME/bin:$PATH
module=federation
main_class=com.webank.ai.fate.driver.Federation
# Look up the PID of the running service by its main class.
# Convention (inverted from usual shell exit codes): returns 1 when the
# process IS running, 0 when it is not; ${pid} is set as a side effect.
# NOTE(review): the ps|grep match is a substring match on ${main_class} and
# could in principle match unrelated processes — confirm class name is unique.
getpid() {
    pid=`ps aux | grep ${main_class} | grep -v grep | awk '{print $2}'`
    if [[ -n ${pid} ]]; then
        return 1
    else
        return 0
    fi
}
# Create the logs directory (relative to the CWD) if it does not exist.
mklogsdir() {
    if [[ ! -d "logs" ]]; then
        mkdir logs
    fi
}
# Print the running process line (or "service not running").
# Returns 1 when running, 0 when not (mirrors getpid's convention).
status() {
    getpid
    if [[ -n ${pid} ]]; then
        echo "status:
    `ps aux | grep ${pid} | grep -v grep`"
        return 1
    else
        echo "service not running"
        return 0
    fi
}
# Start the federation service in the background unless already running.
# Console output goes to logs/console.log, errors to logs/error.log.
start() {
    getpid
    # getpid returns 0 when the service is NOT running.
    if [[ $? -eq 0 ]]; then
        mklogsdir
        java -cp "conf/:lib/*:fate-${module}.jar" ${main_class} -c conf/${module}.properties >> logs/console.log 2>>logs/error.log &
        if [[ $? -eq 0 ]]; then
            # Give the JVM a moment to come up before re-probing the PID.
            sleep 2
            getpid
            echo "service start sucessfully. pid: ${pid}"
        else
            echo "service start failed"
        fi
    else
        echo "service already started. pid: ${pid}"
    fi
}
# Stop the running service, if any.
# NOTE(review): kill -9 gives the JVM no chance to shut down cleanly —
# confirm a plain SIGTERM first would not be preferable.
stop() {
    getpid
    if [[ -n ${pid} ]]; then
        echo "killing:
    `ps aux | grep ${pid} | grep -v grep`"
        kill -9 ${pid}
        if [[ $? -eq 0 ]]; then
            echo "killed"
        else
            echo "kill error"
        fi
    else
        echo "service not running"
    fi
}
case "$1" in
start)
start
status
;;
stop)
stop
;;
status)
status
;;
restart)
stop
start
status
;;
*)
echo "usage: $0 {start|stop|status|restart}"
exit -1
esac
|
class BaseApi {
    /** Response payload; left undefined until populated by users of the class. */
    public data: any

    private status: number
    private description: string

    constructor(status: number, description: string) {
        this.status = status
        this.description = description
    }
}

export = BaseApi
|
#!/bin/sh
if [ "$2" = "" ]; then
echo "usage: $0 <unused> <ssh-key-name>"
exit 1
fi
name=$2
key=/etc/polynimbus/ssh/id_oracle_$name
if [ -f $name ] || [ -f $key ]; then
echo "warning: ssh key $key already exists"
exit 0
fi
ssh-keygen -q -t rsa -f $key -N "" -C ubuntu@`hostname`
|
#!/bin/bash
# 1080p Anime with ASS Subtitles to DVD converter script.
# Written by Robert Ian Hawdon (https://robertianhawdon.me.uk) 2016
# Tested on Ubuntu 16.04
# Requires ffmpeg, mencoder
########
# NTSC #
########
# Resize video to 480p at 23.976 FPS.
# (16:9 anamorphic MPEG-2 at 4100 kbps, GOP length 18 as required for NTSC DVD.)
mencoder -ovc lavc -of mpeg -mpegopts format=dvd:tsaf -vf scale=720:480 -lavcopts vcodec=mpeg2video:vbitrate=4100:keyint=18:vstrict=0:aspect=16/9 -ofps 24000/1001 -o dvd-video.mpg "subtitled.mkv"
# DVD encode audio (ac3).
# (Resample to 48 kHz with async compensation to keep A/V sync.)
ffmpeg -i audio.flac -af "aresample=48000:async=1" -target ntsc-dvd dvd-audio.ac3
# Mux video and audio files with NAV Packets at 23.976 FPS - Fully DVD complient.
ffmpeg -i dvd-video.mpg -i dvd-audio.ac3 -vcodec copy -acodec copy -f vob -target ntsc-dvd -r 24000/1001 dvd-ntsc-23.976.mpg
|
<filename>src/main/java/wood/poulos/webcrawler/WebCrawler.java
/*
* MIT License
*
* Copyright (c) 2017 <NAME>, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package wood.poulos.webcrawler;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
* An application for recursively crawling a web page, downloading the elements
* it finds on the page such as images and files.
*/
public class WebCrawler {
private static final Logger logger = LoggerFactory.getLogger(WebCrawler.class);
private final URI uri;
private final int maxDepth;
private final WebElementRepository repository;
private final ExecutorService executorService = Executors.newCachedThreadPool();
private final Queue<Future<?>> crawlerQueue = new ConcurrentLinkedQueue<>();
    /**
     * Creates a crawler rooted at {@code uri} that descends at most
     * {@code maxDepth} pages and stores discovered elements in
     * {@code repository}.
     */
    WebCrawler(URI uri, int maxDepth, WebElementRepository repository) {
        this.uri = uri;
        this.maxDepth = maxDepth;
        this.repository = repository;
    }
    /**
     * Runs the web crawler for the given arguments.
     * <p>
     * The first argument should be a valid URL to a website. The second
     * argument should be an integer greater than 0 indicating the recursive
     * page depth to crawl. The third argument should be the path to a local
     * directory to download web elements to.
     * </p>
     *
     * @param args the program arguments.
     */
    public static void main(String[] args) {
        WebCrawler crawler;
        try {
            verifySufficientArgCount(args);
            // Transform program arguments into usable objects.
            URI uri = parseValidURL(args[0]);
            int maxDepth = parseValidMaxDepth(args[1]);
            WebElementRepository repository = parseValidDownloadRepository(args[2]);
            crawler = new WebCrawler(uri, maxDepth, repository);
        } catch (IllegalArgumentException e) {
            // Invalid arguments: report and exit without crawling.
            logger.error(e.getMessage());
            return;
        }
        try {
            crawler.start();
        } catch (MalformedURLException e) {
            // The URI parsed but does not denote a crawlable web page.
            logger.error(e.getMessage());
        }
    }
/**
* Crawls the web page specified by the URI passed into this WebCrawler's
* constructor.
*/
void start() throws MalformedURLException {
WebPage page;
try {
page = WebElements.createWebPage(uri.toURL());
} catch (IllegalArgumentException e) {
throw new MalformedURLException("URL does not represent a web page URL.");
}
try {
crawlPage(new CrawlerData(page, 0));
} catch (IllegalArgumentException e) {
logger.error(e.getMessage());
return;
}
waitForCrawlsToFinish();
repository.commit();
}
/**
* Recursively crawls the {@link WebPage} contained within the given
* crawlerData.
*
* @param crawlerData Contains the WebPage to crawl and the current depth
* that page is at in the overall crawl.
*/
void crawlPage(CrawlerData crawlerData) {
WebPage page = crawlerData.page;
int currentDepth = crawlerData.depth;
if (currentDepth >= maxDepth) {
logger.trace("{} is deeper than maxDepth", page.getURL());
return;
}
try {
logger.info("Crawling page at {}", page.getURL());
page.crawl();
} catch (IOException e) {
if (currentDepth == 0) {
throw new IllegalArgumentException("Could not connect to url: " + page.getURL());
} else {
logger.warn("Could not connect to url: {}", page.getURL());
}
}
handlePageElements(page, currentDepth);
}
private void handlePageElements(@NotNull WebPage page, int currentDepth) {
for (WebPage p : page.getWebPages()) {
CrawlerData data = new CrawlerData(p, currentDepth + 1);
crawlerQueue.add(executorService.submit(() -> crawlPage(data)));
}
for (WebImage i : page.getImages()) {
repository.addElement(i);
}
for (WebFile f : page.getFiles()) {
repository.addElement(f);
}
}
private void waitForCrawlsToFinish() {
logger.debug("Waiting for crawling to finish");
Future<?> crawlerTask;
while ((crawlerTask = crawlerQueue.poll()) != null) {
try {
crawlerTask.get();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
}
logger.info("Done crawling.");
executorService.shutdown();
}
/**
* The repository where this web crawler will store the web elements it
* locates.
*
* @return this web crawler's element repository.
*/
@NotNull
public WebElementRepository getRepository() {
return repository;
}
static void verifySufficientArgCount(@NotNull String[] args) {
if (args.length < 1) {
throw new IllegalArgumentException("A web address must be specified as the first argument.");
} else if (args.length < 2) {
throw new IllegalArgumentException("A maximum depth must be specified as the second argument.");
} else if (args.length < 3) {
throw new IllegalArgumentException("A local directory must be specified as the third argument.");
}
}
@NotNull
static URI parseValidURL(@NotNull String arg) {
try {
URI uri = URI.create(arg);
try {
WebElements.createWebPage(uri.toURL());
} catch (MalformedURLException e) {
throw new IllegalArgumentException();
}
return uri;
} catch (IllegalArgumentException ignore) {
throw new IllegalArgumentException("The web address (1st arg) is not formatted correctly or does not represent a web page URL.");
}
}
static int parseValidMaxDepth(@NotNull String arg) {
try {
int maxDepth = Integer.parseInt(arg);
if (maxDepth < 1) {
throw new IllegalArgumentException();
}
return maxDepth;
} catch (IllegalArgumentException ignore) {
throw new IllegalArgumentException("The max depth (2nd arg) must be a natural number.");
}
}
@NotNull
static WebElementRepository parseValidDownloadRepository(@NotNull String arg) {
try {
Path localPath = Paths.get(arg);
verifyValidDownloadRepository(localPath);
DownloadRepository.INSTANCE.setDownloadLocation(localPath);
return DownloadRepository.INSTANCE;
} catch (InvalidPathException ignore) {
throw new IllegalArgumentException("The local directory (3rd arg) must be a file path.");
}
}
static void verifyValidDownloadRepository(@NotNull Path path) {
if (isNonDirectory(path)) {
throw new IllegalArgumentException("The local directory (3rd arg) must be a directory or non-existent.");
}
if ((path.toFile().exists() && !Files.isWritable(path))
|| (!path.toFile().exists() && path.getParent() != null && !Files.isWritable(path.getParent()))) {
throw new IllegalArgumentException("The local directory (3rd arg) does not have write access.");
}
}
private static boolean isNonDirectory(@NotNull Path path) {
return Files.exists(path) && !Files.isDirectory(path);
}
/**
* A simple tuple containing a {@link WebPage} and its current depth in a
* crawl.
*/
private static class CrawlerData {
private final WebPage page;
private final int depth;
private CrawlerData(WebPage page, int depth) {
this.page = page;
this.depth = depth;
}
}
}
|
package org.hisp.dhis.de.action;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.hisp.dhis.options.SystemSettingManager.KEY_ZERO_VALUE_SAVE_MODE;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hisp.dhis.customvalue.CustomValue;
import org.hisp.dhis.customvalue.CustomValueService;
import org.hisp.dhis.dataelement.CalculatedDataElement;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataset.DataEntryForm;
import org.hisp.dhis.dataset.DataEntryFormService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.de.comments.StandardCommentsManager;
import org.hisp.dhis.de.screen.DataEntryScreenManager;
import org.hisp.dhis.de.state.SelectedStateManager;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.minmax.MinMaxDataElement;
import org.hisp.dhis.minmax.MinMaxDataElementService;
import org.hisp.dhis.options.SystemSettingManager;
import org.hisp.dhis.options.displayproperty.DisplayPropertyHandler;
import org.hisp.dhis.order.manager.DataElementOrderManager;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import com.opensymphony.xwork2.Action;
/**
* @author <NAME>
* @version $Id: FormAction.java 6216 2008-11-06 18:06:42Z eivindwa $
*/
public class FormAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private CustomValueService customValueService;

    public CustomValueService getCustomValueService()
    {
        return customValueService;
    }

    public void setCustomValueService( CustomValueService customValueService )
    {
        this.customValueService = customValueService;
    }

    private SystemSettingManager systemSettingManager;

    public void setSystemSettingManager( SystemSettingManager systemSettingManager )
    {
        this.systemSettingManager = systemSettingManager;
    }

    private DataElementOrderManager dataElementOrderManager;

    public void setDataElementOrderManager( DataElementOrderManager dataElementOrderManager )
    {
        this.dataElementOrderManager = dataElementOrderManager;
    }

    private DataValueService dataValueService;

    public void setDataValueService( DataValueService dataValueService )
    {
        this.dataValueService = dataValueService;
    }

    private DataEntryFormService dataEntryFormService;

    public void setDataEntryFormService( DataEntryFormService dataEntryFormService )
    {
        this.dataEntryFormService = dataEntryFormService;
    }

    private StandardCommentsManager standardCommentsManager;

    public void setStandardCommentsManager( StandardCommentsManager standardCommentsManager )
    {
        this.standardCommentsManager = standardCommentsManager;
    }

    private MinMaxDataElementService minMaxDataElementService;

    public void setMinMaxDataElementService( MinMaxDataElementService minMaxDataElementService )
    {
        this.minMaxDataElementService = minMaxDataElementService;
    }

    private SelectedStateManager selectedStateManager;

    public void setSelectedStateManager( SelectedStateManager selectedStateManager )
    {
        this.selectedStateManager = selectedStateManager;
    }

    private DataEntryScreenManager dataEntryScreenManager;

    public void setDataEntryScreenManager( DataEntryScreenManager dataEntryScreenManager )
    {
        this.dataEntryScreenManager = dataEntryScreenManager;
    }

    private I18n i18n;

    public void setI18n( I18n i18n )
    {
        this.i18n = i18n;
    }

    // -------------------------------------------------------------------------
    // DisplayPropertyHandler
    // -------------------------------------------------------------------------

    private DisplayPropertyHandler displayPropertyHandler;

    public void setDisplayPropertyHandler( DisplayPropertyHandler displayPropertyHandler )
    {
        this.displayPropertyHandler = displayPropertyHandler;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    // Data elements of the selected data set, in display order.
    private List<DataElement> orderedDataElements = new ArrayList<DataElement>();

    public List<DataElement> getOrderedDataElements()
    {
        return orderedDataElements;
    }

    // Existing data values for the selected unit/period, keyed by data element id.
    private Map<Integer, DataValue> dataValueMap;

    public Map<Integer, DataValue> getDataValueMap()
    {
        return dataValueMap;
    }

    // Pre-computed values for calculated data elements that are not yet saved.
    private Map<CalculatedDataElement, Integer> calculatedValueMap;

    public Map<CalculatedDataElement, Integer> getCalculatedValueMap()
    {
        return calculatedValueMap;
    }

    private List<String> standardComments;

    public List<String> getStandardComments()
    {
        return standardComments;
    }

    // Maps data element value types to their localized display labels.
    private Map<String, String> dataElementTypeMap;

    public Map<String, String> getDataElementTypeMap()
    {
        return dataElementTypeMap;
    }

    // Min/max validation bounds keyed by data element id.
    private Map<Integer, MinMaxDataElement> minMaxMap;

    public Map<Integer, MinMaxDataElement> getMinMaxMap()
    {
        return minMaxMap;
    }

    // NOTE(review): oddly named counter exposed to the view; always 0 here --
    // presumably used by the presentation template. Confirm before renaming.
    private Integer integer = 0;

    public Integer getInteger()
    {
        return integer;
    }

    // Whether a custom data entry form exists for the selected data set.
    private Boolean cdeFormExists;

    private DataEntryForm dataEntryForm;

    public DataEntryForm getDataEntryForm()
    {
        return this.dataEntryForm;
    }

    // HTML of the custom data entry form, populated with current values.
    private String customDataEntryFormCode;

    public String getCustomDataEntryFormCode()
    {
        return this.customDataEntryFormCode;
    }

    private Boolean zeroValueSaveMode;

    public Boolean getZeroValueSaveMode()
    {
        return zeroValueSaveMode;
    }

    // -------------------------------------------------------------------------
    // Input/output
    // -------------------------------------------------------------------------

    private List<CustomValue> customValues = new ArrayList<CustomValue>();

    public List<CustomValue> getCustomValues()
    {
        return customValues;
    }

    private Integer selectedDataSetId;

    public void setSelectedDataSetId( Integer selectedDataSetId )
    {
        this.selectedDataSetId = selectedDataSetId;
    }

    public Integer getSelectedDataSetId()
    {
        return selectedDataSetId;
    }

    private Integer selectedPeriodIndex;

    public void setSelectedPeriodIndex( Integer selectedPeriodIndex )
    {
        this.selectedPeriodIndex = selectedPeriodIndex;
    }

    public Integer getSelectedPeriodIndex()
    {
        return selectedPeriodIndex;
    }

    // Set to "disabled" when the selected period is locked, so the form
    // template can render read-only inputs.
    private String disabled = " ";

    private Integer optionComboId;

    public Integer getOptionComboId()
    {
        return optionComboId;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    /**
     * Assembles everything the data entry screen needs for the currently
     * selected organisation unit, data set and period: data values, min/max
     * bounds, calculated values, standard comments, type labels and the
     * optional custom entry form.
     */
    public String execute()
        throws Exception
    {
        // System setting may be absent; default to not saving zero values.
        zeroValueSaveMode = (Boolean) systemSettingManager.getSystemSetting( KEY_ZERO_VALUE_SAVE_MODE, false );
        if ( zeroValueSaveMode == null )
        {
            zeroValueSaveMode = false;
        }
        OrganisationUnit organisationUnit = selectedStateManager.getSelectedOrganisationUnit();
        DataSet dataSet = selectedStateManager.getSelectedDataSet();
        customValues = (List<CustomValue>) customValueService.getCustomValuesByDataSet( dataSet );
        Period period = selectedStateManager.getSelectedPeriod();
        // Locked periods render the whole form read-only.
        if ( dataSet.getLockedPeriods().contains( period ) )
        {
            disabled = "disabled";
        }
        Collection<DataElement> dataElements = dataSet.getDataElements();
        if ( dataElements.size() == 0 )
        {
            return SUCCESS;
        }
        // NOTE(review): takes the option combos of the *first* data element --
        // presumably all elements of the set share one category combo; confirm.
        Collection<DataElementCategoryOptionCombo> defaultOptionCombo = dataElements.iterator().next()
            .getCategoryCombo().getOptionCombos();
        optionComboId = defaultOptionCombo.iterator().next().getId();
        // ---------------------------------------------------------------------
        // Get the min/max values
        // ---------------------------------------------------------------------
        Collection<MinMaxDataElement> minMaxDataElements = minMaxDataElementService.getMinMaxDataElements(
            organisationUnit, dataElements );
        minMaxMap = new HashMap<Integer, MinMaxDataElement>( minMaxDataElements.size() );
        for ( MinMaxDataElement minMaxDataElement : minMaxDataElements )
        {
            minMaxMap.put( minMaxDataElement.getDataElement().getId(), minMaxDataElement );
        }
        // ---------------------------------------------------------------------
        // Get the DataValues and create a map
        // ---------------------------------------------------------------------
        Collection<DataValue> dataValues = dataValueService.getDataValues( organisationUnit, period, dataElements,
            defaultOptionCombo );
        dataValueMap = new HashMap<Integer, DataValue>( dataValues.size() );
        for ( DataValue dataValue : dataValues )
        {
            dataValueMap.put( dataValue.getDataElement().getId(), dataValue );
        }
        // ---------------------------------------------------------------------
        // Prepare values for unsaved CalculatedDataElements
        // ---------------------------------------------------------------------
        calculatedValueMap = dataEntryScreenManager.populateValuesForCalculatedDataElements( organisationUnit, dataSet,
            period );
        // ---------------------------------------------------------------------
        // Make the standard comments available
        // ---------------------------------------------------------------------
        standardComments = standardCommentsManager.getStandardComments();
        // ---------------------------------------------------------------------
        // Make the DataElement types available
        // ---------------------------------------------------------------------
        dataElementTypeMap = new HashMap<String, String>();
        dataElementTypeMap.put( DataElement.TYPE_BOOL, i18n.getString( "yes_no" ) );
        dataElementTypeMap.put( DataElement.TYPE_INT, i18n.getString( "number" ) );
        dataElementTypeMap.put( DataElement.TYPE_STRING, i18n.getString( "text" ) );
        // ---------------------------------------------------------------------
        // Get the custom data entry form (if any)
        // ---------------------------------------------------------------------
        dataEntryForm = dataEntryFormService.getDataEntryFormByDataSet( dataSet );
        cdeFormExists = (dataEntryForm != null);
        if ( cdeFormExists )
        {
            customDataEntryFormCode = dataEntryScreenManager.populateCustomDataEntryScreen(
                dataEntryForm.getHtmlCode(), dataValues, calculatedValueMap, minMaxMap, disabled, zeroValueSaveMode,
                i18n, dataSet );
        }
        // ---------------------------------------------------------------------
        // Working on the display of dataelements
        // ---------------------------------------------------------------------
        orderedDataElements = dataElementOrderManager.getOrderedDataElements( dataSet );
        displayPropertyHandler.handle( orderedDataElements );
        return SUCCESS;
    }
}
|
package ${basePackage}.domain.user.auth;
import ${basePackage}.domain.BaseJpaModel;
import com.chequer.axboot.core.annotations.ColumnPosition;
import com.chequer.axboot.core.annotations.Comment;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.DynamicInsert;
import org.hibernate.annotations.DynamicUpdate;
import javax.persistence.*;
/**
 * JPA entity for the USER_AUTH_M table: assigns a group authority
 * (grpAuthCd) to a user (userCd).
 */
@Setter
@Getter
@DynamicInsert
@DynamicUpdate
@Entity
@Table(name = "USER_AUTH_M")
public class UserAuth extends BaseJpaModel<Long> {

    // Surrogate primary key, generated by the database (IDENTITY).
    @Id
    @Column(name = "ID", precision = 19, nullable = false)
    @Comment(value = "ID")
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ColumnPosition(1)
    private Long id;

    // Code of the user this authority assignment belongs to.
    @Column(name = "USER_CD")
    @ColumnPosition(2)
    private String userCd;

    // Code of the granted group authority.
    @Column(name = "GRP_AUTH_CD")
    @ColumnPosition(3)
    private String grpAuthCd;

    @Override
    public Long getId() {
        return id;
    }
}
|
<filename>src/middlewares/check-in-token.js
import status from 'http-status'
import auth from '../lib/auth'
import r from '../lib/resjson'
/**
 * Express middleware: requires a `token` header, verifies it, re-issues a
 * fresh token, and exposes the verified identity to downstream handlers
 * through the request headers.
 */
export default () => {
  return (req, res, next) => {
    // Reject requests that carry no token at all.
    if (!req.headers.token) {
      const response = r('missing token')
      return res.status(status.BAD_REQUEST).json(response)
    }
    auth.verify(req.headers.token)
      .then((resVerify) => {
        if (!resVerify) {
          // BUG FIX: the original sent no response when verification
          // returned a falsy result, leaving the request hanging forever.
          return res.status(status.BAD_REQUEST).json(r('invalid token'))
        }
        // Re-issue a token for the verified user. Returning the inner
        // promise chains it onto the outer .catch (previously a rejection
        // from auth.create was silently unhandled). Also renames the inner
        // result to avoid shadowing the outer `response` variable.
        return auth.create({
          _id: resVerify._id,
          username: resVerify.username,
          email: resVerify.email,
          authority: resVerify.authority,
          approved: resVerify.approved,
          active: resVerify.active
        })
          .then((newToken) => {
            req.headers.userId = resVerify._id
            req.headers.username = resVerify.username
            req.headers.token = newToken
            return next()
          })
      })
      .catch((err) => {
        if (err) {
          const response = r(err.message)
          return res.status(status.BAD_REQUEST).json(response)
        }
      })
  }
}
|
#!/bin/bash
# Build the NTS-NG Docker image, stamping it with the build date and version.
# Assumes .env defines NTS_BUILD_VERSION -- TODO confirm.
source .env
# NTS_BUILD_DATE is the current UTC time in ISO-8601 format.
docker-compose -f nts-ng-docker-image-build-ubuntu.yaml build --build-arg NTS_BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg NTS_BUILD_VERSION=$NTS_BUILD_VERSION
|
<gh_stars>10-100
const args = process.argv.slice(2);

/**
 * Validate the CLI arguments and generate a Tradfri DTLS identity.
 * Usage: node <script> <hub ip address> <security code> [coap client path]
 */
const start = async () => {
  const [hubIpAddress, securityId, coapClientPath] = args;
  if (!hubIpAddress) {
    console.log('Please provide the ip address of the Tradfri hub as the first argument!')
    return;
  }
  if (!securityId) {
    console.log('Please provide the security code as the second argument!')
    return;
  }
  const tradfri = require('./index').create({
    coapClientPath: coapClientPath || './lib/coap-client', // use embedded coap-client
    securityId,
    hubIpAddress,
  });
  await tradfri.generateDTLSIdentity();
};

start().then(() => {
  console.log('End.');
}).catch((err) => {
  // BUG FIX: the original chain had no rejection handler, so any failure
  // inside start() surfaced as an unhandled promise rejection.
  console.error(err);
  process.exitCode = 1;
});
|
<gh_stars>0
/* eslint-disable no-undef */
// sc: https://ru.hexlet.io/courses/js-advanced-testing/lessons/stub/exercise_unit
// tests/getUserMainLanguage.test.js
// Протестируйте функцию getUserMainLanguage(username, client), которая определяет
// язык на котором пользователь создал больше всего репозиториев. Для реализации
// этой задачи, функция getUserMainLanguage выполняет запрос через @octokit/rest,
// который извлекает все репозитории указанного пользователя (по первому параметру
// username). Каждый репозиторий в этом списке, содержит указание основного языка
// репозитория. Эта информация используется для поиска того языка, которые
// используется чаще.
// // Запрос который выполняет функция getUserByUsername
// // Именно этот метод нужно будет подменить в фейковом клиенте
// const { data } = await client.repos.listForUser({ username });
// // data – список репозиториев. У каждого репозитория может быть много полей
// // но нас интересует ровно одно – language
// // Эти данные нужно подготовить в тестах для фейкового клиента
// console.log(data);
// // [{ language: 'php', ... }, { language: 'javascript', ... }, ...]
// support/OctokitFake.js
// Реализуйте фейковый клиент по такому же принципу как это было сделано в теории.
// Используйте этот клиент в тестах для подмены.
// BEGIN (write your solution here)
// Структура этого класса описывает только ту часть,
// которая необходима для вызова await client.repos.listForOrg(...)
// Fake Octokit client exposing only the slice of the API the code under
// test touches: client.repos.listForUser() resolving to { data }.
export default class OctokitFake {
  constructor(data) {
    this.data = data;
    this.repos = {
      listForUser: () => Promise.resolve({ data: this.data }),
    };
  }
}
// END
// const { default: OctokitFake } = require('../support/OctokitFake');
// const getFunction = require('../functions');
// const getUserMainLanguage = getFunction();
// BEGIN (write your solution here)
// With two javascript and two php repos the expected winner is the first
// language encountered ('javascript'), per the test name.
test('getUserMainLanguage first found', async () => {
  const data = [
    { language: 'javascript' },
    { language: 'php' },
    { language: 'javascript' },
    { language: 'php' },
  ];
  const client = new OctokitFake(data);
  const mainLanguage = await getUserMainLanguage('', client);
  expect(mainLanguage).toEqual('javascript');
});

// Edge case: a user with no repositories has no main language.
test('getUserMainLanguage when empty', async () => {
  const client = new OctokitFake([]);
  const mainLanguage = await getUserMainLanguage('user-without-repos', client);
  expect(mainLanguage).toBeNull();
});
// END
|
export default () => {
  // Every filterable view starts with the same hidden, empty search state;
  // build the template once instead of spelling it out six times.
  const sections = [
    `balances`,
    `blocks`,
    `delegates`,
    `proposals`,
    `transactions`,
    `validators`
  ]
  const emptyState = {}
  for (const section of sections) {
    emptyState[section] = {
      search: {
        visible: false,
        query: ``
      }
    }
  }
  // Deep-copy so later mutations never touch the pristine template.
  const state = JSON.parse(JSON.stringify(emptyState))
  const mutations = {
    // Hide the search bar for one view and clear its query.
    resetSearch(state, type) {
      state[type].search.visible = false
      state[type].search.query = ``
    },
    setSearchVisible(state, [type, bool]) {
      state[type].search.visible = bool
    },
    setSearchQuery(state, [type, string]) {
      state[type].search.query = string
    }
  }
  const actions = {
    // Restore all filters to their pristine state on session reset.
    resetSessionData({ rootState }) {
      rootState.filters = JSON.parse(JSON.stringify(emptyState))
    }
  }
  return { state, mutations, actions }
}
|
<reponame>iotshaman/node-primer
// THIS FILE IS JUST AN ENTRY POINT FOR RUNNING ALL OF THE LESSONS
// IF YOU JUST WANT TO RUN 1 LESSON, USE "npm run lesson [lesson number]"
var lessons = [
  require("./lessons/1.importing"),
  require("./lessons/2.truthiness-and-falsiness"),
  require("./lessons/3.arrays"),
  require("./lessons/4.complex-arrays"),
  require("./lessons/5.string-interpolation")
];

// Run every lesson in order, separating each lesson's console output with
// a blank line.
lessons.forEach(function (lesson) {
  lesson.Run();
  console.log('');
});
<filename>App.js<gh_stars>0
'use strict';
const GA = require('./algorithm/GA');
const City = require('./algorithm/City');
const Population = require('./algorithm/Population');
const RouteManager = require('./algorithm/RouteManager');
const Point = require('./Point');
/**
 * Static facade over the genetic-algorithm modules: collects user points,
 * seeds a population from them, and evolves it one generation at a time.
 */
class App {
  // Drop all collected points and any population built from them.
  static reset() {
    App.points = [];
    App.population = [];
  }
  // Record a new point to be treated as a city by the algorithm.
  static addPoint(x, y) {
    App.points.push(new Point(x, y));
  }
  // Rebuild the RouteManager city list from the collected points and seed
  // a fresh random population.
  static initAlgorithm() {
    App.population = [];
    RouteManager.init();
    for (let p of App.points) {
      RouteManager.addCity(new City(p.x, p.y));
    }
    // Population sized at twice the point count -- presumably a GA tuning
    // choice; TODO confirm against Population's expectations.
    App.population = new Population(App.points.length * 2, true);
  }
  // Evolve one generation and report the best route found so far.
  static iterateAlgorithm() {
    App.population = GA.evolvePopulation(App.population);
    const fittest = App.population.getFittest();
    return { route: fittest, distance: fittest.getDistance(), numberOfCities: RouteManager.numberOfCities() };
  }
}

// Module-level state shared by all callers.
App.points = [];
App.population = [];
module.exports = App;
#!/bin/bash
# given (a) a list of benchmarks, (b) a compile mode, and (c) a target apispec, determine the compile time and place it in an output file.
if [[ $# -ne 3 ]]; then
	# Fixed typo in the usage message ("Usge" -> "Usage").
	echo "Usage: $0 <benchmark list (newline separated)> <compile settings file> <target apispec>"
	exit 1
fi
# NOTE(review): assumes the script is launched from the results/ directory,
# so ./binding_candidates here and ../results/binding_candidates (relative
# to ../synth below) name the same folder -- confirm.
mkdir -p binding_candidates
pushd ../synth
bmarks=( $(cat "$1" | cut -f1 -d:) )
compsettings=$2
apispec=$3
# BUG FIX: the original ran `echo "" -n > file`, which wrote a literal
# " -n" line into the output file instead of truncating it.
: > ../results/binding_candidates/candidates
# Use small number of tests as dictated by theory.
for bmark in "${bmarks[@]}"; do
	echo "Starting new test"
	# use the benchmark list folder as a base.
	bmark_folder="$(dirname "$1")"
	iospec="iospec.json"
	if [[ -f $bmark_folder/$bmark/iospec_in_context.json ]]; then
		iospec="iospec_in_context.json"
	fi
	# Just try to make the whole execution as fast as possible -- aren't trying to get the
	# real result, just the no of candidates.
	( ./main.byte --skip-build --print-synthesizer-numbers --only-test 1 --number-of-tests 1 "$compsettings" "$bmark_folder/$bmark/$iospec" "$apispec" ) 1>> ../results/binding_candidates/candidates
done
|
<reponame>opulencesix/escalate<gh_stars>0
package com.o6.dto;
import java.util.ArrayList;
import java.util.List;
/*
* User profiles, and currently selected profile.
*
*/
public class UserProfileDTO {

    // All profiles belonging to the user.
    List<ProfileTemplate> allProfiles = new ArrayList<ProfileTemplate>();

    // Name of the profile the user currently has selected.
    String currentProfileName;

    public UserProfileDTO() {}

    public List<ProfileTemplate> getAllProfiles() {
        return allProfiles;
    }

    public void setAllProfiles(List<ProfileTemplate> allProfiles) {
        this.allProfiles = allProfiles;
    }

    public String getCurrentProfileName() {
        return currentProfileName;
    }

    public void setCurrentProfileName(String currentProfileName) {
        this.currentProfileName = currentProfileName;
    }
}
|
<reponame>m-nakagawa/sample
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Package
///////////////
package org.apache.jena.ontology;
// Imports
///////////////
import java.util.Iterator;
import org.apache.jena.rdf.model.* ;
import org.apache.jena.util.iterator.ExtendedIterator ;
/**
* <p>
* Interface defining an individual in which all members of a collection are
* declared pair-wise disjoint. This allows ontologies that wish to support the
* unique names assumption to add this condition in languages (like OWL) that
* do not make the same assumption, with a minimum number of statements.
* Instances of the all different axiom are expected to have a property
* (e.g. <code>owl:distinctMembers</code> defining the list of distinct
* individuals in the ontology. For a given vocabulary, this will be defined by
* the {@linkplain Profile#DISTINCT_MEMBERS distinctMembers} entry.
* </p>
*/
public interface AllDifferent
    extends OntResource
{
    // Constants
    //////////////////////////////////

    // External signature methods
    //////////////////////////////////

    /**
     * <p>Assert that the list of distinct individuals in this AllDifferent declaration
     * is the given list. Any existing
     * statements for <code>distinctMembers</code> will be removed.</p>
     * @param members A list of the members that are declared to be distinct.
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public void setDistinctMembers( RDFList members );

    /**
     * <p>Add the given individual to the list of distinct members of this AllDifferent declaration.</p>
     * @param res A resource that will be added to the list of all different members.
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public void addDistinctMember( Resource res );

    /**
     * <p>Add the given individuals to the list of distinct members of this AllDifferent declaration.</p>
     * @param individuals An iterator over the distinct individuals that will be added
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public void addDistinctMembers( Iterator<? extends Resource> individuals );

    /**
     * <p>Answer the list of distinct members for this AllDifferent declaration.</p>
     * @return The list of individuals declared distinct by this AllDifferent declaration.
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public RDFList getDistinctMembers();

    /**
     * <p>Answer an iterator over all of the individuals that are declared to be distinct by
     * this AllDifferent declaration. Each element of the iterator will be an {@link OntResource}.</p>
     * @return An iterator over distinct individuals.
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public ExtendedIterator<? extends OntResource> listDistinctMembers();

    /**
     * <p>Answer true if this AllDifferent declaration includes <code>res</code> as one of the distinct individuals.</p>
     * @param res A resource to test against
     * @return True if <code>res</code> is declared to be distinct from the other individuals in this declaration.
     * @exception ProfileException If the {@link Profile#DISTINCT_MEMBERS()} property is not supported in the current language profile.
     */
    public boolean hasDistinctMember( Resource res );

    /**
     * <p>Remove the given resource from the list of distinct individuals. If this statement
     * is not true of the current model, nothing happens.</p>
     * @param res A resource that is no longer distinct from the other listed individuals
     */
    public void removeDistinctMember( Resource res );
}
|
<reponame>nofurtherinformation/netlify-express
'use strict';
const express = require('express');
const path = require('path');
const serverless = require('serverless-http');
const app = express();
const bodyParser = require('body-parser');
const initSqlJs = require('sql.js');
var fs = require('fs');
// get date range helper
/**
 * Expand a [start, end) range of yyyymmdd integers into the ISO dates it
 * covers, joined with '","' so the result can be dropped straight into a
 * quoted SQL column list. `end` is exclusive.
 */
function getDateRange(start, end) {
  // Subtracting 100 shifts the month digits from 1-based (human yyyymmdd)
  // to the 0-based month index that Date expects.
  const startString = `${start - 100}`;
  const endString = `${end - 100}`;
  // BUG FIX: build UTC dates and step with setUTCDate so the UTC-based
  // toISOString() below cannot shift the day across timezone boundaries
  // (the original used local-time Date components).
  const currDate = new Date(Date.UTC(+startString.slice(0, 4), +startString.slice(4, 6), +startString.slice(6, 8)));
  const endDate = new Date(Date.UTC(+endString.slice(0, 4), +endString.slice(4, 6), +endString.slice(6, 8)));
  const dateArray = [];
  while (currDate < endDate) {
    dateArray.push(currDate.toISOString().slice(0, 10));
    currDate.setUTCDate(currDate.getUTCDate() + 1);
  }
  return dateArray.join('","');
}
const tableTree = {
"confirmed": {
"usafacts":{
"county":"cases",
"state":"usaFactsCasesCounty"
}
},
"deaths": {
"usafacts":{
"county":"deaths",
"state":"usaFactsCasesCounty"
}
}
}
// All API routes attach to this router, which is mounted under the Netlify
// functions path below.
const router = express.Router();

// Simple landing/liveness route.
router.get('/', (req, res) => {
  res.writeHead(200, { 'Content-Type': 'text/html' });
  res.write('<h1>Hello from Express.js!</h1>');
  res.end();
});
// Lists the contents of testFolder (interleaved with 'test' markers).
// NOTE(review): testFolder is '' -- presumably a placeholder path; confirm.
router.get('/api', async (req, res) => {
  const testFolder = '';
  var result = [];
  result.push('test');
  fs.readdir(testFolder, (err, files) => {
    // BUG FIX: the original sent the response before this async callback
    // ran, so `files` never reached the client -- and a readdir error left
    // `files` undefined, crashing on files.forEach. Respond from inside
    // the callback and handle the error explicitly.
    if (err) {
      return res.status(500).json({ error: err.message });
    }
    files.forEach(file => {
      result.push(file);
      result.push('test');
    });
    res.json({ result });
  });
});
// Echo endpoint: returns whatever JSON body was posted.
router.post('/', (req, res) => res.json({ postBody: req.body }));
// Parse JSON bodies before the router runs.
app.use(bodyParser.json());
app.use('/.netlify/functions/server', router); // path must route to lambda
// Fallback: serve the static landing page for any other path.
app.use('/', (req, res) => res.sendFile(path.join(__dirname, '../index.html')));
// Returns raw per-county case/death rows for one state over a date range.
router.get('/v1/lisa/', async (req, res) => {
  // destructure parameters from URL query
  const { state, category, start, end, source, type, level } = req.query;
  const filebuffer = fs.readFileSync('usaFactsCovid.db');
  initSqlJs().then(function (SQL) {
    // Load the db
    const db = new SQL.Database(filebuffer);
    // get a list of date columns to pull from
    const dateRange = getDateRange(start, end);
    if (category === "data") { // Call for Raw Data
      // Column names are server-generated dates, and the table name comes
      // from the tableTree whitelist, so neither identifier is
      // attacker-controlled text.
      const columnList = `countyFIPS,"County Name",State,StateFIPS,"${dateRange}"`;
      const table = tableTree[type || "confirmed"][source || "usafacts"][level || "county"];
      // SECURITY FIX: the original interpolated `state` (from req.query)
      // straight into the SQL string; bind it as a parameter instead.
      const sql = `SELECT ${columnList} FROM ${table} WHERE State = ?`;
      const stmt = db.prepare(sql);
      stmt.bind([state]);
      const result = [];
      while (stmt.step()) {
        result.push(stmt.getAsObject());
      }
      // return results
      res.json(result);
    } else { // Call for getting lisa data
      res.json('na');
    }
  });
});
module.exports = app;
module.exports.handler = serverless(app);
#!/bin/bash
# LinuxGSM fix_ut2.sh function
# Author: Daniel Gibbs
# Website: https://linuxgsm.com
# Description: Resolves various issues with configs in Unreal Tournament 3.
# NOTE(review): the file is named fix_ut2.sh but the description and the
# UT3-style launch parameters below target Unreal Tournament 3 -- confirm
# which game this fix belongs to.
local commandname="FIX"
local commandaction="Fix"
local function_selfname="$(basename "$(readlink -f "${BASH_SOURCE[0]}")")"
# Dedicated-server launch parameter string consumed by command_start.sh.
fn_parms(){
parms="server VCTF-Suspense?Game=UTGameContent.UTVehicleCTFGame_Content?bIsDedicated=true?bIsLanMatch=false?bUsesStats=false?bShouldAdvertise=false?PureServer=1?bAllowJoinInProgress=true?ConfigSubDir=${servicename} -port=${port} -queryport=${queryport} -multihome=${ip} -nohomedir -unattended -log=${gamelog}"
}
# Start the server once so it writes out its default config files, then stop it.
fn_print_information "starting ${gamename} server to generate configs."
sleep 1
exitbypass=1
command_start.sh
sleep 10
command_stop.sh
<gh_stars>0
import axios from './axios'
// Return a shallow copy of `params` with every empty-string value removed
// (0, null and undefined are kept -- only '' is dropped).
const filterParams = params => {
    const cleaned = {};
    for (const [key, value] of Object.entries(params)) {
        if (value !== '') {
            cleaned[key] = value;
        }
    }
    return cleaned;
};
// POST `params` (with empty-string fields stripped) to `url`.
const create = (url, params) => axios.post(url, filterParams(params))
// GET `url` with `params` serialized onto the query string.
// Fix: keys and values are now percent-encoded; the original interpolated
// them raw, so values containing '&', '=', '#' or spaces corrupted the URL.
const readAll = (url, params) => {
    const query = Object.keys(params)
        .map((k) => `${encodeURIComponent(k)}=${encodeURIComponent(params[k])}`)
        .join('&');
    return axios.get(`${url}?${query}`);
};
// PATCH `url` with the non-empty fields of `params`.
const update = (url, params) => axios.patch(url, filterParams(params))
// DELETE the resource at `url`.
function deleteOne(url) {
    return axios.delete(url);
}
export default { create, readAll, update, deleteOne }
# Generate the MoveIt IKFast kinematics plugin for the Fetch arm from the
# pre-generated IKFast solver source.
rosrun moveit_ikfast create_ikfast_moveit_plugin.py fetch arm fetch_ikfast_plugin /home/hanhong/indigo/src/fetch_ikfast_plugin/src/fetch_arm_ikfast_solver.cpp
package cucumber.runtime.java;
import cucumber.api.java8.StepdefBody;
import cucumber.runtime.CucumberException;
import cucumber.runtime.JdkPatternArgumentMatcher;
import cucumber.runtime.ParameterInfo;
import cucumber.runtime.StepDefinition;
import cucumber.runtime.Utils;
import gherkin.I18n;
import gherkin.formatter.Argument;
import gherkin.formatter.model.Step;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
 * A Cucumber {@link StepDefinition} backed by a Java 8 lambda
 * ({@link StepdefBody}). Argument types are recovered from the lambda's
 * generic interface (or, when erased, from the supplied TypeIntrospector),
 * and the lambda's accept(...) method is invoked reflectively on execution.
 */
public class Java8StepDefinition implements StepDefinition {
private final Pattern pattern;
private final long timeoutMillis;
private final StepdefBody body;
private final JdkPatternArgumentMatcher argumentMatcher;
// Stack frame of the code that registered this stepdef; used for reporting.
private final StackTraceElement location;
private final List<ParameterInfo> parameterInfos;
// The lambda's accept(Object...) method, resolved once up front.
private final Method method;
public Java8StepDefinition(Pattern pattern, long timeoutMillis, StepdefBody body, TypeIntrospector typeIntrospector) throws Exception {
this.pattern = pattern;
this.timeoutMillis = timeoutMillis;
this.body = body;
this.argumentMatcher = new JdkPatternArgumentMatcher(pattern);
// NOTE: frame index 3 assumes a fixed call depth between the user's
// registration call and this constructor -- fragile if that chain changes.
this.location = new Exception().getStackTrace()[3];
Class<? extends StepdefBody> bodyClass = body.getClass();
Type genericInterface = bodyClass.getGenericInterfaces()[0];
Type[] argumentTypes;
if (genericInterface instanceof ParameterizedType) {
argumentTypes = ((ParameterizedType) genericInterface).getActualTypeArguments();
} else {
// Generic types were erased; fall back to the introspector.
argumentTypes = typeIntrospector.getGenericTypes(bodyClass);
}
verifyNotListOrMap(argumentTypes);
this.parameterInfos = ParameterInfo.fromTypes(argumentTypes);
// accept(...) is declared with Object parameters, one per stepdef argument.
Class[] parameterTypes = new Class[parameterInfos.size()];
for (int i = 0; i < parameterInfos.size(); i++) {
parameterTypes[i] = Object.class;
}
this.method = bodyClass.getDeclaredMethod("accept", parameterTypes);
}
// Reject raw List/Map lambda parameters: a DataTable must be declared and
// converted explicitly instead.
private void verifyNotListOrMap(Type[] argumentTypes) {
for (Type argumentType : argumentTypes) {
if(argumentType instanceof Class) {
Class<?> argumentClass = (Class<?>) argumentType;
if(List.class.isAssignableFrom(argumentClass) || Map.class.isAssignableFrom(argumentClass)) {
throw withLocation(new CucumberException("Can't use " + argumentClass.getName() + " in lambda step definition. Declare a DataTable argument instead and convert manually with asList/asLists/asMap/asMaps"));
}
}
}
}
// Replace the exception's stack trace with the registration site so errors
// point at the user's stepdef, not at framework internals.
private CucumberException withLocation(CucumberException exception) {
exception.setStackTrace(new StackTraceElement[] {this.location});
return exception;
}
@Override
public List<Argument> matchedArguments(Step step) {
return argumentMatcher.argumentsFrom(step.getName());
}
// NOTE(review): the `detail` flag is ignored; both forms return "file:line".
@Override
public String getLocation(boolean detail) {
return location.getFileName() + ":" + location.getLineNumber();
}
@Override
public Integer getParameterCount() {
return parameterInfos.size();
}
// NOTE(review): `argumentType` is ignored; the precomputed info is returned.
@Override
public ParameterInfo getParameterType(int n, Type argumentType) throws IndexOutOfBoundsException {
return parameterInfos.get(n);
}
@Override
public void execute(final I18n i18n, final Object[] args) throws Throwable {
Utils.invoke(body, method, timeoutMillis, args);
}
// Matches on file name only -- line numbers are not compared.
@Override
public boolean isDefinedAt(StackTraceElement stackTraceElement) {
return location.getFileName().equals(stackTraceElement.getFileName());
}
@Override
public String getPattern() {
return pattern.pattern();
}
@Override
public boolean isScenarioScoped() {
return true;
}
}
|
<reponame>HiAwesome/kotlin-demo
package com.moqi.java.ch08;
import kotlin.jvm.functions.Function1;
import static com.moqi.kotlin.ch08.ProcessTheAnswer.*;
/**
* Java ProcessTheAnswerJava
*
* @author moqi On 12/8/20 15:34
*/
public class ProcessTheAnswerJava {
/**
 * Demonstrates three equivalent ways to pass a Kotlin (Int) -> Int function
 * from Java: an anonymous Function1, a statement lambda, and an expression
 * lambda.
 */
public static void main(String[] args) {
// Anonymous-class (non-lambda) form
processTheAnswer(
new Function1<Integer, Integer>() {
@Override
public Integer invoke(Integer number) {
System.out.println("number = " + number);
return number + 1;
}
}
);
// Lambda form
processTheAnswer(
number -> {
System.out.println("number = " + number);
return number + 1;
}
);
// Lambda without the println
processTheAnswer(number -> number + 1);
}
}
|
#Generate Virtual Environment
# Create an isolated Python 3 virtualenv named "environment" and install the
# project's pinned dependencies into it.
virtualenv -p python3 environment
source environment/bin/activate
pip3 install -r requirements.txt
# Leave the virtualenv; re-enter later with `source environment/bin/activate`.
deactivate
|
# Read-only browsing of X12 enrollment transaction sets.
class EdiTransactionSetsController < ApplicationController
# GET /edi_transaction_sets -- first 100 enrollment transaction sets.
def index
# @edi_transaction_sets = EdiTransactionSet.all
@edi_transaction_sets = Protocols::X12::TransactionSetEnrollment.limit(100)
respond_to do |format|
format.html # index.html.erb
format.json { render json: @edi_transaction_sets }
end
end
# Search transaction sets (carrier name mapped to its FEIN via carrier_map)
# that carry a non-empty error_list, paginated 15 per page.
def errors
@q = params[:q]
@transaction = Protocols::X12::TransactionSetEnrollment.search(carrier_map(@q)).where("error_list" => {"$exists" => true, "$not" => {"$size" => 0}}).page(params[:page]).per(15)
authorize! params, @transaction || Protocols::X12::TransactionSetEnrollment
end
# GET /edi_transaction_sets/:id
def show
@edi_transaction_set = Protocols::X12::TransactionSetEnrollment.find(params[:id])
respond_to do |format|
format.html # index.html.erb
format.json { render json: @edi_transaction_set }
end
end
private
# Map a carrier display name to its first profile's FEIN; leaves @q as the
# raw query when the name is unknown.
# NOTE(review): loads every Carrier on each call -- consider caching if hot.
def carrier_map(name)
c_hash = Carrier.all.to_a.inject({}){|result, c| result.merge({c.name => c.carrier_profiles.first.try(:fein)}) }
@q = c_hash[name] if c_hash[name].present?
@q
end
end
|
import java.util.*;
/** Demo: box a fixed int[] into a List and print it. */
public class Main {
    public static void main(String[] args) {
        final int[] values = {4, 7, 10, 11, 24};
        final List<Integer> boxed = new ArrayList<>(values.length);
        for (int index = 0; index < values.length; index++) {
            boxed.add(values[index]);
        }
        System.out.println(boxed);
    }
}
<reponame>realAaronWu/pravega-operator
// +build !ignore_autogenerated
// Code generated by operator-sdk. DO NOT EDIT.
package v1beta1
import (
v1 "k8s.io/api/core/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *AuthenticationParameters) DeepCopyInto(out *AuthenticationParameters) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuthenticationParameters.
func (in *AuthenticationParameters) DeepCopy() *AuthenticationParameters {
if in == nil {
return nil
}
out := new(AuthenticationParameters)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClusterCondition) DeepCopyInto(out *ClusterCondition) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterCondition.
func (in *ClusterCondition) DeepCopy() *ClusterCondition {
if in == nil {
return nil
}
out := new(ClusterCondition)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec) {
*out = *in
if in.ExternalAccess != nil {
in, out := &in.ExternalAccess, &out.ExternalAccess
*out = new(ExternalAccess)
**out = **in
}
if in.TLS != nil {
in, out := &in.TLS, &out.TLS
*out = new(TLSPolicy)
(*in).DeepCopyInto(*out)
}
if in.Authentication != nil {
in, out := &in.Authentication, &out.Authentication
*out = new(AuthenticationParameters)
**out = **in
}
if in.Pravega != nil {
in, out := &in.Pravega, &out.Pravega
*out = new(PravegaSpec)
(*in).DeepCopyInto(*out)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec.
func (in *ClusterSpec) DeepCopy() *ClusterSpec {
if in == nil {
return nil
}
out := new(ClusterSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClusterStatus) DeepCopyInto(out *ClusterStatus) {
*out = *in
if in.Conditions != nil {
in, out := &in.Conditions, &out.Conditions
*out = make([]ClusterCondition, len(*in))
copy(*out, *in)
}
if in.VersionHistory != nil {
in, out := &in.VersionHistory, &out.VersionHistory
*out = make([]string, len(*in))
copy(*out, *in)
}
in.Members.DeepCopyInto(&out.Members)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterStatus.
func (in *ClusterStatus) DeepCopy() *ClusterStatus {
if in == nil {
return nil
}
out := new(ClusterStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ECSSpec) DeepCopyInto(out *ECSSpec) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ECSSpec.
func (in *ECSSpec) DeepCopy() *ECSSpec {
if in == nil {
return nil
}
out := new(ECSSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ExternalAccess) DeepCopyInto(out *ExternalAccess) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalAccess.
func (in *ExternalAccess) DeepCopy() *ExternalAccess {
if in == nil {
return nil
}
out := new(ExternalAccess)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *FileSystemSpec) DeepCopyInto(out *FileSystemSpec) {
*out = *in
if in.PersistentVolumeClaim != nil {
in, out := &in.PersistentVolumeClaim, &out.PersistentVolumeClaim
*out = new(v1.PersistentVolumeClaimVolumeSource)
**out = **in
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileSystemSpec.
func (in *FileSystemSpec) DeepCopy() *FileSystemSpec {
if in == nil {
return nil
}
out := new(FileSystemSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *HDFSSpec) DeepCopyInto(out *HDFSSpec) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HDFSSpec.
func (in *HDFSSpec) DeepCopy() *HDFSSpec {
if in == nil {
return nil
}
out := new(HDFSSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ImageSpec) DeepCopyInto(out *ImageSpec) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImageSpec.
func (in *ImageSpec) DeepCopy() *ImageSpec {
if in == nil {
return nil
}
out := new(ImageSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LongTermStorageSpec) DeepCopyInto(out *LongTermStorageSpec) {
*out = *in
if in.FileSystem != nil {
in, out := &in.FileSystem, &out.FileSystem
*out = new(FileSystemSpec)
(*in).DeepCopyInto(*out)
}
if in.Ecs != nil {
in, out := &in.Ecs, &out.Ecs
*out = new(ECSSpec)
**out = **in
}
if in.Hdfs != nil {
in, out := &in.Hdfs, &out.Hdfs
*out = new(HDFSSpec)
**out = **in
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LongTermStorageSpec.
func (in *LongTermStorageSpec) DeepCopy() *LongTermStorageSpec {
if in == nil {
return nil
}
out := new(LongTermStorageSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *MembersStatus) DeepCopyInto(out *MembersStatus) {
*out = *in
if in.Ready != nil {
in, out := &in.Ready, &out.Ready
*out = make([]string, len(*in))
copy(*out, *in)
}
if in.Unready != nil {
in, out := &in.Unready, &out.Unready
*out = make([]string, len(*in))
copy(*out, *in)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MembersStatus.
func (in *MembersStatus) DeepCopy() *MembersStatus {
if in == nil {
return nil
}
out := new(MembersStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PravegaCluster) DeepCopyInto(out *PravegaCluster) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
in.Spec.DeepCopyInto(&out.Spec)
in.Status.DeepCopyInto(&out.Status)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PravegaCluster.
func (in *PravegaCluster) DeepCopy() *PravegaCluster {
if in == nil {
return nil
}
out := new(PravegaCluster)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PravegaCluster) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PravegaClusterList) DeepCopyInto(out *PravegaClusterList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]PravegaCluster, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PravegaClusterList.
func (in *PravegaClusterList) DeepCopy() *PravegaClusterList {
if in == nil {
return nil
}
out := new(PravegaClusterList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PravegaClusterList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PravegaSpec) DeepCopyInto(out *PravegaSpec) {
*out = *in
if in.Image != nil {
in, out := &in.Image, &out.Image
*out = new(ImageSpec)
**out = **in
}
if in.Options != nil {
in, out := &in.Options, &out.Options
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.ControllerJvmOptions != nil {
in, out := &in.ControllerJvmOptions, &out.ControllerJvmOptions
*out = make([]string, len(*in))
copy(*out, *in)
}
if in.SegmentStoreJVMOptions != nil {
in, out := &in.SegmentStoreJVMOptions, &out.SegmentStoreJVMOptions
*out = make([]string, len(*in))
copy(*out, *in)
}
if in.CacheVolumeClaimTemplate != nil {
in, out := &in.CacheVolumeClaimTemplate, &out.CacheVolumeClaimTemplate
*out = new(v1.PersistentVolumeClaimSpec)
(*in).DeepCopyInto(*out)
}
if in.LongTermStorage != nil {
in, out := &in.LongTermStorage, &out.LongTermStorage
*out = new(LongTermStorageSpec)
(*in).DeepCopyInto(*out)
}
if in.ControllerResources != nil {
in, out := &in.ControllerResources, &out.ControllerResources
*out = new(v1.ResourceRequirements)
(*in).DeepCopyInto(*out)
}
if in.SegmentStoreResources != nil {
in, out := &in.SegmentStoreResources, &out.SegmentStoreResources
*out = new(v1.ResourceRequirements)
(*in).DeepCopyInto(*out)
}
if in.SegmentStoreSecret != nil {
in, out := &in.SegmentStoreSecret, &out.SegmentStoreSecret
*out = new(SegmentStoreSecret)
**out = **in
}
if in.ControllerServiceAnnotations != nil {
in, out := &in.ControllerServiceAnnotations, &out.ControllerServiceAnnotations
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.ControllerPodLabels != nil {
in, out := &in.ControllerPodLabels, &out.ControllerPodLabels
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.SegmentStoreServiceAnnotations != nil {
in, out := &in.SegmentStoreServiceAnnotations, &out.SegmentStoreServiceAnnotations
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.SegmentStorePodLabels != nil {
in, out := &in.SegmentStorePodLabels, &out.SegmentStorePodLabels
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.SegmentStoreSecurityContext != nil {
in, out := &in.SegmentStoreSecurityContext, &out.SegmentStoreSecurityContext
*out = new(v1.PodSecurityContext)
(*in).DeepCopyInto(*out)
}
if in.ControllerSecurityContext != nil {
in, out := &in.ControllerSecurityContext, &out.ControllerSecurityContext
*out = new(v1.PodSecurityContext)
(*in).DeepCopyInto(*out)
}
if in.ControllerPodAffinity != nil {
in, out := &in.ControllerPodAffinity, &out.ControllerPodAffinity
*out = new(v1.Affinity)
(*in).DeepCopyInto(*out)
}
if in.SegmentStorePodAffinity != nil {
in, out := &in.SegmentStorePodAffinity, &out.SegmentStorePodAffinity
*out = new(v1.Affinity)
(*in).DeepCopyInto(*out)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PravegaSpec.
func (in *PravegaSpec) DeepCopy() *PravegaSpec {
if in == nil {
return nil
}
out := new(PravegaSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SegmentStoreSecret) DeepCopyInto(out *SegmentStoreSecret) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SegmentStoreSecret.
func (in *SegmentStoreSecret) DeepCopy() *SegmentStoreSecret {
if in == nil {
return nil
}
out := new(SegmentStoreSecret)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StaticTLS) DeepCopyInto(out *StaticTLS) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StaticTLS.
func (in *StaticTLS) DeepCopy() *StaticTLS {
if in == nil {
return nil
}
out := new(StaticTLS)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TLSPolicy) DeepCopyInto(out *TLSPolicy) {
*out = *in
if in.Static != nil {
in, out := &in.Static, &out.Static
*out = new(StaticTLS)
**out = **in
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TLSPolicy.
func (in *TLSPolicy) DeepCopy() *TLSPolicy {
if in == nil {
return nil
}
out := new(TLSPolicy)
in.DeepCopyInto(out)
return out
}
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from mock import MagicMock
from pipeline.component_framework.test import (
ComponentTestMixin,
ComponentTestCase,
CallAssertion,
ExecuteAssertion,
Call,
Patcher,
)
from pipeline_plugins.components.collections.sites.open.cc import CmdbDeleteHostLockComponent
class CmdbTransferFaultHostComponentTest(TestCase, ComponentTestMixin):
    # NOTE(review): despite the "TransferFaultHost" name, this suite tests
    # CmdbDeleteHostLockComponent -- the class name looks copy-pasted from
    # another component's tests; confirm and rename if so.
    def cases(self):
        # DELETE_HOST_LOCK_SUCCESS_CASE: host-lock deletion succeeds
        # DELETE_HOST_LOCK_FAIL_CASE: host-lock deletion fails
        return [DELETE_HOST_LOCK_SUCCESS_CASE, DELETE_HOST_LOCK_FAIL_CASE]
    def component_cls(self):
        # Component under test.
        return CmdbDeleteHostLockComponent
class MockClient(object):
    """Fake ESB client exposing only the attributes the plugin touches."""

    def __init__(self, delete_host_lock_return=None):
        # The component may adjust the API version; a bare mock absorbs that.
        self.set_bk_api_ver = MagicMock()
        # cc.delete_host_lock() yields the canned API response.
        self.cc = MagicMock(**{"delete_host_lock.return_value": delete_host_lock_return})
# mock path
GET_CLIENT_BY_USER = "pipeline_plugins.components.collections.sites.open.cc.host_lock.base.get_client_by_user"
CC_GET_IPS_INFO_BY_STR = "pipeline_plugins.components.collections.sites.open.cc.host_lock.base.cc_get_ips_info_by_str"
# mock client
DELETE_HOST_LOCK_SUCCESS_CLIENT = MockClient(
delete_host_lock_return={"result": True, "code": 0, "message": "success", "data": {}}
)
DELETE_HOST_LOCK_FAIL_CLIENT = MockClient(
delete_host_lock_return={"result": False, "code": 1, "message": "fail", "data": {}}
)
# Success scenario: the CMDB API reports the locks deleted; the component
# must succeed with empty outputs and call both mocked APIs exactly once
# with the expected arguments.
DELETE_HOST_LOCK_SUCCESS_CASE = ComponentTestCase(
    name="delete host lock success case",
    inputs={"cc_host_ip": "1.1.1.1;2.2.2.2"},
    parent_data={"executor": "executor_token", "biz_cc_id": 2, "biz_supplier_account": 0, "language": "中文"},
    execute_assertion=ExecuteAssertion(success=True, outputs={}),
    schedule_assertion=None,
    execute_call_assertion=[
        CallAssertion(func=CC_GET_IPS_INFO_BY_STR, calls=[Call("executor_token", 2, "1.1.1.1;2.2.2.2")]),
        CallAssertion(func=DELETE_HOST_LOCK_SUCCESS_CLIENT.cc.delete_host_lock, calls=[Call({"id_list": [1, 2]})]),
    ],
    # delete patch
    # Patchers swap in the canned client and the IP -> HostID lookup.
    patchers=[
        Patcher(target=GET_CLIENT_BY_USER, return_value=DELETE_HOST_LOCK_SUCCESS_CLIENT),
        Patcher(
            target=CC_GET_IPS_INFO_BY_STR,
            return_value={"result": True, "ip_result": [{"HostID": 1}, {"HostID": 2}], "invalid_ip": []},
        ),
    ],
)
# Failure scenario: the CMDB API rejects the deletion; the component must
# fail and surface the formatted API error in ex_data.
DELETE_HOST_LOCK_FAIL_CASE = ComponentTestCase(
    name="delete host lock fail case",
    inputs={"cc_host_ip": "1.1.1.1;2.2.2.2"},
    parent_data={"executor": "executor_token", "biz_cc_id": 2, "biz_supplier_account": 0, "language": "中文"},
    execute_assertion=ExecuteAssertion(
        success=False,
        outputs={"ex_data": ('调用配置平台(CMDB)接口cc.delete_host_lock返回失败, params={"id_list":[1,2]}, error=fail')},
    ),
    schedule_assertion=None,
    execute_call_assertion=[
        CallAssertion(func=CC_GET_IPS_INFO_BY_STR, calls=[Call("executor_token", 2, "1.1.1.1;2.2.2.2")]),
        CallAssertion(func=DELETE_HOST_LOCK_FAIL_CLIENT.cc.delete_host_lock, calls=[Call({"id_list": [1, 2]})]),
    ],
    # delete patch
    # Patchers swap in the failing client and the IP -> HostID lookup.
    patchers=[
        Patcher(target=GET_CLIENT_BY_USER, return_value=DELETE_HOST_LOCK_FAIL_CLIENT),
        Patcher(
            target=CC_GET_IPS_INFO_BY_STR,
            return_value={"result": True, "ip_result": [{"HostID": 1}, {"HostID": 2}], "invalid_ip": []},
        ),
    ],
)
|
#!/bin/bash -e
#
# Purpose: Pack a Chromium extension directory into crx format
# Fixes: removed the unused `key` variable and the redundant duplicate -sha1
# flag, and clean up only the temp files this script created (the previous
# `rm *.tmp` deleted any .tmp file in the working directory).
crx="archive.crx"
pub="publickey.tmp"
sig="signature.tmp"
zip="../compressed-standard/archive.zip"
# signature: SHA1 digest of the zip, signed with the private key
openssl sha1 -binary -sign key.pem < "$zip" > "$sig"
# public key, DER-encoded
openssl rsa -pubout -outform DER < key.pem > "$pub" 2>/dev/null
byte_swap () {
  # Take "abcdefgh" and return it as "ghefcdab" (little-endian 32-bit length)
  echo "${1:6:2}${1:4:2}${1:2:2}${1:0:2}"
}
crmagic_hex="4372 3234" # Cr24
version_hex="0200 0000" # CRX format version 2
pub_len_hex=$(byte_swap $(printf '%08x\n' $(ls -l "$pub" | awk '{print $5}')))
sig_len_hex=$(byte_swap $(printf '%08x\n' $(ls -l "$sig" | awk '{print $5}')))
# CRX2 layout: magic, version, key length, signature length, key, sig, zip
(
  echo "$crmagic_hex $version_hex $pub_len_hex $sig_len_hex" | xxd -r -p
  cat "$pub" "$sig" "$zip"
) > "$crx"
rm "$pub" "$sig"
echo "Wrote $crx"
|
<gh_stars>0
import {
Module,
NestModule,
MiddlewareConsumer,
RequestMapping,
RequestMethod,
} from '@nestjs/common';
import CatsModule from './cat/cats.module';
import LoggerMiddleware from './common/middleware/logger.middleware';
import { CatsController } from './cat/cats.controller';
@Module({
  imports: [CatsModule],
  providers: [],
})
// Middleware cannot be listed in the @Module() decorator; it must be wired
// up via the module class's configure() method, and a module that registers
// middleware must implement the NestModule interface.
export default class AppModule implements NestModule {
  configure(consumer: MiddlewareConsumer) {
    // forRoutes() accepts a string, several strings, a route object, a
    // controller class, or several controller classes.
    // Restrict to one specific request:
    // consumer.apply(LoggerMiddleware).forRoutes({ path: 'cats', method: RequestMethod.GET });
    // All /cats/*, /cats/*/*... routes:
    // consumer.apply(LoggerMiddleware).forRoutes('cats');
    // Register for a single controller, excluding POST /cats:
    consumer
      .apply(LoggerMiddleware)
      .exclude({ path: 'cats', method: RequestMethod.POST })
      .forRoutes(CatsController);
  }
}
|
def sum_natural(n):
    """Return the sum 1 + 2 + ... + n using Gauss's closed form.

    Args:
        n: a non-negative integer.

    Returns:
        The n-th triangular number, n * (n + 1) // 2 (0 for n == 0).

    Raises:
        ValueError: if n is negative (the formula would return a
            meaningless value for negative input).
    """
    if n < 0:
        raise ValueError("n must be a non-negative integer")
    return n * (n + 1) // 2


result = sum_natural(7)
print(result)
package net.natroutter.hubcore.features.SelectorItems;
import java.util.*;
import net.natroutter.hubcore.Handler;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import net.natroutter.hubcore.features.gadgets.Gadget;
import net.natroutter.hubcore.features.gadgets.GadgetHandler;
import net.natroutter.hubcore.utilities.Items;
import net.natroutter.natlibs.objects.BaseItem;
/**
 * Manages the per-player hub "selector" items (particle/gadget/server
 * selectors, info book) and keeps player inventories in sync with them.
 */
public class SelectorItemHandler {
// Per-player list of hub items, keyed by player UUID.
public HashMap<UUID, LinkedList<HubItem>> hubItemMap = new HashMap<>();
private Items items;
private GadgetHandler gadgetHandler;
public SelectorItemHandler(Handler handler) {
this.items = handler.getItems();
this.gadgetHandler = handler.getGadgetHandler();
}
// Seed the default hub items for a player; no-op if already initialized.
public void InitializeItems(Player p) {
if (!hubItemMap.containsKey(p.getUniqueId())) {
hubItemMap.put(p.getUniqueId(), new LinkedList<>(Arrays.asList(
new HubItem("ParticleSelector", 0, items.particleSelector()),
new HubItem("GadgetSelector", 1, items.gadgetSelector()),
new HubItem("ServerSelector", 4, items.serverSelector(p)),
new HubItem("InfoBook", 8, items.Info())
)));
}
}
// Append a single hub item to the player's list (seeding defaults first).
public void addHubItem(Player p, HubItem item) {
if (!hubItemMap.containsKey(p.getUniqueId())) {InitializeItems(p);}
LinkedList<HubItem> list = hubItemMap.get(p.getUniqueId());
list.add(item);
hubItemMap.put(p.getUniqueId(), list);
}
// Replace the player's hub-item list wholesale.
public void setHubItems(Player p, LinkedList<HubItem> items) {
if (!hubItemMap.containsKey(p.getUniqueId())) {InitializeItems(p);}
hubItemMap.put(p.getUniqueId(), items);
}
// Append several hub items at once.
public void addHubItems(Player p, HubItem... items) {
if (!hubItemMap.containsKey(p.getUniqueId())) {InitializeItems(p);}
LinkedList<HubItem> list = hubItemMap.get(p.getUniqueId());
list.addAll(new LinkedList<>(Arrays.asList(items)));
hubItemMap.put(p.getUniqueId(), list);
}
// Swap the entire per-player map (e.g. after a reload).
public void replaceHubItems(HashMap<UUID, LinkedList<HubItem>> map) {
hubItemMap = map;
}
// The player's hub items, seeding defaults if absent.
public List<HubItem> getHubItems(Player p) {
if (!hubItemMap.containsKey(p.getUniqueId())) {InitializeItems(p);}
return hubItemMap.get(p.getUniqueId());
}
// Players whose inventories are temporarily exempt from hub-item syncing.
protected ArrayList<UUID> bypassHubItems = new ArrayList<>();
// Enable (status=true) or bypass (status=false) hub-item management for a player.
public void useHubItems(Player p, boolean status) {
if (!status) {
if (!bypassHubItems.contains(p.getUniqueId())) {
bypassHubItems.add(p.getUniqueId());
}
} else {
bypassHubItems.remove(p.getUniqueId());
}
}
// True if the given stack matches any of the player's hub items.
public Boolean isHubItem(Player p, BaseItem item) {
for(HubItem hubitem : getHubItems(p)) {
if (item.isSimilar(hubitem.item())) {
return true;
}
}
return false;
}
// Re-assert hub items into their inventory slots; the gadget-selector slot
// shows the player's active gadget item when one is selected. Skips players
// in bypass mode.
public void update(Player p) {
if (bypassHubItems.contains(p.getUniqueId())) {return;}
Inventory inv = p.getInventory();
ClearInvalidItems(p);
Gadget gad = gadgetHandler.getGadget(p);
for (HubItem hubitem : getHubItems(p)) {
if (hubitem.id().equals("GadgetSelector")) {
if (gad != null) {
if (!BaseItem.from(inv.getItem(hubitem.slot())).isSimilar(gad.getItem())) {
inv.setItem(hubitem.slot(), gad.getItem());
}
} else {
if (!BaseItem.from(inv.getItem(hubitem.slot())).isSimilar(hubitem.item())) {
inv.setItem(hubitem.slot(), hubitem.item());
}
}
} else {
if (!BaseItem.from(inv.getItem(hubitem.slot())).isSimilar(hubitem.item())) {
inv.setItem(hubitem.slot(), hubitem.item());
}
}
}
p.updateInventory();
}
// Destroy anything in the inventory that is neither a hub item nor the
// player's selected gadget item/icon. Skips players in bypass mode.
private void ClearInvalidItems(Player p) {
if (bypassHubItems.contains(p.getUniqueId())) {return;}
Inventory inv = p.getInventory();
Gadget selectedGad = gadgetHandler.getGadget(p);
for (int slot = 0; slot < inv.getContents().length; slot++) {
BaseItem item = BaseItem.from(inv.getItem(slot));
if (!item.getType().equals(Material.AIR)) {
if (selectedGad != null) {
if (item.isSimilar(selectedGad.getItem())) {
continue;
} else if (item.isSimilar(selectedGad.getIcon())) {
continue;
}
}
if (isHubItem(p, item)) {continue;}
item.Destroy();
}
}
p.updateInventory();
}
}
|
#!/bin/bash
set -e # Any subsequent(*) commands which fail will cause the shell script to exit immediately
# Smoke test: verify the AEPMedia podspec integrates into a freshly generated
# SwiftPM/Xcode project and that the project archives and builds for both a
# generic iOS device and the simulator.
PROJECT_NAME=TestProject
# Clean up.
rm -rf $PROJECT_NAME
mkdir -p $PROJECT_NAME && cd $PROJECT_NAME
# Create a new Xcode project.
swift package init
swift package generate-xcodeproj
# Create a Podfile with our pod as dependency.
echo "
platform :ios, '10.0'
target '$PROJECT_NAME' do
use_frameworks!
pod 'AEPMedia', :path => '../AEPMedia.podspec'
end
" >>Podfile
# Install the pods.
pod install
# Archive for generic iOS device
echo '############# Archive for generic iOS device ###############'
xcodebuild archive -scheme TestProject-Package -workspace TestProject.xcworkspace -destination 'generic/platform=iOS'
# Build for generic iOS device
echo '############# Build for generic iOS device ###############'
xcodebuild clean build -scheme TestProject-Package -workspace TestProject.xcworkspace -destination 'generic/platform=iOS'
# Archive for x86_64 simulator
echo '############# Archive for simulator ###############'
xcodebuild archive -scheme TestProject-Package -workspace TestProject.xcworkspace -destination 'generic/platform=iOS Simulator'
# Build for x86_64 simulator
echo '############# Build for simulator ###############'
xcodebuild clean build -scheme TestProject-Package -workspace TestProject.xcworkspace -destination 'generic/platform=iOS Simulator'
# Clean up.
cd ../
rm -rf $PROJECT_NAME
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.repo.dao;
import com.fasterxml.jackson.databind.ObjectMapper;
import ed.biodare2.backend.features.search.ExperimentIndexer;
import ed.biodare2.backend.repo.db.dao.DBSystemInfoRep;
import ed.biodare2.backend.repo.db.dao.db.DBSystemInfo;
import ed.biodare2.backend.repo.dao.AssayPackAssembler.AssayPackImpl;
import ed.biodare2.backend.repo.system_dom.AssayPack;
import static ed.biodare2.backend.repo.system_dom.SystemDomTestBuilder.emptySystemInfo;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.AdditionalAnswers.returnsFirstArg;
import static org.mockito.AdditionalMatchers.not;
import static org.mockito.AdditionalMatchers.and;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
/**
*
* @author Zielu
*/
public class ExperimentPackHubTest {

    // Mocked collaborators wired into the hub under test.
    AssayPackAssembler assembler;
    DBSystemInfoRep dbSysInfos;
    ExperimentIndexer indexer;

    // Object under test and the canonical test fixture pack.
    ExperimentPackHub hub;
    AssayPackImpl testPack;

    public ExperimentPackHubTest() {
    }

    @Before
    public void setUp() {
        testPack = MockReps.testAssayPack();

        // Assembler resolves only testPack's id; any other id is "missing".
        // save() echoes its argument back (returnsFirstArg).
        assembler = mock(AssayPackAssembler.class);
        when(assembler.findOne(eq(testPack.expId))).thenReturn(Optional.of(testPack));
        when(assembler.findOne(not(eq(testPack.expId)))).thenReturn(Optional.empty());
        when(assembler.save(any())).then(returnsFirstArg());

        dbSysInfos = mock(DBSystemInfoRep.class);
        when(dbSysInfos.findById(anyLong())).thenReturn(Optional.of(emptySystemInfo(1)));

        // Real SystemCopier over the mocked repository; mapper registers
        // optional Jackson modules found on the classpath.
        ObjectMapper mapper = new ObjectMapper();
        mapper.findAndRegisterModules();
        SystemCopier copier = new SystemCopier(dbSysInfos, mapper);
        indexer = mock(ExperimentIndexer.class);
        hub = new ExperimentPackHub(assembler, copier, indexer);
    }

    @After
    public void tearDown() {
    }

    @Test
    public void findOneForWritingReturnsPackWriteEnabled() {
        Optional<AssayPack> ans = hub.findOneForWriting(testPack.getId());
        assertTrue(ans.isPresent());
        AssayPackImpl pack = (AssayPackImpl) ans.get();
        assertFalse(pack.readOnly);
    }

    @Test
    public void findOneForWritingReturnsEmptyOnMissing() {
        long id = 10000;
        assertNotEquals(id, testPack.getId());
        Optional<AssayPack> ans = hub.findOneForWriting(id);
        assertFalse(ans.isPresent());
    }

    @Test
    public void findOneForWritingReturnsCopyFromAssembly() {
        // Writing handle must be a distinct copy, not the cached instance.
        Optional<AssayPack> ans = hub.findOneForWriting(testPack.getId());
        assertTrue(ans.isPresent());
        AssayPackImpl pack = (AssayPackImpl) ans.get();
        assertNotSame(pack, testPack);
    }

    @Test
    public void enableWritingMakesCopyAndSetsReadonlyToFalse() {
        // Deep copy: each nested component is a new instance, id preserved.
        testPack.readOnly = true;
        AssayPackImpl pack = (AssayPackImpl) hub.enableWriting(testPack);
        assertFalse(pack.readOnly);
        assertNotSame(pack, testPack);
        assertNotSame(pack.assay, testPack.assay);
        assertNotSame(pack.systemInfo, testPack.systemInfo);
        assertNotSame(pack.dbSystemInfo, testPack.dbSystemInfo);
        assertEquals(pack.expId, testPack.expId);
    }

    @Test
    public void findOneFinds() {
        // Read-only lookup returns the assembler's instance unmodified.
        Optional<AssayPack> ans = hub.findOne(testPack.getId());
        assertTrue(ans.isPresent());
        AssayPackImpl pack = (AssayPackImpl) ans.get();
        assertSame(pack, testPack);
    }

    @Test
    public void findOneReturnsEmptyOnMissing() {
        long id = 10000;
        assertNotEquals(id, testPack.getId());
        Optional<AssayPack> ans = hub.findOne(id);
        assertFalse(ans.isPresent());
    }

    @Test
    public void findByIdsFindsMatching() {
        // Missing ids are silently dropped; duplicates are preserved in order.
        long missing = 10000;
        AssayPackImpl other = MockReps.testAssayPack();
        assertNotEquals(other.getId(), testPack.getId());
        assertNotEquals(missing, testPack.getId());
        assertNotEquals(missing, other.getId());
        when(assembler.findOne(eq(other.getId()))).thenReturn(Optional.of(other));
        when(assembler.findOne(and(not(eq(testPack.getId())), not(eq(other.getId()))))).thenReturn(Optional.empty());
        List<Long> ids = Arrays.asList(missing,
                testPack.getId(),
                missing, missing,
                testPack.getId(),
                other.getId(),
                other.getId());
        List<AssayPack> packs = hub.findByIds(ids).collect(Collectors.toList());
        List<AssayPack> exp = Arrays.asList(testPack, testPack, other, other);
        assertEquals(exp, packs);
    }

    @Test
    public void saveSavesAndIdexes() {
        // Saving delegates to the assembler and refreshes the search index.
        AssayPack pack = hub.save(testPack);
        assertSame(pack, testPack);
        verify(indexer).updateSearchInfo(pack);
        verify(indexer).indexExperiment(pack);
        verify(assembler).save(testPack);
    }

    @Test
    public void newPackMakesWritablePack() {
        AssayPackImpl pack = (AssayPackImpl) hub.newPack(testPack.getAssay(), testPack.getSystemInfo(), testPack.getDbSystemInfo().getAcl());
        assertNotNull(pack);
        assertFalse(pack.readOnly);
        assertSame(pack.assay, testPack.assay);
        assertSame(pack.systemInfo, testPack.systemInfo);
        assertNotNull(pack.dbSystemInfo);
        assertSame(pack.dbSystemInfo.getAcl(), testPack.getDbSystemInfo().getAcl());
    }

    @Test
    public void newPackSetsExpIdOnSystemAndSuch() {
        // newPack must propagate the assay id to every sub-component,
        // overriding whatever ids the inputs carried.
        testPack.expId = 1;
        testPack.systemInfo.parentId = 2;
        testPack.dbSystemInfo.setParentId(3);
        testPack.assay.setId(4);
        AssayPackImpl pack = (AssayPackImpl) hub.newPack(testPack.getAssay(), testPack.getSystemInfo(), testPack.getDbSystemInfo().getAcl());
        assertNotNull(pack);
        assertEquals(pack.expId, testPack.assay.getId());
        assertEquals(pack.assay.getId(), testPack.assay.getId());
        assertEquals(pack.systemInfo.parentId, testPack.assay.getId());
        assertEquals(pack.dbSystemInfo.getParentId(), testPack.assay.getId());
    }

    @Test
    public void newPackAddsSearchInfo() {
        AssayPackImpl pack = (AssayPackImpl) hub.newPack(testPack.getAssay(), testPack.getSystemInfo(), testPack.getDbSystemInfo().getAcl());
        assertNotNull(pack);
        assertNotNull(pack.dbSystemInfo);
        assertNotNull(pack.dbSystemInfo.getSearchInfo());
    }
}
|
#!/bin/bash -e
# pi-gen export stage: scrub the rootfs of build artifacts and secrets,
# record build provenance into an .info file, then zip the image.
# NOTE(review): update_issue / unmount / unmount_image look like pi-gen
# helper functions sourced by the framework — confirm they are in scope.

IMG_FILE="${STAGE_WORK_DIR}/${IMG_DATE}-${IMG_NAME}${IMG_SUFFIX}.img"
INFO_FILE="${STAGE_WORK_DIR}/${IMG_DATE}-${IMG_NAME}${IMG_SUFFIX}.info"

# Stop the fake hardware clock and deduplicate docs inside the chroot.
on_chroot << EOF
/etc/init.d/fake-hwclock stop
hardlink -t /usr/share/doc
EOF

if [ -d "${ROOTFS_DIR}/home/${MAIN_USER}/.config" ]; then
	chmod 700 "${ROOTFS_DIR}/home/${MAIN_USER}/.config"
fi

# Remove build-time configuration, qemu shim, editor/apt backups and
# shadow-file backups that must not ship in the released image.
rm -f "${ROOTFS_DIR}/etc/apt/apt.conf.d/51cache"
rm -f "${ROOTFS_DIR}/usr/bin/qemu-arm-static"
rm -f "${ROOTFS_DIR}/etc/apt/sources.list~"
rm -f "${ROOTFS_DIR}/etc/apt/trusted.gpg~"
rm -f "${ROOTFS_DIR}/etc/passwd-"
rm -f "${ROOTFS_DIR}/etc/group-"
rm -f "${ROOTFS_DIR}/etc/shadow-"
rm -f "${ROOTFS_DIR}/etc/gshadow-"
rm -f "${ROOTFS_DIR}/etc/subuid-"
rm -f "${ROOTFS_DIR}/etc/subgid-"
rm -f "${ROOTFS_DIR}"/var/cache/debconf/*-old
rm -f "${ROOTFS_DIR}"/var/lib/dpkg/*-old
rm -f "${ROOTFS_DIR}"/usr/share/icons/*/icon-theme.cache
# machine-id must be empty so each flashed device generates its own.
rm -f "${ROOTFS_DIR}/var/lib/dbus/machine-id"
true > "${ROOTFS_DIR}/etc/machine-id"
ln -nsf /proc/mounts "${ROOTFS_DIR}/etc/mtab"
# Truncate (not delete) every log file so services can keep appending.
find "${ROOTFS_DIR}/var/log/" -type f -exec cp /dev/null {} \;
# Per-build secrets that must never be shared between images.
rm -f "${ROOTFS_DIR}/root/.vnc/private.key"
rm -f "${ROOTFS_DIR}/etc/vnc/updateid"

update_issue "$(basename "${EXPORT_DIR}")"
install -m 644 "${ROOTFS_DIR}/etc/rpi-issue" "${ROOTFS_DIR}/boot/issue.txt"
install files/LICENSE.oracle "${ROOTFS_DIR}/boot/"
cp "$ROOTFS_DIR/etc/rpi-issue" "$INFO_FILE"

# Append firmware/kernel provenance and the full package list to the info file.
{
	firmware=$(zgrep "firmware as of" \
		"$ROOTFS_DIR/usr/share/doc/raspberrypi-kernel/changelog.Debian.gz" | \
		head -n1 | sed -n 's|.* \([^ ]*\)$|\1|p')
	printf "\nFirmware: https://github.com/raspberrypi/firmware/tree/%s\n" "$firmware"
	kernel="$(curl -s -L "https://github.com/raspberrypi/firmware/raw/$firmware/extra/git_hash")"
	printf "Kernel: https://github.com/raspberrypi/linux/tree/%s\n" "$kernel"
	uname="$(curl -s -L "https://github.com/raspberrypi/firmware/raw/$firmware/extra/uname_string7")"
	printf "Uname string: %s\n" "$uname"
	printf "\nPackages:\n"
	dpkg -l --root "$ROOTFS_DIR"
} >> "$INFO_FILE"

# Unmount, zero free blocks for better compression, then package the image.
ROOT_DEV="$(mount | grep "${ROOTFS_DIR} " | cut -f1 -d' ')"
unmount "${ROOTFS_DIR}"
zerofree -v "${ROOT_DEV}"
unmount_image "${IMG_FILE}"

mkdir -p "${DEPLOY_DIR}"
rm -f "${DEPLOY_DIR}/image_${IMG_DATE}-${IMG_NAME}${IMG_SUFFIX}.zip"
pushd "${STAGE_WORK_DIR}" > /dev/null
zip "${DEPLOY_DIR}/image_${IMG_DATE}-${IMG_NAME}${IMG_SUFFIX}.zip" \
"$(basename "${IMG_FILE}")"
popd > /dev/null
cp "$INFO_FILE" "$DEPLOY_DIR"
|
<reponame>nortal/spring-mvc-component-web
package com.nortal.spring.cw.jsp.web.portal.support.file;
import java.io.InputStream;
import com.nortal.spring.cw.core.model.FileHolderModel;
import com.nortal.spring.cw.core.web.component.support.file.FileDownloadStream;
import com.nortal.spring.cw.core.web.util.BeanUtil;
import com.nortal.spring.cw.jsp.service.file.FileService;
/**
* @author <NAME>
*
*/
/**
 * Default {@link FileDownloadStream} that resolves the file content through
 * the Spring-managed {@link FileService}.
 */
public class DefaultFileDownloadStream implements FileDownloadStream {

    /**
     * Looks up the {@link FileService} bean and opens a stream for the
     * given file model.
     */
    @Override
    public InputStream getFileStream(FileHolderModel fileModel) {
        final FileService fileService = BeanUtil.getBean(FileService.class);
        return fileService.getInputStream(fileModel);
    }

    /** Parameterless variant is intentionally unsupported. */
    @Override
    public InputStream getFileStream() {
        throw new UnsupportedOperationException("Method not implemented");
    }
}
|
#!/bin/bash
#
# setup_after_boot.sh will run after boot and only once the network is online.
# The script runs as the root user.

# Log all output (stderr first, then point stdout at the same fd).
exec 2> /var/log/setup_after_boot.log 1>&2
# Stop on any failure; -u catches unset vars, -x traces commands into the log.
set -euxo pipefail

# Hand control to the ePoxy boot client for stage 3 provisioning.
echo "Running epoxy client"
/usr/bin/epoxy_client -action epoxy.stage3
|
// winescrape.js
// Scrapes wine.com website
const axios = require("axios");
const cheerio = require("cheerio");
// Scrape every configured color and the first four result pages of each,
// sequentially, and return one flat array of wine records.
async function scrapeAll() {
    const collected = [];
    for (const hue of ["red", "white", "rose", "sparkling"]) {
        for (let page = 1; page <= 4; page++) {
            const batch = await scrapePage(hue, page);
            collected.push(...batch);
        }
    }
    return collected;
}
// Listing URLs per wine color; the page number gets appended to each URL
// by scrapePage().
var urlsByColor = {
    red: "https://www.wine.com/list/wine/california/red-wine/7155-106870-124/",
    white: "https://www.wine.com/list/wine/california/white-wine/7155-106870-125/",
    rose: "https://www.wine.com/list/wine/california/rose-wine/7155-106870-126/",
    sparkling: "https://www.wine.com/list/wine/california/champagne-and-sparkling/7155-106870-123/"
}
// Scrape a single wine.com listing page for the given color.
// Returns a promise resolving to an array of plain wine records.
function scrapePage(color, pageNumber) {
    console.log("Scraping wine.com for " + color + " wines, page " + pageNumber + "...");
    var url = urlsByColor[color] + pageNumber;
    return axios.get(url)
    .then((response) => {
        console.log("Received " + response.status + " " + response.statusText);
        const html = response.data;
        // Parse html using Cheerio library
        const $ = cheerio.load(html, { xmlMode: false });
        // The page embeds its data model as JSON inside a
        // <script name="sharify"> tag; extract the outermost {...} and parse.
        const scriptContents = $('script[name="sharify"]').get(0).children[0].data;
        const startIndex = scriptContents.indexOf('{');
        const stopIndex = scriptContents.lastIndexOf('}');
        const jsonString = scriptContents.substring(startIndex, stopIndex + 1);
        const json = JSON.parse(jsonString);
        const models = json.model.collection.models;
        // Holds the array of scraped wines
        var scrapedWines = [];
        for (var i = 0; i < models.length; i++) {
            var wine = models[i];
            // Flatten the fields we care about from catalogModel.
            scrapedWines.push({
                name: wine.catalogModel.fullName,
                region: wine.catalogModel.region,
                nested_region: wine.catalogModel.nestedRegionName,
                varietal: wine.catalogModel.varietal.shortDesc,
                // NOTE(review): parseInt truncates fractional percentages like
                // "14.5" to 14 — confirm parseFloat was not intended.
                alcohol_percent: wine.catalogModel.alcoholPercent ? parseInt(wine.catalogModel.alcoholPercent) : null,
                vintage: wine.catalogModel.vintage,
                // Price is stored split into whole dollars + fractional cents.
                price: parseInt(wine.catalogModel.regularPrice.whole) + parseInt(wine.catalogModel.regularPrice.fractional)/100,
                vineyard: {
                    id: wine.catalogModel.vineyard.id,
                    fullName: wine.catalogModel.vineyard.fullName,
                    longDescription: wine.catalogModel.vineyard.longDescription,
                    region: wine.catalogModel.vineyard.region,
                    imageUrl: wine.catalogModel.vineyard.imageUrl,
                    geoLat:wine.catalogModel.vineyard.geoLat,
                    geoLong: wine.catalogModel.vineyard.geoLong
                },
                shortDescription: wine.catalogModel.shortDescription,
                longDescription: wine.catalogModel.longDescription,
                stock: wine.catalogModel.stock,
                pictures: wine.productMediaModel.medias,
                color: color,
                volume: wine.catalogModel.volume
            });
        }
        return scrapedWines;
    });
};
// Public API: scrape all configured colors and pages in one call.
module.exports = {
    scrapeAll: scrapeAll
};
|
'''
MIT License
Copyright (c) 2018-2019 Onur
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
#!/usr/bin/python
import os
import re
import pathlib
import sys
import getopt
# Matches "#include <module-name>" directives in shader source; group(1) is the
# module name. Raw string avoids invalid-escape-sequence warnings for '\#'
# under newer Python versions (the pattern itself is unchanged).
regex_word = r"\#include\s+(.*)$"
def get_option_vars():
    """Parse command-line options and return the configured directories.

    Returns:
        tuple: ``(base_dir, module_dir, output_folder)``, defaulting to
        ``(".", "./util", "output")`` when an option is not supplied.
    """
    # Option-name -> value table, pre-seeded with the defaults.
    settings = {"--base": ".", "--module": "./util", "--output": "output"}
    options, remainder = getopt.getopt(
        sys.argv[1:], 'o:v', ['base=', 'module=', 'output='])
    for opt, arg in options:
        if opt in settings:
            settings[opt] = arg
    return settings["--base"], settings["--module"], settings["--output"]
def load_modules(module_files, directory):
    """Read GLSL module files and map module name -> file contents.

    Args:
        module_files: file names, each expected to contain ".glsl"; the part
            before ".glsl" becomes the module name.
        directory: directory the module files live in.

    Returns:
        dict: module name -> full text of the module file.

    Raises:
        ValueError: if a file name does not contain ".glsl" (original
            behavior, preserved).
    """
    modules = {}
    for file in module_files:
        name = file[:file.index(".glsl")]
        # Context manager guarantees the handle is closed even if read()
        # raises — the original leaked the handle on error.
        with open(os.path.normpath(directory + "/" + file), "r") as f:
            modules[name] = f.read()
    return modules
def find_folders(path):
    """Yield each immediate subdirectory of *path* as a joined path."""
    for entry in os.listdir(path):
        candidate = os.path.join(path, entry)
        if not os.path.isdir(candidate):
            continue
        yield candidate
def produce_output_folders(folder_list, out_folder):
    """Create *out_folder* plus one mirror subfolder per entry of *folder_list*.

    Existing folders are left untouched (``exist_ok=True``).
    """
    pathlib.Path(out_folder).mkdir(exist_ok=True)
    for folder in folder_list:
        mirrored = out_folder + "/" + folder
        pathlib.Path(mirrored).mkdir(exist_ok=True)
def produce_new_shader(output_folder, old_shader):
    """Copy *old_shader* into *output_folder*, expanding ``#include`` lines.

    Each line starting with ``#include`` is replaced with the body of the
    named module from the module-level ``modulesMap`` (populated under
    ``__main__``); all other lines are copied verbatim.

    NOTE(review): the output path is ``output_folder + "/" + old_shader``,
    so the shader's own folder is recreated under the output folder —
    confirm this matches produce_output_folders' layout.
    """
    # Both handles are now context-managed: the original never closed the
    # output file on an exception (and double-closed the input file).
    with open(old_shader) as shader_file, \
            open(output_folder + "/" + old_shader, "w") as new_file:
        for line in shader_file:
            to_write = line
            if line.startswith("#include"):
                match = re.search(regex_word, line)
                # group(1) is the module name after '#include '
                to_write = modulesMap[match.group(1)]
            new_file.write(to_write)
if __name__ == "__main__":
    # Resolve CLI options, load every GLSL module, then rewrite each shader
    # found in the base directory and its immediate subfolders.
    baseDirectory, baseModuleDirectory, outputFolder = get_option_vars()
    moduleFileList = os.listdir(baseModuleDirectory)
    modulesMap = load_modules(moduleFileList, baseModuleDirectory)
    # Process the base directory itself plus its subfolders, excluding the
    # module directory and the output directory.
    fileList = [baseDirectory] + list(find_folders(baseDirectory))
    fileList = [f for f in fileList if not (f.endswith(baseModuleDirectory) or f.endswith(outputFolder))]
    produce_output_folders(fileList, outputFolder)
    for folder in fileList:
        fList = os.listdir(folder)
        # Only .glsl files are treated as shaders.
        shaderList = [os.path.normpath(folder + '/' + f) for f in fList if f.endswith(".glsl")]
        for shader in shaderList:
            produce_new_shader(outputFolder, shader)
    print("New shaders are produced successfully!")
|
# fairseq training configuration for a deep (60/12 layer) WMT14 En-Fr
# Transformer with ADMIN initialization.
MAXTOKENS=3200
FRQ=8
# users' code dir
UDIR="deepnmt"
# translation task
TASK="adv_translation"
# model arch
ARCH="adv_transformer_wmt_en_de"
# Transformer Large
#ARCH="adv_transformer_vaswani_wmt_en_de_big"
MODEL_PATH="/mnt/data/admin-data/models/wmt14-en-fr-admin-60-12l"
LOG="/mnt/data/admin-data/logs/wmt14-en-fr-admin-60-12l.log"
INITF="/mnt/duck/data/admin-data/init_files/wmt14-en-fr-admin-60-12l.txt"
DATA_PATH="/mnt/duck/data/admin-data/data/wmt14_en_fr_joined_dict/"
# NOTE(review): checkpoint path says "gadmin" while every other path says
# "admin" — confirm this is not a typo.
CKP="/mnt/duck/data/admin-data/models/wmt14-en-fr-gadmin-60-12l/checkpoint_last.pt"
# number of encoder layers
ELAYER=60
# number of decoder layers
DLAYER=12
# adv 0 denotes standard training
# adv 1 denotes vat
# adv 2 reg
# disable adv training in the release, just set AOPT=0
AOPT=0
# learning rate
LR=0.001
# Epoch
EPOCH=50
# Generate scalars using only one gpu
# NOTE(review): this first run hardcodes --adv-opt 0 and
# --admin-init-type adaptive-profiling (profiling pass); the multi-gpu run
# below uses ${AOPT} and "adaptive" — confirm the asymmetry is intentional.
CUDA_VISIBLE_DEVICES=0 fairseq-train ${DATA_PATH} \
--arch ${ARCH} --task ${TASK} --adv-opt 0 --share-all-embeddings --optimizer radam \
--adam-betas '(0.9, 0.98)' --clip-norm 0.0 --lr-scheduler inverse_sqrt \
--warmup-init-lr 1e-07 --warmup-updates 8000 --lr ${LR} --min-lr 1e-09 \
--dropout 0.1 --attention-dropout 0.1 --relu-dropout 0.1 --weight-decay 0.0 \
--criterion adv_label_smoothed_cross_entropy --label-smoothing 0.1 \
--max-tokens $MAXTOKENS --update-freq $FRQ \
--fp16 --fp16-scale-window 256 --threshold-loss-scale 0.03125 \
--seed 1111 --restore-file x.pt --max-epoch ${EPOCH} --save-dir ${MODEL_PATH} \
--encoder-layers ${ELAYER} --decoder-layers ${DLAYER} \
--user-dir ${UDIR} --admin-init-type adaptive-profiling \
--admin-init-path ${INITF} \
--log-format simple --log-interval 100 | tee ${LOG}

# multi gpu training
GPUS=0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
CUDA_VISIBLE_DEVICES=$GPUS fairseq-train ${DATA_PATH} \
--arch ${ARCH} --task ${TASK} --adv-opt ${AOPT} --share-all-embeddings --optimizer radam \
--adam-betas "(0.9,0.98)" --clip-norm 0.0 --lr-scheduler inverse_sqrt \
--warmup-init-lr 1e-07 --warmup-updates 8000 --lr ${LR} --min-lr 1e-09 \
--dropout 0.2 --attention-dropout 0.1 --relu-dropout 0.1 --weight-decay 0.0 \
--criterion adv_label_smoothed_cross_entropy --label-smoothing 0.1 \
--max-tokens $MAXTOKENS --update-freq $FRQ \
--fp16 --fp16-scale-window 256 --threshold-loss-scale 0.03125 \
--ddp-backend=no_c10d \
--seed 1111 --restore-file ${CKP} --max-epoch ${EPOCH} --save-dir ${MODEL_PATH} \
--encoder-layers ${ELAYER} --decoder-layers ${DLAYER} \
--admin-init-path ${INITF} \
--user-dir ${UDIR} --admin-init-type adaptive --log-format simple --log-interval 100 | tee ${LOG}
|
/// Returns every subset of `arr`, built recursively.
///
/// Order matches the classic construction: subsets of the tail are
/// interleaved with the same subsets extended by the head element.
func getPowerSet(arr: [Int]) -> [[Int]] {
    // Base case: the power set of the empty array is the empty set alone.
    guard let head = arr.first else {
        return [[]]
    }
    let tail = Array(arr.dropFirst())
    var result = [[Int]]()
    for subset in getPowerSet(arr: tail) {
        result.append(subset)
        result.append(subset + [head])
    }
    return result
}
// Demo: print the power set of a small sample array.
let arr = [1, 2, 3]
let powerSetResult = getPowerSet(arr: arr)
print("Power set of \(arr) is: \(powerSetResult)")
<gh_stars>0
package deployment
import (
"fmt"
"reflect"
"testing"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
fakeclientapps "k8s.io/client-go/kubernetes/fake"
)
// Test_HasDeploymentsInNamespace table-tests HasDeploymentsInNamespace
// against fake clientsets seeded with deployments in various states:
// present, not ready (0/1 replicas), and with/without the
// MinimumReplicasAvailable condition.
func Test_HasDeploymentsInNamespace(t *testing.T) {
	// A healthy deployment with no status at all.
	deployment := &appsv1.Deployment{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "mydeployment",
			Namespace: "mynamespace",
		},
	}
	// 1 desired replica, 0 ready.
	deploymentNotReady := &appsv1.Deployment{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "mydeployment",
			Namespace: "mynamespace",
		},
		Status: appsv1.DeploymentStatus{
			Replicas:      1,
			ReadyReplicas: 0,
		},
	}
	// MinimumReplicasAvailable condition satisfied.
	deploymentMinAvailable := &appsv1.Deployment{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "mydeployment",
			Namespace: "mynamespace",
		},
		Status: appsv1.DeploymentStatus{
			Conditions: []appsv1.DeploymentCondition{
				{Reason: "MinimumReplicasAvailable", Status: corev1.ConditionTrue},
			},
		},
	}
	// MinimumReplicasAvailable condition present but false.
	deploymentNoMinAvailable := &appsv1.Deployment{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "mydeployment",
			Namespace: "mynamespace",
		},
		Status: appsv1.DeploymentStatus{
			Conditions: []appsv1.DeploymentCondition{
				{Reason: "MinimumReplicasAvailable", Status: corev1.ConditionFalse},
			},
		},
	}
	// One fake clientset per scenario.
	client := fakeclientapps.NewSimpleClientset(deployment)
	clientMinAvailable := fakeclientapps.NewSimpleClientset(deploymentMinAvailable)
	clientNoMinAvailable := fakeclientapps.NewSimpleClientset(deploymentNoMinAvailable)
	clientNotReady := fakeclientapps.NewSimpleClientset(deploymentNotReady)
	type args struct {
		client                  kubernetes.Interface
		namespace               string
		expectedDeploymentNames []string
	}
	tests := []struct {
		name                   string
		args                   args
		wantHas                bool
		wantMissingDeployments []MissingDeployment
		wantErr                bool
	}{
		{
			name: "deployment exists",
			args: args{
				client:                  client,
				namespace:               "mynamespace",
				expectedDeploymentNames: []string{"mydeployment"},
			},
			wantHas:                true,
			wantMissingDeployments: []MissingDeployment{},
			wantErr:                false,
		},
		{
			name: "all deployment not present",
			args: args{
				client:                  client,
				namespace:               "mynamespace",
				expectedDeploymentNames: []string{"mydeployment", "notexists"},
			},
			wantHas: false,
			wantMissingDeployments: []MissingDeployment{
				{Name: "notexists"},
			},
			wantErr: false,
		},
		{
			name: "Deployment not ready",
			args: args{
				client:                  clientNotReady,
				namespace:               "mynamespace",
				expectedDeploymentNames: []string{"mydeployment"},
			},
			wantHas: false,
			wantMissingDeployments: []MissingDeployment{
				{
					// NOTE(review): uses deploymentNoMinAvailable.Name in the
					// "not ready" case — same value ("mydeployment") as
					// deploymentNotReady.Name, so it passes, but confirm intent.
					Name: deploymentNoMinAvailable.Name,
					ReadyReplicasError: fmt.Errorf("Expect %d for deployment %s but got %d Ready replicas",
						deploymentNotReady.Status.Replicas,
						deploymentNotReady.Name,
						deploymentNotReady.Status.ReadyReplicas),
				},
			},
			wantErr: false,
		},
		{
			name: "Deployment no minimum available",
			args: args{
				client:                  clientNoMinAvailable,
				namespace:               "mynamespace",
				expectedDeploymentNames: []string{"mydeployment"},
			},
			wantHas: false,
			wantMissingDeployments: []MissingDeployment{
				{
					Name: deploymentNoMinAvailable.Name,
					// NOTE(review): "Expect ConditionFalse ... but got
					// ConditionTrue" reads reversed — this mirrors the
					// production error text; confirm against the implementation.
					MinimumlReplicasAvailableError: fmt.Errorf("Expect %s for deployment %s but got %s",
						corev1.ConditionFalse,
						deploymentNoMinAvailable.Name, corev1.ConditionTrue),
				},
			},
			wantErr: false,
		},
		{
			name: "Deployment minimum available",
			args: args{
				client:                  clientMinAvailable,
				namespace:               "mynamespace",
				expectedDeploymentNames: []string{"mydeployment"},
			},
			wantHas:                true,
			wantMissingDeployments: []MissingDeployment{},
			wantErr:                false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotHas, gotMissingDeployments, err := HasDeploymentsInNamespace(tt.args.client, tt.args.namespace, tt.args.expectedDeploymentNames)
			if (err != nil) != tt.wantErr {
				t.Errorf("HasDeploymentsInNamespace() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if gotHas != tt.wantHas {
				t.Errorf("HasDeploymentsInNamespace() gotHas = %v, want %v", gotHas, tt.wantHas)
			}
			if !reflect.DeepEqual(gotMissingDeployments, tt.wantMissingDeployments) {
				t.Errorf("HasDeploymentsInNamespace() gotMissingDeployments = %v, want %v", gotMissingDeployments, tt.wantMissingDeployments)
			}
		})
	}
}
|
#!/usr/bin/env bash
# List the first 10 (sorted) entries of the current directory and print each
# one prefixed with a running counter.

# Populate the global FILES variable with up to 10 sorted directory entries.
function getFiles() {
    # define global variable
    FILES=$(ls -1 | sort | head -10)
}

# Print every argument as "File#<n> = <name>", numbering from 1.
function displayFiles() {
    local COUNT=1
    for FILE in $@; do
        echo "File#$COUNT = $FILE"
        ((COUNT++))
    done
}

getFiles
displayFiles $FILES
exit 0
|
<filename>src/globus-client-java/src/main/java/org/globus/jsonUtil/GlobusJsonDateDeserializer.java<gh_stars>0
/**
* Copyright 2014 University of Chicago
* All rights reserved.
* Created Aug 21, 2014 by pruyne
*/
package org.globus.jsonUtil;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.DeserializationContext;
import org.codehaus.jackson.map.JsonDeserializer;
/**
* @author pruyne
*
*/
public class GlobusJsonDateDeserializer extends JsonDeserializer<Date>
{
    // input is in the form 2014-08-21 07:15:53+00:00
    // SimpleDateFormat is NOT thread-safe, and Jackson may call deserializers
    // from multiple threads — all access must be synchronized on this field.
    private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ssz");

    /**
     * Parses a Globus timestamp such as {@code 2014-08-21 07:15:53+00:00}.
     * The colon inside the timezone offset is stripped first so the
     * {@code z} pattern letter can parse it ({@code +00:00} -> {@code +0000}).
     *
     * @return the parsed {@link Date}, or {@code null} when parsing fails
     *         (original lenient behavior, preserved).
     */
    @Override
    public Date deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException,
        JsonProcessingException
    {
        String dateText = jp.getText();
        // Remove the last ':' (the one inside the "+00:00" zone offset).
        int lastColon = dateText.lastIndexOf(':');
        String fixedDateText = dateText.substring(0, lastColon);
        fixedDateText = fixedDateText + dateText.substring(lastColon + 1);
        Date parsed = null;
        try {
            // Fix: guard the shared SimpleDateFormat against concurrent use;
            // unsynchronized parse() can silently produce corrupt dates.
            synchronized (format) {
                parsed = format.parse(fixedDateText);
            }
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return parsed;
    }
}
|
# SPDX-License-Identifier: BSD-3-Clause
# Integration tests for tpm2-tools NV-index commands: define/undefine,
# read/write (with offsets, pipes and policies), read/write locks,
# setbits, extend and encrypted auxiliary sessions.

# Skipped on FreeBSD (exit 77 = test skipped).
if [ "`uname`" == "FreeBSD" ]; then
exit 77
fi

source helpers.sh

nv_test_index=0x1500018
large_file_name="nv.test_large_w"
large_file_read_name="nv.test_large_r"
pcr_specification=sha256:0,1,2,3+sha1:0,1,2,3
file_pcr_value=pcr.bin
file_policy=policy.data

# Best-effort removal of every index and scratch file the tests may create.
cleanup() {
tpm2 nvundefine -Q $nv_test_index -C o 2>/dev/null || true
tpm2 nvundefine -Q 0x1500016 -C o 2>/dev/null || true
tpm2 nvundefine -Q 0x1500015 -C o -P owner 2>/dev/null || true

rm -f policy.bin test.bin nv.test_w $large_file_name $large_file_read_name \
nv.readlock foo.dat cmp.dat $file_pcr_value $file_policy nv.out cap.out yaml.out

if [ "$1" != "no-shut-down" ]; then
shut_down
fi
}
trap cleanup EXIT

start_up

cleanup "no-shut-down"

tpm2 clear

#Test nvdefine with no options
tpm2 nvdefine > yaml.out
tpm2 nvundefine $(yaml_get_kv yaml.out "nv-index")

#Test default values for the hierarchy "-a" parameter
tpm2 nvdefine -Q $nv_test_index -s 32 -a "ownerread|policywrite|ownerwrite"
tpm2 nvundefine -Q $nv_test_index

#Test writing and reading
tpm2 nvdefine -Q $nv_test_index -C o -s 32 \
-a "ownerread|policywrite|ownerwrite"

echo "please123abc" > nv.test_w

tpm2 nvwrite -Q $nv_test_index -C o -i nv.test_w

tpm2 nvread -Q $nv_test_index -C o -s 32 -o 0

tpm2 nvreadpublic > nv.out
yaml_get_kv nv.out "$nv_test_index" > /dev/null
yaml_get_kv nv.out "$nv_test_index" "name" > /dev/null

# Test writing to and reading from an offset by:
# 1. writing "foo" into the nv file at an offset
# 2. writing to the same offset in the nv index
# 3. reading back the index
# 4. comparing the result.

echo -n "foo" > foo.dat
dd if=foo.dat of=nv.test_w bs=1 seek=4 conv=notrunc 2>/dev/null

# Test a pipe input
cat foo.dat | tpm2 nvwrite -Q $nv_test_index -C o --offset 4 -i -

tpm2 nvread $nv_test_index -C o -s 13 > cmp.dat

cmp nv.test_w cmp.dat

# Writing at an offset and data size too big shouldn't result in a change
# to the index value.
# (trap is temporarily reset so an expected failure does not abort the run)
trap - ERR

tpm2 nvwrite -Q $nv_test_index -C o -o 30 -i foo.dat 2>/dev/null
if [ $? -eq 0 ]; then
echo "Writing past the public size shouldn't work!"
exit 1
fi
trap onerror ERR

tpm2 nvread $nv_test_index -C o -s 13 > cmp.dat

cmp nv.test_w cmp.dat

tpm2 nvundefine $nv_test_index -C o

tpm2 pcrread -Q -o $file_pcr_value $pcr_specification

tpm2 createpolicy -Q --policy-pcr -l $pcr_specification -f $file_pcr_value \
-L $file_policy

tpm2 nvdefine -Q 0x1500016 -C o -s 32 -L $file_policy \
-a "policyread|policywrite"

# Write with index authorization for now, since tpm2 nvwrite does not support
# pcr policy.
echo -n "policy locked" | tpm2 nvwrite -Q 0x1500016 -C 0x1500016 \
-P pcr:$pcr_specification=$file_pcr_value -i -

str=`tpm2 nvread 0x1500016 -C 0x1500016 \
-P pcr:$pcr_specification=$file_pcr_value -s 13`

test "policy locked" == "$str"

# this should fail because authread is not allowed
trap - ERR
tpm2 nvread 0x1500016 -C 0x1500016 -P "index" 2>/dev/null
trap onerror ERR

tpm2 nvundefine -Q 0x1500016 -C o

#
# Test large writes
#

tpm2 getcap properties-fixed > cap.out
large_file_size=`yaml_get_kv cap.out "TPM2_PT_NV_INDEX_MAX" "raw"`
nv_test_index=0x1000000

# Create an nv space with attributes 1010 = TPMA_NV_PPWRITE and
# TPMA_NV_AUTHWRITE
tpm2 nvdefine -Q $nv_test_index -C o -s $large_file_size -a 0x2000A

base64 /dev/urandom | head -c $(($large_file_size)) > $large_file_name

# Test file input redirection
tpm2 nvwrite -Q $nv_test_index -C o -i -< $large_file_name

tpm2 nvread $nv_test_index -C o > $large_file_read_name

cmp -s $large_file_read_name $large_file_name

# test per-index readpublic
tpm2 nvreadpublic "$nv_test_index" > nv.out
yaml_get_kv nv.out "$nv_test_index" > /dev/null

tpm2 nvundefine -Q $nv_test_index -C o

#
# Test NV access locked
#
tpm2 nvdefine -Q $nv_test_index -C o -s 32 \
-a "ownerread|policywrite|ownerwrite|read_stclear|writedefine"

echo "foobar" > nv.readlock

tpm2 nvwrite -Q $nv_test_index -C o -i nv.readlock

tpm2 nvread -Q $nv_test_index -C o -s 6 -o 0

tpm2 nvreadlock -Q $nv_test_index -C o

# Reset ERR signal handler to test for expected nvread error
trap - ERR

tpm2 nvread -Q $nv_test_index -C o -s 6 -o 0 2> /dev/null
if [ $? != 1 ];then
echo "nvread didn't fail!"
exit 1
fi
trap onerror ERR

# Test that write lock works
tpm2 nvwritelock -C o $nv_test_index

trap - ERR

tpm2 nvwrite $nv_test_index -C o -i nv.readlock
if [ $? != 1 ];then
echo "nvwrite didn't fail!"
exit 1
fi

tpm2 nvundefine -C o $nv_test_index

trap onerror ERR

#
# Test that owner and index passwords work by
# 1. Setting up the owner password
# 2. Defining an nv index that can be satisfied by an:
#   a. Owner authorization
#   b. Index authorization
# 3. Using index and owner based auth during write/read operations
# 4. Testing that auth is needed or a failure occurs.
#
tpm2 changeauth -c o owner

tpm2 nvdefine 0x1500015 -C o -s 32 \
-a "policyread|policywrite|authread|authwrite|ownerwrite|ownerread" \
-p "index" -P "owner"

# Use index password write/read, implicit -a
tpm2 nvwrite -Q 0x1500015 -P "index" -i nv.test_w
tpm2 nvread -Q 0x1500015 -P "index"

# Use index password write/read, explicit -a
tpm2 nvwrite -Q 0x1500015 -C 0x1500015 -P "index" -i nv.test_w
tpm2 nvread -Q 0x1500015 -C 0x1500015 -P "index"

# use owner password
tpm2 nvwrite -Q 0x1500015 -C o -P "owner" -i nv.test_w
tpm2 nvread -Q 0x1500015 -C o -P "owner"

# Check a bad password fails
trap - ERR
tpm2 nvwrite -Q 0x1500015 -C 0x1500015 -P "wrong" -i nv.test_w 2>/dev/null
if [ $? -eq 0 ];then
echo "nvwrite with bad password should fail!"
exit 1
fi

# Check using authorisation with tpm2 nvundefine
trap onerror ERR

tpm2 nvundefine 0x1500015 -C o -P "owner"

# Check nv index can be specified simply as an offset
tpm2 nvdefine -Q -C o -s 32 -a "ownerread|ownerwrite" 1 -P "owner"
tpm2 nvundefine 0x01000001 -C o -P "owner"

# Test setbits
tpm2 nvdefine -C o -P "owner" -a "nt=bits|ownerread|policywrite|ownerwrite|writedefine" $nv_test_index
tpm2 nvsetbits -C o -P "owner" -i 0xbadc0de $nv_test_index
check=$(tpm2 nvread -C o -P "owner" $nv_test_index | xxd -p | sed s/'^0*'/0x/)
if [ "$check" != "0xbadc0de" ]; then
echo "Expected setbits read value of 0xbadc0de, got \"$check\""
exit 1
fi

# Test global writelock
if is_cmd_supported "NV_GlobalWriteLock"; then
tpm2 nvdefine -C o -P "owner" -s 32 -a "ownerread|ownerwrite|globallock" 42
tpm2 nvdefine -C o -P "owner" -s 32 -a "ownerread|ownerwrite|globallock" 43
tpm2 nvdefine -C o -P "owner" -s 32 -a "ownerread|ownerwrite|globallock" 44

echo foo | tpm2 nvwrite -C o -P "owner" -i- 42
echo foo | tpm2 nvwrite -C o -P "owner" -i- 43
echo foo | tpm2 nvwrite -C o -P "owner" -i- 44

tpm2 nvwritelock -Co -P owner --global

# These writes should fail now that its in a writelocked state
trap - ERR
echo foo | tpm2 nvwrite -C o -P "owner" -i- 42
if [ $? -eq 0 ]; then
echo "Expected tpm2 nvwrite to fail after globalwritelock of index 42"
exit 1
fi

echo foo | tpm2 nvwrite -C o -P "owner" -i- 43
if [ $? -eq 0 ]; then
echo "Expected tpm2 nvwrite to fail after globalwritelock of index 43"
exit 1
fi

echo foo | tpm2 nvwrite -C o -P "owner" -i- 44
if [ $? -eq 0 ]; then
echo "Expected tpm2 nvwrite to fail after globalwritelock of index 44"
exit 1
fi
fi

trap onerror ERR

tpm2 nvundefine -C o -P "owner" $nv_test_index

# Test extend
tpm2 nvdefine -C o -P "owner" -a "nt=extend|ownerread|policywrite|ownerwrite" $nv_test_index
echo "foo" | tpm2 nvextend -C o -P "owner" -i- $nv_test_index
check=$(tpm2 nvread -C o -P "owner" $nv_test_index | xxd -p -c 64 | sed s/'^0*'//)
# sha256 of the extend of "foo\n" into a zeroed index.
expected="1c8457de84bb43c18d5e1d75c43e393bdaa7bca8d25967eedd580c912db65e3e"
if [ "$check" != "$expected" ]; then
echo "Expected setbits read value of \"$expected\", got \"$check\""
exit 1
fi

# Test nvextend and nvdefine with aux sessions
tpm2 clear
tpm2 createprimary -C o -c prim.ctx
tpm2 startauthsession -S enc_session.ctx --hmac-session -c prim.ctx
tpm2 changeauth -c o owner
tpm2 nvdefine -C o -P owner -a "nt=extend|ownerread|policywrite|ownerwrite" \
$nv_test_index -p nvindexauth -S enc_session.ctx
echo "foo" | tpm2 nvextend -C o -P owner -i- $nv_test_index -S enc_session.ctx
tpm2 flushcontext enc_session.ctx
rm enc_session.ctx
rm prim.ctx
check=$(tpm2 nvread -C o -P owner $nv_test_index | xxd -p -c 64 | sed s/'^0*'//)
expected="1c8457de84bb43c18d5e1d75c43e393bdaa7bca8d25967eedd580c912db65e3e"
if [ "$check" != "$expected" ]; then
echo "Expected setbits read value of \"$expected\", got \"$check\""
exit 1
fi

exit 0
|
<gh_stars>10-100
#include <iostream>
#include <exception>
#include <string>
#include <sqlitepp/sqlitepp.hpp>
// Plain data record mirroring one row of the "employee" table.
struct employee
{
    std::string name;   // read as a single whitespace-delimited token by operator>>
    int age;
    float salary;
};
// Print an employee in a human-readable form,
// e.g. "Bob: 30, earns 1000$ in month".
std::ostream& operator<<(std::ostream& os, employee const& e)
{
    return os << e.name << ": " << e.age << ", earns " << e.salary << "$ in month";
}

// Read an employee as three whitespace-separated fields: name age salary.
std::istream& operator>>(std::istream& is, employee & e)
{
    return is >> e.name >> e.age >> e.salary;
}
// Demo: insert employees read from stdin into an SQLite database using
// sqlitepp's bind (use) / output (into) binders, then print them back.
int main()
{
    using namespace sqlitepp;
    try
    {
        // already known things
        session db("enterprise.db");
        db << "create table employee(id integer primary key, name text, age integer, salary real)";
        // Heh!
        employee e;
        statement st(db);
        // (1) Bind e's fields once; each exec() below reads their current values.
        st << "insert into employee values(null, :name, :age, :salary)"
            , use(e.name), use(e.age), use(e.salary);
        while ( std::cin >> e )
        {
            st.reset(true); // (1.1) NOTE(review): presumably re-prepares/rebinds before re-execution -- confirm against sqlitepp docs
            st.exec(); // (1.2) insert the row just read into e
        }
        // (2) Reuse the same fields as output targets; exec() yields one row per call.
        std::cout << "\n\t-- Employees --\n";
        st << "select name, age, salary from employee", into(e.name), into(e.age), into(e.salary);
        while ( st.exec() )
        {
            std::cout << e << std::endl;
        }
    }
    catch (std::exception const& ex)
    {
        std::cerr << ex.what();
        return -1;
    }
}
|
from util.utils import Region, Utils
from util.logger import Logger
from util.stats import Stats
from util.config import Config
class HeadquartersModule(object):
    """Automates the headquarters (HQ) tab: collecting dorm tokens/affinity
    points, refilling dorm supplies, and keeping tactical-class skill
    levelling running at the academy."""

    def __init__(self, config, stats):
        """Initializes the HQ module.

        Args:
            config (Config): ALAuto Config instance
            stats (Stats): ALAuto stats instance
        """
        self.enabled = True
        self.config = config
        self.stats = stats
        # Named screen regions (x, y, w, h) used as touch targets below.
        self.region = {
            'hq_tab': Region(745, 1000, 205, 65),
            'tap_out': Region(760, 865, 380, 105),
            'dorm_tab': Region(845, 390, 260, 295),
            'academy_tab': Region(255, 390, 260, 295),
            'dorm_back_button': Region(21, 47, 65, 65),
            'dorm_eye_button': Region(27, 223, 50, 47),
            'supplies_bar': Region(310, 975, 215, 65),
            'oxy_cola': Region(470, 580, 105, 90),
            'exit_snacks_menu': Region(900, 880, 380, 135),
            'button_back': Region(48, 43, 76, 76),
            'confirm_dorm_summary': Region(1545, 905, 235, 65),
            'ignore_give_food_button': Region(690, 750, 185, 60),
            'tactical_class_building': Region(1050, 195, 115, 64),
            'start_lesson_button': Region(1660, 900, 150, 60),
            'cancel_lesson_button': Region(1345, 900, 170, 60)
        }

    def hq_logic_wrapper(self):
        """Method that fires off the necessary child methods that encapsulates
        all the actions related to the headquarters tab.
        These actions are:
        - Collecting dorm tokens/affinity points
        - Refilling dorm
        - Skill levelling

        Returns:
            bool: always True once the HQ loop has been exited.
        """
        Logger.log_msg("Found HQ alert.")
        Utils.touch_randomly(self.region["hq_tab"])
        # counter variables are used to prevent longer loops, i.e. when their
        # corresponding alerts get stuck
        counterHQ = 0
        counterAcademy = 0
        counterDorm = 0
        while True:
            Utils.wait_update_screen(1)
            if self.config.academy['enabled'] and counterAcademy < 2 and Utils.find("headquarters/academy_alert", 0.99):
                Logger.log_msg("Found academy alert.")
                # open academy
                Utils.touch_randomly(self.region["academy_tab"])
                Utils.script_sleep(2)
                # open tactical class
                Logger.log_debug("Opening tactical class.")
                Utils.touch_randomly(self.region["tactical_class_building"])
                self.skill_levelling()
                # exit academy
                Utils.touch_randomly(self.region["button_back"])
                counterAcademy += 1
                Logger.log_debug("Going back to main menu.")
                continue
            if self.config.dorm['enabled'] and counterDorm < 3 and Utils.find("headquarters/dorm_alert", 0.99):
                Logger.log_msg("Found dorm alert.")
                # open the dorm
                Utils.touch_randomly(self.region["dorm_tab"])
                # Fixed copy-paste from the academy branch: this opens the dorm.
                Logger.log_debug("Opening the dorm.")
                self.refill_dorm()
                self.collect_dorm_balloons()
                Utils.script_sleep(1)
                Logger.log_msg("Cleaned dorm.")
                # exit dorm
                Utils.touch_randomly(self.region["dorm_back_button"])
                counterDorm += 1
                Logger.log_debug("Going back to main menu.")
                continue
            if Utils.find("headquarters/cat_lodge_alert", 0.99):
                # if only the cat lodge alert is detected as valid alert, ignore it
                Logger.log_msg("Cat lodge alert detected, ignoring it.")
            if counterHQ < 5 and Utils.find("headquarters/hq_alert"):
                # counterHQ = 5 only if academy has been opened two times and dorm three times
                # the find fails if it's on the main menu and there is no alert or if it is on the selection screen
                Logger.log_msg("Found HQ alert.")
                Utils.touch_randomly(self.region["hq_tab"])
                counterHQ += 1
                continue
            else:
                # exit loop
                if Utils.find("headquarters/dorm_sign"):
                    # academy alert is stuck or dorm alert is stuck or cat lodge alert is on
                    Logger.log_debug("Alert is on, but nothing else to do.")
                    Utils.touch_randomly(self.region["tap_out"])
                Logger.log_debug("Ending HQ loop.")
                break
        Utils.wait_update_screen(1)
        return True

    def collect_dorm_balloons(self):
        """
        This method finds and collects all the dorm tokens and affinity points
        visible to the script.
        The various swipes may not work if there is a shipgirl at the starting
        point of the swipe. For this reason the wrapper to this method iterates
        its cycle for three times, refreshing the dorm.
        """
        Utils.script_sleep(1)
        # tap dorm eye in order to hide UI
        Utils.touch_randomly(self.region["dorm_eye_button"])
        Logger.log_debug("Collecting all visible dorm tokens/affinity points.")
        for i in range(0, 4):
            Utils.wait_update_screen(1)
            # since a rather low similarity is used, the variable j ensures a finite loop
            j = 0
            while Utils.find_and_touch("headquarters/dorm_token", 0.75) and j < 5:
                Logger.log_msg("Collected dorm token.")
                Utils.wait_update_screen()
                j += 1
            j = 0
            while Utils.find_and_touch("headquarters/affinity_point", 0.75) and j < 5:
                Logger.log_msg("Collected affinity points.")
                Utils.wait_update_screen()
                j += 1
            if i == 0:
                # swipe right and refresh
                Utils.swipe(960, 540, 560, 540, 300)
                continue
            if i == 1:
                # swipe left (also countering the previous swipe) and refresh
                Utils.swipe(960, 540, 1760, 540, 300)
                continue
            if i == 2:
                # undo previous swipe
                Utils.swipe(960, 540, 560, 540, 300)
                # swipe up and refresh
                Utils.swipe(960, 540, 960, 790, 300)
                continue
            if i == 3:
                # swipe bottom (also countering the previous swipe) and refresh
                Utils.swipe(960, 540, 960, 40, 300)
                continue
        # restore UI
        Utils.touch_randomly(self.region["dorm_eye_button"])

    def refill_dorm(self):
        """
        This method refills the dorm supplies with 10 oxy cola (150 minutes)
        if the supplies bar is empty.
        """
        Utils.script_sleep(5)
        Logger.log_debug("Refilling dorm supplies if empty.")
        while True:
            Utils.wait_update_screen(1)
            if Utils.find("headquarters/dorm_summary_confirm_button"):
                # dismiss dorm summary, if any
                Utils.touch_randomly(self.region["confirm_dorm_summary"])
                continue
            if Utils.find("headquarters/give_food_button"):
                # dismiss notification by tapping ignore
                Utils.touch_randomly(self.region["ignore_give_food_button"])
                continue
            if Utils.find("headquarters/supplies_bar_empty"):
                # proceed to refill
                Utils.touch_randomly(self.region["supplies_bar"])
                Utils.script_sleep(1)
                # tap oxy cola ten times
                for i in range(0, 10):
                    Utils.touch_randomly(self.region["oxy_cola"])
                Logger.log_msg("Refilled dorm supplies.")
                # tap out
                Utils.touch_randomly(self.region["exit_snacks_menu"])
            else:
                # exit loop
                Logger.log_debug("Ending refill loop.")
                break

    def skill_levelling(self):
        """
        This method ensures that the skills currently being levelled continue
        to do so. The skillbooks used are the ones indicated by the
        SkillBookTier setting in the config.ini file.
        """
        Utils.script_sleep(5)
        Logger.log_msg("Levelling the skills of the previously chosen ships.")
        while True:
            Utils.wait_update_screen(1)
            if Utils.find_and_touch("menu/button_confirm"):
                Logger.log_msg("Starting/ending skill levelling session.")
                Utils.script_sleep(3.5)
                continue
            if Utils.find("headquarters/skill_exp_gain"):
                # Pick the configured tier's book, preferring offense, then
                # defense, then support.
                if Utils.find_and_touch("headquarters/t{}_offense_skillbook".format(self.config.academy["skill_book_tier"]), 0.99):
                    # levelling offensive skill
                    Logger.log_msg("Selected T{} offensive skill book.".format(self.config.academy["skill_book_tier"]))
                    self.stats.increment_offensive_skillbook_used()
                elif Utils.find_and_touch("headquarters/t{}_defense_skillbook".format(self.config.academy["skill_book_tier"]), 0.99):
                    # levelling defensive skill
                    Logger.log_msg("Selected T{} defensive skill book.".format(self.config.academy["skill_book_tier"]))
                    self.stats.increment_defensive_skillbook_used()
                elif Utils.find_and_touch("headquarters/t{}_support_skillbook".format(self.config.academy["skill_book_tier"]), 0.99):
                    # levelling support skill
                    Logger.log_msg("Selected T{} support skill book.".format(self.config.academy["skill_book_tier"]))
                    self.stats.increment_support_skillbook_used()
                else:
                    Logger.log_warning("Skillbook specified not found. Cancelling lesson.")
                    Utils.touch_randomly(self.region["cancel_lesson_button"])
                    continue
                Utils.script_sleep(1)
                Utils.touch_randomly(self.region["start_lesson_button"])
                continue
            if Utils.find("headquarters/tactical_class"):
                # exit tactical class
                Utils.touch_randomly(self.region["button_back"])
                Logger.log_msg("All classes have started.")
                Utils.script_sleep(1)
                break
|
<reponame>danieldiamond/gitlab-analytics
import sys
import re
from io import StringIO
import json
import time
from logging import error, info, basicConfig, getLogger, warning
from os import environ as env
from typing import Dict, Tuple, List
from yaml import load, safe_load, YAMLError
import boto3
import gspread
import pandas as pd
from fire import Fire
from gitlabdata.orchestration_utils import (
postgres_engine_factory,
snowflake_engine_factory,
query_executor,
)
from google_sheets_client import GoogleSheetsClient
from google.cloud import storage
from google.oauth2 import service_account
from gspread.exceptions import APIError
from gspread import Client
from oauth2client.service_account import ServiceAccountCredentials
from sheetload_dataframe_utils import dw_uploader
from sqlalchemy.engine.base import Engine
from qualtrics_sheetload import qualtrics_loader
def sheet_loader(
    sheet_file: str,
    table_name: str = None,
    schema: str = "sheetload",
    database: str = "RAW",
    gapi_keyfile: str = None,
    conn_dict: Dict[str, str] = None,
) -> None:
    """
    Load data from a google sheet into a DataFrame and pass it to dw_uploader.
    The sheet must have been shared with the google service account of the runner.
    Loader expects the name of the sheet to be:
    <sheet_name>.<tab>
    The tab name will become the table name.
    Column names can not contain parentheses. Spaces and slashes will be
    replaced with underscores.
    sheet_file: path to yaml file with sheet configurations
    table_name: Optional, name of the tab to be loaded -- matches the table_name as well as part of the document name
    -- For example for the test sheet this should be "test_sheet" as that is the name of the tab to be loaded from the document.
    -- Also should match the second half of the sheet name -- for example `sheetload.test_sheet`.
    -- Also the name of the final table in Snowflake. The test sheet turns into a RAW.SHEETLOAD.test_sheet table.
    python sheetload.py sheets <sheet_file>
    """
    with open(sheet_file, "r") as file:
        try:
            stream = safe_load(file)
        except YAMLError as exc:
            print(exc)
            # Re-raise: previously execution continued and crashed below with
            # a misleading NameError on the unbound `stream`.
            raise
    # Flatten the config into (sheet_name, tab) pairs, optionally restricted
    # to a single tab when table_name is given.
    sheets = [
        (sheet["name"], tab)
        for sheet in stream["sheets"]
        for tab in sheet["tabs"]
        if (table_name is None or tab == table_name)
    ]
    if database != "RAW":
        engine = snowflake_engine_factory(conn_dict or env, "ANALYTICS_LOADER", schema)
        database = env["SNOWFLAKE_TRANSFORM_DATABASE"]
        # Tries to create the schema it's about to write to.
        # If it already exists, "{schema} already exists, statement succeeded."
        # is returned.
        schema_check = f"""CREATE SCHEMA IF NOT EXISTS "{database}".{schema}"""
        query_executor(engine, schema_check)
    else:
        engine = snowflake_engine_factory(conn_dict or env, "LOADER", schema)
    info(engine)
    # Get the credentials for sheets and the database engine
    google_sheet_client = GoogleSheetsClient()
    for sheet_name, tab in sheets:
        info(f"Processing sheet: {sheet_name}")
        # NOTE(review): document looked up as "<schema>.<sheet_name>" --
        # confirm this matches GoogleSheetsClient.load_google_sheet's contract.
        dataframe = google_sheet_client.load_google_sheet(
            gapi_keyfile, schema + "." + sheet_name, tab
        )
        dw_uploader(engine, tab, dataframe, schema)
        info(f"Finished processing for table: {tab}")
    # Grant read access on everything just loaded.
    query = f"""grant select on all tables in schema "{database}".{schema} to role transformer"""
    query_executor(engine, query)
    info("Permissions granted.")
def gcs_loader(
    path: str,
    bucket: str,
    schema: str = "sheetload",
    compression: str = "gzip",
    conn_dict: Dict[str, str] = None,
    gapi_keyfile: str = None,
) -> None:
    """
    Download a CSV file from a GCS bucket and then pass it to dw_uploader.
    Loader expects <table_name>.*
    Column names can not contain parentheses. Spaces and slashes will be
    replaced with underscores.
    Paths is a list that is separated spaces. i.e.:
    python sheetload.py gcs --bucket <bucket> --destination <snowflake|postgres> <path_1> <path_2> ...
    """
    # Set some vars
    chunksize = 15000
    chunk_iter = 0
    engine = snowflake_engine_factory(conn_dict or env, "LOADER", schema)
    # Get the gcloud storage client and authenticate
    scope = ["https://www.googleapis.com/auth/cloud-platform"]
    # safe_load: same result for this JSON/YAML credential payload and works
    # with PyYAML >= 6, where plain load() requires an explicit Loader.
    keyfile = safe_load(gapi_keyfile or env["GCP_SERVICE_CREDS"])
    credentials = service_account.Credentials.from_service_account_info(keyfile)
    scoped_credentials = credentials.with_scopes(scope)
    storage_client = storage.Client(credentials=scoped_credentials)
    bucket = storage_client.get_bucket(bucket)
    # Download the file and then pass it in chunks to dw_uploader
    blob = bucket.blob(path)
    blob.download_to_filename(path)
    table = path.split(".")[0]
    try:
        sheet_df = pd.read_csv(
            path,
            engine="c",
            low_memory=False,
            compression=compression,
            chunksize=chunksize,
        )
    except FileNotFoundError:
        info("File {} not found.".format(path))
        # Bail out: previously execution fell through to a NameError on
        # the unbound `sheet_df` below.
        sys.exit(1)
    # Upload each chunk of the file
    for chunk in sheet_df:
        chunk[chunk.columns] = chunk[chunk.columns].astype("str")
        dw_uploader(engine=engine, table=table, data=chunk, chunk=chunk_iter)
        chunk_iter += 1
    # Sanity-check the Snowflake row count against the range implied by the
    # number of chunks read.
    max_size_of_relation = chunk_iter * chunksize
    min_size_of_relation = max((chunk_iter - 1) * chunksize, 0)
    # query_executor takes (engine, query); the arguments were reversed here.
    actual_size = query_executor(engine, f"SELECT COUNT(*) FROM {table};")[0][0]
    if (actual_size > max_size_of_relation) or (actual_size < min_size_of_relation):
        # The message parts were joined with `/` (string division -> TypeError);
        # adjacent literals concatenate them instead.
        error(
            f"Count in Snowflake for table {table} ({actual_size})"
            " did not match the range of what was read in code "
            f"({min_size_of_relation} to {max_size_of_relation})"
        )
        sys.exit(1)
def count_records_in_s3_csv(bucket: str, s3_file_key: str, s3_client) -> int:
    """Return the number of records in the CSV at s3://bucket/s3_file_key.

    Uses the "s3 select" feature to run a COUNT(*) query directly against the
    object. Assumes the CSV has a header row. Returns -1 when the response
    stream contains no Records event.
    """
    response = s3_client.select_object_content(
        Bucket=bucket,
        Key=s3_file_key,
        ExpressionType="SQL",
        Expression="select count(*) as line_count from s3object",
        InputSerialization={
            "CSV": {"FileHeaderInfo": "IGNORE", "AllowQuotedRecordDelimiter": True}
        },
        OutputSerialization={"JSON": {}},
    )
    # The payload is an event stream; only the first Records event carries the
    # JSON-encoded query result.
    records = next(
        (event["Records"] for event in response["Payload"] if "Records" in event),
        None,
    )
    if records is None:
        return -1
    result = json.loads(records["Payload"].decode("utf-8"))
    return result["line_count"]
def check_s3_csv_count_integrity(
    bucket, file_key, s3_client, snowflake_engine, table_name
) -> None:
    """
    This function is used to verify that the count of rows in the snowflake table with name "table_name"
    is equivalent to the count of records found in the csv on aws s3 with path s3://bucket/file_key .
    If the counts are equal, this function returns gracefully and returns nothing. If the counts are not equal,
    this function logs an error and exits the current running program with an exit code of 1. The exit code is
    used to signal airflow that it should fail the task.
    """
    snowflake_count_result_set = query_executor(
        snowflake_engine, f"select count(*) from {table_name}"
    )
    # First row, first column of the result set holds the COUNT(*) value.
    snowflake_count = snowflake_count_result_set[0][0]
    s3_count = count_records_in_s3_csv(bucket, file_key, s3_client)
    if snowflake_count != s3_count:
        error(
            f"Error replicating CSV from S3 for table name: {table_name}. Snowflake count: {snowflake_count}, S3 count: {s3_count}."
        )
        # Non-zero exit signals the orchestrator (Airflow) to fail the task.
        sys.exit(1)
def s3_loader(bucket: str, schema: str, conn_dict: Dict[str, str] = None) -> None:
    """
    Load data from csv files stored in an S3 Bucket into a DataFrame and pass it to dw_uploader
    for loading into Snowflake.
    Loader will iterate through all files in the provided bucket that have the `.csv` extension.
    python sheetload.py s3 --bucket datateam-greenhouse-extract --schema greenhouse

    Raises:
        ValueError: if no AWS credentials are configured for `schema`.
    """
    # Create Snowflake engine
    engine = snowflake_engine_factory(conn_dict or env, "LOADER", schema)
    info(engine)
    # Set S3 Client
    if schema == "greenhouse":
        aws_access_key_id = env["GREENHOUSE_ACCESS_KEY_ID"]
        aws_secret_access_key = env["GREENHOUSE_SECRET_ACCESS_KEY"]
    else:
        # Fail fast with a clear message; previously an unknown schema
        # crashed later with a NameError on the undefined credentials.
        raise ValueError(f"No S3 credentials configured for schema: {schema}")
    session = boto3.Session(
        aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key
    )
    s3_client = session.client("s3")
    s3_bucket = s3_client.list_objects(Bucket=bucket)
    # Iterate through files and upload
    for obj in s3_bucket["Contents"]:
        file = obj["Key"]
        info(f"Working on {file}...")
        if re.search(r"\.csv", file):
            csv_obj = s3_client.get_object(Bucket=bucket, Key=file)
            body = csv_obj["Body"]
            csv_string = body.read().decode("utf-8")
            sheet_df = pd.read_csv(StringIO(csv_string), engine="c", low_memory=False)
            # Table name is the object key up to the first dot.
            table, extension = file.split(".")[0:2]
            dw_uploader(engine, table, sheet_df, truncate=True)
            # Verify the upload row count against the source CSV.
            check_s3_csv_count_integrity(bucket, file, s3_client, engine, table)
def csv_loader(
    filename: str,
    schema: str,
    database: str = "RAW",
    tablename: str = None,
    header: str = "infer",
    conn_dict: Dict[str, str] = None,
):
    """
    Loads csv files from a local file system into a DataFrame and pass it to dw_uploader
    for loading into Snowflake.
    Tablename will use the name of the csv by default.
    python sheetload.py csv --filename nvd.csv --schema engineering_extracts
    becomes raw.engineering_extracts.nvd
    Header will read the first row of the csv as the column names by default. Passing
    None will use integers for each column.
    python sheetload.py csv --filename nvd.csv --schema engineering_extracts --tablename nvd_data --header None
    """
    # Create Snowflake engine
    engine = snowflake_engine_factory(conn_dict or env, "LOADER", schema)
    info(engine)
    csv_data = pd.read_csv(filename, header=header)
    # Derive the table name from the file name (strip directories and
    # extension) unless one was given explicitly.
    if tablename:
        table = tablename
    else:
        table = filename.split(".")[0].split("/")[-1]
    # Restored the filename interpolation (the literal "(unknown)" placeholder
    # appeared to be a lost f-string field).
    info(f"Uploading {filename} to {database}.{schema}.{table}")
    dw_uploader(engine, table=table, data=csv_data, schema=schema, truncate=True)
if __name__ == "__main__":
basicConfig(stream=sys.stdout, level=20)
getLogger("snowflake.connector.cursor").disabled = True
Fire(
{
"sheets": sheet_loader,
"gcs": gcs_loader,
"s3": s3_loader,
"csv": csv_loader,
"qualtrics": qualtrics_loader,
}
)
info("Complete.")
|
'use strict'
import _ from 'lodash'
import config from 'config'
import nodeDebug from 'debug'
import Twit from 'twit'
import TweetsService from './TweetsService'
// Namespaced debug logger (enable with DEBUG=tweetwall:services:TweetsWorker).
const debug = nodeDebug('tweetwall:services:TweetsWorker')

// Twitter credentials, tracked hashtags and the retweet-processing flag,
// all read from the `tweetwall.twitter` config section.
const {
  auth: twitterAuth,
  hashtags: twitterHashtags,
  retweets
} = config.get('tweetwall.twitter')

// HTTP timeout passed to the Twit client: 60 seconds.
const timeout_ms = 60 * 1000
/**
 * Streaming worker: a Twit client that opens a statuses/filter stream for the
 * configured hashtags and persists matching tweets via TweetsService.
 */
class TweetsWorker extends Twit {
  /**
   * NOTE(review): the constructor returns the stream created by
   * startWorking(), so `new TweetsWorker` evaluates to the stream object
   * rather than the worker instance — confirm this is what importers expect.
   */
  constructor () {
    super(_.assign({}, twitterAuth, {timeout_ms}))
    this.lang = 'en'
    this.hashTags = twitterHashtags
    return this.startWorking()
  }

  /**
   * Open the filtered tweet stream and wire the tweet/error/warning/disconnect
   * handlers. Returns the stream.
   */
  startWorking () {
    const options = {
      track: this.hashTags,
      language: this.lang
    }
    debug('statuses/filter', options)
    return this.stream('statuses/filter', options)
      .on('tweet', (tweet) => {
        // Process only original tweets (retweeted_status empty) and only while
        // the `retweets` config flag is truthy — NOTE(review): confirm the
        // intended semantics of the `retweets` setting.
        if (retweets && _.isEmpty(tweet.retweeted_status)) {
          debug('processing tweet', tweet.id_str)
          TweetsService.putTweet(tweet)
            .then(() => {
              // Ensure a lead (tweet author) record exists, creating it if needed.
              const {user: lead} = tweet
              return TweetsService.isLeadExists(lead.id_str)
                .then((isExists) => {
                  if (!isExists) {
                    return TweetsService.putLead(lead)
                      .then(() => {
                        return lead
                      })
                  }
                  return lead
                })
            })
            .then((lead) => {
              return TweetsService.increaseLeadTweetCount(lead.id_str)
            })
        } else {
          debug('not processed', tweet.id_str)
        }
      })
      .on('error', (err) => {
        debug('worker error', err)
      })
      .on('warning', (err) => {
        debug('worker warning', err)
      })
      .on('disconnect', (err) => {
        debug('worker disconnect', err)
      })
  }
}

export default new TweetsWorker
|
def sum_of_numbers(n):
    """Return the sum 0 + 1 + ... + n for a non-negative integer n.

    Args:
        n (int): upper bound of the sum, inclusive.

    Returns:
        int: the triangular number n * (n + 1) / 2.

    Raises:
        ValueError: if n is negative (the previous recursive version never
            terminated for negative input).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    # Closed form: avoids both the recursion-depth limit the recursive
    # version hit for large n and the O(n) call overhead.
    return n * (n + 1) // 2
#!/bin/bash
#--------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#--------------------------------------------------------------------------------------------------------------
# Merge several side-by-side .NET SDK installs under $splitSdksDir into one
# $allSdksDir layout by symlinking each version-specific folder.
set -ex
splitSdksDir="/opt/dotnet/sdks"
allSdksDir="/home/vsonline/.dotnet"
mkdir -p "$allSdksDir"
# Copy latest muxer and license files
cp -f "$splitSdksDir/3/dotnet" "$allSdksDir"
cp -f "$splitSdksDir/3/LICENSE.txt" "$allSdksDir"
cp -f "$splitSdksDir/3/ThirdPartyNotices.txt" "$allSdksDir"
# Link every folder named after the given SDK or runtime version from the
# split layout into the merged layout, preserving relative paths.
function createLinks() {
local sdkVersion="$1"
local runtimeVersion="$2"
cd "$splitSdksDir/$sdkVersion"
# Find folders with name as sdk or runtime version
find . -name "$sdkVersion" -o -name "$runtimeVersion" | while read subPath; do
# Trim beginning 2 characters from the line which currently looks like, for example, './sdk/2.2.402'
subPath="${subPath:2}"
linkFrom="$allSdksDir/$subPath"
linkFromParentDir=$(dirname $linkFrom)
mkdir -p "$linkFromParentDir"
linkTo="$splitSdksDir/$sdkVersion/$subPath"
# NOTE(review): $linkTo/$linkFrom are unquoted and ln lacks -f; re-running the
# script or paths containing spaces would fail under `set -e` -- confirm.
ln -s $linkTo $linkFrom
done
}
createLinks "3.1.201" "3.1.0"
echo
createLinks "3.0.103" "3.0.1"
echo
createLinks "2.2.402" "2.2.7"
echo
createLinks "2.1.805" "2.1.13"
echo
createLinks "1.1.14" "1.1.13"
#! /bin/bash
# SLURM batch script: runs SDE-Net MNIST training.
# One node (node1_3060), one task with 12 CPUs, "compute" partition with the
# debug QOS, 24 h wall-clock limit; stdout goes to result/sdenet_train.out.
#SBATCH -J sdenet
#SBATCH -o result/sdenet_train.out
#SBATCH -p compute
#SBATCH --qos=debug
#SBATCH -N 1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=12
#SBATCH -w node1_3060
#SBATCH -t 24:00:00
python sdenet_mnist.py
#!/bin/bash
# Install the named software via apt if it is not already on PATH.

# Name of the software
software_name="java"

# Path of the software (empty when not installed)
soft_path=$(which "$software_name")

# Check to see if the software is already installed
if [ -z "$soft_path" ]; then
    # Install the software; -y answers the confirmation prompt so the
    # script can run unattended (previously it would block waiting for input).
    sudo apt-get install -y "$software_name"
fi
<gh_stars>10-100
# Copyright (c) 2017-2019 <NAME>
#
# SPDX-License-Identifier: BSD-3-Clause
# The BSD-3-Clause license for this file can be found in the LICENSE file included with this distribution
# or at https://spdx.org/licenses/BSD-3-Clause.html#licenseText
from struct import pack, unpack_from
from hashlib import sha256
from .misc import modulus_fmt
from .header import SegTag, Header
from .commands import EnumAlgorithm
class SecretKeyBlob(object):
    """ Secret Key Blob: a 4-byte (mode, algorithm, size, flag) header
    followed by the wrapped key data. """

    @property
    def mode(self):
        return self._mode

    @mode.setter
    def mode(self, value):
        # assert value
        self._mode = value

    @property
    def algorithm(self):
        return self._alg

    @algorithm.setter
    def algorithm(self, value):
        # assert value
        self._alg = value

    @property
    def flag(self):
        return self._flg

    @flag.setter
    def flag(self, value):
        # assert value
        self._flg = value

    @property
    def blob(self):
        return self._data

    @blob.setter
    def blob(self, value):
        assert isinstance(value, (bytes, bytearray))
        self._data = value

    @property
    def size(self):
        # Total serialized size: 4-byte header plus the blob data.
        # NOTE(review): packed as a single byte in export(), so the blob is
        # assumed to be at most 251 bytes -- confirm.
        return len(self._data) + 4

    def __init__(self, mode, algorithm, flag):
        self._mode = mode
        self._alg = algorithm
        self._flg = flag
        self._data = bytearray()

    def __repr__(self):
        return "SecKeyBlob <Mode: {}, Algo: {}, Flag: 0x{:02X}, Size: {}>".format(self.mode, self.algorithm,
                                                                                  self.flag, len(self._data))

    def __eq__(self, obj):
        if not isinstance(obj, SecretKeyBlob):
            return False
        if self.mode != obj.mode or \
           self.algorithm != obj.algorithm or \
           self.flag != obj.flag:
            return False
        if self.blob != obj.blob:
            return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def info(self):
        msg = "-" * 60 + "\n"
        msg += "SecKeyBlob\n"
        msg += "-" * 60 + "\n"
        msg += "Mode: {}\n".format(self.mode)
        msg += "Algorithm: {}\n".format(self.algorithm)
        msg += "Flag: 0x{:02X}\n".format(self.flag)
        msg += "Size: {} Bytes\n".format(len(self._data))
        return msg

    def export(self):
        raw_data = pack("4B", self.mode, self.algorithm, self.size, self.flag)
        raw_data += bytes(self._data)
        return raw_data

    @classmethod
    def parse(cls, data, offset=0):
        """Parse a SecretKeyBlob from `data` starting at `offset`."""
        (mode, alg, size, flg) = unpack_from("4B", data, offset)
        offset += 4
        obj = cls(mode, alg, flg)
        # `size` counts the 4-byte header as well (see the `size` property and
        # export()), so the payload is size - 4 bytes. Slicing by the full
        # `size` over-read 4 bytes whenever more data followed the blob.
        obj.blob = data[offset: offset + size - 4]
        return obj
class Certificate(object):
    """HAB certificate segment: a CRT-tagged header plus opaque cert data."""

    @property
    def version(self):
        # Format version stored in the header param byte (e.g. 0x40 = v4.0).
        return self._header.param

    @property
    def size(self):
        return Header.SIZE + len(self._data)

    def __init__(self, version=0x40, data=None):
        self._header = Header(tag=SegTag.CRT, param=version)
        self._data = bytearray() if data is None else bytearray(data)

    def __repr__(self):
        return "Certificate <Ver: {:X}.{:X}, Size: {}>".format(self.version >> 4, self.version & 0xF, len(self._data))

    def __eq__(self, obj):
        if not isinstance(obj, Certificate):
            return False
        if self.version != obj.version:
            return False
        # Compare lengths first: the element-wise loop alone raised IndexError
        # for a shorter `obj` and wrongly matched a longer one on its prefix.
        if len(self._data) != len(obj):
            return False
        for i, value in enumerate(self._data):
            if obj[i] != value:
                return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __iter__(self):
        return self._data.__iter__()

    def info(self):
        msg = "-" * 60 + "\n"
        msg += "Certificate (Ver: {:X}.{:X}, Size: {})\n".format(self.version >> 4, self.version & 0xF, len(self._data))
        msg += "-" * 60 + "\n"
        return msg

    def export(self):
        # Header length covers the header itself plus the data.
        self._header.length = self.size
        raw_data = self._header.export()
        raw_data += self._data
        return raw_data

    @classmethod
    def parse(cls, data, offset=0):
        """Parse a Certificate segment from `data` starting at `offset`."""
        header = Header.parse(data, offset, SegTag.CRT)
        offset += Header.SIZE
        return cls(header.param, data[offset: offset + header.length - Header.SIZE])
class Signature(object):
    """HAB signature segment: a SIG-tagged header plus opaque signature data."""

    @property
    def version(self):
        # Format version stored in the header param byte (e.g. 0x40 = v4.0).
        return self._header.param

    @property
    def size(self):
        return Header.SIZE + len(self._data)

    def __init__(self, version=0x40, data=None):
        self._header = Header(tag=SegTag.SIG, param=version)
        self._data = bytearray() if data is None else bytearray(data)

    def __repr__(self):
        return "Signature <Ver: {:X}.{:X}, Size: {}>".format(self.version >> 4, self.version & 0xF, len(self._data))

    def __eq__(self, obj):
        if not isinstance(obj, Signature):
            return False
        if self.version != obj.version:
            return False
        # Compare lengths first: the element-wise loop alone raised IndexError
        # for a shorter `obj` and wrongly matched a longer one on its prefix.
        if len(self._data) != len(obj):
            return False
        for i, value in enumerate(self._data):
            if obj[i] != value:
                return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __iter__(self):
        return self._data.__iter__()

    def info(self):
        msg = "-" * 60 + "\n"
        msg += "Signature (Ver: {:X}.{:X}, Size: {})\n".format(self.version >> 4, self.version & 0xF, len(self._data))
        msg += "-" * 60 + "\n"
        return msg

    def export(self):
        # Header length covers the header itself plus the data.
        self._header.length = self.size
        raw_data = self._header.export()
        raw_data += self._data
        return raw_data

    @classmethod
    def parse(cls, data, offset=0):
        """Parse a Signature segment from `data` starting at `offset`."""
        header = Header.parse(data, offset, SegTag.SIG)
        offset += Header.SIZE
        return cls(header.param, data[offset: offset + header.length - Header.SIZE])
class MAC(object):
    """HAB MAC segment: header, nonce/MAC byte counts, and their data."""

    @property
    def version(self):
        return self._header.param

    @property
    def size(self):
        # Header + 4 count/reserved bytes + nonce/MAC payload.
        return Header.SIZE + 4 + len(self._data)

    def __init__(self, version=0x40, nonce_bytes=0, mac_bytes=0, data=None):
        self._header = Header(tag=SegTag.MAC, param=version)
        self.nonce_bytes = nonce_bytes
        self.mac_bytes = mac_bytes
        self._data = bytearray() if data is None else bytearray(data)

    def __repr__(self):
        return "MAC <Ver: {:X}.{:X}, Nonce: {}, MAC: {}>".format(self.version >> 4, self.version & 0xF,
                                                                 self.nonce_bytes, self.mac_bytes)

    def __eq__(self, obj):
        if not isinstance(obj, MAC):
            return False
        if self.version != obj.version or \
           self.nonce_bytes != obj.nonce_bytes or \
           self.mac_bytes != obj.mac_bytes:
            return False
        # Compare lengths first: the element-wise loop alone raised IndexError
        # for a shorter `obj` and wrongly matched a longer one on its prefix.
        if len(self._data) != len(obj):
            return False
        for i, value in enumerate(self._data):
            if obj[i] != value:
                return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __iter__(self):
        return self._data.__iter__()

    def info(self):
        msg = "-" * 60 + "\n"
        msg += "MAC (Version: {:X}.{:X})\n".format(self.version >> 4, self.version & 0xF)
        msg += "-" * 60 + "\n"
        msg += "Nonce Len: {} Bytes\n".format(self.nonce_bytes)
        msg += "MAC Len: {} Bytes\n".format(self.mac_bytes)
        msg += "[{}]\n".format(self._data)
        return msg

    def export(self):
        self._header.length = self.size
        raw_data = self._header.export()
        raw_data += pack(">4B", 0, self.nonce_bytes, 0, self.mac_bytes)
        raw_data += bytes(self._data)
        return raw_data

    @classmethod
    def parse(cls, data, offset=0):
        """Parse a MAC segment from `data` starting at `offset`."""
        header = Header.parse(data, offset, SegTag.MAC)
        # The (reserved, nonce_len, reserved, mac_len) bytes follow the header
        # (see export()); unpacking at `offset` re-read the header bytes and
        # produced bogus nonce/MAC lengths.
        (_, nonce_bytes, _, mac_bytes) = unpack_from(">4B", data, offset + Header.SIZE)
        offset += Header.SIZE + 4
        return cls(header.param, nonce_bytes, mac_bytes, data[offset: offset + header.length - (Header.SIZE + 4)])
class SrkItem(object):
    """One Super Root Key (SRK) table entry: an RSA public key
    (modulus + exponent) with an algorithm id and a CA flag."""

    # Header tag byte identifying an SRK record.
    SRK_TAG = 0xE1

    @property
    def algorithm(self):
        # Signature algorithm id, stored as the header's param byte.
        return self._header.param

    @property
    def flag(self):
        return self._flag

    @flag.setter
    def flag(self, value):
        # 0x80 marks a CA key, 0 a non-CA key.
        assert value in (0, 0x80)
        self._flag = value

    @property
    def key_length(self):
        # Key length in bits, derived from the modulus byte length.
        return len(self.modulus) * 8

    @property
    def size(self):
        # Header + 8 fixed bytes (3 reserved, flag, two 16-bit lengths) + key material.
        return Header.SIZE + 8 + len(self.modulus) + len(self.exponent)

    def __init__(self, modulus, exponent, flag=0, algorithm=EnumAlgorithm.PKCS1):
        assert isinstance(modulus, bytes)
        assert isinstance(exponent, bytes)
        self._header = Header(tag=self.SRK_TAG, param=algorithm)
        self.flag = flag
        self.modulus = modulus
        self.exponent = exponent

    def __repr__(self):
        return "SRK <Algorithm: {}, CA: {}>".format(EnumAlgorithm[self.algorithm], 'YES' if self.flag == 0x80 else 'NO')

    def __eq__(self, obj):
        if not isinstance(obj, SrkItem):
            return False
        if self.algorithm != obj.algorithm or \
           self.flag != obj.flag or \
           self.key_length != obj.key_length or \
           self.modulus != obj.modulus or \
           self.exponent != obj.exponent:
            return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def info(self):
        # Multi-line human-readable dump of the key parameters.
        msg = str()
        msg += "Algorithm: {}\n".format(EnumAlgorithm[self.algorithm])
        msg += "Flag: 0x{:02X} {}\n".format(self.flag, '(CA)' if self.flag == 0x80 else '')
        msg += "Length: {} bit\n".format(self.key_length)
        msg += "Modulus:\n"
        msg += modulus_fmt(self.modulus)
        msg += "\n"
        msg += "Exponent: {0} (0x{0:X})\n".format(int.from_bytes(self.exponent, 'big'))
        return msg

    def export(self):
        # Layout: header | 3 reserved zero bytes | flag | modulus_len (u16 BE)
        # | exponent_len (u16 BE) | modulus | exponent.
        self._header.length = self.size
        data = self._header.export()
        data += pack(">4B2H", 0, 0, 0, self.flag, len(self.modulus), len(self.exponent))
        data += bytes(self.modulus)
        data += bytes(self.exponent)
        return data

    @classmethod
    def parse(cls, data, offset=0):
        """ Parse segment from bytes array
        :param data: The bytes array of SRK segment
        :param offset: The offset of input data
        :return SrkItem object
        """
        header = Header.parse(data, offset, cls.SRK_TAG)
        # Skip the header and the 3 reserved zero bytes (see export()).
        offset += Header.SIZE + 3
        (flag, modulus_len, exponent_len) = unpack_from(">B2H", data, offset)
        offset += 5
        modulus = data[offset: offset + modulus_len]
        offset += modulus_len
        exponent = data[offset: offset + exponent_len]
        return cls(modulus, exponent, flag, header.param)

    @classmethod
    def from_certificate(cls, cert):
        """Build an SrkItem from an x509 certificate's RSA public key; the CA
        flag is set when the cert's keyUsage extension allows keyCertSign."""
        from cryptography import x509
        assert isinstance(cert, x509.Certificate)
        flag = 0
        for extension in cert.extensions:
            if extension.oid._name == 'keyUsage':
                if extension.value.key_cert_sign:
                    flag = 0x80
        # get modulus and exponent of public key
        pub_key_numbers = cert.public_key().public_numbers()
        # Round the bit lengths up to whole bytes for the big-endian encoding.
        modulus_len = pub_key_numbers.n.bit_length() // 8
        if pub_key_numbers.n.bit_length() % 8:
            modulus_len += 1
        exponent_len = pub_key_numbers.e.bit_length() // 8
        if pub_key_numbers.e.bit_length() % 8:
            exponent_len += 1
        modulus = pub_key_numbers.n.to_bytes(modulus_len, "big")
        exponent = pub_key_numbers.e.to_bytes(exponent_len, "big")
        return cls(modulus, exponent, flag)
class SrkTable(object):
    """Table of SRK public keys wrapped in a CRT-tagged header."""

    @property
    def version(self):
        return self._header.param

    @property
    def size(self):
        # Header plus the serialized size of every key.
        size = Header.SIZE
        for key in self._keys:
            size += key.size
        return size

    def __init__(self, version=0x40):
        self._header = Header(tag=SegTag.CRT, param=version)
        self._keys = []

    def __repr__(self):
        return "SRK_Table <Version: {:X}.{:X}, Keys: {}>".format(self.version >> 4, self.version & 0xF, len(self._keys))

    def __eq__(self, obj):
        if not isinstance(obj, SrkTable):
            return False
        if self.version != obj.version:
            return False
        # Membership alone treated a table with extra keys as equal to its
        # subset; the key counts must match as well.
        if len(self._keys) != len(obj):
            return False
        for key in obj:
            if key not in self._keys:
                return False
        return True

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __len__(self):
        return len(self._keys)

    def __getitem__(self, key):
        return self._keys[key]

    def __setitem__(self, key, value):
        assert isinstance(value, SrkItem)
        self._keys[key] = value

    def __iter__(self):
        return self._keys.__iter__()

    def info(self):
        msg = "-" * 60 + "\n"
        msg += "SRK Table (Version: {:X}.{:X}, Keys: {})\n".format(self.version >> 4, self.version & 0xF, len(self._keys))
        msg += "-" * 60 + "\n"
        for i, srk in enumerate(self._keys):
            msg += "Key Index: {} \n".format(i)
            msg += srk.info()
            msg += "\n"
        return msg

    def append(self, srk):
        self._keys.append(srk)

    def export_fuses(self):
        """Return the SRK fuse pattern: SHA-256 over the concatenated SHA-256
        digests of each exported key."""
        data = b''
        for srk in self._keys:
            srk_data = srk.export()
            data += sha256(srk_data).digest()
        return sha256(data).digest()

    def export(self):
        self._header.length = self.size
        raw_data = self._header.export()
        for srk in self._keys:
            raw_data += srk.export()
        return raw_data

    @classmethod
    def parse(cls, data, offset=0):
        """Parse an SRK table from `data`, consuming keys until the header's
        declared length is exhausted."""
        header = Header.parse(data, offset, SegTag.CRT)
        offset += Header.SIZE
        obj = cls(header.param)
        length = header.length - Header.SIZE
        while length > 0:
            srk = SrkItem.parse(data, offset)
            offset += srk.size
            length -= srk.size
            obj.append(srk)
        return obj
|
<gh_stars>100-1000
import * as React from "react";
import Automation from "../../_helpers/automation-attribute";
import styled from "../../styled";
import { colors, fonts } from "../../tokens";
/** Shared base element for all heading levels; size variants override font-size. */
const BaseHeading = styled.h1`
  margin: 1em 0;
  color: ${colors.text.default};
  font-weight: ${fonts.weight.normal};
  line-height: 1.3;
`;
export interface IHeadingProps {
  /** HTML ID of the component */
  id?: string;
  /** Heading level 1–4 (maps to h1–h4); defaults to 1 via defaultProps. */
  size?: 1 | 2 | 3 | 4;
  children?: React.ReactNode;
  onClick?: Function;
}
// Renders the styled heading element matching props.size.
// NOTE(review): {...props} also spreads `size` onto the styled element —
// confirm styled-components filters it before it reaches the DOM.
const Heading = (props: IHeadingProps) => {
  const Component = Heading.Element[props.size];
  return (
    <Component {...Automation("heading")} {...props}>
      {props.children}
    </Component>
  );
};
// Index 0 is intentionally unused; sizes 1–4 select the matching hN tag.
Heading.Element = [];
Heading.Element[1] = styled(BaseHeading).attrs({ as: "h1" })`
  font-size: 36px;
`;
Heading.Element[2] = styled(BaseHeading).attrs({ as: "h2" })`
  font-size: 24px;
  font-weight: ${fonts.weight.medium};
`;
Heading.Element[3] = styled(BaseHeading).attrs({ as: "h3" })`
  font-size: 18px; /* TO-DO: tokenize */
  font-weight: ${fonts.weight.bold};
`;
Heading.Element[4] = styled(BaseHeading).attrs({ as: "h4" })`
  font-size: 14px;
  font-weight: ${fonts.weight.medium};
`;
// Exposed so callers can style a specific level directly.
const StyledHeading = Heading.Element;
Heading.defaultProps = {
  size: 1,
  children: null
};
|
/**
* @typedef {import('../store/initialState').DefaultSessionData} DefaultSessionData
*/
/**
 * @param {number} milliseconds Expiration date
 * @returns {boolean} Returns if still valid.
 */
const isExpirationValid = (milliseconds) => Date.now() < milliseconds;
/**
 * @param {string} token Token
 * @param {DefaultSessionData} sessionData Session data
 * @returns {boolean} Returns if the session is valid.
 */
export const verifySessionData = (token, sessionData) => {
  // A session is valid only when a token exists, session data exists,
  // an expiry is present, and that expiry is still in the future.
  const hasExpiry = Boolean(token) && Boolean(sessionData) && Boolean(sessionData.validUntil);
  return hasExpiry && isExpirationValid(sessionData.validUntil);
};
|
#include "Utils.hh"
#include "Types.hh"
#include "HiveMind.hh"
#include "Colony.hh"
#include "Z.hh"
#include <iostream>
#include <fstream>
#include <random>
#include <thread>
#include <mutex>
// TODO: Precompiled header
void PrintMenu();
// Entry point: loads the simulation definitions from JSON, launches the
// simulation thread, and runs the interactive CLI menu loop until 'q'.
int main(int argc, char* argv[])
{
    try
    {
        std::ifstream fin("data/definitions.json");
        nlohmann::json json(nlohmann::json::parse(fin, nullptr, true /* allow exceptions */, true /* ignore comments */));
        for (const auto& jso : json["resources"])
        {
            ResourceDef def;
            jso.get_to(def);
            ResourceDef::Add(def);
        }
        for (const auto& jso : json["disciplines"])
        {
            Discipline def;
            jso.get_to(def);
            Discipline::Add(def);
        }
        for (const auto& jso : json["productions"])
        {
            ProductionDef def;
            jso.get_to(def);
            ProductionDef::Add(def);
        }
        for (const auto& jso : json["zoningRestrictions"])
        {
            ZoningRestriction def;
            jso.get_to(def);
            ZoningRestriction::Add(def);
        }
        for (const auto& jso : json["buildings"])
        {
            BuildingDef def;
            jso.get_to(def);
            BuildingDef::Add(def);
        }
    }
    catch (const std::exception& e)
    {
        // FIX: message previously said "definiations.json" (typo) and did not
        // match the actual file name loaded above.
        std::cerr << "!!! Failed to parse definitions.json\n" << e.what() << std::endl;
        return 1;
    }
    Z z;
    Colony colony(z);
    HiveMind hiveMind(z);
    std::random_device random;
    // FIX: initialize 'then' to "now" so the first time.Advance() call does not
    // see an enormous delta measured from the clock's epoch.
    std::chrono::high_resolution_clock::time_point then(std::chrono::high_resolution_clock::now());
    std::mutex cliMutex;
    // Simulation thread: advances the colony/hive-mind in real time.
    // NOTE(review): the loop re-acquires cliMutex every iteration with no
    // sleep, which can starve the CLI thread — confirm intended behaviour.
    std::jthread sim([&](std::stop_token st)
    {
        TimeStep time
        {
            .now = Duration(0),
        };
        while (!st.stop_requested())
        {
            const std::lock_guard lock(cliMutex);
            const auto now(std::chrono::high_resolution_clock::now());
            time.Advance(now - then);
            colony.Update(time);
            hiveMind.Update(time);
            then = now;
        }
    });
    // Interactive menu loop; holds cliMutex while handling a command so the
    // simulation state cannot change mid-print.
    while (true)
    {
        PrintMenu();
        const auto key(std::cin.get());
        std::cin.get(); // remove enter
        const std::lock_guard lock(cliMutex);
        switch (key)
        {
        case '1':
            std::cout << z << "\n";
            break;
        case '2':
            std::cout << "Resources:\n" << ResourceDef::Definitions << "\n";
            std::cout << "Disciplines:\n" << Discipline::Definitions << "\n";
            std::cout << "Productions:\n" << ProductionDef::Definitions << "\n";
            std::cout << "Zoning restrictions:\n" << ZoningRestriction::Definitions << "\n";
            std::cout << "Buildings:\n" << BuildingDef::Definitions << "\n";
            break;
        case '3':
            z.Add(Citizen{});
            break;
        case '4':
            if (BuildingDef::Count() < 1)
            {
                std::cout << "No building definitions available";
                break;
            }
            // Pick a random building definition (ids are 1-based).
            colony.Provision(BuildingDef::Get(BuildingDef::Id(random() % BuildingDef::Count() + 1)));
            break;
        case 'q':
        case 'Q':
            // request_stop() ends the sim loop; jthread joins on destruction.
            sim.request_stop();
            return 0;
        default:
            std::cout << "Unknown key: '" << (char)key << "' (" << key << ")";
        }
        std::cout << std::endl;
    }
    return 0;
}
// Print the interactive menu of simulation commands to stdout.
void PrintMenu()
{
    static const char* const menu =
        "Simulation:"
        "\n--------"
        "\n1: State"
        "\n2: Definitions"
        "\n3: Provision citizen"
        "\n4: Provision random building"
        "\nQ: Quit"
        "\n";
    std::cout << menu;
}
# Watch the workspace and re-run `cargo run` on source changes, ignoring the
# frontend and migrations directories.
cargo watch --ignore frontend --ignore migrations -x run
<gh_stars>10-100
// the angular2-template-loader translates templateUrl into require() calls,
// so an ambient declaration of require is needed for type checking.
declare function require(id: string): any;
// set using the Webpack DefinePlugin (build-time constants injected globally)
declare var build: {
    mode: string;
    target: string;
};
|
<reponame>ukoloff/docpad
// Auto-generated Doxygen navigation data for struct dbs::i::r26:
// entries are [ display name, target href (member anchor), subtree ] triples.
var structdbs_1_1i_1_1_r26 =
[
    [ "name", "structdbs_1_1i_1_1_r26.html#a612a39e36f67da721ba0ac0038d31cd9", null ],
    [ "name", "structdbs_1_1i_1_1_r26.html#a2dc70c51881c2808d727a905ce6ed1a0", null ],
    [ "partid", "structdbs_1_1i_1_1_r26.html#aeaef0161ba244cb90e8f62c76f308af0", null ]
];
<filename>src/main/java/org/codingmatters/tests/reflect/matchers/impl/FieldMatcherImpl.java
package org.codingmatters.tests.reflect.matchers.impl;
import org.codingmatters.tests.reflect.matchers.FieldMatcher;
import org.codingmatters.tests.reflect.matchers.TypeMatcher;
import org.codingmatters.tests.reflect.matchers.support.MatcherChain;
import org.codingmatters.tests.reflect.matchers.support.MemberDeleguate;
import org.codingmatters.tests.reflect.matchers.support.ReflectMatcherConfiguration;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import java.lang.reflect.Field;
/**
 * Hamcrest matcher for {@link java.lang.reflect.Field}, configurable by name,
 * modifiers, and (generic) type. Criteria are accumulated in a MatcherChain
 * and evaluated together.
 * <p>
 * Created by nelt on 9/11/16.
 */
public class FieldMatcherImpl extends TypeSafeMatcher<Field> implements FieldMatcher {
    /** Factory entry point: creates a matcher pre-configured with the builder's level/access modifiers. */
    static public FieldMatcher aField(ReflectMatcherConfiguration builder) {
        return new FieldMatcherImpl().configure(builder);
    }
    private final MatcherChain<Field> matchers = new MatcherChain<>();
    private final MemberDeleguate<FieldMatcher> memberDeleguate;
    private FieldMatcherImpl() {
        this.memberDeleguate = new MemberDeleguate<>(this.matchers);
    }
    /** Requires the field to have the given name. */
    @Override
    public FieldMatcher named(String name) {
        return this.memberDeleguate.named(name, this);
    }
    /** Requires the field to be declared final. */
    @Override
    public FieldMatcher final_() {
        return this.memberDeleguate.final_(this);
    }
    /** Requires the field's raw type to equal the given class exactly. */
    @Override
    public FieldMatcher withType(Class type) {
        this.matchers.addMatcher("field type", item -> item.getType().equals(type));
        return this;
    }
    /** Requires the field's generic type to satisfy the given type matcher. */
    @Override
    public FieldMatcher withType(TypeMatcher typeMatcher) {
        this.matchers.addMatcher(
                "field type",
                item -> typeMatcher.matches(item.getGenericType()),
                (item, description) -> description.appendDescriptionOf(typeMatcher)
        );
        return this;
    }
    @Override
    protected boolean matchesSafely(Field aField) {
        return matchers.compoundMatcher().matches(aField);
    }
    @Override
    public void describeTo(Description description) {
        // FIX: description previously read "filed" (typo for "field").
        description.appendText("field");
        this.matchers.compoundMatcher().describeTo(description);
    }
    @Override
    protected void describeMismatchSafely(Field item, Description mismatchDescription) {
        this.matchers.compoundMatcher().describeMismatch(item, mismatchDescription);
    }
    /** Applies the builder's level (static/instance) and access modifiers to this matcher. */
    private FieldMatcher configure(ReflectMatcherConfiguration builder) {
        builder.levelModifier().apply(this.memberDeleguate, this);
        builder.accessModifier().apply(this.memberDeleguate, this);
        return this;
    }
}
|
#include <stdio.h>
#include <stdlib.h>
/* Sum two integer command-line arguments and print the result. */
int main (int argc, char * argv[]) {
    /* FIX: guard against missing arguments — dereferencing argv[1]/argv[2]
     * without this check is undefined behaviour. (atoi also requires
     * <stdlib.h>, added to the include block.) */
    if (argc < 3) {
        fprintf(stderr, "usage: %s <number1> <number2>\n", argv[0]);
        return 1;
    }
    int number1 = atoi(argv[1]);
    int number2 = atoi(argv[2]);
    printf("The sum is %d", number1 + number2);
    return 0;
}
#include <vector>
// Accumulating stack over std::vector: pushing without `override` stores the
// combination of the current top and the new element (via nextStack).
template<class T>
class Stack {
private:
    std::vector<T> m_Stack;   // underlying storage; elements are never popped here
    unsigned int m_Index;     // index of the most recently pushed element
public:
    Stack() : m_Index(0) {}
    // Push obj as-is when `override` is true or the stack is empty; otherwise
    // push nextStack(top, obj). NOTE(review): `override` is a contextual C++
    // keyword — legal as a parameter name, but consider renaming.
    void push(const T& obj, bool override) {
        if (override || m_Stack.size() == 0)
            m_Stack.push_back(obj);
        else
            m_Stack.push_back(nextStack(m_Stack[m_Stack.size() - 1], obj));
        m_Index = (unsigned int)m_Stack.size() - 1;
    }
    // NOTE(review): this body has no return statement — flowing off the end of
    // a non-void function is undefined behaviour; implement before any
    // non-override push is executed.
    T nextStack(const T& elem1, const T& elem2) {
        // Implement the logic to combine elem1 and elem2 to produce a new element of type T
        // Example: return elem1 + elem2; // for combining two numbers
        // Example: return elem1.concat(elem2); // for combining two strings
        // Add the appropriate logic based on the data type T
    }
    // Add any other necessary functions or components to support the described functionalities
};
<gh_stars>10-100
package test161
import (
"fmt"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"strings"
)
// TestEnvironment encapsulates the environment tests run in. Much of the
// environment is global - commands, targets, etc. However, some state
// is local, such as the secure keyMap and OS/161 root directory.
type TestEnvironment struct {
	// These do not depend on the TestGroup/Target
	TestDir string
	Commands map[string]*CommandTemplate
	Targets map[string]*Target
	// Optional - added in version 1.2.6
	Tags map[string]*TagDescription
	manager *manager
	CacheDir string
	OverlayRoot string
	KeyDir string
	Persistence PersistenceManager
	Log *log.Logger
	// These depend on the TestGroup/Target
	keyMap map[string]string
	RootDir string
}
// Create a new TestEnvironment by copying the global state from an existing
// environment. Local test state will be initialized to default values.
func (env *TestEnvironment) CopyEnvironment() *TestEnvironment {
// Global
copy := *env
// Local
copy.keyMap = make(map[string]string)
copy.RootDir = ""
return ©
}
// envCommandHandler loads a single commands file (.tc) into the
// TestEnvironment. A command name seen twice is an error.
func envCommandHandler(env *TestEnvironment, f string) error {
	templates, err := CommandTemplatesFromFile(f)
	if err != nil {
		return err
	}
	for _, templ := range templates.Templates {
		// If we already know about the command, it's an error.
		if _, exists := env.Commands[templ.Name]; exists {
			return fmt.Errorf("Duplicate command (%v) in file %v", templ.Name, f)
		}
		env.Commands[templ.Name] = templ
	}
	return nil
}
// envTargetHandler loads a single targets file (.tt) into the TestEnvironment.
// Only the newest version of each active target is kept, and the persistence
// layer (when configured) is notified of the load.
func envTargetHandler(env *TestEnvironment, f string) error {
	t, err := TargetFromFile(f)
	if err != nil {
		return err
	}
	// Only track the most recent version, and only track active targets.
	if t.Active == "true" {
		prev, tracked := env.Targets[t.Name]
		if !tracked || t.Version > prev.Version {
			env.Targets[t.Name] = t
		}
	}
	if env.Persistence == nil {
		return nil
	}
	return env.Persistence.Notify(t, MSG_TARGET_LOAD, 0)
}
// envTagDescHandler loads a single tag description file (.td) into the
// TestEnvironment. A tag name seen twice is an error.
func envTagDescHandler(env *TestEnvironment, f string) error {
	tags, err := TagDescriptionsFromFile(f)
	if err != nil {
		return err
	}
	for _, tag := range tags.Tags {
		// If we already know about the tag, it's an error.
		if _, exists := env.Tags[tag.Name]; exists {
			return fmt.Errorf("Duplicate tag (%v) in file %v", tag.Name, f)
		}
		env.Tags[tag.Name] = tag
	}
	return nil
}
// envReadLoop searches a directory for regular files with a certain
// extension and calls handler() on each match, stopping at the first error.
func (env *TestEnvironment) envReadLoop(searchDir, ext string,
	handler func(env *TestEnvironment, f string) error) error {
	entries, err := ioutil.ReadDir(searchDir)
	if err != nil {
		return err
	}
	for _, entry := range entries {
		// Skip directories/special files and files with the wrong extension.
		if !entry.Mode().IsRegular() || !strings.HasSuffix(entry.Name(), ext) {
			continue
		}
		if err := handler(env, filepath.Join(searchDir, entry.Name())); err != nil {
			return err
		}
	}
	return nil
}
// Create a new TestEnvironment from the given test161 directory. The directory
// must contain these subdirectories: commands/ targets/ tests/
// In addition to loading tests, commands, and targets, a logger is set up that
// writes to os.Stderr. This can be changed by changing env.Log.
func NewEnvironment(test161Dir string, pm PersistenceManager) (*TestEnvironment, error) {
	cmdDir := path.Join(test161Dir, "commands")
	testDir := path.Join(test161Dir, "tests")
	targetDir := path.Join(test161Dir, "targets")
	tagDir := path.Join(test161Dir, "tags")
	env := &TestEnvironment{
		TestDir: testDir,
		manager: testManager,
		Commands: make(map[string]*CommandTemplate),
		Targets: make(map[string]*Target),
		Tags: make(map[string]*TagDescription),
		keyMap: make(map[string]string),
		Log: log.New(os.Stderr, "test161: ", log.Ldate|log.Ltime|log.Lshortfile),
		Persistence: pm,
	}
	// Load targets, commands, and (optionally) tags concurrently; each
	// goroutine reports exactly one result on resChan.
	resChan := make(chan error)
	go func() {
		resChan <- env.envReadLoop(targetDir, ".tt", envTargetHandler)
	}()
	go func() {
		resChan <- env.envReadLoop(cmdDir, ".tc", envCommandHandler)
	}()
	// Tags are optional
	numExpected := 2
	if _, err := os.Stat(tagDir); err == nil {
		numExpected += 1
		go func() {
			resChan <- env.envReadLoop(tagDir, ".td", envTagDescHandler)
		}()
	}
	// Get the results
	var err error = nil
	for i := 0; i < numExpected; i++ {
		// Let the other finish, but just return one error
		temp := <-resChan
		if err == nil {
			err = temp
		}
	}
	// Link sub-/meta-targets only after all targets loaded successfully.
	if err == nil {
		err = env.linkMetaTargets()
	}
	return env, err
}
// linkMetaTargets wires subtargets to their metatargets and then validates
// the metatargets themselves. The two passes must run in this order: every
// subtarget is linked before any metatarget is initialized.
func (env *TestEnvironment) linkMetaTargets() error {
	// Pass 1: link each subtarget to its metatarget and sibling subtargets.
	for _, target := range env.Targets {
		if len(target.MetaName) == 0 {
			continue
		}
		if err := target.initAsSubTarget(env); err != nil {
			return err
		}
	}
	// Pass 2: validate the metatargets.
	for _, target := range env.Targets {
		if !target.IsMetaTarget {
			continue
		}
		if err := target.initAsMetaTarget(env); err != nil {
			return err
		}
	}
	return nil
}
// TargetList returns a summary list of all loaded targets, suitable for
// display or serialization.
func (env *TestEnvironment) TargetList() *TargetList {
	list := &TargetList{}
	list.Targets = make([]*TargetListItem, 0, len(env.Targets))
	for _, t := range env.Targets {
		list.Targets = append(list.Targets, &TargetListItem{
			Name: t.Name,
			Version: t.Version,
			PrintName: t.PrintName,
			Description: t.Description,
			Active: t.Active,
			Points: t.Points,
			Type: t.Type,
			FileName: t.FileName,
			FileHash: t.FileHash,
			// collabMsgs is a package-level lookup; missing names yield the
			// zero value.
			CollabMsg: collabMsgs[t.Name],
		})
	}
	return list
}
// notifyAndLogErr forwards a notification to the persistence layer (when one
// is configured) and logs any resulting error instead of propagating it.
func (env *TestEnvironment) notifyAndLogErr(desc string, entity interface{}, msg, what int) {
	if env.Persistence == nil {
		return
	}
	if err := env.Persistence.Notify(entity, msg, what); err != nil && env.Log != nil {
		env.Log.Printf("(%v) Error writing data: %v\n", desc, err)
	}
}
// SetNullLogger silences the environment's logger by stripping all
// formatting flags and discarding all output.
func (env *TestEnvironment) SetNullLogger() {
	env.Log.SetFlags(0)
	env.Log.SetOutput(ioutil.Discard)
}
|
def generate_debug_command(docstring, selectable):
    """Return the command used to start the server in debug mode.

    Parameters:
        docstring: unused; kept for interface compatibility.
        selectable: truthy when debug mode may be selected.

    Returns:
        str: "start_server --debug" when selectable, otherwise "".
    """
    return "start_server --debug" if selectable else ""
# Evaluate the 512+0+512-SS-N epoch-13 checkpoint on the WikiText-103
# validation set with batch size 1, applying the trigram-shuffle augmentation
# and the penultimate-quarter evaluation function named in the flags below.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-SS-N/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-SS-N/13-512+0+512-ST-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_trigrams_first_half_quarter --eval_function penultimate_quarter_eval
<form>
    <!-- FIX: associate each label with its input via for/id so clicking the
         label focuses the field and assistive technology announces it. -->
    <label for="name">Name:</label>
    <input type="text" id="name" name="name" />
    <label for="age">Age:</label>
    <input type="number" id="age" name="age" />
    <input type="submit" value="Submit" />
</form>
// Express-style request handler implementing the endpoint documented below.
declare const _default: (req: any, res: any) => Promise<void>;
/**
 * @oas [post] /orders/{id}/swaps/{swap_id}/fulfillments
 * operationId: "PostOrdersOrderSwapsSwapFulfillments"
 * summary: "Create a Swap Fulfillment"
 * description: "Creates a Fulfillment for a Swap."
 * x-authenticated: true
 * parameters:
 *   - (path) id=* {string} The id of the Order.
 *   - (path) swap_id=* {string} The id of the Swap.
 * requestBody:
 *   content:
 *     application/json:
 *       schema:
 *         properties:
 *           metadata:
 *             description: An optional set of key-value pairs to hold additional information.
 *             type: object
 *           no_notification:
 *             description: If set to true no notification will be send related to this Claim.
 *             type: boolean
 * tags:
 *   - Order
 * responses:
 *   200:
 *     description: OK
 *     content:
 *       application/json:
 *         schema:
 *           properties:
 *             order:
 *               $ref: "#/components/schemas/order"
 */
export default _default;
/** Request body accepted by POST /orders/{id}/swaps/{swap_id}/fulfillments. */
export declare class AdminPostOrdersOrderSwapsSwapFulfillmentsReq {
    metadata?: object;
    no_notification?: boolean;
}
|
<filename>main.go
// Package main initializes the cli of terragen
package main
import (
cli "github.com/nikhilsbhat/terragen/cmd"
)
// main is the application entry point; it delegates straight to the
// terragen CLI defined in the cmd package.
func main() {
	cli.Main()
}
|
import re
import requests
from .baseclient import BaseClient
class FishCClient(BaseClient):
    """Client for the FishC forum (fishc.com.cn)."""

    def __init__(self, reload_history=True, **kwargs):
        """Initialize the client with the fixed website name 'fishc'."""
        super(FishCClient, self).__init__(website_name='fishc', reload_history=reload_history, **kwargs)

    def checksessionstatus(self, session, infos_return):
        """Return True when the login session has expired, False when active.

        A 200 response containing the logged-in welcome banner means the
        session is still valid; anything else is treated as expired.
        """
        response = session.get('https://fishc.com.cn/')
        if response.status_code != 200:
            # Any non-200 response is treated as an expired/invalid session.
            return True
        # Absence of the welcome banner indicates the session has lapsed.
        return 'Welcome to FishC Forum' not in response.text
def sort_array(arr):
    """
    Sorts a list in ascending order, in place.

    Parameters:
        arr (list): the input list; it is modified in place.

    Returns:
        list: the same list object, sorted in ascending order.
    """
    # Use the built-in Timsort (O(n log n), stable) instead of the previous
    # hand-rolled O(n^2) selection sort; the in-place-mutation-and-return
    # contract is unchanged.
    arr.sort()
    return arr
#!/bin/bash
# Sort the script's positional arguments line-wise and print them on one line.
# FIX: quote "$@" — the unquoted form re-splits arguments and expands globs.
array=("$@")
IFS=$'\n' sorted=($(sort <<<"${array[*]}"))
unset IFS
echo "${sorted[@]}"
<reponame>girish-kamble/C-language
#include<stdio.h>
#include<assert.h>
/* Demonstrates assert(): the second assert intentionally fails and aborts. */
int main(){
    int a=3,b=3,c=0;
    c=a+b;
    assert(c); // assert() aborts the program when its argument is zero/false.
    // c == 6 here (non-zero), so execution continues past the assert.
    printf("the value is %d \n",c);
    c=a-b;
    // c is now 0, so this assert deliberately fails: assert() calls abort(),
    // producing a runtime error before the final printf can run.
    assert(c);
    printf("the value is %d \n",c);
    return 0;
}
|
<gh_stars>0
import makeStyles from '@material-ui/styles/makeStyles';
import TablePagination from '@material-ui/core/TablePagination';
import * as React from 'react';
import { ITbTableInstance } from 'tubular-react-common';
import { useResolutionSwitch } from 'uno-react';
import { AdvancePaginationActions } from './AdvancePaginationActions';
import Lang from '../utils/Lang';
// Styles constraining the pagination bar so it fits the grid footer.
const useStyles = makeStyles({
    caption: {
        flexShrink: 1,
        height: '55px',
    },
    root: {
        height: '75px',
        maxWidth: '95%',
    },
});
// Below this window width the mobile caption style is applied (with a debounce).
const outerWidth = 800;
const timeout = 400;
// Curried label builder for TablePagination: chooses the i18n message based on
// whether the record set is unfiltered, empty, or filtered.
const message =
    (totalRecordCount: number, filteredRecordCount: number) =>
    ({ from, to, count }: any) =>
        totalRecordCount === filteredRecordCount
            ? Lang.translate('Pages', from, to, count)
            : filteredRecordCount === 0
            ? Lang.translate('NoRecords')
            : Lang.translate('TotalRecords', from, to, count, totalRecordCount);
export interface PaginatorProps {
    /** Tubular table instance providing paging state and actions. */
    tbTableInstance: ITbTableInstance;
    /** Page-size choices; falls back to [10, 20, 50] when not provided. */
    rowsPerPageOptions: number[];
    /** Enables the advanced (first/last page) pagination actions. */
    advancePagination: boolean;
}
// Pagination footer wired to a tubular-react-common table instance.
export const Paginator: React.FunctionComponent<PaginatorProps> = ({
    tbTableInstance,
    rowsPerPageOptions,
    advancePagination,
}: PaginatorProps) => {
    const [isMobileResolution] = useResolutionSwitch(outerWidth, timeout);
    const classes = useStyles({});
    const { state, api } = tbTableInstance;
    // Render nothing until paging state has been initialized.
    if (!state.itemsPerPage) {
        return null;
    }
    const newProps = {
        count: state.filteredRecordCount,
        labelDisplayedRows: message(state.totalRecordCount, state.filteredRecordCount),
        onChangePage: (_e: any, page: number) => api.goToPage(page),
        onChangeRowsPerPage: (e: any) => api.updateItemsPerPage(Number(e.target.value)),
        page: state.filteredRecordCount > 0 ? state.page : 0,
        rowsPerPage: state.itemsPerPage,
        rowsPerPageOptions: rowsPerPageOptions || [10, 20, 50],
    } as any;
    // eslint-disable-next-line react/display-name
    // NOTE(review): newProps.isLoading is never assigned above, so isLoading
    // is always undefined here — confirm whether a loading flag was intended.
    newProps.ActionsComponent = () => (
        <AdvancePaginationActions
            count={newProps.count}
            isAdvanced={advancePagination}
            isLoading={newProps.isLoading}
            onChangePage={newProps.onChangePage}
            page={newProps.page}
            rowsPerPage={newProps.rowsPerPage}
        />
    );
    return (
        <TablePagination
            classes={{
                caption: isMobileResolution && classes.caption,
                root: classes.root,
            }}
            {...newProps}
        />
    );
};
|
use nalgebra::{Matrix, MatrixView};
// Newtype wrapping a weights matrix.
// NOTE(review): nalgebra's `Matrix` normally takes scalar/dimension/storage
// type parameters (there is no one-argument `Matrix<f64>` form), and a
// no-argument `.view()` returning `MatrixView<f64>` does not match the
// current nalgebra API — confirm this compiles against the version in use.
struct NeuralNetwork(Matrix<f64>);
impl NeuralNetwork {
    // Construct a network from a pre-built weights matrix.
    fn new(weights: Matrix<f64>) -> Self {
        NeuralNetwork(weights)
    }
    // Owned copy of the weights.
    fn weights(&self) -> Matrix<f64> {
        self.0.clone() // Return a clone of the weights matrix
    }
    // Borrowed, read-only view of the weights.
    fn weights_view(&self) -> MatrixView<f64> {
        self.0.view() // Return a read-only view of the weights matrix
    }
}
fn main() {
    // Create a neural network with random 3x2 weights.
    let weights = Matrix::new_random(3, 2);
    let neural_network = NeuralNetwork::new(weights);
    // Access the weights both as an owned clone and as a borrowed view.
    let all_weights = neural_network.weights();
    let weights_view = neural_network.weights_view();
    println!("All weights: \n{}", all_weights);
    println!("Weights view: \n{}", weights_view);
}
import { Message } from 'node-nats-streaming';
import { BaseListener, ItemUpdatedEvent, Subjects } from '../../common';
import { Item } from '../../models/item';
import { queueGroupName } from './queue-group-name';
// NATS-streaming listener that applies ItemUpdated events to the local Item
// collection; the message is only acked after a successful save so failed
// updates are redelivered.
export class ItemUpdatedListener extends BaseListener<ItemUpdatedEvent> {
  readonly subject = Subjects.ItemUpdated;
  queueGroupName = queueGroupName;

  // Applies the event's title/price to the matching item, saves it, and acks.
  // Throwing on a missing item leaves the message un-acked for redelivery.
  async onMessage(data: ItemUpdatedEvent['data'], msg: Message) {
    const item = await Item.findByEvent(data);

    if (!item) {
      throw new Error('Item not found');
    }

    const { title, price } = data;
    item.set({ title, price });
    await item.save();

    msg.ack();
  }
}
|
#!/bin/sh
# Download, build, and install perl into /usr/pkg (install logged with porg).
set -e
pkgname=perl
version=5.32.1
ext=tar.gz
url=ftp://ftp.jaist.ac.jp/pub/CPAN/authors/id/S/SH/SHAY/${pkgname}-${version}.${ext}
# Download the source archive unless it is already cached.
if [ ! -e archives/${pkgname}-${version}.${ext} ]; then
	# FIX: use ${ext} instead of the hard-coded .tar.gz so the extension
	# variable stays authoritative.
	ftp -o archives/${pkgname}-${version}.${ext} ${url}
else
	echo skip download
fi
echo extract ${pkgname}-${version}.${ext}...
tar -C src -xzf archives/${pkgname}-${version}.${ext}
echo finished.
CWD=`pwd`
cd src/${pkgname}-${version}
sh configure.gnu --prefix=/usr/pkg
make
# porg records the installed files so the package can be removed later.
sudo porg -lp ${pkgname}-${version} "make install"
cd $CWD
#!/bin/bash
set -eo pipefail
shopt -s nullglob
# if command starts with an option, prepend mysqld
if [ "${1:0:1}" = '-' ]; then
	set -- mysqld "$@"
fi
# skip setup if they want an option that stops mysqld (help/version queries)
wantHelp=
for arg; do
	case "$arg" in
		-'?'|--help|--print-defaults|-V|--version)
			wantHelp=1
			break
			;;
	esac
done
# usage: file_env VAR [DEFAULT]
#    ie: file_env 'XYZ_DB_PASSWORD' 'example'
# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
#  "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
file_env() {
	local var="$1"
	local fileVar="${var}_FILE"
	local def="${2:-}"
	# ${!var} is bash indirect expansion: the value of the variable named $var.
	# Setting both VAR and VAR_FILE is ambiguous, so it is a hard error.
	if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
		echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
		exit 1
	fi
	local val="$def"
	if [ "${!var:-}" ]; then
		val="${!var}"
	elif [ "${!fileVar:-}" ]; then
		# $(< file) reads the file contents without spawning cat.
		val="$(< "${!fileVar}")"
	fi
	export "$var"="$val"
	unset "$fileVar"
}
# Validate the mysqld configuration by running it in --verbose --help mode;
# on failure, print the collected errors and abort the entrypoint.
_check_config() {
	toRun=( "$@" --verbose --help --log-bin-index="$(mktemp -u)" )
	if ! errors="$("${toRun[@]}" 2>&1 >/dev/null)"; then
		cat >&2 <<-EOM
			ERROR: mysqld failed while attempting to check config
			command was: "${toRun[*]}"
			$errors
		EOM
		exit 1
	fi
}
# Fetch value from server config
# We use mysqld --verbose --help instead of my_print_defaults because the
# latter only show values present in config files, and not server defaults
_get_config() {
	local conf="$1"; shift
	# Print only the value column of the first matching option, then stop.
	"$@" --verbose --help --log-bin-index="$(mktemp -u)" 2>/dev/null | awk '$1 == "'"$conf"'" { print $2; exit }'
}
# On first run (datadir not yet populated), generate the master/slave
# replication config before handing control to the stock entrypoint.
if [ "$1" = 'mysqld' -a -z "$wantHelp" ]; then
	_check_config "$@"
	DATADIR="$(_get_config 'datadir' "$@")"
	if [ ! -d "$DATADIR/mysql" ]; then
		file_env "SERVER_ID" $RANDOM
		file_env "EXPIRE_LOGS_DAYS" "10"
		file_env "MAX_BINLOG_SIZE" "100M"
		if [ ! -z "$MYSQL_DATABASE" ]; then
			# @TODO
			# Move that config in other path
			# @see README.md
			CONFIG_FILE="/etc/mysql/conf.d/master-slave.cnf"
			cat <<-EOF > $CONFIG_FILE
			[mysqld]
			expire_logs_days = $EXPIRE_LOGS_DAYS
			max_binlog_size = $MAX_BINLOG_SIZE
			server_id = $SERVER_ID
			binlog_do_db = $MYSQL_DATABASE
			relay_log = /var/log/mysql/relay-bin.log
			relay_log_index = /var/log/mysql/relay-bin.index
			relay_log_info_file = /var/log/mysql/relay-bin.info
			log_bin = /var/log/mysql/mariadb-bin.log
			log_bin_index = /var/log/mysql/mariadb-bin.index
			binlog-ignore-db = information_schema
			binlog-ignore-db = mysql
			replicate-ignore-db = information_schema
			replicate-ignore-db = mysql
			binlog_format = ROW
			EOF
			if [ ! -z "$MYSQL_MASTER_HOST" ]; then
				if [ ! -z "$MYSQL_SLAVE_SKIP_ERRORS" ]; then
					cat <<-EOF >> $CONFIG_FILE
					slave-skip-errors = $MYSQL_SLAVE_SKIP_ERRORS
					EOF
				fi
				# @TODO
				# innodb_change_buffering = 0
				# innodb-read-only = 1
				cat <<-EOF >> $CONFIG_FILE
				log_slave_updates = 1
				read_only = 1
				EOF
			fi
			chown -R mysql:mysql "$CONFIG_FILE"
		else
			echo >&2 'error: master/slave feature is uninitialized. MYSQL_DATABASE variable is not defined.'
		fi
	fi
fi
# FIX: quote "$@" so arguments containing whitespace survive the hand-off
# to the stock entrypoint intact.
docker-entrypoint.sh "$@"
#!/bin/bash
# Run tr.sh against every host listed for 195.148.127.98, passing one shared
# timestamp so all traces from this batch are grouped together.
cd ~/rtp++
mkdir -p log/traces
hosts=$(cat 195.148.127.98_hosts.txt)
echo $hosts
# FIX: use $(...) command substitution; the previous backtick form wrapped the
# date call in a needless `eval`. Output format is unchanged.
dt=$(date +%Y%m%d"_"%H_%M_%S)
for host in $hosts
do
	sh tr.sh $host $dt
done
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Auto-generated ktools run script. Always execute from the script's directory.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
# Reset the log directory and start the resource monitor for this shell's PID.
mkdir -p log
rm -R -f log/*
touch log/stderror.err
ktools_monitor.sh $$ & pid0=$!
# Cleanup handler: stops the monitor, reports success/failure, records the
# process tree, and force-kills any still-running children of this session.
exit_handler(){
	exit_code=$?
	# Stop the resource monitor started at init.
	kill -9 $pid0 2> /dev/null
	if [ "$exit_code" -gt 0 ]; then
		echo 'Ktools Run Error - exitcode='$exit_code
	else
		echo 'Run Completed'
	fi
	set +x
	group_pid=$(ps -p $$ -o pgid --no-headers)
	sess_pid=$(ps -p $$ -o sess --no-headers)
	script_pid=$$
	printf "Script PID:%d, GPID:%s, SPID:%d
" $script_pid $group_pid $sess_pid >> log/killout.txt
	ps -jf f -g $sess_pid > log/subprocess_list
	# Collect descendants of this process group (excluding celery workers,
	# .log and .sh entries) and kill them newest-first.
	PIDS_KILL=$(pgrep -a --pgroup $group_pid | awk 'BEGIN { FS = "[ \t\n]+" }{ if ($1 >= '$script_pid') print}' | grep -v celery | egrep -v *\\.log$  | egrep -v *\\.sh$ | sort -n -r)
	echo "$PIDS_KILL" >> log/killout.txt
	kill -9 $(echo "$PIDS_KILL" | awk 'BEGIN { FS = "[ \t\n]+" }{ print $1 }') 2>/dev/null
	exit $exit_code
}
trap exit_handler QUIT HUP INT KILL TERM ERR EXIT
# Verify every started ktools process also logged a "finish" marker; prints
# [OK]/[ERROR] per process type and fails the script when any were lost.
check_complete(){
	set +e
	proc_list="eve getmodel gulcalc fmcalc summarycalc eltcalc aalcalc leccalc pltcalc ordleccalc"
	has_error=0
	for p in $proc_list; do
		started=$(find log -name "$p*.log" | wc -l)
		finished=$(find log -name "$p*.log" -exec grep -l "finish" {} + | wc -l)
		if [ "$finished" -lt "$started" ]; then
			echo "[ERROR] $p - $((started-finished)) processes lost"
			has_error=1
		elif [ "$started" -gt 0 ]; then
			echo "[OK] $p"
		fi
	done
	if [ "$has_error" -ne 0 ]; then
		false # raise non-zero exit code
	fi
}
# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/
rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/
mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryleccalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/il_S1_summaryleccalc
mkdir work/il_S1_summaryaalcalc
mkdir work/full_correlation/il_S1_summaryleccalc
mkdir work/full_correlation/il_S1_summaryaalcalc
# Named pipes wiring the partition-14 pipeline stages together.
mkfifo fifo/full_correlation/gul_fc_P14
mkfifo fifo/gul_P14
mkfifo fifo/gul_S1_summary_P14
mkfifo fifo/gul_S1_summary_P14.idx
mkfifo fifo/gul_S1_eltcalc_P14
mkfifo fifo/gul_S1_summarycalc_P14
mkfifo fifo/gul_S1_pltcalc_P14
mkfifo fifo/il_P14
mkfifo fifo/il_S1_summary_P14
mkfifo fifo/il_S1_summary_P14.idx
mkfifo fifo/il_S1_eltcalc_P14
mkfifo fifo/il_S1_summarycalc_P14
mkfifo fifo/il_S1_pltcalc_P14
mkfifo fifo/full_correlation/gul_P14
mkfifo fifo/full_correlation/gul_S1_summary_P14
mkfifo fifo/full_correlation/gul_S1_summary_P14.idx
mkfifo fifo/full_correlation/gul_S1_eltcalc_P14
mkfifo fifo/full_correlation/gul_S1_summarycalc_P14
mkfifo fifo/full_correlation/gul_S1_pltcalc_P14
mkfifo fifo/full_correlation/il_P14
mkfifo fifo/full_correlation/il_S1_summary_P14
mkfifo fifo/full_correlation/il_S1_summary_P14.idx
mkfifo fifo/full_correlation/il_S1_eltcalc_P14
mkfifo fifo/full_correlation/il_S1_summarycalc_P14
mkfifo fifo/full_correlation/il_S1_pltcalc_P14
# --- Do insured loss computes ---
( eltcalc -s < fifo/il_S1_eltcalc_P14 > work/kat/il_S1_eltcalc_P14 ) 2>> log/stderror.err & pid1=$!
( summarycalctocsv -s < fifo/il_S1_summarycalc_P14 > work/kat/il_S1_summarycalc_P14 ) 2>> log/stderror.err & pid2=$!
( pltcalc -s < fifo/il_S1_pltcalc_P14 > work/kat/il_S1_pltcalc_P14 ) 2>> log/stderror.err & pid3=$!
tee < fifo/il_S1_summary_P14 fifo/il_S1_eltcalc_P14 fifo/il_S1_summarycalc_P14 fifo/il_S1_pltcalc_P14 work/il_S1_summaryaalcalc/P14.bin work/il_S1_summaryleccalc/P14.bin > /dev/null & pid4=$!
tee < fifo/il_S1_summary_P14.idx work/il_S1_summaryleccalc/P14.idx > /dev/null & pid5=$!
( summarycalc -m -f  -1 fifo/il_S1_summary_P14 < fifo/il_P14 ) 2>> log/stderror.err &
# --- Do ground up loss computes ---
( eltcalc -s < fifo/gul_S1_eltcalc_P14 > work/kat/gul_S1_eltcalc_P14 ) 2>> log/stderror.err & pid6=$!
( summarycalctocsv -s < fifo/gul_S1_summarycalc_P14 > work/kat/gul_S1_summarycalc_P14 ) 2>> log/stderror.err & pid7=$!
( pltcalc -s < fifo/gul_S1_pltcalc_P14 > work/kat/gul_S1_pltcalc_P14 ) 2>> log/stderror.err & pid8=$!
tee < fifo/gul_S1_summary_P14 fifo/gul_S1_eltcalc_P14 fifo/gul_S1_summarycalc_P14 fifo/gul_S1_pltcalc_P14 work/gul_S1_summaryaalcalc/P14.bin work/gul_S1_summaryleccalc/P14.bin > /dev/null & pid9=$!
tee < fifo/gul_S1_summary_P14.idx work/gul_S1_summaryleccalc/P14.idx > /dev/null & pid10=$!
( summarycalc -m -i  -1 fifo/gul_S1_summary_P14 < fifo/gul_P14 ) 2>> log/stderror.err &
# --- Do insured loss computes ---
( eltcalc -s < fifo/full_correlation/il_S1_eltcalc_P14 > work/full_correlation/kat/il_S1_eltcalc_P14 ) 2>> log/stderror.err & pid11=$!
( summarycalctocsv -s < fifo/full_correlation/il_S1_summarycalc_P14 > work/full_correlation/kat/il_S1_summarycalc_P14 ) 2>> log/stderror.err & pid12=$!
( pltcalc -s < fifo/full_correlation/il_S1_pltcalc_P14 > work/full_correlation/kat/il_S1_pltcalc_P14 ) 2>> log/stderror.err & pid13=$!
tee < fifo/full_correlation/il_S1_summary_P14 fifo/full_correlation/il_S1_eltcalc_P14 fifo/full_correlation/il_S1_summarycalc_P14 fifo/full_correlation/il_S1_pltcalc_P14 work/full_correlation/il_S1_summaryaalcalc/P14.bin work/full_correlation/il_S1_summaryleccalc/P14.bin > /dev/null & pid14=$!
tee < fifo/full_correlation/il_S1_summary_P14.idx work/full_correlation/il_S1_summaryleccalc/P14.idx > /dev/null & pid15=$!
( summarycalc -m -f  -1 fifo/full_correlation/il_S1_summary_P14 < fifo/full_correlation/il_P14 ) 2>> log/stderror.err &
# --- Do ground up loss computes ---
( eltcalc -s < fifo/full_correlation/gul_S1_eltcalc_P14 > work/full_correlation/kat/gul_S1_eltcalc_P14 ) 2>> log/stderror.err & pid16=$!
( summarycalctocsv -s < fifo/full_correlation/gul_S1_summarycalc_P14 > work/full_correlation/kat/gul_S1_summarycalc_P14 ) 2>> log/stderror.err & pid17=$!
( pltcalc -s < fifo/full_correlation/gul_S1_pltcalc_P14 > work/full_correlation/kat/gul_S1_pltcalc_P14 ) 2>> log/stderror.err & pid18=$!
tee < fifo/full_correlation/gul_S1_summary_P14 fifo/full_correlation/gul_S1_eltcalc_P14 fifo/full_correlation/gul_S1_summarycalc_P14 fifo/full_correlation/gul_S1_pltcalc_P14 work/full_correlation/gul_S1_summaryaalcalc/P14.bin work/full_correlation/gul_S1_summaryleccalc/P14.bin > /dev/null & pid19=$!
tee < fifo/full_correlation/gul_S1_summary_P14.idx work/full_correlation/gul_S1_summaryleccalc/P14.idx > /dev/null & pid20=$!
( summarycalc -m -i  -1 fifo/full_correlation/gul_S1_summary_P14 < fifo/full_correlation/gul_P14 ) 2>> log/stderror.err &
( tee < fifo/full_correlation/gul_fc_P14 fifo/full_correlation/gul_P14  | fmcalc -a2 > fifo/full_correlation/il_P14  ) 2>> log/stderror.err &
( eve 14 20 | getmodel | gulcalc -S100 -L100 -r -j fifo/full_correlation/gul_fc_P14 -a1 -i - | tee fifo/gul_P14 | fmcalc -a2 > fifo/il_P14  ) 2>> log/stderror.err &
# Wait for every computation stage before concatenating per-partition results.
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20
# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P14 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P14 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P14 > output/il_S1_summarycalc.csv & kpid3=$!
# --- Do insured loss kats for fully correlated output ---
kat -s work/full_correlation/kat/il_S1_eltcalc_P14 > output/full_correlation/il_S1_eltcalc.csv & kpid4=$!
kat work/full_correlation/kat/il_S1_pltcalc_P14 > output/full_correlation/il_S1_pltcalc.csv & kpid5=$!
kat work/full_correlation/kat/il_S1_summarycalc_P14 > output/full_correlation/il_S1_summarycalc.csv & kpid6=$!
# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P14 > output/gul_S1_eltcalc.csv & kpid7=$!
kat work/kat/gul_S1_pltcalc_P14 > output/gul_S1_pltcalc.csv & kpid8=$!
kat work/kat/gul_S1_summarycalc_P14 > output/gul_S1_summarycalc.csv & kpid9=$!
# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P14 > output/full_correlation/gul_S1_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P14 > output/full_correlation/gul_S1_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P14 > output/full_correlation/gul_S1_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12
check_complete
exit_handler
|
<filename>src/components/showDevice.js
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import { devicesURl } from './Constants';
export default class showDevice extends Component{
constructor(serial){
super();
this.state={
device: []
}
}
//Fetch data from external API and set states of varius objects above
componentDidMount() {
const { match: { params }, history } = this.props;
return fetch(devicesURl + params.serial)
.then((response) => response.json())
.then((responseJson) => {
this.setState({
isLoading: false,
device: responseJson.data,
});
})
.catch((error) => {
console.error(error);
});
};
render(){
return(
<div className="content-wrapper">
<section className="content-header">
<div className="container-fluid">
<div className="row mb-2">
<div className="col-sm-6">
<h1>Device ID: {this.state.device.serial} </h1>
</div>
<div className="col-sm-6">
<ol className="breadcrumb float-sm-right">
<li className="breadcrumb-item"><Link to="/">Smart Devices</Link></li>
<li className="breadcrumb-item active">show</li>
</ol>
</div>
</div>
</div>
</section>
<section className="content">
<div className="container-fluid">
<div className="container">
<div className="row justify-content-center">
<div className="col-md-8">
<div className="card">
<div className="card-header">Show Device Details</div>
<div className="card-body">
<form>
<div className="form-group row">
<label htmlFor="category" className="col-md-4 col-form-label text-md-right">Device Description:</label>
<div className="col-md-6">
<input type="text" className="form-control" value={this.state.device.description} disabled/>
</div>
</div>
<div className="form-group row">
<label htmlFor="category" className="col-md-4 col-form-label text-md-right">Manufacturer:</label>
<div className="col-md-6">
<input id="category" type="text" className="form-control" value={this.state.device.manufacturer} disabled/>
</div>
</div>
<div className="form-group row mb-0">
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
</div>
)
}
} |
package file_fetcher
import (
"context"
"encoding/json"
"fmt"
"testing"
"github.com/eugene-fedorenko/prebid-server/stored_requests"
"github.com/stretchr/testify/assert"
)
// TestFileFetcher checks that stored requests and imps are loaded and
// parsed correctly from the JSON files under ./test.
func TestFileFetcher(t *testing.T) {
	f, newErr := NewFileFetcher("./test")
	if newErr != nil {
		t.Errorf("Failed to create a Fetcher: %v", newErr)
	}

	reqs, imps, errs := f.FetchRequests(context.Background(), []string{"1", "2"}, []string{"some-imp"})
	assertErrorCount(t, 0, errs)

	validateStoredReqOne(t, reqs)
	validateStoredReqTwo(t, reqs)
	validateImp(t, imps)
}
// TestAccountFetcher verifies account lookup for both an existing and a
// missing account id.
func TestAccountFetcher(t *testing.T) {
	f, err := NewFileFetcher("./test")
	assert.NoError(t, err, "Failed to create test fetcher")

	// Existing account: no errors, JSON payload returned as-is.
	acct, errs := f.FetchAccount(context.Background(), "valid")
	assertErrorCount(t, 0, errs)
	assert.JSONEq(t, `{"disabled":false, "id":"valid"}`, string(acct))

	// Missing account: exactly one NotFoundError.
	acct, errs = f.FetchAccount(context.Background(), "nonexistent")
	assertErrorCount(t, 1, errs)
	assert.Error(t, errs[0])
	assert.Equal(t, stored_requests.NotFoundError{"nonexistent", "Account"}, errs[0])
}
// TestInvalidDirectory expects fetcher construction to fail for a
// directory that does not exist.
func TestInvalidDirectory(t *testing.T) {
	if _, err := NewFileFetcher("./nonexistant-directory"); err == nil {
		t.Errorf("There should be an error if we use a directory which doesn't exist.")
	}
}
// validateStoredReqOne asserts stored request "1" decodes to exactly {"test": "foo"}.
func validateStoredReqOne(t *testing.T, storedRequests map[string]json.RawMessage) {
	raw, ok := storedRequests["1"]
	if !ok {
		t.Fatalf("Expected stored request data to have id: %d", 1)
	}

	parsed := map[string]string{}
	if err := json.Unmarshal(raw, &parsed); err != nil {
		t.Errorf("Failed to unmarshal 1: %v", err)
	}
	if len(parsed) != 1 {
		t.Errorf("Unexpected req1Val length. Expected %d, Got %d", 1, len(parsed))
	}

	got, present := parsed["test"]
	if !present {
		t.Errorf("req1Val should have had a \"test\" key, but it didn't.")
	}
	if got != "foo" {
		t.Errorf(`Bad data in "test" of stored request "1". Expected %s, Got %s`, "foo", got)
	}
}
// validateStoredReqTwo asserts stored request "2" decodes to the escaped
// string literal `esca"ped`.
func validateStoredReqTwo(t *testing.T, storedRequests map[string]json.RawMessage) {
	raw, ok := storedRequests["2"]
	if !ok {
		t.Fatalf("Expected stored request map to have id: %d", 2)
	}

	var decoded string
	if err := json.Unmarshal(raw, &decoded); err != nil {
		t.Errorf("Failed to unmarshal %d: %v", 2, err)
	}
	if decoded != `esca"ped` {
		t.Errorf(`Bad data in stored request "2". Expected %v, Got %s`, `esca"ped`, decoded)
	}
}
// validateImp asserts stored imp "some-imp" decodes to exactly {"imp": true}.
func validateImp(t *testing.T, storedImps map[string]json.RawMessage) {
	raw, ok := storedImps["some-imp"]
	if !ok {
		t.Fatal("Expected Stored Imp map to have id: some-imp")
	}

	parsed := map[string]bool{}
	if err := json.Unmarshal(raw, &parsed); err != nil {
		t.Errorf("Failed to unmarshal some-imp: %v", err)
	}
	if len(parsed) != 1 {
		t.Errorf("Unexpected impVal length. Expected %d, Got %d", 1, len(parsed))
	}

	flag, present := parsed["imp"]
	if !present {
		t.Errorf("some-imp should have had a \"imp\" key, but it didn't.")
	}
	if !flag {
		t.Errorf(`Bad data in "imp" of stored request "some-imp". Expected true, Got %t`, flag)
	}
}
// assertErrorCount fails the test when errs does not contain exactly num errors.
func assertErrorCount(t *testing.T, num int, errs []error) {
	t.Helper()
	if got := len(errs); got != num {
		t.Errorf("Wrong number of errors. Expected %d. Got %d. Errors are %v", num, got, errs)
	}
}
// newCategoryFetcher builds a file fetcher rooted at directory and asserts
// that it also implements stored_requests.CategoryFetcher.
func newCategoryFetcher(directory string) (stored_requests.CategoryFetcher, error) {
	f, err := NewFileFetcher(directory)
	if err != nil {
		return nil, err
	}
	cf, ok := f.(stored_requests.CategoryFetcher)
	if !ok {
		return nil, fmt.Errorf("Failed to type cast fetcher to CategoryFetcher")
	}
	return cf, nil
}
// TestCategoriesFetcherWithPublisher resolves an IAB category through a
// publisher-specific mapping file.
func TestCategoriesFetcherWithPublisher(t *testing.T) {
	f, buildErr := newCategoryFetcher("./test/category-mapping")
	if buildErr != nil {
		t.Errorf("Failed to create a category Fetcher: %v", buildErr)
	}

	cat, fetchErr := f.FetchCategories(nil, "test", "categories", "IAB1-1")
	assert.Equal(t, nil, fetchErr, "Categories were loaded incorrectly")
	assert.Equal(t, "Beverages", cat, "Categories were loaded incorrectly")
}
// TestCategoriesFetcherWithoutPublisher resolves an IAB category through
// the adserver-level mapping when no publisher is given.
func TestCategoriesFetcherWithoutPublisher(t *testing.T) {
	f, buildErr := newCategoryFetcher("./test/category-mapping")
	if buildErr != nil {
		t.Errorf("Failed to create a category Fetcher: %v", buildErr)
	}

	cat, fetchErr := f.FetchCategories(nil, "test", "", "IAB1-1")
	assert.Equal(t, nil, fetchErr, "Categories were loaded incorrectly")
	assert.Equal(t, "VideoGames", cat, "Categories were loaded incorrectly")
}
// TestCategoriesFetcherNoCategory expects a descriptive error when the IAB
// category is absent from the mapping.
func TestCategoriesFetcherNoCategory(t *testing.T) {
	f, buildErr := newCategoryFetcher("./test/category-mapping")
	if buildErr != nil {
		t.Errorf("Failed to create a category Fetcher: %v", buildErr)
	}

	_, fetchingErr := f.FetchCategories(nil, "test", "", "IAB1-100")
	assert.Equal(t, fmt.Errorf("Unable to find category for adserver 'test', publisherId: '', iab category: 'IAB1-100'"),
		fetchingErr, "Categories were loaded incorrectly")
}
// TestCategoriesFetcherBrokenJson expects an unmarshal error for a
// malformed mapping file.
func TestCategoriesFetcherBrokenJson(t *testing.T) {
	f, buildErr := newCategoryFetcher("./test/category-mapping")
	if buildErr != nil {
		t.Errorf("Failed to create a category Fetcher: %v", buildErr)
	}

	_, fetchingErr := f.FetchCategories(nil, "test", "broken", "IAB1-100")
	assert.Equal(t, fmt.Errorf("Unable to unmarshal categories for adserver: 'test', publisherId: 'broken'"),
		fetchingErr, "Categories were loaded incorrectly")
}
// TestCategoriesFetcherNoCategoriesFile expects an error when no mapping
// file exists for the requested publisher.
func TestCategoriesFetcherNoCategoriesFile(t *testing.T) {
	f, buildErr := newCategoryFetcher("./test/category-mapping")
	if buildErr != nil {
		t.Errorf("Failed to create a category Fetcher: %v", buildErr)
	}

	_, fetchingErr := f.FetchCategories(nil, "test", "not_exists", "IAB1-100")
	assert.Equal(t, fmt.Errorf("Unable to find mapping file for adserver: 'test', publisherId: 'not_exists'"),
		fetchingErr, "Categories were loaded incorrectly")
}
|
using System.Collections;
using UnityEngine;
// Animates an object from its base position to a "fallen" pose, and tracks
// whether it currently sits within a fixed radius of a target point.
public class FallingObject : MonoBehaviour
{
    // NOTE(review): when onceOnly is true, TriggerFall never fires at all
    // (see the guard below) — confirm that is the intended semantics.
    public bool onceOnly = false;
    public Transform targetCenter;          // point the proximity check measures against
    public Transform baseTransform;         // transform that gets animated downwards
    public Transform fallenDownTransform;   // destination pose once fallen
    public float fallTime = 0.5f;           // duration of the fall animation, in seconds

    private const float targetRadius = 0.25f;
    private bool targetEnabled = true;
    private bool hasFallen = false;

    // Start the fall animation, at most once per object lifetime.
    public void TriggerFall()
    {
        if (hasFallen || onceOnly)
        {
            return;
        }
        hasFallen = true;
        StartCoroutine(FallAnimation());
    }

    // Coroutine: interpolate baseTransform from its current position to the
    // fallen pose over fallTime seconds (Lerp clamps t, so overshoot is safe).
    private IEnumerator FallAnimation()
    {
        Vector3 from = baseTransform.position;
        Vector3 to = fallenDownTransform.position;
        float progress = 0f;
        while (progress < 1f)
        {
            progress += Time.deltaTime / fallTime;
            baseTransform.position = Vector3.Lerp(from, to, progress);
            yield return null;
        }
    }

    // Refresh targetEnabled: true iff this object is within targetRadius of targetCenter.
    public void CheckTargetInRange()
    {
        targetEnabled = Vector3.Distance(targetCenter.position, transform.position) <= targetRadius;
    }
}
var Vue = require('../../../node_modules/vue/dist/vue.min.js');
require('../../../node_modules/material-design-lite/material.min.js');
require('../common/user-story.js');
/* require styles */
// require('../../../node_modules/material-design-lite/material.min.css');
// require('./../styles/styles.css');
// require('./../styles/layout.css');
//define Vue.js app
// Seed data for the Vue app: a single feature with its BDD-style scenarios
// (given / when / then step lists).
// NOTE(review): both scenario entries are identical (same name and steps) —
// presumably placeholder content; confirm whether the duplicate is intended.
var data = {
story: {
feature: 'eggroll editor',
scenarios: [
{
name: 'add new one',
given: ['i logged in with my github account', 'and i authorized eggroll'],
when: ['i save a story'],
then: ['i should get feature files in my repo']
},
{
name: 'add new one',
given: ['i logged in with my github account', 'and i authorized eggroll'],
when: ['i save a story'],
then: ['i should get feature files in my repo']
}
]
}
};
// Boot the app on script load; start is a function declaration, so hoisting
// makes this call safe even though the definition appears below.
start();
//////////////
// Mount the Vue root instance on #app, backed by the shared story data,
// and return it to the caller.
function start () {
    var app = new Vue({
        el: '#app',
        data: data
    });
    return app;
}
|
#!/usr/bin/env bash
# Publish the latest master build artifacts onto the gh-pages branch.
# Abort on any failed command, unset variable, or broken pipe so a failed
# checkout cannot silently leave the branch half-updated.
set -euo pipefail

git checkout gh-pages

# Remove stale copies; -f so an already-missing directory does not abort the sync.
rm -rf src art lib

# Pull the current versions of the published assets from master.
git checkout master -- src art lib index.html
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.