text stringlengths 1 1.05M |
|---|
<gh_stars>0
import { hideError } from '@error-handlers/helpers/hide-error';
import { replaceLayoutSymbols } from '@error-handlers/helpers/replace-layout-symbols';
import { HookProps, SetForm, FormProps } from '@common-types';
import { DefaultLiveErrorHandler } from './types';
/**
 * @description
 * Default handler for "live" validation errors (parsing, showing and hiding them).
 *
 * @param {ValidatorErrorProps} errorDataForControl - Output of the live validator (error text plus data describing when and how to show the error)
 * @param {HookProps} hooksData - Hook data
 * @param {FormProps} form - Global form object
 * @param {SetForm} setForm - Function that mutates the global form object
 *
 * @returns {void}
 *
 */
export const defaultLiveErrorHandler: DefaultLiveErrorHandler = (
  errorDataForControl,
  hooksData,
  form,
  setForm,
) => {
  const { controlName, newValue: writeToControlValue } = hooksData,
    currentControl = form.controls[controlName],
    { label: controlLabel } = currentControl,
    {
      message = null,
      limit = null,
      hideErrorTimeout = null,
    } = errorDataForControl || {},
    // Control-level hooks take precedence over form-level ones.
    beforeError =
      currentControl.beforeLiveValidatorError ||
      form.formSettings.beforeLiveValidatorError ||
      null,
    afterError =
      currentControl.afterLiveValidatorError ||
      form.formSettings.afterLiveValidatorError ||
      null;
  /**
   * Hook invoked before the error is shown
   */
  if (typeof beforeError === 'function') {
    beforeError(hooksData);
  }
  /**
   * Replace template placeholders in the error text with concrete values
   */
  if (errorDataForControl) {
    currentControl.error = replaceLayoutSymbols(message, {
      limit,
      controlLabel,
      writeToControlValue,
    });
  }
  /**
   * Show the error.
   * NOTE(review): hasError is set even when errorDataForControl is falsy —
   * presumably this handler is only invoked when an error exists; confirm.
   */
  currentControl.hasError = true;
  /**
   * Hide the error after a timeout, when one was supplied
   */
  if (hideErrorTimeout) {
    const hideErrorTimeoutId = hideError(
      hooksData,
      setForm,
      hideErrorTimeout,
    );
    currentControl._hideErrorTimeoutId = hideErrorTimeoutId;
  }
  /**
   * Hook invoked after the error is shown
   */
  if (typeof afterError === 'function') {
    afterError(hooksData);
  }
};
|
<filename>src/remoteservices/gw2profits.ts<gh_stars>0
import * as _ from "lodash";
import fetch, { Request, Response } from "node-fetch";
import * as Rx from "rxjs/Rx";
import get from "../configuration";
import { feedObservable, fetchwrap } from "./base";
/** A crafting recipe as returned by the gw2profits API. */
export interface IMyRecipe {
  name: string;
  type: string;
  /** Crafting disciplines that can produce this recipe (e.g. "Weaponsmith"). */
  disciplines: string[];
  output_item_id: number;
  output_item_count: number;
  ingredients: IMyIngredient[];
}
/** One ingredient entry of a recipe. */
export interface IMyIngredient {
  item_id: number;
  count: number;
}
/** Abstraction over the HTTP layer so tests can inject a fake fetcher. */
export type FetchFunction = (url: string) => Promise<IMyRecipe[]>;
/**
 * Fetches the recipe list from the configured gw2profits endpoint, drops
 * achievement-only recipes, and pushes the remaining recipes into the observer.
 */
async function getRecipesPromise(
    observer: Rx.Observer<IMyRecipe[]>,
    fetchFunction: FetchFunction): Promise<void> {
    const url = get().remoteServices.gw2ProfitsUrl;
    const recipes = await fetchFunction(url);
    const craftable = recipes.filter((recipe) => recipe.disciplines.indexOf("Achievement") === -1);
    observer.next(craftable);
}
export const getRecipes = (fetchFunction: FetchFunction = fetchwrap<IMyRecipe[]>(fetch)) =>
feedObservable(_.partialRight(getRecipesPromise, fetchFunction));
|
package com.pl.app.model;
/**
* Created by Alex on 2016/7/22.
*/
/**
 * Plain JavaBean holding a staff member's basic HR attributes.
 */
public class Staff {
    private String name;   // full name
    private String title;  // job title
    private int age;       // age in years
    private int salary;    // salary; units/period unspecified — confirm with callers

    /** @return the staff member's full name */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the staff member's job title */
    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /** @return the staff member's age in years */
    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    /** @return the staff member's salary */
    public int getSalary() {
        return salary;
    }

    public void setSalary(int salary) {
        this.salary = salary;
    }
}
|
import time
class Miner:
    """Minimal miner stub tracking an owner address, a running flag and a hashrate."""

    def __init__(self, miner_address):
        self.miner_address = miner_address  # payout address; must be set before starting
        self.running = False                # True while the miner is active
        self.hashrate = 0                   # current hashrate (arbitrary units)

    def start_miner(self):
        """Start the miner.

        Returns 0 on success and 1 on failure (already running, or no
        address configured).

        Bug fix: the original tested ``self.running == True`` and never set
        the flag, so the miner could never actually be started.
        """
        if not self.running and self.miner_address is not None:
            self.running = True
            print('Miner started')
            return 0  # the miner was started
        time.sleep(0.01)
        # the miner could not be started
        print('Miner start, aborted')
        return 1

    def get_hashrate(self):
        """Return the current hashrate."""
        return self.hashrate
# Example usage: construct a miner, try to start it, then read a sample hashrate.
miner = Miner("0x1234567890")
result = miner.start_miner()
print("Start result:", result)
miner.hashrate = 100  # Set a sample hashrate
hashrate = miner.get_hashrate()
print("Current hashrate:", hashrate)
#!/usr/bin/python3
"""
The message parser parses and generates binary messages to communicate with other modules like the KX module.
"""
from struct import *
import ipaddress # imported here as sphynx documentation generator crashes if it is written on top
from jsonschema import validate
# Maps DHT wire-protocol command numbers to their symbolic names.
DHTCommands = {
    500: "MSG_DHT_PUT",
    501: "MSG_DHT_GET",
    502: "MSG_DHT_TRACE",
    503: "MSG_DHT_GET_REPLY",
    504: "MSG_DHT_TRACE_REPLY",
    505: "MSG_DHT_ERROR"
}
# Inverse mapping (name -> number), derived so the two tables can never drift apart.
DHTCommandsInv = {name: number for number, name in DHTCommands.items()}
class DHTMessage():
    """
    Base class for other classes representing incoming data such as ``DHTMessagePUT``.

    Typical usage: ``msg = DHTMessage().read_binary(raw)``; ``msg`` is then the
    concrete parsed message object.
    """

    def __init__(self):
        self.message = None  # concrete message object, set by parse()

    def read_file(self, filename):
        """Read a binary file representing a message. The message is automatically
        parsed afterwards.

        :param filename: the location of the file
        """
        with open(filename, "rb") as f:
            self.data = f.read()
        return self.parse()

    def read_binary(self, data):
        """Read and parse binary data.

        :param data: The data
        :type data: bytearray
        """
        self.data = data
        return self.parse()

    def parse(self):
        """
        Parse the message.

        ``self.message`` will automatically become the type of message specified
        with the command number (``DHTMessageGET``, ``DHTMessagePUT`` etc.).

        :raises ValueError: if the command number is unknown or has no parser.
            (Resolves the old TODO: previously an unhandled command fell through
            silently and crashed later with ``AttributeError`` on ``None``.)
        """
        commandNumber = int.from_bytes(self.data[2:4], byteorder='big')
        try:
            command = DHTCommands[commandNumber]
        except KeyError:
            raise ValueError("unknown DHT command number: %d" % commandNumber)
        if command == "MSG_DHT_GET":
            self.message = DHTMessageGET(self.data, self.getSize())
        elif command == "MSG_DHT_PUT":
            self.message = DHTMessagePUT(self.data, self.getSize())
        elif command == "MSG_DHT_TRACE":
            self.message = DHTMessageTRACE(self.data, self.getSize())
        elif command == "MSG_DHT_ERROR":
            self.message = DHTMessageERROR(self.data, self.getSize())
        else:
            raise ValueError("no parser for DHT command: %s" % command)
        self.message.command = command
        return self.message

    def is_valid(self):
        """Return True when the parsed message validates against its JSON schema."""
        try:
            validate(self.message.make_dict(), SCHEMA_MSG_DHT[self.message.command])
            return True
        except Exception:  # narrowed from a bare except (no longer eats KeyboardInterrupt)
            return False

    def get_validation_execption(self):
        """Return the schema-validation error as a string, or None when valid.

        NOTE(review): method-name typo ("execption") kept for backward compatibility.
        NOTE(review): SCHEMA_MSG_DHT is not defined in this module's visible code —
        confirm it is provided elsewhere.
        """
        try:
            validate(self.message.make_dict(), SCHEMA_MSG_DHT[self.message.command])
            return None
        except Exception as e:
            return str(e)

    def getSize(self):
        """
        Returns the size of the message.

        :returns: Message Size in bytes
        :rtype: int
        """
        return int.from_bytes(self.data[0:2], byteorder='big')
class DHTMessageParent():
    """Common base for parsed messages: stores the raw bytes and declared size."""

    def __init__(self, data, size):
        self.data = data  # raw message bytes, header included
        self.size = size  # declared total message size in bytes
class DHTMessagePUT(DHTMessageParent):
    """
    Provides additional parameters for a DHTMessage which is a PUT message.

    Wire layout (byte offsets): [0:2] size, [2:4] command, [4:36] key,
    [36:38] ttl, [38:39] replication, [39:44] reserved, [44:size] content.
    """

    def make_dict(self):
        # Summary dict used for JSON-schema validation.
        return {
            "ttl" : self.get_ttl(),
            "key" : self.get_key(),
            "replication" : self.get_replication(),
            "content_length" : len(self.get_content())
        }

    def get_key(self):
        """
        Returns the key as integer

        :rtype: int
        """
        return int.from_bytes(self.data[4:36], byteorder='big')

    def get_ttl(self):
        """
        Returns the time to live (ttl) in seconds

        :rtype: int
        """
        return int.from_bytes(self.data[36:38], byteorder='big')

    def get_replication(self):
        """
        Returns the replication

        :rtype: int
        """
        return int.from_bytes(self.data[38:39], byteorder='big')

    def get_reserved(self):
        # 5 reserved bytes between the replication field and the content.
        return self.data[39:44]

    def get_content(self):
        """
        Returns the content

        :returns: content
        :rtype: bytes
        """
        return self.data[44:self.size]
class DHTMessageGET(DHTMessageParent):
    """
    Provides additional parameters for a DHTMessage which is a GET message.

    Wire layout (byte offsets): [0:2] size, [2:4] command, [4:36] key.
    """

    def make_dict(self):
        # Summary dict used for JSON-schema validation.
        return {
            "key" : self.get_key()
        }

    def get_key(self):
        """
        Returns the key as integer

        :rtype: int
        """
        return int.from_bytes(self.data[4:36], byteorder='big')
class DHTMessageTRACE(DHTMessageParent):
    """
    Provides additional parameters for a DHTMessage which is a TRACE message.

    Wire layout (byte offsets): [0:2] size, [2:4] command, [4:36] key.
    """

    def make_dict(self):
        # Summary dict used for JSON-schema validation.
        return {
            "key" : self.get_key()
        }

    def get_key(self):
        """
        Returns the key as integer

        :rtype: int
        """
        return int.from_bytes(self.data[4:36], byteorder='big')
class DHTMessageGET_REPLY:
    """
    Builds a ``MSG_DHT_GET_REPLY`` (command 503) message to send later.

    :param key: the key as integer
    :param content: the content of the get query in binary format.
    """

    def __init__(self, key, content):
        assert type(content) is bytes
        # Fixed fields are size (16 bit), command (16 bit) and key (256 bit);
        # the declared size is their byte count plus the content length.
        fixed_bits = 16 + 16 + 256
        declared_size = int(int(fixed_bits) / 8) + len(content)
        frame = bytearray()
        frame += declared_size.to_bytes(2, byteorder='big')
        frame += (503).to_bytes(2, byteorder='big')  # 503 is MSG_DHT_GET_REPLY
        frame += key.to_bytes(32, byteorder='big')
        frame += content
        self.frame = frame

    def get_data(self):
        """
        Returns the data in binary format

        :rtype: bytearray
        """
        return self.frame
class MAKE_MSG_DHT_GET:
    """
    Builds a ``MSG_DHT_GET`` (command 501) message to send later.

    :param key: the key as integer
    """

    def __init__(self, key):
        frame = bytearray()
        frame += (40).to_bytes(2, byteorder='big')       # declared message size
        frame += (501).to_bytes(2, byteorder='big')      # command: MSG_DHT_GET
        frame += int(key).to_bytes(32, byteorder='big')  # key field
        self.frame = frame

    def get_data(self):
        """Return the message in binary format (bytearray)."""
        return self.frame
class MAKE_MSG_DHT_PUT:
    """
    Builds a ``MSG_DHT_PUT`` (command 500) message to send later.

    :param key: key as integer
    :type key: int
    :param content: The content to be stored
    :type content: bytearray
    :param ttl: time the content is available in seconds (43200 per default)
    :type ttl: int
    :param replication: The amount of replication. A replication degree of three
        means a tripple redundancy. If one node crashes, there are still two
        nodes available for example.
    :type replication: int
    """

    def __init__(self, key, content, ttl=43200, replication=3):
        # Fixed header/fields total 44 bytes; the content follows directly.
        header = [
            (44 + len(content)).to_bytes(2, byteorder='big'),  # declared size
            (500).to_bytes(2, byteorder='big'),                # command: MSG_DHT_PUT
            int(key).to_bytes(32, byteorder='big'),            # key
            int(ttl).to_bytes(2, byteorder='big'),             # time to live
            int(replication).to_bytes(1, byteorder='big'),     # replication degree
            int(0).to_bytes(1, byteorder='big'),               # reserved
            int(0).to_bytes(4, byteorder='big'),               # reserved
        ]
        frame = bytearray()
        for field in header:
            frame += field
        frame += content
        self.frame = frame

    def get_data(self):
        """
        :returns: Message in binary format
        :rtype: bytearray
        """
        return self.frame
class MAKE_MSG_DHT_TRACE_REPLY:
    """
    Initializes a ``MSG_DHT_TRACE_REPLY`` message to send later.

    :param key: the key as integer
    :param hops: a list of :py:meth:`DHTHop` objects
    """

    def __init__(self, key, hops):
        frame = bytearray()
        # Fixed fields (size+command = 32 bit, key = 256 bit) plus one
        # 56-byte (448-bit) record per hop.
        size = int(32 + 256 + len(hops) * (256 + 32 * 2 + 128))
        assert size < 65536  # the size field is only 2 bytes (2^16) wide
        size = int(size / 8)  # convert bits to bytes
        frame += size.to_bytes(2, byteorder='big')
        frame += (504).to_bytes(2, byteorder='big')  # 504 is MSG_DHT_TRACE_REPLY
        frame += key.to_bytes(32, byteorder='big')
        # Idiom fix: the loop index from enumerate() was never used.
        for hop in hops:
            frame += hop.as_bytes()
        self.frame = frame

    def get_data(self):
        """
        Return the binary representation

        :returns: Message in binary format
        :rtype: bytearray
        """
        return self.frame
class DHTHop:
    """
    A DHT Hop for a ``MSG_DHT_TRACE_REPLY`` message.

    :param peerId: the peer id with a maximum length of 32 bytes
    :type peerId: Int
    :param kxPort: the KX port with a maximum length of 2 bytes
    :type kxPort: Int
    :param IPv4Address: For example 192.168.0.1
    :type IPv4Address: str
    :param IPv6Address: For example FE80:0000:0000:0000:0202:B3FF:FE1E:8329
    :type IPv6Address: str
    """

    def __init__(self, peerId, kxPort, IPv4Address, IPv6Address):
        self.peerId = peerId.to_bytes(32, byteorder='big')
        self.kxPort = kxPort.to_bytes(2, byteorder='big')
        self.reserved = (0).to_bytes(2, byteorder='big')
        # .packed yields the raw 4-byte / 16-byte network representation.
        self.IPv4Address = ipaddress.ip_address(IPv4Address).packed
        self.IPv6Address = ipaddress.ip_address(IPv6Address).packed

    def as_bytes(self):
        """
        Return the binary representation of a DHT Hop, which can be appended
        to a trace message.

        :returns: DHTHop in binary format
        :rtype: bytearray
        """
        fields = (self.peerId, self.kxPort, self.reserved,
                  self.IPv4Address, self.IPv6Address)
        return bytearray(b''.join(fields))
class DHTMessageERROR:
    """
    Generates an error message

    :param requestType: The type of request
    :type requestType: int
    :param requestKey: The key
    :type requestKey: int

    NOTE(review): DHTMessage.parse() constructs this class with (data, size),
    which does not match this (requestType, requestKey) signature — confirm
    which contract is intended.
    """

    def __init__(self, requestType, requestKey):
        frame = bytearray()
        size = int(32*2+256)
        size = int(size / 8)  # convert to byte as size should be byte instead of bit
        # NOTE(review): the declared size (40 bytes) does not match the frame
        # built below (2+2+32+2+32 = 70 bytes), and requestKey is appended
        # twice — the first 32-byte key write looks like a duplicate; verify
        # against the wire-protocol spec before changing.
        frame += size.to_bytes(2, byteorder='big')
        frame += (505).to_bytes(2, byteorder='big')  # 505 is MSG_DHT_ERROR
        frame += requestKey.to_bytes(32, byteorder='big')
        frame += requestType.to_bytes(2, byteorder='big')  # unused
        frame += requestKey.to_bytes(32, byteorder='big')
        self.frame = frame

    def get_data(self):
        """
        Return the binary representation

        :returns: Message in binary format
        :rtype: bytearray
        """
        return self.frame
|
#!/usr/bin/env bats
load helpers
# Polling parameters for wait_for_process (seconds).
WAIT_TIME=120
SLEEP_TIME=1
# Quoted so the file still parses when IS_SOAK_TEST is unset: an empty
# unquoted expansion would make the `[ ... ]` expression malformed.
if [ "${IS_SOAK_TEST}" = true ]; then
  export ETCD_CA_CERT=/etc/kubernetes/certs/ca.crt
  export ETCD_CERT=/etc/kubernetes/certs/etcdclient.crt
  export ETCD_KEY=/etc/kubernetes/certs/etcdclient.key
else
  export ETCD_CA_CERT=/etc/kubernetes/pki/etcd/ca.crt
  export ETCD_CERT=/etc/kubernetes/pki/etcd/server.crt
  export ETCD_KEY=/etc/kubernetes/pki/etcd/server.key
fi
# Sanity check: the KMS provider pod must be Ready before the other tests run.
@test "azure keyvault kms plugin is running" {
  wait_for_process ${WAIT_TIME} ${SLEEP_TIME} "kubectl -n kube-system wait --for=condition=Ready --timeout=60s pod -l component=azure-kms-provider"
}
# Creates the secret that the etcd-encryption test below inspects.
@test "creating secret resource" {
  run kubectl create secret generic secret1 -n default --from-literal=foo=bar
  assert_success
}
# Round-trip: the API server must transparently decrypt the secret on read.
@test "read the secret resource test" {
  result=$(kubectl get secret secret1 -o jsonpath='{.data.foo}' | base64 -d)
  [[ "${result//$'\r'}" == "bar" ]]
}
# Verifies the secret is stored KMS-encrypted in etcd (not plaintext).
@test "check if secret is encrypted in etcd" {
  # Quoted so an unset IS_SOAK_TEST degrades to the non-soak branch instead of
  # producing a malformed `[ ... ]` expression.
  if [ "${IS_SOAK_TEST}" = true ]; then
    local node_name=$(kubectl get nodes -l kubernetes.azure.com/role=master -o jsonpath="{.items[0].metadata.name}")
    run kubectl node-shell ${node_name} -- sh -c "ETCDCTL_API=3 etcdctl --cacert=${ETCD_CA_CERT} --cert=${ETCD_CERT} --key=${ETCD_KEY} get /registry/secrets/default/secret1"
    assert_match "k8s:enc:kms:v1:azurekmsprovider" "${output}"
    assert_success
  else
    local pod_name=$(kubectl get pod -n kube-system -l component=etcd -o jsonpath="{.items[0].metadata.name}")
    run kubectl exec ${pod_name} -n kube-system -- etcdctl --cacert=${ETCD_CA_CERT} --cert=${ETCD_CERT} --key=${ETCD_KEY} get /registry/secrets/default/secret1
    assert_match "k8s:enc:kms:v1:azurekmsprovider" "${output}"
    assert_success
  fi
  # cleanup
  run kubectl delete secret secret1 -n default
}
# Scrapes the provider's Prometheus endpoint from a throwaway curl pod.
@test "check if metrics endpoint works" {
  # ToDo - enable this test after v0.0.12 release
  if [ ${SKIP_METRICS} = true ]; then
    skip "metrics endpoint is not yet released in soak cluster."
  fi
  kubectl run curl --image=curlimages/curl:7.75.0 -- tail -f /dev/null
  kubectl wait --for=condition=Ready --timeout=60s pod curl
  local pod_ip=$(kubectl get pod -n kube-system -l component=azure-kms-provider -o jsonpath="{.items[0].status.podIP}")
  run kubectl exec curl -- curl http://${pod_ip}:8095/metrics
  assert_match "kms_request_bucket" "${output}"
  assert_success
  # cleanup
  run kubectl delete pod curl --force --grace-period 0
}
# Hits the plugin's healthz endpoint and checks both the body and HTTP status.
@test "check healthz for kms plugin" {
  kubectl run curl --image=curlimages/curl:7.75.0 -- tail -f /dev/null
  kubectl wait --for=condition=Ready --timeout=60s pod curl
  local pod_ip=$(kubectl get pod -n kube-system -l component=azure-kms-provider -o jsonpath="{.items[0].status.podIP}")
  result=$(kubectl exec curl -- curl http://${pod_ip}:8787/healthz)
  [[ "${result//$'\r'}" == "ok" ]]
  result=$(kubectl exec curl -- curl http://${pod_ip}:8787/healthz -o /dev/null -w '%{http_code}\n' -s)
  [[ "${result//$'\r'}" == "200" ]]
  # cleanup
  run kubectl delete pod curl --force --grace-period 0
}
|
#!/bin/sh
# Abort on any error (-e) and on use of unset variables (-u).
# Fix: the original `set -euo >/dev/null` never enabled pipefail -- `-o` with
# no argument merely lists the current options (and the output was discarded).
set -eu
# Log in to Docker Hub; the token is piped via stdin so it never shows in `ps`.
echo "${DOCKER_HUB_TOKEN}" | docker login --username "${DOCKER_HUB_USERNAME}" --password-stdin
|
using System;
using System.Runtime.InteropServices;
namespace MemoryStatusProgram
{
    /// <summary>
    /// P/Invoke wrapper around kernel32's GlobalMemoryStatusEx, plus a demo
    /// entry point that prints the machine's memory counters.
    /// </summary>
    public static unsafe partial class Windows
    {
        /// <summary>Managed mirror of the native MEMORYSTATUSEX structure.</summary>
        [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
        public struct MEMORYSTATUSEX
        {
            public uint dwLength;       // size of this struct in bytes; must be set before the call
            public uint dwMemoryLoad;   // approximate percentage of physical memory in use
            public ulong ullTotalPhys;
            public ulong ullAvailPhys;
            public ulong ullTotalPageFile;
            public ulong ullAvailPageFile;
            public ulong ullTotalVirtual;
            public ulong ullAvailVirtual;
            public ulong ullAvailExtendedVirtual; // reserved by the Win32 API
        }

        // SetLastError allows Marshal.GetLastWin32Error() on failure.
        [DllImport("kernel32", ExactSpelling = true, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool GlobalMemoryStatusEx(ref MEMORYSTATUSEX lpBuffer);

        /// <summary>Entry point: queries and prints the current memory status.</summary>
        public static void Main()
        {
            MEMORYSTATUSEX memoryStatus = new MEMORYSTATUSEX();
            // The native call rejects the struct unless dwLength is pre-filled.
            memoryStatus.dwLength = (uint)Marshal.SizeOf(typeof(MEMORYSTATUSEX));
            if (GlobalMemoryStatusEx(ref memoryStatus))
            {
                Console.WriteLine("Memory Status:");
                Console.WriteLine($"Total Physical Memory: {memoryStatus.ullTotalPhys} bytes");
                Console.WriteLine($"Available Physical Memory: {memoryStatus.ullAvailPhys} bytes");
                Console.WriteLine($"Total Virtual Memory: {memoryStatus.ullTotalVirtual} bytes");
                Console.WriteLine($"Available Virtual Memory: {memoryStatus.ullAvailVirtual} bytes");
            }
            else
            {
                Console.WriteLine("Failed to retrieve memory status.");
            }
        }
    }
} |
def print_nums(n):
    """Print the integers from min(n, 0) up to n inclusive, one per line.

    Matches the recursive original exactly: for n >= 0 it prints 0..n;
    for negative n it prints just n.
    """
    for value in range(min(n, 0), n + 1):
        print(value)


print_nums(10)
java -Xms1g -Xmx1g -XX:MaxMetaspaceSize=128m -XX:CompileThreshold=1000 -XX:+UseG1GC -jar ./target/benchmarks.jar |
#!/bin/sh
set -e
# Docker image coordinates of the GA4GH htsget reference server.
DOCKER_NAME='ga4gh/htsget-refserver'
SERVER_VERSION='1.1.0'
DOCKER_COORDINATE=${DOCKER_NAME}:${SERVER_VERSION}
# NOTE(review): hard-coded Travis CI build path — breaks outside that layout.
WORKING_DIR=/home/travis/build/samtools/htsjdk
docker pull ${DOCKER_COORDINATE}
# Start the server detached on port 3000, mounting test data and config scripts.
docker container run -d --name htsget-server -p 3000:3000 --env HTSGET_PORT=3000 --env HTSGET_HOST=http://127.0.0.1:3000 \
  -v $WORKING_DIR/src/test/resources/htsjdk/samtools/BAMFileIndexTest/:/data \
  -v $WORKING_DIR/scripts/htsget-scripts:/data/scripts \
  ${DOCKER_COORDINATE} \
  ./htsref -config /data/scripts/htsget_config.json
docker container ls -a
# Smoke-check that the server answers (NOTE(review): may race container startup — confirm).
curl http://localhost:3000
|
class LazyPropertyExample {
    /// Lazily-created label; the closure runs once, on first access.
    lazy var detailTextLabel: UILabel = {
        let label = UILabel()
        label.textColor = .red
        label.font = UIFont.systemFont(ofSize: 14)
        label.textAlignment = .center
        return label
    }()

    init() {
        setupDetailTextLabel()
    }

    /// Intended to attach `detailTextLabel` to the view hierarchy (currently a stub).
    func setupDetailTextLabel() {
        // Add the detailTextLabel as a subview to the class's view
        // For example:
        // self.view.addSubview(detailTextLabel)
    }
}
<filename>packages/fes-plugin-model/src/runtime/models/initialState.js
import { inject } from 'vue';
/**
 * Model hook exposing the app-level initial state that was provided
 * (via Vue's provide/inject) under the 'initialState' key.
 */
export default function initialStateModel() {
    return inject('initialState');
}
|
-- Applicant records.
CREATE TABLE applicants (
    id INTEGER AUTO_INCREMENT PRIMARY KEY,  -- surrogate key (AUTO_INCREMENT is MySQL syntax)
    name VARCHAR(100) NOT NULL,
    age SMALLINT NOT NULL,
    gender ENUM('M','F') NOT NULL,          -- ENUM is MySQL-specific
    address VARCHAR(200) NOT NULL,
    phone_number VARCHAR(20) NOT NULL,
    email VARCHAR(50) NOT NULL
);
#!/bin/bash
# Interval presets, in seconds.
M1=60      # 1 Min
M10=600    # 10 Min
M30=1800   # 30 Min
H1=3600    # 1 Hour
H3=10800   # 3 Hour
H6=21600   # 6 Hour
H12=43200  # 12 Hour
H24=86400  # 24 Hour
CNT=10       # number of iterations
SLEEP=$H12   # pause between runs (12 hours)

# Run the Azure image-list fetch CNT times, sleeping SLEEP seconds after each run.
function loop() {
    local i=0
    while [ "$i" -lt "$CNT" ]; do
        ./azure-image-list-curl.sh
        sleep "$SLEEP"
        i=$((i + 1))
    done
}
loop
|
package main
import (
"os"
"testing"
"github.com/giodamelio/aoc-2020-go/intcode"
"github.com/gitchander/permutation"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/stretchr/testify/assert"
)
// init routes zerolog through a plain (uncolored) console writer on stderr at
// Info level so test output stays readable.
func init() {
	out := zerolog.NewConsoleWriter()
	out.Out = os.Stderr
	out.NoColor = true
	log.Logger = log.Output(out)
	zerolog.SetGlobalLevel(zerolog.InfoLevel)
}
// TestChainingComputers pipes the output of one Intcode computer into the
// input of a second; each doubles its input, so 10 -> 20 -> 40.
func TestChainingComputers(t *testing.T) {
	// Take an input, double it and output the result
	program := []intcode.AddressValue{
		// Program
		3, 9, // INPUT read input into address 9
		102, 2, 9, 9, // MULTIPLY address 9 by 2
		4, 9, // OUTPUT send the contents of address 9 to output
		99, // HALT
		// Data
		0,
	}
	computer1 := intcode.NewComputer(program)
	computer2 := intcode.NewComputer(program)
	go send(computer1.Input, intcode.AddressValue(10))
	go pipe(computer1.Output, []chan intcode.AddressValue{computer2.Input})
	go computer1.Run()
	go computer2.Run()
	output := <-computer2.Output
	assert.Equal(t, intcode.AddressValue(40), output)
}
// TestPermutations confirms the permutation library yields 3! = 6 orderings.
func TestPermutations(t *testing.T) {
	a := []int{1, 2, 3}
	p := permutation.New(permutation.IntSlice(a))
	i := 0
	for p.Next() {
		i++
	}
	assert.Equal(t, 6, i)
}
// TestPipe checks that pipe fans a single input value out to every listed channel.
func TestPipe(t *testing.T) {
	input := make(chan intcode.AddressValue)
	output := make(chan intcode.AddressValue)
	output2 := make(chan intcode.AddressValue)
	go send(input, 10)
	go pipe(input, []chan intcode.AddressValue{output, output2})
	assert.Equal(t, intcode.AddressValue(10), <-output)
	assert.Equal(t, intcode.AddressValue(10), <-output2)
}

// TestSend checks that send delivers a single value on the given channel.
func TestSend(t *testing.T) {
	output := make(chan intcode.AddressValue)
	go send(output, 10)
	assert.Equal(t, intcode.AddressValue(10), <-output)
}
// TestAmplifyChain runs the AoC day-7 part-1 worked examples through the
// amplifier chain with their documented phase settings and expected outputs.
func TestAmplifyChain(t *testing.T) {
	exampleProgram1 := []intcode.AddressValue{3, 15, 3, 16, 1002, 16, 10, 16, 1, 16, 15, 15, 4, 15, 99, 0, 0}
	output1 := amplifierChain(exampleProgram1, []int{4, 3, 2, 1, 0})
	assert.Equal(t, 43210, output1)
	exampleProgram2 := []intcode.AddressValue{
		3, 23, 3, 24, 1002, 24, 10, 24, 1002, 23, -1, 23,
		101, 5, 23, 23, 1, 24, 23, 23, 4, 23, 99, 0, 0,
	}
	output2 := amplifierChain(exampleProgram2, []int{0, 1, 2, 3, 4})
	assert.Equal(t, 54321, output2)
	exampleProgram3 := []intcode.AddressValue{
		3, 31, 3, 32, 1002, 32, 10, 32, 1001, 31, -2, 31, 1007, 31, 0, 33,
		1002, 33, 7, 33, 1, 33, 31, 31, 1, 32, 31, 31, 4, 31, 99, 0, 0, 0,
	}
	output3 := amplifierChain(exampleProgram3, []int{1, 0, 4, 3, 2})
	assert.Equal(t, 65210, output3)
}

// TestAmplifyChainFeedbackMode runs the part-2 feedback-loop worked examples.
func TestAmplifyChainFeedbackMode(t *testing.T) {
	exampleProgram1 := []intcode.AddressValue{
		3, 26, 1001, 26, -4, 26, 3, 27, 1002, 27, 2, 27, 1, 27, 26,
		27, 4, 27, 1001, 28, -1, 28, 1005, 28, 6, 99, 0, 0, 5,
	}
	output1 := amplifierChainFeedbackMode(exampleProgram1, []int{9, 8, 7, 6, 5})
	assert.Equal(t, 139629729, output1)
	exampleProgram2 := []intcode.AddressValue{
		3, 52, 1001, 52, -5, 52, 3, 53, 1, 52, 56, 54, 1007, 54, 5, 55, 1005, 55, 26, 1001, 54,
		-5, 54, 1105, 1, 12, 1, 53, 54, 53, 1008, 54, 0, 55, 1001, 55, 1, 55, 2, 53, 55, 53, 4,
		53, 1001, 56, -1, 56, 1005, 56, 6, 99, 0, 0, 0, 0, 10,
	}
	output2 := amplifierChainFeedbackMode(exampleProgram2, []int{9, 7, 8, 5, 6})
	assert.Equal(t, 18216, output2)
}
// TestPart1 and TestPart2 pin the puzzle answers for the real input so any
// regression in the intcode interpreter is caught immediately.
func TestPart1(t *testing.T) {
	parsedInput, err := intcode.ParseInput(rawInput)
	assert.Nil(t, err)
	output := part1(parsedInput)
	assert.Equal(t, 359142, output)
}

func TestPart2(t *testing.T) {
	parsedInput, err := intcode.ParseInput(rawInput)
	assert.Nil(t, err)
	output := part2(parsedInput)
	assert.Equal(t, 4374895, output)
}
|
#!/bin/bash -eu
# Copyright 2018 Google Inc.
# Modifications copyright (C) 2021 ISP RAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# --- Phase 1: build the modern fuzzers under //tensorflow/security/fuzzing ---
# Locate clang's bundled libFuzzer archive to link against.
export LIB_FUZZING_ENGINE=$(find $(llvm-config --libdir) -name libclang_rt.fuzzer-x86-64.a | head -1)
export CC=clang
export CXX=clang++
export CFLAGS="-g -fsanitize=fuzzer-no-link,undefined,address"
export CXXFLAGS="-g -fsanitize=fuzzer-no-link,undefined,address"
SANITIZERS="address undefined"
# Force Python3, run configure.py to pick the right build config
PYTHON=python3
yes "" | ${PYTHON} configure.py
# Since Bazel passes flags to compilers via `--copt`, `--conlyopt` and
# `--cxxopt`, we need to move all flags from `$CFLAGS` and `$CXXFLAGS` to these.
# We don't use `--copt` as warnings issued by C compilers when encountering a
# C++-only option results in errors during build.
#
# Note: Make sure that by this line `$CFLAGS` and `$CXXFLAGS` are properly set
# up as further changes to them won't be visible to Bazel.
#
# Note: for builds using the undefined behavior sanitizer we need to link
# `clang_rt` ubsan library. Since Bazel uses `clang` for linking instead of
# `clang++`, we need to add the additional `--linkopt` flag.
# See issue: https://github.com/bazelbuild/bazel/issues/8777
mkdir /fuzzer
declare EXTRA_FLAGS="\
$(
for f in ${CFLAGS}; do
  echo "--conlyopt=${f}" "--linkopt=${f}"
done
for f in ${CXXFLAGS}; do
  echo "--cxxopt=${f}" "--linkopt=${f}"
done
for f in ${SANITIZERS}; do
  if [ "${f}" = "undefined" ]
  then
    echo "--linkopt=$(find $(llvm-config --libdir) -name libclang_rt.ubsan_standalone_cxx-x86_64.a | head -1)"
  fi
done
)"
# Ugly hack to get LIB_FUZZING_ENGINE only for fuzz targets
# and not for other binaries such as protoc
sed -i -e 's/linkstatic/linkopts = \["-fsanitize=fuzzer,address,undefined"\],\nlinkstatic/' tensorflow/security/fuzzing/tf_fuzzing.bzl
# Determine all fuzz targets. To control what gets fuzzed with OSSFuzz, all
# supported fuzzers are in `//tensorflow/security/fuzzing`.
# Ignore fuzzers tagged with `no_oss` in opensource.
declare FUZZERS=$(bazel query 'kind(cc_.*, tests(//tensorflow/security/fuzzing/...)) - attr(tags, no_oss, kind(cc_.*, tests(//tensorflow/security/fuzzing/...)))' | grep -v checkpoint_reader_fuzz)
# checkpoint_reader_fuzz seems out of date with the API
# Build the fuzzer targets.
# Pass in `--config=libc++` to link against libc++.
# Pass in `--verbose_failures` so it is easy to debug compile crashes.
# Pass in `--strip=never` to ensure coverage support.
# Since we have `assert` in fuzzers, make sure `NDEBUG` is not defined
bazel build \
  --jobs=$(nproc) \
  --config=libc++ \
  --subcommands \
  ${EXTRA_FLAGS} \
  --verbose_failures \
  --strip=never \
  --copt='-UNDEBUG' \
  -- ${FUZZERS}
# The fuzzers built above are in the `bazel-bin/` symlink. But they need to be
# in `$OUT`, so move them accordingly.
for bazel_target in ${FUZZERS}; do
  colon_index=$(expr index "${bazel_target}" ":")
  fuzz_name="${bazel_target:$colon_index}"
  bazel_location="bazel-bin/${bazel_target/:/\/}"
  cp ${bazel_location} /fuzzer/$fuzz_name
done
# --- Phase 2: build the legacy op fuzzers under //tensorflow/core/kernels/fuzzing ---
# Re-export flags (vptr checks disabled) and re-run the legacy ./configure.
export LDFLAGS="-g -fsanitize=fuzzer-no-link,address,undefined -fno-sanitize=vptr"
export CFLAGS="-g -fsanitize=fuzzer-no-link,address,undefined -fno-sanitize=vptr"
export CXXFLAGS="-g -fsanitize=fuzzer-no-link,address,undefined -fno-sanitize=vptr"
echo " write_to_bazelrc('import %workspace%/tools/bazel.rc')" >> configure.py
yes "" | ./configure
# Rebuild the Bazel flag list, additionally linking clang's ASan C++ runtime.
declare EXTRA_FLAGS="\
$(
for f in ${CFLAGS}; do
  echo "--conlyopt=${f}" "--linkopt=${f}"
done
for f in ${CXXFLAGS}; do
  echo "--cxxopt=${f}" "--linkopt=${f}"
done
for f in ${SANITIZERS}; do
  if [ "${f}" = "undefined" ]
  then
    echo "--linkopt=$(find $(llvm-config --libdir) -name libclang_rt.ubsan_standalone_cxx-x86_64.a | head -1)"
  fi
  if [ "${f}" = "address" ]
  then
    echo "--linkopt=$(find $(llvm-config --libdir) -name libclang_rt.asan_cxx-x86_64.a | head -1)"
    echo "--linkopt=$(find $(llvm-config --libdir) -name libclang_rt.asan-x86_64.a | head -1)"
  fi
done
)"
# Collect op-fuzzer names from the BUILD file (decode_base64 is excluded).
declare FUZZERS=$(grep '^tf_ops_fuzz_target' tensorflow/core/kernels/fuzzing/BUILD | cut -d'"' -f2 | grep -v decode_base64)
# Append a cc_tf macro that wraps each op fuzzer as a cc_test with fuzzer linkopts.
cat >> tensorflow/core/kernels/fuzzing/tf_ops_fuzz_target_lib.bzl << END
def cc_tf(name):
    native.cc_test(
        name = name + "_fuzz",
        deps = [
            "//tensorflow/core/kernels/fuzzing:fuzz_session",
            "//tensorflow/core/kernels/fuzzing:" + name + "_fuzz_lib",
            "//tensorflow/cc:cc_ops",
            "//tensorflow/cc:scope",
            "//tensorflow/core:core_cpu",
        ],
        linkopts = ["-fsanitize=fuzzer,address,undefined"]
    )
END
cat >> tensorflow/core/kernels/fuzzing/BUILD << END
load("//tensorflow/core/kernels/fuzzing:tf_ops_fuzz_target_lib.bzl", "cc_tf")
END
# Register one cc_tf target per fuzzer.
for fuzzer in ${FUZZERS}; do
  echo cc_tf\(\"${fuzzer}\"\) >> tensorflow/core/kernels/fuzzing/BUILD
done
# Keep-going (-k) build: some legacy op fuzzers are expected to fail, hence `|| true`.
bazel build \
  -k \
  --jobs=$(nproc) \
  --config=monolithic \
  --dynamic_mode=off \
  --subcommands \
  ${EXTRA_FLAGS} \
  --verbose_failures \
  --strip=never \
  --copt='-UNDEBUG' \
  --copt='-DADDRESS_SANITIZER' \
  --copt='-DUNDEFINED_BEHAVIOR_SANITIZER' \
  --define=framework_shared_object=false \
  --action_env=ASAN_OPTIONS="detect_leaks=0,detect_odr_violation=0" \
  -- //tensorflow/core/kernels/fuzzing:all || true
# Copy the successfully built op fuzzers to the output directory.
for fuzzer in ${FUZZERS}; do
  fuzz_path="bazel-bin/tensorflow/core/kernels/fuzzing/${fuzzer}_fuzz"
  cp ${fuzz_path} /fuzzer/${fuzzer}_fuzz
done
# Finally, make sure we don't accidentally run with stuff from the bazel cache.
rm -f bazel-*
|
#!/bin/bash
# Copyright 2020 Johns Hopkins University (Jesus Villalba)
# Apache 2.0.
#
# Job launcher (Kaldi-style) and the score-matrix split factor: the trial list
# is partitioned into num_parts x num_parts (model x segment) sub-jobs.
cmd=run.pl
num_parts=8
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;
set -e
if [ $# -ne 4 ]; then
    echo "Usage: $0 <ndx> <enroll-file> <vector-file> <output-scores>"
    exit 1;
fi
ndx_file=$1
enroll_file=$2
vector_file=$3
output_file=$4
output_dir=$(dirname $output_file)
mkdir -p $output_dir/log
name=$(basename $output_file)
echo "$0 score $ndx_file"
# Launch all sub-jobs in the background; each writes a partial score file
# suffixed with its model/segment part indices.
for((i=1;i<=$num_parts;i++));
do
    for((j=1;j<=$num_parts;j++));
    do
        $cmd $output_dir/log/${name}_${i}_${j}.log \
            hyp_utils/conda_env.sh steps_backend/eval-be-cos-Nvs1.py \
            --iv-file scp:$vector_file \
            --ndx-file $ndx_file \
            --enroll-file $enroll_file \
            --score-file $output_file \
            --model-part-idx $i --num-model-parts $num_parts \
            --seg-part-idx $j --num-seg-parts $num_parts &
    done
done
wait
# Merge the partial score files (named <output>-III-JJJ) into the final,
# de-duplicated score file.
if [ $num_parts -gt 1 ];then
    for((i=1;i<=$num_parts;i++));
    do
        for((j=1;j<=$num_parts;j++));
        do
            cat $output_file-$(printf "%03d" $i)-$(printf "%03d" $j)
        done
    done | sort -u > $output_file
fi
|
package com.github.chen0040.leetcode.day08.medium;
/**
 * Created by xschen on 3/8/2017.
 *
 * summary:
 * You are given two non-empty linked lists representing two non-negative integers. The most significant digit comes first and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
 * You may assume the two numbers do not contain any leading zero, except the number 0 itself.
 *
 * link: https://leetcode.com/problems/add-two-numbers-ii/description/
 *
 * Approach: repeatedly split the sum into a digit-wise remainder (sum mod 10
 * per digit, no carry propagation) and a carry list (shifted left by one
 * digit), then recurse until the carry is all zeros.
 */
public class AddTwoNumbersII {
    /** Singly-linked list node holding one decimal digit. */
    public class ListNode {
        int val;
        ListNode next;
        ListNode(int x) { val = x; }
    }

    public class Solution {
        /**
         * Adds two most-significant-digit-first numbers.
         * Recursion: a + b == remainder(a, b) + carry(a, b); terminates when
         * the carry list becomes all zeros.
         */
        public ListNode addTwoNumbers(ListNode a, ListNode b) {
            if(a == null) return b;
            if(b == null) return a;
            if(isZero(b)) {
                return trim(a);
            }
            ListNode carry = computeCarry(a, b);
            a = computeRemainder(a, b);
            return addTwoNumbers(a, carry);
        }

        /** Strips leading zero digits, keeping a single 0 for the number zero. */
        ListNode trim(ListNode a) {
            while(a != null && a.val == 0) {
                a = a.next;
            }
            if(a == null){
                return new ListNode(0);
            }
            return a;
        }

        /** Returns true when every digit of the list is zero. */
        boolean isZero(ListNode b) {
            while(b !=null) {
                if(b.val != 0) {
                    return false;
                }
                b = b.next;
            }
            return true;
        }

        /**
         * Builds the carry list: for each digit pair, (d1+d2)/10, appended
         * with a trailing 0 node — i.e. the carries shifted one place left.
         */
        ListNode computeCarry(ListNode a, ListNode b) {
            // Reverse both lists so digits align least-significant first.
            a = reverse(a);
            b = reverse(b);
            ListNode head = null;
            ListNode old = null;
            int val3 = 0;
            while(a != null || b != null) {
                int val1 = a != null ? a.val : 0;
                int val2 = b != null ? b.val : 0;
                val3 = (val1 + val2) / 10;
                old = head;
                head = new ListNode(val3);
                head.next = old;
                if(a != null) a = a.next;
                if(b != null) b = b.next;
            }
            // Append a trailing zero: multiplies the carry number by 10.
            ListNode x = head;
            while(x != null) {
                if(x.next == null){
                    x.next = new ListNode(0);
                    break;
                }
                x = x.next;
            }
            return head;
        }

        /** Builds the remainder list: each digit pair summed modulo 10 (no carry). */
        ListNode computeRemainder(ListNode a, ListNode b) {
            a = reverse(a);
            b = reverse(b);
            ListNode head = null;
            while(a != null || b != null) {
                int val1 = a != null ? a.val : 0;
                int val2 = b != null ? b.val : 0;
                int val3 = (val1 + val2) % 10;
                ListNode old = head;
                head = new ListNode(val3);
                head.next = old;
                if(a != null) a = a.next;
                if(b != null) b = b.next;
            }
            return head;
        }

        /** Returns a reversed copy of the list (original nodes are not mutated). */
        ListNode reverse(ListNode a) {
            ListNode x = a;
            ListNode head = null;
            while(x != null) {
                ListNode old = head;
                head = new ListNode(x.val);
                head.next = old;
                x = x.next;
            }
            return head;
        }
    }
}
|
<gh_stars>0
from collections import defaultdict
import networkx as nx
from .message import get_constructor_by_platform
class Conversation:
    """
    A container class for managing collections of UniMessage (post) objects.
    """
    def __init__(self, posts=None, convo_id=None):
        """
        Constructor for Conversation object.
        Parameters
        ---------
        posts
            An optional dictionary of messages/posts; keys should be unique IDs.
        convo_id
            An optional explicit identifier for this conversation; if omitted,
            one is derived from the source posts on demand.
        """
        if not posts:
            posts = {}
        self._posts = posts  # uid -> post object
        self._convo_id = convo_id
        # Lazy cache of structural relations ('parents', 'children', 'ancestors',
        # 'descendant', 'siblings', 'before', 'after'), each mapping uid -> set of uids.
        self._relation_map = defaultdict(dict)
        # Lazily-built set of post authors; rebuilt after removals.
        self._author_set = set()
    def __add__(self, other):
        """
        Defines the addition operation over Conversation objects.
        Returns a new copy of a conversation.
        Parameters
        ---------
        other : UniMessage
            Another conversation to be added to this one.
        Returns
        -------
        Conversation
            The combination of this conversation and the conversation in `other`
        """
        # NOTE(review): posts are shared by reference (not deep-copied); posts with
        # the same uid are merged via add_post's '|=' path.
        convo = Conversation(convo_id=self.convo_id + '++' + other.convo_id)
        for post in other.posts.values():
            convo.add_post(post)
        for post in self.posts.values():
            convo.add_post(post)
        return convo
    @property
    def posts(self):
        """
        Returns a dictionary of posts, keyed by their UIDs.
        Returns
        -------
        dict(UID, UniMessage)
            The dictionary of posts contained in this Conversation object
        """
        return self._posts
    @property
    def convo_id(self):
        """
        The conversation identifier
        Returns
        -------
        Any (or str)
            Returns a conversation identifier. Creates ones from sources if unspecified.
        """
        return self._convo_id if self._convo_id else 'CONV_' + '-'.join(map(str, sorted(self.get_sources())))
    @property
    def authors(self):
        # The set of authors across all posts; cached until invalidated by remove_post.
        if not self._author_set:
            self._author_set = set([self.posts[pid].author for pid in self.posts])
        return self._author_set
    def add_post(self, post):
        """
        Adds a post to the conversational container.
        If a post with the same UID already exists, the two are merged with the
        post type's '|=' operator instead of being overwritten.
        Parameters
        ---------
        post : UniMessage, or derivative concrete class
            The post object to be added.
        Returns
        -------
        None
        """
        if post.uid in self._posts and self._posts[post.uid]:
            self._posts[post.uid] |= post
        else:
            self._posts[post.uid] = post
        self._author_set.add(post.author)
    def remove_post(self, uid):
        """
        Deletes a post from the conversational container using its UID.
        Parameters
        ---------
        uid : Hashable
            Unique identifier for the post to delete.
        Returns
        -------
        None
        """
        del self._posts[uid]
        # Invalidate the author cache; it is rebuilt lazily by the 'authors' property.
        self._author_set = set()
    def as_graph(self):
        """
        Constructs (and returns) a networkx Graph object
        from the contained posts and edges.
        Returns
        -------
        networkx.Graph
            The networkx graph associated with this Conversation
        """
        graph = nx.Graph()
        # add posts as nodes
        for uid in self._posts:
            graph.add_node(uid)
        # add reply connections as edges; only edges between posts that are both
        # contained in this conversation are kept
        for uid, post in self._posts.items():
            for rid in post.reply_to:
                if uid in self._posts and rid in self._posts:
                    graph.add_edge(uid, rid)
        return graph
    def segment(self):
        """
        Segments a conversation into disjoint (i.e., not connected by any replies) sub-conversations.
        If a single conversation is contained in this object,
        this function will return a list with a single element: a copy of this object.
        Returns
        -------
        list(Conversation)
            A list of sub-conversations
        """
        segments = []
        # Each connected component of the reply graph becomes one sub-conversation.
        for node_set in nx.connected_components(self.as_graph()):
            convo = Conversation()
            for uid in node_set:
                convo.add_post(self.posts[uid])
            segments.append(convo)
        return segments
    def to_json(self):
        """
        Returns a JSON representation of this object.
        Returns
        -------
        list(JSON/dict)
            The dictionary/JSON representation of the Conversation
        """
        return [post.to_json() for post in self.posts.values()]
    @staticmethod
    def from_json(raw):
        """
        Converts a JSON representation of a Conversation into a full object.
        Parameters
        ---------
        raw : JSON/dict
            The raw JSON
        Returns
        -------
        Conversation
            The conversation read from the raw JSON
        """
        # Each post dict carries a 'platform' key that selects the concrete
        # UniMessage subclass used to deserialize it.
        convo = Conversation()
        for p in [get_constructor_by_platform(pjson['platform']).from_json(pjson) for pjson in raw]:
            convo.add_post(p)
        return convo
    def get_sources(self):
        """
        Returns the originating (non-reply) posts included in this conversation.
        Returns
        -------
        set(UID)
            The set of unique IDs of posts that originate conversation (are not replies)
        """
        # A source is a post none of whose reply_to targets are present in this
        # conversation (replies to posts outside the container still count as sources).
        return {uid for uid, post in self._posts.items() if not {rid for rid in post.reply_to if rid in self._posts}}
    def filter(self, by_langs=None, min_chars=0, before=None, after=None, by_tags=None, by_platform=None, by_author=None):
        """
        Returns the set of post UIDs that meet the parameterized criteria
        Parameters
        ---------
        by_langs : set(str)
            The desired language codes to be retained. (Default: None)
        min_chars : int
            The minimum number of characters a post should have. (Default: 0)
        before : datetime.datetime
            The earliest datetime desired. (Default: None)
        after : datetime.datetime
            The latest datetime desired. (Default: None)
        by_tags : set(str)
            The required tags. (Default: None)
        by_platform : set(str)
            A set of string names of platforms that should be retained
        by_author : str
            An author
        Returns
        -------
        set(hashable)
            Set of UIDs
        """
        # Collect failures in 'drop', then subtract from the full key set.
        drop = set()
        keep = set(self.posts.keys())
        for uid, post in self._posts.items():
            if by_author is not None and post.author != by_author:
                drop.add(uid)
                continue
            if len(post.text) < min_chars:
                drop.add(uid)
                continue
            if by_langs and post.lang not in by_langs:
                drop.add(uid)
                continue
            # Posts with no timestamp are dropped whenever a time bound is given.
            if before and (post.created_at is None or post.created_at >= before):
                drop.add(uid)
                continue
            if after and (post.created_at is None or post.created_at <= after):
                drop.add(uid)
                continue
            # All requested tags must be present on the post.
            if by_tags and by_tags != (by_tags & post.tags):
                drop.add(uid)
                continue
            if by_platform and post.platform not in by_platform:
                drop.add(uid)
                continue
        keep -= drop
        return keep
    def time_order(self):
        """
        Returns a time series of the UIDs of posts within this Conversation.
        Returns
        -------
        list(UID)
            The list of UIDs of the posts in the conversation, in temporal order.
            Returns an empty list if any post lacks a comparable created_at.
        """
        try:
            return sorted(self._posts.keys(), key=lambda k: self._posts[k].created_at)
        except TypeError:
            # Raised when created_at values are missing/uncomparable (e.g. None).
            return []
    def text_stream(self):
        """
        Returns the text of the Conversation as a single stream.
        If timestamps are available, text will appear in temporal order.
        Returns
        -------
        list(str)
            The text of the conversation, by post, in temporal order (if available)
        """
        order = self.time_order()
        if order:
            return [self._posts[uid].text for uid in order]
        else:
            # No usable timestamps: fall back to dictionary iteration order.
            return [self._posts[uid].text for uid in self._posts]
    def redact(self, assign_ints=True):
        """
        Redacts user information from the conversation.
        Parameters
        ----------
        assign_ints : bool
            If True, assigns a unique integer to each user such the user will be referred to as `USER><d+>`
            Otherwise, all user redactions will become a `USER` token.
        Returns
        -------
        None
        """
        # Build a consistent user -> token map across the whole conversation first,
        # then apply it to every post so the same user maps to the same token.
        rd = {}
        for uid in self._posts:
            for user in self._posts[uid].get_mentions():
                if user not in rd:
                    rd[user] = f'USER{len(rd)}' if assign_ints else 'USER'
        for uid in self._posts:
            self._posts[uid].redact(rd)
    def get_ancestors(self, uid, include_post=False):
        """
        Returns the ancestor posts/path for post `uid`.
        Parameters
        ----------
        uid : Hashable
            The unique identifier of desired post
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of ancestor posts
        """
        if 'ancestors' in self._relation_map and uid in self._relation_map['ancestors']:
            pids = self._relation_map['ancestors'][uid]
            filt_ps = {pid: self.posts[pid] for pid in pids}
        else:
            # get parents
            ps = self.get_parents(uid)
            filt_ps = dict(ps.posts)
            # for each parent, add its ancestors (recursive walk up the reply tree)
            for pid in ps.posts:
                for xid in self.get_ancestors(pid).posts:
                    if xid not in filt_ps:
                        filt_ps[xid] = self.posts[xid]
            self._relation_map['ancestors'][uid] = set(filt_ps.keys())
        ancestors = Conversation(posts=filt_ps, convo_id=self.convo_id + '-' + str(uid) + '-ancestors')
        if include_post:
            ancestors.add_post(self.posts[uid])
        return ancestors
    def get_descendants(self, uid, include_post=False):
        """
        Returns the descendant sub-tree for post `uid`.
        Parameters
        ----------
        uid : Hashable
            The unique identifier of desired post
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of descendant posts
        """
        if 'descendant' in self._relation_map and uid in self._relation_map['descendant']:
            pids = self._relation_map['descendant'][uid]
            filt_ps = {pid: self.posts[pid] for pid in pids}
        else:
            # get children
            ps = self.get_children(uid)
            filt_ps = dict(ps.posts)
            # for each child, add its descendants (recursive walk down the reply tree)
            for pid in ps.posts:
                for xid in self.get_descendants(pid).posts:
                    if xid not in filt_ps:
                        filt_ps[xid] = self.posts[xid]
            self._relation_map['descendant'][uid] = set(filt_ps.keys())
        descendants = Conversation(posts=filt_ps, convo_id=self.convo_id + '-' + str(uid) + '-descendant')
        if include_post:
            descendants.add_post(self.posts[uid])
        return descendants
    def get_parents(self, uid, include_post=False):
        """
        Returns the parent(s) of a post specified by `uid`.
        Parameters
        ----------
        uid : Hashable
            The unique identifier of desired post
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of parent posts
        """
        if 'parents' in self._relation_map and uid in self._relation_map['parents']:
            pids = self._relation_map['parents'][uid]
            filt_ps = {pid: self.posts[pid] for pid in pids}
        else:
            # Parents are the contained posts that `uid` replies to.
            filt_ps = {pid: post for pid, post in self.posts.items() if pid in self.posts[uid].reply_to}
            self._relation_map['parents'][uid] = set(filt_ps.keys())
        cx = Conversation(posts=filt_ps, convo_id=self.convo_id + '-' + str(uid) + '-parents')
        if include_post:
            cx.add_post(self.posts[uid])
        return cx
    def get_children(self, uid, include_post=False):
        """
        Returns the children of a post specified by `uid`.
        Parameters
        ----------
        uid : Hashable
            The unique identifier of desired post
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of children posts
        """
        if 'children' in self._relation_map and uid in self._relation_map['children']:
            pids = self._relation_map['children'][uid]
            filt_ps = {pid: self.posts[pid] for pid in pids}
        else:
            # Children are the contained posts that reply to `uid`.
            filt_ps = {pid: post for pid, post in self.posts.items() if uid in self.posts[pid].reply_to}
            self._relation_map['children'][uid] = set(filt_ps.keys())
        cx = Conversation(posts=filt_ps, convo_id=self.convo_id + '-' + str(uid) + '-children')
        if include_post:
            cx.add_post(self.posts[uid])
        return cx
    def get_siblings(self, uid, include_post=False):
        """
        Returns the siblings of a post specified by `uid`.
        Siblings are the child posts of this post's parent posts.
        Parameters
        ----------
        uid : Hashable
            The unique identifier of desired post
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of sibling posts
        """
        if 'siblings' in self._relation_map and uid in self._relation_map['siblings']:
            pids = self._relation_map['siblings'][uid]
            siblings = Conversation(posts={pid: self.posts[pid] for pid in pids}, convo_id=self.convo_id + '-' + str(uid) + '-siblings')
        else:
            # just caches the parent IDs
            self.get_parents(uid)
            siblings = Conversation(convo_id=self.convo_id + '-' + str(uid) + '-siblings')
            # Union of all children of all parents (this includes `uid` itself).
            for pid in self._relation_map['parents'][uid]:
                siblings += self.get_children(pid)
            self._relation_map['siblings'][uid] = set(siblings.posts.keys())
        if uid in siblings.posts and not include_post:
            siblings.remove_post(uid)
        if include_post and uid not in siblings.posts:
            siblings.add_post(self.posts[uid])
        return siblings
    def get_before(self, uid, include_post=False):
        """
        Returns the collection of posts in this conversation that were created before the post
        with UID `uid`
        Parameters
        ----------
        uid : Hashable
            The UID of the post that is the pivot
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of posts posted before uid
        Raises
        ------
        KeyError
            When `uid` is not in the Conversation
        """
        if 'before' in self._relation_map and uid in self._relation_map['before']:
            pids = self._relation_map['before'][uid]
            filt_posts = {pid: self.posts[pid] for pid in pids}
        else:
            # Reuses filter(): keeps posts strictly earlier than uid's timestamp.
            filt_posts = {pid: self.posts[pid] for pid in self.filter(before=self._posts[uid].created_at)}
            self._relation_map['before'][uid] = set(filt_posts.keys())
        cx = Conversation(posts=filt_posts, convo_id=self.convo_id + '-' + str(uid) + '-before')
        if include_post:
            cx.add_post(self.posts[uid])
        return cx
    def get_after(self, uid, include_post=False):
        """
        Returns the collection of posts in this conversation that were created after the post
        with UID `uid`
        Parameters
        ----------
        uid : Hashable
            The UID of the post that is the pivot
        include_post : bool
            Whether the post should be included in returned collection. Default: False
        Returns
        -------
        Conversation
            The collection of posts posted after uid
        Raises
        ------
        KeyError
            When `uid` is not in the Conversation
        """
        if 'after' in self._relation_map and uid in self._relation_map['after']:
            pids = self._relation_map['after'][uid]
            filt_posts = {pid: self.posts[pid] for pid in pids}
        else:
            # Reuses filter(): keeps posts strictly later than uid's timestamp.
            filt_posts = {pid: self.posts[pid] for pid in self.filter(after=self._posts[uid].created_at)}
            self._relation_map['after'][uid] = set(filt_posts.keys())
        cx = Conversation(posts=filt_posts, convo_id=self.convo_id + '-' + str(uid) + '-after')
        if include_post:
            cx.add_post(self.posts[uid])
        return cx
|
#! /bin/sh
# mkcard.sh v0.5
# (c) Copyright 2009 Graeme Gregory <dp@xora.org.uk>
# Licensed under terms of GPLv2
#
# Parts of the procedure based on the work of Denys Dmytriyenko
# http://wiki.omap.com/index.php/MMC_Boot_Format
export LC_ALL=C

# Require exactly one argument: the block device to (re)partition.
if [ $# -ne 1 ]; then
	echo "Usage: $0 <drive>"
	exit 1;
fi

DRIVE=$1

# Wipe the first MiB of the device (old partition table / boot sector).
dd if=/dev/zero of="$DRIVE" bs=1024 count=1024

# Derive a CHS cylinder count from the device size for sfdisk.
# (Quoting and $(...) used throughout instead of unquoted vars/backticks.)
SIZE=$(fdisk -l "$DRIVE" | grep Disk | grep bytes | awk '{print $5}')
echo DISK SIZE - "$SIZE" bytes

CYLINDERS=$(echo "$SIZE"/255/63/512 | bc)
echo CYLINDERS - "$CYLINDERS"

# Three partitions: 1 cylinder FAT-ish boot area (type 0x0), 20 cylinders
# bootable partition, remainder for the root filesystem.
{
echo ,1,0x0,
echo ,20,,*
echo ,,,
} | sfdisk -D -H 255 -S 63 -C "$CYLINDERS" "$DRIVE"

sleep 1

# Partition device names differ: /dev/sdX2 vs /dev/mmcblkXp2 — try both.
if [ -b "${DRIVE}2" ]; then
	umount "${DRIVE}2"
	mke2fs -t ext2 -L "boot" "${DRIVE}2"
else
	if [ -b "${DRIVE}p2" ]; then
		umount "${DRIVE}p2"
		mke2fs -t ext2 -L "boot" "${DRIVE}p2"
	else
		echo "Cant find boot partition in /dev"
	fi
fi

if [ -b "${DRIVE}3" ]; then
	umount "${DRIVE}3"
	mke2fs -j -t ext4 -L "Angstrom" "${DRIVE}3"
else
	if [ -b "${DRIVE}p3" ]; then
		umount "${DRIVE}p3"
		mke2fs -j -t ext4 -L "Angstrom" "${DRIVE}p3"
	else
		echo "Cant find rootfs partition in /dev"
	fi
fi
|
#!/bin/bash
# Spawns a tmux development session with panes for a watching Babel build,
# nodemon running the built app, a pane pre-typed with "pkill tmux", and a clock.
SESSION=$USER
BABEL_COMMAND="clear &&./node_modules/.bin/babel ./src --out-dir dist --extensions \".ts\" --watch --verbose"
NODE_COMMAND="sleep 2 && ./node_modules/.bin/nodemon -x \"clear && node --inspect=localhost:9999 dist/app.js\""

tmux new-session -d -s "$SESSION"
tmux new-window -t "$SESSION:1" -n 'Watching node App'
# Fix: 'set-option -t -g' consumed '-g' as the argument of '-t', so the global
# option was never set; plain '-g' is the correct way to set a global option.
tmux set-option -g default-shell /bin/zsh
tmux split-window -h
tmux select-pane -t 1
tmux split-window
tmux select-pane -t 1
tmux send-keys "$BABEL_COMMAND" C-m
tmux select-pane -t 2
# Intentionally typed without C-m: the kill command sits ready but is not run.
tmux send-keys "pkill tmux"
tmux split-window
tmux select-pane -t 3
tmux clock-mode
tmux select-pane -t 0
tmux send-keys "$NODE_COMMAND" C-m
# Fix: same '-t -g' misuse as above; enable mouse support globally.
tmux setw -g mouse on
tmux attach-session -t "$SESSION"
|
#!/usr/bin/env bash
# this script is used for running the application
# (fix: the original shebang pointed at python even though this is a shell
# script; the "Windows PowerShell" comment was likewise wrong)
# assign the variables for
# - Flask Environment
FLASK_ENV=$1
# set some default values for the variables
# (fix: '[$FLASK_ENV -eq ""]' was a syntax error — missing spaces around the
# brackets — and '-eq' is an *integer* comparison; use -z for empty-string)
if [ -z "$FLASK_ENV" ]; then
    echo "environment not set: setting to 'development'"
    FLASK_ENV=development
fi
# set the flask environment variables
export FLASK_APP=text_embedding
export FLASK_ENV=$FLASK_ENV
cd ..
# run the flask app (with autoreload)
python -m flask run
|
<gh_stars>0
import Cookies from 'js-cookie'
const TokenKey = 'token';

// Session-scoped auth-token helpers: the token lives in sessionStorage, so it
// disappears when the browser tab is closed.
export function getToken() {
  return sessionStorage.getItem(TokenKey);
}

export function setToken(token) {
  return sessionStorage.setItem(TokenKey, token);
}

export function removeToken() {
  return sessionStorage.removeItem(TokenKey);
}
/**
 * Whether a path points outside the app (absolute http(s), mailto or tel URL).
 * @param {string} path
 * @returns {Boolean}
 */
export function isExternal(path) {
  const externalScheme = /^(https?:|mailto:|tel:)/;
  return externalScheme.test(path);
}
|
#!/bin/sh
# CocoaPods "embed frameworks" build phase script: copies vendored frameworks
# into the app bundle, strips invalid architectures, and re-signs.
# NOTE(review): 'function', 'set -o pipefail' and 'trap ... ERR' are bashisms
# under a /bin/sh shebang — presumably relies on macOS where sh is bash.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework from the build products dir (full path or basename)
  # or accept an absolute/relative path directly.
  # NOTE(review): if none of these match, 'source' stays unset and, with set -u,
  # the rsync below aborts the build.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's binary: ".framework/Name" layout, a bare binary,
  # or resolve a symlinked binary to its target.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .dSYM bundle; $2 (optional): whether to warn when no
# architecture matches (default true).
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # Fix: the echoed trace previously printed "${basename}.framework.dSYM"
      # while the executed rsync used "${basename}.dSYM" — the log now matches
      # the command actually run.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
# Removes architecture slices from a fat binary that are not in $ARCHS.
# Sets STRIP_BINARY_RETVAL=1 (and optionally warns) when no slice matches.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # NOTE(review): the inner quotes in this echo are not escaped (unlike the
    # other echo traces), so the printed trace is slightly mangled; the actual
    # rsync below is unaffected.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Backgrounded so multiple signings can run in parallel; the final
      # 'wait' below blocks until they all complete.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Install the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FBSDKCoreKit/FBSDKCoreKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/FBSDKLoginKit/FBSDKLoginKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SideMenu/SideMenu.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FBSDKCoreKit/FBSDKCoreKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/FBSDKLoginKit/FBSDKLoginKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SideMenu/SideMenu.framework"
fi
# Wait for any backgrounded parallel code-sign jobs before finishing the phase.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
// ==UserScript==
// @namespace https://tampermonkey.myso.kr/
// @name 네이버 블로그 문단 단위 키워드 분석
// @description 네이버 블로그로 작성된 문서를 문단 단위로 키워드를 분석하고 문장의 주요 주제를 간략하게 확인할 수 있습니다.
// @copyright 2021, myso (https://tampermonkey.myso.kr)
// @license Apache-2.0
// @version 1.0.11
// @updateURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.blog-read.contents.analaysis.user.js
// @downloadURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.blog-read.contents.analaysis.user.js
// @author <NAME>
// @connect naver.com
// @match *://blog.naver.com/PostView*
// @grant GM_addStyle
// @grant GM_xmlhttpRequest
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-app.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-add-style.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-add-script.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/vendor/gm-xmlhttp-request-async.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/donation.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-blog.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-search-nx.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/naver-search-rx.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.25/assets/lib/smart-editor-one.js
// @require https://cdnjs.cloudflare.com/ajax/libs/uuid/8.3.2/uuidv4.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.7.2/bluebird.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js
// ==/UserScript==
// ==OpenUserJS==
// @author myso
// ==/OpenUserJS==
// Analyzes Naver blog posts paragraph-by-paragraph on hover: extracts keyword
// terms, looks up their topic metadata, and shows the summary via CSS tooltips.
GM_App(async function main() {
  GM_donation('#viewTypeSelector, #postListBody, #wrap_blog_rabbit, #writeTopArea, #editor_frame', 0);
  GM_addStyle(`
  .se-text-paragraph[data-nx-status-loading="true"] { position: relative; }
  .se-text-paragraph[data-nx-status-loading="true"]::after { position: absolute; z-index: 1; display: block; padding: 0.2em; background: rgba(0,0,0,0.3); color: #fff; font-size: 11px; line-height: 1.3em; border-radius: 0.2em; content: '문장 내 키워드 분석 중...'; right: 0; bottom: 0; word-wrap: break-word; word-break: break-word; overflow-wrap: break-word; white-space: pre-wrap; }
  .se-text-paragraph[data-nx-status-keywords] { position: relative; }
  .se-text-paragraph[data-nx-status-keywords]::after { position: absolute; z-index: 1; display: block; padding: 0.2em; background: rgba(0,0,0,0.3); color: #fff; font-size: 10px; line-height: 1.3em; border-radius: 0.2em; content: '분석 완료'; right: 0; bottom: 0; opacity: 0.5; word-wrap: break-word; word-break: break-word; overflow-wrap: break-word; white-space: pre-wrap; }
  .se-text-paragraph[data-nx-status-keywords]:hover { outline: 1px solid rgba(255, 0, 0, 0.3); }
  .se-text-paragraph[data-nx-status-keywords]:hover::after { z-index: 10000; background: #b4a996; color: #fff; font-size: 11px; content: attr(data-nx-status-keywords); overflow-y: auto; max-height: 240px; opacity: 1; }
  `);
  const se = SE_parse(document); if(!se.content) return;
  const sections = SE_parseNodes(document);
  // Only plain-text sections are analyzed (images, code blocks etc. skipped).
  const sentences = se.sections.filter((section)=>['text'].includes(section.type));
  await Promise.map(sentences, async (sentence, index) => {
    const section = sections[sentence.offset]; if(!section) return;
    const lines = Array.from(section.querySelectorAll('.se-text-paragraph'));
    await Promise.map(sentence.text || [], async (text, offset)=>{
      const line = lines[offset]; if(!line) return;
      // Fix: the handler referenced the implicit global 'event'; declare the
      // parameter explicitly so it works outside legacy window.event semantics.
      line.onmouseover = async function(event) {
        event.preventDefault();
        // Analyze each paragraph at most once; skip while a lookup is running.
        if(line.dataset.nxStatusKeywords || line.dataset.nxStatusLoading) return;
        line.dataset.nxStatusLoading = true;
        const terms = await NX_termsParagraph(text);
        const uniqs = terms.filter((word, index, terms)=>terms.indexOf(word) == index);
        const title = await NR_termsAll(...uniqs);
        // Pair each unique term with its occurrence count and topic metadata,
        // most frequent first.
        const group = uniqs.reduce((group, query, index)=>(group[index] = Object.assign({ query, count: terms.filter(item=>item==query).length }, title.find(o=>o.query == query)), group), []).sort((a, b)=>b.count - a.count);
        line.dataset.nxStatusKeywords = group.map((item)=>{
          const info = [`${_.padEnd(`(${item.count})`, 8)}${_.padEnd(item.query, 10)}`];
          if(item.r_category) info.push(`생산선호주제: ${item.r_category}`)
          if(item.theme && item.theme.main) info.push(`메인소비주제: ${item.theme.main.name}`);
          if(item.theme && item.theme.sub) info.push(`서브소비주제: ${item.theme.sub.map(o=>o.name).join(', ')}`);
          return info.join('\n');
        }).join('\n\n');
        line.dataset.nxStatusLoading = false;
      };
    });
  });
});
#!/usr/bin/env bash
# Tears down the batch-scgenomics CloudFormation stacks: empties their S3
# buckets (stacks with non-empty buckets cannot be deleted), lifts termination
# protection, then deletes the stacks in reverse dependency order
# (tools -> pipeline -> zone), waiting for each delete to complete.
# Get Bucket Names from Stacks
TEMPLATES_BUCKET=$(aws cloudformation describe-stacks --stack-name batch-scgenomics-zone --query 'Stacks[].Outputs[?OutputKey==`TemplatesBucket`].OutputValue' --output text); echo ${TEMPLATES_BUCKET}
RESULTS_BUCKET=$(aws cloudformation describe-stacks --stack-name batch-scgenomics-pipeline --query 'Stacks[].Outputs[?OutputKey==`JobResultsBucket`].OutputValue' --output text); echo ${RESULTS_BUCKET}
# Clear Buckets
aws s3 rm --recursive s3://${TEMPLATES_BUCKET}/
aws s3 rm --recursive s3://${RESULTS_BUCKET}/
# Disable Termination Protection on Stacks
aws cloudformation update-termination-protection --no-enable-termination-protection --stack-name batch-scgenomics-tools
aws cloudformation update-termination-protection --no-enable-termination-protection --stack-name batch-scgenomics-pipeline
aws cloudformation update-termination-protection --no-enable-termination-protection --stack-name batch-scgenomics-zone
# Delete Stacks
aws cloudformation delete-stack --stack-name batch-scgenomics-tools; aws cloudformation wait stack-delete-complete --stack-name batch-scgenomics-tools
aws cloudformation delete-stack --stack-name batch-scgenomics-pipeline; aws cloudformation wait stack-delete-complete --stack-name batch-scgenomics-pipeline
aws cloudformation delete-stack --stack-name batch-scgenomics-zone; aws cloudformation wait stack-delete-complete --stack-name batch-scgenomics-zone
|
# visibilidade public, protect and private
# Demonstrates Python's attribute visibility conventions: public, protected
# (single underscore, convention only) and private (double underscore, which
# triggers name mangling).
class caneta:
    def __init__(self):
        self.cor = 'Eu sou público'
        self._tinta = 'Eu sou protegido'
        self.__tampa = 'Eu sou privado'


obj = caneta()
print(obj.cor)
print(obj._tinta)
# Fix: 'obj.__tampa' raised AttributeError outside the class — double-underscore
# attributes are name-mangled to '_<ClassName>__<name>'.
print(obj._caneta__tampa)
#! /bin/sh
# Copyright (C) 2012-2020 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

# parallel-tests: "make recheck" and "make -k recheck" in the face of
# build failures for the test cases.  See automake bug#11791.

# Needs a native C compiler; helpers (run_make, count_test_results,
# using_gmake, $sleep) come from test-init.sh.
required='cc native'
. test-init.sh

cat >> configure.ac << 'END'
AC_PROG_CC
AC_OUTPUT
END

# The single test is an EXTRA_PROGRAM, so it is built on demand by the
# test harness rather than by "make all".
cat > Makefile.am << 'END'
TESTS = $(EXTRA_PROGRAMS)
EXTRA_PROGRAMS = foo
END

# First version of the test program deliberately fails (exit 1).
echo 'int main (void) { return 1; }' > foo.c

$ACLOCAL
$AUTOCONF
$AUTOMAKE -a
./configure

# Initial run: one test, and it must FAIL.
run_make -O -e FAIL check
count_test_results total=1 pass=0 fail=1 xpass=0 xfail=0 skip=0 error=0

using_gmake || $sleep # Required by BSD make.
run_make -O -e IGNORE -- -k recheck
# Don't trust the exit status of "make -k" for non-GNU makes.
! using_gmake || test $am_make_rc -gt 0 || exit 1
count_test_results total=1 pass=0 fail=1 xpass=0 xfail=0 skip=0 error=0

# Introduce an error in foo.c, that should cause a compilation failure.
$sleep
echo choke me >> foo.c
run_make -O -e FAIL recheck
# We don't get a change to run the testsuite.
$EGREP '(X?PASS|X?FAIL|SKIP|ERROR):' stdout && exit 1
# These shouldn't be removed, otherwise the next make recheck will do
# nothing.
test -f foo.log
test -f foo.trs

using_gmake || $sleep # Required by BSD make.
run_make -O -e IGNORE -- -k recheck
# Don't trust the exit status of "make -k" for non-GNU makes.
! using_gmake || test $am_make_rc -gt 0 || exit 1
# We don't get a change to run the testsuite.
$EGREP '(X?PASS|X?FAIL|SKIP|ERROR):' stdout && exit 1
test -f foo.log
test -f foo.trs

# "Repair" foo.c, and expect everything to work.
$sleep
echo 'int main (void) { return 0; }' > foo.c
run_make -O recheck
count_test_results total=1 pass=1 fail=0 xpass=0 xfail=0 skip=0 error=0
test -f foo.log
test -f foo.trs

# A second recheck has nothing left to re-run (no failed tests remain).
run_make -O recheck
count_test_results total=0 pass=0 fail=0 xpass=0 xfail=0 skip=0 error=0
test -f foo.log
test -f foo.trs

:
|
#!/bin/sh
#
### BEGIN INIT INFO
# Provides:          buildkite-agent
# Required-Start:    $network $local_fs $remote_fs
# Required-Stop:     $remote_fs
# Should-Start:      $named
# Should-Stop:
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: The Buildkite Build Agent
# Description:       The Buildkite Build Agent
### END INIT INFO

# SysV init script for the Buildkite build agent.
# Supports: start | stop | restart | status.

user="buildkite-agent"
cmd="/usr/bin/buildkite-agent start"

name=$(basename "$0")
pid_file="/var/run/$name.pid"
log="/var/log/$name.log"
# NOTE(review): stderr_log is defined but never used (stderr is merged into
# $log below); kept for backward compatibility with external tooling.
stderr_log="/var/log/$name.err"

# Print the PID recorded in the PID file.
get_pid() {
    cat "$pid_file"
}

# True when a PID file exists and that process is still alive.
is_running() {
    [ -f "$pid_file" ] && ps $(get_pid) > /dev/null 2>&1
}

case "$1" in
    start)
        if is_running; then
            echo "Already started"
        else
            echo "Starting $name"
            # Run as the service user, appending stdout+stderr to the log.
            sudo -u "$user" $cmd >>"$log" 2>&1 &
            echo $! > "$pid_file"
            if ! is_running; then
                echo "Unable to start, see $log"
                exit 1
            fi
        fi
        ;;
    stop)
        if is_running; then
            # printf instead of "echo -n": -n is not portable under /bin/sh.
            printf '%s' "Stopping $name.."
            kill $(get_pid)
            # Wait up to 10 seconds for the process to exit.
            # Fix: the original "for i in {1..10}" is a bash brace expansion
            # that plain /bin/sh does not expand, so the loop ran only once.
            i=1
            while [ "$i" -le 10 ]; do
                if ! is_running; then
                    break
                fi
                printf '.'
                sleep 1
                i=$((i + 1))
            done
            echo
            if is_running; then
                echo "Not stopped; may still be shutting down or shutdown may have failed"
                exit 1
            else
                echo "Stopped"
                if [ -f "$pid_file" ]; then
                    rm "$pid_file"
                fi
            fi
        else
            echo "Not running"
        fi
        ;;
    restart)
        $0 stop
        if is_running; then
            echo "Unable to stop, will not attempt to start"
            exit 1
        fi
        $0 start
        ;;
    status)
        if is_running; then
            echo "Running"
        else
            echo "Stopped"
            exit 1
        fi
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status}"
        exit 1
        ;;
esac

exit 0
|
<gh_stars>0
import com.littlejenny.gulimall.auth.GuliAuth15000Main;
import com.littlejenny.gulimall.auth.constants.AuthConstants;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.web.client.RestTemplate;

/**
 * Spring Boot integration smoke test: boots the auth application context
 * and checks that a {@link RedisTemplate} bean can be injected.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {GuliAuth15000Main.class})
public class RedisTest {

    /** Redis client wired from the application context. */
    @Autowired
    private RedisTemplate<String, String> template;

    /**
     * Prints the injected template; injection failure would abort the test
     * before this method runs, so merely reaching it proves wiring works.
     */
    @Test
    public void redis(){
        System.out.println(template);
    }

    // NOTE(review): injected but unused in this test — presumably present to
    // verify the AuthConstants bean loads; confirm before removing.
    @Autowired
    AuthConstants constants;
}
|
<filename>open-sphere-base/control-panels/src/main/java/io/opensphere/controlpanels/animation/view/AnimationInternalFrame.java<gh_stars>10-100
package io.opensphere.controlpanels.animation.view;
import java.awt.Color;
import java.awt.Component;
import java.util.Collections;
import javax.swing.JFrame;
import io.opensphere.core.control.ui.UIRegistry;
import io.opensphere.core.hud.awt.AbstractInternalFrame;
import io.opensphere.core.hud.awt.HUDJInternalFrame;
/**
 * The main animation (timeline) internal frame.
 *
 * On construction the frame registers itself with the HUD component
 * registry so it is rendered as a transparent internal frame.
 */
public class AnimationInternalFrame extends AbstractInternalFrame
{
    /** The serialVersionUID. */
    private static final long serialVersionUID = 1L;

    /** The internal frame border height. */
    private static final int FRAME_BORDER_HEIGHT = 30;

    /** The UI registry. */
    private final UIRegistry myUIRegistry;

    /** The component in the frame. */
    private Component myComponent;

    /**
     * Constructor.
     *
     * @param uiRegistry The UI registry
     */
    public AnimationInternalFrame(UIRegistry uiRegistry)
    {
        // title "Timeline", resizable, closable, not maximizable
        super("Timeline", true, true, false);
        myUIRegistry = uiRegistry;
        setDefaultCloseOperation(JFrame.HIDE_ON_CLOSE);
        // Fully transparent background so the HUD shows through the frame.
        setOpaque(false);
        setBackground(new Color(0, 0, 0, 0));
        // Wrap in a HUD frame and publish it to the component registry,
        // keyed by this object as the source.
        HUDJInternalFrame hudFrame = new HUDJInternalFrame(new HUDJInternalFrame.Builder().setInternalFrame(this));
        myUIRegistry.getComponentRegistry().addObjectsForSource(this, Collections.singleton(hudFrame));
    }

    /**
     * Sizes and positions the frame to the default location.
     *
     * Spans (almost) the parent's width and docks to its bottom edge.
     * NOTE(review): assumes getParent() and myComponent are non-null —
     * call setComponent() after the frame has been added to a container.
     */
    public void resizeAndPositionToDefault()
    {
        setSize(getParent().getWidth() - 6, myComponent.getPreferredSize().height + FRAME_BORDER_HEIGHT);
        setLocation(3, getParent().getHeight() - getHeight());
        validate();
    }

    /**
     * Sets the component within the frame.
     *
     * Replaces any previously set component, then re-applies the default
     * size/position.
     *
     * @param comp the component
     */
    public final void setComponent(Component comp)
    {
        // Set the new component
        if (myComponent != null)
        {
            remove(myComponent);
        }
        myComponent = comp;
        add(comp);
        resizeAndPositionToDefault();
    }
}
|
<filename>blitzd/packets/bnet/BNetFriendsList.cpp
#include "Config.h"
#include "BNetFriendsList.h"
#include "cache/UserCache.h"
namespace Packets
{
    namespace BNet
    {
        // Serializes the client's friends list into _packet.
        //
        // Wire layout (as written below): one count byte, then per friend:
        //   name (c-string), status byte, location byte, game id (uint),
        //   location name (string, may be empty).
        // Status bits: 1 = mutual friendship, 2 = friend has DND set,
        // 4 = friend has Away set. Location codes: 0 offline, 1 online,
        // 2 in channel, 3 in public game, 4 in private game (non-mutual),
        // 5 in private game (mutual, name disclosed).
        // Returns false when the requesting client has no cached user.
        bool BNetFriendsList::Pack()
        {
            Cache::UserCacheItem::Pointer ptr = cl->GetUser();
            if(ptr.get() == NULL)
                return false;

            byte cc = (byte)ptr->GetFriendCount();
            _packet << cc;

            for(byte i = 0; i < cc; ++ i)
            {
                Cache::UserCacheItem::Friend_t * fr = ptr->GetFriend(i);
                // Skip entries whose user record is gone.
                // NOTE(review): skipping here without adjusting the count
                // byte already written means the packet can advertise more
                // entries than it contains — confirm readers tolerate this.
                if(fr->user.get() == NULL)
                    continue;

                std::string uname = fr->user->GetUsername();
                _packet << uname.c_str();

                // Online state is looked up from the live client pool.
                Core::Client * clptr = Core::clientPool[uname];
                byte status = 0;
                byte location = 0;
                uint gameid = 0;
                std::string locname;

                if(clptr != NULL)
                {
                    status = (fr->mutual ? 1 : 0);
                    gameid = clptr->GetGameID();
                    if(clptr->GetGame() != NULL)
                    {
                        // Flag bit 1 marks a private game: only mutual
                        // friends get to see the game name.
                        if(clptr->GetGame()->GetFlag() & 1)
                        {
                            if(fr->mutual)
                            {
                                location = 5;
                                locname = clptr->GetGame()->GetName();
                            }
                            else
                            {
                                location = 4;
                            }
                        }
                        else
                        {
                            location = 3;
                            locname = clptr->GetGame()->GetName();
                        }
                    }
                    else if(clptr->GetChannel())
                    {
                        location = 2;
                        locname = clptr->GetChannel()->GetName();
                    }
                    else
                    {
                        location = 1;
                    }
                    // Overlay DND / Away flags onto the status bits.
                    if(!clptr->GetDND().empty())
                        status |= 2;
                    if(!clptr->GetAway().empty())
                        status |= 4;
                }

                _packet << status << location << gameid << locname;
            }
            return true;
        }
    }
}
|
# shellcheck shell=bash
# shellcheck disable=SC2034

# Build-tool bootstrap library: sets up output directories, loads per-tool
# definitions from scripts/tools/*.sh, and installs required tools on demand.
# Relies on helpers defined elsewhere: var_lookup, red, green, blue, die.

# All generated files should be placed in $OUTPUT_DIR, which is .gitignored.
OUTPUT_DIR="${PWD}/output"
mkdir -p "${OUTPUT_DIR}"

# Temporary files (e.g. downloads) should go to $TEMP_DIR.
TEMP_DIR="${OUTPUT_DIR}/tmp"
mkdir -p "${TEMP_DIR}"

# All downloaded tools will be installed into $TOOLS_DIR.
TOOLS_DIR="${OUTPUT_DIR}/bin"
mkdir -p "${TOOLS_DIR}"

# Python3 virtual env goes into $VENV_DIR
VENV_DIR="${OUTPUT_DIR}/venv"
mkdir -p "${VENV_DIR}"

# UNAME should be DARWIN, LINUX, or WINDOWS.
UNAME="$(uname | tr "[:lower:]" "[:upper:]")"

# Source all tool definitions.
# Each sourced file is expected to append its tool name to TOOLS and may
# define <tool>_version, <tool>_url_<UNAME>, <tool>_sha256_<UNAME>,
# <tool>_requires, <tool>_install and <tool>_version functions/variables.
TOOLS=()
# shellcheck disable=SC2044
for TOOL in $(find scripts/tools/*.sh); do
  # shellcheck disable=SC1090
  source "${TOOL}"
done

# Sort tools alphabetically and remove duplicates (there shouldn't be any).
# shellcheck disable=SC2207
TOOLS=($(printf '%s\n' "${TOOLS[@]}" | sort | uniq))

# require_tools makes sure all required tools are available. If the current version
# is too old (or doesn't match the required version exactly when PINNED_TOOLS is set),
# then the tool is downloaded and installed into $TOOLS_DIR.
function require_tools {
  local status
  local tool
  for tool in "$@"; do
    # Make sure additional prerequisites for the tool are also available.
    # They must be installed first because they might be needed to install the tool itself.
    # In this case we *want* word-splitting.
    # shellcheck disable=SC2046
    require_tools $(var_lookup "${tool}_requires")
    if ! status=$(tool_status "${tool}"); then
      tool_install "${tool}"
      # Re-check after install; a second failure is fatal.
      if ! status="$(tool_status "${tool}")"; then
        red "Could not install ${tool}"
        die "${status}"
      fi
    fi
  done
}
# tool_status checks the current installation status of a single tool. It return
# a status string, that can be displayed to the user. The return value is either 0
# when an acceptable version of the tool is available, or 1 when the correct
# version needs to be installed.
function tool_status {
  local tool=$1
  local version
  local rc=0
  local status
  version="$(tool_version "${tool}")"
  if [[ "${version}" =~ ^installed|internal|missing$ ]]; then
    # Non-numeric statuses: only "missing" requires an install.
    if [ "${version}" = "missing" ]; then
      rc=1
    fi
    status="is ${version}"
  else
    status="version is ${version}"
    local minimum
    minimum="$(var_lookup "${tool}_version")"
    if [ -n "${minimum}" ]; then
      # Use Ruby's Gem::Version for a proper semantic version comparison:
      # -1 => installed is newer, 0 => exact match, 1 => older than minimum.
      case "$(ruby -e "puts Gem::Version.new('${minimum}') <=> Gem::Version.new('${version}')")" in
        -1)
          status="${status} (newer than ${minimum})"
          # For PINNED_TOOLS only an exact match is a success (if there is a download URL).
          if [[ -n "${PINNED_TOOLS:-}" && -n "$(var_lookup "${tool}_url_${UNAME}")" ]]; then
            rc=1
          fi
          ;;
        0)
          # PINNED_TOOLS *must* be installed in $TOOLS_DIR $VENV_DIR/bin.
          if [[ -n "${PINNED_TOOLS:-}" && ! -x "${TOOLS_DIR}/$(exe_name "${tool}")" && ! -x "${VENV_DIR}/bin/$(exe_name "${tool}")" ]];
          then
            status="${status} (but not installed in ${TOOLS_DIR} or ${VENV_DIR}/bin)"
            rc=1
          fi
          ;;
        1|*)
          status="${status} (older than ${minimum})"
          rc=1
          ;;
      esac
    fi
  fi
  status="${tool} ${status}"
  # Colorize: green for OK, red for needs-install.
  if [ $rc -eq 0 ]; then
    status="$(green "${status}")"
  else
    status="$(red "${status}")"
  fi
  echo "${status}"
  return ${rc}
}

# tool_version returns the semantic version of the installed tool. I will
# return "internal" for tools implemented as aliases/functions, "missing"
# for tools that cannot be found, and "installed" if the version cannot be
# determined. It is a fatal error if the version cannot be determined for
# a tool that defines a minimum required version.
function tool_version {
  local tool=$1
  local version=""
  local tool_type
  local minimum_version
  tool_type="$(type -t "${tool}")"
  minimum_version="$(var_lookup "${tool}_version")"
  # (Maybe) determine installed version of the tool.
  if [ -z "${tool_type}" ]; then
    echo "missing"
  else
    # Call custom tool version function, if defined.
    if [ -n "$(type -t "${tool}_version")" ]; then
      version="$("${tool}_version")"
    # only call default "$tool version" command if minimum version is defined.
    elif [[ "${tool_type}" = "file" && -n "${minimum_version}" ]]; then
      version="$("${tool}" version)"
    fi
    # Version number must have at least a single dot.
    if [[ "${version}" =~ [0-9]+(\.[0-9]+)+ ]]; then
      echo "${BASH_REMATCH[0]}"
    elif [ "${version}" = "missing" ]; then
      echo "${version}"
    else
      if [ -n "${minimum_version}" ]; then
        die "Cannot determine '${tool}' version (requires ${minimum_version})"
      fi
      # No parsable version: classify by how the name resolves in the shell.
      case "${tool_type}" in
        file)
          echo "installed"
          ;;
        '')
          echo "missing"
          ;;
        *)
          echo "internal"
          ;;
      esac
    fi
  fi
}

# tool_install downloads and installs a single tool into $TOOLS_DIR.
# A custom <tool>_install function takes precedence; otherwise the tool is
# fetched from <tool>_url_<UNAME>, checksum-verified, decompressed
# (gz/xz, plain binaries and tarballs supported) and made executable.
function tool_install {
  local tool=$1
  local sha256
  local url
  local version
  version="$(var_lookup "${tool}_version")"
  blue "Installing ${tool}"
  # Look for custom install command first (e.g. for Python module install via pip).
  if [ -n "$(type -t "${tool}_install")" ]; then
    eval "${tool}_install"
    return
  fi
  # NOTE(review): curl is used below but not listed here — presumably assumed
  # to exist on all hosts; confirm.
  require_tools file gzip sha256sum xz
  url="$(var_lookup "${tool}_url_${UNAME}")"
  if [ -z "${url}" ]; then
    die "Can't find URL for ${tool}-${version}"
  fi
  local output="${TEMP_DIR}/output"
  # "{version}" placeholders in the URL are substituted here.
  curl -s -L "${url//\{version\}/${version}}" -o "${output}"
  sha256="$(var_lookup "${tool}_sha256_${UNAME}")"
  if [ -n "${sha256}" ] && ! echo "${sha256} ${output}" | sha256sum --check --status; then
    die "sha256 for ${url} does not match ${sha256}"
  fi
  local install_location
  install_location="${TOOLS_DIR}/$(exe_name "${tool}")"
  # Keep previous version in case installation fails.
  if [ -f "${install_location}" ]; then
    mv "${install_location}" "${install_location}.prev"
  fi
  if [[ "$(file "${output}")" =~ "gzip compressed" ]]; then
    mv "${output}" "${output}.gz"
    gzip -d "${output}.gz"
  fi
  if [[ "$(file "${output}")" =~ "XZ compressed" ]]; then
    mv "${output}" "${output}.xz"
    xz -d "${output}.xz"
  fi
  local file_type
  file_type="$(file "${output}")"
  case "${file_type}" in
    *executable*)
      mv "${output}" "${install_location}"
      ;;
    *tar*)
      # Unpack the archive and copy the first file matching the tool's
      # executable name into place.
      local outdir="${TEMP_DIR}/outdir"
      mkdir -p "${outdir}"
      tar xf "${output}" -C "${outdir}"
      find "${outdir}" -name "$(exe_name "${tool}")" -exec cp {} "${install_location}" \;
      if [ -f "${install_location}" ]; then
        rm -rf "${output}" "${outdir}"
      fi
      ;;
    *)
      die "Unsupported file type of ${output}:\n${file_type}"
      ;;
  esac
  if [ -f "${install_location}" ]; then
    chmod +x "${install_location}"
  else
    # Roll back to the preserved previous version, if any.
    if [ -f "${install_location}.prev" ]; then
      mv "${install_location}.prev" "${install_location}"
    fi
    die "Installation of ${tool} failed (previous version may have been restored)"
  fi
}
# exe_name prints the filename of the tool executable for the current
# platform, appending the ".exe" suffix on Windows.
function exe_name {
  case "${UNAME}" in
    WINDOWS) printf '%s.exe\n' "$1" ;;
    *)       printf '%s\n' "$1" ;;
  esac
}
|
<gh_stars>1-10
/* VMakeList.cpp */
//----------------------------------------------------------------------------------------
//
// Project: VMakeList 1.00
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2019 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <inc/VMakeList.h>
#include <CCore/inc/FileName.h>
#include <CCore/inc/FileToMem.h>
#include <CCore/inc/Print.h>
#include <CCore/inc/Exception.h>
#include <CCore/inc/ddl/DDLEngine.h>
#include <CCore/inc/ddl/DDLTypeSet.h>
namespace App {

#include "VMakeList.TypeSet.gen.h"

/* functions */

// Returns the generated DDL pretext prepended to every processed file.
StrLen Pretext()
 {
  return
#include "VMakeList.Pretext.gen.h"
  ""_c;
 }

/* class DataFile */

// Loads a vmake data file: runs it through the DDL engine (with the
// generated pretext), maps the typed result, and keeps the backing
// memory alive in `mem` for the lifetime of this object.
DataFile::DataFile(StrLen file_name)
 {
  // process

  PrintCon eout;

  DDL::FileEngine<FileName,FileToMem> engine(eout);

  auto result=engine.process(Range(file_name),Pretext());

  eout.flush();

  if( !result )
    {
     Printf(Exception,"VMakeList file #.q; : load failed",file_name);
    }

  // map

  DDL::TypedMap<TypeSet> map(result);
  MemAllocGuard guard(map.getLen());

  map(guard);

  // The mapped data must expose a "Data" constant of type Param.
  param=map.findConst<TypeDef::Param>("Data"_c);

  if( !param )
    {
     Printf(Exception,"VMakeList file #.q; : no data",file_name);
    }

  // Take ownership of the mapped memory; freed in the destructor.
  mem=guard.disarm();
 }

DataFile::~DataFile()
 {
  MemFree(Replace_null(mem));
 }

/* class ToolFile */

// Same loading scheme as DataFile, but the "Data" constant is of type
// Tools.
ToolFile::ToolFile(StrLen file_name)
 {
  // process

  PrintCon eout;

  DDL::FileEngine<FileName,FileToMem> engine(eout);

  auto result=engine.process(Range(file_name),Pretext());

  eout.flush();

  if( !result )
    {
     Printf(Exception,"VMakeList tools file #.q; : load failed",file_name);
    }

  // map

  DDL::TypedMap<TypeSet> map(result);
  MemAllocGuard guard(map.getLen());

  map(guard);

  tools=map.findConst<TypeDef::Tools>("Data"_c);

  if( !tools )
    {
     Printf(Exception,"VMakeList tools file #.q; : no data",file_name);
    }

  mem=guard.disarm();
 }

ToolFile::~ToolFile()
 {
  MemFree(Replace_null(mem));
 }

/* class TargetFile */

// Loads a target file; processed WITHOUT the pretext and mapped with the
// empty type set, expecting a text constant named CCORE_TARGET.
TargetFile::TargetFile(StrLen file_name)
 {
  // process

  PrintCon eout;

  DDL::FileEngine<FileName,FileToMem> engine(eout);

  auto result=engine.process(Range(file_name));

  eout.flush();

  if( !result )
    {
     Printf(Exception,"VMakeList target file #.q; : load failed",file_name);
    }

  // map

  DDL::TypedMap<DDL::EmptyTypeSet> map(result);
  MemAllocGuard guard(map.getLen());

  map(guard);

  DDL::MapText *ptr=map.findConst<DDL::MapText>("CCORE_TARGET"_c);

  if( !ptr )
    {
     Printf(Exception,"VMakeList target file #.q; : no target",file_name);
    }

  target=*ptr;

  mem=guard.disarm();
 }

TargetFile::~TargetFile()
 {
  MemFree(Replace_null(mem));
 }

} // namespace App
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase script: copies vendored
# frameworks/dSYMs/bcsymbolmaps into the app bundle, strips architectures
# not being built, and re-signs as needed.
# NOTE(review): uses bash features (arrays, [[ ]], "function", pipefail)
# despite the sh shebang — presumably relies on macOS /bin/sh being
# bash-compatible; confirm before running elsewhere.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework path: built products dir first, then its
  # basename there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM into DWARF_DSYM_FOLDER_PATH.
# $1: path to the .dSYM bundle; $2 (optional, default true): whether to
# warn when no built architecture matches the binary.
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      # Fix: the logged command previously referenced "${basename}.framework.dSYM"
      # while the executed rsync copied "${basename}.dSYM"; the log now matches.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the signing command when parallel signing is enabled;
    # the trailing "wait" at the bottom of the script collects the jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
# Removes from $1 every architecture not present in $ARCHS; reports the
# outcome through the STRIP_BINARY_RETVAL global (1 = binary usable,
# 0 = no matching architecture at all).
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Dispatches one artifact path to the appropriate installer by extension.
install_artifact() {
  artifact="$1"
  base="$(basename "$artifact")"
  case $base in
  *.framework)
    install_framework "$artifact"
    ;;
  *.dSYM)
    # Suppress arch warnings since XCFrameworks will include many dSYM files
    install_dsym "$artifact" "false"
    ;;
  *.bcsymbolmap)
    install_bcsymbolmap "$artifact"
    ;;
  *)
    echo "error: Unrecognized artifact "$artifact""
    ;;
  esac
}

# Installs every artifact listed (one path per line) in the given file.
copy_artifacts() {
  file_list="$1"
  while read artifact; do
    install_artifact "$artifact"
  done <$file_list
}

# Process the per-configuration artifact list emitted by CocoaPods, if any.
ARTIFACT_LIST_FILE="${BUILT_PRODUCTS_DIR}/cocoapods-artifacts-${CONFIGURATION}.txt"
if [ -r "${ARTIFACT_LIST_FILE}" ]; then
  copy_artifacts "${ARTIFACT_LIST_FILE}"
fi

# Per-configuration framework embedding (generated by CocoaPods).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xAPI/xAPI.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xDefine/xDefine.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xExtension/xExtension.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xWebBrowser/xWebBrowser.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xAPI/xAPI.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xDefine/xDefine.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xExtension/xExtension.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/xWebBrowser/xWebBrowser.framework"
fi

# Wait for any backgrounded parallel codesign jobs to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import { Container } from 'typedi';
import { IStorable } from '../interface/Storage';
import { ValidatorFileStorageService } from '../service/storage/file/Validator';
import { Channel } from './Channel';

/**
 * A validator script attached to a channel, persisted via file storage.
 * The storage key is always the pair [parent channel path, validator path].
 */
export class Validator implements IStorable {
	/** The validator's script content */
	content: string;

	/** Validator storage (resolved once from the DI container, never reassigned) */
	private readonly storageService: ValidatorFileStorageService;

	/**
	 * @param parent Channel that owns this validator
	 * @param path Path of the validator script, relative to the channel
	 */
	constructor(private parent: Channel, public path: string) {
		this.storageService = Container.get(ValidatorFileStorageService);
	}

	/** Load the script content from storage, after checking the file exists. */
	public async load(): Promise<void> {
		await this.validate();
		this.content = await this.storageService.get([this.parent.path, this.path]);
	}

	/** Persist the script content to storage. */
	// Consistency fix: explicit `public`, matching load()'s declared visibility.
	public async save(): Promise<void> {
		await this.storageService.set([this.parent.path, this.path], this.content);
	}

	/** Check resource validity */
	private async validate(): Promise<void> {
		if (!(await this.storageService.exists([this.parent.path, this.path]))) {
			throw new Error(`Validator's path ${this.parent.path}/${this.path} does not exists.`);
		}
	}

	/** Denotes if the validator should be considered as empty */
	public isEmpty(): boolean {
		return typeof this.content !== 'string' || this.content.trim().length === 0;
	}
}
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 18 22:42:12 2019

@author: Siddhesh
"""
from cloudmesh.common.FlatDict import FlatDict

# A nested record used to demonstrate flattening.
record = {
    "name": "Siddhesh",
    "address": {
        "city": "Bloomington",
        "state": "Indiana",
    },
}

# FlatDict collapses the nested keys (e.g. address.city) into a flat view.
flattened = FlatDict(record)
print("Using FlatDict")
print(flattened)
|
/**
* Flym
* <p>
* Copyright (c) 2012-2015 <NAME>
* <p>
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package yali.org.fragment;
import android.content.ActivityNotFoundException;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.BaseColumns;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.ContextCompat;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v7.widget.SearchView;
import android.text.TextUtils;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.crowdfire.cfalertdialog.CFAlertDialog;
import com.facebook.ads.Ad;
import com.facebook.ads.AdError;
import com.facebook.ads.AdSize;
import com.facebook.ads.AdView;
import com.facebook.ads.InterstitialAd;
import com.facebook.ads.InterstitialAdListener;
import com.loopj.android.http.AsyncHttpClient;
import com.loopj.android.http.AsyncHttpResponseHandler;
import com.loopj.android.http.RequestParams;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import yali.org.Constants;
import yali.org.MainApplication;
import yali.org.R;
import yali.org.adapter.EntriesCursorAdapter;
import yali.org.provider.FeedData;
import yali.org.provider.FeedDataContentProvider;
import yali.org.service.AutoRefreshService;
import yali.org.service.FetcherService;
import yali.org.utils.PrefUtils;
import yali.org.utils.UiUtils;
import static yali.org.Constants.URL_TOPICS;
import static yali.org.Constants.URL_VERSION;
public class EntriesListFragment extends SwipeRefreshListFragment {
    // Keys used to persist fragment state across recreation.
    private static final String STATE_CURRENT_URI = "STATE_CURRENT_URI";
    private static final String STATE_ORIGINAL_URI = "STATE_ORIGINAL_URI";
    private static final String STATE_SHOW_FEED_INFO = "STATE_SHOW_FEED_INFO";
    private static final String STATE_LIST_DISPLAY_DATE = "STATE_LIST_DISPLAY_DATE";

    private boolean ISPROGRESSIVE = false;

    // Loader ids: one for the entry list, one for the unread/new counters.
    private static final int ENTRIES_LOADER_ID = 1;
    private static final int NEW_ENTRIES_NUMBER_LOADER_ID = 2;

    private SharedPreferences shared;
    private SharedPreferences.Editor editor;

    // Current content URI being displayed, and the original one it derived from.
    private Uri mCurrentUri, mOriginalUri;
    private boolean mShowFeedInfo = false;

    private EntriesCursorAdapter mEntriesCursorAdapter;
    // Cursor of entries just marked read (presumably kept for undo — confirm).
    private Cursor mJustMarkedAsReadEntries;
    private FloatingActionButton mFab;
    private ListView mListView;

    // Timestamp used as the cut-off: entries fetched after this are "new".
    private long mListDisplayDate = new Date().getTime();
    /**
     * Loader for the entry list: queries entries fetched on or before
     * mListDisplayDate (or with no fetch date), ordered by date per the
     * "oldest first" preference, and feeds the cursor adapter.
     */
    private final LoaderManager.LoaderCallbacks<Cursor> mEntriesLoader = new LoaderManager.LoaderCallbacks<Cursor>() {
        @Override
        public Loader<Cursor> onCreateLoader(int id, Bundle args) {
            String entriesOrder = PrefUtils.getBoolean(PrefUtils.DISPLAY_OLDEST_FIRST, false) ? Constants.DB_ASC : Constants.DB_DESC;
            // Hide entries fetched after the display date (they are "new").
            String where = "(" + FeedData.EntryColumns.FETCH_DATE + Constants.DB_IS_NULL + Constants.DB_OR + FeedData.EntryColumns.FETCH_DATE + "<=" + mListDisplayDate + ')';
            CursorLoader cursorLoader = new CursorLoader(getActivity(), mCurrentUri, null, where, null, FeedData.EntryColumns.DATE + entriesOrder);
            // Throttle requeries to avoid redrawing on every provider change.
            cursorLoader.setUpdateThrottle(150);
            return cursorLoader;
        }

        @Override
        public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
            mEntriesCursorAdapter.swapCursor(data);
        }

        @Override
        public void onLoaderReset(Loader<Cursor> loader) {
            mEntriesCursorAdapter.swapCursor(Constants.EMPTY_CURSOR);
        }
    };
private final OnSharedPreferenceChangeListener mPrefListener = new OnSharedPreferenceChangeListener() {
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
if (PrefUtils.IS_REFRESHING.equals(key)) {
refreshSwipeProgress();
}
}
};
private int mNewEntriesNumber, mOldUnreadEntriesNumber = -1;
private boolean mAutoRefreshDisplayDate = false;
private final LoaderManager.LoaderCallbacks<Cursor> mEntriesNumberLoader = new LoaderManager.LoaderCallbacks<Cursor>() {
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
CursorLoader cursorLoader = new CursorLoader(getActivity(), mCurrentUri, new String[]{"SUM(" + FeedData.EntryColumns.FETCH_DATE + '>' + mListDisplayDate + ")", "SUM(" + FeedData.EntryColumns.FETCH_DATE + "<=" + mListDisplayDate + Constants.DB_AND + FeedData.EntryColumns.WHERE_UNREAD + ")"}, null, null, null);
cursorLoader.setUpdateThrottle(150);
return cursorLoader;
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
data.moveToFirst();
mNewEntriesNumber = data.getInt(0);
mOldUnreadEntriesNumber = data.getInt(1);
if (mAutoRefreshDisplayDate && mNewEntriesNumber != 0 && mOldUnreadEntriesNumber == 0) {
mListDisplayDate = new Date().getTime();
restartLoaders();
} else {
refreshUI();
}
mAutoRefreshDisplayDate = false;
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
}
};
private Button mRefreshListBtn;
private InterstitialAd interstitialAd;
@Override
public void onCreate(Bundle savedInstanceState) {
UiUtils.setPreferenceTheme(getActivity());
setHasOptionsMenu(true);
super.onCreate(savedInstanceState);
shared = getContext().getSharedPreferences(getString(R.string.app_name), Context.MODE_PRIVATE);
editor = shared.edit();
// MobileAds.initialize(getActivity(), getString(R.string.app_id));
initInterstitialAds();
if (savedInstanceState != null) {
mCurrentUri = savedInstanceState.getParcelable(STATE_CURRENT_URI);
mOriginalUri = savedInstanceState.getParcelable(STATE_ORIGINAL_URI);
mShowFeedInfo = savedInstanceState.getBoolean(STATE_SHOW_FEED_INFO);
mListDisplayDate = savedInstanceState.getLong(STATE_LIST_DISPLAY_DATE);
mEntriesCursorAdapter = new EntriesCursorAdapter(getActivity(), mCurrentUri, Constants.EMPTY_CURSOR, mShowFeedInfo);
}
/* interstitialAd = new InterstitialAd(getActivity());
interstitialAd.setAdUnitId(getString(R.string.interstitial_ad_unit_id));
AdRequest inter_adRequest = new AdRequest.Builder().build();
interstitialAd.loadAd(inter_adRequest);*/
}
@Override
public void onStart() {
super.onStart();
refreshUI(); // Should not be useful, but it's a security
refreshSwipeProgress();
PrefUtils.registerOnPrefChangeListener(mPrefListener);
mFab = getActivity().findViewById(R.id.fab);
mFab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
markAllAsRead();
Runnable runnable = new Runnable() {
@Override
public void run() {
if(interstitialAd.isAdLoaded()) {
interstitialAd.show();
}
}
};
new Handler().postDelayed(runnable, 3000);
}
});
if (mCurrentUri != null) {
// If the list is empty when we are going back here, try with the last display date
if (mNewEntriesNumber != 0 && mOldUnreadEntriesNumber == 0) {
mListDisplayDate = new Date().getTime();
} else {
mAutoRefreshDisplayDate = true; // We will try to update the list after if necessary
}
restartLoaders();
}
// First open => we open the drawer for you
//if (PrefUtils.getBoolean(PrefUtils.FIRST_OPEN, true)) {
fetchRss();
fetchVersion();
// }
}
@Override
public View inflateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_entry_list, container, true);
if (mEntriesCursorAdapter != null) {
setListAdapter(mEntriesCursorAdapter);
}
mListView = (ListView) rootView.findViewById(android.R.id.list);
mListView.setOnTouchListener(new SwipeGestureListener(mListView.getContext()));
if (PrefUtils.getBoolean(PrefUtils.DISPLAY_TIP, true)) {
final TextView header = new TextView(mListView.getContext());
header.setMinimumHeight(UiUtils.dpToPixel(70));
int footerPadding = UiUtils.dpToPixel(10);
header.setPadding(footerPadding, footerPadding, footerPadding, footerPadding);
header.setText(R.string.tip_sentence);
header.setGravity(Gravity.CENTER_VERTICAL);
header.setCompoundDrawablePadding(UiUtils.dpToPixel(5));
header.setCompoundDrawablesWithIntrinsicBounds(R.drawable.ic_action_about, 0, R.drawable.ic_action_cancel, 0);
header.setClickable(true);
header.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mListView.removeHeaderView(header);
PrefUtils.putBoolean(PrefUtils.DISPLAY_TIP, false);
}
});
mListView.addHeaderView(header);
}
AdView adView = new AdView(getActivity(),
getString(R.string.fb_test_ad) +
getString(R.string.fb_banner_placement_id), AdSize.BANNER_HEIGHT_90);
mListView.addHeaderView(adView);
mListView.addFooterView(adView);
adView.loadAd();
/* AdView mAdView = new AdView(getActivity());//findViewById(R.id.adView);
mAdView.setAdSize(AdSize.LARGE_BANNER);
mAdView.setAdUnitId(getString(R.string.banner_ad_unit_id));
AdRequest adRequest = new AdRequest.Builder()
.build();
mAdView.loadAd(adRequest);*/
/* RelativeLayout relativeLayout = new RelativeLayout(getActivity());
RelativeLayout.LayoutParams adViewParams = new RelativeLayout.LayoutParams
(AdView.LayoutParams.MATCH_PARENT, AdView.LayoutParams.MATCH_PARENT);
//adViewParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
adViewParams.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE);*/
// Load an ad into the AdMob banner view.
// relativeLayout.addView(mAdView, adViewParams);
/* mListView.addHeaderView(mAdView);
mListView.addFooterView(mAdView);*/
UiUtils.addEmptyFooterView(mListView, 90);
mRefreshListBtn = rootView.findViewById(R.id.refreshListBtn);
mRefreshListBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mNewEntriesNumber = 0;
mListDisplayDate = new Date().getTime();
refreshUI();
if (mCurrentUri != null) {
restartLoaders();
}
}
});
//disableSwipe();
/* RelativeLayout relativeLayout = new RelativeLayout(getActivity());
RelativeLayout.LayoutParams adViewParams = new RelativeLayout.LayoutParams
(AdView.LayoutParams.MATCH_PARENT, AdView.LayoutParams.WRAP_CONTENT);
adViewParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
adViewParams.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE);
// Load an ad into the AdMob banner view.
AdView mAdView = new AdView(getActivity());//findViewById(R.id.adView);
mAdView.setAdSize(AdSize.LARGE_BANNER);
mAdView.setAdUnitId(getString(R.string.banner_ad_unit_id));
AdRequest adRequest = new AdRequest.Builder()
.build();
mAdView.loadAd(adRequest);
relativeLayout.addView(mAdView, adViewParams);
mListView.addView(mAdView);*/
showMessage();
return rootView;
}
@Override
public void onStop() {
PrefUtils.unregisterOnPrefChangeListener(mPrefListener);
if (mJustMarkedAsReadEntries != null && !mJustMarkedAsReadEntries.isClosed()) {
mJustMarkedAsReadEntries.close();
}
mFab = null;
super.onStop();
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putParcelable(STATE_CURRENT_URI, mCurrentUri);
outState.putParcelable(STATE_ORIGINAL_URI, mOriginalUri);
outState.putBoolean(STATE_SHOW_FEED_INFO, mShowFeedInfo);
outState.putLong(STATE_LIST_DISPLAY_DATE, mListDisplayDate);
super.onSaveInstanceState(outState);
}
@Override
public void onRefresh() {
startRefresh();
}
@Override
public void onListItemClick(ListView listView, View view, int position, long id) {
if (id >= 0) { // should not happen, but I had a crash with this on PlayStore...
Intent intent = new Intent(Intent.ACTION_VIEW, ContentUris.withAppendedId(mCurrentUri, id));
//intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
startActivity(intent);
//Toast.makeText(getContext(), mCurrentUri+"\n\n"+id, Toast.LENGTH_LONG).show();
/* Intent intent = new Intent(getActivity(), EntryActivity.class);
intent.putExtra("mCurrentUri", ContentUris.withAppendedId(mCurrentUri, id));
startActivity(intent);*/
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
menu.clear(); // This is needed to remove a bug on Android 4.0.3
inflater.inflate(R.menu.entry_list, menu);
MenuItem searchItem = menu.findItem(R.id.menu_search);
final SearchView searchView = (SearchView) searchItem.getActionView();
if (FeedData.EntryColumns.isSearchUri(mCurrentUri)) {
searchItem.expandActionView();
searchView.post(new Runnable() { // Without that, it just does not work
@Override
public void run() {
searchView.setQuery(mCurrentUri.getLastPathSegment(), false);
searchView.clearFocus();
}
});
}
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
return false;
}
@Override
public boolean onQueryTextChange(String newText) {
if (TextUtils.isEmpty(newText)) {
setData(mOriginalUri, true);
} else {
setData(FeedData.EntryColumns.SEARCH_URI(newText), true, true);
}
return false;
}
});
searchView.setOnCloseListener(new SearchView.OnCloseListener() {
@Override
public boolean onClose() {
setData(mOriginalUri, true);
return false;
}
});
if (FeedData.EntryColumns.FAVORITES_CONTENT_URI.equals(mCurrentUri)) {
menu.findItem(R.id.menu_refresh).setVisible(false);
} else {
menu.findItem(R.id.menu_share_starred).setVisible(false);
}
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_share_starred: {
if (mEntriesCursorAdapter != null) {
String starredList = "";
Cursor cursor = mEntriesCursorAdapter.getCursor();
if (cursor != null && !cursor.isClosed()) {
int titlePos = cursor.getColumnIndex(FeedData.EntryColumns.TITLE);
int linkPos = cursor.getColumnIndex(FeedData.EntryColumns.LINK);
if (cursor.moveToFirst()) {
do {
starredList += cursor.getString(titlePos) + "\n" + cursor.getString(linkPos) + "\n\n";
} while (cursor.moveToNext());
}
startActivity(Intent.createChooser(
new Intent(Intent.ACTION_SEND).putExtra(Intent.EXTRA_SUBJECT, getString(R.string.share_favorites_title))
.putExtra(Intent.EXTRA_TEXT, starredList).setType(Constants.MIMETYPE_TEXT_PLAIN), getString(R.string.menu_share)
));
startRefresh();
}
}
return true;
}
case R.id.menu_refresh: {
startRefresh();
return true;
}
}
return super.onOptionsItemSelected(item);
}
public void markAllAsRead() {
if (mEntriesCursorAdapter != null) {
Snackbar snackbar = Snackbar.make(getActivity().findViewById(R.id.coordinator_layout), R.string.marked_as_read, Snackbar.LENGTH_LONG)
.setActionTextColor(ContextCompat.getColor(getActivity(), R.color.light_theme_color_primary))
.setAction(R.string.undo, new View.OnClickListener() {
@Override
public void onClick(View v) {
new Thread() {
@Override
public void run() {
if (mJustMarkedAsReadEntries != null && !mJustMarkedAsReadEntries.isClosed()) {
ArrayList<Integer> ids = new ArrayList<>();
while (mJustMarkedAsReadEntries.moveToNext()) {
ids.add(mJustMarkedAsReadEntries.getInt(0));
}
ContentResolver cr = MainApplication.getContext().getContentResolver();
String where = BaseColumns._ID + " IN (" + TextUtils.join(",", ids) + ')';
cr.update(FeedData.EntryColumns.CONTENT_URI, FeedData.getUnreadContentValues(), where, null);
mJustMarkedAsReadEntries.close();
}
}
}.start();
}
});
snackbar.getView().setBackgroundResource(R.color.material_grey_900);
snackbar.show();
new Thread() {
@Override
public void run() {
ContentResolver cr = MainApplication.getContext().getContentResolver();
String where = FeedData.EntryColumns.WHERE_UNREAD + Constants.DB_AND + '(' + FeedData.EntryColumns.FETCH_DATE + Constants.DB_IS_NULL + Constants.DB_OR + FeedData.EntryColumns.FETCH_DATE + "<=" + mListDisplayDate + ')';
if (mJustMarkedAsReadEntries != null && !mJustMarkedAsReadEntries.isClosed()) {
mJustMarkedAsReadEntries.close();
}
mJustMarkedAsReadEntries = cr.query(mCurrentUri, new String[]{BaseColumns._ID}, where, null, null);
cr.update(mCurrentUri, FeedData.getReadContentValues(), where, null);
}
}.start();
// If we are on "all items" uri, we can remove the notification here
if (mCurrentUri != null && Constants.NOTIF_MGR != null && (FeedData.EntryColumns.CONTENT_URI.equals(mCurrentUri) || FeedData.EntryColumns.UNREAD_ENTRIES_CONTENT_URI.equals(mCurrentUri))) {
Constants.NOTIF_MGR.cancel(0);
}
}
}
private void startRefresh() {
if (!PrefUtils.getBoolean(PrefUtils.IS_REFRESHING, false)) {
if (mCurrentUri != null && FeedDataContentProvider.URI_MATCHER.match(mCurrentUri) == FeedDataContentProvider.URI_ENTRIES_FOR_FEED) {
getActivity().startService(new Intent(getActivity(), FetcherService.class).setAction(FetcherService.ACTION_REFRESH_FEEDS).putExtra(Constants.FEED_ID,
mCurrentUri.getPathSegments().get(1)));
} else {
getActivity().startService(new Intent(getActivity(), FetcherService.class).setAction(FetcherService.ACTION_REFRESH_FEEDS));
}
}
refreshSwipeProgress();
}
public Uri getUri() {
return mOriginalUri;
}
public void setData(Uri uri, boolean showFeedInfo) {
setData(uri, showFeedInfo, false);
}
private void setData(Uri uri, boolean showFeedInfo, boolean isSearchUri) {
mCurrentUri = uri;
if (!isSearchUri) {
mOriginalUri = mCurrentUri;
}
mShowFeedInfo = showFeedInfo;
mEntriesCursorAdapter = new EntriesCursorAdapter(getActivity(), mCurrentUri, Constants.EMPTY_CURSOR, mShowFeedInfo);
setListAdapter(mEntriesCursorAdapter);
mListDisplayDate = new Date().getTime();
if (mCurrentUri != null) {
restartLoaders();
}
refreshUI();
}
private void restartLoaders() {
LoaderManager loaderManager = getLoaderManager();
//HACK: 2 times to workaround a hard-to-reproduce bug with non-refreshing loaders...
loaderManager.restartLoader(ENTRIES_LOADER_ID, null, mEntriesLoader);
loaderManager.restartLoader(NEW_ENTRIES_NUMBER_LOADER_ID, null, mEntriesNumberLoader);
loaderManager.restartLoader(ENTRIES_LOADER_ID, null, mEntriesLoader);
loaderManager.restartLoader(NEW_ENTRIES_NUMBER_LOADER_ID, null, mEntriesNumberLoader);
}
private void refreshUI() {
if (mNewEntriesNumber > 0) {
mRefreshListBtn.setText(getResources().getQuantityString(R.plurals.number_of_new_entries, mNewEntriesNumber, mNewEntriesNumber));
mRefreshListBtn.setVisibility(View.VISIBLE);
} else {
mRefreshListBtn.setVisibility(View.GONE);
}
}
private void refreshSwipeProgress() {
if (PrefUtils.getBoolean(PrefUtils.IS_REFRESHING, false)) {
showSwipeProgress();
} else {
hideSwipeProgress();
}
}
private class SwipeGestureListener extends SimpleOnGestureListener implements OnTouchListener {
static final int SWIPE_MIN_DISTANCE = 120;
static final int SWIPE_MAX_OFF_PATH = 150;
static final int SWIPE_THRESHOLD_VELOCITY = 150;
private final GestureDetector mGestureDetector;
public SwipeGestureListener(Context context) {
mGestureDetector = new GestureDetector(context, this);
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
if (mListView != null && e1 != null && e2 != null && Math.abs(e1.getY() - e2.getY()) <= SWIPE_MAX_OFF_PATH && Math.abs(velocityX) >= SWIPE_THRESHOLD_VELOCITY) {
long id = mListView.pointToRowId(Math.round(e2.getX()), Math.round(e2.getY()));
int position = mListView.pointToPosition(Math.round(e2.getX()), Math.round(e2.getY()));
View view = mListView.getChildAt(position - mListView.getFirstVisiblePosition());
if (view != null) {
// Just click on views, the adapter will do the real stuff
if (e1.getX() - e2.getX() > SWIPE_MIN_DISTANCE) {
mEntriesCursorAdapter.toggleReadState(id, view);
} else if (e2.getX() - e1.getX() > SWIPE_MIN_DISTANCE) {
mEntriesCursorAdapter.toggleFavoriteState(id, view);
}
// Just simulate a CANCEL event to remove the item highlighting
mListView.post(new Runnable() { // In a post to avoid a crash on 4.0.x
@Override
public void run() {
MotionEvent motionEvent = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0);
mListView.dispatchTouchEvent(motionEvent);
motionEvent.recycle();
}
});
return true;
}
}
return super.onFling(e1, e2, velocityX, velocityY);
}
@Override
public boolean onTouch(View v, MotionEvent event) {
return mGestureDetector.onTouchEvent(event);
}
}
//Fetch Rss
public void fetchRss() {
// Create AsycHttpClient object
AsyncHttpClient client = new AsyncHttpClient();
// Http Request Params Object
RequestParams params = new RequestParams();
if (PrefUtils.getBoolean(PrefUtils.FIRST_OPEN, true)) {
ISPROGRESSIVE = true;
showSwipeProgress();
}
client.post(URL_TOPICS, params, new AsyncHttpResponseHandler() {
@Override
public void onSuccess(String response) {
if(!getActivity().getSharedPreferences(getString(R.string.app_name), Context.MODE_PRIVATE)
.getString("response", response).equals(response)){
syncJSON(response);
}
}
@Override
public void onFailure(int statusCode, Throwable error, String content) {
if (statusCode == 404) {
fetchRss();
//Toast.makeText(getApplicationContext(), "Requested resource not found", Toast.LENGTH_LONG).show();
} else if (statusCode == 500) {
fetchRss();
//Toast.makeText(getApplicationContext(), "Something went wrong at server end", Toast.LENGTH_LONG).show();
} else {
fetchRss();
//Toast.makeText(getApplicationContext(), "Unexpected Error occcured! [Most common Error: Device might not be connected to Internet]",
// Toast.LENGTH_LONG).show();
}
}
});
}
public void fetchVersion() {
AsyncHttpClient client = new AsyncHttpClient();
// Http Request Params Object
RequestParams params = new RequestParams();
client.post(URL_VERSION, params, new AsyncHttpResponseHandler() {
@Override
public void onSuccess(String response) {
syncVersion(response);
}
@Override
public void onFailure(int statusCode, Throwable error, String content) {
if (statusCode == 404) {
fetchVersion();
//Toast.makeText(getApplicationContext(), "Requested resource not found", Toast.LENGTH_LONG).show();
} else if (statusCode == 500) {
fetchVersion();
//Toast.makeText(getApplicationContext(), "Something went wrong at server end", Toast.LENGTH_LONG).show();
} else {
fetchVersion();
//Toast.makeText(getApplicationContext(), "Unexpected Error occcured! [Most common Error: Device might not be connected to Internet]",
// Toast.LENGTH_LONG).show();
}
}
});
}
public void showToast(String str){
Toast.makeText(getActivity(), str, Toast.LENGTH_LONG).show();
}
public void syncJSON(String response){
try {
// Extract JSON array from the response
JSONArray arr = new JSONArray(response);
//only load if there are changes
if (arr.length() != 0) {
// clearApplicationData();
// Loop through each array element, get JSON object which has userid and username
for (int i = 0; i < arr.length(); i++) {
// Get JSON object
JSONObject object = (JSONObject) arr.get(i);
try {
String title = object.getString("title").replaceAll("'", "''");
String url = object.getString("url").replaceAll("'", "''");
editor.putString("version", object.getString("version")
.replaceAll("'", "''"));
//showToast(object.getString("version")+" from fetch");
editor.putString("features", object.getString("features")
.replaceAll("'", "''"));
editor.putString("response", response);
editor.commit();
/* if(pref.getString("response", null).contains(title) ||
pref.getString("response", null).contains(url))*/
FeedDataContentProvider.addFeed(getActivity(), url, title, true);
//FeedDataContentProvider.addFeed(this, "http://archive.org/services/collection-rss.php", "Archive", true);
//this.getLoaderManager().initLoader(0, null, this);
AutoRefreshService.initAutoRefresh(getActivity());
} catch (JSONException e) {
// Toast.makeText(getApplicationContext(), "Error now... "+e.getMessage(), Toast.LENGTH_LONG).show();
}
}
if(ISPROGRESSIVE) {
hideSwipeProgress();
}
getActivity().startService(new Intent(getActivity(), FetcherService.class).setAction(FetcherService.ACTION_REFRESH_FEEDS));
/* while (mListView.getAdapter().getCount()==0) {
mListView.invalidateViews();
}*/
//mEntriesCursorAdapter.notifyDataSetChanged();
}
} catch (JSONException e) {}
}
public void syncVersion(String response){
try {
JSONObject object = new JSONObject(response);
String version = object.getString("version").replaceAll("'", "''");
String features = object.getString("features").replaceAll("'", "''");
//showToast(version);
editor.putString("version", version);
//showToast(object.getString("version")+" from fetch");
editor.putString("features", features);
editor.commit();
} catch (JSONException e) {
// showToast(e.getMessage());
}
}
private final String TAG = EntriesListFragment.class.getSimpleName();
public void initInterstitialAds(){
// Instantiate an InterstitialAd object.
// NOTE: the placement ID will eventually identify this as your App, you can ignore it for
// now, while you are testing and replace it later when you have signed up.
// While you are using this temporary code you will only get test ads and if you release
// your code like this to the Google Play your users will not receive ads (you will get a no fill error).
interstitialAd = new InterstitialAd(getContext(), getString(R.string.fb_test_ad)+
getString(R.string.fb_interstitial_placement_id));
// Set listeners for the Interstitial Ad
interstitialAd.setAdListener(new InterstitialAdListener() {
@Override
public void onInterstitialDisplayed(Ad ad) {
// Interstitial ad displayed callback
Log.e(TAG, "Interstitial ad displayed.");
}
@Override
public void onInterstitialDismissed(Ad ad) {
// Interstitial dismissed callback
Log.e(TAG, "Interstitial ad dismissed.");
}
@Override
public void onError(Ad ad, AdError adError) {
// Ad error callback
Log.e(TAG, "Interstitial ad failed to load: " + adError.getErrorMessage());
}
@Override
public void onAdLoaded(Ad ad) {
// Interstitial ad is loaded and ready to be displayed
Log.d(TAG, "Interstitial ad is loaded and ready to be displayed!");
// Show the ad
//interstitialAd.show();
}
@Override
public void onAdClicked(Ad ad) {
// Ad clicked callback
Log.d(TAG, "Interstitial ad clicked!");
}
@Override
public void onLoggingImpression(Ad ad) {
// Ad impression logged callback
Log.d(TAG, "Interstitial ad impression logged!");
}
});
// For auto play video ads, it's recommended to load the ad
// at least 30 seconds before it is shown
interstitialAd.loadAd();
}
public void showMessage(){
String version = "noversion";
PackageManager manager = getActivity().getPackageManager();
try {
PackageInfo info = manager.getPackageInfo(getActivity().getPackageName(), 0);
version = info.versionName;
} catch (PackageManager.NameNotFoundException unused) {
version = "noversion";
}
//showToast(shared.getString("version", "noversion")+" vs real version is "+version);
if(!version.equals("noversion")
&& !shared.getString("version", "noversion").equals(version)
&& !shared.getString("version", "noversion").equals("noversion")) {
CFAlertDialog.Builder builder2 = new CFAlertDialog.Builder(getActivity())
.setDialogStyle(CFAlertDialog.CFAlertStyle.NOTIFICATION)
.setTitle("New Update!")
.setCornerRadius(32)
.setTextGravity(Gravity.LEFT)
.setTextColor(Color.BLACK)
//.setHeaderView(imageView)
.setIcon(R.drawable.logo)
.setMessage(shared.getString("features", "New features added, bug fixes..."))
.addButton("UPDATE NOW",-1, Color.parseColor("#4285F4"),
CFAlertDialog.CFAlertActionStyle.POSITIVE, CFAlertDialog.CFAlertActionAlignment.JUSTIFIED,
(dialog, which) -> {
dialog.dismiss();
updateApp();
})
.addButton("CANCEL",-1, -1,
CFAlertDialog.CFAlertActionStyle.DEFAULT, CFAlertDialog.CFAlertActionAlignment.JUSTIFIED,
(dialog, which) -> {
//updateApp();
dialog.dismiss();
});
// Show the alert
builder2.show();
}
// }
}
public void updateApp(){
Uri rateLink = Uri.parse("market://details?id=" + getActivity().getPackageName());
Uri rateLinkNotFound = Uri.parse("https://play.google.com/store/apps/details?id=" + getActivity().getPackageName());
Intent rateIntent = new Intent(Intent.ACTION_VIEW, rateLink);
Intent rateNotFound = new Intent(Intent.ACTION_VIEW, rateLinkNotFound);
try{
startActivity(rateIntent);
}catch (ActivityNotFoundException e){
startActivity(rateNotFound);
}
}
}
|
<filename>readfile.cpp
#include <iostream>
#include <fstream>
#include <string>
using namespace std;
//den tin xrisimopoiw sto programma telika
// Prints the contents of a text file line by line to stdout.
// Generalized: the path is now a parameter; the default keeps the original
// readFile() call sites working unchanged ("small.txt").
void readFile(const std::string& filename = "small.txt") {
    std::string line;
    std::ifstream myfile(filename);
    if (myfile.is_open()) {
        while (std::getline(myfile, line)) {
            std::cout << line << '\n';
        }
        // ifstream closes itself on destruction, but the explicit close
        // mirrors the original behavior.
        myfile.close();
    } else {
        std::cout << "Unable to open file";
    }
}
|
/// <summary>
/// Returns the first <paramref name="n"/> Fibonacci numbers (0, 1, 1, 2, ...).
/// </summary>
/// <param name="n">How many numbers to produce; non-positive yields an empty list.</param>
/// <returns>A list containing exactly <paramref name="n"/> elements.</returns>
public static IList<int> FibonacciSequence(int n)
{
    // Create a list to store the sequence
    List<int> sequence = new List<int>();
    // Guard the seeds: the original unconditionally returned [0, 1] even for
    // n <= 1, so the result had more elements than requested.
    if (n <= 0)
    {
        return sequence;
    }
    sequence.Add(0);
    if (n == 1)
    {
        return sequence;
    }
    sequence.Add(1);
    // Generate the rest of the sequence
    for (int i = 2; i < n; i++)
    {
        int prev = sequence[i - 1];
        int current = sequence[i - 2];
        sequence.Add(prev + current);
    }
    return sequence;
}
FibonacciSequence(10); // 0, 1, 1, 2, 3, 5, 8, 13, 21, 34
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.serializer;
import io.atomix.catalyst.CatalystException;
/**
* Default serializer factory.
* <p>
* The default serializer factory constructs {@link TypeSerializer} instances given a serializer {@link Class}. The serializer
* must implement a default no-argument constructor.
*
* @author <a href="http://github.com/kuujo"><NAME></a>
*/
public class DefaultTypeSerializerFactory implements TypeSerializerFactory {
  @SuppressWarnings("rawtypes")
  private final Class<? extends TypeSerializer> type;

  /**
   * @param type the serializer class to instantiate; must have an accessible
   *             no-argument constructor
   * @throws NullPointerException if {@code type} is {@code null}
   */
  @SuppressWarnings("rawtypes")
  public DefaultTypeSerializerFactory(Class<? extends TypeSerializer> type) {
    if (type == null)
      throw new NullPointerException("type cannot be null");
    this.type = type;
  }

  /**
   * Creates a new serializer instance via the class's no-arg constructor.
   *
   * @throws CatalystException if the serializer cannot be instantiated
   */
  @Override
  public TypeSerializer<?> createSerializer(Class<?> type) {
    try {
      // Class.newInstance() is deprecated (it rethrows checked constructor
      // exceptions unchecked); go through the declared no-arg constructor.
      return this.type.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      throw new CatalystException("failed to instantiate serializer: " + this.type, e);
    }
  }
}
|
<filename>src/app/shared/models/sidebar.ts
/**
 * Route targets for the sidebar navigation entries, keyed by menu name.
 * NOTE(review): the 'PAYEMENT' misspelling is kept deliberately — renaming the
 * member would break every consumer referencing SidebarMenus.PAYEMENT_REVENUE_LIST.
 */
export enum SidebarMenus {
  DASHBOARD = '/dashboard/list',
  CLINICIANS_LIST = '/clinicians/list',
  AGENCIES_LIST = '/agencies/list',
  PATIENTS_LIST = '/patients/list',
  VISITS_LIST = '/Scheduled Visits/list',
  ADMIN_USERS_LIST = '/Admin Users/list',
  PAYEMENT_REVENUE_LIST = '/Payment Revenue/list',
  CATEGORY_FEE_LIST = '/Category Fee/list',
}

/**
 * Menu keys, derived from the enum so the two can never drift apart.
 * String enums have no reverse mapping, so Object.keys yields exactly the
 * member names, in declaration order — identical to the old hand-written list.
 */
export const SidebarMenusList = Object.keys(SidebarMenus);
|
#!/bin/bash
#
# 5. Ganglia — install gmond, pin the ganglia uid/gid to 117, and point the
# send channel at the collector host. Defaults below can be overridden from
# the environment.
#
GANGLIA=${GANGLIA:-"127.0.0.1"}
CLUSTER_NAME=${CLUSTER_NAME:-"unspecified"}
CLUSTER_OWNER=${CLUSTER_OWNER:-"unspecified"}
CLUSTER_LOCATION=${CLUSTER_LOCATION:-"unspecified"}
# 5.1. ganglia
yum -y install ganglia ganglia-gmond ganglia-gmond-python
# Re-own existing files before remapping the uid/gid. find -exec replaces the
# old "find | xargs" pipeline: it is safe with whitespace/newlines in paths
# and is a no-op when nothing matches (xargs would have run with no args).
find / -group ganglia -exec chgrp 117 {} +
find / -user ganglia -exec chown 117 {} +
groupmod -g 117 ganglia
usermod -u 117 ganglia
etckeeper commit -m "SETUP ganglia"
# 5.2. ganglia-config
# NOTE: if enable reciever, "deaf = no"
sed -e "s/send_metadata_interval = 0/send_metadata_interval = 10/g" \
    -e "s/name = \"unspecified\"/name = \"${CLUSTER_NAME}\"/g" \
    -e "s/owner = \"unspecified\"/owner = \"${CLUSTER_OWNER}\"/g" \
    -e "s/location = \"unspecified\"/location = \"${CLUSTER_LOCATION}\"/g" \
    -e 's/host_dmax = 0/host_dmax = 86400/g' \
    -e 's/deaf = no/deaf = yes/g' \
    -e '/mcast_join = 239.2.11.71/d' \
    -e '/ttl = 1/d' \
    -e '/bind = 239.2.11.71/d' \
    -i.dist /etc/ganglia/gmond.conf
# Replace the whole udp_send_channel block with a unicast channel to $GANGLIA.
sed -e "/^udp_send_channel {/,/}/c udp_send_channel {\n  host = ${GANGLIA}\n  port = 8649\n}" \
    -i /etc/ganglia/gmond.conf
# 5.X. ganglia-service
service gmond start
chkconfig gmond on
# vim:ts=4
|
import React, { useState } from "react"
import { navigate } from 'gatsby'
// Serializes a flat object into an application/x-www-form-urlencoded body
// ("key=value" pairs joined by "&", both sides percent-encoded).
function encode(data) {
  const pairs = [];
  for (const [field, value] of Object.entries(data)) {
    pairs.push(`${encodeURIComponent(field)}=${encodeURIComponent(value)}`);
  }
  return pairs.join("&");
}
const NewContactForm = (props) => {
const [name, setName] = useState("")
const [email, setEmail] = useState("")
const [message, setMessage] = useState("")
const handleChange = (e) => {
setName({ ...name, [e.target.name]: e.target.value })
setEmail({ ...email, [e.target.email]: e.target.value })
setMessage({ ...message, [e.target.message]: e.target.value })
}
const handleSubmit = (event) => {
// Prevent the default onSubmit behavior
event.preventDefault();
// POST the encoded form with the content-type header that's required for a text submission
// Note that the header will be different for POSTing a file
fetch("/", {
method: "POST",
headers: { "Content-Type": "application/x-www-form-urlencoded" },
body: encode({
"form-name": event.target.getAttribute("name"),
...name
})
})
// On success, redirect to the custom success page using Gatsby's `navigate` helper function
.then(() => navigate("/Thanks/"))
// On error, show the error in an alert
.catch(error => alert(error));
};
return (
<div className="container grid grid-cols-2 h-half lg:w-3/5 bg-bg-light m-auto mb-4 py-4 px-12">
<form data-netlify="true" action="/" name="contactUs" method="post" onSubmit={handleSubmit} className="w-full">
{/* <label htmlFor="nameInput" className="text-4xl text-dark-tan mb-2 w-full font-bold">Send us a Message</label> */}
<input type="hidden" name="form-name" value="contact-form" />
<input className='p-2 mb-2 w-64 text-black' name="Name" type="text" onChange={handleChange} required placeholder="Name" />
<input className='p-2 mb-2 w-64 text-black' type='email' placeholder='Email' name='email' onChange={handleChange} required />
<textarea className='w-64 lg:w-80 text-black' placeholder='Message' name='message' rows='8' minLength="8" onChange={handleChange} required />
<button className='bg-dark-tan p-2 mt-4' type='submit' >Contact Us</button>
</form>
</div>
)
}
export default NewContactForm |
<reponame>r-f-g/mlflow-operator
import logging
import os
import random
import re
import tempfile
import mlflow
import pytest
import pymysql
from minio import Minio
log = logging.getLogger(__name__)
def _get_ip(text):
"""Get subnet IP address."""
try:
return re.findall(r"[0-9]+(?:\.[0-9]+){3}", text)[0]
except IndexError:
return None
def _run_test_train():
    """Create a fresh experiment, log params/metric/artifact, return the run id."""
    experiment_id = mlflow.create_experiment(f"experiment-{random.randint(0, 1000):04d}")
    with mlflow.start_run(experiment_id=experiment_id) as active_run:
        mlflow.log_params({"param1": 1, "param2": 2})
        mlflow.log_metric("score", 0.8)
        # Write a tiny throwaway file and attach it as a run artifact.
        with tempfile.TemporaryDirectory() as workdir:
            artifact_path = os.path.join(workdir, "test")
            with open(artifact_path, "w") as artifact_file:
                artifact_file.write(str(random.randint(0, 10)))
            mlflow.log_artifact(artifact_path)
    return active_run.info.run_id
async def _check_mlflow_server(model, use_ingress=False):
    """Validate that the mlflow server is working correctly.

    Points the MLflow client at the server (via the ingress IP when
    ``use_ingress`` is set, otherwise the mlflow unit address), runs a
    small training job and asserts its recorded data round-trips.
    """
    if use_ingress:
        ingress_unit = model.applications["ingress"].units[0]
        mlflow_host = _get_ip(ingress_unit.workload_status_message)
        assert mlflow_host is not None, "Failed to get IP address from ingress unit."
    else:
        status = await model.get_status()
        mlflow_host = status.applications["mlflow"].units["mlflow/0"].address
    mlflow_config = await model.applications["mlflow"].get_config()
    mlflow_port = mlflow_config.get("port", {}).get("value")
    mlflow.set_tracking_uri(f"http://{mlflow_host}:{mlflow_port}")
    run_id = _run_test_train()
    run = mlflow.get_run(run_id)
    assert run.info.status == "FINISHED"
    assert run.data.metrics == {"score": 0.8}
    assert run.data.params == {"param1": "1", "param2": "2"}
    log.info(f"the training '{run.info.run_id}' was successful ")
    return run
@pytest.mark.abort_on_fail
async def test_build_and_deploy(ops_test):
    """Build the mlflow-operator charm and deploy it with the test bundle."""
    mlflow_operator = await ops_test.build_charm(".")
    # Deploy via the raw juju CLI rather than python-libjuju:
    # work around bug https://bugs.launchpad.net/juju/+bug/1928796
    rc, stdout, stderr = await ops_test._run(
        "juju",
        "deploy",
        mlflow_operator,
        "-m", ops_test.model_full_name,
        "--resource", "server=blueunicorn90/mlflow-operator:1.18",
        "--channel", "edge"
    )
    assert rc == 0, f"Failed to deploy with resource: {stderr or stdout}"
    # Deploy the rest of the test applications from the rendered bundle.
    await ops_test.model.deploy(ops_test.render_bundle(
        "tests/data/bundle.yaml", master_charm=mlflow_operator))
    # Ingress also deployed via CLI:
    # work around bug https://github.com/juju/python-libjuju/issues/511
    rc, stdout, stderr = await ops_test._run(
        "juju",
        "deploy",
        "nginx-ingress-integrator",
        "ingress",
        "-m", ops_test.model_full_name,
        "--channel", "stable"
    )
    assert rc == 0, f"Failed to deploy with resource: {stderr or stdout}"
    # Block until every unit reports active/idle.
    await ops_test.model.wait_for_idle(wait_for_active=True)
async def test_mlflow_status_message(ops_test):
    """Validate the mlflow unit's status message, then smoke-test the server."""
    unit = ops_test.model.applications["mlflow"].units[0]
    assert unit.workload_status == "active"
    assert unit.workload_status_message == "MLflow server is ready"
    # End-to-end check: run a small training job against the server.
    await _check_mlflow_server(ops_test.model)
async def test_add_ingress_relations(ops_test):
    """Validate that adding the Nginx Ingress Integrator relation works."""
    await ops_test.model.add_relation("mlflow", "ingress")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    # The server must now be reachable through the ingress IP.
    await _check_mlflow_server(ops_test.model, use_ingress=True)
async def test_remove_ingress_relations(ops_test):
    """Validate that removing the Nginx Ingress Integrator relation works."""
    ingress_application = ops_test.model.applications["ingress"]
    await ingress_application.destroy_relation("ingress", "mlflow")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    # The server must still work when addressed directly (no ingress).
    await _check_mlflow_server(ops_test.model)
async def test_add_minio_relations(ops_test):
    """Validate that adding the Minio relation stores artifacts in S3."""
    await ops_test.model.add_relation("mlflow", "minio")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    # configuration environment variables before test run
    minio_app = ops_test.model.applications["minio"]
    await minio_app.set_config({"secret-key": "minio1234"})
    await ops_test.model.wait_for_idle(wait_for_active=True)
    status = await ops_test.model.get_status()
    minio_ip = status.applications["minio"].units["minio/0"].address
    # The MLflow client reads its S3 settings from the environment.
    os.environ["MLFLOW_S3_ENDPOINT_URL"] = f"http://{minio_ip}:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio1234"
    os.environ["MLFLOW_S3_IGNORE_TLS"] = "true"
    run = await _check_mlflow_server(ops_test.model)
    # Verify the run's artifact actually landed in the "mlflow" bucket.
    client = Minio(f"{minio_ip}:9000", access_key="minio", secret_key="minio1234", secure=False)
    assert client.bucket_exists("mlflow")
    prefix = run.info.artifact_uri.replace("s3://mlflow/", "")
    objects = [obj.object_name for obj in client.list_objects("mlflow", prefix, recursive=True)]
    assert f"{prefix}/test" in objects
async def test_remove_minio_relations(ops_test):
    """Validate that removing the Minio relation works."""
    minio_application = ops_test.model.applications["minio"]
    await minio_application.destroy_relation("object-storage", "mlflow")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    # Server must fall back to local artifact storage and keep working.
    await _check_mlflow_server(ops_test.model)
async def test_add_db_relations(ops_test):
    """Validate that adding a DB relation stores run metadata in MariaDB."""
    await ops_test.model.add_relation("mlflow", "mariadb-k8s")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    run = await _check_mlflow_server(ops_test.model)
    status = await ops_test.model.get_status()
    mariadb_k8s_ip = status.applications["mariadb-k8s"].units["mariadb-k8s/0"].address
    # NOTE(review): "<PASSWORD>" is a redacted credential placeholder;
    # presumably it must match the mariadb-k8s root password — confirm.
    connection = pymysql.connect(
        host=mariadb_k8s_ip,
        port=3306,
        user="root",
        password="<PASSWORD>",
        db="database",
        cursorclass=pymysql.cursors.DictCursor
    )
    # DictCursor returns rows as dicts, so .get("run_uuid") below is valid.
    with connection:
        with connection.cursor() as cursor:
            cursor.execute("SELECT run_uuid FROM runs;")
            results = cursor.fetchall()
            assert run.info.run_uuid in [result.get("run_uuid") for result in results]
async def test_remove_db_relations(ops_test):
    """Validate that removing a DB relation works."""
    db_application = ops_test.model.applications["mariadb-k8s"]
    await db_application.destroy_relation("mysql", "mlflow")
    await ops_test.model.wait_for_idle(wait_for_active=True)
    # Server must fall back to its default backend store and keep working.
    await _check_mlflow_server(ops_test.model)
|
<gh_stars>10-100
// Cascading-menu data for a forum navigation control.
// Key format: '<parentRoomId>_<roomId>'.  Each value is a semicolon-separated
// record: "text:<label>" (labels are \uXXXX-escaped Chinese) followed by
// either "url:<target link>" or "data:<query-string args>".
// NOTE(review): the widget that parses these strings is not in this file —
// confirm the expected record format against its consumer.
var data={};
data['16_70'] = 'text:WPS Office\u4ea7\u54c1\u5bb6\u65cf ; url:/Expert/ForumsList.asp?typenum=1&roomid=70';
data['70_7003'] = 'text:WPS\u9ad8\u6821\u6280\u672f\u4e13\u5bb6\u53d1\u6398\u8ba1\u5212 ; data:roomid=7003';
data['70_7002'] = 'text:WPS\u4e8c\u6b21\u5f00\u53d1\u53ca\u4f7f\u7528 ; data:roomid=7002';
data['70_7001'] = 'text:\u91d1\u5c71\u7535\u5b50\u5370\u7ae0 ; data:roomid=7001';
data['16_1612'] = 'text:\u82f1\u7279\u5c14\u4fe1\u606f\u6280\u672f\u5cf0\u4f1a ; data:roomid=1612';
data['16_1601'] = 'text:IBM\u4eba\u624d\u8bba\u575b ; data:roomid=1601';
data['16_1602'] = 'text:\u7a0b\u5e8f\u5458\u5927\u672c\u8425 ; data:roomid=1602';
data['16_1603'] = 'text:Cell\u7ec4\u4ef6\u3001\u63d2\u4ef6 ; data:roomid=1603';
data['16_161605'] = 'text:\u4eba\u6c11\u90ae\u7535\u51fa\u7248\u793e ; data:roomid=1605';
data['16_161608'] = 'text:\u6e05\u534e\u5927\u5b66\u51fa\u7248\u793e ; data:roomid=1608';
data['16_16406'] = 'text:\u5fae\u521b\u8f6f\u4ef6\u5f00\u53d1\u7ba1\u7406 ; url:/expert/forums_list_406.asp';
data['16_67'] = 'text:\u5fae\u8f6f\u521b\u65b0\u676f\u8f6f\u4ef6\u5f00\u53d1\u5927\u8d5b ; url:/Expert/ForumsList.asp?typenum=1&roomid=67';
data['67_6701'] = 'text:\u8f6f\u4ef6\u5f00\u53d1\u7ade\u8d5b ; data:roomid=6701';
data['67_6702'] = 'text:\u903b\u8f91\u7b97\u6cd5\u7ade\u8d5b ; data:roomid=6702';
data['67_6703'] = 'text:\u6e32\u67d3\u6280\u672f\u7ade\u8d5b ; data:roomid=6703';
data['67_6704'] = 'text:\u77ed\u7247\u5236\u4f5c\u7ade\u8d5b ; data:roomid=6704';
data['16_1604'] = 'text:IBM \u5f00\u53d1\u8005\u5927\u4f1a ; data:roomid=1604';
data['16_1607'] = 'text:\u5fae\u8f6fTech-Ed\u6280\u672f\u6559\u80b2\u5927\u4f1a ; data:roomid=1607';
data['16_161609'] = 'text:\u4e2d\u56fd\u8f6f\u4ef6\u6280\u672f\u5927\u4f1a ; data:roomid=1609';
data['16_1610'] = 'text:\u5e94\u7528\u6027\u80fd\u7ba1\u7406 ; data:roomid=1610';
|
#!/usr/bin/env bash
# Builds PHP extensions inside an Alpine-based docker PHP image.
# Expects $PHP_VERSION to be set (e.g. "7.2", "7.3", "7.4").
set -euf -o pipefail
# Base build/runtime packages required by the extensions compiled below.
apk --update --no-cache add \
    bzip2 \
    bzip2-dev \
    cassandra-cpp-driver \
    curl-dev \
    cyrus-sasl-dev \
    freetype-dev \
    gmp-dev \
    icu-dev \
    imagemagick \
    imagemagick-dev \
    imap-dev \
    krb5-dev \
    libbz2 \
    libedit-dev \
    libintl \
    libjpeg-turbo-dev \
    libltdl \
    libmemcached-dev \
    libpng-dev \
    libtool \
    libxml2-dev \
    libxslt-dev \
    openldap-dev \
    pcre-dev \
    postgresql-dev \
    rabbitmq-c \
    rabbitmq-c-dev \
    readline-dev \
    sqlite-dev \
    zlib-dev
# PHP 7.3/7.4 take libzip/libsodium from the default repos; older
# releases need libzip-dev from the Alpine 3.5 community repository.
if [[ $PHP_VERSION == "7.4" || $PHP_VERSION == "7.3" ]]; then
    apk --update --no-cache add libzip-dev libsodium-dev
else
    apk --no-cache add --repository http://dl-cdn.alpinelinux.org/alpine/v3.5/community libzip-dev
fi
# LDAP and IMAP (with Kerberos + SSL) extensions.
docker-php-ext-configure ldap
docker-php-ext-install -j "$(nproc)" ldap
PHP_OPENSSL=yes docker-php-ext-configure imap --with-kerberos --with-imap-ssl
docker-php-ext-install -j "$(nproc)" imap
# Core bundled extensions, built in one pass.
docker-php-ext-install -j "$(nproc)" exif xmlrpc pcntl bcmath bz2 calendar intl mysqli opcache pdo_mysql pdo_pgsql pgsql soap xsl zip gmp
docker-php-source delete
# GD: the configure flags were renamed/simplified in PHP 7.4.
if [[ $PHP_VERSION == "7.4" ]]; then
    docker-php-ext-configure gd --with-freetype --with-jpeg
else
    docker-php-ext-configure gd \
        --with-gd \
        --with-freetype-dir=/usr/include \
        --with-jpeg-dir=/usr/include \
        --with-png-dir=/usr/include
fi
docker-php-ext-install -j "$(nproc)" gd
# Xdebug: PHP 7.3/7.4 need Xdebug 2.9.x, PHP 7.2 is pinned to 2.7.2, and
# older releases install via PECL plus the mcrypt extension (removed from
# PHP core in 7.2).
if [[ $PHP_VERSION == "7.4" || $PHP_VERSION == "7.3" ]]; then
    git clone --depth 1 -b 2.9.0 "https://github.com/xdebug/xdebug" \
        && cd xdebug \
        && phpize \
        && ./configure \
        && make clean \
        && make \
        && make install \
        && docker-php-ext-enable xdebug
elif [[ $PHP_VERSION == "7.2" ]]; then
    git clone --depth 1 -b 2.7.2 "https://github.com/xdebug/xdebug" \
        && cd xdebug \
        && phpize \
        && ./configure \
        && make \
        && make install \
        && docker-php-ext-enable xdebug
else
    # BUG FIX: the original left a trailing backslash after "libmcrypt \",
    # which turned the docker-php-ext-install line into extra arguments of
    # "apk add" — so mcrypt was never actually compiled.  Also normalized
    # "-j$(getconf _NPROCESSORS_ONLN)" to the "-j "$(nproc)"" form used
    # everywhere else in this script.
    apk --update --no-cache add \
        libmcrypt-dev \
        libmcrypt
    docker-php-ext-install -j "$(nproc)" mcrypt
    pecl install xdebug \
        && docker-php-ext-enable xdebug
fi
# redis: build a pinned release from source inside the PHP source tree.
docker-php-source extract \
    && curl -L -o /tmp/redis.tar.gz "https://github.com/phpredis/phpredis/archive/5.1.1.tar.gz" \
    && tar xfz /tmp/redis.tar.gz \
    && rm -r /tmp/redis.tar.gz \
    && mv phpredis-5.1.1 /usr/src/php/ext/redis \
    && docker-php-ext-install redis \
    && docker-php-source delete
# apcu via PECL; phpize deps are installed as a virtual package and removed after.
docker-php-source extract \
    && apk add --no-cache --virtual .phpize-deps-configure $PHPIZE_DEPS \
    && pecl install apcu \
    && docker-php-ext-enable apcu \
    && apk del .phpize-deps-configure \
    && docker-php-source delete
# cassandra: pinned commit of the DataStax driver plus a compatibility patch (PR #135).
docker-php-source extract \
    && apk add --no-cache --virtual .cassandra-deps libressl-dev libuv-dev cassandra-cpp-driver-dev \
    && curl -L -o /tmp/cassandra.tar.gz "https://github.com/datastax/php-driver/archive/24d85d9f1d.tar.gz" \
    && mkdir /tmp/cassandra \
    && tar xfz /tmp/cassandra.tar.gz --strip 1 -C /tmp/cassandra \
    && rm -r /tmp/cassandra.tar.gz \
    && curl -L "https://github.com/datastax/php-driver/pull/135.patch" | patch -p1 -d /tmp/cassandra -i - \
    && mv /tmp/cassandra/ext /usr/src/php/ext/cassandra \
    && rm -rf /tmp/cassandra \
    && docker-php-ext-install cassandra \
    && apk del .cassandra-deps \
    && docker-php-source delete
# Remaining PECL extensions (unpinned: latest compatible versions).
pecl install imagick \
    && docker-php-ext-enable imagick
pecl install mongodb \
    && docker-php-ext-enable mongodb
pecl install amqp \
    && docker-php-ext-enable amqp
# memcached: built from git HEAD without SASL support.
git clone "https://github.com/php-memcached-dev/php-memcached.git" \
    && cd php-memcached \
    && phpize \
    && ./configure --disable-memcached-sasl \
    && make \
    && make install \
    && cd ../ && rm -rf php-memcached \
    && docker-php-ext-enable memcached
# Opcache tuning (recommended production-ish defaults).
{ \
    echo 'opcache.enable=1'; \
    echo 'opcache.revalidate_freq=0'; \
    echo 'opcache.validate_timestamps=1'; \
    echo 'opcache.max_accelerated_files=10000'; \
    echo 'opcache.memory_consumption=192'; \
    echo 'opcache.max_wasted_percentage=10'; \
    echo 'opcache.interned_strings_buffer=16'; \
    echo 'opcache.fast_shutdown=1'; \
} > /usr/local/etc/php/conf.d/opcache-recommended.ini
# APCu shared-memory cache settings.
{ \
    echo 'apc.shm_segments=1'; \
    echo 'apc.shm_size=512M'; \
    echo 'apc.num_files_hint=7000'; \
    echo 'apc.user_entries_hint=4096'; \
    echo 'apc.ttl=7200'; \
    echo 'apc.user_ttl=7200'; \
    echo 'apc.gc_ttl=3600'; \
    echo 'apc.max_file_size=50M'; \
    echo 'apc.stat=1'; \
} > /usr/local/etc/php/conf.d/apcu-recommended.ini
# Global PHP memory limit.
echo "memory_limit=1G" > /usr/local/etc/php/conf.d/zz-conf.ini
|
#include <iostream>
#include <map>
#include <sstream>
#include <string>
using namespace std;

// Counts word frequencies in a fixed example sentence and prints every
// word that occurs the maximum number of times.
int main()
{
    const string sentence = "This is an example sentence for testing";

    // Tokenize with an istringstream: operator>> skips runs of
    // whitespace, so — unlike the original character-by-character scan —
    // consecutive or trailing spaces never produce a bogus empty "word".
    map<string, int> frequency;
    int maxCount = 0;
    istringstream tokens(sentence);
    string word;
    while (tokens >> word)
    {
        int count = ++frequency[word];
        if (count > maxCount)
            maxCount = count;
    }

    // Report all words that reached the maximum frequency.
    for (const auto &entry : frequency)
    {
        if (entry.second == maxCount)
        {
            cout << "The word " << entry.first << " occurs maximum = "
                 << maxCount << " times" << endl;
        }
    }
    return 0;
}
<gh_stars>0
import React, {Component} from 'react';
import Search from './serarch';
import request from 'superagent';
import BookList from './BookList';
// Container component: holds the search query and the Google Books results,
// wiring the Search input to the BookList display.
class Book extends Component {
 //logic
    constructor (props){
        super(props)
        // book: volumes returned by the Google Books API
        // searchField: current text of the search input
        // Save: declared but not used in this file — TODO confirm / remove
        this.state= {
            book:[],
            searchField: '',
            Save: [],
        }
    }
 //methods
    handleSearch = (e) => {
    //this state will handle the the information entered in the search box. this 'event' must be entered to the search component to connect them.
        this.setState ({searchField: e.target.value})
    }
    // Queries the Google Books volumes endpoint with the current search text
    // and stores the returned items in state (triggers a re-render).
    searchBook = (e) => {
        e.preventDefault();
        request
            .get("https://www.googleapis.com/books/v1/volumes?")
            .query({q: this.state.searchField})
            .then((data)=> {
                this.setState({book: [...data.body.items]})
            })
    }
    render(){
        return (
            <div>
                <Search searchBook={this.searchBook} handleSearch={this.handleSearch}/>
                <BookList book={this.state.book}/>
            </div>
        );
    }
}
export default Book; |
import React from "react";
export default class FooterComponent extends React.Component {
render() {
return (
<div className="footer">
<span>© {new Date().getFullYear()} Iteration, Inc.</span>
<a href="https://buildbuddy.io/terms" target="_blank">
Terms
</a>
<a href="https://buildbuddy.io/privacy" target="_blank">
Privacy
</a>
<a href="https://buildbuddy.io" target="_blank">
BuildBuddy
</a>
<a href="mailto:<EMAIL>" target="_blank">
Contact us
</a>
<a href="https://slack.buildbuddy.io" target="_blank">
Slack
</a>
<a href="https://twitter.com/buildbuddy_io" target="_blank">
Twitter
</a>
<a href="https://github.com/buildbuddy-io/buildbuddy/" target="_blank">
GitHub
</a>
</div>
);
}
}
|
<gh_stars>0
package com.java110.things.sip.message.helper;
import com.java110.things.util.DateUtils;
import gov.nist.javax.sip.header.SIPHeader;
import java.util.Date;
// SIP Date header whose body is always the current time rendered in the
// GB (Chinese national standard) date format via DateUtils.
// NOTE(review): no constructor calls super(headerName) — presumably the
// JAIN-SIP base class tolerates this for the encodeBody path used here;
// confirm against gov.nist.javax.sip.header.SIPHeader.
public class CustomSIPDateHeader extends SIPHeader{
    @Override
    protected StringBuilder encodeBody(StringBuilder buffer) {
        // Re-evaluated on every encode: always "now", never cached.
        return buffer.append(DateUtils.getGBFormatDate(new Date()));
    }
}
|
// Textarea style generator: resolves the 130px minimum height through the
// theme's rem converter.
export const getTextareas = ({ toRem }) => ({
  minHeight: toRem(130)
})
|
<gh_stars>0
package hapi
import (
"github.com/akamai/AkamaiOPEN-edgegrid-golang/edgegrid"
)
var (
config = edgegrid.Config{
Host: "akaa-baseurl-xxxxxxxxxxx-xxxxxxxxxxxxx.luna.akamaiapis.net/",
AccessToken: "<KEY>",
ClientToken: "<KEY>",
ClientSecret: "<KEY>
MaxBody: 2048,
Debug: false,
}
)
|
import kotlinx.coroutines.*

// Launches two independent coroutines, each in its own scope, and blocks
// the main thread until both complete.
fun main() {
    val firstTask = CoroutineScope(Job()).launch {
        //Something
    }
    val secondTask = CoroutineScope(Job()).launch {
        //Something
    }
    // joinAll(a, b) awaits both jobs, equivalent to joining each in turn.
    runBlocking {
        joinAll(firstTask, secondTask)
    }
}
#!/bin/bash
#################################
# Speech Script by Dan Fountain #
# TalkToDanF@gmail.com #
#################################
# Splits the command-line text into chunks of roughly <=100 characters and
# streams each chunk through Google Translate's TTS endpoint via mpg123.
# NOTE(review): this unauthenticated translate_tts endpoint is long
# deprecated — confirm it still responds before relying on this script.
INPUT=$*
STRINGNUM=0

# Word-split the input (intentionally unquoted).
ary=($INPUT)
echo "---------------------------"
echo "Speech Script by Dan Fountain"
echo "TalkToDanF@gmail.com"
echo "---------------------------"
for key in "${!ary[@]}"
do
  # Append the next word to the current chunk, then measure it.
  SHORTTMP[$STRINGNUM]="${SHORTTMP[$STRINGNUM]} ${ary[$key]}"
  # IDIOM FIX: use ${#var} directly instead of echo + command substitution.
  LENGTH=${#SHORTTMP[$STRINGNUM]}
  if [[ "$LENGTH" -lt "100" ]]; then
    # Still room in this chunk.
    SHORT[$STRINGNUM]=${SHORTTMP[$STRINGNUM]}
  else
    # Chunk is full: start a new one seeded with the current word.
    STRINGNUM=$((STRINGNUM + 1))
    SHORTTMP[$STRINGNUM]="${ary[$key]}"
    SHORT[$STRINGNUM]="${ary[$key]}"
  fi
done

for key in "${!SHORT[@]}"
do
  echo "Playing line: $((key + 1)) of $((STRINGNUM + 1))"
  # Percent-encode every byte of the chunk (hex-dump then prefix pairs
  # with '%').  The unquoted echo deliberately normalizes whitespace,
  # matching the original behavior.
  NEXTURL=$(echo ${SHORT[$key]} | xxd -plain | tr -d '\n' | sed 's/\(..\)/%\1/g')
  mpg123 -q "http://translate.google.com/translate_tts?tl=en&q=$NEXTURL"
done
# Print the given message and abort the script with a non-zero status.
log_error_exit()
{
    echo "$@"
    exit 1
}

# Validate required environment variables.
# BUG FIX: the original wrote [ -z "$VAR"] with no space before "]",
# which fails at runtime with "[: missing `]'".
if [ -z "$LAB_REGISTRY_HOST" ]; then
    log_error_exit "No registry host specified"
fi

if [ -z "$LAB_REGISTRY_USER" ]; then
    log_error_exit "No registry user specified"
fi

if [ -z "$LAB_REGISTRY_PASSWORD" ]; then
    log_error_exit "No registry password specified"
fi

# BUG FIX: the original ran "docker $LAB_REGISTRY_HOST -u ... -p ... $HOST",
# omitting the "login" subcommand entirely; variables are now quoted too.
docker login -u "$LAB_REGISTRY_USER" -p "$LAB_REGISTRY_PASSWORD" "$LAB_REGISTRY_HOST"
|
/**
 * Maximum contiguous subarray sum (Kadane's algorithm).
 */
public class MaxSubArraySum {

    /**
     * Returns the largest sum of any non-empty contiguous subarray.
     *
     * BUG FIX: the original initialised both accumulators to 0 and reset the
     * running sum on negative prefixes, so an all-negative array incorrectly
     * returned 0 instead of its (negative) maximum element.
     *
     * @param nums input values; may be empty or null
     * @return the maximum subarray sum, or 0 for a null/empty array
     */
    public static int maxSubArraySum(int[] nums) {
        if (nums == null || nums.length == 0) {
            return 0;
        }
        int best = nums[0];       // best sum over any subarray seen so far
        int endingHere = nums[0]; // best sum of a subarray ending at index i
        for (int i = 1; i < nums.length; i++) {
            // Either extend the previous subarray or start fresh at nums[i].
            endingHere = Math.max(nums[i], endingHere + nums[i]);
            best = Math.max(best, endingHere);
        }
        return best;
    }

    public static void main(String[] args) {
        int[] array = {-2, 1, -3, 4, -1, 2, 1, -5, 4};
        System.out.println(maxSubArraySum(array));
    }
}
package lemongrenade.api.services;
import lemongrenade.core.database.lemongraph.LemonGraph;
import lemongrenade.core.database.mongo.MongoDBStore;
import lemongrenade.core.models.*;
import lemongrenade.core.util.LGConstants;
import lemongrenade.core.util.LGProperties;
import org.bson.Document;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.format.DateTimeParseException;
import java.util.*;
/**
* See /docs/coordinator-api.txt for documentation
*/
@Path("/api/")
public class Job {
@Context
HttpServletRequest request;
@Context
HttpServletResponse response;
@Context
ServletContext context;
private static final Logger log = LoggerFactory.getLogger(lemongrenade.api.services.Job.class);
    /**
     * Releases shared connections held by {@code Utils}.
     * Failures are logged and swallowed — callers treat close as best-effort.
     */
    public void close() {
        try {
            Utils.close();
        }
        catch(Exception e) {
            log.error("Error closing connections.");
        }
    }
    /**
     * Rewrites a LEMONGRAPH job object in place for the REST API and returns
     * its job id: "config" is renamed to "meta", the "graph" field is removed
     * (its value becomes the returned id), and per-job "errors"/"status"
     * looked up from the job store are attached.
     */
    String transformJob(JSONObject job) {
        if(job.has("config")) {
            job.put("meta", job.get("config"));
            job.remove("config");
        }
        String job_id = job.getString("graph");
        job.remove("graph");
        job.put("errors", getErrors(job_id));
        job.put("status", getStatus(job_id));
        return job_id;
    }
    /**
     * Applies the {@link #transformJob(JSONObject)} rewrite to every job in a
     * LEMONGRAPH graph array, but batches the MongoDB lookup: all job ids are
     * collected first and fetched in one call, then errors/status are attached
     * per job.  Returns a JSONObject keyed by job id.
     */
    JSONObject transformGraph(JSONArray graph) {
        log.info("Started transformGraph.");
        Iterator graphIterator = graph.iterator();
        JSONObject returnObject = new JSONObject();
        JSONArray jobIds = new JSONArray();
        //Gather all job ids and make a single request to MongoDB for all matching jobs
        while(graphIterator.hasNext()) {
            JSONObject job = (JSONObject) graphIterator.next();
            String job_id = job.getString("graph");
            jobIds.put(job_id);
        }
        log.info("Requesting " + jobIds.length() + " jobs from MongoDB.");
        // startTime/duration: lightweight instrumentation around the bulk fetch.
        Long startTime = Utils.startTime("dao.getByJobIds(jobIds)");
        HashMap<String, LGJob> jobs = Utils.getJobManager().getJobs(jobIds);
        Utils.duration(startTime, "dao.getByJobIds(jobIds)");
        //Iterate over every job in the graph, transform and add the errors and status
        graphIterator = graph.iterator();
        while(graphIterator.hasNext()) {
            JSONObject job = (JSONObject) graphIterator.next();
            LGJob lgJob = null;
            String job_id = job.getString("graph");
            if(jobs.containsKey(job_id)) {
                lgJob = jobs.get(job_id);
            }
            //Transform config->meta
            if(job.has("config")) {
                job.put("meta", job.get("config"));
                job.remove("config");
            }
            job.remove("graph");
            // lgJob may be null for ids missing from Mongo; the helpers handle that.
            job.put("errors", getErrorsHelper(lgJob));
            job.put("status", getStatusHelper(lgJob));
            returnObject.put(job_id, job);
        }
        log.info("Finished transformGraph.");
        return returnObject;
    }
@GET
@Path("jobs/all")
@Produces(MediaType.APPLICATION_JSON)
//Proxies LEMONGRAPH /graph endpoint and replaces "graph" and "meta" items with "job_id" and "config" respectively
public Response jobs() {
log.info("Received request for api/jobs/all.");
Response graphResponse = Utils.lemongraphProxy(request, "graph", "");;
int status = graphResponse.getStatus();
if(status != 200)
return graphResponse;
try {
String body = graphResponse.readEntity(String.class);
JSONArray graph = new JSONArray(body);
JSONObject returnObject = transformGraph(graph);
return Response.status(200).entity(returnObject.toString()).build();
}
catch(Exception e) { //When an error is returned jetty returns 500 and error, even if it's caught
log.warn("Error processing /api/jobs/all");
e.printStackTrace();
return graphResponse;
}
}
    /**
     * GET /api/jobs/{id}
     *
     * Proxies LEMONGRAPH's /graph/{id} endpoint and rewrites the single job
     * via {@link #transformJob(JSONObject)}, returning {job_id: job}.
     * Non-200 proxy responses are passed through unchanged.
     */
    @GET
    @Path("jobs/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    //Proxies LEMONGRAPH /graph endpoint and replaces "graph" and "meta" items with "job_id" and "config" respectively
    public Response job(@PathParam("id") String id) {
        Response graphResponse = Utils.lemongraphProxy(request, "graph/" + id, "");
        int status = graphResponse.getStatus();
        if(status != 200)
            return graphResponse;
        try {
            String body = graphResponse.readEntity(String.class);
            JSONObject job = new JSONObject(body);
            JSONObject jobs = new JSONObject();
            String job_id = transformJob(job);
            jobs.put(job_id, job);
            return Response.status(200).entity(jobs.toString()).build();
        }
        catch(Exception e) { //When an error is returned jetty returns 500 and error, even if it's caught
            log.warn("Error processing /api/graph");
            e.printStackTrace();
            return graphResponse;
        }
    }
    /**
     * POST /api/jobs
     *
     * Body: JSON array of job ids.  Fetches each id from LEMONGRAPH's
     * /graph/{id} endpoint, rewrites it via {@link #transformJob(JSONObject)},
     * and returns {job_id: job, ...}.  The first non-200 proxy response aborts
     * the loop and is returned as-is.
     */
    @POST
    @Path("jobs")
    @Produces(MediaType.APPLICATION_JSON)
    //Proxies LEMONGRAPH /graph endpoint and replaces "graph" and "meta" items with "job_id" and "config" respectively
    public Response jobs(String input) {
        JSONObject returnObject = new JSONObject();
        try {
            JSONArray jobIDs = new JSONArray(input);
            Iterator idIterator = jobIDs.iterator();
            while (idIterator.hasNext()) {
                String id = idIterator.next().toString();
                Response graphResponse = Utils.lemongraphProxy(request, "GET", "graph/" + id, "");
                int status = graphResponse.getStatus();
                if (status != 200)
                    return graphResponse;
                try {
                    String body = graphResponse.readEntity(String.class);
                    JSONObject job = new JSONObject(body);
                    String job_id = transformJob(job);
                    returnObject.put(job_id, job);
                } catch (Exception e) { //When an error is returned jetty returns 500 and error, even if it's caught
                    log.warn("Error processing /api/graph");
                    e.printStackTrace();
                    return graphResponse;
                }
            }//end while loop
        }
        catch(Exception e) {
            // Malformed input body (not a JSON array) or other unexpected failure.
            log.info("Failed processing for input:" + input);
            e.printStackTrace();
            return Response.status(500).entity("Error fetching jobs.").build();
        }
        return Response.status(200).entity(returnObject.toString()).build();
    }
    /**
     * GetJobs()
     *
     * Deprecated in favor of /rest/jobs
     * If NO params, returns all jobs in database.
     *
     * You can submit one or both params at once:
     *
     * @param createdBefore Finds all jobs created Before time and date "2016-08-08T18:04:23.514Z"
     * @param createdAfter Finds all jobs created After time and date. Format: 2016-08-08T18:04:23.514Z"
     * @return List of LGJobs
     */
    @GET
    @Deprecated
    @Path("jobs")
    @Produces(MediaType.APPLICATION_JSON)
    public Response jobGet(@QueryParam("created_before") String createdBefore,
                           @QueryParam("created_after") String createdAfter) throws Exception {
        //List<LGJob> jobs;
        boolean doCreatedBefore = false;
        boolean doCreatedAfter = false;
        Date beforeDate = null;
        Date afterDate = null;
        // Parse created_before if present.
        // NOTE(review): beforeDate/afterDate and the doCreated* flags are never
        // read below — presumably Utils.getLemongraphJob applies the filters
        // from the raw request params; confirm before removing this parsing,
        // which currently serves only as input validation.
        if ((createdBefore != null) && (!createdBefore.equals(""))) {
            Instant beforeInstant = null;
            try {
                beforeInstant = Instant.parse(createdBefore);
            } catch (DateTimeParseException e) {
                return Response.status(201).entity("Invalid created_on Date format [" + createdBefore + "]").build();
            }
            beforeDate = Date.from(beforeInstant);
            doCreatedBefore = true;
        }
        // Parse created_after if present
        if ((createdAfter != null) && (!createdAfter.equals(""))) {
            Instant afterInstant = null;
            try {
                afterInstant = Instant.parse(createdAfter);
            } catch (DateTimeParseException e) {
                return Response.status(201).entity("Invalid created_after Date format [" + createdAfter + "]").build();
            }
            afterDate = Date.from(afterInstant);
            doCreatedAfter = true;
        }
        JSONObject graphJobs = new JSONObject();
        try {
            graphJobs = Utils.getLemongraphJob(request, "", null);//this considers createdBefore/createdAfter params
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Collect the graph's job ids to drive one bulk Mongo lookup.
        JSONArray ids = new JSONArray();
        Iterator<String> idIterator = graphJobs.keys();
        while (idIterator.hasNext()) {
            String id = idIterator.next();
            ids.put(id);
        }
        JSONObject ob = new JSONObject();
        if(ids.length() > 0) {
            // Build response
            SimpleDateFormat sdf = new SimpleDateFormat("MMM dd,yyyy HH:mm:ss");
            JSONObject mongoJobs = Utils.getMongoJobs(ids);
            Iterator iterator = mongoJobs.keySet().iterator();
            while (iterator.hasNext()) {
                String id = iterator.next().toString();
                try {
                    JSONObject mongoJob = mongoJobs.getJSONObject(id);
                    JSONObject job = new JSONObject();
                    // Default value checks: backfill any field a legacy Mongo
                    // document may lack, so the formatting below can't throw.
                    if (!mongoJob.has("reason")) {
                        mongoJob.put("reason", "");
                    }
                    if (!mongoJob.has("endTime")) {
                        mongoJob.put("endTime", 0);
                    }
                    if (!mongoJob.has("totalRunningTimeSeconds")) {
                        mongoJob.put("totalRunningTimeSeconds", 0);
                    }
                    if (!mongoJob.has("approvedAdapterNames")) {
                        mongoJob.put("approvedAdapterNames", new JSONArray());
                    }
                    if (!mongoJob.has("startTime")) {
                        mongoJob.put("startTime", 0);
                    }
                    if (!mongoJob.has("jobErrors")) {
                        mongoJob.put("jobErrors", new JSONArray());
                    }
                    if (!mongoJob.has("taskMap")) {
                        mongoJob.put("taskMap", new JSONObject());
                    }
                    if (!mongoJob.has("jobConfig")) {
                        mongoJob.put("jobConfig", new JSONObject());
                    }
                    if (!mongoJob.has("expireDate")) {
                        mongoJob.put("expireDate", 0);
                    }
                    if (!mongoJob.has("graphActivity")) {
                        mongoJob.put("graphActivity", 0);
                    }
                    if (!mongoJob.has("createDate")) {
                        mongoJob.put("createDate", 0);
                    }
                    if (!mongoJob.has("status")) {
                        mongoJob.put("status", 0);
                    }
                    // Map Mongo field names to the legacy snake_case API shape.
                    job.put("reason", mongoJob.get("reason"));
                    job.put("endtime", sdf.format(mongoJob.get("endTime")));
                    job.put("runtime", mongoJob.get("totalRunningTimeSeconds"));
                    job.put("approvedadapters", mongoJob.getJSONArray("approvedAdapterNames"));
                    job.put("starttime", sdf.format(mongoJob.get("startTime")));
                    job.put("error_count", mongoJob.getJSONArray("jobErrors").length());
                    job.put("task_count", mongoJob.getJSONObject("taskMap").length());
                    job.put("job_config", new JSONObject(mongoJob.get("jobConfig").toString()));
                    job.put("expire_date", sdf.format(mongoJob.get("expireDate")));
                    job.put("job_id", id);
                    job.put("graph_activity", mongoJob.get("graphActivity"));
                    job.put("create_date", sdf.format(mongoJob.get("createDate")));
                    int status = mongoJob.getInt("status");
                    job.put("status", LGJob.getStatusString(status));
                    int active = 0;
                    try {
                        if (status == LGJob.STATUS_PROCESSING) { //only processing jobs have non-zero active_task_count
                            ArrayList<Document> docs = new MongoDBStore().getTasksFromJob(id);
                            JSONObject tasks = MongoDBStore.toJSON(docs);
                            active = LGJob.getActiveTaskCount(tasks);
                        }
                    } catch (Exception e) {
                        log.debug("Couldn't fetch active task count for job:" + id + " Error:" + e.getMessage());
                    }
                    job.put("active_task_count", active);
                    ob.put(id, job);
                } catch (Exception e) {
                    // A single malformed job is skipped rather than failing the whole listing.
                    log.info("Invalid job:" + id + " Error:" + e.getMessage());
                }
            }
        }
        return Response.status(200).entity(ob.toString()).build();
    }
    /**
     * GET /api/jobs/status/{value}
     *
     * Gets all the jobs for the given status (case-insensitive).
     * Valid status values are "NEW", "PROCESSING", "FINISHED",
     * "FINISHED_WITH_ERRORS", "QUEUED", "STOPPED", "EXPIRED", "RESET",
     * "ERROR".  Any other value yields a 500 response.
     */
    @GET
    @Path("jobs/status/{value}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response jobActive(@PathParam("value") String status) {
        if (status.equalsIgnoreCase("NEW")
                || status.equalsIgnoreCase("PROCESSING")
                || status.equalsIgnoreCase("QUEUED")
                || status.equalsIgnoreCase("STOPPED")
                || status.equalsIgnoreCase("EXPIRED")
                || status.equalsIgnoreCase("RESET")
                || status.equalsIgnoreCase("FINISHED_WITH_ERRORS")
                || status.equalsIgnoreCase("FINISHED")
                || (status.equalsIgnoreCase("ERROR"))) {
            List<LGJob> jobs = Utils.getJobManager().getAllByStatus(status);
            JSONObject ob = new JSONObject();
            for (LGJob job : jobs) {
                ob.put(job.getJobId(), job.toJson());
            }
            return Response.status(200).entity(ob.toString(1)).build();
        }
        return Response.status(500).entity("Invalid State Query ["+status+"]").build();
    }
/** Just gives you jobId: status */
@GET
@Path("jobs/status")
@Produces(MediaType.APPLICATION_JSON)
public Response jobActiveByStatus() {
List<LGJob> jobs = Utils.getJobManager().getAllLimitFields("_id", "status", "reason");
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
JSONObject t = new JSONObject();
t.put("status", job.getStatusString(job.getStatus()));
t.put("reason", job.getReason());
ob.put(job.getJobId(), t);
}
return Response.status(200).entity(ob.toString(1)).build();
}
    /**
     * POST /api/jobs/status/{id}
     *
     * Returns the status object for a single job id (see
     * {@link #getStatusObject(String)}); failures produce a 500 with an
     * {"error": message} body.
     */
    @POST
    @Path("jobs/status/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response jobByStatusSingle(@PathParam("id") String job_id) {
        try {
            JSONObject newJob = getStatusObject(job_id);
            return Response.status(200).entity(newJob.toString()).build();
        }
        catch(Exception e) {
            e.printStackTrace();
            JSONObject ret = new JSONObject();
            ret.put("error", e.getMessage());
            return Response.status(500).entity(ret.toString()).build();
        }
    }
    /**
     * PUT /api/jobs/status
     *
     * Takes an array of job IDs as input (e.g. ["1","2","3"]) and returns
     * {id: {status, reason}} for each.  Unknown ids map to
     * {"error": "unknown", "reason": ""} rather than failing the request.
     */
    @PUT
    @Path("jobs/status")
    @Produces(MediaType.APPLICATION_JSON)
    public Response jobActiveByStatusBulk(String body) {
        try {
            JSONArray jobs = new JSONArray(body);
            JSONObject ob = new JSONObject();
            for (int i = 0; i < jobs.length(); i++) {
                String jobId = jobs.getString(i);
                JSONObject t = new JSONObject();
                LGJob lg = Utils.getJobManager().getJob(jobId);
                if (null == lg) {
                    // Id not in the job store: report it as unknown, keep going.
                    t.put("error", "unknown");
                    t.put("reason", "");
                    ob.put(jobId, t);
                } else {
                    t.put("status", lg.getStatusString(lg.getStatus()));
                    t.put("reason", lg.getReason());
                    ob.put(jobId, t);
                }
            }
            return Response.status(200).entity(ob.toString(1)).build();
        }
        catch(Exception e) {
            // Malformed body (not a JSON array of strings) or store failure.
            e.printStackTrace();
            JSONObject ret = new JSONObject();
            ret.put("error", e.getMessage());
            return Response.status(500).entity(ret.toString()).build();
        }
    }
    /**
     * POST /api/jobs/status
     *
     * Takes an array of job ids and returns an array of full status objects
     * (see {@link #getStatusObject(String)}).  The first failure aborts the
     * request with a 500 and an {"error": message} body.
     */
    @POST
    @Path("jobs/status")
    @Produces(MediaType.APPLICATION_JSON)
    public Response jobByStatusBulk(String input) {
        try {
            JSONArray jobs = new JSONArray(input);
            JSONArray newJobs = new JSONArray();
            Iterator jobsIterator = jobs.iterator();
            while(jobsIterator.hasNext()) {
                String job_id = jobsIterator.next().toString();
                try {
                    JSONObject newJob = getStatusObject(job_id);
                    newJobs.put(newJob);
                }
                catch(Exception e) {
                    e.printStackTrace();
                    JSONObject ret = new JSONObject();
                    ret.put("error", e.getMessage());
                    return Response.status(500).entity(ret.toString()).build();
                }
            }
            return Response.status(200).entity(newJobs.toString()).build();
        }
        catch(Exception e) {
            // Malformed input body (not a JSON array).
            e.printStackTrace();
            JSONObject ret = new JSONObject();
            ret.put("error", e.getMessage());
            return Response.status(500).entity(ret.toString()).build();
        }
    }
    /**
     * Builds the status payload for one job.  If the job's status is unknown
     * the result is {"status": "404", "job_id": id}; otherwise LEMONGRAPH's
     * /graph/{id}/status response is merged with locally-tracked status and
     * errors (the "graph"/"meta" fields are surfaced as "job_id"/"config").
     */
    JSONObject getStatusObject(String job_id) throws Exception {
        String status = getStatus(job_id);
        JSONObject newJob = new JSONObject();
        if (status == null) {
            newJob
                    .put("status", "404")
                    .put("job_id", job_id)
            ;
        }
        else {
            Response graphResponse = Utils.lemongraphProxy(request, "GET", "graph/" + job_id + "/status", "");
            String body = graphResponse.readEntity(String.class);
            JSONObject job = new JSONObject(body);
            newJob
                    .put("job_id", job.get("graph"))
                    .put("config", job.get("meta"))
                    .put("maxID", job.get("maxID"))
                    .put("size", job.get("size"))
                    .put("status", status)
                    .put("errors", getErrors(job_id))
            ;
        }
        return newJob;
    }
/**
* Gets all the jobs for the given status.
* Valid status values are "NEW", PROCESSING", "FINISHED", "QUEUED", "STOPPED","EXPIRED", "RESET", "ERROR"
* */
@GET
@Path("jobs/status/{value}/reason/{reason}")
@Produces(MediaType.APPLICATION_JSON)
public Response jobActive(@PathParam("value") String status, @PathParam("reason") String reason) {
if (status.equalsIgnoreCase("NEW")
|| status.equalsIgnoreCase("PROCESSING")
|| status.equalsIgnoreCase("QUEUED")
|| status.equalsIgnoreCase("STOPPED")
|| status.equalsIgnoreCase("EXPIRED")
|| status.equalsIgnoreCase("RESET")
|| status.equalsIgnoreCase("FINISHED")
|| (status.equalsIgnoreCase("ERROR"))) {
List<LGJob> jobs = Utils.getJobManager().getAllByStatusAndReason(status, reason);
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(), job.toJson());
}
return Response.status(200).entity(ob.toString(1)).build();
}
return Response.status(500).entity("Invalid State Query ["+status+"]").build();
}
    /**
     * GET /api/jobs/days/full/{from_days}/{to_days}
     *
     * Used by the lgstats program for now.  Returns every job created in the
     * given day window, each expanded with its tasks, errors and history.
     */
    @GET
    @Path("/jobs/days/full/{from_days}/{to_days}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getJobByIdDaysFull(@PathParam("from_days") int fdays, @PathParam("to_days") int tdays) {
        List<LGJob> jobs = Utils.getJobManager().getAllByDays(fdays, tdays);
        // Guard: without a LEMONGRAPH client connection there is nothing to serve.
        if (Utils.getLemongraph().client == null) {
            return Response.status(404).entity("Not found").build();
        }
        JSONObject ob = new JSONObject();
        for (LGJob job : jobs) {
            JSONObject tmpJob = new JSONObject();
            tmpJob.put("job",job.toJson());
            JSONArray tasks = job.getTaskList();
            tmpJob.put("tasks",tasks);
            List<LGJobError> errors = job.getJobErrors();
            JSONArray errorsJson = new JSONArray();
            for (LGJobError je : errors) {
                errorsJson.put(je);
            }
            tmpJob.put("errors",errorsJson);
            List<LGJobHistory> history = job.getJobHistory();
            JSONArray historyJson = new JSONArray();
            for (LGJobHistory jh : history) {
                historyJson.put(jh.toJson());
            }
            tmpJob.put("history",historyJson);
            ob.put(job.getJobId(),tmpJob);
        }
        return Response.status(200).entity(ob.toString()).build();
    }
/**
* Gets you values from a certain day to a certain date. If you set to_days to 0, it will get all
* days older than from_days. Deprecated because still used by the api pages and scripts.
*
* For example, /jobs/days/30/0 Will get you all jobs older than 30 days
* /jobs/days/30/60 Will get you all jobs between 30 and 60 days old
*
*/
@GET
@Deprecated
@Path("/jobs/days/{from_days}/{to_days}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdDays(@PathParam("from_days") int fdays, @PathParam("to_days") int tdays) {
if ((tdays != 0)&& (tdays < fdays)) {
return Response.status(404).entity("Invalid parameters").build();
}
List<LGJob> jobs = Utils.getJobManager().getAllByDays(fdays, tdays);
if (Utils.getLemongraph().client == null) {
return Response.status(404).entity("Not found").build();
}
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(),job.toJson());
}
return Response.status(200).entity(ob.toString()).build();
}
/** */
@GET
@Deprecated
@Path("/jobs/days/olderthan/{days}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdOlderThanDays(@PathParam("days") int days) {
List<LGJob> jobs = Utils.getJobManager().getAllByOlderThanDays(days);
if (Utils.getLemongraph().client == null) {
return Response.status(404).entity("Not found").build();
}
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(),job.toJson());
}
return Response.status(200).entity(ob.toString()).build();
}
/** */
@GET
@Path("/jobs/mins/{mins}/{tomins}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdMins(@PathParam("mins") int mins, @PathParam("tomins") int tomins) {
List<LGJob> jobs = Utils.getJobManager().getAllByMins(mins, tomins);
if (Utils.getLemongraph().client == null) {
return Response.status(404).entity("Not found").build();
}
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(),job.toJson());
}
return Response.status(200).entity(ob.toString()).build();
}
/** Gets the last X jobs by create_date */
@GET
@Path("/jobs/last/{count}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobsCount(@PathParam("count") int count) {
List<LGJob> jobs = Utils.getJobManager().getLast(count);
if (Utils.getLemongraph().client == null) {
return Response.status(404).entity("Not found").build();
}
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(),job.toJson());
}
return Response.status(200).entity(ob.toString(1)).build();
}
/** */
@GET
@Path("/jobs/age/{days}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdAge(@PathParam("days") int days) {
List<LGJob> jobs = Utils.getJobManager().getAllByAge(days);
if (Utils.getLemongraph().client == null) {
return Response.status(404).entity("Not found").build();
}
JSONObject ob = new JSONObject();
for (LGJob job : jobs) {
ob.put(job.getJobId(),job.toJson());
}
return Response.status(200).entity(ob.toString()).build();
}
/** */
@GET
@Path("/job/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobById(@PathParam("id") String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) {
return Response.status(404).entity("Not found").build();
}
return Response.status(200).entity(lg.toJson().toString(1)).build();
}
/** */
@GET
@Path("/job/standardJob/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdStandardJob(@PathParam("id") String jobId) throws Exception {
JSONObject job = Utils.getStandardJob(request, jobId);
if (null ==job) {
return Response.status(404).entity("Not found").build();
}
return Response.status(200).entity(job.toString(1)).build();
}
/** */
@GET
@Path("/job/{id}/full")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobByIdFull(@PathParam("id") String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) {
return Response.status(404).entity("Not found").build();
}
JSONObject jobResult = lg.toJson();
jobResult.put("history",getHistoryHelper(lg));
jobResult.put("errors",getErrorsHelper(lg));
jobResult.put("tasks",getTasksHelper(lg));
return Response.status(200).entity(jobResult.toString(1)).build();
}
    /**
     * Get job-specific metrics.
     *
     * Walks the job history and, for each task-level entry, collects parallel
     * arrays (graph changes, task id, adapter name, error flag) plus a per-adapter
     * pie chart (labels + summed graph changes).
     *
     * @param jobId id of the job to report on
     * @return JSON blob that contains data that's easily graphed/displayed with javascript tools
     */
    @GET
    @Path("/job/{id}/metrics")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getJobByIdMetrics(@PathParam("id") String jobId) {
        LGJob lg = Utils.getJobManager().getJob(jobId);
        if (null == lg) {
            return Response.status(404).entity("Not found").build();
        }
        JSONObject jobResult = new JSONObject();
        Map<String, LGTask> tasks = lg.getTasks();
        // Build history graph size per task (bar graph)
        List<LGJobHistory> history = lg.getJobHistory();
        JSONArray graphPerTask = new JSONArray();
        JSONArray idPerTask = new JSONArray();
        JSONArray adapterPerTask = new JSONArray();
        JSONArray errorPerTask = new JSONArray();
        JSONArray adapterPie = new JSONArray();
        JSONArray adapterPieLabels= new JSONArray();
        // Running per-adapter totals of graph changes, used for the pie chart.
        HashMap<String, Integer> adapterPieCounters = new HashMap<String,Integer>();
        for(LGJobHistory l: history) {
            // NOTE(review): command type 1 appears to mean a task-level history
            // entry (it carries a task id) -- confirm against LGJobHistory.
            if (l.getCommandType() == 1) {
                LGTask t = tasks.get(l.getTaskId());
                if (t == null) {
                    // History references a task that is no longer in the task map.
                    log.error("Missing task for taskid:"+l.getTaskId());
                } else {
                    String adapter = t.getAdapterName();
                    int graphChange = l.getGraphChanges();
                    graphPerTask.put(graphChange);
                    idPerTask.put(l.getTaskId());
                    adapterPerTask.put(adapter);
                    // Accumulate graph changes per adapter.
                    int count = graphChange;
                    if (adapterPieCounters.containsKey(adapter)) {
                        count = adapterPieCounters.get(adapter).intValue();
                        count += graphChange;
                    }
                    adapterPieCounters.put(adapter,count);
                    // Error flag: 1 for any task not in the COMPLETE state.
                    int error = 0;
                    if (t.getStatus() != t.TASK_STATUS_COMPLETE) {
                        error = 1;
                    }
                    errorPerTask.put(error);
                }
            }
        }
        // Flatten the per-adapter totals into parallel label/value arrays.
        for (Map.Entry<String, Integer> entry : adapterPieCounters.entrySet()) {
            String adapter = entry.getKey();
            Integer value = entry.getValue();
            adapterPieLabels.put(adapter);
            adapterPie.put(value.intValue());
        }
        jobResult.put("graph_changes_per_task",graphPerTask);
        jobResult.put("id_per_task",idPerTask);
        jobResult.put("adapter_per_task",adapterPerTask);
        jobResult.put("error_per_task",errorPerTask);
        jobResult.put("adapter_pie_labels",adapterPieLabels);
        jobResult.put("adapter_pie",adapterPie);
        return Response.status(200).entity(jobResult.toString(1)).build();
    }
/** */
@GET
@Path("/job/{id}/status")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobStatus(@PathParam("id") String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) {
return Response.status(404).entity("Not found").build();
}
JSONObject result = new JSONObject();
result.put("status",lg.getStatusString(lg.getStatus()));
result.put("reason",lg.getReason());
return Response.status(200).entity(result.toString()).build();
}
/** */
@GET
@Path("/job/{id}/history")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobHistory(@PathParam("id") String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) {
return Response.status(404).entity("Not found").build();
}
JSONObject historyResult = new JSONObject();
historyResult.put("history", getHistoryHelper(lg));
return Response.status(200).entity(historyResult.toString(1)).build();
}
/** */
private JSONArray getHistoryHelper(LGJob lg) {
List<LGJobHistory> history = lg.getJobHistory();
JSONArray result = new JSONArray();
for(LGJobHistory l: history) {
JSONObject rl = l.toJson();
result.put(rl);
}
return result;
}
/** Gets task map for job, useful for troubleshooting/testing */
@GET
@Path("/job/{id}/tasks")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobTasks(@PathParam("id") String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) {
return Response.status(404).entity("Not found").build();
}
JSONObject result = new JSONObject();
result.put("tasks",getTasksHelper(lg));
return Response.status(200).entity(result.toString(1)).build();
}
/** */
private JSONArray getTasksHelper(LGJob lg) {
JSONObject result = new JSONObject();
JSONArray s = lg.getTaskList();
return s;
}
/** Gets task map for job, useful for troubleshooting/testing */
@GET
@Path("/job/dbValues/{dbValue}")
@Produces(MediaType.APPLICATION_JSON)
public Response getAllJobsThatHavedbValue(@PathParam("dbValue") String dbValue) {
JSONArray allJobs = Utils.getJobManager().getAllJobsThatHaveDbValueKeyJSONArray(dbValue.toLowerCase());
if (allJobs.length() == 0) {
return Response.status(404).entity("dbValue " + dbValue + " was not found in the database.").build();
}
return Response.status(200).entity(allJobs.toString(1)).build();
}
/** Gets task map for job, useful for troubleshooting/testing */
@GET
@Path("/job/{id}/{key}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobdbValueByKey(@PathParam("id") String jobId, @PathParam("key") String key) {
LGdbValue value = Utils.getJobManager().getDbValuesByJobIdandKey(jobId, key);
if (null == value) {
return Response.status(404).entity("JobId " + jobId + " not found in " + key + " database.").build();
}
// JSONObject result = new JSONObject();
// result.put(key,lgValue.toJson());
return Response.status(200).entity(value.toJson().toString(1)).build();
}
@GET
@Path("/job/{id}/dbValues")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobdbValueAll(@PathParam("id") String jobId) {
LGdbValue lGdbValue = Utils.getJobManager().getDbValuesByJobId(jobId);
if (null == lGdbValue) {
return Response.status(404).entity("JobId " + jobId + " not found in database.").build();
}
// JSONObject result = new JSONObject();
// result.put(lGdbValue.toJson());
return Response.status(200).entity(lGdbValue.toJson().toString(1)).build();
}
/** Gets error list for job */
@GET
@Path("/job/{id}/errors")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobErrors(@PathParam("id") String jobId) {
JSONObject errorResult = new JSONObject();
errorResult.put("errors",getErrors(jobId));
if (null == errorResult) {
return Response.status(404).entity("Not found").build();
}
return Response.status(200).entity(errorResult.toString(1)).build();
}
JSONArray getErrors(String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) { return null;
}
return getErrorsHelper(lg);
}
String getStatus(String jobId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
if (null == lg) { return null;
}
return lg.getStatusString(lg.getStatus());
}
/** */
private String getStatusHelper(LGJob lg) {
if (null == lg) { return null; }
return lg.getStatusString(lg.getStatus());
}
/** */
public static JSONArray getErrorsHelper(LGJob lg) {
if (null == lg) { return null; }
List<LGJobError> errors = lg.getJobErrors();
JSONArray result = new JSONArray();
for(LGJobError l: errors) {
JSONObject rl = l.toJson();
result.put(rl);
}
return result;
}
/** Gets the graph from LemonGraph/DB*/
@GET
@Path("/job/{id}/graph")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobGraphData(@PathParam("id") String jobId) {
if (Utils.graphStoreStr.equalsIgnoreCase("lemongraph")) {
try {
JSONObject graph = LemonGraph.getGraph(jobId);
return Response.status(200).entity(graph.toString()).build();
} catch (Exception e) {
log.error("Lookup from LemonGraph failed " + e.getMessage());
return Response.status(404).entity("Graph Not stored in lemongraph").build();
}
}
return Response.status(404).entity("Not found").build();
}
/** Gets the graph from LemonGraph/DB in cytoscape format */
@GET
@Path("/job/{id}/graph/cytoscape")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobGraphDataCytoscape(@PathParam("id") String jobId) {
//
if (Utils.graphStoreStr.equalsIgnoreCase("lemongraph")) {
try {
JSONObject graph = LemonGraph.getGraphCytoscape(jobId);
return Response.status(200).entity(graph.toString()).build();
} catch (Exception e) {
log.error("Lookup from LemonGraph failed " + e.getMessage());
return Response.status(404).entity("Graph Not stored in lemongraph").build();
}
}
return Response.status(404).entity("Not found").build();
}
/** Gets the graph from LemonGraph/DB in d3 format */
@GET
@Path("/job/{id}/graph/d3")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobGraphDataD3(@PathParam("id") String jobId) {
//
if (Utils.graphStoreStr.equalsIgnoreCase("lemongraph")) {
try {
JSONObject graph = LemonGraph.getGraphD3(jobId);
return Response.status(200).entity(graph.toString()).build();
} catch (Exception e) {
log.error("Lookup from LemonGraph failed " + e.getMessage());
return Response.status(404).entity("Graph Not stored in lemongraph").build();
}
}
return Response.status(404).entity("Not found").build();
}
/** Gets the graph from LemonGraph/DB in d3 format */
@GET
@Path("/job/{id}/graph/d3/{start}/{stop}")
@Produces(MediaType.APPLICATION_JSON)
public Response getJobGraphDataD3(@PathParam("id") String jobId
, @PathParam("start") int start
, @PathParam("stop") int stop) {
if (Utils.graphStoreStr.equalsIgnoreCase("lemongraph")) {
try {
JSONObject graph = LemonGraph.getGraphD3(jobId, start, stop);
return Response.status(200).entity(graph.toString()).build();
} catch (Exception e) {
log.error("Lookup from LemonGraph failed " + e.getMessage());
return Response.status(404).entity("Graph Not stored in lemongraph").build();
}
}
return Response.status(404).entity("Not found").build();
}
/** */
@PUT
@Path("/job/{id}/cancel")
@Produces(MediaType.APPLICATION_JSON)
public Response cancelJob(@PathParam("id") String jobId) throws Exception {
log.info("Cancel job received for "+jobId);
JSONObject params = Utils.getRequestParameters(request);
params.put("ids", new JSONArray().put(jobId));
JSONObject ret = Utils.cancelHelper(params);
return Response.status(200).entity(ret.toString()).build();
}
/** */
@PUT
@Path("/jobs/cancel")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response cancelJobs(String body) throws Exception {
JSONArray jobIds = new JSONArray(body);
JSONArray retVals = new JSONArray();
JSONObject params = Utils.getRequestParameters(request);
params.put("ids", jobIds);
JSONObject ret = Utils.cancelHelper(params);
retVals.put(ret);
return Response.status(200).entity(retVals.toString()).build();
}
/**
* Single Job reset call
* Optional: pass a jsonobject in the body with key of "REASON" and this will be stored with the
* job history.
* Delete graph information from LEMONGRAPH. Job meta data remains in database(mongo) and can be reran in
* the future.
*
* Result
* status : job status after this operation
* reason : job status after this operation
* status_code:: 409 is conflist
*
*
*
* See api documentation for more information
*/
@PUT
@Path("/job/{id}/reset")
@Produces(MediaType.APPLICATION_JSON)
public Response resetJob(@PathParam("id") String jobId, String body) {
log.info("Raw resetjob " + body.toString());
String reason = "";
JSONObject jb = new JSONObject(body);
if (jb.has("reason")) {
reason = jb.getString("reason");
}
Boolean allowOverwrite = false; // Default behaviour (if you send a reset on a reset , overwrite will allow
// you to change the reason
if (jb.has(LGConstants.LG_RESET_OVERWRITE)) {
allowOverwrite = jb.getBoolean(LGConstants.LG_RESET_OVERWRITE);
}
log.info("Reset job received for [" + jobId + "] Reason [" + reason + "]");
JSONObject ret = Utils.resetHelper(jobId, reason, allowOverwrite);
int statusCode = 200;
if (ret.has(jobId)) {
if (ret.getJSONObject(jobId).has("job_code"))
statusCode = ret.getInt("status_code");
}
return Response.status(statusCode).entity(ret.toString()).build();
}
    /**
     * Bulk Job reset call (PUT variant).
     * Deletes graph information from LEMONGRAPH. Job meta data remains in the
     * database (mongo) and the jobs can be rerun in the future.
     * Delegates to resetJobs(String); see api documentation for the body format.
     */
    @PUT
    @Path("/jobs/reset")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response resetJobsPut(String body) {
        return resetJobs(body);
    }
@POST
@Path("/jobs/reset")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response resetJobsPost(String body) {
try {
JSONArray jobIds = new JSONArray(body);
return resetJobs(jobIds);
} catch (Exception e) {
e.printStackTrace();
return Response.status(500).entity(e.getMessage()).build();
}
}
    /**
     * Bulk reset implementation for the PUT variant.
     * Body format: {"reason": ..., "overwrite": ..., "jobs": {jobId: {..., "reason": ...}, ...}}.
     * Each job gets an individual result entry keyed by job id; a failing job
     * does not abort the batch.
     */
    public Response resetJobs(String body) {
        JSONObject jb = new JSONObject(body);
        if (!jb.has("jobs")) {
            log.error("Missing 'jobs' field.");
            return Response.status(500).entity("{'error':'missing jobs field'}").build();
        }
        // Note: this is the global reason. If the parser sees a reason for an individual job listing
        // it will use that instead.
        String globalreason = "";
        if (jb.has(LGConstants.LG_RESET_REASON)) {
            globalreason = jb.getString(LGConstants.LG_RESET_REASON);
        }
        log.info("Received bulk reset command global reason ["+globalreason+"]");
        Boolean allowOverwrite = false; // Default behaviour (if you send a reset on a reset , overwrite will allow
                                        // you to change the reason
        if (jb.has(LGConstants.LG_RESET_OVERWRITE)) {
            allowOverwrite = jb.getBoolean(LGConstants.LG_RESET_OVERWRITE);
        }
        JSONObject jobs = jb.getJSONObject("jobs");
        JSONObject retVals = new JSONObject();
        for(Object key: jobs.keySet()) {
            String jobId = (String) key;
            JSONObject info = jobs.getJSONObject(jobId);
            System.out.println(" Processing:"+info.toString());
            // You can supply individual reason for a specific job, otherwise the 'global'
            String tmpReason = globalreason;
            if (info.has(LGConstants.LG_RESET_REASON)) {
                tmpReason = info.getString(LGConstants.LG_RESET_REASON);
            }
            try {
                log.info("Resetting job : "+jobId);
                JSONObject ret = Utils.resetHelper(jobId, tmpReason, allowOverwrite);
                JSONObject data = ret.getJSONObject(jobId);
                retVals.put(jobId, data);
            } catch (Exception e) {
                // Record the failure for this job and continue with the rest.
                JSONObject ret = new JSONObject();
                ret.put("message", "Job with the id " + jobId + " Reset FAILED: "+e.getMessage());
                ret.put("status_code", 201);
                ret.put("reset",false);
                retVals.put(jobId, ret);
            }
        }
        return Response.status(200).entity(retVals.toString()).build();
    }
public Response resetJobs(JSONArray jobIds) {
String globalreason = "";
log.info("Received bulk reset command global reason [" + globalreason + "]");
Boolean allowOverwrite = false; // Default behaviour (if you send a reset on a reset , overwrite will allow
JSONObject retVals = new JSONObject();
for(int i = 0; i < jobIds.length(); i++) {
String jobId = jobIds.getString(i);
System.out.println(" Processing:"+jobId);
// You can supply individual reason for a specific job, otherwise the 'global'
String tmpReason = globalreason;
try {
log.info("Resetting job : "+jobId);
JSONObject ret = Utils.resetHelper(jobId, tmpReason, allowOverwrite);
JSONObject data = ret.getJSONObject(jobId);
retVals.put(jobId, data);
} catch (Exception e) {
JSONObject ret = new JSONObject();
ret.put("message", "Job with the id " + jobId + " Reset FAILED: "+e.getMessage());
ret.put("status_code", 201);
ret.put("reset",false);
retVals.put(jobId, ret);
}
}
return Response.status(200).entity(retVals.toString()).build();
}
/**
* Single Job Retry call
*
* This call will retry all your failed tasks for a job. Any new tasks spawned from these
* failed tasks will continue processing as normal. Job state will return to PROCESSING
* If there are no failed tasks, nothing will occur.
*
*/
@PUT
@Path("/job/{id}/retry")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response retryJob(@PathParam("id") String jobId, String body) {
String reason = "";
JSONObject jb = new JSONObject(body);
log.info("Retry job received for [" + jobId + "]");
JSONObject ret = retryHelper(jobId, "");
return Response.status(200).entity(ret.toString()).build();
}
@PUT
@Path("/job/{id}/retry/{taskid}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response retryJobForTask(@PathParam("id") String jobId, @PathParam("taskid") String taskId, String body) {
String reason = "";
JSONObject jb = new JSONObject(body);
log.info("Retry job received for [" + jobId + "] and task [" + taskId + "]");
if ((jobId == null) || jobId.equals("")) {
return Response.status(201).entity("{ 'error':'Invalid jobId'}").build();
}
if ((taskId == null) || taskId.equals("")) {
return Response.status(201).entity("{ 'error':'Invalid taskId'}").build();
}
JSONObject ret = retryHelper(jobId,taskId);
return Response.status(200).entity(ret.toString()).build();
}
/**
* Used by /jobs/retry and /job/retry
*
*
*
* @param jobId the job id to retry
* @param taskId if you want to only retry a certain task inside a job. set to "" otherwise
* @return JSONOBject - results
*/
private JSONObject retryHelper(String jobId, String taskId) {
LGJob lg = Utils.getJobManager().getJob(jobId);
JSONObject jobInfo = new JSONObject();
JSONObject ret = new JSONObject();
if (null == lg) {
jobInfo.put("message", "job not found");
jobInfo.put("retry",false);
jobInfo.put("status_code",400);
ret.put(jobId, jobInfo);
return ret;
}
int status = lg.getStatus();
if ( (status != LGJob.STATUS_FINISHED_WITH_ERRORS)) {
jobInfo.put("message", "Can not retry job, Job is "+lg.getStatusString(status));
jobInfo.put("retry",false);
jobInfo.put("status_code", 201);
ret.put(jobId, jobInfo);
return ret;
}
try {
Utils.getSubmitToRabbit().sendRetry(jobId, taskId);
}
catch (Exception e) {
jobInfo.put("message", "Can not retry job "+e.getMessage());
jobInfo.put("retry",false);
jobInfo.put("status_code",201);
ret.put(jobId, jobInfo);
return ret;
}
jobInfo.put("message", "");
jobInfo.put("retry", true);
jobInfo.put("status_code",200);
ret.put(jobId, jobInfo);
return ret;
}
/**
* Deletes from Mongo Lemongrenade and from Lemongraph
*/
@DELETE
@Path("/job/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response deleteJobEndpoint(@PathParam("id") String jobId) {
log.info("API Delete Job received " + jobId);
JSONObject ret = Utils.deleteJob(jobId);
return Response.status(500).entity(ret.toString()).build();
}
/**
* BULK Delete from Mongo Lemongrenade and from Lemongraph
*/
@POST
@Path("/jobs/delete")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response deleteJobs(String body) {
log.info("API Delete Jobs received " + body);
JSONArray retVals = new JSONArray();
String jobId;
JSONObject params = Utils.getRequestParameters(request);
try {
JSONArray input_jobs = new JSONArray(body);
for (int i = 0; i < input_jobs.length(); i++) {
jobId = input_jobs.getString(i);
try {
log.info("Deleting job : " + jobId);
params.put("ids", new JSONArray().put(jobId));
JSONObject ret = Utils.deleteHelper(params);
retVals.put(ret);
} catch (Exception e) {
JSONObject ret = new JSONObject();
ret.put("error", "Job with the id " + jobId + " FAILED: " + e.getMessage());
ret.put("deleted", false);
retVals.put(ret);
}
}
return Response.status(200).entity(retVals.toString()).build();
}
catch(Exception e) {
e.printStackTrace();
return Response.status(500).entity(retVals.toString()).build();
}
}
/**
* Create New Job
*/
@POST
@Path("/job")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public static Response createJobFromJsonNoJobId(String jobJson) throws Exception {
JSONObject result = new JSONObject();
log.info("RAW POST REQUEST : "+jobJson);
JSONObject job;
try {
job = new JSONObject(jobJson);
} catch (org.json.JSONException e) {
JSONObject ret = new JSONObject();
ret.put("error", "Error Parsing JSON. " + e.getMessage() );
ret.put("original_request",jobJson);
return Response.status(500).entity(ret.toString()).build();
}
// Required adapterlist, seed
ArrayList<String> approvedAdapters = new ArrayList<>();
JSONObject seedData = new JSONObject();
if (job.has("seed")) {
seedData = job.getJSONObject("seed");
}
// Add Incoming Nodes
JSONArray nodes = new JSONArray();
if (seedData.has("nodes")) {
nodes = seedData.getJSONArray("nodes");
}
JSONObject jobConfig = new JSONObject();
if (job.has("job_config")) {
jobConfig = job.getJSONObject("job_config");
}
boolean graph_group = false;
if(jobConfig.has("graph_group") && jobConfig.getBoolean("graph_group") == true) {
graph_group = true;
}
else if(jobConfig.has("job_type") && jobConfig.get("job_type").toString().toLowerCase().equals("graph_group")) {
graph_group = true;
}
String jobId = "";
if(jobConfig.has("job_id")) {
jobId = jobConfig.getString("job_id");
}
if(jobId.length() == 0) { //create a job ID
jobId = LemonGraph.createGraph(new JSONObject()); //fetch a valid jobId from LEMONGRAPH
LemonGraph.deleteGraph(jobId);//clear the empty job we got the ID from
}
jobConfig.put("job_id", jobId);
// Parse the adapter list from the job_config
try {
approvedAdapters = Utils.getAdapterManager().parseAdaptersListFromJobConfig(jobConfig);
}
catch (Exception e) {
JSONObject ret = new JSONObject();
ret.put("error", "Error parsing adapter list:"+e.getMessage());
return Response.status(500).entity(ret.toString()).build();
}
log.info("Adapter list : "+approvedAdapters.toString());
// Set any missing default information in jobConfig
if (!jobConfig.has("depth")) {
int depth = LGProperties.getInteger("api.default_depth", 3);
jobConfig.put("depth", depth);
}
if (!jobConfig.has("ttl")) {
int ttl = LGProperties.getInteger("api.default_ttl", 0);
jobConfig.put("ttl", ttl);
}
if (!jobConfig.has("priority")) {
String priority = LGProperties.get("api.default_priority", "user_low");
jobConfig.put("priority", priority);
}
if (!jobConfig.has("description")) {
String description = LGProperties.get("api.default_description", "");
jobConfig.put("description", description);
}
log.info("Job Post - adapterlist: " + approvedAdapters.toString()
+ " seed" + seedData.toString()
+ " job_config:" + jobConfig.toString());
LGPayload newPayload = new LGPayload(jobConfig);
for (int i = 0; i < nodes.length(); i++) {
JSONObject node = nodes.getJSONObject(i);
if(!node.has("LG:METADATA")) {node.put("LG:METADATA", new JSONObject());}
newPayload.addResponseNode(node);
}
// Add incoming Edges
JSONArray edges = new JSONArray();
if (seedData.has("edges")) {
edges = seedData.getJSONArray("edges");
}
for (int i = 0; i < edges.length(); i++) {
JSONArray edgeData = edges.getJSONArray(i); //source, edge, target
if(edgeData.length() == 3) {//add an edge of the size is correct
Iterator iterator = edgeData.iterator();
while(iterator.hasNext()) {
JSONObject node = (JSONObject) iterator.next();
if(!node.has("LG:METADATA")) {node.put("LG:METADATA", new JSONObject());}
}
JSONObject src = edgeData.getJSONObject(0);
JSONObject edge = edgeData.getJSONObject(1);
JSONObject target = edgeData.getJSONObject(2);
newPayload.addResponseEdge(src, edge, target);
}
throw new Exception("Invalid edge present:"+edgeData);
}
try {
LGJob lgJob = Utils.getSubmitToRabbit().sendNewJobToCommandController(approvedAdapters, newPayload);
jobId = lgJob.getJobId();
} catch (Exception e) {
log.error("Failed to sendNewJobToCommandController.");
e.printStackTrace();
return Response.status(500).entity(e.getMessage()).build();
}
// Return result in JSON
result.put("status", "created");
result.put("job_id", jobId);
return Response.status(200).entity(result.toString()).build();
}
    /**
     * Add To Job.
     *
     * Unlike job creation, callers pass "job_data" (nodes/edges to add to an
     * existing graph) plus a "job_config" containing the adapter list. Missing
     * config values (depth, ttl, priority, description) are filled from
     * LGProperties defaults. The job must already exist; the payload is sent to
     * the command controller as an LG_PAYLOAD_TYPE_COMMAND payload.
     *
     * @param jobId   id of the existing job to extend (must match job_config.job_id if present)
     * @param jobJson raw JSON request body with job_data and job_config
     * @return 200 {"status":"added",...} on success; 404 unknown job; 500 on
     *         validation failures; 201 when the Rabbit submission fails
     */
    @POST
    @Path("/job/{id}/insert")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response addToJob(@PathParam("id") String jobId, String jobJson) {
        JSONObject job = new JSONObject(jobJson);
        // Required adapterlist, seed
        ArrayList<String> approvedAdapters = new ArrayList<>();
        JSONObject jobData = new JSONObject();
        if (job.has("job_data")) {
            jobData = job.getJSONObject("job_data");
        }
        JSONObject jobConfig = new JSONObject();
        if (job.has("job_config")) {
            jobConfig = job.getJSONObject("job_config");
        }
        // Parse the adapter list from the job_config
        try {
            approvedAdapters = Utils.getAdapterManager().parseAdaptersListFromJobConfig(jobConfig);
        }
        catch (Exception e) {
            JSONObject ret = new JSONObject();
            ret.put("error", "Error parsing adapter list:"+e.getMessage());
            return Response.status(500).entity(ret.toString()).build();
        }
        log.info("Adapter list "+approvedAdapters.toString());
        // NOTE(review): jobData is a JSONObject, so jobData.equals("") is always
        // false -- only the empty-adapter check can trigger here. Presumably the
        // intent was jobData.length() == 0; confirm before changing.
        if ((approvedAdapters.size() == 0) || jobData.equals("")) {
            JSONObject ret = new JSONObject();
            ret.put("error", "Missing required job information, adapterlist, seed");
            return Response.status(500).entity(ret.toString()).build();
        }
        // If job_id is in jobConfig make sure it matches the job_id variable
        if (jobConfig.has("job_id")) {
            String jid = jobConfig.getString("job_id");
            if (!jobId.equals(jid)) {
                // Throw error
                return Response.status(500).entity("Job ID mismatch job_id and jobconfig{job_id}").build();
            }
        } else {
            // Append job_id to job_config
            jobConfig.put("job_id",jobId);
        }
        // Set missing default information in jobConfig
        if (!jobConfig.has("depth")) {
            int depth = LGProperties.getInteger("api.default_depth",3);
            jobConfig.put("depth",depth);
        }
        if (!jobConfig.has("ttl")) {
            int ttl = LGProperties.getInteger("api.default_ttl",0);
            jobConfig.put("ttl",ttl);
        }
        if (!jobConfig.has("priority")) {
            String priority = LGProperties.get("api.default_priority", "user_low");
            jobConfig.put("priority",priority);
        }
        if (!jobConfig.has("description")) {
            String description = LGProperties.get("api.default_description", "");
            jobConfig.put("description",description);
        }
        log.info("Job ADD TO JOB job_id "+jobId+" adapterlist: "+approvedAdapters.toString()+" seed"+jobData.toString()
                +" job_config:"+jobConfig.toString());
        // Make sure the job already exists in the system
        LGJob lg = Utils.getJobManager().getJob(jobId);
        if (null == lg) {
            JSONObject ret = new JSONObject();
            ret.put("error", "Job with the id " + jobId + " Does not exist. Job add cancelled.");
            return Response.status(404).entity(ret.toString()).build();
        }
        LGPayload newPayload = new LGPayload(jobConfig);
        // Add incoming Nodes
        JSONArray nodes = new JSONArray();
        if (jobData.has("nodes")) {
            nodes = jobData.getJSONArray("nodes");
        }
        for(int i = 0; i < nodes.length(); i++){
            JSONObject node = nodes.getJSONObject(i);
            newPayload.addResponseNode(node);
        }
        // Add incoming Edges
        JSONArray edges = new JSONArray();
        if (jobData.has("edges")) {
            edges = jobData.getJSONArray("edges");
        }
        // NOTE(review): this loop currently does NOT add edges to the payload --
        // the addResponseEdge call is commented out pending proper (src, tgt,
        // data) support. Edges in job_data are silently dropped.
        for(int i = 0; i < edges.length(); i++){
            JSONObject edge = edges.getJSONObject(i);
            // TODO: proper support for edges required here (src, tgt, data)
            //newPayload.addResponseEdge(edge);
        }
        // Set the payload type
        newPayload.setPayloadType(LGConstants.LG_PAYLOAD_TYPE_COMMAND);
        /**
         rrayList<String> approvedAdapters = new ArrayList<>();
         for (int i=0; i< adapterList.length(); i++) {
         approvedAdapters.add(adapterList.getString(i));
         }*/
        try {
            Utils.getSubmitToRabbit().sendAddToJobToCommandController(jobId, approvedAdapters, newPayload);
        }
        catch (Exception e) {
            return Response.status(201).entity(e.getMessage()).build();
        }
        // Return result in JSON
        JSONObject result = new JSONObject();
        result.put("status", "added");
        result.put("job_id",jobId);
        return Response.status(200).entity(result.toString()).build();
    }
/**
* postaction - Allows you to run certain adapters on specific nodes.
* Only required information is post_action_job_config, post_action_nodes, and post_action_adapters
*
* {
* "post_action_job_config": {
* "post_action_nodes": ["2","3","4"],
* "description": "job description",
* "adapters": {
* "PlusBang": {}
* }
* }
* }
*
*/
@POST
@Path("/job/{id}/postaction")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response postAction(@PathParam("id") String jobId, String jobJson) {
JSONObject result = new JSONObject();
ArrayList<String> postActionAdapters = new ArrayList<>();
JSONArray postActionNodes = new JSONArray() ;
JSONObject jobConfig = new JSONObject();
result.put("job_id", jobId);
JSONObject job = new JSONObject(jobJson);
log.info("Received RAW post action request:"+jobJson);
// Sanity Check input data for required
if (job.has("post_action_job_config")) {
jobConfig = job.getJSONObject("post_action_job_config");
} else {
result.put("message", "Missing required post_action_job_config");
return Response.status(201).entity(result.toString()).build();
}
if (jobConfig.has("nodes")) {
postActionNodes = jobConfig.getJSONArray("nodes");
} else {
result.put("message", "Missing required post_action_job_config:nodes");
return Response.status(201).entity(result.toString()).build();
}
if (postActionNodes.length() == 0) {
result.put("message", "Empty post_action_nodes array");
return Response.status(201).entity(result.toString()).build();
}
if(!(postActionNodes.get(0) instanceof String)) { //if the first postActionNode isn't a String, warn the submitter
result.put("message", "post_action_nodes array must contain String types.");
return Response.status(201).entity(result.toString()).build();
}
// Parse the adapter list from the post_action_job_config
if (!jobConfig.has("adapters")) {
result.put("message", "Missing required post_action_job_config:adapters");
return Response.status(201).entity(result.toString()).build();
}
try {
postActionAdapters = Utils.getAdapterManager().parseAdaptersListFromJobConfig(jobConfig);
}
catch (Exception e) {
JSONObject ret = new JSONObject();
ret.put("error", "Error parsing adapter list:"+e.getMessage());
return Response.status(500).entity(ret.toString()).build();
}
if (postActionAdapters.size() == 0) {
result.put("message", "Empty post_action_adapters array");
return Response.status(201).entity(result.toString()).build();
}
log.info("Adapter list : "+postActionAdapters.toString());
// If job_id is in jobConfig make sure it matches the job_id variable
if (jobConfig.has("job_id")) {
String jid = jobConfig.getString("job_id");
if (!jobId.equals(jid)) {
return Response.status(201).entity("Job ID mismatch job_id and post_action_job_confg{job_id}").build();
}
} else {
// Append job_id to job_config
jobConfig.put("job_id",jobId);
}
// Set missing default information in jobConfig. None of which is required
if (!jobConfig.has("depth")) {
int depth = LGProperties.getInteger("api.default_depth",5);
jobConfig.put("depth",depth);
}
if (!jobConfig.has("ttl")) {
int ttl = LGProperties.getInteger("api.default_ttl",0);
jobConfig.put("ttl",ttl);
}
if (!jobConfig.has("priority")) {
String priority = LGProperties.get("api.default_priority", "user_low");
jobConfig.put("priority",priority);
}
if (!jobConfig.has("description")) {
String description = LGProperties.get("api.default_description", "");
jobConfig.put("description",description);
}
log.info("Execute postaction on job:"+jobId
+" adapters: "+postActionAdapters.toString()+" nodes:"+ postActionNodes.toString());
// Make sure the job already exists in the system
LGJob lg = Utils.getJobManager().getJob(jobId);
if (lg == null) {
result.put("message", "Job with the id " + jobId + " Does not exist. postaction canceled.");
return Response.status(201).entity(result.toString()).build();
}
// The new job config gets stored in the LG_INTERnAL_DATA structure
jobConfig.put(LGConstants.LG_INTERNAL_OP,LGConstants.LG_INTERNAL_OP_EXECUTE_ON_ADAPTERS);
JSONObject tmpData = new JSONObject();
tmpData.put("post_action_job_config",jobConfig);
jobConfig.put(LGConstants.LG_INTERNAL_DATA,tmpData.toString());
LGPayload newPayload = new LGPayload(jobConfig);
newPayload.setPayloadType(LGConstants.LG_PAYLOAD_TYPE_COMMAND);
try {
Utils.getSubmitToRabbit().sendPostActionCommandController(jobId, postActionAdapters, newPayload);
}
catch (Exception e) {
return Response.status(201).entity(e.getMessage()).build();
}
// Submit was success
result.put("message", "submitted");
return Response.status(200).entity(result.toString()).build();
}
public static void main(String[] args) {
Job job = new Job();
for (int i = 0; i < 10; i++){
long start = System.currentTimeMillis();
Response res = job.jobActiveByStatus();
System.out.println(System.currentTimeMillis() - start);
}
}
}
|
#!/bin/bash
# Pushes the p2p source strings to Crowdin and pulls back translation files.
#
# NOTE(review): this script relies on bash-only features ([[ ]], read -p, the
# 'function' keyword), so the shebang was changed from /bin/sh to /bin/bash.
# The crowdin-cli.jar fallback was also changed from 'alias' to a shell
# function, because bash does not expand aliases in non-interactive scripts.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
WHITE='\033[1;37m'
RESET='\033[0m'
# Resolve the crowdin CLI: either on PATH, or via the standalone jar.
if ! [ -x "$(command -v crowdin)" ]; then
  if [ -f /usr/local/bin/crowdin-cli.jar ]; then
    crowdin() { java -jar /usr/local/bin/crowdin-cli.jar "$@"; }
  else
    echo ${YELLOW}"crowdin-cli not found. Please follow the instructions here: https://support.crowdin.com/cli-tool/#installation"${RESET}
    exit 1
  fi
fi
# 'origin' must be the contributor's fork, never the upstream repository.
if [[ $(git config --get remote.origin.url) =~ binary-com/deriv-app ]]; then
  echo ${RED}" > ERROR: "${RESET}"remote 'origin' should be your fork."
  exit 1
fi
# Print a green progress message.
function message {
  echo ${GREEN}" >"${RESET} $1
}
# Ask a yes/no question; the one-character answer is left in $REPLY.
function confirm {
  read -p "$(echo "\n${WHITE}$1 ${RESET}(y/n)${YELLOW}") " -n 1 -r &&
  echo "${RESET}"
}
cd $(git rev-parse --show-toplevel) &&
message "Updating p2p translations branch" &&
git branch -D p2p_translations &&
git push origin --delete p2p_translations
# (the push above is deliberately not chained: it may fail when the remote
# branch does not exist, and the script should continue regardless)
git checkout -b p2p_translations &&
git pull upstream dev &&
confirm "Update the source file (messages.json) and push to Crowdin?" &&
if [[ $REPLY =~ ^[Yy]$ ]]
then
  message "Updating p2p translations source file" &&
  cd $(git rev-parse --show-toplevel) && cd packages/p2p/scripts && node extract-translations.js &&
  message "Uploading source file to Crowdin"
  cd $(git rev-parse --show-toplevel) && cd packages/p2p && source ~/.bash_profile && crowdin upload sources
  message "Complete, new p2p translations have been uploaded to Crowdin"
fi &&
confirm "Download p2p translation files and update javascript texts?" &&
if [[ $REPLY =~ ^[Yy]$ ]]
then
  message "Downloading p2p translation files from Crowdin (*.json)" &&
  crowdin download
fi &&
confirm "Commit changes and push to origin?" &&
if [[ $REPLY =~ ^[Yy]$ ]]
then
  cd $(git rev-parse --show-toplevel) &&
  message "Committing"
  git commit -a -m "Update p2p translations" &&
  message "Pushing"
  git push -u origin p2p_translations
fi &&
echo ${GREEN}"\nSuccessfully Done."
#!/bin/bash
# Fetch available VM images for one or all CSP regions via the Tumblebug REST
# API. Relies on variables sourced from ../init.sh: AUTH, TumblebugServer,
# NSID, INDEX, REGION, NumCSP, NumRegion, CSPType, RegionName, CONN_CONFIG.

# Issue the fetchImages POST for the connection currently selected by
# CONN_CONFIG[$INDEX,$REGION]; MCIRRegionName is used for logging only.
# NOTE(review): even in the "all regions" loop below, the request body always
# uses CONN_CONFIG[$INDEX,$REGION] — confirm this is intended rather than a
# per-region connection name.
function CallTB() {
	echo "- Fetch images in ${MCIRRegionName}"
	resp=$(
        curl -H "${AUTH}" -sX POST http://$TumblebugServer/tumblebug/ns/$NSID/resources/fetchImages -H 'Content-Type: application/json' -d @- <<EOF
        {
			"connectionName": "${CONN_CONFIG[$INDEX,$REGION]}"
        }
EOF
    ); echo ${resp} | jq ''
    echo ""
}

SECONDS=0

echo "####################################################################"
echo "## 6. image: Fetch"
echo "####################################################################"

source ../init.sh

if [ "${INDEX}" == "0" ]; then
	echo "[Parallel execution for all CSP regions]"
	INDEXX=${NumCSP}
	# Iterate every configured CSP and each of its regions.
	for ((cspi = 1; cspi <= INDEXX; cspi++)); do
		INDEXY=${NumRegion[$cspi]}
		CSP=${CSPType[$cspi]}
		echo "[$cspi] $CSP details"
		for ((cspj = 1; cspj <= INDEXY; cspj++)); do
			echo "[$cspi,$cspj] ${RegionName[$cspi,$cspj]}"
			MCIRRegionName=${RegionName[$cspi,$cspj]}
			CallTB
		done
	done
	# NOTE(review): CallTB is invoked in the foreground, so this 'wait' is a
	# no-op despite the "parallel execution" banner above.
	wait
else
	echo ""
	MCIRRegionName=${CONN_CONFIG[$INDEX,$REGION]}
	CallTB
fi

source ../common-functions.sh
printElapsed $@
|
#!/bin/sh
koopa_alias_colorls() {
    # """
    # colorls alias.
    # @note Updated 2022-04-14.
    #
    # Use of '--git-status' is slow for large directories / monorepos.
    #
    # Picks '--dark' or '--light' based on koopa_color_mode; for any other
    # mode no color flag is passed at all. Previously an unrecognized mode
    # left 'color_flag' unset and colorls received a bogus empty '' argument.
    # """
    local color_flag color_mode
    color_flag=''
    color_mode="$(koopa_color_mode)"
    case "$color_mode" in
        'dark')
            color_flag='--dark'
            ;;
        'light')
            color_flag='--light'
            ;;
    esac
    # ${color_flag:+...} expands to nothing when the flag is empty.
    colorls \
        ${color_flag:+"$color_flag"} \
        --group-directories-first \
        "$@"
    return 0
}
|
<gh_stars>1-10
import {
AGGLOMERATION_CARD_INTERFACE_NAME,
AVAILABLE_AGGLOMERATION_SLOT,
} from '../structures/agglomeration';
import {
AVAILABLE_DEVELOPMENT_SLOT,
DEVELOPMENT_CARD_INTERFACE_NAME,
} from '../structures/development';
import {
AVAILABLE_LAND_SLOT,
LAND_CARD_INTERFACE_NAME,
} from '../structures/land';
/** Color identifying which player a domain belongs to. */
export enum DomainColor {
  Red = 'RED',
  Blue = 'BLUE',
}
/**
 * Union of every card/slot interface name that can occupy a domain position:
 * the three concrete card kinds plus their corresponding empty-slot markers.
 */
export type DomainCardType =
  | typeof AGGLOMERATION_CARD_INTERFACE_NAME
  | typeof DEVELOPMENT_CARD_INTERFACE_NAME
  | typeof LAND_CARD_INTERFACE_NAME
  | typeof AVAILABLE_AGGLOMERATION_SLOT
  | typeof AVAILABLE_DEVELOPMENT_SLOT
  | typeof AVAILABLE_LAND_SLOT;
/* eslint-disable no-magic-numbers */
/**
 * Vertical row position relative to the middle row (0); negative values are
 * below, positive values above.
 */
export enum RowValue {
  Lower = -2,
  Low = -1,
  Middle = 0,
  Up = 1,
  Upper = 2,
}
/* eslint-enable no-magic-numbers */
|
<filename>node_modules/ts-toolbelt/out/Union/IntersectOf.d.ts
/**
 * Transform a [[Union]] to an **intersection**.
 *
 * Works via the contravariance of function parameter positions: distributing
 * `U` into parameter position and inferring it back yields `A & B & ...`.
 * @param U to transform
 * @returns `&` (the intersection of all members of `U`)
 * @example
 * ```ts
 * ```
 */
export declare type IntersectOf<U extends any> = (U extends unknown ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never;
|
#!/bin/sh
# Run the debug build of gears-cli in 'export' mode against the sample
# 'random' project, with full Rust backtraces and xflow validation debug
# logging enabled. Assumes the binary was built at ./target/debug/gears-cli
# and that ../xflow-rust is checked out alongside this repository.
RUST_BACKTRACE=1 RUST_LOG=xflow::validation=debug ./target/debug/gears-cli \
    --locale nl_NL \
    --path ../xflow-rust/resource/projects/random \
    export
|
<reponame>Null-LLC/neon
//========= Copyright N11 Software, All rights reserved. ============//
//
// Purpose: Commit a file(s) to the repository.
//
// Author: <NAME>
//
//===================================================================//
#include <iostream>
#include <string>
#include "include/commit.h"
#include "include/file.h"
#include "include/nlohmann/json.hpp"
// Options/summary data describing a single commit.
struct commit_options {
	std::string message;     // commit message text
	size_t insertions;       // number of inserted lines
	size_t deletions;        // number of deleted lines
	size_t files_modified;   // count of files changed by the commit
	size_t new_files;        // count of files newly added by the commit
};
<reponame>zGrav/slate-playground<gh_stars>1-10
// Re-export the default export of './5' under the name SK5.
// NOTE(review): replaced the non-standard `export SK5 from './5'` (a Babel
// stage-1 "export default from" proposal) with the equivalent standard
// ES module re-export syntax, which works without the extra Babel plugin.
export { default as SK5 } from './5';
|
node bot.js
|
<reponame>zenglongGH/spresense<filename>externals/cmsis/CMSIS_5/docs/Driver/html/group__usbh__hci__gr_structARM__DRIVER__USBH__HCI.js
// Doxygen-generated navigation data for the ARM_DRIVER_USBH_HCI struct page.
// Each entry is [member name, target HTML anchor, sub-entries (null if none)].
// NOTE(review): generated file — do not edit by hand.
var group__usbh__hci__gr_structARM__DRIVER__USBH__HCI =
[
    [ "GetVersion", "group__usbh__hci__gr.html#a8834b281da48583845c044a81566c1b3", null ],
    [ "GetCapabilities", "group__usbh__hci__gr.html#a7a41769405bb3bb4cc9eaba26cf220d4", null ],
    [ "Initialize", "group__usbh__hci__gr.html#a40cbaad9fd2458b1008d31e1469903bb", null ],
    [ "Uninitialize", "group__usbh__hci__gr.html#adcf20681a1402869ecb5c6447fada17b", null ],
    [ "PowerControl", "group__usbh__hci__gr.html#aba8f1c8019af95ffe19c32403e3240ef", null ],
    [ "PortVbusOnOff", "group__usbh__hci__gr.html#ab859fb9f73a60ffa1ce71ed961d4744f", null ]
];
require 'test_helper'
# Specs for the NF-e billing installment ("duplicata") model: checks the
# XML-tag alias attributes and the basic field validations.
describe BrNfe::Product::Nfe::Cobranca::Duplicata do
	subject { FactoryGirl.build(:product_cobranca_duplicata) }

	describe "Alias attributes" do
		# Each NF-e XML tag name must map onto the readable attribute name.
		it { must_have_alias_attribute :nDup,  :numero_duplicata }
		it { must_have_alias_attribute :vDup,  :total }
		it { must_have_alias_attribute :dVenc, :vencimento }
	end
	describe 'Validations' do
		# Total is mandatory and must be a non-negative number.
		it { must validate_presence_of(:total) }
		it { must validate_numericality_of(:total).is_greater_than_or_equal_to(0.0) }
		# The duplicata number is limited to 60 characters by the NF-e layout.
		it { must validate_length_of(:numero_duplicata).is_at_most(60) }
	end
end
#!/usr/bin/env bash
# Download the NLP model data this project needs:
# spaCy English models and the TextBlob corpora.
# https://spacy.io/usage/models
# NOTE(review): the bare 'en' shortcut is a legacy spaCy model alias;
# newer spaCy versions only accept explicit names like en_core_web_sm.
python -m spacy download en
python -m spacy download en_core_web_sm
python -m textblob.download_corpora
|
import { toBool } from "@andrewcaires/utils.js";
import { config } from "dotenv";
config();
/**
 * Application configuration, resolved once at module load from process.env
 * (populated by dotenv above). Each value falls back to its default when the
 * variable is unset or empty.
 */
type EnvValue = string | undefined;

/** String setting; empty string counts as unset. */
const asString = (value: EnvValue, fallback: string): string => value || fallback;

/** Integer setting, parsed with parseInt from the raw string. */
const asNumber = (value: EnvValue, fallback: string): number => parseInt(value || fallback);

/** Boolean setting, parsed with toBool from the raw string. */
const asBool = (value: EnvValue, fallback: string): boolean => toBool(value || fallback);

export const API_AUTH_SLEEP = asNumber(process.env.API_AUTH_SLEEP, "1000");

export const API_DB_DATABASE = asString(process.env.API_DB_DATABASE, "");
export const API_DB_HOST = asString(process.env.API_DB_HOST, "127.0.0.1");
export const API_DB_PASSWORD = asString(process.env.API_DB_PASSWORD, "");
export const API_DB_PORT = asNumber(process.env.API_DB_PORT, "3306");
export const API_DB_TYPE = asString(process.env.API_DB_TYPE, "");
export const API_DB_USERNAME = asString(process.env.API_DB_USERNAME, "root");
export const API_DB_LOG = asBool(process.env.API_DB_LOG, "false");

export const API_HTTP_CRT = asString(process.env.API_HTTP_CRT, "");
export const API_HTTP_KEY = asString(process.env.API_HTTP_KEY, "");
export const API_HTTP_PORT = asNumber(process.env.API_HTTP_PORT, "3000");
export const API_HTTP_PUBLIC = asString(process.env.API_HTTP_PUBLIC, "");
export const API_HTTP_CROSS = asBool(process.env.API_HTTP_CROSS, "false");
export const API_HTTP_HYBRID = asBool(process.env.API_HTTP_HYBRID, "false");

export const API_TOKEN_CRT = asString(process.env.API_TOKEN_CRT, "");
export const API_TOKEN_KEY = asString(process.env.API_TOKEN_KEY, "");
export const API_TOKEN_HEADER = asString(process.env.API_TOKEN_HEADER, "authorization");
export const API_TOKEN_LIFETIME = asNumber(process.env.API_TOKEN_LIFETIME, "15");

export const API_UPLOAD_COUNT = asNumber(process.env.API_UPLOAD_COUNT, "1");
export const API_UPLOAD_FIELD = asString(process.env.API_UPLOAD_FIELD, "upload");
export const API_UPLOAD_SIZE = asNumber(process.env.API_UPLOAD_SIZE, "2");
export const API_UPLOAD_TEMP = asString(process.env.API_UPLOAD_TEMP, "./temp");

export const API_WEBSOCKET_START = asBool(process.env.API_WEBSOCKET_START, "false");
|
import numpy as np
from numpy import linalg as LA
# ---------------------------------------------
# Checking boundary line crossing detection
def line(p1, p2):
    """Return coefficients (A, B, C) of the line through p1 and p2.

    The line satisfies A*x + B*y = C for every point (x, y) on it.
    """
    a = p1[1] - p2[1]
    b = p2[0] - p1[0]
    c = p2[0] * p1[1] - p1[0] * p2[1]
    return a, b, c
# Calcuate the coordination of intersect point of line segments - 線分同士が交差する座標を計算
def calcIntersectPoint(line1p1, line1p2, line2p1, line2p2):
    """Intersection (x, y) of the two infinite lines through the given
    segment endpoints, solved with Cramer's rule.

    Raises ZeroDivisionError when the lines are parallel (determinant 0),
    matching the previous behavior.
    """
    # Coefficients (a, b, c) of each line in the form a*x + b*y = c.
    a1 = line1p1[1] - line1p2[1]
    b1 = line1p2[0] - line1p1[0]
    c1 = line1p2[0] * line1p1[1] - line1p1[0] * line1p2[1]
    a2 = line2p1[1] - line2p2[1]
    b2 = line2p2[0] - line2p1[0]
    c2 = line2p2[0] * line2p1[1] - line2p1[0] * line2p2[1]
    det = a1 * b2 - b1 * a2
    return (c1 * b2 - b1 * c2) / det, (a1 * c2 - c1 * a2) / det
# Check if line segments intersect - 線分同士が交差するかどうかチェック
def checkIntersect(p1, p2, p3, p4):
    """True when segment p1-p2 strictly crosses segment p3-p4.

    Touching endpoints and collinear overlap count as non-intersecting
    (the products below are 0, not negative).
    """
    def orient(a, b, c):
        # Signed area term: which side of line a-b the point c lies on.
        return (a[0] - b[0]) * (c[1] - a[1]) + (a[1] - b[1]) * (a[0] - c[0])

    tc1 = orient(p1, p2, p3)
    tc2 = orient(p1, p2, p4)
    td1 = orient(p3, p4, p1)
    td2 = orient(p3, p4, p2)
    # Segments cross iff each straddles the other's supporting line.
    return tc1 * tc2 < 0 and td1 * td2 < 0
# convert a line to a vector
# line(point1)-(point2)
def line_vectorize(point1, point2):
    """Displacement vector [dx, dy] from point1 to point2."""
    return [point2[0] - point1[0], point2[1] - point1[1]]
# Calculate the angle made by two line segments - 線分同士が交差する角度を計算
# point = (x,y)
# line1(point1)-(point2), line2(point3)-(point4)
def calcVectorAngle(point1, point2, point3, point4):
    """Angle in degrees (0..360) between vector point1->point2 and vector
    point3->point4, measured so that the cross-product sign selects which
    side of 360 the result lies on (matching the previous implementation).
    """
    u = np.array([point2[0] - point1[0], point2[1] - point1[1]])
    v = np.array([point4[0] - point3[0], point4[1] - point3[1]])
    # Clip guards arccos against tiny floating-point overshoot beyond [-1, 1].
    cos_theta = np.inner(u, v) / (LA.norm(u) * LA.norm(v))
    angle = np.rad2deg(np.arccos(np.clip(cos_theta, -1.0, 1.0)))
    cross = u[0] * v[1] - u[1] * v[0]
    return angle if cross < 0 else 360 - angle
# Test whether the test_point is in the polygon or not - 指定の点がポリゴン内に含まれるかどうかを判定
# test_point = (x,y)
# polygon = collection of points [ (x0,y0), (x1,y1), (x2,y2) ... ]
def pointPolygonTest(polygon, test_point):
    """Even-odd rule point-in-polygon test.

    Casts a horizontal ray leftwards from ``test_point`` and toggles an
    inside/outside flag at each edge crossing; an odd number of crossings
    means the point is inside.

    Fix over the previous version: a horizontal polygon edge (both endpoints
    at the same y) satisfied the old inclusive min/max range check and then
    divided by a zero delta-y, raising ZeroDivisionError. The straddle test
    below rejects horizontal edges naturally, so the division is always safe.

    polygon    -- sequence of (x, y) vertices; closed implicitly via the last
                  vertex
    test_point -- (x, y) point to classify
    Returns True when the point is inside, False otherwise (including
    degenerate polygons with fewer than 3 vertices).
    """
    if len(polygon) < 3:
        return False
    inside = False
    prev_point = polygon[-1]  # Use the last point to close the polygon
    for point in polygon:
        # The edge straddles the ray's y exactly when one endpoint is
        # strictly above it; horizontal edges fail this check.
        if (prev_point[1] > test_point[1]) != (point[1] > test_point[1]):
            gradient = (point[0] - prev_point[0]) / (point[1] - prev_point[1])  # delta_x / delta_y
            line_x = prev_point[0] + (test_point[1] - prev_point[1]) * gradient  # X of the edge at the ray's y
            if line_x < test_point[0]:  # count crossings strictly to the left
                inside = not inside
        prev_point = point
    return inside
|
<reponame>FTC-10072/SkystoneCode-10072
package org.firstinspires.ftc.teamcode.RobotClasses;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cColorSensor;
public class HardwareRobot {
public DcMotor leftFrontMotor, leftBackMotor, rightFrontMotor, rightBackMotor,intakeMotor1, intakeMotor2, worstMotor, horiMotor;
public Servo gripperServo, move1Servo, move2Servo;
public BNO055IMU imu;
public ModernRoboticsI2cColorSensor rSense;
HardwareMap hwMap = null;
private ElapsedTime period = new ElapsedTime();
public HardwareRobot(){}
public void init(HardwareMap ahwMap){
hwMap = ahwMap;
// setup motors
leftFrontMotor = hwMap.get(DcMotor.class,"left_motor_1");
leftBackMotor = hwMap.get(DcMotor.class,"left_motor_2");
rightFrontMotor = hwMap.get(DcMotor.class,"right_motor_1");
rightBackMotor = hwMap.get(DcMotor.class,"right_motor_2");
intakeMotor1 = hwMap.get(DcMotor.class, "intake_motor_1");
intakeMotor2 = hwMap.get(DcMotor.class, "intake_motor_2");
worstMotor = hwMap.get(DcMotor.class, "worst_motor");
horiMotor = hwMap.get(DcMotor.class, "hori_motor");
leftFrontMotor.setDirection(DcMotorSimple.Direction.REVERSE);
leftBackMotor.setDirection(DcMotorSimple.Direction.REVERSE);
rightFrontMotor.setDirection(DcMotorSimple.Direction.FORWARD);
rightBackMotor.setDirection(DcMotorSimple.Direction.FORWARD);
intakeMotor1.setDirection(DcMotorSimple.Direction.FORWARD);
intakeMotor2.setDirection(DcMotorSimple.Direction.REVERSE);
worstMotor.setDirection(DcMotorSimple.Direction.REVERSE);
horiMotor.setDirection(DcMotorSimple.Direction.FORWARD);
// positive is clockwise/up, negative is anticlockwise/down
leftFrontMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
leftBackMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
rightFrontMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
rightBackMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
intakeMotor1.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
intakeMotor2.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
worstMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
horiMotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
leftFrontMotor.setPower(0);
leftBackMotor.setPower(0);
rightFrontMotor.setPower(0);
rightBackMotor.setPower(0);
intakeMotor1.setPower(0);
intakeMotor2.setPower(0);
worstMotor.setPower(0);
horiMotor.setPower(0);
leftFrontMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftBackMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightFrontMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightBackMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
intakeMotor1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
intakeMotor2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
worstMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
horiMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftFrontMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftBackMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightFrontMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightBackMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
intakeMotor1.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
intakeMotor2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
worstMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
horiMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// set up servos
gripperServo = hwMap.get(Servo.class, "gripper_servo");
move1Servo = hwMap.get(Servo.class, "move_1_servo");
move2Servo = hwMap.get(Servo.class, "move_2_servo");
gripperServo.setPosition(1);
move1Servo.setPosition(.5);
move2Servo.setPosition(.5);
rSense = hwMap.get(ModernRoboticsI2cColorSensor.class, "rSense");
// set up IMU
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES;
parameters.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
parameters.loggingEnabled = true;
parameters.loggingTag = "IMU";
imu = hwMap.get(BNO055IMU.class, "imu");
imu.initialize(parameters);
}
}
|
import requests
import json
from bs4 import BeautifulSoup
# Get the data from the web page
# Template script: replace every <...> placeholder before running.
# Get the data from the web page
url = '<YOUR URL HERE>'
r = requests.get(url)
html_doc = r.text

# Initialize the JSON results
results = []

# Parse the HTML content
soup = BeautifulSoup(html_doc, 'html.parser')

# Extract the html elements from the page
# (e.g. 'div', 'a', 'li' — whatever tag holds the data of interest)
tags = soup.find_all('<YOUR HTML TAG HERE>')

# Iterate over the tags and extract the required data
for tag in tags:
    data = {}
    # Extract data from the tag: one attribute value and the tag's text
    data['<KEY1>'] = tag.get('<ATTRIBUTE1>')
    data['<KEY2>'] = tag.get_text()
    # Append the data to the JSON results
    results.append(data)

# Save the results as a JSON file
with open('file.json', 'w') as fp:
    json.dump(results, fp)
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.data;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.WrappedRoaringBitmap;
import org.apache.druid.segment.BaseNullableColumnValueSelector;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.SimpleAscendingOffset;
import org.apache.druid.segment.column.DoublesColumn;
import org.apache.druid.segment.column.FloatsColumn;
import org.apache.druid.segment.column.LongsColumn;
import org.apache.druid.segment.vector.NoFilterVectorOffset;
import org.apache.druid.segment.vector.VectorOffset;
import org.apache.druid.segment.vector.VectorValueSelector;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Verifies that numeric column value selectors backed by a null-value bitmap
 * report nulls correctly, and keep doing so after their (vector) offset is
 * reset or — hypothetically — overlaps a previously read range.
 *
 * NOTE(review): the per-element loops in {@link #assertVectorOffsetCanReset}
 * and {@link #assertVectorChillWhenOffsetsOverlap} previously started at
 * {@code getStartOffset()} instead of {@code 0}. Because the null vector is
 * indexed from 0 within each vector and the loop bound is
 * {@code getCurrentVectorSize()}, every vector after the first was silently
 * skipped. The loops now start at 0 so all vectors are actually checked.
 */
public class NumericNullColumnSelectorTest
{
  private final int seed = 1337;
  private final Random rando = new Random(seed);
  private final int numBitmaps = 32;
  private final int numRows = 1024;
  private final int vectorSize = 128;
  private final SimpleAscendingOffset offset = new SimpleAscendingOffset(numRows);
  private final NoFilterVectorOffset vectorOffset = new NoFilterVectorOffset(vectorSize, 0, numRows);
  private final NoFilterOffsetThatCanBeMangledToTestOverlapping anotherVectorOffset =
      new NoFilterOffsetThatCanBeMangledToTestOverlapping(vectorSize, 0, numRows);
  private ImmutableBitmap[] bitmaps;

  @Before
  public void setup()
  {
    // Build random null bitmaps; each set bit marks a row that is null
    // (roughly 80% of rows per bitmap with this threshold).
    bitmaps = new ImmutableBitmap[numBitmaps];
    for (int bitmap = 0; bitmap < numBitmaps; bitmap++) {
      WrappedRoaringBitmap mutable = new WrappedRoaringBitmap();
      for (int i = 0; i < numRows; i++) {
        if (rando.nextDouble() > 0.2) {
          mutable.add(i);
        }
      }
      bitmaps[bitmap] = mutable.toImmutableBitmap();
    }
  }

  @Test
  public void testLongSelectorWithNullsCanResetOffset()
  {
    for (ImmutableBitmap bitmap : bitmaps) {
      // Values are random garbage on purpose: only null tracking is under test.
      ColumnarLongs longs = new ColumnarLongs()
      {
        @Override
        public int size()
        {
          return numRows;
        }

        @Override
        public long get(int index)
        {
          return ThreadLocalRandom.current().nextLong();
        }

        @Override
        public void close()
        {
        }
      };
      LongsColumn columnWithNulls = LongsColumn.create(longs, bitmap);
      ColumnValueSelector<?> selector = columnWithNulls.makeColumnValueSelector(offset);
      assertOffsetCanReset(selector, bitmap, offset);
      VectorValueSelector vectorSelector = columnWithNulls.makeVectorValueSelector(vectorOffset);
      assertVectorOffsetCanReset(vectorSelector, bitmap, vectorOffset);
    }
  }

  @Test
  public void testFloatSelectorWithNullsCanResetOffset()
  {
    for (ImmutableBitmap bitmap : bitmaps) {
      ColumnarFloats floats = new ColumnarFloats()
      {
        @Override
        public int size()
        {
          return numRows;
        }

        @Override
        public float get(int index)
        {
          return ThreadLocalRandom.current().nextFloat();
        }

        @Override
        public void close()
        {
        }
      };
      FloatsColumn columnWithNulls = FloatsColumn.create(floats, bitmap);
      ColumnValueSelector<?> selector = columnWithNulls.makeColumnValueSelector(offset);
      assertOffsetCanReset(selector, bitmap, offset);
      VectorValueSelector vectorSelector = columnWithNulls.makeVectorValueSelector(vectorOffset);
      assertVectorOffsetCanReset(vectorSelector, bitmap, vectorOffset);
      // Extra check unique to floats: selectors stay correct even when the
      // offset is forced to re-read an overlapping range.
      VectorValueSelector anotherSelector = columnWithNulls.makeVectorValueSelector(anotherVectorOffset);
      assertVectorChillWhenOffsetsOverlap(anotherSelector, bitmap, anotherVectorOffset);
    }
  }

  @Test
  public void testDoubleSelectorWithNullsCanResetOffset()
  {
    for (ImmutableBitmap bitmap : bitmaps) {
      ColumnarDoubles doubles = new ColumnarDoubles()
      {
        @Override
        public int size()
        {
          return numRows;
        }

        @Override
        public double get(int index)
        {
          return ThreadLocalRandom.current().nextDouble();
        }

        @Override
        public void close()
        {
        }
      };
      DoublesColumn columnWithNulls = DoublesColumn.create(doubles, bitmap);
      ColumnValueSelector<?> selector = columnWithNulls.makeColumnValueSelector(offset);
      assertOffsetCanReset(selector, bitmap, offset);
      VectorValueSelector vectorSelector = columnWithNulls.makeVectorValueSelector(vectorOffset);
      assertVectorOffsetCanReset(vectorSelector, bitmap, vectorOffset);
    }
  }

  /**
   * Reads every row twice (with a reset in between) and checks the selector's
   * null flag matches the bitmap both times. The shared offset is reset again
   * at the end so the next caller starts clean.
   */
  private static void assertOffsetCanReset(
      BaseNullableColumnValueSelector selector,
      ImmutableBitmap bitmap,
      SimpleAscendingOffset readItAll
  )
  {
    boolean encounteredNull = false;
    while (readItAll.withinBounds()) {
      Assert.assertEquals(bitmap.get(readItAll.getOffset()), selector.isNull());
      encounteredNull |= selector.isNull();
      readItAll.increment();
    }
    readItAll.reset();
    Assert.assertTrue(encounteredNull);
    encounteredNull = false;
    while (readItAll.withinBounds()) {
      Assert.assertEquals(bitmap.get(readItAll.getOffset()), selector.isNull());
      encounteredNull |= selector.isNull();
      readItAll.increment();
    }
    Assert.assertTrue(encounteredNull);
    readItAll.reset();
  }

  /**
   * Reads every vector twice (with a reset in between) and checks the null
   * vector matches the bitmap both times.
   */
  private static void assertVectorOffsetCanReset(
      VectorValueSelector selector,
      ImmutableBitmap bitmap,
      NoFilterVectorOffset readAllVectors
  )
  {
    boolean encounteredNull = false;
    boolean[] nullVector;
    // read it all, advancing offset
    while (!readAllVectors.isDone()) {
      nullVector = selector.getNullVector();
      // nullVector is indexed from 0 within the current vector; the start
      // offset maps positions back to absolute rows in the bitmap.
      for (int i = 0; i < readAllVectors.getCurrentVectorSize(); i++) {
        Assert.assertEquals(bitmap.get(readAllVectors.getStartOffset() + i), nullVector[i]);
        encounteredNull |= nullVector[i];
      }
      readAllVectors.advance();
    }
    // reset and read it all again to make sure matches
    readAllVectors.reset();
    Assert.assertTrue(encounteredNull);
    encounteredNull = false;
    while (!readAllVectors.isDone()) {
      nullVector = selector.getNullVector();
      for (int i = 0; i < readAllVectors.getCurrentVectorSize(); i++) {
        Assert.assertEquals(bitmap.get(readAllVectors.getStartOffset() + i), nullVector[i]);
        encounteredNull |= nullVector[i];
      }
      readAllVectors.advance();
    }
    Assert.assertTrue(encounteredNull);
    readAllVectors.reset();
  }

  /**
   * Forces the offset back to an already-read position and checks the
   * selector still produces the correct null vector.
   */
  public static void assertVectorChillWhenOffsetsOverlap(
      VectorValueSelector selector,
      ImmutableBitmap bitmap,
      NoFilterOffsetThatCanBeMangledToTestOverlapping readAllVectors
  )
  {
    boolean encounteredNull = false;
    boolean[] nullVector;
    // test overlapping reads (should reset iterator anyway)
    readAllVectors.mangleOffset(0);
    nullVector = selector.getNullVector();
    for (int i = 0; i < readAllVectors.getCurrentVectorSize(); i++) {
      Assert.assertEquals(bitmap.get(readAllVectors.getStartOffset() + i), nullVector[i]);
      encounteredNull |= nullVector[i];
    }
    Assert.assertTrue(encounteredNull);

    // this can't currently happen, but we want to protect selectors in case offsets ever try to overlap
    readAllVectors.mangleOffset(1);
    nullVector = selector.getNullVector();
    for (int i = 0; i < readAllVectors.getCurrentVectorSize(); i++) {
      Assert.assertEquals(bitmap.get(readAllVectors.getStartOffset() + i), nullVector[i]);
      encounteredNull |= nullVector[i];
    }
    readAllVectors.reset();
    Assert.assertTrue(encounteredNull);
  }

  /**
   * A contiguous no-filter vector offset whose position can be forced
   * ("mangled") to an arbitrary row to simulate overlapping reads.
   */
  private static class NoFilterOffsetThatCanBeMangledToTestOverlapping implements VectorOffset
  {
    private final int maxVectorSize;
    private final int start;
    private final int end;
    private int theOffset;

    NoFilterOffsetThatCanBeMangledToTestOverlapping(final int maxVectorSize, final int start, final int end)
    {
      this.maxVectorSize = maxVectorSize;
      this.start = start;
      this.end = end;
      reset();
    }

    public void mangleOffset(int replacement)
    {
      theOffset = replacement;
    }

    @Override
    public int getId()
    {
      return theOffset;
    }

    @Override
    public void advance()
    {
      theOffset += maxVectorSize;
    }

    @Override
    public boolean isDone()
    {
      return theOffset >= end;
    }

    @Override
    public boolean isContiguous()
    {
      return true;
    }

    @Override
    public int getMaxVectorSize()
    {
      return maxVectorSize;
    }

    @Override
    public int getCurrentVectorSize()
    {
      return Math.min(maxVectorSize, end - theOffset);
    }

    @Override
    public int getStartOffset()
    {
      return theOffset;
    }

    @Override
    public int[] getOffsets()
    {
      throw new UnsupportedOperationException("no filter");
    }

    @Override
    public void reset()
    {
      theOffset = start;
    }
  }
}
|
#!/bin/bash
# ssh wrapper for OpenShift gears: keeps ssh state (config, known_hosts)
# under the writable $OPENSHIFT_DATA_DIR instead of the read-only $HOME,
# and disables strict host key checking for first connections.
mkdir -p "$OPENSHIFT_DATA_DIR/.ssh/"
touch "$OPENSHIFT_DATA_DIR/.ssh/config"
# All original arguments are forwarded to ssh unchanged.
/usr/bin/ssh -o 'StrictHostKeyChecking=no' -o "UserKnownHostsFile=$OPENSHIFT_DATA_DIR/.ssh/known_hosts" -F "$OPENSHIFT_DATA_DIR/.ssh/config" "$@"
#/usr/bin/ssh -o 'IdentityFile=$OPENSHIFT_DATA_DIR/.ssh/jenkins_id_rsa' -o 'StrictHostKeyChecking=no' -F "$OPENSHIFT_DATA_DIR/.ssh/config" "$@"
|
// Router for the /categories resource.
// NOTE(review): this module previously mixed CommonJS `require` with ES
// `import`, which only works under transpilers with interop enabled; it now
// uses ES imports consistently.
import PromiseRouter from 'express-promise-router'
import { celebrate } from 'celebrate' // imported but not yet used; kept for upcoming request validation
import * as controller from '../controllers/categories.controller'

const router = PromiseRouter()

// GET / -> list categories
router.route('/')
    .get(controller.index)

export default router
<filename>open-sphere-plugins/arcgis/src/main/java/io/opensphere/arcgis2/esri/package-info.java<gh_stars>10-100
/** ArcGIS rest JSON classes. */
package io.opensphere.arcgis2.esri;
|
package com.example.expedia.fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.example.expedia.MyApplication;
import com.example.expedia.activity.HotelListActivity;
import com.example.expedia.adapter.MainRecommendationRVAdapter;
import com.example.expedia.R;
import com.example.expedia.activity.HotelSearchActivity;
import com.example.expedia.activity.LogInSignUpActivity;
import com.example.expedia.sampledata.RecommendationDataSample;
/**
* A simple {@link Fragment} subclass.
*/
public class MainReservationFragment extends Fragment {
    // Login entry image shown while logged out; greeting text shown after login.
    private ImageView ivLoginImage;
    private TextView tvAfterLogin;
    // Adapter feeding the recommendation list with sample data.
    private MainRecommendationRVAdapter adapter = new MainRecommendationRVAdapter();
    // Login state captured at view creation, used to detect changes in onStart().
    private boolean prev_loginStatus;

    public MainReservationFragment() {
        // Required empty public constructor
    }

    /**
     * Inflates the reservation screen, wires up the recommendation list and
     * the click targets (hotel search, login, deadline deals), and syncs the
     * login-dependent widgets with the current login state.
     */
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_main_reservation, container, false);
        prev_loginStatus = MyApplication.isLogInStatus();

        RecyclerView recyclerView = view.findViewById(R.id.Main_category_recyclerView);
        recyclerView.setLayoutManager(new LinearLayoutManager(getContext()));
        recyclerView.setAdapter(adapter);
        adapter.setItems(new RecommendationDataSample().getItems());

        // Hotel banner -> hotel search screen.
        ImageView ivHotel = view.findViewById(R.id.Main_Hotel);
        ivHotel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(getContext(), HotelSearchActivity.class));
            }
        });
        //ImageView ivAirport = view.findViewById(R.id.Main_Air);
        //ImageView ivHotel_airport = view.findViewById(R.id.Main_hotel_air);

        // Login image -> login/signup screen.
        ivLoginImage = view.findViewById(R.id.login_imageView);
        ivLoginImage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(getContext(), LogInSignUpActivity.class));
            }
        });
        tvAfterLogin = view.findViewById(R.id.afterLogin_textView);

        // Deadline-deal banner -> hotel list screen, preset to list no. 2.
        ImageView ivDeadlineImage = view.findViewById(R.id.imageView2);
        ivDeadlineImage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getContext(), HotelListActivity.class);
                intent.putExtra("no", 2);
                startActivity(intent);
            }
        });
        checkLoginStatus();
        return view;
    }

    @Override
    public void onStart() {
        super.onStart();
        // Refresh the login widgets only if the state changed while away
        // (e.g. the user just came back from the login activity).
        if(prev_loginStatus != MyApplication.isLogInStatus()) {
            checkLoginStatus();
        }
    }

    /**
     * Shows the greeting text when logged in, or the login image when logged
     * out, and records the state for later change detection.
     */
    public void checkLoginStatus(){
        if(MyApplication.isLogInStatus()){
            ivLoginImage.setVisibility(View.GONE);
            tvAfterLogin.setVisibility(View.VISIBLE);
        }else{
            ivLoginImage.setVisibility(View.VISIBLE);
            tvAfterLogin.setVisibility(View.GONE);
        }
        prev_loginStatus = MyApplication.isLogInStatus();
    }
}
|
#!/bin/bash
set -e # exit if any command fails

echo -e "\n\n\n1/2 Building JAR"
mvn clean package

DOCKER_IMAGE_NAME="belaboros/customerservicemem"
echo -e "\n\n\n2/2 Building docker image: ${DOCKER_IMAGE_NAME}"
# Quote the tag so the build cannot mis-parse it if it ever contains
# whitespace or glob characters.
docker build . -t "${DOCKER_IMAGE_NAME}"
|
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
 * @author minchoba
 * Baekjoon #2292: Beehive.
 *
 * Hexagonal cells are numbered in rings around cell 1: ring 1 is {1}, and
 * ring k (k >= 2) adds 6*(k-1) cells. For a cell N, the answer is the index
 * of the ring containing N (the number of cells on a straight path from 1).
 *
 * @see https://www.acmicpc.net/problem/2292/
 */
public class Boj2292 {
    public static void main(String[] args) throws Exception {
        // Read N from stdin. Long.parseLong (instead of Integer.parseInt
        // widened afterwards) makes the declared long range actually usable.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        long N = Long.parseLong(br.readLine().trim());
        System.out.println(ringOf(N));
    }

    /**
     * Returns the 1-based ring index of cell {@code n}.
     *
     * @param n cell number, n >= 1
     * @return index of the ring that contains {@code n}
     */
    static int ringOf(long n) {
        long boundary = 1; // largest cell number contained in the first `ring` rings
        long step = 6;     // cells added by the next ring: 6, 12, 18, ...
        int ring = 1;
        while (boundary < n) {
            boundary += step;
            step += 6;
            ring++;
        }
        return ring;
    }
}
|
-- List every purchase together with the full record of the user who made it.
-- NOTE(review): SELECT * returns all columns of both tables, including the
-- duplicated join key; enumerate columns explicitly if the schema may change.
SELECT *
FROM purchases
INNER JOIN users
ON purchases.user_id = users.id;
const jwt = require('jsonwebtoken')
const assert = require('http-assert')
const AdminUser = require('../models/AdminUser')
module.exports = option => {
return async(req,res,next) =>{
//获取请求头中的信息,后端取请求头信息全部小写,.pop提取数组最后一个元素
const token = String(req.headers.authorization || '').split(' ').pop()
assert(token,401,'未经授权,请先登录!')
const { id } = jwt.verify(token,req.app.get('secret'))
assert(id,401,'无效的用户名,请先登录!')
req.user = await AdminUser.findById(id)
assert(req.user,401,'用户不存在,请先登录!')
await next()
}
}
// Overall flow: after a successful login the backend returns a custom token,
// which the frontend stores in localStorage. An axios request interceptor then
// attaches that token to the request headers, and this middleware verifies the
// token's presence and validity on every subsequent API request.
#!/bin/bash
# Tear down the chatroom demo: force-remove its images and containers, then
# delete the generated C sources so the next build starts clean.
docker image rm -f srv_chatroom_plat cli_chatroom_plat;
docker container rm -f srv_cr cli1_cr cli2_cr
rm servercontainer/server.c clientcontainer/client.c
|
import { curry1 } from './Curry';
export interface PRIMEFACTORIZATION {
    (x: number): {
        number: number;
        count: number;
    }[];
    (): (x: number) => {
        number: number;
        count: number;
    }[];
}

/**
 * Prime factorization.
 *
 * Returns the prime factors of `x` in ascending order, each with its
 * multiplicity, e.g. 12 -> [{number: 2, count: 2}, {number: 3, count: 1}].
 * Inputs below 2 yield an empty array (the original looped forever on x = 0).
 * Curried via `curry1`, so it can also be called as `primeFactorization()(x)`.
 *
 * @param {number} x
 */
const primeFactorization = curry1(function (x: number) {
    const out: { number: number; count: number }[] = [];
    // Work on a copy so the caller's argument is never mutated.
    let rest = x;
    // Trial division only needs candidates up to sqrt(rest): once
    // i * i > rest, whatever remains is itself prime (or 1).
    for (let i = 2; i * i <= rest; i++) {
        if (rest % i === 0) {
            let n = 0;
            while (rest % i === 0) {
                n++;
                rest /= i;
            }
            out.push({ number: i, count: n });
        }
    }
    if (rest > 1) {
        out.push({ number: rest, count: 1 });
    }
    return out;
}) as PRIMEFACTORIZATION;

export default primeFactorization;
|
import java.io.*;
public class FileProcessor {
public static byte[] processFile(File f) throws IOException {
int size = (int) f.length();
byte buff[] = new byte[size];
FileInputStream fis = new FileInputStream(f);
DataInputStream dis = new DataInputStream(fis);
dis.readFully(buff);
dis.close();
// Perform a specific operation on the data (e.g., modify the byte array)
for (int i = 0; i < buff.length; i++) {
// Example: Increment each byte value by 1
buff[i] = (byte) (buff[i] + 1);
}
return buff;
}
} |
package services
import (
"strings"
"github.com/chryscloud/go-microkit-plugins/docker"
g "github.com/chryscloud/video-edge-ai-proxy/globals"
"github.com/chryscloud/video-edge-ai-proxy/models"
)
// StatsAllProcesses creates a statistics object for all running containers (related to edge).
//
// It aggregates docker system-wide info (container/image/volume usage) plus
// per-container CPU/memory/network stats for every managed process, skipping
// the built-in edge components (chrysedgeportal, chrysedgeserver, redis).
func (pm *ProcessManager) StatsAllProcesses(sett *models.Settings) (*models.AllStreamProcessStats, error) {
	cl := docker.NewSocketClient(docker.Log(g.Log), docker.Host("unix:///var/run/docker.sock"))

	// Bug fix: this error was previously ignored, so failures left
	// systemInfo/diskUsage zero-valued and produced bogus statistics.
	systemInfo, diskUsage, err := cl.SystemWideInfo()
	if err != nil {
		g.Log.Error("failed to query docker system-wide info", err)
		return nil, err
	}

	stats := &models.AllStreamProcessStats{}

	// Calculate disk usage and gather system info.
	totalContainers := systemInfo.Containers
	runningContainers := systemInfo.ContainersRunning
	stoppedContainers := systemInfo.ContainersStopped

	totalImgSize := int64(0)
	activeImages := 0
	totalVolumeSize := int64(0)
	activeVolumes := int64(0)
	for _, im := range diskUsage.Images {
		activeImages += int(im.Containers)
		totalImgSize += im.SharedSize
	}
	for _, v := range diskUsage.Volumes {
		activeVolumes += v.UsageData.RefCount
		totalVolumeSize += v.UsageData.Size
	}

	stats.Containers = totalContainers
	stats.ContainersRunning = runningContainers
	stats.ContainersStopped = stoppedContainers
	stats.ActiveImages = int(activeImages)
	stats.TotalVolumeSize = totalVolumeSize
	stats.TotalActiveVolumes = int(activeVolumes)
	stats.GatewayID = sett.GatewayID
	stats.TotalImageSize = totalImgSize
	stats.ContainersStats = make([]*models.ProcessStats, 0)

	pList, err := pm.List()
	if err != nil {
		g.Log.Error("failed to list all containers", err)
		return nil, err
	}

	// Gather stats for each managed container.
	for _, process := range pList {
		c, err := cl.ContainerGet(process.ContainerID)
		if err != nil {
			// Best effort: a container may have disappeared between List and Get.
			g.Log.Error("failed to get container from docker system", err)
			continue
		}
		n := c.Name
		// Skip default running components.
		if strings.Contains(n, "chrysedgeportal") || strings.Contains(n, "chrysedgeserver") || strings.Contains(n, "redis") {
			continue
		}
		s, err := cl.ContainerStats(c.ID)
		if err != nil {
			return nil, err
		}
		calculated := cl.CalculateStats(s)
		calculated.Status = c.State.Status
		// Report the restart count only for containers that exited abnormally.
		restartCount := 0
		if c.State.ExitCode > 0 {
			restartCount = c.RestartCount
		}
		procStats := &models.ProcessStats{
			Name:        process.Name,
			ImageTag:    process.ImageTag,
			Cpu:         int(calculated.CPUPercent),
			Memory:      int(calculated.MemoryPercent),
			NetworkRx:   int64(calculated.NetworkRx),
			NetworkTx:   int64(calculated.NetworkTx),
			NumRestarts: restartCount,
			Status:      c.State.Status,
		}
		stats.ContainersStats = append(stats.ContainersStats, procStats)
	}

	return stats, nil
}
|
// Chapter 11 Programming Challenge 13 -- soda machine simulation.
#include<iostream>
#include<string>
using namespace std;

// One drink slot in the machine.
struct PopMachine
{
    string name;         // menu label
    double cost;         // price in dollars
    int amountInMachine; // stock on hand (NOTE(review): never decremented on a sale)
};

// Runs the payment dialogue for one drink: prompts for coins (at most one
// dollar per insertion), keeps asking until the price is covered, then prints
// the dispense message, with a change reminder on overpayment.
// NOTE(review): remCost is compared to 0 with == / <; with double arithmetic
// the exact-zero branch is fragile -- behavior kept identical to the original.
static void purchaseDrink(const PopMachine &drink)
{
    double money = 0;

    cout << drink.name << " " << drink.cost << endl;
    cout << "Please enter money." << endl;
    cin >> money;
    while (money < 0 || money > 1)
    {
        cout << "Please enter a dollar or less in change." << endl;
        cin >> money;
    }

    // Remaining cost (negative once the buyer has overpaid).
    double remCost = drink.cost - money;
    while (remCost > 0)
    {
        cout << remCost << " remaining to pay. Please insert money." << endl;
        cin >> money;
        while (money < 0 || money > 1)
        {
            cout << "Please enter a dollar or less in change." << endl;
            cin >> money;
        }
        remCost = remCost - money;
    }

    if (remCost == 0)
    {
        cout << "Drink is dispensing. Thank you have a nice day!" << endl;
    }
    else if (remCost < 0)
    {
        cout << "Drink is dispensing. Don't forget your change. Thank you have a nice day!" << endl;
    }
}

int main()
{
    // Number of drink types stocked by the machine.
    const int TYPES_DRINKS = 5;
    PopMachine drinks[TYPES_DRINKS];

    // Stock the machine (name, price, initial quantity).
    drinks[0].name = "Cola"; drinks[0].cost = 0.75; drinks[0].amountInMachine = 20;
    drinks[1].name = "Root Beer"; drinks[1].cost = 0.75; drinks[1].amountInMachine = 20;
    drinks[2].name = "Lemon-Lime"; drinks[2].cost = 0.75; drinks[2].amountInMachine = 20;
    drinks[3].name = "Grape Soda"; drinks[3].cost = 0.80; drinks[3].amountInMachine = 20;
    drinks[4].name = "Cream Soda"; drinks[4].cost = 0.80; drinks[4].amountInMachine = 20;

    // Top-level menu.
    int buffer = 0;
    cout << "Would you like to order a drink?" << endl;
    cout << "1.) Yes :)" << endl;
    cout << "2.) No, I quit :(" << endl;
    cin >> buffer;

    if (buffer == 1)
    {
        int drinkBuffer = 0;
        cout << "Choose a drink!" << endl;
        cout << "_______________" << endl;
        // The menu rows share one format, so print them in a loop instead of
        // the five copy-pasted lines of the original.
        for (int i = 0; i < TYPES_DRINKS; i++)
        {
            cout << i + 1 << ".)" << drinks[i].name << " " << drinks[i].cost << " " << drinks[i].amountInMachine << endl;
        }
        cin >> drinkBuffer;

        // The five per-drink branches were identical except for the slot
        // index, so a single helper call replaces ~200 duplicated lines.
        if (drinkBuffer >= 1 && drinkBuffer <= TYPES_DRINKS)
        {
            purchaseDrink(drinks[drinkBuffer - 1]);
        }
    }
    else if (buffer == 2)
    {
        cout << "Have a nice day!" << endl;
    }
    return 0;
}
<reponame>Surfndez/mcs-lite-app
import React from 'react';
import { compose, pure } from 'recompose';
import { withGetMessages } from 'react-intl-inject-hoc';
import messages from '../messages';
import DataChannelWrapper from '../../dataChannelCards/common/wrapper';
import NewDisplayCard from '../newDisplayCard';
import styles from './styles.css';
const DataChannelContentLayout = ({
datachannels,
prototypeId,
checkDatachannelIdAvailable,
createDataChannel,
deleteDataChannel,
retrieveUnitTypes,
unitTypes,
createUnitTypes,
pushToast,
readOnly,
getMessages: t,
}) => (
<div className={styles.dataChannelContent}>
{
readOnly && datachannels.length === 0 &&
<div className={styles.noDatachannels}>{t('noDatachannels')}</div>
}
{
!readOnly &&
<NewDisplayCard
checkDatachannelIdAvailable={checkDatachannelIdAvailable}
createDataChannel={createDataChannel}
prototypeId={prototypeId}
retrieveUnitTypes={retrieveUnitTypes}
createUnitTypes={createUnitTypes}
unitTypes={unitTypes}
pushToast={pushToast}
/>
}
{
typeof datachannels === 'object' &&
datachannels.map((dataChannel) => {
let displayName = dataChannel.channelType.name;
if (dataChannel.type === 1) {
displayName += '_Control';
} else {
displayName += '_Display';
}
return (
<DataChannelWrapper
key={dataChannel.datachannelId}
displayName={displayName}
isPrototype
onSubmit={() => {}}
id={dataChannel.datachannelId}
title={dataChannel.datachannelName}
className={styles.displayCard}
format={dataChannel.format}
description={dataChannel.datachannelDescription}
deleteDataChannel={deleteDataChannel}
prototypeId={prototypeId}
pushToast={pushToast}
/>
);
})
}
</div>
);
export default compose(
pure,
withGetMessages(messages, 'PrototypeDetail'),
)(DataChannelContentLayout);
|
#!/bin/bash
# Bug fix: the shebang was "# !/bin/bash" -- the space turns it into an
# ordinary comment, so the script ran under whatever shell invoked it and
# the bash-only [[ ]] test below could fail.
set -e
[[ -z "${DEBUG}" ]] || set -x

# Resolve the script's own directory, the test dir above it, and the project root.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
TEST_DIR="$( cd "${SCRIPT_DIR}/.." && pwd )"
PROJECT_DIR="$( cd "${TEST_DIR}/.." && pwd )"
CASES_DIR="$( cd "${TEST_DIR}/cases" && pwd )"

# The terraform command is executed from the TERRAFORM_DIR
TERRAFORM_DIR=${PROJECT_DIR}/aws-kube-ci
TERRAFORM="terraform -chdir=${TERRAFORM_DIR}"

# Set default values if not defined
: ${HELM:="helm"}
: ${LOG_DIR:="/tmp/logs"}
: ${PROJECT:="$(basename "${PROJECT_DIR}")"}
: ${TEST_NAMESPACE:="test-operator"}
: ${TARGET_DRIVER_VERSION:="470.103.01"}
: ${OPERATOR_IMAGE:="nvcr.io/nvidia/gpu-operator"}
: ${CONTAINER_RUNTIME:="docker"}
|
<reponame>DerSchmale/spirv4web
import { IVariant } from "./IVariant";
import { Types } from "./Types";
import { defaultClone, defaultCopy } from "../utils/defaultCopy";
import { Dim } from "../spirv/Dim";
import { ImageFormat } from "../spirv/ImageFormat";
import { AccessQualifier } from "../spirv/AccessQualifier";
import { StorageClass } from "../spirv/StorageClass";
/**
 * Fundamental SPIR-V value categories used by `SPIRType.basetype`.
 * Ordering matters: internal (non-SPIR-V) helper types are deliberately
 * kept at the end of the enum.
 */
export enum SPIRBaseType
{
    Unknown,
    Void,
    Boolean,
    SByte,
    UByte,
    Short,
    UShort,
    Int,
    UInt,
    Int64,
    UInt64,
    AtomicCounter,
    Half,
    Float,
    Double,
    Struct,
    Image,
    SampledImage,
    Sampler,
    AccelerationStructure,
    RayQuery,

    // Keep internal types at the end.
    ControlPointArray,
    Interpolant,
    Char
}
/**
 * Image-specific type descriptor attached to a `SPIRType` whose basetype is
 * Image or SampledImage. Two descriptors are equal only when every field
 * matches.
 */
export class SPIRTypeImageType
{
    type: TypeID;
    dim: Dim;
    depth: boolean;
    arrayed: boolean;
    ms: boolean;
    sampled: number;
    format: ImageFormat;
    access: AccessQualifier;

    /** Field-for-field copy of this descriptor. */
    clone() { return defaultClone(SPIRTypeImageType, this); }

    /** Strict per-field comparison against another image descriptor. */
    equals(b: SPIRTypeImageType): boolean
    {
        const fields = ["type", "dim", "depth", "arrayed", "ms", "sampled", "format", "access"] as const;
        return fields.every((f) => this[f] === b[f]);
    }
}
/**
 * Full description of one SPIR-V type: scalar/vector/matrix shape, array
 * dimensions, pointer information, struct members and image metadata.
 * Copy-constructible: `new SPIRType(other)` performs a field-wise copy.
 */
export class SPIRType extends IVariant
{
    static type = Types.Type;

    // Scalar/vector/matrix support.
    basetype: SPIRBaseType = SPIRBaseType.Unknown;
    width: number = 0;
    vecsize: number = 1;
    columns: number = 1;

    // Arrays, support array of arrays by having a vector of array sizes.
    array: number[] = [];

    // Array elements can be either specialization constants or specialization ops.
    // This array determines how to interpret the array size.
    // If an element is true, the element is a literal,
    // otherwise, it's an expression, which must be resolved on demand.
    // The actual size is not really known until runtime.
    array_size_literal: boolean[] = [];

    // Pointers
    // Keep track of how many pointer layers we have.
    pointer_depth: number = 0;
    pointer: boolean = false;
    forward_pointer: boolean = false;

    storage: StorageClass = StorageClass.Generic;

    member_types: TypeID[] = [];

    // If member order has been rewritten to handle certain scenarios with Offset,
    // allow codegen to rewrite the index.
    member_type_index_redirection: number[] = [];

    // Image metadata; only meaningful when basetype is Image/SampledImage.
    image: SPIRTypeImageType = new SPIRTypeImageType();

    // Structs can be declared multiple times if they are used as part of interface blocks.
    // We want to detect this so that we only emit the struct definition once.
    // Since we cannot rely on OpName to be equal, we need to figure out aliases.
    type_alias: TypeID = 0;

    // Denotes the type which this type is based on.
    // Allows the backend to traverse how a complex type is built up during access chains.
    parent_type: TypeID = 0;

    // Used in backends to avoid emitting members with conflicting names.
    member_name_cache: Set<string> = new Set<string>();

    // Copy constructor: clones every field from `other` when provided.
    constructor(other?: SPIRType)
    {
        super();
        if (other)
            defaultCopy(other, this);
    }
}
<gh_stars>0
package com.tracy.competition.utils
import com.tracy.competition.domain.enums.{ErrorEnum, SuccessEnum}
import scala.beans.BeanProperty
/**
 * Generic API response envelope: a status code, a human-readable message and
 * a mutable payload map (java.util.HashMap for painless Java/JSON interop).
 *
 * @author Tracy
 * @date 2021/2/9 10:28
 */
class ResponseMessage extends Serializable {
  @BeanProperty var status: String = _
  @BeanProperty var msg: String = _
  @BeanProperty var data = new java.util.HashMap[String, Object]

  // Auxiliary constructor: explicit status code and message.
  def this(status: String, msg: String) {
    this()
    this.status = status
    this.msg = msg
  }

  // Auxiliary constructor: success case, copies code/message from the enum.
  def this(successEnum: SuccessEnum) {
    this()
    this.status = successEnum.getSuccessCode
    this.msg = successEnum.getSuccessMsg
  }

  // Auxiliary constructor: error case, copies code/message from the enum.
  def this(errorEnum: ErrorEnum) {
    this()
    this.status = errorEnum.getErrorCode
    this.msg = errorEnum.getErrorMsg
  }
}
/** Factory helpers so call sites can drop the `new` keyword. */
object ResponseMessage {
  def apply(status: String, msg: String): ResponseMessage = new ResponseMessage(status, msg)

  def apply(successEnum: SuccessEnum): ResponseMessage = new ResponseMessage(successEnum)

  def apply(errorEnum: ErrorEnum): ResponseMessage = new ResponseMessage(errorEnum)
}
|
import random
def generate_data(n):
    """Return a list of ``n`` pseudo-random integers drawn uniformly from [0, 100]."""
    return [random.randint(0, 100) for _ in range(n)]
<reponame>drix00/xray-spectrum-modeling
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. py:currentmodule:: mcxray.map.simulation_data
:synopsis: Module to extract the simulation data from the hdf5 file.
.. moduleauthor:: <NAME> <<EMAIL>>
Module to extract the simulation data from the hdf5 file.
"""
###############################################################################
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import logging
# Third party modules.
import numpy as np
import h5py
# Local modules.
from pymcxray.mcxray import HDF5_PARAMETERS
# Project modules.
# Globals and constants variables.
class SimulationData():
    """Extract simulation results from an mcxray HDF5 results file.

    Provides electron-coefficient maps (backscattered / transmitted / skirted),
    per-element x-ray intensity maps, and emitted/detected spectra, all indexed
    by beam position on the scan grid.
    """

    def __init__(self, hdf5_file_path, positions, symbols):
        # Path of the HDF5 results file produced by the simulation.
        self.hdf5_file_path = hdf5_file_path
        # Lazily-built cache: symbol -> intensity array (see _extract_intensity_data).
        self.intensity_data_map = None
        # Scan grid; must expose x_pixels, y_pixels, xs_nm and ys_nm.
        self.positions = positions
        # Element symbols whose intensities should be extracted.
        self.symbols = symbols

    def get_bse_map(self):
        """Return the backscattered-electron coefficient map."""
        result_name = "Backscattering coefficient"
        bse_map = self._get_electron_result(result_name)
        return bse_map

    def get_te_map(self):
        """Return the transmitted-electron coefficient map."""
        result_name = "Transmitted coefficient"
        te_map = self._get_electron_result(result_name)
        return te_map

    def get_skirted_electron_map(self):
        """Return the skirted-electron coefficient map."""
        result_name = "Skirted coefficient"
        se_map = self._get_electron_result(result_name)
        return se_map

    def _get_electron_result(self, result_name):
        """Build an (x_pixels, y_pixels) map of the named electron result."""
        shape = (self.positions.x_pixels, self.positions.y_pixels)
        # Bug fix: np.float was removed in NumPy 1.20; use np.float64 explicitly.
        electron_result_map = np.zeros(shape, dtype=np.float64)
        with h5py.File(self.hdf5_file_path, 'r', driver='core') as hdf5_file:
            simulations_group = hdf5_file["simulations"]
            for group in simulations_group.values():
                if not group.name.endswith(HDF5_PARAMETERS):
                    try:
                        index_x, index_y = self.find_position_index(self.positions, group.attrs["beamPosition"])
                        bse = group["ElectronResults"].attrs[result_name]
                        electron_result_map[index_y, index_x] = bse
                    except IndexError as message:
                        # Beam position not on the scan grid: skip this group.
                        logging.debug(message)
                        logging.debug("%s", group.name)
        return electron_result_map

    def get_intensity_data(self, symbol):
        """Return the cached intensity array for ``symbol``.

        Raises ValueError when the symbol was not in ``self.symbols``.
        """
        if self.intensity_data_map is None:
            self._extract_intensity_data()

        if symbol in self.intensity_data_map:
            return self.intensity_data_map[symbol]
        else:
            raise ValueError

    def _extract_intensity_data(self):
        """Populate ``self.intensity_data_map`` from the HDF5 file."""
        self.intensity_data_map = {}
        # NOTE(review): the trailing (10, 9, 6) dimensions mirror the shape of
        # the per-position "Intensity" dataset in the file — confirm against
        # the writer if the file format changes.
        shape = (self.positions.x_pixels, self.positions.y_pixels, 10, 9, 6)
        for symbol in self.symbols:
            # Bug fix: np.float was removed in NumPy 1.20; use np.float64.
            self.intensity_data_map[symbol] = np.zeros(shape, dtype=np.float64)

        with h5py.File(self.hdf5_file_path, 'r', driver='core') as hdf5_file:
            simulations_group = hdf5_file["simulations"]
            for group in simulations_group.values():
                if not group.name.endswith(HDF5_PARAMETERS):
                    try:
                        index_x, index_y = self.find_position_index(self.positions, group.attrs["beamPosition"])
                        for symbol in self.symbols:
                            intensity = group["Intensity"][symbol]
                            self.intensity_data_map[symbol][index_y, index_x] = intensity[...]
                    except IndexError as message:
                        logging.info(message)
                        logging.info("%s", group.name)

    def get_emitted_spectrum(self, position):
        """Return (energies_keV, intensities) of the emitted x-ray spectrum.

        NOTE(review): ``position`` is currently unused — the loop visits every
        group and the locals keep being overwritten, so the last readable group
        wins; if no group is readable the return raises UnboundLocalError.
        Presumably the loop was meant to match ``position``; confirm intent
        before changing behavior.
        """
        with h5py.File(self.hdf5_file_path, 'r', driver='core') as hdf5_file:
            simulations_group = hdf5_file["simulations"]
            for group in simulations_group.values():
                if not group.name.endswith(HDF5_PARAMETERS):
                    try:
                        index_x, index_y = self.find_position_index(self.positions, group.attrs["beamPosition"])
                        energy_data = group["XraySpectraRegionsEmitted/energies_keV"][:]
                        intensity_data_1_ekeVsr = group["XraySpectraRegionsEmitted/total_1_ekeVsr"][:]
                    except IndexError as message:
                        logging.debug(message)
                        logging.debug("%s", group.name)
        return energy_data, intensity_data_1_ekeVsr

    def get_detected_spectrum(self, position):
        """Return (energies_keV, intensities) of the detected x-ray spectrum.

        NOTE(review): same unused-``position`` caveat as get_emitted_spectrum.
        """
        with h5py.File(self.hdf5_file_path, 'r', driver='core') as hdf5_file:
            simulations_group = hdf5_file["simulations"]
            for group in simulations_group.values():
                if not group.name.endswith(HDF5_PARAMETERS):
                    try:
                        index_x, index_y = self.find_position_index(self.positions, group.attrs["beamPosition"])
                        energy_data = group["XraySpectraSpecimenEmittedDetected/Energy (keV)"][:]
                        intensity_data = group["XraySpectraSpecimenEmittedDetected/Spectra Total"][:]
                    except IndexError as message:
                        logging.debug(message)
                        logging.debug("%s", group.name)
        return energy_data, intensity_data

    def find_position_index(self, positions, position):
        """Map a (x_nm, y_nm) beam position to its (index_x, index_y) grid indices.

        Raises IndexError when either coordinate is not on the grid.
        """
        index_x = np.where(positions.xs_nm == position[0])[0][0]
        index_y = np.where(positions.ys_nm == position[1])[0][0]
        return index_x, index_y
|
/*
* HBOLockTest.java
* JUnit based test
*
* Created on November 12, 2006, 9:16 PM
*/
package tamp.ch07.Spin.spin;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Stress test for HBOLock: THREADS worker threads each perform PER_THREAD
 * lock-protected increments of a shared counter. If the lock provides mutual
 * exclusion, no increment is lost and the counter ends at exactly COUNT.
 */
public class HBOLockTest extends TestCase {
    private final static int THREADS = 32;
    private final static int COUNT = 32 * 32;
    private final static int PER_THREAD = COUNT / THREADS;
    Thread[] thread = new Thread[THREADS];
    // Shared counter; every mutation happens while holding `instance`.
    int counter = 0;
    HBOLock instance = new HBOLock();

    public HBOLockTest(String testName) {
        super(testName);
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(HBOLockTest.class);
        return suite;
    }

    /** Launches all workers, waits for them, then checks no increment was lost. */
    public void testParallel() throws Exception {
        for (int i = 0; i < THREADS; i++) {
            thread[i] = new MyThread();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].start();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].join();
        }
        assertEquals(COUNT, counter);
    }

    /** Worker: repeatedly acquires the lock and bumps the shared counter. */
    class MyThread extends Thread {
        public void run() {
            for (int i = 0; i < PER_THREAD; i++) {
                instance.lock();
                try {
                    counter = counter + 1;
                } finally {
                    // Always release, even if the increment were to throw.
                    instance.unlock();
                }
            }
        }
    }
}
|
#!/usr/bin/env bash
# Build the Docker image in the current directory (optionally tagged with the
# first argument), then remove intermediate build-stage images.
#
# Usage: ./build.sh [image-tag]

buildArgs=
if [ -n "$1" ]; then
  buildArgs="-t $1"
fi

# $buildArgs is intentionally unquoted so "-t name" splits into two words.
docker build $buildArgs .

echo "removing intermediate container"
# Bug fix: when the filter matches nothing, `docker rmi -f` with no arguments
# exits with an error; only prune when there is something to remove.
intermediateImages=$(docker images -q --filter label=stage=intermediate)
if [ -n "$intermediateImages" ]; then
  docker rmi -f $intermediateImages
fi
#!/bin/sh
# Reset the local development stack: stop all services, wipe the database
# volume, start fresh containers in the background, and show their status.
docker-compose down
rm -rf ./docker/db/data/*
docker-compose up -d
docker-compose ps
|
import * as _ from "lodash";
import * as Rsa from "node-rsa";
import {inject, singleton} from "../di";
import {
ICascSettings,
ICascSettingsToken,
IEncryptionStrategy,
IEncryptionStrategyToken,
IPublicKeyReader,
IPublicKeyReaderToken,
} from "../interfaces";
/**
 * RSA implementation of {@link IEncryptionStrategy}: encrypts values with the
 * application's configured public key via node-rsa. Registered as a singleton
 * under {@link IEncryptionStrategyToken}.
 */
@singleton(IEncryptionStrategyToken)
export class RsaEncryptionStrategy implements IEncryptionStrategy {
    // Lazily-constructed RSA key wrapper; populated on the first encrypt() call.
    private __publicKey?: Rsa;

    public constructor(
        @inject(ICascSettingsToken)
        private readonly __cascSettings: ICascSettings,
        @inject(IPublicKeyReaderToken)
        private readonly __publicKeyReader: IPublicKeyReader,
    ) {}

    /** Strategy identifier used to select this implementation. */
    public get name(): string {
        return "rsa";
    }

    /**
     * Encrypts `value` with the configured RSA public key and returns the
     * base64-encoded ciphertext. The key is read and parsed once, on first use.
     *
     * @throws Error when the configured key format is not "pkcs8-public-pem"
     *         (the only format supported here).
     */
    public async encrypt(value: string): Promise<string> {
        if (_.isNil(this.__publicKey)) {
            if (this.__cascSettings.publicKeyFormat !== "pkcs8-public-pem") {
                throw new Error("unsupported public key format");
            }

            this.__publicKey = new Rsa(
                await this.__publicKeyReader.read(),
                this.__cascSettings.publicKeyFormat,
            );
        }

        return this.__publicKey.encrypt(value, "base64");
    }
}
|
<reponame>ayocodingit/adonis-enum
import Enum from './lib/enum.js'
export default Enum
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.