text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Base16 Atelier Lakeside - Mate Terminal color scheme install script
# Bram de Haan (http://atelierbramdehaan.nl)

# Fill in defaults for any setting the caller did not provide (an empty
# value counts as unset, matching the original [[ -z ]] checks).
: "${PROFILE_NAME:=Base 16 Atelier Lakeside 256}"
: "${PROFILE_SLUG:=base-16-atelier-lakeside-256}"
: "${DCONFTOOL:=dconf}"
: "${BASE_KEY:=/org/mate/terminal}"

# dconf path of the profile this script creates/updates.
PROFILE_KEY="$BASE_KEY/profiles/$PROFILE_SLUG"
# Slug of the user's current default profile; dconf prints it wrapped in
# single quotes, which we strip.
DEFAULT_SLUG=$("$DCONFTOOL" read "$BASE_KEY/global/default-profile" | tr -d "'")
DEFAULT_KEY="$BASE_KEY/profiles/$DEFAULT_SLUG"
# dcopy SRC DST — clone one dconf subtree onto another by piping a dump
# of SRC into a load of DST (dconf requires the trailing slashes).
dcopy() {
    local src="$1" dst="$2"
    "$DCONFTOOL" dump "$src/" | "$DCONFTOOL" load "$dst/"
}
# dset KEY VALUE — write one key under the new profile's dconf path.
dset() {
    local key="$1" val="$2"
    "$DCONFTOOL" write "$PROFILE_KEY/$key" "$val"
}
# glist_append KEY VALUE — append VALUE to the GVariant string list stored
# at dconf KEY, dropping any existing entry that matches VALUE first so the
# list never holds duplicates.
glist_append() {
    local key="$1"; shift
    local val="$1"; shift
    local entries
    # Strip the surrounding [ ], split one entry per line, drop lines
    # mentioning $val, re-append $val, and rejoin with commas.
    # paste(1) replaces the GNU-only "head -c-1 | tr" trailing-newline
    # trick, and "grep -F -v -e" replaces the deprecated fgrep while
    # protecting a $val that begins with a dash.
    entries="$(
        {
            "$DCONFTOOL" read "$key" | tr -d '[]' | tr , '\n' | grep -F -v -e "$val"
            echo "'$val'"
        } | paste -sd, -
    )"
    "$DCONFTOOL" write "$key" "[$entries]"
}
# Seed the new profile from the user's current default profile, if any.
if [[ -n "$DEFAULT_SLUG" ]]; then
    dcopy "$DEFAULT_KEY" "$PROFILE_KEY"
fi

# Apply the Base16 Atelier Lakeside colours.
dset visible-name "'$PROFILE_NAME'"
dset palette "'#161b1d:#d22d72:#568c3b:#8a8a0f:#257fad:#6b6bb8:#2d8f6f:#7ea2b4:#5a7b8c:#d22d72:#568c3b:#8a8a0f:#257fad:#6b6bb8:#2d8f6f:#ebf8ff'"
dset background-color "'#161b1d'"
dset foreground-color "'#7ea2b4'"
dset bold-color "'#7ea2b4'"
dset bold-color-same-as-fg "true"
dset use-theme-colors "false"
dset use-theme-background "false"

# Register the new profile in the global profile list.
glist_append "$BASE_KEY/global/profile-list" "$PROFILE_SLUG"

# Clean up every variable this script introduced.
unset PROFILE_NAME PROFILE_SLUG DCONFTOOL BASE_KEY \
      PROFILE_KEY DEFAULT_SLUG DEFAULT_KEY
|
<filename>mfc14x/version.h
/* MFC version header: forwards the resource-version macros to the shared
   ATL build constants from atlbuild.h so the version numbers stay in
   sync across the libraries. */
#include "atlbuild.h"
#define rmj _LIBS_FILE_VERSION_MAJOR  /* major version */
#define rmm _LIBS_FILE_VERSION_MINOR  /* minor version */
#define rup _LIBS_BUILD               /* build/update number */
#define szVerName ""                  /* empty: no extra version label */
#define szVerUser ""                  /* empty: no user-defined suffix */
|
#include <stdio.h>
// Return the index of the maximum element in arr[0..n-1].
// On ties the first (lowest-index) maximum wins.
// Returns -1 when n <= 0 or arr is NULL — the original read arr[0]
// unconditionally, which is undefined behaviour for an empty array.
int findIndexOfMax(int arr[], int n)
{
    if (arr == NULL || n <= 0)
        return -1;

    int max = arr[0];   // running maximum value
    int index = 0;      // index of the running maximum

    // Compare every remaining element with the current maximum.
    for (int i = 1; i < n; i++)
    {
        if (arr[i] > max)
        {
            max = arr[i];
            index = i;
        }
    }
    return index;
}
// Driver program to test findIndexOfMax.
int main()
{
    int arr[] = {10, 24, 34, 58, 81};
    int n = sizeof(arr)/sizeof(arr[0]);      // element count
    int maxIndex = findIndexOfMax(arr, n);
    // Trailing newline added: the original left the line unterminated,
    // which garbles the shell prompt and can interact badly with
    // line-buffered consumers.
    printf("Maximum element is %d and its index is %d\n", arr[maxIndex],
           maxIndex);
    return 0;
}
def print_max(nums):
    """Print the largest value in ``nums``.

    Args:
        nums: non-empty sequence of mutually comparable values.

    Raises:
        IndexError: if ``nums`` is empty (same as the original behaviour).
    """
    # The original shadowed the builtin `max` with a local variable;
    # use a non-conflicting name instead.  Seeding from nums[0]
    # preserves the IndexError raised on empty input.
    largest = nums[0]
    for value in nums:
        if value > largest:
            largest = value
    print(largest)
<filename>calses.js
// Constructor-function "class": a person with name, surname and height.
function Persona(nombre, apellido, altura) {
  this.nombre = nombre;
  this.apellido = apellido;
  this.altura = altura;
}

// Greet using the person's full name.
Persona.prototype.saludar = function () {
  console.log(`Hola, me llamo ${this.nombre} ${this.apellido}`);
};

// Report whether the person is tall (height above 1.5) or short.
Persona.prototype.Altura = function () {
  const esAlto = this.altura > 1.5;
  if (esAlto) {
    console.log(`Mido ${this.altura}, soy alto`);
  } else {
    console.log(`Mido ${this.altura}, soy bajo`);
  }
};

var rickert = new Persona('Rickert', 'Gonzales', '1.7');
<reponame>dHannasch/py4j
# -*- coding: UTF-8 -*-
from contextlib import contextmanager
import gc
from multiprocessing import Process
import subprocess
import unittest
from py4j.java_gateway import (
JavaGateway, GatewayParameters, CallbackServerParameters,
DEFAULT_PORT, DEFAULT_PYTHON_PROXY_PORT)
from py4j.clientserver import (
ClientServer, JavaParameters, PythonParameters)
from py4j.tests.java_gateway_test import (
PY4J_JAVA_PATH, check_connection, sleep)
from py4j.tests.py4j_callback_recursive_example import HelloState
from py4j.tests.instrumented import (
InstrJavaGateway, InstrumentedPythonPing, register_creation,
CREATED, FINALIZED, MEMORY_HOOKS, InstrClientServer)
def start_instrumented_gateway_server():
    """Launch the instrumented Java gateway app; blocks until the JVM exits."""
    cmd = ["java", "-Xmx512m", "-cp", PY4J_JAVA_PATH,
           "py4j.instrumented.InstrumentedApplication"]
    subprocess.call(cmd)
def start_instrumented_clientserver():
    """Launch the instrumented Java ClientServer app; blocks until the JVM exits."""
    cmd = ["java", "-Xmx512m", "-cp", PY4J_JAVA_PATH,
           "py4j.instrumented.InstrumentedClientServerApplication"]
    subprocess.call(cmd)
def start_gateway_server_example_app_process(start_gateway_server=True):
    """Start the instrumented Java test application in a child process.

    Returns the multiprocessing.Process once the JVM answers connections.
    """
    # XXX DO NOT FORGET TO KILL THE PROCESS IF THE TEST DOES NOT SUCCEED
    target = (start_instrumented_gateway_server if start_gateway_server
              else start_instrumented_clientserver)
    p = Process(target=target)
    p.start()
    sleep()
    check_connection()
    return p
@contextmanager
def gateway_server_example_app_process(start_gateway_server=True):
    # Context manager around the child JVM process: yields the running
    # process and always joins it on exit.  The caller is expected to
    # have shut the JVM down before leaving the block, otherwise join()
    # will hang.
    p = start_gateway_server_example_app_process(start_gateway_server)
    try:
        yield p
    finally:
        p.join()
class HelloState2(HelloState):
    """HelloState variant that also exercises the JVM from inside callbacks.

    Registers itself with the instrumentation hooks so the memory-leak
    tests can assert on creation/finalization counts.
    """

    def __init__(self, run_gc=True):
        # run_gc: whether sayHello triggers an explicit Python GC pass.
        self.gateway = None
        self.run_gc = run_gc
        super(HelloState2, self).__init__()
        register_creation(self)

    def _play_with_jvm(self):
        # Create Java-side objects so the JVM instrumentation has
        # something to track during the callback.
        al = self.gateway.jvm.java.util.ArrayList()
        al.append("Hello World")
        obj = self.gateway.jvm.py4j.\
            instrumented.InstrumentedObject("test")
        al.append(obj)
        return str(al)

    def sayHello(self, int_value=None, string_value=None):
        # Touch the JVM (and optionally GC) before delegating to the base
        # implementation.
        self._play_with_jvm()
        if self.run_gc:
            python_gc()
        return super(HelloState2, self).sayHello(
            int_value, string_value)

    class Java:
        # py4j marker class: Java interface implemented by this object.
        implements = ["py4j.examples.IHello"]
def assert_python_memory(test, size):
    """Assert `size` Python objects were both created and finalized,
    and that the created and finalized sets match exactly."""
    created, finalized = CREATED, FINALIZED
    test.assertEqual(size, len(created))
    test.assertEqual(size, len(finalized))
    test.assertEqual(set(created), set(finalized))
def python_gc():
    """Run the garbage collector three times so that all circular
    references are reliably collected."""
    for _ in range(3):
        gc.collect()
class GatewayServerTest(unittest.TestCase):
def tearDown(self):
MEMORY_HOOKS.clear()
CREATED.clear()
FINALIZED.clear()
def testPythonToJava(self):
def work_with_object(gateway):
obj = gateway.jvm.py4j.\
instrumented.InstrumentedObject("test")
return str(obj)
def internal_work():
gateway2 = InstrJavaGateway(gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5))
sleep()
work_with_object(gateway2)
python_gc()
sleep()
gateway2.shutdown()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
internal_work()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 4 objects: GatewayServer, GatewayConnection, CallbackClient,
# InstrumentedObject
self.assertEqual(4, len(createdSet))
self.assertEqual(4, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 4 objects: JavaGateway, GatewayClient, GatewayProperty,
# GatewayConnection
assert_python_memory(self, 4)
def testPythonToJavaToPython(self):
def play_with_ping(gateway):
ping = InstrumentedPythonPing()
pingpong = gateway.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work():
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(gateway2)
python_gc()
sleep()
gateway2.shutdown()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
internal_work()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 9 objects: GatewayServer, 4 GatewayConnection, CallbackClient,
# 3 CallbackConnection
self.assertEqual(9, len(createdSet))
self.assertEqual(9, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 12 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 4 GatewayConnection,
# 3 CallbackConnection. Notice the symmetry
assert_python_memory(self, 12)
def testPythonToJavaToPythonClose(self):
def play_with_ping(gateway):
ping = InstrumentedPythonPing()
pingpong = gateway.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work(assert_memory):
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(gateway2)
python_gc()
sleep()
gateway2.close(close_callback_server_connections=True,
keep_callback_server=True)
sleep()
assert_memory()
gateway2.shutdown()
sleep()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
def perform_memory_tests():
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 10 objects: GatewayServer, 4 GatewayConnection,
# CallbackClient, 4 CallbackConnection
self.assertEqual(10, len(createdSet))
# 13 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 4 GatewayConnection,
# 4 CallbackConnection. Notice the symmetry between callback
# and gateway connections.
self.assertEqual(13, len(CREATED))
# 4 gateway connections, 3 callback connections.
# There is still one callback connection staying around
# following Java finalization that called back Python.
self.assertEqual(7, len(finalizedSet))
# Same amount of connections for the Python side
self.assertEqual(7, len(FINALIZED))
internal_work(perform_memory_tests)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
gateway.shutdown()
# 14 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 5 GatewayConnection,
# 4 CallbackConnection. Notice the symmetry
# One more gateway connection created because we called shutdown
# after close (which requires a connection to send a shutdown
# command).
assert_python_memory(self, 14)
def testJavaToPythonToJavaCleanGC(self):
def internal_work(gateway):
hello_state = HelloState2()
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(True)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
internal_work(gateway)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGC(self):
def internal_work(gateway):
hello_state = HelloState2(run_gc=False)
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(True)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
# We disable gc to test whether a shut down on one side will
# garbage collect everything.
gc.disable()
internal_work(gateway)
gc.enable()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaCleanGCNoShutdown(self):
def internal_work(gateway):
hello_state = HelloState2()
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(False)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
internal_work(gateway)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGCNoShutdown(self):
def internal_work(gateway):
hello_state = HelloState2(run_gc=False)
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(False)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
# We disable gc to test whether a shut down on one side will
# garbage collect everything.
gc.disable()
internal_work(gateway)
gc.enable()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
class ClientServerTest(unittest.TestCase):
def tearDown(self):
MEMORY_HOOKS.clear()
CREATED.clear()
FINALIZED.clear()
def testPythonToJava(self):
def work_with_object(clientserver):
obj = clientserver.jvm.py4j.\
instrumented.InstrumentedObject("test")
return str(obj)
def internal_work():
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
work_with_object(clientserver2)
python_gc()
sleep()
clientserver2.shutdown()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
internal_work()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 5 objects: ClientServer, ClientServerConnection, PythonClient,
# JavaServer, InstrumentedObject
self.assertEqual(5, len(createdSet))
self.assertEqual(5, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 5 objects: ClientServer, ClientServerConnection, PythonClient,
# JavaServer, GatewayProperty
assert_python_memory(self, 5)
def testPythonToJavaToPython(self):
def play_with_ping(clientserver):
ping = InstrumentedPythonPing()
pingpong = clientserver.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work():
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(clientserver2)
python_gc()
sleep()
clientserver2.shutdown()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
internal_work()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 4 objects: ClientServer, ClientServerConnection, JavaServer,
# PythonClient
self.assertEqual(4, len(createdSet))
self.assertEqual(4, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 6 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, ClientServerConnection
def testPythonToJavaToPythonClose(self):
def play_with_ping(clientserver):
ping = InstrumentedPythonPing()
pingpong = clientserver.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work(assert_memory):
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(clientserver2)
python_gc()
sleep()
clientserver2.close(
close_callback_server_connections=True,
keep_callback_server=True)
sleep()
assert_memory()
clientserver2.shutdown()
sleep()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
def perform_memory_tests():
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.\
MetricRegistry.getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.\
MetricRegistry.getFinalizedObjectsKeySet()
# 6 objects: ClientServer, JavaServer,
# PythonClient, 3 ClientServerConnection.
self.assertEqual(6, len(createdSet))
# Should be 2: ClientServer, 1 ClientServerConnection
# But for some reasons, Java refuses to collect the
# clientserverconnection even though there are no strong
# references.
self.assertEqual(1, len(finalizedSet))
# 8 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, 3 ClientServerConnection
self.assertEqual(8, len(CREATED))
# PythonPing + ClientServerConnection
self.assertEqual(2, len(FINALIZED))
internal_work(perform_memory_tests)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
clientserver.shutdown()
# 9 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, 4 ClientServerConnection
assert_python_memory(self, 9)
def testJavaToPythonToJavaCleanGC(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(True)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
internal_work(clientserver)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 2
# ClientServerConnection (1 to call sayHello)
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (1)
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGC(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(True)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
# We disable gc to test whether a shut down on one side will
# garbage collect everything.
gc.disable()
internal_work(clientserver)
gc.enable()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 2
# ClientServerConnection (1 to call sayHello)
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
def testJavaToPythonToJavaCleanGCNoShutdown(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(False)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
# We disable gc to test whether a shut down on one side will
# garbage collect everything.
internal_work(clientserver)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 8 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 3
# ClientServerConnection (1 to call sayHello,
# 1 that receives shutdown command)
self.assertEqual(7, len(createdSet))
self.assertEqual(7, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGCNoShutdown(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(False)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
# We disable gc to test whether a shut down on one side will
# garbage collect everything.
gc.disable()
internal_work(clientserver)
gc.enable()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 3
# ClientServerConnection (1 to call sayHello,
# 1 that receives shutdown command)
self.assertEqual(7, len(createdSet))
self.assertEqual(7, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
|
<gh_stars>1-10
package entity

// Channel is the xorm-mapped database row for a blockchain channel
// (presumably Hyperledger Fabric, given the chain/org fields — confirm):
// which chain it belongs to, the participating orgs, and the account
// that created it.
type Channel struct {
	Id          int    `json:"id" xorm:"not null pk autoincr INT(11)"` // auto-increment primary key
	ChainId     int    `json:"chainId" xorm:"not null INT(11)"`        // owning chain id
	Orgs        string `json:"orgs" xorm:"not null VARCHAR(255)"`      // participating organizations
	ChannelName string `json:"channelName" xorm:"not null VARCHAR(64)"`
	UserAccount string `json:"userAccount" xorm:"not null VARCHAR(100)"` // creating user account
	Created     int64  `json:"created" xorm:"not null BIGINT(20)"`       // creation time (assumed unix timestamp — TODO confirm)
}
|
#!/bin/bash -e
# shellcheck disable=SC2119
# Run every numbered artefact (00-99) found in the current sub-stage
# directory, in order: debconf preseeds, package lists (with and without
# recommends), quilt patch series, and hook scripts (host-side and
# chroot-side).  Relies on SUB_STAGE_DIR/STAGE_WORK_DIR/SCRIPT_DIR and
# the log/on_chroot helpers from scripts/common.
run_sub_stage()
{
	log "Begin ${SUB_STAGE_DIR}"
	pushd "${SUB_STAGE_DIR}" > /dev/null
	for i in {00..99}; do
		# ${i}-debconf: preseed answers fed to debconf inside the chroot.
		if [ -f "${i}-debconf" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-debconf"
			on_chroot << EOF
debconf-set-selections <<SELEOF
$(cat "${i}-debconf")
SELEOF
EOF
			log "End ${SUB_STAGE_DIR}/${i}-debconf"
		fi
		# ${i}-packages-nr: packages installed WITHOUT recommends.
		if [ -f "${i}-packages-nr" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-packages-nr"
			PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages-nr")"
			if [ -n "$PACKAGES" ]; then
				on_chroot << EOF
apt-get -o APT::Acquire::Retries=3 install --no-install-recommends -y $PACKAGES
EOF
				if [ "${USE_QCOW2}" = "1" ]; then
					on_chroot << EOF
apt-get clean
EOF
				fi
			fi
			log "End ${SUB_STAGE_DIR}/${i}-packages-nr"
		fi
		# ${i}-packages: packages installed with recommends.
		if [ -f "${i}-packages" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-packages"
			PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages")"
			if [ -n "$PACKAGES" ]; then
				on_chroot << EOF
apt-get -o APT::Acquire::Retries=3 install -y $PACKAGES
EOF
				if [ "${USE_QCOW2}" = "1" ]; then
					on_chroot << EOF
apt-get clean
EOF
				fi
			fi
			log "End ${SUB_STAGE_DIR}/${i}-packages"
		fi
		# ${i}-patches: quilt patch series applied inside STAGE_WORK_DIR.
		if [ -d "${i}-patches" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-patches"
			pushd "${STAGE_WORK_DIR}" > /dev/null
			if [ "${CLEAN}" = "1" ]; then
				rm -rf .pc
				rm -rf ./*-pc
			fi
			QUILT_PATCHES="${SUB_STAGE_DIR}/${i}-patches"
			SUB_STAGE_QUILT_PATCH_DIR="$(basename "$SUB_STAGE_DIR")-pc"
			mkdir -p "$SUB_STAGE_QUILT_PATCH_DIR"
			ln -snf "$SUB_STAGE_QUILT_PATCH_DIR" .pc
			quilt upgrade
			# A sentinel EDIT file drops the operator into a shell to
			# fix up patches by hand.
			if [ -e "${SUB_STAGE_DIR}/${i}-patches/EDIT" ]; then
				echo "Dropping into bash to edit patches..."
				bash
			fi
			# quilt push exits 2 when the series is already fully
			# applied; accept 0 and 2, fail the build (via bash -e and
			# the bare "false") on anything else.
			RC=0
			quilt push -a || RC=$?
			case "$RC" in
				0|2)
					;;
				*)
					false
					;;
			esac
			popd > /dev/null
			log "End ${SUB_STAGE_DIR}/${i}-patches"
		fi
		# ${i}-run.sh: host-side hook script.  Now quoted for
		# consistency with every other file test in this function.
		if [ -x "${i}-run.sh" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-run.sh"
			"./${i}-run.sh"
			log "End ${SUB_STAGE_DIR}/${i}-run.sh"
		fi
		# ${i}-run-chroot.sh: script piped into the chroot.
		if [ -f "${i}-run-chroot.sh" ]; then
			log "Begin ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
			on_chroot < "${i}-run-chroot.sh"
			log "End ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
		fi
	done
	popd > /dev/null
	log "End ${SUB_STAGE_DIR}"
}
run_stage(){
	# Execute one top-level build stage: prepare/attach the rootfs,
	# record the stage for image export if requested, run prerun.sh and
	# every non-skipped sub-stage, then detach/unmount again.  Leaves
	# PREV_* variables pointing at this stage for the next one.
	log "Begin ${STAGE_DIR}"
	STAGE="$(basename "${STAGE_DIR}")"
	pushd "${STAGE_DIR}" > /dev/null
	STAGE_WORK_DIR="${WORK_DIR}/${STAGE}"
	ROOTFS_DIR="${STAGE_WORK_DIR}"/rootfs
	if [ "${USE_QCOW2}" = "1" ]; then
		# qcow2 workflow: attach the stage image unless stage is skipped.
		if [ ! -f SKIP ]; then
			load_qimage
		fi
	else
		# make sure we are not umounting during export-image stage
		if [ "${USE_QCOW2}" = "0" ] && [ "${NO_PRERUN_QCOW2}" = "0" ]; then
			unmount "${WORK_DIR}/${STAGE}"
		fi
	fi
	if [ ! -f SKIP_IMAGES ]; then
		if [ -f "${STAGE_DIR}/EXPORT_IMAGE" ]; then
			# Remember this stage for the image-export pass at the end.
			EXPORT_DIRS="${EXPORT_DIRS} ${STAGE_DIR}"
		fi
	fi
	if [ ! -f SKIP ]; then
		# CLEAN=1 forces a fresh rootfs (non-qcow2 builds only).
		if [ "${CLEAN}" = "1" ] && [ "${USE_QCOW2}" = "0" ] ; then
			if [ -d "${ROOTFS_DIR}" ]; then
				rm -rf "${ROOTFS_DIR}"
			fi
		fi
		if [ -x prerun.sh ]; then
			log "Begin ${STAGE_DIR}/prerun.sh"
			./prerun.sh
			log "End ${STAGE_DIR}/prerun.sh"
		fi
		# Run each sub-stage directory unless it carries a SKIP marker.
		for SUB_STAGE_DIR in "${STAGE_DIR}"/*; do
			if [ -d "${SUB_STAGE_DIR}" ] && [ ! -f "${SUB_STAGE_DIR}/SKIP" ]; then
				run_sub_stage
			fi
		done
	fi
	if [ "${USE_QCOW2}" = "1" ]; then
		unload_qimage
	else
		# make sure we are not umounting during export-image stage
		if [ "${USE_QCOW2}" = "0" ] && [ "${NO_PRERUN_QCOW2}" = "0" ]; then
			unmount "${WORK_DIR}/${STAGE}"
		fi
	fi
	# Expose this stage's results to the following stage.
	PREV_STAGE="${STAGE}"
	PREV_STAGE_DIR="${STAGE_DIR}"
	PREV_ROOTFS_DIR="${ROOTFS_DIR}"
	popd > /dev/null
	log "End ${STAGE_DIR}"
}
# The build mounts filesystems and chroots, so it must run as root.
if [ "$(id -u)" != "0" ]; then
	echo "Please run as root" 1>&2
	exit 1
fi

# Absolute path of the directory containing this script.
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
export BASE_DIR

# An optional "config" file in the current directory supplies settings.
if [ -f config ]; then
	# shellcheck disable=SC1091
	source config
fi
# Option parsing: -c FILE sources an extra config file on top of ./config.
while getopts "c:" flag
do
	case "$flag" in
		c)
			EXTRA_CONFIG="$OPTARG"
			# shellcheck disable=SC1090
			source "$EXTRA_CONFIG"
			;;
		*)
			# Unknown options are silently ignored.
			;;
	esac
done
# Exit/signal handler: detach the qcow2 image if one may be loaded, so an
# interrupted build does not leave it attached.
term() {
	[ "${USE_QCOW2}" = "1" ] || return 0
	log "Unloading image"
	unload_qimage
}
# Run the cleanup handler on any exit path.
trap term EXIT INT TERM

export PI_GEN=${PI_GEN:-pi-gen}
export PI_GEN_REPO=${PI_GEN_REPO:-https://github.com/RPi-Distro/pi-gen}

# IMG_NAME is the only mandatory setting.
if [ -z "${IMG_NAME}" ]; then
	echo "IMG_NAME not set" 1>&2
	exit 1
fi

# Image naming and working/deploy directory layout.
export USE_QEMU="${USE_QEMU:-0}"
export IMG_DATE="${IMG_DATE:-"$(date +%Y-%m-%d)"}"
export IMG_FILENAME="${IMG_FILENAME:-"${IMG_DATE}-${IMG_NAME}"}"
export ZIP_FILENAME="${ZIP_FILENAME:-"image_${IMG_DATE}-${IMG_NAME}"}"
export SCRIPT_DIR="${BASE_DIR}/scripts"
export WORK_DIR="${WORK_DIR:-"${BASE_DIR}/work/${IMG_DATE}-${IMG_NAME}"}"
export DEPLOY_DIR=${DEPLOY_DIR:-"${BASE_DIR}/deploy"}
export DEPLOY_ZIP="${DEPLOY_ZIP:-1}"
export LOG_FILE="${WORK_DIR}/build.log"

# Defaults for the target OS configuration.
export TARGET_HOSTNAME=${TARGET_HOSTNAME:-raspberrypi}
export FIRST_USER_NAME=${FIRST_USER_NAME:-pi}
export FIRST_USER_PASS=${FIRST_USER_PASS:-raspberry}
export RELEASE=${RELEASE:-buster}
export WPA_ESSID
export WPA_PASSWORD
export WPA_COUNTRY
export ENABLE_SSH="${ENABLE_SSH:-0}"
export PUBKEY_ONLY_SSH="${PUBKEY_ONLY_SSH:-0}"
export LOCALE_DEFAULT="${LOCALE_DEFAULT:-en_GB.UTF-8}"
export KEYBOARD_KEYMAP="${KEYBOARD_KEYMAP:-gb}"
export KEYBOARD_LAYOUT="${KEYBOARD_LAYOUT:-English (UK)}"
export TIMEZONE_DEFAULT="${TIMEZONE_DEFAULT:-Europe/London}"
export GIT_HASH=${GIT_HASH:-"$(git rev-parse HEAD)"}

# Variables shared with the stage scripts; values are assigned later
# (mostly inside run_stage) or come from the sourced config.
export PUBKEY_SSH_FIRST_USER
export CLEAN
export IMG_NAME
export APT_PROXY
export STAGE
export STAGE_DIR
export STAGE_WORK_DIR
export PREV_STAGE
export PREV_STAGE_DIR
export ROOTFS_DIR
export PREV_ROOTFS_DIR
export IMG_SUFFIX
export NOOBS_NAME
export NOOBS_DESCRIPTION
export EXPORT_DIR
export EXPORT_ROOTFS_DIR

# quilt configuration used by run_sub_stage's patch handling.
export QUILT_PATCHES
export QUILT_NO_DIFF_INDEX=1
export QUILT_NO_DIFF_TIMESTAMPS=1
export QUILT_REFRESH_ARGS="-p ab"

# shellcheck source=scripts/common
source "${SCRIPT_DIR}/common"
# shellcheck source=scripts/dependencies_check
source "${SCRIPT_DIR}/dependencies_check"

# qcow2-based building is the default workflow.
export NO_PRERUN_QCOW2="${NO_PRERUN_QCOW2:-1}"
export USE_QCOW2="${USE_QCOW2:-1}"
export BASE_QCOW2_SIZE=${BASE_QCOW2_SIZE:-12G}
source "${SCRIPT_DIR}/qcow2_handling"
# Tie NO_PRERUN_QCOW2 to the chosen workflow (overrides any user value).
if [ "${USE_QCOW2}" = "1" ]; then
	NO_PRERUN_QCOW2=1
else
	NO_PRERUN_QCOW2=0
fi
export NO_PRERUN_QCOW2="${NO_PRERUN_QCOW2:-1}"

# Abort early if required host tools are missing.
dependencies_check "${BASE_DIR}/depends"
#check username is valid
if [[ ! "$FIRST_USER_NAME" =~ ^[a-z][-a-z0-9_]*$ ]]; then
echo "Invalid FIRST_USER_NAME: $FIRST_USER_NAME"
exit 1
fi
if [[ -n "${APT_PROXY}" ]] && ! curl --silent "${APT_PROXY}" >/dev/null ; then
echo "Could not reach APT_PROXY server: ${APT_PROXY}"
exit 1
fi
if [[ -n "${WPA_PASSWORD}" && ${#WPA_PASSWORD} -lt 8 || ${#WPA_PASSWORD} -gt 63 ]] ; then
echo "WPA_PASSWORD" must be between 8 and 63 characters
exit 1
fi
if [[ "${PUBKEY_ONLY_SSH}" = "1" && -z "${PUBKEY_SSH_FIRST_USER}" ]]; then
echo "Must set 'PUBKEY_SSH_FIRST_USER' to a valid SSH public key if using PUBKEY_ONLY_SSH"
exit 1
fi
mkdir -p "${WORK_DIR}"
log "Begin ${BASE_DIR}"
STAGE_LIST=${STAGE_LIST:-${BASE_DIR}/stage*}
for STAGE_DIR in $STAGE_LIST; do
STAGE_DIR=$(realpath "${STAGE_DIR}")
run_stage
done
CLEAN=1
for EXPORT_DIR in ${EXPORT_DIRS}; do
STAGE_DIR=${BASE_DIR}/export-image
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_IMAGE"
EXPORT_ROOTFS_DIR=${WORK_DIR}/$(basename "${EXPORT_DIR}")/rootfs
if [ "${USE_QCOW2}" = "1" ]; then
USE_QCOW2=0
EXPORT_NAME="${IMG_FILENAME}${IMG_SUFFIX}"
echo "------------------------------------------------------------------------"
echo "Running export stage for ${EXPORT_NAME}"
rm -f "${WORK_DIR}/export-image/${EXPORT_NAME}.img" || true
rm -f "${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2" || true
rm -f "${WORK_DIR}/${EXPORT_NAME}.img" || true
rm -f "${WORK_DIR}/${EXPORT_NAME}.qcow2" || true
EXPORT_STAGE=$(basename "${EXPORT_DIR}")
for s in $STAGE_LIST; do
TMP_LIST=${TMP_LIST:+$TMP_LIST }$(basename "${s}")
done
FIRST_STAGE=${TMP_LIST%% *}
FIRST_IMAGE="image-${FIRST_STAGE}.qcow2"
pushd "${WORK_DIR}" > /dev/null
echo "Creating new base "${EXPORT_NAME}.qcow2" from ${FIRST_IMAGE}"
cp "./${FIRST_IMAGE}" "${EXPORT_NAME}.qcow2"
ARR=($TMP_LIST)
# rebase stage images to new export base
for CURR_STAGE in "${ARR[@]}"; do
if [ "${CURR_STAGE}" = "${FIRST_STAGE}" ]; then
PREV_IMG="${EXPORT_NAME}"
continue
fi
echo "Rebasing image-${CURR_STAGE}.qcow2 onto ${PREV_IMG}.qcow2"
qemu-img rebase -f qcow2 -u -b ${PREV_IMG}.qcow2 image-${CURR_STAGE}.qcow2
if [ "${CURR_STAGE}" = "${EXPORT_STAGE}" ]; then
break
fi
PREV_IMG="image-${CURR_STAGE}"
done
# commit current export stage into base export image
echo "Committing image-${EXPORT_STAGE}.qcow2 to ${EXPORT_NAME}.qcow2"
qemu-img commit -f qcow2 -p -b "${EXPORT_NAME}.qcow2" image-${EXPORT_STAGE}.qcow2
# rebase stage images back to original first stage for easy re-run
for CURR_STAGE in "${ARR[@]}"; do
if [ "${CURR_STAGE}" = "${FIRST_STAGE}" ]; then
PREV_IMG="image-${CURR_STAGE}"
continue
fi
echo "Rebasing back image-${CURR_STAGE}.qcow2 onto ${PREV_IMG}.qcow2"
qemu-img rebase -f qcow2 -u -b ${PREV_IMG}.qcow2 image-${CURR_STAGE}.qcow2
if [ "${CURR_STAGE}" = "${EXPORT_STAGE}" ]; then
break
fi
PREV_IMG="image-${CURR_STAGE}"
done
popd > /dev/null
mkdir -p "${WORK_DIR}/export-image/rootfs"
mv "${WORK_DIR}/${EXPORT_NAME}.qcow2" "${WORK_DIR}/export-image/"
echo "Mounting image ${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2 to rootfs ${WORK_DIR}/export-image/rootfs"
mount_qimage "${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2" "${WORK_DIR}/export-image/rootfs"
CLEAN=0
run_stage
CLEAN=1
USE_QCOW2=1
else
run_stage
fi
if [ "${USE_QEMU}" != "1" ]; then
if [ -e "${EXPORT_DIR}/EXPORT_NOOBS" ]; then
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_NOOBS"
STAGE_DIR="${BASE_DIR}/export-noobs"
if [ "${USE_QCOW2}" = "1" ]; then
USE_QCOW2=0
run_stage
USE_QCOW2=1
else
run_stage
fi
fi
fi
done
if [ -x postrun.sh ]; then
log "Begin postrun.sh"
cd "${BASE_DIR}"
./postrun.sh
log "End postrun.sh"
fi
if [ "${USE_QCOW2}" = "1" ]; then
unload_qimage
fi
log "End ${BASE_DIR}"
|
<filename>lambdas/codebuild_result_fn/index.py
import boto3
# Create the CodeCommit client once at import time so it is reused across
# warm Lambda invocations.
codecommit_client = boto3.client('codecommit')
def lambda_handler(event, context):
    """Post the CodeBuild result as a comment on the originating pull request.

    Triggered by a CodeBuild state-change event.  The pull-request context
    (id, repository, commit ids) is recovered from the build's environment
    variables; the overall result is derived from the build phases.
    """
    print(event)  # log the raw event for debugging in CloudWatch

    # FIX: initialize before the loop — previously a build event missing any
    # of these environment variables crashed with a NameError far below.
    pull_request_id = None
    repository_name = None
    before_commit_id = None
    after_commit_id = None
    for item in event['detail']['additional-information']['environment']['environment-variables']:
        if item['name'] == 'pullRequestId':
            pull_request_id = item['value']
        if item['name'] == 'repositoryName':
            repository_name = item['value']
        if item['name'] == 'sourceCommit':
            before_commit_id = item['value']
        if item['name'] == 'destinationCommit':
            after_commit_id = item['value']
    if None in (pull_request_id, repository_name, before_commit_id, after_commit_id):
        # Fail fast with a clear message instead of an opaque downstream error.
        raise ValueError('Build event is missing pull request environment variables')

    build_id_arn = event['detail']['build-id']
    # Keep only the "project:uuid" part of the build ARN for display.
    build_id = build_id_arn.split("/", 1)[-1]
    log_link = event['detail']['additional-information']['logs']['deep-link']

    # The build failed if any phase reports FAILED; otherwise it succeeded.
    for phase in event['detail']['additional-information']['phases']:
        if phase.get('phase-status') == 'FAILED':
            content = f'💥 {build_id} **Failed** - See the [Logs]({log_link})'
            break
    else:
        content = f'✔️ {build_id} **Succeed** - See the [Logs]({log_link})'

    codecommit_client.post_comment_for_pull_request(
        pullRequestId=pull_request_id,
        repositoryName=repository_name,
        beforeCommitId=before_commit_id,
        afterCommitId=after_commit_id,
        content=content
    )
|
import { ipcRenderer } from 'electron';
import React, { useReducer } from 'react';
import injectTwitterScript from '../utils/scripts';
import { CONTEXT_CONFIG, INITIAL_STATE } from './constants';
import { ACTIONS, TwitterResponse } from './interfaces';
import reducer from './reducer';
import channels from '../utils/constants';
/**
* Define the interface of the context.
*/
export interface ContextProps {
  loading: boolean; // true while a tweet search is in flight
  lastSearchs: string[]; // most recent search terms, newest first
  search: string; // current search input value
  setSearch: (search: string) => void; // update the search value
  htmlTweets: string; // embedded-tweets HTML returned by the main process
  loadTweets: (newSearch: string | null) => void; // run a search
}

const Context = React.createContext({} as ContextProps);

/**
 * Define the interface of the context provider.
 */
interface Props {
  children: React.ReactNode; // subtree that can consume this context
}
/**
* Define the context provider (data and functionality).
* @param props Object of the Props interface
* @returns The context provider, with the exposed data and functionality.
*/
/**
 * Define the context provider (data and functionality).
 * @param props Object of the Props interface
 * @returns The context provider, with the exposed data and functionality.
 */
export const TweetsContextProvider = ({ children }: Props) => {
  const [state, dispatch] = useReducer(reducer, INITIAL_STATE);
  const { loading, lastSearchs, search, htmlTweets } = state;

  /**
   * Function to show the loading spinner.
   */
  const show = () => {
    dispatch({
      type: ACTIONS.UPDATE_LOADING,
      payload: { loading: true },
    });
  };

  /**
   * Function to hide the loading spinner.
   */
  const hide = () => {
    dispatch({
      type: ACTIONS.UPDATE_LOADING,
      payload: { loading: false },
    });
  };

  /**
   * Function to add a new user search to the last searches.
   * @param searchToAdd Is the search value to save.
   */
  const addLastSearch = (searchToAdd: string) => {
    const alreadyExist = lastSearchs.find((s) => s === searchToAdd);
    if (alreadyExist === undefined) {
      // FIX: build the new list immutably instead of calling unshift() on
      // the reducer state, which mutated React state in place and bypassed
      // change detection.
      const newLastSearchs = [searchToAdd, ...lastSearchs].slice(
        0,
        CONTEXT_CONFIG.MAX_STORE
      );
      localStorage.setItem(
        CONTEXT_CONFIG.STORE_KEY,
        JSON.stringify(newLastSearchs)
      );
      dispatch({
        type: ACTIONS.ADD_LAST_SEARCH,
        payload: { lastSearchs: newLastSearchs },
      });
    }
  };

  /**
   * Function to update the state search value.
   * @param newSearch Is the new search value to save.
   */
  const setSearch = (newSearch: string) => {
    dispatch({
      type: ACTIONS.UPDATE_SEARCH,
      payload: { search: newSearch },
    });
  };

  /**
   * Function to update the state htmlTweets value.
   * @param newHtmlTweets Is the new htmlTweets to save.
   */
  const setHtmlTweets = (newHtmlTweets: string) => {
    dispatch({
      type: ACTIONS.UPDATE_HTML_TWEETS,
      payload: { htmlTweets: newHtmlTweets },
    });
  };

  /**
   * Handler executed when the call to the Twitter publish API succeeds.
   * Keeps the spinner visible until Twitter's widgets script has loaded.
   * @param res The response returned by the service.
   * @param searchToUse Search value sent to the service.
   * @returns The same response object.
   */
  const onSuccessSearch = (res: string, searchToUse: string): string => {
    setHtmlTweets(res);
    addLastSearch(searchToUse);
    const script = injectTwitterScript('article');
    if (script !== null) {
      script.addEventListener('load', () => {
        hide();
      });
    } else {
      hide();
    }
    return res;
  };

  /**
   * Function to load the new Tweets on screen.
   * @param newSearch Value of the new search if a last user search is clicked. Else null.
   */
  const loadTweets = async (newSearch: string | null) => {
    show();
    let searchToUse = search;
    if (newSearch !== null) {
      setSearch(newSearch);
      searchToUse = newSearch;
    }
    const result: TwitterResponse = await ipcRenderer.invoke(
      channels.FETCH_TWEETS_ON_MAIN,
      searchToUse
    );
    if (result.status === 'ok') {
      // FIX: onSuccessSearch hides the spinner itself (after the embed
      // script loads); the unconditional hide() that used to follow here
      // hid the spinner before the tweets were actually rendered.
      onSuccessSearch(result.html, searchToUse);
    } else {
      setHtmlTweets(result.html);
      hide();
    }
  };

  return (
    <Context.Provider
      value={{
        loading,
        lastSearchs,
        htmlTweets,
        search,
        loadTweets,
        setSearch,
      }}
    >
      {children}
    </Context.Provider>
  );
};
export default Context;
|
# Adapted from Kaldi librispeech and Eesen WSJ recipes by Jayadev Billa (2017)

. ./cmd.sh ## You'll want to change cmd.sh to something that will work on your system.
## This relates to the queue.
[ -f path.sh ] && . ./path.sh;

# ---------------------------------------------------------------------------
# Tunable options; every variable below can be overridden from the command
# line (e.g. --stage 3 --learn-rate 0.0002) via utils/parse_options.sh.
# ---------------------------------------------------------------------------
stage=1
#libri=/export/data/en-asr-data/OpenSLR/LibriSpeech
libri=/nfs2/yyshi/OpenSLR/LibriSpeech
num_layers=4
learn_rate=0.0004
dropout_rate=0.9
left_context=1
right_context=1
subsample=3
num_projects=320
num_experts=44
moe_temp=20.0
nnet_type=blstm
batch_size=64
use_decay=2
target_length_cutoff=2
prior_label_sm=0
uniform_label_sm=0
use_bn=false
num_neurons=320 # number of memory cells in every LSTM layer
gpus=2

. utils/parse_options.sh

# Specify network structure and generate the network topology
input_dim=120 # dimension of the input features; we will use 40-dimensional fbanks with deltas and double deltas
optimizer="adam"
sort_by_len=true
batch_threads=8
report_interval=1

export CUDA_VISIBLE_DEVICES=$gpus

# Experiment directory name encodes the hyper-parameters for traceability.
dir=exp/libri_${nnet_type}_proj_${num_layers}_${num_neurons}_${num_projects}_${learn_rate}_l${left_context}r${right_context}_d${dropout_rate}_ex${num_experts}_moet${moe_temp}_bn${use_bn}_ud${use_decay}_usm${uniform_label_sm}_psm${prior_label_sm}
mkdir -p $dir

# hostname=$(hostname)
# case $hostname in
# Red-*)
# tfdata=$PWD/data/tfrecord
# ;;
# mobvoi-*)
# tfdata=/cache/yyshi/tfrecord/libri
# libri=/export/data/en-asr-data/OpenSLR/LibriSpeech
# ;;
# esac
tfdata=$PWD/data/tfrecord

## Setup up features
norm_vars=true
add_deltas=true
echo $norm_vars > $dir/norm_vars # output feature configs which will be used in decoding
echo $add_deltas > $dir/add_deltas

data=data_libri
lm_data=$data/lm #data/local/lm
lm_tmp=$data/lm_tmp
dict_dir=$data/dict #data/local/dict
lang_dir=$data/lang #data/lang
feats_tmpdir=./tmp # this should ideally be a tmp dir local to the machine.
# NOTE(review): $exp_base is never assigned in this script, so train_dir
# expands to "/train_lstm"; it also appears unused below — confirm intent.
train_dir=$exp_base/train_lstm # working directory
dict_name=librispeech_phn_reduced_dict.txt
dict_type="char"
fb_conf=$dir/fbconf

# create directories and copy relevant files
mkdir -p $data/{lm,lm_tmp,dict,lang}
cp conf/$dict_name $lm_data
cp conf/fbconf-{8,10,11} $dir

# base url for downloads.
data_url=www.openslr.org/resources/12
lm_url=www.openslr.org/resources/11

echo =====================================================================
echo "Started run @ ", `date`
echo =====================================================================

if [ $stage -le 1 ]; then
  echo =====================================================================
  echo " Data Preparation "
  echo =====================================================================
  # download the 100hr training data and test sets.
  #for part in dev-clean test-clean dev-other test-other train-clean-100; do
  # local/download_and_untar.sh $data $data_url $part || exit 1;
  #done
  # download the LM resources
  local/download_lm.sh $lm_url $lm_data || exit 1;
  # format the data as Kaldi data directories
  for part in dev-clean test-clean dev-other test-other train-clean-100 train-clean-360 train-other-500; do
    # use underscore-separated names in data directories.
    local/libri_data_prep.sh ${libri}/$part $data/$(echo $part | sed s/-/_/g) || exit 1;
  done
fi

if [ $stage -le 2 ]; then
  echo =====================================================================
  echo " Prepare dictionary and FST "
  echo =====================================================================
  ## See Kaldi librispeech recipe for additional information/context
  # Normally dict is in $lm_data but for this sequence of experiments well provide the dict
  # in $exp_base
  local/ls_prepare_phoneme_dict.sh $lm_data $dict_dir $dict_name || exit 1;
  # Compile the lexicon and token FSTs
  # usage: utils/ctc_compile_dict_token.sh <dict-src-dir> <tmp-dir> <lang-dir>"
  utils/ctc_compile_dict_token.sh --dict-type $dict_type --space-char "<SPACE>" \
    $dict_dir $lang_dir/tmp $lang_dir || exit 1;
  # Compile the language-model FST and the final decoding graph TLG.fst
  local/ls_decode_graph.sh $lang_dir $lm_data $lm_tmp/tmp || exit 1;
  # make the const ngram for tglarge and fglarge
  local/ls_const_graph.sh $lang_dir $lm_data || exit 1;
fi

if [ $stage -le 3 ]; then
  echo =====================================================================
  echo " FBank Feature Generation "
  echo =====================================================================
  # combine train_clean_100, train_clean_360 and train_other_500
  mkdir -p $data/train
  for name in spk2gender spk2utt utt2spk text wav.scp ; do
    cat $data/train_clean_100/$name $data/train_clean_360/$name $data/train_other_500/$name > $data/train/$name
  done
  # Split the whole training data into training (95%) and cross-validation (5%) sets
  # utils/subset_data_dir_tr_cv.sh --cv-spk-percent 5 data/train_si284 data/train_tr95 data/train_cv05 || exit 1
  utils/subset_data_dir_tr_cv.sh --cv-spk-percent 5 $data/train $data/train_tr95 $data/train_cv05 || exit 1
  # Byte-wise sort order keeps Kaldi's table code happy.
  export LC_ALL=C
  for set in train_tr95 train_cv05 dev_clean test_clean dev_other test_other ; do
    for name in utt2spk spk2utt text wav.scp spk2gender ; do
      cat $data/$set/$name | sort -k1 > $data/$set/$name.b
      mv $data/$set/$name.b $data/$set/$name
    done
  done
  # Generate the fbank features; by default 40-dimensional fbanks on each frame
  fbankdir=fbank
  for set in train_tr95 train_cv05 ; do
    steps/make_fbank.sh --cmd "$train_cmd" --nj 14 --fbank-config ${fb_conf}-10 $data/$set $data/make_fbank/$set $data/$fbankdir || exit 1;
    utils/fix_data_dir.sh $data/$set || exit;
    steps/compute_cmvn_stats.sh $data/$set $data/make_fbank/$set $data/$fbankdir || exit 1;
  done
  for set in dev_clean test_clean dev_other test_other ; do
    steps/make_fbank.sh --cmd "$train_cmd" --nj 14 --fbank-config ${fb_conf}-10 $data/$set $data/make_fbank/$set $data/$fbankdir || exit 1;
    utils/fix_data_dir.sh $data/$set || exit;
    steps/compute_cmvn_stats.sh $data/$set $data/make_fbank/$set $data/$fbankdir || exit 1;
  done
fi

if [ $stage -le 4 ]; then
  echo =====================================================================
  echo " TFRecords Generation "
  echo =====================================================================
  datadir=$tfdata
  mkdir -p $datadir
  mkdir -p $datadir/data
  # Label sequences; simply convert words into their label indices
  # In tensorflow, the <blk> index is n-1
  if [ ! -e $datadir/label.tr.scp ] || [ ! -e $datadir/label.tr.ark ]; then
    utils/prep_ctc_trans.py $lang_dir/lexicon_numbers.txt $data/train_tr95/text "<UNK>" "<SPACE>" | \
      awk -v s=1 '{printf $1 " "; for(i=2;i<=NF;i++)printf($i-s)" "};{print FS}' | \
      copy-int-vector ark:- ark,scp:$datadir/label.tr.ark,$datadir/label.tr.scp || exit 1;
  fi
  if [ ! -e $datadir/label.cv.scp ] || [ ! -e $datadir/label.cv.ark ]; then
    utils/prep_ctc_trans.py $lang_dir/lexicon_numbers.txt $data/train_cv05/text "<UNK>" "<SPACE>" | \
      awk -v s=1 '{printf $1 " "; for(i=2;i<=NF;i++)printf($i-s)" "};{print FS}' | \
      copy-int-vector ark:- ark,scp:$datadir/label.cv.ark,$datadir/label.cv.scp || exit 1;
  fi
  ## Sort utterances by length so batches contain similar-length sequences.
  if $sort_by_len; then
    feat-to-len scp:$data/train_tr95/feats.scp ark,t:- | awk '{print $2}' > $dir/len.tmp || exit 1;
    # NOTE(review): $min_len is never assigned in this script, so awk sees an
    # empty m and the length filter is effectively a no-op — confirm intent.
    paste -d " " $data/train_tr95/feats.scp $dir/len.tmp | sort -k3 -n - | awk -v m=$min_len '{ if ($3 >= m) {print $1 " " $2} }' > $dir/train.scp || exit 1;
    feat-to-len scp:$data/train_cv05/feats.scp ark,t:- | awk '{print $2}' > $dir/len.tmp || exit 1;
    paste -d " " $data/train_cv05/feats.scp $dir/len.tmp | sort -k3 -n - | awk '{print $1 " " $2}' > $dir/cv.scp || exit 1;
    rm -f $dir/len.tmp
    feats_tr="cat $dir/train.scp |"
    feats_cv="cat $dir/cv.scp |"
    feats_tr="$feats_tr copy-feats scp:- ark:- |"
    feats_cv="$feats_cv copy-feats scp:- ark:- |"
  else
    feats_tr="cat $data/train_tr95/feats.scp | utils/shuffle_list.pl --srand ${seed:-777}|"
    feats_cv="cat $data/train_cv05/feats.scp | utils/shuffle_list.pl --srand ${seed:-777}|"
    feats_tr="$feats_tr copy-feats scp:- ark:- |"
    feats_cv="$feats_cv copy-feats scp:- ark:- |"
  fi
  # Per-speaker cepstral mean/variance normalization.
  feats_tr="$feats_tr apply-cmvn --norm-vars=$norm_vars --utt2spk=ark:$data/train_tr95/utt2spk scp:$data/train_tr95/cmvn.scp ark:- ark:- |"
  feats_cv="$feats_cv apply-cmvn --norm-vars=$norm_vars --utt2spk=ark:$data/train_cv05/utt2spk scp:$data/train_cv05/cmvn.scp ark:- ark:- |"
  if $add_deltas; then
    feats_tr="$feats_tr add-deltas ark:- ark:- |"
    feats_cv="$feats_cv add-deltas ark:- ark:- |"
  fi
  ## End of feature setup
  if [ ! -e $datadir/tfrecords.tr.scp ]; then
    echo "[$(date +'%Y/%m/%d %H:%M:%S')] tfrecords.tr.scp"
    nnet_input="ark:$feats_tr"
    nnet_target="scp:$datadir/label.tr.scp"
    echo $nnet_input
    echo ${nnet_target:+ --nnet-target="$nnet_target"}
    python bin/convert-to-tfrecords.py \
      ${nnet_target:+ --nnet-target="$nnet_target"} \
      --check-length=false --target-length-cutoff=$target_length_cutoff \
      "$nnet_input" $datadir/data $datadir/tfrecords.tr.scp || exit 1
  fi
  if [ ! -e $datadir/tfrecords.cv.scp ]; then
    echo "[$(date +'%Y/%m/%d %H:%M:%S')] tfrecords.cv.scp"
    nnet_input="ark:$feats_cv"
    nnet_target="scp:$datadir/label.cv.scp"
    python bin/convert-to-tfrecords.py \
      ${nnet_target:+ --nnet-target="$nnet_target"} \
      --check-length=false \
      "$nnet_input" $datadir/data $datadir/tfrecords.cv.scp || exit 1
  fi
fi

# The number of targets equals [the number of labels] + 1 (the blank).
# FIX: use standard $(( )) arithmetic instead of the deprecated $[ ] form.
num_targets=$(cat $data/dict/units.txt | wc -l); num_targets=$((num_targets+1));

# Compute the occurrence counts of labels in the label sequences. These counts will be used to derive prior probabilities of
# the labels.
if [ ! -e $dir/label.counts ]; then
  utils/prep_ctc_trans.py $lang_dir/lexicon_numbers.txt $data/train_tr95/text "<UNK>" "<SPACE>" | gzip -c - > $dir/labels.tr.gz
  gunzip -c $dir/labels.tr.gz | awk '{line=$0; gsub(" "," 0 ",line); print line " 0";}' | \
    analyze-counts --verbose=1 --binary=false ark:- $dir/label.counts >& $dir/compute_label_counts.log || exit 1
fi
# FIX: the counts file written above is $dir/label.counts (singular); the
# original assigned $dir/labels.counts here, pointing the trainer at a file
# that never exists.
prior_label_path=$dir/label.counts

if [ $stage -le 5 ]; then
  echo =====================================================================
  echo " Network Training "
  echo =====================================================================
  # Select the training driver according to the learning-rate decay scheme.
  if [ "$use_decay" == "1" ] ; then
    train_script=scripts/decay_train.sh
  elif [ "$use_decay" == "2" ] ; then
    train_script=scripts/train_oplr.sh
  else
    train_script=scripts/train.sh
  fi
  nnet_config=$dir/nnet.config
  (echo "nnet_type = $nnet_type"
  echo "input_dim = $input_dim"
  echo "left_context = $left_context"
  echo "right_context = $right_context"
  echo "subsample = $subsample"
  echo "num_layers = $num_layers"
  echo "num_neurons = $num_neurons"
  echo "num_projects = $num_projects"
  echo "num_targets = $num_targets"
  echo "use_peepholes = true"
  echo "use_bn = $use_bn"
  echo "dropout_rate = $dropout_rate"
  echo "num_experts = $num_experts"
  echo "uniform_label_sm = $uniform_label_sm"
  echo "prior_label_sm = $prior_label_sm"
  echo "prior_label_path = $prior_label_path"
  echo "seed = 777") > $nnet_config
  # NOTE(review): "decode_dev_clearn" below looks like a typo for
  # "decode_dev_clean" — kept as-is for compatibility with existing results.
  $train_script \
    --objective "ctc" \
    --report-interval $report_interval \
    --batch-size $batch_size \
    --batch-threads $batch_threads \
    --tr-tfrecords-scp $tfdata/tfrecords.tr.scp \
    --cv-tfrecords-scp $tfdata/tfrecords.cv.scp \
    --nnet-config $nnet_config \
    --learn-rate $learn_rate \
    --optimizer $optimizer \
    --cv_goal loss \
    --num_targets $num_targets \
    --decode_graph_dir $data/lang_test_tgmed \
    --decode_data_dir $data/dev_clean \
    --decode_name decode_dev_clearn \
    --dir $dir > $dir/train_log || exit 1
fi

if [ $stage -le 6 ]; then
  echo =====================================================================
  echo " Decoding "
  echo =====================================================================
  # Decoding with the librispeech dict
  for test in test_clean test_other dev_clean dev_other; do
    for lm_suffix in tgsmall tgmed; do
      scripts/decode_ctc_lat.sh --cmd "$decode_cmd" --nj 8 --beam 17.0 --lattice_beam 8.0 --max-active 5000 --acwt 0.9 --ntargets $num_targets \
        $data/lang_test_${lm_suffix} $data/$test $dir/decode_${test}_${lm_suffix} || exit 1;
    done
    # Rescore the tgmed lattices with the large const-arpa LMs.
    for lm_suffix in tglarge fglarge ; do
      steps/lmrescore_const_arpa.sh \
        --cmd "$decode_cmd" $data/lang_test_tgmed $data/lang_test_const_${lm_suffix} \
        $data/$test $dir/decode_${test}_{tgmed,${lm_suffix}} || exit 1;
    done
  done
fi
|
Blockchain technology can be used in a variety of applications, such as financial services, smart contracts, tracking of physical assets, healthcare, voting, digital identities, and digital rights management. In the financial services industry, blockchain could facilitate faster and more secure payments and investments. Smart contracts executed on a blockchain can automate both entering into an agreement and carrying it out. Blockchain can also track and record physical assets — cars, for example, or goods moving through a supply chain — helping to prevent counterfeiting and fraud. In healthcare, it can securely store and manage medical records while protecting patient privacy. It can likewise support secure online voting and the storage and management of digital identities. Finally, blockchain technology can be used to securely store and manage digital rights.
package com.portfolio.bugtracker.repositories;
import com.portfolio.bugtracker.models.Status;
import org.springframework.data.repository.CrudRepository;
/**
 * Spring Data repository for {@link Status} entities.
 * All CRUD operations are inherited from {@link CrudRepository};
 * no custom query methods are declared.
 */
public interface StatusRepository extends CrudRepository<Status, Long>
{
}
|
<gh_stars>1-10
// Copyright 2021 99cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { hypervisorListUrl } from '../../../support/constants';
// End-to-end tests for the admin Hypervisor page: listing, detail view,
// and the Compute Hosts tab (search / disable / enable actions).
describe('The Hypervisor Page', () => {
  const listUrl = hypervisorListUrl;

  // Every test starts freshly logged in as admin on the hypervisor list.
  beforeEach(() => {
    cy.loginAdmin(listUrl);
  });

  it('successfully list', () => {
    cy.tableSearchText('node');
  });

  it('successfully detail', () => {
    // Open the first row's detail page, let it render, then return.
    cy.goToDetail(0).wait(5000).goBackToList(listUrl);
  });

  it('successfully compute host', () => {
    cy.clickTab('Compute Hosts', 'ComputeHost').tableSearchText('node');
  });

  it('successfully disable compute host', () => {
    cy.clickTab('Compute Hosts')
      .clickActionButtonByTitle('Disable')
      .formText('disabled_reason', 'e2e-test')
      .clickModalActionSubmitButton();
  });

  it('successfully enable compute host', () => {
    // Filter to the host disabled by the previous test, then re-enable it.
    cy.clickTab('Compute Hosts')
      .tableSearchSelect('Service Status', 'Disabled')
      .clickActionButtonByTitle('Enable')
      .clickConfirmActionSubmitButton();
  });
});
|
<gh_stars>0
// Height of the jumbotron body, captured once at load time.
var jumboHeight = $('.intro-body').outerHeight();

// Shrink the .intro element as the user scrolls, producing a simple
// parallax collapse effect tied to the scroll offset.
function parallax() {
  var scrollOffset = $(window).scrollTop();
  var remainingHeight = jumboHeight - scrollOffset;
  $('.intro').css('height', remainingHeight + 'px');
}

// Recompute on every scroll event.
$(window).scroll(function (e) {
  parallax();
});
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.Timer;
/**
 * Modal "Connecting..." dialog with an animated progress bar.
 * <p>
 * The bar advances on a Swing {@link Timer}; callers can temporarily
 * accelerate it with {@link #step()}, drive it to completion with
 * {@link #finish()}, or abort with {@link #cancel()}.
 */
public class ProgressBar extends JDialog {
	private static final long serialVersionUID = 6241161656154797719L;

	/** Default delay between updating */
	private static final int SLEEP_TIME = 200;
	/** Maximum value for the progress bar */
	private static final int MAX_VALUE = 100;
	/** Default step size */
	private static final int STEP_SIZE = MAX_VALUE / 50;

	private JProgressBar progressBar;
	/** Speed factor for updating the bar */
	private int stepSizeMultiplier = 1;
	/**
	 * Keeps track of how many times it has looped with a multiplier greater
	 * than 0
	 */
	private int stepCounter;
	private final Timer timer = new Timer(SLEEP_TIME, new Updater());

	/**
	 * Create a new ProgressBar.
	 *
	 * @param w parent dialog
	 */
	public ProgressBar(final JDialog w) {
		super(w, "Connecting...", true);
		initializeComponents();
		this.pack();
		setLocationRelativeTo(w);
	}

	/** Build the dialog contents: a label stacked above the progress bar. */
	private void initializeComponents() {
		JPanel contentPane = (JPanel) this.getContentPane();
		contentPane.setLayout(new BoxLayout(contentPane, BoxLayout.PAGE_AXIS));
		contentPane.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
		contentPane.add(new JLabel("Connecting..."));
		contentPane.add(Box.createVerticalStrut(5));
		progressBar = new JProgressBar(0, MAX_VALUE);
		progressBar.setStringPainted(false);
		contentPane.add(progressBar);
	}

	/**
	 * Timer task that updates the progress bar.
	 */
	private class Updater implements ActionListener {
		private int counter = 0;

		@Override
		public void actionPerformed(ActionEvent arg0) {
			counter += STEP_SIZE * stepSizeMultiplier;
			progressBar.setValue(counter);
			// Count down the temporary speed boost set by step()/finish().
			if (stepCounter >= 0) {
				if (stepCounter == 0) {
					stepSizeMultiplier = 1;
				}
				stepCounter--;
			}
			// FIX: compare against the named constant instead of the magic
			// literal 100, so the dialog still auto-closes if MAX_VALUE is
			// ever changed.
			if (counter > MAX_VALUE) {
				cancel();
			}
		}
	}

	/** Start updating the progress bar */
	public void start() {
		timer.start();
		setVisible(true);
	}

	/**
	 * Temporarily speeds up the bar.
	 */
	public void step() {
		stepCounter = 3;
		stepSizeMultiplier = 3;
	}

	/**
	 * Speeds up to quickly finish.
	 */
	public void finish() {
		stepCounter = 20;
		stepSizeMultiplier = 3;
		timer.setDelay(15);
	}

	/**
	 * Exits quickly.
	 */
	public void cancel() {
		timer.stop();
		// workaround near failures in AWT at openjdk (tested on openjdk-1.6.0.0)
		try {
			this.dispose();
		} catch (NullPointerException npe) {
			return;
		}
	}
}
|
#!/bin/bash
# Stop on the first failure so pip does not run after a failed apt install.
set -euo pipefail

# Use apt-get (the stable CLI intended for scripts) with -y so the install
# works non-interactively.
sudo apt-get install -y python3-libtorrent

# Python GUI / parsing dependencies.
pip3 install PyQt5 lxml quamash python-Levenshtein
|
<reponame>raulrozza/Gametask_Web<filename>src/shared/infra/routes/index.tsx
import React, { Suspense } from 'react';
// Components
import { BrowserRouter, Link, Route, Switch } from 'react-router-dom';
import { Loading, NotFound } from 'shared/view/components';
// Hooks
import { useSessionContext } from 'shared/view/contexts';
// Routes
import LoggedRoutes from './logged.routes';
import PublicRoutes from './public.routes';
import SelectedGameRoutes from './selectedGame.routes';
/**
 * Root router: picks the active route set from the session state
 * (public / logged-in / game-selected) and renders a 404 fallback
 * for unmatched paths.
 */
const Routes: React.FC = () => {
  const session = useSessionContext();

  // Wait until the stored session has been restored before routing.
  if (session.loading) return <Loading />;

  return (
    <BrowserRouter>
      <Suspense fallback={<Loading />}>
        <Switch>
          {!session.userToken && PublicRoutes()}
          {session.userToken && !session.selectedGame && LoggedRoutes()}
          {session.userToken && session.selectedGame && SelectedGameRoutes()}
          <Route>
            <NotFound
              message={
                <>
                  404: Página não encontrada. <br />
                  Está perdido? Vá para a nossa{' '}
                  <Link to="/">Página Principal</Link>.
                </>
              }
            />
          </Route>
        </Switch>
      </Suspense>
    </BrowserRouter>
  );
};
export default Routes;
|
#!/usr/bin/env bash
# shellcheck disable=SC1091,SC2154
# Copyright 2021 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Bash safeties: exit on error, no unset variables, pipelines can't hide errors
set -o errexit
set -o nounset
set -o pipefail
# Locate the root directory
ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
# Run common.sh script for validation
# NOTE(review): TF_VAR_name, TF_VAR_project_id and TF_VAR_environment are
# consumed below but not set here — presumably exported by common.sh or the
# caller's environment; confirm (set -o nounset will abort if they are unset).
source "${ROOT}/scripts/common.sh"
# Set environment variables
# Create a gcs bucket through cli to store terraform statefiles
# Create google storage bucket for the terraform backend.
export bucket_name="${TF_VAR_name}-${TF_VAR_project_id}-statefiles"
# Create Google cloud storage bucket to store the state files.
python3 "${ROOT}/scripts/make_bucket.py"
# Generate Backend Template to store Terraform State files.
readonly backend_config="terraform {
backend \"gcs\" {
bucket = \"${bucket_name}\"
prefix = \"terraform/${TF_VAR_environment}/\"
}
}"
# Terraform initialize should run on env folder.
cd "${ROOT}/env"
echo -e "${backend_config}" > backend.tf
# Terraform initialize the backend bucket
terraform init -input=false
# Create workspace based on the environment, by doing this you don't overlap wih the resources in different environments.
terraform workspace new "$TF_VAR_environment" || terraform workspace select "$TF_VAR_environment"
#make validate : this command will validate the terraform code
terraform validate
# Functions Script to enable google api's
source "${ROOT}/scripts/enable.sh"
|
#!/bin/bash
# rust-china is an alias to rust-china.org server in $HOME/.ssh/config
# Stop on the first failure so a broken generate is never synced, and treat
# unset variables as errors.
set -euo pipefail

server=rust-china
remote_path=/home/tennix/rust-china.org/public

echo "Generate site..."
hexo generate

echo "Sync site to remote machine..."
# Quote the remote spec so unusual characters in either variable stay safe.
rsync -r public/ "${server}:${remote_path}"
|
// Human-readable labels for load-balancer member operating statuses.
// NOTE(review): `t` is presumably the i18n translate function imported
// elsewhere in this module — confirm it is in scope here.
export const operatingStatusCodes = {
  ONLINE: t('Online'),
  DRAINING: t('Draining'),
  OFFLINE: t('Offline'),
  DEGRADED: t('Degraded'),
  ERROR: t('Error'),
  NO_MONITOR: t('No Monitor'),
};

// Human-readable labels for load-balancer provisioning statuses.
export const provisioningStatusCodes = {
  ACTIVE: t('Active'),
  DELETED: t('Deleted'),
  ERROR: t('Error'),
  PENDING_CREATE: t('Pending Create'),
  PENDING_UPDATE: t('Pending Update'),
  PENDING_DELETE: t('Pending Delete'),
};
|
<reponame>EsriJapan/gcf2018-geoevent-twitter
package com.esrij.geoevent.solutions.processor.textanalysis;
import com.esri.ges.core.property.PropertyDefinition;
import com.esri.ges.core.property.PropertyException;
import com.esri.ges.core.property.PropertyType;
import com.esri.ges.processor.GeoEventProcessorDefinitionBase;
/**
 * GeoEvent processor definition for the text-analysis processor: declares
 * the configurable properties (all label/description strings are i18n keys
 * resolved from the bundle) plus the processor's identifying metadata.
 */
public class TextAnalysisProcessorDefinition extends GeoEventProcessorDefinitionBase {
	public TextAnalysisProcessorDefinition() throws PropertyException {
		// Name of the incoming field containing the text to analyse.
		PropertyDefinition pdTextFieldName = new PropertyDefinition("textfield", PropertyType.String, "", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.LBL_TEXT_FIELD}", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.DESC_TEXT_FIELD}", true, false);
		propertyDefinitions.put(pdTextFieldName.getPropertyName(), pdTextFieldName);

		// Name of the GeoEvent definition created for the processor's output.
		PropertyDefinition pdGEDName = new PropertyDefinition("gedName", PropertyType.String, "TextAnalysisDef", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.LBL_GEOEVENT_DEFINITION_NAME}", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.DESC_GEOEVENT_DEFINITION_NAME}", true, false);
		propertyDefinitions.put(pdGEDName.getPropertyName(), pdGEDName);

		// Output field receiving the positive/negative sentiment score.
		PropertyDefinition pdPNSCOREField = new PropertyDefinition("pnscorefield", PropertyType.String, "pnscore", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.LBL_PNSCORE_FIELD}", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.DESC_PNSCORE_FIELD}", true, false);
		propertyDefinitions.put(pdPNSCOREField.getPropertyName(), pdPNSCOREField);

		// PropertyDefinition pdYField = new PropertyDefinition("yfield", PropertyType.String, "y", "${com.esri.geoevent.solutions.processor.addxyz.addxyz-processor.LBL_Y_FIELD}", "${com.esri.geoevent.solutions.processor.addxyz.addxyz-processor.DESC_Y_FIELD}", true, false);
		// propertyDefinitions.put(pdYField.getPropertyName(), pdYField);
		//
		// PropertyDefinition pdZField = new PropertyDefinition("zfield", PropertyType.String, "z", "${com.esri.geoevent.solutions.processor.addxyz.addxyz-processor.LBL_Z_FIELD}", "${com.esri.geoevent.solutions.processor.addxyz.addxyz-processor.DESC_Z_FIELD}", false, false);
		// propertyDefinitions.put(pdZField.getPropertyName(), pdZField);

		// Output field receiving the extracted location-group value.
		PropertyDefinition pdLocGrpField = new PropertyDefinition("locGrpField", PropertyType.String, "location_group", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.LBL_LOCATIONGRP_FIELD}", "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.DESC_LOCATIONGRP_FIELD}", true, false);
		propertyDefinitions.put(pdLocGrpField.getPropertyName(), pdLocGrpField);
	}

	/** Unique processor name used by GeoEvent Server. */
	@Override
	public String getName() {
		return "TextAnalysisProcessor";
	}

	/** Namespace that groups this processor's artifacts. */
	@Override
	public String getDomain() {
		return "com.esrij.geoevent.solutions.processor.textanalysis";
	}

	@Override
	public String getVersion() {
		return "10.6.0";
	}

	/** i18n key for the display label. */
	@Override
	public String getLabel() {
		return "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.PROCESSOR_LABEL}";
	}

	/** i18n key for the description shown in the manager UI. */
	@Override
	public String getDescription() {
		return "${com.esrij.geoevent.solutions.processor.textanalysis.textanalysis-processor.PROCESSOR_DESCRIPTION}";
	}

	@Override
	public String getContactInfo() {
		return "<EMAIL>";
	}
}
|
@RestController
public class MyController {
@GetMapping("/my-page")
public String example(@RequestParam("myParam") String param) {
return "The parameter is " + param;
}
} |
<gh_stars>0
import React from "react";
import techBlog from "../../../assets/images/tech-blog.png";
import bTracker from "../../../assets/images/bugdet-tracker.png";
import covid from "../../../assets/images/covid-positivity.png";
import dangles from "../../../assets/images/dangles.png";
import dDash from "../../../assets/images/dog-dash.png";
import rBuddy from "../../../assets/images/run-buddy.png";
function Portfolio() {
return (
<section>
<div className="card-group">
<div className="card">
<img src={techBlog} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://pacific-brook-34647.herokuapp.com/">Tech Blog</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/Rajendra-Dhanraj/Tech-Blog">
Github Repository
</a>
</small>
</p>
</div>
</div>
<div className="card">
<img src={bTracker} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://ancient-journey-18280.herokuapp.com/">
Budget Tracker
</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/Rajendra-Dhanraj/Budget-Tracker">
Github Repository
</a>
</small>
</p>
</div>
</div>
<div className="card">
<img src={covid} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://uot-project1-group07.github.io/project1-group07/">
Covid Posi+vity
</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/Uot-Project1-Group07/project1-group07">
Github Repository
</a>
</small>
</p>
</div>
</div>
</div>
<div className="card-group">
<div className="card">
<img src={dDash} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://dog-dash.herokuapp.com/">
Dog Dash
</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/pro3gro3/walk-my-dog">
Github Repository
</a>
</small>
</p>
</div>
</div>
<div className="card">
<img src={rBuddy} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://rajendra-dhanraj.github.io/run-buddy-2/">
Run Buddy
</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/Rajendra-Dhanraj/run-buddy-2">
Github Repository
</a>
</small>
</p>
</div>
</div>
<div className="card">
<img src={dangles} className="card-img-top" alt="..." />
<div className="card-body">
<h5 className="card-title">
<a href="https://intense-headland-44008.herokuapp.com/">
Dangles
</a>
</h5>
<p className="card-text">
<small className="text-muted">
<a href="https://github.com/uoft-project2-group7/project">
Github Repository
</a>
</small>
</p>
</div>
</div>
</div>
</section>
);
}
export default Portfolio;
|
<gh_stars>0
package org.rs2server.rs2.model.skills.crafting;
import org.rs2server.rs2.action.impl.ProductionAction;
import org.rs2server.rs2.model.*;
import java.util.HashMap;
import java.util.Map;
/**
* @author Clank1337
*/
/**
 * Production action that turns Zulrah drops into equipment components
 * (cutting the tanzanite fang, crafting the serpentine visage).
 */
public class ZulrahCrafting extends ProductionAction {

	/** The specific Zulrah drop this action works on. */
	private final ZulrahItems zulrahItems;

	/** Animation played when crafting the serpentine visage. */
	public static final Animation CRAFT_ANIMATION = Animation.create(1249);
	/** Animation played when cutting the tanzanite fang. */
	public static final Animation CUT_ANIM = Animation.create(6702);

	/**
	 * Creates the production action for the specified mob.
	 *
	 * @param mob         The mob to create the action for.
	 * @param zulrahItems The drop being worked on.
	 */
	public ZulrahCrafting(Mob mob, ZulrahItems zulrahItems) {
		super(mob);
		this.zulrahItems = zulrahItems;
	}

	/** Workable Zulrah drops and their production parameters. */
	public enum ZulrahItems {
		TANZANITE_FANG(5605, CUT_ANIM, 12922, 12924, 53, Skills.FLETCHING, 110),
		SERPENTINE_VISAGE(1755, CRAFT_ANIMATION, 12927, 12929, 52, Skills.CRAFTING, 120);

		// NOTE(review): requiredItem looks like a tool item id, but nothing in
		// this class reads it besides the getter — confirm against callers.
		private final int requiredItem;
		private final Animation animation;
		private final int consumed;  // item id removed from the inventory
		private final int reward;    // item id produced
		private final int levelReq;
		private final int skill;     // Skills constant (e.g. Skills.CRAFTING)
		private final int xp;

		ZulrahItems(int requiredItem, Animation animation, int consumed, int reward, int levelReq, int skill, int xp) {
			this.requiredItem = requiredItem;
			this.animation = animation;
			this.consumed = consumed;
			this.reward = reward;
			this.levelReq = levelReq;
			this.skill = skill;
			this.xp = xp;
		}

		private static final Map<Integer, ZulrahItems> zulrahItemsMap = new HashMap<>();

		/** Looks up the entry whose consumed-item id matches {@code id}, or null. */
		public static ZulrahItems of(int id) {
			return zulrahItemsMap.get(id);
		}

		static {
			for (ZulrahItems zulrahItem : ZulrahItems.values()) {
				zulrahItemsMap.put(zulrahItem.getConsumed(), zulrahItem);
			}
		}

		public int getConsumed() {
			return consumed;
		}

		public int getReward() {
			return reward;
		}

		public int getLevelReq() {
			return levelReq;
		}

		public int getXp() {
			return xp;
		}

		public int getRequiredItem() {
			return requiredItem;
		}

		public int getSkill() {
			return skill;
		}

		public Animation getAnimation() {
			return animation;
		}
	}

	@Override
	public int getCycleCount() {
		return 4;
	}

	@Override
	public int getProductionCount() {
		return 1;
	}

	@Override
	public Item[] getRewards() {
		return new Item[] {new Item(zulrahItems.getReward())};
	}

	@Override
	public Item[] getConsumedItems() {
		return new Item[] {new Item(zulrahItems.getConsumed())};
	}

	@Override
	public int getSkill() {
		return zulrahItems.getSkill();
	}

	@Override
	public int getRequiredLevel() {
		return zulrahItems.getLevelReq();
	}

	@Override
	public double getExperience() {
		return zulrahItems.getXp();
	}

	@Override
	public String getLevelTooLowMessage() {
		// Bug fix: the message previously always said "Crafting", even for the
		// tanzanite fang, whose requirement is a Fletching level.
		String skillName = zulrahItems.getSkill() == Skills.FLETCHING ? "Fletching" : "Crafting";
		return "You need a " + skillName + " level of " + zulrahItems.getLevelReq() + " to create this item.";
	}

	@Override
	public String getSuccessfulProductionMessage() {
		return "";
	}

	@Override
	public Animation getAnimation() {
		return zulrahItems.getAnimation();
	}

	@Override
	public Graphic getGraphic() {
		return null;
	}

	@Override
	public boolean canProduce() {
		return true;
	}

	@Override
	public boolean isSuccessfull() {
		// Production never fails for these items.
		return true;
	}

	@Override
	public String getFailProductionMessage() {
		return null;
	}

	@Override
	public Item getFailItem() {
		return null;
	}

	@Override
	public Sound getSound() {
		// No sound effect defined for this action.
		return null;
	}
}
|
<reponame>jepler/aocl-libm-ose
/*
* Copyright (C) 2008-2020 Advanced Micro Devices, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "fn_macros.h"
#include "libm_util_amd.h"
#include "libm_special.h"
/* Compute x * 2^n by direct manipulation of the IEEE-754 double exponent
 * field, with explicit handling of NaN, infinities, zeros, denormal inputs,
 * and overflow/underflow reporting via __amd_handle_error. */
double FN_PROTOTYPE_REF(scalbn)(double x, int n)
{
    UT64 val, val_x;      /* working copy (sign stripped) / untouched copy of x */
    unsigned int sign;    /* saved sign bit of x (high word) */
    int exponent;         /* biased exponent extracted from the high word */
    val.f64 = x;
    val_x.f64 = x;
    sign = val.u32[1] & 0x80000000;
    val.u32[1] = val.u32[1] & 0x7fffffff; /* remove the sign bit */

    if (val.u64 > 0x7ff0000000000000) /* x is NaN */
#ifdef WINDOWS
        /* Windows: report every NaN through the handler (quieting the payload). */
        return __amd_handle_error("scalbn", __amd_scalbn, val_x.u64|0x0008000000000000, _DOMAIN, 0, EDOM, x, n, 2);
#else
    {
        if(!(val.u64 & 0x0008000000000000))// x is snan
            /* Signaling NaN: raise invalid and return the quieted NaN. */
            return __amd_handle_error("scalbn", __amd_scalbn, val_x.u64|0x0008000000000000, _DOMAIN, AMD_F_INVALID, EDOM, x, n, 2);
        else
            return x; /* quiet NaN passes through unchanged */
    }
#endif

    if(val.u64 == 0x7ff0000000000000)/* x = +-inf*/
        return x;

    if((val.u64 == 0x0000000000000000) || (n==0))
        return x; /* x= +-0 or n= 0*/

    exponent = val.u32[1] >> 20; /* get the exponent */

    if(exponent == 0)/*x is denormal*/
    {
        /* Scale into the normal range first so the exponent field is usable. */
        val.f64 = val.f64 * VAL_2PMULTIPLIER_DP;/*multiply by 2^53 to bring it to the normal range*/
        exponent = val.u32[1] >> 20; /* get the exponent */
        exponent = exponent + n - MULTIPLIER_DP; /* undo the 2^53 bias */
        if(exponent < -MULTIPLIER_DP)/*underflow*/
        {
            val.u32[1] = sign | 0x00000000;
            val.u32[0] = 0x00000000;
            return __amd_handle_error("scalbn", __amd_scalbn, val.u64, _UNDERFLOW, AMD_F_INEXACT|AMD_F_UNDERFLOW, ERANGE, x, (double)n, 2);
        }
        if(exponent > 2046)/*overflow*/
        {
            /* Return signed infinity through the error handler. */
            val.u32[1] = sign | 0x7ff00000;
            val.u32[0] = 0x00000000;
            return __amd_handle_error("scalbn", __amd_scalbn, val.u64, _OVERFLOW, AMD_F_INEXACT|AMD_F_OVERFLOW, ERANGE, x, (double) n, 2);
        }
        /* Result may still be denormal: re-apply the 2^53 bias, splice the
         * exponent, then scale back down by 2^-53. */
        exponent += MULTIPLIER_DP;
        val.u32[1] = sign | (exponent << 20) | (val.u32[1] & 0x000fffff);
        val.f64 = val.f64 * VAL_2PMMULTIPLIER_DP;
        return val.f64;
    }

    exponent += n;

    if(exponent < -MULTIPLIER_DP)/*underflow*/
    {
        val.u32[1] = sign | 0x00000000;
        val.u32[0] = 0x00000000;
        return __amd_handle_error("scalbn", __amd_scalbn, val.u64, _UNDERFLOW, AMD_F_INEXACT|AMD_F_UNDERFLOW, ERANGE, x, (double)n, 2);
    }

    if(exponent < 1)/*x is normal but output is debnormal*/
    {
        /* Bias by 2^53, splice the exponent, then scale down to the denormal. */
        exponent += MULTIPLIER_DP;
        val.u32[1] = sign | (exponent << 20) | (val.u32[1] & 0x000fffff);
        val.f64 = val.f64 * VAL_2PMMULTIPLIER_DP;
        return val.f64;
    }

    if(exponent > 2046)/*overflow*/
    {
        val.u32[1] = sign | 0x7ff00000;
        val.u32[0] = 0x00000000;
        return __amd_handle_error("scalbn", __amd_scalbn, val.u64, _OVERFLOW, AMD_F_INEXACT|AMD_F_OVERFLOW, ERANGE, x, (double) n, 2);
    }

    /* Normal in, normal out: just rewrite the exponent field. */
    val.u32[1] = sign | (exponent << 20) | (val.u32[1] & 0x000fffff);
    return val.f64;
}
|
def calculate_mean(nums):
    """Return the arithmetic mean of ``nums``.

    Args:
        nums: A non-empty sequence of numbers (must support ``len``).

    Raises:
        ValueError: If ``nums`` is empty (instead of the original's opaque
            ``ZeroDivisionError``).
    """
    if not nums:
        raise ValueError("calculate_mean() requires at least one value")
    # sum() replaces the manual accumulation loop.
    return sum(nums) / len(nums)
"""
Secret file encoder.
"""
import pathlib
from typing import Iterator
from .bitmap import Bitmap
class EncodingError(Exception):
    """Raised when the bitmap is too small to hold the secret payload."""
    pass
class SecretFile:
    """Convenience class for serializing secret data."""

    def __init__(self, path: pathlib.Path):
        # The serialized form is the NUL-terminated UTF-8 file name,
        # followed by the raw file content.
        self.path = path
        self.filename = path.name.encode("utf-8") + b"\x00"
        self.size_bytes = path.stat().st_size

    @property
    def num_secret_bytes(self) -> int:
        """Total number of bytes including the null-terminated string."""
        return self.size_bytes + len(self.filename)

    @property
    def secret_bytes(self) -> Iterator[int]:
        """Null-terminated name followed by the file content."""
        for name_byte in self.filename:
            yield name_byte
        yield from self.path.read_bytes()
def encode(bitmap: Bitmap, path: pathlib.Path) -> None:
    """Embed a secret file in the bitmap.

    The payload is the NUL-terminated file name followed by the file's raw
    bytes; each payload bit is written into the least-significant bit of one
    carrier byte supplied by ``bitmap.byte_slices``.

    Raises:
        EncodingError: If the bitmap has too few usable bytes for the payload.
    """
    file = SecretFile(path)
    if file.num_secret_bytes > bitmap.max_bytes:
        raise EncodingError("Not enough pixels to embed a secret file")
    # NOTE(review): only the file size (not the name length) is stored in the
    # header — presumably the decoder locates the name via the NUL; confirm.
    bitmap.reserved_field = file.size_bytes
    for secret_byte, eight_bytes in zip(file.secret_bytes, bitmap.byte_slices):
        # Bits of this payload byte, most-significant first.
        secret_bits = [(secret_byte >> i) & 1 for i in reversed(range(8))]
        # Rewrite the eight carrier bytes, forcing each LSB to the secret bit.
        bitmap[eight_bytes] = bytes(
            [
                byte | 1 if bit else byte & ~1
                for byte, bit in zip(bitmap[eight_bytes], secret_bits)
            ]
        )
    print("Secret file was embedded in the bitmap")
|
public class ArrayCreate {

    /** Demonstrates creating, filling and printing an 11-element int array. */
    public static void main(String[] args) {
        final int size = 11;

        // Fill the array with the integers 0..10.
        int[] values = new int[size];
        for (int i = 0; i < size; i++) {
            values[i] = i;
        }

        // Build "0 1 2 ... 10 " (trailing space, as before) and print it
        // followed by a newline — identical output to the original.
        StringBuilder line = new StringBuilder();
        for (int value : values) {
            line.append(value).append(' ');
        }
        System.out.println(line.toString());
    }
}
require "springcm-sdk/folder"
require_relative "builder"
# Builder for SpringCM Folders. Declares defaulted/validated properties via
# the Builder DSL and assembles the JSON-like hash a Springcm::Folder expects.
class FolderBuilder < Builder
  # NOTE(review): UUID.generate here is evaluated once at class-definition
  # time, so every builder shares the same default UID unless one is set
  # explicitly — confirm whether a per-instance default (a lambda, like
  # :path below) was intended.
  property :uid, default: UUID.generate, validate: -> (uid) {
    raise ArgumentError.new("Invalid UID #{uid.inspect}") if !UUID.validate(uid)
  }
  property :name, default: "Folder"
  # Path defaults to "<account root or parent path>/<name>".
  property :path, default: -> (builder) {
    dir = "/#{builder.client.account.name}"
    if !builder.parent.nil?
      dir = builder.parent.path
    end
    "#{dir}/#{builder.name}"
  }
  property :description, default: "A folder"
  property :created_date, default: Time.utc(2000, "jan", 1, 0, 0, 0)
  property :updated_date, default: Time.utc(2000, "jan", 1, 0, 0, 0)
  property :created_by, default: "FolderBuilder"
  property :updated_by, default: "FolderBuilder"
  # Access flags: any subset of the allowed set below.
  property :access, default: Set[:see, :read, :write, :move, :create, :set_access], validate: -> (*args) {
    allowed = Set[:see, :read, :write, :move, :create, :set_access]
    new_access = Set[*args]
    invalid = new_access - allowed
    if invalid.size > 0
      raise ArgumentError.new("Invalid access setting(s) #{invalid.inspect}")
    end
  }, collect: -> (*args) { Set[*args] }
  property :parent, type: Springcm::Folder, validate: -> (*args) {
    folder = args.first
    if !folder.is_a?(Springcm::Folder)
      raise ArgumentError.new("Invalid parent folder (must be a Springcm::Folder)")
    end
  }

  def initialize(client)
    super
  end

  # Returns a Springcm::Folder built from #data, or nil when invalid.
  def build
    return nil if !valid?
    Springcm::Folder.new(data, client)
  end

  def valid?
    !uid.nil?
  end

  # Hash in the shape the SpringCM API returns for a folder resource.
  def data
    hash = {
      "Name" => "#{name}",
      "CreatedDate" => "#{created_date.strftime("%FT%T.%3NZ")}",
      "CreatedBy" => "#{created_by}",
      "UpdatedDate" => "#{updated_date.strftime("%FT%T.%3NZ")}",
      "UpdatedBy" => "#{updated_by}",
      "Description" => "#{description}",
      # NOTE(review): uses @uid directly while every other entry goes through
      # the accessor — confirm both resolve to the same value in the DSL.
      "BrowseDocumentsUrl" => "https://uatna11.springcm.com/atlas/Link/Folder/0/#{@uid}",
      "Path" => path,
      "AccessLevel" => {
        "See" => access.include?(:see),
        "Read" => access.include?(:read),
        "Write" => access.include?(:write),
        "Move" => access.include?(:move),
        "Create" => access.include?(:create),
        "SetAccess" => access.include?(:set_access)
      },
      "AttributeGroups" => {}, # TODO: Allow building attributes
      "Documents" => {
        "Href" => "#{client.object_api_url}/folders/#{uid}/documents"
      },
      "Folders" => {
        "Href" => "#{client.object_api_url}/folders/#{uid}/folders"
      },
      "ShareLinks" => {
        "Href" => "#{client.object_api_url}/folders/#{uid}/sharelinks"
      },
      "CreateDocumentHref" => "#{client.upload_api_url}/folders/#{uid}/documents{?name}",
      "Href" => "#{client.object_api_url}/folders/#{uid}"
    }
    # Only present when a parent folder was supplied.
    if !parent.nil?
      hash.merge!({
        "ParentFolder" => {
          "Href" => parent.href
        }
      })
    end
    return hash
  end
end
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
# Training data: inputs 1..10 with a roughly quadratic response.
x_train = np.array([[1], [2], [3], [4], [5], [6], [7], [8], [9], [10]])
y_train = np.array([[3], [5], [7], [9], [11], [14], [17], [22], [25], [27]])

# Degree-2 polynomial regression: a linear model fit on
# polynomially-expanded features.
pr = LinearRegression()
quadratic = PolynomialFeatures(degree=2)
x_train_quad = quadratic.fit_transform(x_train)

# Fit the model on the expanded features.
pr.fit(x_train_quad, y_train)

# Predict the fitted curve at the training inputs.
y_pred = pr.predict(x_train_quad)

# Visualize the training data against the fitted curve.
plt.scatter(x_train, y_train, label='Training Data')
plt.plot(x_train, y_pred, label='Polynomial Regression', color='red')
plt.legend()
plt.show()
import * as React from 'react';
import {
Audio,
BallTriangle,
Bars,
Circles,
Grid,
Hearts,
Oval,
Puff,
Rings,
SpinningCircles,
TailSpin,
ThreeDots,
} from '../../.';
import SpinnerSection from './Spinner';
// Spinner components paired with the display width each looks best at.
const spinners = [
  { indicator: Audio, width: 40 },
  { indicator: BallTriangle, width: 50 },
  { indicator: Bars, width: 40 },
  { indicator: Circles, width: 50 },
  { indicator: Grid, width: 40 },
  { indicator: Hearts, width: 80 },
  { indicator: Oval, width: 50 },
  { indicator: Puff, width: 50 },
  { indicator: Rings, width: 60 },
  { indicator: TailSpin, width: 50 },
  { indicator: ThreeDots, width: 60 },
  { indicator: SpinningCircles, width: 50 },
];

// Renders one section per spinner.
const IconList = () => {
  return (
    <ul>
      {spinners.map(({ indicator: Indicator, width }, index) => (
        // Bug fix: the React `key` must go on the repeated element
        // (SpinnerSection), not on the element passed via the `indicator` prop.
        <SpinnerSection
          key={index}
          indicator={<Indicator width={width} />}
        />
      ))}
    </ul>
  );
};

export default IconList;
|
<reponame>wuhou123/mpvue-shares
// Crawler: thin wrappers around third-party stock/news HTTP endpoints.
const ajax = require('./ajax.js')
const request = require('request-promise')
// Bug fix: 'iconv' was assigned without a declaration, creating an implicit
// global (and a ReferenceError under strict mode).
const iconv = require('iconv-lite')
// Remote endpoints scraped by this module (10jqka, wedengta, wallstreetcn,
// xuangubao) plus the WeChat jscode2session login endpoint.
const URL = {
  calendar:
    'http://comment.10jqka.com.cn/tzrl/getTzrlData.php?callback=callback_dt&type=data',
  market: 'http://q.10jqka.com.cn/api.php?t=indexflash&',
  search:
    'http://qd.10jqka.com.cn/quote.php?cate=real&type=stock&return=json&callback=showStockData&callback=jQuery183031558640597485343_1554345337454',
  jetton: 'http://doctor.10jqka.com.cn/',
  rankList: 'https://sec.wedengta.com/getSecInfo',
  rankReal: 'https://sec.wedengta.com/getMarketQuotation',
  news: 'https://api.wallstreetcn.com/apiv1/content/fabricate-articles',
  newsDetail: 'https://api.wallstreetcn.com/apiv1/content/articles/',
  lives: 'https://api.xuangubao.cn/api/messages/live',
  sessionKey: 'https://api.weixin.qq.com/sns/jscode2session?'
}
module.exports = {
getCalendar() {
let date =
`${new Date().getFullYear()}${
(new Date().getMonth() + 1).toString().length < 10
? '0' + (new Date().getMonth() + 1)
: new Date().getMonth() + 1
}` || ''
let options = {
url: URL.calendar,
qs: {
date: date
},
encoding: null
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(iconv.decode(res, 'gb2312'))
})
.catch(error => {
console.log(error)
reject('获取韭菜日历失败!')
})
})
},
getMarket() {
let options = {
url: URL.market
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取行情失败!')
})
})
},
getSearch(item) {
let options = {
method: 'GET',
url: URL.search,
qs: {
code: item.id
},
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取详情失败!')
})
})
},
getStockJetton(item) {
let options = {
method: 'GET',
url: URL.jetton + item.id,
qs: {
code: item.id
},
json: true,
allow_redirects: false,
encoding: null,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(iconv.decode(res, 'gb2312'))
})
.catch(error => {
console.log(error)
reject('获取详情失败!')
})
})
},
getRankList(item) {
let options = {
method: 'GET',
url: URL.rankList,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取详情失败!')
})
})
},
getRankReal(item) {
let options = {
method: 'GET',
url: URL.rankReal,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取详情失败!')
})
})
},
getNews(item) {
let options = {
method: 'GET',
url: URL.news,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取新闻失败!')
})
})
},
getDetail(item) {
let options = {
method: 'GET',
url: URL.newsDetail + item.id,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取新闻详情失败!')
})
})
},
getLives(item) {
let options = {
method: 'GET',
url: URL.lives,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取直播失败!')
})
})
},
getSessionKey(item) {
let options = {
method: 'GET',
url: URL.sessionKey,
qs: item,
json: true,
allow_redirects: false,
headers: {
'content-type': 'application/json'
}
}
return new Promise((resolve, reject) => {
request(options)
.then(res => {
resolve(res)
})
.catch(error => {
console.log(error)
reject('获取session_key失败!')
})
})
}
}
|
def join_sorted_lists(lst_1, lst_2):
    """Return a new sorted list containing every element of both inputs.

    Neither input list is modified.
    """
    return sorted(lst_1 + lst_2)


print(join_sorted_lists([2, 5, 8], [1, 3, 4]))
// Set up the database object
// NOTE(review): CreateDatabaseObject and Router are not defined in this file —
// presumably provided by the surrounding framework; confirm.
var db = CreateDatabaseObject();

// Set up router
var router = new Router();

// GET /list — renders every item in the database.
router.route("/list", function (req, res) {
  // Get all items from the database
  var items = db.getItems();
  // Render the list page
  res.render("list", {items:items});
});

// GET /search?q=... — renders items matching the query.
router.route("/search", function (req, res) {
  // Get query from the request
  var query = req.query.q;
  // Search the database for item matching the query
  var items = db.searchItems(query);
  // Render the list page
  res.render("list", {items:items});
});

// GET /detail/:id — renders the detail page for one item.
router.route("/detail/:id", function (req, res) {
  // Get item ID from the request
  var itemId = req.params.id;
  // Get item details from the database
  var itemDetails = db.getItemDetails(itemId);
  // Render the detail page
  res.render("detail", {item:itemDetails});
});
import { async, ComponentFixture, TestBed } from "@angular/core/testing";

import { ResultViewerComponent } from "./result-viewer.component";

// Smoke test: the component can be compiled and instantiated by Angular.
describe("ResultViewerComponent", () => {
  let component: ResultViewerComponent;
  let fixture: ComponentFixture<ResultViewerComponent>;

  // Compile the component's template/styles once per test (async).
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ResultViewerComponent ]
    })
    .compileComponents();
  }));

  // Fresh fixture and change detection pass before each spec.
  beforeEach(() => {
    fixture = TestBed.createComponent(ResultViewerComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it("should create", () => {
    expect(component).toBeTruthy();
  });
});
|
import React from 'react';
import styled from 'styled-components';
import { Icon } from 'antd';
import { Card } from 'components/Basic/Card';
import { ProposalInfo } from 'types';
import { BASE_BSC_SCAN_URL } from '../../../config';
// Rounded banner that shows the proposer's shortened address with a small
// circular "open on explorer" button beside it.
const ProposalUserWrapper = styled.div`
  width: 100%;
  height: 67px;
  border-radius: 25px;
  background-color: var(--color-bg-primary);
  padding: 0 52px;

  p {
    font-size: 17.5px;
    font-weight: 900;
    color: var(--color-text-main);
  }

  .copy-btn {
    width: 20px;
    height: 20px;
    border-radius: 50%;
    background-color: var(--color-bg-active);
    margin-left: 26px;

    i {
      color: var(--color-text-main);
      svg {
        transform: rotate(-45deg);
      }
    }
  }
`;
// Props: the (possibly partially loaded) proposal whose proposer is shown.
interface ProposalUserProps {
  proposalInfo: Partial<ProposalInfo>;
}
/**
 * Shows the proposer's shortened address ("0x12...abcd" style); clicking it
 * opens the address on BscScan in a new tab.
 */
function ProposalUser({ proposalInfo }: ProposalUserProps) {
  // Opens the BscScan page for an address or a transaction hash.
  const handleAddLink = (linkType: $TSFixMe, v: $TSFixMe) => {
    window.open(`${BASE_BSC_SCAN_URL}/${linkType === 'address' ? 'address' : 'tx'}/${v}`, '_blank');
  };

  return (
    <Card>
      <ProposalUserWrapper className="flex align-center">
        <div
          className="flex align-center just-center pointer"
          onClick={() => handleAddLink('address', proposalInfo.proposer || '')}
        >
          <p className="highlight">
            {/* slice() replaces the deprecated substr(); output is identical:
                first 5 chars + "..." + last 4 chars. */}
            {proposalInfo.proposer
              ? `${proposalInfo.proposer.slice(0, 5)}...${proposalInfo.proposer.slice(-4)}`
              : ''}
          </p>
          <div className="flex align-center just-center copy-btn">
            <Icon type="arrow-right" />
          </div>
        </div>
      </ProposalUserWrapper>
    </Card>
  );
}

export default ProposalUser;
|
def min_element(lst):
    """Return the smallest element of ``lst``.

    Raises IndexError when ``lst`` is empty (unchanged from the original
    behaviour, which indexed ``lst[0]`` first).
    """
    min_num = lst[0]
    # Iterate over the values directly instead of index arithmetic.
    for value in lst[1:]:
        if value < min_num:
            min_num = value
    return min_num
<reponame>taowu750/LeetCodeJourney
package training.dynamicprogramming;
import org.junit.jupiter.api.Test;
import training.backtracking.E139_Medium_WordBreak;
import java.util.*;
import java.util.function.BiFunction;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static util.CollectionUtil.equalsIgnoreOrder;
/**
* 140. 单词拆分 II: https://leetcode-cn.com/problems/word-break-ii/
*
* 给定一个「非空」字符串 s 和一个包含「非空」单词列表的字典 wordDict,在字符串中增加空格来构建一个句子,
* 使得句子中所有的单词都在词典中。返回所有这些可能的句子。
*
* 例 1:
* 输入:
* s = "catsanddog"
* wordDict = ["cat", "cats", "and", "sand", "dog"]
* 输出:
* [
* "cats and dog",
* "cat sand dog"
* ]
*
* 例 2:
* 输入:
* s = "pineapplepenapple"
* wordDict = ["apple", "pen", "applepen", "pine", "pineapple"]
* 输出:
* [
* "pine apple pen apple",
* "pineapple pen apple",
* "pine applepen apple"
* ]
* 解释: 注意你可以重复使用字典中的单词。
*
* 例 3:
* 输入:
* s = "catsandog"
* wordDict = ["cats", "dog", "sand", "and", "cat"]
* 输出:
* []
*
* 约束:
* - 分隔时可以重复使用字典中的单词。
* - 你可以假设字典中没有重复的单词。
*/
public class E140_Hard_WordBreakII {

    // Shared harness: checks the three examples from the problem statement
    // (order-insensitive comparison of the produced sentences).
    static void test(BiFunction<String, List<String>, List<String>> method) {
        equalsIgnoreOrder(asList("cats and dog", "cat sand dog"),
                method.apply("catsanddog", asList("cat", "cats", "and", "sand", "dog")));
        equalsIgnoreOrder(asList("pine apple pen apple", "pineapple pen apple", "pine applepen apple"),
                method.apply("pineapplepenapple", asList("apple", "pen", "applepen", "pine", "pineapple")));
        equalsIgnoreOrder(emptyList(), method.apply("catsandog", asList("cats", "dog", "sand", "and", "cat")));
    }

    /**
     * See {@link E139_Medium_WordBreak}.
     *
     * DP forward pass records, for every prefix length, which dictionary words
     * can end there; a backtracking pass then enumerates all sentences.
     *
     * LeetCode runtime: 1 ms - 99%; memory: 36.6 MB - 83%.
     */
    public List<String> wordBreak(String s, List<String> wordDict) {
        HashSet<String> set = new HashSet<>(wordDict);
        // The longest dictionary word bounds how far back each DP step looks.
        int maxLen = 0;
        for (String word: wordDict) {
            if (word.length() > maxLen) {
                maxLen = word.length();
            }
        }
        // dp[i]: s[0, i) is breakable; dpStr[i]: dictionary words ending at i.
        boolean[] dp = new boolean[s.length() + 1];
        @SuppressWarnings("unchecked")
        List<String>[] dpStr = (List<String>[]) new ArrayList[s.length() + 1];
        dp[0] = true;
        for (int size = 1; size <= s.length(); size++) {
            int limit = Math.max(0, size - maxLen);
            List<String> match = null;
            for (int i = size - 1; i >= limit; i--) {
                String sub = s.substring(i, size);
                if (dp[i] && set.contains(sub)) {
                    dp[size] = true;
                    // Lazily allocate: most positions have no matches.
                    if (match == null) {
                        match = new ArrayList<>(4);
                        dpStr[size] = match;
                    }
                    match.add(sub);
                }
            }
        }
        if (dp[s.length()]) {
            // Walk dpStr backwards from the end to enumerate every sentence.
            List<String> result = new ArrayList<>();
            Deque<String> stack = new LinkedList<>();
            addResult(s.length(), dpStr, s.length(), stack, result);
            return result;
        } else {
            return emptyList();
        }
    }

    // Backtracks through dpStr pushing words onto the stack; when idx reaches
    // 0 the stack holds one complete sentence (in source order, since push
    // prepends) which is joined with single spaces.
    private void addResult(int sLen, List<String>[] dpStr, int idx, Deque<String> stack, List<String> result) {
        if (idx <= 0) {
            int i = 0;
            // Capacity: all characters plus one space between each pair of words.
            StringBuilder sb = new StringBuilder(sLen + stack.size() - 1);
            for (String s: stack) {
                sb.append(s);
                if (++i != stack.size()) {
                    sb.append(' ');
                }
            }
            result.add(sb.toString());
            return;
        }
        for (String s: dpStr[idx]) {
            stack.push(s);
            addResult(sLen, dpStr, idx - s.length(), stack, result);
            stack.pop();
        }
    }

    @Test
    public void testWordBreak() {
        test(this::wordBreak);
    }
}
|
<gh_stars>1-10
package cim4j;
import java.util.Map;
import java.util.HashMap;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
/*
Validity for MeasurementValue.
*/
/**
 * CIM enumeration wrapper: validity quality code for a MeasurementValue.
 * Parses a textual enum literal and stores it; invalid literals are reported
 * to stdout and leave the current value unchanged (matching legacy behaviour).
 */
public class Validity extends BaseClass
{
	private enum Validity_ENUM
	{
		/** No abnormal condition of the acquisition function or source detected. */
		GOOD,
		/** Abnormal behaviour detected but the value could still be valid; client decides. */
		QUESTIONABLE,
		/** Abnormal acquisition conditions; the value is undefined and must not be used. */
		INVALID,
		/** Sentinel marking the number of real values; not a valid quality code. */
		MAX_Validity_ENUM;
	}

	// Currently parsed enum literal; null until a valid value is set.
	private Validity_ENUM value;

	public BaseClass construct() {
		return new Validity();
	}

	public Validity() {}

	public Validity(java.lang.String s) {
		setValue(s);
	}

	/** Parses the trimmed literal; invalid names are logged and ignored. */
	public void setValue(java.lang.String s) {
		try
		{
			value = Validity_ENUM.valueOf(s.trim());
		}
		catch (IllegalArgumentException iae)
		{
			// Bug fix: the message previously mislabelled this as a
			// "NumberFormatException" (copy-paste from a numeric type).
			System.out.println("IllegalArgumentException: " + iae.getMessage());
		}
	}

	/** The attribute name is ignored for an enum; delegates to setValue. */
	public void setAttribute(java.lang.String a, java.lang.String s) {
		setValue(s);
	}

	public void setAttribute(java.lang.String attributeName, BaseClass value) {
		throw new IllegalArgumentException("ENUM cannot set attribute: " + attributeName);
	}

	private java.lang.String debugName = "Validity";

	public java.lang.String debugString() {
		return debugName;
	}

	// NOTE(review): throws NullPointerException when no valid value has been
	// parsed yet (value == null) — confirm callers always set a value first.
	public java.lang.String toString(boolean b) {
		return "Enum (" + value.toString() + ")";
	}
};
|
// E2E specs for Section 2: network waiting, new tabs, and file downloads.
// All selectors/assertions live on the Section2 page object.
const { Section2 } = require('../objects/section-2')

describe('API Interactions', () => {
  // Start every test from the section's landing page.
  beforeEach(() => {
    Section2.actions.navigate()
  })

  it('Http: Waiting for network calls ', () => {
    Section2.actions.clickNetworkCallButtonAndAssertNetworkCall()
  })

  it('Browser API: Opening a new tab', () => {
    Section2.actions.clickNewTabButton()
    Section2.actions.assertNewTab()
  })

  it('Browser API: Downloading a file', () => {
    Section2.actions.clickFileDownloadButton()
    Section2.actions.assertFileDownload()
  })
})
|
#!/bin/sh
# Configure Release and Debug CMake build trees in this script's parent
# directory, using the repo-local vcpkg toolchain.

# gets directory of this file to find the others
INSTALLPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
# Quoted to survive paths with spaces; abort if the cd fails so cmake does not
# run in the wrong directory.
cd "$INSTALLPATH/.." || exit 1
# -p: do not fail when the build trees already exist (re-running the script).
mkdir -p build build_debug
toolpath="$PWD/vcpkg/scripts/buildsystems/vcpkg.cmake"
cmake -DCMAKE_TOOLCHAIN_FILE="$toolpath" -DCMAKE_BUILD_TYPE=Release -S . -B build
cmake -DCMAKE_TOOLCHAIN_FILE="$toolpath" -DCMAKE_BUILD_TYPE=Debug -S . -B build_debug
|
<reponame>freddieFishCake/natural-alarm<gh_stars>1-10
package net.htb.naturalalarm;
import org.immutables.value.Value;
/**
*
* @author <NAME>
*/
@Value.Immutable
public interface ChangeRequest {
int getSteps();
int getMillisBetweenSteps();
@Value.Default
default LedControl getRed() {
return ImmutableLedControl.builder().build();
}
@Value.Default
default LedControl getGreen() {
return ImmutableLedControl.builder().build();
}
@Value.Default
default LedControl getBlue() {
return ImmutableLedControl.builder().build();
}
}
|
#!/usr/bin/env bash
# Start the Django dev server in the background on all interfaces, port 8000,
# sending both stdout and stderr to kraken.log.
python krakendash/manage.py runserver 0.0.0.0:8000 > kraken.log 2>&1 &
|
#!/bin/sh
# Builds the swagger-codegen CLI (if needed) and prepares the arguments for
# generating the Sinatra petstore sample server.

# Resolve the real path of this script, following any chain of symlinks.
SCRIPT="$0"

while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

# Derive APP_DIR from the script location when it is not already a directory.
# NOTE(review): APP_DIR may be unset on first use here; this relies on the
# -d test failing for an empty string rather than 'set -u' — confirm intended.
if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi

executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"

# Build the CLI jar on first run.
if [ ! -f "$executable" ]
then
  mvn clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -Dlogback.configurationFile=bin/logback.xml"
ags="$@ generate -t modules/swagger-codegen/src/main/resources/sinatra -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -l sinatra -o samples/server/petstore/sinatra"

# NOTE(review): the actual invocation is commented out upstream, so this
# script currently only builds the jar and computes arguments — confirm.
# java $JAVA_OPTS -jar $executable $ags
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N-VB-fill/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N-VB-fill/7-1024+0+512-N-VB-fill-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_two_thirds_full --eval_function last_element_eval |
from sklearn.model_selection import KFold, train_test_split
from sklearn.metrics import recall_score, precision_score, accuracy_score, f1_score
def cross_val_fit(model, X, y, N_FOLDS, cross_val, apply_smote, test_size, metric):
    """Fit ``model`` and report mean train/test scores for a chosen metric.

    Parameters
    ----------
    model : estimator exposing ``fit``/``predict``
    X, y : array-likes supporting integer-array indexing (e.g. numpy arrays)
    N_FOLDS : number of folds when ``cross_val == 'k_fold'``
    cross_val : ``'k_fold'`` or ``'train_test'``
    apply_smote : currently unused — the SMOTE resampling step is commented out
    test_size : held-out fraction when ``cross_val == 'train_test'``
    metric : ``'accuracy'``, ``'f1_score'``, ``'precision'`` or ``'recall'``

    Returns
    -------
    dict with ``'metric_am_train'`` and ``'metric_am_test'`` scores.

    Raises
    ------
    ValueError
        If ``metric`` or ``cross_val`` is not one of the recognised values.
    """
    # BUG FIX: 'recall' was imported at module level but never selectable,
    # and any unrecognised metric left `metric_am` unbound, crashing later
    # with a NameError. Fail fast with a clear error instead.
    metric_functions = {
        'accuracy': accuracy_score,
        'f1_score': f1_score,
        'precision': precision_score,
        'recall': recall_score,
    }
    try:
        metric_am = metric_functions[metric]
    except KeyError:
        raise ValueError(f"unknown metric: {metric!r}")

    scores_am, scores_am_train = [], []

    if cross_val == 'k_fold':
        cv = KFold(n_splits=N_FOLDS, shuffle=True, random_state=0)
        for train_index, test_index in cv.split(X):
            X_train, X_test = X[train_index], X[test_index]
            y_train, y_test = y[train_index], y[test_index]
            # if apply_smote:
            #     sm = SMOTE(random_state=42)
            #     X_train, y_train = sm.fit_resample(X_train, y_train)
            model.fit(X_train, y_train)
            scores_am.append(metric_am(y_test, model.predict(X_test)))
            scores_am_train.append(metric_am(y_train, model.predict(X_train)))
        return {
            'metric_am_train': sum(scores_am_train) / N_FOLDS,
            'metric_am_test': sum(scores_am) / N_FOLDS,
        }

    if cross_val == 'train_test':
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=test_size, random_state=42)
        model.fit(X_train, y_train)
        scores_am.append(metric_am(y_test, model.predict(X_test)))
        scores_am_train.append(metric_am(y_train, model.predict(X_train)))
        return {
            'metric_am_train': scores_am_train[0],
            'metric_am_test': scores_am[0],
        }

    # BUG FIX: previously an unknown mode silently returned None.
    raise ValueError(f"unknown cross_val mode: {cross_val!r}")
<reponame>tylertucker202/argovis_backend
const Covar = require('../models/covar');
const GJV = require('geojson-validation');
exports.radius_selection = function(req, res , next) {
req.checkQuery('lat', 'lat should be specified.').notEmpty();
req.checkQuery('lon', 'lon should be specified.').notEmpty();
req.checkQuery('forcastDays', 'forcastDays should be specified.').notEmpty();
req.checkQuery('lat', 'lat should be a number.').isNumeric();
req.checkQuery('lon', 'lon should be a number.').isNumeric();
req.checkQuery('forcastDays', 'forcastDays should be a number.').isNumeric();
req.sanitize('forcastDays').escape();
req.sanitize('forcastDays').trim();
req.sanitize('lat').escape();
req.sanitize('lat').trim();
req.sanitize('lon').escape();
req.sanitize('lon').trim();
let lat = JSON.parse(req.params.lat)
let lon = JSON.parse(req.params.lon)
let forcastDays = JSON.parse(req.params.forcastDays)
point = {'type': 'Point', 'coordinates': [lat, lon]}
GJV.valid(point)
GJV.isPoint(point)
const query = Covar.findOne({forcastDays: forcastDays, geoLocation: {
$near: {
$geometry: point,
//$maxDistance: radius
}
}
});
query.exec(function (err, covars) {
if (err) return next(err);
res.json(covars);
})
} |
package mock;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.EventSpec;
import com.linkedin.metadata.models.registry.EntityRegistry;
import java.util.Collections;
import java.util.Map;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Minimal {@link EntityRegistry} stand-in for tests: entity specs are stubbed
 * with {@link MockEntitySpec}, and no event specs are registered.
 */
public class MockEntityRegistry implements EntityRegistry {

  @Nonnull
  @Override
  public EntitySpec getEntitySpec(@Nonnull String entityName) {
    return new MockEntitySpec(entityName);
  }

  @Nullable
  @Override
  public EventSpec getEventSpec(@Nonnull String eventName) {
    // This mock registers no events; the method is @Nullable, so null is valid here.
    return null;
  }

  @Nonnull
  @Override
  public Map<String, EntitySpec> getEntitySpecs() {
    return Collections.emptyMap();
  }

  @Nonnull
  @Override
  public Map<String, EventSpec> getEventSpecs() {
    // BUG FIX: previously returned null despite the @Nonnull contract, which
    // would NPE any caller iterating the result. Mirror getEntitySpecs().
    return Collections.emptyMap();
  }
}
|
package com.matpag.dagger.starter;

import javax.inject.Singleton;

import dagger.Component;
import dagger.android.AndroidInjectionModule;
import dagger.android.AndroidInjector;

/**
 * Application-scoped Dagger component wiring the Android injection support
 * module together with the app's activity and application modules, and
 * acting as the injector for the {@code App} class.
 *
 * Created by <NAME> on 11/06/2017.
 */
@Singleton
@Component(modules = {
        AndroidInjectionModule.class,
        ActivityModule.class,
        AppModule.class
})
public interface AppComponent extends AndroidInjector<App> {}
|
<gh_stars>1-10
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.domain.order;
import java.sql.Timestamp;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import org.opentaps.foundation.repository.LookupRepositoryInterface;
import org.opentaps.foundation.repository.RepositoryException;
/**
 * Repository to lookup Purchase Orders.
 */
public interface PurchaseOrderLookupRepositoryInterface extends LookupRepositoryInterface {

    /**
     * Sets the order Id to search for.
     * @param orderId a <code>String</code> value
     */
    public void setOrderId(String orderId);

    /**
     * Sets the status Id to search for.
     * @param statusId a <code>String</code> value
     */
    public void setStatusId(String statusId);

    /**
     * Sets the order name to search for.
     * @param orderName a <code>String</code> value
     */
    public void setOrderName(String orderName);

    /**
     * Sets the product pattern to search for.
     * @param productPattern a <code>String</code> value
     */
    public void setProductPattern(String productPattern);

    /**
     * Sets the supplier party Id to search for.
     * @param supplierPartyId a <code>String</code> value
     */
    public void setSupplierPartyId(String supplierPartyId);

    /**
     * Sets the from date string to search for.
     * @param fromDate a <code>String</code> value
     */
    public void setFromDate(String fromDate);

    /**
     * Sets the from date to search for.
     * @param fromDate a <code>Timestamp</code> value
     */
    public void setFromDate(Timestamp fromDate);

    /**
     * Sets the thru date string to search for.
     * @param thruDate a <code>String</code> value
     */
    public void setThruDate(String thruDate);

    /**
     * Sets the thru date to search for.
     * @param thruDate a <code>Timestamp</code> value
     */
    public void setThruDate(Timestamp thruDate);

    /**
     * Sets the createdBy to search for.
     * @param createdBy a <code>String</code> value
     */
    public void setCreatedBy(String createdBy);

    /**
     * Sets the organization party Id to search for.
     * @param organizationPartyId a <code>String</code> value
     */
    public void setOrganizationPartyId(String organizationPartyId);

    /**
     * Sets the find all desired orders only.
     * @param findDesiredOnly a <code>boolean</code> value
     */
    public void setFindDesiredOnly(boolean findDesiredOnly);

    /**
     * Sets the locale for format date string.
     * @param locale a <code>Locale</code> value
     */
    public void setLocale(Locale locale);

    /**
     * Sets the timeZone for format date string.
     * @param timeZone a <code>TimeZone</code> value
     */
    public void setTimeZone(TimeZone timeZone);

    /**
     * Sets the orderBy list for sorting the result.
     * @param orderBy a <code>List<String></code> value
     */
    public void setOrderBy(List<String> orderBy);

    /**
     * Finds the list of <code>Order</code>.
     * @return list of orders
     * @throws RepositoryException if an error occurs
     */
    public List<OrderViewForListing> findOrders() throws RepositoryException;
}
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internal.metadata.holder;
import static info.archinnov.achilles.schemabuilder.Create.Options.ClusteringOrder;
import java.util.ArrayList;
import java.util.List;
/**
 * Accumulates the property metas and clustering orders of a compound
 * (embedded id) primary key, and assembles them into partition/clustering
 * component holders and a final {@code EmbeddedIdProperties}.
 */
public class EmbeddedIdPropertiesBuilder {

    // All key component metas, in the order they were added.
    private final List<PropertyMeta> propertyMetas = new ArrayList<>();
    private List<ClusteringOrder> clusteringOrders;

    /** Appends one key component meta. */
    public void addPropertyMeta(PropertyMeta propertyMeta) {
        propertyMetas.add(propertyMeta);
    }

    public List<PropertyMeta> getPropertyMetas() {
        return propertyMetas;
    }

    public void setClusteringOrders(List<ClusteringOrder> clusteringOrders) {
        this.clusteringOrders = clusteringOrders;
    }

    /** Wraps the accumulated metas as partition key components. */
    public PartitionComponents buildPartitionKeys() {
        return new PartitionComponents(propertyMetas);
    }

    /** Wraps the accumulated metas and orders as clustering key components. */
    public ClusteringComponents buildClusteringKeys() {
        return new ClusteringComponents(propertyMetas, clusteringOrders);
    }

    /**
     * Combines partition and clustering components into one
     * {@code EmbeddedIdProperties}, concatenating their metas
     * (partition components first).
     */
    public static EmbeddedIdProperties buildEmbeddedIdProperties(PartitionComponents partitionComponents, ClusteringComponents clusteringComponents, String entityName) {
        final List<PropertyMeta> propertyMetas = new ArrayList<>(partitionComponents.propertyMetas);
        propertyMetas.addAll(clusteringComponents.propertyMetas);
        return new EmbeddedIdProperties(partitionComponents, clusteringComponents, propertyMetas, entityName);
    }
}
|
class InsuranceState extends \Eloquent
{
    // Columns allowed for mass assignment.
    protected $fillable = ['state_name', 'state_code'];

    /**
     * Return every state row.
     */
    public function getAllStates()
    {
        return self::all();
    }

    /**
     * Create and persist a new state.
     *
     * @param string $stateName
     * @param string $stateCode
     * @return static the newly created model
     */
    public function addState($stateName, $stateCode)
    {
        return self::create(['state_name' => $stateName, 'state_code' => $stateCode]);
    }
}
// Barrel file: re-exports the MultipleFileUpload component family.
export * from './MultipleFileUpload';
export * from './MultipleFileUploadMain';
export * from './MultipleFileUploadStatus';
export * from './MultipleFileUploadStatusItem';
|
<filename>src/leetcode/array/SortColors.java
package leetcode.medium.array;
import java.util.Arrays;
public class SortColors {
public static void main(String[] args) {
int[] colors = {2,0,2,1,1,0};
sortColors(colors);
System.out.println(Arrays.toString(colors));
}
// O(N) time O(1) space
public static void sortColors(int[] nums) {
int[] counts = new int[3];
for(int i = 0 ; i < nums.length ; i++){
counts[nums[i]] = ++counts[nums[i]];
}
int index = 0;
while(counts[0] > 0 ){
nums[index++] = 0 ;
counts[0] = --counts[0];
}
while(counts[1] > 0 ){
nums[index++] = 1 ;
counts[1] = --counts[1];
}
while(counts[2] > 0 ){
nums[index++] = 2 ;
counts[2] = --counts[2];
}
}
}
|
<gh_stars>0
const dynamoose = require('dynamoose');
// Load AWS credentials from .env into process.env.
require('dotenv').config()

// setup region and credentials
// Credentials come from the environment only — never hardcode them here.
dynamoose.aws.sdk.config.update({
    accessKeyId: process.env.ACCESS_KEY_ID,
    secretAccessKey: process.env.SECRET_KEY,
    region: 'us-east-2'
});

// Export the configured singleton for the rest of the app.
module.exports = dynamoose;
import { Component, OnInit, Input } from '@angular/core';
import { RangeFilter, RangeFilterSelection } from 'src/app/shared/models/range-filter';
import { Store } from '@ngxs/store';
import { ChangeActiveRangeFilter } from 'src/app/states/search.state';
@Component({
  selector: 'app-range-box',
  templateUrl: './range-box.component.html',
  styleUrls: ['./range-box.component.scss']
})
export class RangeBoxComponent implements OnInit {
  // The filter definition (key plus full from/to bounds) this box renders.
  @Input() rangeFilter: RangeFilter;

  // Backing field for the active-selection input.
  _activeRangeFilter: RangeFilterSelection;

  // Re-derive the displayed selection whenever the active selection changes.
  @Input() set activeRangeFilter(rangeFilter: RangeFilterSelection) {
    this._activeRangeFilter = rangeFilter;
    this.setRangeFilter();
  };

  @Input() initialRangeBox: boolean = false;
  @Input() last: boolean = false;

  hidden: boolean;
  loading: boolean;
  rangeFilterSelection: RangeFilterSelection;

  constructor(private store: Store) {
  }

  ngOnInit() {
    this.setRangeFilter();
  }

  // Use the active selection when both ends are set; otherwise fall back to
  // the filter's full bounds. Note: a falsy `from`/`to` (e.g. 0) also takes
  // the fallback branch — presumably intentional, confirm if 0 is a valid bound.
  setRangeFilter() {
    if (this._activeRangeFilter && this._activeRangeFilter.from && this._activeRangeFilter.to) {
      this.rangeFilterSelection = this._activeRangeFilter;
    } else {
      this.rangeFilterSelection = { from: this.rangeFilter.from, to: this.rangeFilter.to };
    }
  }

  // Push the user's new range selection into the NGXS store.
  rangeChanged(rangeSelection: RangeFilterSelection) {
    console.log('changed range', rangeSelection); // NOTE(review): debug log left in — remove?
    this.store.dispatch(new ChangeActiveRangeFilter(this.rangeFilter.key, rangeSelection, this.initialRangeBox));
  }
}
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { User } from './user.entity';
const md5 = require('md5');
@Injectable()
export class UsersService {
  constructor(
    @InjectRepository(User)
    private usersRepository: Repository<User>,
  ) {}

  // Connectivity smoke test: returns the md5 hash of the string 'admin'.
  userTest(): string {
    return md5('admin');
  }

  /**
   * @description Fetch a user's details by id.
   * @date 2021-04-13
   * @param {number} id - the user's ID
   * @returns {any}
   */
  async getUserInfoById(id: number): Promise<User | undefined> {
    return this.usersRepository.findOne(id);
  }

  /**
   * @description Fetch a user's details by name.
   * @date 2021-04-13
   * @param {string} username - the user's name
   * @returns {any}
   */
  async findOne(username: string): Promise<User | undefined> {
    return this.usersRepository.findOne({ where: { name: username } });
  }

  // Return all users.
  findAll(): Promise<User[]> {
    return this.usersRepository.find();
  }
}
|
<reponame>protop-io/nexus-repository-protop<filename>src/main/java/org/sonatype/nexus/repository/protop/internal/ProtopPackageRootMetadataUtils.java
package org.sonatype.nexus.repository.protop.internal;
import org.joda.time.DateTime;
import org.sonatype.nexus.common.collect.NestedAttributesMap;
import org.sonatype.nexus.repository.Repository;
import org.sonatype.nexus.repository.storage.Asset;
import org.sonatype.nexus.repository.storage.Bucket;
import org.sonatype.nexus.repository.storage.StorageTx;
import org.sonatype.nexus.transaction.UnitOfWork;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import static org.sonatype.nexus.repository.protop.internal.ProtopAttributes.P_NAME;
import static org.sonatype.nexus.repository.protop.internal.ProtopAttributes.P_ORG;
import static org.sonatype.nexus.repository.protop.internal.ProtopFacetUtils.findPackageRootAsset;
import static org.sonatype.nexus.repository.protop.internal.ProtopFacetUtils.loadPackageRoot;
import static org.sonatype.nexus.repository.protop.internal.ProtopMetadataUtils.*;
/**
* Helper for protop package root metadata.
* <p>
* See https://github.com/protop/registry/blob/master/docs/responses/package-metadata.md
*
* @since 3.7
*/
public class ProtopPackageRootMetadataUtils {

  private static final String MODIFIED = "modified";

  private static final String CREATED = "created";

  private static final String LATEST = "latest";

  private static final String DEPENDENCIES = "dependencies";

  private static final String DEV_DEPENDENCIES = "devDependencies";

  // Version-level fields hoisted up onto the package root document.
  private static final String[] FULL_HOISTED_FIELDS = new String[]{ProtopAttributes.P_AUTHOR,
      ProtopAttributes.P_CONTRIBUTORS, ProtopAttributes.P_DESCRIPTION, ProtopAttributes.P_HOMEPAGE, ProtopAttributes.P_KEYWORDS,
      ProtopAttributes.P_LICENSE, ProtopAttributes.P_MAINTAINERS, ProtopAttributes.P_NAME, ProtopAttributes.P_ORG, ProtopAttributes.P_README,
      ProtopAttributes.P_README_FILENAME, ProtopAttributes.P_REPOSITORY};

  // Fields copied into the per-version entry of the "versions" map.
  private static final String[] FULL_VERSION_MAP_FIELDS = new String[]{ProtopAttributes.P_AUTHOR,
      ProtopAttributes.P_CONTRIBUTORS,
      ProtopAttributes.P_DEPRECATED, DEPENDENCIES, ProtopAttributes.P_DESCRIPTION, ProtopAttributes.P_LICENSE,
      ProtopAttributes.P_MAIN, ProtopAttributes.P_MAINTAINERS, ProtopAttributes.P_NAME, ProtopAttributes.P_VERSION,
      ProtopAttributes.P_ORG, ProtopAttributes.P_OPTIONAL_DEPENDENCIES, DEV_DEPENDENCIES, ProtopAttributes.P_BUNDLE_DEPENDENCIES,
      ProtopAttributes.P_PEER_DEPENDENCIES, ProtopAttributes.P_BIN, ProtopAttributes.P_DIRECTORIES, ProtopAttributes.P_ENGINES,
      ProtopAttributes.P_README, ProtopAttributes.P_README_FILENAME,
      // This isn't currently in protop.json but could be determined by the presence
      // of protop-shrinkwrap.json
      ProtopAttributes.P_HAS_SHRINK_WRAP};

  private ProtopPackageRootMetadataUtils() {
    // sonar
  }

  /**
   * Creates full package data from the metadata of an individual version. May change <code>packageJson</code> if child
   * nodes do not exist.
   *
   * @param packageJson the metadata for the version
   * @param repositoryName the repository name
   * @param sha1sum the hash of the version
   * @since 3.7
   */
  public static NestedAttributesMap createFullPackageMetadata(final NestedAttributesMap packageJson,
                                                              final String repositoryName,
                                                              final String sha1sum,
                                                              @Nullable final Repository repository,
                                                              final BiFunction<String, String, String> function) {
    String org = packageJson.get(ProtopAttributes.P_ORG, String.class);
    String name = packageJson.get(ProtopAttributes.P_NAME, String.class);
    ProtopProjectId projectId = new ProtopProjectId(org, name);
    String version = packageJson.get(ProtopAttributes.P_VERSION, String.class);
    String now = PROTOP_TIMESTAMP_FORMAT.print(DateTime.now());

    NestedAttributesMap packageRoot = new NestedAttributesMap("metadata", new HashMap<String, Object>());
    packageRoot.set(META_ID, projectId.id());

    // dist-tags.latest is resolved via the caller-supplied merge function,
    // comparing any existing latest version with the incoming one.
    String packageRootLatestVersion = isNull(repository) ? "" : getPackageRootLatestVersion(packageJson, repository);
    packageRoot.child(DIST_TAGS).set(LATEST, function.apply(packageRootLatestVersion, version));

    packageRoot.child(ProtopAttributes.P_USERS);

    NestedAttributesMap time = packageRoot.child(TIME);
    time.set(version, now);
    time.set(MODIFIED, now);
    time.set(CREATED, now);

    // Hoisting fields from version metadata
    setBugsUrl(packageJson, packageRoot);
    for (String field : FULL_HOISTED_FIELDS) {
      copy(packageRoot, packageJson, field);
    }

    // Copy version specific metadata fields
    NestedAttributesMap versionMap = packageRoot.child(VERSIONS).child(version);
    versionMap.set(META_ID, projectId + "@" + version);

    // required fields
    versionMap.child(DIST).set(ProtopAttributes.P_SHASUM, sha1sum);
    versionMap.child(DIST).set(TARBALL,
        String.format("%s/repository/%s",
            repositoryName,
            ProtopMetadataUtils.createRepositoryPath(org, name, version)));

    // optional fields
    for (String field : FULL_VERSION_MAP_FIELDS) {
      copy(versionMap, packageJson, field);
    }

    // needs to happen after copying fields
    rewriteTarballUrl(repositoryName, packageRoot);

    return packageRoot;
  }

  /**
   * Returns the "latest" dist-tag of the stored package root for this
   * project, or "" when the root is absent or unreadable.
   */
  private static String getPackageRootLatestVersion(final NestedAttributesMap protopJson,
                                                    final Repository repository) {
    StorageTx tx = UnitOfWork.currentTx();

    String org = (String) protopJson.get(P_ORG);
    String name = (String) protopJson.get(P_NAME);

    ProtopProjectId projectId = new ProtopProjectId(org, name);

    try {
      NestedAttributesMap packageRoot = getPackageRoot(tx, repository, projectId);
      if (nonNull(packageRoot)) {
        String latestVersion = getLatestVersionFromPackageRoot(packageRoot);
        if (nonNull(latestVersion)) {
          return latestVersion;
        }
      }
    } catch (IOException ignored) { // NOSONAR
    }
    return "";
  }

  /**
   * Fetches the package root as {@link NestedAttributesMap}
   *
   * @param tx
   * @param repository
   * @param packageId
   * @return package root if found otherwise null
   * @throws IOException
   */
  @Nullable
  public static NestedAttributesMap getPackageRoot(final StorageTx tx,
                                                   final Repository repository,
                                                   final ProtopProjectId packageId) throws IOException {
    Bucket bucket = tx.findBucket(repository);

    Asset packageRootAsset = findPackageRootAsset(tx, bucket, packageId);
    if (packageRootAsset != null) {
      return loadPackageRoot(tx, packageRootAsset);
    }
    return null;
  }

  /**
   * Deep-copies {@code field} from {@code src} into {@code map}: nested maps
   * are recursed into, scalars are set directly, nulls are skipped.
   */
  private static void copy(final NestedAttributesMap map, final NestedAttributesMap src, final String field) {
    Object object = src.get(field);
    if (object instanceof Map) {
      NestedAttributesMap destChild = map.child(field);
      NestedAttributesMap srcChild = src.child(field);
      for (String key : srcChild.keys()) {
        // BUG FIX: previously tested srcChild.get(field) — the parent field
        // name, not the current key — so the recursive branch never matched
        // per-key nested maps. Test the value under `key` instead.
        if (srcChild.get(key) instanceof Map) {
          copy(destChild, srcChild, key);
        } else {
          destChild.set(key, srcChild.get(key));
        }
      }
    } else if (object != null) {
      map.set(field, object);
    }
  }

  /**
   * Hoists a "bugs" URL onto the package root; accepts either a plain string
   * or an object with a "url" property.
   */
  private static void setBugsUrl(NestedAttributesMap packageJson, NestedAttributesMap packageRoot) {
    Object bugs = packageJson.get(ProtopAttributes.P_BUGS);
    String bugsUrl = null;

    if (bugs instanceof String) {
      bugsUrl = (String) bugs;
    } else if (bugs != null) {
      bugsUrl = packageJson.child(ProtopAttributes.P_BUGS).get(ProtopAttributes.P_URL, String.class);
    }

    if (bugsUrl != null) {
      packageRoot.set(ProtopAttributes.P_BUGS, bugsUrl);
    }
  }
}
|
# frozen_string_literal: true
class DashboardController < ApplicationController
  before_action :verify_core, only: %i[db_dumps download_dump]
  before_action :verify_developer, only: [:reset_query_time]

  # Easter-egg redirect to a large test download; cached for a day.
  def funride
    response.cache_control = 'public, max-age=86400'
    redirect_to 'https://speed.hetzner.de/10GB.bin', status: :moved_permanently
  end

  # Main dashboard: per-reason post counts (from Redis) split into
  # active/inactive reasons, plus feedback-type breakdowns.
  def index
    if params[:id].present?
      redirect_to '/magic/funride', status: :moved_permanently
      return
    end

    Rack::MiniProfiler.step('Redis queries') do
      # NOTE: caching is currently disabled (the cache.fetch call is commented out).
      @inactive_reasons, @active_reasons = # Rails.cache.fetch 'reasons_index', expires_in: 6.hours do
        [true, false].map do |inactive|
          # Each reason gets a Redis-backed post_count singleton method,
          # then reasons are sorted by that count, descending.
          results = Reason.where(inactive: inactive).to_a.map do |reason|
            reason.define_singleton_method(:post_count) { Redis::Reason.find(reason.id).count }
            reason
          end.sort_by(&:post_count).reverse
          # Per-reason totals plus tp/fp/naa feedback counts (caches refreshed first).
          counts = results.map do |reason|
            reason = Redis::Reason.find(reason.id)
            per_feedback_counts = %w[tps fps naas].map do |fb|
              reason.clear_reason_feedback_cache(fb)
              reason.update_reason_feedback_cache(fb)
              # fb[0..-2] strips the plural 's' → :tp, :fp, :naa keys.
              [fb[0..-2].to_sym, reason.for_feedback(fb).cardinality]
            end.to_h
            [reason.id, { total: reason.count }.merge(per_feedback_counts)]
          end.to_h
          { counts: counts, results: results }
        end
    end
    @reasons = Reason.all
    @posts = Post.all
  end

  def new_dash; end

  # Paginated post listing, optionally filtered by site and deletion state.
  def spam_by_site
    @posts = Post.includes_for_post_row
    @posts = @posts.where(site_id: params[:site]) if params[:site].present?
    @posts = @posts.undeleted if params[:undeleted].present?
    @posts = @posts.order(id: :desc).paginate(per_page: 50, page: params[:page])
    @sites = Site.where(id: @posts.map(&:site_id))
  end

  # Average DB query time per request path, read back from Redis keys.
  def query_times
    @query_times = redis(logger: true).scan_each(match: 'request_timings/db/by_path/*').map do |k|
      Redis::QueryAverage.new(*k.split('/', 5)[3..-1])
    end.sort_by(&:average).reverse
  end

  # Developer-only: zero out one query-time average.
  def reset_query_time
    QueryAverage.find(params[:id]).update(counter: 0, average: 0)
    redirect_back fallback_location: root_path
  end

  # Per-site dashboard: tabbed post listings, recent autoflags, top spammers
  # and top autoflaggers for the chosen site and time window.
  def site_dash
    @posts = Post.includes_for_post_row.includes(:flag_logs)
    params[:site_id] = Site.first.id if params[:site_id].blank?
    @site = Site.find(params[:site_id])
    @months = params[:months].to_s.empty? ? 3 : params[:months].to_i
    @months_string = @months <= 1 ? 'month' : "#{@months} months"
    @all_posts = @posts.where(site_id: @site.id)
    @tabs = {
      'All' => @all_posts,
      'Autoflagged' => @all_posts.where(autoflagged: true),
      'Deleted' => @all_posts.where.not(deleted_at: nil),
      'Undeleted' => @all_posts.where(deleted_at: nil)
    }
    special_tabs = %w[Spammers Autoflaggers]
    @active_tab = (@tabs.keys + special_tabs).map(&:downcase).include?(params[:tab]&.downcase) ? params[:tab]&.downcase : 'all'
    @posts = @tabs.map { |k, v| [k.downcase, v] }.to_h[params[:tab]&.downcase] || @tabs['All']
    @flags = FlagLog.where(site: @site).where('`flag_logs`.`created_at` >= ?', @months.months.ago).auto
    # Spammers: users on this site with true-positive feedback, ordered by post count.
    @spammers = StackExchangeUser.joins(:feedbacks).includes(:posts).where(site: @site, still_alive: true)
                                 .where("feedbacks.feedback_type LIKE 't%'").group('stack_exchange_users.id')
                                 .order('COUNT(posts.stack_exchange_user_id) DESC')
    # .order(Arel.sql('stack_exchange_users.reputation DESC'))
    @spammers_page = @spammers.paginate(per_page: 50, page: params[:page])
    # Autoflaggers: users ranked by successful automatic flags on this site,
    # with a tp_flags column counting flags on posts later confirmed tp.
    @autoflaggers = User.joins(:flag_logs, flag_logs: [:post])
                        .where(flag_logs: { site: @site, success: true, is_auto: true })
                        .group(Arel.sql('users.id'))
                        .order(Arel.sql('COUNT(DISTINCT flag_logs.id) DESC'))
                        .select(Arel.sql('users.stack_exchange_account_id, users.username, COUNT(DISTINCT flag_logs.id) AS total_flags,'\
                                         'COUNT(DISTINCT IF(posts.is_tp = 1, flag_logs.id, NULL)) AS tp_flags'))
    @autoflaggers_page = @autoflaggers.paginate(per_page: 50, page: params[:page])
    # Four-per-em space, used as a display delimiter in the views.
    @delimiter = "\u2005".encode('utf-8')
    @posts_timescaled = @posts.where('posts.created_at >= ?', @months.months.ago)
    @posts = @posts.order(id: :desc).paginate(per_page: 50, page: params[:page])
  end
end
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Grenade plugin exercising Heat across an upgrade: creates a dedicated
# project/user and a stack before the upgrade, verifies the stack after it,
# and tears everything down at the end.

set -o errexit

# Quote all sourced paths/expansions (SC2086) — they may contain spaces.
source "$GRENADE_DIR/grenaderc"
source "$GRENADE_DIR/functions"

source "$TOP_DIR/openrc" admin admin
source "$TOP_DIR/inc/ini-config"

set -o xtrace

HEAT_USER=heat_grenade
HEAT_PROJECT=heat_grenade
HEAT_PASS=pass

# Point the OS_* credential variables at the dedicated grenade heat user.
function _heat_set_user {
    OS_TENANT_NAME=$HEAT_PROJECT
    OS_PROJECT_NAME=$HEAT_PROJECT
    OS_USERNAME=$HEAT_USER
    OS_PASSWORD=$HEAT_PASS
}

function create {
    # run heat_integrationtests instead of tempest smoke before create
    # TODO(sbaker) run with tempest after the next major release
    pushd "$BASE_DEVSTACK_DIR/../heat"
    conf_file=heat_integrationtests/heat_integrationtests.conf
    iniset "$conf_file" heat_plugin username "$OS_USERNAME"
    iniset "$conf_file" heat_plugin password "$OS_PASSWORD"
    iniset "$conf_file" heat_plugin tenant_name "$OS_PROJECT_NAME"
    iniset "$conf_file" heat_plugin auth_url "$OS_AUTH_URL"
    iniset "$conf_file" heat_plugin user_domain_name "$OS_USER_DOMAIN_NAME"
    iniset "$conf_file" heat_plugin project_domain_name "$OS_PROJECT_DOMAIN_NAME"
    iniset "$conf_file" heat_plugin region "$OS_REGION_NAME"
    tox -eintegration heat_integrationtests.functional.test_create_update
    popd

    # creates a tenant for the server; eval imports the printed id=... line
    eval "$(openstack project create -f shell -c id "$HEAT_PROJECT")"
    if [[ -z "$id" ]]; then
        die $LINENO "Didn't create $HEAT_PROJECT project"
    fi
    resource_save heat project_id "$id"

    # creates the user, and sets $id locally
    eval "$(openstack user create "$HEAT_USER" \
        --project "$id" \
        --password "$HEAT_PASS" \
        -f shell -c id)"
    if [[ -z "$id" ]]; then
        die $LINENO "Didn't create $HEAT_USER user"
    fi
    resource_save heat user_id "$id"

    _heat_set_user
    local stack_name='grenadine'
    resource_save heat stack_name "$stack_name"
    # Split declaration from command substitution so a dirname failure is not
    # masked by `local`; prefer $( ) over backticks.
    local loc
    loc=$(dirname "$BASH_SOURCE")
    heat stack-create -f "$loc/templates/random_string.yaml" "$stack_name"
}

function verify {
    _heat_set_user
    stack_name=$(resource_get heat stack_name)
    heat stack-show "$stack_name"
    # TODO(sirushtim): Create more granular checks for Heat.
}

function verify_noapi {
    # TODO(sirushtim): Write tests to validate liveness of the resources
    # it creates during possible API downtime.
    :
}

function destroy {
    _heat_set_user
    heat stack-delete "$(resource_get heat stack_name)"

    source "$TOP_DIR/openrc" admin admin
    local user_id
    user_id=$(resource_get heat user_id)
    local project_id
    project_id=$(resource_get heat project_id)
    openstack user delete "$user_id"
    openstack project delete "$project_id"
}

# Dispatcher
case "$1" in
    "create")
        create
        ;;
    "verify_noapi")
        verify_noapi
        ;;
    "verify")
        verify
        ;;
    "destroy")
        destroy
        ;;
esac
|
"use strict";

const {Gtk, Soup} = imports.gi;
const ExtensionUtils = imports.misc.extensionUtils;

// GSettings schema id and key names used by the extension's preferences.
const GS_SCHEMA = "org.gnome.shell.extensions.thanatophobia";
const GS_KEY_YEAR = "year";
const GS_KEY_MONTH = "month";
const GS_KEY_DAY = "day";
const GS_KEY_HOUR = "hour";
const GS_KEY_MINUTE = "minute";
const GS_KEY_SEX = "sex";
const GS_KEY_COUNTRY = "country";
const GS_KEY_LIFE_EXPECTANCY = "expectancy";
const GS_KEY_LIFE_EXPECTANCY_YEAR = "expectancy-year";
const GS_KEY_COUNTDOWN = "countdown";
const GS_KEY_DIGITS = "rounding";

// Shared HTTP session used by GET() for life-expectancy lookups.
const session = new Soup.Session();
// Called once by GNOME Shell when the prefs module is loaded; nothing to set up.
function init() {
}
// Clamp `value` into [lo, hi]; a NaN input (e.g. from a failed parse)
// collapses to the lower bound. Uses the coercing global isNaN on purpose.
function limit(lo, hi, value) {
    if (isNaN(value)) {
        return lo;
    }
    return Math.min(hi, Math.max(lo, value));
}
// Builds the expectancy label shown in the prefs dialog.
// The wording (including "live expectancy") is kept byte-for-byte.
function getExpectancyString(le, year) {
    return "Your current live expectancy is: " + le + " (as of " + year + ")";
}
// Perform an HTTP GET and invoke `callback` with the raw response bytes,
// or with null on any failure (bad URL, network error, empty response).
// NOTE(review): send_and_read blocks the calling thread — presumably fine
// for the prefs dialog, but confirm it is never used on the shell UI path.
function GET(url, callback) {
    try {
        const request = Soup.Message.new("GET", url);
        const data = session.send_and_read(request, null)
        if (!data) callback(null)
        else callback(data.get_data())
    } catch (e) {
        // All errors collapse to a null result for the caller.
        callback(null)
    }
}
function buildPrefsWidget() {
this.settings = ExtensionUtils.getSettings(GS_SCHEMA);
/******************************
* Set up labels
******************************/
let birthdateLabel = new Gtk.Label({
label: "Birthdate:", halign: Gtk.Align.START, visible: true
});
let timeLabel = new Gtk.Label({
label: "Time:", halign: Gtk.Align.START, visible: true
});
let timeSeparatorLabel = new Gtk.Label({
label: ":", halign: Gtk.Align.START, visible: true
});
let countryLabel = new Gtk.Label({
label: "Country (ISO-a3):", halign: Gtk.Align.START, visible: true
});
let sexLabel = new Gtk.Label({
label: "Sex:", halign: Gtk.Align.START, visible: true
});
let expectancyLabel = new Gtk.Label({
label: getExpectancyString(this.settings.get_double(GS_KEY_LIFE_EXPECTANCY), this.settings.get_int(GS_KEY_LIFE_EXPECTANCY_YEAR)),
halign: Gtk.Align.START,
visible: true
});
let countryLink = new Gtk.LinkButton({
label: "ISO 3166 Code List",
uri: "https://www.iso.org/obp/ui/#search/code/",
halign: Gtk.Align.END,
visible: true
});
let modeLabel = new Gtk.Label({
label: "Display Mode:", halign: Gtk.Align.START, visible: true
});
let digitLabel = new Gtk.Label({
label: "Number of digits:", halign: Gtk.Align.START, visible: true
});
/******************************
* Set up widgets
******************************/
// Calendar
let birthdateEntry = new Gtk.Calendar({
year: this.settings.get_int(GS_KEY_YEAR),
month: this.settings.get_int(GS_KEY_MONTH) - 1,
day: this.settings.get_int(GS_KEY_DAY),
halign: Gtk.Align.START,
visible: true
});
// Birthdate hours
let hourEntry = new Gtk.SpinButton();
hourEntry.set_sensitive(true);
hourEntry.set_numeric(true);
hourEntry.set_range(0, 23);
hourEntry.set_value(limit(0, 23, settings.get_int(GS_KEY_HOUR)));
hourEntry.set_increments(1, 2);
// Birthdate minutes
let minuteEntry = new Gtk.SpinButton();
minuteEntry.set_sensitive(true);
minuteEntry.set_numeric(true);
minuteEntry.set_range(0, 59);
minuteEntry.set_value(limit(0, 59, settings.get_int(GS_KEY_MINUTE)));
minuteEntry.set_increments(1, 5);
// Residency country
let countryEntry = new Gtk.Entry({
buffer: new Gtk.EntryBuffer()
});
countryEntry.set_text(settings.get_string(GS_KEY_COUNTRY))
// Biological sex
let sexEntry = new Gtk.ComboBoxText()
sexEntry.append_text("Male");
sexEntry.append_text("Female");
sexEntry.set_active(settings.get_int(GS_KEY_SEX) === 1 ? 0 : 1);
// Recalculate button
let recalculateButton = new Gtk.Button({label: "Recalculate"})
// Mode selection
let modeEntry = new Gtk.ComboBoxText()
modeEntry.append_text("Count up from birthday (age)");
modeEntry.append_text("Count down from life expectancy");
modeEntry.set_active(settings.get_int(GS_KEY_COUNTDOWN) === 1 ? 1 : 0);
// Digit selection
let digitEntry = new Gtk.SpinButton();
digitEntry.set_sensitive(true);
digitEntry.set_numeric(true);
digitEntry.set_range(0, 15);
digitEntry.set_value(limit(0, 15, settings.get_int(GS_KEY_DIGITS)));
digitEntry.set_increments(1, 2);
/******************************
* Add widgets to container
******************************/
// Container
let prefsWidget = new Gtk.Grid({
"margin-start": 18,
"margin-end": 18,
"margin-top": 18,
"margin-bottom": 18,
"column_spacing": 12,
"row_spacing": 12,
"visible": true
});
// Calendar
prefsWidget.attach(birthdateLabel, 0, 1, 1, 1);
prefsWidget.attach_next_to(birthdateEntry, birthdateLabel, Gtk.PositionType.RIGHT, 3, 1);
// Birthdate hours and minutes
prefsWidget.attach_next_to(timeLabel, birthdateLabel, Gtk.PositionType.BOTTOM, 1, 1);
prefsWidget.attach_next_to(hourEntry, timeLabel, Gtk.PositionType.RIGHT, 1, 1);
prefsWidget.attach_next_to(timeSeparatorLabel, hourEntry, Gtk.PositionType.RIGHT, 1, 1);
prefsWidget.attach_next_to(minuteEntry, timeSeparatorLabel, Gtk.PositionType.RIGHT, 1, 1);
// Biological sex
prefsWidget.attach_next_to(sexLabel, timeLabel, Gtk.PositionType.BOTTOM, 1, 1);
prefsWidget.attach_next_to(sexEntry, sexLabel, Gtk.PositionType.RIGHT, 3, 1);
// Residency country
prefsWidget.attach_next_to(countryLabel, sexLabel, Gtk.PositionType.BOTTOM, 1, 1);
prefsWidget.attach_next_to(countryEntry, countryLabel, Gtk.PositionType.RIGHT, 3, 1);
// Recalculate
prefsWidget.attach_next_to(expectancyLabel, countryLabel, Gtk.PositionType.BOTTOM, 4, 1);
prefsWidget.attach(recalculateButton, 0, 7, 1, 1);
// Country link
prefsWidget.attach(countryLink, 1, 7, 3, 1);
// Display mode
prefsWidget.attach_next_to(modeLabel, recalculateButton, Gtk.PositionType.BOTTOM, 1, 1);
prefsWidget.attach_next_to(modeEntry, modeLabel, Gtk.PositionType.RIGHT, 3, 1);
// Digit
prefsWidget.attach_next_to(digitLabel, modeLabel, Gtk.PositionType.BOTTOM, 1, 1);
prefsWidget.attach_next_to(digitEntry, digitLabel, Gtk.PositionType.RIGHT, 3, 1);
/******************************
* Add callbacks
******************************/
// Calendar changed
function update_date(inputField) {
settings.set_int(GS_KEY_YEAR, inputField.get_date().get_year());
settings.set_int(GS_KEY_MONTH, inputField.get_date().get_month());
settings.set_int(GS_KEY_DAY, inputField.get_date().get_day_of_month());
}
birthdateEntry.connect("day-selected", update_date);
birthdateEntry.connect("next-month", update_date);
birthdateEntry.connect("prev-month", update_date);
birthdateEntry.connect("next-year", update_date);
birthdateEntry.connect("prev-year", update_date);
// Hours
hourEntry.connect("value-changed", function (field) {
settings.set_int(GS_KEY_HOUR, field.get_value_as_int());
});
// Minutes
minuteEntry.connect("value-changed", function (field) {
settings.set_int(GS_KEY_MINUTE, field.get_value_as_int());
});
// Country
countryEntry.connect("changed", function (field) {
settings.set_string(GS_KEY_COUNTRY, field.get_text().toUpperCase());
})
// Sex
sexEntry.connect("changed", function (field) {
settings.set_int(GS_KEY_SEX, field.get_active() === 1 ? 0 : 1);
});
// Button
recalculateButton.connect("clicked", () => {
// Users country ISO-3 code
let country = settings.get_string(GS_KEY_COUNTRY);
// User's gender converted to a string for filtering the results of the WHO API
// "BTSX" (both sexes) and "UNK" (unknown) are also available and could be a fall-back
let sex = settings.get_int(GS_KEY_SEX) === 1 ? "MLE" : "FMLE";
// The WHO may not have data for every year so "getData" gets called recursively
// in order to find the latest year with data available
const getData = yearOffset => {
// If the data is more than 20 years old, give up, this may also happen
// because an invalid country ISO alpha-3 code was provided
if (yearOffset > 20) {
expectancyLabel.set_text("Could not fetch data, using previous: " + this.settings.get_double(GS_KEY_LIFE_EXPECTANCY) + "\nDid you input a correct ISO alpha-3 country code?")
return;
}
// Try to fetch data from WHO
expectancyLabel.set_text(`Fetching data from WHO (${yearOffset} years old)...`);
// Get year with offset
let year = new Date().getFullYear() - yearOffset;
// Make API call
GET(`https://apps.who.int/gho/athena/api/GHO/WHOSIS_000001.json?profile=simple&filter=COUNTRY:${country};YEAR:${year};SEX:${sex}`, (body) => {
if (body) {
// Try to parse body on success
try {
const json = JSON.parse(body);
if (json["fact"].length === 0) {
// The JSON returned by the API has no data
getData(yearOffset + 1);
return;
}
// Average the results returned by the API
let sum = 0
for (const fact of json["fact"]) {
sum += parseFloat(fact["Value"])
}
const expectancy = sum / json["fact"].length;
// Update settings
settings.set_double(GS_KEY_LIFE_EXPECTANCY, expectancy);
settings.set_int(GS_KEY_LIFE_EXPECTANCY_YEAR, year);
// Update label
expectancyLabel.set_text(getExpectancyString(this.settings.get_double(GS_KEY_LIFE_EXPECTANCY), this.settings.get_int(GS_KEY_LIFE_EXPECTANCY_YEAR)))
} catch (e) {
// The WHO API sometimes returns badly formatted JSON strings
// it could also change, making the parsing process break
getData(yearOffset + 1);
}
} else {
// Fail on error code
getData(yearOffset + 1);
}
})
}
getData(0);
})
// Display mode
modeEntry.connect("changed", function (field) {
settings.set_int(GS_KEY_COUNTDOWN, field.get_active() === 1 ? 1 : 0);
});
// Digit
digitEntry.connect("changed", function (field) {
settings.set_int(GS_KEY_DIGITS, field.get_value_as_int());
});
// Put container on window
prefsWidget.connect("realize", () => {
{
let window = prefsWidget.get_root();
window.default_width = 300;
window.default_height = 700;
}
});
return prefsWidget;
}
|
<filename>packages/react-core/src/components/AlertGroup/examples/AlertGroupAsync.tsx
import React from 'react';
import {
Alert,
AlertProps,
AlertGroup,
AlertActionCloseButton,
AlertVariant,
InputGroup,
useInterval
} from '@patternfly/react-core';
export const AlertGroupAsync: React.FunctionComponent = () => {
  // Alert descriptors currently shown in the toast group.
  const [alerts, setAlerts] = React.useState<Partial<AlertProps>[]>([]);
  // Whether the interval that appends alerts is active.
  const [isRunning, setIsRunning] = React.useState(false);
  const btnClasses = ['pf-c-button', 'pf-m-secondary'].join(' ');

  // A millisecond timestamp doubles as the unique React key for each alert.
  const getUniqueId = () => new Date().getTime();

  const addAlert = () => {
    setAlerts(prevAlerts => {
      const nextAlert: Partial<AlertProps> = {
        title: `Async notification ${prevAlerts.length + 1} was added to the queue.`,
        variant: 'danger',
        key: getUniqueId()
      };
      return prevAlerts.concat(nextAlert);
    });
  };

  const removeAlert = (key: React.Key) => {
    setAlerts(prevAlerts => prevAlerts.filter(alert => alert.key !== key));
  };

  const startAsyncAlerts = () => setIsRunning(true);
  const stopAsyncAlerts = () => setIsRunning(false);

  // Fire addAlert every 4.5 s while running; a null delay pauses the interval.
  useInterval(addAlert, isRunning ? 4500 : null);

  return (
    <React.Fragment>
      <InputGroup style={{ marginBottom: '16px' }}>
        <button onClick={startAsyncAlerts} type="button" className={btnClasses}>
          Start async alerts
        </button>
        <button onClick={stopAsyncAlerts} type="button" className={btnClasses}>
          Stop async alerts
        </button>
      </InputGroup>
      <AlertGroup isToast isLiveRegion aria-live="assertive">
        {alerts.map(({ title, variant, key }) => (
          <Alert
            variant={AlertVariant[variant]}
            title={title}
            key={key}
            actionClose={
              <AlertActionCloseButton
                title={title as string}
                variantLabel={`${variant} alert`}
                onClose={() => removeAlert(key)}
              />
            }
          />
        ))}
      </AlertGroup>
    </React.Fragment>
  );
};
|
#!/bin/bash
# Build the osstracker-scraper image on top of BASEIMAGE and push it to
# REGBASE, retrying the push up to RETRY_COUNT times.
set -x

# Both variables must be supplied by the caller's environment.
if [ -z "$BASEIMAGE" ] || [ -z "$REGBASE" ]; then
  echo "need to define BASEIMAGE and REGBASE variables" >&2
  exit 1
fi

FINALIMAGE="$REGBASE/netflixoss/osstracker-scraper:latest"

docker pull "$BASEIMAGE"
# NOTE(review): 'docker tag -f' is the pre-1.12 force flag — confirm the
# target docker version still accepts it.
docker tag -f "$BASEIMAGE" javabase:latest
docker build -t netflixoss/osstracker-scraper:latest .
docker tag -f netflixoss/osstracker-scraper:latest "$FINALIMAGE"

RETRY_COUNT=5
build_succeeded=0
while [[ $RETRY_COUNT -gt 0 && $build_succeeded -ne 1 ]]; do
  if docker push "$FINALIMAGE"; then
    build_succeeded=1
  else
    echo "push failed, will retry" >&2
    # Bug fix: the original 'RETRY_COUNT=$RETRY_COUNT-1' assigned the literal
    # string "5-1" (it only worked by accident via [[ -gt ]] arithmetic
    # evaluation). Use real arithmetic.
    RETRY_COUNT=$((RETRY_COUNT - 1))
  fi
done

# Key the failure check off success, not the (now exhausted) counter.
if [[ $build_succeeded -ne 1 ]]; then
  echo "all push retries failed, failing script" >&2
  exit 1
fi
|
import org.springframework.stereotype.Service;
@Service
public class TeamServiceImpl implements TeamService {
@Autowired
TeamRepository teamRepository;
@Override
public Team createTeam(Team team) {
return teamRepository.save(team);
}
@Override
public Team getTeamById(Long teamId) {
return teamRepository.findById(teamId).orElse(null);
}
} |
<gh_stars>1-10
/**
* desktopBrowsers contributed by <NAME> @ Badoo
* translates desktop browsers events to touch events and prevents defaults
* It can be used independently in other apps but it is required for using the touchLayer in the desktop
*
* @param {Function} $ The appframework selector function
*/
(function ($) {
    "use strict";
    // Set when a synthetic touchmove fired between touchstart/touchend so the
    // trailing native click can be suppressed.
    var cancelClickMove = false;
    //See if we can create a touch event
    var tmp;
    // Platform already provides real touch events — nothing to translate.
    if($.os.supportsTouch) return;
    try {
        tmp = document.createEvent("TouchEvent");
        return;
    } catch (ex) {
    }
    // From here on, mouse/pointer events are translated into touch events.
    $.os.supportsTouch=true;
    // Swallow an event completely: default action and propagation.
    var preventAll = function (e) {
        e.preventDefault();
        e.stopPropagation();
    };
    // Minimum pixel movement before an MSPointerMove is treated as touchmove
    // (phones get a smaller threshold than desktop IE).
    var ieThreshold=navigator.userAgent.match(/Phone/i)?2:7;
    /**
     * Stop propagation, and remove default behavior for everything but INPUT, TEXTAREA & SELECT fields
     *
     * @param {Event} event
     * @param {HTMLElement} target
     */
    var preventAllButInputs = function(event, target) {
        var tag = target.tagName.toUpperCase();
        if (tag.indexOf("SELECT") > -1 || tag.indexOf("TEXTAREA") > -1 || tag.indexOf("INPUT") > -1) {
            return;
        }
        preventAll(event);
    };
    // Build a synthetic touch* event mirroring a mouse/pointer event and
    // dispatch it on newTarget (or the original event's target when null).
    var redirectMouseToTouch = function (type, originalEvent, newTarget,skipPrevent) {
        var theTarget = newTarget ? newTarget : originalEvent.target;
        if(!skipPrevent)
            preventAllButInputs(originalEvent, theTarget);
        var touchevt = document.createEvent("MouseEvent");
        touchevt.initEvent(type, true, true);
        touchevt.initMouseEvent(type, true, true, window, originalEvent.detail, originalEvent.screenX, originalEvent.screenY, originalEvent.clientX, originalEvent.clientY, originalEvent.ctrlKey, originalEvent.shiftKey, originalEvent.altKey, originalEvent.metaKey, originalEvent.button, originalEvent.relatedTarget);
        // Fake the TouchEvent API surface ($.feat provides the shims).
        touchevt.touches= new $.feat.TouchList();
        touchevt.changedTouches = new $.feat.TouchList();
        touchevt.targetTouches = new $.feat.TouchList();
        var thetouch=new $.feat.Touch();
        thetouch.pageX=originalEvent.pageX;
        thetouch.pageY=originalEvent.pageY;
        thetouch.target=originalEvent.target;
        touchevt.changedTouches._add(thetouch);
        // touchend carries the finger only in changedTouches.
        if (type !== "touchend") {
            touchevt.touches = touchevt.changedTouches;
            touchevt.targetTouches = touchevt.changedTouches;
        }
        //target
        // Marker so the click handler below can recognize synthetic events.
        touchevt.mouseToTouch = true;
        if ($.os.ie) {
            // handle inline event handlers for target and parents (for bubbling)
            // SECURITY NOTE(review): this eval executes markup-provided "on*"
            // attribute code — acceptable only for fully trusted documents.
            var elem = originalEvent.target;
            while (elem !== null) {
                if (elem.hasAttribute("on" + type)) {
                    eval(elem.getAttribute("on" + type));
                }
                elem = elem.parentElement;
            }
        }
        theTarget.dispatchEvent(touchevt);
    };
    // Drag-tracking state shared by the listeners registered below.
    var mouseDown = false,
        lastTarget = null,
        prevX=0,
        prevY=0;
    if (!window.navigator.msPointerEnabled) {
        // Standard mouse events → synthetic touch events.
        document.addEventListener("mousedown", function (e) {
            mouseDown = true;
            lastTarget = e.target;
            if (e.target.nodeName.toLowerCase() === "a" && e.target.href.toLowerCase() === "javascript:;")
                e.target.href = "#";
            redirectMouseToTouch("touchstart", e);
            cancelClickMove = false;
            prevX=e.clientX;
            prevY=e.clientY;
        }, true);
        document.addEventListener("mouseup", function (e) {
            if (!mouseDown) return;
            redirectMouseToTouch("touchend", e, lastTarget); //bind it to initial mousedown target
            lastTarget = null;
            mouseDown = false;
        }, true);
        document.addEventListener("mousemove", function (e) {
            if(e.clientX===prevX&&e.clientY===prevY) return;
            if (!mouseDown) return;
            redirectMouseToTouch("touchmove", e, lastTarget);
            cancelClickMove = true;
        }, true);
    } else { //Win8
        var skipMove=false;
        document.addEventListener("MSPointerDown", function (e) {
            mouseDown = true;
            skipMove=true;
            lastTarget = e.target;
            if (e.target.nodeName.toLowerCase() === "a" && e.target.href.toLowerCase() === "javascript:;")
                e.target.href = "";
            redirectMouseToTouch("touchstart", e,null,true);
            cancelClickMove = false;
            prevX=e.clientX;
            prevY=e.clientY;
            return true;
        }, true);
        document.addEventListener("MSPointerUp", function (e) {
            if (!mouseDown) return;
            redirectMouseToTouch("touchend", e, lastTarget,true); // bind it to initial mousedown target
            lastTarget = null;
            mouseDown = false;
            return true;
        }, true);
        document.addEventListener("MSPointerMove", function (e) {
            //IE is very flakey...we need 7 pixel movement before we trigger it
            if(Math.abs(e.clientX-prevX)<=ieThreshold||Math.abs(e.clientY-prevY)<=ieThreshold) return;
            if (!mouseDown) return;
            redirectMouseToTouch("touchmove", e, lastTarget,true);
            cancelClickMove = true;
            return true;
        }, true);
    }
    // prevent all mouse events which don't exist on touch devices
    document.addEventListener("drag", preventAll, true);
    document.addEventListener("dragstart", preventAll, true);
    document.addEventListener("dragenter", preventAll, true);
    document.addEventListener("dragover", preventAll, true);
    document.addEventListener("dragleave", preventAll, true);
    document.addEventListener("dragend", preventAll, true);
    document.addEventListener("drop", preventAll, true);
    // Allow selection of input elements
    document.addEventListener("selectstart", function(e){
        preventAllButInputs(e, e.target);
    }, true);
    // Suppress native clicks that duplicate a synthetic touch sequence, and
    // clicks that followed a drag (cancelClickMove).
    document.addEventListener("click", function (e) {
        if (!e.mouseToTouch && e.target === lastTarget) {
            preventAll(e);
        }
        if (cancelClickMove) {
            preventAll(e);
            cancelClickMove = false;
        }
    }, true);
    // NOTE(review): invoked with jQuery although the header documents the
    // appframework selector — confirm which library supplies $ here.
})(jQuery,window);
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

export BITS=64
export DEVICE=r7plus
export DEVICE_COMMON=msm8939-common
export VENDOR=oppo

# Forward all arguments; quote "$@" and the path so values containing spaces
# survive word-splitting (the original used an unquoted $@).
./../"$DEVICE_COMMON"/extract-files.sh "$@"
|
import * as React from 'react';
import './App.css';
import './datasets.css';
import {Link, useParams} from 'react-router-dom';
import Paper from '@mui/material/Paper'
import Table from '@mui/material/Table';
import TableBody from '@mui/material/TableBody';
import TableCell from '@mui/material/TableCell';
import TableContainer from '@mui/material/TableContainer';
import TableHead from '@mui/material/TableHead';
import TablePagination from '@mui/material/TablePagination';
import TableRow from '@mui/material/TableRow';
// Column definitions for the dataset table: stable id (matches the keys of
// the row objects built by createTable), header label, and a minimum width.
const columns = [
  { id: 'name', label: 'Name', minWidth: 170 },
  { id: 'desc', label: 'Description', minWidth: 200 },
  { id: 'download', label: 'Download', minWidth: 170 },
];
function createTable() {
var data = require('./dataset_list.json');
var index = 0;
var data_list = [];
while (data[index] != null) {
var link = data[index].Link;
var name = <a rel="noopener noreferrer" href={link} target="_blank"> {data[index].Name} </a>;
var desc = data[index].Description;
var download_link = data[index].Download;
var download = <a href={download_link}> {download_link} </a>
var element = { name, desc, download };
data_list.push(element);
index++;
}
return data_list;
}
const rows = createTable();
// Paginated, sticky-header table over the module-level `rows` list.
// Pagination state (current page + rows per page) is local component state.
// NOTE(review): TableRow below uses key={row.code}, but rows only have
// name/desc/download — the key is always undefined; consider a stable key.
function StickyHeadTable() {
  const [page, setPage] = React.useState(0);
  const [rowsPerPage, setRowsPerPage] = React.useState(10);
  // Page navigation callback for TablePagination.
  const handleChangePage = (event, newPage) => {
    setPage(newPage);
  };
  // Changing the page size resets the view to the first page.
  const handleChangeRowsPerPage = (event) => {
    setRowsPerPage(+event.target.value);
    setPage(0);
  };
  return (
    <Paper sx={{ width: '100%', overflow: 'hidden' }}>
      <TableContainer sx={{ maxHeight: 700 }}>
        <Table stickyHeader aria-label="sticky table">
          <TableHead>
            <TableRow>
              {columns.map((column) => (
                <TableCell
                  key={column.id}
                  align={column.align}
                  style={{ minWidth: column.minWidth }}
                >
                  {column.label}
                </TableCell>
              ))}
            </TableRow>
          </TableHead>
          <TableBody>
            {rows
              .slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage)
              .map((row) => {
                return (
                  <TableRow hover role="checkbox" tabIndex={-1} key={row.code}>
                    {columns.map((column) => {
                      const value = row[column.id];
                      return (
                        <TableCell key={column.id} align={column.align}>
                          {column.format && typeof value === 'number'
                            ? column.format(value)
                            : value}
                        </TableCell>
                      );
                    })}
                  </TableRow>
                );
              })}
          </TableBody>
        </Table>
      </TableContainer>
      <TablePagination
        rowsPerPageOptions={[10, 25, 100]}
        component="div"
        count={rows.length}
        rowsPerPage={rowsPerPage}
        page={page}
        onPageChange={handleChangePage}
        onRowsPerPageChange={handleChangeRowsPerPage}
      />
    </Paper>
  );
}
function Buttons () {
const {userId} = useParams();
return(
<div>
<Link to={`/blank/${userId}`}>
<button className = "button">User Home</button>
</Link>
<Link to = "/">
<button className = "button">Logout</button>
</Link>
</div>
);
}
// Page component for the dataset download screen.
// NOTE(review): React components are conventionally PascalCase; this
// lowercase default export works only because importers pick their own
// binding name — consider renaming to `Datasets`.
export default function datasets() {
  return(
    <div className = "datasets">
      <h1 id = "header">CIRQUICK</h1>
      <br />
      <Buttons />
      <h2>Dataset Download</h2>
      <div id="table">
        <StickyHeadTable />
      </div>
    </div>
  );
}
import logging
from datetime import datetime
import numpy as np
from netCDF4 import Dataset
logger = logging.getLogger(__name__)
def get_netcdf_dimensions(file_path):
    """Return a mapping of dimension name -> size for the given netCDF file."""
    with Dataset(file_path, 'r', format='NETCDF4') as rootgrp:
        sizes = {}
        for name, dimension in rootgrp.dimensions.items():
            sizes[name] = dimension.size
        return sizes
def get_netcdf_variables(file_path):
    """Return ``{variable_name: {attribute: value}}`` for a netCDF file.

    numpy scalar attribute values are converted to native Python numbers so
    the result is plain-Python (e.g. JSON-serializable).

    Args:
        file_path: path to the netCDF file (opened read-only).
    """
    with Dataset(file_path, 'r', format='NETCDF4') as rootgrp:
        variables = {}
        for variable_name, variable in rootgrp.variables.items():
            variables[variable_name] = {}
            for key, value in variable.__dict__.items():
                # Generalized from the original float32/float64/int32/int64
                # checks: np.floating/np.integer also cover e.g. float16,
                # int16 and unsigned widths.
                if isinstance(value, np.floating):
                    value = float(value)
                elif isinstance(value, np.integer):
                    value = int(value)
                variables[variable_name][key] = value
        return variables
def get_netcdf_global_attributes(file_path):
    """Return the global attributes of a netCDF file as a plain dict.

    numpy scalar values are converted to native Python numbers so the result
    is plain-Python (e.g. JSON-serializable).

    Args:
        file_path: path to the netCDF file (opened read-only).
    """
    with Dataset(file_path, 'r', format='NETCDF4') as rootgrp:
        global_attributes = {}
        for key, value in rootgrp.__dict__.items():
            # Generalized from the original float32/float64/int32/int64
            # checks: covers all numpy float/int widths.
            if isinstance(value, np.floating):
                value = float(value)
            elif isinstance(value, np.integer):
                value = int(value)
            global_attributes[key] = value
        return global_attributes
def update_netcdf_global_attributes(file_path, set_attributes=None, delete_attributes=None):
    """Delete and/or set global attributes on a netCDF file in place.

    Args:
        file_path: path to the netCDF file (opened in append mode).
        set_attributes: mapping of attribute name -> value; values are
            stringified via :func:`value2string` before being written.
        delete_attributes: iterable of attribute names to remove.
    """
    # Fix: avoid mutable default arguments ({} / []), which are shared
    # across calls in the original signature.
    if set_attributes is None:
        set_attributes = {}
    if delete_attributes is None:
        delete_attributes = []
    with Dataset(file_path, 'a', format='NETCDF4') as rootgrp:
        for attr in rootgrp.__dict__:
            if attr in delete_attributes:
                logger.debug('delete %s in %s', attr, file_path)
                rootgrp.delncattr(attr)
        for attr, value in set_attributes.items():
            logger.debug('set %s to %s in %s', attr, value, file_path)
            rootgrp.setncattr(attr, value2string(value))
def value2string(value):
    """Convert a value to the string form stored as a netCDF attribute.

    datetime values become ISO-8601 strings with a trailing 'Z'.
    NOTE(review): the input datetime is assumed to already be UTC — no
    timezone conversion is performed. Everything else goes through str().
    """
    if isinstance(value, datetime):
        # Bug fix: the original line ended with a trailing comma, so this
        # branch returned a one-element tuple instead of a string.
        return value.isoformat() + 'Z'
    return str(value)
|
require 'rails_helper'
# Feature coverage for the maintenance (cancel/terminate) CV upload flow.
# Each scenario signs in as an admin, seeds a 2014 health plan whose premium
# table the fixture XML amounts are validated against, then uploads a fixture.
feature 'uploading a cancel/term CV', :dbclean => :after_each do
  # Premium row (age 53, full-year 2014) referenced by the fixture files.
  given(:premium) do
    PremiumTable.new(
      rate_start_date: Date.new(2014, 1, 1),
      rate_end_date: Date.new(2014, 12, 31),
      age: 53,
      amount: 398.24
    )
  end
  background do
    user = create :user, :admin
    visit root_path
    sign_in_with(user.email, user.password)
    # Note: The file fixture is dependent on this record.
    plan = Plan.new(coverage_type: 'health', hios_plan_id: '11111111111111-11', year: 2014, ehb: 0.5)
    plan.premium_tables << premium
    plan.save!
  end
  # Happy path: cancel CV for a non-subscriber member uploads cleanly.
  scenario 'nonsubscriber member canceled' do
    visit new_vocab_upload_path
    choose 'Maintenance'
    file_path = Rails.root + "spec/support/fixtures/cancel/nonsubscriber_cancel.xml"
    attach_file('vocab_upload_vocab', file_path)
    click_button "Upload"
    expect(page).to have_content 'Uploaded successfully.'
  end
  # Happy path: cancel CV for the subscriber member uploads cleanly.
  scenario 'subscriber member canceled' do
    visit new_vocab_upload_path
    choose 'Maintenance'
    file_path = Rails.root + "spec/support/fixtures/cancel/subscriber_cancel.xml"
    attach_file('vocab_upload_vocab', file_path)
    click_button "Upload"
    expect(page).to have_content 'Uploaded successfully.'
  end
  # Validation path: a premium total disagreeing with the plan's premium
  # table is rejected with a specific error message.
  scenario 'incorrect premium total' do
    visit new_vocab_upload_path
    choose 'Maintenance'
    file_path = Rails.root + "spec/support/fixtures/cancel/incorrect_premium_total.xml"
    attach_file('vocab_upload_vocab', file_path)
    click_button "Upload"
    expect(page).to have_content 'Failed to Upload.'
    expect(page).to have_content 'premium_amount_total is incorrect'
  end
end
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
#
# Expects PLACEHOLDER, WIDTH, HEIGHT and PREFIX in the environment. The
# heredoc delimiter below is deliberately unquoted so those ${...} references
# expand when the page is written. NOTE(review): unset variables silently
# expand to empty strings here — confirm that is acceptable for this template.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. A clasic hello world example!
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
#include <winsock2.h>
#include <windows.h>
#include <mapi.h>
#include <mapiform.h>
#include <mapiutil.h>
#include <mapival.h>
#include <mapispi.h>
#include <imessage.h>
#include <tnef.h>
#include <abhelp.h>
#include <strsafe.h>
// Check that we have the Outlook 2010 MAPI headers or higher
// We do this by checking for the presence of a macro not present in the older headers
#ifndef MAPIFORM_CPU_X64
#pragma message("Compilation requires Outlook 2010 MAPI headers or higher")
#pragma message("Go to the following URL")
#pragma message(" http://www.microsoft.com/downloads/en/details.aspx?FamilyID=f8d01fc8-f7b5-4228-baa3-817488a66db1&displaylang=en")
#pragma message("and follow the instructions to install the Outlook 2010 MAPI headers")
#pragma message("Then go to Tools\\Options\\Projects and Solutions\\VC++ Directories and ensure the headers include")
#pragma message("directory preceeds the Visual Studio include directories.")
#pragma message(" ")
#error Outlook 2010 MAPI headers or higher must be installed
#endif
// ExpandFunction builds the export-name string passed to GetProcAddress:
// x64/ARM exports are undecorated (#fn); x86 exports carry an "@<n>" suffix.
#if defined(_M_X64) || defined(_M_ARM)
#define ExpandFunction(fn, c) #fn
#elif defined(_M_IX86)
#define ExpandFunction(fn, c) #fn"@"#c
#else
#error "Unsupported Platform"
#endif
// Forward declarations for types not documented in MAPI headers
struct MAPIOFFLINE_CREATEINFO;
struct IMAPIOfflineMgr;
struct RTF_WCSINFO;
struct RTF_WCSRETINFO;
// Linkage selectors consumed by the DEFINE_STUB_FUNCTION_* macros below.
#define LINKAGE_EXTERN_C extern "C"
#define LINKAGE_NO_EXTERN_C /* */
// Forward declares from MapiStubUtil.cpp
HMODULE GetMAPIHandle();
HMODULE GetPrivateMAPI();
void UnLoadPrivateMAPI();
// Bumped whenever the MAPI DLL is (re)loaded; stubs compare it against their
// cached copy to know when a cached GetProcAddress result is stale.
extern volatile ULONG g_ulDllSequenceNum;
// Stub generator: void function, no parameters, resolved by exported name
// (_lookup). The resolved pointer is cached in a function-local static and
// re-resolved whenever g_ulDllSequenceNum changes (MAPI reloaded) or the DLL
// handle is gone.
#define DEFINE_STUB_FUNCTION_V0(_linkage, _modifiers, _name, _lookup) \
\
_linkage typedef void (_modifiers * _name##TYPE)(void); \
\
_linkage void _modifiers _name(void) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(); \
    } \
}
// Same as DEFINE_STUB_FUNCTION_V0, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_V0(_linkage, _modifiers, _name, _ordinal) \
\
_linkage typedef void (_modifiers * _name##TYPE)(void); \
\
_linkage void _modifiers _name(void) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(); \
    } \
}
#define DEFINE_STUB_FUNCTION_0(_linkage, _ret_type, _modifiers, \
_name, _lookup, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)(void);\
\
_linkage _ret_type _modifiers _name(void) \
{ \
static _name##TYPE _name##VAR = NULL; \
static UINT ulDllSequenceNum = 0; \
\
if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
|| (NULL == GetMAPIHandle())) \
{ \
_name##VAR = (_name##TYPE) \
::GetProcAddress(GetPrivateMAPI(), _lookup); \
ulDllSequenceNum = g_ulDllSequenceNum; \
} \
\
if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
{ \
return _name##VAR(); \
} \
else \
{ \
return _default; \
} \
}
#define DEFINE_STUB_FUNCTION_ORD_0(_linkage, _ret_type, _modifiers, \
_name, _ordinal, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)(void); \
\
_linkage _ret_type _modifiers _name(void) \
{ \
static _name##TYPE _name##VAR = NULL; \
static UINT ulDllSequenceNum = 0; \
\
if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
|| (NULL == GetMAPIHandle())) \
{ \
_name##VAR = (_name##TYPE) \
::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
ulDllSequenceNum = g_ulDllSequenceNum; \
} \
\
if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
{ \
return _name##VAR(); \
} \
else \
{ \
return _default; \
} \
}
// Stub generator: void function with one parameter, resolved by name.
#define DEFINE_STUB_FUNCTION_V1(_linkage, _modifiers, \
    _name, _lookup, _param1_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)(_param1_type); \
\
_linkage void _modifiers _name(_param1_type a) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a); \
    } \
}
// Same as DEFINE_STUB_FUNCTION_V1, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_V1(_linkage, _modifiers, \
    _name, _ordinal, _param1_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)(_param1_type); \
\
_linkage void _modifiers _name(_param1_type a) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a); \
    } \
}
// Stub generator: one parameter, returns _ret_type (_default on failure).
// Resolved by exported name.
#define DEFINE_STUB_FUNCTION_1(_linkage, _ret_type, \
    _modifiers, _name, _lookup, _param1_type, _default) \
\
_linkage typedef _ret_type \
    (_modifiers * _name##TYPE)(_param1_type); \
\
_linkage _ret_type _modifiers _name(_param1_type a) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a); \
    } \
    else \
    { \
        return _default; \
    } \
}
// Same as DEFINE_STUB_FUNCTION_1, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_1(_linkage, _ret_type, \
    _modifiers, _name, _ordinal, _param1_type, _default) \
\
_linkage typedef _ret_type \
    (_modifiers * _name##TYPE)(_param1_type); \
\
_linkage _ret_type _modifiers _name(_param1_type a) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a); \
    } \
    else \
    { \
        return _default; \
    } \
}
// Stub generator: void function with two parameters, resolved by name.
#define DEFINE_STUB_FUNCTION_V2(_linkage, _modifiers, \
    _name, _lookup, _param1_type, _param2_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type); \
\
_linkage void _modifiers _name(_param1_type a, _param2_type b) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b); \
    } \
}
// Same as DEFINE_STUB_FUNCTION_V2, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_V2(_linkage, _modifiers, \
    _name, _ordinal, _param1_type, _param2_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type); \
\
_linkage void _modifiers _name(_param1_type a, _param2_type b) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b); \
    } \
}
// Stub generator: two parameters, returns _ret_type (_default on failure).
// Resolved by exported name.
#define DEFINE_STUB_FUNCTION_2(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type); \
\
_linkage _ret_type _modifiers _name(_param1_type a, _param2_type b) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a, b); \
    } \
    else \
    { \
        return _default; \
    } \
}
// Same as DEFINE_STUB_FUNCTION_2, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_2(_linkage, _ret_type, _modifiers, \
    _name, _ordinal, _param1_type, _param2_type, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type); \
\
_linkage _ret_type _modifiers _name(_param1_type a, _param2_type b) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a, b); \
    } \
    else \
    { \
        return _default; \
    } \
}
// Stub generator: void function with three parameters, resolved by name.
#define DEFINE_STUB_FUNCTION_V3(_linkage, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type); \
\
_linkage void _modifiers _name( \
    _param1_type a, _param2_type b, _param3_type c) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b, c); \
    } \
}
// Same as DEFINE_STUB_FUNCTION_V3, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_V3(_linkage, _modifiers, \
    _name, _ordinal, _param1_type, _param2_type, _param3_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type); \
\
_linkage void _modifiers _name( \
    _param1_type a, _param2_type b, _param3_type c) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b, c); \
    } \
}
// Stub generator: three parameters, returns _ret_type (_default on failure).
// Resolved by exported name.
#define DEFINE_STUB_FUNCTION_3(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type); \
\
_linkage _ret_type _modifiers _name( \
    _param1_type a, _param2_type b, _param3_type c) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a, b, c); \
    } \
    else \
    { \
        return _default; \
    } \
}
// Same as DEFINE_STUB_FUNCTION_3, but the export is resolved by ordinal.
#define DEFINE_STUB_FUNCTION_ORD_3(_linkage, _ret_type, _modifiers, \
    _name, _ordinal, _param1_type, _param2_type, _param3_type, _default) \
\
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type); \
\
_linkage _ret_type _modifiers _name( \
    _param1_type a, _param2_type b, _param3_type c) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        return _name##VAR(a, b, c); \
    } \
    else \
    { \
        return _default; \
    } \
}
// DEFINE_STUB_FUNCTION_V4: emit a lazily bound forwarding stub for the MAPI
// export `_name` (4 parameters, returns void).  The pointer obtained via
// ::GetProcAddress(GetPrivateMAPI(), _lookup) is cached in a function-local
// static and re-resolved whenever g_ulDllSequenceNum changes or
// GetMAPIHandle() reports no loaded DLL; the call is silently skipped when
// the export cannot be resolved.
// FIX: the function-pointer typedef was missing its terminating ';'
// (every sibling macro has one), which made any expansion of this macro a
// syntax error.  The ';' is restored below.
#define DEFINE_STUB_FUNCTION_V4(_linkage, \
    _modifiers, _name, _lookup, _param1_type, \
    _param2_type, _param3_type, _param4_type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type); \
\
_linkage void _modifiers _name( \
    _param1_type a, _param2_type b, _param3_type c, _param4_type d) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b, c, d); \
    } \
}
// DEFINE_STUB_FUNCTION_4: emit a lazily bound forwarding stub for the MAPI
// export `_name` (4 parameters, returns _ret_type).  Resolution is by name
// via _lookup; the cached pointer is refreshed when g_ulDllSequenceNum
// changes or the MAPI handle is gone.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_4(_linkage, \
    _ret_type, _modifiers, _name, _lookup, _param1_type, \
    _param2_type, _param3_type, _param4_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type); \
_linkage _ret_type _modifiers _name( \
    _param1_type arg1, _param2_type arg2, _param3_type arg3, _param4_type arg4) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4); \
}
// DEFINE_STUB_FUNCTION_ORD_4: like DEFINE_STUB_FUNCTION_4, but the export is
// resolved by ordinal (`(LPSTR)(_ordinal)`).  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_ORD_4(_linkage, \
    _ret_type, _modifiers, _name, _ordinal, _param1_type, \
    _param2_type, _param3_type, _param4_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type); \
_linkage _ret_type _modifiers _name( \
    _param1_type arg1, _param2_type arg2, _param3_type arg3, _param4_type arg4) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4); \
}
// DEFINE_STUB_FUNCTION_5: emit a lazily bound forwarding stub for the MAPI
// export `_name` (5 parameters, returns _ret_type).  Resolution is by name
// via _lookup; the cached pointer is refreshed when g_ulDllSequenceNum
// changes or the MAPI handle is gone.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_5(_linkage, \
    _ret_type, _modifiers, _name, _lookup, \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, _param5_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5); \
}
// DEFINE_STUB_FUNCTION_ORD_5: like DEFINE_STUB_FUNCTION_5, but the export is
// resolved by ordinal (`(LPSTR)(_ordinal)`).  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_ORD_5(_linkage, \
    _ret_type, _modifiers, _name, _ordinal, \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, _param5_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5); \
}
// DEFINE_STUB_FUNCTION_6: emit a lazily bound forwarding stub for the MAPI
// export `_name` (6 parameters, returns _ret_type).  Resolution is by name
// via _lookup; the cached pointer is refreshed when g_ulDllSequenceNum
// changes or the MAPI handle is gone.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_6(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, \
    _param3_type, _param4_type, _param5_type, _param6_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5, _param6_type arg6) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6); \
}
// DEFINE_STUB_FUNCTION_V7: emit a lazily bound forwarding stub for the MAPI
// export `_name` (7 parameters, returns void).  Resolution is by name via
// _lookup; the call is silently skipped when the export cannot be resolved.
#define DEFINE_STUB_FUNCTION_V7(_linkage, _modifiers, _name, \
    _lookup, _param1_type, _param2_type, _param3_type, _param4_type,\
    _param5_type, _param6_type, _param7_type) \
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type); \
_linkage void _modifiers _name(_param1_type arg1, \
    _param2_type arg2, _param3_type arg3, _param4_type arg4, \
    _param5_type arg5, _param6_type arg6, _param7_type arg7) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return; \
    _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7); \
}
// DEFINE_STUB_FUNCTION_7: emit a lazily bound forwarding stub for the MAPI
// export `_name` (7 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_7(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5, \
    _param6_type arg6, _param7_type arg7) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7); \
}
// DEFINE_STUB_FUNCTION_8: emit a lazily bound forwarding stub for the MAPI
// export `_name` (8 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_8(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _param8_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type, _param8_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, \
    _param2_type arg2, _param3_type arg3, _param4_type arg4, _param5_type arg5, \
    _param6_type arg6, _param7_type arg7, _param8_type arg8) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8); \
}
// DEFINE_STUB_FUNCTION_9: emit a lazily bound forwarding stub for the MAPI
// export `_name` (9 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_9(_linkage, _ret_type, \
    _modifiers, _name, _lookup, _param1_type, _param2_type, \
    _param3_type, _param4_type, _param5_type, _param6_type, \
    _param7_type, _param8_type, _param9_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, \
    _param7_type, _param8_type, _param9_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5, \
    _param6_type arg6, _param7_type arg7, _param8_type arg8, _param9_type arg9) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9); \
}
// DEFINE_STUB_FUNCTION_10: emit a lazily bound forwarding stub for the MAPI
// export `_name` (10 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
#define DEFINE_STUB_FUNCTION_10(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, \
    _param8_type, _param9_type, _param10_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, \
    _param7_type, _param8_type, _param9_type, _param10_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, _param2_type arg2, \
    _param3_type arg3, _param4_type arg4, _param5_type arg5, _param6_type arg6, \
    _param7_type arg7, _param8_type arg8, _param9_type arg9, _param10_type arg10) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10); \
}
// DEFINE_STUB_FUNCTION_11: emit a lazily bound forwarding stub for the MAPI
// export `_name` (11 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
// (The 11th macro parameter, originally spelled `_param11Type`, is renamed
// to `_param11_type` for consistency; macro formals are purely internal.)
#define DEFINE_STUB_FUNCTION_11(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, \
    _param2_type arg2, _param3_type arg3, _param4_type arg4, \
    _param5_type arg5, _param6_type arg6, _param7_type arg7, \
    _param8_type arg8, _param9_type arg9, _param10_type arg10, _param11_type arg11) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, \
        arg10, arg11); \
}
// DEFINE_STUB_FUNCTION_12: emit a lazily bound forwarding stub for the MAPI
// export `_name` (12 parameters, returns _ret_type).  Resolution is by name
// via _lookup.  Returns _default on failure.
// (Macro formals `_param11Type`/`_param12Type` are renamed to the
// `_paramN_type` convention; macro formals are purely internal.)
#define DEFINE_STUB_FUNCTION_12(_linkage, _ret_type, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11_type, _param12_type, _default) \
_linkage typedef _ret_type (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11_type, _param12_type); \
_linkage _ret_type _modifiers _name(_param1_type arg1, \
    _param2_type arg2, _param3_type arg3, _param4_type arg4, \
    _param5_type arg5, _param6_type arg6, _param7_type arg7, \
    _param8_type arg8, _param9_type arg9, _param10_type arg10, \
    _param11_type arg11, _param12_type arg12) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulCachedSequence = 0; \
    /* (Re)resolve after the MAPI DLL has been loaded anew or torn down. */ \
    if (g_ulDllSequenceNum != ulCachedSequence || GetMAPIHandle() == NULL) \
    { \
        _name##VAR = \
            (_name##TYPE)::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulCachedSequence = g_ulDllSequenceNum; \
    } \
    if (_name##VAR == NULL || GetMAPIHandle() == NULL) \
        return _default; \
    return _name##VAR(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, \
        arg10, arg11, arg12); \
}
// DEFINE_STUB_FUNCTION_V12: emit a lazily bound forwarding stub for the MAPI
// export `_name` (12 parameters, returns void).  Resolution is by name via
// _lookup; the call is silently skipped when the export cannot be resolved.
// FIX: the 12th parameter of the generated function was declared with
// `_param11Type` instead of `_param12Type`, so the stub's signature disagreed
// with its own typedef whenever the two types differ.  Corrected below.
#define DEFINE_STUB_FUNCTION_V12(_linkage, _modifiers, \
    _name, _lookup, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11Type, _param12Type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11Type, _param12Type); \
\
_linkage void _modifiers _name(_param1_type a, \
    _param2_type b, _param3_type c, _param4_type d, \
    _param5_type e, _param6_type f, _param7_type g, \
    _param8_type h, _param9_type i, _param10_type j, \
    _param11Type k, _param12Type l) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), _lookup); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b, c, d, e, f, g, h, i, j, k, l); \
    } \
}
// DEFINE_STUB_FUNCTION_ORD_V12: like DEFINE_STUB_FUNCTION_V12, but the
// export is resolved by ordinal (`(LPSTR)(_ordinal)`) rather than by name.
// FIX: the 12th parameter of the generated function was declared with
// `_param11Type` instead of `_param12Type`, so the stub's signature disagreed
// with its own typedef whenever the two types differ.  Corrected below.
#define DEFINE_STUB_FUNCTION_ORD_V12(_linkage, _modifiers, \
    _name, _ordinal, _param1_type, _param2_type, _param3_type, \
    _param4_type, _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11Type, _param12Type) \
\
_linkage typedef void (_modifiers * _name##TYPE)( \
    _param1_type, _param2_type, _param3_type, _param4_type, \
    _param5_type, _param6_type, _param7_type, _param8_type, \
    _param9_type, _param10_type, _param11Type, _param12Type) \
\
_linkage void _modifiers _name(_param1_type a, \
    _param2_type b, _param3_type c, _param4_type d, \
    _param5_type e, _param6_type f, _param7_type g, \
    _param8_type h, _param9_type i, _param10_type j, \
    _param11Type k, _param12Type l) \
{ \
    static _name##TYPE _name##VAR = NULL; \
    static UINT ulDllSequenceNum = 0; \
\
    if ( (ulDllSequenceNum != g_ulDllSequenceNum) \
        || (NULL == GetMAPIHandle())) \
    { \
        _name##VAR = (_name##TYPE) \
            ::GetProcAddress(GetPrivateMAPI(), (LPSTR)(_ordinal)); \
        ulDllSequenceNum = g_ulDllSequenceNum; \
    } \
\
    if ((NULL != _name##VAR) && (NULL != GetMAPIHandle())) \
    { \
        _name##VAR(a, b, c, d, e, f, g, h, i, j, k, l); \
    } \
}
// ---------------------------------------------------------------------------
// Stub instantiations: core MAPI entry points (logon, memory, profiles,
// forms).  Each invocation below expands to one forwarding function.
// NOTE(review): the second argument of ExpandFunction appears to be the
// argument-stack byte count used for stdcall name decoration — confirm
// against ExpandFunction's definition elsewhere in this file.
// ---------------------------------------------------------------------------
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
MAPILogonEx, ExpandFunction(MAPILogonEx, 20),
ULONG_PTR, LPTSTR, LPTSTR, ULONG, LPMAPISESSION *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_V0(LINKAGE_EXTERN_C, STDAPICALLTYPE,
MAPIUninitialize, ExpandFunction(MAPIUninitialize, 0))
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, SCODE, STDMETHODCALLTYPE,
MAPIAllocateBuffer, ExpandFunction(MAPIAllocateBuffer, 8),
ULONG, LPVOID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDMETHODCALLTYPE,
MAPIAllocateMore, ExpandFunction(MAPIAllocateMore, 12),
ULONG, LPVOID, LPVOID FAR *, (SCODE)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
MAPIReallocateBuffer, ExpandFunction(MAPIReallocateBuffer, 12),
LPVOID, ULONG, LPVOID *, (SCODE)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDMETHODCALLTYPE,
MAPIAdminProfiles, ExpandFunction(MAPIAdminProfiles, 8),
ULONG, LPPROFADMIN FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
MAPIInitialize, ExpandFunction(MAPIInitialize, 4),
LPVOID, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
LaunchWizard, ExpandFunction(LaunchWizard, 20),
HWND, ULONG, LPCSTR FAR *, ULONG, LPSTR, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
MAPIOpenFormMgr, ExpandFunction(MAPIOpenFormMgr, 8),
LPMAPISESSION, LPMAPIFORMMGR FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
MAPIOpenLocalFormContainer, ExpandFunction(MAPIOpenLocalFormContainer, 4),
LPMAPIFORMCONTAINER FAR *, MAPI_E_CALL_FAILED)
// ---------------------------------------------------------------------------
// Stub instantiations: mapiutil helpers — utility init/deinit, advise sinks,
// notifications, hex conversion, autodiscover, display tables, and the idle
// engine (MAPIInitIdle .. ChangeIdleRoutine).
// ---------------------------------------------------------------------------
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, SCODE,
STDAPICALLTYPE, ScInitMapiUtil, ExpandFunction(ScInitMapiUtil, 4), ULONG, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_V0(LINKAGE_EXTERN_C, STDAPICALLTYPE, DeinitMapiUtil, ExpandFunction(DeinitMapiUtil, 0))
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrAllocAdviseSink, ExpandFunction(HrAllocAdviseSink, 12), LPNOTIFCALLBACK, LPVOID,
LPMAPIADVISESINK FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, HrThisThreadAdviseSink, ExpandFunction(HrThisThreadAdviseSink, 8),
LPMAPIADVISESINK, LPMAPIADVISESINK FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrDispatchNotifications, ExpandFunction(HrDispatchNotifications, 4), ULONG, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScBinFromHexBounded, ExpandFunction(ScBinFromHexBounded, 12),
__in LPTSTR, LPBYTE, ULONG, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL,
STDAPICALLTYPE, FBinFromHex, ExpandFunction(FBinFromHex, 8), __in LPTSTR, LPBYTE, FALSE)
DEFINE_STUB_FUNCTION_V3(LINKAGE_EXTERN_C, STDAPICALLTYPE,
HexFromBin, ExpandFunction(HexFromBin, 12), LPBYTE, int, __in LPTSTR)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, HrGetAutoDiscoverXML, ExpandFunction(HrGetAutoDiscoverXML, 20),
LPCWSTR, LPCWSTR, HANDLE, ULONG, IStream **, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_10(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
BuildDisplayTable, ExpandFunction(BuildDisplayTable, 40),
LPALLOCATEBUFFER, LPALLOCATEMORE, LPFREEBUFFER, LPMALLOC,
HINSTANCE, UINT, LPDTPAGE, ULONG, LPMAPITABLE *, LPTABLEDATA *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, MAPIInitIdle, ExpandFunction(MAPIInitIdle, 4), LPVOID, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_V0(LINKAGE_EXTERN_C, STDAPICALLTYPE, MAPIDeinitIdle, ExpandFunction(MAPIDeinitIdle, 0))
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, FTG, STDAPICALLTYPE,
FtgRegisterIdleRoutine, ExpandFunction(FtgRegisterIdleRoutine, 20),
PFNIDLE, LPVOID, short, ULONG, USHORT, NULL)
DEFINE_STUB_FUNCTION_V2(LINKAGE_EXTERN_C, STDAPICALLTYPE,
EnableIdleRoutine, ExpandFunction(EnableIdleRoutine, 8), FTG, BOOL)
DEFINE_STUB_FUNCTION_V1(LINKAGE_EXTERN_C, STDAPICALLTYPE,
DeregisterIdleRoutine, ExpandFunction(DeregisterIdleRoutine, 4), FTG)
DEFINE_STUB_FUNCTION_V7(LINKAGE_EXTERN_C, STDAPICALLTYPE,
ChangeIdleRoutine, ExpandFunction(ChangeIdleRoutine, 28),
FTG, PFNIDLE, LPVOID, short, ULONG, USHORT, USHORT)
// ---------------------------------------------------------------------------
// Stub instantiations: in-memory property/table objects (CreateIProp,
// CreateTable), MNLS wide-string shims, and property/column helpers.
// ---------------------------------------------------------------------------
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
CreateIProp, ExpandFunction(CreateIProp, 24),
LPCIID, ALLOCATEBUFFER FAR *, ALLOCATEMORE FAR *,
FREEBUFFER FAR *, LPVOID, LPPROPDATA FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_9(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
CreateTable, ExpandFunction(CreateTable, 36),
LPCIID, ALLOCATEBUFFER FAR *, ALLOCATEMORE FAR *,
FREEBUFFER FAR *, LPVOID, ULONG, ULONG,
LPSPropTagArray, LPTABLEDATA FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, int, WINAPI,
MNLS_lstrlenW, ExpandFunction(MNLS_lstrlenW, 4), LPCWSTR, 0)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, int, WINAPI,
MNLS_lstrcmpW, ExpandFunction(MNLS_lstrcmpW, 8), LPCWSTR, LPCWSTR, 0)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, LPWSTR,
WINAPI, MNLS_lstrcpyW, ExpandFunction(MNLS_lstrcpyW, 8), LPWSTR, LPCWSTR, NULL)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, int, WINAPI,
MNLS_CompareStringW, ExpandFunction(MNLS_CompareStringW, 24),
LCID, DWORD, LPCWSTR, int, LPCWSTR, int, 0)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, int, WINAPI,
MNLS_MultiByteToWideChar, ExpandFunction(MNLS_MultiByteToWideChar, 24),
UINT, DWORD, LPCSTR, int, LPWSTR, int, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_EXTERN_C, int, WINAPI,
MNLS_WideCharToMultiByte, ExpandFunction(MNLS_WideCharToMultiByte, 32),
UINT, DWORD, LPCWSTR, int, LPSTR, int, LPCSTR, BOOL FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL,
WINAPI, MNLS_IsBadStringPtrW, ExpandFunction(MNLS_IsBadStringPtrW, 8), LPCWSTR, UINT, TRUE)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL, STDAPICALLTYPE,
FEqualNames, ExpandFunction(FEqualNames, 8), LPMAPINAMEID, LPMAPINAMEID, FALSE)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
WrapStoreEntryID, ExpandFunction(WrapStoreEntryID, 24),
ULONG, __in LPTSTR, ULONG, LPENTRYID, ULONG *, LPENTRYID *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL, WINAPI,
IsBadBoundedStringPtr, ExpandFunction(IsBadBoundedStringPtr, 8),
const void FAR *, UINT, FALSE)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrQueryAllRows, ExpandFunction(HrQueryAllRows, 24), LPMAPITABLE, LPSPropTagArray,
LPSRestriction, LPSSortOrderSet, LONG, LPSRowSet FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScCreateConversationIndex, ExpandFunction(ScCreateConversationIndex, 16), ULONG, LPBYTE,
ULONG FAR *, LPBYTE FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
PropCopyMore, ExpandFunction(PropCopyMore, 16),
LPSPropValue, LPSPropValue, ALLOCATEMORE *, LPVOID, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, UlPropSize, ExpandFunction(UlPropSize, 4), LPSPropValue, 0)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, BOOL, STDAPICALLTYPE,
FPropContainsProp, ExpandFunction(FPropContainsProp, 12), LPSPropValue, LPSPropValue, ULONG, FALSE)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, BOOL, STDAPICALLTYPE,
FPropCompareProp, ExpandFunction(FPropCompareProp, 12), LPSPropValue, ULONG, LPSPropValue, FALSE)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, LONG, STDAPICALLTYPE,
LPropCompareProp, ExpandFunction(LPropCompareProp, 8), LPSPropValue, LPSPropValue, 0)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrAddColumns, ExpandFunction(HrAddColumns, 16),
LPMAPITABLE, LPSPropTagArray, LPALLOCATEBUFFER, LPFREEBUFFER, MAPI_E_CALL_FAILED)
// Callback type for HrAddColumnsEx's 5th parameter (a tag-array filter hook).
typedef void (FAR * HrAddColumnsEx5ParamType)(LPSPropTagArray);
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrAddColumnsEx, ExpandFunction(HrAddColumnsEx, 20), LPMAPITABLE, LPSPropTagArray,
LPALLOCATEBUFFER, LPFREEBUFFER, HrAddColumnsEx5ParamType, MAPI_E_CALL_FAILED)
// Zero FILETIME used as the failure default for the FILETIME-returning stubs.
const FILETIME ZERO_FILETIME = { 0, 0 };
// ---------------------------------------------------------------------------
// Stub instantiations: FILETIME arithmetic, refcount helpers, and string
// search helpers.
// ---------------------------------------------------------------------------
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, FILETIME,
STDAPICALLTYPE, FtMulDwDw, ExpandFunction(FtMulDwDw, 8), DWORD, DWORD, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, FILETIME,
STDAPICALLTYPE, FtAddFt, ExpandFunction(FtAddFt, 16), FILETIME, FILETIME, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, FILETIME, STDAPICALLTYPE,
FtAdcFt, ExpandFunction(FtAdcFt, 20), FILETIME, FILETIME, WORD FAR *, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, FILETIME,
STDAPICALLTYPE, FtSubFt, ExpandFunction(FtSubFt, 16), FILETIME, FILETIME, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, FILETIME,
STDAPICALLTYPE, FtMulDw, ExpandFunction(FtMulDw, 12), DWORD, FILETIME, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, FILETIME,
STDAPICALLTYPE, FtNegFt, ExpandFunction(FtNegFt, 8), FILETIME, ZERO_FILETIME)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, UlAddRef, ExpandFunction(UlAddRef, 4), LPVOID, 1)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, UlRelease, ExpandFunction(UlRelease, 4), LPVOID, 1)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, LPTSTR,
STDAPICALLTYPE, SzFindCh, ExpandFunction(SzFindCh, 8), LPCTSTR, USHORT, NULL)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, LPTSTR,
STDAPICALLTYPE, SzFindLastCh, ExpandFunction(SzFindLastCh, 8), LPCTSTR, USHORT, NULL)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, LPTSTR,
STDAPICALLTYPE, SzFindSz, ExpandFunction(SzFindSz, 8), LPCTSTR, LPCTSTR, NULL)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, unsigned int,
STDAPICALLTYPE, UFromSz, ExpandFunction(UFromSz, 4), LPCTSTR, 0)
// ---------------------------------------------------------------------------
// Stub instantiations: single-property access, row/address-list freeing,
// entry-ID / message-ID (de)composition, and file-backed streams.
// ---------------------------------------------------------------------------
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrGetOneProp, ExpandFunction(HrGetOneProp, 12),
LPMAPIPROP, ULONG, LPSPropValue FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrSetOneProp, ExpandFunction(HrSetOneProp, 8), LPMAPIPROP, LPSPropValue, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL,
STDAPICALLTYPE, FPropExists, ExpandFunction(FPropExists, 8), LPMAPIPROP, ULONG, FALSE)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, LPSPropValue, STDAPICALLTYPE,
PpropFindProp, ExpandFunction(PpropFindProp, 12), LPSPropValue, ULONG, ULONG, NULL)
DEFINE_STUB_FUNCTION_V1(LINKAGE_EXTERN_C, STDAPICALLTYPE,
FreePadrlist, ExpandFunction(FreePadrlist, 4), LPADRLIST)
DEFINE_STUB_FUNCTION_V1(LINKAGE_EXTERN_C, STDAPICALLTYPE,
FreeProws, ExpandFunction(FreeProws, 4), LPSRowSet)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrSzFromEntryID, ExpandFunction(HrSzFromEntryID, 12), ULONG, LPENTRYID, __in LPTSTR FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrEntryIDFromSz, ExpandFunction(HrEntryIDFromSz, 12),
__in LPTSTR, ULONG FAR *, LPENTRYID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_7(LINKAGE_NO_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrComposeEID, ExpandFunction(HrComposeEID, 28), LPMAPISESSION, ULONG, LPBYTE,
ULONG, LPENTRYID, ULONG FAR *, LPENTRYID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrDecomposeEID, ExpandFunction(HrDecomposeEID, 28), LPMAPISESSION, ULONG, LPENTRYID,
ULONG FAR *, LPENTRYID FAR *, ULONG FAR *, LPENTRYID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrComposeMsgID, ExpandFunction(HrComposeMsgID, 24),
LPMAPISESSION, ULONG, LPBYTE, ULONG, LPENTRYID, __in LPTSTR FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrDecomposeMsgID, ExpandFunction(HrDecomposeMsgID, 24), LPMAPISESSION, __in LPTSTR,
ULONG FAR *, LPENTRYID FAR *, ULONG FAR *, LPENTRYID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT,
STDMETHODCALLTYPE, OpenStreamOnFile, ExpandFunction(OpenStreamOnFile, 24),
LPALLOCATEBUFFER, LPFREEBUFFER, ULONG,
__in LPCTSTR, __in_opt LPCTSTR, LPSTREAM FAR *, MAPI_E_CALL_FAILED)
// TNEF stream stubs.  When _INC_WINAPIFAMILY is defined (newer SDKs) the
// declarations carry SAL's _Check_return_; otherwise the plain signatures
// are used so the stubs match the older SDK headers.
#ifdef _INC_WINAPIFAMILY
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, _Check_return_ HRESULT,
STDMETHODCALLTYPE, OpenTnefStream, ExpandFunction(OpenTnefStream, 28), LPVOID, LPSTREAM,
__in LPTSTR, ULONG, LPMESSAGE, WORD, LPITNEF FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_EXTERN_C, _Check_return_ HRESULT, STDMETHODCALLTYPE,
OpenTnefStreamEx, ExpandFunction(OpenTnefStreamEx, 32), LPVOID, LPSTREAM, __in LPTSTR,
ULONG, LPMESSAGE, WORD, LPADRBOOK, LPITNEF FAR *, MAPI_E_CALL_FAILED)
#else
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, HRESULT,
STDMETHODCALLTYPE, OpenTnefStream, ExpandFunction(OpenTnefStream, 28), LPVOID, LPSTREAM,
__in LPTSTR, ULONG, LPMESSAGE, WORD, LPITNEF FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_EXTERN_C, HRESULT, STDMETHODCALLTYPE,
OpenTnefStreamEx, ExpandFunction(OpenTnefStreamEx, 32), LPVOID, LPSTREAM, __in LPTSTR,
ULONG, LPMESSAGE, WORD, LPADRBOOK, LPITNEF FAR *, MAPI_E_CALL_FAILED)
#endif
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDMETHODCALLTYPE,
GetTnefStreamCodepage, ExpandFunction(GetTnefStreamCodepage, 12),
LPSTREAM, ULONG FAR *, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, UlFromSzHex, ExpandFunction(UlFromSzHex, 4), LPCTSTR, 0)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScCountNotifications, ExpandFunction(ScCountNotifications, 12),
int, LPNOTIFICATION, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScCopyNotifications, ExpandFunction(ScCopyNotifications, 16),
int, LPNOTIFICATION, LPVOID, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, SCODE,
STDAPICALLTYPE, ScRelocNotifications, ExpandFunction(ScRelocNotifications, 20), int,
LPNOTIFICATION, LPVOID, LPVOID, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScCountProps, ExpandFunction(ScCountProps, 12),
int, LPSPropValue, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScCopyProps, ExpandFunction(ScCopyProps, 16),
int, LPSPropValue, LPVOID, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScRelocProps, ExpandFunction(ScRelocProps, 20),
int, LPSPropValue, LPVOID, LPVOID, ULONG FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, LPSPropValue, STDAPICALLTYPE,
LpValFindProp, ExpandFunction(LpValFindProp, 12), ULONG, ULONG, LPSPropValue, NULL)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScDupPropset, ExpandFunction(ScDupPropset, 16),
int, LPSPropValue, LPALLOCATEBUFFER, LPSPropValue FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL,
STDAPICALLTYPE, FBadRglpszW, ExpandFunction(FBadRglpszW, 8), __in LPWSTR FAR *, ULONG, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, BOOL,
STDAPICALLTYPE, FBadRowSet, ExpandFunction(FBadRowSet, 4), LPSRowSet, TRUE)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, BOOL, STDAPICALLTYPE,
FBadRglpNameID, ExpandFunction(FBadRglpNameID, 8), LPMAPINAMEID FAR *, ULONG, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadPropTag, ExpandFunction(FBadPropTag, 4), ULONG, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadRow, ExpandFunction(FBadRow, 4), LPSRow, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadProp, ExpandFunction(FBadProp, 4), LPSPropValue, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadColumnSet, ExpandFunction(FBadColumnSet, 4), LPSPropTagArray, TRUE)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
RTFSync, ExpandFunction(RTFSync, 12), LPMESSAGE, ULONG, __out BOOL FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
WrapCompressedRTFStream, ExpandFunction(WrapCompressedRTFStream, 12),
__in LPSTREAM, ULONG, __out LPSTREAM FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
__ValidateParameters, ExpandFunction(__ValidateParameters, 8),
METHODS, void *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
__CPPValidateParameters, ExpandFunction(__CPPValidateParameters, 8),
METHODS, const LPVOID, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_2(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE,
HrValidateParameters, ExpandFunction(HrValidateParameters, 8),
METHODS, LPVOID FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadSortOrderSet, ExpandFunction(FBadSortOrderSet, 4), LPSSortOrderSet, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, BOOL,
STDAPICALLTYPE, FBadEntryList, ExpandFunction(FBadEntryList, 4), LPENTRYLIST, TRUE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG,
STDAPICALLTYPE, FBadRestriction, ExpandFunction(FBadRestriction, 4), LPSRestriction, TRUE)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScUNCFromLocalPath, ExpandFunction(ScUNCFromLocalPath, 12), __in LPSTR, __in LPSTR, UINT, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
ScLocalPathFromUNC, ExpandFunction(ScLocalPathFromUNC, 12), __in LPSTR, __in LPSTR, UINT, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, HrIStorageFromStream, ExpandFunction(HrIStorageFromStream, 16),
LPUNKNOWN, LPCIID, ULONG, LPSTORAGE FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, HrValidateIPMSubtree, ExpandFunction(HrValidateIPMSubtree, 20), LPMDB, ULONG,
ULONG FAR *, LPSPropValue FAR *, LPMAPIERROR FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, SCODE, STDAPICALLTYPE,
OpenIMsgSession, ExpandFunction(OpenIMsgSession, 12),
LPMALLOC, ULONG, LPMSGSESS FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_V1(LINKAGE_EXTERN_C, STDAPICALLTYPE,
CloseIMsgSession, ExpandFunction(CloseIMsgSession, 4), LPMSGSESS)
DEFINE_STUB_FUNCTION_11(LINKAGE_EXTERN_C, SCODE,
STDAPICALLTYPE, OpenIMsgOnIStg, ExpandFunction(OpenIMsgOnIStg, 44), LPMSGSESS, LPALLOCATEBUFFER,
LPALLOCATEMORE, LPFREEBUFFER, LPMALLOC, LPVOID, LPSTORAGE,
MSGCALLRELEASE FAR *, ULONG, ULONG, LPMESSAGE FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, SetAttribIMsgOnIStg, ExpandFunction(SetAttribIMsgOnIStg, 16), LPVOID, LPSPropTagArray,
LPSPropAttrArray, LPSPropProblemArray FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT,
STDAPICALLTYPE, GetAttribIMsgOnIStg, ExpandFunction(GetAttribIMsgOnIStg, 12), LPVOID,
LPSPropTagArray, LPSPropAttrArray FAR *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, SCODE,
STDAPICALLTYPE, MapStorageSCode, ExpandFunction(MapStorageSCode, 4), SCODE, MAPI_E_CALL_FAILED)
/* Looked up by its undecorated literal export name (like the Simple MAPI
 * entries below) rather than via ExpandFunction.
 * FIX: removed the stray trailing ';' -- no other DEFINE_STUB_FUNCTION_*
 * invocation in this file has one; since the macro already expands to a
 * complete definition, the semicolon was an empty declaration that triggers
 * warnings under -Wpedantic and /Za. */
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, SCODE, STDMETHODCALLTYPE, ScMAPIXFromSMAPI,
"ScMAPIXFromSMAPI", LHANDLE, ULONG, LPCIID, LPMAPISESSION FAR *, MAPI_E_CALL_FAILED)
/*
 * Simple MAPI (mapi.h) delay-load stubs. These exports use the FAR PASCAL
 * calling convention and are resolved by their undecorated literal names.
 * On failure each stub returns (ULONG)MAPI_E_CALL_FAILED.
 */
DEFINE_STUB_FUNCTION_11(LINKAGE_EXTERN_C, ULONG, FAR PASCAL, MAPIAddress, "MAPIAddress",
LHANDLE, ULONG_PTR, LPSTR, ULONG, LPSTR, ULONG, lpMapiRecipDesc,
FLAGS, ULONG, LPULONG, lpMapiRecipDesc FAR *, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, ULONG, FAR PASCAL, MAPIReadMail, "MAPIReadMail",
LHANDLE, ULONG_PTR, LPSTR, FLAGS, ULONG, lpMapiMessage FAR *, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, ULONG, FAR PASCAL, MAPIResolveName, "MAPIResolveName",
LHANDLE, ULONG_PTR, LPSTR, FLAGS, ULONG, lpMapiRecipDesc FAR *, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPISendDocuments, "MAPISendDocuments",
ULONG_PTR, LPSTR, LPSTR, LPSTR, ULONG, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPILogon, "MAPILogon",
ULONG_PTR, LPSTR, LPSTR, FLAGS, ULONG, LPLHANDLE, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPILogoff, "MAPILogoff", LHANDLE, ULONG_PTR, FLAGS, ULONG, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPISendMail, "MAPISendMail",
LHANDLE, ULONG_PTR, lpMapiMessage, FLAGS, ULONG, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPISaveMail, "MAPISaveMail", LHANDLE, ULONG_PTR, lpMapiMessage,
FLAGS, ULONG, LPSTR, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPIFindNext, "MAPIFindNext",
LHANDLE, ULONG_PTR, LPSTR, LPSTR, FLAGS, ULONG, LPSTR, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPIDeleteMail, "MAPIDeleteMail",
LHANDLE, ULONG_PTR, LPSTR, FLAGS, ULONG, (ULONG)MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, ULONG, FAR PASCAL,
MAPIDetails, "MAPIDetails",
LHANDLE, ULONG_PTR, lpMapiRecipDesc, FLAGS, ULONG, (ULONG)MAPI_E_CALL_FAILED)
/*
 * Remaining MAPI/Outlook exports: buffer management, wide-char stream open,
 * offline-object creation, cached-mode queries and the HrOpenABEntry* /
 * address-book helper family. GetDefCachedMode* fail "safe" by returning
 * FALSE; the rest return MAPI_E_CALL_FAILED (or NULL/0 where noted).
 */
DEFINE_STUB_FUNCTION_V1(LINKAGE_EXTERN_C, STDAPICALLTYPE, MAPICrashRecovery, ExpandFunction(MAPICrashRecovery, 4), ULONG)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, ULONG, STDAPICALLTYPE, MAPIFreeBuffer, ExpandFunction(MAPIFreeBuffer, 4), LPVOID, 0)
DEFINE_STUB_FUNCTION_0(LINKAGE_EXTERN_C, LPMALLOC, STDAPICALLTYPE, MAPIGetDefaultMalloc, ExpandFunction(MAPIGetDefaultMalloc, 0), NULL)
DEFINE_STUB_FUNCTION_6(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE, OpenStreamOnFileW, ExpandFunction(OpenStreamOnFileW, 24), LPALLOCATEBUFFER, LPFREEBUFFER,
ULONG, LPWSTR, LPWSTR, LPSTREAM FAR*, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE, HrCreateNewWrappedObject, ExpandFunction(HrCreateNewWrappedObject, 28),
void*, ULONG, ULONG, const IID*, const ULONG*, BOOL, void**, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE, HrOpenOfflineObj, ExpandFunction(HrOpenOfflineObj, 20),
ULONG, LPCWSTR, const GUID*, const GUID*, IMAPIOfflineMgr**, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_3(LINKAGE_EXTERN_C, HRESULT, STDAPICALLTYPE, HrCreateOfflineObj, ExpandFunction(HrCreateOfflineObj, 12),
ULONG, MAPIOFFLINE_CREATEINFO*, IMAPIOfflineMgr**, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_4(LINKAGE_EXTERN_C, HRESULT, STDMETHODCALLTYPE, WrapCompressedRTFStreamEx, ExpandFunction(WrapCompressedRTFStreamEx, 16),
LPSTREAM, CONST RTF_WCSINFO *, LPSTREAM *, RTF_WCSRETINFO *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, BOOL, WINAPI, GetDefCachedMode, ExpandFunction(GetDefCachedMode, 4),
BOOL*, FALSE)
DEFINE_STUB_FUNCTION_1(LINKAGE_EXTERN_C, BOOL, WINAPI, GetDefCachedModeDownloadPubFoldFavs, ExpandFunction(GetDefCachedModeDownloadPubFoldFavs, 4),
BOOL*, FALSE)
DEFINE_STUB_FUNCTION_9(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryWithExchangeContext, ExpandFunction(HrOpenABEntryWithExchangeContext, 36),
LPMAPISESSION, LPMAPIUID, LPADRBOOK, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_12(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrDoABDetailsWithExchangeContext, ExpandFunction(HrDoABDetailsWithExchangeContext, 48),
LPMAPISESSION, LPMAPIUID, LPADRBOOK, ULONG_PTR *, LPFNDISMISS, LPVOID, ULONG, LPENTRYID, LPFNBUTTON, LPVOID, LPSTR, ULONG, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_11(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrDoABDetailsWithProviderUID, ExpandFunction(HrDoABDetailsWithProviderUID, 44),
LPMAPIUID, LPADRBOOK, ULONG_PTR *, LPFNDISMISS, LPVOID, ULONG, LPENTRYID, LPFNBUTTON, LPVOID, LPSTR, ULONG, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryUsingDefaultContext, ExpandFunction(HrOpenABEntryUsingDefaultContext, 32),
LPMAPISESSION, LPADRBOOK, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryWithProviderUID, ExpandFunction(HrOpenABEntryWithProviderUID, 32),
LPMAPIUID, LPADRBOOK, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryWithProviderUIDSupport, ExpandFunction(HrOpenABEntryWithProviderUIDSupport, 32),
LPMAPIUID, LPMAPISUP, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_8(LINKAGE_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryWithResolvedRow, ExpandFunction(HrOpenABEntryWithResolvedRow, 32),
LPSRow, LPADRBOOK, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_9(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrCompareABEntryIDsWithExchangeContext, ExpandFunction(HrCompareABEntryIDsWithExchangeContext, 36),
LPMAPISESSION, LPMAPIUID, LPADRBOOK, ULONG, LPENTRYID, ULONG, LPENTRYID, ULONG, ULONG *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_7(LINKAGE_EXTERN_C, HRESULT, WINAPI, HrOpenABEntryWithSupport, ExpandFunction(HrOpenABEntryWithSupport, 28),
LPMAPISUP, ULONG, LPENTRYID, LPCIID, ULONG, ULONG *, LPUNKNOWN *, MAPI_E_CALL_FAILED)
DEFINE_STUB_FUNCTION_5(LINKAGE_NO_EXTERN_C, HRESULT, WINAPI, HrGetGALFromEmsmdbUID, ExpandFunction(HrGetGALFromEmsmdbUID, 20),
LPMAPISESSION, LPADRBOOK, LPMAPIUID, ULONG *, LPENTRYID *, MAPI_E_CALL_FAILED)
|
/***************************************************************************
* (C) Copyright 2003-2013 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.chatlog;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;
import games.stendhal.client.gui.textformat.AttributedTextSink;
import games.stendhal.client.gui.textformat.StyleSet;
import marauroa.common.Logger;
/**
 * AttributedTextSink for writing to a styled document.
 */
public class ChatTextSink implements AttributedTextSink<StyleSet> {
    /** Logger instance. */
    private static final Logger LOGGER = Logger.getLogger(ChatTextSink.class);
    /** Destination document that receives the appended text. */
    private final Document target;

    /**
     * Create a new ChatTextSink.
     *
     * @param document destination document
     */
    public ChatTextSink(Document document) {
        target = document;
    }

    /**
     * Append styled text at the end of the destination document.
     *
     * @param s text to insert
     * @param attrs formatting attributes applied to the inserted text
     */
    @Override
    public void append(String s, StyleSet attrs) {
        // Insertion point is always the current end of the document.
        int end = target.getLength();
        try {
            target.insertString(end, s, attrs.contents());
        } catch (BadLocationException e) {
            LOGGER.error("Failed to insert text.", e);
        }
    }
}
|
-- Basic user profile table.
CREATE TABLE users (
    id      serial       PRIMARY KEY,  -- auto-incrementing surrogate key (PostgreSQL)
    name    VARCHAR(50)  NOT NULL,
    email   VARCHAR(255) NOT NULL,
    age     INTEGER      NOT NULL,
    phone   VARCHAR(50)  NOT NULL
);
// Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.pantsbuild.tools.junit.lib;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class MockParameterizedParallelClassesAndMethodsTest2 {
    /** Parameter value injected by the Parameterized runner (now final: it
     * is only ever assigned in the constructor). */
    private final String parameter;

    /**
     * Parameter sets for the runner; one test instance is created per value.
     *
     * @return the parameter values
     */
    @Parameterized.Parameters
    public static List<String> data() {
        return Arrays.asList("arg1", "arg2");
    }

    public MockParameterizedParallelClassesAndMethodsTest2(String parameter) {
        this.parameter = parameter;
    }

    /**
     * Blocks on the latch shared with the sibling test class so the harness
     * can verify that parameterized methods run concurrently.
     */
    @Test
    public void ppcamtest2() throws Exception {
        MockParameterizedParallelClassesAndMethodsTest1.awaitLatch("ppcamtest2:" + parameter);
    }
}
|
// Handle release of the left mouse button (button index 0).
if (Input.GetMouseButtonUp(0))
{
    // Place a brush of the current type where the cursor was released.
    // NOTE(review): brushType is assumed to be defined in the enclosing scope.
    Vector2 brushPoint = GetBrushPositionFromMouse();
    AddBrush(brushType, brushPoint);
}
<filename>package/spack-mitofates/package.py
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
class Mitofates(Package):
    """MitoFates predicts mitochondrial presequence, a cleavable localization
    signal located in N-terminal, and its cleaved position."""

    homepage = "http://mitf.cbrc.jp/MitoFates/cgi-bin/top.cgi"
    url = "http://mitf.cbrc.jp/MitoFates/program/MitoFates_1.2.tar.gz"

    version('1.2', 'aaac42a8e8c7318a4abde9df3a4b72d1')

    depends_on('libsvm')
    # Perl and the modules below are needed only to run MitoFates.pl.
    depends_on('perl', type='run')
    depends_on('perl-inline-c', type='run')
    depends_on('perl-perl6-slurp', type='run')
    depends_on('perl-math-cephes', type='run')

    # The DirichletRegulator_fast.pm sets the perl Inline directory
    # to be inside the deployed source (which won't be writable by
    # the end user of site wide deployed software).
    # Removing that config entry will cause the inline module to auto
    # create a directory in the user's homedir instead
    patch('DirichletRegulator_fast.patch')

    def patch(self):
        # Rewrite hard-coded shebangs so the scripts pick up the perl that
        # spack puts on PATH instead of the system /usr/bin/perl.
        perlscripts = FileFilter('MitoFates.pl')
        perlscripts.filter('#!/usr/bin/perl', '#!/usr/bin/env perl')
        # other perl module files probably should get this filter too
        with working_dir(join_path(self.stage.source_path, 'bin/modules')):
            perlmodules = glob.glob('*.pm')
            filter_file('#!/usr/bin/perl', '#!/usr/bin/env perl', *perlmodules)

    def install(self, spec, prefix):
        # The main driver script lives at the prefix root, next to bin/.
        install_tree('bin', prefix.bin)
        install('MitoFates.pl', prefix)
        chmod = which('chmod')
        chmod('+x', join_path(prefix, 'MitoFates.pl'))

    def setup_environment(self, spack_env, run_env):
        # We want the main MitoFates.pl script in the path
        run_env.prepend_path('PATH', self.prefix)
|
/*
## Serves static files in the current directory
* index.html will be handled as expected
* Does not list directory contents
Download, build and run example:
go get github.com/ungerik/go-start/examples/ServeStatic
go install github.com/ungerik/go-start/examples/ServeStatic && ServeStatic
*/
package main
import (
"fmt"
"path/filepath"
"github.com/ungerik/go-start/view"
)
// main serves the current directory over HTTP on port 8080.
func main() {
	// Resolve the served directory for a friendly startup message.
	// FIX: the error from filepath.Abs was previously discarded with `_`;
	// fall back to the relative path so the message is never empty.
	absPath, err := filepath.Abs(".")
	if err != nil {
		absPath = "."
	}
	fmt.Printf("Serving %s/ at http://0.0.0.0:8080/\n", absPath)
	view.Config.StaticDirs = []string{"."}
	view.RunServerAddr("0.0.0.0:8080", nil)
}
|
//
// Copyright (C) 2016, <NAME>. <<EMAIL>>
//
#pragma once
#include <pebble.h>
#include "global.h"
// Gauge geometry, scaled by watch display size (200 px wide = Emery).
#if PBL_DISPLAY_WIDTH == 200
#define BATT_GAUGE_SIZE_W 45
#define BATT_GAUGE_SIZE_H 51
#define BATT_HAND_LENGTH 33
#define BATT_GAUGE_DOT_RADIUS 3
#else
#define BATT_GAUGE_SIZE_W 35
#define BATT_GAUGE_SIZE_H 33
#define BATT_HAND_LENGTH 23
#define BATT_GAUGE_DOT_RADIUS 3
#endif
// Gauge placement: left edge, vertically centred on the screen.
#define BATT_GAUGE_RIGHT_GAP 3
#define BATTERY_GAUGE_POS_X ( BATT_GAUGE_RIGHT_GAP )
#define BATTERY_GAUGE_POS_Y ( PBL_DISPLAY_HEIGHT/2 - BATT_GAUGE_SIZE_H/2 )
#define BATTERY_GAUGE_FRAME ( GRect ( BATTERY_GAUGE_POS_X, BATTERY_GAUGE_POS_Y, BATT_GAUGE_SIZE_W, BATT_GAUGE_SIZE_H ) )
#define BATTERY_GAUGE_PIVOT ( GPoint( BATT_GAUGE_SIZE_W - BATT_GAUGE_RIGHT_GAP - 1, BATT_GAUGE_SIZE_H/2 ) )
#define BATTERY_GAUGE_MAX_ANGLE_DEG 20
// TICK_POS_X is defined negative and negated again in BATTERY_GAUGE_TICK_POS;
// the path points below use the negative value directly.
#define BATTERY_GAUGE_TICK_POS_X ( -BATT_GAUGE_SIZE_W )
#define BATTERY_GAUGE_TICK_POS_Y ( BATT_GAUGE_SIZE_H / 2 )
#define BATTERY_GAUGE_TICK_POS ( GPoint( -BATTERY_GAUGE_TICK_POS_X, BATTERY_GAUGE_TICK_POS_Y ) )
// Two-point tick mark path; slightly longer on the big display.
// NOTE(review): `static` data in a header gives every including .c file its
// own copy -- acceptable if only one .c includes this, otherwise move to a .c.
#if PBL_DISPLAY_WIDTH == 200
static GPathInfo BATT_GAUGE_TICK = {
2, (GPoint[]) {
{ BATTERY_GAUGE_TICK_POS_X + 5, 0 },
{ BATTERY_GAUGE_TICK_POS_X, 0 }
}
};
#else
static GPathInfo BATT_GAUGE_TICK = {
2, (GPoint[]) {
{ BATTERY_GAUGE_TICK_POS_X + 3, 0 },
{ BATTERY_GAUGE_TICK_POS_X, 0 }
}
};
#endif
// Everything the hand-drawing routine needs for one render pass.
typedef struct {
GContext *ctx;
uint32_t batt_angle;
GPoint center_pt;
GPath *s_hand;
GColor hand_colour;
GColor hand_outline_colour;
uint16_t dot_radius;
BatteryChargeState charge_state;
} BATTERY_HAND_DRAW_PARAMS;
extern Layer *battery_layer;
// Create/destroy the battery gauge layer under the given parent.
void battery_init( Layer *parent_layer );
void battery_deinit( void );
|
// Barrel file: re-export the public query and result APIs of this package.
export * from './query';
export * from './result';
|
<reponame>aral/extraterm
/*
* Copyright 2020 <NAME> <<EMAIL>>
*
* This source code is licensed under the MIT license which is detailed in the LICENSE.txt file.
*/
import * as child_process from 'child_process';
import * as fs from 'fs';
import * as path from 'path';
import * as _ from 'lodash';
import { ShellStringParser } from 'extraterm-shell-string-parser';
import { Logger, Pty, SessionConfiguration, SessionBackend, EnvironmentMap} from '@extraterm/extraterm-extension-api';
import { ProxyPtyConnector, PtyOptions } from './ProxyPty';
import * as SourceDir from './SourceDir';
// Session settings specific to WSL-backed terminal sessions.
interface WslProxySessionConfiguration extends SessionConfiguration {
// When true, `shell` is ignored and the distro's default shell is used.
useDefaultShell?: boolean;
shell?: string;
// WSL distribution name; empty string means the system default distro.
distribution?: string;
}
// Session backend that launches shells inside Windows Subsystem for Linux
// distributions via a proxy process spawned through wsl.exe.
export class WslProxySessionBackend implements SessionBackend {
// One proxy connector per distribution name ("" = default distro).
private _connectorMap = new Map<string, WslProxyPtyConnector>();
private _hasWsl = false;
private _distributions: string[] = [];
constructor(private _log: Logger) {
this._hasWsl = this._validateExe("wsl.exe");
if (this._hasWsl) {
this._distributions = this._findWSLDistributions();
}
}
// Offer one session config for the default distro plus one per installed
// distribution; empty list when wsl.exe is not available.
defaultSessionConfigurations(): SessionConfiguration[] {
const configs: WslProxySessionConfiguration[] = [];
if (this._hasWsl) {
const wslSessionConfig: WslProxySessionConfiguration = {
uuid: "",
name: "WSL Default",
type: "wsl",
useDefaultShell: true,
shell: "",
args: "",
distribution: "",
};
configs.push(wslSessionConfig);
for (const distro of this._distributions) {
const wslSessionConfig: WslProxySessionConfiguration = {
uuid: "",
name: "WSL " + distro,
type: "wsl",
useDefaultShell: true,
shell: "",
args: "",
distribution: distro,
};
configs.push(wslSessionConfig);
}
}
return configs;
}
// True if `exe` is found on the Windows PATH (';'-separated) and executable.
private _validateExe(exe: string): boolean {
const searchPaths: string[] = process.env.PATH.split(";");
for (const p of searchPaths) {
const testPath = path.join(p, exe);
if (this._validateExePath(testPath)) {
return true;
}
}
return false;
}
// accessSync throws when the path is missing or not executable.
private _validateExePath(exePath: string): boolean {
try {
fs.accessSync(exePath, fs.constants.X_OK);
return true;
} catch(err) {
return false;
}
}
// Parse `wsl.exe --list` output (UTF-16LE on Windows). The first line is a
// header and is skipped; the distro name is the first space-separated token
// of each remaining line (trim() also strips a trailing \r).
private _findWSLDistributions(): string[] {
const wslResult = child_process.spawnSync("wsl.exe", ["--list"], {encoding: "utf16le"});
if (wslResult.status !== 0) {
return [];
}
const result: string[] = [];
const lines = wslResult.stdout.trim().split("\n");
for (const line of lines.slice(1)) {
if (line.trim() === "") {
continue;
}
const parts = line.split(" ");
result.push(parts[0].trim());
}
return result;
}
// Build PtyOptions (login shell, forced TERM, merged extra environment)
// and spawn the pty through the connector for the requested distribution.
createSession(sessionConfiguration: SessionConfiguration, extraEnv: EnvironmentMap, cols: number, rows: number): Pty {
const sessionConfig = <WslProxySessionConfiguration> sessionConfiguration;
const defaultShell = "/bin/bash";
const shell = sessionConfig.useDefaultShell ? defaultShell : sessionConfig.shell;
const args = ["-l"].concat(ShellStringParser(sessionConfig.args));
const extraPtyEnv = {
TERM: "xterm-256color"
};
for (const prop in extraEnv) {
extraPtyEnv[prop] = extraEnv[prop];
}
const options: PtyOptions = {
exe: shell,
args,
env: null,
extraEnv: extraPtyEnv,
cols,
rows
};
if (sessionConfig.initialDirectory != null && sessionConfig.initialDirectory !== "") {
options.cwd = sessionConfig.initialDirectory;
}
const connector = this._getConnector(sessionConfig.distribution);
return connector.spawn(options);
}
// Lazily create (and start) a connector per distribution; connectors remove
// themselves from the map when their proxy process closes.
private _getConnector(distribution: string): ProxyPtyConnector {
const distro = distribution == null || distribution === "" ? "" : distribution;
let connector = this._connectorMap.get(distro);
if (connector == null) {
connector = new WslProxyPtyConnector(this._log, distro);
connector.onProxyClosed(() => {
this._connectorMap.delete(distro);
});
this._connectorMap.set(distro, connector);
connector.start();
}
return connector;
}
}
// Module-level logger used by _spawnServer.
// NOTE(review): every connector constructor overwrites this shared binding;
// harmless while all connectors share one Logger, but consider storing it
// on the instance instead.
let _log: Logger = null;

// Connector that runs the extraterm proxy server inside a WSL distribution.
class WslProxyPtyConnector extends ProxyPtyConnector {
  constructor(logger: Logger, private _distribution: string) {
    super(logger);
    _log = logger;
  }

  // Locate this extension's directory from the WSL side and launch the
  // bundled proxy server there via wsl.exe.
  protected _spawnServer(): child_process.ChildProcess {
    const distro = this._distribution == null || this._distribution === "" ? "" : this._distribution;
    const distroArgs = distro === "" ? [] : ["-d", distro];
    // Clever way of mapping a Windows side dir to its WSL/Linux side equivalent.
    const proxyDirPath = SourceDir.path;
    const cdResult = child_process.spawnSync("wsl.exe", [...distroArgs, "pwd"],
      {cwd: proxyDirPath, shell: true, encoding: "utf8"});
    if (cdResult.status !== 0) {
      // FIX: these two warnings used single-quoted strings, so the ${...}
      // placeholders were logged literally; template literals interpolate.
      _log.warn(`'wsl.exe pwd' returned status code ${cdResult.status} and stdout '${cdResult.stdout}'.`);
      // FIXME throw new Exception();
    }
    const wslPath = cdResult.stdout.trim();
    if (wslPath.split("\n").length !== 1) {
      _log.warn(`'wsl.exe pwd' gave unexpected output. stdout '${cdResult.stdout}'.`);
      // FIXME throw new Exception();
    }
    const serverPath = wslPath + "/extraterm-wsl-proxy";
    _log.debug(`serverPath: ${serverPath}`);
    return child_process.spawn("wsl.exe", [...distroArgs, serverPath], {});
  }
}
|
#!/usr/bin/env bash
# Copyright 2016, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# WARNING:
# This file is use by all OpenStack-Ansible roles for testing purposes.
# Any changes here will affect all OpenStack-Ansible role repositories
# with immediate effect.
# PURPOSE:
# This script executes flake8 against all the files it find that match
# the search pattern. The search pattern is meant to find any python
# scripts present in the role.
## Shell Opts ----------------------------------------------------------------
set -e

## Vars ----------------------------------------------------------------------
export WORKING_DIR="${WORKING_DIR:-$(pwd)}"

## Main ----------------------------------------------------------------------
# Find files whose shebang line ends in "python" and run flake8 on them.
# NOTE(review): the pattern uses '.!' rather than '#!' in the original;
# kept as-is since its intent is unclear -- confirm before tightening.
# FIXES vs previous revision:
#  * '*.egg-info' is quoted so the shell cannot glob-expand it in-place.
#  * grep -Z (--null) + xargs --null makes the pipeline safe for file
#    names containing whitespace.
#  * xargs --no-run-if-empty prevents flake8 from running with no
#    arguments (which would lint the whole current directory) when no
#    matching files are found.
grep --recursive --binary-files=without-match --null \
  --files-with-matches '^.!.*python$' \
  --exclude-dir .eggs \
  --exclude-dir .git \
  --exclude-dir .tox \
  --exclude-dir '*.egg-info' \
  --exclude-dir doc \
  "${WORKING_DIR}" | xargs --null --no-run-if-empty flake8 --verbose
|
def is_palindrome(word):
    """Return True if ``word`` reads the same forwards and backwards.

    Args:
        word: sequence to test (typically a string). The empty string is
            a palindrome.

    Returns:
        bool: True when ``word`` equals its own reverse.
    """
    # Comparing the sequence to its reverse slice replaces the previous
    # list() round-trip and the redundant if/else returning True/False.
    return word == word[::-1]


result = is_palindrome('madam')
print(result)
//-------- js/AnimgenInstance.js --------
// Generated by CoffeeScript 1.12.2
// Generated CoffeeScript output -- prefer editing the .coffee source; any
// comments added here will be lost on regeneration.
(function () {
var AGI, AvCache, Config, Data, HtoG, console, cwaenv, document, log, theConfig;
// Pull collaborators out of the CWA environment registry.
cwaenv = this.getCWAEnv();
Data = cwaenv.get("Data");
AvCache = cwaenv.get("AvCache");
Config = cwaenv.get("Config");
HtoG = cwaenv.get("HtoG");
console = this.console;
document = this.document;
theConfig = Config != null ? Config.theConfig : void 0;
log = console.log;
AGI = (function () {
// cwrap'd entry points of the Emscripten-compiled animgen engine.
// (References to AGI here resolve to the hoisted constructor function
// declared further down in this closure.)
AGI.animInit = Module.cwrap('animgenInit', ['number'], []);
AGI.animAlloc = Module.cwrap('animgenAllocate', ['number'], ['number', 'string', 'number', 'number']);
AGI.animSetOutput = Module.cwrap('animgenSetOutput', ['number'], ['number', 'string']);
AGI.animSetAvatar = Module.cwrap('animgenSetAvatar', ['number'], ['number', 'string', 'string', 'string', 'string', 'string']);
AGI.animSetSequence = Module.cwrap('animgenSetSequence', ['number'], ['number', 'number']);
AGI.animGenerateFrames = Module.cwrap('animgenGenerateFrames', ['number'], ['number', 'string']);
AGI.animDeAlloc = Module.cwrap('animgenDeAllocate', ['number'], ['number']);
AGI.animTerm = Module.cwrap('animgenTerminate', ['number'], []);
// Maximum number of concurrent animgen instances.
AGI.MAX = 1;
// Per-instance lifecycle states.
AGI.STATE = {
Unset: 0,
Alloc: 1,
AvatarSet: 2,
SeqReady: 3,
SeqInProgress: 4,
Terminated: 5
};
// Callback event codes.
AGI.CB = {
Unset: 0,
Sign: 1,
Skip: 2,
Done: 3,
Fail: 4
};
AGI.initialised = AGI.STATE.Unset;
AGI.instances = [];
AGI.H2G = null;
// True when id is a valid instance slot index.
AGI.Valid = function (id) {
return id >= 0 && id < AGI.MAX;
};
// Instance lookup; null when the slot is free.
AGI.Get = function (id) {
return AGI.instances[id] || null;
};
// One-time engine initialisation: wires the Module callback hooks through
// to the owning AGI instance. Returns false if already initialised or the
// native init fails.
AGI.Init = function () {
if (AGI.initialised !== AGI.STATE.Unset) {
console.warn("Animgen already initialised");
return false;
} else if (AGI.animInit()) {
Module.CB_SeqStart = function (jagid, status, nframes) {
return (AGI.Get(jagid)).SeqStart(status, nframes);
};
Module.CB_SignStart = function (jagid, gloss, signIx, frameIx) {
return (AGI.Get(jagid)).SignStart(gloss, signIx, frameIx);
};
Module.CB_SeqEnd = (function (_this) {
return function (jagid) {
return (AGI.Get(jagid)).SeqEnd();
};
})(this);
Module.CB_FrameStart = (function (_this) {
return function (jagid, nmorphs, nbones, time, duration) {
return (AGI.Get(jagid)).FrameStart(nmorphs, nbones, time, duration);
};
})(this);
Module.CB_Morph = function (jagid, morph) {
return (AGI.Get(jagid)).Morph(morph);
};
Module.CB_Bone = function (jagid, bone) {
return (AGI.Get(jagid)).Bone(bone);
};
AGI.initialised = AGI.STATE.Alloc;
return true;
} else {
console.warn("Animgen failed to initialise");
return false;
}
};
// Shut the engine down; only legal from the initialised-but-idle state.
AGI.Terminate = function () {
if (AGI.initialised !== AGI.STATE.Alloc) {
console.warn("Animgen not initialised");
return false;
} else {
AGI.initialised = AGI.STATE.Unset;
if (AGI.animTerm()) {
return true;
} else {
console.warn("Animgen failed to terminate");
return false;
}
}
};
// Allocate a free instance slot (initialising the engine on demand).
// Returns the new AGI instance, or null when all slots are taken or the
// native allocation fails.
AGI.Alloc = function (errPath, errAppend, errNoErr) {
var id, newAGI;
if (errPath == null) {
errPath = null;
}
if (errAppend == null) {
errAppend = false;
}
if (errNoErr == null) {
errNoErr = true;
}
if (AGI.initialised !== AGI.STATE.Alloc) {
AGI.Init();
}
newAGI = null;
id = 0;
while (id < AGI.MAX && (AGI.Get(id)) !== null) {
id++;
}
if (id < AGI.MAX) {
if (AGI.animAlloc(id, errPath, errAppend, errNoErr)) {
newAGI = AGI.instances[id] = new AGI(id);
} else {
console.warn("Animgen failed to allocate instance " + id);
}
} else {
console.warn("Animgen instances all allocated");
}
return newAGI;
};
// Per-instance state: avatar binding, frame/sign accumulation buffers and
// the lazily constructed shared HamNoSys-to-Gestural converter.
function AGI(jagid1) {
this.jagid = jagid1;
this.state = AGI.STATE.Alloc;
this.doneProm = null;
this.doneResolve = null;
this.avatar = null;
this.guiPanel = null;
this.fps = 25;
this.sigmlSigns = null;
this.totFrames = 0;
this.currSigns = null;
this.currFrames = this.currGloss = this.currSignIx = this.currFrameIx = null;
this.frameMorphs = this.frameBones = this.frameTime = this.frameDur = null;
if (theConfig != null) {
if (AGI.H2G == null) {
AGI.H2G = new HtoG(Data.absoluteURI("h2s.xsl", theConfig.cwaBase), false, 0);
}
}
}
AGI.prototype.SetAvatar = function (newAv, avXMLs, gui) {
if (this.state === AGI.STATE.Alloc) {
console.log("Animgen SetAvatar " + newAv + " for instance " + this.jagid + " (" + this.state + ") gui " + gui);
if (AGI.animSetAvatar(this.jagid, newAv, avXMLs[1], avXMLs[0], avXMLs[2], avXMLs[3])) {
this.avatar = newAv;
this.guiPanel = gui;
this.state = AGI.STATE.AvatarSet;
return true;
} else {
console.warn("Animgen failed to SetAvatar " + newAv + " for instance " + this.jagid);
return false;
}
} else {
console.warn("Animgen wrong state to SetAvatar " + newAv + " for instance " + this.jagid + " (" + this.state + ")");
return false;
}
};
AGI.prototype.SetSequence = function (fps) {
if (this.state === AGI.STATE.AvatarSet || this.state === AGI.STATE.SeqReady) {
if (AGI.animSetSequence(this.jagid, fps)) {
this.fps = fps;
this.state = AGI.STATE.SeqReady;
return true;
} else {
console.warn("Animgen failed to SetSequence " + fps + " for instance " + this.jagid);
return false;
}
} else {
console.warn("Animgen wrong state to SetSequence " + fps + " for instance " + this.jagid + " (" + this.state + ")");
return false;
}
};
AGI.prototype.EndSequence = function () {
if (this.state === AGI.STATE.AvatarSet || this.state === AGI.STATE.SeqReady) {
console.log("Animgen EndSequence for instance " + this.jagid + " (" + this.state + ")");
this.state = AGI.STATE.AvatarSet;
if (typeof this.doneResolve === "function") {
this.doneResolve(null);
}
return true;
} else {
console.warn("Animgen wrong state to EndSequence for instance " + this.jagid + " (" + this.state + ")");
return false;
}
};
AGI.prototype.GenerateFrames = function (sigml, casCB) {
  // Run animgen over one SiGML fragment. When casCB is set, each produced
  // sign is streamed to it (see _endSign); otherwise signs accumulate in
  // currSigns and are returned here. Returns null on a state error.
  this.casCB = casCB;
  if (this.state !== AGI.STATE.SeqReady) {
    console.warn("Animgen wrong state to GenerateFrames for instance " + this.jagid + " (" + this.state + ")");
    return null;
  }
  this.state = AGI.STATE.SeqInProgress;
  console.log("***********************************************");
  if (!AGI.animGenerateFrames(this.jagid, sigml)) {
    console.warn("Animgen failed to GenerateFrames for instance " + this.jagid);
  }
  this.state = AGI.STATE.SeqReady;
  var collected = this.currSigns;
  this.currSigns = null;
  return collected;
};
AGI.prototype.DeAlloc = function () {
  // Release this instance's native animgen slot. Legal only from AvatarSet;
  // marks the instance Terminated and removes it from the registry before
  // asking the native side to free it.
  if (this.state === AGI.STATE.AvatarSet) {
    this.state = AGI.STATE.Terminated;
    AGI.instances[this.jagid] = null;
    if (AGI.animDeAlloc(this.jagid)) {
      return true;
    } else {
      console.warn("Animgen could not DeAlloc instance " + this.jagid);
      return false;
    }
  } else {
    // Consistency fix: every other wrong-state path in this file reports
    // via console.warn; this one used console.log.
    console.warn("Animgen wrong state to DeAlloc for instance " + this.jagid + " (" + this.state + ")");
    return false;
  }
};
AGI.prototype.SeqStart = function (status, nframes) {
  // Native callback: a sequence of nframes total frames is starting.
  // Resets and returns the sign accumulator.
  this.totFrames = nframes;
  this.currSigns = [];
  return this.currSigns;
};
AGI.prototype._endFrame = function () {
  // Flush the frame currently being accumulated (if any) into currFrames,
  // then clear the per-frame buffers. No-op when no frame is in progress.
  if (!this.frameBones) {
    return;
  }
  this.currFrames.push({
    time: this.frameTime,
    duration: this.frameDur,
    morphs: this.frameMorphs,
    bones: this.frameBones
  });
  this.frameMorphs = null;
  this.frameBones = null;
  return null;
};
AGI.prototype.FrameStart = function (nmorphs, nbones, time, duration) {
  // Native callback: begin a new animation frame. Flushes any frame in
  // progress, then opens fresh morph/bone accumulators for this one.
  this._endFrame();
  this.frameTime = time;
  this.frameDur = duration;
  this.frameMorphs = [];
  this.frameBones = [];
  return this.frameBones;
};
AGI.prototype.Morph = function (morph) {
  // Native callback: append one morph value to the frame under construction.
  return this.frameMorphs.push(morph);
};
AGI.prototype.Bone = function (bone) {
  // Native callback: append one bone transform to the frame under construction.
  return this.frameBones.push(bone);
};
AGI.prototype._endSign = function () {
  // Flush the sign currently being accumulated: close its last frame, wrap
  // the collected frames into a sign record, and deliver it either to the
  // streaming callback (casCB) or to the currSigns array. No-op if no sign
  // is in progress.
  if (!this.currFrames) {
    return;
  }
  this._endFrame();
  var sign = {
    type: AGI.CB.Sign,
    signIndex: this.currSignIx,
    gloss: this.currGloss,
    baseFrameIndex: this.currFrameIx,
    frames: this.currFrames
  };
  if (this.casCB) {
    this.casCB(sign);
  } else {
    this.currSigns.push(sign);
  }
  this.currFrames = null;
  this.currGloss = null;
  return null;
};
AGI.prototype.SignStart = function (gloss, signIx, frameIx) {
  // Native callback: a new sign begins. Flushes the previous sign (if any)
  // and records this sign's identity plus a fresh frame accumulator.
  this._endSign();
  this.currGloss = gloss;
  this.currSignIx = signIx;
  this.currFrameIx = frameIx;
  this.currFrames = [];
  return this.currFrames;
};
AGI.prototype.SeqEnd = function () {
  // Native callback: the sequence is finished — flush the final sign.
  return this._endSign();
};
// Resolve with an AGI instance configured for the given avatar/GUI pair.
// Reuses instance 0 when it already matches; otherwise waits for any
// in-flight sequence on it to finish, deallocates it, and builds a fresh
// instance with the avatar's XML resources from AvCache.
// NOTE(review): only instance slot 0 is ever consulted (AGI.Get(0));
// presumably a single-instance design — confirm. The 'fps' parameter is
// accepted but unused here; callers apply it later via SetSequence.
AGI.PrepInstance = function (av, gui, fps) {
  return new Promise((function (_this) {
    return function (resolve, reject) {
      var makeNewAGI, theAGI, waitAGIDone;
      theAGI = AGI.Get(0);
      // Fast path: the existing instance already has this avatar and GUI.
      if ((theAGI != null ? theAGI.avatar : void 0) === av && (theAGI != null ? theAGI.guiPanel : void 0) === gui) {
        return resolve(theAGI);
      } else {
        console.log("PrepInstance finds " + (theAGI != null ? theAGI.avatar : void 0) + "/" + (theAGI != null ? theAGI.guiPanel : void 0) + " but needs new " + av + "/" + gui);
        // Tear down the old instance (if any) and allocate + configure a
        // new one from the cached COMMON and avatar-specific ZIP entries.
        makeNewAGI = function () {
          var avC, cmC, newAGI, ref;
          if ((ref = AGI.Get(0)) != null) {
            ref.DeAlloc();
          }
          newAGI = AGI.Alloc();
          cmC = AvCache.get("COMMON");
          avC = AvCache.get(av);
          return Promise.all([cmC.getZIPEnt("config"), avC.getZIPEnt("config"), avC.getZIPEnt("asd"), avC.getZIPEnt("nonManuals")])["catch"](function (err) {
            console.warn(err);
            throw err;
          }).then(function (XMLs) {
            newAGI.SetAvatar(av, XMLs, gui);
            return resolve(newAGI);
          });
        };
        // If the current instance is mid-sequence, park on its doneProm
        // (resolved by EndSequence via doneResolve) and re-check; recurses
        // until the instance is idle, then swaps it out.
        waitAGIDone = function () {
          theAGI = AGI.Get(0);
          if ((theAGI != null) && theAGI.state !== AGI.STATE.AvatarSet) {
            if (theAGI.doneProm == null) {
              theAGI.doneProm = new Promise(function (resolveDone, rejectDone) {
                return theAGI.doneResolve = resolveDone;
              });
            }
            return theAGI.doneProm.then(function () {
              return waitAGIDone();
            });
          } else {
            return makeNewAGI();
          }
        };
        return waitAGIDone();
      }
    };
  })(this));
};
// Convert a SiGML document to CAS sign data, streaming results to CB.
// Prepares an AGI instance for (av, gui), splits the document into its
// hamgestural_sign / hns_sign children, then processes them one at a time:
// HNS signs are first translated to gestural SiGML via AGI.H2G, every sign
// is fed through GenerateFrames, and CB receives Sign/Skip events per sign
// followed by a final Done (or Fail if nothing succeeded).
AGI.SiGMLToCAS = function (sigml, av, gui, fps, CB) {
  (AGI.PrepInstance(av, gui, fps)).then((function (_this) {
    return function (theAGI) {
      var gloss, i, len, nd, nn, okSigns, procSigns, ref, ref1, sigDoc;
      console.log("SiGMLToCAS " + theAGI.jagid + " call SetSequence " + av + "/" + gui + " fps " + fps);
      // NOTE(review): sigmlSigns is stored on '_this' (the enclosing
      // object), so concurrent SiGMLToCAS calls would share/clobber this
      // work queue — confirm single-flight usage.
      _this.sigmlSigns = [];
      sigDoc = Data.toDOM(sigml, function (err, msg) {
        return console.log("SiGML malformed: " + err);
      });
      // Collect the recognised sign elements, each re-wrapped in its own
      // minimal <sigml> document for the generator.
      ref = (sigDoc != null ? sigDoc.documentElement.childNodes : void 0) || [];
      for (i = 0, len = ref.length; i < len; i++) {
        nd = ref[i];
        nn = nd != null ? nd.nodeName : void 0;
        if (nn === "hamgestural_sign" || nn === "hns_sign") {
          gloss = (ref1 = nd.attributes.getNamedItem("gloss")) != null ? ref1.value : void 0;
          console.log("SiGML XML has: " + gloss);
          _this.sigmlSigns.push({
            type: nn,
            gloss: gloss,
            sigml: "<sigml>" + (Data.mnDOM(nd)) + "</sigml>"
          });
        }
      }
      if (_this.sigmlSigns.length === 0) {
        console.log("No signs");
        CB({
          type: AGI.CB.Fail,
          err: "SiGML invalid"
        });
      } else {
        theAGI.SetSequence(fps);
        okSigns = 0;
        // Process one queued sign per invocation, rescheduling itself via
        // setTimeout so the event loop stays responsive between signs.
        procSigns = function () {
          var h2gCB, sign;
          if (_this.sigmlSigns.length === 0) {
            // Queue drained: report overall success or failure.
            if (okSigns > 0) {
              theAGI.EndSequence();
              return CB({
                type: AGI.CB.Done
              });
            } else {
              return CB({
                type: AGI.CB.Fail,
                err: "SiGML invalid"
              });
            }
          } else {
            sign = _this.sigmlSigns.shift();
            if (sign.type === "hns_sign") {
              // HNS sign: translate to gestural SiGML first; h2gCB then
              // feeds the translation through GenerateFrames.
              h2gCB = function (res) {
                if (res.errCount > 0 || !res.sigDoc) {
                  console.warn("HtoG Errors:\n" + res.errText);
                  CB({
                    type: AGI.CB.Skip,
                    err: "SiGML invalid HNS sign"
                  });
                } else if (theAGI.GenerateFrames(Data.mnDOM(res.sigDoc), CB)) {
                  okSigns++;
                } else {
                  CB({
                    type: AGI.CB.Skip,
                    err: "SiGML invalid sign"
                  });
                }
                return setTimeout(procSigns, 5);
              };
              // NOTE(review): 'temp' and its console.log look like leftover
              // debug instrumentation — consider removing.
              var temp=AGI.H2G.parseHtoGText(sign.sigml, h2gCB);
              console.log(temp);
              return temp;
            } else {
              // Gestural sign: generate frames directly.
              if (theAGI.GenerateFrames(sign.sigml, CB)) {
                okSigns++;
              }
              return setTimeout(procSigns, 5);
            }
          }
        };
        procSigns();
      }
      return void 0;
    };
  })(this));
  return void 0;
};
return AGI;
})();
cwaenv.add(AGI, "AGI");
}).call(this); |
#!/bin/bash
# Copyright (c) 2017, WSO2 Inc. (http://wso2.com) All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds the travelocity.com and playground2 sample webapps, rewrites their
# SSO/OAuth configuration for the target Identity Server and Tomcat, deploys
# both wars through the Tomcat manager text API, and waits until the
# travelocity app reports "running".

# Abort on unhandled command failures (mvn/jar/curl); previously a failed
# build would silently continue and deploy stale artifacts.
set -e

# Required environment, inherited from the caller; fail fast when missing
# (these were previously used unchecked).
: "${scriptPath:?scriptPath must be set}"
: "${tomcatHost:?tomcatHost must be set}"
: "${tomcatPort:?tomcatPort must be set}"
: "${tomcatUsername:?tomcatUsername must be set}"
: "${tomcatPassword:?tomcatPassword must be set}"
: "${serverHost:?serverHost must be set}"
: "${serverPort:?serverPort must be set}"

#properties
#TODO:read below property from infra.json file
appName="travelocity.com"
appName2="playground2"
tomcatVersion=7
solutionPath=/

#travelocity properties
SAML2AssertionConsumerURL="http://$tomcatHost:$tomcatPort/$appName/home.jsp"
SAML2IdPURL="https://$serverHost:$serverPort/samlsso"
SAML2SPEntityId="$appName"
SkipURIs="/$appName/index.jsp"
SAML2IdPEntityId="$serverHost"
ConsumerKey="testmaneeshacode0000001230000"
ConsumerSecret="testmaneeshasec00000000456000"
PlaygroundCallbackURL="http://$tomcatHost:$tomcatPort/$appName2/oauth2client"

# Config files rewritten below (hoisted so the sed lines stay readable).
travelocityProps="$scriptPath/../temp/travelocity.com/WEB-INF/classes/travelocity.properties"
playgroundProps="$scriptPath/../temp/playground2/src/main/resources/playground2.properties"

#create temporary directory (-p: tolerate an existing one, as before set -e)
mkdir -p "$scriptPath/../temp"
#copying travelocity app to temp directory
cp -r "$scriptPath/../../apps/sso-agent-sample" "$scriptPath/../temp/"
cd "$scriptPath/../temp/sso-agent-sample/"
#build travelocity app from source
mvn clean install
mkdir -p "$scriptPath/../temp/travelocity.com"
cd "$scriptPath/../temp/travelocity.com"
#extract travelocity.com.war to temp directory for further configurations
jar xvf "$scriptPath/../temp/sso-agent-sample/target/travelocity.com.war"
#updating travelocity.properties file
sed -i -e "s|^\(SAML2\.AssertionConsumerURL\s*=\s*\).*\$|\1${SAML2AssertionConsumerURL}|" "$travelocityProps"
sed -i -e "s|^\(SAML2\.IdPURL\s*=\s*\).*\$|\1${SAML2IdPURL}|" "$travelocityProps"
sed -i -e "s|^\(SAML2\.SPEntityId\s*=\s*\).*\$|\1${SAML2SPEntityId}|" "$travelocityProps"
sed -i -e "s|^\(SkipURIs\s*=\s*\).*\$|\1${SkipURIs}|" "$travelocityProps"
sed -i -e "s|^\(SAML2\.IdPEntityId\s*=\s*\).*\$|\1${SAML2IdPEntityId}|" "$travelocityProps"
#repackaging travelocity app
cd "$scriptPath/../temp/travelocity.com/"
jar cvf "$scriptPath/../temp/travelocity.com.war" .
#copying playground app to temp directory
cp -r "$scriptPath/../../apps/playground2" "$scriptPath/../temp/"
cd "$scriptPath/../temp/playground2/"
#updating playground2.properties file
sed -i -e "s|^\(ConsumerKey\s*=\s*\).*\$|\1${ConsumerKey}|" "$playgroundProps"
sed -i -e "s|^\(ConsumerSecret\s*=\s*\).*\$|\1${ConsumerSecret}|" "$playgroundProps"
sed -i -e "s|^\(IdentityServerHostName\s*=\s*\).*\$|\1${serverHost}|" "$playgroundProps"
sed -i -e "s|^\(IdentityServerPort\s*=\s*\).*\$|\1${serverPort}|" "$playgroundProps"
sed -i -e "s|^\(CallbackURL\s*=\s*\).*\$|\1${PlaygroundCallbackURL}|" "$playgroundProps"
#build playground app from source
mvn clean install
cp -r "$scriptPath/../temp/playground2/target/playground2.war" "$scriptPath/../temp/"
#deploy webapps on tomcat (tomcat7/8 manager text API)
#NOTE: credentials ride in the URL — do not enable 'set -x' in CI logs.
cd "$scriptPath/../temp/"
curl -T "playground2.war" "http://$tomcatUsername:$tomcatPassword@$tomcatHost:$tomcatPort/manager/text/deploy?path=/playground2&update=true"
curl -T "travelocity.com.war" "http://$tomcatUsername:$tomcatPassword@$tomcatHost:$tomcatPort/manager/text/deploy?path=/travelocity.com&update=true"

# Poll the manager until the travelocity app is running, giving up after
# retry_count attempts.
x=0
retry_count=10
while true
do
  echo "$(date) Waiting until deploying the app on Tomcat!"
  if curl -s "http://$tomcatUsername:$tomcatPassword@$tomcatHost:$tomcatPort/manager/text/list" | grep "${appName}:running"
  then
    echo "Found ${appName} is running on Tomcat"
    echo "Done base-setup.sh"
    exit 0
  else
    echo "Context /${appName} Not Found"
    # Numeric comparison (was a string '=' test).
    if [ "$x" -eq "$retry_count" ]; then
      echo "ERROR on app deployment"
      exit 1
    fi
  fi
  x=$((x+1))
  sleep 1
done
|
# jpegtclConfig.sh --
#
# This shell script (for sh) is generated automatically by jpegtcl's
# configure script. It will create shell variables for most of
# the configuration options discovered by the configure script.
# This script is intended to be included by the configure scripts
# for jpegtcl extensions so that they don't have to figure this all
# out for themselves. This file does not duplicate information
# already provided by tclConfig.sh, so you may need to use that
# file in addition to this one.
#
# The information in this file is specific to a single platform.
# jpegtcl's version number.
jpegtcl_VERSION='9.2'
# NOTE(review): the three split-version fields below were left empty by the
# configure run that generated this file, even though jpegtcl_VERSION is
# '9.2'; consumers reading them get empty strings. Presumably they should
# be 9 / 2 — confirm against the generating configure script.
jpegtcl_MAJOR_VERSION=''
jpegtcl_MINOR_VERSION=''
jpegtcl_RELEASE_LEVEL=''
# The name of the jpegtcl library (may be either a .a file or a shared library):
jpegtcl_LIB_FILE=libjpegtcl9.2.so
# NOTE(review): every *_BUILD_* value below points into a temporary build
# tree ('/tmp/---warning-do-not-use----/...') that only existed on the
# machine where jpegtcl was compiled; only the installed-directory values
# are usable elsewhere.
# String to pass to linker to pick up the jpegtcl library from its
# build directory.
jpegtcl_BUILD_LIB_SPEC='-L/tmp/---warning-do-not-use----/Img/libjpeg -ljpegtcl9.2'
# String to pass to linker to pick up the jpegtcl library from its
# installed directory.
jpegtcl_LIB_SPEC='-L/home/ericson/Downloads/1_Projects_on_GIT/CURICA_FRAMEWORK/TrainingScripts/HTSTrainingScripts/dependencies/newActiveTcl/lib/Img1.4.6 -ljpegtcl9.2'
# The name of the jpegtcl stub library (a .a file):
jpegtcl_STUB_LIB_FILE=libjpegtclstub9.2.a
# String to pass to linker to pick up the jpegtcl stub library from its
# build directory.
jpegtcl_BUILD_STUB_LIB_SPEC='-L/tmp/---warning-do-not-use----/Img/libjpeg -ljpegtclstub9.2'
# String to pass to linker to pick up the jpegtcl stub library from its
# installed directory.
jpegtcl_STUB_LIB_SPEC='-L/home/ericson/Downloads/1_Projects_on_GIT/CURICA_FRAMEWORK/TrainingScripts/HTSTrainingScripts/dependencies/newActiveTcl/lib/Img1.4.6 -ljpegtclstub9.2'
# String to pass to linker to pick up the jpegtcl stub library from its
# build directory.
jpegtcl_BUILD_STUB_LIB_PATH='/tmp/---warning-do-not-use----/Img/libjpeg/libjpegtclstub9.2.a'
# String to pass to linker to pick up the jpegtcl stub library from its
# installed directory.
jpegtcl_STUB_LIB_PATH='/home/ericson/Downloads/1_Projects_on_GIT/CURICA_FRAMEWORK/TrainingScripts/HTSTrainingScripts/dependencies/newActiveTcl/lib/Img1.4.6/libjpegtclstub9.2.a'
# Location of the top-level source directories from which jpegtcl
# was built. This is the directory that contains generic, unix, etc.
# If jpegtcl was compiled in a different place than the directory
# containing the source files, this points to the location of the
# sources, not the location where jpegtcl was compiled. This can
# be relative to the build directory.
jpegtcl_SRC_DIR='/tmp/---warning-do-not-use----/Img/libjpeg'
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-plugins/csv-common/src/main/java/io/opensphere/csvcommon/detect/location/model/LocationResults.java
package io.opensphere.csvcommon.detect.location.model;
import java.util.Collections;
import java.util.List;
import io.opensphere.core.util.collections.New;
import io.opensphere.importer.config.ColumnType;
/**
* The LocationResults contain all potential location columns identified in a
* file header including either single latitude and longitude columns that have
* been paired or individual columns that have been identified as location
* columns.
*/
public class LocationResults
{
    /** The lat/lon column-pair results. Never null (initialized in ctor). */
    private final List<LatLonColumnResults> myLatLonResults;

    /** The individual location column results. Never null (initialized in ctor). */
    private final List<PotentialLocationColumn> myLocationColumns;

    /**
     * Instantiates a new location results.
     */
    public LocationResults()
    {
        myLatLonResults = New.list();
        myLocationColumns = New.list();
    }

    /**
     * Adds a result to the lat/lon pair list.
     *
     * @param latLonResults the lat lon results
     */
    public void addResult(LatLonColumnResults latLonResults)
    {
        myLatLonResults.add(latLonResults);
    }

    /**
     * Adds a result to the location column list.
     *
     * @param locationResult the location result
     */
    public void addResult(PotentialLocationColumn locationResult)
    {
        myLocationColumns.add(locationResult);
    }

    /**
     * Gets the most likely lat lon column pair.
     *
     * @return the highest-confidence pair, or null if there are no pairs
     */
    public LatLonColumnResults getMostLikelyLatLonColumnPair()
    {
        // The field is final and always initialized, so the previous null
        // check was redundant; an empty list simply yields null.
        float confidence = 0f;
        LatLonColumnResults results = null;
        for (LatLonColumnResults res : myLatLonResults)
        {
            if (res.getConfidence() > confidence)
            {
                confidence = res.getConfidence();
                results = res;
            }
        }
        return results;
    }

    /**
     * Gets the most likely location column of the given type.
     *
     * @param type the type
     * @return the highest-confidence column of that type, or null if none
     */
    public PotentialLocationColumn getMostLikelyLocationColumn(ColumnType type)
    {
        PotentialLocationColumn result = null;
        float conf = 0f;
        for (PotentialLocationColumn res : myLocationColumns)
        {
            if (res.getType().equals(type) && res.getConfidence() > conf)
            {
                conf = res.getConfidence();
                result = res;
            }
        }
        return result;
    }

    /**
     * Gets the most likely location column of any type.
     *
     * @return the highest-confidence column, or null if there are none
     */
    public PotentialLocationColumn getMostLikelyLocationColumn()
    {
        float confidence = 0f;
        PotentialLocationColumn result = null;
        for (PotentialLocationColumn res : myLocationColumns)
        {
            if (res.getConfidence() > confidence)
            {
                confidence = res.getConfidence();
                result = res;
            }
        }
        return result;
    }

    /**
     * Gets the greater of the 2 potential confidence values.
     *
     * @return the confidence
     */
    public float getConfidence()
    {
        // Hoist the best results into locals; the original called each
        // getter twice, scanning the lists an extra time.
        LatLonColumnResults bestPair = getMostLikelyLatLonColumnPair();
        PotentialLocationColumn bestColumn = getMostLikelyLocationColumn();
        float latLonConfidence = bestPair == null ? 0f : bestPair.getConfidence();
        float locationConfidence = bestColumn == null ? 0f : bestColumn.getConfidence();
        return Math.max(latLonConfidence, locationConfidence);
    }

    /**
     * Gets the lat/lon results.
     *
     * @return the lat lon results (unmodifiable view)
     */
    public List<LatLonColumnResults> getLatLonResults()
    {
        return Collections.unmodifiableList(myLatLonResults);
    }

    /**
     * Removes a LatLonColumnResults result.
     *
     * @param toRemove the result to remove
     */
    public void removeLatLonResult(LatLonColumnResults toRemove)
    {
        myLatLonResults.remove(toRemove);
    }

    /**
     * Gets the location results.
     *
     * @return the location results (unmodifiable view)
     */
    public List<PotentialLocationColumn> getLocationResults()
    {
        return Collections.unmodifiableList(myLocationColumns);
    }

    /**
     * Removes a location column.
     *
     * @param toRemove the to remove
     */
    public void removeLocationColumn(PotentialLocationColumn toRemove)
    {
        myLocationColumns.remove(toRemove);
    }
}
|
import zlib
import click
__version__ = "0.1alpha"
def crc32(file, chunk_size):
    """Compute the CRC-32 of a binary file-like object, reading in chunks.

    Args:
        file: Binary file-like object supporting ``read(n)``.
        chunk_size: Number of bytes to read per iteration.

    Returns:
        The CRC-32 of the full contents as an unsigned 32-bit integer.
        An empty stream yields 0, the CRC-32 of zero bytes (the previous
        implementation returned None in that case).
    """
    checksum = 0
    while True:
        data = file.read(chunk_size)
        if not data:
            break
        # Fold each chunk into the running checksum; the mask keeps the
        # result unsigned on all Python versions.
        checksum = zlib.crc32(data, checksum) & 0xffffffff
    return checksum
@click.command()
@click.argument('files', type=click.File('rb', lazy=True), nargs=-1)
@click.option('--chunk-size', type=int, default=4096)
def main(files, chunk_size):
    """Print the CRC-32 of each FILE, colour-coded.

    Green when the checksum already appears in the file name (a common
    convention for release files), red otherwise.
    """
    for file in files:
        checksum = crc32(file, chunk_size)
        # Bug fix: 'if not checksum' also skipped files whose CRC is
        # legitimately 0; only skip when no checksum was produced at all.
        if checksum is None:
            continue
        hex_checksum = "{0:08x}".format(checksum)
        # Bug fix: the template hard-coded a literal "(unknown)" and never
        # used the 'filename' keyword argument; show the real file name.
        message = ' [ {checksum} ] "{filename}"'.format(
            checksum=hex_checksum, filename=file.name)
        if hex_checksum in file.name.lower():
            message = click.style("".join((" ", message)), fg="green")
        else:
            message = click.style("".join(("!", message)), fg="red")
        click.echo(message)
|
/*!
\brief Implementation of methods of the TextureWriterXNB class.
\file PVRCore/textureio/TextureReaderXNB.cpp
\author PowerVR by Imagination, Developer Technology Team
\copyright Copyright (c) Imagination Technologies Limited.
*/
//!\cond NO_DOXYGEN
#include "PVRCore/textureio/TextureReaderXNB.h"
#include "FileDefinesXNB.h"
using std::vector;
namespace pvr {
namespace assetReaders {
namespace {
/// Decode a .NET-style 7-bit variable-length integer from the stream:
/// each byte carries 7 payload bits, and the high bit marks continuation.
/// \param stream Source stream; readExact throws on failure.
/// \param decodedInteger Receives the decoded value.
void read7BitEncodedInt(const ::pvr::Stream& stream, int32_t& decodedInteger)
{
	// Values used to decode the integer
	int32_t bitsRead = 0;
	// Bug fix: the original read each byte into the address of an int32_t,
	// which only produces the intended value on little-endian hosts. Read
	// into an actual byte instead.
	uint8_t value = 0;

	// Initialize the decoded integer
	decodedInteger = 0;

	// Loop through and read all the appropriate data to decode the integer
	do
	{
		// Read the next 7-bit group
		stream.readExact(1, 1, &value);

		// Add the bits to the decoded integer and increase the bit counter
		decodedInteger |= static_cast<int32_t>(value & 0x7f) << bitsRead;
		bitsRead += 7;
	} while (value & 0x80);
}
/// Read and validate the fixed XNB container header ("XNB" magic,
/// platform byte, format version, flags, total file size) from the stream.
/// Throws InvalidDataError when the magic or version is wrong.
/// NOTE(review): fileSize is read with a raw 4-byte readExact, so this
/// assumes a little-endian host matching the on-disk layout — confirm.
void readFileHeader(const ::pvr::Stream& stream, texture_xnb::FileHeader& xnbFileHeader)
{
	// Read the identifier
	stream.readExact(1, 3, xnbFileHeader.identifier);

	// Verify that it's an XNB header before doing anything else.
	if ((xnbFileHeader.identifier[0] != 'X') || (xnbFileHeader.identifier[1] != 'N') || (xnbFileHeader.identifier[2] != 'B'))
	{ throw InvalidDataError("[TextureReaderXNB::readFileHeader][" + stream.getFileName() + "]: Stream was not a valid XNB"); } // Read the platform
	stream.readExact(1, 1, &xnbFileHeader.platform);

	// Read the version
	stream.readExact(1, 1, &xnbFileHeader.version);

	// Check that the version is '5' to ensure it's a supported version
	if (xnbFileHeader.version != 5) { throw InvalidDataError("[TextureReaderXNB::readFileHeader][" + stream.getFileName() + "]: XNB Version must be 5"); }

	// Read the flags
	stream.readExact(1, 1, &xnbFileHeader.flags);

	// Read the file size
	stream.readExact(4, 1, &xnbFileHeader.fileSize);
}
/// Read a length-prefixed string (7-bit-encoded length, then the bytes,
/// including a trailing NUL stored in the file) into stringToRead.
void readString(const ::pvr::Stream& stream, std::string& stringToRead)
{
	// Read the std::string length
	int32_t stringLength = 0;
	read7BitEncodedInt(stream, stringLength);

	// Allocate a buffer to read in the std::string, don't forget to add a char for the NULL character.
	stringToRead.resize(static_cast<size_t>(stringLength + 1));

	// Bug fix: the original passed &stringToRead — the address of the
	// std::string OBJECT — to readExact, overwriting the string's internal
	// bookkeeping rather than its character storage. Read into the
	// character buffer instead.
	stream.readExact(1u, static_cast<size_t>(stringLength + 1), &stringToRead[0]);
}
/// Decode an XNB "Texture2DReader" payload: read the Texture2DHeader
/// fields in file order, build a matching PVR TextureHeader, then read
/// each mip level, validating the per-level size prefix against the size
/// implied by the header. Throws InvalidDataError on a size mismatch.
/// NOTE(review): multi-byte fields are read with raw readExact calls,
/// assuming a little-endian host — confirm.
void read2DTexture(const ::pvr::Stream& stream, texture_xnb::Texture2DHeader& assetHeader, Texture& asset)
{
	// Read the surface format
	stream.readExact(sizeof(assetHeader.format), 1, &assetHeader.format);

	// Read the width
	stream.readExact(sizeof(assetHeader.width), 1, &assetHeader.width);

	// Read the height
	stream.readExact(sizeof(assetHeader.height), 1, &assetHeader.height);

	// Read the mip map count
	stream.readExact(sizeof(assetHeader.numMipMaps), 1, &assetHeader.numMipMaps);

	// Setup the texture header from the decoded XNB format
	TextureHeader textureHeader;
	textureHeader.setPixelFormat(getPVRFormatFromXNBFormat(static_cast<uint64_t>(assetHeader.format)));
	textureHeader.setChannelType(getPVRTypeFromXNBFormat(static_cast<uint64_t>(assetHeader.format)));
	textureHeader.setWidth(assetHeader.width);
	textureHeader.setHeight(assetHeader.height);
	textureHeader.setNumMipMapLevels(assetHeader.numMipMaps);

	// Create the texture (NULL data: allocates storage to be filled below)
	asset = Texture(textureHeader, NULL);

	// Read the texture data, one mip level at a time
	for (uint32_t mipMapLevel = 0; mipMapLevel < asset.getNumMipMapLevels(); ++mipMapLevel)
	{
		// Read in the size of the next surface
		uint32_t surfaceSize = 0;
		stream.readExact(sizeof(surfaceSize), 1, &surfaceSize);

		// Make sure the surface size matches...
		if (surfaceSize != asset.getDataSize(mipMapLevel))
		{ throw InvalidDataError("[TextureReaderXNB::readFileHeader][" + stream.getFileName() + "]: Expected data size did not match actual size"); }

		// Read in the texture data.
		stream.readExact(1, surfaceSize, asset.getDataPointer(mipMapLevel));
	}
}
/// Decode an XNB "Texture3DReader" payload: like read2DTexture, but also
/// reads a depth field and sets it on the PVR header before reading each
/// mip level with its size prefix. Throws InvalidDataError on mismatch.
/// NOTE(review): raw readExact calls assume a little-endian host — confirm.
void read3DTexture(const ::pvr::Stream& stream, texture_xnb::Texture3DHeader& assetHeader, Texture& asset)
{
	// Read the surface format
	stream.readExact(sizeof(assetHeader.format), 1, &assetHeader.format);

	// Read the width
	stream.readExact(sizeof(assetHeader.width), 1, &assetHeader.width);

	// Read the height
	stream.readExact(sizeof(assetHeader.height), 1, &assetHeader.height);

	// Read the depth
	stream.readExact(sizeof(assetHeader.depth), 1, &assetHeader.depth);

	// Read the mip map count
	stream.readExact(sizeof(assetHeader.numMipMaps), 1, &assetHeader.numMipMaps);

	// Setup the texture header from the decoded XNB format
	TextureHeader textureHeader;
	textureHeader.setPixelFormat(getPVRFormatFromXNBFormat(static_cast<uint64_t>(assetHeader.format)));
	textureHeader.setChannelType(getPVRTypeFromXNBFormat(static_cast<uint64_t>(assetHeader.format)));
	textureHeader.setWidth(assetHeader.width);
	textureHeader.setHeight(assetHeader.height);
	textureHeader.setDepth(assetHeader.depth);
	textureHeader.setNumMipMapLevels(assetHeader.numMipMaps);

	// Create the texture (NULL data: allocates storage to be filled below)
	asset = Texture(textureHeader, NULL);

	// Read the texture data, one mip level at a time
	for (uint32_t mipMapLevel = 0; mipMapLevel < asset.getNumMipMapLevels(); ++mipMapLevel)
	{
		// Read in the size of the next surface
		uint32_t surfaceSize = 0;
		stream.readExact(sizeof(surfaceSize), 1, &surfaceSize);

		// Make sure the surface size matches...
		if (surfaceSize != asset.getDataSize(mipMapLevel))
		{ throw InvalidDataError("[TextureReaderXNB::readFileHeader][" + stream.getFileName() + "]: Expected data size did not match actual size"); }

		// Read in the texture data.
		stream.readExact(1, surfaceSize, asset.getDataPointer(mipMapLevel));
	}
}
/// Decode an XNB "TextureCubeReader" payload: a single 'size' field gives
/// both width and height, six faces are configured, and the data loop runs
/// face-major / mip-minor in file order. Throws InvalidDataError when a
/// per-surface size prefix does not match the expected single-face size.
/// NOTE(review): raw readExact calls assume a little-endian host — confirm.
void readCubeTexture(const ::pvr::Stream& stream, texture_xnb::TextureCubeHeader& assetHeader, Texture& asset)
{
	// Read the surface format
	stream.readExact(sizeof(assetHeader.format), 1, &assetHeader.format);

	// Read the width (cube faces are square, so this is height too)
	stream.readExact(sizeof(assetHeader.size), 1, &assetHeader.size);

	// Read the mip map count
	stream.readExact(sizeof(assetHeader.numMipMaps), 1, &assetHeader.numMipMaps);

	// Setup the texture header from the decoded XNB format
	TextureHeader textureHeader;
	textureHeader.setPixelFormat(getPVRFormatFromXNBFormat(assetHeader.format));
	textureHeader.setChannelType(getPVRTypeFromXNBFormat(assetHeader.format));
	textureHeader.setWidth(assetHeader.size);
	textureHeader.setHeight(assetHeader.size);
	textureHeader.setNumFaces(6);
	textureHeader.setNumMipMapLevels(assetHeader.numMipMaps);

	// Create the texture (NULL data: allocates storage to be filled below)
	asset = Texture(textureHeader, NULL);

	// Read the texture data: all mip levels of each face, in face order
	for (uint32_t face = 0; face < asset.getNumFaces(); ++face)
	{
		for (uint32_t mipMapLevel = 0; mipMapLevel < asset.getNumMipMapLevels(); ++mipMapLevel)
		{
			// Read in the size of the next surface
			uint32_t surfaceSize = 0;
			stream.readExact(sizeof(surfaceSize), 1, &surfaceSize);

			// Make sure the surface size matches (single face, single level)
			if (surfaceSize != asset.getDataSize(mipMapLevel, false, false))
			{ throw InvalidDataError("[TextureReaderXNB::readFileHeader][" + stream.getFileName() + "]: Expected data size did not match actual size"); }

			// Read in the texture data.
			stream.readExact(1, surfaceSize, asset.getDataPointer(mipMapLevel, 0, face));
		}
	}
}
/// Parse the XNB container preamble: validate the file header, reject
/// compressed files, then read the type-reader directory, producing one
/// short reader name (e.g. "Texture2DReader") per primary object in
/// objectsStrings. Leaves the stream positioned at the first object body.
void initializeFile(const ::pvr::Stream& stream, texture_xnb::FileHeader& fileheader, std::vector<std::string>& objectsStrings)
{
	// NOTE(review): this sentinel is compared against std::string::find
	// results below, but find returns std::string::npos (size_t(-1)); on a
	// 64-bit platform npos != 0xffffffffu, so those comparisons are
	// effectively always true — confirm intended behaviour.
	const uint32_t c_objectNotFound = 0xffffffffu;

	// Read the file header
	readFileHeader(stream, fileheader);

	// Check if the file is compressed, if it is it's currently unsupported
	if ((fileheader.flags & texture_xnb::e_fileCompressed) != 0)
	{ throw InvalidOperationError("[TextureReaderXNB::getSupportedFileExtensions][" + stream.getFileName() + "]: Cannot load compressed XNB files - not supported."); }

	// Check the file size makes sense
	if (fileheader.fileSize != stream.getSize())
	{ throw InvalidDataError("[TextureReaderXNB::getSupportedFileExtensions][" + stream.getFileName() + "]: Data error: File size does not match stream size"); }

	// Read the number of primary objects in the file
	int32_t numAssets = 0;
	read7BitEncodedInt(stream, numAssets);

	// Resize the std::string array to hold std::string identifiers for all the assets
	objectsStrings.resize(numAssets);

	// Loop through and get all the object names
	for (int32_t assetIndex = 0; assetIndex < numAssets; ++assetIndex)
	{
		// Get the asset information
		std::string typeReaderInformation;
		readString(stream, typeReaderInformation);

		// Make sure the version is 4. something, and not incorrectly thrown in by something else.
		if (typeReaderInformation.find("Version=4") == std::string::npos)
		{ throw InvalidDataError("[TextureReaderXNB::getSupportedFileExtensions][" + stream.getFileName() + "]: Data error: Version should be 4"); } // Extract the object name
		// NOTE(review): this searches for a lowercase "framework.content."
		// while real XNB type strings use "Microsoft.Xna.Framework.Content."
		// (capitalised, as the "Content." search below expects) — one of the
		// two spellings is presumably wrong; confirm against sample files.
		if (typeReaderInformation.find("Microsoft.Xna.framework.content.") == std::string::npos)
		{ throw InvalidDataError("[TextureReaderXNB::getSupportedFileExtensions][" + stream.getFileName() + "]: Could not get the object name"); }

		// Extract the name of the content reader type
		size_t contentLocation = typeReaderInformation.find("Content.", 0);
		size_t typeStart = typeReaderInformation.find('.', contentLocation) + 1;
		size_t typeEnd = typeReaderInformation.find(',', typeStart);
		// NOTE(review): '||' looks like it should be '&&' (all three offsets
		// must be valid before slicing), and the second erase's start offset
		// ('length() - typeEnd') looks suspect — review against real data.
		if (contentLocation != c_objectNotFound || typeStart != c_objectNotFound || typeEnd != c_objectNotFound)
		{
			objectsStrings[assetIndex] = typeReaderInformation;
			objectsStrings[assetIndex].erase(0, typeStart);
			objectsStrings[assetIndex].erase(typeReaderInformation.length() - typeEnd, std::string::npos);
		}

		// Get the asset version
		int32_t readerVersion = 0;
		stream.readExact(sizeof(readerVersion), 1, &readerVersion);

		// If it's not version 0, it's not supported
		if (readerVersion != 0) { throw InvalidDataError("[TextureReaderXNB::getSupportedFileExtensions][" + stream.getFileName() + "]: Reader version should be 0"); }
	}

	// Read the number of shared objects in the file (value currently unused)
	int32_t numSharedAssets = 0;
	read7BitEncodedInt(stream, numSharedAssets);
}
} // namespace
/// Parse the XNB container directory, then decode the asset at assetIndex
/// as a 2D, 3D or cube texture according to its registered reader name.
/// Throws InvalidDataError for any other reader type.
Texture readXNB(const ::pvr::Stream& stream, int assetIndex)
{
	Texture result;
	texture_xnb::FileHeader fileHeader;
	std::vector<std::string> readerNames;
	initializeFile(stream, fileHeader, readerNames);

	// Dispatch on the short reader name recorded for this asset.
	const std::string& readerName = readerNames[assetIndex];
	if (readerName == "Texture2DReader")
	{
		texture_xnb::Texture2DHeader texHeader;
		read2DTexture(stream, texHeader, result);
	}
	else if (readerName == "Texture3DReader")
	{
		texture_xnb::Texture3DHeader texHeader;
		read3DTexture(stream, texHeader, result);
	}
	else if (readerName == "TextureCubeReader")
	{
		texture_xnb::TextureCubeHeader texHeader;
		readCubeTexture(stream, texHeader, result);
	}
	else
	{
		// Don't know how to handle it.
		throw InvalidDataError("[TextureReaderXNB::readAsset_]: Could not determine the texture type - was none of 2D, 3D or Cube");
	}
	return result;
}
/// Return true when the stream begins with the "XNB" magic bytes.
/// Any read failure is treated as "not an XNB".
/// NOTE(review): the stream is advanced three bytes and is NOT rewound
/// here — callers must reset it before parsing.
bool isXNB(const Stream& assetStream)
{
	char magic[3];
	try
	{
		assetStream.readExact(1, 3, magic);
	}
	catch (...)
	{
		return false;
	}
	return (magic[0] == 'X') && (magic[1] == 'N') && (magic[2] == 'B');
}
uint64_t getPVRFormatFromXNBFormat(uint32_t xnbFormat)
{
const uint64_t mappedFormats[] = {
GeneratePixelType4<'r', 'g', 'b', 'a', 8, 8, 8, 8>::ID, //???
GeneratePixelType3<'b', 'g', 'r', 5, 6, 5>::ID,
GeneratePixelType4<'b', 'g', 'r', 'a', 5, 5, 5, 1>::ID,
GeneratePixelType4<'b', 'g', 'r', 'a', 4, 4, 4, 4>::ID,
static_cast<uint64_t>(CompressedPixelFormat::DXT1),
static_cast<uint64_t>(CompressedPixelFormat::DXT3),
static_cast<uint64_t>(CompressedPixelFormat::DXT5),
GeneratePixelType2<'r', 'g', 8, 8>::ID, //???
GeneratePixelType4<'r', 'g', 'b', 'a', 8, 8, 8, 8>::ID, //???
GeneratePixelType4<'r', 'g', 'b', 'a', 10, 10, 10, 2>::ID,
GeneratePixelType2<'r', 'g', 16, 16>::ID,
GeneratePixelType4<'r', 'g', 'b', 'a', 16, 16, 16, 16>::ID,
GeneratePixelType1<'a', 8>::ID,
GeneratePixelType1<'r', 32>::ID,
GeneratePixelType2<'r', 'g', 32, 32>::ID,
GeneratePixelType4<'r', 'g', 'b', 'a', 32, 32, 32, 32>::ID,
GeneratePixelType1<'r', 16>::ID,
GeneratePixelType2<'r', 'g', 16, 16>::ID,
GeneratePixelType4<'r', 'g', 'b', 'a', 16, 16, 16, 16>::ID,
GeneratePixelType4<'r', 'g', 'b', 'a', 32, 32, 32, 32>::ID,
};
return mappedFormats[xnbFormat];
}
/// Map an XNB surface-format enumerant to the PVR per-channel variable
/// type. Indexed in parallel with getPVRFormatFromXNBFormat's table.
/// \throws InvalidDataError if xnbFormat is outside the known table.
VariableType getPVRTypeFromXNBFormat(uint32_t xnbFormat)
{
	static const VariableType mappedTypes[] = { VariableType::UnsignedByteNorm, VariableType::UnsignedShortNorm, VariableType::UnsignedShortNorm, VariableType::UnsignedShortNorm,
		VariableType::UnsignedByteNorm, VariableType::UnsignedByteNorm, VariableType::UnsignedByteNorm, VariableType::UnsignedByteNorm, VariableType::UnsignedByteNorm,
		VariableType::UnsignedIntegerNorm, VariableType::UnsignedShortNorm, VariableType::UnsignedShortNorm, VariableType::UnsignedByteNorm, VariableType::SignedFloat,
		VariableType::SignedFloat, VariableType::SignedFloat, VariableType::SignedFloat, VariableType::SignedFloat, VariableType::SignedFloat, VariableType::SignedFloat };
	// Robustness fix: guard the table index (see getPVRFormatFromXNBFormat).
	if (xnbFormat >= sizeof(mappedTypes) / sizeof(mappedTypes[0]))
	{ throw InvalidDataError("[TextureReaderXNB::getPVRTypeFromXNBFormat]: Unrecognised XNB surface format"); }
	return mappedTypes[xnbFormat];
}
} // namespace assetReaders
} // namespace pvr
//!\endcond
|
<reponame>yuewawa/ShowHelper<filename>app/src/main/java/com/yuewawa/showhelper/dao/gen/DaoSession.java
package com.yuewawa.showhelper.dao.gen;
import java.util.Map;
import org.greenrobot.greendao.AbstractDao;
import org.greenrobot.greendao.AbstractDaoSession;
import org.greenrobot.greendao.database.Database;
import org.greenrobot.greendao.identityscope.IdentityScopeType;
import org.greenrobot.greendao.internal.DaoConfig;
import com.yuewawa.showhelper.entity.FollowedShow;
import com.yuewawa.showhelper.entity.Show;
import com.yuewawa.showhelper.entity.User;
import com.yuewawa.showhelper.dao.gen.FollowedShowDao;
import com.yuewawa.showhelper.dao.gen.ShowDao;
import com.yuewawa.showhelper.dao.gen.UserDao;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* {@inheritDoc}
*
* @see org.greenrobot.greendao.AbstractDaoSession
*/
public class DaoSession extends AbstractDaoSession {

    // NOTE: this class is generated by greenDAO ("DO NOT EDIT"); any
    // comments added here will be lost on regeneration.

    // Per-DAO configuration, cloned so this session owns its identity scope.
    private final DaoConfig followedShowDaoConfig;
    private final DaoConfig showDaoConfig;
    private final DaoConfig userDaoConfig;

    // DAO instances registered with this session.
    private final FollowedShowDao followedShowDao;
    private final ShowDao showDao;
    private final UserDao userDao;

    public DaoSession(Database db, IdentityScopeType type, Map<Class<? extends AbstractDao<?, ?>>, DaoConfig>
            daoConfigMap) {
        super(db);

        // Clone each shared DaoConfig before initializing its identity
        // scope, so this session's scope does not leak into other sessions
        // built from the same config map.
        followedShowDaoConfig = daoConfigMap.get(FollowedShowDao.class).clone();
        followedShowDaoConfig.initIdentityScope(type);

        showDaoConfig = daoConfigMap.get(ShowDao.class).clone();
        showDaoConfig.initIdentityScope(type);

        userDaoConfig = daoConfigMap.get(UserDao.class).clone();
        userDaoConfig.initIdentityScope(type);

        followedShowDao = new FollowedShowDao(followedShowDaoConfig, this);
        showDao = new ShowDao(showDaoConfig, this);
        userDao = new UserDao(userDaoConfig, this);

        // Map each entity class to its DAO for session-level lookups.
        registerDao(FollowedShow.class, followedShowDao);
        registerDao(Show.class, showDao);
        registerDao(User.class, userDao);
    }

    // Drop all cached entities from every identity scope.
    public void clear() {
        followedShowDaoConfig.clearIdentityScope();
        showDaoConfig.clearIdentityScope();
        userDaoConfig.clearIdentityScope();
    }

    public FollowedShowDao getFollowedShowDao() {
        return followedShowDao;
    }

    public ShowDao getShowDao() {
        return showDao;
    }

    public UserDao getUserDao() {
        return userDao;
    }

}
|
<filename>modules/BULLET/src/CBulletSoftMultiMesh.cpp<gh_stars>1-10
//==============================================================================
/*
Software License Agreement (BSD License)
Copyright (c) 2003-2016, CHAI3D
(www.chai3d.org)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of CHAI3D nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
\author <http://www.aimlab.wpi.edu>
    \author <<EMAIL>>
\author <NAME>
\version 3.2.1 $Rev: 2161 $
*/
//==============================================================================
//------------------------------------------------------------------------------
#include "CBulletSoftMultiMesh.h"
//------------------------------------------------------------------------------
#include "CBulletWorld.h"
//------------------------------------------------------------------------------
#include "chai3d.h"
#include "btBulletDynamicsCommon.h"
#include "BulletCollision/Gimpact/btGImpactShape.h"
#include "BulletCollision/Gimpact/btGImpactCollisionAlgorithm.h"
#include <stdexcept>
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
namespace chai3d {
//------------------------------------------------------------------------------
//==============================================================================
/*!
This method assigns a desired position to the object.
\param a_position New desired position.
*/
//==============================================================================
// Assigns a new local position to both the GEL mesh and the Bullet bodies.
void cBulletSoftMultiMesh::setLocalPos(const cVector3d& a_position)
{
    // Keep the graphical (GEL) representation in sync first.
    m_gelMesh.setLocalPos(a_position);
    // get transformation matrix of object
    btTransform trans;
    btVector3 pos;
    btQuaternion q;
    // set new position
    pos[0] = a_position(0);
    pos[1] = a_position(1);
    pos[2] = a_position(2);
    // set new orientation (keep the current local rotation unchanged)
    cQuaternion quaternion;
    quaternion.fromRotMat(m_localRot);
    q.setW(quaternion.w);
    q.setX(quaternion.x);
    q.setY(quaternion.y);
    q.setZ(quaternion.z);
    // set new transform
    trans.setOrigin(pos);
    trans.setRotation(q);
    if (m_bulletMotionState)
        m_bulletMotionState->setWorldTransform(trans);
    // NOTE(review): btSoftBody::translate applies a *relative* offset, so
    // passing the absolute position here only works when the body starts at
    // the origin and this is called once — confirm intended usage.
    if (m_bulletSoftBody)
        m_bulletSoftBody->translate(pos);
}
//==============================================================================
/*!
This method assigns a desired rotation to the object.
\param a_rotation New desired orientation.
*/
//==============================================================================
// Assigns a new local orientation to both the GEL mesh and the Bullet bodies.
void cBulletSoftMultiMesh::setLocalRot(const cMatrix3d& a_rotation)
{
    // Keep the graphical (GEL) representation in sync first.
    m_gelMesh.setLocalRot(a_rotation);
    // get transformation matrix of object
    btTransform trans;
    btVector3 pos;
    btQuaternion q;
    // set new position (keep the current local position unchanged)
    pos[0] = m_localPos(0);
    pos[1] = m_localPos(1);
    pos[2] = m_localPos(2);
    // set new orientation
    cQuaternion quaternion;
    quaternion.fromRotMat(a_rotation);
    q.setW(quaternion.w);
    q.setX(quaternion.x);
    q.setY(quaternion.y);
    q.setZ(quaternion.z);
    // set new transform
    trans.setOrigin(pos);
    trans.setRotation(q);
    if (m_bulletMotionState)
        m_bulletMotionState->setWorldTransform(trans);
    // NOTE(review): btSoftBody::rotate applies a *relative* rotation, so
    // passing the absolute orientation assumes the body starts unrotated and
    // this is called once — confirm intended usage.
    if (m_bulletSoftBody)
        m_bulletSoftBody->rotate(q);
}
//==============================================================================
/*!
    This method updates x,y,z to the min value as compared to v
*/
//==============================================================================
// Lower x, y and z to the component-wise minimum of themselves and v.
void updateMins(double &x, double &y, double &z, cVector3d &v){
    if (v.x() < x) { x = v.x(); }
    if (v.y() < y) { y = v.y(); }
    if (v.z() < z) { z = v.z(); }
}
//==============================================================================
/*!
This method updates vMin to the min value as compared to v
*/
//==============================================================================
// Lower vMin to the component-wise minimum of itself and v.
void updateMins(cVector3d &vMin, cVector3d &v){
    const double mx = cMin(vMin.x(), v.x());
    const double my = cMin(vMin.y(), v.y());
    const double mz = cMin(vMin.z(), v.z());
    vMin.set(mx, my, mz);
}
//==============================================================================
/*!
This method updates x,y,z to the max value as compared to v
*/
//==============================================================================
// Raise x, y and z to the component-wise maximum of themselves and v.
void updateMaxs(double &x, double &y, double &z, cVector3d &v){
    if (v.x() > x) { x = v.x(); }
    if (v.y() > y) { y = v.y(); }
    if (v.z() > z) { z = v.z(); }
}
//==============================================================================
/*!
This method updates vMax to the max value as compared to v
*/
//==============================================================================
// Raise vMax to the component-wise maximum of itself and v.
void updateMaxs(cVector3d &vMax, cVector3d &v){
    const double mx = cMax(vMax.x(), v.x());
    const double my = cMax(vMax.y(), v.y());
    const double mz = cMax(vMax.z(), v.z());
    vMax.set(mx, my, mz);
}
//==============================================================================
/*!
This method updates the vertices from the nodes of bullet soft body
*/
//==============================================================================
// Copies each soft-body node position onto every CHAI3D mesh vertex mapped to
// it via the vertex tree, then recomputes the mesh normals.
// tree[i].vertexIdx lists the original (duplicated) vertex indices that were
// collapsed into unique soft-body node i.
void updateMesh(cMesh* mesh, btSoftBody* sb, std::vector<VertexTree>* tree){
    btVector3 bVec;
    cVector3d cVec;
    for (int i = 0 ; i < tree->size() ; i++){
        bVec = sb->m_nodes[i].m_x;
        cVec.set(bVec.x(), bVec.y(), bVec.z());
        for (int j = 0 ; j < (*tree)[i].vertexIdx.size() ; j++){
            int idx = (*tree)[i].vertexIdx[j];
            mesh->m_vertices->setLocalPos(idx, cVec);
        }
    }
    mesh->computeAllNormals();
}
// Renders the deformable mesh: refreshes vertex positions from the skeleton,
// recomputes normals, then delegates drawing to the GEL mesh.
void cBulletSoftMultiMesh::render(cRenderOptions &a_options){
    m_gelMesh.updateVertexPosition();
    m_gelMesh.computeAllNormals();
    m_gelMesh.render(a_options);
}
//==============================================================================
/*!
This method updates the position and orientation data from the Bullet
representation to the CHAI3D representation.
*/
//==============================================================================
// Pulls the simulated state from the Bullet soft body into the CHAI3D/GEL
// representation, and (optionally) publishes the pose to the chai-env object.
void cBulletSoftMultiMesh::updatePositionFromDynamics()
{
    if (m_bulletSoftBody)
    {
        updateGELSkeletonFrombtSoftBody();
    }
    // update Transform data for m_rosObj
#ifdef C_ENABLE_CHAI_ENV_SUPPORT
    if(m_afObjectPtr.get() != nullptr){
        // Publish the current local pose (position + quaternion orientation).
        m_afObjectPtr->cur_position(m_localPos.x(), m_localPos.y(), m_localPos.z());
        cQuaternion q;
        q.fromRotMat(m_localRot);
        m_afObjectPtr->cur_orientation(q.x, q.y, q.z, q.w);
    }
#endif
}
//bool isPresentInGrid(int cntIdx, cVector3d &v, cVector3d &vMin, cVector3d &vBounds, bool* vtxCheckGrid, int* vtxIdxGrid){
// int xIdx, yIdx, zIdx;
// xIdx = (v.x() + vMin.x()) / vBounds.x();
// yIdx = (v.y() + vMin.y()) / vBounds.y();
// zIdx = (v.z() + vMin.z()) / vBounds.z();
// if (vtxCheckGrid[xIdx + yIdx + zIdx] == false){
// }
//}
// Reset the flattened blockSize^3 scratch blocks: every "visited" flag to
// false and every index slot to -1 (i.e. "no vertex recorded here").
void clearArrays(bool * vtxChkBlock, int * vtxIdxBlock, int blockSize){
    const int total = blockSize * blockSize * blockSize;
    for (int i = 0; i < total; ++i){
        vtxChkBlock[i] = false;
        vtxIdxBlock[i] = -1;
    }
}
// Loads the visual mesh from file by delegating to the GEL multi-mesh.
// Returns true on success.
bool cBulletSoftMultiMesh::loadFromFile(std::string a_filename){
    return m_gelMesh.loadFromFile(a_filename);
}
// Collapses duplicated mesh vertices (meshes typically store one vertex per
// triangle corner) into a unique vertex list suitable for building a
// btSoftBody, and remaps triangle indices accordingly.
//   outputVertices  - receives x,y,z triples of the unique vertices
//   outputTriangles - receives triangle indices into the unique vertex list
//   m_vertexTree    - per unique vertex, the original vertex indices it covers
// Duplicates are detected spatially: vertices are hashed into a 3D grid that
// is processed block-by-block to bound memory use.
// NOTE(review): the vtxIdxTriPair / vtxChkBlock / vtxIdxBlock arrays below are
// variable-length stack arrays (a GCC extension, not standard C++); very large
// meshes risk stack overflow — consider std::vector. Left unchanged here.
void cBulletSoftMultiMesh::computeUniqueVerticesandTriangles(cMesh* mesh, std::vector<btScalar>* outputVertices, std::vector<int>* outputTriangles, bool print_debug_info){
    // read number of triangles of the object
    int numTriangles = mesh->m_triangles->getNumElements();
    int numVertices = mesh->m_vertices->getNumElements();
    if (print_debug_info){
        printf("# Triangles %d, # Vertices %d \n", numTriangles, numVertices);
    }
    // The max number of vertices to check per block
    int blockSize = 60;
    // Number of default blocks
    int numBlocks = 1;
    //Define bound for lowest value of vertices
    cVector3d vMin(9999,9999,9999);
    //Define bound for max value of vertices
    cVector3d vMax(-9999,-9999,-9999);
    cVector3d vBounds;
    // Update the min and max value x,y,z value of vertices to get bounds
    for (int x = 0 ; x < numVertices ; x++){
        cVector3d v = mesh->m_vertices->getLocalPos(x);
        updateMins(vMin, v);
        updateMaxs(vMax, v);
    }
    // Update magnitude of bound
    vBounds = vMax - vMin;
    if (print_debug_info){
        printf("***************************************\n");
        printf("Vmin = [%f, %f, %f] \n", vMin.x(), vMin.y(), vMin.z());
        printf("Vmax = [%f, %f, %f] \n", vMax.x(), vMax.y(), vMax.z());
        printf("VBounds = [%f, %f, %f] \n", vBounds.x(), vBounds.y(), vBounds.z());
        printf("***************************************\n");
    }
    // Place holder for count of repeat and duplicate vertices
    int uniqueVtxCount = 0;
    int duplicateVtxCount = 0;
    // If number of vertices is greater the vertices per block, increase no of blocks
    // This is to prevent memory exhaustion
    if (numVertices > blockSize){
        numBlocks = std::ceil((float)numVertices / (float)blockSize);
    }
    if (print_debug_info){
        printf("Using %d blocks \n", numBlocks);
    }
    // Copy over the vertices to process without altering the original data
    auto vtxArrCopy = mesh->m_vertices->copy();
    // This tri vector is to store the unaltered indices in the first row vertices referring to their
    // original copy in the second row. The third row contains the index to the vertices after
    // the unique vertices have been place in the outputVertices array
    // . E.g. if a vertex at idx 5 was a repeat of vtx at idx 3, vtxIdxPair[5][0] = 5 ; vtxIdxPair[5][1] = 3;
    // and if the vertex was added to the array of unique vertices at Idx 2 then vtxIdxPair[5][2] = 2;
    int vtxIdxTriPair [numVertices][3];
    memset(vtxIdxTriPair, -1, numVertices*3*sizeof(int));
    // This forms a 3D block with all value init to false
    // If we visit a specific 3D idx, its set to true to know that we have been there
    bool vtxChkBlock[blockSize][blockSize][blockSize];
    // This forms a 3D block with all values init to -1
    // What ever 3D idx we visited we set the corresponding corrected idx value in this 3D block
    int vtxIdxBlock[blockSize][blockSize][blockSize];
    // To reduce computational cost, if we have already checked a vertex, we can mark it
    bool vtxAlreadyChkd[numVertices];
    memset(vtxAlreadyChkd, false, numVertices*sizeof(bool));
    int xblockLowerBound; int xblockUpperBound;
    int yblockLowerBound; int yblockUpperBound;
    int zblockLowerBound; int zblockUpperBound;
    int vxKey;
    int vyKey;
    int vzKey;
    cVector3d vPos;
    // Scale factors mapping a coordinate inside the bounds onto an integer key.
    double xCoeff = (double) (numVertices - 1) / vBounds.x();
    double yCoeff = (double) (numVertices - 1) / vBounds.y();
    double zCoeff = (double) (numVertices - 1) / vBounds.z();
    for (int xblockNum = 0 ; xblockNum < numBlocks ; xblockNum ++){
        xblockLowerBound = xblockNum * blockSize;
        xblockUpperBound = xblockLowerBound + blockSize;
        for (int yblockNum = 0 ; yblockNum < numBlocks ; yblockNum ++){
            yblockLowerBound = yblockNum * blockSize;
            yblockUpperBound = yblockLowerBound + blockSize;
            for (int zblockNum = 0 ; zblockNum < numBlocks ; zblockNum ++){
                zblockLowerBound = zblockNum * blockSize;
                zblockUpperBound = zblockLowerBound + blockSize;
                if (print_debug_info) {printf("Block Num [%d, %d, %d] \n", xblockNum, yblockNum, zblockNum);}
                // Clear the 3D idx and chk arrays to be reused for the new block
                clearArrays(&vtxChkBlock[0][0][0], &vtxIdxBlock[0][0][0], blockSize);
                for(int idx = 0; idx < numVertices ; idx++){
                    if (!vtxAlreadyChkd[idx]){
                        vPos = vtxArrCopy->getLocalPos(idx);
                        // Generate keys to parse the 3D idx and chk block
                        vxKey = xCoeff * (vPos.x() - vMin.x());
                        vyKey = yCoeff * (vPos.y() - vMin.y());
                        vzKey = zCoeff * (vPos.z() - vMin.z());
                        // Check if the generated keys are in the bounds of the current block
                        if (vxKey >= xblockLowerBound && vyKey >= yblockLowerBound && vzKey >= zblockLowerBound){
                            if (vxKey <= xblockUpperBound && vyKey <= yblockUpperBound && vzKey <= zblockUpperBound){
                                // If the key lies inside the block, offset the value to the block bounds
                                vxKey -= xblockLowerBound; vyKey -= yblockLowerBound; vzKey -= zblockLowerBound;
                                // Mark that we already checked this vertex, so we don't have to check it again
                                vtxAlreadyChkd[idx] = true;
                                // Check if the key is already set in the chk block
                                if (vtxChkBlock[vxKey][vyKey][vzKey] == false){
                                    // Unique vertex, so mark it as such in the corresponding blocks
                                    vtxChkBlock[vxKey][vyKey][vzKey] = true;
                                    // Set the idx block to the original idx
                                    vtxIdxBlock[vxKey][vyKey][vzKey] = idx;
                                    // Set the vertexIdx Pair value
                                    vtxIdxTriPair[idx][0] = idx;
                                    vtxIdxTriPair[idx][1] = idx;
                                    uniqueVtxCount ++;
                                }
                                else{
                                    // This is not a unique vertex, so get the original idx
                                    // and set it in the corresponding blocks
                                    vtxIdxTriPair[idx][0] = idx;
                                    vtxIdxTriPair[idx][1] = vtxIdxBlock[vxKey][vyKey][vzKey];
                                    duplicateVtxCount++;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    //Resize once to save on iterative push/pop time
    outputVertices->resize(uniqueVtxCount*3);
    outputTriangles->resize(numTriangles*3);
    m_vertexTree.resize(uniqueVtxCount);
    // This loop uses a logic that appends the index of the new resized array containing
    // the unique vertices to the index of the original array of duplicated vertices.
    int vtxCounted = -1;
    for (int i = 0 ; i < numVertices ; i++){
        if (vtxIdxTriPair[i][1] == vtxIdxTriPair[i][0] && vtxIdxTriPair[i][2] == -1){
            vPos = mesh->m_vertices->getLocalPos(i);
            vtxCounted++;
            (*outputVertices)[3*vtxCounted + 0] = vPos.x();
            (*outputVertices)[3*vtxCounted + 1] = vPos.y();
            (*outputVertices)[3*vtxCounted + 2] = vPos.z();
            vtxIdxTriPair[i][2] = vtxCounted;
            m_vertexTree[vtxCounted].vertexIdx.push_back(i);
        }
        else if(vtxIdxTriPair[i][1] < vtxIdxTriPair[i][0]){
            int bi = vtxIdxTriPair[i][1];
            int ci = vtxIdxTriPair[bi][2];
            // BUGFIX: the original code threw multi-character char literals
            // ('...long text...'), which is an implementation-defined int, not
            // a string. Throw a proper exception instead.
            if (vtxIdxTriPair[bi][1] != vtxIdxTriPair[bi][0] || ci == -1){
                throw std::runtime_error("Algorithm Failed for (b[i] < a[i]), a[b[i]] != b[b[i]] : %d and c[b[i]] != -1");
            }
            vtxIdxTriPair[i][2] = ci;
            m_vertexTree[ci].vertexIdx.push_back(i);
        }
        else if(vtxIdxTriPair[i][1] > vtxIdxTriPair[i][0]){
            int bi = vtxIdxTriPair[i][1];
            if (vtxIdxTriPair[bi][1] != vtxIdxTriPair[bi][0]){
                throw std::runtime_error("Algorithm Failed for (b[i] > a[i]), a[b[i]] != b[b[i]] : %d");
            }
            if (vtxIdxTriPair[bi][2] == -1){
                vPos = mesh->m_vertices->getLocalPos(bi);
                vtxCounted++;
                (*outputVertices)[3*vtxCounted + 0] = vPos.x();
                (*outputVertices)[3*vtxCounted + 1] = vPos.y();
                (*outputVertices)[3*vtxCounted + 2] = vPos.z();
                vtxIdxTriPair[bi][2] = vtxCounted;
            }
            vtxIdxTriPair[i][2] = vtxIdxTriPair[bi][2];
        }
    }
    // This last loop iterates over the triangle idxes and assigns the re-idxd vertices from the
    // third row of vtxIdxTriPair
    // NOTE(review): indexing vtxIdxTriPair by the flat triangle-corner counter
    // assumes the mesh stores one vertex per triangle corner, i.e.
    // numVertices == numTriangles*3 — confirm this invariant for all loaders.
    for (int i = 0 ; i < numTriangles*3 ; i++){
        (*outputTriangles)[i] = vtxIdxTriPair[i][2];
    }
    if (print_debug_info){
        for (int i = 0 ; i < uniqueVtxCount; i ++){
            printf("Vertex %d = [%f, %f, %f] \n", i, (*outputVertices)[3*i + 0], (*outputVertices)[3*i + 1], (*outputVertices)[3*i + 2]);
        }
        for (int i = 0 ; i < uniqueVtxCount; i ++){
            printf("%d) Children = [", i );
            for (int j = 0 ; j < m_vertexTree[i].vertexIdx.size(); j++){
                printf(" %d", m_vertexTree[i].vertexIdx[j]);
            }
            printf(" ]\n");
        }
        for (int i = 0 ; i < numTriangles; i ++){
            printf("Triangle %d = [%d, %d, %d] \n", i, (*outputTriangles)[3*i], (*outputTriangles)[3*i+1], (*outputTriangles)[3*i+2]);
        }
        for (int i = 0 ; i < numTriangles*3 ; i++){
            printf("v[0] = %d \t v[1] = %d \t v[2] = %d \n", vtxIdxTriPair[i][0], vtxIdxTriPair[i][1], vtxIdxTriPair[i][2]);
        }
    }
    printf("Unique Vertices Found = %d, Duplicate Vertices Found = %d\n", uniqueVtxCount, duplicateVtxCount);
}
//==============================================================================
/*!
This method creates a GEL Skeleton based on the underlying bullet softbody
*/
//==============================================================================
// Builds a GEL skeleton mirroring the Bullet soft body: one GEL node per
// soft-body node, and one GEL link for every soft-body link found along the
// edges of the remapped triangles (m_trianglesPtr).
void cBulletSoftMultiMesh::createGELSkeleton(){
    int nLinks = m_bulletSoftBody->m_links.size();
    int nNodes = m_bulletSoftBody->m_nodes.size();
    // Keep a flat index -> node lookup so triangle indices can be resolved below.
    std::vector<cGELSkeletonNode*> vNodes;
    vNodes.resize(nNodes);
    for (int i = 0 ; i < nNodes ; i++){
        auto btNode = m_bulletSoftBody->m_nodes[i];
        cGELSkeletonNode* gelNode = new cGELSkeletonNode;
        m_gelMesh.m_nodes.push_back(gelNode);
        vNodes[i] = gelNode;
        gelNode->m_pos.set(btNode.m_x.x(), btNode.m_x.y(), btNode.m_x.z());
        gelNode->m_nextRot.identity();
    }
    // For each triangle edge that exists as a soft-body link, add a GEL link.
    for (int i = 0 ; i < m_trianglesPtr.size()/3 ; i++){
        int nodeIdx0 = m_trianglesPtr[3*i + 0];
        int nodeIdx1 = m_trianglesPtr[3*i + 1];
        int nodeIdx2 = m_trianglesPtr[3*i + 2];
        if (m_bulletSoftBody->checkLink(nodeIdx0, nodeIdx1)){
            cGELSkeletonLink* link = new cGELSkeletonLink(vNodes[nodeIdx0], vNodes[nodeIdx1]);
            m_gelMesh.m_links.push_back(link);
        }
        if (m_bulletSoftBody->checkLink(nodeIdx1, nodeIdx2)){
            cGELSkeletonLink* link = new cGELSkeletonLink(vNodes[nodeIdx1], vNodes[nodeIdx2]);
            m_gelMesh.m_links.push_back(link);
        }
        if (m_bulletSoftBody->checkLink(nodeIdx2, nodeIdx0)){
            cGELSkeletonLink* link = new cGELSkeletonLink(vNodes[nodeIdx2], vNodes[nodeIdx0]);
            m_gelMesh.m_links.push_back(link);
        }
    }
    m_gelMesh.m_showSkeletonModel = true;
    m_gelMesh.m_useSkeletonModel = true;
}
// Copies node positions (and an orientation derived from node normals) from
// the Bullet soft body onto the GEL skeleton, then commits the new pose.
// Relies on GEL node order matching soft-body node order (see createGELSkeleton).
void cBulletSoftMultiMesh::updateGELSkeletonFrombtSoftBody(){
    std::list<cGELSkeletonNode*>::iterator n;
    int i = 0;
    for(n = m_gelMesh.m_nodes.begin(); n != m_gelMesh.m_nodes.end(); ++n)
    {
        btVector3 &vPos = m_bulletSoftBody->m_nodes[i].m_x;
        btVector3 &vNorm = m_bulletSoftBody->m_nodes[i].m_n;
        (*n)->m_nextPos.set(vPos.x(), vPos.y(), vPos.z());
        // Rotate the node frame so its z-axis tracks the soft-body normal.
        cVector3d nz = (*n)->m_rot.getCol2();
        cVector3d nzSB(vNorm.x(), vNorm.y(), vNorm.z());
        double angle = cAngle(nz, nzSB);
        cVector3d rotAxes = cNormalize(cCross(nz, nzSB));
        // Only rotate when the cross product yields a valid (unit) axis,
        // i.e. the two normals are neither parallel nor degenerate.
        if (rotAxes.length() == 1.0){
            (*n)->m_nextRot.rotateAboutGlobalAxisRad(rotAxes, angle);
        }
        //    (*n)->m_nextRot.identity();
        i++;
    }
    // Commit the staged pose on every node.
    for(n = m_gelMesh.m_nodes.begin(); n != m_gelMesh.m_nodes.end(); ++n)
    {
        (*n)->applyNextPose();
    }
}
//==============================================================================
/*!
This method creates a Bullet collision model for this object.
*/
//==============================================================================
// Builds the Bullet soft-body collision model from the mesh triangles.
// When lowResMesh is supplied, the soft body is built from it instead of the
// full-resolution render mesh.
// NOTE(review): each loop iteration overwrites m_bulletSoftBody, so only the
// last mesh's soft body survives; localTrans is computed but never applied to
// the compound shape (no addChildShape call) — confirm both are intentional.
void cBulletSoftMultiMesh::buildContactTriangles(const double a_margin, cMultiMesh* lowResMesh)
{
    m_gelMesh.buildVertices();
    // create compound shape
    btCompoundShape* compound = new btCompoundShape();
    m_bulletCollisionShape = compound;
    std::vector<cMesh*> *v_meshes;
    if (lowResMesh ){
        v_meshes = lowResMesh->m_meshes;
    }
    else{
        v_meshes = m_gelMesh.m_meshes;
    }
    // create collision detector for each mesh
    std::vector<cMesh*>::iterator it;
    for (it = v_meshes->begin(); it < v_meshes->end(); it++)
    {
        cMesh* mesh = (*it);
        // read number of triangles of the object
        int numTriangles = mesh->m_triangles->getNumElements();
        // Deduplicate vertices and build the soft body from the triangle soup.
        computeUniqueVerticesandTriangles(mesh, &m_verticesPtr, &m_trianglesPtr);
        m_bulletSoftBody = btSoftBodyHelpers::CreateFromTriMesh(*m_dynamicWorld->m_bulletSoftBodyWorldInfo,
                                                                m_verticesPtr.data(), m_trianglesPtr.data(), numTriangles);
        createGELSkeleton();
        m_gelMesh.connectVerticesToSkeleton(false);
        // add to compound object
        btTransform localTrans;
        btVector3 pos;
        btQuaternion q;
        // set new position
        cVector3d posMesh = mesh->getLocalPos();
        pos[0] = posMesh(0);
        pos[1] = posMesh(1);
        pos[2] = posMesh(2);
        // set new orientation
        cMatrix3d rotMesh = mesh->getLocalRot();
        cQuaternion quaternion;
        quaternion.fromRotMat(rotMesh);
        q.setW(quaternion.w);
        q.setX(quaternion.x);
        q.setY(quaternion.y);
        q.setZ(quaternion.z);
        // set new transform
        localTrans.setOrigin(pos);
        localTrans.setRotation(q);
        // Apply the inertial transform offset
        localTrans *= m_inertialOffsetTransform.inverse();
    }
    if(lowResMesh){
        lowResMesh->m_meshes->clear();
    }
}
//==============================================================================
/*!
This method creates a Bullet collision model for this object.
*/
//==============================================================================
// Intentionally empty: convex-triangle collision model is not implemented for
// soft multi-meshes (see buildContactTriangles / buildContactHull instead).
void cBulletSoftMultiMesh::buildContactConvexTriangles(const double a_margin)
{
}
//==============================================================================
/*!
This method creates a Bullet collision model for this object.
*/
//==============================================================================
// Builds the Bullet soft body as a convex hull of the mesh vertices.
// NOTE(review): as in buildContactTriangles, m_bulletSoftBody is overwritten
// on every loop iteration, so only the last mesh's hull survives — confirm.
void cBulletSoftMultiMesh::buildContactHull(const double a_margin)
{
    m_gelMesh.buildVertices();
    // create collision detector for each mesh
    std::vector<cMesh*>::iterator it;
    for (it = m_meshes->begin(); it < m_meshes->end(); it++)
    {
        cMesh* mesh = (*it);
        // read number of triangles of the object
        int numVertices = mesh->m_vertices->getNumElements();
        m_verticesVecPtr.resize(numVertices);
        // add all triangles to Bullet model
        for (int i=0; i<numVertices; i++)
        {
            auto vPos = mesh->m_vertices->getLocalPos(i);
            m_verticesVecPtr[i].setValue(vPos.x(), vPos.y(), vPos.z());
        }
        m_bulletSoftBody = btSoftBodyHelpers::CreateFromConvexHull(*m_dynamicWorld->m_bulletSoftBodyWorldInfo, m_verticesVecPtr.data(), numVertices);
    }
}
//==============================================================================
/*!
Build the dynamic model of the bullet soft body.
*/
//==============================================================================
// Finalizes the soft body: assigns total mass, links the collision shape back
// to the body, adds it to the soft-rigid dynamics world, and resets the
// sparse SDF cache so stale collision data is discarded.
void cBulletSoftMultiMesh::buildDynamicModel(){
    // add collision shape to compound
    m_bulletSoftBody->setTotalMass(m_mass, true);
    m_bulletSoftBody->getCollisionShape()->setUserPointer(m_bulletSoftBody);
    btSoftRigidDynamicsWorld *softWorld = (btSoftRigidDynamicsWorld*) m_dynamicWorld->m_bulletWorld;
    softWorld->addSoftBody(m_bulletSoftBody);
    m_dynamicWorld->m_bulletSoftBodyWorldInfo->m_sparsesdf.Reset();
}
//==============================================================================
/*!
Scale the mesh for the softBody
*/
//==============================================================================
// Uniformly scales the GEL mesh.
// NOTE(review): a_affectChildren is ignored and the Bullet soft body is not
// rescaled here — confirm callers rebuild the physics model after scaling.
void cBulletSoftMultiMesh::scale(const double &a_scaleFactor, const bool a_affectChildren){
    m_gelMesh.scale(a_scaleFactor);
}
//------------------------------------------------------------------------------
} // namespace chai3d
//------------------------------------------------------------------------------
|
#!/bin/bash
# Launch the exam-system help pages in the kiosk browser, localized from $LANG.
# Finnish (fi) is the default; Swedish (sv) is used when its build exists.
#
# Fixes over the previous version: language detection no longer spawns
# echo|grep subshells (a single 'case' on $LANG suffices), all expansions are
# quoted, and the documentation path is defined once instead of repeated.

DOC_ROOT=/usr/share/digabi-koe-ohje/build

# Defaults to Finnish
DOC_LANG=fi
TITLE="Koejärjestelmän ohjeet"

case "${LANG}" in
  fi*)
    echo "Finnish language selected"
    DOC_LANG=fi
    ;;
  sv*)
    echo "Swedish language selected"
    DOC_LANG=sv
    TITLE="Provmiljöns instruktioner"
    # Fall back to Finnish if the Swedish build was not installed.
    if [ ! -f "${DOC_ROOT}/index-${DOC_LANG}.html" ]; then
      echo "Oops, Swedish documentation is missing"
      DOC_LANG=fi
      TITLE="Koejärjestelmän ohjeet"
    fi
    ;;
  *)
    echo "$0: No known language selected, defaults to ${DOC_LANG}"
    ;;
esac

# Kill existing browsers
pkill -TERM -f "/usr/bin/digabi-koe-browser"

# Start browser
/usr/bin/digabi-koe-browser -t "${TITLE}" -W 1045 -H 600 -x 30 -y 30 \
  "file://${DOC_ROOT}/index-${DOC_LANG}.html" &
|
// Jest unit tests for the sudoku helper functions exported by ./utils.
const {
  prettyPrintMatrix,
  isValueInArray,
  arrayFromMatrixByColumn,
  traverseArray,
  isClue,
  isPossible,
} = require('./utils');

// Shared fixtures: 0 denotes an empty cell, non-zero cells are clues.
const EMPTY_ARRAY = [];
const EMPTY_MATRIX = [[]];
const SUDOKU = [
  [0, 2, 0, 5, 0, 8, 4, 0, 0],
  [5, 8, 0, 7, 0, 0, 9, 2, 6],
  [0, 0, 0, 0, 0, 0, 8, 1, 0],
  [4, 5, 8, 2, 0, 0, 0, 0, 1],
  [0, 0, 0, 4, 0, 3, 0, 0, 0],
  [1, 0, 0, 0, 0, 5, 2, 9, 4],
  [0, 6, 5, 0, 0, 0, 0, 0, 0],
  [3, 1, 9, 0, 0, 2, 0, 8, 7],
  [0, 0, 7, 1, 0, 9, 0, 6, 0],
];
// First column of SUDOKU, used to verify column extraction.
const FIRST_COLUMN = [0, 5, 0, 4, 0, 1, 0, 3, 0];
// Expected space-separated rendering of SUDOKU (template literal must stay
// flush-left: its leading whitespace is part of the expected string).
const PRETTY_SUDOKU = `0 2 0 5 0 8 4 0 0
5 8 0 7 0 0 9 2 6
0 0 0 0 0 0 8 1 0
4 5 8 2 0 0 0 0 1
0 0 0 4 0 3 0 0 0
1 0 0 0 0 5 2 9 4
0 6 5 0 0 0 0 0 0
3 1 9 0 0 2 0 8 7
0 0 7 1 0 9 0 6 0`;

describe('prettyPrintMatrix', () => {
  test('writes 0x0 data properly', () => {
    expect(prettyPrintMatrix(EMPTY_MATRIX)).toBe('');
  });
  test('pretty prints sudoku data properly', () => {
    expect(prettyPrintMatrix(SUDOKU)).toBe(PRETTY_SUDOKU);
  });
});

describe('isValueInArray', () => {
  test('false if array is empty', () => {
    expect(isValueInArray(EMPTY_ARRAY, 9)).toBe(false);
  });
  test('false if value is does not exist', () => {
    expect(isValueInArray(SUDOKU[0], 9)).toBe(false);
  });
  test('true if value is does exist', () => {
    expect(isValueInArray(SUDOKU[0], 2)).toBe(true);
  });
});

describe('arrayFromMatrixByColumn', () => {
  test('proper array for source matrix', () => {
    expect(arrayFromMatrixByColumn(SUDOKU)(0)).toStrictEqual(FIRST_COLUMN);
  });
});

// traverseArray(matrix)(row, col) reads a cell; out-of-range reads yield undefined.
describe('traverseArray', () => {
  test('undefined if matrix is empty', () => {
    expect(traverseArray(EMPTY_MATRIX)(0, 0)).toBe(undefined);
  });
  test('value of [0,0] in matrix', () => {
    expect(traverseArray(SUDOKU)(0, 0)).toBe(0);
  });
  test('value of [1,1] in matrix', () => {
    expect(traverseArray(SUDOKU)(1, 1)).toBe(8);
  });
});

// isClue reports whether a cell holds a given (non-zero) value.
describe('isClue', () => {
  test('[0,0] should return false', () => {
    expect(isClue(SUDOKU)(0, 0)).toBe(false);
  });
  test('[2,2] should return false', () => {
    expect(isClue(SUDOKU)(2, 2)).toBe(false);
  });
  test('[0,1] should return true', () => {
    expect(isClue(SUDOKU)(0, 1)).toBe(true);
  });
  test('[1,0] should return true', () => {
    expect(isClue(SUDOKU)(1, 0)).toBe(true);
  });
});

// isPossible(matrix, row, col, value): can `value` legally be placed there?
describe('isPossible', () => {
  test('returns false if value is clue', () => {
    expect(isPossible(SUDOKU, 0, 1, 1)).toBe(false);
  });
  test('returns true if space is not clue and value is unique in row', () => {
    expect(isPossible(SUDOKU, 0, 0, 1)).toBe(true);
  });
  test('returns false if space is not clue and value is repeated in row', () => {
    expect(isPossible(SUDOKU, 0, 0, 2)).toBe(false);
  });
});
|
#!/bin/bash
# Return 0 (success) when tag $2 exists for Docker Hub repository $1.
# Fixes: the previous `test $EXISTS = true` broke with a syntax error when the
# curl/jq pipeline produced an empty string; variables are now quoted and
# local, and the URL no longer undergoes word-splitting.
docker_tag_exists() {
  local repo="$1" tag="$2" exists
  exists=$(curl -s "https://hub.docker.com/v2/repositories/${repo}/tags/?page_size=10000" \
    | jq -r "[.results | .[] | .name == \"${tag}\"] | any")
  [ "$exists" = "true" ]
}
# Build, tag and push the image only when the target tag is not yet on Docker Hub.
if docker_tag_exists svenruppert/maven-3.6.1-liberica 1.11.0-5; then
  echo skip building, image already existing - svenruppert/maven-3.6.1-liberica 1.11.0-5
else
  echo start building the images
  docker build -t svenruppert/maven-3.6.1-liberica .
  docker tag svenruppert/maven-3.6.1-liberica:latest svenruppert/maven-3.6.1-liberica:1.11.0-5
  docker push svenruppert/maven-3.6.1-liberica:1.11.0-5
fi
# Clean up local copies regardless of whether a build happened.
docker image rm svenruppert/maven-3.6.1-liberica:latest
docker image rm svenruppert/maven-3.6.1-liberica:1.11.0-5
from typing import Tuple, Union
def validate_and_process_euro_value(value: str) -> Union[bool, Tuple[bool, str]]:
    """Validate a euro amount given as a string and format it for display.

    Args:
        value: The candidate amount, e.g. ``"12.5"``.

    Returns:
        ``(True, '€<amount>')`` with the amount rendered to two decimal places
        when the input parses as a non-negative number whose formatted form has
        at most 15 characters and at most 12 integer digits; ``False``
        otherwise (including unparsable input).
    """
    # BUGFIX: the original annotation claimed Union[bool, str], but the success
    # path returns a (bool, str) tuple. Also catch TypeError so non-string,
    # non-numeric inputs (e.g. None) report invalid instead of raising.
    try:
        amount = float(value)
    except (ValueError, TypeError):
        return False
    if amount < 0:
        return False
    formatted = '{:.2f}'.format(amount)
    # Reject amounts too long for the target field: >15 characters overall
    # or more than 12 digits before the decimal point.
    if len(formatted) > 15 or len(formatted.split('.')[0]) > 12:
        return False
    return True, '€' + formatted
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs';
import { Product } from "../models/product";
const httpOptions = {
headers: new HttpHeaders({ 'Content-Type': 'application/json' })
};
@Injectable({
  providedIn: 'root'
})
/**
 * CRUD client for the products REST API.
 * All methods return cold Observables; no request is sent until subscribed.
 * NOTE(review): the base URL is hard-coded to localhost:5000 — presumably a
 * development setting; confirm it should come from environment config.
 */
export class ProductService {
  private productUrl = 'http://localhost:5000/api/products';

  constructor(private http: HttpClient) { }

  // Get all products
  getProducts(): Observable<Product[]> {
    return this.http.get<Product[]>(this.productUrl);
  }

  // Get a product by id
  getProductById(id: any): Observable<Product> {
    return this.http.get<Product>(`${this.productUrl}/${id}`);
  }

  // Add a product (JSON body, Content-Type set via shared httpOptions)
  addProduct(product: Product): Observable<any> {
    return this.http.post<any>(this.productUrl, product, httpOptions);
  }

  // Update product by id
  updateProductById(product: Product, id: any): Observable<Product> {
    return this.http.put<Product>(`${this.productUrl}/${id}`, product, httpOptions);
  }

  // Delete a product
  deleteProductById(id: any): Observable<Product> {
    return this.http.delete<Product>(`${this.productUrl}/${id}`);
  }
}
#!/bin/bash
# Run the combined TPC-C + CH-benCHmark (HTAP) workload via oltpbenchmark.
# Configuration comes from ./config.sh; any KEY=VALUE command-line argument
# overrides a configuration variable.
#
# Fixes: command-line overrides no longer go through a bare `eval $param`
# (arbitrary command injection); they are validated as NAME=VALUE and assigned
# with printf -v. All expansions are quoted.

BIN_DIR="$( dirname "${BASH_SOURCE[0]}" )"
pushd "$BIN_DIR" > /dev/null

# Default values
IM=0   # Throughput Monitoring Interval in milliseconds
SW=300 # Sampling window

source ./config.sh

# Allow overriding parameters on the command line as KEY=VALUE
for param in "$@"
do
  if [[ "$param" =~ ^[A-Za-z_][A-Za-z_0-9]*=.*$ ]]; then
    printf -v "${param%%=*}" '%s' "${param#*=}"
  else
    echo "ignoring malformed parameter: $param" >&2
  fi
done

echo "execute htap-$SCALE benchmark, time: $TIME tpcc: $CWORKERS chbenchmark: $HWORKERS"

# Write config file (use Mac/Linux compatible 'sed -i')
cp template-config.xml config.xml
if [[ "${KERBEROS}" == "true" ]]; then
  url="jdbc:splice://$SPLICE_HOST:$SPLICE_PORT/splicedb;principal=${KERBEROS_PRINCIPAL};keytab=${KERBEROS_KEYTAB}"
else
  url="jdbc:splice://$SPLICE_HOST:$SPLICE_PORT/splicedb"
fi
sed -i.bak -e "s%###SPLICE_URL###%$url%g" config.xml
sed -i.bak -e "s%###SCALE###%$SCALE%g" config.xml
sed -i.bak -e "s%###TIME###%$TIME%g" config.xml
sed -i.bak -e "s%###WORKERS_TPCC###%$CWORKERS%g" config.xml
sed -i.bak -e "s%###WORKERS_TPCH###%$HWORKERS%g" config.xml
rm -f config.xml.bak

cd ..
# Changing DDL or dialect files requires rebuild of oltpbenchmark
if [[ "$PREBUILT" != "true" ]]; then
  rm -rf build
  ant
fi

mkdir -p results
session="htap-${SCALE}_${CWORKERS}_${HWORKERS}"
./oltpbenchmark -b 'tpcc,chbenchmark' -c quickstart/config.xml --execute=true \
  -im "$IM" -s "$SW" -ss -o "$session" | tee "results/$session.out"

popd > /dev/null
echo 'done'
|
<gh_stars>0
const path = require('path');

// Webpack resolve configuration: alias every import of "styled-components"
// to the copy in the parent directory's node_modules — presumably to avoid
// bundling two instances of the library (a common styled-components pitfall
// in linked/monorepo setups); confirm against the project layout.
module.exports = {
  resolve: {
    alias: {
      "styled-components": path.resolve(__dirname, "..","node_modules", "styled-components"),
    }
  }
};
|
def custom_validation(field_value, related_field_value):
    """Return False only when the related field is set (truthy) while the
    field under validation is blank (falsy); True in every other case."""
    return not (related_field_value and not field_value)
<gh_stars>1-10
#!/usr/bin/env python3
"""Build script for Jigsaw."""
import argparse
import subprocess
import shutil
import os
import json
RUST_CRATE_NAME = "kuudos_web"
RUST_CRATE_PATH = "web" # Relative to git repo root
def exit_with_message(msg):
    """Print `msg` and terminate the script with exit status 1."""
    # Use sys.exit rather than the `exit()` builtin: the latter is injected by
    # the `site` module for interactive use and is not guaranteed to exist when
    # the interpreter is started with -S or embedded.
    import sys

    print(msg)
    sys.exit(1)
def get_wasm_location(cargo_build_stdout):
    """Extract the path of the built '*.wasm' file from cargo's
    `--message-format json` output.

    Scans each JSON message for the compiler-artifact record belonging to
    RUST_CRATE_NAME and returns its single '.wasm' filename.  Terminates the
    process (via exit_with_message) if zero or multiple wasm files were
    emitted, or if no matching message is present.
    """
    for raw_line in cargo_build_stdout.split("\n"):
        # cargo's output can contain empty lines, which are not valid JSON.
        if raw_line == "":
            continue
        message = json.loads(raw_line)
        # Only the release artefact message for our crate is of interest.
        is_our_artifact = (
            message["reason"] == "compiler-artifact"
            and message["target"]["name"] == RUST_CRATE_NAME
        )
        if not is_our_artifact:
            continue
        # Collect the '.wasm' outputs listed by this artefact message.
        wasm_files = [
            file_path
            for file_path in message["filenames"]
            if file_path.endswith(".wasm")
        ]
        # Sanity check that there is exactly one wasm file.
        if len(wasm_files) == 0:
            exit_with_message("No wasm files emitted!")
        if len(wasm_files) > 1:
            exit_with_message(f"Multiple wasm files emitted: {wasm_files}")
        return wasm_files[0]
    exit_with_message("No compiler messages found for the wasm files")
# ===== ARG PARSING =====
# Two options: `-r/--release` toggles debug vs release builds, and
# `-o/--out-dir` overrides where the build artefacts are written.
parser = argparse.ArgumentParser(description="Build script for Kuudos' web API")
parser.add_argument(
    "-r",
    "--release",
    action="store_true",
    help="Switches the build between debug and release.",
)
parser.add_argument(
    "-o",
    "--out-dir",
    type=str,
    default="out/",
    help="Custom location for the output directory, relative to the project root. Defaults to `out`.",
)
args = parser.parse_args()
is_release = args.release  # bool: True when building in release mode
out_dir_arg = args.out_dir  # str: may be absolute, or relative to the repo root
# TODO: Run dependency check, and install things if necessary
# ===== GET DIRECTORIES =====
# Use 'git' to find the location of the project root (so that this can be run from any subdirectory
# of the project)
this_files_dir = os.path.split(__file__)[0]
# NOTE(review): if git fails (e.g. not inside a repo), stdout is empty and
# root_dir becomes "" — no error check here; subsequent paths then resolve
# relative to the CWD. Confirm whether that is acceptable.
root_dir = subprocess.run(
    ["git", "rev-parse", "--show-toplevel"],
    capture_output=True,
    text=True,
    cwd=this_files_dir,
).stdout.strip()
# web_dir holds the static assets; rust_dir holds the wasm crate. With
# RUST_CRATE_PATH == "web" these are currently the same directory.
web_dir = os.path.join(root_dir, "web")
rust_dir = os.path.join(root_dir, RUST_CRATE_PATH)
# Get the absolute path to the `out` directory
out_dir = (
    out_dir_arg if os.path.isabs(out_dir_arg) else os.path.join(root_dir, out_dir_arg)
)
# Make sure the out directory exists before building anything
os.makedirs(out_dir, exist_ok=True)
# ===== BUILD WEB COMPONENTS (JS, HTML, CSS, etc) =====
# Rust/cargo files live alongside the web assets; skip them when copying.
EXCLUDE_FILES = ["Cargo.toml", "Cargo.lock", "README.md", "src"]
for file_name in os.listdir(web_dir):
    if file_name in EXCLUDE_FILES:
        continue
    source_path = os.path.join(web_dir, file_name)
    out_path = os.path.join(out_dir, file_name)
    # Remove the output file if it already exists
    # (os.symlink fails if the target path exists; copy2 would follow a
    # stale symlink otherwise).
    if os.path.exists(out_path):
        os.remove(out_path)
    # Move the source file to the out dir
    if is_release:
        # In release mode, we actually copy the files
        shutil.copy2(source_path, out_path)
    else:
        # In debug mode, just symlink the files so that we don't have to re-build every time the web
        # stuff is changed
        # TODO: Generate a relative path to stop the symlinks being horrible to look at
        os.symlink(source_path, out_path)
# ===== BUILD RUST CODE =====
# Base cargo invocation targeting wasm; `--release` appended when requested.
cargo_args = ["cargo", "build", "--target", "wasm32-unknown-unknown"] + (
    ["--release"] if is_release else []
)
# Run the cargo build process once to actually perform the build
# (stdout/stderr are left attached so compiler output reaches the user).
cargo_build_proc = subprocess.run(
    cargo_args,
    cwd=rust_dir,
    text=True,
)
# Check that the compilation was successful
if cargo_build_proc.returncode != 0:
    exit_with_message("Rust build failed. Stopping build.")
# Now that cargo ran normally, we run it again to get path of the wasm file
# (the second run re-uses cargo's cache, so it only emits the JSON messages)
# TODO: Once Cargo's `--out-dir` hits stable, all of this will become unnecessary
build_location_proc = subprocess.run(
    cargo_args + ["--message-format", "json"],
    cwd=rust_dir,
    capture_output=True,
    text=True,
)
wasm_path = get_wasm_location(build_location_proc.stdout)
# Run wasm-bindgen to generate the Rust/JS interaction code
# NOTE(review): this subprocess result is not checked; a wasm-bindgen
# failure would go unnoticed — confirm whether that is intentional.
subprocess.run(
    [
        "wasm-bindgen",
        wasm_path,
        "--target",
        "no-modules",
        "--no-typescript",
        "--out-dir",
        out_dir,
    ]
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.