text stringlengths 1 1.05M |
|---|
const { attachExtensions, buildCommand } = require('../utils');
module.exports = (toolbox) => {
const ctx = toolbox.extensions.context.get();
function init() {
// prettier-ignore
const cmd = buildCommand([
`cd ${ctx.targetPath} &&`,
'git',
'init',
]);
return toolbox.system.run(cmd);
}
attachExtensions(toolbox, 'git', {
init,
});
};
|
# Fetch the latest published images for every service in the compose file.
docker-compose pull
|
#!/usr/bin/env bash
# Render the listed coseg_aliens result meshes with the repo's mesh viewer
# utility. NOTE(review): the --files list continues past this excerpt.
python util/mesh_viewer.py \
--files \
checkpoints/coseg_aliens/meshes/142_0.obj \
checkpoints/coseg_aliens/meshes/142_2.obj \
checkpoints/coseg_aliens/meshes/142_3.obj \ |
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/util/DefaultValidatorSupport.java
package io.opensphere.core.util;
import io.opensphere.core.util.ChangeSupport.Callback;
/**
* Default implementation of {@link ValidatorSupport} that keeps track of the
* validation object and the last validation result, as well as listeners
* interested in changes to the validation state.
*/
public class DefaultValidatorSupport implements ValidatorSupport
{
    /** The change support. */
    private final transient ChangeSupport<ValidatorSupport.ValidationStatusChangeListener> myChangeSupport = StrongChangeSupport
            .create();

    /** The last validation message (may be {@code null}). */
    private String myValidationMessage;

    /** The validation object. */
    private final Object myValidationObject;

    /** The last validation result. */
    private ValidationStatus myValidationSuccessful;

    /** Callback used to notify listeners. */
    private final Callback<ValidatorSupport.ValidationStatusChangeListener> myCallback;

    /**
     * Construct the validator support.
     *
     * @param validationObject The object being validated.
     */
    public DefaultValidatorSupport(Object validationObject)
    {
        myValidationObject = validationObject;
        // The callback always reads the *current* fields, so a listener added
        // later (see addAndNotifyValidationListener) is replayed the latest
        // status and message rather than a stale snapshot.
        myCallback = listener -> listener.statusChanged(myValidationObject, myValidationSuccessful, myValidationMessage);
    }

    @Override
    public void addAndNotifyValidationListener(ValidationStatusChangeListener listener)
    {
        myChangeSupport.addListener(listener);
        // Immediately deliver the current validation state to the new listener.
        myCallback.notify(listener);
    }

    @Override
    public String getValidationMessage()
    {
        return myValidationMessage;
    }

    @Override
    public ValidationStatus getValidationStatus()
    {
        return myValidationSuccessful;
    }

    /**
     * Notify validation listeners.
     *
     * @param callback The callback.
     */
    public void notifyListeners(Callback<ValidationStatusChangeListener> callback)
    {
        myChangeSupport.notifyListeners(callback);
    }

    @Override
    public void removeValidationListener(ValidationStatusChangeListener listener)
    {
        myChangeSupport.removeListener(listener);
    }

    /**
     * Set the validation result and notify listeners, but only when the
     * status or the message actually changed.
     *
     * @param successful If the validation was successful.
     * @param message The validation message.
     */
    public void setValidationResult(ValidationStatus successful, String message)
    {
        // Precedence note: '&&' binds tighter than '||', so this reads as
        //   statusChanged || (messageInstanceChanged && messageContentChanged)
        // i.e. listeners fire when the status instance differs, or when the
        // message is a different instance AND differs by content (null counts
        // as "changed" when the other side is non-null).
        if (!Utilities.sameInstance(successful, myValidationSuccessful) || !Utilities.sameInstance(message, myValidationMessage)
                && (message == null || !message.equals(myValidationMessage)))
        {
            myValidationSuccessful = successful;
            myValidationMessage = message;
            myChangeSupport.notifyListeners(myCallback);
        }
    }

    /**
     * Set the validation result from another validator and notify listeners.
     *
     * @param validator The other validator.
     */
    public void setValidationResult(ValidatorSupport validator)
    {
        setValidationResult(validator.getValidationStatus(), validator.getValidationMessage());
    }
}
|
def get_balance(acct_id=None):
    """Fetch the balance for an account via the authenticated API.

    :param acct_id: Optional account ID; falls back to the module-level
        ``ACCOUNT_ID`` when omitted.
    :return: Balance information for the specified account.
    """
    # Default to the configured account when the caller gives none.
    account = ACCOUNT_ID if acct_id is None else acct_id
    endpoint = "/v1/account/balance/{}".format(account)
    # No extra query parameters are needed for this endpoint.
    return api_key_get({}, endpoint)
/**
 * Bit-mask flags describing loop control hints. Values are distinct powers of
 * two so they can be OR-ed together; `None` (0) means no hints.
 *
 * NOTE(review): the names and values appear to mirror the SPIR-V
 * `LoopControl` operand (including the vendor `*INTEL` flags, which occupy a
 * separate bit range starting at 1 << 16) — confirm against the SPIR-V spec
 * revision this declaration was generated from.
 */
export declare enum LoopControlMask {
    None = 0,
    Unroll = 1,
    DontUnroll = 2,
    DependencyInfinite = 4,
    DependencyLength = 8,
    MinIterations = 16,
    MaxIterations = 32,
    IterationMultiple = 64,
    PeelCount = 128,
    PartialCount = 256,
    InitiationIntervalINTEL = 65536,
    MaxConcurrencyINTEL = 131072,
    DependencyArrayINTEL = 262144,
    PipelineEnableINTEL = 524288,
    LoopCoalesceINTEL = 1048576,
    MaxInterleavingINTEL = 2097152,
    SpeculatedIterationsINTEL = 4194304
}
|
{{ansible_managed|comment(decoration='#')}}
#-------------------------------------------------------------------------------
# NOTE
#-------------------------------------------------------------------------------
#
# All default Bash environment variables have been moved from 'bash.bash' to
# 'zz_bash.bash'. Files in '/etc/profile.d/' are loaded in alphabetical order.
# Changing the order helps to ensure important variables do not get overwritten.
#
#-------------------------------------------------------------------------------
|
// Registered pass-through / mask stage: on each rising clock edge, output
// either the input ANDed with constant 1 (keeps only bit 0) when S is high,
// or the unmodified input when S is low.
//
// Fixes over the original:
//  * `width` was used in the port declarations but never declared — it is
//    now a module parameter (default 8), which is backward compatible.
//  * `width'b1` is not legal Verilog for a sized literal; `1'b1` is used and
//    is zero-extended to `width` bits by the AND expression.
module DigitalCircuit #(
    parameter width = 8
) (
    input  wire [width-1:0] inType,
    input  wire             S,
    output reg  [width-1:0] outType,
    input  wire             clk
);

    always @(posedge clk) begin
        if (S) begin
            // Bitwise AND with constant 1 (zero-extended to `width` bits).
            outType <= inType & 1'b1;
        end else begin
            // Pass the input through unchanged.
            outType <= inType;
        end
    end

endmodule
<filename>src/modules/user/view/pages/Lobby/components/GameForm/index.tsx
import React from 'react';
import { Formik, Form } from 'formik';
import useCreateGameController from 'modules/user/infra/controllers/useCreateGameController';
import GameFormSchema, {
requiredImageValidation,
} from 'modules/user/view/validation/GameFormSchema';
import { Button, ImageInput, Input, Textarea } from 'shared/view/components';
import { Container, Footer } from './styles';
/** Shape of the values managed by the game-creation form. */
interface GameFormValues {
  name: string;
  description: string;
  image: string | File;
}

interface GameFormProps {
  onSuccess: () => Promise<void>;
  closeModal: () => void;
}

/** A brand-new game starts with every field blank. */
const initialValues: GameFormValues = {
  name: '',
  description: '',
  image: '',
};

/**
 * Modal form for creating a game: image, name and description fields with
 * schema validation, submitted through the create-game controller.
 */
const GameForm: React.FC<GameFormProps> = ({ onSuccess, closeModal }) => {
  const { loading, onSubmit } = useCreateGameController({
    onSuccess,
    closeModal,
  });

  // The render prop ignores Formik's bag: all state is handled by the
  // field components themselves.
  const renderFields = () => (
    <Form>
      <ImageInput name="image" />

      <Input name="name" placeholder="O nome do seu jogo" />

      <Textarea
        name="description"
        placeholder="Descreva seu jogo: o que ele representa? Onde será jogado? Quem participará?"
      />

      <Footer>
        <Button outlined onClick={closeModal}>
          Cancelar
        </Button>

        <Button type="submit" loading={loading}>
          Criar
        </Button>
      </Footer>
    </Form>
  );

  return (
    <Container>
      <Formik
        initialValues={initialValues}
        validationSchema={GameFormSchema}
        validate={requiredImageValidation}
        onSubmit={onSubmit}
      >
        {renderFields}
      </Formik>
    </Container>
  );
};

export default GameForm;
|
import asyncio
import datetime
import json
import logging
import websockets
from websockets.exceptions import ConnectionClosed
from .dumpling import Dumpling, DumplingDriver
from .exceptions import InvalidDumpling, NetDumplingsError
from ._shared import (
validate_dumpling, HUB_HOST, HUB_IN_PORT, HUB_OUT_PORT, HUB_STATUS_FREQ,
)
class DumplingHub:
    """
    Implements a dumpling hub.

    A dumpling hub is two websocket servers: one receives dumplings from any
    number of running ``nd-sniff`` scripts; and the other sends those dumplings
    to any number of dumpling eaters. The hub also makes its own dumplings
    which describe its own system status which are also sent to all the
    dumpling eaters at regular intervals.

    ``nd-hub`` is a simple wrapper around ``DumplingHub``.

    :param address: Address the hub is running on.
    :param in_port: Port used to receive connections from `nd-sniff`.
    :param out_port: Port used to receive connections from `dumpling eaters`.
    :param status_freq: Frequency (in secs) to send system status dumplings.
    """
    def __init__(
            self,
            address: str = HUB_HOST,
            in_port: int = HUB_IN_PORT,
            out_port: int = HUB_OUT_PORT,
            status_freq: int = HUB_STATUS_FREQ,
    ) -> None:
        self.address = address
        self.in_port = in_port
        self.out_port = out_port
        self.status_freq = status_freq

        # Maintain a dictionary of all connected kitchens and eaters. The
        # key is the websocket and the value is a dictionary of information
        # on the kitchen/eater.
        self._dumpling_eaters = {}
        self._dumpling_kitchens = {}

        # Used to compute uptime for the status dumplings.
        self._start_time = datetime.datetime.now()

        # Running totals folded into each status dumpling.
        self._system_stats = {
            'dumplings_in': 0,
            'dumplings_out': 0
        }

        self._logger = logging.getLogger(__name__)

    def __repr__(self):
        # Eval-able-style repr built from the constructor arguments.
        return (
            '{}('
            'address={}, '
            'in_port={}, '
            'out_port={}, '
            'status_freq={})'.format(
                type(self).__name__,
                repr(self.address),
                repr(self.in_port),
                repr(self.out_port),
                repr(self.status_freq),
            )
        )

    def _get_system_status(self):
        """
        Generates current system status information: dumpling counters,
        uptime, and metadata for every connected kitchen and eater.

        :return: System status information (a JSON-serializable dict).
        """
        uptime = (datetime.datetime.now() - self._start_time).total_seconds()

        system_status = {
            'total_dumplings_in': self._system_stats['dumplings_in'],
            'total_dumplings_out': self._system_stats['dumplings_out'],
            'server_uptime': uptime,
            'dumpling_kitchen_count': len(self._dumpling_kitchens),
            'dumpling_eater_count': len(self._dumpling_eaters),
            'dumpling_kitchens':
                [self._dumpling_kitchens[kitchen]['metadata']
                 for kitchen in self._dumpling_kitchens],
            'dumpling_eaters':
                [self._dumpling_eaters[eater]['metadata']
                 for eater in self._dumpling_eaters]
        }

        return system_status

    async def _grab_dumplings(self, websocket, path):
        """
        A coroutine for grabbing dumplings from a single instance of
        ``nd-sniff``. A single instance of this coroutine exists for each
        connected ``nd-sniff`` and is invoked via
        :meth:`websockets.server.serve`.

        :param websocket: A :class:`websockets.server.WebSocketServerProtocol`.
        :param path: Websocket request URI (unused; required by the
            websockets handler signature).
        """
        host = websocket.remote_address[0]
        port = websocket.remote_address[1]

        # Retain some information on this dumpling kitchen. The kitchen is
        # expected to send a JSON metadata blob as its first message.
        kitchen_json = await websocket.recv()

        kitchen = {
            'metadata': {
                'info_from_kitchen': json.loads(kitchen_json),
                'info_from_hub': {
                    'host': host,
                    'port': port
                }
            },
            'websocket': websocket
        }

        self._dumpling_kitchens[websocket] = kitchen
        kitchen_name = kitchen['metadata']['info_from_kitchen']['kitchen_name']

        self._logger.info(
            "Received dumpling kitchen connection from {0} at {1}:{2}".format(
                kitchen_name, host, port))

        try:
            # Loop forever; exit is via ConnectionClosed from recv().
            while True:
                dumpling_json = await websocket.recv()

                # Validate the dumpling. Invalid dumplings are logged and
                # dropped; the connection stays up.
                try:
                    dumpling = validate_dumpling(dumpling_json)
                except InvalidDumpling as e:
                    self._logger.error(
                        "Received invalid dumpling: {0}; kitchen: {1}".format(
                            e,
                            json.dumps(
                                kitchen['metadata']['info_from_kitchen']
                            )
                        ))

                    continue

                self._system_stats['dumplings_in'] += 1
                chef = dumpling['metadata']['chef']

                self._logger.debug(
                    "Received {} dumpling from {} at {}:{}; {} bytes".format(
                        chef, kitchen_name, host, port, len(dumpling_json)))

                # Send this dumpling to all the eager dumpling eaters (via
                # their per-eater queues; delivery happens in _emit_dumplings).
                for eater in self._dumpling_eaters:
                    await self._dumpling_eaters[eater]['queue'].put(
                        dumpling_json
                    )
        except ConnectionClosed as e:
            self._logger.info(
                "Dumpling kitchen {0} connection closed: {1}".format(
                    kitchen_name, e))

            # Forget the kitchen once its connection goes away.
            del self._dumpling_kitchens[websocket]

    async def _emit_dumplings(self, websocket, path):
        """
        A coroutine for sending all received dumplings to a single connected
        dumpling eater over a websocket connection. A single instance of this
        coroutine exists for each eater and is invoked via
        :meth:`websockets.server.serve`.

        :param websocket: A :class:`websockets.server.WebSocketServerProtocol`.
        :param path: Websocket request URI (unused; required by the
            websockets handler signature).
        """
        host = websocket.remote_address[0]
        port = websocket.remote_address[1]

        # Retain some information on this dumpling eater. The eater is
        # expected to send a JSON metadata blob as its first message.
        eater_json = await websocket.recv()

        eater = {
            'metadata': {
                'info_from_eater': json.loads(eater_json),
                'info_from_hub': {
                    'host': host,
                    'port': port
                }
            },
            'websocket': websocket,
            'queue': asyncio.Queue()
        }

        self._dumpling_eaters[websocket] = eater
        eater_name = eater['metadata']['info_from_eater']['eater_name']

        self._logger.info(
            "Received dumpling eater connection from {0} at {1}:{2}".format(
                eater_name, host, port))

        # Each dumpling eater has their own queue. These queues receive all
        # the fresh new dumplings received by each instance of the
        # _grab_dumplings coroutine.
        dumpling_queue = eater['queue']

        try:
            # Drain this eater's queue forever; exit is via ConnectionClosed.
            while True:
                dumpling = await dumpling_queue.get()
                dumpling_obj = json.loads(dumpling)
                chef = dumpling_obj['metadata']['chef']

                self._logger.debug(
                    "Sending {0} dumpling to {1} at {2}:{3}; {4} bytes".format(
                        chef, eater_name, host, port, len(dumpling)))

                await websocket.send(dumpling)
                self._system_stats['dumplings_out'] += 1
        except ConnectionClosed as e:
            self._logger.info(
                "Dumpling eater {0} connection closed: {1}".format(
                    eater_name, e))

            # Forget the eater (and its queue) once its connection goes away.
            del self._dumpling_eaters[websocket]

    async def _announce_system_status(self):
        """
        Sends system status (as a dumpling) to all connected dumpling eaters,
        every ``self.status_freq`` seconds, forever.
        """
        while True:
            # We create our own system status dumplings (rather than going
            # through a chef+kitchen pair).
            status_dumpling = Dumpling(
                chef='SystemStatusChef', driver=DumplingDriver.interval,
                payload=self._get_system_status())
            status_dumpling_json = status_dumpling.to_json()

            for eater in self._dumpling_eaters:
                await self._dumpling_eaters[eater]['queue'].put(
                    status_dumpling_json
                )

            await asyncio.sleep(self.status_freq)

    def run(self):
        """
        Run the dumpling hub.

        Starts two websocket servers: one to receive dumplings from zero or
        more instances of ``nd-sniff``; and another to send those dumplings to
        zero or more dumpling eaters. Also creates its own dumplings at regular
        intervals to send system status information to all connected dumpling
        eaters.

        Blocks until interrupted (e.g. Ctrl-C), then closes both servers.
        """
        dumpling_in_server = \
            websockets.serve(self._grab_dumplings, self.address, self.in_port)
        dumpling_out_server = \
            websockets.serve(self._emit_dumplings, self.address, self.out_port)

        loop = asyncio.get_event_loop()

        try:
            srv_in = loop.run_until_complete(dumpling_in_server)
            srv_out = loop.run_until_complete(dumpling_out_server)
        except OSError as e:
            # Typically the port is already in use or cannot be bound.
            raise NetDumplingsError(
                "Cannot instantiate dumpling hub: {0}".format(e))

        # NOTE(review): this background task is never cancelled in the
        # shutdown path below; cancelling it before closing the loop would
        # avoid "task was destroyed but it is pending" warnings — confirm.
        status_task = asyncio.ensure_future(self._announce_system_status())

        self._logger.info("Dumpling hub initiated; waiting for connections")

        in_uri = "ws://{0}:{1}".format(self.address, self.in_port)
        out_uri = "ws://{0}:{1}".format(self.address, self.out_port)
        self._logger.info(
            "Dumplings in: {0} out: {1}".format(in_uri, out_uri)
        )

        try:
            loop.run_forever()
        except KeyboardInterrupt:
            self._logger.warning(
                "Caught keyboard interrupt; attempting graceful shutdown...")
        except asyncio.CancelledError:
            pass
        finally:
            # Stop accepting new connections and wait for the servers to
            # finish closing before exiting.
            srv_in.close()
            srv_out.close()

            loop.run_until_complete(srv_in.wait_closed())
            loop.run_until_complete(srv_out.wait_closed())

            self._logger.info("Dumpling hub signing off. Thanks!")
|
def process_sklearn_version(version: str) -> str:
    """Check whether a scikit-learn version string is supported.

    A version is compatible when it is >= 0.22. The original check required
    ``major == 0 and minor >= 22``, which wrongly rejected every 1.x (and
    later) release; the comparison is now done on the (major, minor) tuple.

    :param version: Dotted version string, e.g. "0.24.2" or "1.3.0".
    :return: "Compatible version" when the version is >= 0.22.
    :raises ValueError: If the version is older than 0.22 or malformed.
    """
    try:
        major_str, minor_str, *_rest = version.split(".")
        major, minor = int(major_str), int(minor_str)
    except (ValueError, AttributeError):
        # Malformed input: too few components, non-numeric parts, or a
        # non-string argument (split missing).
        raise ValueError("Incompatible version")
    if (major, minor) >= (0, 22):
        return "Compatible version"
    raise ValueError("Incompatible version")
# Copyright 2000-2021 Nokia
#
# Licensed under the Apache License 2.0
# SPDX-License-Identifier: Apache-2.0
#

# Smoke test: send an h2c request through the local proxy and verify the
# backend logged it as HTTP/2 with a 200 status.

# Abort the script as soon as any command fails.
trap 'exit 1' ERR

# Temp file capturing the HTTP response body. ($(...) instead of backticks.)
response=$(mktemp)

# Random marker so this request can be found unambiguously in the log.
expectedNum=$RANDOM

# Quote the URL: '?' would otherwise be subject to globbing.
curl -v --http2-prior-knowledge --proxy "http://localhost:8088" \
  "http://localhost:8080/services/helloworld?$expectedNum" > "$response"

# The request must appear in the access log as HTTP/2.0 with status 200.
grep "\"GET /services/helloworld?$expectedNum HTTP/2.0\" 200" \
  ../runtimes/jersey-server/var/log/csf.runtime__component.instance/msg.log

echo "OK"
package nlp_package;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.parser.Parser;
import opennlp.tools.parser.ParserFactory;
import opennlp.tools.parser.ParserModel;
import opennlp.tools.util.InvalidFormatException;
public class SentenceParse {

    /**
     * Loads an OpenNLP parser model from the given file path.
     *
     * The original opened the stream and never closed it (a resource leak,
     * including when the {@code ParserModel} constructor threw);
     * try-with-resources now guarantees it is closed.
     *
     * @param file path to the serialized parser model
     * @return the loaded {@link ParserModel}
     * @throws InvalidFormatException if the model data is malformed
     * @throws IOException if the file cannot be read
     */
    public ParserModel parseModel(String file) throws InvalidFormatException, IOException {
        try (InputStream modelInParse = new FileInputStream(file)) {
            return new ParserModel(modelInParse);
        }
    }

    /**
     * Creates a parser for the given model.
     *
     * @param modelParser the loaded parser model
     * @return a ready-to-use {@link Parser}
     */
    public Parser parser(ParserModel modelParser) {
        return ParserFactory.create(modelParser);
    }
}
|
# Print the Fibonacci numbers F(2)..F(11): 1, 2, 3, 5, 8, 13, 21, 34, 55, 89.
a, b = 0, 1
count = 0
while count < 10:
    # Advance the pair one step, then report the new larger value.
    a, b = b, a + b
    print(b)
    count += 1
# Install the Python and Ansible Galaxy dependencies for the molecule setup.

# Stop immediately if any install step fails instead of continuing silently.
set -e

echo "Installation des dépendances"

# NOTE(review): pip is given a .yaml file as its requirements file — confirm
# that requirements.yaml really is in pip requirements format.
/usr/bin/pip3 install --user -r requirements.yaml
/usr/local/bin/ansible-galaxy install -r molecule/commun/requirements.yml
#!/bin/bash
# Build the astrobee-repo-version Debian package for a given snapshot ($1).

mkdir astrobee-repo-version
mkdir -p astrobee-repo-version/DEBIAN
mkdir -p astrobee-repo-version/etc/

# Timestamp-based package version. Fix: the original used %Y%m%d%H%m, where
# the trailing %m repeats the MONTH; %M (minutes) was clearly intended.
VERSION=$(date +%Y%m%d%H%M)

# /etc/astrobee-release: snapshot name plus human and unix timestamps.
# (The original first wrote "$1" alone to this file, which was immediately
# overwritten here — that dead write has been removed.)
read -r -d '' RELEASE << EOM
SNAPSHOT="$1"
TIMESTAMP="$VERSION"
TIMESTAMP_UNIX="$(date +%s)"
EOM
echo "$RELEASE" > astrobee-repo-version/etc/astrobee-release

# Debian control metadata for the package.
read -r -d '' CONTROL << EOM
Package: astrobee-repo-version
Version: $VERSION
Maintainer: Brian Coltin <nospam@nospam.org>
Architecture: all
Description: Astrobee debian repo version. Updates with each snapshot.
EOM
echo "$CONTROL" > astrobee-repo-version/DEBIAN/control

# Assemble the .deb by hand: data tarball, control tarball, format marker.
cd astrobee-repo-version
tar czf ../data.tar.gz etc
cd DEBIAN
tar czf ../../control.tar.gz *
cd ../..
echo 2.0 > debian-binary
ar r "astrobee-repo-version-$VERSION.deb" debian-binary control.tar.gz data.tar.gz

# Clean up intermediates, leaving only the .deb.
rm -r astrobee-repo-version debian-binary control.tar.gz data.tar.gz
|
import cv2
import numpy as np
def scaleImage(image, scale):
    """Rescale a 2-D image along its second (width) axis by ``scale``.

    The first dimension is kept unchanged; the second is multiplied by
    ``scale`` and rounded to the nearest integer. The image is converted to
    float32 before resizing (cv2.resize takes dsize as (width, height)).

    The original also allocated an unused (x, y) buffer and computed an
    unused centre offset — apparently an unfinished crop/pad back to the
    original width. That dead code is removed here; the returned array has
    shape (x, round(y * scale)), exactly as before.
    NOTE(review): confirm whether a centre crop/pad to (x, y) was intended.

    :param image: 2-D array-like with a ``.shape`` of (height, width).
    :param scale: Horizontal scale factor.
    :return: float32 image of shape (x, round(y * scale)).
    """
    x, y = image.shape
    new_width = int(round(y * scale))
    return cv2.resize(image.astype('float32'), (new_width, x))
import requests
def make_cpanel_api_request(username: str, api_token: str, endpoint: str, parameter: str) -> str:
    """Issue an authenticated cPanel API GET request.

    :param username: cPanel account name used in the Authorization header.
    :param api_token: API token paired with the username.
    :param endpoint: Full endpoint URL (query string is appended here).
    :param parameter: Value passed as the ``parameter`` query argument.
    :return: Response body decoded as UTF-8 text.
    """
    # cPanel token auth uses the "cpanel user:token" authorization scheme.
    auth_headers = {'Authorization': 'cpanel {}:{}'.format(username, api_token)}
    full_url = '{}?parameter={}'.format(endpoint, parameter)
    response = requests.get(full_url, headers=auth_headers)
    return response.content.decode('utf-8')
package at.ac.tuwien.infosys.util;
import at.ac.tuwien.infosys.model.Location;
import at.ac.tuwien.infosys.model.LocationRange;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
/**
* Created by <NAME> on 29/11/2016.
* Service to read properties from the additional "main.properties" file
*/
@Service
@Slf4j
public class PropertyService {
@Value("${fog.docker}")
private boolean DOCKER;
private String cloudIp;
private int cloudPort;
private String ip;
private String parentIp;
private int parentPort;
private String neighborIp;
private int neighborPort;
private String grandParentIp;
private int grandParentPort;
private long latitude;
private long longitude;
private Set<String> serviceTypes;
private LocationRange locationRange;
@PostConstruct
public void init(){
readProperties();
}
private void readProperties(){
Properties props = new Properties();
FileInputStream file = null;
String path= "./main.properties";
if(!DOCKER) {
URL url = this.getClass().getClassLoader().getResource("main.properties");
path = url.getPath();
}
try {
file = new FileInputStream(path);
//load properties
props.load(file);
file.close();
//retrieve property
this.cloudIp = props.getProperty("fog.controller.ip");
this.cloudPort = Integer.valueOf(props.getProperty("fog.controller.port"));
this.ip = props.getProperty("fog.device.ip");
this.parentIp = props.getProperty("fog.parent.ip");
this.parentPort = Integer.valueOf(props.getProperty("fog.parent.port"));
this.neighborIp = props.getProperty("fog.neighbor.ip");
this.neighborPort = Integer.valueOf(props.getProperty("fog.neighbor.port"));
this.grandParentIp = props.getProperty("fog.grandparent.ip");
this.grandParentPort = Integer.valueOf(props.getProperty("fog.grandparent.port"));
this.latitude = Long.valueOf(props.getProperty("fog.location.latitude"));
this.longitude = Long.valueOf(props.getProperty("fog.location.longitude"));
String serviceTypeString = props.getProperty("fog.service.types");
if(!serviceTypeString.isEmpty())
this.serviceTypes = new HashSet<String>(Arrays.asList(serviceTypeString.split(",")));
else
this.serviceTypes = new HashSet<String>();
// location range
String[] lowerBoundStr = props.getProperty("fog.location.range.lower").split(",");
long lowerBoundLat = Long.valueOf(lowerBoundStr[0]);
long lowerBoundLong = Long.valueOf(lowerBoundStr[1]);
String[] upperBoundStr = props.getProperty("fog.location.range.upper").split(",");
long upperBoundLat = Long.valueOf(upperBoundStr[0]);
long upperBoundLong = Long.valueOf(upperBoundStr[1]);
this.locationRange = new LocationRange(new Location(lowerBoundLat, lowerBoundLong),
new Location(upperBoundLat, upperBoundLong));
} catch (IOException e) {
e.printStackTrace();
}
}
public boolean isDOCKER() {
return DOCKER;
}
public String getIp() {
return ip;
}
public String getParentIp() {
return parentIp;
}
public int getParentPort() {
return parentPort;
}
public String getGrandParentIp() {
return grandParentIp;
}
public int getGrandParentPort() {
return grandParentPort;
}
public long getLatitude() {
return latitude;
}
public long getLongitude() {
return longitude;
}
public Set<String> getServiceTypes() {
return serviceTypes;
}
public LocationRange getLocationRange() {
return locationRange;
}
public String getCloudIp() {
return cloudIp;
}
public int getCloudPort() { return cloudPort; }
public String getNeighborIp() { return neighborIp; }
public int getNeighborPort() { return neighborPort; }
}
|
/**
* @copyright Copyright 2021 <NAME> <<EMAIL>>
* @license MIT
*/
import assert from 'assert';
import deepFreeze from 'deep-freeze';
import RenameComponentsTransformer from '../rename-components.js';
describe('RenameComponentsTransformer', () => {
  /** Shorthand for an empty object schema. */
  const objSchema = () => ({ type: 'object' });

  /** Transformer that renames exactly one schema name to `New<name>`. */
  const renamerFor = (target) => new RenameComponentsTransformer({
    schemas: (name) => (name === target ? `New${name}` : name),
  });

  /**
   * Builds a minimal OpenAPI 3 document with the given component schemas,
   * a POST request-body schema, and a default-response schema. Called once
   * for the (frozen) input and once for the expected output.
   */
  function doc3(schemas, requestSchema, responseSchema) {
    return {
      openapi: '3.0.3',
      info: {
        title: 'Title',
        version: '1.0',
      },
      components: { schemas },
      paths: {
        '/': {
          post: {
            requestBody: {
              content: {
                'application/json': { schema: requestSchema },
              },
            },
            responses: {
              default: {
                description: 'Example response',
                content: {
                  'application/json': { schema: responseSchema },
                },
              },
            },
          },
        },
      },
    };
  }

  it('throws TypeError with null options', () => {
    assert.throws(() => new RenameComponentsTransformer(null), TypeError);
  });

  it('throws TypeError with number options', () => {
    assert.throws(() => new RenameComponentsTransformer(1), TypeError);
  });

  it('throws TypeError with number options.schemas', () => {
    assert.throws(
      () => new RenameComponentsTransformer({ schemas: 1 }),
      TypeError,
    );
  });

  it('rename with options.schemas function openapi 3', () => {
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(doc3(
        { RequestType: objSchema(), ResponseType: objSchema() },
        { $ref: '#/components/schemas/RequestType' },
        { $ref: '#/components/schemas/ResponseType' },
      ))),
      doc3(
        { RequestType: objSchema(), NewResponseType: objSchema() },
        { $ref: '#/components/schemas/RequestType' },
        { $ref: '#/components/schemas/NewResponseType' },
      ),
    );
  });

  it('preserves properties on JSON Reference', () => {
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(doc3(
        { RequestType: objSchema(), ResponseType: objSchema() },
        { prop1: 'foo', $ref: '#/components/schemas/RequestType', prop2: 'bar' },
        { prop3: 'baz', $ref: '#/components/schemas/ResponseType', prop4: 'quux' },
      ))),
      doc3(
        { RequestType: objSchema(), NewResponseType: objSchema() },
        { prop1: 'foo', $ref: '#/components/schemas/RequestType', prop2: 'bar' },
        { prop3: 'baz', $ref: '#/components/schemas/NewResponseType', prop4: 'quux' },
      ),
    );
  });

  it('does not throw for unresolvable URI in $ref', () => {
    // Schemas are renamed; refs pointing at nonexistent names stay as-is.
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(doc3(
        { RequestType: objSchema(), ResponseType: objSchema() },
        { $ref: '#/components/schemas/RequestType2' },
        { $ref: '#/components/schemas/ResponseType2' },
      ))),
      doc3(
        { RequestType: objSchema(), NewResponseType: objSchema() },
        { $ref: '#/components/schemas/RequestType2' },
        { $ref: '#/components/schemas/ResponseType2' },
      ),
    );
  });

  it('ignores invalid URI in $ref', () => {
    const makeDoc = () => ({
      openapi: '3.0.3',
      info: {
        title: 'Title',
        version: '1.0',
      },
      paths: {
        '/': {
          post: {
            responses: {
              default: {
                description: 'Example response',
                content: {
                  'application/json': { schema: { $ref: '##' } },
                },
              },
            },
          },
        },
      },
    });
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(makeDoc())),
      makeDoc(),
    );
  });

  it('does not modify remote URI in $ref openapi 3', () => {
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(doc3(
        { RequestType: objSchema(), ResponseType: objSchema() },
        { $ref: 'remote.json#/components/schemas/RequestType' },
        { $ref: 'remote.json#/components/schemas/ResponseType' },
      ))),
      doc3(
        { RequestType: objSchema(), NewResponseType: objSchema() },
        { $ref: 'remote.json#/components/schemas/RequestType' },
        { $ref: 'remote.json#/components/schemas/ResponseType' },
      ),
    );
  });

  it('rename encoded with options.schemas function openapi 3', () => {
    // '~' and '#' must be matched through their JSON-Pointer/URI encodings.
    assert.deepStrictEqual(
      renamerFor('Response~Type#2').transformOpenApi(deepFreeze(doc3(
        { RequestType: objSchema(), 'Response~Type#2': objSchema() },
        { $ref: '#/components/schemas/RequestType' },
        { $ref: '#/components/schemas/Response~0Type%232' },
      ))),
      doc3(
        { RequestType: objSchema(), 'NewResponse~Type#2': objSchema() },
        { $ref: '#/components/schemas/RequestType' },
        { $ref: '#/components/schemas/NewResponse~0Type%232' },
      ),
    );
  });

  it('rename options.schemas handles child schemas correctly', () => {
    const transformer = new RenameComponentsTransformer({
      schemas: (name) => `New${name}2`,
    });
    const schemaWithChild = () => ({
      type: 'object',
      properties: {
        request: { type: 'object' },
      },
    });
    // A ref into a child of a renamed schema keeps its child path suffix.
    assert.deepStrictEqual(
      transformer.transformOpenApi(deepFreeze(doc3(
        { ResponseType: schemaWithChild() },
        { $ref: '#/components/schemas/ResponseType/properties/request' },
        { $ref: '#/components/schemas/ResponseType' },
      ))),
      doc3(
        { NewResponseType2: schemaWithChild() },
        { $ref: '#/components/schemas/NewResponseType2/properties/request' },
        { $ref: '#/components/schemas/NewResponseType2' },
      ),
    );
  });

  it('options.schemas does not rename outside components.schemas', () => {
    const transformer = new RenameComponentsTransformer({
      schemas: (name) => `New${name}`,
    });
    const makeDoc = () => ({
      openapi: '3.0.3',
      info: {
        title: 'Title',
        version: '1.0',
      },
      'x-stuff': {
        RequestType: { type: 'object' },
      },
      components: {
        myschemas: {
          ResponseType: { type: 'object' },
        },
      },
      paths: {
        '/': {
          post: {
            requestBody: {
              content: {
                'application/json': {
                  schema: { $ref: '#/x-stuff/RequestType' },
                },
              },
            },
            responses: {
              default: {
                description: 'Example response',
                content: {
                  'application/json': {
                    schema: { $ref: '#/components/myschemas/ResponseType' },
                  },
                },
              },
            },
          },
        },
      },
    });
    // Nothing lives under components.schemas, so nothing changes.
    assert.deepStrictEqual(
      transformer.transformOpenApi(deepFreeze(makeDoc())),
      makeDoc(),
    );
  });

  it('rename with options.schemas function swagger 2', () => {
    /** Minimal Swagger 2 document with body parameter and response refs. */
    const doc2 = (definitions, requestRef, responseRef) => ({
      swagger: '2.0',
      info: {
        title: 'Title',
        version: '1.0',
      },
      definitions,
      paths: {
        '/': {
          post: {
            parameters: [
              {
                in: 'body',
                name: 'body',
                schema: { $ref: requestRef },
              },
            ],
            responses: {
              default: {
                description: 'Example response',
                schema: { $ref: responseRef },
              },
            },
          },
        },
      },
    });
    assert.deepStrictEqual(
      renamerFor('ResponseType').transformOpenApi(deepFreeze(doc2(
        { RequestType: objSchema(), ResponseType: objSchema() },
        '#/definitions/RequestType',
        '#/definitions/ResponseType',
      ))),
      doc2(
        { RequestType: objSchema(), NewResponseType: objSchema() },
        '#/definitions/RequestType',
        '#/definitions/NewResponseType',
      ),
    );
  });

  it('rename with options.schemas doesn\'t affect responses', () => {
    const transformer = new RenameComponentsTransformer({
      schemas: () => 'renamed',
    });
    const makeDoc = () => ({
      openapi: '3.0.3',
      info: {
        title: 'Title',
        version: '1.0',
      },
      components: {
        responses: {
          myresponse: {
            description: 'Example response',
          },
        },
      },
      paths: {
        '/': {
          get: {
            responses: {
              204: { $ref: '#/components/responses/myresponse' },
            },
          },
        },
      },
    });
    // Only components.schemas is subject to renaming, not components.responses.
    assert.deepStrictEqual(
      transformer.transformOpenApi(deepFreeze(makeDoc())),
      makeDoc(),
    );
  });
});
|
require('isomorphic-fetch')
const registry = require('./registry')
// Prefix a request path with the configured API host.
// Falls back to the empty string (same-origin request) when no
// 'apiHost' entry exists in the registry.
const urlWithHost = url => {
  const apiHost = registry.getItem('apiHost') || ''
  return apiHost + url
}
// Build the OpenStack auth header from the registry's current token.
const authTokenHeader = () => {
  const { token } = registry.getInstance()
  return { 'X-Auth-Token': token }
}
// Headers for JSON request bodies and JSON-expecting responses.
const jsonHeaders = () => {
  return {
    Accept: 'application/json',
    'Content-Type': 'application/json',
  }
}
// Thin fetch wrapper namespaced by auth style and payload format.
// NOTE(review): this file mixes CommonJS `require` with an ES
// `export default` — presumably transpiled (e.g. Babel); confirm the
// build setup before changing the module style.
const http = {
  // Placeholder for raw/unauthenticated helpers; currently unused.
  bare: {},
  json: {
    // POST a JSON body without authentication; returns the raw fetch
    // Response (caller decides how to consume it).
    post (url, body, additionalHeaders = {}) {
      const params = {
        method: 'POST',
        body: JSON.stringify(body),
        headers: {
          ...jsonHeaders(),
          ...additionalHeaders,
        }
      }
      return fetch(urlWithHost(url), params)
    }
  },
  authenticated: {
    // Helpers that attach the X-Auth-Token header for OpenStack APIs.
    openstack: {
      // GET and parse the JSON body.
      // NOTE(review): `.then(x => x.json())` does not check x.ok, so HTTP
      // error responses are parsed as if they succeeded — confirm callers
      // handle OpenStack error payloads.
      get (url) {
        const params = {
          method: 'GET',
          headers: {
            ...authTokenHeader()
          }
        }
        return fetch(urlWithHost(url), params).then(x => x.json())
      },
      // POST a JSON body and parse the JSON response.
      post (url, body) {
        const params = {
          method: 'POST',
          headers: {
            ...authTokenHeader(),
            ...jsonHeaders(),
          },
          body: JSON.stringify(body),
        }
        return fetch(urlWithHost(url), params).then(x => x.json())
      },
      // DELETE; returns the raw fetch Response (DELETE bodies are often
      // empty, so no JSON parsing here).
      delete (url) {
        const params = {
          method: 'DELETE',
          headers: {
            ...authTokenHeader(),
          },
        }
        return fetch(urlWithHost(url), params)
      }
    }
  }
}
export default http
|
#!/bin/bash
# TPC-C benchmark sweep: runs dbtest over a list of thread counts and
# appends CSV rows (throughput/latency/timing breakdown) to perf.csv.
exec=out-perf.masstree/benchmarks/dbtest
bench=tpcc
cores_per_numa=8
mem_per_numa=10
sched_local=1
outstanding_txns=1
workload_mix=100,0,0,0,0
runtime=30
THREADS=( 1 2 4 8 16 24 32 40 48 56 62)
#THREADS=( 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62)
THREADS_PER_WAREHOUSE=( 64 )
n_threads=
bench_result="perf.csv"
rm -f ${bench_result}
touch ${bench_result}
threads_per_warehouse=
for threads_per_warehouse in ${THREADS_PER_WAREHOUSE[@]}
do
    # One CSV row per metric; values are appended per thread count.
    scale_factor_str="scale_factor"
    threads_str="n_threads"
    throughput_sched_str="throughput"
    latency_sched_str="latency"
    running_time_sched_str="running_time"
    txn_init_sched_str="txn_init_time"
    txn_free_sched_str="txn_free_time"
    txn_sched_sched_str="txn_sched_time"
    for n_threads in ${THREADS[@]}
    do
        # Memory budget: one mem_per_numa chunk per (possibly partial) NUMA node.
        n_numas=$((${n_threads}/${cores_per_numa}))
        mod_buf=$((${n_threads}%${cores_per_numa}))
        if [ ${mod_buf} -ne 0 ]
        then
            n_numas=$((${n_numas}+1))
        fi
        numa_mem=$((${mem_per_numa}*${n_numas}))
        # Only run configurations where n_threads is an exact multiple of
        # threads_per_warehouse (scale_factor stays integral); otherwise skip.
        scale_factor=1
        if [ ${n_threads} -gt ${threads_per_warehouse} ]
        then
            scale_factor_check=$((${n_threads}/${threads_per_warehouse}))
            scale_factor_mul=$((${threads_per_warehouse}*${scale_factor_check}))
            if [ ${scale_factor_mul} -eq ${n_threads} ]
            then
                scale_factor=${scale_factor_check}
            else
                scale_factor=0
            fi
        fi
        if [ ${scale_factor} -ne 0 ]
        then
            scale_factor_str=${scale_factor_str}",${scale_factor}"
            threads_str=${threads_str}",${n_threads}"
            # Retry the benchmark up to 5 times; dbtest prints a
            # "RESULT <csv>" line on success.
            ntry=5
            ret=
            while [ ${ntry} -gt 0 ]
            do
                ret=`./${exec} \
                    --bench ${bench} \
                    --db-type ndb-ic3 \
                    --txn-flags 1 \
                    --retry-aborted-transactions \
                    --num-threads ${n_threads} \
                    --runtime ${runtime} \
                    --parallel-loading \
                    --bench-opts "--workload-mix ${workload_mix}" \
                    | grep "RESULT" | sed "s/^RESULT \(.*\)$/\1/g"`
                if [ ${#ret} -ne 0 ]
                then
                    IFS=',' read throughput latency running_time init_time free_time sched_time <<<"${ret}"
                    echo "success: throughput: ${throughput}, latency: ${latency}, running_time: ${running_time}, init_time: ${init_time}, free_time: ${free_time}, sched_time: ${sched_time}"
                    break
                else
                    ntry=$((${ntry}-1))
                fi
            done
            if [ ${#ret} -ne 0 ]
            then
                throughput_sched_str="${throughput_sched_str},${throughput}"
                latency_sched_str="${latency_sched_str},${latency}"
                running_time_sched_str="${running_time_sched_str},${running_time}"
                txn_init_sched_str="${txn_init_sched_str},${init_time}"
                txn_free_sched_str="${txn_free_sched_str},${free_time}"
                # FIX: was "txn_sched_sc66hed_str=..." (typo), which assigned a
                # brand-new variable and silently dropped sched_time from the
                # txn_sched_time CSV row on every successful run.
                txn_sched_sched_str="${txn_sched_sched_str},${sched_time}"
            else
                # All retries failed: record zeros so columns stay aligned.
                throughput_sched_str="${throughput_sched_str},0"
                latency_sched_str="${latency_sched_str},0"
                running_time_sched_str="${running_time_sched_str},0"
                txn_init_sched_str="${txn_init_sched_str},0"
                txn_free_sched_str="${txn_free_sched_str},0"
                txn_sched_sched_str="${txn_sched_sched_str},0"
            fi
        fi
    done
    echo ${threads_per_warehouse} >>${bench_result}
    echo ${scale_factor_str} >>${bench_result}
    echo ${threads_str} >>${bench_result}
    echo ${throughput_sched_str} >>${bench_result}
    echo ${latency_sched_str} >>${bench_result}
    echo ${running_time_sched_str} >>${bench_result}
    echo ${txn_init_sched_str} >>${bench_result}
    echo ${txn_free_sched_str} >>${bench_result}
    echo ${txn_sched_sched_str} >>${bench_result}
    echo "" >>${bench_result}
done
|
# KDE only
## Blur effect ##
# Enable KWin's blur-behind on every window of the parent terminal when it is
# yakuake, konsole or kitty.
# FIX: in bash, a quoted right-hand side of [[ =~ ]] is matched as a literal
# string, not a regex, so '^yakuake|konsole|kitty$' never matched; and even as
# a regex the alternation needs grouping (^yakuake|konsole|kitty$ parses as
# "^yakuake" OR "konsole" OR "kitty$").
if [[ $(ps --no-header -p $PPID -o comm) =~ ^(yakuake|konsole|kitty)$ ]]; then
    for wid in $(xdotool search --pid $PPID); do
        xprop -f _KDE_NET_WM_BLUR_BEHIND_REGION 32c -set _KDE_NET_WM_BLUR_BEHIND_REGION 0 -id $wid; done
fi
|
// <reponame>marvelperseus/Real-Estate-website-frontend<filename>src/containers/Profile.js
import React, { Component } from 'react';
import { observer } from 'mobx-react';
import faker from 'faker';
import { withStyles } from 'material-ui/styles';
import Snackbar from 'material-ui/Snackbar';
import IconButton from 'material-ui/IconButton';
import CloseIcon from '@material-ui/icons/Close';
import Button from 'material-ui/Button';
import Profile from '../components/Profile';
import ProfilePicDialogBox from '../components/EditProfilePicDialogBox';
import EditAgentDialogBox from '../components/EditAgentDialogBox';
import EditAgentPasswordDialogBox from '../components/EditAgentPasswordDialogBox';
import BlockAgentDialogBox from '../components/BlockAgentDialogBox';
// JSS rules injected via the withStyles decorator.  `theme` is unused but
// kept so the signature matches material-ui's styling callback contract.
const styles = theme => {
  return {
    submitInvoiceBtn: {},
    wrapper: { position: 'relative' },
    buttonsWrapper: {
      display: 'flex',
      justifyContent: 'center',
      marginBottom: '25px',
    },
    snackBar: { marginTop: 30 },
  };
};
@observer
@withStyles(styles)
class ProfileContainer extends Component {
constructor(props) {
super(props);
const { agent } = this.props;
this.state = {
user: agent,
isEditing: false,
profilePicEditorDialogBoxOpen: false,
submittingEditProfilePicForm: false,
editProfilePicFormSubmitted: false,
editAgentModalOpen: false,
editAgentDialogBoxOpen: false,
blockAgentDialogBoxOpen: false,
editAgentPasswordDialogBoxOpen: false,
snackbarOpen: false,
snackbarText: '',
isLoadingProfilePicture: true,
};
}
openProfilePicEditor = () => {
this.setState({
profilePicEditorDialogBoxOpen: true,
});
};
closeProfilePicEditor = () => {
this.setState({
profilePicEditorDialogBoxOpen: false,
editProfilePicFormSubmitted: false,
});
};
confirmProfilePicSubmitted = () => {
this.setState({
profilePicEditorDialogBoxOpen: false,
});
};
toggleIsLoadingProfilePicture = (bool = false) => {
console.log(`loaded: ${bool}`);
this.setState({
isLoadingProfilePicture: bool,
});
};
setFinishedSubmittingForm = url => {
const { user } = this.state;
this.setState({
user: {
...user,
agent: {
...user.agent,
profilePicURL: `${url}?cacheBust=${faker.random.uuid()}`,
},
},
profilePicEditorDialogBoxOpen: false,
isLoadingProfilePicture: true,
submittingEditProfilePicForm: false,
editProfilePicFormSubmitted: false,
snackbarOpen: true,
snackbarText: 'Profile picture successfully changed!',
});
};
editPasswordFormSubmittedSuccessfully = () => {
this.setState({
editAgentPasswordDialogBoxOpen: false,
snackbarOpen: true,
snackbarText: 'Password successfully changed!',
});
};
editAgentFormSubmittedSuccessfully = agent => {
this.setState({
user: agent,
editAgentDialogBoxOpen: false,
snackbarOpen: true,
snackbarText: 'Agent Information successfully updated!',
});
};
blockAgentFormSubmittedSuccessfully = agent => {
this.setState({
user: agent,
editAgentPasswordDialogBoxOpen: false,
snackbarOpen: true,
snackbarText: 'Agent blocked successfully!',
});
};
toggleSubmittingEditProfilePicForm = bool => {
this.setState({
submittingEditProfilePicForm:
typeof bool === 'boolean'
? bool
: !this.state.submittingEditProfilePicForm,
});
};
setFormSubmitted = () => {
this.setState({
editProfilePicFormSubmitted: true,
});
};
toggleEditAgentModal = state => {
const { editAgentModalOpen } = this.state;
this.setState({
editAgentModalOpen:
typeof state === 'boolean' ? state : !editAgentModalOpen,
});
};
openEditAgentDialogBox = () => {
this.setState({
editAgentDialogBoxOpen: true,
});
};
openBlockAgentDialogBox = () => {
this.setState({
blockAgentDialogBoxOpen: true,
});
};
closeEditAgentDialogBox = () => {
this.setState({
editAgentDialogBoxOpen: false,
});
};
closeBlockAgentDialogBox = () => {
this.setState({
blockAgentDialogBoxOpen: false,
});
};
openEditAgentPasswordDialogBox = () => {
this.setState({
editAgentPasswordDialogBoxOpen: true,
});
};
closeEditAgentPasswordDialogBox = () => {
this.setState({
editAgentPasswordDialogBoxOpen: false,
});
};
handleCloseSnackbar = () => {
this.setState({
snackbarOpen: false,
snackbarUndoFunction: null,
});
};
agentSuccessfullyDeleted = () => {
this.setState({
snackbarOpen: true,
snackbarText: 'Agent has been successfully deleted!',
});
this.props.setAgentDeleted();
};
agentSuccessfullyBlocked = () => {
this.setState({
snackbarOpen: true,
snackbarText: 'Agent has been successfully blocked!',
});
this.props.setAgentDeleted();
};
createProfilePicDataURL = blob => URL.createObjectURL(blob);
render() {
const {
isEditing,
profileEdited,
profilePicEditorDialogBoxOpen,
submittingEditProfilePicForm,
editProfilePicFormSubmitted,
isLoadingProfilePicture,
} = this.state;
const {
toggleEditingMode,
toggleProfileEdited,
setMobileNumber,
setDescription,
enterEditingMode,
cancelEditingMode,
saveUser,
openProfilePicEditor,
closeProfilePicEditor,
undoSave,
toggleEditAgentModal,
} = this;
const { classes } = this.props;
return (
<div>
<Profile
agent={this.state.user}
isEditing={isEditing}
enterEditingMode={enterEditingMode}
cancelEditingMode={cancelEditingMode}
profileEdited={profileEdited}
toggleProfileEdited={toggleProfileEdited}
setMobileNumber={setMobileNumber}
setDescription={setDescription}
saveUser={saveUser}
undoSave={undoSave}
openProfilePicEditor={openProfilePicEditor}
currentUserRole={this.props.currentUserRole}
currentUserUUID={this.props.currentUserUUID}
openBlockAgentDialogBox={this.openBlockAgentDialogBox}
openEditAgentDialogBox={this.openEditAgentDialogBox}
openEditAgentPasswordDialogBox={this.openEditAgentPasswordDialogBox}
uuid={this.props.uuid}
isLoadingProfilePicture={isLoadingProfilePicture}
toggleIsLoadingProfilePicture={this.toggleIsLoadingProfilePicture}
/>
<EditAgentDialogBox
open={this.state.editAgentDialogBoxOpen}
closeEditAgentDialogBox={this.closeEditAgentDialogBox}
confirmAgentCreated={this.confirmAgentCreated}
viewingAgentUUID={this.props.uuid}
currentUserRole={this.props.currentUserRole}
agentSuccessfullyDeleted={this.agentSuccessfullyDeleted}
editAgentFormSubmittedSuccessfully={
this.editAgentFormSubmittedSuccessfully
}
/>
<BlockAgentDialogBox
agent={this.state.user}
closeBlockAgentDialogBox={this.closeBlockAgentDialogBox}
open={this.state.blockAgentDialogBoxOpen}
viewingAgentUUID={this.props.uuid}
blockAgentFormSubmittedSuccessfully={
this.blockAgentFormSubmittedSuccessfully
}
/>
<EditAgentPasswordDialogBox
closeEditAgentPasswordDialogBox={this.closeEditAgentPasswordDialogBox}
open={this.state.editAgentPasswordDialogBoxOpen}
viewingAgentUUID={this.props.uuid}
editPasswordFormSubmittedSuccessfully={
this.editPasswordFormSubmittedSuccessfully
}
/>
<ProfilePicDialogBox
submitProfilePicEditForm={this.submitProfilePicEditForm}
closeProfilePicEditor={closeProfilePicEditor}
open={profilePicEditorDialogBoxOpen}
confirmProfilePicSubmitted={this.confirmProfilePicSubmitted}
setFinishedSubmittingForm={this.setFinishedSubmittingForm}
submittingEditProfilePicForm={submittingEditProfilePicForm}
editProfilePicFormSubmitted={editProfilePicFormSubmitted}
setFormSubmitted={this.setFormSubmitted}
createProfilePicDataURL={this.createProfilePicDataURL}
toggleSubmittingEditProfilePicForm={
this.toggleSubmittingEditProfilePicForm
}
uuid={this.props.uuid}
/>
<Snackbar
classes={{ root: classes.snackBar }}
anchorOrigin={{
vertical: 'top',
horizontal: 'center',
}}
open={this.state.snackbarOpen}
autoHideDuration={4000}
onClose={this.handleCloseSnackbar}
message={<span id="snackbar-id">{this.state.snackbarText}</span>}
action={[
this.snackbarUndoFunction ? (
<Button
key="undo"
color="secondary"
size="small"
onClick={() => {
this.handleCloseSnackbar();
if (
this.state.snackbarUndoFunction &&
typeof snackbarUndoFunction === 'function'
) {
this.snackbarUndoFunction();
}
}}
>
UNDO
</Button>
) : (
undefined
),
<IconButton
key="close"
aria-label="Close"
color="inherit"
className={classes.close}
onClick={this.handleCloseSnackbar}
>
<CloseIcon />
</IconButton>,
]}
/>
</div>
);
}
}
export default ProfileContainer;
|
#shellcheck shell=sh
set -eu
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
IFS="$SHELLSPEC_LF$SHELLSPEC_TAB"
# Workaround for ksh
# Re-register shellspec's core hooks so they can be redefined later
# (e.g. by the mocks in spec_helper below).
shellspec_redefinable shellspec_output
shellspec_redefinable shellspec_output_failure_message
shellspec_redefinable shellspec_output_failure_message_when_negated
shellspec_redefinable shellspec_on
shellspec_redefinable shellspec_off
shellspec_redefinable shellspec_yield
shellspec_redefinable shellspec_parameters
shellspec_redefinable shellspec_profile_start
shellspec_redefinable shellspec_profile_end
shellspec_redefinable shellspec_invoke_example
shellspec_redefinable shellspec_statement_evaluation
shellspec_redefinable shellspec_statement_preposition
shellspec_redefinable shellspec_append_shell_option
shellspec_redefinable shellspec_evaluation_cleanup
shellspec_redefinable shellspec_statement_ordinal
shellspec_redefinable shellspec_statement_subject
shellspec_redefinable shellspec_subject
shellspec_redefinable shellspec_syntax_dispatch
shellspec_redefinable shellspec_set_long
# Workaround for busybox-1.1.3
# Force these commands to run as external programs instead of busybox
# builtins.
shellspec_unbuiltin "ps"
shellspec_unbuiltin "last"
shellspec_unbuiltin "sleep"
shellspec_unbuiltin "date"
shellspec_unbuiltin "wget"
# Spec-helper hook invoked by shellspec: registers custom matchers,
# capture shortcuts, mocks, and feature-detection predicates used by the
# test suite to skip examples on buggy shells.
shellspec_spec_helper_configure() {
  shellspec_import 'support/custom_matcher'

  # Shortcuts that copy the current evaluation results into SHELLSPEC_* vars.
  set_subject() {
    shellspec_capture SHELLSPEC_SUBJECT subject
  }

  set_status() {
    shellspec_capture SHELLSPEC_STATUS status
  }

  set_stdout() {
    shellspec_capture SHELLSPEC_STDOUT stdout
  }

  set_stderr() {
    shellspec_capture SHELLSPEC_STDERR stderr
  }

  # modifier for test
  shellspec_syntax shellspec_modifier__modifier_
  shellspec_modifier__modifier_() {
    [ "${SHELLSPEC_SUBJECT+x}" ] || return 1
    shellspec_puts "$SHELLSPEC_SUBJECT"
  }

  # Mocks that divert shellspec's output records to stderr for inspection.
  subject_mock() {
    shellspec_output() { shellspec_puts "$1" >&2; }
  }

  modifier_mock() {
    shellspec_output() { shellspec_puts "$1" >&2; }
  }

  matcher_mock() {
    shellspec_output() { shellspec_puts "$1" >&2; }
    shellspec_proxy "shellspec_matcher_do_match" "shellspec_matcher__match"
  }

  shellspec_syntax_alias 'shellspec_subject_switch' 'shellspec_subject_value'

  switch_on() { shellspec_if "$SHELLSPEC_SUBJECT"; }
  switch_off() { shellspec_unless "$SHELLSPEC_SUBJECT"; }

  # Shell-bug probes: each returns 0 when the running shell HAS the quirk,
  # so specs can be skipped conditionally.
  posh_pattern_matching_bug() {
    # shellcheck disable=SC2194
    case "a[d]" in (*"a[d]"*) false; esac # posh <= 0.12.6
  }

  accuracy_error_bug() { # ksh on Ubuntu 18.04 on WSL
    [ "$((99999999 * 999999999))" = "99999998900000000" ]
  }

  miscalculate_signed_32bit_int_bug() { # yash 2.30 ans = -2147483648
    ans=$((21474836478 ^ 0))
    [ "$ans" = 21474836478 ] && return 1
    [ "$ans" = -2 ] && return 1
    return 0
  }

  not_exist_failglob() {
    #shellcheck disable=SC2039
    shopt -s failglob 2>/dev/null && return 1
    return 0
  }

  exists_tty() {
    (: < /dev/tty) 2>/dev/null
  }
}
|
#!/bin/ksh
# This script is ONLY for interix. It is to create some of the admin
# files that are normally done when a user is created.
# Modified for adding non-admin users
# arg1 = User Name
# arg2 = User Home Dir
#default vnmrsystem to /vnmr if undefined
# (x-prefix comparison: portable test for an empty/unset variable)
if [ x"$vnmrsystem" = "x" ]
then
    vnmrsystem="/vnmr"
fi
# Set the global home_dir from arg $3 when running on Interix.
# NOTE(review): this function is not called anywhere in this script and
# its commented-out lines suggest it once derived home_dir via getuserinfo —
# confirm whether it is still needed.
gethomedirInterix()
{
    home_dir=""
    if [ "x$osname" = "xInterix" ]
    then
#       home_dir=`/vnmr/bin/getuserinfo $1 | awk 'BEGIN { FS = ";" } {print $2}'`
        home_dir="$3"
        echo "gethomedirInterix: home_dir : $home_dir"
#	home_dir=`$home_dir | sed -e 's@\\\\@\\\\\\\\@g'`
    fi
}
# Merge the entries of the global $datadirline into the existing "datadir"
# line of profile file $1, appending only directories not already present.
# On Interix the list separator is ';' (Windows-style paths contain ':'),
# elsewhere it is whitespace; backslashes are doubled for the sed 'c\' below.
setdatadir()
{
    oldIFS=$IFS
    if [ "x$osname" = "xInterix" ]
    then
        IFS=';'
    fi
    if [ "x$osname" = "xInterix" ]
    then
        datadir=`grep datadir $1 | sed -e 's@\\\\@\\\\\\\\@g'`
    else
        datadir=`grep datadir $1`
    fi
    if [ -n "$datadir" ]
    then
        hasdatadir="y"
        # For each wanted dir, append it only if no existing entry matches.
        for dir in $datadirline
        do
            dirfound="false"
            for dir2 in $datadir
            do
                if [ "x$dir2" = x"$dir" ]
                then
                    dirfound="true"
                fi
            done
            if [ "x$dirfound" = "xfalse" ]
            then
                if [ "x$osname" = "xInterix" ]
                then
                    datadir="$datadir;$dir"
                else
                    datadir="$datadir $dir"
                fi
            fi
        done
        # Replace the whole datadir line in place via sed 'c\' (change).
        cat $1 | sed '/^datadir/c\
'"$datadir"'' > ${1}.bak
        mv ${1}.bak ${1}
    fi
    IFS=$oldIFS
}
# Create the per-user VnmrJ admin files for user $1.
#   $1 = user name, $2 = "db" for the database owner (changes the "owned"
#   line), $3 = user home dir, $4 = user vnmrsys dir.
# Creates/updates: userlist, profiles/{system,user,data,templates}/<user>,
# operators/operatorlist, and the group file.
makeadmfiles()
{
    seperator=""
    echo "Top makeadmfiles()"

#   userlist
    file="$vnmrsystem"/adm/users/userlist
    if [ ! -f "$file" ]
    then
        echo "Creating $file"
        echo "$1" > "$file"
    else
        echo "Found userlist"
        change_vnmr_adm="false"
        # NOTE(review): unquoted $1 in grep and no -w/-F, so a user name that
        # is a substring of an existing entry is treated as already present —
        # confirm user names cannot collide this way.
        grep $1 "$file"
        if [ $? -ne 0 ]
        then
            # Append the user, then flatten the file to a single
            # space-separated line.
            /bin/echo $1 >> "$file"
            /bin/cat "$file" | /usr/bin/tr '\n' ' ' > ${file}.new
            /bin/mv ${file}.new "$file"
            change_vnmr_adm="true"
        fi
    fi

#   make sure there is only one admin
    cd "$vnmrsystem"/adm/users/profiles/system/
#   For non-admin users we don't want to do this
#    list=`grep -l -s System *`
#    for item in $list
#    do
#       grep -v "System Administrator" $item > ${item}.new
#       echo "name  $item" >> ${item}.new
#       mv ${item}.new $item
#    done

    osname=`uname -s`
    vnmrdir="/vnmr"
    vnmrsysdir="$vnmrdir"

#   The users vnmrsys directory path
    user_dir="$4"
    slash="/"
    if [ "x$osname" = "xInterix" ]
    then
        # change the unix path to windows path, and replace \ with \\ to escape slash.
        # To escape one slash in sed it's \\\\.
        # Use unixpath2win for getting the actual path for softlinks
        vnmrdirInterix=`/bin/ntpath2posix "$vnmrdir"`
        user_dirInterix=`/bin/ntpath2posix "$user_dir"`
        vnmrsysdir=`/bin/unixpath2win "$vnmrdir" | sed -e 's@\\\\@\\\\\\\\@g'`
        vnmrdir=`/bin/posixpath2nt "$vnmrdir" | sed -e 's@\\\\@\\\\\\\\@g'`
        user_dir=`/bin/posixpath2nt "$user_dir" | sed -e 's@\\\\@\\\\\\\\@g'`
        slash="\\\\"
        if [ "x$2" != "xdb" ]
        then
            vnmruser_dir="$user_dir"
            vnmruser_dirInterix="$user_dirInterix"
        fi
        # ';'-separated on Interix (Windows paths contain ':')
        datadirline="${user_dir}${slash}data;${vnmrdir}${slash}fidlib;${vnmrdir}${slash}stdpar;${vnmrdir}${slash}tests;${vnmrdir}${slash}parlib;${vnmrdir}${slash}imaging${slash}tests;${vnmrdir}${slash}shims;${user_dir}${slash}parlib;${user_dir}${slash}shims"
    else
        datadirline="${user_dir}/data ${vnmrdir}/fidlib ${vnmrdir}/stdpar ${vnmrdir}/tests ${vnmrdir}/parlib ${vnmrdir}/imaging/tests ${vnmrdir}/shims ${user_dir}/parlib ${user_dir}/shims"
    fi

#   system
    file="$vnmrsystem"/adm/users/profiles/system/$1
    if [ ! -f "$file" ]
    then
        echo "Creating $file"
        echo "accname  $1" > "$file"
        echo "name  $1" >> "$file"
        # echo fails if path has some backslash characters, use print -R
        print -R "home  $3" >> "$file"
        if [ "x$osname" = "xInterix" ]
        then
            if [ "x$2" != "xdb" ]
            then
                print -R -n "owned  ${3}" >> "$file"
                echo ";${vnmrdir}" >> "$file"
            else
                print -R -n "owned  ${3}" >> "$file"
            fi
        else
            echo "owned  ${3} ${vnmrsysdir}" >> "$file"
        fi
        echo "usrlvl  2" >> "$file"
        echo "access  all" >> "$file"
        if [ "x$APP_MODE" = "ximaging" ]
        then
            echo "itype  Imaging" >> "$file"
        elif [ "x$APP_MODE" = "xwalkup" ]
        then
            echo "itype  Spectroscopy" >> "$file"
        else
            echo "itype  Spectroscopy" >> "$file"
        fi
#        echo "cmdArea  Yes" >> "$file"
#   If file already exists, just leave it along
#    else
#        grep -v -w "name" "$file" > ${file}.new
#        if [ "x$2" != "xdb" ]
#        then
#            echo "name  System Administrator" >> ${file}.new
#        fi
#        mv ${file}.new "$file"
#        setdatadir "$file"
    fi

#   user
    file="$vnmrsystem"/adm/users/profiles/user/$1
    # NOTE(review): ""$file"" leaves $file effectively UNQUOTED (the empty
    # quotes concatenate around an unquoted expansion), so paths containing
    # spaces word-split here; should be plain "$file".
    if [ ! -f ""$file"" ]
    then
        echo "Creating $file"
        echo "userdir  ${user_dir}" > "$file"
        echo "sysdir  ${vnmrsysdir}" >> "$file"
        if [ "x$APP_MODE" = "ximaging" ]
        then
            echo "appdir  Imaging" >> "$file"
        else
            echo "appdir  Spectroscopy" >> "$file"
        fi
        echo "datadir  ${datadirline}" >> "$file"
    elif [ "x$hasdatadir" != "xy" ]
    then
        setdatadir "$file"
    fi

#   data
    file="$vnmrsystem"/adm/users/profiles/data/$1
    if [ ! -f "$file" ]
    then
        echo "Creating $file"
        if [ "x$osname" = "xInterix" ]
        then
            echo 'private;'"$user_dir"'/data' > "$file"
        else
            echo 'private:'"$user_dir"'/data' > $file
        fi
    fi

#   templates
    file="$vnmrsystem"/adm/users/profiles/templates/$1
    if [ ! -f "$file" ]
    then
        echo "Creating $file"
        if [ "x$APP_MODE" = "ximaging" ]
        then
            cat "$vnmrsystem"/imaging/templates/vnmrj/properties/filename_templates > "$file"
            echo 'RFCOIL:$RFCOIL$_' >> "$file"
        elif [ "x$APP_MODE" = "xwalkup" ]
        then
            cat "$vnmrsystem"/walkup/templates/vnmrj/properties/filename_templates > "$file"
        else
            cat "$vnmrsystem"/templates/vnmrj/interface/dataTemplateDefault > "$file"
        fi
    fi

#   Kludge an operatorlist file
    dir="$vnmrsystem"/adm/users/operators
    if test ! -d $dir
    then
        echo Creating "$vnmrsystem"/adm/users/operators
        mkdir -p "$vnmrsystem/adm/users/operators"
    fi
    file="$dir/operatorlist"
#   If a file exists, we want to append to it
    if test ! -e $file
    then
        echo "Creating $file"
        echo "# Operator;Users;Email;Panel Level;Full Name;Profile Name;" > $file
        echo "$vnmr_adm  $vnmr_adm;null;30;$vnmr_adm;AllLiquids" >> $file
    else
#       Append to the file
        echo "$1  $1;null;30;$1;AllLiquids" >> $file
    fi

#   group
    file="$vnmrsystem"/adm/users/group
    if [ ! -f "$file" ]
    then
        echo "Creating $file"
        echo "vnmr:VNMR group:$1" > "$file"
        echo "agilent and me: Myself and system:me, varian" >> "$file"
    else
        grep $1 "$file"
        if [ $? -ne 0 ]
        then
            # Insert the user into the "VNMR group:" member list.
            ( cd "$vnmrsystem"/adm/users;
              cat "$file" | sed "s/VNMR group:/VNMR group: $1, /g" > gp.out;
              mv gp.out group
            )
        fi
    fi
}
# On Interix only: rewrite the "sysdir" and "winappdir" lines of the user
# profile for user $1 so they point at the current vnmrsysdir (and, for
# Imaging/Walkup app modes, the matching appdir subtree).
# Relies on globals set by makeadmfiles: vnmrsysdir, user_dir, slash.
updateadminfiles()
{
    if [ "x$osname" = "xInterix" ]
    then
#       user
        file="$vnmrsystem"/adm/users/profiles/user/$1
        # App mode comes from the system profile; fall back to the user file.
        appmode=`/bin/cat "$vnmrsystem"/adm/users/profiles/system/"$1" | awk '/itype/ {print $2}'`
        if [ "x$appmode" = "x" ]
        then
            appmode=`/bin/cat "$file" | awk '/itype/ {print $2}'`
        fi
#       if the file exists, then update sysdir and winappdir to current vnmrsysdir
        # FIX: was `[ -f ""$file"" ]` — the empty quote pairs leave $file
        # unquoted, so paths with spaces (common on Windows/Interix) were
        # word-split and the test misfired.
        if [ -f "$file" ]
        then
            /bin/cat "$file" | sed '/^sysdir/c\
sysdir  '${vnmrsysdir}'
' > ${file}.new
            /bin/mv ${file}.new "$file"
            if [ "x$appmode" = "xImaging" ]
            then
                /bin/cat "$file" | sed '/^winappdir/c\
winappdir  '${user_dir}';'${vnmrsysdir}''${slash}'imaging;'${vnmrsysdir}'
' > ${file}.new
            elif [ "x$appmode" = "xWalkup" ]
            then
                /bin/cat "$file" | sed '/^winappdir/c\
winappdir  '${user_dir}';'${vnmrsysdir}''${slash}'walkup;'${vnmrsysdir}'
' > ${file}.new
            else
                /bin/cat "$file" | sed '/^winappdir/c\
winappdir  '${user_dir}';'${vnmrsysdir}'
' > ${file}.new
            fi
            /bin/mv ${file}.new "$file"
        fi
    fi
}
# Ensure the profile subdirectories exist, then create the admin files for
# the VnmrJ administrator account.
makealladminfiles()
{
    # Create each profile subdirectory that is missing.
    prfiles='system user data templates'
    for file in $prfiles
    do
        [ -d "$vnmrsystem/adm/users/profiles/$file" ] || mkdir -p "$vnmrsystem/adm/users/profiles/$file"
    done

#   Only used if DB is being used and a separate DB owner is used
#    if [ "x$osname" = "xInterix" ]
#    then
#        db_adm="postgres"
#        echo "$vnmrdb_home_dir" "$vnmrdbuser_dir"
#        makeadminfiles "$db_adm" "db" "$vnmrdb_home_dir" "$vnmrdbuser_dir"
#    fi

    # Create/refresh the admin files for the VnmrJ admin user (all OS types).
    makeadmfiles "$vnmr_adm" " " "$vnmradm_home_dir" "$vnmruser_dir"
}
# Create the VnmrJ "pis" choice file (principal investigators list) when it
# does not yet exist.  APP_MODE, when unset, is recovered from the admin's
# global parameter file.
# NOTE(review): $file names a DIRECTORY here (choicefiles/), but is tested
# with `! -f` and the "Creating $file/pis" message writes to "$file"/pis —
# the -f test on a directory is always false, so the mkdir branch always
# runs; confirm this is the intended behavior.
makechoicefiles()
{
#   pis
    if [ "x$APP_MODE" = "x" ]
    then
        file="$vnmradm_home_dir/vnmrsys/global"
        if [ -f "$file" ]
        then
            APP_MODE=`cat "$file" | awk '/appmode/ {getline; print $NF}' | sed 's/"//g'`
        fi
    fi
    if [ "x$APP_MODE" = "ximaging" ]
    then
        file=$vnmrsystem/imaging/templates/vnmrj/choicefiles
    else
        file="$vnmrsystem"/templates/vnmrj/choicefiles
    fi
    if [ ! -f "$file" ]
    then
        echo "Creating $file/pis"
        if test ! -d "$file"
        then
            mkdir -p "$file"
        fi
        echo "\"\" \"\"" > "$file"/pis
        echo "\"$vnmr_adm\" \"$vnmr_adm\"" >> "$file"/pis
    fi
}
########################################################################################
#  main, Main, MAIN
########################################################################################
# arg1 = user name, arg2 = user home directory.
username=$1
userhome=$2
vnmradm_home_dir=$2

osname=`uname -s`
if [ "x$osname" = "xInterix" ]
then
    # Windows-style path with escaped backslash separator.
    vnmruser_dir="${userhome}"\\\\vnmrsys
#    vnmrdbuser_dir="${vnmrdb_home_dir}"\\\\vnmrsys
else
    vnmruser_dir=${userhome}/vnmrsys
fi

if [ "x$vnmruser" = "x" ]
then
    vnmruser="${vnmruser_dir}"
    export vnmruser
fi

sfu_ver=`uname -r`
if [ "x$osname" = "xInterix" -a "x$sfu_ver" = "x3.5" ]
then
    # hardcoding XP for now .... GMB
    vnmradm_home_dir="C:/SFU/home/$username"
    vnmruser="/home/$username/vnmrsys"
    vnmruser_dir="C:/SFU/home/$username/vnmrsys"
fi

export vnmruser
export vnmradm_home_dir
export vnmruser_dir

echo "user: $username"
echo "vnmruser: $vnmruser"
echo "vnmradm_home_dir: $vnmradm_home_dir"
echo "vnmruser_dir: $vnmruser_dir"

# Create directories as needed
prfiles='system user data templates'
for file in $prfiles
do
    if test ! -d "$vnmrsystem/adm/users/profiles/$file"
    then
        mkdir -p "$vnmrsystem/adm/users/profiles/$file"
    fi
done

# Only used if DB is being used and a separate DB owner is used
#    if [ x"$osname" = "xInterix" ]
#    then
#        db_adm="postgres"
#        echo "$vnmrdb_home_dir" "$vnmrdbuser_dir"
#        makeadminfiles "$db_adm" "db" "$vnmrdb_home_dir" "$vnmrdbuser_dir"
#    fi

#makechoicefiles
# FIX: quote "$username" — it came straight from $1 and was passed unquoted,
# so a name containing whitespace would split into multiple arguments.
makeadmfiles "$username" " " "$vnmradm_home_dir" "$vnmruser_dir"
#updateadminfiles $username "$vnmradm_home_dir"
|
// <reponame>THK-ADV/lwm-ui
import {Component} from '@angular/core'
import {Creatable, TableHeaderColumn} from '../abstract-crud/abstract-crud.component'
import {Observable} from 'rxjs'
import {CourseAtom} from '../models/course.model'
import {CourseProtocol, CourseService} from '../services/course.service'
import {emptyCourseProtocol, partialCourseFormInputData} from '../utils/component.utils'
import {UserService} from '../services/user.service'
@Component({
selector: 'lwm-course',
templateUrl: './course.component.html',
styleUrls: ['./course.component.scss']
})
/**
 * CRUD page for courses ("Module"): configures the table columns, the cell
 * renderer, the data stream and the create/update dialog wiring consumed by
 * the shared abstract-crud component.
 */
export class CourseComponent {

    columns: TableHeaderColumn[]
    tableContent: (model: Readonly<CourseAtom>, attr: string) => string
    courses$: Observable<CourseAtom[]>
    creatable: Creatable<CourseProtocol, CourseAtom>

    constructor(
        private readonly courseService: CourseService,
        private readonly userService: UserService
    ) {
        this.columns = [
            {attr: 'label', title: 'Bezeichnung'},
            {attr: 'description', title: 'Beschreibung'},
            {attr: 'abbreviation', title: 'Abkürzung'},
            {attr: 'lecturer', title: 'Dozent'},
            {attr: 'semesterIndex', title: 'Fachsemester'}
        ]

        // Cell renderer: lecturer is a nested object and needs formatting;
        // all other attributes render their raw value.
        this.tableContent = (c, attr) => {
            switch (attr) {
                case 'lecturer':
                    return `${c.lecturer.lastname}, ${c.lecturer.firstname}`
                default:
                    return c[attr]
            }
        }

        this.courses$ = courseService.getAll()

        this.creatable = {
            dialogTitle: 'Modul',
            emptyProtocol: emptyCourseProtocol,
            makeInput: partialCourseFormInputData(userService),
            commitProtocol: (p, s) => ({
                ...p,
                label: s?.label ?? p.label,
                lecturer: s?.lecturer?.id ?? p.lecturer
            }),
            // FIX: bind to the service instance — the bare method references
            // previously passed here lose `this` when the CRUD component
            // invokes them, breaking any `this.http`/field access inside
            // CourseService.create/update.
            create: courseService.create.bind(courseService),
            update: courseService.update.bind(courseService)
        }
    }
}
|
# $NetBSD: t_script.sh,v 1.7 2014/11/16 04:47:18 uebayasi Exp $
#
# Copyright (c) 2014 The NetBSD Foundation, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
atf_test_case order_default
order_default_head() {
	atf_set "descr" "check if default object ordering works"
	# Consistency fix: use the shared prog list like every other test case
	# below.  order_require_progs is assigned at source time (before atf
	# invokes any head function), and the unquoted expansion splits into
	# the same six arguments the old hardcoded list passed.
	atf_set "require.progs" ${order_require_progs}
}
order_default_body() {
	cat > test.x << EOF
SECTIONS {
	/* do nothing; but ld has implicit scripts internally */
	/* which usually do: *(.data) *(.data.*) */
}
EOF
	# Link order is c.o b.o a.o, so with no explicit placement the symbols
	# come out in descending (link) order.
	order_assert_descending
}
################################################################################
atf_test_case order_merge
order_merge_head() {
	atf_set "descr" "check if glob merge keeps object ordering"
	atf_set "require.progs" ${order_require_progs}
}
order_merge_body() {
	# A single combined glob must preserve the c b a link order.
	cat > test.x << EOF
SECTIONS {
	.data : {
		*(.data .data.*)
	}
}
EOF
	order_assert_descending
}
################################################################################
atf_test_case order_reorder
order_reorder_head() {
	atf_set "descr" "check if object reordering works"
	atf_set "require.progs" ${order_require_progs}
}
order_reorder_body() {
	# Explicit per-section placement must override the c b a link order
	# and emit a, b, c ascending.
	cat > test.x << EOF
SECTIONS {
	.data : {
		*(.data)
		*(.data.a)
		*(.data.b)
		*(.data.c)
	}
}
EOF
	order_assert_ascending
}
################################################################################
atf_test_case order_sort
order_sort_head() {
	atf_set "descr" "check if object sort works"
	atf_set "require.progs" ${order_require_progs}
}
order_sort_body() {
	# SORT (= SORT_BY_NAME) must order the .data.* inputs by name,
	# overriding the descending c b a link order.
	cat > test.x << EOF
SECTIONS {
	.data : {
		*(.data)
		/* SORT_BY_NAME */
		SORT(*)(.data.*)
	}
}
EOF
	order_assert_ascending
}
################################################################################
atf_test_case multisec
multisec_head() {
	atf_set "descr" "check if multiple SECTIONS commands work"
	atf_set "require.progs" ${order_require_progs}
}
multisec_body() {
	cat > test.c << EOF
#include <sys/cdefs.h>
char a __section(".data.a") = 'a';
char b __section(".data.b") = 'b';
char c __section(".data.c") = 'c';
EOF
	atf_check -s exit:0 -o ignore -e ignore cc -c test.c
	cat > test.x << EOF
SECTIONS {
	.data : {
		*(.data)
		*(.data.a)
	}
}
SECTIONS {
	.data : {
		*(.data)
		*(.data.b)
	}
}
EOF
	atf_check -s exit:0 -o ignore -e ignore \
	    ld -r -T test.x -Map test.map -o test.ro test.o
	extract_section_names test.ro >test.secs
	extract_symbol_names test.ro >test.syms

	# Both SECTIONS commands must take effect: .data.a and .data.b are
	# absorbed into .data, and only the unmentioned .data.c survives as a
	# separate section.
	assert_nosec '\.data\.a'
	assert_nosec '\.data\.b'
	assert_sec '\.data\.c'
}
################################################################################
# Programs every ordering test needs; expanded unquoted so atf_set receives
# one argument per program.
order_require_progs="cc ld readelf nm sed grep"

order_assert_ascending() {
	order_assert_order a b c
}
order_assert_descending() {
	order_assert_order c b a
}
# Assert that symbols $1 $2 $3 appear in exactly that order in test.syms
# (one `read` per match call consumes one line).
order_assert_order() {
	order_compile
	order_link
	{
		match $1 && match $2 && match $3
	} <test.syms
	atf_check test "$?" -eq 0
}
# Compile one object per symbol (a.o, b.o, c.o), each placing its char in a
# dedicated .data.<name> section, plus a trivial main() in test.o.
order_compile() {
	for i in a b c; do
		cat > $i.c << EOF
#include <sys/cdefs.h>
char $i __section(".data.$i") = '$i';
EOF
		atf_check -s exit:0 -o ignore -e ignore cc -c $i.c
	done
	cat > test.c << EOF
int main(void) { return 0; }
EOF
	atf_check -s exit:0 -o ignore -e ignore cc -c test.c
}
# Relocatable-link the per-symbol objects in reverse (c b a) order under
# test.x, link the final binary, then keep only the a/b/c symbols — in
# address order (nm -n) — in test.syms.
order_link() {
	# c -> b -> a
	atf_check -s exit:0 -o ignore -e ignore \
	    ld -r -T test.x -Map test.map -o x.o c.o b.o a.o
	atf_check -s exit:0 -o ignore -e ignore \
	    cc -o test test.o x.o
	extract_symbol_names test |
	    grep '^[abc]$' >test.syms
}
# Print the dot-prefixed section names of ELF object $1, one per line
# (second column of the readelf section-header table).
extract_section_names() {
	readelf -S "$1" |
	    sed -ne '/\] \./ { s/^.*\] //; s/ .*$//; p }'
}
# Print the symbol names of object $1 in address order (nm -n),
# stripping everything up to the last space on each line.
extract_symbol_names() {
	nm -n "$1" |
	    sed -e 's/^.* //'
}
# Consume exactly one line from stdin and succeed iff it contains $1.
# Successive calls on the same stream therefore verify that the expected
# symbols appear in a given order.
match() {
	# -r: keep backslashes in the input literal (SC2162)
	read -r line
	case "$line" in
	*"$1"*) return 0 ;;
	esac
	return 1
}
# Assert that section $1 (a grep basic regex) is listed in test.secs.
assert_sec() {
	atf_check -s exit:0 -o ignore -e ignore \
	    grep "^$1\$" test.secs
}
# Assert that section $1 is NOT listed in test.secs
# (grep exits 1 when nothing matches).
assert_nosec() {
	atf_check -s exit:1 -o ignore -e ignore \
	    grep "^$1\$" test.secs
}
################################################################################
# Register all test cases with the ATF framework.
atf_init_test_cases()
{
	atf_add_test_case order_default
	atf_add_test_case order_merge
	atf_add_test_case order_reorder
	atf_add_test_case order_sort
	atf_add_test_case multisec
}
|
#!/bin/sh
# Launch distributed training on a single node with 8 processes (one per GPU).
# --master_port uses a non-default port so several jobs can coexist on a host;
# model config and pretrained weights are passed through to train.py.
python -m torch.distributed.launch \
    --nproc_per_node=8 --master_port=29502 \
    train.py \
    --config-file configs/R50_3x.yaml \
    --opts MODEL.WEIGHTS SOLOv2_R50_3x.pth
<gh_stars>1-10
import { Injectable, Inject, HttpException, HttpStatus } from '@nestjs/common'
import { Nodemailer, NODEMAILER, NODEMAILER_CONFIG, Config, SendMailOption } from './nodemailer.provider'
import { RedisService } from '@/module/redis/redis.service'
import * as DTO from './nodemailer.interface'
@Injectable()
export class NodemailerService {
    constructor(
        @Inject(NODEMAILER_CONFIG) public readonly options: Config,
        @Inject(NODEMAILER) public readonly client: Nodemailer,
        private readonly redisService: RedisService
    ) {}

    /**
     * Create a 6-digit numeric verification code.
     *
     * The code is drawn uniformly from [600000, 999999]. The previous
     * implementation sampled six random digits and recursed until the value
     * was >= 600000 — i.e. rejection sampling of exactly this range — so
     * drawing from the accepted range directly yields the same distribution
     * without unbounded recursion.
     */
    public async createNumberCode(): Promise<number> {
        return 600000 + Math.floor(Math.random() * 400000)
    }

    /**
     * Send an e-mail through the injected nodemailer transport.
     * Wraps the callback API in a Promise; rejects with an HTTP 400
     * HttpException on transport failure.
     */
    private sendEmailCode(props: SendMailOption): Promise<DTO.NodemailerResponse> {
        return new Promise((resolve, reject) => {
            this.client.sendMail(props, (error) => {
                if (error) {
                    reject(new HttpException('发送失败', HttpStatus.BAD_REQUEST))
                } else {
                    resolve({ code: 200, message: '发送成功' })
                }
            })
        })
    }

    /**
     * Send a registration verification code: cache it in Redis keyed by the
     * e-mail address, then deliver it by e-mail.
     */
    public async registerCode(props: DTO.RegisterCode): Promise<DTO.NodemailerResponse> {
        try {
            const code = await this.createNumberCode()
            // 1800 s = 30 minutes, matching the validity stated in the mail body
            await this.redisService.setStore(props.email, code, 1800)
            return await this.sendEmailCode({
                from: '"妖雨纯" <<EMAIL>>',
                to: props.email,
                subject: '温馨提示',
                html: `欢迎注册情雨随风的妖雨录, 您的验证码是: <b>${code}</b> 有效时间30分钟`
            })
        } catch (e) {
            throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
        }
    }

    /**
     * After a successful registration, e-mail the generated account number
     * to the user.
     */
    public async registerSend(account: number, email: string) {
        try {
            return await this.sendEmailCode({
                from: '"妖雨纯" <<EMAIL>>',
                to: email,
                subject: '温馨提示',
                html: `欢迎注册情雨随风的妖雨录, 您的登录账户是: <b>${account}</b>`
            })
        } catch (e) {
            throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
        }
    }
}
|
#!/usr/bin/env -S bash ../.port_include.sh
port=gnucobol
version=3.1.2
useconfigure="true"
# Build/runtime dependencies provided by other ports.
depends=("gmp" "gcc" "bash" "ncurses")
files="https://ftpmirror.gnu.org/gnu/gnucobol/gnucobol-${version}.tar.bz2 gnucobol-${version}.tar.bz2
https://ftpmirror.gnu.org/gnu/gnucobol/gnucobol-${version}.tar.bz2.sig gnucobol-${version}.tar.bz2.sig
https://ftpmirror.gnu.org/gnu/gnu-keyring.gpg gnu-keyring.gpg"
# Verify the tarball's GPG signature against the downloaded GNU keyring.
auth_type="sig"
auth_opts=("--keyring" "./gnu-keyring.gpg" "gnucobol-${version}.tar.bz2.sig")
configopts=(
    "--prefix=/usr/local"
    "--enable-hardening"
    "--disable-rpath"
    # No native-language support; math via GMP, screen I/O via ncurses,
    # and the optional Berkeley DB / JSON backends are disabled.
    "--disable-nls"
    "--with-gnu-ld"
    "--with-dl"
    "--with-math=gmp"
    "--with-curses=ncurses"
    "--with-db=no"
    "--with-json=no"
)
|
<gh_stars>0
package com.artemzi.controllers.advices;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import javax.servlet.http.HttpServletResponse;
@Order(Ordered.HIGHEST_PRECEDENCE)
@RestControllerAdvice
public class RestExceptionsControllerAdvice {
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Default (catch-all) exception handler: logs the exception and returns
     * its message with HTTP 500.
     */
    @ExceptionHandler(Exception.class)
    public @ResponseBody
    ResponseEntity<String> exceptionHandler(Exception e, HttpServletResponse response) {
        response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
        logException(e);
        // Bug fix: this previously returned ResponseEntity.badRequest() (HTTP 400),
        // contradicting the 500 set on the raw response above. The returned entity
        // determines the actual status sent, so make it 500 consistently.
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(e.getMessage());
    }

    /** Log the exception (and its cause, when present) at WARN level. */
    private void logException(Exception e) {
        String errorMsg;
        if (e.getCause() != null)
            errorMsg = String.format("%s: %s [cause: %s: %s]", e.getClass().getSimpleName(), e.getMessage(),
                    e.getCause().getClass().getSimpleName(), e.getCause().getMessage());
        else errorMsg = String.format("%s: %s", e.getClass().getSimpleName(), e.getMessage());
        logger.warn(errorMsg);
    }
}
|
<filename>lib/statustile-view.js
'use strict';
// Build the tile's class string for the given severity; any falsy
// state (undefined, '') falls back to the muted 'subtle' styling.
const getIconClass = state => {
	const level = state || 'subtle';
	return 'icon aec-icon-mouse text-' + level;
};
// Look up the live tile / its placeholder in the DOM (null when absent).
const getIcon = () => document.querySelector('#aec-status-bar-tile');
const getContainer = () => document.querySelector('#aec-status-bar-container');
// Build the status-bar icon element; clicking it dispatches the
// EditorConfig:show-state command on the workspace view.
const createIcon = state => {
	const icon = document.createElement('span');
	icon.id = 'aec-status-bar-tile';
	icon.className = getIconClass(state);
	icon.addEventListener('click', () => {
		atom.commands.dispatch(atom.views.getView(atom.workspace), 'EditorConfig:show-state');
	});
	return icon;
};
// Detach the icon from the status bar if it is currently attached.
const removeIcon = () => {
	// Query the DOM once instead of three separate lookups.
	const icon = getIcon();
	// Also guard parentNode: a detached icon (parentNode === null) would
	// have made the previous implementation throw on removeChild.
	if (icon !== null && icon.parentNode !== null) {
		icon.parentNode.removeChild(icon);
	}
};
// True when the status-bar placeholder <div> is present in the DOM.
const containerExists = () => getContainer() !== null;
// Attach the icon to the container (creating the icon on first use) or,
// if it is already attached, just refresh its severity class.
const displayIcon = state => {
	const icon = getIcon() || createIcon(state);
	if (icon.parentNode === null && containerExists()) {
		getContainer().append(icon);
	} else {
		icon.className = getIconClass(state);
	}
};
// Show the icon only for the two severities worth surfacing;
// any other state hides it entirely.
const updateIcon = state => {
	const visibleStates = ['warning', 'error'];
	if (visibleStates.includes(state)) {
		displayIcon(state);
	} else {
		removeIcon();
	}
};
// The container stays as placeholder in the statusBar,
// the icon is then added and removed as needed
const createContainer = () => {
	const div = document.createElement('div');
	div.id = 'aec-status-bar-container';
	div.className = 'inline-block';
	return div;
};
// Public surface consumed by the package's status-bar integration.
module.exports = {
	containerExists,
	createContainer,
	removeIcon,
	updateIcon
};
|
We can use a hash table to solve this problem. Loop over the characters of the first string, counting each character's occurrences in the hash table. Then loop over the second string, decrementing the count for each of its characters; if a character is missing from the table or its count drops below zero, the strings are not anagrams. If the two strings have the same length and every count ends at zero, the two strings are anagrams of each other. (Checking mere membership is not enough — anagrams require identical character counts.)
#!/bin/bash
# Tear down the data-lake deployment: CloudWatch log group, access-log
# buckets, and finally the CloudFormation stack itself.
aws --version >/dev/null 2>&1 || { echo >&2 "I require AWS CLI utility but it's not installed. ¯\_(ツ)_/¯ Aborting."; exit 1; }
source 00-set-environment.sh
# NOTE(review): template placeholder — substitute your real account id before
# running; as written, the <...> placeholder is a shell syntax error.
export AWS_ACCOUNT_ID=<YOUR_AWS_ACCOUNT_ID>
# Delete CloudWatch Log Group
aws logs delete-log-group --log-group-name /aws/lambda/data-lake-helper
# Delete log Buckets
aws s3 rb s3://data-lake-$AWS_ACCOUNT_ID-eu-west-1-s3-access-log --force
aws s3 rb s3://data-lake-$AWS_ACCOUNT_ID-eu-west-1-cf-access-log --force
# Delete Stack
aws cloudformation delete-stack --stack-name $STACK_NAME >> /dev/null
|
<filename>src/test/java/net/dean/jraw/test/RedditTest.java
package net.dean.jraw.test;
import net.dean.jraw.ApiException;
import net.dean.jraw.util.JrawUtils;
import net.dean.jraw.RedditClient;
import net.dean.jraw.util.Version;
import net.dean.jraw.http.LoggingMode;
import net.dean.jraw.http.NetworkException;
import net.dean.jraw.http.UserAgent;
import net.dean.jraw.http.oauth.Credentials;
import net.dean.jraw.managers.AccountManager;
import net.dean.jraw.managers.ModerationManager;
import net.dean.jraw.models.*;
import net.dean.jraw.models.meta.JsonProperty;
import net.dean.jraw.paginators.UserSubredditsPaginator;
import org.testng.Assert;
import org.testng.SkipException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.List;
/**
 * This class is the base class of all JRAW test classes. It provides several utility methods.
 */
public abstract class RedditTest {
    /** Shared client; authenticated once and reused by every test subclass. */
    protected static final RedditClient reddit = new RedditClient(UserAgent.of("desktop",
            "net.dean.jraw.test",
            "v" + Version.get().formatted(),
            "thatJavaNerd"));
    protected final AccountManager account;
    protected final ModerationManager moderation;

    protected RedditTest() {
        reddit.setLoggingMode(LoggingMode.ON_FAIL);
        Credentials creds = getCredentials();
        // The client is static, so authenticate only on first instantiation
        if (!reddit.isAuthenticated()) {
            try {
                reddit.authenticate(reddit.getOAuthHelper().easyAuth(creds));
            } catch (NetworkException | ApiException e) {
                handle(e);
            }
        }
        this.account = new AccountManager(reddit);
        this.moderation = new ModerationManager(reddit);
    }

    /** @return The current time in milliseconds since the Unix epoch */
    public long epochMillis() {
        // Same value as new Date().getTime(), without the throwaway allocation
        return System.currentTimeMillis();
    }

    /**
     * Fails the current test with the given throwable, except for server-side
     * errors (5xx) and rate limiting (429), which merely skip the test.
     */
    protected void handle(Throwable t) {
        if (t instanceof NetworkException) {
            NetworkException e = (NetworkException) t;
            int code = e.getResponse().getStatusCode();
            if ((code >= 500 && code < 600) || code == 429)
                throw new SkipException("Received " + code + ", skipping");
        }
        t.printStackTrace();
        Assert.fail(t.getMessage() == null ? t.getClass().getName() : t.getMessage(), t);
    }

    /** @return True if the exception represents a filled quota or a rate limit */
    protected final boolean isRateLimit(ApiException e) {
        return e.getReason().equals("QUOTA_FILLED") || e.getReason().equals("RATELIMIT");
    }

    /**
     * Skips the calling test when the exception is a quota/rate-limit error;
     * fails it otherwise.
     */
    protected void handlePostingQuota(ApiException e) {
        if (!isRateLimit(e)) {
            // Genuine failure; Assert.fail throws, so execution stops here
            Assert.fail(e.getMessage());
        }
        String msg = null;
        // toUpperCase just in case (no pun intended)
        String method = getCallingMethod();
        switch (e.getReason().toUpperCase()) {
            case "QUOTA_FILLED":
                msg = String.format("Skipping %s(), link posting quota has been filled for this user", method);
                break;
            case "RATELIMIT":
                msg = String.format("Skipping %s(), reached ratelimit (%s)", method, e.getExplanation());
                break;
        }
        if (msg != null) {
            JrawUtils.logger().error(msg);
            throw new SkipException(msg);
        } else {
            Assert.fail(e.getMessage());
        }
    }

    /** @return The name of the method that called the method calling this one */
    protected String getCallingMethod() {
        StackTraceElement[] elements = Thread.currentThread().getStackTrace();
        // [0] = Thread.currentThread().getStackTrace()
        // [1] = this method
        // [2] = caller of this method
        // [3] = Caller of the caller of this method
        return elements[3].getMethodName();
    }

    /** Validates every model in the iterable; see {@link #validateModel(JsonModel)}. */
    protected final <T extends JsonModel> void validateModels(Iterable<T> iterable) {
        for (T model : iterable) {
            validateModel(model);
        }
    }

    /**
     * Validates all of the CommentNode's children's Comments
     */
    protected final void validateModel(CommentNode root) {
        for (CommentNode node : root.walkTree()) {
            validateModel(node.getComment());
        }
    }

    /**
     * Invokes every JSON-property accessor on the model, failing the test when
     * a non-nullable property returns null or an accessor throws; any returned
     * nested models are validated recursively.
     */
    protected final <T extends JsonModel> void validateModel(T model) {
        Assert.assertNotNull(model);
        List<Method> jsonInteractionMethods = JsonModel.getJsonProperties(model.getClass());
        try {
            for (Method method : jsonInteractionMethods) {
                JsonProperty jsonProperty = method.getAnnotation(JsonProperty.class);
                Object returnVal = null;
                try {
                    returnVal = method.invoke(model);
                } catch (InvocationTargetException e) {
                    // InvocationTargetException thrown when the method.invoke() returns null and @JsonInteraction "nullable"
                    // property is false
                    if (e.getCause().getClass().equals(NullPointerException.class) && !jsonProperty.nullable()) {
                        Assert.fail("Non-nullable JsonInteraction method returned null: " + model.getClass().getName() + "." + method.getName() + "()");
                    } else {
                        // Other reason for InvocationTargetException
                        Throwable cause = e.getCause();
                        cause.printStackTrace();
                        Assert.fail(cause.getClass().getName() + ": " + cause.getMessage());
                    }
                }
                // instanceof already implies non-null, so the extra null check was redundant
                if (returnVal instanceof JsonModel) {
                    validateModel((JsonModel) returnVal);
                }
            }
        } catch (IllegalAccessException e) {
            handle(e);
        }
    }

    /**
     * Short for CredentialsUtils.instance().script()
     */
    protected final Credentials getCredentials() {
        return CredentialsUtils.instance().script();
    }

    /**
     * Gets a subreddit that the testing user moderates
     * @return A subreddit
     * @throws IllegalStateException If the test user moderates no subreddit
     */
    protected final Subreddit getModeratedSubreddit() {
        Listing<Subreddit> moderatorOf = new UserSubredditsPaginator(reddit, "moderator").next();
        if (moderatorOf.size() == 0) {
            throw new IllegalStateException("Must be a moderator of at least one subreddit");
        }
        return moderatorOf.get(0);
    }
}
|
#!/system/bin/sh
#
# This script make a backup of pds partition to your data partition
# to allow us to use tuned battd versions without data loss risks
#
# Note: This pds partition contains unique informations related to
# your device, like battery calibration, wifi and baseband
#
export PATH=/system/xbin:$PATH
# Loop-mounted image file holding the backup copy of the pds partition
PDS_FILE=/data/pdsdata.img
# Attach $PDS_FILE to loop7 and mount it rw on /pds (no atime updates).
# Any stale mount / loop attachment from a previous run is torn down
# first; errors from that teardown are deliberately ignored.
mount_pds_image() {
	mkdir -p /pds
	umount /pds 2>/dev/null
	losetup -d /dev/block/loop7 2>/dev/null
	losetup /dev/block/loop7 $PDS_FILE
	busybox mount -o rw,nosuid,nodev,noatime,nodiratime,barrier=1 /dev/block/loop7 /pds
}
if [ -f /data/pds.img ]; then
	#delete old pds image that may have broken permissions
	rm -f /data/pds.img
fi
# First run: take the backup, mount it and fix ownership.
# Subsequent runs: just mount the existing image.
if [ ! -f $PDS_FILE ] ; then
	#make a copy of pds in /data
	dd if=/dev/block/mmcblk1p7 of=$PDS_FILE bs=4096
	#mount the fake pds
	mount_pds_image
	cd /pds
	# Re-own files from vendor uids/gids (90xx) to uid/gid 1000 ('system')
	#find and change moto users first
	busybox find -user 9000 -exec chown 1000 {} \;
	busybox find -user 9003 -exec chown 1000 {} \;
	busybox find -user 9004 -exec chown 1000 {} \;
	busybox find -user 9007 -exec chown 1000 {} \;
	#find and change moto groups
	busybox find -group 9000 -exec chgrp 1000 {} \;
	busybox find -group 9003 -exec chgrp 1000 {} \;
	busybox find -group 9004 -exec chgrp 1000 {} \;
	busybox find -group 9007 -exec chgrp 1000 {} \;
	busybox find -group 9009 -exec chgrp 1000 {} \;
	echo "PDS Backed up, permissions fixed and mounted"
	# Apply the same ownership fix to battd's data directory, if present
	if [ -d /data/battd ] ; then
		cd /data/battd
		busybox find -user 9000 -exec chown 1000 {} \;
		busybox find -group 9000 -exec chgrp 1000 {} \;
	fi
else
	#mount the existing pds backup
	mount_pds_image
	if [ -d /pds/public ] ; then
		echo "PDS partition mounted from data image."
	fi
fi
|
<reponame>lee820/design-pattern-go<filename>08_FilterPattern/filter_test.go
package filter
import (
"container/list"
"fmt"
"testing"
)
var allPersons *list.List
// Test drives the filter-pattern scenarios as ordered subtests.
// AddPersons must run first: it builds the shared allPersons fixture
// that every other subtest reads.
func Test(t *testing.T) {
	t.Run("AddPersons", AddPersons)
	t.Run("GetAllMales: ", GetAllMales)
	t.Run("GetAllFemales: ", GetAllFemales)
	t.Run("GetAllSingles: ", GetAllSingles)
	t.Run("GetSingleMales: ", AndFilterTest)
	t.Run("GetSingleOrFemales: ", OrFilterTest)
}
// PrintPersons writes one formatted line per Person in the list.
func PrintPersons(persons *list.List) {
	for e := persons.Front(); e != nil; e = e.Next() {
		// Assert the element's value once instead of three times.
		p := e.Value.(Person)
		fmt.Printf("Person: [ Name: %s, Gender: %s, Marital Status: %s ] \n",
			p.Name, p.Gender, p.MaritalStatus)
	}
}
// AddPersons (re)builds the shared fixture list: two males and two
// females, one married and one single of each, then prints them.
func AddPersons(t *testing.T) {
	allPersons = list.New()
	for _, p := range []*Person{
		NewPerson("lee", "male", "married"),
		NewPerson("lv", "male", "single"),
		NewPerson("yang", "female", "married"),
		NewPerson("zhu", "female", "single"),
	} {
		allPersons.PushBack(*p)
	}
	PrintPersons(allPersons)
}
// GetAllMales prints every person accepted by the male filter.
func GetAllMales(t *testing.T) {
	males := NewMaleFilter().PersonFilter(allPersons)
	PrintPersons(&males)
}
// GetAllFemales prints every person accepted by the female filter.
func GetAllFemales(t *testing.T) {
	females := NewFemaleFilter().PersonFilter(allPersons)
	PrintPersons(&females)
}
// GetAllSingles prints every person accepted by the single filter.
func GetAllSingles(t *testing.T) {
	singles := NewSingleFilter().PersonFilter(allPersons)
	PrintPersons(&singles)
}
func AndFilterTest(t *testing.T) {
maleFilter := NewMaleFilter()
singleFilter := NewSingleFilter()
andFilter := NewAndFilter(maleFilter, singleFilter)
persons := andFilter.PersonFilter(allPersons)
PrintPersons(&persons)
}
func OrFilterTest(t *testing.T) {
femaleFilter := NewFemaleFilter()
singleFilter := NewSingleFilter()
orFilter := NewOrFilter(femaleFilter, singleFilter)
persons := orFilter.PersonFilter(allPersons)
PrintPersons(&persons)
}
|
<reponame>cisocrgroup/ocrd-postcorrection<filename>src/main/java/de/lmu/cis/iba/LCS_Alignment_Pairwise.java
package de.lmu.cis.iba;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
/**
 * Pairwise alignment of two strings via their common (LCS-style) substrings.
 * Both inputs are wrapped in '#'...'$' sentinels and indexed in a single
 * symmetric compact DAWG (SCDAWG); quasi-maximal nodes shared by the two
 * strings are collected and greedily chained into an alignment.
 */
public class LCS_Alignment_Pairwise {
    // Populated elsewhere; not written by align() in this class.
    public ArrayList<ArrayList<LCS_Triple>> longest_common_subsequences = new ArrayList<>();
    // Greedy alignment produced by align(); null until align() succeeds.
    ArrayList<LCS_Triple> pairs;
    Online_CDAWG_sym scdawg;
    Common_SCDAWG_Functions scdawg_functions;
    /**
     * Builds the SCDAWG over both sentinel-wrapped input strings.
     *
     * @param n1 first string
     * @param n2 second string
     */
    public LCS_Alignment_Pairwise(String n1, String n2) {
        ArrayList<String> stringset = new ArrayList<>();
        // '#' / '$' act as unique begin/end sentinels for the index
        stringset.add("#" + n1 + "$");
        stringset.add("#" + n2 + "$");
        Online_CDAWG_sym scdawg = new Online_CDAWG_sym(stringset, false);
        scdawg.determineAlphabet(false);
        scdawg.build_cdawg();
        this.scdawg = scdawg;
        Common_SCDAWG_Functions scdawg_functions = new Common_SCDAWG_Functions(scdawg);
        this.scdawg_functions = scdawg_functions;
    }
    /**
     * @param i string index (0 or 1)
     * @return the stored, sentinel-wrapped string
     */
    public String getString(int i) {
        return scdawg.stringset.get(i);
    }
    /**
     * Computes the greedy alignment and stores it in {@link #pairs}.
     * Leaves {@code pairs} untouched (null) when no common node exists.
     */
    public void align() {
        // System.out.println("Searching quasi max nodes for s1 and s2 pairs...");
        ArrayList<Endpos_Pair> quasi_max_nodes = scdawg_functions.get_quasi_maximal_nodes_pairwise();
        if (quasi_max_nodes.size() == 0) {
            return;
        }
        // nodes_in_s1[e] = quasi-maximal node ending at position e in s1
        Node[] nodes_in_s1 = new Node[scdawg.stringset.get(0).length() + 1];
        // node -> its (sorted, ascending) end positions in s2
        HashMap<Node, ArrayList> nodes_endpos_s2 = new HashMap();
        for (Endpos_Pair pair : quasi_max_nodes) {
            Node x = pair.node;
            // System.out.println("-----------------------------------");
            // System.out.println(scdawg.get_node_label(x) + ", s1=" + pair.endpos_s1 + " ::
            // s2=" + pair.endpos_s2);
            for (int e1 : pair.endpos_s1) {
                nodes_in_s1[e1] = pair.node;
            }
            ArrayList e2 = pair.endpos_s2;
            Collections.sort(e2);
            nodes_endpos_s2.put(x, e2);
        }
        // System.out.println("Calculating LCS for s1 and s2 pairs...");
        // Reduce to a longest-increasing-subsequence style graph problem
        LIS_Graph g = new LIS_Graph(scdawg);
        g.build_LCS_graph(nodes_in_s1, nodes_endpos_s2);
        ArrayList<LCS_Triple> greedy_alignment = g.get_alignment_greedy();
        this.pairs = greedy_alignment;
    }
    /**
     * Converts the computed alignment into label/position pairs.
     * Returns a single empty pair when align() found nothing.
     * (Method name keeps the historic "Aligment" spelling for compatibility.)
     */
    public ArrayList<AlignmentPair> getAligmentPairs() {
        ArrayList<AlignmentPair> res = new ArrayList<AlignmentPair>();
        if (pairs == null) {
            res.add(new AlignmentPair("", 0, 0));
            return res;
        }
        for (int i = 0; i < pairs.size(); i++) {
            int e1 = pairs.get(i).endpos_s1;
            int e2 = pairs.get(i).endpos_s2;
            String nodelabel = scdawg.get_node_label(pairs.get(i).node);
            res.add(new AlignmentPair(nodelabel, e1, e2));
        }
        return res;
    }
    /**
     * One aligned common substring: its label plus start/end positions in
     * both strings (start = end - label length).
     */
    public static class AlignmentPair {
        public final int epos1, epos2, spos1, spos2;
        public final String label;
        public AlignmentPair(String label, int epos1, int epos2) {
            this.epos1 = epos1;
            this.epos2 = epos2;
            this.label = label;
            spos1 = epos1 - this.label.length();
            spos2 = epos2 - this.label.length();
        }
        @Override
        public String toString() {
            return String.format("{%s,%d,%d,%d,%d}", label, spos1, epos1, spos2, epos2);
        }
    }
}
|
<filename>vital/config/config_block_io.h
/*ckwg +29
* Copyright 2013-2020 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file
* \brief IO Operation utilities for \c kwiver::config
*
* Refer to \ref config_file_format "config file format" for more
* information on the file entries.
*/
#ifndef KWIVER_CONFIG_BLOCK_IO_H_
#define KWIVER_CONFIG_BLOCK_IO_H_
#include <vital/config/vital_config_export.h>
#include "config_block.h"
#include <ostream>
namespace kwiver {
namespace vital {
// ------------------------------------------------------------------
/// Read in a configuration file, producing a \c config_block object
/**
* This method reads the specified config file and returns the
* resulting config block. Any files included by config files that are not in
* absolute form are resolved using search paths supplied in the environment
* variable \c KWIVER_CONFIG_PATH first, and then by using paths supplied in
* \c search_paths. If \c no_system_paths is set to \c true, then the contents
* of the \c KWIVER_CONFIG_PATH variable is not used, i.e. only the paths given
* in \c search_paths are used.
*
* \throws config_file_not_found_exception
* Thrown when the file could not be found on the file system.
* \throws config_file_not_read_exception
* Thrown when the file could not be read or parsed for whatever reason.
*
* \param file_path
* The path to the file to read in.
* \param search_path
* An optional list of directories to use in locating included files.
* \param use_system_paths
* If false, we do not use paths in the KWIVER_CONFIG_PATH environment
* variable or current working directory for searching, otherwise those paths are
* searched first.
*
* \return A \c config_block object representing the contents of the read-in
* file.
*/
config_block_sptr VITAL_CONFIG_EXPORT read_config_file(
config_path_t const& file_path,
config_path_list_t const& search_paths = config_path_list_t(),
bool use_system_paths = true );
// ------------------------------------------------------------------
/// Read in (a) configuration file(s), producing a \c config_block object
/**
* This function reads one or more configuration files from a search
* path. The search path is based on environment variables, system
* defaults, and application defaults. More on this later.
*
* The config reader tries to locate the specified config file using
* the search path. If the file is not found, an exception is
* thrown. If the file is located and the \c merge parameter is \b
* true (default value), then the remaining directories in the search
* path are checked to see if additional versions of the file can be
* found. If so, then the contents are merged, with values in files earlier in
* the search order taking precedence, into the resulting config block. If the
* \c merge parameter is \b false. then reading process stops after the first
* file is found.
*
* A platform specific search path is constructed as follows:
*
* ## Windows Platform
 * - . (the current working directory)
* - ${KWIVER_CONFIG_PATH} (if set)
* - $<CSIDL_LOCAL_APPDATA>/<app-name>[/<app-version>]/config
* - $<CSIDL_APPDATA>/<app-name>[/<app-version>]/config
* - $<CSIDL_COMMON_APPDATA>/<app-name>[/<app-version>]/config
* - <install-dir>/share/<app-name>[/<app-version>]/config
* - <install-dir>/share/config
* - <install-dir>/config
*
* ## OS/X Apple Platform
 * - . (the current working directory)
* - ${KWIVER_CONFIG_PATH} (if set)
* - ${XDG_CONFIG_HOME}/<app-name>[/<app-version>]/config (if $XDG_CONFIG_HOME set)
* - ${HOME}/.config/<app-name>[/<app-version>]/config (if $HOME set)
* - /etc/xdg/<app-name>[/<app-version>]/config
* - /etc/<app-name>[/<app-version>]/config
* - ${HOME}/Library/Application Support/<app-name>[/<app-version>]/config (if $HOME set)
* - /Library/Application Support/<app-name>[/<app-version>]/config
* - /usr/local/share/<app-name>[/<app-version>]/config
* - /usr/share/<app-name>[/<app-version>]/config
*
* If <install-dir> is not `/usr` or `/usr/local`:
*
* - <install-dir>/share/<app-name>[/<app-version>]/config
* - <install-dir>/share/config
* - <install-dir>/config
* - <install-dir>/Resources/config
*
* ## Other Posix Platforms (e.g. Linux)
 * - . (the current working directory)
* - ${KWIVER_CONFIG_PATH} (if set)
* - ${XDG_CONFIG_HOME}/<app-name>[/<app-version>]/config (if $XDG_CONFIG_HOME set)
* - ${HOME}/.config/<app-name>[/<app-version>]/config (if $HOME set)
* - /etc/xdg/<app-name>[/<app-version>]/config
* - /etc/<app-name>[/<app-version>]/config
* - /usr/local/share/<app-name>[/<app-version>]/config
* - /usr/share/<app-name>[/<app-version>]/config
*
* If <install-dir> is not `/usr` or `/usr/local`:
*
* - <install-dir>/share/<app-name>[/<app-version>]/config
* - <install-dir>/share/config
* - <install-dir>/config
*
* The environment variable \c KWIVER_CONFIG_PATH can be set with a
* list of one or more directories, in the same manner as the native
* execution \c PATH variable, to be searched for config files.
*
* \throws config_file_not_found_exception
* Thrown when the no matching file could be found in the searched paths.
* \throws config_file_not_read_exception
* Thrown when a file could not be read or parsed for whatever reason.
*
* \param file_name
* The name to the file(s) to read in.
* \param application_name
* The application name, used to build the list of standard locations to be
* searched.
* \param application_version
* The application version number, used to build the list of standard
* locations to be searched.
* \param install_prefix
* The prefix to which the application is installed (should be one directory
* higher than the location of the executing binary).
* \param merge
* If \c true, search all locations for matching config files, merging their
* contents, with files earlier in the search order taking precedence. If
* \c false, read only the first matching file. If this parameter is omitted
* the configs are merged.
*
* \return
* A \c config_block object representing the contents of the read-in file.
*/
config_block_sptr
VITAL_CONFIG_EXPORT read_config_file(
std::string const& file_name,
std::string const& application_name,
std::string const& application_version,
config_path_t const& install_prefix = config_path_t(),
bool merge = true );
// ------------------------------------------------------------------
/// Output to file the given \c config_block object to the specified file path
/**
* This function writes the specified config block to the specified
* file. If a key has an associated description, it will be written
* as a comment. The key and value strings are written in a format
* that can be read by the read_config_file() function.
*
* If a file exists at the target location, it will be overwritten. If the
* containing directory of the given path does not exist, it will be created
* before the file is opened for writing.
*
* \throws config_file_write_exception
* Thrown when something prevents output of the file.
*
* \param config The \c config_block object to output.
* \param file_path The path to output the file to.
*/
void VITAL_CONFIG_EXPORT write_config_file( config_block_sptr const& config,
config_path_t const& file_path );
// ------------------------------------------------------------------
/// Output to file the given \c config_block object to the specified stream.
/**
* This function writes the specified config block to the specified
* stream. If a key has an associated description, it will be written
* as a comment. The key and value strings are written in a format
* that can be read by the read_config_file() function or it can be
* displayed.
*
* \throws config_file_write_exception
* Thrown when something prevents output of the file.
*
* \param config The \c config_block object to output.
* \param str The output stream.
*/
void VITAL_CONFIG_EXPORT write_config( config_block_sptr const& config,
std::ostream& str );
// ------------------------------------------------------------------
/// Get additional application configuration file paths
/**
* This includes the KWIVER_CONFIG_PATH contents at the head of the returned
* list. The paths returned include application config paths followed by
* KWIVER config paths for the current version of KWIVER and assuming
* the same install prefix for both.
*
* \param application_name
* The application name, used to build the list of standard locations to be
* searched.
* \param application_version
* The application version number, used to build the list of standard
* locations to be searched.
* \param install_prefix
* The prefix to which the application and KWIVER are installed (should be
* one directory higher than the location of the executing binary).
*
* \return
* List of additional application configuration search paths.
*/
config_path_list_t VITAL_CONFIG_EXPORT
application_config_file_paths(std::string const& application_name,
std::string const& application_version,
config_path_t const& install_prefix);
// ------------------------------------------------------------------
/// Get additional application configuration file paths
/**
* This includes the KWIVER_CONFIG_PATH contents at the head of the returned
* list. The paths returned include application config paths followed by
* KWIVER config paths for the current version of KWIVER.
*
* \param application_name
* The application name, used to build the list of standard locations to be
* searched.
* \param application_version
* The application version number, used to build the list of standard
* locations to be searched.
* \param app_install_prefix
* The prefix to which the application is installed (should be one directory
* higher than the location of the executing binary).
* \param kwiver_install_prefix
* The prefix to which KWIVER is installed.
*
* \return
* List of additional application configuration search paths.
*/
config_path_list_t VITAL_CONFIG_EXPORT
application_config_file_paths(std::string const& application_name,
std::string const& application_version,
config_path_t const& app_install_prefix,
config_path_t const& kwiver_install_prefix);
// ------------------------------------------------------------------
/// Get KWIVER configuration file paths
/**
* This includes the KWIVER_CONFIG_PATH contents at the head of the returned
* list. The remainder of the list provides config paths for KWIVER
* installed configuration files for the current version of KWIVER
*
* \param install_prefix
* The prefix to which KWIVER is installed.
*
* \return
* List of KWIVER configuration search paths.
*/
config_path_list_t VITAL_CONFIG_EXPORT
kwiver_config_file_paths(config_path_t const& install_prefix);
} }
#endif
|
// Return the largest element of arr (undefined for an empty array,
// matching the original behavior).
const arrayMax = (arr) => {
  let best = arr[0];
  for (const value of arr) {
    if (value > best) {
      best = value;
    }
  }
  return best;
};
console.log(arrayMax([45, 21, 89, 34, 0, 25])) // 89
package com.freud.zkadmin.business.zk.vo;
/**
 * Request payload for creating/editing a node (zkadmin "zk" module):
 * identifies the node by parent name + own name and carries its data.
 *
 * @author Freud
 */
public class EditNodeRequest {
    private String parentNodeName;
    private String nodeName;
    private String nodeData;
    private Integer id;
    /**
     * @return the name of the parent node under which this node lives
     */
    public String getParentNodeName() {
        return parentNodeName;
    }
    /**
     * @param parentNodeName
     *            the parent node name to set
     */
    public void setParentNodeName(String parentNodeName) {
        this.parentNodeName = parentNodeName;
    }
    /**
     * @return this node's own name
     */
    public String getNodeName() {
        return nodeName;
    }
    /**
     * @param nodeName
     *            the node name to set
     */
    public void setNodeName(String nodeName) {
        this.nodeName = nodeName;
    }
    /**
     * @return the data payload stored at the node
     */
    public String getNodeData() {
        return nodeData;
    }
    /**
     * @param nodeData
     *            the node data to set
     */
    public void setNodeData(String nodeData) {
        this.nodeData = nodeData;
    }
    /**
     * @return the request/record identifier
     */
    public Integer getId() {
        return id;
    }
    /**
     * @param id
     *            the identifier to set
     */
    public void setId(Integer id) {
        this.id = id;
    }
}
|
package cn.stylefeng.roses.kernel.socket.api.session.pojo;
import cn.stylefeng.roses.kernel.socket.api.session.SocketSessionOperatorApi;
import lombok.Data;
/**
 * Socket session: metadata for one connected socket channel, plus the
 * operator API used to act on that channel.
 *
 * @author majianguo
 * @date 2021/6/1 11:28 AM
 */
@Data
public class SocketSession<T extends SocketSessionOperatorApi> {
    /**
     * Session ID; every newly created session has one (currently the channel ID)
     */
    private String sessionId;
    /**
     * Unique identifier of the session's user
     */
    private String userId;
    /**
     * The message type this session listens for
     */
    private String messageType;
    /**
     * Token information
     */
    private String token;
    /**
     * Time the connection was established
     */
    private Long connectionTime;
    /**
     * Time of last activity
     */
    private Long lastActiveTime;
    /**
     * Operator API for acting on this session
     */
    private T socketOperatorApi;
    /**
     * Arbitrary custom data attached to the session
     */
    private Object data;
}
|
<gh_stars>0
import { createStore } from 'redux'
import throttle from 'lodash.throttle'
import reducers from './reducers'
import { loadState, saveState } from './localStorage'

// Rehydrate the store with whatever state was previously persisted.
const store = createStore(reducers, loadState())

// Persist on every state change, but throttle so localStorage is
// written at most once per second.
const persistThrottled = throttle(() => saveState(store.getState()), 1000)
store.subscribe(persistThrottled)

export default store
/// Render a module list as a single comma-separated string, grouped as:
/// all `ofn*` modules first, then all `thick*` modules, then everything
/// else — each group sorted alphabetically, with ", " only between
/// non-empty groups.
pub fn format_modules(modules: &[&str]) -> String {
    let mut ofn: Vec<&str> = Vec::new();
    let mut thick: Vec<&str> = Vec::new();
    let mut rest: Vec<&str> = Vec::new();

    // Partition by prefix.
    for &m in modules {
        if m.starts_with("thick") {
            thick.push(m);
        } else if m.starts_with("ofn") {
            ofn.push(m);
        } else {
            rest.push(m);
        }
    }

    ofn.sort();
    thick.sort();
    rest.sort();

    // Assemble, inserting a separator only when both sides are non-empty.
    let mut out = ofn.join(", ");
    if !ofn.is_empty() && (!thick.is_empty() || !rest.is_empty()) {
        out.push_str(", ");
    }
    out.push_str(&thick.join(", "));
    if !thick.is_empty() && !rest.is_empty() {
        out.push_str(", ");
    }
    out.push_str(&rest.join(", "));
    out
}
<reponame>gjhkael/Atom<gh_stars>1-10
package com.ctrip.persistence.entity;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * JPA entity mapped to the {@code t_element_param} table; all fields and
 * behaviour are inherited from {@code AbstractElementParam}.
 *
 * @author jtzhang
 */
@Entity
@Table(name = "t_element_param")
public class ElementParam extends AbstractElementParam {
}
|
package chylex.hee.mechanics.essence.handler;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.SortedSet;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.world.World;
import chylex.hee.entity.item.EntityItemAltar;
import chylex.hee.init.ItemList;
import chylex.hee.mechanics.essence.EssenceType;
import chylex.hee.mechanics.essence.handler.dragon.AltarItemRecipe;
import chylex.hee.packets.PacketPipeline;
import chylex.hee.packets.client.C11ParticleAltarOrb;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.entity.EntitySelector;
import chylex.hee.system.abstractions.facing.Facing4;
import chylex.hee.system.abstractions.nbt.NBT;
import chylex.hee.system.abstractions.nbt.NBTCompound;
import chylex.hee.system.abstractions.nbt.NBTList;
import chylex.hee.system.collections.CollectionUtil;
import chylex.hee.system.collections.weight.WeightedMap;
import chylex.hee.system.util.MathUtil;
import chylex.hee.tileentity.TileEntityEssenceAltar;
/**
 * Altar action handler for Dragon Essence altars. Periodically scans the
 * blocks around the altar for "pedestals", anchors thrown items onto them,
 * and — at the cost of stored essence — repairs anchored items, upgrades
 * their enchantments, or transforms them according to {@link #recipes}.
 */
public class DragonEssenceHandler extends AltarActionHandler{
    /** Item-to-item transformations the altar can perform, with their essence cost. */
    public static final List<AltarItemRecipe> recipes = CollectionUtil.newList(new AltarItemRecipe[]{
        new AltarItemRecipe(new ItemStack(Items.brewing_stand), new ItemStack(ItemList.enhanced_brewing_stand), 20),
        new AltarItemRecipe(new ItemStack(ItemList.ghost_amulet, 1, 0), new ItemStack(ItemList.ghost_amulet, 1, 1), 8)
    });
    // Area around the altar in which thrown items are considered (built lazily).
    private AxisAlignedBB itemBoundingBox;
    // Positions of the blocks currently acting as pedestals.
    private final List<Pos> pedestals = new ArrayList<>();
    // Ticks until the next pedestal re-scan.
    private byte updatePedestalTimer = 2;
    // Hash of the block layer around the altar; skips re-scans when unchanged.
    private long pedestalAreaHash;
    // Pedestal limit used for the previous scan (a change forces a re-scan).
    private byte lastMaxPedestals;
    // Progress accumulator that paces essence drain while repairing.
    private short repairCounter;
    public DragonEssenceHandler(TileEntityEssenceAltar altar){
        super(altar);
    }
    @Override
    public void onUpdate(){
        // Phase 1: every 20 ticks, re-detect the pedestal blocks.
        if (--updatePedestalTimer <= 0){
            updatePedestalTimer = 20;
            int maxPedestals = /* TODO altar.getEnhancements().contains(EssenceAltarEnhancements.RANGE) ? 12 : */8;
            int range = maxPedestals == 12 ? 4 : 3;
            long currentHash = 0L;
            if (lastMaxPedestals != maxPedestals){
                lastMaxPedestals = (byte)maxPedestals;
                pedestalAreaHash = 0L;
            }
            World world = altar.getWorldObj();
            // Cheap hash of the surrounding block layer so the expensive
            // pedestal search below only runs when the area actually changed.
            for(int xx = -range, id; xx <= range; xx++){
                for(int zz = -range; zz <= range; zz++){
                    id = Block.getIdFromBlock(Pos.at(altar.xCoord+xx, altar.yCoord, altar.zCoord+zz).getBlock(world));
                    currentHash += ((4+xx)*7+(4+zz)+id)*262144L+(xx*id)+(zz*id);
                }
            }
            if (pedestalAreaHash != currentHash){
                pedestalAreaHash = currentHash;
                pedestals.clear();
                IdentityHashMap<Block, Byte> blockCounts = new IdentityHashMap<>();
                Block[][] blocks = new Block[range*2+1][range*2+1];
                Pos tilePos = Pos.at(altar);
                // A pedestal candidate: outside the 3x3 core, air above,
                // has a collision box, and is free-standing on all 4 sides.
                Pos.forEachBlock(tilePos.offset(-range, 0, -range), tilePos.offset(range, 0, range), pos -> { // TODO rework a bit?
                    if (Math.abs(pos.x-tilePos.getX()) <= 1 && Math.abs(pos.z-tilePos.getZ()) <= 1)return;
                    if (!(pos.getUp().isAir(world) && hasCollisionBox(altar, pos.getX(), pos.getY(), pos.getZ())))return;
                    for(Facing4 facing:Facing4.list){
                        Pos offset = pos.offset(facing);
                        if (!(offset.isAir(world) || !hasCollisionBox(altar, offset.getX(), offset.getY(), offset.getZ())))return;
                    }
                    Block block = pos.getBlock(world);
                    if (block.getMaterial() == Material.air)return;
                    blocks[range+pos.getX()-tilePos.getX()][range+pos.getZ()-tilePos.getZ()] = block;
                    if (blockCounts.containsKey(block))blockCounts.put(block, (byte)(blockCounts.get(block)+1));
                    else blockCounts.put(block, (byte)1);
                });
                // Use the most common candidate block type that still fits the
                // pedestal limit; every block of that type becomes a pedestal.
                SortedSet<Entry<Block, Byte>> sorted = CollectionUtil.sortMapByValueDesc(blockCounts);
                for(Entry<Block, Byte> entry:sorted){
                    if (entry.getValue() > maxPedestals)continue;
                    for(int xx = -range; xx <= range; xx++){
                        for(int zz = -range; zz <= range; zz++){
                            if (blocks[range+xx][range+zz] != entry.getKey())continue;
                            pedestals.add(Pos.at(altar.xCoord+xx, altar.yCoord, altar.zCoord+zz));
                        }
                    }
                    break;
                }
            }
            // Ambient particle effects above the pedestals.
            for(Pos pos:pedestals){
                if (world.rand.nextInt(5) <= 1){
                    PacketPipeline.sendToAllAround(altar, 64D, new C11ParticleAltarOrb(altar, pos.getX()+0.5D, pos.getY()+0.5D, pos.getZ()+0.5D));
                }
            }
        }
        if (itemBoundingBox == null){
            itemBoundingBox = AxisAlignedBB.getBoundingBox(altar.xCoord+0.5D-4.5D, altar.yCoord+0.9D, altar.zCoord+0.5D-4.5D, altar.xCoord+0.5+4.5D, altar.yCoord+1.6D, altar.zCoord+0.5D+4.5D);
        }
        // Phase 2: anchor nearby thrown items onto free pedestals, and tick
        // the items already anchored there.
        World world = altar.getWorldObj();
        List<EntityItem> thrownItems = EntitySelector.type(world, EntityItem.class, itemBoundingBox);
        double targX, targY, targZ;
        for(EntityItem item:thrownItems){
            for(Pos pos:pedestals){
                targX = pos.getX()+0.5D;
                targY = pos.getY()+1.15D;
                targZ = pos.getZ()+0.5D;
                if (Math.abs(item.posX-targX) > 0.001D || Math.abs(item.posY-targY) > 0.001D || Math.abs(item.posZ-targZ) > 0.001D){
                    // Item is near but not on the pedestal: if the spot is free
                    // and the item is close enough, replace it with an anchored
                    // altar item entity.
                    if (EntitySelector.type(world, EntityItemAltar.class, AxisAlignedBB.getBoundingBox(targX, targY, targZ, targX, targY, targZ)).isEmpty() &&
                        Math.sqrt(MathUtil.square(targX-item.posX)+MathUtil.square(targY-item.posY)+MathUtil.square(targZ-item.posZ)) < 0.275D){
                        world.spawnEntityInWorld(new EntityItemAltar(world, targX, targY, targZ, item, EssenceType.DRAGON.id));
                    }
                }
                else if ((updatePedestalTimer&3) == 1 && item instanceof EntityItemAltar){
                    EntityItemAltar altarItem = (EntityItemAltar)item;
                    altarItem.pedestalUpdate = 0;
                    if (altar.getEssenceLevel() > 0){
                        updatePedestalItem(altarItem);
                        if (world.rand.nextInt(5) == 0){
                            PacketPipeline.sendToAllAround(altar.getWorldObj().provider.dimensionId, targX, pos.getY()+0.5D, targZ, 64D, new C11ParticleAltarOrb(targX, pos.getY()+0.5D, targZ, item.posX, item.posY+0.3D, item.posZ, altar.getEssenceType().id, (byte)1));
                        }
                    }
                }
            }
        }
    }
    /**
     * True when the block at (x, y, z) is non-air and has a collision box.
     */
    public static boolean hasCollisionBox(TileEntityEssenceAltar altar, int x, int y, int z){
        Block block = altar.getWorldObj().getBlock(x, y, z);
        return block.getMaterial() == Material.air ? false : block.getCollisionBoundingBoxFromPool(altar.getWorldObj(), x, y, z) != null;
    }
    /**
     * Tick one anchored pedestal item: randomly either repair it, upgrade
     * one of its enchantments, or transform it via a matching recipe, each
     * paced by a per-item NBT counter and paid for with altar essence.
     */
    private void updatePedestalItem(EntityItemAltar item){
        // TODO EnhancementList<EssenceAltarEnhancements> enhancements = altar.getEnhancements();
        ItemStack is = item.getEntityItem();
        /*
         * REPAIRING
         */
        if (item.worldObj.rand.nextInt(3) != 0){
            if (is.isItemStackDamageable() && is.getItemDamage() != 0 && is.getItem().isRepairable()){
                for(int a = /* TODO enhancements.contains(EssenceAltarEnhancements.SPEED) ? 2 : */1; a > 0; a--){
                    if (++repairCounter > (/* TODO enhancements.contains(EssenceAltarEnhancements.EFFICIENCY) ? 72 : */56)){
                        altar.drainEssence(1);
                        repairCounter = 0;
                    }
                    if (updateItemCounter(is, "HEE_repair", 1) < 18)continue;
                    updateItemCounter(is, "HEE_repair", 0);
                    int amount = MathUtil.clamp(MathUtil.floor(Math.sqrt(is.getMaxDamage())*0.65D), 1, is.getItemDamage());
                    is.setItemDamage(is.getItemDamage()-amount);
                    item.setSparkling();
                }
            }
        }
        /*
         * ENCHANTMENT UPGRADES
         */
        else if (is.isItemEnchanted() && is.getItem() != Items.enchanted_book){
            for(int b = /* TODO enhancements.contains(EssenceAltarEnhancements.SPEED) ? 2 : */1; b > 0; b--){
                if (updateItemCounter(is, "HEE_enchant", 1) < 280-is.getItem().getItemEnchantability()*5)return;
                updateItemCounter(is, "HEE_enchant", 0);
                NBTList enchants = is.hasTagCompound() ? new NBTList(is.getEnchantmentTagList()) : null;
                if (enchants == null || enchants.isEmpty())return;
                // Pick an enchantment weighted by its rarity weight and try to
                // raise its level, paying essence per upgrade.
                for(int attempt = 0; attempt < 3; attempt++){
                    WeightedMap<Enchantment> list = new WeightedMap<>();
                    for(int a = 0; a < enchants.size(); a++){
                        Enchantment e = Enchantment.enchantmentsList[enchants.getCompound(a).getShort("id")];
                        if (e == null)continue;
                        list.add(e, e.getWeight());
                    }
                    if (list.isEmpty())continue; // the enchantments are no longer in the game
                    Enchantment chosenEnchantment = list.getRandomItem(item.worldObj.rand);
                    for(int a = 0; a < enchants.size(); a++){
                        NBTCompound tag = enchants.getCompound(a);
                        if (tag.getShort("id") != chosenEnchantment.effectId)continue;
                        int level = tag.getShort("lvl"), cost = getEnchantmentCost(chosenEnchantment, level+1);
                        // TODO if (enhancements.contains(EssenceAltarEnhancements.EFFICIENCY))cost = MathUtil.ceil(cost*0.65F);
                        if (level >= chosenEnchantment.getMaxLevel() || altar.getEssenceLevel() < cost)continue;
                        altar.drainEssence(cost);
                        tag.setShort("lvl", (short)(level+1));
                        NBT.item(is, true).setList("ench", enchants);
                        item.setSparkling();
                        attempt = 999;
                        break;
                    }
                }
            }
        }
        /*
         * ITEM TRANSFORMATIONS
         */
        else if (item.worldObj.rand.nextInt(5) == 0){
            for(AltarItemRecipe recipe:recipes){
                if (recipe.isApplicable(is)){
                    for(int a = /* TODO enhancements.contains(EssenceAltarEnhancements.SPEED) ? 2 : */1; a > 0; a--){
                        if (updateItemCounter(is, "HEE_transform", 1) <= Math.max(MathUtil.ceil(recipe.cost*(/* TODO enhancements.contains(EssenceAltarEnhancements.EFFICIENCY) ? 0.65F : */1F)), recipe.cost>>1)){
                            altar.drainEssence(1);
                            continue;
                        }
                        updateItemCounter(is, "HEE_transform", 0);
                        recipe.doTransaction(item);
                        item.setSparkling();
                        break;
                    }
                    break;
                }
            }
        }
    }
    /**
     * Essence cost of raising an enchantment to the given level; rarer
     * (lower-weight) enchantments cost more.
     */
    private int getEnchantmentCost(Enchantment ench, int level){
        return MathUtil.floor(Math.max(1F, 1F+(2F*level*((float)level/ench.getMaxLevel()))+(10-ench.getWeight())*0.2F));
    }
    /**
     * Maintain a named short counter in the item's NBT.
     *
     * @param operation 0 = reset, 1 = increment
     * @return current value
     */
    private short updateItemCounter(ItemStack is, String counterName, int operation){
        NBTCompound tag = NBT.item(is, true);
        if (operation == 0){
            tag.removeTag(counterName);
            return 0;
        }
        short counter = tag.getShort(counterName);
        if (operation == 1)tag.setShort(counterName, ++counter);
        return counter;
    }
    @Override
    public void onTileWriteToNBT(NBTTagCompound nbt){
        nbt.setShort("D_repairCnt", repairCounter);
    }
    @Override
    public void onTileReadFromNBT(NBTTagCompound nbt){
        repairCounter = nbt.getShort("D_repairCnt");
    }
}
|
// D3DTexture.cpp
// <NAME>, 29th November 1997.
#include "DDLib.h"
#include "tga.h"
#ifdef TARGET_DC
#include "target.h"
#endif
#ifndef VERIFY
#ifdef NDEBUG
#define VERIFY(x) x
#else
#define VERIFY(x) {ASSERT(x);}
#endif
#endif
#ifdef TEX_EMBED
static D3DTexture* EmbedSource = NULL;
static LPDIRECTDRAWSURFACE4 EmbedSurface = NULL;
static LPDIRECT3DTEXTURE2 EmbedTexture = NULL;
static UBYTE EmbedOffset = 0;
#endif
extern void POLY_reset_render_states ( void );
static DWORD dwSizeOfFastLoadBuffer = 0;
void *pvFastLoadBuffer = NULL;
// Return a scratch buffer at least dwSize bytes long, growing (and
// replacing) the shared FastLoad buffer if the current one is too small.
// The returned memory is only valid until the next call.
inline void *GetMeAFastLoadBufferAtLeastThisBigPlease ( DWORD dwSize )
{
	if ( dwSizeOfFastLoadBuffer < dwSize )
	{
		if ( pvFastLoadBuffer != NULL )
		{
			// dwSize must be 0 when using MEM_RELEASE (the old code
			// passed NULL, which happened to work but hid the intent).
			VirtualFree ( pvFastLoadBuffer, 0, MEM_RELEASE );
		}
		// Grow slightly more than needed to prevent hammering.
		dwSizeOfFastLoadBuffer = ( dwSize * 5 / 4 + 1024 );
		// Ensure it's 4k-aligned.
		dwSizeOfFastLoadBuffer = ( ( dwSizeOfFastLoadBuffer + 4095 ) & ~4095 );
		TRACE ( "Growing FastLoad buffer to 0x%x bytes\n", dwSizeOfFastLoadBuffer );
		pvFastLoadBuffer = VirtualAlloc ( NULL, dwSizeOfFastLoadBuffer, MEM_COMMIT, PAGE_READWRITE );
		ASSERT ( pvFastLoadBuffer != NULL );
	}
	return ( pvFastLoadBuffer );
}
// Free the shared FastLoad buffer; call when no texture loads are
// expected for a while. The buffer is lazily re-created on next use.
void NotGoingToLoadTexturesForAWhileNowSoYouCanCleanUpABit ( void )
{
	if ( pvFastLoadBuffer != NULL )
	{
		TRACE ( "Freeing FastLoad buffer\n" );
		// dwSize must be 0 when using MEM_RELEASE (was NULL).
		VirtualFree ( pvFastLoadBuffer, 0, MEM_RELEASE );
		pvFastLoadBuffer = NULL;
		dwSizeOfFastLoadBuffer = 0;
	}
}
// Read dwSize bytes from 'handle' into the shared FastLoad buffer and
// return a pointer to it. The bulk is read as one 4k-aligned chunk, the
// remaining tail with a second smaller read. The buffer is shared — the
// data is only valid until the next FastLoad call.
inline void *FastLoadFileSomewhere ( MFFileHandle handle, DWORD dwSize )
{
	void *pvData = GetMeAFastLoadBufferAtLeastThisBigPlease ( dwSize );
	ASSERT ( pvData != NULL );
	// Round down to a whole number of 4k pages.
	DWORD dwAlignedFileSize = dwSize & ( ~4095 );
	// DMA read
	DWORD dwRead = 0;
	if ( dwAlignedFileSize > 0 )
	{
		dwRead = FileRead ( handle, pvData, dwAlignedFileSize );
	}
	// Finish off with PIO or whatever.
	if ( dwSize - dwAlignedFileSize > 0 )
	{
		dwRead += FileRead ( handle, (void *)( (char *)pvData + dwAlignedFileSize ), dwSize - dwAlignedFileSize );
	}
	ASSERT ( dwRead == dwSize );
	return ( pvData );
}
// NOTE(review): never read or written anywhere in this file — looks vestigial.
static bool m_bTexturePagesInitialised = FALSE;
// Invalidate cached texture-page state.
// NOTE(review): despite the name, no pages are freed here — only the poly
// renderer's cached render states / sort order are reset; confirm the
// actual freeing happens elsewhere.
void FreeAllD3DPages ( void )
{
	// And redo all the render states and sorting.
	POLY_reset_render_states();
}
// Reset per-session texture-loading state before a batch of loads.
// (The unused local 'first_time' from the old code has been removed.)
void D3DTexture::BeginLoading()
{
#ifdef TEX_EMBED
	// Forget any embedded-texture packing state from a previous session.
	EmbedSource = NULL;
	EmbedSurface = NULL;
	EmbedTexture = NULL;
	EmbedOffset = 0;
#endif
	// And redo all the render states and sorting.
	POLY_reset_render_states();
}
void D3DPage::EnsureLoaded ( void )
{
if ( this->pTex != NULL )
{
// Cool - already done.
return;
}
// OK, we need to load this up.
this->pTex = MFnew<D3DTexture>();
ASSERT ( this->pTex != NULL );
HRESULT hres = this->pTex->LoadTextureTGA ( this->pcFilename, -1, TRUE );
if ( FAILED(hres) )
{
this->pTex = NULL;
}
}
// Release this page's texture (if it has one) and forget it.
void D3DPage::Unload ( void )
{
	if ( this->pTex == NULL )
	{
		return;
	}
	this->pTex->Destroy();
	MFdelete ( this->pTex );
	this->pTex = NULL;
}
#ifdef TEX_EMBED
// Return the UV scale and offset that map a sub-texture's local 0..1
// coordinates into its cell on a shared (embedded) 256-texel page.
void D3DTexture::GetTexOffsetAndScale ( float *pfUScale, float *pfUOffset, float *pfVScale, float *pfVOffset )
{
	switch ( bPageType )
	{
		case D3DPAGE_NONE:
			// Not embedded - the texture owns the whole page.
			*pfUScale = 1.0f;
			*pfVScale = 1.0f;
			*pfUOffset = 0.0f;
			*pfVOffset = 0.0f;
			break;
		case D3DPAGE_64_3X3:
		case D3DPAGE_32_3X3:
			// Arranged with 32-pixel gaps between textures, and
			// the textures are right up against the edge.
			// So along the edge, you have 64 texels, 32 padding, 64 texels, 32 padding, 64 texels.
			// So the offsets are 0.0, 0.375, 0.75 — i.e. (64+32)/256 per cell.
			*pfUScale = 0.25f;
			*pfVScale = 0.25f;
			*pfUOffset = 0.375f * (float)( bPagePos % 3 );
			*pfVOffset = 0.375f * (float)( bPagePos / 3 );
			break;
		case D3DPAGE_64_4X4:
		case D3DPAGE_32_4X4:
			// Edge-to-edge packing: 4x4 cells, each 1/4 of the page.
			*pfUScale = 0.25f;
			*pfVScale = 0.25f;
			*pfUOffset = 0.25f * (float)( bPagePos & 0x3 );
			*pfVOffset = 0.25f * (float)( bPagePos >> 2 );
			break;
		default:
			ASSERT ( FALSE );
			break;
	}
}
#endif //#ifdef TEX_EMBED
// Swap an already-loaded texture for the given TGA file: the current
// contents are destroyed and the new file loaded in place.
// Returns DDERR_GENERIC if no texture is loaded yet, otherwise the
// result of the reload.
HRESULT D3DTexture::ChangeTextureTGA(CBYTE *tga_file) {
	if (Type != D3DTEXTURE_TYPE_UNUSED)
	{
		//
		// There must be one already loaded.
		//
		Destroy();
		strcpy(texture_name, tga_file);
		// Propagate the reload result instead of unconditionally
		// reporting DD_OK as the old code did.
		return Reload();
	}
	return DDERR_GENERIC;
}
// Load a TGA texture from 'tga_file'. No-op (DD_OK) if a texture is
// already loaded. 'id' is passed through to the TGA loader; bCanShrink
// allows the loader to downscale. On success the page is registered
// with the display driver.
HRESULT D3DTexture::LoadTextureTGA(CBYTE *tga_file, ULONG id,BOOL bCanShrink)
{
	HRESULT result;
	if (Type != D3DTEXTURE_TYPE_UNUSED)
	{
		//
		// Already loaded.
		//
		return DD_OK;
	}
	lp_Texture = NULL;
	lp_Surface = NULL;
	this->bCanShrink = bCanShrink;
	// Check parameters.
	if(!tga_file)
	{
		// Invalid parameters.
		return DDERR_GENERIC;
	}
	// NOTE(review): unchecked strcpy into the fixed-size texture_name
	// buffer — assumes callers pass paths that fit; confirm.
	strcpy(texture_name, tga_file);
	ID = id;
	Type = D3DTEXTURE_TYPE_TGA;
	result = Reload();
	if(FAILED(result))
	{
		DebugText("LoadTextureTGA: unable to load texture\n");
		return ( result );
	}
	//
	// Finally let the display driver know about this texture page.
	//
	the_display.AddLoadedTexture(this);
	return DD_OK;
}
// Create an empty "user" texture page, texture_size texels square,
// optionally with an alpha channel. The caller fills in the pixels via
// LockUser()/UnlockUser(). On success the page is registered with the
// display driver.
HRESULT D3DTexture::CreateUserPage(SLONG texture_size, BOOL i_want_an_alpha_channel)
{
	HRESULT result;
	ASSERT(Type == D3DTEXTURE_TYPE_UNUSED);
	lp_Texture = NULL;
	lp_Surface = NULL;
	UserWantsAlpha = i_want_an_alpha_channel;
	//
	// A user page.
	//
	size = texture_size;
	//
	// Reload it... or rather, re-create it, or even create it in the first place!
	//
	Type = D3DTEXTURE_TYPE_USER;
	result = Reload();
	if (FAILED(result))
	{
		DebugText("Could not create user page.\n");
		// Propagate the failure instead of registering a broken page
		// and reporting DD_OK (as the old code did).
		return result;
	}
	//
	// Let the display driver know about this texture page.
	//
	the_display.AddLoadedTexture(this);
	return DD_OK;
}
//
// Given the bitmask for ONE colour component of a pixel format, calculates
// the mask and shift needed to place an 8-bit component value into a pixel.
// Each component (R, G, B, A) gets its own mask/shift, and a pixel is
// built by OR-ing the converted components together:
//
// PIXEL(r,g,b) = ((r >> mask_r) << shift_r) | ((g >> mask_g) << shift_g) | ((b >> mask_b) << shift_b);
//
// THIS ASSUMES that r,g,b are 8-bit values.
//
void OS_calculate_mask_and_shift(
	ULONG bitmask,
	SLONG *mask,
	SLONG *shift)
{
	// Scan all 32 bits once, recording the position of the lowest set
	// bit and counting the total number of set bits.
	SLONG num_bits = 0;
	SLONG first_bit = -1;
	ULONG probe = 1;
	for (SLONG bit = 0; bit < 32; bit++, probe <<= 1)
	{
		if ((bitmask & probe) == 0)
		{
			continue;
		}
		num_bits += 1;
		if (first_bit == -1)
		{
			// Lowest set bit found - that is the component's shift.
			first_bit = bit;
		}
	}
	ASSERT(first_bit != -1 && num_bits != 0);
	// An 8-bit source value is narrowed to num_bits by '>> mask', then
	// moved into position by '<< shift'.
	*mask = 8 - num_bits;
	*shift = first_bit;
	if (*mask < 0)
	{
		//
		// More than 8 bits per colour component? May
		// as well support it!
		//
		*shift -= *mask;
		*mask = 0;
	}
}
//
// (Re)load this texture from its TGA file: load the pixels, pick the
// device's texture format, optionally parse fonts / recolour font pages,
// create the DD surface + D3D texture, and convert the TGA's 8:8:8:8
// pixels into the surface's pixel format.
//
// Fixes over the old code: dwMaskA/dwShiftA are now initialised (they
// were read uninitialised for opaque textures), and the TGA pixel buffer
// is freed on the "no device" early return (it leaked).
//
HRESULT D3DTexture::Reload_TGA(void)
{
	D3DDeviceInfo *current_device;
	DDModeInfo *mi;
	//SLONG bpp;
	DDSURFACEDESC2 dd_sd;
	TRACE ("Tex<%s>\n", texture_name);
	TGA_Info ti;
	TGA_Pixel *tga;
	//HRESULT result;
	//
	// Allocate memory for the texture.
	//
	tga = (TGA_Pixel *) MemAlloc (256 * 256 * sizeof(TGA_Pixel));
	if (tga == NULL)
	{
		TRACE("Not enough MAIN memory to load tga %s\n", texture_name);
		return DDERR_GENERIC;
	}
	//
	// Load the texture.
	//
	ti = TGA_load(
		texture_name,
		256,
		256,
		tga,
		ID,
		bCanShrink);
	if (!ti.valid)
	{
		//
		// Invalid tga.
		//
		TRACE("TGA %s is invalid\n", texture_name);
		//ASSERT ( FALSE );
		MemFree(tga);
		return DDERR_GENERIC;
	}
	if (ti.width != ti.height)
	{
		TRACE("TGA %s is not square\n", texture_name);
		MemFree(tga);
		return DDERR_GENERIC;
	}
	// Must be a power of two.
	if ( ( ti.width & ( ti.width - 1 ) ) != 0 )
	{
		TRACE("TGA %s is not a valid size", texture_name);
		MemFree(tga);
		return DDERR_GENERIC;
	}
	size = ti.width;
	//
	// Get the current device.
	//
	current_device = the_display.GetDeviceInfo();
	if (!current_device)
	{
		TRACE("No device!\n");
		// Was leaked by the old code on this path.
		MemFree(tga);
		return DDERR_GENERIC;
	}
	TRACE("texture = %s\n", this->texture_name);
	//
	// Does this texture page contain alpha?
	//
	ContainsAlpha = ti.contains_alpha;
	//
	// Find the best texture format.
	//
	if (ContainsAlpha)
	{
		mi = current_device->AlphaTexFmt;
	}
	else
	{
		mi = current_device->OpaqueTexFmt;
	}
	//
	// Use the best texture format.
	//
	// Alpha mask/shift default to 0 for opaque formats (the old code
	// left them uninitialised in that case).
	SLONG dwMaskR, dwMaskG, dwMaskB, dwMaskA = 0;
	SLONG dwShiftR, dwShiftG, dwShiftB, dwShiftA = 0;
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRBitMask, &dwMaskR, &dwShiftR );
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwGBitMask, &dwMaskG, &dwShiftG );
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwBBitMask, &dwMaskB, &dwShiftB );
	if (ContainsAlpha)
	{
		OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRGBAlphaBitMask, &dwMaskA, &dwShiftA);
	}
	mask_red    = (UBYTE)dwMaskR;
	mask_green  = (UBYTE)dwMaskG;
	mask_blue   = (UBYTE)dwMaskB;
	mask_alpha  = (UBYTE)dwMaskA;
	shift_red   = (UBYTE)dwShiftR;
	shift_green = (UBYTE)dwShiftG;
	shift_blue  = (UBYTE)dwShiftB;
	shift_alpha = (UBYTE)dwShiftA;
	//
	// Get rid of the old texture stuff.
	//
	Destroy();
	// Guy. Do all the font mapping stuff here.
	if(IsFont())
	{
		CreateFonts(&ti,tga);
		// Change the outline colour (magenta marker) to black.
		SLONG size = (ti.width*ti.height);
		while(size--)
		{
			if (
				(tga+size)->red==0xff &&
				(tga+size)->green==0 &&
				(tga+size)->blue==0xff
			   )
			{
				(tga+size)->red = 0;
				(tga+size)->green = 0;
				(tga+size)->blue = 0;
			}
		}
	}
	// replace red-only pixels with black
	//
	// WITHOUT AFFECTING BLACK'S ALPHA-CHANNELS. ATF.
	if (IsFont2())
	{
		SLONG size = (ti.width * ti.height);
		while (size--)
		{
			if ((tga[size].green == 0) && (tga[size].blue == 0) && (tga[size].red > 128 ) )
			{
				tga[size].red = 0;
				tga[size].alpha = 0;
			}
		}
	}
	int interlace;
	int xoff,yoff;
	{
		// Create the DirectDraw surface and query its D3D texture interface.
		dd_sd = mi->ddSurfDesc;
		dd_sd.dwSize = sizeof(dd_sd);
		dd_sd.dwFlags =
			DDSD_CAPS |
			DDSD_HEIGHT |
			DDSD_WIDTH |
			DDSD_PIXELFORMAT;
		dd_sd.dwWidth = ti.width;
		dd_sd.dwHeight = ti.height;
		dd_sd.ddsCaps.dwCaps = DDSCAPS_TEXTURE;
#ifdef TARGET_DC
		dd_sd.ddsCaps.dwCaps2 = 0;
#else
		dd_sd.ddsCaps.dwCaps2 = DDSCAPS2_TEXTUREMANAGE;
#endif
		dd_sd.dwTextureStage = 0;
		VERIFY(SUCCEEDED(the_display.lp_DD4->CreateSurface(&dd_sd, &lp_Surface, NULL)));
		VERIFY(SUCCEEDED(lp_Surface->QueryInterface(IID_IDirect3DTexture2,(LPVOID *)&lp_Texture)));
		interlace = ti.width;
		xoff = yoff = 0;
	}
	//
	// Lock the surface.
	//
	dd_sd.dwSize = sizeof(dd_sd);
	HRESULT res = lp_Surface->Lock(NULL, &dd_sd, 0, NULL);
	ASSERT(SUCCEEDED(res));
	//
	// Copy the texture in
	//
	// ASSUMES 16 or 32-bits PER PIXEL!
	//
	{
		UWORD *wscreenw = (UWORD *) dd_sd.lpSurface;
		ULONG *wscreenl = (ULONG *) dd_sd.lpSurface;
		SLONG i;
		SLONG j;
		ULONG pixel;
		SLONG red;
		SLONG green;
		SLONG blue;
		SLONG bright;
		for (j = 0; j < ti.height; j++)
		for (i = 0; i < ti.width; i++)
		{
			pixel = 0;
			red   = tga[i + j * ti.width].red;
			green = tga[i + j * ti.width].green;
			blue  = tga[i + j * ti.width].blue;
			/*
			//
			// Add some gamma!
			//
			red   = 256 - ((256 - red)   * (256 - red)   >> 8);
			green = 256 - ((256 - green) * (256 - green) >> 8);
			blue  = 256 - ((256 - blue)  * (256 - blue)  >> 8);
			if (red   > 255) {red   = 255;}
			if (green > 255) {green = 255;}
			if (blue  > 255) {blue  = 255;}
			*/
			if (GreyScale)
			{
				// Cheap average: (r+g+b) * 85/256 ~= sum/3.
				bright = (red + green + blue) * 85 >> 8;
				red   = bright;
				green = bright;
				blue  = bright;
			}
			pixel |= (red   >> mask_red  ) << shift_red;
			pixel |= (green >> mask_green) << shift_green;
			pixel |= (blue  >> mask_blue ) << shift_blue;
#define ISPIXEL(x,y) (tga[(x) + (y) * ti.width].red | tga[(x) + (y) * ti.width].green | tga[(x) + (y) * ti.width].blue)
			if (ContainsAlpha)
			{
				pixel |= (tga[i + j * ti.width].alpha >> mask_alpha) << shift_alpha;
				if (!pixel && !ISPIXEL(i,j))
				{
					// this is a bit bad ... we want to copy the nearest texel across
					// (presumably to avoid dark fringes when the texture is filtered).
					int i2,j2;
					if ((i - 1 >= 0) && ISPIXEL(i - 1, j))
					{
						i2 = i - 1;
						j2 = j;
					}
					else if ((i + 1 < ti.width) && ISPIXEL(i + 1, j))
					{
						i2 = i + 1;
						j2 = j;
					}
					else if ((j - 1 >= 0) && ISPIXEL(i, j - 1))
					{
						i2 = i;
						j2 = j - 1;
					}
					else if ((j + 1 < ti.height) && ISPIXEL(i, j + 1))
					{
						i2 = i;
						j2 = j + 1;
					}
					else if ((i - 1 >= 0) && (j - 1 >= 0) && ISPIXEL(i - 1, j - 1))
					{
						i2 = i - 1;
						j2 = j - 1;
					}
					else if ((i - 1 >= 0) && (j + 1 < ti.height) && ISPIXEL(i - 1, j + 1))
					{
						i2 = i - 1;
						j2 = j + 1;
					}
					else if ((i + 1 < ti.width) && (j - 1 >= 0) && ISPIXEL(i + 1, j - 1))
					{
						i2 = i + 1;
						j2 = j - 1;
					}
					else if ((i + 1 < ti.width) && (j + 1 < ti.height) && ISPIXEL(i + 1, j + 1))
					{
						i2 = i + 1;
						j2 = j + 1;
					}
					else
					{
						i2 = i;
						j2 = j;
					}
					red   = tga[i2 + j2 * ti.width].red;
					green = tga[i2 + j2 * ti.width].green;
					blue  = tga[i2 + j2 * ti.width].blue;
					pixel |= (red   >> mask_red  ) << shift_red;
					pixel |= (green >> mask_green) << shift_green;
					pixel |= (blue  >> mask_blue ) << shift_blue;
				}
			}
			if (dd_sd.ddpfPixelFormat.dwRGBBitCount == 32)
			{
				wscreenl[i + xoff + (j + yoff) * interlace] = pixel;
			}
			else
			{
				wscreenw[i + xoff + (j + yoff) * interlace] = (WORD)pixel;
			}
		}
	}
	//
	// Unlock the surface.
	//
	VERIFY(SUCCEEDED(lp_Surface->Unlock(NULL)));
	MemFree(tga);
	return DD_OK;
}
// (Re)create a user texture page: score the device's texture formats
// (most alpha bits per pixel when alpha is wanted, otherwise the
// shallowest >=16bpp RGB format), record its mask/shift values, then
// create the surface and query the D3D texture interface.
HRESULT D3DTexture::Reload_user()
{
	D3DDeviceInfo *current_device;
	SLONG score;
	DDModeInfo *mi;
	SLONG best_score;
	DDModeInfo *best_mi;
	//SLONG bpp;
	SLONG try_shift_alpha;
	SLONG try_shift_red;
	SLONG try_shift_green;
	SLONG try_shift_blue;
	SLONG try_mask_alpha;
	SLONG try_mask_red;
	SLONG try_mask_green;
	SLONG try_mask_blue;
	DDSURFACEDESC2 dd_sd;
	HRESULT result;
	//
	// Get the current device.
	//
	current_device = the_display.GetDeviceInfo();
	if (!current_device)
	{
		TRACE("No device!\n");
		return DDERR_GENERIC;
	}
	best_score = 0;
	best_mi = NULL;
	if (UserWantsAlpha)
	{
		//
		// Find the texture format with the most bits of alpha.
		//
		for (mi = current_device->FormatList; mi; mi = mi->Next)
		{
			if (mi->ddSurfDesc.ddpfPixelFormat.dwFlags & DDPF_RGB)
			{
				if (mi->ddSurfDesc.ddpfPixelFormat.dwFlags & DDPF_ALPHAPIXELS)
				{
					if (mi->ddSurfDesc.ddpfPixelFormat.dwRGBBitCount == 16)
					{
						//
						// Find out how many bits there are for each component.
						//
						OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRGBAlphaBitMask, &try_mask_alpha, &try_shift_alpha);
						OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRBitMask      , &try_mask_red,   &try_shift_red  );
						OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwGBitMask      , &try_mask_green, &try_shift_green);
						OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwBBitMask      , &try_mask_blue,  &try_shift_blue );
						// Smaller alpha mask means more alpha bits, so this
						// rewards alpha depth per pixel bit.
						score = (32 - try_mask_alpha) << 8;
						score /= mi->ddSurfDesc.ddpfPixelFormat.dwRGBBitCount;
						if (score > best_score)
						{
							best_score = score;
							best_mi = mi;
						}
					}
				}
			}
		}
	}
	else
	{
		//
		// Find a 5:6:5 or similar format.
		//
		for (mi = current_device->FormatList; mi; mi = mi->Next)
		{
			if (mi->ddSurfDesc.ddpfPixelFormat.dwFlags & DDPF_RGB)
			{
				//
				// True colour...
				//
				if (mi->ddSurfDesc.ddpfPixelFormat.dwRGBBitCount >= 16)
				{
					// Prefer the shallowest format; penalise alpha slightly.
					score = 0x100;
					score -= mi->ddSurfDesc.ddpfPixelFormat.dwRGBBitCount;
					if (mi->ddSurfDesc.ddpfPixelFormat.dwFlags & DDPF_ALPHAPIXELS)
					{
						//
						// Knock off score for alpha
						//
						score -= 1;
					}
					if (score > best_score)
					{
						best_score = score;
						best_mi = mi;
					}
				}
			}
		}
	}
	if (best_mi == NULL)
	{
		//
		// Couldn't find a suitable texture format.
		//
		TRACE("Could not find texture format for the user texture page\n");
		return DDERR_GENERIC;
	}
	mi = best_mi;
	SLONG dwMaskR, dwMaskG, dwMaskB, dwMaskA;
	SLONG dwShiftR, dwShiftG, dwShiftB, dwShiftA;
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRBitMask, &dwMaskR, &dwShiftR );
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwGBitMask, &dwMaskG, &dwShiftG );
	OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwBBitMask, &dwMaskB, &dwShiftB );
	if (UserWantsAlpha)
	{
		OS_calculate_mask_and_shift(mi->ddSurfDesc.ddpfPixelFormat.dwRGBAlphaBitMask, &dwMaskA, &dwShiftA);
	}
	else
	{
		dwMaskA = 0;
		dwShiftA = 0;
	}
	mask_red    = (UBYTE)dwMaskR;
	mask_green  = (UBYTE)dwMaskG;
	mask_blue   = (UBYTE)dwMaskB;
	mask_alpha  = (UBYTE)dwMaskA;
	shift_red   = (UBYTE)dwShiftR;
	shift_green = (UBYTE)dwShiftG;
	shift_blue  = (UBYTE)dwShiftB;
	shift_alpha = (UBYTE)dwShiftA;
	//
	// Get rid of the old texture stuff.
	//
	Destroy();
	//
	// The surface
	//
	dd_sd = mi->ddSurfDesc;
	dd_sd.dwSize = sizeof(dd_sd);
	dd_sd.dwFlags =
		DDSD_CAPS |
		DDSD_HEIGHT |
		DDSD_WIDTH |
		DDSD_PIXELFORMAT;
	dd_sd.dwWidth = size;
	dd_sd.dwHeight = size;
	dd_sd.ddsCaps.dwCaps = DDSCAPS_TEXTURE;
#ifdef TARGET_DC
	dd_sd.ddsCaps.dwCaps2 = 0;
#else
	dd_sd.ddsCaps.dwCaps2 = DDSCAPS2_TEXTUREMANAGE;
#endif
	VERIFY(SUCCEEDED(the_display.lp_DD4->CreateSurface(&dd_sd, &lp_Surface, NULL)));
	//
	// Get d3d texture interface.
	//
	result = lp_Surface->QueryInterface(IID_IDirect3DTexture2,(LPVOID *)&lp_Texture);
	if(FAILED(result))
	{
		TRACE("ReloadTextureUser: Could not get the texture interface.\n");
		Destroy();
		return DDERR_GENERIC;
	}
	//
	// Success.
	//
	return DD_OK;
}
// Lock this page's surface for direct pixel access. On success, *bitmap
// points at the pixels and *pitch is the surface pitch; call
// UnlockUser() when done. Returns DDERR_GENERIC if there is no surface
// or the lock fails.
HRESULT D3DTexture::LockUser(UWORD **bitmap, SLONG *pitch)
{
	DDSURFACEDESC2 dd_sd;
	InitStruct(dd_sd);
	if (lp_Surface != NULL && SUCCEEDED(lp_Surface->Lock(NULL, &dd_sd, DDLOCK_WAIT, NULL)))
	{
		*bitmap = (UWORD *) dd_sd.lpSurface;
		*pitch  = dd_sd.lPitch;
		return DD_OK;
	}
	return DDERR_GENERIC;
}
// Unlock the surface previously locked by LockUser().
void D3DTexture::UnlockUser()
{
//	ASSERT(Type == D3DTEXTURE_TYPE_USER);
	VERIFY(SUCCEEDED(lp_Surface->Unlock(NULL)));
}
// (Re)create the texture according to its Type, first discarding any
// fonts built from the old texel data (Reload_TGA re-creates them).
HRESULT D3DTexture::Reload(void)
{
	Font *current_font,
	     *next_font;
	// Initialised so an unknown Type can no longer return an
	// uninitialised HRESULT (the old code left 'ans' unset on the
	// default switch path).
	HRESULT ans = DDERR_GENERIC;
	//
	// erk ... we have to call the POLY engine from here
	// this hook needs calling when the textures are reloaded
	// en masse, but tracking down each point in the game where
	// this happens is tricky ...
	// so there's a cheeky little call here ...
	//
	POLY_reset_render_states();
	if(IsFont())
	{
		current_font = FontList;
		while(current_font)
		{
			next_font = current_font->NextFont;
			MFdelete(current_font);
			current_font = next_font;
		}
		FontList = NULL;
	}
	switch(Type)
	{
		case D3DTEXTURE_TYPE_TGA:
			ans = Reload_TGA();
			break;
		case D3DTEXTURE_TYPE_USER:
			ans = Reload_user();
			break;
		default:
			ASSERT(0);
			break;
	}
	return ans;
}
// Free everything owned by this texture: the parsed font list, the D3D
// texture interface, and the DD surface. Safe to call repeatedly.
// (The unused locals a/b/c/d that captured Release() refcounts in the
// old code have been removed.)
HRESULT D3DTexture::Destroy(void)
{
	Font *current_font,
	     *next_font;
	// Get rid of fonts.
	if(IsFont())
	{
		current_font = FontList;
		while(current_font)
		{
			next_font = current_font->NextFont;
			MFdelete(current_font);
			current_font = next_font;
		}
		FontList = NULL;
	}
	// Release texture.
	if(lp_Texture)
	{
		DebugText("Releasing texture\n");
		lp_Texture->Release();
		DebugText("Done\n");
		lp_Texture = NULL;
	}
	// Release surface.
	if(lp_Surface)
	{
		DebugText("Releasing surface\n");
		lp_Surface->Release();
		DebugText("Done\n");
		lp_Surface = NULL;
	}
	return DD_OK;
}
#define MATCH_TGA_PIXELS(p1,p2) ((p1)->red==(p2)->red&&(p1)->green==(p2)->green&&(p1)->blue==(p2)->blue)
// Walk *line_ptr / *y_ptr down the image until a row whose FIRST pixel is
// the 'underline' marker colour is found. On success both are advanced to
// the row AFTER the marker row and TRUE is returned; FALSE means the
// bottom of the image was reached without finding a marker row.
BOOL scan_for_baseline(TGA_Pixel **line_ptr,TGA_Pixel *underline,TGA_Info *info,SLONG *y_ptr)
{
	while(*y_ptr<info->height)
	{
		if(MATCH_TGA_PIXELS(*line_ptr,underline))
		{
			// Got the baseline so drop to the next line.
			*y_ptr += 1;
			*line_ptr += info->width;
			return TRUE;
		}
		*y_ptr += 1;
		*line_ptr += info->width;
	}
	return FALSE;
}
// Parse font metrics out of a font TGA page. Glyph rows sit directly
// above "baseline" rows drawn in the marker colour (magenta, ff/00/ff),
// and glyph cells are separated by single marker-colour pixels. Each
// baseline found yields one 93-character Font prepended to FontList;
// multiple fonts may be stacked in one file. Returns DDERR_GENERIC if
// the layout runs off the page.
HRESULT D3DTexture::CreateFonts(TGA_Info *tga_info,TGA_Pixel *tga_data)
{
	SLONG current_char,
	      char_x,char_y,
	      char_height,char_width,
	      tallest_char;
	Font *the_font;
	TGA_Pixel underline,
	          *current_line,
	          *current_pixel;
	// Scan down the image looking for the underline.
	underline.red = 0xff;
	underline.green = 0x00;
	underline.blue = 0xff;
	current_line = tga_data;
	char_y = 0;
	if(scan_for_baseline(&current_line,&underline,tga_info,&char_y))
	{
map_font:
		// Found a font baseline so map it.
		the_font = MFnew<Font>();
		if(FontList)
		{
			the_font->NextFont = FontList;
			FontList = the_font;
		}
		else
		{
			the_font->NextFont = NULL;
			FontList = the_font;
		}
		current_char = 0;
		char_x = 0;
		tallest_char = 1;
		while(current_char<93)
		{
			// Scan across to find the width of char.
			char_width = 0;
			current_pixel = current_line+char_x;
			while(!MATCH_TGA_PIXELS(current_pixel,&underline))
			{
				current_pixel++;
				char_width++;
				// Reached the end of the line.
				if(char_x+char_width>=tga_info->width)
				{
					// Find the next baseline.
					char_y += tallest_char+1;
					if(char_y>=tga_info->height)
						return DDERR_GENERIC;
					current_line = tga_data+(char_y*tga_info->width);
					if(!scan_for_baseline(&current_line,&underline,tga_info,&char_y))
						return DDERR_GENERIC;
					char_x = 0;
					tallest_char = 1;
					char_width = 0;
					current_pixel = current_line;
				}
			}
			// Now scan down to find the height of the char
			char_height = 0;
			current_pixel = current_line+char_x;
			while(!MATCH_TGA_PIXELS(current_pixel,&underline))
			{
				current_pixel += tga_info->width;
				char_height++;
				// Reached the bottom of the page.
				if(char_height>=tga_info->height)
					return DDERR_GENERIC;
			}
			the_font->CharSet[current_char].X = char_x;
			the_font->CharSet[current_char].Y = char_y;
			the_font->CharSet[current_char].Width = char_width;
			the_font->CharSet[current_char].Height = char_height;
			char_x += char_width+1;
			if(tallest_char<char_height)
				tallest_char = char_height;
			current_char++;
		}
		// Find out if there's another font in this file.
		char_y += tallest_char+1;
		if(char_y>=tga_info->height)
			return DDERR_GENERIC;
		current_line = tga_data+(char_y*tga_info->width);
		if(scan_for_baseline(&current_line,&underline,tga_info,&char_y))
			goto map_font;
	}
	return DD_OK;
}
// Return the id'th font (0-based) from the global FontList by walking the
// singly-linked list.  Returns NULL when fewer than id+1 fonts exist.
Font *D3DTexture::GetFont(SLONG id)
{
    Font *font = FontList;
    while(id != 0 && font != NULL)
    {
        font = font->NextFont;
        id--;
    }
    return font;
}
// Switch this texture between greyscale and colour rendering.
// A no-op when the flag already matches; otherwise the flag is updated and,
// if the texture is actually in use, it is reloaded so the change shows.
void D3DTexture::set_greyscale(BOOL is_greyscale)
{
    if (is_greyscale == GreyScale)
        return;  // nothing to do

    GreyScale = is_greyscale;

    // Only textures that are in use need to be reloaded.
    if (Type != D3DTEXTURE_TYPE_UNUSED)
        Reload();
}
|
// Context keys used to look up deployment-wide settings.
export const CONTEXT_AWS_ENV = 'AWS_ENV';
export const CONTEXT_COSMOS_PARTITION = 'COSMOS_PARTITION';
export const CONTEXT_COSMOS_VERSION = 'COSMOS_VERSION';
export const CONTEXT_COSMOS_NETWORK_BUILDER = 'COSMOS_NETWORK_BUILDER';
export const CONTEXT_COSMOS_NAMING = 'COSMOS_NAMING';
// Naming patterns built from {Token} placeholders; judging by the other
// entries, '?' marks an optional token, '*' zero-or-more and '+' one-or-more.
export const PATTERN = {
  STACK: '{Partition}{Cosmos}{Galaxy}?{SolarSystem}?{Version}?{Type}',
  COSMOS: '{Partition}{Cosmos}{Galaxy}?{SolarSystem}?{Resource}*{Version}?',
  // NOTE(review): the leading '$' before {Partition} is inconsistent with
  // every other pattern here — confirm it is intentional and not a typo.
  SINGLETON_COSMOS: '${Partition}{Galaxy}?{SolarSystem}?{Resource}+',
  RESOURCE: '{Resource}+{Version}?',
};
|
// Laravel Mix build configuration: bundles the front-office ("payment"/app)
// assets and the back-office (AdminLTE / Gentelella) scripts and styles.
const mix = require('laravel-mix');
/*
 |--------------------------------------------------------------------------
 | Mix Asset Management
 |--------------------------------------------------------------------------
 |
 | Mix provides a clean, fluent API for defining some Webpack build steps
 | for your Laravel application. By default, we are compiling the Sass
 | file for the application as well as bundling up all the JS files.
 |
 */
//mix.minify('resources/assets/css/backend/AdminLTE.css');
//mix.minify('resources/assets/css/backend/bootstrap/css/bootstrap.css');
// Payment page script bundle.
mix.scripts([
    'resources/assets/js/payment.js',
], 'public/js/payment.js');
// Payment page stylesheet bundle (select2 + validation + sweetalert).
mix.styles([
    'public/bower_components/admin-lte/plugins/select2/select2.min.css',
    'public/bower_components/external-plugins/jquery-validate/jquery.validate.css',
    'public/bower_components/external-plugins/sweetalert-master/dist/sweetalert.css',
], 'public/css/payment.css');
// Shared third-party plugin scripts (AdminLTE plugins + helpers).
mix.scripts([
    'public/bower_components/external-plugins/toastr/toastr.js',
    'public/bower_components/admin-lte/plugins/moment/moment.min.js',
    'public/bower_components/admin-lte/plugins/select2/select2.full.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.date.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.numeric.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.phone.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.regex.extensions.js',
    'public/bower_components/admin-lte/plugins/daterangepicker/daterangepicker.js',
    'public/bower_components/admin-lte/plugins/datepicker/bootstrap-datepicker.js',
    'public/bower_components/admin-lte/plugins/colorpicker/bootstrap-colorpicker.min.js',
    'public/bower_components/admin-lte/plugins/timepicker/bootstrap-timepicker.js',
    'public/bower_components/admin-lte/plugins/slimScroll/jquery.slimscroll.js',
    'public/bower_components/admin-lte/plugins/iCheck/icheck.min.js',
    'public/bower_components/admin-lte/plugins/fastclick/fastclick.js',
    'public/bower_components/admin-lte/plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.all.min.js',
    'public/bower_components/admin-lte/plugins/datatables/jquery.dataTables.min.js',
    'public/bower_components/admin-lte/plugins/datatables/dataTables.bootstrap.min.js',
    'public/bower_components/external-plugins/sweetalert-master/dist/sweetalert.min.js',
    'public/bower_components/external-plugins/jquery-validate/jquery.validate.min.js',
    'resources/assets/js/helpers.js',
], 'public/js/plugins.js');
// Plugin stylesheet bundle — most entries are currently disabled; only
// toastr ships in plugins.css today.
mix.styles([
    /*'public/bower_components/external-plugins/ionicons/ionicons.min.css',
    'public/bower_components/admin-lte/bootstrap/css/font-awesome.min.css',
    'public/bower_components/admin-lte/plugins/daterangepicker/daterangepicker.css',
    'public/bower_components/admin-lte/plugins/datepicker/datepicker3.css',
    'public/bower_components/admin-lte/plugins/iCheck/minimal/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/square/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/flat/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/line/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/polaris/polaris.css',
    'public/bower_components/admin-lte/plugins/iCheck/futurico/futurico.css',
    'public/bower_components/admin-lte/plugins/iCheck/all.css',
    'public/bower_components/admin-lte/plugins/colorpicker/bootstrap-colorpicker.min.css',
    'public/bower_components/admin-lte/plugins/timepicker/bootstrap-timepicker.min.css',
    'public/bower_components/admin-lte/plugins/select2/select2.min.css',
    'public/bower_components/admin-lte/dist/css/skins/_all-skins.min.css',
    'public/bower_components/admin-lte/plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.min.css',
    'public/bower_components/admin-lte/plugins/datatables/dataTables.bootstrap.css',
    'public/bower_components/external-plugins/sweetalert-master/dist/sweetalert.css',
    'public/bower_components/external-plugins/jquery-validate/jquery.validate.css',*/
    'public/bower_components/external-plugins/toastr/toastr.css',
    /*
    CSS for the Gentelella theme
    'node_modules/gentelella/vendors/iCheck/skins/flat/green.css',
    'node_modules/gentelella/vendors/bootstrap-progressbar/css/bootstrap-progressbar-3.3.4.min.css',
    'node_modules/gentelella/css/maps/jquery-jvectormap-2.0.3.css',
    'node_modules/gentelella/build/css/custom.min.css',*/
], 'public/css/plugins.css');
// Main front-office entry point (JS + compiled Sass).
mix.js([
    'resources/assets/js/app.js'
], 'public/js/app.js')
    .sass('resources/assets/sass/app.scss', 'public/css/app.css');
/*mix.js([
    'resources/assets/js/painel.js'
], 'public/js/painel.js');*/
// Gentelella theme script bundle (charts, maps, pickers, custom glue).
mix.scripts([
    /*
    JS for the Gentelella theme
    */
    'public/bower_components/gentelella/vendors/fastclick/lib/fastclick.js',
    'public/bower_components/gentelella/vendors/nprogress/nprogress.js',
    'public/bower_components/gentelella/vendors/Chart.js/dist/Chart.min.js',
    'public/bower_components/gentelella/vendors/bernii/gauge.js/dist/gauge.min.js',
    'public/bower_components/gentelella/vendors/bootstrap-progressbar/bootstrap-progressbar.min.js',
    'public/bower_components/gentelella/vendors/skycons/skycons.js',
    'public/bower_components/gentelella/vendors/Flot/jquery.flot.js',
    'public/bower_components/gentelella/vendors/Flot/jquery.flot.pie.js',
    'public/bower_components/gentelella/vendors/Flot/jquery.flot.time.js',
    'public/bower_components/gentelella/vendors/Flot/jquery.flot.stack.js',
    'public/bower_components/gentelella/vendors/Flot/jquery.flot.resize.js',
    'public/bower_components/gentelella/production/js/flot/jquery.flot.orderBars.js',
    'public/bower_components/gentelella/production/js/flot/date.js',
    'public/bower_components/gentelella/production/js/flot/jquery.flot.spline.js',
    'public/bower_components/gentelella/production/js/flot/curvedLines.js',
    'public/bower_components/gentelella/production/js/maps/jquery-jvectormap-2.0.3.min.js',
    'public/bower_components/gentelella/production/js/moment/moment.min.js',
    'public/bower_components/gentelella/production/js/datepicker/daterangepicker.js',
    'public/bower_components/gentelella/production/js/maps/jquery-jvectormap-world-mill-en.js',
    'public/bower_components/gentelella/production/js/maps/jquery-jvectormap-us-aea-en.js',
    'public/bower_components/gentelella/production/js/maps/gdp-data.js',
    'public/bower_components/gentelella/build/js/custom.js',
], 'public/js/gentelella.js');
// Gentelella theme stylesheet bundle for the panel.
mix.styles([
    /*
    CSS for the Gentelella theme
    */
    'public/bower_components/gentelella/vendors/iCheck/skins/flat/green.css',
    'public/bower_components/gentelella/vendors/bootstrap-progressbar/css/bootstrap-progressbar-3.3.4.min.css',
    'public/bower_components/gentelella/production/css/maps/jquery-jvectormap-2.0.3.css',
    'public/bower_components/gentelella/build/css/custom.min.css',
], 'public/css/painel.css');
/* End of panel scripts */
// Back-office: standalone Bootstrap stylesheet.
mix.styles([
    'public/bower_components/admin-lte/bootstrap/css/bootstrap.css'
], 'public/backend/css/bootstrap.css');
// Back-office: aggregated AdminLTE stylesheet bundle.
mix.styles([
    'public/bower_components/external-plugins/toastr/toastr.css',
    'public/bower_components/external-plugins/ionicons/ionicons.min.css',
    'public/bower_components/admin-lte/bootstrap/css/font-awesome.min.css',
    'public/bower_components/admin-lte/plugins/daterangepicker/daterangepicker.css',
    'public/bower_components/admin-lte/plugins/datepicker/datepicker3.css',
    'public/bower_components/admin-lte/plugins/iCheck/minimal/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/square/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/flat/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/line/_all.css',
    'public/bower_components/admin-lte/plugins/iCheck/polaris/polaris.css',
    'public/bower_components/admin-lte/plugins/iCheck/futurico/futurico.css',
    'public/bower_components/admin-lte/plugins/iCheck/all.css',
    'public/bower_components/admin-lte/plugins/colorpicker/bootstrap-colorpicker.min.css',
    'public/bower_components/admin-lte/plugins/timepicker/bootstrap-timepicker.min.css',
    'public/bower_components/admin-lte/plugins/select2/select2.min.css',
    'public/bower_components/admin-lte/dist/css/AdminLTE.min.css',
    'public/bower_components/admin-lte/dist/css/skins/_all-skins.min.css',
    'public/bower_components/admin-lte/plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.min.css',
    'public/bower_components/admin-lte/plugins/datatables/dataTables.bootstrap.css',
    'public/bower_components/external-plugins/sweetalert-master/dist/sweetalert.css',
    'public/bower_components/external-plugins/jquery-validate/jquery.validate.css',
    'resources/assets/backend/css/main.css',
], 'public/backend/css/app.css');
// Back-office: jQuery and Bootstrap as separate bundles so pages can load
// them independently of the main app bundle.
mix.scripts([
    'public/bower_components/admin-lte/plugins/jQuery/jquery-2.2.3.min.js'
], 'public/backend/js/jquery.js');
mix.scripts([
    'public/bower_components/admin-lte/bootstrap/js/bootstrap.min.js'
], 'public/backend/js/bootstrap.js');
// Back-office: main plugin/app script bundle.
mix.scripts([
    'public/bower_components/external-plugins/toastr/toastr.js',
    'public/bower_components/admin-lte/plugins/moment/moment.min.js',
    'public/bower_components/admin-lte/plugins/select2/select2.full.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.date.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.numeric.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.phone.extensions.js',
    'public/bower_components/admin-lte/plugins/input-mask/jquery.inputmask.regex.extensions.js',
    'node_modules/jquery-maskmoney/dist/jquery.maskMoney.min.js',
    'public/bower_components/admin-lte/plugins/daterangepicker/daterangepicker.js',
    'public/bower_components/admin-lte/plugins/datepicker/bootstrap-datepicker.js',
    'public/bower_components/admin-lte/plugins/colorpicker/bootstrap-colorpicker.min.js',
    'public/bower_components/admin-lte/plugins/timepicker/bootstrap-timepicker.js',
    'public/bower_components/admin-lte/plugins/slimScroll/jquery.slimscroll.js',
    'public/bower_components/admin-lte/plugins/iCheck/icheck.min.js',
    'public/bower_components/admin-lte/plugins/fastclick/fastclick.js',
    'public/bower_components/admin-lte/plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.all.min.js',
    'public/bower_components/admin-lte/plugins/datatables/jquery.dataTables.min.js',
    'public/bower_components/admin-lte/plugins/datatables/dataTables.bootstrap.min.js',
    'public/bower_components/external-plugins/sweetalert-master/dist/sweetalert.min.js',
    'public/bower_components/external-plugins/jquery-validate/jquery.validate.min.js',
    'public/bower_components/admin-lte/dist/js/app.js',
    'public/bower_components/admin-lte/dist/js/demo.js',
    'resources/assets/backend/js/helpers.js',
], 'public/backend/js/app.js');
// Back-office: feature scripts (auth + the various fitness-test pages).
mix.scripts([
    'resources/assets/backend/js/auth/auth.js',
    'resources/assets/backend/js/tests/frequency-heart/frequency-heart.js',
    'resources/assets/backend/js/tests/vo2/vo2.js',
    'resources/assets/backend/js/tests/resistance/resistance.js',
    'resources/assets/backend/js/tests/target-zone/target-zone.js',
    'resources/assets/backend/js/tests/flexibility/flexibility.js',
    'resources/assets/backend/js/tests/additional_data/additional-data.js',
], 'public/backend/js/main.js');
/*
//Styles for the admin login screen
mix.styles([
    'resources/assets/dist/css/AdminLTE.css',
    'resources/assets/plugins/iCheck/square/blue.css',
], 'public/css/backend/login.css');
//Scripts for the admin login screen
mix.scripts([
    'resources/assets/plugins/iCheck/icheck.min.js'
], 'public/js/backend/login.js');
*/
/*mix.combine([
    'resources/assets/js/backend/plugins/jQuery/jquery-2.2.3.min.js',
    'resources/assets/js/backend/plugins/jQueryUI/jquery-ui.js',
], 'public/js/backend/backend.js');*/
/*mix.js('resources/assets/js/app.js', 'public/js')
    .sass('resources/assets/sass/app.scss', 'public/css');*/
//mix.version();
<gh_stars>0
package base;
import java.util.ArrayList;
import java.util.List;
/**
*
* 内存查看
* @author angilin
*
*/
/**
 * Small heap-inspection experiment: allocates batches of String arrays
 * (kept reachable so they cannot be collected) while printing JVM heap
 * statistics, to observe per-object memory costs.
 */
public class MemoryScanner {

    /**
     * Allocates 3 rounds of 5000 String[1000] arrays and prints
     * used/free/total/max heap figures after each round.
     *
     * @throws Exception if the sleep between rounds is interrupted
     */
    public static void main(String[] args) throws Exception {
        int mb = 1024 * 1024;
        Runtime runtime = Runtime.getRuntime();
        System.out.println("##### Heap utilization statistics [MB] #####");
        // Parameterized with generics (was a raw List/ArrayList).
        List<String[]> testList = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            // These look like short-lived objects waiting for collection,
            // yet adding extra temporaries had no visible memory impact.
            Thread.sleep(1000);
            for (int i1 = 0; i1 < 5000; i1++) {
                String[] bytes = new String[1000];
                for (int j = 0; j < 1000; j++) {
                    bytes[j] = new String("1");
                    //String a = new String("12");
                    //a.substring(0);
                }
                testList.add(bytes);
            }
            //http://www.importnew.com/1305.html  computing Java object sizes
            //http://mercyblitz.iteye.com/blog/710998  Java object size in depth
            // Observed sizes (bytes): a String array has an 8-byte header plus a
            // 4-byte length; 1000 empty String slots hold only references.
            //empty Long,4  Long=1L,4  Long=111111L,28  Long=111111111111L,28
            // Long=new Long(1),28  Long=new Long(111111),28  Long=new Long(111111111111),28
            //empty long,8  long=1L,8  long=111111L,8  long=111111111111L,8
            //empty Byte,4  Byte=1,4
            // Byte=Byte.valueOf("1"),20
            //empty byte,1  byte=1,1
            //empty String,4  String="",4  String="1",4
            // String=new String(""),36  String=new String("1"),36
            System.out.println("Used Memory:" + (runtime.totalMemory() - runtime.freeMemory()) / mb + "M");
            // 5,000,000 = objects allocated per round (5000 arrays x 1000 slots).
            System.out.println("average:" + (runtime.totalMemory() - runtime.freeMemory()) / 5000000);
        }
        System.out.println("Free Memory:" + runtime.freeMemory() / mb + "M");
        System.out.println("Total Memory:" + runtime.totalMemory() / mb + "M");
        System.out.println("Max Memory:" + runtime.maxMemory() / mb + "M");
    }

    /**
     * Get currently used JVM heap memory.
     *
     * @return used heap in MB
     */
    public static long getUsedMemory() {
        int mb = 1024 * 1024;
        Runtime runtime = Runtime.getRuntime();
        return (runtime.totalMemory() - runtime.freeMemory()) / mb;
    }
}
|
-- Check for negative balances
-- Some balances are very small negative numbers due to loss of precision from large ints
-- dbt schema test: any row returned here is a failure.
select amount
from {{ ref('balances_ethereum_erc20_day') }}
-- Rounding to 6 decimals (after scaling; assumes 18-decimal tokens — the
-- listed symbols all are) discards the tiny sub-1e-6 precision artifacts
-- mentioned above so only genuinely negative balances are flagged.
where round(amount/power(10, 18), 6) < 0
-- limiting to a selection of tokens because we haven't filtered out all non-compliant tokens
and symbol in ('AAVE', 'DAI', 'UNI', 'LINK')
#!/bin/bash
# WARNING: DO NOT EDIT, THIS FILE IS PROBABLY A COPY
#
# The original version of this file is located in the https://github.com/istio/common-files repo.
# If you're looking at this file in a different repo and want to make a change, please go to the
# common-files repo, make the change there and check it in. Then come back to this repo and run
# "make update-common".
# Copyright Istio Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script builds and version stamps the output

# Verbose mode echoes commands both here (set -x) and in helper scripts (-x).
VERBOSE=${VERBOSE:-"0"}
V=""
if [[ "${VERBOSE}" == "1" ]];then
    V="-x"
    set -x
fi

SCRIPTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Required positional arguments: app name and output binary path.
APP_NAME=${1:?"app name"}
OUT=${2:?"output path"}
shift

set -e

# Toolchain / cross-compilation knobs, all overridable from the environment.
GOOS=${GOOS:-linux}
GOARCH=${GOARCH:-amd64}
GOBINARY=${GOBINARY:-go}
GOPKG="$GOPATH/pkg"
BUILDINFO=${BUILDINFO:-""}
STATIC=${STATIC:-1}
LDFLAGS="-extldflags -static"
GOBUILDFLAGS=${GOBUILDFLAGS:-""}
GCFLAGS=${GCFLAGS:-}
# Split GOBUILDFLAGS by spaces into an array called GOBUILDFLAGS_ARRAY.
# NOTE(review): GOBUILDFLAGS_ARRAY is not referenced below — presumably kept
# for callers/sourcing scripts; confirm before removing.
IFS=' ' read -r -a GOBUILDFLAGS_ARRAY <<< "$GOBUILDFLAGS"
# (fix) removed a duplicated no-op 'GCFLAGS=${GCFLAGS:-}' line that appeared
# a second time here.
export CGO_ENABLED=0

if [[ "${STATIC}" != "1" ]];then
    LDFLAGS=""
fi

# gather buildinfo if not already provided
# For a release build BUILDINFO should be produced
# at the beginning of the build and used throughout
if [[ -z ${BUILDINFO} ]];then
    BUILDINFO=$(mktemp)
    "${SCRIPTPATH}/report_build_info.sh" ${APP_NAME} > "${BUILDINFO}"
fi

# BUILD LD_EXTRAFLAGS: one -X flag per line of build info.
LD_EXTRAFLAGS=""
while read -r line; do
    LD_EXTRAFLAGS="${LD_EXTRAFLAGS} -X ${line}"
done < "${BUILDINFO}"

# verify go version before build
# NB. this was copied verbatim from Kubernetes hack
minimum_go_version=go1.13 # supported patterns: go1.x, go1.x.x (x should be a number)
IFS=" " read -ra go_version <<< "$(${GOBINARY} version)"
if [[ "${minimum_go_version}" != $(echo -e "${minimum_go_version}\n${go_version[2]}" | sort -s -t. -k 1,1 -k 2,2n -k 3,3n | head -n1) && "${go_version[2]}" != "devel" ]]; then
    echo "Warning: Detected that you are using an older version of the Go compiler. APP requires ${minimum_go_version} or greater."
fi

# -trimpath keeps file paths out of the binary; disabled for debug builds.
OPTIMIZATION_FLAGS="-trimpath"
if [ "${DEBUG}" == "1" ]; then
    OPTIMIZATION_FLAGS=""
fi

#echo time ${GOBINARY} build ${V} ${GOBUILDFLAGS} ${GCFLAGS:+-gcflags "${GCFLAGS}"} -o ${OUT} \
#       -pkgdir=${GOPKG}/${GOOS}_${GOARCH} -ldflags "${LDFLAGS} ${LD_EXTRAFLAGS}"
time ${GOBINARY} build ${V} ${GOBUILDFLAGS} ${GCFLAGS:+-gcflags "${GCFLAGS}"} -o ${OUT} \
        -pkgdir=${GOPKG}/${GOOS}_${GOARCH} -ldflags "${LDFLAGS} ${LD_EXTRAFLAGS}"
#time GOOS=${GOOS} GOARCH=${GOARCH} ${GOBINARY} build ${V} ${GOBUILDFLAGS} ${GCFLAGS:+-gcflags "${GCFLAGS}"} -o ${OUT} \
<filename>src/main/java/com/bullhornsdk/data/model/response/list/customobject/ClientCorporationCustomObjectInstance26ListWrapper.java
package com.bullhornsdk.data.model.response.list.customobject;
import com.bullhornsdk.data.model.entity.core.customobject.ClientCorporationCustomObjectInstance26;
import com.bullhornsdk.data.model.response.list.StandardListWrapper;
/**
 * Concrete {@link StandardListWrapper} binding the generic list-response
 * wrapper to {@link ClientCorporationCustomObjectInstance26}; no extra
 * behavior is added.
 */
public class ClientCorporationCustomObjectInstance26ListWrapper extends StandardListWrapper<ClientCorporationCustomObjectInstance26> {
}
|
/** Numeric error codes reported by the image editor to its callback. */
public class ImageEditorError {
    // Generic catch-all failure code.
    public static final int UNEXPECTED_ERROR = 500;
    // Other error codes and constants
}
/** Receives asynchronous results and errors from the image editor. */
public class CallbackContext {
    /**
     * Report a failure to the caller.
     *
     * @param errorCode numeric code identifying the failure, e.g.
     *                  {@code ImageEditorError.UNEXPECTED_ERROR}
     */
    public void error(int errorCode) {
        // Handle error reporting logic
    }
    // Other callback methods
}
/** Image editing operations; retains the most recent failure for inspection. */
public class ImageEditor {
    // Most recent exception passed to handleImageEditingError.
    private Exception lastException;

    /**
     * Record an editing failure and forward the error code to the callback.
     *
     * @param e               the exception that caused the failure; stored in
     *                        {@code lastException}
     * @param callbackContext callback notified via {@code error(errorType)}
     * @param errorType       numeric error code forwarded to the callback
     * @return always {@code false}, signalling that the operation failed
     */
    public boolean handleImageEditingError(Exception e, CallbackContext callbackContext, int errorType) {
        lastException = e;
        callbackContext.error(errorType);
        return false;
    }
}
<gh_stars>0
package net.alteridem.mileage.data;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import net.alteridem.mileage.R;
/**
* Created by <NAME> on 13/06/13.
*/
/**
 * SQLite open helper for the mileage database: creates the Vehicle and
 * Entry tables, seeds a default vehicle, and delegates schema upgrades
 * to the individual table classes.
 */
public class DbHelper extends SQLiteOpenHelper {
    static final String TAG = DbHelper.class.getSimpleName();
    static final String DB_NAME = "mileage.db";
    static final int DB_VERSION = 2;

    Context context;

    public DbHelper(Context context) {
        super(context, DB_NAME, null, DB_VERSION);
        this.context = context;
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        // Build the schema first, then seed it with a single default vehicle.
        Vehicle.createTable(db);
        Entry.createTable(db);
        String defaultName = context.getString(R.string.default_vehicle);
        new Vehicle(0, defaultName).save(db);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        Log.d(TAG, "onUpgrade");
        // Each table class migrates its own schema between versions.
        Vehicle.upgradeTable(db, oldVersion, newVersion);
        Entry.upgradeTable(db, oldVersion, newVersion);
    }
}
|
<gh_stars>0
import React from 'react'
import Helmet from 'react-helmet'
import { graphql } from 'gatsby'
import Layout from 'src/components/layout'
import PostList from 'src/components/post-list'
import { TagPageQuery } from 'autogenerated/graphql-types'
/** Props handed to the tag-archive page template by Gatsby. */
type Props = {
  /** Result of the TagPage GraphQL query (news posts matching the tag). */
  data: TagPageQuery
  /** Context supplied when the page was created (gatsby-node). */
  pageContext: {
    currentPage: number
    numPages: number
    /** Display name of the tag. */
    name: string
  }
}
/**
 * Tag archive page: renders every news post carrying the given tag.
 *
 * Cleanup: the previously destructured `totalCount` was never used, and the
 * `?? []` after `.map()` was dead code (`Array.map` never returns nullish).
 */
const Tag: React.FC<Props> = (props) => {
  const { data, pageContext } = props
  const { name: tag } = pageContext
  // Flatten the GraphQL edge wrappers into plain post nodes.
  const posts = data.allWpNews.edges.map((edge) => edge.node)

  return (
    <Layout
      title={tag}
      breadcrumbs={[
        { title: 'Birkas', path: null },
        // NOTE(review): the path is built from the tag *name*, not a slug —
        // confirm this matches the route created in gatsby-node.
        { title: tag, path: `/tags/${tag}` },
      ]}
    >
      <PostList posts={posts} />
    </Layout>
  )
}
// Page query: all news posts whose tag list contains the page's slug.
// (The currentPage/numPages values in Props.pageContext are not used here.)
export const pageQuery = graphql`
  query TagPage($slug: String!) {
    allWpNews(
      filter: { tags: { nodes: { elemMatch: { slug: { eq: $slug } } } } }
    ) {
      totalCount
      edges {
        node {
          ...PostListFields
        }
      }
    }
  }
`
|
<gh_stars>0
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
// Fauxton core addon: global notifications, UUID helper, navbar/breadcrumb
// wiring run before each route is established, and the Notification view.
define([
  "app",
  "api",
  "addons/fauxton/components",
  'addons/fauxton/notifications/notifications.react',
  'addons/fauxton/notifications/actions',
  "addons/fauxton/navigation/components.react",
  "addons/fauxton/navigation/actions",
  'addons/components/react-components.react',
  'addons/components/actions'
],
function (app, FauxtonAPI, Components, NotificationComponents, Actions, NavbarReactComponents, NavigationActions,
  ReactComponents, ComponentActions) {

  var Fauxton = FauxtonAPI.addon();

  // Show a global notification. Options are merged over defaults, recorded
  // in the notification store, and rendered as a Notification view.
  FauxtonAPI.addNotification = function (options) {
    options = _.extend({
      msg: "Notification Event Triggered!",
      type: "info",
      selector: "#global-notifications",
      escape: true
    }, options);

    // log all notifications in a store
    Actions.addNotification(options);
    var view = new Fauxton.Notification(options);
    return view.renderNotification();
  };

  // Model that fetches `count` server-generated UUIDs from /_uuids and
  // hands them out one at a time via next().
  FauxtonAPI.UUID = FauxtonAPI.Model.extend({
    initialize: function (options) {
      options = _.extend({count: 1}, options);
      this.count = options.count;
    },

    url: function () {
      return app.host + "/_uuids?count=" + this.count;
    },

    next: function () {
      return this.get("uuids").pop();
    }
  });

  Fauxton.initialize = function () {
    // Before every route: sync navbar highlight, API bar, notification
    // button and breadcrumbs to the incoming route object.
    FauxtonAPI.RouteObject.on('beforeEstablish', function (routeObject) {
      NavigationActions.setNavbarActiveLink(_.result(routeObject, 'selectedHeader'));

      // always attempt to render the API Bar. Even if it's hidden on initial load, it may be enabled later
      routeObject.setComponent('#api-navbar', ReactComponents.ApiBarController, {
        buttonVisible: true,
        contentVisible: false
      });

      if (routeObject.get('apiUrl')) {
        // apiUrl is a pair: [endpoint, documentation URL].
        var apiAndDocs = routeObject.get('apiUrl');
        ComponentActions.updateAPIBar({
          buttonVisible: true,
          contentVisible: false,
          endpoint: apiAndDocs[0],
          docURL: apiAndDocs[1]
        });
      } else {
        ComponentActions.hideAPIBarButton();
      }

      if (!routeObject.get('hideNotificationCenter')) {
        routeObject.setComponent('#notification-center-btn', NotificationComponents.NotificationCenterButton);
      }

      // Routes may opt out of the default breadcrumb rendering entirely.
      if (routeObject.overrideBreadcrumbs) { return; }

      FauxtonAPI.masterLayout.removeView('#breadcrumbs');
      var crumbs = routeObject.get('crumbs');
      if (crumbs.length) {
        FauxtonAPI.masterLayout.setView('#breadcrumbs', new Components.Breadcrumbs({
          crumbs: crumbs
        }), true).render();
      }
    });

    // Mount the React navbar and notification center if their anchor
    // elements exist in the page.
    var primaryNavBarEl = $('#primary-navbar')[0];
    if (primaryNavBarEl) {
      NavbarReactComponents.renderNavBar(primaryNavBarEl);
    }

    var notificationCenterEl = $('#notification-center')[0];
    if (notificationCenterEl) {
      NotificationComponents.renderNotificationCenter(notificationCenterEl);
    }

    // Fetch the server version and surface it in the navbar.
    var versionInfo = new Fauxton.VersionInfo();
    versionInfo.fetch().then(function () {
      NavigationActions.setNavbarVersionInfo(versionInfo.get("version"));
    });
  };

  // Bare model whose fetch() hits the server root (which reports version info).
  Fauxton.VersionInfo = Backbone.Model.extend({
    url: function () {
      return app.host;
    }
  });

  // A single toast-style notification: slides in, auto-dismisses after
  // animationTimer ms, and can be closed by click or the ESC key.
  Fauxton.Notification = FauxtonAPI.View.extend({
    animationTimer: 8000,
    id: 'global-notification-id',

    events: {
      'click .js-dismiss': 'onClickRemoveWithAnimation'
    },

    initialize: function (options) {
      this.htmlToRender = options.msg;
      // escape always, except the value is false
      if (options.escape !== false) {
        this.htmlToRender = _.escape(this.htmlToRender);
      }
      this.type = options.type || "info";
      this.selector = options.selector;
      // NOTE(review): this.fade is stored but not referenced anywhere in
      // this view — confirm whether it is still needed.
      this.fade = options.fade === undefined ? true : options.fade;
      this.data = options.data || "";
      this.template = options.template || "addons/fauxton/templates/notification";
    },

    serialize: function () {
      // Pick the icon matching the notification type.
      var icon;
      switch (this.type) {
        case 'error':
          icon = 'fonticon-attention-circled';
          break;
        case 'info':
          icon = 'fonticon-info-circled';
          break;
        case 'success':
          icon = 'fonticon-ok-circled';
          break;
        default:
          icon = 'fonticon-info-circled';
          break;
      }
      return {
        icon: icon,
        data: this.data,
        htmlToRender: this.htmlToRender,
        type: this.type
      };
    },

    // Manual dismiss: cancel the pending auto-removal first.
    onClickRemoveWithAnimation: function (event) {
      event.preventDefault();
      window.clearTimeout(this.timeout);
      this.removeWithAnimation();
    },

    removeWithAnimation: function () {
      this.$el.velocity('reverse', FauxtonAPI.constants.MISC.TRAY_TOGGLE_SPEED, function () {
        this.$el.remove();
        this.removeCloseListener();
      }.bind(this));
    },

    // Listen for ESC (namespaced so only this binding is affected).
    addCloseListener: function () {
      $(document).on('keydown.notificationClose', this.onKeyDown.bind(this));
    },

    onKeyDown: function (e) {
      var code = e.keyCode || e.which;
      if (code === 27) { // ESC key
        this.removeWithAnimation();
      }
    },

    removeCloseListener: function () {
      // NOTE(review): the handler registered above is a fresh
      // this.onKeyDown.bind(this), but this off() passes
      // this.removeWithAnimation — a different function — so jQuery will
      // not match and unbind it. Passing just the namespace
      // ('keydown.notificationClose') would remove it; confirm and fix.
      $(document).off('keydown.notificationClose', this.removeWithAnimation);
    },

    // Schedule automatic dismissal after animationTimer ms.
    delayedRemoval: function () {
      this.timeout = setTimeout(function () {
        this.removeWithAnimation();
      }.bind(this), this.animationTimer);
    },

    renderNotification: function (selector) {
      selector = selector || this.selector;
      // NOTE(review): this.clear is never assigned in initialize (options
      // are not copied onto it), so this branch looks unreachable — verify.
      if (this.clear) {
        $(selector).html('');
      }
      this.render().$el.appendTo(selector);
      this.$el.velocity('transition.slideDownIn', FauxtonAPI.constants.MISC.TRAY_TOGGLE_SPEED);
      this.delayedRemoval();
      this.addCloseListener();
      return this;
    }
  });

  return Fauxton;
});
|
#!/bin/bash
#SBATCH -J test_BL1.po
#SBATCH -p dl
#SBATCH --gres=gpu:v100:1
#SBATCH -o log/test_BL1.po.log
#SBATCH -e log/test_BL1.po.err
#SBATCH --mail-type=END,FAIL
#SBATCH --mail-user=bizi@iu.edu
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=1
#SBATCH --time=48:00:00
#SBATCH --mem=32G
module load deeplearning/2.3.0
cd ..

# Evaluate every model architecture on the BL1.po network against each
# comparison dataset. Identical commands, in the identical order, as the
# previous 130 copy-pasted lines: datasets in the outer loop, models in the
# inner loop.
models="onehot_cnn_one_branch onehot_cnn_two_branch onehot_embedding_dense \
onehot_dense onehot_resnet18 embedding_cnn_one_branch embedding_cnn_two_branch \
embedding_dense onehot_embedding_cnn_one_branch onehot_embedding_cnn_two_branch"
datasets="NPC.po GM.po BL1.po AO.po LI11.po CM.po H1.po TH1.po IMR90.po MSC.po HCmerge.po SX.po X5628FC.po"

for dataset in $datasets; do
    for model in $models; do
        python3 DeepChromeHiC.py -m "$model" -t test -n BL1.po -o "$dataset"
    done
done
|
#!/bin/bash
set -e
# If the first argument looks like an option flag, prepend the default
# "graylog" command so flags can be passed directly to the image.
if [ "${1:0:1}" = '-' ]; then
set -- graylog "$@"
fi
# Drop root privileges if we are running graylog.
# This allows the container to be started with `--user`.
if [ "$1" = 'graylog' -a "$(id -u)" = '0' ]; then
# Change the ownership of user-mutable directories to the graylog user
# (the surrounding comments previously said "elasticsearch" — a copy-paste
# leftover; this script manages graylog).
for path in \
/opt/graylog/data \
/opt/graylog/config \
; do
chown -R graylog:graylog "$path"
done
# Re-exec the command as the unprivileged graylog user via su-exec.
set -- su-exec graylog "$@"
fi
# As argument is not related to Graylog,
# then assume that user wants to run their own process,
# for example a `bash` shell to explore this image
exec "$@"
|
package models
import (
"time"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
)
const (
	// membersColName is the MongoDB collection name for member documents.
	membersColName = "members"
)

// Member represents a member document stored in the "members" collection.
// A member belongs to a site (SiteID) and may reference a photo Image.
type Member struct {
	// dbSession is the owning session; excluded from persistence (bson:"-").
	dbSession *DBSession `bson:"-"`

	ID          bson.ObjectId `bson:"_id,omitempty" json:"id"`
	CreatedAt   time.Time     `bson:"created_at" json:"createdAt"`
	UpdatedAt   time.Time     `bson:"updated_at" json:"updatedAt"`
	SiteID      string        `bson:"site_id" json:"site"`
	Fullname    string        `bson:"fullname" json:"fullname"`
	Role        string        `bson:"role" json:"role"`
	Description string        `bson:"description" json:"description"`
	// Photo references an Image document by id (see FindPhoto).
	Photo bson.ObjectId `bson:"photo,omitempty" json:"photo,omitempty"`
	// Order controls display ordering; indexed together with site_id.
	Order int `bson:"order" json:"order"`
}

// MembersList represents a list of Member
type MembersList []*Member
//
// DBSession
//
// MembersCol returns the MongoDB collection that stores member documents.
func (session *DBSession) MembersCol() *mgo.Collection {
	db := session.DB()
	return db.C(membersColName)
}
// EnsureMembersIndexes ensures the compound (site_id, order) index exists on
// the members collection. Panics if index creation fails, matching the
// fail-fast behavior expected at startup.
func (session *DBSession) EnsureMembersIndexes() {
	idx := mgo.Index{
		Key:        []string{"site_id", "order"},
		Background: true,
	}
	if err := session.MembersCol().EnsureIndex(idx); err != nil {
		panic(err)
	}
}
// FindMember finds a member by id. Returns nil when no matching document
// exists (lookup errors are treated as "not found").
func (session *DBSession) FindMember(memberID bson.ObjectId) *Member {
	member := &Member{}
	if err := session.MembersCol().FindId(memberID).One(member); err != nil {
		return nil
	}
	member.dbSession = session
	return member
}
// CreateMember creates a new member in database
// Side effect: 'ID', 'CreatedAt' and 'UpdatedAt' fields are set on member record
func (session *DBSession) CreateMember(member *Member) error {
	now := time.Now()
	member.ID = bson.NewObjectId()
	member.CreatedAt = now
	member.UpdatedAt = now

	err := session.MembersCol().Insert(member)
	if err != nil {
		return err
	}

	// Bind the record to this session so follow-up lookups work.
	member.dbSession = session
	return nil
}
// RemoveImageReferencesFromMembers removes all references to given image from all members
// NOTE(review): this is an unimplemented stub (@todo) — it currently does
// nothing and always returns nil. Callers that delete images should not yet
// rely on member photo references being cleaned up.
func (session *DBSession) RemoveImageReferencesFromMembers(image *Image) error {
// @todo
return nil
}
//
// Member
//
// FindSite fetches the site this member belongs to, via the member's
// bound session.
func (member *Member) FindSite() *Site {
	session := member.dbSession
	return session.FindSite(member.SiteID)
}
// FindPhoto fetches the member's photo Image from database. Returns nil when
// the member has no photo or the referenced image cannot be found.
func (member *Member) FindPhoto() *Image {
	// Guard clause: no photo reference, nothing to look up.
	if member.Photo == "" {
		return nil
	}

	photo := &Image{}
	if err := member.dbSession.ImagesCol().FindId(member.Photo).One(photo); err != nil {
		return nil
	}
	photo.dbSession = member.dbSession
	return photo
}
// Delete deletes member from database.
// Returns the removal error, if any; the in-memory struct is not mutated.
func (member *Member) Delete() error {
	// Idiom fix: the previous 'var err error' scratch variable was
	// unnecessary — scope the error to the if statement instead.
	if err := member.dbSession.MembersCol().RemoveId(member.ID); err != nil {
		return err
	}
	return nil
}
// Update updates member in database with the values from newMember, building
// a minimal $set/$unset modifier from the fields that actually changed.
// Returns (true, err) when an update was attempted, (false, nil) when nothing
// changed. Side effect: member's fields and UpdatedAt are mutated in memory.
func (member *Member) Update(newMember *Member) (bool, error) {
	var set, unset, modifier bson.D

	// Fullname
	if member.Fullname != newMember.Fullname {
		member.Fullname = newMember.Fullname
		if member.Fullname == "" {
			unset = append(unset, bson.DocElem{"fullname", 1})
		} else {
			set = append(set, bson.DocElem{"fullname", member.Fullname})
		}
	}

	// Role
	if member.Role != newMember.Role {
		member.Role = newMember.Role
		if member.Role == "" {
			unset = append(unset, bson.DocElem{"role", 1})
		} else {
			set = append(set, bson.DocElem{"role", member.Role})
		}
	}

	// Description
	if member.Description != newMember.Description {
		member.Description = newMember.Description
		if member.Description == "" {
			unset = append(unset, bson.DocElem{"description", 1})
		} else {
			set = append(set, bson.DocElem{"description", member.Description})
		}
	}

	// Photo
	if member.Photo != newMember.Photo {
		member.Photo = newMember.Photo
		if member.Photo == "" {
			unset = append(unset, bson.DocElem{"photo", 1})
		} else {
			set = append(set, bson.DocElem{"photo", member.Photo})
		}
	}

	// Order
	if member.Order != newMember.Order {
		member.Order = newMember.Order
		set = append(set, bson.DocElem{"order", member.Order})
	}

	// BUG FIX: updated_at must be appended to 'set' BEFORE the modifier is
	// assembled. The previous code appended it after bson.DocElem{"$set", set}
	// had already been built, so the timestamp was lost whenever append
	// reallocated the slice — and it was never persisted at all when the
	// change consisted only of unsets (empty 'set').
	if len(set) > 0 || len(unset) > 0 {
		member.UpdatedAt = time.Now()
		set = append(set, bson.DocElem{"updated_at", member.UpdatedAt})
	}

	if len(unset) > 0 {
		modifier = append(modifier, bson.DocElem{"$unset", unset})
	}

	if len(set) > 0 {
		modifier = append(modifier, bson.DocElem{"$set", set})
	}

	if len(modifier) > 0 {
		return true, member.dbSession.MembersCol().UpdateId(member.ID, modifier)
	}
	return false, nil
}
|
<reponame>mmvvpp123/Reminderse-API<filename>components/SkeletonCard.tsx
import React from "react";
/**
 * Loading placeholder that mirrors the layout of a content card:
 * a pulsing circular image area (two of three grid rows) above three
 * shimmering text bars of decreasing height. Purely presentational —
 * takes no props and renders static markup.
 */
export function SkeletonCard() {
return (
<div className="main-card rounded-box shadow-sm">
<div className="animate-pulse flex flex-col space-y-4 grid grid-rows-3">
<div className="mx-auto imgArea row-span-2">
<div className="rounded-full bg-secondary-content h-48 w-48"></div>
</div>
<div className="bg-primary-content cardInfo space-y-5">
<div className="h-4 bg-secondary-content rounded"></div>
<div className="h-3 bg-secondary-content rounded"></div>
<div className="h-2 bg-secondary-content rounded"></div>
</div>
</div>
</div>
);
}
|
The main difference between a list and a tuple is that a list is mutable (can be changed) while a tuple is immutable (cannot be changed). This means that elements in a list can be added, removed, or reordered, while a tuple cannot be modified after it is created. Lists use square brackets ([]), while tuples use parentheses (()). Additionally, because tuples are immutable (and hashable when all their elements are), they can be used as dictionary keys or set members, whereas lists cannot.
/*
* Copyright 2012 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package haibison.android.lockpattern.utils;
import android.content.Context;
import java.util.List;
import haibison.android.lockpattern.widget.LockPatternView.Cell;
import haibison.android.underdogs.NonNull;
/**
 * Interface for encrypter.
 * <p>
 * Implementations are expected to make {@link #decrypt(Context, char[])} the
 * inverse of {@link #encrypt(Context, List)}: decrypting an encrypted pattern
 * yields the original pattern.
 *
 * @author <NAME>
 * @since v2 beta
 */
public interface Encrypter {
/**
 * Encrypts {@code pattern}.
 *
 * @param context the context.
 * @param pattern the pattern in the form of a list of {@link Cell}.
 * @return the encrypted char array of the pattern.
 * @since v2.1 beta
 */
@NonNull
char[] encrypt(@NonNull Context context, @NonNull List<Cell> pattern);
/**
 * Decrypts an encrypted pattern.
 *
 * @param context          the context.
 * @param encryptedPattern the encrypted pattern, as produced by
 *                         {@link #encrypt(Context, List)}.
 * @return the original pattern.
 */
@NonNull
List<Cell> decrypt(@NonNull Context context, @NonNull char[] encryptedPattern);
}
#!/bin/sh
# Run this script from the root directory just before starting to contribute code.
#================================= Tooling =================================
# Build pulls in the dev tools declared in the package manifest.
# FIX: the message previously had an unbalanced parenthesis
# ("...SourceDocs" with no closing paren).
echo "Installing tools (SwiftLint, SwiftFormat, and SourceDocs)"
swift build
echo "Installing pre-commit hook"
swift run komondor install
|
package main.fieldStaticAndOverridesStatic;
// Deliberately empty subclass of FieldStaticAndOverridesStaticS.
// Presumably a test fixture exercising static-field hiding/inheritance
// resolution (per the package name) — confirm against the test that
// references this class.
public class FieldStaticAndOverridesStatic extends FieldStaticAndOverridesStaticS {
}
|
#!/bin/bash
set -ev

# Pull each winebuild image variant used by the build pipeline.
for tag in Linux40x AppImage40x Wine41x; do
    docker pull "zebralucky/electrum-dash-winebuild:${tag}"
done
# Apply the workload manifests in order: deployment first, then its service.
for manifest in deployment.yaml service.yaml; do
    kubectl apply -f "$manifest"
done
|
#!/bin/bash
set -eu

# Resolve this script's directory so the policy path works from any CWD.
here="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"

# FIX: typo in the user-facing message ("polices" -> "policies").
echo "Testing OPA policies"
opa test -v "${here}/../helmfile/charts/gatekeeper-templates/policies/"
|
#!/bin/bash
#
# Normalize a face down image by flipping it over like a pancake.
#
# Temporarily creates working files in the current working directory.
#
if [ $# -ne 2 ]; then
    echo "Usage: $(basename "$0") [source path] [target path]"
    exit 65
fi

# Exit on error
set -e

Vaa3D="/app/vaa3d/vaa3d"
SOURCE_FILE=$1
TARGET_FILE=$2

echo "Vaa3D: $Vaa3D"
echo "Source: $SOURCE_FILE"
echo "Target: $TARGET_FILE"

TEMP_FILE1="ymirrored.v3draw"
#TEMP_FILE2="zflipped.v3draw"

# ROBUSTNESS FIX: all expansions below are quoted so source/target paths
# containing spaces no longer break word splitting.
# NOTE(review): the message says Y-axis but the plugin function is 'xflip' —
# confirm which axis ireg's xflip actually mirrors.
echo "Mirroring in Y-axis"
time "$Vaa3D" -x ireg -f xflip -i "$SOURCE_FILE" -o "$TEMP_FILE1"

echo "Mirroring in Z-axis"
time "$Vaa3D" -x ireg -f zflip -i "$TEMP_FILE1" -o "$TARGET_FILE"

# This is useful for images that are rotated 45 degrees, but it messes up images that are not rotated to begin with
#echo "Rotating back"
#time $Vaa3D -x rotate -f left90 -i $TEMP_FILE2 -o $TARGET_FILE

echo "Removing temporary files"
rm "$TEMP_FILE1"
#rm $TEMP_FILE2

echo "Flip completed"
|
package io.cattle.platform.inator;
import java.util.Collection;
/**
 * A unit of work managed by the inator engine: something whose state can be
 * defined, scheduled, and removed, and that may depend on other units.
 */
public interface Unit {
    /** Coarse outcome/health of a unit after evaluation. */
    enum UnitState {
        GOOD,
        WAITING,
        ERROR
    }
    /** Schedules whatever actions this unit needs; returns the outcome. */
    Result scheduleActions(InatorContext context);
    /**
     * Defines the unit's state. The {@code desired} flag presumably selects
     * desired-state vs. current-state semantics — confirm against an
     * implementation.
     */
    Result define(InatorContext context, boolean desired);
    /** Other units this unit depends on, for ordering/resolution. */
    Collection<UnitRef> dependencies(InatorContext context);
    /** Stable reference identifying this unit. */
    UnitRef getRef();
    /** Removes the unit; returns the outcome. */
    Result remove(InatorContext context);
    /** Human-readable name for logs and display. */
    String getDisplayName();
}
|
<gh_stars>1-10
import { File } from '../walker/index.js';
import prettier from 'prettier';
// Options for prettier, TODO: move to WalkerAS
const prettierOptions: prettier.Options = {
parser: 'typescript',
tabWidth: 2,
};
export function prettify(files: File[]): File[] {
return files.map((file: File) => <File>{name: file.name, content: prettier.format(file.content, prettierOptions)});
} |
import spacy

spacy_nlp = spacy.load("en_core_web_sm")
# BUG FIX: the sentence was previously passed without quotes, which is a
# SyntaxError — the pipeline takes a string.
doc = spacy_nlp("Alice in Wonderland is an 1865 novel written by English author Lewis Carroll.")
# Number of tokens in the parsed document (includes punctuation tokens).
word_count = len(list(doc))
print(word_count)
<reponame>sturmundbraem/kursausschreibung
import Route from '@ember/routing/route';
import { getEventById } from 'kursausschreibung/framework/store';
export default Route.extend({
  // Looks up the event for the requested id and redirects: to the event's
  // full route when it exists, otherwise back to the start page.
  model(params) {
    const event = getEventById(params.event_id);

    if (event === undefined) {
      // Unknown event id — bail out to the index route.
      this.replaceWith('');
      return;
    }

    this.replaceWith('list.category.event', event.areaKey, event.categoryKey, event.Id);
  }
});
|
# Route pinentry prompts to the current terminal.
export GPG_TTY=$(tty)
# Use gpg-agent's SSH socket as the SSH agent (GPG keys for SSH auth).
export SSH_AUTH_SOCK=$(gpgconf --list-dirs agent-ssh-socket)
# Make sure the agent is actually running.
gpgconf --launch gpg-agent
|
<reponame>pragyasingh123/AngularProject_BookingFlow
export class AddNectarCardRequest
{
sessionid : string;
schemeType : string;
addNectarCardEventContext : string;
loyaltyCardNumber : string;
} |
#!/bin/bash -f
# Vivado (TM) v2016.2 (64-bit)
#
# Filename : dataRAM.sh
# Simulator : Mentor Graphics ModelSim Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Tue Nov 07 10:38:10 +0800 2017
# IP Build 1577682 on Fri Jun 3 12:00:54 MDT 2016
#
# usage: dataRAM.sh [-help]
# usage: dataRAM.sh [-lib_map_path]
# usage: dataRAM.sh [-noclean_files]
# usage: dataRAM.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'dataRAM.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
# ********************************************************************************************************
# Script info
# Banner identifying the exporting tool version that generated this script.
echo -e "dataRAM.sh - Script generated by export_simulation (Vivado v2016.2 (64-bit)-id)\n"
# Main steps
# Orchestrates the whole flow: validate CLI args ($1/$2 are the script's
# command-line arguments), prepare the run directory, then compile and
# simulate.
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
compile()
{
# Compile design files; output is mirrored/appended to compile.log.
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
# 64-bit batch-mode (-c) ModelSim run driven by simulate.do, logged to simulate.log.
vsim -64 -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatches on the first CLI switch: copy simulator setup for -lib_map_path,
# wipe generated files for -reset_run, keep previous data for -noclean_files.
# Anything else falls through to copying the modelsim.ini setup file.
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./dataRAM.sh -help\" for more information)\n"
exit 1
fi
copy_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
copy_setup_file $2
esac

# Add any setup/initialization commands here:-
# <user specific commands>
}
# Copy modelsim.ini file
# Copies the simulator setup file out of the compiled-library directory
# (first argument, when provided) into the current run directory.
copy_setup_file()
{
file="modelsim.ini"
lib_map_path=""
if [[ ($1 != "") ]]; then
lib_map_path="$1"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
# ROBUSTNESS FIX: quote the expansion so library paths containing spaces
# survive word splitting (previously: cp $src_file .).
cp "$src_file" .
fi
}
# Delete generated data from the previous run
# Removes logs, the waveform dump, and the work/msim library directories.
reset_run()
{
files_to_remove=(compile.log elaborate.log simulate.log vsim.wlf work msim)
# Iterate the array directly instead of by numeric index.
for stale in "${files_to_remove[@]}"; do
if [[ -e $stale ]]; then
rm -rf $stale
fi
done
}
# Check command line arguments
# $1 = argument count, $2 = first argument. Rejects a single unknown switch
# with an error; prints usage (and exits) for -help/-h.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./dataRAM.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Prints the help text and exits with a non-zero status.
usage()
{
msg="Usage: dataRAM.sh [-help]\n\
Usage: dataRAM.sh [-lib_map_path]\n\
Usage: dataRAM.sh [-reset_run]\n\
Usage: dataRAM.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
# Forward the first two command-line arguments to the main entry point.
run $1 $2
|
<reponame>iafinn/twitter-mov-rec
package com.iafinn;
import java.util.List;
/**
* This runs the recomendation model
* for a fixed number of similar raters,
* and minimal raters per movie. It takes in year
* and genre guidelines to make filter objects
*
*/
public class ModelRunner {
    // Returns up to 10 recommended ratings for 'me', restricted to movies in
    // [startYear, endYear] and (unless the placeholder "Genre" is passed) the
    // given genre. Uses 500 similar raters and requires 5 ratings per movie.
    protected List<Rating> printSimilarRatingsByYearAndGenre(Rater me, int startYear, int endYear, String genre) {
        MovieDatabase.initialize("com/iafinn/movies_clean.csv");
        RaterDatabase.initialize("com/iafinn/ratings_clean.csv");

        final int numSimilarRaters = 500;
        final int minimalRaters = 5;

        // Assemble the movie filters: always by year, optionally by genre.
        AllFilters filters = new AllFilters();
        filters.addFilter(new YearFilter(startYear, endYear));
        if (!genre.equals("Genre")) {
            filters.addFilter(new GenreFilter(genre));
        }

        SimilarityModel model = new SimilarityModel();
        List<Rating> ratings = model.getSimilarRatingsByFilterRater(me, numSimilarRaters,
                minimalRaters, filters);

        // Cap the result at the top 10 recommendations.
        if (ratings.size() > 10) {
            ratings = ratings.subList(0, 10);
        }
        return ratings;
    }
}
|
#!/bin/sh
# Single-monitor layout: make DisplayPort-0 the primary display at
# 1920x1080 @ 165 Hz (origin 0x0, no rotation) and explicitly turn off the
# DVI, HDMI, and DP-1 outputs.
xrandr --output DVI-D-0 --off --output HDMI-0 --off --output DP-0 --primary --mode 1920x1080 --rate 165 --pos 0x0 --rotate normal --output DP-1 --off
|
import React from 'react';
import { connect } from 'react-redux';
import '../code.css';
// Expose the schema slice's database type and table map to the connected component.
const mapStateToProps = (store) => {
  const { database, tables } = store.schema;
  return { database, tables };
};
const CodeServerContainer = (props) => {
const enter = `
`;
const tab = ' ';
// Emits a complete server-side GraphQL schema module as a single string:
// require lines, one GraphQLObjectType per table, a root query, and a
// mutation block — tailored to the selected database backend.
function parseGraphqlServer(data, database) {
let query = `const graphql = require('graphql');${enter}`;
// MongoDB: one mongoose model require per table type.
if (database === 'MongoDB') {
for (const prop in data) {
query += buildDbModelRequirePaths(data[prop]);
}
}
// SQL backends pull in a shared connection/pool helper instead.
if (database === 'MySQL') {
query += 'const getConnection = require(\'../db/mysql_pool.js\');\n';
}
if (database === 'PostgreSQL') {
query += 'const connect = require(\'../db/postgresql_pool.js\');\n';
}
query += `
const {
GraphQLObjectType,
GraphQLSchema,
GraphQLID,
GraphQLString,
GraphQLInt,
GraphQLBoolean,
GraphQLList,
GraphQLNonNull
} = graphql;
${enter}`;
// BUILD TYPE SCHEMA
for (const prop in data) {
query += buildGraphqlTypeSchema(data[prop], data, database);
}
// BUILD ROOT QUERY
query += `const RootQuery = new GraphQLObjectType({${enter}${tab}name: 'RootQueryType',${enter}${tab}fields: {${enter}`;
// Comma-separate entries after the first one.
let firstRootLoop = true;
for (const prop in data) {
if (!firstRootLoop) query += `,${enter}`;
firstRootLoop = false;
query += buildGraphqlRootQuery(data[prop], database);
}
query += `${enter}${tab}}${enter}});${enter}${enter}`;
// BUILD MUTATIONS
query += `const Mutation = new GraphQLObjectType({${enter}${tab}name: 'Mutation',${enter}${tab}fields: {${enter}`;
let firstMutationLoop = true;
for (const prop in data) {
if (!firstMutationLoop) query += `,${enter}`;
firstMutationLoop = false;
query += buildGraphqlMutationQuery(data[prop], database);
}
query += `${enter}${tab}}${enter}});${enter}${enter}`;
// Final export wiring the root query and mutation into the schema.
query += `module.exports = new GraphQLSchema({${enter}${tab}query: RootQuery,${enter}${tab}mutation: Mutation${enter}});`;
return query;
}
// Emit the require() line for the mongoose model module backing this type
// (file name is the lowercased type name under ../db/).
function buildDbModelRequirePaths(data) {
  const typeName = data.type;
  return `const ${typeName} = require('../db/${typeName.toLowerCase()}.js');${enter}`;
}
// Emits the GraphQLObjectType definition for one table: scalar fields,
// forward-relation sub-queries, and reverse-reference ("refBy") sub-queries.
function buildGraphqlTypeSchema(table, data, database) {
let query = `const ${table.type}Type = new GraphQLObjectType({${enter}${tab}name: '${table.type}',${enter}${tab}fields: () => ({`;
let firstLoop = true;
for (let prop in table.fields) {
// Comma-separate field entries after the first one.
if (!firstLoop) query+= ',';
firstLoop = false;
// Wrap the scalar in GraphQLList(...) when the field holds multiple values
// ('front'/'back' select the opening vs. closing fragment).
query += `${enter}${tab}${tab}${table.fields[prop].name}: { type: ${checkForMultipleValues(table.fields[prop].multipleValues, 'front')}`;
query += `${tableTypeToGraphqlType(table.fields[prop].type)}`;
query += `${checkForMultipleValues(table.fields[prop].multipleValues, 'back')} }`;
// Forward relation: this field points into another table.
if (table.fields[prop].relation.tableIndex > -1) {
query += createSubQuery(table.fields[prop], data, database);
}
// Reverse references: encoded as 'tableIndex.fieldIndex.refType' strings.
const refBy = table.fields[prop].refBy;
if (refBy.size > 0) {
refBy.forEach(value => {
const parsedValue = value.split('.');
const field = {
name: table.fields[prop].name,
relation: {
tableIndex: parsedValue[0],
fieldIndex: parsedValue[1],
refType: parsedValue[2],
type: table.fields[prop].type
}
};
query += createSubQuery(field, data, database);
});
}
}
return query += `${enter}${tab}})${enter}});${enter}${enter}`;
}
// Map a table field type to the matching GraphQL scalar name; anything
// unrecognized falls back to GraphQLString, as before.
function tableTypeToGraphqlType(type) {
  const scalarByType = new Map([
    ['ID', 'GraphQLID'],
    ['String', 'GraphQLString'],
    ['Number', 'GraphQLInt'],
    ['Boolean', 'GraphQLBoolean'],
    ['Float', 'GraphQLFloat'],
  ]);
  return scalarByType.get(type) || 'GraphQLString';
}
// Title-case a type name: first character upper-cased, remainder lower-cased.
function toTitleCase(refTypeName) {
  return refTypeName[0].toUpperCase() + refTypeName.slice(1).toLowerCase();
}
// Emits the relation sub-field for one field: the field name
// (related.../everyRelated...), its GraphQL type (list for *-to-many), and a
// resolve() body specific to the database backend. The exact template-string
// concatenation order IS the output format — do not reorder.
function createSubQuery(field, data, database) {
const refTypeName = data[field.relation.tableIndex].type;
const refFieldName = data[field.relation.tableIndex].fields[field.relation.fieldIndex].name;
const refFieldType = data[field.relation.tableIndex].fields[field.relation.fieldIndex].type;
let query = `,${enter}${tab}${tab}${createSubQueryName(refTypeName)}: {${enter}${tab}${tab}${tab}type: `;
// *-to-many relations resolve to a list of the referenced type.
if (field.relation.refType === 'one to many' || field.relation.refType === 'many to many') {
query += `new GraphQLList(${refTypeName}Type),`;
} else {
query += `${refTypeName}Type,`;
}
query += `${enter}${tab}${tab}${tab}resolve(parent, args) {${enter}`;
query += `${tab}${tab}${tab}${tab}`;
// MongoDB: resolve via the mongoose model's find/findOne/findById.
if (database === 'MongoDB') {
query += `return ${refTypeName}.${findDbSearchMethod(refFieldName, refFieldType, field.relation.refType)}`;
query += `(${createSearchObject(refFieldName, refFieldType, field)});${enter}`;
query += `${tab}${tab}${tab}}${enter}`;
query += `${tab}${tab}}`;
}
// MySQL: resolve by interpolating a SELECT against the pooled connection.
if (database === 'MySQL') {
query += `getConnection((err, con) => {${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}const sql = \`SELECT * FROM ${refTypeName} WHERE `;
if (field.type === 'ID') {
query += `${field.name} = \${parent.${field.name}}`;
} else {
query += `${refFieldName} = \${parent.${field.name}}`;
}
query += `\`;${enter}${tab}${tab}${tab}${tab}${tab}con.query(sql, (err, result) => {${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
query += `${tab}${tab}${tab}${tab}})${enter}`;
query += `${tab}${tab}${tab}}${enter}`;
query += `${tab}${tab}}`;
}
// PostgreSQL: same shape as MySQL but through the pg client helper.
if (database === 'PostgreSQL') {
query += `connect((err, client) => {${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}const pool = \`SELECT * FROM ${refTypeName} WHERE `;
if (field.type === 'ID') {
query += `${field.name} = \${parent.${field.name}}`;
} else {
query += `${refFieldName} = \${parent.${field.name}}`;
}
query += `\`;${enter}${tab}${tab}${tab}${tab}${tab}client.query(pool, (err, result) => {${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
query += `${tab}${tab}${tab}${tab}})${enter}`;
query += `${tab}${tab}${tab}}${enter}`;
query += `${tab}${tab}}`;
}
return query;
// Hoisted helpers below close over 'field'/'refTypeName'.
// Sub-field name: singular 'related...' vs. plural 'everyRelated...'.
function createSubQueryName() {
switch (field.relation.refType) {
case 'one to one':
return `related${toTitleCase(refTypeName)}`;
case 'one to many':
return `everyRelated${toTitleCase(refTypeName)}`;
case 'many to one':
return `related${toTitleCase(refTypeName)}`;
case 'many to many':
return `everyRelated${toTitleCase(refTypeName)}`;
default:
return `everyRelated${toTitleCase(refTypeName)}`;
}
}
// Mongoose lookup method: findById for id refs, findOne for 1:1, else find.
function findDbSearchMethod(refFieldName, refFieldType, refType) {
if (refFieldName === 'id' || refFieldType === 'ID') return 'findById';
switch (refType) {
case 'one to one':
return 'findOne';
case 'one to many':
return 'find';
case 'many to one':
return 'find';
case 'many to many':
return 'find';
default:
return 'find';
}
}
// Argument for the lookup: bare id for findById, filter object otherwise.
function createSearchObject(refFieldName, refFieldType, field) {
if (refFieldName === 'id' || refFieldType === 'ID') {
return `parent.${field.name}`;
} else {
return `{ ${refFieldName}: parent.${field.name} }`;
}
}
}
function buildGraphqlRootQuery(data, database) {
  // A find-all root query is always generated; a find-by-id query is only
  // possible when the table has at least one field (the id) to key on.
  const pieces = [createFindAllRootQuery(data, database)];
  if (data.fields[0]) {
    pieces.push(createFindByIdQuery(data, database));
  }
  return pieces.join('');
}
function createFindAllRootQuery(table, database) {
  // Emits the `every<Type>` root query returning a GraphQLList of the type,
  // with a resolver body for the selected database backend.
  // Fixes vs. previous version:
  //  - the PostgreSQL branch emitted a stray "`;" before client.query and
  //    closed two extra braces that the shared return below closes again,
  //    producing unbalanced generated code; it now mirrors the MySQL branch.
  let query = `${tab}${tab}every${toTitleCase(table.type)}: {${enter}`;
  query += `${tab}${tab}${tab}type: new GraphQLList(${table.type}Type),${enter}`;
  query += `${tab}${tab}${tab}resolve() {${enter}${tab}${tab}${tab}${tab}`;
  if (database === 'MongoDB') {
    query += `return ${table.type}.find({});`;
  }
  if (database === 'MySQL') {
    query += `getConnection((err, con) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const sql = 'SELECT * FROM ${table.type}';${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}con.query(sql, (err, results) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return results;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  if (database === 'PostgreSQL') {
    query += `connect((err, client) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const pool = 'SELECT * FROM ${table.type}';${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}client.query(pool, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  // Close the resolve() body and the query field object (shared by all
  // backends).
  return query += `${enter}${tab}${tab}${tab}}${enter}${tab}${tab}}`;
}
function createFindByIdQuery(table, database) {
  // Emits the `<type>` root query that fetches a single record by the
  // table's first (id) field.
  // Fix: the PostgreSQL branch called `clieny.query` (typo) instead of
  // `client.query`, so the generated resolver crashed at runtime.
  const idFieldName = table.fields[0].name;
  let query = `,${enter}${tab}${tab}${table.type.toLowerCase()}: {${enter}`;
  query += `${tab}${tab}${tab}type: ${table.type}Type,${enter}`;
  query += `${tab}${tab}${tab}args: { ${idFieldName}: { type: ${tableTypeToGraphqlType(table.fields[0].type)} }},${enter}`;
  query += `${tab}${tab}${tab}resolve(parent, args) {${enter}`;
  query += `${tab}${tab}${tab}${tab}`;
  if (database === 'MongoDB') {
    query += `return ${table.type}.findById(args.id);`;
  }
  if (database === 'MySQL') {
    query += `getConnection((err, con) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const sql = \`SELECT * FROM ${table.type} WHERE ${idFieldName} = \${args.${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}con.query(sql, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  if (database === 'PostgreSQL') {
    query += `connect((err, client) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const pool = \`SELECT * FROM ${table.type} WHERE ${idFieldName} = \${args.${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}client.query(pool, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  // Close resolve() and the query field object.
  return query += `${enter}${tab}${tab}${tab}}${enter}${tab}${tab}}`;
}
function buildGraphqlMutationQuery(table, database) {
  // The "add" mutation is always generated; "update" and "delete" need at
  // least one field (the id) to target a record, so they are conditional.
  const pieces = [addMutation(table, database)];
  if (table.fields[0]) {
    pieces.push(`,${enter}${updateMutation(table, database)},${enter}`);
    pieces.push(deleteMutation(table, database));
  }
  return pieces.join('');
}
function addMutation(table, database) {
  // Builds the `add<Type>` mutation: one argument per table field (wrapped
  // with GraphQLNonNull / GraphQLList as declared) and a resolver that
  // inserts the new record for the selected database backend.
  let query = `${tab}${tab}add${table.type}: {${enter}${tab}${tab}${tab}type: ${table.type}Type,${enter}${tab}${tab}${tab}args: {${enter}`;
  // Commas are emitted *before* each arg after the first so the list never
  // ends with a trailing comma.
  let firstLoop = true;
  for (const prop in table.fields) {
    if (!firstLoop) query += `,${enter}`;
    firstLoop = false;
    query += `${tab}${tab}${tab}${tab}${table.fields[prop].name}: ${buildMutationArgType(table.fields[prop])}`;
  }
  query += `${enter}${tab}${tab}${tab}},${enter}${tab}${tab}${tab}resolve(parent, args) {${enter}${tab}${tab}${tab}${tab}`;
  if (database === 'MongoDB') query += `const ${table.type.toLowerCase()} = new ${table.type}(args);${enter}${tab}${tab}${tab}${tab}return ${table.type.toLowerCase()}.save();`;
  if (database === 'MySQL') {
    query += `getConnection((err, con) => {${enter}${tab}${tab}${tab}${tab}${tab}const sql = 'INSERT INTO ${table.type} SET ?';${enter}${tab}${tab}${tab}${tab}${tab}con.query(sql, args, (err, result) => {${enter}${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}${tab}${tab}${tab}${tab}${tab}})${enter}${tab}${tab}${tab}${tab}})`;
  }
  if (database === 'PostgreSQL') {
    // NOTE(review): `INSERT INTO ... SET ?` is MySQL-specific syntax;
    // PostgreSQL does not support it — confirm the intended generated SQL
    // for the PostgreSQL backend.
    query += `connect((err, client) => {${enter}${tab}${tab}${tab}${tab}${tab}const pool = 'INSERT INTO ${table.type} SET ?';${enter}${tab}${tab}${tab}${tab}${tab}client.query(pool, args, (err, result) => {${enter}${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}${tab}${tab}${tab}${tab}${tab}})${enter}${tab}${tab}${tab}${tab}})`;
  }
  // Close resolve() and the mutation field object.
  return query += `${enter}${tab}${tab}${tab}}${enter}${tab}${tab}}`;
  // Maps a field definition to its GraphQL argument type string, wrapping
  // with GraphQLNonNull when required and GraphQLList when multi-valued.
  function buildMutationArgType(field) {
    const query = `{ type: ${checkForRequired(field.required, 'front')}${checkForMultipleValues(field.multipleValues, 'front')}${tableTypeToGraphqlType(field.type)}${checkForMultipleValues(field.multipleValues, 'back')}${checkForRequired(field.required, 'back')} }`;
    return query;
  }
}
function updateMutation(table, database) {
  // Builds the `update<Type>` mutation: every field is accepted as an
  // optional argument (no non-null wrappers, so partial updates work) and
  // the record matched by the table's first (id) field is updated.
  // Fixes vs. previous version (both in the *generated* resolver code):
  //  - SET assignments were separated by a space, which is invalid SQL for
  //    more than one field; they are now comma-joined and the trailing
  //    separator is trimmed.
  //  - the PostgreSQL branch wrapped values in double quotes, which
  //    PostgreSQL parses as identifiers; string literals use single quotes.
  let query = `${tab}${tab}update${table.type}: {${enter}${tab}${tab}${tab}type: ${table.type}Type,${enter}${tab}${tab}${tab}args: {${enter}`;
  let firstLoop = true;
  for (const prop in table.fields) {
    if (!firstLoop) query += `,${enter}`;
    firstLoop = false;
    query += `${tab}${tab}${tab}${tab}${table.fields[prop].name}: ${buildMutationArgType(table.fields[prop])}`;
  }
  query += `${enter}${tab}${tab}${tab}},${enter}${tab}${tab}${tab}resolve(parent, args) {${enter}${tab}${tab}${tab}${tab}`;
  if (database === 'MongoDB') query += `return ${table.type}.findByIdAndUpdate(args.id, args);`;
  if (database === 'MySQL') {
    const idFieldName = table.fields[0].name;
    query += `getConnection((err, con) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}let updateValues = '';${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}for (const prop in args) {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}updateValues += \`\${prop} = '\${args[prop]}', \`${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}}${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}updateValues = updateValues.slice(0, -2);${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const sql = \`UPDATE ${table.type} SET \${updateValues} WHERE ${idFieldName} = \${args.`;
    query += `${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}con.query(sql, args, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  if (database === 'PostgreSQL') {
    const idFieldName = table.fields[0].name;
    query += `connect((err, client) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}let updateValues = '';${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}for (const prop in args) {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}updateValues += \`\${prop} = '\${args[prop]}', \`${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}}${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}updateValues = updateValues.slice(0, -2);${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const pool = \`UPDATE ${table.type} SET \${updateValues} WHERE ${idFieldName} = \${args.`;
    query += `${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}client.query(pool, args, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  // Close resolve() and the mutation field object.
  return query += `${enter}${tab}${tab}${tab}}${enter}${tab}${tab}}`;
  // Unlike the add-mutation variant, update args are never non-null so
  // callers can send a subset of fields. `database` is accepted but unused
  // (kept for signature compatibility).
  function buildMutationArgType(field, database) {
    const query = `{ type: ${checkForMultipleValues(field.multipleValues, 'front')}${tableTypeToGraphqlType(field.type)}${checkForMultipleValues(field.multipleValues, 'back')} }`;
    return query;
  }
}
function deleteMutation(table, database) {
  // Builds the `delete<Type>` mutation keyed on the table's first (id)
  // field, with a backend-specific resolver body.
  const idFieldName = table.fields[0].name;
  let query = `${tab}${tab}delete${table.type}: {${enter}`;
  query += `${tab}${tab}${tab}type: ${table.type}Type,${enter}`;
  query += `${tab}${tab}${tab}args: { ${idFieldName}: { type: ${tableTypeToGraphqlType(table.fields[0].type)} }},${enter}`;
  query += `${tab}${tab}${tab}resolve(parent, args) {${enter}${tab}${tab}${tab}${tab}`;
  if (database === 'MongoDB') {
    query += `return ${table.type}.findByIdAndRemove(args.id);`;
  }
  if (database === 'MySQL') {
    query += `getConnection((err, con) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const sql = \`DELETE FROM ${table.type} WHERE ${idFieldName} = \${args.${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}con.query(sql, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}con.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  if (database === 'PostgreSQL') {
    query += `connect((err, client) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}const pool = \`DELETE FROM ${table.type} WHERE ${idFieldName} = \${args.${idFieldName}}\`;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}client.query(pool, (err, result) => {${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}if (err) throw err;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}client.release();${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}${tab}return result;${enter}`;
    query += `${tab}${tab}${tab}${tab}${tab}})${enter}`;
    query += `${tab}${tab}${tab}${tab}})`;
  }
  // Close resolve() and the mutation field object.
  return query += `${enter}${tab}${tab}${tab}}${enter}${tab}${tab}}`;
}
function checkForRequired(required, position) {
  // Emits the opening/closing text of a GraphQLNonNull wrapper for required
  // fields; optional fields contribute nothing.
  if (!required) return '';
  return position === 'front' ? 'new GraphQLNonNull(' : ')';
}
function checkForMultipleValues(multipleValues, position) {
  // Emits the opening/closing text of a GraphQLList wrapper for
  // multi-valued fields; single-valued fields contribute nothing.
  if (!multipleValues) return '';
  return position === 'front' ? 'new GraphQLList(' : ')';
}
// run parseGraphQLServer to generate code to render to the page
const code = parseGraphqlServer(props.tables, props.database);
return (
<div id="code-container-server">
<h4 className='codeHeader'>GraphQl Types, Root Queries, and Mutations</h4>
<hr/>
<pre>
{code}
</pre>
</div>
);
};
export default connect(mapStateToProps, null)(CodeServerContainer);
|
<reponame>Jasig/ssp-data-importer
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.ssp.util.importer.job.staging;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.sql.DataSource;
import org.jarbframework.utils.orm.ColumnReference;
import org.jasig.ssp.util.importer.job.config.MetadataConfigurations;
import org.jasig.ssp.util.importer.job.domain.RawItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCallback;
import org.springframework.jdbc.core.SqlTypeValue;
import org.springframework.jdbc.core.StatementCreatorUtils;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
/**
 * Spring Batch {@link ItemWriter} that stages {@link RawItem} records into a
 * SQL Server staging table named {@code stg_<entity>} using one batched
 * INSERT per chunk. The running batch row counters and the current entity
 * name are kept in the step execution context under the keys
 * {@code batchStart}, {@code batchStop} and {@code currentEntity} so that
 * numbering continues across chunks of the same file.
 */
public class SqlServerStagingTableWriter implements ItemWriter<RawItem>,
        StepExecutionListener {

    /** Resource (input file) the current column headers were derived from. */
    private Resource currentResource;

    /** Column order used for both the INSERT column list and value binding. */
    private String[] orderedHeaders = null;

    /** Supplies java.sql type codes per (table, column) pair. */
    private MetadataConfigurations metadataRepository;

    /** Step execution whose context carries the batch counters. */
    private StepExecution stepExecution;

    private static final Logger logger = LoggerFactory.getLogger(SqlServerStagingTableWriter.class);

    private static final Logger queryLogger = LoggerFactory.getLogger("QUERYLOG." + SqlServerStagingTableWriter.class);

    @Autowired
    private DataSource dataSource;

    /**
     * Writes one chunk of items into the staging table. The target table
     * name is derived from the first item's file name (text before the first
     * dot, prefixed with {@code stg_}).
     *
     * @param items non-empty chunk of raw records from one input file
     */
    @Override
    public void write(final List<? extends RawItem> items) {
        NamedParameterJdbcTemplate jdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
        String fileName = items.get(0).getResource().getFilename();
        final String[] tableName = fileName.split("\\.");
        // Pull the counters from the execution context; both are null on the
        // very first chunk of a job.
        Integer batchStart = (Integer) (stepExecution.getExecutionContext()
                .get("batchStart") == null ? null : stepExecution
                .getExecutionContext().get("batchStart"));
        Integer batchStop = (Integer) (stepExecution.getExecutionContext().get(
                "batchStop") == null ? null : stepExecution
                .getExecutionContext().get("batchStop"));
        Object currentEntity = stepExecution.getExecutionContext().get(
                "currentEntity");
        // Reset the counters when a new entity (file) starts; otherwise
        // continue numbering where the previous chunk stopped.
        if (currentEntity == null || !currentEntity.equals(tableName[0])) {
            batchStart = 0;
            batchStop = items.size() - 1;
            currentEntity = tableName[0];
            stepExecution.getExecutionContext().put("currentEntity",
                    currentEntity);
            stepExecution.getExecutionContext().put("batchStart", batchStart);
            stepExecution.getExecutionContext().put("batchStop", batchStop);
        } else {
            batchStart = batchStop + 1;
            batchStop = (Integer) batchStart + items.size() - 1;
            stepExecution.getExecutionContext().put("batchStart", batchStart);
            stepExecution.getExecutionContext().put("batchStop", batchStop);
        }
        RawItem firstItem = items.get(0);
        Resource firstItemResource = firstItem.getResource();
        // Recompute the header order whenever the input resource changes.
        if (currentResource == null || !(this.currentResource.equals(firstItemResource))) {
            this.orderedHeaders = writeHeader(firstItem);
            this.currentResource = firstItemResource;
        }
        // Build "INSERT INTO stg_<table> (batch_id,<cols>) VALUES (?,<?s>);"
        StringBuilder insertSql = new StringBuilder();
        insertSql.append("INSERT INTO stg_" + tableName[0] + " (batch_id,");
        StringBuilder valuesSqlBuilder = new StringBuilder();
        valuesSqlBuilder.append(" VALUES (?,");
        for (String header : this.orderedHeaders) {
            insertSql.append(header).append(",");
            valuesSqlBuilder.append("?").append(",");
        }
        insertSql.setLength(insertSql.length() - 1); // trim comma
        valuesSqlBuilder.setLength(valuesSqlBuilder.length() - 1); // trim comma
        insertSql.append(")");
        valuesSqlBuilder.append(");");
        insertSql.append(valuesSqlBuilder);
        // Effectively-final holder so the anonymous callback can increment
        // the per-row batch id.
        final AtomicInteger batchStartRef = new AtomicInteger(batchStart);
        final String sql = insertSql.toString();
        jdbcTemplate.getJdbcOperations().execute(sql, new PreparedStatementCallback() {
            @Override
            public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException {
                for (RawItem item : items) {
                    // NOTE(review): raw ArrayList — should be new ArrayList<Object>(...).
                    final List<Object> paramsForLog = new ArrayList(orderedHeaders.length);
                    int counter = 1;
                    // Parameter 1 is always the row's batch id.
                    paramsForLog.add(batchStartRef.get());
                    StatementCreatorUtils.setParameterValue(ps, counter, SqlTypeValue.TYPE_UNKNOWN, batchStartRef.getAndIncrement());
                    counter++;
                    for ( String header : orderedHeaders ) {
                        final Map<String, String> record = item.getRecord();
                        String value = record.get(header);
                        // Look up the declared SQL type for this column so the
                        // driver binds (possibly null) values correctly.
                        final Integer sqlType = metadataRepository
                                .getRepository()
                                .getColumnMetadataRepository()
                                .getColumnMetadata(
                                        new ColumnReference(tableName[0], header))
                                .getJavaSqlType();
                        paramsForLog.add(value);
                        StatementCreatorUtils.setParameterValue(ps, counter, sqlType, value);
                        counter++;
                    }
                    sayQuery(sql, paramsForLog);
                    ps.addBatch();
                }
                return ps.executeBatch();
            }
        });
        batchStart = batchStartRef.get();
        say("******CHUNK SQLSERVER******");
    }

    /**
     * Derives the ordered column headers from the first record's keys.
     * NOTE(review): the StringBuilder built here is never used — dead code.
     *
     * @param item first item of a new resource
     * @return header names in the record's key iteration order
     */
    private String[] writeHeader(RawItem item) {
        Map<String, String> firstRecord = item.getRecord();
        StringBuilder sb = new StringBuilder();
        List<String> headerColumns = new ArrayList<String>();
        for (String key : firstRecord.keySet()) {
            sb.append(key).append(",");
            headerColumns.add(key);
        }
        sb.setLength(sb.length() - 1); // trim comma
        return headerColumns.toArray(new String[headerColumns.size()]);
    }

    /** Logs an informational message. */
    private void say(Object message) {
        logger.info(message.toString());
    }

    /** Logs the executed SQL together with its bind parameters. */
    private void sayQuery(String sql, List<Object> bindParams) {
        queryLogger.info("Query: [{}] Bind Params: [{}]", sql, bindParams);
    }

    /** Logs an empty line. NOTE(review): unused within this class. */
    private void say() {
        say("");
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public MetadataConfigurations getMetadataRepository() {
        return metadataRepository;
    }

    public void setMetadataRepository(MetadataConfigurations metadataRepository) {
        this.metadataRepository = metadataRepository;
    }

    @Override
    public void beforeStep(StepExecution arg0) {
        this.stepExecution = arg0;
    }

    @Override
    public ExitStatus afterStep(StepExecution arg0) {
        return ExitStatus.COMPLETED;
    }

    @BeforeStep
    public void saveStepExecution(StepExecution stepExecution) {
        this.stepExecution = stepExecution;
    }
}
|
#include "Person.h"
#include <iostream>
using namespace std;
// Default constructor: name and telNo are default-initialized by ListType.
Person::Person() {}
// Constructs a Person with the given name and telephone number.
// Uses a member initializer list so the members are copy-constructed
// directly instead of being default-constructed and then assigned
// (C++ Core Guidelines C.49).
Person::Person(ListType n, ListType t) : name(n), telNo(t) {}
// Replaces the stored name.
void Person::setName(ListType n) {
    this->name = n;
}
// Returns a copy of the stored name.
ListType Person::getName() {
    return this->name;
}
// Replaces the stored telephone number.
void Person::setTelNo(ListType t) {
    this->telNo = t;
}
// Returns a copy of the stored telephone number.
ListType Person::getTelNo() {
    return this->telNo;
}
|
<filename>test/test-phantomjs.js
// Smoke test for the generated page, intended to run under PhantomJS where
// `document` and the mocha-style globals (describe/it) are provided by the
// test harness rather than imported here.
var async = require('async'),
    chai = require('chai');
// NOTE(review): `should` is assigned but never used, and `chai.should` is
// not invoked (the plugin form is `chai.should()`); only `expect` is used.
var should = chai.should,
    expect = chai.expect;
describe("The main page", function () {
  // Assumes the page under test contains an element with id "main" —
  // confirm against the harness HTML.
  var main = document.getElementById('main');
  it("should have more than a dozen fonts", function () {
    expect(main.querySelectorAll('div').length).to.be.above(12);
  });
});
Object.defineProperty(exports, "__esModule", {
value: true
});
var _index = require("../rule/index.js");
var _index2 = _interopRequireDefault(_index);
var _util = require("../util");
// Babel interop helper: ES-module namespace objects pass through untouched;
// anything else (CommonJS exports, including falsy values) is wrapped so the
// original export is reachable under `default`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
// Validates `value` against rule.type. Optional fields that are absent from
// `source` are skipped entirely; empty optional values short-circuit with a
// bare callback; otherwise the required- and type-rules are applied and any
// accumulated errors are reported.
function type(rule, value, callback, source, options) {
  var ruleType = rule.type;
  var errors = [];
  var fieldPresent = source.hasOwnProperty(rule.field);
  // Nothing to validate: field is optional and not supplied.
  if (!rule.required && !fieldPresent) {
    return callback(errors);
  }
  // Empty value on an optional field: succeed without running the rules.
  if ((0, _util.isEmptyValue)(value, ruleType) && !rule.required) {
    return callback();
  }
  _index2.default.required(rule, value, source, errors, options, ruleType);
  if (!(0, _util.isEmptyValue)(value, ruleType)) {
    _index2.default.type(rule, value, source, errors, options);
  }
  callback(errors);
}
exports.default = type; |
import { Injectable } from '@angular/core';
import { Http, RequestOptions, Headers } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
@Injectable()
export class HttpService {
  // All requests are rooted at the PonyRacer backend.
  baseUrl: string = 'http://ponyracer.ninja-squad.com';
  // Single shared Headers instance, mutated before each request to carry
  // (or drop) the JWT Authorization header.
  headers: Headers = new Headers();
  options: RequestOptions = new RequestOptions({ headers: this.headers });

  constructor(private http: Http) {
  }

  /** Issues a GET to `baseUrl + path` and unwraps the JSON body. */
  get(path: string): Observable<any> {
    this.addJwtTokenIfExists();
    const url = `${this.baseUrl}${path}`;
    return this.http.get(url, this.options).map(res => res.json());
  }

  /** Issues a POST with `body` to `baseUrl + path` and unwraps the JSON body. */
  post(path: string, body: any): Observable<any> {
    this.addJwtTokenIfExists();
    const url = `${this.baseUrl}${path}`;
    return this.http.post(url, body, this.options).map(res => res.json());
  }

  /** Issues a DELETE to `baseUrl + path`; the raw response is returned. */
  delete(path: string): Observable<any> {
    this.addJwtTokenIfExists();
    return this.http.delete(`${this.baseUrl}${path}`, this.options);
  }

  /**
   * Refreshes the shared headers from local storage: sets the Authorization
   * bearer token when a remembered user exists, removes it otherwise.
   */
  addJwtTokenIfExists() {
    const stored = window.localStorage.getItem('rememberMe');
    if (!stored) {
      this.headers.delete('Authorization');
      return;
    }
    const user = JSON.parse(stored);
    this.headers.set('Authorization', `Bearer ${user.token}`);
  }
}
|
#@IgnoreInspection BashAddShebang
# Exits the script when the *previous* command failed.
#   $1 - message to print on failure
#   $2 - exit status to use (defaults to 1)
# BUG FIX: $? must be captured before any other command runs — the previous
# version read it after two `local` assignments, which reset $? to 0, so the
# failure branch could never fire.
exit_if_failed() {
    local LAST_STATUS=$?
    local MESSAGE=$1
    local EXIT_STATUS=${2:-1}
    if [ $LAST_STATUS -ne 0 ]; then
        echo "FAILED: $MESSAGE."
        exit $EXIT_STATUS
    fi
}
# Prints a warning (but continues) when the *previous* command failed.
#   $1 - message to print on failure
# BUG FIX: $? is captured first; the previous version read it after a
# `local` assignment, which reset $? to 0, so no warning was ever printed.
warn_if_failed() {
    local LAST_STATUS=$?
    local MESSAGE=$1
    if [ $LAST_STATUS -ne 0 ]; then
        echo "WARNING: $MESSAGE."
    fi
}
# Echoes 1 (no newline) when a systemd process is running, 0 otherwise.
# Intended for use in arithmetic contexts: (( $(service_is_systemd_controlled) )).
service_is_systemd_controlled() {
    pidof systemd > /dev/null && echo -n 1 || echo -n 0
}
# Echoes 1 (no newline) when the system is assumed to be initd-controlled,
# i.e. when no systemd process is found; 0 otherwise.
service_is_initd_controlled() {
    # According to this post https://unix.stackexchange.com/questions/121654/convenient-way-to-check-if-system-is-using-systemd-or-sysvinit-in-bash
    # it should be possible to establish that initd is controlling the system by checking that /sbin/init has pid 1. However it
    # seems that when running as root /sbin/init has pid 1 even though systemd is running. Possibly it is just an alias of systemd in that case.
    # We therefore use the absence of systemd as the criterion for initd being in charge.
    pidof systemd > /dev/null && echo -n 0 || echo -n 1
}
# Records that a running service should be restarted after the upgrade by
# touching a marker file in /tmp (consumed by service_restart).
#   $1 - service name
service_save_restart_memo() {
    # Parameters
    local SERVICE_NAME=$1
    # Constants
    local RESTART_MEMO="/tmp/$SERVICE_NAME-restart-memo"
    if [ -d /tmp ]; then
        echo -n "Service is running; /tmp found; creating memo to restart service after upgrade..."
        touch $RESTART_MEMO
        echo "OK"
    fi
}
# Stops the service if it is currently running and, before stopping, leaves
# a restart memo so service_restart can bring it back up after the upgrade.
#   $1 - service name
#   $2 - number of installed package versions; > 1 indicates an upgrade
service_stop() {
    # Parameters
    local SERVICE_NAME=$1
    local NUMBER_OF_INSTALLATIONS=$2
    # If the package has not been installed yet, there is no need to attempt stopping the service.
    if [ $NUMBER_OF_INSTALLATIONS -gt 1 ]; then
        if (( $(service_is_systemd_controlled) )); then
            # `systemctl status` exits 0 only while the unit is active.
            systemctl status $SERVICE_NAME 2> /dev/null 1> /dev/null
            local STATE=$?
            if [ $STATE -eq 0 ]; then # Service is running
                service_save_restart_memo $SERVICE_NAME
                echo -n "Attempting to stop service..."
                systemctl stop $SERVICE_NAME 2> /dev/null 1> /dev/null
                exit_if_failed "Could not stop service $SERVICE_NAME"
                echo "OK"
            fi
        else # we assume it is initd-controlled
            # Depends on the output of the status command. This should end in "is running." or "is stopped."
            local STATE=$(service $SERVICE_NAME status | sed 's/^.*is \(.*\)\.$/\1/')
            if [ "$STATE" == "running" ]; then
                service_save_restart_memo $SERVICE_NAME
                echo -n "Attempting to stop service..."
                service $SERVICE_NAME stop 2> /dev/null 1> /dev/null
                exit_if_failed "Could not stop service $SERVICE_NAME"
                echo "OK"
            fi
        fi
    fi
}
# Attempts to stop the service under all conditions (used on package
# removal). Does NOT save a restart memo.
#   $1 - service name
service_stop_unconditional() {
    # Parameters
    local SERVICE_NAME=$1
    if (( $(service_is_systemd_controlled) )); then
        # `systemctl status` exits 0 only while the unit is active.
        systemctl status $SERVICE_NAME 2> /dev/null 1> /dev/null
        local STATE=$?
        if [ $STATE -eq 0 ]; then # Service is running
            echo -n "Attempting to stop service..."
            systemctl stop $SERVICE_NAME 2> /dev/null 1> /dev/null
            exit_if_failed "Could not stop service $SERVICE_NAME"
            echo "OK"
        fi
    else # we assume it is initd-controlled
        # Depends on the output of the status command. This should end in "is running." or "is stopped."
        local STATE=$(service $SERVICE_NAME status | sed 's/^.*is \(.*\)\.$/\1/')
        if [ "$STATE" == "running" ]; then
            echo -n "Attempting to stop service..."
            service $SERVICE_NAME stop 2> /dev/null 1> /dev/null
            exit_if_failed "Could not stop service $SERVICE_NAME"
            echo "OK"
        fi
    fi
}
# Restarts the service after an upgrade if (and only if) service_stop left a
# restart memo; the memo is consumed either way. Failures only warn, so a
# broken restart never aborts the package scriptlet.
#   $1 - service name
service_restart() {
    # Parameters
    local SERVICE_NAME=$1
    # Constants
    local RESTART_MEMO="/tmp/$SERVICE_NAME-restart-memo"
    if [ -f $RESTART_MEMO ]; then
        echo -n "Found restart memo; attempting to start service..."
        rm $RESTART_MEMO
        if (( $(service_is_systemd_controlled) )); then
            # Unit files may have changed during the upgrade.
            systemctl daemon-reload
            systemctl start $SERVICE_NAME 2> /dev/null 1> /dev/null
            warn_if_failed "Could not restart service $SERVICE_NAME after upgrade"
            echo "OK"
        else
            service $SERVICE_NAME start 2> /dev/null 1> /dev/null
            warn_if_failed "Could not restart service $SERVICE_NAME after upgrade"
            echo "OK"
        fi
    fi
}
# Creates a system account for the module if it does not exist yet.
#   $1 - user name that will own/run the module
# NOTE(review): `id -u $1` uses $1 directly instead of $MODULE_OWNER (same
# value, but inconsistent and unquoted) — consider `id -u "$MODULE_OWNER"`.
service_create_module_user() {
    # Parameters
    local MODULE_OWNER=$1
    # Getting the user ID of a non-existent user will result in exit status 1.
    # We do not want to see the error messages, so we redirect them to the memory hole.
    id -u $1 2> /dev/null 1> /dev/null
    if [ "$?" == "1" ]; # User not found
    then
        echo -n "Creating module user: $MODULE_OWNER..."
        useradd --system $MODULE_OWNER 2> /dev/null
        exit_if_failed "Unable to create user $MODULE_OWNER."
        echo "OK"
    else
        echo "Module user $MODULE_OWNER already exists. No action taken."
    fi
}
# Installs an init.d service script when the system is initd-controlled.
#   $1 - path to the service script to install
#   $2 - module (service) name; becomes the script name under /etc/init.d
# NOTE(review): `chmod o+x` sets the execute bit for *others*, but the
# failure message says "for owner" — confirm whether u+x (or a+x) was meant.
service_install_initd_service_script() {
    # Parameters
    local SCRIPT=$1
    local MODULE_NAME=$2
    # Constants
    local INITD_SCRIPTS_DIR="/etc/init.d"
    if (( $(service_is_initd_controlled) )); then
        echo -n "Installing initd service script..."
        cp $SCRIPT $INITD_SCRIPTS_DIR/$MODULE_NAME
        exit_if_failed "Unable to copy initd service script."
        chmod o+x $INITD_SCRIPTS_DIR/$MODULE_NAME
        exit_if_failed "Unable to make service script executable for owner"
        echo "OK"
    fi
}
# On final package removal (zero installations left), stops the service and
# deletes its init.d script if one is installed.
#   $1 - module (service) name
#   $2 - number of installed versions remaining; 0 means complete removal
service_remove_initd_service_script() {
    # Parameters
    local MODULE_NAME=$1
    local NUMBER_OF_INSTALLATIONS=$2
    # Constants
    local INITD_SCRIPTS_DIR="/etc/init.d"
    if ([ $NUMBER_OF_INSTALLATIONS -eq 0 ] && [ -f $INITD_SCRIPTS_DIR/$MODULE_NAME ]); then
        service_stop_unconditional $MODULE_NAME
        echo -n "Removing initd service script..."
        rm $INITD_SCRIPTS_DIR/$MODULE_NAME
        warn_if_failed "initd service script could not be removed: $INITD_SCRIPTS_DIR/$MODULE_NAME."
        echo "OK"
    fi
}
# Installs a systemd unit file (and, optionally, a drop-in override) when
# the system is systemd-controlled.
#   $1 - path to the unit file
#   $2 - module (service) name; names the drop-in directory
#   $3 - (optional) path to a drop-in file to install
service_install_systemd_unit() {
    # Parameters
    local UNIT_FILE=$1
    local MODULE_NAME=$2
    local DROP_IN_FILE=$3
    # Constants
    local SYSTEMD_SCRIPTS_DIR="/usr/lib/systemd/system"
    local SYSTEMD_DROP_INS_PARENT_DIR="/etc/systemd/system"
    if (( $(service_is_systemd_controlled) )); then
        echo -n "Installing systemd unit file..."
        cp $UNIT_FILE $SYSTEMD_SCRIPTS_DIR/
        exit_if_failed "Could not copy systemd unit file."
        echo "OK"
        # A third argument means a drop-in override was supplied.
        if [ $# -gt 2 ]; then
            echo -n "Installing drop-ins..."
            local DROP_IN_DIR="$SYSTEMD_DROP_INS_PARENT_DIR/$MODULE_NAME.service.d/"
            if [ ! -d $DROP_IN_DIR ]; then
                mkdir $DROP_IN_DIR
            fi
            cp $DROP_IN_FILE $DROP_IN_DIR
            exit_if_failed "Could not install drop-ins."
            echo "OK"
        fi
    fi
}
# On final package removal (zero installations left), stops the service and
# deletes its systemd unit file and any drop-in directory.
#   $1 - module (service) name
#   $2 - number of installed versions remaining; 0 means complete removal
service_remove_systemd_unit() {
    # Parameters
    local MODULE_NAME=$1
    local NUMBER_OF_INSTALLATIONS=$2
    # Constants
    local SYSTEMD_SCRIPTS_DIR="/usr/lib/systemd/system"
    local SYSTEMD_DROP_INS_PARENT_DIR="/etc/systemd/system"
    if ([ $NUMBER_OF_INSTALLATIONS -eq 0 ] && [ -f $SYSTEMD_SCRIPTS_DIR/${MODULE_NAME}.service ]); then
        service_stop_unconditional $MODULE_NAME
        echo -n "Removing systemd unit file..."
        rm $SYSTEMD_SCRIPTS_DIR/${MODULE_NAME}.service
        warn_if_failed "systemd unit file could not be removed: $SYSTEMD_SCRIPTS_DIR/${MODULE_NAME}.service"
        local DROP_IN_DIR="$SYSTEMD_DROP_INS_PARENT_DIR/$MODULE_NAME.service.d/"
        if [ -d $DROP_IN_DIR ]; then
            rm -fr $DROP_IN_DIR
            warn_if_failed "systemd drop-in directory at $DROP_IN_DIR could not be removed."
        fi
        echo "OK"
    fi
}
# Ensures the module's log directory exists under /var/opt/dans.knaw.nl/log
# and is owned by the service user (assumed to have the module's name).
#   $1 - module (service) name
service_create_log_directory() {
    # Parameters
    local MODULE_NAME=$1
    # Constants
    local LOG_BASE="/var/opt/dans.knaw.nl/log"
    local LOG_DIR="$LOG_BASE/$MODULE_NAME"
    if [ ! -d $LOG_DIR ]; then
        echo -n "Creating directory for logging..."
        mkdir -p $LOG_DIR
        exit_if_failed "Could not create directory for logging at $LOG_DIR"
        echo "OK"
    fi
    # Ownership is (re)applied even when the directory already existed.
    echo -n "Making sure logging directory is owned by service user..."
    chown $MODULE_NAME $LOG_DIR
    exit_if_failed "Could not change ownership of $LOG_DIR to $MODULE_NAME."
    echo "OK"
}
|
#!/bin/bash
#SBATCH --time=72:00:00
#SBATCH --mem=256000M
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=32
#SBATCH --job-name=M33_17B_OH-%A-%a
#SBATCH --output=casa-m33_17B_OH-%A-%a.out
#SBATCH --array=0-7
# Use array to set which channels will get imaged.
# Run from a separate folder so the log files are in one place.
module restore my_default
source /home/ekoch/.bashrc
source /home/ekoch/preload.bash
job_num=$SLURM_ARRAY_TASK_ID
# Build in a slight offset for each job to avoid starting a bunch of CASA
# sessions at once
# sleep $(($job_num + 30))
# Move to scratch space b/c casa write out the temporary files into the same folder
export scratch_path=/home/ekoch/scratch/17B-162_imaging/
cd $scratch_path
Xvfb :1 &
export DISPLAY=:1
# Path to the casa files
export casa_scratch_path="$HOME/casa-release-5.5.0-149.el7"
# Make a new directory on the node-local storage, one per array task.
# BUG FIX: the suffix previously used the undefined variable ${chan_num},
# so every array task landed in the same "OH_imaging_" directory; use the
# array task id instead.
tmp_dir=$SLURM_TMPDIR/OH_imaging_${job_num}
mkdir $tmp_dir
cd $tmp_dir
# Copy a new casa instance to avoid slower i/o on scratch or in home
cp -r $casa_scratch_path .
# Copy the init file
mkdir .casa
cp $HOME/.casa/init.py .casa/
rc_path="${tmp_dir}/.casa"
weighting='natural'
# Dispatch on the array task id: even ids image the continuum-subtracted
# measurement set for one OH line, odd ids the non-subtracted one.
if (( $job_num==0 )); then
    # OH1612 contsub
    mkdir $scratch_path/OH1612_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1612_spw_3_LSRK.ms.contsub .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1612.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 3 True $weighting False
    cp -r OH1612_imaging/* $scratch_path/OH1612_imaging/
elif (( $job_num==1 )); then
    # OH1612 no contsub
    mkdir $scratch_path/OH1612_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1612_spw_3_LSRK.ms .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1612_nocontsub.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 3 False $weighting False
    cp -r OH1612_imaging/* $scratch_path/OH1612_imaging/
elif (( $job_num==2 )); then
    # OH1665 contsub
    mkdir $scratch_path/OH1665_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1665_spw_5_LSRK.ms.contsub .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1665.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 5 True $weighting False
    cp -r OH1665_imaging/* $scratch_path/OH1665_imaging/
elif (( $job_num==3 )); then
    # OH1665 no contsub
    mkdir $scratch_path/OH1665_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1665_spw_5_LSRK.ms .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1665_nocontsub.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 5 False $weighting False
    cp -r OH1665_imaging/* $scratch_path/OH1665_imaging/
elif (( $job_num==4 )); then
    # OH1667 contsub
    mkdir $scratch_path/OH1667_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1667_spw_6_LSRK.ms.contsub .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1667.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 6 True $weighting False
    cp -r OH1667_imaging/* $scratch_path/OH1667_imaging/
elif (( $job_num==5 )); then
    # OH1667 no contsub
    mkdir $scratch_path/OH1667_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1667_spw_6_LSRK.ms .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1667_nocontsub.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 6 False $weighting False
    cp -r OH1667_imaging/* $scratch_path/OH1667_imaging/
elif (( $job_num==6 )); then
    # OH1720 contsub
    mkdir $scratch_path/OH1720_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1720_spw_7_LSRK.ms.contsub .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1720.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 7 True $weighting False
    cp -r OH1720_imaging/* $scratch_path/OH1720_imaging/
elif (( $job_num==7 )); then
    # OH1720 no contsub
    mkdir $scratch_path/OH1720_imaging
    # Copy data
    cp -r $scratch_path/17B-162_OH1720_spw_7_LSRK.ms .
    casa-release-5.5.0-149.el7/bin/casa --rcdir ${rc_path} --nologger --nogui --log2term --logfile $scratch_path/OH_17B_imaging_$(date "+%Y%m%d-%H%M%S")_1720_nocontsub.log --nocrashreport -c ~/code/VLA_Lband/17B-162/OH/OH_imaging.py 7 False $weighting False
    cp -r OH1720_imaging/* $scratch_path/OH1720_imaging/
else
    # BUG FIX: the previous message referenced the undefined variable
    # ${stage} and the wrong valid range ("1 or 2").
    echo "Array task ID must be between 0 and 7, not ${job_num}."
    exit 1
fi
echo "All CASA jobs exited."
|
# import numpy
import numpy as np

# Declare matrices A and B
A = np.array([[2, 3, 5], [5, 7, 2], [4, 2, 6]])
B = np.array([[6, 2, 1], [3, 4, 5], [5, 3, 7]])

# Vectorized matrix multiplication using numpy
C = np.dot(A, B)

# Explicit triple-loop reference implementation, used to verify the
# vectorized result above.
D = np.zeros((3, 3))
for i in range(3):
    for j in range(3):
        for k in range(3):
            D[i, j] += A[i, k] * B[k, j]

# Compare matrix multiplication results
if np.array_equal(C, D):
    print("Matrix optimization is successful")
else:
    print("Matrix optimization failed")
# File: D (Python 2.4)
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from direct.directnotify import DirectNotifyGlobal
from pandac.PandaModules import *
from otp.otpgui import OTPDialog
from pirates.battle.CannonGUI import CannonGUI
from pirates.battle import CannonGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesbase import PiratesGlobals
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui.PDialog import PDialog
from pirates.piratesgui.ReputationMeter import ReputationMeter
from pirates.battle import CannonGlobals
from pirates.battle import WeaponGlobals
from pirates.reputation import ReputationGlobals
from pirates.uberdog.UberDogGlobals import InventoryType
from pirates.minigame.CannonDefenseHUD import CannonDefenseHUD
from pirates.piratesgui.CannonDefenseHelpManager import CannonDefenseHelpManager
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
import pirates.minigame.AmmoPanel as pirates
import random
import math
class DefenseCannonGUI(CannonGUI):
    """GUI overlay for the cannon-defense minigame.

    Extends the base CannonGUI with an exit-confirmation dialog, a toggleable
    help overlay, a reputation meter and the defense HUD.

    NOTE(review): decompiled Python 2.4 source — name-mangled attributes
    appear in their expanded ``_DefenseCannonGUI__*`` form throughout.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DefenseCannonGUI')

    def __init__(self, cannon):
        CannonGUI.__init__(self, cannon)
        self.exitEvent = None  # optional callback fired when the player confirms exit
        self._DefenseCannonGUI__dialog = None  # active exit-confirmation PDialog, if any
        self.helpButton = None
        self.helpUI = None  # CannonDefenseHelpManager while the help overlay is shown
        self.flashHelp = None  # Sequence that flashes the help button's text color
        self.ammoFade = None  # Parallel interval fading the ammo counter widgets
        self._DefenseCannonGUI__ammoCountersHidden = False
        self.setupExtraButtons()
        # Route the inherited exit button through the confirmation dialog.
        self.exitCannon['command'] = self.showExitDialog
        # Reposition the inherited ammo widgets for this minigame's layout.
        self.volleyLabel.setPos(-0.28000000000000003, 0, 0.089999999999999997)
        self.reloadBar.setPos(-0.13, 0, 0.080000000000000002)
        self.ammoImage.setPos(-0.38, 0, 0.059999999999999998)
        self.repMeter = ReputationMeter(InventoryType.DefenseCannonRep, width = 0.69999999999999996)
        self.repMeter.reparentTo(base.a2dBottomCenter)
        self.repMeter.setPos(0.0, 0.0, 0.025000000000000001)
        self.hud = CannonDefenseHUD()
        self.hud.create()
        self._exp = 0  # accumulated defense-cannon reputation
        self.lastLevel = 1  # last level for which a level-up message was shown
        self.accept('incDefenseCannonExp', self.increaseExp)
        if __dev__:
            base.dcg = self

    def destroy(self):
        """Tear down all intervals, widgets and listeners created by this GUI."""
        if self.ammoFade:
            self.ammoFade.finish()
            self.ammoFade = None
        if self.flashHelp:
            self.flashHelp.finish()
            self.flashHelp = None
        if self.helpButton:
            self.helpButton.destroy()
            self.helpButton = None
        if self.helpUI:
            self.helpUI.destroy()
            self.helpUI = None
        if self.hud:
            self.hud.destroy()
            self.hud = None
        base.musicMgr.requestFadeOut(SoundGlobals.MUSIC_MINIGAME_CANNON)
        self.repMeter.destroy()
        self.ignore('incDefenseCannonExp')
        CannonGUI.destroy(self)

    def setupExtraButtons(self):
        """Create the '?' help button (with label) in the bottom-right corner."""
        weaponIcons = loader.loadModel('models/gui/gui_icons_weapon')
        self.helpButton = DirectButton(parent = base.a2dBottomRight, relief = None, pos = (-0.59999999999999998, 0, 0.089999999999999997), scale = 0.5, text = '?', text_pos = (0, -0.055), text_scale = 0.20999999999999999, text_fg = PiratesGuiGlobals.TextFG2, text_shadow = PiratesGuiGlobals.TextShadow, text_font = PiratesGlobals.getPirateBoldOutlineFont(), sortOrder = 2, command = self.toggleHelpUI)
        DirectLabel(parent = self.helpButton, text = PLocalizer.CannonDefense['Help'], text_pos = (0, -0.14999999999999999), text_scale = 0.080000000000000002, text_fg = PiratesGuiGlobals.TextFG2, text_shadow = PiratesGuiGlobals.TextShadow, text_font = PiratesGlobals.getPirateBoldOutlineFont(), frameColor = (1, 1, 1, 0))

    def increaseExp(self, amt, total):
        """Handle 'incDefenseCannonExp': add reputation, update the meter,
        and show a level-up message when a new level is reached.

        NOTE(review): when self._exp exceeds `total` the method returns
        without updating the meter — presumably a reputation cap; confirm
        against the event's sender.
        """
        self._exp += amt
        if self._exp > total:
            return None
        (level, leftoverValue) = ReputationGlobals.getLevelFromTotalReputation(InventoryType.DefenseCannonRep, self._exp)
        self.repMeter.update(self._exp)
        if level > self.lastLevel:
            base.localAvatar.levelUpMsg(InventoryType.DefenseCannonRep, level, 0)
            self.lastLevel = level

    def toggleHelpUI(self):
        """Show or hide the help overlay, fading the ammo counters and
        opening/closing the ammo panel to match.

        NOTE(review): the import binds the AmmoPanel *module* to the name
        'pirates', so the 'pirates.minigame.AmmoPanel.CLOSED' attribute chain
        below looks like a decompilation artifact — verify against the
        original source.
        """
        if self.helpUI == None:
            self._DefenseCannonGUI__createHelpUI()
            self.fadeOutAmmoCounters()
            if self.cannon.ammoPanel.state == pirates.minigame.AmmoPanel.CLOSED:
                self.cannon.ammoPanel.onTabClick()
        else:
            self._DefenseCannonGUI__destroyHelpUI()
            self.fadeInAmmoCounters()
            if self.cannon.ammoPanel.state == pirates.minigame.AmmoPanel.OPENED:
                self.cannon.ammoPanel.onTabClick()

    def _DefenseCannonGUI__createHelpUI(self):
        # Build the help overlay and attach each callout to the widget it
        # explains, then fade it in.
        self.helpUI = CannonDefenseHelpManager(0.5)
        self.helpUI.exit.reparentTo(self.exitCannon)
        self.helpUI.exit.setScale(2.0)
        self.helpUI.help.reparentTo(self.helpButton)
        self.helpUI.help.setScale(2.0)
        self.helpUI.ammoPanel.reparentTo(self.cannon.ammoPanel.panel)
        self.helpUI.ammoPanel.setScale(1.0 / 3.0)
        self.helpUI.ammo.reparentTo(base.a2dBottomCenter)
        self.helpUI.mine.reparentTo(self.hud.goldRemainingUI.mineCounter)
        self.helpUI.mine.setScale(2.0 / 3.0)
        self.helpUI.wave.reparentTo(self.hud.timeRemainingUI.timeRemaining)
        self.helpUI.wave.setScale(1.0 / 0.75)
        self.helpUI.fadeIn.start()

    def _DefenseCannonGUI__destroyHelpUI(self):
        # Fade the overlay out, then destroy it; the cleanup sequence owns
        # the helpUI reference from here on.
        cleanup = Sequence(Func(self.helpUI.fadeIn.pause), self.helpUI.fadeOut, Func(self.helpUI.destroy), name = self.cannon.uniqueName('HelpUI_FadeIn'))
        cleanup.start()
        self.helpUI = None

    def isHelpUIVisible(self):
        """Return True while the help overlay is shown."""
        return self.helpUI != None

    def flashHelpButton(self, delay = 0.20000000000000001, length = 5):
        """Flash the help button's text color `length` times to attract
        attention, toggling every `delay` seconds."""
        if self.flashHelp:
            self.flashHelp.finish()
            self.flashHelp = None
        self.flashHelp = Sequence(name = self.cannon.uniqueName('HelpButton_Flash'))

        def setColor(key, value):
            self.helpButton[key] = value

        for i in range(0, length):
            self.flashHelp.append(Wait(delay))
            self.flashHelp.append(Func(setColor, 'text_fg', PiratesGuiGlobals.TextFG19))
            self.flashHelp.append(Wait(delay))
            self.flashHelp.append(Func(setColor, 'text_fg', PiratesGuiGlobals.TextFG2))
        self.flashHelp.start()

    def fadeOutAmmoCounters(self, length = 0.5):
        """Fade the volley label, reload bar and ammo image to transparent."""
        if self._DefenseCannonGUI__ammoCountersHidden:
            return None
        transparent = Vec4(1, 1, 1, 0)
        if self.ammoFade:
            self.ammoFade.finish()
        self.ammoFade = Parallel(self.volleyLabel.colorScaleInterval(length, transparent), self.reloadBar.colorScaleInterval(length, transparent), self.ammoImage.colorScaleInterval(length, transparent))
        self.ammoFade.start()
        self._DefenseCannonGUI__ammoCountersHidden = True

    def fadeInAmmoCounters(self, length = 0.5):
        """Fade the ammo counter widgets back to fully opaque."""
        if self._DefenseCannonGUI__ammoCountersHidden == False:
            return None
        opaque = Vec4(1, 1, 1, 1)
        transparent = Vec4(1, 1, 1, 0)
        if self.ammoFade:
            self.ammoFade.finish()
        self.ammoFade = Parallel(self.volleyLabel.colorScaleInterval(length, opaque, transparent), self.reloadBar.colorScaleInterval(length, opaque, transparent), self.ammoImage.colorScaleInterval(length, opaque, transparent))
        self.ammoFade.start()
        self._DefenseCannonGUI__ammoCountersHidden = False

    def showExitDialog(self):
        """Toggle the yes/no exit-confirmation dialog."""
        if self._DefenseCannonGUI__dialog == None:
            self._DefenseCannonGUI__dialog = PDialog(text = PLocalizer.CannonDefense['ExitCannon'], style = OTPDialog.YesNo, giveMouse = False, command = self._DefenseCannonGUI__onDialogItemSelected)
        else:
            self._DefenseCannonGUI__dialog.cleanup()
            self._DefenseCannonGUI__dialog = None

    def _DefenseCannonGUI__onDialogItemSelected(self, value):
        # Dialog callback: value == 1 means "Yes" — fire the exit callback.
        if value == 1:
            if self.exitEvent:
                self.exitEvent()
        self._DefenseCannonGUI__dialog.cleanup()
        self._DefenseCannonGUI__dialog = None
|
<reponame>wcm-io/io.wcm.handler.link
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2014 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.handler.link.type;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.apache.sling.models.annotations.Model;
import org.apache.sling.models.annotations.injectorspecific.Self;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.osgi.annotation.versioning.ProviderType;
import io.wcm.handler.link.Link;
import io.wcm.handler.link.LinkNameConstants;
import io.wcm.handler.link.LinkRequest;
import io.wcm.handler.link.SyntheticLinkResource;
import io.wcm.handler.link.spi.LinkType;
import io.wcm.handler.media.Media;
import io.wcm.handler.media.MediaArgs;
import io.wcm.handler.media.MediaHandler;
/**
 * Default implementation of {@link io.wcm.handler.link.spi.LinkType} for media links.
 * Media links are links to media items from media sources
 * that implement the {@link io.wcm.handler.media.spi.MediaSource} interface.
 */
@Model(adaptables = {
    SlingHttpServletRequest.class, Resource.class
})
@ProviderType
public final class MediaLinkType extends LinkType {

  /**
   * Default root folder for DAM assets.
   */
  private static final String DEFAULT_DAM_ROOT = "/content/dam/";

  /**
   * Link type ID
   */
  public static final @NotNull String ID = "media";

  @Self
  private MediaHandler mediaHandler;

  /**
   * @return Link type ID (is stored as identifier in repository)
   */
  @Override
  public @NotNull String getId() {
    return ID;
  }

  /**
   * @return Human-readable label for this link type.
   */
  @Override
  public @NotNull String getLabel() {
    return "Asset";
  }

  /**
   * @return Name of the resource property holding the media reference.
   */
  @Override
  public String getPrimaryLinkRefProperty() {
    return LinkNameConstants.PN_LINK_MEDIA_REF;
  }

  /**
   * @return Resource type of the Granite UI edit component for this link type.
   */
  @Override
  public @Nullable String getEditComponentResourceType() {
    return "wcm-io/handler/link/components/granite/form/linktype/media";
  }

  @Override
  public boolean hasRichTextPlugin() {
    return true;
  }

  /**
   * @param linkRef Link reference string
   * @return true if the reference points into the default media content paths.
   */
  @Override
  public boolean accepts(@NotNull String linkRef) {
    // accept as media link if the ref is inside default media subtrees
    return MediaLinkType.isDefaultMediaContentPath(linkRef);
  }

  /**
   * Resolves the media reference via the media handler and stores URL,
   * asset and rendition on the given link. Marks the link invalid when a
   * non-empty reference could not be resolved.
   */
  @SuppressWarnings("null")
  @Override
  public @NotNull Link resolveLink(@NotNull Link link) {
    LinkRequest linkRequest = link.getLinkRequest();
    ValueMap props = linkRequest.getResourceProperties();

    // get properties
    String mediaRef = props.get(LinkNameConstants.PN_LINK_MEDIA_REF, link.getLinkRequest().getReference());
    boolean isDownload = props.get(LinkNameConstants.PN_LINK_MEDIA_DOWNLOAD, false);

    MediaArgs mediaArgs = new MediaArgs()
        // only allow linking to "download" media formats
        .download(true)
        .contentDispositionAttachment(isDownload)
        .urlMode(linkRequest.getLinkArgs().getUrlMode());

    // resolve media library reference
    Media media = mediaHandler.get(mediaRef, mediaArgs).build();

    // set resolved media references information in link metadata
    link.setUrl(media.getUrl());
    link.setTargetAsset(media.getAsset());
    link.setTargetRendition(media.getRendition());

    // mark link as invalid if a reference was set that could not be resolved
    if (link.getUrl() == null && StringUtils.isNotEmpty(mediaRef)) {
      link.setLinkReferenceInvalid(true);
    }

    return link;
  }

  /**
   * @param path Content path
   * @return true if Path is located below DAM default root folders.
   */
  public static boolean isDefaultMediaContentPath(String path) {
    return StringUtils.startsWith(path, DEFAULT_DAM_ROOT);
  }

  /**
   * Get synthetic link resource for this link type.
   * @param resourceResolver Resource resolver
   * @param path Resource path. Can be a non-existing path, but the path should be located somewhere within the
   *          applications content paths to make sure the handler configuration looked up via context-aware services
   *          is the expected one.
   * @param mediaRef Media asset reference
   * @return Synthetic link resource
   */
  public static @NotNull Resource getSyntheticLinkResource(@NotNull ResourceResolver resourceResolver,
      @NotNull String path, @NotNull String mediaRef) {
    Map<String, Object> map = new HashMap<>();
    map.put(LinkNameConstants.PN_LINK_TYPE, ID);
    map.put(LinkNameConstants.PN_LINK_MEDIA_REF, mediaRef);
    return new SyntheticLinkResource(resourceResolver, path, map);
  }

  /**
   * Get synthetic link resource for this link type.
   * @param resourceResolver Resource resolver
   * @param mediaRef Media asset reference
   * @return Synthetic link resource
   * @deprecated Please use {@link #getSyntheticLinkResource(ResourceResolver, String, String)}
   */
  @Deprecated
  public static @NotNull Resource getSyntheticLinkResource(@NotNull ResourceResolver resourceResolver, @NotNull String mediaRef) {
    Map<String, Object> map = new HashMap<>();
    map.put(LinkNameConstants.PN_LINK_TYPE, ID);
    map.put(LinkNameConstants.PN_LINK_MEDIA_REF, mediaRef);
    return new SyntheticLinkResource(resourceResolver, map);
  }

  @Override
  public String toString() {
    return ID;
  }

}
|
package de.siphalor.tweed.config.constraints;
import de.siphalor.tweed.config.entry.ValueConfigEntry;
/**
 * A constraint that validates and/or adjusts values of a {@link ValueConfigEntry}.
 *
 * @param <T> the value type handled by this constraint
 */
public interface Constraint<T> {
	/**
	 * A constraint which may modify the {@link ValueConfigEntry} or if an error occurred should throw a {@link ConstraintException}.
	 *
	 * @param value the value to validate and/or adjust
	 * @param configEntry the entry to check and/or modify
	 * @throws ConstraintException a possible exception in case of problems
	 */
	void apply(T value, ValueConfigEntry<T> configEntry) throws ConstraintException;

	/**
	 * @return a human-readable description of this constraint
	 */
	String getDescription();
}
|
#!/bin/sh
# Invoke the CMake-generated rules makefile for the opencv_python2 module.
# NOTE(review): "bulid" is presumably the literal (misspelled) name of the
# build directory on disk — verify before "fixing" the path.
# NOTE(review): $CONFIGURATION is appended directly to the makefile name;
# Xcode emits one rules makefile per configuration (e.g. "...make" + "Debug"),
# so the missing separator appears intentional — confirm in CMakeScripts/.
make -C /Users/chushoutv/Desktop/opencv-3.3.1/bulid/modules/python2 -f /Users/chushoutv/Desktop/opencv-3.3.1/bulid/modules/python2/CMakeScripts/opencv_python2_cmakeRulesBuildPhase.make$CONFIGURATION all
|
#!/usr/bin/env bash
# Copyright 2021 Contributors to the Parsec project.
# SPDX-License-Identifier: Apache-2.0
# Continuous Integration test script
set -euf -o pipefail

# Cross-compilation targets the crate must build for.
targets=(
    arm-unknown-linux-gnueabi
    armv7-unknown-linux-gnueabi
    armv7-unknown-linux-gnueabihf
    aarch64-unknown-linux-gnu
    i686-unknown-linux-gnu
    powerpc64-unknown-linux-gnu
    powerpc64le-unknown-linux-gnu
    x86_64-pc-windows-msvc
    x86_64-apple-darwin
)

# Host build first, then every cross-compilation target in order.
RUST_BACKTRACE=1 cargo build
for target in "${targets[@]}"; do
    RUST_BACKTRACE=1 cargo build --target "$target"
done

# Make sure the generated PKCS#11 bindings still build.
pushd cryptoki-sys
RUST_BACKTRACE=1 cargo build --features generate-bindings
popd

# Formatting and lint checks, skipped when the components are not installed.
if cargo fmt -h; then
    cargo fmt --all -- --check
fi
if cargo clippy -h; then
    cargo clippy --all-targets -- -D clippy::all -D clippy::cargo
fi

RUST_BACKTRACE=1 cargo test
|
#!/bin/bash
# Install desktop/virtualization prerequisites, then Docker CE from the
# official repository, and start/enable the Docker service.

yum install qemu-img caja xorg-* mesa-* -y

# Set up the Docker CE repository.
sudo dnf -y install dnf-plugins-core
sudo dnf config-manager \
    --add-repo \
    https://download.docker.com/linux/fedora/docker-ce.repo

# -y added so the script does not block on an interactive prompt.
sudo dnf -y install docker-ce docker-ce-cli containerd.io

# Start Docker now and enable it at system startup.
# (The original trailing "(for starting docker at system startup)" was not a
# comment: bash parsed it as a subshell containing an invalid `for` loop,
# which is a syntax error — it is now a real comment.)
systemctl start docker
systemctl enable docker
|
#!/bin/bash
# Delete every pod in a namespace whose status phase matches the given value.
# Usage: $0 <env> <namespace> <status>
#   env       selects the kubectl wrapper script "<env>k.sh" (test/dev/stg/prod)
#   namespace namespace to scan
#   status    pod phase to match, e.g. 'Failed'
# DRY=1 (only display what would be done)
if [ -z "$1" ]
then
	echo "$0: you need to specify env: test, dev, stg, prod"
	exit 1
fi
if [ -z "$2" ]
then
	echo "$0: please provide namespace name as a 2nd argument"
	exit 2
fi
if [ -z "$3" ]
then
	echo "$0: please provide pod status, for example 'Failed'"
	exit 3
fi
# One "name;phase" pair per whitespace-separated entry for every pod.
list=`"${1}k.sh" get po -n "$2" -o=jsonpath='{range .items[*]}{.metadata.name}{";"}{.status.phase}{"\n"}{end}'`
pods=""
for data in $list
do
	# Split "name;phase" on the semicolon into pod name and status.
	IFS=';'
	arr=($data)
	unset IFS
	pod=${arr[0]}
	sts=${arr[1]}
	if [ "$sts" = "$3" ]
	then
		pods="${pods} ${pod}"
	fi
done
if [ ! -z "$pods" ]
then
	if [ -z "$DRY" ]
	then
		echo "Deleting pods: ${pods}"
		# $pods is intentionally unquoted: word splitting yields one
		# argument per pod name.
		"${1}k.sh" -n "$2" delete pod ${pods}
	else
		echo "Would delete pods: ${pods}"
	fi
fi
|
// Public entry points for the markdown rendering scripts.
// NOTE(review): MarkdownOptions looks like a type-only export; if so,
// re-export it via `export type { MarkdownOptions }` to stay compatible
// with isolatedModules — confirm against ./renderMarkdown.
export { scriptMain as renderMarkdown, MarkdownOptions } from './renderMarkdown';
export { MARKDOWN_VALIDATE } from './markdownConfigValidate';
export { scriptMain as renderMarkdownConfig } from './renderMarkdownConfig';
|
# cd ../Downloads/
# Resolve the directory containing this script so it can be invoked from any
# working directory. Quoting added: the original unquoted $(dirname $0) and
# $cdir broke on paths containing spaces, and cd failures were ignored.
cdir=`cd "$(dirname "$0")"; pwd`
cd "$cdir" || exit 1
cd test || exit 1
pwd
./test_all.sh
<filename>lib/rahel/glass.rb
# 2014-06-17 <EMAIL>
module Rahel
  # Rendering helper ("glass") that turns individuals and their properties
  # into HTML fragments via the controller's partials. Three modes:
  # inline (read-only), edit, and new.
  class Glass
    # controller: the Rails controller used for render_to_string and
    # current_user lookups.
    def initialize controller
      @controller = controller
    end

    # Translated, human-readable label for a predicate of an individual.
    def humanize individual, predicate
      I18n.translate "#{individual.class.name}.#{predicate}"
    end

    #
    # INLINE
    #

    # Render the read-only property group partial for one predicate.
    def inline individual, predicate, locals: {}
      @controller.render_to_string(
        partial: "glass/inline/property_group",
        locals: {
          individual: individual,
          predicate: predicate,
        }.merge(locals)
      ).html_safe
    end

    # Render an individual inline, using its own template when it has one,
    # falling back to to_s otherwise.
    def inline_individual individual, locals: {}
      if individual.respond_to? "inline_template"
        @controller.render_to_string(
          partial: individual.inline_template,
          locals: { individual: individual }.merge(locals)
        ).html_safe
      else
        individual.to_s
      end
    end

    # Render a single property read-only.
    def inline_property property, locals: {}
      @controller.render_to_string(
        partial: "glass/inline/property",
        locals: { property: property }.merge(locals)
      ).html_safe
    end

    #
    # EDIT
    #

    # Render edit widgets for all properties of a predicate. When none exist
    # but cardinality is 1, a property (and possibly a "weak" individual) is
    # created on the fly so there is something to edit.
    def edit individual, predicate, locals: {}
      props = individual.get_sorted_properties_array(predicate)
      prop_class = individual.class_of(predicate)
      if props.any?
        props.map { |prop| edit_property prop }.join.html_safe
      elsif individual.cardinality_of(predicate) == 1
        # We may need to display something even though no property exists in the DB yet.
        if prop_class == PropertyObjekt && individual.editable?(predicate)
          # This is the "weak"-individual case.
          # First create the individual (with a revision).
          objekt = Individual.create(type: individual.singular_range_of(predicate))
          Revision.create_from_new_individual(objekt, @controller.current_user)
          # Now the property linking them (again with a revision). (Inverses are created if applicable.)
          prop = individual.send("#{predicate}=", objekt)
          Revision.create_from_new_property(prop, @controller.current_user)
          Revision.create_from_new_property(prop.inverse, @controller.current_user) if prop.inverse
          edit_property prop
        elsif prop_class != PropertyObjekt
          # It is not an object property, so an unsaved placeholder property suffices.
          prop = prop_class.new(subject: individual,
                                predicate: predicate)
          edit_property prop
        end
      end
    end

    # Render the edit widget for a single property.
    def edit_property property, locals: {}
      @controller.render_to_string(
        partial: "glass/edit/property",
        locals: { property: property }.merge(locals)
      ).html_safe
    end

    # Render an individual's edit template when it has one, else to_s.
    def edit_individual individual, locals: {}
      if individual.respond_to? "edit_template"
        @controller.render_to_string(
          partial: individual.edit_template,
          locals: { individual: individual }.merge(locals)
        ).html_safe
      else
        individual.to_s
      end
    end

    #
    # NEW
    #

    # Render the "add new property" widget for a predicate.
    def new individual, predicate
      @controller.render_to_string(
        partial: "glass/new/property",
        locals: { predicate: predicate, individual: individual }
      ).html_safe
    end
  end
end
|
import * as myExtension from '../src/extension';
import * as vscodeTestContent from 'vscode-test-content';
import * as sinon from 'sinon';
import * as chai from 'chai';
import * as sinonChai from 'sinon-chai';
import * as mocha from 'mocha';
import { FileContents } from './../src/file-contents';
import { TemplateType } from './../src/enums/template-type';
import { config as defaultConfig } from './../src/config/cli-config';
import { IConfig } from '../src/models/config';
import * as dJSON from 'dirty-json';
// Fix: every assertion previously ended with a dangling `.throw` property
// access (e.g. `expect(x).to.contain(y).throw;`). In chai, `.throw` asserts
// that a *function* target throws and must be invoked; accessed as a bare
// property it is a no-op that only suggests extra checking. Removed throughout.
chai.use(sinonChai);
const expect = chai.expect;
let config: IConfig = dJSON.parse(JSON.stringify(defaultConfig));

describe('File content tests', () => {
  const fc = new FileContents();
  fc.loadTemplates();

  // Reset config to defaults before each test so mutations don't leak.
  beforeEach(() => {
    config = dJSON.parse(JSON.stringify(defaultConfig));
  });

  describe('Class tests', () => {
    it('Should create a valid class', () => {
      const content = fc.getTemplateContent(TemplateType.Class, config, 'angular-files');
      expect(content).to.contain('export class', 'Should export class');
      expect(content).to.contain('AngularFiles', 'Should have a valid class name');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid class spec', () => {
      const content = fc.getTemplateContent(TemplateType.ClassSpec, config, 'angular-files');
      expect(content).to.contain(`import {AngularFiles} from './angular-files'`, 'Should have a valid import in spec');
      expect(content).to.contain(`describe('AngularFiles'`, 'Should have a valid describe in spec');
      expect(content).to.contain(`expect(new AngularFiles())`, 'Should have a valid expect in spec');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Enum tests', () => {
    it('Should create a valid enum', () => {
      const content = fc.getTemplateContent(TemplateType.Enum, config, 'angular-files');
      expect(content).to.contain('export enum', 'Should export enum');
      expect(content).to.contain('AngularFiles', 'Should have a valid enum name');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Interface tests', () => {
    // NOTE(review): TemplateType.Inteface is misspelled in the enum itself
    // (defined elsewhere) — fix there first, then update these usages.
    it('Should create a valid interface', () => {
      const content = fc.getTemplateContent(TemplateType.Inteface, config, 'angular-files');
      expect(content).to.contain('export interface', 'Should export enum');
      expect(content).to.contain('AngularFiles', 'Should have a valid interface name');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid interface with prefix', () => {
      config.defaults.interface.prefix = 'I';
      const content = fc.getTemplateContent(TemplateType.Inteface, config, 'angular-files');
      expect(content).to.contain(`${config.defaults.interface.prefix}AngularFiles`, 'Should have a valid interface name');
    });
  });

  describe('Route tests', () => {
    it('Should create a valid route', () => {
      const content = fc.getTemplateContent(TemplateType.Route, config, 'angular-files');
      expect(content).to.contain('export const AngularFilesRoutes', 'Should export route');
      expect(content).to.contain(`import { Routes, RouterModule } from '@angular/router'`, 'Should have valid routes imports');
      expect(content).to.contain('const routes: Routes', 'Should declare routes');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Service tests', () => {
    it('Should create a valid service', () => {
      const content = fc.getTemplateContent(TemplateType.Service, config, 'angular-files');
      expect(content).to.contain('@Injectable()', 'Should be injectable service');
      expect(content).to.contain('export class', 'Should export service');
      expect(content).to.contain('AngularFilesService', 'Should have a valid service name');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid service spec', () => {
      const content = fc.getTemplateContent(TemplateType.ServiceSpec, config, 'angular-files');
      expect(content).to.contain(`import { AngularFilesService } from './angular-files.service'`, 'Should have a valid import in spec');
      expect(content).to.contain(`describe('Service: AngularFiles'`, 'Should have a valid describe in spec');
      expect(content).to.contain(`expect(service)`, 'Should have a valid expect in spec');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Pipe tests', () => {
    it('Should create a valid pipe', () => {
      const content = fc.getTemplateContent(TemplateType.Pipe, config, 'angular-files');
      expect(content).to.contain('export class', 'Should export service');
      expect(content).to.contain('AngularFilesPipe', 'Should have a valid service name');
      expect(content).to.contain('transform(', 'Should implement transform method');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid pipe spec', () => {
      const content = fc.getTemplateContent(TemplateType.PipeSpec, config, 'angular-files');
      expect(content).to.contain(`import { AngularFilesPipe } from './angular-files.pipe'`, 'Should have a valid import in spec');
      // NOTE(review): 'AngularFilese' matches the (typo'd) template content;
      // fix the template before fixing this expected string.
      expect(content).to.contain(`describe('Pipe: AngularFilese'`, 'Should have a valid describe in spec');
      expect(content).to.contain(`expect(pipe)`, 'Should have a valid expect in spec');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Module tests', () => {
    it('Should create a valid module', () => {
      const content = fc.getTemplateContent(TemplateType.Module, config, 'angular-files');
      expect(content).to.contain('export class AngularFilesModule', 'Should export module');
      expect(content).to.contain('declarations: [AngularFilesComponent]', 'Should declare component');
      expect(content).to.contain('@NgModule({', 'Should define new module');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Directive tests', () => {
    it('Should create a valid directive', () => {
      const content = fc.getTemplateContent(TemplateType.Directive, config, 'angular-files');
      expect(content).to.contain('export class AngularFilesDirective', 'Should export directive');
      expect(content).to.contain(`selector: '[appAngularFiles]'`, 'Should have a valid selector');
      expect(content).to.contain('@Directive({', 'Should define new directive');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid directive with app prefix', () => {
      config.apps[0].prefix = 'ng';
      const content = fc.getTemplateContent(TemplateType.Directive, config, 'angular-files');
      expect(content).to.contain(`selector: '[ngAngularFiles]'`, 'Should have a valid selector');
    });
    it('Should create a valid directive spec', () => {
      const content = fc.getTemplateContent(TemplateType.DirectiveSpec, config, 'angular-files');
      expect(content).to.contain(`import { AngularFilesDirective } from './angular-files.directive'`, 'Should have a valid import in spec');
      expect(content).to.contain(`describe('Directive: AngularFiles'`, 'Should have a valid describe in spec');
      expect(content).to.contain(`expect(directive)`, 'Should have a valid expect in spec');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });

  describe('Component tests', () => {
    it('Should create a valid component', () => {
      const content = fc.getTemplateContent(TemplateType.Component, config, 'angular-files');
      expect(content).to.contain('export class AngularFilesComponent', 'Should export component');
      expect(content).to.contain(`selector: 'app-angular-files'`, 'Should have a valid selector');
      expect(content).to.contain('@Component({', 'Should define new component');
      expect(content).to.contain('templateUrl', 'Should define templateUrl');
      expect(content).to.contain('styleUrls', 'Should define styleUrls');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
    it('Should create a valid component with app prefix', () => {
      config.apps[0].prefix = 'ng';
      const content = fc.getTemplateContent(TemplateType.Component, config, 'angular-files');
      expect(content).to.contain(`selector: 'ng-angular-files'`, 'Should have a valid selector');
    });
    it('Should create a valid component style', () => {
      const content = fc.getTemplateContent(TemplateType.ComponentStyle, config, 'angular-files');
      expect(content).to.be.eql('', 'Should have a valid component style');
    });
    it('Should create a valid component html', () => {
      const content = fc.getTemplateContent(TemplateType.ComponentHtml, config, 'angular-files');
      expect(content).to.contain('angular-files works!', 'Should have a valid html paragraph');
    });
    it('Should create a valid component spec', () => {
      // NOTE(review): TemplateType.ConponentSpec is misspelled in the enum
      // itself (defined elsewhere).
      const content = fc.getTemplateContent(TemplateType.ConponentSpec, config, 'angular-files');
      expect(content).to.contain(`import { AngularFilesComponent } from './angular-files.component'`, 'Should have a valid import in spec');
      expect(content).to.contain(`describe('AngularFilesComponent'`, 'Should have a valid describe in spec');
      expect(content).to.contain(`expect(component)`, 'Should have a valid expect in spec');
      expect(content.split(/\r?\n/).pop()).to.be.eql('', 'Should end with a newline');
    });
  });
});
|
<filename>src/main/java/edu/wpi/first/gradlerio/deploy/DebuggableArtifact.java
package edu.wpi.first.gradlerio.deploy;
import edu.wpi.first.deployutils.deploy.artifact.Artifact;
/**
 * An {@link Artifact} that carries target debug information, allowing a
 * debugger to be attached to it after deployment.
 */
public interface DebuggableArtifact extends Artifact {
  /**
   * @return the debug information for the deploy target
   */
  TargetDebugInfo getTargetDebugInfo();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.