text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Integration test for the core Trillian log: starts the log components,
# provisions a test tree, runs the Go LiveLog tests against it, and dumps the
# server/signer logs when the tests fail.
set -e
INTEGRATION_DIR="$( cd "$( dirname "$0" )" && pwd )"
. "${INTEGRATION_DIR}"/functions.sh

echo "Launching core Trillian log components"
log_prep_test 1 1

# Cleanup for the Trillian components
TO_DELETE="${TO_DELETE} ${ETCD_DB_DIR}"
TO_KILL+=(${LOG_SIGNER_PIDS[@]})
TO_KILL+=(${RPC_SERVER_PIDS[@]})
TO_KILL+=(${ETCD_PID})

echo "Provision log"
go build ${GOFLAGS} github.com/google/trillian/cmd/createtree/
# Fix: --pem_key_path was previously passed twice with different values, so
# the key actually used depended on the flag library's override order. Keep
# only the GOPATH-based path.
TEST_TREE_ID=$(./createtree \
  --admin_server="${RPC_SERVER_1}" \
  --pem_key_path="${GOPATH}/src/github.com/google/trillian/testdata/log-rpc-server.privkey.pem" \
  --pem_key_password=towel \
  --signature_algorithm=ECDSA)
echo "Created tree ${TEST_TREE_ID}"

echo "Running test"
pushd "${INTEGRATION_DIR}"
# Do not abort on test failure: the servers still need to be stopped and the
# logs dumped before exiting with the test's status.
set +e
go test -run ".*LiveLog.*" --timeout=5m ./ --treeid "${TEST_TREE_ID}" --log_rpc_server="${RPC_SERVER_1}"
RESULT=$?
set -e
popd

log_stop_test
TO_KILL=()

if [ "${RESULT}" != 0 ]; then
  sleep 1
  echo "Server log:"
  echo "--------------------"
  cat "${TMPDIR}"/trillian_log_server.INFO
  echo "Signer log:"
  echo "--------------------"
  cat "${TMPDIR}"/trillian_log_signer.INFO
  exit "${RESULT}"
fi
|
<gh_stars>0
define(function () {
  // Pre-compiled template module: renders a <head> element whose <title>
  // text is bound to obj.title (empty string when title is missing/null).
  return function (obj) {
    var __t;
    var __p = '';
    var __j = Array.prototype.join;
    var print = function () { __p += __j.call(arguments, ''); };
    __p += '<head><title data-binding="obj.title">';
    __t = (typeof obj.title !== 'undefined' ? obj.title : '');
    __p += (__t == null ? '' : __t);
    __p += '</title></head>';
    return __p;
  };
});
import express from 'express';
import axios from 'axios';
const app = express();

/**
 * GET /stocks — proxies batch quotes for a fixed symbol list from the IEX
 * API and relays the JSON payload to the client.
 */
app.get('/stocks', async (req, res) => {
  try {
    const stocks = await axios.get('https://api.iextrading.com/1.0/stock/market/batch?types=quote&symbols=aapl,fb,msft');
    return res.json(stocks.data);
  } catch (err) {
    // Fix: the original had no error handling, so an upstream failure left an
    // unhandled promise rejection and the client request hanging forever.
    return res.status(502).json({ error: 'Failed to fetch stock data.' });
  }
});

app.listen(3000, () => {
  console.log('Server listening on port 3000.');
});
from __future__ import unicode_literals
from documents.models import Document
from documents.permissions import permission_document_create
from documents.tests import (
GenericDocumentViewTestCase, TEST_SMALL_DOCUMENT_PATH,
)
from sources.models import WebFormSource
from sources.tests.literals import (
TEST_SOURCE_LABEL, TEST_SOURCE_UNCOMPRESS_N
)
from ..models import Tag
from .literals import TEST_TAG_COLOR, TEST_TAG_LABEL
class TaggedDocumentUploadTestCase(GenericDocumentViewTestCase):
    """Verify that a tag selected in the upload wizard is attached to the
    newly created document."""

    def setUp(self):
        super(TaggedDocumentUploadTestCase, self).setUp()
        self.login_user()
        # Web form source through which the test document will be uploaded.
        self.source = WebFormSource.objects.create(
            enabled=True, label=TEST_SOURCE_LABEL,
            uncompress=TEST_SOURCE_UNCOMPRESS_N
        )
        # Drop the document created by the base class; the assertion below
        # inspects the single document produced by the upload view itself.
        self.document.delete()

    def _request_upload_interactive_document_create_view(self):
        # Upload the fixture with self.tag preselected in the form.
        # Fix: open in binary mode — document fixtures are binary payloads and
        # text mode can corrupt them (and fails on non-UTF-8 content).
        with open(TEST_SMALL_DOCUMENT_PATH, mode='rb') as file_object:
            return self.post(
                viewname='sources:upload_interactive', args=(self.source.pk,),
                data={
                    'document_type_id': self.document_type.pk,
                    'source-file': file_object,
                    'tags': self.tag.pk
                }
            )

    def _create_tag(self):
        self.tag = Tag.objects.create(
            color=TEST_TAG_COLOR, label=TEST_TAG_LABEL
        )

    def test_upload_interactive_view_with_access(self):
        self._create_tag()
        self.grant_access(
            permission=permission_document_create, obj=self.document_type
        )
        response = self._request_upload_interactive_document_create_view()
        # A successful upload redirects (302) and the tag is on the document.
        self.assertEqual(response.status_code, 302)
        self.assertTrue(self.tag in Document.objects.first().tags.all())
|
sortedTuples = sorted(tuples, key = lambda x: x[1]) |
#!/bin/bash
# The MIT License.
#
# Copyright (c) 2021 Martín E. Zahnd < mzahnd at itba dot edu dot ar >
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Folder with images/videos to display
# Yes. I hardcoded it on purpose.
if [ "${USER}" = 'seabmo' ]; then
    readonly FOLDER='/home/seabmo/Videos/LockScreen/Apple/'
else
    # NOTE(review): an empty FOLDER makes the find command below search the
    # current working directory — confirm that is intended for other users.
    readonly FOLDER=''
fi

# find(1) predicate matching video files (mov/mp4/mkv); combined with FOLDER
# into XSECURELOCK_LIST_VIDEOS_COMMAND below.
readonly FIND_CMD="-type f -regex '^.*\.\(mov\|mp4\|mkv\)$'"

# For more environment variables run `xsecurelock --help`
# == Auth box ==
export XSECURELOCK_AUTH_CURSOR_BLINK=1
export XSECURELOCK_AUTH_TIMEOUT=5
export XSECURELOCK_AUTH_SOUNDS=0
# Single auth window = 1; One auth window per screen = 0
export XSECURELOCK_SINGLE_AUTH_WINDOW=0
export XSECURELOCK_SHOW_HOSTNAME=0
export XSECURELOCK_SHOW_USERNAME=1
# One of:
# asterisks / cursor / disco / emoji / emoticon / hidden / kaomoji /
# time / time_hex
export XSECURELOCK_PASSWORD_PROMPT='time_hex'
# -- Colors --
export XSECURELOCK_AUTH_BACKGROUND_COLOR='#003366'
export XSECURELOCK_AUTH_FOREGROUND_COLOR='#ffffff'
export XSECURELOCK_AUTH_WARNING_COLOR='#ff416d'
# -- Font --
export XSECURELOCK_FONT="IBM Plex Sans:style=Regular:size=14"
# -- Date-time --
export XSECURELOCK_DATETIME_FORMAT='%A %d, %B %Y - %H:%M:%S'
export XSECURELOCK_SHOW_DATETIME=1
# == END Auth box ==
# == General config ==
# < 0 : Never blank the screen
# In seconds
export XSECURELOCK_BLANK_TIMEOUT=1800
export XSECURELOCK_COMPOSITE_OBSCURER=1
export XSECURELOCK_DISCARD_FIRST_KEYPRESS=1
export XSECURELOCK_LIST_VIDEOS_COMMAND="find ${FOLDER} ${FIND_CMD}"
export XSECURELOCK_SAVER=saver_mpv
# This can cause problems. Be careful.
export XSECURELOCK_FORCE_GRAB=1
# Milliseconds to wait after dimming (and before locking) when above xss-lock
# command line is used. Should be at least as large as the period time set
# using "xset s". Also used by wait_nonidle to know when to assume dimming and
# waiting has finished and exit.
export XSECURELOCK_WAIT_TIME_MS=0
# -- DIM --
#export XSECURELOCK_DIM_FPS=60
#export XSECURELOCK_DIM_OVERRIDE_COMPOSITOR_DETECTION=1
#export XSECURELOCK_DIM_TIME_MS=1500
# -- Keyboard --
# .. Brightness keys ..
export XSECURELOCK_KEY_XF86MonBrightnessDown_COMMAND='/usr/local/bin/backlight-manager dec 1'
export XSECURELOCK_KEY_XF86MonBrightnessUp_COMMAND='/usr/local/bin/backlight-manager inc 1'
# .. Media keys ..
export XSECURELOCK_KEY_XF86AudioMute_COMMAND="pactl set-sink-mute 0 toggle"
export XSECURELOCK_KEY_XF86AudioLowerVolume_COMMAND="pactl set-sink-volume 0 -1%"
export XSECURELOCK_KEY_XF86AudioRaiseVolume_COMMAND="pactl set-sink-volume 0 +1%"
export XSECURELOCK_KEY_XF86AudioPrev_COMMAND="playerctl previous"
export XSECURELOCK_KEY_XF86AudioNext_COMMAND="playerctl next"
export XSECURELOCK_KEY_XF86AudioPlay_COMMAND="playerctl play-pause"
# == END General config ==
xsecurelock &
# NOTE(review): exiting right after backgrounding xsecurelock reports the
# status of `&` (always 0), not of the locker itself; a `wait` before the
# exit may have been intended — confirm.
exit ${?}
|
#!/bin/sh
# Build Triangular.jar: compile the sources, package the classes together
# with the data/ directory, then remove the intermediate class files.
# Fix: quote the classpath so the *shell* does not glob-expand lib/* into
# multiple arguments (which breaks -cp when lib/ holds more than one jar);
# javac itself expands the "lib/*" wildcard to every jar in lib/.
javac -cp "src:lib/*" src/*.java -d .
jar -cvfm Triangular.jar MANIFEST.MF *.class data
rm *.class
|
package api
import (
"fmt"
"net/http"
"github.com/storageos/go-api/v2/api"
)
// badRequestError indicates that the request made by the client is invalid.
type badRequestError struct {
	msg string
}

// Error returns the wrapped message, falling back to a generic description
// when no message was provided.
func (e badRequestError) Error() string {
	if e.msg != "" {
		return e.msg
	}
	return "bad request"
}

// newBadRequestError wraps msg in a badRequestError.
func newBadRequestError(msg string) badRequestError {
	return badRequestError{msg: msg}
}
// notFoundError indicates that a resource involved in carrying out the API
// request was not found.
type notFoundError struct {
	msg string
}

// Error returns the wrapped message, falling back to a generic description
// when no message was provided.
func (e notFoundError) Error() string {
	if e.msg != "" {
		return e.msg
	}
	return "not found"
}

// newNotFoundError wraps msg in a notFoundError.
func newNotFoundError(msg string) notFoundError {
	return notFoundError{msg: msg}
}
// conflictError indicates that the requested operation could not be carried
// out due to a conflict between the current state and the desired state.
type conflictError struct {
	msg string
}

// Error returns the wrapped message, falling back to a generic description
// when no message was provided.
func (e conflictError) Error() string {
	if e.msg != "" {
		return e.msg
	}
	return "conflict"
}

// newConflictError wraps msg in a conflictError.
func newConflictError(msg string) conflictError {
	return conflictError{msg: msg}
}
// openAPIError wraps the generated OpenAPI Error model so it can be handled
// as a standard Go error value.
type openAPIError struct {
	inner Error
}

// Error returns the message carried by the wrapped model.
// (inner.Error is the model's string field, not a method call.)
func (e openAPIError) Error() string {
	return e.inner.Error
}

// newOpenAPIError wraps err in an openAPIError.
func newOpenAPIError(err Error) openAPIError {
	return openAPIError{
		inner: err,
	}
}
// MapAPIError will given err and its corresponding resp attempt to map the
// HTTP error to an application level error.
//
// err is returned as is when any of the following are true:
//
//  → resp is nil
//  → err is not a GenericOpenAPIError or the unexported openAPIError
//
// Some response codes must be mapped by the caller in order to provide useful
// application level errors:
//
//  → http.StatusBadRequest returns a badRequestError, which must have a 1-to-1
//    mapping to a context specific application error
//  → http.StatusNotFound returns a notFoundError, which must have a 1-to-1
//    mapping to a context specific application error
//  → http.StatusConflict returns a conflictError which must have a 1-to-1
//    mapping to a context specific application error
//
func MapAPIError(err error, resp *http.Response) error {
	if resp == nil {
		return err
	}
	// Extract a human-readable detail string from the supported error types;
	// any other error type is returned to the caller untouched.
	var details string
	switch v := err.(type) {
	case GenericOpenAPIError:
		switch model := v.Model().(type) {
		case Error:
			details = model.Error
		default:
			// No structured model available: fall back to the raw body bytes.
			details = fmt.Sprintf("%s", v.Body())
		}
	case openAPIError:
		details = v.Error()
	default:
		return err
	}
	// Map the HTTP status code to the matching application-level error,
	// carrying the extracted details as the message.
	switch resp.StatusCode {
	// 4XX
	case http.StatusBadRequest:
		return newBadRequestError(details)
	case http.StatusUnauthorized:
		return api.NewAuthenticationError(details)
	case http.StatusForbidden:
		return api.NewUnauthorisedError(details)
	case http.StatusNotFound:
		return newNotFoundError(details)
	case http.StatusConflict:
		return newConflictError(details)
	case http.StatusPreconditionFailed:
		return api.NewStaleWriteError(details)
	case http.StatusUnprocessableEntity:
		return api.NewInvalidStateTransitionError(details)
	case http.StatusLocked:
		return api.NewLockedError(details)
	// This may need changing to present a friendly error, or it may be done up
	// the call stack.
	case http.StatusUnavailableForLegalReasons:
		return api.NewLicenceCapabilityError(details)
	// 5XX
	case http.StatusInternalServerError:
		return api.NewServerError(details)
	case http.StatusServiceUnavailable:
		return api.NewStoreError(details)
	default:
		// If details were obtained from the error, decorate it - even when
		// unknown.
		if details != "" {
			err = fmt.Errorf("%w: %v", err, details)
		}
		return err
	}
}
|
<reponame>ineunetOS/knife-commons<filename>knife-commons-config/src/main/java/com/ineunet/knife/config/ConfigType.java
/*
* Copyright 2013-2016 iNeunet OpenSource and the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.ineunet.knife.config;
/**
*
* @author <NAME>
*
*/
/**
 * Typed constant ("enum-like") class naming the configuration sections used
 * by the knife framework. Instances are the predefined constants below;
 * subclasses may add more via the protected constructor.
 */
public class ConfigType {

	// Fix: the section name is immutable state in a shared constant, so it
	// must be final to be safely publishable.
	private final String value;

	protected ConfigType(String value) {
		this.value = value;
	}

	/** @return the configuration section name, e.g. {@code "db"}. */
	public String getValue() {
		return value;
	}

	public static final ConfigType knife = new ConfigType("knife");
	public static final ConfigType menus = new ConfigType("menus");
	public static final ConfigType db = new ConfigType("db");
	public static final ConfigType log = new ConfigType("log");
	public static final ConfigType ui = new ConfigType("ui");
}
|
<gh_stars>0
class Solution {
    /**
     * Returns the most frequent word of {@code paragraph} that is not in
     * {@code banned}. Matching is case-insensitive and the punctuation
     * characters , . - ! ? ; ' are treated as separators.
     */
    public String mostCommonWord(String paragraph, String[] banned) {
        String[] words = paragraph.replaceAll("[,.\\-!?;']", " ")
                                  .toLowerCase().trim().split("\\s+");
        // Tally every word.
        Map<String, Integer> counts = new HashMap<String, Integer>();
        for (String word : words) {
            Integer seen = counts.get(word);
            counts.put(word, seen == null ? 1 : seen + 1);
        }
        // Drop the banned words before picking the winner.
        for (String bad : banned) {
            counts.remove(bad);
        }
        String best = "";
        int bestCount = 0;
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            if (entry.getValue() > bestCount) {
                bestCount = entry.getValue();
                best = entry.getKey();
            }
        }
        return best;
    }
}
|
<reponame>rbreu/tr8n<filename>db/migrate/20110108000009_create_translator_following.rb
# Migration: creates the tr8n_translator_following table, which records the
# objects (a polymorphic object_id/object_type pair) each translator follows.
class CreateTranslatorFollowing < ActiveRecord::Migration
  def self.up
    create_table :tr8n_translator_following do |t|
      t.integer :translator_id
      # NOTE(review): a column literally named object_id shadows Ruby's
      # Object#object_id on the model — confirm this naming is intentional.
      t.integer :object_id
      t.string :object_type
      t.timestamps
    end
    # Speed up "what does this translator follow" lookups.
    add_index :tr8n_translator_following, [:translator_id]
  end
  def self.down
    drop_table :tr8n_translator_following
  end
end
|
#!/bin/bash
# SLURM batch job: one double-DDPG training run (action-noise exploration,
# seed 4, run 4) on RoboschoolHumanoid-v1 using a CPU TensorFlow virtualenv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHumanoid-v1_doule_ddpg_softcopy_action_noise_seed4_run4_%N-%j.out # %N for node name, %j for jobID
# Toolchain required by Roboschool/TensorFlow on this cluster.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
# NOTE(review): "doule" in the output/summary paths looks like a typo for
# "double", but it is part of existing run names — renaming would orphan logs.
python ./ddpg_discrete_action.py --env RoboschoolHumanoid-v1 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHumanoid-v1/doule_ddpg_softcopy_action_noise_seed4_run4 --continuous-act-space-flag
|
<gh_stars>0
import axios from 'axios';
import { API } from '../../../utilities/APIConfig';
API.defaults.headers.post['Content-Type'] = 'application/json';
/**
 * Registers (when newUser is truthy → POST users/) or logs in
 * (POST users/login) and resolves with the auth token from the response.
 * The name field is only included in the payload when provided.
 */
export const getTokenToServer = (email, password, name, newUser) => {
    const fields = name ? { email, name, password } : { email, password };
    const data = JSON.stringify(fields);
    if (newUser) return API.post('users/', data).then(response => response.data.token);
    return API.post('users/login', data).then(response => response.data.token);
};
/** Asks the backend to verify a token; resolves with the returned token. */
export const tokenVerifyCreate = (token) =>
    API.post('token/verify', JSON.stringify({ token })).then(response => response.data.token);
/** Exchanges a token for a refreshed one; resolves with the new token. */
export const refreshToken = (token) =>
    API.post('token/refresh', JSON.stringify({ token })).then(response => response.data.token);
// Fetches the caller's Facebook Graph profile.
// NOTE(review): `token` is interpolated directly after ".../me", so callers
// presumably pass a leading "?access_token=..." query string — confirm
// against the call sites before changing.
export const getProfileUserSocial = (token) => {
    const url = `https://graph.facebook.com/v3.1/me${token}`;
    return axios.get(url)
        .then(response => response);
};
|
<filename>src/main/java/com/vc/easy/L771.scala<gh_stars>1-10
package com.vc.easy
object L771 {
  /** Counts how many characters of S ("stones") appear in J ("jewels"). */
  def numJewelsInStones(J: String, S: String): Int = {
    val jewels = J.toSet
    S.count(jewels.contains)
  }
}
|
package main
import (
//"bufio"
"encoding/csv"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
)
// Parallel slices filled by readCSV: item names, their raw value strings,
// and (when both numeric columns parse) the per-item sum of those columns.
var items []string
var Value []string
var ItemsTotal []int
// readCSV loads data/<filee>.csv into the package-level items, Value and
// ItemsTotal slices. Columns: [0] name, [1] value string, [2] and [3]
// optional integers whose sum is appended to ItemsTotal.
// NOTE(review): as in the original, items/Value are appended even when the
// numeric columns fail to parse while ItemsTotal is not, so the slices can
// fall out of alignment on malformed rows — confirm callers tolerate this.
func readCSV(filee string) {
	items = nil
	Value = nil
	ItemsTotal = nil
	csvfile, err := os.Open(ExecPath + "/data/" + filee + ".csv")
	if err != nil {
		log.Fatalln("Couldn't open the csv file", err)
	}
	// Fix: guarantee the handle is released on every path.
	defer csvfile.Close()
	r := csv.NewReader(csvfile)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		items = append(items, record[0])
		Value = append(Value, record[1])
		// Empty numeric columns count as zero. Fix: the parse errors are now
		// reset per row; previously a stale err1/err2 from an earlier bad row
		// suppressed the totals of every following valid row.
		conv1 := 0
		var err1 error
		if record[2] != "" {
			conv1, err1 = strconv.Atoi(record[2])
		}
		conv2 := 0
		var err2 error
		if record[3] != "" {
			conv2, err2 = strconv.Atoi(record[3])
		}
		if err1 != nil || err2 != nil {
			// Fix: the message claimed conversion *to* string; it is Atoi.
			fmt.Println("Error converting to integer.")
		} else {
			ItemsTotal = append(ItemsTotal, conv1+conv2)
		}
	}
}
// readCSVItemSearch scans data/<filee>.csv for an exact item name and returns
// (name, value, col2, col3, col2+col3, col4). On any failure — file missing,
// unparseable numeric fields on the matching row, or item absent — it returns
// the sentinel ("Nothing to find", "Error", 0, 0, 0, "Error").
func readCSVItemSearch(filee string, ItemToFind string) (string, string, int, int, int, string) {
	csvfile, err := os.Open(ExecPath + "/data/" + filee + ".csv")
	if err != nil {
		log.Println("Couldn't open the csv file", err)
		return "Nothing to find", "Error", 0, 0, 0, "Error"
	}
	// Fix: the original returned from inside the read loop without closing
	// the file; defer guarantees the handle is released on every path.
	defer csvfile.Close()
	r := csv.NewReader(csvfile)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// Empty numeric columns count as zero. Fix: errors reset per row —
		// previously stale err1/err2 could suppress matches on later rows.
		conv1 := 0
		var err1 error
		if record[2] != "" {
			conv1, err1 = strconv.Atoi(record[2])
		}
		conv2 := 0
		var err2 error
		if record[3] != "" {
			conv2, err2 = strconv.Atoi(record[3])
		}
		if err1 != nil || err2 != nil {
			fmt.Println("Error converting to integer.")
			continue
		}
		if record[0] == ItemToFind {
			return record[0], record[1], conv1, conv2, conv1 + conv2, record[4]
		}
	}
	return "Nothing to find", "Error", 0, 0, 0, "Error"
}
// Parallel result slices filled by readCSVItemSearchLOWERCASE.
var foundItems []string
var foundVals []string
var foundAmounts []int
// Set when the CSV file could not be opened by the last search.
var foundErrors bool = false
// readCSVItemSearchLOWERCASE appends to foundItems/foundVals/foundAmounts
// every row of data/<filee>.csv whose item name contains ItemToFind,
// case-insensitively. Sets foundErrors when the file cannot be opened.
// Note: it does not clear the found* slices first, matching the original.
func readCSVItemSearchLOWERCASE(filee string, ItemToFind string) {
	foundErrors = false
	csvfile, err := os.Open(ExecPath + "/data/" + filee + ".csv")
	if err != nil {
		foundErrors = true
		log.Println("Couldn't open the csv file", err)
		return
	}
	// Fix: close the file on every path (the original only closed it at the
	// end of the else branch).
	defer csvfile.Close()
	// Lowercase the needle once instead of on every row.
	needle := strings.ToLower(ItemToFind)
	r := csv.NewReader(csvfile)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// Empty numeric columns count as zero. Fix: errors reset per row —
		// previously stale err1/err2 could suppress matches on later rows.
		conv1 := 0
		var err1 error
		if record[2] != "" {
			conv1, err1 = strconv.Atoi(record[2])
		}
		conv2 := 0
		var err2 error
		if record[3] != "" {
			conv2, err2 = strconv.Atoi(record[3])
		}
		if err1 != nil || err2 != nil {
			fmt.Println("Error converting to integer.")
			continue
		}
		if strings.Contains(strings.ToLower(record[0]), needle) {
			foundItems = append(foundItems, record[0])
			foundVals = append(foundVals, record[1])
			foundAmounts = append(foundAmounts, conv1+conv2)
		}
	}
}
|
#!/bin/bash
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eo pipefail

# Default the project root when Kokoro does not provide one.
if [[ -z "${PROJECT_ROOT:-}" ]]; then
    PROJECT_ROOT="github/python-bigquery-reservation"
fi

cd "${PROJECT_ROOT}"

# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1

# Debug: show build environment
env | grep KOKORO

# Setup service account credentials.
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json

# Setup project id.
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")

# Remove old nox
python3 -m pip uninstall --yes --quiet nox-automation

# Install nox
python3 -m pip install --upgrade --quiet nox
python3 -m nox --version

# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
    # Run FlakyBot on exit (or hangup) so flaky failures are reported even
    # when the nox run below aborts the script via `set -e`.
    cleanup() {
        chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
        $KOKORO_GFILE_DIR/linux_amd64/flakybot
    }
    trap cleanup EXIT HUP
fi

# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
    python3 -m nox -s ${NOX_SESSION:-}
else
    python3 -m nox
fi
|
#!/bin/bash
# Apply the infra deployment manifest, resolved relative to this script.
# Fix: enable fail-fast (-e) *before* changing directory, so a failed cd
# aborts instead of applying a manifest resolved from the wrong location.
set -e
set -x
cd "${0%/*}"
kubectl apply -f ../../../deploy/infra/deployment.yaml
# settings
# Remote git host and account the repositories below are synced against.
remoteHost=bitbucket.org
remoteUser=foobar
# Local root directory holding the working copies.
localCodeDir="${HOME}/git/"
# Repository names to process, one per array entry.
gitRepositoryNames=(
"alpharogroup.com"
"lessonslearn.com"
"xmas.presents"
"wingames"
"Spirit-Intensifier"
"BackupContactLinks"
)
import tensorflow as tf
def create_tf_example(context_features, abstract_features):
    """Build a tf.train.Example from scalar context features plus
    pre-built abstract features.

    Args:
        context_features: dict of key -> scalar numeric value; each value is
            wrapped in a single-element float_list Feature.
        abstract_features: dict of key -> tf.train.Feature, merged in as-is
            (values win over context features on key collisions).

    Returns:
        tf.train.Example containing the union of both feature dicts.
    """
    def _dict_of_nonlist_numerical_to_tf_features(features):
        # Wrap each scalar in a one-element FloatList feature.
        tf_features = {}
        tf_features_types = {}
        for key, value in features.items():
            tf_features[key] = tf.train.Feature(
                float_list=tf.train.FloatList(value=[value])
            )
            # NOTE(review): this stores the FloatList *class*, not an
            # instance; looks wrong if the types dict is ever consumed
            # downstream — confirm (it is unused within this function).
            tf_features_types[key] = tf.train.Feature(float_list=tf.train.FloatList)
        return tf_features, tf_features_types
    tf_context_features, tf_context_features_types = _dict_of_nonlist_numerical_to_tf_features(context_features)
    features = {**tf_context_features, **abstract_features}
    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
    return tf_example
// XHR state-change handler: once the request has completed with HTTP 200,
// render the response body into the #response element.
function ajaxResponse () {
  const done = this.readyState === 4;
  const ok = this.status === 200;
  if (done && ok) {
    document.getElementById("response").innerHTML = this.responseText;
  }
}
// Reads the URL typed into #request and issues an asynchronous HTTP request
// with the given verb; ajaxResponse renders the eventual response.
function jsend (verb) {
  const target = document.getElementById("request").value;
  const request = new XMLHttpRequest();
  request.onreadystatechange = ajaxResponse;
  request.open(verb, target, true);
  request.send();
}
|
import { RedBlackTreeStructure, RedBlackTreeValueIterator } from '../internals';
import { iterateFromLast } from './iterateFromLast';
/**
 * Creates an iterator over the values of `tree`, starting from the last
 * entry and walking backwards.
 */
export function iterateValuesFromLast<K, V = null> (tree: RedBlackTreeStructure<K, V>): RedBlackTreeValueIterator<K, V> {
  const reverseIterator = iterateFromLast<K, V>(tree);
  return new RedBlackTreeValueIterator<K, V>(reverseIterator);
}
|
/**
 * Sorts an array with recursive merge sort and returns a new sorted array
 * (the input is not mutated).
 * Fix: the original base case (`length === 1`) recursed forever on an empty
 * array; `length <= 1` also terminates on [].
 */
function mergeSort(arr) {
  if (arr.length <= 1) {
    return arr;
  }
  // Split the array into two halves and sort each recursively.
  const mid = Math.floor(arr.length / 2);
  const left = mergeSort(arr.slice(0, mid));
  const right = mergeSort(arr.slice(mid));
  return merge(left, right);
}

/**
 * Merges two sorted arrays into one sorted array. On equal elements the
 * right-hand element is emitted first (matches the `<` comparison).
 */
function merge(left, right) {
  const result = [];
  let leftIndex = 0;
  let rightIndex = 0;
  // Take the smaller head element until one side is exhausted.
  while (leftIndex < left.length && rightIndex < right.length) {
    if (left[leftIndex] < right[rightIndex]) {
      result.push(left[leftIndex]);
      leftIndex++;
    } else {
      result.push(right[rightIndex]);
      rightIndex++;
    }
  }
  // Append whatever remains on either side (at most one slice is non-empty).
  return result.concat(left.slice(leftIndex)).concat(right.slice(rightIndex));
}
<reponame>Charliocat/armeria
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.internal.common;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Stopwatch;
import com.linecorp.armeria.common.Flags;
import com.linecorp.armeria.common.util.Exceptions;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.EventLoop;
import io.netty.handler.codec.http2.Http2Connection;
import io.netty.handler.codec.http2.Http2FrameWriter;
import io.netty.handler.codec.http2.Http2PingFrame;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import io.netty.handler.timeout.IdleStateHandler;
/**
* This will send an {@link Http2PingFrame} when an {@link IdleStateEvent} is emitted by {@link
* IdleStateHandler} and {@link Flags#defaultHttp2PingTimeoutMillis()} is greater that zero.
*
* <p>Once an {@link IdleStateEvent} is triggered and when there are active streams open then a
* {@link Http2PingFrame} will be written on connection. When there are no active streams then it depends on
* {@link Flags#defaultUseHttp2PingWhenNoActiveStreams()}.
*
* <p>Once a {@link Http2PingFrame} is written, then either an ACK for the {@link Http2PingFrame} or any data
* is read on connection will invalidate the condition that triggers connection closure. If either of the
* conditions are not met then the connection will be closed.
*
* <p>This class is <b>not</b> thread-safe and all methods are to be called from single thread such
* as {@link EventLoop}.
*
* @see Flags#defaultUseHttp2PingWhenNoActiveStreams()
* @see Flags#defaultHttp2PingTimeoutMillis()
*/
@NotThreadSafe
public class Http2KeepAliveHandler {

    private static final Logger logger = LoggerFactory.getLogger(Http2KeepAliveHandler.class);

    // Only used for debug-level ACK-latency logging; null when debug logging
    // is disabled so no timing work happens in production.
    @Nullable
    private final Stopwatch stopwatch = logger.isDebugEnabled() ? Stopwatch.createUnstarted() : null;

    private final boolean sendPingsOnNoActiveStreams;
    private final long pingTimeoutMillis;
    private final Http2FrameWriter frameWriter;
    private final ThreadLocalRandom random = ThreadLocalRandom.current();
    private final Http2Connection http2Connection;
    private final Channel channel;
    private final ChannelFutureListener pingWriteListener = new PingWriteListener();
    private final Runnable shutdownRunnable = this::closeChannelAndLog;

    // Future of the PING write currently in flight; null when none.
    @Nullable
    private ChannelFuture pingWriteFuture;
    // Scheduled channel-closure task armed after a PING is written; cancelled
    // when the matching ACK (or any read) arrives in time.
    @Nullable
    private Future<?> shutdownFuture;

    // Payload of the last PING sent; a good ACK must echo this value.
    private long lastPingPayload;
    private State state = State.IDLE;

    /**
     * Creates a keep-alive handler for the given channel.
     *
     * @param pingTimeoutMillis how long to wait for a PING ACK before closing
     *        the connection; must be positive
     * @param sendPingsOnNoActiveStreams whether to PING a connection that has
     *        no active streams (instead of closing it)
     */
    public Http2KeepAliveHandler(Channel channel, Http2FrameWriter frameWriter, Http2Connection http2Connection,
                                 long pingTimeoutMillis, boolean sendPingsOnNoActiveStreams) {
        checkArgument(pingTimeoutMillis > 0, pingTimeoutMillis);
        this.channel = requireNonNull(channel, "channel");
        this.frameWriter = requireNonNull(frameWriter, "frameWriter");
        this.pingTimeoutMillis = pingTimeoutMillis;
        this.http2Connection = requireNonNull(http2Connection, "http2Connection");
        this.sendPingsOnNoActiveStreams = sendPingsOnNoActiveStreams;
    }

    /**
     * Handles an {@link IdleStateEvent}: writes a keep-alive PING when
     * allowed, otherwise closes the idle channel.
     */
    public void onChannelIdle(ChannelHandlerContext ctx, IdleStateEvent event) {
        logger.debug("{} {} triggered.", channel, event);
        if (!canSendPing()) {
            // The default behaviour is to shutdown the channel on idle timeout if not HTTP/2 conn.
            // So preserving the behaviour.
            closeChannelAndLog();
            return;
        }
        // Only interested in ALL_IDLE event and when Http2KeepAliveHandler is ready.
        // Http2KeepAliveHandler may not be ready because it is currently handling
        // sending a PING or expecting a PING ACK on channel.
        if (state != State.IDLE || event.state() != IdleState.ALL_IDLE) {
            return;
        }
        writePing(ctx);
    }

    // PINGs are always allowed while streams are active; with none, it is
    // governed by the configured flag.
    private boolean canSendPing() {
        if (http2Connection.numActiveStreams() == 0) {
            return sendPingsOnNoActiveStreams;
        } else {
            return true;
        }
    }

    // Writes a PING carrying a fresh random payload and flushes;
    // pingWriteListener advances the state machine once the write completes.
    private void writePing(ChannelHandlerContext ctx) {
        lastPingPayload = random.nextLong();
        state = State.PING_SCHEDULED;
        pingWriteFuture = frameWriter.writePing(ctx, false, lastPingPayload, ctx.newPromise())
                                     .addListener(pingWriteListener);
        ctx.flush();
    }

    /**
     * Callback for when channel is in-active to cleans up resources.
     */
    public void onChannelInactive() {
        state = State.SHUTDOWN;
        cancelFutures();
    }

    /** Any inbound data counts as liveness: go back to IDLE and cancel timers. */
    public void onChannelRead() {
        state = State.IDLE;
        cancelFutures();
    }

    // Drops references without cancelling (used after a good ACK, when the
    // shutdown future has already been cancelled explicitly).
    private void resetFutures() {
        shutdownFuture = null;
        pingWriteFuture = null;
    }

    private void cancelFutures() {
        if (shutdownFuture != null) {
            shutdownFuture.cancel(false);
            shutdownFuture = null;
        }
        if (pingWriteFuture != null) {
            pingWriteFuture.cancel(false);
            pingWriteFuture = null;
        }
    }

    /**
     * Handles a PING ACK: when it matches the outstanding PING, cancels the
     * pending shutdown and returns the handler to IDLE.
     */
    public void onPingAck(long data) {
        final long elapsed = getStopwatchElapsedInNanos();
        if (!isGoodPingAck(data)) {
            return;
        }
        if (shutdownFuture != null) {
            final boolean isCancelled = shutdownFuture.cancel(false);
            if (!isCancelled) {
                logger.debug("{} shutdownFuture cannot be cancelled because of late PING ACK", channel);
            }
        }
        logger.debug("{} PING(ACK=1, DATA={}) received in {} ns", channel, lastPingPayload, elapsed);
        state = State.IDLE;
        resetFutures();
    }

    private boolean isGoodPingAck(long data) {
        // This condition can be true when channel read some data other than PING ACK frame
        // or a PING ACK is received without sending PING in first place.
        if (state != State.PENDING_PING_ACK) {
            logger.debug("{} PING(ACK=1, DATA={}) ignored", channel, data);
            return false;
        }
        if (lastPingPayload != data) {
            logger.debug("{} Unexpected PING(ACK=1, DATA={}) received, " +
                         "but expecting PING(ACK=1, DATA={})", channel, data, lastPingPayload);
            return false;
        }
        return true;
    }

    @VisibleForTesting
    State state() {
        return state;
    }

    @VisibleForTesting
    long lastPingPayload() {
        return lastPingPayload;
    }

    // Closes the channel unless already shut down; scheduled as the shutdown
    // task when a PING ACK does not arrive within pingTimeoutMillis.
    private void closeChannelAndLog() {
        if (state == State.SHUTDOWN) {
            return;
        }
        logger.debug("{} Closing an idle channel", channel);
        channel.close().addListener(future -> {
            if (future.isSuccess()) {
                logger.debug("{} Closed an idle channel", channel);
            } else {
                logger.debug("{} Failed to close an idle channel", channel, future.cause());
            }
            state = State.SHUTDOWN;
        });
    }

    // Returns -1 when debug logging (and hence the stopwatch) is disabled.
    private long getStopwatchElapsedInNanos() {
        if (stopwatch == null) {
            return -1;
        }
        return stopwatch.elapsed(TimeUnit.NANOSECONDS);
    }

    /**
     * State changes from IDLE -> PING_SCHEDULED -> PENDING_PING_ACK -> IDLE and so on. When the
     * channel is inactive then the state changes to SHUTDOWN.
     */
    enum State {
        /* Nothing happening, but waiting for IdleStateEvent */
        IDLE,
        /* PING is scheduled */
        PING_SCHEDULED,
        /* PING is sent and is pending ACK */
        PENDING_PING_ACK,
        /* Not active anymore */
        SHUTDOWN
    }

    private class PingWriteListener implements ChannelFutureListener {
        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            if (future.isSuccess()) {
                logger.debug("{} PING(ACK=0, DATA={}) write successful", channel, lastPingPayload);
                final EventLoop el = channel.eventLoop();
                // Arm the shutdown timer: if no ACK (or read) arrives within
                // pingTimeoutMillis the channel is closed.
                shutdownFuture = el.schedule(shutdownRunnable, pingTimeoutMillis, TimeUnit.MILLISECONDS);
                state = State.PENDING_PING_ACK;
                resetStopwatch();
            } else {
                // Mostly because the channel is already closed. So ignore and change state to IDLE.
                // If the channel is closed, we change state to SHUTDOWN on onChannelInactive.
                if (!future.isCancelled() && Exceptions.isExpected(future.cause())) {
                    logger.debug("{} PING write failed", channel, future.cause());
                }
                if (state != State.SHUTDOWN) {
                    state = State.IDLE;
                }
            }
        }

        private void resetStopwatch() {
            if (stopwatch != null) {
                stopwatch.reset().start();
            }
        }
    }
}
|
<gh_stars>0
package kata.java;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
/**
 * Computes a minimum spanning tree of a weighted edge graph using the
 * lazy variant of Prim's algorithm: crossing edges are kept in a priority
 * queue and stale entries (both endpoints already in the tree) are
 * discarded only when polled.
 */
public class LazyMinimumSpanningTree {
    /** Vertices already absorbed into the growing tree. */
    private Set<Integer> marked;
    /** Candidate edges, ordered by weight; may contain stale entries. */
    private Queue<WeightedEdge> priority;
    /** Total weight of the computed spanning tree. */
    private double weight;
    public LazyMinimumSpanningTree(WeightedEdgeGraph graph) {
        priority = new PriorityQueue<>();
        marked = new HashSet<>();
        Collection<WeightedEdge> treeEdges = new ArrayList<>();
        // Seed the tree with an arbitrary vertex of the graph.
        visit(graph, graph.edges.keySet().iterator().next());
        while (!priority.isEmpty()) {
            WeightedEdge lightest = priority.poll();
            int v = lightest.either();
            int w = lightest.other(v);
            // Lazily discard edges whose endpoints were both absorbed meanwhile.
            if (marked.contains(v) && marked.contains(w)) {
                continue;
            }
            treeEdges.add(lightest);
            if (!marked.contains(v)) {
                visit(graph, v);
            }
            if (!marked.contains(w)) {
                visit(graph, w);
            }
        }
        weight = treeEdges.stream().mapToDouble(WeightedEdge::weight).sum();
    }
    // Marks v as part of the tree and enqueues its incident edges that leave the tree.
    private void visit(WeightedEdgeGraph graph, int v) {
        marked.add(v);
        for (WeightedEdge edge : graph.adjacentTo(v)) {
            if (!marked.contains(edge.other(v))) {
                priority.add(edge);
            }
        }
    }
    /** @return the total weight of the minimum spanning tree */
    public double weight() {
        return weight;
    }
}
|
<filename>app.py
from flask import Flask, request,jsonify
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image
import tensorflow_hub as hub
from google.cloud import vision
import numpy as np
import webcolors
import threading
from translate import Translator
from PIL import Image, ImageOps
# Flask application instance shared by all routes below.
app = Flask(__name__)
# Keras banknote classifier; KerasLayer must be registered so the saved
# TF-Hub layer inside the .h5 file can be deserialized.
model = load_model('./ml_models/keras_model.h5', custom_objects={'KerasLayer': hub.KerasLayer})
@app.route('/', methods = ['POST'])
def hello_world():
    # Simple health-check endpoint used to verify the server is reachable.
    return jsonify({'Test':'Hello World!'})
# detect currency using the Model created for a single frame
def detect_currency_frame(i,output):
    """Classify banknote frame ``i`` and append the Arabic label to ``output``.

    Reads the uploaded file ``image<i>`` from the current Flask request,
    re-encodes it as a JPEG on disk, resizes it to the model's 224x224 input
    and runs the Keras classifier.
    """
    s = 'image'
    s += str(i)
    # Round-trip through disk re-encodes the upload as JPEG before inference.
    img = Image.open(request.files[s])
    img.save(s+'.jpg')
    img = Image.open(s+'.jpg')
    # Create the array of the right shape to feed into the keras model
    data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
    image = img
    #image sizing
    size = (224, 224)
    image = ImageOps.fit(image, size, Image.ANTIALIAS)
    #turn the image into a numpy array
    image_array = np.asarray(image)
    # Normalize the image
    # NOTE(review): /127.0 - 1 maps 0..255 to roughly [-1, 1.008]; this matches
    # the scaling this model was apparently trained with — confirm before
    # "fixing" the divisor to 127.5.
    normalized_image_array = (image_array.astype(np.float32) / 127.0) - 1
    # Load the image into the array
    data[0] = normalized_image_array
    # run the inference
    print('Started predicting')
    prediction = model.predict(data)
    print(prediction)
    print('Predicting done')
    result = np.argmax(prediction)
    print(result)
    # Map class index to the Arabic denomination label.
    switcher = {
        0:"خمسة جنيهات",
        1:"عشرة جنيهات",
        2:"عشرون جنيه",
        3:"خمسون جنيه",
        4:"مائة جنيه",
        5:"مائتي جنيه"
    }
    s=switcher.get(result, "Not Maching")
    output.append(s)
    print(s)
@app.route('/predict-currency', methods = ['POST'])
def detect_currency():
    """Classify the banknote shown in the 4 uploaded frames.

    Expects multipart form files named ``image1``..``image4``. Each frame is
    classified independently and the label predicted most often wins; ties
    resolve to the first label encountered (same as the original loop).

    Returns:
        JSON of the form ``{'value': <arabic denomination label>}``.
    """
    from collections import Counter

    print(request.remote_addr)
    # One predicted label per frame.
    output = list()
    for i in range(1, 5):
        detect_currency_frame(i, output)
    # Print all outputs in the console
    print(output)
    # Majority vote over the frame predictions. Counter.most_common is stable,
    # so ties keep the first-seen label — matching the original hand-rolled
    # counting loop, which also shadowed the built-in `max`.
    result = Counter(output).most_common(1)[0][0] if output else ""
    return jsonify({'value': result})
# Finds the named CSS3 colour nearest to the given RGB triple.
def closest_colour(requested_colour):
    """Return the CSS3 colour name closest to ``requested_colour``.

    Distance is squared Euclidean distance in RGB space; when two candidates
    are equally close, the one seen last wins (same as the original
    dict-keyed-by-distance lookup).
    """
    distance_to_name = {}
    for hex_value, colour_name in webcolors.CSS3_HEX_TO_NAMES.items():
        r_c, g_c, b_c = webcolors.hex_to_rgb(hex_value)
        squared_distance = (
            (r_c - requested_colour[0]) ** 2
            + (g_c - requested_colour[1]) ** 2
            + (b_c - requested_colour[2]) ** 2
        )
        distance_to_name[squared_distance] = colour_name
    return distance_to_name[min(distance_to_name.keys())]
# Returns the actual and closest CSS3 names of a colour using the previous
# function and the webcolors library.
def get_colour_name(requested_colour):
    """Return ``(actual_name, closest_name)`` for an RGB triple.

    ``actual_name`` is the exact CSS3 name when one exists, otherwise None;
    ``closest_name`` is always populated (falls back to ``closest_colour``).
    """
    try:
        closest_name = actual_name = webcolors.rgb_to_name(requested_colour)
    except ValueError:
        # No exact CSS3 match for this RGB value; approximate instead.
        closest_name = closest_colour(requested_colour)
        actual_name = None
    return actual_name, closest_name
# function to detect color using google cloud services API for a single frame
def detect_color_frame(i,output):
    """Append the dominant colour name of frame ``i`` to ``output``.

    Reads the uploaded file ``image<i>`` from the current Flask request and
    asks the Google Vision API for its image properties. Raises when the API
    reports an error.
    """
    client = vision.ImageAnnotatorClient()
    s = 'image'
    s += str(i)
    content = request.files[s].read()
    image = vision.Image(content=content)
    response = client.image_properties(image=image)
    props = response.image_properties_annotation
    print('Properties:')
    # get the color in RGB format, and get its name
    for color in props.dominant_colors.colors:
        print('fraction: {}'.format(color.pixel_fraction))
        print('\tr: {}'.format(color.color.red))
        print('\tg: {}'.format(color.color.green))
        print('\tb: {}'.format(color.color.blue))
        print('\ta: {}'.format(color.color.alpha))
        _, result = get_colour_name((int(color.color.red), int(color.color.green), int(color.color.blue)))
        print(result)
        output.append(result)
        # Break after first iteration because we only need the most dominant color in the picture
        break
    if response.error.message:
        raise Exception(
            '{}\nFor more info on error messages, check: '
            'https://cloud.google.com/apis/design/errors'.format(
                response.error.message))
@app.route('/detect-color', methods = ['POST'])
def detect_color():
    """Detect the dominant colour across the 4 uploaded frames.

    Expects multipart form files named ``image1``..``image4``. Each frame's
    dominant colour name is collected and the name seen most often wins;
    ties resolve to the first name encountered. The winner is translated to
    Arabic before being returned.

    Returns:
        JSON of the form ``{'color': <arabic colour name>}``.
    """
    from collections import Counter

    print(request.remote_addr)
    # Colour name detected for each frame.
    output = list()
    for i in range(4, 0, -1):
        # BUG (removed): the original wrapped this in
        # threading.Thread(target=detect_color_frame(i, output)), which
        # *called* the function immediately and handed Thread a None target,
        # so the threads never did any work. The call is kept synchronous on
        # purpose: detect_color_frame reads request.files, and Flask's
        # request context is not available in worker threads.
        detect_color_frame(i, output)
    # Print all outputs in the console
    print(output)
    # Majority vote (Counter.most_common is stable, so ties keep the
    # first-seen colour, matching the original counting loop).
    maxRes = Counter(output).most_common(1)[0][0] if output else ""
    # get the color in arabic
    translator = Translator(from_lang="english", to_lang="arabic")
    result = translator.translate(maxRes)
    print(result.replace('color',''))
    result = result.replace('color','')
    return jsonify({'color':result})
# function to apply OCR on a single frame
def detect_text_frame(i,output):
    """Detects text in the file.

    Runs Google Vision OCR on the uploaded file ``image<i>`` from the current
    Flask request and appends the first (full) text annotation to ``output``.
    Raises when the API reports an error.
    """
    client = vision.ImageAnnotatorClient()
    # [START vision_python_migration_text_detection]
    s = 'image'
    s += str(i)
    content = request.files[s].read()
    image = vision.Image(content=content)
    response = client.text_detection(image=image)
    texts = response.text_annotations
    print('Texts:')
    result = ""
    # The first annotation is the full extracted text; the rest are per-word.
    for text in texts:
        print('\n"{}"'.format(text.description))
        result += text.description
        break
    output.append(result)
    if response.error.message:
        raise Exception(
            '{}\nFor more info on error messages, check: '
            'https://cloud.google.com/apis/design/errors'.format(
                response.error.message))
    # [END vision_python_migration_text_detection]
# [END vision_text_detection]
@app.route('/detect-text', methods = ['POST'])
def detect_text():
    """Run OCR over the 4 uploaded frames and return the best extraction.

    The text extracted most often wins; when no text repeats, the longest
    extraction is used. Also reports the detected language (so the frontend
    can pick the matching TTS voice) and an approximate word count.

    Returns:
        JSON: ``{'extracted': str, 'lang': str, 'words': int}``
    """
    print(request.remote_addr)
    # OCR result for each frame.
    output = list()
    for i in range(4, 0, -1):
        # BUG (removed): the original wrapped this in
        # threading.Thread(target=detect_text_frame(i, output)), which invoked
        # the function immediately and handed Thread a None target — no
        # concurrency ever happened. Kept synchronous on purpose: Flask's
        # request context (request.files) is unavailable in worker threads.
        detect_text_frame(i, output)
    # Print all outputs in the console
    print(output)
    # Count the occurrences of each distinct extraction.
    results = dict()
    for res in output:
        if res in results.keys():
            results[res] += 1
        else:
            results[res] = 1
    # Pick the text with the most occurrences.
    best_count = -1
    best_text = ""
    for k in results.keys():
        if results[k] > best_count:
            best_count = results[k]
            best_text = k
    # If no text occurred more than once, fall back to the longest text.
    if best_count < 2:
        longest = -1
        for k in results.keys():
            # BUG FIX: the original assigned the occurrence count
            # (results[k]) instead of the length here, so it effectively
            # picked the last candidate rather than the longest one.
            if len(k) > longest:
                longest = len(k)
                best_text = k
    result = best_text
    # Library to detect the language of the extracted text from the picture
    # to use the correct voice from the frontend
    from langdetect import detect
    try:
        lang = detect(result)
        print(lang)
    except:
        lang = 'ar'
    if lang == 'de':
        lang = 'en'
    # Rough word count: spaces plus newlines (an approximation).
    words = len(result.split(' ')) + len(result.split('\n'))
    return jsonify({'extracted':result,'lang':lang, 'words':words})
if __name__ == '__main__':
    # Listen on all interfaces so mobile clients on the LAN can reach the API.
    app.run(host='0.0.0.0', port=5005)
|
# Re-source the zsh configuration in the current shell.
alias reload!='. ~/.zshrc'
alias cls='clear' # Good 'ol Clear Screen command
# Rebuild ~/.ssh/config from the local base config plus per-directory fragments.
alias compile-ssh-config='cat ~/.ssh/config_local ~/.ssh/*/config_* > ~/.ssh/config'
# Always regenerate the ssh config before connecting.
alias ssh='compile-ssh-config && ssh'
# Public IP first, then local interface addresses (loopback filtered out).
alias ip='curl ipecho.net/plain && echo && ifconfig | sed -En "s/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p"'
# Serve the current directory over HTTP (Python 3).
alias server='python -m http.server'
# Random hex key; pass the byte count, e.g. `keygen 32`.
alias keygen='openssl rand -hex'
# Print the version field of ./package.json.
alias npmversion='echo $(node -p -e "require(\"./package.json\").version")'
# Lowercase UUIDs.
alias uuidgen="uuidgen | tr '[:upper:]' '[:lower:]'"
alias python='python3'
alias pip='pip3'
|
#!/bin/bash
# SLURM batch script: single-core run of the linear-activation
# sequence-tagging experiment (arg_min task, configuration 1).
#SBATCH -J Act_linear_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00     # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional arguments are hyper-parameters consumed by PE-my.py
# (activation, seed, optimizer, ...); their exact meaning is defined there.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py linear 366 Adamax 1 0.6945271512548414 0.0017287034685815692 rnormal 0.05
|
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in
# Sets various environment variables and sources additional environment hooks.
# It tries it's best to undo changes from a previously sourced setup file before.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# --local: only considers this workspace but not the chained ones
# In plain sh shell which doesn't support arguments for sourced scripts you can
# set the environment variable `CATKIN_SETUP_UTIL_ARGS=--extend/--local` instead.
# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
: ${_CATKIN_SETUP_DIR:=/home/kalyco/mfp_workspace/devel/.private/srsnode_laser_filters}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR
if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi
# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME
# make sure to export all environment variables
export CMAKE_PREFIX_PATH
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH
# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi
# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR:-}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
# NOTE(review): the `unset` above resets $?, so this test checks unset's
# status rather than mktemp's; the -f check still catches mktemp failures.
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ ${CATKIN_SETUP_UTIL_ARGS:-} >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
# apply the generated exports in the current shell, then clean up
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP
# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
package com.nortal.spring.cw.core.i18n;
import java.io.Serializable;
import java.text.MessageFormat;
import java.util.Locale;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.support.AbstractMessageSource;
import com.nortal.spring.cw.core.i18n.model.Lang;
import com.nortal.spring.cw.core.i18n.model.MessageModel;
import com.nortal.spring.cw.core.web.util.RequestUtil;
/**
 * Message source that resolves localized texts for the application, with
 * support for literal "#"-prefixed messages that bypass resolution.
 *
 * @author <NAME> <<EMAIL>>
 * @since 20.05.2015
 */
public class CwMessageSource extends AbstractMessageSource implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Prefix marking a literal text value that is returned as-is (minus the prefix). */
    public static final String TEXT_MESSAGE_SYMBOL = "#";
    @Override
    protected MessageFormat resolveCode(String code, Locale locale) {
        // XXX (translated from Estonian): currently a lone ' in a text gets swallowed,
        // so the simple workaround is to escape ' as ''. Forums recommend the same;
        // we therefore hope no translation already contains a doubled ''.
        // While it is well documented in java.text.MessageFormat that apostrophes need to be escaped by using a double apostrophe
        String message = StringUtils.replace(code, "'", "''");
        return super.createMessageFormat(StringUtils.isEmpty(message) ? code : message, locale);
    }
    /** Resolves a message model (code + params) for the given language. */
    public final String resolve(MessageModel model, Lang lang) {
        return resolve(model.getCode(), lang.toLocale(), model.getParams());
    }
    /** Resolves a message code for the given language with optional format arguments. */
    public final String resolve(String code, Lang lang, Object... args) {
        return resolve(code, lang.toLocale(), args);
    }
    /**
     * Resolves a message code for the given locale. Codes starting with
     * {@link #TEXT_MESSAGE_SYMBOL} are treated as literal text (prefix stripped);
     * codes with no resolvable message fall back to the code itself.
     */
    public final String resolve(String code, Locale locale, Object... args) {
        if (StringUtils.isNotEmpty(code) && isSimpleText(code)) {
            return code.substring(1);
        }
        String message = getMessage(code, args, locale);
        return StringUtils.isEmpty(message) ? code : message;
    }
    /** Resolves a message code using the currently active request language. */
    public final String resolveByActiveLang(String code, Object... args) {
        return resolve(code, RequestUtil.getActiveLang().toLocale(), args);
    }
    /** Resolves a message code for the language identified by {@code languageCode}. */
    public final String resolve(String code, String languageCode) {
        return resolve(code, Lang.fromCode(languageCode).toLocale());
    }
    /**
     * Returns <code>true</code> when the given input starts with "global." or "#".
     * (Javadoc translated from Estonian.)
     *
     * @param input
     *            {@link String}
     * @return {@link Boolean}
     */
    public boolean isGlobalOrSimpleText(String input) {
        return StringUtils.startsWithAny(input, new String[] { CwMessageSource.TEXT_MESSAGE_SYMBOL, "global." });
    }
    /**
     * Returns <code>true</code> when the given input starts with "#".
     * (Javadoc translated from Estonian.)
     *
     * @param input
     *            {@link String}
     * @return {@link Boolean}
     */
    public boolean isSimpleText(String input) {
        return StringUtils.startsWith(input, CwMessageSource.TEXT_MESSAGE_SYMBOL);
    }
}
|
/**
 * Returns the name of the student with the highest average grade.
 *
 * Fixes over the original: students with an empty grade list no longer cause
 * a division by zero (they are skipped), and the initial maximum is null
 * instead of 0 so averages of 0 or below can still win.
 *
 * @param array $students Map of student name => array of numeric grades.
 * @return string Name of the top student, or "" when no student has grades.
 */
function highestAverageGrade($students) {
    $highestAverage = null;
    $topStudent = "";
    foreach ($students as $name => $grades) {
        // Skip students without grades to avoid division by zero.
        if (count($grades) === 0) {
            continue;
        }
        $average = array_sum($grades) / count($grades);
        if ($highestAverage === null || $average > $highestAverage) {
            $highestAverage = $average;
            $topStudent = $name;
        }
    }
    return $topStudent;
}
// Example usage: sample data maps each student name to a list of grades.
$students = [
    "Alice" => [85, 90, 92],
    "Bob" => [78, 80, 85],
    "Charlie" => [90, 92, 88]
];
echo highestAverageGrade($students); // Output: Charlie
<reponame>mariosky/protoboard<gh_stars>1-10
import pymongo
from pymongo import MongoClient
from django.conf import settings
_client = MongoClient(settings.MONGO_DB)
class Context:
    """Execution context tying a user and an activity root to the shared
    MongoDB context collection used by the protoboard application."""

    def __init__(self, user_id, root_id):
        # Identifier of the user this context belongs to.
        self.user_id = user_id
        # Root document/activity id the context operates under.
        self.root_id = root_id
        # Shared module-level client: one connection pool per process.
        self._db = _client.protoboard_database
        self._context_collection = self._db.context_collection
|
(function() {
  angular
    .module('app')
    .controller('GuiaCriancaController', GuiaCriancaController);

  /**
   * Controller for the child health-guide screen: looks up registered
   * activities by SUS card number and formats ages for display.
   */
  function GuiaCriancaController(AtividadeDataService, $rootScope, $mdDialog, $state) {
    var Api = new AtividadeDataService();
    var vm = this;

    vm.numero_sus = '';
    vm.atividades = [];
    vm.buscar = buscar;
    vm.filtro = "";

    // Builds a human-readable age string ("X anos e Y meses") from an item.
    vm.getIdade = function(item) {
      var anos = item.anos;
      var meses = (+item.idade_pessoa) % 12;
      var texto = "";
      if (anos) {
        texto += anos + (anos > 1 ? " anos " : " ano ");
        if (meses) {
          texto += "e ";
        }
      }
      if (meses) {
        texto += meses + (meses > 1 ? " meses " : " mês ");
      }
      return texto;
    }

    // Formats a raw age in months into a short label.
    vm.parseIdade = function(idade) {
      if (idade == 0) {
        return "Ao nascer";
      }
      return idade <= 15 ? idade + " Meses" : Math.floor(idade / 12) + " Anos";
    }

    // Fetches the activity list for the SUS number typed by the user.
    function buscar() {
      $rootScope.loading = true;
      return Api.get({ numero_sus: vm.numero_sus }).success(function(data) {
        $rootScope.loading = false;
        vm.atividades = data;
        if (data.length == 0) {
          $mdDialog.show(
            $mdDialog.alert()
              .clickOutsideToClose(true)
              .title('Aviso')
              .textContent('Carteira não registrada em nosso sistema.')
              .ariaLabel('Aviso')
              .ok('Fechar')
          );
        }
      });
    }

    // Formats a date with moment.js; returns undefined for falsy input.
    vm.parseDate = function(date, format) {
      if (!date) return;
      return moment(date).format(format);
    }
  }
})();
|
#!/usr/bin/env bash
#TEST: Test basic Input DNS Adapter
#TEST: Start OpenDNSSEC and see if zone gets transferred and signed.
#TEST: Check we can support NOTIMPL from nameserver
#OPENDNSSEC-366: After key rollover, signer is failing task read and blocks signing
# NOTE(review): the top-level `return` statements below suggest this script
# is sourced by the ODS test framework rather than executed — confirm.
if [ -n "$HAVE_MYSQL" ]; then
	ods_setup_conf conf.xml conf-mysql.xml
fi &&
ods_reset_env &&
## Start master name server
ods_ldns_testns 15353 ods.datafile &&
## Start OpenDNSSEC
ods_start_ods-control &&
ods-signer verbosity 5 &&
## Wait for signed zone file
syslog_waitfor 300 'ods-signerd: .*\[STATS\] ods' &&
## Check signed zone file [when we decide on auditor tool]
test -f "$INSTALL_ROOT/var/opendnssec/signed/ods" &&
## Fake notify
log_this ldns-notify ldns-notify -p 15354 -s 1001 -r 2 -z ods 127.0.0.1 &&
## Request IXFR/UDP
syslog_waitfor 300 'ods-signerd: .*\[xfrd\] zone ods request udp/ixfr=.* to 127\.0\.0\.1' &&
syslog_waitfor 300 'ods-signerd: .*\[xfrd\] bad packet: zone ods received error code NOTIMPL from 127\.0\.0\.1' &&
## Request AXFR/TCP
syslog_waitfor 60 'ods-signerd: .*\[xfrd\] zone ods request axfr to 127\.0\.0\.1' &&
## Do a ods-signer sign ("key rollover"), and don't fail reading because of missing xfr.
ods-signer sign ods &&
syslog_waitfor 60 'ods-signerd: .*zone ods unsigned data not changed, continue' &&
## Stop
ods_stop_ods-control &&
ods_ldns_testns_kill &&
return 0
## Test failed. Kill stuff (only reached when the && chain above broke).
ods_ldns_testns_kill
ods_kill
return 1
|
/**
* @licstart The following is the entire license notice for the
* Javascript code in this page
*
* Copyright 2019 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @licend The above is the entire license notice for the
* Javascript code in this page
*/
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.SVGGraphics = void 0;
var _util = require("../shared/util");
var _display_utils = require("./display_utils");
var _is_node = _interopRequireDefault(require("../shared/is_node"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
// --- Babel runtime helpers (machine-generated; do not edit by hand) ---
// Spread (`...x`) support:
function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
// Array destructuring (`[a, b] = x`) support:
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
function _iterableToArrayLimit(arr, i) { if (!(Symbol.iterator in Object(arr) || Object.prototype.toString.call(arr) === "[object Arguments]")) { return; } var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
// Transpiled `class` support:
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
// Placeholder export; the real implementation is assigned further below.
// Calling it before that assignment is a programming error.
var SVGGraphics = function SVGGraphics() {
  throw new Error('Not implemented: SVGGraphics');
};
exports.SVGGraphics = SVGGraphics;
{
var opListToTree = function opListToTree(opList) {
var opTree = [];
var tmp = [];
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = opList[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var opListElement = _step.value;
if (opListElement.fn === 'save') {
opTree.push({
'fnId': 92,
'fn': 'group',
'items': []
});
tmp.push(opTree);
opTree = opTree[opTree.length - 1].items;
continue;
}
if (opListElement.fn === 'restore') {
opTree = tmp.pop();
} else {
opTree.push(opListElement);
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator["return"] != null) {
_iterator["return"]();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
return opTree;
};
var pf = function pf(value) {
if (Number.isInteger(value)) {
return value.toString();
}
var s = value.toFixed(10);
var i = s.length - 1;
if (s[i] !== '0') {
return s;
}
do {
i--;
} while (s[i] === '0');
return s.substring(0, s[i] === '.' ? i : i + 1);
};
var pm = function pm(m) {
if (m[4] === 0 && m[5] === 0) {
if (m[1] === 0 && m[2] === 0) {
if (m[0] === 1 && m[3] === 1) {
return '';
}
return "scale(".concat(pf(m[0]), " ").concat(pf(m[3]), ")");
}
if (m[0] === m[3] && m[1] === -m[2]) {
var a = Math.acos(m[0]) * 180 / Math.PI;
return "rotate(".concat(pf(a), ")");
}
} else {
if (m[0] === 1 && m[1] === 0 && m[2] === 0 && m[3] === 1) {
return "translate(".concat(pf(m[4]), " ").concat(pf(m[5]), ")");
}
}
return "matrix(".concat(pf(m[0]), " ").concat(pf(m[1]), " ").concat(pf(m[2]), " ").concat(pf(m[3]), " ").concat(pf(m[4]), " ") + "".concat(pf(m[5]), ")");
};
var SVG_DEFAULTS = {
fontStyle: 'normal',
fontWeight: 'normal',
fillColor: '#000000'
};
var XML_NS = 'http://www.w3.org/XML/1998/namespace';
var XLINK_NS = 'http://www.w3.org/1999/xlink';
var LINE_CAP_STYLES = ['butt', 'round', 'square'];
var LINE_JOIN_STYLES = ['miter', 'round', 'bevel'];
  // Converts raw image data to a PNG encoded as a data:/blob: URL. The IIFE
  // runs once to build lookup tables; the returned function is what callers use.
  var convertImgDataToPng = function () {
    var PNG_HEADER = new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
    // Bytes of chunk overhead: 4 length + 4 type + 4 CRC.
    var CHUNK_WRAPPER_SIZE = 12;
    // Precomputed CRC-32 table (polynomial 0xEDB88320), one entry per byte value.
    var crcTable = new Int32Array(256);
    for (var i = 0; i < 256; i++) {
      var c = i;
      for (var h = 0; h < 8; h++) {
        if (c & 1) {
          c = 0xedB88320 ^ c >> 1 & 0x7fffffff;
        } else {
          c = c >> 1 & 0x7fffffff;
        }
      }
      crcTable[i] = c;
    }
function crc32(data, start, end) {
var crc = -1;
for (var _i = start; _i < end; _i++) {
var a = (crc ^ data[_i]) & 0xff;
var b = crcTable[a];
crc = crc >>> 8 ^ b;
}
return crc ^ -1;
}
function writePngChunk(type, body, data, offset) {
var p = offset;
var len = body.length;
data[p] = len >> 24 & 0xff;
data[p + 1] = len >> 16 & 0xff;
data[p + 2] = len >> 8 & 0xff;
data[p + 3] = len & 0xff;
p += 4;
data[p] = type.charCodeAt(0) & 0xff;
data[p + 1] = type.charCodeAt(1) & 0xff;
data[p + 2] = type.charCodeAt(2) & 0xff;
data[p + 3] = type.charCodeAt(3) & 0xff;
p += 4;
data.set(body, p);
p += body.length;
var crc = crc32(data, offset + 4, p);
data[p] = crc >> 24 & 0xff;
data[p + 1] = crc >> 16 & 0xff;
data[p + 2] = crc >> 8 & 0xff;
data[p + 3] = crc & 0xff;
}
function adler32(data, start, end) {
var a = 1;
var b = 0;
for (var _i2 = start; _i2 < end; ++_i2) {
a = (a + (data[_i2] & 0xff)) % 65521;
b = (b + a) % 65521;
}
return b << 16 | a;
}
function deflateSync(literals) {
if (!(0, _is_node["default"])()) {
return deflateSyncUncompressed(literals);
}
try {
var input;
if (parseInt(process.versions.node) >= 8) {
input = literals;
} else {
input = new Buffer(literals);
}
var output = require('zlib').deflateSync(input, {
level: 9
});
return output instanceof Uint8Array ? output : new Uint8Array(output);
} catch (e) {
(0, _util.warn)('Not compressing PNG because zlib.deflateSync is unavailable: ' + e);
}
return deflateSyncUncompressed(literals);
}
function deflateSyncUncompressed(literals) {
var len = literals.length;
var maxBlockLength = 0xFFFF;
var deflateBlocks = Math.ceil(len / maxBlockLength);
var idat = new Uint8Array(2 + len + deflateBlocks * 5 + 4);
var pi = 0;
idat[pi++] = 0x78;
idat[pi++] = 0x9c;
var pos = 0;
while (len > maxBlockLength) {
idat[pi++] = 0x00;
idat[pi++] = 0xff;
idat[pi++] = 0xff;
idat[pi++] = 0x00;
idat[pi++] = 0x00;
idat.set(literals.subarray(pos, pos + maxBlockLength), pi);
pi += maxBlockLength;
pos += maxBlockLength;
len -= maxBlockLength;
}
idat[pi++] = 0x01;
idat[pi++] = len & 0xff;
idat[pi++] = len >> 8 & 0xff;
idat[pi++] = ~len & 0xffff & 0xff;
idat[pi++] = (~len & 0xffff) >> 8 & 0xff;
idat.set(literals.subarray(pos), pi);
pi += literals.length - pos;
var adler = adler32(literals, 0, literals.length);
idat[pi++] = adler >> 24 & 0xff;
idat[pi++] = adler >> 16 & 0xff;
idat[pi++] = adler >> 8 & 0xff;
idat[pi++] = adler & 0xff;
return idat;
}
function encode(imgData, kind, forceDataSchema, isMask) {
var width = imgData.width;
var height = imgData.height;
var bitDepth, colorType, lineSize;
var bytes = imgData.data;
switch (kind) {
case _util.ImageKind.GRAYSCALE_1BPP:
colorType = 0;
bitDepth = 1;
lineSize = width + 7 >> 3;
break;
case _util.ImageKind.RGB_24BPP:
colorType = 2;
bitDepth = 8;
lineSize = width * 3;
break;
case _util.ImageKind.RGBA_32BPP:
colorType = 6;
bitDepth = 8;
lineSize = width * 4;
break;
default:
throw new Error('invalid format');
}
var literals = new Uint8Array((1 + lineSize) * height);
var offsetLiterals = 0,
offsetBytes = 0;
for (var y = 0; y < height; ++y) {
literals[offsetLiterals++] = 0;
literals.set(bytes.subarray(offsetBytes, offsetBytes + lineSize), offsetLiterals);
offsetBytes += lineSize;
offsetLiterals += lineSize;
}
if (kind === _util.ImageKind.GRAYSCALE_1BPP && isMask) {
offsetLiterals = 0;
for (var _y = 0; _y < height; _y++) {
offsetLiterals++;
for (var _i3 = 0; _i3 < lineSize; _i3++) {
literals[offsetLiterals++] ^= 0xFF;
}
}
}
var ihdr = new Uint8Array([width >> 24 & 0xff, width >> 16 & 0xff, width >> 8 & 0xff, width & 0xff, height >> 24 & 0xff, height >> 16 & 0xff, height >> 8 & 0xff, height & 0xff, bitDepth, colorType, 0x00, 0x00, 0x00]);
var idat = deflateSync(literals);
var pngLength = PNG_HEADER.length + CHUNK_WRAPPER_SIZE * 3 + ihdr.length + idat.length;
var data = new Uint8Array(pngLength);
var offset = 0;
data.set(PNG_HEADER, offset);
offset += PNG_HEADER.length;
writePngChunk('IHDR', ihdr, data, offset);
offset += CHUNK_WRAPPER_SIZE + ihdr.length;
writePngChunk('IDATA', idat, data, offset);
offset += CHUNK_WRAPPER_SIZE + idat.length;
writePngChunk('IEND', new Uint8Array(0), data, offset);
return (0, _util.createObjectURL)(data, 'image/png', forceDataSchema);
}
    // Public entry point; `kind` defaults to 1-bpp grayscale when absent.
    return function convertImgDataToPng(imgData, forceDataSchema, isMask) {
      var kind = imgData.kind === undefined ? _util.ImageKind.GRAYSCALE_1BPP : imgData.kind;
      return encode(imgData, kind, forceDataSchema, isMask);
    };
  }();
var SVGExtraState =
/*#__PURE__*/
function () {
function SVGExtraState() {
_classCallCheck(this, SVGExtraState);
this.fontSizeScale = 1;
this.fontWeight = SVG_DEFAULTS.fontWeight;
this.fontSize = 0;
this.textMatrix = _util.IDENTITY_MATRIX;
this.fontMatrix = _util.FONT_IDENTITY_MATRIX;
this.leading = 0;
this.textRenderingMode = _util.TextRenderingMode.FILL;
this.textMatrixScale = 1;
this.x = 0;
this.y = 0;
this.lineX = 0;
this.lineY = 0;
this.charSpacing = 0;
this.wordSpacing = 0;
this.textHScale = 1;
this.textRise = 0;
this.fillColor = SVG_DEFAULTS.fillColor;
this.strokeColor = '#000000';
this.fillAlpha = 1;
this.strokeAlpha = 1;
this.lineWidth = 1;
this.lineJoin = '';
this.lineCap = '';
this.miterLimit = 0;
this.dashArray = [];
this.dashPhase = 0;
this.dependencies = [];
this.activeClipUrl = null;
this.clipGroup = null;
this.maskId = '';
}
_createClass(SVGExtraState, [{
key: "clone",
value: function clone() {
return Object.create(this);
}
}, {
key: "setCurrentPoint",
value: function setCurrentPoint(x, y) {
this.x = x;
this.y = y;
}
}]);
return SVGExtraState;
}();
var clipCount = 0;
var maskCount = 0;
var shadingCount = 0;
  // Real SVGGraphics implementation, replacing the stub exported above.
  exports.SVGGraphics = SVGGraphics =
  /*#__PURE__*/
  function () {
    // commonObjs/objs are PDF.js object stores (page-shared vs page-local);
    // forceDataSchema forces data: URLs instead of blob: URLs.
    function SVGGraphics(commonObjs, objs, forceDataSchema) {
      _classCallCheck(this, SVGGraphics);
      this.svgFactory = new _display_utils.DOMSVGFactory();
      this.current = new SVGExtraState();
      this.transformMatrix = _util.IDENTITY_MATRIX;
      this.transformStack = [];
      this.extraStack = [];
      this.commonObjs = commonObjs;
      this.objs = objs;
      this.pendingClip = null;
      this.pendingEOFill = false;
      this.embedFonts = false;
      this.embeddedFonts = Object.create(null);
      this.cssStyle = null;
      this.forceDataSchema = !!forceDataSchema;
      // Reverse lookup table: numeric operator id -> operator name.
      this._operatorIdMapping = [];
      for (var op in _util.OPS) {
        this._operatorIdMapping[_util.OPS[op]] = op;
      }
    }
    _createClass(SVGGraphics, [{
      key: "save",
      // Pushes the current transform and graphics state onto their stacks.
      value: function save() {
        this.transformStack.push(this.transformMatrix);
        var old = this.current;
        this.extraStack.push(old);
        this.current = old.clone();
      }
    }, {
      key: "restore",
      // Pops the most recently saved transform and graphics state.
      value: function restore() {
        this.transformMatrix = this.transformStack.pop();
        this.current = this.extraStack.pop();
        this.pendingClip = null;
        this.tgrp = null;
      }
    }, {
      key: "group",
      // Executes a subtree of operators inside a save/restore pair.
      value: function group(items) {
        this.save();
        this.executeOpTree(items);
        this.restore();
      }
    }, {
key: "loadDependencies",
value: function loadDependencies(operatorList) {
var _this = this;
var fnArray = operatorList.fnArray;
var argsArray = operatorList.argsArray;
for (var i = 0, ii = fnArray.length; i < ii; i++) {
if (fnArray[i] !== _util.OPS.dependency) {
continue;
}
var _iteratorNormalCompletion2 = true;
var _didIteratorError2 = false;
var _iteratorError2 = undefined;
try {
var _loop = function _loop() {
var obj = _step2.value;
var objsPool = obj.startsWith('g_') ? _this.commonObjs : _this.objs;
var promise = new Promise(function (resolve) {
objsPool.get(obj, resolve);
});
_this.current.dependencies.push(promise);
};
for (var _iterator2 = argsArray[i][Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
_loop();
}
} catch (err) {
_didIteratorError2 = true;
_iteratorError2 = err;
} finally {
try {
if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
_iterator2["return"]();
}
} finally {
if (_didIteratorError2) {
throw _iteratorError2;
}
}
}
}
return Promise.all(this.current.dependencies);
}
}, {
key: "transform",
value: function transform(a, b, c, d, e, f) {
var transformMatrix = [a, b, c, d, e, f];
this.transformMatrix = _util.Util.transform(this.transformMatrix, transformMatrix);
this.tgrp = null;
}
}, {
key: "getSVG",
value: function getSVG(operatorList, viewport) {
var _this2 = this;
this.viewport = viewport;
var svgElement = this._initialize(viewport);
return this.loadDependencies(operatorList).then(function () {
_this2.transformMatrix = _util.IDENTITY_MATRIX;
_this2.executeOpTree(_this2.convertOpList(operatorList));
return svgElement;
});
}
}, {
key: "convertOpList",
value: function convertOpList(operatorList) {
var operatorIdMapping = this._operatorIdMapping;
var argsArray = operatorList.argsArray;
var fnArray = operatorList.fnArray;
var opList = [];
for (var i = 0, ii = fnArray.length; i < ii; i++) {
var fnId = fnArray[i];
opList.push({
'fnId': fnId,
'fn': operatorIdMapping[fnId],
'args': argsArray[i]
});
}
return opListToTree(opList);
}
}, {
key: "executeOpTree",
value: function executeOpTree(opTree) {
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = opTree[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
var opTreeElement = _step3.value;
var fn = opTreeElement.fn;
var fnId = opTreeElement.fnId;
var args = opTreeElement.args;
switch (fnId | 0) {
case _util.OPS.beginText:
this.beginText();
break;
case _util.OPS.dependency:
break;
case _util.OPS.setLeading:
this.setLeading(args);
break;
case _util.OPS.setLeadingMoveText:
this.setLeadingMoveText(args[0], args[1]);
break;
case _util.OPS.setFont:
this.setFont(args);
break;
case _util.OPS.showText:
this.showText(args[0]);
break;
case _util.OPS.showSpacedText:
this.showText(args[0]);
break;
case _util.OPS.endText:
this.endText();
break;
case _util.OPS.moveText:
this.moveText(args[0], args[1]);
break;
case _util.OPS.setCharSpacing:
this.setCharSpacing(args[0]);
break;
case _util.OPS.setWordSpacing:
this.setWordSpacing(args[0]);
break;
case _util.OPS.setHScale:
this.setHScale(args[0]);
break;
case _util.OPS.setTextMatrix:
this.setTextMatrix(args[0], args[1], args[2], args[3], args[4], args[5]);
break;
case _util.OPS.setTextRise:
this.setTextRise(args[0]);
break;
case _util.OPS.setTextRenderingMode:
this.setTextRenderingMode(args[0]);
break;
case _util.OPS.setLineWidth:
this.setLineWidth(args[0]);
break;
case _util.OPS.setLineJoin:
this.setLineJoin(args[0]);
break;
case _util.OPS.setLineCap:
this.setLineCap(args[0]);
break;
case _util.OPS.setMiterLimit:
this.setMiterLimit(args[0]);
break;
case _util.OPS.setFillRGBColor:
this.setFillRGBColor(args[0], args[1], args[2]);
break;
case _util.OPS.setStrokeRGBColor:
this.setStrokeRGBColor(args[0], args[1], args[2]);
break;
case _util.OPS.setStrokeColorN:
this.setStrokeColorN(args);
break;
case _util.OPS.setFillColorN:
this.setFillColorN(args);
break;
case _util.OPS.shadingFill:
this.shadingFill(args[0]);
break;
case _util.OPS.setDash:
this.setDash(args[0], args[1]);
break;
case _util.OPS.setRenderingIntent:
this.setRenderingIntent(args[0]);
break;
case _util.OPS.setFlatness:
this.setFlatness(args[0]);
break;
case _util.OPS.setGState:
this.setGState(args[0]);
break;
case _util.OPS.fill:
this.fill();
break;
case _util.OPS.eoFill:
this.eoFill();
break;
case _util.OPS.stroke:
this.stroke();
break;
case _util.OPS.fillStroke:
this.fillStroke();
break;
case _util.OPS.eoFillStroke:
this.eoFillStroke();
break;
case _util.OPS.clip:
this.clip('nonzero');
break;
case _util.OPS.eoClip:
this.clip('evenodd');
break;
case _util.OPS.paintSolidColorImageMask:
this.paintSolidColorImageMask();
break;
case _util.OPS.paintJpegXObject:
this.paintJpegXObject(args[0], args[1], args[2]);
break;
case _util.OPS.paintImageXObject:
this.paintImageXObject(args[0]);
break;
case _util.OPS.paintInlineImageXObject:
this.paintInlineImageXObject(args[0]);
break;
case _util.OPS.paintImageMaskXObject:
this.paintImageMaskXObject(args[0]);
break;
case _util.OPS.paintFormXObjectBegin:
this.paintFormXObjectBegin(args[0], args[1]);
break;
case _util.OPS.paintFormXObjectEnd:
this.paintFormXObjectEnd();
break;
case _util.OPS.closePath:
this.closePath();
break;
case _util.OPS.closeStroke:
this.closeStroke();
break;
case _util.OPS.closeFillStroke:
this.closeFillStroke();
break;
case _util.OPS.closeEOFillStroke:
this.closeEOFillStroke();
break;
case _util.OPS.nextLine:
this.nextLine();
break;
case _util.OPS.transform:
this.transform(args[0], args[1], args[2], args[3], args[4], args[5]);
break;
case _util.OPS.constructPath:
this.constructPath(args[0], args[1]);
break;
case _util.OPS.endPath:
this.endPath();
break;
case 92:
this.group(opTreeElement.items);
break;
default:
(0, _util.warn)("Unimplemented operator ".concat(fn));
break;
}
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) {
_iterator3["return"]();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
}
}, {
key: "setWordSpacing",
value: function setWordSpacing(wordSpacing) {
this.current.wordSpacing = wordSpacing;
}
}, {
key: "setCharSpacing",
value: function setCharSpacing(charSpacing) {
this.current.charSpacing = charSpacing;
}
}, {
key: "nextLine",
value: function nextLine() {
this.moveText(0, this.current.leading);
}
}, {
key: "setTextMatrix",
value: function setTextMatrix(a, b, c, d, e, f) {
var current = this.current;
current.textMatrix = current.lineMatrix = [a, b, c, d, e, f];
current.textMatrixScale = Math.sqrt(a * a + b * b);
current.x = current.lineX = 0;
current.y = current.lineY = 0;
current.xcoords = [];
current.tspan = this.svgFactory.createElement('svg:tspan');
current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
current.tspan.setAttributeNS(null, 'y', pf(-current.y));
current.txtElement = this.svgFactory.createElement('svg:text');
current.txtElement.appendChild(current.tspan);
}
}, {
key: "beginText",
value: function beginText() {
var current = this.current;
current.x = current.lineX = 0;
current.y = current.lineY = 0;
current.textMatrix = _util.IDENTITY_MATRIX;
current.lineMatrix = _util.IDENTITY_MATRIX;
current.textMatrixScale = 1;
current.tspan = this.svgFactory.createElement('svg:tspan');
current.txtElement = this.svgFactory.createElement('svg:text');
current.txtgrp = this.svgFactory.createElement('svg:g');
current.xcoords = [];
}
}, {
key: "moveText",
value: function moveText(x, y) {
var current = this.current;
current.x = current.lineX += x;
current.y = current.lineY += y;
current.xcoords = [];
current.tspan = this.svgFactory.createElement('svg:tspan');
current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
current.tspan.setAttributeNS(null, 'y', pf(-current.y));
}
}, {
key: "showText",
value: function showText(glyphs) {
var current = this.current;
var font = current.font;
var fontSize = current.fontSize;
if (fontSize === 0) {
return;
}
var charSpacing = current.charSpacing;
var wordSpacing = current.wordSpacing;
var fontDirection = current.fontDirection;
var textHScale = current.textHScale * fontDirection;
var vertical = font.vertical;
var widthAdvanceScale = fontSize * current.fontMatrix[0];
var x = 0;
var _iteratorNormalCompletion4 = true;
var _didIteratorError4 = false;
var _iteratorError4 = undefined;
try {
for (var _iterator4 = glyphs[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
var glyph = _step4.value;
if (glyph === null) {
x += fontDirection * wordSpacing;
continue;
} else if ((0, _util.isNum)(glyph)) {
x += -glyph * fontSize * 0.001;
continue;
}
var width = glyph.width;
var character = glyph.fontChar;
var spacing = (glyph.isSpace ? wordSpacing : 0) + charSpacing;
var charWidth = width * widthAdvanceScale + spacing * fontDirection;
if (!glyph.isInFont && !font.missingFile) {
x += charWidth;
continue;
}
current.xcoords.push(current.x + x * textHScale);
current.tspan.textContent += character;
x += charWidth;
}
} catch (err) {
_didIteratorError4 = true;
_iteratorError4 = err;
} finally {
try {
if (!_iteratorNormalCompletion4 && _iterator4["return"] != null) {
_iterator4["return"]();
}
} finally {
if (_didIteratorError4) {
throw _iteratorError4;
}
}
}
if (vertical) {
current.y -= x * textHScale;
} else {
current.x += x * textHScale;
}
current.tspan.setAttributeNS(null, 'x', current.xcoords.map(pf).join(' '));
current.tspan.setAttributeNS(null, 'y', pf(-current.y));
current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
if (current.fontStyle !== SVG_DEFAULTS.fontStyle) {
current.tspan.setAttributeNS(null, 'font-style', current.fontStyle);
}
if (current.fontWeight !== SVG_DEFAULTS.fontWeight) {
current.tspan.setAttributeNS(null, 'font-weight', current.fontWeight);
}
var fillStrokeMode = current.textRenderingMode & _util.TextRenderingMode.FILL_STROKE_MASK;
if (fillStrokeMode === _util.TextRenderingMode.FILL || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
if (current.fillColor !== SVG_DEFAULTS.fillColor) {
current.tspan.setAttributeNS(null, 'fill', current.fillColor);
}
if (current.fillAlpha < 1) {
current.tspan.setAttributeNS(null, 'fill-opacity', current.fillAlpha);
}
} else if (current.textRenderingMode === _util.TextRenderingMode.ADD_TO_PATH) {
current.tspan.setAttributeNS(null, 'fill', 'transparent');
} else {
current.tspan.setAttributeNS(null, 'fill', 'none');
}
if (fillStrokeMode === _util.TextRenderingMode.STROKE || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
var lineWidthScale = 1 / (current.textMatrixScale || 1);
this._setStrokeAttributes(current.tspan, lineWidthScale);
}
var textMatrix = current.textMatrix;
if (current.textRise !== 0) {
textMatrix = textMatrix.slice();
textMatrix[5] += current.textRise;
}
current.txtElement.setAttributeNS(null, 'transform', "".concat(pm(textMatrix), " scale(1, -1)"));
current.txtElement.setAttributeNS(XML_NS, 'xml:space', 'preserve');
current.txtElement.appendChild(current.tspan);
current.txtgrp.appendChild(current.txtElement);
this._ensureTransformGroup().appendChild(current.txtElement);
}
}, {
key: "setLeadingMoveText",
value: function setLeadingMoveText(x, y) {
this.setLeading(-y);
this.moveText(x, y);
}
}, {
key: "addFontStyle",
value: function addFontStyle(fontObj) {
if (!this.cssStyle) {
this.cssStyle = this.svgFactory.createElement('svg:style');
this.cssStyle.setAttributeNS(null, 'type', 'text/css');
this.defs.appendChild(this.cssStyle);
}
var url = (0, _util.createObjectURL)(fontObj.data, fontObj.mimetype, this.forceDataSchema);
this.cssStyle.textContent += "@font-face { font-family: \"".concat(fontObj.loadedName, "\";") + " src: url(".concat(url, "); }\n");
}
}, {
key: "setFont",
value: function setFont(details) {
var current = this.current;
var fontObj = this.commonObjs.get(details[0]);
var size = details[1];
current.font = fontObj;
if (this.embedFonts && fontObj.data && !this.embeddedFonts[fontObj.loadedName]) {
this.addFontStyle(fontObj);
this.embeddedFonts[fontObj.loadedName] = fontObj;
}
current.fontMatrix = fontObj.fontMatrix ? fontObj.fontMatrix : _util.FONT_IDENTITY_MATRIX;
var bold = fontObj.black ? fontObj.bold ? 'bolder' : 'bold' : fontObj.bold ? 'bold' : 'normal';
var italic = fontObj.italic ? 'italic' : 'normal';
if (size < 0) {
size = -size;
current.fontDirection = -1;
} else {
current.fontDirection = 1;
}
current.fontSize = size;
current.fontFamily = fontObj.loadedName;
current.fontWeight = bold;
current.fontStyle = italic;
current.tspan = this.svgFactory.createElement('svg:tspan');
current.tspan.setAttributeNS(null, 'y', pf(-current.y));
current.xcoords = [];
}
}, {
key: "endText",
value: function endText() {
var current = this.current;
if (current.textRenderingMode & _util.TextRenderingMode.ADD_TO_PATH_FLAG && current.txtElement && current.txtElement.hasChildNodes()) {
current.element = current.txtElement;
this.clip('nonzero');
this.endPath();
}
}
}, {
key: "setLineWidth",
value: function setLineWidth(width) {
if (width > 0) {
this.current.lineWidth = width;
}
}
}, {
key: "setLineCap",
value: function setLineCap(style) {
this.current.lineCap = LINE_CAP_STYLES[style];
}
}, {
key: "setLineJoin",
value: function setLineJoin(style) {
this.current.lineJoin = LINE_JOIN_STYLES[style];
}
}, {
key: "setMiterLimit",
value: function setMiterLimit(limit) {
this.current.miterLimit = limit;
}
}, {
key: "setStrokeAlpha",
value: function setStrokeAlpha(strokeAlpha) {
this.current.strokeAlpha = strokeAlpha;
}
}, {
key: "setStrokeRGBColor",
value: function setStrokeRGBColor(r, g, b) {
this.current.strokeColor = _util.Util.makeCssRgb(r, g, b);
}
}, {
key: "setFillAlpha",
value: function setFillAlpha(fillAlpha) {
this.current.fillAlpha = fillAlpha;
}
}, {
key: "setFillRGBColor",
value: function setFillRGBColor(r, g, b) {
this.current.fillColor = _util.Util.makeCssRgb(r, g, b);
this.current.tspan = this.svgFactory.createElement('svg:tspan');
this.current.xcoords = [];
}
}, {
key: "setStrokeColorN",
value: function setStrokeColorN(args) {
this.current.strokeColor = this._makeColorN_Pattern(args);
}
}, {
key: "setFillColorN",
value: function setFillColorN(args) {
this.current.fillColor = this._makeColorN_Pattern(args);
}
}, {
key: "shadingFill",
value: function shadingFill(args) {
var width = this.viewport.width;
var height = this.viewport.height;
var inv = _util.Util.inverseTransform(this.transformMatrix);
var bl = _util.Util.applyTransform([0, 0], inv);
var br = _util.Util.applyTransform([0, height], inv);
var ul = _util.Util.applyTransform([width, 0], inv);
var ur = _util.Util.applyTransform([width, height], inv);
var x0 = Math.min(bl[0], br[0], ul[0], ur[0]);
var y0 = Math.min(bl[1], br[1], ul[1], ur[1]);
var x1 = Math.max(bl[0], br[0], ul[0], ur[0]);
var y1 = Math.max(bl[1], br[1], ul[1], ur[1]);
var rect = this.svgFactory.createElement('svg:rect');
rect.setAttributeNS(null, 'x', x0);
rect.setAttributeNS(null, 'y', y0);
rect.setAttributeNS(null, 'width', x1 - x0);
rect.setAttributeNS(null, 'height', y1 - y0);
rect.setAttributeNS(null, 'fill', this._makeShadingPattern(args));
this._ensureTransformGroup().appendChild(rect);
}
}, {
key: "_makeColorN_Pattern",
value: function _makeColorN_Pattern(args) {
if (args[0] === 'TilingPattern') {
return this._makeTilingPattern(args);
}
return this._makeShadingPattern(args);
}
}, {
key: "_makeTilingPattern",
value: function _makeTilingPattern(args) {
var color = args[1];
var operatorList = args[2];
var matrix = args[3] || _util.IDENTITY_MATRIX;
var _args$ = _slicedToArray(args[4], 4),
x0 = _args$[0],
y0 = _args$[1],
x1 = _args$[2],
y1 = _args$[3];
var xstep = args[5];
var ystep = args[6];
var paintType = args[7];
var tilingId = "shading".concat(shadingCount++);
var _Util$applyTransform = _util.Util.applyTransform([x0, y0], matrix),
_Util$applyTransform2 = _slicedToArray(_Util$applyTransform, 2),
tx0 = _Util$applyTransform2[0],
ty0 = _Util$applyTransform2[1];
var _Util$applyTransform3 = _util.Util.applyTransform([x1, y1], matrix),
_Util$applyTransform4 = _slicedToArray(_Util$applyTransform3, 2),
tx1 = _Util$applyTransform4[0],
ty1 = _Util$applyTransform4[1];
var _Util$singularValueDe = _util.Util.singularValueDecompose2dScale(matrix),
_Util$singularValueDe2 = _slicedToArray(_Util$singularValueDe, 2),
xscale = _Util$singularValueDe2[0],
yscale = _Util$singularValueDe2[1];
var txstep = xstep * xscale;
var tystep = ystep * yscale;
var tiling = this.svgFactory.createElement('svg:pattern');
tiling.setAttributeNS(null, 'id', tilingId);
tiling.setAttributeNS(null, 'patternUnits', 'userSpaceOnUse');
tiling.setAttributeNS(null, 'width', txstep);
tiling.setAttributeNS(null, 'height', tystep);
tiling.setAttributeNS(null, 'x', "".concat(tx0));
tiling.setAttributeNS(null, 'y', "".concat(ty0));
var svg = this.svg;
var transformMatrix = this.transformMatrix;
var fillColor = this.current.fillColor;
var strokeColor = this.current.strokeColor;
var bbox = this.svgFactory.create(tx1 - tx0, ty1 - ty0);
this.svg = bbox;
this.transformMatrix = matrix;
if (paintType === 2) {
var cssColor = _util.Util.makeCssRgb.apply(_util.Util, _toConsumableArray(color));
this.current.fillColor = cssColor;
this.current.strokeColor = cssColor;
}
this.executeOpTree(this.convertOpList(operatorList));
this.svg = svg;
this.transformMatrix = transformMatrix;
this.current.fillColor = fillColor;
this.current.strokeColor = strokeColor;
tiling.appendChild(bbox.childNodes[0]);
this.defs.appendChild(tiling);
return "url(#".concat(tilingId, ")");
}
}, {
key: "_makeShadingPattern",
value: function _makeShadingPattern(args) {
switch (args[0]) {
case 'RadialAxial':
var shadingId = "shading".concat(shadingCount++);
var colorStops = args[2];
var gradient;
switch (args[1]) {
case 'axial':
var point0 = args[3];
var point1 = args[4];
gradient = this.svgFactory.createElement('svg:linearGradient');
gradient.setAttributeNS(null, 'id', shadingId);
gradient.setAttributeNS(null, 'gradientUnits', 'userSpaceOnUse');
gradient.setAttributeNS(null, 'x1', point0[0]);
gradient.setAttributeNS(null, 'y1', point0[1]);
gradient.setAttributeNS(null, 'x2', point1[0]);
gradient.setAttributeNS(null, 'y2', point1[1]);
break;
case 'radial':
var focalPoint = args[3];
var circlePoint = args[4];
var focalRadius = args[5];
var circleRadius = args[6];
gradient = this.svgFactory.createElement('svg:radialGradient');
gradient.setAttributeNS(null, 'id', shadingId);
gradient.setAttributeNS(null, 'gradientUnits', 'userSpaceOnUse');
gradient.setAttributeNS(null, 'cx', circlePoint[0]);
gradient.setAttributeNS(null, 'cy', circlePoint[1]);
gradient.setAttributeNS(null, 'r', circleRadius);
gradient.setAttributeNS(null, 'fx', focalPoint[0]);
gradient.setAttributeNS(null, 'fy', focalPoint[1]);
gradient.setAttributeNS(null, 'fr', focalRadius);
break;
default:
throw new Error("Unknown RadialAxial type: ".concat(args[1]));
}
var _iteratorNormalCompletion5 = true;
var _didIteratorError5 = false;
var _iteratorError5 = undefined;
try {
for (var _iterator5 = colorStops[Symbol.iterator](), _step5; !(_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done); _iteratorNormalCompletion5 = true) {
var colorStop = _step5.value;
var stop = this.svgFactory.createElement('svg:stop');
stop.setAttributeNS(null, 'offset', colorStop[0]);
stop.setAttributeNS(null, 'stop-color', colorStop[1]);
gradient.appendChild(stop);
}
} catch (err) {
_didIteratorError5 = true;
_iteratorError5 = err;
} finally {
try {
if (!_iteratorNormalCompletion5 && _iterator5["return"] != null) {
_iterator5["return"]();
}
} finally {
if (_didIteratorError5) {
throw _iteratorError5;
}
}
}
this.defs.appendChild(gradient);
return "url(#".concat(shadingId, ")");
case 'Mesh':
(0, _util.warn)('Unimplemented pattern Mesh');
return null;
case 'Dummy':
return 'hotpink';
default:
throw new Error("Unknown IR type: ".concat(args[0]));
}
}
}, {
key: "setDash",
value: function setDash(dashArray, dashPhase) {
this.current.dashArray = dashArray;
this.current.dashPhase = dashPhase;
}
}, {
key: "constructPath",
value: function constructPath(ops, args) {
var current = this.current;
var x = current.x,
y = current.y;
var d = [];
var j = 0;
var _iteratorNormalCompletion6 = true;
var _didIteratorError6 = false;
var _iteratorError6 = undefined;
try {
for (var _iterator6 = ops[Symbol.iterator](), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) {
var op = _step6.value;
switch (op | 0) {
case _util.OPS.rectangle:
x = args[j++];
y = args[j++];
var width = args[j++];
var height = args[j++];
var xw = x + width;
var yh = y + height;
d.push('M', pf(x), pf(y), 'L', pf(xw), pf(y), 'L', pf(xw), pf(yh), 'L', pf(x), pf(yh), 'Z');
break;
case _util.OPS.moveTo:
x = args[j++];
y = args[j++];
d.push('M', pf(x), pf(y));
break;
case _util.OPS.lineTo:
x = args[j++];
y = args[j++];
d.push('L', pf(x), pf(y));
break;
case _util.OPS.curveTo:
x = args[j + 4];
y = args[j + 5];
d.push('C', pf(args[j]), pf(args[j + 1]), pf(args[j + 2]), pf(args[j + 3]), pf(x), pf(y));
j += 6;
break;
case _util.OPS.curveTo2:
x = args[j + 2];
y = args[j + 3];
d.push('C', pf(x), pf(y), pf(args[j]), pf(args[j + 1]), pf(args[j + 2]), pf(args[j + 3]));
j += 4;
break;
case _util.OPS.curveTo3:
x = args[j + 2];
y = args[j + 3];
d.push('C', pf(args[j]), pf(args[j + 1]), pf(x), pf(y), pf(x), pf(y));
j += 4;
break;
case _util.OPS.closePath:
d.push('Z');
break;
}
}
} catch (err) {
_didIteratorError6 = true;
_iteratorError6 = err;
} finally {
try {
if (!_iteratorNormalCompletion6 && _iterator6["return"] != null) {
_iterator6["return"]();
}
} finally {
if (_didIteratorError6) {
throw _iteratorError6;
}
}
}
d = d.join(' ');
if (current.path && ops.length > 0 && ops[0] !== _util.OPS.rectangle && ops[0] !== _util.OPS.moveTo) {
d = current.path.getAttributeNS(null, 'd') + d;
} else {
current.path = this.svgFactory.createElement('svg:path');
this._ensureTransformGroup().appendChild(current.path);
}
current.path.setAttributeNS(null, 'd', d);
current.path.setAttributeNS(null, 'fill', 'none');
current.element = current.path;
current.setCurrentPoint(x, y);
}
}, {
key: "endPath",
value: function endPath() {
var current = this.current;
current.path = null;
if (!this.pendingClip) {
return;
}
if (!current.element) {
this.pendingClip = null;
return;
}
var clipId = "clippath".concat(clipCount++);
var clipPath = this.svgFactory.createElement('svg:clipPath');
clipPath.setAttributeNS(null, 'id', clipId);
clipPath.setAttributeNS(null, 'transform', pm(this.transformMatrix));
var clipElement = current.element.cloneNode(true);
if (this.pendingClip === 'evenodd') {
clipElement.setAttributeNS(null, 'clip-rule', 'evenodd');
} else {
clipElement.setAttributeNS(null, 'clip-rule', 'nonzero');
}
this.pendingClip = null;
clipPath.appendChild(clipElement);
this.defs.appendChild(clipPath);
if (current.activeClipUrl) {
current.clipGroup = null;
this.extraStack.forEach(function (prev) {
prev.clipGroup = null;
});
clipPath.setAttributeNS(null, 'clip-path', current.activeClipUrl);
}
current.activeClipUrl = "url(#".concat(clipId, ")");
this.tgrp = null;
}
}, {
key: "clip",
value: function clip(type) {
this.pendingClip = type;
}
}, {
key: "closePath",
value: function closePath() {
var current = this.current;
if (current.path) {
var d = "".concat(current.path.getAttributeNS(null, 'd'), "Z");
current.path.setAttributeNS(null, 'd', d);
}
}
}, {
key: "setLeading",
value: function setLeading(leading) {
this.current.leading = -leading;
}
}, {
key: "setTextRise",
value: function setTextRise(textRise) {
this.current.textRise = textRise;
}
}, {
key: "setTextRenderingMode",
value: function setTextRenderingMode(textRenderingMode) {
this.current.textRenderingMode = textRenderingMode;
}
}, {
key: "setHScale",
value: function setHScale(scale) {
this.current.textHScale = scale / 100;
}
}, {
key: "setRenderingIntent",
value: function setRenderingIntent(intent) {}
}, {
key: "setFlatness",
value: function setFlatness(flatness) {}
}, {
key: "setGState",
value: function setGState(states) {
var _iteratorNormalCompletion7 = true;
var _didIteratorError7 = false;
var _iteratorError7 = undefined;
try {
for (var _iterator7 = states[Symbol.iterator](), _step7; !(_iteratorNormalCompletion7 = (_step7 = _iterator7.next()).done); _iteratorNormalCompletion7 = true) {
var _step7$value = _slicedToArray(_step7.value, 2),
key = _step7$value[0],
value = _step7$value[1];
switch (key) {
case 'LW':
this.setLineWidth(value);
break;
case 'LC':
this.setLineCap(value);
break;
case 'LJ':
this.setLineJoin(value);
break;
case 'ML':
this.setMiterLimit(value);
break;
case 'D':
this.setDash(value[0], value[1]);
break;
case 'RI':
this.setRenderingIntent(value);
break;
case 'FL':
this.setFlatness(value);
break;
case 'Font':
this.setFont(value);
break;
case 'CA':
this.setStrokeAlpha(value);
break;
case 'ca':
this.setFillAlpha(value);
break;
default:
(0, _util.warn)("Unimplemented graphic state operator ".concat(key));
break;
}
}
} catch (err) {
_didIteratorError7 = true;
_iteratorError7 = err;
} finally {
try {
if (!_iteratorNormalCompletion7 && _iterator7["return"] != null) {
_iterator7["return"]();
}
} finally {
if (_didIteratorError7) {
throw _iteratorError7;
}
}
}
}
}, {
key: "fill",
value: function fill() {
var current = this.current;
if (current.element) {
current.element.setAttributeNS(null, 'fill', current.fillColor);
current.element.setAttributeNS(null, 'fill-opacity', current.fillAlpha);
this.endPath();
}
}
}, {
key: "stroke",
value: function stroke() {
var current = this.current;
if (current.element) {
this._setStrokeAttributes(current.element);
current.element.setAttributeNS(null, 'fill', 'none');
this.endPath();
}
}
}, {
key: "_setStrokeAttributes",
value: function _setStrokeAttributes(element) {
var lineWidthScale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1;
var current = this.current;
var dashArray = current.dashArray;
if (lineWidthScale !== 1 && dashArray.length > 0) {
dashArray = dashArray.map(function (value) {
return lineWidthScale * value;
});
}
element.setAttributeNS(null, 'stroke', current.strokeColor);
element.setAttributeNS(null, 'stroke-opacity', current.strokeAlpha);
element.setAttributeNS(null, 'stroke-miterlimit', pf(current.miterLimit));
element.setAttributeNS(null, 'stroke-linecap', current.lineCap);
element.setAttributeNS(null, 'stroke-linejoin', current.lineJoin);
element.setAttributeNS(null, 'stroke-width', pf(lineWidthScale * current.lineWidth) + 'px');
element.setAttributeNS(null, 'stroke-dasharray', dashArray.map(pf).join(' '));
element.setAttributeNS(null, 'stroke-dashoffset', pf(lineWidthScale * current.dashPhase) + 'px');
}
}, {
key: "eoFill",
value: function eoFill() {
if (this.current.element) {
this.current.element.setAttributeNS(null, 'fill-rule', 'evenodd');
}
this.fill();
}
}, {
key: "fillStroke",
value: function fillStroke() {
this.stroke();
this.fill();
}
}, {
key: "eoFillStroke",
value: function eoFillStroke() {
if (this.current.element) {
this.current.element.setAttributeNS(null, 'fill-rule', 'evenodd');
}
this.fillStroke();
}
}, {
key: "closeStroke",
value: function closeStroke() {
this.closePath();
this.stroke();
}
}, {
key: "closeFillStroke",
value: function closeFillStroke() {
this.closePath();
this.fillStroke();
}
}, {
key: "closeEOFillStroke",
value: function closeEOFillStroke() {
this.closePath();
this.eoFillStroke();
}
}, {
key: "paintSolidColorImageMask",
value: function paintSolidColorImageMask() {
var rect = this.svgFactory.createElement('svg:rect');
rect.setAttributeNS(null, 'x', '0');
rect.setAttributeNS(null, 'y', '0');
rect.setAttributeNS(null, 'width', '1px');
rect.setAttributeNS(null, 'height', '1px');
rect.setAttributeNS(null, 'fill', this.current.fillColor);
this._ensureTransformGroup().appendChild(rect);
}
}, {
key: "paintJpegXObject",
value: function paintJpegXObject(objId, w, h) {
var imgObj = this.objs.get(objId);
var imgEl = this.svgFactory.createElement('svg:image');
imgEl.setAttributeNS(XLINK_NS, 'xlink:href', imgObj.src);
imgEl.setAttributeNS(null, 'width', pf(w));
imgEl.setAttributeNS(null, 'height', pf(h));
imgEl.setAttributeNS(null, 'x', '0');
imgEl.setAttributeNS(null, 'y', pf(-h));
imgEl.setAttributeNS(null, 'transform', "scale(".concat(pf(1 / w), " ").concat(pf(-1 / h), ")"));
this._ensureTransformGroup().appendChild(imgEl);
}
}, {
key: "paintImageXObject",
value: function paintImageXObject(objId) {
var imgData = this.objs.get(objId);
if (!imgData) {
(0, _util.warn)("Dependent image with object ID ".concat(objId, " is not ready yet"));
return;
}
this.paintInlineImageXObject(imgData);
}
}, {
key: "paintInlineImageXObject",
value: function paintInlineImageXObject(imgData, mask) {
var width = imgData.width;
var height = imgData.height;
var imgSrc = convertImgDataToPng(imgData, this.forceDataSchema, !!mask);
var cliprect = this.svgFactory.createElement('svg:rect');
cliprect.setAttributeNS(null, 'x', '0');
cliprect.setAttributeNS(null, 'y', '0');
cliprect.setAttributeNS(null, 'width', pf(width));
cliprect.setAttributeNS(null, 'height', pf(height));
this.current.element = cliprect;
this.clip('nonzero');
var imgEl = this.svgFactory.createElement('svg:image');
imgEl.setAttributeNS(XLINK_NS, 'xlink:href', imgSrc);
imgEl.setAttributeNS(null, 'x', '0');
imgEl.setAttributeNS(null, 'y', pf(-height));
imgEl.setAttributeNS(null, 'width', pf(width) + 'px');
imgEl.setAttributeNS(null, 'height', pf(height) + 'px');
imgEl.setAttributeNS(null, 'transform', "scale(".concat(pf(1 / width), " ").concat(pf(-1 / height), ")"));
if (mask) {
mask.appendChild(imgEl);
} else {
this._ensureTransformGroup().appendChild(imgEl);
}
}
}, {
key: "paintImageMaskXObject",
value: function paintImageMaskXObject(imgData) {
var current = this.current;
var width = imgData.width;
var height = imgData.height;
var fillColor = current.fillColor;
current.maskId = "mask".concat(maskCount++);
var mask = this.svgFactory.createElement('svg:mask');
mask.setAttributeNS(null, 'id', current.maskId);
var rect = this.svgFactory.createElement('svg:rect');
rect.setAttributeNS(null, 'x', '0');
rect.setAttributeNS(null, 'y', '0');
rect.setAttributeNS(null, 'width', pf(width));
rect.setAttributeNS(null, 'height', pf(height));
rect.setAttributeNS(null, 'fill', fillColor);
rect.setAttributeNS(null, 'mask', "url(#".concat(current.maskId, ")"));
this.defs.appendChild(mask);
this._ensureTransformGroup().appendChild(rect);
this.paintInlineImageXObject(imgData, mask);
}
}, {
key: "paintFormXObjectBegin",
value: function paintFormXObjectBegin(matrix, bbox) {
if (Array.isArray(matrix) && matrix.length === 6) {
this.transform(matrix[0], matrix[1], matrix[2], matrix[3], matrix[4], matrix[5]);
}
if (bbox) {
var width = bbox[2] - bbox[0];
var height = bbox[3] - bbox[1];
var cliprect = this.svgFactory.createElement('svg:rect');
cliprect.setAttributeNS(null, 'x', bbox[0]);
cliprect.setAttributeNS(null, 'y', bbox[1]);
cliprect.setAttributeNS(null, 'width', pf(width));
cliprect.setAttributeNS(null, 'height', pf(height));
this.current.element = cliprect;
this.clip('nonzero');
this.endPath();
}
}
}, {
key: "paintFormXObjectEnd",
value: function paintFormXObjectEnd() {}
}, {
key: "_initialize",
value: function _initialize(viewport) {
var svg = this.svgFactory.create(viewport.width, viewport.height);
var definitions = this.svgFactory.createElement('svg:defs');
svg.appendChild(definitions);
this.defs = definitions;
var rootGroup = this.svgFactory.createElement('svg:g');
rootGroup.setAttributeNS(null, 'transform', pm(viewport.transform));
svg.appendChild(rootGroup);
this.svg = rootGroup;
return svg;
}
}, {
key: "_ensureClipGroup",
value: function _ensureClipGroup() {
if (!this.current.clipGroup) {
var clipGroup = this.svgFactory.createElement('svg:g');
clipGroup.setAttributeNS(null, 'clip-path', this.current.activeClipUrl);
this.svg.appendChild(clipGroup);
this.current.clipGroup = clipGroup;
}
return this.current.clipGroup;
}
}, {
key: "_ensureTransformGroup",
value: function _ensureTransformGroup() {
if (!this.tgrp) {
this.tgrp = this.svgFactory.createElement('svg:g');
this.tgrp.setAttributeNS(null, 'transform', pm(this.transformMatrix));
if (this.current.activeClipUrl) {
this._ensureClipGroup().appendChild(this.tgrp);
} else {
this.svg.appendChild(this.tgrp);
}
}
return this.tgrp;
}
}]);
return SVGGraphics;
}();
} |
// scrape artifact commented out so the file parses (was: <gh_stars>10-100)
/**
* {@link io.opensphere.osh.results.ResultHandler}s that know how to handle
* the results of aerial camera sensors.
*/
package io.opensphere.osh.aerialimagery.results;
|
// scrape artifact commented out so the header parses (was: <reponame>Chencheng0302/Blokus_C- <gh_stars>0)
#pragma once
#include"block.h"
#include"plan.h"
#include"square.h"
#include <vector>
#include <string>
using namespace std;
// Coordinate-mapping parameters for a player's view of the board.
// NOTE(review): semantics inferred from field names only — origin offset
// (origin_x/origin_y) and scale/step factors (kx/ky); confirm against usage.
struct dataset {
	int origin_x;
	int origin_y;
	int kx;
	int ky;
};
// A Blokus player: owns a set of blocks, tracks squares where placement is
// currently possible, accumulates candidate placement plans, and records
// pass / perfect-placement status.
class player
{
public:
	player(int id, string name);
	// Print the blocks this player has not yet placed.
	void displayRemainingBlocks();
	void setBlock(int pos);
	block* getBlock(int id);
	block* getSpecificBlock(block* target);
	int getID();
	// Place (remove from the remaining set) the block at `pos` / `target`.
	void putBlock(int pos);
	void putSpecificBlock(block* target);
	vector<block*> getRemainingBlocks();
	void setAvailableSquare(square* target);
	// Clear the recorded available-square list.
	void cleanSquaresRecord();
	void perfectlyPlaced();
	vector<square*> getAvailableSquares();
	// Overload restricted to a rectangular board region.
	vector<square*> getAvailableSquares(int topleftX, int topleftY, int width, int height);
	vector<plan*> getPlans();
	string getName();
	// Mark/query that this player passed their turn.
	void passed();
	bool hasBeenPassed();
	bool isPefectlyPlaced();
	void addPlan(plan* givenPlan);
	void clearPlans();
	dataset getParameters();
private:
	// Translate an external block index to the internal ordering.
	int indexTranslate(int original_index);
	vector<block*> ownBlocks;               // blocks not yet placed
	vector<square*>availableSquares;        // squares open for placement
	vector<plan*>placementPlans;            // candidate placements
	string playerName;
	bool isPassed;                          // player passed this game
	bool isPerfectlyPlaced;                 // all tiles placed
	bool tileStatus[21];                    // per-tile placed flags (21 Blokus pieces)
	int playerID;
	dataset parameters;                     // board-coordinate mapping
};
|
#include <quadraticspline.h>
namespace Geometry
{
// Interleave the control points of a closed spline with edge midpoints,
// producing the on/off point sequence of a closed quadratic Bezier chain:
// even indices hold midpoints (on-curve), odd indices hold the original
// control points (off-curve); the first midpoint is repeated at the end so
// the chain closes.
Point2DList QuadraticSpline::convertClosedToQuadraticBezierPoints() const
{
    const Point2DList &allControlPoints = controlPoints();
    const int nbcontrol = allControlPoints.size();
    const int size = nbcontrol * 2 + 1;
    Point2DList bezierPoints(size);
    // Copy 1 ... -2 -1 to 1 3 .. -4 -2
    for (int i = 1, dst = 1; i <= nbcontrol; ++i, dst += 2) {
        const int src = i % nbcontrol;
        bezierPoints[dst] = allControlPoints[src];
    }
    // Midpoints of consecutive control points fill the even slots; the
    // modulo wraps so the final midpoint equals the first (closure).
    for (int i = 0; i <= nbcontrol; ++i) {
        const int src = i % nbcontrol;
        const int dst = i * 2;
        const int srcnext = (src + 1) % nbcontrol;
        bezierPoints[dst] = (allControlPoints[src] + allControlPoints[srcnext]) / 2.0f;
    }
    return bezierPoints;
}
// Same interleaving as the closed case, but for an open spline: the first
// and last control points are used verbatim as the curve endpoints, and no
// wrap-around midpoint is generated.
Point2DList QuadraticSpline::convertOpenedToQuadraticBezierPoints() const
{
    const Point2DList &allControlPoints = controlPoints();
    const int nbcontrol = allControlPoints.size();
    const int size = (nbcontrol - 1) * 2 - 1;
    Point2DList bezierPoints(size);
    // Interior control points go to the odd slots (off-curve points).
    for (int src = 1, dst = 1; src < (nbcontrol - 1); ++src, dst += 2) {
        bezierPoints[dst] = allControlPoints[src];
    }
    // Midpoints of consecutive interior control points fill the even slots.
    for (int src = 1; src < (nbcontrol - 2); ++src) {
        const int dst = src * 2;
        const int srcnext = src + 1;
        bezierPoints[dst] = (allControlPoints[src] + allControlPoints[srcnext]) / 2.0f;
    }
    // The curve starts and ends exactly on the first/last control points.
    bezierPoints.front() = allControlPoints.front();
    bezierPoints.back() = allControlPoints.back();
    return bezierPoints;
}
// Degree-elevate one quadratic Bezier (q0, q1, q2) to an equivalent cubic:
// the cubic's inner control points lie 2/3 of the way from each endpoint
// toward the quadratic control point.
static Bezier quadraticBezierPointToCubicBezier(const QVector2D &q0, const QVector2D &q1, const QVector2D &q2)
{
    const QVector2D c1 = q0 + 2.0f * (q1 - q0) / 3.0f;
    const QVector2D c2 = q2 + 2.0f * (q1 - q2) / 3.0f;
    return Bezier(q0, c1, c2, q2);
}
// Walk the interleaved on/off point list two steps at a time; each
// (on-curve, off-curve, on-curve) triple becomes one cubic segment.
Bezier::List QuadraticSpline::pointsToBeziers(const Point2DList &bezierPoints) const
{
    const int size = bezierPoints.size();
    Bezier::List beziers(size / 2);
    for (int src = 0, dst = 0; src < (size - 1); src += 2, ++dst) {
        beziers[dst] = quadraticBezierPointToCubicBezier(bezierPoints[src], bezierPoints[src + 1], bezierPoints[src + 2]);
    }
    return beziers;
}
// Construct a quadratic spline over the given control points; `closed`
// selects whether the curve wraps around to its first point.
QuadraticSpline::QuadraticSpline(Point2DList &&points, bool closed)
    :Spline(std::move(points), closed)
{
}
// Convert this quadratic spline into its cubic Bezier segment list, using
// the closed or open point layout as appropriate.
Bezier::List QuadraticSpline::toBeziers() const
{
    if (closed()) {
        return pointsToBeziers(convertClosedToQuadraticBezierPoints());
    }
    return pointsToBeziers(convertOpenedToQuadraticBezierPoints());
}
}
|
module VendorAPISpecHelpers
VALID_METADATA = {
attribution: {
full_name: '<NAME>',
email: '<EMAIL>',
user_id: '12345',
},
timestamp: Time.zone.now.iso8601,
}.freeze
def get_api_request(url, options = {})
headers_and_params = {
headers: {
'Authorization' => auth_header,
},
}.deep_merge(options)
get url, **headers_and_params
end
def post_api_request(url, options = {})
headers_and_params = {
params: {
meta: VALID_METADATA,
},
headers: {
'Authorization' => auth_header,
'Content-Type' => 'application/json',
},
}.deep_merge(options)
headers_and_params[:params] = headers_and_params[:params].to_json
post url, **headers_and_params
end
def auth_header
"Bearer #{api_token}"
end
def api_token
@api_token ||= VendorAPIToken.create_with_random_token!(provider: currently_authenticated_provider)
end
def currently_authenticated_provider
@currently_authenticated_provider ||= create(:provider)
end
def create_application_choice_for_currently_authenticated_provider(attributes = {})
create(
:submitted_application_choice,
:with_completed_application_form,
{ course_option: course_option_for_provider(provider: currently_authenticated_provider) }.merge(attributes),
)
end
def parsed_response
JSON.parse(response.body)
end
def error_response
parsed_response['errors'].first
end
RSpec::Matchers.define :be_valid_against_openapi_schema do |schema_name|
match do |item|
spec = OpenAPI3Specification.new(VendorAPISpecification.as_hash)
JSONSchemaValidator.new(
spec.as_json_schema(schema_name),
item,
).valid?
end
failure_message do |item|
spec = OpenAPI3Specification.new(VendorAPISpecification.as_hash)
JSONSchemaValidator.new(
spec.as_json_schema(schema_name),
item,
).failure_message
end
end
class JSONSchemaValidator
attr_reader :schema, :item
def initialize(schema, item)
@schema = schema
@item = item
end
def valid?
formatted_validation_errors.blank?
end
def failure_message
<<~ERROR
Expected the item to be valid against schema:
#{formatted_item}
But I got these validation errors:
#{formatted_validation_errors}
ERROR
end
private
def formatted_validation_errors
validator = JSON::Validator.fully_validate(schema, item)
validator.map { |message| '- ' + humanized_error(message) }.join("\n")
end
def formatted_item
return item if item.is_a?(String)
JSON.pretty_generate(item)
end
def humanized_error(message)
message.gsub("The property '#/'", 'The item')
end
end
end
|
<filename>version22/bngen.cpp<gh_stars>10-100
/*
Copyright 2015 CertiVox UK Ltd
This file is part of The CertiVox MIRACL IOT Crypto SDK (MiotCL)
MiotCL is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MiotCL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with MiotCL. If not, see <http://www.gnu.org/licenses/>.
You can be released from the requirements of the license by purchasing
a commercial license.
*/
/* BNGEN - Helper MIRACL program to generate constants for BN curve
(MINGW build)
g++ -O3 bngen.cpp big.cpp zzn.cpp zzn2.cpp ecn2.cpp miracl.a -o bngen.exe
This ONLY works for D-type curves of the form y^2=x^3+2, with a negative x parameter, and x=3 mod 4
*/
#include <iostream>
#include "big.h"
#include "zzn2.h"
#include "ecn2.h"
using namespace std;
Miracl precision(20,0);
/* Print t to stdout as a C array initialiser of w limbs, least-significant
   limb first, where each limb is a digit of t in base m (m = 2^BASEBITS).
   64-bit chunks get an 'L' suffix per limb. Returns the last (most
   significant) limb printed. */
Big output(int chunk,int w,Big t,Big m)
{
	Big last,y=t;
	cout << "{";
	for (int i=0;i<w;i++)
	{
		last=y%m;
		cout << "0x" << last;  // IOBASE=16, so limbs print as hex
		y/=m;
		if (i==w-1) break;
		if (chunk==64) cout << "L,";
		else cout << ",";
	}
	if (chunk==64) cout << "L}";
	else cout << "}";
	return last;
}
/* Fast multiplication of A by q via the Frobenius endomorphism (valid for
   Trace-Zero group members only). A is updated in place; F is the
   precomputed Frobenius constant from set_frobenius_constant().
   Fix: removed the dead store `w=F*F` (it was unconditionally overwritten
   by `w=r*r`) and the unused local `z`; r=F already covers the D-twist
   case, so the redundant `if (TWIST==MR_SEXTIC_D) r=F;` is dropped. */
void q_power_frobenius(ECn2 &A,ZZn2 &F)
{
	ZZn2 x,y,w,r;
	A.get(x,y);
	r=F;                                             // default (D-type sextic twist)
	if (get_mip()->TWIST==MR_SEXTIC_M) r=inverse(F); // could be precalculated
	w=r*r;
	x=w*conj(x);
	y=r*w*conj(y);
	A.set(x,y);
}
//
// Faster Hashing to G2 - Fuentes-Castaneda, Knapp and Rodriguez-Henriquez
//
// Multiply S by the G2 cofactor using the fast method of Fuentes-Castaneda,
// Knapp and Rodriguez-Henriquez: one scalar multiplication by the BN
// parameter x plus cheap Frobenius maps. S is updated in place.
void cofactor(ECn2& S,ZZn2 &F,Big& x)
{
    ECn2 T,K;
    T=S;
    T*=-x;   // T = -x*S (x is assumed negative for this curve family)
    T.norm();
    K=(T+T)+T;   // K = 3T
    K.norm();
    q_power_frobenius(K,F);
    q_power_frobenius(S,F); q_power_frobenius(S,F); q_power_frobenius(S,F);
    S+=T; S+=K;
    q_power_frobenius(T,F); q_power_frobenius(T,F);
    S+=T;
    S.norm();
}
/* Compute the Frobenius constant X = g^((p-1)/6), where the base g is
   chosen according to p mod 8.
   Fix: `case 7` was missing its `break` and fell through into `default`
   (harmless today because default only breaks, but fragile if another
   case is ever added). */
void set_frobenius_constant(ZZn2 &X)
{
    Big p=get_modulus();
    switch (get_mip()->pmod8)
    {
    case 5:
        X.set((Big)0,(Big)1); // = (sqrt(-2)^(p-1)/2
        break;
    case 3: // = (1+sqrt(-1))^(p-1)/2
        X.set((Big)1,(Big)1);
        break;
    case 7:
        X.set((Big)2,(Big)1); // = (2+sqrt(-1))^(p-1)/2
        break;
    default: break;
    }
    X=pow(X,(p-1)/6);
}
/* Fill in this bit yourself.... */
#define CHUNK 64 /* processor word size */
#define MBITS 454 /* Modulus size in bits */
/* This next from output of check.cpp program */
#define BASEBITS 60
#define MODTYPE NOT_SPECIAL
#define CURVETYPE WEIERSTRASS
#define CURVE_A 0 // like A parameter in CURVE: y^2=x^3+Ax+B
/* .....to here */
#define WORDS (1+((MBITS-1)/BASEBITS))
int main()
{
miracl *mip=&precision;
Big p,q,R,cru;
Big m,x,y,w,t,c,n,r,a,b,gx,gy,B,xa,xb,ya,yb,cof;
ZZn2 X;
ECn2 Q;
ZZn2 Xa,Ya;
int i;
mip->IOBASE=16;
/* Set BN value x which determines curve - note that x is assumed to be negative */
// x=(char *)"6000000000101041"; // for full 256-bit GT_STRONG parameter
// x=(char *)"4080000000000001"; // Fast but not not GT_STRONG parameter
// x=(char *)"4000020100608205"; // G2 and GT-Strong parameter
// x=(char *)"4000000003C012B1"; // CertiVox's GT_STRONG parameter
// x=(char *)"10000000000000000000004000000000000001001";
// x=(char *)"4000806000004081"; // Best GT_STRONG parameter
/* Fill in this bit yourself... */
// x=(char *)"4080000000000001"; // Nogami's fast parameter
x=(char *)"10000010000000000000100000001";
// x=(char *)"10000000000000000000004000000000000001001";
/* ... to here */
p=36*pow(x,4)-36*pow(x,3)+24*x*x-6*x+1;
ecurve((Big)0,(Big)2,p,MR_AFFINE);
mip->TWIST=MR_SEXTIC_D;
t=6*x*x+1;
q=p+1-t;
cof=1;
B=2;
gx=p-1;
gy=1;
cout << "MOD8 " << p%8 << endl;
m=pow((Big)2,BASEBITS);
cout << "MConst=0x" << inverse(m-p%m,m) << ";" << endl;
cout << "Modulus="; output(CHUNK,WORDS,p,m); cout << ";" << endl;
cout << "CURVE_Order="; output(CHUNK,WORDS,q,m); cout << ";" << endl;
cout << "CURVE_Cof="; output(CHUNK,WORDS,cof,m); cout << ";" << endl;
cout << "CURVE_B= "; output(CHUNK,WORDS,B,m); cout << ";" << endl;
cout << "CURVE_Gx="; output(CHUNK,WORDS,gx,m); cout << ";" << endl;
cout << "CURVE_Gy="; output(CHUNK,WORDS,gy,m); cout << ";" << endl;
cout << endl;
cout << "CURVE_Bnx="; output(CHUNK,WORDS,x,m); cout << ";" << endl;
cru=(18*pow(x,3)-18*x*x+9*x-2);
cout << "CURVE_Cru="; output(CHUNK,WORDS,cru,m); cout << ";" << endl;
set_frobenius_constant(X);
X.get(a,b);
cout << "CURVE_Fra="; output(CHUNK,WORDS,a,m); cout << ";" << endl;
cout << "CURVE_Frb="; output(CHUNK,WORDS,b,m); cout << ";" << endl;
Xa.set((ZZn)0,(ZZn)-1);
Ya.set((ZZn)1,ZZn(0));
Q.set(Xa,Ya);
// cofactor(Q,X,x);
Q=(p-1+t)*Q;
Q.get(Xa,Ya);
Xa.get(a,b);
cout << "CURVE_Pxa="; output(CHUNK,WORDS,a,m); cout << ";" << endl;
cout << "CURVE_Pxb="; output(CHUNK,WORDS,b,m); cout << ";" << endl;
Ya.get(a,b);
cout << "CURVE_Pya="; output(CHUNK,WORDS,a,m); cout << ";" << endl;
cout << "CURVE_Pyb="; output(CHUNK,WORDS,b,m); cout << ";" << endl;
// Q*=q;
// cout << "Q= " << Q << endl;
cout << "CURVE_W[2]={"; output(CHUNK,WORDS,6*x*x-4*x+1,m);cout << ","; output(CHUNK,WORDS,(2*x-1),m); cout << "};" << endl;
cout << "CURVE_SB[2][2]={"; cout << "{"; output(CHUNK,WORDS,6*x*x-2*x,m); cout << ","; output(CHUNK,WORDS,(2*x-1),m); cout << "}";cout << ","; cout << "{"; output(CHUNK,WORDS,(2*x-1),m); cout << ","; output(CHUNK,WORDS,q-(6*x*x-4*x+1),m); cout << "}"; cout << "};" << endl;
cout << "CURVE_WB[4]={"; output(CHUNK,WORDS,2*x*x-3*x+1,m); cout << ","; output(CHUNK,WORDS,12*x*x*x-8*x*x+x,m);
cout << ","; output(CHUNK,WORDS,6*x*x*x-4*x*x+x,m); cout << ","; output(CHUNK,WORDS,2*x*x-x,m); cout << "};" << endl;
cout << "CURVE_BB[4][4]={";
cout << "{";
output(CHUNK,WORDS,q-x+1,m);
cout << ","; output(CHUNK,WORDS,q-x,m);
cout << ","; output(CHUNK,WORDS,q-x,m);
cout << ","; output(CHUNK,WORDS,2*x,m);
cout << "}";
cout << ","; cout << "{";output(CHUNK,WORDS,2*x-1,m);
cout << ","; output(CHUNK,WORDS,q-x,m);
cout << ","; output(CHUNK,WORDS,q-x+1,m);
cout << ","; output(CHUNK,WORDS,q-x,m);
cout << "}";
cout << ","; cout << "{"; output(CHUNK,WORDS,2*x,m);
cout << ","; output(CHUNK,WORDS,2*x-1,m);
cout << ","; output(CHUNK,WORDS,2*x-1,m);
cout << ","; output(CHUNK,WORDS,2*x-1,m);
cout << "}";
cout << ","; cout << "{"; output(CHUNK,WORDS,x+1,m);
cout << ","; output(CHUNK,WORDS,4*x-2,m);
cout << ","; output(CHUNK,WORDS,q-2*x-1,m);
cout << ","; output(CHUNK,WORDS,x+1,m);
cout << "}";
cout << "};" << endl;
}
|
#!/bin/sh
# Export the base-language strings from each NPS survey XIB into the
# en.lproj .strings files so translators can regenerate localisations.
ibtool --export-strings-file ADNetPromoterScoreSurvey/View/en.lproj/NPSFeedbackQuestionView.strings ADNetPromoterScoreSurvey/View/Base.lproj/NPSFeedbackQuestionView.xib
ibtool --export-strings-file ADNetPromoterScoreSurvey/View/en.lproj/NPSScoreQuestionView.strings ADNetPromoterScoreSurvey/View/Base.lproj/NPSScoreQuestionView.xib
ibtool --export-strings-file ADNetPromoterScoreSurvey/View/en.lproj/NPSSurveyView.strings ADNetPromoterScoreSurvey/View/Base.lproj/NPSSurveyView.xib
ibtool --export-strings-file ADNetPromoterScoreSurvey/View/en.lproj/NPSThankYouView.strings ADNetPromoterScoreSurvey/View/Base.lproj/NPSThankYouView.xib
cp ADNetPromoterScoreSurvey/View/Base.lproj/Localizable.strings ADNetPromoterScoreSurvey/View/en.lproj/ |
import { createLogger, Level } from '@17media/node-logger';
import EnvConfig from '../env.config.json';
import project from '../package.json';
// Runtime environment used to tag log lines; defaults to development when
// NODE_ENV is unset.
const environment = process.env.NODE_ENV || 'development';

// Base metadata attached to every log entry.
const base = {
  logLevel: Level.INFO,
  project: project.name,
  environment,
};

// Slack transport settings; the bot token comes from the env config file.
const Slack = {
  slackToken: EnvConfig.Slack.BOT_TOKEN,
  slackChannel: '@hsuan',
};

// Export a ready-to-use logger with Slack and console transports enabled.
export default createLogger({ base, Slack, Console: true });
|
import Foundation
/// Simple wall-clock stopwatch: call `start()`, then `stop()`, then read
/// the elapsed interval in milliseconds.
class TimeMeasurement {
    /// Timestamp captured by `start()`; nil until recorded.
    private var startedAt: Date?
    /// Timestamp captured by `stop()`; nil until recorded.
    private var endedAt: Date?

    /// Record the measurement start point.
    func start() {
        startedAt = Date()
    }

    /// Record the measurement end point.
    func stop() {
        endedAt = Date()
    }

    /// Elapsed time between `start()` and `stop()` in milliseconds, or nil
    /// if either point has not been recorded yet.
    func elapsedTimeInMilliseconds() -> Double? {
        guard let begin = startedAt, let finish = endedAt else {
            return nil
        }
        let seconds = finish.timeIntervalSince(begin)
        return seconds * 1000
    }
}
// Usage example
let timeMeasurement = TimeMeasurement()
timeMeasurement.start()
// Code block to measure execution time
for _ in 1...1000000 {
    // Some time-consuming operation
}
timeMeasurement.stop()
// elapsedTimeInMilliseconds() returns nil unless both start() and stop()
// were called, so unwrap before reporting.
if let elapsedTime = timeMeasurement.elapsedTimeInMilliseconds() {
    print("Elapsed time: \(elapsedTime) milliseconds")
} else {
    print("Measurement not completed")
} |
#!/usr/bin/env bash
#SBATCH --job-name=bdd_source_and_HP18k_distill060
#SBATCH -o gypsum/logs/%j_bdd_source_and_HP18k_distill060.txt
#SBATCH -e gypsum/errs/%j_bdd_source_and_HP18k_distill060.txt
#SBATCH -p 1080ti-long
#SBATCH --gres=gpu:1
#SBATCH --mem=100000
##SBATCH --cpus-per-task=4
##SBATCH --mem-per-cpu=4096

# Train on bdd_peds+HP18k with distillation weight 0.60.
# FIX: the original passed `--load_ckpt` twice — once with no value —
# which argparse rejects ("expected one argument"); the stray flag and its
# dangling line continuation are removed.
python tools/train_net_step.py \
    --dataset bdd_peds+HP18k \
    --cfg configs/baselines/bdd_distill060.yaml \
    --set NUM_GPUS 1 TRAIN.SNAPSHOT_ITERS 5000 \
    --iter_size 2 \
    --use_tfboard \
    --load_ckpt /mnt/nfs/scratch1/pchakrabarty/bdd_recs/ped_models/bdd_peds.pth
|
#!/bin/bash
# Provision cloud infrastructure with Terraform (non-interactive), then
# configure the MySQL cluster with Ansible. The playbook only runs if the
# apply succeeds (&& short-circuits on failure).
echo "Provisioning Cloud infrastructure and deploying clustered MySQL with Ansible..."
$(which terraform) apply -auto-approve && $(which ansible-playbook) -i inventory provisioning.yml
|
<reponame>Zac-Garby/Radon<gh_stars>10-100
package bytecode
// Data specifies the name of an instruction, and whether or not it takes
// an argument.
type Data struct {
	Name   string // mnemonic used when printing/disassembling
	HasArg bool   // true when the instruction carries an operand
}
// Instructions stores data about different instruction types.
var Instructions = map[byte]Data{
Nop: {Name: "NO_OP"},
NopArg: {Name: "NO_OP_ARG", HasArg: true},
LoadConst: {Name: "LOAD_CONST", HasArg: true},
LoadName: {Name: "LOAD_NAME", HasArg: true},
StoreName: {Name: "STORE_NAME", HasArg: true},
DeclareName: {Name: "DECLARE_NAME", HasArg: true},
LoadSubscript: {Name: "LOAD_SUBSCRIPT"},
StoreSubscript: {Name: "STORE_SUBSCRIPT"},
UnaryInvert: {Name: "UNARY_INVERT"},
UnaryNegate: {Name: "UNARY_NEGATE"},
UnaryTuple: {Name: "UNARY_TUPLE"},
BinaryAdd: {Name: "BINARY_ADD"},
BinarySub: {Name: "BINARY_SUB"},
BinaryMul: {Name: "BINARY_MUL"},
BinaryDiv: {Name: "BINARY_DIV"},
BinaryExp: {Name: "BINARY_EXP"},
BinaryFloorDiv: {Name: "BINARY_FLOOR_DIV"},
BinaryMod: {Name: "BINARY_MODULO"},
BinaryLogicOr: {Name: "BINARY_LOGIC_OR"},
BinaryLogicAnd: {Name: "BINARY_LOGIC_AND"},
BinaryBitOr: {Name: "BINARY_BIT_OR"},
BinaryBitAnd: {Name: "BINARY_BIT_AND"},
BinaryEqual: {Name: "BINARY_EQUAL"},
BinaryNotEqual: {Name: "BINARY_NOT_EQUAL"},
BinaryLess: {Name: "BINARY_LESS_THAN"},
BinaryMore: {Name: "BINARY_MORE_THAN"},
BinaryLessEq: {Name: "BINARY_LESS_EQ"},
BinaryMoreEq: {Name: "BINARY_MORE_EQ"},
BinaryTuple: {Name: "BINARY_TUPLE"},
CallFunction: {Name: "CALL_FUNCTION", HasArg: true},
Return: {Name: "RETURN"},
PushScope: {Name: "PUSH_SCOPE"},
PopScope: {Name: "POP_SCOPE"},
Export: {Name: "EXPORT", HasArg: true},
Jump: {Name: "JUMP", HasArg: true},
JumpIf: {Name: "JUMP_IF", HasArg: true},
JumpUnless: {Name: "JUMP_UNLESS", HasArg: true},
StartMatch: {Name: "START_MATCH"},
EndMatch: {Name: "END_MATCH"},
StartBranch: {Name: "START_BRANCH"},
EndBranch: {Name: "END_BRANCH"},
Break: {Name: "BREAK"},
Next: {Name: "NEXT"},
StartLoop: {Name: "START_LOOP"},
EndLoop: {Name: "END_LOOP"},
PushIter: {Name: "PUSH_ITER"},
PopIter: {Name: "POP_ITER"},
AdvIterFor: {Name: "ADV_ITER_FOR", HasArg: true},
MakeList: {Name: "MAKE_LIST", HasArg: true},
MakeMap: {Name: "MAKE_MAP", HasArg: true},
}
|
package io.opensphere.mantle.mp.event.impl;
import io.opensphere.mantle.mp.AbstractMapAnnotationPointGroupChangeEvent;
import io.opensphere.mantle.mp.MutableMapAnnotationPointGroup;
/**
 * Event broadcast when a child group has been removed from a parent
 * {@link MutableMapAnnotationPointGroup}.
 */
public class MapAnnotationPointGroupChildRemovedEvent extends AbstractMapAnnotationPointGroupChangeEvent
{
    /** The child group that was detached from the parent group. */
    private final MutableMapAnnotationPointGroup myDetachedChild;

    /**
     * Constructor.
     *
     * @param group the parent group from which the child was removed
     * @param removed the child group that was removed
     * @param source the originator of the event
     */
    public MapAnnotationPointGroupChildRemovedEvent(MutableMapAnnotationPointGroup group, MutableMapAnnotationPointGroup removed,
            Object source)
    {
        super(group, source);
        myDetachedChild = removed;
    }

    @Override
    public String getDescription()
    {
        return "MapAnnotationPointGroupChildRemovedEvent";
    }

    /**
     * Get the child group that was removed.
     *
     * @return the removed child group
     */
    public MutableMapAnnotationPointGroup getRemoved()
    {
        return myDetachedChild;
    }
}
|
#pragma once
#include <core/types.h>
#include <core/containers/dynamic_array.h>
namespace core
{
class archive;
class arena;
class file;
	/**
	 * Virtual filesystem root: an ordered collection of mounted archives
	 * searched when opening files.
	 */
	class root
	{
		dynamic_array<archive*> _mounts; // mounted archives, in mount order
	public:
		root(arena* arena, int capacity);
		// Add/remove an archive from the search set.
		void mount(archive*);
		void unmount(archive*);
		//TODO: replace int mode with something better
		/**
		 * Open a file with path and mode.
		 * Returns a nullptr if the file does not exist.
		 */
		file* open(const char* path, int mode);
		/**
		 * Close a previously opened with 'open'.
		 */
		void close(file* file);
	};
};
|
#!/usr/bin/env bash
#===----------------------------------------------------------------------===##
#
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
#===----------------------------------------------------------------------===##
set -e
PROGNAME="$(basename "${0}")"
# Print an error message and abort the script.
function error() { printf "error: %s\n" "$*"; exit 1; }
function usage() {
cat <<EOF
Usage:
${PROGNAME} [options]
[-h|--help] Display this help and exit.
--llvm-root <DIR> Full path to the root of the LLVM monorepo. Only the libcxx
and libcxxabi directories are required.
--build-dir <DIR> Full path to the directory to use for building. This will
contain intermediate build products.
--install-dir <DIR> Full path to the directory to install the library to.
--symbols-dir <DIR> Full path to the directory to install the .dSYM bundle to.
--sdk <SDK> SDK used for building the library. This represents
the target platform that the library will run on.
You can get a list of SDKs with \`xcodebuild -showsdks\`.
--architectures "<arch>..." A whitespace separated list of architectures to build for.
The library will be built for each architecture independently,
and a universal binary containing all architectures will be
created from that.
--version X[.Y[.Z]] The version of the library to encode in the dylib.
--cache <PATH> The CMake cache to use to control how the library gets built.
EOF
}
while [[ $# -gt 0 ]]; do
case ${1} in
-h|--help)
usage
exit 0
;;
--llvm-root)
llvm_root="${2}"
shift; shift
;;
--build-dir)
build_dir="${2}"
shift; shift
;;
--symbols-dir)
symbols_dir="${2}"
shift; shift
;;
--install-dir)
install_dir="${2}"
shift; shift
;;
--sdk)
sdk="${2}"
shift; shift
;;
--architectures)
architectures="${2}"
shift; shift
;;
--version)
version="${2}"
shift; shift
;;
--cache)
cache="${2}"
shift; shift
;;
*)
error "Unknown argument '${1}'"
;;
esac
done
for arg in llvm_root build_dir symbols_dir install_dir sdk architectures version cache; do
if [ -z ${!arg+x} ]; then
error "Missing required argument '--${arg//_/-}'"
elif [ "${!arg}" == "" ]; then
error "Argument to --${arg//_/-} must not be empty"
fi
done
# Print ${1} as a progress banner, framed above and below by a dash rule
# whose length matches the message.
function step() {
    separator="$(printf "%0.s-" $(seq 1 ${#1}))"
    echo
    echo "${separator}"
    echo "${1}"
    echo "${separator}"
}
install_name_dir="/usr/lib"
headers_prefix="${install_dir}"
for arch in ${architectures}; do
step "Building libc++abi.dylib for architecture ${arch}"
mkdir -p "${build_dir}/${arch}"
(cd "${build_dir}/${arch}" &&
xcrun --sdk "${sdk}" cmake "${llvm_root}/libcxxabi" \
-GNinja \
-DCMAKE_MAKE_PROGRAM="$(xcrun --sdk "${sdk}" --find ninja)" \
-C "${cache}" \
-DCMAKE_INSTALL_PREFIX="${build_dir}/${arch}-install" \
-DCMAKE_INSTALL_NAME_DIR="${install_name_dir}" \
-DCMAKE_OSX_ARCHITECTURES="${arch}" \
-DLIBCXXABI_LIBRARY_VERSION="${version}" \
-DLIBCXXABI_LIBCXX_PATH="${llvm_root}/libcxx"
)
xcrun --sdk "${sdk}" cmake --build "${build_dir}/${arch}" --target install-cxxabi -- -v
done
all_dylibs=$(for arch in ${architectures}; do
echo "${build_dir}/${arch}-install/lib/libc++abi.dylib"
done)
all_archives=$(for arch in ${architectures}; do
echo "${build_dir}/${arch}-install/lib/libc++abi.a"
done)
step "Creating a universal dylib from the dylibs for each architecture at ${install_dir}/usr/lib"
xcrun --sdk "${sdk}" lipo -create ${all_dylibs} -output "${build_dir}/libc++abi.dylib"
step "Installing the (stripped) universal dylib to ${install_dir}/usr/lib"
mkdir -p "${install_dir}/usr/lib"
cp "${build_dir}/libc++abi.dylib" "${install_dir}/usr/lib/libc++abi.dylib"
xcrun --sdk "${sdk}" strip -S "${install_dir}/usr/lib/libc++abi.dylib"
step "Installing the unstripped dylib and the dSYM bundle to ${symbols_dir}"
xcrun --sdk "${sdk}" dsymutil "${build_dir}/libc++abi.dylib" -o "${symbols_dir}/libc++abi.dylib.dSYM"
cp "${build_dir}/libc++abi.dylib" "${symbols_dir}/libc++abi.dylib"
step "Creating a universal static archive from the static archives for each architecture"
mkdir -p "${install_dir}/usr/local/lib/libcxx"
xcrun --sdk "${sdk}" libtool -static ${all_archives} -o "${install_dir}/usr/local/lib/libcxx/libc++abi-static.a"
#
# Install the headers by copying the headers from the source directory into
# the install directory.
# TODO: In the future, we should install the headers through CMake.
#
step "Installing the libc++abi headers to ${headers_prefix}/usr/include"
mkdir -p "${headers_prefix}/usr/include"
ditto "${llvm_root}/libcxxabi/include" "${headers_prefix}/usr/include"
if [[ $EUID -eq 0 ]]; then # Only chown if we're running as root
chown -R root:wheel "${headers_prefix}/usr/include"
fi
step "Installing the libc++abi license"
mkdir -p "${headers_prefix}/usr/local/OpenSourceLicenses"
cp "${llvm_root}/libcxxabi/LICENSE.TXT" "${headers_prefix}/usr/local/OpenSourceLicenses/libcxxabi.txt"
|
<reponame>Kinju011/neo4j-ruby-driver
module Testkit
  module Backend
    module Messages
      # Converts Ruby / Neo4j driver values into Testkit wire-protocol
      # entities ("Cypher*" tagged values).
      module Conversion
        # Recursively encode +object+ as a Testkit protocol entity.
        # Raises for types the protocol does not cover.
        def to_testkit(object)
          case object
          when nil
            named_entity('CypherNull')
          when TrueClass, FalseClass
            value_entity('CypherBool', object)
          when Integer
            value_entity('CypherInt', object)
          when Float
            value_entity('CypherFloat', float_encode(object))
          when Neo4j::Driver::Types::Bytes
            value_entity('CypherBytes', object.bytes.map { |byte| "%02x" % byte }.join(' '))
          when String
            value_entity('CypherString', object)
          when Symbol
            to_testkit(object.to_s)
          when Neo4j::Driver::Types::Path
            named_entity('Path', nodes: to_testkit(object.nodes), relationships: to_testkit(object.relationships))
          when Hash
            value_entity('CypherMap', object.transform_values(&method(:to_testkit)))
          when Enumerable
            value_entity('CypherList', object.map(&method(:to_testkit)))
          when Neo4j::Driver::Types::Node
            named_entity('Node', id: to_testkit(object.id), labels: to_testkit(object.labels),
                         props: to_testkit(object.properties))
          when Neo4j::Driver::Types::Relationship
            named_entity('Relationship', id: to_testkit(object.id), startNodeId: to_testkit(object.start_node_id),
                         endNodeId: to_testkit(object.end_node_id), type: to_testkit(object.type),
                         props: to_testkit(object.properties))
          else
            raise "Not implemented #{object.class.name}:#{object.inspect}"
          end
        end

        # Encode IEEE specials as strings ("NaN", "+Infinity", "-Infinity");
        # finite floats pass through unchanged.
        #
        # FIX: `when Float::NAN` could never match because NaN compares
        # unequal to everything (including itself), so NaN previously fell
        # through to the else branch and leaked a raw Float. Use Float#nan?.
        def float_encode(f)
          return f.to_s if f.nan?
          case f
          when Float::INFINITY
            "+#{f}"
          when -Float::INFINITY
            f.to_s
          else
            f
          end
        end
      end
    end
  end
end
|
<reponame>frankpolte/UBIA
// use static inline, because, spi flash code must reside in memory..
// these code may be embedd in flash code
#if USE_EXT_FLASH
/* External SPI flash: chip select is driven as a plain GPIO (CS_EXT_FLASH);
   data and busy-wait go through the generic SPI registers. Kept in RAM
   (_attribute_ram_code_) — presumably because these run while flash is
   busy; confirm against the SoC flash driver. */
/* Deselect the flash: drive CS high. */
_attribute_ram_code_ static inline void mspi_high(void){
	BM_SET(reg_gpio_out(CS_EXT_FLASH), (unsigned char)(CS_EXT_FLASH & 0xff));
}
/* Select the flash: drive CS low. */
_attribute_ram_code_ static inline void mspi_low(void){
	BM_CLR(reg_gpio_out(CS_EXT_FLASH), (unsigned char)(CS_EXT_FLASH & 0xff));
}
/* Read the last byte clocked in. */
_attribute_ram_code_ static inline u8 mspi_get(void){
	return reg_spi_data;
}
/* Write one byte to the SPI data register (starts a transfer). */
_attribute_ram_code_ static inline void mspi_write(u8 c){
	reg_spi_data = c;
}
/* Spin until the SPI engine reports idle. */
_attribute_ram_code_ static inline void mspi_wait(void){
	while(reg_spi_ctrl & FLD_SPI_BUSY);
}
#else
/* Internal flash: CS and busy state live in the master-SPI control register. */
/* Spin until the master SPI engine reports idle. */
_attribute_ram_code_ static inline void mspi_wait(void){
	while(reg_master_spi_ctrl & FLD_MASTER_SPI_BUSY)
		;
}
/* Deselect: assert the CS bit in the control register. */
_attribute_ram_code_ static inline void mspi_high(void){
	reg_master_spi_ctrl = FLD_MASTER_SPI_CS;
}
/* Select: clear the control register (CS low). */
_attribute_ram_code_ static inline void mspi_low(void){
	reg_master_spi_ctrl = 0;
}
/* Read the last byte clocked in. */
_attribute_ram_code_ static inline u8 mspi_get(void){
	return reg_master_spi_data;
}
/* Write one byte to the master SPI data register. */
_attribute_ram_code_ static inline void mspi_write(u8 c){
	reg_master_spi_data = c;
}
/* Write a raw value to the master SPI control register. */
_attribute_ram_code_ static inline void mspi_ctrl_write(u8 c){
	reg_master_spi_ctrl = c;
}
/* Clock in and return one byte: write a dummy byte to issue clocks. */
_attribute_ram_code_ static inline u8 mspi_read(void){
	mspi_write(0);		// dummy, issue clock
	mspi_wait();
	return mspi_get();
}
#endif
|
/* eslint-disable global-require */
/* eslint-disable no-undef */
'use strict';
const SequelizeMock = require('sequelize-mock');
const Bookmark = require('../../src/model/Bookmark.js');
const bookmarkQuery = require('../../src/postgres/bookmark-query.js');
const testCases = require('../test-cases.json');
// Shared mock Sequelize connection; the mocked Bookmark model is seeded
// with the fixture row for user 9f50a9ff-273b-42df-8438-9e5adb6c675e.
const DBConnectionMock = new SequelizeMock();
const BookmarkMock = DBConnectionMock.define('Bookmark', testCases.bookmark['9f50a9ff-273b-42df-8438-9e5adb6c675e']);
describe('Test Get All Bookmarks', () => {
  it('Should get all Bookmarks for User 9f50a9ff-273b-42df-8438-9e5adb6c675e', async () => {
    // Route the real model's findAll through the mock, then verify the
    // query filters by userId and yields the fixture bookmark.
    Bookmark.findAll = jest.fn((id) => BookmarkMock.findAll(id));
    const bookmarks = await bookmarkQuery.getBookmarks('9f50a9ff-273b-42df-8438-9e5adb6c675e');
    expect(Bookmark.findAll).toBeCalledWith({
      where: { userId: '9f50a9ff-273b-42df-8438-9e5adb6c675e' },
    });
    expect(bookmarks[0].userId).toEqual('9f50a9ff-273b-42df-8438-9e5adb6c675e');
    expect(bookmarks[0].recipeId).toEqual(640136);
  });
});
describe('Test Add Bookmark', () => {
  it('Should succeed in adding new Bookmark for recipe 640136', async () => {
    // findOrCreate resolves to [instance, created]; `created` is true when
    // the row did not previously exist.
    Bookmark.findOrCreate = jest.fn((userId, recipeId) => BookmarkMock.findOrCreate({
      where: { userId, recipeId },
    }));
    const [, added] = await bookmarkQuery.addBookmark('9f50a9ff-273b-42df-8438-9e5adb6c675e', 640136);
    expect(Bookmark.findOrCreate).toBeCalledWith({
      where: { userId: '9f50a9ff-273b-42df-8438-9e5adb6c675e', recipeId: 640136 },
    });
    expect(added).toBe(true);
  });
});
describe('Test Delete Bookmark', () => {
  it('Should succeed in deleting one row from Bookmark table', async () => {
    // destroy resolves to the number of rows removed; the mock reports 1.
    Bookmark.destroy = jest.fn((userId, recipeId) => BookmarkMock.destroy({
      where: { userId, recipeId },
    }));
    const rowsDeleted = await bookmarkQuery.deleteBookmark('9f50a9ff-273b-42df-8438-9e5adb6c675e', 716426);
    expect(Bookmark.destroy).toBeCalledWith({
      where: { userId: '9f50a9ff-273b-42df-8438-9e5adb6c675e', recipeId: 716426 },
    });
    expect(rowsDeleted).toBe(1);
  });
});
|
<reponame>menghuanlunhui/springboot-master
package com.jf.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.security.oauth2.client.OAuth2RestTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
/**
* Created with IntelliJ IDEA.
* Description:
* User: admin
* Date: 2018-06-05
* Time: 15:16
*/
/**
 * Endpoints exercising plain and OAuth2-backed RestTemplate calls against a
 * resource server at http://127.0.0.1:8010.
 */
@RestController
public class TestController {

    /** Plain RestTemplate — no OAuth2 token relay. */
    @Autowired
    private RestTemplate restTemplate;

    /**
     * Call the resource-server root without credentials.
     *
     * @return the response body
     */
    @GetMapping("/test1")
    public Object test1() {
        // FIX: pass String.class as the responseType. The original passed
        // null, so no message converter ran and getBody() was always null.
        ResponseEntity<String> response = restTemplate.getForEntity("http://127.0.0.1:8010", String.class);
        System.out.println(response);
        return response.getBody();
    }

    /**
     * Call a protected endpoint without credentials (expected to be rejected).
     *
     * @return the response body
     */
    @GetMapping("/test2")
    public Object test2() {
        // Same fix as test1: String.class instead of null.
        ResponseEntity<String> response = restTemplate.getForEntity("http://127.0.0.1:8010/monitor/a", String.class);
        System.out.println(response);
        return response.getBody();
    }

    /** OAuth2-aware template; relays the SSO access token automatically. */
    @Autowired
    private OAuth2RestTemplate oAuth2RestTemplate;

    /**
     * Exercise the SSO client: call the protected endpoint with the OAuth2
     * access token. Configure the correct parameters in application.yml
     * before use.
     *
     * @return the response body, or "401" on any failure
     */
    @GetMapping("/test3")
    public String test3() {
        try {
            ResponseEntity<String> response = oAuth2RestTemplate.exchange("http://127.0.0.1:8010/monitor/a", HttpMethod.GET, null, String.class);
            System.out.println(response);
            return response.getBody();
        } catch (Exception e) {
            System.out.println(e.getMessage());
            return "401";
        }
    }
}
|
package main
import "fmt"
// Demonstrates Go's switch fallthrough: the matching "Marcus" case runs,
// then `fallthrough` unconditionally executes the next case body ("Medhi",
// which itself falls through to "Julian") without re-evaluating the case
// expressions. "Julian" has no fallthrough, so "Sushant" never prints.
func main() {
	switch "Marcus" {
	case "Tim":
		fmt.Println("Wassup Tim")
	case "Jenny":
		fmt.Println("Wassup Jenny")
	case "Marcus":
		fmt.Println("Wassup Marcus")
		fallthrough
	case "Medhi":
		fmt.Println("Wassup Medhi")
		fallthrough
	case "Julian":
		fmt.Println("Wassup Julian")
	case "Sushant":
		fmt.Println("Wassup Sushant")
	}
}
// fallthrough fallthrough fallthrough fallthrough
/*
[Running] go run "c:\Users\li-370\ForkGitW\LearnGolang\GolangTraining-master\11_switch-statements\02_fallthrough\main.go"
Wassup Marcus
Wassup Medhi
Wassup Julian
[Done] exited with code=0 in 4.195 seconds
*/ |
-- STATSPACK "hot file" report (reads half). Expects one numeric argument
-- (&1): the number of days back to report on.
set pages 9999;
set feedback off;
set verify off;
column mydate heading 'Yr. Mo Dy Hr.' format a16
column file_name format a35
column reads format 99,999,999
column pct_of_tot format 999
prompt
prompt
prompt ***********************************************************
prompt This will identify any single file who's read I/O
prompt is more than 25% of the total read I/O of the database.
prompt
prompt The "hot" file should be examined, and the hot table/index
prompt should be identified using STATSPACK.
prompt
prompt - The busy file should be placed on a disk device with
prompt "less busy" files to minimize read delay and channel
prompt contention.
prompt
prompt - If small file has a hot small table, place the table
prompt in the KEEP pool
prompt
prompt - If the file has a large-table full-table scan, place
prompt the table in the RECYCLE pool and turn on parallel query
prompt for the table.
prompt ***********************************************************
prompt
prompt
-- Per-file physical reads per snapshot interval, plus the file's share of
-- total database read I/O. statistic# 40 is "physical reads" in sysstat.
select
to_char(snap_time,'yyyy-mm-dd HH24') mydate,
new.filename file_name,
new.phyrds-old.phyrds reads,
((new.phyrds-old.phyrds)/
-- Correlated subquery: database-wide physical reads for this interval.
(
select
(newreads.value-oldreads.value) reads
from
perfstat.stats$sysstat oldreads,
perfstat.stats$sysstat newreads,
perfstat.stats$snapshot sn1
where
sn.snap_id = sn1.snap_id
and
newreads.snap_id = sn.snap_id
and
oldreads.snap_id = sn.snap_id-1
and
oldreads.statistic# = 40
and
newreads.statistic# = 40
and
(newreads.value-oldreads.value) > 0
))*100 pct_of_tot
from
perfstat.stats$filestatxs old,
perfstat.stats$filestatxs new,
perfstat.stats$snapshot sn
where
snap_time > sysdate-&1
and
-- Join consecutive snapshots of the same file to get interval deltas.
new.snap_id = sn.snap_id
and
old.snap_id = sn.snap_id-1
and
new.filename = old.filename
and
-- **********************************************************
-- Low I/O values are misleading, so we filter for high I/O
-- **********************************************************
new.phyrds-old.phyrds > 100
and
-- **********************************************************
-- The following will allow you to choose a threshold
-- **********************************************************
(new.phyrds-old.phyrds)*4> -- This is 25% of total
-- (new.phyrds-old.phyrds)*2> -- This is 50% of total
-- (new.phyrds-old.phyrds)*1.25> -- This is 75% of total
-- **********************************************************
-- This subquery computes the sum of all I/O during the snapshot period
-- **********************************************************
(
select
(newreads.value-oldreads.value) reads
from
perfstat.stats$sysstat oldreads,
perfstat.stats$sysstat newreads,
perfstat.stats$snapshot sn1
where
sn.snap_id = sn1.snap_id
and
newreads.snap_id = sn.snap_id
and
oldreads.snap_id = sn.snap_id-1
and
oldreads.statistic# = 40
and
newreads.statistic# = 40
and
(newreads.value-oldreads.value) > 0
)
;
prompt
prompt
prompt ***********************************************************
prompt This will identify any single file who's write I/O
prompt is more than 25% of the total write I/O of the database.
prompt
prompt The "hot" file should be examined, and the hot table/index
prompt should be identified using STATSPACK.
prompt
prompt - The busy file should be placed on a disk device with
prompt "less busy" files to minimize write delay and channel
prompt channel contention.
prompt
prompt - If small file has a hot small table, place the table
prompt in the KEEP pool
prompt
prompt ***********************************************************
prompt
-- Per-file physical writes per snapshot interval, plus the file's share of
-- total database write I/O. statistic# 44 is "physical writes" in sysstat.
select
to_char(snap_time,'yyyy-mm-dd HH24') mydate,
new.filename file_name,
new.phywrts-old.phywrts writes,
((new.phywrts-old.phywrts)/
-- Correlated subquery: database-wide physical writes for this interval.
(
select
(newwrites.value-oldwrites.value) writes
from
perfstat.stats$sysstat oldwrites,
perfstat.stats$sysstat newwrites,
perfstat.stats$snapshot sn1
where
sn.snap_id = sn1.snap_id
and
newwrites.snap_id = sn.snap_id
and
oldwrites.snap_id = sn.snap_id-1
and
oldwrites.statistic# = 44
and
newwrites.statistic# = 44
and
(newwrites.value-oldwrites.value) > 0
))*100 pct_of_tot
from
perfstat.stats$filestatxs old,
perfstat.stats$filestatxs new,
perfstat.stats$snapshot sn
where
snap_time > sysdate-&1
and
new.snap_id = sn.snap_id
and
old.snap_id = sn.snap_id-1
and
new.filename = old.filename
and
-- **********************************************************
-- Low I/O values are misleading, so we only take high values
-- **********************************************************
new.phywrts-old.phywrts > 100
and
-- **********************************************************
-- Here you can choose a threshold value
-- **********************************************************
-- BUG FIX: this threshold compared READS (new.phyrds) minus writes against
-- the writes total -- a copy-paste from the reads report above. It must use
-- phywrts on both operands.
(new.phywrts-old.phywrts)*4> -- This is 25% of total
-- (new.phywrts-old.phywrts)*2> -- This is 50% of total
-- (new.phywrts-old.phywrts)*1.25> -- This is 75% of total
-- **********************************************************
-- This subquery computes the sum of all I/O during the snapshot period
-- **********************************************************
(
select
(newwrites.value-oldwrites.value) writes
from
perfstat.stats$sysstat oldwrites,
perfstat.stats$sysstat newwrites,
perfstat.stats$snapshot sn1
where
sn.snap_id = sn1.snap_id
and
newwrites.snap_id = sn.snap_id
and
oldwrites.snap_id = sn.snap_id-1
and
oldwrites.statistic# = 44
and
newwrites.statistic# = 44
and
(newwrites.value-oldwrites.value) > 0
)
;
|
package controllers
import (
"bdi/models"
"fmt"
"github.com/astaxie/beego/orm"
"log"
"strconv"
)
// SdtBdiDomainController serves the CRUD endpoints for SdtBdiDomain
// (indicator domain) records; shared request plumbing comes from the
// embedded BaseController.
type SdtBdiDomainController struct {
	BaseController
}
// Index renders the indicator-domain management page.
func (this *SdtBdiDomainController) Index() {
	this.TplName = "sdtBdiDomain/sdtBdiDomainIndex.html"
}
// All returns one page of SdtBdiDomain records as JSON.
// Query parameters: "rows" (page size, default 10) and "page" (page number,
// default 1). The response envelope carries the total record count and the
// records for the requested page.
func (this *SdtBdiDomainController) All() {
	var err error
	var rows int
	var page int
	// JSON envelope expected by the client-side grid.
	returnData := struct {
		Total interface{} `json:"total"`
		Rows  interface{} `json:"rows"`
	}{}
	// Page size; fall back to 10 on a missing or non-numeric parameter.
	rows, err = strconv.Atoi(this.GetString("rows"))
	if err != nil {
		rows = 10
	}
	// Page number; fall back to the first page.
	page, err = strconv.Atoi(this.GetString("page"))
	if err != nil {
		page = 1
	}
	sdtBdiDomain := new(models.SdtBdiDomain)
	// Fetch the requested page of indicator-domain records.
	sdtBdiDomainSlice, num, err := sdtBdiDomain.GetAllSdtBdiDomain(rows, page)
	if err != nil {
		// BUG FIX: this used log.Fatal, which calls os.Exit(1) -- a single
		// failed query would kill the whole server and the error response
		// below was unreachable. Log and answer with an empty result instead.
		log.Println("查询数据失败!")
		returnData.Total = 0
		returnData.Rows = &sdtBdiDomainSlice
		this.Data[JSON_STRING] = returnData
		this.ServeJSON()
		return
	}
	returnData.Total = num
	returnData.Rows = &sdtBdiDomainSlice
	this.Data[JSON_STRING] = returnData
	this.ServeJSON()
}
// AddPage renders the "create new record" dialog fragment.
func (this *SdtBdiDomainController) AddPage() {
	this.TplName = "sdtBdiDomain/addDialog.html"
}
// Add creates a new SdtBdiDomain record from the posted form and replies
// with a JSON envelope carrying a success flag and a human-readable message.
func (this *SdtBdiDomainController) Add() {
	type result struct {
		Success bool   `json:"success"`
		Message string `json:"message"`
	}
	// respond serializes the outcome and finishes the request.
	respond := func(ok bool, msg string) {
		this.Data[JSON_STRING] = result{Success: ok, Message: msg}
		this.ServeJSON()
	}
	domain := new(models.SdtBdiDomain)
	if err := this.ParseForm(domain); err != nil {
		fmt.Println("参数解析出错!")
		respond(false, "参数解析出错!")
		return
	}
	if err := domain.Add(); err != nil {
		fmt.Println("新增数据出错!")
		respond(false, "新增数据出错!")
		return
	}
	respond(true, "新增数据成功!")
}
// UpdatePage renders the edit dialog pre-filled with the record identified
// by the "id" query parameter.
func (this *SdtBdiDomainController) UpdatePage() {
	id, err := this.GetInt("id")
	if err != nil {
		// BUG FIX: this was log.Fatal, which terminates the whole process on
		// a malformed request parameter. Log and abort only this request.
		log.Println("解析参数出错!")
		return
	}
	sdtBdiDomain := new(models.SdtBdiDomain)
	sdtBdiDomain.Id = id
	// Load the existing record so the dialog can show current values.
	err = sdtBdiDomain.GetSdtBdiDomainById()
	if err != nil {
		// BUG FIX: same log.Fatal -> log.Println downgrade as above.
		log.Println("解析参数出错!")
		return
	}
	this.Data["sdtBdiDomain"] = sdtBdiDomain
	this.TplName = "sdtBdiDomain/updateDialog.html"
}
// Update persists edits to an existing SdtBdiDomain record submitted from
// the update dialog and replies with a JSON success/message envelope.
func (this *SdtBdiDomainController) Update() {
	// JSON envelope returned to the client.
	returnData := struct {
		Success bool   `json:"success"`
		Message string `json:"message"`
	}{}
	sdtBdiDomain := new(models.SdtBdiDomain)
	err := this.ParseForm(sdtBdiDomain)
	if err != nil {
		returnData.Success = false
		returnData.Message = "解析参数出错!"
		this.Data[JSON_STRING] = returnData
		this.ServeJSON()
		return
	}
	err = sdtBdiDomain.Update()
	if err != nil {
		returnData.Success = false
		returnData.Message = "数据更新出错!"
		this.Data[JSON_STRING] = returnData
		this.ServeJSON()
		return
	}
	returnData.Success = true
	returnData.Message = "数据更新成功!"
	this.Data[JSON_STRING] = returnData
	this.ServeJSON()
	return
}
// Delete removes the SdtBdiDomain record identified by the "bdiDomainId"
// query parameter and replies with a JSON success/message envelope.
// NOTE(review): unlike the other handlers this goes through the ORM
// directly rather than a model method — confirm that is intentional.
func (this *SdtBdiDomainController) Delete() {
	returnData := struct {
		Success bool   `json:"success"`
		Message string `json:"message"`
	}{}
	bdiDomainId, err := this.GetInt("bdiDomainId")
	if err != nil {
		fmt.Println("解析参数出错")
		returnData.Success = false
		returnData.Message = "解析参数出错"
		this.Data[JSON_STRING] = returnData
		this.ServeJSON()
		return
	}
	o := orm.NewOrm()
	if _, err := o.Delete(&models.SdtBdiDomain{Id: bdiDomainId}); err != nil {
		fmt.Println("数据删除失败")
		returnData.Success = false
		returnData.Message = "数据删除失败"
		this.Data[JSON_STRING] = returnData
		this.ServeJSON()
		return
	}
	returnData.Success = true
	returnData.Message = "数据删除成功!"
	this.Data[JSON_STRING] = returnData
	this.ServeJSON()
	return
}
|
"use strict";

// NOTE: compiled build output (see the source-map reference on the last
// line); edit the original source file, not this one.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;

var _react = _interopRequireDefault(require("react"));

var _reactNative = require("react-native");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Renders a hard line break as a react-native Text node containing "\n".
// NOTE(review): `key` is destructured from props, but React does not pass
// `key` through as a regular prop — presumably the host renderer supplies it
// explicitly; confirm against the renderer that calls this.
const LineBreakRenderer = function LineBreakRenderer({
  key
}) {
  return /*#__PURE__*/_react.default.createElement(_reactNative.Text, {
    key: key
  }, '\n');
};

// Marker flag consumed by the host renderer (meaning defined elsewhere).
LineBreakRenderer.isNativeInternalTextRenderer = true;
var _default = LineBreakRenderer;
exports.default = _default;
//# sourceMappingURL=LineBreakRenderer.js.map
package com.phone.validator;

/**
 * Contract for validating a single string value.
 */
public interface Validator {
    /**
     * @param value the raw string to check; validity semantics are defined
     *              by each implementation
     * @return true if the value is considered valid
     */
    public boolean isValid(String value);
}
|
import re
def extract_php_echo(html_content: str) -> str:
    """Extract the expression from the first ``<?php echo ...; ?>`` tag.

    BUG FIX: the original pattern ``<?php echo (.*?); ?>`` left ``?``
    unescaped, so ``<?`` parsed as "optional <" and ``?>`` as "optional
    space then >" — a real PHP echo tag never matched and the function
    always returned "".

    Args:
        html_content: HTML/PHP source text to scan.

    Returns:
        The echoed expression (e.g. ``$title``), or "" when no tag is found.
    """
    match = re.search(r'<\?php\s+echo\s+(.*?)\s*;\s*\?>', html_content)
    if match:
        return match.group(1)
    return ""
#!/bin/bash
# Installs Istio (via helm template | kubectl apply) on an OpenShift cluster
# and wires up routing for keptn: SCC grants, wildcard routes, and a
# self-signed TLS certificate for the ingress gateway.
source ./common/utils.sh

kubectl create namespace istio-system
# Render and apply the CRD bootstrap chart, then wait until every Istio CRD
# is registered before installing the main chart.
helm template istio-init ../manifests/istio/helm/istio-init --namespace istio-system | kubectl apply -f -
verify_kubectl $? "Creating Istio resources failed"
wait_for_crds "adapters.config.istio.io,attributemanifests.config.istio.io,authorizationpolicies.rbac.istio.io,clusterrbacconfigs.rbac.istio.io,destinationrules.networking.istio.io,envoyfilters.networking.istio.io,gateways.networking.istio.io,handlers.config.istio.io,httpapispecbindings.config.istio.io,httpapispecs.config.istio.io,instances.config.istio.io,meshpolicies.authentication.istio.io,policies.authentication.istio.io,quotaspecbindings.config.istio.io,quotaspecs.config.istio.io,rbacconfigs.rbac.istio.io,rules.config.istio.io,serviceentries.networking.istio.io,servicerolebindings.rbac.istio.io,serviceroles.rbac.istio.io,sidecars.networking.istio.io,templates.config.istio.io,virtualservices.networking.istio.io"

# We tested it with helm --set according to the descriptions provided in https://istio.io/docs/setup/install/helm/
# However, it did not work out. Therefore, we are using sed
sed 's/LoadBalancer #change to NodePort, ClusterIP or LoadBalancer if need be/'$GATEWAY_TYPE'/g' ../manifests/istio/helm/istio/charts/gateways/values.yaml > ../manifests/istio/helm/istio/charts/gateways/values_tmp.yaml
mv ../manifests/istio/helm/istio/charts/gateways/values_tmp.yaml ../manifests/istio/helm/istio/charts/gateways/values.yaml
helm template istio ../manifests/istio/helm/istio --namespace istio-system --values ../manifests/istio/helm/istio/values-istio-minimal.yaml | kubectl apply -f -
verify_kubectl $? "Installing Istio failed."
# Block until the control-plane deployments and all pods are ready.
wait_for_deployment_in_namespace "istio-ingressgateway" "istio-system"
wait_for_deployment_in_namespace "istio-pilot" "istio-system"
wait_for_deployment_in_namespace "istio-citadel" "istio-system"
wait_for_deployment_in_namespace "istio-sidecar-injector" "istio-system"
wait_for_all_pods_in_namespace "istio-system"

# OpenShift: Istio service accounts need the anyuid SCC to run.
oc adm policy add-scc-to-user anyuid -z istio-ingress-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z default -n istio-system
oc adm policy add-scc-to-user anyuid -z prometheus -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-egressgateway-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-citadel-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-ingressgateway-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-cleanup-old-ca-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-mixer-post-install-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-mixer-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-pilot-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-sidecar-injector-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-galley-service-account -n istio-system
oc adm policy add-scc-to-user anyuid -z istio-security-post-install-account -n istio-system
oc expose svc istio-ingressgateway -n istio-system
ROUTER_POD=$(oc get pods -n default -l router=router -ojsonpath={.items[0].metadata.name})

# allow wildcard domains: reconfigure and bounce the OpenShift router.
oc project default
oc adm router --replicas=0
verify_kubectl $? "Scaling down router failed"
oc set env dc/router ROUTER_ALLOW_WILDCARD_ROUTES=true
verify_kubectl $? "Configuration of openshift router failed"
oc scale dc/router --replicas=1
verify_kubectl $? "Upscaling of router failed"
oc delete pod $ROUTER_POD -n default --force --grace-period=0 --ignore-not-found

# create wildcard route for istio ingress gateway
BASE_URL=$(oc get route -n istio-system istio-ingressgateway -oyaml | yq r - spec.host | sed 's~istio-ingressgateway-istio-system.~~')
# Domain used for routing to keptn services
export DOMAIN="ingress-gateway.$BASE_URL"
oc create route passthrough istio-wildcard-ingress-secure-keptn --service=istio-ingressgateway --hostname="www.keptn.ingress-gateway.$BASE_URL" --port=https --wildcard-policy=Subdomain --insecure-policy='None' -n istio-system
oc adm policy add-cluster-role-to-user cluster-admin system:serviceaccount:keptn:default
verify_kubectl $? "Adding cluster-role failed."

# Set up SSL: self-signed cert for the ingress gateway, removed after upload.
openssl req -nodes -newkey rsa:2048 -keyout key.pem -out certificate.pem -x509 -days 365 -subj "/CN=$DOMAIN"
kubectl create --namespace istio-system secret tls istio-ingressgateway-certs --key key.pem --cert certificate.pem
#verify_kubectl $? "Creating secret for istio-ingressgateway-certs failed."
rm key.pem
rm certificate.pem

kubectl apply -f ../manifests/istio/public-gateway.yaml
verify_kubectl $? "Deploying public-gateway failed."
|
#!/bin/sh
# Install Composer system-wide after verifying the installer against the
# official SHA-384 signature published at composer.github.io.
EXPECTED_SIGNATURE=$(wget https://composer.github.io/installer.sig -O - -q)
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
ACTUAL_SIGNATURE=$(php -r "echo hash_file('SHA384', 'composer-setup.php');")

if [ "$EXPECTED_SIGNATURE" = "$ACTUAL_SIGNATURE" ]
then
    # Signature OK: install to /usr/local/bin/composer, clean up, and
    # propagate the installer's exit status.
    php composer-setup.php --quiet --install-dir=/usr/local/bin --filename=composer
    RESULT=$?
    rm composer-setup.php
    exit $RESULT
else
    >&2 echo 'ERROR: Invalid installer signature'
    rm composer-setup.php
    exit 1
fi
#
# Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
#!/bin/ksh
#
# Script to build and launch the RMI reliability suite.
# This script is used to run the reliability test for a
# certain number of hours. This script is NOT used when
# running the juicer and benchmark tests as part of the
# jtreg regression suite.
# Positional arguments; any argument left empty falls back to a default below.
JAVA_HOME=$1
WORK_DIR=$2
RES_DIR=$3
SHELLTOUSE=$4
SUITE_DIR=$5
NHOURS=$6
shift 6
# Everything after the sixth argument is passed to the JVM verbatim.
VMOPTS="$*"
###You need not export these variables if your reliability run is from this shell itself######
###If you are launching another shell then you need to export these variables.#######
if [ "${WORK_DIR}" = "" ] ; then
    WORK_DIR=`pwd`
fi
if [ "${RES_DIR}" = "" ] ; then
    RES_DIR=`pwd`/results
fi
if [ "${SHELLTOUSE}" = "" ] ; then
    SHELLTOUSE=ksh
fi
if [ "${JAVA_HOME}" = "" ] ; then
    # Derive JAVA_HOME from whichever `java` is on the PATH (ksh `whence`).
    WHENCEJAVA=`whence java`
    JAVABIN=`dirname ${WHENCEJAVA}`
    JAVA_HOME=`dirname ${JAVABIN}`
fi
if [ "${SUITE_DIR}" = "" ] ; then
    SUITE_DIR=`pwd`
fi
if [ "${VMOPTS}" = "" ] ; then
    # BUG FIX: this default was written unquoted (VMOPTS=-server -showversion),
    # which the shell parses as "run the command -showversion with
    # VMOPTS=-server in its environment" rather than as an assignment.
    VMOPTS="-server -showversion"
fi
if [ "${NHOURS}" = "" ] ; then
    NHOURS=1
fi
export JAVA_HOME
export WORK_DIR
export RES_DIR
export SHELLTOUSE
export SUITE_DIR
export NHOURS
export VMOPTS
# Echo the effective configuration, then run the three reliability suites
# (RMI bench, serialization bench, juicer) in parallel for NHOURS each.
echo "######### launch_reliability script ##########"
echo "JAVA_HOME : $JAVA_HOME "
echo "WORK_DIR : $WORK_DIR "
echo "RES_DIR : $RES_DIR "
echo "SHELLTOUSE : $SHELLTOUSE "
echo "SUITE_DIR : $SUITE_DIR "
echo "NHOURS : $NHOURS "
echo "VMOPTS : $VMOPTS "

# set platform-dependent variables
if [ `uname` = "Linux" ] ; then
    PATH_SEP=":"
else
    PATH_SEP=";"
fi
export PATH_SEP
mainpid=$$

# Fresh scratch directories for each sub-suite.
mkdir -p ${RES_DIR}
rm -rf ${WORK_DIR}/rmibench_scratch
rm -rf ${WORK_DIR}/serialbench_scratch
rm -rf ${WORK_DIR}/juicer_scratch
mkdir -p ${WORK_DIR}/rmibench_scratch
mkdir -p ${WORK_DIR}/serialbench_scratch
mkdir -p ${WORK_DIR}/juicer_scratch

echo ""
echo " Starting RMI bench test "
$SHELLTOUSE ${SUITE_DIR}/scripts/run_rmibench.ksh ${WORK_DIR}/rmibench_scratch $RES_DIR $JAVA_HOME $SUITE_DIR $NHOURS $VMOPTS &
pid1=$!
sleep 30
echo ""
echo " Starting Serialization bench test "
$SHELLTOUSE ${SUITE_DIR}/scripts/run_serialbench.ksh ${WORK_DIR}/serialbench_scratch $RES_DIR $JAVA_HOME $SUITE_DIR $NHOURS $VMOPTS &
pid2=$!
sleep 30
echo ""
echo " Starting RMI juicer test "
$SHELLTOUSE ${SUITE_DIR}/scripts/run_juicer.ksh ${WORK_DIR}/juicer_scratch $RES_DIR $JAVA_HOME $SUITE_DIR $NHOURS $VMOPTS &
pid3=$!
sleep 30
echo ""
echo " Waiting for jobs to complete"
wait $pid1 $pid2 $pid3
echo ""
echo " Done RMI reliability testing "
rm -rf ${WORK_DIR}/rmibench_scratch
rm -rf ${WORK_DIR}/serialbench_scratch
rm -rf ${WORK_DIR}/juicer_scratch
# NOTE(review): kill -9 of our own pid forces a non-graceful exit --
# presumably to tear down any leftover children; confirm this is intended.
kill -9 $mainpid
|
<reponame>berleon/deeplift
# Package version string (single source of truth for the distribution).
__version__ = '0.6.9.3'
|
<filename>src/main.hpp
/*
* Copyright 2015 TU Chemnitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MAIN_HPP
#define MAIN_HPP
#include <memory>
#include <QFileDialog>
#include <QGridLayout>
#include <QKeyEvent>
#include <QMainWindow>
#include <QMenuBar>
#include <QMessageBox>
#include <QPushButton>
#include <QTimer>
#include "timelineWidget.hpp"
#include "converter.hpp"
#include "patientFilterWidget.hpp"
#include "chooseContoursWidget.hpp"
#include "glSectorWidget.hpp"
#include "io/exportJpeg.hpp"
#include "core/oct_data.hpp"
#include "observer.hpp"
// Forward declarations; full definitions are pulled in via core/oct_data.hpp.
struct oct_subject;
struct oct_scan;

// One loaded OCT dataset together with the UI state attached to it.
struct dataset
{
	// Load the subject stored at the given file path.
	dataset(const QString &path);
	oct_subject m_subject;                 // loaded subject data
	const oct_scan *m_scan;                // currently selected scan (non-owning)
	observable<std::size_t> m_slice;       // active slice index, observed by widgets
	std::unique_ptr<QDialog> m_dialog;     // scan-selection dialog (see my_button)
	std::unique_ptr<QWidget> m_widgets[5]; // the viewer panels for this dataset
};
// Top-level application window: hosts the viewer grid and menu actions for
// loading, saving, converting and exporting OCT datasets.
class main_window
	: public QMainWindow
{
	Q_OBJECT
public:
	main_window()
		: l(&w), dummy(0), demux(0), key(0)
	{
		menuBar()->addAction("Info", this, SLOT(info()));
		QMenu *m;
		m = menuBar()->addMenu("Main");
		m->addAction("Load", this, SLOT(load_main()));
		m->addAction("Save", this, SLOT(save_main()));
		m->addAction("Save anonymized", this, SLOT(save_anon_main()));
		m->addAction("Export Slices as JPEG", this, SLOT(load_jpeg_exporter_main()));
		m = menuBar()->addMenu("Compare");
		m->addAction("Load", this, SLOT(load_compare()));
		m->addAction("Save", this, SLOT(save_compare()));
		m->addAction("Save anonymized", this, SLOT(save_anon_compare()));
		m->addAction("Export Slices as JPEG", this, SLOT(load_jpeg_exporter_compare()));
		m = menuBar()->addMenu("Timeline");
		m->addAction("Load Timeline", this, SLOT(load_timeline()));
		m->addAction("Change Coloring", this, SLOT(change_coloring()));
		m->addAction("Export for Excel", this, SLOT(export_for_excel()));
		m = menuBar()->addMenu("Converter");
		m->addAction("Convert files and export as JPEG and UOCTML", this, SLOT(convert()));
		m->addAction("Convert anonymized and export as JPEG and UOCTML", this, SLOT(convert_anonymized()));
		setCentralWidget(&w);
		setWindowTitle("Unified OCT Explorer");
		// Toggle `demux` between 0 and 1 every 500 ms; observers can react
		// to the alternation.
		connect(&t, SIGNAL(timeout()), this, SLOT(swap()));
		t.setInterval(500);
		t.start();
	}
	QSize sizeHint() const override
	{
		return QSize(800, 600);
	}
	// Re-render the panels of one dataset / of both datasets.
	void update(dataset *p);
	void update();
private slots:
	// Thin menu-action slots delegating to the private helpers below.
	void load_main()
	{
		load(main);
	}
	void load_compare()
	{
		load(compare);
	}
	void save_main()
	{
		save(main, false);
	}
	void save_compare()
	{
		save(compare, false);
	}
	void save_anon_main()
	{
		save(main, true);
	}
	void save_anon_compare()
	{
		save(compare, true);
	}
	void load_jpeg_exporter_main()
	{
		load_jpeg_exporter(main);
	}
	void load_jpeg_exporter_compare()
	{
		load_jpeg_exporter(compare);
	}
	void load_timeline()
	{
		loadTimeline();
	}
	// BUG FIX: the return type was missing ("change_coloring()"), which is
	// not valid C++ and broke compilation of this header.
	void change_coloring()
	{
		changeColoring();
	}
	void convert()
	{
		convertToUoctml(false);
	}
	void convert_anonymized()
	{
		convertToUoctml(true);
	}
	void export_for_excel()
	{
		exportForExcel();
	}
	void swap()
	{
		demux = 1 - demux;
	}
	void info()
	{
		QMessageBox::information(this, "Info",
			"Unified OCT Explorer 1.0\n"
			"Copyright (c) 2015 <NAME>\n"
			"NO WARRANTY. NOT CERTIFIED FOR CLINICAL USE.\n"
			"\n"
			"- Load one or two datasets using \"Load Main/Compare\". Supports Topcon OCT, Heidelberg Engineering OCT, Eyetec OCT, and Nidek OCT file formats.\n"
			"- Drag mouse wheel in fundus panel to change active slice.\n"
			"- Drag mouse and mouse wheel to pan and zoom in slice panel.\n"
			"- Drag mouse to rotate in volume rendering panel.\n"
			"- Drag mouse wheel in depth view to change contour.\n"
			// BUG FIX: these adjacent literals concatenated without a line
			// break, gluing the last two bullet points together.
			"- Use keys \"1\",\"2\",... to hide/show contours.\n"
			"\n"
			"- Convert several files to list them in the selection for a Timeline View or to export their contour values"
		);
	}
private:
	// Record the last key press for observers (e.g. contour hide/show).
	void keyPressEvent(QKeyEvent *e) override
	{
		key = e->key();
		e->accept();
	}
	void load(std::unique_ptr<dataset> &p);
	void save(const std::unique_ptr<dataset> &p, bool anonymize);
	void load_many(QStringList &paths);
	void load_jpeg_exporter(const std::unique_ptr<dataset> &p);
	void loadTimeline();
	void changeColoring();
	void convertToUoctml(bool anonymized);
	void exportForExcel();
	QTimer t;                                   // drives swap() every 500 ms
	QWidget w;                                  // central widget hosting the grid
	QGridLayout l;                              // layout over w
	observable<std::size_t> dummy, demux, key;  // observable UI state
	std::unique_ptr<dataset> main, compare;     // the (up to) two loaded datasets
};
// Button used inside a dataset's scan-selection dialog: clicking it makes
// the associated scan current, closes the dialog, and refreshes the window.
class my_button
	: public QPushButton
{
	Q_OBJECT
	main_window &m;     // window to refresh after selection
	dataset *p;         // dataset whose scan is being chosen
	const oct_scan &s;  // scan this button selects
public:
	my_button(main_window &m, dataset *p, const oct_scan &s, const QString &text)
		: QPushButton(text, p->m_dialog.get()), m(m), p(p), s(s)
	{
		connect(this, SIGNAL(released()), this, SLOT(doit()));
	}
private slots:
	// Select the scan, dismiss the dialog, repaint the dataset's panels.
	void doit()
	{
		p->m_scan = &s;
		p->m_dialog->close();
		m.update(p);
	}
};
#endif
|
public static int sumN (int N) {
int sum = 0;
while (N != 0) {
sum = sum + N;
N--;
}
return sum;
} |
#!/usr/bin/env bash
# CI helper: builds the image-inspector Docker images or runs the
# containerized test suite, selected by the first argument (build|run).

# Build the base image, then the CI test image layered on top of it.
function build_container(){
  docker build -t travis/image-inspector-base .
  docker build -t travis/image-inspector -f Dockerfile.travis .
}

# Run `make verify test-unit` inside the image. --privileged plus the
# docker.sock mount lets the tests talk to the host Docker daemon.
function run_tests(){
  docker run --rm --privileged \
    -v /var/run/docker.sock:/var/run/docker.sock \
    --entrypoint make \
    travis/image-inspector verify test-unit
}

function usage() {
  echo "usage: .travis.sh build|run"
  exit 1
}

case "$1" in
  build)
    build_container
    ;;
  run)
    run_tests
    ;;
  *)
    usage
    ;;
esac
|
<reponame>Kvahn-ui/dotfiles
import * as fs from 'fs';
import * as path from 'path';
import {IThemeIconsAccents, IThemeIconsItem} from '../typings/interfaces/icons';
import {getDefaultsJson} from './helpers/fs';
import {PATHS} from './helpers/paths';
// Absolute path of the base icon-theme JSON manifest.
const ICON_VARIANTS_BASE_PATH: string = path.join(process.cwd(), PATHS.pathIcons);
// Project defaults: accent colour table and the list of accentable icons.
const DEFAULTS = getDefaultsJson();
// Resolve an icon-relative path against the project's icons directory.
const normalizeIconPath = (iconPath: string): string => {
  return path.join(process.cwd(), PATHS.icons, iconPath);
};
// Derive the accent-variant filename: "foo.svg" -> "foo.accent.<name>.svg".
// Names without a ".svg" segment are returned unchanged.
const replaceNameWithAccent = (name: string, accentName: string): string => {
  const accentSuffix = `.accent.${ accentName }.svg`;
  return name.replace('.svg', accentSuffix);
};
// Swap the hard-coded default accent (#80CBC4, case-insensitive) in an SVG
// source for the given colour; the replacement keeps the original "#".
// NOTE(review): no "g" flag, so only the FIRST occurrence is replaced —
// presumably each icon contains the accent once; confirm.
const replaceSVGColour = (filecontent: string, colour: string): string =>
  filecontent.replace(new RegExp('#(80CBC4)', 'i'), ($0, $1) => {
    const newColour = colour.replace('#', '');
    return $0.replace($1, newColour);
  });
// Collapse every run of whitespace into a single hyphen.
const replaceWhiteSpaces = (input: string): string =>
  input.split(/\s+/).join('-');
// Read an icon SVG, recolour its default accent to DEFAULTS.accents[accent],
// and write the result next to the originals. Both paths are relative to
// the icons directory; I/O is synchronous (build-time script).
const writeSVGIcon = (fromFile: string, toFile: string, accent: string): void => {
  const fileContent: string = fs.readFileSync(normalizeIconPath(fromFile), 'utf-8');
  const content: string = replaceSVGColour(fileContent, DEFAULTS.accents[accent]);
  const pathToFile = normalizeIconPath(toFile);
  fs.writeFileSync(pathToFile, content);
};
// Build task: for every configured accent colour, clone the base icon theme,
// rewrite each accentable icon's path to its accent-variant filename, and
// write out the recoloured SVG files.
export default (): Promise<void> => {
  const basetheme: IThemeIconsAccents = require(ICON_VARIANTS_BASE_PATH);
  for (const key of Object.keys(DEFAULTS.accents)) {
    const iconName = replaceWhiteSpaces(key);
    // Deep-copy the theme so per-accent path edits don't leak between accents.
    const themecopy: IThemeIconsAccents = JSON.parse(JSON.stringify(basetheme));
    for (const accentableIconName of DEFAULTS.accentableIcons) {
      const iconOriginDefinition: IThemeIconsItem = (basetheme.iconDefinitions as any)[accentableIconName];
      const iconCopyDefinition: IThemeIconsItem = (themecopy.iconDefinitions as any)[accentableIconName];
      if (iconOriginDefinition !== undefined && typeof iconOriginDefinition.iconPath === 'string' && iconCopyDefinition !== undefined && typeof iconCopyDefinition.iconPath === 'string') {
        iconCopyDefinition.iconPath = replaceNameWithAccent(iconOriginDefinition.iconPath, iconName);
        writeSVGIcon(iconOriginDefinition.iconPath, iconCopyDefinition.iconPath, key);
      } else {
        // Manifest and accentable list disagree; skip but leave a trace.
        console.log(`Icon ${accentableIconName} not found`);
      }
    }
  }
  return Promise.resolve();
};
|
<reponame>jiangerji/ios-test-framework<gh_stars>0
//
//  NSURL+QMSafe.h
//  StarMaker
//
//  Created by 江林 on 2018/1/30.
//  Copyright © 2018年 uShow. All rights reserved.
//
#ifndef NSURL_QMSafe_h
#define NSURL_QMSafe_h

// Category installing defensive ("safe") behaviour on NSURL.
@interface NSURL (QMSafe)
// Activate the safety hooks; call once at startup.
// NOTE(review): implementation not visible here — confirm mechanics in the .m file.
+ (void)runSafeGuard;
#ifdef DEBUG
// Debug-only self-test for the safeguard.
+ (void)testCase;
#endif
@end
#endif /* NSURL_QMSafe_h */
|
import os
import requests
# Facebook Graph API root used for all Send API calls below.
GRAPH_URL = "https://graph.facebook.com/v2.6"
# Page access token; raises KeyError at import time if the env var is unset.
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
def send_text_message(id, text):
    """Send a plain-text message to a Messenger user via the Send API.

    Args:
        id: PSID of the recipient.
        text: Message body to deliver.

    Returns:
        The ``requests.Response`` from the Graph API call.
    """
    endpoint = "%s/me/messages?access_token=%s" % (GRAPH_URL, ACCESS_TOKEN)
    body = {"recipient": {"id": id}, "message": {"text": text}}
    response = requests.post(endpoint, json=body)
    if response.status_code != 200:
        print("Unable to send message: " + response.text)
    return response
def send_image_url(id, img_path, img_type):
    """Upload a local image file to a Messenger user as an attachment.

    Note: despite the name, this uploads file data (multipart POST), not a URL.

    Args:
        id: PSID of the recipient.
        img_path: path of the local image file to upload.
        img_type: image subtype for the MIME type (e.g. "png", "jpeg").

    Returns:
        The ``requests.Response`` from the Graph API call.
    """
    fb_url = GRAPH_URL + '/me/messages'
    data = {
        'recipient': '{"id":'+ id + '}',
        'message': '{"attachment":{"type":"image", "payload":{}}}'
    }
    params = {'access_token': ACCESS_TOKEN}
    # BUG FIX: the file handle was opened inline and never closed; a context
    # manager releases it even when the request raises.
    with open(img_path, 'rb') as img_file:
        files = {
            'filedata': (os.path.basename(img_path), img_file, 'image/' + img_type)
        }
        response = requests.post(fb_url, params=params, data=data, files=files)
    if response.status_code != 200:
        print("Unable to send message: " + response.text)
    return response
def send_button_message(id, text, buttons):
    """Send a button-template message to a Messenger user.

    Args:
        id: PSID of the recipient.
        text: Prompt text shown above the buttons.
        buttons: List of button payload dicts in Send API format.

    Returns:
        The requests.Response from the Graph API call.
    """
    url = "{0}/me/messages?access_token={1}".format(GRAPH_URL, ACCESS_TOKEN)
    payload = {
        "recipient": {"id": id},
        "message": {"attachment":{
            "type":"template",
            "payload":{
                "template_type":"button",
                "text": text,
                "buttons":buttons
            }
        }}
    }
    response = requests.post(url, json=payload)
    if response.status_code != 200:
        print("Unable to send message: " + response.text)
    return response
"""
def send_image_url(id, img_url):
pass
def send_button_message(id, text, buttons):
pass
"""
|
def quick_sort(arr):
    """Return a list with the elements of ``arr`` in ascending order.

    Classic recursive quicksort using the first element as pivot; the input
    list is never mutated. Lists of fewer than two elements are returned
    as-is.
    """
    if len(arr) < 2:
        return arr
    pivot, *rest = arr
    below = []
    above = []
    for item in rest:
        # Elements equal to the pivot go left, preserving the original
        # <=/> partitioning.
        (below if item <= pivot else above).append(item)
    return quick_sort(below) + [pivot] + quick_sort(above)
<?php
// Demo: extract every email-like token from a block of text.
$text = <<<EOT
The following email addresses must be collected:
john@example.com
jane@example.com
EOT;
// \S+@\S+ is a deliberately loose matcher: any non-space run around "@".
preg_match_all('/\S+@\S+/', $text, $matches);
// $matches[0] holds the full-pattern matches, in document order.
print_r($matches[0]);
?>
#!/usr/bin/env bash
# Provision a fresh Ubuntu host for the users-api Django project: clone the
# repo, build a virtualenv, migrate, and serve via uwsgi under supervisor
# behind nginx. Aborts on the first failed command (set -e).
set -e

# TODO: Set to URL of git repo.
PROJECT_GIT_URL='https://github.com/coreycoole/users-api.git'

PROJECT_BASE_PATH='/usr/local/apps/users-api'

echo "Installing dependencies..."
apt-get update
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git

# Create project directory
mkdir -p $PROJECT_BASE_PATH
git clone $PROJECT_GIT_URL $PROJECT_BASE_PATH

# Create virtual environment
mkdir -p $PROJECT_BASE_PATH/env
python3 -m venv $PROJECT_BASE_PATH/env

# Install python packages (uwsgi pinned to match the supervisor config)
$PROJECT_BASE_PATH/env/bin/pip install -r $PROJECT_BASE_PATH/requirements.txt
$PROJECT_BASE_PATH/env/bin/pip install uwsgi==2.0.18

# Run migrations and collectstatic
cd $PROJECT_BASE_PATH
$PROJECT_BASE_PATH/env/bin/python manage.py migrate
$PROJECT_BASE_PATH/env/bin/python manage.py collectstatic --noinput

# Configure supervisor to keep the uwsgi app process running
cp $PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api

# Configure nginx as the front-end proxy, replacing the default site
cp $PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf /etc/nginx/sites-available/profiles_api.conf
rm /etc/nginx/sites-enabled/default
ln -s /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service

echo "DONE! :)"
|
from django.contrib import admin
from users.models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
    """Django admin configuration for the User model."""

    # Allow lookup by contact details from the admin search box.
    search_fields = ('phone_number',
                     'email')
    # Columns shown on the user change-list page.
    list_display = ('email',
                    'phone_number',
                    'score',
                    'date_joined',
                    'last_updated_on')
|
(function(window) {
  "use strict";

  var _ = window._;
  var $ = window.jQuery;
  var ScopedCss = window.ScopedCss;
  // NOTE(review): `Backbone` is read from the enclosing scope implicitly,
  // unlike _/$ /ScopedCss above — confirm it is guaranteed global here.

  // The View base class for the Component.
  var Component = Backbone.Layout.extend({
    constructor: function(options) {
      // Components are bound to a custom element, so a tagName is mandatory.
      if (!this.tagName) {
        throw new Error("tagName required to initialize component.");
      }

      // Ensure the View is correctly set up.
      Backbone.Layout.apply(this, arguments);
    },

    // FIXME Until shadow dom, keep content that's rendered.
    renderTemplate: function() {},

    afterRender: function() {
      // Seek out nested components and render them.
      Component.activateAll(this.$el);
    }
  });

  // Directly mix into the prototype.
  _.extend(Component.prototype, {
    // By default the template property contains the contents of the template.
    fetchTemplate: function(path) {
      // LayoutManager async handshake: rendering waits until done() is called.
      var done = this.async();

      // Fetch the template contents from the server, by a url.
      $.get(path, function(contents) {
        done(_.template(contents));
      }, "text")
    },

    // Fetch the CSS from the server.
    fetchStyle: function() {
      return $.get(this.style, $.noop, "text");
    }
  });

  // Augment the constructor.
  _.extend(Component, {
    // Registry of custom tag name -> Component constructor.
    components: {},

    register: function(Component, identifier) {
      // Allow a manual override of the tagName to use.
      identifier = identifier || Component.prototype.tagName;

      // Create the scoped object outside of the fetch.
      var scopedStyles = new ScopedCss(Component.prototype.tagName);

      // Fetch CSS.
      Component.prototype.fetchStyle().then(function(cssText) {
        // Apply the CSS to be scoped.
        scopedStyles.cssText = scopedStyles.prepare(cssText);

        // Render scoped CSS to the Document body.
        scopedStyles.appendTo(document.body);
      });

      // Register a Component constructor, not an instance.
      return this.components[identifier] = Component;
    },

    unregister: function(identifier) {
      delete this.components[identifier];
    },

    activate: function($el) {
      var Component = this;

      // Convert all attributes on the Element into View properties.
      var attrs = _.reduce($el[0].attributes, function(attrs, attr) {
        attrs[attr.name] = attr.value;
        return attrs;
      }, {});

      // Associate the element as well.
      attrs.el = $el;

      // Create a new Component.
      var component = new Component(attrs);

      // By default use the template property provided, otherwise pull the
      // template contents from the DOM.
      if (!component.template) {
        component.template = _.template(_.unescape($el.html()));
      }

      // Now render and apply to the Document.
      component.render();
    },

    activateAll: function($el) {
      // Instantiate every registered component found among direct children.
      _.each(this.components, function(Component, tagName) {
        $el.children(tagName).each(function() {
          Component.activate($(this));
        });
      });
    }
  });

  Backbone.Component = Component;

})(this);
|
// By KRT girl xiplus
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
struct BigNumber{
const int BASE = 100000000;
const int WIDTH = 8;
vector<int> s;
bool sign=0;
operator long long(){
long long num=0;
int sz=s.size();
for(int i=sz-1;i>=0;i--){
num*=BASE;
num+=s[i];
}
if(sign)return -num;
else return num;
}
operator string(){
string num="";
int sz=s.size();
for(int i=sz-1;i>=0;i--){
num+=to_string(s[i]);
}
if(sign)return "-"+num;
else return num;
}
BigNumber(long long num = 0) { *this = num; }
BigNumber operator = (long long num) {
s.clear();
if(num<0){
sign=1;
num*=-1;
}
do {
s.push_back(num % BASE);
num /= BASE;
} while(num>0);
return *this;
}
BigNumber operator = (string str){
s.clear();
if(str[0]=='-'){
sign=1;
str.erase(0,1);
}
int x, len = (str.length()-1)/WIDTH +1;
for(int i=0;i<len;i++){
int end=str.length()-i*WIDTH;
int start=max(0,end-WIDTH);
stringstream(str.substr(start,end-start))>>x;
// sscanf(str.substr(start,end-start).c_str(),"%d",&x);
s.push_back(x);
}
return *this;
}
BigNumber plus(BigNumber a,BigNumber b){
BigNumber c;
c.sign=a.sign;
c.s.clear();
for(int i=0,g=0;;i++){
if(g==0&&i>=s.size()&&i>=b.s.size()) break;
int x=g;
if(i<s.size()) x+=s[i];
if(i<b.s.size()) x+=b.s[i];
c.s.push_back(x%BASE);
g=x/BASE;
}
return c;
}
BigNumber operator + (BigNumber b){
if(sign^b.sign==0){
return plus(*this,b);
}
}
};
// Demo driver: exercises assignment from long long and from string,
// string rendering, and addition.
// NOTE(review): b1 and b2 have opposite signs, so b1+b2 relies on
// operator+ handling mixed-sign addition — verify the struct actually
// implements that path.
int main(){
	// ios::sync_with_stdio(false);
	// cin.tie(0);
	BigNumber b1=-111111111111111;
	BigNumber b2;
	b2="-222222222222222222222222222222222222222222222";
	cout<<(string)b1<<endl;
	cout<<(string)b2<<endl;
	cout<<(string)(b1+b2)<<endl;
}
#!/usr/bin/env bash
# Launch a nix-shell for this repo inside Docker, caching /nix in a named
# volume so subsequent runs are fast.
set -euo pipefail

VOLUME_NAME="noredink-ui-nixos-shell-nix"

# Create the cache volume if it does not exist yet.
# BUGFIX: `-q` lists bare volume names and `-x` forces a whole-line match,
# so a volume whose name merely *contains* $VOLUME_NAME no longer masks a
# missing one (the old `docker volume ls | grep -q` matched substrings
# anywhere in the table output).
if ! docker volume ls -q | grep -qx "$VOLUME_NAME"; then
    docker volume create "$VOLUME_NAME"
fi

docker run \
    --interactive \
    --tty \
    --mount "type=bind,source=$(pwd),target=/app" \
    --mount "type=volume,source=$VOLUME_NAME,target=/nix" \
    --workdir /app \
    lnl7/nix:latest \
    nix-shell --command 'mkdir -p /etc/ssl/certs && ln -s $NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt && return'
|
#!/usr/bin/env bash
# Copyright 2019 Johns Hopkins University (Author: Daniel Povey). Apache 2.0.
# Copyright 2019 Idiap Research Institute (Author: Srikanth Madikeri). Apache 2.0.
# Begin configuration section
# (All of the variables below can be overridden from the command line via
# utils/parse_options.sh, e.g. --stage 0 --use-gpu no.)
stage=-2
cmd=run.pl
gpu_cmd_opt=
leaky_hmm_coefficient=0.1
xent_regularize=0.1
apply_deriv_weights=false # you might want to set this to true in unsupervised training
# scenarios.
memory_compression_level=2 # Enables us to use larger minibatch size than we
# otherwise could, but may not be optimal for speed
# (--> set to 0 if you have plenty of memory).
dropout_schedule=
srand=0
max_param_change=2.0 # we use a smaller than normal default (it's normally
# 2.0), because there are two models (bottom and top).
# NOTE(review): the comment says "smaller than normal" but the value here is
# the normal 2.0 — confirm which is intended.
use_gpu=yes # can be "yes", "no", "optional", "wait"
print_interval=10
momentum=0.0
parallel_train_opts=
verbose_opt=
common_opts= # Options passed through to nnet3-chain-train and nnet3-chain-combine
num_epochs=4.0 # Note: each epoch may actually contain multiple repetitions of
# the data, for various reasons:
# using the --num-repeats option in process_egs.sh
# data augmentation
# different data shifts (this includes 3 different shifts
# of the data if frame_subsampling_factor=3 (see $dir/init/info.txt)
num_jobs_initial=1
num_jobs_final=1
initial_effective_lrate=0.001
final_effective_lrate=0.0001
minibatch_size=32 # This is how you set the minibatch size.
max_iters_combine=80
max_models_combine=20
diagnostic_period=5 # Get diagnostics every this-many iterations
shuffle_buffer_size=1000 # This "buffer_size" variable controls randomization of the groups
# on each iter.
l2_regularize=
out_of_range_regularize=0.01
multilingual_eg=false # set to true when egs combine multiple languages/corpora
# End configuration section
echo "$0 $@" # Print the command line for logging
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;

if [ $# != 2 ]; then
  echo "Usage: $0 [options] <egs-dir> <model-dir>"
  echo " e.g.: $0 exp/chain/tdnn1a_sp/egs exp/chain/tdnn1a_sp"
  echo ""
  echo "This is the default script to train acoustic models for chain2 recipes."
  echo "The script requires two arguments:"
  echo "<egs-dir>: directory where egs files are stored"
  echo "<model-dir>: directory where the final model will be stored"
  echo ""
  echo "See the top of the script to check possible options to pass to it."
  exit 1
fi

egs_dir=$1
dir=$2

set -e -u # die on failed command or undefined variable

# Sanity-check the egs directory layout before starting anything expensive.
steps/chain2/validate_randomized_egs.sh $egs_dir

for f in $dir/init/info.txt; do
  if [ ! -f $f ]; then
    echo "$0: expected file $f to exist"
    exit 1
  fi
done

# Merge the egs metadata into the model-dir info file.
# NOTE(review): this appends on every run, so re-running the script will
# duplicate entries in $dir/init/info.txt — the later awk lookups take only
# one value, but confirm this is intended.
cat $egs_dir/info.txt >> $dir/init/info.txt
frame_subsampling_factor=$(awk '/^frame_subsampling_factor/ {print $2}' <$dir/init/info.txt)
num_scp_files=$(awk '/^num_scp_files/ {print $2}' <$egs_dir/info.txt)
if [ $stage -le -2 ]; then
    echo "$0: Generating training schedule"
    # Writes one shell-sourceable line per training iteration (iter, num_jobs,
    # lrate, dropout_opt, scp_indexes, frame_shifts, ...) to schedule.txt.
    # BUGFIX: --frame-subsampling-factor was passed twice with the same value;
    # the duplicate has been removed.
    steps/chain2/internal/get_train_schedule.py \
      --frame-subsampling-factor=$frame_subsampling_factor \
      --num-jobs-initial=$num_jobs_initial \
      --num-jobs-final=$num_jobs_final \
      --num-epochs=$num_epochs \
      --dropout-schedule="$dropout_schedule" \
      --num-scp-files=$num_scp_files \
      --initial-effective-lrate=$initial_effective_lrate \
      --final-effective-lrate=$final_effective_lrate \
      --schedule-out=$dir/schedule.txt
fi
if [ "$use_gpu" != "no" ]; then gpu_cmd_opt="--gpu 1"; else gpu_cmd_opt=""; fi

num_iters=$(wc -l <$dir/schedule.txt)

echo "$0: will train for $num_epochs epochs = $num_iters iterations"

# source the 1st line of schedule.txt in the shell; this sets
# lrate and dropout_opt, among other variables.
. <(head -n 1 $dir/schedule.txt)
langs=$(awk '/^langs/ { $1=""; print; }' <$dir/init/info.txt | tail -1)
num_langs=$(echo $langs | wc -w)

mkdir -p $dir/log

# Copy models with initial learning rate and dropout options from $dir/init to $dir/0
if [ $stage -le -1 ]; then
    echo "$0: Copying transition model"
    if [ $num_langs -eq 1 ]; then
        echo "$0: Num langs is 1"
        cp $dir/init/default.raw $dir/0.raw
        if [ -f $dir/init/default_trans.mdl ]; then
            cp $dir/init/default_trans.mdl $dir/0_trans.mdl
        fi
    else
        echo "$0: Num langs is $num_langs"
        cp $dir/init/multi.raw $dir/0.raw
    fi
fi

l2_regularize_opt=""
# BUGFIX: quote the variable and use -n; the original `[ ! -z $l2_regularize ]`
# only worked for an empty value via test's one-argument fallback and would
# break on values containing whitespace.
if [ -n "$l2_regularize" ]; then
    l2_regularize_opt="--l2-regularize=$l2_regularize"
fi

x=0
if [ $stage -gt $x ]; then x=$stage; fi

# Never combine more than half of the iterations' models.
[ $max_models_combine -gt $[num_iters/2] ] && max_models_combine=$[num_iters/2];
combine_start_iter=$[num_iters+1-max_models_combine]
# Main training loop: one pass per line of schedule.txt.
while [ $x -lt $num_iters ]; do
  # Source some variables from schedule.txt.  The effect will be something
  # like the following:
  # iter=0; num_jobs=2; inv_num_jobs=0.5; scp_indexes=(pad 1 2); frame_shifts=(pad 1 2); dropout_opt="--edits='set-dropout-proportion name=* proportion=0.0'" lrate=0.002
  . <(grep "^iter=$x;" $dir/schedule.txt)

  echo "$0: training, iteration $x of $num_iters, num-jobs is $num_jobs"

  next_x=$[$x+1]
  den_fst_dir=$egs_dir/misc
  model_out_prefix=$dir/${next_x}
  model_out=${model_out_prefix}.mdl
  multilingual_eg_opts=
  if $multilingual_eg; then
    multilingual_eg_opts="--multilingual-eg=true"
  fi

  # for the first 4 iterations, plus every $diagnostic_period iterations, launch
  # some diagnostic processes.  We don't do this on iteration 0, because
  # the batchnorm stats wouldn't be ready
  if [ $x -gt 0 ] && [ $[x%diagnostic_period] -eq 0 -o $x -lt 5 ]; then
    [ -f $dir/.error_diagnostic ] && rm $dir/.error_diagnostic
    for name in train heldout; do
      egs_opts=
      if $multilingual_eg; then
        weight_rspecifier=$egs_dir/diagnostic_${name}.weight.ark
        [[ -f $weight_rspecifier ]] && egs_opts="--weights=ark:$weight_rspecifier"
      fi
      $cmd $gpu_cmd_opt $dir/log/diagnostic_${name}.$x.log \
        nnet3-chain-train2 --use-gpu=$use_gpu \
        --leaky-hmm-coefficient=$leaky_hmm_coefficient \
        --xent-regularize=$xent_regularize \
        --out-of-range-regularize=$out_of_range_regularize \
        $l2_regularize_opt \
        --print-interval=10 \
        "nnet3-copy --learning-rate=$lrate $dir/${x}.raw - |" $den_fst_dir \
        "ark:nnet3-chain-copy-egs $egs_opts scp:$egs_dir/${name}_subset.scp ark:- | nnet3-chain-merge-egs $multilingual_eg_opts --minibatch-size=1:64 ark:- ark:-|" \
        $dir/${next_x}_${name}.mdl || touch $dir/.error_diagnostic &
      # Make sure we do not run more than $num_jobs_final at once
      [ $num_jobs_final -eq 1 ] && wait
    done
    wait
  fi

  if [ $x -gt 0 ]; then
    # This doesn't use the egs, it only shows the relative change in model parameters.
    $cmd $dir/log/progress.$x.log \
      nnet3-show-progress --use-gpu=no $dir/$(($x-1)).raw $dir/${x}.raw '&&' \
      nnet3-info $dir/${x}.raw &
  fi

  # Reuse the compilation cache from the previous iteration when available.
  cache_io_opt="--write-cache=$dir/cache.$next_x"
  if [ $x -gt 0 -a -f $dir/cache.$x ]; then
    cache_io_opt="$cache_io_opt --read-cache=$dir/cache.$x"
  fi

  # Launch $num_jobs parallel training jobs, each on its own scp/frame-shift.
  for j in $(seq $num_jobs); do
    scp_index=${scp_indexes[$j]}
    frame_shift=${frame_shifts[$j]}
    egs_opts=
    if $multilingual_eg; then
      weight_rspecifier=$egs_dir/train.weight.$scp_index.ark
      [[ -f $weight_rspecifier ]] && egs_opts="--weights=ark:$weight_rspecifier"
    fi

    $cmd $gpu_cmd_opt $dir/log/train.$x.$j.log \
      nnet3-chain-train2 \
      $parallel_train_opts $verbose_opt \
      --out-of-range-regularize=$out_of_range_regularize \
      $cache_io_opt \
      --use-gpu=$use_gpu --apply-deriv-weights=$apply_deriv_weights \
      --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
      --print-interval=$print_interval --max-param-change=$max_param_change \
      --momentum=$momentum \
      --l2-regularize-factor=$inv_num_jobs \
      $l2_regularize_opt \
      --srand=$srand \
      "nnet3-copy --learning-rate=$lrate $dir/${x}.raw - |" $den_fst_dir \
      "ark:nnet3-chain-copy-egs $egs_opts --frame-shift=$frame_shift scp:$egs_dir/train.$scp_index.scp ark:- | nnet3-chain-shuffle-egs --buffer-size=$shuffle_buffer_size --srand=$x ark:- ark:- | nnet3-chain-merge-egs $multilingual_eg_opts --minibatch-size=$minibatch_size ark:- ark:-|" \
      ${model_out_prefix}.$j.raw || touch $dir/.error &
  done
  wait
  if [ -f $dir/.error ]; then
    echo "$0: error detected training on iteration $x"
    exit 1
  fi

  if [ $x -ge 1 ]; then
    # Average the per-job models into the next iteration's model.
    models_to_average=$(for j in `seq $num_jobs`; do echo ${model_out_prefix}.$j.raw; done)
    $cmd $dir/log/average.$x.log \
      nnet3-average $models_to_average $dir/$next_x.raw || exit 1;
    rm $models_to_average
  else
    # On iteration 0, pick the single best job's model instead of averaging.
    lang=$(echo $langs | awk '{print $1}')
    model_index=`steps/nnet3/chain2/internal/get_best_model.sh --output output-${lang} $dir/log/train.$x.*.log`
    cp ${model_out_prefix}.$model_index.raw $dir/$next_x.raw
    rm ${model_out_prefix}.*.raw
  fi

  # BUGFIX: the diagnostic jobs above touch $dir/.error_diagnostic, but the
  # original checked $dir/$x/.error_diagnostic, which never exists, so
  # diagnostic failures were silently ignored.
  [ -f $dir/.error_diagnostic ] && echo "$0: error getting diagnostics on iter $x" && exit 1;

  if [ -f $dir/cache.$x ]; then
    rm $dir/cache.$x
  fi

  # Keep disk usage bounded: delete old iteration models, but never ones that
  # will be needed by the final combination stage.
  delete_iter=$[x-2]
  if [ $delete_iter -lt $combine_start_iter ]; then
    if [ -f $dir/$delete_iter.raw ]; then
      rm $dir/$delete_iter.raw
    fi
  fi
  if [ -f $dir/${next_x}_train.mdl ]; then
    rm $dir/${next_x}_{train,heldout}.mdl
  fi
  x=$[x+1]
done
if [ $stage -le $num_iters ]; then
  echo "$0: doing model combination"
  den_fst_dir=$egs_dir/misc
  # Combine the last $max_models_combine iteration models into final.raw.
  input_models=$(for x in $(seq $combine_start_iter $num_iters); do echo $dir/${x}.raw; done)
  output_model_dir=$dir/final
  $cmd $gpu_cmd_opt $dir/log/combine.log \
    nnet3-chain-combine2 --use-gpu=$use_gpu \
    --leaky-hmm-coefficient=$leaky_hmm_coefficient \
    --print-interval=10 \
    $den_fst_dir $input_models \
    "ark:nnet3-chain-merge-egs $multilingual_eg_opts scp:$egs_dir/train_subset.scp ark:-|" \
    $dir/final.raw || exit 1;

  # In the single-language case, attach the transition model to produce final.mdl.
  if ! $multilingual_eg; then
    nnet3-copy --edits="rename-node old-name=output new-name=output-dummy; rename-node old-name=output-default new-name=output" \
      $dir/final.raw - | \
      nnet3-am-init $dir/0_trans.mdl - $dir/final.mdl
  fi

  # Compute the probability of the final, combined model with
  # the same subset we used for the previous diagnostic processes, as the
  # different subsets will lead to different probs.
  [ -f $dir/.error_diagnostic ] && rm $dir/.error_diagnostic
  for name in train heldout; do
    egs_opts=
    if $multilingual_eg; then
      weight_rspecifier=$egs_dir/diagnostic_${name}.weight.ark
      [[ -f $weight_rspecifier ]] && egs_opts="--weights=ark:$weight_rspecifier"
    fi
    $cmd $gpu_cmd_opt $dir/log/diagnostic_${name}.final.log \
      nnet3-chain-train2 --use-gpu=$use_gpu \
      --leaky-hmm-coefficient=$leaky_hmm_coefficient \
      --xent-regularize=$xent_regularize \
      --out-of-range-regularize=$out_of_range_regularize \
      $l2_regularize_opt \
      --print-interval=10 \
      $dir/final.raw $den_fst_dir \
      "ark:nnet3-chain-copy-egs $egs_opts scp:$egs_dir/${name}_subset.scp ark:- | nnet3-chain-merge-egs $multilingual_eg_opts --minibatch-size=1:64 ark:- ark:-|" \
      $dir/final_${name}.mdl || touch $dir/.error_diagnostic &
  done
  # BUGFIX: wait for the backgrounded diagnostic jobs before checking for /
  # removing their output models; the original raced against them.
  wait
  if [ -f $dir/final_train.mdl ]; then
    rm $dir/final_{train,heldout}.mdl
  fi
fi

# BUGFIX: the original used `[[ ! $multilingual_eg ]]`, which tests string
# emptiness — "true" and "false" are both non-empty, so the check could never
# fire.  Execute the value instead, matching the other tests in this script.
if ! $multilingual_eg && [[ ! -f $dir/final.mdl ]]; then
  echo "$0: $dir/final.mdl does not exist."
  # we don't want to clean up if the training didn't succeed.
  exit 1;
fi

sleep 2

echo "$0: done"

steps/info/chain_dir_info.pl $dir

exit 0
|
import React from 'react';
import block from 'bem-cn';
import './Toggle.scss';
const b = block('toggle');
interface IProps extends React.HTMLProps<HTMLInputElement> {
  leftLabel: string;
  rightLabel: string;
  title?: string;
}

/**
 * Two-label toggle switch backed by a (visually hidden) checkbox input.
 * The inactive side's label is highlighted via the `active` modifier.
 */
function Toggle(props: IProps) {
  const { leftLabel, rightLabel, title, ...restProps } = props;
  const isChecked = Boolean(restProps.checked);

  return (
    <label className={b({ checked: isChecked })()}>
      {title && <div className={b('title')()}>{title}</div>}
      <input className={b('fake-input')()} type="checkbox" {...restProps}/>
      <div className={b('label', { active: !isChecked })()}>
        {leftLabel}
      </div>
      <div className={b('switch')()}>
        <div className={b('switch-thumb')()} />
      </div>
      <div className={b('label', { active: isChecked })()}>
        {rightLabel}
      </div>
    </label>
  );
}

export { IProps as IToggleProps };
export default Toggle;
|
for num in range(2,101):
prime = True
for i in range(2,num):
if (num%i==0):
prime = False
if prime:
print(num) |
def pack_canvas(canvas, objects):
    """Greedily place rectangular objects onto a 2-D grid.

    Each object is ``(x, y, width, height)`` where ``x`` indexes rows and
    ``width`` spans rows, ``y`` indexes columns and ``height`` spans columns.
    An object is placed (its cells set to 1, mutating ``canvas`` in place)
    only if its whole footprint lies inside the grid and every cell is free;
    otherwise it is skipped.  Returns the (mutated) canvas.
    """
    def _fits(top, left, width, height):
        # Every cell of the candidate footprint must exist and be unoccupied.
        for r in range(top, top + width):
            for c in range(left, left + height):
                out_of_bounds = (
                    r < 0 or r >= len(canvas) or
                    c < 0 or c >= len(canvas[0])
                )
                if out_of_bounds or canvas[r][c] == 1:
                    return False
        return True

    def _mark(top, left, width, height):
        # Claim the footprint.
        for r in range(top, top + width):
            for c in range(left, left + height):
                canvas[r][c] = 1

    for x, y, width, height in objects:
        if _fits(x, y, width, height):
            _mark(x, y, width, height)
    return canvas
/*
* by <NAME>
*
* Copyright 2003-2012 -- See accompanying license
*
*/
#include <cat/lex.h>
#include <cat/err.h>
#include <cat/stduse.h>
#include <stdio.h>
#include <string.h>
enum token_e {
WHITESPACE, NEWLINE, NUMBER, PLUS, MINUS, TIMES, DIVIDE, LPAREN,
RPAREN
};
const char *tok2name[] = {
"whitespace", "newline", "number", "plus", "minus", "times", "divide",
"left parentheses", "right parentheses"
};
/* Register pattern `pat` under token id `tok` on `lex`; aborts via err()
 * if the lexer rejects the entry. */
void Lex_add(struct lexer *lex, const char *pat, int tok)
{
	int rv;

	rv = lex_add_entry(lex, pat, tok);
	if ( rv < 0 )
		err("Error adding token '%d' with pattern '%s'", tok, pat);
}
/* Demo driver: reads all of stdin, tokenizes it with a small arithmetic
 * grammar, and prints one line per token. */
int main(int argc, char *argv[])
{
	struct lexer *lex;
	char buffer[65563], *bp = buffer;  /* NOTE(review): 65563 looks like a typo for 65536 — confirm */
	int tok;
	const char *tokp;
	int toklen;

	/* Build the lexer and register one pattern per token type.
	 * NOTE(review): "-?[0-9]+" (NUMBER) overlaps with "-" (MINUS);
	 * presumably the library's match rules (longest/first match)
	 * disambiguate — confirm against cat/lex documentation. */
	lex = lex_new(&estdmm);
	Lex_add(lex, "[ \t]+", WHITESPACE);
	Lex_add(lex, "[\n\r]+", NEWLINE);
	Lex_add(lex, "-?[0-9]+", NUMBER);
	Lex_add(lex, "\\+", PLUS);
	Lex_add(lex, "-", MINUS);
	Lex_add(lex, "\\*", TIMES);
	Lex_add(lex, "/", DIVIDE);
	Lex_add(lex, "\\(", LPAREN);
	Lex_add(lex, "\\)", RPAREN);

	/* Slurp stdin into buffer, leaving room for the terminating NUL. */
	while ( bp < buffer + sizeof(buffer) - 1 &&
	        fgets(bp, sizeof(buffer) - (bp - buffer), stdin) ) {
		bp += strlen(bp);
	}

	/* Point the lexer at the collected input and walk the token stream. */
	lex_reset(lex, buffer);
	while ( (tok = lex_next_token(lex, &tokp, &toklen)) >= 0 ) {
		printf("Token %s", tok2name[tok]);
		if ( tok == NUMBER ) {
			/* Copy (and truncate if necessary) the matched text so it
			 * can be printed as a NUL-terminated string. */
			char argbuf[256] = { 0 };
			if ( toklen > sizeof(argbuf) - 1 )
				toklen = sizeof(argbuf) - 1;
			memcpy(argbuf, tokp, toklen);
			printf(": '%s'", argbuf);
		}
		putchar('\n');
	}

	/* Negative return codes signal end-of-stream or an error. */
	if ( tok == LEX_END ) {
		printf("Normal token stream end\n");
	} else if ( tok == LEX_NOMATCH ) {
		printf("No match for more tokens\n");
	} else if ( tok == LEX_ERROR ) {
		printf("Error in token match\n");
	} else {
		printf("Unknown return code: %d\n", tok);
	}

	return 0;
}
|
#!/bin/bash
# Compile the Java sources and run Prob on an input file
# (defaults to "input"; an alternative may be given as $1).
set -eu

target="input"
if [ $# -gt 0 ]
then
    target="$1"
fi

javac *.java

# BUGFIX: check the file we are actually going to use; the original always
# tested the literal "input", even when a custom target was supplied.
if [ ! -e "$target" ]
then
    echo "Must download input"
    exit 1
fi

echo "Running $target"
java -Xmx50g -cp . Prob "$target"
|
// Source: lsm5/crio-deb — vendor/k8s.io/kubernetes/pkg/kubectl/cmd/util/factory_builder.go
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// this file contains factories with no other dependencies
package util
import (
"io"
"github.com/spf13/cobra"
"k8s.io/apimachinery/pkg/api/meta"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/kubectl/resource"
)
// ring2Factory implements BuilderFactory on top of the lower-ring
// client-access and object-mapping factories.
type ring2Factory struct {
	clientAccessFactory ClientAccessFactory
	objectMappingFactory ObjectMappingFactory
}
// NewBuilderFactory returns a BuilderFactory backed by the supplied
// client-access and object-mapping factories.
func NewBuilderFactory(clientAccessFactory ClientAccessFactory, objectMappingFactory ObjectMappingFactory) BuilderFactory {
	return &ring2Factory{
		clientAccessFactory:  clientAccessFactory,
		objectMappingFactory: objectMappingFactory,
	}
}
// PrintObject writes obj to out using a printer selected for the object's
// REST mapping.
func (f *ring2Factory) PrintObject(cmd *cobra.Command, mapper meta.RESTMapper, obj runtime.Object, out io.Writer) error {
	// Resolve the object's group/version/kind(s) from the API scheme.
	gvks, _, err := api.Scheme.ObjectKinds(obj)
	if err != nil {
		return err
	}

	// NOTE(review): only the first registered kind is used for the mapping.
	mapping, err := mapper.RESTMapping(gvks[0].GroupKind())
	if err != nil {
		return err
	}

	printer, err := f.objectMappingFactory.PrinterForMapping(cmd, mapping, false)
	if err != nil {
		return err
	}
	return printer.PrintObj(obj, out)
}
// NewBuilder returns a resource.Builder wired with the mapper/typer from the
// object-mapping factory and a decoder from the client-access factory.
func (f *ring2Factory) NewBuilder() *resource.Builder {
	mapper, typer := f.objectMappingFactory.Object()
	return resource.NewBuilder(mapper, typer, resource.ClientMapperFunc(f.objectMappingFactory.ClientForMapping), f.clientAccessFactory.Decoder(true))
}
|
// File: components/home/SetupRepo.js
import React from 'react'
import DefaultLayout from 'layouts'
import { apiEndpoint } from 'prismic-configuration'
import { setupRepoStyles } from 'styles'
/**
* Setup repo component
*/
const SetupRepo = () => {
const repoUrl = `${apiEndpoint.replace('.cdn','').slice(0, -6)}documents/`
return (
<DefaultLayout>
<div className='setup-repo'>
<h1>Good job!</h1>
<h2>You're halfway done with setting up your Prismic website</h2>
<h4>Just visit your <a href={repoUrl}>Prismic dashboard</a> and add some content there</h4>
</div>
<style jsx global>{setupRepoStyles}</style>
</DefaultLayout>
)
}
export default SetupRepo
|
#!/usr/bin/env bash
# Exit with a random status: 0 or 1, each with probability 1/2.
coin=$(( RANDOM % 2 ))
exit "$coin"
|
# Sample usage of the ProviderManager class
# NOTE(review): ProviderManager is neither defined nor imported in this file;
# this snippet assumes it is available from elsewhere — confirm the import.

# Create an instance of ProviderManager
provider_info = ProviderManager(["Provider A", "Provider B"], {"config1": "value1", "config2": "value2"}, False, ["Address 1", "Address 2"])

# Update the primary provider name
provider_info.update_provider_name("New Provider A")

# Add a new address
provider_info.add_address("Address 3")

# Get the provider information
print(provider_info.get_provider_info())
# Output: Provider: New Provider A, Provider B
# Task Config: {'config1': 'value1', 'config2': 'value2'}
# Address Info: ['Address 1', 'Address 2', 'Address 3']
// File: src/net/natroutter/hubcore/features/gadgets/slapper/SlapperHandler.java
package net.natroutter.hubcore.features.gadgets.slapper;
import net.natroutter.hubcore.Handler;
import net.natroutter.hubcore.HubCore;
import net.natroutter.hubcore.handlers.Database.PlayerData;
import net.natroutter.hubcore.handlers.Database.PlayerDataHandler;
import org.bukkit.Sound;
import org.bukkit.entity.Player;
import org.bukkit.util.Vector;
import net.natroutter.hubcore.handlers.AdminModeHandler;
public class SlapperHandler {
	// Per-player persisted settings (e.g. the NoEffect opt-out flag).
	private PlayerDataHandler pdh;
	// Used to suppress the slap when the acting player is in admin mode.
	private AdminModeHandler adminModeHandler;

	public SlapperHandler(Handler handler) {
		this.pdh = handler.getDataHandler();
		this.adminModeHandler = handler.getAdminModeHandler();
	}

	/**
	 * "Slaps" {@code target}: launches them along {@code p}'s view direction
	 * with a small upward lift and plays a slime sound for both players.
	 *
	 * No-op when the target has NoEffect enabled, or when the slapping
	 * player is currently in admin mode.
	 */
	public void slap(Player p, Player target) {
		PlayerData data = pdh.get(target.getUniqueId());
		if (data.getNoEffect()) {return;}
		if (!adminModeHandler.isAdmin(p)) {
			// Horizontal push scaled by 1.2, plus a constant 0.6 vertical boost.
			target.setVelocity(p.getLocation().getDirection().multiply(1.2D).add(new Vector(0.0, 0.6, 0.0)));
			target.playSound(target.getLocation(), Sound.BLOCK_SLIME_BLOCK_FALL, 1.0F, 1.0F);
			p.playSound(target.getLocation(), Sound.BLOCK_SLIME_BLOCK_FALL, 1.0F, 1.0F);
		}
	}
}
|
(function () {
    // NOTE: extending native prototypes is generally discouraged; it is kept
    // here because the exercise contract requires these String helpers.

    // Prepend `str` unless the string already starts with it.
    String.prototype.ensureStart = function (str) {
        return this.startsWith(str) ? this.toString() : str + this;
    };
    // Append `str` unless the string already ends with it.
    String.prototype.ensureEnd = function (str) {
        return this.endsWith(str) ? this.toString() : this + str;
    };
    String.prototype.isEmpty = function () {
        return this == "";
    };
    // Shorten to at most n characters, appending "..." (or n dots if n < 4),
    // preferring to cut at the last space.
    String.prototype.truncate = function (n) {
        if (this.length <= n) {
            return this.toString();
        }
        if (n < 4) {
            return '.'.repeat(n);
        }
        // BUGFIX: reserve 3 characters for the ellipsis so the result is at
        // most n characters long; the original sliced to n-2, producing
        // strings of length n+1 when no space was found.
        let result = this.slice(0, n - 3);
        let lastSpace = result.lastIndexOf(" ");
        if (lastSpace !== -1) {
            result = result.slice(0, lastSpace) + "...";
        } else {
            result += "...";
        }
        return result;
    };
    // Replace the first occurrence of each {i} placeholder with params[i].
    String.format = function (str, ...params) {
        for (let i = 0; i < params.length; i++) {
            str = str.replace(`{${i}}`, params[i]);
        }
        return str;
    }
})();
// Judge entry point: redefines the String helpers (self-contained, as the
// submission format requires) and prints a sample truncation.
function solve() {
    String.prototype.ensureStart = function (str) {
        return this.startsWith(str) ? this.toString() : str + this;
    };
    String.prototype.ensureEnd = function (str) {
        return this.endsWith(str) ? this.toString() : this + str;
    };
    String.prototype.isEmpty = function () {
        return this == "";
    };
    String.prototype.truncate = function (n) {
        if (this.length <= n) {
            return this.toString();
        }
        if (n < 4) {
            return '.'.repeat(n);
        }
        // BUGFIX: slice to n-3 (not n-2) so "..." fits within the n-character
        // budget; the sample output below ("the...") is unchanged.
        let result = this.slice(0, n - 3);
        let lastSpace = result.lastIndexOf(" ");
        if (lastSpace !== -1) {
            result = result.slice(0, lastSpace) + "...";
        } else {
            result += "...";
        }
        return result;
    };

    let test = 'the quick brown fox jumps over the lazy dog';
    console.log(test.truncate(6));
}
solve();
import os
import pandas as pd
import datetime
def process_and_save_data(df, dataset_name, data_dir):
    """Stamp ``df`` with a fixed datetime column and write it to
    ``<data_dir>/<dataset_name>_edit.csv`` (without the index column).

    Note: mutates ``df`` in place by adding/overwriting the 'datetime'
    column, exactly as before.
    """
    df['datetime'] = datetime.datetime(2018, 1, 1)
    target_path = os.path.join(data_dir, f'{dataset_name}_edit.csv')
    df.to_csv(target_path, index=False)
#!/bin/bash
# ----------------------------------------------------------------------------
set -e
set +v
###############################################################################
# PowerAuth2 build for Apple platforms
#
# The main purpose of this script is build and prepare PA2 "fat" libraries for
# library distribution. Typically, this script is used for CocoaPods integration.
#
# The result of the build process is:
# libPowerAuthCore.a:
# multi-architecture static library (also called as "fat") with all
# core functionality of PowerAuth2 SDK. The library contains all C++
# code, plus thin ObjC wrapper written on top of that codes.
#
# SDK sources:
# all SDK high level source codes are copied to destination directory.
# all private headers are copied into "Private" sub directory.
#
# Script is using following folders (if not changed):
#
# ./Lib/Debug - result of debug configuration, containing
# final fat library, source codes and public headers
# ./Lib/Debug/Private - contains all private headers
#
#  ./Lib/Release       - result of release configuration, containing
#                        final fat library, source codes and public headers
#  ./Lib/Release/Private  - contains all private headers
#
# ./Tmp - for all temporary data
#
# ----------------------------------------------------------------------------
###############################################################################
# Include common functions...
# -----------------------------------------------------------------------------
TOP=$(dirname $0)
source "${TOP}/common-functions.sh"
SRC_ROOT="`( cd \"$TOP/..\" && pwd )`"

#
# Source headers & Xcode project location
#
XCODE_PROJECT="${SRC_ROOT}/proj-xcode/PowerAuthCore.xcodeproj"
SOURCE_FILES="${SRC_ROOT}/proj-xcode/Classes"

#
# Architectures & Target libraries
# (SDK1 = device, SDK2 = simulator, SDK3 = Mac Catalyst)
#
PLATFORM_SDK1="iphoneos"
PLATFORM_SDK2="iphonesimulator"
PLATFORM_SDK3="maccatalyst"
PLATFORM_ARCHS1="armv7 armv7s arm64"
PLATFORM_ARCHS2="i386 x86_64"
PLATFORM_ARCHS3="x86_64"
OUT_LIBRARY="libPowerAuthCore.a"
OUT_LIBRARY_CATALYST="libPowerAuthCore-maccatalyst.a"

# Variables loaded from command line
# (VERBOSE: 0 = silent, 1 = progress only, 2 = full xcodebuild output)
VERBOSE=1
FULL_REBUILD=1
CLEANUP_AFTER=1
SCHEME_NAME=''
CONFIG_NAME=''
OUT_DIR=''
TMP_DIR=''
# -----------------------------------------------------------------------------
# USAGE prints help and exits the script with error code from provided parameter
# Parameters:
# $1 - error code to be used as return code from the script
# -----------------------------------------------------------------------------
function USAGE
{
	# Emit the whole help text in one heredoc (unquoted so $CMD expands),
	# then exit with the status supplied by the caller.
	cat <<EOF

Usage: $CMD [options] command

command is:
  debug for DEBUG build
  release for RELEASE build

options are:
  -nc | --no-clean disable 'clean' before 'build'
  also disables derived data cleanup after build
  -v0 turn off all prints to stdout
  -v1 print only basic log about build progress
  -v2 print full build log with rich debug info
  --out-dir path changes directory where final framework
  will be stored
  --tmp-dir path changes temporary directory to |path|
  -h | --help prints this help information

EOF
	exit $1
}
# -----------------------------------------------------------------------------
# Performs xcodebuild command for a single platform (iphone / simulator)
# Parameters:
# $1 - scheme name (e.g. PA2Ext_Debug, PA2Watch_Release)
# $2 - configuration name (e.g. Debug, Release)
# $3 - platform SDK (watchos, iphoneos)
# $4 - simulator SDK (watchsimulator, iphonesimulator)
# -----------------------------------------------------------------------------
function MAKE_FAT_LIB
{
	local SCHEME=$1
	local CONFIG=$2
	local NAT_PLATFORM=$3
	local SIM_PLATFORM=$4
	local LIB=${OUT_LIBRARY}

	LOG_LINE
	LOG "FATalizing  ${LIB}"
	LOG_LINE

	# Per-platform build products written by BUILD_COMMAND.
	local NAT_LIB_DIR="${TMP_DIR}/${SCHEME}-${NAT_PLATFORM}/${CONFIG}-${NAT_PLATFORM}"
	local SIM_LIB_DIR="${TMP_DIR}/${SCHEME}-${SIM_PLATFORM}/${CONFIG}-${SIM_PLATFORM}"
	local FAT_LIB_DIR="${TMP_DIR}/${SCHEME}-${CONFIG}"

	# Merge device + simulator slices into one multi-architecture archive.
	$MD "${FAT_LIB_DIR}"
	eval "${LIPO} -create \"$NAT_LIB_DIR/$LIB\" \"$SIM_LIB_DIR/$LIB\" -output \"$FAT_LIB_DIR/$LIB\""

	LOG "Copying final library..."
	$CP -r "${FAT_LIB_DIR}/${LIB}" "${OUT_DIR}"
}
# -----------------------------------------------------------------------------
# Produces the Mac Catalyst variant of the library.  Unlike MAKE_FAT_LIB there
# is only one input slice, so lipo is used purely to rewrite the archive under
# the catalyst-specific output name.
# Parameters:
#   $1   - scheme name
#   $2   - configuration name (e.g. Debug, Release)
#   $3   - platform SDK (maccatalyst)
# -----------------------------------------------------------------------------
function MAKE_FAT_LIB_CATALYST
{
	local SCHEME=$1
	local CONFIG=$2
	local PLATFORM=$3
	local LIB=${OUT_LIBRARY}
	local LIB_OUT=${OUT_LIBRARY_CATALYST}

	LOG_LINE
	LOG "FATalizing  ${LIB_OUT}"
	LOG_LINE

	local LIB_DIR="${TMP_DIR}/${SCHEME}-${PLATFORM}/${CONFIG}-${PLATFORM}"
	local FAT_LIB_DIR="${TMP_DIR}/${SCHEME}-${CONFIG}"

	$MD "${FAT_LIB_DIR}"
	eval "${LIPO} -create \"$LIB_DIR/$LIB\" -output \"$FAT_LIB_DIR/$LIB_OUT\""

	LOG "Copying final library..."
	$CP -r "${FAT_LIB_DIR}/${LIB_OUT}" "${OUT_DIR}"
}
# -----------------------------------------------------------------------------
# Validates whether given library has all expected platforms
# Parameters:
# $1 - library path
# $2 - architectures, space separated values
# -----------------------------------------------------------------------------
function VALIDATE_FAT_ARCHITECTURES
{
	local LIB="$1"
	local ARCHITECTURES=($2)
	local INFO=`${LIPO} -info "${LIB}"`
	for ARCH in "${ARCHITECTURES[@]}"
	do
		# BUGFIX: use a whole-word match; a plain `grep $ARCH` let e.g.
		# "armv7s" satisfy a check for "armv7", masking a missing slice.
		if ! echo "$INFO" | grep -q -w -- "$ARCH"; then
			FAILURE "Architecture $ARCH is missing in final FAT library."
		fi
	done
}
# -----------------------------------------------------------------------------
# Copy file from $1 to $2.
# If $1 is header and contains "Private" or "private" in path,
# then copy to $2/Private
# Parameters:
# $1 - source file
# $2 - destination directory
# -----------------------------------------------------------------------------
function COPY_SRC_FILE
{
	local SRC=$1
	local DST=$2
	# Headers living under a "Private"/"private" path segment are routed to
	# the Private subfolder; everything else (including private .m files)
	# lands in DST directly.
	if [[ "$SRC" == *Private* || "$SRC" == *private* ]]; then
		if [[ "$SRC" == *.h ]]; then
			DST="$DST/Private"
		fi
	fi
	$CP "${SRC}" "${DST}"
}
# -----------------------------------------------------------------------------
# Copy all source files from $1 directory to $2.
# If $3 contains "1" then only headers will be copied
# Parameters:
# $1 - SDK folder (relative)
# $2 - SDK folder base
# $3 - destination directory
# $4 - only headers if equal to 1
# -----------------------------------------------------------------------------
function COPY_SRC_DIR
{
	local SRC="$1"
	local BASE="$2"
	local DST="$3"
	local ONLY_HEADERS="$4"

	local SRC_FULL="${BASE}/$SRC"
	local SRC_DIR_FULL="`( cd \"$SRC_FULL\" && pwd )`"

	LOG "Copying $SRC ..."
	PUSH_DIR "${SRC_DIR_FULL}"
	####
	# NOTE(review): "-null" is parsed by grep as "-n -u -l -l", so the
	# effective behavior is "-l" (list file names matching the empty
	# pattern, i.e. every included file); "--null" was probably intended.
	# The word-splitting into the array below relies on whitespace-free
	# file names either way — confirm before changing.
	if [ x$ONLY_HEADERS == x1 ]; then
		local files=(`grep -R -null --include "*.h" "" .`)
	else
		local files=(`grep -R -null --include "*.h" --include "*.m" "" .`)
	fi
	# Do for each file we found...
	for ix in ${!files[*]}
	do
		local FILE="${files[$ix]}"
		COPY_SRC_FILE "${FILE}" "${DST}"
	done
	####
	POP_DIR
}
# -----------------------------------------------------------------------------
# Copy all source files in SDK to destination directory
# Parameters:
# $1 - source directory
# $2 - destination directory
# -----------------------------------------------------------------------------
function COPY_SOURCE_FILES
{
	local SRC="$1"
	local DST="$2"

	LOG_LINE
	LOG "Copying SDK folders ..."
	LOG_LINE

	# Prepare dirs in output directory
	DST="`( cd \"$DST\" && pwd )`"
	$MD "${DST}"
	$MD "${DST}/Private"

	# Copy each SDK folder
	# (4th argument: 1 = headers only, 0 = headers + implementation files)
	COPY_SRC_DIR "sdk"          "$SRC" "$DST" 0
	COPY_SRC_DIR "sdk-private"  "$SRC" "$DST" 0
	COPY_SRC_DIR "core"         "$SRC" "$DST" 1
	COPY_SRC_DIR "networking"   "$SRC" "$DST" 0
	COPY_SRC_DIR "keychain"     "$SRC" "$DST" 0
	COPY_SRC_DIR "token"        "$SRC" "$DST" 0
	COPY_SRC_DIR "system"       "$SRC" "$DST" 0
	COPY_SRC_DIR "watch"        "$SRC" "$DST" 0

	# And finally, top level header..
	# Disabled, CocoaPods generates it own umbrella header.
	#$CP "${SRC}/PowerAuth2.h" "$DST"
}
# -----------------------------------------------------------------------------
# Performs xcodebuild command for a single platform (iphone / simulator)
# Parameters:
# $1 - scheme name (e.g. PA2Core_Lib)
# $2 - build configuration (e.g. Release | Debug)
# $3 - platform (iphoneos, iphonesimulator)
# $4 - command to execute. You can use 'build' or 'clean'
# -----------------------------------------------------------------------------
function BUILD_COMMAND
{
    local SCHEME="$1"
    local CONFIG="$2"
    local PLATFORM="$3"
    local COMMAND="$4"
    # Pick the architecture list that matches the requested platform SDK.
    if [ $PLATFORM == $PLATFORM_SDK1 ]; then
        local PLATFORM_ARCHS="$PLATFORM_ARCHS1"
    elif [ $PLATFORM == $PLATFORM_SDK2 ]; then
        local PLATFORM_ARCHS="$PLATFORM_ARCHS2"
    else
        local PLATFORM_ARCHS="$PLATFORM_ARCHS3"
    fi
    LOG "Executing ${COMMAND} for scheme ${SCHEME} :: ${CONFIG} :: ${PLATFORM} :: ${PLATFORM_ARCHS}"
    # Per-scheme, per-platform build products directory under TMP_DIR.
    local BUILD_DIR="${TMP_DIR}/${SCHEME}-${PLATFORM}"
    local COMMAND_LINE="${XCBUILD} -project \"${XCODE_PROJECT}\""
    if [ $VERBOSE -lt 2 ]; then
        COMMAND_LINE="$COMMAND_LINE -quiet"
    fi
    COMMAND_LINE="$COMMAND_LINE -scheme ${SCHEME} -configuration ${CONFIG}"
    # Mac Catalyst (SDK3) has no dedicated -sdk name; it is selected via a
    # -destination plus explicit -target/-miphoneos-version-min flags.
    if [ $PLATFORM == $PLATFORM_SDK3 ]; then
        COMMAND_LINE="$COMMAND_LINE -destination 'platform=macOS,variant=Mac Catalyst' OTHER_CFLAGS=\"-target x86_64-apple-ios13.0-macabi -miphoneos-version-min=13.0\" OTHER_LDFLAGS=\"-target x86_64-apple-ios13.0-macabi -miphoneos-version-min=13.0\""
    else
        COMMAND_LINE="$COMMAND_LINE -sdk ${PLATFORM}"
    fi
    COMMAND_LINE="$COMMAND_LINE -derivedDataPath \"${TMP_DIR}/DerivedData\""
    COMMAND_LINE="$COMMAND_LINE BUILD_DIR=\"${BUILD_DIR}\" BUILD_ROOT=\"${BUILD_DIR}\" CODE_SIGNING_REQUIRED=NO"
    COMMAND_LINE="$COMMAND_LINE ARCHS=\"${PLATFORM_ARCHS}\" ONLY_ACTIVE_ARCH=NO"
    COMMAND_LINE="$COMMAND_LINE ${COMMAND}"
    DEBUG_LOG ${COMMAND_LINE}
    # The command line is assembled as a string (with embedded quoting),
    # so it must be run through eval.
    eval ${COMMAND_LINE}
    # 'clean' also removes the per-platform build directory, if present.
    if [ "${COMMAND}" == "clean" ] && [ -e "${BUILD_DIR}" ]; then
        $RM -r "${BUILD_DIR}"
    fi
}
# -----------------------------------------------------------------------------
# Build scheme for both plaforms and create FAT libraries
# Parameters:
# $1 - scheme name (e.g. PA2Core_Lib)
# $2 - build configuration (e.g. Debug | Release)
# -----------------------------------------------------------------------------
# Build device (SDK1) and simulator (SDK2) slices of the scheme, merge them
# into one FAT static library, validate its architectures, then copy sources.
function BUILD_SCHEME
{
    local SCHEME="$1"
    local CONFIG="$2"
    LOG_LINE
    LOG "Building architectures..."
    LOG_LINE
    BUILD_COMMAND $SCHEME $CONFIG $PLATFORM_SDK1 build
    BUILD_COMMAND $SCHEME $CONFIG $PLATFORM_SDK2 build
    MAKE_FAT_LIB $SCHEME $CONFIG $PLATFORM_SDK1 $PLATFORM_SDK2
    local FAT_LIB="${OUT_DIR}/${OUT_LIBRARY}"
    local ALL_ARCHS="${PLATFORM_ARCHS1} ${PLATFORM_ARCHS2}"
    VALIDATE_FAT_ARCHITECTURES "${FAT_LIB}" "${ALL_ARCHS}"
    # Copy source files...
    COPY_SOURCE_FILES "${SOURCE_FILES}" "${OUT_DIR}"
}
# Same as BUILD_SCHEME, but for the single Mac Catalyst platform (SDK3);
# produces OUT_LIBRARY_CATALYST instead of OUT_LIBRARY.
function BUILD_SCHEME_CATALYST
{
    local SCHEME="$1"
    local CONFIG="$2"
    LOG_LINE
    LOG "Building architectures..."
    LOG_LINE
    BUILD_COMMAND $SCHEME $CONFIG $PLATFORM_SDK3 build
    MAKE_FAT_LIB_CATALYST $SCHEME $CONFIG $PLATFORM_SDK3
    local FAT_LIB="${OUT_DIR}/${OUT_LIBRARY_CATALYST}"
    local ALL_ARCHS="${PLATFORM_ARCHS3}"
    VALIDATE_FAT_ARCHITECTURES "${FAT_LIB}" "${ALL_ARCHS}"
    # Copy source files...
    COPY_SOURCE_FILES "${SOURCE_FILES}" "${OUT_DIR}"
}
# -----------------------------------------------------------------------------
# Clear project for specific scheme
# Parameters:
# $1 - scheme name (e.g. PA2Core_Lib...)
# $2 - configuration name
# -----------------------------------------------------------------------------
# Run the xcodebuild 'clean' action for device (SDK1) and simulator (SDK2).
# Note: the Catalyst platform (SDK3) is not cleaned here.
function CLEAN_SCHEME
{
    local SCHEME=$1
    local CONFIG=$2
    LOG_LINE
    LOG "Cleaning architectures..."
    LOG_LINE
    BUILD_COMMAND $SCHEME $CONFIG $PLATFORM_SDK1 clean
    BUILD_COMMAND $SCHEME $CONFIG $PLATFORM_SDK2 clean
}
###############################################################################
# Script's main execution starts here...
# -----------------------------------------------------------------------------
# Parse command line: a build configuration keyword (debug|release) plus
# optional switches for temp/output dirs, verbosity and cleanup behavior.
while [[ $# -gt 0 ]]
do
    opt="$1"
    case "$opt" in
        debug)
            SCHEME_NAME='PA2Core_Lib'
            CONFIG_NAME='Debug'
            ;;
        release)
            SCHEME_NAME='PA2Core_Lib'
            CONFIG_NAME="Release"
            ;;
        -nc | --no-clean)
            FULL_REBUILD=0
            CLEANUP_AFTER=0
            ;;
        --tmp-dir)
            TMP_DIR="$2"
            shift
            ;;
        --out-dir)
            OUT_DIR="$2"
            shift
            ;;
        -v*)
            SET_VERBOSE_LEVEL_FROM_SWITCH $opt
            ;;
        -h | --help)
            USAGE 0
            ;;
        *)
            USAGE 1
            ;;
    esac
    shift
done
UPDATE_VERBOSE_COMMANDS
# Check required parameters
if [ x$SCHEME_NAME == x ] || [ x$CONFIG_NAME == x ]; then
    FAILURE "You have to specify build configuration (debug or release)"
fi
# Defaulting target & temporary folders
if [ -z "$OUT_DIR" ]; then
    OUT_DIR="${TOP}/Lib/${CONFIG_NAME}"
fi
if [ -z "$TMP_DIR" ]; then
    TMP_DIR="${TOP}/Tmp"
fi
# Find various build tools
XCBUILD=`xcrun -sdk iphoneos -find xcodebuild`
LIPO=`xcrun -sdk iphoneos -find lipo`
if [ x$XCBUILD == x ]; then
    FAILURE "xcodebuild command not found."
fi
if [ x$LIPO == x ]; then
    FAILURE "lipo command not found."
fi
# Print current config
DEBUG_LOG "Going to build scheme ${SCHEME_NAME} :: ${CONFIG_NAME}"
DEBUG_LOG " >> OUT_DIR = ${OUT_DIR}"
DEBUG_LOG " >> TMP_DIR = ${TMP_DIR}"
DEBUG_LOG "    XCBUILD = ${XCBUILD}"
DEBUG_LOG "       LIPO = ${LIPO}"
# -----------------------------------------------------------------------------
# Real job starts here :)
# -----------------------------------------------------------------------------
#
# Prepare target directories (full rebuild wipes both first)
#
[[ x$FULL_REBUILD == x1 ]] && $RM -r "${OUT_DIR}" "${TMP_DIR}"
$MD "${OUT_DIR}"
$MD "${TMP_DIR}"
#
# Perform clean if required
#
#[[ x$FULL_REBUILD == x1 ]] && CLEAN_SCHEME ${SCHEME_NAME} ${CONFIG_NAME}
#
# Build (regular iOS FAT library, then the Mac Catalyst variant)
#
BUILD_SCHEME ${SCHEME_NAME} ${CONFIG_NAME}
BUILD_SCHEME_CATALYST ${SCHEME_NAME} ${CONFIG_NAME}
#
# Remove temporary data
#
if [ x$CLEANUP_AFTER == x1 ]; then
    LOG_LINE
    LOG "Removing temporary data..."
    $RM -r "${TMP_DIR}"
fi
LOG_LINE
LOG "SUCCESS"
|
// 1735. 분수 합
// 2019.08.28
// 수학
#include<iostream>
using namespace std;
// Greatest common divisor, iterative Euclidean algorithm.
// Equivalent to the recursive form: gcd(a, 0) == a.
int gcd(int a, int b)
{
    while (b != 0)
    {
        int r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// Read two fractions a/b and c/d, print their sum x/y in lowest terms.
int main()
{
    int a, b, c, d;
    cin >> a >> b >> c >> d;
    // Sum before reduction: a/b + c/d = (a*d + b*c) / (b*d)
    int x = a * d + b * c;
    int y = b * d;
    // Compute the GCD once instead of twice (was gcd(x,y) per operand).
    int g = gcd(x, y);
    cout << x / g << " " << y / g << endl;
    return 0;
}
|
"use strict";
// Auto-generated icon definition (Material Design "switch camera" outline,
// 24x24 viewBox) in the element-tree form used by react-icons-kit style
// consumers: each child is a {name, attribs, children} node.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_switch_camera_outline = void 0;
var ic_switch_camera_outline = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0V0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M20 4h-3.17L15 2H9L7.17 4H4c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h16c1.1 0 2-.9 2-2V6c0-1.1-.9-2-2-2zM9.88 4h4.24l1.83 2H20v12H4V6h4.05"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M15 11H9V8.5L5.5 12 9 15.5V13h6v2.5l3.5-3.5L15 8.5z"
    },
    "children": []
  }]
};
exports.ic_switch_camera_outline = ic_switch_camera_outline;
#!/bin/sh
# Concatenate README.md followed by every markdown file referenced in
# SUMMARY.md (in SUMMARY order) to stdout — e.g. to build a one-page book.
cat -- README.md `grep -o '[a-z0-9/-]*\.md' SUMMARY.md`
|
"""
The :mod:`sloth.utils` module collects simple utilities for daily work
"""
|
<filename>protected/plugin/cn_dreamn_plugin_music/i/js/player.js
/*
* DYBOY修复
* 2018-06-30
*/
// Minimal jQuery cookie helper (classic $.cookie plugin shape):
//  - $.cookie(name)                -> read (returns decoded value or null)
//  - $.cookie(name, value, opts)   -> write; opts: expires (days|Date),
//    path, domain, secure. value === null deletes (expires = -1).
jQuery['cookie'] = function(name, value, options) {
    if (typeof value != 'undefined') {
        options = options || {};
        if (value === null) {
            value = '';
            options['expires'] = -0x1
        };
        var expires = '';
        // 'expires' may be a number of days or a Date-like object.
        if (options['expires'] && (typeof options['expires'] == 'number' || options['expires']['toUTCString'])) {
            var date;
            if (typeof options['expires'] == 'number') {
                date = new Date();
                date['setTime'](date['getTime']() + options['expires'] * 0x18 * 0x3c * 0x3c * 1e3)
            } else {
                date = options['expires']
            };
            expires = '; expires=' + date['toUTCString']()
        };
        var path = options['path'] ? '; path=' + options['path'] : '';
        var domain = options['domain'] ? '; domain=' + options['domain'] : '';
        var secure = options['secure'] ? '; secure' : '';
        window['document']['cookie'] = [name, '=', encodeURIComponent(value), expires, path, domain, secure]['join']('')
    } else {
        // Read path: scan document.cookie for a "name=" prefix.
        var cookieValue = null;
        if (window['document']['cookie'] && window['document']['cookie'] != '') {
            var cookies = window['document']['cookie']['split'](';');
            for (var i = 0x0; i < cookies['length']; i++) {
                var cookie = jQuery['trim'](cookies[i]);
                if (cookie['substring'](0x0, name['length'] + 0x1) == name + '=') {
                    cookieValue = decodeURIComponent(cookie['substring'](name['length'] + 0x1));
                    break
                }
            }
        };
        return cookieValue
    }
};
// Playlist data; populated by the script loaded via $.ajax from `user`.
var wenkmList;
// Hide the player entirely on mobile user agents and when framed.
if (navigator['userAgent']['match'](/(iPhone|iPod|Android|ios|Nokia|Black Berry|MIDP|Phone)/i)) {
    $('#wenkmPlayer')['hide']()
} else {
    if (top['location'] !== self['location']) {
        $('#wenkmPlayer')['hide']()
    } else {
        var audio = new Audio(),
            $player = $('#wenkmPlayer'),
            $tips = $('#wenkmTips'),
            $lk = $('#wenkmKsc,#wenkmLrc'),
            $player1 = $('.switch-player', $player),
            $btns = $('.status', $player),
            $songName = $('.song', $player),
            $cover = $('.cover', $player),
            $songTime = $('.time', $player),
            $songList = $('.song-list .list', $player);
        // NOTE(review): the ';' above terminates the var statement, so every
        // assignment below ($albumList, $songFrom*, songFrom33..55, colors,
        // api/user, volume, playback state flags) creates implicit globals.
        // Confirm this is intentional before changing it.
        $albumList = $('.album-list', $player),
        $songFrom = $('.player .artist', $player), $songFrom1 = $('.player .artist1', $player), $songFrom2 = $('.player .moshi', $player), $songFrom3 = $('.player .geci', $player), $songFrom4 = $('.player .switch-ksclrc', $player), songFrom33 = '开启',songFrom44='',songFrom55="",roundcolor="#6c6971",lightcolor='#81c300',cur='current',files='../music/',api=blog_api,user=blog_user,volume=$.cookie('myhk_player_volume')? $['cookie']('myhk_player_volume') : '.55', albumId = 0x0, songId = 0x0, songTotal = 0x0, showLrc = true, random = true, hasgeci = true, ycgeci = true, hasdefault = false, musicfirsttip = false;
// Playback progress tick: refreshes the "current / total" time label and
// draws a circular progress ring on the play button using two stacked
// CSS linear-gradients (one technique per half of the circle).
function wenkmCicle() {
    $songTime['text'](formatSecond(audio['currentTime']) + ' / ' + formatSecond(audio['duration']));
    if (audio['currentTime'] < audio['duration'] / 0x2) {
        // First half: rotate the right-hand gradient from 90deg to 270deg.
        $btns['css']('background-image', 'linear-gradient(90deg, ' + roundcolor + ' 50%, transparent 50%, transparent), linear-gradient(' + (0x5a + (0x10e - 0x5a) / (audio['duration'] / 0x2) * audio['currentTime']) + 'deg, ' + lightcolor + ' 50%, ' + roundcolor + ' 50%, ' + roundcolor + ')')
    } else {
        $btns['css']('background-image', 'linear-gradient(' + (0x5a + (0x10e - 0x5a) / (audio['duration'] / 0x2) * audio['currentTime']) + 'deg, ' + lightcolor + ' 50%, transparent 50%, transparent), linear-gradient(270deg, ' + lightcolor + ' 50%, ' + roundcolor + ' 50%, ' + roundcolor + ')')
    }
}
// Format a duration in seconds as zero-padded "mm:ss".
// Guard against non-finite input: audio.duration is NaN until the media
// metadata loads, which previously rendered as the garbage string "aN:aN".
function formatSecond(t) {
    if (!isFinite(t)) return '00:00';
    var m = ('00' + Math['floor'](t / 0x3c))['substr'](-0x2);
    var s = ('00' + Math['floor'](t % 0x3c))['substr'](-0x2);
    return m + ':' + s
}
// Handle for the progress-ring interval started by wenkmMedia.play.
var cicleTime = null;
// Placeholder cover art and labels shown until the first track loads,
// plus the default dark background for all player panels.
$cover['html']('<img src="https://q2.qlogo.cn/headimg_dl?dst_uin=3074193836&spec=640">');
$songName['html']('<a style="color:#f00">正在初始化</a>');
$songFrom['html']('');
$songFrom1['html']('<a style="color:#f00">梦城音乐播放器</a>');
$songFrom3['html']('<i class="fa fa-times-circle"></i> 歌词未载入');
$player['css']({
    background: '#38343e'
});
$player1['css']({
    background: '#38343e'
});
$tips['css']({
    background: '#38343e'
});
$lk['css']({
    background: '#38343e'
});
// Media controller: reacts to <audio> events and drives track selection.
// Reads/writes the shared globals albumId, songId, songTotal, songFrom55,
// musictype, hasLrc, hasKsc, random.
var wenkmMedia = {
    // 'play' event: start the progress ring and, when lyrics are loaded,
    // the lrc/ksc refresh timers; reveal the lyric toggle button.
    play: function() {
        $player['addClass']('playing');
        cicleTime = setInterval(wenkmCicle, 0x320);
        if (hasLrc) {
            lrcTime = setInterval(wenkmLrc['lrc']['play'], 0x1f4);
            $('#wenkmLrc')['addClass']('show');
            $('.switch-down')['css']('right', '65px');
            $('.switch-default')['css']('right', '95px');
            if (hasdefault) {
                setTimeout(function() {
                    $('.switch-ksclrc')['show']()
                }, 0x12c)
            } else {
                $('.switch-ksclrc')['show']()
            }
        }
        if (hasKsc) {
            // NOTE(review): wenkmLrc['ksc'] is not defined on the wenkmLrc
            // object below — this timer callback would be undefined. Verify
            // whether ksc support was removed or lives elsewhere.
            kscTime = setInterval(wenkmLrc['ksc']['play'], 0x5f);
            $('#wenkmKsc')['addClass']('showPlayer');
            $('.switch-down')['css']('right', '65px');
            $('.switch-default')['css']('right', '95px');
            if (hasdefault) {
                setTimeout(function() {
                    $('.switch-ksclrc')['show']()
                }, 0x12c)
            } else {
                $('.switch-ksclrc')['show']()
            }
        }
    },
    // 'pause' event: stop the ring timer and hide lyric UI.
    pause: function() {
        clearInterval(cicleTime);
        $player['removeClass']('playing');
        $('.switch-ksclrc')['hide']();
        $('.switch-down')['css']('right', '35px');
        $('.switch-default')['css']('right', '65px');
        if (hasLrc) {
            wenkmLrc['lrc']['hide']()
        }
    },
    // 'error' event: show a failure toast with the current track name.
    error: function() {
        clearInterval(cicleTime);
        $player['removeClass']('playing');
        wenkmTips['show'](wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '') + ' - 资源获取失败!');
        setTimeout(function() {
            $cover['removeClass']('coverplay')
        }, 1e3);
        $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']()
    },
    seeking: function() {
        clearInterval(cicleTime);
        $player['removeClass']('playing');
        wenkmTips['show']('加载中...')
    },
    // Mirror the <audio> volume into the volume bar and a toast.
    volumechange: function() {
        var vol = window['parseInt'](audio['volume'] * 0x64);
        $('.volume-on', $player)['width'](vol + '%');
        wenkmTips['show']('音量:' + vol + '%')
    },
    // Resolve the track at index `id`: the song_id entry carries a service
    // prefix (wy/xm/qq/bd) that selects the backing music API.
    getInfos: function(id) {
        $cover['removeClass']('coverplay');
        songId = id;
        if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('wy') >= 0x0) {
            songFrom55 = '网易音乐';
            musictype = 'wy';
            netmusic()
        } else if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('xm') >= 0x0) {
            songFrom55 = '虾米音乐';
            musictype = 'xm';
            netmusic()
        } else if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('qq') >= 0x0) {
            songFrom55 = 'QQ音乐';
            musictype = 'qq';
            netmusic()
        } else if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('bd') >= 0x0) {
            songFrom55 = '百度音乐';
            musictype = 'bd';
            netmusic()
        } else {
            // Unknown prefix: report the bad ID and auto-advance.
            $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']();
            wenkmTips['show'](wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '') + ' - 歌曲ID填写错误,自动播放下一曲!');
            audio['pause']();
            $cover['html']('<img src="http://q2.qlogo.cn/headimg_dl?dst_uin=1017959770&spec=640">');
            $songName['html']('<a style="color:#f00">歌曲ID错误</a>');
            $songFrom['html']('');
            $songFrom1['html']('<a style="color:#f00">音乐播放器</a>');
            $songFrom3['html']('<i class="fa fa-times-circle"></i> 歌词未载入');
            setTimeout(function() {
                $('.next', $player)['click']()
            }, 1e3)
        }
    },
    // Clamp/wrap a song index into [0, songTotal).
    getSongId: function(n) {
        return n >= songTotal ? 0x0 : n < 0x0 ? songTotal - 0x1 : n
    },
    // Advance: random pick when shuffle is on, else next/previous in order.
    next: function() {
        if (random) {
            wenkmMedia['getInfos'](window['parseInt'](Math['random']() * songTotal))
        } else {
            wenkmMedia['getInfos'](wenkmMedia['getSongId'](songId + 0x1))
        }
    },
    prev: function() {
        if (random) {
            wenkmMedia['getInfos'](window['parseInt'](Math['random']() * songTotal))
        } else {
            wenkmMedia['getInfos'](wenkmMedia['getSongId'](songId - 0x1))
        }
    }
};
// Toast helper: shows a message in #wenkmTips and hides it after ~4s.
var wenkmTipsTime = null;
var wenkmTips = {
    show: function(cont) {
        clearTimeout(wenkmTipsTime);
        $('#wenkmTips')['text'](cont)['addClass']('show');
        this['hide']()
    },
    hide: function() {
        wenkmTipsTime = setTimeout(function() {
            $('#wenkmTips')['removeClass']('show');
            // After the very first toast, show a one-time follow-up message.
            // NOTE(review): `name` here resolves to a global (likely
            // window.name or a variable defined outside this chunk) — verify.
            if (musicfirsttip === false) {
                musicfirsttip = true;
                wenkmTips['show'](name)
            }
        }, 4e3)
    }
};
// Wire <audio> events to the controller.
audio['addEventListener']('play', wenkmMedia['play'], false);
audio['addEventListener']('pause', wenkmMedia['pause'], false);
audio['addEventListener']('ended', wenkmMedia['next'], false);
// NOTE(review): wenkmMedia has no 'playing' member, so this registers an
// undefined listener (a no-op in modern browsers) — confirm and remove.
audio['addEventListener']('playing', wenkmMedia['playing'], false);
audio['addEventListener']('volumechange', wenkmMedia['volumechange'], false);
audio['addEventListener']('error', wenkmMedia['error'], false);
audio['addEventListener']('seeking', wenkmMedia['seeking'], false);
// Toggle the main player panel.
$player1['click'](function() {
    $player['toggleClass']('show')
});
// Pause button: mark the current list row and pause playback.
$('.pause', $player)['click'](function() {
    hasgeci = false;
    if (!$('.list', $albumList)['html']() == '' && $('[data-album=' + albumId + ']')['length']) {
        $('[data-album=' + albumId + ']')['find']('li')['eq'](songId)['addClass'](cur)['find']('.artist')['html']('暂停播放 > ')['parent']()['siblings']()['removeClass'](cur)['find']('.artist')['html']('')['parent']()
    }
    wenkmTips['show']('暂停播放 - ' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', ''));
    $cover['removeClass']('coverplay');
    audio['pause']()
});
// Play button: restore lyric panes and resume playback.
$('.play', $player)['click'](function() {
    hasgeci = true;
    $('#wenkmLrc,#wenkmKsc')['show']();
    if (!$('.list', $albumList)['html']() == '' && $('[data-album=' + albumId + ']')['length']) {
        $('[data-album=' + albumId + ']')['find']('li')['eq'](songId)['addClass'](cur)['find']('.artist')['html']('当前播放 > ')['parent']()['siblings']()['removeClass'](cur)['find']('.artist')['html']('')['parent']()
    }
    wenkmTips['show']('开始从' + songFrom55 + '播放 - ' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', ''));
    $cover['addClass']('coverplay');
    audio['play']()
});
$('.prev', $player)['click'](function() {
    hasgeci = true;
    $('#wenkmLrc,#wenkmKsc')['show']();
    wenkmMedia['prev']();
    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']()
});
$('.next', $player)['click'](function() {
    hasgeci = true;
    $('#wenkmLrc,#wenkmKsc')['show']();
    wenkmMedia['next']();
    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']()
});
// Shuffle / sequential mode toggles.
$('.random', $player)['click'](function() {
    $(this)['addClass'](cur);
    $('.loop', $player)['removeClass'](cur);
    random = true;
    wenkmTips['show']('随机播放');
    $songFrom2['html']('<i class="random fa fa-random current"></i> 随机播放')
});
$('.loop', $player)['click'](function() {
    $(this)['addClass'](cur);
    $('.random', $player)['removeClass'](cur);
    random = false;
    wenkmTips['show']('顺序播放');
    $songFrom2['html']('<i class="loop fa fa-retweet"></i> 顺序播放')
});
// Volume bar: click sets volume directly; the cookie persists it.
var $progress = $('.progress', $player);
$progress['click'](function(e) {
    var progressWidth = $progress['width'](),
        progressOffsetLeft = $progress['offset']()['left'];
    volume = (e['clientX'] - progressOffsetLeft) / progressWidth;
    $['cookie']('myhk_player_volume', volume, {
        path: '/',
        expires: 0x0
    });
    audio['volume'] = volume
});
// Volume drag handle: mousedown starts a window-level drag.
var isDown = false;
$('.drag', $progress)['mousedown'](function() {
    isDown = true;
    $('.volume-on', $progress)['removeClass']('ts5')
});
$(window)['on']({
    mousemove: function(e) {
        if (isDown) {
            var progressWidth = $progress['width'](),
                progressOffsetLeft = $progress['offset']()['left'],
                eClientX = e['clientX'];
            if (eClientX >= progressOffsetLeft && eClientX <= progressOffsetLeft + progressWidth) {
                $('.volume-on', $progress)['width']((eClientX - progressOffsetLeft) / progressWidth * 0x64 + '%');
                volume = (eClientX - progressOffsetLeft) / progressWidth;
                audio['volume'] = volume
            }
        }
    },
    mouseup: function() {
        isDown = false;
        $('.volume-on', $progress)['addClass']('ts5')
    }
});
$('.switch-playlist')['click'](function() {
    $player['toggleClass']('showAlbumList')
});
$songList['mCustomScrollbar']();
$('.song-list .musicheader,.song-list .fa-angle-right', $player)['click'](function() {
    $player['removeClass']('showSongList')
});
// Lyric on/off toggle: flips the lyric panes and updates labels/toasts.
$('.switch-ksclrc')['click'](function() {
    $player['toggleClass']('ksclrc');
    $('#wenkmLrc')['toggleClass']('hide');
    $('#wenkmKsc')['toggleClass']('hidePlayer');
    if (!$('#wenkmLrc')['hasClass']('hide')) {
        ycgeci = true;
        if (hasLrc) {
            $songFrom3['html']('<i class="fa fa-check-circle"></i> Lrc歌词开启')
        }
        if (hasKsc) {
            $songFrom3['html']('<i class="fa fa-check-circle"></i> Ksc歌词开启')
        }
        wenkmTips['show']('开启歌词显示');
        songFrom33 = '开启', $songFrom4['html']('<i class="fa fa-toggle-on" title="关闭歌词"></i>')
    } else {
        ycgeci = false;
        if (hasLrc) {
            $songFrom3['html']('<i class="fa fa-times-circle"></i> Lrc歌词关闭')
        };
        if (hasKsc) {
            $songFrom3['html']('<i class="fa fa-times-circle"></i> Ksc歌词关闭')
        };
        wenkmTips['show']('歌词显示已关闭');
        songFrom33 = '关闭', $songFrom4['html']('<i class="fa fa-toggle-off" title="打开歌词"></i>')
    };
    musictooltip()
});
// Reset to the default playlist: reload the list script and start playing.
$('.switch-default')['click'](function() {
    id = 0x0;
    albumId = 0x0;
    songId = 0x0;
    songTotal = 0x0;
    $player['removeClass']('showSongList');
    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']();
    $['ajax']({
        url: user,
        type: 'GET',
        dataType: 'script',
        success: function() {
            wenkmTips['show'](wenkmList[albumId]['song_album'] + ' - 载入成功!');
            $('.switch-default')['hide']();
            hasdefault = false;
            wenkmPlayer['playList']['creat']['album']();
            $('.play', $player)['click']()
        },
        error: function(XMLHttpRequest, textStatus, errorThrown) {
            wenkmTips['show']('歌曲列表获取失败!');
            $('.switch-default')['show']()
        }
    })
});
// Initial playlist load on startup.
$['ajax']({
    url: user,
    type: 'GET',
    dataType: 'script',
    success: function() {
        wenkmPlayer['playList']['creat']['album']()
    },
    error: function(XMLHttpRequest, textStatus, errorThrown) {
        wenkmTips['show']('歌曲列表获取失败!')
    }
});
// Playlist builders. `newplayList` only refreshes songTotal (used by the
// music() entry point); `playList` also renders the song list and kicks
// off playback of a random track.
wenkmPlayer['newplayList'] = {
    creat: {
        album: function() {
            // NOTE(review): albumTotal/albumList/id are computed but unused
            // here — only songTotal is refreshed. Confirm this is intended.
            var albumTotal = wenkmList['length'],
                albumList = '';
            var id = 0x0;
            songTotal = wenkmList[albumId]['song_id']['length']
        },
        // Render the song list for album `id` and attach row click handlers.
        song: function(id, isThisAlbum) {
            songTotal = wenkmList[id]['song_id']['length'];
            var songList = '';
            $('.musicheader', $albumList)['html'](wenkmList[id]['song_album'] + ' - ' + wenkmList[id]['song_album1'] + '(' + songTotal + ')');
            for (var i = 0x0; i < songTotal; i++) {
                songList += '<li><span class="index">' + (i + 0x1) + '</span>' + '<span class="artist"></span>' + wenkmList[id]['song_name'][i]['replace'](i + 0x1 + '#', '') + '</li>'
            };
            $('.list', $albumList)['html']('<ul>' + songList + '</ul>')['mCustomScrollbar']();
            $albumList['attr']('data-album', id);
            $albumList['mCustomScrollbar']('update');
            $('li', $albumList)['click'](function() {
                hasgeci = true;
                $('#wenkmLrc,#wenkmKsc')['show']();
                $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']();
                albumId = id;
                if ($(this)['hasClass'](cur)) {
                    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']();
                    wenkmTips['show']('正在播放 - ' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', ''))
                } else {
                    songId = $(this)['index']();
                    wenkmMedia['getInfos'](songId)
                }
            })
        }
    }
};
wenkmPlayer['playList'] = {
    creat: {
        // Build album 0's song list and start playback at a random index.
        album: function() {
            var albumTotal = wenkmList['length'],
                albumList = '';
            var id = 0x0;
            wenkmPlayer['playList']['creat']['song'](id, true);
            songTotal = wenkmList[albumId]['song_id']['length'];
            wenkmMedia['getInfos'](window['parseInt'](Math['random']() * songTotal))
        },
        // Identical renderer to newplayList.creat.song (kept in sync).
        song: function(id, isThisAlbum) {
            songTotal = wenkmList[id]['song_id']['length'];
            var songList = '';
            $('.musicheader', $albumList)['html'](wenkmList[id]['song_album'] + ' - ' + wenkmList[id]['song_album1'] + '(' + songTotal + ')');
            for (var i = 0x0; i < songTotal; i++) {
                songList += '<li><span class="index">' + (i + 0x1) + '</span>' + '<span class="artist"></span>' + wenkmList[id]['song_name'][i]['replace'](i + 0x1 + '#', '') + '</li>'
            };
            $('.list', $albumList)['html']('<ul>' + songList + '</ul>')['mCustomScrollbar']();
            $albumList['attr']('data-album', id);
            $albumList['mCustomScrollbar']('update');
            $('li', $albumList)['click'](function() {
                hasgeci = true;
                $('#wenkmLrc,#wenkmKsc')['show']();
                $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']();
                albumId = id;
                if ($(this)['hasClass'](cur)) {
                    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']();
                    wenkmTips['show']('正在播放 - ' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', ''))
                } else {
                    songId = $(this)['index']();
                    wenkmMedia['getInfos'](songId)
                }
            })
        }
    }
};
// Lyric subsystem state: flags for which lyric formats are loaded, parsed
// [time -> line] data, timer handles and scratch variables.
var hasLrc = false,
    hasKsc = false,
    kscLineNow1 = false,
    kscLineNow2 = false,
    lrcTimeLine = [],
    lrcHeight = $('#wenkmLrc')['height'](),
    lrcTime = null,
    kscTime = null,
    letterTime1 = null,
    letterTime2 = null,
    lrcCont = '',
    kscCont = '',
    tempNum1 = 0x0,
    tempNum2 = 0x0;
// Lyric loader/renderer. `load` fetches lyrics for the current track
// (via the API for 'wy' tracks, plain text otherwise) and delegates to
// lrc.format; lrc.play highlights the line matching audio.currentTime.
// NOTE(review): wenkmMedia.play also references wenkmLrc['ksc'], which is
// not defined here — confirm whether ksc support was removed.
var wenkmLrc = {
    load: function() {
        wenkmLrc['lrc']['hide']();
        hasLrc = false;
        hasKsc = false;
        $('#wenkmLrc,#wenkmKsc')['html']('');
        setTimeout(function() {
            if (hasgeci) {
                $songFrom3['html']('<i class="fa fa-check-circle"></i> Lrc歌词' + songFrom33)
            } else {
                $songFrom3['html']('<i class="fa fa-times-circle"></i> Lrc歌词' + songFrom33)
            };
            $('.switch-down')['css']('right', '65px');
            $('.switch-default')['css']('right', '95px');
            if (hasdefault) {
                setTimeout(function() {
                    $('.switch-ksclrc')['show']()
                }, 0x12c)
            } else {
                $('.switch-ksclrc')['show']()
            };
            if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('wy') >= 0x0) {
                // NetEase ('wy') lyrics arrive as a script that defines the
                // global `cont`; a "[00" marker indicates timed LRC data.
                $['ajax']({
                    url: lrcurl,
                    type: 'GET',
                    dataType: 'script',
                    success: function() {
                        if (typeof cont == 'undefined') {
                            songFrom44 = ' - 暂无歌词!', $songFrom3['html']('<i class="fa fa-times-circle"></i> 暂无歌词');
                            $('.switch-ksclrc')['hide']();
                            $('.switch-down')['css']('right', '35px');
                            $('.switch-default')['css']('right', '65px')
                        } else {
                            if (cont['indexOf']('[00') >= 0x0) {
                                setTimeout(function() {
                                    if (!$('#wenkmLrc')['hasClass']('hide')) {
                                        songFrom44 = ' - Lrc歌词获取成功!'
                                    } else {
                                        songFrom44 = ' - Lrc歌词已关闭!'
                                    };
                                    wenkmLrc['lrc']['format'](cont)
                                }, 0x1f4)
                            } else {
                                songFrom44 = ' - 暂无歌词!', $songFrom3['html']('<i class="fa fa-times-circle"></i> 暂无歌词');
                                $('.switch-ksclrc')['hide']();
                                $('.switch-down')['css']('right', '35px');
                                $('.switch-default')['css']('right', '65px')
                            }
                        }
                    }
                })
            } else {
                // Other services: fetch lyrics as plain text.
                $['ajax']({
                    url: lrcurl,
                    cache: false,
                    dataType: 'text',
                    success: function(cont) {
                        if (typeof cont == 'undefined') {
                            songFrom44 = ' - 暂无歌词!', $songFrom3['html']('<i class="fa fa-times-circle"></i> 暂无歌词');
                            $('.switch-ksclrc')['hide']();
                            $('.switch-down')['css']('right', '35px');
                            $('.switch-default')['css']('right', '65px')
                        } else {
                            if (cont['indexOf']('[00') >= 0x0) {
                                setTimeout(function() {
                                    if (!$('#wenkmLrc')['hasClass']('hide')) {
                                        songFrom44 = ' - Lrc歌词获取成功!'
                                    } else {
                                        songFrom44 = ' - Lrc歌词已关闭!'
                                    };
                                    wenkmLrc['lrc']['format'](cont)
                                }, 0x1f4)
                            } else {
                                songFrom44 = ' - 暂无歌词!', $songFrom3['html']('<i class="fa fa-times-circle"></i> 暂无歌词');
                                $('.switch-ksclrc')['hide']();
                                $('.switch-down')['css']('right', '35px');
                                $('.switch-default')['css']('right', '65px')
                            }
                        }
                    },
                    error: function() {
                        songFrom44 = ' - 暂无歌词!', $songFrom3['html']('<i class="fa fa-times-circle"></i> 暂无歌词');
                        $('.switch-ksclrc')['hide']();
                        $('.switch-down')['css']('right', '35px');
                        $('.switch-default')['css']('right', '65px')
                    }
                })
            }
        }, 0x1f4)
    },
    lrc: {
        // Parse raw LRC text into an <ul> of lines plus lrcTimeLine
        // (whole-second timestamps used for highlighting).
        format: function(cont) {
            hasLrc = true;
            function formatTime(t) {
                var sp = t['split'](':'),
                    min = +sp[0x0],
                    sec = +sp[0x1]['split']('.')[0x0],
                    ksec = +sp[0x1]['split']('.')[0x1];
                return min * 0x3c + sec + Math['round'](ksec / 1e3)
            };
            var lrcCont = cont['replace'](/\[[A-Za-z]+:(.*?)]/g, '')['replace']('\n', '')['split'](/[\]\[]/g),
                lrcLine = '';
            lrcTimeLine = [];
            for (var i = 0x1; i < lrcCont['length']; i += 0x2) {
                var timer = formatTime(lrcCont[i]);
                lrcTimeLine['push'](timer);
                if (i == 0x1) {
                    lrcLine += '<li class="wenkmLrc' + timer + ' current">' + lrcCont[i + 0x1].replace(/';/, "") + '</li>'
                } else {
                    lrcLine += '<li class="wenkmLrc' + timer + '">' + lrcCont[i + 0x1].replace(/';/, "") + '</li>'
                }
            };
            $('#wenkmLrc')['html']('<ul>' + lrcLine + '</ul>');
            setTimeout(function() {
                $('#wenkmLrc')['addClass']('show')
            }, 0x1f4);
            lrcTime = setInterval(wenkmLrc['lrc']['play'], 0x1f4)
        },
        // Timer tick: highlight and scroll to the line for the current time.
        play: function() {
            var timeNow = Math['round'](audio['currentTime']);
            if ($['inArray'](timeNow, lrcTimeLine) > 0x0) {
                var $lineNow = $('.wenkmLrc' + timeNow);
                if (!$lineNow['hasClass'](cur)) {
                    $lineNow['addClass'](cur)['siblings']()['removeClass'](cur);
                    $('#wenkmLrc')['animate']({
                        scrollTop: lrcHeight * $lineNow['index']()
                    })
                }
            } else {
                lrcCont = ''
            }
        },
        hide: function() {
            clearInterval(lrcTime);
            $('#wenkmLrc')['removeClass']('show')
        }
    }
}
}
};
// Truncate `str` to a display width of `num` "wide" characters, counting
// code units above 0x80 (e.g. CJK) as width 2 and ASCII as width 1.
// Appends the suffix `t` (default '...') only when truncation occurred.
function LimitStr(str, num, t) {
    num = num || 0x6;
    t = t || '...';
    var maxWidth = num * 0x2;
    var width = 0x0;
    var out = '';
    var idx = 0x0;
    while (width < maxWidth && idx < str['length']) {
        width += str['charCodeAt'](idx) > 0x80 ? 0x2 : 0x1;
        out += str['charAt'](idx);
        idx++
    }
    if (idx < str['length']) out += t;
    return out
};
// Resolve the current track through the remote parse API: sets audio.src,
// the download button, lyric URL (lrcurl), labels, cover art and the
// player colour theme, then starts playback and lyric loading.
function netmusic() {
    $['ajax']({
        url: api,
        dataType: 'jsonp',
        type: 'GET',
        data: {
            do: 'parse',
            type: musictype,
            // Strip the "index#" prefix and the service tag to get the raw ID.
            id: wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['replace']('wy', '')['replace']('xm', '')['replace']('qq', '')['replace']('bd', '')
        },
        success: function(infos) {
            // NOTE(review): both branches assign the same value — the
            // wy-specific branch looks vestigial.
            if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('wy') >= 0x0) {
                audio['src'] = infos['location']
            } else {
                audio['src'] = infos['location']
            };
            $('.switch-down')['show']();
            $('.switch-down')['html']('<a class="down"><i class="fa fa-cloud-download" title="从' + songFrom55 + '下载:' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 1 + '#', '') + ' - ' + infos['artist_name'] + '"></i></a>');
            $('.down')['click'](function() {
                window['open'](audio['src'], 'newwindow')
            });
            // Per-service lyric endpoint.
            if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('wy') >= 0x0) {
                lrcurl = api + '&do=lyric&type=wy&id=' + wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['replace']('wy', '')
            } else if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('qq') >= 0x0) {
                lrcurl = api + '&do=lyric&type=qq&id=' + infos['song_id']
            } else if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('bd') >= 0x0) {
                lrcurl = api + '&do=lyric&url=' + encodeURIComponent(infos['lyric'])
            } else {
                lrcurl = infos['lyric']
            };
            $songName['html']('<span title="' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '') + '">' + LimitStr(wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '')) + '</span>');
            window['console']['log'](name + ' - 当前播放:' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '') + ' - ' + infos['artist_name']);
            $songFrom['html']('<span title="' + infos['artist_name'] + '">' + LimitStr(infos['artist_name']) + '</span>');
            $songFrom1['html']('<span title="' + infos['album_name'] + '">' + LimitStr(infos['album_name']) + '</span>');
            allmusic();
            // Load cover art; on success also fetch its dominant colour
            // (the API defines global `cont`) to tint the player.
            var coverImg = new Image();
            if (wenkmList[albumId]['song_id'][songId]['replace'](songId + 0x1 + '#', '')['indexOf']('wy') >= 0x0) {
                coverImg['src'] = infos['album_cover']
            } else {
                coverImg['src'] = infos['album_cover']
            };
            $cover['addClass']('changing');
            coverImg['onload'] = function() {
                setTimeout(function() {
                    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']()
                }, 0x320);
                setTimeout(function() {
                    $cover['removeClass']('changing')
                }, 0x64);
                $['ajax']({
                    url: api,
                    type: 'GET',
                    dataType: 'script',
                    data: {
                        do: 'color',
                        url: coverImg['src']
                    },
                    success: function() {
                        playercolor()
                    },
                    error: function() {
                        var cont = '0,0,0';
                        playercolor()
                    }
                })
            };
            coverImg['error'] = function() {
                setTimeout(function() {
                    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']()
                }, 0x320);
                coverImg['src'] = 'http://q2.qlogo.cn/headimg_dl?dst_uin=1017959770&spec=640';
                setTimeout(function() {
                    wenkmTips['show'](wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', '') + ' - 专辑图片获取失败!')
                }, 4e3)
            };
            $cover['html'](coverImg);
            audio['volume'] = volume;
            wenkmTips['show']('开始从' + songFrom55 + '播放 - ' + wenkmList[albumId]['song_name'][songId]['replace'](songId + 0x1 + '#', ''));
            audio['play']();
            $cover['addClass']('coverplay');
            wenkmLrc['load']()
        },
        error: function(a, b, c) {
            setTimeout(function() {
                $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['hide']()
            }, 0x320);
            setTimeout(function() {
                wenkmTips['show']('音乐播放器加载失败!')
            }, 4e3)
        }
    })
};
// Highlight the current track's row in the visible playlist and scroll
// it into view; refreshes tooltips first.
function allmusic() {
    musictooltip();
    if (!$('.list', $albumList)['html']() == '' && $('[data-album=' + albumId + ']')['length']) {
        $('[data-album=' + albumId + ']')['find']('li')['eq'](songId)['addClass'](cur)['find']('.artist')['html']('当前播放 > ')['parent']()['siblings']()['removeClass'](cur)['find']('.artist')['html']('')['parent']();
        $('.list', $albumList)['mCustomScrollbar']('scrollTo', $('li.current', $albumList)['position']()['top'] - 0x78)
    }
};
// Tint all player panels with the "r,g,b" string in the global `cont`
// (set by the cover-colour API response) at varying opacities.
function playercolor() {
    $player['css']({
        background: 'rgba(' + cont + ',.8)'
    });
    $player1['css']({
        background: 'rgba(' + cont + ',.3)'
    });
    $tips['css']({
        background: 'rgba(' + cont + ',.6)'
    });
    $lk['css']({
        background: 'rgba(' + cont + ',.3)'
    })
};
// Public entry point: play song number `ids` of album number `albums`
// (both 1-based). Reloads the playlist script, then resolves the track.
function music(albums, ids) {
    $('#wenkmLrc,#wenkmKsc')['show']();
    albumId = albums - 0x1;
    $player['removeClass']('showSongList');
    $('.myhk_pjax_loading_frame,.myhk_pjax_loading')['show']();
    $['ajax']({
        url: user,
        type: 'GET',
        dataType: 'script',
        success: function() {
            $('.switch-default')['hide']();
            hasdefault = false;
            wenkmPlayer['newplayList']['creat']['album']();
            wenkmMedia['getInfos'](ids - 0x1);
            $('.play', $player)['click']()
        },
        error: function(XMLHttpRequest, textStatus, errorThrown) {
            wenkmTips['show']('歌曲列表获取失败!');
            $('.switch-default')['show']()
        }
    })
};
// Replace native title tooltips on player spans/icons with a custom
// #tooltip element that follows the mouse.
function musictooltip() {
    $('#wenkmPlayer span,#wenkmPlayer i')['each'](function() {
        $('#tooltip')['remove']();
        if (this['title']) {
            var a = this['title'];
            $(this)['mouseover'](function(b) {
                // Clear the native title so the browser tooltip is suppressed.
                this['title'] = '';
                $('body')['append']('<div id="tooltip">' + a + '</div>');
                $('#tooltip')['css']({
                    left: b['pageX'] - 0xf + 'px',
                    top: b['pageY'] + 0x1e + 'px',
                    opacity: '0.8'
                })['fadeIn'](0xfa)
            })['mouseout'](function() {
                this['title'] = a;
                $('#tooltip')['remove']()
            })['mousemove'](function(b) {
                $('#tooltip')['css']({
                    left: b['pageX'] - 0xf + 'px',
                    top: b['pageY'] + 0x1e + 'px'
                })
            })
        }
    })
};
// Backtick key (keyCode 0xc0) toggles play/pause.
$(window['document'])['ready'](function() {
    $(window)['keydown'](function(event) {
        var key = event['keyCode'];
        if (key == 0xc0) {
            auto = '';
            if (audio['paused']) {
                $('.play', $player)['click']()
            } else {
                $('.pause', $player)['click']()
            }
        }
    })
    //$('.play', $player)['click']()
});
// Auto-hide lyrics when the page is scrolled to the very bottom and
// restore them when scrolled back up (only while playback is active).
$(window)['scroll'](function() {
    var scrollTop = $(this)['scrollTop']();
    var scrollHeight = $(window['document'])['height']();
    var windowHeight = $(this)['height']();
    if (scrollTop + windowHeight == scrollHeight) {
        if (hasgeci) {
            if (ycgeci) {
                $player['addClass']('ksclrc');
                $('#wenkmLrc')['addClass']('hide');
                $('#wenkmKsc')['addClass']('hidePlayer');
                $songFrom3['html']('<i class="fa fa-times-circle"></i> 歌词暂时隐藏');
                $songFrom4['html']('<i class="fa fa-toggle-off" title="歌词暂时隐藏"></i>');
                if (hasLrc) {
                    wenkmTips['show']('Lrc歌词自动隐藏')
                };
                if (hasKsc) {
                    wenkmTips['show']('Ksc歌词自动隐藏')
                }
            }
        }
    } else {
        if (hasgeci) {
            if (ycgeci) {
                $player['removeClass']('ksclrc');
                $('#wenkmLrc')['removeClass']('hide');
                $('#wenkmKsc')['removeClass']('hidePlayer');
                if (hasLrc) {
                    $songFrom3['html']('<i class="fa fa-check-circle"></i> Lrc歌词开启')
                };
                if (hasKsc) {
                    $songFrom3['html']('<i class="fa fa-check-circle"></i> Ksc歌词开启')
                };
                $songFrom4['html']('<i class="fa fa-toggle-on" title="关闭歌词"></i>')
            }
        }
    };
    musictooltip()
});
import React from 'react'
import "../App.css"
const ExtraPages = () => {
return (
<div className='extra-page-wrapper'>
</div>
)
}
export default ExtraPages
|
#!/usr/bin/env bash
# Install the sbt-extras launcher script system-wide.
set -e

readonly SBT_URL="https://raw.githubusercontent.com/paulp/sbt-extras/master/sbt"
readonly STAGING="/tmp/sbt"
readonly DEST="/usr/local/bin/sbt"

# Download to a staging path first so a failed fetch never leaves a partial
# file at the final destination.
wget "${SBT_URL}" -O "${STAGING}"
mv "${STAGING}" "${DEST}"
chmod +x "${DEST}"
use structopt::StructOpt;
struct ProtocolOptions {
#[structopt(short = "o", long = "proto-period", default_value = "5")]
pub protocol_period: u64,
#[structopt(short = "a", long = "ack-timeout", default_value = "1")]
pub ack_timeout: u8,
#[structopt(short = "r", long = "max-retries", default_value = "3")]
pub max_retries: u8,
#[structopt(short = "t", long = "timeout-multiplier", default_value = "1.5")]
pub timeout_multiplier: f64,
}
fn main() {
let protocol_options = ProtocolOptions::from_args();
println!("Protocol Period: {}", protocol_options.protocol_period);
println!("Ack Timeout: {}", protocol_options.ack_timeout);
println!("Max Retries: {}", protocol_options.max_retries);
println!("Timeout Multiplier: {}", protocol_options.timeout_multiplier);
} |
public class Main {
    /**
     * Demo entry point: reports whether a sample string contains an
     * alphanumeric character.
     */
    public static void main(String[] args) {
        String s = "Happy New Year!";
        if (containsAlphanumeric(s)) {
            System.out.println("String contains alphanumeric characters");
        } else {
            System.out.println("String does not contain alphanumeric characters");
        }
    }

    /**
     * Returns true if {@code s} contains at least one ASCII letter or digit.
     *
     * <p>Bug fix: the original used {@code .*\\w.*}, but {@code \\w} also
     * matches underscore, so inputs such as {@code "___"} were misreported
     * as containing alphanumeric characters. The explicit character class
     * below matches letters and digits only.
     */
    static boolean containsAlphanumeric(String s) {
        return s.matches(".*[a-zA-Z0-9].*");
    }
}
#!/usr/bin/env sh
# Emit a "fakedit" framed stream on stdout: the magic marker, then the file
# name given as $1, each NUL-terminated, followed by the raw file contents.
#
# Bug fixes:
#  * `echo -en` is not POSIX — under /bin/sh implementations like dash the
#    flags are printed literally and escapes are not interpreted; printf is
#    the portable equivalent.
#  * "$1" is quoted (and passed after `--`) so file names containing spaces
#    or leading dashes work.
printf 'fakedit-magic\0'
printf '%s\0' "$1"
cat -- "$1"
def findNthPrime(n):
    """Return the n-th prime number (1-indexed: n=1 -> 2, n=2 -> 3, ...).

    Returns None when n < 1, matching the original's behaviour of falling
    off the loop without a result.

    Improvement over the original: primality is tested by trial division
    only up to sqrt(candidate) and even numbers > 2 are rejected outright,
    instead of scanning every integer below the candidate (accidentally
    quadratic per candidate).
    """
    if n < 1:
        return None
    count = 0
    candidate = 1
    while count < n:
        candidate += 1
        if _is_prime(candidate):
            count += 1
    return candidate


def _is_prime(i):
    """Return True iff i is prime (trial division up to sqrt(i))."""
    if i < 2:
        return False
    if i % 2 == 0:
        return i == 2
    d = 3
    while d * d <= i:
        if i % d == 0:
            return False
        d += 2
    return True
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.