text
stringlengths 1
1.05M
|
|---|
// @flow
// Example component: renders a Styletron-styled Button as an anchor element.
import * as React from 'react';
import {Button} from 'spaceweb/button';
// Stateless component. `$as="a"` makes the Button render an <a> tag so it can
// carry href/target; it opens the Styletron docs in a new tab.
// NOTE(review): target="_blank" without rel="noopener" — confirm whether the
// Button component adds it; not changed here.
export default () => (
<React.Fragment>
<Button $as="a" href="https://styletron.org" target="_blank">
Visit the Styletron docs
</Button>
</React.Fragment>
);
|
// Doxygen-generated search index. Each entry is
// [searchKey, [displayName, [htmlTarget, flag, parentPage], ...]].
// Machine-generated — do not edit by hand; regenerate with Doxygen instead.
var searchData=
[
['embos_1798',['embOS',['../embos.html',1,'ports_rtos'],['../exa_embos.html',1,'exa_rtos']]],
['emwin_20embedded_20gui_1799',['emWin Embedded GUI',['../exa_emwin.html',1,'exa_mware']]],
['example_20applications_1800',['Example Applications',['../exa_apps.html',1,'exa']]],
['examples_1801',['Examples',['../exa.html',1,'']]],
['examples_20for_20qutest_20unit_20testing_20harness_1802',['Examples for QUTest Unit Testing Harness',['../exa_qutest.html',1,'exa']]],
['examples_20for_20third_2dparty_20middleware_1803',['Examples for Third-Party Middleware',['../exa_mware.html',1,'exa']]],
['examples_20for_20third_2dparty_20rtos_1804',['Examples for Third-Party RTOS',['../exa_rtos.html',1,'exa']]],
['examples_20for_20workstations_20_28windows_2fposix_29_1805',['Examples for Workstations (Windows/POSIX)',['../exa_os.html',1,'exa']]]
];
|
# Termux package recipe for mdp (markdown presentation tool).
TERMUX_PKG_HOMEPAGE=https://github.com/visit1985/mdp
TERMUX_PKG_DESCRIPTION="Command-line based markdown presentation tool"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER='lokesh @hax4us'
TERMUX_PKG_VERSION=1.0.15
# Source tarball is addressed by the version tag; SHA256 pins the download.
TERMUX_PKG_SRCURL=https://github.com/visit1985/mdp/archive/${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=3edc8ea1551fdf290d6bba721105e2e2c23964070ac18c13b4b8d959cdf6116f
TERMUX_PKG_DEPENDS="ncurses"
# mdp's Makefile does not support out-of-tree builds.
TERMUX_PKG_BUILD_IN_SRC=true
|
<reponame>minuk8932/Algorithm_BaekJoon
package string_handle;

import java.io.BufferedReader;
import java.io.InputStreamReader;

/**
 *
 * @author exponential-e
 * 백준 17838번: 커맨드
 *
 * @see https://www.acmicpc.net/problem/17838/
 *
 */
public class Boj17838 {
    private static final String NEW_LINE = "\n";

    /**
     * Reads T words from stdin and prints 1 per line when the word matches
     * the 7-character "command" pattern AABBABB (A != B), otherwise 0.
     */
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int cases = Integer.parseInt(br.readLine());
        StringBuilder output = new StringBuilder();

        while (cases-- > 0) {
            char[] word = br.readLine().toCharArray();
            output.append(isCommand(word) ? 1 : 0).append(NEW_LINE);
        }
        System.out.println(output.toString());
    }

    /**
     * A word is a command iff it has exactly 7 characters shaped AABBABB:
     * positions 0,1,4 share one character, positions 2,3,5,6 share another,
     * and the two characters differ.
     */
    private static boolean isCommand(char[] w) {
        if (w.length != 7) {
            return false;
        }
        boolean firstGroup = w[0] == w[1] && w[1] == w[4];
        boolean secondGroup = w[2] == w[3] && w[3] == w[5] && w[5] == w[6];
        return firstGroup && secondGroup && w[0] != w[2];
    }
}
|
<reponame>domonda/go-sqldb
package sqldb
import (
"log"
"os"
)
// Logger has a Printf method used for logging
// information that could not be returned by
// any of the package functions directly.
type Logger interface {
Printf(format string, v ...interface{})
}
// ErrLogger will be used to log errors
// that could not be returned by
// any of the package functions directly.
// Defaults to writing to stderr with the standard date/time flags.
// NOTE(review): the prefix "sqldb" has no trailing separator, so output reads
// "sqldb2006/01/02 ..." — confirm whether "sqldb: " was intended.
var ErrLogger Logger = log.New(os.Stderr, "sqldb", log.LstdFlags)
|
import dom from 'metal-dom';
import ReadingProgressTracker from '../src/ReadingProgressTracker';
// Scroll-driven integration tests for ReadingProgressTracker.
// All cases are currently disabled with it.skip — presumably because real
// window scrolling is unreliable in headless runners; confirm before
// re-enabling.
describe('ReadingProgressTracker', () => {
  var readingProgress;

  // Build three 5000px-tall content sections plus a matching link list so
  // that scrolling the window produces predictable progress percentages.
  beforeAll(() => {
    dom.enterDocument('<style id="style">body{margin:0;padding:0;}');
    dom.enterDocument('<div id="content">' +
      '<div id="content1" style="height:5000px;">Link1</div>' +
      '<div id="content2" style="height:5000px;">Link2</div>' +
      '<div id="content3" style="height:5000px;">Link3</div></div>'
    );
    dom.enterDocument('<ul id="links">' +
      '<li><a id="link1" href="#content1">link1</a></li>' +
      '<li><a id="link2" href="#content2">link2</a></li>' +
      '<li><a id="link3" href="#content3">link3</a></li></ul>'
    );
  });

  // Dispose the tracker between tests so scroll listeners do not leak.
  afterEach(() => {
    if (readingProgress) {
      readingProgress.dispose();
    }
  });

  afterAll(() => {
    document.body.innerHTML = '';
  });

  // Progress and activeIndex should follow the scroll position through the
  // first content section.
  it.skip('should update progress while scrolling', (done) => {
    readingProgress = new ReadingProgressTracker({
      element: '#links'
    });
    dom.once(document, 'scroll', () => {
      expect(0).toBe(readingProgress.activeIndex);
      expect(20).toBe(readingProgress.progress);
      dom.once(document, 'scroll', () => {
        expect(0).toBe(readingProgress.activeIndex);
        expect(60).toBe(readingProgress.progress);
        done();
      });
      window.scrollTo(0, 3000);
    });
    window.scrollTo(0, 1000);
  });

  // Only the currently-active link should carry the data-reading-progress
  // attribute; it moves from link 0 to link 1 as the user scrolls past the
  // first section.
  it.skip('should set "data-reading-progress" to the progress percentage', (done) => {
    readingProgress = new ReadingProgressTracker({
      element: '#links'
    });
    var contents = document.querySelectorAll('#links a');
    dom.once(document, 'scroll', () => {
      expect(0).toBe(readingProgress.activeIndex);
      expect('20').toBe(contents.item(0).getAttribute('data-reading-progress'));
      expect(!contents.item(1).hasAttribute('data-reading-progress')).toBeTruthy();
      expect(!contents.item(2).hasAttribute('data-reading-progress')).toBeTruthy();
      dom.once(document, 'scroll', () => {
        expect(1).toBe(readingProgress.activeIndex);
        expect(!contents.item(0).hasAttribute('data-reading-progress')).toBeTruthy();
        expect('60').toBe(contents.item(1).getAttribute('data-reading-progress'));
        expect(!contents.item(2).hasAttribute('data-reading-progress')).toBeTruthy();
        done();
      });
      window.scrollTo(0, 8000);
    });
    window.scrollTo(0, 1000);
  });

  // completedClass is applied when a section has been fully read and removed
  // again when the user scrolls back up past it (last nested step scrolls
  // from 15000 back to 12000).
  it.skip('should mark as complete/incomplete while scrolling', (done) => {
    readingProgress = new ReadingProgressTracker({
      element: '#links'
    });
    var contents = document.querySelectorAll('#links a');
    dom.once(document, 'scroll', () => {
      expect(!dom.hasClass(contents.item(0), readingProgress.completedClass)).toBeTruthy();
      expect(!dom.hasClass(contents.item(1), readingProgress.completedClass)).toBeTruthy();
      expect(!dom.hasClass(contents.item(2), readingProgress.completedClass)).toBeTruthy();
      dom.once(document, 'scroll', () => {
        expect(dom.hasClass(contents.item(0), readingProgress.completedClass)).toBeTruthy();
        expect(!dom.hasClass(contents.item(1), readingProgress.completedClass)).toBeTruthy();
        expect(!dom.hasClass(contents.item(2), readingProgress.completedClass)).toBeTruthy();
        dom.once(document, 'scroll', () => {
          expect(dom.hasClass(contents.item(0), readingProgress.completedClass)).toBeTruthy();
          expect(dom.hasClass(contents.item(1), readingProgress.completedClass)).toBeTruthy();
          expect(dom.hasClass(contents.item(2), readingProgress.completedClass)).toBeTruthy();
          dom.once(document, 'scroll', () => {
            expect(dom.hasClass(contents.item(0), readingProgress.completedClass)).toBeTruthy();
            expect(dom.hasClass(contents.item(1), readingProgress.completedClass)).toBeTruthy();
            expect(!dom.hasClass(contents.item(2), readingProgress.completedClass)).toBeTruthy();
            done();
          });
          window.scrollTo(0, 12000);
        });
        window.scrollTo(0, 15000);
      });
      window.scrollTo(0, 8000);
    });
    window.scrollTo(0, 1000);
  });

  // With extra top margin no section is active near the top of the page, so
  // no link should receive a progress attribute.
  it.skip('should not set progress on any link if none is active', (done) => {
    dom.enterDocument('<style id="style">body{ margin-top: 100px; }');
    readingProgress = new ReadingProgressTracker({
      element: '#links'
    });
    var links = document.querySelectorAll('#links a');
    dom.once(document, 'scroll', () => {
      expect(-1).toBe(readingProgress.activeIndex);
      expect(!links.item(0).hasAttribute('data-reading-progress')).toBeTruthy();
      expect(!links.item(1).hasAttribute('data-reading-progress')).toBeTruthy();
      expect(!links.item(2).hasAttribute('data-reading-progress')).toBeTruthy();
      done();
    });
    window.scrollTo(0, 50);
  });
});
|
<filename>src/oatpp/web/client/RequestExecutor.cpp
/***************************************************************************
*
* Project _____ __ ____ _ _
* ( _ ) /__\ (_ _)_| |_ _| |_
* )(_)( /(__)\ )( (_ _)(_ _)
* (_____)(__)(__)(__) |_| |_|
*
*
* Copyright 2018-present, <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
#include "RequestExecutor.hpp"
#include <thread>
#include <chrono>
namespace oatpp { namespace web { namespace client {
// Error describing a failed request attempt.
// NOTE(review): m_message stores the raw `const char*` passed in; the caller
// must guarantee the string outlives this error object — confirm at call
// sites.
RequestExecutor::RequestExecutionError::RequestExecutionError(v_int32 errorCode, const char* message, v_int32 readErrorCode)
  : std::runtime_error(message)
  , m_errorCode(errorCode)
  , m_message(message)
  , m_readErrorCode(readErrorCode)
{}

// Code identifying which stage of request execution failed.
v_int32 RequestExecutor::RequestExecutionError::getErrorCode() const {
  return m_errorCode;
}

// Human-readable message supplied at construction time.
const char* RequestExecutor::RequestExecutionError::getMessage() const {
  return m_message;
}

// Auxiliary read-error code supplied at construction time.
v_int32 RequestExecutor::RequestExecutionError::getReadErrorCode() const {
  return m_readErrorCode;
}

// An executor may be created without a retry policy (nullptr), in which case
// execute() performs a single attempt.
RequestExecutor::RequestExecutor(const std::shared_ptr<RetryPolicy>& retryPolicy)
  : m_retryPolicy(retryPolicy)
{}
// Execute an HTTP request, optionally reusing a caller-supplied connection.
// Without a retry policy a single attempt is made; with one, attempts repeat
// until the policy declines (retryOnResponse / canRetry), invalidating the
// connection and waiting between attempts.
// Returns the response, or nullptr when retries are exhausted after an
// exception (see the loop's `break`).
std::shared_ptr<RequestExecutor::Response> RequestExecutor::execute(
  const String& method,
  const String& path,
  const Headers& headers,
  const std::shared_ptr<Body>& body,
  const std::shared_ptr<ConnectionHandle>& connectionHandle
) {

  if(!m_retryPolicy) {
    // No policy: single attempt, acquiring a connection only when the caller
    // did not provide one.
    auto ch = connectionHandle;
    if (!ch) {
      ch = getConnection();
    }
    return executeOnce(method, path, headers, body, ch);
  } else {

    RetryPolicy::Context context;
    auto ch = connectionHandle;

    while(true) {

      context.attempt ++;

      try {
        if (!ch) {
          ch = getConnection();
        }
        auto response = executeOnce(method, path, headers, body, ch);
        // Return unless the policy both wants a retry for this status code
        // and still has attempts left.
        if(!m_retryPolicy->retryOnResponse(response->getStatusCode(), context) || !m_retryPolicy->canRetry(context)) {
          return response;
        }
      } catch (...) {
        // On any exception: give up (break -> nullptr) once the policy says
        // no more retries; otherwise fall through to the retry path below.
        if(!m_retryPolicy->canRetry(context)) {
          break;
        }
      }

      // Retrying: drop the (possibly broken) connection so the next attempt
      // acquires a fresh one.
      invalidateConnection(ch);
      ch.reset();

      // Sleep until the policy's wait interval has fully elapsed; the inner
      // loop compensates for sleep_for waking early.
      v_int64 waitMicro = m_retryPolicy->waitForMicroseconds(context);
      v_int64 tick0 = oatpp::base::Environment::getMicroTickCount();
      v_int64 tick = tick0;
      while(tick < tick0 + waitMicro) {
        std::this_thread::sleep_for(std::chrono::microseconds(tick0 + waitMicro - tick));
        tick = oatpp::base::Environment::getMicroTickCount();
      }

    }

  }

  // Reached only when an exception exhausted the retry budget.
  return nullptr;

}
// Asynchronous counterpart of execute(): runs the request inside a coroutine
// so waiting (connection acquisition, retry back-off) never blocks a thread.
// The retry decisions mirror the synchronous path: retryOnResponse/canRetry
// for responses, canRetry for errors, invalidating the connection before
// each retry.
oatpp::async::CoroutineStarterForResult<const std::shared_ptr<RequestExecutor::Response>&>
RequestExecutor::executeAsync(
  const String& method,
  const String& path,
  const Headers& headers,
  const std::shared_ptr<Body>& body,
  const std::shared_ptr<ConnectionHandle>& connectionHandle
) {

  // Coroutine state machine: act -> [onConnection] -> execute -> onResponse
  // -> (_return | retry -> act ...). handleError may also route to retry.
  class ExecutorCoroutine : public oatpp::async::CoroutineWithResult<ExecutorCoroutine, const std::shared_ptr<RequestExecutor::Response>&> {
  private:
    RequestExecutor* m_this;
    String m_method;
    String m_path;
    Headers m_headers;
    std::shared_ptr<Body> m_body;
    std::shared_ptr<ConnectionHandle> m_connectionHandle;
    RetryPolicy::Context m_context;
  public:

    ExecutorCoroutine(RequestExecutor* _this,
                      const String& method,
                      const String& path,
                      const Headers& headers,
                      const std::shared_ptr<Body>& body,
                      const std::shared_ptr<ConnectionHandle>& connectionHandle)
      : m_this(_this)
      , m_method(method)
      , m_path(path)
      , m_headers(headers)
      , m_body(body)
      , m_connectionHandle(connectionHandle)
    {}

    // Entry point: obtain a connection asynchronously unless one is already
    // held (either caller-supplied or surviving from a previous attempt).
    Action act() override {
      if(!m_connectionHandle) {
        return m_this->getConnectionAsync().callbackTo(&ExecutorCoroutine::onConnection);
      }
      return yieldTo(&ExecutorCoroutine::execute);
    }

    Action onConnection(const std::shared_ptr<ConnectionHandle>& connectionHandle) {
      m_connectionHandle = connectionHandle;
      return yieldTo(&ExecutorCoroutine::execute);
    }

    // One attempt; the attempt counter drives the retry policy's decisions.
    Action execute() {
      m_context.attempt ++;
      return m_this->executeOnceAsync(m_method, m_path, m_headers, m_body, m_connectionHandle).callbackTo(&ExecutorCoroutine::onResponse);
    }

    // Retry only when a policy exists, it flags this status code, and the
    // attempt budget is not exhausted; otherwise finish with the response.
    Action onResponse(const std::shared_ptr<RequestExecutor::Response>& response) {
      if( m_this->m_retryPolicy &&
          m_this->m_retryPolicy->retryOnResponse(response->getStatusCode(), m_context) &&
          m_this->m_retryPolicy->canRetry(m_context)
      ) {
        return yieldTo(&ExecutorCoroutine::retry);
      }
      return _return(response);
    }

    // Drop the current connection and wait the policy's back-off interval
    // before restarting from act() (which will acquire a new connection).
    Action retry() {

      if(m_connectionHandle) {
        m_this->invalidateConnection(m_connectionHandle);
        m_connectionHandle.reset();
      }

      return waitFor(std::chrono::microseconds(m_this->m_retryPolicy->waitForMicroseconds(m_context))).next(yieldTo(&ExecutorCoroutine::act));

    }

    // Errors retry under the same budget; otherwise the connection is
    // invalidated and the error is propagated to the caller.
    Action handleError(Error* error) override {

      if(m_this->m_retryPolicy && m_this->m_retryPolicy->canRetry(m_context)) {
        return yieldTo(&ExecutorCoroutine::retry);
      }

      if(m_connectionHandle) {
        m_this->invalidateConnection(m_connectionHandle);
        m_connectionHandle.reset();
      }

      return error;
    }

  };

  return ExecutorCoroutine::startForResult(this, method, path, headers, body, connectionHandle);

}
}}}
|
import pandas as pd

# Load the company records from disk.
# NOTE(review): assumes company_data.csv has 'name', 'revenue' and 'profit'
# columns — confirm against the data source.
df = pd.read_csv('company_data.csv')

# Pick the five highest-revenue companies.
top_5 = df.sort_values(by='revenue', ascending=False).head(5)

# Emit a small plain-text report, one line per company.
print("Top 5 Companies by Total Revenue")
print("-------------------------------")
for _, company in top_5.iterrows():
    print(f"Company name: {company['name']}, Total Revenue: {company['revenue']:.2f}, Total Profit: {company['profit']:.2f}")
|
<html>
<head>
<title>Book List Page</title>
</head>
<body>
<h1>Book List</h1>
<!-- Static, hand-maintained list: "Title - Author", one <li> per book. -->
<ul>
<li>The Catcher in the Rye - J.D. Salinger</li>
<li>Harry Potter and the Sorcerer's Stone - J.K. Rowling</li>
<li>To Kill a Mockingbird - Harper Lee</li>
<li>The Great Gatsby - F. Scott Fitzgerald</li>
<li>The Hunger Games - Suzanne Collins</li>
<li>Pride and Prejudice - Jane Austen</li>
</ul>
</body>
</html>
|
#!/bin/bash
# Install the LoRa gateway's web admin interface under /var/www/html/admin,
# grant www-data the sudo rights the interface needs, and create the default
# authentication file.
cd /home/pi/lora_gateway/gw_web_admin
echo "Installing gateway's web admin interface..."
echo "Removing any existing admin folder (from previous install)"
sudo rm -rf /var/www/html/admin
# Prefer a locally shipped ./admin folder when one is present.
if [ -d ./admin ]
then
	echo "have detected a local web admin folder"
	echo "Copying web admin interface files"
	sudo cp -r admin /var/www/html/
	echo "Done"
fi
# Otherwise fetch the latest gw_web_admin from the GitHub repository.
if [ ! -d ./admin ]
then
	cd /home/pi
	#sudo apt-get update
	echo "Check if subversion is installed and install it if it is not"
	[[ -z `dpkg --get-selections | grep -w ^subversion[^-]` ]] && aptitude install subversion
	echo "Getting new gw_web_admin from github"
	svn checkout https://github.com/CongducPham/LowCostLoRaGw/trunk/gw_full_latest/gw_web_admin
	echo "Copying web admin interface files"
	sudo cp -r gw_web_admin/admin /var/www/html/
	echo "Removing downloaded gw_web_admin folder"
	sudo rm -rf gw_web_admin
fi
######################################
# Editing /etc/sudoers file
######################################
echo "Edit /etc/sudoers to obtain root privileges for www-data"
# Take a backup of sudoers file and change the backup file.
sudo cp /etc/sudoers /tmp/sudoers.bak
# Delete any existing www-data entries so the rules below never duplicate.
sudo sed -i '/www-data/d' /tmp/sudoers.bak
# BUG FIX: the original used `sudo echo "..." >> /tmp/sudoers.bak`. The
# append redirection is performed by the *unprivileged* shell (sudo only
# elevates `echo`), so appending to the root-owned backup fails.
# `| sudo tee -a` performs the append with root privileges instead.
echo "www-data ALL=(ALL) NOPASSWD: /sbin/reboot, /sbin/shutdown, /etc/hostapd/hostapd.conf" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/web_shell_script.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/thingspeak_key.sh, /var/www/html/admin/libs/sh/waziup_key.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /home/pi/lora_gateway/scripts/update_gw.sh, /var/www/html/admin/libs/sh/install_gw.sh, /var/www/html/admin/libs/sh/update_gw_file.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/gateway_conf.sh, /var/www/html/admin/libs/sh/waziup_conf.sh, /var/www/html/admin/libs/sh/thingspeak_conf.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/contact_mail.sh, /var/www/html/admin/libs/sh/contact_sms.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/set_profile.sh, /var/www/html/admin/libs/sh/low_level_gw_status.sh, /var/www/html/admin/libs/sh/downlink_request.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
#echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/sh/copy_log_file.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /usr/bin/python" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/admin/libs/python/key_clouds.py, /var/www/html/admin/libs/python/key_thingspeak.py" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /home/pi/lora_gateway/scripts/basic_config_gw.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /home/pi/lora_gateway/scripts/update_gw.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /home/pi/lora_gateway/gw_web_admin/install.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
echo "www-data ALL=(ALL) NOPASSWD: /var/www/html/images/libs/sh/move_img.sh" | sudo tee -a /tmp/sudoers.bak > /dev/null
# Check syntax of the backup file and install it only when it is valid
# (branch directly on visudo's exit status instead of testing $?).
if sudo visudo -cf /tmp/sudoers.bak; then
	# Replace the sudoers file with the new one only if syntax is correct.
	sudo cp /tmp/sudoers.bak /etc/sudoers
	echo "/etc/sudoers file updated"
	echo "Obtaining root privileges for www-data done."
else
	echo "Could not modify /etc/sudoers file. Please do this manually."
fi
######################################
# End editing /etc/sudoers file
######################################
echo "Check if jq is installed and install it if it is not."
[[ -z `dpkg --get-selections | grep -w ^jq[^-]` ]] && apt-get install jq
echo "Give rights to the Apache folder in order to easily administer the website."
sudo chown -R pi:www-data /var/www/html/
sudo chmod -R 770 /var/www/html/
###################
# Setting profile
##################
# NOTE(review): mkdir/echo below touch /etc without sudo — this only works
# when the whole script already runs as root; confirm the intended invocation.
if [ ! -d /etc/gw_web_admin ]
then
	mkdir /etc/gw_web_admin
	# username : admin && password : loragateway
	echo "Creating the json authentifation file /etc/gw_web_admin/database.json"
	echo "Default connection settings : username = admin & password = loragateway"
	echo "{\"username\":\"admin\", \"password\":\"loragateway\"}" > /etc/gw_web_admin/database.json
fi
echo "Install gateway's web admin interface done."
echo "Connect to http://gw_ip_addr/admin"
|
#!/bin/sh
# Print the initial GitLab root password stored by the Helm release.
# Relies on KUBE_NAMESPACE and RELEASE_NAME exported by ./00.profile.
set -x
. ./00.profile
# Extract the base64-encoded `password` field from the release's
# initial-root-password secret and decode it to stdout.
kubectl -n $KUBE_NAMESPACE get secret ${RELEASE_NAME}-gitlab-initial-root-password -ojsonpath='{.data.password}' | base64 --decode
|
<reponame>jasonseminara/tutr
'use strict';
/**
 * @ngdoc function
 * @name cattrApp.controller:ReservationcontrollerCtrl
 * @description
 * # ReservationcontrollerCtrl
 * Controller of the cattrApp
 */
cattr
  .controller('ReservationController', ['CatData', '$state', function (catData, $state) {
    var cats = this;

    // Reservation window: from now until 13 months ahead.
    cats.date = {
      start: new Date(),
      end: moment().add(13, 'months').toDate()
    };

    // Default list ordering.
    cats.sortorder = 'name';

    //catData.getAll();
    // Load every cat and expose the list on the controller.
    cats.getCats = function getCats() {
      catData.getAll()
        .$promise
        .then(function (res) {
          cats.all = res.objects;
        })
        .catch(function (err) {
          // BUG FIX: the original handler named its parameter `re` but
          // logged the undefined identifier `res`, throwing a
          // ReferenceError inside the rejection handler and hiding the
          // real failure. Log the actual rejection value instead.
          console.error('failure', err);
        });
    };

    cats.getCats();
  }]);
|
# import libraries
#import nltk
#nltk.download('popular')
from textblob import TextBlob
# define text
# Sample text to extract keywords and key phrases from.
text = "This is some text I want to analyze and get key phrases and keywords from."
# analyze text
blob = TextBlob(text)
# extract keywords
# FIXME(review): TextBlob exposes no `keywords` attribute in current
# releases — this line likely raises AttributeError; confirm the installed
# textblob version or replace with a supported keyword-extraction approach.
keywords = [word for (word, score) in blob.keywords]
print('Keywords:', ', '.join(keywords))
# extract key phrases
# noun_phrases yields the detected noun phrases of the text.
key_phrases = [phrase for phrase in blob.noun_phrases ]
print('Key Phrases:', ', '.join(key_phrases))
|
#!/bin/bash
# Delete a project namespace from a *local* OpenShift cluster.
OCTOOLSBIN=$(dirname $0)
# ===================================================================================================
# Funtions
# ---------------------------------------------------------------------------------------------------
usage() {
cat <<EOF
========================================================================================
Deletes a local OpenShift project.
----------------------------------------------------------------------------------------
Usage:
${0} [-h -x] -p <project_namespace>
OPTIONS:
========
-h prints the usage for the script
-x run the script in debug mode to see what's happening
-p <project_namespace> the namespace for the project.
EOF
exit 1
}
# Shared helpers (isLocalCluster, projectExists, exitOnError) live here.
if [ -f ${OCTOOLSBIN}/ocFunctions.inc ]; then
. ${OCTOOLSBIN}/ocFunctions.inc
fi
# Delete the named OpenShift project via `oc`.
deleteProject (){
projectName=$1
echo "Deleting project; ${projectName} ..."
oc delete project ${projectName}
}
# ===================================================================================================
# ===================================================================================================
# Setup
# ---------------------------------------------------------------------------------------------------
# NOTE(review): getopts also declares n: and d: but no case handles them —
# confirm whether those options were meant to be supported or removed.
while getopts p:n:d:hx FLAG; do
case $FLAG in
p ) PROJECT_NAMESPACE=$OPTARG ;;
x ) export DEBUG=1 ;;
h ) usage ;;
\?) #unrecognized option - show help
echo -e \\n"Invalid script option"\\n
usage
;;
esac
done
# Shift the parameters in case there any more to be used
shift $((OPTIND-1))
# echo Remaining arguments: $@
if [ ! -z "${DEBUG}" ]; then
set -x
fi
if [ -z "${PROJECT_NAMESPACE}" ]; then
echo -e \\n"Missing parameters!"
usage
fi
# ===================================================================================================
# Safety guard: refuse to delete projects on anything but a local cluster.
if ! isLocalCluster; then
echo "This script can only be run on a local cluster!"
exit 1
fi
if projectExists ${PROJECT_NAMESPACE}; then
deleteProject ${PROJECT_NAMESPACE}
exitOnError
else
echo "${PROJECT_NAMESPACE} does not exist ..."
fi
|
#######################################
# User plan for ci testing
# This tests a user interacting with scaffold API
#######################################
# Habitat plan metadata for the CI user test package.
pkg_name=user-linux-default
pkg_origin=ci
pkg_version="1.0.0"
# Build on the Chef Infra scaffolding so the scaffold API gets exercised.
pkg_scaffolding="ci/scaffolding-chef-infra"
# NOTE(review): array syntax for a single value — confirm whether plain
# pkg_svc_user="root" was intended; left unchanged here.
pkg_svc_user=("root")
scaffold_policy_name="ci"
# Required Metadata for CI
pkg_description="CI Test Plan for Linux"
pkg_license="Apache-2.0"
pkg_maintainer="The Habitat Maintainers humans@habitat.sh"
pkg_upstream_url="https://chef.sh"
|
<filename>src/main/java/com/github/chen0040/leetcode/day07/medium/WordSearch.java
package com.github.chen0040.leetcode.day07.medium;

import java.util.HashSet;
import java.util.Set;

/**
 * Created by xschen on 2/8/2017.
 * summary:
 * Given a 2D board and a word, find if the word exists in the grid.
 * The word can be constructed from letters of sequentially adjacent cell, where "adjacent" cells are those horizontally or vertically neighboring. The same letter cell may not be used more than once.
 *
 * link: https://leetcode.com/problems/word-search/description/
 */
public class WordSearch {
    public class Solution {
        // Set once any DFS path spells the full word; short-circuits all loops.
        private boolean found;
        private int rowCount;
        private int colCount;

        /**
         * Returns true iff `word` can be traced through horizontally/vertically
         * adjacent cells of `board` without reusing a cell.
         * Starts a DFS from every cell matching the word's first character.
         */
        public boolean exist(char[][] board, String word) {
            found = false;
            rowCount = board.length;
            // Empty word is trivially present; empty board holds nothing.
            if(word.length() == 0) return true;
            if(rowCount == 0) return false;
            colCount = board[0].length;
            for(int i=0; i < rowCount; ++i) {
                if(found) break;
                for(int j=0; j < colCount; ++j) {
                    if(found) break;
                    if(board[i][j] == word.charAt(0)) {
                        // Visited cells are encoded as flat indices i*colCount+j.
                        Set<Integer> visited = new HashSet<Integer>();
                        visited.add(i * colCount + j);
                        dfs(board, i, j, word, 0, visited);
                    }
                }
            }
            return found;
        }

        /**
         * Depth-first search from (i, j), where board[i][j] already matches
         * word.charAt(d). Tries the two vertical then the two horizontal
         * neighbors; each recursion receives a fresh copy of `visited`
         * (see move()), which keeps paths independent at the cost of extra
         * allocations.
         */
        private void dfs(char[][] board, int i, int j, String word, int d, Set<Integer> visited) {
            // Last character matched: the whole word has been found.
            if(d == word.length()-1) {
                found = true;
                return;
            }
            // Vertical neighbors (ii = -1, +1).
            for(int ii=-1; ii <= 1; ++ii) {
                if(found) break;
                if(ii == 0) continue;
                int i2 = i + ii;
                if(i2 < 0 || i2 >= rowCount) continue;
                if(visited.contains(i2 * colCount + j) || board[i2][j] != word.charAt(d+1)) {
                    continue;
                }
                dfs(board, i2, j, word, d+1, move(visited, i2, j));
            }
            // Horizontal neighbors (jj = -1, +1).
            for(int jj=-1; jj <= 1; ++jj) {
                if(found) break;
                if(jj == 0) continue;
                int j2 = j + jj;
                if(j2 < 0 || j2 >= colCount) continue;
                if(visited.contains(i * colCount + j2) || board[i][j2] != word.charAt(d+1)) {
                    continue;
                }
                dfs(board, i, j2, word, d+1, move(visited, i, j2));
            }
        }

        /**
         * Returns a copy of `visited` with cell (i, j) added, leaving the
         * caller's set untouched.
         */
        private Set<Integer> move(Set<Integer> visited, int i, int j) {
            Set<Integer> newSet = new HashSet<Integer>(visited);
            newSet.add(i * colCount + j);
            return newSet;
        }
    }
}
|
"""
Write a Python program to create a Tic Tac Toe game
"""
# global variables
board = ["-", "-", "-",
"-", "-", "-",
"-", "-", "-"]
game_is_still_going = True
# who is the winner
winner = None
# whose turn is it
current_player = "X"
# display board
def display_board():
print(board[0] + " | " + board[1] + " | " + board[2])
print(board[3] + " | " + board[4] + " | " + board[5])
print(board[6] + " | " + board[7] + " | " + board[8])
# play a game of tic tac toe
def play_game():
# display initial board
display_board()
# while game is still going
while game_is_still_going:
# assign handle turn to a variable
handle_turn(current_player)
# check if game is finished
check_if_game_over()
# flip to another player
flip_player()
# check if winner
if winner == "X" or winner == "O":
print("Winner is: " + winner)
elif winner == None:
print("Tie.")
# handle a single turn of a player
def handle_turn(player):
position = input("Choose a position from 1-9: ")
position = int(position) - 1
board[position] = player
display_board()
# check win
def check_if_game_over():
check_if_win()
check_if_tie()
# check rows, columns and diagonals for a win
def check_if_win():
# set global variables
global winner
# check rows
row_winner = check_rows()
# check columns
column_winner = check_columns()
# check diagonals
diagonal_winner = check_diagonals()
if row_winner:
# there is a win
winner = row_winner
elif column_winner:
# there is a win
winner = column_winner
elif diagonal_winner:
# there is a win
winner = diagonal_winner
else:
# there is no win
winner = None
return
# check rows for a win
def check_rows():
# set global varibales
global game_is_still_going
# check if any of the rows have all the same values (and is not empty)
row_1 = board[0] == board[1] == board[2] != "-"
row_2 = board[3] == board[4] == board[5] != "-"
row_3 = board[6] == board[7] == board[8] != "-"
# if any row does have a match, flag that there is a win
if row_1 or row_2 or row_3:
game_is_still_going = False
# return the winner (X or O)
if row_1:
return board[0]
elif row_2:
return board[3]
elif row_3:
return board[6]
# or return None if there was no win
else:
return None
# check columns for a win
def check_columns():
# set global variables
global game_is_still_going
# check if any of the columns have all the same values (and is not empty)
column_1 = board[0] == board[3] == board[6] != "-"
column_2 = board[1] == board[4] == board[7] != "-"
column_3 = board[2] == board[5] == board[8] != "-"
# if any column does have a match, flag that there is a win
if column_1 or column_2 or column_3:
game_is_still_going = False
# return the winner (X or O)
if column_1:
return board[0]
elif column_2:
return board[1]
elif column_3:
return board[2]
# or return None if there was no win
else:
return None
# check diagonals for a win
def check_diagonals():
# set global variables
global game_is_still_going
# check if any of the diagonals have all the same values (and is not empty)
diagonal_1 = board[0] == board[4] == board[8] != "-"
diagonal_2 = board[2] == board[4] == board[6] != "-"
# if any diagonal does have a match, flag that there is a win
if diagonal_1 or diagonal_2:
game_is_still_going = False
# return the winner (X or O)
if diagonal_1:
return board[0]
elif diagonal_2:
return board[2]
# or return None if there was no win
else:
return None
# check if there is a tie
def check_if_tie():
# set global variables
global game_is_still_going
# if board is full
if "-" not in board:
game_is_still_going = False
# return true if there is a tie, false if not
return
# flip to another player
def flip_player():
# global variables we need
global current_player
# if current player was x, make it o
if current_player == "X":
current_player = "O"
# if current player was o, make it x
elif current_player == "O":
current_player = "X"
# Entry point: run the interactive game only when executed as a script.
if __name__ == '__main__':
    play_game()
|
package com.huatuo.net.thread;
import java.util.ArrayList;
import java.util.HashMap;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.os.Handler;
import com.huatuo.base.MyApplication;
import com.huatuo.dictionary.MsgId;
import com.huatuo.net.http.ActionResponse;
import com.huatuo.net.http.HttpAgent;
import com.huatuo.util.CommonUtil;
import com.huatuo.util.JsonUtil;
/**
 * Background task that queries the flash-sale ("seckill") special-activity
 * list from the server and reports the outcome to a Handler via MsgId codes.
 */
public class GetSecKillSessionOrAdvanceNoticeList implements Runnable {
    // Receives DOWN_DATA_S / DOWN_DATA_F / NET_NOT_CONNECT messages.
    private Handler mHandler;
    private Context mContext;
    // Request parameters sent to the server.
    private HashMap<String, String> inJson;
    // Parsed response body, message and code (set by initRsultData).
    private JSONObject outJson;
    private String outMsgJson = null;
    private int outCode;

    /**
     * @param ID
     *            technician id
     * @param isGetProject
     *            whether to also return the service list: 1 = yes, 0 = no, default 0
     * @param isEvaluate
     *            whether to also return review information: 1 = yes, 0 = no, default 0
     *
     * NOTE(review): the documented parameters do not match the actual
     * constructor signature (Context, Handler, HashMap) — this Javadoc looks
     * copied from another task; confirm and correct upstream.
     * */
    public GetSecKillSessionOrAdvanceNoticeList(Context mContext, Handler mHandler,
            HashMap<String, String> inJson) {
        this.mContext = mContext;
        this.mHandler = mHandler;
        this.inJson = inJson;
    }

    @Override
    public void run() {
        try {
            CommonUtil.logE("获取查询秒杀专项列表提交参数inJson:" + inJson);
            // Synchronous HTTP request against the seckill endpoint.
            HttpAgent httpAgent = new HttpAgent("spike/querySpecialContext",
                    inJson, mContext);
            ActionResponse response = httpAgent.sendRequest(null);
            int code = response.getCode();
            initRsultData(response);
            // Map the transport result onto handler messages: 0 = success.
            if (code == 0) {
                mHandler.sendEmptyMessage(MsgId.DOWN_DATA_S);
            } else if (code == MsgId.NET_NOT_CONNECT) {
                mHandler.sendEmptyMessage(MsgId.NET_NOT_CONNECT);
            } else {
                mHandler.sendEmptyMessage(MsgId.DOWN_DATA_F);
            }
        } catch (Exception e) {
            // LogUtil.e(Tag, "LoginInvokeItem run" + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Initialize the cached result fields (body, message, code) from the
     * server response.
     * */
    private void initRsultData(ActionResponse actionRespons) {
        JSONObject bodyJsonObject = actionRespons.getRsbody();
        outJson = bodyJsonObject;
        outMsgJson = actionRespons.getMsg();
        outCode = actionRespons.getCode();
    }

    /**
     * Extract the "activityList" array from the response body as a list of
     * JSONObjects; malformed entries are skipped, never aborting the list.
     */
    public ArrayList<JSONObject> getActivityList() {
        JSONArray array = outJson.optJSONArray("activityList");
        JSONObject json_adObj;
        ArrayList<JSONObject> list = new ArrayList<JSONObject>();
        if (CommonUtil.NoEmptyArray(array)) {
            for (int i = 0; i < array.length(); i++) {
                try {
                    json_adObj = array.getJSONObject(i);
                } catch (JSONException e) {
                    e.printStackTrace();
                    continue;
                }
                list.add(json_adObj);
            }
        }
        return list;
    }

    /**
     * Replace the request parameters with the entries of the given JSON
     * object (converted to a map).
     * */
    public void setinJson(JSONObject ij) {
        HashMap<String, String> map = JsonUtil.Json2Map(ij);
        this.inJson = map;
    }

    /**
     * Accessors for the cached response data (valid after run() completes).
     */
    public JSONObject getOutJson() {
        return this.outJson;
    }

    public String getOutMsg() {
        return outMsgJson;
    }

    public int getOutCode() {
        return outCode;
    }
}
|
<filename>acmicpc/15733/15733.py<gh_stars>1-10
print("I'm Sexy")
|
#ifndef _BACKTRACE_H
#define _BACKTRACE_H
#include <string>
#include "DPMatrix.h"
#include "dp_matrix_cell.h"
class DPMatrix;
// Recovers an alignment of two sequences from a filled dynamic-programming
// matrix: construct with the matrix, the (i,j,k,l) start cell and the two
// sequences, call run(), then read the alignment strings via print().
class Backtrace {
public:
    Backtrace(DPMatrix *dp_matrix, int i, int j, int k, int l, const std::string &s1, const std::string &s2);
    // Perform the backtrace, filling the internal list_* buffers.
    void run();
    // Copy the assembled alignment rows (s1 row, structure row, s2 row)
    // into the caller-supplied strings.
    void print(std::string &alignment_s1, std::string &alignment_structure, std::string &alignment_s2);
private:
    // Predecessor lookup for a cell during the trace.
    dp_matrix_cell get_parent(const dp_matrix_cell c);
    // NOTE(review): "mb" presumably refers to a multi-branch case of the
    // recursion — confirm against DPMatrix's recurrences.
    void calculate_mb_position(float score);
    void do_backtrace_mb(int i, int j, int k, int l);
    DPMatrix *dp_matrix;
    int i, j, k, l;        // current trace position
    int m, n;              // sequence lengths
    const std::string s1, s2;
    // Accumulated per-column output of the trace.
    std::string list_i, list_j, list_k, list_l, list_bp_left, list_bp_right;
};
#endif
|
<filename>qcommunity/modularity/standalone/refinement/refinement.py
#!/usr/bin/env python
import re, os, sys
import numpy as np
import networkx as nx
from numpy import linalg as la
from networkx.generators.atlas import *
import random, copy
from os import listdir
from os.path import isfile, join
import matplotlib.pyplot as plt
from matplotlib.pyplot import cm
from dwave_sapi2.local import local_connection
from dwave_sapi2.remote import RemoteConnection
from dwave_sapi2.core import solve_ising
from dwave_sapi2.embedding import find_embedding
from dwave_sapi2.util import get_chimera_adjacency, get_hardware_adjacency
from dwave_sapi2.embedding import embed_problem, unembed_answer
from chimera_embedding import processor
from collections import Counter
import time as mytime
import math
from pyomo.environ import *
sys.path.insert(0, '../Optimal/')
import minimize_ising_model
from networkx.utils import reverse_cuthill_mckee_ordering
#from networkx.algorithms.community.quality import modularity
# native embedding
def get_native_embedding(solver):
    """Return the largest native clique embedding for a D-Wave solver.

    Reads the Chimera dimensions M, N, L from the solver's parameter
    properties (defaulting to a 16x16, L=4 topology) and delegates to the
    chimera_embedding `processor` helper.
    """
    params = solver.properties.get('parameters', {})
    #M = params.get('M', 12)
    #N = params.get('N', 12)
    M = params.get('M', 16)
    N = params.get('N', 16)
    L = params.get('L', 4)
    hardware_adj = get_hardware_adjacency(solver)
    embedder = processor(hardware_adj, M=M, N=N, L=L)
    embedding = embedder.largestNativeClique()
    return embedding
def get_sub_mod_matrix(graph, ptn_variables):
    """Restrict the modularity matrix to the free variables.

    ``ptn_variables`` maps node -> +1/-1 (a fixed spin) or the string
    ``'free'``.  Returns ``(sub_B_matrix, bias, constant)`` where
    ``sub_B_matrix`` is the free-free block of the modularity matrix,
    ``bias[i]`` collects the interaction of free node i with every fixed
    spin, and ``constant`` is the fixed-fixed contribution s_f^T C s_f.

    NOTE(review): node labels are used directly as matrix indices, so this
    assumes nodes are integer-labelled 0..n-1 — confirm against callers.
    """
    B_matrix = nx.modularity_matrix(graph, nodelist=sorted(graph.nodes()))
    free_var = []
    fixed_var = []
    fixed_vec = []
    free_nodes = []
    fixed_nodes = []
    for node in sorted(ptn_variables):
        if ptn_variables[node] == 'free':
            free_var.append(True)
            fixed_var.append(False)
            free_nodes.append(node)
        else:
            free_var.append(False)
            fixed_var.append(True)
            fixed_vec.append(ptn_variables[node])
            fixed_nodes.append(node)
    free_var = np.array(free_var)
    # Boolean mask applied on rows then columns selects the free-free block.
    sub_B_matrix = B_matrix[free_var][:, free_var]
    bias = []
    for node_i in free_nodes:
        bias_i = 0
        for node_j in fixed_nodes:
            s_j = ptn_variables[node_j]
            # Factor 2: B contributes symmetrically via (i, j) and (j, i).
            bias_i += 2 * s_j * B_matrix.item((node_i, node_j))
        bias.append(bias_i)
    fixed_var = np.array(fixed_var)
    C = B_matrix[fixed_var][:, fixed_var]
    n = C.shape[0]
    vec = np.array(fixed_vec).reshape(n, 1)
    # Quadratic form over the fixed spins only (a scalar 1x1 matrix).
    constant = vec.transpose() * C * vec
    return sub_B_matrix, bias, constant.item(0)
def get_new_partition(best_soln, ptn_variables):
    """Map a sub-problem solution back onto the full node set.

    Nodes marked 'free' in ``ptn_variables`` consume the next value from
    ``best_soln`` (in sorted-node order); fixed nodes keep their stored spin.

    Fix: the original popped values with ``del best_soln[0]``, destructively
    emptying the caller's list (and doing O(n^2) work).  An iterator gives
    the same values without mutating the argument.
    """
    free_values = iter(best_soln)
    partition = []
    for node in sorted(ptn_variables):
        if ptn_variables[node] == 'free':
            partition.append(next(free_values))
        else:
            partition.append(ptn_variables[node])
    assert len(partition) == len(ptn_variables)
    return partition
def compute_modularity(graph, mod_matrix, partition):
    """Evaluate the quadratic form s^T B s for spin vector ``partition``.

    ``graph`` is accepted for interface symmetry but unused.
    """
    size = mod_matrix.shape[0]
    spins = np.array(partition).reshape(size, 1)
    quad_form = spins.transpose() * mod_matrix * spins
    return quad_form.item(0)
# modularity with DWave
def sapi_refine_modularity(
        graph,
        solver,
        hardware_size,  # max size subproblem
        ptn_variables,  # ptn_variables[node] = 0,1,'free'
        num_reads,
        annealing_time,
        embeddings=False,  # if false, get fast embedding
):
    """Re-optimise the free spins of a partition on a D-Wave solver.

    Builds the Ising sub-problem over the free variables (negated, because
    the annealer minimises energy while we maximise modularity), embeds it
    on the hardware graph, solves, and maps the lowest-energy sample back
    onto the full partition via ``get_new_partition``.

    Raises ValueError when the number of free variables exceeds
    ``hardware_size``.
    """
    sub_B_matrix, bias, constant = get_sub_mod_matrix(graph, ptn_variables)
    n = sub_B_matrix.shape[0]
    if n > hardware_size:
        print n, hardware_size
        raise ValueError('Number free variables exceeds hardware size')
    coupler = {}
    # we add negative because we maximize modularity
    bias = [-i for i in bias]
    for i in range(n - 1):
        for j in range(i + 1, n):
            # Both (i, j) and (j, i) are set; B is symmetric so they agree.
            coupler[(i, j)] = -sub_B_matrix.item((i, j))
            coupler[(j, i)] = -sub_B_matrix.item((j, i))
    A = get_hardware_adjacency(solver)
    #print "embedding..."
    if not embeddings:
        print 'finding embedding ....'
        embeddings = find_embedding(coupler, A, verbose=0, fast_embedding=True)
    # Embed the logical problem onto physical qubits; jc holds the chain
    # couplings that tie each logical variable's qubits together.
    (h0, j0, jc, new_emb) = embed_problem(
        bias, coupler, embeddings, A, clean=True, smear=True)
    emb_j = j0.copy()
    emb_j.update(jc)
    #print "On DWave..."
    result = solve_ising(
        solver, h0, emb_j, num_reads=num_reads, annealing_time=annealing_time)
    #print result
    #print "On DWave...COMPLETE"
    energies = result['energies']
    #print energies
    #print result['solutions']
    #print min(energies), max(energies)
    # Broken chains are resolved by choosing the value that minimises energy.
    new_answer = unembed_answer(result['solutions'], new_emb, 'minimize_energy',
                                bias, coupler)
    min_energy = 10**10
    best_soln = []
    for i, ans in enumerate(new_answer):
        soln = ans[0:n]
        # 3 marks an unused/undefined qubit in SAPI answers; none should remain.
        assert 3 not in soln
        en = energies[i]
        if en < min_energy:
            #print 'energy', en
            min_energy = en
            best_soln = copy.deepcopy(soln)
    return get_new_partition(best_soln, ptn_variables)
def get_random_nodes(graph, hardware_size):
    """Uniformly sample ``hardware_size`` distinct nodes from the graph."""
    candidate_nodes = graph.nodes()
    return random.sample(candidate_nodes, hardware_size)
def _node_gain(node, mod_matrix, ptn):
''' return change in modularity of moving node to new part'''
int_deg = 0
ext_deg = 0
for i in range(mod_matrix.shape[0]):
if i != node:
if ptn[i] == ptn[node]:
int_deg += mod_matrix.item((i, node))
else:
ext_deg += mod_matrix.item((i, node))
#return -2*int_deg - mod_matrix.item((node, node))
return 4 * (ext_deg - int_deg) # min-cut
def verify_gain(node, graph, mod_matrix, ptn):
    """Brute-force check of ``_node_gain``: flip, re-evaluate, flip back.

    Temporarily negates ``ptn[node]``, measures the modularity difference,
    and restores the original partition before returning.
    """
    before = compute_modularity(graph, mod_matrix, ptn)
    ptn[node] = -ptn[node]
    after = compute_modularity(graph, mod_matrix, ptn)
    ptn[node] = -ptn[node]
    return after - before
def get_top_gain_nodes(graph, mod_matrix, ptn, hardware_size):
    ''' Nodes with the highest flip gain, best first, truncated to fit. '''
    scored = [(_node_gain(node, mod_matrix, ptn), node)
              for node in sorted(graph.nodes())]
    scored.sort(reverse=True)
    #gains = [0.25/nx.number_of_edges(graph)*gain for gain, _ in scored]
    top_nodes = [node for _, node in scored]
    #print 'top node', top_nodes[:hardware_size]
    return top_nodes[:hardware_size]
def get_top_spectral_gain(graph, mod_matrix, ptn, hardware_size, sp_order):
    """Spectral-ordering neighbourhood around the single best-gain node.

    Picks the node maximising (gain, node) and returns the window of
    ``sp_order`` centred on it, sorted ascending.
    """
    best = None
    for node in sorted(graph.nodes()):
        candidate = (_node_gain(node, mod_matrix, ptn), node)
        if best is None or candidate > best:
            best = candidate
    subset = get_neigborhood(best[1], sp_order, hardware_size)
    return sorted(subset)
def get_free_nodes(graph,
                   mod_matrix,
                   ptn,
                   hardware_size,
                   sp_order,
                   method=False):
    """Select the free variables for the next refinement round.

    Dispatches on ``method`` ('top_gain', 'spectral', 'boundary_spectral',
    'tg_sp_same_part'); any other value falls back to uniform random
    sampling.  The selection is always returned sorted.
    """
    if method == 'top_gain':
        selected = get_top_gain_nodes(graph, mod_matrix, ptn, hardware_size)
    elif method == 'spectral':
        selected = get_top_spectral_gain(graph, mod_matrix, ptn,
                                         hardware_size, sp_order)
    elif method == 'boundary_spectral':
        selected = get_spectral_boundary(graph, mod_matrix, ptn,
                                         hardware_size, sp_order)
    elif method == 'tg_sp_same_part':
        selected = tg_sp_same_part(graph, mod_matrix, ptn, hardware_size,
                                   sp_order)
    else:
        selected = get_random_nodes(graph, hardware_size)
    return sorted(selected)
def data_to_graph(filename):
    """Read an edge list ('%' comments, no edge data) and relabel nodes 0..n-1.

    NOTE(review): relies on nx.OrderedGraph, which was removed in recent
    NetworkX versions — this file targets an older (Python 2 era) NetworkX.
    """
    return nx.convert_node_labels_to_integers(
        nx.read_edgelist(
            filename, comments='%', data=False, create_using=nx.OrderedGraph()))
def ising_to_file(B_matrix, bias):
    """Write the (negated) Ising model to 'ising.dat' in AMPL data format.

    Emits four sections — nodes, couplers, bias and weights — consumed by
    ``minimize_ising_model``.  Signs are flipped because the downstream
    solver minimises while we maximise modularity.

    Fix: the output file was opened and never closed (leaked handle, and
    buffered data could be lost); a ``with`` block now guarantees the close.
    """
    data_var = {}
    data_var['couplers'] = 'set couplers :=\n'
    data_var['nodes'] = 'set nodes :=\n'
    data_var['bias'] = 'param bias := \n'
    data_var['weight'] = 'param w := \n'
    n = B_matrix.shape[0]
    # Take negative values because we max modularity
    B_matrix = -B_matrix
    bias = [-i for i in bias]
    for i in range(n - 1):
        # Upper triangle including the diagonal; the last diagonal entry
        # (n-1, n-1) is appended separately below.
        for j in range(i, n):
            w = B_matrix.item((i, j))
            data_var['couplers'] += ' '.join([str(i), str(j), '\n'])
            data_var['weight'] += ' '.join([str(i), str(j), str(w), '\n'])
    i, j = n - 1, n - 1
    w = B_matrix.item((i, j))
    data_var['couplers'] += ' '.join([str(i), str(j), '\n'])
    data_var['weight'] += ' '.join([str(i), str(j), str(w), '\n'])
    for i in range(n):
        data_var['nodes'] += str(i) + '\n'
        data_var['bias'] += str(i) + ' ' + str(bias[i]) + '\n'
    data_var['nodes'] += ';\n'
    data_var['bias'] += ';\n'
    data_var['weight'] += ';\n'
    data_var['couplers'] += ';\n'
    with open('ising.dat', 'w') as mygraphfile:
        for item in data_var:
            mygraphfile.write(data_var[item])
def pyomo_refine(graph, ptn_variables):
    """Exactly re-optimise the free spins with Gurobi via Pyomo.

    Writes the Ising sub-problem to 'ising.dat', solves the model defined in
    ``minimize_ising_model`` (20 s time limit, tight MIP gap), converts the
    0/1 variables back to +/-1 spins and maps them onto the full partition.
    """
    sub_B_matrix, bias, constant = get_sub_mod_matrix(graph, ptn_variables)
    ising_to_file(sub_B_matrix, bias)
    instance = minimize_ising_model.model.create_instance("ising.dat")
    solver = SolverFactory("gurobi")
    solver.options['mipgap'] = 0.00000001
    solver.options['parallel'] = -1
    solver.options['timelimit'] = 20
    results = solver.solve(instance, tee=False)
    energy = instance.min_ising()
    #print energy, constant, energy - constant
    # Get partition
    varobject = getattr(instance, 'x')
    part0 = []
    part1 = []
    n = sub_B_matrix.shape[0]
    ising_partition = ['unset' for i in range(n)]
    for index in sorted(varobject):
        x_val = varobject[index].value
        # Map binary 0/1 to spin -1/+1; clamp against solver round-off.
        s_val = 2 * x_val - 1
        if s_val < 0:
            s_val = -1
        else:
            s_val = 1
        assert s_val != 0
        ising_partition[index] = s_val
    new_ptn = get_new_partition(ising_partition, ptn_variables)
    #print new_ptn
    return new_ptn
def get_boundary(graph, ptn):
    """Nodes incident to at least one cut edge, mapped to their part."""
    boundary = {}
    for endpoint_a, endpoint_b in graph.edges():
        if ptn[endpoint_a] == ptn[endpoint_b]:
            continue
        boundary[endpoint_a] = ptn[endpoint_a]
        boundary[endpoint_b] = ptn[endpoint_b]
    return boundary
def get_spectral_boundary(graph, mod_matrix, ptn, hardware_size, sp_order):
    '''
    Pick a node
    Get spectral neighborhood of node
    if boundary nodes more than hardware_size, subset returned must all be
    boundary nodes, else pick other nodes
    How to pick a node:
    1. top gain boundary node
    2. random boundary node
    '''
    # Fix: the original computed top_node and then fell off the end,
    # returning None (crashing get_free_nodes's sorted()).  Following the
    # docstring, the spectral order is restricted to boundary nodes and a
    # window around the top-gain boundary node is returned.
    # TODO(review): the "else pick other nodes" case is not implemented —
    # confirm intended behaviour when the boundary is smaller than
    # hardware_size.
    boundary = get_boundary(graph, ptn)
    node_gain = []
    for node in boundary:
        gain = _node_gain(node, mod_matrix, ptn)
        node_gain.append((gain, node))
    top_node = max(node_gain)[1]
    boundary_sp_order = [i for i in sp_order if i in boundary]
    return get_neigborhood(top_node, boundary_sp_order, hardware_size)
def tg_sp_same_part(graph, mod_matrix, ptn, hardware_size, sp_order):
    """Spectral neighbourhood, same part only, around the best-gain node."""
    scored = [(_node_gain(node, mod_matrix, ptn), node)
              for node in graph.nodes()]
    top_node = max(scored)[1]
    return spectral_neigh_same_part(top_node, graph, mod_matrix, ptn,
                                    hardware_size, sp_order)
def spectral_neigh_same_part(node, graph, mod_matrix, ptn, hardware_size,
                             sp_order):
    """Window of the spectral order around ``node``, restricted to its part.

    ``graph`` and ``mod_matrix`` are accepted for interface symmetry with
    the other selection strategies but are not used here.
    """
    same_part_order = [i for i in sp_order if ptn[i] == ptn[node]]
    return get_neigborhood(node, same_part_order, hardware_size)
def get_neigborhood(node, mylist, hardware_size):
    '''
    Return up to ``hardware_size`` entries of ``mylist`` in a window centred
    on ``node``, sorted ascending.  If the window is clipped at either end
    of the list it is extended on the opposite side to keep its size.

    Fix: halving now uses integer division (``// 2``); the original ``/ 2``
    is identical on Python 2 ints but produces a float under Python 3,
    which crashes list slicing.  Unused locals l_indx/r_indx removed.
    '''
    node_indx = mylist.index(node)
    n = len(mylist)
    half = hardware_size // 2
    max_left = max(node_indx - half, 0)
    max_right = min(node_indx + half, n)
    if half > node_indx:
        # Window clipped on the left: extend to the right.
        add = half - node_indx
        max_right = min(max_right + add, n)
    if half + node_indx > n:
        # Window clipped on the right: extend to the left.
        add = half + node_indx - n
        max_left = max(max_left - add, 0)
    return sorted(mylist[max_left:max_right])
def list_to_sorted(graph, init_ptn):
    """Reorder ``init_ptn`` (given in graph.nodes() order) by node label.

    Asserts every slot was filled (spins are +/-1, so 0 marks a gap).
    """
    nodes = list(graph.nodes())
    ptn = [0] * len(nodes)
    for position, node in enumerate(nodes):
        ptn[node] = init_ptn[position]
    assert 0 not in ptn
    return ptn
def main():
    """Run the refinement experiment over 30 fixed seeds and print a summary.

    For each seed: build a random +/-1 partition, then repeatedly free a
    subset of variables (strategy ``free_node_method``), re-optimise them on
    D-Wave or with Pyomo/Gurobi, and iterate until the free set stabilises
    or no improvement is seen for ``nconv`` rounds.  Prints a LaTeX-style
    '&'-joined row of min/mean/max/std modularity and iteration counts.

    NOTE(review): ``start_sapi`` is not defined in this file — presumably
    imported elsewhere or expected in scope; verify before running.
    """
    #method = 'pyomo'
    method = 'dwave'
    free_node_method = 'top_gain'
    #free_node_method = 'spectral'
    #free_node_method = 'random'
    #free_node_method = 'boundary_spectral'
    #free_node_method = 'tg_sp_same_part'
    if method == 'dwave':
        solver = start_sapi()
        embedding = get_native_embedding(solver)
    num_reads = 1000
    annealing_time = 200
    filename = sys.argv[1]
    graph = data_to_graph(filename)
    #print graph.nodes()
    print('%i nodes, %i edges' % (nx.number_of_nodes(graph),
                                  nx.number_of_edges(graph)))
    if nx.number_of_nodes(graph) > 600:
        exit()
    mod_matrix = nx.modularity_matrix(graph, nodelist=sorted(graph.nodes()))
    nnodes = nx.number_of_nodes(graph)
    hardware_size = 25
    #init_ptn = [1 - 2*random.randint(0,1) for _ in range(nnodes)]
    # Fixed seeds so the 30 experiment repetitions are reproducible.
    seeds = [
        1070, 8173, 3509, 8887, 1314, 4506, 5219, 3765, 1420, 7778, 3734, 6509,
        1266, 5063, 6496, 4622, 7018, 6052, 8932, 8215, 1254, 400, 3260, 5999,
        1331, 8073, 7357, 2928, 7208, 3874
    ]
    niters = []
    mod_values = []
    for __, seed in enumerate(seeds):
        #print('exp %i' %__)
        #seed = 0
        random.seed(seed)
        np.random.seed(seed)
        # Random initial spin assignment in {-1, +1}.
        init_ptn = [
            1 - 2 * x for x in list(
                np.random.randint(2, size=(graph.number_of_nodes(),)))
        ]
        #print init_ptn
        mod = compute_modularity(graph, mod_matrix, init_ptn)
        #print('init modularity:', mod, 0.25*mod/nx.number_of_edges(graph))
        ptn_variables = {}
        for node in sorted(graph.nodes()):
            ptn_variables[node] = init_ptn[node]
        free_nodes = get_random_nodes(graph, hardware_size)
        free_set = set(free_nodes)
        sp_order = nx.spectral_ordering(graph)
        #sp_order = list(reverse_cuthill_mckee_ordering(graph))
        #print(sp_order)
        free_nodes = get_free_nodes(
            graph,
            mod_matrix,
            init_ptn,
            hardware_size,
            sp_order,
            method=free_node_method)
        not_converge = True
        myiter = 0
        nconv = 5  # stop after this many iterations without improvement
        best_soln = -float('inf')
        while not_converge:
            myiter += 1
            #print len(free_nodes)
            # Mark the chosen subset as free, re-optimise, then write the
            # refined spins back into ptn_variables.
            for node in free_nodes:
                ptn_variables[node] = 'free'
            if method == 'dwave':
                new_ptn = sapi_refine_modularity(graph, solver, hardware_size,
                                                 ptn_variables, num_reads,
                                                 annealing_time, embedding)
            else:
                new_ptn = pyomo_refine(graph, ptn_variables)
            for node in free_nodes:
                ptn_variables[node] = new_ptn[node]
            mod = compute_modularity(graph, mod_matrix, new_ptn)
            #print(myiter, 'refine modularity:', mod, 0.25*mod/nx.number_of_edges(graph))
            free_nodes = get_free_nodes(
                graph,
                mod_matrix,
                new_ptn,
                hardware_size,
                sp_order,
                method=free_node_method)
            current_free_set = set(free_nodes)
            if mod > best_soln:
                best_soln = mod
                best_it = myiter
            # Converged when the free set repeats or when no improvement has
            # been seen for nconv iterations.
            if free_set == current_free_set:
                not_converge = False
            elif myiter - best_it >= nconv:
                not_converge = False
            free_set = current_free_set
        niters.append(myiter)
        # 0.25/|E| rescales the spin quadratic form to standard modularity.
        mod_values.append(0.25 * mod / nx.number_of_edges(graph))
        #print(seed, myiter, 0.25*mod/nx.number_of_edges(graph))
    best = max(mod_values)
    worst = min(mod_values)
    av = np.mean(mod_values)
    std = np.std(mod_values)
    b_it = min(niters)
    w_it = max(niters)
    av_it = np.mean(niters)
    std_it = np.std(niters)
    #print(seeds)
    out = [worst, av, best, std, b_it, av_it, w_it, std_it]
    out = '& '.join([str(round(i, 4)) for i in out])
    print(out)
    print('-------------------\n')
    '''
    for mynode in sorted(graph.nodes()):
        free_nodes = spectral_neigh_same_part(mynode, graph,
            mod_matrix, new_ptn, hardware_size, sp_order)
        for node in free_nodes:
            ptn_variables[node] = 'free'
        if method == 'dwave':
            new_ptn = sapi_refine_modularity(graph,
                solver,
                hardware_size,
                ptn_variables,
                num_reads,
                annealing_time,
                embedding)
        else:
            new_ptn = pyomo_refine(graph, ptn_variables)
        for node in free_nodes:
            ptn_variables[node] = new_ptn[node]
        mod = compute_modularity(graph, mod_matrix, new_ptn)
        print(mynode, 'refine modularity:', mod, 0.25*mod/nx.number_of_edges(graph))
    '''
def main2():
    """ spectral same part

    Sweeps every node, frees its same-part spectral neighbourhood and
    re-optimises it (Pyomo by default, D-Wave when method == 'dwave').

    Fix: ``random.seed(seed)`` referenced ``seed`` whose assignment was
    commented out, raising NameError on entry; the seed is now drawn
    explicitly (log it to reproduce a run).
    """
    method = 'pyomo'
    if method == 'dwave':
        solver = start_sapi()
        embedding = get_native_embedding(solver)
    num_reads = 1000
    annealing_time = 200
    seed = random.randint(1, 10000)
    #seed = 8834
    #print seed
    random.seed(seed)
    filename = sys.argv[1]
    graph = data_to_graph(filename)
    #print '%i nodes, %i edges' %(nx.number_of_nodes(graph), nx.number_of_edges(graph))
    mod_matrix = nx.modularity_matrix(graph, nodelist=sorted(graph.nodes()))
    nnodes = nx.number_of_nodes(graph)
    hardware_size = 25
    # Random initial spin assignment in {-1, +1}.
    ptn = [1 - 2 * random.randint(0, 1) for _ in range(nnodes)]
    #init_ptn = [1 for _ in range(nnodes)]
    #init_ptn = [np.sign(i) for i in nx.fiedler_vector(graph).tolist()]
    mod = compute_modularity(graph, mod_matrix, ptn)
    #print 'init modularity:', mod, 0.25*mod/nx.number_of_edges(graph)
    ptn_variables = {}
    for node in sorted(graph.nodes()):
        ptn_variables[node] = ptn[node]
    sp_order = nx.spectral_ordering(graph)
    for mynode in sorted(graph.nodes()):
        # Free the spectral neighbourhood of mynode restricted to its part,
        # re-optimise, and fold the result back into ptn_variables.
        free_nodes = spectral_neigh_same_part(mynode, graph, mod_matrix, ptn,
                                              hardware_size, sp_order)
        for node in free_nodes:
            ptn_variables[node] = 'free'
        if method == 'dwave':
            new_ptn = sapi_refine_modularity(graph, solver, hardware_size,
                                             ptn_variables, num_reads,
                                             annealing_time, embedding)
        else:
            new_ptn = pyomo_refine(graph, ptn_variables)
        for node in free_nodes:
            ptn_variables[node] = new_ptn[node]
        mod = compute_modularity(graph, mod_matrix, new_ptn)
        #print mynode, 'refine modularity:', mod, 0.25*mod/nx.number_of_edges(graph)
# Entry point: run the multi-seed experiment; main2 is the per-node sweep.
if __name__ == '__main__':
    main()
    #main2()
|
from pypy.jit.codewriter.policy import JitPolicy
from pypy.rlib.jit import JitHookInterface
from pypy.rlib import jit_hooks
from pypy.interpreter.error import OperationError
from pypy.jit.metainterp.jitprof import counter_names
from pypy.module.pypyjit.interp_resop import wrap_oplist, Cache, wrap_greenkey,\
WrappedOp
class PyPyJitIface(JitHookInterface):
    """Bridges JIT compiler events to app-level hook callables.

    Each callback looks up the registered app-level hook in the per-space
    Cache, guards against re-entry with ``cache.in_recursion`` (a hook that
    itself triggers JIT activity must not recurse), and reports exceptions
    raised by the hook as unraisable rather than propagating them.

    NOTE(review): ``self.space`` is assigned elsewhere (not visible in this
    file) — presumably during interpreter setup.
    """
    def on_abort(self, reason, jitdriver, greenkey, greenkey_repr):
        """Call the app-level abort hook with (driver name, greenkey, reason)."""
        space = self.space
        cache = space.fromcache(Cache)
        if cache.in_recursion:
            return
        if space.is_true(cache.w_abort_hook):
            cache.in_recursion = True
            try:
                try:
                    space.call_function(cache.w_abort_hook,
                                        space.wrap(jitdriver.name),
                                        wrap_greenkey(space, jitdriver,
                                                      greenkey, greenkey_repr),
                                        space.wrap(counter_names[reason]))
                except OperationError, e:
                    e.write_unraisable(space, "jit hook ", cache.w_abort_hook)
            finally:
                cache.in_recursion = False

    def after_compile(self, debug_info):
        """Loop compiled: invoke the compile hook keyed by its greenkey."""
        w_greenkey = wrap_greenkey(self.space, debug_info.get_jitdriver(),
                                   debug_info.greenkey,
                                   debug_info.get_greenkey_repr())
        self._compile_hook(debug_info, w_greenkey)

    def after_compile_bridge(self, debug_info):
        """Bridge compiled: invoke the compile hook keyed by fail-descr number."""
        self._compile_hook(debug_info,
                           self.space.wrap(debug_info.fail_descr_no))

    def before_compile(self, debug_info):
        """Loop about to compile: invoke the optimize hook (may edit ops)."""
        w_greenkey = wrap_greenkey(self.space, debug_info.get_jitdriver(),
                                   debug_info.greenkey,
                                   debug_info.get_greenkey_repr())
        self._optimize_hook(debug_info, w_greenkey)

    def before_compile_bridge(self, debug_info):
        """Bridge about to compile: invoke the optimize hook."""
        self._optimize_hook(debug_info,
                            self.space.wrap(debug_info.fail_descr_no))

    def _compile_hook(self, debug_info, w_arg):
        """Notify the app-level compile hook with the op list and asm info."""
        space = self.space
        cache = space.fromcache(Cache)
        if cache.in_recursion:
            return
        if space.is_true(cache.w_compile_hook):
            logops = debug_info.logger._make_log_operations()
            list_w = wrap_oplist(space, logops, debug_info.operations,
                                 debug_info.asminfo.ops_offset)
            cache.in_recursion = True
            try:
                try:
                    jd_name = debug_info.get_jitdriver().name
                    asminfo = debug_info.asminfo
                    space.call_function(cache.w_compile_hook,
                                        space.wrap(jd_name),
                                        space.wrap(debug_info.type),
                                        w_arg,
                                        space.newlist(list_w),
                                        space.wrap(asminfo.asmaddr),
                                        space.wrap(asminfo.asmlen))
                except OperationError, e:
                    e.write_unraisable(space, "jit hook ", cache.w_compile_hook)
            finally:
                cache.in_recursion = False

    def _optimize_hook(self, debug_info, w_arg):
        """Notify the optimize hook; a non-None return replaces the op list."""
        space = self.space
        cache = space.fromcache(Cache)
        if cache.in_recursion:
            return
        if space.is_true(cache.w_optimize_hook):
            logops = debug_info.logger._make_log_operations()
            list_w = wrap_oplist(space, logops, debug_info.operations)
            cache.in_recursion = True
            try:
                try:
                    jd_name = debug_info.get_jitdriver().name
                    w_res = space.call_function(cache.w_optimize_hook,
                                                space.wrap(jd_name),
                                                space.wrap(debug_info.type),
                                                w_arg,
                                                space.newlist(list_w))
                    if space.is_w(w_res, space.w_None):
                        return
                    l = []
                    for w_item in space.listview(w_res):
                        item = space.interp_w(WrappedOp, w_item)
                        l.append(jit_hooks._cast_to_resop(item.op))
                    del debug_info.operations[:] # modifying operations above is
                    # probably not a great idea since types may not work
                    # and we'll end up with half-working list and
                    # a segfault/fatal RPython error
                    for elem in l:
                        debug_info.operations.append(elem)
                except OperationError, e:
                    e.write_unraisable(space, "jit hook ", cache.w_compile_hook)
            finally:
                cache.in_recursion = False
# Module-level singleton registered with the JIT as its hook interface.
pypy_hooks = PyPyJitIface()
class PyPyJitPolicy(JitPolicy):
    """JIT tracing policy: decides which interpreter code the JIT may enter."""

    def look_inside_pypy_module(self, modname):
        """True when the JIT may trace into pypy.module.<modname>."""
        # A handful of submodules are always traced, regardless of the
        # top-level whitelist below.
        always_inside = (
            '__builtin__.operation',
            '__builtin__.abstractinst',
            '__builtin__.interp_classobj',
            '__builtin__.functional',
            '__builtin__.descriptor',
            'thread.os_local',
            'thread.os_thread',
        )
        if modname in always_inside:
            return True
        top, _, rest = modname.partition('.')
        jit_friendly = frozenset([
            'pypyjit', 'signal', 'micronumpy', 'math', 'exceptions',
            'imp', 'sys', 'array', '_ffi', 'itertools', 'operator',
            'posix', '_socket', '_sre', '_lsprof', '_weakref',
            '__pypy__', 'cStringIO', '_collections', 'struct',
            'mmap', 'marshal', '_codecs', 'rctime',
        ])
        if top not in jit_friendly:
            return False
        # interp_resop implements the JIT hooks themselves; tracing into it
        # would recurse.
        if top == 'pypyjit' and 'interp_resop' in rest:
            return False
        return True

    def look_inside_function(self, func):
        """True when the JIT may trace into ``func`` (by its module name)."""
        mod = func.__module__ or '?'
        if mod in ('pypy.rlib.rbigint', 'pypy.rlib.rlocale',
                   'pypy.rlib.rsocket'):
            return False
        if mod.startswith('pypy.interpreter.astcompiler.'):
            return False
        if mod.startswith('pypy.interpreter.pyparser.'):
            return False
        if mod.startswith('pypy.module.'):
            return self.look_inside_pypy_module(mod[len('pypy.module.'):])
        return True
|
#!/bin/bash
#
# Fix: the shebang was written "# !/bin/bash" (with a space), which makes it
# an ordinary comment instead of an interpreter line.
#
# ################################################################################
#
# Contact:
# Weibo : jkpang-庞 http://weibo.com/u/5743737098/home?wvr=5&uut=fin&from=reg
# Email : jkpang@outlook.com
# QQ group : 323408051
# GitHub: https://github.com/jkpang
#
# ################################################################################
#
# Usage:
# step1 : drop the whole PPAutoPackageScript folder into the PROJECT ROOT directory
# step2 : open PPAutoPackageScript.sh and fill in the "project settings" section below
# step3 : open Terminal and cd into the PPAutoPackageScript folder
# step4 : run `sh PPAutoPackageScript.sh` to start packaging
# =============================== project settings (edit before running) ============================= #
# Timer for the total-duration report at the end.
SECONDS=0
# Whether to build a workspace (true for CocoaPods-managed .xcworkspace
# projects, false for a plain Xcode .xcodeproj).
is_workspace="true"
# The scheme name of the project.
# (Note: shell assignments allow no spaces around '='; a scheme or plist name
# containing spaces will break this script.)
scheme_name="TuringCalendar"
# The Target's plist file name; Xcode's default is Info.plist.
info_plist_name="Info"
# Build configuration: Release, Debug, ...
build_configuration="Release"
# =============================== automated packaging (normally no need to edit) ============================= #
# Path to the export-options plist (defaults to AdHoc).
ExportOptionsPlistPath="./PPAutoPackageScript/AdHocExportOptionsPlist.plist"
# Go up one level into the project directory.
cd ..
# Derive the project name from the .xcodeproj folder.
project_name=`find . -name *.xcodeproj | awk -F "[/.]" '{print $(NF-1)}'`
# Read version, build number and bundle identifier from the plist.
info_plist_path="$project_name/$info_plist_name.plist"
bundle_version=`/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" $info_plist_path`
bundle_build_version=`/usr/libexec/PlistBuddy -c "Print CFBundleVersion" $info_plist_path`
bundle_identifier=`/usr/libexec/PlistBuddy -c "Print CFBundleIdentifier" $info_plist_path`
# Remove any stale .xcarchive from a previous run.
rm -rf ~/Desktop/$scheme_name-IPA/$scheme_name.xcarchive
# Output directory for the ipa.
export_path=~/Desktop/$scheme_name-IPA
# Output path of the archive.
export_archive_path="$export_path/$scheme_name.xcarchive"
# Output path of the ipa.
export_ipa_path="$export_path"
# Final ipa name: scheme_name + bundle_version.
ipa_name="$scheme_name-v$bundle_version"
# Differences between AdHoc/AppStore/Enterprise exports:
# http://blog.csdn.net/lwjok2007/article/details/46379945
echo "\033[36;1m请选择打包方式(输入序号,按回车即可) \033[0m"
echo "\033[33;1m1. AdHoc \033[0m"
echo "\033[33;1m2. AppStore \033[0m"
echo "\033[33;1m3. Enterprise \033[0m"
echo "\033[33;1m4. Development \033[0m"
# Read the user's choice.
read parameter
sleep 0.5
method="$parameter"
# Map the choice to the matching export-options plist.
if [ -n "$method" ]
then
if [ "$method" = "1" ] ; then
ExportOptionsPlistPath="./PPAutoPackageScript/AdHocExportOptionsPlist.plist"
elif [ "$method" = "2" ] ; then
ExportOptionsPlistPath="./PPAutoPackageScript/AppStoreExportOptionsPlist.plist"
elif [ "$method" = "3" ] ; then
ExportOptionsPlistPath="./PPAutoPackageScript/EnterpriseExportOptionsPlist.plist"
elif [ "$method" = "4" ] ; then
ExportOptionsPlistPath="./PPAutoPackageScript/DevelopmentExportOptionsPlist.plist"
else
echo "输入的参数无效!!!"
exit 1
fi
fi
echo "\033[32m************************* 开始构建项目 ************************* \033[0m"
# Create the output directory if it does not exist yet.
if [ -d "$export_path" ] ; then
echo $export_path
else
mkdir -pv $export_path
fi
# Build either the workspace or the project, cleaning first.
if $is_workspace ; then
xcodebuild clean -workspace ${project_name}.xcworkspace \
-scheme ${scheme_name} \
-configuration ${build_configuration}
xcodebuild archive -workspace ${project_name}.xcworkspace \
-scheme ${scheme_name} \
-configuration ${build_configuration} \
-archivePath ${export_archive_path}
else
xcodebuild clean -project ${project_name}.xcodeproj \
-scheme ${scheme_name} \
-configuration ${build_configuration}
xcodebuild archive -project ${project_name}.xcodeproj \
-scheme ${scheme_name} \
-configuration ${build_configuration} \
-archivePath ${export_archive_path}
fi
# Check the build result.
# A .xcarchive is a directory, hence the -d test.
if [ -d "$export_archive_path" ] ; then
echo "\033[32;1m项目构建成功 🚀 🚀 🚀 \033[0m"
else
echo "\033[31;1m项目构建失败 😢 😢 😢 \033[0m"
exit 1
fi
echo "\033[32m************************* 开始导出ipa文件 ************************* \033[0m"
xcodebuild -exportArchive \
-archivePath ${export_archive_path} \
-exportPath ${export_ipa_path} \
-exportOptionsPlist ${ExportOptionsPlistPath}
# Rename the exported ipa.
mv $export_ipa_path/$scheme_name.ipa $export_ipa_path/$ipa_name.ipa
# Verify the ipa exists.
if [ -f "$export_ipa_path/$ipa_name.ipa" ] ; then
echo "\033[32;1m导出 ${ipa_name}.ipa 包成功 🎉 🎉 🎉 \033[0m"
open $export_path
else
echo "\033[31;1m导出 ${ipa_name}.ipa 包失败 😢 😢 😢 \033[0m"
# Pointers for common failures:
echo "\033[33mps:以下类型的错误可以参考对应的链接\033[0m"
echo "\033[33m 1.\"error: exportArchive: No applicable devices found.\" --> 可能是ruby版本过低导致,升级最新版ruby再试,升级方法自行百度/谷歌,GitHub issue: https://github.com/jkpang/PPAutoPackageScript/issues/1#issuecomment-297589697"
echo "\033[33m 2.\"No valid iOS Distribution signing identities belonging to team 6F4Q87T7VD were found.\" --> http://fight4j.github.io/2016/11/21/xcodebuild/ \033[0m"
exit 1
fi
# Report total packaging time.
echo "\033[36;1m使用PPAutoPackageScript打包总用时: ${SECONDS}s \033[0m"
|
import { UserStateContext } from "@/context/UserContext";
import Head from "next/head";
import { useRouter } from "next/router";
import { useContext, useState } from "react";
import styles from '../../styles/layout.module.scss';
import Button from "../button/button";
import ContextMenu from "../contextMenu/contextMenu";
import { Props } from "./layout.type";
import GithubIcon from '../../images/github.svg';
import UserIcon from '../../images/user.svg';
/**
 * Application shell: document head, auth-aware header navigation,
 * main content area and footer. Wraps every page.
 *
 * Fix: the second favicon link declared type="imsage/x-icon" (typo) —
 * corrected to the valid MIME type "image/x-icon".
 */
export default function Layout({ children }: Props) {
  const router = useRouter();
  const state = useContext(UserStateContext);
  const [isContextMenuOpen, setIsContextMenuOpen] = useState(false);

  const isLoginPage = router.pathname === '/login';
  const isAuth = state.isAuth;

  const toggleContextMenu = () => {
    setIsContextMenuOpen(!isContextMenuOpen);
  }

  const closeContextMenu = () => {
    setIsContextMenuOpen(false);
  }

  // Close the menu before navigating so it is not left open on the next page.
  const handleNavigation = (path: string) => {
    closeContextMenu();
    router.push(path);
  }

  return (
    <div id="layoutRoot">
      <Head>
        <title>Tabula - Vaal Your Notes</title>
        <link rel="shortcut icon" href="/favicon.ico" type="image/x-icon" />
        <link rel="icon" href="/favicon.ico" type="image/x-icon" />
      </Head>
      <header className={styles.headerBar}>
        <div className={styles.logo}>
          <a href="/" role="link" tabIndex={0}>
            Tabula
          </a>
        </div>
        <nav className={styles.nav}>
          {!isLoginPage && !isAuth && <Button href="/login"><span>Login</span></Button>}
          {!isLoginPage && isAuth && (
            <div className={styles.user}>
              <span role="button" tabIndex={0} onClick={() => toggleContextMenu()}>
                <img src={UserIcon} alt="User Icon" />
              </span>
            </div>
          )}
          {!isLoginPage && isAuth && isContextMenuOpen && (
            <ContextMenu
              menuItems={[
                {
                  id: "pages",
                  label: "Pages",
                  action: () => handleNavigation("/pages"),
                },
                {
                  id: "account",
                  label: "Account",
                  action: () => handleNavigation("/account"),
                },
                {
                  id: "logout",
                  label: "Logout",
                  action: () => handleNavigation("/logout"),
                },
              ]}
              closeAction={() => closeContextMenu()}
              isTopNavigation={true}
            />
          )}
        </nav>
      </header>
      <main className={styles.content}>{children}</main>
      <footer className={styles.footerBar}>
        <hr className={styles.hr} />
        <div className={styles.github}>
          <a
            href="https://github.com/tedzchu/tabula"
            rel="noopener noreferrer"
            role="link"
            tabIndex={0}
          >
            <img src={GithubIcon} alt="Github Icon" />
          </a>
        </div>
      </footer>
    </div>
  )
}
|
<gh_stars>1-10
# frozen_string_literal: true
require 'rails_helper'
# Component spec: renders one onboarding button per configured channel.
# With Signal unconfigured it expects 3 buttons; configuring a Signal server
# phone number adds a fourth and switches to the "--even" layout modifier.
RSpec.describe OnboardingChannelButtons::OnboardingChannelButtons, type: :component do
  subject { render_inline(described_class.new(**params)) }

  let(:params) { {} }
  let(:phone_number) { nil }

  # Stub the Signal configuration; nil means Signal is not set up.
  before(:each) { allow(Setting).to receive(:signal_server_phone_number).and_return(phone_number) }

  it { should_not have_css('.OnboardingChannelButtons--even') }
  it { should have_css('.Button').exactly(3).times }

  context 'if Signal is set up' do
    let(:phone_number) { '+491234567890' }

    it { should have_css('.Button').exactly(4).times }
    it { should have_css('.OnboardingChannelButtons--even') }
  end
end
|
package controllers
import java.net.URI
import java.net.URLDecoder.decode
import com.amazonaws.AmazonServiceException
import com.gu.mediaservice.lib.argo.ArgoHelpers
import com.gu.mediaservice.lib.argo.model._
import com.gu.mediaservice.lib.auth.Authentication
import com.gu.mediaservice.lib.aws.{NoItemFound, UpdateMessage}
import com.gu.mediaservice.lib.formatting._
import com.gu.mediaservice.model._
import lib._
import org.joda.time.DateTime
import play.api.data.Forms._
import play.api.data._
import play.api.libs.json._
import play.api.mvc.{BaseController, ControllerComponents}
import scala.concurrent.{ExecutionContext, Future}
// FIXME: the argoHelpers are all returning `Ok`s (200)
// Some of these responses should be `Accepted` (202)
// TODO: Look at adding actions e.g. to collections / sets where we could `PUT`
// a singular collection item e.g.
// {
// "labels": {
// "uri": "...",
// "data": [],
// "actions": [
// {
// "method": "PUT",
// "rel": "create",
// "href": "../metadata/{id}/labels/{label}"
// }
// ]
// }
// }
class EditsController(auth: Authentication, store: EditsStore, notifications: Notifications, config: EditsConfig,
override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext)
extends BaseController with ArgoHelpers with EditsResponse {
  import UsageRightsMetadataMapper.usageRightsToMetadata

  val metadataBaseUri = config.services.metadataBaseUri

  // URL-decode a path parameter (labels may arrive percent-encoded).
  def decodeUriParam(param: String): String = decode(param, "UTF-8")

  // TODO: Think about calling this `overrides` or something that isn't metadata
  // Returns every user edit for the image; a missing store record is mapped
  // to an empty Edits entity rather than a 404.
  def getAllMetadata(id: String) = auth.async {
    val emptyResponse = respond(Edits.getEmpty)(editsEntity(id))
    store.get(id) map { dynamoEntry =>
      dynamoEntry.asOpt[Edits]
        .map(respond(_)(editsEntity(id)))
        .getOrElse(emptyResponse)
    } recover { case NoItemFound => emptyResponse }
  }

  // Like getAllMetadata, but a missing record yields 404 Not Found.
  def getEdits(id: String) = auth.async {
    store.get(id) map { dynamoEntry =>
      val edits = dynamoEntry.asOpt[Edits]
      respond(data = edits)
    } recover { case NoItemFound => NotFound }
  }
  // Returns the image's archived flag; a missing record or key reads as false.
  def getArchived(id: String) = auth.async {
    store.booleanGet(id, "archived") map { archived =>
      respond(archived.getOrElse(false))
    } recover {
      case NoItemFound => respond(false)
    }
  }

  // Sets the archived flag from the request body's `data` field, publishes
  // the updated edits and echoes the new value; malformed bodies are a 400.
  def setArchived(id: String) = auth.async(parse.json) { implicit req =>
    (req.body \ "data").validate[Boolean].fold(
      errors =>
        Future.successful(BadRequest(errors.toString())),
      archived =>
        store.booleanSetOrRemove(id, "archived", archived)
          .map(publish(id))
          .map(edits => respond(edits.archived))
    )
  }

  // Clears the archived flag, publishes, and reports it as false.
  def unsetArchived(id: String) = auth.async {
    store.removeKey(id, "archived")
      .map(publish(id))
      .map(_ => respond(false))
  }
  // Lists the image's labels as an Argo collection; missing record => empty.
  def getLabels(id: String) = auth.async {
    store.setGet(id, "labels")
      .map(labelsCollection(id, _))
      .map {case (uri, labels) => respondCollection(labels)} recover {
      case NoItemFound => respond(Array[String]())
    }
  }

  // Fetches the photoshoot override; 404 when neither record nor key exists.
  def getPhotoshoot(id: String) = auth.async {
    store.jsonGet(id, "photoshoot").map(dynamoEntry => {
      (dynamoEntry \ "photoshoot").toOption match {
        case Some(photoshoot) => respond(photoshoot.as[Photoshoot])
        case None => respondNotFound("No photoshoot found")
      }
    }) recover {
      case NoItemFound => respondNotFound("No photoshoot found")
    }
  }

  // Stores the photoshoot from the request body and publishes the dedicated
  // "update-image-photoshoot" message; malformed bodies are a 400.
  def setPhotoshoot(id: String) = auth.async(parse.json) { req => {
    (req.body \ "data").asOpt[Photoshoot].map(photoshoot => {
      store.jsonAdd(id, "photoshoot", caseClassToMap(photoshoot))
        .map(publish(id, "update-image-photoshoot"))
        .map(_ => respond(photoshoot))
    }).getOrElse(
      Future.successful(respondError(BadRequest, "invalid-form-data", "Invalid form data"))
    )
  }}

  // Removes the photoshoot override and acknowledges with 202 Accepted.
  def deletePhotoshoot(id: String) = auth.async {
    store.removeKey(id, "photoshoot")
      .map(publish(id, "update-image-photoshoot"))
      .map(_ => Accepted)
  }
  // Adds the labels in the body's `data` array to the image's label set,
  // publishes, and responds with the full resulting collection.
  def addLabels(id: String) = auth.async(parse.json) { req =>
    (req.body \ "data").validate[List[String]].fold(
      errors =>
        Future.successful(BadRequest(errors.toString())),
      labels =>
        store
          .setAdd(id, "labels", labels)
          .map(publish(id))
          .map(edits => labelsCollection(id, edits.labels.toSet))
          .map { case (uri, l) => respondCollection(l) } recover {
          // DynamoDB rejects e.g. empty strings in a string set.
          case _: AmazonServiceException => BadRequest
        }
    )
  }

  // Removes a single (URL-encoded) label and returns the remaining set.
  def removeLabel(id: String, label: String) = auth.async {
    store.setDelete(id, "labels", decodeUriParam(label))
      .map(publish(id))
      .map(edits => labelsCollection(id, edits.labels.toSet))
      .map {case (uri, labels) => respondCollection(labels, uri=Some(uri))}
  }
  // Returns the metadata overrides; a missing record is an empty JSON object.
  def getMetadata(id: String) = auth.async {
    store.jsonGet(id, "metadata").map { dynamoEntry =>
      val metadata = (dynamoEntry \ "metadata").as[ImageMetadata]
      respond(metadata)
    } recover {
      case NoItemFound => respond(Json.toJson(JsObject(Nil)))
    }
  }

  // Replaces the metadata overrides from the body's `data` field and
  // publishes; malformed bodies are a 400.
  def setMetadata(id: String) = auth.async(parse.json) { req =>
    (req.body \ "data").validate[ImageMetadata].fold(
      errors => Future.successful(BadRequest(errors.toString())),
      metadata =>
        store.jsonAdd(id, "metadata", metadataAsMap(metadata))
          .map(publish(id))
          .map(edits => respond(edits.metadata))
    )
  }
// Derives byline/credit metadata from the image's usage rights and merges it
// into the stored metadata (derived values take precedence; existing values
// are kept as fallback), persists and publishes the merge, and responds with
// the result. When nothing can be derived the current metadata is returned
// unchanged; a missing image yields 404 "item-not-found".
def setMetadataFromUsageRights(id: String) = auth.async { req =>
  store.get(id) flatMap { dynamoEntry =>
    val edits = dynamoEntry.as[Edits]
    val originalMetadata = edits.metadata
    val metadataOpt = edits.usageRights.flatMap(usageRightsToMetadata)
    metadataOpt map { metadata =>
      // Derived byline/credit override the stored ones when present.
      val mergedMetadata = originalMetadata.copy(
        byline = metadata.byline orElse originalMetadata.byline,
        credit = metadata.credit orElse originalMetadata.credit
      )
      store.jsonAdd(id, "metadata", metadataAsMap(mergedMetadata))
        .map(publish(id))
        .map(edits => respond(edits.metadata, uri = Some(metadataUri(id))))
    } getOrElse {
      // just return the unmodified
      Future.successful(respond(edits.metadata, uri = Some(metadataUri(id))))
    }
  } recover {
    case NoItemFound => respondError(NotFound, "item-not-found", "Could not find image")
  }
}
// Returns the usage-rights override stored for image `id`, or 404 when no
// override exists.
def getUsageRights(id: String) = auth.async {
  store.jsonGet(id, "usageRights").map { dynamoEntry =>
    val usageRights = (dynamoEntry \ "usageRights").as[UsageRights]
    respond(usageRights)
  } recover {
    case NoItemFound => respondNotFound("No usage rights overrides found")
  }
}
// Replaces the usage-rights override for image `id` with the UsageRights
// parsed from the body's "data" field, publishes an update, and echoes it.
// Responds 400 "invalid-form-data" when the body does not parse.
def setUsageRights(id: String) = auth.async(parse.json) { req =>
  (req.body \ "data").asOpt[UsageRights].map(usageRight => {
    store.jsonAdd(id, "usageRights", caseClassToMap(usageRight))
      .map(publish(id))
      .map(edits => respond(usageRight))
  }).getOrElse(Future.successful(respondError(BadRequest, "invalid-form-data", "Invalid form data")))
}
// Removes the usage-rights override for image `id`, publishes an update, and
// replies 202 Accepted.
def deleteUsageRights(id: String) = auth.async { req =>
  store.removeKey(id, "usageRights").map(publish(id)).map(edits => Accepted)
}
// TODO: Move this to the dynamo lib
// Serialises any case class (with a Writes instance) into a field map
// suitable for storage as a Dynamo JSON attribute.
def caseClassToMap[T](caseClass: T)(implicit tjs: Writes[T]): Map[String, JsValue] =
  Json.toJson[T](caseClass).as[JsObject].as[Map[String, JsValue]]
// Builds the (collection URI, embedded label entities) pair for image `id`.
def labelsCollection(id: String, labels: Set[String]): (URI, Seq[EmbeddedEntity[String]]) =
  (labelsUri(id), labels.map(setUnitEntity(id, "labels", _)).toSeq)
// Parses the stored JSON into Edits, publishes an UpdateMessage for `id`
// under the given subject, and returns the parsed Edits so callers can keep
// chaining on the Future.
def publish(id: String, subject: String = "update-image-user-metadata")(metadata: JsObject): Edits = {
  val edits = metadata.as[Edits]
  val updateMessage = UpdateMessage(subject = subject, id = Some(id), edits = Some(edits))
  notifications.publish(updateMessage)
  edits
}
// Serialises ImageMetadata into a field map for Dynamo storage.
def metadataAsMap(metadata: ImageMetadata) = {
  (Json.toJson(metadata).as[JsObject]).as[Map[String, JsValue]]
}
}
case class EditsValidationError(key: String, message: String) extends Throwable
|
# Clean up transcript text in all_u_lc.txt, in place:
# - collapse doubled single-quotes into one
sed -i "s/''/'/g" all_u_lc.txt
# - drop "(inaudible)" markers
sed -i -E "s/\(inaudible\)//g" all_u_lc.txt
# - drop timestamps such as [12:34]
#   (bug fix: the pattern was "[0-9]+*" — a stray `*` quantifier directly
#   after `+`, which is invalid/undefined in extended regex syntax)
sed -r -i "s/\[[0-9]+:[0-9]+\]//g" all_u_lc.txt
# - drop "(ph)" (phonetic) markers
sed -i -E "s/\(ph\)//g" all_u_lc.txt
|
def determine_output(a, b, ind, passing):
    """Return an "Output result" string for a comparison check.

    The result is "Output result: 0" only when `a` equals `b`, `ind` is 0 and
    `passing` is 0; in every other case it is "Output result: 1".

    The original implementation mutated `ind`/`passing` through a redundant
    if/else chain; this version collapses it to the single condition that the
    chain actually computed.
    """
    if a != b:
        ind = 1
    if ind == 0 and passing == 0:
        return "Output result: 0"
    return "Output result: 1"
|
#! /bin/zsh
# A script to make using 256 colors in zsh less painful.
# P.C. Shyamshankar <sykora@lucentbeing.com>
# Copied from http://github.com/sykora/etc/blob/master/zsh/functions/spectrum/

# FX: text effects; FG/BG: 256-entry foreground/background color tables.
# (-A associative array, -H hide value in `typeset` output, -g global)
typeset -AHg FX FG BG

# NOTE(review): each bracketed sequence below should start with a literal
# escape character (ESC, 0x1b) before the '[' — confirm it survived copy/paste.
FX=(
    reset "%{[00m%}"
    bold "%{[01m%}" no-bold "%{[22m%}"
    italic "%{[03m%}" no-italic "%{[23m%}"
    underline "%{[04m%}" no-underline "%{[24m%}"
    blink "%{[05m%}" no-blink "%{[25m%}"
    reverse "%{[07m%}" no-reverse "%{[27m%}"
)

# Populate the 256-color foreground and background escape tables.
for color in {000..255}; do
    FG[$color]="%{[38;5;${color}m%}"
    BG[$color]="%{[48;5;${color}m%}"
done

# Sample text printed by the spectrum_* demo functions (overridable via env).
ZSH_SPECTRUM_TEXT=${ZSH_SPECTRUM_TEXT:-Arma virumque cano Troiae qui primus ab oris}

# Show all 256 colors with color number
# NOTE(review): $reset_color is not defined here — presumably provided by
# zsh's colors module or the surrounding theme; verify.
function spectrum_ls() {
  for code in {000..255}; do
    print -P -- "$code: %{$FG[$code]%}$ZSH_SPECTRUM_TEXT%{$reset_color%}"
  done
}

# Show all 256 colors where the background is set to specific color
function spectrum_bls() {
  for code in {000..255}; do
    print -P -- "$code: %{$BG[$code]%}$ZSH_SPECTRUM_TEXT%{$reset_color%}"
  done
}

# Show the 16 base colors, each in normal and bold weight.
function spectrum_lls() {
  for code in {000..015}; do
    print -P -- "$code: %{$FG[$code]%}$ZSH_SPECTRUM_TEXT%{$reset_color%}"
    print -P -- "$code: %{$FX[bold]$FG[$code]%}$ZSH_SPECTRUM_TEXT%{$reset_color%}"
  done
}
|
// class defining a light
const { vec3 } = glMatrix;

export class Light {
  // Given a bottom-left corner (vec3), width (vec3), number of columns (int),
  // height (vec3), number of rows (int), and color values r, g, b (int),
  // construct an area light
  constructor(corner, uvecFull, usteps, vsteps, r, g, b) { /* placeholder */ }
}
|
def max_sum(arr):
    """Kadane's algorithm: maximum contiguous-subarray sum of `arr`.

    Returns 0 for an empty list or a list whose elements are all negative
    (the empty subarray is allowed).
    """
    best = 0
    running = 0
    for value in arr:
        # Extend the running sum, abandoning it whenever it drops below zero.
        running = max(0, running + value)
        best = max(best, running)
    return best
# Demo: the maximum contiguous-subarray sum here is 22 (8 - 5 + 9 + 10).
arr = [2, -4, 8, -5, 9, 10]
res = max_sum(arr)
print(res)
|
def word_frequency(sentence):
    """Return a dict mapping each whitespace-separated word to its count.

    Words are compared case-sensitively and punctuation is kept attached.
    """
    freq = {}
    for word in sentence.split():
        freq[word] = freq.get(word, 0) + 1
    return freq


if __name__ == '__main__':
    # Bug fix: this previously called `words_frequency(...)`, which raised a
    # NameError — the function is named `word_frequency`.
    print(word_frequency("This is a random sentence"))
|
/*
* IXCobraMetricsToRedisBot.cpp
* Author: <NAME>
* Copyright (c) 2020 Machine Zone, Inc. All rights reserved.
*/
#include "IXCobraMetricsToRedisBot.h"
#include "IXCobraBot.h"
#include "IXStatsdClient.h"
#include <chrono>
#include <ixcobra/IXCobraConnection.h>
#include <ixcore/utils/IXCoreLogger.h>
#include <sstream>
#include <vector>
#include <algorithm>
#include <map>
#include <cctype>
namespace
{
    // Return a copy of `str` with every whitespace character removed.
    std::string removeSpaces(const std::string& str)
    {
        std::string out;
        out.reserve(str.size());
        std::copy_if(str.begin(), str.end(), std::back_inserter(out),
                     [](char c)
                     { return !std::isspace(static_cast<unsigned char>(c)); });
        return out;
    }
}
namespace ix
{
    // Computes a "slow frames" count from a perf-metrics event (the sum of
    // FrameRateHistogramCounts buckets 4..7) and records it in Redis in three
    // ways:
    //   1. XADD to a stream keyed by event id/game/os/tag (capped at 100000),
    //   2. XADD to a stream additionally keyed by device id (capped at 1000),
    //   3. ZINCRBY on a per-game zset so the most active devices can be ranked.
    // Redis failures are only logged; the function always returns true.
    // NOTE(review): the meaning of buckets 4..7 isn't visible here — confirm
    // against the event producer.
    bool processPerfMetricsEventSlowFrames(const Json::Value& msg,
                                           RedisClient& redisClient,
                                           const std::string& deviceId)
    {
        auto frameRateHistogramCounts = msg["data"]["FrameRateHistogramCounts"];

        int slowFrames = 0;
        slowFrames += frameRateHistogramCounts[4].asInt();
        slowFrames += frameRateHistogramCounts[5].asInt();
        slowFrames += frameRateHistogramCounts[6].asInt();
        slowFrames += frameRateHistogramCounts[7].asInt();

        //
        // XADD without a device id
        //
        std::stringstream ss;
        ss << msg["id"].asString() << "_slow_frames" << "."
           << msg["device"]["game"].asString() << "."
           << msg["device"]["os_name"].asString() << "."
           << removeSpaces(msg["data"]["Tag"].asString());

        int maxLen;
        maxLen = 100000;
        std::string id = ss.str();

        std::string errMsg;
        // An empty return value from xadd signals failure.
        if (redisClient.xadd(id, std::to_string(slowFrames), maxLen, errMsg).empty())
        {
            CoreLogger::info(std::string("redis XADD error: ") + errMsg);
        }

        //
        // XADD with a device id
        //
        ss.str(""); // reset the stringstream

        ss << msg["id"].asString() << "_slow_frames_by_device" << "."
           << deviceId << "."
           << msg["device"]["game"].asString() << "."
           << msg["device"]["os_name"].asString() << "."
           << removeSpaces(msg["data"]["Tag"].asString());

        id = ss.str();
        maxLen = 1000;

        if (redisClient.xadd(id, std::to_string(slowFrames), maxLen, errMsg).empty())
        {
            CoreLogger::info(std::string("redis XADD error: ") + errMsg);
        }

        //
        // Add device to the device zset, and increment the score
        // so that we know which devices are used more than others
        // ZINCRBY myzset 1 one
        //
        ss.str(""); // reset the stringstream

        ss << msg["id"].asString() << "_slow_frames_devices" << "."
           << msg["device"]["game"].asString();
        id = ss.str();

        std::vector<std::string> args = {
            "ZINCRBY", id, "1", deviceId
        };

        auto response = redisClient.send(args, errMsg);
        if (response.first == RespType::Error)
        {
            CoreLogger::info(std::string("redis ZINCRBY error: ") + errMsg);
        }

        return true;
    }
    // Runs a Cobra bot that forwards "engine_performance_metrics_id" events
    // to Redis via processPerfMetricsEventSlowFrames. Events lacking a
    // "device" or "id" field are skipped; `verbose` additionally logs every
    // received message. Returns whatever CobraBot::run returns.
    int64_t cobra_metrics_to_redis_bot(const ix::CobraBotConfig& config,
                                       RedisClient& redisClient,
                                       bool verbose)
    {
        CobraBot bot;
        bot.setOnBotMessageCallback(
            [&redisClient, &verbose, &bot]
            (const Json::Value& msg,
             const std::string& /*position*/,
             std::atomic<bool>& /*throttled*/,
             std::atomic<bool>& /*fatalCobraError*/,
             std::atomic<uint64_t>& sentCount) -> void {
                if (msg["device"].isNull())
                {
                    CoreLogger::info("no device entry, skipping event");
                    return;
                }

                if (msg["id"].isNull())
                {
                    CoreLogger::info("no id entry, skipping event");
                    return;
                }

                //
                // Display full message with
                if (verbose)
                {
                    CoreLogger::info(msg.toStyledString());
                }

                bool success = false;
                if (msg["id"].asString() == "engine_performance_metrics_id")
                {
                    auto deviceId = bot.getDeviceIdentifier(msg);
                    success = processPerfMetricsEventSlowFrames(msg, redisClient, deviceId);
                }

                // Only successfully forwarded events count toward sentCount.
                if (success) sentCount++;
            });

        return bot.run(config);
    }
} // namespace ix
|
#!/bin/bash
# Copyright 2018 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Debug output: show the container's root filesystem and the current user/ids.
ls -l /
id
# Load the PostgreSQL environment (paths, variables) shipped with the image.
source /opt/cpm/bin/setenv.sh
# Initialize a fresh PostgreSQL data directory.
initdb /pgdata
|
#!/usr/bin/env bash
#==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
# Run this script to set up the topic stage for pushing changes.
# Run egrep quietly: succeed/fail by match status only, never print.
egrep-q() {
  egrep "$@" >/dev/null 2>/dev/null
}

# Print an error banner plus the given message, then abort the script.
die() {
  echo 'Failure during topic stage setup.' 1>&2
  echo '---------------------------------' 1>&2
  echo '' 1>&2
  echo "$@" 1>&2
  exit 1
}

# Make sure we are inside the repository.
cd "$(echo "$0"|sed 's/[^/]*$//')"

# Register the "stage" remote once: anonymous git:// for fetch, SSH for push.
if git config remote.stage.url >/dev/null; then
  echo "Topic stage remote was already configured."
else
  echo "Configuring the topic stage remote..."
  git remote add stage git://itk.org/stage/SimpleITK.git || \
    die "Could not add the topic stage remote."
  git config remote.stage.pushurl git@itk.org:stage/SimpleITK.git
fi

# Optionally (interactive) verify SSH push access to itk.org.
read -ep "Do you want to test push access itk.org? [y/N]: " access
if [ "$access" == "y" ] || [ "$access" == "Y" ]; then
  echo "Configuring push urls..."
  if [ "`git config remote.origin.url`" == "git://itk.org/SimpleITK.git" ]; then
    git config remote.origin.pushurl git@itk.org:SimpleITK.git
  fi
  pushd ../../Testing/Data >/dev/null
  # not sure how the SimpleITKData should be managed... so do as if the one at
  # itk.org was the only where we want to push
  git config remote.origin.pushurl git@itk.org:SimpleITKData.git
  popd >/dev/null
  echo -e "Done.\n"
  # We will have the private key corresponding the public key at itk.org at
  # ~/.ssh/id_git_itk. This allows the developer to keep a single public key
  # on file with the server across multiple machines.
  if ! egrep-q 'Host itk\.org' ~/.ssh/config; then
    echo "Configuring the IdentityFile for itk.org to be ~/.ssh/id_git_itk..."
    mkdir -p ~/.ssh
    chmod og-rwx ~/.ssh
    echo "Host itk.org" >> ~/.ssh/config
    echo " IdentityFile=~/.ssh/id_git_itk" >> ~/.ssh/config
    chmod 600 ~/.ssh/config
  fi
  # Ensure the key file exists, symlinking the common id_rsa case.
  if ! test -e ~/.ssh/id_git_itk; then
    if test -f ~/.ssh/id_rsa; then
      # Take care of the common case.
      pushd ~/.ssh >/dev/null
      ln -s id_rsa id_git_itk
      popd >/dev/null
      cat << EOF
Assuming ~/.ssh/id_rsa is the private key corresponding to the public key given
for the 'git' user at itk.org. If this is not the case, please place the
appropriate private key at ~/.ssh/id_git_itk.
EOF
      read -e -n 1 -p "Press any key to continue..."
    else
      cat << EOF
Please place the private key corresponding to the public key registered at
itk.org in '~/.ssh/id_git_itk'.
EOF
      read -e -n 1 -p "Press any key to continue..."
    fi
  fi
  echo "Testing ssh capabilities..."
  git ls-remote git@itk.org:stage/SimpleITK.git refs/heads/master || \
    die "SSH test to git@itk.org failed. You may need to request access at:
https://www.kitware.com/Admin/SendPassword.cgi
Note that push access to the stage/SimpleITK is separate to Gerrit.
"
  echo "Test successful! SimpleITK push access confirmed. Summary of project access:"
  echo
  # This command queries gitolite for your access privileges
  ssh git@itk.org info
fi
echo "Done."
|
package com.designre.blog.controller.admin;
import com.designre.blog.model.dto.TagInfoDto;
import com.designre.blog.util.RestResponse;
import com.designre.blog.model.entity.Tag;
import com.designre.blog.model.param.SaveTagParam;
import com.designre.blog.service.TagService;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
 * Admin REST endpoints for managing blog tags, rooted at /api/admin/tag.
 * Thin delegation layer over {@link TagService}.
 */
@RestController
@RequestMapping("/api/admin/tag")
@RequiredArgsConstructor(onConstructor_ = @Autowired)
public class TagController {

    private final TagService tagService;

    /**
     * Lists all tags with their info DTOs.
     * NOTE(review): the meaning of the {@code false} flag is not visible here —
     * confirm against {@code TagService.listTagInfo}.
     */
    @GetMapping
    public RestResponse<List<TagInfoDto>> getAll() {
        List<TagInfoDto> tagInfos = tagService.listTagInfo(false);
        return RestResponse.ok(tagInfos);
    }

    /** Deletes the tag with the given id. */
    @DeleteMapping("{id}")
    public RestResponse<RestResponse.Empty> delete(@PathVariable Integer id) {
        tagService.delete(id);
        return RestResponse.ok();
    }

    /** Creates a new tag or updates an existing one from the validated body. */
    @PostMapping
    public RestResponse<Tag> save(@RequestBody @Valid SaveTagParam param) {
        Tag tag = tagService.createOrUpdate(param);
        return RestResponse.ok(tag);
    }
}
|
<filename>cw_duplicate_encoder.py
"""Codewars: Duplicate Encoder
6 kyu
URL: https://www.codewars.com/kata/54b42f9314d9229fd6000d9c/
The goal of this exercise is to convert a string to a new string where
each character in the new string is "(" if that character appears only once
in the original string, or ")" if that character appears more than once in
the original string. Ignore capitalization when determining if a character
is a duplicate.
Examples
"din" => "((("
"recede" => "()()()"
"Success" => ")())())"
"(( @" => "))(("
"""
def duplicate_encode(word):
    """Encode `word` as parentheses: '(' for characters that occur exactly
    once (ignoring case), ')' for characters that occur more than once.

    Time complexity: O(n).
    Space complexity: O(n).
    """
    lowered = word.lower()
    # Tally case-folded character occurrences.
    counts = {}
    for ch in lowered:
        counts[ch] = counts.get(ch, 0) + 1
    # Emit ')' for duplicated characters, '(' for unique ones.
    return ''.join(')' if counts[ch] > 1 else '(' for ch in lowered)
def main():
    # Demo of duplicate_encode on the kata's example inputs.
    # NOTE: these are Python 2 print statements; this module is Python 2 only.
    # "din" => "((("
    word = "din"
    print duplicate_encode(word)

    # "recede" => "()()()"
    word = "recede"
    print duplicate_encode(word)

    # "Success" => ")())())"
    word = "Success"
    print duplicate_encode(word)

    # "(( @" => "))(("
    word = "(( @"
    print duplicate_encode(word)


if __name__ == '__main__':
    main()
|
<reponame>bingenperez/nba
/*
stats always end up with a thing like:
{
[someKeyName]: [ ... relevant results ... ]
}
*/
function unnest (obj) {
  // Unwrap `{ someKey: [ ...items ] }` into the array it contains.
  // On any shape mismatch, log the problem and hand back `obj` untouched.
  const [onlyKey, ...extraKeys] = Object.keys(obj);
  if (onlyKey === undefined || extraKeys.length > 0) {
    console.error("unnest() only works on objects with a single key");
    return obj;
  }
  const value = obj[onlyKey];
  if (!Array.isArray(value)) {
    console.error("unnest() expects the only key to reference an array");
    return obj;
  }
  return value;
}
module.exports = unnest;
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3136-1
#
# Security announcement date: 2016-11-23 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:45 UTC
#
# Operating System: Ubuntu 16.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - lxc1:2.0.5-0ubuntu1~ubuntu16.04.3
# - liblxc1:2.0.5-0ubuntu1~ubuntu16.04.3
#
# Last versions recommanded by security team:
# - lxc1:2.0.5-0ubuntu1~ubuntu16.04.3
# - liblxc1:2.0.5-0ubuntu1~ubuntu16.04.3
#
# CVE List:
# - CVE-2016-8649
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade only the vulnerable LXC packages to the fixed versions (USN-3136-1).
sudo apt-get install --only-upgrade lxc1=2.0.5-0ubuntu1~ubuntu16.04.3 -y
sudo apt-get install --only-upgrade liblxc1=2.0.5-0ubuntu1~ubuntu16.04.3 -y
|
#!/bin/bash
# Clean build artifacts in the given workspace.
# Usage: <script> <workspace-dir>

# Script args
WORKSPACE=$1

# Fail fast when no workspace was supplied (the unchecked `cd $1` previously
# ran `make clean` in the current directory on a missing/empty argument).
if [ -z "$WORKSPACE" ]; then
  echo "usage: $0 <workspace-dir>" >&2
  exit 1
fi

# Quote the path so directories containing spaces work.
cd "$WORKSPACE" && make clean
|
<reponame>raulrozza/Gametask_Web
import React, { useCallback } from 'react';
import IActivity from 'modules/dashboard/domain/entities/IActivity';
import { EditButton, RemoveButton } from 'modules/dashboard/view/components';
import { Container, Experience, Name, Rules } from './styles';
/** Props for the ActivityCard component. */
interface ActivityProps {
  /** The activity to display. */
  activity: IActivity;
  /** Opens the activity editor pre-filled with the given activity. */
  openEditorWith(activity: IActivity): void;
  /** Removes the activity with the given id. */
  removeActivity(id: string): void;
}
// Card showing one activity (XP, name, optional description and DM rules)
// with remove/edit action buttons.
const ActivityCard: React.FC<ActivityProps> = ({
  activity,
  openEditorWith,
  removeActivity,
}) => {
  // useCallback keeps the handler identities stable across re-renders.
  const handleDeleteActivity = useCallback(() => removeActivity(activity.id), [
    activity.id,
    removeActivity,
  ]);

  const handleEditActivity = useCallback(() => openEditorWith(activity), [
    activity,
    openEditorWith,
  ]);

  return (
    <Container>
      <Experience>{activity.experience} XP</Experience>
      <Name>{activity.name}</Name>
      {activity.description && <div>{activity.description}</div>}
      {activity.dmRules && (
        <Rules>
          Regras: <cite>{activity.dmRules}</cite>
        </Rules>
      )}
      <RemoveButton
        horizontalPosition="right"
        title="Excluir atividade"
        onClick={handleDeleteActivity}
      />
      <EditButton title="Editar atividades" onClick={handleEditActivity} />
    </Container>
  );
};
export default ActivityCard;
|
#!/usr/bin/env bash

# WARNING: NGBUILDS_IO_KEY should NOT be printed.
set +x -eu -o pipefail

# Duplicate stdout on FD 3 so the curl response can be tee'd to the console
# while its last line (the HTTP status) is captured below.
exec 3>&1

readonly INPUT_DIR=dist/
readonly OUTPUT_FILE=/tmp/snapshot.tar.gz
readonly AIO_BUILDS_DOMAIN=ngbuilds.io
readonly UPLOAD_URL=https://$AIO_BUILDS_DOMAIN/create-build/$TRAVIS_PULL_REQUEST/$TRAVIS_PULL_REQUEST_SHA
readonly SHORT_SHA=$(echo $TRAVIS_PULL_REQUEST_SHA | cut -c1-7)
readonly DEPLOYED_URL=https://pr$TRAVIS_PULL_REQUEST-$SHORT_SHA.$AIO_BUILDS_DOMAIN
readonly skipBuild=$([[ "$1" == "--skip-build" ]] && echo "true" || echo "");
# Count changed files under aio/ or packages/, excluding *_spec/*.spec files.
readonly relevantChangedFilesCount=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -P "^(?:aio|packages)/(?!.*[._]spec\.[jt]s$)" | wc -l)

(
  cd "`dirname $0`/.."

  # Do not deploy unless this PR has touched relevant files: `aio/` or `packages/` (except for spec files)
  if [[ $relevantChangedFilesCount -eq 0 ]]; then
    echo "Skipping deploy because this PR did not touch any relevant files."
    exit 0
  fi

  # Build the app
  if [ "$skipBuild" != "true" ]; then
    yarn build
  fi
  tar --create --gzip --directory "$INPUT_DIR" --file "$OUTPUT_FILE" .
  yarn payload-size

  # Deploy to staging
  readonly httpCode=$(
    curl --include --location --request POST --silent --write-out "\nHTTP_CODE: %{http_code}\n" \
         --header "Authorization: Token $NGBUILDS_IO_KEY" --data-binary "@$OUTPUT_FILE" "$UPLOAD_URL" \
    | sed 's/\r\n/\n/' \
    | tee /dev/fd/3 \
    | tail -1 \
    | sed 's/HTTP_CODE: //'
  )

  # Exit with an error if the request failed.
  # (Ignore 409 failures, which mean trying to re-deploy for the same PR/SHA.)
  if [ $httpCode -lt 200 ] || ([ $httpCode -ge 400 ] && [ $httpCode -ne 409 ]); then
    exit 1
  fi

  # Run PWA-score tests (unless the deployment is not public yet;
  # i.e. it could not be automatically verified).
  if [ $httpCode -ne 202 ]; then
    yarn test-pwa-score -- "$DEPLOYED_URL" "$MIN_PWA_SCORE"
  fi
)
|
# Start a disposable CentOS container with the host's /work mounted at /root.
docker run -it --rm -v /work/:/root centos /bin/bash

# to remove the duplicate line, uniq -c
# Count identical mysql thread states, most frequent first.
mysql -uroot -e 'show processlist\G' | grep State: | uniq -c | sort -rn
|
<filename>qht-modules/qht-api/src/main/java/com/qht/model/MyIndexCourseDetailsModel.java
package com.qht.model;
import java.io.Serializable;
import java.util.Date;
/**
 * Serializable DTO carrying one course entry for the "my index" page:
 * id, play type, name, video file, a {@code begin} date and text content.
 *
 * @author Zuoxh
 * @since 2018/11/18
 */
public class MyIndexCourseDetailsModel implements Serializable {

    private String uid;
    private String play_type_id;
    private String name;
    private String video_file;
    private Date begin;
    private String content;

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getPlay_type_id() {
        return play_type_id;
    }

    public void setPlay_type_id(String play_type_id) {
        this.play_type_id = play_type_id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getVideo_file() {
        return video_file;
    }

    public void setVideo_file(String video_file) {
        this.video_file = video_file;
    }

    public Date getBegin() {
        return begin;
    }

    public void setBegin(Date begin) {
        this.begin = begin;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    @Override
    public String toString() {
        // Bug fix: the prefix previously said "MyIndexCourseDetailsParameter",
        // a stale leftover from before the class was renamed.
        return "MyIndexCourseDetailsModel{" +
                "uid='" + uid + '\'' +
                ", play_type_id='" + play_type_id + '\'' +
                ", name='" + name + '\'' +
                ", video_file='" + video_file + '\'' +
                ", begin=" + begin +
                ", content='" + content + '\'' +
                '}';
    }
}
|
<filename>shiny-damage-calculator/src/component/IdolParameterForm.tsx
import * as React from 'react';
import { useEffect, useState } from 'react';
import { Button, Form } from 'react-bootstrap';
import { range } from 'utility';
import { AppContext } from 'component/App';
import { IdolParameter } from 'component/IdolParameter';
// Form for managing named idol-status presets: name the current parameter
// set, add it as a preset, and load/overwrite/delete stored presets via the
// app-level reducer.
export const IdolParameterForm: React.FC = () => {
  const context = React.useContext(AppContext);
  // Name of the preset currently selected in the <select> box below.
  const [presetName, setPresetName] = useState('');

  // When the preset list changes, keep the selection valid by falling back
  // to the first preset if the current selection no longer exists.
  // NOTE(review): `presetName` is read here but absent from the dependency
  // array — confirm this is intentional.
  useEffect(() => {
    if (context.state.presetList !== undefined) {
      if (context.state.presetList.length > 0 && !presetNameList().includes(presetName)) {
        setPresetName(context.state.presetList[0].idolStatusName);
      }
    }
  }, [context.state.presetList]);

  // "Add" is disabled for an empty name or a name already used by a preset.
  const disableAddFlg = () => {
    if (context.state.idolStatusName === '') {
      return true;
    }
    if (context.state.presetList !== undefined) {
      if (context.state.presetList.map(p => p.idolStatusName).includes(context.state.idolStatusName)) {
        return true;
      }
    }
    return false;
  }

  // Names of all stored presets (empty while the list is still undefined).
  const presetNameList = () => {
    if (context.state.presetList === undefined) {
      return [];
    }
    return context.state.presetList.map(p => p.idolStatusName);
  }

  const loadPreset = () => {
    context.dispatch({ type: 'LOAD_PRESET', value: presetName });
  };

  const updatePreset = () => {
    context.dispatch({ type: 'UPDATE_PRESET', value: presetName });
  };

  // Deletion asks for confirmation first.
  const deletePreset = () => {
    if (window.confirm(`「${presetName}」을 삭제하시겠습니까?`)) {
      context.dispatch({ type: 'DELETE_PRESET', value: presetName });
    }
  };

  return (
    <details className='border p-1'>
      <summary>아이돌의 스테이터스</summary>
      <Form.Group className='m-3'>
        <Form.Control placeholder='프리셋 이름' value={context.state.idolStatusName}
          // tslint:disable-next-line: jsx-no-lambda
          onChange={(e: any) => context.dispatch({ 'type': 'STATUS_NAME', 'value': e.currentTarget.value })} />
      </Form.Group>
      <IdolParameter />
      {
        range(4).map(i => (
          <IdolParameter key={i} produce={false} index={i} />
        ))
      }
      <Form.Group className='m-3 d-flex'>
        <Button className="d-block mr-3 text-nowrap"
          // tslint:disable-next-line: jsx-no-lambda
          onClick={() => context.dispatch({ 'type': 'ADD_PRESET', 'value': '' })}
          disabled={disableAddFlg()}>추가</Button>
        <Form.Control as="select" className="mr-3" value={presetName}
          // tslint:disable-next-line: jsx-no-lambda
          onChange={(e: any) => setPresetName(e.currentTarget.value)}>
          {
            presetNameList().map(name => <option key={name}>{name}</option>)
          }
        </Form.Control>
        <Button className="d-block mr-3 text-nowrap"
          disabled={presetNameList().length === 0}
          onClick={loadPreset}>불러오기</Button>
        <Button className="d-block mr-3 text-nowrap" variant="warning"
          disabled={presetNameList().length === 0 || (presetNameList().includes(context.state.idolStatusName) && presetName !== context.state.idolStatusName)}
          onClick={updatePreset}>덮어쓰기</Button>
        <Button className="d-block mr-3 text-nowrap" variant="danger"
          disabled={presetNameList().length === 0}
          onClick={deletePreset}>삭제</Button>
      </Form.Group>
    </details>
  );
}
|
<gh_stars>10-100
/*
Copyright (C) 2011 <NAME>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include <sstream>
#include <stdexcept>
#include "JackCoreMidiUtil.h"
#include "JackCoreMidiVirtualInputPort.h"
using Jack::JackCoreMidiVirtualInputPort;
///////////////////////////////////////////////////////////////////////////////
// Static callbacks
///////////////////////////////////////////////////////////////////////////////
// Static CoreMIDI read callback: `port` is the JackCoreMidiVirtualInputPort
// instance registered as the refCon at creation time; forward the incoming
// packet list to it.
void
JackCoreMidiVirtualInputPort::
HandleInputEvent(const MIDIPacketList *packet_list, void *port,
                 void */*src_ref*/)
{
    ((JackCoreMidiVirtualInputPort *) port)->ProcessCoreMidi(packet_list);
}
///////////////////////////////////////////////////////////////////////////////
// Class
///////////////////////////////////////////////////////////////////////////////
// Creates a virtual CoreMIDI destination named "virtual<index+1>" whose
// incoming packets are delivered to this port via HandleInputEvent.
// Throws std::bad_alloc when the CFString name cannot be allocated and
// std::runtime_error (carrying the OS error text) when CoreMIDI refuses to
// create the destination.
JackCoreMidiVirtualInputPort::
JackCoreMidiVirtualInputPort(const char *alias_name, const char *client_name,
                             const char *driver_name, int index,
                             MIDIClientRef client, double time_ratio,
                             size_t max_bytes, size_t max_messages):
    JackCoreMidiInputPort(time_ratio, max_bytes, max_messages)
{
    // Endpoint names are 1-based: "virtual1", "virtual2", ...
    std::stringstream stream;
    stream << "virtual" << (index + 1);
    CFStringRef name = CFStringCreateWithCString(0, stream.str().c_str(),
                                                 CFStringGetSystemEncoding());
    if (! name) {
        throw std::bad_alloc();
    }
    MIDIEndpointRef destination;
    OSStatus status = MIDIDestinationCreate(client, name, HandleInputEvent,
                                            this, &destination);
    // The CFString is only needed for creation; release it on every path.
    CFRelease(name);
    if (status != noErr) {
        throw std::runtime_error(GetMacOSErrorString(status));
    }
    Initialize(alias_name, client_name, driver_name, index, destination);
}
// Disposes the CoreMIDI endpoint created by the constructor. Failures are
// logged rather than thrown (throwing from a destructor is unsafe).
JackCoreMidiVirtualInputPort::~JackCoreMidiVirtualInputPort()
{
    OSStatus status = MIDIEndpointDispose(GetEndpoint());
    if (status != noErr) {
        WriteMacOSError("JackCoreMidiVirtualInputPort [destructor]",
                        "MIDIEndpointDispose", status);
    }
}
|
// Command wikifeedia does something...
package main
import (
"context"
"crypto"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/tls"
"crypto/x509"
"crypto/x509/pkix"
"fmt"
"io/fs"
"math/big"
"net/http"
"os"
"time"
"github.com/pkg/errors"
"github.com/tullo/wikifeedia/crawler"
"github.com/tullo/wikifeedia/db"
"github.com/tullo/wikifeedia/server"
"github.com/tullo/wikifeedia/wikipedia"
"github.com/urfave/cli"
)
// main wires up the wikifeedia CLI with four subcommands: `setup` initialises
// the database, `crawl` refreshes the article set once, `server` serves the
// app over HTTP(S), and `fetch-top-articles` is a wikipedia-client debug aid.
func main() {
	var pgURL string
	var expandedPgURL string
	app := cli.NewApp()
	app.Name = "wikifeedia"
	app.Usage = "runs one of the main actions"
	// Default action when no subcommand is given.
	app.Action = func(c *cli.Context) error {
		println("run a subcommand")
		return nil
	}
	app.Flags = []cli.Flag{
		cli.StringFlag{
			Name:        "pgurl",
			Value:       "pgurl://root@localhost:26257?sslmode=disable",
			Destination: &pgURL,
		},
	}
	// Expand $VARS in the connection URL once, before any subcommand runs.
	app.Before = cli.BeforeFunc(func(ctx *cli.Context) error {
		expandedPgURL = os.ExpandEnv(pgURL)
		return nil
	})
	app.Commands = []cli.Command{
		{
			// Schema setup happens as a side effect of db.New.
			Name: "setup",
			Action: func(c *cli.Context) error {
				fmt.Println("Setting up database at", pgURL)
				_, err := db.New(expandedPgURL)
				return err
			},
		},
		{
			Name:        "crawl",
			Description: "Update the set of articles one time",
			Action: func(c *cli.Context) error {
				fmt.Println("Setting up database at", pgURL)
				conn, err := db.New(expandedPgURL)
				if err != nil {
					return err
				}
				wiki := wikipedia.New()
				crawl := crawler.New(conn, wiki)
				return crawl.CrawlOnce(context.Background())
			},
		},
		{
			Name:        "server",
			Description: "Run the server",
			Action: func(c *cli.Context) error {
				fmt.Println("Setting up database at", pgURL)
				conn, err := db.New(expandedPgURL)
				if err != nil {
					return err
				}
				// Serve the embedded front-end build directory.
				buildFS, err := fs.Sub(Assets, "app/build")
				if err != nil {
					return err
				}
				h := server.New(conn, http.FS(buildFS))
				server := http.Server{
					Addr:    fmt.Sprintf(":%d", c.Int("port")),
					Handler: h,
				}
				if !c.Bool("insecure") {
					// TLS with an in-memory self-signed certificate.
					priv, certBytes, err := generateCertificate()
					if err != nil {
						return errors.Wrapf(err, "failed to generate certificate")
					}
					server.TLSConfig = &tls.Config{
						Certificates: []tls.Certificate{{
							Certificate: [][]byte{certBytes},
							PrivateKey:  priv,
						}},
					}
					if err := server.ListenAndServeTLS("" /* certfile */, "" /* keyfile */); err != nil && err != http.ErrServerClosed {
						return errors.Wrap(err, "failed to start server")
					}
				} else {
					if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
						return errors.Wrap(err, "failed to start server")
					}
				}
				return nil
			},
			Flags: []cli.Flag{
				cli.IntFlag{
					Name:  "port",
					Value: 8080,
					Usage: "port on which to serve",
				},
				cli.BoolFlag{
					Name:  "insecure",
					Usage: "disables TLS",
				},
			},
		},
		{
			Name:        "fetch-top-articles",
			Description: "debug command to exercise the wikipedia client functionality.",
			Action: func(c *cli.Context) error {
				ctx := context.Background()
				wiki := wikipedia.New()
				project := c.String("project")
				top, err := wiki.FetchTopArticles(ctx, project)
				if err != nil {
					return err
				}
				// Print up to num-articles entries with title, views, extract.
				n := c.Int("num-articles")
				for i := 0; i < len(top.Articles) && i < n; i++ {
					article, err := wiki.GetArticle(ctx, project, top.Articles[i].Article)
					if err != nil {
						return err
					}
					if i > 0 {
						fmt.Println()
					}
					fmt.Printf("%d. %s (%d)\n\n%s\n", i+1, article.Summary.Titles.Normalized, top.Articles[i].Views, article.Summary.Extract)
				}
				return nil
			},
			Flags: []cli.Flag{
				cli.IntFlag{
					Name:  "num-articles,n",
					Value: 10,
					Usage: "number of articles to fetch",
				},
				cli.StringFlag{
					Name:  "project",
					Value: "en",
					Usage: "project to scan",
				},
			},
		},
	}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintf(os.Stderr, "failed to run command: %v\n", err)
		os.Exit(1)
	}
}
// generateCertificate creates a throwaway self-signed TLS certificate for
// "localhost", valid for one year from now, returning the ECDSA P-256 private
// key and the DER-encoded certificate bytes.
// Loosely based on https://golang.org/src/crypto/tls/generate_cert.go
func generateCertificate() (crypto.PrivateKey, []byte, error) {
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		return nil, nil, errors.Wrap(err, "failed to generate private key")
	}
	now := time.Now().UTC()
	// Pick a random 128-bit serial number to avoid collisions between runs.
	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
	if err != nil {
		return nil, nil, errors.Wrap(err, "failed to generate serial number")
	}
	cert := x509.Certificate{
		BasicConstraintsValid: true,
		DNSNames:              []string{"localhost"},
		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		// NOTE(review): KeyEncipherment is an RSA key-exchange usage and is
		// normally unnecessary for an ECDSA key; kept as-is for compatibility.
		KeyUsage:     x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
		NotBefore:    now,
		NotAfter:     now.AddDate(1, 0, 0),
		SerialNumber: serialNumber,
		Subject: pkix.Name{
			Organization: []string{"Cockroach Labs"},
		},
	}
	// Self-signed: the certificate is its own parent, signed with its own key.
	bytes, err := x509.CreateCertificate(rand.Reader, &cert, &cert, &priv.PublicKey, priv)
	if err != nil {
		return nil, nil, errors.Wrap(err, "failed to generate certificate")
	}
	// err is necessarily nil at this point; return nil explicitly for clarity.
	return priv, bytes, nil
}
|
using UnityEngine;
using System.Collections;
/// <summary>
/// MonoBehaviour that routes a URL through a PHP proxy and fetches it.
/// </summary>
public class URLProcessor : MonoBehaviour
{
    /// <summary>
    /// Coroutine: escapes <paramref name="url"/> via PHPProxy, performs the
    /// web request, logs the error on failure, or logs and yields the
    /// response body on success.
    /// </summary>
    public IEnumerator ProcessURLWithProxy(string url)
    {
        // Route the request through the PHP proxy helper.
        WWW www = new WWW(PHPProxy.Escape(url));
        // Suspend the coroutine until the request completes.
        yield return www;
        if (www.error != null)
        {
            // Request failed: report the error and stop this coroutine.
            Debug.LogError("Error occurred during web request: " + www.error);
            yield break;
        }
        // Request succeeded: log the body and hand it back to the caller.
        Debug.Log("Web request successful. Response data: " + www.text);
        yield return www.text;
    }
}
|
// Package validation contains some message validation helpers.
package validation
import "time"
// IsValidDate returns true if the given date is a valid format.
// A valid date is a non-empty string in the ISO "YYYY-MM-DD" layout.
func IsValidDate(date string) bool {
	if len(date) == 0 {
		return false
	}
	if _, err := time.Parse("2006-01-02", date); err != nil {
		return false
	}
	return true
}
|
package shadows.apotheosis.deadly.loot.affix.impl.melee;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.attributes.AttributeModifier.Operation;
import shadows.apotheosis.deadly.loot.EquipmentType;
import shadows.apotheosis.deadly.loot.affix.impl.AttributeAffix;
/**
 * Affix that modifies attack speed: delegates to {@link AttributeAffix} with
 * a MULTIPLY_TOTAL modifier on the attack speed attribute and the constant
 * arguments 0.05F and 1F (bound semantics defined by AttributeAffix).
 */
public class AttackSpeedAffix extends AttributeAffix {

    public AttackSpeedAffix(int weight) {
        super(SharedMonsterAttributes.ATTACK_SPEED, 0.05F, 1F, Operation.MULTIPLY_TOTAL, weight);
    }

    /** Attack speed affixes are restricted to swords. */
    @Override
    public boolean canApply(EquipmentType type) {
        return EquipmentType.SWORD == type;
    }

    /** Upper bound reported for this affix (see AttributeAffix for usage). */
    @Override
    public float getMax() {
        return 1.5F;
    }
}
|
# Bootstraps pip via the official get-pip.py script, then installs the
# xkcdpass passphrase generator.
# NOTE(review): pipes a downloaded script straight into `python`; assumes
# `curl` and `python` are on PATH. `info` is defined elsewhere in this script.
install_python_packages() {
    info "Installing pip"
    curl https://bootstrap.pypa.io/get-pip.py | python
    info "Installing pip xkcdpass"
    pip install xkcdpass
}
# Installs NeoVim (via Homebrew) and the vim-plug plugin manager.
# `info` and `install_brew_formulas` are defined elsewhere in this script.
install_neovim() {
    info "Installing NeoVim"
    install_brew_formulas neovim
    info "Installing Vim Plugged"
    # Fetch vim-plug into NeoVim's autoload dir so plug# functions load at startup.
    sh -c 'curl -fLo $HOME/.local/share/nvim/site/autoload/plug.vim --create-dirs https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim'
}
|
/* eslint-env es6 */
/* global Vue, rangeUtils, dateMath */
(function() {
// Inline template for the <daterangepicker> component: a toggle button showing
// the current range, a dropdown panel with a custom from/to form plus
// quick-range shortcuts, and a refresh button with an auto-refresh interval
// menu. (No comments inside the literal: its text is rendered verbatim.)
var html = `
<div v-cloak>
  <button class="btn btn-secondary" v-on:click="showHidePicker()">
    <i class="fa fa-clock-o"></i>
    <span v-cloak>{{ rangeString() }}</span>
  </button>
  <div class="row position-absolute bg-light border rounded card-body picker-dropdown-panel m-1 w-100 shadow" v-show="isPickerOpen" v-cloak>
    <div class="col-4">
      <h3>
        Custom range
      </h3>
      <form>
        <div class="form-group">
          <label for="inputFrom">From:</label>
          <div class="input-group">
            <input type="text" id="inputFrom" v-model="editRawFrom" class="form-control">
            <div class="input-group-append" data-role="from-picker">
              <div class="input-group-text">
                <i class="fa fa-calendar"></i>
              </div>
            </div>
          </div>
        </div>
        <div class="form-group">
          <label for="inputTo">To:</label>
          <div class="input-group">
            <input type="text" id="inputTo" v-model="editRawTo" class="form-control">
            <div class="input-group-append" data-role="to-picker">
              <div class="input-group-text">
                <i class="fa fa-calendar"></i>
              </div>
            </div>
          </div>
        </div>
        <div>
          <button class="btn btn-primary" v-on:click.prevent="pickerApply">Apply</button>
        </div>
      </form>
    </div>
    <div class="col-8 pl-4">
      <h3>
        Quick ranges
      </h3>
      <div class="row">
        <ul class="list-unstyled col" v-for="section in ranges">
          <li class="shortcut" v-for="range in section">
            <a href :data-from=range.from :data-to="range.to" v-on:click.prevent="loadRangeShortcut(range)">
              {{ describeTimeRange(range) }}
            </a>
          </li>
        </ul>
      </div>
    </div>
  </div>
  <div class="btn-group">
    <button class="btn btn-secondary" v-on:click="refresh()" :disabled="!isRefreshable">
      <i class="fa fa-refresh"></i>
    </button>
    <button type="button" class="btn btn-secondary dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" :disabled="!isRefreshable">
      <span class="text-warning" v-if="isRefreshable">
        {{ intervals[refreshInterval] }}
      </span>
    </button>
    <div class="dropdown-menu dropdown-menu-right" style="min-width: 50px;">
      <a class="dropdown-item" href v-on:click.prevent="refreshInterval = null">Off</a>
      <div class="dropdown-divider"></div>
      <a class="dropdown-item" href v-on:click.prevent="refreshInterval = key" v-for="interval, key in intervals">{{ interval }}</a>
    </div>
  </div>
</div>`;
// jQuery daterangepicker instances for the "from"/"to" inputs; created lazily
// in synchronizePickers() and reused while the pickers stay open.
var fromPicker;
var toPicker;
// Pending auto-refresh timer id; cleared and re-armed on every refresh().
var refreshTimeoutId;
// Registration of the <daterangepicker> component. State, methods and
// watchers delegate to the module-level functions defined below.
Vue.component('daterangepicker', {
  props: ['from', 'to'],
  data: function() {
    return {
      ranges: rangeUtils.getRelativeTimesList(),
      // Kept in data (not computed) so the template can call it as a
      // function: renders the human-readable label for the toggle button.
      rangeString: function() {
        if (!this.editRawFrom || !this.editRawTo) {
          return;
        }
        return rangeUtils.describeTimeRange({from: this.editRawFrom, to: this.editRawTo});
      },
      // Raw, unparsed range bounds (e.g. "now-24h" / "now" or epoch millis).
      editRawFrom: null,
      editRawTo: null,
      // Auto-refresh period as a key of `intervals` (seconds), or null = off.
      refreshInterval: null,
      intervals: {
        '60': '1m',
        '300': '5m',
        '900': '15m'
      },
      isPickerOpen: false
    };
  },
  methods: {
    loadRangeShortcut: loadRangeShortcut,
    describeTimeRange: rangeUtils.describeTimeRange,
    showHidePicker: showHidePicker,
    pickerApply: pickerApply,
    setFromTo: setFromTo,
    notify: notify,
    refresh: refresh
  },
  computed: {
    // Dependency-tracking helper: the comma expression touches both raw
    // bounds so the `rawFromTo` watcher fires whenever either one changes.
    rawFromTo: function() {
      return this.editRawFrom, this.editRawTo, new Date();
    },
    // Auto-refresh is only offered for "now"-relative ranges.
    isRefreshable: function() {
      return this.editRawFrom && this.editRawFrom.toString().indexOf('now') != -1 ||
        this.editRawTo && this.editRawTo.toString().indexOf('now') != -1;
    }
  },
  mounted: onMounted,
  template: html,
  watch: {
    // Mirror the edited range into the URL query (?start=...&end=...).
    rawFromTo: function() {
      if (this.$route.query.start !== this.editRawFrom || this.$route.query.end !== this.editRawTo) {
        this.$router.push({ query: _.assign({}, this.$route.query, {
          start: '' + this.editRawFrom,
          end: '' + this.editRawTo
        })});
      }
    },
    refreshInterval: refresh,
    // Keep component state in sync when the URL changes (e.g. back button).
    $route: function(to, from) {
      if (to.query.start) {
        this.editRawFrom = convertDate(to.query.start);
      }
      if (to.query.end) {
        this.editRawTo = convertDate(to.query.end);
      }
      this.refresh();
    }
  }
});
/**
 * Initialises the range from the URL query (`start`/`end`), falling back to
 * the `from`/`to` props and finally to the last 24 hours; then emits the
 * parsed range to the parent and builds the jQuery date pickers.
 */
function onMounted() {
  var query = this.$route.query;
  this.editRawFrom = convertDate(query.start || this.from || 'now-24h');
  this.editRawTo = convertDate(query.end || this.to || 'now');
  this.notify();
  synchronizePickers.call(this);
}
/**
 * Returns `date` unchanged when dateMath can parse it (e.g. "now-24h");
 * otherwise treats it as a millisecond-epoch string and wraps it in moment.
 * NOTE(review): `moment` is not listed in this file's global declarations —
 * confirm it is available at runtime. Also assumes dateMath.parse always
 * returns an object with isValid(); verify for unparsable input.
 */
function convertDate(date) {
  // Statement explicitly terminated (the original relied on ASI).
  return dateMath.parse(date).isValid() ? date : moment(parseInt(date, 10));
}
/**
 * Applies a quick-range shortcut: copies its raw from/to values onto the
 * component, closes the dropdown panel and triggers a refresh.
 */
function loadRangeShortcut(range) {
  this.editRawFrom = range.from;
  this.editRawTo = range.to;
  // Close the panel before re-evaluating the range.
  this.isPickerOpen = false;
  this.refresh();
}
/** Toggles the visibility of the custom-range dropdown panel. */
function showHidePicker() {
  this.isPickerOpen = this.isPickerOpen ? false : true;
}
/**
 * "Apply" button handler for the custom range form: closes the dropdown
 * panel, then refreshes with the currently edited raw range.
 */
function pickerApply() {
  this.isPickerOpen = false;
  this.refresh();
}
// Shared options for both jQuery daterangepicker instances: one calendar per
// input, with a 24-hour time picker at minute precision.
var pickerOptions = {
  singleDatePicker: true,
  timePicker: true,
  timePicker24Hour: true,
  timePickerSeconds: false
};
/*
 * Make sure that date pickers are up-to-date
 * especially with any 'now-like' dates
 */
function synchronizePickers() {
  // update 'from' date picker only if not currently open
  // and 'from' is updating (ie. contains 'now')
  if (!fromPicker || !fromPicker.data('daterangepicker').isShowing) {
    // (Re)create the picker on the "from" input-group addon; the callback
    // stores the chosen date under `editRawFrom`.
    fromPicker = $(this.$el).find('[data-role=from-picker]').daterangepicker(
      $.extend({
        startDate: dateMath.parse(this.editRawFrom)
      }, pickerOptions),
      onPickerApply.bind(this, 'editRawFrom')
    );
  }
  // update 'to' date picker only if not currently open
  // and 'to' is updating (ie. contains 'now')
  if (!toPicker || !toPicker.data('daterangepicker').isShowing) {
    toPicker = $(this.$el).find('[data-role=to-picker]').daterangepicker(
      $.extend({
        startDate: dateMath.parse(this.editRawTo),
        // The "to" picker cannot select a date before the current "from".
        minDate: dateMath.parse(this.editRawFrom)
      }, pickerOptions),
      onPickerApply.bind(this, 'editRawTo')
    );
  }
}
/**
 * Shared daterangepicker callback: stores the picked date on the component
 * under the supplied property name ('editRawFrom' or 'editRawTo').
 */
function onPickerApply(propertyName, pickedDate) {
  this[propertyName] = pickedDate;
}
/**
 * Re-emits the current range and, when an auto-refresh interval is selected,
 * (re)schedules the next automatic refresh.
 */
function refresh() {
  this.notify();
  // Drop any previously scheduled refresh before arming a new one.
  clearTimeout(refreshTimeoutId);
  if (!this.refreshInterval) {
    return;
  }
  refreshTimeoutId = window.setTimeout(this.refresh, this.refreshInterval * 1000);
}
/** Programmatically sets the raw range bounds and refreshes. */
function setFromTo(rawFrom, rawTo) {
  this.editRawFrom = rawFrom;
  this.editRawTo = rawTo;
  this.refresh();
}
/**
 * Emits the parsed range to the parent via `update:from` / `update:to`.
 * The second argument to dateMath.parse for the "to" end distinguishes the
 * upper bound (presumably round-up semantics — confirm against dateMath).
 */
function notify() {
  var parsedFrom = dateMath.parse(this.editRawFrom);
  var parsedTo = dateMath.parse(this.editRawTo, true);
  this.$emit('update:from', parsedFrom);
  this.$emit('update:to', parsedTo);
}
})();
|
#!/bin/bash
# run munkiwebadmin dev server
# change dir
cd "$(dirname "$0")"
# default style
# Download and unpack the default MWA2 style bundle into /tmp...
curl -Lk -o /tmp/mwa2-style.zip https://github.com/SteveKueng/mwa2-style/archive/master.zip && unzip /tmp/mwa2-style.zip -d /tmp && rm -rf /tmp/mwa2-style.zip
mkdir -p ./munkiwebadmin/static/styles/default
# ...then install it as the "default" style and clean up the temp copy.
cp -r /tmp/mwa2-style-master/* ./munkiwebadmin/static/styles/default && rm -rf /tmp/mwa2-style-master
# Bring up the stack in the foreground (Ctrl-C stops the dev server).
docker-compose up
|
<reponame>PinoEire/archi<filename>com.archimatetool.editor/src/com/archimatetool/editor/diagram/sketch/Messages.java
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.diagram.sketch;
import org.eclipse.osgi.util.NLS;
/**
 * Externalised UI strings for the sketch diagram editor. The public static
 * fields are populated reflectively by {@link NLS#initializeMessages} from
 * the "com.archimatetool.editor.diagram.sketch.messages" resource bundle;
 * field names must match the bundle's property keys.
 */
public class Messages extends NLS {

    private static final String BUNDLE_NAME = "com.archimatetool.editor.diagram.sketch.messages"; //$NON-NLS-1$

    public static String ISketchEditor_0;

    public static String ISketchEditor_1;

    public static String ISketchEditor_2;

    public static String SketchEditor_0;

    public static String SketchEditorActionBarContributor_0;

    public static String SketchEditorPalette_0;

    public static String SketchEditorPalette_1;

    public static String SketchEditorPalette_10;

    public static String SketchEditorPalette_11;

    public static String SketchEditorPalette_12;

    public static String SketchEditorPalette_2;

    public static String SketchEditorPalette_3;

    public static String SketchEditorPalette_4;

    public static String SketchEditorPalette_5;

    public static String SketchEditorPalette_6;

    public static String SketchEditorPalette_7;

    public static String SketchEditorPalette_8;

    public static String SketchEditorPalette_9;

    static {
        // initialize resource bundle
        NLS.initializeMessages(BUNDLE_NAME, Messages.class);
    }

    // Not instantiable: all message fields are accessed statically.
    private Messages() {
    }
}
|
def longestCommonPrefix(strings):
    """Return the longest prefix shared by every string in ``strings``.

    Args:
        strings: A sequence of strings.

    Returns:
        The longest common prefix, or ``""`` when the sequence is empty or
        the strings share no common prefix.
    """
    # Guard: min() below would raise ValueError on an empty sequence.
    if not strings:
        return ""
    prefix = ""
    # Only positions present in every string can be part of the prefix.
    min_length = min(len(s) for s in strings)
    for i in range(min_length):
        current_char = strings[0][i]
        # Stop at the first position where any string disagrees.
        if any(s[i] != current_char for s in strings):
            return prefix
        prefix += current_char
    return prefix
# Demo: the three sample words share only the single-letter prefix "c".
result = longestCommonPrefix(['car', 'cat', 'cow'])
print(result) # Output: 'c'
|
<filename>bitcoin_shop_latlong.py
"""bitcoin_shop_counter.py
Author: <NAME>
Bestand leest data uit van coinmap api: https://coinmap.org/api/v1/venues/
"""
import urllib.request
import json
from pprint import pprint
# Haal alle data op vanuit de coinmap api.
with urllib.request.urlopen('https://coinmap.org/api/v1/venues/') as response:
data = json.load(response)
# Maak de dict aan waarin de landen worden geteld.
shop_dict_latlng = dict()
# Zet de data om naar lat long values die de google api in een land om zet
for i in range(0, len(data['venues'])):
latlng = str(data['venues'][i]['lat']) + "," + str(data['venues'][i]['lon'])
shop_dict_latlng[data['venues'][i]['name']] = latlng
# Laat data in terminal zien en schrijf naar bestand
print(shop_dict_latlng)
with open('shopname_latlng.json', 'w') as file:
file.write(json.dumps(shop_dict_latlng))
|
#!/bin/bash
#=============================================================
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
# Lisence: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=============================================================
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
# Fetch extra packages into package/openwrt-packages (run before feeds update).
cd package
mkdir openwrt-packages
cd openwrt-packages
git clone https://github.com/rufengsuixing/luci-app-adguardhome.git
git clone https://github.com/Lienol/openwrt-package.git
git clone -b master https://github.com/vernesong/OpenClash.git
cd ..
# Replace the bundled argon theme with the upstream 18.06 branch.
cd lean
rm -rf luci-theme-argon
git clone -b 18.06 https://github.com/jerrykuku/luci-theme-argon.git
#!/bin/bash
# Publish the SpykingCircus sorter image (tag 0.9.7) to Docker Hub; assumes
# the image has been built locally and `docker login` has already been run.
docker push magland/sf-spykingcircus:0.9.7
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/core/src/main/java/net/opengis/ows/_110/DomainMetadataType.java
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 12:40:25 PM MST
//
package net.opengis.ows._110;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
/**
* References metadata about a quantity, and provides a name for this metadata. (Informative: This element was simplified from the metaDataProperty element in GML 3.0.)
*
* Human-readable name of the metadata described by associated referenced document.
*
* <p>Java class for DomainMetadataType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="DomainMetadataType">
* <simpleContent>
* <extension base="<http://www.w3.org/2001/XMLSchema>string">
* <attribute ref="{http://www.opengis.net/ows/1.1}reference"/>
* </extension>
* </simpleContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated class (see file header) — edits here are lost on
// schema recompilation; regenerate from the OWS 1.1 schema instead.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DomainMetadataType", propOrder = {
    "value"
})
public class DomainMetadataType {

    /** Human-readable metadata name, carried as the element's text content. */
    @XmlValue
    protected String value;

    /** Optional URI referencing the metadata document being named. */
    @XmlAttribute(namespace = "http://www.opengis.net/ows/1.1")
    @XmlSchemaType(name = "anyURI")
    protected String reference;

    /**
     * Gets the value of the value property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getValue() {
        return value;
    }

    /**
     * Sets the value of the value property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Gets the value of the reference property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReference() {
        return reference;
    }

    /**
     * Sets the value of the reference property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReference(String value) {
        this.reference = value;
    }
}
|
#!/bin/bash
# custom <additional exports>
# end <additional exports>
# Generate out-of-source CMake build trees (sibling ../cmake_build) for both
# the Release and Debug configurations, using this directory as the source.
cmake -DCMAKE_BUILD_TYPE=Release -B../cmake_build/release -H.
cmake -DCMAKE_BUILD_TYPE=Debug -B../cmake_build/debug -H.
|
#!/bin/bash
set -e
# First CLI argument; NOTE(review): `prod` is never referenced below — confirm
# whether it is still needed.
prod=$1
# Colored logging helpers: blue info, red error, green success markers.
info() {
    printf "\e[34m[➧]\e[0m ${1}\n"
}
error() {
    printf "\e[31m[✘]\e[0m ${1}\n"
}
success() {
    printf "\e[32m[✔]\e[0m ${1}\n"
}
# Converts a POSIX-style path (/c/Users/foo) to a Windows path (c:\Users\foo):
# strip the leading slash, turn remaining slashes into backslashes, then insert
# a drive colon after the first character.
function toWinPath() {
    # `&` (whole match) replaces the non-portable `\0` used previously; POSIX
    # sed only specifies `&` and `\1`-`\9` in the replacement text.
    echo "$1" | sed -e 's/^\///' -e 's/\//\\/g' -e 's/^./&:/'
}
# Converts a Windows path (c:\Users\foo) to POSIX form (/c/Users/foo): prefix
# a slash, flip backslashes to slashes, drop the drive colon, and collapse any
# doubled slashes the previous steps may have produced.
function toPosixPath() {
    echo "/$1" | sed -e 's/\\/\//g' -e 's/://' -e 's/\/\//\//g'
}
# Derive both Windows and POSIX spellings of npm's global prefix and of the
# current directory, so the generated launcher scripts work from either world.
globalDirForWin=$(npm config get prefix)
currentDirForPosix=$(pwd)
currentDirForWin=$(toWinPath $currentDirForPosix)
globalDirForPosix=$(toPosixPath $globalDirForWin)
os="win"
uname=$(uname)
if [ "$uname"x = "Darwin"x ]; then
    os="mac"
    # On macOS the npm prefix does not itself contain the binaries; append /bin.
    globalDirForPosix="$globalDirForPosix/bin"
fi
# Generate dev and debug bin file
# For each mode, write a POSIX launcher (and on Windows also a .cmd launcher)
# into npm's global bin dir that runs the local packages/cli/bin/wepy.js.
# Escaped $ and ` inside the heredocs are expanded when the launcher RUNS;
# unescaped $vars ($params, $currentDirFor*) are baked in at generation time.
array=( dev debug )
for mod in "${array[@]}"
do
	# debug mode launches node with the inspector paused at the first line.
	params=""
	if [ "$mod"x = "debug"x ]; then
		params=" --inspect-brk"
	fi
	cat > "$globalDirForPosix/wepy-$mod" <<- EOF
	#!/bin/sh
	basedir=\$(dirname "\$(echo "\$0" | sed -e 's,\\\\,/,g')")
	case \`uname\` in
	*CYGWIN*) basedir=\`cygpath -w "\$basedir"\`;;
	esac
	if [ -x "\$basedir/node" ]; then
	"\$basedir/node"$params "$currentDirForPosix/packages/cli/bin/wepy.js" "\$@"
	ret=\$?
	else
	node$params "$currentDirForPosix/packages/cli/bin/wepy.js" "\$@"
	ret=\$?
	fi
	exit \$ret
	EOF
	chmod +x "$globalDirForPosix/wepy-$mod"
	success "generated: $globalDirForPosix/wepy-$mod"
	# If it's win then generate cmd file
	if [ "$os"x = "win"x ]; then
		cat > "$globalDirForPosix/wepy-$mod.cmd" <<- EOF
		@IF EXIST "%~dp0\node.exe" (
		"%~dp0\node.exe"$params "$currentDirForWin\packages\cli\bin\wepy.js" %*
		) ELSE (
		@SETLOCAL
		@SET PATHEXT=%PATHEXT:;.JS;=;%
		node$params "$currentDirForWin\packages\cli\bin\wepy.js" %*
		)
		EOF
		success "generated: $globalDirForPosix/wepy-$mod.cmd"
	fi
done
|
<gh_stars>1-10
/*!
* Copyright (c) 2021-Present, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*/
// We load all the current parsers, because we won't know in advance which version(s) we need to parse
// Expect to only support current major - 1 (also suspect that this limit may never be hit)
import v1 from './v1/parsers'; // More granularity to be defined as needed
// Resolves the parser set for an exact schema version string. The version is
// mandatory; anything other than a known exact semver string is rejected.
const parsersForVersion = function parsersForVersion(version) {
  if (version == null) {
    // Loose equality deliberately catches both undefined and null,
    // matching the original switch's two cases.
    throw new Error('Api version is required');
  }
  if (version === '1.0.0') {
    return v1;
  }
  throw new Error(`Unknown api version: ${version}. Use an exact semver version.`);
};
export default parsersForVersion;
|
package io.opensphere.core.util.collections.observable;
import javafx.beans.InvalidationListener;
import javafx.collections.SetChangeListener;
/**
* A helper class used in creation of observable sets.
*
* @param <E> the elements contained within the set.
*/
public abstract class SetListenerHelper<E> extends ExpressionHelperBase
{
    /**
     * Registers the supplied listener for notification with the supplied
     * helper. If the supplied helper is <code>null</code>, a new instance is
     * created.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the optional helper in which the listener will be
     *            registered (if null, a new instance is created).
     * @param listener the required invalidation listener to register for
     *            notification.
     * @return a reference to the helper created during this operation.
     * @throws NullPointerException if the supplied listener is null.
     */
    public static <E> SetListenerHelper<E> addListener(SetListenerHelper<E> helper, InvalidationListener listener)
    {
        if (listener == null)
        {
            throw new NullPointerException();
        }
        return (helper == null) ? new SingleInvalidation<>(listener) : helper.addListener(listener);
    }

    /**
     * Unregisters the supplied listener for notification with the supplied
     * helper. If the supplied helper is <code>null</code>, no action is taken.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the optional helper in which the listener will be
     *            unregistered (if null, no action is taken).
     * @param listener the required invalidation listener to unregister for
     *            notification.
     * @return a reference to the helper used during this operation (null if the
     *         supplied helper is null).
     * @throws NullPointerException if the supplied listener is null.
     */
    public static <E> SetListenerHelper<E> removeListener(SetListenerHelper<E> helper, InvalidationListener listener)
    {
        if (listener == null)
        {
            throw new NullPointerException();
        }
        return (helper == null) ? null : helper.removeListener(listener);
    }

    /**
     * Registers the supplied listener for notification with the supplied
     * helper. If the supplied helper is <code>null</code>, a new instance is
     * created.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the optional helper in which the listener will be
     *            registered (if null, a new instance is created).
     * @param listener the required change listener to register for
     *            notification.
     * @return a reference to the helper created during this operation.
     * @throws NullPointerException if the supplied listener is null.
     */
    public static <E> SetListenerHelper<E> addListener(SetListenerHelper<E> helper, SetChangeListener<? super E> listener)
    {
        if (listener == null)
        {
            throw new NullPointerException();
        }
        return (helper == null) ? new SingleChange<>(listener) : helper.addListener(listener);
    }

    /**
     * Unregisters the supplied listener for notification with the supplied
     * helper. If the supplied helper is <code>null</code>, no action is taken.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the optional helper in which the listener will be
     *            unregistered (if null, no action is taken).
     * @param listener the required change listener to unregister for
     *            notification.
     * @return a reference to the helper used during this operation (null if the
     *         supplied helper is null).
     * @throws NullPointerException if the supplied listener is null.
     */
    public static <E> SetListenerHelper<E> removeListener(SetListenerHelper<E> helper, SetChangeListener<? super E> listener)
    {
        if (listener == null)
        {
            throw new NullPointerException();
        }
        return (helper == null) ? null : helper.removeListener(listener);
    }

    /**
     * Through the supplied helper, propagates the supplied change event to the
     * helper's registered listeners. A null helper is a no-op.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the helper through which the event will be sent.
     * @param change the object describing the changes made to the set.
     */
    public static <E> void fireValueChangedEvent(SetListenerHelper<E> helper, SetChangeListener.Change<? extends E> change)
    {
        if (helper != null)
        {
            helper.fireValueChangedEvent(change);
        }
    }

    /**
     * Tests to determine if the supplied helper has listeners. By the
     * add/remove contract above, a non-null helper implies at least one
     * registered listener.
     *
     * @param <E> the type of elements contained within the observed set.
     * @param helper the helper to test.
     * @return <code>true</code> if the helper has listeners, <code>false</code>
     *         otherwise.
     */
    public static <E> boolean hasListeners(SetListenerHelper<E> helper)
    {
        return helper != null;
    }

    /**
     * Registers the supplied listener for invalidation event notifications.
     *
     * @param listener the listener to register.
     * @return a reference to this instance for call chaining.
     */
    protected abstract SetListenerHelper<E> addListener(InvalidationListener listener);

    /**
     * Unregisters the supplied listener from invalidation event notifications.
     *
     * @param listener the listener to unregister.
     * @return a reference to this instance for call chaining.
     */
    protected abstract SetListenerHelper<E> removeListener(InvalidationListener listener);

    /**
     * Registers the supplied listener for change event notifications.
     *
     * @param listener the listener to register.
     * @return a reference to this instance for call chaining.
     */
    protected abstract SetListenerHelper<E> addListener(SetChangeListener<? super E> listener);

    /**
     * Unregisters the supplied listener from change event notifications.
     *
     * @param listener the listener to unregister.
     * @return a reference to this instance for call chaining.
     */
    protected abstract SetListenerHelper<E> removeListener(SetChangeListener<? super E> listener);

    /**
     * Fires the supplied value change event to all registered change listeners.
     *
     * @param change the event object describing the change event on the
     *            underlying set.
     */
    protected abstract void fireValueChangedEvent(SetChangeListener.Change<? extends E> change);
}
|
package main
import (
"fmt"
fiql "go-fiql/gofiql"
)
// main parses each sample FIQL query and prints the SQL rendered by the SQL
// visitor, stopping at the first parse or traversal error.
func main() {
	// Sample queries exercising nesting, alternation (","), conjunction (";")
	// and the comparison operators (==, =gt=, =gte=, =lt=, =lte=, =!~=).
	queries := []string{
		"(((((product==\"Apple\",product==\"Google\");(name==\"Joe\",name==\"Alan\")));label=!~=\"text\";(qty=gte=1,qty=lte=10)))",
		"(product==\"Apple\",product==\"Google\");(name==\"Joe\",name==\"Alan\");(qty=gte=1,qty=lte=10)",
		"(qty=gt=1;(qty=gte=1,qty=lte=10));(product==\"Apple\",product==\"HP\")",
		"(product==\"Apple\",qty=lt=1);name==\"Joe\"",
		"name==bar,dob=gt=1990-01-01",
		"title==foo*;(updated=lt=-P1D,title==*bar*)",
	}
	for _, query := range queries {
		fmt.Println(query)
		// Build the expression tree for this query.
		root, err := fiql.Parse(query)
		if err != nil {
			fmt.Println(err)
			return
		}
		// Render the tree as SQL via the visitor.
		visitor := fiql.NewSQLVisitor()
		i, err := fiql.Traverse(root, visitor)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println(i)
	}
}
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR
# Parse the command line parameters
# that runCust will give out
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE
# Parsing command line arguments:
# Use the arithmetic comparison -gt: inside [[ ]], ">" compares strings
# lexicographically, which is wrong once $# has more than one digit.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    echo Unkown option $key
    ;;
esac
shift # past argument or value
done
# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"
# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR
# Run the actual job, resuming from the latest checkpoint in MODEL_DIR.
python $CONFIG_DIR/anytime_models/examples/resnet-ann.py \
    --data_dir=$DATA_DIR \
    --log_dir=$LOG_DIR \
    --model_dir=$MODEL_DIR \
    --load=${MODEL_DIR}/checkpoint \
    -n=17 -c=32 -s=1 --opt_at=25 --ds_name=cifar100 --batch_size=64 --nr_gpu=1 -f=2 --samloss=0
#!/usr/bin/env -S bash -e
# To be sourced from other scripts. Tip: use the following lines to source it independently from the PWD,
# provided your script is in the same directory as this one:
# SCRIPTS_DIR="$(readlink -f ${BASH_SOURCE[0]} | xargs dirname)"
# source "$SCRIPTS_DIR/utils.sh"
# Temporary hack, because sourceforge currently rejects connections randomly
# Runs "$@" up to MAX_TRIES times, returning 0 on the first success. After
# exhausting all attempts it reports failure and propagates the last exit
# status (triggering errexit first, when enabled, via the bare `false`).
function try_multiple_times() {
    local ERR
    local MAX_TRIES=5
    local COUNT=0
    while [ $COUNT -lt $MAX_TRIES ]
    do
        if [ $COUNT -ne 0 ]
        then
            echo "Retrying \"${@}\" (attempt #$(( COUNT + 1 )))..."
        fi
        # Execute the command in an IF, just in case "errexit" is enabled
        if "${@}"
        then
            return 0
        else
            # Remember the failing status for the final return below.
            ERR=$?
        fi
        COUNT=$(( COUNT + 1 ))
    done
    echo "Failed to execute \"${@}\" after $COUNT attempts. Aborting."
    # Simulate a failing command, just in case "errexit" is enabled, to trigger an exit as appropriate
    false
    # Otherwise return the failing command's status code
    return $ERR
}
|
<reponame>VOLTTRON/volttron-AIRCx-visualization
// Copyright (c) 2020, Battelle Memorial Institute
// All rights reserved.
// 1. Battelle Memorial Institute (hereinafter Battelle) hereby grants
// permission to any person or entity lawfully obtaining a copy of this
// software and associated documentation files (hereinafter "the Software")
// to redistribute and use the Software in source and binary forms, with or
// without modification. Such person or entity may use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and
// may permit others to do so, subject to the following conditions:
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimers.
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// - Other than as used herein, neither the name Battelle Memorial Institute
// or Battelle may be used in any form whatsoever without the express
// written consent of Battelle.
// 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL BATTELLE OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
// This material was prepared as an account of work sponsored by an agency of the
// United States Government. Neither the United States Government nor the United
// States Department of Energy, nor Battelle, nor any of their employees, nor any
// jurisdiction or organization that has cooperated in the development of these
// materials, makes any warranty, express or implied, or assumes any legal
// liability or responsibility for the accuracy, completeness, or usefulness or
// any information, apparatus, product, software, or process disclosed, or
// represents that its use would not infringe privately owned rights.
// Reference herein to any specific commercial product, process, or service by
// trade name, trademark, manufacturer, or otherwise does not necessarily
// constitute or imply its endorsement, recommendation, or favoring by the
// United States Government or any agency thereof, or Battelle Memorial Institute.
// The views and opinions of authors expressed herein do not necessarily state or
// reflect those of the United States Government or any agency thereof.
// PACIFIC NORTHWEST NATIONAL LABORATORY
// operated by
// BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
// under Contract DE-AC05-76RL01830
import {
Button,
MenuItem,
Step,
StepButton,
StepLabel,
Stepper,
Toolbar,
withWidth,
} from "@material-ui/core";
import { withStyles } from "@material-ui/core/styles";
import {
NavigateBefore as NavigateBeforeIcon,
NavigateNext as NavigateNextIcon,
} from "@material-ui/icons";
import clsx from "clsx";
import { MuiSelect } from "components";
import _ from "lodash";
import PropTypes from "prop-types";
import React from "react";
import styles from "./styles";
/**
 * Stepper navigation toolbar: Back/Next buttons wrapped around either a
 * full Material-UI <Stepper> or, for variant="compact", a <MuiSelect>
 * dropdown. The active step is owned by the parent and flows in via
 * `step`; changes are reported back through `onStepChange`.
 */
class MuiStepper extends React.Component {
  static defaultProps = {
    disabled: [], // indices of steps that may not be navigated to
  };
  // Report a step change to the parent, if a handler was supplied.
  notifyStepChange = (step) => {
    if (this.props.onStepChange) {
      this.props.onStepChange(step);
    }
  };
  // Advance one step; at the last step, delegate to onStepAbove when the
  // parent indicates there is content above this stepper (isStepAbove).
  handleNext = () => {
    const { steps, step, isStepAbove } = this.props;
    if (step < steps.length - 1) {
      this.notifyStepChange(step + 1);
    } else if (isStepAbove) {
      this.props.onStepAbove();
    }
  };
  // Go back one step; at the first step, delegate to onStepBelow.
  handleBack = () => {
    const { step, isStepBelow } = this.props;
    if (step > 0) {
      this.notifyStepChange(step - 1);
    } else if (isStepBelow) {
      this.props.onStepBelow();
    }
  };
  // Jump back to the first step.
  handleReset = () => {
    this.notifyStepChange(0);
  };
  // Curried handler: returns an event handler that selects `step`.
  handleStepChange = (step) => (event, value) => {
    this.notifyStepChange(step);
  };
  // Icon-only Back/Next buttons when requested explicitly via the
  // `useIcon` prop, or automatically on narrow (xs/sm) viewports.
  useIcon = () => {
    const { useIcon, width } = this.props;
    if (useIcon) {
      return true;
    }
    switch (width) {
      case "xs":
      case "sm":
        return true;
      default:
        return false;
    }
  };
  // Compact variant: a dropdown listing every step by label.
  renderSelect() {
    const { steps, step, header, classes } = this.props;
    const { handleStepChange } = this;
    return (
      <div className={classes.select}>
        <MuiSelect
          id="select"
          header={header}
          value={step}
          onChange={(e, v) => handleStepChange(_.get(e, "target.value", v))()}
          renderValue={(v) => {
            // Fall back to the raw value if the index has no label.
            return _.get(steps, [v, "label"], v);
          }}
        >
          {steps.map(function(item, index) {
            return (
              <MenuItem key={`item-${item.label}`} value={index}>
                {item.label}
              </MenuItem>
            );
          })}
        </MuiSelect>
      </div>
    );
  }
  // Default variant: the full Material-UI horizontal stepper.
  renderStepper() {
    const {
      steps,
      step,
      alternativeLabel,
      nonLinear,
      disableGutters,
      classes,
    } = this.props;
    const { handleStepChange } = this;
    return (
      <Stepper
        className={clsx(
          classes.stepper,
          disableGutters && classes.disableGutters
        )}
        alternativeLabel={alternativeLabel}
        nonLinear={nonLinear}
        activeStep={step}
      >
        {steps.map(function(item, index) {
          return (
            <Step key={`step-${item.name}`}>
              <StepButton
                key={`step-button-${item.name}`}
                completed={false}
                onClick={nonLinear ? handleStepChange(index) : undefined}
                optional={item.optional}
                icon={item.icon}
                disableRipple
              >
                <StepLabel
                  className={clsx(_.isEmpty(item.name) && classes.stepLabel)}
                  key={`step-label-${item.name}`}
                  optional={item.optional}
                  icon={item.icon}
                >
                  {item.name}
                </StepLabel>
              </StepButton>
            </Step>
          );
        })}
      </Stepper>
    );
  }
  render() {
    const {
      steps,
      step,
      disabled,
      disableGutters,
      classes,
      className,
      isStepBelow,
      isStepAbove,
      variant,
    } = this.props;
    const icon = this.useIcon();
    // A direction is blocked at the boundary or when the neighbouring
    // step index appears in `disabled` — unless the parent exposes a
    // step below/above this stepper.
    const stepBelow = step === 0 || disabled.indexOf(step - 1) !== -1;
    const stepAbove =
      step === steps.length - 1 || disabled.indexOf(step + 1) !== -1;
    return (
      <Toolbar
        {..._.pick(this.props, ["style"])}
        className={clsx(
          className,
          classes.toolbar,
          disableGutters && classes.disableGutters,
          variant === "compact" && classes.compactToolbar
        )}
      >
        <Button
          style={{ minWidth: icon ? 16 : 64 }}
          disabled={!isStepBelow && stepBelow}
          onClick={this.handleBack.bind(this)}
          color="secondary"
          variant="contained"
        >
          {icon ? <NavigateBeforeIcon /> : "Back"}
        </Button>
        {variant === "compact" ? this.renderSelect() : this.renderStepper()}
        <Button
          style={{ minWidth: icon ? 16 : 64 }}
          disabled={!isStepAbove && stepAbove}
          onClick={this.handleNext.bind(this)}
          color="secondary"
          variant="contained"
        >
          {icon ? <NavigateNextIcon /> : "Next"}
        </Button>
      </Toolbar>
    );
  }
}
// Runtime prop contract for MuiStepper.
MuiStepper.propTypes = {
  step: PropTypes.number.isRequired,
  steps: PropTypes.arrayOf(
    PropTypes.shape({
      name: PropTypes.string,
    })
  ).isRequired,
  disabled: PropTypes.arrayOf(PropTypes.number.isRequired),
  nonLinear: PropTypes.bool,
  header: PropTypes.string,
  variant: PropTypes.oneOf(["compact", "default", undefined]),
  onStepChange: PropTypes.func,
  isStepBelow: PropTypes.bool,
  isStepAbove: PropTypes.bool,
  onStepBelow: PropTypes.func,
  onStepAbove: PropTypes.func,
};
// Inject responsive width (for icon mode) and JSS classes.
export default withWidth()(withStyles(styles)(MuiStepper));
|
#ifdef __INTEL_COMPILER
#define _BSD_SOURCE 1
#define _POSIX_C_SOURCE 200809L
#endif
#include <stdio.h>
#include <stdbool.h>
#include "windows.h"
#include "utils.h"
#define MIN(a,b) ((a<b)?a:b)
#define MAX(a,b) ((a>b)?a:b)
int getppid() {
int pid = GetCurrentProcessId();
HANDLE hProcessSnap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
PROCESSENTRY32 pe;
// Set the size of the structure before using it.
pe.dwSize = sizeof(PROCESSENTRY32);
// Get info about first process.
if(!Process32First(hProcessSnap, &pe)) {
printf("Unable to get parent pid");
exit(1);
}
// Walk the snapshot of processes to find the parent.
do {
if (pe.th32ProcessID == pid) {
return pe.th32ParentProcessID;
}
} while(Process32Next(hProcessSnap, &pe));
CloseHandle(hProcessSnap);
printf("Unable to get parent pid");
exit(1);
}
// Obtain the process's standard-input handle; exits on failure.
HANDLE open_stdin() {
    HANDLE h = GetStdHandle(STD_INPUT_HANDLE);
    if (h != INVALID_HANDLE_VALUE) {
        return h;
    }
    printf("Unable to get stdin handle.");
    exit(1);
}
// Open an existing named pipe (e.g. "\\.\pipe\name") for reading.
// Exits the program with the Win32 error code printed on failure.
HANDLE open_named_pipe(const char* pipe_name) {
    HANDLE h_input = CreateFile(pipe_name,
        GENERIC_READ,      // read-only access
        0,                 // no sharing
        NULL,              // default security attributes
        OPEN_EXISTING,     // the pipe must already exist
        FILE_ATTRIBUTE_NORMAL,
        NULL
    );
    if (h_input == INVALID_HANDLE_VALUE) {
        printf("CreateFile failed with error %u\n", (unsigned)GetLastError());
        exit(1);
    }
    return h_input;
}
// Prepare an input handle for non-blocking, character-at-a-time reads.
// Consoles get line buffering and echo disabled; pipes need no setup;
// anything else is rejected with exit(1).
void configure_input_handle(HANDLE h_input) {
    DWORD handle_type = GetFileType(h_input);
    if (handle_type == FILE_TYPE_CHAR) {
        DWORD lpmode;
        // NOTE(review): GetConsoleMode's return value is not checked;
        // on failure lpmode would be used uninitialized — confirm.
        GetConsoleMode(h_input, &lpmode);
        // Disable line input
        lpmode = lpmode &
            ~ENABLE_LINE_INPUT &
            ~ENABLE_ECHO_INPUT;
        // Only listen for character input events
        if (!SetConsoleMode(h_input, lpmode)) {
            printf("Unable to set console mode. %d", (int)GetLastError());
            exit(1);
        }
    } else if (handle_type == FILE_TYPE_PIPE) {
        // No need to do anything
    } else if (handle_type == FILE_TYPE_DISK) {
        printf("Don't know how to handle FILE_TYPE_DISK.");
        exit(1);
    } else {
        printf("Unknown input type.");
        exit(1);
    }
}
// If there's a complete line of text, put that line in `buf` and return
// `buf`; otherwise return NULL without blocking. Works for both console
// handles (scans pending key events) and pipe handles (peeks the pipe).
// Fixes vs. the original:
//   * input_char_buf was passed to snprintf("%s") without ever being
//     NUL-terminated, over-reading stack memory;
//   * snprintf's size argument counts the terminator, so sizing it as
//     MIN(n, max_chars) silently dropped the last character.
char* get_line_nonblock(char* buf, int max_chars, HANDLE h_input) {
    // Check what type of thing we're reading from
    DWORD input_type = GetFileType(h_input);
    // Debugging info
    char* input_type_name;
    switch(input_type) {
        case FILE_TYPE_CHAR:
            input_type_name = "FILE_TYPE_CHAR (console)";
            break;
        case FILE_TYPE_DISK:
            input_type_name = "FILE_TYPE_DISK";
            break;
        case FILE_TYPE_PIPE:
            input_type_name = "FILE_TYPE_PIPE";
            break;
        default:
            input_type_name = "Unknown";
    }
    if (input_type == FILE_TYPE_CHAR) {
        DWORD num_peeked;
        INPUT_RECORD in_record_buf[WIN_INPUT_BUF_LEN];
        char input_char_buf[WIN_INPUT_BUF_LEN];
        int input_char_buf_n = 0;
        // PeekConsoleInput first, because ReadConsoleInput would block
        // when no input is pending.
        if (!PeekConsoleInput(h_input, in_record_buf, WIN_INPUT_BUF_LEN, &num_peeked)) {
            printf("Error peeking at console input.\n");
            return NULL;
        }
        if (num_peeked == 0) {
            return NULL;
        }
        bool found_newline = false;
        int i;
        for (i = 0; i < (int)num_peeked; i++) {
            // Key-down events with a non-zero character only (special
            // keys like Shift report AsciiChar == 0).
            if (in_record_buf[i].EventType == KEY_EVENT &&
                in_record_buf[i].Event.KeyEvent.bKeyDown &&
                in_record_buf[i].Event.KeyEvent.uChar.AsciiChar != 0)
            {
                char c = in_record_buf[i].Event.KeyEvent.uChar.AsciiChar;
                if (c == '\r') {
                    // Enter: normalize to '\n' and stop scanning.
                    found_newline = true;
                    input_char_buf[input_char_buf_n] = '\n';
                    input_char_buf_n++;
                    break;
                } else {
                    input_char_buf[input_char_buf_n] = c;
                    input_char_buf_n++;
                }
            }
        }
        if (found_newline) {
            // Number of events up to and including the '\n'
            DWORD num_events_read = i + 1;
            DWORD num_events_read2;
            // Consume the console buffer up to the '\n' event.
            if (!ReadConsoleInput(h_input, in_record_buf, num_events_read, &num_events_read2)) {
                printf("Error reading console input.\n");
                return NULL;
            }
            // NUL-terminate before "%s", and size the copy so the final
            // character survives snprintf's terminator byte.
            input_char_buf[MIN(input_char_buf_n, WIN_INPUT_BUF_LEN - 1)] = '\0';
            snprintf(buf, MIN(input_char_buf_n + 1, max_chars), "%s", input_char_buf);
            return buf;
        } else {
            return NULL;
        }
    } else if (input_type == FILE_TYPE_PIPE) {
        DWORD num_peeked;
        char input_char_buf[WIN_INPUT_BUF_LEN];
        int input_char_buf_n = 0;
        if (!PeekNamedPipe(h_input, input_char_buf, WIN_INPUT_BUF_LEN, &num_peeked, NULL, NULL)) {
            printf("Error peeking at pipe input. Error %d.\n", (unsigned)GetLastError());
            return NULL;
        }
        bool found_newline = false;
        for (int i = 0; i < (int)num_peeked; i++) {
            if (input_char_buf[i] == '\r' || input_char_buf[i] == '\n') {
                found_newline = true;
            }
            input_char_buf_n++;
        }
        DWORD num_read;
        if (found_newline) {
            // Consume the peeked bytes from the pipe.
            if (!ReadFile(h_input, input_char_buf, input_char_buf_n, &num_read, NULL)) {
                printf("Error reading pipe input.\n");
                return NULL;
            }
            // Same termination/size fix as the console path, using the
            // byte count actually read.
            input_char_buf[MIN((int)num_read, WIN_INPUT_BUF_LEN - 1)] = '\0';
            snprintf(buf, MIN((int)num_read + 1, max_chars), "%s", input_char_buf);
            return buf;
        } else {
            return NULL;
        }
    } else {
        printf("Unsupported input type: %s\n", input_type_name);
        exit(1);
    }
    return buf;
}
// Send Ctrl-C to a program if it has a console.
// Detaches from our own console, attaches to the target's, and raises
// CTRL_C_EVENT.
// NOTE(review): GenerateConsoleCtrlEvent(..., 0) signals every process
// attached to that console, and this process stays attached afterwards —
// confirm that is the intended behavior for callers.
void sendCtrlC(int pid) {
    verbose_printf("Sending ctrl+c to pid %d", pid);
    FreeConsole();
    if (AttachConsole(pid)) {
        GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
    } else {
        verbose_printf("Error attaching to console for PID: %d\n", pid);
    }
}
// Callback function that closes a window if the PID matches the value passed
// in to lParam.
// Fix: the address-of expression had been corrupted to "¤t_pid"
// (an HTML-entity mangling of "&current_pid"), which does not compile.
BOOL CALLBACK enumCloseWindowProc(_In_ HWND hwnd, LPARAM lParam) {
    DWORD current_pid = 0;
    GetWindowThreadProcessId(hwnd, &current_pid);
    if (current_pid == (DWORD) lParam) {
        PostMessage(hwnd, WM_CLOSE, 0, 0);
    }
    return true;  // keep enumerating all windows
}
// Post WM_CLOSE to every top-level window owned by `pid`.
void sendWmClose(int pid) {
    EnumWindows(enumCloseWindowProc, (LPARAM)pid);
}
// Terminate the process identified by dwProcessId.
// Returns TRUE on success, FALSE if the process could not be opened or
// terminated.
BOOL kill_pid(DWORD dwProcessId) {
    BOOL result = FALSE;
    HANDLE hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, dwProcessId);
    if (hProcess != NULL) {
        result = TerminateProcess(hProcess, 1);
        CloseHandle(hProcess);
    }
    return result;
}
|
<reponame>pythian/skeletos<gh_stars>1-10
import {Primitive, State} from "../../../../../../../../../core";
import {OrgMemberProfileSettingsRoute} from "./orgmembersettings/OrgMemberProfileSettingsRoute";
import {OrgMemberAuthenticationSettingsRoute} from "./orgmembersettings/OrgMemberAuthenticationSettingsRoute";
import {DummyRouteTreeSyncAction} from "../../../../../DummyRouteTreeSyncAction";
import {DummyRouteSyncAction} from "../../../../../DummyRouteSyncAction";
import {DummyRouteAsyncAction} from "../../../../../DummyRouteAsyncAction";
import {DummyRouteTreeAsyncAction} from "../../../../../DummyRouteTreeAsyncAction";
import {AbstractRouteAction, AbstractRouteState, QueryParam, Segment} from "../../../../../../../../../web-router";
/**
 * Route state for an organization member's settings tab.
 * Exposes a "?full=<bool>" query parameter plus two child segments,
 * "/profile" and "/auth". All four route lifecycle hooks delegate to the
 * dummy actions imported above.
 */
export class OrgMemberSettingsTabRoute extends AbstractRouteState {
    // "?full=..." — whether to show the full settings view.
    @QueryParam("full")
    @Primitive()
    showFullSettings: boolean;
    // "/profile" child route.
    @Segment("profile")
    @State(() => OrgMemberProfileSettingsRoute)
    orgMemberProfile: OrgMemberProfileSettingsRoute;
    // "/auth" child route.
    @Segment("auth")
    @State(() => OrgMemberAuthenticationSettingsRoute)
    orgMemberAuthentication: OrgMemberAuthenticationSettingsRoute;
    onRouteUpdatedSync(): typeof AbstractRouteAction {
        return DummyRouteSyncAction as any;
    }
    onRouteUpdatedAsync(): typeof AbstractRouteAction {
        return DummyRouteAsyncAction as any;
    }
    onRouteTreeUpdatedSync(): typeof AbstractRouteAction {
        return DummyRouteTreeSyncAction as any;
    }
    onRouteTreeUpdatedAsync(): typeof AbstractRouteAction {
        return DummyRouteTreeAsyncAction as any;
    }
}
|
#!/bin/bash
# Version-pinned convenience wrappers: $1 = action, $2 = optional POST data.
function veracode-api-invoke-v2 { veracode-api-invoke 2.0 $1 $2 ; }
function veracode-api-invoke-v4 { veracode-api-invoke 4.0 $1 $2 ; }
function veracode-api-invoke-v5 { veracode-api-invoke 5.0 $1 $2 ;}
# Call a Veracode XML API endpoint and echo the response body.
#   $1 = API version, $2 = action (endpoint name), $3 = optional POST data.
# Requires API_USERNAME / API_PASSWORD in the environment.
# NOTE(review): $targetUrl/$data/credentials are unquoted — works for the
# current inputs but would break on values containing spaces.
function veracode-api-invoke {
    local targetUrl="https://analysiscenter.veracode.com/api/$1/$2.do"
    local data=""
    if [[ "$3" != "" ]]; then
        data="--data $3"
    fi
    #echo
    #echo $targetUrl $data
    echo $(curl --silent --compressed -u $API_USERNAME:$API_PASSWORD $targetUrl $data)
}
# v5 endpoint call with a multipart form field (curl -F), e.g. uploads.
#   $1 = action, $2 = optional "name=value" form field.
function veracode-api-invoke-v5-F {
    local targetUrl="https://analysiscenter.veracode.com/api/5.0/$1.do"
    if [[ "$2" != "" ]]; then
        local data="-F $2"
    fi
    #echo $targetUrl $data
    curl --compressed -u $API_USERNAME:$API_PASSWORD $targetUrl $data
}
# Download an API response to a file.
#   $1 = API version, $2 = action, $3 = query string, $4 = output file.
function veracode-api-download {
    local target_Url="https://analysiscenter.veracode.com/api/$1/$2.do?$3"
    local target_File=$4
    #echo "Downloading $2 to $target_File"
    curl --silent --compressed -u $API_USERNAME:$API_PASSWORD -o $target_File $target_Url
}
# Extract an attribute value from an XML/text blob.
#   $1 = text to search, $2 = attribute name, $3 = awk field position.
# Formats the text (one attribute per line), greps for "$2", then splits
# the matching line on double quotes and prints field $3.
function get-value-from-string {
    local data=$1           # text to search
    local selector="\"$2\"" # value to find
    local position=$3       # position to return
    local formated_Data=$(format-xml "$data") # format it so that grep works
    #echo "$formated_Data"
    #echo "$selector"
    echo $(echo "$formated_Data" | \
        grep "$selector" | \
        awk -F"\"" "{print \$$position}") # feed value of data
    # into grep which will pick the lines with $selector # split strings and get value in $ position
}
### bash utils
# Create a directory if it does not already exist.
# Improvements: quoted expansion (paths with spaces) and mkdir -p
# (creates missing parents; backward compatible for existing callers).
function create_folder {
    local folder="$1"
    if [ ! -d "$folder" ]; then
        mkdir -p "$folder"
    fi
}
# Delete a file if it exists.
# Fix: the original condition was inverted ([ ! -f "$file" ]), so rm only
# ran when the file was already missing — i.e. it never deleted anything
# and errored on absent files.
function delete_file {
    local file="$1"
    if [ -f "$file" ]; then
        rm "$file"
    fi
}
# cd into a directory, creating it (and parents) first if needed.
# Improvements: quoted expansion and an explicit failure status if the
# cd itself fails (previously the failure was silent).
function goto_folder {
    local folder="$1"
    if [ ! -d "$folder" ]; then
        mkdir -p "$folder"
    fi
    cd "$folder" || return 1
}
|
// NOTE(review): scaffolding/stub types. Several referenced names
// (Context, Needs, AssembleError, Label, ApplyError) are not defined in
// this file, and Body/Branch contain each other by value, which is an
// infinitely-sized type as written (the real definitions presumably box
// one side, e.g. Option<Box<Branch>> — confirm before use).
struct Asm {
    // Define the Asm struct with necessary methods and fields
}
// Owns the low-level assembler plus a stack of assembly contexts.
struct Assembler {
    asm: Asm,
    contexts: Vec<Context>,
}
// A labelled body that may carry a conditional branch.
struct Body {
    branch: Option<Branch>,
    label: Label,
}
// A branch back into a body.
struct Branch {
    body: Body,
}
impl Assembler {
    // Stub: generate machine code for the current context stack.
    fn assemble(&mut self, needs: Needs) -> Result<(), AssembleError> {
        // Implement the assemble method to generate machine code
        // Handle conditional branching using self.asm.jump(end_label, span)
        // Handle default branching using self.asm.label(label) and branch.body.assemble(c, needs)?.apply(c)
        // Properly handle contexts during the assembly process
        Ok(())
    }
}
impl Body {
    // Stub: apply/execute the generated code for this body.
    fn apply(&self, c: &mut Assembler) -> Result<(), ApplyError> {
        // Implement the apply method to execute the generated machine code
        // Execute the generated machine code based on the label and branch conditions
        // Properly handle contexts during the execution process
        Ok(())
    }
}
|
package main

// TreeNode is a binary tree node.
type TreeNode struct {
	Val   int
	Left  *TreeNode
	Right *TreeNode
}

// maxDepth returns the number of nodes on the longest root-to-leaf path.
// A nil tree has depth 0.
//
// Improvement: uses a plain integer comparison instead of the original
// math.Max(float64(...), float64(...)) round-trip, which was unidiomatic
// and lossy in principle for very large ints (the "math" import is no
// longer needed).
func maxDepth(root *TreeNode) int {
	if root == nil {
		return 0
	}
	left := maxDepth(root.Left)
	right := maxDepth(root.Right)
	if left > right {
		return 1 + left
	}
	return 1 + right
}
|
/*
* Copyright 2018 The boardgame.io Authors.
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import React from 'react';
import PropTypes from 'prop-types';
import { Game } from 'boardgame.io/core';
import { Client } from 'boardgame.io/react';
import './phases.css';
// Two-phase demo game: start with 5 cards in the deck, 0 in hand.
// "take phase" only allows takeCard and ends when the deck is empty;
// "play phase" only allows playCard and ends when the hand is empty.
const game = Game({
  setup: () => ({ deck: 5, hand: 0 }),
  moves: {
    // Moves return a new G rather than mutating it.
    takeCard: G => ({ ...G, deck: G.deck - 1, hand: G.hand + 1 }),
    playCard: G => ({ ...G, deck: G.deck + 1, hand: G.hand - 1 }),
  },
  flow: {
    phases: [
      {
        name: 'take phase',
        endPhaseIf: G => G.deck <= 0,
        allowedMoves: ['takeCard'],
      },
      {
        name: 'play phase',
        allowedMoves: ['playCard'],
        endPhaseIf: G => G.hand <= 0,
      },
    ],
  },
});
class Board extends React.Component {
static propTypes = {
G: PropTypes.any.isRequired,
ctx: PropTypes.any.isRequired,
moves: PropTypes.any,
events: PropTypes.any,
};
takeCard = () => {
if (this.props.ctx.phase != 'take phase') return;
this.props.moves.takeCard();
this.props.events.endTurn();
};
playCard = () => {
if (this.props.ctx.phase != 'play phase') return;
this.props.moves.playCard();
this.props.events.endTurn();
};
render() {
return (
<div className="phases">
<li style={{ background: '#aaa' }}>{this.props.ctx.phase}</li>
<li>Deck: {this.props.G.deck}</li>
<li>Hand: {this.props.G.hand}</li>
<li>
<button id="take" onClick={this.takeCard}>
Take Card
</button>
</li>
<li>
<button id="play" onClick={this.playCard}>
Play Card
</button>
</li>
</div>
);
}
}
// Single-player client wiring the phase demo game to the Board UI.
const Phases = Client({
  game,
  numPlayers: 1,
  board: Board,
});
export default Phases;
|
<filename>acmicpc.net/source/10815.cpp<gh_stars>1-10
// 10815. 숫자카드
// 2019.05.22
// 이분 탐색
#include<algorithm>
#include<iostream>
#include<vector>
using namespace std;
// Reads n card numbers, then answers m membership queries, printing "1"
// if the query value is among the cards and "0" otherwise.
// Overall O((n + m) log n) via sort + binary_search.
int main()
{
    int n, m;
    cin >> n;
    vector<int> v(n);
    for (int i = 0; i < n; i++)
    {
        cin >> v[i];
    }
    cin >> m;
    vector<int> b(m);
    for (int i = 0; i < m; i++)
    {
        cin >> b[i];
    }
    // Sort so binary_search's precondition (sorted range) holds.
    sort(v.begin(), v.end());
    // Binary-search each of the m query numbers.
    for (int i = 0; i < m; i++)
    {
        int k = b[i];
        if (binary_search(v.begin(), v.end(), k))
        {
            cout << "1 ";
        }
        else
        {
            cout << "0 ";
        }
    }
    cout << endl;
    return 0;
}
|
# mountShares.sh
# This file must have LF (UNIX-style) line endings!
# Usage: mountShares.sh <ssh-private-key-file>
# Mounts a CIFS share locally, then repeats the mount on every non-leader
# node of a DC/OS (Mesos) cluster over SSH.
keyfile=$1
sh cifsMount.sh
# Install jq used for the next command
sudo apt-get install -y jq
# Get the IP address of each node using the mesos API and store it inside a file called nodes
# NOTE(review): the sed '/172/d' drops any address containing "172"
# (presumably the leader / internal range) — confirm it cannot drop
# legitimate agent IPs.
curl http://leader.mesos:1050/system/health/v1/nodes | jq '.nodes[].host_ip' | sed 's/\"//g' | sed '/172/d' > nodes
# From the previous file created, run our script to mount our share on each node
while read line; do
    ssh `whoami`@$line -o StrictHostKeyChecking=no -i ${keyfile} < ./cifsMount.sh
done < nodes
|
import React from "react"
import renderer from "react-test-renderer"
import DriversTable from "."
import { DriversList } from "../../types"
// Snapshot tests: render DriversTable with an empty list and with a
// single fully-populated driver record, and compare against the stored
// snapshots.
describe("components/DriversTable", (): void => {
  it.each([
    [[]],
    [
      [
        {
          driverId: "foo",
          permanentNumber: "13",
          code: "FOO",
          url: "https://foo.bar",
          givenName: "Foo",
          familyName: "Bar",
          dateOfBirth: "1980-01-01",
          nationality: "bar",
        },
      ],
    ],
  ])(
    "data: %p",
    (data: DriversList): void => {
      const tree = renderer.create(<DriversTable data={data} />)
      expect(tree.toJSON()).toMatchSnapshot()
    },
  )
})
|
<reponame>seants/integrations-core
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import shutil
import subprocess
import sys
import tempfile
from datadog_checks.directory import DirectoryCheck
def test_run(benchmark):
    """Benchmark DirectoryCheck.check against a freshly created virtualenv.

    Building a virtualenv in a temp dir gives a realistic, non-trivial
    directory tree for the recursive check to walk.
    """
    temp_dir = tempfile.mkdtemp()
    command = [sys.executable, '-m', 'virtualenv', temp_dir]
    instance = {'directory': temp_dir, 'recursive': True}
    try:
        subprocess.call(command)
        c = DirectoryCheck('directory', None, {}, [instance])
        benchmark(c.check, instance)
    finally:
        # Always remove the temp tree, even if the benchmark fails.
        shutil.rmtree(temp_dir)
|
#!/bin/sh
# Symlink CKAN's who.ini into /etc/ckan/default; the surrounding ls calls
# are debugging output showing state before and after the link.
echo "*** Running linkWho ***"
#ln -s /home/ubuntu/src/ckan/who.ini /etc/ckan/default/who.ini
ls -ltr /usr/lib/ckan/default/src/ckan/who.ini
ls -ltr /etc/ckan/default/who.ini
# NOTE(review): ln -s fails if the link target already exists — confirm
# whether -sf (force) is intended for re-runs.
ln -s /usr/lib/ckan/default/src/ckan/who.ini /etc/ckan/default/who.ini
ls -ltr /usr/lib/ckan/default/src/ckan/who.ini
ls -ltr /etc/ckan/default
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-shuffled-N-VB/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-shuffled-N-VB/7-512+0+512-N-VB-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_first_half_full --eval_function last_element_eval
|
#!/bin/bash
# Build triangle.c to WebAssembly with emscripten.
#   -o : enable size optimisation (-Os)
#   -s : enable AddressSanitizer + debug build (includes debug.c)
set -e
optimise=''
sanitise=''
while getopts 'os' flag; do
    case "${flag}" in
        o) optimise='true' ;;
        s) sanitise='true' ;;
    esac
done
export OPTIMISE=''
export SANITISE=''
if [[ $optimise ]]; then
    OPTIMISE=-Os
fi
if [[ $sanitise ]]; then
    SANITISE="-s INITIAL_MEMORY=655360000 -fsanitize=address -g2 debug.c"
fi
# Subshell so emcc's working state doesn't leak into the caller's shell.
(
    emcc \
        -I ./triangle \
        -s MODULARIZE=1 \
        -s EXPORTED_RUNTIME_METHODS="['lengthBytesUTF8', 'stringToUTF8']" \
        -s EXPORTED_FUNCTIONS="['_malloc', '_free', '_triangulate']" \
        -o triangle.out.js \
        ${OPTIMISE} \
        ${SANITISE} \
        ./triangle/triangle.c -DTRILIBRARY
)
# Usage:
# ./build.sh -o
|
<reponame>segmentify/segmentify-android-sdk
package com.segmentify.segmentifyandroid;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.segmentify.segmentifyandroidsdk.model.ProductRecommendationModel;
import com.squareup.picasso.Picasso;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
/**
 * ListView adapter showing recommended products (image, name, price),
 * with an "add to basket" button (hidden after purchase) and row click
 * navigating to the product detail screen.
 */
public class ListAdapter extends BaseAdapter{
    ArrayList<ProductRecommendationModel> productRecommendationModelArrayList;
    Activity activity;
    // NOTE(review): these view references are overwritten on every
    // getView call and shared across rows — confirm they are not read
    // outside getView.
    ImageView ivProduct;
    Button btnAdd, btnDetail;
    SharedPreferences sharedPref;
    boolean isAfterPurchase; // hides the "add" button when true
    public ListAdapter(Activity activity, ArrayList<ProductRecommendationModel> productRecommendationModelArrayList, boolean isAfterPurchase) {
        this.activity = activity;
        this.productRecommendationModelArrayList = productRecommendationModelArrayList;
        this.isAfterPurchase = isAfterPurchase;
    }
    public int getCount() {
        return productRecommendationModelArrayList.size();
    }
    public Object getItem(int arg0) {
        return productRecommendationModelArrayList.get(arg0);
    }
    // NOTE(review): always returns 0; the usual convention is to return
    // `position` when items have no stable IDs — confirm.
    public long getItemId(int position) {
        return 0;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        final LayoutInflater inflater = activity.getLayoutInflater();
        View row;
        // NOTE(review): convertView is ignored, so every row is
        // re-inflated (no view recycling).
        row = inflater.inflate(R.layout.item_list, parent, false);
        TextView tvProductname, tvPrice;
        tvProductname = (TextView) row.findViewById(R.id.tvProductName);
        tvPrice = (TextView) row.findViewById(R.id.tvPrice);
        ivProduct=(ImageView)row.findViewById(R.id.imgProduct);
        btnAdd = row.findViewById(R.id.btnAdd);
        btnDetail = row.findViewById(R.id.btnDetail);
        // Normalize known malformed image-URL prefixes to https://.
        // NOTE(review): URLs with any other prefix leave fix_image empty,
        // which Picasso rejects — confirm all feeds match these cases.
        String fix_image ="";
        if(productRecommendationModelArrayList.get(position).getImage().startsWith("https:https://"))
        {
            fix_image = productRecommendationModelArrayList.get(position).getImage().replace("https:https://","https://");
        }
        else if(productRecommendationModelArrayList.get(position).getImage().startsWith("//"))
        {
            fix_image = "https:" + productRecommendationModelArrayList.get(position).getImage();
        }
        else if(productRecommendationModelArrayList.get(position).getImage().startsWith("https://"))
        {
            fix_image = productRecommendationModelArrayList.get(position).getImage();
        }
        Picasso.get().load(fix_image).into(ivProduct);
        tvProductname.setText(productRecommendationModelArrayList.get(position).getName());
        tvPrice.setText(productRecommendationModelArrayList.get(position).getPrice() + " TL");
        if(isAfterPurchase){
            btnAdd.setVisibility(View.GONE);
        }
        else{
            btnAdd.setVisibility(View.VISIBLE);
        }
        // "Add" opens the basket screen with this product's details.
        btnAdd.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent intent = new Intent(activity, BasketDetailActivity.class);
                intent.putExtra("productId", productRecommendationModelArrayList.get(position).getProductId());
                intent.putExtra("name", productRecommendationModelArrayList.get(position).getName());
                intent.putExtra("price", productRecommendationModelArrayList.get(position).getPrice().toString());
                intent.putExtra("image", productRecommendationModelArrayList.get(position).getImage());
                activity.startActivity(intent);
            }
        });
        /*btnDetail.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
            }
        });*/
        // Row click opens the product detail screen.
        row.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent intent = new Intent(activity, ProductDetailActivity.class);
                intent.putExtra("productId", productRecommendationModelArrayList.get(position).getProductId());
                intent.putExtra("name", productRecommendationModelArrayList.get(position).getName());
                intent.putExtra("price", productRecommendationModelArrayList.get(position).getPrice().toString());
                intent.putExtra("image", productRecommendationModelArrayList.get(position).getImage());
                intent.putExtra("url",productRecommendationModelArrayList.get(position).getUrl());
                activity.startActivity(intent);
            }
        });
        return (row);
    }
}
|
<reponame>lucacasonato/deno-fetchevent<gh_stars>1-10
/**
 * Adapts a web ReadableStream<Uint8Array> to Deno's Reader interface.
 * Chunks larger than the caller's view are buffered internally and
 * handed out across subsequent read() calls.
 */
export class ReadableStreamIOReader implements Deno.Reader {
  #reader: ReadableStreamDefaultReader<Uint8Array>;
  // Leftover bytes from a chunk that didn't fit the caller's view.
  #buffer: Uint8Array | null;
  #encoder: TextEncoder;
  constructor(dst: ReadableStream<Uint8Array>) {
    this.#reader = dst.getReader();
    this.#buffer = null;
    this.#encoder = new TextEncoder();
  }
  // Fill `p` with up to p.length bytes; returns bytes written, 0 when a
  // chunk was empty, or null at end of stream.
  async read(p: Uint8Array): Promise<number | null> {
    let value = this.#buffer;
    if (value === null) {
      // if buffer is empty, read from the stream
      const res = await this.#reader.read();
      if (res.done) {
        this.#reader.releaseLock();
        return null; // EOF
      }
      // TODO(lucacasonato): remove. workaround for https://github.com/denoland/deno/pull/8030
      value = res.value ? new Uint8Array(res.value) : res.value;
      if (!value) {
        return 0;
      }
      if (typeof value === "string") {
        value = this.#encoder.encode(value);
      }
    } else if (this.#buffer !== null && p.length >= this.#buffer.length) {
      // If buffer size is lower or equal than view size, set buffer to null
      this.#buffer = null;
    }
    if (value.length > p.length) {
      // If chunk is bigger than view buffer exceeding bytes
      this.#buffer = value.subarray(p.length);
      value = value.subarray(0, p.length);
      if (!this.#buffer.length) {
        this.#buffer = null;
      }
    }
    p.set(value, 0);
    return value.length;
  }
}
|
#!/usr/bin/env bash
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script is invoked by Jenkins and runs portability tests based on
# env variable setting.
#
# Setting up rvm environment BEFORE we set -ex.
[[ -s /etc/profile.d/rvm.sh ]] && . /etc/profile.d/rvm.sh
# To prevent cygwin bash complaining about empty lines ending with \r
# we set the igncr option. The option doesn't exist on Linux, so we fallback
# to just 'set -ex' there.
# NOTE: No empty lines should appear in this file before igncr is set!
set -ex -o igncr || set -ex
echo "building $scenario"
# If scenario has _bo suffix, add --build_only flag.
# Short suffix name had to been chosen due to path length limit on Windows.
if [ "$scenario" != "${scenario%_bo}" ]
then
    scenario="${scenario%_bo}"
    BUILD_ONLY_MAYBE="--build_only"
fi
# Scenario name is "<platform>_<arch>_<compiler>"; split on underscores.
parts=($(echo $scenario | tr '_' ' ')) # split scenario into parts
curr_platform=${parts[0]} # variable named 'platform' breaks the windows build
curr_arch=${parts[1]}
curr_compiler=${parts[2]}
config='dbg'
# Linux runs inside docker; other platforms build natively.
if [ "$curr_platform" == "linux" ]
then
    USE_DOCKER_MAYBE="--use_docker"
fi
python tools/run_tests/run_tests.py $USE_DOCKER_MAYBE $BUILD_ONLY_MAYBE -t -l $language -c $config --arch ${curr_arch} --compiler ${curr_compiler} -x report.xml -j 3 $@
|
#############################################################################
# TitanicAI App - Helm
#############################################################################
# Interactive walkthrough: deploy the TitanicAI web app + API to a local
# docker-desktop Kubernetes cluster via Helm, expose them through an
# nginx ingress, and clean up afterwards.
# set context
kubectl config get-contexts
kubectl config use-context docker-desktop
kubectl config current-context
# deploy webapp & api
helm upgrade titanicai-webapp -i --create-namespace --namespace titanicai .cicd/helm/titanicai-webapp
helm upgrade titanicai-api -i --create-namespace --namespace titanicai .cicd/helm/titanicai-api
# deploy ingress controller
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo update
helm install nginx-ingress ingress-nginx/ingress-nginx \
    --namespace titanicai \
    --set controller.replicaCount=1 \
    --set controller.admissionWebhooks.enabled=false \
    --set controller.service.externalTrafficPolicy=Local
# check deployments
kubectl get all -n titanicai
kubectl get ingress -n titanicai
helm list --all -n titanicai
# launch webapp & api ("start" implies a Windows shell)
start http://web.titanicai.localhost
start http://api.titanicai.localhost/__docs__/
# clean-up
kubectl delete namespace titanicai
|
package core
import (
"log"
"sort"
"sync"
"github.com/alikarimi999/shitcoin/core/types"
)
const (
poolSize = 3
)
// pool is the transaction-pool interface implemented by TxPool.
type pool interface {
	// this function can recieve transaction or mined block
	// if block mined by another node you must send block snapshot to prevent data race
	// if block mined by this node local must set true
	UpdatePool(o any, local bool)
	// Handler runs the pool's event loop; wg is Done()d when it exits.
	Handler(wg *sync.WaitGroup)
	// ContinueHandler unblocks the event loop when cont is true.
	ContinueHandler(cont bool)
	GetWaitGroup() *sync.WaitGroup
	GetQueue() []*types.Transaction
	GetPending() []*types.Transaction
	// GenesisUpdate(b *types.Block)
}
// TxPool holds verified transactions in three stages — queued, pending
// (in the block being mined), and sealed (already mined remotely) — and
// coordinates with the miner and chain-state handlers over channels.
type TxPool struct {
	Mu *sync.Mutex // guards queueTxs/pendingTxs/sealedTxs
	c  *Chain
	WG *sync.WaitGroup // tracks the in-flight mining run
	queueTxs   Transactions // verified transactions that recieved and didn't add to any block yet
	pendingTxs Transactions // verified transactions that are in mining block
	sealedTxs  Transactions // verified transactions that sealed in mining process
	queueCh       chan *types.Transaction // inbound txs from the network
	minedRemoteCh chan Transactions       // txs sealed in a remotely mined block
	continueCh    chan struct{}           // chain-state snapshot-done signal
	minedLocal    chan bool               // local-mining-finished signal
}
// NewTxPool builds an empty pool bound to chain c. minedLocal is
// buffered (size 1) so the miner can signal without blocking.
func NewTxPool(c *Chain) *TxPool {
	t := &TxPool{
		Mu:            &sync.Mutex{},
		c:             c,
		WG:            &sync.WaitGroup{},
		queueTxs:      make(Transactions),
		pendingTxs:    make(Transactions),
		sealedTxs:     make(Transactions),
		queueCh:       make(chan *types.Transaction),
		minedRemoteCh: make(chan Transactions),
		continueCh:    make(chan struct{}),
		minedLocal:    make(chan bool, 1),
	}
	return t
}
// Handler is the pool's event loop. It never returns; wg.Done fires only
// if the enclosing goroutine exits. Three event sources:
//   - queueCh:       a new verified tx from the network; when the queue
//     fills, promote it to pending and kick off local mining;
//   - minedLocal:    local mining finished — drop pending txs;
//   - minedRemoteCh: a remote block sealed some txs — reconcile the pool.
func (tp *TxPool) Handler(wg *sync.WaitGroup) {
	defer wg.Done()
	log.Println("Transaction Pool handler start!!!")
	for {
		select {
		case tx := <-tp.queueCh: // recieve from network
			tp.Mu.Lock()
			// Store a snapshot so later mutation elsewhere can't race.
			tp.queueTxs[txid(tx.TxID)] = tx.SnapShot()
			tp.c.ChainState.StateTransition(tx, false)
			if tp.queIsFull() {
				// Promote the whole queue to pending and reset the queue.
				tp.pendingTxs = tp.queueTxs
				tp.queueTxs = make(Transactions)
				// creat miner reward transaction
				mtx := MinerReward(tp.c.MinerAdd, minerReward)
				// tp.c.ChainState.StateTransition(mtx, false)
				tp.pendingTxs[txid(mtx.TxID)] = mtx
				txs := tp.pendingTxs.convert()
				// wait untile previous Mining proccess finish
				tp.WG.Wait()
				// notify chainstate handler that mining process is going to start
				tp.c.ChainState.MinerIsStarting(true)
				<-tp.continueCh // wait for ChainState Handler until take a snapshot from memSet
				// start mining process
				tp.c.Miner.Start(txs, tp.WG)
			}
			tp.Mu.Unlock()
		case local := <-tp.minedLocal:
			tp.Mu.Lock()
			if local {
				// Our own block was mined; pending txs are now on-chain.
				tp.pendingTxs = make(Transactions)
			}
			tp.Mu.Unlock()
		case tp.sealedTxs = <-tp.minedRemoteCh:
			tp.Mu.Lock()
			tp.manageTxs()
			// FIXME:
			// sending remaining transactions of transaction pool to state Handler
			txs := []*types.Transaction{}
			for _, tx := range tp.queueTxs {
				txs = append(txs, tx.SnapShot())
			}
			tp.c.ChainState.StateTransition(txs, false)
			tp.Mu.Unlock()
		}
	}
}
// delete transactions that added to remote mined block from queueTxs and pendingTxs
// and transfer pendingTxs that didn't added to mined block to queueTxs
// Caller must hold tp.Mu.
func (tp *TxPool) manageTxs() {
	// delete used transactions
	for txid := range tp.sealedTxs {
		delete(tp.pendingTxs, txid)
		delete(tp.queueTxs, txid)
	}
	// merge pendingTxs and queueTxs
	for txid, tx := range tp.pendingTxs {
		if !tx.IsCoinbase() { // delete miner reward that remain in pendingTxs
			tp.queueTxs[txid] = tx
		}
	}
	tp.pendingTxs = make(Transactions)
}
// ContinueHandler signals, when cont is true, that the chain-state
// handler has finished its snapshot and the mining kickoff may proceed.
func (tp *TxPool) ContinueHandler(cont bool) {
	if !cont {
		return
	}
	tp.continueCh <- struct{}{}
}
// this function add transactions that added to mined block that recieved from other nodes
// Dispatches by payload type: a *types.Transaction is queued; a
// *types.Block either signals local-mining completion (local == true)
// or feeds the remotely sealed transactions to the handler. Unknown
// payload types are silently ignored.
func (tp *TxPool) UpdatePool(o any, local bool) {
	switch t := o.(type) {
	case *types.Transaction:
		tp.queueCh <- t
	case *types.Block:
		if local {
			tp.minedLocal <- local
			return
		}
		tp.minedRemoteCh <- newTransations(t.Transactions)
		return
	default:
		return
	}
}
// queIsFull reports whether the queue has reached the pool-size limit.
// Uses >= rather than the original == so an over-filled queue (should
// one ever occur) still triggers the mining path instead of growing
// unboundedly. Caller must hold tp.Mu.
func (tp *TxPool) queIsFull() bool {
	return len(tp.queueTxs) >= poolSize
}
// convert flattens the map into a slice ordered by transaction
// timestamp (stable, so equal timestamps keep map-iteration order).
func (t Transactions) convert() []*types.Transaction {
	txs := []*types.Transaction{}
	for _, tx := range t {
		txs = append(txs, tx)
	}
	sort.SliceStable(txs, func(i, j int) bool { return txs[i].Timestamp < txs[j].Timestamp })
	return txs
}
// newTransations builds a Transactions map keyed by tx ID from a slice.
func newTransations(txs []*types.Transaction) Transactions {
	t := make(Transactions)
	for _, tx := range txs {
		t[txid(tx.TxID)] = tx
	}
	return t
}
// GetWaitGroup exposes the wait group that tracks in-flight mining.
func (tp *TxPool) GetWaitGroup() *sync.WaitGroup {
	return tp.WG
}
// GetQueue returns a timestamp-ordered snapshot of the queued (not yet
// mined) transactions.
// Fix: takes the pool mutex while reading queueTxs — the Handler
// goroutine mutates that map under tp.Mu, and GetPending already locks;
// reading without the lock was a data race.
func (tp *TxPool) GetQueue() []*types.Transaction {
	tp.Mu.Lock()
	defer tp.Mu.Unlock()
	return tp.queueTxs.convert()
}
// GetPending returns a timestamp-ordered snapshot of the transactions
// currently in the block being mined. Takes the pool mutex.
func (tp *TxPool) GetPending() []*types.Transaction {
	tp.Mu.Lock()
	defer tp.Mu.Unlock()
	return tp.pendingTxs.convert()
}
// func (tp *TxPool) GenesisUpdate(b *types.Block) {
// tp.
// }
|
<reponame>Korla/boardgame.io<filename>src/ui/card.js<gh_stars>0
/*
* Copyright 2017 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import React from 'react';
import PropTypes from 'prop-types';
import Logo from './logo';
import UIContext from './ui-context';
import { Draggable, DragComponent } from 'react-dragtastic';
import './card.css';
/* eslint-disable */
// Render-prop factory for react-dragtastic's <Draggable>: returns a
// function that renders the card and, while a drag is active, hides the
// in-place card (opacity 0, pointer events off) so only the floating
// DragComponent copy is visible.
export function GetDraggable(props, classNames, cardStyle, onClick) {
  return ({ isActive, events }) => {
    return (
      <div
        className={classNames.join(' ')}
        style={{
          ...props.style,
          ...cardStyle,
          opacity: isActive ? 0 : 1,
          pointerEvents: isActive ? 'none' : 'all',
        }}
        onClick={onClick}
        {...events}
      >
        {props.isFaceUp ? props.front : props.back}
      </div>
    );
  };
}
export function GetDragComponent(
props,
classNames,
ref,
isOverAcceptedCallback
) {
return ({ x, y, isOverAccepted, currentlyHoveredDroppableId }) => {
const classes = [...classNames];
let content = props.back;
isOverAcceptedCallback(isOverAccepted);
if (props.isFaceUp) {
content = props.front;
}
if (currentlyHoveredDroppableId !== null) {
if (isOverAccepted) {
classes.push('accept');
} else {
classes.push('reject');
}
}
return (
<div
className={classes.join(' ')}
ref={ref}
style={{
cursor: 'pointer',
borderWidth: 2,
pointerEvents: 'none',
position: 'fixed',
zIndex: 2000000000,
boxShadow: '5px 5px 5px #eee',
left: x - 50,
top: y - 70,
}}
>
{content}
</div>
);
};
}
/* eslint-enable */
/**
 * Card component implementation.
 *
 * Renders the in-place draggable card plus a floating drag preview
 * (DragComponent) that follows the pointer while the card is dragged.
 * Front/back content, face-up state and drag zone are configurable via
 * props; `context` (from UIContext) supplies a unique drag id.
 */
export class CardImpl extends React.Component {
  static propTypes = {
    isFaceUp: PropTypes.bool,
    front: PropTypes.node,
    back: PropTypes.node,
    className: PropTypes.string,
    dragZone: PropTypes.string,
    style: PropTypes.any,
    onClick: PropTypes.func,
    context: PropTypes.any.isRequired,
    inDeck: PropTypes.bool,
    data: PropTypes.any,
    deckPosition: PropTypes.number,
  };

  static defaultProps = {
    onClick: () => {},
    isFaceUp: false,
    dragZone: 'bgio-card',
    front: <div className="bgio-card__front">Card</div>,
    back: (
      <div className="bgio-card__back">
        <Logo width="48" />
      </div>
    ),
  };

  constructor(props) {
    super(props);
    // Unique id ties the Draggable source to its DragComponent preview.
    this.id = props.context.genID();
    this.dragComponentRef = React.createRef();
    // Updated by GetDragComponent while hovering droppables.
    this.isOverAccepted = false;
  }

  // Forward clicks together with the card's data payload.
  onClick = () => {
    this.props.onClick(this.props.data);
  };

  render() {
    const classNames = ['bgio-card'];
    if (this.props.className) {
      classNames.push(this.props.className);
    }
    // Cards inside a deck are stacked absolutely by their deck position.
    let cardStyle = {};
    if (this.props.inDeck) {
      cardStyle = {
        position: 'absolute',
        zIndex: this.props.deckPosition,
      };
    }
    return (
      <div>
        <Draggable
          id={this.id}
          type={this.props.dragZone}
          data={this.props.data}
        >
          {GetDraggable(this.props, classNames, cardStyle, this.onClick)}
        </Draggable>
        <DragComponent for={this.id}>
          {GetDragComponent(
            this.props,
            classNames,
            this.dragComponentRef,
            o => (this.isOverAccepted = o)
          )}
        </DragComponent>
      </div>
    );
  }
}
const Card = props => (
<UIContext.Consumer>
{context => <CardImpl {...props} context={context} />}
</UIContext.Consumer>
);
export { Card };
|
<reponame>DhritiShikhar/must-gather-clean<gh_stars>0
package obfuscator
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/openshift/must-gather-clean/pkg/schema"
)
// TestIPObfuscatorStatic verifies the static replacement mode: every
// detected IPv4/IPv6 address is replaced by a fixed x-mask and recorded in
// the report, while invalid addresses, version strings and the excluded
// addresses (0.0.0.0, ::1) are left untouched.
func TestIPObfuscatorStatic(t *testing.T) {
	for _, tc := range []struct {
		name   string
		input  string
		output string
		report map[string]string
	}{
		{
			name:   "valid ipv4 address",
			input:  "received request from 192.168.1.10",
			output: "received request from xxx.xxx.xxx.xxx",
			report: map[string]string{"192.168.1.10": obfuscatedStaticIPv4},
		},
		{
			// Octet 910 is out of range, so nothing should be replaced.
			name:   "invalid ipv4 address",
			input:  "value 910.218.98.1 is not an ipv4",
			output: "value 910.218.98.1 is not an ipv4",
			report: map[string]string{},
		},
		{
			name:   "ipv4 in words",
			input:  "calling https://192.168.1.20/metrics for values",
			output: "calling https://xxx.xxx.xxx.xxx/metrics for values",
			report: map[string]string{"192.168.1.20": obfuscatedStaticIPv4},
		},
		{
			name:   "multiple ipv4s",
			input:  "received request from 192.168.1.20 proxied through 192.168.1.3",
			output: "received request from xxx.xxx.xxx.xxx proxied through xxx.xxx.xxx.xxx",
			report: map[string]string{
				"192.168.1.20": obfuscatedStaticIPv4,
				"192.168.1.3":  obfuscatedStaticIPv4,
			},
		},
		{
			name:   "valid ipv6 address",
			input:  "received request from 2001:db8::ff00:42:8329",
			output: "received request from xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx",
			report: map[string]string{
				"2001:db8::ff00:42:8329": obfuscatedStaticIPv6,
			},
		},
		{
			// NOTE(review): the report key "::2fa:bf9" does not match the
			// input literal "fdf8:f53e:61e4::18:bf9"; presumably the
			// obfuscator normalizes/compresses the IPv6 before reporting -
			// confirm against the obfuscator implementation.
			name:   "mixed ipv4 and ipv6",
			input:  "tunneling fdf8:f53e:61e4::18:bf9 as 192.168.1.30",
			output: "tunneling xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx as xxx.xxx.xxx.xxx",
			report: map[string]string{
				"192.168.1.30": obfuscatedStaticIPv4,
				"::2fa:bf9":    obfuscatedStaticIPv6,
			},
		},
		{
			// Dash-separated IPv4 as used in EC2-style hostnames.
			name:   "non standard ipv4",
			input:  "ip-10-0-129-220.ec2.aws.yaml",
			output: "ip-xxx.xxx.xxx.xxx.ec2.aws.yaml",
			report: map[string]string{
				"10-0-129-220": obfuscatedStaticIPv4,
			},
		},
		{
			name:   "non-standard ipv4 with bad separator",
			input:  "ip+10+0+129+220.ec2.aws.yaml",
			output: "ip+10+0+129+220.ec2.aws.yaml",
			report: map[string]string{},
		},
		{
			name:   "standard ipv4 and standard ipv4",
			input:  "obfuscate 10.0.129.220 and 10-0-129-220",
			output: "obfuscate xxx.xxx.xxx.xxx and xxx.xxx.xxx.xxx",
			report: map[string]string{
				"10.0.129.220": "xxx.xxx.xxx.xxx",
				"10-0-129-220": "xxx.xxx.xxx.xxx",
			},
		},
		{
			// Version-like strings must not be mistaken for IPs.
			name:   "OCP nightly version false positive",
			input:  "version: 4.8.0-0.nightly-2021-07-31-065602",
			output: "version: 4.8.0-0.nightly-2021-07-31-065602",
			report: map[string]string{},
		},
		{
			name:   "OCP version x.y.z",
			input:  "version: 4.8.12",
			output: "version: 4.8.12",
			report: map[string]string{},
		},
		{
			name:   "excluded ipv4 address",
			input:  "Listening on 0.0.0.0:8080",
			output: "Listening on 0.0.0.0:8080",
			report: map[string]string{},
		},
		{
			name:   "excluded ipv6 address",
			input:  "Listening on [::1]:8080",
			output: "Listening on [::1]:8080",
			report: map[string]string{},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			// A fresh obfuscator per case keeps reports isolated.
			o, err := NewIPObfuscator(schema.ObfuscateReplacementTypeStatic)
			assert.NoError(t, err)
			output := o.Contents(tc.input)
			assert.Equal(t, tc.output, output)
			assert.Equal(t, tc.report, o.Report())
		})
	}
}
// TestIPObfuscatorConsistent verifies the consistent replacement mode:
// each distinct IP gets a stable numbered placeholder (x-ipv4-NNNNNN-x /
// padded ipv6 variant) that is reused across lines and invocations, with
// counters incrementing in first-seen order.
func TestIPObfuscatorConsistent(t *testing.T) {
	for _, tc := range []struct {
		name   string
		input  []string
		output []string
		report map[string]string
	}{
		{
			name:   "valid ipv4 address",
			input:  []string{"received request from 192.168.1.10"},
			output: []string{"received request from x-ipv4-000001-x"},
			report: map[string]string{"192.168.1.10": "x-ipv4-000001-x"},
		},
		{
			name:   "ipv4 in words",
			input:  []string{"calling https://192.168.1.20/metrics for values"},
			output: []string{"calling https://x-ipv4-000001-x/metrics for values"},
			report: map[string]string{"192.168.1.20": "x-ipv4-000001-x"},
		},
		{
			name:   "multiple ipv4s",
			input:  []string{"received request from 192.168.1.20 proxied through 192.168.1.3"},
			output: []string{"received request from x-ipv4-000001-x proxied through x-ipv4-000002-x"},
			report: map[string]string{
				"192.168.1.20": "x-ipv4-000001-x",
				"192.168.1.3":  "x-ipv4-000002-x",
			},
		},
		{
			// The ipv6 placeholder is padded to the width of a full ipv6.
			name:   "valid ipv6 address",
			input:  []string{"received request from 2001:db8::ff00:42:8329"},
			output: []string{"received request from xxxxxxxxxxxxx-ipv6-000001-xxxxxxxxxxxxx"},
			report: map[string]string{
				"2001:db8::ff00:42:8329": "xxxxxxxxxxxxx-ipv6-000001-xxxxxxxxxxxxx",
			},
		},
		{
			// NOTE(review): as in the static test, the ipv6 report key
			// "::2fa:bf9" differs from the input literal - presumably a
			// normalized form; confirm against the implementation.
			name:   "mixed ipv4 and ipv6",
			input:  []string{"tunneling fdf8:f53e:61e4::18:bf9 as 192.168.1.30"},
			output: []string{"tunneling xxxxxxxxxxxxx-ipv6-000001-xxxxxxxxxxxxx as x-ipv4-000001-x"},
			report: map[string]string{
				"192.168.1.30": "x-ipv4-000001-x",
				"::2fa:bf9":    "xxxxxxxxxxxxx-ipv6-000001-xxxxxxxxxxxxx",
			},
		},
		{
			// Same IPs across lines must map to the same placeholders.
			name: "multiple invocations",
			input: []string{
				"received request from 192.168.1.20 for 192.168.1.30",
				"received request from 192.168.1.20 for 192.168.1.30",
			},
			output: []string{
				"received request from x-ipv4-000001-x for x-ipv4-000002-x",
				"received request from x-ipv4-000001-x for x-ipv4-000002-x",
			},
			report: map[string]string{
				"192.168.1.20": "x-ipv4-000001-x",
				"192.168.1.30": "x-ipv4-000002-x",
			},
		},
		{
			// New IPs keep incrementing the counter in first-seen order.
			name: "multiple invocations with different IPs",
			input: []string{
				"received request from 192.168.1.20 for 192.168.1.30",
				"received request from 192.168.1.21 for 192.168.1.31",
				"received request from 192.168.1.22 for 192.168.1.32",
				"received request from 192.168.1.23 for 192.168.1.33",
				"received request from 192.168.1.24 for 192.168.1.34",
			},
			output: []string{
				"received request from x-ipv4-000001-x for x-ipv4-000002-x",
				"received request from x-ipv4-000003-x for x-ipv4-000004-x",
				"received request from x-ipv4-000005-x for x-ipv4-000006-x",
				"received request from x-ipv4-000007-x for x-ipv4-000008-x",
				"received request from x-ipv4-000009-x for x-ipv4-000010-x",
			},
			report: map[string]string{
				"192.168.1.20": "x-ipv4-000001-x",
				"192.168.1.21": "x-ipv4-000003-x",
				"192.168.1.22": "x-ipv4-000005-x",
				"192.168.1.23": "x-ipv4-000007-x",
				"192.168.1.24": "x-ipv4-000009-x",
				"192.168.1.30": "x-ipv4-000002-x",
				"192.168.1.31": "x-ipv4-000004-x",
				"192.168.1.32": "x-ipv4-000006-x",
				"192.168.1.33": "x-ipv4-000008-x",
				"192.168.1.34": "x-ipv4-000010-x",
			},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			o, err := NewIPObfuscator(schema.ObfuscateReplacementTypeConsistent)
			assert.NoError(t, err)
			// Feed lines in order: the placeholder numbering depends on it.
			for i := 0; i < len(tc.input); i++ {
				assert.Equal(t, tc.output[i], o.Contents(tc.input[i]))
			}
			assert.Equal(t, tc.report, o.Report())
		})
	}
}
|
/**
 * Tracks WebGL memory allocations in two pools:
 * - `static`: size and location are both known at allocation time;
 * - `dynamic`: size is known up front, location is resolved later via
 *   resolveDynamicAllocation().
 * `mapping` indexes the resolved location of every named allocation.
 */
class WebGLMemoryManagerImpl implements WebGLMemoryManager {
  // Static pool: running total size plus per-name {size, location} records.
  static = {
    size: 0,
    allocations: {}
  };
  // Dynamic pool: running total size plus per-name {size[, location]} records.
  dynamic = {
    size: 0,
    allocations: {}
  };
  // name -> resolved location, for both pools.
  mapping = {};

  // Record a static allocation whose location is known immediately.
  allocateStaticMemory(name: string, size: number, location: string): void {
    this.static.allocations[name] = { size, location };
    this.static.size += size;
    this.mapping[name] = location;
  }

  // Record a dynamic allocation; its location is filled in later by
  // resolveDynamicAllocation().
  allocateDynamicMemory(name: string, size: number): void {
    this.dynamic.allocations[name] = { size };
    this.dynamic.size += size;
  }

  // Attach a location to a previously registered dynamic allocation.
  // Throws if the allocation was never registered.
  resolveDynamicAllocation(name: string, location: string): void {
    if (this.dynamic.allocations[name]) {
      this.dynamic.allocations[name].location = location;
      this.mapping[name] = location;
    } else {
      throw new Error(`Dynamic allocation for ${name} not found.`);
    }
  }

  // Look up the resolved location of any allocation; throws when unknown
  // (including dynamic allocations that were never resolved).
  getMemoryLocation(name: string): string {
    if (this.mapping[name]) {
      return this.mapping[name];
    } else {
      throw new Error(`Memory location for ${name} not found.`);
    }
  }
}
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { resolve } from 'path';
import { registerTemplatesRoutes } from './server/routes/api/templates';
import { registerNodesRoutes } from './server/routes/api/nodes';
import { registerPoliciesRoutes } from './server/routes/api/policies';
import { registerIndexRoutes } from './server/routes/api/index';
import { registerLicenseChecker } from './server/lib/register_license_checker';
import { PLUGIN_ID } from './common/constants';
import { indexLifecycleDataEnricher } from './index_lifecycle_data';
/**
 * Kibana plugin definition for Index Lifecycle Management (ILM).
 *
 * Registers the ILM config schema (`xpack.ilm.*`), UI exports, HTTP routes
 * and the index-management data enricher. The plugin is only enabled when
 * both ILM and Index Management are enabled in config.
 */
export function indexLifecycleManagement(kibana) {
  return new kibana.Plugin({
    // Config schema: xpack.ilm.{enabled, ui.enabled, filteredNodeAttributes}.
    config: Joi => {
      return Joi.object({
        enabled: Joi.boolean().default(true),
        ui: Joi.object({
          enabled: Joi.boolean().default(true),
        }).default(),
        filteredNodeAttributes: Joi.array()
          .items(Joi.string())
          .default([]),
      }).default();
    },
    id: PLUGIN_ID,
    publicDir: resolve(__dirname, 'public'),
    configPrefix: 'xpack.ilm',
    require: ['kibana', 'elasticsearch', 'xpack_main', 'index_management'],
    uiExports: {
      styleSheetPaths: resolve(__dirname, 'public/index.scss'),
      managementSections: ['plugins/index_lifecycle_management'],
      // Expose the UI-enabled flag to the client.
      injectDefaultVars(server) {
        const config = server.config();
        return {
          ilmUiEnabled: config.get('xpack.ilm.ui.enabled'),
        };
      },
    },
    // ILM requires Index Management to be present and enabled.
    isEnabled(config) {
      return (
        config.get('xpack.ilm.enabled') &&
        config.has('xpack.index_management.enabled') &&
        config.get('xpack.index_management.enabled')
      );
    },
    init: function(server) {
      registerLicenseChecker(server);
      registerTemplatesRoutes(server);
      registerNodesRoutes(server);
      registerPoliciesRoutes(server);
      registerIndexRoutes(server);
      // Enrich index-management data with ILM info when the UI is on and
      // the index_management plugin exposes the enricher hook.
      if (
        server.config().get('xpack.ilm.ui.enabled') &&
        server.plugins.index_management &&
        server.plugins.index_management.addIndexManagementDataEnricher
      ) {
        server.plugins.index_management.addIndexManagementDataEnricher(indexLifecycleDataEnricher);
      }
    },
  });
}
|
class MessageDispatcher {
    /**
     * Route an incoming WeChat event message to the matching handler.
     *
     * The Event field is compared case-insensitively. Only CLICK is
     * implemented; the remaining event types are stubs.
     *
     * @param array $message Decoded message with 'Event' and 'EventKey'.
     */
    public static function dispatchMessage($message) {
        $event = strtoupper($message['Event']);

        if ($event === 'CLICK') {
            // Set the message type and content based on the event key
            ImWx::setRequest('MsgType', 'text');
            ImWx::setRequest('Content', $message['EventKey']);
            ClickMessageHandler::dispatch($message['EventKey']);
            return;
        }

        if ($event === 'VIEW') {
            // Handle the 'VIEW' event message
            // Your code here
            return;
        }

        if ($event === 'SUBSCRIBE') {
            // Handle the 'SUBSCRIBE' event message
            // Your code here
            return;
        }

        if ($event === 'UNSUBSCRIBE') {
            // Handle the 'UNSUBSCRIBE' event message
            // Your code here
            return;
        }

        // Unknown event type: intentionally ignored for now.
    }
}
/**
 * Handler for menu CLICK events; receives the raw event key from the
 * dispatcher. Currently a stub.
 */
class ClickMessageHandler {
    public static function dispatch($eventKey) {
        // Handle the 'CLICK' event message
        // Your code here
    }
}
// Example usage: dispatch a simulated CLICK menu event through the
// dispatcher above.
$message = [
    'Event' => 'CLICK',
    'EventKey' => 'some_event_key'
];
MessageDispatcher::dispatchMessage($message);
|
<reponame>abt09kis/DishRandomizer
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
// Required so the referenced model is registered before Dish is used.
var Restaurant = require('./restaurantSchema');

// Schema for a single dish offered by a restaurant.
// NOTE(review): the ref string 'Resturant' is kept as-is because it must
// match the model name registered in restaurantSchema - confirm whether
// that model is actually registered under this (misspelled) name.
var dishSchema = new Schema({
  restaurant: { type: mongoose.Schema.Types.ObjectId, ref: 'Resturant' },
  name: String,
  price: Number,
});

var Dish = mongoose.model('Dish', dishSchema);

module.exports = Dish;
|
def ReplaceVowels(sentence):
    """Return ``sentence`` with every ASCII vowel (both cases) removed.

    Despite the name, vowels are deleted rather than substituted; all
    other characters are preserved in order.
    """
    vowels = set("aeiouAEIOU")
    return "".join(ch for ch in sentence if ch not in vowels)
|
#!/usr/bin/env bash
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Sets up a throwaway local Gerrit instance for testing.
set -e

# Default ports; overridable via --http-port / --ssh-port.
http_port=8080
ssh_port=29418

# Parse arguments: -v <gerrit version>, -d <run dir>, --http-port <port>,
# --ssh-port <port>; a bare argument is treated as the run directory.
while test $# -ne 0; do
  case "$1" in
    -v)
      version="$2"
      shift
      ;;
    -d)
      rundir="$2"
      shift
      ;;
    --http-port)
      http_port="$2"
      shift
      ;;
    --ssh-port)
      ssh_port="$2"
      shift
      ;;
    *)
      rundir="$1"
      ;;
  esac
  shift
done

# Fall back to a temp dir when no run dir was given.
if [ -z "$rundir" ]; then
  rundir=$(mktemp -d)
fi

this_dir=$(dirname $0)
gerrit_exe="$this_dir/gerrit.war"

# Fixed test-account identity seeded into the Gerrit DB further below.
account_id=101
full_name='Test Account'
maximum_page_size='25'
password='test-password'
preferred_email="test-username@test.org"
registered_on=$(date '+%Y-%m-%d %H:%M:%S.000%:::z')
username='test-username'

# The python code below for picking the "latest" gerrit release is cribbed and
# ported from the javascript at:
#
#     http://gerrit-releases.storage.googleapis.com/index.html
url='https://www.googleapis.com/storage/v1/b/gerrit-releases/o?projection=noAcl'
# List the gerrit-releases bucket and let the embedded Python script pick
# the requested (or newest) gerrit-*.war, printing its name and md5.
curl --retry 30 --ssl-reqd -s $url | python <(cat <<EOF
# Receives Gerrit version via command line and reads json-encoded
# text from stdin in the format:
#
# {
#  "items": [
#   {
#    "name": "gerrit-<version>.war",
#    "md5Hash": "<base64 encoded md5sum>",
#   },
#   {
#    "name": "gerrit-<version>.war",
#    "md5Hash": "<base64 encoded md5sum>",
#   },
#   ...
# }
#
# ...and prints the name and md5sum of the corresponding *.war file.

from __future__ import print_function

import functools
import json
import re
import sys

requested_version = sys.argv[1] if len(sys.argv) > 1 else None

# Disable using -rc versions. This is a temporary hack to avoid
# picking up version 2.9-rc0, which requires java 7. These lines
# should be un-commented after this bug is fixed:
#   https://code.google.com/p/chromium/issues/detail?id=346369
#gerrit_re = re.compile('gerrit(?:-full)?-([0-9.]+)(-rc[0-9]+)?[.]war')
gerrit_re = re.compile('gerrit(?:-full)?-([0-9.]+)[.]war')

j = json.load(sys.stdin)
items = [(x, gerrit_re.match(x['name'])) for x in j['items']]
#items = [(x, m.group(1), m.group(2)) for x, m in items if m]
items = [(x, m.group(1), '') for x, m in items if m]

def _cmp(a, b):
  # Compare dotted version strings numerically, padding with zeros; an
  # absent -rc suffix sorts as '1000' (i.e. newer than any rc).
  an = a[1].split('.')
  bn = b[1].split('.')
  while len(an) < len(bn):
    an.append('0')
  while len(bn) < len(an):
    bn.append('0')
  an.append(a[2][3:] if a[2] else '1000')
  bn.append(b[2][3:] if b[2] else '1000')
  for i in range(len(an)):
    if an[i] != bn[i]:
      return -1 if int(an[i]) > int(bn[i]) else 1
  return 0

if requested_version:
  # Items are (info, version, rc) triples; the original 2-name unpacking
  # raised ValueError whenever a specific version was requested.
  for info, version, _rc in items:
    if version == requested_version:
      print('"%s" "%s"' % (info['name'], info['md5Hash']))
      sys.exit(0)
  print('No such Gerrit version: %s' % requested_version, file=sys.stderr)
  sys.exit(1)

# list.sort(cmp=...) was removed in Python 3; cmp_to_key keeps the
# original comparator working on both Python 2.7 and Python 3.
items.sort(key=functools.cmp_to_key(_cmp))
for x in items:
  print('"%s" "%s"' % (x[0]['name'], x[0]['md5Hash']))
sys.exit(0)
EOF
) "$version" | xargs | while read name md5; do
  # Download the requested gerrit version if necessary, and verify the md5sum.
  target="$this_dir/$name"
  net_sum=$(echo -n $md5 | base64 -d | od -tx1 | head -1 | cut -d ' ' -f 2- |
            sed 's/ //g')
  if [ -f "$target" ]; then
    file_sum=$(md5sum "$target" | awk '{print $1}' | xargs)
    if [ "$file_sum" = "$net_sum" ]; then
      # Existing download already matches: just (re)point the symlink.
      ln -sf "$name" "$gerrit_exe"
      break
    else
      rm -rf "$target"
    fi
  fi
  curl --retry 30 --ssl-reqd -s -o "$target" \
    "https://gerrit-releases.storage.googleapis.com/$name"
  file_sum=$(md5sum "$target" | awk '{print $1}' | xargs)
  if [ "$file_sum" != "$net_sum" ]; then
    echo "ERROR: md5sum mismatch when downloading $name" 1>&2
    rm -rf "$target"
    exit 1
  else
    ln -sf "$name" "$gerrit_exe"
  fi
done
# Bail out if neither a cached war nor a fresh download produced the link.
if [ ! -e "$gerrit_exe" ]; then
  echo "ERROR: No $gerrit_exe file or link present, and unable " 1>&2
  echo "       to download the latest version." 1>&2
  exit 1
fi

# By default, gerrit only accepts https connections, which is a good thing. But
# for testing, it's convenient to enable plain http. Also, turn off all email
# notifications.
mkdir -p "${rundir}/etc"
cat <<EOF > "${rundir}/etc/gerrit.config"
[auth]
	type = DEVELOPMENT_BECOME_ANY_ACCOUNT
	gitBasicAuth = true
[gerrit]
	canonicalWebUrl = http://$(hostname):${http_port}/
[httpd]
	listenUrl = http://*:${http_port}/
[sshd]
	listenAddress = *:${ssh_port}
[sendemail]
	enable = false
[container]
	javaOptions = -Duser.home=${rundir}/tmp
EOF

# Initialize the gerrit instance.
java -jar "$gerrit_exe" init --no-auto-start --batch --install-plugin=download-commands -d "${rundir}"

# Create SSH key pair for the first user.
mkdir -p "${rundir}/tmp"
ssh-keygen -t rsa -q -f "${rundir}/tmp/id_rsa" -N ""
ssh_public_key="$(cat ${rundir}/tmp/id_rsa.pub)"

# Set up the first user, with admin priveleges.
cat <<EOF | java -jar "$gerrit_exe" gsql -d "${rundir}" > /dev/null
INSERT INTO ACCOUNTS (FULL_NAME, MAXIMUM_PAGE_SIZE, PREFERRED_EMAIL, REGISTERED_ON, ACCOUNT_ID) VALUES ('${full_name}', ${maximum_page_size}, '${preferred_email}', '${registered_on}', ${account_id});
INSERT INTO ACCOUNT_EXTERNAL_IDS (ACCOUNT_ID, EXTERNAL_ID) VALUES (${account_id}, 'gerrit:${username}');
INSERT INTO ACCOUNT_EXTERNAL_IDS (ACCOUNT_ID, EXTERNAL_ID) VALUES (${account_id}, 'username:${username}');
INSERT INTO ACCOUNT_EXTERNAL_IDS (ACCOUNT_ID, EMAIL_ADDRESS, PASSWORD) VALUES (${account_id}, '${preferred_email}', '${password}');
INSERT INTO ACCOUNT_GROUP_MEMBERS (ACCOUNT_ID, GROUP_ID) VALUES (${account_id}, 1);
INSERT INTO ACCOUNT_SSH_KEYS (ACCOUNT_ID, SSH_PUBLIC_KEY, VALID, SEQ) VALUES (${account_id}, '${ssh_public_key}', 'Y', 0);
EOF

# Create a netrc file to authenticate as the first user.
cat <<EOF > "${rundir}/tmp/.netrc"
machine localhost login ${username} password ${password}
EOF

# Create a .git-credentials file, to enable password-less push.
cat <<EOF > "${rundir}/tmp/.git-credentials"
http://${username}:${password}@localhost:${http_port}
EOF

# Print a usage summary for the freshly provisioned instance.
cat <<EOF

To start gerrit server:
  ${rundir}/bin/gerrit.sh start

To use the REST API:
  curl --netrc-file ${rundir}/tmp/.netrc http://localhost:${http_port}/<endpoint>

To use SSH API:
  ssh ${username}@localhost -p ${ssh_port} -i ${rundir}/tmp/id_rsa gerrit

To enable 'git push' without a password prompt:
  git config credential.helper 'store --file=${rundir}/tmp/.git-credentials'

To stop the server:
  ${rundir}/bin/gerrit.sh stop

EOF
#!/bin/bash
# Print the given message to stdout in blue, resetting the color afterwards.
echo_blue() {
  local blue='\033[0;34m'
  local reset='\033[0m' # No Color
  printf '%b\n' "${blue}$1${reset}"
}
SOLR_BENCH_VERSION="0.0.1-SNAPSHOT"
# Fetch a file into the current directory unless it is already present.
# Supports http(s):// via curl and gs:// via gsutil; any other argument is
# silently ignored (assumed to be a local path).
download() {
  file=$1
  if [ -f "$(basename "$file")" ]; then
    return
  fi
  if [[ $file == "https://"* ]] || [[ $file == "http://"* ]]
  then
    echo_blue "Downloading $file"
    # Quote the URL: unquoted expansion would word-split/glob on unusual
    # characters in the argument.
    curl -O "$file"
  elif [[ $file == "gs://"* ]]
  then
    echo_blue "Downloading $file"
    gsutil cp "$file" .
  fi
  # else, don't do anything
}
ORIG_WORKING_DIR=`pwd`

# First argument is the benchmark config file (local path, GCS or HTTP URL).
CONFIGFILE=$1
download $CONFIGFILE # download this file from GCS/HTTP, if necessary
CONFIGFILE="${CONFIGFILE##*/}"

mkdir -p SolrNightlyBenchmarksWorkDirectory/Download

# Pull the repository / build settings out of the JSON config via jq.
COMMIT=`jq -r '."repository"."commit-id"' $CONFIGFILE`
REPOSRC=`jq -r '."repository"."url"' $CONFIGFILE`
LOCALREPO=`pwd`/SolrNightlyBenchmarksWorkDirectory/Download/`jq -r '."repository"."name"' $CONFIGFILE`
BUILDCOMMAND=`jq -r '."repository"."build-command"' $CONFIGFILE`
PACKAGE_DIR=`jq -r '."repository"."package-subdir"' $CONFIGFILE`
LOCALREPO_VC_DIR=$LOCALREPO/.git
GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"

# Default tarball name is derived from the commit being built...
export SOLR_TARBALL_NAME="solr-$COMMIT.tgz"
export SOLR_TARBALL_PATH="SolrNightlyBenchmarksWorkDirectory/Download/$SOLR_TARBALL_NAME"

# ...unless the config points at a prebuilt solr package, in which case
# download it and use that tarball instead of building from source.
if [[ "null" != `jq -r '.["solr-package"]' $CONFIGFILE` ]]
then
  solrpackageurl=`jq -r '.["solr-package"]' $CONFIGFILE`
  download $solrpackageurl
  export SOLR_TARBALL_NAME="${solrpackageurl##*/}"
  export SOLR_TARBALL_PATH=$SOLR_TARBALL_NAME
fi
# Provision a benchmark cluster on GCP with Terraform, then start ZooKeeper
# and Solr on the created instances. Relies on CONFIGFILE and the
# SOLR_TARBALL_* variables set above.
terraform-gcp-provisioner() {
  echo_blue "Using Terraform provisioner"
  chmod +x start*sh

  # Generate the Terraform JSON file
  jq '.["cluster"]["terraform-gcp-config"]' $CONFIGFILE > terraform/terraform.tfvars.json

  # Generate temporary ssh keys
  rm terraform/id_rsa*
  ssh-keygen -f terraform/id_rsa -N ""

  # Provision instances using Terraform
  cd $ORIG_WORKING_DIR/terraform
  terraform init
  terraform apply --auto-approve

  # Start Solr on provisioned instances
  cd $ORIG_WORKING_DIR
  export SOLR_STARTUP_PARAMS=`jq -r '."cluster"."startup-params"' $CONFIGFILE`
  # ZooKeeper host name comes from the Terraform state output.
  export ZK_NODE=`terraform output -state=terraform/terraform.tfstate -json zookeeper_details|jq '.[] | .name'`
  export ZK_NODE=${ZK_NODE//\"/}
  export ZK_TARBALL_NAME="apache-zookeeper-3.6.2-bin.tar.gz"
  export ZK_TARBALL_PATH="$ORIG_WORKING_DIR/apache-zookeeper-3.6.2-bin.tar.gz"
  export JDK_TARBALL=`jq -r '."cluster"."jdk-tarball"' $CONFIGFILE`
  ./startzk.sh
  for line in `terraform output -state=terraform/terraform.tfstate -json solr_node_details|jq '.[] | .name'`
  do
    SOLR_NODE=${line//\"/}
    echo_blue "Starting Solr on $SOLR_NODE"
    ./startsolr.sh $SOLR_NODE
  done
}
# Download the pre-requisites
wget -c `jq -r '."cluster"."jdk-url"' $CONFIGFILE`
wget -c https://archive.apache.org/dist/zookeeper/zookeeper-3.6.2/apache-zookeeper-3.6.2-bin.tar.gz
for i in `jq -r '."pre-download" | .[]' $CONFIGFILE`; do download $i; done

# Clone/checkout the git repository and build Solr, unless a prebuilt
# package was configured or this commit's tarball is already cached.
if [[ "null" == `jq -r '.["solr-package"]' $CONFIGFILE` ]] && [ ! -f $ORIG_WORKING_DIR/SolrNightlyBenchmarksWorkDirectory/Download/solr-$COMMIT.tgz ]
then
  echo_blue "Building Solr package for $COMMIT"
  if [ ! -d $LOCALREPO_VC_DIR ]
  then
    GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git clone --config credential.helper=cache $REPOSRC $LOCALREPO
    cd $LOCALREPO
  else
    cd $LOCALREPO
    GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git fetch
  fi
  GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git checkout $COMMIT
  # Submodule URL rewriting is only needed for https clones.
  if [[ "$REPOSRC" == http* ]]
  then
    GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git -c 'url.https://github.com/.insteadof=git@github.com:' submodule update --init --recursive
  else
    GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git submodule update --init --recursive
  fi

  # Build Solr package
  bash -c "$BUILDCOMMAND"
  cd $LOCALREPO
  PACKAGE_PATH=`find . -name "solr*tgz" | grep -v src`
  echo_blue "Package found here: $PACKAGE_PATH"
  cp $PACKAGE_PATH $ORIG_WORKING_DIR/SolrNightlyBenchmarksWorkDirectory/Download/solr-$COMMIT.tgz
fi

cd $ORIG_WORKING_DIR
# Provision the cluster when the config asks for Terraform on GCP.
if [ "terraform-gcp" == `jq -r '.["cluster"]["provisioning-method"]' $CONFIGFILE` ];
then
  terraform-gcp-provisioner
fi

# Run the benchmarking suite
cd $ORIG_WORKING_DIR
echo_blue "Running suite from working directory: $ORIG_WORKING_DIR"
java -cp org.apache.solr.benchmarks-${SOLR_BENCH_VERSION}-jar-with-dependencies.jar:target/org.apache.solr.benchmarks-${SOLR_BENCH_VERSION}-jar-with-dependencies.jar:. \
   org.apache.solr.benchmarks.BenchmarksMain $CONFIGFILE

# Grab GC logs
NOW=`date +"%Y-%d-%m_%H.%M.%S"`
if [ "terraform-gcp" == `jq -r '.["cluster"]["provisioning-method"]' $CONFIGFILE` ];
then
  echo_blue "Pulling logs"
  for line in `terraform output -state=terraform/terraform.tfstate -json solr_node_details|jq '.[] | .name'`
  do
    SOLR_NODE=${line//\"/}
    SOLR_DIR=`tar --exclude='*/*/*' -tf ${SOLR_TARBALL_NAME} | head -1| cut -d '/' -f 1`
    cp -r "$SOLR_DIR/server/logs" /tmp/${SOLR_NODE}-logs
    ssh -i terraform/id_rsa -oStrictHostKeyChecking=no solruser@$SOLR_NODE "tar -cf solrlogs-${SOLR_NODE}.tar $SOLR_DIR/server/logs"
    scp -i terraform/id_rsa -oStrictHostKeyChecking=no solruser@$SOLR_NODE:solrlogs-${SOLR_NODE}.tar .
    zip logs-${NOW}.zip solrlogs*tar
  done
  echo_blue "Removing the hostname entry from ~/.ssh/known_hosts, so that another run can be possible afterwards"
  cd $ORIG_WORKING_DIR
  for line in `terraform output -state=terraform/terraform.tfstate -json solr_node_details|jq '.[] | .name'`
  do
    SOLR_NODE=${line//\"/}
    ssh-keygen -R "$SOLR_NODE"
  done
  ZK_NODE=`terraform output -state=terraform/terraform.tfstate -json zookeeper_details|jq '.[] | .name'`
  ssh-keygen -R "$ZK_NODE"
fi

# Results upload (results.json), if needed
cd $ORIG_WORKING_DIR
if [[ "null" != `jq -r '.["results-upload-location"]' $CONFIGFILE` ]]
then
  # Results uploading only supported for GCS buckets for now
  mv results.json results-${NOW}.json
  gsutil cp results-${NOW}.json `jq -r '.["results-upload-location"]' $CONFIGFILE`
  gsutil cp logs-${NOW}.zip `jq -r '.["results-upload-location"]' $CONFIGFILE`
fi

# Cleanup
# Tear down the Terraform-provisioned instances and the temporary ssh keys.
if [ "terraform-gcp" == `jq -r '.["cluster"]["provisioning-method"]' $CONFIGFILE` ];
then
  cd $ORIG_WORKING_DIR/terraform
  terraform destroy --auto-approve
  rm id_rsa*
fi
|
<reponame>riddopic/config
########################################################################
#
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
########################################################################
from sysinv.db import api as db_api
from sysinv.objects import base
class PtpInterfaceMap(base.SysinvObject):
    """Sysinv object for the interface <-> PTP-interface association row."""

    # Database API handle shared by all instances.
    dbapi = db_api.get_instance()

    # Object fields and the coercion applied to each value.
    fields = {
        'interface_id': int,
        'ptp_interface_id': int,
    }

    # Fields populated from related DB objects, as 'relation:attribute'.
    _foreign_fields = {
        'interface_id': 'interface:id',
        'ptp_interface_id': 'ptp_interface:id',
    }

    @base.remotable_classmethod
    def get_by_uuid(cls, context, uuid):
        # Fetch a single mapping row by its UUID via the DB API.
        return cls.dbapi.ptp_interface_map_get(uuid)
|
package com.serloman.imagedowloaderapptest;
import android.support.v7.app.ActionBarActivity;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Toast;
import com.serloman.imagecachedownloader.downloader.LRUImageDownloader;
import com.serloman.imagecachedownloader.downloader.ImageDownloader;
import com.serloman.imagecachedownloader.listener.ImageViewListener;
public class MainActivity extends ActionBarActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (savedInstanceState == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.container, new PlaceholderFragment())
.commit();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* A placeholder fragment containing a simple view.
*/
public static class PlaceholderFragment extends Fragment {
public PlaceholderFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container, false);
ImageView imageView = (ImageView)rootView.findViewById(R.id.imageViewTest);
ImageDownloader downloader;
// downloader = DiskImageDownloader.getInstance(getActivity());
downloader = LRUImageDownloader.getInstance();
downloader.downloadImage("http://fc00.deviantart.net/fs71/f/2015/013/3/c/3c026edbe356b22c802e7be0db6fbd0b-d8dt0go.jpg", new ImageViewListener(imageView){
@Override
public void imageError() {
super.imageError();
Toast.makeText(getActivity(),"Error downloading image", Toast.LENGTH_SHORT).show();
}
});
return rootView;
}
}
}
|
<filename>src/main/java-gen/io/dronefleet/mavlink/ardupilotmega/GoproProtuneExposure.java
package io.dronefleet.mavlink.ardupilotmega;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * GoPro Protune exposure-compensation settings, expressed in EV steps of
 * 0.5 from -5.0 to +5.0. Values outside the -2.0..+2.0 range are only
 * available on Hero 3+ cameras.
 */
@MavlinkEnum
public enum GoproProtuneExposure {
    /**
     * -5.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(0)
    GOPRO_PROTUNE_EXPOSURE_NEG_5_0,

    /**
     * -4.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(1)
    GOPRO_PROTUNE_EXPOSURE_NEG_4_5,

    /**
     * -4.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(2)
    GOPRO_PROTUNE_EXPOSURE_NEG_4_0,

    /**
     * -3.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(3)
    GOPRO_PROTUNE_EXPOSURE_NEG_3_5,

    /**
     * -3.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(4)
    GOPRO_PROTUNE_EXPOSURE_NEG_3_0,

    /**
     * -2.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(5)
    GOPRO_PROTUNE_EXPOSURE_NEG_2_5,

    /**
     * -2.0 EV.
     */
    @MavlinkEntryInfo(6)
    GOPRO_PROTUNE_EXPOSURE_NEG_2_0,

    /**
     * -1.5 EV.
     */
    @MavlinkEntryInfo(7)
    GOPRO_PROTUNE_EXPOSURE_NEG_1_5,

    /**
     * -1.0 EV.
     */
    @MavlinkEntryInfo(8)
    GOPRO_PROTUNE_EXPOSURE_NEG_1_0,

    /**
     * -0.5 EV.
     */
    @MavlinkEntryInfo(9)
    GOPRO_PROTUNE_EXPOSURE_NEG_0_5,

    /**
     * 0.0 EV.
     */
    @MavlinkEntryInfo(10)
    GOPRO_PROTUNE_EXPOSURE_ZERO,

    /**
     * +0.5 EV.
     */
    @MavlinkEntryInfo(11)
    GOPRO_PROTUNE_EXPOSURE_POS_0_5,

    /**
     * +1.0 EV.
     */
    @MavlinkEntryInfo(12)
    GOPRO_PROTUNE_EXPOSURE_POS_1_0,

    /**
     * +1.5 EV.
     */
    @MavlinkEntryInfo(13)
    GOPRO_PROTUNE_EXPOSURE_POS_1_5,

    /**
     * +2.0 EV.
     */
    @MavlinkEntryInfo(14)
    GOPRO_PROTUNE_EXPOSURE_POS_2_0,

    /**
     * +2.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(15)
    GOPRO_PROTUNE_EXPOSURE_POS_2_5,

    /**
     * +3.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(16)
    GOPRO_PROTUNE_EXPOSURE_POS_3_0,

    /**
     * +3.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(17)
    GOPRO_PROTUNE_EXPOSURE_POS_3_5,

    /**
     * +4.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(18)
    GOPRO_PROTUNE_EXPOSURE_POS_4_0,

    /**
     * +4.5 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(19)
    GOPRO_PROTUNE_EXPOSURE_POS_4_5,

    /**
     * +5.0 EV (Hero 3+ Only).
     */
    @MavlinkEntryInfo(20)
    GOPRO_PROTUNE_EXPOSURE_POS_5_0
}
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.s2dao.metadata.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.name.ColumnSqlName;
import org.dbflute.helper.StringKeyMap;
import org.dbflute.helper.beans.DfBeanDesc;
import org.dbflute.helper.beans.DfPropertyDesc;
import org.dbflute.helper.beans.exception.DfBeanPropertyNotFoundException;
import org.dbflute.helper.beans.factory.DfBeanDescFactory;
import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.s2dao.extension.TnBeanMetaDataFactoryExtension;
import org.dbflute.s2dao.identity.TnIdentifierGenerator;
import org.dbflute.s2dao.identity.TnIdentifierGeneratorFactory;
import org.dbflute.s2dao.metadata.TnBeanAnnotationReader;
import org.dbflute.s2dao.metadata.TnBeanMetaData;
import org.dbflute.s2dao.metadata.TnModifiedPropertySupport;
import org.dbflute.s2dao.metadata.TnPropertyType;
import org.dbflute.s2dao.metadata.TnPropertyTypeFactory;
import org.dbflute.s2dao.metadata.TnRelationPropertyType;
import org.dbflute.s2dao.metadata.TnRelationPropertyTypeFactory;
/**
* The implementation as S2Dao of bean meta data. <br>
* This class has sub-class extended by DBFlute.
* <pre>
* {@link TnBeanMetaDataImpl} is close to S2Dao logic
* The extension in {@link TnBeanMetaDataFactoryExtension} has DBFlute logic
* </pre>
* DBFlute depended on S2Dao before 0.9.0. <br>
* It saves these structure to be easy to know what DBFlute extends it.
* @author modified by jflute (originated in S2Dao)
*/
public class TnBeanMetaDataImpl implements TnBeanMetaData {
// ===================================================================================
// Attribute
// =========
/** The type of bean. (NotNull) */
protected final Class<?> _beanClass;
/** The DB meta of the bean. (NotNull: if DBFlute entity) */
protected final DBMeta _dbmeta;
/** The name of table. (NotNull: after initialized, if it's not entity, this value is 'df:Unknown') */
protected String _tableName;
protected final StringKeyMap<TnPropertyType> _propertyTypeMap = StringKeyMap.createAsCaseInsensitive();
protected final List<TnPropertyType> _propertyTypeList = new ArrayList<TnPropertyType>();
protected TnBeanAnnotationReader _beanAnnotationReader;
protected TnPropertyTypeFactory _propertyTypeFactory;
/** The array of property type for primary key. */
protected TnPropertyType[] _primaryKeys;
// should be initialized in a process synchronized
protected final Map<String, TnPropertyType> _columnPropertyTypeMap = StringKeyMap.createAsCaseInsensitive();
protected final List<TnRelationPropertyType> _relationPropertyTypes = new ArrayList<TnRelationPropertyType>();
protected final List<TnIdentifierGenerator> _identifierGeneratorList = new ArrayList<TnIdentifierGenerator>();
protected final Map<String, TnIdentifierGenerator> _identifierGeneratorsByPropertyName = StringKeyMap.createAsCaseInsensitive();
protected String _versionNoPropertyName;
protected String _timestampPropertyName;
protected TnModifiedPropertySupport _modifiedPropertySupport;
protected TnRelationPropertyTypeFactory _relationPropertyTypeFactory;
// ===================================================================================
// Constructor
// ===========
public TnBeanMetaDataImpl(Class<?> beanClass, DBMeta dbmeta) {
_beanClass = beanClass;
_dbmeta = dbmeta;
}
// ===================================================================================
// Initialize
// ==========
public void initialize() { // non thread safe so this is called immediately after creation
final DfBeanDesc beanDesc = DfBeanDescFactory.getBeanDesc(getBeanClass());
setupTableName(beanDesc);
setupProperty();
setupPrimaryKey();
}
protected void setupTableName(DfBeanDesc beanDesc) { // only called in the initialize() process
final String ta = _beanAnnotationReader.getTableAnnotation();
if (ta != null) {
_tableName = ta;
} else {
_tableName = "df:Unknown";
}
}
protected void setupProperty() { // only called in the initialize() process
final TnPropertyType[] propertyTypes = _propertyTypeFactory.createBeanPropertyTypes();
for (int i = 0; i < propertyTypes.length; i++) {
TnPropertyType pt = propertyTypes[i];
addPropertyType(pt);
_columnPropertyTypeMap.put(pt.getColumnDbName(), pt);
}
final TnRelationPropertyType[] rptTypes = _relationPropertyTypeFactory.createRelationPropertyTypes();
for (int i = 0; i < rptTypes.length; i++) {
TnRelationPropertyType rpt = rptTypes[i];
addRelationPropertyType(rpt);
}
}
protected void addPropertyType(TnPropertyType propertyType) { // only called in the initialize() process
_propertyTypeMap.put(propertyType.getPropertyName(), propertyType);
_propertyTypeList.add(propertyType);
}
protected void setupPrimaryKey() { // only called in the initialize() process
final List<TnPropertyType> keys = new ArrayList<TnPropertyType>();
for (TnPropertyType pt : _propertyTypeList) {
if (pt.isPrimaryKey()) {
keys.add(pt);
setupIdentifierGenerator(pt);
}
}
_primaryKeys = (TnPropertyType[]) keys.toArray(new TnPropertyType[keys.size()]);
}
protected void setupIdentifierGenerator(TnPropertyType pt) { // only called in the initialize() process
final DfPropertyDesc pd = pt.getPropertyDesc();
final String propertyName = pt.getPropertyName();
final String idType = _beanAnnotationReader.getId(pd);
final TnIdentifierGenerator generator = TnIdentifierGeneratorFactory.createIdentifierGenerator(pt, idType);
_identifierGeneratorList.add(generator);
_identifierGeneratorsByPropertyName.put(propertyName, generator);
}
protected void addRelationPropertyType(TnRelationPropertyType rpt) { // only called in the initialize() process
for (int i = _relationPropertyTypes.size(); i <= rpt.getRelationNo(); ++i) {
_relationPropertyTypes.add(null);
}
_relationPropertyTypes.set(rpt.getRelationNo(), rpt);
}
// ===================================================================================
// Basic Info
// ==========
public Class<?> getBeanClass() {
return _beanClass;
}
public DBMeta getDBMeta() {
return _dbmeta;
}
public String getTableName() {
return _tableName;
}
// ===================================================================================
// Property Type
// =============
public List<TnPropertyType> getPropertyTypeList() {
return _propertyTypeList;
}
public TnPropertyType getPropertyType(String propertyName) {
final TnPropertyType propertyType = (TnPropertyType) _propertyTypeMap.get(propertyName);
if (propertyType == null) {
String msg = "The propertyName was not found in the map:";
msg = msg + " propertyName=" + propertyName + " propertyTypeMap=" + _propertyTypeMap;
throw new IllegalStateException(msg);
}
return propertyType;
}
public boolean hasPropertyType(String propertyName) {
return _propertyTypeMap.get(propertyName) != null;
}
public TnPropertyType getPropertyTypeByColumnName(String columnName) {
final TnPropertyType propertyType = _columnPropertyTypeMap.get(columnName);
if (propertyType == null) {
throwBeanMetaPropertyTypeByColumnNameNotFoundException(columnName);
}
return propertyType;
}
protected void throwBeanMetaPropertyTypeByColumnNameNotFoundException(String columnName) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("The column was not found in the table.");
br.addItem("Bean Class");
br.addElement(_beanClass);
br.addItem("Column");
br.addElement(_tableName + "." + columnName);
br.addItem("DBMeta");
br.addElement(_dbmeta);
br.addItem("Mapping");
final Set<Entry<String, TnPropertyType>> entrySet = _columnPropertyTypeMap.entrySet();
for (Entry<String, TnPropertyType> entry : entrySet) {
br.addElement(entry.getKey() + ": " + entry.getValue());
}
final String msg = br.buildExceptionMessage();
throw new IllegalStateException(msg);
}
public TnPropertyType getPropertyTypeByAliasName(String alias) {
if (hasPropertyTypeByColumnName(alias)) {
return getPropertyTypeByColumnName(alias);
}
final int index = alias.lastIndexOf('_');
if (index < 0) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg);
}
final String columnName = alias.substring(0, index);
final String relnoStr = alias.substring(index + 1);
int relno = -1;
try {
relno = Integer.parseInt(relnoStr);
} catch (Throwable t) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg, t);
}
final TnRelationPropertyType rpt = getRelationPropertyType(relno);
if (!rpt.getYourBeanMetaData().hasPropertyTypeByColumnName(columnName)) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg);
}
return rpt.getYourBeanMetaData().getPropertyTypeByColumnName(columnName);
}
public boolean hasPropertyTypeByColumnName(String columnName) {
return _columnPropertyTypeMap.get(columnName) != null;
}
public boolean hasPropertyTypeByAliasName(String alias) {
if (hasPropertyTypeByColumnName(alias)) {
return true;
}
final int index = alias.lastIndexOf('_');
if (index < 0) {
return false;
}
final String columnName = alias.substring(0, index);
final String relnoStr = alias.substring(index + 1);
int relno = -1;
try {
relno = Integer.parseInt(relnoStr);
} catch (Throwable t) {
return false;
}
if (relno >= getRelationPropertyTypeSize()) {
return false;
}
final TnRelationPropertyType rpt = getRelationPropertyType(relno);
return rpt.getYourBeanMetaData().hasPropertyTypeByColumnName(columnName);
}
public String convertFullColumnName(String alias) {
if (hasPropertyTypeByColumnName(alias)) {
return _tableName + "." + alias;
}
final int index = alias.lastIndexOf('_');
if (index < 0) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg);
}
final String columnName = alias.substring(0, index);
final String relnoStr = alias.substring(index + 1);
int relno = -1;
try {
relno = Integer.parseInt(relnoStr);
} catch (Throwable t) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg, t);
}
final TnRelationPropertyType rpt = getRelationPropertyType(relno);
if (!rpt.getYourBeanMetaData().hasPropertyTypeByColumnName(columnName)) {
String msg = "The alias was not found in the table: table=" + _tableName + " alias=" + alias;
throw new IllegalStateException(msg);
}
return rpt.getPropertyName() + "." + columnName;
}
// ===================================================================================
// Optimistic Lock
// ===============
public TnPropertyType getVersionNoPropertyType() throws DfBeanPropertyNotFoundException {
return getPropertyType(getVersionNoPropertyName());
}
public TnPropertyType getTimestampPropertyType() throws DfBeanPropertyNotFoundException {
return getPropertyType(getTimestampPropertyName());
}
public String getVersionNoPropertyName() {
return _versionNoPropertyName;
}
public String getTimestampPropertyName() {
return _timestampPropertyName;
}
public boolean hasVersionNoPropertyType() {
return hasPropertyType(getVersionNoPropertyName());
}
public boolean hasTimestampPropertyType() {
return hasPropertyType(getTimestampPropertyName());
}
// ===================================================================================
// Relation Property Type
// ======================
public List<TnRelationPropertyType> getRelationPropertyTypeList() {
return _relationPropertyTypes;
}
public int getRelationPropertyTypeSize() {
return _relationPropertyTypes.size();
}
public TnRelationPropertyType getRelationPropertyType(int index) {
return _relationPropertyTypes.get(index);
}
public TnRelationPropertyType getRelationPropertyType(String propertyName) throws DfBeanPropertyNotFoundException {
for (int i = 0; i < getRelationPropertyTypeSize(); i++) {
final TnRelationPropertyType rpt = (TnRelationPropertyType) _relationPropertyTypes.get(i);
if (rpt != null && rpt.getPropertyName().equalsIgnoreCase(propertyName)) {
return rpt;
}
}
throw new DfBeanPropertyNotFoundException(getBeanClass(), propertyName);
}
public int getPrimaryKeySize() {
return _primaryKeys.length;
}
public String getPrimaryKeyDbName(int index) {
return _primaryKeys[index].getColumnDbName();
}
public ColumnSqlName getPrimaryKeySqlName(int index) {
return _primaryKeys[index].getColumnSqlName();
}
public int getIdentifierGeneratorSize() {
return _identifierGeneratorList.size();
}
public TnIdentifierGenerator getIdentifierGenerator(int index) {
return _identifierGeneratorList.get(index);
}
public TnIdentifierGenerator getIdentifierGenerator(String propertyName) {
return _identifierGeneratorsByPropertyName.get(propertyName);
}
public Set<String> getModifiedPropertyNames(Object bean) {
return getModifiedPropertySupport().getModifiedPropertyNames(bean);
}
// ===================================================================================
// Accessor
// ========
public void setVersionNoPropertyName(String versionNoPropertyName) {
_versionNoPropertyName = versionNoPropertyName;
}
public void setTimestampPropertyName(String timestampPropertyName) {
_timestampPropertyName = timestampPropertyName;
}
public void setBeanAnnotationReader(TnBeanAnnotationReader beanAnnotationReader) {
_beanAnnotationReader = beanAnnotationReader;
}
public void setPropertyTypeFactory(TnPropertyTypeFactory propertyTypeFactory) {
_propertyTypeFactory = propertyTypeFactory;
}
public void setRelationPropertyTypeFactory(TnRelationPropertyTypeFactory relationPropertyTypeFactory) {
_relationPropertyTypeFactory = relationPropertyTypeFactory;
}
public TnModifiedPropertySupport getModifiedPropertySupport() {
return _modifiedPropertySupport;
}
public void setModifiedPropertySupport(TnModifiedPropertySupport propertyModifiedSupport) {
_modifiedPropertySupport = propertyModifiedSupport;
}
}
|
<reponame>seants/integrations-core<filename>varnish/tests/common.py
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
# This is a small extract of metrics from varnish, meant to test that the
# check gathers metrics. Since the check returns everything from varnish
# without any selection/rename, there is no point in having a complete list.
COMMON_METRICS = [
    # Metrics whose "MAIN." prefix was stripped by the check:
    "varnish.uptime",
    "varnish.sess_conn",
    "varnish.sess_drop",
    "varnish.sess_fail",
    "varnish.client_req_400",
    "varnish.client_req_417",
    "varnish.client_req",
    "varnish.cache_hit",
    "varnish.cache_hitpass",
    "varnish.cache_miss",
    "varnish.backend_conn",
    "varnish.backend_unhealthy",
    "varnish.backend_busy",
    "varnish.fetch_eof",
    "varnish.fetch_bad",
    "varnish.fetch_none",
    "varnish.fetch_1xx",
    "varnish.pools",
    "varnish.busy_sleep",
    "varnish.busy_wakeup",
    "varnish.busy_killed",
    "varnish.sess_queued",
    "varnish.sess_dropped",
    "varnish.n_object",
    "varnish.n_vampireobject",
    "varnish.n_vcl",
    "varnish.n_vcl_avail",
    "varnish.n_vcl_discard",
    "varnish.bans",
    "varnish.bans_completed",
    "varnish.bans_obj",
    "varnish.bans_req",
    # Metrics that keep their original varnishstat section prefix:
    "varnish.MGT.child_start",
    "varnish.MGT.child_exit",
    "varnish.MGT.child_stop",
    "varnish.MEMPOOL.busyobj.live",
    "varnish.MEMPOOL.busyobj.pool",
    "varnish.MEMPOOL.busyobj.allocs",
    "varnish.MEMPOOL.busyobj.frees",
    "varnish.SMA.s0.c_req",
    "varnish.SMA.s0.c_fail",
    "varnish.SMA.Transient.c_req",
    "varnish.SMA.Transient.c_fail",
    "varnish.VBE.boot.default.req",
    "varnish.LCK.backend.creat",
    "varnish.LCK.backend_tcp.creat",
    "varnish.LCK.ban.creat",
    "varnish.LCK.ban.locks",
    "varnish.LCK.busyobj.creat",
    "varnish.LCK.mempool.creat",
    "varnish.LCK.vbe.creat",
    "varnish.LCK.vbe.destroy",
    "varnish.LCK.vcl.creat",
    "varnish.LCK.vcl.destroy",
    "varnish.LCK.vcl.locks",
    "varnish.n_purges",
    "varnish.n_purgesps",
]
# Version used when the VARNISH_VERSION environment variable is not set.
VARNISH_DEFAULT_VERSION = "4.1.7"
VARNISHADM_PATH = "varnishadm"
SECRETFILE_PATH = "secretfile"
DAEMON_ADDRESS = "localhost:6082"
HERE = os.path.join(os.path.dirname(__file__))
FIXTURE_DIR = os.path.join(HERE, "fixtures")
CHECK_NAME = "varnish"
def get_config_by_version(name=None):
    """Build a check instance config, optionally tagged with an instance name."""
    config = {"varnishstat": get_varnish_stat_path(), "tags": ["cluster:webs"]}
    if name:
        config["name"] = name
    return config
def get_varnish_stat_path():
    """Return the command that runs varnishstat inside the versioned CI container."""
    raw_version = os.environ.get("VARNISH_VERSION", VARNISH_DEFAULT_VERSION)
    major = raw_version.split(".")[0]
    return "docker exec ci_varnish{} varnishstat".format(major)
|
REM FILE NAME: chaining.sql
REM LOCATION: Object Management\Tables\Reports
REM FUNCTION: Report on the number of CHAINED rows within a named table
REM TESTED ON: 7.3.3.5, 8.0.4.1, 8.1.5, 8.1.7, 9.0.1
REM PLATFORM: non-specific
REM REQUIRES: v$statname, v$session, dba_tab_columns, sys.col$, sys.obj$, sys.icol$,
REM sys.user$, v$sesstat, dual
REM
REM This is a part of the Knowledge Xpert for Oracle Administration library.
REM Copyright (C) 2001 Quest Software
REM All rights reserved.
REM
REM******************** Knowledge Xpert for Oracle Administration ********************
REM
REM NOTES: Requires DBA privileges.
REM The target table must have a column that is the leading portion
REM of an index and is defined as not null.
REM Uses the V$SESSTAT table where USERNAME is the current user.
REM A problem if > 1 session active with that USERID.
REM The statistics in V$SESSTAT may change between releases and
REM platforms. Make sure that 'table fetch continued row' is
REM a valid statistic.
REM This routine can be run by AUTO_CHN.sql by remarking the two
REM accepts and un-remarking the two defines.
REM
REM Approach: snapshot this session's 'table fetch continued row' counter,
REM fetch every row of the target table, then snapshot the counter again;
REM the delta is the number of chained-row continuations encountered.
REM
REM***********************************************************************************
ACCEPT obj_own prompt 'Enter the table owner''s name: '
ACCEPT obj_nam prompt 'Enter the name of the table: '
REM DEFINE obj_own = &1
REM DEFINE obj_nam = &2
SET termout off feedback off verify off echo off heading off embedded on
REM Find out what statistic we want
COLUMN statistic# new_value stat_no
SELECT statistic#
FROM v$statname n
WHERE n.NAME = 'table fetch continued row'
/
REM Find out who we are in terms of sid
COLUMN sid new_value user_sid
SELECT DISTINCT sid
FROM v$session
WHERE audsid = USERENV ('SESSIONID')
/
REM Find the last col of the table and a not null indexed column
REM (COLUMN ... new_value keeps the value of the LAST row fetched,
REM so ordering by column_id leaves the highest-numbered column in last_col)
COLUMN column_name new_value last_col
COLUMN name new_value indexed_column
COLUMN value new_value before_count
SELECT column_name
FROM dba_tab_columns
WHERE table_name = UPPER ('&&obj_nam') AND owner = UPPER ('&&obj_own')
ORDER BY column_id
/
REM Leading (pos# = 1), NOT NULL (null$ > 0) index column of the target table
SELECT c.NAME
FROM sys.col$ c, sys.obj$ idx, sys.obj$ base, sys.icol$ ic
WHERE base.obj# = c.obj#
AND ic.bo# = base.obj#
AND ic.col# = c.col#
AND base.owner# =
(SELECT user#
FROM sys.user$
WHERE NAME = UPPER ('&&obj_own'))
AND ic.obj# = idx.obj#
AND base.NAME = UPPER ('&&obj_nam')
AND ic.pos# = 1
AND c.null$ > 0
/
REM Snapshot the continued-row counter before the full fetch
SELECT VALUE
FROM v$sesstat
WHERE v$sesstat.sid = &user_sid AND v$sesstat.statistic# = &stat_no
/
REM Select every row from the target table
REM (the <= MAX predicate on the indexed column is presumably meant to
REM drive access through that index -- verify on your optimizer/version)
SELECT &last_col xx
FROM &obj_own..&obj_nam
WHERE &indexed_column <=
(SELECT MAX (&indexed_column)
FROM &obj_own..&obj_nam)
/
REM Snapshot the counter again; the delta is the chained-row count
COLUMN value new_value after_count
SELECT VALUE
FROM v$sesstat
WHERE v$sesstat.sid = &user_sid AND v$sesstat.statistic# = &stat_no
/
SET termout on
SELECT 'Table '
|| UPPER ('&obj_own')
|| '.'
|| UPPER ('&obj_nam')
|| ' contains '
|| ( TO_NUMBER (&after_count)
- TO_NUMBER (&before_count)
)
|| ' chained row'
|| DECODE (
TO_NUMBER (&after_count)
- TO_NUMBER (&before_count),
1, '.',
's.'
)
FROM DUAL
WHERE RTRIM ('&indexed_column') IS NOT NULL
/
REM If we don't have an indexed column this won't work so say so
SELECT 'Table '
|| UPPER ('&obj_own')
|| '.'
|| UPPER ('&obj_nam')
|| ' has no indexed, not null columns.'
FROM DUAL
WHERE RTRIM ('&indexed_column') IS NULL
/
REM Restore session settings and clean up substitution variables
SET termout on feedback 15 verify on pagesize 20 linesize 80 space 1 heading on
UNDEF obj_nam
UNDEF obj_own
UNDEF before_count
UNDEF after_count
UNDEF indexed_column
UNDEF last_col
UNDEF stat_no
UNDEF user_sid
CLEAR columns
CLEAR computes
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.