// linter.js — lint.dev CLI: pre-commit linting pipeline (policy fetch,
// linter configuration, staged-file linting, report upload).
const fs = require("fs");
const os = require("os");
// const path = require("path");
const chalk = require("chalk");
const ora = require("ora");
const request = require("request");
// const simpleGit = require("simple-git");
const dns = require("dns");
var CliTable = require("cli-table");
// var CLIEngine = require("eslint").CLIEngine;
// var SourceCode = require("eslint").SourceCode;
// var eslint = require("eslint");
// var { Linter, SourceCode } = require("eslint");
const { execSync } = require("child_process");
const prettier = require("prettier");
const {
createESlintConfig,
runEslint,
parseOutPoutForRuleCheck,
assignESlintRules,
selectFilesForESLint,
checkIfLintStagedConfigExist,
checkIfEslintIsInstalled,
parseOutPoutForRuleCheckAsText,
parseOutPoutForRuleCheckAsTable,
parseEslintResults,
installEslint
} = require("./linters/eslint");
const {
createPrettierConfig,
formatPrettierRules,
askToRunPrettier,
runPrettierOnStagedFiles,
selectFilesForPrettier,
setParser,
checkIfPrettierIsInstalled,
installPrettier
} = require("./linters/prettier");
const {
checkInstalledPackages,
selectFilesForRuboCop,
createRubocopConfig,
enableRule,
runRubocop,
runRubocopJson,
checkIfRubocopIsInstalled,
installRubocop
} = require("./linters/rubocop");
const {
createErbLintConfig,
selectFilesForErbLint,
runErbLint
} = require("./linters/erbLint");
const {
runStyleLint,
selectFilesForStyleLint,
sortstyleLintRules,
createStyleLintConfig
} = require("./linters/stylelint");
const {
selectFilesForPylint,
createPylintConfig,
sortPylintConfig,
runPylintOntStagedFiles
} = require("./linters/pylint");
const { runBrakeman } = require("./linters/brakeman");
const {
getUsernameFromLocalDevice,
getTokenFromLocalDevice
} = require("./user");
const {
getEnclosingGitRepository,
isLintFilePresent,
getDotLintDirectory,
isLocalInstall,
rimraf,
copyFileSync,
copyFolderRecursiveSync,
copyRecursiveSync,
parseLintFile
} = require("./filesHandler");
const ROOT_PATH = os.homedir();
// const localUsernamePath = `/.lint/refs/user`;
// const usernameDir = path.join(ROOT_PATH, localUsernamePath);
const API_BASE_URL = "https://api.lint.dev";
const DEV_API_BASE_URL = "http://localhost:3000";
// var executionStartTime;
var executionStartTime = new Date();
// Persist the staged file paths under .lint/tmp/staged so later steps can
// re-read them. Creates the .lint and .lint/tmp directories on demand.
function savePaths(paths) {
  const lintRoot = getDotLintDirectory();
  const tmpDir = lintRoot + "/tmp";
  for (const dir of [lintRoot, tmpDir]) {
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
    }
  }
  fs.writeFileSync(tmpDir + "/staged", paths);
}
// Serialize the lint report as JSON into .lint/tmp/report, creating the
// directory hierarchy if it does not exist yet.
function saveReport(report) {
  const lintRoot = getDotLintDirectory();
  const tmpDir = lintRoot + "/tmp";
  for (const dir of [lintRoot, tmpDir]) {
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
    }
  }
  const stringifiedReport = JSON.stringify(report);
  fs.writeFileSync(tmpDir + "/report", stringifiedReport);
}
// Use of eslint with lint-staged//
function lintingPreCommit(desiredFormat, keep, time, truncate) {
// console.log(desiredFormat);
// console.log('startESLintPreCommit');
var eslintRules = {};
var rubocopRules = {
AllCops: {
DisabledByDefault: true
}
};
// repoOwner = "reyemneirda";
// repoName = "todolist";
const repositoryUUID = parseLintFile();
// const repositoryUUID = "reyemneirda/react-tic-tac-toe";
// console.log('repositoryUUID');
// console.log(repositoryUUID);
// var executionStartTime = new Date()
// executionStartTime = new Date();
if (!repositoryUUID) {
console.log(
"Please init repository first by running " +
chalk.green(" init") +
"."
);
process.exit(1);
}
// repoName = "command-line-test";
checkInternet(function(isConnected) {
// Terminate when no internet access
if (!isConnected) {
console.log("Not connected to the Internet.");
process.exit(1);
}
// var stagedFilePaths = getStagedFiles(time);
var stagedFilePaths = getStagedFilesAddedAndModifiedOnly(time);
// console.log(stagedFilePaths);
var removedFiles = getDeletedStagedFiles();
// console.log("stagedFilePaths");
// console.log(stagedFilePaths);
// console.log(stagedFilePaths.length);
//
// console.log("removedFiles");
// console.log(removedFiles);
// console.log(removedFiles.length);
const spinner = ora("Fetching repository policy...");
if (stagedFilePaths.length === 0 && removedFiles.length === 0) {
// spinner.succeed("No staged files.");
console.log("No staged files.");
process.exit(0);
return;
} else {
spinner.succeed(stagedFilePaths.length + " staged files");
spinner.start();
setTimeout(() => {
spinner.color = "yellow";
}, 2000);
}
var jsFiles = selectFilesForESLint(stagedFilePaths);
var rubyFiles = selectFilesForRuboCop(stagedFilePaths);
var prettierFiles = selectFilesForPrettier(stagedFilePaths);
var pythonFiles = selectFilesForPylint(stagedFilePaths);
var erbFiles = selectFilesForErbLint(stagedFilePaths);
var styleLintCompatibleFiles = selectFilesForStyleLint(stagedFilePaths);
// console.log(styleLintCompatibleFiles);
// connected to the internet
spinner.stop()
createCommitAttempt(repositoryUUID)
.then(commitAttempt => {
// savePaths(stagedFilePaths);
// spinner.succeed("Policy fetched: " + chalk.bold.magenta(body.policy.content.name));
if (commitAttempt.policy && commitAttempt.policy.name) {
if (time) {
spinner.succeed(
"Policy fetched in " +
(new Date() - executionStartTime) +
"ms: " +
chalk.bold.magenta(commitAttempt.policy.name)
);
} else {
spinner.succeed(
"Policy fetched: " + chalk.bold.magenta(commitAttempt.policy.name)
);
}
} else {
if (time) {
spinner.succeed(
"No policy - Fetched in " +
(new Date() - executionStartTime) +
"ms."
);
} else {
spinner.succeed("No policy.");
}
}
spinner.start("Writing linter configuration...");
var filterRulesStartTime = new Date();
// sleep(3000)
// spinner.start("Configuration set");
// console.log(fetchSHA());
var prettier_rules = {};
var styleLintRules = {};
var pythonRules = [];
var options = [];
saveCommitAttemptId(commitAttempt.id);
if (
commitAttempt.policy &&
commitAttempt.policy.policy_rules &&
commitAttempt.policy.policy_rules.length > 0
) {
commitAttempt.policy.policy_rules.forEach(function(policy_rule) {
var obj = {
Enabled: enableRule(policy_rule)
};
if (prettierFiles.length > 0) {
prettier_rules = formatPrettierRules(prettier_rules, policy_rule);
}
// console.log("test");
// console.log(policy_rule);
var es_lint_selected_options = {};
var rubocopSelectedOptions = {};
var name = policy_rule.slug;
if (
policy_rule.linter &&
policy_rule.linter.command == "pylint" &&
pythonFiles.length > 0
) {
pythonRules.push({
rule: policy_rule,
options: policy_rule.options
});
}
if (
policy_rule.linter &&
policy_rule.linter.command == "stylelint" &&
styleLintCompatibleFiles.length > 0
) {
styleLintRules = sortstyleLintRules(policy_rule, styleLintRules);
}
if (
policy_rule.linter &&
policy_rule.linter.command == "eslint" &&
jsFiles.length > 0
) {
if (policy_rule.options.length === 0) {
eslintRules[name] = policy_rule.status;
}
policy_rule.options.forEach(function(option) {
var rule_option = option.rule_option;
if (rule_option) {
switch (rule_option.value_type) {
case "integer":
// console.log("integer");
es_lint_selected_options[rule_option.slug] = parseInt(
option.selected.value
);
break;
case "boolean":
// console.log("Boolean");
var isTrueSet = option.selected.value == "true";
es_lint_selected_options[rule_option.slug] = isTrueSet;
break;
case "string":
// console.log("String");
es_lint_selected_options[rule_option.slug] =
option.selected.value;
break;
case "array-single":
eslintRules[name] = [
policy_rule.status,
option.selected.value
];
break;
case "array-multiple":
// console.log("array-multiple");
if (option.rule_option_options.length == 0) {
// console.log("0 choice");
eslintRules[name] = policy_rule.status;
} else if (option.rule_option_options.length == 1) {
// console.log("1 choice");
options.push(option.rule_option_options[0].value);
es_lint_selected_options[rule_option.slug] = options;
} else if (option.rule_option_options.length > 1) {
// console.log("More than 1 choice");
option.rule_option_options.forEach(function(
rule_option_option
) {
options.push(rule_option_option.value);
});
es_lint_selected_options[rule_option.slug] = options;
}
break;
default:
}
// Check if options are selected
if (Object.keys(es_lint_selected_options).length > 0) {
eslintRules[name] = [
policy_rule.status,
es_lint_selected_options
];
}
} else {
eslintRules[name] = policy_rule.status;
// console.log(name);
// console.log(policy_rule.status);
}
});
}
// if (policy_rule.linter &&
// policy_rule.linter.command) {
// console.log(chalk.green(policy_rule.linter.command));
// console.log(name);
//
// }
if (
policy_rule.linter &&
policy_rule.linter.command == "rubocop" &&
(rubyFiles.length > 0 || erbFiles.length > 0)
) {
if (policy_rule.options.length == 0) {
if (policy_rule.status == "warn") {
obj["Severity"] = "warning";
rubocopRules[name] = obj;
} else if (policy_rule.status == "error") {
obj["Severity"] = policy_rule.status;
rubocopRules[name] = obj;
}
}
policy_rule.options.forEach(function(option) {
var rule_option = option.rule_option;
if (rule_option) {
switch (rule_option.value_type) {
case "integer":
rubocopSelectedOptions[rule_option.slug] = parseInt(
option.selected.value
);
break;
case "boolean":
// console.log("Boolean");
var isTrueSet = option.selected.value == "true";
rubocopSelectedOptions[rule_option.slug] = isTrueSet;
break;
case "string":
// console.log("String");
rubocopSelectedOptions[rule_option.slug] =
option.selected.value;
break;
case "array-single":
rubocopSelectedOptions[rule_option.slug] =
option.selected.value;
break;
case "array-multiple":
// console.log("array-multiple");
if (option.rule_option_options.length == 0) {
// console.log("0 choice");
rubocopRules[name] = policy_rule.status;
} else if (option.rule_option_options.length == 1) {
// console.log("1 choice");
options.push(option.rule_option_options[0].value);
rubocopSelectedOptions[rule_option.slug] = options;
} else if (option.rule_option_options.length > 1) {
// console.log("More than 1 choice");
option.rule_option_options.forEach(function(
rule_option_option
) {
options.push(rule_option_option.value);
});
rubocopRules[rule_option.slug] = options;
}
break;
default:
}
if (policy_rule.status == "warn") {
rubocopSelectedOptions["Severity"] = "warning";
} else if (policy_rule.status == "error") {
rubocopSelectedOptions["Severity"] = policy_rule.status;
}
var jsonObj = rubocopSelectedOptions;
if (jsonObj) {
var merge = Object.assign(obj, jsonObj);
} else {
var merge = obj;
}
rubocopRules[name] = merge;
}
});
}
// console.log("Afterloop")
//
// console.log(rubocopRules)
//
});
var autofix = false;
if (commitAttempt.repository && commitAttempt.repository) {
var autofix = commitAttempt.repository.has_autofix;
}
// var eslintCli = new CLIEngine({
// envs: ["browser", "es6", "node"],
// fix: autofix,
// useEslintrc: false,
// rules: eslintRules
// });
} else {
console.log("No policy rules.");
}
// console.log(eslintRules);
// console.log(eslintCli);
// process.exit(1);
// createLintStagedConfig();
// console.log(eslintRules);
// console.log(rubocopRules);
// spinner.succeed("Configuration set.");
// var writeConfigurationFilesSpinner = ora("Writing configuration files...").start();
// var writeConfigurationFilesTime = new Date();
if (pythonFiles.length > 0) {
var testPython = sortPylintConfig(pythonRules);
createPylintConfig(testPython);
// console.log("Config created");
}
if (jsFiles.length > 0) {
createESlintConfig(eslintRules);
}
if (rubyFiles.length > 0 || erbFiles.length > 0) {
createRubocopConfig(rubocopRules);
}
if (erbFiles.length > 0) {
createErbLintConfig();
}
if (styleLintCompatibleFiles.length > 0) {
createStyleLintConfig(styleLintRules);
}
if (prettierFiles.length > 0) {
createPrettierConfig(prettier_rules);
}
if (time) {
spinner.succeed(
"Configuration set in " +
(new Date() - filterRulesStartTime) +
"ms."
);
} else {
spinner.succeed("Configuration set.");
}
// writeConfigurationFilesSpinner.succeed("Configuration written in " + (new Date() - writeConfigurationFilesTime) + "ms.");
// console.log( chalk.grey("Execution time: " + (new Date() - executionStartTime) + "ms.") );
lintStaged(
autofix,
commitAttempt,
desiredFormat,
prettier_rules,
jsFiles,
rubyFiles,
prettierFiles,
stagedFilePaths,
pythonFiles,
erbFiles,
styleLintCompatibleFiles,
truncate
)
.then(report => {
var executionEndTime = new Date() - executionStartTime;
// console.log("report.report");
// console.log(report.report);
if (report.report) {
report.report.lint_execution_time = executionEndTime;
}
saveReport(report);
postReport(report, time)
.then(report => {
// console.log("");
if (time) {
console.log(
chalk.grey(
"Total execution time: " +
(new Date() - executionStartTime) +
"ms."
)
);
}
if (!report.passed) {
spinner.fail("Commit aborted. Please fix your code first.");
// console.log(
// chalk.red("Commit Aborded. Fix your code first.")
// );
}
})
.catch(error => {
console.log(error);
reject();
process.exit(2);
});
})
.catch(function(e) {
console.log(e);
console.log(chalk.red("Commit Aborded. Fix your code first."));
if (!keep) {
rimraf("./.lint/tmp/");
}
process.exit(1);
// Expected output: "Success!"
});
//
//
// // execution time simulated with setTimeout function
// var executionEndTime = new Date() - executionStartTime
// console.info('Execution time: %dms', executionEndTime)
})
.catch(function(e) {
console.log("Error during CommitAttempt creation.");
console.log(e);
process.exit(1);
// expected output: "Success!"
});
});
}
// Back up the staged files into .lint/tmp/plain/ (e.g. before autofixers
// rewrite them), copying each path recursively.
//
// Fixes vs the original: the unused `enclosingRepository` local (and its
// getEnclosingGitRepository() call) is removed, and the loop-invariant
// existence check / mkdir of the backup directory is hoisted out of the loop
// instead of running once per path.
function swapFiles(stagedFilePaths) {
  var dotLintDirectory = getDotLintDirectory();
  var backupDir = dotLintDirectory + "/tmp/plain/";
  if (!fs.existsSync(backupDir)) {
    fs.mkdirSync(backupDir);
  }
  stagedFilePaths.forEach(path => {
    copyRecursiveSync(path, backupDir);
  });
}
// Check if we are connected to internet
function checkInternet(cb) {
dns.lookup("lint.dev", function(err) {
if (err && err.code == "ENOTFOUND") {
cb(false);
} else {
cb(true);
}
});
}
// Persist the server-assigned commit attempt id under .lint/tmp/ so a later
// step (readCommitAttempId) can retrieve it.
function saveCommitAttemptId(commit_attempt_id) {
  const lintRoot = getDotLintDirectory();
  [lintRoot, lintRoot + "/tmp"].forEach(dir => {
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
    }
  });
  fs.writeFileSync(lintRoot + "/tmp/commit_attempt_id", commit_attempt_id);
}
// Read back the commit attempt id saved by saveCommitAttemptId.
// Returns the id as a string, or undefined (after logging) when the file is
// absent. Note: reads relative to the current working directory.
function readCommitAttempId() {
  const idPath = "./.lint/tmp/commit_attempt_id";
  if (!fs.existsSync(idPath)) {
    console.error("No commit attempt found");
    return;
  }
  return fs.readFileSync(idPath).toString();
}
// Fetch commit attempt
function createCommitAttempt(repositoryUUID) {
// console.log("createCommitAttempt");
return new Promise((resolve, reject) => {
const currentUser = getUsernameFromLocalDevice();
const token = getTokenFromLocalDevice();
// const token = "<KEY>";
// console.log("WHY");
if (!repositoryUUID) {
reject(new Error("Unable to get repositoryUUID."));
process.exit(1);
}
if (!token) {
console.log("Please log in first.");
// reject(new Error("Unable to get token."));
process.exit(0);
}
const url = `${API_BASE_URL}/${repositoryUUID}/commit_attempts.json?user_token=${token}`;
// const url = `${DEV_API_BASE_URL}/${repositoryUUID}/commit_attempts.json?user_token=${token}`;
// console.log(url);
request.post(
url,
{
json: {
commit_attempt: {
branch_name: fetchbranch()
}
}
},
function(error, response, commitAttempt) {
// console.log(url);
// console.log(response);
// console.log(body);
if (response) {
if (!error && response.statusCode == 201) {
// console.log('Commit Attempt created.');
// var stringify = JSON.stringify(body);
// console.log(stringify);
resolve(commitAttempt);
} else {
console.log("No request");
// console.log(url);
console.log(response.statusCode);
if (error) {
console.log(error);
}
// console.log(body);
reject(new Error("Unable to post to server."));
process.exit(1);
}
} else {
console.error(new Error("Unable to connect."));
// console.log(error);
reject(error);
}
reject(error);
}
);
});
}
// SHA of the current HEAD commit as a newline-terminated string, or
// undefined when git fails (the error is logged).
function fetchSHA() {
  try {
    const gitOutput = execSync("git rev-parse HEAD");
    return gitOutput ? gitOutput.toString() : undefined;
  } catch (err) {
    console.log(err);
  }
}
// Name of the currently checked-out branch, with newlines stripped.
// Exits the process when git fails (e.g. not inside a repository).
function fetchbranch() {
  try {
    const gitOutput = execSync("git rev-parse --abbrev-ref HEAD");
    if (!gitOutput) {
      return;
    }
    return gitOutput.toString().replace(/\n/g, "");
  } catch (err) {
    console.log(err);
    process.exit(1);
  }
}
// Whether the policy carried by this API payload has autofix enabled.
function checkIfAutofixEnabled(body) {
  const { policy } = body;
  return policy.autofix;
}
// Staged paths that no longer exist on disk — i.e. staged deletions.
// Exits the process when git fails.
function getDeletedStagedFiles() {
  try {
    var gitOutput = execSync(
      // "git diff-index --cached --name-only HEAD"
      "git diff --name-only --cached"
    );
    var stagedFilePaths;
    if (gitOutput) {
      stagedFilePaths = gitOutput
        .toString()
        .replace(/[\r]+/g, "")
        .split("\n")
        .slice(0, -1);
    }
    // A staged path that is missing from the working tree was deleted.
    return stagedFilePaths.filter(file => !fs.existsSync(file));
  } catch (err) {
    console.log("Error getting Staged Files in linter.js");
    console.log(err);
    process.exit(1);
  }
}
// Every staged path (added, modified, or deleted), as reported by
// `git diff --name-only --cached`. Shows a spinner while git runs; with
// `time` set, the success message includes how long the lookup took.
// Exits the process when git fails.
function getStagedFiles(time) {
  try {
    var stagedFilesSpinner = ora("Checking git staged files.").start();
    var stagedFilesStartTime = new Date();
    var gitOutput = execSync(
      // "git status -s"
      "git diff --name-only --cached"
    );
    var stagedFilePaths;
    if (gitOutput) {
      // Strip CRs, split into lines, and drop the trailing empty line.
      stagedFilePaths = gitOutput
        .toString()
        .replace(/[\r]+/g, "")
        .split("\n")
        .slice(0, -1);
    }
    if (stagedFilePaths.length > 0) {
      if (time) {
        stagedFilesSpinner.succeed(
          stagedFilePaths.length +
            " staged files fetched in " +
            (new Date() - stagedFilesStartTime) +
            "ms."
        );
      } else if (stagedFilePaths.length == 1) {
        stagedFilesSpinner.succeed(stagedFilePaths.length + " staged file.");
      } else {
        stagedFilesSpinner.succeed(stagedFilePaths.length + " staged files.");
      }
    } else {
      stagedFilesSpinner.stop();
    }
    return stagedFilePaths;
  } catch (err) {
    console.log("Error getting Staged Files in linter.js.");
    console.log(err);
    process.exit(1);
  }
}
// Staged paths that still exist on disk — i.e. additions and modifications,
// excluding staged deletions. Exits the process when git fails.
function getStagedFilesAddedAndModifiedOnly(time) {
  try {
    var gitOutput = execSync("git diff --name-only --cached");
    var stagedFilePaths;
    if (gitOutput) {
      stagedFilePaths = gitOutput
        .toString()
        .replace(/[\r]+/g, "")
        .split("\n")
        .slice(0, -1);
    }
    return stagedFilePaths.filter(file => fs.existsSync(file));
  } catch (err) {
    console.log("Error getting Staged Files in linter.js.");
    console.log(err);
    process.exit(1);
  }
}
// Symmetric difference of two arrays: items present in exactly one of them.
// Used to compute which staged files were not inspected by any linter.
//
// Fix vs the original: the old implementation used a plain object as the
// lookup table, which coerced every element to a string (arr_diff([1], [2])
// returned ["1", "2"]). A Set preserves element types while keeping the same
// output order (a1's surviving items first, in order, then a2's additions).
function arr_diff(a1, a2) {
  const seen = new Set(a1);
  for (const item of a2) {
    if (seen.has(item)) {
      seen.delete(item);
    } else {
      seen.add(item);
    }
  }
  return Array.from(seen);
}
//Get informations for report
//Get informations for report
// Version of the npx launcher, or undefined when unavailable.
// Fix vs the original: the probe command was `which ` with no program name,
// so it always failed and this function returned undefined unconditionally.
function getCliVersion() {
  try {
    var result = execSync("which npx")
    if (result) {
      return execSync("npx -v").toString()
    }
  } catch (e) {
  }
}
// Node.js version string ("vX.Y.Z\n"), or undefined when node is not on PATH.
function getNodeVersion() {
  try {
    if (execSync("which node")) {
      return execSync("node -v").toString();
    }
  } catch (e) {
  }
}
// npm version string, or undefined when npm is not on PATH.
function getNpmVersion() {
  try {
    if (execSync("which npm")) {
      return execSync("npm -v").toString();
    }
  } catch (e) {
  }
}
// Ruby version banner ("ruby X.Y.Z ..."), or undefined when ruby is absent.
function getRubyVersion() {
  try {
    if (execSync("which ruby")) {
      return execSync("ruby -v").toString();
    }
  } catch (e) {
  }
}
// Python version string, or undefined when python is absent (errors logged).
// Fix vs the original: execSync returns a Buffer; the siblings
// (getNodeVersion etc.) all call .toString(), but this one returned the raw
// Buffer. Convert for consistency.
function getPythonVersion() {
  try {
    var result = execSync("which python")
    if (result) {
      return execSync("python --version").toString()
    }
  } catch (e) {
    console.log('error');
    console.log(e);
  }
}
// Reconstruct the command line that launched this process: every argv entry
// joined with (and terminated by) a single space.
function fetchShellCommand() {
  return process.argv.reduce((acc, val) => acc + val + " ", "");
}
//End informations for report
function lintStaged(
autofix,
commitAttempt,
desiredFormat,
prettier_rules,
jsFiles,
rubyFiles,
prettierFiles,
stagedFilePaths,
pythonFiles,
erbFiles,
styleLintCompatibleFiles,
truncate
) {
return new Promise((resolve, reject) => {
var report = {};
var cliVersion = getCliVersion()
var nodeVersion = getNodeVersion()
var npmVersion = getNpmVersion()
var rubyVersion = getRubyVersion()
var shellCommand = fetchShellCommand()
// var pythonVersion = getPythonVersion()
// console.log("cliVersion", cliVersion);
// console.log("nodeVersion", nodeVersion);
// console.log("npmVersion", npmVersion);
// console.log("rubyVersion", rubyVersion);
// console.log("pythonVersion", pythonVersion);
// fs.readFileSync( process.env.GIT_PARAMS );
// var stagedFilePaths = getStagedFiles();
// if (stagedFilePaths.length === 0) {
// console.log("No staged files.");
// console.log("");
// process.exit(0);
// return;
// }
if (!commitAttempt.policy) {
report.passed = true;
resolve(report);
return;
}
if (autofix) {
// console.log("");
var autofixEnabled = ora("Autofix enabled.").succeed();
}
var javascriptReports = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var rubyReports = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var pythonReports = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var styleFilesReport = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var brakemanReport = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var erbReports = {
error_count: 0,
warning_count: 0,
rule_checks_attributes: []
};
var filesMadePrettier = [];
var prettierHasSucceed;
if (prettierFiles.length > 0) {
// console.log("Before prettierFiles");
console.log("");
// console.log(
// "************************************************************************"
// );
// console.log(
// "******************************* Prettier *******************************"
// );
// console.log(
// "************************************************************************"
// );
console.log(chalk.bold.cyan("Running Prettier..."));
// console.log("");
// console.log("About to make " + prettierFiles.length + " file(s) prettier.");
// console.log(prettierFiles);
console.log("");
const dotLintDirectory = getDotLintDirectory();
var configFile = dotLintDirectory + "/tmp/prettierrc";
try {
var prettier_fails = 0;
prettierFiles.forEach(filePath => {
var parser = setParser(filePath);
const text = fs.readFileSync(filePath, "utf8");
prettier.resolveConfig.sync(filePath, {
config: configFile,
// or parser: "php"
parser: parser
});
var formatted = prettier.format(text, {
config: configFile,
// or parser: "php"
parser: parser
});
// console.log(formatted);
// console.log(filePath);
fs.writeFileSync(filePath, formatted, "utf8");
var fileFormatted = prettier.check(formatted, {
config: configFile,
// or parser: "php"
parser: parser
});
if (fileFormatted) {
console.log("- " + chalk.green(filePath) + " is prettier");
// console.log("----------------------------------------------");
filesMadePrettier.push(filePath);
} else {
prettier_fails = prettier_fails + 1;
console.log("Did not made " + filePath + " prettier");
}
});
// console.log("");
if (prettier_fails > 0) {
prettierHasSucceed = false;
} else {
prettierHasSucceed = true
}
} catch (e) {
console.log("Prettier failure:");
if (e.loc && e.loc.start) {
var error_at_line = e.loc.start + "";
console.log(error_at_line);
} else {
console.log(e.toString());
}
prettierHasSucceed = false;
}
// var prettierHasSucceed = runPrettierOnStagedFiles(prettierFiles, body);
}
var brakemanFiles = rubyFiles.concat(erbFiles);
if (brakemanFiles.length > 0) {
console.log("");
console.log(chalk.bold.cyan("Running Brakeman..."));
brakemanReport = runBrakeman(brakemanFiles, truncate);
// console.log(brakemanReport);
} else {
brakemanReport.error_count = 0;
brakemanReport.warning_count = 0;
brakemanReport.fixable_error_count = 0;
brakemanReport.fixable_warning_count = 0;
brakemanReport.rule_checks_attributes = [];
}
if (styleLintCompatibleFiles.length > 0) {
console.log("");
console.log(chalk.bold.cyan("Running Stylelint..."));
styleFilesReport = runStyleLint(
styleLintCompatibleFiles,
autofix,
commitAttempt,
desiredFormat,
truncate
);
} else {
styleFilesReport.error_count = 0;
styleFilesReport.warning_count = 0;
styleFilesReport.fixable_error_count = 0;
styleFilesReport.fixable_warning_count = 0;
styleFilesReport.rule_checks_attributes = [];
}
if (pythonFiles.length > 0) {
console.log("");
console.log(chalk.bold.cyan("Running Pylint..."));
pythonReports = runPylintOntStagedFiles(
pythonFiles,
autofix,
commitAttempt,
desiredFormat,
truncate
);
} else {
pythonReports.error_count = 0;
pythonReports.warning_count = 0;
pythonReports.fixable_error_count = 0;
pythonReports.fixable_warning_count = 0;
pythonReports.rule_checks_attributes = [];
}
if (jsFiles.length > 0) {
console.log("");
// console.log(
// "************************************************************************"
// );
// console.log(
// "******************************** ESLint ********************************"
// );
// console.log(
// "************************************************************************"
// );
console.log(chalk.bold.cyan("Running ESLint..."));
// console.log("");
// console.log("About to lint " + jsFiles.length + " Javascript file(s).");
// console.log(
// "Linter is coming for " + jsFiles.length + " Javascript file(s):"
// );
// console.log(jsFiles);
javascriptReports = runEslint(jsFiles, autofix, commitAttempt, desiredFormat, truncate);
// var linting = eslintCli.executeOnFiles(jsFiles);
//
// const dotLintDirectory = getDotLintDirectory();
// const enclosingGitRepository = getEnclosingGitRepository();
// var configFile = JSON.parse(
// fs.readFileSync(dotLintDirectory + "/tmp/eslintrc")
// );
//
// var linter = new eslint.Linter();
// // var output = [];
// var output = [];
// // var output = linting.results;
// jsFiles.forEach(file => {
// var fileContent = fs.readFileSync(enclosingGitRepository + "/" + file);
// var rulesResultForFile;
// if (autofix) {
// rulesResultForFile = linter.verifyAndFix(
// fileContent.toString(),
// configFile,
// {
// filename: file
// }
// );
// var errorCount = 0;
// var warningCount = 0;
// var fixableErrorCount = 0;
// var fixableWarningCount = 0;
// rulesResultForFile.messages.forEach(result => {
// if (result.severity == 1) {
// warningCount += 1;
// }
// if (result.severity == 2) {
// errorCount += 1;
// }
// if (result.fix) {
// if (result.severity == 1) {
// fixableWarningCount += 1;
// }
// if (result.severity == 2) {
// fixableErrorCount += 1;
// }
// }
// });
// fs.writeFileSync(file, rulesResultForFile.output, "utf8");
// } else {
// rulesResultForFile = linter.verify(
// fileContent.toString(),
// configFile,
// {
// filename: file
// }
// );
// var errorCount = 0;
// var warningCount = 0;
// var fixableErrorCount = 0;
// var fixableWarningCount = 0;
// // console.log(rulesResultForFile);
// rulesResultForFile.forEach(result => {
// if (result.severity == 1) {
// warningCount += 1;
// }
// if (result.severity == 2) {
// errorCount += 1;
// }
// if (result.fix) {
// if (result.severity == 1) {
// fixableWarningCount += 1;
// }
// if (result.severity == 2) {
// fixableErrorCount += 1;
// }
// }
// });
// }
// var messages;
// if (autofix) {
// messages = rulesResultForFile.messages;
// } else {
// messages = rulesResultForFile;
// }
// var resultForFile = {
// filePath: file,
// messages: messages,
// errorCount: errorCount,
// warningCount: warningCount,
// fixableErrorCount: fixableErrorCount,
// fixableWarningCount: fixableWarningCount
// };
// output.push(resultForFile);
// });
// console.log(output);
// var output = linter.verify(code, configFile, { filename: "foo.js" });
// var javascriptReports = parseEslintResults(javascriptReports, body);
// if (desiredFormat == "simple") {
// parseOutPoutForRuleCheckAsText(output);
// } else {
// parseOutPoutForRuleCheckAsTable(output);
// }
// console.log(javascriptReports.results);
// console.log("Linting Done");
} else {
javascriptReports.error_count = 0;
javascriptReports.warning_count = 0;
javascriptReports.fixable_error_count = 0;
javascriptReports.fixable_warning_count = 0;
javascriptReports.rule_checks_attributes = [];
// process.exit(0);
// console.error("No Javascript Files Found");
}
if (rubyFiles.length > 0) {
console.log("");
// console.log(
// "************************************************************************"
// );
// console.log(
// "******************************** Rubocop *******************************"
// );
// console.log(
// "************************************************************************"
// );
// console.log("");
console.log(chalk.bold.cyan("Running Rubocop..."));
// console.log(
// "Linter is coming for " + rubyFiles.length + " Ruby file(s):"
// );
// console.log("About to lint " + rubyFiles.length + " Ruby file(s):");
// console.log(rubyFiles);
rubyReports = runRubocopJson(rubyFiles, autofix, commitAttempt, desiredFormat, truncate);
// runRubocop(rubyFiles, autofix);
// console.log(rubyReports);
// console.log(rubyReports);
// console.log("Linting Done");
} else {
rubyReports.error_count = 0;
rubyReports.warning_count = 0;
rubyReports.fixable_error_count = 0;
rubyReports.fixable_warning_count = 0;
rubyReports.rule_checks_attributes = [];
}
if (erbFiles.length > 0) {
console.log("");
console.log(chalk.bold.cyan("Running ERB Lint..."));
erbReports = runErbLint(erbFiles, commitAttempt, truncate); // console.log(erbReports);
} else {
erbReports.error_count = 0;
erbReports.warning_count = 0;
erbReports.fixable_error_count = 0;
erbReports.fixable_warning_count = 0;
erbReports.rule_checks_attributes = [];
}
// console.log(rubyReports);
report.name = commitAttempt.message;
report.commit_attempt_id = commitAttempt.id;
report.policy_id = commitAttempt.policy.id;
report.repository_id = commitAttempt.repository_id;
report.user_id = commitAttempt.user_id;
report.error_count =
javascriptReports.error_count +
rubyReports.error_count +
styleFilesReport.error_count +
pythonReports.error_count +
brakemanReport.error_count +
erbReports.error_count;
report.warning_count =
javascriptReports.warning_count +
rubyReports.warning_count +
styleFilesReport.warning_count +
pythonReports.warning_count +
brakemanReport.warning_count +
erbReports.warning_count;
report.fixable_error_count =
javascriptReports.fixable_error_count +
rubyReports.fixable_error_count +
styleFilesReport.fixable_error_count +
pythonReports.fixable_error_count +
erbReports.error_count;
report.fixable_warning_count =
javascriptReports.fixable_warning_count +
rubyReports.fixable_warning_count +
styleFilesReport.fixable_warning_count +
pythonReports.fixable_warning_count +
erbReports.error_count;
var ruleChecks = {};
ruleChecks.rule_checks_attributes = javascriptReports.rule_checks_attributes
.concat(rubyReports.rule_checks_attributes)
.concat(pythonReports.rule_checks_attributes)
.concat(erbReports.rule_checks_attributes)
.concat(styleFilesReport.rule_checks_attributes)
.concat(brakemanReport.rule_checks_attributes);
// console.log('@@@ ruleChecks @@@')
// console.log(ruleChecks)
var inspectedFiles = jsFiles
.concat(rubyFiles)
.concat(erbFiles)
.concat(pythonFiles)
.concat(styleLintCompatibleFiles);
var notInspectedFiles = arr_diff(stagedFilePaths, inspectedFiles);
// console.log(notInspectedFiles);
report.report = {
rule_checks_attributes: ruleChecks.rule_checks_attributes,
staged_files: stagedFilePaths,
javascript_files: jsFiles,
ruby_files: rubyFiles,
erb_files: rubyFiles,
brakeman: brakemanFiles,
formatted_files: filesMadePrettier,
inspected_files: inspectedFiles,
not_inspected_files: notInspectedFiles,
_version: cliVersion,
node_version: nodeVersion,
npm_version: npmVersion,
ruby_version: rubyVersion,
// python_version = pythonVersion,
source_shell_command: shellCommand
};
//
// console.log("report.report");
// console.log(report.report);
if (
commitAttempt.policy.prevent_commits_on_errors &&
report.error_count > 0
) {
report.passed = false;
} else if (
prettierHasSucceed === false &&
commitAttempt.policy.prevent_commits_on_errors
) {
report.passed = false;
} else {
report.passed = true;
}
if (report) {
resolve(report);
} else {
reject();
}
});
}
// Sends a finished policy-check report to the API and resolves with the
// created policy check; rejects when the server does not answer 201.
// @param report - the assembled policy-check payload
// @param time - when truthy, the spinner message includes elapsed ms
function postReport(report, time) {
  const spinner = ora("Creating report...");
  spinner.start();
  const token = getTokenFromLocalDevice();
  const url = `${API_BASE_URL}/policy_checks.json?user_token=${token}`;
  const startedAt = new Date();
  return new Promise((resolve, reject) => {
    const payload = { json: { policy_check: report } };
    request.post(url, payload, (error, response, policy_check) => {
      if (!response) {
        // No HTTP response at all (network failure, DNS, etc.).
        console.log(error);
        console.error(new Error("Unable to create Policy Check."));
        reject(error);
        return;
      }
      if (!error && response.statusCode === 201) {
        if (time) {
          spinner.succeed(
            "Report saved in " + (new Date() - startedAt) + "ms."
          );
        } else {
          spinner.succeed("Report saved.");
        }
        resolve(policy_check);
        return;
      }
      // Either a transport error or a non-201 status: log then reject.
      if (error) {
        console.log(error);
      } else if (response.statusCode !== 201) {
        console.log(response.statusCode);
      }
      reject(new Error("Unable to create Policy Check."));
    });
  });
}
//test autofix
// function getExtension(file) {
// var extenstion = file.split(".").pop();
// return extenstion;
// }
// function lint(files, autofix, body) {
// // var cmd = "which eslint";
// // // console.log("=== Lint called ===");
// if (autofix) {
// var cmd = "eslint --color --fix --format json " + files.join(" ");
// // console.log("Prepare to fix");
// } else {
// var cmd = "eslint --color --format json " + files.join(" ");
// }
// try {
// // // console.log("=== Try ===");
// var linter_command = execSync(cmd);
// if (linter_command) {
// // // console.log("linter_command.toString() WORKS");
// // console.error(linter_command.toString() );
// // process.stdout.write(linter_command);
// // console.log(linter_command.stdout);
// // console.log(linter_command);
// var output = JSON.parse(linter_command);
// // console.log(output)
// parseOutPoutForRuleCheck(output);
// prepareRequestAfterLint(true, body, 0, output);
// }
// } catch (err) {
// // // console.log("=== Catch ===");
// // // console.log(err);
//
// if (err.stdout) {
// console.log("=== Catch stdout ===");
// // console.log(err.stdout.toString());
// var output = JSON.parse(err.stdout);
// parseOutPoutForRuleCheck(output);
// prepareRequestAfterLint(false, body, 1, output);
//
// // prepareRequestAfterLint(false, body, 1)
// }
// // prepareRequestAfterLint(passed, body)
// // process.exit(1);
// // // console.log("=== Catch after ===");
// }
// // // console.log("Linting Done");
// }
//
// Builds the list of supported linters with their install status and the
// command used to install them, then prints the result as a table.
function fetchLinters() {
  // Helper keeps every entry shaped identically; `installed` is a string
  // because printLinters compares it against the literal "true".
  const linterStatus = (name, language, installed, command) => ({
    name: name,
    language: language,
    installed: installed ? "true" : "false",
    command: command
  });
  const linters = [
    linterStatus(
      "eslint",
      "Javascript",
      checkIfEslintIsInstalled(),
      "npm install -g eslint"
    ),
    linterStatus(
      "rubocop",
      "Ruby",
      checkIfRubocopIsInstalled(),
      "gem install rubocop"
    ),
    // phplint detection is not implemented yet, so it is always reported
    // as not installed (the former `if (1 == 0)` branch was dead code).
    linterStatus("phplint", "Php", false, "npm install -g eslint")
  ];
  printLinters(linters);
}
// Renders the linter list as a CLI table and exits the process:
// exit code 0 when a list was printed, 1 when there was nothing to show.
function printLinters(linters) {
  if (!linters) {
    console.log("No Linters.");
    process.exit(1);
  }
  const table = new CliTable({
    head: [
      chalk.cyan("Linters (" + linters.length + ")"),
      chalk.cyan("Installed"),
      chalk.cyan("language"),
      chalk.cyan("Command")
    ],
    colWidths: [35, 15, 19, 30]
  });
  // forEach instead of map: the mapped return value was never used.
  linters.forEach(linter => {
    const installed =
      linter.installed == "true"
        ? chalk.green(linter.installed)
        : chalk.red(linter.installed);
    table.push([linter.name, installed, linter.language, linter.command]);
  });
  console.log(table.toString());
  process.exit(0);
}
// Makes sure every required linter is present (installing missing ones),
// then runs the pre-commit linting pass.
function preCommit(keep, time, truncate) {
  const ensureInstalled = (name, isInstalled, install) => {
    if (isInstalled()) {
      return; // already available, nothing to do
    }
    console.log(name + " is not installed. Installing...");
    install();
    console.log(name + " is now installed.");
  };
  ensureInstalled("Eslint", checkIfEslintIsInstalled, installEslint);
  ensureInstalled("Prettier", checkIfPrettierIsInstalled, installPrettier);
  ensureInstalled("Rubocop", checkIfRubocopIsInstalled, installRubocop);
  lintingPreCommit("simple", keep, time, truncate);
}
// Reads the staged-file list written during the commit attempt and returns
// it as an array of paths; returns undefined when no commit attempt exists.
function readPaths() {
  const stagedListPath = "./.lint/tmp/staged";
  if (!fs.existsSync(stagedListPath)) {
    console.error("No commit attempt found");
    return;
  }
  return fs
    .readFileSync(stagedListPath)
    .toString()
    .split(",");
}
// Loads the JSON report produced by the linting pass; returns undefined
// when the file is missing or cannot be parsed.
function readReport() {
  const reportPath = "./.lint/tmp/report";
  if (!fs.existsSync(reportPath)) {
    console.error("No report found");
    return;
  }
  try {
    return JSON.parse(fs.readFileSync(reportPath));
  } catch (e) {
    console.error("Can't find report file.");
    console.log(e);
  }
}
// Blocks the commit (exit code 1) when the last policy check failed,
// after syncing the commit attempt with the server. Exits 0 when no
// report exists at all.
function checkIfPolicyCheckPassed() {
  const report = readReport();
  if (!report) {
    process.exit(0);
  }
  if (report.passed) {
    return; // commit is allowed to proceed
  }
  console.log("");
  const repositoryUUID = parseLintFile();
  if (!repositoryUUID) {
    console.log(
      "Please init repository first by running " + chalk.green(" init") + "."
    );
    process.exit(1);
  }
  editCommitAttempt(repositoryUUID).then(body => {
    rimraf("./.lint/tmp/");
    process.exit(1);
  });
}
// Git prepare-commit-msg hook entry point: aborts the commit when the
// last recorded policy check did not pass.
function prepareCommitMsg() {
  checkIfPolicyCheckPassed();
}
// Git post-commit hook entry point: attaches the new commit SHA to the
// commit attempt on the server, then removes the temporary lint state.
function postCommit() {
  const repositoryUUID = parseLintFile();
  if (!repositoryUUID) {
    console.log(
      "Please init repository first by running " + chalk.green(" init") + "."
    );
    process.exit(1);
  }
  checkInternet(isConnected => {
    // Terminate when no internet access
    if (!isConnected) {
      console.log("Not connected to the Internet.");
      process.exit(1);
    }
    const sha = fetchSHA();
    editCommitAttempt(repositoryUUID, sha)
      .then(() => {
        rimraf("./.lint/tmp/");
      })
      .catch(error => {
        console.log(error);
        rimraf("./.lint/tmp/");
        process.exit(1);
      });
  });
}
// Updates the commit attempt on the server with the final commit message
// and (optionally) the commit SHA. Resolves with the response body.
// @param repositoryUUID - UUID identifying the repository on the server
// @param sha - commit SHA; when omitted it is read from the local repo
function editCommitAttempt(repositoryUUID, sha) {
  return new Promise((resolve, reject) => {
    if (!sha) {
      // BUGFIX: this used to declare a new block-scoped `const sha`,
      // which left the outer parameter undefined for the request below.
      sha = fetchSHA();
    }
    const token = getTokenFromLocalDevice();
    if (!repositoryUUID) {
      reject(new Error("Unable to get repositoryUUID."));
      process.exit(1);
    }
    if (!token) {
      console.log("Please log in first.");
      process.exit(0);
    }
    // Fall back to a placeholder when git did not record a message.
    let commitMessage;
    if (fs.existsSync(".git/COMMIT_EDITMSG")) {
      commitMessage = fs.readFileSync(".git/COMMIT_EDITMSG", "utf8");
    } else {
      commitMessage = "NO COMMIT MESSAGE";
    }
    const commit_attempt_id = readCommitAttempId();
    const url = `${API_BASE_URL}/${repositoryUUID}/commit_attempts/${commit_attempt_id}.json?user_token=${token}`;
    request.put(
      url,
      {
        json: {
          commit_attempt: {
            message: commitMessage,
            sha: sha
          }
        }
      },
      function(error, response, body) {
        if (!response) {
          console.error(new Error("Unable to connect."));
          reject(error);
          return;
        }
        if (
          (!error && response.statusCode == 200) ||
          response.statusCode == 204
        ) {
          resolve(body);
        } else {
          reject(new Error("Unable to post to server."));
        }
        // BUGFIX: removed the old unconditional trailing reject(error);
        // a promise that has already settled must not be rejected again.
      }
    );
  });
}
// Public surface of this module: git hook entry points (preCommit,
// postCommit, prepareCommitMsg) plus staging/linting helpers used by the CLI.
module.exports = {
  getStagedFiles,
  getDeletedStagedFiles,
  lintingPreCommit,
  createCommitAttempt,
  lintStaged,
  preCommit,
  postCommit,
  prepareCommitMsg,
  // installEslint,
  // getExtension,
  fetchLinters
  // createLintStagedConfig
};
|
from typing import List
def findMaxPathSum(triangle: List[List[int]]) -> int:
    """Return the maximum top-to-bottom path sum in a number triangle.

    A path moves from row ``i`` index ``j`` to row ``i+1`` at index ``j``
    or ``j+1``. Uses the classic bottom-up DP in O(n^2) time.

    Args:
        triangle: list of rows, row ``i`` containing ``i + 1`` integers.

    Returns:
        The maximum path sum, or 0 for an empty triangle.
    """
    if not triangle:
        return 0
    # Work on a copy so the caller's triangle is not mutated
    # (the original implementation modified its argument in place).
    rows = [row[:] for row in triangle]
    n = len(rows)
    # Start from the second last row and fold each row into the one above.
    for i in range(n - 2, -1, -1):
        for j in range(i + 1):
            rows[i][j] += max(rows[i + 1][j], rows[i + 1][j + 1])
    # The top element now holds the maximum path sum.
    return rows[0][0]
# Test the function with the given example.
# NOTE: the literals previously used leading zeros (01, 02, 04), which is
# a SyntaxError in Python 3.
triangle = [
    [20],
    [19, 1],
    [88, 2, 77],
    [99, 65, 4, 28],
    [41, 41, 26, 56, 83]
]
print(findMaxPathSum(triangle))  # Output: 267 (the old comment said 321, which was wrong)
/**
* Copyright (c) 2017-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import React, {useContext} from 'react';
import Link from '@docusaurus/Link';
import DocusaurusContext from '@docusaurus/context';
import styles from './styles.module.css';
/**
 * Bottom-of-page pagination for docs pages: renders "previous" and "next"
 * links (with arrow icons) based on the current page's metadata taken
 * from DocusaurusContext. A link is only rendered when the neighbouring
 * doc actually exists in `docsMetadatas`.
 */
function DocsPaginator() {
  const context = useContext(DocusaurusContext);
  const {docsMetadatas, metadata} = context;
  // No metadata means we are not on a docs page: render nothing.
  if (!metadata) {
    return null;
  }
  return (
    <div className={styles.paginatorContainer}>
      <div>
        {metadata.previous && docsMetadatas[metadata.previous] && (
          <Link
            className={styles.paginatorLink}
            to={docsMetadatas[metadata.previous].permalink}>
            <svg className={styles.arrow} viewBox="0 0 24 24">
              <g>
                <line x1="19" y1="12" x2="5" y2="12" />
                <polyline points="12 19 5 12 12 5" />
              </g>
            </svg>{' '}
            <span className={styles.label}>{metadata.previous_title}</span>
          </Link>
        )}
      </div>
      <div className={styles.paginatorRightContainer}>
        {metadata.next && docsMetadatas[metadata.next] && (
          <Link
            className={styles.paginatorLink}
            to={docsMetadatas[metadata.next].permalink}>
            <span className={styles.label}>{metadata.next_title}</span>{' '}
            <svg className={styles.arrow} viewBox="0 0 24 24">
              <g>
                <line x1="5" y1="12" x2="19" y2="12" />
                <polyline points="12 5 19 12 12 19" />
              </g>
            </svg>
          </Link>
        )}
      </div>
    </div>
  );
}
export default DocsPaginator;
|
import hashlib
def verify_admin_session(admin_session_token, admin_token_hash, admin_name, stored_admin_token_hash):
    """Validate an admin session token against its stored SHA-256 hash.

    Args:
        admin_session_token: raw session token presented by the client.
        admin_token_hash: unused; kept for backward compatibility with callers.
        admin_name: name returned on successful verification.
        stored_admin_token_hash: hex SHA-256 digest the token must match.

    Returns:
        ``admin_name`` when the token hashes to the stored digest,
        otherwise ``None``.
    """
    import hmac  # local import: constant-time comparison helper

    try:
        # Hash the presented token and compare in constant time so the
        # check does not leak timing information about the stored hash.
        hashed_session_token = hashlib.sha256(admin_session_token.encode()).hexdigest()
        if not hmac.compare_digest(hashed_session_token, stored_admin_token_hash):
            raise ValueError("Invalid session token")
    except ValueError as e:
        print(f"Session verification error: {e}")
        return None
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
        return None
    return admin_name
/// <reference path="../exceptionless.ts" />
module exceptionless.error {
    // View model for the error dashboard page: wires up navigation, tab
    // state, previous/next error links and "promoted tabs" management.
    export class DashboardViewModel extends ViewModelBase {
        private _navigationViewModel: NavigationViewModel;
        // Observables bound to the dashboard UI.
        projectId = ko.observable<string>('');
        currentTabHash = ko.observable<string>('');
        installDate = ko.observable<Date>(DateUtil.minValue.toDate());
        occurrence = ko.observable<Date>(DateUtil.minValue.toDate());
        previousErrorId = ko.observable<string>('');
        nextErrorId = ko.observable<string>('');
        previousErrorLink = ko.observable<string>('');
        nextErrorLink = ko.observable<string>('');
        promotedTabs = ko.observable<string[]>([]);
        constructor(elementId: string, navigationElementId: string, projectsElementId: string, dateRangeElementId: string, tabElementId: string, defaultProjectId?: string, autoUpdate?: boolean, data?: JSON) {
            super(elementId, null, autoUpdate);
            this.currentTabHash(location.hash);
            TabUtil.init(tabElementId);
            this._navigationViewModel = new NavigationViewModel(navigationElementId, null, defaultProjectId);
            // A plan change can alter what the dashboard may show: reload.
            App.onPlanChanged.subscribe(() => window.location.reload());
            window.addEventListener('hashchange', (hashChangeEvent: any) => {
                this.currentTabHash(location.hash);
            });
            // Render every [data-bind-template] element through Handlebars;
            // when the content is not valid JSON, restore the raw text.
            $.each($('[data-bind-template]'), (key, value) => {
                var content = $(value).text();
                try {
                    var json = JSON.parse(content);
                    var template = HandlebarsUtil.getTemplate(value);
                    $(value).html(template(json));
                } catch (ex) {
                    $(value).text(content);
                }
            });
            this.populateViewModel(data);
            this.applyBindings();
            App.initZeroClipboard();
        }
        // Copies server-rendered data (ProjectId, occurrence date, previous/
        // next error ids, client install date, promoted tabs) into the
        // observables. No-op when no data was provided.
        public populateViewModel(data?: any) {
            if (!data)
                return;
            this.projectId(data.ProjectId);
            this.occurrence(DateUtil.parse(data.OccurrenceDate));
            this.previousErrorId(data.PreviousErrorId);
            this.previousErrorLink(this.previousErrorId() == null || this.previousErrorId().length == 0 ? 'javascript:return false;' : '/error/' + this.previousErrorId());
            this.nextErrorId(data.NextErrorId);
            this.nextErrorLink(this.nextErrorId() == null || this.nextErrorId().length == 0 ? 'javascript:return false;' : '/error/' + this.nextErrorId());
            if (data.ExceptionlessClientInfo)
                this.installDate(DateUtil.parse(data.ExceptionlessClientInfo.InstallDate));
            if (data.PromotedTabs) {
                var tabs: string[] = [];
                for (var i = 0; i < data.PromotedTabs.length; i++) {
                    tabs.push(data.PromotedTabs[i]);
                }
                this.promotedTabs(tabs);
            }
        }
        // Adds the given tab key to the project's promoted tabs via the
        // API, then jumps to that tab and reloads the page.
        public promoteTab(key: string) {
            // TODO: We need to add support for array item level patching.
            var url = StringUtil.format('/api/v1/project/{id}', { id: this.projectId() });
            var data = [key];
            ko.utils.arrayForEach(this.promotedTabs(), (item: string) => data.push(item));
            this.patch(url, { PromotedTabs: data },
                (data) => {
                    this.promotedTabs(data);
                    window.location.hash = '#ex-' + key.toLowerCase();
                    window.location.reload();
                },
                'An error occurred while promoting this tab.');
        }
        // Removes the given tab key from the promoted tabs via the API,
        // then returns to the extended tab and reloads the page.
        public demoteTab(key: string) {
            // TODO: We need to add support for array item level patching.
            var url = StringUtil.format('/api/v1/project/{id}', { id: this.projectId() });
            var data = [];
            ko.utils.arrayForEach(this.promotedTabs(), (item: string) => {
                if (item !== key)
                    data.push(item)
            });
            this.patch(url, { PromotedTabs: data },
                (data) => {
                    this.promotedTabs(data);
                    window.location.hash = '#extended';
                    window.location.reload();
                },
                'An error occurred while demoting this tab.');
        }
    }
}
// Load environment variables from the NODE_ENV-specific dotenv file
// (e.g. .env.development / .env.production).
require("dotenv").config({
  path: `.env.${process.env.NODE_ENV}`,
})
module.exports = {
  plugins: [
    // Google Analytics (gtag.js); the tracking id comes from the env file.
    {
      resolve: "gatsby-plugin-google-gtag",
      options: {
        trackingIds: [process.env.GA_TRACKING_ID],
        pluginConfig: {
          head: true,
        },
      },
    },
    "gatsby-plugin-react-helmet",
    // Expose src/markdown as a filesystem source named "markdown".
    {
      resolve: "gatsby-source-filesystem",
      options: {
        name: "markdown",
        path: `${__dirname}/src/markdown`,
      },
    },
    // Import SVGs under src/assets/svgs as React components.
    {
      resolve: "gatsby-plugin-react-svg",
      options: {
        rule: {
          include: `${__dirname}/src/assets/svgs`,
        },
      },
    },
    // Markdown processing: shiki syntax highlighting (nord theme) and
    // removal of HTML comments.
    {
      resolve: "gatsby-transformer-remark",
      options: {
        plugins: [
          {
            resolve: "gatsby-remark-shiki",
            options: {
              theme: "nord",
            },
          },
          "gatsby-remark-remove-comments",
        ],
      },
    },
  ],
}
|
def get_distance(p1, p2):
    """Return the Euclidean distance between two 2-D points (x, y)."""
    dx = p2[0] - p1[0]
    dy = p2[1] - p1[1]
    return (dx ** 2 + dy ** 2) ** 0.5
print(get_distance((1,1), (9,9)))
<gh_stars>0
#!/usr/bin/env python
"""Setup file for the bumblebee-status bar to allow pip install of full package"""
# -*- coding: utf8 - *-
from setuptools import setup
import versioneer
# Base (always-installed) requirements: one per non-empty line.
with open('requirements/base.txt') as f:
    INSTALL_REQS = [line for line in f.read().split('\n') if line]
# Module packages
def read_module(filename):
    """Read a module's requirements file and return its non-empty lines."""
    path = 'requirements/modules/{}.txt'.format(filename)
    with open(path) as handle:
        return [entry for entry in handle.read().split('\n') if entry]
# Optional per-module requirements, installable as pip extras,
# e.g. `pip install bumblebee-status[cpu]`.
EXTRAS_REQUIREMENTS_MAP = {
    "battery-upower": read_module("battery_upower_reqs"),
    "cpu": read_module("cpu"),
    "cpu2": read_module("cpu2"),
    "currency": read_module("currency"),
    "docker_ps": read_module("docker_ps"),
    "dunst": read_module("dunst"),
    "getcrypto": read_module("getcrypto"),
    "git": read_module("git"),
    "github": read_module("github"),
    "hddtemp": read_module("hddtemp"),
    "layout-xkb": read_module("layout_xkb"),
    "memory": read_module("memory"),
    "network_traffic": read_module("network_traffic"),
    "nic": read_module("nic"),
    "pihole": read_module("pihole"),
    "rss": read_module("rss"),
    "spaceapi": read_module("spaceapi"),
    "spotify": read_module("spotify"),
    "stock": read_module("stock"),
    "sun": read_module("sun"),
    "system": read_module("system"),
    "taskwarrior": read_module("taskwarrior"),
    "title": read_module("title"),
    "traffic": read_module("traffic"),
    "weather": read_module("weather"),
    "yubikey": read_module("yubikey"),
}
# NOTE(review): mid-file import kept in place to preserve the original layout.
import glob
setup(
    install_requires=INSTALL_REQS,
    extras_require=EXTRAS_REQUIREMENTS_MAP,
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    zip_safe=False,
    # Ship theme and icon JSON files alongside the package.
    data_files=[('share/bumblebee-status/themes', glob.glob('themes/*.json')),
                ('share/bumblebee-status/themes/icons', glob.glob('themes/icons/*.json'))
    ]
)
|
<filename>7-assets/_SNIPPETS/bryan-guner-gists/_JAVASCRIPT/isDir.js<gh_stars>0
import fs from 'fs';
fs.readdir( './', {
withFileTypes: true
}, ( err, files ) => {
if ( err ) {
console.error( err )
return
}
console.log( 'files: ' )
files.forEach( file => {
// the `isDirectory` method returns true if the entry is a directory
const type = file.isDirectory() ? '📂' : '📄'
console.log( type, file.name )
} )
} )
|
<reponame>oskar-taubert/pydca
from __future__ import absolute_import, division
import unittest
import os
import glob
from pydca.fasta_reader import fasta_reader
from .input_files_path import InputFilesPath
class TestCase(unittest.TestCase):
    """Smoke tests: fasta_reader must load RNA and protein MSA files."""
    def setUp(self):
        """Capture test input paths and biomolecule type tags."""
        self.__rna_msa_file = InputFilesPath.rna_msa_file
        self.__protein_msa_file = InputFilesPath.protein_msa_file
        self.__rna = 'rna'
        self.__protein = 'protein'
    def test_get_alignment_from_fasta_file(self):
        """Alignments read from the FASTA files must not be None."""
        rna_seqs = fasta_reader.get_alignment_from_fasta_file(
            self.__rna_msa_file,
        )
        self.assertIsNotNone(rna_seqs)
        protein_seqs = fasta_reader.get_alignment_from_fasta_file(
            self.__protein_msa_file,
        )
        self.assertIsNotNone(protein_seqs)
    def test_get_alignment_int_form(self):
        """Integer-encoded alignments must not be None."""
        rna_seqs_int_form = fasta_reader.get_alignment_int_form(
            self.__rna_msa_file,
            biomolecule = self.__rna,
        )
        self.assertIsNotNone(rna_seqs_int_form)
        protein_seqs_int_form = fasta_reader.get_alignment_int_form(
            self.__protein_msa_file,
            biomolecule = self.__protein,
        )
        self.assertIsNotNone(protein_seqs_int_form)
if __name__ == '__main__':
    unittest.main()
|
import math
import re
import numpy as np
def prime(n):
    """Return True when n is prime (trial division by odd divisors)."""
    if n == 2:
        return True
    if n % 2 == 0 or n <= 1:
        return False
    limit = int(math.sqrt(n)) + 1
    # Once 2 has been ruled out, only odd candidates need checking.
    return all(n % candidate for candidate in range(3, limit, 2))
#print(prime(31))
def sortWords(path):
    """Return the words of the text file at ``path``, lower-cased and sorted.

    Splits on runs of non-word characters; the final fragment that
    re.split produces is dropped (the original relied on a trailing
    non-word character such as a newline producing an empty fragment).
    """
    # r'\W+' instead of '\W+': the unraw form is an invalid escape
    # sequence and raises a DeprecationWarning on modern Python.
    # Also close the file instead of leaking the handle.
    with open(path) as handle:
        text = handle.read()
    return sorted(word.lower() for word in re.split(r'\W+', text)[:-1])
#print(sortWords('./Latin-Lipsum.txt'))
def dotProduct(matrix, vector):
    """Multiply ``matrix`` by ``vector`` and return the result as a list.

    Prints a message and returns None when the shapes are incompatible.
    """
    if len(matrix[0]) != len(vector):
        print("Can't be multiplied")
        return None
    return [sum(a * b for a, b in zip(row, vector)) for row in matrix]
#print(dotProduct([[1, 2, 3, 4], [11, 12, 13, 14], [21, 22, 23, 24]], [2, -5, 7, -10]))
########################################
def first():
    """Demonstrate NumPy slicing on a fixed 3x4 matrix (prints results)."""
    m = np.array([[1, 2, 3, 4], [11, 12, 13, 14], [21, 22, 23, 24]])
    # First two rows / last two columns, then last row / last two columns.
    print(m[0:2, -2:])
    print(m[-1, -2:])
#first()
def second():
    """Demonstrate element-wise NumPy operations on two random 3-vectors."""
    a = np.random.uniform(size=(3,))
    b = np.random.uniform(size=(3,))
    bigger_first = np.sum(a) > np.sum(b)
    print("sum(vector1) > sum(vector2)" if bigger_first
          else "sum(vector2) > sum(vector1)")
    print(np.add(a, b))
    print(np.cross(a, b))
    print(np.dot(a, b))
    print(np.sqrt(a))
    print(np.sqrt(b))
#second()
def third():
    """Demonstrate transpose, inverse, determinant and mat-vec product."""
    m = np.random.uniform(size=(5, 5))
    print(m.T)
    print(np.linalg.inv(m))
    print(np.linalg.det(m))
    v = np.random.uniform(size=(5,))
    print(np.dot(m, v))
third()
|
// ESLint configuration: TypeScript parser + React, import hygiene,
// react-hooks and Jest plugins, with Prettier disabling style rules.
module.exports = {
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'import', 'react-hooks', 'jest'],
  extends: [
    'plugin:react/recommended',
    'plugin:@typescript-eslint/recommended',
    'prettier',
    'plugin:import/typescript',
    'plugin:jest/recommended',
  ],
  parserOptions: {
    ecmaVersion: 2020,
    sourceType: 'module',
    ecmaFeatures: {
      jsx: true,
    },
  },
  rules: {
    // Keep feature code from reaching into shared internals directly.
    'no-restricted-imports': ['error', { patterns: ['**/shared/**'] }],
    '@typescript-eslint/ban-ts-ignore': 'off',
    '@typescript-eslint/ban-types': 'off',
    '@typescript-eslint/camelcase': 'off',
    '@typescript-eslint/explicit-function-return-type': 'off',
    '@typescript-eslint/explicit-member-accessibility': 'off',
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    '@typescript-eslint/no-non-null-assertion': 'off',
    '@typescript-eslint/no-object-literal-type-assertion': 'off',
    '@typescript-eslint/no-unused-vars': 'off',
    '@typescript-eslint/no-use-before-define': ['error', { functions: false }],
    'import/no-duplicates': ['error'],
    'import/no-extraneous-dependencies': ['error'],
    'import/order': [
      'error',
      {
        'newlines-between': 'always',
      },
    ],
    'react/display-name': 'off',
    'react/prop-types': 'off',
    'react/react-in-jsx-scope': 'off',
    'react-hooks/rules-of-hooks': 'error',
    'react-hooks/exhaustive-deps': 'error',
  },
  overrides: [
    {
      files: ['*.tsx'],
      rules: {
        '@typescript-eslint/explicit-module-boundary-types': 'off',
      },
    },
  ],
  settings: {
    react: {
      version: 'detect',
    },
  },
  // Surface eslint-disable comments that no longer suppress anything.
  reportUnusedDisableDirectives: true,
}
|
<filename>cmd/diplomat/internal/field_searcher.go
package internal
import (
"github.com/tony84727/diplomat/pkg/reflecthelper"
"reflect"
)
// FieldSearcher searches the fields of a reflect value for a field
// carrying a matching "navigate" struct tag.
type FieldSearcher struct {
	// value is the struct value whose fields are searched; NumField is
	// called on it directly, so it must be of struct kind.
	value reflect.Value
}
// Search returns the field whose `navigate` tag equals name. Direct
// fields are checked first; then each field's own fields are searched
// one level down (for embedded structs). ok reports whether a match
// was found.
func (f FieldSearcher) Search(name string) (value reflect.Value, ok bool) {
	t := f.value.Type()
	// First pass: fields declared directly on the struct.
	for i := 0; i < t.NumField(); i++ {
		if tag, found := t.Field(i).Tag.Lookup("navigate"); found && tag == name {
			return f.value.Field(i), true
		}
	}
	// Second pass: one level down, for embedded structs.
	for i := 0; i < t.NumField(); i++ {
		inner := reflecthelper.Actual(f.value.Field(i))
		innerType := inner.Type()
		for j := 0; j < innerType.NumField(); j++ {
			if tag, found := innerType.Field(j).Tag.Lookup("navigate"); found && tag == name {
				return inner.Field(j), true
			}
		}
	}
	return reflect.Value{}, false
}
|
#!/usr/bin/env bash
set -e
source $(dirname "$0")/common.sh
source $(dirname "$0")/config.sh
# generate clients
CLIENT_GEN_BASE=kubevirt.io/client-go/generated
# Start from a clean slate: remove previously generated client code.
rm -rf ${KUBEVIRT_DIR}/staging/src/${CLIENT_GEN_BASE}
# KubeVirt stuff
swagger-doc -in ${KUBEVIRT_DIR}/staging/src/kubevirt.io/client-go/apis/snapshot/v1alpha1/types.go
swagger-doc -in ${KUBEVIRT_DIR}/staging/src/kubevirt.io/client-go/apis/flavor/v1alpha1/types.go
deepcopy-gen --input-dirs kubevirt.io/client-go/apis/snapshot/v1alpha1,kubevirt.io/client-go/apis/flavor/v1alpha1 \
    --bounding-dirs kubevirt.io/client-go/apis \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
openapi-gen --input-dirs kubevirt.io/client-go/apis/snapshot/v1alpha1,kubevirt.io/client-go/apis/flavor/v1alpha1,k8s.io/api/core/v1,k8s.io/apimachinery/pkg/apis/meta/v1,kubevirt.io/client-go/apis/core/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package kubevirt.io/client-go/apis/snapshot/v1alpha1 \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt >${KUBEVIRT_DIR}/api/api-rule-violations.list
# Fail when openapi-gen reports rule violations not in the known list.
if cmp ${KUBEVIRT_DIR}/api/api-rule-violations.list ${KUBEVIRT_DIR}/api/api-rule-violations-known.list; then
    echo "openapi generated"
else
    diff -u ${KUBEVIRT_DIR}/api/api-rule-violations-known.list ${KUBEVIRT_DIR}/api/api-rule-violations.list || true
    echo "You introduced new API rule violation"
    diff ${KUBEVIRT_DIR}/api/api-rule-violations.list ${KUBEVIRT_DIR}/api/api-rule-violations-known.list
    exit 2
fi
client-gen --clientset-name versioned \
    --input-base kubevirt.io/client-go/apis \
    --input snapshot/v1alpha1,flavor/v1alpha1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/kubevirt/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
# dependencies
client-gen --clientset-name versioned \
    --input-base kubevirt.io/containerized-data-importer/pkg/apis \
    --input core/v1beta1,upload/v1beta1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/containerized-data-importer/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
client-gen --clientset-name versioned \
    --input-base github.com/coreos/prometheus-operator/pkg/apis \
    --input monitoring/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/prometheus-operator/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
client-gen --clientset-name versioned \
    --input-base github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/apis \
    --input k8s.cni.cncf.io/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/network-attachment-definition-client/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
client-gen --clientset-name versioned \
    --input-base github.com/kubernetes-csi/external-snapshotter/v2/pkg/apis \
    --input volumesnapshot/v1beta1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/external-snapshotter/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
# Wipe all previously generated Go files, then regenerate them.
find ${KUBEVIRT_DIR}/pkg/ -name "*generated*.go" -exec rm {} -f \;
${KUBEVIRT_DIR}/hack/build-go.sh generate ${WHAT}
deepcopy-gen --input-dirs ./pkg/virt-launcher/virtwrap/api \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt
# Generate validation with controller-gen and create go file for them
(
    cd ${KUBEVIRT_DIR}/staging/src/kubevirt.io/client-go &&
    # suppress -mod=vendor
    GOFLAGS= controller-gen crd:allowDangerousTypes=true paths=./apis/core/v1/
    #include snapshot
    GOFLAGS= controller-gen crd paths=./apis/snapshot/v1alpha1/
    #include flavor
    GOFLAGS= controller-gen crd paths=./apis/flavor/v1alpha1/
    # strip the preamble controller-gen emits before the actual YAML
    cd config/crd
    for file in *; do
        tail -n +3 $file >$file"new"
        mv $file"new" $file
    done
    cd ${KUBEVIRT_DIR}/tools/crd-validation-generator/ && go_build
    cd ${KUBEVIRT_DIR}
    ${KUBEVIRT_DIR}/tools/crd-validation-generator/crd-validation-generator
)
rm -rf ${KUBEVIRT_DIR}/staging/src/kubevirt.io/client-go/config
# BUGFIX: dropped the stray leading '/' that prefixed ${KUBEVIRT_DIR}
# (it produced an invalid path whenever KUBEVIRT_DIR is relative).
${KUBEVIRT_DIR}/hack/bootstrap-ginkgo.sh
(cd ${KUBEVIRT_DIR}/tools/openapispec/ && go_build)
# Dump the aggregated API spec to swagger.json.
${KUBEVIRT_DIR}/tools/openapispec/openapispec --dump-api-spec-path ${KUBEVIRT_DIR}/api/openapi-spec/swagger.json
(cd ${KUBEVIRT_DIR}/tools/resource-generator/ && go_build)
(cd ${KUBEVIRT_DIR}/tools/csv-generator/ && go_build)
(cd ${KUBEVIRT_DIR}/tools/doc-generator/ && go_build)
(
    cd ${KUBEVIRT_DIR}/docs
    ${KUBEVIRT_DIR}/tools/doc-generator/doc-generator
    mv newmetrics.md metrics.md
)
# Regenerate manifests and examples from scratch.
rm -f ${KUBEVIRT_DIR}/manifests/generated/*
rm -f ${KUBEVIRT_DIR}/examples/*
ResourceDir=${KUBEVIRT_DIR}/manifests/generated
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=priorityclass >${ResourceDir}/kubevirt-priority-class.yaml
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=kv >${ResourceDir}/kv-resource.yaml
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=kv-cr --namespace='{{.Namespace}}' --pullPolicy='{{.ImagePullPolicy}}' --featureGates='{{.FeatureGates}}' >${ResourceDir}/kubevirt-cr.yaml.in
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=operator-rbac --namespace='{{.Namespace}}' >${ResourceDir}/rbac-operator.authorization.k8s.yaml.in
# used for Image fields in manifests
# Render the image suffix for a manifest template: a digest pin
# ("@<sha>") when the given template variable is set, otherwise a
# tag (":<DockerTag>").
function getVersion() {
    local ref="$1"
    printf '%s\n' "{{if ${ref}}}@{{${ref}}}{{else}}:{{.DockerTag}}{{end}}"
}
# Image reference suffix for each component: pinned digest or floating tag.
virtapi_version=$(getVersion ".VirtApiSha")
virtcontroller_version=$(getVersion ".VirtControllerSha")
virthandler_version=$(getVersion ".VirtHandlerSha")
virtlauncher_version=$(getVersion ".VirtLauncherSha")
virtoperator_version=$(getVersion ".VirtOperatorSha")
# used as env var for operator
# Render "@<sha>" when the given template variable is set, empty otherwise.
function getShasum() {
    local ref="$1"
    printf '%s\n' "{{if ${ref}}}@{{${ref}}}{{end}}"
}
# without the '@' symbole used in 'getShasum'
# Like getShasum but without the leading '@' symbol.
function getRawShasum() {
    local ref="$1"
    printf '%s\n' "{{if ${ref}}}{{${ref}}}{{end}}"
}
virtapi_sha=$(getShasum ".VirtApiSha")
virtcontroller_sha=$(getShasum ".VirtControllerSha")
virthandler_sha=$(getShasum ".VirtHandlerSha")
virtlauncher_sha=$(getShasum ".VirtLauncherSha")
gs_sha=$(getShasum ".GsSha")
virtapi_rawsha=$(getRawShasum ".VirtApiSha")
virtcontroller_rawsha=$(getRawShasum ".VirtControllerSha")
virthandler_rawsha=$(getRawShasum ".VirtHandlerSha")
virtlauncher_rawsha=$(getRawShasum ".VirtLauncherSha")
gs_rawsha=$(getRawShasum ".GsSha")
# The generation code for CSV requires a valid semver to be used.
# But we're trying to generate a template for a CSV here from code
# rather than an actual usable CSV. To work around this, we set the
# versions to something absurd and do a find/replace with our templated
# values after the file is generated.
_fake_replaces_csv_version="1111.1111.1111"
_fake_csv_version="2222.2222.2222"
${KUBEVIRT_DIR}/tools/csv-generator/csv-generator --namespace={{.CSVNamespace}} --dockerPrefix={{.DockerPrefix}} --operatorImageVersion="$virtoperator_version" --pullPolicy={{.ImagePullPolicy}} --verbosity={{.Verbosity}} --apiSha="$virtapi_rawsha" --controllerSha="$virtcontroller_rawsha" --handlerSha="$virthandler_rawsha" --launcherSha="$virtlauncher_rawsha" --gsSha="$gs_rawsha" --kubevirtLogo={{.KubeVirtLogo}} --csvVersion="$_fake_csv_version" --replacesCsvVersion="$_fake_replaces_csv_version" --csvCreatedAtTimestamp={{.CreatedAt}} --kubeVirtVersion={{.DockerTag}} >${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in
# Swap the placeholder versions for the real template variables.
sed -i "s/$_fake_csv_version/{{.CsvVersion}}/g" ${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in
sed -i "s/$_fake_replaces_csv_version/{{.ReplacesCsvVersion}}/g" ${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in
(cd ${KUBEVIRT_DIR}/tools/vms-generator/ && go_build)
vms_docker_prefix=${DOCKER_PREFIX:-registry:5000/kubevirt}
vms_docker_tag=${DOCKER_TAG:-devel}
${KUBEVIRT_DIR}/tools/vms-generator/vms-generator --container-prefix=${vms_docker_prefix} --container-tag=${vms_docker_tag} --generated-vms-dir=${KUBEVIRT_DIR}/examples
${KUBEVIRT_DIR}/hack/gen-proto.sh
# Generate mocks for the handler/launcher protobuf interfaces with mockgen.
mockgen -source pkg/handler-launcher-com/notify/info/info.pb.go -package=info -destination=pkg/handler-launcher-com/notify/info/generated_mock_info.go
mockgen -source pkg/handler-launcher-com/cmd/info/info.pb.go -package=info -destination=pkg/handler-launcher-com/cmd/info/generated_mock_info.go
mockgen -source pkg/handler-launcher-com/cmd/v1/cmd.pb.go -package=v1 -destination=pkg/handler-launcher-com/cmd/v1/generated_mock_cmd.go
|
/*
* Copyright (c) CERN 2013-2015
*
* Copyright (c) Members of the EMI Collaboration. 2010-2013
* See http://www.eu-emi.eu/partners for details on the copyright
* holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef JOBPARAMETERHANDLER_H_
#define JOBPARAMETERHANDLER_H_
#include <map>
#include <string>
#include <vector>
#include <boost/lexical_cast.hpp>
#include <boost/tuple/tuple.hpp>
namespace fts3
{
namespace cli
{
/**
 * The JobParameterHandler class contains list of string values corresponding
 * to transfer job parameter names. Moreover, it allows for mapping the
 * parameter names into the respective values.
 */
class JobParameterHandler
{
public:
/**
 * Default constructor.
 *
 * Sets the default values for some job parameters,
 * e.g. copy_pin_lifetime = 1
 */
JobParameterHandler();
/**
 * Destructor.
 */
virtual ~JobParameterHandler();
/**
 * The functional operator.
 *
 * Allows for assigning values to some chosen job parameters
 *
 * @param keys - vector with keys (e.g. keys[0] corresponds to values[0], and so on)
 * @param values - vector with values (e.g. keys[0] corresponds to values[0], and so on)
 */
void set(const std::vector<std::string>& keys, const std::vector<std::string>& values);
///@{
/**
 * names of transfer job parameters
 */
static const std::string GRIDFTP;
static const std::string DELEGATIONID;
static const std::string SPACETOKEN;
static const std::string SPACETOKEN_SOURCE;
static const std::string COPY_PIN_LIFETIME;
static const std::string BRING_ONLINE;
static const std::string LAN_CONNECTION;
static const std::string FAIL_NEARLINE;
static const std::string OVERWRITEFLAG;
static const std::string CHECKSUM_METHOD;
static const std::string CHECKSUM_MODE;
static const std::string REUSE;
static const std::string JOB_METADATA;
static const std::string RETRY;
static const std::string RETRY_DELAY;
static const std::string MULTIHOP;
static const std::string BUFFER_SIZE;
static const std::string NOSTREAMS;
static const std::string TIMEOUT;
static const std::string STRICT_COPY;
static const std::string CREDENTIALS;
static const std::string S3ALTERNATE;
///@}
/**
 * Gets the value corresponding to given parameter name
 * (an empty string if the parameter has not been set).
 *
 * @param name - parameter name
 *
 * @return parameter value
 */
inline std::string get(std::string name) const
{
if (parameters.count(name))
return parameters.at(name);
else
return std::string();
}
/**
 * Gets the value corresponding to given parameter name,
 * converted via boost::lexical_cast.
 *
 * @param name - parameter name
 * @param T - type of the returned value
 *
 * @return parameter value
 */
template<typename T>
inline T get(std::string name) const
{
return boost::lexical_cast<T>(this->get(name));
}
/**
 * Checks if the given parameter has been set
 *
 * @param name - parameter name
 *
 * @return true if the parameter value has been set
 */
inline bool isParamSet(std::string name)
{
return parameters.find(name) != parameters.end();
}
/**
 * Sets a single parameter value (overwrites any previous value).
 *
 * @param key - parameter name
 * @param value - parameter value
 */
inline void set(std::string key, std::string value)
{
parameters[key] = value;
}
private:
/// maps parameter names into values
std::map<std::string, std::string> parameters;
};
}
}
#endif /* JOBPARAMETERHANDLER_H_ */
|
import pandas as pd
from rasa_nlu.training_data import load_data
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.model import Trainer
# Read the raw survey responses.
# NOTE(review): `data` is never used below -- confirm whether this read is needed.
data = pd.read_csv('survey_data.csv')
# Load the NLU training examples (Rasa training-data JSON format).
training_data = load_data('survey_data.json')
# Build the model configuration from the YAML config file.
config = RasaNLUModelConfig(config_file="/path/to/config.yml")
# Train an NLU model and obtain an interpreter for parsing text.
trainer = Trainer(config)
interpreter = trainer.train(training_data)
# Parse the literal message 'summarize' with the trained model.
# NOTE(review): this performs intent parsing on the string 'summarize';
# it does not summarize the CSV data -- verify this matches the intent.
summary = interpreter.parse('summarize')
print(summary)
#!/bin/bash
# Used by the auto-gke-pvc-snapshots container: snapshot every Google PVC
# disk attached to the Origin cluster, then prune old auto-created snapshots.

# Retention window in days; callers may override via the environment.
DAYS_RETENTION="${DAYS_RETENTION:-14}"

# Snapshot each matching PVC disk in its own zone.
gcloud compute disks list --filter='description:* AND description~kubernetes.io/created-for/pvc/name AND name~gke-origin-* AND description~prod' --format='value(name,zone)' | while read -r disk zone; do
  gcloud compute disks snapshot "${disk}" --snapshot-names autogcs-"${disk:0:31}"-"$(date '+%Y-%m-%d-%s')" --zone "${zone}"
  echo "Snapshot created"
done

# Remove auto-created snapshots older than the retention window.
cutoff=$(date -d "-${DAYS_RETENTION} days" "+%Y-%m-%d")
gcloud compute snapshots list --filter="creationTimestamp<${cutoff} AND name~'autogcs.*'" --uri | while read -r snapshot_uri; do
  gcloud compute snapshots delete "${snapshot_uri}" --quiet
done
|
/**
 * Jest configuration for the Vue + TypeScript test suite.
 * Coverage and the HTML report are written under /test/out/<timestamp>/.
 */

// UTC timestamp used to give each test run a unique output directory,
// e.g. "05-Feb-2024-10:00:00-GMT" (toUTCString minus the weekday prefix).
const timestamp: string = new Date()
  .toUTCString()
  .slice(5)
  .replaceAll(" ", "-");

module.exports = {
  preset: "@vue/cli-plugin-unit-jest/presets/typescript-and-babel",
  transform: {
    "^.+\\.vue$": "vue-jest",
  },
  testMatch: ["**/*.test.{j,t}s", "**/*.spec.{j,t}s"],
  verbose: true,
  /** Generate /coverage directory and test results report */
  collectCoverage: true,
  collectCoverageFrom: [
    "<rootDir>/test/e2e/*.{js}",
    "<rootDir>/src/renderers/view/components/**/*.{js,vue}",
    "!**/node_modules/**",
  ],
  coverageReporters: ["html", "text-summary"],
  coverageDirectory: `<rootDir>/test/out/${timestamp}/coverage/`,
  reporters: [
    "default",
    [
      "./node_modules/jest-html-reporter",
      {
        pageTitle: "Test Report",
        /** @see https://github.com/Hargne/jest-html-reporter/wiki/configuration */
        /** Comment to sort by jest-order */
        sort: "status",
        includeFailureMsg: true,
        outputPath: `<rootDir>/test/out/${timestamp}/results.html`,
      },
    ],
  ],
  moduleNameMapper: {
    // BUG FIX: moduleNameMapper keys are regex *source strings* without
    // surrounding "/" delimiters. The previous key "/^@/(.*)$/" treated the
    // slashes as literal characters and could never match an "@/..." import,
    // so the alias was silently broken.
    "^@/(.*)$": "<rootDir>/$1",
  },
};
|
<reponame>slimbeek6/Employee-Directory-SL
import React, { createContext, useReducer, useContext } from "react";
// Context for sharing the employee list app-wide. The object below is only
// the *default* value, used when a consumer renders outside a Provider.
// NOTE(review): inside EmployeeProvider the provided value is the
// [state, dispatch] tuple (see below), not an object of this shape --
// confirm consumers expect the tuple form.
const EmployeeContext = createContext({
id: "",
name: "",
img: "",
phone: "",
email: "",
dob: ""
});
// Extract the Provider component so EmployeeProvider can wrap it below.
const { Provider } = EmployeeContext;
/**
 * Reducer for the employee list state.
 * - "add":  returns a new array with a record appended. The id is
 *           state.length * Math.random(), so the first id is always 0.
 * - "sort": returns a new array sorted by name, ascending.
 * - otherwise: returns the state unchanged.
 */
function reducer(state, action) {
  if (action.type === "add") {
    const employee = {
      id: state.length * Math.random(),
      name: action.name,
      img: action.img,
      phone: action.phone,
      email: action.email,
      dob: action.dob
    };
    return state.concat(employee);
  }
  if (action.type === "sort") {
    return state.slice().sort((a, b) => (a.name > b.name ? 1 : -1));
  }
  return state;
}
// Wraps children in the context Provider, supplying the [state, dispatch]
// tuple from useReducer. The `value` prop is destructured out (default [])
// and NOT forwarded -- the Provider always receives [state, dispatch] and
// the reducer state always starts as an empty array.
function EmployeeProvider({ value= [], ...props}) {
const [state, dispatch] = useReducer(reducer, []);
return <Provider value={[state, dispatch]}{...props} />;
}
// Convenience hook so consumers don't import EmployeeContext directly.
function useEmployeeContext(){
return useContext(EmployeeContext);
}
export { EmployeeProvider , useEmployeeContext};
# Install pre-requisites
sudo apt update && sudo apt install -y wget curl sudo git gnupg gnupg1 gnupg2 unzip zip
# Install Go (pinned to 1.15.6 linux/amd64) under the current directory
GO_PACKAGE=go1.15.6.linux-amd64.tar.gz
wget https://dl.google.com/go/$GO_PACKAGE
export GOROOT=$PWD/go-install
export GOPATH=$PWD/go-workspace
# The echo lines persist the *expanded* values to ~/.bashrc, so the paths
# are fixed to wherever this script was first run.
echo "export GOPATH=\"$GOPATH\"" >> ~/.bashrc
mkdir -p $GOPATH
export GOBIN=$GOROOT/bin
echo "export GOBIN=\"$GOBIN\"" >> ~/.bashrc
export PATH=$PATH:$GOBIN
# NOTE(review): unlike the lines above, this omits `export` and bakes the
# entire current PATH into ~/.bashrc -- confirm this is intended.
echo "PATH=\"$PATH\"" >> ~/.bashrc
tar -xzf $GO_PACKAGE
rm $GO_PACKAGE
# Rename the extracted "go" directory to the GOROOT chosen above
mv go $GOROOT
# Install Glide (Go dependency manager, v0.13.3)
wget https://github.com/Masterminds/glide/releases/download/v0.13.3/glide-v0.13.3-linux-amd64.tar.gz
tar -xvf glide-v0.13.3-linux-amd64.tar.gz
sudo cp linux-amd64/glide /bin/glide
rm -rf linux-amd64 glide-v0.13.3-linux-amd64.tar.gz
# Install Yarn from the official apt repository
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
sudo apt update && sudo apt install -y yarn
# Clone repo into the GOPATH layout expected by the glide-based build
mkdir -p $GOPATH/src/github.com/stellar/kelp
cd $GOPATH/src/github.com/stellar/kelp
git clone https://github.com/RivalCoins/StablecoinEngine.git .
# Install dependencies
glide install
# Install astilectron-bundler (bundles the desktop GUI)
go get -u github.com/asticode/go-astilectron-bundler/...
# Dev Build
./scripts/build.sh
# Confirm dev build
./bin/kelp version
# Production build (requires an Amplitude API key; placeholder used here)
export AMPLITUDE_API_KEY=foobar
echo "export AMPLITUDE_API_KEY=\"$AMPLITUDE_API_KEY\"" >> ~/.bashrc
./scripts/build.sh -d -f
# Confirm production build
echo "Confirm production build in the 'build' directory"
|
<filename>app/src/block/tests/services/block.service.spec.ts
import { Test, TestingModule } from '@nestjs/testing';
import { BlockService } from '../../services/block.service';
import { getModelToken } from '@nestjs/mongoose';
import { Query, Model } from 'mongoose';
import { BlockInterface } from '../../interfaces/block.interface';
import { BlockDocument } from '../../schemas/block.schema';
import { CreateBlockDto } from '../../dto/create-block.dto';
import { json } from 'stream/consumers';
import { BlockDTOs } from '../../dto/extra-models/block-models';
import {
mockParagraphBlock,
mockParagraphBlockDocument,
mapParagraphBlockArrToDocumentArr,
} from '../mocks/blocks';
// Fixture: five paragraph blocks (ids id-1..id-5) shared by the tests below.
const blockArr = [
mockParagraphBlock('id-1', 'One Block to rule them all'),
mockParagraphBlock('id-2', 'One Block to find them'),
mockParagraphBlock('id-3', 'One Block to bring them all'),
mockParagraphBlock('id-4', 'One Block to bring them all'),
mockParagraphBlock('id-5', 'And in the neatness bind them.'),
];
// Same fixture in mongoose-document form, as the mocked model returns it.
const blockDocumentArr = mapParagraphBlockArrToDocumentArr(blockArr);
describe('BlockService', () => {
  let service: BlockService;
  let model: Model<BlockDocument>;

  beforeEach(async () => {
    // Build a testing module with the mongoose Block model replaced by a
    // bag of jest mocks, so no real database is touched.
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        BlockService,
        {
          provide: getModelToken('Block'),
          useValue: {
            new: jest.fn().mockResolvedValue(mockParagraphBlock()),
            constructor: jest.fn().mockResolvedValue(mockParagraphBlock()),
            find: jest.fn(),
            findOne: jest.fn(),
            update: jest.fn(),
            create: jest.fn(),
            remove: jest.fn(),
            exec: jest.fn(),
          },
        },
      ],
    }).compile();
    service = module.get<BlockService>(BlockService);
    model = module.get<Model<BlockDocument>>(getModelToken('Block'));
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('findMany: should return all listed block ids', async () => {
    // Spy mock: model.find().exec() resolves to the full fixture array.
    jest.spyOn(model, 'find').mockReturnValue({
      exec: jest.fn().mockResolvedValueOnce(blockDocumentArr),
    } as any);
    // Exercise the service, which uses the model.find method.
    const foundBlocks = await service.findMany([
      'id-1',
      'id-2',
      'id-3',
      'id-4',
      'id-5',
    ]);
    // Assert that our service returns what model.find resolved to.
    expect(foundBlocks).toEqual(blockDocumentArr);
  });

  it('insertOne: returns created block', async () => {
    // Spy mock: if create gets called in service.insertOne we return a
    // mock created block.
    jest.spyOn(model, 'create').mockReturnValue({
      exec: jest.fn().mockResolvedValueOnce(mockParagraphBlockDocument()),
    } as any);
    // Exercise service.insertOne with a minimal paragraph-block payload.
    const createdBlock = await service.insertOne({
      object: 'block',
      type: 'paragraph',
      paragraph: {
        rich_text: [
          {
            type: 'text',
            text: {
              text: 'derp',
            },
          },
        ],
      },
    });
    // BUG FIX: this test previously contained no assertions at all, so it
    // could never fail. Assert the mocked create path was exercised and a
    // block was returned.
    expect(model.create).toHaveBeenCalled();
    expect(createdBlock).toBeDefined();
  });
});
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.tests.service;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.ofbiz.base.util.UtilDateTime;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.DelegatorFactory;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityOperator;
import org.opentaps.tests.OpentapsTestCase;
/**
 * Tests for the POJO service engine: parameter validation, error/failure
 * triggers, and successful runs of the "pojoTest" service (with and
 * without its follow-up SECA).
 */
public class POJOServiceTests extends OpentapsTestCase {

    private GenericValue user = null;
    private String key1Value = "TEST";
    private List key2Values = null;
    private Timestamp testTimestamp = null;

    public void setUp() throws Exception {
        super.setUp();
        user = delegator.findByPrimaryKey("UserLogin", UtilMisc.toMap("userLoginId", "admin"));
        // Because different databases do not store timestamps with the same
        // precision, we only compare the "yyyy-MM-dd HH:mm:ss" portion.
        java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        java.util.Date timeDate = sdf.parse(sdf.format(UtilDateTime.nowTimestamp()));
        testTimestamp = new java.sql.Timestamp(timeDate.getTime());
        key2Values = UtilMisc.toList("Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot");
        // Just to be sure -- if a test crashed somehow last time, these
        // records might still be hanging around.
        removeTestingRecords(delegator);
    }

    public void tearDown() throws Exception {
        super.tearDown();
        // delegator is reset to null by super.tearDown() so we have to get it again
        removeTestingRecords(DelegatorFactory.getDelegator(OpentapsTestCase.DELEGATOR_NAME));
    }

    /** Removes every ServiceTestRecord row created by these tests (key1 == "TEST"). */
    private void removeTestingRecords(Delegator delegator) throws GenericEntityException {
        delegator.removeByCondition("ServiceTestRecord", EntityCondition.makeCondition("key1", EntityOperator.EQUALS, "TEST"));
    }

    /** Omitting the userLogin must produce a service error. */
    public void testErrorDueToMissingUserLogin() {
        Map params = UtilMisc.toMap("key1Value", key1Value);
        runAndAssertServiceError("pojoTest", params);
    }

    /** Omitting required values (key2Values) must produce a service error. */
    public void testErrorDueToMissingRequiredValues() {
        Map params = UtilMisc.toMap("key1Value", key1Value, "userLogin", user);
        runAndAssertServiceError("pojoTest", params);
    }

    /** The errorTrigger flag must cause a service error. */
    public void testErrorOnTrigger() {
        // Boolean.TRUE instead of the deprecated new Boolean(true) constructor.
        Map params = UtilMisc.toMap("key1Value", key1Value, "key2Values", key2Values, "errorTrigger", Boolean.TRUE, "userLogin", user);
        runAndAssertServiceError("pojoTest", params);
    }

    /** The failureTrigger flag must cause a service failure (not an error). */
    public void testFailureOnTrigger() {
        Map params = UtilMisc.toMap("key1Value", key1Value, "key2Values", key2Values, "failureTrigger", Boolean.TRUE, "userLogin", user);
        runAndAssertServiceFailure("pojoTest", params);
    }

    /** A plain successful run sets value1 = 1 and leaves value2 unset. */
    public void testBasicSuccessfulRun() throws GenericEntityException {
        Map params = UtilMisc.toMap("key1Value", key1Value, "key2Values", key2Values, "testTimestamp", testTimestamp, "userLogin", user);
        runAndAssertServiceSuccess("pojoTest", params);
        List<GenericValue> testValues = POJOTestServices.getAllValues(key1Value, key2Values, delegator);
        // 1.0 / 2.0 are exactly representable, so the BigDecimal(double)
        // constructor is safe here (and keeps the scale the service uses).
        assertEachTestValueIsCorrect(testValues, new BigDecimal(1.0), null, testTimestamp, "admin", null);
    }

    /** With the followup SECA, value1 is bumped to 2 and value2/modifier set. */
    public void testSuccessfulRunWithSECA() throws GenericEntityException {
        Map params = UtilMisc.toMap("key1Value", key1Value, "key2Values", key2Values, "testTimestamp", testTimestamp, "followupTrigger", Boolean.TRUE, "userLogin", user);
        runAndAssertServiceSuccess("pojoTest", params);
        List<GenericValue> testValues = POJOTestServices.getAllValues(key1Value, key2Values, delegator);
        assertEachTestValueIsCorrect(testValues, new BigDecimal(2.0), new BigDecimal(1.0), testTimestamp, "admin", "admin");
    }

    /** Asserts every test record carries the expected values and audit fields. */
    private void assertEachTestValueIsCorrect(List<GenericValue> testValues, BigDecimal expectedValue1, BigDecimal expectedValue2, Timestamp expectedTimestamp, String expectedCreateUserLogin, String expectedModifiedUserLogin) {
        for (GenericValue testValue: testValues) {
            assertEquals(testValue + " value1 correct ", testValue.getBigDecimal("value1"), expectedValue1);
            assertEquals(testValue + " value2 correct ", testValue.getBigDecimal("value2"), expectedValue2);
            assertEquals(testValue + " testTimestamp correct ", testValue.getTimestamp("testTimestamp"), expectedTimestamp);
            assertEquals(testValue + " created user login correct ", testValue.getString("createdByUserLogin"), expectedCreateUserLogin);
            assertEquals(testValue + " modified user login correct ", testValue.getString("modifiedByUserLogin"), expectedModifiedUserLogin);
        }
    }
}
|
import sqlite3

# Create an in-memory SQLite3 library database.
db_connection = sqlite3.connect(':memory:')
cursor = db_connection.cursor()

# Create a table called 'books'.
cursor.execute('''CREATE TABLE books
(title text, author text, publisher text, year int)''')
db_connection.commit()


def search(search_type, search_term):
    """Search the books table on a single column.

    Args:
        search_type: the column to search -- 'title', 'author' or 'publisher'.
        search_term: substring to match (SQLite LIKE, so case-insensitive
            for ASCII text).

    Returns:
        A list of matching (title, author, publisher, year) rows, or an
        empty list when ``search_type`` is not a searchable column.

    The search term is always passed as a bound parameter, never
    interpolated into SQL; the column name comes from a fixed whitelist.
    """
    # BUG FIX: an unknown search_type previously left `results` unassigned
    # and raised UnboundLocalError; it now returns [] instead.
    searchable_columns = ('title', 'author', 'publisher')
    if search_type not in searchable_columns:
        return []
    cursor = db_connection.cursor()
    # Column name is whitelisted above; term stays a bound parameter.
    query = "SELECT * FROM books WHERE {} LIKE ?".format(search_type)
    cursor.execute(query, ('%' + search_term + '%',))
    results = cursor.fetchall()
    return results
package weixin.report.model;
import java.util.Date;
import org.apache.commons.lang.StringUtils;
import org.jeecgframework.poi.excel.annotation.Excel;
import weixin.util.DataDictionaryUtil.FlowType;
/**
 * Export model for merchant flow-recharge records. The field-level @Excel
 * annotations drive the exported column titles, order and formatting.
 *
 * @author parallel_line
 * @version 2016年9月26日 下午9:01:11
 */
public class MerchantCharge implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    /** Merchant account name. */
    @Excel(exportName = "商户名称",orderNum="1")
    private java.lang.String acctName;

    /** Merchant level code ("0".."3"), converted for export below. */
    @Excel(exportName = "商户等级",orderNum="10",exportConvertSign=1)
    private java.lang.String acctLevel;

    /** Flow-type code, converted to its display name for export below. */
    @Excel(exportName = "流量类型",exportConvertSign=1,orderNum="20")
    private java.lang.String flowType;

    /** Recharged flow amount. */
    @Excel(exportName = "流量值",orderNum="30")
    private java.lang.Double flowValue;

    /** Recharge timestamp. */
    @Excel(exportName = "充值时间",exportFieldWidth=20,orderNum="40",exportFormat="yyyy-MM-dd HH:mm:ss")
    private Date chargetime;

    /** Free-form description. */
    @Excel(exportName = "描述",orderNum="50")
    private java.lang.String des;

    /**
     * Converts the raw flow-type code to its display name for export;
     * unknown (or null) codes are returned unchanged.
     */
    public String convertGetFlowType() {
        boolean isCountry = StringUtils.equals(FlowType.country.getCode(), flowType);
        if (isCountry) {
            return FlowType.country.getName();
        }
        boolean isProvince = StringUtils.equals(FlowType.province.getCode(), flowType);
        return isProvince ? FlowType.province.getName() : flowType;
    }

    /**
     * Converts the raw account-level code ("0".."3") to its display grade
     * for export; unknown (or null) codes are returned unchanged.
     */
    public String convertGetAcctLevel() {
        if (acctLevel != null) {
            switch (acctLevel) {
                case "0": return "S级";
                case "1": return "A级";
                case "2": return "B级";
                case "3": return "C级";
                default: break;
            }
        }
        return acctLevel;
    }

    public String getAcctName() {
        return acctName;
    }

    public void setAcctName(java.lang.String acctName) {
        this.acctName = acctName;
    }

    public String getAcctLevel() {
        return acctLevel;
    }

    public void setAcctLevel(java.lang.String acctLevel) {
        this.acctLevel = acctLevel;
    }

    public String getFlowType() {
        return flowType;
    }

    public void setFlowType(java.lang.String flowType) {
        this.flowType = flowType;
    }

    public Double getFlowValue() {
        return flowValue;
    }

    public void setFlowValue(java.lang.Double flowValue) {
        this.flowValue = flowValue;
    }

    public Date getChargetime() {
        return chargetime;
    }

    public void setChargetime(Date chargetime) {
        this.chargetime = chargetime;
    }

    public String getDes() {
        return des;
    }

    public void setDes(java.lang.String des) {
        this.des = des;
    }

    public static long getSerialversionuid() {
        return serialVersionUID;
    }
}
|
/*
* Copyright (c) 2006-2015 <NAME> <<EMAIL>>,
* 2006-2009 <NAME> <<EMAIL>>,
* 2015 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <signal.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/time.h>
#include <fcntl.h>
#include <time.h>
#include <zlib.h>
#include <ctype.h>
#include <errno.h>
#include "common.h"
#include "version.h"
#include "dns.h"
#include "encoding.h"
#include "base32.h"
#include "base64.h"
#include "base64u.h"
#include "base128.h"
#include "user.h"
#include "login.h"
#include "tun.h"
#include "fw_query.h"
#include "util.h"
#include "server.h"
#include "window.h"
#ifdef HAVE_SYSTEMD
# include <systemd/sd-daemon.h>
#endif
#ifdef WINDOWS32
WORD req_version = MAKEWORD(2, 2);
WSADATA wsa_data;
#else
#include <err.h>
#endif
/* Sends a raw (non-DNS) UDP packet to a client: the fixed raw header,
 * a command byte whose low nibble carries the user id, then the payload. */
static void
send_raw(int fd, uint8_t *buf, size_t buflen, int user, int cmd, struct sockaddr_storage *from, socklen_t fromlen)
{
char packet[buflen + RAW_HDR_LEN];
int len = buflen;
/* header first, then optional payload */
memcpy(packet, raw_header, RAW_HDR_LEN);
if (len) {
memcpy(&packet[RAW_HDR_LEN], buf, len);
}
len += RAW_HDR_LEN;
/* low nibble of the command byte carries the user id */
packet[RAW_HDR_CMD] = cmd | (user & 0x0F);
DEBUG(3, "TX-raw: client %s (user %d), cmd %d, %d bytes",
format_addr(from, fromlen), user, cmd, len);
sendto(fd, packet, len, 0, (struct sockaddr *) from, fromlen);
}
/* Ringbuffer Query Handling (qmem) and DNS Cache:
This is used to make the handling of duplicates and query timeouts simpler
and all handled in one place.
Using this, lazy mode is possible with n queries (n <= windowsize)
New queries are placed consecutively in the buffer, replacing any old
queries (already responded to) if length == QMEM_LEN. Old queries are kept
as a record for duplicate requests. If a dupe is found and USE_DNSCACHE is
defined, the previous answer is sent (if it exists), otherwise an invalid
response is sent.
On the DNS cache:
This cache is implemented to better handle the aggressively impatient DNS
servers that very quickly re-send requests when we choose to not
immediately answer them in lazy mode. This cache works much better than
pruning(=dropping) the improper requests, since the DNS server will
actually get an answer instead of silence.
Because of the CMC in both ping and upstream data, unwanted cache hits
are prevented. Due to the combination of CMC and varying sequence IDs, it
is extremely unlikely that any duplicate answers will be incorrectly sent
during a session (given QMEM_LEN is not very large). */
/* Debug helper for qmem: prints "[QMEM u<id> (pending/windowsize)]" followed
 * by the printf-style message, but only when server.debug >= l. */
#define QMEM_DEBUG(l, u, ...) \
if (server.debug >= l) {\
TIMEPRINT("[QMEM u%d (%" L "u/%u)] ", u, users[u].qmem.num_pending, users[u].outgoing->windowsize); \
fprintf(stderr, __VA_ARGS__);\
fprintf(stderr, "\n");\
}
static void
qmem_init(int userid)
/* initialize user QMEM and DNS cache (if enabled) */
{
memset(&users[userid].qmem, 0, sizeof(struct qmem_buffer));
for (size_t i = 0; i < QMEM_LEN; i++) {
users[userid].qmem.queries[i].q.id = -1;
}
}
static int
qmem_is_cached(int dns_fd, int userid, struct query *q)
/* Check if an answer for a particular query is cached in qmem
 * If so, sends an "invalid" answer or one from DNS cache
 * Returns 0 if new query (ie. not cached), 1 if cached (and then answered) */
{
struct qmem_buffer *buf;
struct query *pq;
/* fallback reply when no cached answer: 1-byte "invalid" response */
char *data = "x";
char dataenc = 'T';
size_t len = 1;
int dnscache = 0;
buf = &users[userid].qmem;
/* Check if this is a duplicate query: same id, type and name
 * (names compared case-insensitively, as DNS names are) */
for (size_t p = buf->start; p != buf->end; p = (p + 1) % QMEM_LEN) {
pq = &buf->queries[p].q;
if (pq->id != q->id)
continue;
if (pq->type != q->type)
continue;
if (strcasecmp(pq->name, q->name))
continue;
/* Aha! A match! */
#ifdef USE_DNSCACHE
/* Check if answer is in DNS cache; if so, replay the cached answer
 * with the user's downstream encoding instead of the invalid reply */
if (buf->queries[p].a.len) {
data = (char *)buf->queries[p].a.data;
len = buf->queries[p].a.len;
dataenc = users[userid].downenc;
dnscache = 1;
}
#endif
QMEM_DEBUG(2, userid, "OUT from qmem for '%s', %s", q->name,
dnscache ? "answer from DNS cache" : "sending invalid response");
write_dns(dns_fd, q, data, len, dataenc);
return 1;
}
return 0;
}
static int
qmem_append(int userid, struct query *q)
/* Appends incoming query to the buffer; if the buffer is already full of
 * pending queries, the oldest one is answered first to make space.
 * Always returns 1. */
{
struct qmem_buffer *buf;
buf = &users[userid].qmem;
if (buf->num_pending >= QMEM_LEN) {
/* this means we have QMEM_LEN *pending* queries; respond to oldest
* one to make space for new query */
QMEM_DEBUG(2, userid, "Full of pending queries! Replacing old query %d with new %d.",
buf->queries[buf->start].q.id, q->id);
send_data_or_ping(userid, &buf->queries[buf->start].q, 0, 0, NULL);
}
if (buf->length < QMEM_LEN) {
buf->length++;
} else {
/* will replace oldest query (in buf->queries[buf->start]) */
buf->start = (buf->start + 1) % QMEM_LEN;
}
QMEM_DEBUG(5, userid, "add query ID %d, timeout %" L "u ms", q->id, timeval_to_ms(&users[userid].dns_timeout));
/* Copy query into end of buffer */
memcpy(&buf->queries[buf->end].q, q, sizeof(struct query));
#ifdef USE_DNSCACHE
/* invalidate any cached answer left over in the reused slot */
buf->queries[buf->end].a.len = 0;
#endif
buf->end = (buf->end + 1) % QMEM_LEN;
buf->num_pending += 1;
return 1;
}
static void
qmem_answered(int userid, uint8_t *data, size_t len)
/* Call when the oldest/first/earliest query added has been answered.
 * Advances the pending window; with USE_DNSCACHE, also stores the answer
 * so duplicate queries can be re-answered from cache. */
{
	struct qmem_buffer *buf;
	size_t answered;

	buf = &users[userid].qmem;
	if (buf->num_pending == 0) {
		/* Most likely caused by bugs somewhere else. */
		QMEM_DEBUG(1, userid, "Query answered with 0 in qmem! Fix bugs.");
		return;
	}
	answered = buf->start_pending;
	buf->start_pending = (buf->start_pending + 1) % QMEM_LEN;
	buf->num_pending -= 1;
#ifdef USE_DNSCACHE
	/* Add answer to query entry */
	if (len && data) {
		if (len > 4096) {
			QMEM_DEBUG(1, userid, "got answer with length >4096!");
		}
		memcpy(&buf->queries[answered].a.data, data, MIN(len, 4096));
		/* BUG FIX: store the *clamped* length. Previously the full `len`
		 * was recorded even though only 4096 bytes were copied, so a
		 * later cache hit (qmem_is_cached) could read past the end of
		 * the stored answer buffer. */
		buf->queries[answered].a.len = MIN(len, 4096);
	}
#endif
	QMEM_DEBUG(3, userid, "query ID %d answered", buf->queries[answered].q.id);
}
/* Gets the oldest query awaiting a response (for lazy mode), or NULL when
 * none is available. The query is NOT marked as "answered" here; that
 * happens later, once a response is actually sent. */
struct query *
qmem_get_next_response(int userid)
{
	struct qmem_buffer *buf = &users[userid].qmem;

	if (buf->length == 0 || buf->num_pending == 0) {
		return NULL;
	}

	struct query *next = &buf->queries[buf->start_pending].q;
	QMEM_DEBUG(3, userid, "next response using cached query: ID %d", next->id);
	return next;
}
static struct timeval
qmem_max_wait(int *touser, struct query **sendq)
/* Gets max interval before the next query has to be responded to
* Response(s) are sent automatically for queries if:
* - the query has timed out
* - the user has data to send or pending ACKs, and spare pending queries
* - the user has excess pending queries (>downstream window size)
* Returns largest safe time to wait before next timeout */
{
struct timeval now, timeout, soonest, tmp, age, nextresend;
/* default upper bound: never wait longer than 10 seconds */
soonest.tv_sec = 10;
soonest.tv_usec = 0;
int userid, nextuser = -1, resend = 0;
struct query *q = NULL, *nextq = NULL;
size_t sending, total, sent;
struct tun_user *u;
gettimeofday(&now, NULL);
/* scan every active user that has pending queries */
for (userid = 0; userid < created_users; userid++) {
if (!user_active(userid))
continue;
u = &users[userid];
if (u->qmem.num_pending == 0)
continue;
/* Keep track of how many fragments we can send */
if (u->lazy) {
total = window_sending(u->outgoing, &nextresend);
if ((nextresend.tv_sec != 0 || nextresend.tv_usec != 0)
&& u->qmem.num_pending >= 1) {
/* will use nextresend as max wait time if it is smallest
* and if user has spare queries */
resend = 1;
soonest = nextresend;
}
if (u->qmem.num_pending > u->outgoing->windowsize) {
/* calculate number of "excess" queries */
total = MAX(total, u->qmem.num_pending - u->outgoing->windowsize);
}
} else {
/* User in immediate mode, must answer all pending queries */
total = u->qmem.num_pending;
}
sending = total;
sent = 0;
int qnum = u->qmem.start_pending;
/* walk pending queries oldest-first */
for (; qnum != u->qmem.end; qnum = (qnum + 1) % QMEM_LEN) {
q = &u->qmem.queries[qnum].q;
/* queries will always be in time order */
timeradd(&q->time_recv, &u->dns_timeout, &timeout);
if (sending > 0 || !timercmp(&now, &timeout, <) || u->next_upstream_ack >= 0) {
/* respond to a query with ping/data if:
* - query has timed out (ping, or data if available)
* - user has pending data (always data)
* - user has pending ACK (either) */
timersub(&now, &q->time_recv, &age);
time_t age_ms = timeval_to_ms(&age);
/* only consider "immediate" when age is negligible */
int immediate = llabs(age_ms) <= 10;
QMEM_DEBUG(3, userid, "ANSWER: ID %d, age=%ldms (imm=%d), timeout %ldms, ACK %d,"
" sent %" L "u/%" L "u (+%" L "u)", q->id, age_ms, immediate,
timeval_to_ms(&u->dns_timeout), u->next_upstream_ack, sent, total, sending);
sent++;
send_data_or_ping(userid, q, 0, immediate, NULL);
if (sending > 0)
sending--;
continue;
}
timersub(&timeout, &now, &tmp);
if (timercmp(&tmp, &soonest, <)) {
/* the oldest non-timed-out query in the buffer will be the
* soonest to timeout for this user; we can skip the rest */
soonest = tmp;
nextuser = userid;
nextq = q;
break;
}
}
}
/* debug-only reporting of the wait interval chosen above */
if (server.debug >= 5) {
time_t soonest_ms = timeval_to_ms(&soonest);
if (nextq && nextuser >= 0) {
QMEM_DEBUG(5, nextuser, "can wait for %" L "d ms, will send id %d", soonest_ms, nextq->id);
} else {
if (nextuser < 0)
nextuser = 0;
if (soonest_ms != 10000 && resend) {
/* only if resending some frags */
QMEM_DEBUG(5, nextuser, "Resending some fragments, soonest = %d ms", soonest_ms);
if (soonest_ms == 0)
QMEM_DEBUG(5, nextuser, "soonest_ms == 0! tv=%ds,%dus", soonest.tv_sec, soonest.tv_usec);
} else {
QMEM_DEBUG(2, nextuser, "Don't need to send anything to any users, waiting %" L "d ms", soonest_ms);
}
}
}
/* report the chosen query/user to the caller (either pointer may be NULL) */
if (sendq)
*sendq = nextq;
if (touser)
*touser = nextuser;
return soonest;
}
/* Picks the DNS socket matching the client's address family:
 * the v6 socket for AF_INET6 clients, the v4 socket otherwise. */
static int
get_dns_fd(struct dnsfd *fds, struct sockaddr_storage *addr)
{
	return (addr->ss_family == AF_INET6) ? fds->v6fd : fds->v4fd;
}
static void
forward_query(int bind_fd, struct query *q)
/* Re-encodes a query and forwards it to the local NS on server.bind_port,
 * remembering the original sender (keyed by q->id) in the fw_query table
 * so the reply can be routed back to them. */
{
char buf[64*1024];
int len;
struct fw_query fwq;
struct sockaddr_in *myaddr;
in_addr_t newaddr;
len = dns_encode(buf, sizeof(buf), q, QR_QUERY, q->name, strlen(q->name));
if (len < 1) {
warnx("dns_encode doesn't fit");
return;
}
/* Store sockaddr for q->id */
memcpy(&(fwq.addr), &(q->from), q->fromlen);
fwq.addrlen = q->fromlen;
fwq.id = q->id;
fw_query_put(&fwq);
/* rewrite the destination to 127.0.0.1:bind_port (the local NS) */
newaddr = inet_addr("127.0.0.1");
myaddr = (struct sockaddr_in *) &(q->from);
memcpy(&(myaddr->sin_addr), &newaddr, sizeof(in_addr_t));
myaddr->sin_port = htons(server.bind_port);
DEBUG(2, "TX: NS reply");
if (sendto(bind_fd, buf, len, 0, (struct sockaddr*)&q->from, q->fromlen) <= 0) {
warn("forward query error");
}
}
/* Replies to a client's version handshake with VACK/VNAK/VFUL, a 4-byte
 * network-order payload, and (in the last byte) the low 8 bits of the
 * assigned user id, encoded with that user's downstream encoding. */
static void
send_version_response(int fd, version_ack_t ack, uint32_t payload, int userid, struct query *q)
{
	char out[9];
	uint32_t net_payload;

	switch (ack) {
	case VERSION_ACK:
		strncpy(out, "VACK", sizeof(out));
		break;
	case VERSION_NACK:
		strncpy(out, "VNAK", sizeof(out));
		break;
	case VERSION_FULL:
		strncpy(out, "VFUL", sizeof(out));
		break;
	}
	/* BUG FIX: write the payload with memcpy rather than the previous
	 * "*(uint32_t *)(out + 4) = htonl(payload)" cast, which performed a
	 * misaligned store through an incompatible pointer type -- undefined
	 * behavior (strict aliasing) and a crash on alignment-picky CPUs. */
	net_payload = htonl(payload);
	memcpy(out + 4, &net_payload, sizeof(net_payload));
	out[8] = userid & 0xff;
	write_dns(fd, q, out, sizeof(out), users[userid].downenc);
}
void
send_data_or_ping(int userid, struct query *q, int ping, int immediate, char *tcperror)
/* Sends current fragment to user, or a ping if no data available.
ping: 1=force send ping (even if data available), 0=only send if no data.
immediate: 1=not from qmem (ie. fresh query), 0=query is from qmem
tcperror: whether to tell user that TCP socket is closed (NULL if OK or pointer to error message) */
{
size_t datalen, headerlen;
fragment *f = NULL;
struct frag_buffer *out, *in;
in = users[userid].incoming;
out = users[userid].outgoing;
/* VLA sized for a max-length fragment plus the (larger) ping header */
uint8_t pkt[out->maxfraglen + DOWNSTREAM_PING_HDR];
if (!tcperror) {
f = window_get_next_sending_fragment(out, &users[userid].next_upstream_ack);
} else {
/* construct fake fragment containing error message. */
fragment fr;
f = &fr;
memset(f, 0, sizeof(fragment));
f->ack_other = -1;
f->len = strlen(tcperror);
/* NOTE(review): assumes f->data can hold the error text plus its NUL
terminator -- confirm against the fragment buffer size */
memcpy(f->data, tcperror, f->len);
f->data[f->len] = 0;
f->start = 1;
f->end = 1;
DEBUG(2, "Sending ping with TCP forward disconnect; error: %s", f->data);
}
/* Build downstream data/ping header (see doc/proto_xxxxxxxx.txt) for details */
if (!f) {
/* No data, send data/ping header (with extra info) */
ping = 1;
datalen = 0;
pkt[0] = 0; /* Pings don't need seq IDs unless they have data */
pkt[1] = users[userid].next_upstream_ack & 0xFF;
pkt[2] = (users[userid].next_upstream_ack < 0 ? 0 : 1) << 3;
users[userid].next_upstream_ack = -1;
} else {
datalen = f->len;
pkt[0] = f->seqID & 0xFF;
pkt[1] = f->ack_other & 0xFF;
/* flags byte: bit3 ACK-valid, bit2 compressed, bit1 first frag, bit0 last frag */
pkt[2] = ((f->ack_other < 0 ? 0 : 1) << 3) | ((f->compressed & 1) << 2) | (f->start << 1) | f->end;
headerlen = DOWNSTREAM_HDR;
}
/* If this is being responded to immediately (ie. not from qmem)
* This flag is used by client to calculate stats */
pkt[2] |= (immediate & 1) << 5;
if (tcperror) {
pkt[2] |= (1 << 6);
}
if (ping) {
/* set ping flag and build extra header */
pkt[2] |= 1 << 4;
pkt[3] = out->windowsize & 0xFF;
pkt[4] = in->windowsize & 0xFF;
pkt[5] = out->start_seq_id & 0xFF;
pkt[6] = in->start_seq_id & 0xFF;
headerlen = DOWNSTREAM_PING_HDR;
}
if (datalen + headerlen > sizeof(pkt)) {
warnx("send_data_or_ping: fragment too large to send! (%" L "u)", datalen);
window_tick(out);
return;
}
if (f) {
memcpy(pkt + headerlen, f->data, datalen);
}
write_dns(get_dns_fd(&server.dns_fds, &q->from), q, (char *)pkt,
datalen + headerlen, users[userid].downenc);
/* mark query as answered */
qmem_answered(userid, pkt, datalen + headerlen);
window_tick(out);
}
void
user_process_incoming_data(int userid, int ack)
/* Acknowledge ack (if >= 0) on the user's outgoing window, then keep
 * reassembling complete packets from the incoming window, handing each
 * one to handle_full_packet(), until no more can be reassembled. */
{
	uint8_t buf[65536];
	size_t len;
	uint8_t comp = 0;
	int more = 1;
	if (ack >= 0) {
		window_ack(users[userid].outgoing, ack);
		window_tick(users[userid].outgoing);
	}
	while (more == 1) {
		len = sizeof(buf);
		more = window_reassemble_data(users[userid].incoming, buf, &len, &comp);
		/* any upstream activity counts as "user seen" */
		users[userid].last_pkt = time(NULL);
		if (len > 0) {
			/* Data reassembled successfully + cleared out of buffer */
			handle_full_packet(userid, buf, len, comp);
		}
	}
}
static int
user_send_data(int userid, uint8_t *indata, size_t len, int compressed)
/* Appends data to a user's outgoing queue and sends it (in raw mode only) */
/* Converts the payload's compression state to match the user's
 * down_compression setting, then either queues it on the DNS window
 * (CONN_DNS_NULL) or transmits it directly (raw UDP). Returns the
 * window_add_outgoing_data() result, 1 on raw send, or 0 on failure.
 * NOTE(review): compress2/uncompress take uLongf* — passing &datalen
 * (size_t*) assumes the project headers make these compatible; verify. */
{
	size_t datalen;
	int ret = 0;
	uint8_t out[65536], *data;
	data = indata;
	datalen = len;
	/* use compressed or uncompressed packet to match user settings */
	if (users[userid].down_compression && !compressed) {
		datalen = sizeof(out);
		compress2(out, &datalen, indata, len, 9);
		data = out;
	} else if (!users[userid].down_compression && compressed) {
		datalen = sizeof(out);
		ret = uncompress(out, &datalen, indata, len);
		if (ret != Z_OK) {
			DEBUG(1, "FAIL: Uncompress == %d: %" L "u bytes to user %d!", ret, len, userid);
			return 0;
		}
	}
	compressed = users[userid].down_compression;
	if (users[userid].conn == CONN_DNS_NULL && data && datalen) {
		/* append new data to user's outgoing queue; sent later in qmem_max_wait */
		ret = window_add_outgoing_data(users[userid].outgoing, data, datalen, compressed);
	} else if (data && datalen) { /* CONN_RAW_UDP */
		if (!compressed)
			DEBUG(1, "Sending in RAW mode uncompressed to user %d!", userid);
		int dns_fd = get_dns_fd(&server.dns_fds, &users[userid].host);
		send_raw(dns_fd, data, datalen, userid, RAW_HDR_CMD_DATA,
			&users[userid].host, users[userid].hostlen);
		ret = 1;
	}
	return ret;
}
static int
user_send_tcp_disconnect(int userid, struct query *q, char *errormsg)
/* tell user that TCP socket has been disconnected */
{
	int notified = 0;
	users[userid].remote_forward_connected = -1;
	close_socket(users[userid].remote_tcp_fd);
	/* no pending query given: try to grab one from the query cache */
	if (q == NULL)
		q = qmem_get_next_response(userid);
	if (q != NULL) {
		send_data_or_ping(userid, q, 1, 0, errormsg);
		notified = 1;
	}
	/* user is dropped whether or not we managed to notify them */
	users[userid].active = 0;
	return notified;
}
static int
tunnel_bind()
/* Receive one reply from the real DNS server on server.bind_fd and
 * relay it back to the client that originally sent the query (the
 * client address is looked up by DNS id in the fw_query table). */
{
	char packet[64*1024];
	struct sockaddr_storage from;
	socklen_t fromlen;
	struct fw_query *query;
	unsigned short id;
	int dns_fd;
	int r;
	/* BUGFIX: was sizeof(struct sockaddr), which is smaller than
	 * sockaddr_storage and truncates larger source addresses (IPv6). */
	fromlen = sizeof(from);
	r = recvfrom(server.bind_fd, packet, sizeof(packet), 0,
		(struct sockaddr*)&from, &fromlen);
	if (r <= 0)
		return 0;
	id = dns_get_id(packet, r);
	DEBUG(3, "RX: Got response on query %u from DNS", (id & 0xFFFF));
	/* Get sockaddr from id */
	fw_query_get(id, &query);
	if (!query) {
		/* reply arrived for a query we no longer remember */
		DEBUG(2, "Lost sender of id %u, dropping reply", (id & 0xFFFF));
		return 0;
	}
	DEBUG(3, "TX: client %s id %u, %d bytes",
		format_addr(&query->addr, query->addrlen), (id & 0xffff), r);
	dns_fd = get_dns_fd(&server.dns_fds, &query->addr);
	if (sendto(dns_fd, packet, r, 0, (const struct sockaddr *) &(query->addr),
		query->addrlen) <= 0) {
		warn("forward reply error");
	}
	return 0;
}
static ssize_t
tunnel_tcp(int userid)
/* Drain one read() from the user's TCP forward socket and queue the
 * bytes for the user. On EOF or read error, tears the forward down via
 * user_send_tcp_disconnect() and returns -1; returns bytes read else. */
{
	uint8_t data[64*1024];
	char *errormsg = NULL;
	ssize_t nread;
	if (users[userid].remote_forward_connected != 1) {
		DEBUG(2, "tunnel_tcp: user %d TCP socket not connected!", userid);
		return 0;
	}
	nread = read(users[userid].remote_tcp_fd, data, sizeof(data));
	DEBUG(5, "read %ld bytes on TCP", nread);
	if (nread < 0) {
		errormsg = strerror(errno);
		DEBUG(1, "Error %d on TCP forward for user %d: %s", errno, userid, errormsg);
		user_send_tcp_disconnect(userid, NULL, errormsg);
		return -1;
	}
	if (nread == 0) {
		DEBUG(1, "EOF on TCP forward for user %d; closing connection.", userid);
		errormsg = "Connection closed by remote host.";
		user_send_tcp_disconnect(userid, NULL, errormsg);
		return -1;
	}
	user_send_data(userid, data, (size_t) nread, 0);
	return nread;
}
static int
tunnel_tun()
/* Read one packet from the tun device, find its destination user by IP
 * and queue it for that user. Returns 0 when nothing was forwarded,
 * otherwise the user_send_data() result. */
{
	static uint8_t in[64*1024];
	struct ip *header;
	int len;
	int userid;
	len = read_tun(server.tun_fd, in, sizeof(in));
	if (len <= 0)
		return 0;
	/* find target ip in packet, in is padded with 4 bytes TUN header */
	header = (struct ip*) (in + 4);
	userid = find_user_by_ip(header->ip_dst.s_addr);
	if (userid < 0)
		return 0;
	DEBUG(3, "IN: %d byte pkt from tun to user %d; compression %d",
		len, userid, users[userid].down_compression);
	return user_send_data(userid, in, len, 0);
}
static int
tunnel_dns(int dns_fd)
/* Read and dispatch one DNS query from dns_fd. Queries inside our
 * topdomain are handled by the protocol handlers; anything else is
 * forwarded to the real DNS server (if bind_fd is set). Always 0. */
{
	struct query q;
	int read;
	int domain_len;
	int inside_topdomain = 0;
	if ((read = read_dns(dns_fd, &q)) <= 0)
		return 0;
	DEBUG(3, "RX: client %s ID %5d, type %d, name %s",
		format_addr(&q.from, q.fromlen), q.id, q.type, q.name);
	/* inside_topdomain: name ends in server.topdomain (case-insensitive) */
	domain_len = strlen(q.name) - strlen(server.topdomain);
	if (domain_len >= 0 && !strcasecmp(q.name + domain_len, server.topdomain))
		inside_topdomain = 1;
	/* require dot before topdomain */
	if (domain_len >= 1 && q.name[domain_len - 1] != '.')
		inside_topdomain = 0;
	if (inside_topdomain) {
		/* This is a query we can handle */
		/* Handle A-type query for ns.topdomain, possibly caused
		   by our proper response to any NS request */
		if (domain_len == 3 && q.type == T_A &&
			(q.name[0] == 'n' || q.name[0] == 'N') &&
			(q.name[1] == 's' || q.name[1] == 'S') &&
			q.name[2] == '.') {
			handle_a_request(dns_fd, &q, 0);
			return 0;
		}
		/* Handle A-type query for www.topdomain, for anyone that's
		   poking around */
		if (domain_len == 4 && q.type == T_A &&
			(q.name[0] == 'w' || q.name[0] == 'W') &&
			(q.name[1] == 'w' || q.name[1] == 'W') &&
			(q.name[2] == 'w' || q.name[2] == 'W') &&
			q.name[3] == '.') {
			handle_a_request(dns_fd, &q, 1);
			return 0;
		}
		switch (q.type) {
		case T_NULL:
		case T_PRIVATE:
		case T_CNAME:
		case T_A:
		case T_MX:
		case T_SRV:
		case T_TXT:
		case T_PTR:
		case T_AAAA:
		case T_A6:
		case T_DNAME:
			/* encoding is "transparent" here */
			handle_null_request(dns_fd, &q, domain_len);
			break;
		case T_NS:
			handle_ns_request(dns_fd, &q);
			break;
		default:
			/* unsupported query type: silently ignored */
			break;
		}
	} else {
		/* Forward query to other port ? */
		DEBUG(2, "Requested domain outside our topdomain.");
		if (server.bind_fd) {
			forward_query(server.bind_fd, &q);
		}
	}
	return 0;
}
int
server_tunnel()
/* Main server loop: select() over the DNS sockets, the tun device, the
 * bind_fd (forwarded-query replies) and per-user TCP forward sockets,
 * dispatching each readable/writable fd, until server.running clears.
 * Returns 0 on clean shutdown, 1 on select() error. */
{
	struct timeval tv;
	fd_set read_fds, write_fds;
	int i;
	int userid;
	struct query *answer_now = NULL;
	time_t last_action = time(NULL);
	window_debug = server.debug;
	while (server.running) {
		int maxfd;
		/* max wait time based on pending queries */
		tv = qmem_max_wait(&userid, &answer_now);
		FD_ZERO(&read_fds);
		FD_ZERO(&write_fds);
		maxfd = 0;
		if (server.dns_fds.v4fd >= 0) {
			FD_SET(server.dns_fds.v4fd, &read_fds);
			maxfd = MAX(server.dns_fds.v4fd, maxfd);
		}
		if (server.dns_fds.v6fd >= 0) {
			FD_SET(server.dns_fds.v6fd, &read_fds);
			maxfd = MAX(server.dns_fds.v6fd, maxfd);
		}
		if (server.bind_fd) {
			/* wait for replies from real DNS */
			FD_SET(server.bind_fd, &read_fds);
			maxfd = MAX(server.bind_fd, maxfd);
		}
		/* Don't read from tun if all users have filled outpacket queues */
		if(!all_users_waiting_to_send()) {
			FD_SET(server.tun_fd, &read_fds);
			maxfd = MAX(server.tun_fd, maxfd);
		}
		/* add connected user TCP forward FDs to read set */
		maxfd = MAX(set_user_tcp_fds(&read_fds, 1), maxfd);
		/* add connectING user TCP FDs to write set */
		maxfd = MAX(set_user_tcp_fds(&write_fds, 2), maxfd);
		i = select(maxfd + 1, &read_fds, &write_fds, NULL, &tv);
		if(i < 0) {
			if (server.running)
				warn("select < 0");
			return 1;
		}
		if (i == 0) {
			/* select timed out: nothing to do but check for idle shutdown */
			if (server.max_idle_time) {
				/* only trigger the check if that's worth ( ie, no need to loop over if there
				   is something to send */
				if (difftime(time(NULL), last_action) > server.max_idle_time) {
					/* refresh last_action from the most recent user packet */
					for (userid = 0; userid < created_users; userid++) {
						last_action = (users[userid].last_pkt > last_action) ? users[userid].last_pkt : last_action;
					}
					if (difftime(time(NULL), last_action) > server.max_idle_time) {
						fprintf(stderr, "Server idle for too long, shutting down...\n");
						server.running = 0;
					}
				}
			}
		} else {
			if (FD_ISSET(server.tun_fd, &read_fds)) {
				tunnel_tun();
			}
			/* service user TCP forward sockets */
			for (userid = 0; userid < created_users; userid++) {
				if (FD_ISSET(users[userid].remote_tcp_fd, &read_fds) && users[userid].remoteforward_addr_len > 0) {
					DEBUG(4, "tunnel_tcp called for user %d", userid);
					tunnel_tcp(userid);
				} else if (users[userid].remote_forward_connected == 2 &&
					FD_ISSET(users[userid].remote_tcp_fd, &write_fds)) {
					/* writability signals that connect() completed */
					DEBUG(2, "User %d TCP socket now writable (connection established)", userid);
					users[userid].remote_forward_connected = 1;
				}
			}
			if (FD_ISSET(server.dns_fds.v4fd, &read_fds)) {
				tunnel_dns(server.dns_fds.v4fd);
			}
			if (FD_ISSET(server.dns_fds.v6fd, &read_fds)) {
				tunnel_dns(server.dns_fds.v6fd);
			}
			if (FD_ISSET(server.bind_fd, &read_fds)) {
				tunnel_bind();
			}
		}
	}
	return 0;
}
void
handle_full_packet(int userid, uint8_t *data, size_t len, int compressed)
/* Route one fully reassembled upstream packet: uncompress if needed,
 * then either deliver it user-to-user, write it to the tun device, or
 * (when a TCP forward is set up) write it to the user's TCP stream. */
{
	size_t rawlen;
	uint8_t out[64*1024], *rawdata;
	struct ip *hdr;
	int touser = -1;
	int ret;
	/* Check if data needs to be uncompressed */
	if (compressed) {
		rawlen = sizeof(out);
		ret = uncompress(out, &rawlen, data, len);
		rawdata = out;
	} else {
		rawlen = len;
		rawdata = data;
		ret = Z_OK;
	}
	if (ret == Z_OK) {
		if (users[userid].remoteforward_addr_len == 0) {
			/* BUGFIX: was (out + 4) — when the packet arrived
			 * uncompressed, 'out' is never written and the IP header
			 * was read from uninitialized stack memory. The payload
			 * (wherever it lives) is always at rawdata. */
			hdr = (struct ip*) (rawdata + 4);
			touser = find_user_by_ip(hdr->ip_dst.s_addr);
			DEBUG(2, "FULL PKT: %" L "u bytes from user %d (touser %d)", len, userid, touser);
			if (touser == -1) {
				/* send the uncompressed packet to tun device */
				write_tun(server.tun_fd, rawdata, rawlen);
			} else {
				/* don't re-compress if possible */
				if (users[touser].down_compression && compressed) {
					user_send_data(touser, data, len, 1);
				} else {
					user_send_data(touser, rawdata, rawlen, 0);
				}
			}
		} else {
			/* Write full pkt to user's remote forward TCP stream */
			if ((ret = write(users[userid].remote_tcp_fd, rawdata, rawlen)) != rawlen) {
				DEBUG(2, "Write error %d on TCP socket for user %d: %s", errno, userid, strerror(errno));
			}
		}
	} else {
		DEBUG(2, "Discarded pkt from user %d, uncompress()==%d, len=%" L "u, rawlen=%" L "u",
			userid, ret, len, rawlen);
	}
}
static void
handle_raw_login(uint8_t *packet, size_t len, struct query *q, int fd, int userid)
/* Raw-mode login: verify the client's hash of (seed + 1) for an already
 * DNS-authenticated user; on success switch the user to raw UDP and
 * reply with a hash of (seed - 1). Silently ignores a bad hash. */
{
	char myhash[16];
	if (len < 16) {
		DEBUG(2, "Invalid raw login packet: length %" L "u < 16 bytes!", len);
		return;
	}
	/* raw login is only allowed for users that already passed DNS auth */
	if (userid < 0 || userid >= created_users ||
		check_authenticated_user_and_ip(userid, q, server.check_ip) != 0) {
		DEBUG(2, "User %d not authenticated, ignoring raw login!", userid);
		return;
	}
	DEBUG(1, "RX-raw: login, len %" L "u, from user %d", len, userid);
	/* User sends hash of seed + 1 */
	login_calculate(myhash, 16, server.password, users[userid].seed + 1);
	if (memcmp(packet, myhash, 16) == 0) {
		/* Update time info for user */
		users[userid].last_pkt = time(NULL);
		/* Store remote IP number */
		memcpy(&(users[userid].host), &(q->from), q->fromlen);
		users[userid].hostlen = q->fromlen;
		/* Correct hash, reply with hash of seed - 1 */
		user_set_conn_type(userid, CONN_RAW_UDP);
		login_calculate(myhash, 16, server.password, users[userid].seed - 1);
		send_raw(fd, (uint8_t *)myhash, 16, userid, RAW_HDR_CMD_LOGIN, &q->from, q->fromlen);
		users[userid].authenticated_raw = 1;
	}
}
static void
handle_raw_data(uint8_t *packet, size_t len, struct query *q, int userid)
/* Raw-mode data packet: after IP and raw-auth checks, hand the payload
 * to handle_full_packet() as a full (compressed) packet. */
{
	if (check_authenticated_user_and_ip(userid, q, server.check_ip) != 0)
		return;
	if (!users[userid].authenticated_raw)
		return;
	/* Update time info for user */
	users[userid].last_pkt = time(NULL);
	DEBUG(3, "RX-raw: full pkt raw, length %" L "u, from user %d", len, userid);
	handle_full_packet(userid, packet, len, 1);
}
static void
handle_raw_ping(struct query *q, int dns_fd, int userid)
/* Raw-mode keepalive: after IP and raw-auth checks, bounce an empty
 * RAW_HDR_CMD_PING reply back to the sender. */
{
	if (check_authenticated_user_and_ip(userid, q, server.check_ip) != 0)
		return;
	if (!users[userid].authenticated_raw)
		return;
	/* Update time info for user */
	users[userid].last_pkt = time(NULL);
	DEBUG(3, "RX-raw: ping from user %d", userid);
	/* Send ping reply */
	send_raw(dns_fd, NULL, 0, userid, RAW_HDR_CMD_PING, &q->from, q->fromlen);
}
static int
raw_decode(uint8_t *packet, size_t len, struct query *q, int dns_fd)
/* Try to interpret packet as a raw-mode (non-DNS) packet. Returns 1 if
 * it was consumed as raw, 0 if it should be parsed as DNS instead. */
{
	uint8_t raw_cmd;
	int raw_user;
	/* minimum length */
	if (len < RAW_HDR_LEN)
		return 0;
	/* should start with header */
	if (memcmp(packet, raw_header, RAW_HDR_IDENT_LEN))
		return 0;
	raw_cmd = RAW_HDR_GET_CMD(packet);
	raw_user = RAW_HDR_GET_USR(packet);
	DEBUG(3, "RX-raw: client %s, user %d, raw command 0x%02X, length %" L "u",
		format_addr(&q->from, q->fromlen), raw_user, raw_cmd, len);
	/* strip the raw header before dispatching the payload */
	packet += RAW_HDR_LEN;
	len -= RAW_HDR_LEN;
	if (raw_cmd == RAW_HDR_CMD_LOGIN) {
		/* Login challenge */
		handle_raw_login(packet, len, q, dns_fd, raw_user);
	} else if (raw_cmd == RAW_HDR_CMD_DATA) {
		/* Data packet */
		handle_raw_data(packet, len, q, raw_user);
	} else if (raw_cmd == RAW_HDR_CMD_PING) {
		/* Keepalive packet */
		handle_raw_ping(q, dns_fd, raw_user);
	} else {
		DEBUG(1, "Unhandled raw command %02X from user %d", raw_cmd, raw_user);
	}
	return 1;
}
int
read_dns(int fd, struct query *q)
/* Receive one datagram from fd into q. Raw-mode packets are dispatched
 * inline (and 0 returned); otherwise the packet is DNS-decoded into q
 * and the destination address is recovered from ancillary data (POSIX
 * path only). Returns strlen(q->name) on success, 0 on error/raw. */
{
	struct sockaddr_storage from;
	socklen_t addrlen;
	uint8_t packet[64*1024];
	int r;
#ifndef WINDOWS32
	/* recvmsg() with ancillary data so we can learn which local address
	 * the query was sent to (needed for handle_dns_ip_request) */
	char control[CMSG_SPACE(sizeof (struct in6_pktinfo))];
	struct msghdr msg;
	struct iovec iov;
	struct cmsghdr *cmsg;
	addrlen = sizeof(struct sockaddr_storage);
	iov.iov_base = packet;
	iov.iov_len = sizeof(packet);
	msg.msg_name = (caddr_t) &from;
	msg.msg_namelen = (unsigned) addrlen;
	msg.msg_iov = &iov;
	msg.msg_iovlen = 1;
	msg.msg_control = control;
	msg.msg_controllen = sizeof(control);
	msg.msg_flags = 0;
	r = recvmsg(fd, &msg, 0);
#else
	/* Windows has no recvmsg(): plain recvfrom, no destination info */
	addrlen = sizeof(struct sockaddr_storage);
	r = recvfrom(fd, packet, sizeof(packet), 0, (struct sockaddr*)&from, &addrlen);
#endif /* !WINDOWS32 */
	if (r > 0) {
		memcpy(&q->from, &from, addrlen);
		q->fromlen = addrlen;
		gettimeofday(&q->time_recv, NULL);
		/* TODO do not handle raw packets here! */
		if (raw_decode(packet, r, q, fd)) {
			return 0;
		}
		if (dns_decode(NULL, 0, q, QR_QUERY, (char *)packet, r) < 0) {
			return 0;
		}
#ifndef WINDOWS32
		memset(&q->destination, 0, sizeof(struct sockaddr_storage));
		/* Read destination IP address */
		for (cmsg = CMSG_FIRSTHDR(&msg); cmsg != NULL;
			cmsg = CMSG_NXTHDR(&msg, cmsg)) {
			/* IPv4 destination (DSTADDR_SOCKOPT is platform-dependent) */
			if (cmsg->cmsg_level == IPPROTO_IP &&
				cmsg->cmsg_type == DSTADDR_SOCKOPT) {
				struct sockaddr_in *addr = (struct sockaddr_in *) &q->destination;
				addr->sin_family = AF_INET;
				addr->sin_addr = *dstaddr(cmsg);
				q->dest_len = sizeof(*addr);
				break;
			}
			/* IPv6 destination via IPV6_PKTINFO */
			if (cmsg->cmsg_level == IPPROTO_IPV6 &&
				cmsg->cmsg_type == IPV6_PKTINFO) {
				struct in6_pktinfo *pktinfo;
				struct sockaddr_in6 *addr = (struct sockaddr_in6 *) &q->destination;
				pktinfo = (struct in6_pktinfo *) CMSG_DATA(cmsg);
				addr->sin6_family = AF_INET6;
				memcpy(&addr->sin6_addr, &pktinfo->ipi6_addr, sizeof(struct in6_addr));
				q->dest_len = sizeof(*addr);
				break;
			}
		}
#endif
		return strlen(q->name);
	} else if (r < 0) {
		/* Error */
		warn("read dns");
	}
	return 0;
}
static size_t
write_dns_nameenc(uint8_t *buf, size_t buflen, uint8_t *data, size_t datalen, char downenc)
/* Returns #bytes of data that were encoded */
{
	static int td_cmc;
	char td[3];
	struct encoder *enc;
	/* Make a rotating topdomain to prevent filtering, ie 10-bit CMC */
	td_cmc = (td_cmc + 1) & 0x3FF;
	td[0] = b32_5to8(td_cmc & 0x1F);
	td[1] = b32_5to8((td_cmc >> 5) & 0x1F);
	td[2] = 0;
	/* encode data,datalen to CNAME/MX answer; the first byte of the
	 * name tells the client which codec was used */
	switch (downenc) {
	case 'S':
		buf[0] = 'i';
		enc = b64;
		break;
	case 'U':
		buf[0] = 'j';
		enc = b64u;
		break;
	case 'V':
		buf[0] = 'k';
		enc = b128;
		break;
	default:
		buf[0] = 'h';
		enc = b32;
		break;
	}
	return build_hostname(buf, buflen, data, datalen, td, enc, 0xFF, 1);
}
void
write_dns(int fd, struct query *q, char *data, size_t datalen, char downenc)
/* Encode data into a DNS answer appropriate for q->type — name-encoded
 * for CNAME/A/PTR/..., multiple name chunks for MX/SRV, base-N or raw
 * text for TXT, raw bytes for NULL — and send it to the querier. */
{
	char buf[64*1024];
	int len = 0;
	if (q->type == T_CNAME || q->type == T_A ||
		q->type == T_PTR || q->type == T_AAAA || q->type == T_A6 || q->type == T_DNAME) {
		char cnamebuf[1024]; /* max 255 */
		write_dns_nameenc((uint8_t *)cnamebuf, sizeof(cnamebuf), (uint8_t *)data, datalen, downenc);
		len = dns_encode(buf, sizeof(buf), q, QR_ANSWER, cnamebuf, sizeof(cnamebuf));
	} else if (q->type == T_MX || q->type == T_SRV) {
		/* MX/SRV can carry several names: encode successive slices of
		 * data as consecutive NUL-separated hostnames in mxbuf */
		char mxbuf[64*1024];
		char *b = mxbuf;
		int offset = 0;
		int res;
		while (1) {
			res = write_dns_nameenc((uint8_t *)b, sizeof(mxbuf) - (b - mxbuf),
				(uint8_t *)data + offset, datalen - offset, downenc);
			if (res < 1) {
				/* nothing encoded */
				b++; /* for final \0 */
				break;
			}
			b = b + strlen(b) + 1;
			offset += res;
			if (offset >= datalen)
				break;
		}
		/* Add final \0 */
		*b = '\0';
		len = dns_encode(buf, sizeof(buf), q, QR_ANSWER, mxbuf,
			sizeof(mxbuf));
	} else if (q->type == T_TXT) {
		/* TXT with base32 */
		uint8_t txtbuf[64*1024];
		size_t space = sizeof(txtbuf) - 1;;
		memset(txtbuf, 0, sizeof(txtbuf));
		/* first byte of the TXT record identifies the codec used */
		if (downenc == 'S') {
			txtbuf[0] = 's'; /* plain base64(Sixty-four) */
			len = b64->encode(txtbuf+1, &space, (uint8_t *)data, datalen);
		}
		else if (downenc == 'U') {
			txtbuf[0] = 'u'; /* Base64 with Underscore */
			len = b64u->encode(txtbuf+1, &space, (uint8_t *)data, datalen);
		}
		else if (downenc == 'V') {
			txtbuf[0] = 'v'; /* Base128 */
			len = b128->encode(txtbuf+1, &space, (uint8_t *)data, datalen);
		}
		else if (downenc == 'R') {
			txtbuf[0] = 'r'; /* Raw binary data */
			len = MIN(datalen, sizeof(txtbuf) - 1);
			memcpy(txtbuf + 1, data, len);
		} else {
			txtbuf[0] = 't'; /* plain base32(Thirty-two) */
			len = b32->encode(txtbuf+1, &space, (uint8_t *)data, datalen);
		}
		len = dns_encode(buf, sizeof(buf), q, QR_ANSWER, (char *)txtbuf, len+1);
	} else {
		/* Normal NULL-record encode */
		len = dns_encode(buf, sizeof(buf), q, QR_ANSWER, data, datalen);
	}
	if (len < 1) {
		warnx("dns_encode doesn't fit");
		return;
	}
	DEBUG(3, "TX: client %s ID %5d, %" L "u bytes data, type %d, name '%10s'",
		format_addr(&q->from, q->fromlen), q->id, datalen, q->type, q->name);
	sendto(fd, buf, len, 0, (struct sockaddr*)&q->from, q->fromlen);
}
/* Reply "BADLEN" and return from the calling handler when the decoded
 * request length l is below the required minimum x. Relies on dns_fd
 * and q being in scope at the expansion site, and on the caller
 * returning void. */
#define CHECK_LEN(l, x) \
	if (l < x) { \
		write_dns(dns_fd, q, "BADLEN", 6, 'T'); \
		return; \
	}
void
handle_dns_version(int dns_fd, struct query *q, uint8_t *domain, int domain_len)
/* Version handshake: compare the client's protocol version against
 * ours, allocate a user slot, reset that user's state to safe defaults
 * and reply VACK (with the login seed), VNAK, or VFUL. */
{
	uint8_t unpacked[512];
	/* start with a value guaranteed != PROTOCOL_VERSION so a short /
	 * malformed packet falls through to the VNAK path */
	uint32_t version = !PROTOCOL_VERSION;
	int userid, read;
	read = unpack_data(unpacked, sizeof(unpacked), (uint8_t *)domain + 1, domain_len - 1, b32);
	/* Version greeting, compare and send ack/nak */
	if (read >= 4) {
		/* Received V + 32bits version (network byte order) */
		version = ntohl(*(uint32_t *) unpacked);
	} /* if invalid pkt, just send VNAK */
	if (version != PROTOCOL_VERSION) {
		send_version_response(dns_fd, VERSION_NACK, PROTOCOL_VERSION, 0, q);
		syslog(LOG_INFO, "dropped user from %s, sent bad version %08X",
			format_addr(&q->from, q->fromlen), version);
		return;
	}
	userid = find_available_user();
	if (userid < 0) {
		/* No space for another user */
		send_version_response(dns_fd, VERSION_FULL, created_users, 0, q);
		syslog(LOG_INFO, "dropped user from %s, server full",
			format_addr(&q->from, q->fromlen));
		return;
	}
	/* Reset user options to safe defaults */
	struct tun_user *u = &users[userid];
	u->seed = rand();
	/* Store remote IP number */
	memcpy(&(u->host), &(q->from), q->fromlen);
	u->hostlen = q->fromlen;
	u->remote_forward_connected = 0;
	u->remoteforward_addr_len = 0;
	u->remote_tcp_fd = 0;
	u->remoteforward_addr.ss_family = AF_UNSPEC;
	u->fragsize = 100; /* very safe */
	u->conn = CONN_DNS_NULL;
	u->encoder = get_base32_encoder();
	u->down_compression = 1;
	u->lazy = 0;
	u->next_upstream_ack = -1;
	/* size the outgoing window for the default fragsize/encoder */
	window_buffer_resize(u->outgoing, u->outgoing->length,
		u->encoder->get_raw_length(u->fragsize) - DOWNSTREAM_PING_HDR);
	window_buffer_clear(u->incoming);
	qmem_init(userid);
	/* NULL/PRIVATE queries can carry raw bytes; everything else starts
	 * with base32 downstream */
	if (q->type == T_NULL || q->type == T_PRIVATE) {
		u->downenc = 'R';
		u->downenc_bits = 8;
	} else {
		u->downenc = 'T';
		u->downenc_bits = 5;
	}
	send_version_response(dns_fd, VERSION_ACK, u->seed, userid, q);
	syslog(LOG_INFO, "Accepted version for user #%d from %s",
		userid, format_addr(&q->from, q->fromlen));
	DEBUG(1, "User %d connected with correct version from %s.",
		userid, format_addr(&q->from, q->fromlen));
	DEBUG(3, "User %d has login challenge 0x%08x", userid, u->seed);
}
void
handle_dns_downstream_codec_check(int dns_fd, struct query *q, uint8_t *domain, int domain_len)
/* Answer a downstream codec test query with the fixed check pattern,
 * provided the requested codec is usable with this query type. */
{
	char *datap;
	int datalen, variant, codec;
	variant = b32_8to5(domain[2]); /* check variant: second char in b32 */
	if (variant != 1) {
		write_dns(dns_fd, q, "BADLEN", 6, 'T');
		return;
	}
	datap = DOWNCODECCHECK1;
	datalen = DOWNCODECCHECK1_LEN;
	/* codec to test: first char raw */
	codec = toupper(domain[1]);
	if (codec == 'T' || codec == 'S' || codec == 'U' || codec == 'V') {
		/* text codecs: allowed on every name/text answer type */
		if (q->type == T_TXT ||
			q->type == T_SRV || q->type == T_MX ||
			q->type == T_CNAME || q->type == T_A ||
			q->type == T_PTR || q->type == T_AAAA ||
			q->type == T_A6 || q->type == T_DNAME) {
			write_dns(dns_fd, q, datap, datalen, codec);
			return;
		}
	} else if (codec == 'R') {
		/* raw: only NULL and TXT can carry it */
		if (q->type == T_NULL || q->type == T_TXT) {
			write_dns(dns_fd, q, datap, datalen, 'R');
			return;
		}
	}
	/* if still here, then codec not available */
	write_dns(dns_fd, q, "BADCODEC", 8, 'T');
}
void
handle_dns_login(int dns_fd, struct query *q, uint8_t *domain, int domain_len, int userid)
/* Login request: verify the password hash for userid and, depending on
 * the flag bits, either complete a normal login (reply with ip/mtu/
 * netmask), start a TCP forward connection, or poll the status of a
 * TCP forward already being established. Replies LNAK/BADIP on
 * failure. */
{
	uint8_t unpacked[512], flags;
	char logindata[16], *tmp[2], out[512], *reason = NULL;
	char *errormsg = NULL, fromaddr[100];
	struct in_addr tempip;
	char remote_tcp, remote_isnt_localhost, use_ipv6, poll_status; //, drop_packets;
	int length = 17, read, addrlen, login_ok = 1;
	uint16_t port;
	struct tun_user *u = &users[userid];
	/* both views alias the same storage; which one is valid depends on
	 * the use_ipv6 flag below */
	struct sockaddr_in6 *addr6 = (struct sockaddr_in6 *) &u->remoteforward_addr;
	struct sockaddr_in *addr = (struct sockaddr_in *) &u->remoteforward_addr;
	read = unpack_data(unpacked, sizeof(unpacked), (uint8_t *) domain + 2, domain_len - 2, b32);
	/* Decode flags and calculate min. length */
	flags = unpacked[0];
	remote_tcp = flags & 1;
	remote_isnt_localhost = (flags & 2) >> 1;
	use_ipv6 = (flags & 4) >> 2;
	//drop_packets = (flags & 8) >> 3; /* currently unimplemented */
	poll_status = (flags & 0x10) >> 4;
	/* expected payload: flags(1) + hash(16) [+ port(2) [+ addr(4|16)]] */
	addrlen = (remote_tcp && remote_isnt_localhost) ? (use_ipv6 ? 16 : 4) : 0;
	length += (remote_tcp ? 2 : 0) + addrlen;
	/* There should be no extra data if only polling forwarding status */
	if (poll_status) {
		length = 17;
	}
	CHECK_LEN(read, length);
	strncpy(fromaddr, format_addr(&q->from, q->fromlen), 100);
	DEBUG(2, "Received login request for user %d from %s",
		userid, fromaddr);
	DEBUG(6, "Login: length=%d, flags=0x%02x, seed=0x%08x, hash=0x%016llx%016llx",
		length, flags, u->seed, *(unsigned long long *) (unpacked + 1),
		*(unsigned long long *) (unpacked + 9));
	if (check_user_and_ip(userid, q, server.check_ip) != 0) {
		write_dns(dns_fd, q, "BADIP", 5, 'T');
		syslog(LOG_WARNING, "rejected login request from user #%d from %s; expected source %s",
			userid, fromaddr, format_addr(&u->host, u->hostlen));
		DEBUG(1, "Rejected login request from user %d: BADIP", userid);
		return;
	}
	/* Check remote host/port options */
	if ((addrlen > 0 && !server.allow_forward_remote) ||
		(remote_tcp && !server.allow_forward_local_port)) {
		login_ok = 0;
		reason = "requested bad TCP forward options";
	}
	u->last_pkt = time(NULL);
	/* client proves knowledge of the password by hashing the seed */
	login_calculate(logindata, 16, server.password, u->seed);
	if (memcmp(logindata, unpacked + 1, 16) != 0) {
		login_ok = 0;
		reason = "bad password";
	}
	if (remote_tcp) {
		/* parse and store the requested forward destination */
		port = ntohs(*(uint16_t *) (unpacked + 17));
		if (addrlen > 0) {
			if (use_ipv6) {
				addr6->sin6_family = AF_INET6;
				addr6->sin6_port = htons(port);
				u->remoteforward_addr_len = sizeof(*addr6);
				memcpy(&addr6->sin6_addr, unpacked + 19, MIN(sizeof(*addr6), addrlen));
			} else {
				addr->sin_family = AF_INET;
				addr->sin_port = htons(port);
				u->remoteforward_addr_len = sizeof(*addr);
				memcpy(&addr->sin_addr, unpacked + 19, MIN(sizeof(*addr), addrlen));
			}
			DEBUG(1, "User %d requested TCP connection to %s:%hu, %s.", userid,
				format_addr(&u->remoteforward_addr, u->remoteforward_addr_len),
				port, login_ok ? "allowed" : "rejected");
		} else {
			/* no address supplied: forward to loopback */
			addr->sin_family = AF_INET;
			addr->sin_port = htons(port);
			addr->sin_addr.s_addr = htonl(INADDR_LOOPBACK);
			DEBUG(1, "User %d requested TCP connection to localhost:%hu, %s.", userid,
				port, login_ok ? "allowed" : "rejected");
		}
	}
	if (poll_status && login_ok) {
		/* a status poll must carry ONLY the poll flag (flags == 0x10) */
		if (addrlen > 0 || (flags ^ 0x10)) {
			login_ok = 0;
			reason = "invalid flags";
		}
	}
	if (!login_ok) {
		write_dns(dns_fd, q, "LNAK", 4, 'T');
		/* failed attempts are tracked as a negative count in
		 * u->authenticated — NOTE(review): verify intended semantics */
		if (--u->authenticated >= 0)
			u->authenticated = -1;
		int tries = abs(u->authenticated);
		DEBUG(1, "rejected login from user %d (%s), tries: %d, reason: %s",
			userid, fromaddr, tries, reason);
		syslog(LOG_WARNING, "rejected login request from user #%d from %s, %s; incorrect attempts: %d",
			userid, fromaddr, reason, tries);
		return;
	}
	/* Store user auth OK, count number of logins */
	u->authenticated++;
	if (u->authenticated > 1 && !poll_status)
		syslog(LOG_WARNING, "duplicate login request from user #%d from %s",
			userid, fromaddr);
	if (remote_tcp) {
		int tcp_fd;
		DEBUG(1, "User %d connected from %s, starting TCP connection to %s.", userid,
			fromaddr, format_addr(&u->remoteforward_addr, sizeof(struct sockaddr_storage)));
		syslog(LOG_NOTICE, "accepted password from user #%d, connecting TCP forward", userid);
		/* Open socket and connect to TCP forward host:port */
		tcp_fd = open_tcp_nonblocking(&u->remoteforward_addr, &errormsg);
		if (tcp_fd < 0) {
			if (!errormsg)
				errormsg = "Error opening socket.";
			goto tcp_forward_error;
		}
		/* connection in progress */
		out[0] = 'W';
		read = 1;
		write_dns(dns_fd, q, out, read + 1, u->downenc);
		u->remote_tcp_fd = tcp_fd;
		u->remote_forward_connected = 2; /* connecting */
		return;
	} else if (poll_status) {
		/* Check TCP forward connection status and update user data */
		int retval;
		/* Check for connection errors */
		if ((retval = check_tcp_error(u->remote_tcp_fd, &errormsg)) != 0) {
			/* if unacceptable error, tell user */
			if (retval != EINPROGRESS)
				goto tcp_forward_error;
		}
		if (retval == EINPROGRESS)
			u->remote_forward_connected = 2;
		read = 1;
		out[1] = 0;
		/* check user TCP forward status flag, which is updated in server_tunnel
		 * when the file descriptor becomes writable (ie, connection established */
		if (u->remote_forward_connected == 1) {
			out[0] = 'C';
			DEBUG(2, "User %d TCP forward connection established: %s", userid, errormsg);
		} else if (u->remote_forward_connected == 2) {
			out[0] = 'W';
			DEBUG(3, "User %d TCP connection in progress: %s", userid, errormsg);
		}
		write_dns(dns_fd, q, out, read + 1, u->downenc);
		return;
	} else {
		out[0] = 'I';
		/* Send ip/mtu/netmask info */
		tempip.s_addr = server.my_ip;
		tmp[0] = strdup(inet_ntoa(tempip));
		tempip.s_addr = u->tun_ip;
		tmp[1] = strdup(inet_ntoa(tempip));
		read = snprintf(out + 1, sizeof(out) - 1, "-%s-%s-%d-%d",
			tmp[0], tmp[1], server.mtu, server.netmask);
		DEBUG(1, "User %d connected from %s, tun_ip %s.", userid,
			fromaddr, tmp[1]);
		syslog(LOG_NOTICE, "accepted password from user #%d, given IP %s", userid, tmp[1]);
		free(tmp[1]);
		free(tmp[0]);
		write_dns(dns_fd, q, out, read + 1, u->downenc);
		return;
	}
tcp_forward_error:
	/* TCP forward could not be set up: report the error text to the user */
	DEBUG(1, "Failed to connect TCP forward for user %d: %s", userid, errormsg);
	out[0] = 'E';
	strncpy(out + 1, errormsg, sizeof(out) - 1);
	read = strlen(out);
	write_dns(dns_fd, q, out, read + 1, u->downenc);
}
void
handle_dns_ip_request(int dns_fd, struct query *q, int userid)
/* Tell the client which address to use for raw-mode UDP: the configured
 * external IPv4 address (-n) when set, otherwise the address the query
 * was received on. Reply format: 'I' + 4 or 16 address bytes. */
{
	char reply[17];
	int length;
	reply[0] = 'I';
	if (q->from.ss_family != AF_INET) {
		/* IPv6 client: return the query's destination address */
		struct sockaddr_in6 *a6 = (struct sockaddr_in6 *) &q->destination;
		memcpy(&reply[1], &a6->sin6_addr, sizeof(struct in6_addr));
		length = 1 + sizeof(struct in6_addr);
	} else if (server.ns_ip != INADDR_ANY) {
		/* If set, use assigned external ip (-n option) */
		memcpy(&reply[1], &server.ns_ip, sizeof(server.ns_ip));
		length = 1 + sizeof(struct in_addr);
	} else {
		/* otherwise return destination ip from packet */
		struct sockaddr_in *a4 = (struct sockaddr_in *) &q->destination;
		memcpy(&reply[1], &a4->sin_addr, sizeof(struct in_addr));
		length = 1 + sizeof(struct in_addr);
	}
	write_dns(dns_fd, q, reply, length, 'T');
}
void
handle_dns_upstream_codec_switch(int dns_fd, struct query *q, int userid,
	uint8_t *unpacked, size_t read)
/* Switch the user's upstream data codec. Codec ids: 5=base32,
 * 6=base64, 26=base64u ("2nd" 6-bit codec), 7=base128. Replies with
 * the codec name on success, BADCODEC otherwise. */
{
	struct encoder *enc;
	switch (unpacked[0]) {
	case 5: /* 5 bits per byte = base32 */
		enc = b32;
		break;
	case 6: /* 6 bits per byte = base64 */
		enc = b64;
		break;
	case 26: /* "2nd" 6 bits per byte = base64u, with underscore */
		enc = b64u;
		break;
	case 7: /* 7 bits per byte = base128 */
		enc = b128;
		break;
	default:
		write_dns(dns_fd, q, "BADCODEC", 8, users[userid].downenc);
		return;
	}
	user_switch_codec(userid, enc);
	write_dns(dns_fd, q, enc->name, strlen(enc->name), users[userid].downenc);
}
void
handle_dns_set_options(int dns_fd, struct query *q, int userid,
	uint8_t *unpacked, size_t read)
/* Parse the client's option byte: bits 6..2 select the downstream
 * codec (one-hot), bit 1 toggles downstream compression, bit 0 lazy
 * mode. Applies the options, resizes the outgoing window to match the
 * new bits-per-byte, and echoes the codec name (or BADCODEC). */
{
	uint8_t bits = 0;
	char *encname = "BADCODEC";
	int tmp_lazy, tmp_downenc, tmp_comp;
	/* Temporary variables: don't change anything until all options parsed */
	tmp_lazy = users[userid].lazy;
	tmp_comp = users[userid].down_compression;
	tmp_downenc = users[userid].downenc;
	switch (unpacked[0] & 0x7C) {
	case (1 << 6): /* Base32 */
		tmp_downenc = 'T';
		encname = "Base32";
		bits = 5;
		break;
	case (1 << 5): /* Base64 */
		tmp_downenc = 'S';
		encname = "Base64";
		bits = 6;
		break;
	case (1 << 4): /* Base64u */
		tmp_downenc = 'U';
		encname = "Base64u";
		/* BUGFIX: was 26 (the upstream codec *id* for base64u), but this
		 * value is bits-per-byte and feeds the (bits * fragsize) / 8
		 * window-size math below — base64u encodes 6 bits per byte,
		 * exactly like base64. */
		bits = 6;
		break;
	case (1 << 3): /* Base128 */
		tmp_downenc = 'V';
		encname = "Base128";
		bits = 7;
		break;
	case (1 << 2): /* Raw */
		tmp_downenc = 'R';
		encname = "Raw";
		bits = 8;
		break;
	default: /* Invalid (More than 1 encoding bit set) */
		write_dns(dns_fd, q, "BADCODEC", 8, users[userid].downenc);
		return;
	}
	tmp_comp = (unpacked[0] & 2) >> 1; /* compression flag */
	tmp_lazy = (unpacked[0] & 1); /* lazy mode flag */
	/* Automatically switch to raw encoding if PRIVATE or NULL request */
	if ((q->type == T_NULL || q->type == T_PRIVATE) && !bits) {
		users[userid].downenc = 'R';
		bits = 8;
		DEBUG(2, "Assuming raw data encoding with NULL/PRIVATE requests for user %d.", userid);
	}
	if (bits) {
		/* resize outgoing window for the new downstream payload size */
		int f = users[userid].fragsize;
		window_buffer_resize(users[userid].outgoing, users[userid].outgoing->length,
			(bits * f) / 8 - DOWNSTREAM_PING_HDR);
		users[userid].downenc_bits = bits;
	}
	DEBUG(1, "Options for user %d: down compression %d, data bits %d/maxlen %u (enc '%c'), lazy %d.",
		userid, tmp_comp, bits, users[userid].outgoing->maxfraglen, tmp_downenc, tmp_lazy);
	/* Store any changes */
	users[userid].down_compression = tmp_comp;
	users[userid].downenc = tmp_downenc;
	users[userid].lazy = tmp_lazy;
	write_dns(dns_fd, q, encname, strlen(encname), users[userid].downenc);
}
void
handle_dns_fragsize_probe(int dns_fd, struct query *q, int userid,
	uint8_t *unpacked, size_t read)
/* Downstream fragsize probe packet */
{
	int req = ntohs(*(uint16_t *) unpacked);
	DEBUG(3, "Got downstream fragsize probe from user %d, required fragsize %d", userid, req);
	if (req < 2 || req > MAX_FRAGSIZE) {
		write_dns(dns_fd, q, "BADFRAG", 7, users[userid].downenc);
		return;
	}
	char buf[MAX_FRAGSIZE];
	unsigned int v = ((unsigned int) rand()) & 0xff;
	int i;
	memset(buf, 0, sizeof(buf));
	/* first two bytes echo the requested size */
	buf[0] = (req >> 8) & 0xff;
	buf[1] = req & 0xff;
	/* make checkable pseudo-random sequence */
	buf[2] = 107;
	for (i = 3; i < MAX_FRAGSIZE; i++) {
		buf[i] = v;
		v = (v + 107) & 0xff;
	}
	write_dns(dns_fd, q, buf, req, users[userid].downenc);
}
void
handle_dns_set_fragsize(int dns_fd, struct query *q, int userid,
	uint8_t *unpacked, size_t read)
/* Downstream fragsize packet: fixes the maximum downstream fragment size
 * for this user and resizes the outgoing window buffer to match. */
{
	int frag = ntohs(*(uint16_t *) unpacked);

	if (frag < 2 || frag > MAX_FRAGSIZE) {
		write_dns(dns_fd, q, "BADFRAG", 7, users[userid].downenc);
		return;
	}

	users[userid].fragsize = frag;
	/* Max payload per fragment = encoded chars * bits-per-char / 8,
	 * minus the downstream ping header. */
	window_buffer_resize(users[userid].outgoing, users[userid].outgoing->length,
		(users[userid].downenc_bits * frag) / 8 - DOWNSTREAM_PING_HDR);
	/* Acknowledge by echoing the two length bytes back to the client. */
	write_dns(dns_fd, q, (char *)unpacked, 2, users[userid].downenc);
	DEBUG(1, "Setting max downstream data length to %u bytes for user %d; %d bits (%c)",
		users[userid].outgoing->maxfraglen, userid, users[userid].downenc_bits, users[userid].downenc);
}
void
handle_dns_ping(int dns_fd, struct query *q, int userid,
	uint8_t *unpacked, size_t read)
/* Handles an upstream ping: unpacks window/timeout options, optionally
 * closes the user's TCP forward, and either answers immediately (handshake)
 * or leaves the query pending in qmem for lazy-mode responses later. */
{
	int dn_seq, up_seq, dn_winsize, up_winsize, dn_ack;
	int respond, set_qtimeout, set_wtimeout, tcp_disconnect;
	unsigned qtimeout_ms, wtimeout_ms;

	CHECK_LEN(read, UPSTREAM_PING);

	/* Check if query is cached */
	if (qmem_is_cached(dns_fd, userid, q))
		return;

	/* Unpack flags/options from ping header */
	dn_ack = ((unpacked[9] >> 2) & 1) ? unpacked[0] : -1;
	up_winsize = unpacked[1];
	dn_winsize = unpacked[2];
	up_seq = unpacked[3];
	dn_seq = unpacked[4];

	/* Query timeout and window frag timeout */
	qtimeout_ms = ntohs(*(uint16_t *) (unpacked + 5));
	wtimeout_ms = ntohs(*(uint16_t *) (unpacked + 7));

	/* Flag bits in byte 9: bit 0 = respond now, bit 2 = ACK present (above),
	 * bit 3 = set query timeout, bit 4 = set window timeout,
	 * bit 5 = close remote TCP forward */
	respond = unpacked[9] & 1;
	set_qtimeout = (unpacked[9] >> 3) & 1;
	set_wtimeout = (unpacked[9] >> 4) & 1;
	tcp_disconnect = (unpacked[9] >> 5) & 1;

	DEBUG(3, "PING pkt user %d, down %d/%d, up %d/%d, ACK %d, %sqtime %u ms, "
		"%swtime %u ms, respond %d, tcp_close %d (flags %02X)",
		userid, dn_seq, dn_winsize, up_seq, up_winsize, dn_ack,
		set_qtimeout ? "SET " : "", qtimeout_ms, set_wtimeout ? "SET " : "",
		wtimeout_ms, respond, tcp_disconnect, unpacked[9]);

	if (tcp_disconnect) {
		/* close user's TCP forward connection and mark user as inactive */
		if (users[userid].remoteforward_addr_len == 0) {
			DEBUG(1, "User %d attempted TCP disconnect but didn't request TCP forwarding!", userid);
		} else {
			DEBUG(1, "User %d closed remote TCP forward", userid);
			close_socket(users[userid].remote_tcp_fd);
			users[userid].active = 0;
		}
	}

	if (set_qtimeout) {
		/* update user's query timeout if timeout flag set */
		users[userid].dns_timeout = ms_to_timeval(qtimeout_ms);

		/* if timeout is 0, we do not enable lazy mode but it is effectively the same */
		int newlazy = !(qtimeout_ms == 0);
		/* NOTE(review): the lazy flag is only logged here, never updated --
		 * the message says "not changing"; confirm this is intentional. */
		if (newlazy != users[userid].lazy)
			DEBUG(2, "User %d: not changing lazymode to %d with timeout %u",
				userid, newlazy, qtimeout_ms);
	}

	if (set_wtimeout) {
		/* update sending window fragment ACK timeout */
		users[userid].outgoing->timeout = ms_to_timeval(wtimeout_ms);
	}

	qmem_append(userid, q);

	if (respond) {
		/* ping handshake - set windowsizes etc, respond NOW using this query
		 * NOTE: still added to qmem (for cache) even though responded to immediately */
		DEBUG(2, "PING HANDSHAKE set windowsizes (old/new) up: %d/%d, dn: %d/%d",
			users[userid].outgoing->windowsize, dn_winsize, users[userid].incoming->windowsize, up_winsize);
		users[userid].outgoing->windowsize = dn_winsize;
		users[userid].incoming->windowsize = up_winsize;
		send_data_or_ping(userid, q, 1, 1, NULL);
		return;
	}

	/* if respond flag not set, query waits in qmem and is used later */
	user_process_incoming_data(userid, dn_ack);
}
void
handle_dns_data(int dns_fd, struct query *q, uint8_t *domain, int domain_len, int userid)
/* Decodes an upstream data packet into a fragment and feeds it into the
 * user's incoming reassembly window. No reply is sent here: the ACK is
 * sent later from qmem using a stored query. */
{
	uint8_t unpacked[20];
	fragment f;
	size_t len;

	/* Need 6 char header + >=1 char data */
	CHECK_LEN(domain_len, UPSTREAM_HDR + 1);

	/* Check if cached */
	if (qmem_is_cached(dns_fd, userid, q)) {
		/* if is cached, by this point it has already been answered */
		return;
	}

	qmem_append(userid, q);
	/* Decode upstream data header - see docs/proto_XXXXXXXX.txt */
	/* First byte (after userid) = CMC (ignored); skip 2 bytes */
	len = sizeof(unpacked);
	b32->decode(unpacked, &len, (uint8_t *)domain + 2, 5);

	/* Decoded header: byte 0 = sequence ID, byte 1 = ACK number,
	 * byte 2 upper nibble = flag bits (ACK-valid, compressed, start, end) */
	f.seqID = unpacked[0];
	unpacked[2] >>= 4; /* Lower 4 bits are unused */
	f.ack_other = ((unpacked[2] >> 3) & 1) ? unpacked[1] : -1;
	f.compressed = (unpacked[2] >> 2) & 1;
	f.start = (unpacked[2] >> 1) & 1;
	f.end = unpacked[2] & 1;

	/* VLA sized to the largest fragment this user can send */
	uint8_t data[users[userid].incoming->maxfraglen];
	f.data = data;

	/* Decode remainder of data with user encoding into fragment */
	f.len = unpack_data(f.data, users[userid].incoming->maxfraglen,
		(uint8_t *)domain + UPSTREAM_HDR,
		domain_len - UPSTREAM_HDR, users[userid].encoder);

	DEBUG(3, "frag seq %3u, datalen %5lu, ACK %3d, compression %1d, s%1d e%1d",
		f.seqID, f.len, f.ack_other, f.compressed, f.start, f.end);

	/* if already waiting for an ACK to be sent back upstream (on incoming buffer) */
	if (users[userid].next_upstream_ack >= 0) {
		/* Shouldn't normally happen; will always be reset after sending a packet. */
		DEBUG(1, "[WARNING] next_upstream_ack == %d for user %d.",users[userid].next_upstream_ack, userid);
	}

	window_process_incoming_fragment(users[userid].incoming, &f);
	users[userid].next_upstream_ack = f.seqID;

	user_process_incoming_data(userid, f.ack_other);
	/* Nothing to do. ACK for this fragment is sent later in qmem_max_wait,
	 * using an old query. This is left in qmem until needed/times out */
}
void
handle_null_request(int dns_fd, struct query *q, int domain_len)
/* Handles a NULL DNS request. See doc/proto_XXXXXXXX.txt for details on iodine protocol.
 * Dispatches on the command character (first byte of the query name), then
 * on the hex userid (second byte), then runs the per-command handler. */
{
	char cmd, userchar;
	int userid = -1;
	uint8_t in[QUERY_NAME_SIZE + 1];

	/* Everything here needs at least 5 chars in the name:
	 * cmd, userid and more data or at least 3 bytes CMC */
	if (domain_len < 5)
		return;

	/* Duplicate domain name to prevent changing original query */
	memcpy(in, q->name, QUERY_NAME_SIZE + 1);
	in[QUERY_NAME_SIZE] = 0; /* null terminate */

	cmd = toupper(in[0]);
	DEBUG(3, "NULL request length %d/%" L "u, command '%c'", domain_len, sizeof(in), cmd);

	/* Commands that do not care about userid: also these need to be backwards
	 * compatible with older versions of iodine (at least down to 00000502) */
	if (cmd == 'V') { /* Version check - before userid is assigned*/
		handle_dns_version(dns_fd, q, in, domain_len);
		return;
	}
	else if (cmd == 'Z') { /* Upstream codec check - user independent */
		/* Reply with received hostname as data (encoded in base32) */
		write_dns(dns_fd, q, (char *)in, domain_len, 'T');
		return;
	}
	else if (cmd == 'Y') { /* Downstream codec check - user independent */
		handle_dns_downstream_codec_check(dns_fd, q, in, domain_len);
		return;
	}

	/* Get userid from query (always 2nd byte in hex except for data packets) */
	if (isxdigit(cmd)) {
		/* Upstream data packet - first byte is userid in hex */
		userchar = cmd;
		cmd = 'd'; /* flag for data packet - not part of protocol */
	} else {
		userchar = toupper(in[1]);
	}

	if (isxdigit(userchar)) {
		userid = (userchar >= 'A' && userchar <= 'F') ?
			(userchar - 'A' + 10) : (userchar - '0');
	} else {
		/* Invalid user ID or bad DNS query */
		/* Fix: "BADLEN" is 6 bytes (was sent with length 5, truncating the
		 * reply) and the missing return meant processing continued with
		 * userid == -1. */
		write_dns(dns_fd, q, "BADLEN", 6, 'T');
		return;
	}

	/* Login request - after version check successful, do not check auth yet */
	if (cmd == 'L') {
		handle_dns_login(dns_fd, q, in, domain_len, userid);
		return;
	}

	/* Check user IP and authentication status */
	if (check_authenticated_user_and_ip(userid, q, server.check_ip) != 0) {
		write_dns(dns_fd, q, "BADIP", 5, 'T');
		return;
	}

	if (cmd == 'd') { /* Upstream data packet */
		handle_dns_data(dns_fd, q, in, domain_len, userid);
		return;
	} else if (cmd == 'I') { /* IP request packet - no base32 data */
		handle_dns_ip_request(dns_fd, q, userid);
		/* Fix: was missing; control fell through to the base32 command
		 * switch below and logged a spurious "Invalid DNS query". */
		return;
	}

	/* Following commands have everything after cmd and userid in base32
	 * All bytes that are not valid base32 are decoded to 0 */
	uint8_t unpacked[512];
	size_t raw_len;
	raw_len = unpack_data(unpacked, sizeof(unpacked), (uint8_t *)in + 2, domain_len - 2, b32);
	if (raw_len < 3) /* always at least 3 bytes after decoding at least 5 bytes */
		return; /* Just in case. */

	switch (cmd) {
	case 'S':
		handle_dns_upstream_codec_switch(dns_fd, q, userid, unpacked, raw_len);
		break;
	case 'O':
		handle_dns_set_options(dns_fd, q, userid, unpacked, raw_len);
		break;
	case 'R':
		handle_dns_fragsize_probe(dns_fd, q, userid, unpacked, raw_len);
		break;
	case 'N':
		handle_dns_set_fragsize(dns_fd, q, userid, unpacked, raw_len);
		break;
	case 'P':
		handle_dns_ping(dns_fd, q, userid, unpacked, raw_len);
		break;
	default:
		DEBUG(2, "Invalid DNS query! cmd = %c, hostname = '%*s'",
			cmd, domain_len, in);
	}
}
void
handle_ns_request(int dns_fd, struct query *q)
/* Sends an NS response for the tunnel domain; mostly identical to
 * handle_a_request() below. */
{
	char reply[64*1024];
	int reply_len;

	if (server.ns_ip != INADDR_ANY) {
		/* If ns_ip set, overwrite destination addr with it.
		 * Destination addr will be sent as additional record (A, IN) */
		struct sockaddr_in *dest = (struct sockaddr_in *) &q->destination;
		memcpy(&dest->sin_addr, &server.ns_ip, sizeof(server.ns_ip));
	}

	reply_len = dns_encode_ns_response(reply, sizeof(reply), q, server.topdomain);
	if (reply_len < 1) {
		warnx("dns_encode_ns_response doesn't fit");
		return;
	}

	DEBUG(2, "TX: NS reply client %s ID %5d, type %d, name %s, %d bytes",
		format_addr(&q->from, q->fromlen), q->id, q->type, q->name, reply_len);
	if (sendto(dns_fd, reply, reply_len, 0, (struct sockaddr *) &q->from, q->fromlen) <= 0) {
		warn("ns reply send error");
	}
}
void
handle_a_request(int dns_fd, struct query *q, int fakeip)
/* Sends an A-record response; mostly identical to handle_ns_request() above. */
{
	char reply[64*1024];
	int reply_len;

	if (fakeip) {
		/* Answer with a fixed loopback address. */
		in_addr_t localhost = inet_addr("127.0.0.1");
		struct sockaddr_in *dest = (struct sockaddr_in *) &q->destination;
		memcpy(&dest->sin_addr, &localhost, sizeof(localhost));
	} else if (server.ns_ip != INADDR_ANY) {
		/* If ns_ip set, overwrite destination addr with it.
		 * Destination addr will be sent as additional record (A, IN) */
		struct sockaddr_in *dest = (struct sockaddr_in *) &q->destination;
		memcpy(&dest->sin_addr, &server.ns_ip, sizeof(server.ns_ip));
	}

	reply_len = dns_encode_a_response(reply, sizeof(reply), q);
	if (reply_len < 1) {
		warnx("dns_encode_a_response doesn't fit");
		return;
	}

	DEBUG(2, "TX: A reply client %s ID %5d, type %d, name %s, %d bytes",
		format_addr(&q->from, q->fromlen), q->id, q->type, q->name, reply_len);
	if (sendto(dns_fd, reply, reply_len, 0, (struct sockaddr *) &q->from, q->fromlen) <= 0) {
		warn("a reply send error");
	}
}
|
<reponame>msaglJS/msagl-js
// A min-priority queue implemented with the classic binary-heap algorithm.
export class BinaryHeapPriorityQueue {
  // heap storage; slot 0 is unused because heap indexing starts at 1
  _heap: number[]
  // maps each element value to its current slot in _heap
  _reverse_heap: number[]
  // maps each element value to its priority
  _priors: number[]

  heapSize = 0

  get Count(): number {
    return this.heapSize
  }

  // All elements inserted must be non-negative integers less than n.
  constructor(n: number) {
    this._priors = new Array(n)
    // slot 0 unused, hence n + 1
    this._heap = new Array(n + 1)
    this._reverse_heap = new Array(n)
  }

  // Store element h at heap slot i, keeping the reverse map in sync.
  PutAtI(i: number, h: number) {
    this._heap[i] = h
    this._reverse_heap[h] = i
  }

  // Exchange the element at slot i with its parent at slot i >> 1.
  SwapWithParent(i: number) {
    const parentElement = this._heap[i >> 1]
    this.PutAtI(i >> 1, this._heap[i])
    this.PutAtI(i, parentElement)
  }

  // Insert element o with the given priority, then bubble it up.
  Enqueue(o: number, priority: number) {
    this.heapSize++
    this._priors[o] = priority
    let slot = this.heapSize
    this.PutAtI(slot, o)
    while (slot > 1 && this._priors[this._heap[slot >> 1]] > priority) {
      this.SwapWithParent(slot)
      slot >>= 1
    }
  }

  // Removes and returns the element with the smallest priority.
  Dequeue(): number {
    if (this.heapSize == 0) {
      throw new Error()
    }
    const top = this._heap[1]
    if (this.heapSize > 1) {
      // move the last element to the root and sift it down
      this.PutAtI(1, this._heap[this.heapSize])
      let cur = 1
      for (;;) {
        let best = cur
        const left = cur << 1
        if (
          left <= this.heapSize &&
          this._priors[this._heap[left]] < this._priors[this._heap[cur]]
        ) {
          best = left
        }
        const right = left + 1
        if (
          right <= this.heapSize &&
          this._priors[this._heap[right]] < this._priors[this._heap[best]]
        ) {
          best = right
        }
        if (best == cur) {
          break
        }
        this.SwapWithParent(best)
        cur = best
      }
    }
    this.heapSize--
    return top
  }

  IsEmpty(): boolean {
    return this.heapSize == 0
  }

  // Lower the priority of element o and bubble it up to restore the invariant.
  DecreasePriority(o: number, newPriority: number) {
    this._priors[o] = newPriority
    let slot = this._reverse_heap[o]
    while (
      slot > 1 &&
      this._priors[this._heap[slot]] < this._priors[this._heap[slot >> 1]]
    ) {
      this.SwapWithParent(slot)
      slot >>= 1
    }
  }
}
|
#!/bin/bash
# Boot skiboot under the IBM POWER8 "mambo" simulator and wait for the
# hello_world test kernel to print "Hello World!". Skips (exit 0) when the
# simulator, the expect binary, or a clean build is not available.
if [ -z "$MAMBO_PATH" ]; then
	MAMBO_PATH=/opt/ibm/systemsim-p8/
fi
if [ -z "$MAMBO_BINARY" ]; then
	MAMBO_BINARY="/run/pegasus/power8"
fi
if [ ! -x "$MAMBO_PATH/$MAMBO_BINARY" ]; then
	echo 'Could not find executable MAMBO_BINARY. Skipping hello_world test';
	exit 0;
fi
# A skiboot built with KERNEL set embeds a payload that would interfere here.
if [ -n "$KERNEL" ]; then
	echo 'Please rebuild skiboot without KERNEL set. Skipping hello_world test';
	exit 0;
fi
if [ ! `command -v expect` ]; then
	echo 'Could not find expect binary. Skipping hello_world test';
	exit 0;
fi
# Tell the mambo tcl scripts which kernel image to load.
export SKIBOOT_ZIMAGE=`pwd`/test/hello_world/hello_kernel/hello_kernel
# Currently getting some core dumps from mambo, so disable them!
OLD_ULIMIT_C=`ulimit -c`
ulimit -c 0
# Run in a subshell so the cd does not leak; drive mambo via an expect
# script fed on stdin (do not insert anything inside the heredoc).
( cd external/mambo;
cat <<EOF | expect
set timeout 30
spawn $MAMBO_PATH/$MAMBO_BINARY -n -f skiboot.tcl
expect {
timeout { send_user "\nTimeout waiting for hello world\n"; exit 1 }
eof { send_user "\nUnexpected EOF\n;" exit 1 }
"Hello World!"
}
close
exit 0
EOF
)
# Restore the caller's core-dump limit.
ulimit -c $OLD_ULIMIT_C
echo
exit 0;
|
<filename>docs/html/structCatch_1_1Matchers_1_1StdString_1_1EqualsMatcher.js
// Doxygen-generated navigation data for the EqualsMatcher documentation
// page: [display name, target page anchor, children]. Do not edit by hand.
var structCatch_1_1Matchers_1_1StdString_1_1EqualsMatcher =
[
    [ "EqualsMatcher", "structCatch_1_1Matchers_1_1StdString_1_1EqualsMatcher.html#ab740f1fb2310e9fe3fed5134d4c7e4c8", null ],
    [ "match", "structCatch_1_1Matchers_1_1StdString_1_1EqualsMatcher.html#a0bb9d64693f7bb1ef7441062d219f21a", null ]
];
<gh_stars>1-10
from django.forms import ModelForm
from .models import Post, Comment
from loginsignup.utils import getBeaverInstance
class PostForm(ModelForm):
    """Form for creating a Post; creator and metadata are filled server-side."""

    class Meta:
        model = Post
        exclude = ["likes", "posted_on", "post_creator"]

    def checkPost(self, request):
        """Validate and persist the post, stamping the requesting beaver
        as its creator. Returns True on success, False if invalid."""
        if not self.is_valid():
            return False
        new_post = self.save(commit=False)
        new_post.post_creator = getBeaverInstance(request)
        new_post.save()
        return True
class CommentForm(ModelForm):
    """Form for adding a Comment to an existing Post."""

    class Meta:
        model = Comment
        fields = ["comment"]

    def checkComment(self, request, post):
        """Validate and persist the comment, attaching the requesting beaver
        and the target post. Returns True on success, False if invalid."""
        if not self.is_valid():
            return False
        new_comment = self.save(commit=False)
        new_comment.comment_creator = getBeaverInstance(request)
        new_comment.post = post
        new_comment.save()
        return True
|
<reponame>mdavidsaver/yascaif<gh_stars>1-10
package yascaif.cli;
import java.util.List;
import yascaif.CA;
/**
 * A CLI sub-command that operates on a list of process variable names
 * through a {@link CA} helper.
 */
public interface Command {
	/**
	 * Execute this command.
	 *
	 * @param ca  channel-access helper used to talk to the PVs
	 * @param PVs names of the process variables to operate on
	 */
	public void process(CA ca, List<String> PVs);
}
|
#!/bin/bash
#
# Copy files to the bastion
# Prepare the bastion to configure the rest of the VMs
#
#
# Default to the conventional bastion hostname under the OCP3 base domain.
BASTION_HOST=${BASTION_HOST:-bastion.${OCP3_BASE_DOMAIN}}
# Space-separated patterns; left unquoted below so the shell expands the globs.
INSTANCE_FILES="instance_hosts.sh ch4.8.3*_all.sh ch4.8.4_*.sh"
# Copy the setup scripts to the bastion's home directory...
scp -i ${OCP3_KEY_FILE} ${INSTANCE_FILES} cloud-user@${BASTION_HOST}:
# ...then run the host-inventory script remotely.
ssh -i ${OCP3_KEY_FILE} cloud-user@${BASTION_HOST} sh ./instance_hosts.sh
|
<reponame>bogdanbebic/InverseSquareRoot
// Doxygen-generated search index: [lowercase key, [display name,
// [target page, in-frame flag, scope]]]. Do not edit by hand.
var searchData=
[
  ['inv_5fsqrt',['inv_sqrt',['../namespaceinv__sqrt.html',1,'']]],
  ['inverse_5fsqrt_2ecpp',['inverse_sqrt.cpp',['../inverse__sqrt_8cpp.html',1,'']]],
  ['inverse_5fsqrt_2eh',['inverse_sqrt.h',['../inverse__sqrt_8h.html',1,'']]]
];
|
<filename>plugin/trino-memory/src/main/java/io/trino/plugin/memory/MemoryMetadata.java
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.memory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import io.airlift.slice.Slice;
import io.trino.spi.HostAddress;
import io.trino.spi.Node;
import io.trino.spi.NodeManager;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorInsertTableHandle;
import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorOutputMetadata;
import io.trino.spi.connector.ConnectorOutputTableHandle;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTableHandle;
import io.trino.spi.connector.ConnectorTableLayout;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.ConnectorTableProperties;
import io.trino.spi.connector.ConnectorViewDefinition;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.LimitApplicationResult;
import io.trino.spi.connector.RetryMode;
import io.trino.spi.connector.SampleApplicationResult;
import io.trino.spi.connector.SampleType;
import io.trino.spi.connector.SchemaNotFoundException;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.SchemaTablePrefix;
import io.trino.spi.connector.ViewNotFoundException;
import io.trino.spi.security.TrinoPrincipal;
import io.trino.spi.statistics.ComputedStatistics;
import io.trino.spi.statistics.Estimate;
import io.trino.spi.statistics.TableStatistics;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.NOT_FOUND;
import static io.trino.spi.StandardErrorCode.SCHEMA_NOT_EMPTY;
import static io.trino.spi.connector.RetryMode.NO_RETRIES;
import static io.trino.spi.connector.SampleType.SYSTEM;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
@ThreadSafe
/**
 * Metadata implementation for the Memory connector. Schemas, tables, views
 * and per-node data-fragment statistics are all tracked in process memory;
 * mutable state is guarded by synchronizing on this instance.
 */
public class MemoryMetadata
        implements ConnectorMetadata
{
    public static final String SCHEMA_NAME = "default";

    private final NodeManager nodeManager;
    // Guarded by "this": schema names, table-id allocation, and the
    // id <-> name <-> info mappings must stay mutually consistent.
    private final List<String> schemas = new ArrayList<>();
    private final AtomicLong nextTableId = new AtomicLong();
    private final Map<SchemaTableName, Long> tableIds = new HashMap<>();
    private final Map<Long, TableInfo> tables = new HashMap<>();
    private final Map<SchemaTableName, ConnectorViewDefinition> views = new HashMap<>();

    @Inject
    public MemoryMetadata(NodeManager nodeManager)
    {
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.schemas.add(SCHEMA_NAME);
    }

    @Override
    public synchronized List<String> listSchemaNames(ConnectorSession session)
    {
        return ImmutableList.copyOf(schemas);
    }

    @Override
    public synchronized void createSchema(ConnectorSession session, String schemaName, Map<String, Object> properties, TrinoPrincipal owner)
    {
        if (schemas.contains(schemaName)) {
            throw new TrinoException(ALREADY_EXISTS, format("Schema [%s] already exists", schemaName));
        }
        schemas.add(schemaName);
    }

    @Override
    public synchronized void dropSchema(ConnectorSession session, String schemaName)
    {
        if (!schemas.contains(schemaName)) {
            throw new TrinoException(NOT_FOUND, format("Schema [%s] does not exist", schemaName));
        }
        // Refuse to drop a schema that still contains tables.
        boolean tablesExist = tables.values().stream()
                .anyMatch(table -> table.getSchemaName().equals(schemaName));
        if (tablesExist) {
            throw new TrinoException(SCHEMA_NOT_EMPTY, "Schema not empty: " + schemaName);
        }
        verify(schemas.remove(schemaName));
    }

    @Override
    public synchronized ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName schemaTableName)
    {
        Long id = tableIds.get(schemaTableName);
        if (id == null) {
            return null;
        }
        return new MemoryTableHandle(id);
    }

    @Override
    public synchronized ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        MemoryTableHandle handle = (MemoryTableHandle) tableHandle;
        return tables.get(handle.getId()).getMetadata();
    }

    @Override
    public synchronized List<SchemaTableName> listTables(ConnectorSession session, Optional<String> schemaName)
    {
        // Tables and views are listed together, optionally filtered by schema.
        ImmutableList.Builder<SchemaTableName> builder = ImmutableList.builder();
        views.keySet().stream()
                .filter(table -> schemaName.map(table.getSchemaName()::contentEquals).orElse(true))
                .forEach(builder::add);
        tables.values().stream()
                .filter(table -> schemaName.map(table.getSchemaName()::contentEquals).orElse(true))
                .map(TableInfo::getSchemaTableName)
                .forEach(builder::add);
        return builder.build();
    }

    @Override
    public synchronized Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        MemoryTableHandle handle = (MemoryTableHandle) tableHandle;
        return tables.get(handle.getId())
                .getColumns().stream()
                .collect(toImmutableMap(ColumnInfo::getName, ColumnInfo::getHandle));
    }

    @Override
    public synchronized ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
    {
        MemoryTableHandle handle = (MemoryTableHandle) tableHandle;
        return tables.get(handle.getId())
                .getColumn(columnHandle)
                .getMetadata();
    }

    @Override
    public synchronized Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
    {
        return tables.values().stream()
                .filter(table -> prefix.matches(table.getSchemaTableName()))
                .collect(toImmutableMap(TableInfo::getSchemaTableName, handle -> handle.getMetadata().getColumns()));
    }

    @Override
    public synchronized void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        MemoryTableHandle handle = (MemoryTableHandle) tableHandle;
        TableInfo info = tables.remove(handle.getId());
        if (info != null) {
            tableIds.remove(info.getSchemaTableName());
        }
    }

    @Override
    public synchronized void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
    {
        checkSchemaExists(newTableName.getSchemaName());
        checkTableNotExists(newTableName);

        MemoryTableHandle handle = (MemoryTableHandle) tableHandle;
        long tableId = handle.getId();

        // Replace the TableInfo and remap both name->id entries atomically
        // (all mutation happens inside this synchronized method).
        TableInfo oldInfo = tables.get(tableId);
        tables.put(tableId, new TableInfo(tableId, newTableName.getSchemaName(), newTableName.getTableName(), oldInfo.getColumns(), oldInfo.getDataFragments()));

        tableIds.remove(oldInfo.getSchemaTableName());
        tableIds.put(newTableName, tableId);
    }

    @Override
    public synchronized void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting)
    {
        // CREATE TABLE is implemented as a zero-fragment CTAS.
        ConnectorOutputTableHandle outputTableHandle = beginCreateTable(session, tableMetadata, Optional.empty(), NO_RETRIES);
        finishCreateTable(session, outputTableHandle, ImmutableList.of(), ImmutableList.of());
    }

    @Override
    public synchronized MemoryOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional<ConnectorTableLayout> layout, RetryMode retryMode)
    {
        checkSchemaExists(tableMetadata.getTable().getSchemaName());
        checkTableNotExists(tableMetadata.getTable());
        long tableId = nextTableId.getAndIncrement();
        Set<Node> nodes = nodeManager.getRequiredWorkerNodes();
        checkState(!nodes.isEmpty(), "No Memory nodes available");

        // Column handles are positional: column i gets MemoryColumnHandle(i).
        ImmutableList.Builder<ColumnInfo> columns = ImmutableList.builder();
        for (int i = 0; i < tableMetadata.getColumns().size(); i++) {
            ColumnMetadata column = tableMetadata.getColumns().get(i);
            columns.add(new ColumnInfo(new MemoryColumnHandle(i), column.getName(), column.getType()));
        }

        tableIds.put(tableMetadata.getTable(), tableId);
        tables.put(tableId, new TableInfo(
                tableId,
                tableMetadata.getTable().getSchemaName(),
                tableMetadata.getTable().getTableName(),
                columns.build(),
                new HashMap<>()));

        return new MemoryOutputTableHandle(tableId, ImmutableSet.copyOf(tableIds.values()));
    }

    // Throws SchemaNotFoundException unless the schema was created earlier.
    private void checkSchemaExists(String schemaName)
    {
        if (!schemas.contains(schemaName)) {
            throw new SchemaNotFoundException(schemaName);
        }
    }

    // Rejects a name already taken by either a table or a view.
    private void checkTableNotExists(SchemaTableName tableName)
    {
        if (tableIds.containsKey(tableName)) {
            throw new TrinoException(ALREADY_EXISTS, format("Table [%s] already exists", tableName));
        }
        if (views.containsKey(tableName)) {
            throw new TrinoException(ALREADY_EXISTS, format("View [%s] already exists", tableName));
        }
    }

    @Override
    public synchronized Optional<ConnectorOutputMetadata> finishCreateTable(ConnectorSession session, ConnectorOutputTableHandle tableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
    {
        requireNonNull(tableHandle, "tableHandle is null");
        MemoryOutputTableHandle memoryOutputHandle = (MemoryOutputTableHandle) tableHandle;

        updateRowsOnHosts(memoryOutputHandle.getTable(), fragments);
        return Optional.empty();
    }

    @Override
    public synchronized MemoryInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle, List<ColumnHandle> columns, RetryMode retryMode)
    {
        MemoryTableHandle memoryTableHandle = (MemoryTableHandle) tableHandle;
        return new MemoryInsertTableHandle(memoryTableHandle.getId(), ImmutableSet.copyOf(tableIds.values()));
    }

    @Override
    public synchronized Optional<ConnectorOutputMetadata> finishInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
    {
        requireNonNull(insertHandle, "insertHandle is null");
        MemoryInsertTableHandle memoryInsertHandle = (MemoryInsertTableHandle) insertHandle;

        updateRowsOnHosts(memoryInsertHandle.getTable(), fragments);
        return Optional.empty();
    }

    @Override
    public synchronized void createView(ConnectorSession session, SchemaTableName viewName, ConnectorViewDefinition definition, boolean replace)
    {
        checkSchemaExists(viewName.getSchemaName());
        if (tableIds.containsKey(viewName)) {
            throw new TrinoException(ALREADY_EXISTS, "Table already exists: " + viewName);
        }

        if (replace) {
            views.put(viewName, definition);
        }
        else if (views.putIfAbsent(viewName, definition) != null) {
            throw new TrinoException(ALREADY_EXISTS, "View already exists: " + viewName);
        }
    }

    @Override
    public synchronized void renameView(ConnectorSession session, SchemaTableName viewName, SchemaTableName newViewName)
    {
        checkSchemaExists(newViewName.getSchemaName());

        if (tableIds.containsKey(newViewName)) {
            throw new TrinoException(ALREADY_EXISTS, "Table already exists: " + newViewName);
        }

        if (views.containsKey(newViewName)) {
            throw new TrinoException(ALREADY_EXISTS, "View already exists: " + newViewName);
        }

        // NOTE(review): if viewName is absent this stores a null definition
        // under newViewName; the engine is expected to verify the source view
        // exists before calling -- confirm.
        views.put(newViewName, views.remove(viewName));
    }

    @Override
    public synchronized void dropView(ConnectorSession session, SchemaTableName viewName)
    {
        if (views.remove(viewName) == null) {
            throw new ViewNotFoundException(viewName);
        }
    }

    @Override
    public synchronized List<SchemaTableName> listViews(ConnectorSession session, Optional<String> schemaName)
    {
        return views.keySet().stream()
                .filter(viewName -> schemaName.map(viewName.getSchemaName()::equals).orElse(true))
                .collect(toImmutableList());
    }

    @Override
    public synchronized Map<SchemaTableName, ConnectorViewDefinition> getViews(ConnectorSession session, Optional<String> schemaName)
    {
        SchemaTablePrefix prefix = schemaName.map(SchemaTablePrefix::new).orElseGet(SchemaTablePrefix::new);
        return ImmutableMap.copyOf(Maps.filterKeys(views, prefix::matches));
    }

    @Override
    public synchronized Optional<ConnectorViewDefinition> getView(ConnectorSession session, SchemaTableName viewName)
    {
        return Optional.ofNullable(views.get(viewName));
    }

    /**
     * Merge freshly-written per-node fragment statistics into the table's
     * existing fragment map.
     */
    private void updateRowsOnHosts(long tableId, Collection<Slice> fragments)
    {
        TableInfo info = tables.get(tableId);
        // Fix: checkState evaluates its message arguments eagerly, so the old
        // message args (info.getSchemaName()/getTableName()) threw an NPE when
        // info was null instead of the intended IllegalStateException.
        checkState(info != null, "Uninitialized tableId [%s]", tableId);

        Map<HostAddress, MemoryDataFragment> dataFragments = new HashMap<>(info.getDataFragments());
        for (Slice fragment : fragments) {
            MemoryDataFragment memoryDataFragment = MemoryDataFragment.fromSlice(fragment);
            dataFragments.merge(memoryDataFragment.getHostAddress(), memoryDataFragment, MemoryDataFragment::merge);
        }

        tables.put(tableId, new TableInfo(tableId, info.getSchemaName(), info.getTableName(), info.getColumns(), dataFragments));
    }

    @Override
    public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table)
    {
        return new ConnectorTableProperties();
    }

    // NOTE(review): unsynchronized read of 'tables' -- confirm callers only
    // invoke this after metadata mutation has quiesced.
    public List<MemoryDataFragment> getDataFragments(long tableId)
    {
        return ImmutableList.copyOf(tables.get(tableId).getDataFragments().values());
    }

    @Override
    public TableStatistics getTableStatistics(ConnectorSession session, ConnectorTableHandle tableHandle, Constraint constraint)
    {
        // Row count is the sum over all per-node fragments.
        List<MemoryDataFragment> dataFragments = getDataFragments(((MemoryTableHandle) tableHandle).getId());
        long rows = dataFragments.stream()
                .mapToLong(MemoryDataFragment::getRows)
                .sum();
        return TableStatistics.builder()
                .setRowCount(Estimate.of(rows))
                .build();
    }

    @Override
    public Optional<LimitApplicationResult<ConnectorTableHandle>> applyLimit(ConnectorSession session, ConnectorTableHandle handle, long limit)
    {
        MemoryTableHandle table = (MemoryTableHandle) handle;

        // Nothing to do if an equal or tighter limit is already pushed down.
        if (table.getLimit().isPresent() && table.getLimit().getAsLong() <= limit) {
            return Optional.empty();
        }

        return Optional.of(new LimitApplicationResult<>(
                new MemoryTableHandle(table.getId(), OptionalLong.of(limit), OptionalDouble.empty()),
                true,
                true));
    }

    @Override
    public Optional<SampleApplicationResult<ConnectorTableHandle>> applySample(ConnectorSession session, ConnectorTableHandle handle, SampleType sampleType, double sampleRatio)
    {
        MemoryTableHandle table = (MemoryTableHandle) handle;

        // Only SYSTEM sampling is supported, and not after a limit pushdown;
        // repeated pushdown of the same ratio is a no-op.
        if ((table.getSampleRatio().isPresent() && table.getSampleRatio().getAsDouble() == sampleRatio) || sampleType != SYSTEM || table.getLimit().isPresent()) {
            return Optional.empty();
        }

        // Compose with any previously applied ratio.
        return Optional.of(new SampleApplicationResult<>(
                new MemoryTableHandle(table.getId(), table.getLimit(), OptionalDouble.of(table.getSampleRatio().orElse(1) * sampleRatio)),
                true));
    }
}
|
#!/bin/sh
# Start the Celery worker for the flyrig task app (INFO log level).
set -o errexit
# NOTE(review): 'set -o pipefail' is not POSIX sh; confirm the target
# shell (e.g. dash vs bash-as-sh) supports it, or switch to #!/bin/bash.
set -o pipefail
set -o nounset
# Echo commands as they run, for container/CI log visibility.
set -o xtrace

celery -A flyrig.taskapp worker -l INFO
|
-- Average number of orders per customer placed within the last year.
SELECT AVG(CountBooksRead)
FROM (
    -- One row per customer: how many orders they placed in the past year.
    SELECT COUNT(OrderID) AS CountBooksRead
    FROM Orders
    -- NOTE(review): DATEDIFF(YEAR, ...) is SQL Server syntax while CURDATE()
    -- is MySQL; confirm the target dialect accepts both.
    WHERE DATEDIFF (YEAR, OrderCreatedDate, CURDATE()) <= 1
    GROUP BY CustomerID) As booksRead
<reponame>mattwigway/analysis-ui
import {Button, Flex, Heading, Text} from '@chakra-ui/react'
import get from 'lodash/get'
import {useDispatch, useSelector} from 'react-redux'
import {
fetchTravelTimeSurface,
setIsochroneFetchStatus
} from 'lib/actions/analysis'
import {abortFetch} from 'lib/actions/fetch'
import {SyncIcon, XIcon} from 'lib/components/icons'
import {FETCH_TRAVEL_TIME_SURFACE} from 'lib/constants'
import message from 'lib/message'
import {activeOpportunityDataset} from 'lib/modules/opportunity-datasets/selectors'
import selectCurrentProject from 'lib/selectors/current-project'
import selectProfileRequestHasChanged from 'lib/selectors/profile-request-has-changed'
import selectIsochrone from 'lib/selectors/isochrone'
function TitleMessage({fetchStatus, project}) {
const opportunityDataset = useSelector(activeOpportunityDataset)
const isochrone = useSelector(selectIsochrone)
const profileRequestHasChanged = useSelector(selectProfileRequestHasChanged)
let title = 'Analyze results'
if (fetchStatus) title = fetchStatus
else if (!project) title = 'Select a project'
else if (!isochrone) title = 'Compute travel time'
else if (profileRequestHasChanged)
title = 'Results are out of sync with settings'
else if (!opportunityDataset)
title = 'Select a destination layer to see accessibility'
return <Text> {title}</Text>
}
/**
 * Header bar for the analysis panel: shows the current status message and a
 * single action button that either starts a travel-time surface fetch or
 * aborts the one currently in flight.
 */
export default function AnalysisTitle() {
  const dispatch = useDispatch()
  const isochroneFetchStatus = useSelector((s) =>
    get(s, 'analysis.isochroneFetchStatus')
  )
  const currentProject = useSelector(selectCurrentProject)
  // Any truthy fetch status means a request is currently running.
  const isFetchingIsochrone = !!isochroneFetchStatus

  // Cancel the in-flight surface request and clear the fetch-status flag.
  function abort() {
    dispatch(abortFetch({type: FETCH_TRAVEL_TIME_SURFACE}))
    dispatch(setIsochroneFetchStatus(false))
  }

  return (
    <Flex
      align='center'
      borderBottomWidth='1px'
      justify='space-between'
      px={5}
      py={4}
      width='640px'
    >
      <Heading alignItems='center' display='flex' size='md'>
        <TitleMessage
          fetchStatus={isochroneFetchStatus}
          project={currentProject}
        />
      </Heading>
      {isFetchingIsochrone ? (
        // A request is running: offer to abort it.
        <Button
          rightIcon={<XIcon />}
          onClick={abort}
          colorScheme='red'
          key='abort'
          size='lg'
        >
          Abort
        </Button>
      ) : (
        // Idle: offer to (re)fetch; disabled until a project is selected.
        <Button
          isDisabled={!currentProject}
          rightIcon={<SyncIcon />}
          onClick={() => dispatch(fetchTravelTimeSurface())}
          colorScheme='blue'
          size='lg'
          title={
            !currentProject
              ? message('analysis.disableFetch')
              : message('analysis.refresh')
          }
        >
          {message('analysis.refresh')}
        </Button>
      )}
    </Flex>
  )
}
|
<gh_stars>0
# Standard library
import logging
import os
import re
import socket
from argparse import ArgumentParser
from shutil import copyfile, rmtree
# Third-party
import git
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import BaseCommand, CommandError
from django.http.response import Http404
from django.urls import reverse
# First-party/Local
from licenses.git_utils import commit_and_push_changes, setup_local_branch
from licenses.models import LegalCode, TranslationBranch
from licenses.utils import (
init_utils_logger,
relative_symlink,
save_bytes_to_file,
save_redirect,
save_url_as_static_file,
)
# Module-level logger; its level is set from the --verbosity option in handle().
LOG = logging.getLogger(__name__)
# Map Django's --verbosity values (0-3) to stdlib logging levels.
LOG_LEVELS = {
    0: logging.ERROR,
    1: logging.WARNING,
    2: logging.INFO,
    3: logging.DEBUG,
}
def list_open_translation_branches():
    """Return the branch names of all incomplete translation branches."""
    open_branches = TranslationBranch.objects.filter(complete=False)
    return list(open_branches.values_list("branch_name", flat=True))
class Command(BaseCommand):
    """
    Command to push the static files in the build directory to a specified
    branch in cc-licenses-data repository

    Arguments:
        branch_name - Branch name in cc-license-data to pull translations from
            and publish artifacts too.
        list_branches - A list of active branches in cc-licenses-data will be
            displayed

    If no arguments are supplied all cc-licenses-data branches are checked and
    then updated.
    """

    def add_arguments(self, parser: ArgumentParser):
        # All options are optional; with none given, every open translation
        # branch is published (see handle()).
        parser.add_argument(
            "-b",
            "--branch_name",
            help="Translation branch name to pull translations from and push"
            " artifacts to. Use --list_branches to see available branch names."
            " With no option, all active branches are published.",
        )
        parser.add_argument(
            "-l",
            "--list_branches",
            action="store_true",
            help="A list of active translation branches will be displayed.",
        )
        parser.add_argument(
            "--nopush",
            action="store_true",
            help="Update the local branches, but don't push upstream.",
        )
        parser.add_argument(
            "--nogit",
            action="store_true",
            help="Update the local files without any attempt to manage them in"
            " git (implies --nopush)",
        )

    def _quiet(self, *args, **kwargs):
        # No-op sink used to silence output callbacks.
        pass

    def run_clean_output_dir(self):
        """Delete everything in the output dir except .nojekyll and CNAME."""
        output_dir = self.output_dir
        # RE: .nojekyll:
        # https://github.blog/2009-12-29-bypassing-jekyll-on-github-pages/
        # RE: CNAME
        # https://docs.github.com/en/pages/configuring-a-custom-domain-for-your-github-pages-site
        output_dir_items = [
            os.path.join(output_dir, item)
            for item in os.listdir(output_dir)
            if item not in [".nojekyll", "CNAME"]
        ]
        for item in output_dir_items:
            if os.path.isdir(item):
                rmtree(item)
            else:
                os.remove(item)

    def run_django_distill(self):
        """Outputs static files into the output dir."""
        if not os.path.isdir(settings.STATIC_ROOT):
            e = "Static source directory does not exist, run collectstatic"
            raise CommandError(e)
        hostname = socket.gethostname()
        output_dir = self.output_dir

        LOG.debug(f"{hostname}:{output_dir}")
        # Status pages: an index plus one page per open translation branch.
        save_url_as_static_file(
            output_dir,
            url="/dev/status/",
            relpath="status/index.html",
        )
        tbranches = TranslationBranch.objects.filter(complete=False)
        for tbranch_id in tbranches.values_list("id", flat=True):
            relpath = f"status/{tbranch_id}.html"
            LOG.debug(f" {relpath}")
            save_url_as_static_file(
                output_dir,
                url=f"/status/{tbranch_id}/",
                relpath=relpath,
            )

        # Save deed and legalcode pages (plus their symlinks/redirects) for
        # every valid legal-code group, collecting redirect pairs as we go.
        legal_codes = LegalCode.objects.validgroups()
        redirect_pairs = []
        for group in legal_codes.keys():
            LOG.info(f"Publishing {group}")
            LOG.debug(f"{hostname}:{output_dir}")
            for legal_code in legal_codes[group]:
                # deed
                try:
                    (
                        relpath,
                        symlinks,
                        redirects_data,
                    ) = legal_code.get_publish_files("deed")
                    save_url_as_static_file(
                        output_dir,
                        url=legal_code.deed_url,
                        relpath=relpath,
                    )
                    for symlink in symlinks:
                        relative_symlink(output_dir, relpath, symlink)
                    for redirect_data in redirects_data:
                        save_redirect(output_dir, redirect_data)
                    redirect_pairs += legal_code.get_redirect_pairs("deed")
                except Http404 as e:
                    # Deeds for invalid languages are expected to 404; any
                    # other 404 is a real error and is re-raised.
                    if "invalid language" not in str(e):
                        raise
                # legalcode
                (
                    relpath,
                    symlinks,
                    redirects_data,
                ) = legal_code.get_publish_files("legalcode")
                save_url_as_static_file(
                    output_dir,
                    url=legal_code.legal_code_url,
                    relpath=relpath,
                )
                for symlink in symlinks:
                    relative_symlink(output_dir, relpath, symlink)
                for redirect_data in redirects_data:
                    save_redirect(output_dir, redirect_data)
                redirect_pairs += legal_code.get_redirect_pairs("legalcode")

        # Build the nginx rewrite include from the collected redirect pairs,
        # padding columns so the generated file lines up for human readers.
        redirect_pairs.sort(key=lambda x: x[0], reverse=True)
        for i, pair in enumerate(redirect_pairs):
            redirect_pairs[i][0] = re.escape(pair[0])
        widths = [max(map(len, map(str, col))) for col in zip(*redirect_pairs)]
        redirects_include = [
            "# DO NOT EDIT MANUALLY",
            "# This file was generated by the publish command.",
            "# https://github.com/creativecommons/cc-licenses",
        ]
        for regex, replacement in redirect_pairs:
            regex = f"^/{regex.ljust(widths[0])}"
            replacement = replacement.ljust(widths[1])
            redirects_include.append(
                f"rewrite {regex} {replacement} permanent;"
            )
        redirects_include.append("# vim: set ft=nginx")
        redirects_include.append("")
        redirects_include = "\n".join(redirects_include).encode("utf-8")
        redirects_filename = os.path.join(
            self.config_dir, "nginx_language_redirects"
        )
        save_bytes_to_file(redirects_include, redirects_filename)

        LOG.debug(f"{hostname}:{output_dir}")
        save_url_as_static_file(
            output_dir,
            url=reverse("metadata"),
            relpath="licenses/metadata.yaml",
        )

    def run_copy_licenses_rdfs(self):
        # Copy the per-license RDF files from the legacy tree into the output
        # tree, deriving the destination path from the underscore-separated
        # file name.
        hostname = socket.gethostname()
        legacy_dir = self.legacy_dir
        output_dir = self.output_dir
        licenses_rdf_dir = os.path.join(legacy_dir, "rdf-licenses")
        licenses_rdfs = [
            rdf_file
            for rdf_file in os.listdir(licenses_rdf_dir)
            if os.path.isfile(os.path.join(licenses_rdf_dir, rdf_file))
        ]
        licenses_rdfs.sort()
        LOG.info("Copying legal code RDFs")
        LOG.debug(f"{hostname}:{output_dir}")
        for rdf in licenses_rdfs:
            if rdf.endswith(".rdf"):
                name = rdf[:-4]
            else:
                # Skip anything that is not an .rdf file.
                continue
            relative_name = os.path.join(*name.split("_"), "rdf")
            # "xu" is a "user assigned code" meaning "unported"
            # See https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#User-assigned_code_elements. # noqa: E501
            relative_name = relative_name.replace("xu/", "")
            dest_file = os.path.join(output_dir, relative_name)
            os.makedirs(os.path.dirname(dest_file), exist_ok=True)
            copyfile(os.path.join(licenses_rdf_dir, rdf), dest_file)
            LOG.debug(f" {relative_name}")

    def run_copy_meta_rdfs(self):
        # Copy RDF metadata files into output_dir/rdf and create the
        # well-known symlinks (licenses/index.rdf, ns.html, schema.rdf)
        # that legacy URLs expect.
        hostname = socket.gethostname()
        legacy_dir = self.legacy_dir
        output_dir = self.output_dir
        meta_rdf_dir = os.path.join(legacy_dir, "rdf-meta")
        meta_files = [
            meta_file
            for meta_file in os.listdir(meta_rdf_dir)
            if os.path.isfile(os.path.join(meta_rdf_dir, meta_file))
        ]
        meta_files.sort()
        dest_dir = os.path.join(output_dir, "rdf")
        os.makedirs(dest_dir, exist_ok=True)
        LOG.info("Copying RDF information and metadata")
        LOG.debug(f"{hostname}:{output_dir}")
        for meta_file in meta_files:
            dest_relative = os.path.join("rdf", meta_file)
            dest_full = os.path.join(output_dir, dest_relative)
            LOG.debug(f" {dest_relative}")
            copyfile(os.path.join(meta_rdf_dir, meta_file), dest_full)
            # dir_fd + relative targets keep the symlinks portable when the
            # whole output tree is moved or served from another root.
            if meta_file == "index.rdf":
                os.makedirs(
                    os.path.join(output_dir, "licenses"), exist_ok=True
                )
                dir_fd = os.open(output_dir, os.O_RDONLY)
                symlink = os.path.join("licenses", meta_file)
                try:
                    os.symlink(f"../{dest_relative}", symlink, dir_fd=dir_fd)
                    LOG.debug(f" ^{symlink}")
                finally:
                    os.close(dir_fd)
            elif meta_file == "ns.html":
                dir_fd = os.open(output_dir, os.O_RDONLY)
                symlink = meta_file
                try:
                    os.symlink(dest_relative, symlink, dir_fd=dir_fd)
                    LOG.debug(f" ^{symlink}")
                finally:
                    os.close(dir_fd)
            elif meta_file == "schema.rdf":
                dir_fd = os.open(output_dir, os.O_RDONLY)
                symlink = meta_file
                try:
                    os.symlink(dest_relative, symlink, dir_fd=dir_fd)
                    LOG.debug(f" ^{symlink}")
                finally:
                    os.close(dir_fd)

    def run_copy_legal_code_plaintext(self):
        # Copy the .txt legal-code files, routing "by*" files under
        # licenses/ and everything else under publicdomain/.
        hostname = socket.gethostname()
        legacy_dir = self.legacy_dir
        output_dir = self.output_dir
        plaintext_dir = os.path.join(legacy_dir, "legalcode")
        plaintext_files = [
            text_file
            for text_file in os.listdir(plaintext_dir)
            if (
                os.path.isfile(os.path.join(plaintext_dir, text_file))
                and text_file.endswith(".txt")
            )
        ]
        LOG.info("Copying plaintext legal code")
        LOG.debug(f"{hostname}:{output_dir}")
        for text in plaintext_files:
            if text.startswith("by"):
                context = "licenses"
            else:
                context = "publicdomain"
            name = text[:-4]
            relative_name = os.path.join(
                context,
                *name.split("_"),
                "legalcode.txt",
            )
            dest_file = os.path.join(output_dir, relative_name)
            os.makedirs(os.path.dirname(dest_file), exist_ok=True)
            copyfile(os.path.join(plaintext_dir, text), dest_file)
            LOG.debug(f" {relative_name}")

    def distill_and_copy(self):
        # Full rebuild of the output directory: clean, regenerate static
        # pages, then copy all legacy artifacts.
        self.run_clean_output_dir()
        self.run_django_distill()
        self.run_copy_licenses_rdfs()
        self.run_copy_meta_rdfs()
        self.run_copy_legal_code_plaintext()

    def publish_branch(self, branch: str):
        """Workflow for publishing a single branch"""
        LOG.debug(f"Publishing branch {branch}")
        with git.Repo(settings.DATA_REPOSITORY_DIR) as repo:
            setup_local_branch(repo, branch)
            self.distill_and_copy()
            if repo.is_dirty(untracked_files=True):
                # Add any changes and new files
                commit_and_push_changes(
                    repo,
                    "Updated built HTML files",
                    self.relpath,
                    push=self.push,
                )
                # A still-dirty tree after commit means something outside
                # self.relpath changed; refuse to continue.
                if repo.is_dirty(untracked_files=True):
                    raise git.exc.RepositoryDirtyError(
                        settings.DATA_REPOSITORY_DIR,
                        "Repository is dirty. We cannot continue.",
                    )
            else:
                LOG.debug(f"{branch} build dir is up to date.")

    def publish_all(self):
        """Workflow for checking branches and updating their build dir"""
        branches = list_open_translation_branches()
        LOG.info(
            f"Checking and updating build dirs for {len(branches)}"
            " translation branches."
        )
        for branch in branches:
            self.publish_branch(branch)

    def handle(self, *args, **options):
        # Entry point: validate configuration, then dispatch on the options
        # (list branches / no-git local build / one branch / all branches).
        LOG.setLevel(LOG_LEVELS[int(options["verbosity"])])
        init_utils_logger(LOG)
        self.options = options
        self.output_dir = os.path.abspath(settings.DISTILL_DIR)
        self.config_dir = os.path.abspath(
            os.path.join(self.output_dir, "..", "config")
        )
        self.legacy_dir = os.path.abspath(settings.LEGACY_DIR)
        git_dir = os.path.abspath(settings.DATA_REPOSITORY_DIR)
        # The output dir must live inside the data repository so builds can
        # be committed/pushed from it.
        if not self.output_dir.startswith(git_dir):
            raise ImproperlyConfigured(
                "In Django settings, DISTILL_DIR must be inside"
                f" DATA_REPOSITORY_DIR, but DISTILL_DIR={self.output_dir} is"
                f" outside DATA_REPOSITORY_DIR={git_dir}."
            )
        self.relpath = os.path.relpath(self.output_dir, git_dir)
        self.push = not options["nopush"]
        if options.get("list_branches"):
            branches = list_open_translation_branches()
            LOG.debug("Which branch are we publishing to?")
            for branch in branches:
                LOG.debug(branch)
        elif options.get("nogit"):
            self.distill_and_copy()
        elif options.get("branch_name"):
            self.publish_branch(options["branch_name"])
        else:
            self.publish_all()
|
#!/bin/bash
# Launch a dedicated Factorio server, creating the save file on first run.
# FIXES: fail fast on errors/unset variables (the original silently continued
# past a failed map creation), quote all path expansions, and reuse ${GAME}
# for --create instead of a duplicated hard-coded file name.
set -euo pipefail

GAME="packerfactorio.zip"

echo "Starting server with savefile: ${GAME}"
if [ ! -f "./save/${GAME}" ]
then
    echo "Save not found, creating new map"
    ./bin/x64/factorio --create "./save/${GAME}"
fi
./bin/x64/factorio --start-server "./save/${GAME}" \
    --server-settings ./config/server-settings.json
|
#!/bin/bash
# Build the kubedb mysql-tools Docker image (bundles the osm binary).
# FIXES: canonical option ordering `set -xeuo pipefail` (the original
# `-xeou pipefail` only works by accident of bash's option parsing),
# quoted expansions, and `"$@"` so arguments with spaces survive.
set -xeuo pipefail

GOPATH=$(go env GOPATH)
REPO_ROOT=$GOPATH/src/github.com/kubedb/mysql

source "$REPO_ROOT/hack/libbuild/common/lib.sh"
source "$REPO_ROOT/hack/libbuild/common/kubedb_image.sh"

DOCKER_REGISTRY=${DOCKER_REGISTRY:-kubedb}
IMG=mysql-tools
DB_VERSION=8.0.3
TAG="$DB_VERSION"
OSM_VER=${OSM_VER:-0.9.1}

DIST=$REPO_ROOT/dist
mkdir -p "$DIST"

build() {
    pushd "$REPO_ROOT/hack/docker/mysql-tools/$DB_VERSION"

    # Download osm
    wget "https://cdn.appscode.com/binaries/osm/${OSM_VER}/osm-alpine-amd64"
    chmod +x osm-alpine-amd64
    mv osm-alpine-amd64 osm

    local cmd="docker build --pull -t $DOCKER_REGISTRY/$IMG:$TAG ."
    echo "$cmd"; $cmd

    rm osm
    popd
}

# Dispatch to the requested build target(s); binary_repo comes from lib.sh.
binary_repo "$@"
|
/**
* @typedef {Object} Rate
*
* @property {string} currency
* @property {number} buy
 * @property {number} cell - sell rate (NOTE: the property name looks like a typo for "sell"; confirm against consumers before renaming)
*/
/**
* @typedef {Object} NumberSeparators
*
* @property {string} thousands
* @property {string} decimals
*/
|
#! /usr/bin/env sh
# 2.0.0 data-migration driver: announce and run each rake task in order.
# nounset + -e abort the run on undefined variables or the first failing task.
# FIX: corrected the typo "associtaions" in the OTF-service message.
set -o nounset
set -e

echo "Running 2.0.0 data migrations"

echo "Remove duplicate past status"
bin/rake remove_duplicate_past_status

echo "fix OTF service associations"
bin/rake fix_otf_service_associations

echo "remove invalid identities"
bin/rake data:remove_invalid_identities

echo "Replace arm name special characters"
bin/rake data:replace_arm_name_special_characters

echo "Add service request to dashboard protocols"
bin/rake add_service_request_to_dashboard_protocols

echo "Fix missing ssr ids"
bin/rake data:fix_missing_ssr_ids
|
<filename>app/components/answer-tile.js<gh_stars>1-10
import Ember from 'ember';

/**
 * Tile for a single answer. Exposes `upvote`/`downvote` actions that adjust
 * the answer's score by one and forward the change to the bound
 * `updateAnswer` action.
 */
export default Ember.Component.extend({
  actions: {
    upvote(answer) {
      this._adjustScore(answer, 1);
    },
    downvote(answer) {
      this._adjustScore(answer, -1);
    },
  },

  // Shared helper: send the updated score to the consumer-supplied action.
  _adjustScore(answer, delta) {
    var params = {
      score: answer.get('score') + delta
    };
    this.sendAction('updateAnswer', answer, params);
  }
});
|
import random
import string
import secrets


def generate_verification_code(length):
    """Return a random alphanumeric verification code of ``length`` characters.

    FIX: verification codes are security-sensitive, so this now draws from
    the ``secrets`` module instead of ``random``, whose Mersenne Twister
    output is predictable and unsuitable for security tokens.
    """
    alphabet = string.ascii_letters + string.digits
    code = ''.join(secrets.choice(alphabet) for _ in range(length))
    return code
# Common runner for CIS hardening check scripts: parses arguments, loads the
# per-script configuration, then dispatches to the sourcing script's
# check_config/audit/apply hooks depending on the configured status.
LONG_SCRIPT_NAME=$(basename "$0")
SCRIPT_NAME=${LONG_SCRIPT_NAME%.sh}

# Variable initialization, to avoid crash
CRITICAL_ERRORS_NUMBER=0 # This will be used to see if a script failed, or passed
status=""
forcedstatus=""
SUDO_CMD=""

# Load shared libraries and global configuration when readable.
# FIX: paths quoted so a CIS_ROOT_DIR containing spaces cannot break the tests.
[ -r "$CIS_ROOT_DIR/lib/constants.sh" ] && . "$CIS_ROOT_DIR/lib/constants.sh"
[ -r "$CIS_ROOT_DIR/etc/hardening.cfg" ] && . "$CIS_ROOT_DIR/etc/hardening.cfg"
[ -r "$CIS_ROOT_DIR/lib/common.sh" ] && . "$CIS_ROOT_DIR/lib/common.sh"
[ -r "$CIS_ROOT_DIR/lib/utils.sh" ] && . "$CIS_ROOT_DIR/lib/utils.sh"

# Environment Sanitizing
export PATH='/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'

info "Working on $SCRIPT_NAME"

# Arguments parsing
# FIX: the original `[[ $# > 0 ]]` is a *lexical* string comparison inside
# [[ ]]; use the arithmetic -gt test instead.
while [[ $# -gt 0 ]]; do
    ARG="$1"
    case $ARG in
        --audit-all)
            debug "Audit all specified, setting status to audit regardless of configuration"
            forcedstatus=auditall
            ;;
        --audit)
            if [ "$status" != 'disabled' ] && [ "$status" != 'false' ]; then
                debug "Audit argument detected, setting status to audit"
                forcedstatus=audit
            else
                info "Audit argument passed but script is disabled"
            fi
            ;;
        --sudo)
            SUDO_CMD="sudo -n"
            ;;
        *)
            debug "Unknown option passed"
            ;;
    esac
    shift
done

# Source specific configuration file
if ! [ -r "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg" ] ; then
    # If it doesn't exist, create it with default values
    echo "# Configuration for $SCRIPT_NAME, created from default values on $(date)" > "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg"
    # If create_config is a defined function, execute it.
    # Otherwise, just disable the test by default.
    if type -t create_config | grep -qw function ; then
        create_config >> "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg"
    else
        echo "status=disabled" >> "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg"
    fi
fi
[ -r "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg" ] && . "$CIS_ROOT_DIR/etc/conf.d/$SCRIPT_NAME.cfg"

# Now check configured value for status, and potential cmdline parameter
if [ "$forcedstatus" = "auditall" ] ; then
    # We want to audit even disabled script, so override config value in any case
    status=audit
elif [ "$forcedstatus" = "audit" ] ; then
    # We want to audit only enabled scripts
    if [ "$status" != 'disabled' ] && [ "$status" != 'false' ]; then
        debug "Audit argument detected, setting status to audit"
        status=audit
    else
        info "Audit argument passed but script is disabled"
    fi
fi

# FIX: "$status" quoted — unquoted, a value containing whitespace made the
# -z test a syntax error.
if [ -z "$status" ]; then
    crit "Could not find status variable for $SCRIPT_NAME, considered as disabled"
    exit 2
fi

case $status in
    enabled | true )
        info "Checking Configuration"
        check_config
        info "Performing audit"
        audit # Perform audit
        info "Applying Hardening"
        apply # Perform hardening
        ;;
    audit )
        info "Checking Configuration"
        check_config
        info "Performing audit"
        audit # Perform audit
        ;;
    disabled | false )
        info "$SCRIPT_NAME is disabled, ignoring"
        exit 2 # Means unknown status
        ;;
    *)
        warn "Wrong value for status : $status. Must be [ enabled | true | audit | disabled | false ]"
        ;;
esac

if [ "$CRITICAL_ERRORS_NUMBER" = 0 ]; then
    ok "Check Passed"
    exit 0 # Means ok status
else
    crit "Check Failed"
    exit 1 # Means critical status
fi
|
#!/bin/sh
# Measure cache effectiveness: clear the cache, then time two identical reads.
set -e

# Drop any cached entries so the first read is a guaranteed cache miss.
curl -s http://localhost:8081/posts/clearCache -o /dev/null
# First fetch: populates the cache (slow path); print time-to-first-byte.
curl -s http://localhost:8081/posts/MSG001 -o /dev/null -w "%{time_starttransfer}s\n"
# Second fetch: should be served from the cache (fast path).
curl -s http://localhost:8081/posts/MSG001 -o /dev/null -w "%{time_starttransfer}s\n"
|
#include "gtest/gtest.h"
#include "ScaFES_Communicator.hpp"
#include "ScaFES_Buffer.hpp"
namespace ScaFES_test
{
/*******************************************************************************
******************************************************************************/
/**
 * Test class for the class 'Buffer'.
 *
 * Holds two independent parameter sets: set A (rank 0, 83 elements,
 * skeleton concept on) builds an arbitrary buffer plus an identically
 * constructed copy, set B (rank 1, 57 elements, skeleton concept off)
 * builds a second, different buffer. Tests can compare mObjA vs. mObjCopyA
 * (equal construction) and mObjA vs. mObjB (different construction).
 */
template<typename CT>
class BufferTest : public testing::Test
{
public:
    /*----------------------------------------------------------------------
    | LIFECYCLE
    ----------------------------------------------------------------------*/
    /** Constructor. */
    BufferTest();

    /** Copy constructor. */
    BufferTest(BufferTest<CT> const&) = delete;

    /** Assignment operator. */
    BufferTest& operator= (BufferTest<CT> const&) = delete;

    /** Destructor. */
    ~BufferTest() = default;

public:
    /*----------------------------------------------------------------------
    | Member variables.
    ----------------------------------------------------------------------*/
    // Independent member variables(A).
    /** Communicator for parameter set A. */
    ScaFES::Communicator mMyWorldA;
    /** Id of the neighbouring partition for set A. */
    int mIdNeighPartitionA;
    /** Number of elements in buffer A. */
    int mSizeBufferA;
    /** Skeleton-concept flag for set A (declared int, initialised from a bool). */
    int mUseSkeletonConceptA;

    /*--------------------------------------------------------------------*/
    // Variables depending on above variables(A).

    /*--------------------------------------------------------------------*/
    // Independent member variables(B).
    /** Communicator for parameter set B. */
    ScaFES::Communicator mMyWorldB;
    /** Id of the neighbouring partition for set B. */
    int mIdNeighPartitionB;
    /** Number of elements in buffer B. */
    int mSizeBufferB;
    /** Skeleton-concept flag for set B. */
    int mUseSkeletonConceptB;

    /*--------------------------------------------------------------------*/
    /** Arbitrary element 'a'. */
    ScaFES::Buffer<CT> mObjA;

    /** 1:1 copy of element 'a'. */
    ScaFES::Buffer<CT> mObjCopyA;

    /** Another arbitrary element 'b'. */
    ScaFES::Buffer<CT> mObjB;
};
// Register the parameterised test case so concrete element types can be
// instantiated elsewhere.
// NOTE(review): TYPED_TEST_CASE_P is the legacy GoogleTest spelling (newer
// releases rename it TYPED_TEST_SUITE_P) — confirm the gtest version in use
// before modernising.
TYPED_TEST_CASE_P(BufferTest);

/*******************************************************************************
 ******************************************************************************/
/**
 * Default constructor: initialises both parameter sets and the three
 * buffers derived from them. mObjCopyA is built from exactly the same
 * arguments as mObjA, so the two should compare equal.
 */
template<typename CT>
inline BufferTest<CT>::BufferTest()
    : mMyWorldA()
    , mIdNeighPartitionA(0)
    , mSizeBufferA(83)
    , mUseSkeletonConceptA(true)
    , mMyWorldB()
    , mIdNeighPartitionB(1)
    , mSizeBufferB(57)
    , mUseSkeletonConceptB(false)
    , mObjA(ScaFES::Buffer<CT>(mMyWorldA,
                               mIdNeighPartitionA,
                               mSizeBufferA,
                               mUseSkeletonConceptA))
    , mObjCopyA(ScaFES::Buffer<CT>(mMyWorldA,
                                   mIdNeighPartitionA,
                                   mSizeBufferA,
                                   mUseSkeletonConceptA))
    , mObjB(ScaFES::Buffer<CT>(mMyWorldB,
                               mIdNeighPartitionB,
                               mSizeBufferB,
                               mUseSkeletonConceptB))
{ }
} // End of namespace. //
|
import { ECSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECSClient";
import { ListTaskDefinitionFamiliesRequest, ListTaskDefinitionFamiliesResponse } from "../models/models_0";
import { Command as $Command } from "@aws-sdk/smithy-client";
import { Handler, MiddlewareStack, HttpHandlerOptions as __HttpHandlerOptions, MetadataBearer as __MetadataBearer } from "@aws-sdk/types";
/**
 * Input for {@link ListTaskDefinitionFamiliesCommand}: identical to the
 * generated {@link ListTaskDefinitionFamiliesRequest} model.
 */
export interface ListTaskDefinitionFamiliesCommandInput extends ListTaskDefinitionFamiliesRequest {
}
/**
 * Output of {@link ListTaskDefinitionFamiliesCommand}: the generated
 * {@link ListTaskDefinitionFamiliesResponse} model plus response metadata
 * (`__MetadataBearer`).
 */
export interface ListTaskDefinitionFamiliesCommandOutput extends ListTaskDefinitionFamiliesResponse, __MetadataBearer {
}
/**
 * <p>Returns a list of task definition families that are registered to your account (which
 *   may include task definition families that no longer have any <code>ACTIVE</code> task
 *   definition revisions).</p>
 *          <p>You can filter out task definition families that do not contain any
 *     <code>ACTIVE</code> task definition revisions by setting the <code>status</code>
 *   parameter to <code>ACTIVE</code>. You can also filter the results with the
 *     <code>familyPrefix</code> parameter.</p>
 * @example
 * Use a bare-bones client and the command you need to make an API call.
 * ```javascript
 * import { ECSClient, ListTaskDefinitionFamiliesCommand } from "@aws-sdk/client-ecs"; // ES Modules import
 * // const { ECSClient, ListTaskDefinitionFamiliesCommand } = require("@aws-sdk/client-ecs"); // CommonJS import
 * const client = new ECSClient(config);
 * const command = new ListTaskDefinitionFamiliesCommand(input);
 * const response = await client.send(command);
 * ```
 *
 * @see {@link ListTaskDefinitionFamiliesCommandInput} for command's `input` shape.
 * @see {@link ListTaskDefinitionFamiliesCommandOutput} for command's `response` shape.
 * @see {@link ECSClientResolvedConfig | config} for ECSClient's `config` shape.
 *
 */
export declare class ListTaskDefinitionFamiliesCommand extends $Command<ListTaskDefinitionFamiliesCommandInput, ListTaskDefinitionFamiliesCommandOutput, ECSClientResolvedConfig> {
    /** The request parameters this command was constructed with. */
    readonly input: ListTaskDefinitionFamiliesCommandInput;
    constructor(input: ListTaskDefinitionFamiliesCommandInput);
    /**
     * @internal
     */
    resolveMiddleware(clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>, configuration: ECSClientResolvedConfig, options?: __HttpHandlerOptions): Handler<ListTaskDefinitionFamiliesCommandInput, ListTaskDefinitionFamiliesCommandOutput>;
    private serialize;
    private deserialize;
}
|
/* **** Notes
Convert.
//*/
# define CAR
# include "../../../incl/config.h"
/*
 * Validate the four pointer arguments, check that both tables report the
 * same ct() value, then delegate the conversion to cv_l_r().
 *
 * Returns 0 on any NULL argument or ct() mismatch; otherwise whatever
 * cv_l_r() returns.
 *
 * NOTE(review): the semantics of ct() and cv_l_r() are defined elsewhere —
 * ct() presumably yields a table length/identifier; confirm before relying
 * on this description.
 */
signed(__cdecl cv_l(signed char(*di_tbl),signed char(*si_tbl),signed char(*di),signed char(*si))) {
    /* **** DATA, BSS and STACK */
    /* NOTE(review): 'i' and 'flag' are declared but never used in this body. */
    auto signed i,r;
    auto signed short flag;
    /* **** CODE/TEXT */
    /* Reject any NULL pointer up front. */
    if(!di_tbl) return(0x00);
    if(!si_tbl) return(0x00);
    if(!di) return(0x00);
    if(!si) return(0x00);
    r = ct(di_tbl);
    /* XOR is non-zero iff the two ct() values differ. */
    if(r^(ct(si_tbl))) return(0x00);
    return(cv_l_r(r,di_tbl,si_tbl,di,si));
}
|
/*
* @Author: dang
* @Date: 2021-04-08 16:16:02
* @LastEditTime: 2021-10-19 15:26:22
* @LastEditors: Please set LastEditors
* @Description: A worm
* @FilePath: \iot_gxhy_reservoirdam_web\src\api\base.js
*/
import request from '@/utils/request'
// Fetch administrative regions
export const BASE_API_6 = process.env.VUE_APP_BASE_API_6 // yunw
export const BASE_API_2 = process.env.VUE_APP_BASE_API_2 // yunw
// let outNet = "http://221.222.252.224:18882"; // external-network address
const outNet = 'http://172.16.17.32:8826' // production address
// let outNet = "http://192.168.1.103:18882"; // local
export const outNetUrl = `${outNet}/GxwlSk/file/` // online preview
export const outNetZip = `${outNet}/GxwlSk/zip/` // zip download
// Local-development alternatives (kept for reference):
// export const previewUrl = 'http://192.168.1.103:19001/img/' // local image/video preview
// export const videoUrl = 'http://192.168.1.103:19001/video/' // video
// export const audioUrl = 'http://192.168.1.103:19001/audio/' // audio
// // export const xcPointUrl = "http://192.168.1.103:18088/app/uploadFile"; // patrol-point image upload endpoint
// export const xcPointUrl = 'http://192.168.1.103:8008/upload' // patrol-point image upload endpoint
// export const demo =
//   'http://192.168.1.103:8826/GxwlSk/file/sk_info_template.xlspreviewFileUrl' // template download URL
// export const locationUrl =
//   'http://47.107.44.149:8009' // subsystem redirect URL
export const locationUrl =
  'http://192.168.1.110:8008' // subsystem redirect URL
// Production endpoints
// FIX: svgUrl previously read "http://192.168.127.12/:19001/..." — the stray
// "/" before the port made the URL unreachable; aligned with the sibling
// :19001 URLs below.
export const svgUrl = 'http://192.168.127.12:19001/file/dam.svg' // dam SVG preview
export const previewUrl = 'http://192.168.127.12:19001/img/' // image preview
export const previewFileUrl = 'http://192.168.127.12:19001/file/' // file preview
export const videoUrl = 'http://47.107.44.149:19001/video/' // video
export const audioUrl = 'http://192.168.127.12:19001/audio/' // audio
export const xcPointUrl = 'http://47.107.44.149:8008/upload' // patrol-point image upload endpoint
export const demo =
  'http://192.168.127.12:19001/file/sk_info_template.xls' // template download URL
export const previewUrlFile = 'http://172.16.17.32:8009/' // WPS preview URL for reservoir documents
// export const previewUrl = 'http://192.168.1.103:19001/img/' // 预览图片
// export const previewFileUrl = 'http://192.168.1.103:19001/file/' // 预览文件
// export const videoUrl = 'http://192.168.1.103:19001/video/' // 视频
// export const audioUrl = 'http://192.168.1.103:19001/audio/' // 音频
// export const xcPointUrl = 'http://192.168.1.103:8008/upload' // 巡查点位图片上传地址
// export const demo =
// 'http://192.168.127.12:19001/file/sk_info_template.xls' // 下载模板地址
// export const previewUrlFile = 'http://172.16.17.32:8009/' // 水库资料wps预览文件地址
// 1.199
// export const previewUrl = "http://192.168.1.199:18882/GxwlSk/img/"; //龙哥本地预览, 预览视频
// export const videoUrl = 'http://192.168.1.199:18882/GxwlSk/video/'; //视频
// export const audioUrl = 'http://192.168.1.199:18882/GxwlSk/audio/'; //音频
// export const xcPointUrl = 'http://192.168.1.199:18088/app/uploadFile'; //巡查点位图片上传地址
// export const demo = 'http://192.168.1.199:18882/GxwlSk/file/sk_info_template.xls'; //下载模板地址
// Administrative-region management: look up region info by access code.
export function getAdcdbByAccvd(params) {
  return request({
    baseURL: BASE_API_2,
    url: '/web/rsvr/getAdcdbByAccvd',
    method: 'GET',
    params
  })
}
// Administrative-region management: fetch child divisions.
export function getAddvcdTreeList(params) {
  return request({
    baseURL: BASE_API_2,
    url: '/advcd/childs',
    method: 'GET',
    params
  })
}
// Query reservoirs by administrative-division code.
export function getReservoirInfoByPc(params) {
  return request({
    baseURL: BASE_API_2,
    url: '/web/rsvr/getReservoirInfoByPc',
    method: 'GET',
    params
  })
}
// Administrative-division tree.
export function addvnmTree(params) {
  return request({
    baseURL: BASE_API_6,
    url: '/addvnmTree',
    method: 'post',
    data: params
  })
}
// Combined administrative-division + reservoir tree.
export function addvnmAndRsvrTree(params) {
  return request({
    baseURL: BASE_API_6,
    url: '/addvnmAndRsvrTree',
    method: 'post',
    data: params
  })
}
|
import random
import string


def generate_password(length):
    """Generate a random password that contains at least one uppercase
    letter, one lowercase letter and one digit.

    Returns None when length < 8 (too short to satisfy the policy safely).

    FIX: the original referenced ``string`` without importing it, raising
    NameError on every call.
    """
    if length < 8:
        return None
    alphabet = (string.ascii_uppercase
                + string.ascii_lowercase
                + string.digits)
    # Rejection sampling: draw candidates until one contains all three
    # required character classes.
    while True:
        password = ''.join(random.choices(alphabet, k=length))
        if (any(c.isupper() for c in password)
                and any(c.islower() for c in password)
                and any(c.isdigit() for c in password)):
            return password
<filename>app/src/main/java/com/example/android/miwok/NumbersFragment.java
package com.example.android.miwok;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
import static android.content.Context.AUDIO_SERVICE;
/**
 * A simple {@link Fragment} subclass.
 *
 * Displays the list of number words (English/Indonesian) and plays the
 * matching audio clip when an item is tapped, using transient audio focus
 * so other apps' playback can resume afterwards.
 */
public class NumbersFragment extends Fragment {

    /** Player for the currently selected word's audio; null when idle. */
    MediaPlayer mediaPlayer;
    /** System audio manager used to request/abandon audio focus. */
    AudioManager audioManager;

    /** Releases the player as soon as a clip finishes playing. */
    MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mediaPlayer) {
            releaseMediaPlayer();
        }
    };

    /** Pauses, restarts or stops playback in response to audio-focus changes. */
    AudioManager.OnAudioFocusChangeListener mAudioFocusListener = new AudioManager.OnAudioFocusChangeListener() {
        @Override
        public void onAudioFocusChange(int focusChange) {
            switch (focusChange) {
                case AudioManager.AUDIOFOCUS_LOSS:
                    // Focus lost for good: stop and release the player.
                    releaseMediaPlayer();
                    break;
                case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
                case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
                    // Short interruption: pause and rewind so the word
                    // replays from the start when focus returns.
                    // NOTE(review): mediaPlayer is not null-checked here —
                    // confirm a focus callback cannot arrive after release.
                    mediaPlayer.pause();
                    mediaPlayer.seekTo(0);
                    break;
                case AudioManager.AUDIOFOCUS_GAIN:
                    mediaPlayer.start();
                    break;
                default:
                    releaseMediaPlayer();
            }
        }
    };

    public NumbersFragment() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.activity_word_list, container, false);

        // Build the word list: (English, Indonesian, image, audio resource).
        // NOTE(review): "Zero" reuses R.raw.number_ten and R.drawable.color_gray —
        // looks like a copy-paste placeholder; confirm dedicated zero resources.
        final ArrayList<Word> enNumbers = new ArrayList<>();
        enNumbers.add(0, new Word("Zero", "Nol", R.drawable.color_gray, R.raw.number_ten));
        enNumbers.add(1, new Word("One", "Satu", R.drawable.number_one, R.raw.number_one));
        enNumbers.add(2, new Word("Two", "Dua", R.drawable.number_two, R.raw.number_two));
        enNumbers.add(3, new Word("Three", "Tiga", R.drawable.number_three, R.raw.number_three));
        enNumbers.add(4, new Word("Four", "Empat", R.drawable.number_four, R.raw.number_four));
        enNumbers.add(5, new Word("Five", "Lima", R.drawable.number_five, R.raw.number_five));
        enNumbers.add(6, new Word("Six", "Enam", R.drawable.number_six, R.raw.number_six));
        enNumbers.add(7, new Word("Seven", "Tujuh", R.drawable.number_seven, R.raw.number_seven));
        enNumbers.add(8, new Word("Eight", "Delapan", R.drawable.number_eight, R.raw.number_eight));
        enNumbers.add(9, new Word("Nine", "Sembilan", R.drawable.number_nine, R.raw.number_nine));
        enNumbers.add(10, new Word("Ten", "Sepuluh", R.drawable.number_ten, R.raw.number_ten));

        WordAdapter rootNumberView = new WordAdapter(getActivity(), R.layout.list_item, enNumbers, R.color.category_numbers);
        ListView listView = (ListView) rootView.findViewById(R.id.wordListView);
        listView.setPadding(16, 16, 16, 16);
        listView.setAdapter(rootNumberView);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                Word word = enNumbers.get(i);
                // Stop any clip still playing before starting a new one.
                releaseMediaPlayer();
                audioManager = (AudioManager) getActivity().getSystemService(AUDIO_SERVICE);
                // Request transient focus: audio is only needed for a short clip.
                int requestAudioFocus = audioManager.requestAudioFocus(mAudioFocusListener,
                        AudioManager.STREAM_MUSIC,
                        AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
                if (requestAudioFocus == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                    mediaPlayer = MediaPlayer.create(getActivity(), word.getMiwokSound());
                    mediaPlayer.start();
                    mediaPlayer.setOnCompletionListener(mCompletionListener);
                }
            }
        });
        return rootView;
    }

    @Override
    public void onStop() {
        super.onStop();
        // Stop playback when the fragment is no longer visible.
        releaseMediaPlayer();
    }

    /**
     * Clean up the media player by releasing its resources.
     */
    private void releaseMediaPlayer() {
        // If the media player is not null, then it may be currently playing a sound.
        if (mediaPlayer != null) {
            // Regardless of the current state of the media player, release its resources
            // because we no longer need it.
            mediaPlayer.release();

            // Set the media player back to null. For our code, we've decided that
            // setting the media player to null is an easy way to tell that the media player
            // is not configured to play an audio file at the moment.
            mediaPlayer = null;

            audioManager.abandonAudioFocus(mAudioFocusListener);
        }
    }
}
|
const { MessageEmbed } = require("discord.js");
const regions = require("../../data/regions.json");
module.exports = {
name: "guildRegionUpdate",
/**
* @param {import("discord.js").Client} bot
* @param {import("discord.js").Guild} guild
* @param {string} oldRegion
* @param {string} newRegion
*/
async execute(bot, guild, oldRegion, newRegion) {
if (!guild.me.hasPermission("MANAGE_WEBHOOKS")) return;
const webhook = await bot.getWebhook(guild);
if (!webhook) return;
const oldR = regions.find((r) => r.keys.includes(oldRegion.toLowerCase()));
const newR = regions.find((r) => r.keys.includes(newRegion.toLowerCase()));
const embed = new MessageEmbed()
.setTimestamp()
.setColor("ORANGE")
.setTitle("Guild Update: `Region Update`")
.setDescription("Region for this guild was updated")
.addField("Old Region", `${oldRegion}: ${oldR.flag}`)
.addField("New Region", `${newRegion}: ${newR.flag}`);
webhook.send(embed);
},
};
|
# Print "23" once every 1000 seconds, forever.
while :; do
  echo 23
  sleep 1000
done
#!/usr/bin/env bash
# Build the MoSpider image from the Dockerfile in the current directory.
image="cc.momas/momas-mospider:1.0"
docker build -t "$image" .
|
import { logging } from 'protractor';
/** Top-level payload of a journey search: fare groups plus their detail lists. */
export class JourneySearchResponse {
  OutwardOpenPureReturnFare: JourneyResponse[];
  SingleOutward: JourneyResponse[];
  SingleReturn: JourneyResponse[];
  JourneyReturnTimeDetails: JourneyReturnTimeDetails[];
  JourneyFareBreakups: JourneyFareBreakups[];
}
/**
 * Per-passenger-group fare breakdown for a journey.
 *
 * NOTE(review): "childern" is a typo for "children", but it is part of the
 * public shape consumed by callers, so the field names are kept unchanged.
 * (A stray top-level `debugger` statement preceding this class was removed:
 * it halts execution whenever devtools are attached.)
 */
export class JourneyFareBreakups {
  fareBreakupId: number;
  adults: number;
  childern: number;
  perAdultFare: number;
  perChildFare: number;
  totalAdultFare: number;
  totalChildernFare: number;
  totalFare: number;
  railcards: string;
}
/** Timing details for one return leg, keyed back to its fare group. */
export class JourneyReturnTimeDetails {
  fareGroupId: number;
  departureTime: Date;
  arrivalTime: Date;
  duration: number;
  totalChanges: number;
  serviceId: number;
  fareBreakupId: number[];
  enquiryId: number;
  firstInDay: boolean;
  lastInDay: boolean;
  fromStation: string;
  toStation: string;
  journeyType: string;
  journeyDetail: string;
  isServiceDisrupted: boolean;
  isQuickest: boolean;
}
/** One candidate journey with its summary figures and per-fare details. */
export class JourneyResponse {
  departureTime: Date;
  arrivalTime: Date;
  duration: number;
  minimumFare: number;
  totalChanges: number;
  fromStation: string;
  toStation: string;
  journeyType: string;
  journeyDetail: string;
  isCheapest: boolean;
  isOpenReturn: boolean;
  isServiceDisrupted: boolean;
  isQuickest: boolean;
  uId: number;
  enquiryId: number;
  firstInDay: boolean;
  lastInDay: boolean;
  serviceId: number;
  journeyFareDetails: JourneyFareDetails[];
}
/** Details of a single purchasable fare within a journey's fare group. */
export class JourneyFareDetails {
  fareGroupId: number;
  fare: number;
  ticketType: string;
  ticketDetails: string;
  nectarPoint: number;
  isUpgradeToFirstClass: boolean;
  upgradeToFirstClassAmount: string;
  nectarPointsToUpgradeFirstClass: string;
  isDiscountedFare: boolean;
  fareBreakupId: number[];
  // Railcard: string;
  // FarePerson: string;
}
|
<filename>lib/backend/syncersv1/bgpsyncer/bgpsyncer.go
// Copyright (c) 2017-2018 Tigera, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bgpsyncer
import (
apiv3 "github.com/unai-ttxu/libcalico-go/lib/apis/v3"
"github.com/unai-ttxu/libcalico-go/lib/backend/api"
"github.com/unai-ttxu/libcalico-go/lib/backend/model"
"github.com/unai-ttxu/libcalico-go/lib/backend/syncersv1/updateprocessors"
"github.com/unai-ttxu/libcalico-go/lib/backend/watchersyncer"
)
// New creates a new BGP v1 Syncer. Since only etcdv3 supports Watchers for all of
// the required resource types, the WatcherSyncer will go into a polling loop for
// KDD. An optional node name may be supplied. If set, the syncer only watches
// the specified node rather than all nodes.
func New(client api.Client, callbacks api.SyncerCallbacks, node string) api.Syncer {
	// Resource kinds the BGP syncer watches. IP pools and BGP configuration
	// are converted through their v1 update processors; the remaining kinds
	// are consumed as-is. Block affinities are scoped to the given node.
	watched := []watchersyncer.ResourceType{
		{
			ListInterface:   model.ResourceListOptions{Kind: apiv3.KindIPPool},
			UpdateProcessor: updateprocessors.NewIPPoolUpdateProcessor(),
		},
		{
			ListInterface:   model.ResourceListOptions{Kind: apiv3.KindBGPConfiguration},
			UpdateProcessor: updateprocessors.NewBGPConfigUpdateProcessor(),
		},
		{
			ListInterface: model.ResourceListOptions{Kind: apiv3.KindNode},
		},
		{
			ListInterface: model.ResourceListOptions{Kind: apiv3.KindBGPPeer},
		},
		{
			ListInterface: model.BlockAffinityListOptions{Host: node},
		},
	}
	return watchersyncer.New(client, watched, callbacks)
}
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='form',
fields=[
('cmsplugin_ptr', models.OneToOneField(primary_key=True, serialize=False, auto_created=True, related_name='open_form', parent_link=True, to='cms.CMSPlugin')),
('name', models.CharField(max_length=25, default='Demo')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='OpenAccountAnonymous',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
],
),
migrations.CreateModel(
name='OpenAccountDemo',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
],
),
migrations.CreateModel(
name='OpenAccountReal',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
],
),
] |
/*
Copyright 2012 Two Toasters, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twotoasters.android.horizontalimagescroller.widget;
import java.util.List;
import android.content.Context;
import android.util.AttributeSet;
/**
 * A horizontal list view specialised for scrolling images. All image state
 * (current index, image size, highlight/frame flags) is delegated to the
 * attached HorizontalImageScrollerAdapter; every delegating method is a
 * no-op until an adapter has been set.
 */
public class HorizontalImageScroller extends HorizontalListView {

    public HorizontalImageScroller(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSolidColor(getResources().getColor(android.R.color.black));
    }

    /** Marks the image at {@code index} as current and scrolls it into view. */
    public void setCurrentImageIndex(int index) {
        if (mAdapter != null) {
            ((HorizontalImageScrollerAdapter) mAdapter).setCurrentIndex(index);
            setSelection(index);
        }
    }

    /** @return the adapter's current image index, or -1 when no adapter is set. */
    public int getCurrentImageIndex() {
        if (mAdapter != null) {
            return ((HorizontalImageScrollerAdapter) mAdapter).getCurrentIndex();
        }
        return -1;
    }

    /** @return whether the adapter has a current image; false when no adapter is set. */
    public boolean hasCurrentImageIndex() {
        if (mAdapter != null) {
            return ((HorizontalImageScrollerAdapter) mAdapter).hasCurrentIndex();
        }
        return false;
    }

    public void setImageSize(int size) {
        if (mAdapter != null) {
            ((HorizontalImageScrollerAdapter) mAdapter).setImageSize(size);
        }
    }

    public void setHighlightActiveImage(boolean b) {
        if (mAdapter != null) {
            ((HorizontalImageScrollerAdapter) mAdapter).setHighlightActiveImage(b);
        }
    }

    public void setShowImageFrame(boolean b) {
        if (mAdapter != null) {
            ((HorizontalImageScrollerAdapter) mAdapter).setShowImageFrame(b);
        }
    }

    /**
     * Releases the image views held by each scroller's adapter.
     * FIX: previously dereferenced {@code scroller.mAdapter} unconditionally and
     * threw a NullPointerException for scrollers without an adapter; now skips
     * them, consistent with the null guards used by every other method here.
     */
    public static void unbindImageViews(List<HorizontalImageScroller> scrollers) {
        for (HorizontalImageScroller scroller : scrollers) {
            HorizontalImageScrollerAdapter adapter = (HorizontalImageScrollerAdapter) scroller.mAdapter;
            if (adapter != null) {
                adapter.unbindImageViews();
            }
        }
    }

    public int getCurrentX() {
        return mCurrentX;
    }
}
|
import theme from '@nuxt/content-theme-docs'
// Nuxt configuration built on top of @nuxt/content-theme-docs defaults.
export default theme({
  // Pre-render the whole site to static HTML.
  target: 'static',
  docs: {
    primaryColor: '#0080FF',
  },
  content: {
    markdown: {
      remarkPlugins: ['remark-emoji', '@fec/remark-a11y-emoji'],
    },
  },
  // Auto-import Vue components from the components/ directory.
  components: true,
})
|
import base64
def validateAndProcessSignature(ProjectId, SigContent, SigId, SigPurpose, SigType, CertificateType, Document, File):
    """Validate SMS-signature request parameters and render them as labelled text.

    Checks each parameter in declaration order and returns an ``"Error: ..."``
    string for the first invalid one; when everything is valid, returns a
    summary string embedding the processed signature dict.
    """
    # --- validation: first failure wins, in the same order as the parameters ---
    for param_name, param_value in (
        ("ProjectId", ProjectId),
        ("SigContent", SigContent),
        ("SigId", SigId),
    ):
        if not isinstance(param_value, str):
            return "Error: {} should be a string.".format(param_name)

    # NOTE: bool is a subclass of int, so True/False pass these int checks,
    # exactly as they did with the original isinstance/in-list tests.
    if not isinstance(SigPurpose, int) or SigPurpose not in (0, 1):
        return "Error: SigPurpose should be an integer (0 or 1)."
    for param_name, param_value in (("SigType", SigType), ("CertificateType", CertificateType)):
        if not isinstance(param_value, int) or param_value not in (0, 1, 2, 3, 4, 5):
            return "Error: {} should be an integer (0 to 5).".format(param_name)

    for param_name, param_value in (("Document", Document), ("File", File)):
        if not isinstance(param_value, str):
            return "Error: {} should be a string.".format(param_name)

    # --- rendering: map the numeric codes to their human-readable labels ---
    sig_type_labels = {
        0: "Company or enterprise full name or abbreviation",
        1: "App application full name or abbreviation",
        2: "Ministry of Industry and Information Technology website full name or abbreviation",
        3: "Public account or mini-program full name or abbreviation",
        4: "Trademark full name or abbreviation",
        5: "Government/institution/other unit full name or abbreviation",
    }
    certificate_labels = {
        0: "Unified social credit code/enterprise business license/organization code certificate",
        1: "Screenshot of the application store backend developer management",
        2: "Screenshot of the successful filing by the filing service provider",
        3: "Screenshot of the management interface of the public account or mini-program",
        4: "Trademark registration certificate",
        5: "Organization code certificate, social credit code certificate",
    }
    # Insertion order matters: str(dict) must match the original output exactly.
    processedSignature = {
        "ProjectId": ProjectId,
        "SigContent": SigContent,
        "SigId": SigId,
        "SigPurpose": "Self-use" if SigPurpose == 0 else "Others' use",
        "SigType": sig_type_labels[SigType],
        "CertificateType": certificate_labels[CertificateType],
        "Document": Document,
        "File": File,
    }
    return "Input parameters are valid. Processed signature information: " + str(processedSignature)
module Kudzu
  class Agent
    # A single outbound reference (link) captured while crawling a page.
    class Reference < Kudzu::Model::Base
      include Kudzu::Model::Link

      # url   - the referenced URL
      attr_accessor :url
      # title - the link's title / anchor text
      attr_accessor :title
    end
  end
end
|
#!/usr/bin/env bash
set -e
# Resolve one level of symlink for "$1".
# `type -p greadlink readlink | head -1` prefers GNU greadlink when installed
# (e.g. via Homebrew on macOS) and falls back to the system readlink.
resolve_link() {
  $(type -p greadlink readlink | head -1) "$1"
}
# Print the absolute directory containing "$1", following chained symlinks.
# Intended to be called inside command substitution, so the `cd`s do not leak
# into the caller; `cd "$cwd"` restores the directory after `pwd` (whose output
# is what the substitution captures) for safety anyway.
abs_dirname() {
  local cwd="$(pwd)"
  local path="$1"
  while [ -n "$path" ]; do
    cd "${path%/*}"                          # enter the directory part of the path
    local name="${path##*/}"                 # basename of the path
    path="$(resolve_link "$name" || true)"   # empty when not a symlink -> loop ends
  done
  pwd
  cd "$cwd"
}
# First argument is the installation prefix (e.g. /usr/local); print usage to
# stderr and fail when it is missing.
PREFIX="$1"
if [ -z "$1" ]; then
  { echo "usage: $0 <prefix>"
    echo " e.g. $0 /usr/local"
  } >&2
  exit 1
fi
# Resolve the directory this script lives in, so installation works from any CWD.
BATS_ROOT="$(abs_dirname "$0")"
mkdir -p "$PREFIX"/{bin,libexec,share/man/man{1,7}}
cp -R "$BATS_ROOT"/libexec/* "$PREFIX"/libexec
# Only create the bin/bats symlink when no non-empty file is already there.
if [[ ! -s "$PREFIX"/bin/bats ]]; then
  ln -s "$BATS_ROOT"/libexec/bats "$PREFIX"/bin/bats
fi
cp "$BATS_ROOT"/man/bats.1 "$PREFIX"/share/man/man1
cp "$BATS_ROOT"/man/bats.7 "$PREFIX"/share/man/man7
echo "Installed Bats to $PREFIX/bin/bats"
|
import { SET_CURRENT_BOARD } from '../../actions/types';
// Reducer holding the id of the board the user is currently viewing
// (null until a board has been selected).
const currentBoard = (state = null, action) => {
  if (action.type === SET_CURRENT_BOARD) {
    // payload carries the new board id
    return action.payload;
  }
  return state;
};
export default currentBoard;
|
<filename>OpenRobertaParent/WedoInterpreter/jsGenerated/node_modules/interpreter.interpreter.js<gh_stars>1-10
(function (factory) {
if (typeof module === "object" && typeof module.exports === "object") {
var v = factory(require, exports);
if (v !== undefined) module.exports = v;
}
else if (typeof define === "function" && define.amd) {
define(["require", "exports", "interpreter.state", "interpreter.constants", "interpreter.util"], factory);
}
})(function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var interpreter_state_1 = require("interpreter.state");
var C = require("interpreter.constants");
var U = require("interpreter.util");
var Interpreter = (function () {
// NOTE(review): this file is tsc-generated ES5 output living under node_modules;
// prefer editing the TypeScript sources — changes here will be overwritten.
// Constructor: a fresh interpreter is not terminated and has no termination callback yet.
function Interpreter() {
    this.terminated = false;
    this.callbackOnTermination = undefined;
}
/**
 * run the operations.
 *
 * . @param generatedCode argument contains the operations and the function definitions
 * . @param native implementation of the native interface Native to connect a real WeDo robot (or a test instance) to the interpreter
 * . @param cbOnTermination is called when the program has terminated
 */
Interpreter.prototype.run = function (generatedCode, native, cbOnTermination) {
    var _this = this;
    this.terminated = false;
    this.callbackOnTermination = cbOnTermination;
    var stmts = generatedCode[C.OPS];
    var functions = generatedCode[C.FUNCTION_DECLARATION];
    this.n = native;
    // Append a synthetic 'stop' op so the program always terminates cleanly
    // even when the generated code has no explicit stop.
    var stop = {};
    stop[C.OPCODE] = "stop";
    stmts.push(stop);
    this.s = new interpreter_state_1.State(stmts, functions);
    this.timeout(function () { _this.evalOperation(); }, 0); // return to caller. Don't block the UI.
};
/**
 * return true, if the program is terminated
 */
Interpreter.prototype.isTerminated = function () {
    return this.terminated;
};
/**
 * force the termination of the program. The termination takes place, when the NEXT operation should be executed. The delay is not significant.
 * The callbackOnTermination function is called
 */
Interpreter.prototype.terminate = function () {
    // Only sets the flag; evalOperation()'s loop condition performs the actual stop.
    this.terminated = true;
};
/**
 * the central interpreter. It is a stack machine interpreting operations given as JSON objects. The operations are all IMMUTABLE. It
 * - uses the S (state) component to store the state of the interpretation.
 * - uses the N (native) component for accessing hardware sensors and actors
 *
 * if the program is not terminated, it will take one operation after the other and execute it. The property C.OPCODE contains the
 * operation code and is used for switching to the various operations implementations. For some operation codes the implementations is extracted to
 * special functions (repeat, expression) for better readability.
 *
 * The state of the interpreter consists of
 * - a stack of computed values
 * - the actual array of operations to be executed now, including a program counter as index into the array
 * - a stack of operations-arrays (including their program counters), that are actually frozen until the actual array has been interpreted.
 * - a hash map of bindings. A binding map a name as key to an array of values. This implements hiding of variables.
 *
 * The stack of operations-arrays is used to store the history of complex operation as
 * - function call
 * - if-then-else
 * - repeat
 * - wait
 * - If such an operation is executed, it pushes the actual array of operations (including itself) onto the stack of operations-arrays,
 *   set the actual array of operations to a new array of own operations (found at the property C.STMT_LIST) and set the program counter to 0
 * - The program counter of the pushed array of operations keeps pointing to the operation that effected the push. Thus some operations as break
 *   have to increase the program counter (to avoid an endless loop)
 * - if the actual array of operations is exhausted, the last array of operations pushed to the stack of operations-arrays is re-activated
 *
 * The statement C.FLOW_CONTROL is rather complex:
 * - it is used explicitly by 'continue' and 'break' and know about the repeat-statement / repeat-continuation structure (@see eval_repeat())
 * - it is used implicitly by if-then-else, if one branch is selected and is exhausted. It forces the continuation after the if-then-else
 *
 * Each operation code implementation may
 * - create new bindings of values to names (variable declaration)
 * - change the values of the binding (assign)
 * - push and pop values to the stack (expressions)
 * - push and pop to the stack of operations-arrays
 */
Interpreter.prototype.evalOperation = function () {
    var _this = this;
    var s = this.s;
    var n = this.n;
    // _loop_1 is the body of the labelled top-level loop below, extracted by the
    // TypeScript compiler so that per-iteration 'var' captures work in ES5.
    // Returning "break-topLevelLoop" breaks the loop; returning an object makes
    // evalOperation itself return (used to yield to a timeout).
    var _loop_1 = function () {
        s.opLog('actual ops: ');
        var stmt = s.getOp();
        if (stmt === undefined) {
            U.debug('PROGRAM TERMINATED. No ops remaining');
            return "break-topLevelLoop";
        }
        var opCode = stmt[C.OPCODE];
        switch (opCode) {
            case C.ASSIGN_STMT: {
                var name_1 = stmt[C.NAME];
                s.setVar(name_1, s.pop());
                break;
            }
            case C.CLEAR_DISPLAY_ACTION: {
                n.clearDisplay();
                break;
            }
            case C.CREATE_DEBUG_ACTION: {
                U.debug('NYI');
                break;
            }
            case C.DRIVE_ACTION: {
                U.debug('NYI');
                break;
            }
            case C.EXPR:
                this_1.evalExpr(stmt);
                break;
            case C.FLOW_CONTROL: {
                var conditional = stmt[C.CONDITIONAL];
                var activatedBy = stmt[C.BOOLEAN] === undefined ? true : stmt[C.BOOLEAN];
                var doIt = conditional ? (s.pop() === activatedBy) : true;
                if (doIt) {
                    s.popOpsUntil(stmt[C.KIND]);
                    if (stmt[C.BREAK]) {
                        // advance past the popped-to operation to avoid re-executing it
                        s.getOp();
                    }
                }
                break;
            }
            case C.GET_SAMPLE: {
                n.getSample(s, stmt[C.NAME], stmt[C.PORT], stmt[C.GET_SAMPLE], stmt[C.SLOT]);
                break;
            }
            case C.IF_STMT:
                s.pushOps(stmt[C.STMT_LIST]);
                break;
            case C.IF_TRUE_STMT:
                if (s.pop()) {
                    s.pushOps(stmt[C.STMT_LIST]);
                }
                break;
            case C.IF_RETURN:
                if (s.pop()) {
                    s.pushOps(stmt[C.STMT_LIST]);
                }
                break;
            case C.LED_ON_ACTION: {
                var color = s.pop();
                n.ledOnAction(stmt[C.NAME], stmt[C.PORT], color);
                break;
            }
            case C.METHOD_CALL_VOID:
            case C.METHOD_CALL_RETURN: {
                // bind actual arguments (already on the value stack) to parameter names
                for (var _i = 0, _a = stmt[C.NAMES]; _i < _a.length; _i++) {
                    var parameterName = _a[_i];
                    s.bindVar(parameterName, s.pop());
                }
                var body = s.getFunction(stmt[C.NAME])[C.STATEMENTS];
                s.pushOps(body);
                break;
            }
            case C.MOTOR_ON_ACTION: {
                var duration = s.pop();
                var speed = s.pop();
                var name_2 = stmt[C.NAME];
                var port_1 = stmt[C.PORT];
                n.motorOnAction(name_2, port_1, duration, speed);
                if (duration >= 0) {
                    // timed run: stop the motor after 'duration' and resume interpretation
                    this_1.timeout(function () { n.motorStopAction(name_2, port_1); _this.evalOperation(); }, duration);
                    return { value: void 0 };
                }
                break;
            }
            case C.MOTOR_STOP: {
                n.motorStopAction(stmt[C.NAME], stmt[C.PORT]);
                break;
            }
            case C.REPEAT_STMT:
                this_1.evalRepeat(stmt);
                break;
            case C.REPEAT_STMT_CONTINUATION:
                if (stmt[C.MODE] === C.FOR || stmt[C.MODE] === C.TIMES) {
                    var runVariableName = stmt[C.NAME];
                    var end = s.get1();
                    var incr = s.get0();
                    var value = s.getVar(runVariableName) + incr;
                    if (+value >= +end) {
                        s.popOpsUntil(C.REPEAT_STMT);
                        s.getOp(); // the repeat has terminated
                    }
                    else {
                        s.setVar(runVariableName, value);
                        s.pushOps(stmt[C.STMT_LIST]);
                    }
                }
                break;
            case C.SHOW_TEXT_ACTION: {
                n.showTextAction(s.pop());
                break;
            }
            case C.STATUS_LIGHT_ACTION:
                // NOTE(review): the op is named STATUS_LIGHT_ACTION but always calls
                // statusLightOffAction — confirm this is intended in the generator.
                n.statusLightOffAction(stmt[C.NAME], stmt[C.PORT]);
                break;
            case C.STOP:
                U.debug("PROGRAM TERMINATED. stop op");
                return "break-topLevelLoop";
            case C.TEXT_JOIN:
                var second = s.pop();
                var first = s.pop();
                s.push('' + first + second);
                break;
            case C.TIMER_SENSOR_RESET:
                var port = stmt[C.PORT];
                n.timerReset(port);
                break;
            case C.TONE_ACTION: {
                var duration = s.pop();
                var frequency = s.pop();
                n.toneAction(stmt[C.NAME], frequency, duration);
                // yield until the tone has finished, then continue interpreting
                this_1.timeout(function () { _this.evalOperation(); }, duration);
                return { value: void 0 };
            }
            case C.VAR_DECLARATION: {
                var name_3 = stmt[C.NAME];
                s.bindVar(name_3, s.pop());
                break;
            }
            case C.WAIT_STMT: {
                U.debug('waitstmt started');
                s.pushOps(stmt[C.STMT_LIST]);
                break;
            }
            case C.WAIT_TIME_STMT: {
                var time = s.pop();
                this_1.timeout(function () { _this.evalOperation(); }, time);
                return { value: void 0 };
            }
            default:
                U.dbcException("invalid stmt op: " + opCode);
        }
    };
    var this_1 = this;
    topLevelLoop: while (!this.terminated) {
        var state_1 = _loop_1();
        if (typeof state_1 === "object")
            return state_1.value;
        switch (state_1) {
            case "break-topLevelLoop": break topLevelLoop;
        }
    }
    // termination either requested by the client or by executing 'stop' or after last statement
    this.terminated = true;
    n.close();
    this.callbackOnTermination();
};
/**
 * called from @see evalOperation() to evaluate all kinds of expressions
 *
 * . @param expr to be evaluated
 */
Interpreter.prototype.evalExpr = function (expr) {
    var kind = expr[C.EXPR];
    var s = this.s;
    switch (kind) {
        case C.VAR:
            s.push(s.getVar(expr[C.NAME]));
            break;
        case C.NUM_CONST:
            // unary plus coerces the (possibly string) constant to a number
            s.push(+expr[C.VALUE]);
            break;
        case C.BOOL_CONST:
            s.push(expr[C.VALUE]);
            break;
        case C.STRING_CONST:
            s.push(expr[C.VALUE]);
            break;
        case C.COLOR_CONST:
            s.push(expr[C.VALUE]);
            break;
        case C.UNARY: {
            var subOp = expr[C.OP];
            switch (subOp) {
                case C.NOT:
                    // string-aware truthiness: 'true'/'false'/'0'/'' come from
                    // the code generator as strings, everything else is coerced
                    var truthy;
                    var bool = s.pop();
                    if (bool === 'true') {
                        truthy = true;
                    }
                    else if (bool === 'false' || bool === '0' || bool === '') {
                        truthy = false;
                    }
                    else {
                        truthy = !!bool;
                    }
                    s.push(!truthy);
                    break;
                case C.NEG:
                    var value = s.pop();
                    s.push(-value);
                    break;
                default:
                    U.dbcException("invalid unary expr subOp: " + subOp);
            }
            break;
        }
        case C.MATH_CONST: {
            var value = expr[C.VALUE];
            switch (value) {
                case 'PI':
                    s.push(Math.PI);
                    break;
                case 'E':
                    s.push(Math.E);
                    break;
                case 'GOLDEN_RATIO':
                    s.push((1.0 + Math.sqrt(5.0)) / 2.0);
                    break;
                case 'SQRT2':
                    s.push(Math.SQRT2);
                    break;
                case 'SQRT1_2':
                    s.push(Math.SQRT1_2);
                    break;
                case 'INFINITY':
                    s.push(Infinity);
                    break;
                default:
                    throw "Invalid Math Constant Name";
            }
            break;
        }
        case C.SINGLE_FUNCTION: {
            // one-argument math functions; argument is on the value stack
            var subOp = expr[C.OP];
            var value = s.pop();
            U.debug('---------- ' + subOp + ' with ' + value);
            switch (subOp) {
                case 'ROOT':
                    s.push(Math.sqrt(value));
                    break;
                case 'ABS':
                    s.push(Math.abs(value));
                    break;
                case 'LN':
                    s.push(Math.log(value));
                    break;
                case 'LOG10':
                    s.push(Math.log(value) / Math.LN10);
                    break;
                case 'EXP':
                    s.push(Math.exp(value));
                    break;
                case 'POW10':
                    s.push(Math.pow(10, value));
                    break;
                case 'SIN':
                    s.push(Math.sin(value));
                    break;
                case 'COS':
                    s.push(Math.cos(value));
                    break;
                case 'TAN':
                    s.push(Math.tan(value));
                    break;
                case 'ASIN':
                    s.push(Math.asin(value));
                    break;
                case 'ATAN':
                    s.push(Math.atan(value));
                    break;
                case 'ACOS':
                    s.push(Math.acos(value));
                    break;
                case 'ROUND':
                    s.push(Math.round(value));
                    break;
                case 'ROUNDUP':
                    s.push(Math.ceil(value));
                    break;
                case 'ROUNDDOWN':
                    s.push(Math.floor(value));
                    break;
                default:
                    throw "Invalid Function Name";
            }
            break;
        }
        case C.MATH_CONSTRAIN_FUNCTION: {
            // clamp value into [min, max]; operands popped in reverse push order
            var max_1 = s.pop();
            var min_1 = s.pop();
            var value = s.pop();
            s.push(Math.min(Math.max(value, min_1), max_1));
            break;
        }
        case C.RANDOM_INT: {
            var max = s.pop();
            var min = s.pop();
            if (min > max) {
                // swap so the range is well-formed
                _a = [max, min], min = _a[0], max = _a[1];
            }
            s.push(Math.floor(Math.random() * (max - min + 1) + min));
            break;
        }
        case C.RANDOM_DOUBLE:
            s.push(Math.random());
            break;
        case C.MATH_PROP_FUNCT: {
            var subOp = expr[C.OP];
            var value = s.pop();
            switch (subOp) {
                case 'EVEN':
                    s.push(value % 2 === 0);
                    break;
                case 'ODD':
                    s.push(value % 2 !== 0);
                    break;
                case 'PRIME':
                    s.push(this.isPrime(value));
                    break;
                case 'WHOLE':
                    s.push(Number(value) === value && value % 1 === 0);
                    break;
                case 'POSITIVE':
                    s.push(value >= 0);
                    break;
                case 'NEGATIVE':
                    s.push(value < 0);
                    break;
                case 'DIVISIBLE_BY':
                    var first = s.pop();
                    s.push(first % value === 0);
                    break;
                default:
                    throw "Invalid Math Property Function Name";
            }
            break;
        }
        case C.BINARY: {
            var subOp = expr[C.OP];
            var right = s.pop();
            var left = s.pop();
            switch (subOp) {
                case C.EQ:
                    // NOTE(review): EQ uses loose == while NEQ below uses strict !==;
                    // confirm this asymmetry is intended by the code generator.
                    s.push(left == right);
                    break;
                case C.NEQ:
                    s.push(left !== right);
                    break;
                case C.LT:
                    s.push(left < right);
                    break;
                case C.LTE:
                    s.push(left <= right);
                    break;
                case C.GT:
                    s.push(left > right);
                    break;
                case C.GTE:
                    s.push(left >= right);
                    break;
                case C.AND:
                    s.push(left && right);
                    break;
                case C.OR:
                    s.push(left || right);
                    break;
                case C.ADD:
                    // '0 +' forces numeric context for the generated operands
                    s.push(0 + left + right);
                    break;
                case C.MINUS:
                    s.push(0 + left - right);
                    break;
                case C.MULTIPLY:
                    s.push(0 + left * right);
                    break;
                case C.DIVIDE:
                    s.push(0 + left / right);
                    break;
                case C.POWER:
                    s.push(Math.pow(left, right));
                    break;
                case C.MOD:
                    s.push(left % right);
                    break;
                default:
                    U.dbcException("invalid binary expr supOp: " + subOp);
            }
            break;
        }
        default:
            U.dbcException("invalid expr op: " + kind);
    }
    var _a;
};
/**
 * called from @see evalOperation() to run a repeat statement
 *
 * a repeat-statement ALWAYS contains a single repeat-continuation statement. That in turn contains the body statements written by the programmer.
 * The repeat-statement does initialization of init, end, step and the run variable for the FOR and TIMES variant (other variants don't need that)
 * The repeat-continuation is for updating the run variable in the FOR and TIMES variant.
 *
 * A continue statement pops the stack until a repeat-continuation is found and re-executes it
 * A break statement pops the stack until a repeat-statement is found and skips that
 *
 * Have a look at the functions for push and pop of operations in the STATE component. The cleanup of the run variable is done there.
 * This is not optimal, as design decisions are distributed over two components.
 *
 * . @param stmt the repeat statement
 */
Interpreter.prototype.evalRepeat = function (stmt) {
    var s = this.s;
    var mode = stmt[C.MODE];
    var contl = stmt[C.STMT_LIST];
    if (contl.length !== 1 || contl[0][C.OPCODE] !== C.REPEAT_STMT_CONTINUATION) {
        U.dbcException("repeat expects an embedded continuation statement");
    }
    var cont = contl[0];
    switch (mode) {
        case C.FOREVER:
        case C.UNTIL:
        case C.WHILE:
            s.pushOps(contl);
            s.getOp(); // pseudo execution. Init is already done. Continuation is for termination only.
            s.pushOps(cont[C.STMT_LIST]);
            break;
        case C.TIMES:
        case C.FOR: {
            var runVariableName = stmt[C.NAME];
            // start/end were pushed by the generated init expressions
            var start = s.get2();
            var end = s.get1();
            if (+start >= +end) {
                // empty range: discard init, end and step values, execute nothing
                s.pop();
                s.pop();
                s.pop();
            }
            else {
                s.bindVar(runVariableName, start);
                s.pushOps(contl);
                s.getOp(); // pseudo execution. Init is already done. Continuation is for termination only.
                s.pushOps(cont[C.STMT_LIST]);
                break;
            }
            break;
        }
        default:
            U.dbcException("invalid repeat mode: " + mode);
    }
};
/**
 * return true if the parameter is prime
 *
 * . @param n to be checked for primality
 */
Interpreter.prototype.isPrime = function (n) {
    if (n < 2) {
        return false;
    }
    if (n === 2) {
        return true;
    }
    if (n % 2 === 0) {
        return false;
    }
    // trial division by odd numbers up to sqrt(n)
    for (var i = 3, s = Math.sqrt(n); i <= s; i += 2) {
        if (n % i === 0) {
            return false;
        }
    }
    return true;
};
/**
 * after the duration specified, call the callback function given. The duration is partitioned into 100 millisec intervals to allow termination of the running interpreter during
 * a timeout. Be careful: the termination is NOT effected here, but by the callback function (this should be @see evalOperation() in ALMOST ALL cases)
 *
 * . @param callback called when the time has elapsed
 *
 * . @param durationInMilliSec time that should elapse before the callback is called
 */
Interpreter.prototype.timeout = function (callback, durationInMilliSec) {
    var _this = this;
    if (this.terminated) {
        // already terminated: invoke immediately so evalOperation can finish cleanup
        callback();
    }
    else if (durationInMilliSec > 100) {
        // U.p( 'waiting for 100 msec from ' + durationInMilliSec + ' msec' );
        durationInMilliSec -= 100;
        setTimeout(function () { _this.timeout(callback, durationInMilliSec); }, 100);
    }
    else {
        // U.p( 'waiting for ' + durationInMilliSec + ' msec' );
        setTimeout(function () { callback(); }, durationInMilliSec);
    }
};
return Interpreter;
}());
exports.Interpreter = Interpreter;
});
|
def lcs(s1, s2):
    """Return the length of the longest common subsequence of s1 and s2.

    Classic O(len(s1) * len(s2)) dynamic programming: table[i][j] holds the
    LCS length of the prefixes s1[:i] and s2[:j].
    """
    rows, cols = len(s1), len(s2)
    # Row/column 0 stay zero: the LCS with an empty prefix is empty.
    table = [[0] * (cols + 1) for _ in range(rows + 1)]
    for i, a in enumerate(s1, start=1):
        for j, b in enumerate(s2, start=1):
            if a == b:
                table[i][j] = table[i - 1][j - 1] + 1
            else:
                table[i][j] = max(table[i - 1][j], table[i][j - 1])
    return table[rows][cols]
# coding: UTF-8
require "spec_helper"
# Specs for the Ping request message of the Warden protocol.
describe Warden::Protocol::PingRequest do
  subject(:request) do
    Warden::Protocol::PingRequest.new
  end
  # Shared examples defined in spec_helper cover (un)wrapping behaviour.
  it_should_behave_like "wrappable request"
  it 'has class type methods' do
    expect(request.class.type_camelized).to eq('Ping')
    expect(request.class.type_underscored).to eq('ping')
  end
  it "should respond to #create_response" do
    expect(request.create_response).to be_a(Warden::Protocol::PingResponse)
  end
end
# Specs for the Ping response message of the Warden protocol.
describe Warden::Protocol::PingResponse do
  subject(:response) do
    Warden::Protocol::PingResponse.new
  end
  # Shared examples defined in spec_helper cover (un)wrapping behaviour.
  it_should_behave_like "wrappable response"
  it 'has class type methods' do
    expect(response.class.type_camelized).to eq('Ping')
    expect(response.class.type_underscored).to eq('ping')
  end
  it 'should be ok' do
    expect(response).to be_ok
  end
  it 'should not be an error' do
    expect(response).to_not be_error
  end
end
|
<gh_stars>0
package praesentation;
import java.awt.Dimension;
import java.awt.Font;
import java.util.Iterator;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.UIManager;
import modell.Fassade;
import steuerung.Hauptsteuerung;
import steuerung.WahrheitstabellenSteuerungen;
/**Startet zu Beginn das Hauptmenue, wechselt die sichtbare Ansicht
* und stoesst das Beenden des Programms durch die Hauptsteuerung an.
* @author Nick
*/
public class Fensterverwaltung {
private Hauptsteuerung strg;
private JFrame aktivesFenster;
private Fassade modell;
private int[] fensterMass = new int[]{1280, 720};
private int[] minimumMass = fensterMass;
ImageIcon img = new ImageIcon(getClass().getResource("/Icon/icon.png"));
public Fensterverwaltung(Hauptsteuerung strg, Fassade fsd) {
this.strg = strg;
this.modell = fsd;
}
/**
* Erstellt ein Hauptmenue und setzt Variablen.
*/
public void init() {
UIManager.put("Button.font",
new javax.swing.plaf.FontUIResource("Arial Unicode MS", Font.BOLD, 20));
UIManager.put("Label.font",
new javax.swing.plaf.FontUIResource("Arial Unicode MS", Font.BOLD, 40));
UIManager.put("TextArea.font",
new javax.swing.plaf.FontUIResource("Arial Unicode MS", Font.PLAIN, 18));
UIManager.put("Table.font", new Font("Arial Unicode MS", Font.BOLD, 20));
aktivesFenster = new JFrame();
aktivesFenster.setContentPane(new Hauptmenue(this));
aktivesFenster.setTitle("RBLS");
aktivesFenster.setSize(fensterMass[0], fensterMass[1]);
aktivesFenster.setMinimumSize(new Dimension(minimumMass[0], minimumMass[1]));
aktivesFenster.setResizable(true);
aktivesFenster.setLocationRelativeTo(null);
aktivesFenster.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
aktivesFenster.setVisible(true);
aktivesFenster.setIconImage(img.getImage());
}
/**
* Wechselt zum Hauptmenue.
*/
public void oeffneMenue() {
wechseleFenster(new Hauptmenue(this), "RBLS");
}
/**
* Wechselt zum Raetselwahlfenster.
* @param stufe Stufe der Raetsel
*/
public void oeffneRaetselwahl(int stufe) {
wechseleFenster(new Raetselwahl(this, modell.gibRaetselListe(stufe),
modell.gibGeloesteRaetsel(stufe), stufe), "Rätselwahl");
}
/**
* Wechselt zum Abschlussfenster.
*/
public void oeffneAbschlussFenster() {
wechseleFenster(new Abschlussfenster(this), "Abschlussfenster");
}
/**
* Startet ein Raetsel.
* @param name Raetselname bzw Name der Datei
*/
public void starteRaetsel(String name) {
modell.setzeRaetsel(name);
WahrheitstabellenSteuerungen wstrg;
wstrg = strg.raetselFensterInit();
wechseleFenster(new StufenRaetselFenster(this, modell, wstrg).ansicht, "RBLS");
}
/**
* Startet ein zufaelliges Raetsel der aktuellen Stufe.
*/
public void starteZufaelligesRaetsel() {
List<String> liste = modell.gibRaetselListe(modell.gibStufe());
for (Iterator<String> i = liste.iterator(); i.hasNext(); ) {
String raetsel = i.next();
if (modell.gibGeloesteRaetsel(modell.gibStufe()).contains(raetsel) == false) {
starteRaetsel(raetsel);
return;
} else {
oeffneRaetselwahl(modell.gibStufe());
}
}
}
/**
* Startet den Freien Modus.
*/
public void starteFreienModus() {
wechseleFenster(new FreiesRaetselFenster(this, modell, strg).ansicht, "RBLS");
}
/**
* Stoesst das Beenden an.
*/
public void beende() {
aktivesFenster.setVisible(false);
strg.beenden();
}
/**
* Teilt der Hauptsteuerung das Loesen des aktiven Raetsels mit und oeffnet ein Abschlussfenster.
*/
public void erledigeRaetsel() {
strg.raetselGeloest();
oeffneAbschlussFenster();
}
/**
 * Replaces the content of the active frame with the given panel.
 * The Swing call order matters: clear the old content, size and install
 * the new panel, then revalidate and repaint so the change is shown.
 * @param fenster panel to display
 * @param titel   new frame title
 */
private void wechseleFenster(JPanel fenster, String titel) {
    aktivesFenster.getContentPane().removeAll();
    fenster.setPreferredSize(new Dimension(fensterMass[0], fensterMass[1]));
    aktivesFenster.setContentPane(fenster);
    aktivesFenster.getContentPane().revalidate();
    aktivesFenster.getContentPane().repaint();
    aktivesFenster.setTitle(titel);
}
} |
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.2 (64-bit)
#
# Filename : rd_data_fifo.sh
# Simulator : Mentor Graphics ModelSim Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Mon Feb 03 09:38:51 +0800 2020
# SW Build 2258646 on Thu Jun 14 20:03:12 MDT 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: rd_data_fifo.sh [-help]
# usage: rd_data_fifo.sh [-lib_map_path]
# usage: rd_data_fifo.sh [-noclean_files]
# usage: rd_data_fifo.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'rd_data_fifo.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info: print a banner identifying the generating tool and version.
echo -e "rd_data_fifo.sh - Script generated by export_simulation (Vivado v2018.2 (64-bit)-id)\n"
# Main steps
# Top-level driver: validate the CLI arguments ($# and $1 are forwarded so
# check_args sees the real option count and value), prepare the run
# directory and simulator setup, then compile and simulate.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  simulate
}
# RUN_STEP: <compile>
compile()
{
  # Compile design files; mirror all simulator output into compile.log
  # (append mode, so reruns accumulate in one log).
  source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
  # Launch ModelSim in 64-bit batch (console) mode, driven by simulate.do,
  # logging to simulate.log.
  vsim -64 -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatches on the first CLI option: copies the modelsim.ini setup file,
# resets the run directory, or leaves previous data in place; afterwards
# always (re)creates the design library directory.
setup()
{
  case $1 in
    "-lib_map_path" )
      # -lib_map_path requires a directory argument.
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./rd_data_fifo.sh -help\" for more information)\n"
        exit 1
      fi
      copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # Default: no recognized option; $2 is empty here, so copy_setup_file
      # falls back to its hard-coded default library path.
      copy_setup_file $2
  esac
  create_lib_dir
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy modelsim.ini file
# Copies the simulator setup file from the compiled-library directory
# (either the path passed as $1, or the hard-coded default) into the
# current run directory.
# Fix: the cp arguments are now quoted — the library path (e.g. the
# Windows-style default) may contain spaces, which would previously have
# been word-split into multiple cp arguments.
copy_setup_file()
{
  file="modelsim.ini"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="E:/Material/FPGA/Xilinx/Xlib10.7"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    cp "$src_file" .
  fi
}
# Create design library directory
# Always starts from a clean, empty modelsim_lib directory: rm -rf on a
# missing path is a silent no-op, so no existence check is needed.
create_lib_dir()
{
  lib_dir="modelsim_lib"
  rm -rf $lib_dir
  mkdir $lib_dir
}
# Delete generated data from the previous run
# Removes each known artifact (logs, wave file, library directory) if it
# exists, then recreates an empty design library directory.
reset_run()
{
  local artifacts=(compile.log elaborate.log simulate.log vsim.wlf modelsim_lib)
  for artifact in "${artifacts[@]}"; do
    if [[ -e $artifact ]]; then
      rm -rf $artifact
    fi
  done
  create_lib_dir
}
# Check command line arguments
# $1 is the script's argument count, $2 its first argument. Rejects a
# single unrecognized option with an error; prints usage for -help/-h.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./rd_data_fifo.sh -help\" for more information)\n"
    exit 1
  fi
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Prints the help text and exits with status 1.
# Fix: $msg is now quoted in the echo — unquoted, the shell word-splits
# the string and collapses the formatted whitespace, destroying the
# usage text's layout.
usage()
{
  msg="Usage: rd_data_fifo.sh [-help]\n\
Usage: rd_data_fifo.sh [-lib_map_path]\n\
Usage: rd_data_fifo.sh [-reset_run]\n\
Usage: rd_data_fifo.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e "$msg"
  exit 1
}
# Launch script
# NOTE(review): $1/$2 are intentionally left unquoted — absent arguments
# then collapse away, so $# inside run() reflects the real option count
# (quoting them would always pass two, possibly empty, arguments).
run $1 $2
|
def bfs(graph, start):
    """Iterative breadth-first search.

    Args:
        graph (dict): adjacency mapping of node -> iterable of neighbors
        start: the node the traversal begins at

    Returns:
        list: the nodes in the order they were first visited

    Improvements over the original: popping from the front of a plain list
    is O(n) per pop, so the frontier is now a collections.deque with O(1)
    popleft; membership tests use a set mirror of the ordered result list;
    and nodes missing from ``graph`` are treated as having no neighbors
    instead of raising KeyError.
    """
    from collections import deque

    visited = []        # visit order -- this is the returned value
    seen = set()        # fast membership mirror of `visited`
    queue = deque([start])
    while queue:
        current_node = queue.popleft()
        if current_node not in seen:
            seen.add(current_node)
            visited.append(current_node)
            # Nodes absent from the graph contribute no neighbors.
            queue.extend(graph.get(current_node, ()))
    return visited
package utils
import (
"fmt"
"strings"
)
// Run bundles a parsed profile with the settings needed to execute its
// projects, tasks and steps.
type Run struct {
	Profile     *ProfileConf // profile the projects/tasks/steps are read from
	ProfilePath string       // path of the profile file, used to build --profile flags
	OutputDir   string       // base directory commands run in ("" means ".")
	CmdDir      func(string, string, string, bool) // executes (cmd, description, dir, verbose)
	Verbosity   bool // forwarded as the verbose flag to CmdDir
}
// runCmdOnDir executes a single step command in cmdDir via r.CmdDir.
//
// For commands whose base binary name ends in "gg" (presumably the tool
// invoking itself -- confirm against callers), a --profile flag is appended
// whose path is prefixed with enough "../" segments to reach r.ProfilePath
// from the step's directory. After a self-invocation whose command line
// contains "update-profile", the in-memory profile is reloaded from disk.
func (r *Run) runCmdOnDir(cmd string, cmdDesc string, cmdDir string) {
	baseCmd := strings.Split(cmd, " ")[0]
	if strings.HasSuffix(baseCmd, "gg") {
		// Normalize the step directory: strip a leading "/" or "./" and a
		// trailing "/" so the depth count below is accurate.
		dir := r.OutputDir
		dir = fmt.Sprintf("%s/%s", dir, cmdDir)
		if strings.HasPrefix(dir, "/") {
			dir = dir[1:]
		}
		if strings.HasPrefix(dir, "./") {
			dir = dir[2:]
		}
		if strings.HasSuffix(dir, "/") {
			dir = dir[:len(dir)-1]
		}
		// One "../" per directory level between the step dir and the root.
		pathPrepend := ""
		for i := 0; i < len(strings.Split(dir, "/"))-1; i++ {
			pathPrepend += "../"
		}
		cmd = cmd + " --profile=" + pathPrepend + r.ProfilePath
	}
	r.CmdDir(cmd, cmdDesc, cmdDir, r.Verbosity)
	// A self-invoked "update-profile" may have rewritten the profile on
	// disk; reload so subsequent steps see the new values.
	if strings.HasSuffix(baseCmd, "gg") && strings.Contains(cmd, "update-profile") {
		r.Profile = GetProfile(r.Profile.env)
	}
}
// Run executes every step of every project in the profile's Main section,
// resolving each step's working directory relative to OutputDir (or "."
// when no output directory is configured).
func (r *Run) Run() {
	for _, proj := range r.Profile.Main {
		base := "."
		if r.OutputDir != "" {
			base = r.OutputDir
		}
		if proj.Dir != "" {
			base = fmt.Sprintf("%s/%s", base, proj.Dir)
		}
		for _, s := range proj.Steps {
			target := base
			if s.Dir != "" {
				target = fmt.Sprintf("%s/%s", target, s.Dir)
			}
			r.runCmdOnDir(s.Cmd, s.Desc, target)
		}
	}
}
// RunProjectStep runs every step of the named project from the profile's
// Main section; all other projects are skipped.
func (r *Run) RunProjectStep(projectStep string) {
	for _, proj := range r.Profile.Main {
		if proj.Name != projectStep {
			continue
		}
		base := r.OutputDir
		if proj.Dir != "" {
			base = fmt.Sprintf("%s/%s", base, proj.Dir)
		}
		for _, s := range proj.Steps {
			target := base
			if s.Dir != "" {
				target = fmt.Sprintf("%s/%s", target, s.Dir)
			}
			r.runCmdOnDir(s.Cmd, s.Desc, target)
		}
	}
}
// RunProjectSubStep runs only the step at the given index of the named
// project from the profile's Main section.
//
// Consistency fix: the sibling methods (Run, RunProjectStep, RunTask) use
// a step-local stepDir instead of mutating the project-level dir; this
// method now does the same, so the project directory can no longer be
// silently compounded by a step's directory.
func (r *Run) RunProjectSubStep(projectStep string, index int) {
	for _, project := range r.Profile.Main {
		if project.Name != projectStep {
			continue
		}
		dir := r.OutputDir
		if project.Dir != "" {
			dir = fmt.Sprintf("%s/%s", dir, project.Dir)
		}
		for i, step := range project.Steps {
			if i != index {
				continue
			}
			stepDir := dir
			if step.Dir != "" {
				stepDir = fmt.Sprintf("%s/%s", stepDir, step.Dir)
			}
			r.runCmdOnDir(step.Cmd, step.Desc, stepDir)
		}
	}
}
// RunTask runs every step of the named task from the profile's Tasks
// section; all other tasks are skipped.
func (r *Run) RunTask(task string) {
	for _, t := range r.Profile.Tasks {
		if t.Name != task {
			continue
		}
		base := r.OutputDir
		if t.Dir != "" {
			base = fmt.Sprintf("%s/%s", base, t.Dir)
		}
		for _, s := range t.Steps {
			target := base
			if s.Dir != "" {
				target = fmt.Sprintf("%s/%s", target, s.Dir)
			}
			r.runCmdOnDir(s.Cmd, s.Desc, target)
		}
	}
}
// RunTaskSubStep runs only the step at the given index of the named task
// from the profile's Tasks section.
//
// Consistency fix: like RunProjectSubStep, this now uses a step-local
// stepDir instead of mutating the task-level dir, matching the sibling
// methods' pattern.
func (r *Run) RunTaskSubStep(task string, index int) {
	for _, project := range r.Profile.Tasks {
		if project.Name != task {
			continue
		}
		dir := r.OutputDir
		if project.Dir != "" {
			dir = fmt.Sprintf("%s/%s", dir, project.Dir)
		}
		for i, step := range project.Steps {
			if i != index {
				continue
			}
			stepDir := dir
			if step.Dir != "" {
				stepDir = fmt.Sprintf("%s/%s", stepDir, step.Dir)
			}
			r.runCmdOnDir(step.Cmd, step.Desc, stepDir)
		}
	}
}
|
import hashlib
from Crypto.PublicKey import RSA
from Crypto.Signature import pkcs1_15
from Crypto.Hash import SHA256
class RSAAlgorithm(AbstractSigningAlgorithm):
    """RSASSA-PKCS1-v1_5 signing over a configurable hash.

    Fix: the original generated a brand-new 2048-bit RSA key inside every
    ``sign()`` call, so the matching public key was discarded immediately
    and no signature could ever be verified. The key pair is now created
    once (or injected) at construction time and reused for all signatures.
    """

    def __init__(self, hash_fun: object, key=None) -> None:
        # hash_fun: module-like object exposing .new(data), e.g. Crypto.Hash.SHA256.
        self.hash_fun = hash_fun
        # Accept an existing key for deterministic setups; otherwise generate
        # one 2048-bit key per instance (backward-compatible default).
        self.key = key if key is not None else RSA.generate(2048)

    def public_key(self):
        """Return the public half of the signing key, for verification."""
        return self.key.publickey()

    def sign(self, data: bytes) -> bytes:
        """Sign ``data`` with PKCS#1 v1.5 using the instance's key and hash."""
        digest = self.hash_fun.new(data)
        return pkcs1_15.new(self.key).sign(digest)
// Copyright 2007, 2008 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5;
/**
 * Services to help with field {@linkplain org.apache.tapestry5.Validator validation} and {@linkplain
 * org.apache.tapestry5.Translator translation}. This service encapsulates the logic that mixes normal
 * configured/declared validation/translation with events triggered on the component, giving the
 * component the first chance to handle each conversion before the configured translator runs.
 */
public interface FieldValidationSupport
{
    /**
     * A wrapper around {@link org.apache.tapestry5.Translator#toClient(Object)} that first fires a "toclient" event on
     * the component to see if it can perform the conversion. If the value is null, then no event is fired and the
     * translator is <em>not</em> invoked, the return value is simply null.
     *
     * @param value              to be converted to a client-side string, which may be null
     * @param componentResources used to fire events on the component
     * @param translator         used if the component does not provide a non-null value
     * @param nullFieldStrategy  used to convert a null server side value to an appropriate client side value
     * @return the translated value or null if the value is null
     * @see org.apache.tapestry5.Translator#toClient(Object)
     */
    String toClient(Object value, ComponentResources componentResources, FieldTranslator<Object> translator,
                    NullFieldStrategy nullFieldStrategy);

    /**
     * A wrapper around {@link Translator#parseClient(Field, String, String)}. First a "parseclient" event is fired; the
     * translator is only invoked if that returns null (typically because there is no event handler method for the
     * event).
     *
     * @param clientValue        the value provided by the client (not null)
     * @param componentResources used to trigger events
     * @param translator         translator that will do the work if the component event returns null
     * @param nullFieldStrategy  used to convert null/blank values from client into non-null server side values
     * @return the input parsed to an object
     * @throws org.apache.tapestry5.ValidationException
     *          if the value can't be parsed
     * @see Translator#parseClient(Field, String, String)
     */
    Object parseClient(String clientValue, ComponentResources componentResources, FieldTranslator<Object> translator,
                       NullFieldStrategy nullFieldStrategy)
            throws ValidationException;

    /**
     * Performs validation on a parsed value from the client. Normal validations occur first, then a "validate" event
     * is triggered on the component.
     *
     * @param value              parsed value from the client, possibly null
     * @param componentResources used to trigger events
     * @param validator          performs normal validations
     * @throws ValidationException if the value is not valid
     * @see org.apache.tapestry5.Validator#validate(Field, Object, org.apache.tapestry5.ioc.MessageFormatter, Object)
     */
    void validate(Object value, ComponentResources componentResources, FieldValidator validator)
            throws ValidationException;
}
|
import React, { Component } from 'react';
import ReactDOM from 'react-dom';
import Images from './components/Images';
class App extends Component{
render(){
return(
<div>
This is the React App
<Images />
</div>
);
}
}
ReactDOM.render(<App />, document.getElementById('root'));
|
import React from "react";
function SearchForm() {
return (
<form class="form-inline">
<input class="form-control mr-sm-2" type="search" placeholder="Search" aria-label="Search"/>
<button class="btn btn-outline-success my-2 my-sm-0" type="submit">Search</button>
</form>
)
};
export default SearchForm; |
<!DOCTYPE html>
<html>
<head>
<title>Login</title>
</head>
<body>
<h2>Login</h2>
<form>
<input type="email" id="input_email" placeholder="Email">
<input type="password" id="input_password" placeholder="Password">
<button type="button" onclick="handleClick()">Login</button>
</form>
<div id="login_status"></div>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"></script>
<script>
// Reads the login form fields and submits them to the /api endpoint.
// Security fix: credentials were sent with $.get, which serializes the
// email and password into the URL query string (visible in server logs,
// proxies, and browser history). They are now sent in the body of a POST.
function handleClick(){
  let email = document.getElementById("input_email").value;
  let password = document.getElementById("input_password").value;
  let payload = {
    email: email,
    password: password
  };
  $.post('/api', payload, function(data){
    if(data && data.success){
      showLoginStatus("Login successful");
    }else{
      showLoginStatus("Login failed");
    }
  });
}
// Renders a status message into the #login_status container.
// NOTE(review): the message is interpolated into innerHTML; current callers
// pass only fixed strings, but never pass user-controlled text here (XSS).
function showLoginStatus(message){
  let loginStatusDiv = document.getElementById("login_status");
  loginStatusDiv.innerHTML = `
<div>
<p>${message}</p>
</div>
`;
}
</script>
</body>
</html> |
import Game from "/src/game";

// Create a fixed-size 350x500 canvas and attach it to the page.
const canvas = document.createElement("canvas");
const context = canvas.getContext("2d");
canvas.width = 350;
canvas.height = 500;
document.body.appendChild(canvas);

const game = new Game(canvas);

let previousTimestamp = 0;

// Standard requestAnimationFrame loop: compute the time elapsed since the
// previous frame, then clear, update and redraw the whole scene.
function gameLoop(timestamp){
  const deltaTime = timestamp - previousTimestamp;
  previousTimestamp = timestamp;
  context.clearRect(0, 0, canvas.width, canvas.height);
  game.update(deltaTime);
  game.draw(context);
  requestAnimationFrame(gameLoop);
}
requestAnimationFrame(gameLoop);
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 ludovicRoucoux
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package fr.novia.zaproxyplugin;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.FilePath.FileCallable;
import hudson.Launcher;
import hudson.Launcher.LocalLauncher;
import hudson.Launcher.RemoteLauncher;
import hudson.Util;
import hudson.model.BuildListener;
import hudson.model.Computer;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Node;
import hudson.remoting.VirtualChannel;
import hudson.slaves.SlaveComputer;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import net.sf.json.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.jenkinsci.remoting.RoleChecker;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
import java.io.File;
import java.io.IOException;
/**
* /!\
* Au jour du 27/03/2015
* La version 2.3.1 de ZAPROXY ne contient pas le plugin "pscanrules-release-10.zap" qui sert à
* remonter les alertes lors d'un scan passif (spider). Il faut donc ajouter ce plugin manuellement ou
* télécharger la prochaine version de ZAPROXY (2.4) via Custom Tools Plugin (et non la 2.3.1)
* /!\
*
* The main class of the plugin. This class adds a build step in a Jenkins job that allows you
* to launch the ZAProxy security tool and get alerts reports from it.
*
* @author ludovic.roucoux
*
*/
public class ZAProxyBuilder extends Builder {
/** To start ZAP as a prebuild step */
private final boolean startZAPFirst;
/** The objet to start and call ZAProxy methods */
private final ZAProxy zaproxy;
/** Host configured when ZAProxy is used as proxy */
private final String zapProxyHost;
/** Port configured when ZAProxy is used as proxy */
private final int zapProxyPort;
// Fields in fr/novia/zaproxyplugin/ZAProxyBuilder/config.jelly must match the parameter names in the "DataBoundConstructor"
@DataBoundConstructor
public ZAProxyBuilder(boolean startZAPFirst, String zapProxyHost, int zapProxyPort, ZAProxy zaproxy) {
this.startZAPFirst = startZAPFirst;
this.zaproxy = zaproxy;
this.zapProxyHost = zapProxyHost;
this.zapProxyPort = zapProxyPort;
this.zaproxy.setZapProxyHost(zapProxyHost);
this.zaproxy.setZapProxyPort(zapProxyPort);
//call the set methods of Zaoroxy to set the values
this.zaproxy.setJiraBaseURL(ZAProxyBuilder.DESCRIPTOR.getJiraBaseURL());
this.zaproxy.setJiraUserName(ZAProxyBuilder.DESCRIPTOR.getJiraUserName());
this.zaproxy.setJiraPassword(ZAProxyBuilder.DESCRIPTOR.getJiraPassword());
}
/*
* Getters allows to access member via UI (config.jelly)
*/
public boolean getStartZAPFirst() {
return startZAPFirst;
}
public ZAProxy getZaproxy() {
return zaproxy;
}
public String getZapProxyHost() {
return zapProxyHost;
}
public int getZapProxyPort() {
return zapProxyPort;
}
// Overridden for better type safety.
// If your plugin doesn't really define any property on Descriptor,
// you don't have to do this.
@Override
public ZAProxyBuilderDescriptorImpl getDescriptor() {
return (ZAProxyBuilderDescriptorImpl)super.getDescriptor();
}
// Method called before launching the build
public boolean prebuild(AbstractBuild<?, ?> build, BuildListener listener) {
listener.getLogger().println("------- START Replace environment variables -------");
//replace the environment variables with the corresponding values
String reportName=zaproxy.getFilenameReports();
try {
reportName=applyMacro( build, listener, reportName);
} catch (InterruptedException e1) {
listener.error(ExceptionUtils.getStackTrace(e1));
}
// zaproxy.setFilenameReports(reportName);
//we don't overwrite the file name containing the environment variables
//the evaluated value is saved in an other file name
zaproxy.setEvaluatedFilenameReports(reportName);
listener.getLogger().println("ReportName : "+reportName);
listener.getLogger().println("------- END Replace environment variables -------");
if(startZAPFirst) {
listener.getLogger().println("------- START Prebuild -------");
try {
Launcher launcher = null;
Node node = build.getBuiltOn();
// Create launcher according to the build's location (Master or Slave) and the build's OS
if("".equals(node.getNodeName())) { // Build on master
launcher = new LocalLauncher(listener, build.getWorkspace().getChannel());
} else { // Build on slave
boolean isUnix;
if( "Unix".equals(((SlaveComputer)node.toComputer()).getOSDescription()) ) {
isUnix = true;
} else {
isUnix = false;
}
launcher = new RemoteLauncher(listener, build.getWorkspace().getChannel(), isUnix);
}
zaproxy.startZAP(build, listener, launcher);
} catch (Exception e) {
e.printStackTrace();
listener.error(ExceptionUtils.getStackTrace(e));
return false;
}
listener.getLogger().println("------- END Prebuild -------");
}
return true;
}
// Method called when the build is launching
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) {
listener.getLogger().println("Perform ZAProxy");
if(!startZAPFirst) {
try {
zaproxy.startZAP(build, listener, launcher);
} catch (Exception e) {
e.printStackTrace();
listener.error(ExceptionUtils.getStackTrace(e));
return false;
}
}
boolean res;
try {
//copyPolicyFile(build.getWorkspace(), listener); // TODO maybe in future version
res = build.getWorkspace().act(new ZAProxyCallable(this.zaproxy, listener));
} catch (Exception e) {
e.printStackTrace();
listener.error(ExceptionUtils.getStackTrace(e));
return false;
}
return res;
}
/**
* Replace macro with environment variable if it exists
* @param build
* @param listener
* @param macro
* @return
* @throws InterruptedException
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public static String applyMacro(AbstractBuild build, BuildListener listener, String macro)
throws InterruptedException{
try {
EnvVars envVars = new EnvVars(Computer.currentComputer().getEnvironment());
envVars.putAll(build.getEnvironment(listener));
envVars.putAll(build.getBuildVariables());
return Util.replaceMacro(macro, envVars);
} catch (IOException e) {
listener.getLogger().println("Failed to apply macro " + macro);
listener.error(ExceptionUtils.getStackTrace(e));
}
return macro;
}
/**
* Copy local policy file to slave in policies directory of ZAP default directory.
*
* @param workspace the workspace of the build
* @param listener
* @throws IOException
* @throws InterruptedException
*/
private void copyPolicyFile(FilePath workspace, BuildListener listener) throws IOException, InterruptedException {
//if(zaproxy.getScanURL() && zaproxy.pathToLocalPolicy != null && !zaproxy.pathToLocalPolicy.isEmpty())
// TODO a recup via un champ
// File fileToCopy = new File(zaproxy.pathToLocalPolicy);
File fileToCopy = new File("C:\\Users\\ludovic.roucoux\\OWASP ZAP\\policies\\OnlySQLInjection.policy");
String stringForLogger = "Copy [" + fileToCopy.getAbsolutePath() + "] to ";
String data = FileUtils.readFileToString(fileToCopy, (String)null);
stringForLogger = workspace.act(new CopyFileCallable(data, zaproxy.getZapDefaultDir(),
fileToCopy.getName(), stringForLogger));
listener.getLogger().println(stringForLogger);
}
/**
* Allows to copy local policy file to the default ZAP policies directory in slave.
*
* @author ludovic.roucoux
*
*/
private static class CopyFileCallable implements FileCallable<String> {
private static final long serialVersionUID = -3375349701206827354L;
private String data;
private String zapDefaultDir;
private String copyFilename;
private String stringForLogger;
public CopyFileCallable(String data, String zapDefaultDir,
String copyFilename, String stringForLogger) {
this.data = data;
this.zapDefaultDir = zapDefaultDir;
this.copyFilename = copyFilename;
this.stringForLogger = stringForLogger;
}
public String invoke(File f, VirtualChannel channel) throws IOException, InterruptedException {
File fileCopiedDir = new File(zapDefaultDir, ZAProxy.NAME_POLICIES_DIR_ZAP);
File fileCopied = new File(fileCopiedDir, copyFilename);
FileUtils.writeStringToFile(fileCopied, data);
stringForLogger += "[" + fileCopied.getAbsolutePath() + "]";
return stringForLogger;
}
@Override
public void checkRoles(RoleChecker checker) throws SecurityException {
// Nothing to do
}
}
/**
* Descriptor for {@link ZAProxyBuilder}. Used as a singleton.
* The class is marked as public so that it can be accessed from views.
*
* <p>
* See <tt>src/main/resources/fr/novia/zaproxyplugin/ZAProxyBuilder/*.jelly</tt>
* for the actual HTML fragment for the configuration screen.
*/
@Extension // This indicates to Jenkins this is an implementation of an extension point.
public static final ZAProxyBuilderDescriptorImpl DESCRIPTOR = new ZAProxyBuilderDescriptorImpl();
public static final class ZAProxyBuilderDescriptorImpl extends BuildStepDescriptor<Builder> {
/**
* To persist global configuration information,
* simply store it in a field and call save().
*
* <p>
* If you don't want fields to be persisted, use <tt>transient</tt>.
*/
private String zapProxyDefaultHost;
private int zapProxyDefaultPort;
private String jiraBaseURL;
private String jiraUserName;
private String jiraPassword;
/**
* In order to load the persisted global configuration, you have to
* call load() in the constructor.
*/
public ZAProxyBuilderDescriptorImpl() {
load();
}
@Override
public boolean isApplicable(Class<? extends AbstractProject> aClass) {
// Indicates that this builder can be used with all kinds of project types
return true;
}
/**
* This human readable name is used in the configuration screen.
*/
@Override
public String getDisplayName() {
return "Execute ZAProxy";
}
@Override
public boolean configure(StaplerRequest req, JSONObject formData) throws FormException {
// To persist global configuration information,
// set that to properties and call save().
zapProxyDefaultHost = formData.getString("zapProxyDefaultHost");
zapProxyDefaultPort = formData.getInt("zapProxyDefaultPort");
//set the values from the global configuration for CREATE JIRA ISSUES
jiraBaseURL=formData.getString("jiraBaseURL");
jiraUserName=formData.getString("jiraUserName");
jiraPassword=<PASSWORD>.<PASSWORD>("<PASSWORD>Password");
// ^Can also use req.bindJSON(this, formData);
// (easier when there are many fields; need set* methods for this, like setUseFrench)
save();
return super.configure(req,formData);
}
public String getZapProxyDefaultHost() { return zapProxyDefaultHost; }
public int getZapProxyDefaultPort() {
return zapProxyDefaultPort;
}
public String getJiraBaseURL(){return jiraBaseURL;}
public String getJiraUserName(){return jiraUserName;}
public String getJiraPassword(){return <PASSWORD>;}
}
/**
* Used to execute ZAP remotely.
*
* @author ludovic.roucoux
*
*/
private static class ZAProxyCallable implements FileCallable<Boolean> {
private static final long serialVersionUID = -313398999885177679L;
private ZAProxy zaproxy;
private BuildListener listener;
public ZAProxyCallable(ZAProxy zaproxy, BuildListener listener) {
this.zaproxy = zaproxy;
this.listener = listener;
}
@Override
public Boolean invoke(File f, VirtualChannel channel) {
return zaproxy.executeZAP(new FilePath(f), listener);
}
@Override
public void checkRoles(RoleChecker checker) throws SecurityException {
// Nothing to do
}
}
}
|
<filename>editors/src/main/java/org/museautomation/ui/editors/suite/SetDataIdAction.java
package org.museautomation.ui.editors.suite;
import org.museautomation.core.suite.*;
import org.museautomation.ui.extend.actions.*;
/**
 * Undoable action that changes the data-table id of a
 * {@link ParameterListTaskSuite}, remembering the previous id so the
 * change can be reverted.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
class SetDataIdAction extends UndoableAction
{
    public SetDataIdAction(ParameterListTaskSuite suite, String new_data_id)
    {
        _suite = suite;
        _new_data_id = new_data_id;
    }

    /** Captures the current id for undo, then applies the new id. */
    @Override
    protected boolean executeImplementation()
    {
        _old_data_id = _suite.getDataTableId();
        _suite.setDataTableId(_new_data_id);
        return true;
    }

    /** Restores the id captured by the most recent execute. */
    @Override
    protected boolean undoImplementation()
    {
        _suite.setDataTableId(_old_data_id);
        return true;
    }

    // Immutable after construction; only _old_data_id is written, during execute.
    private final ParameterListTaskSuite _suite;
    private final String _new_data_id;
    private String _old_data_id;
}
|
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
 * Result holder for per-staff SMS amount statistics.
 *
 * Fixes: the class used getAmountCat() in its own code but declared no such
 * getter, and getCategory() had no return statement — neither compiled.
 */
public class SmsStaffAmountStatisticsResult implements Serializable {

    /** Categories whose staff amounts are aggregated. */
    private List<String> amountCat;

    /** Returns the category list driving the aggregation (may be null if unset). */
    public List<String> getAmountCat() {
        return amountCat;
    }

    /**
     * Sums the SMS amounts of all staff members whose category appears in the
     * given result's category list.
     *
     * @param result     result object whose category list drives the filter
     * @param smsAmounts map of staff name to SMS amount
     * @return total amount over all matching staff members
     */
    public int calculateTotalSmsAmount(SmsStaffAmountStatisticsResult result, Map<String, Integer> smsAmounts) {
        int totalSmsAmount = 0;
        for (String category : result.getAmountCat()) {
            for (Map.Entry<String, Integer> entry : smsAmounts.entrySet()) {
                if (category.equals(getCategory(entry.getKey()))) {
                    totalSmsAmount += entry.getValue();
                }
            }
        }
        return totalSmsAmount;
    }

    /**
     * Maps a staff member's name to their category.
     * The categorization rule is not yet specified; until it is, this fails
     * fast instead of silently returning a wrong or null category.
     */
    private String getCategory(String staffName) {
        throw new UnsupportedOperationException(
                "staff categorization for '" + staffName + "' is not implemented yet");
    }
}
public boolean isPalindrome(String str) {
int n = str.length();
for (int i = 0; i < n/2; i++)
if (str.charAt(i) != str.charAt(n-i-1))
return false;
return true;
} |
<filename>src/include/wctype.h
#pragma once
/* Wide character classification and mapping utilities <wctype.h>
   This file is part of the Public Domain C Library (PDCLib).
   Permission is granted to use, modify, and / or redistribute at will.
*/
#include "j6libc/cpp.h"
#include "j6libc/int.h"
#include "j6libc/wint_t.h"
CPP_CHECK_BEGIN
/* NOTE(review): wctrans_t and wctype_t are used by declarations below but
   are not defined in this header; presumably they come from "j6libc/int.h"
   or another included header — confirm. */
// wctrans_t
// wctype_t
#ifndef _PDCLIB_WEOF_DEFINED
#define _PDCLIB_WEOF_DEFINED _PDCLIB_WEOF_DEFINED
#define WEOF (wint_t)-1
#endif
/* Wide character classification functions */
/* Returns iswalpha( wc ) || iswdigit( wc ) */
int iswalnum( wint_t wc );
/* Returns true for wide characters for which either isupper( wc ) or
   islower( wc ) is true, as well as a set of locale-specific wide
   characters which are neither control characters, digits, punctuation,
   or whitespace.
*/
int iswalpha( wint_t wc );
/* Returns true if the character iswspace() and used for separating words
   within a line of text. In the "C" locale, only L' ' and L'\t' are
   considered blanks.
*/
int iswblank( wint_t wc );
/* Returns true if the wide character is a control character. */
int iswcntrl( wint_t wc );
/* Returns true if the wide character is a decimal digit. Locale-
   independent. */
int iswdigit( wint_t wc );
/* Returns iswprint( wc ) && ! iswspace( wc ).
   NOTE: This definition differs from that of isgraph() in <ctype.h>,
   which considers only ' ', not all isspace() characters.
*/
int iswgraph( wint_t wc );
/* Returns true for lowercase wide characters, as well as a set of
   locale-specific wide characters which are neither control characters,
   digits, punctuation, or whitespace.
*/
int iswlower( wint_t wc );
/* Returns true for every printing wide character. */
int iswprint( wint_t wc );
/* Returns true for a locale-specific set of punctuation characters that
   are neither whitespace nor alphanumeric.
*/
int iswpunct( wint_t wc );
/* Returns true for a locale-specific set of whitespace characters that
   are neither alphanumeric, graphic, or punctuation.
*/
int iswspace( wint_t wc );
/* Returns true for uppercase wide characters, as well as a set of
   locale-specific wide characters which are neither control characters,
   digits, punctuation, or whitespace.
*/
int iswupper( wint_t wc );
/* Returns true if the wide character is a hexadecimal digit. Locale-
   independent. */
int iswxdigit( wint_t wc );
/* Extensible wide character classification functions */
/* Returns true if the wide character wc has the property described by
   desc (which was retrieved by a previous call to wctype() without
   changing the LC_CTYPE locale setting between the two calls).
*/
int iswctype( wint_t wc, wctype_t desc );
/* Returns a description object for a named character property, to be
   used as parameter to the iswctype() function. Supported property
   names are:
   "alnum"  -- alphanumeric, as per iswalnum()
   "alpha"  -- alphabetic, as per iswalpha()
   "blank"  -- blank, as per iswblank()
   "cntrl"  -- control, as per iswcntrl()
   "digit"  -- decimal digit, as per iswdigit()
   "graph"  -- graphic, as per iswgraph()
   "lower"  -- lowercase, as per iswlower()
   "print"  -- printing, as per iswprint()
   "punct"  -- punctuation, as per iswpunct()
   "space"  -- whitespace, as per iswspace()
   "upper"  -- uppercase, as per iswupper()
   "xdigit" -- hexadecimal digit, as per iswxdigit()
   For unsupported properties, the function returns zero.
*/
wctype_t wctype( const char * property );
/* Wide character case mapping utilities */
/* Converts an uppercase letter to a corresponding lowercase letter. Input for
   which no corresponding lowercase letter exists remains unchanged.
*/
wint_t towlower( wint_t wc );
/* Converts a lowercase letter to a corresponding uppercase letter. Input for
   which no corresponding uppercase letter exists remains unchanged.
*/
wint_t towupper( wint_t wc );
/* Extensible wide character case mapping utilities */
/* Converts the wide character wc according to the transition described
   by desc (which was retrieved by a previous call to wctrans() without
   changing the LC_CTYPE locale setting between the two calls).
*/
wint_t towctrans( wint_t wc, wctrans_t desc );
/* Returns a description object for a named character transformation, to
   be used as parameter to the towctrans() function. Supported transformation
   properties are:
   "tolower" -- lowercase mapping, as per towlower()
   "toupper" -- uppercase mapping, as per towupper()
   For unsupported properties, the function returns zero.
*/
wctrans_t wctrans( const char * property );
CPP_CHECK_END
<filename>services/api/src/utils/__tests__/csv.js
const mongoose = require('mongoose');
const { csvExport } = require('../csv');
const { dedent: d } = require('../string');
// Flat fixture: two primitive fields, no nesting.
const user = {
  firstName: 'John',
  lastName: 'Doe',
};

// Nested fixture: a primitive field plus two nested objects,
// used to exercise dotted-path include/exclude options.
const complex = {
  user,
  status: 'active',
  address: {
    city: 'Baltimore',
    state: 'MD',
  },
};
describe('csvExport', () => {
  it('should export a simple object', async () => {
    const csv = await run([user]);
    expect(csv).toBe(d`
      firstName,lastName
      John,Doe
    `);
  });

  it('should export nested objects', async () => {
    const csv = await run([{ user }]);
    expect(csv).toBe(d`
      user.firstName,user.lastName
      John,Doe
    `);
  });

  it('should export multiple nested', async () => {
    const csv = await run([{ user }, { user }, { user }]);
    expect(csv).toBe(d`
      user.firstName,user.lastName
      John,Doe
      John,Doe
      John,Doe
    `);
  });

  it('should include specific fields', async () => {
    // Fix: the include entries previously contained stray trailing commas
    // ('user.firstName,' / 'address.city,') that do not match the asserted
    // header columns below.
    const csv = await run([complex], {
      include: ['status', 'user.firstName', 'address.city'],
    });
    expect(csv).toBe(d`
      status,user.firstName,address.city
      active,John,Baltimore
    `);
  });

  it('should include specific parents', async () => {
    // Including a parent key ('user') pulls in all of its leaf fields.
    const csv = await run([complex], {
      include: ['status', 'user', 'address.city'],
    });
    expect(csv).toBe(d`
      status,user.firstName,user.lastName,address.city
      active,John,Doe,Baltimore
    `);
  });

  it('should follow include order', async () => {
    // Fix: removed the stray trailing comma from 'user.firstName,'.
    const csv = await run([complex], {
      include: ['address.city', 'status', 'user.firstName'],
    });
    expect(csv).toBe(d`
      address.city,status,user.firstName
      Baltimore,active,John
    `);
  });

  it('should exclude specific fields', async () => {
    const csv = await run([complex], {
      exclude: ['user.lastName', 'address.city'],
    });
    expect(csv).toBe(d`
      user.firstName,status,address.state
      John,active,MD
    `);
  });

  it('should call toObject to prevent private field access', async () => {
    // When present, toObject() output wins over the object's own fields.
    const obj = {
      user,
      toObject: () => {
        return {
          user: {
            firstName: 'Frank',
            lastName: 'Reynolds',
          },
        };
      },
    };
    const csv = await run([obj]);
    expect(csv).toBe(d`
      user.firstName,user.lastName
      Frank,Reynolds
    `);
  });

  it('should not expose ObjectId or id field unless included', async () => {
    const userId = mongoose.Types.ObjectId();
    const projectId = mongoose.Types.ObjectId();
    const obj = {
      id: 'foo',
      name: 'Frank',
      userId,
      projectId,
    };
    // 'id' and 'userId' are omitted because only name/projectId are included.
    const csv = await run([obj], {
      include: ['name', 'projectId'],
    });
    expect(csv).toBe(d`
      name,projectId
      Frank,${projectId}
    `);
  });
});
// Drives csvExport with a minimal Koa-like context stub and returns the
// exported CSV as a string. csvExport assigns the response stream to ctx.body.
async function run(arr, options) {
  const ctx = { set: () => {} };
  csvExport(ctx, arr, { filename: 'test.csv', ...options });
  const output = await streamToString(ctx.body);
  return output;
}
// Collects every 'data' chunk from a readable stream, then decodes the
// concatenation as UTF-8 once 'end' fires. Rejects on stream 'error'.
function streamToString(stream) {
  return new Promise((resolve, reject) => {
    const collected = [];
    stream.on('data', (piece) => {
      collected.push(Buffer.from(piece));
    });
    stream.on('error', reject);
    stream.on('end', () => {
      resolve(Buffer.concat(collected).toString('utf8'));
    });
  });
}
|
<gh_stars>1-10
"""
Repository of test pipelines
"""
from dagster import (
Int,
ModeDefinition,
PipelineDefinition,
PresetDefinition,
repository,
resource,
solid,
)
from dagster.utils import file_relative_path
def define_empty_pipeline():
    """Build a pipeline containing no solids at all (degenerate case)."""
    pipeline = PipelineDefinition(name="empty_pipeline", solid_defs=[])
    return pipeline
def define_single_mode_pipeline():
    """Build a one-solid pipeline that declares a single custom mode."""

    @solid
    def return_two(_context):
        return 2

    return PipelineDefinition(
        name="single_mode",
        solid_defs=[return_two],
        mode_defs=[ModeDefinition(name="the_mode")],
    )
def define_multi_mode_pipeline():
    """Build a one-solid pipeline that exposes two execution modes.

    Consistency fix: the second ``ModeDefinition`` now passes ``name=``
    explicitly, matching the keyword style used for every other
    ``ModeDefinition`` in this module (behaviour is unchanged — ``name``
    is the first parameter).
    """

    @solid
    def return_three(_context):
        return 3

    return PipelineDefinition(
        name="multi_mode",
        solid_defs=[return_three],
        mode_defs=[ModeDefinition(name="mode_one"), ModeDefinition(name="mode_two")],
    )
def define_multi_mode_with_resources_pipeline():
    """Build a pipeline whose single solid applies the mode-selected 'op' resource to 3.

    Three modes bind different resources under the key ``op``: an adder, a
    multiplier, and a two-constant adder.  Two presets are provided: one loaded
    from a YAML file and one inline multiprocess config.
    """
    # API red alert. One has to wrap a type in Field because it is callable

    # Resource returning a closure that adds the configured Int to its argument.
    @resource(config_schema=Int)
    def adder_resource(init_context):
        return lambda x: x + init_context.resource_config

    # Resource returning a closure that multiplies by the configured Int.
    @resource(config_schema=Int)
    def multer_resource(init_context):
        return lambda x: x * init_context.resource_config

    # Resource taking two configured Ints and adding both to its argument.
    @resource(config_schema={"num_one": Int, "num_two": Int})
    def double_adder_resource(init_context):
        return (
            lambda x: x
            + init_context.resource_config["num_one"]
            + init_context.resource_config["num_two"]
        )

    # The solid's behaviour depends entirely on which mode supplied the 'op' resource.
    @solid(required_resource_keys={"op"})
    def apply_to_three(context):
        return context.resources.op(3)

    return PipelineDefinition(
        name="multi_mode_with_resources",
        solid_defs=[apply_to_three],
        mode_defs=[
            ModeDefinition(name="add_mode", resource_defs={"op": adder_resource}),
            ModeDefinition(name="mult_mode", resource_defs={"op": multer_resource}),
            ModeDefinition(
                name="double_adder_mode",
                resource_defs={"op": double_adder_resource},
                description="Mode that adds two numbers to thing",
            ),
        ],
        preset_defs=[
            # File-based preset: config lives next to the repo in environments/.
            PresetDefinition.from_files(
                "add",
                mode="add_mode",
                config_files=[
                    file_relative_path(
                        __file__, "../environments/multi_mode_with_resources/add_mode.yaml"
                    )
                ],
            ),
            # Inline preset exercising the multiprocess executor.
            PresetDefinition(
                "multiproc",
                mode="add_mode",
                run_config={
                    "resources": {"op": {"config": 2}},
                    "execution": {"multiprocess": {}},
                    "intermediate_storage": {"filesystem": {}},
                },
            ),
        ],
    )
@repository
def dagster_test_repository():
    """Expose every test pipeline defined in this module as one repository."""
    builders = [
        define_empty_pipeline,
        define_single_mode_pipeline,
        define_multi_mode_pipeline,
        define_multi_mode_with_resources_pipeline,
    ]
    return [build() for build in builders]
def test_repository_construction():
    # The @repository decorator constructs the repository (and thus every
    # pipeline) at import time, so asserting the object is truthy is enough
    # to verify construction succeeded.
    assert dagster_test_repository
@repository
def empty_repository():
    """A repository with no definitions, covering the empty edge case."""
    return list()
|
#!/bin/bash
# Provisioning script: builds a patched bitzec daemon with the extended RPC
# calls required by the insight block explorer.

# Install build dependencies.
# Fix: '-y' keeps apt-get non-interactive so the script does not hang on a
# confirmation prompt when run unattended (its advertised usage is being
# piped from wget, see the companion installExplorer.sh command below).
sudo apt-get update
sudo apt-get install -y \
    build-essential pkg-config libc6-dev m4 g++-multilib \
    autoconf libtool ncurses-dev unzip git python python-zmq \
    zlib1g-dev wget curl bsdmainutils automake

# Clone and build the explorer-patched bitzec daemon in $HOME.
cd
git clone https://github.com/bitzec/bitzec-patched-for-explorer.git
cd bitzec-patched-for-explorer
./zcutil/fetch-params.sh                       # fetch the zk-SNARK parameter files
./zcutil/build.sh --disable-tests -j$(nproc)   # parallel build, test suite skipped
cd

echo "bitzecd with extended RPC functionalities is prepared. Please run following command to install insight explorer for bitzec"
echo "wget -qO- https://raw.githubusercontent.com/bitzec/bitcore-node/master/installExplorer.sh | bash"
|
<reponame>VerdaPegasus/FarmersDelight
package vectorwing.farmersdelight.client.renderer;
import com.mojang.blaze3d.platform.NativeImage;
import com.mojang.blaze3d.vertex.PoseStack;
import com.mojang.blaze3d.vertex.VertexConsumer;
import com.mojang.math.Vector3f;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.Font;
import net.minecraft.client.model.geom.ModelLayers;
import net.minecraft.client.player.LocalPlayer;
import net.minecraft.client.renderer.MultiBufferSource;
import net.minecraft.client.renderer.blockentity.BlockEntityRendererProvider;
import net.minecraft.client.renderer.blockentity.SignRenderer;
import net.minecraft.client.resources.model.Material;
import net.minecraft.util.FormattedCharSequence;
import net.minecraft.util.Mth;
import net.minecraft.world.entity.Entity;
import net.minecraft.world.item.DyeColor;
import net.minecraft.world.level.block.StandingSignBlock;
import net.minecraft.world.level.block.WallSignBlock;
import net.minecraft.world.level.block.entity.SignBlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.properties.WoodType;
import net.minecraft.world.phys.Vec3;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import vectorwing.farmersdelight.common.block.state.CanvasSign;
import vectorwing.farmersdelight.common.registry.ModAtlases;
import javax.annotation.Nullable;
import java.util.List;
@OnlyIn(Dist.CLIENT)
@OnlyIn(Dist.CLIENT)
/**
 * Block-entity renderer for canvas signs. Mirrors vanilla {@link SignRenderer}
 * but swaps in a canvas material (optionally dyed via {@link CanvasSign}) and
 * renders the four text lines with optional glowing-text outlines.
 */
public class CanvasSignRenderer extends SignRenderer
{
	// Vertical distance between consecutive text lines, in text-scale units.
	public static final float TEXT_LINE_HEIGHT = 10;
	// Upward offset applied to the first line so all four lines sit on the board.
	public static final float TEXT_VERTICAL_OFFSET = 19;
	// Squared distance (16 blocks) beyond which the glowing-text outline is skipped.
	private static final int OUTLINE_RENDER_DISTANCE = Mth.square(16);

	private final SignModel signModel;
	private final Font font;

	public CanvasSignRenderer(BlockEntityRendererProvider.Context context) {
		super(context);
		// Reuse the vanilla spruce sign model layer for the canvas sign geometry.
		this.signModel = new SignRenderer.SignModel(context.bakeLayer(ModelLayers.createSignModelName(WoodType.SPRUCE)));
		this.font = context.getFont();
	}

	@Override
	public void render(SignBlockEntity blockEntity, float pPartialTick, PoseStack poseStack, MultiBufferSource pBufferSource, int pPackedLight, int pPackedOverlay) {
		BlockState state = blockEntity.getBlockState();
		poseStack.pushPose();
		SignRenderer.SignModel signrenderer$signmodel = signModel;
		if (state.getBlock() instanceof StandingSignBlock) {
			// Standing sign: rotate by the 16-step ROTATION property and show the post.
			poseStack.translate(0.5D, 0.5D, 0.5D);
			float f1 = -((float) (state.getValue(StandingSignBlock.ROTATION) * 360) / 16.0F);
			poseStack.mulPose(Vector3f.YP.rotationDegrees(f1));
			signrenderer$signmodel.stick.visible = true;
		} else {
			// Wall sign: face the FACING direction, offset onto the wall, hide the post.
			poseStack.translate(0.5D, 0.5D, 0.5D);
			float f4 = -state.getValue(WallSignBlock.FACING).toYRot();
			poseStack.mulPose(Vector3f.YP.rotationDegrees(f4));
			poseStack.translate(0.0D, -0.3125D, -0.4375D);
			signrenderer$signmodel.stick.visible = false;
		}

		// Board geometry (scaled and Y/Z-flipped as in the vanilla renderer).
		poseStack.pushPose();
		float rootScale = 0.6666667F;
		poseStack.scale(rootScale, -rootScale, -rootScale);
		DyeColor dye = null;
		if (state.getBlock() instanceof CanvasSign canvasSign) {
			dye = canvasSign.getBackgroundColor();
		}
		// Dyed canvas material if the sign block carries a background colour, blank otherwise.
		Material material = getMaterial(dye);
		VertexConsumer vertexconsumer = material.buffer(pBufferSource, signrenderer$signmodel::renderType);
		signrenderer$signmodel.root.render(poseStack, vertexconsumer, pPackedLight, pPackedOverlay);
		poseStack.popPose();

		// Text pass: each line is clipped to 90 units wide, first split segment only.
		float textScale = 0.010416667F;
		poseStack.translate(0.0D, 0.33333334F, 0.046666667F);
		poseStack.scale(textScale, -textScale, textScale);
		FormattedCharSequence[] aformattedcharsequence = blockEntity.getRenderMessages(Minecraft.getInstance().isTextFilteringEnabled(), (component) -> {
			List<FormattedCharSequence> list = this.font.split(component, 90);
			return list.isEmpty() ? FormattedCharSequence.EMPTY : list.get(0);
		});
		int darkColor;
		int baseColor;
		boolean hasOutline;
		int light;
		if (blockEntity.hasGlowingText()) {
			// Glowing text renders fullbright with an optional dark outline.
			darkColor = getDarkColor(blockEntity, true);
			baseColor = blockEntity.getColor().getTextColor();
			hasOutline = isOutlineVisible(blockEntity, baseColor);
			light = 15728880;
		} else {
			// Plain text uses the darkened dye colour and world lighting.
			darkColor = getDarkColor(blockEntity, false);
			baseColor = darkColor;
			hasOutline = false;
			light = pPackedLight;
		}

		// Signs always have exactly four text lines.
		for (int i1 = 0; i1 < 4; ++i1) {
			FormattedCharSequence formattedcharsequence = aformattedcharsequence[i1];
			float x = (float) (-this.font.width(formattedcharsequence) / 2); // centre horizontally
			float y = i1 * TEXT_LINE_HEIGHT - TEXT_VERTICAL_OFFSET;
			if (hasOutline) {
				this.font.drawInBatch8xOutline(formattedcharsequence, x, y, baseColor, darkColor, poseStack.last().pose(), pBufferSource, light);
			} else {
				this.font.drawInBatch(formattedcharsequence, x, y, baseColor, false, poseStack.last().pose(), pBufferSource, false, 0, light);
			}
		}

		poseStack.popPose();
	}

	/**
	 * Outline is drawn for black text always; otherwise only when the camera is
	 * scoping in first person or within {@link #OUTLINE_RENDER_DISTANCE}.
	 */
	private static boolean isOutlineVisible(SignBlockEntity blockEntity, int textColor) {
		if (textColor == DyeColor.BLACK.getTextColor()) {
			return true;
		} else {
			Minecraft minecraft = Minecraft.getInstance();
			LocalPlayer localPlayer = minecraft.player;
			if (localPlayer != null && minecraft.options.getCameraType().isFirstPerson() && localPlayer.isScoping()) {
				return true;
			} else {
				Entity entity = minecraft.getCameraEntity();
				return entity != null && entity.distanceToSqr(Vec3.atCenterOf(blockEntity.getBlockPos())) < (double) OUTLINE_RENDER_DISTANCE;
			}
		}
	}

	/**
	 * Scales the dye's text colour down (40% when outlined, 60% otherwise) and
	 * repacks the channels via NativeImage.combine. Black glowing text uses the
	 * hard-coded -988212 — presumably matching vanilla's outline colour; confirm
	 * against SignRenderer.
	 */
	private static int getDarkColor(SignBlockEntity blockEntity, boolean isOutlineVisible) {
		int textColor = blockEntity.getColor().getTextColor();
		double brightness = isOutlineVisible ? 0.4D : 0.6D;
		int red = (int) ((double) NativeImage.getR(textColor) * brightness);
		int green = (int) ((double) NativeImage.getG(textColor) * brightness);
		int blue = (int) ((double) NativeImage.getB(textColor) * brightness);
		return textColor == DyeColor.BLACK.getTextColor() && blockEntity.hasGlowingText() ? -988212 : NativeImage.combine(0, blue, green, red);
	}

	/** Dyed canvas material for the given colour, or the blank canvas when null. */
	public static Material getMaterial(@Nullable DyeColor dyeColor) {
		return dyeColor != null ? ModAtlases.DYED_CANVAS_SIGN_MATERIALS.get(dyeColor) : ModAtlases.BLANK_CANVAS_SIGN_MATERIAL;
	}
}
|
package com.lilithsthrone.game.inventory.clothing;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import com.lilithsthrone.game.character.CharacterUtils;
import com.lilithsthrone.game.character.GameCharacter;
import com.lilithsthrone.game.character.attributes.Attribute;
import com.lilithsthrone.game.character.body.CoverableArea;
import com.lilithsthrone.game.character.body.types.PenisType;
import com.lilithsthrone.game.character.body.types.VaginaType;
import com.lilithsthrone.game.dialogue.utils.UtilText;
import com.lilithsthrone.game.inventory.AbstractCoreItem;
import com.lilithsthrone.game.inventory.AbstractCoreType;
import com.lilithsthrone.game.inventory.InventorySlot;
import com.lilithsthrone.game.inventory.ItemTag;
import com.lilithsthrone.game.inventory.Rarity;
import com.lilithsthrone.game.inventory.enchanting.AbstractItemEffectType;
import com.lilithsthrone.game.inventory.enchanting.ItemEffect;
import com.lilithsthrone.game.inventory.enchanting.ItemEffectType;
import com.lilithsthrone.game.inventory.enchanting.TFEssence;
import com.lilithsthrone.game.inventory.enchanting.TFModifier;
import com.lilithsthrone.game.inventory.enchanting.TFPotency;
import com.lilithsthrone.main.Main;
import com.lilithsthrone.rendering.Pattern;
import com.lilithsthrone.utils.Colour;
import com.lilithsthrone.utils.Util;
import com.lilithsthrone.utils.XMLSaving;
import java.util.Set;
/**
* @since 0.1.0
* @version 0.2.5
* @author Innoxia
*/
public abstract class AbstractClothing extends AbstractCoreItem implements Serializable, XMLSaving {
	private static final long serialVersionUID = 1L;

	// The immutable type definition this item instantiates.
	private AbstractClothingType clothingType;
	// Enchantment/jinx effects currently applied to this item.
	protected List<ItemEffect> effects;

	// Optional extra colours for the type's SVG layers (may be null).
	private Colour secondaryColour;
	private Colour tertiaryColour;

	// cummedIn: whether the item is dirty; enchantmentKnown: whether its effects have been identified.
	private boolean cummedIn, enchantmentKnown;
	// Displacements (pulled aside, unzipped, ...) currently applied while worn.
	private List<DisplacementType> displacedList;

	// Name of the applied pattern, or null for none (getPattern() maps null to "none").
	private String pattern; // name of the pattern.
	private Colour patternColour;
	private Colour patternSecondaryColour;
	private Colour patternTertiaryColour;
	/**
	 * Creates clothing of the given type and colours, seeding effects from the type.
	 * If the type has no innate effects, {@code allowRandomEnchantment} is true and
	 * the type's rarity is COMMON, the item may roll a random jinx (chance <= 25:
	 * sealing plus one or two negative attribute effects) or enchantment
	 * (chance >= 75: one or two positive attribute effects); in either case the
	 * enchantment starts out unidentified.
	 */
	public AbstractClothing(AbstractClothingType clothingType, Colour colour, Colour secondaryColour, Colour tertiaryColour, boolean allowRandomEnchantment) {
		super(clothingType.getName(),
				clothingType.getNamePlural(),
				clothingType.getPathName(),
				// Fall back to a random legal primary colour if the requested one is unavailable for this type:
				clothingType.getAllAvailablePrimaryColours().contains(colour) ? colour : clothingType.getAllAvailablePrimaryColours().get(Util.random.nextInt(clothingType.getAllAvailablePrimaryColours().size())),
				clothingType.getRarity(),
				null);

		this.itemTags = new HashSet<>(clothingType.getItemTags());

		this.clothingType = clothingType;
		if(clothingType.getEffects()==null) {
			this.effects = new ArrayList<>();
		} else {
			// Copy so per-item effect changes don't mutate the shared type definition.
			this.effects = new ArrayList<>(clothingType.getEffects());
		}

		cummedIn = false;
		enchantmentKnown = true;

		this.secondaryColour = secondaryColour;
		this.tertiaryColour = tertiaryColour;

		patternColour = Colour.CLOTHING_BLACK;
		patternSecondaryColour = Colour.CLOTHING_BLACK;
		patternTertiaryColour = Colour.CLOTHING_BLACK;

		displacedList = new ArrayList<>();

		if (effects.isEmpty() && allowRandomEnchantment && getClothingType().getRarity() == Rarity.COMMON) {
			int chance = Util.random.nextInt(100) + 1; // 1..100
			// Pick two distinct random clothing attribute modifiers:
			List<TFModifier> attributeMods = new ArrayList<>(TFModifier.getClothingAttributeList());

			TFModifier rndMod = attributeMods.get(Util.random.nextInt(attributeMods.size()));
			attributeMods.remove(rndMod);
			TFModifier rndMod2 = attributeMods.get(Util.random.nextInt(attributeMods.size()));

			if (chance <= 25) { // Jinxed:
				effects.add(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_SEALING, TFModifier.ARCANE_BOOST, TFPotency.MINOR_BOOST, 0));
				effects.add(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_ATTRIBUTE, rndMod, TFPotency.getRandomWeightedNegativePotency(), 0));
				if(chance <10) {
					// Stronger jinxes get a second negative effect.
					effects.add(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_ATTRIBUTE, rndMod2, TFPotency.getRandomWeightedNegativePotency(), 0));
				}
				enchantmentKnown = false;

			} else if (chance >= 75) { // Enchanted:
				effects.add(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_ATTRIBUTE, rndMod, TFPotency.getRandomWeightedPositivePotency(), 0));
				if(chance > 90) {
					// Stronger enchantments get a second positive effect.
					effects.add(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_ATTRIBUTE, rndMod2, TFPotency.getRandomWeightedPositivePotency(), 0));
				}
				enchantmentKnown = false;
			}
		}
	}
	/**
	 * Creates clothing of the given type and colours with an explicit effect list
	 * (a null list means no effects). The item always starts with its enchantment
	 * unidentified.
	 */
	public AbstractClothing(AbstractClothingType clothingType, Colour colour, Colour secondaryColour, Colour tertiaryColour, List<ItemEffect> effects) {
		super(clothingType.getName(),
				clothingType.getNamePlural(),
				clothingType.getPathName(),
				// Fall back to a random legal primary colour if the requested one is unavailable for this type:
				clothingType.getAllAvailablePrimaryColours().contains(colour) ? colour : clothingType.getAllAvailablePrimaryColours().get(Util.random.nextInt(clothingType.getAllAvailablePrimaryColours().size())),
				clothingType.getRarity(),
				null);

		this.itemTags = new HashSet<>(clothingType.getItemTags());

		this.clothingType = clothingType;

		cummedIn = false;
		enchantmentKnown = true;

		this.secondaryColour = secondaryColour;
		this.tertiaryColour = tertiaryColour;

		patternColour = Colour.CLOTHING_BLACK;
		patternSecondaryColour = Colour.CLOTHING_BLACK;
		patternTertiaryColour = Colour.CLOTHING_BLACK;

		displacedList = new ArrayList<>();

		if(effects!=null) {
			this.effects = new ArrayList<>(effects);
		} else {
			this.effects = new ArrayList<>();
		}

		// NOTE(review): this overrides the 'true' assigned above, so items built via
		// this constructor are always unidentified — presumably intentional for
		// explicitly-enchanted items, but confirm against callers.
		enchantmentKnown = false;
	}
@Override
public boolean equals (Object o) {
if(super.equals(o)){
if(o instanceof AbstractClothing){
if(((AbstractClothing)o).getClothingType().equals(getClothingType())
&& ((AbstractClothing)o).getSecondaryColour()==secondaryColour
&& ((AbstractClothing)o).getTertiaryColour()==tertiaryColour
&& ((AbstractClothing)o).getPattern().equals(getPattern())
&& ((AbstractClothing)o).isSealed()==this.isSealed()
&& ((AbstractClothing)o).isDirty()==cummedIn
&& ((AbstractClothing)o).isEnchantmentKnown()==enchantmentKnown
&& ((AbstractClothing)o).isBadEnchantment()==this.isBadEnchantment()
&& ((AbstractClothing)o).getEffects().equals(this.getEffects())
){
return true;
}
}
}
return false;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + getClothingType().hashCode();
if(getSecondaryColour()!=null) {
result = 31 * result + getSecondaryColour().hashCode();
}
if(getTertiaryColour()!=null) {
result = 31 * result + getTertiaryColour().hashCode();
}
result = 31 * result + getPattern().hashCode();
result = 31 * result + (this.isSealed() ? 1 : 0);
result = 31 * result + (cummedIn ? 1 : 0);
result = 31 * result + (enchantmentKnown ? 1 : 0);
result = 31 * result + (this.isBadEnchantment() ? 1 : 0);
result = 31 * result + this.getEffects().hashCode();
return result;
}
	/**
	 * Serialises this clothing to a new "clothing" element appended to
	 * {@code parentElement}: identifying attributes, then nested "effects" and
	 * "displacedList" child elements. Returns the created element.
	 */
	public Element saveAsXML(Element parentElement, Document doc) {
		Element element = doc.createElement("clothing");
		parentElement.appendChild(element);

		CharacterUtils.addAttribute(doc, element, "id", this.getClothingType().getId());
		CharacterUtils.addAttribute(doc, element, "name", name);
		CharacterUtils.addAttribute(doc, element, "colour", this.getColour().toString());
		CharacterUtils.addAttribute(doc, element, "colourSecondary", this.getSecondaryColour().toString());
		CharacterUtils.addAttribute(doc, element, "colourTertiary", this.getTertiaryColour().toString());
		CharacterUtils.addAttribute(doc, element, "patternColour", this.getPatternColour().toString());
		CharacterUtils.addAttribute(doc, element, "patternColourSecondary", this.getPatternSecondaryColour().toString());
		CharacterUtils.addAttribute(doc, element, "patternColourTertiary", this.getPatternTertiaryColour().toString());
		CharacterUtils.addAttribute(doc, element, "pattern", this.getPattern());
		CharacterUtils.addAttribute(doc, element, "isDirty", String.valueOf(this.isDirty()));
		CharacterUtils.addAttribute(doc, element, "enchantmentKnown", String.valueOf(this.isEnchantmentKnown()));

		// One child element per ItemEffect:
		Element innerElement = doc.createElement("effects");
		element.appendChild(innerElement);
		for(ItemEffect ie : this.getEffects()) {
			ie.saveAsXML(innerElement, doc);
		}

		// One "displacementType" child per active displacement:
		innerElement = doc.createElement("displacedList");
		element.appendChild(innerElement);
		for(DisplacementType dt : this.getDisplacedList()) {
			Element displacementType = doc.createElement("displacementType");
			innerElement.appendChild(displacementType);
			CharacterUtils.addAttribute(doc, displacementType, "value", dt.toString());
		}

		return element;
	}
	/**
	 * Recreates an AbstractClothing from a saved "clothing" XML element.
	 * Returns null (after logging a warning) when the type id cannot be resolved.
	 * Colours/pattern, core flags, effects and displacements are each parsed in
	 * their own try-block so one corrupt section does not abort the others.
	 */
	public static AbstractClothing loadFromXML(Element parentElement, Document doc) {
		AbstractClothing clothing = null;

		try {
			clothing = AbstractClothingType.generateClothing(ClothingType.getClothingTypeFromId(parentElement.getAttribute("id")), false);
		} catch(Exception ex) {
			System.err.println("Warning: An instance of AbstractClothing was unable to be imported. ("+parentElement.getAttribute("id")+")");
			return null;
		}

		if(clothing==null) {
			System.err.println("Warning: An instance of AbstractClothing was unable to be imported. ("+parentElement.getAttribute("id")+")");
			return null;
		}

		// A non-empty "name" attribute means the item carried a custom name.
		if(!parentElement.getAttribute("name").isEmpty()) {
			clothing.setName(parentElement.getAttribute("name"));
		}

		// Try to load colour:
		try {
			clothing.setColour(Colour.valueOf(parentElement.getAttribute("colour")));
			// Secondary/tertiary colours are only applied if still legal for this type:
			if(!parentElement.getAttribute("colourSecondary").isEmpty()) {
				Colour secColour = Colour.valueOf(parentElement.getAttribute("colourSecondary"));
				if(clothing.clothingType.getAllAvailableSecondaryColours().contains(secColour)) {
					clothing.setSecondaryColour(secColour);
				}
			}
			if(!parentElement.getAttribute("colourTertiary").isEmpty()) {
				Colour terColour = Colour.valueOf(parentElement.getAttribute("colourTertiary"));
				if(clothing.clothingType.getAllAvailableTertiaryColours().contains(terColour)) {
					clothing.setTertiaryColour(terColour);
				}
			}
			if(!parentElement.getAttribute("pattern").isEmpty()) {
				String pat = parentElement.getAttribute("pattern");
				clothing.setPattern(pat);
			}
			if(!parentElement.getAttribute("patternColour").isEmpty()) {
				Colour colour = Colour.valueOf(parentElement.getAttribute("patternColour"));
				clothing.setPatternColour(colour);
			}
			if(!parentElement.getAttribute("patternColourSecondary").isEmpty()) {
				Colour secColour = Colour.valueOf(parentElement.getAttribute("patternColourSecondary"));
				clothing.setPatternSecondaryColour(secColour);
			}
			if(!parentElement.getAttribute("patternColourTertiary").isEmpty()) {
				Colour terColour = Colour.valueOf(parentElement.getAttribute("patternColourTertiary"));
				clothing.setPatternTertiaryColour(terColour);
			}
		} catch(Exception ex) {
			// NOTE(review): parse failures are silently ignored, keeping the generated defaults.
		}

		// Try to load core features:
		try {
			if(!parentElement.getAttribute("sealed").isEmpty()) {
				clothing.setSealed(Boolean.valueOf(parentElement.getAttribute("sealed")));
			}
			clothing.setDirty(Boolean.valueOf(parentElement.getAttribute("isDirty")));
			clothing.setEnchantmentKnown(Boolean.valueOf(parentElement.getAttribute("enchantmentKnown")));
		} catch(Exception ex) {
			// NOTE(review): silently ignored, as above.
		}

		// Try to load attributes:
		// Legacy save format: an "attributeModifiers" element is converted into the
		// equivalent CLOTHING_ATTRIBUTE / CLOTHING_MAJOR_ATTRIBUTE ItemEffects,
		// mapping the saved integer value onto a TFPotency bucket.
		if(parentElement.getElementsByTagName("attributeModifiers")!=null && parentElement.getElementsByTagName("attributeModifiers").getLength()>0) {
			if(clothing.getClothingType().getClothingSet()==null) {
				clothing.getEffects().clear();

				Element element = (Element)parentElement.getElementsByTagName("attributeModifiers").item(0);
				NodeList modifierElements = element.getElementsByTagName("modifier");
				for(int i = 0; i < modifierElements.getLength(); i++){
					Element e = ((Element)modifierElements.item(i));

					try {
						Attribute att = Attribute.valueOf(e.getAttribute("attribute"));
						int value = Integer.valueOf(e.getAttribute("value"));

						// Bucket the raw integer into a potency tier:
						TFPotency pot = TFPotency.BOOST;
						if(value <= -5) {
							pot = TFPotency.MAJOR_DRAIN;
						} else if(value <= -3) {
							pot = TFPotency.DRAIN;
						} else if(value <= -1) {
							pot = TFPotency.MINOR_DRAIN;
						} else if(value <= 1) {
							pot = TFPotency.MINOR_BOOST;
						} else if(value <= 3) {
							pot = TFPotency.BOOST;
						} else {
							pot = TFPotency.MAJOR_BOOST;
						}

						// Match the attribute to a standard or major clothing modifier:
						for(TFModifier mod : TFModifier.getClothingAttributeList()) {
							if(mod.getAssociatedAttribute()==att) {
								clothing.addEffect(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_ATTRIBUTE, mod, pot, 0));
								break;
							}
						}
						for(TFModifier mod : TFModifier.getClothingMajorAttributeList()) {
							if(mod.getAssociatedAttribute()==att) {
								clothing.addEffect(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_MAJOR_ATTRIBUTE, mod, pot, 0));
								break;
							}
						}
					} catch(Exception ex) {
						// NOTE(review): individual malformed modifiers are skipped silently.
					}
				}
			}

		} else {
			// Current save format: load ItemEffects directly from the "effects" element.
			try {
				clothing.getEffects().clear();

				Element element = (Element)parentElement.getElementsByTagName("effects").item(0);
				NodeList effectElements = element.getElementsByTagName("effect");
				for(int i=0; i<effectElements.getLength(); i++){
					Element e = ((Element)effectElements.item(i));
					ItemEffect ie = ItemEffect.loadFromXML(e, doc);
					if(ie!=null) {
						clothing.addEffect(ie);
					}
				}
			} catch(Exception ex) {
			}
		}

		// Try to load displacements:
		try {
			clothing.displacedList = new ArrayList<>();
			Element displacementElement = (Element)parentElement.getElementsByTagName("displacedList").item(0);
			NodeList displacementTypeElements = displacementElement.getElementsByTagName("displacementType");
			for(int i = 0; i < displacementTypeElements.getLength(); i++){
				Element e = ((Element)displacementTypeElements.item(i));

				DisplacementType dt = DisplacementType.valueOf(e.getAttribute("value"));
				// Only keep displacements that are actually valid for this clothing type:
				boolean displacementTypeFound = false;
				for (BlockedParts bp : clothing.getClothingType().getBlockedPartsList()) {
					if (bp.displacementType == dt)
						displacementTypeFound = true;
				}
				if(displacementTypeFound)
					clothing.displacedList.add(dt);
				else
					System.err.println("Warning: Invalid displacement");
			}
		} catch(Exception ex) {
		}

		return clothing;
	}
	// Secondary colour used by this item's SVG layers (may be null — see hashCode()).
	public Colour getSecondaryColour() {
		return secondaryColour;
	}
	public void setSecondaryColour(Colour secondaryColour) {
		this.secondaryColour = secondaryColour;
	}

	// Tertiary colour used by this item's SVG layers (may be null — see hashCode()).
	public Colour getTertiaryColour() {
		return tertiaryColour;
	}
	public void setTertiaryColour(Colour tertiaryColour) {
		this.tertiaryColour = tertiaryColour;
	}
	/**
	 * Returns the name of the pattern that the clothing has; never null — a
	 * patternless item reports "none".
	 * @return the pattern name, or "none"
	 */
	public String getPattern() {
		if(pattern == null) {
			return "none";
		}
		return pattern;
	}

	/**
	 * Changes pattern to the specified one. Will not render that pattern if it
	 * doesn't exist or the item doesn't support it anyway.
	 * @param pattern the pattern name to store
	 */
	public void setPattern(String pattern) {
		this.pattern = pattern;
	}

	// Pattern colours default to CLOTHING_BLACK (set in the constructors).
	public Colour getPatternColour() {
		return patternColour;
	}

	public Colour getPatternSecondaryColour() {
		return patternSecondaryColour;
	}

	public Colour getPatternTertiaryColour() {
		return patternTertiaryColour;
	}

	public void setPatternColour(Colour patternColour) {
		this.patternColour = patternColour;
	}

	public void setPatternSecondaryColour(Colour patternSecondaryColour) {
		this.patternSecondaryColour = patternSecondaryColour;
	}

	public void setPatternTertiaryColour(Colour patternTertiaryColour) {
		this.patternTertiaryColour = patternTertiaryColour;
	}
/**
* @return the name of a css class to use as a displayed rarity in inventory screens
*/
@Override
public String getDisplayRarity() {
if (!enchantmentKnown) {
return "unknown";
}
return super.getDisplayRarity();
}
	// Shared scratch buffer reused by getDescription(); not thread-safe —
	// NOTE(review): assumes descriptions are only built from one thread, confirm.
	private static StringBuilder descriptionSB = new StringBuilder();

	// Base description text straight from the clothing type definition.
	public String getTypeDescription() {
		return this.getClothingType().getDescription();
	}
	/**
	 * Builds the HTML description shown in the inventory: type description,
	 * physical resistance, then (if identified) effects, attribute modifiers and
	 * value — or an "unknown value" note when unidentified — and finally the
	 * clothing-set membership, if any.
	 */
	@Override
	public String getDescription() {
		descriptionSB.setLength(0); // reuse the shared buffer

		descriptionSB.append(
				"<p>"
					+ getTypeDescription()
				+ "</p>");

		// Physical resistance
		descriptionSB.append("<p>" + (getClothingType().isPlural() ? "They" : "It") + " provide" + (getClothingType().isPlural() ? "" : "s") + " <b>" + getClothingType().getPhysicalResistance() + "</b> <b style='color: "
				+ Attribute.RESISTANCE_PHYSICAL.getColour().toWebHexString() + ";'> " + Attribute.RESISTANCE_PHYSICAL.getName() + "</b>.</p>");

		if (enchantmentKnown) {
			if (!this.getEffects().isEmpty()) {
				descriptionSB.append("<p>Effects:");
				// Non-attribute effects are described individually; attribute effects
				// are summarised below via getAttributeModifiers().
				for (ItemEffect e : this.getEffects()) {
					if(e.getPrimaryModifier()!=TFModifier.CLOTHING_ATTRIBUTE
							&& e.getPrimaryModifier()!=TFModifier.CLOTHING_MAJOR_ATTRIBUTE) {
						for(String s : e.getEffectsDescription(Main.game.getPlayer(), Main.game.getPlayer())) {
							descriptionSB.append("<br/>"+ s);
						}
					}
				}
				for(Entry<Attribute, Integer> entry : this.getAttributeModifiers().entrySet()) {
					descriptionSB.append("<br/>"+
							(entry.getValue()<0
									?"[style.boldBad("+entry.getValue()+")] "
									:"[style.boldGood(+"+entry.getValue()+")] ")
							+ "<b style='color:"+entry.getKey().getColour().toWebHexString()+";'>"+Util.capitaliseSentence(entry.getKey().getName())+"</b>");
				}
				descriptionSB.append("</p>");
			}

			descriptionSB.append("<p>" + (getClothingType().isPlural() ? "They have" : "It has") + " a value of " + UtilText.formatAsMoney(getValue()) + ".");
		} else {
			descriptionSB.append("<br/>" + (getClothingType().isPlural() ? "They have" : "It has") + " an <b>unknown value</b>!");
		}

		descriptionSB.append("</p>");

		if (getClothingType().getClothingSet() != null) {
			descriptionSB.append("<p>" + (getClothingType().isPlural() ? "They are" : "It is") + " part of the <b style='color:" + Colour.RARITY_EPIC.toWebHexString() + ";'>"
					+ getClothingType().getClothingSet().getName() + "</b> set." + "</p>");
		}

		return descriptionSB.toString();
	}
	public AbstractClothingType getClothingType() {
		return clothingType;
	}

	// Equip-eligibility checks are delegated to the type definition:
	public boolean isCanBeEquipped(GameCharacter clothingOwner) {
		return this.getClothingType().isCanBeEquipped(clothingOwner);
	}

	public String getCannotBeEquippedText(GameCharacter clothingOwner) {
		return this.getClothingType().getCannotBeEquippedText(clothingOwner);
	}
	/**
	 * Effective rarity, derived in priority order: LEGENDARY/QUEST are kept as-is;
	 * set items (or stored EPIC) are EPIC; sealed or negatively-enchanted items are
	 * JINXED; more than one effect is RARE; any effect is UNCOMMON; else COMMON.
	 */
	@Override
	public Rarity getRarity() {
		if(rarity==Rarity.LEGENDARY || rarity==Rarity.QUEST) {
			return rarity;
		}
		if(this.getClothingType().getClothingSet()!=null || rarity==Rarity.EPIC) {
			return Rarity.EPIC;
		}
		if(this.isSealed() || this.isBadEnchantment()) {
			return Rarity.JINXED;
		}
		if(this.getEffects().size()>1) {
			return Rarity.RARE;
		}
		if(!this.getEffects().isEmpty()) {
			return Rarity.UNCOMMON;
		}
		return Rarity.COMMON;
	}
	/**
	 * Monetary value: the type's base value scaled by precious-metal colour
	 * (platinum x2, gold x1.75, rose gold x1.5, silver x1.25), halved when jinxed,
	 * plus a diminishing-returns bonus (capped via getModifiedDropoffValue) of 15
	 * per point of attribute modifier from the item and its set's status effect.
	 * Never returns less than 1.
	 */
	@Override
	public int getValue() {
		float runningTotal = this.getClothingType().getBaseValue();

		if (colourShade == Colour.CLOTHING_PLATINUM) {
			runningTotal *= 2f;

		} else if (colourShade == Colour.CLOTHING_GOLD) {
			runningTotal *= 1.75f;

		} else if (colourShade == Colour.CLOTHING_ROSE_GOLD) {
			runningTotal *= 1.5f;

		} else if (colourShade == Colour.CLOTHING_SILVER) {
			runningTotal *= 1.25f;
		}

		if(rarity==Rarity.JINXED) {
			runningTotal *= 0.5;
		}

		float attributeBonuses = 0;//getModifiedDropoffValue
		if (attributeModifiers != null) {
			for (Integer i : attributeModifiers.values()) {
				attributeBonuses += i * 15;
			}
		}

		// Set bonuses count too, read from the set's associated status effect:
		if (getClothingType().getClothingSet() != null) {
			if (getClothingType().getClothingSet().getAssociatedStatusEffect().getAttributeModifiers(Main.game.getPlayer()) != null) {
				for (Float f : getClothingType().getClothingSet().getAssociatedStatusEffect().getAttributeModifiers(Main.game.getPlayer()).values()) {
					attributeBonuses += f * 15;
				}
			}
		}

		// Diminishing returns past 500; negative totals contribute nothing.
		attributeBonuses = Util.getModifiedDropoffValue(attributeBonuses, 500);
		runningTotal += Math.max(0, attributeBonuses);

		if (runningTotal < 1) {
			runningTotal = 1;
		}

		return (int) runningTotal;
	}
@Override
public int getPrice(float modifier) {
if (!enchantmentKnown) {
return 50;
}
return super.getPrice(modifier);
}
@Override
public String getName() {
return this.getEffects().isEmpty()?this.getClothingType().getName():name;
}
/**
* @param withDeterminer
* True if you want the determiner to prefix the name
* @return A string in the format "blue shirt" or "a blue shirt"
*/
public String getName(boolean withDeterminer) {
return (withDeterminer ? (getClothingType().isPlural() ? getClothingType().getDeterminer() + " " : (Util.isVowel(getColour().getName().charAt(0)) ? "an " : "a ")) : "") + getColour().getName() + " " + getName();
}
public String getName(boolean withDeterminer, boolean withRarityColour) {
if (!enchantmentKnown) {
return (withDeterminer
? (getClothingType().isPlural()
? getClothingType().getDeterminer() + " "
: (Util.isVowel(getColour().getName().charAt(0)) ? "an " : "a "))
: "")
+ getColour().getName()
+ (withRarityColour
? (" <span style='color: " + Colour.RARITY_UNKNOWN.toWebHexString() + ";'>" + getName() + "</span>")
: " "+getName());
} else {
return (withDeterminer
? (getClothingType().isPlural()
? getClothingType().getDeterminer() + " "
: (Util.isVowel(getColour().getName().charAt(0)) ? "an " : "a "))
: "")
+ getColour().getName()
+ (withRarityColour
? (" <span style='color: " + this.getRarity().getColour().toWebHexString() + ";'>" + getName() + "</span>")
: " "+getName());
}
}
	/**
	 * @param withRarityColour
	 *            If true, the name will be coloured to its rarity (RARITY_UNKNOWN
	 *            while the enchantment is unidentified).
	 * @return A string in the format "Blue cap of frostbite" or
	 *         "Gold circlet of anti-magic"
	 */
	public String getDisplayName(boolean withRarityColour) {
		// Non-breaking spaces (\u00A0) are normalised to plain spaces before comparing,
		// so a name differing only in space kind does not count as "custom".
		if(!this.getName().replaceAll("\u00A0"," ").equalsIgnoreCase(this.getClothingType().getName().replaceAll("\u00A0"," "))) { // If this item has a custom name, just display that:
			return (withRarityColour
					? (" <span style='color: " + (!this.isEnchantmentKnown()?Colour.RARITY_UNKNOWN:this.getRarity().getColour()).toWebHexString() + ";'>" + getName() + "</span>")
					: getName());
		}
		// Otherwise: "<Colour> [<pattern>] <name>[ of <enchantment>]".
		// The enchantment postfix is suppressed for legendary/epic items.
		return Util.capitaliseSentence(getColour().getName()) + " "
				+ (!this.getPattern().equalsIgnoreCase("none")?Pattern.getPattern(this.getPattern()).getNiceName():"")
				+ (withRarityColour
						? (" <span style='color: " + (!this.isEnchantmentKnown()?Colour.RARITY_UNKNOWN:this.getRarity().getColour()).toWebHexString() + ";'>" + getName() + "</span>")
						: getName())
				+(!this.getEffects().isEmpty() && this.isEnchantmentKnown() && this.getRarity()!=Rarity.LEGENDARY && this.getRarity()!=Rarity.EPIC
					? " "+getEnchantmentPostfix(withRarityColour, "b")
					: "");
	}
@Override
public String getSVGString() {
// Delegates to the clothing type, passing all of this instance's colour/pattern state.
return getClothingType().getSVGImage(colourShade, secondaryColour, tertiaryColour, pattern, patternColour, patternSecondaryColour, patternTertiaryColour);
}
// Same as getSVGString(), but uses the character-specific equipped variant of the image.
public String getSVGEquippedString(GameCharacter character) {
return getClothingType().getSVGEquippedImage(character, colourShade, secondaryColour, tertiaryColour, pattern, patternColour, patternSecondaryColour, patternTertiaryColour);
}
/**
 * Applies any extra effects this clothing causes when being equipped.
 * Equipping an unidentified item permanently reveals its enchantment (or jinx)
 * as a side effect, and discovering a jinx costs the owner +1 corruption.
 *
 * @return A description of this clothing being equipped.
 */
public String onEquipApplyEffects(GameCharacter clothingOwner, GameCharacter clothingEquipper, boolean rough) {
if (!enchantmentKnown) {
// First equip of an unidentified item: mark it identified and build the reveal text.
enchantmentKnown = true;
pointlessSB.setLength(0);
if (this.isBadEnchantment()) {
// Discovering a jinx: owner gains +1 corruption.
clothingOwner.incrementAttribute(Attribute.MAJOR_CORRUPTION, 1);
pointlessSB.append(
getClothingType().equipText(clothingOwner, clothingEquipper, rough, this, true)
+ "<p style='text-align:center;'>"
+ "<b style='color:" + Colour.GENERIC_BAD.toWebHexString() + ";'>Jinx revealed:</b> "+getDisplayName(true));
// List each (negative) attribute modifier of the jinx.
for(Entry<Attribute, Integer> att : attributeModifiers.entrySet()) {
pointlessSB.append("<br/><b>(" + att.getValue()+"</b> <b style='color:"+att.getKey().getColour().toWebHexString()+";'>"+ Util.capitaliseSentence(att.getKey().getName()) + "</b><b>)</b>");
}
pointlessSB.append("<br/>"
+ "<b>"+(clothingOwner.isPlayer()?"You gain":UtilText.parse(clothingOwner, "[npc.Name] gains"))
+" +1</b> <b style='color:" + Colour.GENERIC_TERRIBLE.toWebHexString()+ ";'>core</b> <b style='color:" + Colour.ATTRIBUTE_CORRUPTION.toWebHexString() + ";'>corruption</b> <b>from discovering their jinx...</b>"
+ "</p>");
} else {
// Beneficial enchantment: list the revealed attribute bonuses (prefixed '+').
pointlessSB.append(
getClothingType().equipText(clothingOwner, clothingEquipper, rough, this, true)
+ "<p style='text-align:center;'>"
+ "<b style='color:" + Colour.GENERIC_GOOD.toWebHexString() + ";'>Enchantment revealed:</b> "+getDisplayName(true));
for(Entry<Attribute, Integer> att : attributeModifiers.entrySet()) {
pointlessSB.append("<br/><b>(+" + att.getValue()+"</b> <b style='color:"+att.getKey().getColour().toWebHexString()+";'>"+ Util.capitaliseSentence(att.getKey().getName()) + "</b><b>)</b>");
}
pointlessSB.append("</p>");
}
return pointlessSB.toString();
} else {
// Already identified: just the standard equip text (with effects applied).
return getClothingType().equipText(clothingOwner, clothingEquipper, rough, this, true);
}
}
/**
 * @return A description of this clothing being equipped.
 */
public String onEquipText(GameCharacter clothingOwner, GameCharacter clothingEquipper, boolean rough) {
// Text only — the final 'false' argument means no equip effects are applied.
return getClothingType().equipText(clothingOwner, clothingEquipper, rough, this, false);
}
/**
 * Applies any extra effects this clothing causes when being unequipped.
 *
 * @return A description of this clothing being unequipped.
 */
public String onUnequipApplyEffects(GameCharacter clothingOwner, GameCharacter clothingEquipper, boolean rough) {
// The final 'true' argument applies unequip effects as well as returning the text.
return getClothingType().unequipText(clothingOwner, clothingEquipper, rough, this, true);
}
/**
 * @return A description of this clothing being unequipped.
 */
public String onUnequipText(GameCharacter clothingOwner, GameCharacter clothingEquipper, boolean rough) {
// Text only — the final 'false' argument means no unequip effects are applied.
return getClothingType().unequipText(clothingOwner, clothingEquipper, rough, this, false);
}
// Scratch list reused by clothingExtraInformation() and getExtraDescriptions();
// cleared at the start of each use. NOTE(review): static + mutable, so not safe
// for concurrent use — confirm the game is single-threaded here.
private static List<String> incompatibleClothing = new ArrayList<>();
/**
 * Returns a formatted description of if this clothing is sealed, cummedIn, too feminine/masculine and what slots it is blocking.
 * Three cases: not equipped (prospective warnings), equipped by the player
 * (second-person text), or equipped by an NPC ([npc.*] placeholders, parsed at the end).
 */
public String clothingExtraInformation(GameCharacter equippedToCharacter) {
StringBuilder extraInformationSB = new StringBuilder();
if (equippedToCharacter == null) { // The clothing is not currently equipped by anyone:
// Build the list of already-worn items this one conflicts with (both directions:
// slots this item blocks, and worn items that block this item's slot).
incompatibleClothing.clear();
if (!getClothingType().getIncompatibleSlots().isEmpty()) {
for (InventorySlot invSlot : getClothingType().getIncompatibleSlots())
if (Main.game.getPlayer().getClothingInSlot(invSlot) != null)
incompatibleClothing.add(Main.game.getPlayer().getClothingInSlot(invSlot).getClothingType().getName());
}
for (AbstractClothing c : Main.game.getPlayer().getClothingCurrentlyEquipped())
for (InventorySlot invSlot : c.getClothingType().getIncompatibleSlots())
if (getClothingType().getSlot() == invSlot)
incompatibleClothing.add(c.getClothingType().getName());
if(!getClothingType().getIncompatibleSlots().isEmpty()) {
extraInformationSB.append("Equipping "+(getClothingType().isPlural()?"them":"it")+" will [style.boldBad(block)] your "+ Util.inventorySlotsToStringList(getClothingType().getIncompatibleSlots())+".<br/>");
}
if(Main.game.getPlayer().getClothingInSlot(this.getClothingType().getSlot())!=null && Main.game.getPlayer().getClothingInSlot(this.getClothingType().getSlot()).getClothingType().isDiscardedOnUnequip()) {
extraInformationSB.append("[style.boldBad(Equipping this will cause the "+Main.game.getPlayer().getClothingInSlot(this.getClothingType().getSlot()).getName()+" you're already wearing to be discarded!)]<br/>");
}
if(this.isSealed() && enchantmentKnown) {
extraInformationSB.append((getClothingType().isPlural() ? "They" : "It") + " will [style.boldJinx(jinx)] " + (getClothingType().isPlural() ? "themselves" : "itself") + " onto you!<br/>");
}
if(!enchantmentKnown) {
extraInformationSB.append("You can either take " + (getClothingType().isPlural() ? "them" : "it") + " to a suitable vendor, or equip " + (getClothingType().isPlural() ? "them" : "it") + " now to identify the"
+ " <b style='color: "+ Colour.RARITY_UNKNOWN.toWebHexString() + ";'>unknown enchantment</b>!<br/>");
}
if(cummedIn) {
extraInformationSB.append((getClothingType().isPlural() ? "They have" : "It has") + " been <b style='color: " + Colour.CUM.toWebHexString() + ";'>covered in sexual fluids</b>!<br/>");
}
if(this.getClothingType().isMufflesSpeech()) {
extraInformationSB.append((getClothingType().isPlural() ? "They [style.boldBad(muffle" : "It [style.boldBad(muffles") + " the wearer's speech)].<br/>");
}
if(this.getClothingType().isHindersLegMovement()) {
extraInformationSB.append((getClothingType().isPlural() ? "They [style.boldTerrible(block" : "It [style.boldTerrible(blocks") + " the wearer's escape in combat)] (if they are unable to fly).<br/>");
}
if(this.getClothingType().isHindersArmMovement()) {
extraInformationSB.append((getClothingType().isPlural() ? "They [style.boldTerrible(block" : "It [style.boldTerrible(blocks") + " flight from arm-wings)].<br/>");
}
if(getClothingType().getFemininityMaximum() < Main.game.getPlayer().getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.MASCULINE.toWebHexString() + ";'>too masculine</b> for you.<br/>");
}
if(getClothingType().getFemininityMinimum() > Main.game.getPlayer().getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.FEMININE.toWebHexString() + ";'>too feminine</b> for you.<br/>");
}
if(!incompatibleClothing.isEmpty()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color:" + Colour.GENERIC_BAD.toWebHexString() + ";'>incompatible</b> with your "
+ Util.stringsToStringList(incompatibleClothing, false) + ".<br/>");
}
if(extraInformationSB.length()==0) {
return "";
}
// substring(..., length-5) strips the trailing "<br/>" before wrapping in <p>.
return "<p>"+extraInformationSB.toString().substring(0, extraInformationSB.length()-5)+"</p>";
} else if (equippedToCharacter.isPlayer()) { // Character is player:
if(!getClothingType().getIncompatibleSlots().isEmpty()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.GENERIC_BAD.toWebHexString()
+ ";'>blocking</b> your " + Util.inventorySlotsToStringList(getClothingType().getIncompatibleSlots()) + "!<br/>");
}
if(this.isSealed()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.ATTRIBUTE_CORRUPTION.toWebHexString() + ";'>jinxed</b> and can't be removed!<br/>");
} else if(this.getClothingType().isDiscardedOnUnequip()) {
extraInformationSB.append("[style.boldBad(Removing your "+this.getName()+" will cause "+(getClothingType().isPlural() ? "them" : "it")+" to be discarded!)]<br/>");
}
if(cummedIn) {
extraInformationSB.append((getClothingType().isPlural() ? "They have" : "It has") + " been <b style='color: " + Colour.CUM.toWebHexString() + ";'>covered in sexual fluids</b>!<br/>");
}
if(this.getClothingType().isMufflesSpeech()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldBad(muffling your speech)].<br/>");
}
if(this.getClothingType().isHindersLegMovement() && !equippedToCharacter.isAbleToFly()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldTerrible(blocking your escape in combat)].<br/>");
}
if(this.getClothingType().isHindersArmMovement() && equippedToCharacter.isAbleToFlyFromArms()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are " : "It is") + " [style.boldTerrible(blocking flight from your arm-wings)].<br/>");
}
if(getClothingType().getFemininityMaximum() < equippedToCharacter.getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.MASCULINE.toWebHexString() + ";'>too masculine</b> for you.<br/>");
}
if(getClothingType().getFemininityMinimum() > equippedToCharacter.getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.FEMININE.toWebHexString() + ";'>too feminine</b> for you.<br/>");
}
if(!displacedList.isEmpty()) {
// NOTE(review): uses Colour.FEMININE for the displaced text here, while the NPC
// branch below uses GENERIC_BAD — confirm which colour is intended.
extraInformationSB.append((getClothingType().isPlural() ? "They have" : "It has") + " been <b style='color: " + Colour.FEMININE.toWebHexString() + ";'>"
+ Util.displacementTypesToStringList(displacedList) + "</b>!<br/>");
}
if(extraInformationSB.length()==0) {
return "";
}
return "<p>"+extraInformationSB.toString().substring(0, extraInformationSB.length()-5)+"</p>";
} else { // Character is an NPC:
if(!getClothingType().getIncompatibleSlots().isEmpty()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldBad(blocking)] [npc.her] "
+ Util.inventorySlotsToStringList(getClothingType().getIncompatibleSlots()) + "!<br/>");
}
if(this.isSealed()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldCorruption(jinxed)] and can't be removed!<br/>");
} else if(this.getClothingType().isDiscardedOnUnequip()) {
extraInformationSB.append("[style.boldBad(Removing [npc.namePos] "+this.getName()+" will cause "+(getClothingType().isPlural() ? "them" : "it")+" to be discarded!)]<br/>");
}
if(cummedIn) {
extraInformationSB.append((getClothingType().isPlural() ? "They have" : "It has") + " been <b style='color: " + Colour.CUM.toWebHexString() + ";'>covered in sexual fluids</b>!<br/>");
}
if(this.getClothingType().isMufflesSpeech()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldBad(muffling [npc.her] speech)].<br/>");
}
if(this.getClothingType().isHindersLegMovement() && !equippedToCharacter.isAbleToFly()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " [style.boldTerrible(blocking [npc.her] escape in combat)].<br/>");
}
if(this.getClothingType().isHindersArmMovement() && equippedToCharacter.isAbleToFlyFromArms()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are " : "It is") + " [style.boldTerrible(blocking flight from [npc.her] arm-wings)].<br/>");
}
if(getClothingType().getFemininityMaximum() < equippedToCharacter.getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.MASCULINE.toWebHexString() + ";'>too masculine</b> for [npc.herHim].<br/>");
}
if(getClothingType().getFemininityMinimum() > equippedToCharacter.getFemininityValue()) {
extraInformationSB.append((getClothingType().isPlural() ? "They are" : "It is") + " <b style='color: " + Colour.FEMININE.toWebHexString() + ";'>too feminine</b> for [npc.herHim].<br/>");
}
if(!displacedList.isEmpty()) {
extraInformationSB.append((getClothingType().isPlural() ? "They have been" : "It has been")
+ " <b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>"+ Util.displacementTypesToStringList(displacedList) + "</b>!<br/>");
}
if(extraInformationSB.length()==0) {
return "";
}
// NPC text contains [npc.*] placeholders, so parse against the wearer before returning.
return "<p>"+UtilText.parse(equippedToCharacter,extraInformationSB.toString().substring(0, extraInformationSB.length()-5))+"</p>";
}
}
/**
 * Builds an HTML list showing, for each of this clothing's displacement types,
 * whether it is currently available or blocked (and by which item) on the wearer.
 * Note: writes to the shared 'descriptionSB' field rather than a local builder.
 */
public String getDisplacementBlockingDescriptions(GameCharacter equippedToCharacter){
descriptionSB = new StringBuilder("<p><b>Displacement types:</b>");
for(BlockedParts bp : getClothingType().getBlockedPartsList()){
descriptionSB.append("<br/><b>"+Util.capitaliseSentence(bp.displacementType.getDescription())+":</b> ");
if(equippedToCharacter.isAbleToBeDisplaced(this, bp.displacementType, false, false, equippedToCharacter))
descriptionSB.append("<b style='color:"+Colour.GENERIC_GOOD.toWebHexString()+";'>Available</b>");
else
descriptionSB.append("<b style='color:"+Colour.GENERIC_BAD.toWebHexString()+";'>Blocked</b> by "+equippedToCharacter.getBlockingClothing().getName()+"");
}
descriptionSB.append("</p>");
return descriptionSB.toString();
}
/**
 * Collects short HTML tag-style descriptions for this clothing (movement hindrances,
 * plugs, jinx, dirtiness, femininity mismatch, slot conflicts, displacement).
 *
 * @param equippedToCharacter the wearer, or null if the item is not equipped
 *                            (null uses the player for femininity/slot checks).
 * @return list of HTML fragments, one per applicable property.
 */
public List<String> getExtraDescriptions(GameCharacter equippedToCharacter) {
List<String> descriptionsList = new ArrayList<>();
// Properties independent of who (if anyone) is wearing the item:
if(this.getClothingType().isHindersLegMovement()) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_TERRIBLE.toWebHexString() + ";'>Blocks non-flight escape in combat</b>");
}
if(this.getClothingType().isHindersArmMovement()) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_TERRIBLE.toWebHexString() + ";'>Blocks arm-wing flight</b>");
}
if(this.getClothingType().isMufflesSpeech()) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>Muffles speech</b>");
}
if(this.getItemTags().contains(ItemTag.PLUGS_ANUS)) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_SEX.toWebHexString() + ";'>Plugs Anus</b>");
}
if(this.getItemTags().contains(ItemTag.PLUGS_VAGINA)) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_SEX.toWebHexString() + ";'>Plugs Vagina</b>");
}
if(this.getItemTags().contains(ItemTag.PLUGS_NIPPLES)) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_SEX.toWebHexString() + ";'>Plugs Nipples</b>");
}
if (equippedToCharacter == null) { // The clothing is not currently
// equipped by anyone:
// Collect conflicts against the player's currently-worn items (both directions).
incompatibleClothing.clear();
if (!getClothingType().getIncompatibleSlots().isEmpty()) {
for (InventorySlot invSlot : getClothingType().getIncompatibleSlots())
if (Main.game.getPlayer().getClothingInSlot(invSlot) != null)
incompatibleClothing.add(Main.game.getPlayer().getClothingInSlot(invSlot).getClothingType().getName());
}
for (AbstractClothing c : Main.game.getPlayer().getClothingCurrentlyEquipped())
for (InventorySlot invSlot : c.getClothingType().getIncompatibleSlots())
if (getClothingType().getSlot() == invSlot)
incompatibleClothing.add(c.getClothingType().getName());
if (!getClothingType().getIncompatibleSlots().isEmpty()) {
// descriptionsList.add("-<b style='color:
// "+Colour.GENERIC_BAD.toWebHexString()+";'>Equipping
// blocks</b>");
for (InventorySlot slot : getClothingType().getIncompatibleSlots())
descriptionsList.add("<b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>Blocks " + Util.capitaliseSentence(slot.getName()) + "</b>");
}
if (this.isSealed() && enchantmentKnown) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_ARCANE.toWebHexString() + ";'>Jinxed</b>");
}
if (cummedIn) {
descriptionsList.add("<b style='color: " + Colour.CUM.toWebHexString() + ";'>Dirty</b>");
}
if (getClothingType().getFemininityMaximum() < Main.game.getPlayer().getFemininityValue()) {
descriptionsList.add("<b style='color: " + Colour.MASCULINE.toWebHexString() + ";'>Too masculine</b>");
}
if (getClothingType().getFemininityMinimum() > Main.game.getPlayer().getFemininityValue()) {
descriptionsList.add("<b style='color: " + Colour.FEMININE.toWebHexString() + ";'>Too feminine</b>");
}
if (!incompatibleClothing.isEmpty()) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>Incompatible with:</b>");
descriptionsList.addAll(incompatibleClothing);
}
} else { // Being worn:
if (!getClothingType().getIncompatibleSlots().isEmpty()) {
// descriptionsList.add("-<b style='color:
// "+Colour.GENERIC_BAD.toWebHexString()+";'>Blocking</b>");
for (InventorySlot slot : getClothingType().getIncompatibleSlots())
descriptionsList.add("<b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>Blocking " + Util.capitaliseSentence(slot.getName()) + "</b>");
}
if (this.isSealed() && enchantmentKnown) {
descriptionsList.add("<b style='color: " + Colour.GENERIC_ARCANE.toWebHexString() + ";'>Jinxed</b>");
}
if (cummedIn) {
descriptionsList.add("<b style='color: " + Colour.CUM.toWebHexString() + ";'>Dirty</b>");
}
if (getClothingType().getFemininityMaximum() < equippedToCharacter.getFemininityValue()) {
descriptionsList.add("<b style='color: " + Colour.MASCULINE.toWebHexString() + ";'>Too masculine</b>");
}
if (getClothingType().getFemininityMinimum() > equippedToCharacter.getFemininityValue()) {
descriptionsList.add("<b style='color: " + Colour.FEMININE.toWebHexString() + ";'>Too feminine</b>");
}
if (!displacedList.isEmpty()) {
// descriptionsList.add("-<b style='color:
// "+Colour.GENERIC_BAD.toWebHexString()+";'>Displaced</b>");
for (DisplacementType dt : displacedList)
descriptionsList.add("<b style='color: " + Colour.GENERIC_BAD.toWebHexString() + ";'>" + Util.capitaliseSentence(dt.getDescriptionPast()) + "</b>");
}
}
return descriptionsList;
}
/**
 * @param dt the displacement type to describe, or null to describe everything
 *           still blocked (i.e. parts whose displacement has not occurred).
 * @return A list of blocked body parts. e.g. "Penis, Anus and Vagina" or
 *         "Nipples"
 */
public String getClothingBlockingDescription(DisplacementType dt, GameCharacter owner, String preFix, String postFix) {
Set<CoverableArea> coveredAreas = new HashSet<>();// EnumSet.noneOf(CoverableArea.class);
for (BlockedParts bp : this.getClothingType().getBlockedPartsList()) {
// null dt -> include parts whose displacement hasn't been performed;
// otherwise only the parts tied to the requested displacement type.
boolean include = (dt == null)
? !this.getDisplacedList().contains(bp.displacementType)
: bp.displacementType == dt;
if (include) {
coveredAreas.addAll(bp.blockedBodyParts);
}
}
if (owner != null) {
// Drop areas the owner's body doesn't actually have.
if (owner.getVaginaType() == VaginaType.NONE) {
coveredAreas.remove(CoverableArea.VAGINA);
}
if (owner.getPenisType() == PenisType.NONE) {
coveredAreas.remove(CoverableArea.PENIS);
}
}
return coveredAreas.isEmpty() ? "" : preFix + Util.setToStringListCoverableArea(coveredAreas) + postFix;
}
// Strips every negative-potency attribute effect (minor or major) from this clothing.
public void removeBadEnchantments() {
this.getEffects().removeIf(e -> (e.getPrimaryModifier() == TFModifier.CLOTHING_ATTRIBUTE || e.getPrimaryModifier() == TFModifier.CLOTHING_MAJOR_ATTRIBUTE) && e.getPotency().isNegative());
}
/**
 * @return true if any of this clothing's effects is a sealing effect.
 */
public boolean isSealed() {
// Same stream idiom as isBadEnchantment()/isEnslavementClothing() for consistency;
// behaviour is identical to the original explicit loop.
return this.getEffects().stream().anyMatch(e -> e.getPrimaryModifier() == TFModifier.CLOTHING_SEALING);
}
// Seals or unseals this clothing by adding/removing a CLOTHING_SEALING effect.
// Sealing always adds a MINOR_BOOST-potency seal; unsealing removes all sealing effects.
public void setSealed(boolean sealed) {
if(sealed) {
this.addEffect(new ItemEffect(ItemEffectType.CLOTHING, TFModifier.CLOTHING_SEALING, TFModifier.NONE, TFPotency.MINOR_BOOST, 0));
} else {
this.getEffects().removeIf(e -> e.getPrimaryModifier() == TFModifier.CLOTHING_SEALING);
}
}
/**
 * @return the cost to remove this clothing's seal, keyed on the potency of the
 *         first sealing effect found; defaults to the MINOR_BOOST cost.
 */
public int getJinxRemovalCost() {
for(ItemEffect effect : this.getEffects()) {
if(effect.getPrimaryModifier()==TFModifier.CLOTHING_SEALING) {
switch(effect.getPotency()) {
// NOTE(review): BOOST and MAJOR_BOOST deliberately fall out of the switch and
// end up at the default MINOR_BOOST cost below — confirm dedicated
// SEALED_COST constants for those potencies aren't intended.
case BOOST:
break;
case DRAIN:
return ItemEffect.SEALED_COST_DRAIN;
case MAJOR_BOOST:
break;
case MAJOR_DRAIN:
return ItemEffect.SEALED_COST_MAJOR_DRAIN;
case MINOR_BOOST:
return ItemEffect.SEALED_COST_MINOR_BOOST;
case MINOR_DRAIN:
return ItemEffect.SEALED_COST_MINOR_DRAIN;
}
}
}
return ItemEffect.SEALED_COST_MINOR_BOOST;
}
// True if this clothing has been marked as covered in fluids (see setDirty).
public boolean isDirty() {
return cummedIn;
}
// Marks this clothing as dirty/clean; if the player is currently wearing it,
// refresh the inventory UI so the change is visible immediately.
public void setDirty(boolean cummedIn) {
this.cummedIn = cummedIn;
if (Main.game.getPlayer() != null
&& Main.game.getPlayer().getClothingCurrentlyEquipped().contains(this)) {
Main.game.getPlayer().updateInventoryListeners();
}
}
// Live list of displacements currently applied to this clothing (not a copy).
public List<DisplacementType> getDisplacedList() {
return displacedList;
}
// Removes all current displacements (e.g. when the item is re-equipped normally).
public void clearDisplacementList() {
displacedList.clear();
}
// True once this item's enchantment has been identified (by a vendor or by equipping it).
public boolean isEnchantmentKnown() {
return enchantmentKnown;
}
// Reused scratch builder for reveal/equip text; reset with setLength(0) before each use.
private StringBuilder pointlessSB = new StringBuilder();
/**
 * Sets whether this item's enchantment is identified.
 *
 * @return HTML describing the revealed jinx/enchantment and its attribute
 *         modifiers, or "" when nothing is revealed (flag set to false, or
 *         the item has no attribute modifiers).
 */
public String setEnchantmentKnown(boolean enchantmentKnown) {
pointlessSB.setLength(0);
this.enchantmentKnown = enchantmentKnown;
if(enchantmentKnown && !attributeModifiers.isEmpty()){
if (isBadEnchantment()) {
// Jinx: negative modifiers are listed with their (already signed) raw values.
pointlessSB.append(
"<p style='text-align:center;'>"
+ "<b style='color:" + Colour.GENERIC_BAD.toWebHexString() + ";'>Jinx revealed:</b> "+getDisplayName(true));
for(Entry<Attribute, Integer> att : attributeModifiers.entrySet()) {
pointlessSB.append("<br/><b>(" + att.getValue()+"</b> <b style='color:"+att.getKey().getColour().toWebHexString()+";'>"+ Util.capitaliseSentence(att.getKey().getName()) + "</b><b>)</b>");
}
pointlessSB.append("</p>");
} else {
// Beneficial enchantment: modifiers are listed with an explicit '+' prefix.
pointlessSB.append(
"<p style='text-align:center;'>"
+ "<b style='color:" + Colour.GENERIC_GOOD.toWebHexString() + ";'>Enchantment revealed:</b> "+getDisplayName(true));
for(Entry<Attribute, Integer> att : attributeModifiers.entrySet()) {
pointlessSB.append("<br/><b>(+" + att.getValue()+"</b> <b style='color:"+att.getKey().getColour().toWebHexString()+";'>"+ Util.capitaliseSentence(att.getKey().getName()) + "</b><b>)</b>");
}
pointlessSB.append("</p>");
}
} else {
return "";
}
return pointlessSB.toString();
}
// Returns the attribute with the largest (strictly positive) modifier on this item.
// Falls back to MAJOR_PHYSIQUE when no modifier exceeds zero.
public Attribute getCoreEnchantment() {
Attribute best = Attribute.MAJOR_PHYSIQUE;
int bestValue = 0;
for (Entry<Attribute, Integer> entry : getAttributeModifiers().entrySet()) {
int value = entry.getValue();
if (value > bestValue) {
bestValue = value;
best = entry.getKey();
}
}
return best;
}
/**
 * Builds the "of ..." postfix describing this item's enchantment (e.g. "of sealing"),
 * optionally wrapped in a coloured HTML tag.
 *
 * @param coloured whether to wrap the descriptor in a coloured &lt;tag&gt; element.
 * @param tag      the HTML tag name to use when coloured (e.g. "b").
 * @return the postfix, or "" when the item has no effects.
 */
public String getEnchantmentPostfix(boolean coloured, String tag) {
if(!this.getEffects().isEmpty()) {
// NOTE(review): every branch of this loop returns, so only the FIRST effect is
// ever inspected — confirm that the first effect is guaranteed to be the
// defining one, otherwise the loop is misleading.
for(ItemEffect ie : this.getEffects()) {
if(ie.getPrimaryModifier() == TFModifier.CLOTHING_ENSLAVEMENT) {
return "of "+(coloured?"<"+tag+" style='color:"+TFModifier.CLOTHING_ENSLAVEMENT.getColour().toWebHexString()+";'>enslavement</"+tag+">":"enslavement");
} else if(ie.getPrimaryModifier() == TFModifier.TF_MOD_FETISH_BEHAVIOUR || ie.getPrimaryModifier() == TFModifier.TF_MOD_FETISH_BODY_PART) {
return "of "+(coloured?"<"+tag+" style='color:"+Colour.FETISH.toWebHexString()+";'>"+ie.getSecondaryModifier().getDescriptor()+"</"+tag+">":ie.getSecondaryModifier().getDescriptor());
} else if(ie.getPrimaryModifier() == TFModifier.CLOTHING_ATTRIBUTE || ie.getPrimaryModifier() == TFModifier.CLOTHING_MAJOR_ATTRIBUTE) {
String name = (this.isBadEnchantment()?this.getCoreEnchantment().getNegativeEnchantment():this.getCoreEnchantment().getPositiveEnchantment());
return "of "+(coloured?"<"+tag+" style='color:"+this.getCoreEnchantment().getColour().toWebHexString()+";'>"+name+"</"+tag+">":name);
} else if(ie.getPrimaryModifier() == TFModifier.CLOTHING_SEALING) {
return "of "+(coloured?"<"+tag+" style='color:"+Colour.SEALED.toWebHexString()+";'>sealing</"+tag+">":"sealing");
} else {
return "of "+(coloured?"<"+tag+" style='color:"+Colour.TRANSFORMATION_GENERIC.toWebHexString()+";'>transformation</"+tag+">":"transformation");
}
}
}
return "";
}
// True if any attribute effect (minor or major) has a negative potency.
public boolean isBadEnchantment() {
return this.getEffects().stream().anyMatch(e -> (e.getPrimaryModifier() == TFModifier.CLOTHING_ATTRIBUTE || e.getPrimaryModifier() == TFModifier.CLOTHING_MAJOR_ATTRIBUTE) && e.getPotency().isNegative());
}
// True if any effect is an enslavement effect.
public boolean isEnslavementClothing() {
return this.getEffects().stream().anyMatch(e -> e.getPrimaryModifier() == TFModifier.CLOTHING_ENSLAVEMENT);
}
@Override
// Live list of this item's effects (not a copy); mutated by addEffect/removeEffect.
public List<ItemEffect> getEffects() {
return effects;
}
// Appends an effect to this item's effect list.
public void addEffect(ItemEffect effect) {
effects.add(effect);
}
// Removes a single matching effect from this item's effect list.
public void removeEffect(ItemEffect effect) {
effects.remove(effect);
}
@Override
/**
 * Recomputes the attribute-modifier map from the current effect list on every call.
 * Effects targeting the same attribute are summed.
 *
 * @return the (shared, mutable) map of attribute -&gt; total bonus.
 */
public Map<Attribute, Integer> getAttributeModifiers() {
attributeModifiers.clear();
for(ItemEffect ie : getEffects()) {
if(ie.getPrimaryModifier() == TFModifier.CLOTHING_ATTRIBUTE || ie.getPrimaryModifier() == TFModifier.CLOTHING_MAJOR_ATTRIBUTE) {
// Map.merge replaces the containsKey/get/put sequence and sums duplicates.
attributeModifiers.merge(ie.getSecondaryModifier().getAssociatedAttribute(), ie.getPotency().getClothingBonusValue(), Integer::sum);
}
}
return attributeModifiers;
}
// The following four overrides simply delegate enchantment metadata to the clothing type.
@Override
public int getEnchantmentLimit() {
return clothingType.getEnchantmentLimit();
}
@Override
public AbstractItemEffectType getEnchantmentEffect() {
return clothingType.getEnchantmentEffect();
}
@Override
public AbstractCoreType getEnchantmentItemType(List<ItemEffect> effects) {
return clothingType.getEnchantmentItemType(effects);
}
@Override
public TFEssence getRelatedEssence() {
return clothingType.getRelatedEssence();
}
}
|
// https://www.codechef.com/OCT19A/problems/MSV
#include <bits/stdc++.h>
using namespace std;
using vi = vector<int>;
using vvi = vector<vi>;

int N = 1000000;
// d[v] = number of already-processed array values that v divides exactly.
vi d;

int main() {
    ios::sync_with_stdio(0);
    cin.tie(0);
    int t, n;
    cin >> t;
    while (t--) {
        cin >> n;
        vi a(n);
        d = vi(N + 1);  // fresh zeroed divisor-count table per test case
        for (int i = 0; i < n; i++) cin >> a[i];
        int best = 0;
        for (int i = 0; i < n; i++) {
            int x = a[i];
            // d[x] = how many earlier elements x divides; answer is the maximum over i.
            best = max(best, d[x]);
            // Register all divisors of x in O(sqrt(x)) — note the distinct loop
            // variable 'f' (the original shadowed the outer 'i').
            for (int f = 1; f * f <= x; f++) {
                if (x % f == 0) {
                    d[f]++;
                    int partner = x / f;
                    if (partner != f) d[partner]++;
                }
            }
        }
        cout << best << "\n";
    }
}
|
<filename>Calligraphy/src/com/jinke/calligraphy/app/branch/CalliPointsImpl.java
package com.jinke.calligraphy.app.branch;
import android.content.SharedPreferences;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Rect;
import android.os.CountDownTimer;
import android.util.Log;
import android.view.View;
/*
 * Brush point-set operations (original comment: 毛笔点集操作).
 */
public class CalliPointsImpl extends BasePointsImpl {
public int mStartPoint; // Drawing starts from this index; when the current point's 'life' reaches 0, advance to the next point.
public int mDrawnPoint; // Points up to this index are finished and no longer need redrawing.
public float mFontSize; // Current brush ("font") size.
public CalliTimer mTimer;
public View mView;
public static boolean penStat = false;
private float zX; //zk20121027
private float zY; //zk20121027
private float mX;
private float mY;
// private Canvas mCanvas;
public Paint mPaint;
public Paint mPathPaint;
private CurInfo mCurInfo;
private static final String TAG = "CalliPointsImpl";
private static boolean TIMER = false; // Guards against starting mTimer twice (see start()).
private Canvas strokeCanvas;
private Canvas addCanvas;
private Path mPath;
private Path mOffsetPath;
private float mLastPointX=0;
private float mLastPointY=0;
//private Thread genDataThread;
//private GenDataRunnable genDataRunnable;
private final int TOUCH_TOLERANCE = 3;
protected Rect dirtyRect; // Integer dirty region published by extendBound().
private float mStartX, mStartY, mEndX, mEndY; // Float dirty bounds; see extendBound()/resetBound().
//zk20121027
// Consumes the latest touch position (zX, zY) and interpolates brush points from the
// previous position (mX, mY) towards it, shrinking the brush size with stroke speed.
public void nextPoint() {
// Log.i(TAG, "Time:" + Calendar.getInstance().getTimeInMillis());
if(zX<1 && zY<1)
return;
float x=zX;
float y=zY;
// Midpoint between the new touch position and the previous one; used as the
// quadratic-curve endpoint below.
mLastPointX=(x + mX)/2;
mLastPointY=(y + mY)/2;
float dx = mX - mLastPointX;
float dy = mY - mLastPointY;
float sqrt = ((float)Math.sqrt(dx*dx + dy*dy));
float tmpFontSize = 0;
// if(sqrt < 6){
// tmpFontSize = mFontSize + (6) - sqrt;
// }else{
// if(sqrt > 18){
// tmpFontSize = mFontSize - 1;//zk20121123
// }
// else
// {
// tmpFontSize = mFontSize - ((sqrt - (6)) / (12)) ;
// }
// }
// Faster strokes (larger distance) shrink the brush; slower strokes grow it.
tmpFontSize = mFontSize - ((sqrt - (7)) / (5)) ;
extendBound(mLastPointX, mLastPointY, mFontSize +10);
// Log.i(TAG, "touch_move:size:" + tmpFontSize + " sqrt:" + sqrt);
// Clamp the computed size to the allowed brush range (with 2px slack below min).
if(tmpFontSize > CalliPoint.SIZE_MAX)
tmpFontSize = CalliPoint.SIZE_MAX;
if(tmpFontSize < CalliPoint.SIZE_MIN - 2 )
tmpFontSize = CalliPoint.SIZE_MIN - 2;
// if( (Math.abs(dx) >= TOUCH_TOLERANCE || Math.abs(dy) >= TOUCH_TOLERANCE ) && cpi.mFontSize > 0)
// if( (Math.abs(dx) >= TOUCH_TOLERANCE || Math.abs(dy) >= TOUCH_TOLERANCE ))
{
// Emit intermediate points along the segment, stepping at least 5px (or sqrt/4),
// linearly interpolating the brush size between old and new values.
for(int i=0;i<(int)(sqrt);i+=(sqrt/(4) > (5)) ? sqrt/(4):(5))
{
float px = mX - dx/sqrt * i;
float py = mY - dy/sqrt * i;
//
CalliPoint p = new CalliPoint(px, py, mFontSize-((float)(mFontSize-tmpFontSize)/sqrt)*i, mPaint);
addPoint(p);
// break;
}
if((int)sqrt == 0){
CalliPoint p = new CalliPoint(mX, mY, tmpFontSize, mPaint);
addPoint(p);
}
}
mFontSize = tmpFontSize;
mPath.quadTo(mX, mY, mLastPointX, mLastPointY);
// mPath.lineTo(mLastPointX, mLastPointY);
// mPath.quadTo(mX, mY, x, y);
mX = x;
mY = y;
// bView.invalidate();
//genDataRunnable.isBitmapDirty = true;
//zk20121027 path drawing was moved into the timer Tick
// if(!mPath.isEmpty()) {
// Log.i(TAG, "size min:" + CalliPoint.SIZE_MIN);
// mPathPaint.setStrokeWidth(CalliPoint.SIZE_MIN + CalliPoint.FILTER_FACTOR);
// bCanvas.drawPath(mPath, mPathPaint);
//// mPath.reset();
// }
//zk20121027 screen refresh was moved into the timer Tick
/*
if(count %3 == 0)
bView.invalidate();
if(++count > Integer.MAX_VALUE)
count = 0;
*/
}
/**
 * Grows the dirty bounding box to include the square of half-width {@code w}
 * around ({@code x}, {@code y}), clamps it to the screen, and publishes it to
 * {@link #dirtyRect} as integers. Behaviour matches the original if-chains,
 * expressed with the idiomatic Math.min/Math.max.
 */
public void extendBound(float x , float y, float w) {
mStartX = Math.max(0f, Math.min(mStartX, x - w));
mStartY = Math.max(0f, Math.min(mStartY, y - w));
mEndX = Math.min((float) Start.SCREEN_WIDTH, Math.max(mEndX, x + w));
mEndY = Math.min((float) Start.SCREEN_HEIGHT, Math.max(mEndY, y + w));
// Rect works in ints; truncation matches the original casts.
dirtyRect.set((int) mStartX, (int) mStartY, (int) mEndX, (int) mEndY);
}
// Resets the dirty bounds to an 'empty' state (start at +infinity, end at 0)
// so the next extendBound() call starts a fresh box.
public void resetBound() {
mStartX = Float.MAX_VALUE;
mStartY = Float.MAX_VALUE;
mEndX = 0;
mEndY = 0;
}
// Sets up brush state, the redraw timer (80ms tick), canvases, paths and paints,
// and reads the calibration flag from shared preferences.
public CalliPointsImpl(BaseBitmap b, MyView v){
super(b, v);
mStartPoint = 0;
mDrawnPoint = 0;
mFontSize = 0;
mPaint = super.bPaint;
mCurInfo = super.bCurInfo;
//ly
mTimer = new CalliTimer(Long.MAX_VALUE, 80);
//end
//mTimer = new CalliTimer(Long.MAX_VALUE, 1); //zk20121027 mTimer = new CalliTimer(Long.MAX_VALUE, 1);
//mTimer.start();
SharedPreferences settings = Start.context.getSharedPreferences(ParametersDialog.FILENAME, android.content.Context.MODE_PRIVATE);
penStat = settings.getBoolean(ParametersDialog.PARAM_CALI, false);
// mCanvas = new Canvas();
// mCanvas.setBitmap(mCurInfo.mBitmap);
addCanvas = new Canvas();
addCanvas.setBitmap(BaseBitmap.addBitmap);
strokeCanvas = new Canvas();
mPath = new Path();
mOffsetPath = new Path();
mPathPaint = new Paint(mPaint);
mPathPaint.setStrokeWidth(CalliPoint.SIZE_MIN);
//genDataRunnable = new GenDataRunnable();
//genDataThread = new Thread(genDataRunnable);
// genDataThread.start();
dirtyRect = new Rect();
resetBound();
}
// Per-stroke counter, reset in start(); historically used to throttle invalidates
// (see the commented-out block at the end of nextPoint()).
int count = 0;
// Begins a new stroke at (x, y): starts the timer once, seeds the first point
// with the minimum brush size, and starts the path.
@Override
public void start(float x, float y) {
// TODO Auto-generated method stub
super.start(x, y);
count = 0;
Log.i(TAG, "start");
if(TIMER == false){
mTimer.start();
TIMER = true;
}
mFontSize = CalliPoint.SIZE_MIN + CalliPoint.FILTER_FACTOR;
CalliPoint point = new CalliPoint(x, y, mFontSize, mPaint);
point.startFlag = true;
addPoint(point);
mX = x;
mY = y;
mPath.moveTo(x, y);
mLastPointX = x;
mLastPointY = y;
// When switched to free-drawing (doodle) mode, this call returns null.
// if(MyView.drawStatus == MyView.STATUS_DRAW_FREE)
// mCanvas.setBitmap(mCurInfo.mBitmap);
// else
// mCanvas.setBitmap(bDrawBitmap.getTopBitmap());
//genDataRunnable.isRun = true;
//genDataRunnable.isBitmapDirty = true;
}
/**
 * Finishes the current stroke segment: draws the final tapering point
 * (makeLastPoint), clears the pending-point coordinates and resets the
 * working path. Returns false when the base class rejects the event.
 */
@Override
public boolean after() {
// TODO Auto-generated method stub
if(!super.after())
return false;
//mPath.lineTo(mX, mY);
//zk20121027
//makeNextPoint(mX, mY);
makeLastPoint();
// No pending point remains after the stroke ends.
zX=0;
zY=0;
// Guard against a negative stroke width carried over from tapering.
if(mFontSize < 0)
mFontSize = 0;
// bView.invalidate();
mPath.reset();
// genDataThread.stop();
//genDataRunnable.isRun = false;
return true;
}
/**
 * Tears down this renderer: delegates to the base class, then stops the
 * spread timer (start() restarts it lazily for the next stroke).
 */
@Override
public void finish() {
    super.finish();
    mTimer.cancel();
}
/**
 * Fully resets the stroke state. Called when one character has been
 * completed in cursor mode: drops all buffered points, clears the path,
 * zeroes every tracked coordinate, stops the spread timer and resets the
 * dirty bounds.
 */
@Override
public void clear() {
    super.clear();
    // Drop buffered points and the in-progress path.
    bPointsList.clear();
    mPath.reset();
    // Zero all point-tracking state.
    mStartPoint = 0;
    mDrawnPoint = 0;
    mLastPointX = 0;
    mLastPointY = 0;
    zX = 0;
    zY = 0;
    mX = 0;
    mY = 0;
    // Stop the spread timer; start() will restart it for the next stroke.
    mTimer.cancel();
    TIMER = false;
    resetBound();
    Log.e("clear", "CalliPoint clear !!!!");
}
/**
 * Re-derives the pen size limits for the current drawing mode and resets
 * the working stroke width.
 *
 * Doodle mode (STATUS_DRAW_FREE) uses thin strokes (2..10); every other
 * mode (calligraphy) uses a wider dynamic range (5..30).
 */
@Override
public void updatePaintSize() {
    super.updatePaintSize();
    if (MyView.drawStatus == MyView.STATUS_DRAW_FREE) {
        CalliPoint.SIZE_MIN = (2);
        CalliPoint.SIZE_MAX = (10);
    } else {
        CalliPoint.SIZE_MIN = (5);
        CalliPoint.SIZE_MAX = (30);
    }
    // Both branches end with a zero working size. The original else branch
    // first computed SIZE_MIN + FILTER_FACTOR and immediately overwrote it
    // with 0 — that dead store is removed here; behavior is unchanged.
    mFontSize = 0;
}
// Upper bound of points drawn per pass (not referenced in this chunk —
// TODO confirm whether it is still used elsewhere).
static final int maxDrawPointsNum = 15;
// Maximum number of buffered points processed in a single spread() pass.
static final int maxSpreadPointsNum = 1000;
/* (non-Javadoc)
 * @see com.jinke.calligraphy.app.branch.BasePointsImpl#draw(android.graphics.Canvas, android.graphics.Matrix)
 */
@Override
public void draw(Canvas canvas, Matrix matrix) {
// Log.e("CALLIDRAW", "BEGIN DRAW");
// TODO Auto-generated method stub
super.draw(canvas, matrix);
// canvas.drawBitmap(bView.cursorBitmap.getTopBitmap(), 0, 0, new Paint());
// Blit only the dirty rectangle of the cursor bitmap, and only when the
// tracked bounds describe a non-empty region.
if(mEndX > mStartX && mEndY > mStartY)
canvas.drawBitmap(bView.cursorBitmap.getTopBitmap(), dirtyRect, dirtyRect, new Paint());
}
/**
 * "Ink spreading" pass: walks the buffered points from mStartPoint up to
 * at most maxSpreadPointsNum entries, grows each point, decrements its
 * life, and draws a line segment from each point to its successor while
 * the point is still large enough. Points whose life expires advance
 * mStartPoint so they are not revisited. Returns true when anything was
 * drawn/changed (caller uses this to decide whether to invalidate).
 */
public boolean spread() {
int size = bPointsList.size();
boolean spreaded = false;
// Process at most maxSpreadPointsNum points per pass, and always stop
// one short of the end so point2 = get(i+1) stays in range.
int maxnum = size-1;
if(mStartPoint + maxSpreadPointsNum < size)
maxnum = mStartPoint + maxSpreadPointsNum;
CalliPoint point = null;
// Remember the paint color; the loop may alter paints and we restore at the end.
int curcolor = mPaint.getColor();
for (int i = mStartPoint; i < maxnum; i++) {
point = (CalliPoint) bPointsList.get(i);
// for(int j =0;j < point.life;j++)
// point.addSize();
// point.life = 0;
// Grow the point one step and spend one unit of its life.
point.addSize();
point.life-=1;
if(point.life <= 0) {
spreaded = true;
// Expired points are skipped in future passes.
mStartPoint = i+1;
//continue;
}
CalliPoint point2 = (CalliPoint) bPointsList.get(i+1);
// mPaint.setARGB((int)point.color[0], (int)point.color[1], (int)point.color[2], (int)point.color[3]);
// mPathPaint.setARGB((int)point.color[0], (int)point.color[1], (int)point.color[2], (int)point.color[3]);
// Too small to be visible yet; skip drawing this segment.
if(point.size <= point.SIZE_MIN + 2 * CalliPoint.FILTER_FACTOR)
continue;
// mPaint.setStrokeWidth(point.size);
// Never connect across a stroke boundary (point2 starts a new stroke).
if(point2.startFlag == true) {
} else {
// reduceColor(mPaint);
mPaint.setStrokeWidth(point.size );
bCanvas.drawLine(point.x, point.y, point2.x, point2.y, mPaint);
}
// point.addSize();
// point.life-=1;
// if(point.life <= 0) {
// spreaded = true;
// mStartPoint = i+1;
// continue;
// }
// point.addSize();
// point.life-=1;
// if(point.life <= 0){
// spreaded = true;
// mStartPoint = i+1;
// continue;
// }
// point.addSize();
// point.life-=1;
// if(point.life <= 0) {
// spreaded = true;
// mStartPoint = i+1;
// continue;
// }
// for(int j=1;j<4;j++)
// point.color[j] -= 1;
spreaded = true;
}
// Draw the trailing segment from the last processed point to the point
// at maxnum, same rules as inside the loop.
if(point != null){
CalliPoint point2 = (CalliPoint) bPointsList.get(maxnum);
// mPaint.setARGB((int)point.color[0], (int)point.color[1], (int)point.color[2], (int)point.color[3]);
// mPathPaint.setARGB((int)point.color[0], (int)point.color[1], (int)point.color[2], (int)point.color[3]);
if(point.size > point.SIZE_MIN) {
// mPaint.setStrokeWidth(point.size);
if(point2.startFlag == true) {
} else {
// reduceColor(mPaint);
mPaint.setStrokeWidth(point.size );
bCanvas.drawLine(point.x, point.y, point2.x, point2.y, mPaint);
}
}
}
// Restore the paint color that may have been changed during drawing.
mPaint.setColor(curcolor);
mPathPaint.setColor(curcolor);
return spreaded;
}
/**
 * Records an incoming touch-move sample. The actual drawing is deferred:
 * this only repositions the path and stashes (x, y) in zX/zY for the
 * timer tick to consume. Returns false when the base class rejects it.
 */
@Override
public boolean makeNextPoint(float x, float y) {
// TODO Auto-generated method stub
if(!super.makeNextPoint(x, y))
return false;
// Log.i(TAG, "Time:" + Calendar.getInstance().getTimeInMillis());
mPath.moveTo(mLastPointX, mLastPointY);
//zk20121027 the point-adding work was moved into the timer Tick
zX = x;
zY = y;
return true;
}
/**
 * Draws the tapering tail of a stroke from the last pending point
 * (zX, zY). For fast strokes (distance > 8) it steps along the two
 * half-segments toward the midpoint, shrinking the stroke width by 0.2
 * per step down to SIZE_MIN, drawing short path segments as it goes.
 * Returns false when there is no pending point to finish.
 */
public boolean makeLastPoint() {
// Log.i("kk", "************----------------------------------- ");
// zX/zY < 1 is the "no pending point" sentinel set by after()/clear().
if(zX<1 && zY<1)
return false;
float x=zX;
float y=zY;
mPath.reset();
mPath.moveTo(mLastPointX, mLastPointY);
// Remember the previous anchor before advancing to the new midpoint.
float mmLX=mLastPointX;
float mmLY=mLastPointY;
mLastPointX=(x + mX)/2;
mLastPointY=(y + mY)/2;
float dx = mX - mLastPointX;
float dy = mY - mLastPointY;
float sqrt = ((float)Math.sqrt(dx*dx + dy*dy));
// Log.i("kk", "*****************************************makeLastPoint: " +"sqrt: "+sqrt);
float tmpFontSize = 0;
if(sqrt>8){
// NOTE(review): tmp is computed from the distance but immediately
// overwritten with mFontSize below — the taper length equals the
// current stroke width, not the distance. Confirm this is intended.
float tmp=(1.8f+CalliPoint.SPREAD_FACTOR)*(sqrt - (6)/(12)) ;
tmp=mFontSize;
float tmpX=mmLX;
float tmpY=mmLY;
// Unit direction of the first half-segment (old anchor -> mX,mY).
float tmpSqrt=((float)Math.sqrt((mX-mmLX)*(mX-mmLX) + (mY-mmLY)*(mY-mmLY)));
float sinFirst=(mX-mmLX)/tmpSqrt;
float cosFirst=(mY-mmLY)/tmpSqrt;
// Unit direction of the second half-segment (mX,mY -> new anchor).
tmpSqrt=((float)Math.sqrt((mX-mLastPointX)*(mX-mLastPointX) + (mY-mLastPointY)*(mY-mLastPointY)));
float sinLast=(mLastPointX-mX)/tmpSqrt;
float cosLast=(mLastPointY-mY)/tmpSqrt;
tmpFontSize=mFontSize;
// Step 0.2 at a time, shrinking the width until tmp runs out or the
// minimum size is reached.
for(;tmp>0;tmp=tmp-0.2f)
{
tmpFontSize=tmpFontSize-0.2f;
if(tmpFontSize < CalliPoint.SIZE_MIN ){
tmpFontSize = CalliPoint.SIZE_MIN;
break;
}
// While the cursor is still inside the bounding box of the first
// half-segment, advance along it; afterwards follow the second one.
if( ( ((mmLX<mX)&&(tmpX>=mmLX)&&(tmpX<=mX)) || ((mmLX>mX)&&(tmpX>=mX)&&(tmpX<=mmLX)) ) && ( ((mmLY<mY)&&(tmpY>=mmLY)&&(tmpY<=mY)) || ((mmLY>mY)&&(tmpY>=mY)&&(tmpY<=mmLY)) ) )
{
tmpX=tmpX+1*sinFirst;
tmpY=tmpY+1*cosFirst;
Log.i("kk", "Last Point First Line: " + tmpFontSize+" "+mFontSize+" "+tmp+" "+tmpX+" "+tmpY);
}
else
{
tmpX=tmpX+1*sinLast;
tmpY=tmpY+1*cosLast;
Log.i("kk", "Last Point Last Line: " + tmpFontSize+" "+mFontSize+" "+tmp+" "+tmpX+" "+tmpY);
// if( ( ((mLastPointX<mX)&&(tmpX>=mLastPointX)&&(tmpX<=mX)) || ((mLastPointX>mX)&&(tmpX>=mX)&&(tmpX<=mLastPointX)) ) && ( ((mLastPointY<mY)&&(tmpY>=mLastPointY)&&(tmpY<=mY)) || ((mLastPointY>mY)&&(tmpY>=mY)&&(tmpY<=mLastPointY)) ) )
// {
// tmpX=tmpX+1*sinLast;
// tmpY=tmpY+1*cosLast;
// Log.i("kk", "Last Point Last Line: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
// }
// else
// {
// Log.i("kk", "Last Point End Line: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
// break;
// }
}
// Draw each one-pixel advance as its own short path segment so the
// width change is visible along the taper.
extendBound(tmpX, tmpY, tmpFontSize);
mPathPaint.setStrokeWidth(tmpFontSize);
mPath.lineTo(tmpX, tmpY);
bCanvas.drawPath(mPath, mPathPaint);
mPath.reset();
mPath.moveTo(tmpX, tmpY);
//bView.invalidate();
}
// tmpFontSize = mFontSize - tmp + i-1 ;
}
bView.invalidate();
return true;
}
/**
 * Non-calibrated drawing path (penStat == false): consumes the pending
 * point (zX, zY) directly. Fast movement (combined segment length > 25)
 * is rendered as a width-tapering quadratic Bezier; slow movement thickens
 * the stroke (up to SIZE_MAX) and draws a straight segment to the new
 * midpoint. Returns false when no pending point exists.
 */
public boolean directNextpoint() {
// zX/zY < 1 is the "no pending point" sentinel.
if(zX<1 && zY<1)
return false;
float x=zX;
float y=zY;
mPath.reset();
mPath.moveTo(mLastPointX, mLastPointY);
// Previous anchor, before advancing the anchor to the new midpoint.
float mmLX=mLastPointX;
float mmLY=mLastPointY;
mLastPointX=(x + mX)/2;
mLastPointY=(y + mY)/2;
// float dx = mX - mLastPointX;
// float dy = mY - mLastPointY;
// float sqrt = ((float)Math.sqrt(dx*dx + dy*dy));
// Total travelled length = old anchor -> (mX,mY) -> new anchor.
float dx1 = mX-mmLX ;
float dy1 = mY-mmLY;
float dx2 = mLastPointX - mX;
float dy2 = mLastPointY - mY;
float sqrt1 = ((float)Math.sqrt(dx1*dx1 + dy1*dy1));
float sqrt2 = ((float)Math.sqrt(dx2*dx2 + dy2*dy2));
float sqrt=sqrt1+sqrt2;
float tmpFontSize = 0;
if(sqrt>25){ //13
// float tmp=(1.3f+CalliPoint.SPREAD_FACTOR)*(sqrt - (6)/(12)) ;
//// if(tmp>1.8f){
//// tmpFontSize= mFontSize -1.8f;
//// Log.i(TAG, "33333333333: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
//// }
//// else{
//// tmpFontSize = mFontSize - tmp;
//// }
//
//
//
// float tmpX=mmLX;
// float tmpY=mmLY;
// float tmpSqrt=((float)Math.sqrt((mX-mmLX)*(mX-mmLX) + (mY-mmLY)*(mY-mmLY)));
// float sinFirst=(mX-mmLX)/tmpSqrt;
// float cosFirst=(mY-mmLY)/tmpSqrt;
// tmpSqrt=((float)Math.sqrt((mX-mLastPointX)*(mX-mLastPointX) + (mY-mLastPointY)*(mY-mLastPointY)));
// float sinLast=(mLastPointX-mX)/tmpSqrt;
// float cosLast=(mLastPointY-mY)/tmpSqrt;
// tmpFontSize=mFontSize;
//
// for(;tmp>0;tmp=tmp-0.1f)
// {
// tmpFontSize=tmpFontSize-0.1f;
// if(tmpFontSize < CalliPoint.SIZE_MIN ){
// tmpFontSize = CalliPoint.SIZE_MIN;
// break;
// }
//
// if( ( ((mmLX<mX)&&(tmpX>=mmLX)&&(tmpX<=mX)) || ((mmLX>mX)&&(tmpX>=mX)&&(tmpX<=mmLX)) ) && ( ((mmLY<mY)&&(tmpY>=mmLY)&&(tmpY<=mY)) || ((mmLY>mY)&&(tmpY>=mY)&&(tmpY<=mmLY)) ) )
// {
// tmpX=tmpX+1*sinFirst;
// tmpY=tmpY+1*cosFirst;
//// Log.i(TAG, "First Line: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
// }
// else
// {
// if( ( ((mLastPointX<mX)&&(tmpX>=mLastPointX)&&(tmpX<=mX)) || ((mLastPointX>mX)&&(tmpX>=mX)&&(tmpX<=mLastPointX)) ) && ( ((mLastPointY<mY)&&(tmpY>=mLastPointY)&&(tmpY<=mY)) || ((mLastPointY>mY)&&(tmpY>=mY)&&(tmpY<=mLastPointY)) ) )
// {
// tmpX=tmpX+1*sinLast;
// tmpY=tmpY+1*cosLast;
//// Log.i(TAG, "Last Line: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
// }
// else
// {
//// Log.i(TAG, "End Line: " + tmpFontSize+" "+mFontSize+" "+sqrt+" ");
// break;
// }
// }
//
// mPathPaint.setStrokeWidth(tmpFontSize);
// mPath.lineTo(tmpX, tmpY);
// bCanvas.drawPath(mPath, mPathPaint);
// mPath.reset();
// mPath.moveTo(tmpX, tmpY);
// }
// float fontSizeChange=(1.3f+CalliPoint.SPREAD_FACTOR)*((sqrt - 15)/(20)) ;
// Fast stroke: total width reduction scales with how far the pen moved.
float fontSizeChange=(2.3f+CalliPoint.SPREAD_FACTOR)*((sqrt - 15)/(20)) ;
// float fontSizeChange=(1.3f+CalliPoint.SPREAD_FACTOR)*((sqrt - 6)/(12)) ;
// Render as a quadratic Bezier with (mX, mY) as the control point.
bezier(mmLX,mmLY,mX, mY, mLastPointX, mLastPointY,fontSizeChange);
// tmpFontSize = mFontSize - tmp + i-1 ;
}
else{
// Slow stroke: the pen thickens while nearly stationary.
if(sqrt<15)
{
// tmpFontSize = mFontSize + (0.1f+CalliPoint.SPREAD_FACTOR)*((15)- sqrt) ;
tmpFontSize = mFontSize + (2.1f+CalliPoint.SPREAD_FACTOR)*((15)- sqrt) ;
}
else
{
tmpFontSize = mFontSize;
}
extendBound(x, y, tmpFontSize);
// Log.i(TAG, "touch_move:size:" + tmpFontSize + " sqrt:" + sqrt);
if(tmpFontSize > CalliPoint.SIZE_MAX)
tmpFontSize = CalliPoint.SIZE_MAX;
mFontSize = tmpFontSize;
mPathPaint.setStrokeWidth(tmpFontSize);
// mPath.quadTo(mX, mY, mLastPointX, mLastPointY);
mPath.lineTo(mLastPointX, mLastPointY);
// mPath.quadTo(mX, mY, x, y);
bCanvas.drawPath(mPath, mPathPaint);
}
// The consumed pending point becomes the new "previous" sample.
mX = x;
mY = y;
return true;
}
/**
 * One tick of pen rendering, invoked from CalliTimer.onTick(). In
 * calibrated mode (penStat) it advances the point pipeline, runs the
 * ink-spread pass and strokes the pending path; otherwise it delegates
 * to directNextpoint(). Invalidates the view when anything changed and
 * returns whether a redraw happened.
 */
public boolean drawPen() {
boolean result=false;
//Log.e("uuuu", "penStat" + penStat);
if(penStat)
{
nextPoint();
//}
result = spread();
//zk20121027 path drawing was moved into this Tick handler
if(!mPath.isEmpty()) {
//if(tempcounter %8 ==0){ // tried N=6 vs 8 per redraw on single-core
// Log.i(TAG, "size min:" + CalliPoint.SIZE_MIN);
mPathPaint.setStrokeWidth(CalliPoint.SIZE_MIN + CalliPoint.FILTER_FACTOR);
bCanvas.drawPath(mPath, mPathPaint);
// mPath.reset();
result=true;
//}
}
}
else
{
result=directNextpoint();
}
//zk 20121027 screen refresh moved outside the branch: refresh after N ticks.
if(result){
// The sampling period is long enough that no extra waiting is needed.
// With CalliTimer at 10, a divisor of 2 also works (refresh every
// 10*2 ms); 4 starts to look jumpy.
//if(tempcounter %8 ==0){ // tried N=6 vs 8 per redraw on single-core
bView.invalidate();
//}
}
return result;
// counter ++;
// if(counter >= 1 && dirty ) {
//// bView.invalidate(dirtyRect);
//// if(genDataRunnable.isUpdated){
//// bView.invalidate();
//// genDataRunnable.isUpdated = false;
//// }
// bView.invalidate();
// counter = 0;
// dirty = false;
// }
// if(spread())
// bView.invalidate();
//spread();
// bView.draw();
}
/*
 * Timer: runs one drawPen()/spread pass on every tick.
 */
class CalliTimer extends CountDownTimer {
/**
 * millisInFuture is effectively infinite (Long.MAX_VALUE) so the timer
 * runs until cancelled; countDownInterval is the tick period in ms.
 */
public CalliTimer(long millisInFuture, long countDownInterval) {
super(millisInFuture, countDownInterval);
// TODO Auto-generated constructor stub
}
@Override
public void onFinish() {
// Intentionally empty: the countdown never completes in practice.
// TODO Auto-generated method stub
}
public int counter = 0;
public boolean dirty = false;
int tempcounter=0;
// public Rect dirtyRect = new Rect(100, 100, 200, 300);
/**
 * Drives one rendering pass per tick; remembers whether anything was
 * drawn in the dirty flag.
 */
@Override
public void onTick(long millisUntilFinished) {
// TODO Auto-generated method stub
//Log.e("!!!!", "onTick");
if(drawPen()) {
dirty = true;
}
}
}
/**
 * Strokes a quadratic Bezier curve from (p1x, p1y) to (p2x, p2y) with
 * control point (cpx, cpy) by evaluating De Casteljau interpolation at
 * t steps of 0.01, while linearly tapering the stroke width by a total
 * of fontSizeChange across the curve (never below SIZE_MIN). Each step
 * is drawn as its own short path segment so the width change is visible.
 * The final width is stored back into mFontSize. Always returns true.
 */
public boolean bezier(float p1x,float p1y,float cpx,float cpy,float p2x,float p2y,float fontSizeChange) {
// Point p1 = ...; // start point
// Point cp = ...; // initial control point
// Point p2 = ...; // initial end point
float t = 0;
float c1x; // x of the interpolated control point
float c1y; // y of the interpolated control point
float c2x; // x of the interpolated end point
float c2y; // y of the interpolated end point
float px; // x of the point on the quadratic Bezier curve
float py; // y of the point on the quadratic Bezier curve
float tInterval=0.01f;
// Width shrink applied per step, so the total over t in [0,1] is
// fontSizeChange.
float fontSizeInterval=fontSizeChange*tInterval;
mPath.reset();
mPath.moveTo(p1x, p1y);
float tmpFontSize=mFontSize;
while ( t < 1 ) {
/*
The control point lies on the linear Bezier between the start point
and the initial control point, so c1x/c1y come from the linear
Bezier formula.
*/
c1x = p1x + ( cpx - p1x ) * t;
c1y = p1y + ( cpy - p1y ) * t;
/*
The end point lies on the linear Bezier between the initial control
point and the initial end point, so c2x/c2y come from the linear
Bezier formula.
*/
c2x = cpx + ( p2x - cpx ) * t;
c2y = cpy + ( p2y - cpy ) * t;
/*
The point on the quadratic Bezier lies on the linear Bezier between
the control point and the end point, giving px/py.
*/
px = c1x + ( c2x - c1x ) * t;
py = c1y + ( c2y - c1y ) * t;
tmpFontSize-=fontSizeInterval;
t += tInterval;
//Log.i("kk", "Bezir "+px+" "+py);
// Clamp the taper so the stroke never gets thinner than SIZE_MIN.
if(tmpFontSize < CalliPoint.SIZE_MIN ){
tmpFontSize = CalliPoint.SIZE_MIN;
}
mPathPaint.setStrokeWidth(tmpFontSize);
mPath.lineTo(px, py);
extendBound(px, py, tmpFontSize);
bCanvas.drawPath(mPath, mPathPaint);
mPath.reset();
mPath.moveTo(px, py);
// bCanvas.drawPoint(px, py, mPathPaint);
}
// Close the curve exactly at the requested end point.
mPath.lineTo(p2x, p2y);
bCanvas.drawPath(mPath, mPathPaint);
mFontSize=tmpFontSize;
return true;
}
/*
class GenDataRunnable implements Runnable {
public boolean isRun = false;
public boolean isBitmapDirty = false;
public boolean isUpdated = false;
@Override
public void run() {
// Auto-generated method stub
while(true) {
try {
Thread.sleep(3);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
*/
}
|
#!/bin/bash
# Remove every Docker Swarm node currently reported as "Down".
#
# -r / --no-run-if-empty: when no node is down, do not invoke
# "docker node rm" at all — without it, xargs would run the command with
# no arguments and it would fail with a usage error.
docker node ls | grep Down | awk '{print $1}' | xargs -r docker node rm
|
import React from 'react';
import Img from 'gatsby-image';
import styles from './StaffProfile.module.css';
const StaffProfile = ({ image, name, title, description}) => {
return (
<div className={styles.container}>
<Img fixed={image} />
<p className={styles.nameTitle}>{name}<br/>{title}</p>
<p>{description}</p>
</div>
)
}
export default StaffProfile
|
#!/bin/bash
# This script helps with heartbeat and silences synchronization.
#
# Whenever a cluster is known to have issues, a silence is created in https://github.com/giantswarm/silences repository.
# In some cases this silence might apply to the whole cluster,
# when this is the case the heartbeat in Opsgenie for the corresponding cluster
# should be disabled.
# When the silence is then removed the heartbeat in Opsgenie should be re-enabled.
#
# There is currently no automation for this, so this script is here to help humans to do their job right.
#
# Requirements to run this script :
# - heartbeatctl: https://github.com/giantswarm/heartbeatctl
# - silencectl: https://github.com/giantswarm/silencectl

# Fail fast: abort when a required tool is missing or a command errors.
set -euo pipefail

command -V heartbeatctl
command -V silencectl

echo "> start"

echo "> re-enable disabled heartbeats (for cluster with no silences)"
# For every DISABLED heartbeat, re-enable it unless a matching silence
# still exists. "$h" is quoted to prevent word splitting/globbing.
for h in $(heartbeatctl list -s=DISABLED --no-headers | awk '{print $1}'); do
  grep -q "$h" <(silencectl list) || heartbeatctl enable "$h"
done

echo "> disable heartbeat (for cluster with silences)"
# -r: skip running "heartbeatctl disable" entirely when there are no
# silences (plain xargs would run it once with no arguments and fail).
silencectl list | xargs -r heartbeatctl disable

echo "> end"
|
<reponame>GoldenPedro/java-deployshoppingcart
package com.lambdaschool.shoppingcart.config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
/**
* Configures which database we are using based on a property in application.properties
*/
@Configuration
public class DataSourceConfig
{
/**
* The property from application properties. Defaults to H2
*/
@Value("${local.run.db:h2}")
private String dbValue;
/**
* A config var from Heroku giving the url for access to POSTGRESQL. Default to empty string
*/
@Value("${spring.datasource.url:}")
private String dbURL;
/**
* The actual datasource configuration
*
* @return the datasource to use
*/
@Bean
public DataSource dataSource()
{
if (dbValue.equalsIgnoreCase("POSTGRESQL"))
{
// Assume Heroku
HikariConfig config = new HikariConfig();
config.setDriverClassName("org.postgresql.Driver");
config.setJdbcUrl(dbURL);
return new HikariDataSource(config);
} else
{
// Assume H2
String myURLString = "jdbc:h2:mem:testdb";
String myDriverClass = "org.h2.Driver";
String myDBUser = "sa";
String myDBPassword = "";
return DataSourceBuilder.create()
.username(myDBUser)
.password(<PASSWORD>)
.url(myURLString)
.driverClassName(myDriverClass)
.build();
}
}
}
|
package com.ootterskog.apps;
import org.dom4j.tree.AbstractEntity;
import javax.persistence.Entity;
import javax.persistence.Id;
@Entity
// NOTE(review): this extends org.dom4j.tree.AbstractEntity, which is a dom4j
// XML-node base class, not a persistence/domain base class — presumably the
// wrong import was picked up; verify which AbstractEntity was intended.
public class Customer extends AbstractEntity {
// Primary key; assigned externally (no @GeneratedValue is declared).
@Id
private String id;
// Basic contact fields. No accessors are defined in this chunk —
// TODO confirm whether getters/setters exist elsewhere or are needed.
private String firstname, lastname, email;
}
|
#!/bin/bash
# Create the project's .env directory and a matching Python virtualenv,
# then install the project's dependencies into it.
#
# $PROJECT is quoted throughout to avoid word splitting/globbing if the
# project name ever contains spaces or glob characters.
mkdir -p "/${PROJECT}/.env"
source /usr/local/bin/virtualenvwrapper.sh
mkvirtualenv "$PROJECT"
pip install -r /tmp/requirements.txt
|
<filename>src/main/java/de/tub/cit/slist/bdos/util/SerializerHelper.java
package de.tub.cit.slist.bdos.util;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import de.tub.cit.slist.bdos.annotation.FixedLength;
import de.tub.cit.slist.bdos.metadata.ClassMetadata;
import de.tub.cit.slist.bdos.metadata.FieldMetadata;
import de.tub.cit.slist.bdos.metadata.FieldType;
/**
 * Builds {@link ClassMetadata}/{@link FieldMetadata} descriptions of classes
 * for off-heap serialization: per-field offsets, serialized lengths and
 * {@link FieldType}s, walking superclasses and recursing into element types.
 */
public class SerializerHelper {
    private SerializerHelper() {
        // static helper class; no instances
    }

    /**
     * get metadata for <code>baseclazz</code> and store it in <code>classMetadataMap</code>
     *
     * @param baseclazz        the class to describe
     * @param classMetadataMap accumulator of already-known class metadata; updated in place
     * @return added class metadata (the entries newly discovered by this call)
     */
    @SuppressWarnings("restriction")
    public static Map<Class<?>, ClassMetadata> acquireClassMetadata(final Class<?> baseclazz, final Map<Class<?>, ClassMetadata> classMetadataMap) {
        final Map<Class<?>, ClassMetadata> addedClasses = new HashMap<>();
        // Already known: nothing to add (also breaks recursion cycles).
        if (classMetadataMap.containsKey(baseclazz)) return addedClasses;
        Class<?> clazz = baseclazz;
        final List<FieldMetadata> fields = new ArrayList<>();
        long fieldLength = 0;
        long totalLength = 0;
        if (clazz.isPrimitive()) {
            totalLength = UnsafeHelper.PRIMITIVE_LENGTHS.get(clazz);
        } else {
            // Walk the class and all its superclasses, collecting every
            // non-static field.
            do {
                for (final Field f : clazz.getDeclaredFields()) {
                    if (!Modifier.isStatic(f.getModifiers())) {
                        final Class<?> classType = f.getType();
                        final FieldMetadata fieldMetadata = new FieldMetadata();
                        fieldMetadata.setFieldName(f.getName());
                        fieldMetadata.setOffset(UnsafeHelper.getUnsafe().objectFieldOffset(f));
                        fieldMetadata.setClazz(classType);
                        if (classType == boolean.class) {
                            fieldMetadata.setType(FieldType.BOOL);
                            fieldLength = UnsafeHelper.BOOLEAN_FIELD_SIZE;
                        } else if (classType == byte.class) {
                            fieldMetadata.setType(FieldType.BYTE);
                            fieldLength = UnsafeHelper.BYTE_FIELD_SIZE;
                        } else if (classType == char.class) {
                            fieldMetadata.setType(FieldType.CHAR);
                            fieldLength = UnsafeHelper.CHAR_FIELD_SIZE;
                        } else if (classType == double.class) {
                            fieldMetadata.setType(FieldType.DOUBLE);
                            fieldLength = UnsafeHelper.DOUBLE_FIELD_SIZE;
                        } else if (classType == float.class) {
                            fieldMetadata.setType(FieldType.FLOAT);
                            fieldLength = UnsafeHelper.FLOAT_FIELD_SIZE;
                        } else if (classType == int.class) {
                            fieldMetadata.setType(FieldType.INT);
                            fieldLength = UnsafeHelper.INT_FIELD_SIZE;
                        } else if (classType == long.class) {
                            fieldMetadata.setType(FieldType.LONG);
                            fieldLength = UnsafeHelper.LONG_FIELD_SIZE;
                        } else if (classType == short.class) {
                            fieldMetadata.setType(FieldType.SHORT);
                            fieldLength = UnsafeHelper.SHORT_FIELD_SIZE;
                        } else if (classType == String.class) {
                            // Strings: fixed-length inline storage when annotated,
                            // otherwise an out-of-line reference slot.
                            final FixedLength fixedLength = f.getAnnotation(FixedLength.class);
                            if (fixedLength != null) {
                                fieldMetadata.setType(FieldType.STRING_FIXED);
                                fieldLength = (long) fixedLength.value() * UnsafeHelper.CHAR_FIELD_SIZE + UnsafeHelper.INT_FIELD_SIZE;
                                fieldMetadata.setElements(fixedLength.value());
                            } else {
                                fieldMetadata.setType(FieldType.STRING);
                                fieldLength = UnsafeHelper.LONG_FIELD_SIZE;
                            }
                        } else if (classType.isArray()) {
                            final Class<?> subtype = classType.getComponentType();
                            if (subtype.isInterface()) throw new UnsupportedOperationException("No Interface Array allowed.");
                            fieldMetadata.setClazz(subtype);
                            // Describe the element type first so its length is known.
                            final Map<Class<?>, ClassMetadata> subtypeMetadata = acquireClassMetadata(subtype, classMetadataMap);
                            addedClasses.putAll(subtypeMetadata);
                            final FixedLength fixedLength = f.getAnnotation(FixedLength.class);
                            if (fixedLength != null) {
                                fieldMetadata.setType(FieldType.ARRAY_FIXED);
                                fieldMetadata.setElements(fixedLength.value());
                                fieldLength = fixedLength.value() * classMetadataMap.get(subtype).getLength() + UnsafeHelper.INT_FIELD_SIZE;
                            } else {
                                fieldMetadata.setType(FieldType.ARRAY);
                                fieldLength = UnsafeHelper.LONG_FIELD_SIZE;
                            }
                        } else if (Collection.class.isAssignableFrom(classType)) {
                            if (classType.isInterface()) {
                                if (classType == List.class) {
                                    // default implementation for Lists
                                    fieldMetadata.setCollectionClass(ArrayList.class);
                                } else if (classType == Map.class) {
                                    // default implementation for Maps
                                    fieldMetadata.setCollectionClass(HashMap.class);
                                } else if (classType == Set.class) {
                                    // default implementation for Sets
                                    fieldMetadata.setCollectionClass(HashSet.class);
                                } else
                                    throw new UnsupportedOperationException("Collection types must either be non-Interfaces, or of type List|Map|Set.");
                            } else {
                                fieldMetadata.setCollectionClass(classType);
                            }
                            final Class<?> subtype = (Class<?>) ((ParameterizedType) f.getGenericType()).getActualTypeArguments()[0];
                            fieldMetadata.setClazz(subtype);
                            final Map<Class<?>, ClassMetadata> subtypeMetadata = acquireClassMetadata(subtype, classMetadataMap);
                            addedClasses.putAll(subtypeMetadata);
                            final FixedLength fixedLength = f.getAnnotation(FixedLength.class);
                            if (fixedLength != null) {
                                fieldMetadata.setType(FieldType.COLLECTION_FIXED);
                                fieldMetadata.setElements(fixedLength.value());
                                fieldLength = fixedLength.value() * classMetadataMap.get(subtype).getLength() + UnsafeHelper.INT_FIELD_SIZE;
                            } else {
                                // FIX: the original set COLLECTION_FIXED in this
                                // (non-annotated) branch too — a copy-paste bug;
                                // the variable-length case mirrors ARRAY vs
                                // ARRAY_FIXED and STRING vs STRING_FIXED.
                                fieldMetadata.setType(FieldType.COLLECTION);
                                fieldLength = UnsafeHelper.LONG_FIELD_SIZE;
                            }
                        } else {
                            // Nested object: embed its full serialized length.
                            fieldMetadata.setType(FieldType.OBJECT);
                            final Map<Class<?>, ClassMetadata> subtypeMetadata = acquireClassMetadata(classType, classMetadataMap);
                            fieldLength = classMetadataMap.get(classType).getLength();
                            addedClasses.putAll(subtypeMetadata);
                        }
                        fieldLength += UnsafeHelper.BOOLEAN_FIELD_SIZE; // isNull indicator
                        totalLength += fieldLength;
                        fieldMetadata.setLength(fieldLength);
                        fields.add(fieldMetadata);
                    }
                }
            } while ((clazz = clazz.getSuperclass()) != null);
            Collections.sort(fields); // order by offset
        }
        final ClassMetadata classMeta = new ClassMetadata(totalLength, fields.toArray(new FieldMetadata[fields.size()]));
        classMeta.calcSerializedOffsets();
        addedClasses.put(baseclazz, classMeta);
        classMetadataMap.putAll(addedClasses);
        return addedClasses;
    }

    /**
     * Pre-computes metadata for all primitive types and their wrapper classes.
     *
     * @return a map seeded with wrapper + primitive metadata
     */
    public static Map<Class<?>, ClassMetadata> acquireWrapperAndPrimitivesMetadata() {
        final Map<Class<?>, ClassMetadata> wrapperAndPrimitivesMetadata = new HashMap<>(29);
        acquireClassMetadata(Boolean.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Byte.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Character.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Double.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Float.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Integer.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Long.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Short.class, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Boolean.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Byte.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Character.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Double.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Float.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Integer.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Long.TYPE, wrapperAndPrimitivesMetadata);
        acquireClassMetadata(Short.TYPE, wrapperAndPrimitivesMetadata);
        return wrapperAndPrimitivesMetadata;
    }
}
|
<gh_stars>100-1000
package com.sun.javafx.scene.control.skin;
import javafx.beans.value.ObservableValue;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.scene.control.Control;
import com.sun.javafx.scene.control.MultiplePropertyChangeListenerHandler;
import javafx.scene.control.SkinBase;
import javafx.scene.input.MouseEvent;
import com.sun.javafx.scene.control.behavior.BehaviorBase;
/**
 * Base class for skins that delegate input handling to a {@link BehaviorBase}:
 * it wires the control's mouse events into the behavior and manages a shared
 * property-change listener handler for subclasses.
 */
public abstract class BehaviorSkinBase<C extends Control, BB extends BehaviorBase<C>> extends SkinBase<C> {
/**
* A static final reference to whether the platform we are on supports touch.
*/
protected final static boolean IS_TOUCH_SUPPORTED = false; // Platform.isSupported(ConditionalFeature.INPUT_TOUCH);
/***************************************************************************
* *
* Private fields *
* *
**************************************************************************/
/**
* The {@link BehaviorBase} that encapsulates the interaction with the
* {@link Control} from this {@code Skin}. The {@code Skin} does not modify
* the {@code Control} directly, but rather redirects events into the
* {@code BehaviorBase} which then handles the events by modifying internal state
* and public state in the {@code Control}. Generally, specific
* {@code Skin} implementations will require specific {@code BehaviorBase}
* implementations. For example, a ButtonSkin might require a ButtonBehavior.
*/
private BB behavior;
/**
* This is part of the workaround introduced during delomboking. We probably will
* want to adjust the way listeners are added rather than continuing to use this
* map (although it doesn't really do much harm).
*/
private MultiplePropertyChangeListenerHandler changeListenerHandler;
/***************************************************************************
* *
* Event Handlers / Listeners *
* *
**************************************************************************/
/**
* Forwards mouse events received by a MouseListener to the behavior.
* Note that we use this pattern to remove some of the anonymous inner
* classes which we'd otherwise have to create. When lambda expressions
* are supported, we could do it that way instead (or use MethodHandles).
*/
private final EventHandler<MouseEvent> mouseHandler =
new EventHandler<MouseEvent>() {
@Override public void handle(MouseEvent e) {
final EventType<?> type = e.getEventType();
if (type == MouseEvent.MOUSE_ENTERED) behavior.mouseEntered(e);
else if (type == MouseEvent.MOUSE_EXITED) behavior.mouseExited(e);
else if (type == MouseEvent.MOUSE_PRESSED) behavior.mousePressed(e);
else if (type == MouseEvent.MOUSE_RELEASED) behavior.mouseReleased(e);
else if (type == MouseEvent.MOUSE_DRAGGED) behavior.mouseDragged(e);
else { // unreachable for the five event types registered in the constructor
throw new AssertionError("Unsupported event type received");
}
}
};
/*
private final EventHandler<ContextMenuEvent> contextMenuHandler =
new EventHandler<ContextMenuEvent>() {
@Override public void handle(ContextMenuEvent event) {
behavior.contextMenuRequested(event);
}
};
*/
/***************************************************************************
* *
* Constructor *
* *
**************************************************************************/
/**
* Constructor for all BehaviorSkinBase instances.
*
* @param control The control for which this Skin should attach to.
* @param behavior The behavior for which this Skin should defer to.
*/
protected BehaviorSkinBase(final C control, final BB behavior) {
super(control);
if (behavior == null) {
throw new IllegalArgumentException("Cannot pass null for behavior");
}
// Update the control and behavior
this.behavior = behavior;
// We will auto-add listeners for wiring up Region mouse events to
// be sent to the behavior
control.addEventHandler(MouseEvent.MOUSE_ENTERED, mouseHandler);
control.addEventHandler(MouseEvent.MOUSE_EXITED, mouseHandler);
control.addEventHandler(MouseEvent.MOUSE_PRESSED, mouseHandler);
control.addEventHandler(MouseEvent.MOUSE_RELEASED, mouseHandler);
control.addEventHandler(MouseEvent.MOUSE_DRAGGED, mouseHandler);
//control.addEventHandler(ContextMenuEvent.CONTEXT_MENU_REQUESTED, contextMenuHandler);
}
/***************************************************************************
* *
* Public API (from Skin) *
* *
**************************************************************************/
/** {@inheritDoc} */
public final BB getBehavior() {
return behavior;
}
/** {@inheritDoc} */
@Override public void dispose() {
// unhook listeners
if (changeListenerHandler != null) {
changeListenerHandler.dispose();
}
// Remove exactly the handlers registered in the constructor.
C control = getSkinnable();
if (control != null) {
control.removeEventHandler(MouseEvent.MOUSE_ENTERED, mouseHandler);
control.removeEventHandler(MouseEvent.MOUSE_EXITED, mouseHandler);
control.removeEventHandler(MouseEvent.MOUSE_PRESSED, mouseHandler);
control.removeEventHandler(MouseEvent.MOUSE_RELEASED, mouseHandler);
control.removeEventHandler(MouseEvent.MOUSE_DRAGGED, mouseHandler);
}
// Dispose the behavior last and drop the reference so the skin cannot
// forward further events to it.
if (behavior != null) {
behavior.dispose();
behavior = null;
}
super.dispose();
}
/***************************************************************************
* *
* Public API *
* *
**************************************************************************/
/**
* Subclasses can invoke this method to register that we want to listen to
* property change events for the given property.
*
* @param property
* @param reference
*/
protected final void registerChangeListener(ObservableValue<?> property, String reference) {
// Lazily created so skins that never register listeners pay nothing.
if (changeListenerHandler == null) {
changeListenerHandler = new MultiplePropertyChangeListenerHandler(p -> {
handleControlPropertyChanged(p);
return null;
});
}
changeListenerHandler.registerChangeListener(property, reference);
}
/**
* Skin subclasses will override this method to handle changes in corresponding
* control's properties.
*/
protected void handleControlPropertyChanged(String propertyReference) {
// no-op
}
}
|
package demo.dso.auth;
import org.noear.solon.core.handle.Context;
import org.noear.solon.core.handle.Result;
import org.noear.solon.validation.Validator;
/**
 * Validates the {@code @Auth} annotation against the caller's session role.
 *
 * @author noear 2021/3/10 created
 */
public class AuthValidator implements Validator<Auth> {
    public static final AuthValidator instance = new AuthValidator();

    /** Returns the failure message configured on the annotation. */
    @Override
    public String message(Auth anno) {
        return anno.message();
    }

    /**
     * Succeeds as soon as the session's role weight reaches the weight of
     * any role required by the annotation; otherwise fails with HTTP 401.
     */
    @Override
    public Result validateOfContext(Context ctx, Auth anno, String name, StringBuilder tmp) {
        // Role weight stored in the session; 0 (no privileges) when absent.
        final int sessionRole = ctx.session("role", 0);
        for (final AuthRole required : anno.value()) {
            if (sessionRole >= required.weight) {
                return Result.succeed();
            }
        }
        // None of the accepted roles matched: unauthorized.
        return Result.failure(401);
    }
}
|
#!/bin/bash
# Terraform Scaffold
#
# A wrapper for running terraform projects
# - handles remote state
# - uses consistent .tfvars files for each environment
##
# Set Script Version
##
# Printed by version() and shown in diagnostics; bump on release.
readonly script_ver="1.6.1";
##
# Standardised failure function
##
# Prints "ERROR: <message>" (escape sequences interpreted) to stderr and
# aborts the whole script with exit status 1.
function error_and_die {
echo -e "ERROR: ${1}" >&2;
exit 1;
};
##
# Print Script Version
##
function version() {
  # Emit the version constant defined at the top of the script.
  printf '%s\n' "${script_ver}";
}
##
# Print Usage Text
##
# Print the full usage/help text to stdout.
# FIX: the -g/--group line was missing the trailing "\\" continuation that
# every other option line in the example command carries, which broke the
# rendered multi-line invocation example.
function usage() {
cat <<EOF
Usage: ${0} \\
-a/--action [action] \\
-b/--bucket-prefix [bucket_prefix] \\
-c/--component [component_name] \\
-e/--environment [environment] \\
-g/--group [group] \\
-i/--build-id [build_id] (optional) \\
-p/--project [project] \\
-r/--region [region] \\
-d/--detailed-exitcode \\
-n/--no-color \\
-w/--compact-warnings \\
-- \\
<additional arguments to forward to the terraform binary call>
action:
- Special actions:
* plan / plan-destroy
* apply / destroy
* graph
* taint / untaint
- Generic actions:
* See https://www.terraform.io/docs/commands/
bucket_prefix (optional):
Defaults to: "\${project_name}-tfscaffold"
- myproject-terraform
- terraform-yourproject
- my-first-tfscaffold-project
build_id (optional):
- testing
- \$BUILD_ID (jenkins)
component_name:
- the name of the terraform component module in the components directory
environment:
- dev
- test
- prod
- management
group:
- dev
- live
- mytestgroup
project:
- The name of the project being deployed
region (optional):
Defaults to value of \$AWS_DEFAULT_REGION
- the AWS region name unique to all components and terraform processes
detailed-exitcode (optional):
When not provided, false.
Changes the plan operation to exit 0 only when there are no changes.
Will be ignored for actions other than plan.
no-color (optional):
Append -no-color to all terraform calls
compact-warnings (optional):
Append -compact-warnings to all terraform calls
additional arguments:
Any arguments provided after "--" will be passed directly to terraform as its own arguments
EOF
};
##
# Test for GNU getopt
##
# GNU getopt exits with status 4 for `getopt -T` and prints nothing; other
# (BSD) variants do not, and this script depends on GNU long-option support.
getopt_out=$(getopt -T)
if (( $? != 4 )) && [[ -n $getopt_out ]]; then
error_and_die "Non GNU getopt detected. If you're using a Mac then try \"brew install gnu-getopt\"";
fi
##
# Execute getopt and process script arguments
##
# Keep the raw argument string only for the informational echo printed later.
readonly raw_arguments="${*}";
ARGS=$(getopt \
-o dhnvwa:b:c:e:g:i:p:r: \
-l "help,version,bootstrap,action:,bucket-prefix:,build-id:,component:,environment:,group:,project:,region:,detailed-exitcode,no-color,compact-warnings" \
-n "${0}" \
-- \
"$@");
#Bad arguments
if [ $? -ne 0 ]; then
usage;
error_and_die "command line argument parse failure";
fi;
# Replace the positional parameters with getopt's normalised output so the
# while/case loop below can consume them one flag at a time.
eval set -- "${ARGS}";
declare bootstrap="false";
declare component_arg;
declare region_arg;
declare environment_arg;
declare group;
declare action;
declare bucket_prefix;
declare build_id;
declare project;
declare detailed_exitcode;
declare no_color;
declare compact_warnings;
while true; do
case "${1}" in
-h|--help)
usage;
exit 0;
;;
-v|--version)
version;
exit 0;
;;
-c|--component)
shift;
if [ -n "${1}" ]; then
component_arg="${1}";
shift;
fi;
;;
-r|--region)
shift;
if [ -n "${1}" ]; then
region_arg="${1}";
shift;
fi;
;;
-e|--environment)
shift;
if [ -n "${1}" ]; then
environment_arg="${1}";
shift;
fi;
;;
-g|--group)
shift;
if [ -n "${1}" ]; then
group="${1}";
shift;
fi;
;;
-a|--action)
shift;
if [ -n "${1}" ]; then
action="${1}";
shift;
fi;
;;
-b|--bucket-prefix)
shift;
if [ -n "${1}" ]; then
bucket_prefix="${1}";
shift;
fi;
;;
-i|--build-id)
shift;
if [ -n "${1}" ]; then
build_id="${1}";
shift;
fi;
;;
-p|--project)
shift;
if [ -n "${1}" ]; then
project="${1}";
shift;
fi;
;;
--bootstrap)
shift;
bootstrap="true";
;;
-d|--detailed-exitcode)
shift;
detailed_exitcode="true";
;;
-n|--no-color)
shift;
no_color="-no-color";
;;
-w|--compact-warnings)
shift;
compact_warnings="-compact-warnings";
;;
--)
shift;
break;
;;
esac;
done;
declare extra_args="${@} ${no_color} ${compact_warnings}"; # All arguments supplied after "--"
##
# Script Set-Up
##
# Determine where I am and from that derive basepath and project name
script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
base_path="${script_path%%\/bin}";
project_name_default="${base_path##*\/}";
status=0;
echo "Args ${raw_arguments}";
# Ensure script console output is separated by blank line at top and bottom to improve readability
trap echo EXIT;
echo;
##
# Munge Params
##
# Set Region from args or environment. Exit if unset.
readonly region="${region_arg:-${AWS_DEFAULT_REGION}}";
[ -n "${region}" ] \
|| error_and_die "No AWS region specified. No -r/--region argument supplied and AWS_DEFAULT_REGION undefined";
[ -n "${project}" ] \
|| error_and_die "Required argument -p/--project not specified";
# Bootstrapping is special
if [ "${bootstrap}" == "true" ]; then
[ -n "${component_arg}" ] \
&& error_and_die "The --bootstrap parameter and the -c/--component parameter are mutually exclusive";
[ -n "${build_id}" ] \
&& error_and_die "The --bootstrap parameter and the -i/--build-id parameter are mutually exclusive. We do not currently support plan files for bootstrap";
else
# Validate component to work with
[ -n "${component_arg}" ] \
|| error_and_die "Required argument missing: -c/--component";
readonly component="${component_arg}";
# Validate environment to work with
[ -n "${environment_arg}" ] \
|| error_and_die "Required argument missing: -e/--environment";
readonly environment="${environment_arg}";
fi;
[ -n "${action}" ] \
|| error_and_die "Required argument missing: -a/--action";
# Validate AWS Credentials Available
iam_iron_man="$(aws sts get-caller-identity --query 'Arn' --output text)";
if [ -n "${iam_iron_man}" ]; then
echo -e "AWS Credentials Found. Using ARN '${iam_iron_man}'";
else
error_and_die "No AWS Credentials Found. \"aws sts get-caller-identity --query 'Arn' --output text\" responded with ARN '${iam_iron_man}'";
fi;
# Query canonical AWS Account ID
aws_account_id="$(aws sts get-caller-identity --query 'Account' --output text)";
if [ -n "${aws_account_id}" ]; then
echo -e "AWS Account ID: ${aws_account_id}";
else
error_and_die "Couldn't determine AWS Account ID. \"aws sts get-caller-identity --query 'Account' --output text\" provided no output";
fi;
# Validate S3 bucket. Set default if undefined
if [ -n "${bucket_prefix}" ]; then
readonly bucket="${bucket_prefix}-${aws_account_id}-${region}"
echo -e "Using S3 bucket s3://${bucket}";
else
readonly bucket="${project}-tfscaffold-${aws_account_id}-${region}";
echo -e "No bucket prefix specified. Using S3 bucket s3://${bucket}";
fi;
declare component_path;
if [ "${bootstrap}" == "true" ]; then
component_path="${base_path}/bootstrap";
else
component_path="${base_path}/components/${component}";
fi;
# Get the absolute path to the component
if [[ "${component_path}" != /* ]]; then
component_path="$(cd "$(pwd)/${component_path}" && pwd)";
else
component_path="$(cd "${component_path}" && pwd)";
fi;
[ -d "${component_path}" ] || error_and_die "Component path ${component_path} does not exist";
## Debug
#echo $component_path;
##
# Begin parameter-dependent logic
##
case "${action}" in
apply)
refresh="-refresh=true";
;;
destroy)
destroy='-destroy';
refresh="-refresh=true";
;;
plan)
refresh="-refresh=true";
;;
plan-destroy)
action="plan";
destroy="-destroy";
refresh="-refresh=true";
;;
*)
;;
esac;
# Tell terraform to moderate its output to be a little
# more friendly to automation wrappers
# Value is irrelevant, just needs to be non-null
export TF_IN_AUTOMATION="true";
for rc_path in "${base_path}" "${base_path}/etc" "${component_path}"; do
if [ -f "${rc_path}/.terraformrc" ]; then
echo "Found .terraformrc at ${rc_path}/.terraformrc. Overriding.";
export TF_CLI_CONFIG_FILE="${rc_path}/.terraformrc";
fi;
done;
# Configure the plugin-cache location so plugins are not
# downloaded to individual components
declare default_plugin_cache_dir="$(pwd)/plugin-cache";
export TF_PLUGIN_CACHE_DIR="${TF_PLUGIN_CACHE_DIR:-${default_plugin_cache_dir}}"
# Ensure the plugin cache directory exists (message grammar fixed: "create").
mkdir -p "${TF_PLUGIN_CACHE_DIR}" \
  || error_and_die "Failed to create the plugin-cache directory (${TF_PLUGIN_CACHE_DIR})";
[ -w "${TF_PLUGIN_CACHE_DIR}" ] \
|| error_and_die "plugin-cache directory (${TF_PLUGIN_CACHE_DIR}) not writable";
# Clear cache, safe enough as we enforce plugin cache
rm -rf ${component_path}/.terraform;
# Run global pre.sh
if [ -f "pre.sh" ]; then
source pre.sh "${region}" "${environment}" "${action}" \
|| error_and_die "Global pre script execution failed with exit code ${?}";
fi;
# Make sure we're running in the component directory
pushd "${component_path}";
readonly component_name=$(basename ${component_path});
# Check for presence of tfenv (https://github.com/kamatama41/tfenv)
# and a .terraform-version file. If both present, ensure required
# version of terraform for this component is installed automagically.
tfenv_bin="$(which tfenv 2>/dev/null)";
if [[ -n "${tfenv_bin}" && -x "${tfenv_bin}" && -f .terraform-version ]]; then
${tfenv_bin} install;
fi;
# Regardless of bootstrapping or not, we'll be using this string.
# If bootstrapping, we will fill it with variables,
# if not we will fill it with variable file parameters
declare tf_var_params;
if [ "${bootstrap}" == "true" ]; then
if [ "${action}" == "destroy" ]; then
error_and_die "You cannot destroy a bootstrap bucket using tfscaffold, it's just too dangerous. If you're absolutely certain that you want to delete the bucket and all contents, including any possible state files environments and components within this project, then you will need to do it from the AWS Console. Note you cannot do this from the CLI because the bootstrap bucket is versioned, and even the --force CLI parameter will not empty the bucket of versions";
fi;
# Bootstrap requires this parameter as explicit as it is constructed here
# for multiple uses, so we cannot just depend on it being set in tfvars
tf_var_params+=" -var bucket_name=${bucket}";
fi;
# Run component-specific pre.sh
if [ -f "pre.sh" ]; then
source pre.sh "${region}" "${environment}" "${action}" \
|| error_and_die "Component pre script execution failed with exit code ${?}";
fi;
# Pull down secret TFVAR file from S3
# Anti-pattern and security warning: This secrets mechanism provides very little additional security.
# It permits you to inject secrets directly into terraform without storing them in source control or unencrypted in S3.
# Secrets will still be stored in all copies of your state file - which will be stored on disk wherever this script is run and in S3.
# This script does not currently support encryption of state files.
# Use this feature only if you're sure it's the right pattern for your use case.
declare -a secrets=();
readonly secrets_file_name="secret.tfvars.enc";
readonly secrets_file_path="build/${secrets_file_name}";
aws s3 ls s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name} >/dev/null 2>&1;
if [ $? -eq 0 ]; then
mkdir -p build;
aws s3 cp s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name} ${secrets_file_path} \
|| error_and_die "S3 secrets file is present, but inaccessible. Ensure you have permission to read s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name}";
if [ -f "${secrets_file_path}" ]; then
secrets=($(aws kms decrypt --ciphertext-blob fileb://${secrets_file_path} --output text --query Plaintext | base64 --decode));
fi;
fi;
if [ -n "${secrets[0]}" ]; then
secret_regex='^[A-Za-z0-9_-]+=.+$';
secret_count=1;
for secret_line in "${secrets[@]}"; do
if [[ "${secret_line}" =~ ${secret_regex} ]]; then
var_key="${secret_line%=*}";
var_val="${secret_line##*=}";
export TF_VAR_${var_key}="${var_val}";
((secret_count++));
else
echo "Malformed secret on line ${secret_count} - ignoring";
fi;
done;
fi;
# Pull down additional dynamic plaintext tfvars file from S3
# Anti-pattern warning: Your variables should almost always be in source control.
# There are a very few use cases where you need constant variability in input variables,
# and even in those cases you should probably pass additional -var parameters to this script
# from your automation mechanism.
# Use this feature only if you're sure it's the right pattern for your use case.
readonly dynamic_file_name="dynamic.tfvars";
readonly dynamic_file_path="build/${dynamic_file_name}";
aws s3 ls s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name} >/dev/null 2>&1;
if [ $? -eq 0 ]; then
aws s3 cp s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name} ${dynamic_file_path} \
|| error_and_die "S3 tfvars file is present, but inaccessible. Ensure you have permission to read s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name}";
fi;
# Use versions TFVAR files if exists
readonly versions_file_name="versions_${region}_${environment}.tfvars";
readonly versions_file_path="${base_path}/etc/${versions_file_name}";
# Check for presence of an environment variables file, and use it if readable
if [ -n "${environment}" ]; then
readonly env_file_path="${base_path}/etc/env_${region}_${environment}.tfvars";
fi;
# Check for presence of a global variables file, and use it if readable
readonly global_vars_file_name="global.tfvars";
readonly global_vars_file_path="${base_path}/etc/${global_vars_file_name}";
# Check for presence of a region variables file, and use it if readable
readonly region_vars_file_name="${region}.tfvars";
readonly region_vars_file_path="${base_path}/etc/${region_vars_file_name}";
# Check for presence of a group variables file if specified, and use it if readable
if [ -n "${group}" ]; then
readonly group_vars_file_name="group_${group}.tfvars";
readonly group_vars_file_path="${base_path}/etc/${group_vars_file_name}";
fi;
# Collect the paths of the variables files to use
declare -a tf_var_file_paths;
# Use Global and Region first, to allow potential for terraform to do the
# honourable thing and override global and region settings with environment
# specific ones; however we do not officially support the same variable
# being declared in multiple locations, and we warn when we find any duplicates
[ -f "${global_vars_file_path}" ] && tf_var_file_paths+=("${global_vars_file_path}");
[ -f "${region_vars_file_path}" ] && tf_var_file_paths+=("${region_vars_file_path}");
# If a group has been specified, load the vars for the group. If we are to assume
# terraform correctly handles override-ordering (which to be fair we don't hence
# the warning about duplicate variables below) we add this to the list after
# global and region-global variables, but before the environment variables
# so that the environment can explicitly override variables defined in the group.
if [ -n "${group}" ]; then
if [ -f "${group_vars_file_path}" ]; then
tf_var_file_paths+=("${group_vars_file_path}");
else
echo -e "[WARNING] Group \"${group}\" has been specified, but no group variables file is available at ${group_vars_file_path}";
fi;
fi;
# Environment is normally expected, but in bootstrapping it may not be provided
if [ -n "${environment}" ]; then
if [ -f "${env_file_path}" ]; then
tf_var_file_paths+=("${env_file_path}");
else
echo -e "[WARNING] Environment \"${environment}\" has been specified, but no environment variables file is available at ${env_file_path}";
fi;
fi;
# If present and readable, use versions and dynamic variables too
[ -f "${versions_file_path}" ] && tf_var_file_paths+=("${versions_file_path}");
[ -f "${dynamic_file_path}" ] && tf_var_file_paths+=("${dynamic_file_path}");
# Warn on duplication
duplicate_variables="$(cat "${tf_var_file_paths[@]}" | sed -n -e 's/\(^[a-zA-Z0-9_\-]\+\)\s*=.*$/\1/p' | sort | uniq -d)";
[ -n "${duplicate_variables}" ] \
&& echo -e "
###################################################################
# WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING #
###################################################################
The following input variables appear to be duplicated:
${duplicate_variables}
This could lead to unexpected behaviour. Overriding of variables
has previously been unpredictable and is not currently supported,
but it may work.
Recent changes to terraform might give you useful overriding and
map-merging functionality, please use with caution and report back
on your successes & failures.
###################################################################";
# Build up the tfvars arguments for terraform command line
for file_path in "${tf_var_file_paths[@]}"; do
tf_var_params+=" -var-file=${file_path}";
done;
##
# Start Doing Real Things
##
# Really Hashicorp? Really?!
#
# In order to work with terraform >=0.9.2 (I say 0.9.2 because 0.9 prior
# to 0.9.2 is barely usable due to key bugs and missing features)
# we now need to do some ugly things to our terraform remote backend configuration.
# The long term hope is that they will fix this, and maybe remove the need for it
# altogether by supporting interpolation in the backend config stanza.
#
# For now we're left with this garbage, and no more support for <0.9.0.
if [ -f backend_tfscaffold.tf ]; then
echo -e "WARNING: backend_tfscaffold.tf exists and will be overwritten!" >&2;
fi;
declare backend_prefix;
declare backend_filename;
if [ "${bootstrap}" == "true" ]; then
backend_prefix="${project}/${aws_account_id}/${region}/bootstrap";
backend_filename="bootstrap.tfstate";
else
backend_prefix="${project}/${aws_account_id}/${region}/${environment}";
backend_filename="${component_name}.tfstate";
fi;
readonly backend_key="${backend_prefix}/${backend_filename}";
readonly backend_config="terraform {
backend \"s3\" {
region = \"${region}\"
bucket = \"${bucket}\"
key = \"${backend_key}\"
}
}";
# We're now all ready to go. All that's left is to:
# * Write the backend config
# * terraform init
# * terraform ${action}
#
# But if we're dealing with the special bootstrap component
# we can't remotely store the backend until we've bootstrapped it
#
# So IF the S3 bucket already exists, we will continue as normal
# because we want to be able to manage changes to an existing
# bootstrap bucket. But if it *doesn't* exist, then we need to be
# able to plan and apply it with a local state, and *then* configure
# the remote state.
# In default operations we assume we are already bootstrapped
declare bootstrapped="true";
# If we are in bootstrap mode, we need to know if we have already bootstrapped
# or we are working with or modifying an existing bootstrap bucket
if [ "${bootstrap}" == "true" ]; then
# For this exist check we could do many things, but we explicitly perform
# an ls against the key we will be working with so as to not require
# permissions to, for example, list all buckets, or the bucket root keyspace
aws s3 ls s3://${bucket}/${backend_prefix}/${backend_filename} >/dev/null 2>&1;
[ $? -eq 0 ] || bootstrapped="false";
fi;
if [ "${bootstrapped}" == "true" ]; then
echo -e "${backend_config}" > backend_tfscaffold.tf \
|| error_and_die "Failed to write backend config to $(pwd)/backend_tfscaffold.tf";
# Nix the horrible hack on exit
trap "rm -f $(pwd)/backend_tfscaffold.tf" EXIT;
# Configure remote state storage
echo "Setting up S3 remote state from s3://${bucket}/${backend_key}";
terraform init -upgrade ${no_color} ${compact_warnings} \
|| error_and_die "Terraform init failed";
else
# We are bootstrapping. Download the providers, skip the backend config.
terraform init \
-backend=false \
${no_color} \
${compact_warnings} \
|| error_and_die "Terraform init failed";
fi;
case "${action}" in
'plan')
if [ -n "${build_id}" ]; then
mkdir -p build;
plan_file_name="${component_name}_${build_id}.tfplan";
plan_file_remote_key="${backend_prefix}/plans/${plan_file_name}";
out="-out=build/${plan_file_name}";
fi;
if [ "${detailed_exitcode}" == "true" ]; then
detailed="-detailed-exitcode";
fi;
terraform "${action}" \
-input=false \
${refresh} \
${tf_var_params} \
${extra_args} \
${destroy} \
${out} \
${detailed} \
-parallelism=300;
status="${?}";
# Even when detailed exitcode is set, a 1 is still a fail,
# so exit
# (detailed exit codes are 0 and 2)
if [ "${status}" -eq 1 ]; then
error_and_die "Terraform plan failed";
fi;
if [ -n "${build_id}" ]; then
aws s3 cp build/${plan_file_name} s3://${bucket}/${plan_file_remote_key} \
|| error_and_die "Plan file upload to S3 failed (s3://${bucket}/${plan_file_remote_key})";
fi;
exit ${status};
;;
'graph')
mkdir -p build || error_and_die "Failed to create output directory '$(pwd)/build'";
terraform graph ${extra_args} -draw-cycles | dot -Tpng > build/${project}-${aws_account_id}-${region}-${environment}.png \
|| error_and_die "Terraform simple graph generation failed";
terraform graph ${extra_args} -draw-cycles -verbose | dot -Tpng > build/${project}-${aws_account_id}-${region}-${environment}-verbose.png \
|| error_and_die "Terraform verbose graph generation failed";
exit 0;
;;
'apply'|'destroy')
# Support for terraform <0.10 is now deprecated
if [ "${action}" == "apply" ]; then
echo "Compatibility: Adding to terraform arguments: -auto-approve=true";
extra_args+=" -auto-approve=true";
else # action is `destroy`
# Check terraform version - if pre-0.15, need to add `-force`; 0.15 and above instead use `-auto-approve`
if [ $(terraform version | head -n1 | cut -d" " -f2 | cut -d"." -f1) == "v0" ] && [ $(terraform version | head -n1 | cut -d" " -f2 | cut -d"." -f2) -lt 15 ]; then
echo "Compatibility: Adding to terraform arguments: -force";
force='-force';
else
extra_args+=" -auto-approve";
fi;
fi;
if [ -n "${build_id}" ]; then
mkdir -p build;
plan_file_name="${component_name}_${build_id}.tfplan";
plan_file_remote_key="${backend_prefix}/plans/${plan_file_name}";
aws s3 cp s3://${bucket}/${plan_file_remote_key} build/${plan_file_name} \
|| error_and_die "Plan file download from S3 failed (s3://${bucket}/${plan_file_remote_key})";
apply_plan="build/${plan_file_name}";
terraform "${action}" \
-input=false \
${refresh} \
-parallelism=300 \
${extra_args} \
${force} \
${apply_plan};
exit_code=$?;
else
terraform "${action}" \
-input=false \
${refresh} \
${tf_var_params} \
-parallelism=300 \
${extra_args} \
${force};
exit_code=$?;
if [ "${bootstrapped}" == "false" ]; then
# If we are here, and we are in bootstrap mode, and not already bootstrapped,
# Then we have just bootstrapped for the first time! Congratulations.
# Now we need to copy our state file into the bootstrap bucket
echo -e "${backend_config}" > backend_tfscaffold.tf \
|| error_and_die "Failed to write backend config to $(pwd)/backend_tfscaffold.tf";
# Nix the horrible hack on exit
trap "rm -f $(pwd)/backend_tfscaffold.tf" EXIT;
# Push Terraform Remote State to S3
# TODO: Add -upgrade to init when we drop support for <0.10
echo "yes" | terraform init || error_and_die "Terraform init failed";
# Hard cleanup
rm -f backend_tfscaffold.tf;
rm -f terraform.tfstate # Prime not the backup
rm -rf .terraform;
# This doesn't mean anything here, we're just celebrating!
bootstrapped="true";
fi;
fi;
if [ ${exit_code} -ne 0 ]; then
error_and_die "Terraform ${action} failed with exit code ${exit_code}";
fi;
if [ -f "post.sh" ]; then
source post.sh "${region}" "${environment}" "${action}" \
|| error_and_die "Component post script execution failed with exit code ${?}";
fi;
;;
# FIX: the original pattern was the *quoted* string '*taint'; quoting a case
# pattern disables glob matching in bash, so "taint"/"untaint" never matched
# and always fell through to the generic handler. Match both explicitly.
taint|untaint)
terraform "${action}" ${extra_args} || error_and_die "Terraform ${action} failed.";
;;
'import')
terraform "${action}" ${tf_var_params} ${extra_args} || error_and_die "Terraform ${action} failed.";
;;
*)
echo -e "Generic action case invoked. Only the additional arguments will be passed to terraform, you break it you fix it:";
echo -e "\tterraform ${action} ${extra_args}";
terraform "${action}" ${extra_args} \
|| error_and_die "Terraform ${action} failed.";
;;
esac;
popd
if [ -f "post.sh" ]; then
source post.sh "${region}" "${environment}" "${action}" \
|| error_and_die "Global post script execution failed with exit code ${?}";
fi;
exit 0;
|
public class Percolation {
private int M;
private WeightedQuickUnionUF gridUF;
private boolean[][] gridOC;
public Percolation(int N) {
if(N <= 0) {
throw new java.lang.IllegalArgumentException();
}
M = N;
// create ,N*N+2 components
gridUF = new WeightedQuickUnionUF(N*N+2);
// grid
gridOC = new boolean[N][N];
// union all top sites with one site above all, bottom sites the same
for (int i = 1; i <= N; i++) {
gridUF.union(0, i);
gridUF.union(N*N+1, N*N-i+1);
}
}
public void open(int i, int j) {
// open site (row i, column j) if it is not open already
if(i > M || i < 1 || j > M || j < 1){
throw new java.lang.IndexOutOfBoundsException();
}
if (!gridOC[i - 1][j - 1]) {
gridOC[i-1][j-1] = true;
if (i-1 > 0 && gridOC[i-2][j-1]) {
gridUF.union(M*(i-1)+j, M*(i-1)+j - M);
}
if (i+1 <= M && gridOC[i][j-1]) {
gridUF.union(M*(i-1)+j, M*(i-1)+j + M);
}
if (j-1 > 0 && gridOC[i-1][j-2]) {
gridUF.union(M*(i-1)+j, M*(i-1)+j - 1);
}
if (j+1 <= M && gridOC[i-1][j]) {
gridUF.union(M*(i-1)+j, M*(i-1)+j + 1);
}
}
}
public boolean isOpen(int i, int j) { // is site (row i, column j) open?
return gridOC[i-1][j-1];
}
public boolean isFull(int i, int j) { // is site (row i, column j) full?
return gridUF.connected(0, M*(i-1)+j) && gridOC[i-1][j-1];
}
public boolean percolates() {
// does the system percolate?
return gridUF.connected(0, M*M+1);
}
public static void main(String[] args) {
// test client (optional)
/*Percolation some = new Percolation(5);
some.open(1,2);
some.open(2,2);
some.open(3,2);
some.open(4,2);
some.open(5,2);
*/
}
} |
import * as React from 'react';
import Typography from '@material-ui/core/Typography';
import DialogTitle from '@material-ui/core/DialogTitle';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import Button from '@material-ui/core/Button';
import LinearProgress from '@material-ui/core/LinearProgress';
import { useIconDialog } from '../../components/icon-dialog';
import { EventType, ServiceError, Target, ServiceDeleted, Rule } from '../../services/api';
import { useDelete, ENTITY } from '../../services/api-provider/use-api';
import { useStyles } from './styles';
const EmptyMessage: React.FC<{ show: boolean }> = ({ show }) => {
if (!show) return null;
return (
<div aria-label='empty message'>
<Typography variant='h6'>There are NOT Elements selected to be deleted!</Typography>
</div>
);
};
const ErrorMessage: React.FC<{ error: ServiceError | undefined }> = ({ error }) => {
if (!error) return null;
return (
<div aria-label='error message'>
<Typography variant='h3'>{error.message}</Typography>
</div>
);
};
// Dialog footer: Close dismisses the dialog, Delete fires the delete request.
const Actions: React.FC<{
    close(): void;
    disableClose: boolean;
    disableDelete: boolean;
    deleteElements(): void;
}> = props => {
    const { close, disableClose, disableDelete, deleteElements } = props;
    const styles = useStyles();
    return (
        <DialogActions aria-label='actions'>
            <Button onClick={close} disabled={disableClose} aria-label='close button'>
                Close
            </Button>
            <Button onClick={deleteElements} disabled={disableDelete} className={styles.deleteButton} aria-label='delete button'>
                Delete
            </Button>
        </DialogActions>
    );
};
// Progress bar: indeterminate while deleting, a full determinate bar otherwise.
const LoadingView: React.FC<{ show: boolean }> = ({ show }) => {
    if (show) {
        return (
            <div aria-label='deleting element'>
                <LinearProgress variant='indeterminate' color='secondary' />
            </div>
        );
    }
    return (
        <div aria-label='element loader'>
            <LinearProgress variant='determinate' value={100} color='primary' />
        </div>
    );
};
export type ELEMENTS = (EventType | Target | Rule)[];
// List of the elements about to be deleted (name + id per row).
// FIXES: the original reassigned the `isLoading` prop (`isLoading = true;`)
// — a dead parameter mutation, since the value was never read afterwards —
// and used a non-null assertion (`elements!`) that crashed when `show` was
// true but no elements were supplied. `isLoading` stays in the props type
// for caller compatibility.
const DeleteList: React.FC<{
    show: boolean;
    elements?: ELEMENTS;
    isLoading: boolean;
}> = ({ show, elements }) => {
    const styles = useStyles();
    if (!show || !elements) return null;
    return (
        <div aria-label='elements to delete' className={styles.elementList}>
            {elements.map(e => (
                <div key={e.id} aria-label='element to delete' className={styles.elementItem}>
                    <div>
                        <Typography variant='h5'>{e.name}</Typography>
                    </div>
                    <div>
                        <Typography variant='caption' className={styles.captionText}>
                            {e.id}
                        </Typography>
                    </div>
                </div>
            ))}
        </div>
    );
};
const DeletedList: React.FC<{
responses: ServiceDeleted[] | undefined;
elements: ELEMENTS | undefined;
}> = ({ responses, elements }) => {
const styles = useStyles();
if (!Array.isArray(elements) || elements.length === 0) return null;
if (!Array.isArray(responses) || responses.length === 0) return null;
return (
<div aria-label='elements deleted' className={styles.elementList}>
{responses.map((event, idx) => {
//if(event.state === 'DELETED') return null;
return (
<div key={event.id} aria-label='element to delete' className={styles.elementItem}>
<div aria-label='deleted element'>
<Typography variant='h5'>{elements[idx].name}</Typography>
</div>
{event.state === 'DELETED' ? (
<div aria-label='success message'>
<Typography variant='caption' className={styles.successText}>
This entity has been deleted successfuly
</Typography>
</div>
) : (
<div aria-label='error message'>
<Typography variant='caption' className={styles.errorText}>
Error: {event.error ? event.error.message : 'This element cannot be deleted'}
</Typography>
</div>
)}
</div>
);
})}
</div>
);
};
/** Props accepted by {@link DeleteDialog}. */
export type DeleteDialogProps = {
title: string; // dialog heading text
entity: ENTITY; // API entity kind whose elements will be deleted
elementsSelecteds?: ELEMENTS; // elements chosen for deletion
onDeleted?(ids: string[]): void; // invoked with the ids whose delete state came back DELETED
onCloseDialog?: () => void; // extra callback run when the dialog closes
};
// Confirmation dialog that deletes the selected elements via useDelete and
// then shows per-element results.
// NOTE(review): the React.memo comparator `() => true` (bottom of this block)
// declares all prop updates equal, so the component never re-renders from
// prop changes; this looks deliberate (the selection is also frozen in
// useState below) but should be confirmed.
export const DeleteDialog: React.FC<DeleteDialogProps> = React.memo(
({ title, entity, elementsSelecteds, onDeleted, onCloseDialog }) => {
const styles = useStyles();
// Capture the initial selection; later prop values are intentionally ignored.
const [elements] = React.useState(elementsSelecteds);
const closeIconDialog = useIconDialog();
// Ids of the captured elements, recomputed only if `elements` changes.
const events = React.useMemo(() => (elements ? elements?.map(e => e.id) : []), [elements]);
const { isLoading, response, error, request } = useDelete(entity, events);
const hasElements = Array.isArray(elements) && elements.length > 0;
// True once the delete response carries a non-empty data array.
const hasResponse = !(!Array.isArray(response?.data) || response?.data?.length === 0);
const closeDialog = React.useCallback(() => {
closeIconDialog?.();
onCloseDialog?.();
}, [onCloseDialog, closeIconDialog]);
React.useEffect(() => {
// TODO: if any response reports a successful delete, reload the element list
if (!!response && hasResponse) {
const itemsDeleted = response.data.filter(ruleDelete => ruleDelete.state === 'DELETED').map(rule => rule.id);
if (itemsDeleted.length > 0) onDeleted?.(itemsDeleted);
}
}, [hasResponse, response, onDeleted]);
return (
<div className={styles.container}>
<DialogTitle aria-label='title' id='icon-dialog-title'>
{title}
</DialogTitle>
<DialogContent dividers={true} className={styles.dialogContent} id='icon-dialog-content'>
<EmptyMessage show={!hasElements && !hasResponse && !error} />
<DeleteList show={hasElements && !hasResponse && !error} elements={elements} isLoading={isLoading} />
<DeletedList elements={elements} responses={response?.data} />
<ErrorMessage error={error && error.error} />
</DialogContent>
<LoadingView show={isLoading} />
<Actions
close={closeDialog}
disableClose={isLoading}
disableDelete={!hasElements || hasResponse || !!error}
deleteElements={request}
/>
</div>
);
},
() => true
);
export default DeleteDialog;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.