code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __SLAVE_FLAGS_HPP__
#define __SLAVE_FLAGS_HPP__
#include <cstdint>
#include <string>
#include <stout/bytes.hpp>
#include <stout/duration.hpp>
#include <stout/json.hpp>
#include <stout/option.hpp>
#include <stout/path.hpp>
#include <mesos/module/module.hpp>
#include "logging/flags.hpp"
#include "messages/flags.hpp"
namespace mesos {
namespace internal {
namespace slave {
// Mesos agent (slave) command-line flag declarations. Flag names,
// descriptions and default values are bound in the `Flags` constructor
// (see flags.cpp); this header only declares storage for each flag.
class Flags : public virtual logging::Flags
{
public:
  Flags();

  bool version;
  Option<std::string> hostname;
  bool hostname_lookup;
  Option<std::string> resources;
  Option<std::string> resource_provider_config_dir;
  Option<std::string> disk_profile_adaptor;

  // Containerization / isolation.
  std::string isolation;
  std::string launcher;

  // Container image provisioning (Appc / Docker stores).
  Option<std::string> image_providers;
  Option<std::string> image_provisioner_backend;
  Option<ImageGcConfig> image_gc_config;
  std::string appc_simple_discovery_uri_prefix;
  std::string appc_store_dir;
  std::string docker_registry;
  std::string docker_store_dir;
  std::string docker_volume_checkpoint_dir;
  bool docker_ignore_runtime;

  std::string default_role;
  Option<std::string> attributes;

  // Fetcher cache.
  Bytes fetcher_cache_size;
  std::string fetcher_cache_dir;
  Duration fetcher_stall_timeout;

  // Agent directories.
  std::string work_dir;
  std::string runtime_dir;
  std::string launcher_dir;
  Option<std::string> hadoop_home;

  size_t max_completed_executors_per_framework;

#ifndef __WINDOWS__
  bool switch_user;
  Option<std::string> volume_gid_range;
#endif // __WINDOWS__

  Duration http_heartbeat_interval;
  std::string frameworks_home;  // TODO(benh): Make an Option.

  // Master registration / authentication backoff.
  Duration registration_backoff_factor;
  Duration authentication_backoff_factor;
  Duration authentication_timeout_min;
  Duration authentication_timeout_max;

  // Executor lifecycle.
  Option<JSON::Object> executor_environment_variables;
  Duration executor_registration_timeout;
  Duration executor_reregistration_timeout;
  Option<Duration> executor_reregistration_retry_interval;
  Duration executor_shutdown_grace_period;

#ifdef USE_SSL_SOCKET
  Option<Path> jwt_secret_key;
#endif // USE_SSL_SOCKET

  // Sandbox garbage collection.
  Duration gc_delay;
  double gc_disk_headroom;
  bool gc_non_executor_container_sandboxes;
  Duration disk_watch_interval;

  Option<std::string> container_logger;

  // Agent recovery.
  std::string reconfiguration_policy;
  std::string recover;
  Duration recovery_timeout;
  bool strict;
  Duration register_retry_interval_min;

#ifdef __linux__
  // Linux-only: cgroups, capabilities, namespaces, perf, systemd.
  Duration cgroups_destroy_timeout;
  std::string cgroups_hierarchy;
  std::string cgroups_root;
  bool cgroups_enable_cfs;
  bool cgroups_limit_swap;
  bool cgroups_cpu_enable_pids_and_tids_count;
  Option<std::string> cgroups_net_cls_primary_handle;
  Option<std::string> cgroups_net_cls_secondary_handles;
  Option<DeviceWhitelist> allowed_devices;
  Option<std::string> agent_subsystems;
  Option<std::string> host_path_volume_force_creation;
  Option<std::vector<unsigned int>> nvidia_gpu_devices;
  Option<std::string> perf_events;
  Duration perf_interval;
  Duration perf_duration;
  bool revocable_cpu_low_priority;
  bool systemd_enable_support;
  std::string systemd_runtime_directory;
  Option<CapabilityInfo> effective_capabilities;
  Option<CapabilityInfo> bounding_capabilities;
  Option<Bytes> default_shm_size;
  bool disallow_sharing_agent_ipc_namespace;
  bool disallow_sharing_agent_pid_namespace;
#endif // __linux__

  // Authorization / credentials.
  Option<Firewall> firewall_rules;
  Option<Path> credential;
  Option<ACLs> acls;

  // Containerizers.
  std::string containerizers;
  std::string docker;
  Option<std::string> docker_mesos_image;
  Duration docker_remove_delay;
  std::string sandbox_directory;
  Option<ContainerDNSInfo> default_container_dns;
  Option<ContainerInfo> default_container_info;

  // TODO(alexr): Remove this after the deprecation cycle (started in 1.0).
  Duration docker_stop_timeout;

  bool docker_kill_orphans;
  std::string docker_socket;
  Option<JSON::Object> docker_config;

#ifdef ENABLE_PORT_MAPPING_ISOLATOR
  // `network/port_mapping` isolator flags.
  uint16_t ephemeral_ports_per_container;
  Option<std::string> eth0_name;
  Option<std::string> lo_name;
  Option<Bytes> egress_rate_limit_per_container;
  bool egress_unique_flow_per_container;
  std::string egress_flow_classifier_parent;
  bool network_enable_socket_statistics_summary;
  bool network_enable_socket_statistics_details;
  bool network_enable_snmp_statistics;
#endif // ENABLE_PORT_MAPPING_ISOLATOR

#ifdef ENABLE_NETWORK_PORTS_ISOLATOR
  Duration container_ports_watch_interval;
  bool check_agent_port_range_only;
  bool enforce_container_ports;
  Option<std::string> container_ports_isolated_range;
#endif // ENABLE_NETWORK_PORTS_ISOLATOR

  // CNI networking.
  Option<std::string> network_cni_plugins_dir;
  Option<std::string> network_cni_config_dir;
  bool network_cni_root_dir_persist;
  bool network_cni_metrics;

  Duration container_disk_watch_interval;
  bool enforce_container_disk_quota;

  // Modules and pluggable components.
  Option<Modules> modules;
  Option<std::string> modulesDir;
  std::string authenticatee;
  std::string authorizer;

  // HTTP endpoint authentication.
  Option<std::string> http_authenticators;
  bool authenticate_http_readonly;
  bool authenticate_http_readwrite;
#ifdef USE_SSL_SOCKET
  bool authenticate_http_executors;
#endif // USE_SSL_SOCKET
  Option<Path> http_credentials;

  Option<std::string> hooks;
  Option<std::string> secret_resolver;

  // Oversubscription.
  Option<std::string> resource_estimator;
  Option<std::string> qos_controller;
  Duration qos_correction_interval_min;
  Duration oversubscribed_resources_interval;

  Option<std::string> master_detector;

#if ENABLE_XFS_DISK_ISOLATOR
  std::string xfs_project_range;
  bool xfs_kill_containers;
#endif // ENABLE_XFS_DISK_ISOLATOR

#if ENABLE_SECCOMP_ISOLATOR
  Option<std::string> seccomp_config_dir;
  Option<std::string> seccomp_profile_name;
#endif // ENABLE_SECCOMP_ISOLATOR

  bool http_command_executor;
  Option<SlaveCapabilities> agent_features;
  Option<DomainInfo> domain;

  // The following flags are executable specific (e.g., since we only
  // have one instance of libprocess per execution, we only want to
  // advertise the IP and port option once, here).
  Option<std::string> ip;
  uint16_t port;
  Option<std::string> advertise_ip;
  Option<std::string> advertise_port;
  Option<flags::SecurePathOrValue> master;
  bool memory_profiling;
  Duration zk_session_timeout;

  // Optional IP discover script that will set the slave's IP.
  // If set, its output is expected to be a valid parseable IP string.
  Option<std::string> ip_discovery_command;

  // IPv6 flags.
  //
  // NOTE: These IPv6 flags are currently input mechanisms
  // for the operator to specify v6 addresses on which containers
  // running on host network can listen. Mesos itself doesn't listen
  // or communicate over v6 addresses at this point.
  Option<std::string> ip6;

  // Similar to the `ip_discovery_command` this optional discover
  // script is expected to output a valid IPv6 string. Only one of the
  // two options `ip6` or `ip6_discovery_command` can be set at any
  // given point of time.
  Option<std::string> ip6_discovery_command;
};
} // namespace slave {
} // namespace internal {
} // namespace mesos {
#endif // __SLAVE_FLAGS_HPP__
| gsantovena/mesos | src/slave/flags.hpp | C++ | apache-2.0 | 7,795 |
# Copyright 2015-2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from azure_common import BaseTest, arm_template
class NetworkSecurityGroupTest(BaseTest):
    """Tests for the ``azure.networksecuritygroup`` resource.

    Every test targets the single NSG named ``c7n-nsg`` deployed by the
    ``networksecuritygroup.json`` ARM template, so the common policy
    boilerplate is factored into ``_run_nsg_policy``.
    """

    # Value filter selecting the NSG provisioned by the ARM template.
    NSG_NAME_FILTER = {
        'type': 'value',
        'key': 'name',
        'op': 'eq',
        'value_type': 'normalize',
        'value': 'c7n-nsg'}

    def setUp(self):
        super(NetworkSecurityGroupTest, self).setUp()

    def _run_nsg_policy(self, extra_filters=None, actions=None):
        """Load and run a policy against the test NSG.

        :param extra_filters: filters appended after the name filter
            (e.g. ingress/egress rule filters).
        :param actions: optional list of policy actions.
        :return: list of resources matched by the policy.
        """
        policy = {
            'name': 'test-azure-nsg',
            'resource': 'azure.networksecuritygroup',
            # Copy the class-level dict so policies can never mutate it.
            'filters': [dict(self.NSG_NAME_FILTER)] + list(extra_filters or []),
        }
        if actions is not None:
            policy['actions'] = actions
        return self.load_policy(policy).run()

    @arm_template('networksecuritygroup.json')
    def test_find_by_name(self):
        # The template deploys exactly one NSG with this name.
        resources = self._run_nsg_policy()
        self.assertEqual(len(resources), 1)

    @arm_template('networksecuritygroup.json')
    def test_allow_single_port(self):
        resources = self._run_nsg_policy([
            {'type': 'ingress',
             'ports': '80',
             'access': 'Allow'}])
        self.assertEqual(len(resources), 1)

    @arm_template('networksecuritygroup.json')
    def test_allow_multiple_ports(self):
        resources = self._run_nsg_policy([
            {'type': 'ingress',
             'ports': '80,8080-8084,88-90',
             'match': 'all',
             'access': 'Allow'}])
        self.assertEqual(len(resources), 1)

    @arm_template('networksecuritygroup.json')
    def test_allow_ports_range_any(self):
        resources = self._run_nsg_policy([
            {'type': 'ingress',
             'ports': '40-100',
             'match': 'any',
             'access': 'Allow'}])
        self.assertEqual(len(resources), 1)

    @arm_template('networksecuritygroup.json')
    def test_deny_port(self):
        resources = self._run_nsg_policy([
            {'type': 'ingress',
             'ports': '8086',
             'access': 'Deny'}])
        self.assertEqual(len(resources), 1)

    @arm_template('networksecuritygroup.json')
    def test_egress_policy_protocols(self):
        # A TCP egress rule for port 22 matches...
        resources = self._run_nsg_policy([
            {'type': 'egress',
             'ports': '22',
             'ipProtocol': 'TCP',
             'access': 'Allow'}])
        self.assertEqual(len(resources), 1)

        # ...but there is no corresponding UDP rule.
        resources = self._run_nsg_policy([
            {'type': 'egress',
             'ports': '22',
             'ipProtocol': 'UDP',
             'access': 'Allow'}])
        self.assertEqual(len(resources), 0)

    @arm_template('networksecuritygroup.json')
    def test_open_ports(self):
        # First run: ports 1000-1100 are denied, so the NSG matches and
        # the `open` action is applied to it.
        resources = self._run_nsg_policy(
            [{'type': 'ingress',
              'ports': '1000-1100',
              'match': 'any',
              'access': 'Deny'}],
            actions=[
                {'type': 'open',
                 'ports': '1000-1100',
                 'direction': 'Inbound'}])
        self.assertEqual(len(resources), 1)

        # Second run: the action above opened those ports, so no NSG
        # should match the Deny filter any longer.
        resources = self._run_nsg_policy(
            [{'type': 'ingress',
              'ports': '1000-1100',
              'match': 'any',
              'access': 'Deny'}])
        self.assertEqual(len(resources), 0)
| taohungyang/cloud-custodian | tools/c7n_azure/tests/test_networksecuritygroup.py | Python | apache-2.0 | 6,524 |
/*!
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var path = require('path');
var uniq = require('array-uniq');
var globby = require('globby');
var spawn = require('child_process').spawnSync;
require('shelljs/global');
/**
* google-cloud-node root directory.. useful in case we need to cd
*/
var ROOT_DIR = path.join(__dirname, '..');
module.exports.ROOT_DIR = ROOT_DIR;
/**
 * Helper class to make install dependencies + running tests easier to read
 * and less error prone.
 *
 * @class Module
 * @param {string} name - The module name (e.g. common, bigquery, etc.)
 */
function Module(name) {
  // Guard: allow calling without `new` (Module('x') behaves like new Module('x')).
  if (!(this instanceof Module)) {
    return new Module(name);
  }

  this.name = name;
  this.directory = path.join(ROOT_DIR, 'packages', name);

  // Reads the module's package.json synchronously via require().
  var pkgJson = require(path.join(this.directory, 'package.json'));

  this.packageName = pkgJson.name;
  // Only devDependencies are tracked; consumed by hasDeps()/getDependents().
  this.dependencies = Object.keys(pkgJson.devDependencies || {});
}

/**
 * Umbrella module name.
 *
 * @static
 */
Module.UMBRELLA = 'google-cloud';
/**
 * Retrieves a list of modules that are ahead of origin/master. We do this by
 * creating a temporary remote branch that points official master branch.
 * We then do a git diff against the two to get a list of files. From there we
 * only care about either JS or JSON files being changed.
 *
 * @static
 * @return {Module[]} modules - The updated modules.
 */
Module.getUpdated = function() {
  var command = 'git';
  var args = ['diff'];

  if (!isPushToMaster()) {
    // PR/branch build: diff against the official repo's master via a
    // temporary remote (a fork's origin/master may be stale).
    run([
      'git remote add temp',
      'https://github.com/GoogleCloudPlatform/google-cloud-node.git'
    ]);

    run('git fetch -q temp');
    args.push('HEAD', 'temp/master');
  } else {
    // Push to master: only the last commit is new.
    args.push('HEAD^');
  }

  // Only file names are needed to determine which packages changed.
  args.push('--name-only');

  console.log(command, args.join(' '));

  // There's a Windows bug where child_process.exec exits early on `git diff`
  // which in turn does not return all of the files that have changed. This can
  // cause a false positive when checking for package changes on AppVeyor
  var output = spawn(command, args, {
    cwd: ROOT_DIR,
    stdio: null
  });

  if (output.status || output.error) {
    console.error(output.error || output.stderr.toString());
    exit(output.status || 1);
  }

  var files = output.stdout.toString();

  console.log(files);

  // Map changed files like "packages/<name>/..." to the package name.
  // The unanchored /\.js/ also matches ".json" files.
  var modules = files
    .trim()
    .split('\n')
    .filter(function(file) {
      return /^packages\/.+\.js/.test(file);
    })
    .map(function(file) {
      return file.split('/')[1];
    });

  return uniq(modules).map(Module);
};
/**
 * Builds docs for all modules
 *
 * @static
 */
Module.buildDocs = function() {
  run('npm run docs', { cwd: ROOT_DIR });
};

/**
 * Returns a list containing ALL the modules.
 *
 * @static
 * @return {Module[]} modules - All of em'!
 */
Module.getAll = function() {
  // `cd` comes from shelljs/global; each directory under packages/ is a module.
  cd(ROOT_DIR);

  return globby
    .sync('*', { cwd: 'packages' })
    .map(Module);
};

/**
 * Returns a list of modules that are dependent on one or more of the modules
 * specified.
 *
 * @static
 * @param {Module[]} modules - The dependency modules.
 * @return {Module[]} modules - The dependent modules.
 */
Module.getDependents = function(modules) {
  return Module.getAll().filter(function(mod) {
    return mod.hasDeps(modules);
  });
};

/**
 * Installs dependencies for all the modules!
 *
 * @static
 */
Module.installAll = function() {
  run('npm run postinstall', { cwd: ROOT_DIR });
};

/**
 * Generates an lcov coverage report for the specified modules.
 *
 * @static
 */
Module.runCoveralls = function() {
  run('npm run coveralls', { cwd: ROOT_DIR });
};
/**
 * Installs this modules dependencies via `npm install`
 */
Module.prototype.install = function() {
  run('npm install', { cwd: this.directory });
};

/**
 * Creates/uses symlink for a module (depending on if module was provided)
 * via `npm link`
 *
 * @param {Module=} mod - The module to use with `npm link ${mod.packageName}`
 */
Module.prototype.link = function(mod) {
  // Without `mod` this registers the module itself as a global link.
  run(['npm link', mod && mod.packageName || ''], {
    cwd: this.directory
  });
};

/**
 * Runs unit tests for this module via `npm run test`
 */
Module.prototype.runUnitTests = function() {
  run('npm run test', { cwd: this.directory });
};

/**
 * Runs snippet tests for this module.
 */
Module.prototype.runSnippetTests = function() {
  // The snippet test runner reads the target module from the environment.
  process.env.TEST_MODULE = this.name;
  run('npm run snippet-test', { cwd: ROOT_DIR });
  delete process.env.TEST_MODULE;
};

/**
 * Runs system tests for this module via `npm run system-test`
 */
Module.prototype.runSystemTests = function() {
  run('npm run system-test', { cwd: this.directory });
};
/**
 * Checks to see if this module has one or more of the supplied modules
 * as a dev dependency.
 *
 * @param {Module[]} modules - The modules to check for.
 * @return {boolean}
 */
Module.prototype.hasDeps = function(modules) {
  var dependencies = this.dependencies;

  // True if any candidate's package name appears in our devDependencies.
  return modules.some(function(mod) {
    return dependencies.indexOf(mod.packageName) !== -1;
  });
};
module.exports.Module = Module;

/**
 * Exec's command via shelljs' `exec` (synchronous).
 * By default all output will be piped to the console unless `stdio`
 * is overridden.
 *
 * Exits the process with the command's exit code on failure.
 *
 * @param {string} command - The command to run.
 * @param {object=} options - Options to pass to `exec`.
 * @return {string|null}
 */
function run(command, options) {
  options = options || {};

  // Commands may be supplied as argument arrays; join into one string.
  if (Array.isArray(command)) {
    command = command.join(' ');
  }

  console.log(command);
  // `exec` and `exit` come from shelljs/global (required at the top).
  var response = exec(command, options);

  if (response.code) {
    exit(response.code);
  }

  return response.stdout;
}

module.exports.run = run;
/**
 * Used to make committing to git easier/etc..
 *
 * @param {string=} cwd - Directory to commit/add/push from.
 */
function Git(cwd) {
  this.cwd = cwd || ROOT_DIR;
}

// We'll use this for cloning/submoduling/pushing purposes on CI
// NOTE(review): the ${...} placeholders are literal here — presumably
// expanded by the shell from CI environment variables when the command
// runs; confirm on the CI platforms in use.
Git.REPO = 'https://${GH_OAUTH_TOKEN}@github.com/${GH_OWNER}/${GH_PROJECT_NAME}';

/**
 * Creates a submodule in the root directory in quiet mode.
 *
 * @param {string} branch - The branch to use.
 * @param {string=} alias - Name of the folder that contains submodule.
 * @return {Git} A Git instance scoped to the submodule's directory.
 */
Git.prototype.submodule = function(branch, alias) {
  alias = alias || branch;

  run(['git submodule add -q -b', branch, Git.REPO, alias], {
    cwd: this.cwd
  });

  return new Git(path.join(this.cwd, alias));
};
/**
 * Check to see if git has any files it can commit.
 *
 * @return {boolean}
 */
Git.prototype.hasUpdates = function() {
  // `--porcelain` prints one line per changed file and nothing when clean.
  var output = run('git status --porcelain', {
    cwd: this.cwd
  });

  return !!output && output.trim().length > 0;
};

/**
 * Sets git user
 *
 * @param {string} name - User name
 * @param {string} email - User email
 */
Git.prototype.setUser = function(name, email) {
  run(['git config --global user.name', name], {
    cwd: this.cwd
  });

  run(['git config --global user.email', email], {
    cwd: this.cwd
  });
};

/**
 * Adds all files passed in via git add
 *
 * @param {...string} file - File to add
 */
Git.prototype.add = function() {
  var files = [].slice.call(arguments);
  var command = ['git add'].concat(files);

  run(command, {
    cwd: this.cwd
  });
};

/**
 * Commits to git via commit message.
 *
 * @param {string} message - The commit message.
 */
Git.prototype.commit = function(message) {
  // "[ci skip]" keeps CI from re-building the automated commit.
  run(['git commit -m', '"' + message + ' [ci skip]"'], {
    cwd: this.cwd
  });
};

/**
 * Runs git status and pushes changes in quiet mode.
 *
 * @param {string} branch - The branch to push to.
 */
Git.prototype.push = function(branch) {
  run('git status', {
    cwd: this.cwd
  });

  run(['git push -q', Git.REPO, branch], {
    cwd: this.cwd
  });
};
module.exports.git = new Git();

/**
 * The name of the branch currently being tested.
 *
 * @alias ci.BRANCH
 */
var BRANCH = process.env.TRAVIS_BRANCH || process.env.APPVEYOR_REPO_BRANCH;

/**
 * The pull request number.
 *
 * @alias ci.PR_NUMBER;
 */
var PR_NUMBER = process.env.TRAVIS_PULL_REQUEST ||
  process.env.APPVEYOR_PULL_REQUEST_NUMBER;

/**
 * Checks to see if this is a pull request or not.
 *
 * A non-numeric PR_NUMBER (e.g. unset, or a non-PR placeholder string)
 * makes parseInt return NaN, so IS_PR ends up false.
 *
 * @alias ci.IS_PR
 */
var IS_PR = !isNaN(parseInt(PR_NUMBER, 10));
/**
 * Returns the tag name (assuming this is a release)
 *
 * Checks the Travis environment first, then AppVeyor.
 *
 * @alias ci.getTagName
 * @return {string|null}
 */
function getTagName() {
  var travisTag = process.env.TRAVIS_TAG;
  var appveyorTag = process.env.APPVEYOR_REPO_TAG_NAME;

  return travisTag ? travisTag : appveyorTag;
}
/**
 * Let's us know whether or not this is a release.
 *
 * @alias ci.isReleaseBuild
 * @return {string|null}
 */
function isReleaseBuild() {
  return !!getTagName();
}

/**
 * Returns name/version of release.
 *
 * Tags look like "<name>-<version>" (e.g. "bigtable-0.2.0"), or just
 * "<version>" for the umbrella module.
 *
 * NOTE(review): a name containing dashes (e.g. "resource-manager-0.1.0")
 * would yield name "manager" here — confirm the tag naming scheme.
 *
 * @alias ci.getRelease
 * @return {object|null}
 */
function getRelease() {
  var tag = getTagName();

  if (!tag) {
    return null;
  }

  var parts = tag.split('-');

  return {
    version: parts.pop(),
    name: parts.pop() || Module.UMBRELLA
  };
}

/**
 * Checks to see if this is a push to master.
 *
 * @alias ci.isPushToMaster
 * @return {boolean}
 */
function isPushToMaster() {
  return BRANCH === 'master' && !IS_PR;
}

/**
 * Checks to see if this the CI's first pass (Travis only)
 *
 * Travis job numbers look like "<build>.<job>"; job ".1" is the first.
 *
 * @alias ci.isFirstPass
 * @return {boolean}
 */
function isFirstPass() {
  return /\.1$/.test(process.env.TRAVIS_JOB_NUMBER);
}

module.exports.ci = {
  BRANCH: BRANCH,
  IS_PR: IS_PR,
  PR_NUMBER: PR_NUMBER,
  getTagName: getTagName,
  isReleaseBuild: isReleaseBuild,
  getRelease: getRelease,
  isPushToMaster: isPushToMaster,
  isFirstPass: isFirstPass
};
| tcrognon/google-cloud-node | scripts/helpers.js | JavaScript | apache-2.0 | 10,052 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.directconnect.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import com.amazonaws.services.directconnect.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DescribeConnectionsOnInterconnectResult JSON Unmarshaller
 *
 * <p>Walks the JSON token stream and populates a
 * DescribeConnectionsOnInterconnectResult from fields found at the result
 * object's nesting depth (currently only "connections").</p>
 */
public class DescribeConnectionsOnInterconnectResultJsonUnmarshaller
        implements
        Unmarshaller<DescribeConnectionsOnInterconnectResult, JsonUnmarshallerContext> {

    public DescribeConnectionsOnInterconnectResult unmarshall(
            JsonUnmarshallerContext context) throws Exception {
        DescribeConnectionsOnInterconnectResult describeConnectionsOnInterconnectResult = new DescribeConnectionsOnInterconnectResult();

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of the result object live one level below the current depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means there is no result object at all.
        if (token == VALUE_NULL)
            return null;

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("connections", targetDepth)) {
                    context.nextToken();
                    describeConnectionsOnInterconnectResult
                            .setConnections(new ListUnmarshaller<Connection>(
                                    ConnectionJsonUnmarshaller.getInstance())
                                    .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out of the result object.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return describeConnectionsOnInterconnectResult;
    }

    private static DescribeConnectionsOnInterconnectResultJsonUnmarshaller instance;

    // NOTE: lazy init is unsynchronized; benign since the unmarshaller holds
    // no instance state, so a race at worst creates an extra instance.
    public static DescribeConnectionsOnInterconnectResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeConnectionsOnInterconnectResultJsonUnmarshaller();
        return instance;
    }
}
| dump247/aws-sdk-java | aws-java-sdk-directconnect/src/main/java/com/amazonaws/services/directconnect/model/transform/DescribeConnectionsOnInterconnectResultJsonUnmarshaller.java | Java | apache-2.0 | 3,202 |
package com.laimiux.rxactivity;
import android.app.Activity;
/**
 * Immutable value describing a single activity lifecycle callback:
 * which callback fired ({@link Kind}) and the {@link Activity} it
 * fired on.
 */
public abstract class LifecycleEvent {
  private final Kind kind;
  private final Activity activity;

  /**
   * Protected: this class is abstract, so only subclasses can ever
   * invoke this constructor.
   *
   * @param kind the lifecycle callback that occurred.
   * @param activity the activity the callback was invoked on.
   */
  protected LifecycleEvent(Kind kind, Activity activity) {
    this.kind = kind;
    this.activity = activity;
  }

  /** @return the activity associated with this event. */
  public Activity activity() {
    return activity;
  }

  /** @return which lifecycle callback this event represents. */
  public Kind kind() {
    return kind;
  }

  /** The activity lifecycle callbacks that can be observed. */
  public enum Kind {
    CREATE,
    START,
    RESUME,
    PAUSE,
    SAVE_INSTANCE,
    STOP,
    DESTROY
  }
}
| Laimiux/RxActivity | rxactivity/src/main/java/com/laimiux/rxactivity/LifecycleEvent.java | Java | apache-2.0 | 495 |
<?php
/*
+-------------------------------------------------------------------------+
| Engine of the Enigma Plugin |
| |
| This program is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License version 2 |
| as published by the Free Software Foundation. |
| |
| This program is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with this program; if not, write to the Free Software Foundation, Inc., |
| 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| |
+-------------------------------------------------------------------------+
| Author: Aleksander Machniak <alec@alec.pl> |
+-------------------------------------------------------------------------+
*/
/*
RFC2440: OpenPGP Message Format
RFC3156: MIME Security with OpenPGP
RFC3851: S/MIME
*/
class enigma_engine
{
    // Roundcube framework and plugin handles.
    private $rc;
    private $enigma;
    // Crypto drivers, created on demand by load_pgp_driver()/load_smime_driver().
    private $pgp_driver;
    private $smime_driver;

    // Results of processing the current message, keyed by MIME part id;
    // read by the plugin's UI layer.
    public $decryptions = array();
    public $signatures = array();
    public $signed_parts = array();

    // Lifetime of cached key passwords (presumably seconds; consumed by
    // get_passwords() — confirm there).
    const PASSWORD_TIME = 120;

    /**
     * Plugin initialization.
     *
     * @param enigma $enigma Plugin instance
     */
    function __construct($enigma)
    {
        $this->rc = rcmail::get_instance();
        $this->enigma = $enigma;

        // this will remove passwords from session after some time
        $this->get_passwords();
    }
    /**
     * PGP driver initialization.
     *
     * Instantiates the driver class named by the 'enigma_pgp_driver'
     * setting (default 'gnupg') and calls its init(); any failure aborts
     * the request via rcube::raise_error(). No-op if already loaded.
     */
    function load_pgp_driver()
    {
        if ($this->pgp_driver) {
            return;
        }

        $driver = 'enigma_driver_' . $this->rc->config->get('enigma_pgp_driver', 'gnupg');
        $username = $this->rc->user->get_username();

        // Load driver
        $this->pgp_driver = new $driver($username);

        // NOTE(review): `new` never yields a falsy value in PHP (failures
        // throw), so this branch looks unreachable — confirm before relying
        // on it; real errors surface via init() below.
        if (!$this->pgp_driver) {
            rcube::raise_error(array(
                'code' => 600, 'type' => 'php',
                'file' => __FILE__, 'line' => __LINE__,
                'message' => "Enigma plugin: Unable to load PGP driver: $driver"
            ), true, true);
        }

        // Initialise driver
        $result = $this->pgp_driver->init();

        if ($result instanceof enigma_error) {
            rcube::raise_error(array(
                'code' => 600, 'type' => 'php',
                'file' => __FILE__, 'line' => __LINE__,
                'message' => "Enigma plugin: ".$result->getMessage()
            ), true, true);
        }
    }

    /**
     * S/MIME driver initialization.
     *
     * Mirrors load_pgp_driver() for the 'enigma_smime_driver' setting
     * (default 'phpssl').
     */
    function load_smime_driver()
    {
        if ($this->smime_driver) {
            return;
        }

        $driver = 'enigma_driver_' . $this->rc->config->get('enigma_smime_driver', 'phpssl');
        $username = $this->rc->user->get_username();

        // Load driver
        $this->smime_driver = new $driver($username);

        // NOTE(review): same apparently-dead check as in load_pgp_driver().
        if (!$this->smime_driver) {
            rcube::raise_error(array(
                'code' => 600, 'type' => 'php',
                'file' => __FILE__, 'line' => __LINE__,
                'message' => "Enigma plugin: Unable to load S/MIME driver: $driver"
            ), true, true);
        }

        // Initialise driver
        $result = $this->smime_driver->init();

        if ($result instanceof enigma_error) {
            rcube::raise_error(array(
                'code' => 600, 'type' => 'php',
                'file' => __FILE__, 'line' => __LINE__,
                'message' => "Enigma plugin: ".$result->getMessage()
            ), true, true);
        }
    }
    /**
     * Handler for message_part_structure hook.
     * Called for every part of the message.
     *
     * @param array Original parameters
     *
     * @return array Modified parameters
     */
    function part_structure($p)
    {
        // Dispatch on the part's MIME type; each parser mutates $p in place.
        if ($p['mimetype'] == 'text/plain' || $p['mimetype'] == 'application/pgp') {
            $this->parse_plain($p);
        }
        else if ($p['mimetype'] == 'multipart/signed') {
            $this->parse_signed($p);
        }
        else if ($p['mimetype'] == 'multipart/encrypted') {
            $this->parse_encrypted($p);
        }
        else if ($p['mimetype'] == 'application/pkcs7-mime') {
            $this->parse_encrypted($p);
        }

        return $p;
    }

    /**
     * Handler for message_part_body hook.
     *
     * Decrypts the part body on the fly for parts flagged as
     * `need_decryption` (set elsewhere, see parse_plain_encrypted()).
     *
     * @param array Original parameters
     *
     * @return array Modified parameters
     */
    function part_body($p)
    {
        // encrypted attachment, see parse_plain_encrypted()
        if ($p['part']->need_decryption && $p['part']->body === null) {
            $storage = $this->rc->get_storage();
            $body = $storage->get_message_part($p['object']->uid, $p['part']->mime_id, $p['part'], null, null, true, 0, false);
            $result = $this->pgp_decrypt($body);

            // @TODO: what to do on error?
            if ($result === true) {
                // Replace the part's body with the decrypted content.
                $p['part']->body = $body;
                $p['part']->size = strlen($body);
                $p['part']->body_modified = true;
            }
        }

        return $p;
    }

    /**
     * Handler for plain/text message.
     *
     * Detects inline ("clearsigned"/armored) PGP content by its armor
     * header line and delegates to the matching parser.
     *
     * @param array Reference to hook's parameters
     */
    function parse_plain(&$p)
    {
        $part = $p['structure'];

        // exit, if we're already inside a decrypted message
        if ($part->encrypted) {
            return;
        }

        // Get message body from IMAP server
        $body = $this->get_part_body($p['object'], $part->mime_id);

        // @TODO: big message body could be a file resource
        // PGP signed message
        if (preg_match('/^-----BEGIN PGP SIGNED MESSAGE-----/', $body)) {
            $this->parse_plain_signed($p, $body);
        }
        // PGP encrypted message
        else if (preg_match('/^-----BEGIN PGP MESSAGE-----/', $body)) {
            $this->parse_plain_encrypted($p, $body);
        }
    }
    /**
     * Handler for multipart/signed message.
     *
     * Distinguishes S/MIME (pkcs7-signature second part) from PGP/MIME
     * (RFC 3156) and delegates accordingly.
     *
     * @param array Reference to hook's parameters
     */
    function parse_signed(&$p)
    {
        $struct = $p['structure'];

        // S/MIME
        if ($struct->parts[1] && $struct->parts[1]->mimetype == 'application/pkcs7-signature') {
            $this->parse_smime_signed($p);
        }
        // PGP/MIME: RFC3156
        // The multipart/signed body MUST consist of exactly two parts.
        // The first part contains the signed data in MIME canonical format,
        // including a set of appropriate content headers describing the data.
        // The second body MUST contain the PGP digital signature. It MUST be
        // labeled with a content type of "application/pgp-signature".
        else if ($struct->ctype_parameters['protocol'] == 'application/pgp-signature'
            && count($struct->parts) == 2
            && $struct->parts[1] && $struct->parts[1]->mimetype == 'application/pgp-signature'
        ) {
            $this->parse_pgp_signed($p);
        }
    }

    /**
     * Handler for multipart/encrypted message.
     *
     * Distinguishes S/MIME (application/pkcs7-mime container) from
     * PGP/MIME (RFC 3156) and delegates accordingly.
     *
     * @param array Reference to hook's parameters
     */
    function parse_encrypted(&$p)
    {
        $struct = $p['structure'];

        // S/MIME
        if ($struct->mimetype == 'application/pkcs7-mime') {
            $this->parse_smime_encrypted($p);
        }
        // PGP/MIME: RFC3156
        // The multipart/encrypted MUST consist of exactly two parts. The first
        // MIME body part must have a content type of "application/pgp-encrypted".
        // This body contains the control information.
        // The second MIME body part MUST contain the actual encrypted data. It
        // must be labeled with a content type of "application/octet-stream".
        else if ($struct->ctype_parameters['protocol'] == 'application/pgp-encrypted'
            && count($struct->parts) == 2
            && $struct->parts[0] && $struct->parts[0]->mimetype == 'application/pgp-encrypted'
            && $struct->parts[1] && $struct->parts[1]->mimetype == 'application/octet-stream'
        ) {
            $this->parse_pgp_encrypted($p);
        }
    }
    /**
     * Handler for plain signed message.
     * Excludes message and signature bodies and verifies signature.
     *
     * @param array Reference to hook's parameters
     * @param string Message (part) body
     */
    private function parse_plain_signed(&$p, $body)
    {
        $this->load_pgp_driver();
        $part = $p['structure'];

        // Verify signature
        if ($this->rc->action == 'show' || $this->rc->action == 'preview') {
            $sig = $this->pgp_verify($body);
        }

        // @TODO: Handle big bodies using (temp) files
        // In this way we can use fgets on string as on file handle
        $fh = fopen('php://memory', 'br+');
        // @TODO: fopen/fwrite errors handling
        if ($fh) {
            fwrite($fh, $body);
            rewind($fh);
        }

        $body = $part->body = null;
        $part->body_modified = true;

        // Extract body (and signature?)
        // The first line (the armor header) only switches body from null
        // to ''; subsequent lines are copied until the signature block.
        while (!feof($fh)) {
            $line = fgets($fh, 1024);

            if ($part->body === null)
                $part->body = '';
            else if (preg_match('/^-----BEGIN PGP SIGNATURE-----/', $line))
                break;
            else
                $part->body .= $line;
        }

        // Remove "Hash" Armor Headers
        $part->body = preg_replace('/^.*\r*\n\r*\n/', '', $part->body);
        // de-Dash-Escape (RFC2440)
        $part->body = preg_replace('/(^|\n)- -/', '\\1-', $part->body);

        // Store signature data for display
        if (!empty($sig)) {
            $this->signed_parts[$part->mime_id] = $part->mime_id;
            $this->signatures[$part->mime_id] = $sig;
        }

        fclose($fh);
    }
/**
 * Handler for PGP/MIME signed message.
 * Verifies the detached signature (second sub-part) against the raw
 * first sub-part and records the result for display.
 *
 * @param array Reference to hook's parameters
 */
private function parse_pgp_signed(&$p)
{
    // Verify signature only when the message is actually being displayed
    if ($this->rc->action == 'show' || $this->rc->action == 'preview') {
        $this->load_pgp_driver();
        $struct = $p['structure'];

        $msg_part = $struct->parts[0];
        $sig_part = $struct->parts[1];

        // Get bodies
        // Note: The first part body need to be full part body with headers
        // it also cannot be decoded
        $msg_body = $this->get_part_body($p['object'], $msg_part->mime_id, true);
        $sig_body = $this->get_part_body($p['object'], $sig_part->mime_id);

        // Verify
        $sig = $this->pgp_verify($msg_body, $sig_body);

        // Store signature data for display (keyed by the multipart's id)
        $this->signatures[$struct->mime_id] = $sig;

        // Message can be multipart (assign signature to each subpart)
        if (!empty($msg_part->parts)) {
            foreach ($msg_part->parts as $part)
                $this->signed_parts[$part->mime_id] = $struct->mime_id;
        }
        else {
            $this->signed_parts[$msg_part->mime_id] = $struct->mime_id;
        }

        // Remove signature file from attachments list (?)
        unset($struct->parts[1]);
    }
}
/**
 * Handler for S/MIME signed message.
 * Verifies signature.
 *
 * NOTE: currently disabled — the function returns immediately (see the
 * early return below); the remaining code is kept as a sketch for the
 * future S/MIME implementation.
 *
 * @param array Reference to hook's parameters
 */
private function parse_smime_signed(&$p)
{
    return; // @TODO

    // --- dead code below until the S/MIME driver is implemented ---

    // Verify signature
    if ($this->rc->action == 'show' || $this->rc->action == 'preview') {
        $this->load_smime_driver();

        $struct = $p['structure'];
        $msg_part = $struct->parts[0];

        // Verify
        $sig = $this->smime_driver->verify($struct, $p['object']);

        // Store signature data for display
        $this->signatures[$struct->mime_id] = $sig;

        // Message can be multipart (assign signature to each subpart)
        if (!empty($msg_part->parts)) {
            foreach ($msg_part->parts as $part)
                $this->signed_parts[$part->mime_id] = $struct->mime_id;
        }
        else {
            $this->signed_parts[$msg_part->mime_id] = $struct->mime_id;
        }

        // Remove signature file from attachments list
        unset($struct->parts[1]);
    }
}
/**
 * Handler for plain (inline PGP) encrypted message.
 * Decrypts the body in place and flags .pgp attachments for
 * on-demand decryption.
 *
 * @param array Reference to hook's parameters
 * @param string Message (part) body
 */
private function parse_plain_encrypted(&$p, $body)
{
    $this->load_pgp_driver();
    $part = $p['structure'];

    // Decrypt ($body is replaced with plain text on success)
    $result = $this->pgp_decrypt($body);

    // Store decryption status
    $this->decryptions[$part->mime_id] = $result;

    // Parse decrypted message
    if ($result === true) {
        $part->body = $body;
        $part->body_modified = true;
        $part->encrypted = true;

        // Encrypted plain message may contain encrypted attachments
        // in such case attachments have .pgp extension and application/octet-stream.
        // This is what happens when you select "Encrypt each attachment separately
        // and send the message using inline PGP" in Thunderbird's Enigmail.

        // find parent part ID
        if (strpos($part->mime_id, '.')) {
            $items = explode('.', $part->mime_id);
            array_pop($items);
            $parent = implode('.', $items);
        }
        else {
            $parent = 0;
        }

        if ($p['object']->mime_parts[$parent]) {
            // Bug fix: the loop variable must NOT be named $p — $p is a
            // reference to the hook's parameters, and `foreach (... as $p)`
            // would overwrite the caller's array with each part object.
            foreach ((array)$p['object']->mime_parts[$parent]->parts as $attach) {
                if ($attach->disposition == 'attachment' && $attach->mimetype == 'application/octet-stream'
                    && preg_match('/^(.*)\.pgp$/i', $attach->filename, $m)
                ) {
                    // modify filename (strip the .pgp extension)
                    $attach->filename = $m[1];
                    // flag the part, it will be decrypted when needed
                    $attach->need_decryption = true;
                    // disable caching
                    $attach->body_modified = true;
                }
            }
        }
    }
}
/**
 * Handler for PGP/MIME encrypted message.
 * Decrypts the second sub-part and, on success, replaces the original
 * message structure with the parsed decrypted content.
 *
 * @param array Reference to hook's parameters
 */
private function parse_pgp_encrypted(&$p)
{
    $this->load_pgp_driver();

    $struct = $p['structure'];
    // per RFC3156 the second part holds the actual encrypted data
    $part = $struct->parts[1];

    // Get body
    $body = $this->get_part_body($p['object'], $part->mime_id);

    // Decrypt ($body is replaced with plain text on success)
    $result = $this->pgp_decrypt($body);

    if ($result === true) {
        // Parse decrypted message
        $struct = $this->parse_body($body);

        // Modify original message structure
        $this->modify_structure($p, $struct);

        // Attach the decryption message to all parts
        $this->decryptions[$struct->mime_id] = $result;
        foreach ((array) $struct->parts as $sp) {
            $this->decryptions[$sp->mime_id] = $result;
        }
    }
    else {
        // record the error; keep the original structure
        $this->decryptions[$part->mime_id] = $result;

        // Make sure decryption status message will be displayed
        $part->type = 'content';
        $p['object']->parts[] = $part;
    }
}
/**
 * Handler for S/MIME encrypted message.
 * Not implemented yet — intentionally a no-op placeholder.
 *
 * @param array Reference to hook's parameters
 */
private function parse_smime_encrypted(&$p)
{
    // $this->load_smime_driver();
}
/**
 * PGP signature verification.
 * Delegates to the PGP driver and logs unexpected failures
 * (a missing public key is a normal condition and is not logged).
 *
 * @param mixed Message body
 * @param mixed Signature body (for MIME messages)
 *
 * @return mixed enigma_signature or enigma_error
 */
private function pgp_verify(&$msg_body, $sig_body=null)
{
    // @TODO: Handle big bodies using (temp) files
    // @TODO: caching of verification result
    $sig = $this->pgp_driver->verify($msg_body, $sig_body);

    $failed = $sig instanceof enigma_error;
    if ($failed && $sig->getCode() != enigma_error::E_KEYNOTFOUND) {
        $error = array(
            'code' => 600, 'type' => 'php',
            'file' => __FILE__, 'line' => __LINE__,
            'message' => "Enigma plugin: " . $sig->getMessage()
        );
        rcube::raise_error($error, true, false);
    }

    return $sig;
}
/**
 * PGP message decryption.
 * On success the referenced $msg_body is REPLACED with the decrypted
 * plain text; on failure it is left untouched.
 *
 * @param mixed Message body (modified in place on success)
 *
 * @return mixed True or enigma_error
 */
private function pgp_decrypt(&$msg_body)
{
    // @TODO: Handle big bodies using (temp) files
    // @TODO: caching of verification result
    $keys = $this->get_passwords();
    $result = $this->pgp_driver->decrypt($msg_body, $keys);

    if ($result instanceof enigma_error) {
        $err_code = $result->getCode();
        // missing key / bad passphrase are expected user-level conditions,
        // only log unexpected driver errors
        if (!in_array($err_code, array(enigma_error::E_KEYNOTFOUND, enigma_error::E_BADPASS)))
            rcube::raise_error(array(
                'code' => 600, 'type' => 'php',
                'file' => __FILE__, 'line' => __LINE__,
                'message' => "Enigma plugin: " . $result->getMessage()
            ), true, false);
        return $result;
    }

    // replace the caller's body with the decrypted text
    $msg_body = $result;

    return true;
}
/**
 * PGP keys listing.
 * Queries the PGP driver for keys matching the given pattern and
 * logs driver failures.
 *
 * @param mixed Key ID/Name pattern
 *
 * @return mixed Array of keys or enigma_error
 */
function list_keys($pattern = '')
{
    $this->load_pgp_driver();

    $result = $this->pgp_driver->list_keys($pattern);

    if ($result instanceof enigma_error) {
        $error = array(
            'code' => 600, 'type' => 'php',
            'file' => __FILE__, 'line' => __LINE__,
            'message' => "Enigma plugin: " . $result->getMessage()
        );
        rcube::raise_error($error, true, false);
    }

    return $result;
}
/**
 * PGP key details.
 * Fetches a single key from the PGP driver and logs driver failures.
 *
 * @param mixed Key ID
 *
 * @return mixed enigma_key or enigma_error
 */
function get_key($keyid)
{
    $this->load_pgp_driver();

    $result = $this->pgp_driver->get_key($keyid);

    if ($result instanceof enigma_error) {
        $error = array(
            'code' => 600, 'type' => 'php',
            'file' => __FILE__, 'line' => __LINE__,
            'message' => "Enigma plugin: " . $result->getMessage()
        );
        rcube::raise_error($error, true, false);
    }

    return $result;
}
/**
 * PGP key delete.
 * Removes the key via the PGP driver and logs driver failures.
 *
 * @param string Key ID
 *
 * @return enigma_error|bool True on success
 */
function delete_key($keyid)
{
    $this->load_pgp_driver();

    $result = $this->pgp_driver->delete_key($keyid);

    if ($result instanceof enigma_error) {
        $error = array(
            'code' => 600, 'type' => 'php',
            'file' => __FILE__, 'line' => __LINE__,
            'message' => "Enigma plugin: " . $result->getMessage()
        );
        rcube::raise_error($error, true, false);
    }

    return $result;
}
/**
 * PGP keys/certs importing.
 * On success, adds aggregate 'imported'/'unchanged' counters combining
 * the driver's public and private key counts.
 *
 * @param mixed Import file name or content
 * @param boolean True if first argument is a filename
 *
 * @return mixed Import status data array or enigma_error
 */
function import_key($content, $isfile=false)
{
    $this->load_pgp_driver();

    $result = $this->pgp_driver->import($content, $isfile);

    if ($result instanceof enigma_error) {
        $error = array(
            'code' => 600, 'type' => 'php',
            'file' => __FILE__, 'line' => __LINE__,
            'message' => "Enigma plugin: " . $result->getMessage()
        );
        rcube::raise_error($error, true, false);

        return $result;
    }

    // aggregate counters used by the UI status message
    $result['imported']  = $result['public_imported'] + $result['private_imported'];
    $result['unchanged'] = $result['public_unchanged'] + $result['private_unchanged'];

    return $result;
}
/**
 * Handler for keys/certs import request action.
 * Reads the referenced message part, imports the keys it contains
 * and reports the result to the UI.
 */
function import_file()
{
    $uid     = rcube_utils::get_input_value('_uid', rcube_utils::INPUT_POST);
    $mbox    = rcube_utils::get_input_value('_mbox', rcube_utils::INPUT_POST);
    $mime_id = rcube_utils::get_input_value('_part', rcube_utils::INPUT_POST);

    $storage = $this->rc->get_storage();

    // Initialize explicitly so the check below doesn't read an
    // undefined variable when _uid/_part are missing from the request.
    $part = null;

    if ($uid && $mime_id) {
        $storage->set_folder($mbox);
        $part = $storage->get_message_part($uid, $mime_id);
    }

    if ($part && is_array($result = $this->import_key($part))) {
        $this->rc->output->show_message('enigma.keysimportsuccess', 'confirmation',
            array('new' => $result['imported'], 'old' => $result['unchanged']));
    }
    else {
        $this->rc->output->show_message('enigma.keysimportfailed', 'error');
    }

    $this->rc->output->send();
}
/**
 * Handler for the key-passphrase form submission.
 * Stores the supplied passphrase in the session when both a key id
 * and a non-empty password were posted.
 */
function password_handler()
{
    $keyid  = rcube_utils::get_input_value('_keyid', rcube_utils::INPUT_POST);
    $passwd = rcube_utils::get_input_value('_passwd', rcube_utils::INPUT_POST, true);

    // guard clause: ignore incomplete submissions
    if (!$keyid || $passwd === null || !strlen($passwd)) {
        return;
    }

    $this->save_password($keyid, $passwd);
}
/**
 * Stores a key passphrase (encrypted) in the session, stamped with the
 * current time so it can expire (see get_passwords()).
 *
 * @param string Key ID
 * @param string Passphrase
 */
function save_password($keyid, $password)
{
    // we store passwords in session for specified time
    // Start from an empty array so a missing session entry or a failed
    // unserialize() cannot leave $config as false/garbage.
    $config = array();

    if (!empty($_SESSION['enigma_pass'])) {
        $stored = $this->rc->decrypt($_SESSION['enigma_pass']);
        $stored = @unserialize($stored);
        if (is_array($stored)) {
            $config = $stored;
        }
    }

    $config[$keyid] = array($password, time());

    $_SESSION['enigma_pass'] = $this->rc->encrypt(serialize($config));
}
/**
 * Returns the stored key passphrases, dropping (and persisting the
 * removal of) entries older than PASSWORD_TIME seconds.
 *
 * @return array Map of key ID => passphrase
 */
function get_passwords()
{
    // Start from an empty array so a missing session entry or a failed
    // unserialize() cannot leave $config undefined/garbage.
    $config = array();

    if (!empty($_SESSION['enigma_pass'])) {
        $stored = $this->rc->decrypt($_SESSION['enigma_pass']);
        $stored = @unserialize($stored);
        if (is_array($stored)) {
            $config = $stored;
        }
    }

    $threshold = time() - self::PASSWORD_TIME;
    $keys      = array();
    // Initialize explicitly — previously read undefined when nothing expired.
    $modified  = false;

    // delete expired passwords
    foreach ($config as $key => $value) {
        if ($value[1] < $threshold) {
            unset($config[$key]);
            $modified = true;
        }
        else {
            $keys[$key] = $value[0];
        }
    }

    // persist only if something was actually removed
    if ($modified) {
        $_SESSION['enigma_pass'] = $this->rc->encrypt(serialize($config));
    }

    return $keys;
}
/**
 * Get message part body.
 *
 * @param rcube_message Message object
 * @param string Message part ID
 * @param bool Return raw body with headers (needed for signature
 *             verification, where the part must not be decoded)
 */
private function get_part_body($msg, $part_id, $full = false)
{
    // @TODO: Handle big bodies using file handles
    if ($full) {
        // raw headers + raw (undecoded) body, fetched straight from storage
        $storage = $this->rc->get_storage();
        $body = $storage->get_raw_headers($msg->uid, $part_id);
        $body .= $storage->get_raw_body($msg->uid, null, $part_id);
    }
    else {
        // decoded part body without headers
        $body = $msg->get_part_body($part_id, false);
    }

    return $body;
}
/**
 * Parse decrypted message body into structure.
 *
 * @param string Message body (normalized to CRLF line endings in place)
 *
 * @return array Message structure
 */
private function parse_body(&$body)
{
    // Mail_mimeDecode requires \r\n line endings, but gpg may return \n
    $body = preg_replace('/\r?\n/', "\r\n", $body);

    // parse the body into structure
    return rcube_mime::parse_message($body);
}
/**
 * Replace message encrypted structure with decrypted message structure.
 * Drops the old part (and all its sub-parts) from the message's
 * mime_parts index, re-ids the new structure and installs it.
 *
 * @param array Hook parameters (structure/mimetype are replaced)
 * @param rcube_message_part Decrypted structure to install
 */
private function modify_structure(&$p, $struct)
{
    // modify mime_parts property of the message object
    $old_id = $p['structure']->mime_id;

    // remove the old part itself and every descendant ("<id>.<n>...")
    foreach (array_keys($p['object']->mime_parts) as $idx) {
        if (!$old_id || $idx == $old_id || strpos($idx, $old_id . '.') === 0) {
            unset($p['object']->mime_parts[$idx]);
        }
    }

    // modify the new structure to be correctly handled by Roundcube
    $this->modify_structure_part($struct, $p['object'], $old_id);

    // replace old structure with the new one
    $p['structure'] = $struct;
    $p['mimetype'] = $struct->mimetype;
}
/**
 * Modify decrypted message part (recursively).
 * Marks the part as decrypted/non-cacheable, prefixes its mime id with
 * the replaced part's id, and registers it in the message's part index.
 *
 * @param rcube_message_part Part to adjust
 * @param rcube_message Message owning the part index
 * @param string Mime id of the part that was replaced (prefix for new ids)
 */
private function modify_structure_part($part, $msg, $old_id)
{
    // never cache the body
    $part->body_modified = true;
    $part->encoding = 'stream';
    // Cache the fact it was decrypted
    $part->encrypted = true;

    // modify part identifier: "<old_id>[.<local_id>]"
    if ($old_id) {
        $part->mime_id = !$part->mime_id ? $old_id : ($old_id . '.' . $part->mime_id);
    }

    $msg->mime_parts[$part->mime_id] = $part;

    // modify sub-parts
    foreach ((array) $part->parts as $p) {
        $this->modify_structure_part($p, $msg, $old_id);
    }
}
/**
 * Checks if specified message part is a PGP-key or S/MIME cert data.
 *
 * @param rcube_message_part Part object
 *
 * @return boolean True if part is a key/cert
 */
public function is_keys_part($part)
{
    // @TODO: S/MIME
    // Content-Type: application/pgp-keys
    $is_pgp_keys = ($part->mimetype == 'application/pgp-keys');

    return $is_pgp_keys;
}
}
| kfmaster/cicdlab | modules/docker-roundcubemail/plugins/enigma/lib/enigma_engine.php | PHP | apache-2.0 | 26,398 |
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, Sequence
from pyquil import get_qc
from pyquil.api import QuantumComputer
import cirq
from cirq_rigetti import circuit_transformers as transformers
from cirq_rigetti import circuit_sweep_executors as executors
# Executor used when callers don't supply one: compiles with quilc and
# resolves circuit parameters via cirq before execution.
_default_executor = executors.with_quilc_compilation_and_cirq_parameter_resolution
class RigettiQCSSampler(cirq.Sampler):
    """A `cirq.Sampler` that executes circuits on QCS quantum hardware or on
    pyQuil's quantum virtual machine (QVM), including sampling of
    parameterized circuits across parameter sweeps.
    """

    def __init__(
        self,
        quantum_computer: QuantumComputer,
        executor: executors.CircuitSweepExecutor = _default_executor,
        transformer: transformers.CircuitTransformer = transformers.default,
    ):
        """Initializes a `RigettiQCSSampler`.

        Args:
            quantum_computer: The `pyquil.api.QuantumComputer` on which to run
                `cirq.Circuit`s.
            executor: A callable that applies `transformer` to a `cirq.Circuit`
                and then runs the transformed circuit on `quantum_computer`.
                Any custom callable or static method on `CircuitSweepExecutors`
                may be used.
            transformer: A callable converting a `cirq.Circuit` into a
                `pyquil.Program`. Any custom callable or static method on
                `CircuitTransformers` may be used.
        """
        self._quantum_computer = quantum_computer
        self.executor = executor
        self.transformer = transformer

    def run_sweep(
        self,
        program: cirq.AbstractCircuit,
        params: cirq.Sweepable,
        repetitions: int = 1,
    ) -> Sequence[cirq.Result]:
        """Evaluates the circuit once for every parameter set in `params`.

        Args:
            program: Circuit to evaluate for each set of parameters in `params`.
            params: `cirq.Sweepable` whose resolvers are applied to the circuit
                via `cirq.protocols.resolve_parameters`.
            repetitions: Number of runs per parameter set; each resulting
                `cirq.Result` includes one measurement per repetition.

        Returns:
            A list of `cirq.Result` s, one per parameter resolver.
        """
        # Materialize the sweep into concrete resolvers for the executor.
        return self.executor(
            quantum_computer=self._quantum_computer,
            circuit=program.unfreeze(copy=False),
            resolvers=list(cirq.to_resolvers(params)),
            repetitions=repetitions,
            transformer=self.transformer,
        )
def get_rigetti_qcs_sampler(
    quantum_processor_id: str,
    *,
    as_qvm: Optional[bool] = None,
    noisy: Optional[bool] = None,
    executor: executors.CircuitSweepExecutor = _default_executor,
    transformer: transformers.CircuitTransformer = transformers.default,
) -> RigettiQCSSampler:
    """Builds a `RigettiQCSSampler` backed by `pyquil.get_qc`.

    Args:
        quantum_processor_id: Name of the desired quantum computer, as returned
            by `pyquil.api.list_quantum_computers`. Names ending in "-qvm"
            yield a QVM; names ending in "-pyqvm" yield a `pyquil.PyQVM`;
            otherwise a Rigetti QCS QPU of that name is used if one exists.
        as_qvm: Optional flag forcing a QVM (instead of a QPU). When `True`,
            a QVM-backed quantum computer is returned regardless of the
            name's suffix.
        noisy: Optional flag forcing inclusion of a noise model. When `True`,
            the returned quantum computer carries the generic QVM noise model
            (simple T1/T2 noise plus readout error). At the time of this
            writing this has no effect on a QVM initialized from a Rigetti QCS
            `qcs_api_client.models.InstructionSetArchitecture`.
        executor: Callable that applies `transformer` to a `cirq.Circuit` and
            executes the result on the quantum computer. Any custom callable
            or static method on `CircuitSweepExecutors` may be used.
        transformer: Callable converting a `cirq.Circuit` into a
            `pyquil.Program`. Any custom callable or static method on
            `CircuitTransformers` may be used.

    Returns:
        A `RigettiQCSSampler` for the requested quantum processor, executor,
        and transformer.
    """
    return RigettiQCSSampler(
        quantum_computer=get_qc(quantum_processor_id, as_qvm=as_qvm, noisy=noisy),
        executor=executor,
        transformer=transformer,
    )
| quantumlib/Cirq | cirq-rigetti/cirq_rigetti/sampler.py | Python | apache-2.0 | 5,548 |
package com.afeng.xf.utils.AFengUtils;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.os.Build;
import android.support.annotation.ColorInt;
import android.support.design.widget.CoordinatorLayout;
import android.support.v4.widget.DrawerLayout;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.LinearLayout;
import com.afeng.xf.R;
/**
* Created by Jaeger on 16/2/14.
* <p>
* Email: chjie.jaeger@gmail.com
* GitHub: https://github.com/laobie
*/
public class StatusBarUtil {
// Default alpha (0-255) blended into the status bar color when none is given.
public static final int DEFAULT_STATUS_BAR_ALPHA = 112;
// View id of the fake status-bar rectangle added on pre-Lollipop devices.
private static final int FAKE_STATUS_BAR_VIEW_ID = R.id.statusbarutil_fake_status_bar_view;
// View id of the semi-transparent overlay view used for translucency.
private static final int FAKE_TRANSLUCENT_VIEW_ID = R.id.statusbarutil_translucent_view;
// setTag(int, Object) key marking that a view's top margin was already offset.
// Negative value keeps the high byte non-zero, as required by View.setTag(int, Object).
private static final int TAG_KEY_HAVE_SET_OFFSET = -123;
/**
 * Hides the system UI for immersive (distraction-free) reading.
 * The content is laid out under the system bars so it does not resize
 * when the bars are shown or hidden.
 *
 * @param activity activity whose window enters immersive mode
 */
public static void hideSystemUI(Activity activity) {
    // Set the IMMERSIVE flag.
    // Set the content to appear under the system bars so that the content
    // doesn't resize when the system bars hide and show.
    activity.getWindow().getDecorView().setSystemUiVisibility(
            View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                    | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                    // | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
                    | View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
                    | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
    );
}
/**
 * Restores the system UI previously hidden by {@link #hideSystemUI(Activity)},
 * keeping the layout-stable/fullscreen-layout flags so content stays in place.
 *
 * @param activity activity whose window leaves immersive mode
 */
public static void showSystemUI(Activity activity) {
    activity.getWindow().getDecorView().setSystemUiVisibility(
            View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                    // | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
    );
}
/**
 * Sets the status bar color using the default translucency
 * ({@link #DEFAULT_STATUS_BAR_ALPHA}).
 *
 * @param activity activity to modify
 * @param color    status bar color value
 */
public static void setColor(Activity activity, @ColorInt int color) {
    setColor(activity, color, DEFAULT_STATUS_BAR_ALPHA);
}
/**
 * Sets the status bar color.
 * On Lollipop+ the window's own status bar color is used; on KitKat a
 * fake status-bar-sized view is inserted into the decor view instead.
 *
 * @param activity       activity to modify
 * @param color          status bar color value
 * @param statusBarAlpha status bar alpha (0-255) blended into the color
 */
public static void setColor(Activity activity, @ColorInt int color, int statusBarAlpha) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
        activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        activity.getWindow().setStatusBarColor(calculateStatusColor(color, statusBarAlpha));
    } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        ViewGroup decorView = (ViewGroup) activity.getWindow().getDecorView();
        // reuse a previously-added fake status bar view if present
        View fakeStatusBarView = decorView.findViewById(FAKE_STATUS_BAR_VIEW_ID);
        if (fakeStatusBarView != null) {
            if (fakeStatusBarView.getVisibility() == View.GONE) {
                fakeStatusBarView.setVisibility(View.VISIBLE);
            }
            fakeStatusBarView.setBackgroundColor(calculateStatusColor(color, statusBarAlpha));
        } else {
            // createStatusBarView is defined later in this class
            decorView.addView(createStatusBarView(activity, color, statusBarAlpha));
        }
        setRootView(activity);
    }
}
/**
 * Sets the status bar color for a swipe-back (slide-to-return) screen,
 * using the default translucency.
 *
 * @param activity activity to modify
 * @param color    status bar color value
 */
public static void setColorForSwipeBack(Activity activity, int color) {
    setColorForSwipeBack(activity, color, DEFAULT_STATUS_BAR_ALPHA);
}
/**
 * Sets the status bar color for a swipe-back (slide-to-return) screen.
 * Pads the content view by the status bar height and colors that strip;
 * a CoordinatorLayout root gets special handling per API level.
 *
 * @param activity       activity to modify
 * @param color          status bar color value
 * @param statusBarAlpha status bar alpha (0-255) blended into the color
 */
public static void setColorForSwipeBack(Activity activity, @ColorInt int color, int statusBarAlpha) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {

        ViewGroup contentView = ((ViewGroup) activity.findViewById(android.R.id.content));
        View rootView = contentView.getChildAt(0);
        int statusBarHeight = getStatusBarHeight(activity);
        if (rootView != null && rootView instanceof CoordinatorLayout) {
            final CoordinatorLayout coordinatorLayout = (CoordinatorLayout) rootView;
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
                coordinatorLayout.setFitsSystemWindows(false);
                contentView.setBackgroundColor(calculateStatusColor(color, statusBarAlpha));
                // only add top padding (and relayout) if not already applied
                boolean isNeedRequestLayout = contentView.getPaddingTop() < statusBarHeight;
                if (isNeedRequestLayout) {
                    contentView.setPadding(0, statusBarHeight, 0, 0);
                    coordinatorLayout.post(new Runnable() {
                        @Override
                        public void run() {
                            coordinatorLayout.requestLayout();
                        }
                    });
                }
            } else {
                coordinatorLayout.setStatusBarBackgroundColor(calculateStatusColor(color, statusBarAlpha));
            }
        } else {
            contentView.setPadding(0, statusBarHeight, 0, 0);
            contentView.setBackgroundColor(calculateStatusColor(color, statusBarAlpha));
        }
        setTransparentForWindow(activity);
    }
}
/**
 * Sets a solid status bar color with no translucency (alpha 0).
 *
 * @param activity activity to modify
 * @param color    status bar color value
 */
public static void setColorNoTranslucent(Activity activity, @ColorInt int color) {
    setColor(activity, color, 0);
}
/**
 * Sets the status bar color (no translucency effect below Lollipop;
 * deprecated — prefer {@link #setColor(Activity, int)}).
 *
 * @param activity activity to modify
 * @param color    status bar color value
 */
@Deprecated
public static void setColorDiff(Activity activity, @ColorInt int color) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    transparentStatusBar(activity);
    ViewGroup contentView = (ViewGroup) activity.findViewById(android.R.id.content);
    // Remove any existing overlay rectangle so effects don't stack
    View fakeStatusBarView = contentView.findViewById(FAKE_STATUS_BAR_VIEW_ID);
    if (fakeStatusBarView != null) {
        if (fakeStatusBarView.getVisibility() == View.GONE) {
            fakeStatusBarView.setVisibility(View.VISIBLE);
        }
        fakeStatusBarView.setBackgroundColor(color);
    } else {
        contentView.addView(createStatusBarView(activity, color));
    }
    setRootView(activity);
}
/**
 * Makes the status bar semi-transparent with the default alpha.
 * Suitable for screens whose background image should extend
 * behind the status bar.
 *
 * @param activity activity to modify
 */
public static void setTranslucent(Activity activity) {
    setTranslucent(activity, DEFAULT_STATUS_BAR_ALPHA);
}
/**
 * Makes the status bar semi-transparent.
 * Suitable for screens whose background image should extend
 * behind the status bar.
 *
 * @param activity       activity to modify
 * @param statusBarAlpha status bar alpha (0-255) of the dark overlay
 */
public static void setTranslucent(Activity activity, int statusBarAlpha) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    setTransparent(activity);
    // addTranslucentView is defined later in this class
    addTranslucentView(activity, statusBarAlpha);
}
/**
 * Makes the status bar semi-transparent when the root layout is a
 * CoordinatorLayout. Suitable for screens whose background image
 * should extend behind the status bar.
 *
 * @param activity       activity to modify
 * @param statusBarAlpha status bar alpha (0-255) of the dark overlay
 */
public static void setTranslucentForCoordinatorLayout(Activity activity, int statusBarAlpha) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    transparentStatusBar(activity);
    addTranslucentView(activity, statusBarAlpha);
}
/**
 * Makes the status bar fully transparent.
 *
 * @param activity activity to modify
 */
public static void setTransparent(Activity activity) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    // transparentStatusBar / setRootView are defined later in this class
    transparentStatusBar(activity);
    setRootView(activity);
}
/**
 * Makes the status bar transparent (semi-transparent on Lollipop+;
 * deprecated — prefer {@link #setTransparent(Activity)}).
 * Suitable for screens whose background image should extend behind
 * the status bar.
 *
 * @param activity activity to modify
 */
@Deprecated
public static void setTranslucentDiff(Activity activity) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        // make the status bar translucent
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        setRootView(activity);
    }
}
/**
 * Sets the status bar color for a DrawerLayout-based screen,
 * using the default translucency.
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 * @param color        status bar color value
 */
public static void setColorForDrawerLayout(Activity activity, DrawerLayout drawerLayout, @ColorInt int color) {
    setColorForDrawerLayout(activity, drawerLayout, color, DEFAULT_STATUS_BAR_ALPHA);
}
/**
 * Sets a solid (non-translucent) status bar color for a
 * DrawerLayout-based screen.
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 * @param color        status bar color value
 */
public static void setColorNoTranslucentForDrawerLayout(Activity activity, DrawerLayout drawerLayout, @ColorInt int color) {
    setColorForDrawerLayout(activity, drawerLayout, color, 0);
}
/**
 * Sets the status bar color for a DrawerLayout-based screen.
 * Inserts a status-bar-sized colored view above the drawer's content
 * child, pads the content when needed, and applies the alpha overlay.
 *
 * @param activity       activity to modify
 * @param drawerLayout   the DrawerLayout root
 * @param color          status bar color value
 * @param statusBarAlpha status bar alpha (0-255) of the dark overlay
 */
public static void setColorForDrawerLayout(Activity activity, DrawerLayout drawerLayout, @ColorInt int color,
                                           int statusBarAlpha) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
        activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
    } else {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
    }
    // Create a status-bar-sized rectangle
    // and add the statusBarView into the layout
    ViewGroup contentLayout = (ViewGroup) drawerLayout.getChildAt(0);
    View fakeStatusBarView = contentLayout.findViewById(FAKE_STATUS_BAR_VIEW_ID);
    if (fakeStatusBarView != null) {
        if (fakeStatusBarView.getVisibility() == View.GONE) {
            fakeStatusBarView.setVisibility(View.VISIBLE);
        }
        fakeStatusBarView.setBackgroundColor(color);
    } else {
        contentLayout.addView(createStatusBarView(activity, color), 0);
    }
    // When the content layout is not a LinearLayout, add top padding
    if (!(contentLayout instanceof LinearLayout) && contentLayout.getChildAt(1) != null) {
        contentLayout.getChildAt(1)
                .setPadding(contentLayout.getPaddingLeft(), getStatusBarHeight(activity) + contentLayout.getPaddingTop(),
                        contentLayout.getPaddingRight(), contentLayout.getPaddingBottom());
    }
    // Configure DrawerLayout-specific properties
    setDrawerLayoutProperty(drawerLayout, contentLayout);
    addTranslucentView(activity, statusBarAlpha);
}
/**
 * Configures fitsSystemWindows/clipToPadding on a DrawerLayout and its
 * children so the fake status bar view renders correctly.
 *
 * @param drawerLayout              the DrawerLayout root
 * @param drawerLayoutContentLayout the DrawerLayout's content child (index 0)
 */
private static void setDrawerLayoutProperty(DrawerLayout drawerLayout, ViewGroup drawerLayoutContentLayout) {
    // child at index 1 is the drawer (slide-out) view
    ViewGroup drawer = (ViewGroup) drawerLayout.getChildAt(1);
    drawerLayout.setFitsSystemWindows(false);
    drawerLayoutContentLayout.setFitsSystemWindows(false);
    drawerLayoutContentLayout.setClipToPadding(true);
    drawer.setFitsSystemWindows(false);
}
/**
 * Sets the status bar color for a DrawerLayout-based screen
 * (no translucency effect below Lollipop; deprecated — prefer
 * {@link #setColorForDrawerLayout(Activity, DrawerLayout, int)}).
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 * @param color        status bar color value
 */
@Deprecated
public static void setColorForDrawerLayoutDiff(Activity activity, DrawerLayout drawerLayout, @ColorInt int color) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        // Create a status-bar-sized rectangle
        ViewGroup contentLayout = (ViewGroup) drawerLayout.getChildAt(0);
        View fakeStatusBarView = contentLayout.findViewById(FAKE_STATUS_BAR_VIEW_ID);
        if (fakeStatusBarView != null) {
            if (fakeStatusBarView.getVisibility() == View.GONE) {
                fakeStatusBarView.setVisibility(View.VISIBLE);
            }
            fakeStatusBarView.setBackgroundColor(calculateStatusColor(color, DEFAULT_STATUS_BAR_ALPHA));
        } else {
            // add the statusBarView into the layout
            contentLayout.addView(createStatusBarView(activity, color), 0);
        }
        // When the content layout is not a LinearLayout, add top padding
        if (!(contentLayout instanceof LinearLayout) && contentLayout.getChildAt(1) != null) {
            contentLayout.getChildAt(1).setPadding(0, getStatusBarHeight(activity), 0, 0);
        }
        // Configure DrawerLayout-specific properties
        setDrawerLayoutProperty(drawerLayout, contentLayout);
    }
}
/**
 * Makes the status bar semi-transparent for a DrawerLayout-based
 * screen, using the default alpha.
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 */
public static void setTranslucentForDrawerLayout(Activity activity, DrawerLayout drawerLayout) {
    setTranslucentForDrawerLayout(activity, drawerLayout, DEFAULT_STATUS_BAR_ALPHA);
}
/**
 * Makes the status bar semi-transparent for a DrawerLayout-based screen.
 *
 * @param activity       activity to modify
 * @param drawerLayout   the DrawerLayout root
 * @param statusBarAlpha status bar alpha (0-255) of the dark overlay
 */
public static void setTranslucentForDrawerLayout(Activity activity, DrawerLayout drawerLayout, int statusBarAlpha) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    setTransparentForDrawerLayout(activity, drawerLayout);
    addTranslucentView(activity, statusBarAlpha);
}
/**
 * Makes the status bar fully transparent for a DrawerLayout-based screen.
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 */
public static void setTransparentForDrawerLayout(Activity activity, DrawerLayout drawerLayout) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
        activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
    } else {
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
    }

    ViewGroup contentLayout = (ViewGroup) drawerLayout.getChildAt(0);
    // When the content layout is not a LinearLayout, add top padding
    if (!(contentLayout instanceof LinearLayout) && contentLayout.getChildAt(1) != null) {
        contentLayout.getChildAt(1).setPadding(0, getStatusBarHeight(activity), 0, 0);
    }

    // Configure DrawerLayout-specific properties
    setDrawerLayoutProperty(drawerLayout, contentLayout);
}
/**
 * Makes the status bar transparent for a DrawerLayout-based screen
 * (semi-transparent on Lollipop+; deprecated — prefer
 * {@link #setTranslucentForDrawerLayout(Activity, DrawerLayout)}).
 *
 * @param activity     activity to modify
 * @param drawerLayout the DrawerLayout root
 */
@Deprecated
public static void setTranslucentForDrawerLayoutDiff(Activity activity, DrawerLayout drawerLayout) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        // make the status bar translucent
        activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        // configure the content layout
        ViewGroup contentLayout = (ViewGroup) drawerLayout.getChildAt(0);
        contentLayout.setFitsSystemWindows(true);
        contentLayout.setClipToPadding(true);
        // configure the drawer (slide-out) layout
        ViewGroup vg = (ViewGroup) drawerLayout.getChildAt(1);
        vg.setFitsSystemWindows(false);
        // configure the DrawerLayout itself
        drawerLayout.setFitsSystemWindows(false);
    }
}
/**
 * Makes the status bar fully transparent for a screen headed by an
 * ImageView, offsetting the given view below the status bar.
 *
 * @param activity       activity to modify
 * @param needOffsetView view to push down by the status bar height
 */
public static void setTransparentForImageView(Activity activity, View needOffsetView) {
    setTranslucentForImageView(activity, 0, needOffsetView);
}
/**
 * Makes the status bar translucent (default alpha) for a screen headed
 * by an ImageView, offsetting the given view below the status bar.
 *
 * @param activity       activity to modify
 * @param needOffsetView view to push down by the status bar height
 */
public static void setTranslucentForImageView(Activity activity, View needOffsetView) {
    setTranslucentForImageView(activity, DEFAULT_STATUS_BAR_ALPHA, needOffsetView);
}
/**
 * Makes the status bar translucent for a screen headed by an ImageView,
 * offsetting the given view below the status bar. The offset is applied
 * only once per view (tracked via a view tag).
 *
 * @param activity       activity to modify
 * @param statusBarAlpha status bar alpha (0-255) of the dark overlay
 * @param needOffsetView view to push down by the status bar height
 */
public static void setTranslucentForImageView(Activity activity, int statusBarAlpha, View needOffsetView) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return;
    }
    setTransparentForWindow(activity);
    addTranslucentView(activity, statusBarAlpha);
    if (needOffsetView != null) {
        // skip if this view's margin was already offset in a previous call
        Object haveSetOffset = needOffsetView.getTag(TAG_KEY_HAVE_SET_OFFSET);
        if (haveSetOffset != null && (Boolean) haveSetOffset) {
            return;
        }
        ViewGroup.MarginLayoutParams layoutParams = (ViewGroup.MarginLayoutParams) needOffsetView.getLayoutParams();
        layoutParams.setMargins(layoutParams.leftMargin, layoutParams.topMargin + getStatusBarHeight(activity),
                layoutParams.rightMargin, layoutParams.bottomMargin);
        needOffsetView.setTag(TAG_KEY_HAVE_SET_OFFSET, true);
    }
}
    /**
     * Make the status bar translucent (default overlay alpha) for a fragment
     * whose header is an ImageView.
     *
     * @param activity       the activity hosting the fragment
     * @param needOffsetView the view that must be shifted down below the status bar
     */
    public static void setTranslucentForImageViewInFragment(Activity activity, View needOffsetView) {
        setTranslucentForImageViewInFragment(activity, DEFAULT_STATUS_BAR_ALPHA, needOffsetView);
    }
    /**
     * Make the status bar fully transparent (alpha 0) for a fragment whose
     * header is an ImageView.
     *
     * @param activity       the activity hosting the fragment
     * @param needOffsetView the view that must be shifted down below the status bar
     */
    public static void setTransparentForImageViewInFragment(Activity activity, View needOffsetView) {
        setTranslucentForImageViewInFragment(activity, 0, needOffsetView);
    }
/**
* 为 fragment 头部是 ImageView 的设置状态栏透明
*
* @param activity fragment 对应的 activity
* @param statusBarAlpha 状态栏透明度
* @param needOffsetView 需要向下偏移的 View
*/
public static void setTranslucentForImageViewInFragment(Activity activity, int statusBarAlpha, View needOffsetView) {
setTranslucentForImageView(activity, statusBarAlpha, needOffsetView);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
clearPreviousSetting(activity);
}
}
/**
* 隐藏伪状态栏 View
*
* @param activity 调用的 Activity
*/
public static void hideFakeStatusBarView(Activity activity) {
ViewGroup decorView = (ViewGroup) activity.getWindow().getDecorView();
View fakeStatusBarView = decorView.findViewById(FAKE_STATUS_BAR_VIEW_ID);
if (fakeStatusBarView != null) {
fakeStatusBarView.setVisibility(View.GONE);
}
View fakeTranslucentView = decorView.findViewById(FAKE_TRANSLUCENT_VIEW_ID);
if (fakeTranslucentView != null) {
fakeTranslucentView.setVisibility(View.GONE);
}
}
///////////////////////////////////////////////////////////////////////////////////
@TargetApi(Build.VERSION_CODES.KITKAT)
private static void clearPreviousSetting(Activity activity) {
ViewGroup decorView = (ViewGroup) activity.getWindow().getDecorView();
View fakeStatusBarView = decorView.findViewById(FAKE_STATUS_BAR_VIEW_ID);
if (fakeStatusBarView != null) {
decorView.removeView(fakeStatusBarView);
ViewGroup rootView = (ViewGroup) ((ViewGroup) activity.findViewById(android.R.id.content)).getChildAt(0);
rootView.setPadding(0, 0, 0, 0);
}
}
/**
* 添加半透明矩形条
*
* @param activity 需要设置的 activity
* @param statusBarAlpha 透明值
*/
private static void addTranslucentView(Activity activity, int statusBarAlpha) {
ViewGroup contentView = (ViewGroup) activity.findViewById(android.R.id.content);
View fakeTranslucentView = contentView.findViewById(FAKE_TRANSLUCENT_VIEW_ID);
if (fakeTranslucentView != null) {
if (fakeTranslucentView.getVisibility() == View.GONE) {
fakeTranslucentView.setVisibility(View.VISIBLE);
}
fakeTranslucentView.setBackgroundColor(Color.argb(statusBarAlpha, 0, 0, 0));
} else {
contentView.addView(createTranslucentStatusBarView(activity, statusBarAlpha));
}
}
    /**
     * Create a solid colored rectangle view the same size as the status bar.
     * Delegates to {@link #createStatusBarView(Activity, int, int)} with alpha 0,
     * i.e. the color is used unblended.
     *
     * @param activity the activity used as view context
     * @param color    the status bar color value
     * @return the status bar placeholder view
     */
    private static View createStatusBarView(Activity activity, @ColorInt int color) {
        return createStatusBarView(activity, color, 0);
    }
/**
* 生成一个和状态栏大小相同的半透明矩形条
*
* @param activity 需要设置的activity
* @param color 状态栏颜色值
* @param alpha 透明值
* @return 状态栏矩形条
*/
private static View createStatusBarView(Activity activity, @ColorInt int color, int alpha) {
// 绘制一个和状态栏一样高的矩形
View statusBarView = new View(activity);
LinearLayout.LayoutParams params =
new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, getStatusBarHeight(activity));
statusBarView.setLayoutParams(params);
statusBarView.setBackgroundColor(calculateStatusColor(color, alpha));
statusBarView.setId(FAKE_STATUS_BAR_VIEW_ID);
return statusBarView;
}
/**
* 设置根布局参数
*/
private static void setRootView(Activity activity) {
ViewGroup parent = (ViewGroup) activity.findViewById(android.R.id.content);
for (int i = 0, count = parent.getChildCount(); i < count; i++) {
View childView = parent.getChildAt(i);
if (childView instanceof ViewGroup) {
childView.setFitsSystemWindows(true);
((ViewGroup) childView).setClipToPadding(true);
}
}
}
/**
* 设置透明
*/
private static void setTransparentForWindow(Activity activity) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
activity.getWindow()
.getDecorView()
.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
activity.getWindow()
.setFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS, WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
}
}
    /**
     * Make the status bar transparent.
     * <p>
     * On API 21+ the status bar color is set to transparent directly. Note that
     * FLAG_TRANSLUCENT_NAVIGATION is also added in that branch, so the
     * navigation bar becomes translucent as well — presumably intentional for
     * full-bleed image headers; confirm against callers before changing.
     * Below API 21 the translucent-status window flag is used instead.
     */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    private static void transparentStatusBar(Activity activity) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
            activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
            // Also makes the navigation bar translucent, not just the status bar.
            activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION);
            activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
        } else {
            activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        }
    }
/**
* 创建半透明矩形 View
*
* @param alpha 透明值
* @return 半透明 View
*/
private static View createTranslucentStatusBarView(Activity activity, int alpha) {
// 绘制一个和状态栏一样高的矩形
View statusBarView = new View(activity);
LinearLayout.LayoutParams params =
new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, getStatusBarHeight(activity));
statusBarView.setLayoutParams(params);
statusBarView.setBackgroundColor(Color.argb(alpha, 0, 0, 0));
statusBarView.setId(FAKE_TRANSLUCENT_VIEW_ID);
return statusBarView;
}
/**
* 获取状态栏高度
*
* @param context context
* @return 状态栏高度
*/
private static int getStatusBarHeight(Context context) {
// 获得状态栏高度
int resourceId = context.getResources().getIdentifier("status_bar_height", "dimen", "android");
return context.getResources().getDimensionPixelSize(resourceId);
}
/**
* 计算状态栏颜色
*
* @param color color值
* @param alpha alpha值
* @return 最终的状态栏颜色
*/
private static int calculateStatusColor(@ColorInt int color, int alpha) {
if (alpha == 0) {
return color;
}
float a = 1 - alpha / 255f;
int red = color >> 16 & 0xff;
int green = color >> 8 & 0xff;
int blue = color & 0xff;
red = (int) (red * a + 0.5);
green = (int) (green * a + 0.5);
blue = (int) (blue * a + 0.5);
return 0xff << 24 | red << 16 | green << 8 | blue;
}
}
| GitHubAFeng/AFengAndroid | app/src/main/java/com/afeng/xf/utils/AFengUtils/StatusBarUtil.java | Java | apache-2.0 | 28,369 |
package com.fiberlink.ninjaparser.output;
/**
 * {@link Output} implementation intended for standard output.
 * <p>
 * NOTE(review): print() currently has an empty body — presumably a stub still
 * to be implemented; confirm against the Output interface contract.
 */
public class StdOut implements Output{
    public void print() {
    }
}
| anandaverma/NinjaParser | src/com/fiberlink/ninjaparser/output/StdOut.java | Java | apache-2.0 | 123 |
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra2.cql.jdbc;
import java.sql.Driver;
/**
 * JDBC {@link Driver} kept under the legacy {@code org.apache.cassandra2.cql.jdbc}
 * package name for backward compatibility. All behavior is inherited from the
 * wrapped {@code com.github.adejanovski.cassandra.jdbc.CassandraDriver}.
 */
public class CassandraDriver extends com.github.adejanovski.cassandra.jdbc.CassandraDriver implements Driver
{}
| adejanovski/cassandra-jdbc-wrapper | src/main/java/org/apache/cassandra2/cql/jdbc/CassandraDriver.java | Java | apache-2.0 | 844 |
<?php
namespace DCarbone\PHPFHIRGenerated\DSTU1\PHPFHIRTests\FHIRElement\FHIRBackboneElement\FHIRFamilyHistory;
/*!
* This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using
* class definitions from HL7 FHIR (https://www.hl7.org/fhir/)
*
* Class creation date: December 26th, 2019 15:43+0000
*
* PHPFHIR Copyright:
*
* Copyright 2016-2019 Daniel Carbone (daniel.p.carbone@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* FHIR Copyright Notice:
*
* Copyright (c) 2011-2013, HL7, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of HL7 nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* Generated on Tue, Sep 30, 2014 18:08+1000 for FHIR v0.0.82
*/
use PHPUnit\Framework\TestCase;
use DCarbone\PHPFHIRGenerated\DSTU1\FHIRElement\FHIRBackboneElement\FHIRFamilyHistory\FHIRFamilyHistoryCondition;
/**
 * Unit test verifying that the FHIRFamilyHistoryCondition type can be
 * constructed without arguments.
 *
 * @package \DCarbone\PHPFHIRGenerated\DSTU1\PHPFHIRTests\FHIRElement\FHIRBackboneElement\FHIRFamilyHistory
 */
class FHIRFamilyHistoryConditionTest extends TestCase
{
    public function testCanConstructTypeNoArgs()
    {
        $condition = new FHIRFamilyHistoryCondition();
        $this->assertInstanceOf('\DCarbone\PHPFHIRGenerated\DSTU1\FHIRElement\FHIRBackboneElement\FHIRFamilyHistory\FHIRFamilyHistoryCondition', $condition);
    }
}
| dcarbone/php-fhir-generated | src/DCarbone/PHPFHIRGenerated/DSTU1/PHPFHIRTests/FHIRElement/FHIRBackboneElement/FHIRFamilyHistory/FHIRFamilyHistoryConditionTest.php | PHP | apache-2.0 | 3,336 |
"""Reverse complement reads with Seqtk."""
import os
from plumbum import TEE
from resolwe.process import (
Cmd,
DataField,
FileField,
FileHtmlField,
ListField,
Process,
StringField,
)
class ReverseComplementSingle(Process):
    """Reverse complement single-end FASTQ reads file using Seqtk."""

    slug = "seqtk-rev-complement-single"
    process_type = "data:reads:fastq:single:seqtk"
    name = "Reverse complement FASTQ (single-end)"
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/common:3.0.0"},
        },
        "resources": {
            "cores": 1,
            "memory": 16384,
        },
    }
    entity = {
        "type": "sample",
    }
    data_name = '{{ reads|sample_name|default("?") }}'
    version = "1.2.0"

    class Input:
        """Input fields to process ReverseComplementSingle."""

        reads = DataField("reads:fastq:single", label="Reads")

    class Output:
        """Output fields."""

        fastq = ListField(FileField(), label="Reverse complemented FASTQ file")
        fastqc_url = ListField(FileHtmlField(), label="Quality control with FastQC")
        fastqc_archive = ListField(FileField(), label="Download FastQC archive")

    def run(self, inputs, outputs):
        """Run the analysis."""
        # Derive the sample stem from the first lane's file name.
        first_lane = os.path.basename(inputs.reads.output.fastq[0].path)
        assert first_lane.endswith(".fastq.gz")
        stem = first_lane[:-9]
        revcomp_fastq = f"{stem}_complemented.fastq"

        # Merge all lanes into one gzipped input file.
        merged = "input_reads.fastq.gz"
        (Cmd["cat"][[lane.path for lane in inputs.reads.output.fastq]] > merged)()

        # Reverse complement the merged reads.
        (Cmd["seqtk"]["seq", "-r", merged] > revcomp_fastq)()

        # Quality control on the reverse-complemented reads.
        _, _, fastqc_err = (
            Cmd["fastqc"][revcomp_fastq, "--extract", "--outdir=./"] & TEE
        )
        if "Failed to process" in fastqc_err or "Skipping" in fastqc_err:
            self.error("Failed while processing with FastQC.")

        (Cmd["gzip"][revcomp_fastq])()

        outputs.fastq = [f"{revcomp_fastq}.gz"]
        outputs.fastqc_url = [f"{stem}_complemented_fastqc.html"]
        outputs.fastqc_archive = [f"{stem}_complemented_fastqc.zip"]
class ReverseComplementPaired(Process):
    """Reverse complement paired-end FASTQ reads file using Seqtk."""

    slug = "seqtk-rev-complement-paired"
    process_type = "data:reads:fastq:paired:seqtk"
    name = "Reverse complement FASTQ (paired-end)"
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/common:3.0.0"},
        },
        "resources": {
            "cores": 1,
            "memory": 16384,
        },
    }
    entity = {
        "type": "sample",
    }
    data_name = '{{ reads|sample_name|default("?") }}'
    version = "1.1.0"

    class Input:
        """Input fields to process ReverseComplementPaired."""

        reads = DataField("reads:fastq:paired", label="Reads")
        select_mate = StringField(
            label="Select mate",
            description="Select the which mate should be reverse complemented.",
            choices=[("Mate 1", "Mate 1"), ("Mate 2", "Mate 2"), ("Both", "Both")],
            default="Mate 1",
        )

    class Output:
        """Output fields."""

        fastq = ListField(FileField(), label="Reverse complemented FASTQ file")
        fastq2 = ListField(FileField(), label="Remaining mate")
        fastqc_url = ListField(
            FileHtmlField(), label="Quality control with FastQC (Mate 1)"
        )
        fastqc_archive = ListField(
            FileField(), label="Download FastQC archive (Mate 1)"
        )
        fastqc_url2 = ListField(
            FileHtmlField(), label="Quality control with FastQC (Mate 2)"
        )
        fastqc_archive2 = ListField(
            FileField(), label="Download FastQC archive (Mate 2)"
        )

    def _fastqc(self, fastq):
        """Run FastQC on one FASTQ file and abort the process on failure."""
        _, _, stderr = (Cmd["fastqc"][fastq, "--extract", "--outdir=./"] & TEE)
        if "Failed to process" in stderr or "Skipping" in stderr:
            self.error("Failed while processing with FastQC.")

    def _process_mate(self, name, original, complement):
        """Optionally reverse complement one mate and run FastQC on it.

        Returns a ``(fastq, fastqc_html, fastqc_zip)`` filename tuple for the
        mate, using the ``_complemented``/``_original`` naming convention.
        """
        if complement:
            complemented = f"{name}_complemented.fastq"
            # Reverse complement the concatenated reads with seqtk.
            (Cmd["seqtk"]["seq", "-r", original] > complemented)()
            self._fastqc(complemented)
            (Cmd["gzip"][complemented])()
            return (
                f"{complemented}.gz",
                f"{name}_complemented_fastqc.html",
                f"{name}_complemented_fastqc.zip",
            )
        # Mate is left untouched; only quality control is performed.
        self._fastqc(original)
        return (
            original,
            f"{name}_original_fastqc.html",
            f"{name}_original_fastqc.zip",
        )

    def run(self, inputs, outputs):
        """Run the analysis."""
        basename_mate1 = os.path.basename(inputs.reads.output.fastq[0].path)
        basename_mate2 = os.path.basename(inputs.reads.output.fastq2[0].path)
        assert basename_mate1.endswith(".fastq.gz")
        assert basename_mate2.endswith(".fastq.gz")
        name_mate1 = basename_mate1[:-9]
        name_mate2 = basename_mate2[:-9]

        original_mate1 = f"{name_mate1}_original.fastq.gz"
        original_mate2 = f"{name_mate2}_original.fastq.gz"

        # Concatenate multilane reads for each mate.
        (
            Cmd["cat"][[reads.path for reads in inputs.reads.output.fastq]]
            > original_mate1
        )()
        (
            Cmd["cat"][[reads.path for reads in inputs.reads.output.fastq2]]
            > original_mate2
        )()

        # Decide per mate whether it gets reverse complemented.
        complement_mate1 = inputs.select_mate in ("Mate 1", "Both")
        complement_mate2 = inputs.select_mate in ("Mate 2", "Both")

        fastq1, html1, zip1 = self._process_mate(
            name_mate1, original_mate1, complement_mate1
        )
        fastq2, html2, zip2 = self._process_mate(
            name_mate2, original_mate2, complement_mate2
        )

        outputs.fastq = [fastq1]
        outputs.fastq2 = [fastq2]
        outputs.fastqc_url = [html1]
        outputs.fastqc_archive = [zip1]
        outputs.fastqc_url2 = [html2]
        outputs.fastqc_archive2 = [zip2]
| genialis/resolwe-bio | resolwe_bio/processes/support_processors/seqtk_reverse_complement.py | Python | apache-2.0 | 8,764 |
"""TcEx Framework Service Common module"""
# standard library
import json
import threading
import time
import traceback
import uuid
from datetime import datetime
from typing import Callable, Optional, Union
from .mqtt_message_broker import MqttMessageBroker
class CommonService:
    """TcEx Framework Service Common module

    Shared service logic between the supported service types:
    * API Service
    * Custom Trigger Service
    * Webhook Trigger Service
    """

    def __init__(self, tcex: object):
        """Initialize the Class properties.

        Args:
            tcex: Instance of TcEx.
        """
        self.tcex = tcex

        # properties
        # Default metric store. Without this default, add_metric/increment_metric/
        # update_metric and the metrics property raise AttributeError when called
        # before the metrics setter has ever been used.
        self._metrics = {}
        self._ready = False
        self._start_time = datetime.now()
        self.args: object = tcex.default_args
        self.configs = {}
        self.heartbeat_max_misses = 3
        self.heartbeat_sleep_time = 1
        self.heartbeat_watchdog = 0
        self.ij = tcex.ij
        self.key_value_store = self.tcex.key_value_store
        self.log = tcex.log
        self.logger = tcex.logger
        self.message_broker = MqttMessageBroker(
            broker_host=self.args.tc_svc_broker_host,
            broker_port=self.args.tc_svc_broker_port,
            broker_timeout=self.args.tc_svc_broker_conn_timeout,
            broker_token=self.args.tc_svc_broker_token,
            broker_cacert=self.args.tc_svc_broker_cacert_file,
            logger=tcex.log,
        )
        self.ready = False
        self.redis_client = self.tcex.redis_client
        self.token = tcex.token

        # config callbacks
        self.shutdown_callback = None

    def _create_logging_handler(self):
        """Create a logging handler."""
        if self.logger.handler_exist(self.thread_name):
            return

        # create trigger id logging filehandler
        self.logger.add_pattern_file_handler(
            name=self.thread_name,
            filename=f'''{datetime.today().strftime('%Y%m%d')}/{self.session_id}.log''',
            level=self.args.tc_log_level,
            path=self.args.tc_log_path,
            # uuid4 pattern for session_id
            pattern=r'^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}.log$',
            handler_key=self.session_id,
            thread_key='session_id',
        )

    def add_metric(self, label: str, value: Union[int, str]) -> None:
        """Add a metric.

        Metrics are reported in heartbeat message.

        Args:
            label: The metric label (e.g., hits) to add.
            value: The value for the metric.
        """
        self._metrics[label] = value

    @property
    def command_map(self) -> dict:
        """Return the command map for the current Service type."""
        return {
            'heartbeat': self.process_heartbeat_command,
            'loggingchange': self.process_logging_change_command,
            'shutdown': self.process_shutdown_command,
        }

    @staticmethod
    def create_session_id() -> str:  # pylint: disable=unused-argument
        """Return a uuid4 session id.

        Returns:
            str: A unique UUID string value.
        """
        return str(uuid.uuid4())

    def heartbeat(self) -> None:
        """Start heartbeat process."""
        self.service_thread(name='heartbeat', target=self.heartbeat_monitor)

    def heartbeat_monitor(self) -> None:
        """Publish heartbeat on timer."""
        self.log.info('feature=service, event=heartbeat-monitor-started')
        while True:
            # shut down service if heartbeat commands stop arriving from the broker
            if self.heartbeat_watchdog > (
                int(self.args.tc_svc_hb_timeout_seconds) / int(self.heartbeat_sleep_time)
            ):
                self.log.error(
                    'feature=service, event=missed-heartbeat, action=shutting-service-down'
                )
                self.process_shutdown_command({'reason': 'Missed heartbeat commands.'})
                break
            time.sleep(self.heartbeat_sleep_time)
            self.heartbeat_watchdog += 1

    def increment_metric(self, label: str, value: Optional[int] = 1) -> None:
        """Increment a metric if already exists.

        Args:
            label: The metric label (e.g., hits) to increment.
            value: The increment value. Defaults to 1.
        """
        if self._metrics.get(label) is not None:
            self._metrics[label] += value

    def listen(self) -> None:
        """List for message coming from broker."""
        self.message_broker.add_on_connect_callback(self.on_connect_handler)
        self.message_broker.add_on_message_callback(
            self.on_message_handler, topics=[self.args.tc_svc_server_topic]
        )
        self.message_broker.register_callbacks()

        # start listener thread
        self.service_thread(name='broker-listener', target=self.message_broker.connect)

    def loop_forever(self, sleep: Optional[int] = 1) -> bool:
        """Block and wait for shutdown.

        Args:
            sleep: The amount of time to sleep between iterations. Defaults to 1.

        Returns:
            Bool: Returns True until shutdown received.
        """
        while True:
            deadline = time.time() + sleep
            while time.time() < deadline:
                if self.message_broker.shutdown:
                    return False
                time.sleep(1)
            return True

    @property
    def metrics(self) -> dict:
        """Return current metrics."""
        # TODO: move to trigger command and handle API Service
        if self._metrics.get('Active Playbooks') is not None:
            self.update_metric('Active Playbooks', len(self.configs))
        return self._metrics

    @metrics.setter
    def metrics(self, metrics: dict):
        """Return current metrics."""
        if isinstance(metrics, dict):
            self._metrics = metrics
        else:
            self.log.error('feature=service, event=invalid-metrics')

    def on_connect_handler(
        self, client, userdata, flags, rc  # pylint: disable=unused-argument
    ) -> None:
        """On connect method for mqtt broker."""
        self.log.info(
            f'feature=service, event=topic-subscription, topic={self.args.tc_svc_server_topic}'
        )
        self.message_broker.client.subscribe(self.args.tc_svc_server_topic)
        self.message_broker.client.disable_logger()

    def on_message_handler(
        self, client, userdata, message  # pylint: disable=unused-argument
    ) -> None:
        """On message for mqtt."""
        try:
            # messages on server topic must be json objects
            m = json.loads(message.payload)
        except ValueError:
            self.log.warning(
                f'feature=service, event=parsing-issue, message="""{message.payload}"""'
            )
            return

        # use the command to call the appropriate method defined in command_map
        command: str = m.get('command', 'invalid').lower()
        trigger_id: Optional[int] = m.get('triggerId')
        if trigger_id is not None:
            # coerce trigger_id to int in case a string was provided (testing framework)
            trigger_id = int(trigger_id)
        self.log.info(f'feature=service, event=command-received, command="{command}"')

        # create unique session id to be used as thread name
        # and stored as property of thread for logging emit
        session_id = self.create_session_id()

        # get the target method from command_map for the current command
        thread_method = self.command_map.get(command, self.process_invalid_command)
        self.service_thread(
            # use session_id as thread name to provide easy debugging per thread
            name=session_id,
            target=thread_method,
            args=(m,),
            session_id=session_id,
            trigger_id=trigger_id,
        )

    def process_heartbeat_command(self, message: dict) -> None:  # pylint: disable=unused-argument
        """Process the HeartBeat command.

        .. code-block:: python
            :linenos:
            :lineno-start: 1

            {
                "command": "Heartbeat",
                "metric": {},
                "memoryPercent": 0,
                "cpuPercent": 0
            }

        Args:
            message: The message payload from the server topic.
        """
        self.heartbeat_watchdog = 0

        # send heartbeat -acknowledge- command
        response = {'command': 'Heartbeat', 'metric': self.metrics}
        self.message_broker.publish(
            message=json.dumps(response), topic=self.args.tc_svc_client_topic
        )
        self.log.info(f'feature=service, event=heartbeat-sent, metrics={self.metrics}')

    def process_logging_change_command(self, message: dict) -> None:
        """Process the LoggingChange command.

        .. code-block:: python
            :linenos:
            :lineno-start: 1

            {
                "command": "LoggingChange",
                "level": "DEBUG"
            }

        Args:
            message: The message payload from the server topic.
        """
        level: str = message.get('level')
        self.log.info(f'feature=service, event=logging-change, level={level}')
        self.logger.update_handler_level(level)

    def process_invalid_command(self, message: dict) -> None:
        """Process all invalid commands.

        Args:
            message: The message payload from the server topic.
        """
        self.log.warning(
            f'feature=service, event=invalid-command-received, message="""({message})""".'
        )

    def process_shutdown_command(self, message: dict) -> None:
        """Implement parent method to process the shutdown command.

        .. code-block:: python
            :linenos:
            :lineno-start: 1

            {
                "command": "Shutdown",
                "reason": "Service disabled by user."
            }

        Args:
            message: The message payload from the server topic.
        """
        reason = message.get('reason') or (
            'A shutdown command was received on server topic. Service is shutting down.'
        )
        self.log.info(f'feature=service, event=shutdown, reason={reason}')

        # acknowledge shutdown command
        self.message_broker.publish(
            json.dumps({'command': 'Acknowledged', 'type': 'Shutdown'}),
            self.args.tc_svc_client_topic,
        )

        # call App shutdown callback
        if callable(self.shutdown_callback):
            try:
                # call callback for shutdown and handle exceptions to protect thread
                self.shutdown_callback()  # pylint: disable=not-callable
            except Exception as e:
                self.log.error(
                    f'feature=service, event=shutdown-callback-error, error="""({e})""".'
                )
                self.log.trace(traceback.format_exc())

        # unsubscribe and disconnect from the broker
        self.message_broker.client.unsubscribe(self.args.tc_svc_server_topic)
        self.message_broker.client.disconnect()

        # update shutdown flag
        self.message_broker.shutdown = True

        # delay shutdown to give App time to cleanup
        time.sleep(5)
        self.tcex.exit(0)  # final shutdown in case App did not

    @property
    def ready(self) -> bool:
        """Return ready boolean."""
        return self._ready

    @ready.setter
    def ready(self, bool_val: bool):
        """Set ready boolean."""
        if isinstance(bool_val, bool) and bool_val is True:
            # wait until connected to send ready command
            while not self.message_broker._connected:
                if self.message_broker.shutdown:
                    break
                time.sleep(1)
            else:  # pylint: disable=useless-else-on-loop
                self.log.info('feature=service, event=service-ready')
                ready_command = {'command': 'Ready'}
                if self.ij.runtime_level.lower() in ['apiservice']:
                    ready_command['discoveryTypes'] = self.ij.service_discovery_types
                self.message_broker.publish(
                    json.dumps(ready_command), self.args.tc_svc_client_topic
                )
                self._ready = True

    def service_thread(
        self,
        name: str,
        target: Callable[[], bool],
        args: Optional[tuple] = None,
        kwargs: Optional[dict] = None,
        session_id: Optional[str] = None,
        trigger_id: Optional[int] = None,
    ) -> None:
        """Start a message thread.

        Args:
            name: The name of the thread.
            target: The method to call for the thread.
            args: The args to pass to the target method.
            kwargs: Additional args.
            session_id: The current session id.
            trigger_id: The current trigger id.
        """
        self.log.info(f'feature=service, event=service-thread-creation, name={name}')
        args = args or ()
        try:
            t = threading.Thread(name=name, target=target, args=args, kwargs=kwargs, daemon=True)
            # add session_id to thread to use in logger emit
            t.session_id = session_id
            # add trigger_id to thread to use in logger emit
            t.trigger_id = trigger_id
            t.start()
        except Exception:
            self.log.trace(traceback.format_exc())

    @property
    def session_id(self) -> Optional[str]:
        """Return the current session_id."""
        if not hasattr(threading.current_thread(), 'session_id'):
            threading.current_thread().session_id = self.create_session_id()
        return threading.current_thread().session_id

    @property
    def thread_name(self) -> str:
        """Return a uuid4 session id."""
        return threading.current_thread().name

    @property
    def trigger_id(self) -> Optional[int]:
        """Return the current trigger_id."""
        trigger_id = None
        if hasattr(threading.current_thread(), 'trigger_id'):
            trigger_id = threading.current_thread().trigger_id
            if trigger_id is not None:
                trigger_id = int(trigger_id)
        return trigger_id

    def update_metric(self, label: str, value: Union[int, str]) -> None:
        """Update a metric if already exists.

        Args:
            label: The metric label (e.g., hits) to update.
            value: The updated value for the metric.
        """
        if self._metrics.get(label) is not None:
            self._metrics[label] = value
| kstilwell/tcex | tcex/services/common_service.py | Python | apache-2.0 | 14,584 |
package org.glamey.scaffold.component.store.qiniu;
import com.google.common.io.Files;
import org.glamey.scaffold.BaseSpringJunit;
import org.glamey.scaffold.component.store.StoreTemplate;
import org.junit.Test;
import javax.annotation.Resource;
import java.io.File;
/**
* @author zhouyang.zhou.
*/
public class QiNiuStoreTemplateTest extends BaseSpringJunit {
@Resource
private StoreTemplate qiNiuStoreTemplate;
@Test
public void uploadFile() throws Exception {
File uploadFile = new File("C:\\tmp\\dept.json");
qiNiuStoreTemplate.upload(uploadFile, uploadFile.getName());
}
@Test
public void uploadFileBytes() throws Exception {
File uploadFile = new File("C:\\tmp\\qcache\\.gitignore");
qiNiuStoreTemplate.upload(Files.toByteArray(uploadFile), uploadFile.getName());
}
} | glameyzhou/scaffold | scaffold-component/src/test/java/org/glamey/scaffold/component/store/qiniu/QiNiuStoreTemplateTest.java | Java | apache-2.0 | 846 |
package whelk.gui;
import whelk.ScriptGenerator;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
class ReplaceRecordsPanel extends WizardCard implements ActionListener
{
final Wizard window;
private final JFileChooser chooser = new JFileChooser();
private File chosenFile;
private final JTextField chosenFileField;
public ReplaceRecordsPanel(Wizard wizard)
{
super(wizard);
window = wizard;
chooser.setPreferredSize(new Dimension(1024, 768));
Box vbox = Box.createVerticalBox();
vbox.add(new JLabel("<html>Vänligen välj en fil med par av XL-IDn (EJ KONTROLLNUMMER!).<br/>" +
"<br/>Filen måste innehålla två IDn per rad, separerade av ett mellanslag." +
"<br/>Det första IDt på varje rad ersätter det andra IDt på samma rad." +
"<br/><br/>Exempel:" +
"<br/>vd6njp162pcr3zd c9ps03vw0cpqgx6" +
"<br/>jvtbf1w0g7jhgttx fcrtxkcz4dxttr5" +
"<br/><br/>Tolkas som:" +
"<br/>c9ps03vw0cpqgx6 ersätts av vd6njp162pcr3zd." +
"<br/>fcrtxkcz4dxttr5 ersätts av jvtbf1w0g7jhgttx." +
"</html>"));
vbox.add(Box.createVerticalStrut(10));
JButton chooseFileButton = new JButton("Välj fil");
chooseFileButton.setActionCommand("open");
chooseFileButton.addActionListener(this);
vbox.add(chooseFileButton);
vbox.add(Box.createVerticalStrut(10));
chosenFileField = new JTextField();
chosenFileField.setEditable(false);
vbox.add(chosenFileField);
vbox.add(Box.createVerticalStrut(10));
add(vbox);
}
@Override
protected void beforeNext()
{
Set<String> ids = new HashSet<>();
try (BufferedReader reader = new BufferedReader(new FileReader(chosenFile)))
{
for (String line; (line = reader.readLine()) != null; )
{
ids.add(line);
}
} catch (Throwable e) {
Wizard.exitFatal(e);
}
try
{
setParameterForNextCard(ScriptGenerator.generateReplaceRecordsScript(ids));
} catch (IOException ioe)
{
Wizard.exitFatal(ioe);
}
}
@Override
void onShow(Object parameterFromPreviousCard)
{
setNextCard(Wizard.RUN);
chosenFile = null;
disableNext();
}
@Override
public void actionPerformed(ActionEvent actionEvent)
{
if (actionEvent.getActionCommand().equals("open"))
{
int returnVal = chooser.showOpenDialog(window);
if(returnVal == JFileChooser.APPROVE_OPTION)
{
chosenFile = chooser.getSelectedFile();
chosenFileField.setText( chooser.getSelectedFile().getName() );
enableNext();
}
}
}
} | libris/librisxl | gui-whelktool/src/main/java/whelk/gui/ReplaceRecordsPanel.java | Java | apache-2.0 | 3,153 |
using System;
using System.Threading;
using bytePassion.Lib.Communication.State;
using bytePassion.Lib.ConcurrencyLib;
using OQF.AnalysisAndProgress.ProgressUtils;
using OQF.Bot.Contracts;
using OQF.Bot.Contracts.Coordination;
using OQF.Bot.Contracts.GameElements;
using OQF.Bot.Contracts.Moves;
using OQF.PlayerVsBot.Contracts;
using OQF.Utils.Enum;
namespace OQF.PlayerVsBot.GameLogic
{
/// <summary>
/// Coordinates a single Player-vs-Bot Quoridor game: owns the game-loop
/// thread, forwards human moves to it and republishes board-state, winner
/// and game-status updates as events.
/// </summary>
public class GameService : IGameService
{
    // Construction-time settings; never change for the lifetime of the service.
    private readonly bool disableBotTimeout;
    private readonly ISharedStateWriteOnly<bool> isBoardRotatedVariable;

    /// <summary>Raised whenever the game loop publishes a new board state.</summary>
    public event Action<BoardState> NewBoardStateAvailable;
    /// <summary>Raised for every debug message emitted by the bot.</summary>
    public event Action<string> NewDebugMsgAvailable;
    /// <summary>Raised once a winner (with reason and, if any, the offending move) is known.</summary>
    public event Action<Player, WinningReason, Move> WinnerAvailable;
    /// <summary>Raised whenever <see cref="CurrentGameStatus"/> changes.</summary>
    public event Action<GameStatus> NewGameStatusAvailable;

    // Queue of human moves consumed by the game-loop thread.
    // (Field name "humenMoves" is a historic typo, kept as-is.)
    private TimeoutBlockingQueue<Move> humenMoves;
    private IGameLoopThread gameLoopThread; // null while no game is loaded
    private IQuoridorBot quoridorBot;
    private BoardState currentBoardState;
    private GameStatus currentGameStatus;

    public GameService(bool disableBotTimeout, ISharedStateWriteOnly<bool> isBoardRotatedVariable)
    {
        this.disableBotTimeout = disableBotTimeout;
        this.isBoardRotatedVariable = isBoardRotatedVariable;
        CurrentBoardState = null;
        gameLoopThread = null;
        CurrentGameStatus = GameStatus.Unloaded;
    }

    /// <summary>
    /// Latest board state; setting a different value raises
    /// <see cref="NewBoardStateAvailable"/>.
    /// </summary>
    public BoardState CurrentBoardState
    {
        get { return currentBoardState; }
        private set
        {
            if (value != currentBoardState)
            {
                currentBoardState = value;
                NewBoardStateAvailable?.Invoke(currentBoardState);
            }
        }
    }

    /// <summary>
    /// Current game status; setting a different value raises
    /// <see cref="NewGameStatusAvailable"/>.
    /// </summary>
    public GameStatus CurrentGameStatus
    {
        get { return currentGameStatus; }
        private set
        {
            if (currentGameStatus != value)
            {
                currentGameStatus = value;
                NewGameStatusAvailable?.Invoke(currentGameStatus);
            }
        }
    }

    /// <summary>Board side the human occupies in the current game.</summary>
    public PlayerType HumanPlayerPosition { get; private set; }

    /// <summary>
    /// Starts a new game against the given bot on a fresh thread. Any game
    /// already running is stopped first. When the bot timeout is disabled,
    /// the move-time constraint is replaced by an infinite timeout.
    /// </summary>
    public void CreateGame(IQuoridorBot uninitializedBot, string botName, GameConstraints gameConstraints,
                           PlayerType startingPosition, QProgress initialProgress)
    {
        HumanPlayerPosition = startingPosition;
        // Rotate the board view when the human plays from the top side.
        isBoardRotatedVariable.Value = startingPosition == PlayerType.TopPlayer;

        if (gameLoopThread != null)
        {
            StopGame();
        }

        var finalGameConstraints = disableBotTimeout
            ? new GameConstraints(Timeout.InfiniteTimeSpan,gameConstraints.MaximalMovesPerPlayer)
            : gameConstraints;

        quoridorBot = uninitializedBot;
        quoridorBot.DebugMessageAvailable += OnDebugMessageAvailable;
        humenMoves = new TimeoutBlockingQueue<Move>(200);

        // Which loop implementation runs depends on who moves first.
        gameLoopThread = startingPosition == PlayerType.BottomPlayer
            ? (IGameLoopThread) new GameLoopThreadPvB(quoridorBot, botName, humenMoves, finalGameConstraints, initialProgress)
            : (IGameLoopThread) new GameLoopThreadBvP(quoridorBot, botName, humenMoves, finalGameConstraints, initialProgress);

        gameLoopThread.NewBoardStateAvailable += OnNewBoardStateAvailable;
        gameLoopThread.WinnerAvailable += OnWinnerAvailable;

        CurrentGameStatus = GameStatus.Active;
        new Thread(gameLoopThread.Run).Start();
    }

    // Relays the winner to subscribers, then marks the game finished.
    private void OnWinnerAvailable(Player player, WinningReason winningReason, Move invalidMove)
    {
        WinnerAvailable?.Invoke(player, winningReason, invalidMove);
        CurrentGameStatus = GameStatus.Finished;
    }

    // Board updates from the loop thread flow through the property so the
    // change event fires.
    private void OnNewBoardStateAvailable (BoardState boardState)
    {
        CurrentBoardState = boardState;
    }

    private void OnDebugMessageAvailable(string s)
    {
        NewDebugMsgAvailable?.Invoke(s);
    }

    /// <summary>Enqueues a move made by the human player for the game loop.</summary>
    public void ReportHumanMove(Move move)
    {
        humenMoves.Put(move);
    }

    /// <summary>
    /// Stops a running game: unsubscribes from bot/loop events, stops the
    /// loop thread and clears the board. Always ends in the Unloaded status.
    /// </summary>
    public void StopGame()
    {
        if (gameLoopThread != null)
        {
            // Unsubscribe before/around Stop so no events fire into a torn-down service.
            quoridorBot.DebugMessageAvailable -= OnDebugMessageAvailable;
            gameLoopThread.Stop();
            gameLoopThread.NewBoardStateAvailable -= OnNewBoardStateAvailable;
            gameLoopThread.WinnerAvailable -= OnWinnerAvailable;
            gameLoopThread = null;
            CurrentBoardState = null;
        }
        CurrentGameStatus = GameStatus.Unloaded;
    }
}
}
| bytePassion/OpenQuoridorFramework | OpenQuoridorFramework/OQF.PlayerVsBot.GameLogic/GameService.cs | C# | apache-2.0 | 4,020 |
/*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cd.go.contrib.elasticagents.docker.requests;
import cd.go.contrib.elasticagents.docker.*;
import cd.go.contrib.elasticagents.docker.executors.JobCompletionRequestExecutor;
import cd.go.contrib.elasticagents.docker.models.JobIdentifier;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.Map;
public class JobCompletionRequest {
private static final Gson GSON = new GsonBuilder().excludeFieldsWithoutExposeAnnotation()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.create();
@Expose
@SerializedName("elastic_agent_id")
private String elasticAgentId;
@Expose
@SerializedName("job_identifier")
private JobIdentifier jobIdentifier;
@Expose
@SerializedName("elastic_agent_profile_properties")
private Map<String, String> properties;
@Expose
@SerializedName("cluster_profile_properties")
private ClusterProfileProperties clusterProfileProperties;
public JobCompletionRequest() {
}
public JobCompletionRequest(String elasticAgentId, JobIdentifier jobIdentifier, Map<String, String> properties, Map<String, String> clusterProfile) {
this.elasticAgentId = elasticAgentId;
this.jobIdentifier = jobIdentifier;
this.properties = properties;
this.clusterProfileProperties = ClusterProfileProperties.fromConfiguration(clusterProfile);
}
public JobCompletionRequest(String elasticAgentId, JobIdentifier jobIdentifier, Map<String, String> properties, ClusterProfileProperties clusterProfileProperties) {
this.elasticAgentId = elasticAgentId;
this.jobIdentifier = jobIdentifier;
this.properties = properties;
this.clusterProfileProperties = clusterProfileProperties;
}
public static JobCompletionRequest fromJSON(String json) {
JobCompletionRequest jobCompletionRequest = GSON.fromJson(json, JobCompletionRequest.class);
return jobCompletionRequest;
}
public String getElasticAgentId() {
return elasticAgentId;
}
public JobIdentifier jobIdentifier() {
return jobIdentifier;
}
public ClusterProfileProperties getClusterProfileProperties() {
return clusterProfileProperties;
}
public Map<String, String> getProperties() {
return properties;
}
public RequestExecutor executor(AgentInstances<DockerContainer> agentInstances, PluginRequest pluginRequest) {
return new JobCompletionRequestExecutor(this, agentInstances, pluginRequest);
}
@Override
public String toString() {
return "JobCompletionRequest{" +
"elasticAgentId='" + elasticAgentId + '\'' +
", jobIdentifier=" + jobIdentifier +
", properties=" + properties +
", clusterProfileProperties=" + clusterProfileProperties +
'}';
}
}
| gocd-contrib/docker-elastic-agents | src/main/java/cd/go/contrib/elasticagents/docker/requests/JobCompletionRequest.java | Java | apache-2.0 | 3,654 |
/*
* Copyright 2015 Gerald Muecke, gerald.muecke@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.inkstand.http.undertow;
import io.undertow.Undertow;
import io.undertow.security.api.AuthenticationMechanism;
import io.undertow.security.api.AuthenticationMode;
import io.undertow.security.handlers.AuthenticationCallHandler;
import io.undertow.security.handlers.AuthenticationConstraintHandler;
import io.undertow.security.handlers.AuthenticationMechanismsHandler;
import io.undertow.security.handlers.SecurityInitialHandler;
import io.undertow.security.idm.IdentityManager;
import io.undertow.security.impl.BasicAuthenticationMechanism;
import io.undertow.server.HttpHandler;
import io.undertow.servlet.Servlets;
import io.undertow.servlet.api.DeploymentInfo;
import io.undertow.servlet.api.DeploymentManager;
import java.util.Collections;
import java.util.List;
import javax.annotation.Priority;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.ServletException;
import io.inkstand.ProtectedService;
import io.inkstand.InkstandRuntimeException;
import io.inkstand.config.WebServerConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provider of an Undertow WebServer instance with a specific deployment configuration. The deployment configuration is
* injected itself and may be provided by an implementation of {@link UndertowDeploymentProvider}. The {@link Undertow}
* instance provided by this provider is only configured, but not started.
*
* @author <a href="mailto:gerald@inkstand.io">Gerald Mücke</a>
*/
@Singleton
@Priority(0)
@ProtectedService
public class AuthenticatingUndertowWebServerProvider {

    /**
     * SLF4J Logger for this class.
     */
    // Fixed: the logger was previously created for UndertowWebServerProvider.class,
    // so log records were attributed to the wrong class.
    private static final Logger LOG = LoggerFactory.getLogger(AuthenticatingUndertowWebServerProvider.class);

    @Inject
    private WebServerConfiguration config;

    @Inject
    private DeploymentInfo deploymentInfo;

    @Inject
    private IdentityManager identityManager;

    /**
     * Produces a configured — but not yet started — {@link Undertow} server
     * whose servlet deployment is protected by HTTP BASIC authentication
     * against the injected {@link IdentityManager}.
     *
     * @return the configured Undertow instance
     * @throws InkstandRuntimeException
     *             if the servlet deployment cannot be started
     */
    @Produces
    public Undertow getLdapAuthUndertow() {
        deploymentInfo.setIdentityManager(identityManager);
        final DeploymentManager deploymentManager = Servlets.defaultContainer().addDeployment(deploymentInfo);
        deploymentManager.deploy();
        try {
            // Fixed: the message previously ended with a dangling " at " that had
            // no matching placeholder/argument.
            LOG.info("Creating service endpoint {}:{}/{} for {}", config.getBindAddress(), config.getPort(),
                    deploymentInfo.getContextPath(), deploymentInfo.getDeploymentName());
            return Undertow.builder().addHttpListener(config.getPort(), config.getBindAddress())
                    .setHandler(addSecurity(deploymentManager.start())).build();
        } catch (final ServletException e) {
            throw new InkstandRuntimeException(e);
        }
    }

    /**
     * Wraps the given handler in the Undertow security handler chain:
     * authentication call/constraint handlers, BASIC auth mechanism and a
     * pro-active security initial handler.
     *
     * @param toWrap
     *            the handler to protect
     * @return the outermost handler of the security chain
     */
    HttpHandler addSecurity(final HttpHandler toWrap) {
        HttpHandler handler = toWrap;
        handler = new AuthenticationCallHandler(handler);
        handler = new AuthenticationConstraintHandler(handler);
        final List<AuthenticationMechanism> mechanisms = Collections
                .<AuthenticationMechanism> singletonList(new BasicAuthenticationMechanism("My Realm"));
        handler = new AuthenticationMechanismsHandler(handler, mechanisms);
        handler = new SecurityInitialHandler(AuthenticationMode.PRO_ACTIVE, identityManager, handler);
        return handler;
    }
}
| rolandio/inkstand | inkstand-http-undertow/src/main/java/io/inkstand/http/undertow/AuthenticatingUndertowWebServerProvider.java | Java | apache-2.0 | 3,953 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/metadata_service.proto
package com.google.cloud.aiplatform.v1;
/**
 * Accessor interface for {@code google.cloud.aiplatform.v1.DeleteContextRequest}.
 * Generated by the protocol buffer compiler from
 * {@code google/cloud/aiplatform/v1/metadata_service.proto} — do not edit by hand;
 * each proto field exposes a value accessor and, for strings, a bytes accessor.
 */
public interface DeleteContextRequestOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.DeleteContextRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   *
   *
   * <pre>
   * Required. The resource name of the Context to delete.
   * Format:
   * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  java.lang.String getName();

  /**
   *
   *
   * <pre>
   * Required. The resource name of the Context to delete.
   * Format:
   * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  com.google.protobuf.ByteString getNameBytes();

  /**
   *
   *
   * <pre>
   * The force deletion semantics is still undefined.
   * Users should not use this field.
   * </pre>
   *
   * <code>bool force = 2;</code>
   *
   * @return The force.
   */
  boolean getForce();

  /**
   *
   *
   * <pre>
   * Optional. The etag of the Context to delete.
   * If this is provided, it must match the server's etag. Otherwise, the
   * request will fail with a FAILED_PRECONDITION.
   * </pre>
   *
   * <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The etag.
   */
  java.lang.String getEtag();

  /**
   *
   *
   * <pre>
   * Optional. The etag of the Context to delete.
   * If this is provided, it must match the server's etag. Otherwise, the
   * request will fail with a FAILED_PRECONDITION.
   * </pre>
   *
   * <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for etag.
   */
  com.google.protobuf.ByteString getEtagBytes();
}
| googleapis/java-aiplatform | proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/DeleteContextRequestOrBuilder.java | Java | apache-2.0 | 2,785 |
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Some useful getters for thread local request style validation.
"""
def pecan_getter(parm):
    """Return the thread-local pecan request object.

    Args:
        parm: Ignored; present only to satisfy the stoplight getter
            call signature.

    Returns:
        The current ``pecan.request`` thread-local proxy.
    """
    # Imported lazily so that modules which never validate requests do not
    # require pecan at import time; importlib.import_module is the
    # documented replacement for the low-level __import__ hook.
    import importlib
    return importlib.import_module('pecan').request
| obulpathi/poppy | poppy/transport/validators/stoplight/helpers.py | Python | apache-2.0 | 823 |
"""
* Copyright 2007 Fred Sauer
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
import AbstractLocation
"""*
* A position represented by a left (x) and top (y) coordinate.
"""
class CoordinateLocation(AbstractLocation):
    """A position represented by a left (x) and top (y) coordinate."""

    def __init__(self, left, top):
        self.left = left
        self.top = top

    def getLeft(self):
        """Return the left (x) coordinate of this location."""
        # Bug fix: previously returned the bare name `left`, which raised
        # NameError at call time; the value is stored on the instance.
        return self.left

    def getTop(self):
        """Return the top (y) coordinate of this location."""
        # Bug fix: previously returned the bare name `top` (NameError).
        return self.top
| jaredly/pyjamas | library/pyjamas/dnd/util/CoordinateLocation.py | Python | apache-2.0 | 1,133 |
namespace TreeConstructionFromQuartets.Model
{
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// A named set of partitions together with the quartets evaluated against
/// it and the bookkeeping counters (satisfied / violated / deferred /
/// isolated) used to score the set.
/// </summary>
public class PartitionSet
{
    /// <summary>Creates an empty, zero-scored partition set with the given name.</summary>
    public PartitionSet(string PartitionSetName)
    {
        this._PartitionSetName = PartitionSetName;
        this._Final_Score = 0;
        this._IsolatedCount = 0;
        this._ViotatedCount = 0;
        this._SatisfiedCount = 0;
        this._DifferedCount = 0;
        this._taxValueForGainCalculation = string.Empty;
        this._Gain = 0;
        this.PartitionList = new List<Partition>();
        this._ListQuatrets = new List<Quartet>();
    }

    /// <summary>
    /// Copying constructor: takes scalar state as-is and performs a deep copy
    /// of the partition list (including each taxa, recursively cloning any
    /// nested <see cref="PartitionSet"/>) and of the quartet list, so the new
    /// instance shares no mutable state with the source collections.
    /// </summary>
    public PartitionSet(string PartitionSetName, int _Final_Score, int _IsolatedCount, int _ViotatedCount, int _SatisfiedCount, int _DifferedCount, string _taxValueForGainCalculation, int _Gain, List<Partition> PartitionList, List<Quartet> _ListQuatrets)
    {
        this._PartitionSetName = PartitionSetName;
        this._Final_Score = _Final_Score;
        this._IsolatedCount = _IsolatedCount;
        this._ViotatedCount = _ViotatedCount;
        this._SatisfiedCount = _SatisfiedCount;
        this._DifferedCount = _DifferedCount;
        this._taxValueForGainCalculation = _taxValueForGainCalculation;
        this._Gain = _Gain;
        // Deep copy: new Partition per entry, new Taxa per entry, and a
        // recursively-copied PartitionSet for taxa that carry one.
        this.PartitionList = new List<Partition>(PartitionList.Select(x => new Partition(x._PartitionName)
        {
            _PartitionName = x._PartitionName,
            TaxaList = new List<Taxa>(x.TaxaList.Select(m => new Taxa()
            {
                _Taxa_Value = m._Taxa_Value,
                _Quartet_Name = m._Quartet_Name,
                _Taxa_ValuePosition_In_Quartet = m._Taxa_ValuePosition_In_Quartet,
                _Gain = m._Gain,
                _CumulativeGain = m._CumulativeGain,
                IsFreeze = m.IsFreeze,
                _IsolatedCount = m._IsolatedCount,
                _ViotatedCount = m._ViotatedCount,
                _DifferedCount = m._DifferedCount,
                _SatisfiedCount = m._SatisfiedCount,
                // Recursive clone; null-propagated when the taxa has no nested set.
                _TaxaPartitionSet = m._TaxaPartitionSet != null ? new PartitionSet(m._TaxaPartitionSet._PartitionSetName, m._TaxaPartitionSet._Final_Score, m._TaxaPartitionSet._IsolatedCount, m._TaxaPartitionSet._ViotatedCount, m._TaxaPartitionSet._SatisfiedCount, m._TaxaPartitionSet._DifferedCount, m._TaxaPartitionSet._taxValueForGainCalculation, m._TaxaPartitionSet._Gain, m._TaxaPartitionSet.PartitionList, m._TaxaPartitionSet._ListQuatrets) : null,
                StepK = m.StepK
            }))
        }));
        // Quartets are copied field-by-field into fresh instances.
        this._ListQuatrets = new List<Quartet>(_ListQuatrets.Select(x => new Quartet()
        {
            _First_Taxa_Value = x._First_Taxa_Value,
            _Second_Taxa_Value = x._Second_Taxa_Value,
            _Third_Taxa_Value = x._Third_Taxa_Value,
            _Fourth_Taxa_Value = x._Fourth_Taxa_Value,
            _Quartet_Name = x._Quartet_Name,
            _Quartet_Input = x._Quartet_Input,
            _Quartet_LeftPart = x._Quartet_LeftPart,
            _Quartet_LeftPartReverse = x._Quartet_LeftPartReverse,
            _Quartet_RightPart = x._Quartet_RightPart,
            _Quartet_RightPartReverse = x._Quartet_RightPartReverse,
            _isDistinct = x._isDistinct,
            _Frequency = x._Frequency,
            _DuplicateQuatrets = x._DuplicateQuatrets,
            _PartitionStatus = x._PartitionStatus,
            _ConsistancyStatus = x._ConsistancyStatus,
            _TaxaSplitLeft = x._TaxaSplitLeft,
            _TaxaSplitRight = x._TaxaSplitRight
        }));
    }

    public string _PartitionSetName { get; set; }
    // Taxa value the current gain was calculated for.
    public string _taxValueForGainCalculation { get; set; }
    public int _Gain { get; set; }
    public int _Final_Score { get; set; }
    public int _IsolatedCount { get; set; }
    public int _ViotatedCount { get; set; }
    public int _DifferedCount { get; set; }
    public int _SatisfiedCount { get; set; }
    public List<Quartet> _ListQuatrets { get; set; }

    // Backing field for PartitionList.
    private List<Partition> _PartitionList = new List<Partition>();

    /// <summary>The partitions making up this set.</summary>
    public List<Partition> PartitionList
    {
        get
        {
            return _PartitionList;
        }
        set
        {
            _PartitionList = value;
        }
    }
}
}
| tanvirehsan/TreeConstructionFromQuartets | TreeConstructionFromQuartets/Model/PartitionSet.cs | C# | apache-2.0 | 4,522 |
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module SecuritycenterV1beta1
# Security Command Center API
#
# Security Command Center API provides access to temporal views of assets and
# findings within an organization.
#
# @example
# require 'google/apis/securitycenter_v1beta1'
#
# Securitycenter = Google::Apis::SecuritycenterV1beta1 # Alias the module
# service = Securitycenter::SecurityCommandCenterService.new
#
# @see https://console.cloud.google.com/apis/api/securitycenter.googleapis.com/overview
class SecurityCommandCenterService < Google::Apis::Core::BaseService
# @return [String]
# API key. Your API key identifies your project and provides you with API access,
# quota, and reports. Required unless you provide an OAuth 2.0 token.
attr_accessor :key
# @return [String]
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
attr_accessor :quota_user
# Configures the generated client with the Security Command Center API
# root endpoint and the relative path used for batch requests.
def initialize
  super('https://securitycenter.googleapis.com/', '')
  @batch_path = 'batch'
end
# Gets the settings for an organization.
# @param [String] name
# Required. Name of the organization to get organization settings for. Its
# format is "organizations/[organization_id]/organizationSettings".
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::OrganizationSettings] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::OrganizationSettings]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_organization_organization_settings(name, fields: nil, quota_user: nil, options: nil, &block)
  # GET v1beta1/{+name}, decoding the payload as OrganizationSettings.
  cmd = make_simple_command(:get, 'v1beta1/{+name}', options)
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::OrganizationSettings::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::OrganizationSettings
  cmd.params['name'] = name unless name.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Updates an organization's settings.
# @param [String] name
# The relative resource name of the settings. See: https://cloud.google.com/apis/
# design/resource_names#relative_resource_name Example: "organizations/`
# organization_id`/organizationSettings".
# @param [Google::Apis::SecuritycenterV1beta1::OrganizationSettings] organization_settings_object
# @param [String] update_mask
# The FieldMask to use when updating the settings resource.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::OrganizationSettings] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::OrganizationSettings]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_organization_organization_settings(name, organization_settings_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
  # PATCH v1beta1/{+name} with an OrganizationSettings body; the response
  # is decoded back into OrganizationSettings.
  cmd = make_simple_command(:patch, 'v1beta1/{+name}', options)
  cmd.request_representation = Google::Apis::SecuritycenterV1beta1::OrganizationSettings::Representation
  cmd.request_object = organization_settings_object
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::OrganizationSettings::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::OrganizationSettings
  cmd.params['name'] = name unless name.nil?
  cmd.query['updateMask'] = update_mask unless update_mask.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Filters an organization's assets and groups them by their specified properties.
# @param [String] parent
# Required. Name of the organization to groupBy. Its format is "organizations/[
# organization_id]".
# @param [Google::Apis::SecuritycenterV1beta1::GroupAssetsRequest] group_assets_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GroupAssetsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GroupAssetsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def group_assets(parent, group_assets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # POST the GroupAssetsRequest to the assets:group endpoint and decode
  # the payload as a GroupAssetsResponse.
  cmd = make_simple_command(:post, 'v1beta1/{+parent}/assets:group', options)
  cmd.request_representation = Google::Apis::SecuritycenterV1beta1::GroupAssetsRequest::Representation
  cmd.request_object = group_assets_request_object
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::GroupAssetsResponse::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::GroupAssetsResponse
  cmd.params['parent'] = parent unless parent.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Lists an organization's assets.
# @param [String] parent
# Required. Name of the organization assets should belong to. Its format is "
# organizations/[organization_id]".
# @param [String] compare_duration
# When compare_duration is set, the ListAssetResult's "state" attribute is
# updated to indicate whether the asset was added, removed, or remained present
# during the compare_duration period of time that precedes the read_time. This
# is the time between (read_time - compare_duration) and read_time. The state
# value is derived based on the presence of the asset at the two points in time.
# Intermediate state changes between the two times don't affect the result. For
# example, the results aren't affected if the asset is removed and re-created
# again. Possible "state" values when compare_duration is specified: * "ADDED":
# indicates that the asset was not present before compare_duration, but present
# at read_time. * "REMOVED": indicates that the asset was present at the start
# of compare_duration, but not present at read_time. * "ACTIVE": indicates that
# the asset was present at both the start and the end of the time period defined
# by compare_duration and read_time. If compare_duration is not specified, then
# the only possible state is "UNUSED", which indicates that the asset is present
# at read_time.
# @param [String] field_mask
# Optional. A field mask to specify the ListAssetsResult fields to be listed in
# the response. An empty field mask will list all fields.
# @param [String] filter
# Expression that defines the filter to apply across assets. The expression is a
# list of zero or more restrictions combined via logical operators `AND` and `OR`
# . Parentheses are not supported, and `OR` has higher precedence than `AND`.
# Restrictions have the form ` ` and may have a `-` character in front of them
# to indicate negation. The fields map to those defined in the Asset resource.
# Examples include: * name * security_center_properties.resource_name *
# resource_properties.a_property * security_marks.marks.marka The supported
# operators are: * `=` for all value types. * `>`, `<`, `>=`, `<=` for integer
# values. * `:`, meaning substring matching, for strings. The supported value
# types are: * string literals in quotes. * integer literals without quotes. *
# boolean literals `true` and `false` without quotes. For example, `
# resource_properties.size = 100` is a valid filter string.
# @param [String] order_by
# Expression that defines what fields and order to use for sorting. The string
# value should follow SQL syntax: comma separated list of fields. For example: "
# name,resource_properties.a_property". The default sorting order is ascending.
# To specify descending order for a field, a suffix " desc" should be appended
# to the field name. For example: "name desc,resource_properties.a_property".
# Redundant space characters in the syntax are insignificant. "name desc,
# resource_properties.a_property" and " name desc , resource_properties.
# a_property " are equivalent.
# @param [Fixnum] page_size
# The maximum number of results to return in a single response. Default is 10,
# minimum is 1, maximum is 1000.
# @param [String] page_token
# The value returned by the last `ListAssetsResponse`; indicates that this is a
# continuation of a prior `ListAssets` call, and that the system should return
# the next page of data.
# @param [String] read_time
# Time used as a reference point when filtering assets. The filter is limited to
# assets existing at the supplied time and their values are those at that
# specific time. Absence of this field will default to the API's version of NOW.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::ListAssetsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::ListAssetsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_organization_assets(parent, compare_duration: nil, field_mask: nil, filter: nil, order_by: nil, page_size: nil, page_token: nil, read_time: nil, fields: nil, quota_user: nil, options: nil, &block)
  # Assemble the GET request against the organization's asset collection.
  command = make_simple_command(:get, 'v1beta1/{+parent}/assets', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::ListAssetsResponse::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::ListAssetsResponse
  command.params['parent'] = parent if !parent.nil?
  # Forward every supplied (non-nil) option as a query-string parameter.
  {
    'compareDuration' => compare_duration,
    'fieldMask' => field_mask,
    'filter' => filter,
    'orderBy' => order_by,
    'pageSize' => page_size,
    'pageToken' => page_token,
    'readTime' => read_time,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Runs asset discovery. The discovery is tracked with a long-running operation.
# This API can only be called with limited frequency for an organization. If it
# is called too frequently the caller will receive a TOO_MANY_REQUESTS error.
# @param [String] parent
# Required. Name of the organization to run asset discovery for. Its format is "
# organizations/[organization_id]".
# @param [Google::Apis::SecuritycenterV1beta1::RunAssetDiscoveryRequest] run_asset_discovery_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def run_organization_asset_discovery(parent, run_asset_discovery_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # POST the discovery request; the server answers with a long-running Operation.
  command = make_simple_command(:post, 'v1beta1/{+parent}/assets:runDiscovery', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::RunAssetDiscoveryRequest::Representation
  command.request_object = run_asset_discovery_request_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Operation::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Operation
  command.params['parent'] = parent if !parent.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Updates security marks.
# @param [String] name
# The relative resource name of the SecurityMarks. See: https://cloud.google.com/
# apis/design/resource_names#relative_resource_name Examples: "organizations/`
# organization_id`/assets/`asset_id`/securityMarks" "organizations/`
# organization_id`/sources/`source_id`/findings/`finding_id`/securityMarks".
# @param [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks] google_cloud_securitycenter_v1beta1_security_marks_object
# @param [String] start_time
# The time at which the updated SecurityMarks take effect.
# @param [String] update_mask
# The FieldMask to use when updating the security marks resource.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_organization_asset_security_marks(name, google_cloud_securitycenter_v1beta1_security_marks_object = nil, start_time: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
  # PATCH the SecurityMarks resource identified by `name`.
  command = make_simple_command(:patch, 'v1beta1/{+name}', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks::Representation
  command.request_object = google_cloud_securitycenter_v1beta1_security_marks_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks
  command.params['name'] = name if !name.nil?
  {
    'startTime' => start_time,
    'updateMask' => update_mask,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Starts asynchronous cancellation on a long-running operation. The server makes
# a best effort to cancel the operation, but success is not guaranteed. If the
# server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
# Clients can use Operations.GetOperation or other methods to check whether the
# cancellation succeeded or whether the operation completed despite cancellation.
# On successful cancellation, the operation is not deleted; instead, it becomes
# an operation with an Operation.error value with a google.rpc.Status.code of 1,
# corresponding to `Code.CANCELLED`.
# @param [String] name
# The name of the operation resource to be cancelled.
# @param [Google::Apis::SecuritycenterV1beta1::CancelOperationRequest] cancel_operation_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Empty] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Empty]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def cancel_operation(name, cancel_operation_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # Best-effort cancellation of a long-running operation; returns Empty.
  command = make_simple_command(:post, 'v1beta1/{+name}:cancel', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::CancelOperationRequest::Representation
  command.request_object = cancel_operation_request_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Empty::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Empty
  command.params['name'] = name if !name.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Deletes a long-running operation. This method indicates that the client is no
# longer interested in the operation result. It does not cancel the operation.
# If the server doesn't support this method, it returns `google.rpc.Code.
# UNIMPLEMENTED`.
# @param [String] name
# The name of the operation resource to be deleted.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Empty] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Empty]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def delete_organization_operation(name, fields: nil, quota_user: nil, options: nil, &block)
  # DELETE the operation resource; server replies with Empty.
  command = make_simple_command(:delete, 'v1beta1/{+name}', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Empty::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Empty
  command.params['name'] = name if !name.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Gets the latest state of a long-running operation. Clients can use this method
# to poll the operation result at intervals as recommended by the API service.
# @param [String] name
# The name of the operation resource.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Operation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Operation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_organization_operation(name, fields: nil, quota_user: nil, options: nil, &block)
  # Fetch the current state of a long-running operation.
  command = make_simple_command(:get, 'v1beta1/{+name}', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Operation::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Operation
  command.params['name'] = name if !name.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Lists operations that match the specified filter in the request. If the server
# doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name`
# binding allows API services to override the binding to use different resource
# name schemes, such as `users/*/operations`. To override the binding, API
# services can add a binding such as `"/v1/`name=users/*`/operations"` to their
# service configuration. For backwards compatibility, the default name includes
# the operations collection id, however overriding users must ensure the name
# binding is the parent resource, without the operations collection id.
# @param [String] name
# The name of the operation's parent resource.
# @param [String] filter
# The standard list filter.
# @param [Fixnum] page_size
# The standard list page size.
# @param [String] page_token
# The standard list page token.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::ListOperationsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::ListOperationsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_organization_operations(name, filter: nil, page_size: nil, page_token: nil, fields: nil, quota_user: nil, options: nil, &block)
  # List operations under the parent resource, honoring the standard
  # filter / pagination query parameters when supplied.
  command = make_simple_command(:get, 'v1beta1/{+name}', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::ListOperationsResponse::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::ListOperationsResponse
  command.params['name'] = name if !name.nil?
  {
    'filter' => filter,
    'pageSize' => page_size,
    'pageToken' => page_token,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Creates a source.
# @param [String] parent
# Required. Resource name of the new source's parent. Its format should be "
# organizations/[organization_id]".
# @param [Google::Apis::SecuritycenterV1beta1::Source] source_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Source] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Source]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def create_organization_source(parent, source_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # POST a new Source under the given organization.
  command = make_simple_command(:post, 'v1beta1/{+parent}/sources', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::Source::Representation
  command.request_object = source_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Source::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Source
  command.params['parent'] = parent if !parent.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Gets a source.
# @param [String] name
# Required. Relative resource name of the source. Its format is "organizations/[
# organization_id]/source/[source_id]".
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Source] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Source]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_organization_source(name, fields: nil, quota_user: nil, options: nil, &block)
  # Retrieve a single Source by its relative resource name.
  command = make_simple_command(:get, 'v1beta1/{+name}', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Source::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Source
  command.params['name'] = name if !name.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Gets the access control policy on the specified Source.
# @param [String] resource
# REQUIRED: The resource for which the policy is being requested. See the
# operation documentation for the appropriate value for this field.
# @param [Google::Apis::SecuritycenterV1beta1::GetIamPolicyRequest] get_iam_policy_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Policy] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Policy]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_source_iam_policy(resource, get_iam_policy_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # Read the IAM policy attached to the given Source resource.
  command = make_simple_command(:post, 'v1beta1/{+resource}:getIamPolicy', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::GetIamPolicyRequest::Representation
  command.request_object = get_iam_policy_request_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Policy::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Policy
  command.params['resource'] = resource if !resource.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Lists all sources belonging to an organization.
# @param [String] parent
# Required. Resource name of the parent of sources to list. Its format should be
# "organizations/[organization_id]".
# @param [Fixnum] page_size
# The maximum number of results to return in a single response. Default is 10,
# minimum is 1, maximum is 1000.
# @param [String] page_token
# The value returned by the last `ListSourcesResponse`; indicates that this is a
# continuation of a prior `ListSources` call, and that the system should return
# the next page of data.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::ListSourcesResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::ListSourcesResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_organization_sources(parent, page_size: nil, page_token: nil, fields: nil, quota_user: nil, options: nil, &block)
  # Page through all Sources belonging to the organization.
  command = make_simple_command(:get, 'v1beta1/{+parent}/sources', options)
  command.response_representation = Google::Apis::SecuritycenterV1beta1::ListSourcesResponse::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::ListSourcesResponse
  command.params['parent'] = parent if !parent.nil?
  {
    'pageSize' => page_size,
    'pageToken' => page_token,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Updates a source.
# @param [String] name
# The relative resource name of this source. See: https://cloud.google.com/apis/
# design/resource_names#relative_resource_name Example: "organizations/`
# organization_id`/sources/`source_id`"
# @param [Google::Apis::SecuritycenterV1beta1::Source] source_object
# @param [String] update_mask
# The FieldMask to use when updating the source resource.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Source] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Source]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def patch_organization_source(name, source_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
  # PATCH the Source, restricting the update to `update_mask` when given.
  command = make_simple_command(:patch, 'v1beta1/{+name}', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::Source::Representation
  command.request_object = source_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Source::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Source
  command.params['name'] = name if !name.nil?
  {
    'updateMask' => update_mask,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Sets the access control policy on the specified Source.
# @param [String] resource
# REQUIRED: The resource for which the policy is being specified. See the
# operation documentation for the appropriate value for this field.
# @param [Google::Apis::SecuritycenterV1beta1::SetIamPolicyRequest] set_iam_policy_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::Policy] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::Policy]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def set_source_iam_policy(resource, set_iam_policy_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # Replace the IAM policy on the given Source resource.
  command = make_simple_command(:post, 'v1beta1/{+resource}:setIamPolicy', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::SetIamPolicyRequest::Representation
  command.request_object = set_iam_policy_request_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::Policy::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::Policy
  command.params['resource'] = resource if !resource.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Returns the permissions that a caller has on the specified source.
# @param [String] resource
# REQUIRED: The resource for which the policy detail is being requested. See the
# operation documentation for the appropriate value for this field.
# @param [Google::Apis::SecuritycenterV1beta1::TestIamPermissionsRequest] test_iam_permissions_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::TestIamPermissionsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::TestIamPermissionsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def test_source_iam_permissions(resource, test_iam_permissions_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  # Ask the server which of the requested permissions the caller holds.
  command = make_simple_command(:post, 'v1beta1/{+resource}:testIamPermissions', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::TestIamPermissionsRequest::Representation
  command.request_object = test_iam_permissions_request_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::TestIamPermissionsResponse::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::TestIamPermissionsResponse
  command.params['resource'] = resource if !resource.nil?
  { 'fields' => fields, 'quotaUser' => quota_user }.each do |key, value|
    command.query[key] = value unless value.nil?
  end
  execute_or_queue_command(command, &block)
end
# Creates a finding. The corresponding source must exist for finding creation to
# succeed.
# @param [String] parent
# Required. Resource name of the new finding's parent. Its format should be "
# organizations/[organization_id]/sources/[source_id]".
# @param [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding] google_cloud_securitycenter_v1beta1_finding_object
# @param [String] finding_id
# Required. Unique identifier provided by the client within the parent scope. It
# must be alphanumeric and less than or equal to 32 characters and greater than
# 0 characters in length.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def create_organization_source_finding(parent, google_cloud_securitycenter_v1beta1_finding_object = nil, finding_id: nil, fields: nil, quota_user: nil, options: nil, &block)
  # POST a new Finding under the source; `finding_id` is the client-chosen id.
  command = make_simple_command(:post, 'v1beta1/{+parent}/findings', options)
  command.request_representation = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding::Representation
  command.request_object = google_cloud_securitycenter_v1beta1_finding_object
  command.response_representation = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding::Representation
  command.response_class = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding
  command.params['parent'] = parent if !parent.nil?
  {
    'findingId' => finding_id,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |key, value| command.query[key] = value unless value.nil? }
  execute_or_queue_command(command, &block)
end
# Filters an organization's or source's findings and groups them by the
# properties named in the request body. Grouping across every source is
# supported by passing `-` as the source id, e.g.
# /v1beta1/organizations/{organization_id}/sources/-/findings
# @param [String] parent
#   Required. Source whose findings are grouped, in the form
#   "organizations/[organization_id]/sources/[source_id]" (use `-` as the
#   source_id to group across all sources).
# @param [Google::Apis::SecuritycenterV1beta1::GroupFindingsRequest] group_findings_request_object
# @param [String] fields
#   Selector restricting which fields appear in a partial response.
# @param [String] quota_user
#   Opaque per-user string (at most 40 characters) used for server-side quota
#   accounting.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GroupFindingsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GroupFindingsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def group_findings(parent, group_findings_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'v1beta1/{+parent}/findings:group', options)
  cmd.request_representation = Google::Apis::SecuritycenterV1beta1::GroupFindingsRequest::Representation
  cmd.request_object = group_findings_request_object
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::GroupFindingsResponse::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::GroupFindingsResponse
  cmd.params['parent'] = parent if !parent.nil?
  cmd.query['fields'] = fields if !fields.nil?
  cmd.query['quotaUser'] = quota_user if !quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Lists an organization's or source's findings. To list across every source,
# pass `-` as the source id in `parent`, e.g.
# /v1beta1/organizations/{organization_id}/sources/-/findings
# @param [String] parent
#   Required. Source owning the findings, in the form
#   "organizations/[organization_id]/sources/[source_id]" (use `-` as the
#   source_id to list across all sources).
# @param [String] field_mask
#   Optional. Restricts which Finding fields appear in the response; an empty
#   mask lists all fields.
# @param [String] filter
#   Filter expression: one or more restrictions combined with `AND`/`OR`
#   (no parentheses; `OR` binds tighter than `AND`). A leading `-` negates a
#   restriction. Supported operators: `=` for all value types; `>`, `<`,
#   `>=`, `<=` for integers; `:` (substring match) for strings. Values may be
#   quoted string literals, bare integers, or the bare booleans `true`/`false`.
# @param [String] order_by
#   SQL-style comma-separated list of sort fields; append " desc" to a field
#   for descending order. Redundant spaces are insignificant.
# @param [Fixnum] page_size
#   Maximum results per response (default 10, minimum 1, maximum 1000).
# @param [String] page_token
#   Continuation token from a prior `ListFindingsResponse`.
# @param [String] read_time
#   Reference time for the listing; absent means the API's version of NOW.
# @param [String] fields
#   Selector restricting which fields appear in a partial response.
# @param [String] quota_user
#   Opaque per-user string (at most 40 characters) used for server-side quota
#   accounting.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::ListFindingsResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::ListFindingsResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_organization_source_findings(parent, field_mask: nil, filter: nil, order_by: nil, page_size: nil, page_token: nil, read_time: nil, fields: nil, quota_user: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'v1beta1/{+parent}/findings', options)
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::ListFindingsResponse::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::ListFindingsResponse
  cmd.params['parent'] = parent if !parent.nil?
  # Insertion order of this hash mirrors the original assignment order, so the
  # resulting query hash is populated identically.
  query_params = {
    'fieldMask' => field_mask,
    'filter' => filter,
    'orderBy' => order_by,
    'pageSize' => page_size,
    'pageToken' => page_token,
    'readTime' => read_time,
    'fields' => fields,
    'quotaUser' => quota_user
  }
  query_params.each { |param, value| cmd.query[param] = value unless value.nil? }
  execute_or_queue_command(cmd, &block)
end
# Creates or updates a finding. The finding's source must already exist for a
# create to succeed.
# @param [String] name
#   Relative resource name of the finding (see
#   https://cloud.google.com/apis/design/resource_names#relative_resource_name),
#   e.g. "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}".
# @param [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding] google_cloud_securitycenter_v1beta1_finding_object
# @param [String] update_mask
#   FieldMask selecting which finding fields to update; must be unset when
#   creating a finding.
# @param [String] fields
#   Selector restricting which fields appear in a partial response.
# @param [String] quota_user
#   Opaque per-user string (at most 40 characters) used for server-side quota
#   accounting.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def patch_organization_source_finding(name, google_cloud_securitycenter_v1beta1_finding_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
  # Local alias: the finding class serves as request/response representation
  # and response class alike.
  finding = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding
  cmd = make_simple_command(:patch, 'v1beta1/{+name}', options)
  cmd.request_representation = finding::Representation
  cmd.request_object = google_cloud_securitycenter_v1beta1_finding_object
  cmd.response_representation = finding::Representation
  cmd.response_class = finding
  cmd.params['name'] = name if !name.nil?
  cmd.query['updateMask'] = update_mask if !update_mask.nil?
  cmd.query['fields'] = fields if !fields.nil?
  cmd.query['quotaUser'] = quota_user if !quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Updates the state of an existing finding.
# @param [String] name
#   Required. Relative resource name of the finding (see
#   https://cloud.google.com/apis/design/resource_names#relative_resource_name),
#   e.g. "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}".
# @param [Google::Apis::SecuritycenterV1beta1::SetFindingStateRequest] set_finding_state_request_object
# @param [String] fields
#   Selector restricting which fields appear in a partial response.
# @param [String] quota_user
#   Opaque per-user string (at most 40 characters) used for server-side quota
#   accounting.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def set_organization_source_finding_state(name, set_finding_state_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'v1beta1/{+name}:setState', options)
  cmd.request_representation = Google::Apis::SecuritycenterV1beta1::SetFindingStateRequest::Representation
  cmd.request_object = set_finding_state_request_object
  cmd.response_representation = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding::Representation
  cmd.response_class = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1Finding
  cmd.params['name'] = name if !name.nil?
  cmd.query['fields'] = fields if !fields.nil?
  cmd.query['quotaUser'] = quota_user if !quota_user.nil?
  execute_or_queue_command(cmd, &block)
end
# Updates the security marks attached to a finding (or asset).
# @param [String] name
#   Relative resource name of the SecurityMarks (see
#   https://cloud.google.com/apis/design/resource_names#relative_resource_name),
#   e.g. "organizations/{organization_id}/assets/{asset_id}/securityMarks" or
#   "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}/securityMarks".
# @param [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks] google_cloud_securitycenter_v1beta1_security_marks_object
# @param [String] start_time
#   Time at which the updated SecurityMarks take effect.
# @param [String] update_mask
#   FieldMask selecting which security-marks fields to update.
# @param [String] fields
#   Selector restricting which fields appear in a partial response.
# @param [String] quota_user
#   Opaque per-user string (at most 40 characters) used for server-side quota
#   accounting.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_organization_source_finding_security_marks(name, google_cloud_securitycenter_v1beta1_security_marks_object = nil, start_time: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
  # Local alias: the security-marks class serves as request/response
  # representation and response class alike.
  marks = Google::Apis::SecuritycenterV1beta1::GoogleCloudSecuritycenterV1beta1SecurityMarks
  cmd = make_simple_command(:patch, 'v1beta1/{+name}', options)
  cmd.request_representation = marks::Representation
  cmd.request_object = google_cloud_securitycenter_v1beta1_security_marks_object
  cmd.response_representation = marks::Representation
  cmd.response_class = marks
  cmd.params['name'] = name if !name.nil?
  # Insertion order mirrors the original assignment order.
  {
    'startTime' => start_time,
    'updateMask' => update_mask,
    'fields' => fields,
    'quotaUser' => quota_user
  }.each { |param, value| cmd.query[param] = value unless value.nil? }
  execute_or_queue_command(cmd, &block)
end
protected

# Applies service-level defaults — the API key and the quota-attribution
# user — to every outgoing command.
def apply_command_defaults(command)
  command.query['key'] = key if !key.nil?
  command.query['quotaUser'] = quota_user if !quota_user.nil?
end
end
end
end
end
| googleapis/google-api-ruby-client | google-api-client/generated/google/apis/securitycenter_v1beta1/service.rb | Ruby | apache-2.0 | 66,146 |
<?php
// Danish (da) localization map for the ownCloud calendar app.
// Keys are the English source strings; values are their Danish translations.
// This file is loaded by the l10n machinery — do not change the keys, and keep
// date-format patterns (ddd, MMMM, yyyy, ...) structurally identical to the
// English originals.
$TRANSLATIONS = array(
"Not all calendars are completely cached" => "Ikke alle kalendere er fuldstændig cached",
"Everything seems to be completely cached" => "Alt ser ud til at være cached",
"No calendars found." => "Der blev ikke fundet nogen kalendere.",
"No events found." => "Der blev ikke fundet nogen begivenheder.",
"Wrong calendar" => "Forkert kalender",
"You do not have the permissions to edit this event." => "Du har ikke rettigheder til at redigere denne begivenhed.",
"The file contained either no events or all events are already saved in your calendar." => "Enten indeholdt filen ingen begivenheder, eller også er alle begivenheder allerede gemt i din kalender.",
"events has been saved in the new calendar" => "begivenheder er gemt i den nye kalender",
"Import failed" => "Import mislykkedes",
"events has been saved in your calendar" => "begivenheder er gemt i din kalender",
"New Timezone:" => "Ny tidszone:",
"Timezone changed" => "Tidszone ændret",
"Invalid request" => "Ugyldig forespørgsel",
"Calendar" => "Kalender",
"Deletion failed" => "Fejl ved sletning",
"ddd d MMMM[ yyyy]{ - [ddd d] MMMM yyyy}" => "ddd d MMMM[ yyyy]{ - [ddd d] MMMM yyyy}",
"ddd d MMMM[ yyyy] HH:mm{ - [ ddd d MMMM yyyy] HH:mm}" => "ddd d MMMM[ yyyy] HH:mm{ - [ ddd d MMMM yyyy] HH:mm}",
"group" => "gruppe",
"can edit" => "kan redigere",
"ddd" => "ddd",
"ddd M/d" => "ddd M/d",
"dddd M/d" => "dddd M/d",
"MMMM yyyy" => "MMMM yyyy",
"MMM d[ yyyy]{ '–'[ MMM] d yyyy}" => "MMM d[ yyyy]{ '–'[ MMM] d yyyy}",
"dddd, MMM d, yyyy" => "dddd, MMM d, yyyy",
"Sunday" => "Søndag",
"Monday" => "Mandag",
"Tuesday" => "Tirsdag",
"Wednesday" => "Onsdag",
"Thursday" => "Torsdag",
"Friday" => "Fredag",
"Saturday" => "Lørdag",
"Sun." => "Søn.",
"Mon." => "Man.",
"Tue." => "Tir.",
"Wed." => "Ons.",
"Thu." => "Tor.",
"Fri." => "Fre.",
"Sat." => "Lør.",
"January" => "Januar",
"February" => "Februar",
"March" => "Marts",
"April" => "April",
"May" => "Maj",
"June" => "Juni",
"July" => "Juli",
"August" => "August",
"September" => "September",
"October" => "Oktober",
"November" => "November",
"December" => "December",
"Jan." => "Jan.",
"Feb." => "Feb.",
"Mar." => "Mar.",
"Apr." => "Apr.",
"May." => "Maj",
"Jun." => "Jun.",
"Jul." => "Jul.",
"Aug." => "Aug.",
"Sep." => "Sep.",
"Oct." => "Okt.",
"Nov." => "Nov.",
"Dec." => "Dec.",
"All day" => "Hele dagen",
"New Calendar" => "Ny kalender",
"Missing or invalid fields" => "Manglende eller ugyldige felter",
"Title" => "Titel",
"From Date" => "Fra dato",
"From Time" => "Fra tidspunkt",
"To Date" => "Til dato",
"To Time" => "Til tidspunkt",
"The event ends before it starts" => "Begivenheden slutter, inden den begynder",
"There was a database fail" => "Der var en fejl i databasen",
"Birthday" => "Fødselsdag",
"Business" => "Erhverv",
"Call" => "Ring",
"Clients" => "Kunder",
"Deliverer" => "Leverance",
"Holidays" => "Helligdage",
"Ideas" => "Ideer",
"Journey" => "Rejse",
"Jubilee" => "Jubilæum",
"Meeting" => "Møde",
"Other" => "Andet",
"Personal" => "Privat",
"Projects" => "Projekter",
"Questions" => "Spørgsmål",
"Work" => "Arbejde",
"by" => "af",
"unnamed" => "unavngivet",
"You do not have the permissions to update this calendar." => "Du har ikke rettigheder til at opdatere denne kalender.",
"You do not have the permissions to delete this calendar." => "Du har ikke rettigheder til at slette denne kalender.",
"You do not have the permissions to add to this calendar." => "Du har ikke rettigheder til at tilføje til denne kalender.",
"You do not have the permissions to add events to this calendar." => "Du har ikke rettigheder til at tilføje begivenheder til denne kalender.",
"You do not have the permissions to delete this event." => "Du har ikke rettigheder til at slette denne begivenhed.",
"Busy" => "Optaget",
"Does not repeat" => "Gentages ikke",
"Daily" => "Dagligt",
"Weekly" => "Ugentligt",
"Every Weekday" => "Alle hverdage",
"Bi-Weekly" => "Hver anden uge",
"Monthly" => "Månedligt",
"Yearly" => "Årligt",
"never" => "aldrig",
"by occurrences" => "efter forekomster",
"by date" => "efter dato",
"by monthday" => "efter dag i måneden",
"by weekday" => "efter ugedag",
"events week of month" => "begivenhedens uge i måneden",
"first" => "første",
"second" => "anden",
"third" => "tredje",
"fourth" => "fjerde",
"fifth" => "femte",
"last" => "sidste",
"by events date" => "efter begivenheders dato",
"by yearday(s)" => "efter dag(e) i året",
"by weeknumber(s)" => "efter ugenummer/-numre",
"by day and month" => "efter dag og måned",
"Contact birthdays" => "Kontakt fødselsdage",
"Date" => "Dato",
"Cal." => "Kal.",
"Day" => "Dag",
"Week" => "Uge",
"Month" => "Måned",
"Today" => "I dag",
"Settings" => "Indstillinger",
"Share Calendar" => "Del kalender",
"CalDav Link" => "CalDav-link",
"Download" => "Hent",
"Edit" => "Rediger",
"Delete" => "Slet",
"New calendar" => "Ny kalender",
"Edit calendar" => "Rediger kalender",
"Displayname" => "Vist navn",
"Calendar color" => "Kalenderfarve",
"Save" => "Gem",
"Submit" => "Send",
"Cancel" => "Annuller",
"Eventinfo" => "Begivenhedsinfo",
"Repeating" => "Gentagende",
"Alarm" => "Alarm",
"Attendees" => "Deltagere",
"Share" => "Del",
"Title of the Event" => "Titel på begivenheden",
"from" => "fra",
"All Day Event" => "Heldagsarrangement",
"Advanced options" => "Avancerede indstillinger",
"Location" => "Sted",
"Edit categories" => "Rediger kategorier",
"Description" => "Beskrivelse",
"Repeat" => "Gentag",
"Advanced" => "Avanceret",
"Select weekdays" => "Vælg ugedage",
"Select days" => "Vælg dage",
"and the events day of year." => "og begivenhedens dag i året.",
"and the events day of month." => "og begivenhedens dag i måneden",
"Select months" => "Vælg måneder",
"Select weeks" => "Vælg uger",
"and the events week of year." => "og begivenhedens uge i året.",
"Interval" => "Interval",
"End" => "Afslutning",
"occurrences" => "forekomster",
"create a new calendar" => "opret en ny kalender",
"Import a calendar file" => "Importer en kalenderfil",
"Please choose a calendar" => "Vælg en kalender",
"Name of new calendar" => "Navn på ny kalender",
"Take an available name!" => "Vælg et ledigt navn!",
"A Calendar with this name already exists. If you continue anyhow, these calendars will be merged." => "En kalender med dette navn findes allerede. Hvis du fortsætter alligevel, vil disse kalendere blive sammenlagt.",
"Remove all events from the selected calendar" => "Fjern alle events fra den valgte kalender",
"Import" => "Importer",
"Close Dialog" => "Luk dialog",
"Create a new event" => "Opret en ny begivenhed",
"Unshare" => "Fjern deling",
"Send Email" => "Send Email",
"Shared via calendar" => "Delt via kalender",
"View an event" => "Vis en begivenhed",
"Category" => "Kategori",
"No categories selected" => "Ingen categorier valgt",
"of" => "fra",
"Access Class" => "Adgangsklasse",
"From" => "Fra",
"at" => "kl.",
"To" => "Til",
"Your calendars" => "Dine kalendere",
"General" => "Generel",
"Timezone" => "Tidszone",
"Update timezone automatically" => "Opdater tidszone automatisk",
"Time format" => "Tidsformat",
"24h" => "24T",
"12h" => "12T",
"Start week on" => "Start ugen med",
"Cache" => "Cache",
"Clear cache for repeating events" => "Ryd cache for gentagende begivenheder",
"URLs" => "URLs",
"Calendar CalDAV syncing addresses" => "Adresser til kalendersynkronisering over CalDAV",
"more info" => "flere oplysninger",
"Primary address (Kontact et al)" => "Primær adresse (Kontakt o.a.)",
"iOS/OS X" => "iOS/OS X",
"Read only iCalendar link(s)" => "Skrivebeskyttet iCalendar-link(s)"
);
| ArcherCraftStore/ArcherVMPeridot | apps/owncloud/htdocs/apps/calendar/l10n/da.php | PHP | apache-2.0 | 7,568 |
#include <iostream>
#include <vector>
#include <cmath>
#include <algorithm>
#include <fstream>
#include "gflags/gflags.h"
#include "Common/log.h"
// --res_file: path to the prediction file; each line is "<label> <score>".
DEFINE_string(res_file, "", "predict res");
// --log_level: verbosity threshold for the LOG_* macros (default INFO).
DEFINE_int32(log_level, 2, "LogLevel :"
"0 : TRACE "
"1 : DEBUG "
"2 : INFO "
"3 : ERROR");
// Global consumed by the logging macros in Common/log.h; overwritten from
// --log_level in main() after flag parsing.
uint32_t log_level = 0;
namespace ML {
// One scored example read from the prediction file.
struct Elem {
    double p;  // predicted probability of the positive class
    double y;  // ground-truth label; treated as positive when >= 0.5
};
// Strict-weak ordering comparator: sorts examples ascending by predicted score.
bool LessThan(const Elem& a, const Elem& b) {
    return b.p > a.p;
}
// Treats two predictions as tied when their scores differ by less than 1e-10.
bool Equal(const Elem& a, const Elem& b) {
    const double diff = a.p - b.p;
    return fabs(diff) < 1.e-10;
}
// Tallies one example into the negative (n) or positive (p) counter,
// classifying by whether the label is at least 0.5.
void check(const Elem& e, int& n, int& p) {
    if (e.y >= 0.5) {
        ++p;
    } else {
        ++n;
    }
}
double logistic_loss(Elem& e)
{
if (e.y > 0.5) {
return -log(e.p);
}
else {
return -log(1-e.p);
}
}
// Computes the area under the ROC curve for a set of scored examples.
//
// NOTE: sorts `res` in place (ascending by score) — callers must not rely on
// the input order afterwards.
//
// Ties are handled by accumulating a run of equal-scored examples (cur_p,
// cur_n) and crediting each tied positive with all negatives ranked strictly
// below it plus half of the tied negatives — the standard trapezoidal tie
// correction. The accumulation is order-sensitive: counts are flushed into
// (p, n, correct) only when the score changes, and once more after the loop.
//
// Degenerate returns: 0.5 for an empty input, 1.0 when there are no
// negatives, 0.0 when there are no positives.
double auc(std::vector<Elem>& res) {
    if (res.size() <= 0)
    {
        return 0.5;
    }
    std::sort(res.begin(), res.end(), LessThan);
    int n = 0;        // negatives flushed so far (ranked strictly below the current run)
    int p = 0;        // positives flushed so far
    int cur_n = 0;    // negatives in the current tied run
    int cur_p = 0;    // positives in the current tied run
    double correct = 0;
    check(res[0], cur_n, cur_p);
    for (size_t i=1; i<res.size(); ++i)
    {
        if (Equal(res[i], res[i-1]))
        {
            // Same score as the previous example: extend the tied run.
            check(res[i], cur_n, cur_p);
        }
        else
        {
            // Score changed: credit the tied run and start a new one.
            correct += cur_p*n + 0.5*cur_p*cur_n;
            p += cur_p;
            n += cur_n;
            cur_p = 0;
            cur_n = 0;
            check(res[i], cur_n, cur_p);
        }
    }
    // Flush the final tied run.
    correct += cur_p*n + 0.5*cur_p*cur_n;
    p += cur_p;
    n += cur_n;
    LOG_INFO("POSITIVE : %d , NEGATIVE : %d, CORRECT : %lf", p, n, correct);
    if (n == 0) return 1.0;
    if (p == 0) return 0.0;
    return correct*1.0/p/n;
}
}
using namespace ML;
// Entry point: reads "<label> <score>" lines from --res_file, then prints the
// AUC and the total logistic loss over all examples.
//
// Fixes:
//  * The old `getline` + `while (!infile.eof())` pattern silently dropped the
//    last line when the file did not end with a newline; `while (getline(...))`
//    processes every extracted line exactly once.
//  * The sscanf return value is now checked, so a blank or malformed line is
//    skipped instead of re-pushing the previous example's (stale) values.
int main(int argc, char** argv)
{
    google::ParseCommandLineFlags(&argc, &argv, true);
    log_level = FLAGS_log_level;
    if (FLAGS_res_file.empty())
    {
        std::cout<< "res file is empty!" << std::endl;
        return 0;
    }
    std::ifstream infile(FLAGS_res_file.c_str());
    if (!infile)
    {
        LOG_ERROR("Load res file : %s failed!", FLAGS_res_file.c_str());
        return 0;
    }
    std::string line;
    std::vector<Elem> res;
    Elem e;
    e.p = 0.0;
    e.y = 1.0;
    double total_loss = 0;
    while (getline(infile, line))
    {
        if (sscanf(line.c_str(), "%lf %lf", &(e.y), &(e.p)) != 2)
        {
            continue;  // skip blank/malformed lines rather than reuse stale values
        }
        LOG_TRACE("Elem p : %lf, y : %lf", e.p, e.y);
        res.push_back(e);
        total_loss += logistic_loss(e);
    }
    LOG_TRACE("Res size : %lu", res.size());
    printf("[%s]AUC = %.5lf, LOSS = %.3lf\n", FLAGS_res_file.c_str(), auc(res), total_loss);
    return 0;
}
| uwroute/study | ML/eval/auc.cpp | C++ | apache-2.0 | 2,402 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;
namespace Mobet.Runtime.Cookie
{
/// <summary>
/// Static helper for writing, clearing and reading HTTP cookies on the
/// current <see cref="HttpContext"/> request/response pair.
/// </summary>
public static class CookieManager
{
    /// <summary>
    /// Writes a cookie on the current response.
    /// </summary>
    /// <param name="name">Cookie name.</param>
    /// <param name="value">Cookie value; surrounding whitespace is trimmed.
    /// A whitespace-only value produces a cookie with no value.</param>
    /// <param name="expires">Expiry time. Pass <see cref="DateTime.MinValue"/>
    /// to omit an expiry so the cookie only lives for the browser session.</param>
    public static void CreateCookie(string name, string value, DateTime expires)
    {
        var trimmed = value.Trim();
        HttpCookie cookie;
        if (trimmed.Length == 0)
        {
            cookie = new HttpCookie(name);
        }
        else
        {
            cookie = new HttpCookie(name, trimmed);
        }
        cookie.Path = "/";
        // DateTime.MinValue is the sentinel for "session cookie" — leave
        // Expires unset in that case.
        if (expires != DateTime.MinValue)
        {
            cookie.Expires = expires;
        }
        HttpContext.Current.Response.Cookies.Set(cookie);
    }

    /// <summary>
    /// Removes a cookie by rewriting it with an empty value and an expiry in
    /// the past, which makes the browser discard it.
    /// </summary>
    /// <param name="name">Cookie name.</param>
    public static void ClearCookie(string name)
    {
        CreateCookie(name, "", DateTime.Now.AddDays(-1));
    }

    /// <summary>
    /// Reads a cookie value from the current request.
    /// </summary>
    /// <param name="name">Cookie name.</param>
    /// <returns>The cookie's value, or <c>null</c> when the cookie is absent.</returns>
    public static string GetCookieValue(string name)
    {
        var cookie = HttpContext.Current.Request.Cookies[name];
        return cookie == null ? null : cookie.Value;
    }
}
}
| Mobet/Mobet-Net | Mobet-Net/Mobet/Runtime/Cookie/CookieManager.cs | C# | apache-2.0 | 1,723 |
<?php /* Partial template: renders one <tr> for a Google POS area record.
         Expects an $area object exposing id, lat, lng, radius, url, area,
         min_radius_area and radius_area; the last cell links to the
         pos/newGoogleFlow and pos/edit actions.
         (The newline right after this closing tag is swallowed by PHP, so
         this comment adds nothing to the rendered output.) */ ?>
<tr>
<td><?php echo $area->id ?></td>
<td><?php echo $area->lat ?></td>
<td><?php echo $area->lng ?></td>
<td><?php echo $area->radius ?></td>
<td width="300px"><?php echo $area->url ?></td>
<td><?php echo $area->area ?></td>
<td><?php echo $area->min_radius_area ?></td>
<td><?php echo $area->radius_area ?></td>
<td>
<?php echo link_to('Tạo flow','pos/newGoogleFlow?area='.$area->area) ?> |
<?php echo link_to('Sửa','pos/edit?id='.$area->id) ?> |
</td>
</tr>
</tr> | smart-e/lifemap | plugins/lfPosPlugin/apps/pc_backend/modules/pos/templates/_googlearea.php | PHP | apache-2.0 | 520 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Diagnostics.EngineV2
{
/// <summary>
/// "EngineV2" diagnostic incremental analyzer: computes diagnostics on demand
/// by running the registered analyzers over a project's full compilation,
/// rather than maintaining the per-document caches of the V1 engine. Results
/// are pushed to listeners through <see cref="DiagnosticAnalyzerService"/>'s
/// diagnostics-updated events.
/// </summary>
internal class DiagnosticIncrementalAnalyzer : BaseDiagnosticIncrementalAnalyzer
{
    // NOTE(review): stored but never read in this class — presumably kept for
    // parity with the V1 engine's correlation/logging; confirm before removing.
    private readonly int _correlationId;
    // Owning service; used to raise DiagnosticsUpdated events.
    private readonly DiagnosticAnalyzerService _owner;
    private readonly Workspace _workspace;
    // Supplies the DiagnosticAnalyzer instances to run for each project.
    private readonly AnalyzerManager _analyzerManager;

    public DiagnosticIncrementalAnalyzer(DiagnosticAnalyzerService owner, int correlationId, Workspace workspace, AnalyzerManager analyzerManager)
    {
        _correlationId = correlationId;
        _owner = owner;
        _workspace = workspace;
        _analyzerManager = analyzerManager;
    }

    #region IIncrementalAnalyzer
    // Document-level analysis is a no-op in this engine: everything is
    // recomputed at project granularity in AnalyzeProjectAsync.
    public override Task AnalyzeDocumentAsync(Document document, SyntaxNode bodyOpt, CancellationToken cancellationToken)
    {
        return SpecializedTasks.EmptyTask;
    }

    // Recomputes all diagnostics for the project and broadcasts them,
    // grouped per document (and one group for project-level diagnostics).
    public override async Task AnalyzeProjectAsync(Project project, bool semanticsChanged, CancellationToken cancellationToken)
    {
        var diagnostics = await GetDiagnosticsAsync(project.Solution, project.Id, null, cancellationToken).ConfigureAwait(false);
        RaiseEvents(project, diagnostics);
    }

    public override Task AnalyzeSyntaxAsync(Document document, CancellationToken cancellationToken)
    {
        return SpecializedTasks.EmptyTask;
    }

    public override Task DocumentOpenAsync(Document document, CancellationToken cancellationToken)
    {
        return SpecializedTasks.EmptyTask;
    }

    public override Task DocumentResetAsync(Document document, CancellationToken cancellationToken)
    {
        return SpecializedTasks.EmptyTask;
    }

    public override Task NewSolutionSnapshotAsync(Solution solution, CancellationToken cancellationToken)
    {
        return SpecializedTasks.EmptyTask;
    }

    // Clears any reported diagnostics for a removed document by raising an
    // update with an empty diagnostics array for the document's update key.
    public override void RemoveDocument(DocumentId documentId)
    {
        _owner.RaiseDiagnosticsUpdated(
            this, new DiagnosticsUpdatedArgs(ValueTuple.Create(this, documentId), _workspace, null, null, null, ImmutableArray<DiagnosticData>.Empty));
    }

    // Same as RemoveDocument, but for the project-level update key.
    public override void RemoveProject(ProjectId projectId)
    {
        _owner.RaiseDiagnosticsUpdated(
            this, new DiagnosticsUpdatedArgs(ValueTuple.Create(this, projectId), _workspace, null, null, null, ImmutableArray<DiagnosticData>.Empty));
    }
    #endregion

    // This engine has no cache: "cached" lookups simply recompute.
    public override Task<ImmutableArray<DiagnosticData>> GetCachedDiagnosticsAsync(Solution solution, ProjectId projectId = null, DocumentId documentId = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        return GetDiagnosticsAsync(solution, projectId, documentId, cancellationToken);
    }

    public override Task<ImmutableArray<DiagnosticData>> GetSpecificCachedDiagnosticsAsync(Solution solution, object id, CancellationToken cancellationToken)
    {
        return GetSpecificDiagnosticsAsync(solution, id, cancellationToken);
    }

    // Computes diagnostics scoped to a document, a project, or (when both ids
    // are null) every project in the solution.
    public override async Task<ImmutableArray<DiagnosticData>> GetDiagnosticsAsync(Solution solution, ProjectId projectId = null, DocumentId documentId = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        if (documentId != null)
        {
            // Document scope: compute for the whole project, then filter.
            var diagnostics = await GetProjectDiagnosticsAsync(solution.GetProject(projectId), cancellationToken).ConfigureAwait(false);
            return diagnostics.Where(d => d.DocumentId == documentId).ToImmutableArrayOrEmpty();
        }

        if (projectId != null)
        {
            return await GetProjectDiagnosticsAsync(solution.GetProject(projectId), cancellationToken).ConfigureAwait(false);
        }

        var builder = ImmutableArray.CreateBuilder<DiagnosticData>();
        foreach (var project in solution.Projects)
        {
            builder.AddRange(await GetProjectDiagnosticsAsync(project, cancellationToken).ConfigureAwait(false));
        }

        return builder.ToImmutable();
    }

    // Resolves an opaque update key (the ValueTuple ids raised by this class)
    // back to the diagnostics for that document or project.
    public override async Task<ImmutableArray<DiagnosticData>> GetSpecificDiagnosticsAsync(Solution solution, object id, CancellationToken cancellationToken)
    {
        if (id is ValueTuple<DiagnosticIncrementalAnalyzer, DocumentId>)
        {
            var key = (ValueTuple<DiagnosticIncrementalAnalyzer, DocumentId>)id;
            return await GetDiagnosticsAsync(solution, key.Item2.ProjectId, key.Item2, cancellationToken).ConfigureAwait(false);
        }

        if (id is ValueTuple<DiagnosticIncrementalAnalyzer, ProjectId>)
        {
            var key = (ValueTuple<DiagnosticIncrementalAnalyzer, ProjectId>)id;
            // Project keys carry only project-level diagnostics (no document).
            var diagnostics = await GetDiagnosticsAsync(solution, key.Item2, null, cancellationToken).ConfigureAwait(false);
            return diagnostics.Where(d => d.DocumentId == null).ToImmutableArray();
        }

        return ImmutableArray<DiagnosticData>.Empty;
    }

    public override async Task<ImmutableArray<DiagnosticData>> GetDiagnosticsForIdsAsync(Solution solution, ProjectId projectId = null, DocumentId documentId = null, ImmutableHashSet<string> diagnosticIds = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        // NOTE(review): diagnosticIds is dereferenced unconditionally despite
        // its null default — callers appear to always pass a set; confirm.
        var diagnostics = await GetDiagnosticsAsync(solution, projectId, documentId, cancellationToken).ConfigureAwait(false);
        return diagnostics.Where(d => diagnosticIds.Contains(d.Id)).ToImmutableArrayOrEmpty();
    }

    public override async Task<ImmutableArray<DiagnosticData>> GetProjectDiagnosticsForIdsAsync(Solution solution, ProjectId projectId = null, ImmutableHashSet<string> diagnosticIds = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        var diagnostics = await GetDiagnosticsForIdsAsync(solution, projectId, null, diagnosticIds, cancellationToken).ConfigureAwait(false);
        return diagnostics.Where(d => d.DocumentId == null).ToImmutableArray();
    }

    // Appends span-intersecting diagnostics to `result`; always reports the
    // result as complete (returns true).
    public override async Task<bool> TryAppendDiagnosticsForSpanAsync(Document document, TextSpan range, List<DiagnosticData> result, CancellationToken cancellationToken)
    {
        result.AddRange(await GetDiagnosticsForSpanAsync(document, range, cancellationToken).ConfigureAwait(false));
        return true;
    }

    public override async Task<IEnumerable<DiagnosticData>> GetDiagnosticsForSpanAsync(Document document, TextSpan range, CancellationToken cancellationToken)
    {
        var diagnostics = await GetDiagnosticsAsync(document.Project.Solution, document.Project.Id, document.Id, cancellationToken).ConfigureAwait(false);
        return diagnostics.Where(d => range.IntersectsWith(d.TextSpan));
    }

    // Core computation: builds the project's compilation, attaches the
    // project's analyzers and collects their diagnostics.
    private async Task<ImmutableArray<DiagnosticData>> GetProjectDiagnosticsAsync(Project project, CancellationToken cancellationToken)
    {
        if (project == null)
        {
            return ImmutableArray<DiagnosticData>.Empty;
        }

        var compilation = await project.GetCompilationAsync(cancellationToken).ConfigureAwait(false);

        var analyzers = _analyzerManager.CreateDiagnosticAnalyzers(project);

        var compilationWithAnalyzer = compilation.WithAnalyzers(analyzers, project.AnalyzerOptions, cancellationToken);

        // REVIEW: this API is a bit strange.
        // if getting diagnostic is cancelled, it has to create new compilation and do everything from scretch again?
        return GetDiagnosticData(project, await compilationWithAnalyzer.GetAnalyzerDiagnosticsAsync().ConfigureAwait(false)).ToImmutableArrayOrEmpty();
    }

    // Maps Roslyn Diagnostic objects to DiagnosticData, attributing each one
    // to its source document. Location-less diagnostics become project-level
    // data; diagnostics whose source tree has no document are dropped.
    private IEnumerable<DiagnosticData> GetDiagnosticData(Project project, ImmutableArray<Diagnostic> diagnostics)
    {
        foreach (var diagnostic in diagnostics)
        {
            if (diagnostic.Location == Location.None)
            {
                yield return DiagnosticData.Create(project, diagnostic);
                continue;
            }

            var document = project.GetDocument(diagnostic.Location.SourceTree);
            if (document == null)
            {
                continue;
            }

            yield return DiagnosticData.Create(document, diagnostic);
        }
    }

    // Broadcasts diagnostics grouped per document; the null-DocumentId group
    // is raised under the project-level update key.
    private void RaiseEvents(Project project, ImmutableArray<DiagnosticData> diagnostics)
    {
        var groups = diagnostics.GroupBy(d => d.DocumentId);

        var solution = project.Solution;
        var workspace = solution.Workspace;

        foreach (var kv in groups)
        {
            if (kv.Key == null)
            {
                _owner.RaiseDiagnosticsUpdated(
                    this, new DiagnosticsUpdatedArgs(
                        ValueTuple.Create(this, project.Id), workspace, solution, project.Id, null, kv.ToImmutableArrayOrEmpty()));
                continue;
            }

            _owner.RaiseDiagnosticsUpdated(
                this, new DiagnosticsUpdatedArgs(
                    ValueTuple.Create(this, kv.Key), workspace, solution, project.Id, kv.Key, kv.ToImmutableArrayOrEmpty()));
        }
    }
}
| mono/roslyn | src/Features/Core/Diagnostics/EngineV2/DiagnosticIncrementalAnalyzer.cs | C# | apache-2.0 | 9,908 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("MNISTTest")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("MNISTTest")]
[assembly: AssemblyCopyright("Copyright © 2018")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("6d135fe2-ccdd-4c72-b6ae-3cce024289ba")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| kawatan/Milk | MNISTTest/Properties/AssemblyInfo.cs | C# | apache-2.0 | 1,389 |
require 'test/unit'
class TestRubyLiteral < Test::Unit::TestCase
def test_special_const
assert_equal 'true', true.inspect
assert_instance_of TrueClass, true
assert_equal 'false', false.inspect
assert_instance_of FalseClass, false
assert_equal 'nil', nil.inspect
assert_instance_of NilClass, nil
assert_equal ':sym', :sym.inspect
assert_instance_of Symbol, :sym
assert_equal '1234', 1234.inspect
assert_instance_of Fixnum, 1234
assert_equal '1234', 1_2_3_4.inspect
assert_instance_of Fixnum, 1_2_3_4
assert_equal '18', 0x12.inspect
assert_instance_of Fixnum, 0x12
assert_raise(SyntaxError) { eval("0x") }
assert_equal '15', 0o17.inspect
assert_instance_of Fixnum, 0o17
assert_raise(SyntaxError) { eval("0o") }
assert_equal '5', 0b101.inspect
assert_instance_of Fixnum, 0b101
assert_raise(SyntaxError) { eval("0b") }
assert_equal '123456789012345678901234567890', 123456789012345678901234567890.inspect
assert_instance_of Bignum, 123456789012345678901234567890
assert_instance_of Float, 1.3
end
def test_self
assert_equal self, self
assert_instance_of TestRubyLiteral, self
assert_respond_to self, :test_self
end
def test_string
assert_instance_of String, ?a
assert_equal "a", ?a
assert_instance_of String, ?A
assert_equal "A", ?A
assert_instance_of String, ?\n
assert_equal "\n", ?\n
assert_equal " ", ?\ # space
assert_equal '', ''
assert_equal 'string', 'string'
assert_equal 'string string', 'string string'
assert_equal ' ', ' '
assert_equal ' ', " "
assert_equal "\0", "\0"
assert_equal "\1", "\1"
assert_equal "3", "\x33"
assert_equal "\n", "\n"
end
def test_dstring
assert_equal '2', "#{1+1}"
assert_equal '16', "#{2 ** 4}"
s = "string"
assert_equal s, "#{s}"
end
def test_dsymbol
assert_equal :a3c, :"a#{1+2}c"
end
def test_xstring
assert_equal "foo\n", `echo foo`
s = 'foo'
assert_equal "foo\n", `echo #{s}`
end
def test_regexp
assert_instance_of Regexp, //
assert_match //, 'a'
assert_match //, ''
assert_instance_of Regexp, /a/
assert_match /a/, 'a'
assert_no_match /test/, 'tes'
re = /test/
assert_match re, 'test'
str = 'test'
assert_match re, str
assert_match /test/, str
assert_equal 0, (/test/ =~ 'test')
assert_equal 0, (re =~ 'test')
assert_equal 0, (/test/ =~ str)
assert_equal 0, (re =~ str)
assert_equal 0, ('test' =~ /test/)
assert_equal 0, ('test' =~ re)
assert_equal 0, (str =~ /test/)
assert_equal 0, (str =~ re)
end
def test_dregexp
assert_instance_of Regexp, /re#{'ge'}xp/
assert_equal(/regexp/, /re#{'ge'}xp/)
end
def test_array
assert_instance_of Array, []
assert_equal [], []
assert_equal 0, [].size
assert_instance_of Array, [0]
assert_equal [3], [3]
assert_equal 1, [3].size
a = [3]
assert_equal 3, a[0]
assert_instance_of Array, [1,2]
assert_equal [1,2], [1,2]
assert_instance_of Array, [1,2,3,4,5]
assert_equal [1,2,3,4,5], [1,2,3,4,5]
assert_equal 5, [1,2,3,4,5].size
a = [1,2]
assert_equal 1, a[0]
assert_equal 2, a[1]
a = [1 + 2, 3 + 4, 5 + 6]
assert_instance_of Array, a
assert_equal [3, 7, 11], a
assert_equal 7, a[1]
assert_equal 1, ([0][0] += 1)
assert_equal 1, ([2][0] -= 1)
a = [obj = Object.new]
assert_instance_of Array, a
assert_equal 1, a.size
assert_equal obj, a[0]
a = [1,2,3]
a[1] = 5
assert_equal 5, a[1]
end
def test_hash
assert_instance_of Hash, {}
assert_equal({}, {})
assert_instance_of Hash, {1 => 2}
assert_equal({1 => 2}, {1 => 2})
h = {1 => 2}
assert_equal 2, h[1]
h = {"string" => "literal", "goto" => "hell"}
assert_equal h, h
assert_equal 2, h.size
assert_equal h, h
assert_equal "literal", h["string"]
end
def test_range
assert_instance_of Range, (1..2)
assert_equal(1..2, 1..2)
r = 1..2
assert_equal 1, r.begin
assert_equal 2, r.end
assert_equal false, r.exclude_end?
assert_instance_of Range, (1...3)
assert_equal(1...3, 1...3)
r = 1...3
assert_equal 1, r.begin
assert_equal 3, r.end
assert_equal true, r.exclude_end?
r = 1+2 .. 3+4
assert_instance_of Range, r
assert_equal 3, r.begin
assert_equal 7, r.end
assert_equal false, r.exclude_end?
r = 1+2 ... 3+4
assert_instance_of Range, r
assert_equal 3, r.begin
assert_equal 7, r.end
assert_equal true, r.exclude_end?
assert_instance_of Range, 'a'..'c'
r = 'a'..'c'
assert_equal 'a', r.begin
assert_equal 'c', r.end
end
def test__FILE__
assert_instance_of String, __FILE__
assert_equal __FILE__, __FILE__
assert_equal 'test_literal.rb', File.basename(__FILE__)
end
def test__LINE__
assert_instance_of Fixnum, __LINE__
assert_equal __LINE__, __LINE__
end
def test_integer
head = ['', '0x', '0o', '0b', '0d', '-', '+']
chars = ['0', '1', '_', '9', 'f']
head.each {|h|
4.times {|len|
a = [h]
len.times { a = a.product(chars).map {|x| x.join('') } }
a.each {|s|
next if s.empty?
begin
r1 = Integer(s)
rescue ArgumentError
r1 = :err
end
begin
r2 = eval(s)
rescue NameError, SyntaxError
r2 = :err
end
assert_equal(r1, r2, "Integer(#{s.inspect}) != eval(#{s.inspect})")
}
}
}
end
def test_float
head = ['', '-', '+']
chars = ['0', '1', '_', '9', 'f', '.']
head.each {|h|
6.times {|len|
a = [h]
len.times { a = a.product(chars).map {|x| x.join('') } }
a.each {|s|
next if s.empty?
next if /\.\z/ =~ s
next if /\A[-+]?\./ =~ s
next if /\A[-+]?0/ =~ s
begin
r1 = Float(s)
rescue ArgumentError
r1 = :err
end
begin
r2 = eval(s)
rescue NameError, SyntaxError
r2 = :err
end
r2 = :err if Range === r2
assert_equal(r1, r2, "Float(#{s.inspect}) != eval(#{s.inspect})")
}
}
}
end
end
| google-code/android-scripting | jruby/src/test/externals/ruby1.9/ruby/test_literal.rb | Ruby | apache-2.0 | 6,345 |
package dbutil
import (
"github.com/wpxiong/beargo/log"
)
func init() {
log.InitLog()
}
| wpxiong/beargo | util/dbutil/db_util.go | GO | apache-2.0 | 94 |
import java.util.HashMap;
import java.util.Map;
public class WordPattern {
public static boolean wordPattern(String pattern, String str) {
Map<String, String> a2b = new HashMap<String, String>();
Map<String, String> b2a = new HashMap<String, String>();
String[] tokens = str.split(" ");
if (tokens.length != pattern.length()) return false;
int i = 0;
for (String token : tokens) {
StringBuilder tmp = new StringBuilder();
tmp.append(pattern.charAt(i));
if (a2b.containsKey(tmp.toString())) {
String r = a2b.get(tmp.toString());
if (!r.equals(token)) return false;
} else if (b2a.containsKey(token)) {
String r = b2a.get(token);
if (!r.equals(tmp.toString())) return false;
} else {
a2b.put(tmp.toString(), token);
b2a.put(token, tmp.toString());
}
++i;
}
return true;
}
}
| wittyResry/leetcode | src/main/java/WordPattern.java | Java | apache-2.0 | 1,026 |
package org.aksw.sparqlify.qa.pinpointing;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.aksw.sparqlify.core.algorithms.CandidateViewSelectorImpl;
import org.aksw.sparqlify.core.algorithms.ViewQuad;
import org.aksw.sparqlify.core.domain.input.ViewDefinition;
import org.aksw.sparqlify.database.Clause;
import org.aksw.sparqlify.database.NestedNormalForm;
import org.aksw.sparqlify.restriction.RestrictionManagerImpl;
import org.springframework.stereotype.Component;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.sparql.core.Var;
import com.hp.hpl.jena.sparql.expr.E_Equals;
import com.hp.hpl.jena.sparql.expr.ExprVar;
import com.hp.hpl.jena.sparql.expr.NodeValue;
@Component
public class Pinpointer {
// private Collection<ViewDefinition> viewDefs;
CandidateViewSelectorImpl candidateSelector;
/*
* TODO:
* - add query method
* - add caching
*/
public void registerViewDefs(Collection<ViewDefinition> viewDefs) {
candidateSelector = new CandidateViewSelectorImpl();
for (ViewDefinition viewDef : viewDefs) {
candidateSelector.addView(viewDef);
}
}
public Set<ViewQuad<ViewDefinition>> getViewCandidates(Triple triple) {
Var g = Var.alloc("g");
Var s = Var.alloc("s");
Var p = Var.alloc("p");
Var o = Var.alloc("o");
Node gv = Quad.defaultGraphNodeGenerated;
Node sv = triple.getSubject();
Node pv = triple.getPredicate();
Node ov = triple.getObject();
Quad tmpQuad = new Quad(g, s, p, o);
RestrictionManagerImpl r = new RestrictionManagerImpl();
Set<Clause> clauses = new HashSet<Clause>();
clauses.add(new Clause(new E_Equals(new ExprVar(g), NodeValue.makeNode(gv))));
clauses.add(new Clause(new E_Equals(new ExprVar(s), NodeValue.makeNode(sv))));
clauses.add(new Clause(new E_Equals(new ExprVar(p), NodeValue.makeNode(pv))));
clauses.add(new Clause(new E_Equals(new ExprVar(o), NodeValue.makeNode(ov))));
NestedNormalForm nnf = new NestedNormalForm(clauses);
r.stateCnf(nnf);
Set<ViewQuad<ViewDefinition>> result = candidateSelector.findCandidates(tmpQuad, r);
return result;
}
}
| AKSW/R2RLint | src/main/java/org/aksw/sparqlify/qa/pinpointing/Pinpointer.java | Java | apache-2.0 | 2,208 |
/*
* Copyright 2014 Martin W. Kirst
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import CaveatPacket = require('./CaveatPacket');
import CaveatPacketType = require('./CaveatPacketType');
import Macaroon = require('./Macaroon');
import MacaroonsConstants = require('./MacaroonsConstants');
import BufferTools = require('./BufferTools');
import CryptoTools = require('./CryptoTools');
export = MacaroonsVerifier;
/**
* Used to verify Macaroons
*/
class MacaroonsVerifier {
"use strict";
private predicates:string[] = [];
private boundMacaroons:Macaroon[] = [];
private generalCaveatVerifiers:GeneralCaveatVerifier[] = [];
private macaroon:Macaroon;
constructor(macaroon:Macaroon) {
this.macaroon = macaroon;
}
/**
* @param secret string this secret will be enhanced, in case it's shorter than {@link MacaroonsConstants.MACAROON_SUGGESTED_SECRET_LENGTH}
* @throws Error if macaroon isn't valid
*/
public assertIsValid(secret:string):void;
/**
* @param secret a Buffer, that will be used, a minimum length of {@link MacaroonsConstants.MACAROON_SUGGESTED_SECRET_LENGTH} is highly recommended
* @throws Error if macaroon isn't valid
*/
public assertIsValid(secret:Buffer):void;
public assertIsValid(secret:any):void {
var secretBuffer = (secret instanceof Buffer) ? secret : CryptoTools.generate_derived_key(secret);
var result = this.isValid_verify_raw(this.macaroon, secretBuffer);
if (result.fail) {
var msg = result.failMessage != null ? result.failMessage : "This macaroon isn't valid.";
throw new Error(msg);
}
}
/**
* @param secret string this secret will be enhanced, in case it's shorter than {@link MacaroonsConstants.MACAROON_SUGGESTED_SECRET_LENGTH}
* @return true/false if the macaroon is valid
*/
public isValid(secret:string):boolean;
/**
* @param secret a Buffer, that will be used, a minimum length of {@link MacaroonsConstants.MACAROON_SUGGESTED_SECRET_LENGTH} is highly recommended
* @return true/false if the macaroon is valid
*/
public isValid(secret:Buffer):boolean;
public isValid(secret:any):boolean {
var secretBuffer = (secret instanceof Buffer) ? secret : CryptoTools.generate_derived_key(secret);
return !this.isValid_verify_raw(this.macaroon, secretBuffer).fail;
}
/**
* Caveats like these are called "exact caveats" because there is exactly one way
* to satisfy them. Either the given caveat matches, or it doesn't. At
* verification time, the verifier will check each caveat in the macaroon against
* the list of satisfied caveats provided to satisfyExact(String).
* When it finds a match, it knows that the caveat holds and it can move onto the next caveat in
* the macaroon.
*
* @param caveat caveat
* @return this {@link MacaroonsVerifier}
*/
public satisfyExact(caveat:string):MacaroonsVerifier {
if (caveat) {
this.predicates.push(caveat);
}
return this;
}
/**
* Binds a prepared macaroon.
*
* @param preparedMacaroon preparedMacaroon
* @return this {@link MacaroonsVerifier}
*/
public satisfy3rdParty(preparedMacaroon:Macaroon):MacaroonsVerifier {
if (preparedMacaroon) {
this.boundMacaroons.push(preparedMacaroon);
}
return this;
}
/**
* Another technique for informing the verifier that a caveat is satisfied
* allows for expressive caveats. Whereas exact caveats are checked
* by simple byte-wise equality, general caveats are checked using
* an application-provided callback that returns true if and only if the caveat
* is true within the context of the request.
* There's no limit on the contents of a general caveat,
* so long as the callback understands how to determine whether it is satisfied.
* This technique is called "general caveats".
*
* @param generalVerifier generalVerifier a function(caveat:string):boolean which does the verification
* @return this {@link MacaroonsVerifier}
*/
public satisfyGeneral(generalVerifier:(caveat:string)=>boolean):MacaroonsVerifier {
if (generalVerifier) {
this.generalCaveatVerifiers.push(generalVerifier);
}
return this;
}
private isValid_verify_raw(M:Macaroon, secret:Buffer):VerificationResult {
var vresult = this.macaroon_verify_inner(M, secret);
if (!vresult.fail) {
vresult.fail = !BufferTools.equals(vresult.csig, this.macaroon.signatureBuffer);
if (vresult.fail) {
vresult = new VerificationResult("Verification failed. Signature doesn't match. Maybe the key was wrong OR some caveats aren't satisfied.");
}
}
return vresult;
}
private macaroon_verify_inner(M:Macaroon, key:Buffer):VerificationResult {
var csig:Buffer = CryptoTools.macaroon_hmac(key, M.identifier);
if (M.caveatPackets != null) {
var caveatPackets = M.caveatPackets;
for (var i = 0; i < caveatPackets.length; i++) {
var caveat = caveatPackets[i];
if (caveat == null) continue;
if (caveat.type == CaveatPacketType.cl) continue;
if (!(caveat.type == CaveatPacketType.cid && caveatPackets[Math.min(i + 1, caveatPackets.length - 1)].type == CaveatPacketType.vid)) {
if (MacaroonsVerifier.containsElement(this.predicates, caveat.getValueAsText()) || this.verifiesGeneral(caveat.getValueAsText())) {
csig = CryptoTools.macaroon_hmac(csig, caveat.rawValue);
}
} else {
i++;
var caveat_vid = caveatPackets[i];
var boundMacaroon = this.findBoundMacaroon(caveat.getValueAsText());
if (boundMacaroon == null) {
var msg = "Couldn't verify 3rd party macaroon, because no discharged macaroon was provided to the verifier.";
return new VerificationResult(msg);
}
if (!this.macaroon_verify_inner_3rd(boundMacaroon, caveat_vid, csig)) {
var msg = "Couldn't verify 3rd party macaroon, identifier= " + boundMacaroon.identifier;
return new VerificationResult(msg);
}
var data = caveat.rawValue;
var vdata = caveat_vid.rawValue;
csig = CryptoTools.macaroon_hash2(csig, vdata, data);
}
}
}
return new VerificationResult(csig);
}
private macaroon_verify_inner_3rd(M:Macaroon, C:CaveatPacket, sig:Buffer):boolean {
if (!M) return false;
var enc_plaintext = Buffer.alloc(MacaroonsConstants.MACAROON_SECRET_TEXT_ZERO_BYTES + MacaroonsConstants.MACAROON_HASH_BYTES);
var enc_ciphertext = Buffer.alloc(MacaroonsConstants.MACAROON_HASH_BYTES + MacaroonsConstants.SECRET_BOX_OVERHEAD);
enc_plaintext.fill(0);
enc_ciphertext.fill(0);
var vid_data = C.rawValue;
//assert vid_data.length == VID_NONCE_KEY_SZ;
/**
* the nonce is in the first MACAROON_SECRET_NONCE_BYTES
* of the vid; the ciphertext is in the rest of it.
*/
var enc_nonce = Buffer.alloc(MacaroonsConstants.MACAROON_SECRET_NONCE_BYTES);
vid_data.copy(enc_nonce, 0, 0, enc_nonce.length);
/* fill in the ciphertext */
vid_data.copy(enc_ciphertext, 0, MacaroonsConstants.MACAROON_SECRET_NONCE_BYTES, MacaroonsConstants.MACAROON_SECRET_NONCE_BYTES + vid_data.length - MacaroonsConstants.MACAROON_SECRET_NONCE_BYTES);
try {
var enc_plaintext = CryptoTools.macaroon_secretbox_open(sig, enc_nonce, enc_ciphertext);
}
catch (error) {
if (/Cipher bytes fail verification/.test(error.message)) {
return false;
} else {
throw new Error("Error while deciphering 3rd party caveat, msg=" + error);
}
}
var key = Buffer.alloc(MacaroonsConstants.MACAROON_HASH_BYTES);
key.fill(0);
enc_plaintext.copy(key, 0, 0, MacaroonsConstants.MACAROON_HASH_BYTES);
var vresult = this.macaroon_verify_inner(M, key);
var data = this.macaroon.signatureBuffer;
var csig = CryptoTools.macaroon_bind(data, vresult.csig);
return BufferTools.equals(csig, M.signatureBuffer);
}
private findBoundMacaroon(identifier:string):Macaroon {
for (var i = 0; i < this.boundMacaroons.length; i++) {
var boundMacaroon = this.boundMacaroons[i];
if (identifier === boundMacaroon.identifier) {
return boundMacaroon;
}
}
return null;
}
private verifiesGeneral(caveat:string):boolean {
var found:boolean = false;
for (var i = 0; i < this.generalCaveatVerifiers.length; i++) {
var verifier:GeneralCaveatVerifier = this.generalCaveatVerifiers[i];
found = found || verifier(caveat);
}
return found;
}
private static containsElement(elements:string[], anElement:string):boolean {
if (elements != null) {
for (var i = 0; i < elements.length; i++) {
var element = elements[i];
if (element === anElement) return true;
}
}
return false;
}
}
class VerificationResult {
csig:Buffer = null;
fail:boolean = false;
failMessage:string = null;
constructor(csig:Buffer);
constructor(failMessage:string);
constructor(arg:any) {
if (typeof arg === 'string') {
this.failMessage = arg;
this.fail = true;
} else if (typeof arg === 'object') {
this.csig = arg;
}
}
}
interface GeneralCaveatVerifier {
/**
* @param caveat caveat
* @return True, if this caveat is satisfies the applications requirements. False otherwise.
*/
(caveat:string):boolean;
}
| nitram509/macaroons.js | src/main/ts/MacaroonsVerifier.ts | TypeScript | apache-2.0 | 9,915 |
const _ = require('lodash');
const teFlow = require('te-flow');
const AnimManager = require('./anim-manager.js');
const _H = require('./../helpers/helper-index.js');
const animConfig = function (_key, _data) {
/**
* Preps the data to be processed
* @param {str} key -> anim key
* @param {obj} data -> anim data
* @return {---} -> {key, data, globalData}
*/
const formatData = function (key, data) {
let globalData;
({data, globalData} = _H.util.getGlobal(data));
//plural container check, don't want to format if plural
if (!_H.util.regularExp.pluralTest(key, 'animation')) {
data = _H.util.formatData(data, key, {
addOnKeys: ['timeline', 'tl'],
globalData
});
}
return {
key,
data,
globalData
};
};
/**
* Inits the manager and then sets the data from the raw data obj
* to create a itteration to cycle over in extract
* @return {---} -> {animMgr} animation class with configed data
*/
const initSetData = function (key, data, globalData) {
//init the manager to set data to
const animMgr = new AnimManager(key, globalData);
//loop through data and the objs
_.forEach(data, function (val, objKey) {
animMgr.set(val, objKey);
});
return {
animMgr
};
};
//-> passing data to anim-extract
return teFlow.call({
args: {
key: _key,
data: _data
}},
formatData,
initSetData
);
};
module.exports = animConfig;
| ctr-lang/ctr | lib/ctr-nodes/animation/anim-config.js | JavaScript | apache-2.0 | 1,529 |
/**
*
*/
package com.f1000.rank.helper;
import java.util.List;
import com.f1000.rank.journal.model.Rank;
/**
* The Interface RankingHelper.
*
* @author mattiam
*/
public interface RankingHelper {
/**
* Ranking.
*
* @param <T> the generic type
* @param list the list
*/
public <T extends Rank<T>> void ranking(List<T> list);
}
| mattiamascia/jrank | src/main/java/com/f1000/rank/helper/RankingHelper.java | Java | apache-2.0 | 361 |
package me.chen_wei.zhihu;
import android.app.Application;
import android.support.v7.app.AppCompatDelegate;
/**
* Created by Hander on 16/2/28.
* <p/>
* Email : hander_wei@163.com
*/
public class MyApplication extends Application {
static{
//设置DayNightTheme模式
AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_AUTO);
}
@Override
public void onCreate() {
super.onCreate();
// LeakCanary.install(this);
}
}
| HanderWei/ZhihuDaily | app/src/main/java/me/chen_wei/zhihu/MyApplication.java | Java | apache-2.0 | 489 |
package leetcode;
/**
* https://leetcode.com/problems/two-sum-ii-input-array-is-sorted/
* https://leetcode.com/explore/learn/card/array-and-string/205/array-two-pointer-technique/1153/
*/
public final class Problem167TwoSumII {
public int[] twoSum(int[] numbers, int target) {
if (numbers == null || numbers.length == 0) {
return new int[0];
}
int i = 0;
int j = numbers.length - 1;
while (i < j) {
int sum = numbers[i] + numbers[j];
if (sum == target) {
return new int[] {i + 1, j + 1};
} else if (sum < target) {
i++;
} else {
j--;
}
}
return new int[0];
}
}
| jaredsburrows/cs-interview-questions | java/src/main/java/leetcode/Problem167TwoSumII.java | Java | apache-2.0 | 752 |
/*
* Copyright 2013-2019 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions.runAnything;
import com.intellij.openapi.components.*;
import com.intellij.openapi.project.Project;
import com.intellij.util.xmlb.XmlSerializerUtil;
import jakarta.inject.Singleton;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
/**
* from kotlin
*/
@Singleton
@State(name = "RunAnythingContextRecentDirectoryCache", storages = @Storage(StoragePathMacros.WORKSPACE_FILE))
public class RunAnythingContextRecentDirectoryCache implements PersistentStateComponent<RunAnythingContextRecentDirectoryCache.State> {
static class State {
public List<String> paths = new ArrayList<>();
}
@Nonnull
public static RunAnythingContextRecentDirectoryCache getInstance(@Nonnull Project project) {
return ServiceManager.getService(project, RunAnythingContextRecentDirectoryCache.class);
}
private State myState = new State();
@Nonnull
@Override
public State getState() {
return myState;
}
@Override
public void loadState(State state) {
XmlSerializerUtil.copyBean(state, myState);
}
}
| consulo/consulo | modules/base/lang-impl/src/main/java/com/intellij/ide/actions/runAnything/RunAnythingContextRecentDirectoryCache.java | Java | apache-2.0 | 1,682 |
module Synthea
module World
class BirthRate
def initialize
@area = Synthea::Config.population.area
@population_variance = Synthea::Config.population.birth_variance
@rate_per_sq_mile = (Synthea::Config.population.daily_births_per_square_mile * Synthea::Config.time_step)
mean = @rate_per_sq_mile * @area
@distribution = Distribution::Normal.rng(mean, mean * @population_variance)
end
def births
@distribution.call
end
end
end
end
| AmartC/synthea | lib/world/birth_rate.rb | Ruby | apache-2.0 | 515 |
/*******************************************************************************
* Copyright 2015 Maximilian Stark | Dakror <mail@dakror.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.dakror.vloxlands.generate;
import com.badlogic.gdx.math.Vector3;
import de.dakror.vloxlands.game.Game;
import de.dakror.vloxlands.game.world.Island;
/**
* @author Dakror
*/
public class WorldGenerator extends Thread {
public float progress;
public boolean done;
public WorldGenerator() {
setName("WorldGenerator Thread");
}
@Override
public void run() {
for (int i = 0; i < Game.world.getWidth(); i++) {
for (int j = 0; j < Game.world.getDepth(); j++) {
Island island = IslandGenerator.generate(this);
island.setPos(new Vector3(i * Island.SIZE, island.pos.y, j * Island.SIZE));
Game.world.addIsland(i, j, island);
}
}
done = true;
}
public void step() {
int total = Game.world.getWidth() * Game.world.getDepth() * 6;
progress += 1f / total;
}
}
| Dakror/Vloxlands | core/src/de/dakror/vloxlands/generate/WorldGenerator.java | Java | apache-2.0 | 1,624 |
package com.havryliuk.itarticles.data.local.preferences;
import javax.inject.Inject;
/**
* Created by Igor Havrylyuk on 23.10.2017.
*/
public class AppPreferencesHelper implements IPreferencesHelper {
private static final String PREF_KEY_IS_LOGGED_IN = "PREF_KEY_IS_LOGGED_IN";
private static final String PREF_KEY_USER_NAME = "PREF_KEY_USER_NAME_VALUE";
private static final String PREF_KEY_USER_TOKEN = "PREF_KEY_USER_TOKEN_VALUE";
private CommonPreferencesHelper commonPreferencesHelper;
@Inject
public AppPreferencesHelper(CommonPreferencesHelper commonPreferencesHelper) {
this.commonPreferencesHelper = commonPreferencesHelper;
}
@Override
public void setLoggedIn(boolean value) {
commonPreferencesHelper.setBooleanToPrefs(PREF_KEY_IS_LOGGED_IN, value);
}
@Override
public boolean isLoggedIn() {
return commonPreferencesHelper.getBooleanFromPrefs(PREF_KEY_IS_LOGGED_IN);
}
@Override
public void setUserName(String userName) {
commonPreferencesHelper.setStringToPrefs(PREF_KEY_USER_NAME, userName);
}
@Override
public String getUserName() {
return commonPreferencesHelper.getStringFromPrefs(PREF_KEY_USER_NAME);
}
@Override
public String getAccessToken() {
return commonPreferencesHelper.getStringFromPrefs(PREF_KEY_USER_TOKEN);
}
@Override
public void setAccessToken(String accessToken) {
commonPreferencesHelper.setStringToPrefs(PREF_KEY_USER_TOKEN, accessToken);
}
}
| graviton57/ITArticles | app/src/main/java/com/havryliuk/itarticles/data/local/preferences/AppPreferencesHelper.java | Java | apache-2.0 | 1,546 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ml.math.impls.matrix;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Spliterator;
import java.util.function.Consumer;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.Blas;
import org.apache.ignite.ml.math.Matrix;
import org.apache.ignite.ml.math.MatrixStorage;
import org.apache.ignite.ml.math.Vector;
import org.apache.ignite.ml.math.decompositions.LUDecomposition;
import org.apache.ignite.ml.math.exceptions.CardinalityException;
import org.apache.ignite.ml.math.exceptions.ColumnIndexException;
import org.apache.ignite.ml.math.exceptions.RowIndexException;
import org.apache.ignite.ml.math.functions.Functions;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
import org.apache.ignite.ml.math.functions.IgniteFunction;
import org.apache.ignite.ml.math.functions.IgniteTriFunction;
import org.apache.ignite.ml.math.functions.IntIntToDoubleFunction;
import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
import org.apache.ignite.ml.math.impls.vector.MatrixVectorView;
import org.apache.ignite.ml.math.util.MatrixUtil;
/**
* This class provides a helper implementation of the {@link Matrix}
* interface to minimize the effort required to implement it.
* Subclasses may override some of the implemented methods if a more
* specific or optimized implementation is desirable.
*/
public abstract class AbstractMatrix implements Matrix {
// Stochastic sparsity analysis.
/** */
private static final double Z95 = 1.959964;
/** */
private static final double Z80 = 1.281552;
/** */
private static final int MAX_SAMPLES = 500;
/** */
private static final int MIN_SAMPLES = 15;
/** Cached minimum element. */
private Element minElm;
/** Cached maximum element. */
private Element maxElm = null;
/** Matrix storage implementation. */
private MatrixStorage sto;
/** Meta attributes storage. */
private Map<String, Object> meta = new HashMap<>();
/** Matrix's GUID. */
private IgniteUuid guid = IgniteUuid.randomUuid();
/**
* @param sto Backing {@link MatrixStorage}.
*/
public AbstractMatrix(MatrixStorage sto) {
this.sto = sto;
}
/**
*
*/
public AbstractMatrix() {
// No-op.
}
/**
* @param sto Backing {@link MatrixStorage}.
*/
protected void setStorage(MatrixStorage sto) {
assert sto != null;
this.sto = sto;
}
/**
* @param row Row index in the matrix.
* @param col Column index in the matrix.
* @param v Value to set.
*/
protected void storageSet(int row, int col, double v) {
sto.set(row, col, v);
// Reset cached values.
minElm = maxElm = null;
}
/**
* @param row Row index in the matrix.
* @param col Column index in the matrix.
*/
protected double storageGet(int row, int col) {
return sto.get(row, col);
}
/** {@inheritDoc} */
@Override public Element maxElement() {
if (maxElm == null) {
double max = Double.NEGATIVE_INFINITY;
int row = 0, col = 0;
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++) {
double d = storageGet(x, y);
if (d > max) {
max = d;
row = x;
col = y;
}
}
maxElm = mkElement(row, col);
}
return maxElm;
}
/** {@inheritDoc} */
@Override public Element minElement() {
if (minElm == null) {
double min = Double.MAX_VALUE;
int row = 0, col = 0;
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++) {
double d = storageGet(x, y);
if (d < min) {
min = d;
row = x;
col = y;
}
}
minElm = mkElement(row, col);
}
return minElm;
}
/** {@inheritDoc} */
@Override public double maxValue() {
return maxElement().get();
}
/** {@inheritDoc} */
@Override public double minValue() {
return minElement().get();
}
/**
* @param row Row index in the matrix.
* @param col Column index in the matrix.
*/
private Element mkElement(int row, int col) {
return new Element() {
/** {@inheritDoc} */
@Override public double get() {
return storageGet(row, col);
}
/** {@inheritDoc} */
@Override public int row() {
return row;
}
/** {@inheritDoc} */
@Override public int column() {
return col;
}
/** {@inheritDoc} */
@Override public void set(double d) {
storageSet(row, col, d);
}
};
}
/** {@inheritDoc} */
@Override public Element getElement(int row, int col) {
return mkElement(row, col);
}
/** {@inheritDoc} */
@Override public Matrix swapRows(int row1, int row2) {
checkRowIndex(row1);
checkRowIndex(row2);
int cols = columnSize();
for (int y = 0; y < cols; y++) {
double v = getX(row1, y);
setX(row1, y, getX(row2, y));
setX(row2, y, v);
}
return this;
}
/** {@inheritDoc} */
@Override public Matrix swapColumns(int col1, int col2) {
checkColumnIndex(col1);
checkColumnIndex(col2);
int rows = rowSize();
for (int x = 0; x < rows; x++) {
double v = getX(x, col1);
setX(x, col1, getX(x, col2));
setX(x, col2, v);
}
return this;
}
/** {@inheritDoc} */
@Override public MatrixStorage getStorage() {
return sto;
}
/** {@inheritDoc} */
@Override public boolean isSequentialAccess() {
return sto.isSequentialAccess();
}
/** {@inheritDoc} */
@Override public boolean isDense() {
return sto.isDense();
}
/** {@inheritDoc} */
@Override public boolean isRandomAccess() {
return sto.isRandomAccess();
}
/** {@inheritDoc} */
@Override public boolean isDistributed() {
return sto.isDistributed();
}
/** {@inheritDoc} */
@Override public boolean isArrayBased() {
return sto.isArrayBased();
}
/**
* Check row index bounds.
*
* @param row Row index.
*/
void checkRowIndex(int row) {
if (row < 0 || row >= rowSize())
throw new RowIndexException(row);
}
/**
* Check column index bounds.
*
* @param col Column index.
*/
void checkColumnIndex(int col) {
if (col < 0 || col >= columnSize())
throw new ColumnIndexException(col);
}
/**
* Check column and row index bounds.
*
* @param row Row index.
* @param col Column index.
*/
private void checkIndex(int row, int col) {
checkRowIndex(row);
checkColumnIndex(col);
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject(sto);
out.writeObject(meta);
out.writeObject(guid);
}
/** {@inheritDoc} */
@Override public Map<String, Object> getMetaStorage() {
return meta;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
sto = (MatrixStorage)in.readObject();
meta = (Map<String, Object>)in.readObject();
guid = (IgniteUuid)in.readObject();
}
/** {@inheritDoc} */
@Override public Matrix assign(double val) {
if (sto.isArrayBased())
Arrays.fill(sto.data(), val);
else {
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, val);
}
return this;
}
/** {@inheritDoc} */
@Override public Matrix assign(IntIntToDoubleFunction fun) {
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, fun.apply(x, y));
return this;
}
/** */
private void checkCardinality(Matrix mtx) {
checkCardinality(mtx.rowSize(), mtx.columnSize());
}
/** */
private void checkCardinality(int rows, int cols) {
if (rows != rowSize())
throw new CardinalityException(rowSize(), rows);
if (cols != columnSize())
throw new CardinalityException(columnSize(), cols);
}
/** {@inheritDoc} */
@Override public Matrix assign(double[][] vals) {
checkCardinality(vals.length, vals[0].length);
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, vals[x][y]);
return this;
}
/** {@inheritDoc} */
@Override public Matrix assign(Matrix mtx) {
checkCardinality(mtx);
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, mtx.getX(x, y));
return this;
}
/** {@inheritDoc} */
@Override public Matrix map(IgniteDoubleFunction<Double> fun) {
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, fun.apply(storageGet(x, y)));
return this;
}
/** {@inheritDoc} */
@Override public Matrix map(Matrix mtx, IgniteBiFunction<Double, Double, Double> fun) {
checkCardinality(mtx);
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
storageSet(x, y, fun.apply(storageGet(x, y), mtx.getX(x, y)));
return this;
}
/** {@inheritDoc} */
@Override public Spliterator<Double> allSpliterator() {
return new Spliterator<Double>() {
/** {@inheritDoc} */
@Override public boolean tryAdvance(Consumer<? super Double> act) {
int rLen = rowSize();
int cLen = columnSize();
for (int i = 0; i < rLen; i++)
for (int j = 0; j < cLen; j++)
act.accept(storageGet(i, j));
return true;
}
/** {@inheritDoc} */
@Override public Spliterator<Double> trySplit() {
return null; // No Splitting.
}
/** {@inheritDoc} */
@Override public long estimateSize() {
return rowSize() * columnSize();
}
/** {@inheritDoc} */
@Override public int characteristics() {
return ORDERED | SIZED;
}
};
}
/** {@inheritDoc} */
@Override public int nonZeroElements() {
int cnt = 0;
for (int i = 0; i < rowSize(); i++)
for (int j = 0; j < rowSize(); j++)
if (get(i, j) != 0.0)
cnt++;
return cnt;
}
/** {@inheritDoc} */
@Override public Spliterator<Double> nonZeroSpliterator() {
return new Spliterator<Double>() {
/** {@inheritDoc} */
@Override public boolean tryAdvance(Consumer<? super Double> act) {
int rLen = rowSize();
int cLen = columnSize();
for (int i = 0; i < rLen; i++)
for (int j = 0; j < cLen; j++) {
double val = storageGet(i, j);
if (val != 0.0)
act.accept(val);
}
return true;
}
/** {@inheritDoc} */
@Override public Spliterator<Double> trySplit() {
return null; // No Splitting.
}
/** {@inheritDoc} */
@Override public long estimateSize() {
return nonZeroElements();
}
/** {@inheritDoc} */
@Override public int characteristics() {
return ORDERED | SIZED;
}
};
}
/** {@inheritDoc} */
@Override public Matrix assignColumn(int col, Vector vec) {
checkColumnIndex(col);
int rows = rowSize();
for (int x = 0; x < rows; x++)
storageSet(x, col, vec.getX(x));
return this;
}
/** {@inheritDoc} */
@Override public Matrix assignRow(int row, Vector vec) {
checkRowIndex(row);
int cols = columnSize();
if (cols != vec.size())
throw new CardinalityException(cols, vec.size());
// TODO: IGNITE-5777, use Blas for this.
for (int y = 0; y < cols; y++)
storageSet(row, y, vec.getX(y));
return this;
}
/** {@inheritDoc} */
@Override public Vector foldRows(IgniteFunction<Vector, Double> fun) {
int rows = rowSize();
Vector vec = likeVector(rows);
for (int i = 0; i < rows; i++)
vec.setX(i, fun.apply(viewRow(i)));
return vec;
}
/** {@inheritDoc} */
@Override public Vector foldColumns(IgniteFunction<Vector, Double> fun) {
int cols = columnSize();
Vector vec = likeVector(cols);
for (int i = 0; i < cols; i++)
vec.setX(i, fun.apply(viewColumn(i)));
return vec;
}
/** {@inheritDoc} */
@Override public <T> T foldMap(IgniteBiFunction<T, Double, T> foldFun, IgniteDoubleFunction<Double> mapFun,
T zeroVal) {
T res = zeroVal;
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
res = foldFun.apply(res, mapFun.apply(storageGet(x, y)));
return res;
}
/** {@inheritDoc} */
@Override public int columnSize() {
return sto.columnSize();
}
/** {@inheritDoc} */
@Override public int rowSize() {
return sto.rowSize();
}
/** {@inheritDoc} */
@Override public double determinant() {
//TODO: IGNITE-5799, This decomposition should be cached
LUDecomposition dec = new LUDecomposition(this);
double res = dec.determinant();
dec.destroy();
return res;
}
/** {@inheritDoc} */
@Override public Matrix inverse() {
if (rowSize() != columnSize())
throw new CardinalityException(rowSize(), columnSize());
//TODO: IGNITE-5799, This decomposition should be cached
LUDecomposition dec = new LUDecomposition(this);
Matrix res = dec.solve(likeIdentity());
dec.destroy();
return res;
}
/** */
protected Matrix likeIdentity() {
int n = rowSize();
Matrix res = like(n, n);
for (int i = 0; i < n; i++)
res.setX(i, i, 1.0);
return res;
}
/** {@inheritDoc} */
@Override public Matrix divide(double d) {
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
setX(x, y, getX(x, y) / d);
return this;
}
/** {@inheritDoc} */
@Override public double get(int row, int col) {
checkIndex(row, col);
return storageGet(row, col);
}
/** {@inheritDoc} */
@Override public double getX(int row, int col) {
return storageGet(row, col);
}
/** {@inheritDoc} */
@Override public Matrix minus(Matrix mtx) {
int rows = rowSize();
int cols = columnSize();
checkCardinality(rows, cols);
Matrix res = like(rows, cols);
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
res.setX(x, y, getX(x, y) - mtx.getX(x, y));
return res;
}
/** {@inheritDoc} */
@Override public Matrix plus(double x) {
Matrix cp = copy();
cp.map(Functions.plus(x));
return cp;
}
/** {@inheritDoc} */
@Override public Matrix plus(Matrix mtx) {
int rows = rowSize();
int cols = columnSize();
checkCardinality(rows, cols);
Matrix res = like(rows, cols);
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
res.setX(x, y, getX(x, y) + mtx.getX(x, y));
return res;
}
/** {@inheritDoc} */
@Override public IgniteUuid guid() {
return guid;
}
/** {@inheritDoc} */
@Override public Matrix set(int row, int col, double val) {
checkIndex(row, col);
storageSet(row, col, val);
return this;
}
/** {@inheritDoc} */
@Override public Matrix setRow(int row, double[] data) {
checkRowIndex(row);
int cols = columnSize();
if (cols != data.length)
throw new CardinalityException(cols, data.length);
// TODO: IGNITE-5777, use Blas for this.
for (int y = 0; y < cols; y++)
setX(row, y, data[y]);
return this;
}
/** {@inheritDoc} */
@Override public Vector getRow(int row) {
checkRowIndex(row);
Vector res = new DenseLocalOnHeapVector(columnSize());
for (int i = 0; i < columnSize(); i++)
res.setX(i, getX(row, i));
return res;
}
/** {@inheritDoc} */
@Override public Matrix setColumn(int col, double[] data) {
checkColumnIndex(col);
int rows = rowSize();
if (rows != data.length)
throw new CardinalityException(rows, data.length);
for (int x = 0; x < rows; x++)
setX(x, col, data[x]);
return this;
}
/** {@inheritDoc} */
@Override public Vector getCol(int col) {
checkColumnIndex(col);
Vector res;
if (isDistributed())
res = MatrixUtil.likeVector(this, rowSize());
else
res = new DenseLocalOnHeapVector(rowSize());
for (int i = 0; i < rowSize(); i++)
res.setX(i, getX(i, col));
return res;
}
/** {@inheritDoc} */
@Override public Matrix setX(int row, int col, double val) {
storageSet(row, col, val);
return this;
}
/** {@inheritDoc} */
@Override public double maxAbsRowSumNorm() {
double max = 0.0;
int rows = rowSize();
int cols = columnSize();
for (int x = 0; x < rows; x++) {
double sum = 0;
for (int y = 0; y < cols; y++)
sum += Math.abs(getX(x, y));
if (sum > max)
max = sum;
}
return max;
}
/** {@inheritDoc} */
@Override public Matrix times(double x) {
Matrix cp = copy();
cp.map(Functions.mult(x));
return cp;
}
/** {@inheritDoc} */
@Override public Vector times(Vector vec) {
int cols = columnSize();
if (cols != vec.size())
throw new CardinalityException(cols, vec.size());
int rows = rowSize();
Vector res = likeVector(rows);
Blas.gemv(1, this, vec, 0, res);
return res;
}
/** {@inheritDoc} */
@Override public Matrix times(Matrix mtx) {
int cols = columnSize();
if (cols != mtx.rowSize())
throw new CardinalityException(cols, mtx.rowSize());
Matrix res = like(rowSize(), mtx.columnSize());
Blas.gemm(1, this, mtx, 0, res);
return res;
}
/** {@inheritDoc} */
@Override public double sum() {
int rows = rowSize();
int cols = columnSize();
double sum = 0.0;
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
sum += getX(x, y);
return sum;
}
/** {@inheritDoc} */
@Override public Matrix transpose() {
int rows = rowSize();
int cols = columnSize();
Matrix mtx = like(cols, rows);
for (int x = 0; x < rows; x++)
for (int y = 0; y < cols; y++)
mtx.setX(y, x, getX(x, y));
return mtx;
}
    /**
     * {@inheritDoc}
     *
     * Stochastic density check: samples random entries and compares the observed
     * fraction of non-zero entries against {@code threshold} using an 80%
     * normal-approximation confidence interval (constant {@code Z80}). Sampling
     * grows adaptively from {@code MIN_SAMPLES} up to {@code MAX_SAMPLES} until
     * the interval no longer straddles the threshold. NOTE(review): the result is
     * probabilistic and uses an unseeded {@link Random}, so repeated calls can
     * disagree when the true density is close to the threshold.
     */
    @Override public boolean density(double threshold) {
        assert threshold >= 0.0 && threshold <= 1.0;
        int n = MIN_SAMPLES;
        int rows = rowSize();
        int cols = columnSize();
        double mean = 0.0;
        // Bernoulli variance p(1-p) at the threshold; drives the interval width.
        double pq = threshold * (1 - threshold);
        Random rnd = new Random();
        // Initial sample: fraction of non-zero entries among MIN_SAMPLES random picks.
        for (int i = 0; i < MIN_SAMPLES; i++)
            if (getX(rnd.nextInt(rows), rnd.nextInt(cols)) != 0.0)
                mean++;
        mean /= MIN_SAMPLES;
        double iv = Z80 * Math.sqrt(pq / n);
        if (mean < threshold - iv)
            return false; // Sparse.
        else if (mean > threshold + iv)
            return true; // Dense.
        while (n < MAX_SAMPLES) {
            // Determine upper bound we may need for 'n' to likely relinquish the uncertainty.
            // Here, we use confidence interval formula but solved for 'n'.
            double ivX = Math.max(Math.abs(threshold - mean), 1e-11);
            double stdErr = ivX / Z80;
            double nX = Math.min(Math.max((int)Math.ceil(pq / (stdErr * stdErr)), n), MAX_SAMPLES) - n;
            if (nX < 1.0) // IMPL NOTE this can happen with threshold 1.0
                nX = 1.0;
            // Draw the nX additional samples and fold them into the running mean.
            double meanNext = 0.0;
            for (int i = 0; i < nX; i++)
                if (getX(rnd.nextInt(rows), rnd.nextInt(cols)) != 0.0)
                    meanNext++;
            mean = (n * mean + meanNext) / (n + nX);
            n += nX;
            // Are we good now?
            iv = Z80 * Math.sqrt(pq / n);
            if (mean < threshold - iv)
                return false; // Sparse.
            else if (mean > threshold + iv)
                return true; // Dense.
        }
        return mean > threshold; // Dense if mean > threshold.
    }
/** {@inheritDoc} */
@Override public Matrix viewPart(int[] off, int[] size) {
return new MatrixView(this, off[0], off[1], size[0], size[1]);
}
/** {@inheritDoc} */
@Override public Matrix viewPart(int rowOff, int rows, int colOff, int cols) {
return viewPart(new int[] {rowOff, colOff}, new int[] {rows, cols});
}
/** {@inheritDoc} */
@Override public Vector viewRow(int row) {
return new MatrixVectorView(this, row, 0, 0, 1);
}
/** {@inheritDoc} */
@Override public Vector viewColumn(int col) {
return new MatrixVectorView(this, 0, col, 1, 0);
}
/** {@inheritDoc} */
@Override public Vector viewDiagonal() {
return new MatrixVectorView(this, 0, 0, 1, 1);
}
/** {@inheritDoc} */
@Override public void destroy() {
getStorage().destroy();
}
/** {@inheritDoc} */
@Override public Matrix copy() {
Matrix cp = like(rowSize(), columnSize());
cp.assign(this);
return cp;
}
/** {@inheritDoc} */
@Override public int hashCode() {
int res = 1;
res = res * 37 + guid.hashCode();
res = res * 37 + sto.hashCode();
res = res * 37 + meta.hashCode();
return res;
}
/**
* {@inheritDoc}
*
* We ignore guid's for comparisons.
*/
@Override public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
AbstractMatrix that = (AbstractMatrix)o;
MatrixStorage sto = getStorage();
return (sto != null ? sto.equals(that.getStorage()) : that.getStorage() == null);
}
/** {@inheritDoc} */
@Override public void compute(int row, int col, IgniteTriFunction<Integer, Integer, Double, Double> f) {
setX(row, col, f.apply(row, col, getX(row, col)));
}
/**
* Return max amount of columns in 2d array.
*
* TODO: why this in this class, mb some util class?
*
* @param data Data.
*/
protected int getMaxAmountOfColumns(double[][] data) {
int maxAmountOfCols = 0;
for (int i = 0; i < data.length; i++)
maxAmountOfCols = Math.max(maxAmountOfCols, data[i].length);
return maxAmountOfCols;
}
}
| WilliamDo/ignite | modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/AbstractMatrix.java | Java | apache-2.0 | 26,094 |
package com.ctrip.xpipe.redis.proxy.spring;
import com.ctrip.xpipe.redis.core.proxy.endpoint.DefaultProxyEndpointManager;
import com.ctrip.xpipe.redis.core.proxy.endpoint.ProxyEndpointManager;
import com.ctrip.xpipe.redis.core.proxy.handler.NettyClientSslHandlerFactory;
import com.ctrip.xpipe.redis.core.proxy.handler.NettyServerSslHandlerFactory;
import com.ctrip.xpipe.redis.core.proxy.handler.NettySslHandlerFactory;
import com.ctrip.xpipe.redis.proxy.config.DefaultProxyConfig;
import com.ctrip.xpipe.redis.proxy.config.ProxyConfig;
import com.ctrip.xpipe.spring.AbstractProfile;
import com.ctrip.xpipe.utils.OsUtils;
import com.ctrip.xpipe.utils.XpipeThreadFactory;
import com.google.common.util.concurrent.MoreExecutors;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* @author chen.zhu
* <p>
* May 10, 2018
*/
@Configuration
@Profile(AbstractProfile.PROFILE_NAME_PRODUCTION)
public class Production extends AbstractProfile {

    /** Bean name of the shared proxy endpoint manager. */
    public static final String GLOBAL_ENDPOINT_MANAGER = "globalProxyEndpointManager";

    /** Bean name of the client-side SSL handler factory. */
    public static final String CLIENT_SSL_HANDLER_FACTORY = "clientSslHandlerFactory";

    /** Bean name of the server-side SSL handler factory. */
    public static final String SERVER_SSL_HANDLER_FACTORY = "serverSslHandlerFactory";

    /** Bean name reserved for the backend Netty event loop group. */
    public static final String BACKEND_EVENTLOOP_GROUP = "backendEventLoopGroup";

    /** Bean name of the shared scheduled executor. */
    public static final String GLOBAL_SCHEDULED = "globalScheduled";

    /** Proxy configuration shared by every bean defined below. */
    private final ProxyConfig config = new DefaultProxyConfig();

    /** @return the production proxy configuration bean. */
    @Bean
    public ProxyConfig getProxyConfig() {
        return config;
    }

    /** @return SSL handler factory for outgoing (client) connections. */
    @Bean(name = CLIENT_SSL_HANDLER_FACTORY)
    public NettySslHandlerFactory clientSslHandlerFactory() {
        return new NettyClientSslHandlerFactory(config);
    }

    /** @return SSL handler factory for incoming (server) connections. */
    @Bean(name = SERVER_SSL_HANDLER_FACTORY)
    public NettySslHandlerFactory serverSslHandlerFactory() {
        return new NettyServerSslHandlerFactory(config);
    }

    /**
     * @return an "exiting" scheduled executor (won't block JVM shutdown for
     *         more than one second) whose pool size is capped at min(CPUs, 4).
     */
    @Bean(name = GLOBAL_SCHEDULED)
    public ScheduledExecutorService getScheduled() {
        final int poolSize = Math.min(OsUtils.getCpuCount(), 4);
        final ScheduledThreadPoolExecutor executor =
            new ScheduledThreadPoolExecutor(poolSize, XpipeThreadFactory.create(GLOBAL_SCHEDULED));
        return MoreExecutors.getExitingScheduledExecutorService(executor, 1, TimeUnit.SECONDS);
    }

    /** @return endpoint manager polling health checks at the configured interval. */
    @Bean(name = GLOBAL_ENDPOINT_MANAGER)
    public ProxyEndpointManager getProxyResourceManager() {
        // Method reference is equivalent to the lambda here: 'config' is
        // assigned once at construction and never re-bound.
        return new DefaultProxyEndpointManager(config::endpointHealthCheckIntervalSec);
    }
}
| ctripcorp/x-pipe | redis/redis-proxy/src/main/java/com/ctrip/xpipe/redis/proxy/spring/Production.java | Java | apache-2.0 | 2,700 |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace DotVVM.Framework.Parser.Dothtml.Parser
{
[DebuggerDisplay("{debuggerDisplay,nq}")]
public class DothtmlBindingNode : DothtmlLiteralNode
{
#region debbuger display
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
private string debuggerDisplay
{
get
{
return "{" + Name + ": " + Value + "}";
}
}
#endregion
public string Name { get; set; }
public DothtmlBindingNode()
{
Escape = true;
}
}
} | holajan/dotvvm | src/DotVVM.Framework/Parser/Dothtml/Parser/DothtmlBindingNode.cs | C# | apache-2.0 | 666 |
package com.lucidastar.hodgepodge.view;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ProgressBar;
import androidx.annotation.IntDef;
import com.lucidastar.hodgepodge.R;
import com.mine.lucidastarutils.utils.ScreenUtils;
import com.mine.lucidastarutils.utils.Utils;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Created by qiuyouzone on 2018/10/9.
*/
public class CircleProgressView extends ProgressBar {
    private int mReachBarSize = ScreenUtils.dp2px(getContext(), 2); // Stroke width of the completed ("reach") arc
    private int mNormalBarSize = ScreenUtils.dp2px(getContext(), 2); // Stroke width of the remaining ("normal") arc
    private int mReachBarColor = Color.parseColor("#108ee9"); // Color of the completed progress
    private int mNormalBarColor = Color.parseColor("#FFD3D6DA"); // Color of the remaining progress
    private int mTextSize = ScreenUtils.sp2px(getContext(), 14); // Progress text size
    private int mTextColor = Color.parseColor("#108ee9"); // Progress text color
    private float mTextSkewX; // Skew (italic) factor of the progress text
    private String mTextSuffix = "%"; // Text appended after the progress value
    private String mTextPrefix = ""; // Text prepended before the progress value
    private boolean mTextVisible = true; // Whether the progress text is drawn
    private boolean mReachCapRound; // Round stroke caps for the reach arc; effective in NORMAL style only
    private int mRadius = ScreenUtils.dp2px(getContext(), 20); // Circle radius
    private int mStartArc; // Start angle of the progress arc
    private int mInnerBackgroundColor; // Fill color of the inner circular background
    private int mProgressStyle = ProgressStyle.NORMAL; // Rendering style
    private int mInnerPadding = ScreenUtils.dp2px(getContext(), 1); // Gap between inner pie and outer ring
    private int mOuterColor; // Color of the outer ring
    private boolean needDrawInnerBackground; // Whether the inner background should be drawn
    private RectF rectF; // Drawing bounds of the outer circle/ring
    private RectF rectInner; // Drawing bounds of the inner pie
    private int mOuterSize = ScreenUtils.dp2px(getContext(), 1); // Stroke width of the outer ring
    private Paint mTextPaint; // Paint for the progress text
    private Paint mNormalPaint; // Paint for the remaining progress
    private Paint mReachPaint; // Paint for the completed progress
    private Paint mInnerBackgroundPaint; // Paint for the inner background
    private Paint mOutPaint; // Paint for the outer ring
    private int mRealWidth; // Measured width
    private int mRealHeight; // Measured height
    /** Allowed rendering styles. */
    @IntDef({ProgressStyle.NORMAL, ProgressStyle.FILL_IN, ProgressStyle.FILL_IN_ARC})
    @Retention(RetentionPolicy.SOURCE)
    public @interface ProgressStyle {
        int NORMAL = 0;
        int FILL_IN = 1;
        int FILL_IN_ARC = 2;
    }
    /** Convenience constructor for programmatic creation. */
    public CircleProgressView(Context context) {
        this(context, null);
    }
    /** Constructor invoked on XML inflation. */
    public CircleProgressView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    /** Reads the styled attributes and prepares all paints. */
    public CircleProgressView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        obtainAttributes(attrs);
        initPaint();
    }
    /** Creates all paints from the current attribute values. */
    private void initPaint() {
        mTextPaint = new Paint();
        mTextPaint.setColor(mTextColor);
        mTextPaint.setStyle(Paint.Style.FILL);
        mTextPaint.setTextSize(mTextSize);
        mTextPaint.setTextSkewX(mTextSkewX);
        mTextPaint.setAntiAlias(true);
        mNormalPaint = new Paint();
        mNormalPaint.setColor(mNormalBarColor);
        // FILL_IN_ARC fills solid sectors; the other styles stroke arcs.
        mNormalPaint.setStyle(mProgressStyle == ProgressStyle.FILL_IN_ARC ? Paint.Style.FILL : Paint.Style.STROKE);
        mNormalPaint.setAntiAlias(true);
        mNormalPaint.setStrokeWidth(mNormalBarSize);
        mReachPaint = new Paint();
        mReachPaint.setColor(mReachBarColor);
        mReachPaint.setStyle(mProgressStyle == ProgressStyle.FILL_IN_ARC ? Paint.Style.FILL : Paint.Style.STROKE);
        mReachPaint.setAntiAlias(true);
        mReachPaint.setStrokeCap(mReachCapRound ? Paint.Cap.ROUND : Paint.Cap.BUTT);
        mReachPaint.setStrokeWidth(mReachBarSize);
        // The inner-background and outer-ring paints are only needed by some styles.
        if (needDrawInnerBackground) {
            mInnerBackgroundPaint = new Paint();
            mInnerBackgroundPaint.setStyle(Paint.Style.FILL);
            mInnerBackgroundPaint.setAntiAlias(true);
            mInnerBackgroundPaint.setColor(mInnerBackgroundColor);
        }
        if (mProgressStyle == ProgressStyle.FILL_IN_ARC) {
            mOutPaint = new Paint();
            mOutPaint.setStyle(Paint.Style.STROKE);
            mOutPaint.setColor(mOuterColor);
            mOutPaint.setStrokeWidth(mOuterSize);
            mOutPaint.setAntiAlias(true);
        }
    }
    /** Reads the custom XML attributes and applies per-style defaults. */
    private void obtainAttributes(AttributeSet attrs) {
        TypedArray ta = getContext().obtainStyledAttributes(attrs, R.styleable.CircleProgressView);
        mProgressStyle = ta.getInt(R.styleable.CircleProgressView_cpv_progressStyle, ProgressStyle.NORMAL);
        // Read the attributes shared by all three styles.
        mNormalBarSize = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_progressNormalSize, mNormalBarSize);
        mNormalBarColor = ta.getColor(R.styleable.CircleProgressView_cpv_progressNormalColor, mNormalBarColor);
        mReachBarSize = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_progressReachSize, mReachBarSize);
        mReachBarColor = ta.getColor(R.styleable.CircleProgressView_cpv_progressReachColor, mReachBarColor);
        mTextSize = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_progressTextSize, mTextSize);
        mTextColor = ta.getColor(R.styleable.CircleProgressView_cpv_progressTextColor, mTextColor);
        mTextSkewX = ta.getDimension(R.styleable.CircleProgressView_cpv_progressTextSkewX, 0);
        if (ta.hasValue(R.styleable.CircleProgressView_cpv_progressTextSuffix)) {
            mTextSuffix = ta.getString(R.styleable.CircleProgressView_cpv_progressTextSuffix);
        }
        if (ta.hasValue(R.styleable.CircleProgressView_cpv_progressTextPrefix)) {
            mTextPrefix = ta.getString(R.styleable.CircleProgressView_cpv_progressTextPrefix);
        }
        mTextVisible = ta.getBoolean(R.styleable.CircleProgressView_cpv_progressTextVisible, mTextVisible);
        mRadius = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_radius, mRadius);
        rectF = new RectF(-mRadius, -mRadius, mRadius, mRadius);
        switch (mProgressStyle) {
            case ProgressStyle.FILL_IN:
                mReachBarSize = 0;
                mNormalBarSize = 0;
                mOuterSize = 0;
                break;
            case ProgressStyle.FILL_IN_ARC:
                mStartArc = ta.getInt(R.styleable.CircleProgressView_cpv_progressStartArc, 0) + 270;
                mInnerPadding = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_innerPadding, mInnerPadding);
                mOuterColor = ta.getColor(R.styleable.CircleProgressView_cpv_outerColor, mReachBarColor);
                mOuterSize = (int) ta.getDimension(R.styleable.CircleProgressView_cpv_outerSize, mOuterSize);
                mReachBarSize = 0;// Reset stroke widths to 0: this style fills solid sectors instead of stroking arcs.
                mNormalBarSize = 0;
                if (!ta.hasValue(R.styleable.CircleProgressView_cpv_progressNormalColor)) {
                    mNormalBarColor = Color.TRANSPARENT;
                }
                int mInnerRadius = mRadius - mOuterSize / 2 - mInnerPadding;
                rectInner = new RectF(-mInnerRadius, -mInnerRadius, mInnerRadius, mInnerRadius);
                break;
            case ProgressStyle.NORMAL:
                mReachCapRound = ta.getBoolean(R.styleable.CircleProgressView_cpv_reachCapRound, true);
                mStartArc = ta.getInt(R.styleable.CircleProgressView_cpv_progressStartArc, 0) + 270;
                if (ta.hasValue(R.styleable.CircleProgressView_cpv_innerBackgroundColor)) {
                    mInnerBackgroundColor = ta.getColor(R.styleable.CircleProgressView_cpv_innerBackgroundColor, Color.argb(0, 0, 0, 0));
                    needDrawInnerBackground = true;
                }
                break;
        }
        ta.recycle();
    }
    /**
     * Measures the view: diameter plus padding, plus stroke width for the
     * styles that stroke arcs (NORMAL) or draw an outer ring (FILL_IN_ARC).
     */
    @Override
    protected synchronized void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int maxBarPaintWidth = Math.max(mReachBarSize, mNormalBarSize);
        int maxPaintWidth = Math.max(maxBarPaintWidth, mOuterSize);
        int height = 0;
        int width = 0;
        switch (mProgressStyle) {
            case ProgressStyle.FILL_IN:
                height = getPaddingTop() + getPaddingBottom() // padding
                        + Math.abs(mRadius * 2); // diameter
                width = getPaddingLeft() + getPaddingRight() // padding
                        + Math.abs(mRadius * 2); // diameter
                break;
            case ProgressStyle.FILL_IN_ARC:
                height = getPaddingTop() + getPaddingBottom() // padding
                        + Math.abs(mRadius * 2) // diameter
                        + maxPaintWidth;// stroke width
                width = getPaddingLeft() + getPaddingRight() // padding
                        + Math.abs(mRadius * 2) // diameter
                        + maxPaintWidth;// stroke width
                break;
            case ProgressStyle.NORMAL:
                height = getPaddingTop() + getPaddingBottom() // padding
                        + Math.abs(mRadius * 2) // diameter
                        + maxBarPaintWidth;// stroke width
                width = getPaddingLeft() + getPaddingRight() // padding
                        + Math.abs(mRadius * 2) // diameter
                        + maxBarPaintWidth;// stroke width
                break;
        }
        mRealWidth = resolveSize(width, widthMeasureSpec);
        mRealHeight = resolveSize(height, heightMeasureSpec);
        setMeasuredDimension(mRealWidth, mRealHeight);
    }
    /** Dispatches drawing to the renderer matching the configured style. */
    @Override
    protected synchronized void onDraw(Canvas canvas) {
        switch (mProgressStyle) {
            case ProgressStyle.NORMAL:
                drawNormalCircle(canvas);
                break;
            case ProgressStyle.FILL_IN:
                drawFillInCircle(canvas);
                break;
            case ProgressStyle.FILL_IN_ARC:
                drawFillInArcCircle(canvas);
                break;
        }
    }
    /**
     * Draws the {@link ProgressStyle#FILL_IN_ARC} style: an outer ring plus a
     * solid inner pie whose reached/remaining sectors represent the progress.
     */
    private void drawFillInArcCircle(Canvas canvas) {
        canvas.save();
        canvas.translate(mRealWidth / 2, mRealHeight / 2);
        // Outer ring.
        canvas.drawArc(rectF, 0, 360, false, mOutPaint);
        // Inner solid pie: reached sweep angle proportional to progress.
        float reachArc = getProgress() * 1.0f / getMax() * 360;
        canvas.drawArc(rectInner, mStartArc, reachArc, true, mReachPaint);
        // Remaining (not-yet-reached) sector.
        if (reachArc != 360) {
            canvas.drawArc(rectInner, reachArc + mStartArc, 360 - reachArc, true, mNormalPaint);
        }
        canvas.restore();
    }
/**
* 绘制PROGRESS_STYLE_FILL_IN圆形
*/
private void drawFillInCircle(Canvas canvas) {
canvas.save();
canvas.translate(mRealWidth / 2, mRealHeight / 2);
float progressY = getProgress() * 1.0f / getMax() * (mRadius * 2);
float angle = (float) (Math.acos((mRadius - progressY) / mRadius) * 180 / Math.PI);
float startAngle = 90 + angle;
float sweepAngle = 360 - angle * 2;
// 绘制未到达区域
rectF = new RectF(-mRadius, -mRadius, mRadius, mRadius);
mNormalPaint.setStyle(Paint.Style.FILL);
canvas.drawArc(rectF, startAngle, sweepAngle, false, mNormalPaint);
// 翻转180度绘制已到达区域
canvas.rotate(180);
mReachPaint.setStyle(Paint.Style.FILL);
canvas.drawArc(rectF, 270 - angle, angle * 2, false, mReachPaint);
// 文字显示在最上层最后绘制
canvas.rotate(180);
// 绘制文字
if (mTextVisible) {
String text = mTextPrefix + getProgress() + mTextSuffix;
float textWidth = mTextPaint.measureText(text);
float textHeight = (mTextPaint.descent() + mTextPaint.ascent());
canvas.drawText(text, -textWidth / 2, -textHeight / 2, mTextPaint);
}
}
    /**
     * Draws the {@link ProgressStyle#NORMAL} style: stroked reached/remaining
     * arcs, an optional filled inner background and the centered progress text.
     */
    private void drawNormalCircle(Canvas canvas) {
        canvas.save();
        canvas.translate(mRealWidth / 2, mRealHeight / 2);
        // Optional inner circular background.
        if (needDrawInnerBackground) {
            canvas.drawCircle(0, 0, mRadius - Math.min(mReachBarSize, mNormalBarSize) / 2,
                    mInnerBackgroundPaint);
        }
        // Centered progress text.
        if (mTextVisible) {
            String text = mTextPrefix + getProgress() + mTextSuffix;
            float textWidth = mTextPaint.measureText(text);
            float textHeight = (mTextPaint.descent() + mTextPaint.ascent());
            canvas.drawText(text, -textWidth / 2, -textHeight / 2, mTextPaint);
        }
        // Sweep angle of the reached part.
        float reachArc = getProgress() * 1.0f / getMax() * 360;
        // Remaining arc.
        if (reachArc != 360) {
            canvas.drawArc(rectF, reachArc + mStartArc, 360 - reachArc, false, mNormalPaint);
        }
        // Reached arc.
        canvas.drawArc(rectF, mStartArc, reachArc, false, mReachPaint);
        canvas.restore();
    }
    /**
     * Animates the progress from 0 up to the current progress value.
     *
     * @param duration Animation duration in milliseconds.
     */
    public void runProgressAnim(long duration) {
        setProgressInTime(0, duration);
    }
    /**
     * Animates from the given value to the current progress value.
     *
     * NOTE(review): despite its name, {@code progress} is forwarded as the
     * START value of the animation; the end value is the current
     * {@link #getProgress()}.
     *
     * @param progress Start value of the animation.
     * @param duration Animation duration in milliseconds.
     */
    public void setProgressInTime(final int progress, final long duration) {
        setProgressInTime(progress, getProgress(), duration);
    }
    /**
     * Animates the progress between two values.
     *
     * @param startProgress Start value of the animation.
     * @param progress End value of the animation.
     * @param duration Animation duration in milliseconds.
     */
    public void setProgressInTime(int startProgress, final int progress, final long duration) {
        ValueAnimator valueAnimator = ValueAnimator.ofInt(startProgress, progress);
        valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animator) {
                // Current animated value; setProgress() invalidates the view.
                int currentValue = (Integer) animator.getAnimatedValue();
                setProgress(currentValue);
            }
        });
        AccelerateDecelerateInterpolator interpolator = new AccelerateDecelerateInterpolator();
        valueAnimator.setInterpolator(interpolator);
        valueAnimator.setDuration(duration);
        valueAnimator.start();
    }
// --- Bar size/color accessors ---------------------------------------------
// NOTE: setters take dp/sp values and store them converted to px, so a
// getter returns px, not the value previously passed to the setter.

/** @return reached-bar stroke width in px. */
public int getReachBarSize() {
    return mReachBarSize;
}

/** Sets the reached-bar stroke width (dp, converted to px). */
public void setReachBarSize(int reachBarSize) {
    mReachBarSize = ScreenUtils.dp2px(getContext(), reachBarSize);
    invalidate();
}

/** @return unreached-bar stroke width in px. */
public int getNormalBarSize() {
    return mNormalBarSize;
}

/** Sets the unreached-bar stroke width (dp, converted to px). */
public void setNormalBarSize(int normalBarSize) {
    mNormalBarSize = ScreenUtils.dp2px(getContext(), normalBarSize);
    invalidate();
}

/** @return reached-bar color. */
public int getReachBarColor() {
    return mReachBarColor;
}

/** Sets the reached-bar color. */
public void setReachBarColor(int reachBarColor) {
    mReachBarColor = reachBarColor;
    invalidate();
}

/** @return unreached-bar color. */
public int getNormalBarColor() {
    return mNormalBarColor;
}

/** Sets the unreached-bar color. */
public void setNormalBarColor(int normalBarColor) {
    mNormalBarColor = normalBarColor;
    invalidate();
}
// --- Progress-text accessors ----------------------------------------------

/** @return text size in px (setter converts from sp). */
public int getTextSize() {
    return mTextSize;
}

/** Sets the progress text size (sp, converted to px). */
public void setTextSize(int textSize) {
    mTextSize = ScreenUtils.sp2px(getContext(), textSize);
    invalidate();
}

/** @return progress text color. */
public int getTextColor() {
    return mTextColor;
}

/** Sets the progress text color. */
public void setTextColor(int textColor) {
    mTextColor = textColor;
    invalidate();
}

/** @return horizontal skew factor applied to the text paint. */
public float getTextSkewX() {
    return mTextSkewX;
}

/** Sets the horizontal skew factor of the text. */
public void setTextSkewX(float textSkewX) {
    mTextSkewX = textSkewX;
    invalidate();
}

/** @return string appended after the progress number (e.g. "%"). */
public String getTextSuffix() {
    return mTextSuffix;
}

/** Sets the string appended after the progress number. */
public void setTextSuffix(String textSuffix) {
    mTextSuffix = textSuffix;
    invalidate();
}

/** @return string drawn before the progress number. */
public String getTextPrefix() {
    return mTextPrefix;
}

/** Sets the string drawn before the progress number. */
public void setTextPrefix(String textPrefix) {
    mTextPrefix = textPrefix;
    invalidate();
}

/** @return whether the progress text is drawn. */
public boolean isTextVisible() {
    return mTextVisible;
}

/** Shows or hides the progress text. */
public void setTextVisible(boolean textVisible) {
    mTextVisible = textVisible;
    invalidate();
}
// --- Shape/arc accessors --------------------------------------------------

/** @return whether the reached bar uses a round stroke cap. */
public boolean isReachCapRound() {
    return mReachCapRound;
}

/** Enables/disables the round stroke cap on the reached bar. */
public void setReachCapRound(boolean reachCapRound) {
    mReachCapRound = reachCapRound;
    invalidate();
}

/** @return circle radius in px (setter converts from dp). */
public int getRadius() {
    return mRadius;
}

/** Sets the circle radius (dp, converted to px). */
public void setRadius(int radius) {
    mRadius = ScreenUtils.dp2px(getContext(), radius);
    invalidate();
}

/** @return start angle (degrees) of the progress arc. */
public int getStartArc() {
    return mStartArc;
}

/** Sets the start angle (degrees) of the progress arc. */
public void setStartArc(int startArc) {
    mStartArc = startArc;
    invalidate();
}

/** @return fill color of the inner circle background. */
public int getInnerBackgroundColor() {
    return mInnerBackgroundColor;
}

/** Sets the fill color of the inner circle background. */
public void setInnerBackgroundColor(int innerBackgroundColor) {
    mInnerBackgroundColor = innerBackgroundColor;
    invalidate();
}

/** @return the current progress drawing style constant. */
public int getProgressStyle() {
    return mProgressStyle;
}

/** Sets the progress drawing style constant. */
public void setProgressStyle(int progressStyle) {
    mProgressStyle = progressStyle;
    invalidate();
}
// --- Inner padding / outer ring accessors ---------------------------------

/** @return padding (px) between the outer ring and the inner circle. */
public int getInnerPadding() {
    return mInnerPadding;
}

/**
 * Sets the inner padding (dp, converted to px) and recomputes the inner
 * rectangle used for drawing.
 */
public void setInnerPadding(int innerPadding) {
    mInnerPadding = ScreenUtils.dp2px(getContext(), innerPadding);
    int mInnerRadius = mRadius - mOuterSize / 2 - mInnerPadding;
    rectInner = new RectF(-mInnerRadius, -mInnerRadius, mInnerRadius, mInnerRadius);
    invalidate();
}

/** @return color of the outer ring. */
public int getOuterColor() {
    return mOuterColor;
}

/** Sets the color of the outer ring. */
public void setOuterColor(int outerColor) {
    mOuterColor = outerColor;
    invalidate();
}

/** @return outer ring stroke width in px (setter converts from dp). */
public int getOuterSize() {
    return mOuterSize;
}

/** Sets the outer ring stroke width (dp, converted to px). */
public void setOuterSize(int outerSize) {
    mOuterSize = ScreenUtils.dp2px(getContext(), outerSize);
    invalidate();
}
// Bundle keys used by onSaveInstanceState/onRestoreInstanceState.
private static final String STATE = "state";
private static final String PROGRESS_STYLE = "progressStyle";
private static final String TEXT_COLOR = "textColor";
private static final String TEXT_SIZE = "textSize";
private static final String TEXT_SKEW_X = "textSkewX";
private static final String TEXT_VISIBLE = "textVisible";
private static final String TEXT_SUFFIX = "textSuffix";
private static final String TEXT_PREFIX = "textPrefix";
private static final String REACH_BAR_COLOR = "reachBarColor";
private static final String REACH_BAR_SIZE = "reachBarSize";
private static final String NORMAL_BAR_COLOR = "normalBarColor";
private static final String NORMAL_BAR_SIZE = "normalBarSize";
private static final String IS_REACH_CAP_ROUND = "isReachCapRound";
private static final String RADIUS = "radius";
private static final String START_ARC = "startArc";
private static final String INNER_BG_COLOR = "innerBgColor";
private static final String INNER_PADDING = "innerPadding";
private static final String OUTER_COLOR = "outerColor";
private static final String OUTER_SIZE = "outerSize";
/**
 * Saves all configurable drawing state into a Bundle so the view survives
 * configuration changes; the superclass state is stored under {@link #STATE}.
 */
@Override
public Parcelable onSaveInstanceState() {
    final Bundle bundle = new Bundle();
    bundle.putParcelable(STATE, super.onSaveInstanceState());
    // Save the current style settings.
    bundle.putInt(PROGRESS_STYLE, getProgressStyle());
    bundle.putInt(RADIUS, getRadius());
    bundle.putBoolean(IS_REACH_CAP_ROUND, isReachCapRound());
    bundle.putInt(START_ARC, getStartArc());
    bundle.putInt(INNER_BG_COLOR, getInnerBackgroundColor());
    bundle.putInt(INNER_PADDING, getInnerPadding());
    bundle.putInt(OUTER_COLOR, getOuterColor());
    bundle.putInt(OUTER_SIZE, getOuterSize());
    // Save text settings.
    bundle.putInt(TEXT_COLOR, getTextColor());
    bundle.putInt(TEXT_SIZE, getTextSize());
    bundle.putFloat(TEXT_SKEW_X, getTextSkewX());
    bundle.putBoolean(TEXT_VISIBLE, isTextVisible());
    bundle.putString(TEXT_SUFFIX, getTextSuffix());
    bundle.putString(TEXT_PREFIX, getTextPrefix());
    // Save reached-bar settings.
    bundle.putInt(REACH_BAR_COLOR, getReachBarColor());
    bundle.putInt(REACH_BAR_SIZE, getReachBarSize());
    // Save unreached-bar settings.
    bundle.putInt(NORMAL_BAR_COLOR, getNormalBarColor());
    bundle.putInt(NORMAL_BAR_SIZE, getNormalBarSize());
    return bundle;
}
/**
 * Restores the state written by {@link #onSaveInstanceState()}. Re-creates
 * the paints (initPaint) before handing the nested Parcelable back to the
 * superclass so the restored values take effect on the next draw.
 */
@Override
public void onRestoreInstanceState(Parcelable state) {
    if (state instanceof Bundle) {
        final Bundle bundle = (Bundle) state;
        mProgressStyle = bundle.getInt(PROGRESS_STYLE);
        mRadius = bundle.getInt(RADIUS);
        mReachCapRound = bundle.getBoolean(IS_REACH_CAP_ROUND);
        mStartArc = bundle.getInt(START_ARC);
        mInnerBackgroundColor = bundle.getInt(INNER_BG_COLOR);
        mInnerPadding = bundle.getInt(INNER_PADDING);
        mOuterColor = bundle.getInt(OUTER_COLOR);
        mOuterSize = bundle.getInt(OUTER_SIZE);
        mTextColor = bundle.getInt(TEXT_COLOR);
        mTextSize = bundle.getInt(TEXT_SIZE);
        mTextSkewX = bundle.getFloat(TEXT_SKEW_X);
        mTextVisible = bundle.getBoolean(TEXT_VISIBLE);
        mTextSuffix = bundle.getString(TEXT_SUFFIX);
        mTextPrefix = bundle.getString(TEXT_PREFIX);
        mReachBarColor = bundle.getInt(REACH_BAR_COLOR);
        mReachBarSize = bundle.getInt(REACH_BAR_SIZE);
        mNormalBarColor = bundle.getInt(NORMAL_BAR_COLOR);
        mNormalBarSize = bundle.getInt(NORMAL_BAR_SIZE);
        initPaint();
        super.onRestoreInstanceState(bundle.getParcelable(STATE));
        return;
    }
    super.onRestoreInstanceState(state);
}
/**
 * Re-creates the paints before invalidating so that any setter change
 * (color, size, skew, ...) is reflected on the next draw pass.
 */
@Override
public void invalidate() {
    initPaint();
    super.invalidate();
}
}
| Lucidastar/hodgepodgeForAndroid | app/src/main/java/com/lucidastar/hodgepodge/view/CircleProgressView.java | Java | apache-2.0 | 22,758 |
/// <reference path="..\compiler\commandLineParser.ts" />
/// <reference path="..\services\services.ts" />
/// <reference path="protocol.d.ts" />
/// <reference path="session.ts" />
namespace ts.server {
/**
 * Logging abstraction used by the server; implementations decide where
 * messages go (file, stderr, ...) and whether verbose/perf output is kept.
 */
export interface Logger {
    close(): void;
    isVerbose(): boolean;
    loggingEnabled(): boolean;
    perftrc(s: string): void;       // performance-trace line
    info(s: string): void;
    startGroup(): void;             // begin a grouped run of messages
    endGroup(): void;
    msg(s: string, type?: string): void;  // type defaults to "Err" at call sites
}
// NOTE(review): not referenced in this part of the file; presumably the
// branching factor of the line-index tree used by ScriptVersionCache — confirm.
const lineCollectionCapacity = 4;
/**
 * Copies every recognized option from a protocol-format-options object onto
 * the host FormatCodeOptions object, mapping lowerCamel protocol keys
 * (e.g. "indentSize") to the UpperCamel host names (e.g. "IndentSize").
 * Keys with no matching host property are ignored.
 */
function mergeFormatOptions(formatCodeOptions: FormatCodeOptions, formatOptions: protocol.FormatOptions): void {
    const hasOwn = Object.prototype.hasOwnProperty;
    for (const protocolKey of Object.keys(formatOptions)) {
        const hostKey = protocolKey.charAt(0).toUpperCase() + protocolKey.substring(1);
        if (hasOwn.call(formatCodeOptions, hostKey)) {
            formatCodeOptions[hostKey] = formatOptions[protocolKey];
        }
    }
}
/**
 * Server-side record of a single script file: its text (kept in a
 * ScriptVersionCache), open/closed state, per-file format options, and the
 * files it references.
 */
export class ScriptInfo {
    svc: ScriptVersionCache;
    children: ScriptInfo[] = [];     // files referenced by this file
    defaultProject: Project;         // project to use by default for file
    fileWatcher: FileWatcher;
    formatCodeOptions = ts.clone(CompilerService.defaultFormatCodeOptions);
    path: Path;                      // canonicalized absolute path

    constructor(private host: ServerHost, public fileName: string, public content: string, public isOpen = false) {
        this.path = toPath(fileName, host.getCurrentDirectory(), createGetCanonicalFileName(host.useCaseSensitiveFileNames));
        this.svc = ScriptVersionCache.fromString(host, content);
    }

    /** Merges protocol-supplied format options into this file's options. */
    setFormatOptions(formatOptions: protocol.FormatOptions): void {
        if (formatOptions) {
            mergeFormatOptions(this.formatCodeOptions, formatOptions);
        }
    }

    close() {
        this.isOpen = false;
    }

    addChild(childInfo: ScriptInfo) {
        this.children.push(childInfo);
    }

    /** Returns an immutable snapshot of the current text. */
    snap() {
        return this.svc.getSnapshot();
    }

    getText() {
        const snap = this.snap();
        return snap.getText(0, snap.getLength());
    }

    getLineInfo(line: number) {
        const snap = this.snap();
        return snap.index.lineNumberToInfo(line);
    }

    /** Replaces the half-open range [start, end) with newText. */
    editContent(start: number, end: number, newText: string): void {
        this.svc.edit(start, end - start, newText);
    }

    getTextChangeRangeBetweenVersions(startVersion: number, endVersion: number): ts.TextChangeRange {
        return this.svc.getTextChangesBetweenVersions(startVersion, endVersion);
    }

    getChangeRange(oldSnapshot: ts.IScriptSnapshot): ts.TextChangeRange {
        return this.snap().getChangeRange(oldSnapshot);
    }
}
// A module resolution result stamped with when it was last computed
// (lastCheckTime is set from Date.now() when a resolution is cached).
interface TimestampedResolvedModule extends ResolvedModuleWithFailedLookupLocations {
    lastCheckTime: number;
}
/**
 * LanguageServiceHost implementation backed by ScriptInfo objects.
 * Caches module-name resolutions per containing file; the cache is cleared
 * whenever compiler options change.
 */
export class LSHost implements ts.LanguageServiceHost {
    ls: ts.LanguageService;
    compilationSettings: ts.CompilerOptions;
    filenameToScript: ts.FileMap<ScriptInfo>;
    roots: ScriptInfo[] = [];
    // Per containing file: module name -> cached resolution result.
    private resolvedModuleNames: ts.FileMap<Map<TimestampedResolvedModule>>;
    private moduleResolutionHost: ts.ModuleResolutionHost;
    private getCanonicalFileName: (fileName: string) => string;

    constructor(public host: ServerHost, public project: Project) {
        this.getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames);
        this.resolvedModuleNames = createFileMap<Map<TimestampedResolvedModule>>();
        this.filenameToScript = createFileMap<ScriptInfo>();
        this.moduleResolutionHost = {
            fileExists: fileName => this.fileExists(fileName),
            readFile: fileName => this.host.readFile(fileName)
        };
    }

    /**
     * Resolves `moduleNames` as imported from `containingFile`, reusing
     * cached results when still valid, and replaces the file's cache with
     * the freshly computed set.
     */
    resolveModuleNames(moduleNames: string[], containingFile: string): ResolvedModule[] {
        const path = toPath(containingFile, this.host.getCurrentDirectory(), this.getCanonicalFileName);
        const currentResolutionsInFile = this.resolvedModuleNames.get(path);
        const newResolutions: Map<TimestampedResolvedModule> = {};
        const resolvedModules: ResolvedModule[] = [];
        const compilerOptions = this.getCompilationSettings();
        for (const moduleName of moduleNames) {
            // check if this is a duplicate entry in the list
            let resolution = lookUp(newResolutions, moduleName);
            if (!resolution) {
                const existingResolution = currentResolutionsInFile && ts.lookUp(currentResolutionsInFile, moduleName);
                if (moduleResolutionIsValid(existingResolution)) {
                    // ok, it is safe to use existing module resolution results
                    resolution = existingResolution;
                }
                else {
                    resolution = <TimestampedResolvedModule>resolveModuleName(moduleName, containingFile, compilerOptions, this.moduleResolutionHost);
                    resolution.lastCheckTime = Date.now();
                    newResolutions[moduleName] = resolution;
                }
            }
            ts.Debug.assert(resolution !== undefined);
            resolvedModules.push(resolution.resolvedModule);
        }
        // replace old results with a new one
        this.resolvedModuleNames.set(path, newResolutions);
        return resolvedModules;

        function moduleResolutionIsValid(resolution: TimestampedResolvedModule): boolean {
            if (!resolution) {
                return false;
            }
            if (resolution.resolvedModule) {
                // TODO: consider checking failedLookupLocations
                // TODO: use lastCheckTime to track expiration for module name resolution
                return true;
            }
            // consider situation if we have no candidate locations as valid resolution.
            // after all there is no point to invalidate it if we have no idea where to look for the module.
            return resolution.failedLookupLocations.length === 0;
        }
    }

    getDefaultLibFileName() {
        const nodeModuleBinDir = ts.getDirectoryPath(ts.normalizePath(this.host.getExecutingFilePath()));
        return ts.combinePaths(nodeModuleBinDir, ts.getDefaultLibFileName(this.compilationSettings));
    }

    // Returns undefined when the file is unknown to the project service.
    getScriptSnapshot(filename: string): ts.IScriptSnapshot {
        const scriptInfo = this.getScriptInfo(filename);
        if (scriptInfo) {
            return scriptInfo.snap();
        }
    }

    setCompilationSettings(opt: ts.CompilerOptions) {
        this.compilationSettings = opt;
        // conservatively assume that changing compiler options might affect module resolution strategy
        this.resolvedModuleNames.clear();
    }

    // Heuristic: does the given 1-based line contain text that can affect
    // cross-file references (triple-slash refs, imports, comment markers)?
    lineAffectsRefs(filename: string, line: number) {
        const info = this.getScriptInfo(filename);
        const lineInfo = info.getLineInfo(line);
        if (lineInfo && lineInfo.text) {
            const regex = /reference|import|\/\*|\*\//;
            return regex.test(lineInfo.text);
        }
    }

    getCompilationSettings() {
        // change this to return active project settings for file
        return this.compilationSettings;
    }

    getScriptFileNames() {
        return this.roots.map(root => root.fileName);
    }

    getScriptVersion(filename: string) {
        return this.getScriptInfo(filename).svc.latestVersion().toString();
    }

    getCurrentDirectory(): string {
        return "";
    }

    getScriptIsOpen(filename: string) {
        return this.getScriptInfo(filename).isOpen;
    }

    // Drops a closed file (and its resolution cache) from this host.
    removeReferencedFile(info: ScriptInfo) {
        if (!info.isOpen) {
            this.filenameToScript.remove(info.path);
            this.resolvedModuleNames.remove(info.path);
        }
    }

    // Looks up a file, asking the project service to open it as a
    // referenced file on first access.
    getScriptInfo(filename: string): ScriptInfo {
        const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
        let scriptInfo = this.filenameToScript.get(path);
        if (!scriptInfo) {
            scriptInfo = this.project.openReferencedFile(filename);
            if (scriptInfo) {
                this.filenameToScript.set(path, scriptInfo);
            }
        }
        return scriptInfo;
    }

    addRoot(info: ScriptInfo) {
        if (!this.filenameToScript.contains(info.path)) {
            this.filenameToScript.set(info.path, info);
            this.roots.push(info);
        }
    }

    // NOTE(review): guard reads `!contains` before removing, mirroring
    // addRoot; looks inverted but is kept as-is — confirm against upstream.
    removeRoot(info: ScriptInfo) {
        if (!this.filenameToScript.contains(info.path)) {
            this.filenameToScript.remove(info.path);
            this.roots = copyListRemovingItem(info, this.roots);
            this.resolvedModuleNames.remove(info.path);
        }
    }

    /** Writes the current (in-memory) text of `filename` to `tmpfilename`. */
    saveTo(filename: string, tmpfilename: string) {
        const script = this.getScriptInfo(filename);
        if (script) {
            const snap = script.snap();
            this.host.writeFile(tmpfilename, snap.getText(0, snap.getLength()));
        }
    }

    /** Reloads `filename`'s text from `tmpfilename`, then invokes `cb`. */
    reloadScript(filename: string, tmpfilename: string, cb: () => any) {
        const script = this.getScriptInfo(filename);
        if (script) {
            script.svc.reloadFromFile(tmpfilename, cb);
        }
    }

    editScript(filename: string, start: number, end: number, newText: string) {
        const script = this.getScriptInfo(filename);
        if (script) {
            script.editContent(start, end, newText);
            return;
        }
        throw new Error("No script with name '" + filename + "'");
    }

    resolvePath(path: string): string {
        // Fix: removed dead `const start = new Date().getTime();`
        // (leftover timing instrumentation that was never read).
        return this.host.resolvePath(path);
    }

    fileExists(path: string): boolean {
        // Fix: removed dead `const start = new Date().getTime();` (see above).
        return this.host.fileExists(path);
    }

    directoryExists(path: string): boolean {
        return this.host.directoryExists(path);
    }

    /**
     * @param line 1 based index
     */
    lineToTextSpan(filename: string, line: number): ts.TextSpan {
        const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
        const script: ScriptInfo = this.filenameToScript.get(path);
        const index = script.snap().index;

        const lineInfo = index.lineNumberToInfo(line + 1);
        let len: number;
        if (lineInfo.leaf) {
            len = lineInfo.leaf.text.length;
        }
        else {
            // Not a leaf: compute length from the offset of the next line.
            const nextLineInfo = index.lineNumberToInfo(line + 2);
            len = nextLineInfo.offset - lineInfo.offset;
        }
        return ts.createTextSpan(lineInfo.offset, len);
    }

    /**
     * @param line 1 based index
     * @param offset 1 based index
     */
    lineOffsetToPosition(filename: string, line: number, offset: number): number {
        const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
        const script: ScriptInfo = this.filenameToScript.get(path);
        const index = script.snap().index;

        const lineInfo = index.lineNumberToInfo(line);
        // TODO: assert this offset is actually on the line
        return (lineInfo.offset + offset - 1);
    }

    /**
     * @param line 1-based index
     * @param offset 1-based index
     */
    positionToLineOffset(filename: string, position: number): ILineInfo {
        const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
        const script: ScriptInfo = this.filenameToScript.get(path);
        const index = script.snap().index;
        const lineOffset = index.charOffsetToLineNumberAndPos(position);
        return { line: lineOffset.line, offset: lineOffset.offset + 1 };
    }
}
// assumes normalized paths
/**
 * Resolves `filename` against `directory`.
 * - Already-rooted filenames are returned unchanged.
 * - Leading "./" segments are dropped; each leading "../" strips one
 *   trailing segment from `directory`.
 * - Returns undefined for a leading dot-segment that is neither "." nor
 *   ".." (e.g. ".hidden/x").
 * Fix: loose ==/!= replaced with strict ===/!== (string comparisons;
 * behavior unchanged, idiomatic TypeScript).
 */
function getAbsolutePath(filename: string, directory: string) {
    const rootLength = ts.getRootLength(filename);
    if (rootLength > 0) {
        // Already absolute.
        return filename;
    }
    else {
        const splitFilename = filename.split("/");
        const splitDir = directory.split("/");
        let i = 0;
        let dirTail = 0;
        const sflen = splitFilename.length;
        while ((i < sflen) && (splitFilename[i].charAt(0) === ".")) {
            const dots = splitFilename[i];
            if (dots === "..") {
                dirTail++;
            }
            else if (dots !== ".") {
                // A segment like ".hidden" — not a relative marker.
                return undefined;
            }
            i++;
        }
        return splitDir.slice(0, splitDir.length - dirTail).concat(splitFilename.slice(i)).join("/");
    }
}
/** Options read from a project (tsconfig) file. */
export interface ProjectOptions {
    // these fields can be present in the project file
    files?: string[];
    compilerOptions?: ts.CompilerOptions;
}
/**
 * A compilation unit managed by the ProjectService: either an inferred
 * project (rooted at an open file) or a configured project (backed by a
 * tsconfig.json, identified by a non-empty projectFilename).
 */
export class Project {
    compilerService: CompilerService;
    projectFilename: string;             // set only for configured projects
    projectFileWatcher: FileWatcher;
    directoryWatcher: FileWatcher;
    // Used to keep track of what directories are watched for this project
    directoriesWatchedForTsconfig: string[] = [];
    program: ts.Program;
    filenameToSourceFile: ts.Map<ts.SourceFile> = {};
    updateGraphSeq = 0;
    /** Used for configured projects which may have multiple open roots */
    openRefCount = 0;

    constructor(public projectService: ProjectService, public projectOptions?: ProjectOptions) {
        if (projectOptions && projectOptions.files) {
            // If files are listed explicitly, allow all extensions
            projectOptions.compilerOptions.allowNonTsExtensions = true;
        }
        this.compilerService = new CompilerService(this, projectOptions && projectOptions.compilerOptions);
    }

    addOpenRef() {
        this.openRefCount++;
    }

    /** Decrements the open-root refcount and returns the new value. */
    deleteOpenRef() {
        this.openRefCount--;
        return this.openRefCount;
    }

    openReferencedFile(filename: string) {
        return this.projectService.openFile(filename, false);
    }

    getRootFiles() {
        return this.compilerService.host.roots.map(info => info.fileName);
    }

    /** All file names in the current program (roots plus referenced files). */
    getFileNames() {
        const sourceFiles = this.program.getSourceFiles();
        return sourceFiles.map(sourceFile => sourceFile.fileName);
    }

    getSourceFile(info: ScriptInfo) {
        return this.filenameToSourceFile[info.fileName];
    }

    getSourceFileFromName(filename: string, requireOpen?: boolean) {
        const info = this.projectService.getScriptInfo(filename);
        if (info) {
            if ((!requireOpen) || info.isOpen) {
                return this.getSourceFile(info);
            }
        }
    }

    isRoot(info: ScriptInfo) {
        return this.compilerService.host.roots.some(root => root === info);
    }

    removeReferencedFile(info: ScriptInfo) {
        this.compilerService.host.removeReferencedFile(info);
        this.updateGraph();
    }

    // Rebuilds the filename -> SourceFile lookup from the current program.
    updateFileMap() {
        this.filenameToSourceFile = {};
        const sourceFiles = this.program.getSourceFiles();
        for (let i = 0, len = sourceFiles.length; i < len; i++) {
            const normFilename = ts.normalizePath(sourceFiles[i].fileName);
            this.filenameToSourceFile[normFilename] = sourceFiles[i];
        }
    }

    finishGraph() {
        this.updateGraph();
        // NOTE(review): getNavigateToItems(".*") appears to be used to force
        // full program analysis after the graph update — confirm.
        this.compilerService.languageService.getNavigateToItems(".*");
    }

    updateGraph() {
        this.program = this.compilerService.languageService.getProgram();
        this.updateFileMap();
    }

    // Truthy (the tsconfig path) for configured projects, falsy otherwise.
    isConfiguredProject() {
        return this.projectFilename;
    }

    // add a root file to project
    addRoot(info: ScriptInfo) {
        this.compilerService.host.addRoot(info);
    }

    // remove a root file from project
    removeRoot(info: ScriptInfo) {
        this.compilerService.host.removeRoot(info);
    }

    filesToString() {
        let strBuilder = "";
        ts.forEachValue(this.filenameToSourceFile,
            sourceFile => { strBuilder += sourceFile.fileName + "\n"; });
        return strBuilder;
    }

    setProjectOptions(projectOptions: ProjectOptions) {
        this.projectOptions = projectOptions;
        if (projectOptions.compilerOptions) {
            projectOptions.compilerOptions.allowNonTsExtensions = true;
            this.compilerService.setCompilerOptions(projectOptions.compilerOptions);
        }
    }
}
/** Result of attempting to open a project: the project on success, an error message otherwise. */
export interface ProjectOpenResult {
    success?: boolean;
    errorMsg?: string;
    project?: Project;
}
/**
 * Returns a shallow copy of `list` with every element loosely equal to
 * `item` removed. The original loose `!=` comparison is preserved on
 * purpose (callers pass object references, where != and !== coincide).
 */
function copyListRemovingItem<T>(item: T, list: T[]) {
    return list.filter(entry => entry != item);
}
/** Callback invoked by ProjectService when a project-related event (e.g. "context") occurs for a file. */
export interface ProjectServiceEventHandler {
    (eventName: string, project: Project, fileName: string): void;
}
/** Host-wide defaults: global format options and a label describing the connected editor/host. */
export interface HostConfiguration {
    formatCodeOptions: ts.FormatCodeOptions;
    hostInfo: string;
}
export class ProjectService {
// All files known to the service, keyed by file name.
filenameToScriptInfo: ts.Map<ScriptInfo> = {};
// open, non-configured root files
openFileRoots: ScriptInfo[] = [];
// projects built from openFileRoots
inferredProjects: Project[] = [];
// projects specified by a tsconfig.json file
configuredProjects: Project[] = [];
// open files referenced by a project
openFilesReferenced: ScriptInfo[] = [];
// open files that are roots of a configured project
openFileRootsConfigured: ScriptInfo[] = [];
// a path to directory watcher map that detects added tsconfig files
directoryWatchersForTsconfig: ts.Map<FileWatcher> = {};
// count of how many projects are using the directory watcher. If the
// number becomes 0 for a watcher, then we should close it.
directoryWatchersRefCount: ts.Map<number> = {};
hostConfiguration: HostConfiguration;
// Debounce timers (per tsconfig path) for project file-list change handling.
timerForDetectingProjectFilelistChanges: Map<NodeJS.Timer> = {};
/** Creates the service and installs the default host configuration. */
constructor(public host: ServerHost, public psLogger: Logger, public eventHandler?: ProjectServiceEventHandler) {
    // ts.disableIncrementalParsing = true;
    this.addDefaultHostConfiguration();
}
// Resets hostConfiguration to defaults (default format options, unknown host label).
addDefaultHostConfiguration() {
    this.hostConfiguration = {
        formatCodeOptions: ts.clone(CompilerService.defaultFormatCodeOptions),
        hostInfo: "Unknown host"
    };
}
/**
 * Returns the format options for `file` when that file has its own
 * configured options; otherwise falls back to the host-wide defaults.
 */
getFormatCodeOptions(file?: string) {
    const info = file ? this.filenameToScriptInfo[file] : undefined;
    return info ? info.formatCodeOptions : this.hostConfiguration.formatCodeOptions;
}
/**
 * File-watcher callback for a single watched file. Deleted files are
 * removed from the service; changed closed files are reloaded from disk
 * (open files track editor buffers, not disk, so they are left alone).
 */
watchedFileChanged(fileName: string) {
    const info = this.filenameToScriptInfo[fileName];
    if (!info) {
        this.psLogger.info("Error: got watch notification for unknown file: " + fileName);
        // Fix: bail out here. The original fell through and, when the file
        // no longer existed, passed `undefined` into fileDeletedInFilesystem,
        // which dereferences info.fileName and crashed.
        return;
    }
    if (!this.host.fileExists(fileName)) {
        // File was deleted
        this.fileDeletedInFilesystem(info);
    }
    else if (!info.isOpen) {
        info.svc.reloadFromFile(info.fileName);
    }
}
/**
 * This is the callback function when a watched directory has added or removed source code files.
 * @param project the project that associates with this directory watcher
 * @param fileName the absolute file name that changed in watched directory
 */
directoryWatchedForSourceFilesChanged(project: Project, fileName: string) {
    // If a change was made inside "folder/file", node will trigger the callback twice:
    // one with the fileName being "folder/file", and the other one with "folder".
    // We don't respond to the second one.
    if (fileName && !ts.isSupportedSourceFileName(fileName)) {
        return;
    }

    this.log("Detected source file changes: " + fileName);
    // Debounced: multiple rapid changes collapse into one update.
    this.startTimerForDetectingProjectFilelistChanges(project);
}
// (Re)starts the 250 ms debounce timer for the project's file-list check;
// an already-pending timer for the same project is cancelled first.
startTimerForDetectingProjectFilelistChanges(project: Project) {
    if (this.timerForDetectingProjectFilelistChanges[project.projectFilename]) {
        clearTimeout(this.timerForDetectingProjectFilelistChanges[project.projectFilename]);
    }
    this.timerForDetectingProjectFilelistChanges[project.projectFilename] = setTimeout(
        () => this.handleProjectFilelistChanges(project),
        250
    );
}
/**
 * Re-reads the project's config file and, if the root file list differs
 * from the project's current roots, rebuilds the configured project and
 * then the overall project structure.
 */
handleProjectFilelistChanges(project: Project) {
    // Fix: only `projectOptions` is used; the unused `succeeded`/`error`
    // destructured bindings were removed.
    const { projectOptions } = this.configFileToProjectOptions(project.projectFilename);

    const newRootFiles = projectOptions.files.map((f => this.getCanonicalFileName(f)));
    const currentRootFiles = project.getRootFiles().map((f => this.getCanonicalFileName(f)));

    // We check if the project file list has changed. If so, we update the project.
    if (!arrayIsEqualTo(currentRootFiles && currentRootFiles.sort(), newRootFiles && newRootFiles.sort())) {
        // For configured projects, the change is made outside the tsconfig file, and
        // it is not likely to affect the project for other files opened by the client. We can
        // just update the current project.
        this.updateConfiguredProject(project);

        // Call updateProjectStructure to clean up inferred projects we may have
        // created for the new files
        this.updateProjectStructure();
    }
}
/**
 * This is the callback function when a watched directory has an added tsconfig file.
 * Reloads all projects only if the new tsconfig claims an open root file of
 * an existing inferred project; other tsconfig additions are irrelevant.
 */
directoryWatchedForTsconfigChanged(fileName: string) {
    if (ts.getBaseFileName(fileName) !== "tsconfig.json") {
        this.log(fileName + " is not tsconfig.json");
        return;
    }

    this.log("Detected newly added tsconfig file: " + fileName);
    // Fix: only `projectOptions` is used; dropped the unused
    // `succeeded`/`error` destructured bindings (and `!=` -> `!==` above).
    const { projectOptions } = this.configFileToProjectOptions(fileName);

    const rootFilesInTsconfig = projectOptions.files.map(f => this.getCanonicalFileName(f));
    const openFileRoots = this.openFileRoots.map(s => this.getCanonicalFileName(s.fileName));

    // We should only care about the new tsconfig file if it contains any
    // opened root files of existing inferred projects
    for (const openFileRoot of openFileRoots) {
        if (rootFilesInTsconfig.indexOf(openFileRoot) >= 0) {
            this.reloadProjects();
            return;
        }
    }
}
/**
 * Canonicalizes a file name for comparison: lower-cases it on
 * case-insensitive hosts, then normalizes path separators.
 */
getCanonicalFileName(fileName: string) {
    return ts.normalizePath(this.host.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase());
}
// File-watcher callback for a project's tsconfig: rebuild that project,
// then reconcile the overall project structure.
watchedProjectConfigFileChanged(project: Project) {
    this.log("Config file changed: " + project.projectFilename);
    this.updateConfiguredProject(project);
    this.updateProjectStructure();
}
// Convenience wrapper over the logger; message type defaults to "Err".
log(msg: string, type = "Err") {
    this.psLogger.msg(msg, type);
}
/**
 * Applies a `configure` request: with args.file set, updates that file's
 * format options only; otherwise updates the host-wide info/format options.
 */
setHostConfiguration(args: ts.server.protocol.ConfigureRequestArguments) {
    if (args.file) {
        const info = this.filenameToScriptInfo[args.file];
        if (info) {
            info.setFormatOptions(args.formatOptions);
            this.log("Host configuration update for file " + args.file, "Info");
        }
    }
    else {
        if (args.hostInfo !== undefined) {
            this.hostConfiguration.hostInfo = args.hostInfo;
            this.log("Host information " + args.hostInfo, "Info");
        }
        if (args.formatOptions) {
            mergeFormatOptions(this.hostConfiguration.formatCodeOptions, args.formatOptions);
            this.log("Format host information updated", "Info");
        }
    }
}
// Closes the underlying logger.
closeLog() {
    this.psLogger.close();
}
/**
 * Creates an inferred project rooted at `root` and installs (ref-counted)
 * directory watchers on every ancestor directory so that a tsconfig.json
 * added later anywhere above the root is detected.
 */
createInferredProject(root: ScriptInfo) {
    const project = new Project(this);
    project.addRoot(root);

    let currentPath = ts.getDirectoryPath(root.fileName);
    let parentPath = ts.getDirectoryPath(currentPath);
    // Walk up until the filesystem root (where dir === parent).
    while (currentPath != parentPath) {
        if (!project.projectService.directoryWatchersForTsconfig[currentPath]) {
            this.log("Add watcher for: " + currentPath);
            project.projectService.directoryWatchersForTsconfig[currentPath] =
                this.host.watchDirectory(currentPath, fileName => this.directoryWatchedForTsconfigChanged(fileName));
            project.projectService.directoryWatchersRefCount[currentPath] = 1;
        }
        else {
            project.projectService.directoryWatchersRefCount[currentPath] += 1;
        }
        project.directoriesWatchedForTsconfig.push(currentPath);
        currentPath = parentPath;
        parentPath = ts.getDirectoryPath(parentPath);
    }

    project.finishGraph();
    this.inferredProjects.push(project);
    return project;
}
/**
 * Handles deletion of a watched file: stops its watcher, drops it from the
 * service (closed files only), detaches it from referencing projects, and
 * notifies the event handler of a context change for every open file.
 */
fileDeletedInFilesystem(info: ScriptInfo) {
    this.psLogger.info(info.fileName + " deleted");

    if (info.fileWatcher) {
        info.fileWatcher.close();
        info.fileWatcher = undefined;
    }

    if (!info.isOpen) {
        // NOTE(review): sets the map entry to undefined instead of
        // `delete`-ing it, so the key remains enumerable — confirm intended.
        this.filenameToScriptInfo[info.fileName] = undefined;
        const referencingProjects = this.findReferencingProjects(info);
        if (info.defaultProject) {
            info.defaultProject.removeRoot(info);
        }
        for (let i = 0, len = referencingProjects.length; i < len; i++) {
            referencingProjects[i].removeReferencedFile(info);
        }
        // Tell the client the context changed for every open file.
        for (let j = 0, flen = this.openFileRoots.length; j < flen; j++) {
            const openFile = this.openFileRoots[j];
            if (this.eventHandler) {
                this.eventHandler("context", openFile.defaultProject, openFile.fileName);
            }
        }
        for (let j = 0, flen = this.openFilesReferenced.length; j < flen; j++) {
            const openFile = this.openFilesReferenced[j];
            if (this.eventHandler) {
                this.eventHandler("context", openFile.defaultProject, openFile.fileName);
            }
        }
    }

    this.printProjects();
}
/**
 * Prunes the configured-project list, keeping only projects that still
 * have at least one open root file.
 */
updateConfiguredProjectList() {
    this.configuredProjects = this.configuredProjects.filter(project => project.openRefCount > 0);
}
/**
 * Removes a project: closes its watchers (configured) or releases its
 * ref-counted tsconfig directory watchers (inferred), drops it from the
 * appropriate list, and clears it as default project of its files.
 */
removeProject(project: Project) {
    this.log("remove project: " + project.getRootFiles().toString());
    if (project.isConfiguredProject()) {
        project.projectFileWatcher.close();
        project.directoryWatcher.close();
        this.configuredProjects = copyListRemovingItem(project, this.configuredProjects);
    }
    else {
        for (const directory of project.directoriesWatchedForTsconfig) {
            // if the ref count for this directory watcher drops to 0, it's time to close it
            if (!(--project.projectService.directoryWatchersRefCount[directory])) {
                this.log("Close directory watcher for: " + directory);
                project.projectService.directoryWatchersForTsconfig[directory].close();
                delete project.projectService.directoryWatchersForTsconfig[directory];
            }
        }
        this.inferredProjects = copyListRemovingItem(project, this.inferredProjects);
    }

    const fileNames = project.getFileNames();
    for (const fileName of fileNames) {
        const info = this.getScriptInfo(fileName);
        if (info.defaultProject == project) {
            info.defaultProject = undefined;
        }
    }
}
/**
 * If some configured project lists `info` among its roots, makes that
 * project the file's default, bumps its open-root refcount, and returns
 * true; returns false when no configured project owns the file.
 */
setConfiguredProjectRoot(info: ScriptInfo) {
    for (const configuredProject of this.configuredProjects) {
        if (configuredProject.isRoot(info)) {
            info.defaultProject = configuredProject;
            configuredProject.addOpenRef();
            return true;
        }
    }
    return false;
}
/**
 * Registers a newly opened file. Priority: (1) root of a configured
 * project, (2) referenced by an existing project, (3) root of a brand-new
 * inferred project — in which case existing inferred roots now reachable
 * from the new project are demoted to referenced files.
 */
addOpenFile(info: ScriptInfo) {
    if (this.setConfiguredProjectRoot(info)) {
        this.openFileRootsConfigured.push(info);
    }
    else {
        // Side effect: sets info.defaultProject when a project references it.
        this.findReferencingProjects(info);
        if (info.defaultProject) {
            this.openFilesReferenced.push(info);
        }
        else {
            // create new inferred project p with the newly opened file as root
            info.defaultProject = this.createInferredProject(info);
            const openFileRoots: ScriptInfo[] = [];
            // for each inferred project root r
            for (let i = 0, len = this.openFileRoots.length; i < len; i++) {
                const r = this.openFileRoots[i];
                // if r referenced by the new project
                if (info.defaultProject.getSourceFile(r)) {
                    // remove project rooted at r
                    this.removeProject(r.defaultProject);
                    // put r in referenced open file list
                    this.openFilesReferenced.push(r);
                    // set default project of r to the new project
                    r.defaultProject = info.defaultProject;
                }
                else {
                    // otherwise, keep r as root of inferred project
                    openFileRoots.push(r);
                }
            }
            this.openFileRoots = openFileRoots;
            this.openFileRoots.push(info);
        }
    }
    this.updateConfiguredProjectList();
}
/**
 * Remove this file from the set of open, non-configured files.
 * @param info The file that has been closed or newly configured
 */
closeOpenFile(info: ScriptInfo) {
    // Closing file should trigger re-reading the file content from disk. This is
    // because the user may chose to discard the buffer content before saving
    // to the disk, and the server's version of the file can be out of sync.
    info.svc.reloadFromFile(info.fileName);

    const openFileRoots: ScriptInfo[] = [];
    let removedProject: Project;
    for (let i = 0, len = this.openFileRoots.length; i < len; i++) {
        // if closed file is root of project
        if (info === this.openFileRoots[i]) {
            // remove that project and remember it
            removedProject = info.defaultProject;
        }
        else {
            openFileRoots.push(this.openFileRoots[i]);
        }
    }
    this.openFileRoots = openFileRoots;
    if (!removedProject) {
        // Not an inferred root: maybe a configured root whose project's
        // last open reference just went away.
        const openFileRootsConfigured: ScriptInfo[] = [];

        for (let i = 0, len = this.openFileRootsConfigured.length; i < len; i++) {
            if (info === this.openFileRootsConfigured[i]) {
                if (info.defaultProject.deleteOpenRef() === 0) {
                    removedProject = info.defaultProject;
                }
            }
            else {
                openFileRootsConfigured.push(this.openFileRootsConfigured[i]);
            }
        }

        this.openFileRootsConfigured = openFileRootsConfigured;
    }
    if (removedProject) {
        this.removeProject(removedProject);
        const openFilesReferenced: ScriptInfo[] = [];
        const orphanFiles: ScriptInfo[] = [];
        // for all open, referenced files f
        for (let i = 0, len = this.openFilesReferenced.length; i < len; i++) {
            const f = this.openFilesReferenced[i];
            // if f was referenced by the removed project, remember it
            if (f.defaultProject === removedProject || !f.defaultProject) {
                f.defaultProject = undefined;
                orphanFiles.push(f);
            }
            else {
                // otherwise add it back to the list of referenced files
                openFilesReferenced.push(f);
            }
        }
        this.openFilesReferenced = openFilesReferenced;
        // treat orphaned files as newly opened
        for (let i = 0, len = orphanFiles.length; i < len; i++) {
            this.addOpenFile(orphanFiles[i]);
        }
    }
    else {
        this.openFilesReferenced = copyListRemovingItem(info, this.openFilesReferenced);
    }
    info.close();
}
/**
 * Collect every inferred project (other than `excludedProject`) that contains
 * the given file, updating `info.defaultProject` as matches are found.
 * Configured projects that contain the file also claim `defaultProject`
 * (last match wins) but are not added to the returned list.
 */
findReferencingProjects(info: ScriptInfo, excludedProject?: Project) {
    const referencingProjects: Project[] = [];
    info.defaultProject = undefined;
    for (const inferredProject of this.inferredProjects) {
        // The graph is refreshed even for the excluded project.
        inferredProject.updateGraph();
        if (inferredProject === excludedProject) {
            continue;
        }
        if (inferredProject.getSourceFile(info)) {
            info.defaultProject = inferredProject;
            referencingProjects.push(inferredProject);
        }
    }
    for (const configuredProject of this.configuredProjects) {
        configuredProject.updateGraph();
        if (configuredProject.getSourceFile(info)) {
            info.defaultProject = configuredProject;
        }
    }
    return referencingProjects;
}
/**
 * This function rebuilds the project for every file opened by the client.
 */
reloadProjects() {
    this.log("reload projects.");
    // A tsconfig may have appeared since the inferred projects were built;
    // load or refresh it for each inferred-project root first.
    for (const rootFile of this.openFileRoots) {
        this.openOrUpdateConfiguredProjectForFile(rootFile.fileName);
    }
    this.updateProjectStructure();
}
/**
 * This function is to update the project structure for every projects.
 * It is called on the premise that all the configured projects are
 * up to date.
 * Re-homes every open file into the project that now contains it; files
 * whose project no longer references them are re-opened via addOpenFile.
 */
updateProjectStructure() {
    this.log("updating project structure from ...", "Info");
    this.printProjects();
    const unattachedOpenFiles: ScriptInfo[] = [];
    // Keep configured-project roots whose project still contains them;
    // everything else is re-homed below.
    const openFileRootsConfigured: ScriptInfo[] = [];
    for (const info of this.openFileRootsConfigured) {
        const project = info.defaultProject;
        if (!project || !(project.getSourceFile(info))) {
            info.defaultProject = undefined;
            unattachedOpenFiles.push(info);
        }
        else {
            openFileRootsConfigured.push(info);
        }
    }
    this.openFileRootsConfigured = openFileRootsConfigured;
    // First loop through all open files that are referenced by projects but are not
    // project roots. For each referenced file, see if the default project still
    // references that file. If so, then just keep the file in the referenced list.
    // If not, add the file to an unattached list, to be rechecked later.
    const openFilesReferenced: ScriptInfo[] = [];
    for (let i = 0, len = this.openFilesReferenced.length; i < len; i++) {
        const referencedFile = this.openFilesReferenced[i];
        referencedFile.defaultProject.updateGraph();
        const sourceFile = referencedFile.defaultProject.getSourceFile(referencedFile);
        if (sourceFile) {
            openFilesReferenced.push(referencedFile);
        }
        else {
            unattachedOpenFiles.push(referencedFile);
        }
    }
    this.openFilesReferenced = openFilesReferenced;
    // Then, loop through all of the open files that are project roots.
    // For each root file, note the project that it roots. Then see if
    // any other projects newly reference the file. If zero projects
    // newly reference the file, keep it as a root. If one or more
    // projects newly references the file, remove its project from the
    // inferred projects list (since it is no longer a root) and add
    // the file to the open, referenced file list.
    const openFileRoots: ScriptInfo[] = [];
    for (let i = 0, len = this.openFileRoots.length; i < len; i++) {
        const rootFile = this.openFileRoots[i];
        const rootedProject = rootFile.defaultProject;
        // findReferencingProjects resets defaultProject and re-assigns it
        // from the current inferred/configured project contents.
        const referencingProjects = this.findReferencingProjects(rootFile, rootedProject);
        if (rootFile.defaultProject && rootFile.defaultProject.isConfiguredProject()) {
            // If the root file has already been added into a configured project,
            // meaning the original inferred project is gone already.
            if (!rootedProject.isConfiguredProject()) {
                this.removeProject(rootedProject);
            }
            this.openFileRootsConfigured.push(rootFile);
        }
        else {
            if (referencingProjects.length === 0) {
                rootFile.defaultProject = rootedProject;
                openFileRoots.push(rootFile);
            }
            else {
                // remove project from inferred projects list because root captured
                this.removeProject(rootedProject);
                this.openFilesReferenced.push(rootFile);
            }
        }
    }
    this.openFileRoots = openFileRoots;
    // Finally, if we found any open, referenced files that are no longer
    // referenced by their default project, treat them as newly opened
    // by the editor.
    for (let i = 0, len = unattachedOpenFiles.length; i < len; i++) {
        this.addOpenFile(unattachedOpenFiles[i]);
    }
    this.printProjects();
}
// Look up the ScriptInfo registered for a (possibly non-normalized) path.
getScriptInfo(filename: string) {
    const normalizedName = ts.normalizePath(filename);
    return ts.lookUp(this.filenameToScriptInfo, normalizedName);
}
/**
 * Ensure a ScriptInfo exists for the given file and return it.
 * Unknown files are read from disk; a client-opened file with no readable
 * contents starts out empty. Files not owned by the editor get a disk
 * watcher so external edits are noticed.
 * @param fileName is absolute pathname
 * @param openedByClient whether the editor manages this file's contents
 */
openFile(fileName: string, openedByClient: boolean) {
    fileName = ts.normalizePath(fileName);
    let info = ts.lookUp(this.filenameToScriptInfo, fileName);
    if (!info) {
        let content: string;
        if (this.host.fileExists(fileName)) {
            content = this.host.readFile(fileName);
        }
        if (!content && openedByClient) {
            // Editor-managed file without disk contents: start from empty.
            content = "";
        }
        if (content !== undefined) {
            info = new ScriptInfo(this.host, fileName, content, openedByClient);
            info.setFormatOptions(this.getFormatCodeOptions());
            this.filenameToScriptInfo[fileName] = info;
            if (!info.isOpen) {
                // Watch on-disk changes for files the editor does not own.
                info.fileWatcher = this.host.watchFile(fileName, _ => { this.watchedFileChanged(fileName); });
            }
        }
    }
    if (info && openedByClient) {
        info.isOpen = true;
    }
    return info;
}
// This is different from the method the compiler uses because
// the compiler can assume it will always start searching in the
// current directory (the directory in which tsc was invoked).
// The server must start searching from the directory containing
// the newly opened file.
findConfigFile(searchPath: string): string {
    let dir = searchPath;
    while (true) {
        const candidate = ts.combinePaths(dir, "tsconfig.json");
        if (this.host.fileExists(candidate)) {
            return candidate;
        }
        const parent = ts.getDirectoryPath(dir);
        if (parent === dir) {
            // Reached the filesystem root without finding a config file.
            return undefined;
        }
        dir = parent;
    }
}
/**
 * Open file whose contents is managed by the client
 * @param fileName is absolute pathname
 */
openClientFile(fileName: string) {
    // Load/refresh any governing tsconfig first so the file can land in a
    // configured project when one applies.
    this.openOrUpdateConfiguredProjectForFile(fileName);
    const info = this.openFile(fileName, /*openedByClient*/ true);
    this.addOpenFile(info);
    this.printProjects();
    return info;
}
/**
 * Search for a tsconfig.json governing the given file. When one is found and
 * a configured project already exists for it, re-read the config and update
 * that project; otherwise create a new configured project from it.
 */
openOrUpdateConfiguredProjectForFile(fileName: string) {
    const searchPath = ts.normalizePath(getDirectoryPath(fileName));
    this.log("Search path: " + searchPath, "Info");
    const configFileName = this.findConfigFile(searchPath);
    if (!configFileName) {
        this.log("No config files found.");
        return;
    }
    this.log("Config file name: " + configFileName, "Info");
    const project = this.findConfiguredProjectByConfigFile(configFileName);
    if (project) {
        // Already loaded: sync the project with the config file's contents.
        this.updateConfiguredProject(project);
        return;
    }
    const configResult = this.openConfigFile(configFileName, fileName);
    if (configResult.success) {
        this.log("Opened configuration file " + configFileName, "Info");
        this.configuredProjects.push(configResult.project);
    }
    else {
        this.log("Error opening config file " + configFileName + " " + configResult.errorMsg);
    }
}
/**
 * Close file whose contents is managed by the client
 * @param filename is absolute pathname
 */
closeClientFile(filename: string) {
    const scriptInfo = ts.lookUp(this.filenameToScriptInfo, filename);
    if (scriptInfo) {
        // Tear down project bookkeeping first, then mark the file closed.
        this.closeOpenFile(scriptInfo);
        scriptInfo.isOpen = false;
    }
    this.printProjects();
}
// Default project for the given file, or undefined when the file is unknown.
getProjectForFile(filename: string) {
    const info = ts.lookUp(this.filenameToScriptInfo, filename);
    return info ? info.defaultProject : undefined;
}
// Log every project that references the given file (verbose diagnostics).
printProjectsForFile(filename: string) {
    const scriptInfo = ts.lookUp(this.filenameToScriptInfo, filename);
    if (!scriptInfo) {
        this.psLogger.info(filename + " not in any project");
        return;
    }
    this.psLogger.startGroup();
    this.psLogger.info("Projects for " + filename);
    const projects = this.findReferencingProjects(scriptInfo);
    for (let i = 0; i < projects.length; i++) {
        this.psLogger.info("Project " + i.toString());
    }
    this.psLogger.endGroup();
}
// Dump the full project structure to the log; no-op unless verbose.
printProjects() {
    if (!this.psLogger.isVerbose()) {
        return;
    }
    this.psLogger.startGroup();
    // Projects are numbered consecutively: inferred first, then configured.
    let projectIndex = 0;
    for (const project of this.inferredProjects) {
        project.updateGraph();
        this.psLogger.info("Project " + projectIndex.toString());
        this.psLogger.info(project.filesToString());
        this.psLogger.info("-----------------------------------------------");
        projectIndex++;
    }
    for (const project of this.configuredProjects) {
        project.updateGraph();
        this.psLogger.info("Project (configured) " + projectIndex.toString());
        this.psLogger.info(project.filesToString());
        this.psLogger.info("-----------------------------------------------");
        projectIndex++;
    }
    this.psLogger.info("Open file roots of inferred projects: ");
    for (const root of this.openFileRoots) {
        this.psLogger.info(root.fileName);
    }
    this.psLogger.info("Open files referenced by inferred or configured projects: ");
    for (const referenced of this.openFilesReferenced) {
        let fileInfo = referenced.fileName;
        if (referenced.defaultProject.isConfiguredProject()) {
            fileInfo += " (configured)";
        }
        this.psLogger.info(fileInfo);
    }
    this.psLogger.info("Open file roots of configured projects: ");
    for (const root of this.openFileRootsConfigured) {
        this.psLogger.info(root.fileName);
    }
    this.psLogger.endGroup();
}
// NOTE(review): despite the name, this returns true when NO configured
// project is currently loaded for the given config file (=== undefined),
// i.e. it answers "is this config file inactive?". Verify against callers
// before renaming or inverting it.
configProjectIsActive(fileName: string) {
    return this.findConfiguredProjectByConfigFile(fileName) === undefined;
}
/**
 * Find the loaded configured project whose tsconfig path matches
 * `configFileName`, or undefined when none is loaded.
 */
findConfiguredProjectByConfigFile(configFileName: string) {
    for (const project of this.configuredProjects) {
        // Strict equality (was loose `==`): both operands are strings, so
        // the result is unchanged but no implicit coercion is possible.
        if (project.projectFilename === configFileName) {
            return project;
        }
    }
    return undefined;
}
/**
 * Parse a tsconfig.json file into ProjectOptions.
 * @returns { succeeded: true, projectOptions } on success, otherwise
 *          { succeeded: false, error } describing the failure (JSON syntax
 *          error, option errors, or an empty file list).
 */
configFileToProjectOptions(configFilename: string): { succeeded: boolean, projectOptions?: ProjectOptions, error?: ProjectOpenResult } {
    configFilename = ts.normalizePath(configFilename);
    // file references will be relative to dirPath (or absolute)
    const dirPath = ts.getDirectoryPath(configFilename);
    const contents = this.host.readFile(configFilename);
    const rawConfig: { config?: ProjectOptions; error?: Diagnostic; } = ts.parseConfigFileTextToJson(configFilename, contents);
    if (rawConfig.error) {
        // JSON-level syntax error in the config file.
        return { succeeded: false, error: rawConfig.error };
    }
    else {
        const parsedCommandLine = ts.parseJsonConfigFileContent(rawConfig.config, this.host, dirPath);
        Debug.assert(!!parsedCommandLine.fileNames);
        if (parsedCommandLine.errors && (parsedCommandLine.errors.length > 0)) {
            return { succeeded: false, error: { errorMsg: "tsconfig option errors" } };
        }
        else if (parsedCommandLine.fileNames.length === 0) {
            // A config that matches no files is treated as a failure here.
            return { succeeded: false, error: { errorMsg: "no files found" } };
        }
        else {
            const projectOptions: ProjectOptions = {
                files: parsedCommandLine.fileNames,
                compilerOptions: parsedCommandLine.options
            };
            return { succeeded: true, projectOptions };
        }
    }
}
/**
 * Create a configured project from a tsconfig.json file: open every root
 * file it lists, build the graph, and install watchers on the config file
 * and its directory.
 * @param configFilename path to the tsconfig.json file
 * @param clientFileName when given, the file the client just opened; that
 *        root is opened as client-managed.
 * @returns { success: true, project } or an error result when parsing fails
 *          or a listed root file is missing.
 */
openConfigFile(configFilename: string, clientFileName?: string): ProjectOpenResult {
    const { succeeded, projectOptions, error } = this.configFileToProjectOptions(configFilename);
    if (!succeeded) {
        return error;
    }
    else {
        const project = this.createProject(configFilename, projectOptions);
        for (const rootFilename of projectOptions.files) {
            if (this.host.fileExists(rootFilename)) {
                // Strict equality (was loose `==`): both sides are strings
                // (or clientFileName is undefined), so behavior is unchanged
                // while ruling out implicit coercion.
                const info = this.openFile(rootFilename, /*openedByClient*/ clientFileName === rootFilename);
                project.addRoot(info);
            }
            else {
                return { errorMsg: "specified file " + rootFilename + " not found" };
            }
        }
        project.finishGraph();
        project.projectFileWatcher = this.host.watchFile(configFilename, _ => this.watchedProjectConfigFileChanged(project));
        this.log("Add recursive watcher for: " + ts.getDirectoryPath(configFilename));
        project.directoryWatcher = this.host.watchDirectory(
            ts.getDirectoryPath(configFilename),
            path => this.directoryWatchedForSourceFilesChanged(project, path),
            /*recursive*/ true
        );
        return { success: true, project: project };
    }
}
/**
 * Re-read a configured project's tsconfig and synchronize the project's
 * root set with it: files no longer listed are removed, newly listed files
 * are opened or adopted. When the config file has been deleted the whole
 * project is removed.
 * @returns an error result when the config file no longer parses; otherwise undefined.
 */
updateConfiguredProject(project: Project) {
    if (!this.host.fileExists(project.projectFilename)) {
        this.log("Config file deleted");
        this.removeProject(project);
    }
    else {
        const { succeeded, projectOptions, error } = this.configFileToProjectOptions(project.projectFilename);
        if (!succeeded) {
            return error;
        }
        else {
            // Diff the old root list against the config's new file list.
            const oldFileNames = project.compilerService.host.roots.map(info => info.fileName);
            const newFileNames = projectOptions.files;
            const fileNamesToRemove = oldFileNames.filter(f => newFileNames.indexOf(f) < 0);
            const fileNamesToAdd = newFileNames.filter(f => oldFileNames.indexOf(f) < 0);
            for (const fileName of fileNamesToRemove) {
                const info = this.getScriptInfo(fileName);
                if (info) {
                    project.removeRoot(info);
                }
            }
            for (const fileName of fileNamesToAdd) {
                let info = this.getScriptInfo(fileName);
                if (!info) {
                    info = this.openFile(fileName, false);
                }
                else {
                    // if the root file was opened by client, it would belong to either
                    // openFileRoots or openFileReferenced.
                    if (info.isOpen) {
                        if (this.openFileRoots.indexOf(info) >= 0) {
                            this.openFileRoots = copyListRemovingItem(info, this.openFileRoots);
                            // The inferred project it rooted is superseded by
                            // this configured project.
                            if (info.defaultProject && !info.defaultProject.isConfiguredProject()) {
                                this.removeProject(info.defaultProject);
                            }
                        }
                        if (this.openFilesReferenced.indexOf(info) >= 0) {
                            this.openFilesReferenced = copyListRemovingItem(info, this.openFilesReferenced);
                        }
                        // The file is now a root of this configured project.
                        this.openFileRootsConfigured.push(info);
                        info.defaultProject = project;
                    }
                }
                project.addRoot(info);
            }
            project.setProjectOptions(projectOptions);
            project.finishGraph();
        }
    }
}
// Construct a Project bound to this service and tag it with its file name.
createProject(projectFilename: string, projectOptions?: ProjectOptions) {
    const newProject = new Project(this, projectOptions);
    newProject.projectFilename = projectFilename;
    return newProject;
}
}
/**
 * Wraps a ts.LanguageService (plus classifier and shared document registry)
 * for a single Project, owning its LSHost and compiler options.
 */
export class CompilerService {
    host: LSHost;
    languageService: ts.LanguageService;
    classifier: ts.Classifier;
    settings: ts.CompilerOptions;
    documentRegistry = ts.createDocumentRegistry();
    constructor(public project: Project, opt?: ts.CompilerOptions) {
        this.host = new LSHost(project.projectService.host, project);
        // Fall back to default options (allowing non-.ts extensions) when the
        // caller supplies none.
        let effectiveOptions = opt;
        if (!effectiveOptions) {
            effectiveOptions = ts.getDefaultCompilerOptions();
            effectiveOptions.allowNonTsExtensions = true;
        }
        this.setCompilerOptions(effectiveOptions);
        this.languageService = ts.createLanguageService(this.host, this.documentRegistry);
        this.classifier = ts.createClassifier();
    }
    // Apply new compiler options to both this service and its host.
    setCompilerOptions(opt: ts.CompilerOptions) {
        this.settings = opt;
        this.host.setCompilationSettings(opt);
    }
    // True when the file parses as an external module.
    isExternalModule(filename: string): boolean {
        return ts.isExternalModule(this.languageService.getSourceFile(filename));
    }
    static defaultFormatCodeOptions: ts.FormatCodeOptions = {
        IndentSize: 4,
        TabSize: 4,
        NewLineCharacter: ts.sys ? ts.sys.newLine : "\n",
        ConvertTabsToSpaces: true,
        IndentStyle: ts.IndentStyle.Smart,
        InsertSpaceAfterCommaDelimiter: true,
        InsertSpaceAfterSemicolonInForStatements: true,
        InsertSpaceBeforeAndAfterBinaryOperators: true,
        InsertSpaceAfterKeywordsInControlFlowStatements: true,
        InsertSpaceAfterFunctionKeywordForAnonymousFunctions: false,
        InsertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis: false,
        InsertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets: false,
        PlaceOpenBraceOnNewLineForFunctions: false,
        PlaceOpenBraceOnNewLineForControlBlocks: false,
    };
}
/**
 * Common interface for nodes of the line-index tree: interior nodes
 * (LineNode) and leaves (LineLeaf) both report character/line totals and
 * support walking a character range.
 */
export interface LineCollection {
    // Total characters covered by this subtree.
    charCount(): number;
    // Total lines covered by this subtree.
    lineCount(): number;
    isLeaf(): boolean;
    // Visit the given character range, invoking the walker's callbacks.
    walk(rangeStart: number, rangeLength: number, walkFns: ILineIndexWalker): void;
}
/**
 * Position information for a line: its line number, a character offset
 * (offset 0 marks the start of a line — see LineIndex.edit), and optionally
 * the line's text and owning leaf.
 */
export interface ILineInfo {
    line: number;
    offset: number;
    text?: string;
    leaf?: LineLeaf;
}
/**
 * Classification of a child subtree relative to the character range being
 * walked: entirely before the start, containing the start, fully inside,
 * strictly between start and end, containing the end, or entirely after.
 */
export enum CharRangeSection {
    PreStart,
    Start,
    Entire,
    Mid,
    End,
    PostEnd
}
/**
 * Callbacks for walking a character range of the line-index tree.
 * `pre`/`post` run before/after a subtree is descended into; `leaf` runs on
 * each leaf overlapping the range. Setting `goSubtree` to false skips the
 * upcoming subtree (the walker re-arms it); setting `done` aborts the walk.
 */
export interface ILineIndexWalker {
    goSubtree: boolean;
    done: boolean;
    leaf(relativeStart: number, relativeLength: number, lineCollection: LineLeaf): void;
    pre?(relativeStart: number, relativeLength: number, lineCollection: LineCollection,
        parent: LineNode, nodeType: CharRangeSection): LineCollection;
    post?(relativeStart: number, relativeLength: number, lineCollection: LineCollection,
        parent: LineNode, nodeType: CharRangeSection): LineCollection;
}
// Default ILineIndexWalker: descends into every subtree, never finishes
// early, and ignores leaves. Subclasses override what they need.
class BaseLineIndexWalker implements ILineIndexWalker {
    goSubtree = true;
    done = false;
    leaf(rangeStart: number, rangeLength: number, ll: LineLeaf) {
    }
}
/**
 * Walker that applies an edit to a LineIndex while building a fresh tree in
 * `lineIndex`. Subtrees outside the edited range are shared/cloned as the
 * walk proceeds; the partial text before the range start (`initialText`) and
 * after the range end (`trailingText`) is captured so insertLines() can
 * splice the replacement text in at the recorded start path.
 */
class EditWalker extends BaseLineIndexWalker {
    lineIndex = new LineIndex();
    // path to start of range
    startPath: LineCollection[];
    // path (in the new tree) to the node containing the range end
    endBranch: LineCollection[] = [];
    branchNode: LineNode;
    // path to current node
    stack: LineNode[];
    state = CharRangeSection.Entire;
    lineCollectionAtBranch: LineCollection;
    initialText = "";
    trailingText = "";
    suppressTrailingText = false;
    constructor() {
        super();
        this.lineIndex.root = new LineNode();
        this.startPath = [this.lineIndex.root];
        this.stack = [this.lineIndex.root];
    }
    // Splice `insertedText` (framed by the preserved initial/trailing text)
    // into the new tree at the recorded start path, then fix up counts and
    // prune any subtree emptied at the range end.
    insertLines(insertedText: string) {
        if (this.suppressTrailingText) {
            this.trailingText = "";
        }
        if (insertedText) {
            insertedText = this.initialText + insertedText + this.trailingText;
        }
        else {
            insertedText = this.initialText + this.trailingText;
        }
        const lm = LineIndex.linesFromText(insertedText);
        const lines = lm.lines;
        if (lines.length > 1) {
            // Drop the empty trailing entry left by a final newline.
            if (lines[lines.length - 1] == "") {
                lines.length--;
            }
        }
        let branchParent: LineNode;
        let lastZeroCount: LineCollection;
        // Refresh counts along the end branch and find the highest node that
        // became empty so the empty subtree can be removed.
        for (let k = this.endBranch.length - 1; k >= 0; k--) {
            (<LineNode>this.endBranch[k]).updateCounts();
            if (this.endBranch[k].charCount() === 0) {
                lastZeroCount = this.endBranch[k];
                if (k > 0) {
                    branchParent = <LineNode>this.endBranch[k - 1];
                }
                else {
                    branchParent = this.branchNode;
                }
            }
        }
        if (lastZeroCount) {
            branchParent.remove(lastZeroCount);
        }
        // path at least length two (root and leaf)
        let insertionNode = <LineNode>this.startPath[this.startPath.length - 2];
        const leafNode = <LineLeaf>this.startPath[this.startPath.length - 1];
        const len = lines.length;
        if (len > 0) {
            leafNode.text = lines[0];
            if (len > 1) {
                // One new leaf per extra line; bubble insertions up the start
                // path, creating new root levels for nodes that do not fit.
                let insertedNodes = <LineCollection[]>new Array(len - 1);
                let startNode = <LineCollection>leafNode;
                for (let i = 1, len = lines.length; i < len; i++) {
                    insertedNodes[i - 1] = new LineLeaf(lines[i]);
                }
                let pathIndex = this.startPath.length - 2;
                while (pathIndex >= 0) {
                    insertionNode = <LineNode>this.startPath[pathIndex];
                    insertedNodes = insertionNode.insertAt(startNode, insertedNodes);
                    pathIndex--;
                    startNode = insertionNode;
                }
                let insertedNodesLen = insertedNodes.length;
                // Overflow past the root grows the tree by a level per pass.
                while (insertedNodesLen > 0) {
                    const newRoot = new LineNode();
                    newRoot.add(this.lineIndex.root);
                    insertedNodes = newRoot.insertAt(this.lineIndex.root, insertedNodes);
                    insertedNodesLen = insertedNodes.length;
                    this.lineIndex.root = newRoot;
                }
                this.lineIndex.root.updateCounts();
            }
            else {
                // Single line replaced in place: only counts need refreshing.
                for (let j = this.startPath.length - 2; j >= 0; j--) {
                    (<LineNode>this.startPath[j]).updateCounts();
                }
            }
        }
        else {
            // no content for leaf node, so delete it
            insertionNode.remove(leafNode);
            for (let j = this.startPath.length - 2; j >= 0; j--) {
                (<LineNode>this.startPath[j]).updateCounts();
            }
        }
        return this.lineIndex;
    }
    post(relativeStart: number, relativeLength: number, lineCollection: LineCollection, parent: LineCollection, nodeType: CharRangeSection): LineCollection {
        // have visited the path for start of range, now looking for end
        // if range is on single line, we will never make this state transition
        if (lineCollection === this.lineCollectionAtBranch) {
            this.state = CharRangeSection.End;
        }
        // always pop stack because post only called when child has been visited
        this.stack.length--;
        return undefined;
    }
    // Mirror each visited node into the new tree: fresh (empty) copies inside
    // the edited range, shared originals outside it; drives the
    // Entire -> Start -> End state machine.
    pre(relativeStart: number, relativeLength: number, lineCollection: LineCollection, parent: LineCollection, nodeType: CharRangeSection) {
        // currentNode corresponds to parent, but in the new tree
        const currentNode = this.stack[this.stack.length - 1];
        if ((this.state === CharRangeSection.Entire) && (nodeType === CharRangeSection.Start)) {
            // if range is on single line, we will never make this state transition
            this.state = CharRangeSection.Start;
            this.branchNode = currentNode;
            this.lineCollectionAtBranch = lineCollection;
        }
        let child: LineCollection;
        // Fresh node of the same kind (leaf or interior) as the original.
        function fresh(node: LineCollection): LineCollection {
            if (node.isLeaf()) {
                return new LineLeaf("");
            }
            else return new LineNode();
        }
        switch (nodeType) {
            case CharRangeSection.PreStart:
                // Before the range: share the original subtree, don't descend.
                this.goSubtree = false;
                if (this.state !== CharRangeSection.End) {
                    currentNode.add(lineCollection);
                }
                break;
            case CharRangeSection.Start:
                if (this.state === CharRangeSection.End) {
                    this.goSubtree = false;
                }
                else {
                    child = fresh(lineCollection);
                    currentNode.add(child);
                    this.startPath[this.startPath.length] = child;
                }
                break;
            case CharRangeSection.Entire:
                if (this.state !== CharRangeSection.End) {
                    child = fresh(lineCollection);
                    currentNode.add(child);
                    this.startPath[this.startPath.length] = child;
                }
                else {
                    if (!lineCollection.isLeaf()) {
                        child = fresh(lineCollection);
                        currentNode.add(child);
                        this.endBranch[this.endBranch.length] = child;
                    }
                }
                break;
            case CharRangeSection.Mid:
                // Fully deleted subtree: neither copied nor descended into.
                this.goSubtree = false;
                break;
            case CharRangeSection.End:
                if (this.state !== CharRangeSection.End) {
                    this.goSubtree = false;
                }
                else {
                    if (!lineCollection.isLeaf()) {
                        child = fresh(lineCollection);
                        currentNode.add(child);
                        this.endBranch[this.endBranch.length] = child;
                    }
                }
                break;
            case CharRangeSection.PostEnd:
                // After the range: share the original subtree, don't descend.
                this.goSubtree = false;
                if (this.state !== CharRangeSection.Start) {
                    currentNode.add(lineCollection);
                }
                break;
        }
        if (this.goSubtree) {
            this.stack[this.stack.length] = <LineNode>child;
        }
        return lineCollection;
    }
    // just gather text from the leaves
    leaf(relativeStart: number, relativeLength: number, ll: LineLeaf) {
        if (this.state === CharRangeSection.Start) {
            this.initialText = ll.text.substring(0, relativeStart);
        }
        else if (this.state === CharRangeSection.Entire) {
            this.initialText = ll.text.substring(0, relativeStart);
            this.trailingText = ll.text.substring(relativeStart + relativeLength);
        }
        else {
            // state is CharRangeSection.End
            this.trailingText = ll.text.substring(relativeStart + relativeLength);
        }
    }
}
// text change information
export class TextChange {
    constructor(public pos: number, public deleteLen: number, public insertedText?: string) {
    }
    // Convert to a ts.TextChangeRange: the deleted span plus the length of
    // the inserted text (0 when nothing was inserted).
    getTextChangeRange() {
        const deletedSpan = ts.createTextSpan(this.pos, this.deleteLen);
        const newLength = this.insertedText ? this.insertedText.length : 0;
        return ts.createTextChangeRange(deletedSpan, newLength);
    }
}
/**
 * Versioned store of a script's contents. Edits accumulate as TextChange
 * entries and are folded into a new LineIndexSnapshot when a size/count
 * threshold is crossed or a snapshot is requested. A bounded window of old
 * versions (maxVersions) is kept so change ranges between versions can be
 * reconstructed.
 */
export class ScriptVersionCache {
    changes: TextChange[] = [];
    versions: LineIndexSnapshot[] = [];
    minVersion = 0; // no versions earlier than min version will maintain change history
    private currentVersion = 0;
    // Set by fromString(); reloadFromFile assumes it has been set.
    private host: ServerHost;
    // Pending-edit thresholds that force snapshot creation on the next edit.
    static changeNumberThreshold = 8;
    static changeLengthThreshold = 256;
    static maxVersions = 8;
    // REVIEW: can optimize by coalescing simple edits
    edit(pos: number, deleteLen: number, insertedText?: string) {
        this.changes[this.changes.length] = new TextChange(pos, deleteLen, insertedText);
        // Many or large pending edits: collapse them into a snapshot now.
        if ((this.changes.length > ScriptVersionCache.changeNumberThreshold) ||
            (deleteLen > ScriptVersionCache.changeLengthThreshold) ||
            (insertedText && (insertedText.length > ScriptVersionCache.changeLengthThreshold))) {
            this.getSnapshot();
        }
    }
    // Snapshot for the current version (pending edits are NOT applied here).
    latest() {
        return this.versions[this.currentVersion];
    }
    // Flush any pending edits, then report the resulting version number.
    latestVersion() {
        if (this.changes.length > 0) {
            this.getSnapshot();
        }
        return this.currentVersion;
    }
    // Replace contents from disk, then invoke the optional callback.
    reloadFromFile(filename: string, cb?: () => any) {
        const content = this.host.readFile(filename);
        this.reload(content);
        if (cb)
            cb();
    }
    // reload whole script, leaving no change history behind reload
    reload(script: string) {
        this.currentVersion++;
        this.changes = []; // history wiped out by reload
        const snap = new LineIndexSnapshot(this.currentVersion, this);
        this.versions[this.currentVersion] = snap;
        snap.index = new LineIndex();
        const lm = LineIndex.linesFromText(script);
        snap.index.load(lm.lines);
        // REVIEW: could use linked list
        for (let i = this.minVersion; i < this.currentVersion; i++) {
            this.versions[i] = undefined;
        }
        this.minVersion = this.currentVersion;
    }
    // Apply pending changes (if any) to produce and store a new snapshot,
    // discarding versions that fall out of the maxVersions window.
    getSnapshot() {
        let snap = this.versions[this.currentVersion];
        if (this.changes.length > 0) {
            let snapIndex = this.latest().index;
            for (let i = 0, len = this.changes.length; i < len; i++) {
                const change = this.changes[i];
                snapIndex = snapIndex.edit(change.pos, change.deleteLen, change.insertedText);
            }
            snap = new LineIndexSnapshot(this.currentVersion + 1, this);
            snap.index = snapIndex;
            snap.changesSincePreviousVersion = this.changes;
            this.currentVersion = snap.version;
            this.versions[snap.version] = snap;
            this.changes = [];
            if ((this.currentVersion - this.minVersion) >= ScriptVersionCache.maxVersions) {
                const oldMin = this.minVersion;
                this.minVersion = (this.currentVersion - ScriptVersionCache.maxVersions) + 1;
                for (let j = oldMin; j < this.minVersion; j++) {
                    this.versions[j] = undefined;
                }
            }
        }
        return snap;
    }
    // Collapse the per-version change lists between two versions into one
    // ts.TextChangeRange; undefined when history no longer reaches back to
    // oldVersion, unchanged when oldVersion is not older than newVersion.
    getTextChangesBetweenVersions(oldVersion: number, newVersion: number) {
        if (oldVersion < newVersion) {
            if (oldVersion >= this.minVersion) {
                const textChangeRanges: ts.TextChangeRange[] = [];
                for (let i = oldVersion + 1; i <= newVersion; i++) {
                    const snap = this.versions[i];
                    for (let j = 0, len = snap.changesSincePreviousVersion.length; j < len; j++) {
                        const textChange = snap.changesSincePreviousVersion[j];
                        textChangeRanges[textChangeRanges.length] = textChange.getTextChangeRange();
                    }
                }
                return ts.collapseTextChangeRangesAcrossMultipleVersions(textChangeRanges);
            }
            else {
                return undefined;
            }
        }
        else {
            return ts.unchangedTextChangeRange;
        }
    }
    // Build a cache whose version 0 holds the given script text.
    static fromString(host: ServerHost, script: string) {
        const svc = new ScriptVersionCache();
        const snap = new LineIndexSnapshot(0, svc);
        svc.versions[svc.currentVersion] = snap;
        svc.host = host;
        snap.index = new LineIndex();
        const lm = LineIndex.linesFromText(script);
        snap.index.load(lm.lines);
        return svc;
    }
}
/**
 * ts.IScriptSnapshot over a LineIndex at a particular version; answers
 * change-range queries through its owning ScriptVersionCache.
 */
export class LineIndexSnapshot implements ts.IScriptSnapshot {
    index: LineIndex;
    changesSincePreviousVersion: TextChange[] = [];
    constructor(public version: number, public cache: ScriptVersionCache) {
    }
    getText(rangeStart: number, rangeEnd: number) {
        return this.index.getText(rangeStart, rangeEnd - rangeStart);
    }
    getLength() {
        return this.index.root.charCount();
    }
    // this requires linear space so don't hold on to these
    getLineStartPositions(): number[] {
        // starts[0] is a -1 sentinel; entry k (k >= 1) holds the character
        // offset of the k-th line.
        const starts: number[] = [-1];
        let count = 1;
        let pos = 0;
        this.index.every((ll, s, len) => {
            starts[count++] = pos;
            pos += ll.text.length;
            return true;
        }, 0);
        return starts;
    }
    // Returns a function mapping a line number to its character offset.
    getLineMapper() {
        return ((line: number) => {
            return this.index.lineNumberToInfo(line).offset;
        });
    }
    // Change range from the given older version up to this snapshot's
    // version; unchanged when this snapshot is not newer.
    getTextChangeRangeSinceVersion(scriptVersion: number) {
        if (this.version <= scriptVersion) {
            return ts.unchangedTextChangeRange;
        }
        else {
            return this.cache.getTextChangesBetweenVersions(scriptVersion, this.version);
        }
    }
    getChangeRange(oldSnapshot: ts.IScriptSnapshot): ts.TextChangeRange {
        const oldSnap = <LineIndexSnapshot>oldSnapshot;
        return this.getTextChangeRangeSinceVersion(oldSnap.version);
    }
}
/**
 * Copy-on-edit tree of text lines: interior LineNodes cache character/line
 * counts, leaves hold line text. edit() returns a new index that shares
 * untouched subtrees with this one.
 */
export class LineIndex {
    root: LineNode;
    // set this to true to check each edit for accuracy
    checkEdits = false;
    charOffsetToLineNumberAndPos(charOffset: number) {
        return this.root.charOffsetToLineNumberAndPos(1, charOffset);
    }
    // Info for the given line number; numbers past the last line map to the
    // total character count.
    lineNumberToInfo(lineNumber: number): ILineInfo {
        const lineCount = this.root.lineCount();
        if (lineNumber <= lineCount) {
            const lineInfo = this.root.lineNumberToInfo(lineNumber, 0);
            lineInfo.line = lineNumber;
            return lineInfo;
        }
        else {
            return {
                line: lineNumber,
                offset: this.root.charCount()
            };
        }
    }
    // Build the tree from a list of lines; an empty list yields an empty root.
    load(lines: string[]) {
        if (lines.length > 0) {
            const leaves: LineLeaf[] = [];
            for (let i = 0, len = lines.length; i < len; i++) {
                leaves[i] = new LineLeaf(lines[i]);
            }
            this.root = LineIndex.buildTreeFromBottom(leaves);
        }
        else {
            this.root = new LineNode();
        }
    }
    walk(rangeStart: number, rangeLength: number, walkFns: ILineIndexWalker) {
        this.root.walk(rangeStart, rangeLength, walkFns);
    }
    // Concatenate the text of the given character range; empty string for
    // non-positive lengths or starts beyond the end.
    getText(rangeStart: number, rangeLength: number) {
        let accum = "";
        if ((rangeLength > 0) && (rangeStart < this.root.charCount())) {
            this.walk(rangeStart, rangeLength, {
                goSubtree: true,
                done: false,
                leaf: (relativeStart: number, relativeLength: number, ll: LineLeaf) => {
                    accum = accum.concat(ll.text.substring(relativeStart, relativeStart + relativeLength));
                }
            });
        }
        return accum;
    }
    getLength(): number {
        return this.root.charCount();
    }
    // Invoke f for each leaf in [rangeStart, rangeEnd); stops early when f
    // returns false and reports whether the scan ran to completion.
    // NOTE(review): an explicit rangeEnd of 0 is indistinguishable from
    // "omitted" (falsy check) and scans to the end of the text — confirm no
    // caller passes 0 deliberately before changing this.
    every(f: (ll: LineLeaf, s: number, len: number) => boolean, rangeStart: number, rangeEnd?: number) {
        if (!rangeEnd) {
            rangeEnd = this.root.charCount();
        }
        const walkFns = {
            goSubtree: true,
            done: false,
            leaf: function (relativeStart: number, relativeLength: number, ll: LineLeaf) {
                if (!f(ll, relativeStart, relativeLength)) {
                    this.done = true;
                }
            }
        };
        this.walk(rangeStart, rangeEnd - rangeStart, walkFns);
        return !walkFns.done;
    }
    // Apply one splice (delete deleteLength chars at pos, insert newText) and
    // return the resulting index; untouched subtrees are shared.
    edit(pos: number, deleteLength: number, newText?: string) {
        function editFlat(source: string, s: number, dl: number, nt = "") {
            return source.substring(0, s) + nt + source.substring(s + dl, source.length);
        }
        if (this.root.charCount() === 0) {
            // TODO: assert deleteLength === 0
            if (newText) {
                this.load(LineIndex.linesFromText(newText).lines);
                return this;
            }
        }
        else {
            let checkText: string;
            if (this.checkEdits) {
                // Reference result computed on the flat string for comparison.
                checkText = editFlat(this.getText(0, this.root.charCount()), pos, deleteLength, newText);
            }
            const walker = new EditWalker();
            if (pos >= this.root.charCount()) {
                // insert at end
                pos = this.root.charCount() - 1;
                const endString = this.getText(pos, 1);
                if (newText) {
                    newText = endString + newText;
                }
                else {
                    newText = endString;
                }
                deleteLength = 0;
                walker.suppressTrailingText = true;
            }
            else if (deleteLength > 0) {
                // check whether last characters deleted are line break
                const e = pos + deleteLength;
                const lineInfo = this.charOffsetToLineNumberAndPos(e);
                if ((lineInfo && (lineInfo.offset === 0))) {
                    // move range end just past line that will merge with previous line
                    deleteLength += lineInfo.text.length;
                    // store text by appending to end of insertedText
                    if (newText) {
                        newText = newText + lineInfo.text;
                    }
                    else {
                        newText = lineInfo.text;
                    }
                }
            }
            if (pos < this.root.charCount()) {
                this.root.walk(pos, deleteLength, walker);
                walker.insertLines(newText);
            }
            if (this.checkEdits) {
                const updatedText = this.getText(0, this.root.charCount());
                Debug.assert(checkText == updatedText, "buffer edit mismatch");
            }
            return walker.lineIndex;
        }
    }
    // Pack nodes into interior nodes of up to lineCollectionCapacity children,
    // recursing level by level until a single root remains.
    static buildTreeFromBottom(nodes: LineCollection[]): LineNode {
        const nodeCount = Math.ceil(nodes.length / lineCollectionCapacity);
        const interiorNodes: LineNode[] = [];
        let nodeIndex = 0;
        for (let i = 0; i < nodeCount; i++) {
            interiorNodes[i] = new LineNode();
            let charCount = 0;
            let lineCount = 0;
            for (let j = 0; j < lineCollectionCapacity; j++) {
                if (nodeIndex < nodes.length) {
                    interiorNodes[i].add(nodes[nodeIndex]);
                    charCount += nodes[nodeIndex].charCount();
                    lineCount += nodes[nodeIndex].lineCount();
                }
                else {
                    break;
                }
                nodeIndex++;
            }
            interiorNodes[i].totalChars = charCount;
            interiorNodes[i].totalLines = lineCount;
        }
        if (interiorNodes.length === 1) {
            return interiorNodes[0];
        }
        else {
            return this.buildTreeFromBottom(interiorNodes);
        }
    }
    // Split text into lines, each keeping its terminator; a trailing fragment
    // without a newline becomes the final line.
    static linesFromText(text: string) {
        const lineStarts = ts.computeLineStarts(text);
        if (lineStarts.length === 0) {
            return { lines: <string[]>[], lineMap: lineStarts };
        }
        const lines = <string[]>new Array(lineStarts.length);
        const lc = lineStarts.length - 1;
        for (let lmi = 0; lmi < lc; lmi++) {
            lines[lmi] = text.substring(lineStarts[lmi], lineStarts[lmi + 1]);
        }
        const endText = text.substring(lineStarts[lc]);
        if (endText.length > 0) {
            lines[lc] = endText;
        }
        else {
            // Text ended with a newline: drop the empty trailing entry.
            lines.length--;
        }
        return { lines: lines, lineMap: lineStarts };
    }
}
export class LineNode implements LineCollection {
totalChars = 0;
totalLines = 0;
children: LineCollection[] = [];
// Interior nodes are never leaves.
isLeaf() {
    return false;
}
// Recompute the cached character/line totals from this node's children.
updateCounts() {
    let chars = 0;
    let lines = 0;
    for (const child of this.children) {
        chars += child.charCount();
        lines += child.lineCount();
    }
    this.totalChars = chars;
    this.totalLines = lines;
}
execWalk(rangeStart: number, rangeLength: number, walkFns: ILineIndexWalker, childIndex: number, nodeType: CharRangeSection) {
if (walkFns.pre) {
walkFns.pre(rangeStart, rangeLength, this.children[childIndex], this, nodeType);
}
if (walkFns.goSubtree) {
this.children[childIndex].walk(rangeStart, rangeLength, walkFns);
if (walkFns.post) {
walkFns.post(rangeStart, rangeLength, this.children[childIndex], this, nodeType);
}
}
else {
walkFns.goSubtree = true;
}
return walkFns.done;
}
skipChild(relativeStart: number, relativeLength: number, childIndex: number, walkFns: ILineIndexWalker, nodeType: CharRangeSection) {
if (walkFns.pre && (!walkFns.done)) {
walkFns.pre(relativeStart, relativeLength, this.children[childIndex], this, nodeType);
walkFns.goSubtree = true;
}
}
walk(rangeStart: number, rangeLength: number, walkFns: ILineIndexWalker) {
// assume (rangeStart < this.totalChars) && (rangeLength <= this.totalChars)
let childIndex = 0;
let child = this.children[0];
let childCharCount = child.charCount();
// find sub-tree containing start
let adjustedStart = rangeStart;
while (adjustedStart >= childCharCount) {
this.skipChild(adjustedStart, rangeLength, childIndex, walkFns, CharRangeSection.PreStart);
adjustedStart -= childCharCount;
child = this.children[++childIndex];
childCharCount = child.charCount();
}
// Case I: both start and end of range in same subtree
if ((adjustedStart + rangeLength) <= childCharCount) {
if (this.execWalk(adjustedStart, rangeLength, walkFns, childIndex, CharRangeSection.Entire)) {
return;
}
}
else {
// Case II: start and end of range in different subtrees (possibly with subtrees in the middle)
if (this.execWalk(adjustedStart, childCharCount - adjustedStart, walkFns, childIndex, CharRangeSection.Start)) {
return;
}
let adjustedLength = rangeLength - (childCharCount - adjustedStart);
child = this.children[++childIndex];
childCharCount = child.charCount();
while (adjustedLength > childCharCount) {
if (this.execWalk(0, childCharCount, walkFns, childIndex, CharRangeSection.Mid)) {
return;
}
adjustedLength -= childCharCount;
child = this.children[++childIndex];
childCharCount = child.charCount();
}
if (adjustedLength > 0) {
if (this.execWalk(0, adjustedLength, walkFns, childIndex, CharRangeSection.End)) {
return;
}
}
}
// Process any subtrees after the one containing range end
if (walkFns.pre) {
const clen = this.children.length;
if (childIndex < (clen - 1)) {
for (let ej = childIndex + 1; ej < clen; ej++) {
this.skipChild(0, 0, ej, walkFns, CharRangeSection.PostEnd);
}
}
}
}
charOffsetToLineNumberAndPos(lineNumber: number, charOffset: number): ILineInfo {
const childInfo = this.childFromCharOffset(lineNumber, charOffset);
if (!childInfo.child) {
return {
line: lineNumber,
offset: charOffset,
};
}
else if (childInfo.childIndex < this.children.length) {
if (childInfo.child.isLeaf()) {
return {
line: childInfo.lineNumber,
offset: childInfo.charOffset,
text: (<LineLeaf>(childInfo.child)).text,
leaf: (<LineLeaf>(childInfo.child))
};
}
else {
const lineNode = <LineNode>(childInfo.child);
return lineNode.charOffsetToLineNumberAndPos(childInfo.lineNumber, childInfo.charOffset);
}
}
else {
const lineInfo = this.lineNumberToInfo(this.lineCount(), 0);
return { line: this.lineCount(), offset: lineInfo.leaf.charCount() };
}
}
lineNumberToInfo(lineNumber: number, charOffset: number): ILineInfo {
const childInfo = this.childFromLineNumber(lineNumber, charOffset);
if (!childInfo.child) {
return {
line: lineNumber,
offset: charOffset
};
}
else if (childInfo.child.isLeaf()) {
return {
line: lineNumber,
offset: childInfo.charOffset,
text: (<LineLeaf>(childInfo.child)).text,
leaf: (<LineLeaf>(childInfo.child))
};
}
else {
const lineNode = <LineNode>(childInfo.child);
return lineNode.lineNumberToInfo(childInfo.relativeLineNumber, childInfo.charOffset);
}
}
childFromLineNumber(lineNumber: number, charOffset: number) {
let child: LineCollection;
let relativeLineNumber = lineNumber;
let i: number;
let len: number;
for (i = 0, len = this.children.length; i < len; i++) {
child = this.children[i];
const childLineCount = child.lineCount();
if (childLineCount >= relativeLineNumber) {
break;
}
else {
relativeLineNumber -= childLineCount;
charOffset += child.charCount();
}
}
return {
child: child,
childIndex: i,
relativeLineNumber: relativeLineNumber,
charOffset: charOffset
};
}
childFromCharOffset(lineNumber: number, charOffset: number) {
let child: LineCollection;
let i: number;
let len: number;
for (i = 0, len = this.children.length; i < len; i++) {
child = this.children[i];
if (child.charCount() > charOffset) {
break;
}
else {
charOffset -= child.charCount();
lineNumber += child.lineCount();
}
}
return {
child: child,
childIndex: i,
charOffset: charOffset,
lineNumber: lineNumber
};
}
splitAfter(childIndex: number) {
let splitNode: LineNode;
const clen = this.children.length;
childIndex++;
const endLength = childIndex;
if (childIndex < clen) {
splitNode = new LineNode();
while (childIndex < clen) {
splitNode.add(this.children[childIndex++]);
}
splitNode.updateCounts();
}
this.children.length = endLength;
return splitNode;
}
remove(child: LineCollection) {
const childIndex = this.findChildIndex(child);
const clen = this.children.length;
if (childIndex < (clen - 1)) {
for (let i = childIndex; i < (clen - 1); i++) {
this.children[i] = this.children[i + 1];
}
}
this.children.length--;
}
findChildIndex(child: LineCollection) {
let childIndex = 0;
const clen = this.children.length;
while ((this.children[childIndex] !== child) && (childIndex < clen)) childIndex++;
return childIndex;
}
insertAt(child: LineCollection, nodes: LineCollection[]) {
let childIndex = this.findChildIndex(child);
const clen = this.children.length;
const nodeCount = nodes.length;
// if child is last and there is more room and only one node to place, place it
if ((clen < lineCollectionCapacity) && (childIndex === (clen - 1)) && (nodeCount === 1)) {
this.add(nodes[0]);
this.updateCounts();
return [];
}
else {
const shiftNode = this.splitAfter(childIndex);
let nodeIndex = 0;
childIndex++;
while ((childIndex < lineCollectionCapacity) && (nodeIndex < nodeCount)) {
this.children[childIndex++] = nodes[nodeIndex++];
}
let splitNodes: LineNode[] = [];
let splitNodeCount = 0;
if (nodeIndex < nodeCount) {
splitNodeCount = Math.ceil((nodeCount - nodeIndex) / lineCollectionCapacity);
splitNodes = <LineNode[]>new Array(splitNodeCount);
let splitNodeIndex = 0;
for (let i = 0; i < splitNodeCount; i++) {
splitNodes[i] = new LineNode();
}
let splitNode = <LineNode>splitNodes[0];
while (nodeIndex < nodeCount) {
splitNode.add(nodes[nodeIndex++]);
if (splitNode.children.length === lineCollectionCapacity) {
splitNodeIndex++;
splitNode = <LineNode>splitNodes[splitNodeIndex];
}
}
for (let i = splitNodes.length - 1; i >= 0; i--) {
if (splitNodes[i].children.length === 0) {
splitNodes.length--;
}
}
}
if (shiftNode) {
splitNodes[splitNodes.length] = shiftNode;
}
this.updateCounts();
for (let i = 0; i < splitNodeCount; i++) {
(<LineNode>splitNodes[i]).updateCounts();
}
return splitNodes;
}
}
// assume there is room for the item; return true if more room
add(collection: LineCollection) {
this.children[this.children.length] = collection;
return (this.children.length < lineCollectionCapacity);
}
charCount() {
return this.totalChars;
}
lineCount() {
return this.totalLines;
}
}
/**
 * Leaf of the line-index tree: holds the text of a single line (including its
 * trailing newline, when present).
 */
export class LineLeaf implements LineCollection {
    // Opaque client data attached to this line; the tree itself never reads it.
    udata: any;
    constructor(public text: string) {
    }
    setUdata(data: any) {
        this.udata = data;
    }
    getUdata() {
        return this.udata;
    }
    isLeaf() {
        return true;
    }
    // Leaves terminate a range walk by reporting themselves to the walker.
    walk(rangeStart: number, rangeLength: number, walkFns: ILineIndexWalker) {
        walkFns.leaf(rangeStart, rangeLength, this);
    }
    charCount() {
        return this.text.length;
    }
    // A leaf always represents exactly one line.
    lineCount() {
        return 1;
    }
}
}
| bowlofstew/kythe | third_party/typescript/src/server/editorServices.ts | TypeScript | apache-2.0 | 96,179 |
package com.dxbook.ui;
/**
 * Screen for displaying search results.
 * NOTE(review): currently an empty placeholder — it has no body and does not extend
 * android.app.Activity, so it is presumably a stub awaiting implementation; confirm
 * before referencing it from the manifest.
 */
public class SearchResultActivity {
}
| treper/SlidingMenuUsage | DxStore/src/com/dxbook/ui/SearchResultActivity.java | Java | apache-2.0 | 63 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.managedblockchain.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * A summary of network configuration properties.
 * </p>
 *
 * <p>
 * Generated model class: a simple mutable POJO with fluent {@code withX} setters.
 * {@code equals}/{@code hashCode} are expressed via {@link java.util.Objects}, which is
 * semantically identical to the generated null-safe field comparisons and the prime-31
 * hash accumulation.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/NetworkSummary" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class NetworkSummary implements Serializable, Cloneable, StructuredPojo {

    /** The unique identifier of the network. */
    private String id;
    /** The name of the network. */
    private String name;
    /** An optional description of the network. */
    private String description;
    /** The blockchain framework that the network uses. */
    private String framework;
    /** The version of the blockchain framework that the network uses. */
    private String frameworkVersion;
    /** The current status of the network. */
    private String status;
    /** The date and time that the network was created. */
    private java.util.Date creationDate;
    /**
     * The Amazon Resource Name (ARN) of the network. For ARN format details see
     * <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">Amazon
     * Resource Names (ARNs)</a> in the <i>AWS General Reference</i>.
     */
    private String arn;

    /** @param id The unique identifier of the network. */
    public void setId(String id) {
        this.id = id;
    }

    /** @return The unique identifier of the network. */
    public String getId() {
        return this.id;
    }

    /**
     * @param id The unique identifier of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withId(String id) {
        setId(id);
        return this;
    }

    /** @param name The name of the network. */
    public void setName(String name) {
        this.name = name;
    }

    /** @return The name of the network. */
    public String getName() {
        return this.name;
    }

    /**
     * @param name The name of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withName(String name) {
        setName(name);
        return this;
    }

    /** @param description An optional description of the network. */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return An optional description of the network. */
    public String getDescription() {
        return this.description;
    }

    /**
     * @param description An optional description of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * @param framework The blockchain framework that the network uses.
     * @see Framework
     */
    public void setFramework(String framework) {
        this.framework = framework;
    }

    /**
     * @return The blockchain framework that the network uses.
     * @see Framework
     */
    public String getFramework() {
        return this.framework;
    }

    /**
     * @param framework The blockchain framework that the network uses.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Framework
     */
    public NetworkSummary withFramework(String framework) {
        setFramework(framework);
        return this;
    }

    /**
     * Enum overload; stores {@code framework.toString()}.
     *
     * @param framework The blockchain framework that the network uses.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Framework
     */
    public NetworkSummary withFramework(Framework framework) {
        this.framework = framework.toString();
        return this;
    }

    /** @param frameworkVersion The version of the blockchain framework that the network uses. */
    public void setFrameworkVersion(String frameworkVersion) {
        this.frameworkVersion = frameworkVersion;
    }

    /** @return The version of the blockchain framework that the network uses. */
    public String getFrameworkVersion() {
        return this.frameworkVersion;
    }

    /**
     * @param frameworkVersion The version of the blockchain framework that the network uses.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withFrameworkVersion(String frameworkVersion) {
        setFrameworkVersion(frameworkVersion);
        return this;
    }

    /**
     * @param status The current status of the network.
     * @see NetworkStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * @return The current status of the network.
     * @see NetworkStatus
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * @param status The current status of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see NetworkStatus
     */
    public NetworkSummary withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Enum overload; stores {@code status.toString()}.
     *
     * @param status The current status of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see NetworkStatus
     */
    public NetworkSummary withStatus(NetworkStatus status) {
        this.status = status.toString();
        return this;
    }

    /** @param creationDate The date and time that the network was created. */
    public void setCreationDate(java.util.Date creationDate) {
        this.creationDate = creationDate;
    }

    /** @return The date and time that the network was created. */
    public java.util.Date getCreationDate() {
        return this.creationDate;
    }

    /**
     * @param creationDate The date and time that the network was created.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withCreationDate(java.util.Date creationDate) {
        setCreationDate(creationDate);
        return this;
    }

    /** @param arn The Amazon Resource Name (ARN) of the network. */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /** @return The Amazon Resource Name (ARN) of the network. */
    public String getArn() {
        return this.arn;
    }

    /**
     * @param arn The Amazon Resource Name (ARN) of the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkSummary withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     * The output format is kept byte-identical to the generated version.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getId() != null)
            sb.append("Id: ").append(getId()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getFramework() != null)
            sb.append("Framework: ").append(getFramework()).append(",");
        if (getFrameworkVersion() != null)
            sb.append("FrameworkVersion: ").append(getFrameworkVersion()).append(",");
        if (getStatus() != null)
            sb.append("Status: ").append(getStatus()).append(",");
        if (getCreationDate() != null)
            sb.append("CreationDate: ").append(getCreationDate()).append(",");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof NetworkSummary))
            return false;
        NetworkSummary other = (NetworkSummary) obj;
        // Objects.equals(a, b) is true iff both are null or a.equals(b) — exactly the
        // null-XOR pattern the generator emits, per field.
        return java.util.Objects.equals(getId(), other.getId())
                && java.util.Objects.equals(getName(), other.getName())
                && java.util.Objects.equals(getDescription(), other.getDescription())
                && java.util.Objects.equals(getFramework(), other.getFramework())
                && java.util.Objects.equals(getFrameworkVersion(), other.getFrameworkVersion())
                && java.util.Objects.equals(getStatus(), other.getStatus())
                && java.util.Objects.equals(getCreationDate(), other.getCreationDate())
                && java.util.Objects.equals(getArn(), other.getArn());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (seed 1, null -> 0)
        // as the generated loop, so hash values are unchanged.
        return java.util.Objects.hash(getId(), getName(), getDescription(), getFramework(), getFrameworkVersion(), getStatus(), getCreationDate(), getArn());
    }

    @Override
    public NetworkSummary clone() {
        try {
            return (NetworkSummary) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.managedblockchain.model.transform.NetworkSummaryMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| aws/aws-sdk-java | aws-java-sdk-managedblockchain/src/main/java/com/amazonaws/services/managedblockchain/model/NetworkSummary.java | Java | apache-2.0 | 16,696 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result object for the DescribeTagOption operation. Generated model class;
 * {@code equals}/{@code hashCode} are expressed via {@link java.util.Objects}, which is
 * semantically identical to the generated null-safe comparison and prime-31 hash.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/DescribeTagOption" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeTagOptionResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Information about the TagOption. */
    private TagOptionDetail tagOptionDetail;

    /** @param tagOptionDetail Information about the TagOption. */
    public void setTagOptionDetail(TagOptionDetail tagOptionDetail) {
        this.tagOptionDetail = tagOptionDetail;
    }

    /** @return Information about the TagOption. */
    public TagOptionDetail getTagOptionDetail() {
        return this.tagOptionDetail;
    }

    /**
     * @param tagOptionDetail Information about the TagOption.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTagOptionResult withTagOptionDetail(TagOptionDetail tagOptionDetail) {
        setTagOptionDetail(tagOptionDetail);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     * The output format is kept byte-identical to the generated version.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTagOptionDetail() != null)
            sb.append("TagOptionDetail: ").append(getTagOptionDetail());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof DescribeTagOptionResult))
            return false;
        DescribeTagOptionResult other = (DescribeTagOptionResult) obj;
        // Note: like the generated version, superclass (response metadata) state is
        // deliberately not part of equality.
        return java.util.Objects.equals(getTagOptionDetail(), other.getTagOptionDetail());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same seed-1/prime-31 accumulation as the generated loop.
        return java.util.Objects.hash(getTagOptionDetail());
    }

    @Override
    public DescribeTagOptionResult clone() {
        try {
            return (DescribeTagOptionResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| aws/aws-sdk-java | aws-java-sdk-servicecatalog/src/main/java/com/amazonaws/services/servicecatalog/model/DescribeTagOptionResult.java | Java | apache-2.0 | 3,915 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scavi.brainsqueeze.codefight.challenge;
import java.util.GregorianCalendar;
/**
 * Solution to the CodeFight "creditCycle" challenge.
 */
public class CreditCycle {

    /**
     * Computes the challenge-defined day distance for a credit cycle.
     *
     * @param m month (1-12)
     * @param d cycle day of month
     * @param y year
     * @param c reference day of month
     * @return {@code d - c} when {@code c < d}; otherwise the length of the month
     *         preceding {@code m} minus {@code c} plus {@code d}
     */
    int creditCycle(int m, int d, int y, int c) {
        if (c < d) {
            return d - c;
        }
        // Step back one month, rolling the year boundary in January.
        if (--m == 0) {
            m = 12;
            --y;
        }
        // GregorianCalendar months are zero-based, hence m - 1. The magic
        // constant 5 in the original was Calendar.DAY_OF_MONTH; use the named
        // constant so the intent (length of that month) is explicit.
        int daysInMonth = new GregorianCalendar(y, m - 1, 1)
                .getActualMaximum(GregorianCalendar.DAY_OF_MONTH);
        return daysInMonth - c + d;
    }
}
| Scavi/BrainSqueeze | src/main/java/com/scavi/brainsqueeze/codefight/challenge/CreditCycle.java | Java | apache-2.0 | 923 |
/*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example.springdata.jpa.multipleds.order;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import example.springdata.jpa.multipleds.customer.CustomerRepository;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
/**
 * Integration test for {@link OrderRepository}. (The comment previously referenced
 * {@link CustomerRepository} — a copy-paste leftover; the class under test is the
 * order repository.) The transaction is bound to the dedicated order transaction
 * manager so each test's writes are rolled back against the order data source.
 *
 * @author Oliver Gierke
 */
@RunWith(SpringRunner.class)
@SpringBootTest
@Transactional(transactionManager = "orderTransactionManager")
public class OrderRepositoryTests {

	@Autowired OrderRepository orders;
	@Autowired CustomerRepository customers;

	@Test
	public void persistsAndFindsCustomer() {

		// For every known customer, look up their orders via the order repository.
		// Assumes the sample's seed data creates exactly one order per customer —
		// TODO confirm against the application's data setup.
		customers.findAll().forEach(customer -> {
			assertThat(orders.findByCustomer(customer.getId()), hasSize(1));
		});
	}
}
| thomasdarimont/spring-data-examples | jpa/multiple-datasources/src/test/java/example/springdata/jpa/multipleds/order/OrderRepositoryTests.java | Java | apache-2.0 | 1,608 |
package com.commercetools.sunrise.common.template.cms.filebased;
import com.commercetools.sunrise.cms.CmsPage;
import com.commercetools.sunrise.cms.CmsService;
import com.commercetools.sunrise.common.template.i18n.I18nResolver;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
import static java.util.concurrent.CompletableFuture.completedFuture;
/**
* Service that provides content data from i18n files that can be found in a local file.
* Internally it uses a I18nResolver to resolve the requested messages.
*
* The mapping of the {@link CmsService} parameters to {@link I18nResolver} parameters goes as follows:
*
* - {@code bundle} = {@code entryType} (e.g. banner)
* - {@code messageKey} = {@code entryKey.fieldName} (e.g. homeTopLeft.subtitle.text)
*/
public final class FileBasedCmsService implements CmsService {
@Inject
@Named("cms")
private I18nResolver i18nResolver;
/**
 * Creates a {@link CmsPage} backed by the file-based i18n messages.
 *
 * @param pageKey key identifying the requested page; passed through to {@code FileBasedCmsPage}
 * @param locales locales to resolve messages against, in order of preference
 * @return an already-completed stage holding the page wrapped in a non-empty {@link Optional}
 */
@Override
public CompletionStage<Optional<CmsPage>> page(final String pageKey, final List<Locale> locales) {
    // The page object only captures the resolver/key/locales here; actual message
    // resolution presumably happens lazily on field access — confirm against
    // FileBasedCmsPage, which is not visible in this file.
    final CmsPage cmsPage = new FileBasedCmsPage(i18nResolver, pageKey, locales);
    // File-backed lookup is synchronous and cheap, so an already-completed future
    // satisfies the asynchronous CmsService contract without any thread hop.
    return completedFuture(Optional.of(cmsPage));
}
} | rfuertesp/pruebas2 | common/app/com/commercetools/sunrise/common/template/cms/filebased/FileBasedCmsService.java | Java | apache-2.0 | 1,294 |
import torch
from torch import nn, Tensor
from typing import Iterable, Dict
from sentence_transformers import SentenceTransformer
from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, PreTrainedModel
import logging
logger = logging.getLogger(__name__)
class DenoisingAutoEncoderLoss(nn.Module):
"""
This loss expects as input a batch consisting of damaged sentences and the corresponding original ones.
The data generation process has already been implemented in readers/DenoisingAutoEncoderReader.py
During training, the decoder reconstructs the original sentences from the encoded sentence embeddings.
Here the argument 'decoder_name_or_path' indicates the pretrained model (supported by Huggingface) to be used as the decoder.
Since decoding process is included, here the decoder should have a class called XXXLMHead (in the context of Huggingface's Transformers).
Flag 'tie_encoder_decoder' indicates whether to tie the trainable parameters of encoder and decoder,
which is shown beneficial to model performance while limiting the amount of required memory.
Only when the encoder and decoder are from the same architecture, can the flag 'tie_encoder_decoder' works.
For more information, please refer to the TSDAE paper.
"""
    def __init__(
            self,
            model: SentenceTransformer,
            decoder_name_or_path: str = None,
            tie_encoder_decoder: bool = True
    ):
        """
        :param model: SentenceTransformer model
        :param decoder_name_or_path: Model name or path for initializing a decoder (compatible with Huggingface's Transformers)
        :param tie_encoder_decoder: whether to tie the trainable parameters of encoder and decoder

        Notes:
            - When ``tie_encoder_decoder=True`` the decoder is built from the encoder's
              own checkpoint and their weights are tied, so any explicit
              ``decoder_name_or_path`` is ignored (a warning is logged).
            - When ``decoder_name_or_path`` is omitted, tying is mandatory (asserted below).
        """
        super(DenoisingAutoEncoderLoss, self).__init__()
        self.encoder = model  # This will be the final model used during the inference time.
        self.tokenizer_encoder = model.tokenizer
        # Checkpoint the encoder was loaded from; reused as the decoder checkpoint when tying.
        encoder_name_or_path = model[0].auto_model.config._name_or_path
        if decoder_name_or_path is None:
            assert tie_encoder_decoder, "Must indicate the decoder_name_or_path argument when tie_encoder_decoder=False!"
        if tie_encoder_decoder:
            if decoder_name_or_path:
                logger.warning('When tie_encoder_decoder=True, the decoder_name_or_path will be invalid.')
            decoder_name_or_path = encoder_name_or_path
        self.tokenizer_decoder = AutoTokenizer.from_pretrained(decoder_name_or_path)
        # Different tokenizer classes => target features must be re-tokenized in forward().
        self.need_retokenization = not (type(self.tokenizer_encoder) == type(self.tokenizer_decoder))
        # Configure the decoder as a causal LM with cross-attention over the sentence embedding.
        decoder_config = AutoConfig.from_pretrained(decoder_name_or_path)
        decoder_config.is_decoder = True
        decoder_config.add_cross_attention = True
        kwargs_decoder = {'config': decoder_config}
        try:
            self.decoder = AutoModelForCausalLM.from_pretrained(decoder_name_or_path, **kwargs_decoder)
        except ValueError as e:
            logger.error(f'Model name or path "{decoder_name_or_path}" does not support being as a decoder. Please make sure the decoder model has an "XXXLMHead" class.')
            raise e
        # Cross-attention consumes the encoder's sentence embedding directly, so widths must agree.
        assert model[0].auto_model.config.hidden_size == decoder_config.hidden_size, 'Hidden sizes do not match!'
        if self.tokenizer_decoder.pad_token is None:
            # Needed by GPT-2, etc.
            self.tokenizer_decoder.pad_token = self.tokenizer_decoder.eos_token
            self.decoder.config.pad_token_id = self.decoder.config.eos_token_id
        # Compare against a freshly loaded tokenizer to detect user-added tokens on the encoder side.
        if len(AutoTokenizer.from_pretrained(encoder_name_or_path)) != len(self.tokenizer_encoder):
            logger.warning('WARNING: The vocabulary of the encoder has been changed. One might need to change the decoder vocabulary, too.')
        if tie_encoder_decoder:
            assert not self.need_retokenization, "The tokenizers should be the same when tie_encoder_decoder=True."
            if len(self.tokenizer_encoder) != len(self.tokenizer_decoder):  # The vocabulary has been changed.
                self.tokenizer_decoder = self.tokenizer_encoder
                self.decoder.resize_token_embeddings(len(self.tokenizer_decoder))
                logger.warning('Since the encoder vocabulary has been changed and --tie_encoder_decoder=True, now the new vocabulary has also been used for the decoder.')
            decoder_base_model_prefix = self.decoder.base_model_prefix
            # Share encoder/decoder backbone weights (the LM head stays decoder-only).
            PreTrainedModel._tie_encoder_decoder_weights(
                model[0].auto_model,
                self.decoder._modules[decoder_base_model_prefix],
                self.decoder.base_model_prefix
            )
def retokenize(self, sentence_features):
input_ids = sentence_features['input_ids']
device = input_ids.device
sentences_decoded = self.tokenizer_decoder.batch_decode(
input_ids,
skip_special_tokens=True,
clean_up_tokenization_spaces=True
)
retokenized = self.tokenizer_decoder(
sentences_decoded,
padding=True,
truncation='longest_first',
return_tensors="pt",
max_length=None
).to(device)
return retokenized
    def forward(self, sentence_features: Iterable[Dict[str, Tensor]], labels: Tensor):
        """Reconstruction loss: encode the damaged sentence, decode the original.

        :param sentence_features: pair of tokenized batches
            (damaged/source first, original/target second).
        :param labels: unused; the targets are derived from the second element
            of ``sentence_features``.
        :return: scalar cross-entropy loss over the decoder's next-token predictions.
        """
        source_features, target_features = tuple(sentence_features)
        if self.need_retokenization:
            # since the sentence_features here are all tokenized by encoder's tokenizer,
            # retokenization by the decoder's one is needed if different tokenizers used
            target_features = self.retokenize(target_features)
        reps = self.encoder(source_features)['sentence_embedding']  # (bsz, hdim)
        # Prepare input and output: standard teacher forcing — the decoder sees
        # tokens [0, L-1) and must predict tokens [1, L) (one-position shift).
        target_length = target_features['input_ids'].shape[1]
        decoder_input_ids = target_features['input_ids'].clone()[:, :target_length - 1]
        label_ids = target_features['input_ids'][:, 1:]
        # Decode: the whole source sentence is represented by a single embedding,
        # exposed to cross-attention as a length-1 "encoder sequence".
        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            inputs_embeds=None,
            attention_mask=None,
            encoder_hidden_states=reps[:, None],  # (bsz, hdim) -> (bsz, 1, hdim)
            # Mask sliced to length 1 to match the single-embedding "sequence".
            encoder_attention_mask=source_features['attention_mask'][:, 0:1],
            labels=None,
            return_dict=None,
            use_cache=False
        )
        # Calculate loss manually (labels=None above) so padding positions are
        # excluded via ignore_index.
        lm_logits = decoder_outputs[0]
        ce_loss_fct = nn.CrossEntropyLoss(ignore_index=self.tokenizer_decoder.pad_token_id)
        loss = ce_loss_fct(lm_logits.view(-1, lm_logits.shape[-1]), label_ids.reshape(-1))
return loss | UKPLab/sentence-transformers | sentence_transformers/losses/DenoisingAutoEncoderLoss.py | Python | apache-2.0 | 6,700 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import org.apache.hadoop.classification.InterfaceAudience;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_INITIAL_DELAY_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_INITIAL_DELAY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCKREPORT_SPLIT_THRESHOLD_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CACHEREPORT_INTERVAL_MSEC_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LIFELINE_INTERVAL_SECONDS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_NON_LOCAL_LAZY_PERSIST;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_NON_LOCAL_LAZY_PERSIST_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SYNCONCLOSE_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SYNCONCLOSE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_TRANSFERTO_ALLOWED_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_TRANSFERTO_ALLOWED_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_XCEIVER_STOP_TIMEOUT_MILLIS_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_XCEIVER_STOP_TIMEOUT_MILLIS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATA_ENCRYPTION_ALGORITHM_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_RESTART_REPLICA_EXPIRY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_RESTART_REPLICA_EXPIRY_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BP_READY_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BP_READY_TIMEOUT_DEFAULT;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil;
import org.apache.hadoop.hdfs.server.common.Util;
import org.apache.hadoop.security.SaslPropertiesResolver;
import java.util.concurrent.TimeUnit;
/**
* Simple class encapsulating all of the configuration that the DataNode
* loads at startup time.
*/
@InterfaceAudience.Private
public class DNConf {
// --- socket configuration ---
final int socketTimeout;
final int socketWriteTimeout;
final int socketKeepaliveTimeout;
private final int transferSocketSendBufferSize;
private final int transferSocketRecvBufferSize;
private final boolean tcpNoDelay;
// --- data transfer / caching behaviour ---
final boolean transferToAllowed;
final boolean dropCacheBehindWrites;
final boolean syncBehindWrites;
final boolean syncBehindWritesInBackground;
final boolean dropCacheBehindReads;
final boolean syncOnClose;
final boolean encryptDataTransfer;
final boolean connectToDnViaHostname;
final long readaheadLength;
// --- reporting intervals (milliseconds unless named otherwise) ---
final long heartBeatInterval;
private final long lifelineIntervalMs;
final long blockReportInterval;
final long blockReportSplitThreshold;
final boolean peerStatsEnabled;
final boolean diskStatsEnabled;
final long outliersReportIntervalMs;
final long ibrInterval;
final long initialBlockReportDelayMs;
final long cacheReportInterval;
final long datanodeSlowIoWarningThresholdMs;
// --- security / compatibility ---
final String minimumNameNodeVersion;
final String encryptionAlgorithm;
final SaslPropertiesResolver saslPropsResolver;
final TrustedChannelResolver trustedChannelResolver;
private final boolean ignoreSecurePortsForTesting;
// --- misc limits and timeouts ---
final long xceiverStopTimeout;
final long restartReplicaExpiry;
final long maxLockedMemory;
private final long bpReadyTimeout;
// Allow LAZY_PERSIST writes from non-local clients?
private final boolean allowNonLocalLazyPersist;
private final int volFailuresTolerated;
private final int volsConfigured;
private final int maxDataLength;
// Source of the live Configuration; see getConf().
private Configurable dn;
/**
 * Loads and caches every DataNode configuration value at startup so the rest
 * of the DataNode reads immutable fields instead of re-querying the live
 * {@link Configuration}.
 *
 * @param dn supplier of the {@link Configuration} (typically the DataNode)
 */
public DNConf(final Configurable dn) {
this.dn = dn;
socketTimeout = getConf().getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY,
HdfsConstants.READ_TIMEOUT);
socketWriteTimeout = getConf().getInt(DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
HdfsConstants.WRITE_TIMEOUT);
socketKeepaliveTimeout = getConf().getInt(
DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_KEY,
DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_DEFAULT);
this.transferSocketSendBufferSize = getConf().getInt(
DFSConfigKeys.DFS_DATANODE_TRANSFER_SOCKET_SEND_BUFFER_SIZE_KEY,
DFSConfigKeys.DFS_DATANODE_TRANSFER_SOCKET_SEND_BUFFER_SIZE_DEFAULT);
this.transferSocketRecvBufferSize = getConf().getInt(
DFSConfigKeys.DFS_DATANODE_TRANSFER_SOCKET_RECV_BUFFER_SIZE_KEY,
DFSConfigKeys.DFS_DATANODE_TRANSFER_SOCKET_RECV_BUFFER_SIZE_DEFAULT);
this.tcpNoDelay = getConf().getBoolean(
DFSConfigKeys.DFS_DATA_TRANSFER_SERVER_TCPNODELAY,
DFSConfigKeys.DFS_DATA_TRANSFER_SERVER_TCPNODELAY_DEFAULT);
/* Based on results on different platforms, we might need set the default
* to false on some of them. */
transferToAllowed = getConf().getBoolean(
DFS_DATANODE_TRANSFERTO_ALLOWED_KEY,
DFS_DATANODE_TRANSFERTO_ALLOWED_DEFAULT);
readaheadLength = getConf().getLong(
HdfsClientConfigKeys.DFS_DATANODE_READAHEAD_BYTES_KEY,
HdfsClientConfigKeys.DFS_DATANODE_READAHEAD_BYTES_DEFAULT);
maxDataLength = getConf().getInt(DFSConfigKeys.IPC_MAXIMUM_DATA_LENGTH,
DFSConfigKeys.IPC_MAXIMUM_DATA_LENGTH_DEFAULT);
dropCacheBehindWrites = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_KEY,
DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_DEFAULT);
syncBehindWrites = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_KEY,
DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_DEFAULT);
syncBehindWritesInBackground = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_IN_BACKGROUND_KEY,
DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_IN_BACKGROUND_DEFAULT);
dropCacheBehindReads = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_KEY,
DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT);
connectToDnViaHostname = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME,
DFSConfigKeys.DFS_DATANODE_USE_DN_HOSTNAME_DEFAULT);
this.blockReportInterval = getConf().getLong(
DFS_BLOCKREPORT_INTERVAL_MSEC_KEY,
DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT);
this.peerStatsEnabled = getConf().getBoolean(
DFSConfigKeys.DFS_DATANODE_PEER_STATS_ENABLED_KEY,
DFSConfigKeys.DFS_DATANODE_PEER_STATS_ENABLED_DEFAULT);
this.diskStatsEnabled = Util.isDiskStatsEnabled(getConf().getDouble(
DFSConfigKeys.DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY,
DFSConfigKeys.DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEFAULT));
this.outliersReportIntervalMs = getConf().getTimeDuration(
DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY,
DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_DEFAULT,
TimeUnit.MILLISECONDS);
this.ibrInterval = getConf().getLong(
DFSConfigKeys.DFS_BLOCKREPORT_INCREMENTAL_INTERVAL_MSEC_KEY,
DFSConfigKeys.DFS_BLOCKREPORT_INCREMENTAL_INTERVAL_MSEC_DEFAULT);
this.blockReportSplitThreshold = getConf().getLong(
DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY,
DFS_BLOCKREPORT_SPLIT_THRESHOLD_DEFAULT);
this.cacheReportInterval = getConf().getLong(
DFS_CACHEREPORT_INTERVAL_MSEC_KEY,
DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT);
this.datanodeSlowIoWarningThresholdMs = getConf().getLong(
DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY,
DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_DEFAULT);
// An initial delay at or beyond the report interval makes no sense; clamp to 0.
long initBRDelay = getConf().getTimeDuration(
DFS_BLOCKREPORT_INITIAL_DELAY_KEY,
DFS_BLOCKREPORT_INITIAL_DELAY_DEFAULT, TimeUnit.SECONDS) * 1000L;
if (initBRDelay >= blockReportInterval) {
initBRDelay = 0;
// Fixed message: the original concatenation rendered
// "greater than or equal todfs.blockreport.intervalMsec" (missing space)
// and ended with a stray colon.
DataNode.LOG.info("dfs.blockreport.initialDelay is "
+ "greater than or equal to dfs.blockreport.intervalMsec."
+ " Setting initial delay to 0 msec.");
}
initialBlockReportDelayMs = initBRDelay;
// Read the heartbeat period once: it is needed both in milliseconds and as
// the basis for the default lifeline interval (3 x heartbeat).
final long heartBeatIntervalSecs = getConf().getTimeDuration(
DFS_HEARTBEAT_INTERVAL_KEY,
DFS_HEARTBEAT_INTERVAL_DEFAULT, TimeUnit.SECONDS);
heartBeatInterval = heartBeatIntervalSecs * 1000L;
long confLifelineIntervalMs =
getConf().getLong(DFS_DATANODE_LIFELINE_INTERVAL_SECONDS_KEY,
3 * heartBeatIntervalSecs) * 1000L;
if (confLifelineIntervalMs <= heartBeatInterval) {
// Lifeline must be strictly slower than the heartbeat; reset to 3x.
confLifelineIntervalMs = 3 * heartBeatInterval;
DataNode.LOG.warn(
String.format("%s must be set to a value greater than %s. " +
"Resetting value to 3 * %s, which is %d milliseconds.",
DFS_DATANODE_LIFELINE_INTERVAL_SECONDS_KEY,
DFS_HEARTBEAT_INTERVAL_KEY, DFS_HEARTBEAT_INTERVAL_KEY,
confLifelineIntervalMs));
}
lifelineIntervalMs = confLifelineIntervalMs;
// do we need to sync block file contents to disk when blockfile is closed?
this.syncOnClose = getConf().getBoolean(DFS_DATANODE_SYNCONCLOSE_KEY,
DFS_DATANODE_SYNCONCLOSE_DEFAULT);
this.minimumNameNodeVersion = getConf().get(
DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_KEY,
DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_DEFAULT);
this.encryptDataTransfer = getConf().getBoolean(
DFS_ENCRYPT_DATA_TRANSFER_KEY,
DFS_ENCRYPT_DATA_TRANSFER_DEFAULT);
this.encryptionAlgorithm = getConf().get(DFS_DATA_ENCRYPTION_ALGORITHM_KEY);
this.trustedChannelResolver = TrustedChannelResolver.getInstance(getConf());
this.saslPropsResolver = DataTransferSaslUtil.getSaslPropertiesResolver(
getConf());
this.ignoreSecurePortsForTesting = getConf().getBoolean(
IGNORE_SECURE_PORTS_FOR_TESTING_KEY,
IGNORE_SECURE_PORTS_FOR_TESTING_DEFAULT);
this.xceiverStopTimeout = getConf().getLong(
DFS_DATANODE_XCEIVER_STOP_TIMEOUT_MILLIS_KEY,
DFS_DATANODE_XCEIVER_STOP_TIMEOUT_MILLIS_DEFAULT);
this.maxLockedMemory = getConf().getLong(
DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT);
this.restartReplicaExpiry = getConf().getLong(
DFS_DATANODE_RESTART_REPLICA_EXPIRY_KEY,
DFS_DATANODE_RESTART_REPLICA_EXPIRY_DEFAULT) * 1000L;
this.allowNonLocalLazyPersist = getConf().getBoolean(
DFS_DATANODE_NON_LOCAL_LAZY_PERSIST,
DFS_DATANODE_NON_LOCAL_LAZY_PERSIST_DEFAULT);
this.bpReadyTimeout = getConf().getTimeDuration(
DFS_DATANODE_BP_READY_TIMEOUT_KEY,
DFS_DATANODE_BP_READY_TIMEOUT_DEFAULT, TimeUnit.SECONDS);
this.volFailuresTolerated =
getConf().getInt(
DFSConfigKeys.DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY,
DFSConfigKeys.DFS_DATANODE_FAILED_VOLUMES_TOLERATED_DEFAULT);
String[] dataDirs =
getConf().getTrimmedStrings(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY);
this.volsConfigured = (dataDirs == null) ? 0 : dataDirs.length;
}
// We get minimumNameNodeVersion via a method so it can be mocked out in tests.
String getMinimumNameNodeVersion() {
return this.minimumNameNodeVersion;
}
/**
* Returns the configuration.
*
* @return Configuration the configuration
*/
public Configuration getConf() {
return this.dn.getConf();
}
/**
* Returns true if encryption enabled for DataTransferProtocol.
*
* @return boolean true if encryption enabled for DataTransferProtocol
*/
public boolean getEncryptDataTransfer() {
return encryptDataTransfer;
}
/**
* Returns encryption algorithm configured for DataTransferProtocol, or null
* if not configured.
*
* @return encryption algorithm configured for DataTransferProtocol
*/
public String getEncryptionAlgorithm() {
return encryptionAlgorithm;
}
/** @return timeout in ms for stopping an xceiver thread. */
public long getXceiverStopTimeout() {
return xceiverStopTimeout;
}
/** @return maximum bytes of memory the DataNode may lock (for caching). */
public long getMaxLockedMemory() {
return maxLockedMemory;
}
/**
* Returns true if connect to datanode via hostname
*
* @return boolean true if connect to datanode via hostname
*/
public boolean getConnectToDnViaHostname() {
return connectToDnViaHostname;
}
/**
* Returns socket timeout
*
* @return int socket timeout
*/
public int getSocketTimeout() {
return socketTimeout;
}
/**
* Returns socket write timeout
*
* @return int socket write timeout
*/
public int getSocketWriteTimeout() {
return socketWriteTimeout;
}
/**
* Returns the SaslPropertiesResolver configured for use with
* DataTransferProtocol, or null if not configured.
*
* @return SaslPropertiesResolver configured for use with DataTransferProtocol
*/
public SaslPropertiesResolver getSaslPropsResolver() {
return saslPropsResolver;
}
/**
* Returns the TrustedChannelResolver configured for use with
* DataTransferProtocol, or null if not configured.
*
* @return TrustedChannelResolver configured for use with DataTransferProtocol
*/
public TrustedChannelResolver getTrustedChannelResolver() {
return trustedChannelResolver;
}
/**
* Returns true if configuration is set to skip checking for proper
* port configuration in a secured cluster. This is only intended for use in
* dev testing.
*
* @return true if configured to skip checking secured port configuration
*/
public boolean getIgnoreSecurePortsForTesting() {
return ignoreSecurePortsForTesting;
}
/** @return whether LAZY_PERSIST writes from non-local clients are allowed. */
public boolean getAllowNonLocalLazyPersist() {
return allowNonLocalLazyPersist;
}
/** @return receive buffer size for data-transfer sockets. */
public int getTransferSocketRecvBufferSize() {
return transferSocketRecvBufferSize;
}
/** @return send buffer size for data-transfer sockets. */
public int getTransferSocketSendBufferSize() {
return transferSocketSendBufferSize;
}
/** @return whether TCP_NODELAY is set on the data-transfer server. */
public boolean getDataTransferServerTcpNoDelay() {
return tcpNoDelay;
}
/** @return seconds to wait for a block pool to become ready. */
public long getBpReadyTimeout() {
return bpReadyTimeout;
}
/**
* Returns the interval in milliseconds between sending lifeline messages.
*
* @return interval in milliseconds between sending lifeline messages
*/
public long getLifelineIntervalMs() {
return lifelineIntervalMs;
}
/** @return number of volume failures tolerated before shutdown. */
public int getVolFailuresTolerated() {
return volFailuresTolerated;
}
/** @return number of data directories configured. */
public int getVolsConfigured() {
return volsConfigured;
}
/** @return threshold in ms above which an I/O is logged as slow. */
public long getSlowIoWarningThresholdMs() {
return datanodeSlowIoWarningThresholdMs;
}
/** @return maximum IPC message data length in bytes. */
int getMaxDataLength() {
return maxDataLength;
}
}
| WIgor/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DNConf.java | Java | apache-2.0 | 17,321 |
import sys
from typing import Optional
from bowler import Query, LN, Capture, Filename, TOKEN, SYMBOL
from fissix.pytree import Node, Leaf
from lib2to3.fixer_util import Name, KeywordArg, Dot, Comma, Newline, ArgList
def filter_print_string(node, capture, filename) -> bool:
    """Debugging filter: pretty-print the matched node and its capture dict.

    Always returns True so it never filters anything out — plug it into a
    Bowler query to inspect what the selector matched.

    :param node: the matched syntax-tree node (printed, not inspected)
    :param capture: mapping of capture names to matched sub-nodes
    :param filename: file being processed (unused)
    """
    # The unused `function_name` local from the original was removed.
    from pprint import pprint
    pprint(node)
    pprint(capture)
    return True
def filter_has_no_on_delete(node: LN, capture: Capture, filename: Filename) -> bool:
    """Return True when the matched call has no ``on_delete`` keyword argument.

    Calls that already pass ``on_delete`` are filtered out so the modifier
    does not add a duplicate argument.
    """
    call_args = capture.get("function_arguments")[0].children

    def _is_on_delete_kwarg(arg) -> bool:
        # A keyword argument node whose name leaf reads "on_delete".
        return (
            arg.type == SYMBOL.argument
            and arg.children[0].type == TOKEN.NAME
            and arg.children[0].value == "on_delete"
        )

    return not any(_is_on_delete_kwarg(arg) for arg in call_args)
def add_on_delete_cascade(
    node: LN, capture: Capture, filename: Filename
) -> Optional[LN]:
    # Appends `, on_delete=models.CASCADE` to the matched call's argument list.
    # The leading space in " on_delete" keeps a space after the preceding comma
    # when the tree is rendered back to source.
    arguments = capture.get("function_arguments")[0]
    new_on_delete_node = KeywordArg(Name(" on_delete"), Name("models.CASCADE"))
    if isinstance(arguments, Leaf):  # Node is a leaf and so we need to replace it with a list of things we want instead.
        arguments.replace([arguments.clone(),Comma(),new_on_delete_node])
    else:
        # Already an argument list node: just extend it in place.
        arguments.append_child(Comma())
        arguments.append_child(new_on_delete_node)
    # Returning the (mutated) node tells Bowler to keep the modification.
    return node
# Run the identical query pipeline for both relation field types: select every
# call to the method, keep only calls lacking an on_delete argument, append
# `on_delete=models.CASCADE`, and show an interactive diff.
for _field_method in ("ForeignKey", "OneToOneField"):
    (
        Query(sys.argv[1])
        .select_method(_field_method)
        .is_call()
        .filter(filter_has_no_on_delete)
        .modify(add_on_delete_cascade)
        .idiff()
    )
| edx/repo-tools | edx_repo_tools/codemods/django2/foreignkey_on_delete_mod.py | Python | apache-2.0 | 1,718 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.graphalgo.impl.centrality;
import java.util.ArrayList;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.neo4j.graphalgo.CostEvaluator;
import org.neo4j.graphalgo.impl.util.MatrixUtil;
import org.neo4j.graphalgo.impl.util.MatrixUtil.DoubleMatrix;
import org.neo4j.graphalgo.impl.util.MatrixUtil.DoubleVector;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
/**
* Computing eigenvector centrality with the "Arnoldi iteration". Convergence is
* dependent of the eigenvalues of the input adjacency matrix (the network). If
* the two largest eigenvalues are u1 and u2, a small factor u2/u1 will give a
* faster convergence (i.e. faster computation). NOTE: Currently only works on
* Doubles.
* @complexity The {@link CostEvaluator} is called once for every relationship
* in each iteration. Assuming this is done in constant time, the
* total time complexity is O(j(n + m + i)) when j internal restarts
* are required and i iterations are done in the internal
* eigenvector solving of the H matrix. Typically j = the number of
* iterations / k, where normally k = 3.
* @author Patrik Larsson
* @author Anton Persson
*/
public class EigenvectorCentralityArnoldi extends EigenvectorCentralityBase
{
/**
* See {@link EigenvectorCentralityBase#EigenvectorCentralityBase(Direction, CostEvaluator, Set, Set, double)}
*
* @param relationDirection direction in which relationships are followed
* @param costEvaluator evaluator supplying the weight of each relationship
* @param nodeSet nodes making up the (sub)network
* @param relationshipSet relationships making up the (sub)network
* @param precision convergence threshold for the iteration
*/
public EigenvectorCentralityArnoldi( Direction relationDirection,
CostEvaluator<Double> costEvaluator, Set<Node> nodeSet,
Set<Relationship> relationshipSet, double precision )
{
super( relationDirection, costEvaluator, nodeSet, relationshipSet, precision );
}
/**
* This runs the Arnoldi decomposition in a specified number of steps.
* <p>
* Builds the Krylov basis Q and the (small) Hessenberg-like matrix H, finds
* an approximate dominant eigenvector of H via the power method, and maps it
* back to node space as a Ritz vector. Returns the number of Arnoldi steps
* actually performed.
*/
@Override
protected int runInternalIteration()
{
// Number of Arnoldi steps per internal restart (the "k" in the class javadoc).
int iterations = 3;
// Create a list of the nodes, in order to quickly translate an index
// into a node.
ArrayList<Node> nodes = new ArrayList<>( nodeSet.size() );
for ( Node node : nodeSet )
{
nodes.add( node );
}
DoubleMatrix hMatrix = new DoubleMatrix();
DoubleMatrix qMatrix = new DoubleMatrix();
// Row 0 of Q is the current centrality estimate.
for ( int i = 0; i < nodes.size(); ++i )
{
qMatrix.set( 0, i, values.get( nodes.get( i ) ) );
}
int localIterations = 1;
// The main arnoldi iteration loop
while ( true )
{
incrementTotalIterations();
// One matrix-vector product with the (weighted) adjacency matrix.
Map<Node, Double> newValues = processRelationships();
// Orthogonalize against all previous Q rows (modified Gram-Schmidt style).
for ( int j = 0; j < localIterations; ++j )
{
DoubleVector qj = qMatrix.getRow( j );
// vector product
double product = 0;
for ( int i = 0; i < nodes.size(); ++i )
{
Double d1 = newValues.get( nodes.get( i ) );
Double d2 = qj.get( i );
if ( d1 != null && d2 != null )
{
product += d1 * d2;
}
}
hMatrix.set( j, localIterations - 1, product );
if ( product != 0.0 )
{
// vector subtraction
for ( int i = 0; i < nodes.size(); ++i )
{
Node node = nodes.get( i );
Double value = newValues.get( node );
if ( value == null )
{
value = 0.0;
}
Double qValue = qj.get( i );
if ( qValue != null )
{
newValues.put( node, value - product * qValue );
}
}
}
}
double normalizeFactor = normalize( newValues );
values = newValues;
// Store the normalized residual as the next Q row.
DoubleVector qVector = new DoubleVector();
for ( int i = 0; i < nodes.size(); ++i )
{
Node key = nodes.get( i );
Double value = newValues.get( key );
if ( value != null )
{
qVector.set( i, value );
}
}
qMatrix.setRow( localIterations, qVector );
// Stop on breakdown (zero residual) or when enough steps were taken.
if ( normalizeFactor == 0.0 || localIterations >= nodeSet.size()
|| localIterations >= iterations )
{
break;
}
hMatrix.set( localIterations, localIterations - 1, normalizeFactor );
++localIterations;
}
// employ the power method to find eigenvector to h
Random random = new Random( System.currentTimeMillis() );
DoubleVector vector = new DoubleVector();
for ( int i = 0; i < nodeSet.size(); ++i )
{
vector.set( i, random.nextDouble() );
}
MatrixUtil.normalize( vector );
boolean powerDone = false;
int its = 0;
// Relative-change tolerance for the inner power iteration.
double powerPrecision = 0.1;
while ( !powerDone )
{
DoubleVector newVector = MatrixUtil.multiply( hMatrix, vector );
MatrixUtil.normalize( newVector );
powerDone = true;
// Converged when every component changed by less than powerPrecision.
for ( Integer index : vector.getIndices() )
{
if ( newVector.get( index ) == null )
{
continue;
}
double factor = Math.abs( newVector.get( index )
/ vector.get( index ) );
if ( factor - powerPrecision > 1.0
|| factor + powerPrecision < 1.0 )
{
powerDone = false;
break;
}
}
vector = newVector;
++its;
// Hard cap to guarantee termination if the power method stalls.
if ( its > 100 )
{
break;
}
}
// multiply q and vector to get a ritz vector
DoubleVector ritzVector = new DoubleVector();
for ( int r = 0; r < nodeSet.size(); ++r )
{
for ( int c = 0; c < localIterations; ++c )
{
ritzVector.incrementValue( r, vector.get( c )
* qMatrix.get( c, r ) );
}
}
// Write the Ritz vector back as the new per-node centrality estimate.
for ( int i = 0; i < nodeSet.size(); ++i )
{
values.put( nodes.get( i ), ritzVector.get( i ) );
}
normalize( values );
return localIterations;
}
}
| HuangLS/neo4j | community/graph-algo/src/main/java/org/neo4j/graphalgo/impl/centrality/EigenvectorCentralityArnoldi.java | Java | apache-2.0 | 7,492 |
/*
* Copyright © 2016, 2017 IBM Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.cloudant.sync.internal.documentstore.callables;
import com.cloudant.sync.documentstore.Attachment;
import com.cloudant.sync.documentstore.AttachmentException;
import com.cloudant.sync.internal.documentstore.AttachmentStreamFactory;
import com.cloudant.sync.internal.documentstore.DatabaseImpl;
import com.cloudant.sync.documentstore.DocumentStoreException;
import com.cloudant.sync.internal.documentstore.InternalDocumentRevision;
import com.cloudant.sync.internal.documentstore.DocumentRevisionTree;
import com.cloudant.sync.internal.documentstore.helpers.GetFullRevisionFromCurrentCursor;
import com.cloudant.sync.internal.sqlite.Cursor;
import com.cloudant.sync.internal.sqlite.SQLCallable;
import com.cloudant.sync.internal.sqlite.SQLDatabase;
import com.cloudant.sync.internal.util.DatabaseUtils;
import java.sql.SQLException;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Get all Revisions for a given Document ID, in the form of a {@code DocumentRevisionTree}
*
* @see DocumentRevisionTree
*/
public class GetAllRevisionsOfDocumentCallable implements SQLCallable<DocumentRevisionTree> {
// All state is set once in the constructor; made final for immutability.
private final String docId;
private final String attachmentsDir;
private final AttachmentStreamFactory attachmentStreamFactory;
private static final Logger logger = Logger.getLogger(DatabaseImpl.class.getCanonicalName());
/**
* @param docId The Document ID to get the Document for
* @param attachmentsDir Location of attachments
* @param attachmentStreamFactory Factory to manage access to attachment streams
*/
public GetAllRevisionsOfDocumentCallable(String docId, String attachmentsDir,
AttachmentStreamFactory attachmentStreamFactory) {
this.docId = docId;
this.attachmentsDir = attachmentsDir;
this.attachmentStreamFactory = attachmentStreamFactory;
}
/**
* Loads every revision of the document (ordered by sequence, ascending, so
* parents are inserted into the tree before their children), attaching each
* revision's attachments as it goes.
*
* @param db database to query
* @return the full revision tree for {@code docId}
* @throws DocumentStoreException if the query fails
* @throws AttachmentException if attachment loading fails
*/
public DocumentRevisionTree call(SQLDatabase db) throws DocumentStoreException, AttachmentException {
String sql = "SELECT " + CallableSQLConstants.FULL_DOCUMENT_COLS + " FROM revs, docs " +
"WHERE docs.docid=? AND revs.doc_id = docs.doc_id ORDER BY sequence ASC";
String[] args = {docId};
Cursor cursor = null;
try {
DocumentRevisionTree tree = new DocumentRevisionTree();
cursor = db.rawQuery(sql, args);
while (cursor.moveToNext()) {
long sequence = cursor.getLong(3);
Map<String, ? extends Attachment> atts = new AttachmentsForRevisionCallable(
this.attachmentsDir, this.attachmentStreamFactory, sequence).call(db);
InternalDocumentRevision rev = GetFullRevisionFromCurrentCursor.get(cursor, atts);
logger.finer("Rev: " + rev);
tree.add(rev);
}
return tree;
} catch (SQLException e) {
logger.log(Level.SEVERE, "Error getting all revisions of document", e);
throw new DocumentStoreException("DocumentRevisionTree not found with id: " + docId, e);
} finally {
// Cursor is always released, even on the exception paths above.
DatabaseUtils.closeCursorQuietly(cursor);
}
}
}
| cloudant/sync-android | cloudant-sync-datastore-core/src/main/java/com/cloudant/sync/internal/documentstore/callables/GetAllRevisionsOfDocumentCallable.java | Java | apache-2.0 | 3,855 |
package com.cognizant.cognizantits.qcconnection.qcupdation;
import com4j.DISPID;
import com4j.DefaultValue;
import com4j.IID;
import com4j.NativeType;
import com4j.Optional;
import com4j.ReturnValue;
import com4j.VTID;
@IID("{2AF970F7-6CCC-4DFB-AA78-08F689481F94}")
public abstract interface IBug
extends IBaseFieldExMail
{
@DISPID(15)
@VTID(24)
public abstract String status();
@DISPID(15)
@VTID(25)
public abstract void status(String paramString);
@DISPID(16)
@VTID(26)
public abstract String project();
@DISPID(16)
@VTID(27)
public abstract void project(String paramString);
@DISPID(17)
@VTID(28)
public abstract String summary();
@DISPID(17)
@VTID(29)
public abstract void summary(String paramString);
@DISPID(18)
@VTID(30)
public abstract String priority();
@DISPID(18)
@VTID(31)
public abstract void priority(String paramString);
@DISPID(19)
@VTID(32)
public abstract String detectedBy();
@DISPID(19)
@VTID(33)
public abstract void detectedBy(String paramString);
@DISPID(20)
@VTID(34)
public abstract String assignedTo();
@DISPID(20)
@VTID(35)
public abstract void assignedTo(String paramString);
@DISPID(21)
@VTID(36)
public abstract IList findSimilarBugs(@Optional @DefaultValue("10") int paramInt);
@DISPID(22)
@VTID(37)
public abstract boolean hasChange();
@DISPID(23)
@VTID(38)
public abstract IList changeLinks();
@VTID(38)
@ReturnValue(type=NativeType.VARIANT, defaultPropertyThrough={IList.class})
public abstract Object changeLinks(int paramInt);
}
/* Location: D:\Prabu\jars\QC.jar
* Qualified Name: qcupdation.IBug
* JD-Core Version: 0.7.0.1
*/ | CognizantQAHub/Cognizant-Intelligent-Test-Scripter | QcConnection/src/main/java/com/cognizant/cognizantits/qcconnection/qcupdation/IBug.java | Java | apache-2.0 | 1,729 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Version;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.junit.Test;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
/**
*
*/
public class LuceneTest extends ElasticsearchLuceneTestCase {

    /*
     * simple test that ensures that we bump the version on Upgrade
     */
    @Test
    public void testVersion() {
        // note this is just a silly sanity check, we test it in lucene, and we point to it this way
        assertEquals(Lucene.VERSION, Version.LATEST);
    }

    // Verifies Lucene.pruneUnreferencedFiles: given an older segments_N file
    // name, the directory is rolled back to that commit and files belonging
    // only to later commits are removed. The test depends on the exact
    // commit/segment ordering below — do not reorder operations.
    public void testPruneUnreferencedFiles() throws IOException {
        MockDirectoryWrapper dir = newMockDirectory();
        dir.setEnableVirusScanner(false);
        IndexWriterConfig iwc = newIndexWriterConfig();
        // NoDeletionPolicy keeps every commit point on disk so the older
        // segments file captured below still exists when we prune.
        iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
        iwc.setMaxBufferedDocs(2);
        IndexWriter writer = new IndexWriter(dir, iwc);
        Document doc = new Document();
        doc.add(new TextField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        writer.commit();
        doc = new Document();
        doc.add(new TextField("id", "2", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        doc = new Document();
        doc.add(new TextField("id", "3", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        writer.commit();
        // Snapshot the commit we will later prune back to (docs 1-3, no deletes).
        SegmentInfos segmentCommitInfos = Lucene.readSegmentInfos(dir);
        doc = new Document();
        doc.add(new TextField("id", "4", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        writer.deleteDocuments(new Term("id", "2"));
        writer.commit();
        DirectoryReader open = DirectoryReader.open(writer, true);
        // Latest commit: 4 docs total, one deleted.
        assertEquals(3, open.numDocs());
        assertEquals(1, open.numDeletedDocs());
        assertEquals(4, open.maxDoc());
        open.close();
        writer.close();
        // Prune back to the snapshotted commit; the returned SegmentInfos must
        // identify the same segments file we asked for.
        SegmentInfos si = Lucene.pruneUnreferencedFiles(segmentCommitInfos.getSegmentsFileName(), dir);
        assertEquals(si.getSegmentsFileName(), segmentCommitInfos.getSegmentsFileName());
        open = DirectoryReader.open(dir);
        // After pruning: back to 3 docs, no deletes.
        assertEquals(3, open.numDocs());
        assertEquals(0, open.numDeletedDocs());
        assertEquals(3, open.maxDoc());
        IndexSearcher s = new IndexSearcher(open);
        assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits, 1);
        assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits, 1);
        assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits, 1);
        // Doc 4 only existed in the pruned (later) commit.
        assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits, 0);
        for (String file : dir.listAll()) {
            // Files of the third commit (segments_3 and the _2* segment) must be gone.
            assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2"));
        }
        open.close();
        dir.close();
    }

    // Verifies Lucene.files: it must report exactly the files referenced by
    // the given SegmentInfos (current segments_N plus live segment files).
    public void testFiles() throws IOException {
        MockDirectoryWrapper dir = newMockDirectory();
        dir.setEnableVirusScanner(false);
        IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
        iwc.setMaxBufferedDocs(2);
        // Compound files keep each segment's file set small and predictable
        // (.cfs/.cfe/.si).
        iwc.setUseCompoundFile(true);
        IndexWriter writer = new IndexWriter(dir, iwc);
        Document doc = new Document();
        doc.add(new TextField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        writer.commit();
        Set<String> files = new HashSet<>();
        for (String f : Lucene.files(Lucene.readSegmentInfos(dir))) {
            files.add(f);
        }
        assertTrue(files.toString(), files.contains("segments_1"));
        assertTrue(files.toString(), files.contains("_0.cfs"));
        assertTrue(files.toString(), files.contains("_0.cfe"));
        assertTrue(files.toString(), files.contains("_0.si"));
        doc = new Document();
        doc.add(new TextField("id", "2", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        doc = new Document();
        doc.add(new TextField("id", "3", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        writer.addDocument(doc);
        writer.commit();
        files.clear();
        for (String f : Lucene.files(Lucene.readSegmentInfos(dir))) {
            files.add(f);
        }
        // After the second commit the old segments_1 is no longer referenced,
        // but both segments' compound files are.
        assertFalse(files.toString(), files.contains("segments_1"));
        assertTrue(files.toString(), files.contains("segments_2"));
        assertTrue(files.toString(), files.contains("_0.cfs"));
        assertTrue(files.toString(), files.contains("_0.cfe"));
        assertTrue(files.toString(), files.contains("_0.si"));
        assertTrue(files.toString(), files.contains("_1.cfs"));
        assertTrue(files.toString(), files.contains("_1.cfe"));
        assertTrue(files.toString(), files.contains("_1.si"));
        writer.close();
        dir.close();
    }
}
| Asimov4/elasticsearch | src/test/java/org/elasticsearch/common/lucene/LuceneTest.java | Java | apache-2.0 | 6,332 |
/*
* Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.storage;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import org.hawkular.agent.monitor.api.MetricTagPayloadBuilder;
import org.hawkular.agent.monitor.util.Util;
import org.hawkular.metrics.client.common.MetricType;
/**
* Allows one to build up payload requests to send to metric storage to add tags.
* After all tags are added to this builder, you can get the payloads in
* JSON format via {@link #toPayload()}.
*/
public class MetricTagPayloadBuilderImpl implements MetricTagPayloadBuilder {
// key is metric ID, value is map of name/value pairs (the actual tags)
private Map<String, Map<String, String>> allGauges = new HashMap<>();
private Map<String, Map<String, String>> allCounters = new HashMap<>();
private Map<String, Map<String, String>> allAvails = new HashMap<>();
// a running count of the number of tags that have been added
private int count = 0;
// if not null, this is the tenant ID to associate all the metrics with (null means used the agent tenant ID)
private String tenantId = null;
@Override
public void addTag(String key, String name, String value, MetricType metricType) {
Map<String, Map<String, String>> map;
switch (metricType) {
case GAUGE: {
map = allGauges;
break;
}
case COUNTER: {
map = allCounters;
break;
}
case AVAILABILITY: {
map = allAvails;
break;
}
default: {
throw new IllegalArgumentException("Unsupported metric type: " + metricType);
}
}
Map<String, String> allTagsForMetric = map.get(key);
if (allTagsForMetric == null) {
// we haven't seen this metric ID before, create a new map of tags
allTagsForMetric = new TreeMap<String, String>(); // use tree map to sort the tags
map.put(key, allTagsForMetric);
}
allTagsForMetric.put(name, value);
count++;
}
@Override
public Map<String, String> toPayload() {
Map<String, Map<String, String>> withMapObject = new HashMap<>();
for (Map.Entry<String, Map<String, String>> gaugeEntry : allGauges.entrySet()) {
withMapObject.put("gauges/" + Util.urlEncode(gaugeEntry.getKey()), gaugeEntry.getValue());
}
for (Map.Entry<String, Map<String, String>> counterEntry : allCounters.entrySet()) {
withMapObject.put("counters/" + Util.urlEncode(counterEntry.getKey()), counterEntry.getValue());
}
for (Map.Entry<String, Map<String, String>> availEntry : allAvails.entrySet()) {
withMapObject.put("availability/" + Util.urlEncode(availEntry.getKey()), availEntry.getValue());
}
// now convert all the maps of tags to json
Map<String, String> withJson = new HashMap<>(withMapObject.size());
for (Map.Entry<String, Map<String, String>> entry : withMapObject.entrySet()) {
withJson.put(entry.getKey(), Util.toJson(entry.getValue()));
}
return withJson;
}
@Override
public int getNumberTags() {
return count;
}
@Override
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
}
@Override
public String getTenantId() {
return this.tenantId;
}
} | Jiri-Kremser/hawkular-agent | hawkular-agent-core/src/main/java/org/hawkular/agent/monitor/storage/MetricTagPayloadBuilderImpl.java | Java | apache-2.0 | 4,162 |
#!/usr/bin/env python
''' Script to ingest GCP billing data into a DB '''
import logging
import os
import re
import sys
from datetime import datetime
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse as parse_date
from httplib2 import Http
import transaction
from apiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials
from sqlalchemy import engine_from_config
from sqlalchemy.sql import functions
from pyramid.paster import get_appsettings, setup_logging
from pyramid.scripts.common import parse_vars
from ..models import (DBSession,
GcpLineItem)
from ..util.fileloader import load_json, save_json
COMMIT_THRESHOLD = 10000  # rows to batch per DB transaction — not referenced in this test script
LOG = None  # module-level logger; assigned in main() after setup_logging()
def usage(argv):
    """Print command-line usage for this script and exit with status 1."""
    prog = os.path.basename(argv[0])
    message = ('usage: %s <config_uri> [rundate=YYYY-MM-DD]\n'
               '(example: "%s development.ini")' % (prog, prog))
    print(message)
    sys.exit(1)
def run(settings, options):
    """Authenticate against GCP and list the projects under the billing account.

    :param settings: parsed .ini settings; must contain ``creds.dir`` and
                     ``creds.gcp.json`` pointing at a service-account keyfile.
    :param options: parsed command-line overrides (not used here).
    """
    keyfile = settings['creds.dir'] + "/" + settings['creds.gcp.json']
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = keyfile

    scopes = ['https://www.googleapis.com/auth/cloud-platform']
    credentials = ServiceAccountCredentials.from_json_keyfile_name(keyfile, scopes)
    authorized_http = credentials.authorize(Http())

    # Build a Cloud Billing API client ('cloudbilling' v1) over the authorized
    # HTTP channel and list the projects linked to the hard-coded billing
    # account.
    service = build('cloudbilling', 'v1', http=authorized_http,
                    cache_discovery=False)
    request = service.billingAccounts().projects().list(name='billingAccounts/0085BB-6B96B9-89FD9F')
    response = request.execute()
    LOG.debug(response)
def main(argv):
    """Script entry point: validate args, wire up logging and the DB, then run.

    :param argv: raw ``sys.argv``; argv[1] must be the config .ini path,
                 remaining arguments are ``key=value`` overrides.
    """
    if len(argv) < 2:
        usage(argv)

    config_uri = argv[1]
    extra_options = parse_vars(argv[2:])
    setup_logging(config_uri)

    # The module-level logger is created only after logging is configured.
    global LOG
    LOG = logging.getLogger(__name__)

    settings = get_appsettings(config_uri, options=extra_options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    run(settings, extra_options)
# Fixes: the original guard used a substring test ('__main__' in __name__),
# which also matches any module whose name merely contains "__main__"; use the
# standard equality check. The Python-2-only `print` statement is replaced by
# a parenthesized call that prints identically under Python 2 and is valid
# Python 3 syntax.
if __name__ == '__main__':
    try:
        main(sys.argv)
    except KeyboardInterrupt:
        # Turn Ctrl+C into a clean message instead of a traceback.
        print("Ctrl+C detected. Exiting...")
| blentz/cloud-costs | budget/scripts/gcp_test.py | Python | apache-2.0 | 2,730 |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* Read-only "Role detail" view for the authority-management module.
 * Expects:
 *   $model      - the role being displayed (name, type, rule_name,
 *                 description, data, child).
 *   $childArray - optional map of child permission name => label used to
 *                 render the permission checkbox list.
 * All inputs are rendered disabled; the "Modify" button links to the
 * role/update action.
 */
?>
<h3 class="page-title">
    Authority management <small> Role detail </small>
</h3>
<div class="page-bar">
    <ul class="page-breadcrumb">
        <li>
            <i class="fa fa-home"></i>
            <a href="index.html">Home</a>
            <i class="fa fa-angle-right"></i>
        </li>
        <li>
            <a href="<?php echo Yii::$app->urlManager->createUrl('role/index') ?>">Authority management</a>
            <i class="fa fa-angle-right"></i>
        </li>
        <li>
            <a href="<?php echo Yii::$app->urlManager->createUrl(['role/update', 'name' => $model->name]) ?>">Role detail</a>
        </li>
    </ul>
    <div class="page-toolbar">
        <div class="btn-group pull-right">
            <button type="button" class="btn btn-fit-height grey-salt dropdown-toggle" data-toggle="dropdown" data-hover="dropdown" data-delay="1000" data-close-others="true">
                Actions <i class="fa fa-angle-down"></i>
            </button>
            <ul class="dropdown-menu pull-right" role="menu">
                <li>
                    <a href="<?php echo Yii::$app->urlManager->createUrl('role/create') ?>">Role add</a>
                </li>
            </ul>
        </div>
    </div>
</div>
<div class="row">
    <div class="col-md-12 ">
        <!-- BEGIN SAMPLE FORM PORTLET-->
        <div class="portlet box green">
            <div class="portlet-title">
                <div class="caption">
                    <i class="fa fa-gift"></i>
                    Role detail
                </div>
            </div>
            <div class="portlet-body form">
                <!-- BEGIN FORM-->
                <?php
                // The form is display-only: every field below is disabled and
                // there is no submit button, so the form never posts.
                $form = ActiveForm::begin([
                            'id' => 'form_sample_1',
                            'options' => [
                                'class' => 'form-horizontal',
                                'novalidate' => 'novalidate',
                            ],
                ]);
                ?>
                <div class="form-body">
                    <div class="alert alert-danger display-hide">
                        <button class="close" data-close="alert"></button>
                        You have some form errors. Please check below.
                    </div>
                    <div class="alert alert-success display-hide">
                        <button class="close" data-close="alert"></button>
                        Your form validation is successful!
                    </div>
                    <div class="form-group">
                        <label class="control-label col-md-3">Role name <span class="required" aria-required="true">
                                * </span>
                        </label>
                        <div class="col-md-4">
                            <?= $form->field($model, 'name')->textInput([ 'class' => 'form-control', 'placeholder' => 'Please enter the role name', 'disabled' => 'disabled'])->label(false) ?>
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-md-3">Role type <span class="required" aria-required="true">
                                * </span>
                        </label>
                        <div class="col-md-4">
                            <?= $form->field($model, 'type')->textInput([ 'value' => isset($model->type) ? $model->type : NULL, 'disabled' => 'disabled', 'class' => 'form-control'])->label(false) ?>
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-md-3">Rule name <span class="required" aria-required="true">
                            </span>
                        </label>
                        <div class="col-md-4">
                            <?= $form->field($model, 'rule_name')->textInput([ 'class' => 'form-control', 'placeholder' => '', 'disabled' => 'disabled'])->label(false) ?>
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-md-3">Description <span class="required" aria-required="true">
                            </span>
                        </label>
                        <div class="col-md-4">
                            <?= $form->field($model, 'description')->textarea(['rows' => 6, 'cols' => 5, 'disabled' => 'disabled'])->label(false) ?>
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-md-3">Data <span class="required" aria-required="true">
                            </span>
                        </label>
                        <div class="col-md-4">
                            <?= $form->field($model, 'data')->textarea(['rows' => 6, 'cols' => 5, 'disabled' => 'disabled'])->label(false) ?>
                        </div>
                    </div>
                    <?php // Permission checkboxes are rendered only when the controller supplied child permissions. ?>
                    <?php if (!empty($childArray)) { ?>
                        <div class="form-group">
                            <label class="control-label col-md-3">Permission list <span class="required" aria-required="true">
                                </span>
                            </label>
                            <div class="col-md-4">
                                <?= $form->field($model, 'child')->checkboxList($childArray, $model->child)->label(false) ?>
                            </div>
                        </div>
                    <?php } ?>
                </div>
                <div class="form-actions">
                    <div class="row">
                        <div class="col-md-offset-3 col-md-9">
                            <a href="<?php echo Yii::$app->urlManager->createUrl(['role/update', "name" => $model->name]) ?>" class="btn default">Modify </a>
                            <a href="<?php echo Yii::$app->urlManager->createUrl('role/index') ?>" class="btn default">Return list </a>
                        </div>
                    </div>
                </div>
                <!-- </form> -->
                <?php ActiveForm::end(); ?>
                <!-- END FORM-->
            </div>
        </div>
        <!-- END SAMPLE FORM PORTLET-->
    </div>
</div>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery-migrate.min.js" type="text/javascript"></script>
<!-- IMPORTANT! Load jquery-ui-1.10.3.custom.min.js before bootstrap.min.js to fix bootstrap tooltip conflict with jquery ui tooltip -->
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery-ui/jquery-ui-1.10.3.custom.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/bootstrap-hover-dropdown/bootstrap-hover-dropdown.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery.blockui.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/jquery.cokie.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/uniform/jquery.uniform.min.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/plugins/bootstrap-switch/js/bootstrap-switch.min.js" type="text/javascript"></script>
<!-- END CORE PLUGINS -->
<!-- BEGIN PAGE LEVEL PLUGINS -->
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN PAGE LEVEL SCRIPTS -->
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/global/scripts/metronic.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/layout/scripts/layout.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/layout/scripts/quick-sidebar.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/layout/scripts/demo.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/pages/scripts/index.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/pages/scripts/tasks.js" type="text/javascript"></script>
<script src="<?php echo Yii::$app->request->baseUrl ?>/metronic/admin/pages/scripts/components-pickers.js"></script>
<!-- END PAGE LEVEL SCRIPTS -->
<script>
    jQuery(document).ready(function () {
        // initiate layout and plugins
        Metronic.init(); // init metronic core components
        Layout.init(); // init current layout
        QuickSidebar.init(); // init quick sidebar
        Demo.init(); // init demo features
        Tasks.initDashboardWidget();
        ComponentsPickers.init();
    });
</script>
package org.gradle.test.performance.mediummonolithicjavaproject.p127;
import org.junit.Test;
import static org.junit.Assert.*;
public class Test2551 {
Production2551 objectUnderTest = new Production2551();
@Test
public void testProperty0() {
String value = "value";
objectUnderTest.setProperty0(value);
assertEquals(value, objectUnderTest.getProperty0());
}
@Test
public void testProperty1() {
String value = "value";
objectUnderTest.setProperty1(value);
assertEquals(value, objectUnderTest.getProperty1());
}
@Test
public void testProperty2() {
String value = "value";
objectUnderTest.setProperty2(value);
assertEquals(value, objectUnderTest.getProperty2());
}
@Test
public void testProperty3() {
String value = "value";
objectUnderTest.setProperty3(value);
assertEquals(value, objectUnderTest.getProperty3());
}
@Test
public void testProperty4() {
String value = "value";
objectUnderTest.setProperty4(value);
assertEquals(value, objectUnderTest.getProperty4());
}
@Test
public void testProperty5() {
String value = "value";
objectUnderTest.setProperty5(value);
assertEquals(value, objectUnderTest.getProperty5());
}
@Test
public void testProperty6() {
String value = "value";
objectUnderTest.setProperty6(value);
assertEquals(value, objectUnderTest.getProperty6());
}
@Test
public void testProperty7() {
String value = "value";
objectUnderTest.setProperty7(value);
assertEquals(value, objectUnderTest.getProperty7());
}
@Test
public void testProperty8() {
String value = "value";
objectUnderTest.setProperty8(value);
assertEquals(value, objectUnderTest.getProperty8());
}
@Test
public void testProperty9() {
String value = "value";
objectUnderTest.setProperty9(value);
assertEquals(value, objectUnderTest.getProperty9());
}
} | oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p127/Test2551.java | Java | apache-2.0 | 2,111 |
package org.gradle.test.performance.mediummonolithicjavaproject.p211;
/**
 * Plain bean holding ten independent, nullable string properties
 * (property0 .. property9), each exposed through a trivial getter/setter pair.
 */
public class Production4230 {

    private String property0;
    private String property1;
    private String property2;
    private String property3;
    private String property4;
    private String property5;
    private String property6;
    private String property7;
    private String property8;
    private String property9;

    public String getProperty0() {
        return property0;
    }

    public void setProperty0(String value) {
        property0 = value;
    }

    public String getProperty1() {
        return property1;
    }

    public void setProperty1(String value) {
        property1 = value;
    }

    public String getProperty2() {
        return property2;
    }

    public void setProperty2(String value) {
        property2 = value;
    }

    public String getProperty3() {
        return property3;
    }

    public void setProperty3(String value) {
        property3 = value;
    }

    public String getProperty4() {
        return property4;
    }

    public void setProperty4(String value) {
        property4 = value;
    }

    public String getProperty5() {
        return property5;
    }

    public void setProperty5(String value) {
        property5 = value;
    }

    public String getProperty6() {
        return property6;
    }

    public void setProperty6(String value) {
        property6 = value;
    }

    public String getProperty7() {
        return property7;
    }

    public void setProperty7(String value) {
        property7 = value;
    }

    public String getProperty8() {
        return property8;
    }

    public void setProperty8(String value) {
        property8 = value;
    }

    public String getProperty9() {
        return property9;
    }

    public void setProperty9(String value) {
        property9 = value;
    }
}
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Binary read_entries scans the entries from a specified GraphStore and emits
// them to stdout as a delimited stream.
package main
import (
"context"
"flag"
"fmt"
"log"
"os"
"sync"
"kythe.io/kythe/go/platform/delimited"
"kythe.io/kythe/go/platform/vfs"
"kythe.io/kythe/go/services/graphstore"
"kythe.io/kythe/go/storage/gsutil"
"kythe.io/kythe/go/util/flagutil"
"kythe.io/kythe/go/util/kytheuri"
spb "kythe.io/kythe/proto/storage_go_proto"
_ "kythe.io/kythe/go/services/graphstore/proxy"
_ "kythe.io/kythe/go/storage/leveldb"
)
var (
	// gs is the GraphStore to read from; populated by the --graphstore flag
	// registered in init below.
	gs graphstore.Service

	// Output-mode flags.
	count         = flag.Bool("count", false, "Only print the number of entries scanned")
	shardsToFiles = flag.String("sharded_file", "", "If given, scan the entire GraphStore, storing each shard in a separate file instead of stdout (requires --shards)")
	shardIndex    = flag.Int64("shard_index", 0, "Index of a single shard to emit (requires --shards)")
	shards        = flag.Int64("shards", 0, "Number of shards to split the GraphStore")

	// Read/scan filter flags.
	edgeKind     = flag.String("edge_kind", "", "Edge kind by which to filter a read/scan")
	targetTicket = flag.String("target", "", "Ticket of target by which to filter a scan")
	factPrefix   = flag.String("fact_prefix", "", "Fact prefix by which to filter a scan")
)
// init registers the --graphstore flag and installs the command's usage text
// before flag.Parse runs in main.
func init() {
	gsutil.Flag(&gs, "graphstore", "GraphStore to read")
	flag.Usage = flagutil.SimpleUsage("Scans/reads the entries from a GraphStore, emitting a delimited entry stream to stdout",
		"--graphstore spec [--count] [--shards N [--shard_index I] --sharded_file path] [--edge_kind] ([--fact_prefix str] [--target ticket] | [ticket...])")
}
// main dispatches on the flag combination:
//   - no --shards: read the given tickets (or scan with filters), writing a
//     delimited entry stream to stdout, or just a count with --count;
//   - --shards: operate on a Sharded GraphStore, emitting one shard to
//     stdout, each shard to its own file (--sharded_file), or a shard count.
// All failures are fatal (log.Fatal*).
func main() {
	flag.Parse()

	// Reject invalid flag combinations up front.
	if gs == nil {
		flagutil.UsageError("missing --graphstore")
	} else if *shardsToFiles != "" && *shards <= 0 {
		flagutil.UsageError("--sharded_file and --shards must be given together")
	} else if *shards > 0 && len(flag.Args()) > 0 {
		flagutil.UsageError("--shards and giving tickets for reads are mutually exclusive")
	}

	ctx := context.Background()

	wr := delimited.NewWriter(os.Stdout)
	var total int64
	// Non-sharded mode: plain reads/scans streamed to stdout (or counted).
	if *shards <= 0 {
		entryFunc := func(entry *spb.Entry) error {
			if *count {
				total++
				return nil
			}
			return wr.PutProto(entry)
		}
		if len(flag.Args()) > 0 {
			// Per-ticket reads; scan-only filters do not apply here.
			if *targetTicket != "" || *factPrefix != "" {
				log.Fatal("--target and --fact_prefix are unsupported when given tickets")
			}
			if err := readEntries(ctx, gs, entryFunc, *edgeKind, flag.Args()); err != nil {
				log.Fatal(err)
			}
		} else {
			if err := scanEntries(ctx, gs, entryFunc, *edgeKind, *targetTicket, *factPrefix); err != nil {
				log.Fatal(err)
			}
		}
		if *count {
			fmt.Println(total)
		}
		return
	}

	// Sharded mode requires a GraphStore that implements graphstore.Sharded.
	sgs, ok := gs.(graphstore.Sharded)
	if !ok {
		log.Fatalf("Sharding unsupported for given GraphStore type: %T", gs)
	} else if *shardIndex >= *shards {
		log.Fatalf("Invalid shard index for %d shards: %d", *shards, *shardIndex)
	}

	if *count {
		// Only report how many entries the selected shard contains.
		cnt, err := sgs.Count(ctx, &spb.CountRequest{Index: *shardIndex, Shards: *shards})
		if err != nil {
			log.Fatalf("ERROR: %v", err)
		}
		fmt.Println(cnt)
		return
	} else if *shardsToFiles != "" {
		// Write every shard concurrently, one output file per shard.
		var wg sync.WaitGroup
		wg.Add(int(*shards))
		for i := int64(0); i < *shards; i++ {
			go func(i int64) {
				defer wg.Done()
				path := fmt.Sprintf("%s-%.5d-of-%.5d", *shardsToFiles, i, *shards)
				f, err := vfs.Create(ctx, path)
				if err != nil {
					log.Fatalf("Failed to create file %q: %v", path, err)
				}
				defer f.Close()
				wr := delimited.NewWriter(f)
				if err := sgs.Shard(ctx, &spb.ShardRequest{
					Index:  i,
					Shards: *shards,
				}, func(entry *spb.Entry) error {
					return wr.PutProto(entry)
				}); err != nil {
					log.Fatalf("GraphStore shard scan error: %v", err)
				}
			}(i)
		}
		wg.Wait()
		return
	}

	// Default sharded mode: emit the single selected shard to stdout.
	if err := sgs.Shard(ctx, &spb.ShardRequest{
		Index:  *shardIndex,
		Shards: *shards,
	}, func(entry *spb.Entry) error {
		return wr.PutProto(entry)
	}); err != nil {
		log.Fatalf("GraphStore shard scan error: %v", err)
	}
}
// readEntries performs a GraphStore Read for each of the given node tickets,
// forwarding every resulting entry to entryFunc. Reads may be restricted to a
// single edge kind; the first failure (unparseable ticket or Read error)
// aborts the remaining tickets.
func readEntries(ctx context.Context, gs graphstore.Service, entryFunc graphstore.EntryFunc, edgeKind string, tickets []string) error {
	for _, t := range tickets {
		src, err := kytheuri.ToVName(t)
		if err != nil {
			return fmt.Errorf("error parsing ticket %q: %v", t, err)
		}
		req := &spb.ReadRequest{
			Source:   src,
			EdgeKind: edgeKind,
		}
		if err := gs.Read(ctx, req, entryFunc); err != nil {
			return fmt.Errorf("GraphStore Read error for ticket %q: %v", t, err)
		}
	}
	return nil
}
// scanEntries runs a filtered GraphStore Scan, forwarding every entry to
// entryFunc. Empty filter arguments mean "no restriction"; a non-empty
// targetTicket is first parsed into a VName.
func scanEntries(ctx context.Context, gs graphstore.Service, entryFunc graphstore.EntryFunc, edgeKind, targetTicket, factPrefix string) error {
	var target *spb.VName
	if targetTicket != "" {
		parsed, err := kytheuri.ToVName(targetTicket)
		if err != nil {
			return fmt.Errorf("error parsing --target %q: %v", targetTicket, err)
		}
		target = parsed
	}
	req := &spb.ScanRequest{
		EdgeKind:   edgeKind,
		FactPrefix: factPrefix,
		Target:     target,
	}
	if err := gs.Scan(ctx, req, entryFunc); err != nil {
		return fmt.Errorf("GraphStore Scan error: %v", err)
	}
	return nil
}
| benjyw/kythe | kythe/go/storage/tools/read_entries/read_entries.go | GO | apache-2.0 | 5,685 |
#
# Author:: Seth Chisamore (<schisamo@opscode.com>)
# Cookbook Name:: chef_handlers
# Recipe:: default
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Chef::Log.info("Chef Handlers will be at: #{node['chef_handler']['handler_path']}")

# Ship the bundled handler files onto the node at compile time
# (run_action(:create)) so handlers registered later in this run can load them.
remote_directory node['chef_handler']['handler_path'] do
  source 'handlers'
  recursive true
  action :nothing
  # Only set POSIX ownership/permissions on Linux. Fix: use index-style
  # attribute access (node['os']) — method-style access (node.os) is
  # deprecated in Chef and removed in newer releases.
  if node['os'] == 'linux'
    owner 'root'
    group 'root'
    mode "0755"
  end
end.run_action(:create)
| hh/chef_handler | recipes/default.rb | Ruby | apache-2.0 | 989 |
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.marketing.campaign.activity.offline.create response.
*
* @author auto create
* @since 1.0, 2017-04-07 18:22:19
*/
public class AlipayMarketingCampaignActivityOfflineCreateResponse extends AlipayResponse {

    private static final long serialVersionUID = 3571851859414371374L;

    /** ID of the campaign that was created (创建成功的活动id). */
    @ApiField("camp_id")
    private String campId;

    /** ID of the coupon template that was created (创建成功的券模版id). */
    @ApiField("prize_id")
    private String prizeId;

    public String getCampId() {
        return this.campId;
    }

    public void setCampId(String campId) {
        this.campId = campId;
    }

    public String getPrizeId() {
        return this.prizeId;
    }

    public void setPrizeId(String prizeId) {
        this.prizeId = prizeId;
    }
}
| wendal/alipay-sdk | src/main/java/com/alipay/api/response/AlipayMarketingCampaignActivityOfflineCreateResponse.java | Java | apache-2.0 | 860 |
class Config(object):
    """Names of the configuration keys this client expects to be provided."""

    SERVER_URL = "BACKEND_SERVER_URL"
    CAMERA_NAME = "CAM_NAME"
    API_KEY = "API_KEY"
class Backend(object):
    """URL fragments for the backend REST API (v1)."""

    URL_PREFIX = "http://"
    API_PREFIX = "/api/v1/"

    # Endpoint paths, all rooted under API_PREFIX.
    AUTH_URL = API_PREFIX + "users/auth"
    REGISTER_CAM_URL = API_PREFIX + "cam"
    UPLOAD_URL = API_PREFIX + "cam/upload"
    CHECK_STATE = API_PREFIX + "cam/state"
| SillentTroll/rascam_client | wsgi/common/constants.py | Python | apache-2.0 | 370 |
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.carlomicieli.footballdb.starter.domain.games;
/**
 * Thrown when a passing-stats string does not match the expected
 * {@code [Comp-Att-Yd-TD-INT]} layout.
 *
 * @author Carlo Micieli
 */
@SuppressWarnings("serial")
public class PassingStatsFormatException extends IllegalArgumentException {

    /** Detail message used when no explicit message is supplied. */
    private static final String DEFAULT_MESSAGE =
            "Invalid format for passing stats. Correct format is [Comp-Att-Yd-TD-INT]";

    /**
     * Constructs a <em>PassingStatsFormatException</em> with no detail message.
     */
    public PassingStatsFormatException() {
        this(DEFAULT_MESSAGE);
    }

    /**
     * Constructs a <em>PassingStatsFormatException</em> with the specified detail message.
     * @param s the detail message.
     */
    public PassingStatsFormatException(String s) {
        super(s);
    }
}
| CarloMicieli/footballdb-starter | src/main/java/io/github/carlomicieli/footballdb/starter/domain/games/PassingStatsFormatException.java | Java | apache-2.0 | 1,290 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BceForgetPassword.proto
package com.xinqihd.sns.gameserver.proto;
// Wrapper class generated by protoc from BceForgetPassword.proto.
// Contains a single message, BceForgetPassword, with one required string
// field (roleName). Do not edit by hand; regenerate from the .proto instead.
public final class XinqiBceForgetPassword {
  private XinqiBceForgetPassword() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface BceForgetPasswordOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required string roleName = 1;
    boolean hasRoleName();
    String getRoleName();
  }
  public static final class BceForgetPassword extends
      com.google.protobuf.GeneratedMessage
      implements BceForgetPasswordOrBuilder {
    // Use BceForgetPassword.newBuilder() to construct.
    private BceForgetPassword(Builder builder) {
      super(builder);
    }
    private BceForgetPassword(boolean noInit) {}
    private static final BceForgetPassword defaultInstance;
    public static BceForgetPassword getDefaultInstance() {
      return defaultInstance;
    }
    public BceForgetPassword getDefaultInstanceForType() {
      return defaultInstance;
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_fieldAccessorTable;
    }
    // Bit 0 of bitField0_ tracks presence of the required roleName field.
    private int bitField0_;
    // required string roleName = 1;
    public static final int ROLENAME_FIELD_NUMBER = 1;
    // Stored either as a String or a ByteString; lazily converted on access.
    private java.lang.Object roleName_;
    public boolean hasRoleName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getRoleName() {
      java.lang.Object ref = roleName_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes were valid UTF-8.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          roleName_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getRoleNameBytes() {
      java.lang.Object ref = roleName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        roleName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    private void initFields() {
      roleName_ = "";
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasRoleName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getRoleNameBytes());
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getRoleNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Static parse helpers: each delegates to a Builder and validates that
    // required fields are set via buildParsed().
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPasswordOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_fieldAccessorTable;
      }
      // Construct using com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        roleName_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.getDescriptor();
      }
      public com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword getDefaultInstanceForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.getDefaultInstance();
      }
      // build() throws if the required roleName field was never set.
      public com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword build() {
        com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      private com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      public com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword buildPartial() {
        com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword result = new com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.roleName_ = roleName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword) {
          return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword other) {
        if (other == com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.getDefaultInstance()) return this;
        if (other.hasRoleName()) {
          setRoleName(other.getRoleName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        if (!hasRoleName()) {
          return false;
        }
        return true;
      }
      // Reads fields from the wire. Tag 10 == field number 1, wire type 2
      // (length-delimited), i.e. the roleName bytes.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              roleName_ = input.readBytes();
              break;
            }
          }
        }
      }
      private int bitField0_;
      // required string roleName = 1;
      private java.lang.Object roleName_ = "";
      public boolean hasRoleName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getRoleName() {
        java.lang.Object ref = roleName_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          roleName_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setRoleName(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        roleName_ = value;
        onChanged();
        return this;
      }
      public Builder clearRoleName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        roleName_ = getDefaultInstance().getRoleName();
        onChanged();
        return this;
      }
      void setRoleName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        roleName_ = value;
        onChanged();
      }
      // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BceForgetPassword)
    }
    static {
      defaultInstance = new BceForgetPassword(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BceForgetPassword)
  }
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_fieldAccessorTable;
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for BceForgetPassword.proto, embedded
    // as an escaped byte string by the protobuf code generator.
    java.lang.String[] descriptorData = {
      "\n\027BceForgetPassword.proto\022 com.xinqihd.s" +
      "ns.gameserver.proto\"%\n\021BceForgetPassword" +
      "\022\020\n\010roleName\030\001 \002(\tB\030B\026XinqiBceForgetPass" +
      "word"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_com_xinqihd_sns_gameserver_proto_BceForgetPassword_descriptor,
              new java.lang.String[] { "RoleName", },
              com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.class,
              com.xinqihd.sns.gameserver.proto.XinqiBceForgetPassword.BceForgetPassword.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
  // @@protoc_insertion_point(outer_class_scope)
}
| wangqi/gameserver | server/src/gensrc/java/com/xinqihd/sns/gameserver/proto/XinqiBceForgetPassword.java | Java | apache-2.0 | 16,953 |
package com.lys.ping;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.pingplusplus.Pingpp;
import com.pingplusplus.exception.APIConnectionException;
import com.pingplusplus.exception.APIException;
import com.pingplusplus.exception.AuthenticationException;
import com.pingplusplus.exception.ChannelException;
import com.pingplusplus.exception.InvalidRequestException;
import com.pingplusplus.exception.PingppException;
import com.pingplusplus.model.App;
import com.pingplusplus.model.Charge;
import com.pingplusplus.model.ChargeCollection;
import com.sun.istack.internal.logging.Logger;
/**
 * Charge 对象相关示例 — example client for the Ping++ Charge API.
 *
 * <p>Demonstrates creating a charge, retrieving a single charge, and listing
 * charges. {@code apiKey} and {@code appId} come from the Ping++ management
 * console; a TestKey produces no real transactions, a LiveKey does.</p>
 *
 * @author sunkai
 */
public class PingCharge {

    Logger logger = Logger.getLogger(getClass());

    public PingCharge(){
        // Ping++ SDK reads the key from this global before every request.
        Pingpp.apiKey = apiKey;
    }

    /**
     * API key of the Ping++ management console.
     *
     * SECURITY: this is a LIVE secret key committed in source. It should be
     * rotated and loaded from configuration/environment instead.
     */
    public static String apiKey = "sk_live_zGMmfNLh87sghw4qjeWs4DnP";

    /**
     * Application ID registered on the Ping++ management console.
     */
    public static String appId = "app_j9S4O4G00GC0jHWj";

    public static void main(String[] args) {
        Pingpp.apiKey = apiKey;
        PingCharge ce = new PingCharge();
        System.out.println("---------创建 charge");
        //Charge charge = ce.charge();
        System.out.println("---------查询 charge");
        //ce.retrieve(charge.getId());
        System.out.println("---------查询 charge列表");
        //ce.all();
    }

    /**
     * Creates a Charge on the Ping++ backend.
     *
     * Parameter reference: https://pingxx.com/document/api#api-c-new
     *
     * @param channel payment channel, e.g. "alipay_wap", "upacp_wap", "upmp_wap"
     * @param amount  amount in yuan; converted to cents (fen) for the API
     * @param Subject order subject line shown to the payer
     * @param Body    order description
     * @param OrderId merchant-side order number
     * @return the created {@link Charge}, or {@code null} if the request failed
     */
    public Charge charge(String channel, int amount, String Subject, String Body, String OrderId) {
        logger.info("channel:"+channel+" amount:"+amount+" Subject:"+Subject+" Body:"+Body+" OrderId:"+OrderId);
        Charge charge = null;
        Map<String, Object> chargeMap = new HashMap<String, Object>();
        // Ping++ expects the amount in the smallest currency unit (fen).
        chargeMap.put("amount", amount * 100);
        chargeMap.put("currency", "cny");
        chargeMap.put("subject", Subject);
        chargeMap.put("body", Body);
        chargeMap.put("order_no", OrderId);
        chargeMap.put("channel", channel);
        chargeMap.put("client_ip", "127.0.0.1");

        // Channel-specific redirect URLs. (The original code had a second,
        // empty `else if (channel.equals("alipay_wap"))` branch that could
        // never be reached; it has been removed.)
        Map<String, Object> extra = new HashMap<String, Object>();
        if (channel.equals("alipay_wap")) {
            extra.put("success_url", "http://www.wangzhong.com/index/PayMoneyEnd");
        } else if (channel.equals("upacp_wap") || channel.equals("upmp_wap")) {
            extra.put("result_url", "http://www.wangzhong.com/index/PayMoneyEnd");
        }
        //extra.put("cancel_url", "http://www.wangzhong.com/");
        chargeMap.put("extra", extra);

        Map<String, String> app = new HashMap<String, String>();
        app.put("id", appId);
        chargeMap.put("app", app);

        try {
            // Issue the create-charge request.
            charge = Charge.create(chargeMap);
            System.out.println(charge);
        } catch (PingppException e) {
            e.printStackTrace();
        }
        return charge;
    }

    /**
     * Retrieves a Charge by its id.
     *
     * Reference: https://pingxx.com/document/api#api-c-inquiry
     *
     * Passes an "expand" parameter so the returned charge's {@code app}
     * field is an {@link App} object instead of a plain id string
     * (https://pingxx.com/document/api#api-expanding).
     *
     * @param id the charge id to look up
     */
    public void retrieve(String id) {
        try {
            Map<String, Object> param = new HashMap<String, Object>();
            List<String> expande = new ArrayList<String>();
            expande.add("app");
            param.put("expand", expande);
            Charge charge = Charge.retrieve(id, param);
            if (charge.getApp() instanceof App) {
                // Expanded: charge.getApp() is a full App object.
            } else {
                // Not expanded: charge.getApp() is the app id string.
            }
            System.out.println(charge);
        } catch (PingppException e) {
            e.printStackTrace();
        }
    }

    /**
     * Lists charges (paged).
     *
     * The backend returns 10 charges by default; "limit" raises that up to a
     * maximum of 100 per call. The "expand" parameter is also supported here.
     *
     * @return the fetched {@link ChargeCollection}, or {@code null} on failure
     */
    public ChargeCollection all() {
        ChargeCollection chargeCollection = null;
        Map<String, Object> chargeParams = new HashMap<String, Object>();
        chargeParams.put("limit", 3);
        try {
            chargeCollection = Charge.all(chargeParams);
            System.out.println(chargeCollection);
        } catch (AuthenticationException e) {
            e.printStackTrace();
        } catch (InvalidRequestException e) {
            e.printStackTrace();
        } catch (APIConnectionException e) {
            e.printStackTrace();
        } catch (APIException e) {
            e.printStackTrace();
        } catch (ChannelException e) {
            e.printStackTrace();
        }
        return chargeCollection;
    }
}
| zzsoszz/wyyf | src/com/lys/ping/PingCharge.java | Java | apache-2.0 | 5,897 |
# -*- coding: utf-8 -*-
#
# Copyright 2014-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""BigMLer - Resources processing: creation, update and retrieval of ensembles
"""
import bigmler.utils as u
import bigmler.resourcesapi.ensembles as r
import bigmler.checkpoint as c
MONTECARLO_FACTOR = 200
def ensemble_processing(datasets, api, args, resume,
                        fields=None,
                        session_file=None,
                        path=None, log=None):
    """Creates an ensemble of models from the input data.

    One ensemble is built per dataset in `datasets`. When `resume` is set,
    previously created ensembles (and their models) are recovered from the
    checkpoint files under `path` and only the missing ones are created.

    Returns a tuple of (ensembles, ensemble_ids, models, model_ids, resume).
    """
    ensembles = []
    ensemble_ids = []
    models = []
    model_ids = []
    # One ensemble per input dataset.
    number_of_ensembles = len(datasets)
    if resume:
        # Checkpoint may clear `resume` if fewer ensembles exist than needed.
        resume, ensemble_ids = c.checkpoint(
            c.are_ensembles_created, path, number_of_ensembles,
            debug=args.debug)
        if args.number_of_models > 1:
            # Recover the ids of the individual models inside each ensemble.
            _, model_ids = c.checkpoint(c.are_models_created, path, \
                number_of_ensembles * args.number_of_models)
            models = model_ids
        if not resume:
            # Partial recovery: log what was found and create only the rest.
            message = u.dated("Found %s ensembles out of %s. Resuming.\n"
                              % (len(ensemble_ids),
                                 number_of_ensembles))
            u.log_message(message, log_file=session_file,
                          console=args.verbosity)
            ensembles = ensemble_ids
            number_of_ensembles -= len(ensemble_ids)

    if number_of_ensembles > 0:
        ensemble_args = r.set_ensemble_args(args, fields=fields)
        ensembles, ensemble_ids, models, model_ids = r.create_ensembles(
            datasets, ensembles, ensemble_args, args, api=api, path=path,
            number_of_ensembles=number_of_ensembles,
            session_file=session_file, log=log)
    return ensembles, ensemble_ids, models, model_ids, resume
def ensemble_per_label(labels, dataset, api, args, resume, fields=None,
                       multi_label_data=None,
                       session_file=None, path=None, log=None):
    """Builds one ensemble per label for a multi-label dataset.

    When resuming, ensembles already recorded in the checkpoint files under
    `path` are reused and only the missing ones are created.

    Returns a tuple of (ensembles, ensemble_ids, models, model_ids, resume).
    """
    existing_ids = []
    pending = len(labels)
    if resume:
        # The checkpoint clears `resume` when some ensembles are missing.
        resume, existing_ids = c.checkpoint(
            c.are_ensembles_created, path, pending,
            debug=args.debug)
        if not resume:
            u.log_message(
                u.dated("Found %s ensembles out of %s."
                        " Resuming.\n"
                        % (len(existing_ids), pending)),
                log_file=session_file, console=args.verbosity)
            # Models will be rebuilt, so wipe their bookkeeping file.
            u.log_created_resources("models", path, None,
                                    mode='w')
            pending = len(labels) - len(existing_ids)
    ensemble_args_list = r.set_label_ensemble_args(
        args, labels, multi_label_data, pending, fields)
    # Every ensemble uses the same dataset; the generated argument list
    # changes the input_field so each one targets a single label.
    ensembles, ensemble_ids, models, model_ids = r.create_ensembles(
        dataset, existing_ids, ensemble_args_list, args,
        pending, api, path, session_file, log)
    return ensembles, ensemble_ids, models, model_ids, resume
| jaor/bigmler | bigmler/processing/ensembles.py | Python | apache-2.0 | 3,944 |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import com.android.ddmlib.IDevice;
import com.android.ddmlib.IShellOutputReceiver;
import com.android.ddmlib.InstallException;
import com.facebook.buck.rules.ArtifactCache;
import com.facebook.buck.rules.NoopArtifactCache;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.ProjectFilesystem;
import com.google.common.io.ByteStreams;
import org.junit.Before;
import org.junit.Test;
import org.kohsuke.args4j.CmdLineException;
import java.io.File;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
public class InstallCommandTest {
  // Shared fixtures, rebuilt before each test so cases stay independent.
  private BuckConfig buckConfig;
  private InstallCommand installCommand;
  @Before
  public void setUp() {
    // Empty config: every option under test comes from CLI args only.
    buckConfig = BuckConfig.emptyConfig();
    installCommand = createInstallCommand();
  }
  /**
   * Parses the given command-line arguments into {@link InstallCommandOptions}
   * backed by the test's empty {@link BuckConfig}.
   */
  private InstallCommandOptions getOptions(String...args) throws CmdLineException {
    InstallCommandOptions options = new InstallCommandOptions(buckConfig);
    new CmdLineParserAdditionalOptions(options).parseArgument(args);
    return options;
  }
private TestDevice createRealDevice(String serial, IDevice.DeviceState state) {
TestDevice device = TestDevice.createRealDevice(serial);
device.setState(state);
return device;
}
private TestDevice createEmulator(String serial, IDevice.DeviceState state) {
TestDevice device = TestDevice.createEmulator(serial);
device.setState(state);
return device;
}
  /**
   * Creates a device whose every shell command replies with {@code output},
   * so tests can script the response of e.g. {@code am start}.
   */
  private TestDevice createDeviceForShellCommandTest(final String output) {
    return new TestDevice() {
      @Override
      public void executeShellCommand(String cmd, IShellOutputReceiver receiver, int timeout) {
        // Feed the canned output to the receiver exactly once, then flush.
        byte[] outputBytes = output.getBytes();
        receiver.addOutput(outputBytes, 0, outputBytes.length);
        receiver.flush();
      }
    };
  }
private InstallCommand createInstallCommand() {
OutputStream nullOut = ByteStreams.nullOutputStream();
PrintStream out = new PrintStream(nullOut);
Console console = new Console(out, out, new Ansi());
ProjectFilesystem filesystem = new ProjectFilesystem(new File("."));
ArtifactCache artifactCache = new NoopArtifactCache();
return new InstallCommand(console.getStdOut(),
console.getStdErr(),
console,
filesystem,
artifactCache);
}
/**
* Verify that null is returned when no devices are present.
*/
@Test
public void testDeviceFilterNoDevices() throws CmdLineException {
InstallCommandOptions options = getOptions();
IDevice[] devices = new IDevice[] { };
assertNull(installCommand.filterDevices(devices, options.adbOptions()));
}
  /**
   * Verify that non-online devices will not appear in result list.
   */
  @Test
  public void testDeviceFilterOnlineOnly() throws CmdLineException {
    InstallCommandOptions options = getOptions();
    // Every OFFLINE/BOOTLOADER/RECOVERY state, for both emulators and real
    // devices; none is ONLINE, so the filter should yield nothing (null).
    IDevice[] devices = new IDevice[] {
        createEmulator("1", IDevice.DeviceState.OFFLINE),
        createEmulator("2", IDevice.DeviceState.BOOTLOADER),
        createEmulator("3", IDevice.DeviceState.RECOVERY),
        createRealDevice("4", IDevice.DeviceState.OFFLINE),
        createRealDevice("5", IDevice.DeviceState.BOOTLOADER),
        createRealDevice("6", IDevice.DeviceState.RECOVERY),
    };
    assertNull(installCommand.filterDevices(devices, options.adbOptions()));
  }
  /**
   * Verify that multi-install is not enabled and multiple devices
   * pass the filter null is returned. Also verify that if multiple
   * devices are passing the filter and multi-install mode is enabled
   * they all appear in resulting list.
   */
  @Test
  public void testDeviceFilterMultipleDevices() throws CmdLineException {
    IDevice[] devices = new IDevice[] {
        createEmulator("1", IDevice.DeviceState.ONLINE),
        createEmulator("2", IDevice.DeviceState.ONLINE),
        createRealDevice("4", IDevice.DeviceState.ONLINE),
        createRealDevice("5", IDevice.DeviceState.ONLINE)
    };
    // Default options: several matches is ambiguous, so expect null.
    InstallCommandOptions options = getOptions();
    assertNull(installCommand.filterDevices(devices, options.adbOptions()));
    // With -x (multi-install), all online devices must be returned.
    options = getOptions(AdbOptions.MULTI_INSTALL_MODE_SHORT_ARG);
    List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(devices.length, filteredDevices.size());
  }
  /**
   * Verify that when emulator-only mode is enabled only emulators appear in result.
   */
  @Test
  public void testDeviceFilterEmulator() throws CmdLineException {
    InstallCommandOptions options = getOptions(AdbOptions.EMULATOR_MODE_SHORT_ARG);
    // One emulator and one real device; only the emulator should survive.
    IDevice[] devices = new IDevice[] {
        createEmulator("1", IDevice.DeviceState.ONLINE),
        createRealDevice("2", IDevice.DeviceState.ONLINE),
    };
    List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(1, filteredDevices.size());
    assertSame(devices[0], filteredDevices.get(0));
  }
  /**
   * Verify that when real-device-only mode is enabled only real devices appear in result.
   */
  @Test
  public void testDeviceFilterRealDevices() throws CmdLineException {
    InstallCommandOptions options = getOptions(AdbOptions.DEVICE_MODE_LONG_ARG);
    // One real device and one emulator; only the real device should survive.
    IDevice[] devices = new IDevice[] {
        createRealDevice("1", IDevice.DeviceState.ONLINE),
        createEmulator("2", IDevice.DeviceState.ONLINE)
    };
    List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(1, filteredDevices.size());
    assertSame(devices[0], filteredDevices.get(0));
  }
/**
* Verify that filtering by serial number works.
*/
@Test
public void testDeviceFilterBySerial() throws CmdLineException {
IDevice[] devices = new IDevice[] {
createRealDevice("1", IDevice.DeviceState.ONLINE),
createEmulator("2", IDevice.DeviceState.ONLINE),
createRealDevice("3", IDevice.DeviceState.ONLINE),
createEmulator("4", IDevice.DeviceState.ONLINE)
};
for (int i = 0; i < devices.length; i++) {
InstallCommandOptions options = getOptions(
AdbOptions.SERIAL_NUMBER_SHORT_ARG,devices[i].getSerialNumber());
List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
assertNotNull(filteredDevices);
assertEquals(1, filteredDevices.size());
assertSame(devices[i], filteredDevices.get(0));
}
}
/**
* Verify that if no devices match filters null is returned.
*/
@Test
public void testDeviceFilterNoMatchingDevices() throws CmdLineException {
IDevice[] devices = new IDevice[] {
createRealDevice("1", IDevice.DeviceState.ONLINE),
createEmulator("2", IDevice.DeviceState.ONLINE),
createRealDevice("3", IDevice.DeviceState.ONLINE),
createEmulator("4", IDevice.DeviceState.ONLINE)
};
InstallCommandOptions options = getOptions(
AdbOptions.SERIAL_NUMBER_SHORT_ARG, "invalid-serial");
List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
assertNull(filteredDevices);
}
  /**
   * Verify that different combinations of arguments work correctly.
   *
   * Exercises serial-number filtering combined with the device-only and
   * emulator-only modes, including contradictory combinations (which must
   * return null) and both modes together with multi-install (which must
   * return everything).
   */
  @Test
  public void testDeviceFilterCombos() throws CmdLineException {
    TestDevice realDevice1 = createRealDevice("1", IDevice.DeviceState.ONLINE);
    TestDevice realDevice2 = createRealDevice("2", IDevice.DeviceState.ONLINE);
    TestDevice emulator1 = createEmulator("3", IDevice.DeviceState.ONLINE);
    TestDevice emulator2 = createEmulator("4", IDevice.DeviceState.ONLINE);
    IDevice[] devices = new IDevice[] {
        realDevice1,
        emulator1,
        realDevice2,
        emulator2
    };
    // Filter by serial in "real device" mode with serial number for real device.
    InstallCommandOptions options = getOptions(
        AdbOptions.SERIAL_NUMBER_SHORT_ARG, realDevice1.getSerialNumber(),
        AdbOptions.DEVICE_MODE_LONG_ARG);
    List<IDevice> filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(1, filteredDevices.size());
    assertSame(realDevice1, filteredDevices.get(0));
    // Filter by serial in "real device" mode with serial number for emulator.
    // Contradiction: expect no match.
    options = getOptions(
        AdbOptions.SERIAL_NUMBER_SHORT_ARG, emulator1.getSerialNumber(),
        AdbOptions.DEVICE_MODE_LONG_ARG);
    filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNull(filteredDevices);
    // Filter by serial in "emulator" mode with serial number for real device.
    // Contradiction: expect no match.
    options = getOptions(
        AdbOptions.SERIAL_NUMBER_SHORT_ARG, realDevice1.getSerialNumber(),
        AdbOptions.EMULATOR_MODE_SHORT_ARG);
    filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNull(filteredDevices);
    // Filter by serial in "real device" mode with serial number for emulator.
    options = getOptions(
        AdbOptions.SERIAL_NUMBER_SHORT_ARG, emulator1.getSerialNumber(),
        AdbOptions.EMULATOR_MODE_SHORT_ARG);
    filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(1, filteredDevices.size());
    assertSame(emulator1, filteredDevices.get(0));
    // Filter in both "real device" mode and "emulator mode".
    options = getOptions(
        AdbOptions.DEVICE_MODE_LONG_ARG,
        AdbOptions.EMULATOR_MODE_SHORT_ARG,
        AdbOptions.MULTI_INSTALL_MODE_SHORT_ARG);
    filteredDevices = installCommand.filterDevices(devices, options.adbOptions());
    assertNotNull(filteredDevices);
    assertEquals(devices.length, filteredDevices.size());
    for (IDevice device : devices) {
      assertTrue(filteredDevices.contains(device));
    }
  }
/**
 * Verify that successful installation on device results in true.
 */
@Test
public void testSuccessfulDeviceInstall() {
  File apkFile = new File("/some/file.apk");
  final AtomicReference<String> observedApkPath = new AtomicReference<String>();
  // Fake device whose installPackage() succeeds (a null return means no error) and
  // records the path it was asked to install.
  TestDevice fakeDevice = new TestDevice() {
    @Override
    public String installPackage(String apkPathArg, boolean reinstall, String... extraArgs)
        throws InstallException {
      observedApkPath.set(apkPathArg);
      return null;
    }
  };
  fakeDevice.setSerialNumber("serial#1");
  fakeDevice.setName("testDevice");

  assertTrue(installCommand.installApkOnDevice(fakeDevice, apkFile));
  assertEquals(apkFile.getAbsolutePath(), observedApkPath.get());
}
/**
 * Also make sure we're not erroneously parsing "Exception" and "Error".
 *
 * <p>The canned shell output contains those words inside the activity name; they must not be
 * mistaken for failure lines. A null return means the activity started cleanly.
 */
@Test
public void testDeviceStartActivitySuccess() {
  TestDevice device = createDeviceForShellCommandTest(
      "Starting: Intent { cmp=com.example.ExceptionErrorActivity }\r\n");
  assertNull(installCommand.deviceStartActivity(device, "com.foo/.Activity"));
}
/** Verify that a missing "am" binary on the device is reported as a non-null error. */
@Test
public void testDeviceStartActivityAmDoesntExist() {
  TestDevice device = createDeviceForShellCommandTest("sh: am: not found\r\n");
  assertNotNull(installCommand.deviceStartActivity(device, "com.foo/.Activity"));
}
/** Verify that an "Error:" line from am is surfaced (trimmed) as the failure message. */
@Test
public void testDeviceStartActivityActivityDoesntExist() {
  String errorLine = "Error: Activity class {com.foo/.Activiqy} does not exist.\r\n";
  // NOTE(review): the activity passed below ("com.foo/.Activiy") differs from the one in the
  // canned output ("com.foo/.Activiqy"); presumably the stubbed device returns the canned
  // output regardless of its arguments — verify against createDeviceForShellCommandTest.
  TestDevice device = createDeviceForShellCommandTest(
      "Starting: Intent { cmp=com.foo/.Activiqy }\r\n" +
      "Error type 3\r\n" +
      errorLine);
  assertEquals(
      errorLine.trim(),
      installCommand.deviceStartActivity(device, "com.foo/.Activiy").trim());
}
/**
 * Verify that an exception line printed by "am start" is extracted (trimmed) as the error
 * message, while the stack-trace frames that follow it are not included.
 */
@Test
public void testDeviceStartActivityException() {
  String errorLine = "java.lang.SecurityException: Permission Denial: " +
      "starting Intent { flg=0x10000000 cmp=com.foo/.Activity } from null " +
      "(pid=27581, uid=2000) not exported from uid 10002\r\n";
  TestDevice device = createDeviceForShellCommandTest(
      "Starting: Intent { cmp=com.foo/.Activity }\r\n" +
      errorLine +
      "  at android.os.Parcel.readException(Parcel.java:1425)\r\n" +
      "  at android.os.Parcel.readException(Parcel.java:1379)\r\n" +
      // (...)
      "  at dalvik.system.NativeStart.main(Native Method)\r\n");
  assertEquals(
      errorLine.trim(),
      installCommand.deviceStartActivity(device, "com.foo/.Activity").trim());
}
/**
 * Verify that if failure reason is returned, installation is marked as failed.
 */
@Test
public void testFailedDeviceInstallWithReason() {
  File apkFile = new File("/some/file.apk");
  // A non-null string from installPackage() is adb's way of reporting a failure reason.
  TestDevice failingDevice = new TestDevice() {
    @Override
    public String installPackage(String apkPathArg, boolean reinstall, String... extraArgs)
        throws InstallException {
      return "[SOME_REASON]";
    }
  };
  failingDevice.setSerialNumber("serial#1");
  failingDevice.setName("testDevice");

  assertFalse(installCommand.installApkOnDevice(failingDevice, apkFile));
}
/**
 * Verify that if exception is thrown during installation, installation is marked as failed.
 */
@Test
public void testFailedDeviceInstallWithException() {
  File apk = new File("/some/file.apk");
  // The fake device throws instead of returning a failure string; per the assertFalse below,
  // installApkOnDevice is expected to catch the exception and report failure, not propagate.
  TestDevice device = new TestDevice() {
    @Override
    public String installPackage(String s, boolean b, String... strings) throws InstallException {
      throw new InstallException("Failed to install on test device.", null);
    }
  };
  device.setSerialNumber("serial#1");
  device.setName("testDevice");
  assertFalse(installCommand.installApkOnDevice(device, apk));
}
}
| azatoth/buck | test/com/facebook/buck/cli/InstallCommandTest.java | Java | apache-2.0 | 14,471 |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "isourceselector.h"
namespace search::queryeval {
// Constructs a selector whose base id starts at 0 and whose default source is
// defaultSource. Source ids must lie in [0, SOURCE_LIMIT) (checked below).
ISourceSelector::ISourceSelector(Source defaultSource) :
    _baseId(0),
    _defaultSource(defaultSource)
{
    assert(defaultSource < SOURCE_LIMIT);
}
// Updates the default source. The asserts keep the value inside the valid
// range [0, SOURCE_LIMIT) and monotonically non-decreasing — the second assert
// implies callers are only expected to advance the default source over time
// (NOTE(review): confirm this invariant against the call sites).
void
ISourceSelector::setDefaultSource(Source source)
{
    assert(source < SOURCE_LIMIT);
    assert(source >= _defaultSource);
    _defaultSource = source;
}
}
| vespa-engine/vespa | searchlib/src/vespa/searchlib/queryeval/isourceselector.cpp | C++ | apache-2.0 | 483 |
# == Schema Information
#
# Table name: course_users
#
# id :integer not null, primary key
# course_id :integer
# user_id :integer
# created_at :datetime
# updated_at :datetime
#
# Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
  # Intentionally empty factory: no attributes are defaulted here. The
  # course_id/user_id columns (see schema comment above) are presumably
  # supplied by the specs that build this factory — verify usage in specs.
  factory :course_user do
  end
end
| patrickspencer/compass-webapp | spec/factories/course_users.rb | Ruby | apache-2.0 | 334 |
using System;
using System.Web.Http;
using System.Web.Mvc;
using xStudio.Web.Areas.HelpPage.ModelDescriptions;
using xStudio.Web.Areas.HelpPage.Models;
namespace xStudio.Web.Areas.HelpPage.Controllers
{
/// <summary>
/// The controller that will handle requests for the help page.
/// </summary>
public class HelpController : Controller
{
    private const string ErrorViewName = "Error";

    /// <summary>Creates a controller bound to the globally configured <see cref="HttpConfiguration"/>.</summary>
    public HelpController()
        : this(GlobalConfiguration.Configuration)
    {
    }

    /// <summary>Creates a controller bound to the supplied configuration.</summary>
    public HelpController(HttpConfiguration config)
    {
        Configuration = config;
    }

    /// <summary>The Web API configuration whose services back the help pages.</summary>
    public HttpConfiguration Configuration { get; private set; }

    /// <summary>Lists every API endpoint discovered by the API explorer.</summary>
    public ActionResult Index()
    {
        ViewBag.DocumentationProvider = Configuration.Services.GetDocumentationProvider();
        return View(Configuration.Services.GetApiExplorer().ApiDescriptions);
    }

    /// <summary>Shows the help page for one API, or the error view when it is unknown.</summary>
    public ActionResult Api(string apiId)
    {
        // Guard clause: an empty id can never resolve to an API model.
        if (String.IsNullOrEmpty(apiId))
        {
            return View(ErrorViewName);
        }

        HelpPageApiModel apiModel = Configuration.GetHelpPageApiModel(apiId);
        return apiModel == null ? View(ErrorViewName) : View(apiModel);
    }

    /// <summary>Shows the description for one resource model, or the error view when it is unknown.</summary>
    public ActionResult ResourceModel(string modelName)
    {
        if (String.IsNullOrEmpty(modelName))
        {
            return View(ErrorViewName);
        }

        ModelDescriptionGenerator generator = Configuration.GetModelDescriptionGenerator();
        ModelDescription description;
        return generator.GeneratedModels.TryGetValue(modelName, out description)
            ? View(description)
            : View(ErrorViewName);
    }
}
} | xDevsPro/xStudio | Solution/UIs/xStudio.Web/Areas/HelpPage/Controllers/HelpController.cs | C# | apache-2.0 | 1,892 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotdeviceadvisor;
import javax.annotation.Generated;
import com.amazonaws.services.iotdeviceadvisor.model.*;
import com.amazonaws.client.AwsAsyncClientParams;
import com.amazonaws.annotation.ThreadSafe;
import java.util.concurrent.ExecutorService;
/**
* Client for accessing AWSIoTDeviceAdvisor asynchronously. Each asynchronous method will return a Java Future object
* representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <p>
* Amazon Web Services IoT Core Device Advisor is a cloud-based, fully managed test capability for validating IoT
* devices during device software development. Device Advisor provides pre-built tests that you can use to validate IoT
* devices for reliable and secure connectivity with Amazon Web Services IoT Core before deploying devices to
* production. By using Device Advisor, you can confirm that your devices can connect to Amazon Web Services IoT Core,
* follow security best practices and, if applicable, receive software updates from IoT Device Management. You can also
* download signed qualification reports to submit to the Amazon Web Services Partner Network to get your device
* qualified for the Amazon Web Services Partner Device Catalog without the need to send your device in and wait for it
* to be tested.
* </p>
*/
@ThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AWSIoTDeviceAdvisorAsyncClient extends AWSIoTDeviceAdvisorClient implements AWSIoTDeviceAdvisorAsync {

    private static final int DEFAULT_THREAD_POOL_SIZE = 50;

    private final java.util.concurrent.ExecutorService executorService;

    public static AWSIoTDeviceAdvisorAsyncClientBuilder asyncBuilder() {
        return AWSIoTDeviceAdvisorAsyncClientBuilder.standard();
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWSIoTDeviceAdvisor using the specified
     * parameters.
     *
     * @param asyncClientParams
     *        Object providing client parameters.
     */
    AWSIoTDeviceAdvisorAsyncClient(AwsAsyncClientParams asyncClientParams) {
        this(asyncClientParams, false);
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWSIoTDeviceAdvisor using the specified
     * parameters.
     *
     * @param asyncClientParams
     *        Object providing client parameters.
     * @param endpointDiscoveryEnabled
     *        true will enable endpoint discovery if the service supports it.
     */
    AWSIoTDeviceAdvisorAsyncClient(AwsAsyncClientParams asyncClientParams, boolean endpointDiscoveryEnabled) {
        super(asyncClientParams, endpointDiscoveryEnabled);
        this.executorService = asyncClientParams.getExecutor();
    }

    /**
     * Returns the executor service used by this client to execute async requests.
     *
     * @return The executor service used by this client to execute async requests.
     */
    public ExecutorService getExecutorService() {
        return executorService;
    }

    /** Adapts one synchronous execute* operation so every async method can share a single submit path. */
    private interface SyncInvoker<Req, Res> {
        Res invoke(Req request) throws Exception;
    }

    /**
     * Schedules {@code invoker} on the client executor. The callable mirrors the per-operation
     * callables this class previously inlined: on failure it notifies {@code asyncHandler.onError}
     * (when a handler is present) and rethrows; on success it notifies {@code asyncHandler.onSuccess}
     * with the pre-processed request before returning the result.
     */
    private <Req, Res> java.util.concurrent.Future<Res> submitAsync(final Req finalRequest,
            final com.amazonaws.handlers.AsyncHandler<Req, Res> asyncHandler, final SyncInvoker<Req, Res> invoker) {
        return executorService.submit(new java.util.concurrent.Callable<Res>() {
            @Override
            public Res call() throws Exception {
                final Res result;
                try {
                    result = invoker.invoke(finalRequest);
                } catch (Exception ex) {
                    if (asyncHandler != null) {
                        asyncHandler.onError(ex);
                    }
                    throw ex;
                }
                if (asyncHandler != null) {
                    asyncHandler.onSuccess(finalRequest, result);
                }
                return result;
            }
        });
    }

    @Override
    public java.util.concurrent.Future<CreateSuiteDefinitionResult> createSuiteDefinitionAsync(CreateSuiteDefinitionRequest request) {
        return createSuiteDefinitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateSuiteDefinitionResult> createSuiteDefinitionAsync(final CreateSuiteDefinitionRequest request,
            final com.amazonaws.handlers.AsyncHandler<CreateSuiteDefinitionRequest, CreateSuiteDefinitionResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<CreateSuiteDefinitionRequest, CreateSuiteDefinitionResult>() {
                    @Override
                    public CreateSuiteDefinitionResult invoke(CreateSuiteDefinitionRequest preparedRequest) {
                        return executeCreateSuiteDefinition(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<DeleteSuiteDefinitionResult> deleteSuiteDefinitionAsync(DeleteSuiteDefinitionRequest request) {
        return deleteSuiteDefinitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteSuiteDefinitionResult> deleteSuiteDefinitionAsync(final DeleteSuiteDefinitionRequest request,
            final com.amazonaws.handlers.AsyncHandler<DeleteSuiteDefinitionRequest, DeleteSuiteDefinitionResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<DeleteSuiteDefinitionRequest, DeleteSuiteDefinitionResult>() {
                    @Override
                    public DeleteSuiteDefinitionResult invoke(DeleteSuiteDefinitionRequest preparedRequest) {
                        return executeDeleteSuiteDefinition(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<GetEndpointResult> getEndpointAsync(GetEndpointRequest request) {
        return getEndpointAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetEndpointResult> getEndpointAsync(final GetEndpointRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetEndpointRequest, GetEndpointResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<GetEndpointRequest, GetEndpointResult>() {
                    @Override
                    public GetEndpointResult invoke(GetEndpointRequest preparedRequest) {
                        return executeGetEndpoint(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<GetSuiteDefinitionResult> getSuiteDefinitionAsync(GetSuiteDefinitionRequest request) {
        return getSuiteDefinitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetSuiteDefinitionResult> getSuiteDefinitionAsync(final GetSuiteDefinitionRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetSuiteDefinitionRequest, GetSuiteDefinitionResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<GetSuiteDefinitionRequest, GetSuiteDefinitionResult>() {
                    @Override
                    public GetSuiteDefinitionResult invoke(GetSuiteDefinitionRequest preparedRequest) {
                        return executeGetSuiteDefinition(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<GetSuiteRunResult> getSuiteRunAsync(GetSuiteRunRequest request) {
        return getSuiteRunAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetSuiteRunResult> getSuiteRunAsync(final GetSuiteRunRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetSuiteRunRequest, GetSuiteRunResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<GetSuiteRunRequest, GetSuiteRunResult>() {
                    @Override
                    public GetSuiteRunResult invoke(GetSuiteRunRequest preparedRequest) {
                        return executeGetSuiteRun(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<GetSuiteRunReportResult> getSuiteRunReportAsync(GetSuiteRunReportRequest request) {
        return getSuiteRunReportAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetSuiteRunReportResult> getSuiteRunReportAsync(final GetSuiteRunReportRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetSuiteRunReportRequest, GetSuiteRunReportResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<GetSuiteRunReportRequest, GetSuiteRunReportResult>() {
                    @Override
                    public GetSuiteRunReportResult invoke(GetSuiteRunReportRequest preparedRequest) {
                        return executeGetSuiteRunReport(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<ListSuiteDefinitionsResult> listSuiteDefinitionsAsync(ListSuiteDefinitionsRequest request) {
        return listSuiteDefinitionsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListSuiteDefinitionsResult> listSuiteDefinitionsAsync(final ListSuiteDefinitionsRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListSuiteDefinitionsRequest, ListSuiteDefinitionsResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<ListSuiteDefinitionsRequest, ListSuiteDefinitionsResult>() {
                    @Override
                    public ListSuiteDefinitionsResult invoke(ListSuiteDefinitionsRequest preparedRequest) {
                        return executeListSuiteDefinitions(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<ListSuiteRunsResult> listSuiteRunsAsync(ListSuiteRunsRequest request) {
        return listSuiteRunsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListSuiteRunsResult> listSuiteRunsAsync(final ListSuiteRunsRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListSuiteRunsRequest, ListSuiteRunsResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<ListSuiteRunsRequest, ListSuiteRunsResult>() {
                    @Override
                    public ListSuiteRunsResult invoke(ListSuiteRunsRequest preparedRequest) {
                        return executeListSuiteRuns(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) {
        return listTagsForResourceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(final ListTagsForResourceRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<ListTagsForResourceRequest, ListTagsForResourceResult>() {
                    @Override
                    public ListTagsForResourceResult invoke(ListTagsForResourceRequest preparedRequest) {
                        return executeListTagsForResource(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<StartSuiteRunResult> startSuiteRunAsync(StartSuiteRunRequest request) {
        return startSuiteRunAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<StartSuiteRunResult> startSuiteRunAsync(final StartSuiteRunRequest request,
            final com.amazonaws.handlers.AsyncHandler<StartSuiteRunRequest, StartSuiteRunResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<StartSuiteRunRequest, StartSuiteRunResult>() {
                    @Override
                    public StartSuiteRunResult invoke(StartSuiteRunRequest preparedRequest) {
                        return executeStartSuiteRun(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<StopSuiteRunResult> stopSuiteRunAsync(StopSuiteRunRequest request) {
        return stopSuiteRunAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<StopSuiteRunResult> stopSuiteRunAsync(final StopSuiteRunRequest request,
            final com.amazonaws.handlers.AsyncHandler<StopSuiteRunRequest, StopSuiteRunResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<StopSuiteRunRequest, StopSuiteRunResult>() {
                    @Override
                    public StopSuiteRunResult invoke(StopSuiteRunRequest preparedRequest) {
                        return executeStopSuiteRun(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) {
        return tagResourceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(final TagResourceRequest request,
            final com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<TagResourceRequest, TagResourceResult>() {
                    @Override
                    public TagResourceResult invoke(TagResourceRequest preparedRequest) {
                        return executeTagResource(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) {
        return untagResourceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(final UntagResourceRequest request,
            final com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<UntagResourceRequest, UntagResourceResult>() {
                    @Override
                    public UntagResourceResult invoke(UntagResourceRequest preparedRequest) {
                        return executeUntagResource(preparedRequest);
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<UpdateSuiteDefinitionResult> updateSuiteDefinitionAsync(UpdateSuiteDefinitionRequest request) {
        return updateSuiteDefinitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateSuiteDefinitionResult> updateSuiteDefinitionAsync(final UpdateSuiteDefinitionRequest request,
            final com.amazonaws.handlers.AsyncHandler<UpdateSuiteDefinitionRequest, UpdateSuiteDefinitionResult> asyncHandler) {
        return submitAsync(beforeClientExecution(request), asyncHandler,
                new SyncInvoker<UpdateSuiteDefinitionRequest, UpdateSuiteDefinitionResult>() {
                    @Override
                    public UpdateSuiteDefinitionResult invoke(UpdateSuiteDefinitionRequest preparedRequest) {
                        return executeUpdateSuiteDefinition(preparedRequest);
                    }
                });
    }

    /**
     * Shuts down the client, releasing all managed resources. This includes forcibly terminating all pending
     * asynchronous service calls. Clients who wish to give pending asynchronous service calls time to complete should
     * call {@code getExecutorService().shutdown()} followed by {@code getExecutorService().awaitTermination()} prior to
     * calling this method.
     */
    @Override
    public void shutdown() {
        super.shutdown();
        executorService.shutdownNow();
    }
}
| aws/aws-sdk-java | aws-java-sdk-iotdeviceadvisor/src/main/java/com/amazonaws/services/iotdeviceadvisor/AWSIoTDeviceAdvisorAsyncClient.java | Java | apache-2.0 | 22,034 |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.intellij.protoeditor.ide.documentation;
import com.google.devtools.intellij.protoeditor.lang.psi.PbCommentOwner;
import com.google.devtools.intellij.protoeditor.lang.psi.util.PbCommentUtil;
import com.intellij.lang.documentation.AbstractDocumentationProvider;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiManager;
import java.util.List;
import org.jetbrains.annotations.Nullable;
/** A {@link com.intellij.lang.documentation.DocumentationProvider} for proto elements. */
public class PbDocumentationProvider extends AbstractDocumentationProvider {

  @Nullable
  @Override
  public String getQuickNavigateInfo(PsiElement element, PsiElement originalElement) {
    // No hover summary is provided; only full documentation (see generateDoc).
    return null;
  }

  @Nullable
  @Override
  public List<String> getUrlFor(PsiElement element, PsiElement originalElement) {
    // Proto elements have no external documentation URLs.
    return null;
  }

  @Nullable
  @Override
  public String generateDoc(PsiElement element, @Nullable PsiElement originalElement) {
    if (!(element instanceof PbCommentOwner)) {
      return null;
    }
    List<PsiComment> leadingComments = ((PbCommentOwner) element).getComments();
    if (leadingComments.isEmpty()) {
      return null;
    }
    // Render the attached comment text verbatim inside <pre>, escaping XML entities
    // so the comment cannot inject markup into the documentation pane.
    StringBuilder html = new StringBuilder("<pre>");
    for (String line : PbCommentUtil.extractText(leadingComments)) {
      html.append(StringUtil.escapeXml(line)).append("\n");
    }
    return html.append("</pre>").toString();
  }

  @Nullable
  @Override
  public PsiElement getDocumentationElementForLink(
      PsiManager psiManager, String link, PsiElement context) {
    // Links inside generated docs are not resolved to PSI elements.
    return null;
  }
}
| google/intellij-protocol-buffer-editor | core/src/main/java/com/google/devtools/intellij/protoeditor/ide/documentation/PbDocumentationProvider.java | Java | apache-2.0 | 2,354 |
package com.mnknowledge.dp.behavioral.observer.newsfeed;
/**
 * A concrete {@link Observer} that pulls the latest article from a news feed
 * {@link Subject} whenever the subject reports a state change.
 */
public class User implements Observer {

    // Display name of this user.
    private String name;
    // Most recently fetched article; null until subscribe() is called.
    private String article;
    // The feed this user observes; null until subscribe() is called.
    private Subject newsFeed;

    /**
     * Creates a user that is not yet subscribed to any feed.
     *
     * @param name the user's display name
     */
    public User(String name) {
        this.name = name;
    }

    /**
     * Subscribes this user to the given feed.
     *
     * <p>Bug fix: state is now initialized <em>before</em> registering with the
     * subject. Previously registerObserver() ran first, so a subject that
     * notifies observers during (or immediately after) registration would call
     * {@link #update()} while {@code newsFeed} was still null and crash with a
     * NullPointerException.
     *
     * @param newsFeed the subject to observe
     */
    public void subscribe(Subject newsFeed) {
        this.newsFeed = newsFeed;
        article = "No New Article!";
        newsFeed.registerObserver(this);
    }

    /** Pulls the latest article from the subject when a state change is reported. */
    @Override
    public void update() {
        System.out.println("State change reported by Subject.");
        article = (String) newsFeed.getUpdate();
    }

    /** @return the most recently fetched article, or the placeholder if none arrived yet */
    public String getArticle() {
        return article;
    }

    /** @return the user's display name */
    public String getName() {
        return name;
    }
}
| stapetro/mnk_designpatterns | dpsamples/src/main/java/com/mnknowledge/dp/behavioral/observer/newsfeed/User.java | Java | apache-2.0 | 714 |
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ads.googleads.migration.campaignmanagement;
import com.beust.jcommander.Parameter;
import com.google.ads.googleads.lib.GoogleAdsClient;
import com.google.ads.googleads.migration.utils.ArgumentNames;
import com.google.ads.googleads.migration.utils.CodeSampleParams;
import com.google.ads.googleads.v10.common.ExpandedTextAdInfo;
import com.google.ads.googleads.v10.common.KeywordInfo;
import com.google.ads.googleads.v10.common.ManualCpc;
import com.google.ads.googleads.v10.enums.AdGroupAdStatusEnum.AdGroupAdStatus;
import com.google.ads.googleads.v10.enums.AdGroupCriterionStatusEnum.AdGroupCriterionStatus;
import com.google.ads.googleads.v10.enums.AdGroupStatusEnum.AdGroupStatus;
import com.google.ads.googleads.v10.enums.AdGroupTypeEnum.AdGroupType;
import com.google.ads.googleads.v10.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType;
import com.google.ads.googleads.v10.enums.BudgetDeliveryMethodEnum.BudgetDeliveryMethod;
import com.google.ads.googleads.v10.enums.CampaignStatusEnum.CampaignStatus;
import com.google.ads.googleads.v10.enums.KeywordMatchTypeEnum.KeywordMatchType;
import com.google.ads.googleads.v10.errors.GoogleAdsError;
import com.google.ads.googleads.v10.errors.GoogleAdsException;
import com.google.ads.googleads.v10.resources.Ad;
import com.google.ads.googleads.v10.resources.AdGroup;
import com.google.ads.googleads.v10.resources.AdGroupAd;
import com.google.ads.googleads.v10.resources.AdGroupCriterion;
import com.google.ads.googleads.v10.resources.Campaign;
import com.google.ads.googleads.v10.resources.Campaign.NetworkSettings;
import com.google.ads.googleads.v10.resources.CampaignBudget;
import com.google.ads.googleads.v10.services.AdGroupAdOperation;
import com.google.ads.googleads.v10.services.AdGroupAdServiceClient;
import com.google.ads.googleads.v10.services.AdGroupCriterionOperation;
import com.google.ads.googleads.v10.services.AdGroupCriterionServiceClient;
import com.google.ads.googleads.v10.services.AdGroupOperation;
import com.google.ads.googleads.v10.services.AdGroupServiceClient;
import com.google.ads.googleads.v10.services.CampaignBudgetOperation;
import com.google.ads.googleads.v10.services.CampaignBudgetServiceClient;
import com.google.ads.googleads.v10.services.CampaignOperation;
import com.google.ads.googleads.v10.services.CampaignServiceClient;
import com.google.ads.googleads.v10.services.GoogleAdsRow;
import com.google.ads.googleads.v10.services.GoogleAdsServiceClient;
import com.google.ads.googleads.v10.services.GoogleAdsServiceClient.SearchPagedResponse;
import com.google.ads.googleads.v10.services.MutateAdGroupAdResult;
import com.google.ads.googleads.v10.services.MutateAdGroupAdsResponse;
import com.google.ads.googleads.v10.services.MutateAdGroupCriteriaResponse;
import com.google.ads.googleads.v10.services.MutateAdGroupCriterionResult;
import com.google.ads.googleads.v10.services.MutateAdGroupsResponse;
import com.google.ads.googleads.v10.services.MutateCampaignBudgetsResponse;
import com.google.ads.googleads.v10.services.MutateCampaignsResponse;
import com.google.ads.googleads.v10.services.SearchGoogleAdsRequest;
import com.google.ads.googleads.v10.utils.ResourceNames;
import com.google.common.collect.ImmutableList;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.joda.time.DateTime;
/**
* This code example is the last in a series of code examples that shows how to create a Search
* campaign using the AdWords API, and then migrate it to the Google Ads API one functionality at a
* time. See Step0 through Step5 for code examples in various stages of migration.
*
* <p>This code example represents the final state, where all the functionality - create a campaign
* budget, a search campaign, an ad group, keywords, and expanded text ads have all been migrated to
* using the Google Ads API. The AdWords API is not used.
*/
public class CreateCompleteCampaignGoogleAdsApiOnly {
private static final int PAGE_SIZE = 1_000;
private static final int NUMBER_OF_ADS = 5;
private static final List<String> KEYWORDS_TO_ADD = Arrays.asList("mars cruise", "space hotel");
/** Command-line parameters for this example, parsed by JCommander via CodeSampleParams. */
private static class CreateCompleteCampaignGoogleAdsApiOnlyParams extends CodeSampleParams {

  // Required: the client customer ID of the account to create the campaign in.
  @Parameter(names = ArgumentNames.CUSTOMER_ID, required = true)
  private Long customerId;
}
/** Entry point: parses flags, builds a GoogleAdsClient, and runs the example. */
public static void main(String[] args) {
  CreateCompleteCampaignGoogleAdsApiOnlyParams params =
      new CreateCompleteCampaignGoogleAdsApiOnlyParams();
  if (!params.parseArguments(args)) {
    // Either pass the required parameters for this example on the command line, or insert them
    // into the code here. See the parameter class definition above for descriptions.
    params.customerId = Long.parseLong("INSERT_CUSTOMER_ID_HERE");
  }

  // Initializes the Google Ads client from the default ads.properties file.
  GoogleAdsClient googleAdsClient;
  try {
    googleAdsClient = GoogleAdsClient.newBuilder().fromPropertiesFile().build();
  } catch (FileNotFoundException fnfe) {
    // Missing properties file: report and exit rather than throwing.
    System.err.printf(
        "Failed to load GoogleAdsClient configuration from file. Exception: %s%n", fnfe);
    return;
  } catch (IOException ioe) {
    System.err.printf("Failed to create GoogleAdsClient. Exception: %s%n", ioe);
    return;
  }

  try {
    new CreateCompleteCampaignGoogleAdsApiOnly().runExample(googleAdsClient, params.customerId);
  } catch (GoogleAdsException gae) {
    // GoogleAdsException is the base class for most exceptions thrown by an API request.
    // Instances of this exception have a message and a GoogleAdsFailure that contains a
    // collection of GoogleAdsErrors that indicate the underlying causes of the
    // GoogleAdsException.
    System.err.printf(
        "Request ID %s failed due to GoogleAdsException. Underlying errors:%n",
        gae.getRequestId());
    int i = 0;
    for (GoogleAdsError googleAdsError : gae.getGoogleAdsFailure().getErrorsList()) {
      System.err.printf("  Error %d: %s%n", i++, googleAdsError);
    }
  }
}
/**
* Runs the example.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private void runExample(GoogleAdsClient googleAdsClient, long customerId) {
CampaignBudget budget = createBudget(googleAdsClient, customerId);
Campaign campaign = createCampaign(googleAdsClient, customerId, budget);
AdGroup adGroup = createAdGroup(googleAdsClient, customerId, campaign);
createTextAds(googleAdsClient, customerId, adGroup, NUMBER_OF_ADS);
createKeywords(googleAdsClient, customerId, adGroup, KEYWORDS_TO_ADD);
}
/**
* Creates a budget.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private CampaignBudget createBudget(GoogleAdsClient googleAdsClient, long customerId) {
// Creates the budget.
CampaignBudget budget =
CampaignBudget.newBuilder()
.setName("Interplanetary Cruise Budget #" + System.currentTimeMillis())
.setDeliveryMethod(BudgetDeliveryMethod.STANDARD)
.setAmountMicros(10_000_000)
.build();
// Creates the operation.
CampaignBudgetOperation op = CampaignBudgetOperation.newBuilder().setCreate(budget).build();
// Gets the CampaignBudget service.
try (CampaignBudgetServiceClient campaignBudgetServiceClient =
googleAdsClient.getLatestVersion().createCampaignBudgetServiceClient()) {
// Adds the budget.
MutateCampaignBudgetsResponse response =
campaignBudgetServiceClient.mutateCampaignBudgets(
Long.toString(customerId), ImmutableList.of(op));
String budgetResourceName = response.getResults(0).getResourceName();
// Retrieves the budget.
CampaignBudget newBudget = getBudget(googleAdsClient, customerId, budgetResourceName);
// Displays the results.
System.out.printf(
"Budget with ID %s and name '%s' was created.%n", newBudget.getId(), newBudget.getName());
return newBudget;
}
}
/**
* Retrieves the campaign budget.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param budgetResourceName resource name of the new campaign budget.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private CampaignBudget getBudget(
GoogleAdsClient googleAdsClient, long customerId, String budgetResourceName) {
// Gets the GoogleAdsService.
try (GoogleAdsServiceClient googleAdsServiceClient =
googleAdsClient.getLatestVersion().createGoogleAdsServiceClient()) {
// Creates the request.
SearchGoogleAdsRequest request =
SearchGoogleAdsRequest.newBuilder()
.setCustomerId(Long.toString(customerId))
.setPageSize(PAGE_SIZE)
.setQuery(
String.format(
"SELECT campaign_budget.id, campaign_budget.name, "
+ "campaign_budget.resource_name FROM campaign_budget "
+ "WHERE campaign_budget.resource_name = '%s'",
budgetResourceName))
.build();
// Retrieves the budget.
SearchPagedResponse searchPagedResponse = googleAdsServiceClient.search(request);
return searchPagedResponse.getPage().getResponse().getResults(0).getCampaignBudget();
}
}
/**
* Creates a campaign.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param budget the budget for the campaign.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private Campaign createCampaign(
GoogleAdsClient googleAdsClient, long customerId, CampaignBudget budget) {
String budgetResourceName = ResourceNames.campaignBudget(customerId, budget.getId());
// Configures the campaign network options
NetworkSettings networkSettings =
NetworkSettings.newBuilder()
.setTargetGoogleSearch(true)
.setTargetSearchNetwork(true)
.setTargetContentNetwork(false)
.setTargetPartnerSearchNetwork(false)
.build();
// Creates the campaign.
Campaign campaign =
Campaign.newBuilder()
.setName("Interplanetary Cruise #" + System.currentTimeMillis())
.setAdvertisingChannelType(AdvertisingChannelType.SEARCH)
// Recommendation: Set the campaign to PAUSED when creating it to prevent
// the ads from immediately serving. Set to ENABLED once you've added
// targeting and the ads are ready to serve
.setStatus(CampaignStatus.PAUSED)
// Sets the bidding strategy and budget.
.setManualCpc(ManualCpc.newBuilder().build())
.setCampaignBudget(budgetResourceName)
// Adds the networkSettings configured above.
.setNetworkSettings(networkSettings)
// Optional: sets the start & end dates.
.setStartDate(new DateTime().plusDays(1).toString("yyyyMMdd"))
.setEndDate(new DateTime().plusDays(30).toString("yyyyMMdd"))
.build();
// Creates the operation.
CampaignOperation op = CampaignOperation.newBuilder().setCreate(campaign).build();
// Gets the Campaign service.
try (CampaignServiceClient campaignServiceClient =
googleAdsClient.getLatestVersion().createCampaignServiceClient()) {
// Adds the campaign.
MutateCampaignsResponse response =
campaignServiceClient.mutateCampaigns(Long.toString(customerId), ImmutableList.of(op));
String campaignResourceName = response.getResults(0).getResourceName();
// Retrieves the campaign.
Campaign newCampaign = getCampaign(googleAdsClient, customerId, campaignResourceName);
// Displays the results.
System.out.printf(
"Campaign with ID %s and name '%s' was created.%n",
newCampaign.getId(), newCampaign.getName());
return newCampaign;
}
}
/**
* Retrieves the campaign.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param campaignResourceName resource name of the new campaign.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private Campaign getCampaign(
GoogleAdsClient googleAdsClient, long customerId, String campaignResourceName) {
// Gets the GoogleAdsService.
try (GoogleAdsServiceClient googleAdsServiceClient =
googleAdsClient.getLatestVersion().createGoogleAdsServiceClient()) {
// Creates the request.
SearchGoogleAdsRequest request =
SearchGoogleAdsRequest.newBuilder()
.setCustomerId(Long.toString(customerId))
.setPageSize(PAGE_SIZE)
.setQuery(
String.format(
"SELECT campaign.id, campaign.name, campaign.resource_name "
+ "FROM campaign "
+ "WHERE campaign.resource_name = '%s'",
campaignResourceName))
.build();
// Retrieves the campaign.
SearchPagedResponse searchPagedResponse = googleAdsServiceClient.search(request);
return searchPagedResponse.getPage().getResponse().getResults(0).getCampaign();
}
}
/**
* Creates an ad group.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param campaign the campaign for the ad group.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private AdGroup createAdGroup(
GoogleAdsClient googleAdsClient, long customerId, Campaign campaign) {
String campaignResourceName = ResourceNames.campaign(customerId, campaign.getId());
// Creates the ad group, setting an optional CPC value.
AdGroup adGroup =
AdGroup.newBuilder()
.setName("Earth to Mars Cruises #" + System.currentTimeMillis())
.setStatus(AdGroupStatus.ENABLED)
.setCampaign(campaignResourceName)
.setType(AdGroupType.SEARCH_STANDARD)
.setCpcBidMicros(500_000L)
.build();
// Creates the operation.
AdGroupOperation op = AdGroupOperation.newBuilder().setCreate(adGroup).build();
// Gets the AdGroup Service.
try (AdGroupServiceClient adGroupServiceClient =
googleAdsClient.getLatestVersion().createAdGroupServiceClient()) {
// Adds the AdGroup.
MutateAdGroupsResponse response =
adGroupServiceClient.mutateAdGroups(Long.toString(customerId), ImmutableList.of(op));
String adGroupResourceName = response.getResults(0).getResourceName();
// Retrieves the AdGroup.
AdGroup newAdGroup = getAdGroup(googleAdsClient, customerId, adGroupResourceName);
// Displays the results.
System.out.printf(
"Ad group with ID %s and name '%s' was created.%n",
newAdGroup.getId(), newAdGroup.getName());
return newAdGroup;
}
}
/**
* Retrieves the ad group.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param adGroupResourceName resource name of the new ad group.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private AdGroup getAdGroup(
GoogleAdsClient googleAdsClient, long customerId, String adGroupResourceName) {
// Gets the GoogleAdsService.
try (GoogleAdsServiceClient googleAdsServiceClient =
googleAdsClient.getLatestVersion().createGoogleAdsServiceClient()) {
// Creates the request.
SearchGoogleAdsRequest request =
SearchGoogleAdsRequest.newBuilder()
.setCustomerId(Long.toString(customerId))
.setPageSize(PAGE_SIZE)
.setQuery(
String.format(
"SELECT ad_group.id, ad_group.name, ad_group.resource_name "
+ "FROM ad_group WHERE ad_group.resource_name = '%s'",
adGroupResourceName))
.build();
// Retrieves the AdGroup.
SearchPagedResponse response = googleAdsServiceClient.search(request);
return response.getPage().getResponse().getResults(0).getAdGroup();
}
}
/**
* Creates text ads.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param adGroup the ad group for the text ad.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private List<AdGroupAd> createTextAds(
GoogleAdsClient googleAdsClient, long customerId, AdGroup adGroup, int numberOfAds) {
String adGroupResourceName = ResourceNames.adGroup(customerId, adGroup.getId());
List<AdGroupAdOperation> operations = new ArrayList<>();
for (int i = 0; i < numberOfAds; i++) {
// Creates the text ad
AdGroupAd adgroupAd =
AdGroupAd.newBuilder()
.setAdGroup(adGroupResourceName)
.setStatus(AdGroupAdStatus.PAUSED)
.setAd(
Ad.newBuilder()
.addFinalUrls("http://www.example.com/" + String.valueOf(i))
.setExpandedTextAd(
ExpandedTextAdInfo.newBuilder()
.setDescription("Buy your tickets now!")
.setHeadlinePart1("Cruise #" + i + " to Mars")
.setHeadlinePart2("Best Space Cruise Line")
.setPath1("path1")
.setPath2("path2")
.build()))
.build();
// Creates the operation.
AdGroupAdOperation op = AdGroupAdOperation.newBuilder().setCreate(adgroupAd).build();
operations.add(op);
}
// Gets the AdGroupAd service.
try (AdGroupAdServiceClient adGroupAdServiceClient =
googleAdsClient.getLatestVersion().createAdGroupAdServiceClient()) {
// Adds the text ads.
MutateAdGroupAdsResponse response =
adGroupAdServiceClient.mutateAdGroupAds(Long.toString(customerId), operations);
System.out.printf("Added %d text ads:%n", response.getResultsCount());
// Creates a list of the text ad resource names.
List<String> newAdGroupAdResourceNames = new ArrayList<>();
for (MutateAdGroupAdResult result : response.getResultsList()) {
newAdGroupAdResourceNames.add(result.getResourceName());
}
// Retrieves the expanded text ads.
List<AdGroupAd> newAdGroupAds =
getAdGroupAds(googleAdsClient, customerId, newAdGroupAdResourceNames);
for (AdGroupAd newAdGroupAd : newAdGroupAds) {
Ad ad = newAdGroupAd.getAd();
ExpandedTextAdInfo expandedTextAdInfo = ad.getExpandedTextAd();
// Displays the results.
System.out.printf(
"Expanded text ad with ID %s, status '%s', "
+ "and headline '%s - %s' was created in ad group with ID %s.%n",
ad.getId(),
newAdGroupAd.getStatus(),
expandedTextAdInfo.getHeadlinePart1(),
expandedTextAdInfo.getHeadlinePart2(),
adGroup.getId());
}
return newAdGroupAds;
}
}
/**
* Retrieves the ad group ads.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param newResourceNames resource names of the new ad group ad.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private List<AdGroupAd> getAdGroupAds(
GoogleAdsClient googleAdsClient, long customerId, List<String> newResourceNames) {
// Gets the GoogleAdsService.
try (GoogleAdsServiceClient googleAdsServiceClient =
googleAdsClient.getLatestVersion().createGoogleAdsServiceClient()) {
// Creates the request.
SearchGoogleAdsRequest request =
SearchGoogleAdsRequest.newBuilder()
.setCustomerId(Long.toString(customerId))
.setPageSize(PAGE_SIZE)
.setQuery(
String.format(
"SELECT "
+ "ad_group.id, "
+ "ad_group_ad.ad.id, "
+ "ad_group_ad.ad.expanded_text_ad.headline_part1, "
+ "ad_group_ad.ad.expanded_text_ad.headline_part2, "
+ "ad_group_ad.status, "
+ "ad_group_ad.ad.final_urls, "
+ "ad_group_ad.resource_name "
+ "FROM ad_group_ad "
+ "WHERE ad_group_ad.resource_name IN (%s)",
String.join(
", ",
newResourceNames.stream()
.map(resourceName -> String.format("'%s'", resourceName))
.collect(Collectors.toList()))))
.build();
// Retrieves the ad group ads
SearchPagedResponse response = googleAdsServiceClient.search(request);
// Creates and returns a list of the ad group ads.
List<AdGroupAd> adGroupAds = new ArrayList<>();
for (GoogleAdsRow googleAdsRow : response.iterateAll()) {
adGroupAds.add(googleAdsRow.getAdGroupAd());
}
return adGroupAds;
}
}
/**
* Creates keywords ad group criteria.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param adGroup the ad group for the new criteria.
* @param keywordsToAdd the keywords to add to the text ads.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private List<AdGroupCriterion> createKeywords(
GoogleAdsClient googleAdsClient,
long customerId,
AdGroup adGroup,
List<String> keywordsToAdd) {
String adGroupResourceName = ResourceNames.adGroup(customerId, adGroup.getId());
List<AdGroupCriterionOperation> operations = new ArrayList<>();
for (String keywordText : keywordsToAdd) {
// Creates the keyword criterion
AdGroupCriterion adGroupCriterion =
AdGroupCriterion.newBuilder()
.setAdGroup(adGroupResourceName)
.setStatus(AdGroupCriterionStatus.ENABLED)
.setKeyword(
KeywordInfo.newBuilder()
.setText(keywordText)
.setMatchType(KeywordMatchType.EXACT)
.build())
.build();
// Creates the operation.
AdGroupCriterionOperation op =
AdGroupCriterionOperation.newBuilder().setCreate(adGroupCriterion).build();
operations.add(op);
}
// Gets the AdGroupCriterionService.
try (AdGroupCriterionServiceClient adGroupCriterionServiceClient =
googleAdsClient.getLatestVersion().createAdGroupCriterionServiceClient()) {
// Adds the keywords
MutateAdGroupCriteriaResponse response =
adGroupCriterionServiceClient.mutateAdGroupCriteria(
Long.toString(customerId), operations);
System.out.printf("Added %d keywords:%n", response.getResultsCount());
// Creates a list of new keyword resource names
List<String> newCriteriaResourceNames = new ArrayList<>();
for (MutateAdGroupCriterionResult result : response.getResultsList()) {
newCriteriaResourceNames.add(result.getResourceName());
}
// Retrieves the newly created keywords.
List<AdGroupCriterion> newCriteria =
getKeywords(googleAdsClient, customerId, newCriteriaResourceNames);
// Displays the results.
for (AdGroupCriterion newCriterion : newCriteria) {
System.out.printf(
"Keyword with text '%s', ID %s, and match type '%s' was retrieved for ad group '%s'.%n",
newCriterion.getKeyword().getText(),
newCriterion.getCriterionId(),
newCriterion.getKeyword().getMatchType(),
adGroup.getName());
}
return newCriteria;
}
}
/**
* Retrieves the keyword ad group criteria.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param newResourceNames resource names of the new ad group criteria.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private List<AdGroupCriterion> getKeywords(
GoogleAdsClient googleAdsClient, long customerId, List<String> newResourceNames) {
// Gets the GoogleAdsService.
try (GoogleAdsServiceClient googleAdsServiceClient =
googleAdsClient.getLatestVersion().createGoogleAdsServiceClient()) {
// Creates the request.
SearchGoogleAdsRequest request =
SearchGoogleAdsRequest.newBuilder()
.setCustomerId(Long.toString(customerId))
.setPageSize(PAGE_SIZE)
// Creates the search query.
.setQuery(
String.format(
"SELECT "
+ "ad_group.id, "
+ "ad_group.status, "
+ "ad_group_criterion.criterion_id, "
+ "ad_group_criterion.keyword.text, "
+ "ad_group_criterion.keyword.match_type "
+ "FROM ad_group_criterion "
+ "WHERE ad_group_criterion.type = 'KEYWORD' "
+ "AND ad_group.status = 'ENABLED' "
+ "AND ad_group_criterion.status IN ('ENABLED', 'PAUSED') "
+ "AND ad_group_criterion.resource_name IN (%s) ",
String.join(
", ",
newResourceNames.stream()
.map(resourceName -> String.format("'%s'", resourceName))
.collect(Collectors.toList()))))
.build();
// Retrieves the adGroupCriteria.
SearchPagedResponse response = googleAdsServiceClient.search(request);
// Creates and returns a list of adGroupCriteria
List<AdGroupCriterion> adGroupCriteria = new ArrayList<>();
for (GoogleAdsRow googleAdsRow : response.iterateAll()) {
adGroupCriteria.add(googleAdsRow.getAdGroupCriterion());
}
return adGroupCriteria;
}
}
}
| googleads/google-ads-java | google-ads-migration-examples/src/main/java/com/google/ads/googleads/migration/campaignmanagement/CreateCompleteCampaignGoogleAdsApiOnly.java | Java | apache-2.0 | 27,574 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.nextreports.server.aop;
import java.util.List;
import org.aspectj.lang.annotation.Pointcut;
/**
* @author Decebal Suiu
*/
public abstract class EntitiesRemoveAdvice {

    /** Matches join points whose target is the {@code StorageService}. */
    @Pointcut("target(ro.nextreports.server.service.StorageService)")
    public void inStorageService() {
    }

    /** Matches executions of any {@code removeEntity(..)} method. */
    @Pointcut("execution(* removeEntity(..))")
    public void isRemoveEntity() {
    }

    /** Binds the first {@code String} argument (the entity path) of the join point. */
    @Pointcut("args(path, ..)")
    public void withPath(String path) {
    }

    /** Composite pointcut: {@code StorageService.removeEntity(path, ..)}. */
    @Pointcut("inStorageService() && isRemoveEntity() && withPath(path)")
    public void removeEntity(String path) {
    }

    /** Matches executions of any {@code removeEntityById(..)} method. */
    @Pointcut("execution(* removeEntityById(..))")
    public void isRemoveEntityById() {
    }

    /** Binds the first {@code String} argument (the entity id) of the join point. */
    @Pointcut("args(id, ..)")
    public void withId(String id) {
    }

    /** Composite pointcut: {@code StorageService.removeEntityById(id, ..)}. */
    @Pointcut("inStorageService() && isRemoveEntityById() && withId(id)")
    public void removeEntityById(String id) {
    }

    /** Matches executions of any {@code removeEntitiesById(..)} method. */
    @Pointcut("execution(* removeEntitiesById(..))")
    public void isRemoveEntitiesById() {
    }

    /** Binds the first {@code List<String>} argument (the entity ids) of the join point. */
    @Pointcut("args(ids, ..)")
    public void withIds(List<String> ids) {
    }

    /** Composite pointcut: {@code StorageService.removeEntitiesById(ids, ..)}. */
    @Pointcut("inStorageService() && isRemoveEntitiesById() && withIds(ids)")
    public void removeEntitiesById(List<String> ids) {
    }
}
| nextreports/nextreports-server | src/ro/nextreports/server/aop/EntitiesRemoveAdvice.java | Java | apache-2.0 | 1,930 |
import operator
import pandas as pd
import pandas.util.testing as tm
import pytest
import ibis
from ibis.common import IbisTypeError
def test_array_length(t, df):
    """length() on array columns yields the element count per row."""
    names = ['array_of_float64', 'array_of_int64', 'array_of_strings']
    projections = [t[name].length().name(name + '_length') for name in names]
    result = t.projection(projections).execute()
    expected = pd.DataFrame({
        'array_of_float64_length': [2, 1, 0],
        'array_of_int64_length': [2, 0, 1],
        'array_of_strings_length': [2, 0, 1],
    })
    tm.assert_frame_equal(result, expected)
def test_array_length_scalar(client):
    """length() on a literal array equals the Python ``len`` of the raw list."""
    raw_value = [1, 2, 4]
    expr = ibis.literal(raw_value).length()
    assert client.execute(expr) == len(raw_value)
def test_array_collect(t, df):
    """collect() gathers each group's values into a Python list."""
    expr = t.group_by(t.dup_strings).aggregate(
        collected=t.float64_with_zeros.collect()
    )
    result = expr.execute().sort_values('dup_strings').reset_index(drop=True)
    # Build the pandas reference: one list of float64_with_zeros per group.
    expected = (
        df.groupby('dup_strings')
        .float64_with_zeros.apply(list)
        .reset_index()
        .rename(columns={'float64_with_zeros': 'collected'})
    )
    tm.assert_frame_equal(result, expected)
@pytest.mark.xfail(
    raises=TypeError,
    reason=(
        'Pandas does not implement rolling for functions that do not return '
        'numbers'
    )
)
def test_array_collect_rolling_partitioned(t, df):
    """collect() over a trailing window should produce per-row lists."""
    # Trailing window of size 2, ordered by plain_int64.
    window = ibis.trailing_window(2, order_by=t.plain_int64)
    colexpr = t.plain_float64.collect().over(window)
    expr = t['dup_strings', 'plain_int64', colexpr.name('collected')]
    result = expr.execute()
    # Each 'collected' entry is the trailing list of plain_float64 values.
    expected = pd.DataFrame({
        'dup_strings': ['d', 'a', 'd'],
        'plain_int64': [1, 2, 3],
        'collected': [[4.0], [4.0, 5.0], [5.0, 6.0]],
    })[expr.columns]
    tm.assert_frame_equal(result, expected)
@pytest.mark.xfail(raises=IbisTypeError, reason='Not sure if this should work')
def test_array_collect_scalar(client):
    """collect() on a scalar literal would wrap the value in a one-element list."""
    raw_value = 'abcd'
    value = ibis.literal(raw_value)
    expr = value.collect()
    result = client.execute(expr)
    expected = [raw_value]
    assert result == expected
@pytest.mark.parametrize(
    ['start', 'stop'],
    [
        (1, 3),
        (1, 1),
        (2, 3),
        (2, 5),
        (None, 3),
        (None, None),
        (3, None),
        # Negative slices are not supported by this backend; these cases are
        # expected to fail. pytest.param(..., marks=...) is the supported way
        # to mark parametrize entries (passing pytest.mark.xfail(values, ...)
        # directly was deprecated in pytest 3.2 and removed in pytest 4).
        pytest.param(
            -3, None,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
        pytest.param(
            None, -3,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
        pytest.param(
            -3, -1,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
    ]
)
def test_array_slice(t, df, start, stop):
    """Slicing an array column matches Python slice semantics element-wise."""
    expr = t.array_of_strings[start:stop]
    result = expr.execute()
    slicer = operator.itemgetter(slice(start, stop))
    expected = df.array_of_strings.apply(slicer)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    ['start', 'stop'],
    [
        (1, 3),
        (1, 1),
        (2, 3),
        (2, 5),
        (None, 3),
        (None, None),
        (3, None),
        # Negative slices are not supported; use pytest.param with marks
        # (pytest.mark.xfail(values, ...) as a parametrize entry was
        # deprecated in pytest 3.2 and removed in pytest 4).
        pytest.param(
            -3, None,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
        pytest.param(
            None, -3,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
        pytest.param(
            -3, -1,
            marks=pytest.mark.xfail(
                raises=ValueError, reason='Negative slicing not supported'
            ),
        ),
    ]
)
def test_array_slice_scalar(client, start, stop):
    """Slicing a literal array matches slicing the raw Python list."""
    raw_value = [-11, 42, 10]
    value = ibis.literal(raw_value)
    expr = value[start:stop]
    result = client.execute(expr)
    expected = raw_value[start:stop]
    assert result == expected
@pytest.mark.parametrize('index', [1, 3, 4, 11, -11])
def test_array_index(t, df, index):
    """Indexing an array column returns the element, or None when out of range."""
    expr = t[t.array_of_float64[index].name('indexed')]
    result = expr.execute()
    expected = pd.DataFrame({
        # Out-of-range indices (positive or negative) map to None rather than
        # raising, mirroring SQL semantics instead of Python's IndexError.
        'indexed': df.array_of_float64.apply(
            lambda x: x[index] if -len(x) <= index < len(x) else None
        )
    })
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize('index', [1, 3, 4, 11])
def test_array_index_scalar(client, index):
    """Indexing a literal array yields the element, or None when out of range."""
    raw_value = [-10, 1, 2, 42]
    result = client.execute(ibis.literal(raw_value)[index])
    try:
        expected = raw_value[index]
    except IndexError:
        # Out-of-range indexing returns None instead of raising.
        expected = None
    assert result == expected
@pytest.mark.parametrize('n', [1, 3, 4, 7, -2])  # negative returns empty list
@pytest.mark.parametrize('mul', [lambda x, n: x * n, lambda x, n: n * x])
def test_array_repeat(t, df, n, mul):
    """Repeating an array column with * matches Python list repetition.

    Both operand orders (array * n and n * array) are exercised via ``mul``.
    """
    expr = t.projection([mul(t.array_of_strings, n).name('repeated')])
    result = expr.execute()
    # Multiplication is commutative here, so the pandas reference can use a
    # single operand order regardless of which ``mul`` variant is active.
    expected = pd.DataFrame({'repeated': df.array_of_strings * n})
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize('n', [1, 3, 4, 7, -2])  # negative returns empty list
@pytest.mark.parametrize('mul', [lambda x, n: x * n, lambda x, n: n * x])
def test_array_repeat_scalar(client, n, mul):
    """Repeating a literal array with * matches Python list repetition."""
    raw_array = [1, 2]
    literal_array = ibis.literal(raw_array)
    result = client.execute(mul(literal_array, n))
    assert result == mul(raw_array, n)
@pytest.mark.parametrize('op', [lambda x, y: x + y, lambda x, y: y + x])
def test_array_concat(t, df, op):
    """Array concatenation with + works in both operand orders."""
    # Cast the float64 arrays to string arrays so both operands share an
    # element type, as required for concatenation.
    x = t.array_of_float64.cast('array<string>')
    y = t.array_of_strings
    expr = op(x, y)
    result = expr.execute()
    expected = op(
        df.array_of_float64.apply(lambda x: list(map(str, x))),
        df.array_of_strings
    )
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize('op', [lambda x, y: x + y, lambda x, y: y + x])
def test_array_concat_scalar(client, op):
    """Concatenating literal arrays matches Python list concatenation."""
    raw_left = [1, 2, 3]
    raw_right = [3, 4]
    expr = op(ibis.literal(raw_left), ibis.literal(raw_right))
    assert client.execute(expr) == op(raw_left, raw_right)
| deepfield/ibis | ibis/pandas/execution/tests/test_arrays.py | Python | apache-2.0 | 6,305 |
package com.hubspot.singularity;
import ch.qos.logback.classic.LoggerContext;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.config.SMTPConfiguration;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.managed.SingularityLifecycleManaged;
import com.hubspot.singularity.sentry.SingularityExceptionNotifier;
import com.hubspot.singularity.smtp.SingularitySmtpSender;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.inject.Named;
import javax.inject.Singleton;
import org.eclipse.jetty.server.Server;
import org.slf4j.ILoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class SingularityAbort {
  private static final Logger LOG = LoggerFactory.getLogger(SingularityAbort.class);

  // Optional SMTP settings; abort notification emails are sent only when present.
  private final Optional<SMTPConfiguration> maybeSmtpConfiguration;
  private final SingularitySmtpSender smtpSender;
  // This instance's HTTP host:port, used to identify the aborting host in messages.
  private final HostAndPort hostAndPort;
  private final SingularityExceptionNotifier exceptionNotifier;
  private final Injector injector;
  private final ServerProvider serverProvider;
  // Guards against re-entrant/duplicate aborts; only the first caller proceeds.
  private final AtomicBoolean aborting = new AtomicBoolean();

  @Inject
  public SingularityAbort(
    SingularitySmtpSender smtpSender,
    ServerProvider serverProvider,
    SingularityConfiguration configuration,
    SingularityExceptionNotifier exceptionNotifier,
    Injector injector,
    @Named(SingularityMainModule.HTTP_HOST_AND_PORT) HostAndPort hostAndPort
  ) {
    this.maybeSmtpConfiguration = configuration.getSmtpConfigurationOptional();
    this.serverProvider = serverProvider;
    this.smtpSender = smtpSender;
    this.exceptionNotifier = exceptionNotifier;
    this.injector = injector;
    this.hostAndPort = hostAndPort;
  }

  /** Reasons a Singularity instance may abort; included in notifications. */
  public enum AbortReason {
    LOST_ZK_CONNECTION,
    LOST_LEADERSHIP,
    UNRECOVERABLE_ERROR,
    ERROR_IN_LEADER_ONLY_POLLER,
    TEST_ABORT,
    MESOS_ERROR,
    LOST_MESOS_CONNECTION,
    MANUAL
  }

  /**
   * Aborts this Singularity instance: notifies admins, stops the managed
   * lifecycle, flushes logs, and finally exits the JVM. Idempotent — only the
   * first invocation performs the shutdown; subsequent calls are no-ops.
   *
   * @param abortReason why the instance is aborting.
   * @param throwable optional cause, included in the notification email.
   */
  public void abort(AbortReason abortReason, Optional<Throwable> throwable) {
    if (!aborting.getAndSet(true)) {
      try {
        sendAbortNotification(abortReason, throwable);
        SingularityLifecycleManaged lifecycle = injector.getInstance(
          SingularityLifecycleManaged.class
        );
        try {
          lifecycle.stop();
        } catch (Throwable t) {
          // Log but continue: exit() must still run even if shutdown fails.
          LOG.error("While shutting down", t);
        }
        flushLogs();
      } finally {
        exit();
      }
    }
  }

  // Stops the Jetty server if available, then terminates the JVM with a
  // non-zero status in all cases.
  private void exit() {
    Optional<Server> server = serverProvider.get();
    if (server.isPresent()) {
      try {
        server.get().stop();
      } catch (Exception e) {
        LOG.warn("While aborting server", e);
      } finally {
        System.exit(1);
      }
    } else {
      LOG.warn("SingularityAbort called before server has fully initialized!");
      System.exit(1); // Use the hammer.
    }
  }

  // Logs the abort, queues the admin email, and reports to the exception
  // notifier (with the cause when one is available).
  private void sendAbortNotification(
    AbortReason abortReason,
    Optional<Throwable> throwable
  ) {
    final String message = String.format(
      "Singularity on %s is aborting due to %s",
      hostAndPort.getHost(),
      abortReason
    );
    LOG.error(message);
    sendAbortMail(message, throwable);
    if (throwable.isPresent()) {
      exceptionNotifier.notify(
        message,
        throwable.get(),
        ImmutableMap.of("abortReason", abortReason.name())
      );
    } else {
      exceptionNotifier.notify(
        message,
        ImmutableMap.of("abortReason", abortReason.name())
      );
    }
  }

  // Emails admins about the abort, if SMTP is configured and the
  // SINGULARITY_ABORTING email type is routed to ADMINS.
  private void sendAbortMail(final String message, final Optional<Throwable> throwable) {
    if (!maybeSmtpConfiguration.isPresent()) {
      LOG.warn("Couldn't send abort mail because no SMTP configuration is present");
      return;
    }
    final List<SingularityEmailDestination> emailDestination = maybeSmtpConfiguration
      .get()
      .getEmailConfiguration()
      .get(SingularityEmailType.SINGULARITY_ABORTING);
    if (
      emailDestination.isEmpty() ||
      !emailDestination.contains(SingularityEmailDestination.ADMINS)
    ) {
      LOG.info("Not configured to send abort mail");
      return;
    }
    // Render the cause's stack trace into the mail body when available.
    final String body;
    if (throwable.isPresent()) {
      StringWriter sw = new StringWriter();
      PrintWriter pw = new PrintWriter(sw);
      throwable.get().printStackTrace(pw);
      body = "<pre>\n" + throwable.get().getMessage() + "\n" + sw.toString() + "\n</pre>";
    } else {
      body = "(no stack trace)";
    }
    smtpSender.queueMail(
      maybeSmtpConfiguration.get().getAdmins(),
      ImmutableList.of(),
      message,
      body
    );
  }

  // Stops the logback context (flushing appenders) and waits briefly so
  // buffered log output reaches disk before the JVM exits.
  private void flushLogs() {
    final long millisToWait = 100;
    LOG.info(
      "Attempting to flush logs and wait {} ...",
      JavaUtils.durationFromMillis(millisToWait)
    );
    ILoggerFactory loggerFactory = LoggerFactory.getILoggerFactory();
    if (loggerFactory instanceof LoggerContext) {
      LoggerContext context = (LoggerContext) loggerFactory;
      context.stop();
    }
    try {
      Thread.sleep(millisToWait);
    } catch (Exception e) {
      LOG.info("While sleeping for log flush", e);
    }
  }
}
| hs-jenkins-bot/Singularity | SingularityService/src/main/java/com/hubspot/singularity/SingularityAbort.java | Java | apache-2.0 | 5,463 |
/*
* Copyright 2015-2018 G-Labs. All Rights Reserved.
* https://zuixjs.github.io/zuix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
*
* This file is part of
* zUIx, Javascript library for component-based development.
* https://zuixjs.github.io/zuix
*
* @author Generoso Martello <generoso@martello.com>
*/
// Root of the repository; all paths below are resolved relative to it.
const baseFolder = process.cwd();

// Commons
const fs = require('fs');
const path = require('path');
const recursive = require('fs-readdir-recursive');

// logging
const tlog = require(path.join(baseFolder, 'src/lib/logger'));

// ESLint
const linter = require('eslint').linter;
const lintConfig = require(path.join(baseFolder, '.eslintrc.json'));

// Folder containing the JavaScript sources to lint.
const sourceFolder = path.join(baseFolder, 'src/js/');

// Aggregate issue counters for a lint run (reset at the start of lint()).
const stats = {
  error: 0,
  warning: 0
};
/**
 * Lints every `.js` file under `src/js/` with the project's ESLint
 * configuration, logging each error/warning and a final summary.
 *
 * @param {function({error: number, warning: number})} [callback]
 *        Invoked with the aggregated issue counts when linting completes.
 */
function lint(callback) {
  // Reset counters so repeated invocations do not accumulate stale totals.
  stats.error = 0;
  stats.warning = 0;
  // forEach (not map): we iterate for side effects only.
  recursive(sourceFolder).forEach((f) => {
    if (f.endsWith('.js')) {
      tlog.info('^B%s^R', f);
      const code = fs.readFileSync(sourceFolder + f, 'utf8');
      const issues = linter.verify(code, lintConfig, sourceFolder + f);
      issues.forEach((m) => {
        if (m.fatal || m.severity > 1) {
          stats.error++;
          tlog.error(' ^RError^: %s ^R(^Y%s^w:^Y%s^R)', m.message, m.line, m.column);
        } else {
          stats.warning++;
          tlog.warn(' ^YWarning^: %s ^R(^Y%s^w:^Y%s^R)', m.message, m.line, m.column);
        }
      });
      if (issues.length === 0) tlog.info(' ^G\u2713^: OK');
      tlog.br();
    }
  });
  tlog.info('Linting completed ^G-^: Errors ^R%s^: ^G-^: Warnings ^Y%s^:\n\n', stats.error, stats.warning);
  //process.exit(stats.error);
  if (callback) callback(stats);
}
module.exports = {
lint: lint
};
| genielabs/zuix | build/scripts/lint.js | JavaScript | apache-2.0 | 2,327 |
# Prepend the gem's lib/ directory to the load path so the specs can
# `require 'dynomite'` without the gem being installed.
$: << File.join(File.dirname(__FILE__), "/../lib" )
require 'rubygems'
require 'spec'
require 'dynomite'
| chef/ruby-dynomite | spec/spec_helper.rb | Ruby | apache-2.0 | 106 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.stax.validate;
import org.apache.commons.codec.binary.Base64;
import org.apache.wss4j.binding.wss10.AttributedString;
import org.apache.wss4j.binding.wss10.EncodedString;
import org.apache.wss4j.binding.wss10.PasswordString;
import org.apache.wss4j.binding.wss10.UsernameTokenType;
import org.apache.wss4j.binding.wsu10.AttributedDateTime;
import org.apache.wss4j.common.ext.WSPasswordCallback;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.stax.ext.WSSConstants;
import org.apache.wss4j.stax.securityToken.UsernameSecurityToken;
import org.apache.wss4j.stax.securityToken.WSSecurityTokenConstants;
import org.apache.wss4j.stax.utils.WSSUtils;
import org.apache.wss4j.stax.impl.securityToken.UsernameSecurityTokenImpl;
import org.apache.xml.security.stax.ext.XMLSecurityUtils;
import org.apache.xml.security.stax.securityToken.InboundSecurityToken;
public class UsernameTokenValidatorImpl implements UsernameTokenValidator {
private static final transient org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(UsernameTokenValidatorImpl.class);
@Override
public <T extends UsernameSecurityToken & InboundSecurityToken> T validate(
UsernameTokenType usernameTokenType, TokenContext tokenContext) throws WSSecurityException {
// If the UsernameToken is to be used for key derivation, the (1.1)
// spec says that it cannot contain a password, and it must contain
// an Iteration element
final byte[] salt = XMLSecurityUtils.getQNameType(usernameTokenType.getAny(), WSSConstants.TAG_WSSE11_SALT);
PasswordString passwordType = XMLSecurityUtils.getQNameType(usernameTokenType.getAny(), WSSConstants.TAG_WSSE_PASSWORD);
final Long iteration = XMLSecurityUtils.getQNameType(usernameTokenType.getAny(), WSSConstants.TAG_WSSE11_ITERATION);
if (salt != null && (passwordType != null || iteration == null)) {
throw new WSSecurityException(WSSecurityException.ErrorCode.INVALID_SECURITY_TOKEN, "badTokenType01");
}
boolean handleCustomPasswordTypes = tokenContext.getWssSecurityProperties().getHandleCustomPasswordTypes();
boolean allowUsernameTokenNoPassword =
tokenContext.getWssSecurityProperties().isAllowUsernameTokenNoPassword()
|| Boolean.parseBoolean((String)tokenContext.getWsSecurityContext().get(WSSConstants.PROP_ALLOW_USERNAMETOKEN_NOPASSWORD));
// Check received password type against required type
WSSConstants.UsernameTokenPasswordType requiredPasswordType =
tokenContext.getWssSecurityProperties().getUsernameTokenPasswordType();
if (requiredPasswordType != null) {
if (passwordType == null || passwordType.getType() == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Authentication failed as the received password type does not "
+ "match the required password type of: " + requiredPasswordType);
}
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
WSSConstants.UsernameTokenPasswordType usernameTokenPasswordType =
WSSConstants.UsernameTokenPasswordType.getUsernameTokenPasswordType(passwordType.getType());
if (requiredPasswordType != usernameTokenPasswordType) {
if (LOG.isDebugEnabled()) {
LOG.debug("Authentication failed as the received password type does not "
+ "match the required password type of: " + requiredPasswordType);
}
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
}
WSSConstants.UsernameTokenPasswordType usernameTokenPasswordType = WSSConstants.UsernameTokenPasswordType.PASSWORD_NONE;
if (passwordType != null && passwordType.getType() != null) {
usernameTokenPasswordType = WSSConstants.UsernameTokenPasswordType.getUsernameTokenPasswordType(passwordType.getType());
}
final AttributedString username = usernameTokenType.getUsername();
if (username == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.INVALID_SECURITY_TOKEN, "badTokenType01");
}
final EncodedString encodedNonce =
XMLSecurityUtils.getQNameType(usernameTokenType.getAny(), WSSConstants.TAG_WSSE_NONCE);
byte[] nonceVal = null;
if (encodedNonce != null && encodedNonce.getValue() != null) {
nonceVal = Base64.decodeBase64(encodedNonce.getValue());
}
final AttributedDateTime attributedDateTimeCreated =
XMLSecurityUtils.getQNameType(usernameTokenType.getAny(), WSSConstants.TAG_WSU_CREATED);
String created = null;
if (attributedDateTimeCreated != null) {
created = attributedDateTimeCreated.getValue();
}
if (usernameTokenPasswordType == WSSConstants.UsernameTokenPasswordType.PASSWORD_DIGEST) {
if (encodedNonce == null || attributedDateTimeCreated == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.INVALID_SECURITY_TOKEN, "badTokenType01");
}
if (!WSSConstants.SOAPMESSAGE_NS10_BASE64_ENCODING.equals(encodedNonce.getEncodingType())) {
throw new WSSecurityException(WSSecurityException.ErrorCode.UNSUPPORTED_SECURITY_TOKEN, "badTokenType01");
}
verifyDigestPassword(username.getValue(), passwordType, nonceVal, created, tokenContext);
} else if (usernameTokenPasswordType == WSSConstants.UsernameTokenPasswordType.PASSWORD_TEXT
|| passwordType != null && passwordType.getValue() != null
&& usernameTokenPasswordType == WSSConstants.UsernameTokenPasswordType.PASSWORD_NONE) {
verifyPlaintextPassword(username.getValue(), passwordType, tokenContext);
} else if (passwordType != null && passwordType.getValue() != null) {
if (!handleCustomPasswordTypes) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
verifyCustomPassword(username.getValue(), passwordType, tokenContext);
} else {
if (!allowUsernameTokenNoPassword) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
}
final String password;
if (passwordType != null) {
password = passwordType.getValue();
} else if (salt != null) {
WSPasswordCallback pwCb = new WSPasswordCallback(username.getValue(),
WSPasswordCallback.USERNAME_TOKEN);
try {
WSSUtils.doPasswordCallback(tokenContext.getWssSecurityProperties().getCallbackHandler(), pwCb);
} catch (WSSecurityException e) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION, e);
}
password = pwCb.getPassword();
} else {
password = null;
}
UsernameSecurityTokenImpl usernameSecurityToken = new UsernameSecurityTokenImpl(
usernameTokenPasswordType, username.getValue(), password, created,
nonceVal, salt, iteration,
tokenContext.getWsSecurityContext(), usernameTokenType.getId(),
WSSecurityTokenConstants.KEYIDENTIFIER_SECURITY_TOKEN_DIRECT_REFERENCE);
usernameSecurityToken.setElementPath(tokenContext.getElementPath());
usernameSecurityToken.setXMLSecEvent(tokenContext.getFirstXMLSecEvent());
@SuppressWarnings("unchecked")
T token = (T)usernameSecurityToken;
return token;
}
/**
* Verify a UsernameToken containing a password digest.
*/
protected void verifyDigestPassword(
String username,
PasswordString passwordType,
byte[] nonceVal,
String created,
TokenContext tokenContext
) throws WSSecurityException {
WSPasswordCallback pwCb = new WSPasswordCallback(username,
null,
passwordType.getType(),
WSPasswordCallback.USERNAME_TOKEN);
try {
WSSUtils.doPasswordCallback(tokenContext.getWssSecurityProperties().getCallbackHandler(), pwCb);
} catch (WSSecurityException e) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION, e);
}
if (pwCb.getPassword() == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
String passDigest = WSSUtils.doPasswordDigest(nonceVal, created, pwCb.getPassword());
if (!passwordType.getValue().equals(passDigest)) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
passwordType.setValue(pwCb.getPassword());
}
/**
* Verify a UsernameToken containing a plaintext password.
*/
protected void verifyPlaintextPassword(
String username,
PasswordString passwordType,
TokenContext tokenContext
) throws WSSecurityException {
WSPasswordCallback pwCb = new WSPasswordCallback(username,
null,
passwordType.getType(),
WSPasswordCallback.USERNAME_TOKEN);
try {
WSSUtils.doPasswordCallback(tokenContext.getWssSecurityProperties().getCallbackHandler(), pwCb);
} catch (WSSecurityException e) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION, e);
}
if (pwCb.getPassword() == null) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
if (!passwordType.getValue().equals(pwCb.getPassword())) {
throw new WSSecurityException(WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
}
passwordType.setValue(pwCb.getPassword());
}
/**
* Verify a UsernameToken containing a password of some unknown (but specified) password
* type.
*/
protected void verifyCustomPassword(
String username,
PasswordString passwordType,
TokenContext tokenContext
) throws WSSecurityException {
verifyPlaintextPassword(username, passwordType, tokenContext);
}
}
| clibois/wss4j | ws-security-stax/src/main/java/org/apache/wss4j/stax/validate/UsernameTokenValidatorImpl.java | Java | apache-2.0 | 11,364 |
import {firestore} from '../../utils/firestore';
const HOOK_COLLECTION_NAME = 'github-event-deliveries';

// Delivery records older than this (ms) are purged by cleanHooks().
export const HOOK_MAX_AGE = 60 * 1000;

/**
 * Tracks which GitHub webhook deliveries have already been processed, so
 * duplicate deliveries can be detected and ignored.
 */
class HooksModel {
  /**
   * Records that a hook delivery was received.
   *
   * This method may be called multiple times in quick succession, resulting
   * in the doc already existing, so creation happens inside a transaction.
   *
   * @returns true if this call created the record, false if the delivery
   *     was already logged.
   */
  async logHook(hookDelivery: string): Promise<boolean> {
    // Fix: doc() is synchronous, so the previous `await` here was a no-op.
    const hookDocRef =
        firestore().collection(HOOK_COLLECTION_NAME).doc(hookDelivery);
    return firestore().runTransaction(async (transaction) => {
      const hookDoc = await transaction.get(hookDocRef);
      if (hookDoc.exists) {
        return false;
      }
      // Fix: Transaction.set() returns the Transaction (for chaining),
      // not a Promise, so it is not awaited.
      transaction.set(hookDocRef, {received: true, timestamp: Date.now()});
      return true;
    });
  }

  /** Deletes all delivery records older than HOOK_MAX_AGE in one batch. */
  async cleanHooks() {
    const querySnapshot =
        await firestore()
            .collection(HOOK_COLLECTION_NAME)
            .where('timestamp', '<', Date.now() - HOOK_MAX_AGE)
            .get();

    const batch = firestore().batch();
    querySnapshot.forEach((doc) => {
      batch.delete(doc.ref);
    });
    await batch.commit();
  }

  /** Removes the record for a single delivery. */
  async deleteHook(hookDelivery: string) {
    await firestore()
        .collection(HOOK_COLLECTION_NAME)
        .doc(hookDelivery)
        .delete();
  }
}

export const hooksModel = new HooksModel();
| PolymerLabs/project-health | src/server/models/hooksModel.ts | TypeScript | apache-2.0 | 1,305 |
package com.ycsoft.report.query.daq;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import com.ycsoft.commons.exception.ReportException;
import com.ycsoft.commons.helper.LoggerHelper;
import com.ycsoft.report.db.ConnContainer;
/**
 * DataReader implementation that pulls report data straight from a JDBC
 * database connection. (Translated from the original comment: "fetch data
 * from the database".)
 *
 * Usage: construct with a SQL string and a database name, call open(),
 * iterate with next()/getObject()/getString(), then close().
 */
public class DBAcquisition implements DataReader {
    // JDBC resources; non-null only between open() and close().
    private Connection conn = null;
    private Statement stmt = null;
    private ResultSet rs = null;
    // Logical database name resolved by ConnContainer.
    private String database =null;
    // Query to execute on open().
    private String sql=null;

    public DBAcquisition(String sql,String database){
        this.sql=sql;
        this.database=database;
    }

    /**
     * Releases the ResultSet, Statement and Connection in that order.
     * Each close is attempted independently and any exception is
     * deliberately swallowed so one failure does not prevent the others
     * from being released. Note each field is nulled only after its
     * close() succeeds.
     */
    public void close() throws ReportException {
        try {
            if (rs != null){
                rs.close();
                rs=null;
            }
        } catch (Exception e) {
        }
        try {
            if (stmt != null){
                stmt.close();
                stmt=null;
            }
        } catch (Exception e) {
        }
        try {
            if (conn != null){
                conn.close();
                conn=null;
            }
        } catch (Exception e) {
        }
    }

    /** Returns column i of the current row as an Object (1-based, JDBC). */
    public Object getObject(int i) throws ReportException {
        try {
            return rs.getObject(i);
        } catch (SQLException e) {
            throw new ReportException(e);
        }
    }

    /** Returns column i of the current row as a String (1-based, JDBC). */
    public String getString(int i) throws ReportException {
        try {
            return rs.getString(i);
        } catch (SQLException e) {
            throw new ReportException(e);
        }
    }

    /** Advances to the next row; false when the result set is exhausted. */
    public boolean next() throws ReportException {
        try {
            return rs.next();
        } catch (SQLException e) {
            throw new ReportException(e);
        }
    }

    /**
     * Opens a connection to the configured database and executes the query.
     * A fetch size of 1000 is set to stream large results in chunks.
     */
    public void open() throws ReportException {
        try {
            conn = ConnContainer.getConn(database);
            stmt = conn.createStatement();
            stmt.setFetchSize(1000);
            LoggerHelper.debug(this.getClass(),sql);
            rs = stmt.executeQuery(sql);
        } catch (SQLException e) {
            throw new ReportException(e,e.getSQLState());
        }
    }
}
| leopardoooo/cambodia | boss-report/src/main/java/com/ycsoft/report/query/daq/DBAcquisition.java | Java | apache-2.0 | 1,854 |
package org.aws4j.data.dynamo.attribute.converter;
import java.lang.reflect.Type;
import java.util.Set;
import org.aws4j.core.exception.NotImplementedException;
import org.aws4j.core.util.JacksonUtil;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Converts values to/from DynamoDB attributes by serializing them as JSON
 * strings stored in the attribute's S (string) field.
 *
 * Set conversions are unsupported for JSON values and throw
 * {@link NotImplementedException}.
 *
 * @param <V> the value type handled by this converter
 */
public class JsonConverter<V> implements AttributeValueConverter<V> {

    private Type parameterType;
    private ObjectMapper mapper;

    public JsonConverter(Type parameterType) {
        this.parameterType = parameterType;
        this.mapper = new ObjectMapper();
    }

    /** Serializes {@code value} to JSON; wraps Jackson failures. */
    private String toJsonString(V value) {
        try {
            return mapper.writeValueAsString(value);
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /** Deserializes {@code json} back into a value of {@code parameterType}. */
    private V toValue(String json) {
        try {
            return mapper.readValue(json,
                    JacksonUtil.getJavaType(parameterType));
        } catch (Exception e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public AttributeValue convert(V value) {
        return new AttributeValue().withS(toJsonString(value));
    }

    /** Unsupported: JSON values have no set representation. */
    @Override
    public AttributeValue convertFromSet(Set<V> values) {
        throw new NotImplementedException();
    }

    @Override
    public V deconvert(AttributeValue attrValue) {
        return toValue(attrValue.getS());
    }

    /** Unsupported: JSON values have no set representation. */
    @Override
    public Set<V> deconvertToSet(AttributeValue attrValue) {
        throw new NotImplementedException();
    }
}
| aws4j/dynamo-mapper | src/main/java/org/aws4j/data/dynamo/attribute/converter/JsonConverter.java | Java | apache-2.0 | 1,500 |
package ee.jiss.commons.json.convert;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormatter;
import java.io.IOException;
import static ee.jiss.commons.lang.CheckUtils.isEmptyString;
import static org.joda.time.format.DateTimeFormat.forPattern;
public class LocalDateParser
extends StdScalarDeserializer<LocalDate> {
private static final DateTimeFormatter FORMATTER = forPattern("dd.MM.yyyy");
public LocalDateParser() {
super(LocalDate.class);
}
/**
 * Reads a "dd.MM.yyyy"-formatted token from the parser; empty or missing
 * text yields {@code null}.
 */
@Override
public LocalDate deserialize(final JsonParser jp, final DeserializationContext ctxt) throws IOException {
    final String raw = jp.getText();
    if (isEmptyString(raw)) {
        return null;
    }
    return FORMATTER.parseLocalDate(raw);
}
} | jiss-software/jiss-commons | commons-json/src/main/java/ee/jiss/commons/json/convert/LocalDateParser.java | Java | apache-2.0 | 935 |
/*
* Copyright 2004-2015 Cray Inc.
* Other additional copyright holders may be indicated within.
*
* The entirety of this work is licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define EXTERN
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS
#endif
#include "driver.h"
#include "arg.h"
#include "chpl.h"
#include "commonFlags.h"
#include "config.h"
#include "countTokens.h"
#include "docsDriver.h"
#include "files.h"
#include "ipe.h"
#include "log.h"
#include "misc.h"
#include "mysystem.h"
#include "PhaseTracker.h"
#include "primitive.h"
#include "runpasses.h"
#include "stmt.h"
#include "stringutil.h"
#include "symbol.h"
#include "timer.h"
#include "version.h"
#include <inttypes.h>
#include <string>
#include <sstream>
char CHPL_HOME[FILENAME_MAX+1] = "";
const char* CHPL_HOST_PLATFORM = NULL;
const char* CHPL_HOST_COMPILER = NULL;
const char* CHPL_TARGET_PLATFORM = NULL;
const char* CHPL_TARGET_COMPILER = NULL;
const char* CHPL_TARGET_ARCH = NULL;
const char* CHPL_LOCALE_MODEL = NULL;
const char* CHPL_COMM = NULL;
const char* CHPL_COMM_SUBSTRATE = NULL;
const char* CHPL_GASNET_SEGMENT = NULL;
const char* CHPL_TASKS = NULL;
const char* CHPL_THREADS = NULL;
const char* CHPL_LAUNCHER = NULL;
const char* CHPL_TIMERS = NULL;
const char* CHPL_MEM = NULL;
const char* CHPL_MAKE = NULL;
const char* CHPL_ATOMICS = NULL;
const char* CHPL_NETWORK_ATOMICS = NULL;
const char* CHPL_GMP = NULL;
const char* CHPL_HWLOC = NULL;
const char* CHPL_REGEXP = NULL;
const char* CHPL_WIDE_POINTERS = NULL;
const char* CHPL_LLVM = NULL;
const char* CHPL_AUX_FILESYS = NULL;
// quick and dirty
#define MAX_CHPL_ENV_VARS 50
int num_chpl_env_vars = 0;
const char *chpl_env_vars[MAX_CHPL_ENV_VARS];
const char *chpl_env_var_names[MAX_CHPL_ENV_VARS];
bool widePointersStruct;
static char makeArgument[256] = "";
static char libraryFilename[FILENAME_MAX] = "";
static char incFilename[FILENAME_MAX] = "";
static char moduleSearchPath[FILENAME_MAX] = "";
static char log_flags[512] = "";
bool fLibraryCompile = false;
bool no_codegen = false;
int debugParserLevel = 0;
bool fVerify = false;
bool ignore_errors = false;
bool ignore_errors_for_pass = false;
bool ignore_warnings = false;
int fcg = 0;
static bool fBaseline = false;
bool fCacheRemote = false;
bool fFastFlag = false;
int fConditionalDynamicDispatchLimit = 0;
bool fUseNoinit = true;
bool fNoCopyPropagation = false;
bool fNoDeadCodeElimination = false;
bool fNoRemoveWrapRecords = false;
bool fNoScalarReplacement = false;
bool fNoTupleCopyOpt = false;
bool fNoRemoteValueForwarding = false;
bool fNoRemoveCopyCalls = false;
bool fNoOptimizeLoopIterators = false;
bool fNoVectorize = true;
bool fNoGlobalConstOpt = false;
bool fNoFastFollowers = false;
bool fNoInlineIterators = false;
bool fNoLiveAnalysis = false;
bool fNoBoundsChecks = false;
bool fNoLocalChecks = false;
bool fNoNilChecks = false;
bool fNoStackChecks = false;
bool fNoCastChecks = false;
bool fMungeUserIdents = true;
bool fEnableTaskTracking = false;
bool printPasses = false;
FILE* printPassesFile = NULL;
// flag for llvmWideOpt
bool fLLVMWideOpt = false;
bool fWarnConstLoops = true;
// Enable all extra special warnings
static bool fNoWarnSpecial = true;
static bool fNoWarnDomainLiteral = true;
static bool fNoWarnTupleIteration = true;
bool fNoloopInvariantCodeMotion = false;
bool fNoChecks = false;
bool fNoInline = false;
bool fNoPrivatization = false;
bool fNoOptimizeOnClauses = false;
bool fNoRemoveEmptyRecords = true;
bool fRemoveUnreachableBlocks = true;
bool fMinimalModules = false;
bool fUseIPE = false;
int optimize_on_clause_limit = 20;
int scalar_replace_limit = 8;
int tuple_copy_limit = scalar_replace_limit;
bool fGenIDS = false;
int fLinkStyle = LS_DEFAULT; // use backend compiler's default
bool fLocal; // initialized in setupOrderedGlobals() below
bool fIgnoreLocalClasses = false;
bool fHeterogeneous = false; // re-initialized in setupOrderedGlobals() below
bool fieeefloat = false;
int ffloatOpt = 0; // 0 -> backend default; -1 -> strict; 1 -> opt
bool report_inlining = false;
char fExplainCall[256] = "";
int explainCallID = -1;
int breakOnResolveID = -1;
char fExplainInstantiation[256] = "";
bool fExplainVerbose = false;
bool fPrintCallStackOnError = false;
bool fPrintIDonError = false;
bool fPrintModuleResolution = false;
bool fCLineNumbers = false;
bool fPrintEmittedCodeSize = false;
char fPrintStatistics[256] = "";
bool fPrintDispatch = false;
bool fReportOptimizedLoopIterators = false;
bool fReportOrderIndependentLoops = false;
bool fReportOptimizedOn = false;
bool fReportPromotion = false;
bool fReportScalarReplace = false;
bool fReportDeadBlocks = false;
bool fReportDeadModules = false;
bool printCppLineno = false;
bool userSetCppLineno = false;
int num_constants_per_variable = 1;
char defaultDist[256] = "DefaultDist";
int instantiation_limit = 256;
char mainModuleName[256] = "";
bool printSearchDirs = false;
bool printModuleFiles = false;
bool llvmCodegen = false;
#ifdef HAVE_LLVM
bool externC = true;
#else
bool externC = false;
#endif
char breakOnCodegenCname[256] = "";
bool debugCCode = false;
bool optimizeCCode = false;
bool specializeCCode = false;
bool fNoMemoryFrees = false;
int numGlobalsOnHeap = 0;
bool preserveInlinedLineNumbers = false;
const char* compileCommand = NULL;
char compileVersion[64];
/* Note -- LLVM provides a way to get the path to the executable...
// This function isn't referenced outside its translation unit, but it
// can't use the "static" keyword because its address is used for
// GetMainExecutable (since some platforms don't support taking the
// address of main, and some platforms can't implement GetMainExecutable
// without being given the address of a function in the main executable).
llvm::sys::Path GetExecutablePath(const char *Argv0) {
// This just needs to be some symbol in the binary; C++ doesn't
// allow taking the address of ::main however.
void *MainAddr = (void*) (intptr_t) GetExecutablePath;
return llvm::sys::Path::GetMainExecutable(Argv0, MainAddr);
}
*/
static bool isMaybeChplHome(const char* path)
{
bool ret = false;
char* real = dirHasFile(path, "util/chplenv");
if (real)
ret = true;
free(real);
return ret;
}
// Establishes CHPL_HOME by reconciling the $CHPL_HOME environment variable
// with an installation root guessed from the compiler executable's own
// path. The env var, when present, always wins (with a warning if the two
// disagree); if neither source yields a usable path this is a fatal error.
// Finally the result is registered as the CHPL_HOME config param.
static void setupChplHome(const char* argv0) {
  const char* chpl_home = getenv("CHPL_HOME");
  char* guess = NULL;   // owned; freed at the end of this function

  // Get the executable path.
  guess = findProgramPath(argv0);

  if (guess) {
    // Determine CHPL_HOME based on the exe path.
    // Determined exe path, but don't have a env var set
    // Look for ../../../util/chplenv
    // Remove the /bin/some-platform/chpl part
    // from the path.
    if( guess[0] ) {
      // Scan backwards for the last "/bin" component and truncate there.
      int j = strlen(guess) - 5; // /bin and '\0'
      for( ; j >= 0; j-- ) {
        if( guess[j] == '/' &&
            guess[j+1] == 'b' &&
            guess[j+2] == 'i' &&
            guess[j+3] == 'n' ) {
          guess[j] = '\0';
          break;
        }
      }
    }

    if( isMaybeChplHome(guess) ) {
      // OK!
    } else {
      // Maybe we are in e.g. /usr/bin.
      free(guess);
      guess = NULL;
    }
  }

  if( chpl_home ) {
    if( strlen(chpl_home) > FILENAME_MAX )
      USR_FATAL("$CHPL_HOME=%s path too long", chpl_home);

    if( guess == NULL ) {
      // Could not find exe path, but have a env var set
      // (CHPL_HOME is zero-filled and FILENAME_MAX+1 long, and the length
      // was checked above, so strncpy leaves it NUL-terminated.)
      strncpy(CHPL_HOME, chpl_home, FILENAME_MAX);
    } else {
      // We have env var and found exe path.
      // Check that they match and emit a warning if not.
      if( ! isSameFile(chpl_home, guess) ) {
        // Not the same. Emit warning.
        USR_WARN("$CHPL_HOME=%s mismatched with executable home=%s",
                 chpl_home, guess);
      }
      // Since we have an enviro var, always use that.
      strncpy(CHPL_HOME, chpl_home, FILENAME_MAX);
    }
  } else {
    if( guess == NULL ) {
      // Could not find enviro var, and could not
      // guess at exe's path name.
      USR_FATAL("$CHPL_HOME must be set to run chpl");
    } else {
      int rc;

      if( strlen(guess) > FILENAME_MAX )
        USR_FATAL("chpl guessed home %s too long", guess);

      // Determined exe path, but don't have a env var set
      strncpy(CHPL_HOME, guess, FILENAME_MAX);

      // Also need to setenv in this case.
      rc = setenv("CHPL_HOME", guess, 0);
      if( rc ) USR_FATAL("Could not setenv CHPL_HOME");
    }
  }

  // Check that the resulting path is a Chapel distribution.
  if( ! isMaybeChplHome(CHPL_HOME) ) {
    // Bad enviro var.
    USR_WARN("CHPL_HOME=%s is not a Chapel distribution", CHPL_HOME);
  }

  if( guess )
    free(guess);

  parseCmdLineConfig("CHPL_HOME", astr("\"", CHPL_HOME, "\""));
}
// Reads one "NAME=value" line from the printchplenv output stream, binds
// the value (canonicalized via astr) to the global pointed at by 'var',
// and registers it as a config param. The stream's lines must arrive in
// the same order the SETUP_ENV_VAR invocations are made.
static void setupEnvVar(std::istringstream& iss, const char** var, const char* varname) {
  std::string line;
  std::string value;

  std::getline(iss, line);
  // NOTE(review): good() goes false once eof is reached, so a final line
  // with no trailing newline would be rejected here — presumably
  // printchplenv always ends its output with a newline; confirm.
  if (!iss.good() || line.find(varname) == std::string::npos) {
    INT_FATAL(astr("Parsing ", varname));
  }
  value = line.substr(line.find('=')+1, std::string::npos);
  *var = astr(value.c_str()); // astr call is to canonicalize
  parseCmdLineConfig(varname, astr("\"", *var, "\""));
}

// Parses the next printchplenv line into the named global and records the
// name/value pair in the chpl_env_var tables for later iteration.
// Relies on 'iss' being in scope at the expansion site.
#define SETUP_ENV_VAR(varname) \
  INT_ASSERT(num_chpl_env_vars < MAX_CHPL_ENV_VARS); \
  setupEnvVar(iss, &varname, #varname); \
  chpl_env_var_names[num_chpl_env_vars] = #varname; \
  chpl_env_vars[num_chpl_env_vars] = varname; \
  num_chpl_env_vars++;
// Runs the printchplenv utility script once and parses each CHPL_* setting
// out of its output. The SETUP_ENV_VAR order below must match the order
// printchplenv emits the variables.
static void setupEnvVars() {
  std::string vars = runUtilScript("printchplenv --simple");
  std::istringstream iss(vars);

  SETUP_ENV_VAR(CHPL_HOST_PLATFORM);
  SETUP_ENV_VAR(CHPL_HOST_COMPILER);
  SETUP_ENV_VAR(CHPL_TARGET_PLATFORM);
  SETUP_ENV_VAR(CHPL_TARGET_COMPILER);
  SETUP_ENV_VAR(CHPL_TARGET_ARCH);
  SETUP_ENV_VAR(CHPL_LOCALE_MODEL);
  SETUP_ENV_VAR(CHPL_COMM);
  SETUP_ENV_VAR(CHPL_COMM_SUBSTRATE);
  SETUP_ENV_VAR(CHPL_GASNET_SEGMENT);
  SETUP_ENV_VAR(CHPL_TASKS);
  SETUP_ENV_VAR(CHPL_THREADS);
  SETUP_ENV_VAR(CHPL_LAUNCHER);
  SETUP_ENV_VAR(CHPL_TIMERS);
  SETUP_ENV_VAR(CHPL_MEM);
  SETUP_ENV_VAR(CHPL_MAKE);
  SETUP_ENV_VAR(CHPL_ATOMICS);
  SETUP_ENV_VAR(CHPL_NETWORK_ATOMICS);
  SETUP_ENV_VAR(CHPL_GMP);
  SETUP_ENV_VAR(CHPL_HWLOC);
  SETUP_ENV_VAR(CHPL_REGEXP);
  SETUP_ENV_VAR(CHPL_WIDE_POINTERS);
  SETUP_ENV_VAR(CHPL_LLVM);
  SETUP_ENV_VAR(CHPL_AUX_FILESYS);
}
//
// Can't rely on a variable initialization order for globals, so any
// variables that need to be initialized in a particular order go here
//
static void setupOrderedGlobals(const char* argv0) {
  // Set up CHPL_HOME first
  setupChplHome(argv0);

  // Then CHPL_* variables
  setupEnvVars();

  // These depend on the environment variables being set
  fLocal = !strcmp(CHPL_COMM, "none");

  bool gotPGI = !strcmp(CHPL_TARGET_COMPILER, "pgi")
             || !strcmp(CHPL_TARGET_COMPILER, "cray-prgenv-pgi");
  // conservatively how much is needed for the current PGI compiler
  if (gotPGI) fMaxCIdentLen = 1020;

  if( 0 == strcmp(CHPL_WIDE_POINTERS, "struct") ) {
    widePointersStruct = true;
  } else {
    widePointersStruct = false;
  }
}
// NOTE: We are leaking memory here by dropping astr() results on the ground.
// Rebuilds the command line into the global 'compileCommand' — re-escaping
// embedded double quotes so the string can later be emitted inside a C
// string literal — and records the compiler version in 'compileVersion'.
static void recordCodeGenStrings(int argc, char* argv[]) {
  compileCommand = astr("chpl ");
  // WARNING: This does not handle arbitrary sequences of escaped characters
  // in string arguments
  for (int i = 1; i < argc; i++) {
    char *arg = argv[i];
    // Handle " and \" in strings
    while (char *dq = strchr(arg, '"')) {
      // VLA (GCC/Clang extension in C++): big enough for the fragment
      // plus an inserted backslash, quote and terminator.
      char targ[strlen(argv[i])+4];
      memcpy(targ, arg, dq-arg);
      if ((dq==argv[i]) || ((dq!=argv[i]) && (*(dq-1)!='\\'))) {
        // Unescaped quote: emit it as \"
        targ[dq-arg] = '\\';
        targ[dq-arg+1] = '"';
        targ[dq-arg+2] = '\0';
      } else {
        // Quote already preceded by a backslash: copy it through as-is.
        targ[dq-arg] = '"';
        targ[dq-arg+1] = '\0';
      }
      arg = dq+1;
      compileCommand = astr(compileCommand, targ);
      // NOTE(review): arg == dq+1 can never be NULL here, so this check
      // appears to be dead code.
      if (arg == NULL) break;
    }
    if (arg)
      compileCommand = astr(compileCommand, arg, " ");
  }
  get_version(compileVersion);
}
// --static handler: static linking is unsupported on OS X, so fall back
// to the backend's default link style there (with a warning).
static void setStaticLink(const ArgumentState* state, const char* arg_unused) {
  const bool onDarwin = (strcmp(CHPL_TARGET_PLATFORM, "darwin") == 0);

  if (onDarwin) {
    USR_WARN("Static compilation is not supported on OS X, ignoring flag.");
    fLinkStyle = LS_DEFAULT;
  } else {
    fLinkStyle = LS_STATIC;
  }
}

// --dynamic handler: always honored.
static void setDynamicLink(const ArgumentState* state, const char* arg_unused) {
  fLinkStyle = LS_DYNAMIC;
}

// Debug-build handler: turns on printCppLineno.
static void setChapelDebug(const ArgumentState* state, const char* arg_unused) {
  printCppLineno = true;
}
// In order to handle accumulating ccflags arguments, the argument
// processing calls this function. This function appends the flags
// to the ccflags variable, so that multiple --ccflags arguments
// all end up together in the ccflags variable (and will end up
// being passed to the backend C compiler).
static void setCCFlags(const ArgumentState* state, const char* arg) {
  // Append arg to the end of ccflags.
  int curlen = strlen(ccflags);
  int space = sizeof(ccflags) - curlen - 1 - 1; // room for ' ' and \0
  int arglen = strlen(arg);
  if( arglen <= space ) {
    // add a space if there are already arguments here
    if( curlen != 0 ) ccflags[curlen++] = ' ';
    // memcpy does not NUL-terminate; this relies on ccflags being a
    // zero-filled static buffer that is only ever appended to, so the
    // byte after the copied region is already '\0'.
    memcpy(&ccflags[curlen], arg, arglen);
  } else {
    USR_FATAL("ccflags argument too long");
  }
}
// -l handler: forward the named library to the backend link step.
static void handleLibrary(const ArgumentState* state, const char* arg_unused) {
  const char* linkFlag = astr("-l", libraryFilename);
  addLibInfo(linkFlag);
}

// -L handler: forward the library search path to the backend link step.
static void handleLibPath(const ArgumentState* state, const char* arg_unused) {
  const char* pathFlag = astr("-L", libraryFilename);
  addLibInfo(pathFlag);
}

// --make handler: override the make utility reported by printchplenv.
static void handleMake(const ArgumentState* state, const char* arg_unused) {
  CHPL_MAKE = makeArgument;
}

// -I handler: record an include directory for the backend C compile.
static void handleIncDir(const ArgumentState* state, const char* arg_unused) {
  addIncInfo(incFilename);
}
// Re-launches this compiler under gdb, driven by a generated command file,
// and exits with the debugger's status.
static void runCompilerInGDB(int argc, char* argv[]) {
  const char* commandFile = createDebuggerFile("gdb", argc, argv);
  const char* command     = astr("gdb -q ", argv[0], " -x ", commandFile);

  clean_exit(mysystem(command, "running gdb", false));
}

// Same as above, but for lldb.
static void runCompilerInLLDB(int argc, char* argv[]) {
  const char* commandFile = createDebuggerFile("lldb", argc, argv);
  const char* command     = astr("lldb -s ", commandFile, " ", argv[0]);

  clean_exit(mysystem(command, "running lldb", false));
}
// -s / --set handler for config params.
static void readConfig(const ArgumentState* state, const char* arg_unused) {
  // Expect arg_unused to be a string of either of these forms:
  // 1. name=value -- set the config param "name" to "value"
  // 2. name       -- set the boolean config param "name" to NOT("name")
  //                  if name is not type bool, set it to 0.

  // NOTE(review): 'name' is never freed — presumably parseCmdLineConfig
  // retains the pointer, so freeing here would dangle; confirm before
  // "fixing" the apparent leak.
  char *name = strdup(arg_unused);
  char *value;
  value = strstr(name, "=");
  if (value) {
    // Split "name=value" in place at the '='.
    *value = '\0';
    value++;
    if (value[0]) {
      // arg_unused was name=value
      parseCmdLineConfig(name, value);
    } else {
      // arg_unused was name= <blank>
      USR_FATAL("Missing config param value");
    }
  } else {
    // arg_unused was just name
    parseCmdLineConfig(name, "");
  }
}
// Module-path flag handler: append a directory to the module search path.
static void addModulePath(const ArgumentState* state, const char* newpath) {
  addFlagModulePath(newpath);
}

// Records that the user explicitly set the cpp-lines flag, so later logic
// can distinguish an explicit choice from the default.
static void noteCppLinesSet(const ArgumentState* state, const char* unused) {
  userSetCppLineno = true;
}

// Catches the common mistake of following --savec directly with another
// flag instead of a directory name.
static void verifySaveCDir(const ArgumentState* state, const char* unused) {
  if (saveCDir[0] == '-') {
    USR_FATAL("--savec takes a directory name as its argument\n"
              "       (you specified '%s', assumed to be another flag)",
              saveCDir);
  }
}
// --no-checks handler: disable every runtime safety check at once.
static void turnOffChecks(const ArgumentState* state, const char* unused) {
  fNoNilChecks    = true;
  fNoBoundsChecks = true;
  fNoLocalChecks  = true;
  fNoStackChecks  = true;
  fNoCastChecks   = true;
}

// Warn when stack checks are requested under a tasking layer that
// cannot implement them.
static void handleStackCheck(const ArgumentState* state, const char* unused) {
  const bool stackChecksOn = !fNoStackChecks;

  if (stackChecksOn && strcmp(CHPL_TASKS, "massivethreads") == 0) {
    USR_WARN("CHPL_TASKS=%s cannot do stack checks.", CHPL_TASKS);
  }
}

// Warn that task tracking is a no-op (other than slower compiles) outside
// the fifo tasking layer.
static void handleTaskTracking(const ArgumentState* state, const char* unused) {
  const bool trackingUseless = (strcmp(CHPL_TASKS, "fifo") != 0);

  if (fEnableTaskTracking && trackingUseless) {
    USR_WARN("Enabling task tracking with CHPL_TASKS=%s has no effect other than to slow down compilation", CHPL_TASKS);
  }
}
// --fast handler: the inverse of --baseline below; keep the two lists of
// flags in sync when adding optimizations.
static void setFastFlag(const ArgumentState* state, const char* unused) {
  //
  // Enable all compiler optimizations, disable all runtime checks
  //
  fBaseline = false;
  // don't set fieeefloat since it can change program behavior.
  // instead, we rely on the backend C compiler to choose
  // an appropriate level of optimization.

  // Re-enable every Chapel-level optimization pass...
  fNoCopyPropagation = false;
  fNoDeadCodeElimination = false;
  fNoRemoveWrapRecords = false;
  fNoFastFollowers = false;
  fNoloopInvariantCodeMotion= false;
  fNoInline = false;
  fNoInlineIterators = false;
  fNoOptimizeLoopIterators = false;
  fNoVectorize = false;
  fNoLiveAnalysis = false;
  fNoRemoteValueForwarding = false;
  fNoRemoveCopyCalls = false;
  fNoScalarReplacement = false;
  fNoTupleCopyOpt = false;
  fNoPrivatization = false;
  // ...turn off every runtime safety check...
  fNoChecks = true;
  fNoBoundsChecks = true;
  fNoLocalChecks = true;
  fIgnoreLocalClasses = false;
  fNoNilChecks = true;
  fNoStackChecks = true;
  fNoCastChecks = true;
  fNoOptimizeOnClauses = false;
  // ...and ask the backend C compiler to optimize too.
  optimizeCCode = true;
  specializeCCode = true;
}
// Argument handler for --ieee-float: map the boolean fieeefloat onto the
// three-valued ffloatOpt (-1 = strict IEEE, 0 = backend default,
// 1 = lax IEEE / optimize).
static void setFloatOptFlag(const ArgumentState* state, const char* unused) {
  // It would be nicer if arg.cpp could handle three-valued variables
  // (set-false, set-true, not-set) directly; since this is the only such
  // case, a setter function is an acceptable plan.  ffloatOpt stays 0
  // (backend default) only when the flag is never given.
  ffloatOpt = fieeefloat ? -1 : 1;
}
// Argument handler for --baseline: disable every Chapel compiler
// optimization (alphabetical; the assignments are independent).
static void setBaselineFlag(const ArgumentState* state, const char* unused) {
  fBaseline                  = true;
  fIgnoreLocalClasses        = true;
  fNoCopyPropagation         = true;
  fNoDeadCodeElimination     = true;
  fNoFastFollowers           = true;
  fNoInline                  = true;
  fNoInlineIterators         = true;
  fNoLiveAnalysis            = true;
  fNoOptimizeLoopIterators   = true;
  fNoOptimizeOnClauses       = true;
  fNoPrivatization           = true;
  fNoRemoteValueForwarding   = true;
  fNoRemoveCopyCalls         = true;
  fNoRemoveWrapRecords       = true;
  fNoScalarReplacement       = true;
  fNoTupleCopyOpt            = true;
  fNoVectorize               = true;
  fNoloopInvariantCodeMotion = true;
  // No inline conditionals for dynamic dispatch either.
  fConditionalDynamicDispatchLimit = 0;
}
// Argument handler for --cache-remote: mirror the flag into the
// CHPL_CACHE_REMOTE config param so module code can observe it.
static void setCacheEnable(const ArgumentState* state, const char* unused) {
  parseCmdLineConfig("CHPL_CACHE_REMOTE", fCacheRemote ? "true" : "false");
}
// Argument handler for --html-user: dump the IR in HTML restricted to user
// modules (system modules are excluded from the dump).
static void setHtmlUser(const ArgumentState* state, const char* unused) {
  fdump_html_include_system_modules = false;
  fdump_html = true;
}
// Argument handler for --[no-]warn-tuple-iteration: publish the setting as
// the quoted config param CHPL_WARN_TUPLE_ITERATION.
static void setWarnTupleIteration(const ArgumentState* state, const char* unused) {
  parseCmdLineConfig("CHPL_WARN_TUPLE_ITERATION",
                     astr("\"", fNoWarnTupleIteration ? "false" : "true", "\""));
}
// Argument handler for --[no-]warn-domain-literal: publish the setting as
// the quoted config param CHPL_WARN_DOMAIN_LITERAL.
static void setWarnDomainLiteral(const ArgumentState* state, const char* unused) {
  parseCmdLineConfig("CHPL_WARN_DOMAIN_LITERAL",
                     astr("\"", fNoWarnDomainLiteral ? "false" : "true", "\""));
}
// Argument handler for --warn-special: enable the whole family of special
// warnings by clearing each fNoWarn* flag, re-publishing the dependent
// config params through their own setters.
static void setWarnSpecial(const ArgumentState* state, const char* unused) {
  fNoWarnSpecial = false;

  fNoWarnDomainLiteral = false;
  setWarnDomainLiteral(state, unused);

  fNoWarnTupleIteration = false;
  setWarnTupleIteration(state, unused);
}
// Argument handler for --print-passes-file: open the pass-timing report
// file for writing.  On failure we warn and leave printPassesFile NULL, so
// the report is simply skipped (see main()).
static void setPrintPassesFile(const ArgumentState* state, const char* fileName) {
  printPassesFile = fopen(fileName, "w");
  if (!printPassesFile) {
    USR_WARN("Error opening printPassesFile: %s.", fileName);
  }
}
/*
Flag types:
I = int
P = path
S = string
D = double
f = set to false
F = set to true
+ = increment
T = toggle
L = int64 (long)
N = --no-... flag, --no version sets to false
n = --no-... flag, --no version sets to true
Record components:
{"long option" (or "" for separators), 'short option', "description of option argument(s), if any", "option description", "option type", &affectedVariable, "environment variable name", setter_function},
*/
// Master table of command-line options; see the format comment above for
// the meaning of each record component.
static ArgumentDescription arg_desc[] = {
 {"", ' ', NULL, "Module Processing Options", NULL, NULL, NULL, NULL},
 {"count-tokens", ' ', NULL, "[Don't] count tokens in main modules", "N", &countTokens, "CHPL_COUNT_TOKENS", NULL},
 {"main-module", ' ', "<module>", "Specify entry point module", "S256", mainModuleName, NULL, NULL},
 {"module-dir", 'M', "<directory>", "Add directory to module search path", "P", moduleSearchPath, NULL, addModulePath},
 {"print-code-size", ' ', NULL, "[Don't] print code size of main modules", "N", &printTokens, "CHPL_PRINT_TOKENS", NULL},
 {"print-module-files", ' ', NULL, "Print module file locations", "F", &printModuleFiles, NULL, NULL},
 {"print-search-dirs", ' ', NULL, "[Don't] print module search path", "N", &printSearchDirs, "CHPL_PRINT_SEARCH_DIRS", NULL},
 {"", ' ', NULL, "Parallelism Control Options", NULL, NULL, NULL, NULL},
 {"local", ' ', NULL, "Target one [many] locale[s]", "N", &fLocal, "CHPL_LOCAL", NULL},
 {"", ' ', NULL, "Optimization Control Options", NULL, NULL, NULL, NULL},
 {"baseline", ' ', NULL, "Disable all Chapel optimizations", "F", &fBaseline, "CHPL_BASELINE", setBaselineFlag},
 {"cache-remote", ' ', NULL, "Enable cache for remote data (must be enabled specifically)", "F", &fCacheRemote, "CHPL_CACHE_REMOTE", setCacheEnable},
 {"conditional-dynamic-dispatch-limit", ' ', "<limit>", "Set limit on # of inline conditionals used for dynamic dispatch", "I", &fConditionalDynamicDispatchLimit, "CHPL_CONDITIONAL_DYNAMIC_DISPATCH_LIMIT", NULL},
 {"copy-propagation", ' ', NULL, "Enable [disable] copy propagation", "n", &fNoCopyPropagation, "CHPL_DISABLE_COPY_PROPAGATION", NULL},
 {"dead-code-elimination", ' ', NULL, "Enable [disable] dead code elimination", "n", &fNoDeadCodeElimination, "CHPL_DISABLE_DEAD_CODE_ELIMINATION", NULL},
 {"fast", ' ', NULL, "Use fast default settings", "F", &fFastFlag, "CHPL_FAST", setFastFlag},
 {"fast-followers", ' ', NULL, "Enable [disable] fast followers", "n", &fNoFastFollowers, "CHPL_DISABLE_FAST_FOLLOWERS", NULL},
 {"ieee-float", ' ', NULL, "Generate code that is strict [lax] with respect to IEEE compliance", "N", &fieeefloat, "CHPL_IEEE_FLOAT", setFloatOptFlag},
 {"ignore-local-classes", ' ', NULL, "Disable [enable] local classes", "N", &fIgnoreLocalClasses, NULL, NULL},
 {"inline", ' ', NULL, "Enable [disable] function inlining", "n", &fNoInline, NULL, NULL},
 {"inline-iterators", ' ', NULL, "Enable [disable] iterator inlining", "n", &fNoInlineIterators, "CHPL_DISABLE_INLINE_ITERATORS", NULL},
 {"live-analysis", ' ', NULL, "Enable [disable] live variable analysis", "n", &fNoLiveAnalysis, "CHPL_DISABLE_LIVE_ANALYSIS", NULL},
 {"loop-invariant-code-motion", ' ', NULL, "Enable [disable] loop invariant code motion", "n", &fNoloopInvariantCodeMotion, NULL, NULL},
 {"optimize-loop-iterators", ' ', NULL, "Enable [disable] optimization of iterators composed of a single loop", "n", &fNoOptimizeLoopIterators, "CHPL_DISABLE_OPTIMIZE_LOOP_ITERATORS", NULL},
 {"optimize-on-clauses", ' ', NULL, "Enable [disable] optimization of on clauses", "n", &fNoOptimizeOnClauses, "CHPL_DISABLE_OPTIMIZE_ON_CLAUSES", NULL},
 {"optimize-on-clause-limit", ' ', "<limit>", "Limit recursion depth of on clause optimization search", "I", &optimize_on_clause_limit, "CHPL_OPTIMIZE_ON_CLAUSE_LIMIT", NULL},
 {"privatization", ' ', NULL, "Enable [disable] privatization of distributed arrays and domains", "n", &fNoPrivatization, "CHPL_DISABLE_PRIVATIZATION", NULL},
 {"remote-value-forwarding", ' ', NULL, "Enable [disable] remote value forwarding", "n", &fNoRemoteValueForwarding, "CHPL_DISABLE_REMOTE_VALUE_FORWARDING", NULL},
 {"remove-copy-calls", ' ', NULL, "Enable [disable] remove copy calls", "n", &fNoRemoveCopyCalls, "CHPL_DISABLE_REMOVE_COPY_CALLS", NULL},
 {"remove-wrap-records", ' ', NULL, "Enable [disable] wrap record removal", "n", &fNoRemoveWrapRecords, "CHPL_REMOVE_WRAP_RECORDS", NULL},
 {"scalar-replacement", ' ', NULL, "Enable [disable] scalar replacement", "n", &fNoScalarReplacement, "CHPL_DISABLE_SCALAR_REPLACEMENT", NULL},
 {"scalar-replace-limit", ' ', "<limit>", "Limit on the size of tuples being replaced during scalar replacement", "I", &scalar_replace_limit, "CHPL_SCALAR_REPLACE_TUPLE_LIMIT", NULL},
 {"tuple-copy-opt", ' ', NULL, "Enable [disable] tuple (memcpy) optimization", "n", &fNoTupleCopyOpt, "CHPL_DISABLE_TUPLE_COPY_OPT", NULL},
 {"tuple-copy-limit", ' ', "<limit>", "Limit on the size of tuples considered for optimization", "I", &tuple_copy_limit, "CHPL_TUPLE_COPY_LIMIT", NULL},
 {"use-noinit", ' ', NULL, "Enable [disable] ability to skip default initialization through the keyword noinit", "N", &fUseNoinit, NULL, NULL},
 {"vectorize", ' ', NULL, "Enable [disable] generation of vectorization hints", "n", &fNoVectorize, "CHPL_DISABLE_VECTORIZATION", NULL},
 {"", ' ', NULL, "Run-time Semantic Check Options", NULL, NULL, NULL, NULL},
 {"no-checks", ' ', NULL, "Disable all following run-time checks", "F", &fNoChecks, "CHPL_NO_CHECKS", turnOffChecks},
 {"bounds-checks", ' ', NULL, "Enable [disable] bounds checking", "n", &fNoBoundsChecks, "CHPL_NO_BOUNDS_CHECKING", NULL},
 {"local-checks", ' ', NULL, "Enable [disable] local block checking", "n", &fNoLocalChecks, NULL, NULL},
 {"nil-checks", ' ', NULL, "Enable [disable] nil checking", "n", &fNoNilChecks, "CHPL_NO_NIL_CHECKS", NULL},
 {"stack-checks", ' ', NULL, "Enable [disable] stack overflow checking", "n", &fNoStackChecks, "CHPL_STACK_CHECKS", handleStackCheck},
 {"cast-checks", ' ', NULL, "Enable [disable] checks in safeCast calls", "n", &fNoCastChecks, NULL, NULL},
 {"", ' ', NULL, "C Code Generation Options", NULL, NULL, NULL, NULL},
 {"codegen", ' ', NULL, "[Don't] Do code generation", "n", &no_codegen, "CHPL_NO_CODEGEN", NULL},
 {"cpp-lines", ' ', NULL, "[Don't] Generate #line annotations", "N", &printCppLineno, "CHPL_CG_CPP_LINES", noteCppLinesSet},
 {"max-c-ident-len", ' ', NULL, "Maximum length of identifiers in generated code, 0 for unlimited", "I", &fMaxCIdentLen, "CHPL_MAX_C_IDENT_LEN", NULL},
 // BUGFIX(consistency): this entry was missing its trailing setter-function
 // initializer; static zero-initialization made it behave as NULL, but every
 // other record spells the field out explicitly.
 {"munge-user-idents", ' ', NULL, "[Don't] Munge user identifiers to avoid naming conflicts with external code", "N", &fMungeUserIdents, "CHPL_MUNGE_USER_IDENTS", NULL},
 {"savec", ' ', "<directory>", "Save generated C code in directory", "P", saveCDir, "CHPL_SAVEC_DIR", verifySaveCDir},
 {"", ' ', NULL, "C Code Compilation Options", NULL, NULL, NULL, NULL},
 {"ccflags", ' ', "<flags>", "Back-end C compiler flags", "S", NULL, "CHPL_CC_FLAGS", setCCFlags},
 {"debug", 'g', NULL, "[Don't] Support debugging of generated C code", "N", &debugCCode, "CHPL_DEBUG", setChapelDebug},
 {"dynamic", ' ', NULL, "Generate a dynamically linked binary", "F", &fLinkStyle, NULL, setDynamicLink},
 {"hdr-search-path", 'I', "<directory>", "C header search path", "P", incFilename, NULL, handleIncDir},
 {"ldflags", ' ', "<flags>", "Back-end C linker flags", "S256", ldflags, "CHPL_LD_FLAGS", NULL},
 {"lib-linkage", 'l', "<library>", "C library linkage", "P", libraryFilename, "CHPL_LIB_NAME", handleLibrary},
 {"lib-search-path", 'L', "<directory>", "C library search path", "P", libraryFilename, "CHPL_LIB_PATH", handleLibPath},
 {"make", ' ', "<make utility>", "Make utility for generated code", "S256", makeArgument, "CHPL_MAKE", handleMake},
 {"optimize", 'O', NULL, "[Don't] Optimize generated C code", "N", &optimizeCCode, "CHPL_OPTIMIZE", NULL},
 {"specialize", ' ', NULL, "[Don't] Specialize generated C code for CHPL_TARGET_ARCH", "N", &specializeCCode, "CHPL_SPECIALIZE", NULL},
 {"output", 'o', "<filename>", "Name output executable", "P", executableFilename, "CHPL_EXE_NAME", NULL},
 {"static", ' ', NULL, "Generate a statically linked binary", "F", &fLinkStyle, NULL, setStaticLink},
 {"", ' ', NULL, "LLVM Code Generation Options", NULL, NULL, NULL, NULL},
 {"llvm", ' ', NULL, "[Don't] use the LLVM code generator", "N", &llvmCodegen, "CHPL_LLVM_CODEGEN", NULL},
 {"llvm-wide-opt", ' ', NULL, "Enable [disable] LLVM wide pointer optimizations", "N", &fLLVMWideOpt, "CHPL_LLVM_WIDE_OPTS", NULL},
 {"", ' ', NULL, "Compilation Trace Options", NULL, NULL, NULL, NULL},
 {"print-commands", ' ', NULL, "[Don't] print system commands", "N", &printSystemCommands, "CHPL_PRINT_COMMANDS", NULL},
 {"print-passes", ' ', NULL, "[Don't] print compiler passes", "N", &printPasses, "CHPL_PRINT_PASSES", NULL},
 {"print-passes-file", ' ', "<filename>", "Print compiler passes to <filename>", "S", NULL, "CHPL_PRINT_PASSES_FILE", setPrintPassesFile},
 {"", ' ', NULL, "Miscellaneous Options", NULL, NULL, NULL, NULL},
 // Support for extern { c-code-here } blocks could be toggled with this
 // flag, but instead we just leave it on if the compiler can do it.
 // {"extern-c", ' ', NULL, "Enable [disable] extern C block support", "f", &externC, "CHPL_EXTERN_C", NULL},
 DRIVER_ARG_DEVELOPER,
 {"explain-call", ' ', "<call>[:<module>][:<line>]", "Explain resolution of call", "S256", fExplainCall, NULL, NULL},
 {"explain-instantiation", ' ', "<function|type>[:<module>][:<line>]", "Explain instantiation of type", "S256", fExplainInstantiation, NULL, NULL},
 {"explain-verbose", ' ', NULL, "Enable [disable] tracing of disambiguation with 'explain' options", "N", &fExplainVerbose, "CHPL_EXPLAIN_VERBOSE", NULL},
 {"instantiate-max", ' ', "<max>", "Limit number of instantiations", "I", &instantiation_limit, "CHPL_INSTANTIATION_LIMIT", NULL},
 {"print-callstack-on-error", ' ', NULL, "print the Chapel call stack leading to each error or warning", "N", &fPrintCallStackOnError, "CHPL_PRINT_CALLSTACK_ON_ERROR", NULL},
 {"set", 's', "<name>[=<value>]", "Set config param value", "S", NULL, NULL, readConfig},
 {"task-tracking", ' ', NULL, "Enable [disable] runtime task tracking", "N", &fEnableTaskTracking, "CHPL_TASK_TRACKING", handleTaskTracking},
 {"warn-const-loops", ' ', NULL, "Enable [disable] warnings for some 'while' loops with constant conditions", "N", &fWarnConstLoops, "CHPL_WARN_CONST_LOOPS", NULL},
 {"warn-special", ' ', NULL, "Enable [disable] special warnings", "n", &fNoWarnSpecial, "CHPL_WARN_SPECIAL", setWarnSpecial},
 {"warn-domain-literal", ' ', NULL, "Enable [disable] old domain literal syntax warnings", "n", &fNoWarnDomainLiteral, "CHPL_WARN_DOMAIN_LITERAL", setWarnDomainLiteral},
 {"warn-tuple-iteration", ' ', NULL, "Enable [disable] warnings for tuple iteration", "n", &fNoWarnTupleIteration, "CHPL_WARN_TUPLE_ITERATION", setWarnTupleIteration},
 {"no-warnings", ' ', NULL, "Disable output of warnings", "F", &ignore_warnings, "CHPL_DISABLE_WARNINGS", NULL},
 {"", ' ', NULL, "Compiler Information Options", NULL, NULL, NULL, NULL},
 DRIVER_ARG_COPYRIGHT,
 DRIVER_ARG_HELP,
 DRIVER_ARG_HELP_ENV,
 DRIVER_ARG_HELP_SETTINGS,
 DRIVER_ARG_LICENSE,
 DRIVER_ARG_VERSION,
 {"", ' ', NULL, "Developer Flags -- Debug Output", NULL, NULL, NULL, NULL},
 {"cc-warnings", ' ', NULL, "[Don't] Give warnings for generated code", "N", &ccwarnings, "CHPL_CC_WARNINGS", NULL},
 {"c-line-numbers", ' ', NULL, "Use C code line numbers and filenames", "F", &fCLineNumbers, NULL, NULL},
 {"gen-ids", ' ', NULL, "[Don't] pepper generated code with BaseAST::ids", "N", &fGenIDS, "CHPL_GEN_IDS", NULL},
 {"html", 't', NULL, "Dump IR in HTML format (toggle)", "T", &fdump_html, "CHPL_HTML", NULL},
 {"html-user", ' ', NULL, "Dump IR in HTML for user module(s) only (toggle)", "T", &fdump_html, "CHPL_HTML_USER", setHtmlUser},
 {"html-wrap-lines", ' ', NULL, "[Don't] allow wrapping lines in HTML dumps", "N", &fdump_html_wrap_lines, "CHPL_HTML_WRAP_LINES", NULL},
 {"html-print-block-ids", ' ', NULL, "[Don't] print block IDs in HTML dumps", "N", &fdump_html_print_block_IDs, "CHPL_HTML_PRINT_BLOCK_IDS", NULL},
 {"html-chpl-home", ' ', NULL, "Path to use instead of CHPL_HOME in HTML dumps", "P", fdump_html_chpl_home, "CHPL_HTML_CHPL_HOME", NULL},
 {"log", 'd', "<letters>", "Dump IR in text format. See runpasses.cpp for definition of <letters>. Empty argument (\"-d=\" or \"--log=\") means \"log all passes\"", "S512", log_flags, "CHPL_LOG_FLAGS", log_flags_arg},
 {"log-dir", ' ', "<path>", "Specify log directory", "P", log_dir, "CHPL_LOG_DIR", NULL},
 {"log-ids", ' ', NULL, "[Don't] include BaseAST::ids in log files", "N", &fLogIds, "CHPL_LOG_IDS", NULL},
 {"log-module", ' ', "<module-name>", "Restrict IR dump to the named module", "S256", log_module, "CHPL_LOG_MODULE", NULL},
 // {"log-symbol", ' ', "<symbol-name>", "Restrict IR dump to the named symbol(s)", "S256", log_symbol, "CHPL_LOG_SYMBOL", NULL}, // This doesn't work yet.
 {"verify", ' ', NULL, "Run consistency checks during compilation", "N", &fVerify, "CHPL_VERIFY", NULL},
 {"parser-debug", 'D', NULL, "Set parser debug level", "+", &debugParserLevel, "CHPL_PARSER_DEBUG", NULL},
 {"debug-short-loc", ' ', NULL, "Display long [short] location in certain debug outputs", "N", &debugShortLoc, "CHPL_DEBUG_SHORT_LOC", NULL},
 {"print-emitted-code-size", ' ', NULL, "Print emitted code size", "F", &fPrintEmittedCodeSize, NULL, NULL},
 {"print-module-resolution", ' ', NULL, "Print name of module being resolved", "F", &fPrintModuleResolution, "CHPL_PRINT_MODULE_RESOLUTION", NULL},
 {"print-dispatch", ' ', NULL, "Print dynamic dispatch table", "F", &fPrintDispatch, NULL, NULL},
 {"print-statistics", ' ', "[n|k|t]", "Print AST statistics", "S256", fPrintStatistics, NULL, NULL},
 {"report-inlining", ' ', NULL, "Print inlined functions", "F", &report_inlining, NULL, NULL},
 {"report-dead-blocks", ' ', NULL, "Print dead block removal stats", "F", &fReportDeadBlocks, NULL, NULL},
 {"report-dead-modules", ' ', NULL, "Print dead module removal stats", "F", &fReportDeadModules, NULL, NULL},
 {"report-optimized-loop-iterators", ' ', NULL, "Print stats on optimized single loop iterators", "F", &fReportOptimizedLoopIterators, NULL, NULL},
 {"report-order-independent-loops", ' ', NULL, "Print stats on order independent loops", "F", &fReportOrderIndependentLoops, NULL, NULL},
 {"report-optimized-on", ' ', NULL, "Print information about on clauses that have been optimized for potential fast remote fork operation", "F", &fReportOptimizedOn, NULL, NULL},
 {"report-promotion", ' ', NULL, "Print information about scalar promotion", "F", &fReportPromotion, NULL, NULL},
 {"report-scalar-replace", ' ', NULL, "Print scalar replacement stats", "F", &fReportScalarReplace, NULL, NULL},
 {"", ' ', NULL, "Developer Flags -- Miscellaneous", NULL, NULL, NULL, NULL},
 {"break-on-id", ' ', NULL, "Break when AST id is created", "I", &breakOnID, "CHPL_BREAK_ON_ID", NULL},
 {"break-on-delete-id", ' ', NULL, "Break when AST id is deleted", "I", &breakOnDeleteID, "CHPL_BREAK_ON_DELETE_ID", NULL},
 {"break-on-codegen", ' ', NULL, "Break when function cname is code generated", "S256", &breakOnCodegenCname, "CHPL_BREAK_ON_CODEGEN", NULL},
 {"default-dist", ' ', "<distribution>", "Change the default distribution", "S256", defaultDist, "CHPL_DEFAULT_DIST", NULL},
 {"explain-call-id", ' ', "<call-id>", "Explain resolution of call by ID", "I", &explainCallID, NULL, NULL},
 {"break-on-resolve-id", ' ', NULL, "Break when function call with AST id is resolved", "I", &breakOnResolveID, "CHPL_BREAK_ON_RESOLVE_ID", NULL},
 DRIVER_ARG_DEBUGGERS,
 {"heterogeneous", ' ', NULL, "Compile for heterogeneous nodes", "F", &fHeterogeneous, "", NULL},
 {"ignore-errors", ' ', NULL, "[Don't] attempt to ignore errors", "N", &ignore_errors, "CHPL_IGNORE_ERRORS", NULL},
 {"ignore-errors-for-pass", ' ', NULL, "[Don't] attempt to ignore errors until the end of the pass in which they occur", "N", &ignore_errors_for_pass, "CHPL_IGNORE_ERRORS_FOR_PASS", NULL},
 {"library", ' ', NULL, "Generate a Chapel library file", "F", &fLibraryCompile, NULL, NULL},
 {"localize-global-consts", ' ', NULL, "Enable [disable] optimization of global constants", "n", &fNoGlobalConstOpt, "CHPL_DISABLE_GLOBAL_CONST_OPT", NULL},
 {"local-temp-names", ' ', NULL, "[Don't] Generate locally-unique temp names", "N", &localTempNames, "CHPL_LOCAL_TEMP_NAMES", NULL},
 {"log-deleted-ids-to", ' ', "<filename>", "Log AST id and memory address of each deleted node to the specified file", "P", deletedIdFilename, "CHPL_DELETED_ID_FILENAME", NULL},
 {"memory-frees", ' ', NULL, "Enable [disable] memory frees in the generated code", "n", &fNoMemoryFrees, "CHPL_DISABLE_MEMORY_FREES", NULL},
 {"preserve-inlined-line-numbers", ' ', NULL, "[Don't] Preserve file names/line numbers in inlined code", "N", &preserveInlinedLineNumbers, "CHPL_PRESERVE_INLINED_LINE_NUMBERS", NULL},
 {"print-id-on-error", ' ', NULL, "[Don't] print AST id in error messages", "N", &fPrintIDonError, "CHPL_PRINT_ID_ON_ERROR", NULL},
 {"remove-empty-records", ' ', NULL, "Enable [disable] empty record removal", "n", &fNoRemoveEmptyRecords, "CHPL_DISABLE_REMOVE_EMPTY_RECORDS", NULL},
 {"remove-unreachable-blocks", ' ', NULL, "[Don't] remove unreachable blocks after resolution", "N", &fRemoveUnreachableBlocks, "CHPL_REMOVE_UNREACHABLE_BLOCKS", NULL},
 {"minimal-modules", ' ', NULL, "Enable [disable] using minimal modules", "N", &fMinimalModules, "CHPL_MINIMAL_MODULES", NULL},
 DRIVER_ARG_PRINT_CHPL_HOME,
 DRIVER_ARG_LAST
};
// Command-line parsing state shared with the arg.cpp machinery; populated
// by init_args()/init_arg_desc() and filled in by process_args() in main().
static ArgumentState sArgState = {
  0,
  0,
  "program",
  "path",
  NULL
};
// Resolve interactions between flags once all arguments have been parsed.
static void setupDependentVars() {
  // Developer mode suppresses #line annotations unless the user set
  // --cpp-lines explicitly (see noteCppLinesSet).
  if (developer) {
    if (!userSetCppLineno) {
      printCppLineno = false;
    }
  }
#ifndef HAVE_LLVM
  // --llvm is only usable when the compiler was built against LLVM.
  if (llvmCodegen) {
    USR_FATAL("This compiler was built without LLVM support");
  }
#endif
  // Specialization needs a concrete target architecture to do anything.
  bool archUnknown = (strcmp(CHPL_TARGET_ARCH, "unknown") == 0);
  if (specializeCCode && archUnknown) {
    USR_WARN("--specialize was set, but CHPL_TARGET_ARCH is 'unknown'. If "
             "you want any specialization to occur please set CHPL_TARGET_ARCH "
             "to a proper value.");
  }
}
// Emit any informational output requested on the command line (--version,
// --license, --copyright, --print-chpl-home, --help and friends) and
// clean_exit(0) when that output means compilation should not proceed.
// argv0 is the invocation path, used to guess the compiler's location for
// the CHPL_HOME report.
static void printStuff(const char* argv0) {
  bool shouldExit = false;
  bool printedSomething = false;
  if (fPrintVersion) {
    fprintf(stdout, "%s Version %s\n", sArgState.program_name, compileVersion);
    // --version also prints the copyright banner (handled below).
    fPrintCopyright = true;
    printedSomething = true;
    shouldExit = true;
  }
  if (fPrintLicense) {
    // The license text is compiled in as a string literal via #include.
    fprintf(stdout,
#include "LICENSE"
    );
    // The full license subsumes the copyright banner; don't print both.
    fPrintCopyright = false;
    shouldExit = true;
    printedSomething = true;
  }
  if (fPrintCopyright) {
    fprintf(stdout,
#include "COPYRIGHT"
    );
    printedSomething = true;
  }
  if( fPrintChplHome ) {
    // Report the configured CHPL_HOME alongside a guess derived from the
    // executable's own location.
    char* guess = findProgramPath(argv0);
    printf("%s\t%s\n", CHPL_HOME, guess);
    free(guess);
    printedSomething = true;
  }
  // Show usage when help was requested, or when nothing was printed and no
  // source files were given.
  if (fPrintHelp || (!printedSomething && sArgState.nfile_arguments < 1)) {
    if (printedSomething) printf("\n");
    usage(&sArgState, !fPrintHelp, fPrintEnvHelp, fPrintSettingsHelp);
    shouldExit = true;
    printedSomething = true;
  }
  // Informational output with no source files: nothing left to compile.
  if (printedSomething && sArgState.nfile_arguments < 1) {
    shouldExit = true;
  }
  if (shouldExit) {
    clean_exit(0);
  }
}
// Compiler entry point: parse arguments, initialize the AST/module
// machinery, run the compilation passes (or the chpldoc/IPE variants),
// then report pass timing if requested.
int main(int argc, char* argv[]) {
  PhaseTracker tracker;
  startCatchingSignals();
  {
    astlocMarker markAstLoc(0, "<internal>");
    tracker.StartPhase("init");
    init_args(&sArgState, argv[0]);
    // Behavior switches on the name the binary was invoked as: "chpldoc"
    // runs documentation generation, "chpl-ipe" the interactive mode.
    fDocs = (strcmp(sArgState.program_name, "chpldoc") == 0) ? true : false;
    fUseIPE = (strcmp(sArgState.program_name, "chpl-ipe") == 0) ? true : false;
    // Initialize the arguments for argument state. If chpldoc, use the docs
    // specific arguments. Otherwise, use the regular arguments.
    if (fDocs) {
      init_arg_desc(&sArgState, docs_arg_desc);
    } else {
      init_arg_desc(&sArgState, arg_desc);
    }
    initFlags();
    initRootModule();
    initPrimitive();
    initPrimitiveTypes();
    DefExpr* objectClass = defineObjectClass();
    initChplProgram(objectClass);
    initStringLiteralModule();
    setupOrderedGlobals(argv[0]);
    process_args(&sArgState, argc, argv);
    initCompilerGlobals(); // must follow argument parsing
    setupDependentVars();
    setupModulePaths();
    recordCodeGenStrings(argc, argv);
  } // astlocMarker scope
  // Informational flags (--version etc.) may exit here; IPE skips them.
  if (fUseIPE == false)
    printStuff(argv[0]);
  // --gdb / --lldb re-launch the compiler under the chosen debugger.
  if (fRungdb)
    runCompilerInGDB(argc, argv);
  if (fRunlldb)
    runCompilerInLLDB(argc, argv);
  addSourceFiles(sArgState.nfile_arguments, sArgState.file_argument);
  if (fUseIPE == false) {
    runPasses(tracker, fDocs);
  } else {
    ipeRun();
  }
  tracker.StartPhase("driverCleanup");
  free_args(&sArgState);
  tracker.Stop();
  // Emit per-pass timing for --print-passes / --print-passes-file.
  if (printPasses == true || printPassesFile != NULL) {
    tracker.ReportPass();
    tracker.ReportTotal();
    tracker.ReportRollup();
  }
  if (printPassesFile != NULL) {
    fclose(printPassesFile);
  }
  clean_exit(0);
  return 0; // NOTE(review): likely unreachable after clean_exit(0) — confirm
}
| hildeth/chapel | compiler/main/driver.cpp | C++ | apache-2.0 | 41,155 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io.hfile;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.CellComparator.MetaCellComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.BloomFilterWriter;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.io.Writable;
/**
 * Common functionality needed by all versions of {@link HFile} writers.
 */
@InterfaceAudience.Private
public class HFileWriterImpl implements HFile.Writer {
  private static final Log LOG = LogFactory.getLog(HFileWriterImpl.class);

  /** Sentinel meaning "offset not recorded yet" for the block offsets below. */
  private static final long UNSET = -1;

  /** if this feature is enabled, preCalculate encoded data size before real encoding happens*/
  public static final String UNIFIED_ENCODED_BLOCKSIZE_RATIO = "hbase.writer.unified.encoded.blocksize.ratio";

  /** Block size limit after encoding, used to unify encoded block Cache entry size*/
  private final int encodedBlockSizeLimit;

  /** The Cell previously appended. Becomes the last cell in the file.*/
  protected Cell lastCell = null;

  /** FileSystem stream to write into. */
  protected FSDataOutputStream outputStream;

  /** True if we opened the <code>outputStream</code> (and so will close it). */
  protected final boolean closeOutputStream;

  /** A "file info" block: a key-value map of file-wide metadata. */
  protected FileInfo fileInfo = new HFile.FileInfo();

  /** Total # of key/value entries, i.e. how many times add() was called. */
  protected long entryCount = 0;

  /** Used for calculating the average key length. */
  protected long totalKeyLength = 0;

  /** Used for calculating the average value length. */
  protected long totalValueLength = 0;

  /** Total uncompressed bytes, maybe calculate a compression ratio later. */
  protected long totalUncompressedBytes = 0;

  /** Key comparator. Used to ensure we write in order. */
  protected final CellComparator comparator;

  /** Meta block names. */
  protected List<byte[]> metaNames = new ArrayList<>();

  /** {@link Writable}s representing meta block data. */
  protected List<Writable> metaData = new ArrayList<>();

  /**
   * First cell in a block.
   * This reference should be short-lived since we write hfiles in a burst.
   */
  protected Cell firstCellInBlock = null;

  /** May be null if we were passed a stream. */
  protected final Path path;

  /** Cache configuration for caching data on write. */
  protected final CacheConfig cacheConf;

  /**
   * Name for this object used when logging or in toString. Is either
   * the result of a toString on stream or else name of passed file Path.
   */
  protected final String name;

  /**
   * The data block encoding which will be used.
   * {@link NoOpDataBlockEncoder#INSTANCE} if there is no encoding.
   */
  protected final HFileDataBlockEncoder blockEncoder;

  /** Writer-level HFile context (block size, data block encoding, compression). */
  protected final HFileContext hFileContext;

  // Presumably tracks the maximum tag length across appended cells —
  // TODO(review): confirm how it is consumed at close time.
  private int maxTagsLength = 0;

  /** KeyValue version in FileInfo */
  public static final byte [] KEY_VALUE_VERSION = Bytes.toBytes("KEY_VALUE_VERSION");

  /** Version for KeyValue which includes memstore timestamp */
  public static final int KEY_VALUE_VER_WITH_MEMSTORE = 1;

  /** Inline block writers for multi-level block index and compound Blooms. */
  private List<InlineBlockWriter> inlineBlockWriters = new ArrayList<>();

  /** block writer */
  protected HFileBlock.Writer blockWriter;

  /** Index writers created in finishInit(); the data index may be cached on write. */
  private HFileBlockIndex.BlockIndexWriter dataBlockIndexWriter;
  private HFileBlockIndex.BlockIndexWriter metaBlockIndexWriter;

  /** The offset of the first data block or -1 (UNSET) if the file is empty. */
  private long firstDataBlockOffset = UNSET;

  /** The offset of the last data block or -1 (UNSET) if the file is empty. */
  protected long lastDataBlockOffset = UNSET;

  /**
   * The last(stop) Cell of the previous data block.
   * This reference should be short-lived since we write hfiles in a burst.
   */
  private Cell lastCellOfPreviousBlock = null;

  /** Additional data items to be written to the "load-on-open" section. */
  private List<BlockWritable> additionalLoadOnOpenData = new ArrayList<>();

  // Largest memstore timestamp observed — TODO(review): confirm it is
  // recorded in file info when the writer is closed.
  protected long maxMemstoreTS = 0;
public HFileWriterImpl(final Configuration conf, CacheConfig cacheConf, Path path,
FSDataOutputStream outputStream,
CellComparator comparator, HFileContext fileContext) {
this.outputStream = outputStream;
this.path = path;
this.name = path != null ? path.getName() : outputStream.toString();
this.hFileContext = fileContext;
DataBlockEncoding encoding = hFileContext.getDataBlockEncoding();
if (encoding != DataBlockEncoding.NONE) {
this.blockEncoder = new HFileDataBlockEncoderImpl(encoding);
} else {
this.blockEncoder = NoOpDataBlockEncoder.INSTANCE;
}
this.comparator = comparator != null? comparator: CellComparator.COMPARATOR;
closeOutputStream = path != null;
this.cacheConf = cacheConf;
float encodeBlockSizeRatio = conf.getFloat(UNIFIED_ENCODED_BLOCKSIZE_RATIO, 1f);
this.encodedBlockSizeLimit = (int)(hFileContext.getBlocksize() * encodeBlockSizeRatio);
finishInit(conf);
if (LOG.isTraceEnabled()) {
LOG.trace("Writer" + (path != null ? " for " + path : "") +
" initialized with cacheConf: " + cacheConf +
" comparator: " + comparator.getClass().getSimpleName() +
" fileContext: " + fileContext);
}
}
/**
* Add to the file info. All added key/value pairs can be obtained using
* {@link HFile.Reader#loadFileInfo()}.
*
* @param k Key
* @param v Value
* @throws IOException in case the key or the value are invalid
*/
@Override
public void appendFileInfo(final byte[] k, final byte[] v)
throws IOException {
fileInfo.append(k, v, true);
}
/**
* Sets the file info offset in the trailer, finishes up populating fields in
* the file info, and writes the file info into the given data output. The
* reason the data output is not always {@link #outputStream} is that we store
* file info as a block in version 2.
*
* @param trailer fixed file trailer
* @param out the data output to write the file info to
* @throws IOException
*/
protected final void writeFileInfo(FixedFileTrailer trailer, DataOutputStream out)
throws IOException {
trailer.setFileInfoOffset(outputStream.getPos());
finishFileInfo();
long startTime = System.currentTimeMillis();
fileInfo.write(out);
HFile.updateWriteLatency(System.currentTimeMillis() - startTime);
}
/**
 * Verifies that the given Cell's key does not violate the writer's key
 * ordering: it must not sort before the previously appended cell.
 *
 * @param cell Cell whose key to check.
 * @return true if the key duplicates the previous key
 * @throws IOException if the cell is null or its key is out of order
 */
protected boolean checkKey(final Cell cell) throws IOException {
  if (cell == null) {
    throw new IOException("Key cannot be null or empty");
  }
  if (lastCell == null) {
    // First cell appended to the file: nothing to compare against.
    return false;
  }
  final int order = comparator.compareKeyIgnoresMvcc(lastCell, cell);
  if (order > 0) {
    throw new IOException("Added a key not lexically larger than"
        + " previous. Current cell = " + cell + ", lastCell = " + lastCell);
  }
  // Equal keys are allowed but reported as duplicates to the caller.
  return order == 0;
}
/**
 * Checks the given value for validity. Only nullness is verified; the
 * offset/length pair is currently accepted without range checks.
 */
protected void checkValue(final byte[] value, final int offset,
    final int length) throws IOException {
  if (value == null) {
    throw new IOException("Value cannot be null");
  }
}
/**
 * @return the Path this writer writes to, or null if we were passed a
 *         stream rather than a Path at construction time.
 */
@Override
public Path getPath() {
  return path;
}
/** Renders the writer's identity: target path (or null), name, compression. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder("writer=");
  sb.append(path != null ? path.toString() : null);
  sb.append(", name=").append(name);
  sb.append(", compression=").append(hFileContext.getCompression().getName());
  return sb.toString();
}
/**
 * Resolves a compression algorithm from its configured name, falling back to
 * {@code HFile.DEFAULT_COMPRESSION_ALGORITHM} when no name is supplied.
 */
public static Compression.Algorithm compressionByName(String algoName) {
  return algoName == null
      ? HFile.DEFAULT_COMPRESSION_ALGORITHM
      : Compression.getCompressionAlgorithmByName(algoName);
}
/** A helper method to create HFile output streams in constructors */
protected static FSDataOutputStream createOutputStream(Configuration conf,
    FileSystem fs, Path path, InetSocketAddress[] favoredNodes) throws IOException {
  // Derive data-file permissions from the configured umask before creating
  // the file; favoredNodes optionally pins block replicas to those hosts.
  FsPermission perms = FSUtils.getFilePermissions(fs, conf,
      HConstants.DATA_FILE_UMASK_KEY);
  return FSUtils.create(conf, fs, path, perms, favoredNodes);
}
/** Additional initialization steps: builds the block writer and index writers. */
protected void finishInit(final Configuration conf) {
  // Guard against double initialization.
  if (blockWriter != null) {
    throw new IllegalStateException("finishInit called twice");
  }
  blockWriter = new HFileBlock.Writer(blockEncoder, hFileContext);
  // Data block index writer; only wire the cache/name through when index
  // blocks should be cached as they are written.
  boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite();
  dataBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(blockWriter,
      cacheIndexesOnWrite ? cacheConf : null,
      cacheIndexesOnWrite ? name : null);
  dataBlockIndexWriter.setMaxChunkSize(
      HFileBlockIndex.getMaxChunkSize(conf));
  dataBlockIndexWriter.setMinIndexNumEntries(
      HFileBlockIndex.getMinIndexNumEntries(conf));
  // Register the data index writer so it can emit inline (leaf) index blocks
  // between data blocks; see writeInlineBlocks().
  inlineBlockWriters.add(dataBlockIndexWriter);
  // Meta data block index writer
  metaBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter();
  if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf);
}
/**
 * At a block boundary, write all the inline blocks and opens new block.
 *
 * @throws IOException if flushing the current block fails
 */
protected void checkBlockBoundary() throws IOException {
  // for encoder like prefixTree, encoded size is not available, so we have to
  // compare both encoded size and unencoded size to blocksize limit.
  if (blockWriter.encodedBlockSizeWritten() >= encodedBlockSizeLimit
      || blockWriter.blockSizeWritten() >= hFileContext.getBlocksize()) {
    finishBlock();
    writeInlineBlocks(false);
    newBlock();
  }
}
/** Clean up the data block that is currently being written.*/
private void finishBlock() throws IOException {
  // Nothing to flush when no block is open or it received no cells.
  if (!blockWriter.isWriting() || blockWriter.blockSizeWritten() == 0) return;
  // Update the first data block offset if UNSET; used scanning.
  if (firstDataBlockOffset == UNSET) {
    firstDataBlockOffset = outputStream.getPos();
  }
  // Update the last data block offset each time through here.
  lastDataBlockOffset = outputStream.getPos();
  blockWriter.writeHeaderAndData(outputStream);
  int onDiskSize = blockWriter.getOnDiskSizeWithHeader();
  // Generate a shortened index key that still sorts between the last cell of
  // the previous block and the first cell of this one (saves index space).
  Cell indexEntry =
      getMidpoint(this.comparator, lastCellOfPreviousBlock, firstCellInBlock);
  dataBlockIndexWriter.addEntry(CellUtil.getCellKeySerializedAsKeyValueKey(indexEntry),
      lastDataBlockOffset, onDiskSize);
  totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
  if (cacheConf.shouldCacheDataOnWrite()) {
    doCacheOnWrite(lastDataBlockOffset);
  }
}
/**
 * Try to return a Cell that falls between <code>left</code> and
 * <code>right</code> but that is shorter; i.e. takes up less space. This
 * trick is used building HFile block index. Its an optimization. It does not
 * always work. In this case we'll just return the <code>right</code> cell.
 *
 * @param comparator Comparator to use.
 * @param left lower bound; may be null (e.g. before the first block)
 * @param right upper bound; must not be null
 * @return A cell that sorts between <code>left</code> and <code>right</code>.
 */
public static Cell getMidpoint(final CellComparator comparator, final Cell left,
    final Cell right) {
  // TODO: Redo so only a single pass over the arrays rather than one to
  // compare and then a second composing midpoint.
  if (right == null) {
    throw new IllegalArgumentException("right cell can not be null");
  }
  if (left == null) {
    return right;
  }
  // If Cells from meta table, don't mess around. meta table Cells have schema
  // (table,startrow,hash) so can't be treated as plain byte arrays. Just skip
  // out without trying to do this optimization.
  if (comparator instanceof MetaCellComparator) {
    return right;
  }
  int diff = comparator.compareRows(left, right);
  if (diff > 0) {
    throw new IllegalArgumentException("Left row sorts after right row; left="
        + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
  }
  byte[] midRow;
  // The zero-copy ByteBuffer path needs both cells to be buffer backed.
  boolean bufferBacked = left instanceof ByteBufferCell && right instanceof ByteBufferCell;
  if (diff < 0) {
    // Left row is < right row.
    if (bufferBacked) {
      midRow = getMinimumMidpointArray(((ByteBufferCell) left).getRowByteBuffer(),
          ((ByteBufferCell) left).getRowPosition(), left.getRowLength(),
          ((ByteBufferCell) right).getRowByteBuffer(),
          ((ByteBufferCell) right).getRowPosition(), right.getRowLength());
    } else {
      midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
          left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
    }
    // If midRow is null, just return 'right'. Can't do optimization.
    if (midRow == null) return right;
    return CellUtil.createFirstOnRow(midRow);
  }
  // Rows are same. Compare on families.
  diff = CellComparator.compareFamilies(left, right);
  if (diff > 0) {
    throw new IllegalArgumentException("Left family sorts after right family; left="
        + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
  }
  if (diff < 0) {
    if (bufferBacked) {
      midRow = getMinimumMidpointArray(((ByteBufferCell) left).getFamilyByteBuffer(),
          ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(),
          ((ByteBufferCell) right).getFamilyByteBuffer(),
          ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength());
    } else {
      midRow = getMinimumMidpointArray(left.getFamilyArray(), left.getFamilyOffset(),
          left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
          right.getFamilyLength());
    }
    // If midRow is null, just return 'right'. Can't do optimization.
    if (midRow == null) return right;
    // Return new Cell where we use right row and then a mid sort family.
    return CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length);
  }
  // Families are same. Compare on qualifiers.
  diff = CellComparator.compareQualifiers(left, right);
  if (diff > 0) {
    throw new IllegalArgumentException("Left qualifier sorts after right qualifier; left="
        + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
  }
  if (diff < 0) {
    if (bufferBacked) {
      midRow = getMinimumMidpointArray(((ByteBufferCell) left).getQualifierByteBuffer(),
          ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(),
          ((ByteBufferCell) right).getQualifierByteBuffer(),
          ((ByteBufferCell) right).getQualifierPosition(), right.getQualifierLength());
    } else {
      midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
          left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
          right.getQualifierLength());
    }
    // If midRow is null, just return 'right'. Can't do optimization.
    if (midRow == null) return right;
    // Return new Cell where we use right row and family and then a mid sort qualifier.
    return CellUtil.createFirstOnRowCol(right, midRow, 0, midRow.length);
  }
  // No opportunity for optimization. Just return right key.
  return right;
}
/**
 * @param leftArray byte array holding the smaller key
 * @param leftOffset start of the left key within leftArray
 * @param leftLength length of the left key
 * @param rightArray byte array holding the larger key
 * @param rightOffset start of the right key within rightArray
 * @param rightLength length of the right key
 * @return Return a new array that is between left and right and minimally
 *         sized else just return null as indicator that we could not create a
 *         mid point.
 */
private static byte[] getMinimumMidpointArray(final byte[] leftArray, final int leftOffset,
    final int leftLength, final byte[] rightArray, final int rightOffset, final int rightLength) {
  // rows are different
  int minLength = leftLength < rightLength ? leftLength : rightLength;
  int diffIdx = 0;
  // Find the first index at which the two keys differ.
  while (diffIdx < minLength
      && leftArray[leftOffset + diffIdx] == rightArray[rightOffset + diffIdx]) {
    diffIdx++;
  }
  byte[] minimumMidpointArray = null;
  if (diffIdx >= minLength) {
    // leftKey's row is prefix of rightKey's.
    minimumMidpointArray = new byte[diffIdx + 1];
    System.arraycopy(rightArray, rightOffset, minimumMidpointArray, 0, diffIdx + 1);
  } else {
    int diffByte = leftArray[leftOffset + diffIdx];
    // NOTE(review): diffByte is a signed byte promoted to int, so for a
    // negative diffByte the unmasked (diffByte + 1) comparison always takes
    // this branch; the wrap-around (byte)(diffByte + 1) still yields the
    // unsigned increment, so the result matches the fallback below in the
    // boundary case -- confirm this is intentional before "fixing" it.
    if ((0xff & diffByte) < 0xff && (diffByte + 1) < (rightArray[rightOffset + diffIdx] & 0xff)) {
      // Common prefix plus left's differing byte incremented by one.
      minimumMidpointArray = new byte[diffIdx + 1];
      System.arraycopy(leftArray, leftOffset, minimumMidpointArray, 0, diffIdx);
      minimumMidpointArray[diffIdx] = (byte) (diffByte + 1);
    } else {
      // Fall back to the right key's prefix up to and including the diff byte.
      minimumMidpointArray = new byte[diffIdx + 1];
      System.arraycopy(rightArray, rightOffset, minimumMidpointArray, 0, diffIdx + 1);
    }
  }
  return minimumMidpointArray;
}
/**
 * ByteBuffer flavour of the byte[] overload above: returns a new, minimally
 * sized array sorting between the two buffer-backed keys, or a prefix of the
 * right key when no shorter midpoint exists.
 */
private static byte[] getMinimumMidpointArray(ByteBuffer left, int leftOffset, int leftLength,
    ByteBuffer right, int rightOffset, int rightLength) {
  // rows are different
  int minLength = leftLength < rightLength ? leftLength : rightLength;
  int diffIdx = 0;
  // Find the first index at which the two keys differ.
  while (diffIdx < minLength && ByteBufferUtils.toByte(left,
      leftOffset + diffIdx) == ByteBufferUtils.toByte(right, rightOffset + diffIdx)) {
    diffIdx++;
  }
  byte[] minMidpoint = null;
  if (diffIdx >= minLength) {
    // leftKey's row is prefix of rightKey's.
    minMidpoint = new byte[diffIdx + 1];
    ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
  } else {
    int diffByte = ByteBufferUtils.toByte(left, leftOffset + diffIdx);
    // NOTE(review): signed diffByte means a negative value always enters this
    // branch (unmasked diffByte + 1 comparison); the wrap-around increment
    // still produces the unsigned successor byte -- confirm intent.
    if ((0xff & diffByte) < 0xff
        && (diffByte + 1) < (ByteBufferUtils.toByte(right, rightOffset + diffIdx) & 0xff)) {
      // Common prefix plus left's differing byte incremented by one.
      minMidpoint = new byte[diffIdx + 1];
      ByteBufferUtils.copyFromBufferToArray(minMidpoint, left, leftOffset, 0, diffIdx);
      minMidpoint[diffIdx] = (byte) (diffByte + 1);
    } else {
      // Fall back to the right key's prefix up to and including the diff byte.
      minMidpoint = new byte[diffIdx + 1];
      ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
    }
  }
  return minMidpoint;
}
/** Gives inline block writers an opportunity to contribute blocks. */
private void writeInlineBlocks(boolean closing) throws IOException {
  for (InlineBlockWriter ibw : inlineBlockWriters) {
    // A single writer may emit several blocks back to back (e.g. a
    // multi-level index flushing more than one chunk).
    while (ibw.shouldWriteBlock(closing)) {
      long offset = outputStream.getPos();
      boolean cacheThisBlock = ibw.getCacheOnWrite();
      ibw.writeInlineBlock(blockWriter.startWriting(
          ibw.getInlineBlockType()));
      blockWriter.writeHeaderAndData(outputStream);
      // Tell the writer where its block landed and how big it ended up.
      ibw.blockWritten(offset, blockWriter.getOnDiskSizeWithHeader(),
          blockWriter.getUncompressedSizeWithoutHeader());
      totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
      if (cacheThisBlock) {
        doCacheOnWrite(offset);
      }
    }
  }
}
/**
 * Caches the last written HFile block.
 * @param offset the offset of the block we want to cache. Used to determine
 *          the cache key.
 */
private void doCacheOnWrite(long offset) {
  // Convert the just-written block into its cache-format representation
  // before inserting it under a (name, offset) cache key.
  HFileBlock cacheFormatBlock = blockWriter.getBlockForCaching(cacheConf);
  cacheConf.getBlockCache().cacheBlock(
      new BlockCacheKey(name, offset, true, cacheFormatBlock.getBlockType()),
      cacheFormatBlock);
}
/**
 * Ready a new block for writing.
 *
 * @throws IOException if the block writer cannot start a new block
 */
protected void newBlock() throws IOException {
  // This is where the next block begins.
  blockWriter.startWriting(BlockType.DATA);
  firstCellInBlock = null;
  // Remember the final cell of the block just finished; it seeds the
  // shortened index key computed in finishBlock()/getMidpoint().
  if (lastCell != null) {
    lastCellOfPreviousBlock = lastCell;
  }
}
/**
 * Adds a meta block to the end of the file. Call before close(). Metadata
 * blocks are expensive: fill one with a bunch of serialized data rather than
 * doing a metadata block per metadata instance. If the metadata is small,
 * consider adding it to the file info instead via
 * {@link #appendFileInfo(byte[], byte[])}.
 *
 * @param metaBlockName name of the block; entries are kept sorted by name
 * @param content will call readFields to get data later (DO NOT REUSE)
 */
@Override
public void appendMetaBlock(String metaBlockName, Writable content) {
  final byte[] key = Bytes.toBytes(metaBlockName);
  // Locate the first existing entry that sorts after the new key so the
  // parallel name/data lists stay ordered.
  int insertAt = 0;
  while (insertAt < metaNames.size()) {
    final byte[] existing = metaNames.get(insertAt);
    if (Bytes.BYTES_RAWCOMPARATOR.compare(existing, 0, existing.length,
        key, 0, key.length) > 0) {
      break;
    }
    insertAt++;
  }
  metaNames.add(insertAt, key);
  metaData.add(insertAt, content);
}
/**
 * Finalizes the file: flushes the open data block, writes meta blocks, the
 * data/meta block indexes, the file info, any extra load-on-open data, and
 * the trailer. Safe to call only once; a second call is a no-op because the
 * stream is nulled out in finishClose().
 */
@Override
public void close() throws IOException {
  if (outputStream == null) {
    return;
  }
  // Save data block encoder metadata in the file info.
  blockEncoder.saveMetadata(this);
  // Write out the end of the data blocks, then write meta data blocks.
  // followed by fileinfo, data block index and meta block index.
  finishBlock();
  writeInlineBlocks(true);
  FixedFileTrailer trailer = new FixedFileTrailer(getMajorVersion(), getMinorVersion());
  // Write out the metadata blocks if any.
  if (!metaNames.isEmpty()) {
    for (int i = 0; i < metaNames.size(); ++i) {
      // store the beginning offset
      long offset = outputStream.getPos();
      // write the metadata content
      DataOutputStream dos = blockWriter.startWriting(BlockType.META);
      metaData.get(i).write(dos);
      blockWriter.writeHeaderAndData(outputStream);
      totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
      // Add the new meta block to the meta index.
      metaBlockIndexWriter.addEntry(metaNames.get(i), offset,
          blockWriter.getOnDiskSizeWithHeader());
    }
  }
  // Load-on-open section.
  // Data block index.
  //
  // In version 2, this section of the file starts with the root level data
  // block index. We call a function that writes intermediate-level blocks
  // first, then root level, and returns the offset of the root level block
  // index.
  long rootIndexOffset = dataBlockIndexWriter.writeIndexBlocks(outputStream);
  trailer.setLoadOnOpenOffset(rootIndexOffset);
  // Meta block index.
  metaBlockIndexWriter.writeSingleLevelIndex(blockWriter.startWriting(
      BlockType.ROOT_INDEX), "meta");
  blockWriter.writeHeaderAndData(outputStream);
  totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
  // Record MVCC metadata when sequence ids were tracked for this file.
  if (this.hFileContext.isIncludesMvcc()) {
    appendFileInfo(MAX_MEMSTORE_TS_KEY, Bytes.toBytes(maxMemstoreTS));
    appendFileInfo(KEY_VALUE_VERSION, Bytes.toBytes(KEY_VALUE_VER_WITH_MEMSTORE));
  }
  // File info
  writeFileInfo(trailer, blockWriter.startWriting(BlockType.FILE_INFO));
  blockWriter.writeHeaderAndData(outputStream);
  totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
  // Load-on-open data supplied by higher levels, e.g. Bloom filters.
  for (BlockWritable w : additionalLoadOnOpenData){
    blockWriter.writeBlock(w, outputStream);
    totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
  }
  // Now finish off the trailer.
  trailer.setNumDataIndexLevels(dataBlockIndexWriter.getNumLevels());
  trailer.setUncompressedDataIndexSize(
      dataBlockIndexWriter.getTotalUncompressedSize());
  trailer.setFirstDataBlockOffset(firstDataBlockOffset);
  trailer.setLastDataBlockOffset(lastDataBlockOffset);
  trailer.setComparatorClass(comparator.getClass());
  trailer.setDataIndexCount(dataBlockIndexWriter.getNumRootEntries());
  finishClose(trailer);
  blockWriter.release();
}
/** Registers a writer that may contribute inline blocks at block boundaries. */
@Override
public void addInlineBlockWriter(InlineBlockWriter ibw) {
  inlineBlockWriters.add(ibw);
}
/** Adds a general (row/row-col) bloom filter to the load-on-open section. */
@Override
public void addGeneralBloomFilter(final BloomFilterWriter bfw) {
  this.addBloomFilter(bfw, BlockType.GENERAL_BLOOM_META);
}
/** Adds a delete-family bloom filter to the load-on-open section. */
@Override
public void addDeleteFamilyBloomFilter(final BloomFilterWriter bfw) {
  this.addBloomFilter(bfw, BlockType.DELETE_FAMILY_BLOOM_META);
}
/**
 * Queues a bloom filter for inclusion in the load-on-open section written by
 * {@link #close()}. Empty filters are skipped; only the two bloom meta block
 * types are accepted.
 *
 * @param bfw the bloom filter writer supplying meta and data
 * @param blockType must be GENERAL_BLOOM_META or DELETE_FAMILY_BLOOM_META
 */
private void addBloomFilter(final BloomFilterWriter bfw,
    final BlockType blockType) {
  // Nothing to persist for an empty filter.
  if (bfw.getKeyCount() <= 0) {
    return;
  }
  if (blockType != BlockType.GENERAL_BLOOM_META &&
      blockType != BlockType.DELETE_FAMILY_BLOOM_META) {
    // Fix: original message had no space before "is", yielding e.g.
    // "Block Type: GENERAL_BLOOM_METAis not supported".
    throw new RuntimeException("Block Type: " + blockType.toString() +
        " is not supported");
  }
  additionalLoadOnOpenData.add(new BlockWritable() {
    @Override
    public BlockType getBlockType() {
      return blockType;
    }
    @Override
    public void writeToBlock(DataOutput out) throws IOException {
      // Meta first, then the (optional) bloom data section.
      bfw.getMetaWriter().write(out);
      Writable dataWriter = bfw.getDataWriter();
      if (dataWriter != null) {
        dataWriter.write(out);
      }
    }
  });
}
/** @return the HFileContext this writer was configured with. */
@Override
public HFileContext getFileContext() {
  return hFileContext;
}
/**
 * Add key/value to file. Keys must be added in an order that agrees with the
 * Comparator passed on construction.
 *
 * @param cell
 *          Cell to add. Cannot be empty nor null.
 * @throws IOException if the cell is out of order or the write fails
 */
@Override
public void append(final Cell cell) throws IOException {
  // checkKey uses comparator to check we are writing in order.
  boolean dupKey = checkKey(cell);
  // Only roll the block on a fresh key -- presumably so duplicate keys are
  // never split across block boundaries; verify against readers.
  if (!dupKey) {
    checkBlockBoundary();
  }
  if (!blockWriter.isWriting()) {
    newBlock();
  }
  blockWriter.write(cell);
  // Accumulate stats used for AVG_KEY_LEN / AVG_VALUE_LEN in finishFileInfo().
  totalKeyLength += CellUtil.estimatedSerializedSizeOfKey(cell);
  totalValueLength += cell.getValueLength();
  // Are we the first key in this block?
  if (firstCellInBlock == null) {
    // If cell is big, block will be closed and this firstCellInBlock reference will only last
    // a short while.
    firstCellInBlock = cell;
  }
  // TODO: What if cell is 10MB and we write infrequently? We hold on to cell here indefinitely?
  lastCell = cell;
  entryCount++;
  this.maxMemstoreTS = Math.max(this.maxMemstoreTS, cell.getSequenceId());
  int tagsLength = cell.getTagsLength();
  if (tagsLength > this.maxTagsLength) {
    this.maxTagsLength = tagsLength;
  }
}
/**
 * Called before buffers are shipped/recycled. Clones the key portion of the
 * retained cells into fresh cells -- presumably because the originals may
 * reference memory that becomes invalid after shipping; verify against the
 * ShipperListener contract.
 */
@Override
public void beforeShipped() throws IOException {
  // Add clone methods for every cell
  if (this.lastCell != null) {
    this.lastCell = KeyValueUtil.toNewKeyCell(this.lastCell);
  }
  if (this.firstCellInBlock != null) {
    this.firstCellInBlock = KeyValueUtil.toNewKeyCell(this.firstCellInBlock);
  }
  if (this.lastCellOfPreviousBlock != null) {
    this.lastCellOfPreviousBlock = KeyValueUtil.toNewKeyCell(this.lastCellOfPreviousBlock);
  }
}
/**
 * Populates the remaining file-info entries (last key, average key/value
 * lengths, create time, tag metadata) just before the file-info block is
 * serialized by writeFileInfo().
 */
protected void finishFileInfo() throws IOException {
  if (lastCell != null) {
    // Make a copy. The copy is stuffed into our fileinfo map. Needs a clean
    // byte buffer. Won't take a tuple.
    byte [] lastKey = CellUtil.getCellKeySerializedAsKeyValueKey(this.lastCell);
    fileInfo.append(FileInfo.LASTKEY, lastKey, false);
  }
  // Average key length.
  int avgKeyLen =
      entryCount == 0 ? 0 : (int) (totalKeyLength / entryCount);
  fileInfo.append(FileInfo.AVG_KEY_LEN, Bytes.toBytes(avgKeyLen), false);
  fileInfo.append(FileInfo.CREATE_TIME_TS, Bytes.toBytes(hFileContext.getFileCreateTime()),
      false);
  // Average value length.
  int avgValueLen =
      entryCount == 0 ? 0 : (int) (totalValueLength / entryCount);
  fileInfo.append(FileInfo.AVG_VALUE_LEN, Bytes.toBytes(avgValueLen), false);
  if (hFileContext.getDataBlockEncoding() == DataBlockEncoding.PREFIX_TREE) {
    // In case of Prefix Tree encoding, we always write tags information into HFiles even if all
    // KVs are having no tags.
    fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
  } else if (hFileContext.isIncludesTags()) {
    // Tags are being written: record the max tag length and whether tags
    // were compressed. (When tags are not written, MAX_TAGS_LEN is excluded
    // from the FileInfo.)
    fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
    boolean tagsCompressed = (hFileContext.getDataBlockEncoding() != DataBlockEncoding.NONE)
        && hFileContext.isCompressTags();
    fileInfo.append(FileInfo.TAGS_COMPRESSED, Bytes.toBytes(tagsCompressed), false);
  }
}
/** @return the HFile major format version this writer produces. */
protected int getMajorVersion() {
  return 3;
}
/** @return the HFile minor format version this writer produces. */
protected int getMinorVersion() {
  return HFileReaderImpl.MAX_MINOR_VERSION;
}
/**
 * Writes encryption metadata (if any) and the fixed file trailer, then
 * closes the output stream when this writer owns it.
 */
protected void finishClose(FixedFileTrailer trailer) throws IOException {
  // Write out encryption metadata before finalizing if we have a valid crypto context
  Encryption.Context cryptoContext = hFileContext.getEncryptionContext();
  if (cryptoContext != Encryption.Context.NONE) {
    // Wrap the context's key and write it as the encryption metadata, the wrapper includes
    // all information needed for decryption
    trailer.setEncryptionKey(EncryptionUtil.wrapKey(cryptoContext.getConf(),
        cryptoContext.getConf().get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
            User.getCurrent().getShortName()),
        cryptoContext.getKey()));
  }
  // Now we can finish the close
  trailer.setMetaIndexCount(metaNames.size());
  trailer.setTotalUncompressedBytes(totalUncompressedBytes+ trailer.getTrailerSize());
  trailer.setEntryCount(entryCount);
  trailer.setCompressionCodec(hFileContext.getCompression());
  // Time trailer serialization so it is included in write-latency metrics.
  long startTime = System.currentTimeMillis();
  trailer.serialize(outputStream);
  HFile.updateWriteLatency(System.currentTimeMillis() - startTime);
  // Only close the stream if this writer opened it (constructed from a Path
  // rather than handed an external stream); nulling it makes close() idempotent.
  if (closeOutputStream) {
    outputStream.close();
    outputStream = null;
  }
}
}
| JingchengDu/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java | Java | apache-2.0 | 32,336 |
export * from './dynamic-date.module';
export * from './dynamic-date.pipe';
| our-city-app/oca-backend | embedded-apps/projects/shared/src/lib/dynamic-date/index.ts | TypeScript | apache-2.0 | 76 |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Events;
namespace Game.Event
{
/// <summary>
/// Scene event hook: invokes the configured UnityEvent callback lists when
/// the event starts and when it ends.
/// </summary>
public class Event : MonoBehaviour
{
    #region Fields

    // Callbacks fired when the event starts. Field names are kept as-is
    // because Unity serialization binds to them.
    [SerializeField]
    private UnityEvent _startList;

    // Callbacks fired when the event ends.
    [SerializeField]
    private UnityEvent _endList;

    #endregion Fields

    #region Event

    /// <summary>Logs the start and fires all registered start callbacks.</summary>
    public virtual void StartEvent()
    {
        var message = string.Format("Start Event: {0}", this.gameObject.name);
        Debug.Log(message);
        this._startList.Invoke();
    }

    /// <summary>Logs the end and fires all registered end callbacks.</summary>
    public virtual void EndEvent()
    {
        var message = string.Format("End Event: {0}", this.gameObject.name);
        Debug.Log(message);
        this._endList.Invoke();
    }

    #endregion Event
}
}
| hilllo/HaloLandsAR | HaloLands/Assets/Scripts/EventSystem/Event.cs | C# | apache-2.0 | 779 |
import execSync from "../services/exec-sync";
import log from "../services/logger";
import lambdaExists from "../utils/lambda-exists";
/**
 * Deploys a bundled Lambda function via the AWS CLI: creates the function
 * when it does not exist yet, otherwise updates its code and configuration.
 *
 * @param {Object} options - awsAccessKeyId, awsSecretAccessKey, awsRegion,
 *   lambdaName, lambdaRole, and sourceDir (directory holding bundle.zip).
 */
export default function deploy (options) {
    const {
        awsAccessKeyId,
        awsSecretAccessKey,
        awsRegion,
        lambdaName,
        lambdaRole,
        sourceDir
    } = options;
    // Credentials and region handed to every aws-cli invocation below.
    const awsCliEnv = {
        AWS_ACCESS_KEY_ID: awsAccessKeyId,
        AWS_SECRET_ACCESS_KEY: awsSecretAccessKey,
        AWS_DEFAULT_REGION: awsRegion
    };
    // Strict comparisons mirror the original switch: anything other than an
    // exact boolean result performs no action.
    const exists = lambdaExists(awsCliEnv, lambdaName);
    if (exists === false) {
        // Create the function
        log.info(`Lambda ${lambdaName} doesn't exist.`);
        log.info(`Creating lambda ${lambdaName}`);
        execSync([
            "aws lambda create-function",
            `--function-name ${lambdaName}`,
            "--runtime nodejs4.3",
            `--role ${lambdaRole}`,
            "--handler index.handler",
            `--zip-file fileb://${sourceDir}/bundle.zip`
        ].join(" "), {env: awsCliEnv});
    } else if (exists === true) {
        // Update function code
        log.info(`Lambda ${lambdaName} already exists`);
        log.info(`Updating function code for lambda ${lambdaName}`);
        execSync([
            "aws lambda update-function-code",
            `--function-name ${lambdaName}`,
            `--zip-file fileb://${sourceDir}/bundle.zip`
        ].join(" "), {env: awsCliEnv});
        // Update function configuration (just the role for now)
        log.info(`Updating function configuration for lambda ${lambdaName}`);
        execSync([
            "aws lambda update-function-configuration",
            `--function-name ${lambdaName}`,
            `--role ${lambdaRole}`
        ].join(" "), {env: awsCliEnv});
    }
}
| lk-architecture/lk-lambda-deploy | src/steps/4.deploy.js | JavaScript | apache-2.0 | 1,817 |
// Package entry point: re-exports the schema builder and the XML parser.
var schema = require("./schema.js");
var xml = require("./xml.js");

module.exports.createBuilder = schema.createBuilder;
// parse and parseXml are aliases for the same XML parse function.
module.exports.parse = xml.parse;
module.exports.parseXml = xml.parse;
| pagi-org/pagijs | src/js/schema/index.js | JavaScript | apache-2.0 | 193 |
using System.Web.Http;
using System.Web.Routing;
namespace Giventocode.AzureSearch
{
/// <summary>
/// ASP.NET application class; registers the Web API configuration once at
/// application startup.
/// </summary>
public class WebApiApplication : System.Web.HttpApplication
{
    protected void Application_Start()
    {
        // Wire up Web API routes, formatters, etc.
        WebApiConfig.Register();
    }
}
} | giventocode/azure-mobileservices-search | Giventocode.AzureSearch/Global.asax.cs | C# | apache-2.0 | 266 |
namespace Snippets6.RavenDB
{
using System.Threading.Tasks;
using NServiceBus;
using NServiceBus.RavenDB;
using Raven.Client;
using Raven.Client.Document;
// Documentation-snippet host for NServiceBus RavenDB persistence configuration.
// Each method demonstrates one configuration style. The #region markers appear
// to delimit snippets extracted into published docs, so code inside a region
// is kept exactly as-is.
class RavenDBConfigure
{
    // Share one externally created async session for sagas (and outbox).
    public void SharedSessionForSagasAndOutbox()
    {
        #region ravendb-persistence-shared-session-for-sagas
        DocumentStore myDocumentStore = new DocumentStore();
        // configure document store properties here
        EndpointConfiguration endpointConfiguration = new EndpointConfiguration();
        endpointConfiguration.UsePersistence<RavenDBPersistence>().UseSharedAsyncSession(() =>
        {
            IAsyncDocumentSession session = myDocumentStore.OpenAsyncSession();
            // customize the session properties here
            return session;
        });
        #endregion
    }

    // Marker message/document types used by the handler snippet below.
    public class MyMessage
    {
    }

    public class MyDocument
    {
    }

    #region ravendb-persistence-shared-session-for-sagas-handler
    public class MyMessageHandler : IHandleMessages<MyMessage>
    {
        public Task Handle(MyMessage message, IMessageHandlerContext context)
        {
            MyDocument doc = new MyDocument();
            IAsyncDocumentSession ravenSession = context.SynchronizedStorageSession.RavenSession();
            return ravenSession.StoreAsync(doc);
        }
    }
    #endregion

    // Use one external document store per persistence concern.
    public void SpecificExternalDocumentStore()
    {
        #region ravendb-persistence-specific-external-store
        DocumentStore myDocumentStore = new DocumentStore();
        // configure document store properties here
        EndpointConfiguration endpointConfiguration = new EndpointConfiguration();
        endpointConfiguration.UsePersistence<RavenDBPersistence>()
            .UseDocumentStoreForSubscriptions(myDocumentStore)
            .UseDocumentStoreForSagas(myDocumentStore)
            .UseDocumentStoreForTimeouts(myDocumentStore);
        #endregion
    }

    // Configured via connection strings; see the accompanying config file.
    public void SpecificDocumentStoreViaConnectionString()
    {
        //See the config file
    }

    // Use one external document store as the default for everything.
    public void ExternalDocumentStore()
    {
        #region ravendb-persistence-external-store
        DocumentStore myDocumentStore = new DocumentStore();
        // configure document store properties here
        EndpointConfiguration endpointConfiguration = new EndpointConfiguration();
        endpointConfiguration.UsePersistence<RavenDBPersistence>()
            .SetDefaultDocumentStore(myDocumentStore);
        #endregion
    }

    // Let the persistence construct the store from connection parameters.
    public void ExternalConnectionParameters()
    {
        #region ravendb-persistence-external-connection-params
        ConnectionParameters connectionParams = new ConnectionParameters();
        // configure connection params (ApiKey, DatabaseName, Url) here
        EndpointConfiguration endpointConfiguration = new EndpointConfiguration();
        endpointConfiguration.UsePersistence<RavenDBPersistence>()
            .SetDefaultDocumentStore(connectionParams);
        #endregion
    }

    // Configured via connection strings; see the accompanying config file.
    public void SharedDocumentStoreViaConnectionString()
    {
        //See the config file
    }
}
} | WojcikMike/docs.particular.net | Snippets/Snippets_6/RavenDB/Configure.cs | C# | apache-2.0 | 3,386 |
#if TEXTURE_EXPOSE == TEXTURE_NAMES
#elif TEXTURE_EXPOSE == TEXTURE_BUILDER_NAME
builArmchair,
#elif TEXTURE_EXPOSE == TEXTURE_FILE
__FILE__,
#elif TEXTURE_EXPOSE == TEXTURE_BUILDER_BODY

// Builds the armchair textures. This file is included repeatedly with a
// different TEXTURE_EXPOSE value so the same source registers the builder
// name, the file name, or this builder body (X-macro-style registration).
void builArmchair()
{
  Channel t;
  // Cellular base pattern shared by all three color channels.
  t.Cells(100);
  // Tint each channel into a muted brown range.
  // NOTE(review): blue uses 27.f/245.f while every other term divides by
  // 255.f -- possibly a typo; confirm the intended tint before changing.
  Channel r = t; r.Scale(53.f/255.f, 83.f/255.f);
  Channel g = t; g.Scale(32.f/255.f, 54.f/255.f);
  Channel b = t; b.Scale(27.f/245.f, 50.f/255.f);
  queueTextureRGB(armchair,
                  r, g, b,
                  GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR,
                  true, GL_REPEAT, false);

  // Blurred random heightmap converted into the bump map.
  t.Random();
  t.GaussianBlur();
  t.Scale(0., 0.25f);
  buildAndQueueBumpMapFromHeightMap(armchairBump, t, true);
}
#endif // TEXTURE_EXPOSE
| laurentlb/Ctrl-Alt-Test | F/data/textures/armchair.cc | C++ | apache-2.0 | 678 |