code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
'use strict';

// Mark this module as an ES module so Babel-compiled importers interop correctly.
Object.defineProperty(exports, "__esModule", {
  value: true
});

// Babel helper: attaches prototype/static property descriptors to a
// constructor (the transpiled form of ES2015 class methods).
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();

// Babel helper: implements array destructuring over arrays and iterables
// (the transpiled form of `var [a, b] = value`).
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();

// Named exports. The utility functions are exported mainly so they can be
// unit-tested in isolation (see comment below).
exports.getMatchedParams = getMatchedParams;
exports.getQueryParams = getQueryParams;
exports.createRoute = createRoute;

// Babel helper: enforces that a transpiled class is invoked with `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

// Matches `:param` segments in a route path, e.g. ':id' in '/users/:id'.
var parametersPattern = /(:[^\/]+)/g;
// Some utility functions. Exported just to be able to test them easily
// Matches `path` against `route.matcher` and, on success, builds an object
// mapping each named route parameter to its URI-decoded captured value.
// Returns false when the path does not match the route at all.
function getMatchedParams(route, path) {
  var captured = path.match(route.matcher);
  if (!captured) {
    return false;
  }
  var result = {};
  for (var i = 0; i < route.params.length; i++) {
    // Capture group i + 1 corresponds to the i-th declared parameter.
    result[route.params[i]] = decodeURIComponent(captured[i + 1]);
  }
  return result;
}
// Parses a query string ('a=1&b=2') into an object of URI-decoded key/value
// pairs. Empty segments are skipped. Fix: a key with no '=' (e.g. 'flag' in
// '?flag&x=1') previously passed `undefined` to decodeURIComponent and so
// produced the literal string "undefined"; it now maps to ''.
function getQueryParams(query) {
  return query.split('&').filter(function (part) {
    return part.length;
  }).reduce(function (acc, part) {
    var kv = part.split('=');
    var key = kv[0];
    var value = kv[1] === undefined ? '' : kv[1];
    acc[decodeURIComponent(key)] = decodeURIComponent(value);
    return acc;
  }, {});
}
// Builds a route descriptor { name, path, handler, matcher, params }.
// ':param' segments in `path` become capture groups in `matcher`, and their
// names (without the colon) are collected in `params`.
// Fix: the matcher is now anchored at BOTH ends. Previously only '$' was
// used, so '/evil/users/1' incorrectly matched the '/users/:id' route.
function createRoute(name, path, handler) {
  // Same pattern as the module-level parametersPattern; declared locally so
  // the route construction is self-contained.
  var paramPattern = /(:[^\/]+)/g;
  var matcher = new RegExp('^' + path.replace(paramPattern, '([^\/]+)') + '$');
  var params = (path.match(paramPattern) || []).map(function (x) {
    return x.substring(1);
  });
  return { name: name, path: path, handler: handler, matcher: matcher, params: params };
}
// Scans `routes` in registration order and returns the first one whose
// matcher accepts `path`, together with its extracted parameters:
// { route, params }. When nothing matches, `route` is undefined and
// `params` holds the last (falsy) match result.
var findRouteParams = function findRouteParams(routes, path) {
  var matchedParams = void 0;
  var matchedRoute = routes.find(function (candidate) {
    matchedParams = getMatchedParams(candidate, path);
    return matchedParams;
  });
  return { route: matchedRoute, params: matchedParams };
};
// Splits a url at the first '?' into { path, queryString }.
// `queryString` is undefined when the url carries no query part; anything
// after a second '?' is dropped (same as the original destructuring).
var parseUrl = function parseUrl(url) {
  var pieces = url.split('?');
  return { path: pieces[0], queryString: pieces[1] };
};
// Removes `prefix` from the start of `url`, if present; otherwise returns
// the url unchanged. Fix: the previous implementation interpolated the
// prefix unescaped into a RegExp, so prefixes containing regex
// metacharacters (e.g. '.', '(', '+') could mis-match or throw. A plain
// string comparison has the intended semantics.
var stripPrefix = function stripPrefix(url, prefix) {
  // lastIndexOf(prefix, 0) === 0 is an ES5-compatible startsWith check.
  if (prefix && url.lastIndexOf(prefix, 0) === 0) {
    return url.slice(prefix.length);
  }
  return url;
};
// The actual Router as the default export of the module
var Router = function () {
  function Router() {
    _classCallCheck(this, Router);
    this.routes = []; // registered routes; first match wins on dispatch
    this.prefix = ''; // optional url prefix stripped before matching
  }

  // Adds a route with an _optional_ name, a path and a handler function
  _createClass(Router, [{
    key: 'add',
    value: function add(name, path, handler) {
      // Called as add(path, handler): re-invoke with an empty name.
      if (arguments.length == 2) {
        this.add.apply(this, [''].concat(Array.prototype.slice.call(arguments)));
      } else {
        this.routes.push(createRoute(name, path, handler));
      }
      return this; // chainable
    }
  }, {
    // Sets the prefix stripped from urls before matching (e.g. '/app').
    key: 'setPrefix',
    value: function setPrefix(prefix) {
      this.prefix = prefix;
      return this; // chainable
    }
  }, {
    // Matches `url` against the registered routes. On a hit, invokes the
    // route's handler with { params, query } and returns the route;
    // returns false when no route matches.
    key: 'dispatch',
    value: function dispatch(url) {
      var _parseUrl = parseUrl(stripPrefix(url, this.prefix));
      var path = _parseUrl.path;
      var queryString = _parseUrl.queryString;
      var query = getQueryParams(queryString || '');
      var _findRouteParams = findRouteParams(this.routes, path);
      var route = _findRouteParams.route;
      var params = _findRouteParams.params;
      if (route) {
        route.handler({ params: params, query: query });
        return route;
      }
      return false;
    }
  }, {
    // Returns the route matching `url` WITHOUT invoking its handler, or
    // undefined when nothing matches.
    key: 'getCurrentRoute',
    value: function getCurrentRoute(url) {
      var _parseUrl2 = parseUrl(stripPrefix(url, this.prefix));
      var path = _parseUrl2.path;
      var queryString = _parseUrl2.queryString; // NOTE(review): unused here
      var rp = findRouteParams(this.routes, path);
      // findRouteParams always returns an object, so this reduces to rp.route.
      return rp && rp.route;
    }
  }, {
    // Builds a url for the named route: substitutes `params` into the
    // ':param' segments of the path and appends `query` as an encoded
    // query string. Returns '' when the route name is unknown.
    key: 'formatUrl',
    value: function formatUrl(routeName) {
      // Transpiled default parameters: params = {} and query = {}.
      var params = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
      var query = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2];
      var route = this.routes.find(function (r) {
        return r.name === routeName;
      });
      if (!route) {
        return '';
      }
      var queryString = Object.keys(query).map(function (k) {
        return [k, query[k]];
      }).map(function (_ref) {
        var _ref2 = _slicedToArray(_ref, 2);
        var k = _ref2[0];
        var v = _ref2[1];
        return encodeURIComponent(k) + '=' + encodeURIComponent(v);
      }).join('&');
      var path = this.prefix + route.path.replace(parametersPattern, function (match) {
        return params[match.substring(1)];
      });
      return queryString.length ? path + '?' + queryString : path;
    }
  }]);
  return Router;
}();
exports.default = Router;
; | jmhdez/minimal-router | lib/Router.js | JavaScript | apache-2.0 | 5,579 |
package Entity;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Created by liangchenzhou on 25/08/16.
 *
 * Value object pairing a species' taxonomic kingdom with its scientific
 * name. Implements {@link Parcelable} so instances can travel between
 * Android components inside Intents and Bundles.
 */
public class SpeciesKingdom implements Parcelable {

    private String kingdom;
    private String scientificName;

    /** No-arg constructor; fields start out null. */
    public SpeciesKingdom() {
    }

    public SpeciesKingdom(String kingdom, String scientificName) {
        this.kingdom = kingdom;
        this.scientificName = scientificName;
    }

    public String getKingdom() {
        return kingdom;
    }

    public void setKingdom(String kingdom) {
        this.kingdom = kingdom;
    }

    public String getScientificName() {
        return scientificName;
    }

    public void setScientificName(String scientificName) {
        this.scientificName = scientificName;
    }

    @Override
    public int describeContents() {
        // No file descriptors or other special content in the parcel.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Write order matters: CREATOR reads the fields back in this order.
        dest.writeString(kingdom);
        dest.writeString(scientificName);
    }

    /** Recreates instances from a Parcel, reading fields in write order. */
    public static final Parcelable.Creator<SpeciesKingdom> CREATOR = new Creator<SpeciesKingdom>() {
        @Override
        public SpeciesKingdom createFromParcel(Parcel in) {
            final String parceledKingdom = in.readString();
            final String parceledName = in.readString();
            return new SpeciesKingdom(parceledKingdom, parceledName);
        }

        @Override
        public SpeciesKingdom[] newArray(int size) {
            return new SpeciesKingdom[size];
        }
    };
}
| lawrencezcc/Moneco-V6 | app/src/main/java/Entity/SpeciesKingdom.java | Java | apache-2.0 | 1,421 |
from django.core.files import locks
from django.core.urlresolvers import reverse
from django.db.models import Count, F, Q, Min
from django.template import RequestContext, TemplateDoesNotExist
from django.template.loader import get_template, select_template
from django.utils import timezone
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from django.http import HttpResponse, Http404, HttpResponseBadRequest
from datetime import datetime
from base64 import b64encode
import pytz
import json
import os
import logging
import random
import uuid
import numpy as np
from basecrowd.interface import CrowdRegistry
from basecrowd.models import TaskGroupRetainerStatus
from basecrowd.models import RetainerPoolStatus
from basecrowd.tasks import gather_answer
logger = logging.getLogger('crowd_server')
@require_POST
@csrf_exempt
def create_task_group(request, crowd_name):
    """Create a group of crowd tasks from a POSTed JSON spec.

    See README.md for the full API. The POST 'data' field must contain
    'configuration', 'group_id', 'group_context', and 'content' (a mapping
    from point identifier to point data). Returns JSON: {'status': 'ok'} on
    success, or {'status': 'wrong', 'reason': ...} on failure.
    """
    # get the interface implementation from the crowd name.
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)

    # Response dictionaries
    correct_response = {'status': 'ok'}
    wrong_response = {'status': 'wrong'}

    # Parse information contained in the URL
    json_dict = request.POST.get('data')

    # Validate the format.
    if not interface.validate_create_request(json_dict):
        wrong_response['reason'] = 'Invalid request data.'
        return HttpResponse(json.dumps(wrong_response))

    # Pull out important data fields
    json_dict = json.loads(json_dict)
    configuration = json_dict['configuration']
    group_id = json_dict['group_id']
    group_context = json.dumps(json_dict['group_context'])
    content = json_dict['content']
    # NOTE(review): slicing .keys() below relies on Python 2 returning a
    # list; under Python 3 this would need list(content.keys()).
    point_identifiers = content.keys()

    # Create a new group for the tasks.
    if model_spec.group_model.objects.filter(group_id=group_id).exists():
        wrong_response['reason'] = 'Group id %s is already in use.' % group_id
        return HttpResponse(json.dumps(wrong_response))
    current_group = model_spec.group_model(
        group_id=group_id,
        tasks_finished=0,
        callback_url=configuration['callback_url'],
        group_context=group_context,
        crowd_config=json.dumps(configuration.get(crowd_name, {})),
        global_config=json.dumps(configuration))

    # Call the group hook function, then save the new group to the database.
    interface.group_pre_save(current_group)
    current_group.save()

    # Build crowd tasks from the group
    if 'retainer_pool' in configuration:  # Retainer pool tasks
        # The specified crowd must support retainer pools
        retainer_pool_model = model_spec.retainer_pool_model
        if not retainer_pool_model:
            wrong_response['reason'] = 'Crowd does not support retainer pools.'
            return HttpResponse(json.dumps(wrong_response))

        # Create or find the retainer pool.
        retainer_config = configuration['retainer_pool']
        create_pool = retainer_config['create_pool']
        pool_id = retainer_config.get('pool_id', '')
        if create_pool:
            (retainer_pool, created) = retainer_pool_model.objects.get_or_create(
                external_id=pool_id,
                defaults={
                    'capacity': retainer_config['pool_size'],
                    'status': RetainerPoolStatus.RECRUITING,
                })
            if created == False:  # pool id already taken
                wrong_response['reason'] = 'Pool id %s already in use' % pool_id
                return HttpResponse(json.dumps(wrong_response))
        else:
            try:
                retainer_pool = retainer_pool_model.objects.get(
                    external_id=pool_id)
                # TODO: Make sure this pool is compatible with the new task group
            except retainer_pool_model.DoesNotExist:
                # clean up: the group was already saved above, so remove it.
                current_group.delete()
                wrong_response['reason'] = 'Pool %s does not exist' % pool_id
                return HttpResponse(json.dumps(wrong_response))
        current_group.retainer_pool = retainer_pool

        # Don't call interface.create_task, the `post_retainer_tasks` celery
        # task will do so.
        # Batch and create the tasks.
        batch_size = configuration['task_batch_size']
        for i in range(0, len(point_identifiers), batch_size):
            batch_point_ids = point_identifiers[i:i+batch_size]
            batch_content = { j: content[j] for j in batch_point_ids }
            task_id = str(uuid.uuid4())  # generate a random id for this task
            task = model_spec.task_model(
                task_type=configuration['task_type'],
                data=json.dumps(batch_content),
                create_time=timezone.now(),
                task_id=task_id,
                group=current_group,
                num_assignments=configuration['num_assignments'],
                is_retainer=True,
            )
            interface.task_pre_save(task)
            task.save()
        #for point_id, point_content in content.iteritems():
        #    task_id = str(uuid.uuid4()) # generate a random id for this task
        #    task = model_spec.task_model(
        #        task_type=configuration['task_type'],
        #        data=json.dumps({point_id: point_content}),
        #        create_time=pytz.utc.localize(datetime.now()),
        #        task_id=task_id,
        #        group=current_group,
        #        num_assignments=configuration['num_assignments'],
        #        is_retainer=True,
        #    )
        #    interface.task_pre_save(task)
        #    task.save()

        # start the work right away if the pool is ready
        if retainer_pool.status in [RetainerPoolStatus.IDLE,
                                    RetainerPoolStatus.ACTIVE]:
            current_group.retainer_pool_status = TaskGroupRetainerStatus.RUNNING
            retainer_pool.status = RetainerPoolStatus.ACTIVE
            retainer_pool.save()
        else:
            current_group.retainer_pool_status = TaskGroupRetainerStatus.WAITING
        current_group.save()
    else:  # Not retainer, create a task for each batch of points.
        for i in range(0, len(point_identifiers),
                       configuration['task_batch_size']):
            # build the batch
            current_content = {}
            for j in range(i, i + configuration['task_batch_size']):
                if j >= len(point_identifiers):
                    break
                current_content[point_identifiers[j]] = content[
                    point_identifiers[j]]
            current_content = json.dumps(current_content)

            # Call the create task hook
            current_task_id = interface.create_task(configuration,
                                                    current_content)

            # Build the task object
            current_task = model_spec.task_model(
                task_type=configuration['task_type'],
                data=current_content,
                create_time=pytz.utc.localize(datetime.now()),
                task_id=current_task_id,
                group=current_group,
                num_assignments=configuration['num_assignments'])

            # Call the pre-save hook, then save the task to the database.
            interface.task_pre_save(current_task)
            current_task.save()

    return HttpResponse(json.dumps(correct_response))
# Delete all tasks from the system.
def purge_tasks(request, crowd_name):
    """Remove every task for the given crowd, remotely and locally."""
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
    task_queryset = model_spec.task_model.objects.all()

    # Give the crowd implementation a chance to clean up on the remote
    # platform first, then delete the rows from our own database.
    # TODO: clean up retainer pool tasks correctly.
    interface.delete_tasks(task_queryset)
    task_queryset.delete()
    return HttpResponse('ok')
# we need this view to load in AMT's iframe, so disable Django's built-in
# clickjacking protection.
@xframe_options_exempt
@require_GET
def get_assignment(request, crowd_name):
    """Render the task page for a non-retainer worker."""
    # Resolve the crowd implementation registered under this name.
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
    logger.info('Non-retainer worker requested task assignment.')

    # Extract the assignment context. A missing task id means the task was
    # grabbed by another worker first (race), so show the 'unavailable' page.
    assignment_context = interface.get_assignment_context(request)
    try:
        interface.require_context(
            assignment_context, ['task_id', 'is_accepted'],
            ValueError('Task id unavailable in assignment request context.'))
    except ValueError:
        unavailable = get_scoped_template(crowd_name, 'unavailable.html')
        return HttpResponse(unavailable.render(RequestContext(request, {})))
    return _get_assignment(request, crowd_name, interface, model_spec,
                           assignment_context)
def _get_assignment(request, crowd_name, interface, model_spec, context,
                    **custom_template_context):
    """Shared implementation behind the assignment views.

    Looks up the task named by context['task_id'], records the worker,
    handles retainer-pool bookkeeping, and renders the task-type template.
    `custom_template_context` entries are merged into the template context.
    """
    # Retrieve the task based on task_id from the database
    try:
        current_task = (model_spec.task_model.objects
                        .select_related('group')
                        .get(task_id=context['task_id']))
        task_group = current_task.group
    except model_spec.task_model.DoesNotExist:
        # Task row is gone (e.g. purged): tell the worker to return the HIT.
        response_str = '''
<html><head></head><body>
<h1>Error: Expired Task!</h1>
<p>Task %s has expired, and isn't currently available for work.
<b>Please return this task</b> and pick up a new one.</p>
</body></html>
''' % context['task_id']
        return HttpResponse(response_str)

    # Save the information of this worker
    worker_id = context.get('worker_id')
    if worker_id:
        try:
            current_worker = model_spec.worker_model.objects.get(
                worker_id=worker_id)
        except model_spec.worker_model.DoesNotExist:
            current_worker = model_spec.worker_model(
                worker_id=worker_id)
            # Call the pre-save hook, the save to the database
            interface.worker_pre_save(current_worker)
            current_worker.save()
    else:
        current_worker = None

    is_accepted = context.get('is_accepted', False)

    # If this is a retainer task, add the worker to the pool (if the worker
    # isn't already in the pool, i.e., they're trying to accept multiple HITs
    # for the same pool).
    if current_task.task_type == 'retainer':
        # TODO: consider making this all pools (i.e., a worker can't be in
        # more than one pool at a time).
        pool = task_group.retainer_pool
        # Reject the accept if the worker is already active/reserved in this
        # pool via a DIFFERENT retainer task.
        if ((pool.active_workers.filter(worker_id=worker_id).exists()
             or pool.reserve_workers.filter(worker_id=worker_id).exists())
            and (current_worker.assignments.filter(
                task__group__retainer_pool=pool,
                task__task_type='retainer')
                 .exclude(task=current_task).exists())):
            response_str = '''
<html><head></head><body>
<h1>Error: Multiple pool memberships detected</h1>
<p>You can't accept more than one retainer task at a time,
and we've detected that you are already active in another
retainer task.</p>
<p>Please return this task, or leave the pool in your other
active task.</p>
<p><b>Note:</b> You may see this error if you have recently
finished another retainer task. In that case, simply wait 5-10
seconds and refresh this page, and the error should be gone.
</p>
</body></html>
'''
            return HttpResponse(response_str)
        global_config = json.loads(task_group.global_config)
        retainer_config = global_config['retainer_pool']
        exp_config = global_config.get('experimental')
        churn_thresh = exp_config.get('churn_threshold') if exp_config else None
        context.update({
            'waiting_rate': retainer_config['waiting_rate'],
            'per_task_rate': retainer_config['task_rate'],
            'min_required_tasks': retainer_config['min_tasks_per_worker'],
            'pool_status': pool.get_status_display(),
        })

    # Relate workers and tasks (after a worker accepts the task).
    if is_accepted:
        if not current_worker:
            raise ValueError("Accepted tasks must have an associated worker.")
        assignment_id = context['assignment_id']
        try:
            assignment = current_worker.assignments.get(assignment_id=assignment_id)
        except model_spec.assignment_model.DoesNotExist:
            assignment = model_spec.assignment_model.objects.create(
                assignment_id=assignment_id, worker=current_worker,
                task=current_task)

        # Add the new worker to the session task's retainer pool.
        if current_task.task_type == 'retainer':
            # Put the worker on reserve if the pool is full and we're churning
            if pool.active_workers.count() >= pool.capacity and churn_thresh is not None:
                assignment.on_reserve = True
            else:
                assignment.on_reserve = False
            current_worker.pools.add(pool)
            assignment.save()
            context.update({
                'wait_time': assignment.time_waited,
                'tasks_completed': current_worker.completed_assignments_for_pool_session(
                    current_task).count(),
                'understands_retainer': current_worker.understands_retainer,
            })
        else:
            # First acceptance on a normal task starts the group's clock.
            if not current_task.group.work_start_time:
                current_task.group.work_start_time = timezone.now()
                current_task.group.save()

    # Add task data to the context.
    content = json.loads(current_task.data)
    group_context = json.loads(task_group.group_context)
    crowd_config = json.loads(task_group.crowd_config)
    context.update(group_context=group_context,
                   content=content,
                   backend_submit_url=interface.get_backend_submit_url(),
                   frontend_submit_url=interface.get_frontend_submit_url(crowd_config),
                   crowd_name=crowd_name)
    context.update(**custom_template_context)

    # Load the template and render it.
    template = get_scoped_template(crowd_name, current_task.task_type + '.html',
                                   context=context)
    return HttpResponse(template.render(RequestContext(request, context)))
def get_scoped_template(crowd_name, template_name, context=None):
    """Pick the crowd-specific version of a template, falling back to the
    generic 'basecrowd' one when the crowd does not provide its own.

    When a context dict is given, also record which base template the page
    should extend under the 'base_template_name' key.
    """
    base_template_name = os.path.join(crowd_name, 'base.html')
    if context is not None:
        try:
            # Probe whether the crowd ships its own base template.
            get_template(base_template_name)
        except TemplateDoesNotExist:
            base_template_name = 'basecrowd/base.html'
        context['base_template_name'] = base_template_name
    return select_template([
        os.path.join(crowd_name, template_name),
        os.path.join('basecrowd', template_name),
    ])
# When workers submit assignments, we should send data to this view via AJAX
# before submitting to AMT.
@require_POST
@csrf_exempt
def post_response(request, crowd_name):
    """Record a worker's answers for an assignment.

    Stores the answers, and when the task has collected enough responses,
    marks it complete, kicks off answer aggregation via celery, and
    terminates any other still-open (non-retainer) assignments on the task.
    """
    # get the interface implementation from the crowd name.
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)

    # get context from the request
    context = interface.get_response_context(request)

    # validate context
    interface.require_context(
        context, ['assignment_id', 'task_id', 'worker_id', 'answers'],
        ValueError("Response context missing required keys."))

    # Check if this is a duplicate response
    assignment_id = context['assignment_id']
    if model_spec.assignment_model.objects.filter(
            assignment_id=assignment_id,
            finished_at__isnull=False).exists():
        return HttpResponse('Duplicate!')

    # Retrieve the task and worker from the database based on ids.
    current_task = model_spec.task_model.objects.get(task_id=context['task_id'])
    assignment = model_spec.assignment_model.objects.get(assignment_id=assignment_id)

    # Store this response into the database
    assignment.content = context['answers']
    assignment.finished_at = timezone.now()
    interface.response_pre_save(assignment)
    assignment.save()

    # Check if this task has been finished
    # If we've gotten too many responses, ignore.
    if (not current_task.is_complete
        and (current_task.assignments.filter(finished_at__isnull=False).count()
             >= current_task.num_assignments)):
        current_task.is_complete = True
        current_task.pre_celery = timezone.now()
        current_task.save()
        gather_answer.delay(current_task.task_id, model_spec)

        # terminate in progress retainer tasks
        # NOTE(review): the exclude() filters out 'retainer'-type assignments,
        # so this actually terminates the remaining open NON-retainer
        # assignments on this task — confirm the comment above is stale.
        (model_spec.assignment_model.objects
         .exclude(task__task_type='retainer')
         .filter(task=current_task,
                 finished_at__isnull=True)
         .update(finished_at=timezone.now(),
                 terminated=True))

    return HttpResponse('ok')  # AJAX call succeded.
# Views related to Retainer Pool tasks
#######################################

@require_POST
@csrf_exempt
def ping(request, crowd_name):
    """Heartbeat endpoint for retainer-pool workers.

    Depending on POST['ping_type'] ('starting' | 'waiting' | 'working'),
    starts a waiting session, accrues waiting time, or checks whether the
    worker's active assignment has been terminated. Returns pool/payment
    status plus 'terminate_work' / 'terminate_worker' flags as JSON.
    """
    try:
        interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
        now = timezone.now()

        # get and validate context
        context = interface.get_response_context(request)
        interface.require_context(
            context, ['task_id', 'worker_id', 'assignment_id'],
            ValueError("ping context missing required keys."))
        task = model_spec.task_model.objects.get(task_id=context['task_id'])
        worker = model_spec.worker_model.objects.get(worker_id=context['worker_id'])
        assignment = model_spec.assignment_model.objects.get(
            assignment_id=context['assignment_id'])
        pool_status = task.group.retainer_pool.get_status_display()
        terminate_work = False
        # A released worker should be told to leave the pool.
        terminate_worker = assignment.worker_released_at is not None

        # update waiting time
        ping_type = request.POST['ping_type']

        # Task started waiting, create a new session
        if ping_type == 'starting':
            assignment.finish_waiting_session()

        # Task is waiting, increment wait time.
        elif ping_type == 'waiting' and pool_status != 'finished':
            last_ping = assignment.last_ping
            time_since_last_ping = (now - last_ping).total_seconds()
            assignment.time_waited_session += time_since_last_ping

        # Task is working, verify that the assignment hasn't been terminated.
        elif ping_type == 'working':
            active_task_id = request.POST.get('active_task', None)
            if not active_task_id:
                logger.warning('Ping from %s, but no active task id.' % assignment)
                terminate_worker = False  # Don't kill them if we don't know what they're working on
            else:
                try:
                    active_assignment = model_spec.assignment_model.objects.filter(
                        worker=worker, task_id=active_task_id)[0]
                    if active_assignment.terminated:
                        terminate_work = True
                except IndexError:  # No active assignment
                    terminate_worker = False  # Don't kill the worker if we don't know what they're working on.

                # if terminate_worker: # make sure their current task can be recycled
                #     active_assignment.finished_at = now
                #     active_assignment.terminated = True
                #     active_assignment.save()

        # Record the heartbeat on both the assignment and the worker.
        assignment.last_ping = now
        assignment.save()
        worker.last_ping = now
        worker.save()
        logger.info('ping from worker %s, task %s' % (worker, task))

        retainer_config = json.loads(task.group.global_config)['retainer_pool']
        data = {
            'ping_type': ping_type,
            'wait_time': assignment.time_waited,
            'tasks_completed': worker.completed_assignments_for_pool_session(
                task).count(),
            'pool_status': pool_status,
            'waiting_rate': retainer_config['waiting_rate'],
            'per_task_rate': retainer_config['task_rate'],
            'min_required_tasks': retainer_config['min_tasks_per_worker'],
            'terminate_work': terminate_work,
            'terminate_worker': terminate_worker,
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception as e:
        # Log and re-raise so Django still returns a 500.
        logger.exception(e)
        raise e
@require_GET
def assign_retainer_task(request, crowd_name):
    """Pick a unit of real work for a worker sitting in a retainer pool.

    Preference order: (1) a task the worker already has an open assignment
    for, (2) an open task with spare assignment slots, (3) an abandoned or
    (under straggler mitigation) in-progress task chosen by the configured
    routing policy. Returns JSON: {'start': bool, ...} with a task url when
    work was found.
    """
    try:
        # get the interface implementation from the crowd name.
        interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
        context = interface.get_response_context(request)
        interface.require_context(
            context, ['task_id', 'worker_id'],
            ValueError("retainer assignment context missing required keys."))
        try:
            task = (model_spec.task_model.objects
                    .select_related('group__retainer_pool')
                    .get(task_id=context['task_id']))
            group = task.group
            pool = group.retainer_pool
            worker = model_spec.worker_model.objects.get(worker_id=context['worker_id'])
            logger.info('Retainer task %s requested work.' % task)
        except Exception:  # Issue loading models from IDs, finish this assignment
            return HttpResponse(json.dumps({'start': False, 'pool_status': 'finished'}),
                                content_type='application/json')

        # Optional experimental settings: straggler mitigation and churn.
        exp_config = json.loads(group.global_config).get('experimental')
        if exp_config:
            straggler_mitigation = exp_config.get('mitigate_stragglers', False)
            straggler_routing_policy = exp_config.get('straggler_routing_policy', 'random')
            churn_threshold = exp_config.get('churn_threshold')
        else:
            straggler_mitigation = False
            churn_threshold = None

        # Acquire an exclusive lock to avoid duplicate assignments
        # NOTE(review): if an exception is raised BEFORE this open() (e.g. in
        # require_context above), the finally block below references `lockf`
        # before assignment and raises NameError, masking the original error.
        # Consider initializing lockf = None up top and guarding the unlock.
        lockf = open('/tmp/ASSIGNMENT_LOCK', 'wb')
        logger.debug("Locking assignment lock...")
        locks.lock(lockf, locks.LOCK_EX)

        # Don't assign a task if the worker is on reserve or the pool is inactive.
        on_reserve = (task.assignments.filter(worker=worker, on_reserve=True).exists()
                      if churn_threshold is not None else False)
        pool_inactive = pool.status not in (RetainerPoolStatus.ACTIVE,
                                            RetainerPoolStatus.REFILLING,
                                            RetainerPoolStatus.IDLE)
        no_work_response = HttpResponse(json.dumps({'start': False,
                                                    'pool_status': pool.get_status_display()}),
                                        content_type='application/json')
        if on_reserve:
            logger.info("Worker on reserve: not assigning work.")
            return no_work_response
        if pool_inactive:
            logger.info("Pool still recruiting or otherwise inactive: not assigning work.")
            return no_work_response

        # Look for a task the worker is already assigned to
        assignment_task = None
        existing_assignments = (worker.assignments
                                .filter(finished_at__isnull=True)
                                .filter(task__group__retainer_pool=pool)
                                .exclude(task__task_type='retainer'))
        logger.info('Looking for assignments for retainer worker...')
        if existing_assignments.exists():
            assignment_task = existing_assignments[0].task
            logger.info('Found an existing assignment for this worker')
        else:  # Look for open tasks
            incomplete_tasks = (
                # incomplete tasks
                model_spec.task_model.objects.filter(is_complete=False)
                # in this pool's tasks
                .filter(group__retainer_pool=pool)
                # that aren't dummy retainer tasks
                .exclude(task_type='retainer')
                # that the worker hasn't worked on already
                .exclude(assignments__worker=worker))

            # First check if the open tasks haven't been assigned to enough workers.
            # TODO: avoid gross SQL
            # Correlated subquery counting non-terminated assignments per task.
            non_terminated_assignments = """
                SELECT COUNT(*) FROM %(crowdname)s_%(assignment_model)s
                WHERE %(crowdname)s_%(assignment_model)s.terminated = False
                AND %(crowdname)s_%(assignment_model)s.task_id = %(crowdname)s_%(task_model)s.task_id
            """ % {
                'crowdname': crowd_name,
                'assignment_model': model_spec.assignment_model.__name__.lower(),
                'task_model': model_spec.task_model.__name__.lower(),
            }
            open_tasks = incomplete_tasks.extra(
                where=["num_assignments > (%s)" % non_terminated_assignments])
            if open_tasks.exists():
                logger.info('Found an unassigned but open task')
                assignment_task = open_tasks.order_by('?')[0]

            # Then, check if there in-progress tasks with enough assignments.
            elif incomplete_tasks.exists():
                if not straggler_mitigation:  # only assign tasks that have been abandoned
                    # Bad performance characteristics! consider rewriting.
                    active_workers = set(pool.active_workers.all())
                    abandoned_tasks = [
                        t for t in incomplete_tasks
                        if len([a for a in t.assignments.select_related('worker').all()
                                if a.worker in active_workers]) < t.num_assignments]
                    if abandoned_tasks:
                        logger.info('Found an assigned but abandoned task.')
                        assignment_task = random.choice(abandoned_tasks)
                    else:
                        logger.info('All tasks are assigned.')

                # Straggler mitigation
                else:
                    logger.info('Assigning to an active task for straggler mitigation with policy %s.' %
                                straggler_routing_policy)
                    if straggler_routing_policy == 'random':
                        assignment_task = incomplete_tasks.order_by('?')[0]
                    elif straggler_routing_policy == 'oldest':
                        # Weight tasks by how long ago their first assignment started.
                        now = timezone.now()
                        annotated = incomplete_tasks.annotate(start=Min('assignments__assigned_at'))
                        weights = [(now - t.start).total_seconds() for t in annotated]
                        weights = np.array(weights) / sum(weights)
                        assignment_task = np.random.choice(list(annotated), size=1, p=weights)[0]
                    elif straggler_routing_policy == 'young-workers':
                        # Weight tasks inversely by the tenure of their workers.
                        now = timezone.now()
                        weights = [
                            1 / (now - min([a.worker.assignments
                                            .filter(task__task_type='retainer',
                                                    task__group__retainer_pool=pool)
                                            .order_by('assigned_at')[0].assigned_at
                                            for a in task.assignments.all()])).total_seconds()
                            for task in incomplete_tasks]
                        weights = np.array(weights) / sum(weights)
                        assignment_task = np.random.choice(list(incomplete_tasks), size=1, p=weights)[0]
                    elif straggler_routing_policy == 'fair':
                        # assign to the task with the fewest assignments
                        assignment_task = (incomplete_tasks
                                           .extra(select={'n_assignments': non_terminated_assignments},
                                                  order_by=['n_assignments']))[0]
                    else:
                        logger.info('Unkown straggler routing policy: %s. Using random instead...' %
                                    straggler_routing_policy)
                        assignment_task = incomplete_tasks.order_by('?')[0]

        # return a url to the assignment
        if assignment_task:
            # create the assignment if necessary
            try:
                logger.info('Looking up assignment...')
                assignment = worker.assignments.get(
                    task=assignment_task, worker=worker)
                if not assignment.retainer_session_task:
                    assignment.retainer_session_task = task
                    assignment.save()
            except model_spec.assignment_model.DoesNotExist:
                logger.info('No assignment found: creating new one.')
                assignment_id = str(uuid.uuid4())
                assignment = model_spec.assignment_model.objects.create(
                    assignment_id=assignment_id, worker=worker,
                    task=assignment_task,
                    retainer_session_task=task)
            # First assignment on the group starts its work clock.
            if not assignment_task.group.work_start_time:
                assignment_task.group.work_start_time = timezone.now()
                assignment_task.group.save()
            url_args = {
                'crowd_name': crowd_name,
                'worker_id': worker.worker_id,
                'task_id': assignment_task.task_id,
            }
            response_data = json.dumps({
                'start': True,
                'task_url': reverse('basecrowd:get_retainer_assignment',
                                    kwargs=url_args),
                'task_id': assignment_task.task_id,
                'pool_status': pool.get_status_display()
            })
            logger.info('Linking task to assignment.')
            return HttpResponse(response_data, content_type='application/json')
        else:
            logger.info('No tasks found!')
            return no_work_response
    except Exception as e:
        logger.exception(e)
        raise e
    finally:
        # Release the assignment lock--either an assignment has been created in the DB, or an error occurred.
        logger.debug("Unlocking assignment lock...")
        locks.unlock(lockf)
        lockf.close()
# we need this view to load in AMT's iframe, so disable Django's built-in
# clickjacking protection.
@xframe_options_exempt
@require_GET
def get_retainer_assignment(request, crowd_name, worker_id, task_id):
    """Serve the task page for a worker who is part of a retainer pool."""
    # Resolve the crowd implementation from its name.
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
    logger.info('Retainer worker fetched task assignment.')

    # Re-use the existing assignment id if one exists (e.g. the worker
    # refreshed the browser); otherwise mint a fresh one.
    try:
        existing = model_spec.assignment_model.objects.get(
            task_id=task_id, worker_id=worker_id)
        assignment_id = existing.assignment_id
    except model_spec.assignment_model.DoesNotExist:
        assignment_id = str(uuid.uuid4())

    context = {
        'task_id': task_id,
        'worker_id': worker_id,
        'is_accepted': True,
        'assignment_id': assignment_id,
    }
    return _get_assignment(request, crowd_name, interface, model_spec, context)
@require_POST
@csrf_exempt
def finish_pool(request, crowd_name):
    """Shut down the retainer pool whose external id arrives as POST 'pool_id'."""
    external_pool_id = request.POST.get('pool_id')
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
    pool_model = model_spec.retainer_pool_model
    try:
        pool = pool_model.objects.get(external_id=external_pool_id)
    except pool_model.DoesNotExist:
        return HttpResponse(json.dumps({'error': 'Invalid pool id'}))
    _finish_pool(pool, model_spec)
    logger.info("Retainer pool %s finished" % pool)
    return HttpResponse(json.dumps({'status': 'ok'}))
def _finish_pool(pool, model_spec):
    """Mark `pool` as finished and flag its interrupted retainer assignments."""
    # Mark open sessions as interrupted so we don't penalize them unfairly.
    # The exclude() keeps out assignments that completed cleanly (finished_at
    # is set AND terminated is False); every other assignment in the pool --
    # still open, or terminated -- gets pool_ended_mid_assignment=True.
    (model_spec.assignment_model.objects
     .filter(task__group__retainer_pool=pool,
             task__task_type='retainer')
     .exclude(Q(finished_at__isnull=False) & Q(terminated=False))
     .update(pool_ended_mid_assignment=True))
    # Close out the pool record itself.
    pool.status = RetainerPoolStatus.FINISHED
    pool.finished_at = timezone.now()
    pool.save()
@require_POST
@csrf_exempt
def understands_retainer(request, crowd_name, worker_id):
    """Record that a worker has acknowledged the retainer instructions."""
    interface, model_spec = CrowdRegistry.get_registry_entry(crowd_name)
    worker_model = model_spec.worker_model
    try:
        worker = worker_model.objects.get(worker_id=worker_id)
    except worker_model.DoesNotExist:
        return HttpResponse(json.dumps({'error': 'Invalid worker id'}))
    # Persist the acknowledgement flag on the worker record.
    worker.understands_retainer = True
    worker.save()
    logger.info('%s understands the retainer model.' % worker)
    return HttpResponse(json.dumps({'status': 'ok'}))
| amplab/ampcrowd | ampcrowd/basecrowd/views.py | Python | apache-2.0 | 33,310 |
/*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.pipeline.xslt;
import stroom.docref.DocRef;
import stroom.docstore.api.DocumentResourceHelper;
import stroom.event.logging.rs.api.AutoLogged;
import stroom.pipeline.shared.XsltDoc;
import stroom.pipeline.shared.XsltResource;
import stroom.util.shared.EntityServiceException;
import javax.inject.Inject;
import javax.inject.Provider;
@AutoLogged
class XsltResourceImpl implements XsltResource {

    /** Supplies the store that persists {@link XsltDoc} documents. */
    private final Provider<XsltStore> xsltStoreProvider;
    /** Supplies the helper that wraps document reads/updates with common handling. */
    private final Provider<DocumentResourceHelper> documentResourceHelperProvider;

    @Inject
    XsltResourceImpl(final Provider<XsltStore> xsltStoreProvider,
                     final Provider<DocumentResourceHelper> documentResourceHelperProvider) {
        this.xsltStoreProvider = xsltStoreProvider;
        this.documentResourceHelperProvider = documentResourceHelperProvider;
    }

    /**
     * Fetches the XSLT document identified by the given UUID.
     */
    @Override
    public XsltDoc fetch(final String uuid) {
        final DocRef docRef = getDocRef(uuid);
        return documentResourceHelperProvider.get().read(xsltStoreProvider.get(), docRef);
    }

    /**
     * Updates an XSLT document. The UUID embedded in the document must match
     * the UUID addressed by the request.
     *
     * @throws EntityServiceException if the two UUIDs disagree.
     */
    @Override
    public XsltDoc update(final String uuid, final XsltDoc doc) {
        final String docUuid = doc.getUuid();
        if (docUuid == null || !docUuid.equals(uuid)) {
            throw new EntityServiceException("The document UUID must match the update UUID");
        }
        return documentResourceHelperProvider.get().update(xsltStoreProvider.get(), doc);
    }

    /** Builds a {@link DocRef} addressing the XSLT document with the supplied UUID. */
    private DocRef getDocRef(final String uuid) {
        return DocRef.builder()
                .uuid(uuid)
                .type(XsltDoc.DOCUMENT_TYPE)
                .build();
    }
}
| gchq/stroom | stroom-pipeline/src/main/java/stroom/pipeline/xslt/XsltResourceImpl.java | Java | apache-2.0 | 2,168 |
package org.mariotaku.menucomponent.internal.menu;
import java.lang.reflect.Method;
import java.util.Collection;
import android.annotation.SuppressLint;
import android.content.Context;
import android.view.Menu;
import android.view.MenuItem;
/**
 * Static factory and helper methods for the internal {@link MenuImpl}-backed menus.
 */
public class MenuUtils {

	/** Creates an empty menu with no adapter. */
	public static Menu createMenu(final Context context) {
		return createMenu(context, null, null);
	}

	/** Creates a menu pre-populated with the given items and no adapter. */
	public static Menu createMenu(final Context context, final Collection<MenuItem> items) {
		return createMenu(context, null, items);
	}

	/** Creates an empty menu that reports changes to the given adapter. */
	public static Menu createMenu(final Context context, final MenuAdapter adapter) {
		return createMenu(context, adapter, null);
	}

	/**
	 * Creates a menu with the given adapter and initial items. The other
	 * overloads all delegate here so construction happens in one place.
	 */
	public static Menu createMenu(final Context context, final MenuAdapter adapter, final Collection<MenuItem> items) {
		return new MenuImpl(context, adapter, items);
	}

	/**
	 * Returns the item's show-as-action flags. {@link MenuItemImpl} exposes the
	 * flags directly; other implementations are queried reflectively, falling
	 * back to {@link MenuItem#SHOW_AS_ACTION_NEVER} when the flags cannot be read.
	 *
	 * @throws NullPointerException if {@code item} is null.
	 */
	@SuppressLint("InlinedApi")
	public static int getShowAsActionFlags(final MenuItem item) {
		if (item == null) throw new NullPointerException("item must not be null");
		if (item instanceof MenuItemImpl) return ((MenuItemImpl) item).getShowAsActionFlags();
		try {
			// getShowAsAction is not part of the public MenuItem interface,
			// so unknown implementations have to be read reflectively.
			final Method m = item.getClass().getMethod("getShowAsAction");
			return (Integer) m.invoke(item);
		} catch (final Exception e) {
			return MenuItem.SHOW_AS_ACTION_NEVER;
		}
	}
}
| xiedantibu/Android-MenuComponent | library/src/org/mariotaku/menucomponent/internal/menu/MenuUtils.java | Java | apache-2.0 | 1,243 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ARM::MobileEngagement
  module Models
    #
    # Defines values for PushModes
    #
    module PushModes
      # The wire-format string values accepted for a push mode.
      RealTime = 'real-time'
      OneShot  = 'one-shot'
      Manual   = 'manual'
    end
  end
end
| devigned/azure-sdk-for-ruby | management/azure_mgmt_mobile_engagement/lib/generated/azure_mgmt_mobile_engagement/models/push_modes.rb | Ruby | apache-2.0 | 389 |
#include "types.hpp"

// Intentionally empty for now -- presumably a placeholder translation unit
// for definitions declared in types.hpp; the namespace is kept so the file
// remains a valid compilation unit.
namespace compiler
{
} // namespace compiler
} | iamOgunyinka/substance-lang | src/types.cpp | C++ | apache-2.0 | 45 |
// Code generated by go-swagger; DO NOT EDIT.
package repositories
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/runtime/middleware"
strfmt "github.com/go-openapi/strfmt"
)
// NewGetRepositoryTreeParams creates a new GetRepositoryTreeParams object
// no default values defined in spec.
func NewGetRepositoryTreeParams() GetRepositoryTreeParams {

	var params GetRepositoryTreeParams
	return params
}
// GetRepositoryTreeParams contains all the bound params for the get repository tree operation
// typically these are obtained from a http.Request
//
// Path and Ref are optional query parameters and stay nil when not supplied.
//
// swagger:parameters getRepositoryTree
type GetRepositoryTreeParams struct {

	// HTTP Request Object
	HTTPRequest *http.Request `json:"-"`

	/*The repository's name
	  Required: true
	  In: path
	*/
	Name string
	/*The owner's username
	  Required: true
	  In: path
	*/
	Owner string
	/*The path for the tree
	  In: query
	*/
	Path *string
	/*The ref for the tree
	  In: query
	*/
	Ref *string
}
// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface
// for simple values it will use straight method calls.
//
// To ensure default values, the struct must have been initialized with NewGetRepositoryTreeParams() beforehand.
func (o *GetRepositoryTreeParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
	o.HTTPRequest = r
	qs := runtime.Values(r.URL.Query())

	var errs []error

	// Path parameters come from the matched route.
	rName, rhkName, _ := route.Params.GetOK("name")
	if err := o.bindName(rName, rhkName, route.Formats); err != nil {
		errs = append(errs, err)
	}

	rOwner, rhkOwner, _ := route.Params.GetOK("owner")
	if err := o.bindOwner(rOwner, rhkOwner, route.Formats); err != nil {
		errs = append(errs, err)
	}

	// Query parameters come from the parsed query string.
	qPath, qhkPath, _ := qs.GetOK("path")
	if err := o.bindPath(qPath, qhkPath, route.Formats); err != nil {
		errs = append(errs, err)
	}

	qRef, qhkRef, _ := qs.GetOK("ref")
	if err := o.bindRef(qRef, qhkRef, route.Formats); err != nil {
		errs = append(errs, err)
	}

	if len(errs) > 0 {
		return errors.CompositeValidationError(errs...)
	}
	return nil
}
// bindName binds and validates parameter Name from path.
func (o *GetRepositoryTreeParams) bindName(rawData []string, hasKey bool, formats strfmt.Registry) error {
	// A repeated parameter keeps only its last occurrence.
	raw := ""
	if n := len(rawData); n > 0 {
		raw = rawData[n-1]
	}

	// Required: true, but presence is guaranteed by route construction,
	// so no explicit check is needed here.
	o.Name = raw
	return nil
}
// bindOwner binds and validates parameter Owner from path.
func (o *GetRepositoryTreeParams) bindOwner(rawData []string, hasKey bool, formats strfmt.Registry) error {
	// A repeated parameter keeps only its last occurrence.
	raw := ""
	if n := len(rawData); n > 0 {
		raw = rawData[n-1]
	}

	// Required: true, but presence is guaranteed by route construction,
	// so no explicit check is needed here.
	o.Owner = raw
	return nil
}
// bindPath binds and validates parameter Path from query.
func (o *GetRepositoryTreeParams) bindPath(rawData []string, hasKey bool, formats strfmt.Registry) error {
	// A repeated query parameter keeps only its last occurrence.
	raw := ""
	if n := len(rawData); n > 0 {
		raw = rawData[n-1]
	}

	// Optional parameter: an empty value is treated as "not provided" and
	// passes all other validations.
	if raw == "" {
		return nil
	}

	o.Path = &raw
	return nil
}
// bindRef binds and validates parameter Ref from query.
func (o *GetRepositoryTreeParams) bindRef(rawData []string, hasKey bool, formats strfmt.Registry) error {
	// A repeated query parameter keeps only its last occurrence.
	raw := ""
	if n := len(rawData); n > 0 {
		raw = rawData[n-1]
	}

	// Optional parameter: an empty value is treated as "not provided" and
	// passes all other validations.
	if raw == "" {
		return nil
	}

	o.Ref = &raw
	return nil
}
| gitpods/gitpods | pkg/api/v1/restapi/operations/repositories/get_repository_tree_parameters.go | GO | apache-2.0 | 3,756 |
import os

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))


def _read(filename):
    """Return the contents of a file sitting next to this setup.py.

    The original used bare ``open(...).read()``, which leaks the file
    handle; the context manager closes it promptly.
    """
    with open(os.path.join(here, filename)) as f:
        return f.read()


README = _read('README.rst')
CHANGES = _read('CHANGES.rst')

# Runtime dependencies (installed via install_requires below).
requires = [
    'cdo',
    'bokeh',
    'ocgis',
    'pandas',
    'nose',
]

# Trove classifiers describing the project (https://pypi.org/classifiers/).
classifiers = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Science/Research',
    'Operating System :: MacOS :: MacOS X',
    'Operating System :: Microsoft :: Windows',
    'Operating System :: POSIX',
    'Programming Language :: Python',
    'Topic :: Scientific/Engineering :: Atmospheric Science',
]

setup(name='flyingpigeon',
      version='0.2.0',
      # Typo fix: was 'extrem events'.
      description='Processes for climate data, indices and extreme events',
      long_description=README + '\n\n' + CHANGES,
      classifiers=classifiers,
      author='Nils Hempelmann',
      author_email='nils.hempelmann@ipsl.jussieu.fr',
      url='http://www.lsce.ipsl.fr/',
      license="http://www.apache.org/licenses/LICENSE-2.0",
      keywords='wps flyingpigeon pywps malleefowl ipsl birdhouse conda anaconda',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='nose.collector',
      install_requires=requires,
      entry_points={
          'console_scripts': [
          ]},
      )
| sradanov/flyingpigeon | setup.py | Python | apache-2.0 | 1,385 |
/**
 * Prepaid (account-level) fees grid.
 *
 * Pages through account payment records for the currently selected customer
 * and offers a popup filter (smartcard number, status, date, operator,
 * invoice number) via the grid's "search" tool.
 */
AcctFeeGrid = Ext.extend(Ext.ux.Grid, {
	border : false,
	acctFeeStore : null,
	region : 'center',
	pageSize : 10,
	constructor : function() {
		// Remote, paged JSON store; one row per account fee record.
		this.acctFeeStore = new Ext.data.JsonStore({
			url:Constant.ROOT_PATH+ "/commons/x/QueryCust!queryAcctPayFee.action",
			totalProperty:'totalProperty',
			root:'records',
			fields : ['acct_type_text', 'acct_type', 'fee_text','is_logoff','is_doc','is_doc_text',
				'fee_type', 'fee_type_text', {name : 'real_pay',type : 'float'},
				'create_time','acct_date','device_code','invoice_mode','invoice_book_id','finance_status',
				'invoice_code', 'invoice_id', 'pay_type_text','begin_date','prod_invalid_date',
				'pay_type', 'status_text', 'status', 'dept_id','dept_name',
				'user_id','user_name', 'user_type_text','fee_sn','optr_id','optr_name',
				'OPERATE','busi_code','busi_name','create_done_code','data_right',
				'busi_optr_id','busi_optr_name','prod_sn','invoice_fee',"doc_type","doc_type_text","invoice_mode_text","allow_done_code",'count_text']
		});
		// NOTE(review): no scope is passed, so inside doOperate `this` appears
		// to be the store, not the grid -- doOperate relies on that.
		this.acctFeeStore.on("load",this.doOperate);
		var lc = langUtils.main("pay.payfee.columns");
		var cm = new Ext.ux.grid.LockingColumnModel({
			columns : [
				{header:lc[0],dataIndex:'create_done_code',width:80},
				{header:lc[1],dataIndex:'busi_name', width:150,renderer:App.qtipValue},
				{header:lc[2],dataIndex:'acct_type_text',width:105,renderer:App.qtipValue},
				{header:lc[3],dataIndex:'fee_text',width:150,renderer:App.qtipValue},
				{header:lc[4],dataIndex:'user_type_text',width:70},
				{header:lc[5],dataIndex:'user_name',width:130,renderer:App.qtipValue},
				{header:lc[6],dataIndex:'device_code',width:130,renderer:App.qtipValue},
				{header:lc[7],dataIndex:'status_text',width:70,renderer:Ext.util.Format.statusShow},
				{header:lc[8],dataIndex:'real_pay',width:60,renderer:Ext.util.Format.formatFee},
				{header:lc[9],dataIndex:'begin_date',width:125,renderer:Ext.util.Format.dateFormat},
				{header:lc[10],dataIndex:'prod_invalid_date',width:125,renderer:Ext.util.Format.dateFormat},
				{header:lc[11],dataIndex:'is_doc_text',width:75,renderer:Ext.util.Format.statusShow},
				{header:lc[12],dataIndex:'pay_type_text',width:75, renderer:App.qtipValue},
				{header:lc[13],dataIndex:'create_time',width:125},
				{header:lc[14],dataIndex:'acct_date',width:125},
				{header:lc[15],dataIndex:'optr_name',width:90, renderer:App.qtipValue},
				{header:lc[16],dataIndex:'dept_name',width:100, renderer:App.qtipValue},
				{header:lc[17],dataIndex:'invoice_id',width:80, renderer:App.qtipValue},
//				{header:lc[18],dataIndex:'invoice_mode_text',width:80},
				{header:lc[19],dataIndex:'doc_type_text',width:90, renderer:App.qtipValue},
				{header:lc[20],dataIndex:'busi_optr_name',width:100, renderer:App.qtipValue}
			]
		});
		var pageTbar = new Ext.PagingToolbar({store: this.acctFeeStore ,pageSize : this.pageSize});
		AcctFeeGrid.superclass.constructor.call(this, {
			id:'P_ACCT',
			title : langUtils.main("pay.payfee._title"),
			loadMask : true,
			store : this.acctFeeStore,
			border : false,
			bbar: pageTbar,
			disableSelection:true,
			view: new Ext.ux.grid.ColumnLockBufferView(),
			sm : new Ext.grid.RowSelectionModel(),
			cm : cm,
			// "search" tool: opens the filter popup next to the tool icon.
			tools:[{id:'search',qtip:'查询',cls:'tip-target',scope:this,handler:function(){
				var comp = this.tools.search;
				if(this.acctFeeStore.getCount()>0){
					if(win)win.close();
					win = FilterWindow.addComp(this,[
						{text:'智能卡号',field:'device_code',type:'textfield'},
						{text:'状态',field:'status',showField:'status_text',
							data:[
								{'text':'所有','value':''},
								{'text':'已支付','value':'PAY'},
								{'text':'失效','value':'INVALID'}
							]
						},
						{text:'受理日期',field:'create_time',type:'datefield'},
						{text:'受理人',field:'optr_name',type:'textfield'},
						{text:'发票号',field:'invoice_id',type:'textfield'}
					],700,null,true,"queryFeeInfo");
					if(win){
						win.setPosition(comp.getX()-win.width, comp.getY()-50);// right-align the popup with the tool icon
						win.show();
					}
				}else{
					Alert('请先查询数据!');
				}
			}
			}]
		});
	},
	initEvents :function(){
		this.on("afterrender",function(){
			this.swapViews();
		},this,{delay:10});
		AcctFeeGrid.superclass.initEvents.call(this);
	},
	// Rows accepted by an operator other than the current login must not be
	// reversed: clear their OPERATE flag so the "reverse" action is hidden.
	doOperate:function(){
		this.each(function(record){
			if(record.get('optr_name') != App.getData().optr['optr_name']){
				record.set('OPERATE','F');
			}
		});
		this.commitChanges();
	},
	remoteRefresh : function() {
		this.refresh();
	},
	// Remove the currently selected rows from the local store only.
	localRefresh : function() {
		unitRecords = this.getSelectionModel().getSelections();
		for (var i in unitRecords) {
			this.acctFeeStore.remove(unitRecords[i]);
		}
	},
	// Rebind the store to the currently selected customer and reload.
	refresh:function(){
		this.acctFeeStore.baseParams = {
			residentCustId: App.getData().custFullInfo.cust.cust_id,
			custStatus : App.data.custFullInfo.cust.status
		};
//		this.acctFeeStore.baseParams['residentCustId'] = App.getData().custFullInfo.cust.cust_id;
//		this.acctFeeStore.baseParams['custStatus'] = App.data.custFullInfo.cust.status;
//		this.acctFeeStore.load({params:{start: 0,limit: this.pageSize}});
		this.reloadCurrentPage();
	}
});
/**
 * Business fees grid.
 *
 * Pages through one-off business fee records (device purchases etc.) for the
 * currently selected customer, with a popup filter on device type/code,
 * date, status, operator and invoice number.
 */
BusiFeeGrid = Ext.extend(Ext.ux.Grid, {
	border : false,
	busiFeeStore : null,
	region : 'center',
	pageSize : 10,
	constructor : function() {
		// Remote, paged JSON store; one row per business fee record.
		this.busiFeeStore = new Ext.data.JsonStore({
			url : Constant.ROOT_PATH+ "/commons/x/QueryCust!queryBusiPayFee.action",
			totalProperty:'totalProperty',
			root:'records',
			fields : ['create_done_code','fee_text', 'device_type', 'device_type_text',
				'device_code', {name : 'should_pay',type : 'int'},'is_doc','is_doc_text',
				'pay_type_text', 'pay_type', 'status_text','dept_name','dept_id',
				'status', 'invoice_code', 'invoice_id','invoice_mode', 'optr_id','optr_name',
				{name : 'real_pay',type : 'int'},'fee_sn','create_time','acct_date','deposit',
				'data_right','finance_status','invoice_book_id','is_busi_fee',
				'busi_optr_id','busi_optr_name','invoice_fee',"doc_type","doc_type_text",
				"invoice_mode_text",'buy_num','device_model','device_model_name', 'count_text']
		});
		var lc = langUtils.main("pay.busifee.columns");
		var cm = new Ext.ux.grid.LockingColumnModel({
			columns : [
				{header:lc[0],dataIndex:'create_done_code',width:80},
				{header:lc[1],dataIndex:'fee_text',width:110, renderer:App.qtipValue},
				{header:lc[2],dataIndex:'device_type_text',width:80},
				{header:lc[16],dataIndex:'device_model_name',width:120, renderer:App.qtipValue},
				{header:lc[3],dataIndex:'device_code',width:130, renderer:App.qtipValue},
				{header:lc[4],dataIndex:'status_text',width:70,renderer:Ext.util.Format.statusShow},
				{header:lc[5],dataIndex:'is_doc_text',width:80,renderer:Ext.util.Format.statusShow},
				{header:lc[6],dataIndex:'should_pay',width:60,renderer:Ext.util.Format.formatFee},
				{header:lc[7],dataIndex:'real_pay',width:60,renderer:Ext.util.Format.formatFee},
				{header:lc[17],dataIndex:'count_text',width:120,renderer:App.qtipValue},
				{header:lc[15],dataIndex:'buy_num',width:80},
				{header:lc[8],dataIndex:'pay_type_text',width:90, renderer:App.qtipValue},
				{header:lc[9],dataIndex:'create_time',width:125},
				{header:lc[19],dataIndex:'acct_date',width:125},
				{header:lc[10],dataIndex:'optr_name',width:90, renderer:App.qtipValue},
				{header:lc[11],dataIndex:'dept_name',width:100, renderer:App.qtipValue},
				{header:lc[12],dataIndex:'invoice_id',width:90, renderer:App.qtipValue},
//				{header:lc[13],dataIndex:'invoice_mode_text',width:80},
				{header:lc[14],dataIndex:'doc_type_text',width:90, renderer:App.qtipValue},
				{header:lc[18],dataIndex:'busi_optr_name',width:100, renderer:App.qtipValue}
			]
		});
		BusiFeeGrid.superclass.constructor.call(this, {
			id:'P_BUSI',
			title : langUtils.main("pay.busifee._title"),
			loadMask : true,
			store : this.busiFeeStore,
			border : false,
			bbar: new Ext.PagingToolbar({store: this.busiFeeStore ,pageSize : this.pageSize}),
			sm : new Ext.grid.RowSelectionModel(),
			view: new Ext.ux.grid.ColumnLockBufferView(),
			cm : cm,
			listeners:{
				scope:this,
				delay:10,
				rowdblclick: this.doDblClickRecord,
				afterrender: this.swapViews
			},
			// "search" tool: opens the filter popup next to the tool icon.
			tools:[{id:'search',qtip:'查询',cls:'tip-target',scope:this,handler:function(){
				var comp = this.tools.search;
				if(this.busiFeeStore.getCount()>0){
					if(win)win.close();
					win = FilterWindow.addComp(this,[
						{text:'设备类型',field:'device_type',showField:'device_type_text',
							data:[
								{'text':'设备类型','value':''},
								{'text':'机顶盒','value':'STB'},
								{'text':'智能卡','value':'CARD'},
								{'text':'MODEM','value':'MODEM'}
							]
						},
						{text:'设备编号',field:'device_code',type:'textfield'},
						{text:'受理日期',field:'create_time',type:'datefield'},
						{text:'状态',field:'status',showField:'status_text',
							data:[
								{'text':'所有','value':''},
								{'text':'已支付','value':'PAY'},
								{'text':'失效','value':'INVALID'}
							]
						},
						{text:'受理人',field:'optr_name',type:'textfield'},
						{text:'发票号',field:'invoice_id',type:'textfield'}
					],800,null,true,"queryFeeInfo");
					if(win){
						win.setPosition(comp.getX()-win.width, comp.getY()-50);// right-align the popup with the tool icon
						win.show();
					}
				}else{
					Alert('请先查询数据!');
				}
			}
			}]
		});
	},
	// Currently a no-op; kept as the rowdblclick listener target.
	doDblClickRecord : function(grid, rowIndex, e) {
		var record = grid.getStore().getAt(rowIndex);
	},
	remoteRefresh : function() {
		this.refresh();
	},
	// Rebind the store to the currently selected customer and reload.
	refresh:function(){
		/*Ext.Ajax.request({
			url : Constant.ROOT_PATH+ "/commons/x/QueryCust!queryBusiPayFee.action",
			scope:this,
			params:{
				residentCustId:App.getData().custFullInfo.cust.cust_id,
				custStatus : App.data.custFullInfo.cust.status
			},
			success:function(res,opt){
				var data = Ext.decode(res.responseText);
				//PagingMemoryProxy() 一次性读取数据
				this.busiFeeStore.proxy = new Ext.data.PagingMemoryProxy(data),
				//本地分页
				this.busiFeeStore.load({params:{start:0,limit:this.pageSize}});
			}
		});*/
//		this.busiFeeStore.baseParams['residentCustId'] = App.getData().custFullInfo.cust.cust_id;
		this.busiFeeStore.baseParams = {
			residentCustId: App.getData().custFullInfo.cust.cust_id
		};
//		this.busiFeeStore.load({params:{start: 0,limit: this.pageSize}});
		this.reloadCurrentPage();
	},
	// Remove the currently selected rows from the local store only.
	localRefresh : function() {
		unitRecords = this.getSelectionModel().getSelections();
		for (var i in unitRecords) {
			this.busiFeeStore.remove(unitRecords[i]);
		}
	}
});
/**
 * Payment amount detail window.
 *
 * Modal-style popup listing the individual fee lines behind one payment
 * serial number (pay_sn); opened from FeePayGrid.
 */
CfeePayWindow = Ext.extend(Ext.Window, {
	feeStore:null,
	constructor: function(){
		// One row per fee line attached to the payment.
		this.feeStore = new Ext.data.JsonStore({
			url : Constant.ROOT_PATH+ "/commons/x/QueryCust!queryFeePayDetail.action",
			fields: ["real_pay","fee_text","invoice_id","create_done_code"]
		})
		var lc = langUtils.main("pay.feePayDetail.columns");
		var columns = [
			{header: lc[0], width: 80,sortable:true, dataIndex: 'create_done_code',renderer:App.qtipValue},
			{header: lc[1],sortable:true, dataIndex: 'fee_text',renderer:App.qtipValue},
			{header: lc[2], width: 70, sortable:true, dataIndex: 'real_pay',renderer:Ext.util.Format.formatFee},
			{header: lc[3], width: 80,sortable:true, dataIndex: 'invoice_id',renderer:App.qtipValue}
		];
		return CfeePayWindow.superclass.constructor.call(this, {
			layout:"fit",
			title: langUtils.main("pay.feePayDetail._title"),
			width: 600,
			height: 300,
			resizable: false,
			maximizable: false,
			closeAction: 'hide',
			minimizable: false,
			items: [{
				xtype: 'grid',
				stripeRows: true,
				border: false,
				store: this.feeStore,
				columns: columns,
				viewConfig:{forceFit:true},
				stateful: true
			}]
		});
	},
	// Load the detail rows for the given payment serial number, then show.
	show: function(sn){
		this.feeStore.baseParams = {
			paySn: sn
		};
		this.feeStore.load();
		return CfeePayWindow.superclass.show.call(this);
	}
});
/**
 * Payment records grid (east region).
 *
 * Lists payment transactions for the selected customer; clicking a pay_sn
 * cell opens CfeePayWindow with that payment's fee detail.
 */
FeePayGrid = Ext.extend(Ext.ux.Grid, {
	feePayStore : null,
	pageSize : 20,
	cfeePayWindow:null,
	feePayData: [],
	constructor : function() {
		// Remote, paged JSON store; one row per payment transaction.
		this.feePayStore = new Ext.data.JsonStore({
			url : Constant.ROOT_PATH+ "/commons/x/QueryCust!queryFeePay.action",
			totalProperty:'totalProperty',
			root:'records',
			fields : ['pay_sn','reverse_done_code', 'pay_type', 'pay_type_text',
				'done_code', {name : 'usd',type : 'int'},'receipt_id','is_valid_text',
				'is_valid', 'payer', 'acct_date','dept_name','dept_id',
				'invoice_mode', 'invoice_mode_text', 'remark',{name : 'exchange',type : 'int'},
				{name : 'cos',type : 'int'}, 'optr_id','optr_name',
				{name : 'khr',type : 'int'},'create_time','data_right']
		});
		var lc = langUtils.main("pay.detail.columns");
		var cm = new Ext.ux.grid.LockingColumnModel({
			columns : [
				// pay_sn renders as a link that opens the fee detail window.
				{header:lc[0],dataIndex:'pay_sn',width:85,renderer:function(value,metaData,record){
					// NOTE(review): `that` is an implicit global and looks
					// unused -- confirm before removing.
					that = this;
					if(value != ''){
						return '<div style="text-decoration:underline;font-weight:bold" onclick="Ext.getCmp(\'P_FEE_PAY\').doTransferFeeShow();" ext:qtitle="" ext:qtip="' + value + '">' + value +'</div>';
					}else{
						return '<div ext:qtitle="" ext:qtip="' + value + '">' + value +'</div>';
					}
				}},
				{header:lc[1],dataIndex:'usd',width:50,renderer:Ext.util.Format.formatFee},
				{header:lc[5],dataIndex:'is_valid_text',width:70,renderer:Ext.util.Format.statusShow},
				{header:lc[2],dataIndex:'khr',width:50,renderer:Ext.util.Format.formatFee},
				{header:lc[3],dataIndex:'exchange',width:70},
				{header:lc[4],dataIndex:'cos',width:100,renderer:Ext.util.Format.formatFee},
				{header:lc[6],dataIndex:'pay_type_text',width:100},
				{header:lc[7],dataIndex:'payer',width:70},
				{header:lc[8],dataIndex:'done_code',width:80},
				{header:lc[9],dataIndex:'receipt_id',width:100},
//				{header:lc[10],dataIndex:'invoice_mode_text',width:80},
				{header:lc[11],dataIndex:'create_time',width:125},
				{header:lc[12],dataIndex:'optr_name',width:100},
				{header:lc[13],dataIndex:'dept_name',width:100}
			]
		});
		FeePayGrid.superclass.constructor.call(this, {
			id:'P_FEE_PAY',
			title : langUtils.main("pay.detail._title"),
			region:"east",
			width:"30%",
			split:true,
			loadMask : true,
			store : this.feePayStore,
//			border : false,
			bbar: new Ext.PagingToolbar({store: this.feePayStore ,pageSize : this.pageSize}),
			sm : new Ext.grid.RowSelectionModel(),
			view: new Ext.ux.grid.ColumnLockBufferView(),
			cm : cm,
			listeners:{
				scope:this,
				delay:10,
				rowdblclick: this.doDblClickRecord,
				afterrender: this.swapViews
			},
			// "search" tool: opens the filter popup next to the tool icon.
			tools:[{id:'search',qtip:'查询',cls:'tip-target',scope:this,handler:function(){
				var comp = this.tools.search;
				if(this.feePayStore.getCount()>0){
					if(win)win.close();
					win = FilterWindow.addComp(this,[
						{text:'状态',field:'status',showField:'is_valid_text',
							data:[
								{'text':'所有','value':''},
								{'text':'有效','value':'T'},
								{'text':'失效','value':'F'}
							]
						},
						{text:'受理日期',field:'create_time',type:'datefield'},
						{text:'受理人',field:'optr_name',type:'textfield'},
						{text:'发票号',field:'invoice_id',type:'textfield'}
					],580,null,true,"queryFeeInfo");
					if(win){
						win.setPosition(comp.getX()-win.width, comp.getY()-50);// right-align the popup with the tool icon
						win.show();
					}
				}else{
					Alert('请先查询数据!');
				}
			}
			}]
		});
	},
	// Currently a no-op; kept as the rowdblclick listener target.
	doDblClickRecord : function(grid, rowIndex, e) {
		var record = grid.getStore().getAt(rowIndex);
	},
	remoteRefresh : function() {
		this.refresh();
	},
	// Rebind the store to the currently selected customer and reload.
	refresh:function(){
		this.feePayStore.baseParams = {
			residentCustId: App.getData().custFullInfo.cust.cust_id
		};
//		this.busiFeeStore.load({params:{start: 0,limit: this.pageSize}});
		this.reloadCurrentPage();
	},
	// Remove the currently selected rows from the local store only.
	localRefresh : function() {
		unitRecords = this.getSelectionModel().getSelections();
		for (var i in unitRecords) {
			this.feePayStore.remove(unitRecords[i]);
		}
	},
	// Open the payment detail window for the single selected payment row.
	doTransferFeeShow:function(){
		if(!App.getApp().getCustId()){
			Alert('请查询客户之后再做操作.');
			return false;
		}
		var recs = this.selModel.getSelections();
		if(!recs || recs.length !=1){
			Alert('请选择且仅选择一条记录!');
			return false;
		}
		var rec = recs[0];
		if(!this.cfeePayWindow){
			this.cfeePayWindow = new CfeePayWindow();
		}
		this.cfeePayWindow.show(rec.get('pay_sn'));
	}
});
/**
 * Payment information panel.
 *
 * Border layout combining the prepaid fees grid (top), business fees grid
 * (bottom) and the payment records grid (east region).
 */
PayfeePanel = Ext.extend(BaseInfoPanel, {
	// Child panel references.
	acctFeeGrid : null,
	busiFeeGrid : null,
	feePayGrid : null,
	// other property
	mask : null,
	constructor : function() {
		// Instantiate child panels.
		this.acctFeeGrid = new AcctFeeGrid();
		this.busiFeeGrid = new BusiFeeGrid();
		this.feePayGrid = new FeePayGrid();
		PayfeePanel.superclass.constructor.call(this, {
			layout:"border",
			border:false,
			items:[{
				region:"center",
				layout:"anchor",
//				border: false,
				items : [{
					layout : 'fit',
					border : false,
					anchor : "100% 60%",
					bodyStyle: 'border-bottom-width: 1px;',
					items : [this.acctFeeGrid]
				}, {
					layout : 'fit',
					border : false,
					anchor : "100% 40%",
					items : [this.busiFeeGrid]
				}]
			},this.feePayGrid
//			{
//				region:"west",
//				split:true,
//				width:"30%",
//				border: false,
//				items:[this.feePayGrid]
//			}
			]
		});
	},
	// Reload all three child grids from the server.
	refresh : function() {
		this.acctFeeGrid.remoteRefresh();
		this.busiFeeGrid.remoteRefresh();
		this.feePayGrid.remoteRefresh();
	}
});
Ext.reg("payfeepanel", PayfeePanel); | leopardoooo/cambodia | boss-core/src/main/webapp/pages/index/center/PayfeePanel.js | JavaScript | apache-2.0 | 18,342 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.ServiceModel;
namespace WcfChatServer
{
    /// <summary>
    /// WCF callback contract implemented by chat clients; the server invokes
    /// these one-way operations to push data to a connected client.
    /// </summary>
    interface IWcfChatClient
    {
        /// <summary>Delivers a chat message from the named user (fire-and-forget).</summary>
        [OperationContract(IsOneWay = true)]
        void onMessageReceived(string username, string message);

        /// <summary>Delivers a server notification with a status code (fire-and-forget).</summary>
        [OperationContract(IsOneWay = true)]
        void onServerInfoReceived(int statusCode, string message);
    }
}
| jatwigg/wcf-messaging-service | WcfChatServer/Interface1.cs | C# | apache-2.0 | 439 |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// todo: should have a way to auto-register these (instead of manually maintaining a list)
// todo: should have a way to auto-register these (instead of manually maintaining a list)

/**
 * Names of the entity types handled by the resource reducers.
 * Kept as the module's default export so existing imports keep working.
 */
const entityTypes = [
  'version',
  'setting',
  'home',
  'historyItem',
  'space',
  'spaces',
  'source',
  'sources',
  'dataset',
  'fullDataset',
  'datasetUI',
  'datasetContext',
  'physicalDataset',
  'datasetConfig',
  'file',
  'fileFormat',
  'folder',
  'jobDetails',
  'tableData',
  'previewTable',
  'user',
  'datasetAccelerationSettings',
  'provision'
];

export default entityTypes;
| dremio/dremio-oss | dac/ui/src/reducers/resources/entityTypes.js | JavaScript | apache-2.0 | 1,073 |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate;
import java.util.List;
import java.util.Map;
import com.opengamma.analytics.financial.interestrate.future.derivative.BondFuture;
import com.opengamma.analytics.financial.interestrate.future.method.BondFutureHullWhiteMethod;
import com.opengamma.util.tuple.DoublesPair;
/**
 * Present value curve sensitivity calculator for interest rate instruments using Hull-White (extended Vasicek) one factor model.
 */
public final class PresentValueCurveSensitivityHullWhiteCalculator extends PresentValueCurveSensitivityCalculator {

  /** Pricing method used for bond futures under the Hull-White model. */
  private static final BondFutureHullWhiteMethod METHOD_HW = BondFutureHullWhiteMethod.getInstance();

  /** The unique instance of this calculator. */
  private static final PresentValueCurveSensitivityHullWhiteCalculator INSTANCE = new PresentValueCurveSensitivityHullWhiteCalculator();

  /**
   * Return the instance of the calculator.
   * @return The calculator.
   */
  public static PresentValueCurveSensitivityHullWhiteCalculator getInstance() {
    return INSTANCE;
  }

  /** Singleton: instantiation is restricted to the static initializer. */
  private PresentValueCurveSensitivityHullWhiteCalculator() {
  }

  @Override
  public Map<String, List<DoublesPair>> visitBondFuture(final BondFuture bondFuture, final YieldCurveBundle curves) {
    final Map<String, List<DoublesPair>> sensitivities =
        METHOD_HW.presentValueCurveSensitivity(bondFuture, curves).getSensitivities();
    return sensitivities;
  }
}
| charles-cooper/idylfin | src/com/opengamma/analytics/financial/interestrate/PresentValueCurveSensitivityHullWhiteCalculator.java | Java | apache-2.0 | 1,565 |
# Copyright (c) OpenMMLab. All rights reserved.
import itertools
import numpy as np
import torch
from .general_data import GeneralData
class InstanceData(GeneralData):
"""Data structure for instance-level annnotations or predictions.
Subclass of :class:`GeneralData`. All value in `data_fields`
should have the same length. This design refer to
https://github.com/facebookresearch/detectron2/blob/master/detectron2/structures/instances.py # noqa E501
Examples:
>>> from mmdet.core import InstanceData
>>> import numpy as np
>>> img_meta = dict(img_shape=(800, 1196, 3), pad_shape=(800, 1216, 3))
>>> results = InstanceData(img_meta)
>>> img_shape in results
True
>>> results.det_labels = torch.LongTensor([0, 1, 2, 3])
>>> results["det_scores"] = torch.Tensor([0.01, 0.7, 0.6, 0.3])
>>> results["det_masks"] = np.ndarray(4, 2, 2)
>>> len(results)
4
>>> print(resutls)
<InstanceData(
META INFORMATION
pad_shape: (800, 1216, 3)
img_shape: (800, 1196, 3)
PREDICTIONS
shape of det_labels: torch.Size([4])
shape of det_masks: (4, 2, 2)
shape of det_scores: torch.Size([4])
) at 0x7fe26b5ca990>
>>> sorted_results = results[results.det_scores.sort().indices]
>>> sorted_results.det_scores
tensor([0.0100, 0.3000, 0.6000, 0.7000])
>>> sorted_results.det_labels
tensor([0, 3, 2, 1])
>>> print(results[results.scores > 0.5])
<InstanceData(
META INFORMATION
pad_shape: (800, 1216, 3)
img_shape: (800, 1196, 3)
PREDICTIONS
shape of det_labels: torch.Size([2])
shape of det_masks: (2, 2, 2)
shape of det_scores: torch.Size([2])
) at 0x7fe26b6d7790>
>>> results[results.det_scores > 0.5].det_labels
tensor([1, 2])
>>> results[results.det_scores > 0.5].det_scores
tensor([0.7000, 0.6000])
"""
    def __setattr__(self, name, value):
        # The two bookkeeping attributes may be bound once (during
        # initialization) but never rebound afterwards.
        if name in ('_meta_info_fields', '_data_fields'):
            if not hasattr(self, name):
                super().__setattr__(name, value)
            else:
                raise AttributeError(
                    f'{name} has been used as a '
                    f'private attribute, which is immutable. ')
        else:
            # Data fields are restricted to sliceable containers so that
            # __getitem__ can index them uniformly.
            assert isinstance(value, (torch.Tensor, np.ndarray, list)), \
                f'Can set {type(value)}, only support' \
                f' {(torch.Tensor, np.ndarray, list)}'

            # Every instance-level field must share one common length
            # (len(self)); enforce it against the fields already stored.
            if self._data_fields:
                assert len(value) == len(self), f'the length of ' \
                                                f'values {len(value)} is ' \
                                                f'not consistent with' \
                                                f' the length ' \
                                                f'of this :obj:`InstanceData` ' \
                                                f'{len(self)} '
            super().__setattr__(name, value)
    def __getitem__(self, item):
        """Index the instance data.

        Args:
            item (str, obj:`slice`, int,
                obj`torch.LongTensor`, obj:`torch.BoolTensor`):
                get the corresponding values according to item.
        Returns:
            obj:`InstanceData`: Corresponding values (or the raw field
            when ``item`` is a ``str``).
        """
        assert len(self), ' This is a empty instance'
        assert isinstance(
            item, (str, slice, int, torch.LongTensor, torch.BoolTensor))
        if isinstance(item, str):
            # String indexing returns the field itself, not a sub-container.
            return getattr(self, item)
        if type(item) == int:
            if item >= len(self) or item < -len(self):
                raise IndexError(f'Index {item} out of range!')
            else:
                # keep the dimension: an int is converted to a one-element
                # slice so the result still has a leading batch axis
                item = slice(item, None, len(self))
        new_data = self.new()
        if isinstance(item, (torch.Tensor)):
            # Tensor indices must be 1-D (select along the instance axis).
            assert item.dim() == 1, 'Only support to get the' \
                                    ' values along the first dimension.'
            if isinstance(item, torch.BoolTensor):
                assert len(item) == len(self), f'The shape of the' \
                                               f' input(BoolTensor)) ' \
                                               f'{len(item)} ' \
                                               f' does not match the shape ' \
                                               f'of the indexed tensor ' \
                                               f'in results_filed ' \
                                               f'{len(self)} at ' \
                                               f'first dimension. '
            for k, v in self.items():
                if isinstance(v, torch.Tensor):
                    new_data[k] = v[item]
                elif isinstance(v, np.ndarray):
                    # numpy arrays cannot be indexed by a torch tensor
                    # directly; move the index to host memory first.
                    new_data[k] = v[item.cpu().numpy()]
                elif isinstance(v, list):
                    r_list = []
                    # convert to indexes from boolTensor
                    if isinstance(item, torch.BoolTensor):
                        indexes = torch.nonzero(item).view(-1)
                    else:
                        indexes = item
                    for index in indexes:
                        r_list.append(v[index])
                    new_data[k] = r_list
        else:
            # item is a slice
            for k, v in self.items():
                new_data[k] = v[item]
        return new_data
@staticmethod
def cat(instances_list):
"""Concat the predictions of all :obj:`InstanceData` in the list.
Args:
instances_list (list[:obj:`InstanceData`]): A list
of :obj:`InstanceData`.
Returns:
obj:`InstanceData`
"""
assert all(
isinstance(results, InstanceData) for results in instances_list)
assert len(instances_list) > 0
if len(instances_list) == 1:
return instances_list[0]
new_data = instances_list[0].new()
for k in instances_list[0]._data_fields:
values = [results[k] for results in instances_list]
v0 = values[0]
if isinstance(v0, torch.Tensor):
values = torch.cat(values, dim=0)
elif isinstance(v0, np.ndarray):
values = np.concatenate(values, axis=0)
elif isinstance(v0, list):
values = list(itertools.chain(*values))
else:
raise ValueError(
f'Can not concat the {k} which is a {type(v0)}')
new_data[k] = values
return new_data
def __len__(self):
if len(self._data_fields):
for v in self.values():
return len(v)
else:
raise AssertionError('This is an empty `InstanceData`.')
| open-mmlab/mmdetection | mmdet/core/data_structures/instance_data.py | Python | apache-2.0 | 6,926 |
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
using Lucene.Net.Util.Automaton;
using NUnit.Framework;
using RandomizedTesting.Generators;
using System;
using System.Collections.Generic;
using Console = Lucene.Net.Util.SystemConsole;
namespace Lucene.Net.Search
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using AttributeSource = Lucene.Net.Util.AttributeSource;
using Automaton = Lucene.Net.Util.Automaton.Automaton;
using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
using BytesRef = Lucene.Net.Util.BytesRef;
using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton;
using CharsRef = Lucene.Net.Util.CharsRef;
using Codec = Lucene.Net.Codecs.Codec;
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using Field = Field;
using FilteredTermsEnum = Lucene.Net.Index.FilteredTermsEnum;
using IndexReader = Lucene.Net.Index.IndexReader;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
using RegExp = Lucene.Net.Util.Automaton.RegExp;
using Term = Lucene.Net.Index.Term;
using Terms = Lucene.Net.Index.Terms;
using TermsEnum = Lucene.Net.Index.TermsEnum;
using TestUtil = Lucene.Net.Util.TestUtil;
using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
/// <summary>
/// Create an index with random unicode terms
/// Generates random regexps, and validates against a simple impl.
/// </summary>
    [TestFixture]
    public class TestRegexpRandom2 : LuceneTestCase
    {
        // Two searchers over the same reader: one is queried with the real
        // RegexpQuery, the other with the brute-force DumbRegexpQuery; their
        // hit sets must match.
        protected internal IndexSearcher searcher1;
        protected internal IndexSearcher searcher2;
        private IndexReader reader;
        private Directory dir;
        protected internal string fieldName;

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            dir = NewDirectory();
            fieldName = Random.NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
            RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
            Document doc = new Document();
            Field field = NewStringField(fieldName, "", Field.Store.NO);
            doc.Add(field);
            List<string> terms = new List<string>();
            int num = AtLeast(200);
            // Index one document per random unicode term, reusing the same
            // Document/Field instances for speed.
            for (int i = 0; i < num; i++)
            {
                string s = TestUtil.RandomUnicodeString(Random);
                field.SetStringValue(s);
                terms.Add(s);
                writer.AddDocument(doc);
            }
            if (Verbose)
            {
                // utf16 order
                terms.Sort();
                Console.WriteLine("UTF16 order:");
                foreach (string s in terms)
                {
                    Console.WriteLine(" " + UnicodeUtil.ToHexString(s));
                }
            }
            reader = writer.GetReader();
            searcher1 = NewSearcher(reader);
            searcher2 = NewSearcher(reader);
            writer.Dispose();
        }

        [TearDown]
        public override void TearDown()
        {
            reader.Dispose();
            dir.Dispose();
            base.TearDown();
        }

        /// <summary>
        /// a stupid regexp query that just blasts thru the terms </summary>
        private class DumbRegexpQuery : MultiTermQuery
        {
            private readonly Automaton automaton;

            internal DumbRegexpQuery(Term term, RegExpSyntax flags)
                : base(term.Field)
            {
                RegExp re = new RegExp(term.Text, flags);
                automaton = re.ToAutomaton();
            }

            protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
            {
                return new SimpleAutomatonTermsEnum(this, terms.GetEnumerator());
            }

            // Enumerates every term and runs the automaton over each one --
            // no term-index acceleration, used as the reference oracle.
            private sealed class SimpleAutomatonTermsEnum : FilteredTermsEnum
            {
                private readonly TestRegexpRandom2.DumbRegexpQuery outerInstance;
                private CharacterRunAutomaton runAutomaton;
                private readonly CharsRef utf16 = new CharsRef(10);

                internal SimpleAutomatonTermsEnum(TestRegexpRandom2.DumbRegexpQuery outerInstance, TermsEnum tenum)
                    : base(tenum)
                {
                    this.outerInstance = outerInstance;
                    runAutomaton = new CharacterRunAutomaton(outerInstance.automaton);
                    SetInitialSeekTerm(new BytesRef(""));
                }

                protected override AcceptStatus Accept(BytesRef term)
                {
                    // Terms are stored as UTF-8; convert back to UTF-16 before
                    // matching, since the automaton runs over chars.
                    UnicodeUtil.UTF8toUTF16(term.Bytes, term.Offset, term.Length, utf16);
                    return runAutomaton.Run(utf16.Chars, 0, utf16.Length) ? AcceptStatus.YES : AcceptStatus.NO;
                }
            }

            public override string ToString(string field)
            {
                return field.ToString() + automaton.ToString();
            }
        }

        /// <summary>
        /// test a bunch of random regular expressions </summary>
        [Test]
        public virtual void TestRegexps()
        {
            // we generate awful regexps: good for testing.
            // but for preflex codec, the test can be very slow, so use less iterations.
            int num = Codec.Default.Name.Equals("Lucene3x", StringComparison.Ordinal) ? 100 * RandomMultiplier : AtLeast(1000);
            for (int i = 0; i < num; i++)
            {
                string reg = AutomatonTestUtil.RandomRegexp(Random);
                if (Verbose)
                {
                    Console.WriteLine("TEST: regexp=" + reg);
                }
                AssertSame(reg);
            }
        }

        /// <summary>
        /// check that the # of hits is the same as from a very
        /// simple regexpquery implementation.
        /// </summary>
        protected internal virtual void AssertSame(string regexp)
        {
            RegexpQuery smart = new RegexpQuery(new Term(fieldName, regexp), RegExpSyntax.NONE);
            DumbRegexpQuery dumb = new DumbRegexpQuery(new Term(fieldName, regexp), RegExpSyntax.NONE);
            TopDocs smartDocs = searcher1.Search(smart, 25);
            TopDocs dumbDocs = searcher2.Search(dumb, 25);
            CheckHits.CheckEqual(smart, smartDocs.ScoreDocs, dumbDocs.ScoreDocs);
        }
    }
} | NightOwl888/lucenenet | src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs | C# | apache-2.0 | 7,611 |
#include <QtGui/QApplication>
#include "GameUI.h"
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
GameUI w(QString("XO4"), QSize(256, 256));
w.show();
return a.exec();
}
| Dovgalyuk/AIBattle | XO4/Visualizer.cpp | C++ | apache-2.0 | 193 |
'use strict'
// ** Constants
// NOTE(review): these defaults are not referenced anywhere in this module —
// logEvents() hard-codes its endpoint. Confirm whether they should be wired in.
const DEFAULT_HOST = 'localhost';
const DEFAULT_PORT = 8080;
// ** Dependencies
const WebSocket = require('ws');
// ** Libraries
const Service = require('../../lib/Service');
// ** Platform
// NOTE(review): `logger` is created but this module logs via console.log below.
const logger = require('nodus-framework').logging.createLogger();
/**
 * Connect to a WebSocket endpoint and run a simple round-trip latency probe:
 * on connect, send the current timestamp; on each echo, log the measured
 * round-trip time and schedule the next ping after 500ms.
 *
 * @param {string} [url='ws://localhost:3333/'] - WebSocket endpoint to probe.
 *   Defaults to the historical hard-coded endpoint, so existing callers are
 *   unaffected.
 * @returns {WebSocket} The connected socket, so callers can close it.
 */
function logEvents(url = 'ws://localhost:3333/') {
    const socket = new WebSocket(url);
    socket.on('open', function open() {
        console.log('connected');
        // Payload is the send time; the server is expected to echo it back.
        socket.send(Date.now().toString(), {mask: true});
    });
    socket.on('close', function close() {
        console.log('disconnected');
    });
    socket.on('message', function message(data, flags) {
        console.log('Roundtrip time: ' + (Date.now() - parseInt(data, 10)) + 'ms', flags);
        // Throttle: wait 500ms before the next ping.
        setTimeout(function timeout() {
            socket.send(Date.now().toString(), {mask: true});
        }, 500);
    });
    return socket;
}
// ** Exports
module.exports = logEvents; | bradserbu/nodus-server | examples/console/console.js | JavaScript | apache-2.0 | 935 |
/**
@file Connection_uLimeSDREntry.cpp
@author Lime Microsystems
@brief Implementation of uLimeSDR board connection.
*/
#include "ConnectionFT601.h"
#include "Logger.h"
using namespace lime;
#ifdef __unix__
// Background loop run on mUSBProcessingThread: pumps libusb events until
// mProcessUSBEvents is cleared (by the destructor). The 250ms timeout
// bounds how long shutdown can take.
void ConnectionFT601Entry::handle_libusb_events()
{
    struct timeval tv;
    tv.tv_sec = 0;
    tv.tv_usec = 250000;
    while(mProcessUSBEvents.load() == true)
    {
        int r = libusb_handle_events_timeout_completed(ctx, &tv, NULL);
        if(r != 0) lime::error("error libusb_handle_events %s", libusb_strerror(libusb_error(r)));
    }
}
#endif // __UNIX__
//! make a static-initialized entry in the registry
//! make a static-initialized entry in the registry
// The function-local static runs the entry's constructor exactly once, on
// first call, registering "FT601" with the connection registry.
void __loadConnectionFT601Entry(void) //TODO fixme replace with LoadLibrary/dlopen
{
    static ConnectionFT601Entry FTDIEntry;
}
// Registers this entry under the name "FT601". On unix, also initializes a
// libusb session and starts the background event-pump thread; on Windows
// the FTDI driver path is used instead (currently commented out).
ConnectionFT601Entry::ConnectionFT601Entry(void):
    ConnectionRegistryEntry("FT601")
{
#ifndef __unix__
    //m_pDriver = new CDriverInterface();
#else
    int r = libusb_init(&ctx); //initialize the library for the session we just declared
    if(r < 0)
        lime::error("Init Error %i", r); //there was an error
    libusb_set_debug(ctx, 3); //set verbosity level to 3, as suggested in the documentation
    mProcessUSBEvents.store(true);
    mUSBProcessingThread = std::thread(&ConnectionFT601Entry::handle_libusb_events, this);
#endif
}
// Stops the event-pump thread (unix) and tears down the libusb session.
// The store/join pair guarantees the thread has exited before libusb_exit.
ConnectionFT601Entry::~ConnectionFT601Entry(void)
{
#ifndef __unix__
    //delete m_pDriver;
#else
    mProcessUSBEvents.store(false);
    mUSBProcessingThread.join();
    libusb_exit(ctx);
#endif
}
// Enumerate attached FT601 devices and return one ConnectionHandle per
// device. Windows uses the FTDI D3XX device-info list; unix walks the
// libusb device list looking for VID 0x0403 / PID 0x601F. On unix the
// `hint.serial` field, when non-empty, filters the results by serial number.
std::vector<ConnectionHandle> ConnectionFT601Entry::enumerate(const ConnectionHandle &hint)
{
    std::vector<ConnectionHandle> handles;
#ifndef __unix__
    FT_STATUS ftStatus=FT_OK;
    static DWORD numDevs = 0;
    ftStatus = FT_CreateDeviceInfoList(&numDevs);
    if (!FT_FAILED(ftStatus) && numDevs > 0)
    {
        DWORD Flags = 0;
        char SerialNumber[16] = { 0 };
        char Description[32] = { 0 };
        for (DWORD i = 0; i < numDevs; i++)
        {
            ftStatus = FT_GetDeviceInfoDetail(i, &Flags, nullptr, nullptr, nullptr, SerialNumber, Description, nullptr);
            if (!FT_FAILED(ftStatus))
            {
                ConnectionHandle handle;
                handle.media = Flags & FT_FLAGS_SUPERSPEED ? "USB 3" : Flags & FT_FLAGS_HISPEED ? "USB 2" : "USB";
                handle.name = Description;
                handle.index = i;
                handle.serial = SerialNumber;
                handles.push_back(handle);
            }
        }
    }
#else
    libusb_device **devs; //pointer to pointer of device, used to retrieve a list of devices
    int usbDeviceCount = libusb_get_device_list(ctx, &devs);
    if (usbDeviceCount < 0) {
        lime::error("failed to get libusb device list: %s", libusb_strerror(libusb_error(usbDeviceCount)));
        return handles;
    }
    libusb_device_descriptor desc;
    for(int i=0; i<usbDeviceCount; ++i)
    {
        int r = libusb_get_device_descriptor(devs[i], &desc);
        if(r<0)
            lime::error("failed to get device description");
        int pid = desc.idProduct;
        int vid = desc.idVendor;
        if( vid == 0x0403)
        {
            if(pid == 0x601F)
            {
                // Device must be openable to read its string descriptors;
                // otherwise it is skipped entirely.
                libusb_device_handle *tempDev_handle(nullptr);
                if(libusb_open(devs[i], &tempDev_handle) != 0 || tempDev_handle == nullptr)
                    continue;
                ConnectionHandle handle;
                //check operating speed
                int speed = libusb_get_device_speed(devs[i]);
                if(speed == LIBUSB_SPEED_HIGH)
                    handle.media = "USB 2.0";
                else if(speed == LIBUSB_SPEED_SUPER)
                    handle.media = "USB 3.0";
                else
                    handle.media = "USB";
                //read device name
                char data[255];
                memset(data, 0, 255);
                int st = libusb_get_string_descriptor_ascii(tempDev_handle, LIBUSB_CLASS_COMM, (unsigned char*)data, 255);
                if(st < 0)
                    lime::error("Error getting usb descriptor");
                else
                    handle.name = std::string(data, size_t(st));
                handle.addr = std::to_string(int(pid))+":"+std::to_string(int(vid));
                if (desc.iSerialNumber > 0)
                {
                    r = libusb_get_string_descriptor_ascii(tempDev_handle,desc.iSerialNumber,(unsigned char*)data, sizeof(data));
                    if(r<0)
                        lime::error("failed to get serial number");
                    else
                        handle.serial = std::string(data, size_t(r));
                }
                libusb_close(tempDev_handle);
                //add handle conditionally, filter by serial number
                if (hint.serial.empty() or hint.serial == handle.serial)
                {
                    handles.push_back(handle);
                }
            }
        }
    }
    libusb_free_device_list(devs, 1);
#endif
    return handles;
}
// Factory method: builds a ConnectionFT601 for a handle previously returned
// by enumerate(). Ownership of the returned connection passes to the caller.
IConnection *ConnectionFT601Entry::make(const ConnectionHandle &handle)
{
#ifndef __unix__
    return new ConnectionFT601(mFTHandle, handle);
#else
    return new ConnectionFT601(ctx, handle);
#endif
}
| romeojulietthotel/LimeSuite | src/ConnectionFTDI/ConnectionFT601Entry.cpp | C++ | apache-2.0 | 5,428 |
from ajenti.api import *
from ajenti.plugins import *
# Plugin manifest consumed by Ajenti's plugin loader.
info = PluginInfo(
    title='Resource Manager',
    icon='link',
    dependencies=[
    ],
)
def init():
    # Called by Ajenti when the plugin is loaded; the deferred import
    # registers this plugin's server-side components as a side effect.
    import server
import Ember from 'ember';
/**
* @module ember-osf
* @submodule mixins
*/
/**
* Controller mixin that implements common actions performed on nodes.
* @class NodeActionsMixin
* @extends Ember.Mixin
*/
export default Ember.Mixin.create({
    /**
     * The node to perform these actions on. If not specified, defaults to the model hook.
     * @property node
     * @type DS.Model
     */
    node: null,
    model: null,
    // Resolution order: an explicitly set `node` wins over the route model.
    _node: Ember.computed.or('node', 'model'),
    /**
     * Helper method that affiliates an institution with a node.
     *
     * @method _affiliateNode
     * @private
     * @param {DS.Model} node Node record
     * @param {Object} institution Institution record
     * @return {Promise} Returns a promise that resolves to the updated node with the newly created institution relationship
     */
    _affiliateNode(node, institution) {
        node.get('affiliatedInstitutions').pushObject(institution);
        return node.save();
    },
    actions: {
        /**
         * Update a node
         *
         * @method updateNode
         * @param {String} title New title of the node
         * @param {String} description New Description of the node
         * @param {String} category New node category
         * @param {Boolean} isPublic Should this node be publicly-visible?
         * @return {Promise} Returns a promise that resolves to the updated node
         */
        updateNode(title, description, category, isPublic) {
            var node = this.get('_node');
            if (title) {
                node.set('title', title);
            }
            if (category) {
                node.set('category', category);
            }
            if (description) {
                node.set('description', description);
            }
            // NOTE(review): `undefined` passes this guard, so calling with the
            // fourth argument omitted sets `public` to undefined — confirm
            // whether callers rely on passing null to mean "leave unchanged".
            if (isPublic !== null) {
                node.set('public', isPublic);
            }
            return node.save();
        },
        /**
         * Delete a node
         *
         * @method deleteNode
         * @return {Promise} Returns a promise that resolves after the deletion of the node.
         */
        deleteNode() {
            var node = this.get('_node');
            return node.destroyRecord();
        },
        /**
         * Affiliate a node with an institution
         *
         * @method affiliateNode
         * @param {String} institutionId ID of the institutution to be affiliated
         * @return {Promise} Returns a promise that resolves to the updated node
         * with the newly affiliated institution relationship
         */
        affiliateNode(institutionId) {
            var node = this.get('_node');
            return this.store.findRecord('institution', institutionId)
                .then(institution => this._affiliateNode(node, institution));
        },
        /**
         * Unaffiliate a node with an institution
         *
         * @method unaffiliateNode
         * @param {Object} institution Institution relationship to be removed from node
         * @return {Promise} Returns a promise that resolves to the updated node
         * with the affiliated institution relationship removed.
         */
        unaffiliateNode(institution) {
            var node = this.get('_node');
            node.get('affiliatedInstitutions').removeObject(institution);
            return node.save();
        },
        /**
         * Add a contributor to a node
         *
         * @method addContributor
         * @param {String} userId ID of user that will be a contributor on the node
         * @param {String} permission User permission level. One of "read", "write", or "admin". Default: "write".
         * @param {Boolean} isBibliographic Whether user will be included in citations for the node. "default: true"
         * @param {Boolean} sendEmail Whether user will receive an email when added. "default: true"
         * @return {Promise} Returns a promise that resolves to the newly created contributor object.
         */
        addContributor(userId, permission, isBibliographic, sendEmail) { // jshint ignore:line
            // Parameters are named for documentation only; all arguments are
            // forwarded verbatim to the node model.
            return this.get('_node').addContributor(...arguments);
        },
        /**
         * Bulk add contributors to a node
         *
         * @method addContributors
         * @param {Array} contributors Array of objects containing contributor permission, bibliographic, and userId keys
         * @param {Boolean} sendEmail Whether user will receive an email when added. "default: true"
         * @return {Promise} Returns a promise that resolves to an array of added contributors
         */
        addContributors(contributors, sendEmail) { // jshint ignore:line
            return this.get('_node').addContributors(...arguments);
        },
        /**
         * Remove a contributor from a node
         *
         * @method removeContributor
         * @param {Object} contributor Contributor relationship that will be removed from node
         * @return {Promise} Returns a promise that will resolve upon contributor deletion.
         * User itself will not be removed.
         */
        removeContributor(contributor) {
            var node = this.get('_node');
            return node.removeContributor(contributor);
        },
        /**
         * Update contributors of a node. Makes a bulk request to the APIv2.
         *
         * @method updateContributors
         * @param {Contributor[]} contributors Contributor relationships on the node.
         * @param {Object} permissionsChanges Dictionary mapping contributor ids to desired permissions.
         * @param {Object} bibliographicChanges Dictionary mapping contributor ids to desired bibliographic statuses
         * @return {Promise} Returns a promise that resolves to the updated node
         * with edited contributor relationships.
         */
        updateContributors(contributors, permissionsChanges, bibliographicChanges) { // jshint ignore:line
            return this.get('_node').updateContributors(...arguments);
        },
        /**
         * Update contributors of a node. Makes a bulk request to the APIv2.
         *
         * @method updateContributor
         * @param {Contributor} contributor relationship on the node.
         * @param {string} permissions desired permissions.
         * @param {boolean} bibliographic desired bibliographic statuses
         * @return {Promise} Returns a promise that resolves to the updated node
         * with edited contributor relationships.
         */
        updateContributor(contributor, permissions, bibliographic) { // jshint ignore:line
            return this.get('_node').updateContributor(...arguments);
        },
        /**
         * Reorder contributors on a node, and manually updates store.
         *
         * @method reorderContributors
         * @param {Object} contributor Contributor record to be modified
         * @param {Integer} newIndex Contributor's new position in the list
         * @param {Array} contributors New contributor list in correct order
         * @return {Promise} Returns a promise that resolves to the updated contributor.
         */
        reorderContributors(contributor, newIndex, contributors) {
            contributor.set('index', newIndex);
            return contributor.save().then(() => {
                // The server reindexes the other contributors; mirror that
                // locally by pushing updated payloads into the store rather
                // than refetching every record.
                contributors.forEach((contrib, index) => {
                    if (contrib.id !== contributor.id) {
                        var payload = contrib.serialize();
                        payload.data.attributes = {
                            permission: contrib.get('permission'),
                            bibliographic: contrib.get('bibliographic'),
                            index: index
                        };
                        payload.data.id = contrib.get('id');
                        this.store.pushPayload(payload);
                    }
                });
            });
        },
        /**
         * Add a child (component) to a node.
         *
         * @method addChild
         * @param {String} title Title for the child
         * @param {String} description Description for the child
         * @param {String} category Category for the child
         * @return {Promise} Returns a promise that resolves to the newly created child node.
         */
        addChild(title, description, category) {
            return this.get('_node').addChild(title, description, category);
        },
        /**
         * Add a node link (pointer) to another node
         *
         * @method addNodeLink
         * @param {String} targetNodeId ID of the node for which you wish to create a pointer
         * @return {Promise} Returns a promise that resolves to model for the newly created NodeLink
         */
        addNodeLink(targetNodeId) {
            var node = this.get('_node');
            var nodeLink = this.store.createRecord('node-link', {
                target: targetNodeId
            });
            node.get('nodeLinks').pushObject(nodeLink);
            return node.save().then(() => nodeLink);
        },
        /**
         * Remove a node link (pointer) to another node
         *
         * @method removeNodeLink
         * @param {Object} nodeLink nodeLink record to be destroyed.
         * @return {Promise} Returns a promise that resolves after the node link has been removed. This does not delete
         * the target node itself.
         */
        removeNodeLink(nodeLink) {
            return nodeLink.destroyRecord();
        }
    }
});
| pattisdr/ember-osf | addon/mixins/node-actions.js | JavaScript | apache-2.0 | 9,519 |
package act.app.event;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import act.event.ActEventListener;
/**
 * Marker interface for listeners that receive {@link SysEvent} notifications.
 *
 * @param <EVENT_TYPE> the concrete {@link SysEvent} subtype this listener handles
 */
public interface SysEventListener<EVENT_TYPE extends SysEvent> extends ActEventListener<EVENT_TYPE> {
}
| actframework/actframework | src/main/java/act/app/event/SysEventListener.java | Java | apache-2.0 | 816 |
/*
* Copyright 2012 hbz NRW (http://www.hbz-nrw.de/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.nrw.hbz.regal.sync.ingest;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import models.ObjectType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import archive.fedora.XmlUtils;
import de.nrw.hbz.regal.sync.extern.DigitalEntity;
import de.nrw.hbz.regal.sync.extern.DigitalEntityBuilderInterface;
import de.nrw.hbz.regal.sync.extern.DigitalEntityRelation;
import de.nrw.hbz.regal.sync.extern.Md5Checksum;
import de.nrw.hbz.regal.sync.extern.RelatedDigitalEntity;
import de.nrw.hbz.regal.sync.extern.StreamType;
/**
* @author Jan Schnasse schnasse@hbz-nrw.de
*
*/
public class EdowebDigitalEntityBuilder implements
DigitalEntityBuilderInterface {
    final static Logger logger = LoggerFactory
            .getLogger(EdowebDigitalEntityBuilder.class);
    // Maps a METS file id (fptr FILEID) to the entity that owns that file.
    Map<String, DigitalEntity> filedIds2DigitalEntity = new HashMap<String, DigitalEntity>();
    // Maps a METS fileSec GROUPID to its corresponding file id.
    Map<String, String> groupIds2FileIds = new HashMap<String, String>();
    // Per-prefix counters used by getId() to mint unique child pids.
    Map<String, List<String>> idmap = new HashMap<String, List<String>>();
    /**
     * Builds the full entity tree for one digitool object: the simple bean
     * first, then METS-specific structure when a STRUCT_MAP stream exists,
     * then siblings/children, finally pruning empty volumes.
     */
    @Override
    public DigitalEntity build(String location, String pid) {
        DigitalEntity dtlDe = buildSimpleBean(location, pid);
        if (dtlDe.getStream(StreamType.STRUCT_MAP) != null) {
            logger.debug(pid + " is a mets object");
            dtlDe = prepareMetsStructure(dtlDe);
            dtlDe = addSiblings(dtlDe);
            dtlDe = addChildren(dtlDe);
        } else {
            dtlDe = addSiblings(dtlDe);
            dtlDe = addDigitoolChildren(dtlDe);
            dtlDe = addChildren(dtlDe);
        }
        dtlDe = removeEmptyVolumes(dtlDe);
        return dtlDe;
    }
    /**
     * Drops "part_of" relations pointing at volume entities that have no
     * related entities of their own; all other relations are kept.
     */
    private DigitalEntity removeEmptyVolumes(final DigitalEntity entity) {
        DigitalEntity dtlDe = entity;
        List<RelatedDigitalEntity> result = new Vector<RelatedDigitalEntity>();
        List<RelatedDigitalEntity> related = entity.getRelated();
        for (RelatedDigitalEntity d : related) {
            if (DigitalEntityRelation.part_of.toString().equals(d.relation)) {
                if (ObjectType.volume.toString()
                        .equals(d.entity.getUsageType())) {
                    // Empty volume: skip it (do not copy into result).
                    if (d.entity.getRelated().isEmpty())
                        continue;
                }
            }
            result.add(d);
        }
        dtlDe.setRelated(result);
        return dtlDe;
    }
    /**
     * Builds a single entity from its digitool XML file: label, metadata
     * streams, type, provenance fields, catalog id (best effort), data
     * stream, and the link to its parent.
     */
    DigitalEntity buildSimpleBean(String location, String pid) {
        DigitalEntity dtlDe = new DigitalEntity(location, pid);
        dtlDe.setXml(new File(dtlDe.getLocation() + File.separator + pid
                + ".xml"));
        Element root = getXmlRepresentation(dtlDe);
        dtlDe.setLabel(getLabel(root));
        loadMetadataStreams(dtlDe, root);
        setType(dtlDe);
        dtlDe.setImportedFrom("http://klio.hbz-nrw.de:1801/webclient/DeliveryManager?pid="
                + pid + "&custom_att_2=default_viewer");
        dtlDe.setCreatedBy("digitool");
        try {
            // A missing catalog id is expected for some objects; log and go on.
            setCatalogId(dtlDe);
            logger.debug("p2a: " + pid + "," + dtlDe.getLegacyId());
        } catch (CatalogIdNotFoundException e) {
            logger.debug(pid + "has no catalog id");
        }
        loadDataStream(dtlDe, root);
        linkToParent(dtlDe);
        return dtlDe;
    }
    /**
     * Tries to find the catalog id (aleph) by reading MARC controlfield 001
     * from the entity's MARC stream, storing it as identifier and legacy id.
     *
     * @param dtlDe
     *            the digital entity
     * @throws CatalogIdNotFoundException
     *             if not exactly one controlfield 001 is present, or the
     *             MARC stream cannot be read
     */
    void setCatalogId(DigitalEntity dtlDe) {
        try {
            Element root = XmlUtils.getDocument(dtlDe
                    .getStream(StreamType.MARC).getFile());
            XPathFactory factory = XPathFactory.newInstance();
            XPath xpath = factory.newXPath();
            xpath.setNamespaceContext(new MarcNamespaceContext());
            XPathExpression expr = xpath.compile("//controlfield[@tag='001']");
            Object result = expr.evaluate(root, XPathConstants.NODESET);
            NodeList nodes = (NodeList) result;
            if (nodes.getLength() != 1) {
                throw new CatalogIdNotFoundException("Found "
                        + nodes.getLength() + " ids");
            }
            String id = nodes.item(0).getTextContent();
            dtlDe.addIdentifier(id);
            dtlDe.setLegacyId(id);
            // logger.debug(dtlDe.getPid() + " add id " + id);
        } catch (Exception e) {
            // Wrap any parsing/XPath failure in the domain exception.
            throw new CatalogIdNotFoundException(e);
        }
    }
    /**
     * Reads the entity type from the single {@code partition_c} element in
     * the CONTROL stream. Throws TypeNotFoundException when not exactly one
     * element is found, XPathException on evaluation failure.
     */
    private void setType(DigitalEntity dtlBean) {
        Element root = XmlUtils.getDocument(dtlBean.getStream(
                StreamType.CONTROL).getFile());
        XPathFactory factory = XPathFactory.newInstance();
        XPath xpath = factory.newXPath();
        try {
            XPathExpression expr = xpath.compile("//partition_c");
            Object result = expr.evaluate(root, XPathConstants.NODESET);
            NodeList nodes = (NodeList) result;
            if (nodes.getLength() != 1) {
                throw new TypeNotFoundException("Found " + nodes.getLength()
                        + " types");
            }
            dtlBean.setType(nodes.item(0).getTextContent());
            // logger.debug(dtlBean.getPid() + " setType to: " +
            // dtlBean.getType());
        } catch (XPathExpressionException e) {
            throw new XPathException(e);
        }
    }
    /**
     * METS-specific preparation: builds the volume/issue tree from the
     * struct map, then records the GROUPID-to-FILEID mapping from fileSec.
     */
    private DigitalEntity prepareMetsStructure(final DigitalEntity entity) {
        DigitalEntity dtlDe = entity;
        dtlDe = createTree(entity);
        mapGroupIdsToFileIds(entity);
        return dtlDe;
    }
    /**
     * Fills {@code groupIds2FileIds} from the FILE_SEC stream: every element
     * at depth five contributes its GROUPID -> ID attribute pair. Failures
     * are logged and swallowed (the mapping is best effort).
     */
    private void mapGroupIdsToFileIds(DigitalEntity entity) {
        try {
            Element root = XmlUtils.getDocument(entity.getStream(
                    StreamType.FILE_SEC).getFile());
            XPathFactory xpathFactory = XPathFactory.newInstance();
            XPath xpath = xpathFactory.newXPath();
            NodeList volumes = (NodeList) xpath.evaluate("/*/*/*/*/*", root,
                    XPathConstants.NODESET);
            for (int i = 0; i < volumes.getLength(); i++) {
                Element item = (Element) volumes.item(i);
                String groupId = item.getAttribute("GROUPID");
                String fileId = item.getAttribute("ID");
                // logger.debug(groupId + " to " + fileId);
                groupIds2FileIds.put(groupId, fileId);
            }
        } catch (XPathExpressionException e) {
            logger.warn("", e);
        } catch (Exception e) {
            logger.debug("", e);
        }
    }
// private String normalizeLabel(final String volumeLabel) {
//
// String[] parts = volumeLabel.split("\\s|-");
// String camelCaseString = "";
// for (String part : parts) {
// camelCaseString = camelCaseString + toProperCase(part);
// }
// return camelCaseString.replace(":", "-").replace("/", "-");
// }
//
// String toProperCase(String s) {
// return s.substring(0, 1).toUpperCase() + s.substring(1).toLowerCase();
// }
    /**
     * Builds the volume/issue tree from the STRUCT_MAP stream. First-level
     * div elements become volume entities; their child divs become issues.
     * A volume with no issues is itself marked as a rootElement. File ids
     * under each node are registered via mapFileIdToDigitalEntity.
     * Failures are logged and leave the entity unchanged beyond what was
     * already added.
     */
    private DigitalEntity createTree(final DigitalEntity entity) {
        DigitalEntity dtlDe = entity;
        try {
            Element root = XmlUtils.getDocument(entity.getStream(
                    StreamType.STRUCT_MAP).getFile());
            List<Element> firstLevel = XmlUtils.getElements("/*/*/*/*/*", root,
                    null);
            logger.debug("create volumes for: " + entity.getPid());
            for (Element firstLevelElement : firstLevel) {
                DigitalEntity v = createDigitalEntity(ObjectType.volume,
                        firstLevelElement.getAttribute("LABEL"),
                        dtlDe.getPid(), dtlDe.getLocation());
                v.setOrder(firstLevelElement.getAttribute("ORDER"));
                dtlDe.addRelated(v, DigitalEntityRelation.part_of.toString());
                logger.debug("Create volume " + v.getPid());
                List<Element> issues = XmlUtils.getElements("./div",
                        firstLevelElement, null);
                if (issues == null || issues.isEmpty()) {
                    // No issues below this div: the volume doubles as the
                    // root element and owns the files directly.
                    v.setUsageType(ObjectType.rootElement.toString());
                    mapFileIdToDigitalEntity(v, firstLevelElement);
                } else {
                    for (Element issue : issues) {
                        DigitalEntity i = createDigitalEntity(
                                ObjectType.rootElement,
                                issue.getAttribute("LABEL"), v.getPid(),
                                dtlDe.getLocation());
                        i.setOrder(firstLevelElement.getAttribute("ORDER"));
                        logger.debug("Create issue " + i.getPid());
                        v.addRelated(i,
                                DigitalEntityRelation.part_of.toString());
                        mapFileIdToDigitalEntity(i, issue);
                    }
                }
            }
        } catch (XPathException e) {
            logger.warn(entity.getPid() + " no volumes found.");
        } catch (Exception e) {
            logger.debug("", e);
        }
        return dtlDe;
    }
    /**
     * Registers every descendant {@code fptr} FILEID under {@code root} as
     * belonging to entity {@code de} in {@code filedIds2DigitalEntity}.
     */
    private void mapFileIdToDigitalEntity(DigitalEntity de, Element root) {
        final String regex = ".//fptr";
        List<Element> files = XmlUtils.getElements(regex, root, null);
        for (Element f : files) {
            String fileId = f.getAttribute("FILEID");
            logger.debug("Key: " + fileId + " Value: " + de.getPid());
            filedIds2DigitalEntity.put(fileId, de);
        }
    }
    /**
     * Creates a child entity whose pid is the parent pid plus a per-parent
     * running counter ("parent-0", "parent-1", ...), located under the
     * parent's directory.
     */
    private DigitalEntity createDigitalEntity(ObjectType type, String label,
            String parentPid, String location) {
        String prefix = parentPid;
        String pid = prefix + "-" + getId(prefix);
        DigitalEntity entity = new DigitalEntity(location + File.separator
                + pid, pid);
        entity.setLabel(label);
        entity.setParentPid(parentPid);
        entity.setUsageType(type.toString());
        return entity;
    }
    /**
     * Returns the next id for {@code prefix}: "0" on first call, then "1",
     * and so on. State is kept in {@code idmap}; an (unreachable in
     * practice) overflow guard throws before the counter could wrap.
     */
    private String getId(String prefix) {
        List<String> ids = null;
        if (idmap.containsKey(prefix)) {
            ids = idmap.get(prefix);
        } else {
            ids = new ArrayList<String>();
        }
        if (ids.size() >= Integer.MAX_VALUE) {
            throw new java.lang.ArrayIndexOutOfBoundsException(
                    "We have serious problem here!");
        }
        String id = Integer.toString(ids.size());
        ids.add(id);
        idmap.put(prefix, ids);
        return id;
    }
/**
 * Reads the first stream_ref section of the DigiTool xml and attaches
 * the referenced data file (plus mime type and file id) to the entity.
 * If the referenced file does not exist on disk an error is logged and
 * the entity is left unchanged.
 */
private void loadDataStream(DigitalEntity dtlDe, Element root) {
// only the first stream_ref element is considered
Node streamRef = root.getElementsByTagName("stream_ref").item(0);
String filename = ((Element) streamRef)
.getElementsByTagName("file_name").item(0).getTextContent();
// data files live under <location>/<pid>/<filename>
File file = new File(dtlDe.getLocation() + File.separator
+ dtlDe.getPid() + File.separator + filename);
String mime = ((Element) streamRef).getElementsByTagName("mime_type")
.item(0).getTextContent();
String fileId = ((Element) streamRef).getElementsByTagName("file_id")
.item(0).getTextContent();
if (!file.exists()) {
logger.error("The file " + filename
+ " found in DTL Xml does not exist.");
return;
}
logger.debug("found data stream " + file + "," + mime + ","
+ StreamType.DATA + "," + fileId);
dtlDe.setLabel(root.getElementsByTagName("label").item(0)
.getTextContent());
dtlDe.addStream(file, mime, StreamType.DATA, fileId, getMd5(file));
}
/**
 * Attaches all metadata streams (control section plus every md section)
 * of the DigiTool xml to the entity. The md "type" element decides which
 * StreamType the section is stored under; unknown types are ignored.
 */
private void loadMetadataStreams(DigitalEntity dtlDe, Element root) {
	setXmlStream(dtlDe, root.getElementsByTagName("control").item(0),
			StreamType.CONTROL);
	dtlDe.setUsageType(root.getElementsByTagName("usage_type").item(0)
			.getTextContent());
	NodeList list = root.getElementsByTagName("md");
	for (int i = 0; i < list.getLength(); i++) {
		Node item = list.item(i);
		String type = getItemType((Element) item);
		// plain equality tests: use equals() instead of compareTo() == 0
		// (constant-first form is also null-safe)
		if ("dc".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.DC);
		} else if ("preservation_md".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.PREMIS);
		} else if ("text_md".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.TEXT);
		} else if ("rights_md".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.RIGHTS);
		} else if ("jhove".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.JHOVE);
		} else if ("changehistory_md".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.HIST);
		} else if ("marc".equals(type)) {
			// marc records need special treatment of the record leader
			setMarcStream(dtlDe, item, StreamType.MARC);
		} else if ("metsHdr".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.METS_HDR);
		} else if ("structMap".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.STRUCT_MAP);
		} else if ("fileSec".equals(type)) {
			setXmlStream(dtlDe, item, StreamType.FILE_SEC);
		}
	}
}
/**
 * Serializes the given xml node to a hidden file
 * (.&lt;pid&gt;_&lt;type&gt;.xml) in the entity's location and attaches it
 * as an application/xml stream. Failures are logged but do not abort
 * the build.
 */
@SuppressWarnings("deprecation")
private void setXmlStream(DigitalEntity dtlDe, Node item, StreamType type) {
	try {
		File file = new File(dtlDe.getLocation() + File.separator + "."
				+ dtlDe.getPid() + "_" + type.toString() + ".xml");
		File stream = XmlUtils.stringToFile(file,
				XmlUtils.nodeToString(item));
		String md5Hash = getMd5(stream);
		dtlDe.addStream(stream, "application/xml", type, null, md5Hash);
	} catch (Exception e) {
		// use the class logger instead of printStackTrace, consistent
		// with the rest of this class
		logger.warn("", e);
	}
}
/**
 * Serializes the marc record of the given md section (via getMarc, which
 * patches namespace and leader) to a hidden file and attaches it as an
 * application/xml stream. Failures are logged but do not abort the
 * build.
 */
@SuppressWarnings("deprecation")
private void setMarcStream(DigitalEntity dtlDe, Node item, StreamType type) {
	try {
		File file = new File(dtlDe.getLocation() + File.separator + "."
				+ dtlDe.getPid() + "_" + type.toString() + ".xml");
		File stream = XmlUtils.stringToFile(file, getMarc(item));
		String md5Hash = getMd5(stream);
		dtlDe.addStream(stream, "application/xml", type, null, md5Hash);
	} catch (Exception e) {
		// use the class logger instead of printStackTrace, consistent
		// with the rest of this class
		logger.warn("", e);
	}
}
/**
 * Computes the md5 checksum of the given file.
 */
private String getMd5(File stream) {
	return new Md5Checksum().getMd5Checksum(stream);
}
/**
 * Extracts the first marc "record" element from the given md section,
 * forces the MARC21/slim default namespace onto it and rewrites the
 * DigiTool record leader into the standard form expected downstream.
 */
private String getMarc(Node item) {
Element marc = (Element) ((Element) item)
.getElementsByTagName("record").item(0);
marc.setAttribute("xmlns", "http://www.loc.gov/MARC21/slim");
String xmlStr = XmlUtils.nodeToString(marc);
// patch the record leader string in the serialized xml
xmlStr = xmlStr.replaceAll("nam 2200000 u 4500",
"00000 a2200000 4500");
return xmlStr;
}
/**
 * Reads the text of the first "type" child element of an md section,
 * e.g. "dc" or "marc".
 */
private String getItemType(Element root) {
return root.getElementsByTagName("type").item(0).getTextContent();
}
/**
 * Derives a label for an entity: the marc 245$a title field is
 * preferred, the DigiTool "label" element is the fallback.
 */
private String getLabel(Element root) {
	List<Element> titles = XmlUtils.getElements(
			"//datafield[@tag='245']/subfield[@code='a']", root, null);
	if (titles == null || titles.isEmpty()) {
		return root.getElementsByTagName("label").item(0).getTextContent();
	}
	return titles.get(0).getTextContent();
}
/**
 * Parses the entity's xml descriptor (&lt;location&gt;/&lt;pid&gt;.xml)
 * into a dom element.
 */
private Element getXmlRepresentation(final DigitalEntity dtlDe) {
	String path = dtlDe.getLocation() + File.separator + dtlDe.getPid()
			+ ".xml";
	return XmlUtils.getDocument(new File(path));
}
/**
 * Attaches all "manifestation" relations of the entity as siblings.
 * For each relation element in the entity's xml the referenced pid is
 * resolved to a simple bean and linked under the relation's usage_type.
 *
 * @param entity entity whose xml is scanned for relations
 * @return the passed entity, with siblings attached
 */
private DigitalEntity addSiblings(final DigitalEntity entity) {
DigitalEntity dtlDe = entity;
Element root;
try {
root = XmlUtils.getDocument(entity.getXml());
NodeList list = root.getElementsByTagName("relation");
for (int i = 0; i < list.getLength(); i++) {
Node item = list.item(i);
// each relation carries pid, usage_type and type child elements
String relPid = ((Element) item).getElementsByTagName("pid")
.item(0).getTextContent();
String usageType = ((Element) item)
.getElementsByTagName("usage_type").item(0)
.getTextContent();
String type = ((Element) item).getElementsByTagName("type")
.item(0).getTextContent();
if (type.compareTo(DigitalEntityRelation.manifestation
.toString()) == 0) {
DigitalEntity b = buildSimpleBean(entity.getLocation(),
relPid);
logger.debug("Add sibling " + b.getPid() + " to "
+ entity.getPid() + " utilizing relation "
+ usageType);
dtlDe.addRelated(b, usageType);
}
}
} catch (Exception e) {
// best effort: a broken xml must not abort the whole build
logger.warn("", e);
}
return dtlDe;
}
/**
 * Builds and attaches all child entities referenced via "include"
 * relations. Only pdf children that are not the ARCHIVE version and zip
 * children that are not the VIEW version are followed; each child is
 * hooked into the tree via addToTree.
 *
 * @param entity the parent entity
 * @return the passed entity, with children attached
 */
private DigitalEntity addChildren(final DigitalEntity entity) {
	DigitalEntity dtlDe = entity;
	try {
		Element root = XmlUtils.getDocument(entity.getXml());
		NodeList list = root.getElementsByTagName("relation");
		for (int i = 0; i < list.getLength(); i++) {
			Node item = list.item(i);
			String relPid = ((Element) item).getElementsByTagName("pid")
					.item(0).getTextContent();
			String usageType = ((Element) item)
					.getElementsByTagName("usage_type").item(0)
					.getTextContent();
			String type = ((Element) item).getElementsByTagName("type")
					.item(0).getTextContent();
			String mimeType = ((Element) item)
					.getElementsByTagName("mime_type").item(0)
					.getTextContent();
			if (!type.equals(DigitalEntityRelation.include.toString())) {
				continue;
			}
			// the pdf and zip branches performed exactly the same work
			// before and have been merged into one
			boolean wantedPdf = mimeType.equals("application/pdf")
					&& usageType.compareTo(DigitalEntityRelation.ARCHIVE
							.toString()) != 0;
			boolean wantedZip = mimeType.equals("application/zip")
					&& usageType.compareTo(DigitalEntityRelation.VIEW
							.toString()) != 0;
			if (wantedPdf || wantedZip) {
				try {
					DigitalEntity b = build(entity.getLocation(), relPid);
					b.setUsageType(usageType);
					addToTree(dtlDe, b);
				} catch (Exception e) {
					// logger instead of printStackTrace, consistent with
					// the rest of this class
					logger.warn("", e);
				}
			}
		}
	} catch (Exception e) {
		logger.warn("", e);
	}
	return dtlDe;
}
/**
 * Attaches DigiTool children: every "include" relation with mime type
 * application/pdf whose usage_type is not ARCHIVE is built recursively
 * and linked directly below this entity as part_of.
 *
 * @param entity the parent entity
 * @return the passed entity, with children attached
 */
private DigitalEntity addDigitoolChildren(final DigitalEntity entity) {
DigitalEntity dtlDe = entity;
try {
Element root = XmlUtils.getDocument(entity.getXml());
NodeList list = root.getElementsByTagName("relation");
for (int i = 0; i < list.getLength(); i++) {
Node item = list.item(i);
// each relation carries pid, usage_type, type and mime_type
String relPid = ((Element) item).getElementsByTagName("pid")
.item(0).getTextContent();
String usageType = ((Element) item)
.getElementsByTagName("usage_type").item(0)
.getTextContent();
String type = ((Element) item).getElementsByTagName("type")
.item(0).getTextContent();
String mimeType = ((Element) item)
.getElementsByTagName("mime_type").item(0)
.getTextContent();
if (type.compareTo(DigitalEntityRelation.include.toString()) == 0
&& mimeType.equals("application/pdf")
&& (usageType.compareTo(DigitalEntityRelation.ARCHIVE
.toString()) != 0)) {
try {
DigitalEntity b = build(entity.getLocation(), relPid);
// logger.debug(b.getPid() + " is child of "
// + dtlDe.getPid());
b.setUsageType(usageType);
dtlDe.setIsParent(true);
b.setParentPid(dtlDe.getPid());
dtlDe.addRelated(b,
DigitalEntityRelation.part_of.toString());
} catch (Exception e) {
e.printStackTrace();
}
}
}
} catch (Exception e) {
// best effort: a broken xml must not abort the whole build
logger.warn("", e);
}
return dtlDe;
}
/**
 * Scans the entity's xml for part_of relations and, if one is found,
 * marks the entity as a child of the referenced pid. When several
 * part_of relations exist the last one wins.
 */
private void linkToParent(DigitalEntity dtlDe) {
	try {
		Element root = XmlUtils.getDocument(dtlDe.getXml());
		NodeList relations = root.getElementsByTagName("relation");
		for (int i = 0; i < relations.getLength(); i++) {
			Element relation = (Element) relations.item(i);
			String relPid = relation.getElementsByTagName("pid").item(0)
					.getTextContent();
			String type = relation.getElementsByTagName("type").item(0)
					.getTextContent();
			if (DigitalEntityRelation.part_of.toString().equals(type)) {
				dtlDe.setIsParent(false);
				dtlDe.setParentPid(relPid);
			}
		}
	} catch (Exception e) {
		logger.warn("", e);
	}
}
/**
 * Hooks the related entity into the tree below the matching parent of
 * dtlDe. If no parent can be determined the entity is dropped from the
 * tree (logged on info level).
 */
private void addToTree(DigitalEntity dtlDe, DigitalEntity related) {
	DigitalEntity parent = findParent(dtlDe, related);
	if (parent == null) {
		// fixed message wording: "is not longer" -> "is no longer"
		logger.info(related.getPid() + " is no longer part of tree.");
		return;
	}
	parent.setIsParent(true);
	related.setParentPid(parent.getPid());
	parent.addRelated(related, DigitalEntityRelation.part_of.toString());
	logger.debug(related.getPid() + " is child of " + parent.getPid());
}
/**
 * Looks up the structural parent of the related entity. Only
 * VIEW/VIEW_MAIN entities take part in the tree lookup; anything else is
 * attached directly to dtlDe. The lookup goes data-stream file id ->
 * group id -> owning entity; when a parent is found the related entity
 * is downgraded to a plain file node.
 *
 * @return the parent entity, dtlDe for non-view types, or null if the
 *         group id is unknown
 */
private DigitalEntity findParent(DigitalEntity dtlDe, DigitalEntity related) {
	if (!(related.getUsageType().compareTo("VIEW") == 0)
			&& !(related.getUsageType().compareTo("VIEW_MAIN") == 0)) {
		// use the class logger instead of System.out, consistent with
		// the rest of this class
		logger.debug("Related of wrong type: " + related.getPid());
		return dtlDe;
	}
	String groupId = related.getStream(StreamType.DATA).getFileId();
	String fileId = groupIds2FileIds.get(groupId);
	DigitalEntity parent = this.filedIds2DigitalEntity.get(fileId);
	if (parent != null) {
		related.setUsageType(ObjectType.file.toString());
	}
	return parent;
}
@SuppressWarnings("javadoc")
public class MarcNamespaceContext implements NamespaceContext {
/**
 * Maps the "marc" prefix to the MARC21/slim namespace and "xml" to the
 * standard xml namespace; every other prefix resolves to the null
 * namespace.
 */
public String getNamespaceURI(String prefix) {
if (prefix == null)
throw new NullPointerException("Null prefix");
else if ("marc".equals(prefix))
return "http://www.loc.gov/MARC21/slim";
else if ("xml".equals(prefix))
return XMLConstants.XML_NS_URI;
return XMLConstants.NULL_NS_URI;
}
// This method isn't necessary for XPath processing.
public String getPrefix(String uri) {
throw new UnsupportedOperationException();
}
// This method isn't necessary for XPath processing either.
@SuppressWarnings("rawtypes")
public Iterator getPrefixes(String uri) {
throw new UnsupportedOperationException();
}
}
// NOTE(review): presumably raised when an entity's type cannot be
// resolved -- confirm against callers elsewhere in this class
@SuppressWarnings({ "javadoc", "serial" })
public class TypeNotFoundException extends RuntimeException {
public TypeNotFoundException(String message) {
super(message);
}
}
// NOTE(review): presumably raised when a catalog id lookup fails --
// confirm against callers elsewhere in this class
@SuppressWarnings({ "javadoc", "serial" })
public class CatalogIdNotFoundException extends RuntimeException {
public CatalogIdNotFoundException(String message) {
super(message);
}
public CatalogIdNotFoundException(Throwable cause) {
super(cause);
}
}
// wraps xpath evaluation failures; caught e.g. around the volume lookup
// above, where it is interpreted as "no volumes found"
@SuppressWarnings({ "javadoc", "serial" })
public class XPathException extends RuntimeException {
public XPathException(Throwable cause) {
super(cause);
}
public XPathException(String message, Throwable cause) {
super(message, cause);
}
}
}
| edoweb/regal-import | edoweb-sync/src/main/java/de/nrw/hbz/regal/sync/ingest/EdowebDigitalEntityBuilder.java | Java | apache-2.0 | 21,427 |
/*
* Copyright 2007-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ymate.platform.configuration.annotation;
import net.ymate.platform.core.configuration.IConfigurationProvider;
import org.apache.commons.lang3.StringUtils;
import java.lang.annotation.*;
/**
* @author 刘镇 (suninformation@163.com) on 2020/03/09 16:16
* @since 2.1.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface ConfigurationConf {
/**
 * @return root path of the configuration home
 */
String configHome() default StringUtils.EMPTY;
/**
 * @return project name
 */
String projectName() default StringUtils.EMPTY;
/**
 * @return module name
 */
String moduleName() default StringUtils.EMPTY;
/**
 * @return interval (in milliseconds) between configuration file checks
 */
long checkTimeInterval() default 0;
/**
 * @return default configuration provider class used to parse files
 */
Class<? extends IConfigurationProvider> providerClass() default IConfigurationProvider.class;
}
| suninformation/ymate-platform-v2 | ymate-platform-configuration/src/main/java/net/ymate/platform/configuration/annotation/ConfigurationConf.java | Java | apache-2.0 | 1,590 |
/*
* Copyright 2015 Raffael Herzog
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.raffael.guards;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Specifies that this method does not change the state of the object in any ways.
 *
 * It's the same as `@Contract(pure=true)` using IDEA annotations.
 *
 * Retained at runtime (RetentionPolicy.RUNTIME) so tools can inspect it
 * reflectively; applicable to methods only.
 *
 * @author <a href="mailto:herzog@raffael.ch">Raffael Herzog</a>
 */
@Target({ ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface Pure {
}
| Abnaxos/guards | annotations/src/main/java/ch/raffael/guards/Pure.java | Java | apache-2.0 | 1,167 |
package fr.ebiz.computer_database.model;
import javax.persistence.*;
/**
* Created by ebiz on 31/05/17.
*/
@Entity
@Table(name = "role")
public class Role {
// surrogate primary key, generated by the database (IDENTITY strategy)
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private long id;
// role name; mapped by JPA default naming (no explicit @Column)
private String role;
/** @return the database id of this role */
public long getId() {
return id;
}
/** @param id the database id to set */
public void setId(long id) {
this.id = id;
}
/** @return the role name */
public String getRole() {
return role;
}
/** @param role the role name to set */
public void setRole(String role) {
this.role = role;
}
}
| ckeita/training-java | computer-database/core/src/main/java/fr/ebiz/computer_database/model/Role.java | Java | apache-2.0 | 457 |
package com.hs.mail.smtp.processor.hook;
import java.util.StringTokenizer;
import org.junit.AfterClass;
import org.junit.Test;
public class DNSRBLHandlerTest {
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
@Test
public void test() {
String ipAddress = "1.2.3.4";
StringBuffer sb = new StringBuffer();
StringTokenizer st = new StringTokenizer(ipAddress, " .", false);
while (st.hasMoreTokens()) {
sb.insert(0, st.nextToken() + ".");
}
String reversedOctets = sb.toString();
System.out.println(reversedOctets);
}
}
| svn2github/hwmail-mirror | hedwig-server/src/test/java/com/hs/mail/smtp/processor/hook/DNSRBLHandlerTest.java | Java | apache-2.0 | 640 |
module Rtree
class BoundingBox < Struct.new(:top_left, :bottom_right)
  # Smallest axis-aligned box containing all given points.
  # Uses min_by/max_by instead of comparator blocks (idiomatic and one
  # block call per element instead of a pairwise comparator); still
  # raises NoMethodError on an empty collection, as before.
  def self.from_points(points)
    x0 = points.min_by(&:x).x
    y0 = points.min_by(&:y).y
    x1 = points.max_by(&:x).x
    y1 = points.max_by(&:y).y
    BoundingBox.new(Point.new(x0, y0), Point.new(x1, y1))
  end

  # Minimum bounding rectangle of several shapes, via their bounding boxes.
  def self.merged_bound_box(shapes)
    minimum_bounding_rectangle(shapes.map(&:bounding_box))
  end

  # Smallest box containing every given bounding box.
  def self.minimum_bounding_rectangle(bounding_boxes)
    x0 = bounding_boxes.min_by { |b| b.top_left.x }.top_left.x
    y0 = bounding_boxes.min_by { |b| b.top_left.y }.top_left.y
    x1 = bounding_boxes.max_by { |b| b.bottom_right.x }.bottom_right.x
    y1 = bounding_boxes.max_by { |b| b.bottom_right.y }.bottom_right.y
    BoundingBox.new(Point.new(x0, y0), Point.new(x1, y1))
  end

  # True if this box overlaps the other (touching edges count, since the
  # center-distance comparisons are inclusive).
  def overlap(other_bounding_box)
    x0, y0 = self.top_left.x, self.top_left.y
    x1, y1 = self.bottom_right.x, self.bottom_right.y
    x2, y2 = other_bounding_box.top_left.x, other_bounding_box.top_left.y
    x3, y3 = other_bounding_box.bottom_right.x, other_bounding_box.bottom_right.y
    half_w0, half_h0 = (x1-x0)/2, (y1-y0)/2
    half_w1, half_h1 = (x3-x2)/2, (y3-y2)/2
    within_x = ((x1+x0)/2 - (x3+x2)/2).abs <= half_w0 + half_w1
    within_y = ((y1+y0)/2 - (y3+y2)/2).abs <= half_h0 + half_h1
    within_x && within_y
  end
end
end | newmana/rtree | lib/rtree/bounding_box.rb | Ruby | apache-2.0 | 1,520 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.IdentityModel.Tokens;
namespace Muscularity.Web.Infrastructure
{
/// <summary>
/// Options for JWT token creation: lifetime, issuer/audience values and
/// the key used to sign tokens.
/// </summary>
public class JwtTokenServiceOptions
{
/// <summary>How long an issued token stays valid.</summary>
public TimeSpan TimeToLive { get; set; }
/// <summary>Token issuer value.</summary>
public string Issuer { get; set; }
/// <summary>Token audience value.</summary>
public string Audience { get; set; }
/// <summary>Key used for token signing.</summary>
public SecurityKey SigningKey { get; set; }
}
}
| jbload/Muscularity | Muscularity.Web/Infrastructure/JwtTokenServiceOptions.cs | C# | apache-2.0 | 444 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.greengrassv2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.greengrassv2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * EffectiveDeploymentMarshaller
 *
 * Maps each field of EffectiveDeployment onto its JSON payload location.
 * NOTE: generated by the AWS SDK code generator (see @Generated); do not
 * hand-edit the marshalling logic.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class EffectiveDeploymentMarshaller {
private static final MarshallingInfo<String> DEPLOYMENTID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("deploymentId").build();
private static final MarshallingInfo<String> DEPLOYMENTNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("deploymentName").build();
private static final MarshallingInfo<String> IOTJOBID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("iotJobId").build();
private static final MarshallingInfo<String> IOTJOBARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("iotJobArn").build();
private static final MarshallingInfo<String> DESCRIPTION_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("description").build();
private static final MarshallingInfo<String> TARGETARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("targetArn").build();
private static final MarshallingInfo<String> COREDEVICEEXECUTIONSTATUS_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("coreDeviceExecutionStatus").build();
private static final MarshallingInfo<String> REASON_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("reason").build();
private static final MarshallingInfo<java.util.Date> CREATIONTIMESTAMP_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("creationTimestamp").timestampFormat("unixTimestamp").build();
private static final MarshallingInfo<java.util.Date> MODIFIEDTIMESTAMP_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("modifiedTimestamp").timestampFormat("unixTimestamp").build();
// stateless singleton, shared by the SDK runtime
private static final EffectiveDeploymentMarshaller instance = new EffectiveDeploymentMarshaller();
public static EffectiveDeploymentMarshaller getInstance() {
return instance;
}
/**
 * Marshall the given parameter object.
 */
public void marshall(EffectiveDeployment effectiveDeployment, ProtocolMarshaller protocolMarshaller) {
if (effectiveDeployment == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(effectiveDeployment.getDeploymentId(), DEPLOYMENTID_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getDeploymentName(), DEPLOYMENTNAME_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getIotJobId(), IOTJOBID_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getIotJobArn(), IOTJOBARN_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getTargetArn(), TARGETARN_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getCoreDeviceExecutionStatus(), COREDEVICEEXECUTIONSTATUS_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getReason(), REASON_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getCreationTimestamp(), CREATIONTIMESTAMP_BINDING);
protocolMarshaller.marshall(effectiveDeployment.getModifiedTimestamp(), MODIFIEDTIMESTAMP_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| aws/aws-sdk-java | aws-java-sdk-greengrassv2/src/main/java/com/amazonaws/services/greengrassv2/model/transform/EffectiveDeploymentMarshaller.java | Java | apache-2.0 | 5,040 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediatailor.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result object for the MediaTailor ListTagsForResource operation,
 * holding the resource's tag map. NOTE: generated by the AWS SDK code
 * generator (see @Generated); do not hand-edit.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediatailor-2018-04-23/ListTagsForResource" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
 * <p>
 * A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 * </p>
 */
private java.util.Map<String, String> tags;
/**
 * <p>
 * A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 * </p>
 *
 * @return A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 */
public java.util.Map<String, String> getTags() {
return tags;
}
/**
 * <p>
 * A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 * </p>
 *
 * @param tags
 *        A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 */
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
 * <p>
 * A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 * </p>
 *
 * @param tags
 *        A comma-separated list of tag key:value pairs. For example: { "Key1": "Value1", "Key2": "Value2" }
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListTagsForResourceResult withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
 * Adds a single tag entry; lazily creates the map and rejects duplicate
 * keys.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListTagsForResourceResult addTagsEntry(String key, String value) {
if (null == this.tags) {
this.tags = new java.util.HashMap<String, String>();
}
if (this.tags.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.tags.put(key, value);
return this;
}
/**
 * Removes all the entries added into Tags.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListTagsForResourceResult clearTagsEntries() {
this.tags = null;
return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
// value equality based solely on the tag map
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListTagsForResourceResult == false)
return false;
ListTagsForResourceResult other = (ListTagsForResourceResult) obj;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
// hash derived from the tag map, consistent with equals
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public ListTagsForResourceResult clone() {
try {
return (ListTagsForResourceResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-mediatailor/src/main/java/com/amazonaws/services/mediatailor/model/ListTagsForResourceResult.java | Java | apache-2.0 | 4,860 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.loading.sort.unsafe.holder;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Comparator;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowComparator;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.sort.sortdata.TableFieldStat;
public class UnsafeSortTempFileChunkHolder implements SortTempChunkHolder {
/**
* LOGGER
*/
private static final LogService LOGGER =
LogServiceFactory.getLogService(UnsafeSortTempFileChunkHolder.class.getName());
/**
* temp file
*/
private File tempFile;
/**
* read stream
*/
private DataInputStream stream;
/**
* entry count
*/
private int entryCount;
/**
* return row
*/
private IntermediateSortTempRow returnRow;
private int readBufferSize;
private String compressorName;
private IntermediateSortTempRow[] currentBuffer;
private IntermediateSortTempRow[] backupBuffer;
private boolean isBackupFilled;
private boolean prefetch;
private int bufferSize;
private int bufferRowCounter;
private ExecutorService executorService;
private Future<Void> submit;
private int prefetchRecordsProceesed;
/**
* totalRecordFetch
*/
private int totalRecordFetch;
private int numberOfObjectRead;
private TableFieldStat tableFieldStat;
private SortStepRowHandler sortStepRowHandler;
private Comparator<IntermediateSortTempRow> comparator;
/**
 * Constructor to initialize
 *
 * @param tempFile sort temp file this holder reads from
 * @param parameters sort parameters (buffer size, compressor name and
 *        sort column layout are read from here)
 */
public UnsafeSortTempFileChunkHolder(File tempFile, SortParameters parameters) {
// set temp file
this.tempFile = tempFile;
this.readBufferSize = parameters.getBufferSize();
this.compressorName = parameters.getSortTempCompressorName();
this.tableFieldStat = new TableFieldStat(parameters);
this.sortStepRowHandler = new SortStepRowHandler(tableFieldStat);
// single background thread used for prefetching the next buffer
this.executorService = Executors.newFixedThreadPool(1);
comparator = new IntermediateSortTempRowComparator(parameters.getNoDictionarySortColumn());
initialize();
}
/**
 * This method will be used to initialize:
 * reads the prefetch flag and buffer size from CarbonProperties and then
 * opens the temp file via initialise(). The declared exception is
 * historical; failures actually surface as RuntimeException from
 * initialise().
 *
 * @throws CarbonSortKeyAndGroupByException problem while initializing
 */
public void initialize() {
prefetch = Boolean.parseBoolean(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH,
CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH_DEFAULT));
bufferSize = Integer.parseInt(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE,
CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE_DEFAULT));
initialise();
}
/**
 * Opens the sort temp file, reads the row count header and, when
 * prefetching is enabled, fills the first buffer synchronously and
 * schedules the next one on the background executor. Any failure is
 * wrapped in a RuntimeException.
 */
private void initialise() {
	try {
		stream = FileFactory.getDataInputStream(tempFile.getPath(), FileFactory.FileType.LOCAL,
				readBufferSize, compressorName);
		this.entryCount = stream.readInt();
		LOGGER.info("Processing unsafe mode file rows with size : " + entryCount);
		if (prefetch) {
			new DataFetcher(false).call();
			totalRecordFetch += currentBuffer.length;
			if (totalRecordFetch < this.entryCount) {
				submit = executorService.submit(new DataFetcher(true));
			}
		}
	} catch (IOException e) {
		// also covers FileNotFoundException: the two former catch blocks
		// were identical and have been merged
		LOGGER.error(e);
		throw new RuntimeException(tempFile + " No Found", e);
	} catch (Exception e) {
		LOGGER.error(e);
		throw new RuntimeException(tempFile + " Problem while reading", e);
	}
}
/**
 * This method will be used to read new row from file.
 * With prefetch enabled the row is served from the in-memory buffers,
 * otherwise it is deserialized directly from the stream.
 *
 * @throws CarbonSortKeyAndGroupByException problem while reading
 */
@Override
public void readRow() throws CarbonSortKeyAndGroupByException {
if (prefetch) {
fillDataForPrefetch();
} else {
try {
this.returnRow = sortStepRowHandler.readIntermediateSortTempRowFromInputStream(stream);
this.numberOfObjectRead++;
} catch (IOException e) {
throw new CarbonSortKeyAndGroupByException("Problems while reading row", e);
}
}
}
/**
 * Serves the next prefetched row into returnRow. When the current buffer
 * is exhausted it swaps in the backup buffer (waiting for the background
 * fetch first if it has not finished yet) and schedules the next
 * background fetch.
 */
private void fillDataForPrefetch() {
	if (bufferRowCounter >= bufferSize) {
		if (!isBackupFilled) {
			// background fetch still running: wait for it to complete
			try {
				submit.get();
			} catch (Exception e) {
				LOGGER.error(e);
			}
		}
		// swap buffers and schedule the next fetch; this code used to be
		// duplicated in both branches and has been merged
		bufferRowCounter = 0;
		currentBuffer = backupBuffer;
		isBackupFilled = false;
		totalRecordFetch += currentBuffer.length;
		if (totalRecordFetch < this.entryCount) {
			submit = executorService.submit(new DataFetcher(true));
		}
	}
	prefetchRecordsProceesed++;
	returnRow = currentBuffer[bufferRowCounter++];
}
/**
 * Reads {@code expected} rows from the sort temp stream in one go; used
 * when reading compressed sort temp files.
 *
 * @param expected number of rows to read
 * @return the rows read from the stream
 * @throws IOException if error occurs while reading from stream
 */
private IntermediateSortTempRow[] readBatchedRowFromStream(int expected)
		throws IOException {
	IntermediateSortTempRow[] rows = new IntermediateSortTempRow[expected];
	for (int i = 0; i < expected; i++) {
		rows[i] = sortStepRowHandler.readIntermediateSortTempRowFromInputStream(stream);
	}
	this.numberOfObjectRead += expected;
	return rows;
}
/**
 * Returns the row most recently produced by {@link #readRow()}.
 *
 * @return the current row
 */
public IntermediateSortTempRow getRow() {
  return this.returnRow;
}
/**
 * Reports whether any more rows remain to be read from this file.
 *
 * @return true if another row is available
 */
public boolean hasNext() {
  // In prefetch mode progress is tracked by rows handed out of the prefetch
  // buffers; otherwise by rows deserialized directly from the stream.
  return prefetch
      ? this.prefetchRecordsProceesed < this.entryCount
      : this.numberOfObjectRead < this.entryCount;
}
/**
 * Closes the sort temp file stream and shuts down the prefetch executor.
 */
public void close() {
  CarbonUtil.closeStreams(stream);
  // shutdownNow also interrupts a still-running background fetch task.
  if (null != executorService && !executorService.isShutdown()) {
    executorService.shutdownNow();
  }
}
/**
 * Returns the total number of rows stored in this sort temp file.
 *
 * @return entryCount
 */
public int numberOfRows() {
  return entryCount;
}
/**
 * Orders holders by their current row using the configured row comparator.
 */
@Override public int compareTo(SortTempChunkHolder other) {
  return comparator.compare(returnRow, other.getRow());
}
/**
 * Equality is identity: each holder wraps its own live stream position, so
 * two distinct instances are never interchangeable.
 *
 * The previous implementation performed an {@code instanceof} check and a
 * cast only to finish with {@code this == o}, which for every possible input
 * (including {@code null}) evaluates to exactly {@code this == obj}; the
 * dead branches are removed.
 */
@Override public boolean equals(Object obj) {
  return this == obj;
}
/**
 * Hash derived from the field layout and the backing temp file
 * (sum of their hash codes, with ordinary int wrap-around).
 */
@Override public int hashCode() {
  return tableFieldStat.hashCode() + tempFile.hashCode();
}
/**
 * Background task that reads one bulk of rows from the sort temp file into
 * either the current buffer or the backup buffer of the enclosing holder.
 */
private final class DataFetcher implements Callable<Void> {
  /** True when this task fills the backup buffer instead of the current one. */
  private boolean isBackUpFilling;

  /** Number of rows this task will read, capped at what is left in the file. */
  private int numberOfRecords;

  private DataFetcher(boolean backUp) {
    isBackUpFilling = backUp;
    calculateNumberOfRecordsToBeFetched();
  }

  private void calculateNumberOfRecordsToBeFetched() {
    // Never read past the end of the file.
    numberOfRecords = Math.min(bufferSize, entryCount - totalRecordFetch);
  }

  @Override
  public Void call() throws Exception {
    try {
      if (isBackUpFilling) {
        backupBuffer = prefetchRecordsFromFile(numberOfRecords);
        isBackupFilled = true;
      } else {
        currentBuffer = prefetchRecordsFromFile(numberOfRecords);
      }
    } catch (Exception e) {
      LOGGER.error(e);
    }
    return null;
  }
}
/**
 * Reads a bulk of rows from the sort temp file for the prefetch buffers.
 * Thin wrapper kept so the prefetch path has a single named entry point.
 *
 * @param numberOfRecords number of records to be read
 * @return batch of intermediate sort temp rows
 * @throws IOException if error occurs reading records from file
 */
private IntermediateSortTempRow[] prefetchRecordsFromFile(int numberOfRecords)
    throws IOException {
  return readBatchedRowFromStream(numberOfRecords);
}
}
| jatin9896/incubator-carbondata | processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java | Java | apache-2.0 | 10,153 |
package service
import (
"context"
"sync"
artmdl "go-common/app/interface/openplatform/article/model"
accmdl "go-common/app/service/main/account/model"
"go-common/app/service/main/feed/dao"
"go-common/library/log"
"go-common/library/sync/errgroup"
)
// _upsArtBulkSize is the number of up mids queried per article-RPC bulk.
const _upsArtBulkSize = 50

// attenUpArticles get new articles of attention uppers.
func (s *Service) attenUpArticles(c context.Context, minTotalCount int, mid int64, ip string) (res map[int64][]*artmdl.Meta, err error) {
	var mids []int64
	arg := &accmdl.ArgMid{Mid: mid}
	// Resolve the user's followed uppers via the account RPC.
	if mids, err = s.accRPC.Attentions3(c, arg); err != nil {
		dao.PromError("关注rpc接口:Attentions", "s.accRPC.Attentions(%d) error(%v)", mid, err)
		return
	}
	if len(mids) == 0 {
		return
	}
	// Spread minTotalCount evenly across the followed uppers, with a
	// per-upper floor of MinUpCnt.
	count := minTotalCount/len(mids) + s.c.Feed.MinUpCnt
	return s.upsArticle(c, count, ip, mids...)
}
// upsArticle fetches up to count recent article metas for each of the given
// upper mids. The mids are queried through the article RPC in bulks of
// _upsArtBulkSize, in parallel, and metas flagged "no distribute" are
// dropped. RPC failures for a bulk are logged and skipped, never returned.
func (s *Service) upsArticle(c context.Context, count int, ip string, mids ...int64) (res map[int64][]*artmdl.Meta, err error) {
	dao.MissedCount.Add("upArt", int64(len(mids)))
	var (
		group      *errgroup.Group
		errCtx     context.Context
		midsLen, i int
		mutex      = sync.Mutex{}
	)
	res = make(map[int64][]*artmdl.Meta)
	group, errCtx = errgroup.WithContext(c)
	midsLen = len(mids)
	for ; i < midsLen; i += _upsArtBulkSize {
		var partMids []int64
		// FIX: the boundary check previously used _upsArcBulkSize (the
		// archive bulk constant) while the loop advances by _upsArtBulkSize;
		// if the two constants ever differ, tail bulks would overlap or be
		// skipped. Use the same constant for step and bound.
		if i+_upsArtBulkSize > midsLen {
			partMids = mids[i:]
		} else {
			partMids = mids[i : i+_upsArtBulkSize]
		}
		group.Go(func() (err error) {
			var tmpRes map[int64][]*artmdl.Meta
			arg := &artmdl.ArgUpsArts{Mids: partMids, Pn: 1, Ps: count, RealIP: ip}
			if tmpRes, err = s.artRPC.UpsArtMetas(errCtx, arg); err != nil {
				// Best-effort: log and drop this bulk instead of failing the feed.
				log.Error("s.artRPC.UpsArtMetas(%+v) error(%v)", arg, err)
				err = nil
				return
			}
			mutex.Lock()
			for mid, arcs := range tmpRes {
				for _, arc := range arcs {
					// Skip articles marked as not distributable.
					if arc.AttrVal(artmdl.AttrBitNoDistribute) {
						continue
					}
					res[mid] = append(res[mid], arc)
				}
			}
			mutex.Unlock()
			return
		})
	}
	group.Wait()
	return
}
// articles loads article metas for the given aids through the article RPC,
// splitting the ids into bulks of the configured size and fetching them in
// parallel. Metas flagged "no distribute" are dropped; a failed bulk is
// logged and skipped rather than failing the whole call.
func (s *Service) articles(c context.Context, ip string, aids ...int64) (res map[int64]*artmdl.Meta, err error) {
	var (
		lock     = sync.Mutex{}
		bulkSize = s.c.Feed.BulkSize
	)
	res = make(map[int64]*artmdl.Meta, len(aids))
	group, errCtx := errgroup.WithContext(c)
	total := len(aids)
	for start := 0; start < total; start += bulkSize {
		// Clamp the bulk end to the slice length.
		end := start + bulkSize
		if end > total {
			end = total
		}
		part := aids[start:end]
		group.Go(func() error {
			arg := &artmdl.ArgAids{Aids: part, RealIP: ip}
			metas, artErr := s.artRPC.ArticleMetas(errCtx, arg)
			if artErr != nil {
				log.Error("s.artRPC.ArticleMetas() error(%v)", artErr)
				return nil
			}
			lock.Lock()
			for aid, meta := range metas {
				if meta.AttrVal(artmdl.AttrBitNoDistribute) {
					continue
				}
				res[aid] = meta
			}
			lock.Unlock()
			return nil
		})
	}
	group.Wait()
	return
}
| LQJJ/demo | 126-go-common-master/app/service/main/feed/service/article.go | GO | apache-2.0 | 2,972 |
#!/usr/bin/python -Werror
#
# Copyright (c) 2015 Midokura SARL, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import os
import os.path
import yaml
from fabric.api import *
from netaddr import IPNetwork as CIDR
from fabric.colors import yellow, blue, green
from fabric.utils import puts
import cuisine
class Check(object):
    """Remote sanity checks run over fabric against the current host."""

    def __init__(self, metadata):
        # Stack-wide metadata (roles, servers, config) shared by all checks.
        self._metadata = metadata

    def check_broken_cuisine(self):
        # Round-trip a file through cuisine.file_write and grep it back to
        # verify that the cuisine/fabric transport is not mangling content.
        run("rm -f /tmp/check_broken_cuisine.txt")
        cuisine.file_write("/tmp/check_broken_cuisine.txt", "WORKING")
        run("grep WORKING /tmp/check_broken_cuisine.txt")
class Configure(object):
    """Host/container configuration steps (locales, DNS, apt repositories)
    executed over fabric/cuisine against ``env.host_string``.

    NOTE(review): the viewed copy of this file had all indentation stripped;
    block nesting below was reconstructed from the control flow and should be
    confirmed against the original repository.
    """

    def __init__(self, metadata):
        # Stack-wide metadata: roles, servers, containers and config dicts.
        self._metadata = metadata

    def configure(self):
        """Run every configuration step in order."""
        self.localegen()
        self.name_resolution()
        self.os_release()
        self.datastax()
        self.midonet()

    def localegen(self):
        # Containers only: generate the locale expected by later tooling.
        if env.host_string in self._metadata.roles["all_containers"]:
            run("locale-gen de_DE.UTF-8")

    def name_resolution(self):
        """Set hostname, resolv.conf and a static /etc/hosts for this node."""
        # Kernel hostname / loopback alias only on physical hosts, not
        # inside containers.
        if env.host_string not in self._metadata.roles["all_containers"]:
            run("hostname %s" % env.host_string.split(".")[0])
            run("ip address add %s/32 dev lo || echo" % self._metadata.servers[env.host_string]["ip"])
        cuisine.file_write("/etc/hostname", env.host_string.split(".")[0])
        cuisine.file_write("/etc/resolv.conf", """
nameserver %s
options single-request
""" % self._metadata.config["nameserver"])
        local_ip = self._metadata.servers[env.host_string]["ip"]
        # Static hosts file: standard IPv6 entries, this node's own entry,
        # then the pre-generated stack-wide hosts fragment from $TMPDIR.
        cuisine.file_write("/etc/hosts", """
127.0.0.1 localhost.localdomain localhost
::1 ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
ff02::3 ip6-allhosts
%s %s.%s # %s
%s
""" % (
            local_ip,
            env.host_string,
            self._metadata.config["domain"],
            env.host_string.split(".")[0],
            open("%s/etc/hosts" % os.environ["TMPDIR"]).read()
        ))

    @classmethod
    def repokey(cls, url):
        # Fetch a repository signing key over HTTP and register it with apt.
        run("""
URL="%s"
wget -SO- "${URL}" | apt-key add -
""" % url)

    def datastax(self):
        """Register the DataStax (Cassandra) apt repository in containers."""
        if env.host_string in self._metadata.containers:
            # Signing key is inlined because the upstream key URL was
            # unreliable (see the commented repokey call below).
            run("""
apt-key add - <<EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1
mQENBExkbXsBCACgUAbMWASAz/fmnMoWE4yJ/YHeuFHTK8zloJ/mApwizlQXTIVp
U4UV8nbLJrbkFY92VTcC2/IBtvnHpZl8eVm/JSI7nojXc5Kmm4Ek/cY7uW2KKPr4
cuka/5cNsOg2vsgTIMOZT6vWAbag2BGHtEJbriMLhT3v1tlu9caJfybu3QFWpahC
wRYtG3B4tkypt21ssWwNnmp2bjFRGpLssc5HCCxUCBFLYoIkAGAFRZ6ymglsLDBn
SCEzCkn9zQfmyqs0lZk4odBx6rzE350xgEnzFktT2uekFYqRqPQY8f7AhVfj2DJF
gVM4wXbSoVrTnDiFsaJt/Ea4OJ263jRUHeIRABEBAAG0LVJpcHRhbm8gUGFja2Fn
ZSBSZXBvc2l0b3J5IDxwYXVsQHJpcHRhbm8uY29tPokBPgQTAQIAKAIbAwYLCQgH
AwIGFQgCCQoLBBYCAwECHgECF4AFAlW/zKMFCRLBYKQACgkQNQIA8rmZo3LebAgA
gAwWkvBrPaD5Kf8H4uw9rXtHnHYxX5G6cOVJ3vuWCs1ov7m3JWq918q00hWfLtOs
zb15kFcjcEJ7kiRFJmAXZhcX2I0DHTmTZSl9orKzoUlXQqAANJGdek8pzdTDUQfz
V26k63d6eLqjXotrb0hFzg7B8VSolxRE44S5k1xhzUCedOqYYsWVv3xnRIP6UBPt
WLvzrLa0o9x/hT4w81dOP4rzZMuq2RApnenoz9AZwJrmZ14QW2ncy4RbqK6pKdRJ
y57vBv8F0LkGlLwBd/JYWwQ85lUTkNG5wCWdj0IEYTO3+fGyO1LHU6bVZCrNtkUE
ahSZUiRdidiktIkbtNXImYkCHAQQAQgABgUCTGRt2QAKCRATbpzxe100LaUfD/9D
q84HarIQMEoUiRBklg+afgTMaNNdvhU3V59KoMja2vMeE4JjE3SvNoKCHjPZj6Ti
720KL6V5O/Uo1VjtSXzAPRJywcE9aS5HRjM2Dr1mp5GnmpvbiKBdl91G9aPc3D2Z
LpG7vZr8E/vYLc5h1DMz2XDqi6gAqW2yxb2vnmHL4FiAdoXfpZimC9KZpUdTsGPO
VbXEDEn3y/AiIC35Bq66Sp3W4gVNakV7Y5RUPPDDBIsTZEOhzd9nl5FXOnPtONp5
dtp5NoWl6q3BjYe2P52TloCp+BJ62donfFTRSGfqyvtaRgmnHHEIWgypMghW6wSb
O/BxFpdggHTItMfBg2a8tWDFjYmBoFd3iP9SfcmBb/7zB5YXC5b1/s3RNCtR76hf
+iXjm/zy22tb6qy5XJsnCoORjEoFaWNH6ckgACK7HQyJZ2Lo2MuCYYaQLs6gTd6a
zMEQHT08cPF+I5It9mOzAtUOkCcVK8dIXRFETXFVdQqFMTmZmuK1Iv1CFBeUIHnM
iyoYv1bzNsUg/hJpW8ximVmBg5Apza2K0p3XKHkw9MPBqnQ4PbBM1nqb/+o56p+o
8mVZmjn4bdraB8c0Br15Mi19Zne7b65OZ5k+SVripUk5/XeJD9M9U6+DG+/uxemD
Fzp9XjnnAe8T/u8JpqHYQ2mRONFM7ZMOAFeEe4yIEIkBPgQTAQIAKAUCTGRtewIb
AwUJA8JnAAYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQNQIA8rmZo3K3HAf/
V+6OSdt/Zwdsk+WsUwi75ndOIz60TN8Wg16WOMq5KOBuYIneG2+CEFJHTppNLc2j
r/ugTjTPeS/DAo5MtnK+zzHxT7JmMKypb23t6MaahSlER4THbYvWUwsw5mm2LsTe
PTlb5mkvQnXkt6pN2UzZVyIdNFXRv1YZLdTcf4aJ0pZySvCdYoE9RaoP4/JI9GfS
NXH7oOxI8YaxRGK5i6w/LZyhxkfbkPX+pbbe1Ept+SZCcwWVc/S6veGZWQ1pNHR2
RW6F3WE0Mle6xWtvW1NlMs4ATEqS13GS4RVlgE07KTe/oBRkd+4NwXAQoEzUvoRr
j5Ad7LVKeygeUUyaWP+qN7kBDQRMZG17AQgAypZBEfm9pM8Tr4ktsHp1xThYHvzT
OScLPZcCaF1Gjg8em0cQI4z4yN+yffsmUD4/dGcRxZgVms/jTexKQ8Z/Ps3e4vRG
b4RCFaY0KhW4t+TTJJ9I5wvFzXZj7zNFxiQWpueiq/cDiBY+Liv3zMSOBaXzxR6L
7igNPKi/0ELLyCIU/okUwqc0O/4r5PgFANkMyvvVNqzxjC5s8MXbGivJXiML67/Y
0M/siNqDSia/TGItpXjvi7v1zulbiIV0iSBkO3vsxNE0xXGBXY/UztAShN3FTbx9
CZDupi35wgqK7McJ3WSjEDzwkElmwkmh7JdLziyH09kS1wRqiLcB+wSTywARAQAB
iQElBBgBAgAPAhsMBQJVv8zOBQkSwWDOAAoJEDUCAPK5maNyLl4H/3n/+xZsuKia
fHtBUMh44YRabEX1Bd10LAfxGlOZtKV/Dr1RaKetci6RRa5sJj0wKra6FhIryuqS
jFTalPF3o8WjVEA5AjJ3ddSgAwX5gGJ3u+C0XMI0E6h/vAXh6meFxHtGinYr1Gcp
P1/S3/Jy+0cmTt3FvqBtXtU3VIyb/4vUNZ+dY+jcw/gs/yS+s+jtR8hWUDbSrbU9
pja+p1icNwU5pMbEfx1HYB7JCKuE0iJNbAFagRtPCOKq4vUTPDUQUB5MjWV+89+f
cizh+doQR9z8e+/02drCCMWiUf4iiFs2dNHwaIPDOJ8Xn9xcxiUaKk32sjT3sict
XO5tB2KhE3A=
=YO7C
-----END PGP PUBLIC KEY BLOCK-----
EOF
""")
            cuisine.file_write("/etc/apt/sources.list.d/datastax.list", """
deb [arch=amd64] http://debian.datastax.com/community 2.0 main
""")
            #self.repokey("https://debian.datastax.com/debian/repo_key")

    def midonet(self):
        """Configure the MidoNet apt repositories (MEM or OSS flavor)."""
        # Install(self._metadata).apt_get_update()
        # Midokura Enterprise (MEM) credentials come from the environment.
        if "OS_MIDOKURA_REPOSITORY_USER" in os.environ:
            username = os.environ["OS_MIDOKURA_REPOSITORY_USER"]
        else:
            username = ""
        if "OS_MIDOKURA_REPOSITORY_PASS" in os.environ:
            password = os.environ["OS_MIDOKURA_REPOSITORY_PASS"]
        else:
            password = ""
        if "midonet_repo" in self._metadata.config:
            repo_flavor = self._metadata.config["midonet_repo"]
        else:
            repo_flavor = "OSS"
        # The MidoNet manager container requires MEM; switch flavor only when
        # credentials are actually available. (`<>` — this file is Python 2.)
        if "midonet_manager" in self._metadata.roles:
            if env.host_string in self._metadata.roles["container_midonet_manager"]:
                if username <> "":
                    if password <> "":
                        repo_flavor = "MEM"
        # Optional full repo-line overrides, again from the environment.
        if "OS_MIDOKURA_URL_OVERRIDE" in os.environ:
            url_override = os.environ["OS_MIDOKURA_URL_OVERRIDE"]
        else:
            url_override = ""
        if "OS_MIDOKURA_PLUGIN_URL_OVERRIDE" in os.environ:
            plugin_url_override = os.environ["OS_MIDOKURA_PLUGIN_URL_OVERRIDE"]
        else:
            plugin_url_override = ""
        puts(blue("setting up Midokura repos"))
        run("""
if [[ "%s" == "True" ]] ; then set -x; fi
#
# initialize the password cache
#
%s
USERNAME="%s"
PASSWORD="%s"
MIDONET_VERSION="%s"
OPENSTACK_PLUGIN_VERSION="%s"
REPO_FLAVOR="%s"
URL_OVERRIDE="%s"
PLUGIN_URL_OVERRIDE="%s"
rm -fv -- /etc/apt/sources.list.d/midonet*
rm -fv -- /etc/apt/sources.list.d/midokura*
if [[ "${REPO_FLAVOR}" == "MEM" ]]; then
FILENAME="/etc/apt/sources.list.d/midokura.list"
wget -SO- "http://${USERNAME}:${PASSWORD}@apt.midokura.com/packages.midokura.key" | apt-key add -
if [[ "${URL_OVERRIDE}" == "" && "${PLUGIN_URL_OVERRIDE}" == "" ]]; then
cat>"${FILENAME}"<<EOF
#
# MEM midolman
#
deb [arch=amd64] http://${USERNAME}:${PASSWORD}@apt.midokura.com/midonet/v${MIDONET_VERSION}/stable trusty main non-free
#
# MEM midonet neutron plugin
#
deb [arch=amd64] http://${USERNAME}:${PASSWORD}@apt.midokura.com/openstack/${OPENSTACK_PLUGIN_VERSION}/stable trusty main
EOF
else
cat>"${FILENAME}"<<EOF
#
# MEM midolman (url override)
#
${URL_OVERRIDE}
#
# MEM midonet neutron plugin (plugin url override)
#
${PLUGIN_URL_OVERRIDE}
EOF
fi
fi
if [[ "${REPO_FLAVOR}" == "OSS" ]]; then
FILENAME="/etc/apt/sources.list.d/midonet.list"
wget -SO- http://repo.midonet.org/packages.midokura.key | apt-key add -
cat>"${FILENAME}"<<EOF
# OSS MidoNet
deb http://repo.midonet.org/midonet/v${MIDONET_VERSION} stable main
# OSS MidoNet OpenStack Integration
deb http://repo.midonet.org/openstack-${OPENSTACK_PLUGIN_VERSION} stable main
# OSS MidoNet 3rd Party Tools and Libraries
deb http://repo.midonet.org/misc stable main
EOF
fi
""" % (
            self._metadata.config["debug"],
            open(os.environ["PASSWORDCACHE"]).read(),
            username,
            password,
            self._metadata.config["midonet_%s_version" % repo_flavor.lower()],
            self._metadata.config["midonet_%s_openstack_plugin_version" % repo_flavor.lower()],
            repo_flavor.upper(),
            url_override,
            plugin_url_override
        ))

    def os_release(self):
        # Containers may run a different Ubuntu release than the hosts.
        if env.host_string in self._metadata.containers:
            self.__lib_orizuru_operations_ubuntu_repo(self._metadata.config["container_os_release_codename"])
        else:
            self.__lib_orizuru_operations_ubuntu_repo(self._metadata.config["os_release_codename"])

    def __lib_orizuru_operations_ubuntu_repo(self, codename):
        """Write /etc/apt/sources.list for the given Ubuntu codename,
        routing every archive through the configured apt-cacher proxy."""
        archive_country = self._metadata.config["archive_country"]
        apt_cacher = self._metadata.config["apt-cacher"]
        # The generated header names this function (via sys._getframe) so the
        # file points back at its generator.
        run("""
if [[ "%s" == "True" ]] ; then set -x; fi
XC="%s" # ubuntu release
XD="%s" # country code
XX="%s" # apt-cacher
cat>/etc/apt/sources.list<<EOF
#
# autogenerated file - do not modify - modify %s instead
#
EOF
for TYPE in 'deb' 'deb-src'; do
for realm in "main restricted" "universe" "multiverse"; do
echo "${TYPE} ${XX}/${XD}.archive.ubuntu.com/ubuntu/ ${XC} ${realm}"
echo "${TYPE} ${XX}/${XD}.archive.ubuntu.com/ubuntu/ ${XC}-updates ${realm}"
echo "${TYPE} ${XX}/security.archive.ubuntu.com/ubuntu/ ${XC}-security ${realm}"
done
echo "${TYPE} ${XX}/${XD}.archive.ubuntu.com/ubuntu/ ${XC}-backports main restricted universe multiverse"
done | tee -a /etc/apt/sources.list
""" % (self._metadata.config["debug"], codename, archive_country, apt_cacher, sys._getframe().f_code.co_name))
class Install(object):
    """Package installation and base-system setup steps run over fabric.

    NOTE(review): the viewed copy of this file had all indentation stripped;
    block nesting below was reconstructed from the control flow and should be
    confirmed against the original repository.
    """

    def __init__(self, metadata):
        # Stack-wide metadata: roles, servers, containers and config dicts.
        self._metadata = metadata

    def install(self):
        """Run every installation step in order."""
        self.rsyslog()
        self.screen()
        self.login_stuff()
        self.apt_get_update()
        self.common_packages()
        self.rp_filter()
        self.cloud_repository()
        self.apt_get_update()
        self.ntp()
        self.dist_upgrade()
        self.constrictor()
        self.kmod("openvswitch")
        self.kmod("nbd")
        self.kmod("kvm")
        self.kmod("vhost_net")
        self.lldpd()

    def lldpd(self):
        cuisine.package_ensure("lldpd")

    def kmod(self, module_name):
        # Kernel modules can only be loaded on physical hosts, not containers.
        if env.host_string not in self._metadata.roles["all_containers"]:
            run("modprobe %s || true" % module_name)

    def constrictor(self):
        """Install the 'constrictor' helper: a tiny Python 2 CLI for reading
        and writing ini-style config options (used by later provisioning)."""
        constrictor_bin = self._metadata.config["constrictor"]
        run("mkdir -pv $(dirname %s)" % constrictor_bin)
        cuisine.file_write(constrictor_bin, """#!/usr/bin/python -Werror
import sys
import ConfigParser
def add_section(configuration, section):
    if not(section == 'DEFAULT' or configuration.has_section(section)):
        configuration.add_section(section)
def set_option(configfile, configuration, section, option, value):
    configuration.set(section, option, value)
    cfgfile = open(configfile, "w")
    configuration.write(cfgfile)
    cfgfile.close()
def get_option(configuration, section, option):
    print configuration.get(section, option)
def handle_command(args):
    command = args[1]
    configfile = args[2]
    section = args[3]
    option = args[4]
    configuration = ConfigParser.RawConfigParser()
    configuration.read(configfile)
    if command == 'set':
        value = args[5]
        add_section(configuration, section)
        set_option(configfile, configuration, section, option, value)
    if command == 'get':
        get_option(configuration, section, option)
    return 0
if __name__ == "__main__":
    sys.exit(handle_command(sys.argv))
""")
        run("chmod 0755 %s" % constrictor_bin)

    def screen(self):
        """Install GNU screen and a hardstatus line naming this host."""
        screenrc_string = "%s.%s" % (env.host_string, self._metadata.config["domain"])
        cuisine.package_ensure("screen")
        run("""
mkdir -pv /var/run/screen
chmod 0755 /usr/bin/screen
chmod 0777 /var/run/screen
""")
        cuisine.file_write("/root/.screenrc", """
hardstatus alwayslastline
hardstatus string '%%{= kG} %s [%%= %%{= kw}%%?%%-Lw%%?%%{r}[%%{W}%%n*%%f %%t%%?{%%u}%%?%%{r}]%%{w}%%?%%+Lw%%?%%?%%= %%{g}] %%{W}%%{g}%%{.w} screen %%{.c} [%%H]'
""" % screenrc_string)

    @classmethod
    def login_stuff(cls):
        # Restore sane permissions (and setuid) on sudo.
        run("""
chmod 0755 /usr/bin/sudo
chmod u+s /usr/bin/sudo
""")

    @classmethod
    def apt_get_update(cls):
        """apt-get update, retrying once with a purged list cache on failure."""
        puts(yellow("updating repositories, this may take a long time."))
        run("""
#
# Round 1: try to apt-get update without purging the cache
#
apt-get update 1>/dev/null
#
# Round 2: clean cache and update again
#
if [[ ! "${?}" == "0" ]]; then
rm -rf /var/lib/apt/lists/*
rm -f /etc/apt/apt.conf
sync
apt-get update 2>&1
fi
""")

    def common_packages(self):
        cuisine.package_ensure(self._metadata.config["common_packages"])

    def rsyslog(self):
        """Install rsyslog: the controller aggregates via UDP 514, every
        other node forwards to the controller. (`<>` — Python 2 syntax.)"""
        cuisine.package_ensure("rsyslog")
        controller_name = self._metadata.roles["openstack_controller"][0]
        controller_ip_suffix = self._metadata.config["idx"][controller_name]
        controller_ip = "%s.%s" % (self._metadata.config["vpn_base"], controller_ip_suffix)
        if env.host_string <> controller_name:
            # Forwarder config: ship everything to the controller over UDP
            # (@) and TCP (@@).
            cuisine.file_write("/etc/rsyslog.conf", """
$KLogPermitNonKernelFacility on
$ActionFileDefaultTemplate RSYSLOG_TraditionalFileFormat
$RepeatedMsgReduction on
$FileOwner syslog
$FileGroup adm
$FileCreateMode 0640
$DirCreateMode 0755
$Umask 0022
$PrivDropToUser syslog
$PrivDropToGroup syslog
$WorkDirectory /var/spool/rsyslog
$IncludeConfig /etc/rsyslog.d/*.conf
$ModLoad imuxsock
$ModLoad imklog
*.* @%s:514
*.* @@%s:514
""" % (controller_ip, controller_ip))
        else:
            # Aggregator config: listen on UDP 514 and fan incoming logs out
            # into one directory per source IP.
            cuisine.file_write("/etc/rsyslog.conf", """
$ModLoad imuxsock # provides support for local system logging
$ModLoad imklog # provides kernel logging support
$KLogPermitNonKernelFacility on
$ActionFileDefaultTemplate RSYSLOG_TraditionalFileFormat
$RepeatedMsgReduction on
$FileOwner syslog
$FileGroup adm
$FileCreateMode 0640
$DirCreateMode 0755
$Umask 0022
$PrivDropToUser syslog
$PrivDropToGroup syslog
$WorkDirectory /var/spool/rsyslog
$IncludeConfig /etc/rsyslog.d/*.conf
$ModLoad imudp
$UDPServerRun 514
$template FILENAME,"/var/log/%fromhost-ip%/syslog.log"
*.* ?FILENAME
""")
        run("service rsyslog restart")
        run("logger ping")

    def rp_filter(self):
        #
        # async routing traffic floating from neutron metadata/dhcp midonet agent to hypervisors and gateways
        #
        if 'physical_midonet_gateway' in self._metadata.roles or 'physical_openstack_compute' in self._metadata.roles:
            if env.host_string not in self._metadata.containers:
                run("""
for RP in /proc/sys/net/ipv4/conf/*/rp_filter; do
echo 0 > "${RP}"
done
""")

    def cloud_repository(self):
        """Enable the Ubuntu Cloud Archive matching the OpenStack release and
        rewrite its mirror URL to go through the apt-cacher proxy."""
        run("rm -rf /etc/apt/sources.list.d/cloudarchive-*")
        cuisine.package_ensure(["python-software-properties", "software-properties-common", "ubuntu-cloud-keyring"])
        self.dist_upgrade()
        # Only release/archive combinations known to exist are enabled.
        if self._metadata.config["container_os_release_codename"] == "precise":
            if self._metadata.config["openstack_release"] in ["icehouse", "juno"]:
                run("add-apt-repository --yes cloud-archive:%s" % self._metadata.config["openstack_release"])
        if self._metadata.config["container_os_release_codename"] == "trusty":
            if self._metadata.config["openstack_release"] in ["juno", "kilo"]:
                run("add-apt-repository --yes cloud-archive:%s" % self._metadata.config["openstack_release"])
        run("""
OPENSTACK_RELEASE="%s"
APT_CACHER="%s"
SOURCES_LIST="/etc/apt/sources.list.d/cloudarchive-${OPENSTACK_RELEASE}.list"
test -f "${SOURCES_LIST}" && \
sed -i 's,http://ubuntu-cloud.archive.canonical.com,'"${APT_CACHER}"'/ubuntu-cloud.archive.canonical.com,g;' "${SOURCES_LIST}"
exit 0
""" % (
            self._metadata.config["openstack_release"],
            self._metadata.config["apt-cacher"]
        ))
        self.dist_upgrade()

    @classmethod
    def dist_upgrade(cls):
        """Non-interactive dist-upgrade, pre-answering grub prompts."""
        run("""
export DEBIAN_FRONTEND=noninteractive
debconf-set-selections <<EOF
grub grub/update_grub_changeprompt_threeway select install_new
grub-legacy-ec2 grub/update_grub_changeprompt_threeway select install_new
EOF
yes | dpkg --configure -a
apt-get -y -u --force-yes install
apt-get -y -u --force-yes dist-upgrade 1>/dev/null
""")
        run("apt-get clean")
        run("""
export DEBIAN_FRONTEND=noninteractive
apt-get -y autoremove
""")

    def ntp(self):
        # Time sync only on physical hosts; containers share the host clock.
        if env.host_string not in self._metadata.containers:
            cuisine.package_ensure("ntpdate")
            cuisine.package_ensure("ntp")
            run("""
/etc/init.d/ntp stop || true
ln -sfv "/usr/share/zoneinfo/%s" /etc/localtime
ntpdate zeit.fu-berlin.de || true
/etc/init.d/ntp start || true
""" % self._metadata.config["timezone"])
| midonet/orizuru | lib/orizuru/operations.py | Python | apache-2.0 | 17,776 |
// Copyright 2012 Aaron Jensen
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Microsoft.Web.XmlTransform;
using System.Linq;
using System.Xml;
namespace Carbon.Test.Xdt
{
    /// <summary>
    /// XDT transform that recursively merges the transform element into the
    /// target document: matching elements (by local name) are descended into,
    /// unmatched elements are inserted.
    /// https://github.com/appharbor/appharbor-transformtester/blob/master/AppHarbor.TransformTester/Transforms/Merge.cs
    /// </summary>
    public class Merge : Transform
    {
        public Merge() : base(TransformFlags.UseParentAsTargetNode)
        {
        }

        protected override void Apply()
        {
            Apply((XmlElement)TargetNode, (XmlElement)TransformNode);
        }

        public void Apply(XmlElement targetElement, XmlElement transformElement)
        {
            // Look for the first child of the target with the same local name.
            XmlElement existing = null;
            foreach (var candidate in targetElement.ChildNodes.OfType<XmlElement>())
            {
                if (candidate.LocalName == transformElement.LocalName)
                {
                    existing = candidate;
                    break;
                }
            }

            // No counterpart in the target: insert the transform subtree as-is.
            if (existing == null)
            {
                InsertTransformElement(targetElement, transformElement);
                return;
            }

            // Counterpart found: merge each transform child into it.
            foreach (var child in transformElement.ChildNodes.OfType<XmlElement>())
            {
                Apply(existing, child);
            }
        }

        protected virtual void InsertTransformElement(XmlElement targetElement, XmlElement transformElement)
        {
            targetElement.AppendChild(transformElement);
        }
    }
}
| MattHubble/carbon | Source/Test/Xdt/Merge.cs | C# | apache-2.0 | 1,981 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshiftdataapi.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.redshiftdataapi.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * BatchExecuteStatementResult JSON Unmarshaller
 *
 * Generated streaming unmarshaller: walks the JSON token stream once and
 * copies each recognized member into the result object.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchExecuteStatementResultJsonUnmarshaller implements Unmarshaller<BatchExecuteStatementResult, JsonUnmarshallerContext> {

    public BatchExecuteStatementResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        BatchExecuteStatementResult batchExecuteStatementResult = new BatchExecuteStatementResult();

        // Remember where parsing started so the end of this object can be
        // distinguished from the ends of nested structures.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Members of this result live exactly one level below the start depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A JSON null unmarshals to an empty (default) result.
            return batchExecuteStatementResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // One branch per known member; testExpression matches the
                // current field name at the expected depth, and nextToken()
                // advances onto the value before it is unmarshalled.
                if (context.testExpression("ClusterIdentifier", targetDepth)) {
                    context.nextToken();
                    batchExecuteStatementResult.setClusterIdentifier(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CreatedAt", targetDepth)) {
                    context.nextToken();
                    // Epoch-seconds timestamp format.
                    batchExecuteStatementResult.setCreatedAt(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("Database", targetDepth)) {
                    context.nextToken();
                    batchExecuteStatementResult.setDatabase(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DbUser", targetDepth)) {
                    context.nextToken();
                    batchExecuteStatementResult.setDbUser(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Id", targetDepth)) {
                    context.nextToken();
                    batchExecuteStatementResult.setId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("SecretArn", targetDepth)) {
                    context.nextToken();
                    batchExecuteStatementResult.setSecretArn(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the closing token of the original object is reached.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return batchExecuteStatementResult;
    }

    // Lazily-created shared instance (generated pattern; not thread-safe
    // initialization, but idempotent).
    private static BatchExecuteStatementResultJsonUnmarshaller instance;

    public static BatchExecuteStatementResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new BatchExecuteStatementResultJsonUnmarshaller();
        return instance;
    }
}
| aws/aws-sdk-java | aws-java-sdk-redshiftdataapi/src/main/java/com/amazonaws/services/redshiftdataapi/model/transform/BatchExecuteStatementResultJsonUnmarshaller.java | Java | apache-2.0 | 4,194 |
//
// Bitmap.hpp
// GaLiong
//
// Created by Liong on ??/??/??.
//
#include "Include/Bmp.hpp"
_L_BEGIN
namespace Media
{
	// Public

	// NOTE(review): `instance` is accepted but never used — confirm whether
	// copy-from-image construction was meant to be implemented here.
	Bmp::Bmp(Image& instance)
	{
	}

	Bmp::~Bmp()
	{
	}

	// Parses the BMP file header and info header from `stream`, filling the
	// member `size` and `length`, and seeks to the start of the pixel data.
	// Returns false for a non-BMP magic number or a bit depth other than 24.
	// NOTE(review): defined here with NO parameters, but invoked below as
	// InitHeader(size, dataLength) — one of the two is out of date and this
	// translation unit cannot compile as shown; check the declaration in
	// Include/Bmp.hpp.
	bool Bmp::InitHeader()
	{
		FileHeader f;
		InfoHeader i;
		stream.read((char *)&f, sizeof(FileHeader));
		if (f.Type != 0x4D42) // 'BM' magic, little-endian
			return false;
		stream.read((char *)&i, sizeof(InfoHeader));
		if (i.BitCount != 24) // only 24bpp (BGR) files are supported
			return false;
		size = { i.Width, i.Height };
		length = i.Width * i.Height * 3;
		stream.seekg(f.OffBits, stream.beg); // jump to the pixel array
		return true;
	}

	// Reads `length` bytes of pixel data; ownership of the returned buffer
	// passes to the caller (allocated with new[], must be delete[]'d).
	Buffer Bmp::ReadData(BufferLength length)
	{
		Buffer data = new Byte[length]; // NEED TO BE DELETED.
		stream.read((char *)data, length);
		return data;
	}

	// Loads the BMP at `path` into a new texture. `option` flags control
	// whether the texture is generated immediately (NoGenerate) and whether
	// the file stream is left open afterwards (NoClose).
	TextureRef Bmp::ToTexture(wchar_t *path, Flag option)
	{
		Log << L"Bmp: Try loading " << path << L"...";
		if (stream.is_open())
			stream.close();
		stream.open(path, stream.in | stream.binary | stream._Nocreate);
		TextureRef ref;
		// NOTE(review): these locals shadow the members that InitHeader()
		// fills — see the arity mismatch flagged above.
		Size size;
		BufferLength dataLength;
		if (InitHeader(size, dataLength))
		{
			Buffer data = ReadData(dataLength);
			if (!data)
				return TextureRef();
			// NOTE(review): `data` is not delete[]'d here — confirm whether
			// NewTexture takes ownership, otherwise this leaks per load.
			ref = TextureManager.NewTexture(dataLength, data, size, Texture::PixelFormat::BGR, Texture::ByteSize::UByte);
			if ((option & FileReadOption::NoGenerate) == FileReadOption::None)
				ref.lock()->Generate();
		}
		else
		{
			Log.Log((L"Bmp: Failed in loading " + wstring(path) + L"!").c_str(), Logger::WarningLevel::Warn);
			return TextureRef();
		}
		if ((option & FileReadOption::NoClose) == FileReadOption::None)
			stream.close();
		Log << L"Bmp: Succeeded!";
		return ref;
	}

	// Derived from [LiongFramework::Media::Image]
	// NOTE(review): the delegations below mix `_Bitmap::` (scope-qualified)
	// and `_Bitmap.` (member access) — confirm what `_Bitmap` actually is;
	// as written the two forms cannot both be valid for the same entity.
	Buffer Bmp::GetChunk(Point position, Size size)
	{
		return _Bitmap::GetChunk(position, size);
	}

	BufferLength Bmp::GetInterpretedLength(PixelType pixelType)
	{
		return _Bitmap.GetInterpretedLength(pixelType);
	}

	Byte* Bmp::GetPixel(Point position)
	{
		return _Bitmap.GetPixel(position);
	}

	Size Bmp::GetSize()
	{
		return _Bitmap.GetSize();
	}

	bool Bmp::IsEmpty()
	{
		return _Bitmap.IsEmpty();
	}

	Buffer Bmp::Interpret(PixelType pixelType)
	{
		return _Bitmap.Interpret(pixelType);
	}
}
_L_END | PENGUINLIONG/GaLiong | GaLiong/Bmp.cpp | C++ | apache-2.0 | 2,500 |
package coreaf.ui.pages;
/**
 * Placeholder for the home-page UI abstraction (package coreaf.ui.pages).
 * NOTE(review): currently empty — no locators or page actions defined yet.
 */
public class HomePage {
}
| AKSahu/WebAutomationFrameworks | SeleniumUIAutoTest/src/coreaf/ui/pages/HomePage.java | Java | apache-2.0 | 53 |
package com.wjc.slience.mymap.activity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.KeyEvent;
import android.widget.TextView;
import com.wjc.slience.mymap.R;
import com.wjc.slience.mymap.common.ActivityCollector;
import com.wjc.slience.mymap.common.LogUtil;
import com.wjc.slience.mymap.db.MyMapDB;
import com.wjc.slience.mymap.model.Way;
import org.w3c.dom.Text;
import java.util.ArrayList;
import java.util.List;
/**
 * Activity that displays the trip log messages.
 */
public class MsgActivity extends Activity {

    TextView msg;
    String msgTxt;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_msg);
        msg = (TextView) findViewById(R.id.msg);
        showLatestTrip();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Refresh on every return to the foreground so new log entries show up.
        showLatestTrip();
    }

    /** Re-reads the trip log and renders it into the text view. */
    private void showLatestTrip() {
        msgTxt = LogUtil.getInstance().readTheTrip();
        msg.setText(msgTxt);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Back key navigates to the chooser screen instead of plain finish.
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            startActivity(new Intent(MsgActivity.this, ChooseActivity.class));
            finish();
        }
        return super.onKeyDown(keyCode, event);
    }
}
| root0301/MyMap | app/src/main/java/com/wjc/slience/mymap/activity/MsgActivity.java | Java | apache-2.0 | 1,450 |
/**
*
*/
package com.sivalabs.demo.security;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import com.sivalabs.demo.entities.Role;
import com.sivalabs.demo.entities.User;
/**
 * Spring Security user-details wrapper that carries the application's own
 * {@link User} entity alongside the credentials and granted authorities
 * derived from its roles.
 *
 * @author Siva
 */
public class AuthenticatedUser extends org.springframework.security.core.userdetails.User
{
    private static final long serialVersionUID = 1L;

    /** The backing application user entity. */
    private User user;

    public AuthenticatedUser(User user)
    {
        super(user.getEmail(), user.getPassword(), getAuthorities(user));
        this.user = user;
    }

    public User getUser()
    {
        return user;
    }

    /** Maps every role name of the given user to a granted authority. */
    private static Collection<? extends GrantedAuthority> getAuthorities(User user)
    {
        Set<String> names = new HashSet<>();
        for (Role role : user.getRoles())
        {
            names.add(role.getName());
        }
        return AuthorityUtils.createAuthorityList(names.toArray(new String[0]));
    }
}
| sivaprasadreddy/springboot-learn-by-example | chapter-13/springboot-thymeleaf-security-demo/src/main/java/com/sivalabs/demo/security/AuthenticatedUser.java | Java | apache-2.0 | 1,192 |
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <string>
#include <iostream>
#include <dirent.h>
#include <cstring>
#include <errno.h>
#include <attr/xattr.h>
#include "../BaseTestCase.h"
#include "../../user_tools/api/workload.h"
#include "../../user_tools/api/actions.h"
using fs_testing::tests::DataTestResult;
using fs_testing::user_tools::api::WriteData;
using fs_testing::user_tools::api::WriteDataMmap;
using fs_testing::user_tools::api::Checkpoint;
using std::string;
#define TEST_FILE_PERMS ((mode_t) (S_IRWXU | S_IRWXG | S_IRWXO))
namespace fs_testing {
namespace tests {
class testName: public BaseTestCase {
 public:
  virtual int setup() override {
    init_paths();
    return 0;
  }

  virtual int run(int checkpoint) override {
    init_paths();
    int local_checkpoint = 0;

    int fd_foo = cm_->CmOpen(foo_path.c_str(), O_RDWR | O_CREAT, 0777);
    if (fd_foo < 0) {
      // Open failed: fd_foo is not a valid descriptor, so do not close it
      // (closing -1 could also clobber the errno we are about to return).
      return errno;
    }

    if (WriteData(fd_foo, 0, 32768) < 0) {
      cm_->CmClose(fd_foo);
      return errno;
    }

    if (WriteData(fd_foo, 0, 5000) < 0) {
      cm_->CmClose(fd_foo);
      return errno;
    }

    if (cm_->CmFsync(fd_foo) < 0) {
      return errno;
    }

    if (cm_->CmCheckpoint() < 0) {
      return -1;
    }
    local_checkpoint += 1;
    // Stop at the requested checkpoint; the fd is deliberately left open so
    // the harness can simulate a crash at this point.
    if (local_checkpoint == checkpoint) {
      return 1;
    }

    if (cm_->CmClose(fd_foo) < 0) {
      return errno;
    }
    return 0;
  }

  virtual int check_test(unsigned int last_checkpoint, DataTestResult *test_result) override {
    init_paths();
    return 0;
  }

 private:
  // Populate every test path member relative to the mount directory.
  // Shared by setup(), run() and check_test(), which previously duplicated
  // this assignment block verbatim.
  void init_paths() {
    test_path = mnt_dir_;
    A_path = mnt_dir_ + "/A";
    AC_path = mnt_dir_ + "/A/C";
    B_path = mnt_dir_ + "/B";
    foo_path = mnt_dir_ + "/foo";
    bar_path = mnt_dir_ + "/bar";
    Afoo_path = mnt_dir_ + "/A/foo";
    Abar_path = mnt_dir_ + "/A/bar";
    Bfoo_path = mnt_dir_ + "/B/foo";
    Bbar_path = mnt_dir_ + "/B/bar";
    ACfoo_path = mnt_dir_ + "/A/C/foo";
    ACbar_path = mnt_dir_ + "/A/C/bar";
  }

  string test_path;
  string A_path;
  string AC_path;
  string B_path;
  string foo_path;
  string bar_path;
  string Afoo_path;
  string Abar_path;
  string Bfoo_path;
  string Bbar_path;
  string ACfoo_path;
  string ACbar_path;
};
} // namespace tests
} // namespace fs_testing
// Factory hook: the CrashMonkey harness dlopens this test and calls this
// function to create the test-case instance it will drive.
extern "C" fs_testing::tests::BaseTestCase *test_case_get_instance() {
return new fs_testing::tests::testName;
}
// Matching teardown hook: frees an instance created by test_case_get_instance().
extern "C" void test_case_delete_instance(fs_testing::tests::BaseTestCase *tc) {
delete tc;
}
| utsaslab/crashmonkey | code/tests/seq1/j-lang138.cpp | C++ | apache-2.0 | 3,828 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.utils;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
/**
* This classes exposes low-level methods for reading/writing from byte streams or buffers.
*/
public final class ByteUtils {
private ByteUtils() {}
/**
* Read an unsigned integer from the given position without modifying the buffers position
*
* @param buffer the buffer to read from
* @param index the index from which to read the integer
* @return The integer read, as a long to avoid signedness
*/
public static long readUnsignedInt(ByteBuffer buffer, int index) {
return buffer.getInt(index) & 0xffffffffL;
}
/**
* Read an unsigned integer stored in little-endian format from the {@link InputStream}.
*
* @param in The stream to read from
* @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
*/
public static int readUnsignedIntLE(InputStream in) throws IOException {
return in.read()
| (in.read() << 8)
| (in.read() << 16)
| (in.read() << 24);
}
/**
* Read an unsigned integer stored in little-endian format from a byte array
* at a given offset.
*
* @param buffer The byte array to read from
* @param offset The position in buffer to read from
* @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
*/
public static int readUnsignedIntLE(byte[] buffer, int offset) {
return (buffer[offset] << 0 & 0xff)
| ((buffer[offset + 1] & 0xff) << 8)
| ((buffer[offset + 2] & 0xff) << 16)
| ((buffer[offset + 3] & 0xff) << 24);
}
/**
* Write the given long value as a 4 byte unsigned integer. Overflow is ignored.
*
* @param buffer The buffer to write to
* @param index The position in the buffer at which to begin writing
* @param value The value to write
*/
public static void writeUnsignedInt(ByteBuffer buffer, int index, long value) {
buffer.putInt(index, (int) (value & 0xffffffffL));
}
/**
* Write the given long value as a 4 byte unsigned integer. Overflow is ignored.
*
* @param buffer The buffer to write to
* @param value The value to write
*/
public static void writeUnsignedInt(ByteBuffer buffer, long value) {
buffer.putInt((int) (value & 0xffffffffL));
}
/**
* Write an unsigned integer in little-endian format to the {@link OutputStream}.
*
* @param out The stream to write to
* @param value The value to write
*/
public static void writeUnsignedIntLE(OutputStream out, int value) throws IOException {
out.write(value);
out.write(value >>> 8);
out.write(value >>> 16);
out.write(value >>> 24);
}
/**
* Write an unsigned integer in little-endian format to a byte array
* at a given offset.
*
* @param buffer The byte array to write to
* @param offset The position in buffer to write to
* @param value The value to write
*/
public static void writeUnsignedIntLE(byte[] buffer, int offset, int value) {
buffer[offset] = (byte) value;
buffer[offset + 1] = (byte) (value >>> 8);
buffer[offset + 2] = (byte) (value >>> 16);
buffer[offset + 3] = (byte) (value >>> 24);
}
/**
* Read an integer stored in variable-length format using zig-zag decoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
*
* @param buffer The buffer to read from
* @return The integer read
*
* @throws IllegalArgumentException if variable-length value does not terminate after 5 bytes have been read
*/
public static int readVarint(ByteBuffer buffer) {
int value = 0;
int i = 0;
int b;
while (((b = buffer.get()) & 0x80) != 0) {
value |= (b & 0x7f) << i;
i += 7;
if (i > 28)
throw illegalVarintException(value);
}
value |= b << i;
return (value >>> 1) ^ -(value & 1);
}
/**
* Read an integer stored in variable-length format using zig-zag decoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
*
* @param in The input to read from
* @return The integer read
*
* @throws IllegalArgumentException if variable-length value does not terminate after 5 bytes have been read
* @throws IOException if {@link DataInput} throws {@link IOException}
*/
public static int readVarint(DataInput in) throws IOException {
int value = 0;
int i = 0;
int b;
while (((b = in.readByte()) & 0x80) != 0) {
value |= (b & 0x7f) << i;
i += 7;
if (i > 28)
throw illegalVarintException(value);
}
value |= b << i;
return (value >>> 1) ^ -(value & 1);
}
/**
* Read a long stored in variable-length format using zig-zag decoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
*
* @param in The input to read from
* @return The long value read
*
* @throws IllegalArgumentException if variable-length value does not terminate after 10 bytes have been read
* @throws IOException if {@link DataInput} throws {@link IOException}
*/
public static long readVarlong(DataInput in) throws IOException {
long value = 0L;
int i = 0;
long b;
while (((b = in.readByte()) & 0x80) != 0) {
value |= (b & 0x7f) << i;
i += 7;
if (i > 63)
throw illegalVarlongException(value);
}
value |= b << i;
return (value >>> 1) ^ -(value & 1);
}
/**
* Read a long stored in variable-length format using zig-zag decoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
*
* @param buffer The buffer to read from
* @return The long value read
*
* @throws IllegalArgumentException if variable-length value does not terminate after 10 bytes have been read
*/
public static long readVarlong(ByteBuffer buffer) {
long value = 0L;
int i = 0;
long b;
while (((b = buffer.get()) & 0x80) != 0) {
value |= (b & 0x7f) << i;
i += 7;
if (i > 63)
throw illegalVarlongException(value);
}
value |= b << i;
return (value >>> 1) ^ -(value & 1);
}
/**
* Write the given integer following the variable-length zig-zag encoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
* into the output.
*
* @param value The value to write
* @param out The output to write to
*/
public static void writeVarint(int value, DataOutput out) throws IOException {
int v = (value << 1) ^ (value >> 31);
while ((v & 0xffffff80) != 0L) {
out.writeByte((v & 0x7f) | 0x80);
v >>>= 7;
}
out.writeByte((byte) v);
}
/**
* Write the given integer following the variable-length zig-zag encoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
* into the buffer.
*
* @param value The value to write
* @param buffer The output to write to
*/
public static void writeVarint(int value, ByteBuffer buffer) {
int v = (value << 1) ^ (value >> 31);
while ((v & 0xffffff80) != 0L) {
byte b = (byte) ((v & 0x7f) | 0x80);
buffer.put(b);
v >>>= 7;
}
buffer.put((byte) v);
}
/**
* Write the given integer following the variable-length zig-zag encoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
* into the output.
*
* @param value The value to write
* @param out The output to write to
*/
public static void writeVarlong(long value, DataOutput out) throws IOException {
long v = (value << 1) ^ (value >> 63);
while ((v & 0xffffffffffffff80L) != 0L) {
out.writeByte(((int) v & 0x7f) | 0x80);
v >>>= 7;
}
out.writeByte((byte) v);
}
/**
* Write the given integer following the variable-length zig-zag encoding from
* <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
* into the buffer.
*
* @param value The value to write
* @param buffer The buffer to write to
*/
public static void writeVarlong(long value, ByteBuffer buffer) {
long v = (value << 1) ^ (value >> 63);
while ((v & 0xffffffffffffff80L) != 0L) {
byte b = (byte) ((v & 0x7f) | 0x80);
buffer.put(b);
v >>>= 7;
}
buffer.put((byte) v);
}
/**
* Number of bytes needed to encode an integer in variable-length format.
*
* @param value The signed value
*/
public static int sizeOfVarint(int value) {
int v = (value << 1) ^ (value >> 31);
int bytes = 1;
while ((v & 0xffffff80) != 0L) {
bytes += 1;
v >>>= 7;
}
return bytes;
}
/**
* Number of bytes needed to encode a long in variable-length format.
*
* @param value The signed value
*/
public static int sizeOfVarlong(long value) {
long v = (value << 1) ^ (value >> 63);
int bytes = 1;
while ((v & 0xffffffffffffff80L) != 0L) {
bytes += 1;
v >>>= 7;
}
return bytes;
}
private static IllegalArgumentException illegalVarintException(int value) {
throw new IllegalArgumentException("Varint is too long, the most significant bit in the 5th byte is set, " +
"converted value: " + Integer.toHexString(value));
}
private static IllegalArgumentException illegalVarlongException(long value) {
throw new IllegalArgumentException("Varlong is too long, most significant bit in the 10th byte is set, " +
"converted value: " + Long.toHexString(value));
}
}
| ijuma/kafka | clients/src/main/java/org/apache/kafka/common/utils/ByteUtils.java | Java | apache-2.0 | 11,546 |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.query.output.ratelimit.snapshot;
import org.wso2.siddhi.core.config.ExecutionPlanContext;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEventPool;
import org.wso2.siddhi.core.util.Scheduler;
import org.wso2.siddhi.core.util.parser.SchedulerParser;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
/**
* Parent implementation for per event periodic snapshot rate limiting. Multiple implementations of this will be
* there to represent different queries. Snapshot rate limiting will only emit current events representing the
* snapshot period.
*/
public class PerSnapshotOutputRateLimiter extends SnapshotOutputRateLimiter {
    // Key under which the pending event chunk is persisted; must be identical
    // in currentState() and restoreState().
    private static final String EVENT_CHUNK_STATE_KEY = "EventChunk";

    private final Long value;
    private String id;
    private ScheduledExecutorService scheduledExecutorService;
    private ComplexEventChunk<ComplexEvent> eventChunk = new ComplexEventChunk<ComplexEvent>(false);
    private ComplexEvent lastEvent;
    private Scheduler scheduler;
    private long scheduledTime;
    private String queryName;

    public PerSnapshotOutputRateLimiter(String id, Long value, ScheduledExecutorService scheduledExecutorService,
                                        WrappedSnapshotOutputRateLimiter wrappedSnapshotOutputRateLimiter,
                                        ExecutionPlanContext executionPlanContext, String queryName) {
        super(wrappedSnapshotOutputRateLimiter, executionPlanContext);
        this.queryName = queryName;
        this.id = id;
        this.value = value;
        this.scheduledExecutorService = scheduledExecutorService;
    }

    @Override
    public void process(ComplexEventChunk complexEventChunk) {
        List<ComplexEventChunk<ComplexEvent>> outputEventChunks = new ArrayList<ComplexEventChunk<ComplexEvent>>();
        complexEventChunk.reset();
        synchronized (this) {
            while (complexEventChunk.hasNext()) {
                ComplexEvent event = complexEventChunk.next();
                if (event.getType() == ComplexEvent.Type.TIMER) {
                    tryFlushEvents(outputEventChunks, event);
                } else if (event.getType() == ComplexEvent.Type.CURRENT) {
                    complexEventChunk.remove();
                    tryFlushEvents(outputEventChunks, event);
                    // Remember the latest CURRENT event: it represents the snapshot.
                    lastEvent = event;
                } else {
                    tryFlushEvents(outputEventChunks, event);
                }
            }
        }
        // Callbacks are invoked outside the synchronized block to avoid holding
        // the lock while downstream processing runs.
        for (ComplexEventChunk eventChunk : outputEventChunks) {
            sendToCallBacks(eventChunk);
        }
    }

    private void tryFlushEvents(List<ComplexEventChunk<ComplexEvent>> outputEventChunks, ComplexEvent event) {
        if (event.getTimestamp() >= scheduledTime) {
            ComplexEventChunk<ComplexEvent> outputEventChunk = new ComplexEventChunk<ComplexEvent>(false);
            if (lastEvent != null) {
                outputEventChunk.add(cloneComplexEvent(lastEvent));
            }
            outputEventChunks.add(outputEventChunk);
            scheduledTime += value;
            scheduler.notifyAt(scheduledTime);
        }
    }

    @Override
    public SnapshotOutputRateLimiter clone(String key, WrappedSnapshotOutputRateLimiter
            wrappedSnapshotOutputRateLimiter) {
        return new PerSnapshotOutputRateLimiter(id + key, value, scheduledExecutorService,
                wrappedSnapshotOutputRateLimiter, executionPlanContext, queryName);
    }

    @Override
    public void start() {
        scheduler = SchedulerParser.parse(scheduledExecutorService, this, executionPlanContext);
        scheduler.setStreamEventPool(new StreamEventPool(0, 0, 0, 5));
        scheduler.init(lockWrapper, queryName);
        long currentTime = System.currentTimeMillis();
        scheduledTime = currentTime + value;
        scheduler.notifyAt(scheduledTime);
    }

    @Override
    public void stop() {
        //Nothing to stop
    }

    @Override
    public Map<String, Object> currentState() {
        Map<String, Object> state = new HashMap<>();
        state.put(EVENT_CHUNK_STATE_KEY, eventChunk.getFirst());
        return state;
    }

    @Override
    public void restoreState(Map<String, Object> state) {
        eventChunk.clear();
        // Bug fix: previously read key "EventList" while currentState() stored
        // under "EventChunk", so restored state was always null.
        eventChunk.add((ComplexEvent) state.get(EVENT_CHUNK_STATE_KEY));
    }
}
| lgobinath/siddhi | modules/siddhi-core/src/main/java/org/wso2/siddhi/core/query/output/ratelimit/snapshot/PerSnapshotOutputRateLimiter.java | Java | apache-2.0 | 5,177 |
/* Copyright 2016-2019, SINTEF Ocean.
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include "cppfmu/cppfmu_cs.hpp"
#include <stdexcept>
namespace cppfmu
{

// =============================================================================
// SlaveInstance
// =============================================================================

// Default implementations: a concrete slave overrides only the hooks it needs.
void SlaveInstance::SetupExperiment(
    FMIBoolean /*toleranceDefined*/,
    FMIReal /*tolerance*/,
    FMIReal /*tStart*/,
    FMIBoolean /*stopTimeDefined*/,
    FMIReal /*tStop*/)
{
    // Do nothing
}

void SlaveInstance::EnterInitializationMode()
{
    // Do nothing
}

void SlaveInstance::ExitInitializationMode()
{
    // Do nothing
}

void SlaveInstance::Terminate()
{
    // Do nothing
}

void SlaveInstance::Reset()
{
    // Do nothing
}

// The default variable accessors expose no variables at all, so any request
// with nvr != 0 refers to a variable that does not exist.
void SlaveInstance::SetReal(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    const FMIReal /*value*/[])
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to set nonexistent variable");
    }
}

void SlaveInstance::SetInteger(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    const FMIInteger /*value*/[])
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to set nonexistent variable");
    }
}

void SlaveInstance::SetBoolean(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    const FMIBoolean /*value*/[])
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to set nonexistent variable");
    }
}

void SlaveInstance::SetString(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    const FMIString /*value*/[])
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to set nonexistent variable");
    }
}

void SlaveInstance::GetReal(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    FMIReal /*value*/[]) const
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to get nonexistent variable");
    }
}

void SlaveInstance::GetInteger(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    FMIInteger /*value*/[]) const
{
    if (nvr != 0) {
        throw std::logic_error("Attempted to get nonexistent variable");
    }
}

void SlaveInstance::GetBoolean(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    FMIBoolean /*value*/[]) const
{
    // Bug fix: message previously said "set" in this getter (copy-paste error).
    if (nvr != 0) {
        throw std::logic_error("Attempted to get nonexistent variable");
    }
}

void SlaveInstance::GetString(
    const FMIValueReference /*vr*/[],
    std::size_t nvr,
    FMIString /*value*/[]) const
{
    // Bug fix: message previously said "set" in this getter (copy-paste error).
    if (nvr != 0) {
        throw std::logic_error("Attempted to get nonexistent variable");
    }
}

SlaveInstance::~SlaveInstance() CPPFMU_NOEXCEPT
{
    // Do nothing
}

} // namespace cppfmu
| idaholab/raven | framework/contrib/PythonFMU/pythonfmu/pythonfmu-export/cpp/cppfmu_cs.cpp | C++ | apache-2.0 | 2,874 |
package org.baade.eel.core.player;
import org.baade.eel.core.ILifecycle;
import org.baade.eel.core.message.IMessage;
import org.baade.eel.core.processor.IProcessor;
/**
 * A participant in the eel messaging framework that can receive messages and
 * exposes its message processor. Extends {@link ILifecycle}, so implementations
 * also follow the framework's lifecycle contract.
 */
public interface IPlayer extends ILifecycle {
// Deliver a message to this player. NOTE(review): whether delivery is
// synchronous or queued is implementation-defined; not visible from here.
public void send(IMessage message);
// The processor associated with this player, presumably used to handle
// incoming messages — confirm against the framework's dispatch code.
public IProcessor getProcessor();
}
| baade-org/eel | eel-core/src/main/java/org/baade/eel/core/player/IPlayer.java | Java | apache-2.0 | 290 |
/*
* Copyright 2019 Qameta Software OÜ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.qameta.allure.testdata;
/**
* @author sskorol (Sergey Korol)
*/
public class DummyCard {

    // Card number as supplied at construction; never modified afterwards.
    private final String number;

    public DummyCard(final String number) {
        this.number = number;
    }

    public String getNumber() {
        return number;
    }

    @Override
    public String toString() {
        return "DummyCard{number='" + number + "'}";
    }
}
| allure-framework/allure-java | allure-java-commons/src/test/java/io/qameta/allure/testdata/DummyCard.java | Java | apache-2.0 | 1,043 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc.Rendering;
namespace Microsoft.AspNetCore.Mvc.RazorPages
{
    /// <summary>
    /// Provides methods to create a Razor page.
    /// </summary>
    public interface IPageActivatorProvider
    {
        /// <summary>
        /// Creates a Razor page activator.
        /// </summary>
        /// <param name="descriptor">The <see cref="CompiledPageActionDescriptor"/>.</param>
        /// <returns>The delegate used to activate the page.</returns>
        Func<PageContext, ViewContext, object> CreateActivator(CompiledPageActionDescriptor descriptor);

        /// <summary>
        /// Releases a Razor page.
        /// </summary>
        /// <param name="descriptor">The <see cref="CompiledPageActionDescriptor"/>.</param>
        /// <returns>The delegate used to dispose the activated page.</returns>
        Action<PageContext, ViewContext, object>? CreateReleaser(CompiledPageActionDescriptor descriptor);

        /// <summary>
        /// Releases a Razor page asynchronously.
        /// </summary>
        /// <param name="descriptor">The <see cref="CompiledPageActionDescriptor"/>.</param>
        /// <returns>The delegate used to dispose the activated page asynchronously.</returns>
        Func<PageContext, ViewContext, object, ValueTask>? CreateAsyncReleaser(CompiledPageActionDescriptor descriptor)
        {
            // Default implementation: adapt the synchronous releaser, if any,
            // into an already-completed ValueTask-returning delegate.
            var syncReleaser = CreateReleaser(descriptor);
            if (syncReleaser == null)
            {
                return null;
            }

            return (pageContext, viewContext, page) =>
            {
                syncReleaser(pageContext, viewContext, page);
                return default;
            };
        }
    }
}
| aspnet/AspNetCore | src/Mvc/Mvc.RazorPages/src/IPageActivatorProvider.cs | C# | apache-2.0 | 1,873 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "sql_test_suite_fixture.h"
#include "test_utils.h"
using namespace ignite_test;
namespace ignite
{
    SqlTestSuiteFixture::SqlTestSuiteFixture():
        testCache(0),
        env(NULL),
        dbc(NULL),
        stmt(NULL)
    {
        grid = StartNode("queries-test.xml");
        testCache = grid.GetCache<int64_t, TestType>("cache");

        // Allocate an environment handle
        SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &env);
        BOOST_REQUIRE(env != NULL);

        // We want ODBC 3 support
        SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, reinterpret_cast<void*>(SQL_OV_ODBC3), 0);

        // Allocate a connection handle
        SQLAllocHandle(SQL_HANDLE_DBC, env, &dbc);
        BOOST_REQUIRE(dbc != NULL);

        // Connect string
        SQLCHAR connectStr[] = "DRIVER={Apache Ignite};ADDRESS=127.0.0.1:11110;CACHE=cache";
        SQLCHAR outstr[ODBC_BUFFER_SIZE];
        SQLSMALLINT outstrlen;

        // Connecting to ODBC server.
        SQLRETURN ret = SQLDriverConnect(dbc, NULL, connectStr, static_cast<SQLSMALLINT>(sizeof(connectStr)),
            outstr, sizeof(outstr), &outstrlen, SQL_DRIVER_COMPLETE);
        if (!SQL_SUCCEEDED(ret))
        {
            // Stop the node before failing so the next test starts clean.
            Ignition::StopAll(true);
            BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_DBC, dbc));
        }

        // Allocate a statement handle
        SQLAllocHandle(SQL_HANDLE_STMT, dbc, &stmt);
        BOOST_REQUIRE(stmt != NULL);
    }

    SqlTestSuiteFixture::~SqlTestSuiteFixture()
    {
        // Releasing statement handle.
        SQLFreeHandle(SQL_HANDLE_STMT, stmt);

        // Disconnecting from the server.
        SQLDisconnect(dbc);

        // Releasing allocated handles.
        SQLFreeHandle(SQL_HANDLE_DBC, dbc);
        SQLFreeHandle(SQL_HANDLE_ENV, env);

        ignite::Ignition::StopAll(true);
    }

    // Executes the given SQL request, binds the single result column into
    // `column` and checks that exactly one row is returned.
    void SqlTestSuiteFixture::CheckSingleResult0(const char* request,
        SQLSMALLINT type, void* column, SQLLEN bufSize, SQLLEN* resSize) const
    {
        SQLRETURN ret;

        ret = SQLBindCol(stmt, 1, type, column, bufSize, resSize);
        if (!SQL_SUCCEEDED(ret))
            BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt));

        ret = SQLExecDirect(stmt, reinterpret_cast<SQLCHAR*>(const_cast<char*>(request)), SQL_NTS);
        if (!SQL_SUCCEEDED(ret))
            BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt));

        ret = SQLFetch(stmt);
        if (!SQL_SUCCEEDED(ret))
            BOOST_FAIL(GetOdbcErrorMessage(SQL_HANDLE_STMT, stmt));

        // The result set must contain exactly one row.
        ret = SQLFetch(stmt);
        BOOST_CHECK(ret == SQL_NO_DATA);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<std::string>(const char* request, const std::string& expected)
    {
        SQLCHAR res[ODBC_BUFFER_SIZE] = { 0 };
        SQLLEN resLen = 0;

        CheckSingleResult0(request, SQL_C_CHAR, res, ODBC_BUFFER_SIZE, &resLen);

        std::string actual;
        if (resLen > 0)
            actual.assign(reinterpret_cast<char*>(res), static_cast<size_t>(resLen));

        BOOST_CHECK_EQUAL(actual, expected);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int64_t>(const char* request, const int64_t& expected)
    {
        CheckSingleResultNum0<int64_t>(request, expected, SQL_C_SBIGINT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int32_t>(const char* request, const int32_t& expected)
    {
        CheckSingleResultNum0<int32_t>(request, expected, SQL_C_SLONG);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int16_t>(const char* request, const int16_t& expected)
    {
        CheckSingleResultNum0<int16_t>(request, expected, SQL_C_SSHORT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int8_t>(const char* request, const int8_t& expected)
    {
        CheckSingleResultNum0<int8_t>(request, expected, SQL_C_STINYINT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<float>(const char* request, const float& expected)
    {
        SQLREAL res = 0;

        CheckSingleResult0(request, SQL_C_FLOAT, &res, 0, 0);

        BOOST_CHECK_CLOSE(static_cast<float>(res), expected, 1E-6f);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<double>(const char* request, const double& expected)
    {
        SQLDOUBLE res = 0;

        CheckSingleResult0(request, SQL_C_DOUBLE, &res, 0, 0);

        BOOST_CHECK_CLOSE(static_cast<double>(res), expected, 1E-6);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<bool>(const char* request, const bool& expected)
    {
        SQLCHAR res = 0;

        CheckSingleResult0(request, SQL_C_BIT, &res, 0, 0);

        BOOST_CHECK_EQUAL((res != 0), expected);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<ignite::Guid>(const char* request, const ignite::Guid& expected)
    {
        SQLGUID res;
        memset(&res, 0, sizeof(res));

        CheckSingleResult0(request, SQL_C_GUID, &res, 0, 0);

        // Bug fix: ">>" binds tighter than "&", so the original expressions
        // "msb & MASK >> N" compared against "msb & (MASK >> N)" instead of the
        // intended "(msb & MASK) >> N". Parenthesized to match the intent.
        BOOST_CHECK_EQUAL(res.Data1, (expected.GetMostSignificantBits() & 0xFFFFFFFF00000000ULL) >> 32);
        BOOST_CHECK_EQUAL(res.Data2, (expected.GetMostSignificantBits() & 0x00000000FFFF0000ULL) >> 16);
        BOOST_CHECK_EQUAL(res.Data3, expected.GetMostSignificantBits() & 0x000000000000FFFFULL);

        for (size_t i = 0; i < sizeof(res.Data4); ++i)
            BOOST_CHECK_EQUAL(res.Data4[i], (expected.GetLeastSignificantBits() & (0xFFULL << (8 * i))) >> (8 * i));
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<std::string>(const char* request)
    {
        SQLCHAR res[ODBC_BUFFER_SIZE] = { 0 };
        SQLLEN resLen = 0;

        CheckSingleResult0(request, SQL_C_CHAR, res, ODBC_BUFFER_SIZE, &resLen);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int64_t>(const char* request)
    {
        CheckSingleResultNum0<int64_t>(request, SQL_C_SBIGINT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int32_t>(const char* request)
    {
        CheckSingleResultNum0<int32_t>(request, SQL_C_SLONG);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int16_t>(const char* request)
    {
        CheckSingleResultNum0<int16_t>(request, SQL_C_SSHORT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<int8_t>(const char* request)
    {
        CheckSingleResultNum0<int8_t>(request, SQL_C_STINYINT);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<float>(const char* request)
    {
        SQLREAL res = 0;

        CheckSingleResult0(request, SQL_C_FLOAT, &res, 0, 0);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<double>(const char* request)
    {
        SQLDOUBLE res = 0;

        CheckSingleResult0(request, SQL_C_DOUBLE, &res, 0, 0);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Date>(const char* request)
    {
        SQL_DATE_STRUCT res;

        CheckSingleResult0(request, SQL_C_DATE, &res, 0, 0);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Timestamp>(const char* request)
    {
        SQL_TIMESTAMP_STRUCT res;

        CheckSingleResult0(request, SQL_C_TIMESTAMP, &res, 0, 0);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Time>(const char* request)
    {
        SQL_TIME_STRUCT res;

        CheckSingleResult0(request, SQL_C_TIME, &res, 0, 0);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<std::vector<int8_t> >(const char* request, const std::vector<int8_t>& expected)
    {
        SQLCHAR res[ODBC_BUFFER_SIZE] = { 0 };
        SQLLEN resLen = 0;

        CheckSingleResult0(request, SQL_C_BINARY, res, ODBC_BUFFER_SIZE, &resLen);

        BOOST_REQUIRE_EQUAL(resLen, expected.size());

        if (resLen > 0)
        {
            std::vector<int8_t> actual(res, res + resLen);
            BOOST_REQUIRE_EQUAL_COLLECTIONS(expected.begin(), expected.end(), actual.begin(), actual.end());
        }
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<ignite::common::Decimal>(const char* request, const ignite::common::Decimal& expected)
    {
        SQLCHAR res[ODBC_BUFFER_SIZE] = { 0 };
        SQLLEN resLen = 0;

        CheckSingleResult0(request, SQL_C_CHAR, res, ODBC_BUFFER_SIZE, &resLen);

        ignite::common::Decimal actual(std::string(res, res + resLen));

        // Bug fix: 'actual' was constructed but never compared, so this
        // specialization could not fail on a wrong value.
        BOOST_CHECK(actual == expected);
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Date>(const char* request, const Date& expected)
    {
        SQL_DATE_STRUCT res;

        CheckSingleResult0(request, SQL_C_DATE, &res, 0, 0);

        using ignite::impl::binary::BinaryUtils;
        Date actual = common::MakeDateGmt(res.year, res.month, res.day);

        BOOST_REQUIRE_EQUAL(actual.GetSeconds(), expected.GetSeconds());
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Timestamp>(const char* request, const Timestamp& expected)
    {
        SQL_TIMESTAMP_STRUCT res;

        CheckSingleResult0(request, SQL_C_TIMESTAMP, &res, 0, 0);

        using ignite::impl::binary::BinaryUtils;
        Timestamp actual = common::MakeTimestampGmt(res.year, res.month, res.day, res.hour, res.minute, res.second, res.fraction);

        BOOST_REQUIRE_EQUAL(actual.GetSeconds(), expected.GetSeconds());
        BOOST_REQUIRE_EQUAL(actual.GetSecondFraction(), expected.GetSecondFraction());
    }

    template<>
    void SqlTestSuiteFixture::CheckSingleResult<Time>(const char* request, const Time& expected)
    {
        SQL_TIME_STRUCT res;

        CheckSingleResult0(request, SQL_C_TIME, &res, 0, 0);

        using ignite::impl::binary::BinaryUtils;
        Time actual = common::MakeTimeGmt(res.hour, res.minute, res.second);

        BOOST_REQUIRE_EQUAL(actual.GetSeconds(), expected.GetSeconds());
    }
}
| nivanov/ignite | modules/platforms/cpp/odbc-test/src/sql_test_suite_fixture.cpp | C++ | apache-2.0 | 10,627 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.api.date;
import static org.mockito.Mockito.verify;
import java.time.Instant;
import java.util.Date;
import org.assertj.core.api.DateAssert;
/**
* Tests for {@link DateAssert#isBetween(Date, Date)}, {@link DateAssert#isBetween(String, String)} and
* {@link DateAssert#isBetween(Instant, Instant)}.
*
* @author Joel Costigliola
*/
class DateAssert_isBetween_Test extends AbstractDateAssertWithDateArg_Test {

  // Exercises the Date-based overload.
  @Override
  protected DateAssert assertionInvocationWithDateArg() {
    return assertions.isBetween(otherDate, otherDate);
  }

  // Exercises the String-based overload (date given as its string form).
  @Override
  protected DateAssert assertionInvocationWithStringArg(String dateAsString) {
    return assertions.isBetween(dateAsString, dateAsString);
  }

  // Exercises the Instant-based overload.
  @Override
  protected DateAssert assertionInvocationWithInstantArg() {
    return assertions.isBetween(otherDate.toInstant(), otherDate.toInstant());
  }

  // Every overload must delegate to Dates#assertIsBetween with the trailing
  // flags (true, false), i.e. start inclusive / end exclusive.
  @Override
  protected void verifyAssertionInvocation(Date date) {
    verify(dates).assertIsBetween(getInfo(assertions), getActual(assertions), date, date, true, false);
  }
}
| hazendaz/assertj-core | src/test/java/org/assertj/core/api/date/DateAssert_isBetween_Test.java | Java | apache-2.0 | 1,673 |
package com.android.base.app.dagger;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import javax.inject.Qualifier;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Dagger/JSR-330 qualifier distinguishing which flavour of Android context
 * a binding provides or an injection point expects.
 */
@Qualifier
@Documented
@Retention(RUNTIME)
public @interface ContextType {

    /** Qualifies a context that is an Activity. */
    String ACTIVITY = "Activity";

    /** Qualifies a plain Context. */
    String CONTEXT = "Context";

    /** Qualifies the application context (the default). */
    String APPLICATION = "Application";

    /** Requested context flavour; defaults to {@link #APPLICATION}. */
    String value() default APPLICATION;
}
| Ztiany/AndroidBase | lib_base/src/main/java/com/android/base/app/dagger/ContextType.java | Java | apache-2.0 | 438 |
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
import {customElement, observe, property} from '@polymer/decorators';
import {html, PolymerElement} from '@polymer/polymer';
import * as d3Typed from 'd3';
import {DarkModeMixin} from '../../../components/polymer/dark_mode_mixin';
import {LegacyElementMixin} from '../../../components/polymer/legacy_element_mixin';
// Copied from `tf-histogram-dashboard/histogramCore`; TODO(wchargin):
// resolve dependency structure.
/** One histogram summary datum: the binned distribution at a single step. */
export type VzHistogram = {
  wall_time: number; // in seconds
  step: number; // global step at which this histogram was recorded
  bins: D3HistogramBin[]; // binned distribution for this step
};
/** A single histogram bin covering [x, x + dx) with height y. */
export type D3HistogramBin = {
  x: number; // left edge of the bin
  dx: number; // width of the bin
  y: number; // bin height (count/weight)
};
// TypeScript can't deal with d3's style of overloading and
// polymorphism, and constantly fails to select the correct overload.
// This module was converted from working non-TypeScript code, so we
// grandfather it in untyped.
const d3: any = d3Typed;
/** Public API of the <vz-histogram-timeseries> custom element. */
export interface VzHistogramTimeseries extends HTMLElement {
  /** Replaces the data for the given series and redraws. */
  setSeriesData(series: string, data: VzHistogram[]): void;
  /** Redraws immediately; no-op unless attached and data has been set. */
  redraw(): void;
}
@customElement('vz-histogram-timeseries')
class _VzHistogramTimeseries
extends LegacyElementMixin(DarkModeMixin(PolymerElement))
implements VzHistogramTimeseries
{
static readonly template = html`
<div id="tooltip"><span></span></div>
<svg id="svg">
<g>
<g class="axis x"></g>
<g class="axis y"></g>
<g class="axis y slice"></g>
<g class="stage">
<rect class="background"></rect>
</g>
<g class="x-axis-hover"></g>
<g class="y-axis-hover"></g>
<g class="y-slice-axis-hover"></g>
</g>
</svg>
<style>
:host {
color: #aaa;
display: flex;
flex-direction: column;
flex-grow: 1;
flex-shrink: 1;
position: relative;
--vz-histogram-timeseries-hover-bg-color: #fff;
--vz-histogram-timeseries-outline-color: #fff;
--vz-histogram-timeseries-hover-outline-color: #000;
}
:host(.dark-mode) {
--vz-histogram-timeseries-hover-bg-color: var(
--primary-background-color
);
--vz-histogram-timeseries-outline-color: var(--paper-grey-600);
--vz-histogram-timeseries-hover-outline-color: #fff;
}
svg {
font-family: roboto, sans-serif;
overflow: visible;
display: block;
width: 100%;
flex-grow: 1;
flex-shrink: 1;
}
text {
fill: currentColor;
}
#tooltip {
position: absolute;
display: block;
opacity: 0;
font-weight: bold;
font-size: 11px;
}
.background {
fill-opacity: 0;
fill: red;
}
.histogram {
pointer-events: none;
}
.hover {
font-size: 9px;
dominant-baseline: middle;
opacity: 0;
}
.hover circle {
stroke: white;
stroke-opacity: 0.5;
stroke-width: 1px;
}
.hover text {
fill: black;
opacity: 0;
}
.hover.hover-closest circle {
fill: var(--vz-histogram-timeseries-hover-outline-color) !important;
}
.hover.hover-closest text {
opacity: 1;
}
.baseline {
stroke: black;
stroke-opacity: 0.1;
}
.outline {
fill: none;
stroke: var(--vz-histogram-timeseries-outline-color);
stroke-opacity: 0.5;
}
.outline.outline-hover {
stroke: var(--vz-histogram-timeseries-hover-outline-color) !important;
stroke-opacity: 1;
}
.x-axis-hover,
.y-axis-hover,
.y-slice-axis-hover {
pointer-events: none;
}
.x-axis-hover .label,
.y-axis-hover .label,
.y-slice-axis-hover .label {
opacity: 0;
font-weight: bold;
font-size: 11px;
text-anchor: end;
}
.x-axis-hover text {
text-anchor: middle;
}
.y-axis-hover text,
.y-slice-axis-hover text {
text-anchor: start;
}
.x-axis-hover line,
.y-axis-hover line,
.y-slice-axis-hover line {
stroke: currentColor;
}
.x-axis-hover rect,
.y-axis-hover rect,
.y-slice-axis-hover rect {
fill: var(--vz-histogram-timeseries-hover-bg-color);
}
#tooltip,
.x-axis-hover text,
.y-axis-hover text,
.y-slice-axis-hover text {
color: var(--vz-histogram-timeseries-hover-outline-color);
}
.axis {
font-size: 11px;
}
.axis path.domain {
fill: none;
}
.axis .tick line {
stroke: #ddd;
}
.axis.slice {
opacity: 0;
}
.axis.slice .tick line {
stroke-dasharray: 2;
}
.small .axis text {
display: none;
}
.small .axis .tick:first-of-type text {
display: block;
}
.small .axis .tick:last-of-type text {
display: block;
}
.medium .axis text {
display: none;
}
.medium .axis .tick:nth-child(2n + 1) text {
display: block;
}
.large .axis text {
display: none;
}
.large .axis .tick:nth-child(2n + 1) text {
display: block;
}
</style>
`;
@property({type: String})
mode: string = 'offset';
@property({type: String})
timeProperty: string = 'step';
@property({type: String})
bins: string = 'bins';
@property({type: String})
x: string = 'x';
@property({type: String})
dx: string = 'dx';
@property({type: String})
y: string = 'y';
@property({type: Object})
colorScale = d3.scaleOrdinal(d3.schemeCategory10);
@property({type: Number})
modeTransitionDuration: number = 500;
@property({type: Boolean})
_attached: boolean;
@property({type: String})
_name: string = null;
@property({type: Array})
_data: VzHistogram[] = null;
  /** Polymer lifecycle: ensures scoped styles also apply to d3-created nodes. */
  ready() {
    super.ready();
    // Polymer's way of scoping styles on nodes that d3 created
    this.scopeSubtree(this.$.svg, true);
  }
  /** Polymer lifecycle: enables drawing once the element is in the DOM. */
  override attached() {
    this._attached = true;
  }
  /** Polymer lifecycle: suppresses drawing after removal from the DOM. */
  override detached() {
    this._attached = false;
  }
  /**
   * Replaces this chart's single series and redraws.
   *
   * @param name series identifier; used to pick the color from `colorScale`.
   * @param data the histogram steps to display.
   */
  setSeriesData(name, data) {
    this._name = name;
    this._data = data;
    this.redraw();
  }
  // Redraw (without animation) whenever the axis configuration, the color
  // scale, or the attachment state changes.
  @observe('timeProperty', 'colorScale', '_attached')
  _redrawOnChange() {
    this.redraw();
  }
  /**
   * Redraws the chart immediately (no transition).
   *
   * Safe to call at any time: `_draw` bails out unless the element is
   * attached and series data has been set.
   */
  redraw() {
    this._draw(0);
  }
  // Animate the transition between 'offset' and overlay layouts when the
  // mode property changes.
  @observe('mode')
  _modeRedraw() {
    this._draw(this.modeTransitionDuration);
  }
  /**
   * Renders the chart into the element's SVG.
   *
   * No-op unless the element is attached and data has been set. Validates
   * the configured accessors, computes the scales for the current `mode`
   * ('offset' stacks one slice per step down the y axis; any other mode
   * overlays all slices in a single frame), and (re)binds every d3
   * selection, including the hover/tooltip handlers.
   *
   * @param duration transition duration in ms (0 means redraw immediately).
   */
  _draw(duration) {
    if (!this._attached || !this._data) {
      return;
    }
    //
    // Data verification
    //
    if (duration === undefined)
      throw new Error('vz-histogram-timeseries _draw needs duration');
    if (this._data.length <= 0) throw new Error('Not enough steps in the data');
    if (!this._data[0].hasOwnProperty(this.bins))
      throw new Error("No bins property of '" + this.bins + "' in data");
    if (this._data[0][this.bins].length <= 0)
      throw new Error('Must have at least one bin in bins in data');
    if (!this._data[0][this.bins][0].hasOwnProperty(this.x))
      throw new Error("No x property '" + this.x + "' on bins data");
    if (!this._data[0][this.bins][0].hasOwnProperty(this.dx))
      throw new Error("No dx property '" + this.dx + "' on bins data");
    if (!this._data[0][this.bins][0].hasOwnProperty(this.y))
      throw new Error("No y property '" + this.y + "' on bins data");
    //
    // Initialization
    //
    // Copy configurable property names and data into locals so the d3
    // callbacks below close over stable values.
    var timeProp = this.timeProperty;
    var xProp = this.x;
    var binsProp = this.bins;
    var dxProp = this.dx;
    var yProp = this.y;
    var data = this._data;
    var name = this._name;
    var mode = this.mode;
    var color = d3.hcl(this.colorScale(name));
    var tooltip = d3.select(this.$.tooltip);
    var xAccessor = function (d) {
      return d[xProp];
    };
    var yAccessor = function (d) {
      return d[yProp];
    };
    var dxAccessor = function (d) {
      return d[dxProp];
    };
    var xRightAccessor = function (d) {
      return d[xProp] + d[dxProp];
    };
    var timeAccessor = function (d) {
      return d[timeProp];
    };
    if (timeProp === 'relative') {
      // 'relative' time is measured from the first datum's wall time.
      timeAccessor = function (d) {
        return d.wall_time - data[0].wall_time;
      };
    }
    var brect = this.$.svg.getBoundingClientRect();
    var outerWidth = brect.width,
      outerHeight = brect.height;
    var sliceHeight,
      margin = {top: 5, right: 60, bottom: 20, left: 24};
    if (mode === 'offset') {
      sliceHeight = outerHeight / 2.5;
      margin.top = sliceHeight + 5;
    } else {
      sliceHeight = outerHeight - margin.top - margin.bottom;
    }
    var width = outerWidth - margin.left - margin.right,
      height = outerHeight - margin.top - margin.bottom;
    // NOTE(review): leftMin/rightMax appear to be unused below (the x domain
    // is derived from xExtents instead) — candidates for removal.
    var leftMin = d3.min(data, xAccessor),
      rightMax = d3.max(data, xRightAccessor);
    //
    // Text formatters
    //
    var format = d3.format('.3n');
    var yAxisFormat = d3.format('.0f');
    if (timeProp === 'wall_time') {
      yAxisFormat = d3.timeFormat('%m/%d %X');
    } else if (timeProp === 'relative') {
      yAxisFormat = function (d: number) {
        return d3.format('.1r')(d / 3.6e6) + 'h'; // Convert to hours.
      };
    }
    //
    // Calculate the extents
    //
    var xExtents = data.map(function (d, i) {
      return [
        d3.min(d[binsProp], xAccessor),
        d3.max(d[binsProp], xRightAccessor),
      ];
    });
    var yExtents = data.map(function (d) {
      return d3.extent(d[binsProp], yAccessor);
    });
    //
    // Scales and axis
    //
    // Outlines are drawn in a fixed-size virtual canvas and scaled into
    // place so mode transitions can animate via a transform only.
    var outlineCanvasSize = 500;
    var extent = d3.extent(data, timeAccessor);
    var yScale = (timeProp === 'wall_time' ? d3.scaleTime() : d3.scaleLinear())
      .domain(extent)
      .range([0, mode === 'offset' ? height : 0]);
    var ySliceScale = d3
      .scaleLinear()
      .domain([
        0,
        d3.max(data, function (d, i) {
          return yExtents[i][1];
        }),
      ])
      .range([sliceHeight, 0]);
    var yLineScale = d3
      .scaleLinear()
      .domain(ySliceScale.domain())
      .range([outlineCanvasSize, 0]);
    var xScale = d3
      .scaleLinear()
      .domain([
        d3.min(data, function (d, i) {
          return xExtents[i][0];
        }),
        d3.max(data, function (d, i) {
          return xExtents[i][1];
        }),
      ])
      .nice()
      .range([0, width]);
    var xLineScale = d3
      .scaleLinear()
      .domain(xScale.domain())
      .range([0, outlineCanvasSize]);
    // Older slices get brighter, newer ones darker shades of the series color.
    const fillColor = d3
      .scaleLinear()
      .domain(d3.extent(data, timeAccessor))
      .range([color.brighter(), color.darker()])
      .interpolate(d3.interpolateHcl);
    var xAxis = d3.axisBottom(xScale).ticks(Math.max(2, width / 20));
    var yAxis = d3
      .axisRight(yScale)
      .ticks(Math.max(2, height / 15))
      .tickFormat(yAxisFormat);
    var ySliceAxis = d3
      .axisRight(ySliceScale)
      .ticks(Math.max(2, height / 15))
      .tickSize(width + 5)
      .tickFormat(format);
    var xBinCentroid = function (d) {
      return d[xProp] + d[dxProp] / 2;
    };
    var linePath = d3
      .line()
      .x(function (d) {
        return xLineScale(xBinCentroid(d));
      })
      .y(function (d) {
        return yLineScale(d[yProp]);
      });
    var path = function (d) {
      // Draw a line from 0 to the first point and from the last point to 0.
      return (
        'M' +
        xLineScale(xBinCentroid(d[0])) +
        ',' +
        yLineScale(0) +
        'L' +
        linePath(d).slice(1) +
        'L' +
        xLineScale(xBinCentroid(d[d.length - 1])) +
        ',' +
        yLineScale(0)
      );
    };
    //
    // Render
    //
    var svgNode = this.$.svg;
    var svg = d3.select(svgNode);
    var svgTransition = svg.transition().duration(duration);
    // Size classes control tick-label density via the CSS in the template.
    var g = svg
      .select('g')
      .classed('small', function () {
        return width > 0 && width <= 150;
      })
      .classed('medium', function () {
        return width > 150 && width <= 300;
      })
      .classed('large', function () {
        return width > 300;
      });
    var gTransition = svgTransition
      .select('g')
      .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
    var bisect = d3.bisector(xRightAccessor).left;
    // Hover affordances fade in/out with pointer enter/leave on the stage.
    var stage = g
      .select('.stage')
      .on('mouseover', function () {
        hoverUpdate.style('opacity', 1);
        xAxisHoverUpdate.style('opacity', 1);
        yAxisHoverUpdate.style('opacity', 1);
        ySliceAxisHoverUpdate.style('opacity', 1);
        tooltip.style('opacity', 1);
      })
      .on('mouseout', function () {
        hoverUpdate.style('opacity', 0);
        xAxisHoverUpdate.style('opacity', 0);
        yAxisHoverUpdate.style('opacity', 0);
        ySliceAxisHoverUpdate.style('opacity', 0);
        hoverUpdate.classed('hover-closest', false);
        outlineUpdate.classed('outline-hover', false);
        tooltip.style('opacity', 0);
      })
      .on('mousemove', onMouseMove);
    var background = stage
      .select('.background')
      .attr('transform', 'translate(' + -margin.left + ',' + -margin.top + ')')
      .attr('width', outerWidth)
      .attr('height', outerHeight);
    // One .histogram group per step, sorted by time so offsets stack in order.
    var histogram = stage.selectAll('.histogram').data(data),
      histogramExit = histogram.exit().remove(),
      histogramEnter = histogram.enter().append('g').attr('class', 'histogram'),
      histogramUpdate = histogramEnter.merge(histogram).sort(function (a, b) {
        return timeAccessor(a) - timeAccessor(b);
      }),
      histogramTransition = gTransition
        .selectAll('.histogram')
        .attr('transform', function (d) {
          return (
            'translate(0, ' +
            (mode === 'offset' ? yScale(timeAccessor(d)) - sliceHeight : 0) +
            ')'
          );
        });
    var baselineEnter = histogramEnter.append('line').attr('class', 'baseline'),
      baselineUpdate = histogramTransition
        .select('.baseline')
        .style('stroke-opacity', function (d) {
          return mode === 'offset' ? 0.1 : 0;
        })
        .attr('y1', sliceHeight)
        .attr('y2', sliceHeight)
        .attr('x2', width);
    var outlineEnter = histogramEnter.append('path').attr('class', 'outline'),
      outlineUpdate = histogramUpdate
        .select('.outline')
        .attr('vector-effect', 'non-scaling-stroke')
        .attr('d', function (d) {
          return path(d[binsProp]);
        })
        .style('stroke-width', 1),
      outlineTransition = histogramTransition
        .select('.outline')
        .attr(
          'transform',
          'scale(' +
            width / outlineCanvasSize +
            ', ' +
            sliceHeight / outlineCanvasSize +
            ')'
        )
        .style('stroke', function (d) {
          return mode === 'offset' ? '' : fillColor(timeAccessor(d));
        })
        .style('fill-opacity', function (d) {
          return mode === 'offset' ? 1 : 0;
        })
        .style('fill', function (d) {
          return fillColor(timeAccessor(d));
        });
    var hoverEnter = histogramEnter.append('g').attr('class', 'hover');
    var hoverUpdate = histogramUpdate
      .select('.hover')
      .style('fill', function (d) {
        return fillColor(timeAccessor(d));
      });
    hoverEnter.append('circle').attr('r', 2);
    hoverEnter.append('text').style('display', 'none').attr('dx', 4);
    // Hover readouts along the x axis, y (time) axis and y-slice axis.
    var xAxisHover = g.select('.x-axis-hover').selectAll('.label').data(['x']),
      xAxisHoverEnter = xAxisHover.enter().append('g').attr('class', 'label'),
      xAxisHoverUpdate = xAxisHover.merge(xAxisHoverEnter);
    xAxisHoverEnter
      .append('rect')
      .attr('x', -20)
      .attr('y', 6)
      .attr('width', 40)
      .attr('height', 14);
    xAxisHoverEnter
      .append('line')
      .attr('x1', 0)
      .attr('x2', 0)
      .attr('y1', 0)
      .attr('y2', 6);
    xAxisHoverEnter.append('text').attr('dy', 18);
    var yAxisHover = g.select('.y-axis-hover').selectAll('.label').data(['y']),
      yAxisHoverEnter = yAxisHover.enter().append('g').attr('class', 'label'),
      yAxisHoverUpdate = yAxisHover.merge(yAxisHoverEnter);
    yAxisHoverEnter
      .append('rect')
      .attr('x', 8)
      .attr('y', -6)
      .attr('width', 40)
      .attr('height', 14);
    yAxisHoverEnter
      .append('line')
      .attr('x1', 0)
      .attr('x2', 6)
      .attr('y1', 0)
      .attr('y2', 0);
    yAxisHoverEnter.append('text').attr('dx', 8).attr('dy', 4);
    var ySliceAxisHover = g
      .select('.y-slice-axis-hover')
      .selectAll('.label')
      .data(['y']),
      ySliceAxisHoverEnter = ySliceAxisHover
        .enter()
        .append('g')
        .attr('class', 'label'),
      ySliceAxisHoverUpdate = ySliceAxisHover.merge(ySliceAxisHoverEnter);
    ySliceAxisHoverEnter
      .append('rect')
      .attr('x', 8)
      .attr('y', -6)
      .attr('width', 40)
      .attr('height', 14);
    ySliceAxisHoverEnter
      .append('line')
      .attr('x1', 0)
      .attr('x2', 6)
      .attr('y1', 0)
      .attr('y2', 0);
    ySliceAxisHoverEnter.append('text').attr('dx', 8).attr('dy', 4);
    // The slice axis is only visible in overlay mode; the time axis only in
    // offset mode. Both are animated between the two layouts.
    gTransition
      .select('.y.axis.slice')
      .style('opacity', mode === 'offset' ? 0 : 1)
      .attr(
        'transform',
        'translate(0, ' + (mode === 'offset' ? -sliceHeight : 0) + ')'
      )
      .call(ySliceAxis);
    gTransition
      .select('.x.axis')
      .attr('transform', 'translate(0, ' + height + ')')
      .call(xAxis);
    gTransition
      .select('.y.axis')
      .style('opacity', mode === 'offset' ? 1 : 0)
      .attr(
        'transform',
        'translate(' + width + ', ' + (mode === 'offset' ? 0 : height) + ')'
      )
      .call(yAxis);
    gTransition.selectAll('.tick text').attr('fill', '#aaa');
    gTransition.selectAll('.axis path.domain').attr('stroke', 'none');
    // Tracks the pointer, finds the bin under the cursor in each slice and
    // the slice closest to the pointer, then positions markers, axis labels
    // and the tooltip accordingly.
    function onMouseMove() {
      // NOTE(review): 't' is computed but never read below.
      var m = d3.mouse(this),
        v = xScale.invert(m[0]),
        t = yScale.invert(m[1]);
      function hoverXIndex(d) {
        return Math.min(d[binsProp].length - 1, bisect(d[binsProp], v));
      }
      var closestSliceData;
      var closestSliceDistance = Infinity;
      var lastSliceData;
      hoverUpdate.attr('transform', function (d, i) {
        var index = hoverXIndex(d);
        lastSliceData = d;
        var x = xScale(
          d[binsProp][index][xProp] + d[binsProp][index][dxProp] / 2
        );
        var y = ySliceScale(d[binsProp][index][yProp]);
        var globalY =
          mode === 'offset' ? yScale(timeAccessor(d)) - (sliceHeight - y) : y;
        var dist = Math.abs(m[1] - globalY);
        if (dist < closestSliceDistance) {
          closestSliceDistance = dist;
          closestSliceData = d;
        }
        return 'translate(' + x + ',' + y + ')';
      });
      hoverUpdate.select('text').text(function (d) {
        var index = hoverXIndex(d);
        return d[binsProp][index][yProp];
      });
      hoverUpdate.classed('hover-closest', function (d) {
        return d === closestSliceData;
      });
      outlineUpdate.classed('outline-hover', function (d) {
        return d === closestSliceData;
      });
      var index = hoverXIndex(lastSliceData);
      xAxisHoverUpdate
        .attr('transform', function (d) {
          return (
            'translate(' +
            xScale(
              lastSliceData[binsProp][index][xProp] +
                lastSliceData[binsProp][index][dxProp] / 2
            ) +
            ', ' +
            height +
            ')'
          );
        })
        .select('text')
        .text(function (d) {
          return format(
            lastSliceData[binsProp][index][xProp] +
              lastSliceData[binsProp][index][dxProp] / 2
          );
        });
      var fy = yAxis.tickFormat();
      yAxisHoverUpdate
        .attr('transform', function (d) {
          return (
            'translate(' +
            width +
            ', ' +
            (mode === 'offset' ? yScale(timeAccessor(closestSliceData)) : 0) +
            ')'
          );
        })
        .style('display', mode === 'offset' ? '' : 'none')
        .select('text')
        .text(function (d) {
          return fy(timeAccessor(closestSliceData));
        });
      var fsy = ySliceAxis.tickFormat();
      ySliceAxisHoverUpdate
        .attr('transform', function (d) {
          return (
            'translate(' +
            width +
            ', ' +
            (mode === 'offset'
              ? 0
              : ySliceScale(closestSliceData[binsProp][index][yProp])) +
            ')'
          );
        })
        .style('display', mode === 'offset' ? 'none' : '')
        .select('text')
        .text(function (d) {
          return fsy(closestSliceData[binsProp][index][yProp]);
        });
      var svgMouse = d3.mouse(svgNode);
      tooltip
        .style(
          'transform',
          'translate(' + (svgMouse[0] + 15) + 'px,' + (svgMouse[1] - 15) + 'px)'
        )
        .select('span')
        .text(
          mode === 'offset'
            ? fsy(closestSliceData[binsProp][index][yProp])
            : (timeProp === 'step' ? 'step ' : '') +
                fy(timeAccessor(closestSliceData))
        );
    }
  }
}
| tensorflow/tensorboard | tensorboard/plugins/histogram/vz_histogram_timeseries/vz-histogram-timeseries.ts | TypeScript | apache-2.0 | 22,012 |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lite.android.launcher3;
import android.graphics.Rect;
/**
* Allows the implementing {@link View} to not draw underneath system bars.
* e.g., notification bar on top and home key area on the bottom.
*/
public interface Insettable {

    /**
     * Supplies the current system-bar insets so the implementing view can
     * avoid drawing underneath them.
     *
     * @param insets inset sizes for each screen edge
     */
    void setInsets(Rect insets);
}
package examples.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.TableGenerator;
/**
 * JPA entity for a postal address, demonstrating table-based primary key
 * generation: ids come from the ID_GEN table, starting at 10000 and
 * allocated in blocks of 100.
 */
@Entity
public class Address {

    @TableGenerator(name="Address_Gen",
                    table="ID_GEN",
                    pkColumnName="GEN_NAME",
                    valueColumnName="GEN_VAL",
                    pkColumnValue="Addr_Gen",
                    initialValue=10000,
                    allocationSize=100)
    @Id @GeneratedValue(strategy=GenerationType.TABLE,
                        generator="Address_Gen")
    private int id;

    private String street;
    private String city;
    private String state;
    private String zip;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getStreet() {
        return street;
    }

    // FIX: parameter renamed from 'address' to 'street' for consistency with
    // the field and the other setters.
    public void setStreet(String street) {
        this.street = street;
    }

    public String getCity() {
        return city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public String getZip() {
        return zip;
    }

    public void setZip(String zip) {
        this.zip = zip;
    }

    /** Human-readable dump of all fields (format unchanged). */
    @Override
    public String toString() {
        return "Address id: " + getId() +
            ", street: " + getStreet() +
            ", city: " + getCity() +
            ", state: " + getState() +
            ", zip: " + getZip();
    }
}
| velmuruganvelayutham/jpa | examples/Chapter4/11-tableIdGeneration/src/model/examples/model/Address.java | Java | apache-2.0 | 1,622 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.aggregation;
import com.facebook.presto.byteCode.DynamicClassLoader;
import com.facebook.presto.metadata.FunctionInfo;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.metadata.ParametricAggregation;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.aggregation.state.MinMaxByNState;
import com.facebook.presto.operator.aggregation.state.MinMaxByNStateFactory;
import com.facebook.presto.operator.aggregation.state.MinMaxByNStateSerializer;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.type.ArrayType;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import java.lang.invoke.MethodHandle;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static com.facebook.presto.metadata.Signature.orderableTypeParameter;
import static com.facebook.presto.metadata.Signature.typeParameter;
import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INDEX;
import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL;
import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.NULLABLE_BLOCK_INPUT_CHANNEL;
import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INPUT_CHANNEL;
import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.STATE;
import static com.facebook.presto.operator.aggregation.AggregationUtils.generateAggregationName;
import static com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.util.Reflection.methodHandle;
import static java.util.Objects.requireNonNull;
/**
 * Base class for the parametric {@code min_by(V, K, n)} / {@code max_by(V, K, n)}
 * aggregations: collects the values of type V associated with the n
 * smallest/largest keys of orderable type K and returns them as {@code array<V>}.
 * Subclasses provide the function name and the key comparator that defines
 * the ordering.
 */
public abstract class AbstractMinMaxByNAggregation
        extends ParametricAggregation
{
    private static final MethodHandle INPUT_FUNCTION = methodHandle(AbstractMinMaxByNAggregation.class, "input", BlockComparator.class, Type.class, Type.class, MinMaxByNState.class, Block.class, Block.class, int.class, long.class);
    private static final MethodHandle COMBINE_FUNCTION = methodHandle(AbstractMinMaxByNAggregation.class, "combine", MinMaxByNState.class, MinMaxByNState.class);
    private static final MethodHandle OUTPUT_FUNCTION = methodHandle(AbstractMinMaxByNAggregation.class, "output", ArrayType.class, MinMaxByNState.class, BlockBuilder.class);

    private final String name;
    // Maps the concrete key type to the comparator that defines min/max order.
    private final Function<Type, BlockComparator> typeToComparator;
    private final Signature signature;

    protected AbstractMinMaxByNAggregation(String name, Function<Type, BlockComparator> typeToComparator)
    {
        this.name = requireNonNull(name, "name is null");
        this.typeToComparator = requireNonNull(typeToComparator, "typeToComparator is null");
        // Generic signature: (V, K orderable, BIGINT n) -> array<V>.
        this.signature = new Signature(name, ImmutableList.of(typeParameter("V"), orderableTypeParameter("K")), "array<V>", ImmutableList.of("V", "K", StandardTypes.BIGINT), false, false);
    }

    @Override
    public Signature getSignature()
    {
        return signature;
    }

    /**
     * Binds the generic signature to the concrete K and V types resolved for
     * a call site and compiles the matching accumulator implementation.
     */
    @Override
    public FunctionInfo specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
    {
        Type keyType = types.get("K");
        Type valueType = types.get("V");
        Signature signature = new Signature(name, new ArrayType(valueType).getTypeSignature(), valueType.getTypeSignature(), keyType.getTypeSignature(), BIGINT.getTypeSignature());
        InternalAggregationFunction aggregation = generateAggregation(valueType, keyType);
        return new FunctionInfo(signature, getDescription(), aggregation);
    }

    /**
     * Per-row input function. Lazily creates the bounded (key, value) heap on
     * first use — validating that n is positive — and adds the row unless the
     * key is NULL. Memory accounting is updated by the heap-size delta.
     */
    public static void input(BlockComparator comparator, Type valueType, Type keyType, MinMaxByNState state, Block value, Block key, int blockIndex, long n)
    {
        TypedKeyValueHeap heap = state.getTypedKeyValueHeap();
        if (heap == null) {
            if (n <= 0) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "third argument of max_by/min_by must be a positive integer");
            }
            heap = new TypedKeyValueHeap(comparator, keyType, valueType, Ints.checkedCast(n));
            state.setTypedKeyValueHeap(heap);
        }
        long startSize = heap.getEstimatedSize();
        if (!key.isNull(blockIndex)) {
            heap.add(key, value, blockIndex);
        }
        state.addMemoryUsage(heap.getEstimatedSize() - startSize);
    }

    /**
     * Merges another partial state into {@code state}; adopts the other heap
     * wholesale when this state has none yet.
     */
    public static void combine(MinMaxByNState state, MinMaxByNState otherState)
    {
        TypedKeyValueHeap otherHeap = otherState.getTypedKeyValueHeap();
        if (otherHeap == null) {
            return;
        }
        TypedKeyValueHeap heap = state.getTypedKeyValueHeap();
        if (heap == null) {
            state.setTypedKeyValueHeap(otherHeap);
            return;
        }
        long startSize = heap.getEstimatedSize();
        heap.addAll(otherHeap);
        state.addMemoryUsage(heap.getEstimatedSize() - startSize);
    }

    /**
     * Writes the final {@code array<V>} result. The heap pops elements in
     * reverse order, so they are first drained into a scratch block and then
     * appended backwards. Outputs SQL NULL when no rows were accumulated.
     */
    public static void output(ArrayType outputType, MinMaxByNState state, BlockBuilder out)
    {
        TypedKeyValueHeap heap = state.getTypedKeyValueHeap();
        if (heap == null || heap.isEmpty()) {
            out.appendNull();
            return;
        }
        Type elementType = outputType.getElementType();
        BlockBuilder arrayBlockBuilder = out.beginBlockEntry();
        BlockBuilder reversedBlockBuilder = elementType.createBlockBuilder(new BlockBuilderStatus(), heap.getCapacity());
        long startSize = heap.getEstimatedSize();
        heap.popAll(reversedBlockBuilder);
        state.addMemoryUsage(heap.getEstimatedSize() - startSize);
        for (int i = reversedBlockBuilder.getPositionCount() - 1; i >= 0; i--) {
            elementType.appendTo(reversedBlockBuilder, i, arrayBlockBuilder);
        }
        out.closeEntry();
    }

    /**
     * Assembles the aggregation metadata (parameter layout, bound method
     * handles, state serializer/factory) and bytecode-compiles the
     * accumulator for the given concrete types.
     */
    protected InternalAggregationFunction generateAggregation(Type valueType, Type keyType)
    {
        DynamicClassLoader classLoader = new DynamicClassLoader(AbstractMinMaxNAggregation.class.getClassLoader());
        BlockComparator comparator = typeToComparator.apply(keyType);
        List<Type> inputTypes = ImmutableList.of(valueType, keyType, BIGINT);
        MinMaxByNStateSerializer stateSerializer = new MinMaxByNStateSerializer(comparator, keyType, valueType);
        Type intermediateType = stateSerializer.getSerializedType();
        ArrayType outputType = new ArrayType(valueType);
        // Value channel is nullable; key channel and n are not.
        List<AggregationMetadata.ParameterMetadata> inputParameterMetadata = ImmutableList.of(
                new AggregationMetadata.ParameterMetadata(STATE),
                new AggregationMetadata.ParameterMetadata(NULLABLE_BLOCK_INPUT_CHANNEL, valueType),
                new AggregationMetadata.ParameterMetadata(BLOCK_INPUT_CHANNEL, keyType),
                new AggregationMetadata.ParameterMetadata(BLOCK_INDEX),
                new AggregationMetadata.ParameterMetadata(INPUT_CHANNEL, BIGINT));
        AggregationMetadata metadata = new AggregationMetadata(
                generateAggregationName(name, valueType, inputTypes),
                inputParameterMetadata,
                INPUT_FUNCTION.bindTo(comparator).bindTo(valueType).bindTo(keyType),
                null,
                null,
                COMBINE_FUNCTION,
                OUTPUT_FUNCTION.bindTo(outputType),
                MinMaxByNState.class,
                stateSerializer,
                new MinMaxByNStateFactory(),
                outputType,
                false);
        GenericAccumulatorFactoryBinder factory = new AccumulatorCompiler().generateAccumulatorFactoryBinder(metadata, classLoader);
        return new InternalAggregationFunction(name, inputTypes, intermediateType, outputType, true, false, factory);
    }
}
| zjshen/presto | presto-main/src/main/java/com/facebook/presto/operator/aggregation/AbstractMinMaxByNAggregation.java | Java | apache-2.0 | 8,905 |
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_health_checking/grpc_version.py.template`!!!
VERSION = '1.33.0.dev0'
| donnadionne/grpc | src/python/grpcio_health_checking/grpc_version.py | Python | apache-2.0 | 710 |
package de.mukis.docmatcher.csv;
import java.util.Arrays;
import java.util.Objects;
import de.mukis.docmatcher.csv.matcher.CsvMatcher;
/**
 * Immutable selection of CSV column indices, used as the fluent entry point
 * for configuring a {@link CsvMatcher}.
 */
public class Columns {

    private final int[] columns;

    private Columns(int[] columns) {
        this.columns = columns;
    }

    /** Builds a selection of two or more column indices. */
    public static Columns columns(int first, int second, int... columns) {
        int[] selection = new int[2 + columns.length];
        selection[0] = first;
        selection[1] = second;
        System.arraycopy(columns, 0, selection, 2, columns.length);
        return new Columns(selection);
    }

    /** Builds a selection containing a single column index. */
    public static Columns column(int col) {
        return new Columns(new int[] { col });
    }

    /** Attaches this column selection to the matcher and returns it. */
    public CsvMatcher are(CsvMatcher matcher) {
        matcher.setColumns(this);
        return matcher;
    }

    /** Singular-phrasing alias of {@link #are(CsvMatcher)}. */
    public CsvMatcher is(CsvMatcher matcher) {
        return are(matcher);
    }

    /** Returns the selected column indices. */
    public int[] get() {
        return columns;
    }

    @Override
    public int hashCode() {
        // Same value as the conventional prime/result expansion over one field.
        return 31 + Arrays.hashCode(columns);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return Arrays.equals(columns, ((Columns) obj).columns);
    }
}
| muuki88/docmatcher | src/main/java/de/mukis/docmatcher/csv/Columns.java | Java | apache-2.0 | 1,454 |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.kafka.impl;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.kafka.api.PartitionStrategy;
import com.streamsets.pipeline.lib.kafka.KafkaConstants;
import com.streamsets.pipeline.lib.kafka.KafkaErrors;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Properties;
public class KafkaProducer09 extends BaseKafkaProducer09 {
  private static final Logger LOG = LoggerFactory.getLogger(KafkaProducer09.class);
  public static final String ACKS_DEFAULT = "1";
  public static final String RANDOM_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RandomPartitioner";
  public static final String ROUND_ROBIN_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RoundRobinPartitioner";
  public static final String EXPRESSION_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.ExpressionPartitioner";
  private final String metadataBrokerList;
  private final Map<String, Object> kafkaProducerConfigs;
  private final PartitionStrategy partitionStrategy;

  /**
   * @param metadataBrokerList comma separated list of bootstrap brokers
   * @param kafkaProducerConfigs extra user supplied producer properties
   * @param partitionStrategy strategy used to pick the destination partition
   * @param sendWriteResponse whether write responses should be sent back
   */
  public KafkaProducer09(
      String metadataBrokerList,
      Map<String, Object> kafkaProducerConfigs,
      PartitionStrategy partitionStrategy,
      boolean sendWriteResponse
  ) {
    super(sendWriteResponse);
    this.metadataBrokerList = metadataBrokerList;
    this.kafkaProducerConfigs = kafkaProducerConfigs;
    this.partitionStrategy = partitionStrategy;
  }

  /**
   * Assembles the producer {@link Properties} and instantiates the Kafka client.
   */
  @Override
  protected Producer<Object, byte[]> createKafkaProducer() {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, metadataBrokerList);
    props.put(ProducerConfig.ACKS_CONFIG, ACKS_DEFAULT);
    // Key serializer comes from the stage configuration; values are always raw bytes.
    props.put(
        KafkaConstants.KEY_SERIALIZER_CLASS_CONFIG,
        kafkaProducerConfigs.get(KafkaConstants.KEY_SERIALIZER_CLASS_CONFIG));
    props.put(KafkaConstants.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    configurePartitionStrategy(props, partitionStrategy);
    addUserConfiguredProperties(kafkaProducerConfigs, props);
    return new KafkaProducer<>(props);
  }

  /**
   * Wraps a producer failure into a {@link StageException}. Throwing it stops
   * the pipeline (KafkaTarget does not handle it); pipeline-level retry will
   * re-attempt the write.
   */
  @Override
  protected StageException createWriteException(Exception e) {
    LOG.error(KafkaErrors.KAFKA_50.getMessage(), e.toString(), e);
    return new StageException(KafkaErrors.KAFKA_50, e.toString(), e);
  }

  // Maps the pipeline partition strategy onto Kafka's partitioner.class setting.
  private void configurePartitionStrategy(Properties props, PartitionStrategy strategy) {
    if (strategy == null) {
      return;
    }
    switch (strategy) {
      case RANDOM:
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, RANDOM_PARTITIONER_CLASS);
        break;
      case ROUND_ROBIN:
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, ROUND_ROBIN_PARTITIONER_CLASS);
        break;
      case EXPRESSION:
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, EXPRESSION_PARTITIONER_CLASS);
        break;
      default:
        // DEFAULT (and anything else): rely on
        // org.apache.kafka.clients.producer.internals.DefaultPartitioner.
        break;
    }
  }

  // Copies user supplied properties over the defaults.
  // The "bootstrap.servers" option, if specified, is ignored.
  private void addUserConfiguredProperties(Map<String, Object> kafkaClientConfigs, Properties props) {
    if (kafkaClientConfigs == null || kafkaClientConfigs.isEmpty()) {
      return;
    }
    kafkaClientConfigs.remove(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
    props.putAll(kafkaClientConfigs);
  }
}
| kunickiaj/datacollector | sdc-kafka_0_9/src/main/java/com/streamsets/pipeline/kafka/impl/KafkaProducer09.java | Java | apache-2.0 | 4,703 |
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
//
// Copyright 2016-2018 Pascal ECHEMANN.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {SessionManager} from "./SessionManager";
import {GlobalGuidGenerator} from "jec-commons";
import * as crypto from "crypto";
import {SecurityManager} from "../../../core/SecurityManager";
import {Session, SessionError, SessionId} from "jec-exchange";
import {SessionStorage} from "../connectors/SessionStorage";
import {SessionIdUtil} from "../utils/SessionIdUtil";
import {SessionIdBuilder} from "../utils/SessionIdBuilder";
/**
* The default <code>SessionManager</code> implementation to be used by GlassCat
* servers.
*/
export class EjpSessionManager implements SessionManager {
//////////////////////////////////////////////////////////////////////////////
// Constructor function
//////////////////////////////////////////////////////////////////////////////
/**
* Creates a new <code>EjpSessionManager</code> instance.
*/
constructor() {
this.init();
}
//////////////////////////////////////////////////////////////////////////////
// Private properties
//////////////////////////////////////////////////////////////////////////////
/**
* The reference to the GUID for this manager.
*/
private _guid:string = null;
/**
* The type of algorithm used by the associated<code>UserHashModule</code>
* instance for cookies encryption.
*/
private readonly HASH_ALGORITHM:string = "sha256";
/**
* The type of output encoding used by the associated
* <code>UserHashModule</code> instance for cookies encryption.
*/
private readonly OUTPUT_ENCODING:any = "hex";
/**
* The reference to the <code>SessionStorage</code> instance for this manager.
*/
private _connector:SessionStorage = null;
/**
* The reference to the <code>SessionIdBuilder</code> instance for this
* manager.
*/
private _sessionIdBuilder:SessionIdBuilder = null;
//////////////////////////////////////////////////////////////////////////////
// Private methods
//////////////////////////////////////////////////////////////////////////////
/**
* Initializes this session manager.
*/
private init():void {
this._guid = GlobalGuidGenerator.getInstance().generate();
this._sessionIdBuilder = new SessionIdBuilder();
}
//////////////////////////////////////////////////////////////////////////////
// Public methods
//////////////////////////////////////////////////////////////////////////////
/**
* @inheritDoc
*/
public getSessionStorage():SessionStorage {
return this._connector;
}
/**
* @inheritDoc
*/
public setSessionStorage(sessionStorage:SessionStorage):void {
//TODO : log this action
this._connector = sessionStorage;
}
/**
* @inheritDoc
*/
public initSessionId():SessionId {
const sha:crypto.Hash = crypto.createHash(this.HASH_ALGORITHM)
.update(Date.now() + this._guid);
const sessionId:SessionId = this._sessionIdBuilder.buildSessionId(
sha.digest(this.OUTPUT_ENCODING)
);
return sessionId;
}
/**
* @inheritDoc
*/
public addSession(session:Session, result:(error?:SessionError)=>any):void {
this._connector.add(session, result);
}
/**
* @inheritDoc
*/
public getSession(sessionId:SessionId, success:(session:Session)=>any,
error:(error:SessionError)=>any):void {
this._connector.get(
sessionId,
success,
error
);
}
/**
* @inheritDoc
*/
public removeSession(sessionId:SessionId,
result:(error?:SessionError)=>any):void {
this._connector.remove(sessionId, result);
}
} | pechemann/jec-glasscat-core | src/com/onsoft/glasscat/security/session/managers/EjpSessionManager.ts | TypeScript | apache-2.0 | 4,312 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/autoscaling/model/DeleteAutoScalingGroupRequest.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
using namespace Aws::AutoScaling::Model;
using namespace Aws::Utils;
// Default constructor: the *HasBeenSet flags start false, so
// SerializePayload() omits the corresponding fields from the request.
DeleteAutoScalingGroupRequest::DeleteAutoScalingGroupRequest() : 
    m_autoScalingGroupNameHasBeenSet(false),
    m_forceDelete(false),
    m_forceDeleteHasBeenSet(false)
{
}
// Builds the form-urlencoded Query-protocol payload for the
// DeleteAutoScalingGroup action. Only members that were explicitly set
// (tracked by the HasBeenSet flags) are included in the request.
Aws::String DeleteAutoScalingGroupRequest::SerializePayload() const
{
  Aws::StringStream ss;
  ss << "Action=DeleteAutoScalingGroup&";
  if(m_autoScalingGroupNameHasBeenSet)
  {
    ss << "AutoScalingGroupName=" << StringUtils::URLEncode(m_autoScalingGroupName.c_str()) << "&";
  }
  if(m_forceDeleteHasBeenSet)
  {
    // The Query protocol expects booleans as the literals "true"/"false";
    // without std::boolalpha the stream would emit "1"/"0".
    ss << "ForceDelete=" << std::boolalpha << m_forceDelete << "&";
  }
  ss << "Version=2011-01-01";
  return ss.str();
}
| ambasta/aws-sdk-cpp | aws-cpp-sdk-autoscaling/source/model/DeleteAutoScalingGroupRequest.cpp | C++ | apache-2.0 | 1,408 |
/**
* Copyright (c) 2010 Abbcc Corp.
* No 225,Wen Yi RD, Hang Zhou, Zhe Jiang, China.
* All rights reserved.
*
* "AdminService.java is the copyrighted,
* proprietary property of Abbcc Company and its
* subsidiaries and affiliates which retain all right, title and interest
* therein."
*
* Revision History
*
* Date Programmer Notes
* --------- --------------------- --------------------------------------------
* 2009-12-9 wangjin initial
**/
package com.abbcc.service;
import java.util.List;
import org.hibernate.criterion.DetachedCriteria;
import com.abbcc.common.PaginationSupport;
import com.abbcc.models.AbcCellbind;
/**
* *AdminService.java
*/
public interface CellbindService extends BaseService{
	/** Persists the given transient instance. */
	public void save(AbcCellbind transientInstance);
	/** Removes the given persistent instance. */
	public void delete(AbcCellbind persistentInstance);
	/** Loads an instance by its identifier. */
	public AbcCellbind findById(String id);
	/** Finds instances matching the populated properties of the example. */
	public List<AbcCellbind> findByExample(AbcCellbind instance);
	/** Returns all persisted instances. */
	public List<AbcCellbind> findAll();
	/** Saves a new instance or updates an existing one. */
	public void saveOrUpdate(AbcCellbind instance);
	/** Pages over the results of the criteria. */
	public PaginationSupport findPageByCriteria(
			DetachedCriteria detachedCriteria);
	/** Pages over the results of the criteria starting at the given index. */
	public PaginationSupport findPageByCriteria(
			DetachedCriteria detachedCriteria, int startIndex);
	/** Pages over the results with an explicit page size and start index. */
	public PaginationSupport findPageByCriteria(
			DetachedCriteria detachedCriteria, int pageSize, int startIndex);
	/** Returns every row matching the criteria, unpaged. */
	public List findAllByCriteria(DetachedCriteria detachedCriteria);
	/** Counts the rows matching the criteria. */
	public int getCountByCriteria(DetachedCriteria detachedCriteria);
	/** Executes a stored procedure with positional parameters. */
	public void callProcedure(String procString, List<Object> params)
			throws Exception;
	/** Executes a stored procedure and returns its result rows. */
	public List getCallProcedureResult(String procString, List<Object> params)
			throws Exception;
}
| baowp/platform | biz/src/main/java/com/abbcc/service/CellbindService.java | Java | apache-2.0 | 1,720 |
/*
* This file is generated by jOOQ.
*/
package io.cattle.platform.core.model.tables;
import io.cattle.platform.core.model.CattleTable;
import io.cattle.platform.core.model.Keys;
import io.cattle.platform.core.model.tables.records.ConfigItemStatusRecord;
import io.cattle.platform.db.jooq.converter.DateConverter;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Identity;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.TableImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.9.3"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class ConfigItemStatusTable extends TableImpl<ConfigItemStatusRecord> {
    // NOTE: jOOQ-generated mapping for cattle.config_item_status — regenerate
    // with jOOQ instead of hand-editing.
    private static final long serialVersionUID = -1272483914;
    /**
     * The reference instance of <code>cattle.config_item_status</code>
     */
    public static final ConfigItemStatusTable CONFIG_ITEM_STATUS = new ConfigItemStatusTable();
    /**
     * The class holding records for this type
     */
    @Override
    public Class<ConfigItemStatusRecord> getRecordType() {
        return ConfigItemStatusRecord.class;
    }
    /**
     * The column <code>cattle.config_item_status.id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");
    /**
     * The column <code>cattle.config_item_status.name</code>.
     */
    public final TableField<ConfigItemStatusRecord, String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");
    /**
     * The column <code>cattle.config_item_status.requested_version</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> REQUESTED_VERSION = createField("requested_version", org.jooq.impl.SQLDataType.BIGINT.nullable(false).defaultValue(org.jooq.impl.DSL.inline("0", org.jooq.impl.SQLDataType.BIGINT)), this, "");
    /**
     * The column <code>cattle.config_item_status.applied_version</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> APPLIED_VERSION = createField("applied_version", org.jooq.impl.SQLDataType.BIGINT.nullable(false).defaultValue(org.jooq.impl.DSL.inline("-1", org.jooq.impl.SQLDataType.BIGINT)), this, "");
    /**
     * The column <code>cattle.config_item_status.source_version</code>.
     */
    public final TableField<ConfigItemStatusRecord, String> SOURCE_VERSION = createField("source_version", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");
    /**
     * The column <code>cattle.config_item_status.requested_updated</code>.
     */
    public final TableField<ConfigItemStatusRecord, Date> REQUESTED_UPDATED = createField("requested_updated", org.jooq.impl.SQLDataType.TIMESTAMP.nullable(false), this, "", new DateConverter());
    /**
     * The column <code>cattle.config_item_status.applied_updated</code>.
     */
    public final TableField<ConfigItemStatusRecord, Date> APPLIED_UPDATED = createField("applied_updated", org.jooq.impl.SQLDataType.TIMESTAMP, this, "", new DateConverter());
    /**
     * The column <code>cattle.config_item_status.agent_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> AGENT_ID = createField("agent_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * The column <code>cattle.config_item_status.account_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> ACCOUNT_ID = createField("account_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * The column <code>cattle.config_item_status.service_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> SERVICE_ID = createField("service_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * The column <code>cattle.config_item_status.resource_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> RESOURCE_ID = createField("resource_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");
    /**
     * The column <code>cattle.config_item_status.resource_type</code>.
     */
    public final TableField<ConfigItemStatusRecord, String> RESOURCE_TYPE = createField("resource_type", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");
    /**
     * The column <code>cattle.config_item_status.environment_id</code>.
     */
    // Java-side name STACK_ID intentionally differs from the underlying
    // column name "environment_id".
    public final TableField<ConfigItemStatusRecord, Long> STACK_ID = createField("environment_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * The column <code>cattle.config_item_status.host_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> HOST_ID = createField("host_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * The column <code>cattle.config_item_status.deployment_unit_id</code>.
     */
    public final TableField<ConfigItemStatusRecord, Long> DEPLOYMENT_UNIT_ID = createField("deployment_unit_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
    /**
     * Create a <code>cattle.config_item_status</code> table reference
     */
    public ConfigItemStatusTable() {
        this("config_item_status", null);
    }
    /**
     * Create an aliased <code>cattle.config_item_status</code> table reference
     */
    public ConfigItemStatusTable(String alias) {
        this(alias, CONFIG_ITEM_STATUS);
    }
    // Internal aliasing constructors backing as() and rename().
    private ConfigItemStatusTable(String alias, Table<ConfigItemStatusRecord> aliased) {
        this(alias, aliased, null);
    }
    private ConfigItemStatusTable(String alias, Table<ConfigItemStatusRecord> aliased, Field<?>[] parameters) {
        super(alias, null, aliased, parameters, "");
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Schema getSchema() {
        return CattleTable.CATTLE;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Identity<ConfigItemStatusRecord, Long> getIdentity() {
        return Keys.IDENTITY_CONFIG_ITEM_STATUS;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public UniqueKey<ConfigItemStatusRecord> getPrimaryKey() {
        return Keys.KEY_CONFIG_ITEM_STATUS_PRIMARY;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public List<UniqueKey<ConfigItemStatusRecord>> getKeys() {
        return Arrays.<UniqueKey<ConfigItemStatusRecord>>asList(Keys.KEY_CONFIG_ITEM_STATUS_PRIMARY, Keys.KEY_CONFIG_ITEM_STATUS_IDX_CONFIG_ITEM_STATUS_RESOURCE);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public List<ForeignKey<ConfigItemStatusRecord, ?>> getReferences() {
        return Arrays.<ForeignKey<ConfigItemStatusRecord, ?>>asList(Keys.FK_CONFIG_ITEM__NAME, Keys.FK_CONFIG_ITEM__AGENT_ID, Keys.FK_CONFIG_ITEM__ACCOUNT_ID, Keys.FK_CONFIG_ITEM__SERVICE_ID, Keys.FK_CONFIG_ITEM__ENVIRONMENT_ID, Keys.FK_CONFIG_ITEM__HOST_ID, Keys.FK_CONFIG_ITEM__DEPLOYMENT_UNIT_ID);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public ConfigItemStatusTable as(String alias) {
        return new ConfigItemStatusTable(alias, this);
    }
    /**
     * Rename this table
     */
    @Override
    public ConfigItemStatusTable rename(String name) {
        return new ConfigItemStatusTable(name, null);
    }
}
| vincent99/cattle | code/iaas/model/src/main/java/io/cattle/platform/core/model/tables/ConfigItemStatusTable.java | Java | apache-2.0 | 7,442 |
package com.example.persiandatepicker;
import java.util.Date;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.NumberPicker;
import android.widget.NumberPicker.OnValueChangeListener;
import android.widget.Toast;
public class PersianDatePicker extends LinearLayout {
	// The three spinners that together represent a Persian (Jalali) date.
	private NumberPicker yearNumberPicker, monthNumberPicker, dayNumberPicker;
	public PersianDatePicker(Context context, AttributeSet attrs) {
		this(context, attrs, -1);
	}
	public PersianDatePicker(Context context) {
		this(context, null, -1);
	}
	// All constructors funnel into this one; it inflates R.layout.pdp and
	// applies the year/month/day XML attributes (defaults: 1393/6/19).
	public PersianDatePicker(final Context context, AttributeSet attrs, int defStyle) {
		super(context, attrs, defStyle);
		LayoutInflater inflater = (LayoutInflater) context
				.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
		View root = inflater.inflate(R.layout.pdp, this);
		yearNumberPicker = (NumberPicker) root
				.findViewById(R.id.yearNumberPicker);
		monthNumberPicker = (NumberPicker) root
				.findViewById(R.id.monthNumberPicker);
		dayNumberPicker = (NumberPicker) root
				.findViewById(R.id.dayNumberPicker);
		TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PersianDatePicker, 0, 0);
		int year = 1393,month=6,day=19;
		year = a.getInteger(R.styleable.PersianDatePicker_year, 1393);
		month = a.getInteger(R.styleable.PersianDatePicker_month, 6);
		day = a.getInteger(R.styleable.PersianDatePicker_day, 19);
		a.recycle();
		// Fixed selectable ranges; the day picker always allows 1..31.
		// NOTE(review): Persian months 7-11 have 30 days and month 12 has
		// 29/30 — confirm whether out-of-range days are acceptable here.
		yearNumberPicker.setMinValue(1380);
		yearNumberPicker.setMaxValue(1400);
		yearNumberPicker.setValue(year);
		monthNumberPicker.setMinValue(1);
		monthNumberPicker.setMaxValue(12);
		monthNumberPicker.setValue(month);
		dayNumberPicker.setMaxValue(31);
		dayNumberPicker.setMinValue(1);
		dayNumberPicker.setValue(day);
		// Debug feedback: toasts the old and new year whenever it changes.
		yearNumberPicker.setOnValueChangedListener(new OnValueChangeListener() {
			@Override
			public void onValueChange(NumberPicker np, int oldValue, int newValue) {
				Toast.makeText(context, "Year changed:" + oldValue + " -> " + newValue, Toast.LENGTH_LONG).show();
			}
		});
	}
	// TODO: not implemented — always returns null; needs a Jalali-to-Gregorian
	// conversion before a java.util.Date can be produced.
	public Date getSelectedDate() {
		return null;
	}
	// TODO: not implemented — the supplied date is ignored.
	public void setSelectedDate(Date date) {
	}
}
| alibehzadian/Smartlab | Custom Views/PersianDatePicker-Example/src/com/example/persiandatepicker/PersianDatePicker.java | Java | apache-2.0 | 2,282 |
/**
 * Public helpers exposed on the phoxy namespace: history manipulation,
 * page reset, config access, leveled logging and method overriding.
 */
phoxy.internal = {
  // Push `target` onto the browser history without navigating.
  // Returns false so it can be used directly as a click handler result.
  ChangeURL: function (target) {
    var url = phoxy.ConstructURL(target);
    phoxy.Log(4, "History push", url);
    if (url[0] !== '/') {
      url = '/' + url;
    }
    history.pushState({}, document.title, url);
    return false;
  },

  // Reload the page, or navigate to `url` when a truthy non-boolean
  // location is supplied.
  Reset: function (url) {
    if (!url || url === true) {
      return location.reload();
    }
    location = url;
  },

  // Accessor for the engine configuration object.
  Config: function () {
    return phoxy._.config;
  },

  // Leveled logger: prefixes the message with its severity label, prints it
  // through the console method matching `level`, and aborts on level 0.
  Log: function (level) {
    if (phoxy.state.verbose < level) {
      return;
    }
    var names = phoxy._.internal.error_names;
    var methods = phoxy._.internal.error_methods;
    var label = names[Math.min(level, names.length - 1)];
    var method = methods[Math.min(level, methods.length - 1)];
    var args = Array.prototype.slice.call(arguments);
    args[0] = label;
    console[method].apply(console, args);
    if (level === 0) {
      throw "Break execution on fatal log";
    }
  },

  // Replace a public phoxy method, keeping the previous one as `.origin`.
  Override: function (method_name, new_method) {
    return phoxy._.internal.Override(phoxy, method_name, new_method);
  }
};
/**
 * Private engine internals: bootstrap, unique DOM ids, DOM event plumbing
 * and the generic method-override helper, plus the log level tables.
 */
phoxy._.internal = {
  // Mark the engine as loaded and install the global click hook.
  Load: function () {
    phoxy.state.loaded = true;
    phoxy._.click.InitClickHook();
  },

  // Build a random DOM id of the form "phoxy_XXXXXXXXXX".
  GenerateUniqueID: function () {
    var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var suffix = "";
    for (var i = 0; i < 10; i++) {
      suffix += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return "phoxy_" + suffix;
  },

  // Fire a bubbling, cancelable DOM event on the element's node, falling
  // back to the legacy IE fireEvent API when createEvent is unavailable.
  DispatchEvent: function (dom_element_id, event_name) {
    var element = phoxy._.render.Div(dom_element_id);
    if (document.createEvent) {
      var event = document.createEvent("HTMLEvents");
      event.initEvent(event_name, true, true);
      event.eventName = event_name;
      element.dispatchEvent(event);
    } else {
      var legacy = document.createEventObject();
      legacy.eventType = event_name;
      legacy.eventName = event_name;
      element.fireEvent("on" + legacy.eventType, legacy);
    }
  },

  // Subscribe `callback` to `event_name` on the element's DOM node.
  HookEvent: function (dom_element_id, event_name, callback) {
    phoxy._.render.Div(dom_element_id).addEventListener(event_name, callback);
  },

  // Swap `object[method_name]` for `new_method`, stashing the previous
  // implementation on the replacement as `.origin`.
  Override: function (object, method_name, new_method) {
    var previous = object[method_name];
    object[method_name] = new_method;
    object[method_name].origin = previous;
  },

  // Labels printed in front of log messages, indexed by level.
  error_names: [
    "FATAL",
    "ERROR",
    "WARNING",
    "INFO",
    "DEBUG"
  ],

  // Console methods used per level (one extra entry for very verbose levels).
  error_methods: [
    "error",
    "error",
    "warn",
    "info",
    "log",
    "debug"
  ]
};
| phoxy/phoxy | subsystem/internal.js | JavaScript | apache-2.0 | 2,785 |
package com.ckt.io.wifidirect.fragment;
import android.app.ListFragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.wifi.WifiManager;
import android.net.wifi.p2p.WifiP2pConfig;
import android.net.wifi.p2p.WifiP2pDevice;
import android.net.wifi.p2p.WifiP2pDeviceList;
import android.net.wifi.p2p.WifiP2pManager;
import android.os.Bundle;
import android.support.v4.widget.SwipeRefreshLayout;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.ckt.io.wifidirect.activity.WiFiDirectActivity;
import com.ckt.io.wifidirect.R;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Lu on 2016/5/22.
*/
public class DeviceListFragment extends ListFragment implements WifiP2pManager.PeerListListener,SwipeRefreshLayout.OnRefreshListener {
    // Backing data for the list adapter; mutated in place and refreshed via
    // notifyDataSetChanged().
    private List<WifiP2pDevice> peers = new ArrayList<WifiP2pDevice>();
    // Shown while a peer discovery is in flight; dismissed in onPeersAvailable().
    ProgressDialog progressDialog = null;
    // Root view inflated in onCreateView(); hosts this device's name/status rows.
    View mContentView = null;
    // This device, as last reported through updateThisDevice().
    private WifiP2pDevice device;
    private SwipeRefreshLayout swipeRefreshLayout;
    private WifiManager wifiManager = null;
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        this.setListAdapter(new WiFiPeerListAdapter(getActivity(), R.layout.row_devices, peers));
        wifiManager = (WifiManager) getActivity().getSystemService(Context.WIFI_SERVICE);
        swipeRefreshLayout = (SwipeRefreshLayout)getActivity().findViewById(R.id.id_swip_ly);
        swipeRefreshLayout.setOnRefreshListener(this);
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        mContentView = inflater.inflate(R.layout.device_list, null);
        return mContentView;
    }
    // Swipe-to-refresh: make sure Wi-Fi is on, then broadcast "UPDATE_PEERS".
    // NOTE(review): presumably the hosting activity listens for this action and
    // restarts peer discovery — confirm against the activity's receiver.
    public void onRefresh(){
        if (!wifiManager.isWifiEnabled()){
            wifiManager.setWifiEnabled(true);
        }
        Intent intent = new Intent("UPDATE_PEERS");
        getActivity().sendBroadcast(intent);
        swipeRefreshLayout.setRefreshing(false);
    }
    /**
     * @return this device
     */
    public WifiP2pDevice getDevice() {
        return device;
    }
    // Translates a WifiP2pDevice status constant into display text.
    private static String getDeviceStatus(int deviceStatus) {
        Log.d(WiFiDirectActivity.TAG, "Peer status :" + deviceStatus);
        switch (deviceStatus) {
            case WifiP2pDevice.AVAILABLE:
                return "Available";
            case WifiP2pDevice.INVITED:
                return "Invited";
            case WifiP2pDevice.CONNECTED:
                return "Connected";
            case WifiP2pDevice.FAILED:
                return "Failed";
            case WifiP2pDevice.UNAVAILABLE:
                return "Unavailable";
            default:
                return "Unknown";
        }
    }
    /**
     * Initiate a connection with the peer.
     */
    // NOTE(review): despite the Javadoc, this only shows the peer's details
    // via the hosting activity (which must implement DeviceActionListener);
    // the actual connect happens elsewhere.
    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        WifiP2pDevice device = (WifiP2pDevice) getListAdapter().getItem(position);
        ((DeviceActionListener) getActivity()).showDetails(device);
    }
    /**
     * Array adapter for ListFragment that maintains WifiP2pDevice list.
     */
    private class WiFiPeerListAdapter extends ArrayAdapter<WifiP2pDevice> {
        private List<WifiP2pDevice> items;
        /**
         * @param context
         * @param textViewResourceId
         * @param objects list shared with the fragment; kept by reference
         */
        public WiFiPeerListAdapter(Context context, int textViewResourceId,
                List<WifiP2pDevice> objects) {
            super(context, textViewResourceId, objects);
            items = objects;
        }
        // Renders one row, reusing the recycled convertView when available.
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View v = convertView;
            if (v == null) {
                LayoutInflater vi = (LayoutInflater) getActivity().getSystemService(
                        Context.LAYOUT_INFLATER_SERVICE);
                v = vi.inflate(R.layout.row_devices, null);
            }
            WifiP2pDevice device = items.get(position);
            if (device != null) {
                TextView top = (TextView) v.findViewById(R.id.device_name);
                TextView bottom = (TextView) v.findViewById(R.id.device_details);
                if (top != null) {
                    top.setText(device.deviceName);
                }
                if (bottom != null) {
                    bottom.setText(getDeviceStatus(device.status));
                }
            }
            return v;
        }
    }
    /**
     * Update UI for this device.
     *
     * @param device WifiP2pDevice object
     */
    public void updateThisDevice(WifiP2pDevice device) {
        this.device = device;
        TextView view = (TextView) mContentView.findViewById(R.id.my_name);
        view.setText(device.deviceName);
        view = (TextView) mContentView.findViewById(R.id.my_status);
        view.setText(getDeviceStatus(device.status));
    }
    // Callback from WifiP2pManager with the latest peer list: dismiss the
    // discovery dialog and replace the adapter contents in place.
    @Override
    public void onPeersAvailable(WifiP2pDeviceList peerList) {
        if (progressDialog != null && progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        peers.clear();
        peers.addAll(peerList.getDeviceList());
        ((WiFiPeerListAdapter) getListAdapter()).notifyDataSetChanged();
        if (peers.size() == 0) {
            Log.d(WiFiDirectActivity.TAG, "No devices found");
            return;
        }
    }
    // Empties the peer list and refreshes the UI.
    public void clearPeers() {
        peers.clear();
        ((WiFiPeerListAdapter) getListAdapter()).notifyDataSetChanged();
    }
    /**
     * Shows a cancelable "finding peers" dialog while discovery runs.
     */
    public void onInitiateDiscovery() {
        if (progressDialog != null && progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        progressDialog = ProgressDialog.show(getActivity(), "Press back to cancel", "finding peers", true,
                true, new DialogInterface.OnCancelListener() {
                    @Override
                    public void onCancel(DialogInterface dialog) {
                    }
                });
    }
    /**
     * An interface-callback for the activity to listen to fragment interaction
     * events.
     */
    public interface DeviceActionListener {
        void showDetails(WifiP2pDevice device);
        void cancelDisconnect();
        void connect(WifiP2pConfig config);
        void disconnect();
    }
}
| lucky-code/Practice | kuaichuan2.0/app/src/main/java/com/ckt/io/wifidirect/fragment/DeviceListFragment.java | Java | apache-2.0 | 6,668 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.meter.function.avg;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.manual.instance.InstanceTraffic;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.meter.function.AcceptableValue;
import org.apache.skywalking.oap.server.core.analysis.meter.function.MeterFunction;
import org.apache.skywalking.oap.server.core.analysis.metrics.LongValueHolder;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.ConstOne;
import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.Entrance;
import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.SourceFrom;
import org.apache.skywalking.oap.server.core.query.sql.Function;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@MeterFunction(functionName = "avg")
@ToString
public abstract class AvgFunction extends Metrics implements AcceptableValue<Long>, LongValueHolder {
    protected static final String SUMMATION = "summation";
    protected static final String COUNT = "count";
    protected static final String VALUE = "value";

    @Setter
    @Getter
    @Column(columnName = ENTITY_ID, length = 512)
    private String entityId;

    /**
     * Service ID is required for sort query.
     */
    @Setter
    @Getter
    @Column(columnName = InstanceTraffic.SERVICE_ID)
    private String serviceId;

    /** Running sum of all accepted values within the current time bucket. */
    @Getter
    @Setter
    @Column(columnName = SUMMATION, storageOnly = true)
    protected long summation;

    /** Number of samples folded into {@link #summation}. */
    @Getter
    @Setter
    @Column(columnName = COUNT, storageOnly = true)
    protected long count;

    /** Calculated average; populated by {@link #calculate()}. */
    @Getter
    @Setter
    @Column(columnName = VALUE, dataType = Column.ValueDataType.COMMON_VALUE, function = Function.Avg)
    private long value;

    /**
     * Fold one sample (or pre-aggregated partial) into this bucket.
     *
     * @param summation value sum to add
     * @param count     sample count to add
     */
    @Entrance
    public final void combine(@SourceFrom long summation, @ConstOne long count) {
        this.summation += summation;
        this.count += count;
    }

    @Override
    public final boolean combine(Metrics metrics) {
        AvgFunction longAvgMetrics = (AvgFunction) metrics;
        combine(longAvgMetrics.summation, longAvgMetrics.count);
        return true;
    }

    @Override
    public final void calculate() {
        // Guard against divide-by-zero: a bucket that was created but never
        // accepted a sample would otherwise throw ArithmeticException here.
        if (this.count == 0) {
            this.value = 0;
            return;
        }
        long result = this.summation / this.count;
        // The minimum of avg result is 1, that means once there's some data in a duration user can get "1" instead of
        // "0".
        if (result == 0 && this.summation > 0) {
            result = 1;
        }
        this.value = result;
    }

    /** Downsample this minute-level record into an hour-level record. */
    @Override
    public Metrics toHour() {
        AvgFunction metrics = (AvgFunction) createNew();
        metrics.setEntityId(getEntityId());
        metrics.setTimeBucket(toTimeBucketInHour());
        metrics.setServiceId(getServiceId());
        metrics.setSummation(getSummation());
        metrics.setCount(getCount());
        return metrics;
    }

    /** Downsample this minute-level record into a day-level record. */
    @Override
    public Metrics toDay() {
        AvgFunction metrics = (AvgFunction) createNew();
        metrics.setEntityId(getEntityId());
        metrics.setTimeBucket(toTimeBucketInDay());
        metrics.setServiceId(getServiceId());
        metrics.setSummation(getSummation());
        metrics.setCount(getCount());
        return metrics;
    }

    @Override
    public int remoteHashCode() {
        return entityId.hashCode();
    }

    // NOTE: serialize/deserialize field order must match exactly
    // (count, summation, timeBucket / entityId, serviceId).
    @Override
    public void deserialize(final RemoteData remoteData) {
        this.count = remoteData.getDataLongs(0);
        this.summation = remoteData.getDataLongs(1);
        setTimeBucket(remoteData.getDataLongs(2));
        this.entityId = remoteData.getDataStrings(0);
        this.serviceId = remoteData.getDataStrings(1);
    }

    @Override
    public RemoteData.Builder serialize() {
        RemoteData.Builder remoteBuilder = RemoteData.newBuilder();
        remoteBuilder.addDataLongs(count);
        remoteBuilder.addDataLongs(summation);
        remoteBuilder.addDataLongs(getTimeBucket());
        remoteBuilder.addDataStrings(entityId);
        remoteBuilder.addDataStrings(serviceId);
        return remoteBuilder;
    }

    @Override
    protected String id0() {
        return getTimeBucket() + Const.ID_CONNECTOR + entityId;
    }

    @Override
    public void accept(final MeterEntity entity, final Long value) {
        this.entityId = entity.id();
        this.serviceId = entity.serviceId();
        this.summation += value;
        this.count += 1;
    }

    @Override
    public Class<? extends StorageHashMapBuilder> builder() {
        return AvgStorageBuilder.class;
    }

    /** Maps AvgFunction to/from the storage layer's key-value representation. */
    public static class AvgStorageBuilder implements StorageHashMapBuilder<AvgFunction> {
        @Override
        public AvgFunction storage2Entity(final Map<String, Object> dbMap) {
            AvgFunction metrics = new AvgFunction() {
                @Override
                public AcceptableValue<Long> createNew() {
                    throw new UnexpectedException("createNew should not be called");
                }
            };
            metrics.setSummation(((Number) dbMap.get(SUMMATION)).longValue());
            metrics.setValue(((Number) dbMap.get(VALUE)).longValue());
            metrics.setCount(((Number) dbMap.get(COUNT)).longValue());
            metrics.setTimeBucket(((Number) dbMap.get(TIME_BUCKET)).longValue());
            metrics.setServiceId((String) dbMap.get(InstanceTraffic.SERVICE_ID));
            metrics.setEntityId((String) dbMap.get(ENTITY_ID));
            return metrics;
        }

        @Override
        public Map<String, Object> entity2Storage(final AvgFunction storageData) {
            Map<String, Object> map = new HashMap<>();
            map.put(SUMMATION, storageData.getSummation());
            map.put(VALUE, storageData.getValue());
            map.put(COUNT, storageData.getCount());
            map.put(TIME_BUCKET, storageData.getTimeBucket());
            map.put(InstanceTraffic.SERVICE_ID, storageData.getServiceId());
            map.put(ENTITY_ID, storageData.getEntityId());
            return map;
        }
    }

    // Identity is (entityId, timeBucket); value fields deliberately excluded.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof AvgFunction)) {
            return false;
        }
        AvgFunction function = (AvgFunction) o;
        return Objects.equals(entityId, function.entityId) &&
            getTimeBucket() == function.getTimeBucket();
    }

    @Override
    public int hashCode() {
        return Objects.hash(entityId, getTimeBucket());
    }
}
| ascrutae/sky-walking | oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/avg/AvgFunction.java | Java | apache-2.0 | 7,837 |
import time
import django
from django import forms
try:
from django.forms.utils import ErrorDict
except ImportError:
from django.forms.util import ErrorDict
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.utils.crypto import salted_hmac, constant_time_compare
from django.utils.encoding import force_text
from django.utils.text import get_text_list
from django.utils import timezone
from django.utils.translation import ungettext, ugettext, ugettext_lazy as _
from comments.models import Comment, ThreadedComment
# Maximum accepted comment body length; override via settings.COMMENT_MAX_LENGTH.
COMMENT_MAX_LENGTH = getattr(settings, 'COMMENT_MAX_LENGTH', 3000)
# How long (seconds) a rendered comment form stays valid before the timestamp
# check rejects it; override via settings.COMMENTS_TIMEOUT.
DEFAULT_COMMENTS_TIMEOUT = getattr(settings, 'COMMENTS_TIMEOUT', (2 * 60 * 60))  # 2h
class CommentSecurityForm(forms.Form):
    """
    Handles the security aspects (anti-spoofing) for comment forms.

    A salted HMAC over (content_type, object_pk, timestamp) is embedded in the
    rendered form; on submission the hash and the timestamp age are verified,
    so the hidden target-object fields cannot be tampered with client-side.
    """
    content_type = forms.CharField(widget=forms.HiddenInput)
    object_pk = forms.CharField(widget=forms.HiddenInput)
    timestamp = forms.IntegerField(widget=forms.HiddenInput)
    security_hash = forms.CharField(min_length=40, max_length=40, widget=forms.HiddenInput)

    def __init__(self, target_object, data=None, initial=None, **kwargs):
        # target_object is the model instance being commented on; the hidden
        # security fields are derived from it and merged into ``initial``.
        self.target_object = target_object
        if initial is None:
            initial = {}
        initial.update(self.generate_security_data())
        super(CommentSecurityForm, self).__init__(data=data, initial=initial, **kwargs)

    def security_errors(self):
        """Return just those errors associated with security"""
        errors = ErrorDict()
        for f in ["honeypot", "timestamp", "security_hash"]:
            if f in self.errors:
                errors[f] = self.errors[f]
        return errors

    def clean_security_hash(self):
        """Check the security hash."""
        # Recompute the hash from the *submitted* hidden fields and compare in
        # constant time to defeat timing attacks.
        security_hash_dict = {
            'content_type': self.data.get("content_type", ""),
            'object_pk': self.data.get("object_pk", ""),
            'timestamp': self.data.get("timestamp", ""),
        }
        expected_hash = self.generate_security_hash(**security_hash_dict)
        actual_hash = self.cleaned_data["security_hash"]
        if not constant_time_compare(expected_hash, actual_hash):
            raise forms.ValidationError("Security hash check failed.")
        return actual_hash

    def clean_timestamp(self):
        """Make sure the timestamp isn't too far (default is > 2 hours) in the past."""
        ts = self.cleaned_data["timestamp"]
        if time.time() - ts > DEFAULT_COMMENTS_TIMEOUT:
            raise forms.ValidationError("Timestamp check failed")
        return ts

    def generate_security_data(self):
        """Generate a dict of security data for "initial" data."""
        timestamp = int(time.time())
        security_dict = {
            'content_type': str(self.target_object._meta),
            'object_pk': str(self.target_object._get_pk_val()),
            'timestamp': str(timestamp),
            'security_hash': self.initial_security_hash(timestamp),
        }
        return security_dict

    def initial_security_hash(self, timestamp):
        """
        Generate the initial security hash from self.content_object
        and a (unix) timestamp.
        """
        initial_security_dict = {
            'content_type': str(self.target_object._meta),
            'object_pk': str(self.target_object._get_pk_val()),
            'timestamp': str(timestamp),
        }
        return self.generate_security_hash(**initial_security_dict)

    def generate_security_hash(self, content_type, object_pk, timestamp):
        """
        Generate a HMAC security hash from the provided info.
        """
        # NOTE: values are joined with "-" before hashing; the inputs here do
        # not normally contain "-" ambiguity (timestamps are digits).
        info = (content_type, object_pk, timestamp)
        key_salt = "django.contrib.forms.CommentSecurityForm"
        value = "-".join(info)
        return salted_hmac(key_salt, value).hexdigest()
class CommentDetailsForm(CommentSecurityForm):
    """
    Handles the specific details of the comment (name, comment, etc.).
    """
    # Author identity fields are hidden inputs: they are pre-filled by the
    # view rather than typed by the visitor.
    name = forms.CharField(label=_("Name"), max_length=50, widget=forms.HiddenInput)
    email = forms.EmailField(label=_("Email address"), widget=forms.HiddenInput)
    url = forms.URLField(label=_("URL"), required=False, widget=forms.HiddenInput)
    comment = forms.CharField(label=_('Comment'), widget=forms.Textarea,
                              max_length=COMMENT_MAX_LENGTH)

    def get_comment_object(self):
        """
        Return a new (unsaved) comment object based on the information in this
        form. Assumes that the form is already validated and will throw a
        ValueError if not.

        Does not set any of the fields that would come from a Request object
        (i.e. ``user`` or ``ip_address``).
        """
        if not self.is_valid():
            raise ValueError("get_comment_object may only be called on valid forms")
        CommentModel = self.get_comment_model()
        new = CommentModel(**self.get_comment_create_data())
        new = self.check_for_duplicate_comment(new)
        return new

    def get_comment_model(self):
        """
        Get the comment model to create with this form. Subclasses in custom
        comment apps should override this, get_comment_create_data, and perhaps
        check_for_duplicate_comment to provide custom comment models.
        """
        return Comment

    def get_comment_create_data(self):
        """
        Returns the dict of data to be used to create a comment. Subclasses in
        custom comment apps that override get_comment_model can override this
        method to add extra fields onto a custom comment model.
        """
        return dict(
            content_type=ContentType.objects.get_for_model(self.target_object),
            object_pk=force_text(self.target_object._get_pk_val()),
            user_name=self.cleaned_data["name"],
            user_email=self.cleaned_data["email"],
            user_url=self.cleaned_data["url"],
            comment=self.cleaned_data["comment"],
            submit_date=timezone.now(),
            site_id=settings.SITE_ID,
            is_public=True,
            is_removed=False,
        )

    def check_for_duplicate_comment(self, new):
        """
        Check that a submitted comment isn't a duplicate. This might be caused
        by someone posting a comment twice. If it is a dup, silently return the *previous* comment.
        """
        # Duplicate = same author/target and same text posted on the same day.
        possible_duplicates = self.get_comment_model()._default_manager.using(
            self.target_object._state.db
        ).filter(
            content_type=new.content_type,
            object_pk=new.object_pk,
            user_name=new.user_name,
            user_email=new.user_email,
            user_url=new.user_url,
        )
        for old in possible_duplicates:
            if old.submit_date.date() == new.submit_date.date() and old.comment == new.comment:
                return old
        return new

    def clean_comment(self):
        """
        If COMMENTS_ALLOW_PROFANITIES is False, check that the comment doesn't
        contain anything in PROFANITIES_LIST.
        """
        comment = self.cleaned_data["comment"]
        if (not getattr(settings, 'COMMENTS_ALLOW_PROFANITIES', False) and
                getattr(settings, 'PROFANITIES_LIST', False)):
            bad_words = [w for w in settings.PROFANITIES_LIST if w in comment.lower()]
            if bad_words:
                # Words are shown masked: first and last letter with dashes between.
                raise forms.ValidationError(ungettext(
                    "Watch your mouth! The word %s is not allowed here.",
                    "Watch your mouth! The words %s are not allowed here.",
                    len(bad_words)) % get_text_list(
                        ['"%s%s%s"' % (i[0], '-' * (len(i) - 2), i[-1])
                         for i in bad_words], ugettext('and')))
        return comment
class CommentForm(CommentDetailsForm):
    """Comment form with a honeypot field as a lightweight spam trap."""

    # Real users never see/fill this; bots that auto-fill every field will.
    honeypot = forms.CharField(required=False,
                               label=_('If you enter anything in this field '
                                       'your comment will be treated as spam'))

    def clean_honeypot(self):
        """Reject the submission if the honeypot field was filled in."""
        honeypot_value = self.cleaned_data["honeypot"]
        if honeypot_value:
            raise forms.ValidationError(self.fields["honeypot"].label)
        return honeypot_value
class ThreadedCommentForm(CommentForm):
    """Comment form supporting threading (an optional title and a parent id)."""
    title = forms.CharField(label=_('Title'), required=False, max_length=getattr(settings, 'COMMENTS_TITLE_MAX_LENGTH', 255), widget=forms.HiddenInput)
    parent = forms.IntegerField(required=False, widget=forms.HiddenInput)

    def __init__(self, target_object, parent=None, data=None, initial=None):
        # Reposition 'title' so it renders immediately before 'comment'.
        # base_fields is class-level state, so this reordering is shared by
        # all instances; the mechanism differs by Django version.
        if django.VERSION >= (1,7):
            # Using collections.OrderedDict from Python 2.7+
            # This class does not have an insert method, have to replace it.
            from collections import OrderedDict
            keys = list(self.base_fields.keys())
            keys.remove('title')
            keys.insert(keys.index('comment'), 'title')
            self.base_fields = OrderedDict((k, self.base_fields[k]) for k in keys)
        else:
            # Pre-1.7 base_fields is a SortedDict with insert/keyOrder.
            self.base_fields.insert(
                self.base_fields.keyOrder.index('comment'), 'title',
                self.base_fields.pop('title')
            )
        self.parent = parent
        if initial is None:
            initial = {}
        initial.update({'parent': self.parent})
        super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial)

    def get_comment_model(self):
        # Threaded comments persist to ThreadedComment instead of Comment.
        return ThreadedComment

    def get_comment_create_data(self):
        # Extend the base creation dict with the threading-specific fields.
        d = super(ThreadedCommentForm, self).get_comment_create_data()
        d['parent_id'] = self.cleaned_data['parent']
        d['title'] = self.cleaned_data['title']
        return d
| sheshkovsky/jaryan | comments/forms.py | Python | apache-2.0 | 9,802 |
######################################################################
# tc_gid.rb
#
# Test case for the Process.gid and Process.gid= module methods.
#
# NOTE: The Process.gid= tests are only run if the test is run as the
# root user.
######################################################################
require 'test/unit'
require 'test/helper'
# Exercises Process.gid and Process.gid=. The gid= assertions only run when
# the suite is executed as root; most checks are skipped on Windows/JRuby.
class TC_Process_Gid_ModuleMethod < Test::Unit::TestCase
  include Test::Helper

  unless JRUBY || WINDOWS
    def setup
      # Resolve gids once per test: 'nobody' as the unprivileged target and
      # the current login's primary gid so it can be restored afterwards.
      @nobody_gid = Etc.getgrnam('nobody').gid
      @login_gid = Etc.getpwnam(Etc.getlogin).gid
    end
  end

  def test_gid_basic
    assert_respond_to(Process, :gid)
    assert_respond_to(Process, :gid=)
  end

  unless WINDOWS
    def test_gid
      if ROOT
        assert_equal(0, Process.gid)
      else
        # assert_equal(@login_gid, Process.gid)
      end
    end

    if ROOT
      # NOTE(review): "test_gid=" is an assignment-style method name; Test::Unit
      # invokes test_* methods with no arguments — confirm this test actually
      # runs as intended rather than being silently skipped/errored.
      def test_gid=
        assert_nothing_raised{ Process.gid = @nobody_gid }
        assert_equal(@nobody_gid, Process.gid)
        assert_nothing_raised{ Process.gid = @login_gid }
        assert_equal(@login_gid, Process.gid)
      end
    end

    def test_gid_expected_errors
      # Process.gid takes no arguments; Process.gid= requires an Integer.
      assert_raises(ArgumentError){ Process.gid(0) }
      assert_raises(TypeError){ Process.gid = "bogus" }
    end

    def teardown
      @nobody_gid = nil
      @login_gid = nil
    end
  end
end
| google-code/android-scripting | jruby/src/test/externals/ruby_test/test/core/Process/class/tc_gid.rb | Ruby | apache-2.0 | 1,434 |
package C00_data;
/**
 * Created by Administrator on 2016/8/30.
 * Radix / numeric-representation demo.
 *
 * Binary printouts show the two's complement form; integer literals given
 * to the compiler are likewise interpreted as two's complement bit patterns.
 */
public class Test_code {
    public static void main(String[] args) {
        /*
        The binary strings printed below are two's complement; direct
        literals (e.g. 0xff) are also taken as two's complement patterns.
        */
        byte a=-5;
        System.out.println("a:"+a);
        System.out.println("a的二进制:"+Integer.toBinaryString(a));
        byte b=(byte)0xff;
        System.out.println("b:"+b);
        System.out.println("b的二进制:"+Integer.toBinaryString(b));
        byte c=-1;
        System.out.println("c:"+c);
        System.out.println("c的二进制:"+Integer.toBinaryString(c));
        // Parse a binary string back to decimal; the first character may be '-'.
        // In this form the string holds sign-magnitude digits, not two's complement.
        String cc="-111";
        int d = Integer.parseInt(cc, 2); // base 2
        System.out.println("d:"+d);
        /**
         * Arithmetic and bitwise operations on byte/short promote the result
         * to int, so it cannot be assigned back to a short without a cast.
         */
        byte b1=1;
        short s1=2;
        // short ss=b1+1;  // compile error
        // short ss=s1+1;  // compile error
        // short ss=b1+s1; // compile error
        int i=b1+s1;
        /**
         * Shift operations: compound assignment (<<=) narrows back implicitly.
         */
        byte b2=3;
        b2<<=1;
        System.out.println("b2<<=1-->"+b2);
        /**
         * Overflow wrap-around: for byte, 127+1 == -128.
         */
        byte b3=127;
        b3++;
        System.out.println("127+1="+b3);
    }
}
| wapalxj/Java | javaworkplace/SXT/src/C00_data/Test_code.java | Java | apache-2.0 | 1,493 |
/**
* Main framework
*/
var http = require('http');
var fs = require('fs');
var url = require('url');
var querystring = require('querystring');
var mime = require('mime');
var engineMod = require('./engine.js')
/**
 * WebApp — bootstraps the PanzerParty engine for a given base path.
 *
 * @param {string} path   Absolute base path of the application.
 * @param {object} config Configuration object; missing entries are filled in
 *                        with defaults by checkConfig() before the engine starts.
 */
var WebApp = function(path, config) {
    console.log('Starting PanzerParty at ' + path);
    var basepath = path;

    // Fill in any missing configuration values with defaults.
    // BUGFIX: the statics/modules checks were written as
    // `typeof(config.path.statics === 'undefined')`, i.e. typeof applied to a
    // boolean, which is the string "boolean" (always truthy) — so user-supplied
    // statics/modules paths were unconditionally overwritten. The layouts
    // default was also assigned to a misspelled key (`layuots`).
    function checkConfig(config) {
        if (typeof(config.port) === 'undefined')
            config.port = 8090;
        if (typeof(config.path) === 'undefined')
            config.path = {};
        if (typeof(config.path.statics) === 'undefined')
            config.path.statics = '/static';
        if (typeof(config.path.modules) === 'undefined')
            config.path.modules = '/modules';
        if (typeof(config.path.controllers) === 'undefined')
            config.path.controllers = '/controllers';
        if (typeof(config.path.views) === 'undefined')
            config.path.views = '/views';
        if (typeof(config.path.layouts) === 'undefined')
            config.path.layouts = '/layouts'; // was: config.path.layuots
        if (typeof(config.path.l10n) === 'undefined')
            config.path.l10n = '/l10n';
        if (typeof(config.controllers) === 'undefined')
            config.controllers = [];
    }

    config.root = basepath;
    checkConfig(config);

    var engine = new engineMod.Engine(config);

    /** Start the underlying HTTP engine. */
    this.start = function() {
        engine.start();
    };
};
exports.WebApp = WebApp; | theoddbeard/PanzerParty | core/app.js | JavaScript | apache-2.0 | 1,246 |
using Arinna.Northwind.ProductService.Business.Contract;
using Arinna.Northwind.ProductService.Data.Repository.Interface;
using System;
using System.Collections.Generic;
using System.Text;
namespace Arinna.Northwind.ProductService.Business
{
public class CategoryManager: ICategoryManager
{
private readonly ICategoryRepository categoryRepository;
public CategoryManager(ICategoryRepository categoryRepository)
{
this.categoryRepository = categoryRepository;
}
}
}
| ZGRTech/Arinna | nortwind/Arinna.Northwind.ProductService.Business/CategoryManager.cs | C# | apache-2.0 | 529 |
import Router from 'viewModels/router'
import read from './read'
// Route table for token view-models.
const router = new Router();

// GET /token/read → read handler.
router.get('token/read', '/', read);

export default router;
| lian-yue/lianyue-server | app/viewModels/token/routes.js | JavaScript | apache-2.0 | 155 |
/*
* Copyright 2014 Texas A&M Engineering Experiment Station
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.tamu.tcat.crypto.bouncycastle.internal;
import java.security.Provider;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
/**
 * OSGi bundle activator. Tracks the bundle context and exposes a
 * bundle-scoped BouncyCastle {@link Provider} instance.
 */
public class Activator implements BundleActivator {

    private static Activator activator;
    private static BundleContext bundleContext;

    /**
     * Instantiate and use this {@link Provider} instance instead of adding it to the
     * JVM via {@link java.security.Security#addProvider(Provider)}.
     * <br/>There are problems in "redeployable" environments (such as OSGI and
     * Tomcat) with using JVM-global singletons: the provider could be added once,
     * and if whatever added it is "undeployed", the provider becomes invalid and
     * throws exceptions when trying to access it.
     * <br/>
     * To avoid these issues, the provider is constructed explicitly where needed
     * and given as an argument to cypher API explicitly rather than having the
     * security framework look up a provider by identifier or choose a default.
     */
    private final Provider bouncyCastleProvider = new BouncyCastleProvider();

    /** Returns the singleton activator, or null when the bundle is stopped. */
    public static Activator getDefault() {
        return activator;
    }

    /** Returns the active bundle context, or null when the bundle is stopped. */
    public static BundleContext getContext() {
        return bundleContext;
    }

    @Override
    public void start(BundleContext context) throws Exception {
        bundleContext = context;
        activator = this;
    }

    @Override
    public void stop(BundleContext context) throws Exception {
        bundleContext = null;
        activator = null;
    }

    /** Bundle-local BouncyCastle provider; never registered globally. */
    public Provider getBouncyCastleProvider()
    {
        return bouncyCastleProvider;
    }
}
| tcat-tamu/crypto | bundles/edu.tamu.tcat.crypto.bouncycastle/src/edu/tamu/tcat/crypto/bouncycastle/internal/Activator.java | Java | apache-2.0 | 2,300 |
// Copyright 2014-2015 Boundary, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.boundary.sdk.event.esper;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Esper event wrapping an {@link Employee}; accessors delegate to the
 * underlying employee object.
 */
public class NewEmployeeEvent implements Serializable {

    private static final long serialVersionUID = -8757562061344149582L;

    /** Backing employee; null if the map-based constructor was used (see FIXME). */
    Employee employee;

    NewEmployeeEvent(Employee employee) {
        this.employee = employee;
    }

    /**
     * FIXME(review): this constructor never initializes {@link #employee}, so
     * every accessor below will throw a NullPointerException afterwards. It
     * also ignored {@code firstName}, and its original body only re-put the
     * map's own entries back into itself — a no-op, removed here. Confirm the
     * intended semantics with the original author.
     */
    public NewEmployeeEvent(String firstName,
            Map<String, Address> addresses) {
    }

    public String getName() {
        return this.employee.getName();
    }

    /** Address of the given type (e.g. "home"), or null if absent. */
    public Address getAddress(String type) {
        return employee.getAddresses().get(type);
    }

    public Employee getSubordinate(int index) {
        return employee.getSubordinate(index);
    }

    public Employee[] getAllSubordinates() {
        // Removed a dead local array that was allocated and never used.
        return employee.getAllSubordinates();
    }

    public Iterable<EducationHistory> getEducation() {
        return employee.getEducation();
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("NewEmployeeEvent [employee=");
        builder.append(employee);
        builder.append("]");
        return builder.toString();
    }
}
| boundary/boundary-event-sdk | src/test/java/com/boundary/sdk/event/esper/NewEmployeeEvent.java | Java | apache-2.0 | 1,889 |
#region Apache License Version 2.0
/*----------------------------------------------------------------
Copyright 2017 Yang Chen (cy2000@gmail.com)
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
Detail: https://github.com/etechi/ServiceFramework/blob/master/license.md
----------------------------------------------------------------*/
#endregion Apache License Version 2.0
using System.Threading.Tasks;
using SF.Entities;
namespace SF.Auth.Permissions
{
	/// <summary>
	/// Query argument for role lookups, keyed by the role's string identifier.
	/// </summary>
	public class RoleQueryArgument : ObjectQueryArgument<ObjectKey<string>>
	{
	}

	/// <summary>
	/// Management contract for roles: CRUD via the entity manager plus
	/// querying via the entity source, parameterized over the role model
	/// and query-argument types.
	/// </summary>
	public interface IRoleManager<TRoleInternal,TQueryArgument>
		: IEntityManager<ObjectKey<string>, TRoleInternal>,
		IEntitySource<ObjectKey<string>, TRoleInternal, TQueryArgument>
		where TRoleInternal: Models.RoleInternal
		where TQueryArgument: RoleQueryArgument
	{
	}

	/// <summary>Non-generic convenience alias using the default model types.</summary>
	public interface IRoleManager : IRoleManager<Models.RoleInternal, RoleQueryArgument>
	{ }
}
| etechi/ServiceFramework | Projects/Server/Common/SF.Common.Abstractions/Auth/Permissions/IRoleManager.cs | C# | apache-2.0 | 1,380 |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.web.form;
import org.kuali.rice.krad.maintenance.MaintenanceDocument;
import org.kuali.rice.krad.uif.UifConstants.ViewType;
/**
 * Form backing <code>MaintenanceView</code> screens.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class MaintenanceForm extends DocumentFormBase {
    private static final long serialVersionUID = -5805825500852498048L;

    /** Fully qualified class name of the data object under maintenance. */
    protected String dataObjectClassName;

    /** Requested maintenance action (e.g. New, Edit, Copy, Delete). */
    protected String maintenanceAction;

    public MaintenanceForm() {
        super();
        setViewTypeName(ViewType.MAINTENANCE);
    }

    @Override
    public MaintenanceDocument getDocument() {
        return (MaintenanceDocument) super.getDocument();
    }

    /**
     * Typed setter matching {@link #getDocument()} so Spring 3.1+ bean
     * wrappers resolve a consistent property type for "document".
     */
    public void setDocument(MaintenanceDocument document) {
        super.setDocument(document);
    }

    public String getDataObjectClassName() {
        return dataObjectClassName;
    }

    public void setDataObjectClassName(String dataObjectClassName) {
        this.dataObjectClassName = dataObjectClassName;
    }

    public String getMaintenanceAction() {
        return maintenanceAction;
    }

    public void setMaintenanceAction(String maintenanceAction) {
        this.maintenanceAction = maintenanceAction;
    }
}
| ua-eas/ua-rice-2.1.9 | krad/krad-web-framework/src/main/java/org/kuali/rice/krad/web/form/MaintenanceForm.java | Java | apache-2.0 | 2,006 |
#include <cstdlib>
#include <cstring>
#include <unistd.h>
#include <utility>
#include "gtest/gtest.h"
#include "persistent_map.h"
using ::std::make_pair;
const char *testfile = "test.map";
// Fixture owning a fresh PersistentMap<int, int> per test.
class MapTest : public ::testing::Test {
protected:
  PersistentMap<int, int> pm;

  // No shared setup/teardown: each test opens and closes the map file itself.
  virtual void SetUp() {
  }

  virtual void TearDown() {
  }
};
// Insert one pair, look it up, erase it, and verify the map is empty again.
TEST_F(MapTest, SimpleInsertFindErase) {
  pm.open(testfile);
  // insert() returns pair<iterator, bool>; second is true on a fresh insert.
  auto insert = pm.insert(make_pair(1, 2));
  EXPECT_TRUE(insert.second);
  EXPECT_EQ(1, insert.first->first);
  EXPECT_EQ(2, insert.first->second);
  EXPECT_EQ(1, pm.size());
  auto find = pm.find(1);
  EXPECT_EQ(1, find->first);
  EXPECT_EQ(2, find->second);
  // Erasing the only element should return end() and leave size() == 0.
  auto erase = pm.erase(find);
  EXPECT_EQ(pm.end(), erase);
  EXPECT_EQ(0, pm.size());
  pm.clear();
  EXPECT_EQ(0, pm.size());
  pm.close();
}
// Same as SimpleInsertFindErase, but close and reopen the backing file after
// the insert to verify the entry survives persistence.
TEST_F(MapTest, ReopenInsertFindErase) {
  pm.open(testfile);
  auto insert = pm.insert(make_pair(1, 2));
  EXPECT_TRUE(insert.second);
  EXPECT_EQ(1, insert.first->first);
  EXPECT_EQ(2, insert.first->second);
  EXPECT_EQ(1, pm.size());
  pm.close();
  // Reopen: the previously inserted pair must still be present.
  pm.open(testfile);
  EXPECT_EQ(1, pm.size());
  auto find = pm.find(1);
  EXPECT_EQ(1, find->first);
  EXPECT_EQ(2, find->second);
  auto erase = pm.erase(find);
  EXPECT_EQ(pm.end(), erase);
  EXPECT_EQ(0, pm.size());
  pm.clear();
  EXPECT_EQ(0, pm.size());
  pm.close();
}
// operator[]: insert-or-assign semantics, size counts distinct keys only.
TEST_F(MapTest, SimpleArrayAccess) {
  pm.open(testfile);
  pm[1] = 2;
  EXPECT_EQ(2, pm[1]);
  // Overwriting an existing key must not grow the map.
  pm[1] = 3;
  EXPECT_EQ(3, pm[1]);
  pm[7] = 9;
  EXPECT_EQ(9, pm[7]);
  EXPECT_EQ(2, pm.size());
  pm.clear();
  EXPECT_EQ(0, pm.size());
  pm.close();
}
// operator[] values must survive a close/reopen cycle of the backing file.
TEST_F(MapTest, ReopenArrayAccess) {
  pm.open(testfile);
  pm[1] = 2;
  EXPECT_EQ(2, pm[1]);
  pm[1] = 3;
  EXPECT_EQ(3, pm[1]);
  pm[7] = 9;
  EXPECT_EQ(9, pm[7]);
  EXPECT_EQ(2, pm.size());
  pm.close();
  // Reopen and verify both keys kept their latest values.
  pm.open(testfile);
  EXPECT_EQ(2, pm.size());
  EXPECT_EQ(3, pm[1]);
  EXPECT_EQ(9, pm[7]);
  EXPECT_EQ(2, pm.size());
  pm.clear();
  EXPECT_EQ(0, pm.size());
  pm.close();
}
| hallielaine/DecaFS | test/persistent_stl_test/persistent_map_unittest.cc | C++ | apache-2.0 | 2,002 |
using System;
using System.Collections;
using System.Collections.Generic;
using WizardsChess.Movement;
// Board arranged in A-H, 1-8. where A-H is replaced by 9-16
namespace WizardsChess.Chess.Pieces {
public abstract class ChessPiece{
public ChessPiece(ChessTeam team){
Team = team;
HasMoved = false;
CanJump = false;
}
/*
public ChessPiece(ChessPiece piece)
{
Type = piece.Type;
Team = piece.Team;
HasMoved = piece.HasMoved;
CanJump = piece.CanJump;
}
*/
public abstract ChessPiece DeepCopy();
public PieceType Type { get; protected set; }
public ChessTeam Team { get; }
public bool HasMoved { get; set; }
public bool CanJump { get; protected set; }
public virtual string ToShortString()
{
string piece = Type.ToString().Substring(0, 1);
if (Team == ChessTeam.White)
{
return piece + "w";
}
else if (Team == ChessTeam.Black)
{
return piece + "b";
}
else
{
return piece;
}
}
public override string ToString()
{
return Team.ToString() + "-" + Type.ToString();
}
public abstract IReadOnlyList<Vector2D> GetAllowedMotionVectors();
public abstract IReadOnlyList<Vector2D> GetAttackMotionVectors();
}
} | MorganR/wizards-chess | WizardsChess/WizardsChessUtils/Chess/Pieces/ChessPiece.cs | C# | apache-2.0 | 1,218 |
import 'style!../sass/spinner.scss';
// Full-screen backdrop with a centered loading spinner (shown while the
// lightbox content loads).
const backdrop = `
<div class="js-blocking" id="lightbox-blocking">
<span class="lightbox-spinner"></span>
</div>
`;
// "Previous" navigation control: circular button with a left-pointing arrow.
const prevControl = `
<div class="lightbox-extra control prev js-control js-prev">
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" x="0" y="0" width="60" height="60" viewBox="0 0 60 60" xml:space="preserve">
<circle class="lightbox-icon-bg" cx="30" cy="30" r="30"/>
<path class="lightbox-icon-arrow" d="M36.8,36.4L30.3,30l6.5-6.4l-3.5-3.4l-10,9.8l10,9.8L36.8,36.4z"/>
</svg>
</div>
`;
// "Next" navigation control: circular button with a right-pointing arrow.
const nextControl = `
<div class="lightbox-extra control next js-control js-next">
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" x="0" y="0" width="60" height="60" viewBox="0 0 60 60" xml:space="preserve">
<circle class="lightbox-icon-bg" cx="30" cy="30" r="30"/>
<path class="lightbox-icon-arrow" d="M24.2,23.5l6.6,6.5l-6.6,6.5l3.6,3.5L37.8,30l-10.1-9.9L24.2,23.5z"/>
</svg>
</div>
`;
// "Close" control: circular button with an X icon.
const closeControl = `
<div class="lightbox-extra control close js-control js-close">
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" x="0" y="0" viewBox="0 0 100 100" preserveAspectRatio="xMidYMid meet">
<circle class="lightbox-icon-bg" cx="50" cy="50" r="47.5"/>
<polygon class="lightbox-icon-close" points="64.5,39.8 60.2,35.5 50,45.7 39.8,35.5 35.5,39.8 45.7,50 35.5,60.2 39.8,64.5 50,54.3 60.2,64.5 64.5,60.2 54.3,50"/>
</svg>
</div>
`;
// Complete lightbox markup: backdrop + inner wrap with navigation controls
// and an (initially empty) content container, starting off-screen.
export const lightbox = `
<div class="js-lightbox-wrap offscreen" id="lightbox-wrap">
${backdrop}
<div class="js-lightbox-inner-wrap" id="lightbox-inner-wrap">
<div class="js-img-wrap" id="lightbox-img-wrap">
${prevControl}
${nextControl}
${closeControl}
<div class="lightbox-contents js-contents"></div>
</div>
</div>
</div>
`;
| nemtsov/lightbox | src/templates.js | JavaScript | apache-2.0 | 1,861 |
package com.olmatix.model;
/**
* Created by Rahman on 12/28/2016.
*/
/**
 * Simple id/value pair backing a spinner entry.
 */
public class SpinnerObject {
    private String databaseId;
    private String databaseValue;

    /**
     * Creates an empty entry; both fields start as {@code null}.
     * (The original no-arg constructor performed useless self-assignments of
     * the uninitialized fields to themselves; removed.)
     */
    public SpinnerObject() {
    }

    /**
     * Convenience constructor (backward-compatible overload).
     *
     * @param databaseId    identifier stored for this entry
     * @param databaseValue display value stored for this entry
     */
    public SpinnerObject(String databaseId, String databaseValue) {
        this.databaseId = databaseId;
        this.databaseValue = databaseValue;
    }

    public String getDatabaseId() {
        return databaseId;
    }

    public void setDatabaseId(String databaseId) {
        this.databaseId = databaseId;
    }

    public String getDatabaseValue() {
        return databaseValue;
    }

    public void setDatabaseValue(String databaseValue) {
        this.databaseValue = databaseValue;
    }
}
| lesjaw/Olmatix | olmatix/src/main/java/com/olmatix/model/SpinnerObject.java | Java | apache-2.0 | 641 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import com.amazonaws.AmazonServiceException;
/**
 * <p>
 * The specified load balancer attribute does not exist.
 * </p>
 * Thrown by Elastic Load Balancing attribute APIs when a request references
 * an attribute that is not defined for the load balancer.
 */
public class LoadBalancerAttributeNotFoundException extends AmazonServiceException {
    // Required for Serializable compatibility across SDK versions.
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new LoadBalancerAttributeNotFoundException with the specified error
     * message.
     *
     * @param message Describes the error encountered.
     */
    public LoadBalancerAttributeNotFoundException(String message) {
        super(message);
    }
}
| trasa/aws-sdk-java | aws-java-sdk-elasticloadbalancing/src/main/java/com/amazonaws/services/elasticloadbalancing/model/LoadBalancerAttributeNotFoundException.java | Java | apache-2.0 | 1,217 |
package citrixadc
import (
"github.com/citrix/adc-nitro-go/resource/config/transform"
"github.com/citrix/adc-nitro-go/service"
"github.com/hashicorp/terraform/helper/schema"
"fmt"
"log"
)
// resourceCitrixAdcTransformaction describes the Terraform resource for the
// NITRO "transformaction" configuration object: its schema and the CRUD
// entry points wired to the functions defined below.
func resourceCitrixAdcTransformaction() *schema.Resource {
	return &schema.Resource{
		SchemaVersion: 1,
		Create:        createTransformactionFunc,
		Read:          readTransformactionFunc,
		Update:        updateTransformactionFunc,
		Delete:        deleteTransformactionFunc,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},
		// The explicit &schema.Schema{...} wrapper is elided for each entry:
		// Go composite-literal rules supply it for *schema.Schema map values.
		Schema: map[string]*schema.Schema{
			"comment": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"cookiedomainfrom": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"cookiedomaininto": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"name": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			"priority": {
				Type:     schema.TypeInt,
				Optional: true,
				Computed: true,
			},
			"profilename": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				ForceNew: true,
			},
			"requrlfrom": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"requrlinto": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"resurlfrom": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"resurlinto": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"state": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
		},
	}
}
// createTransformactionFunc implements Create for the transformaction
// resource. NITRO's "add" operation only accepts a subset of the attributes,
// so the object is created first and then immediately updated with the full
// attribute set.
func createTransformactionFunc(d *schema.ResourceData, meta interface{}) error {
	log.Printf("[DEBUG] citrixadc-provider: In createTransformactionFunc")
	client := meta.(*NetScalerNitroClient).client
	transformactionName := d.Get("name").(string)
	// Create does not support all attributes
	transformactionNew := transform.Transformaction{
		Name:        d.Get("name").(string),
		Priority:    d.Get("priority").(int),
		Profilename: d.Get("profilename").(string),
		State:       d.Get("state").(string),
	}
	_, err := client.AddResource(service.Transformaction.Type(), transformactionName, &transformactionNew)
	if err != nil {
		return err
	}
	// Need to update with full set of attributes
	transformaction := transform.Transformaction{
		Comment:          d.Get("comment").(string),
		Cookiedomainfrom: d.Get("cookiedomainfrom").(string),
		Cookiedomaininto: d.Get("cookiedomaininto").(string),
		Name:             d.Get("name").(string),
		Priority:         d.Get("priority").(int),
		Requrlfrom:       d.Get("requrlfrom").(string),
		Requrlinto:       d.Get("requrlinto").(string),
		Resurlfrom:       d.Get("resurlfrom").(string),
		Resurlinto:       d.Get("resurlinto").(string),
		State:            d.Get("state").(string),
	}
	_, err = client.UpdateResource(service.Transformaction.Type(), transformactionName, &transformaction)
	if err != nil {
		return err
	}
	d.SetId(transformactionName)
	err = readTransformactionFunc(d, meta)
	if err != nil {
		// Best-effort read-back: the resource was created, so keep it in
		// state even if the immediate read fails. Log prefix made consistent
		// with the rest of this file (was "netscaler-provider").
		log.Printf("[ERROR] citrixadc-provider: ?? we just created this transformaction but we can't read it ?? %s", transformactionName)
		return nil
	}
	return nil
}
// readTransformactionFunc implements Read for the transformaction resource.
// If the object no longer exists on the ADC the resource id is cleared so
// Terraform plans a re-create instead of failing.
func readTransformactionFunc(d *schema.ResourceData, meta interface{}) error {
	log.Printf("[DEBUG] citrixadc-provider: In readTransformactionFunc")
	client := meta.(*NetScalerNitroClient).client
	transformactionName := d.Id()
	log.Printf("[DEBUG] citrixadc-provider: Reading transformaction state %s", transformactionName)
	data, err := client.FindResource(service.Transformaction.Type(), transformactionName)
	if err != nil {
		log.Printf("[WARN] citrixadc-provider: Clearing transformaction state %s", transformactionName)
		d.SetId("")
		return nil
	}
	// Note: the original code set "name" twice; the duplicate call was removed.
	d.Set("name", data["name"])
	d.Set("comment", data["comment"])
	d.Set("cookiedomainfrom", data["cookiedomainfrom"])
	d.Set("cookiedomaininto", data["cookiedomaininto"])
	d.Set("priority", data["priority"])
	d.Set("profilename", data["profilename"])
	d.Set("requrlfrom", data["requrlfrom"])
	d.Set("requrlinto", data["requrlinto"])
	d.Set("resurlfrom", data["resurlfrom"])
	d.Set("resurlinto", data["resurlinto"])
	d.Set("state", data["state"])
	return nil
}
// updateTransformactionFunc implements Update for the transformaction
// resource. Only attributes reported changed by Terraform are sent to the
// ADC; if nothing changed, no NITRO call is made.
func updateTransformactionFunc(d *schema.ResourceData, meta interface{}) error {
	log.Printf("[DEBUG] citrixadc-provider: In updateTransformactionFunc")
	client := meta.(*NetScalerNitroClient).client
	transformactionName := d.Get("name").(string)

	transformaction := transform.Transformaction{
		Name: d.Get("name").(string),
	}
	hasChange := false
	if d.HasChange("comment") {
		log.Printf("[DEBUG] citrixadc-provider: Comment has changed for transformaction %s, starting update", transformactionName)
		transformaction.Comment = d.Get("comment").(string)
		hasChange = true
	}
	if d.HasChange("cookiedomainfrom") {
		log.Printf("[DEBUG] citrixadc-provider: Cookiedomainfrom has changed for transformaction %s, starting update", transformactionName)
		transformaction.Cookiedomainfrom = d.Get("cookiedomainfrom").(string)
		hasChange = true
	}
	if d.HasChange("cookiedomaininto") {
		log.Printf("[DEBUG] citrixadc-provider: Cookiedomaininto has changed for transformaction %s, starting update", transformactionName)
		transformaction.Cookiedomaininto = d.Get("cookiedomaininto").(string)
		hasChange = true
	}
	if d.HasChange("name") {
		log.Printf("[DEBUG] citrixadc-provider: Name has changed for transformaction %s, starting update", transformactionName)
		transformaction.Name = d.Get("name").(string)
		hasChange = true
	}
	if d.HasChange("priority") {
		log.Printf("[DEBUG] citrixadc-provider: Priority has changed for transformaction %s, starting update", transformactionName)
		transformaction.Priority = d.Get("priority").(int)
		hasChange = true
	}
	if d.HasChange("profilename") {
		log.Printf("[DEBUG] citrixadc-provider: Profilename has changed for transformaction %s, starting update", transformactionName)
		transformaction.Profilename = d.Get("profilename").(string)
		hasChange = true
	}
	if d.HasChange("requrlfrom") {
		log.Printf("[DEBUG] citrixadc-provider: Requrlfrom has changed for transformaction %s, starting update", transformactionName)
		transformaction.Requrlfrom = d.Get("requrlfrom").(string)
		hasChange = true
	}
	if d.HasChange("requrlinto") {
		log.Printf("[DEBUG] citrixadc-provider: Requrlinto has changed for transformaction %s, starting update", transformactionName)
		transformaction.Requrlinto = d.Get("requrlinto").(string)
		hasChange = true
	}
	if d.HasChange("resurlfrom") {
		log.Printf("[DEBUG] citrixadc-provider: Resurlfrom has changed for transformaction %s, starting update", transformactionName)
		transformaction.Resurlfrom = d.Get("resurlfrom").(string)
		hasChange = true
	}
	if d.HasChange("resurlinto") {
		log.Printf("[DEBUG] citrixadc-provider: Resurlinto has changed for transformaction %s, starting update", transformactionName)
		transformaction.Resurlinto = d.Get("resurlinto").(string)
		hasChange = true
	}
	if d.HasChange("state") {
		log.Printf("[DEBUG] citrixadc-provider: State has changed for transformaction %s, starting update", transformactionName)
		transformaction.State = d.Get("state").(string)
		hasChange = true
	}
	if hasChange {
		_, err := client.UpdateResource(service.Transformaction.Type(), transformactionName, &transformaction)
		if err != nil {
			// Include the underlying NITRO error; the original message
			// discarded it, which made failures impossible to diagnose.
			return fmt.Errorf("Error updating transformaction %s: %s", transformactionName, err.Error())
		}
	}
	return readTransformactionFunc(d, meta)
}
// deleteTransformactionFunc implements Delete for the transformaction
// resource: removes the object on the ADC, then clears the Terraform id.
func deleteTransformactionFunc(d *schema.ResourceData, meta interface{}) error {
	log.Printf("[DEBUG] citrixadc-provider: In deleteTransformactionFunc")
	client := meta.(*NetScalerNitroClient).client
	name := d.Id()
	if err := client.DeleteResource(service.Transformaction.Type(), name); err != nil {
		return err
	}
	d.SetId("")
	return nil
}
| citrix/terraform-provider-netscaler | citrixadc/resource_citrixadc_transformaction.go | GO | apache-2.0 | 8,186 |
<?php
namespace BuyPlayTix\DataBean;
class ObjectAdapter implements IAdapter
{

    /** Directory where the serialized test database files are stored. */
    public static $DB_DIR;

    /** @var array table name => list of row arrays (field => value). */
    private $tables = [];

    /** @var array canned results for named_query(), keyed by query name. */
    private $queries = [];

    public function __construct()
    {}

    /**
     * Load a databean by primary key or by a single [field, value] pair.
     * If no matching row exists, the bean is initialized as "new" with a
     * fresh UUID as its primary key.
     */
    function load($databean, $param = "")
    {
        $table = $databean->getTable();
        $pk = $databean->getPk();
        if (! array_key_exists($table, $this->tables)) {
            $this->tables[$table] = [];
        }
        $b = $this->tables[$table];
        if (is_array($param)) {
            foreach ($b as $bean) {
                if (array_key_exists($param[0], $bean) && $bean[$param[0]] == $param[1]) {
                    $databean->fields = $bean;
                    $databean->setNew(false);
                    return $databean;
                }
            }
        } elseif (strlen($param) > 0) {
            foreach ($b as $bean) {
                if (array_key_exists($pk, $bean) && $bean[$pk] == $param) {
                    $databean->fields = $bean;
                    $databean->setNew(false);
                    return $databean;
                }
            }
        }
        $uuid = UUID::get();
        $databean->fields[$pk] = $uuid;
        $databean->setNew(true);
        return $databean;
    }

    /**
     * Load every bean matching an ad-hoc WHERE/GROUP BY/ORDER BY clause.
     * The clause string is parsed into per-field predicates which are then
     * ANDed together against the in-memory rows.
     * NOTE(review): GROUP BY and ORDER BY are parsed out but not applied.
     */
    function loadAll($databean, $field = "", $param = "", $andClause = "")
    {
        if (strlen($field) > 0) {
            if (is_array($param) && count($param) == 1) {
                $whereClause = ' where ' . $field . ' = ' . $param[0];
            } else {
                $valList = $this->_parseList($param);
                $whereClause = ' where ' . $field . ' in ' . $valList;
            }
        } elseif (is_array($param) && count($param) > 0) {
            if (is_array($param) && count($param) == 1) {
                $whereClause = ' where ' . $databean->getPk() . ' = ' . $param[0];
            } else {
                $valList = $this->_parseList($param);
                $whereClause = ' where ' . $databean->getPk() . ' in ' . $valList;
            }
        } else {
            $whereClause = "";
        }
        $group_by = "";
        $order_by = "";
        $where = array();
        $clause = $whereClause . " " . $andClause;
        // Split off trailing GROUP BY / ORDER BY fragments before parsing
        // the remaining text as AND-separated predicates.
        $chunks = preg_split("/\s*GROUP\s+BY\s*/i", $clause);
        if (count($chunks) == 2) {
            $clause = $chunks[0];
            $group_by = $chunks[1];
        }
        $chunks = preg_split("/\s*ORDER\s+BY\s*/i", $clause);
        if (count($chunks) == 2) {
            $clause = $chunks[0];
            $order_by = $chunks[1];
        }
        $clause = preg_replace("/\s*WHERE\s*/i", "", $clause);
        $chunks = preg_split("/\s*AND\s*/i", $clause);
        foreach ($chunks as $chunk) {
            // Predicate forms supported: "f != v", "f = v", "f in (a,b,...)".
            $where_chunks = preg_split("/\s*!=\s*/i", $chunk);
            if (count($where_chunks) == 2) {
                $field = strtoupper(trim($where_chunks[0]));
                $value = trim($where_chunks[1], ' \'');
                $where[$field] = array(
                    "v" => $value,
                    "condition" => "!="
                );
                continue;
            }
            $where_chunks = preg_split("/\s*=\s*/i", $chunk);
            if (count($where_chunks) == 2) {
                $field = strtoupper(trim($where_chunks[0]));
                $value = trim($where_chunks[1], ' \'');
                $where[$field] = array(
                    "v" => $value,
                    "condition" => "="
                );
                continue;
            }
            $where_chunks = preg_split("/\s+in\s*/i", $chunk);
            if (count($where_chunks) == 2) {
                $field = strtoupper(trim($where_chunks[0]));
                $value = str_replace(')', '', str_replace('(', '', $where_chunks[1]));
                $values = explode(',', $value);
                for ($i = 0; $i < count($values); $i ++) {
                    $values[$i] = trim($values[$i], ' \'');
                }
                $where[$field] = array(
                    "v" => $values,
                    "condition" => "="
                );
                continue;
            }
        }
        $databeans = array();
        $table = $databean->getTable();
        $pk = $databean->getPk();
        $b = $this->tables[$table];
        foreach ($b as $bean) {
            $bean_matches = true;
            foreach ($where as $field => $predicate) {
                $value = $predicate["v"];
                $condition = $predicate["condition"];
                if (is_array($value)) {
                    // "in" lists: any one matching value satisfies the predicate.
                    $found_match = false;
                    foreach ($value as $v) {
                        if (array_key_exists($field, $bean) && $this->isMatch($bean[$field], $condition, $v)) {
                            $found_match = true;
                        }
                    }
                    if (! $found_match) {
                        $bean_matches = false;
                    }
                } elseif (!array_key_exists($field, $bean) || ($this->isMatch($bean[$field], $condition, $value) === false)) {
                    $bean_matches = false;
                }
            }
            if ($bean_matches) {
                $className = get_class($databean);
                $newBean = new $className($bean[$pk]);
                $databeans[] = $newBean;
            }
        }
        return $databeans;
    }

    /** Strict comparison helper for the "=" and "!=" predicate conditions. */
    private function isMatch($beanValue, $condition, $value)
    {
        if ($condition === '=') {
            return $beanValue === $value;
        }
        if ($condition === '!=') {
            return $beanValue !== $value;
        }
        return false;
    }

    /**
     * Insert or update the bean's row, keyed on its primary key.
     */
    function update($databean)
    {
        $table = $databean->getTable();
        $pk = $databean->getPk();
        if (array_key_exists($table, $this->tables) === false) {
            $this->tables[$table] = [];
        }
        $existingRowKey = null;
        for($index = 0; $index < count($this->tables[$table]); $index++) {
            $row = $this->tables[$table][$index];
            if (array_key_exists($pk, $row) && $row[$pk] === $databean->$pk) {
                $existingRowKey = $index;
                break;
            }
        }
        $databean->setNew(false);
        if ($existingRowKey === null) {
            $this->tables[$table][] = $databean->getFields();
            return $databean;
        }
        foreach ($databean->getFields() as $key => $value) {
            $this->tables[$table][$existingRowKey][$key] = $value;
        }
        return $databean;
    }

    /**
     * Delete the bean's row by primary key. The bean is returned whether or
     * not a matching row was found (the original fell off the end and
     * returned null when nothing matched).
     */
    function delete($databean)
    {
        $table = $databean->getTable();
        $pk = $databean->getPk();
        if (! array_key_exists($table, $this->tables)) {
            return $databean;
        }
        foreach ($this->tables[$table] as $index => $row) {
            if (array_key_exists($pk, $row) && $row[$pk] === $databean->$pk) {
                unset($this->tables[$table][$index]);
                return $databean;
            }
        }
        return $databean;
    }

    private function get_pk_value($databean)
    {
        $pk_value = $databean->get($databean->getPk());
        if ($pk_value != NULL) {
            return $pk_value;
        }
        return UUID::get();
    }

    /**
     * Delete rows matching the given conditions.
     * Scalar conditions mean equality; array conditions carry an explicit
     * 'condition' and 'value'. Fixed: the '=', '!=' and 'in' array cases were
     * inverted relative to the scalar branch (they deleted NON-matching rows).
     * NOTE(review): conditions are OR-ed here (any match deletes the row),
     * unlike raw_select's AND semantics — confirm this is intended.
     */
    function raw_delete($table, $where_fields = array())
    {
        if (array_key_exists($table, $this->tables) === false) {
            $this->tables[$table] = [];
            return;
        }
        $t = $this->tables[$table];
        foreach ($t as $index => $row) {
            $found_match = false;
            foreach ($where_fields as $name => $v) {
                $value = $v;
                if (is_array($v)) {
                    $condition = $v['condition'];
                    $value = $v['value'];
                    switch ($condition) {
                        case '<>':
                        case '!=':
                            if ($row[$name] != $value) {
                                $found_match = true;
                            }
                            break;
                        case '>':
                            if ($row[$name] > $value) {
                                $found_match = true;
                            }
                            break;
                        case '<':
                            if ($row[$name] < $value) {
                                $found_match = true;
                            }
                            break;
                        case 'in':
                            if (in_array($row[$name], $value)) {
                                $found_match = true;
                            }
                            break;
                        case '=':
                        default:
                            if ($row[$name] == $value) {
                                $found_match = true;
                            }
                            break;
                    }
                } elseif ($row[$name] === $value) {
                    $found_match = true;
                }
            }
            if ($found_match) {
                unset($t[$index]);
            }
        }
        $this->tables[$table] = $t;
    }

    /**
     * Insert a row, failing if an identical row already exists.
     * Fixed: the exception was thrown as unqualified `Exception`, which
     * resolves to the (nonexistent) namespaced class inside
     * BuyPlayTix\DataBean and would fatal instead of signalling the error.
     */
    function raw_insert($table, $fields = array())
    {
        if (! isset($this->tables[$table])) {
            $this->tables[$table] = array();
        }
        $t = $this->tables[$table];
        foreach ($t as $index => $row) {
            $found_match = true;
            foreach ($fields as $name => $value) {
                if ($row[$name] != $value) {
                    $found_match = false;
                }
            }
            if ($found_match) {
                throw new \Exception("Unique constraint failure.");
            }
        }
        $this->tables[$table][] = $fields;
    }

    /**
     * Insert-or-update keyed on the "UID" column (or "ID" if present).
     */
    function raw_replace($table, $fields = []) {
        if (! isset($this->tables[$table])) {
            return $this->raw_insert($table, $fields);
        }
        $pk = "UID";
        if (array_key_exists("ID", $fields)) {
            $pk = "ID";
        }
        $foundRow = false;
        $t = $this->tables[$table];
        foreach ($t as $index => $row) {
            if ($row[$pk] === $fields[$pk]) {
                $foundRow = true;
                foreach($fields as $key => $value) {
                    $this->tables[$table][$index][$key] = $value;
                }
            }
        }
        if (!$foundRow) {
            return $this->raw_insert($table, $fields);
        }
    }

    // TODO: Add order and grouping, aggregate
    /**
     * Select rows matching all conditions (AND semantics), projecting the
     * requested fields or casting each matching row's first field through
     * $cast_class. Fixed: the '<' case compared in the wrong direction and
     * used `>=` (a comparison, not an assignment) on $found_match, so '<'
     * conditions were silently ignored.
     */
    function raw_select($table, $fields = array(), $where_fields = array(), $cast_class = NULL, $order = array(), $group = array())
    {
        if (array_key_exists($table, $this->tables) === false) {
            $this->tables[$table] = array();
        }
        $results = array();
        $t = $this->tables[$table];
        foreach ($t as $index => $row) {
            $found_match = true;
            foreach ($where_fields as $name => $v) {
                $value = $v;
                if (is_array($v)) {
                    $condition = $v['condition'];
                    $value = $v['value'];
                    switch ($condition) {
                        case '<>':
                        case '!=':
                            if ($row[$name] == $value) {
                                $found_match = false;
                            }
                            break;
                        case '>':
                            if ($row[$name] <= $value) {
                                $found_match = false;
                            }
                            break;
                        case '<':
                            if ($row[$name] >= $value) {
                                $found_match = false;
                            }
                            break;
                        case 'in':
                            if (! in_array($row[$name], $value)) {
                                $found_match = false;
                            }
                            break;
                        case '=':
                        default:
                            if ($row[$name] != $value) {
                                $found_match = false;
                            }
                            break;
                    }
                } elseif (!array_key_exists($name, $row) || $row[$name] != $value) {
                    $found_match = false;
                }
            }
            if ($found_match) {
                if ($cast_class != NULL) {
                    $class = new \ReflectionClass($cast_class);
                    $results[] = $class->newInstanceArgs(array(
                        $row[$fields[0]]
                    ));
                } else {
                    $ret_row = array();
                    $aggregation = array();
                    foreach ($fields as $field) {
                        if (is_array($field)) {
                            // Aggregated field: ['name' => ..., 'alias' => ..., 'aggregation' => 'count'|'sum']
                            $field_name = $field['name'];
                            $field_alias = $field['alias'];
                            if (empty($field_alias)) {
                                $field_alias = $field_name;
                            }
                            switch ($field['aggregation']) {
                                case 'count':
                                    if (isset($aggregation[$field_alias])) {
                                        $aggregation[$field_alias] = $aggregation[$field_alias]++;
                                        break;
                                    }
                                    $aggregation[$field_alias] = 1;
                                    break;
                                case 'sum':
                                    if (isset($aggregation[$field_alias])) {
                                        $aggregation[$field_alias] = $aggregation[$field_alias] += $row[$field_name];
                                        break;
                                    }
                                    $aggregation[$field_alias] = $row[$field_name];
                                    break;
                                default:
                                    throw new \Exception("Unknown aggregation type for field $field_name.");
                            }
                        } else {
                            if (array_key_exists($field, $row)) {
                                $ret_row[$field] = $row[$field];
                            } else {
                                $ret_row[$field] = null;
                            }
                        }
                    }
                    foreach ($aggregation as $field_name => $field_value) {
                        $ret_row[$field_name] = $field_value;
                    }
                    $results[] = $ret_row;
                }
            }
        }
        return $results;
    }

    /**
     * Update $fields on every row matching all conditions (AND semantics).
     * Fixed: $condition/$value were extracted from the condition array only
     * AFTER the switch ran, so every comparison tested the row against the
     * whole array; the '>' and '<' cases were also inverted.
     */
    function raw_update($table, $fields = array(), $where_fields = array())
    {
        if (array_key_exists($table, $this->tables) === false) {
            $this->tables[$table] = array();
        }
        $t = $this->tables[$table];
        foreach ($t as $index => $row) {
            $found_match = true;
            foreach ($where_fields as $name => $v) {
                if (is_array($v)) {
                    $condition = $v['condition'];
                    $value = $v['value'];
                    switch ($condition) {
                        case '<>':
                        case '!=':
                            if ($row[$name] == $value) {
                                $found_match = false;
                            }
                            break;
                        case '>':
                            if ($row[$name] <= $value) {
                                $found_match = false;
                            }
                            break;
                        case '<':
                            if ($row[$name] >= $value) {
                                $found_match = false;
                            }
                            break;
                        case 'in':
                            if (! in_array($row[$name], $value)) {
                                $found_match = false;
                            }
                            break;
                        case '=':
                        default:
                            if ($row[$name] != $value) {
                                $found_match = false;
                            }
                            break;
                    }
                } else {
                    $value = $v;
                    if ($row[$name] != $value) {
                        $found_match = false;
                    }
                }
            }
            if ($found_match) {
                foreach ($row as $row_name => $row_value) {
                    foreach ($fields as $field_name => $field_value) {
                        if ($row_name === $field_name) {
                            $this->tables[$table][$index][$row_name] = $field_value;
                        }
                    }
                }
            }
        }
    }

    /** Register a canned value to be returned by named_query(). */
    public function set_named_query_value($name, $value)
    {
        $this->queries[$name] = $value;
    }

    /** Return the canned value registered for $name; the SQL is ignored. */
    function named_query($name, $sql = "", $params = array(), $hash = true)
    {
        if (! array_key_exists($name, $this->queries)) {
            throw new \Exception("No value set for named query: " . $name);
        }
        return $this->queries[$name];
    }

    /** Render a PHP array as a quoted SQL-style value list: ('a','b'). */
    private function _parseList($param = Array())
    {
        return "('" . implode("','", $param) . "')";
    }

    /** Load tables and named-query values from the serialized files on disk. */
    public function loadDatabase()
    {
        if (file_exists(ObjectAdapter::$DB_DIR . "tables.test.db")) {
            $lock = fopen(ObjectAdapter::$DB_DIR . "tables.test.db", 'rb');
            @flock($lock, LOCK_SH);
            $this->tables = unserialize(file_get_contents(ObjectAdapter::$DB_DIR . "tables.test.db"));
            @flock($lock, LOCK_UN);
            fclose($lock);
        }
        if (file_exists(ObjectAdapter::$DB_DIR . "queries.test.db")) {
            $lock = fopen(ObjectAdapter::$DB_DIR . "queries.test.db", 'rb');
            @flock($lock, LOCK_SH);
            $this->queries = unserialize(file_get_contents(ObjectAdapter::$DB_DIR . "queries.test.db"));
            @flock($lock, LOCK_UN);
            fclose($lock);
        }
    }

    /** Persist tables and named-query values to disk, world-readable/writable. */
    public function saveDatabase()
    {
        file_put_contents(ObjectAdapter::$DB_DIR . "tables.test.db", serialize($this->tables), LOCK_EX);
        file_put_contents(ObjectAdapter::$DB_DIR . "queries.test.db", serialize($this->queries), LOCK_EX);
        if ((fileperms(ObjectAdapter::$DB_DIR . "tables.test.db") & 0777) !== 0766) {
            chmod(ObjectAdapter::$DB_DIR . "tables.test.db", 0766);
        }
        if ((fileperms(ObjectAdapter::$DB_DIR . "queries.test.db") & 0777) !== 0766) {
            chmod(ObjectAdapter::$DB_DIR . "queries.test.db", 0766);
        }
    }

    /** Debug helper: dump the in-memory queries and tables to stdout. */
    public function printDatabase()
    {
        print "Queries: \n";
        print_r($this->queries);
        print "Tables: \n";
        print_r($this->tables);
    }

    /** Reset the in-memory database and persist the empty state. */
    public function clearDatabase()
    {
        $this->tables = [];
        $this->queries = [];
        $this->saveDatabase();
    }
}
| tthomas48/databean | BuyPlayTix/DataBean/ObjectAdapter.php | PHP | apache-2.0 | 19,538 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: cloud.proto
package io.particle.firmwareprotos.ctrl.cloud;
public final class Cloud {
  // NOTE(review): generated by protoc from cloud.proto — regenerate rather
  // than hand-editing.
  private Cloud() {}
  /** Registers this file's proto extensions (none declared) with a lite registry. */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }
  /** Full-runtime overload; delegates to the lite registration above. */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * <pre>
   * Make sure values of this enum match the values defined for the diagnostic info
   * </pre>
   *
   * Protobuf enum {@code particle.ctrl.cloud.ConnectionStatus}
   */
  public enum ConnectionStatus
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>DISCONNECTED = 0;</code>
     */
    DISCONNECTED(0),
    /**
     * <code>CONNECTING = 1;</code>
     */
    CONNECTING(1),
    /**
     * <code>CONNECTED = 2;</code>
     */
    CONNECTED(2),
    /**
     * <code>DISCONNECTING = 3;</code>
     */
    DISCONNECTING(3),
    // Sentinel used when the wire value does not match any known constant.
    UNRECOGNIZED(-1),
    ;
    /**
     * <code>DISCONNECTED = 0;</code>
     */
    public static final int DISCONNECTED_VALUE = 0;
    /**
     * <code>CONNECTING = 1;</code>
     */
    public static final int CONNECTING_VALUE = 1;
    /**
     * <code>CONNECTED = 2;</code>
     */
    public static final int CONNECTED_VALUE = 2;
    /**
     * <code>DISCONNECTING = 3;</code>
     */
    public static final int DISCONNECTING_VALUE = 3;
    /** Returns the wire value; throws for UNRECOGNIZED, which has no defined number. */
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ConnectionStatus valueOf(int value) {
      return forNumber(value);
    }
    /** Maps a wire value to its enum constant, or null if unknown. */
    public static ConnectionStatus forNumber(int value) {
      switch (value) {
        case 0: return DISCONNECTED;
        case 1: return CONNECTING;
        case 2: return CONNECTED;
        case 3: return DISCONNECTING;
        default: return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<ConnectionStatus>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        ConnectionStatus> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ConnectionStatus>() {
            public ConnectionStatus findValueByNumber(int number) {
              return ConnectionStatus.forNumber(number);
            }
          };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      // This is the first (index 0) enum declared in cloud.proto.
      return io.particle.firmwareprotos.ctrl.cloud.Cloud.getDescriptor().getEnumTypes().get(0);
    }
    private static final ConnectionStatus[] VALUES = values();
    public static ConnectionStatus valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private ConnectionStatus(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:particle.ctrl.cloud.ConnectionStatus)
  }
  // Accessor interface for GetConnectionStatusRequest; the message declares
  // no fields, so no accessors are generated.
  public interface GetConnectionStatusRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:particle.ctrl.cloud.GetConnectionStatusRequest)
      com.google.protobuf.MessageOrBuilder {
  }
/**
* <pre>
* Get the cloud connection status
* </pre>
*
* Protobuf type {@code particle.ctrl.cloud.GetConnectionStatusRequest}
*/
public static final class GetConnectionStatusRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:particle.ctrl.cloud.GetConnectionStatusRequest)
GetConnectionStatusRequestOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetConnectionStatusRequest.newBuilder() to construct.
    private GetConnectionStatusRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetConnectionStatusRequest() {
    }
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message declares no fields, so
    // parsing only accumulates unknown fields until end of stream (tag 0).
    private GetConnectionStatusRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownFieldProto3(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.class, io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.Builder.class);
    }
    // -1 = not computed yet; 0 = not initialized; 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      unknownFields.writeTo(output);
    }
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest)) {
        return super.equals(obj);
      }
      io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest other = (io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest) obj;
      boolean result = true;
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parseFrom overloads for every supported input type.
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        java.nio.ByteBuffer data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* <pre>
* Get the cloud connection status
* </pre>
*
* Protobuf type {@code particle.ctrl.cloud.GetConnectionStatusRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:particle.ctrl.cloud.GetConnectionStatusRequest)
io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.class, io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.Builder.class);
      }
      // Construct using io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No repeated/message fields exist, so nothing to pre-build here.
        if (com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor;
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest getDefaultInstanceForType() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.getDefaultInstance();
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest build() {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest buildPartial() {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest result = new io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest(this);
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest) {
          return mergeFrom((io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
public Builder mergeFrom(io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest other) {
if (other == io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:particle.ctrl.cloud.GetConnectionStatusRequest)
}
  // @@protoc_insertion_point(class_scope:particle.ctrl.cloud.GetConnectionStatusRequest)
  // Shared immutable default instance; all empty requests alias this object.
  private static final io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest();
  }
  public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless singleton parser; delegates to the parsing constructor.
  private static final com.google.protobuf.Parser<GetConnectionStatusRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetConnectionStatusRequest>() {
    public GetConnectionStatusRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetConnectionStatusRequest(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<GetConnectionStatusRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetConnectionStatusRequest> getParserForType() {
    return PARSER;
  }
  public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
  public interface GetConnectionStatusReplyOrBuilder extends
      // @@protoc_insertion_point(interface_extends:particle.ctrl.cloud.GetConnectionStatusReply)
      com.google.protobuf.MessageOrBuilder {
    /**
     * Returns the raw wire value of the status enum (useful for values not
     * known to this generated code).
     *
     * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
     */
    int getStatusValue();
    /**
     * Returns the status as an enum constant; unknown wire values map to
     * {@code ConnectionStatus.UNRECOGNIZED}.
     *
     * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
     */
    io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus getStatus();
  }
  /**
   * Protobuf type {@code particle.ctrl.cloud.GetConnectionStatusReply}
   *
   * <p>Reply carrying a single {@code ConnectionStatus} enum field.
   * protoc-generated code; do not edit by hand - regenerate from cloud.proto.</p>
   */
  public static final class GetConnectionStatusReply extends
      com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:particle.ctrl.cloud.GetConnectionStatusReply)
      GetConnectionStatusReplyOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use GetConnectionStatusReply.newBuilder() to construct.
    private GetConnectionStatusReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetConnectionStatusReply() {
      status_ = 0;
    }
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads the wire format; unrecognized tags are kept
    // in unknownFields (proto3 semantics).
    private GetConnectionStatusReply(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownFieldProto3(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1 (status), wire type 0: raw enum value is stored as-is
              int rawValue = input.readEnum();
              status_ = rawValue;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusReply_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.class, io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.Builder.class);
    }
    public static final int STATUS_FIELD_NUMBER = 1;
    private int status_;
    /**
     * Raw wire value of the status enum.
     *
     * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
     */
    public int getStatusValue() {
      return status_;
    }
    /**
     * Status as an enum constant; unknown wire values map to UNRECOGNIZED.
     *
     * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
     */
    public io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus getStatus() {
      io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus result = io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.valueOf(status_);
      return result == null ? io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.UNRECOGNIZED : result;
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: proto3 messages have no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    // proto3: the field is only written when it differs from the default
    // (DISCONNECTED == 0).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (status_ != io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.DISCONNECTED.getNumber()) {
        output.writeEnum(1, status_);
      }
      unknownFields.writeTo(output);
    }
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (status_ != io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.DISCONNECTED.getNumber()) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, status_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply)) {
        return super.equals(obj);
      }
      io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply other = (io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply) obj;
      boolean result = true;
      result = result && status_ == other.status_;
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (37 * hash) + STATUS_FIELD_NUMBER;
      hash = (53 * hash) + status_;
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        java.nio.ByteBuffer data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    /** Returns a Builder seeded with this message's data; a fresh Builder when called on the default instance. */
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code particle.ctrl.cloud.GetConnectionStatusReply}
     *
     * <p>Generated builder; mutates a local {@code status_} copy and produces
     * an immutable message via {@link #build()}.</p>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:particle.ctrl.cloud.GetConnectionStatusReply)
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReplyOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor;
      }
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusReply_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.class, io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.Builder.class);
      }
      // Construct using io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Intentionally empty: the enum field needs no nested field builder.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        status_ = 0;
        return this;
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor;
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply getDefaultInstanceForType() {
        return io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.getDefaultInstance();
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply build() {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply buildPartial() {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply result = new io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply(this);
        result.status_ = status_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply) {
          return mergeFrom((io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply other) {
        if (other == io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply.getDefaultInstance()) return this;
        // proto3 merge semantics: only non-default values overwrite.
        if (other.status_ != 0) {
          setStatusValue(other.getStatusValue());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Parses from a stream; partially parsed data is merged in even when an
      // exception is rethrown (finally block).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int status_ = 0;
      /**
       * Raw wire value of the status enum.
       *
       * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
       */
      public int getStatusValue() {
        return status_;
      }
      /**
       * Sets the status from a raw wire value (no validation against known
       * enum constants).
       *
       * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
       */
      public Builder setStatusValue(int value) {
        status_ = value;
        onChanged();
        return this;
      }
      /**
       * Status as an enum constant; unknown wire values map to UNRECOGNIZED.
       *
       * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
       */
      public io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus getStatus() {
        io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus result = io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.valueOf(status_);
        return result == null ? io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus.UNRECOGNIZED : result;
      }
      /**
       * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
       */
      public Builder setStatus(io.particle.firmwareprotos.ctrl.cloud.Cloud.ConnectionStatus value) {
        if (value == null) {
          throw new NullPointerException();
        }
        status_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * Resets the status to its proto3 default (DISCONNECTED == 0).
       *
       * <code>.particle.ctrl.cloud.ConnectionStatus status = 1;</code>
       */
      public Builder clearStatus() {
        status_ = 0;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFieldsProto3(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
      // @@protoc_insertion_point(builder_scope:particle.ctrl.cloud.GetConnectionStatusReply)
    }
    // @@protoc_insertion_point(class_scope:particle.ctrl.cloud.GetConnectionStatusReply)
    // Shared immutable default instance (status == DISCONNECTED).
    private static final io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply();
    }
    public static io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Stateless singleton parser; delegates to the parsing constructor.
    private static final com.google.protobuf.Parser<GetConnectionStatusReply>
        PARSER = new com.google.protobuf.AbstractParser<GetConnectionStatusReply>() {
      public GetConnectionStatusReply parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetConnectionStatusReply(input, extensionRegistry);
      }
    };
    public static com.google.protobuf.Parser<GetConnectionStatusReply> parser() {
      return PARSER;
    }
    @java.lang.Override
    public com.google.protobuf.Parser<GetConnectionStatusReply> getParserForType() {
      return PARSER;
    }
    public io.particle.firmwareprotos.ctrl.cloud.Cloud.GetConnectionStatusReply getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
  // Descriptor handles and reflective accessor tables for the two message
  // types; populated exactly once by the static initializer below.
  private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor;
  private static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_fieldAccessorTable;
  private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor;
  private static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_particle_ctrl_cloud_GetConnectionStatusReply_fieldAccessorTable;
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto of cloud.proto in protoc's string-escape
    // form; must not be altered by hand.
    java.lang.String[] descriptorData = {
      "\n\013cloud.proto\022\023particle.ctrl.cloud\032\020exte" +
      "nsions.proto\"#\n\032GetConnectionStatusReque" +
      "st:\005\210\265\030\254\002\"Q\n\030GetConnectionStatusReply\0225\n" +
      "\006status\030\001 \001(\0162%.particle.ctrl.cloud.Conn" +
      "ectionStatus*V\n\020ConnectionStatus\022\020\n\014DISC" +
      "ONNECTED\020\000\022\016\n\nCONNECTING\020\001\022\r\n\tCONNECTED\020" +
      "\002\022\021\n\rDISCONNECTING\020\003B\'\n%io.particle.firm" +
      "wareprotos.ctrl.cloudb\006proto3"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          io.particle.firmwareprotos.ctrl.Extensions.getDescriptor(),
        }, assigner);
    internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_particle_ctrl_cloud_GetConnectionStatusRequest_descriptor,
        new java.lang.String[] { });
    internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_particle_ctrl_cloud_GetConnectionStatusReply_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_particle_ctrl_cloud_GetConnectionStatusReply_descriptor,
        new java.lang.String[] { "Status", });
    // Re-resolve custom options (the type_id extension) now that the
    // extension registry is available.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(io.particle.firmwareprotos.ctrl.Extensions.typeId);
    com.google.protobuf.Descriptors.FileDescriptor
        .internalUpdateFileDescriptor(descriptor, registry);
    io.particle.firmwareprotos.ctrl.Extensions.getDescriptor();
  }
// @@protoc_insertion_point(outer_class_scope)
}
| spark/photon-tinker-android | firmwareprotos/src/main/java/io/particle/firmwareprotos/ctrl/cloud/Cloud.java | Java | apache-2.0 | 42,941 |
/*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2017 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
/* global ActiveXObject:false */
// Provides control sap.m.PDFViewer.
sap.ui.define([
"jquery.sap.global",
"./library",
"sap/ui/core/Control",
"sap/ui/Device",
"sap/m/PDFViewerRenderManager",
"sap/m/MessageBox"
],
function (jQuery, library, Control, Device, PDFViewerRenderManager, MessageBox) {
"use strict";
var aAllowedMimeTypes = Object.freeze([
"application/pdf",
"application/x-google-chrome-pdf"
]);
function isSupportedMimeType(sMimeType) {
var iFoundIndex = aAllowedMimeTypes.indexOf(sMimeType);
return iFoundIndex > -1;
}
/**
* Definition of PDFViewer control
*
* @param {string} [sId] id for the new control, generated automatically if no id is given
* @param {object} [mSettings] initial settings for the new control
*
* @class
* This control enables you to display PDF documents within your app.
* It can be embedded in your user interface layout, or you can set it to open in a popup dialog.
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version 1.50.8
*
* @constructor
* @public
* @alias sap.m.PDFViewer
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var PDFViewer = Control.extend("sap.m.PDFViewer",
/** @lends sap.m.PDFViewer.prototype */
{
metadata: {
library: "sap.m",
properties: {
/**
* Defines the height of the PDF viewer control, respective to the height of
* the parent container. Can be set to a percent, pixel, or em value.
*/
height: {type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%"},
/**
* Defines the width of the PDF viewer control, respective to the width of the
* parent container. Can be set to a percent, pixel, or em value.
*/
width: {type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%"},
/**
* Specifies the path to the PDF file to display. Can be set to a relative or
* an absolute path.
*/
source: {type: "sap.ui.core.URI", group: "Misc", defaultValue: null},
/**
* A custom error message that is displayed when the PDF file cannot be loaded.
* @deprecated As of version 1.50.0, replaced by {@link sap.m.PDFViewer#getErrorPlaceholderMessage()}.
*/
errorMessage: {type: "string", group: "Misc", defaultValue: null, deprecated: true},
/**
* A custom text that is displayed instead of the PDF file content when the PDF
* file cannot be loaded.
*/
errorPlaceholderMessage: {type: "string", group: "Misc", defaultValue: null},
/**
* A custom title for the PDF viewer popup dialog. Works only if the PDF viewer
* is set to open in a popup dialog.
* @deprecated As of version 1.50.0, replaced by {@link sap.m.PDFViewer#getTitle()}.
*/
popupHeaderTitle: {type: "string", group: "Misc", defaultValue: null, deprecated: true},
/**
* A custom title for the PDF viewer.
*/
title: {type: "string", group: "Misc", defaultValue: null},
/**
* Shows or hides the download button.
*/
showDownloadButton: {type: "boolean", group: "Misc", defaultValue: true}
},
aggregations: {
/**
* A custom control that can be used instead of the error message specified by the
* errorPlaceholderMessage property.
*/
errorPlaceholder: {type: "sap.ui.core.Control", multiple: false},
/**
* A multiple aggregation for buttons that can be added to the footer of the popup
* dialog. Works only if the PDF viewer is set to open in a popup dialog.
*/
popupButtons: {type: "sap.m.Button", multiple: true, singularName: "popupButton"}
},
events: {
/**
* This event is fired when a PDF file is loaded. If the PDF is loaded in smaller chunks,
* this event is fired as often as defined by the browser's plugin. This may happen after
* a couple chunks are processed.
*/
loaded: {},
/**
* This event is fired when there is an error loading the PDF file.
*/
error: {},
/**
* This event is fired when the PDF viewer control cannot check the loaded content. For
* example, the default configuration of the Mozilla Firefox browser may not allow checking
* the loaded content. This may also happen when the source PDF file is stored in a different
* domain.
* If you want no error message to be displayed when this event is fired, call the
* preventDefault() method inside the event handler.
*/
sourceValidationFailed: {}
}
}
});
/**
* @returns {boolean}
* @private
*/
PDFViewer._isPdfPluginEnabled = function () {
var bIsEnabled = true;
if (Device.browser.firefox) {
// https://bugzilla.mozilla.org/show_bug.cgi?id=1293406
// mimeType is missing for firefox even though it is enabled
return bIsEnabled;
}
if (Device.browser.internet_explorer) {
// hacky code how to recognize that pdf plugin is installed and enabled
try {
/* eslint-disable no-new */
new ActiveXObject("AcroPDF.PDF");
/* eslint-enable no-new */
} catch (e) {
bIsEnabled = false;
}
return bIsEnabled;
}
var aMimeTypes = navigator.mimeTypes;
bIsEnabled = aAllowedMimeTypes.some(function (sAllowedMimeType) {
var oMimeTypeItem = aMimeTypes.namedItem(sAllowedMimeType);
return oMimeTypeItem !== null;
});
return bIsEnabled;
};
/**
* Lifecycle method
*
* @private
*/
PDFViewer.prototype.init = function () {
// helper object that holds the references of nested objects
this._objectsRegister = {};
// state variable that shows the state of popup (rendering of pdf in popup requires it)
this._bIsPopupOpen = false;
this._initPopupControl();
this._initPopupDownloadButtonControl();
this._initPlaceholderMessagePageControl();
this._initToolbarDownloadButtonControl();
this._initOverflowToolbarControl();
this._initControlState();
};
/**
* Setup state variables to default state
*
* @private
*/
PDFViewer.prototype._initControlState = function () {
// state property that control if the embedded pdf should or should not rendered.
this._bRenderPdfContent = true;
// detect that beforeunload was fired (IE only)
this._bOnBeforeUnloadFired = false;
};
PDFViewer.prototype.setWidth = function (sWidth) {
this.setProperty("width", sWidth, true);
var oDomRef = this.$();
if (oDomRef === null) {
return this;
}
oDomRef.css("width", this._getRenderWidth());
return this;
};
PDFViewer.prototype.setHeight = function (sHeight) {
this.setProperty("height", sHeight, true);
var oDomRef = this.$();
if (oDomRef === null) {
return this;
}
oDomRef.css("height", this._getRenderHeight());
return this;
};
PDFViewer.prototype.onBeforeRendering = function () {
// IE things
// because of the detecting error state in IE (double call of unload listener)
// it is important to reset the flag before each render
// otherwise it wrongly detects error state (the unload listener is called once even in valid use case)
this._bOnBeforeUnloadFired = false;
};
/**
 * Lifecycle method. Wires the load/error detection listeners to the freshly rendered
 * iframe and assigns the (validated) PDF source to it.
 *
 * @private
 */
PDFViewer.prototype.onAfterRendering = function () {
	var fnInitIframeElement = function () {
		// cannot use attachBrowserEvent because it attaches the event to the component root node (this.$());
		// the load event does not bubble, so it has to be bound directly to the iframe element
		var oIframeElement = this._getIframeDOMElement();
		var oIframeContentWindow = jQuery(oIframeElement.get(0).contentWindow);
		if (Device.browser.internet_explorer) {
			// IE does not fire a load event for a PDF in an iframe
			// (https://connect.microsoft.com/IE/feedback/details/809377/ie-11-load-event-doesnt-fired-for-pdf-in-iframe)
			// and onerror does not work on IE either. Therefore readystatechange and unload events are used
			// for error detection:
			// - when an invalid response is received (404, etc.), readystatechange is not fired but unload is
			// - when a valid response is received, readystatechange with the 'complete' state arrives
			oIframeContentWindow.on("beforeunload", this._onBeforeUnloadListener.bind(this));
			oIframeContentWindow.on("readystatechange", this._onReadyStateChangeListener.bind(this));
			// some error codes load an html file and fire a load event
			oIframeElement.on("load", this._onLoadIEListener.bind(this));
		} else {
			// normal browsers support load events as the specification says
			oIframeElement.on("load", this._onLoadListener.bind(this));
		}
		oIframeElement.on("error", this._onErrorListener.bind(this));
		// strip a possible fragment from the source and append the parameter that
		// makes the PDF plugin fit the page horizontally
		var sParametrizedSource = this.getSource();
		var iCrossPosition = sParametrizedSource.indexOf("#");
		if (iCrossPosition > -1) {
			sParametrizedSource = sParametrizedSource.substr(0, iCrossPosition);
		}
		sParametrizedSource += "#view=FitH";
		if (jQuery.sap.validateUrl(sParametrizedSource)) {
			oIframeElement.attr("src", encodeURI(sParametrizedSource));
		} else {
			this._fireErrorEvent();
		}
	}.bind(this);
	try {
		this.setBusy(true);
		fnInitIframeElement();
	} catch (error) {
		// _getIframeDOMElement throws when no iframe was rendered; log the reason
		// instead of swallowing it silently, and make sure we do not stay busy forever
		jQuery.sap.log.error(error);
		this.setBusy(false);
	}
};
/**
 * Switches the control into its error state and fires the public "error" event.
 *
 * @private
 */
PDFViewer.prototype._fireErrorEvent = function () {
	this._renderErrorState();
	// third argument allows listeners to call preventDefault on the event
	this.fireEvent("error", {}, true);
};
/**
 * Puts the control into the error state: disables both download buttons, clears the
 * busy indicator and re-renders so that the error placeholder is shown.
 *
 * @private
 */
PDFViewer.prototype._renderErrorState = function () {
	// use distinct variables for the two buttons (the original code re-declared
	// the same `var` twice, shadowing the first button reference)
	var oToolbarDownloadButton = this._objectsRegister.getToolbarDownloadButtonControl();
	oToolbarDownloadButton.setEnabled(false);
	var oPopupDownloadButton = this._objectsRegister.getPopupDownloadButtonControl();
	oPopupDownloadButton.setEnabled(false);
	this.setBusy(false);
	this._bRenderPdfContent = false;
	// call the base class invalidate directly because the error state should be rendered;
	// it is controlled by the _bRenderPdfContent flag. The overridden invalidate of this
	// control would reset the state to the default and try to load and render the pdf again.
	Control.prototype.invalidate.call(this);
};
/**
 * Switches the control into the loaded state (PDF content may be rendered), removes the
 * loading style class from the iframe and fires the public "loaded" event.
 *
 * @private
 */
PDFViewer.prototype._fireLoadedEvent = function () {
	this._bRenderPdfContent = true;
	this.setBusy(false);
	try {
		this._getIframeDOMElement().removeClass("sapMPDFViewerLoading");
	} catch (err) {
		// must use the jQuery.sap logger: `jQuery.log` does not exist and the original
		// call would itself have thrown a TypeError inside this catch block
		jQuery.sap.log.fatal("Iframe not found in loaded event");
		jQuery.sap.log.fatal(err);
	}
	this.fireEvent("loaded");
};
/**
 * Load listener for browsers that fire a regular load event on the iframe.
 * Tries to verify that the loaded content really is a PDF and then fires either
 * the "loaded" or the "error" event.
 *
 * @param oEvent the jQuery load event whose target is the iframe element
 * @private
 */
PDFViewer.prototype._onLoadListener = function (oEvent) {
	try {
		var oTarget = jQuery(oEvent.target),
			bContinue = true;
		// Firefox
		// https://bugzilla.mozilla.org/show_bug.cgi?id=911444
		// because of the embedded pdf plugin in firefox it is not possible to check contentType of the iframe document
		// if the content is pdf. If the content is not a pdf and it is from the same origin, it can be accessed.
		// Other browsers allow access to the mimeType of the iframe's document if the content is from the same origin.
		var sCurrentContentType = "application/pdf";
		try {
			// browsers render a pdf in the iframe as an html page with a single embed tag
			var aEmbeds = oTarget[0].contentWindow.document.embeds;
			bContinue = !!aEmbeds && aEmbeds.length === 1;
			if (bContinue) {
				sCurrentContentType = aEmbeds[0].attributes.getNamedItem("type").value;
			}
		} catch (error) {
			// the document was not accessible (cross-origin or plugin-rendered content);
			// even though the sourceValidationFailed event is fired, the default behaviour is to continue.
			// when preventDefault is called on the event object, the rendering ends up with an error
			if (!Device.browser.firefox && this.fireEvent("sourceValidationFailed", {}, true)) {
				this._showMessageBox();
				return;
			}
		}
		if (bContinue && isSupportedMimeType(sCurrentContentType)) {
			this._fireLoadedEvent();
		} else {
			this._fireErrorEvent();
		}
	} catch (error) {
		// NOTE(review): passing `false` as the first argument logs "false" as the message —
		// this looks like a leftover from a jQuery.sap.assert signature; confirm intent
		jQuery.sap.log.fatal(false, "Fatal error during the handling of load event happened.");
		jQuery.sap.log.fatal(false, error.message);
	}
};
/**
 * Error listener of the iframe; simply forwards to the common error handling.
 *
 * @private
 */
PDFViewer.prototype._onErrorListener = function () {
	this._fireErrorEvent();
};
/**
 * IE-only listener for the iframe document's readystatechange event.
 * Both "interactive" and "complete" are treated as a successful load.
 *
 * @param oEvent the readystatechange event
 * @private
 */
PDFViewer.prototype._onReadyStateChangeListener = function (oEvent) {
	var sReadyState = oEvent.target.readyState;
	// IE11 mostly fires only the "interactive" state even though "complete" should arrive;
	// firing "complete" could not be simulated on IE, therefore the iframe content cannot
	// be validated here and both states count as loaded.
	if (sReadyState === "interactive" || sReadyState === "complete") {
		this._fireLoadedEvent();
	}
};
/**
 * IE-only listener used for error detection.
 * When an invalid response (e.g. 404) is received, IE fires beforeunload twice;
 * the second occurrence therefore signals the error state.
 *
 * @private
 */
PDFViewer.prototype._onBeforeUnloadListener = function () {
	if (!this._bOnBeforeUnloadFired) {
		// first call in this rendering cycle: remember it and wait for a possible second one
		this._bOnBeforeUnloadFired = true;
		return;
	}
	this._fireErrorEvent();
};
/**
 * IE-only load listener. Validates the mime type of the iframe content when it is
 * still accessible and fires the error event for unsupported content.
 *
 * @param oEvent the load event whose currentTarget is the iframe element
 * @private
 */
PDFViewer.prototype._onLoadIEListener = function (oEvent) {
	var sCurrentContentType;
	try {
		// Because of the asynchronicity of events, IE sometimes fires load even after it
		// already unloaded the content and the contentWindow no longer exists. That error
		// case is handled by the beforeunload listener, so this handler only has to bail
		// out. The problem is not a null reference: accessing the contentWindow sometimes
		// raises an 'access denied' error which is not detectable any other way.
		sCurrentContentType = oEvent.currentTarget.contentWindow.document.mimeType;
	} catch (err) {
		return;
	}
	if (!isSupportedMimeType(sCurrentContentType)) {
		this._fireErrorEvent();
	}
};
/**
 * Downloads the PDF file by opening its source in a new browser window/tab.
 *
 * @public
 */
PDFViewer.prototype.downloadPDF = function () {
	var oWindow = window.open(this.getSource());
	// window.open returns null when a popup blocker prevented the window from opening;
	// calling focus() on null would throw, so guard against it
	if (oWindow) {
		oWindow.focus();
	}
};
/**
 * Close handler of the source-validation message box: CANCEL renders the error
 * state, any other action continues as if the content had loaded successfully.
 *
 * @param string oClickedButtonId id of the button the user clicked
 * @private
 */
PDFViewer.prototype._onSourceValidationErrorMessageBoxCloseListener = function (oClickedButtonId) {
	if (oClickedButtonId !== MessageBox.Action.CANCEL) {
		this._fireLoadedEvent();
	} else {
		this._renderErrorState();
	}
};
/**
 * After-close handler of the popup dialog: detaches this control from the popup
 * content and resets the popup-open flag.
 *
 * @param oEvent the popup close event
 * @private
 */
PDFViewer.prototype._onAfterPopupClose = function (oEvent) {
	var oPopup = this._objectsRegister.getPopup();
	// the content has to be cleared from the dom
	oPopup.removeAllContent();
	this._bIsPopupOpen = false;
};
/**
 * Decides whether the PDF content itself may be rendered: a browser PDF plugin has to
 * be available, the control has to be in the loaded state, and a source has to be set.
 *
 * @returns {boolean}
 * @private
 */
PDFViewer.prototype._shouldRenderPdfContent = function () {
	if (!PDFViewer._isPdfPluginEnabled()) {
		return false;
	}
	return this._bRenderPdfContent && this.getSource() !== null;
};
/**
 * Checks whether the configured source is usable at all, i.e. neither
 * undefined, null, nor an empty string.
 *
 * @returns {boolean}
 * @private
 */
PDFViewer.prototype._isSourceValidToDisplay = function () {
	var sSource = this.getSource();
	return typeof sSource !== "undefined" && sSource !== null && sSource !== "";
};
/**
 * Triggers rerendering of this element and its children.
 * Re-initializes the internal control state before delegating to the base
 * implementation, so a fresh load/render cycle starts.
 *
 * @param {sap.ui.base.ManagedObject} [oOrigin] Child control for which the method was called
 *
 * @public
 */
PDFViewer.prototype.invalidate = function (oOrigin) {
	this._initControlState();
	Control.prototype.invalidate.call(this, oOrigin);
};
/**
 * Opens the PDF viewer in a popup dialog (desktop) or in a new window (other devices).
 * Does nothing but assert when the configured source is not valid.
 *
 * @public
 */
PDFViewer.prototype.open = function () {
	if (!this._isSourceValidToDisplay()) {
		jQuery.sap.assert(false, "The PDF file cannot be opened with the given source. Given source: " + this.getSource());
		return;
	}
	if (this._isEmbeddedModeAllowed()) {
		this._openOnDesktop();
		return;
	}
	this._openOnMobile();
};
/**
 * Handles opening on desktop devices: embeds this control into a popup dialog.
 * No-op when the popup is already open.
 *
 * @private
 */
PDFViewer.prototype._openOnDesktop = function () {
	var oPopup = this._objectsRegister.getPopup();
	if (this._bIsPopupOpen) {
		return;
	}
	// reset internal state and move this control into the popup before showing it
	this._initControlState();
	this._preparePopup(oPopup);
	oPopup.addContent(this);
	this._bIsPopupOpen = true;
	oPopup.open();
};
/**
 * Handles opening on mobile/tablet devices by opening the source in a new window/tab.
 *
 * @private
 */
PDFViewer.prototype._openOnMobile = function () {
	var oWindow = window.open(this.getSource());
	// window.open returns null when a popup blocker intervened; avoid a TypeError
	if (oWindow) {
		oWindow.focus();
	}
};
/**
 * Gets the iframe element from the rendered DOM.
 *
 * @returns {*} jQuery object wrapping the iframe
 * @throws {Error} when no iframe is present in the control's DOM
 * @private
 */
PDFViewer.prototype._getIframeDOMElement = function () {
	var $iframe = this.$().find("iframe");
	var iFoundCount = $iframe.length;
	if (iFoundCount === 0) {
		throw Error("Underlying iframe was not found in DOM.");
	}
	if (iFoundCount > 1) {
		jQuery.sap.log.fatal("Initialization of iframe fails. Reason: the control somehow renders multiple iframes");
	}
	return $iframe;
};
/**
 * Whether the PDF may be embedded inline; only desktop systems render it embedded.
 *
 * @returns {boolean}
 * @private
 */
PDFViewer.prototype._isEmbeddedModeAllowed = function () {
	return Device.system.desktop;
};
/**
 * Returns the sap.m library resource bundle used for all translatable texts.
 *
 * @returns {jQuery.sap.util.ResourceBundle}
 * @private
 */
PDFViewer.prototype._getLibraryResourceBundle = function () {
	return sap.ui.getCore().getLibraryResourceBundle("sap.m");
};
/**
 * Returns the text shown on the error placeholder page: the configured
 * errorPlaceholderMessage when set, otherwise the default library text.
 *
 * @returns {string}
 * @private
 */
PDFViewer.prototype._getMessagePageErrorMessage = function () {
	// read the property only once instead of invoking the getter twice
	var sPlaceholderMessage = this.getErrorPlaceholderMessage();
	return sPlaceholderMessage || this._getLibraryResourceBundle().getText("PDF_VIEWER_PLACEHOLDER_ERROR_TEXT");
};
/**
 * Width used for rendering: full width inside the popup, otherwise the width property.
 *
 * @returns {string}
 * @private
 */
PDFViewer.prototype._getRenderWidth = function () {
	if (this._bIsPopupOpen) {
		return '100%';
	}
	return this.getWidth();
};
/**
 * Height used for rendering: full height inside the popup, otherwise the height property.
 *
 * @returns {string}
 * @private
 */
PDFViewer.prototype._getRenderHeight = function () {
	if (this._bIsPopupOpen) {
		return '100%';
	}
	return this.getHeight();
};
/**
 * Shows the warning message box that asks the user how to proceed when the
 * source could not be validated (OK continues, CANCEL renders the error state).
 *
 * @private
 */
PDFViewer.prototype._showMessageBox = function () {
	var sMessageText = this._getLibraryResourceBundle().getText("PDF_VIEWER_SOURCE_VALIDATION_MESSAGE_TEXT");
	var sTitleText = this._getLibraryResourceBundle().getText("PDF_VIEWER_SOURCE_VALIDATION_MESSAGE_HEADER");
	MessageBox.show(sMessageText, {
		icon: MessageBox.Icon.WARNING,
		title: sTitleText,
		actions: [MessageBox.Action.OK, MessageBox.Action.CANCEL],
		defaultAction: MessageBox.Action.CANCEL,
		id: this.getId() + "-validationErrorSourceMessageBox",
		styleClass: "sapUiSizeCompact",
		contentWidth: '100px',
		onClose: this._onSourceValidationErrorMessageBoxCloseListener.bind(this)
	});
};
/**
 * Lifecycle method. Destroys every object that was registered (and actually created)
 * in the internal objects register.
 *
 * @private
 */
PDFViewer.prototype.exit = function () {
	jQuery.each(this._objectsRegister, function (iIndex, fnGetObject) {
		// the register stores factory getters; assumes `true` means "return only an
		// already created instance, do not lazily create one" — TODO confirm
		var oObject = fnGetObject(true);
		if (oObject) {
			oObject.destroy();
		}
	});
};
PDFViewerRenderManager.extendPdfViewer(PDFViewer);
return PDFViewer;
}, /* bExport= */ true);
| thbonk/electron-openui5-boilerplate | libs/openui5-runtime/resources/sap/m/PDFViewer-dbg.js | JavaScript | apache-2.0 | 19,266 |
// Module definition for the madson packer: a single callable that encodes plain
// values into buffers and decodes buffers back into values.
var madson = (function () {
	// NOTE(review): `import` declarations are not valid inside a function body in standard
	// ES modules — these lines presumably get inlined/rewritten by the project's build
	// step, which also supplies EncodeBuffer, DecodeBuffer, encode, decode, cloneDecycle,
	// retroCycle, extend, Codec and preset; confirm against the build configuration.
	import "nxutils/generic/base";
	import "./utils";
	import "./write";
	import "./read";
	import "./preset";
	// Encodes an arbitrary value; cyclic references are removed via cloneDecycle first.
	function mEncode(input, options) {
		var encoder = new EncodeBuffer(options);
		encode(encoder, cloneDecycle(input));
		return encoder.read();
	}
	// Decodes a buffer; retroCycle restores cyclic references removed during encoding.
	function mDecode(input, options) {
		var decoder = new DecodeBuffer(options);
		decoder.append(input);
		return retroCycle(decode(decoder));
	}
	// Dual-purpose entry point: Buffers are decoded, everything else is encoded.
	function packer(input) {
		return Buffer.isBuffer(input) ? mDecode(input) : mEncode(input);
	}
	// Expose helpers on the callable itself.
	var madson = extend(packer, {
		createCodec: Codec,
		codec: { preset: preset },
		encode: mEncode,
		decode: mDecode
	});
	// NOTE(review): "isServer" is a non-empty string literal and therefore always truthy,
	// so the `global.madson` branch is unreachable as written — this looks like a
	// placeholder the bundler replaces with a real environment flag; confirm.
	if ("isServer") {
		if (typeof module == 'object')
			module.exports = madson;
	}
	else global.madson = madson;
	return madson;
})();
| arash16/madson | src/index.js | JavaScript | apache-2.0 | 901 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl;
import com.intellij.ide.RemoteDesktopService;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.ThreeComponentsSplitter;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowType;
import com.intellij.openapi.wm.WindowInfo;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.reference.SoftReference;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.components.JBLayeredPane;
import com.intellij.ui.paint.PaintUtil;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.scale.ScaleContext;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ui.ImageUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.Point2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import static com.intellij.util.ui.UIUtil.useSafely;
/**
* This panel contains all tool stripes and JLayeredPane at the center area. All tool windows are
* located inside this layered pane.
*
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class ToolWindowsPane extends JBLayeredPane implements UISettingsListener {
  private static final Logger LOG = Logger.getInstance(ToolWindowsPane.class);
  @NonNls public static final String TEMPORARY_ADDED = "TEMPORARY_ADDED";
  // Top-level frame that hosts this pane.
  private final JFrame frame;
  // Mutable UI state: stripe overlay mode, maximized tool window, saved split proportions.
  private ToolWindowPaneState state = new ToolWindowPaneState();
  /**
   * This panel is the layered pane where all sliding tool windows are located. The DEFAULT
   * layer contains splitters. The PALETTE layer contains all sliding tool windows.
   */
  private final MyLayeredPane layeredPane;
  /*
   * Splitters: docked tool windows live on the first/last components,
   * the document area (or the other splitter) in the middle.
   */
  private final ThreeComponentsSplitter verticalSplitter;
  private final ThreeComponentsSplitter horizontalSplitter;
  /*
   * Tool stripes (classic UI), one per edge of the pane.
   */
  private final Stripe leftStripe;
  private final Stripe rightStripe;
  private final Stripe bottomStripe;
  private final Stripe topStripe;
  private final List<Stripe> stripes = new ArrayList<>(4);
  // Cached values of the corresponding UISettings flags, used to detect changes in updateLayout().
  private boolean isWideScreen;
  private boolean leftHorizontalSplit;
  private boolean rightHorizontalSplit;
  // Default tool-window ids for the new (square) stripes; filled lazily by ensureDefaultInitialized().
  private List<String> myDefaultRightButtons = new ArrayList<>();
  private List<String> myDefaultLeftButtons = new ArrayList<>();
  private List<String> myDefaultBottomButtons = new ArrayList<>();
  // Square-stripe toolbars of the new UI; may be null when the new stripes UI is not active.
  @Nullable private final ToolwindowToolbar myLeftToolbar;
  @Nullable private final ToolwindowToolbar myRightToolbar;
  /**
   * Builds the pane: configures the two nested three-component splitters (orientation
   * depends on the wide-screen setting), creates the four tool stripes, and assembles
   * the layered pane that hosts sliding tool windows.
   */
  ToolWindowsPane(@NotNull JFrame frame,
                  @NotNull Disposable parentDisposable,
                  @Nullable ToolwindowToolbar leftSidebar,
                  @Nullable ToolwindowToolbar rightSidebar) {
    myLeftToolbar = leftSidebar;
    myRightToolbar = rightSidebar;
    setOpaque(false);
    this.frame = frame;
    // splitters
    verticalSplitter = new ThreeComponentsSplitter(true, parentDisposable);
    RegistryValue registryValue = Registry.get("ide.mainSplitter.min.size");
    // keep the splitter minimum size in sync with the registry value
    registryValue.addListener(new RegistryValueListener() {
      @Override
      public void afterValueChanged(@NotNull RegistryValue value) {
        updateInnerMinSize(value);
      }
    }, parentDisposable);
    verticalSplitter.setDividerWidth(0);
    verticalSplitter.setDividerMouseZoneSize(Registry.intValue("ide.splitter.mouseZone"));
    verticalSplitter.setBackground(Color.gray);
    horizontalSplitter = new ThreeComponentsSplitter(false, parentDisposable);
    horizontalSplitter.setDividerWidth(0);
    horizontalSplitter.setDividerMouseZoneSize(Registry.intValue("ide.splitter.mouseZone"));
    horizontalSplitter.setBackground(Color.gray);
    updateInnerMinSize(registryValue);
    UISettings uiSettings = UISettings.getInstance();
    isWideScreen = uiSettings.getWideScreenSupport();
    leftHorizontalSplit = uiSettings.getLeftHorizontalSplit();
    rightHorizontalSplit = uiSettings.getRightHorizontalSplit();
    // in wide-screen mode the horizontal splitter is outermost; otherwise the vertical one
    if (isWideScreen) {
      horizontalSplitter.setInnerComponent(verticalSplitter);
    }
    else {
      verticalSplitter.setInnerComponent(horizontalSplitter);
    }
    // tool stripes
    topStripe = new Stripe(SwingConstants.TOP);
    stripes.add(topStripe);
    leftStripe = new Stripe(SwingConstants.LEFT);
    stripes.add(leftStripe);
    bottomStripe = new Stripe(SwingConstants.BOTTOM);
    stripes.add(bottomStripe);
    rightStripe = new Stripe(SwingConstants.RIGHT);
    stripes.add(rightStripe);
    updateToolStripesVisibility(uiSettings);
    // layered pane: the outermost splitter goes into the DEFAULT layer
    layeredPane = new MyLayeredPane(isWideScreen ? horizontalSplitter : verticalSplitter);
    // compose layout; stripes sit above the layered pane in the POPUP layer
    add(topStripe, JLayeredPane.POPUP_LAYER);
    add(leftStripe, JLayeredPane.POPUP_LAYER);
    add(bottomStripe, JLayeredPane.POPUP_LAYER);
    add(rightStripe, JLayeredPane.POPUP_LAYER);
    add(layeredPane, JLayeredPane.DEFAULT_LAYER);
    setFocusTraversalPolicy(new LayoutFocusTraversalPolicy());
  }
void initDocumentComponent(@NotNull Project project) {
JComponent editorComponent = FileEditorManagerEx.getInstanceEx(project).getComponent();
editorComponent.setFocusable(false);
setDocumentComponent(editorComponent);
}
private void updateInnerMinSize(@NotNull RegistryValue value) {
int minSize = Math.max(0, Math.min(100, value.asInteger()));
verticalSplitter.setMinSize(JBUIScale.scale(minSize));
horizontalSplitter.setMinSize(JBUIScale.scale(minSize));
}
  /**
   * Lays out the four stripes around the edges and the layered pane in the remaining
   * center area. When stripes are hidden, the layered pane takes the whole bounds.
   */
  @Override
  public void doLayout() {
    Dimension size = getSize();
    if (!topStripe.isVisible()) {
      // stripes hidden entirely: collapse them and let the layered pane fill the pane
      topStripe.setBounds(0, 0, 0, 0);
      bottomStripe.setBounds(0, 0, 0, 0);
      leftStripe.setBounds(0, 0, 0, 0);
      rightStripe.setBounds(0, 0, 0, 0);
      layeredPane.setBounds(0, 0, getWidth(), getHeight());
    }
    else {
      Dimension topSize = topStripe.getPreferredSize();
      Dimension bottomSize = bottomStripe.getPreferredSize();
      Dimension leftSize = leftStripe.getPreferredSize();
      Dimension rightSize = rightStripe.getPreferredSize();
      // top/bottom stripes span the full width; left/right fill the height in between
      topStripe.setBounds(0, 0, size.width, topSize.height);
      int height = size.height - topSize.height - bottomSize.height;
      leftStripe.setBounds(0, topSize.height, leftSize.width, height);
      rightStripe.setBounds(size.width - rightSize.width, topSize.height, rightSize.width, height);
      bottomStripe.setBounds(0, size.height - bottomSize.height, size.width, bottomSize.height);
      UISettings uiSettings = UISettings.getInstance();
      if (uiSettings.getHideToolStripes() || uiSettings.getPresentationMode()) {
        // NOTE(review): with the square-stripe UI the stripes are never visible (see
        // updateToolStripesVisibility), so reaching this branch with isSquareStripeUI()
        // true looks unlikely — and it leaves layeredPane's bounds untouched; confirm.
        if (isSquareStripeUI()) {
          updateSquareStripes(false);
        } else {
          layeredPane.setBounds(0, 0, size.width, size.height);
        }
      }
      else {
        int width = size.width - leftSize.width - rightSize.width;
        layeredPane.setBounds(leftSize.width, topSize.height, width, height);
      }
    }
  }
  // True when the experimental "square stripes" UI is enabled via the registry.
  private static boolean isSquareStripeUI() {
    return Registry.is("ide.new.stripes.ui");
  }
  /**
   * Reacts to UI settings changes: first recomputes stripe visibility, then
   * re-arranges the splitters if orientation/split settings changed.
   */
  @Override
  public void uiSettingsChanged(@NotNull UISettings uiSettings) {
    updateToolStripesVisibility(uiSettings);
    updateLayout(uiSettings);
  }
  /**
   * Adds a tool-window decorator to the pane according to its window info.
   * Docked windows go into the splitters (possibly sharing an anchor side with a
   * split neighbour); sliding windows go into the layered pane.
   *
   * @param dirtyMode if {@code true} then JRootPane will not be validated and repainted after adding
   *                  the decorator. Moreover in this (dirty) mode animation doesn't work.
   */
  final void addDecorator(@NotNull JComponent decorator, @NotNull WindowInfo info, boolean dirtyMode, @NotNull ToolWindowManagerImpl manager) {
    if (info.isDocked()) {
      // check whether the opposite half of the same anchor side is already occupied
      boolean side = !info.isSplit();
      WindowInfo sideInfo = manager.getDockedInfoAt(info.getAnchor(), side);
      if (sideInfo == null) {
        // sole occupant of this side: attach directly to the splitter
        ToolWindowAnchor anchor = info.getAnchor();
        setComponent(decorator, anchor, normalizeWeigh(info.getWeight()));
        if (!dirtyMode) {
          layeredPane.validate();
          layeredPane.repaint();
        }
      }
      else {
        // share the side with the existing window via a nested Splitter
        addAndSplitDockedComponentCmd(decorator, info, dirtyMode, manager);
      }
    }
    else if (info.getType() == ToolWindowType.SLIDING) {
      addSlidingComponent(decorator, info, dirtyMode);
    }
    else {
      throw new IllegalArgumentException("Unknown window type: " + info.getType());
    }
  }
  /**
   * Removes a tool-window decorator from the pane. For docked windows that shared an
   * anchor side via a Splitter, the remaining neighbour is promoted to occupy the side
   * alone and the split proportion is remembered for later restoration.
   */
  void removeDecorator(@NotNull WindowInfo info, @Nullable JComponent component, boolean dirtyMode, @NotNull ToolWindowManagerImpl manager) {
    if (info.getType() == ToolWindowType.DOCKED) {
      WindowInfo sideInfo = manager.getDockedInfoAt(info.getAnchor(), !info.isSplit());
      if (sideInfo == null) {
        // no neighbour on this side: just clear the splitter slot
        setComponent(null, info.getAnchor(), 0);
      }
      else {
        ToolWindowAnchor anchor = info.getAnchor();
        JComponent c = getComponentAt(anchor);
        if (c instanceof Splitter) {
          Splitter splitter = (Splitter)c;
          // keep the component that is NOT being removed
          InternalDecoratorImpl component1 = (InternalDecoratorImpl)(info.isSplit() ? splitter.getFirstComponent() : splitter.getSecondComponent());
          state.addSplitProportion(info, component1, splitter);
          setComponent(component1, anchor,
                       component1 == null ? 0 : ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError(component1).getWeight());
        }
        else {
          setComponent(null, anchor, 0);
        }
      }
      if (!dirtyMode) {
        layeredPane.validate();
        layeredPane.repaint();
      }
    }
    else if (info.getType() == ToolWindowType.SLIDING) {
      if (component != null) {
        removeSlidingComponent(component, info, dirtyMode);
      }
    }
  }
  // Central layered pane that hosts the splitters and sliding tool windows.
  public final @NotNull JComponent getLayeredPane() {
    return layeredPane;
  }
public void validateAndRepaint() {
layeredPane.validate();
layeredPane.repaint();
for (Stripe stripe : stripes) {
stripe.revalidate();
stripe.repaint();
}
}
public void revalidateNotEmptyStripes() {
for (Stripe stripe : stripes) {
if (!stripe.isEmpty()) {
stripe.revalidate();
}
}
}
private void setComponent(@Nullable JComponent component, @NotNull ToolWindowAnchor anchor, float weight) {
Dimension size = getSize();
if (ToolWindowAnchor.TOP == anchor) {
verticalSplitter.setFirstComponent(component);
verticalSplitter.setFirstSize((int)(size.getHeight() * weight));
}
else if (ToolWindowAnchor.LEFT == anchor) {
horizontalSplitter.setFirstComponent(component);
horizontalSplitter.setFirstSize((int)(size.getWidth() * weight));
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
verticalSplitter.setLastComponent(component);
verticalSplitter.setLastSize((int)(size.getHeight() * weight));
}
else if (ToolWindowAnchor.RIGHT == anchor) {
horizontalSplitter.setLastComponent(component);
horizontalSplitter.setLastSize((int)(size.getWidth() * weight));
}
else {
LOG.error("unknown anchor: " + anchor);
}
}
private JComponent getComponentAt(@NotNull ToolWindowAnchor anchor) {
if (ToolWindowAnchor.TOP == anchor) {
return verticalSplitter.getFirstComponent();
}
else if (ToolWindowAnchor.LEFT == anchor) {
return horizontalSplitter.getFirstComponent();
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
return verticalSplitter.getLastComponent();
}
else if (ToolWindowAnchor.RIGHT == anchor) {
return horizontalSplitter.getLastComponent();
}
else {
LOG.error("unknown anchor: " + anchor);
return null;
}
}
  // Places the document (editor) component into the innermost slot of the outer splitter.
  private void setDocumentComponent(@Nullable JComponent component) {
    (isWideScreen ? verticalSplitter : horizontalSplitter).setInnerComponent(component);
  }
  /**
   * Recomputes the visibility/overlay mode of the classic stripes and of the new square
   * toolbars from the given settings, revalidating when visibility actually changed.
   */
  private void updateToolStripesVisibility(@NotNull UISettings uiSettings) {
    boolean oldVisible = leftStripe.isVisible();
    boolean showButtons = !uiSettings.getHideToolStripes() && !uiSettings.getPresentationMode();
    // classic stripes are never shown while the square-stripe UI is active
    boolean visible = (showButtons || state.isStripesOverlaid()) && !isSquareStripeUI();
    leftStripe.setVisible(visible);
    rightStripe.setVisible(visible);
    topStripe.setVisible(visible);
    bottomStripe.setVisible(visible);
    if (myLeftToolbar != null && myRightToolbar != null) {
      boolean oldSquareVisible = myLeftToolbar.isVisible() && myRightToolbar.isVisible();
      boolean squareStripesVisible = isSquareStripeUI() && showButtons;
      updateSquareStripes(squareStripesVisible);
      if (isSquareStripeUI() && oldSquareVisible != squareStripesVisible) {
        revalidate();
        repaint();
      }
    }
    // overlay mode: stripes are hidden by settings but temporarily painted on top
    boolean overlayed = !showButtons && state.isStripesOverlaid();
    leftStripe.setOverlayed(overlayed);
    rightStripe.setOverlayed(overlayed);
    topStripe.setOverlayed(overlayed);
    bottomStripe.setOverlayed(overlayed);
    if (oldVisible != visible) {
      revalidate();
      repaint();
    }
  }
private void updateSquareStripes(boolean squareStripesVisible) {
if (myLeftToolbar != null && myRightToolbar != null) {
myLeftToolbar.setVisible(squareStripesVisible);
myRightToolbar.setVisible(squareStripesVisible);
}
}
public int getBottomHeight() {
return bottomStripe.isVisible() ? bottomStripe.getHeight() : 0;
}
  // True when some tool window is currently docked at the bottom edge.
  public boolean isBottomSideToolWindowsVisible() {
    return getComponentAt(ToolWindowAnchor.BOTTOM) != null;
  }
@NotNull
Stripe getStripeFor(@NotNull ToolWindowAnchor anchor) {
if (ToolWindowAnchor.TOP == anchor) {
return topStripe;
}
if (ToolWindowAnchor.BOTTOM == anchor) {
return bottomStripe;
}
if (ToolWindowAnchor.LEFT == anchor) {
return leftStripe;
}
if (ToolWindowAnchor.RIGHT == anchor) {
return rightStripe;
}
throw new IllegalArgumentException("Anchor=" + anchor);
}
@Nullable
Stripe getStripeFor(@NotNull Rectangle screenRectangle, @NotNull Stripe preferred) {
if (preferred.containsScreen(screenRectangle)) {
return preferred;
}
for (Stripe stripe : stripes) {
if (stripe.containsScreen(screenRectangle)) {
return stripe;
}
}
return null;
}
@Nullable
ToolwindowToolbar getSquareStripeFor(@NotNull ToolWindowAnchor anchor) {
if (ToolWindowAnchor.TOP == anchor || ToolWindowAnchor.RIGHT == anchor) return myRightToolbar;
if (ToolWindowAnchor.BOTTOM == anchor || ToolWindowAnchor.LEFT == anchor) return myLeftToolbar;
throw new IllegalArgumentException("Anchor=" + anchor);
}
void startDrag() {
for (Stripe each : stripes) {
each.startDrag();
}
}
void stopDrag() {
for (Stripe stripe : stripes) {
stripe.stopDrag();
}
}
  /** Grows/shrinks the given tool window by {@code value} (delegates to {@link #stretch}). */
  void stretchWidth(@NotNull ToolWindow window, int value) {
    stretch(window, value);
  }
  /** Grows/shrinks the given tool window by {@code value} (delegates to {@link #stretch}). */
  void stretchHeight(@NotNull ToolWindow window, int value) {
    stretch(window, value);
  }
  /**
   * Resizes the given tool window by {@code value} along its resize axis, clamped to
   * the owning splitter's min/max sizes. No-op when no resizer is available
   * (e.g. the window is not visible).
   */
  private void stretch(@NotNull ToolWindow wnd, int value) {
    Pair<Resizer, Component> pair = findResizerAndComponent(wnd);
    if (pair == null) return;
    // top/bottom windows resize vertically, left/right windows horizontally
    boolean vertical = wnd.getAnchor() == ToolWindowAnchor.TOP || wnd.getAnchor() == ToolWindowAnchor.BOTTOM;
    int actualSize = (vertical ? pair.second.getHeight() : pair.second.getWidth()) + value;
    // left/top windows occupy the splitter's first slot
    boolean first = wnd.getAnchor() == ToolWindowAnchor.LEFT || wnd.getAnchor() == ToolWindowAnchor.TOP;
    int maxValue = vertical ? verticalSplitter.getMaxSize(first) : horizontalSplitter.getMaxSize(first);
    int minValue = vertical ? verticalSplitter.getMinSize(first) : horizontalSplitter.getMinSize(first);
    pair.first.setSize(Math.max(minValue, Math.min(maxValue, actualSize)));
  }
  /**
   * Finds the {@link Resizer} strategy and the concrete component to resize for a
   * visible tool window: splitter-based for docked windows, bounds-based (within the
   * layered pane) for sliding windows. Returns {@code null} when nothing can be resized.
   */
  private @Nullable Pair<Resizer, Component> findResizerAndComponent(@NotNull ToolWindow window) {
    if (!window.isVisible()) return null;
    Resizer resizer = null;
    Component component = null;
    if (window.getType() == ToolWindowType.DOCKED) {
      component = getComponentAt(window.getAnchor());
      if (component != null) {
        if (window.getAnchor().isHorizontal()) {
          resizer = verticalSplitter.getFirstComponent() == component
                    ? new Resizer.Splitter.FirstComponent(verticalSplitter)
                    : new Resizer.Splitter.LastComponent(verticalSplitter);
        }
        else {
          resizer = horizontalSplitter.getFirstComponent() == component
                    ? new Resizer.Splitter.FirstComponent(horizontalSplitter)
                    : new Resizer.Splitter.LastComponent(horizontalSplitter);
        }
      }
    }
    else if (window.getType() == ToolWindowType.SLIDING) {
      // walk up from the tool window's component to its direct child of the layered pane
      component = window.getComponent();
      while (component != null) {
        if (component.getParent() == layeredPane) break;
        component = component.getParent();
      }
      if (component != null) {
        if (window.getAnchor() == ToolWindowAnchor.TOP) {
          resizer = new Resizer.LayeredPane.Top(component);
        }
        else if (window.getAnchor() == ToolWindowAnchor.BOTTOM) {
          resizer = new Resizer.LayeredPane.Bottom(component);
        }
        else if (window.getAnchor() == ToolWindowAnchor.LEFT) {
          resizer = new Resizer.LayeredPane.Left(component);
        }
        else if (window.getAnchor() == ToolWindowAnchor.RIGHT) {
          resizer = new Resizer.LayeredPane.Right(component);
        }
      }
    }
    return resizer != null ? Pair.create(resizer, component) : null;
  }
  /**
   * Re-arranges the splitters when one of the cached UISettings flags changed:
   * wide-screen support swaps the splitter nesting order, and the left/right
   * horizontal-split flags re-apply the weights of the affected anchor sides.
   */
  private void updateLayout(@NotNull UISettings uiSettings) {
    if (isWideScreen != uiSettings.getWideScreenSupport()) {
      // swap which splitter is outermost, preserving the document component
      JComponent documentComponent = (isWideScreen ? verticalSplitter : horizontalSplitter).getInnerComponent();
      isWideScreen = uiSettings.getWideScreenSupport();
      if (isWideScreen) {
        verticalSplitter.setInnerComponent(null);
        horizontalSplitter.setInnerComponent(verticalSplitter);
      }
      else {
        horizontalSplitter.setInnerComponent(null);
        verticalSplitter.setInnerComponent(horizontalSplitter);
      }
      layeredPane.remove(isWideScreen ? verticalSplitter : horizontalSplitter);
      layeredPane.add(isWideScreen ? horizontalSplitter : verticalSplitter, DEFAULT_LAYER);
      setDocumentComponent(documentComponent);
    }
    if (leftHorizontalSplit != uiSettings.getLeftHorizontalSplit()) {
      JComponent component = getComponentAt(ToolWindowAnchor.LEFT);
      if (component instanceof Splitter) {
        Splitter splitter = (Splitter)component;
        WindowInfoImpl firstInfo = ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError((InternalDecoratorImpl)splitter.getFirstComponent());
        WindowInfoImpl secondInfo = ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError((InternalDecoratorImpl)splitter.getSecondComponent());
        // vertical split: side keeps the first window's weight; horizontal split: sum of both
        setComponent(splitter, ToolWindowAnchor.LEFT, ToolWindowAnchor.LEFT.isSplitVertically()
                                                      ? firstInfo.getWeight()
                                                      : firstInfo.getWeight() + secondInfo.getWeight());
      }
      leftHorizontalSplit = uiSettings.getLeftHorizontalSplit();
    }
    if (rightHorizontalSplit != uiSettings.getRightHorizontalSplit()) {
      JComponent component = getComponentAt(ToolWindowAnchor.RIGHT);
      if (component instanceof Splitter) {
        Splitter splitter = (Splitter)component;
        WindowInfoImpl firstInfo = ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError((InternalDecoratorImpl)splitter.getFirstComponent());
        WindowInfoImpl secondInfo = ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError((InternalDecoratorImpl)splitter.getSecondComponent());
        setComponent(splitter, ToolWindowAnchor.RIGHT, ToolWindowAnchor.RIGHT.isSplitVertically()
                                                       ? firstInfo.getWeight()
                                                       : firstInfo.getWeight() + secondInfo.getWeight());
      }
      rightHorizontalSplit = uiSettings.getRightHorizontalSplit();
    }
  }
  // Whether the given tool window is currently stretched to its maximum size.
  public boolean isMaximized(@NotNull ToolWindow window) {
    return state.isMaximized(window);
  }
  /**
   * Maximizes the tool window (remembering its previous extent in the pane state) or
   * restores it to the remembered extent. Restoring asserts that the stored maximized
   * window is the one being restored.
   */
  void setMaximized(@NotNull ToolWindow toolWindow, boolean maximized) {
    Pair<Resizer, Component> resizerAndComponent = findResizerAndComponent(toolWindow);
    if (resizerAndComponent == null) {
      return;
    }
    if (maximized) {
      // remember the current extent, then stretch as far as the splitter allows
      int size = toolWindow.getAnchor().isHorizontal() ? resizerAndComponent.second.getHeight() : resizerAndComponent.second.getWidth();
      stretch(toolWindow, Short.MAX_VALUE);
      state.setMaximizedProportion(Pair.create(toolWindow, size));
    }
    else {
      Pair<ToolWindow, Integer> maximizedProportion = state.getMaximizedProportion();
      LOG.assertTrue(maximizedProportion != null);
      ToolWindow maximizedWindow = maximizedProportion.first;
      assert maximizedWindow == toolWindow;
      resizerAndComponent.first.setSize(maximizedProportion.second);
      state.setMaximizedProportion(null);
    }
    doLayout();
  }
void reset() {
for (Stripe stripe : stripes) {
stripe.reset();
}
state = new ToolWindowPaneState();
revalidate();
}
  /**
   * Square-stripe UI only: removes the tool window's button from the matching square
   * toolbar and marks the window as not visible on the large stripe.
   */
  void onStripeButtonRemoved(@NotNull Project project, @NotNull ToolWindow toolWindow) {
    if (!isSquareStripeUI()) return;
    if (myLeftToolbar == null || myRightToolbar == null) return;
    // windows without an icon or availability are never shown on the square stripes
    if (!toolWindow.isAvailable() || toolWindow.getIcon() == null) return;
    toolWindow.setVisibleOnLargeStripe(false);
    ToolWindowAnchor anchor = toolWindow.getLargeStripeAnchor();
    // left and bottom anchors share the left toolbar (cf. getSquareStripeFor)
    if (ToolWindowAnchor.LEFT.equals(anchor) || ToolWindowAnchor.BOTTOM.equals(anchor)) {
      myLeftToolbar.removeStripeButton(project, toolWindow, anchor);
    }
    else if (ToolWindowAnchor.RIGHT.equals(anchor)) {
      myRightToolbar.removeStripeButton(project, toolWindow, anchor);
    }
  }
  /**
   * Square-stripe UI only: adds the tool window's button to the matching square toolbar.
   * Windows that belong to the project's default sets are forced visible on the large
   * stripe at their own anchor; otherwise the supplied {@code actualAnchor} is used.
   */
  void onStripeButtonAdded(@NotNull Project project,
                           @NotNull ToolWindow toolWindow,
                           @NotNull ToolWindowAnchor actualAnchor,
                           @NotNull Comparator<ToolWindow> comparator) {
    if (!isSquareStripeUI()) return;
    if (myLeftToolbar == null || myRightToolbar == null) return;
    ensureDefaultInitialized(project);
    ToolWindowAnchor toolWindowAnchor = toolWindow.getAnchor();
    if (toolWindowAnchor == ToolWindowAnchor.LEFT && myDefaultLeftButtons.contains(toolWindow.getId())
        || toolWindowAnchor == ToolWindowAnchor.RIGHT && myDefaultRightButtons.contains(toolWindow.getId())
        || toolWindowAnchor == ToolWindowAnchor.BOTTOM && myDefaultBottomButtons.contains(toolWindow.getId())) {
      toolWindow.setVisibleOnLargeStripe(true);
      actualAnchor = toolWindowAnchor;
    }
    toolWindow.setLargeStripeAnchor(actualAnchor);
    if (!toolWindow.isAvailable() || toolWindow.getIcon() == null || !toolWindow.isVisibleOnLargeStripe()) return;
    // left and bottom anchors share the left toolbar (cf. getSquareStripeFor)
    if (ToolWindowAnchor.LEFT.equals(actualAnchor) || ToolWindowAnchor.BOTTOM.equals(actualAnchor)) {
      myLeftToolbar.addStripeButton(project, actualAnchor, comparator, toolWindow);
    }
    else if (ToolWindowAnchor.RIGHT.equals(actualAnchor)) {
      myRightToolbar.addStripeButton(project, actualAnchor, comparator, toolWindow);
    }
  }
private void ensureDefaultInitialized(@NotNull Project project) {
String key = "NEW_TOOLWINDOW_STRIPE_DEFAULTS";
if (PropertiesComponent.getInstance(project).isTrueValue(key)) {
return;
}
myDefaultLeftButtons = ToolWindowToolbarProvider.getInstance().defaultBottomToolwindows(project, ToolWindowAnchor.LEFT);
myDefaultRightButtons = ToolWindowToolbarProvider.getInstance().defaultBottomToolwindows(project, ToolWindowAnchor.RIGHT);
myDefaultBottomButtons = ToolWindowToolbarProvider.getInstance().defaultBottomToolwindows(project, ToolWindowAnchor.BOTTOM);
PropertiesComponent.getInstance(project).setValue(key, true);
}
/**
 * Strategy for resizing a docked tool window area to an absolute pixel size.
 * Implementations either drive a {@link ThreeComponentsSplitter} side or directly
 * reposition a component inside the layered pane, one subclass per anchor edge.
 */
@FunctionalInterface
interface Resizer {
  /** Applies the new size (width or height, depending on the anchor) in pixels. */
  void setSize(int size);

  /** Resizer backed by a splitter: delegates to the splitter's first/last component size. */
  abstract class Splitter implements Resizer {
    ThreeComponentsSplitter mySplitter;

    Splitter(@NotNull ThreeComponentsSplitter splitter) {
      mySplitter = splitter;
    }

    /** Resizes the splitter's first (left/top) component. */
    static final class FirstComponent extends Splitter {
      FirstComponent(@NotNull ThreeComponentsSplitter splitter) {
        super(splitter);
      }

      @Override
      public void setSize(int size) {
        mySplitter.setFirstSize(size);
      }
    }

    /** Resizes the splitter's last (right/bottom) component. */
    static final class LastComponent extends Splitter {
      LastComponent(@NotNull ThreeComponentsSplitter splitter) {
        super(splitter);
      }

      @Override
      public void setSize(int size) {
        mySplitter.setLastSize(size);
      }
    }
  }

  /** Resizer that moves/resizes a component directly; one subclass per anchored edge. */
  abstract class LayeredPane implements Resizer {
    Component myComponent;

    LayeredPane(@NotNull Component component) {
      myComponent = component;
    }

    @Override
    public final void setSize(int size) {
      _setSize(size);
      // Revalidate the container that lays out myComponent. Previously the code cast
      // myComponent itself after checking getParent() instanceof JComponent, which
      // mismatched the guard and could throw ClassCastException for non-JComponent children.
      if (myComponent.getParent() instanceof JComponent) {
        JComponent parent = (JComponent)myComponent.getParent();
        parent.revalidate();
        parent.repaint();
      }
    }

    /** Anchor-specific geometry change; called before revalidation. */
    abstract void _setSize(int size);

    /** Left edge: width changes, x stays at 0. */
    static final class Left extends LayeredPane {
      Left(@NotNull Component component) {
        super(component);
      }

      @Override
      public void _setSize(int size) {
        myComponent.setSize(size, myComponent.getHeight());
      }
    }

    /** Right edge: width changes and the component shifts left to keep its right edge fixed. */
    static final class Right extends LayeredPane {
      Right(@NotNull Component component) {
        super(component);
      }

      @Override
      public void _setSize(int size) {
        Rectangle bounds = myComponent.getBounds();
        int delta = size - bounds.width;
        bounds.x -= delta;
        bounds.width += delta;
        myComponent.setBounds(bounds);
      }
    }

    /** Top edge: height changes, y stays at 0. */
    static class Top extends LayeredPane {
      Top(@NotNull Component component) {
        super(component);
      }

      @Override
      public void _setSize(int size) {
        myComponent.setSize(myComponent.getWidth(), size);
      }
    }

    /** Bottom edge: height changes and the component shifts up to keep its bottom edge fixed. */
    static class Bottom extends LayeredPane {
      Bottom(@NotNull Component component) {
        super(component);
      }

      @Override
      public void _setSize(int size) {
        Rectangle bounds = myComponent.getBounds();
        int delta = size - bounds.height;
        bounds.y -= delta;
        bounds.height += delta;
        myComponent.setBounds(bounds);
      }
    }
  }
}
/**
 * Docks {@code newComponent} at the anchor described by {@code info}, wrapping it in a
 * splitter together with the component already shown at that anchor (if one can be found).
 * The splitter's orientation tracks the left/right "horizontal split" UI settings in both
 * directions: UI-settings changes update the splitter, and mouse-toggled orientation is
 * written back to the settings.
 */
private void addAndSplitDockedComponentCmd(@NotNull JComponent newComponent,
                                           @NotNull WindowInfo info,
                                           boolean dirtyMode,
                                           @NotNull ToolWindowManagerImpl manager) {
  ToolWindowAnchor anchor = info.getAnchor();

  // Captures the (effectively final) local 'anchor'; keeps orientation in sync with UISettings.
  final class MySplitter extends OnePixelSplitter implements UISettingsListener {
    @Override
    public void uiSettingsChanged(@NotNull UISettings uiSettings) {
      if (anchor == ToolWindowAnchor.LEFT) {
        setOrientation(!uiSettings.getLeftHorizontalSplit());
      }
      else if (anchor == ToolWindowAnchor.RIGHT) {
        setOrientation(!uiSettings.getRightHorizontalSplit());
      }
    }

    @Override
    public String toString() {
      return "[" + getFirstComponent() + "|" + getSecondComponent() + "]";
    }
  }

  Splitter splitter = new MySplitter();
  splitter.setOrientation(anchor.isSplitVertically());
  if (!anchor.isHorizontal()) {
    // Left/right anchors: let the user flip the split orientation by mouse click and
    // propagate that choice back into the persistent UI settings.
    splitter.setAllowSwitchOrientationByMouseClick(true);
    splitter.addPropertyChangeListener(evt -> {
      if (!Splitter.PROP_ORIENTATION.equals(evt.getPropertyName())) return;
      boolean isSplitterHorizontalNow = !splitter.isVertical();
      UISettings settings = UISettings.getInstance();
      if (anchor == ToolWindowAnchor.LEFT) {
        if (settings.getLeftHorizontalSplit() != isSplitterHorizontalNow) {
          settings.setLeftHorizontalSplit(isSplitterHorizontalNow);
          settings.fireUISettingsChanged();
        }
      }
      if (anchor == ToolWindowAnchor.RIGHT) {
        if (settings.getRightHorizontalSplit() != isSplitterHorizontalNow) {
          settings.setRightHorizontalSplit(isSplitterHorizontalNow);
          settings.fireUISettingsChanged();
        }
      }
    });
  }

  JComponent c = getComponentAt(anchor);
  // if all components are hidden for anchor we should find the second component to put in a splitter
  // otherwise we add empty splitter
  if (c == null) {
    List<ToolWindowEx> toolWindows = manager.getToolWindowsOn(anchor, Objects.requireNonNull(info.getId()));
    // Keep only visible windows on the opposite split side of the same anchor.
    toolWindows.removeIf(window -> window == null || window.isSplitMode() == info.isSplit() || !window.isVisible());
    if (!toolWindows.isEmpty()) {
      c = ((ToolWindowImpl)toolWindows.get(0)).getDecoratorComponent();
    }
    if (c == null) {
      // No partner found; the splitter will hold only the new component.
      LOG.error("Empty splitter @ " + anchor + " during AddAndSplitDockedComponentCmd for " + info.getId());
    }
  }

  float newWeight;
  if (c instanceof InternalDecoratorImpl) {
    InternalDecoratorImpl oldComponent = (InternalDecoratorImpl)c;
    WindowInfoImpl oldInfo = ToolWindowManagerImpl.getRegisteredMutableInfoOrLogError(oldComponent);
    IJSwingUtilities.updateComponentTreeUI(oldComponent);
    IJSwingUtilities.updateComponentTreeUI(newComponent);
    if (info.isSplit()) {
      // New window goes to the second (far) side; restore the remembered proportion.
      splitter.setFirstComponent(oldComponent);
      splitter.setSecondComponent(newComponent);
      float proportion = state.getPreferredSplitProportion(Objects.requireNonNull(oldInfo.getId()), normalizeWeigh(
        oldInfo.getSideWeight() / (oldInfo.getSideWeight() + info.getSideWeight())));
      splitter.setProportion(proportion);
      if (!anchor.isHorizontal() && !anchor.isSplitVertically()) {
        newWeight = normalizeWeigh(oldInfo.getWeight() + info.getWeight());
      }
      else {
        newWeight = normalizeWeigh(oldInfo.getWeight());
      }
    }
    else {
      // New window goes to the first (near) side.
      splitter.setFirstComponent(newComponent);
      splitter.setSecondComponent(oldComponent);
      splitter.setProportion(normalizeWeigh(info.getSideWeight()));
      if (!anchor.isHorizontal() && !anchor.isSplitVertically()) {
        newWeight = normalizeWeigh(oldInfo.getWeight() + info.getWeight());
      }
      else {
        newWeight = normalizeWeigh(info.getWeight());
      }
    }
  }
  else {
    newWeight = normalizeWeigh(info.getWeight());
  }
  setComponent(splitter, anchor, newWeight);

  if (!dirtyMode) {
    layeredPane.validate();
    layeredPane.repaint();
  }
}
/**
 * Adds a "sliding" (overlay) tool window component on the palette layer. Unless animation
 * is disabled (dirty mode, user setting, or a remote session), its appearance is animated
 * by scrolling a snapshot of the new component over a snapshot of the current pane content.
 */
private void addSlidingComponent(@NotNull JComponent component, @NotNull WindowInfo info, boolean dirtyMode) {
  if (dirtyMode || !UISettings.getInstance().getAnimateWindows() || RemoteDesktopService.isRemoteSession()) {
    // not animated
    layeredPane.add(component, JLayeredPane.PALETTE_LAYER);
    layeredPane.setBoundsInPaletteLayer(component, info.getAnchor(), info.getWeight());
  }
  else {
    // Prepare top image. This image is scrolling over bottom image.
    Image topImage = layeredPane.getTopImage();
    Rectangle bounds = component.getBounds();
    useSafely(topImage.getGraphics(), topGraphics -> {
      // The component is added only long enough to paint it into the snapshot;
      // TEMPORARY_ADDED marks it so other code can ignore this transient add.
      component.putClientProperty(TEMPORARY_ADDED, Boolean.TRUE);
      try {
        layeredPane.add(component, JLayeredPane.PALETTE_LAYER);
        layeredPane.moveToFront(component);
        layeredPane.setBoundsInPaletteLayer(component, info.getAnchor(), info.getWeight());
        component.paint(topGraphics);
        layeredPane.remove(component);
      }
      finally {
        component.putClientProperty(TEMPORARY_ADDED, null);
      }
    });

    // prepare bottom image: current pane content, translated into the component's coordinates
    Image bottomImage = layeredPane.getBottomImage();
    Point2D bottomImageOffset = PaintUtil.getFractOffsetInRootPane(layeredPane);
    useSafely(bottomImage.getGraphics(), bottomGraphics -> {
      bottomGraphics.setClip(0, 0, bounds.width, bounds.height);
      bottomGraphics.translate(bottomImageOffset.getX() - bounds.x, bottomImageOffset.getY() - bounds.y);
      layeredPane.paint(bottomGraphics);
    });

    // start animation; direction 1 = showing
    Surface surface = new Surface(topImage, bottomImage, PaintUtil.negate(bottomImageOffset), 1, info.getAnchor(), UISettings.ANIMATION_DURATION);
    layeredPane.add(surface, JLayeredPane.PALETTE_LAYER);
    surface.setBounds(bounds);
    layeredPane.validate();
    layeredPane.repaint();
    // NOTE(review): runMovement() appears to run the animation to completion before
    // returning (the surface is removed immediately after) — confirm in Surface.
    surface.runMovement();
    layeredPane.remove(surface);
    layeredPane.add(component, JLayeredPane.PALETTE_LAYER);
  }

  if (!dirtyMode) {
    layeredPane.validate();
    layeredPane.repaint();
  }
}
/**
 * Removes a "sliding" (overlay) tool window component from the palette layer, animating
 * its disappearance (the mirror of {@code addSlidingComponent}) unless animation is
 * disabled by dirty mode, the user setting, or a remote session.
 */
private void removeSlidingComponent(@NotNull Component component, @NotNull WindowInfo info, boolean dirtyMode) {
  UISettings uiSettings = UISettings.getInstance();
  if (!dirtyMode && uiSettings.getAnimateWindows() && !RemoteDesktopService.isRemoteSession()) {
    Rectangle bounds = component.getBounds();
    // Prepare top image. This image is scrolling over bottom image. It contains
    // picture of component is being removed.
    Image topImage = layeredPane.getTopImage();
    useSafely(topImage.getGraphics(), component::paint);

    // Prepare bottom image. This image contains picture of component that is located
    // under the component to is being removed.
    Image bottomImage = layeredPane.getBottomImage();
    Point2D bottomImageOffset = PaintUtil.getFractOffsetInRootPane(layeredPane);
    useSafely(bottomImage.getGraphics(), bottomGraphics -> {
      // Remove first so the snapshot shows what will be underneath.
      layeredPane.remove(component);
      bottomGraphics.clipRect(0, 0, bounds.width, bounds.height);
      bottomGraphics.translate(bottomImageOffset.getX() - bounds.x, bottomImageOffset.getY() - bounds.y);
      layeredPane.paint(bottomGraphics);
    });

    // Remove component from the layered pane and start animation; direction -1 = hiding.
    Surface surface = new Surface(topImage, bottomImage, PaintUtil.negate(bottomImageOffset), -1, info.getAnchor(), UISettings.ANIMATION_DURATION);
    layeredPane.add(surface, JLayeredPane.PALETTE_LAYER);
    surface.setBounds(bounds);
    layeredPane.validate();
    layeredPane.repaint();
    surface.runMovement();
    layeredPane.remove(surface);
  }
  else {
    // not animated
    layeredPane.remove(component);
  }

  if (!dirtyMode) {
    layeredPane.validate();
    layeredPane.repaint();
  }
}
/**
 * Soft reference that additionally pins its image with a strong reference until the
 * first {@link #get()} call, guaranteeing that a freshly created image cannot be
 * garbage-collected before it has ever been used.
 */
private static final class ImageRef extends SoftReference<BufferedImage> {
  private @Nullable BufferedImage myStrongRef;

  ImageRef(@NotNull BufferedImage image) {
    super(image);
    myStrongRef = image;
  }

  @Override
  public BufferedImage get() {
    BufferedImage pinned = myStrongRef;
    if (pinned == null) {
      // Already handed over to the soft reference; may return null once collected.
      return super.get();
    }
    myStrongRef = null; // drop the pin on first request
    return pinned;
  }
}
/** Per-scale-context cache of animation images held via {@link ImageRef}. */
private static class ImageCache extends ScaleContext.Cache<ImageRef> {
  ImageCache(@NotNull Function<? super ScaleContext, ImageRef> imageProvider) {
    super(imageProvider);
  }

  /**
   * Returns the cached image for {@code ctx}, recreating it if the soft reference was
   * collected. The recursive retry terminates because a freshly provided {@link ImageRef}
   * pins its image strongly until its first {@code get()} call, so the second pass always
   * dereferences to a non-null image.
   */
  public BufferedImage get(@NotNull ScaleContext ctx) {
    ImageRef ref = getOrProvide(ctx);
    BufferedImage image = SoftReference.dereference(ref);
    if (image != null) return image;
    clear(); // clear to recalculate the image
    return get(ctx); // first recalculated image will be non-null
  }
}
/**
 * Layered pane hosting the docked-splitter component on the DEFAULT layer and sliding
 * tool windows (plus animation surfaces) on the PALETTE layer.
 */
private final class MyLayeredPane extends JBLayeredPane {
  // Provides snapshot images at least as large as both this pane and the whole frame,
  // so they do not have to be reallocated while the pane is being resized.
  private final Function<ScaleContext, ImageRef> myImageProvider = __ -> {
    int width = Math.max(Math.max(1, getWidth()), frame.getWidth());
    int height = Math.max(Math.max(1, getHeight()), frame.getHeight());
    return new ImageRef(ImageUtil.createImage(getGraphicsConfiguration(), width, height, BufferedImage.TYPE_INT_RGB));
  };

  /*
   * These images are used to perform animated showing and hiding of components.
   * They are members for performance reasons.
   */
  private final ImageCache myBottomImageCache = new ImageCache(myImageProvider);
  private final ImageCache myTopImageCache = new ImageCache(myImageProvider);

  MyLayeredPane(@NotNull JComponent splitter) {
    setOpaque(false);
    add(splitter, JLayeredPane.DEFAULT_LAYER);
  }

  final Image getBottomImage() {
    return myBottomImageCache.get(ScaleContext.create(this));
  }

  final Image getTopImage() {
    return myTopImageCache.get(ScaleContext.create(this));
  }

  /**
   * Lays out the children; when the pane grows, the bottom and top snapshot images are
   * enlarged as needed (via the size-aware image provider).
   */
  @Override
  public void doLayout() {
    final int width = getWidth();
    final int height = getHeight();
    if (width < 0 || height < 0) {
      return;
    }
    // Resize component at the DEFAULT layer. There should be only one component in that layer.
    Component[] components = getComponentsInLayer(JLayeredPane.DEFAULT_LAYER);
    LOG.assertTrue(components.length <= 1);
    for (Component component : components) {
      component.setBounds(0, 0, getWidth(), getHeight());
    }
    // Resize components at the PALETTE layer, preserving each window's relative weight.
    components = getComponentsInLayer(JLayeredPane.PALETTE_LAYER);
    for (Component component : components) {
      if (!(component instanceof InternalDecoratorImpl)) {
        continue; // e.g. an animation Surface — leave its bounds alone
      }
      WindowInfo info = (((InternalDecoratorImpl)component)).getToolWindow().getWindowInfo();
      float weight = info.getAnchor().isHorizontal()
                     ? (float)component.getHeight() / getHeight()
                     : (float)component.getWidth() / getWidth();
      setBoundsInPaletteLayer(component, info.getAnchor(), weight);
    }
  }

  /**
   * Positions {@code component} along its anchor edge, sized to {@code weight} of the pane
   * (width for left/right, height for top/bottom). Negative weights fall back to
   * {@code WindowInfoImpl.DEFAULT_WEIGHT}; weights above 1 are clamped to 1.
   */
  final void setBoundsInPaletteLayer(@NotNull Component component, @NotNull ToolWindowAnchor anchor, float weight) {
    if (weight < .0f) {
      weight = WindowInfoImpl.DEFAULT_WEIGHT;
    }
    else if (weight > 1.0f) {
      weight = 1.0f;
    }
    if (ToolWindowAnchor.TOP == anchor) {
      component.setBounds(0, 0, getWidth(), (int)(getHeight() * weight + .5f));
    }
    else if (ToolWindowAnchor.LEFT == anchor) {
      component.setBounds(0, 0, (int)(getWidth() * weight + .5f), getHeight());
    }
    else if (ToolWindowAnchor.BOTTOM == anchor) {
      final int height = (int)(getHeight() * weight + .5f);
      component.setBounds(0, getHeight() - height, getWidth(), height);
    }
    else if (ToolWindowAnchor.RIGHT == anchor) {
      final int width = (int)(getWidth() * weight + .5f);
      component.setBounds(getWidth() - width, 0, width, getHeight());
    }
    else {
      LOG.error("unknown anchor " + anchor);
    }
  }
}
/** Sets the stripes-overlaid flag in the pane state and refreshes tool stripe visibility. */
void setStripesOverlayed(boolean value) {
  state.setStripesOverlaid(value);
  updateToolStripesVisibility(UISettings.getInstance());
}
/**
 * Clamps a tool window weight into the open interval (0, 1): non-positive values become
 * the default weight, and values of 1 or more become {@code 1 - DEFAULT_WEIGHT}.
 */
private static float normalizeWeigh(final float weight) {
  if (weight <= 0) {
    return WindowInfoImpl.DEFAULT_WEIGHT;
  }
  return weight >= 1 ? 1 - WindowInfoImpl.DEFAULT_WEIGHT : weight;
}
}
| dahlstrom-g/intellij-community | platform/platform-impl/src/com/intellij/openapi/wm/impl/ToolWindowsPane.java | Java | apache-2.0 | 40,236 |
package de.adihubba;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Static helpers for tolerant {@code double} comparison plus small object/array utilities.
 * <p>
 * The double comparisons round (or scale) the difference of the two operands by
 * {@code 10^precision}, so two values are considered equal when they agree to
 * {@code precision} decimal digits; {@link #DEFAULT_DOUBLE_PRECISION} digits are used by
 * the overloads without an explicit precision argument.
 */
public class ObjectUtils {

    /** Decimal digits used by the comparison overloads without a precision parameter. */
    private static final int DEFAULT_DOUBLE_PRECISION = 8;

    // Cache of 10^0 .. 10^14 so common precisions avoid a Math.pow call.
    private static final double[] POWER_OF_TEN = new double[15];

    static {
        for (int i = 0; i < POWER_OF_TEN.length; i++) {
            POWER_OF_TEN[i] = Math.pow(10, i);
        }
    }

    /** Returns the scaling factor 10^precision, served from the cache when possible. */
    private static double factorFor(int precision) {
        return (precision >= 0 && precision < POWER_OF_TEN.length)
                ? POWER_OF_TEN[precision]
                : Math.pow(10, precision);
    }

    /** Returns {@code true} when {@code double1 <= double2} within the default precision. */
    public static boolean smallerOrEqualsDoublePrecision(double double1, double double2) {
        return smallerOrEqualsDoublePrecision(double1, double2, DEFAULT_DOUBLE_PRECISION);
    }

    /**
     * Returns {@code true} when {@code double1 <= double2} after rounding the difference
     * to {@code precision} decimal digits.
     */
    public static boolean smallerOrEqualsDoublePrecision(double double1, double double2, int precision) {
        return Math.round((double1 - double2) * factorFor(precision)) <= 0;
    }

    /** Returns {@code true} when the two values agree to the default number of digits. */
    public static boolean equalsDoublePrecision(double double1, double double2) {
        return equalsDoublePrecision(double1, double2, DEFAULT_DOUBLE_PRECISION);
    }

    /**
     * Returns {@code true} when the two values differ by less than {@code 10^-precision}.
     */
    public static boolean equalsDoublePrecision(double double1, double double2, int precision) {
        double absDifference = Math.abs(double1 - double2);
        if (absDifference == 0.0) {
            // don't calculate, if result is already zero
            return true;
        }
        if (absDifference >= 1) {
            return false;
        }
        return absDifference * factorFor(precision) < 1;
    }

    /** Returns {@code true} when {@code double1 < double2} within the default precision. */
    public static boolean smallerDoublePrecision(double double1, double double2) {
        return smallerDoublePrecision(double1, double2, DEFAULT_DOUBLE_PRECISION);
    }

    /**
     * Returns {@code true} when {@code double1 < double2} after rounding the difference
     * to {@code precision} decimal digits.
     */
    public static boolean smallerDoublePrecision(double double1, double double2, int precision) {
        return Math.round((double1 - double2) * factorFor(precision)) < 0;
    }

    /**
     * Wraps the varargs in a list; {@code null} yields an empty list.
     * NOTE: the non-empty result is backed by {@link Arrays#asList} and therefore
     * fixed-size but not deeply immutable (kept for backward compatibility).
     */
    @SafeVarargs
    public static <O> List<O> asReadonlyList(O... objects) {
        return objects == null ? Collections.<O>emptyList() : Arrays.asList(objects);
    }

    /**
     * Null-safe equality: {@code true} when both references are identical (including both
     * {@code null}) or {@code obj1.equals(obj2)} holds.
     */
    public static boolean equalsObject(Object obj1, Object obj2) {
        if (obj1 == obj2) {
            // both objects are identical or null
            return true;
        }
        if (obj1 == null || obj2 == null) {
            // only one side is null
            return false;
        }
        // from here both of them are not null
        // don't compare the classes as it is done in the equals routine and should be up
        // to objects we are comparing
        return obj1.equals(obj2);
    }

    /** Returns {@code true} when the array is non-null and contains at least one non-null element. */
    public static boolean arrayHasElements(Object[] obj) {
        if (obj == null) {
            return false;
        }
        for (Object element : obj) {
            if (element != null) {
                return true;
            }
        }
        return false;
    }
}
| adihubba/javafx-3d-surface-chart | src/main/java/de/adihubba/ObjectUtils.java | Java | apache-2.0 | 3,433 |
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.kubernetes.client.mock;
import io.fabric8.kubernetes.api.model.extensions.Deployment;
import io.fabric8.kubernetes.api.model.extensions.DeploymentBuilder;
import io.fabric8.kubernetes.api.model.extensions.DeploymentList;
import io.fabric8.kubernetes.api.model.extensions.DeploymentListBuilder;
import io.fabric8.kubernetes.api.model.extensions.ReplicaSet;
import io.fabric8.kubernetes.api.model.extensions.ReplicaSetBuilder;
import io.fabric8.kubernetes.api.model.extensions.ReplicaSetListBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.server.mock.KubernetesServer;
import org.junit.Rule;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Mock-server tests for {@code extensions/v1beta1} Deployment operations: listing (plain,
 * label-filtered, all-namespaces), get, delete (single and multi), and the guards that
 * reject namespace/name mismatches.
 */
public class DeploymentTest {

  @Rule
  public KubernetesServer server = new KubernetesServer();

  @Test
  public void testList() {
    // Empty list in the default namespace, two items in ns1, three across all namespaces.
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments").andReturn(200, new DeploymentListBuilder().build()).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments").andReturn(200, new DeploymentListBuilder()
      .addNewItem().and()
      .addNewItem().and().build()).once();
    server.expect().withPath("/apis/extensions/v1beta1/deployments").andReturn(200, new DeploymentListBuilder()
      .addNewItem().and()
      .addNewItem().and()
      .addNewItem()
      .and().build()).once();

    KubernetesClient client = server.getClient();

    DeploymentList deploymentList = client.extensions().deployments().list();
    assertNotNull(deploymentList);
    assertEquals(0, deploymentList.getItems().size());

    deploymentList = client.extensions().deployments().inNamespace("ns1").list();
    assertNotNull(deploymentList);
    assertEquals(2, deploymentList.getItems().size());

    deploymentList = client.extensions().deployments().inAnyNamespace().list();
    assertNotNull(deploymentList);
    assertEquals(3, deploymentList.getItems().size());
  }

  @Test
  public void testListWithLabels() {
    // The three-label selector matches nothing; the two-label selector returns three items.
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments?labelSelector=" + toUrlEncoded("key1=value1,key2=value2,key3=value3")).andReturn(200, new DeploymentListBuilder().build()).always();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments?labelSelector=" + toUrlEncoded("key1=value1,key2=value2")).andReturn(200, new DeploymentListBuilder()
      .addNewItem().and()
      .addNewItem().and()
      .addNewItem().and()
      .build()).once();

    KubernetesClient client = server.getClient();

    DeploymentList deploymentList = client.extensions().deployments()
      .withLabel("key1", "value1")
      .withLabel("key2", "value2")
      .withLabel("key3", "value3")
      .list();
    assertNotNull(deploymentList);
    assertEquals(0, deploymentList.getItems().size());

    deploymentList = client.extensions().deployments()
      .withLabel("key1", "value1")
      .withLabel("key2", "value2")
      .list();
    assertNotNull(deploymentList);
    assertEquals(3, deploymentList.getItems().size());
  }

  @Test
  public void testGet() {
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/deployment1").andReturn(200, new DeploymentBuilder().build()).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments/deployment2").andReturn(200, new DeploymentBuilder().build()).once();

    KubernetesClient client = server.getClient();

    Deployment deployment = client.extensions().deployments().withName("deployment1").get();
    assertNotNull(deployment);

    // deployment2 only exists in ns1, so the lookup in the default namespace returns null.
    deployment = client.extensions().deployments().withName("deployment2").get();
    assertNull(deployment);

    deployment = client.extensions().deployments().inNamespace("ns1").withName("deployment2").get();
    assertNotNull(deployment);
  }

  @Test
  public void testDelete() {
    // Deployments report 1 replica initially; follow-up polls see 0 replicas so the
    // client considers the scale-down complete before removing the object.
    Deployment deployment1 = new DeploymentBuilder().withNewMetadata()
      .withName("deployment1")
      .addToLabels("key1", "value1")
      .withResourceVersion("1")
      .withGeneration(1L)
      .endMetadata()
      .withNewSpec()
      .withNewSelector()
      .addToMatchLabels("key1", "value1")
      .endSelector()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .withObservedGeneration(1L)
      .endStatus()
      .build();

    ReplicaSet replicaSet1 = new ReplicaSetBuilder().withNewMetadata()
      .withName("rs1")
      .addToLabels("key1", "value1")
      .withResourceVersion("1")
      .withGeneration(1L)
      .endMetadata()
      .withNewSpec()
      .withNewSelector()
      .addToMatchLabels("key1", "value1")
      .endSelector()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .withObservedGeneration(1L)
      .endStatus()
      .build();

    Deployment deployment2 = new DeploymentBuilder().withNewMetadata()
      .withName("deployment2")
      .addToLabels("key2", "value2")
      .withResourceVersion("1")
      .withGeneration(1L)
      .endMetadata()
      .withNewSpec()
      .withNewSelector()
      .addToMatchLabels("key2", "value2")
      .endSelector()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .withObservedGeneration(1L)
      .endStatus()
      .build();

    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/deployment1").andReturn(200, deployment1).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/deployment1").andReturn(200, new DeploymentBuilder(deployment1).editSpec().withReplicas(0).endSpec().build()).times(5);
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/replicasets?labelSelector=key1%3Dvalue1").andReturn(200, new ReplicaSetListBuilder().addToItems(replicaSet1).build()).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/replicasets/rs1").andReturn(200, replicaSet1).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments/deployment2").andReturn(200, deployment2).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments/deployment2").andReturn(200, new DeploymentBuilder(deployment2).editSpec().withReplicas(0).endSpec().build()).times(5);

    KubernetesClient client = server.getClient();

    Boolean deleted = client.extensions().deployments().withName("deployment1").delete();
    assertNotNull(deleted);

    // deployment2 does not exist in the default namespace.
    deleted = client.extensions().deployments().withName("deployment2").delete();
    assertFalse(deleted);

    deleted = client.extensions().deployments().inNamespace("ns1").withName("deployment2").delete();
    assertTrue(deleted);
  }

  @Test
  public void testDeleteMulti() {
    Deployment deployment1 = new DeploymentBuilder().withNewMetadata()
      .withNamespace("test")
      .withName("deployment1")
      .withResourceVersion("1")
      .withGeneration(2L)
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .withObservedGeneration(1L)
      .endStatus()
      .build();

    Deployment deployment2 = new DeploymentBuilder().withNewMetadata()
      .withNamespace("ns1")
      .withName("deployment2")
      .withResourceVersion("1")
      .withGeneration(2L)
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .withObservedGeneration(1L)
      .endStatus()
      .build();

    // deployment3 is never registered with the mock server, so deleting it must fail.
    Deployment deployment3 = new DeploymentBuilder().withNewMetadata().withName("deployment3").withNamespace("any").and().build();

    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/deployment1").andReturn(200, deployment1).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/test/deployments/deployment1").andReturn(200, new DeploymentBuilder(deployment1)
      .editStatus()
      .withReplicas(0)
      .withObservedGeneration(2L)
      .endStatus()
      .build()).times(5);
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments/deployment2").andReturn(200, deployment2).once();
    server.expect().withPath("/apis/extensions/v1beta1/namespaces/ns1/deployments/deployment2").andReturn(200, new DeploymentBuilder(deployment2)
      .editStatus()
      .withReplicas(0)
      .withObservedGeneration(2L)
      .endStatus()
      .build()).times(5);

    KubernetesClient client = server.getClient();

    Boolean deleted = client.extensions().deployments().inAnyNamespace().delete(deployment1, deployment2);
    assertTrue(deleted);

    deleted = client.extensions().deployments().inAnyNamespace().delete(deployment3);
    assertFalse(deleted);
  }

  @Test(expected = KubernetesClientException.class)
  public void testDeleteWithNamespaceMismatch() {
    // Deleting a "test"-namespaced object through a "test1"-scoped operation must throw.
    Deployment deployment1 = new DeploymentBuilder().withNewMetadata().withName("deployment1").withNamespace("test").and().build();
    KubernetesClient client = server.getClient();

    Boolean deleted = client.extensions().deployments().inNamespace("test1").delete(deployment1);
    assertNotNull(deleted);
  }

  @Test(expected = KubernetesClientException.class)
  public void testCreateWithNameMismatch() {
    // Creating under a name different from the object's metadata name must throw.
    Deployment deployment1 = new DeploymentBuilder().withNewMetadata().withName("deployment1").withNamespace("test").and().build();
    Deployment deployment2 = new DeploymentBuilder().withNewMetadata().withName("deployment2").withNamespace("ns1").and().build();
    KubernetesClient client = server.getClient();

    client.extensions().deployments().inNamespace("test1").withName("mydeployment1").create(deployment1);
  }

  /**
   * Converts a string to its URL-encoded form.
   * It is not a full-blown converter; it only covers what these tests need ('=').
   *
   * @param str the raw label selector
   * @return the encoded selector
   */
  private static String toUrlEncoded(String str) {
    return str.replaceAll("=", "%3D");
  }
}
| jimmidyson/kubernetes-client | kubernetes-tests/src/test/java/io/fabric8/kubernetes/client/mock/DeploymentTest.java | Java | apache-2.0 | 10,860 |
/*
Copyright 2019 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by injection-gen. DO NOT EDIT.
package fake
import (
"context"
fake "github.com/knative/pkg/client/injection/informers/authentication/factory/fake"
policy "github.com/knative/pkg/client/injection/informers/authentication/v1alpha1/policy"
controller "github.com/knative/pkg/controller"
injection "github.com/knative/pkg/injection"
)
// Get is an alias for the non-fake accessor so callers can fetch the Policy
// informer from the context the same way in fake-injection setups.
var Get = policy.Get
// init registers the fake informer constructor with the fake injection registry.
func init() {
	injection.Fake.RegisterInformer(withInformer)
}
// withInformer obtains the fake Policy informer from the shared fake factory,
// stores it in the context under the same key the real informer uses, and
// returns it so the framework can start it.
func withInformer(ctx context.Context) (context.Context, controller.Informer) {
	f := fake.Get(ctx)
	inf := f.Authentication().V1alpha1().Policies()
	return context.WithValue(ctx, policy.Key{}, inf), inf.Informer()
}
| knative/build | vendor/github.com/knative/pkg/client/injection/informers/authentication/v1alpha1/policy/fake/fake.go | GO | apache-2.0 | 1,229 |
package org.onetwo.common.data;
import java.io.Serializable;
/*****
* @author wayshall
*
*/
@SuppressWarnings("serial")
public class ValueWrapper<T> implements Serializable{
public static <E> ValueWrapper<E> wrap(E value){
return new ValueWrapper<E>(value);
}
public static <E> ValueWrapper<E> create(){
return new ValueWrapper<E>(null);
}
private T value;
private ValueWrapper(T value) {
this.value = value;
}
public T getValue() {
return value;
}
public boolean isPresent() {
return this.value!=null;
}
public void setValue(T value) {
this.value = value;
}
}
| wayshall/onetwo | core/modules/common/src/main/java/org/onetwo/common/data/ValueWrapper.java | Java | apache-2.0 | 602 |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.viewer.field;
import java.math.BigInteger;
import docking.widgets.OptionDialog;
import docking.widgets.fieldpanel.field.*;
import docking.widgets.fieldpanel.support.FieldLocation;
import ghidra.app.util.HighlightProvider;
import ghidra.app.util.viewer.format.FieldFormatModel;
import ghidra.app.util.viewer.proxy.ProxyObj;
import ghidra.framework.options.Options;
import ghidra.framework.options.ToolOptions;
import ghidra.program.model.listing.CodeUnit;
import ghidra.program.model.listing.Data;
import ghidra.program.util.ProgramLocation;
import ghidra.program.util.SpacerFieldLocation;
import ghidra.util.StringUtilities;
import ghidra.util.classfinder.ClassSearcher;
/**
* Generates Spacer Fields.
* <P>
* This field is not meant to be loaded by the {@link ClassSearcher}, hence the X in the name.
*/
public class SpacerFieldFactory extends FieldFactory {
public static final String FIELD_NAME = "Spacer";
private String text = null;
/**
* Constructor
*/
public SpacerFieldFactory() {
super(FIELD_NAME);
}
/**
* Constructor
* @param model the model that the field belongs to.
* @param hsProvider the HightLightStringProvider.
* @param displayOptions the Options for display properties.
* @param fieldOptions the Options for field specific properties.
*/
private SpacerFieldFactory(FieldFormatModel model, HighlightProvider hsProvider,
Options displayOptions, Options fieldOptions) {
super(FIELD_NAME, model, hsProvider, displayOptions, fieldOptions);
}
/**
* Constructor
* @param text The text to display in the field.
* @param model The Field model that will use this Address factory.
* @param hsProvider the HightLightProvider.
* @param displayOptions the Options for display properties.
* @param fieldOptions the Options for field specific properties.
*/
public SpacerFieldFactory(String text, FieldFormatModel model, HighlightProvider hsProvider,
Options displayOptions, Options fieldOptions) {
super(FIELD_NAME, model, hsProvider, displayOptions, fieldOptions);
this.text = text;
}
/**
* Sets the text for the spacer field
* @param text the text to display in the listing
*/
public void setText(String text) {
if (text != null && text.length() == 0) {
text = null;
}
this.text = text;
}
/**
* Sets the literal text to display in this field.
*/
public void setText() {
String newText =
OptionDialog.showInputSingleLineDialog(null, "Input Spacer Text", "Text", text);
if (newText != null) {
newText = newText.trim();
if (newText.equals("")) {
text = null;
}
else {
text = newText;
}
}
model.update();
}
/**
* Returns the spacer field's text
*/
public String getText() {
return text;
}
@Override
public ListingField getField(ProxyObj<?> proxy, int varWidth) {
if (enabled && (text != null)) {
AttributedString as = new AttributedString(text, color, getMetrics());
FieldElement field = new TextFieldElement(as, 0, 0);
return ListingTextField.createSingleLineTextField(this, proxy, field, startX + varWidth,
width, hlProvider);
}
return null;
}
@Override
public String getFieldText() {
if (text == null) {
return "";
}
return text;
}
@Override
public FieldLocation getFieldLocation(ListingField bf, BigInteger index, int fieldNum,
ProgramLocation programLoc) {
if (!(programLoc instanceof SpacerFieldLocation)) {
return null;
}
SpacerFieldLocation loc = (SpacerFieldLocation) programLoc;
if (loc.getText().equals(text)) {
return new FieldLocation(index, fieldNum, 0, loc.getCharOffset());
}
return null;
}
@Override
public ProgramLocation getProgramLocation(int row, int col, ListingField bf) {
	Object proxyObject = bf.getProxy().getObject();
	if (!(proxyObject instanceof CodeUnit)) {
		return null;
	}
	CodeUnit codeUnit = (CodeUnit) proxyObject;
	// Data units carry a component path; instructions do not.
	int[] componentPath =
		(proxyObject instanceof Data) ? ((Data) proxyObject).getComponentPath() : null;
	return new SpacerFieldLocation(codeUnit.getProgram(), codeUnit.getMinAddress(),
		componentPath, col, text);
}
/**
 * Returns the word to highlight at the given position.
 * @param bf the ListingTextField
 * @param row the row in the field
 * @param col the column in the field
 * @param loc the program location
 * @return the word at the given column, or null if there is nothing to highlight
 */
public String getStringToHighlight(ListingTextField bf, int row, int col, ProgramLocation loc) {
	// Guard against locations of other types (the original null check alone
	// allowed a ClassCastException for non-spacer locations).
	if (!(loc instanceof SpacerFieldLocation)) {
		return null;
	}
	String s = ((SpacerFieldLocation) loc).getText();
	return StringUtilities.findWord(s, col);
}
@Override
public boolean acceptsType(int category, Class<?> proxyObjectClass) {
// A spacer can be placed in any field category, for any proxy object type.
return true;
}
@Override
public FieldFactory newInstance(FieldFormatModel formatModel, HighlightProvider provider,
ToolOptions options, ToolOptions fieldOptions) {
// NOTE(review): the new instance is created without this factory's text;
// presumably the text is (re)applied elsewhere — confirm against callers.
return new SpacerFieldFactory(formatModel, provider, options, fieldOptions);
}
}
| NationalSecurityAgency/ghidra | Ghidra/Features/Base/src/main/java/ghidra/app/util/viewer/field/SpacerFieldFactory.java | Java | apache-2.0 | 5,415 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pubsublite;
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;
import com.google.cloud.pubsublite.BacklogLocation;
import com.google.cloud.pubsublite.CloudRegion;
import com.google.cloud.pubsublite.ProjectNumber;
import com.google.cloud.pubsublite.ReservationName;
import com.google.cloud.pubsublite.ReservationPath;
import com.google.cloud.pubsublite.SeekTarget;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Random;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
/**
 * Integration test that walks the full Pub/Sub Lite quickstart flow:
 * create/get/list/update/delete for reservations, topics (regional and
 * zonal) and subscriptions, plus publish, subscribe and seek. Each sample
 * prints to stdout, which is captured in {@code bout} and asserted on.
 */
public class QuickStartIT {

  private ByteArrayOutputStream bout;
  private PrintStream out;
  Random rand = new Random();

  private static final Long projectNumber =
      Long.parseLong(System.getenv("GOOGLE_CLOUD_PROJECT_NUMBER"));
  private String cloudRegion = "us-central1";
  // Pick one of zones "a", "b", "c" at random to spread test load.
  private final char zoneId = (char) (rand.nextInt(3) + 'a');
  // Unique suffix so concurrent test runs don't collide on resource names.
  private static final String suffix = UUID.randomUUID().toString();
  private static final String reservationId = "lite-reservation-" + suffix;
  private static final String topicId = "lite-topic-" + suffix;
  private static final String subscriptionId = "lite-subscription-" + suffix;
  private static final int partitions = 2;
  private static final int messageCount = 10;

  ReservationPath reservationPath =
      ReservationPath.newBuilder()
          .setProject(ProjectNumber.of(projectNumber))
          .setLocation(CloudRegion.of(cloudRegion))
          .setName(ReservationName.of(reservationId))
          .build();

  /** Fails fast with a clear message when a required env var is missing. */
  private static void requireEnvVar(String varName) {
    assertNotNull(
        "Environment variable " + varName + " is required to perform these tests.",
        System.getenv(varName));
  }

  @Rule public Timeout globalTimeout = Timeout.seconds(300); // 5 minute timeout

  @BeforeClass
  public static void checkRequirements() {
    requireEnvVar("GOOGLE_CLOUD_PROJECT_NUMBER");
  }

  @Before
  public void setUp() throws Exception {
    // Capture stdout so sample output can be asserted on.
    bout = new ByteArrayOutputStream();
    out = new PrintStream(bout);
    System.setOut(out);
  }

  @After
  public void tearDown() throws Exception {
    System.setOut(null);
  }

  @Test
  public void testQuickstart() throws Exception {
    // Create a reservation.
    CreateReservationExample.createReservationExample(
        projectNumber, cloudRegion, reservationId, /*throughputCapacity=*/ 4);
    assertThat(bout.toString()).contains(reservationId);
    assertThat(bout.toString()).contains("created successfully");
    bout.reset();

    // Create a regional topic.
    CreateTopicExample.createTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, reservationId, partitions, /*regional=*/ true);
    assertThat(bout.toString()).contains(" (regional topic) created successfully");
    bout.reset();

    // Create a zonal topic.
    CreateTopicExample.createTopicExample(
        cloudRegion,
        zoneId,
        projectNumber,
        topicId,
        reservationId,
        partitions,
        /*regional=*/ false);
    assertThat(bout.toString()).contains(" (zonal topic) created successfully");
    bout.reset();

    // Get a reservation.
    GetReservationExample.getReservationExample(projectNumber, cloudRegion, reservationId);
    assertThat(bout.toString()).contains(reservationId);
    assertThat(bout.toString()).contains("4 units of throughput capacity.");
    bout.reset();

    // List reservations.
    ListReservationsExample.listReservationsExample(projectNumber, cloudRegion);
    assertThat(bout.toString()).contains("reservation(s) listed");
    bout.reset();

    // Update reservation to have a throughput capacity of 8 units.
    UpdateReservationExample.updateReservationExample(projectNumber, cloudRegion, reservationId, 8);
    assertThat(bout.toString()).contains("throughput_capacity=8");
    bout.reset();

    // Get a regional topic.
    GetTopicExample.getTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ true);
    assertThat(bout.toString()).contains(cloudRegion + "/topics/" + topicId);
    assertThat(bout.toString()).contains(String.format("%s partition(s).", partitions));
    bout.reset();

    // Get a zonal topic.
    // Fixed: assertThat(bout.toString().contains(...)) built a BooleanSubject
    // without .isTrue(), so it never actually asserted anything.
    GetTopicExample.getTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ false);
    assertThat(bout.toString()).contains(cloudRegion + "-" + zoneId + "/topics/" + topicId);
    assertThat(bout.toString()).contains(String.format("%s partition(s).", partitions));
    bout.reset();

    // List regional topics.
    ListTopicsExample.listTopicsExample(cloudRegion, zoneId, projectNumber, /*regional=*/ true);
    assertThat(bout.toString()).contains(cloudRegion + "/topics/" + topicId);
    assertThat(bout.toString()).contains("topic(s) listed");
    bout.reset();

    // List zonal topics.
    ListTopicsExample.listTopicsExample(cloudRegion, zoneId, projectNumber, /*regional=*/ false);
    assertThat(bout.toString()).contains(cloudRegion + "-" + zoneId + "/topics/" + topicId);
    assertThat(bout.toString()).contains("topic(s) listed");
    bout.reset();

    // Update a regional topic.
    UpdateTopicExample.updateTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, reservationId, /*regional=*/ true);
    assertThat(bout.toString()).contains("seconds: 604800");
    assertThat(bout.toString()).contains("per_partition_bytes: 34359738368");
    assertThat(bout.toString()).contains("throughput_reservation: \"" + reservationPath.toString());
    bout.reset();

    // Update a zonal topic.
    UpdateTopicExample.updateTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, reservationId, /*regional=*/ false);
    assertThat(bout.toString()).contains("seconds: 604800");
    assertThat(bout.toString()).contains("per_partition_bytes: 34359738368");
    assertThat(bout.toString()).contains("throughput_reservation: \"" + reservationPath.toString());
    bout.reset();

    // Create a regional subscription.
    CreateSubscriptionExample.createSubscriptionExample(
        cloudRegion, zoneId, projectNumber, topicId, subscriptionId, /*regional=*/ true);
    assertThat(bout.toString()).contains(cloudRegion + "/subscriptions/" + subscriptionId);
    assertThat(bout.toString()).contains("created successfully");
    bout.reset();

    // Create a zonal subscription.
    CreateSubscriptionExample.createSubscriptionExample(
        cloudRegion, zoneId, projectNumber, topicId, subscriptionId, /*regional=*/ false);
    assertThat(bout.toString())
        .contains(cloudRegion + "-" + zoneId + "/subscriptions/" + subscriptionId);
    assertThat(bout.toString()).contains("created successfully");
    bout.reset();

    // Get a regional subscription.
    GetSubscriptionExample.getSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ true);
    assertThat(bout.toString()).contains(cloudRegion + "/subscriptions/" + subscriptionId);
    bout.reset();

    // Get a zonal subscription.
    GetSubscriptionExample.getSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ false);
    assertThat(bout.toString())
        .contains(cloudRegion + "-" + zoneId + "/subscriptions/" + subscriptionId);
    bout.reset();

    // List subscriptions in a regional topic.
    ListSubscriptionsInTopicExample.listSubscriptionsInTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ true);
    assertThat(bout.toString()).contains("subscription(s) listed in the regional topic");
    bout.reset();

    // List subscriptions in a zonal topic.
    ListSubscriptionsInTopicExample.listSubscriptionsInTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ false);
    assertThat(bout.toString()).contains("subscription(s) listed in the zonal topic");
    bout.reset();

    // List regional subscriptions in a project.
    ListSubscriptionsInProjectExample.listSubscriptionsInProjectExample(
        cloudRegion, zoneId, projectNumber, /*regional=*/ true);
    assertThat(bout.toString()).contains("subscription(s) listed in the project");
    bout.reset();

    // List zonal subscriptions in a project.
    ListSubscriptionsInProjectExample.listSubscriptionsInProjectExample(
        cloudRegion, zoneId, projectNumber, /*regional=*/ false);
    assertThat(bout.toString()).contains("subscription(s) listed in the project");
    bout.reset();

    // Update a regional subscription.
    UpdateSubscriptionExample.updateSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ true);
    assertThat(bout.toString()).contains("delivery_requirement: DELIVER_AFTER_STORED");
    bout.reset();

    // Update a zonal subscription.
    UpdateSubscriptionExample.updateSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ false);
    assertThat(bout.toString()).contains("delivery_requirement: DELIVER_AFTER_STORED");
    bout.reset();

    // Publish to a regional topic.
    PublisherExample.publisherExample(
        cloudRegion, zoneId, projectNumber, topicId, messageCount, /*regional=*/ true);
    assertThat(bout.toString()).contains("Published " + messageCount + " messages.");
    bout.reset();

    // Publish to a zonal topic.
    PublisherExample.publisherExample(
        cloudRegion, zoneId, projectNumber, topicId, messageCount, /*regional=*/ false);
    assertThat(bout.toString()).contains("Published " + messageCount + " messages.");
    bout.reset();

    // Publish with ordering key to a regional topic.
    PublishWithOrderingKeyExample.publishWithOrderingKeyExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ true);
    assertThat(bout.toString()).contains("Published a message with ordering key:");
    bout.reset();

    // Publish with ordering key to a zonal topic.
    PublishWithOrderingKeyExample.publishWithOrderingKeyExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ false);
    assertThat(bout.toString()).contains("Published a message with ordering key:");
    bout.reset();

    // Publish messages with custom attributes to a regional topic.
    PublishWithCustomAttributesExample.publishWithCustomAttributesExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ true);
    assertThat(bout.toString()).contains("Published a message with custom attributes:");
    bout.reset();

    // Publish messages with custom attributes to a zonal topic.
    PublishWithCustomAttributesExample.publishWithCustomAttributesExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ false);
    assertThat(bout.toString()).contains("Published a message with custom attributes:");
    bout.reset();

    // Publish with batch settings to a regional topic.
    PublishWithBatchSettingsExample.publishWithBatchSettingsExample(
        cloudRegion, zoneId, projectNumber, topicId, messageCount, /*regional=*/ true);
    assertThat(bout.toString())
        .contains("Published " + messageCount + " messages with batch settings.");
    bout.reset();

    // Publish with batch settings to a zonal topic.
    PublishWithBatchSettingsExample.publishWithBatchSettingsExample(
        cloudRegion, zoneId, projectNumber, topicId, messageCount, /*regional=*/ false);
    assertThat(bout.toString())
        .contains("Published " + messageCount + " messages with batch settings.");
    bout.reset();

    // Subscribe to a regional subscription.
    SubscriberExample.subscriberExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ true);
    assertThat(bout.toString()).contains("Listening");
    for (int i = 0; i < messageCount; ++i) {
      assertThat(bout.toString()).contains(String.format("Data : message-%s", i));
    }
    assertThat(bout.toString()).contains("Subscriber is shut down: TERMINATED");
    bout.reset();

    // Subscribe to a zonal subscription.
    SubscriberExample.subscriberExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ false);
    assertThat(bout.toString()).contains("Listening");
    for (int i = 0; i < messageCount; ++i) {
      assertThat(bout.toString()).contains(String.format("Data : message-%s", i));
    }
    assertThat(bout.toString()).contains("Subscriber is shut down: TERMINATED");
    bout.reset();

    // Seek in a regional subscription.
    SeekSubscriptionExample.seekSubscriptionExample(
        cloudRegion,
        zoneId,
        projectNumber,
        subscriptionId,
        SeekTarget.of(BacklogLocation.BEGINNING),
        /*waitForOperation=*/ false,
        /*regional=*/ true);
    assertThat(bout.toString()).contains("initiated successfully");
    bout.reset();

    // Seek in a zonal subscription.
    SeekSubscriptionExample.seekSubscriptionExample(
        cloudRegion,
        zoneId,
        projectNumber,
        subscriptionId,
        SeekTarget.of(BacklogLocation.BEGINNING),
        /*waitForOperation=*/ false,
        /*regional=*/ false);
    assertThat(bout.toString()).contains("initiated successfully");
    bout.reset();

    // Delete a regional subscription.
    DeleteSubscriptionExample.deleteSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ true);
    assertThat(bout.toString()).contains(" deleted successfully");
    bout.reset();

    // Delete a zonal subscription.
    DeleteSubscriptionExample.deleteSubscriptionExample(
        cloudRegion, zoneId, projectNumber, subscriptionId, /*regional=*/ false);
    assertThat(bout.toString()).contains(" deleted successfully");
    bout.reset();

    // Delete a regional topic.
    DeleteTopicExample.deleteTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ true);
    assertThat(bout.toString()).contains(" (regional topic) deleted successfully");
    bout.reset();

    // Delete a zonal topic.
    DeleteTopicExample.deleteTopicExample(
        cloudRegion, zoneId, projectNumber, topicId, /*regional=*/ false);
    assertThat(bout.toString()).contains(" (zonal topic) deleted successfully");
    bout.reset();

    // Delete a reservation.
    DeleteReservationExample.deleteReservationExample(projectNumber, cloudRegion, reservationId);
    assertThat(bout.toString()).contains("deleted successfully");
  }
}
| googleapis/java-pubsublite | samples/snippets/src/test/java/pubsublite/QuickStartIT.java | Java | apache-2.0 | 15,069 |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.ir.nodes;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.IRelativeTypeInfo;
import gw.lang.reflect.IFunctionType;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.ir.IRType;
import gw.lang.ir.IRTypeConstants;
import gw.internal.gosu.parser.DynamicFunctionSymbol;
import gw.internal.gosu.ir.transform.AbstractElementTransformer;
import gw.internal.gosu.ir.transform.util.IRTypeResolver;
import gw.lang.reflect.java.IJavaClassInfo;
import gw.lang.reflect.java.IJavaClassMethod;
import java.lang.reflect.Method;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * Static factory for {@code IRMethod} instances, hiding which concrete
 * implementation wraps a given reflective source (method info, constructor
 * info, raw java.lang.reflect.Method, Gosu DFS, or a synthetic descriptor).
 */
public class IRMethodFactory {

/**
 * Wraps a Gosu/type-system method info. Returns null when the method info
 * is null (callers rely on this null pass-through).
 */
public static IRMethodFromMethodInfo createIRMethod(IMethodInfo originalMethodInfo, IFunctionType functionType) {
if (originalMethodInfo == null) {
return null;
}
return new IRMethodFromMethodInfo(originalMethodInfo, functionType);
}

/** Wraps a constructor info. */
public static IRMethod createIRMethod( IConstructorInfo constructor ) {
return new IRMethodFromConstructorInfo( constructor );
}

/** Looks up a declared method on a raw Class and wraps it. */
public static IRMethod createIRMethod(Class cls, String name, Class... paramTypes) {
return createIRMethod(AbstractElementTransformer.getDeclaredMethod(cls, name, paramTypes));
}

/** Looks up a declared method on a java class info and wraps it. */
public static IRMethod createIRMethod(IJavaClassInfo cls, String name, Class... paramTypes) {
return createIRMethod(AbstractElementTransformer.getDeclaredMethod(cls, name, paramTypes));
}

/** Wraps a java.lang.reflect.Method directly. */
public static IRMethod createIRMethod(Method method) {
return new IRMethodFromMethod(method);
}

/** Wraps an IJavaClassMethod. */
public static IRMethod createIRMethod(IJavaClassMethod method) {
return new IRMethodFromJavaMethodInfo(method);
}

/** Wraps a Gosu constructor symbol for the given owning class. */
public static IRMethod createConstructorIRMethod(IType gosuClass, DynamicFunctionSymbol dfs, int numberOfTypeParameters) {
return new IRMethodForConstructorSymbol(gosuClass, dfs, numberOfTypeParameters);
}

/** Builds a synthetic method descriptor from type-system types. */
public static IRMethod createIRMethod(IType owner, String name, IType returnType, IType[] parameterTypes, IRelativeTypeInfo.Accessibility accessibility, boolean bStatic) {
return new SyntheticIRMethod( owner, name, IRTypeResolver.getDescriptor(returnType), convertToIRTypes(parameterTypes), accessibility, bStatic );
}

/** Builds a synthetic method descriptor from already-resolved IR types. */
public static IRMethod createIRMethod(IType owner, String name, IRType returnType, List<IRType> parameterTypes, IRelativeTypeInfo.Accessibility accessibility, boolean bStatic) {
return new SyntheticIRMethod( owner, name, returnType, parameterTypes, accessibility, bStatic );
}

/** Builds a synthetic public constructor ("<init>", void return). */
public static IRMethod createConstructorIRMethod(IType owner, IRType[] parameterTypes ) {
return new SyntheticIRMethod( owner, "<init>", IRTypeConstants.pVOID(), Arrays.asList(parameterTypes), IRelativeTypeInfo.Accessibility.PUBLIC, false );
}

// Resolves each type-system type to its IR descriptor, preserving order.
private static List<IRType> convertToIRTypes(IType[] types) {
List<IRType> result = new ArrayList<IRType>();
for (IType type : types) {
result.add(IRTypeResolver.getDescriptor(type));
}
return result;
}

// private static IType getTrueOwningType( IMethodInfo mi ) {
// if( mi instanceof IJavaMethodInfo)
// {
// // We have to get the owner type from the method because it may be different from the owning type e.g., entity aspects see ContactGosuAspect.AllAdresses
// Method m = ((IJavaMethodInfo)mi).getMethod();
// if( m != null )
// {
// return TypeSystem.get( m.getDeclaringClass() );
// }
// }
// return mi.getOwnersType();
// }
//
// private static IType[] getParameterTypes( IMethodInfo mi ) {
// if ( mi instanceof IGosuMethodInfo ) {
// IDynamicFunctionSymbol dfs = ((IGosuMethodInfo)mi).getDfs();
// while( dfs instanceof ParameterizedDynamicFunctionSymbol)
// {
// ParameterizedDynamicFunctionSymbol pdfs = (ParameterizedDynamicFunctionSymbol)dfs;
// dfs = pdfs.getBackingDfs();
// }
// IType[] boundedTypes = new IType[dfs.getArgTypes().length];
// for( int i = 0; i < dfs.getArgTypes().length; i++ )
// {
// boundedTypes[i] = TypeLord.getDefaultParameterizedTypeWithTypeVars( dfs.getArgTypes()[i] );
// }
// return boundedTypes;
// } else {
// IParameterInfo[] parameterInfos = mi.getParameters();
// IType[] parameterTypes = new IType[parameterInfos.length];
// for (int i = 0; i < parameterInfos.length; i++) {
// parameterTypes[i] = parameterInfos[i].getFeatureType();
// }
// return parameterTypes;
// }
// }
}
| gosu-lang/gosu-lang | gosu-core/src/main/java/gw/internal/gosu/ir/nodes/IRMethodFactory.java | Java | apache-2.0 | 4,548 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: XMPSerializer.java 746951 2009-02-23 10:40:14Z jeremias $ */
package org.apache.xmlgraphics.xmp;
import java.io.OutputStream;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;
import org.xml.sax.SAXException;
/**
* Serializes an XMP tree to XML or to an XMP packet.
*/
/**
 * Serializes an XMP tree to XML or to an XMP packet.
 */
public class XMPSerializer {

  private static final String DEFAULT_ENCODING = "UTF-8";

  /**
   * Writes the in-memory representation of the XMP metadata to a JAXP Result.
   * @param meta the metadata
   * @param res the JAXP Result to write to
   * @throws TransformerConfigurationException if an error occurs setting up the XML
   *           infrastructure.
   * @throws SAXException if a SAX-related problem occurs while writing the XML
   */
  public static void writeXML(Metadata meta, Result res)
      throws TransformerConfigurationException, SAXException {
    writeXML(meta, res, false, false);
  }

  /**
   * Writes the in-memory representation of the XMP metadata to an OutputStream as an XMP packet.
   * @param meta the metadata
   * @param out the stream to write to
   * @param readOnlyXMP true if the generated XMP packet should be read-only
   * @throws TransformerConfigurationException if an error occurs setting up the XML
   *           infrastructure.
   * @throws SAXException if a SAX-related problem occurs while writing the XML
   */
  public static void writeXMPPacket(Metadata meta, OutputStream out, boolean readOnlyXMP)
      throws TransformerConfigurationException, SAXException {
    StreamResult res = new StreamResult(out);
    writeXML(meta, res, true, readOnlyXMP);
  }

  /**
   * Shared serialization routine. When {@code asXMPPacket} is true, the output is
   * wrapped in {@code <?xpacket?>} processing instructions; writable packets get
   * roughly 4KB of trailing whitespace padding as recommended by the XMP spec.
   */
  private static void writeXML(Metadata meta, Result res,
      boolean asXMPPacket, boolean readOnlyXMP)
      throws TransformerConfigurationException, SAXException {
    SAXTransformerFactory tFactory = (SAXTransformerFactory)SAXTransformerFactory.newInstance();
    TransformerHandler handler = tFactory.newTransformerHandler();
    Transformer transformer = handler.getTransformer();
    if (asXMPPacket) {
      // The xpacket PI replaces the XML declaration.
      transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
    }
    transformer.setOutputProperty(OutputKeys.ENCODING, DEFAULT_ENCODING);
    try {
      transformer.setOutputProperty(OutputKeys.INDENT, "yes");
    } catch (IllegalArgumentException iae) {
      //INDENT key is not supported by implementation. That's not tragic, so just ignore.
    }
    handler.setResult(res);
    handler.startDocument();
    if (asXMPPacket) {
      handler.processingInstruction("xpacket",
          "begin=\"\uFEFF\" id=\"W5M0MpCehiHzreSzNTczkc9d\"");
    }
    meta.toSAX(handler);
    if (asXMPPacket) {
      if (readOnlyXMP) {
        handler.processingInstruction("xpacket", "end=\"r\"");
      } else {
        //Create padding string (40 * 101 characters is more or less the recommended 4KB)
        //StringBuilder: no synchronization needed for this method-local buffer.
        StringBuilder sb = new StringBuilder(101);
        sb.append('\n');
        for (int i = 0; i < 100; i++) {
          sb.append(" ");
        }
        char[] padding = sb.toString().toCharArray();
        for (int i = 0; i < 40; i++) {
          handler.characters(padding, 0, padding.length);
        }
        handler.characters(new char[] {'\n'}, 0, 1);
        handler.processingInstruction("xpacket", "end=\"w\"");
      }
    }
    handler.endDocument();
  }
}
| plutext/ae-xmlgraphics-commons | src/java/org/apache/xmlgraphics/xmp/XMPSerializer.java | Java | apache-2.0 | 4,720 |
package gz.jflask.template;
import gz.jflask.FlaskException;
import gz.jflask.InternalServerException;
import gz.jflask.config.Config;
import gz.jflask.config.ConfigHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
* Created by IntelliJ IDEA.
* User: kaneg
* Date: 6/22/15
* Time: 6:20 PM
*/
/**
 * Holds the application-wide {@link TemplateEngine} singleton, created from the
 * {@code template.engine} config key ("default" or a fully-qualified class name),
 * and delegates rendering to it.
 */
public class TemplateEngines {
static TemplateEngine templateEngine;
public static final Logger LOGGER = LoggerFactory.getLogger(TemplateEngines.class);

/**
 * Instantiates and initializes the configured template engine.
 * Must be called before {@link #render}.
 * @throws FlaskException if the configured engine class cannot be created
 */
public static void init() throws FlaskException {
Config configs = ConfigHelper.getAppConfigs();
String engineName = configs.get("template.engine", "default");
if (engineName.equals("default")) {
templateEngine = new DefaultTemplateEngine();
} else {
try {
// getDeclaredConstructor().newInstance() instead of the deprecated
// Class.newInstance(), which propagates checked exceptions unwrapped.
templateEngine = (TemplateEngine) Class.forName(engineName)
.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new InternalServerException(e);
}
}
// Parameterized logging avoids eager string concatenation.
LOGGER.info("Begin init template engine:{}", templateEngine.getName());
templateEngine.init();
LOGGER.info("End init template engine:{}", templateEngine.getName());
}

/**
 * Renders a template through the configured engine.
 * @param name the template name
 * @param source the template source text
 * @param context variables available to the template
 * @return the rendered output
 * @throws Exception whatever the underlying engine throws
 */
public static String render(String name, String source, Map<String, ?> context) throws Exception {
return templateEngine.render(name, source, context);
}
}
| kaneg/JFlask | src/main/java/gz/jflask/template/TemplateEngines.java | Java | apache-2.0 | 1,391 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "commons.h"
#include "util/StringUtil.h"
#include "MMapperHandler.h"
#include "NativeObjectFactory.h"
#include "MapOutputCollector.h"
namespace NativeTask {
// Default-construct with all collaborators unset; real wiring happens in
// configure(). _numPartition defaults to 1, _dest/_remain track a partially
// received key/value pair across handleInput() calls.
MMapperHandler::MMapperHandler() :
    _config(NULL),
    _moc(NULL),
    _mapper(NULL),
    _partitioner(NULL),
    _combinerCreator(NULL),
    _numPartition(1),
    _dest(NULL),
    _remain(0) {
}
// Releases owned collaborators via reset().
MMapperHandler::~MMapperHandler() {
  reset();
}
// Releases all owned state so the handler can be destroyed or reconfigured.
// _combinerCreator is a borrowed factory function, so it is only cleared.
void MMapperHandler::reset() {
  // Free any partially received key/value buffer. _dest is only ever NULL or
  // a buffer allocated with new char[] in handleInput(); the original code
  // just nulled the pointer here, leaking that buffer.
  delete [] _dest;
  _dest = NULL;
  _remain = 0;
  delete _mapper;
  _mapper = NULL;
  delete _moc;
  _moc = NULL;
  delete _partitioner;
  _partitioner = NULL;
  _combinerCreator = NULL;
}
// Wires up the mapper pipeline from configuration:
// - with reducers (_numPartition > 0): optional combiner factory, a
//   partitioner (configured class or default), and a native
//   MapOutputCollector sized for _numPartition partitions;
// - with no reducers: output is streamed straight back to the Java side.
// Finally creates and configures the Mapper and points its collector at this
// handler. Throws UnsupportException when a configured class cannot be found.
void MMapperHandler::configure(Config & config) {
  _config = &config;
  // collector
  _numPartition = config.getInt("mapred.reduce.tasks", 1);
  if (_numPartition > 0) {
    // combiner (optional; absence of the key means no combiner)
    const char * combinerClass = config.get("native.combiner.class");
    if (NULL != combinerClass) {
      _combinerCreator = NativeObjectFactory::GetObjectCreator(combinerClass);
      if (NULL == _combinerCreator) {
        THROW_EXCEPTION_EX(UnsupportException, "Combiner not found: %s", combinerClass);
      }
    }
    // partitioner (configured class, else factory default)
    const char * partitionerClass = config.get("native.partitioner.class");
    if (NULL != partitionerClass) {
      _partitioner
          = (Partitioner *) NativeObjectFactory::CreateObject(partitionerClass);
    }
    else {
      _partitioner
          = (Partitioner *) NativeObjectFactory::CreateDefaultObject(PartitionerType);
    }
    if (NULL == _partitioner) {
      THROW_EXCEPTION_EX(UnsupportException, "Partitioner not found: %s", partitionerClass);
    }
    _partitioner->configure(config);
    LOG("Native Mapper with MapOutputCollector");
    _moc = new MapOutputCollector(_numPartition);
    _moc->configure(config);
  }
  else {
    LOG("Native Mapper with java direct output collector");
  }
  // mapper (configured class, else factory default)
  const char * mapperClass = config.get("native.mapper.class");
  if (NULL != mapperClass) {
    _mapper = (Mapper *) NativeObjectFactory::CreateObject(mapperClass);
  }
  else {
    _mapper = (Mapper *) NativeObjectFactory::CreateDefaultObject(MapperType);
  }
  if (NULL == _mapper) {
    THROW_EXCEPTION_EX(UnsupportException, "Mapper not found: %s", mapperClass);
  }
  _mapper->configure(config);
  _mapper->setCollector(this);
}
// Flushes/merges map output (close), finishes the batch protocol, then frees
// owned state. Order matters: close() needs _mapper and _moc still alive.
void MMapperHandler::finish() {
  close();
  BatchHandler::finish();
  reset();
}
// Consumes a buffer of length-prefixed key/value pairs
// ([klen][vlen][key bytes][value bytes]) and feeds each pair to the mapper.
// A pair split across buffers is staged in _dest; _remain counts the bytes
// still missing, and the pair is mapped once it is complete.
void MMapperHandler::handleInput(char * buff, uint32_t length) {
  if (unlikely(_remain > 0)) {
    // Finish the key/value pair left incomplete by the previous buffer.
    uint32_t cp = _remain < length ? _remain : length;
    memcpy(_dest + _kvlength - _remain, buff, cp);
    buff += cp;
    length -= cp;
    _remain -= cp;
    if (0 == _remain) {
      _mapper->map(_dest, _klength, _dest + _klength, _vlength);
      // _dest was allocated with new char[]; the original used scalar
      // delete here, which is undefined behavior for array allocations.
      delete [] _dest;
      _dest = NULL;
    }
  }
  while (length > 0) {
    if (unlikely(length<2*sizeof(uint32_t))) {
      THROW_EXCEPTION(IOException, "k/v length information incomplete");
    }
    uint32_t klength = ((uint32_t*) buff)[0];
    uint32_t vlength = ((uint32_t*) buff)[1];
    buff += 2 * sizeof(uint32_t);
    length -= 2 * sizeof(uint32_t);
    uint32_t kvlength = klength + vlength;
    // TODO: optimize length==0
    if (kvlength <= length) {
      // Whole pair available in this buffer: map it in place.
      _mapper->map(buff, klength, buff + klength, vlength);
      buff += kvlength;
      length -= kvlength;
    }
    else {
      // Pair is truncated: stage what we have and wait for the next buffer.
      _dest = new char[kvlength + 8];
      _klength = klength;
      _vlength = vlength;
      _kvlength = kvlength;
      simple_memcpy(_dest, buff, length);
      _remain = kvlength - length;
      return;
    }
  }
}
// Collects a key/value pair into the native MapOutputCollector for the given
// partition. On a full buffer (non-zero put result) it asks the Java side for
// spill paths, performs a mid-spill (running the combiner if configured), and
// retries once; a second failure means the single pair exceeds the sort buffer.
void MMapperHandler::collect(const void * key, uint32_t keyLen,
    const void * value, uint32_t valueLen, int partition) {
  if (NULL == _moc) {
    THROW_EXCEPTION(UnsupportException, "Collect with partition not support");
  }
  int result =_moc->put(key, keyLen, value, valueLen, partition);
  if (result==0) {
    return;
  }
  // Buffer full: obtain spill file locations from the Java side.
  string spillpath = this->sendCommand("GetSpillPath");
  if (spillpath.length() == 0) {
    THROW_EXCEPTION(IOException, "Illegal(empty) spill files path");
  }
  vector<string> pathes;
  StringUtil::Split(spillpath, ";", pathes);
  _moc->mid_spill(pathes,"", _moc->getMapOutputSpec(), _combinerCreator);
  result =_moc->put(key, keyLen, value, valueLen, partition);
  if (0 != result) {
    // should not get here, cause _moc will throw Exceptions
    THROW_EXCEPTION(OutOfMemoryException, "key/value pair larger than io.sort.mb");
  }
}
void MMapperHandler::collect(const void * key, uint32_t keyLen,
const void * value, uint32_t valueLen) {
if (NULL == _moc) {
putInt(keyLen);
put((char *)key, keyLen);
putInt(valueLen);
put((char *)value, valueLen);
return;
}
uint32_t partition = _partitioner->getPartition((const char *) key, keyLen,
_numPartition);
collect(key, keyLen, value, valueLen, partition);
}
// Closes the mapper and, when a native collector is in use, performs the
// final merge-and-spill to the output/index paths obtained from the Java side.
void MMapperHandler::close() {
  _mapper->close();
  if (NULL == _moc) {
    // Java direct-output mode: nothing to merge on the native side.
    return;
  }
  string outputpath = this->sendCommand("GetOutputPath");
  string indexpath = this->sendCommand("GetOutputIndexPath");
  if ((outputpath.length() == 0) || (indexpath.length() == 0)) {
    THROW_EXCEPTION(IOException, "Illegal(empty) map output file/index path");
  }
  vector<string> pathes;
  StringUtil::Split(outputpath, ";", pathes);
  _moc->final_merge_and_spill(pathes, indexpath, _moc->getMapOutputSpec(), _combinerCreator);
}
} // namespace NativeTask
| IMCG/nativetask | src/main/native/src/handler/MMapperHandler.cc | C++ | apache-2.0 | 6,215 |
namespace AppsAgainstHumanity.Server.UI
{
partial class expansionPackForm
{
/// <summary>
/// Required designer variable. Holds components created by the designer so
/// they can be disposed with the form.
/// </summary>
private System.ComponentModel.IContainer components = null;

/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
// Only dispose managed components when called from Dispose(), not a finalizer.
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.saveSelectionBtn = new System.Windows.Forms.Button();
this.reloadDecksBtn = new System.Windows.Forms.Button();
this.cancelBtn = new System.Windows.Forms.Button();
this.expansionPackListBox = new System.Windows.Forms.CheckedListBox();
this.SuspendLayout();
//
// saveSelectionBtn
//
this.saveSelectionBtn.Location = new System.Drawing.Point(329, 12);
this.saveSelectionBtn.Name = "saveSelectionBtn";
this.saveSelectionBtn.Size = new System.Drawing.Size(122, 23);
this.saveSelectionBtn.TabIndex = 0;
this.saveSelectionBtn.Text = "Save Selections";
this.saveSelectionBtn.UseVisualStyleBackColor = true;
this.saveSelectionBtn.Click += new System.EventHandler(this.saveSelectionBtn_Click);
//
// reloadDecksBtn
//
this.reloadDecksBtn.Location = new System.Drawing.Point(329, 41);
this.reloadDecksBtn.Name = "reloadDecksBtn";
this.reloadDecksBtn.Size = new System.Drawing.Size(122, 23);
this.reloadDecksBtn.TabIndex = 1;
this.reloadDecksBtn.Text = "Reload Packs";
this.reloadDecksBtn.UseVisualStyleBackColor = true;
this.reloadDecksBtn.Click += new System.EventHandler(this.reloadDecksBtn_Click);
//
// cancelBtn
//
this.cancelBtn.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.cancelBtn.Location = new System.Drawing.Point(329, 244);
this.cancelBtn.Name = "cancelBtn";
this.cancelBtn.Size = new System.Drawing.Size(122, 23);
this.cancelBtn.TabIndex = 3;
this.cancelBtn.Text = "Cancel";
this.cancelBtn.UseVisualStyleBackColor = true;
//
// expansionPackListBox
//
this.expansionPackListBox.BorderStyle = System.Windows.Forms.BorderStyle.None;
this.expansionPackListBox.CheckOnClick = true;
this.expansionPackListBox.FormattingEnabled = true;
this.expansionPackListBox.Location = new System.Drawing.Point(12, 12);
this.expansionPackListBox.Name = "expansionPackListBox";
this.expansionPackListBox.ScrollAlwaysVisible = true;
this.expansionPackListBox.Size = new System.Drawing.Size(303, 255);
this.expansionPackListBox.TabIndex = 4;
//
// expansionPackForm
//
this.AcceptButton = this.saveSelectionBtn;
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.BackColor = System.Drawing.SystemColors.Window;
this.CancelButton = this.cancelBtn;
this.ClientSize = new System.Drawing.Size(459, 275);
this.ControlBox = false;
this.Controls.Add(this.expansionPackListBox);
this.Controls.Add(this.cancelBtn);
this.Controls.Add(this.reloadDecksBtn);
this.Controls.Add(this.saveSelectionBtn);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
this.Name = "expansionPackForm";
this.ShowIcon = false;
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "Select Expansion Packs";
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Button saveSelectionBtn;
private System.Windows.Forms.Button reloadDecksBtn;
private System.Windows.Forms.Button cancelBtn;
private System.Windows.Forms.CheckedListBox expansionPackListBox;
}
} | McSherry/AppsAgainstHumanity | Server/UI/expansionPackForm.Designer.cs | C# | apache-2.0 | 4,820 |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package app implements a server that runs a set of active
// components. This includes replication controllers, service endpoints and
// nodes.
//
package app
import (
"fmt"
"io/ioutil"
"math/rand"
"net"
"net/http"
"net/http/pprof"
"os"
goruntime "runtime"
"strconv"
"time"
"k8s.io/apimachinery/pkg/runtime/schema"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apiserver/pkg/server/healthz"
"k8s.io/api/core/v1"
"k8s.io/client-go/discovery"
v1core "k8s.io/client-go/kubernetes/typed/core/v1"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/client-go/tools/record"
certutil "k8s.io/client-go/util/cert"
"k8s.io/client-go/informers"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/leaderelection"
"k8s.io/client-go/tools/leaderelection/resourcelock"
"k8s.io/kubernetes/cmd/kube-controller-manager/app/options"
"k8s.io/kubernetes/pkg/api/legacyscheme"
"k8s.io/kubernetes/pkg/cloudprovider"
"k8s.io/kubernetes/pkg/controller"
serviceaccountcontroller "k8s.io/kubernetes/pkg/controller/serviceaccount"
"k8s.io/kubernetes/pkg/serviceaccount"
"k8s.io/kubernetes/pkg/util/configz"
"k8s.io/kubernetes/pkg/version"
"k8s.io/kubernetes/pkg/version/verflag"
"github.com/golang/glog"
"github.com/prometheus/client_golang/prometheus"
"github.com/spf13/cobra"
"k8s.io/apimachinery/pkg/util/uuid"
)
const (
	// Jitter used when starting controller managers
	// ControllerStartJitter is passed to wait.Jitter together with
	// ControllerStartInterval to stagger controller start-up (see StartControllers).
	ControllerStartJitter = 1.0
)
// NewControllerManagerCommand creates a *cobra.Command object with default parameters
func NewControllerManagerCommand() *cobra.Command {
	s := options.NewCMServer()
	// Command body: honor --version style flags first, then hand control to Run.
	runFunc := func(cmd *cobra.Command, args []string) {
		verflag.PrintAndExitIfRequested()
		Run(s)
	}
	cmd := &cobra.Command{
		Use: "kube-controller-manager",
		Long: `The Kubernetes controller manager is a daemon that embeds
the core control loops shipped with Kubernetes. In applications of robotics and
automation, a control loop is a non-terminating loop that regulates the state of
the system. In Kubernetes, a controller is a control loop that watches the shared
state of the cluster through the apiserver and makes changes attempting to move the
current state towards the desired state. Examples of controllers that ship with
Kubernetes today are the replication controller, endpoints controller, namespace
controller, and serviceaccounts controller.`,
		Run: runFunc,
	}
	s.AddFlags(cmd.Flags(), KnownControllers(), ControllersDisabledByDefault.List())
	return cmd
}
// ResyncPeriod returns a function which generates a duration each time it is
// invoked; this is so that multiple controllers don't get into lock-step and all
// hammer the apiserver with list requests simultaneously.
func ResyncPeriod(s *options.CMServer) func() time.Duration {
	return func() time.Duration {
		// Scale the configured minimum resync period by a random factor in [1, 2).
		base := float64(s.MinResyncPeriod.Nanoseconds())
		return time.Duration(base * (rand.Float64() + 1))
	}
}
// Run runs the CMServer. This should never exit.
func Run(s *options.CMServer) error {
	// To help debugging, immediately log version
	glog.Infof("Version: %+v", version.Get())
	if err := s.Validate(KnownControllers(), ControllersDisabledByDefault.List()); err != nil {
		return err
	}
	// Registering configz is best-effort: a failure is logged but does not
	// prevent the controller manager from starting.
	if c, err := configz.New("componentconfig"); err == nil {
		c.Set(s.KubeControllerManagerConfiguration)
	} else {
		glog.Errorf("unable to register configz: %s", err)
	}
	kubeClient, leaderElectionClient, kubeconfig, err := createClients(s)
	if err != nil {
		return err
	}
	cleanupFn, err := ShimForOpenShift(s, kubeconfig)
	if err != nil {
		return err
	}
	defer cleanupFn()
	// Serve debug/metrics endpoints unless the port was explicitly disabled.
	if s.Port >= 0 {
		go startHTTP(s)
	}
	recorder := createRecorder(kubeClient)
	// run performs the full start-up sequence. It is invoked either directly
	// (leader election disabled) or as the OnStartedLeading callback, and it
	// never returns.
	run := func(stop <-chan struct{}) {
		rootClientBuilder := controller.SimpleControllerClientBuilder{
			ClientConfig: kubeconfig,
		}
		var clientBuilder controller.ControllerClientBuilder
		if s.UseServiceAccountCredentials {
			if len(s.ServiceAccountKeyFile) == 0 {
				// It's possible another controller process is creating the tokens for us.
				// If one isn't, we'll timeout and exit when our client builder is unable to create the tokens.
				glog.Warningf("--use-service-account-credentials was specified without providing a --service-account-private-key-file")
			}
			clientBuilder = controller.SAControllerClientBuilder{
				ClientConfig:         restclient.AnonymousClientConfig(kubeconfig),
				CoreClient:           kubeClient.CoreV1(),
				AuthenticationClient: kubeClient.Authentication(),
				Namespace:            "kube-system",
			}
		} else {
			clientBuilder = rootClientBuilder
		}
		ctx, err := CreateControllerContext(s, rootClientBuilder, clientBuilder, stop)
		if err != nil {
			glog.Fatalf("error building controller context: %v", err)
		}
		saTokenControllerInitFunc := serviceAccountTokenControllerStarter{rootClientBuilder: rootClientBuilder}.startServiceAccountTokenController
		if err := StartControllers(ctx, saTokenControllerInitFunc, NewControllerInitializers()); err != nil {
			glog.Fatalf("error starting controllers: %v", err)
		}
		ctx.InformerFactory.Start(ctx.Stop)
		close(ctx.InformersStarted)
		// Block forever; the controllers run in their own goroutines.
		select {}
	}
	if !s.LeaderElection.LeaderElect {
		// NOTE(review): run is called with a nil stop channel here — presumably
		// acceptable because run never returns; confirm controllers tolerate it.
		run(nil)
		panic("unreachable")
	}
	id, err := os.Hostname()
	if err != nil {
		return err
	}
	// add a uniquifier so that two processes on the same host don't accidentally both become active
	id = id + "_" + string(uuid.NewUUID())
	rl, err := resourcelock.New(s.LeaderElection.ResourceLock,
		"kube-system",
		"kube-controller-manager",
		leaderElectionClient.CoreV1(),
		resourcelock.ResourceLockConfig{
			Identity:      id,
			EventRecorder: recorder,
		})
	if err != nil {
		glog.Fatalf("error creating lock: %v", err)
	}
	// RunOrDie blocks: it keeps renewing the lease while leading and Fatals
	// (via OnStoppedLeading) if leadership is lost.
	leaderelection.RunOrDie(leaderelection.LeaderElectionConfig{
		Lock:          rl,
		LeaseDuration: s.LeaderElection.LeaseDuration.Duration,
		RenewDeadline: s.LeaderElection.RenewDeadline.Duration,
		RetryPeriod:   s.LeaderElection.RetryPeriod.Duration,
		Callbacks: leaderelection.LeaderCallbacks{
			OnStartedLeading: run,
			OnStoppedLeading: func() {
				glog.Fatalf("leaderelection lost")
			},
		},
	})
	panic("unreachable")
}
// startHTTP serves the controller manager's introspection endpoints —
// healthz, configz, Prometheus metrics, and (optionally) pprof — on the
// configured address/port. ListenAndServe blocks, and any serve error is fatal.
func startHTTP(s *options.CMServer) {
	mux := http.NewServeMux()
	healthz.InstallHandler(mux)
	if s.EnableProfiling {
		profilers := map[string]http.HandlerFunc{
			"/debug/pprof/":        pprof.Index,
			"/debug/pprof/profile": pprof.Profile,
			"/debug/pprof/symbol":  pprof.Symbol,
			"/debug/pprof/trace":   pprof.Trace,
		}
		for path, handler := range profilers {
			mux.HandleFunc(path, handler)
		}
		if s.EnableContentionProfiling {
			goruntime.SetBlockProfileRate(1)
		}
	}
	configz.InstallHandler(mux)
	mux.Handle("/metrics", prometheus.Handler())
	httpServer := &http.Server{
		Addr:    net.JoinHostPort(s.Address, strconv.Itoa(int(s.Port))),
		Handler: mux,
	}
	glog.Fatal(httpServer.ListenAndServe())
}
// createRecorder wires an event broadcaster to both the process log and the
// cluster's event sink, returning a recorder stamped with the
// "controller-manager" component name.
func createRecorder(kubeClient *clientset.Clientset) record.EventRecorder {
	broadcaster := record.NewBroadcaster()
	broadcaster.StartLogging(glog.Infof)
	eventSink := &v1core.EventSinkImpl{Interface: v1core.New(kubeClient.CoreV1().RESTClient()).Events("")}
	broadcaster.StartRecordingToSink(eventSink)
	return broadcaster.NewRecorder(legacyscheme.Scheme, v1.EventSource{Component: "controller-manager"})
}
// createClients builds the main and leader-election clientsets plus the
// underlying rest config from the CMServer flags. On any failure it returns
// the error instead of exiting, so the caller (Run) decides how to surface it.
//
// Fix: the original called glog.Fatalf / NewForConfigOrDie on client
// construction failures even though the function already has an error return;
// both paths now propagate the error to the caller.
func createClients(s *options.CMServer) (*clientset.Clientset, *clientset.Clientset, *restclient.Config, error) {
	kubeconfig, err := clientcmd.BuildConfigFromFlags(s.Master, s.Kubeconfig)
	if err != nil {
		return nil, nil, nil, err
	}
	kubeconfig.ContentConfig.ContentType = s.ContentType
	// Override kubeconfig qps/burst settings from flags
	kubeconfig.QPS = s.KubeAPIQPS
	kubeconfig.Burst = int(s.KubeAPIBurst)
	kubeClient, err := clientset.NewForConfig(restclient.AddUserAgent(kubeconfig, "controller-manager"))
	if err != nil {
		return nil, nil, nil, fmt.Errorf("invalid API configuration: %v", err)
	}
	leaderElectionClient, err := clientset.NewForConfig(restclient.AddUserAgent(kubeconfig, "leader-election"))
	if err != nil {
		return nil, nil, nil, fmt.Errorf("invalid API configuration for leader-election client: %v", err)
	}
	return kubeClient, leaderElectionClient, kubeconfig, nil
}
// ControllerContext bundles everything an InitFunc needs to start a
// controller: clients, informers, configuration, discovered API resources,
// the cloud provider, and lifecycle channels.
type ControllerContext struct {
	// ClientBuilder will provide a client for this controller to use
	ClientBuilder controller.ControllerClientBuilder
	// InformerFactory gives access to informers for the controller.
	InformerFactory informers.SharedInformerFactory
	// Options provides access to init options for a given controller
	Options options.CMServer
	// AvailableResources is a map listing currently available resources
	AvailableResources map[schema.GroupVersionResource]bool
	// Cloud is the cloud provider interface for the controllers to use.
	// It must be initialized and ready to use.
	Cloud cloudprovider.Interface
	// Stop is the stop channel
	Stop <-chan struct{}
	// InformersStarted is closed after all of the controllers have been initialized and are running. After this point it is safe,
	// for an individual controller to start the shared informers. Before it is closed, they should not.
	InformersStarted chan struct{}
}
// IsControllerEnabled reports whether the named controller should run, given
// the --controllers selection stored in this context's options.
func (c ControllerContext) IsControllerEnabled(name string) bool {
	return IsControllerEnabled(name, ControllersDisabledByDefault, c.Options.Controllers...)
}
// IsControllerEnabled decides whether a controller named `name` runs, given
// the user's selector list. An exact match ("name") enables it, a negated
// match ("-name") disables it, and the wildcard "*" enables everything except
// the controllers in disabledByDefaultControllers. With no match and no
// wildcard, the controller does not run.
func IsControllerEnabled(name string, disabledByDefaultControllers sets.String, controllers ...string) bool {
	sawStar := false
	for _, selector := range controllers {
		switch selector {
		case name:
			return true
		case "-" + name:
			return false
		case "*":
			sawStar = true
		}
	}
	// No explicit mention of this controller: it runs only when "*" was given
	// and it is not disabled by default.
	return sawStar && !disabledByDefaultControllers.Has(name)
}
// InitFunc is used to launch a particular controller. It may run additional "should I activate checks".
// Any error returned will cause the controller process to `Fatal`
// The bool indicates whether the controller was enabled.
type InitFunc func(ctx ControllerContext) (bool, error)
// KnownControllers returns the sorted names of every controller this binary
// can run, including the specially-bootstrapped SA token controller.
func KnownControllers() []string {
	names := sets.StringKeySet(NewControllerInitializers())
	// add "special" controllers that aren't initialized normally. These controllers cannot be initialized
	// using a normal function. The only known special case is the SA token controller which *must* be started
	// first to ensure that the SA tokens for future controllers will exist. Think very carefully before adding
	// to this list.
	names.Insert(saTokenControllerName)
	return names.List()
}
// ControllersDisabledByDefault lists controllers that are skipped by the "*"
// wildcard selection and only run when named explicitly in --controllers.
var ControllersDisabledByDefault = sets.NewString(
	"bootstrapsigner",
	"tokencleaner",
)
const (
	// saTokenControllerName identifies the service-account token controller,
	// which is started before all others (see StartControllers) so that it can
	// mint the credentials the remaining controllers need.
	saTokenControllerName = "serviceaccount-token"
)
// NewControllerInitializers is a public map of named controller groups (you can start more than one in an init func)
// paired to their InitFunc. This allows for structured downstream composition and subdivision.
func NewControllerInitializers() map[string]InitFunc {
	return map[string]InitFunc{
		"endpoint":                  startEndpointController,
		"replicationcontroller":     startReplicationController,
		"podgc":                     startPodGCController,
		"resourcequota":             startResourceQuotaController,
		"namespace":                 startNamespaceController,
		"serviceaccount":            startServiceAccountController,
		"garbagecollector":          startGarbageCollectorController,
		"daemonset":                 startDaemonSetController,
		"job":                       startJobController,
		"deployment":                startDeploymentController,
		"replicaset":                startReplicaSetController,
		"horizontalpodautoscaling":  startHPAController,
		"disruption":                startDisruptionController,
		"statefulset":               startStatefulSetController,
		"cronjob":                   startCronJobController,
		"csrsigning":                startCSRSigningController,
		"csrapproving":              startCSRApprovingController,
		"csrcleaner":                startCSRCleanerController,
		"ttl":                       startTTLController,
		"bootstrapsigner":           startBootstrapSignerController,
		"tokencleaner":              startTokenCleanerController,
		"service":                   startServiceController,
		"node":                      startNodeController,
		"route":                     startRouteController,
		"persistentvolume-binder":   startPersistentVolumeBinderController,
		"attachdetach":              startAttachDetachController,
		"persistentvolume-expander": startVolumeExpandController,
		"clusterrole-aggregation":   startClusterRoleAggregrationController,
		"pvc-protection":            startPVCProtectionController,
	}
}
// TODO: In general, any controller checking this needs to be dynamic so
// users don't have to restart their controller manager if they change the apiserver.
// Until we get there, the structure here needs to be exposed for the construction of a proper ControllerContext.
// GetAvailableResources polls the apiserver (via a "controller-discovery"
// client) until /healthz returns 200 or five minutes elapse, then returns the
// set of every group/version/resource the server currently serves.
func GetAvailableResources(clientBuilder controller.ControllerClientBuilder) (map[schema.GroupVersionResource]bool, error) {
	var discoveryClient discovery.DiscoveryInterface
	var healthzContent string
	// If apiserver is not running we should wait for some time and fail only then. This is particularly
	// important when we start apiserver and controller manager at the same time.
	err := wait.PollImmediate(time.Second, 5*time.Minute, func() (bool, error) {
		client, err := clientBuilder.Client("controller-discovery")
		if err != nil {
			glog.Errorf("Failed to get api versions from server: %v", err)
			return false, nil
		}
		healthStatus := 0
		resp := client.Discovery().RESTClient().Get().AbsPath("/healthz").Do().StatusCode(&healthStatus)
		if healthStatus != http.StatusOK {
			glog.Errorf("Server isn't healthy yet. Waiting a little while.")
			return false, nil
		}
		// Keep the healthz body so a later timeout error can include it.
		content, _ := resp.Raw()
		healthzContent = string(content)
		discoveryClient = client.Discovery()
		return true, nil
	})
	if err != nil {
		return nil, fmt.Errorf("failed to get api versions from server: %v: %v", healthzContent, err)
	}
	resourceMap, err := discoveryClient.ServerResources()
	if err != nil {
		// Partial discovery failures are tolerated as long as something was returned.
		utilruntime.HandleError(fmt.Errorf("unable to get all supported resources from server: %v", err))
	}
	if len(resourceMap) == 0 {
		return nil, fmt.Errorf("unable to get any supported resources from server")
	}
	allResources := map[schema.GroupVersionResource]bool{}
	for _, apiResourceList := range resourceMap {
		version, err := schema.ParseGroupVersion(apiResourceList.GroupVersion)
		if err != nil {
			return nil, err
		}
		for _, apiResource := range apiResourceList.APIResources {
			allResources[version.WithResource(apiResource.Name)] = true
		}
	}
	return allResources, nil
}
// CreateControllerContext creates a context struct containing references to resources needed by the
// controllers such as the cloud provider and clientBuilder. rootClientBuilder is only used for
// the shared-informers client and token controller.
func CreateControllerContext(s *options.CMServer, rootClientBuilder, clientBuilder controller.ControllerClientBuilder, stop <-chan struct{}) (ControllerContext, error) {
	versionedClient := rootClientBuilder.ClientOrDie("shared-informers")
	var sharedInformers informers.SharedInformerFactory
	// InformerFactoryOverride allows an embedder to inject its own factory
	// instead of the default one built from the shared-informers client.
	if InformerFactoryOverride == nil {
		sharedInformers = informers.NewSharedInformerFactory(versionedClient, ResyncPeriod(s)())
	} else {
		sharedInformers = InformerFactoryOverride
	}
	availableResources, err := GetAvailableResources(rootClientBuilder)
	if err != nil {
		return ControllerContext{}, err
	}
	cloud, err := cloudprovider.InitCloudProvider(s.CloudProvider, s.CloudConfigFile)
	if err != nil {
		return ControllerContext{}, fmt.Errorf("cloud provider could not be initialized: %v", err)
	}
	if cloud != nil && cloud.HasClusterID() == false {
		if s.AllowUntaggedCloud == true {
			glog.Warning("detected a cluster without a ClusterID. A ClusterID will be required in the future. Please tag your cluster to avoid any future issues")
		} else {
			return ControllerContext{}, fmt.Errorf("no ClusterID Found. A ClusterID is required for the cloud provider to function properly. This check can be bypassed by setting the allow-untagged-cloud option")
		}
	}
	// A nil cloud interface never satisfies the type assertion, so no
	// separate nil check is needed here.
	if informerUserCloud, ok := cloud.(cloudprovider.InformerUser); ok {
		informerUserCloud.SetInformers(sharedInformers)
	}
	ctx := ControllerContext{
		ClientBuilder:      clientBuilder,
		InformerFactory:    sharedInformers,
		Options:            *s,
		AvailableResources: availableResources,
		Cloud:              cloud,
		Stop:               stop,
		InformersStarted:   make(chan struct{}),
	}
	return ctx, nil
}
// StartControllers runs the SA token controller first, initializes the cloud
// provider, and then launches every enabled controller from the supplied map,
// returning an error as soon as any controller fails to start.
func StartControllers(ctx ControllerContext, startSATokenController InitFunc, controllers map[string]InitFunc) error {
	// Always start the SA token controller first using a full-power client, since it needs to mint tokens for the rest
	// If this fails, just return here and fail since other controllers won't be able to get credentials.
	if _, err := startSATokenController(ctx); err != nil {
		return err
	}
	// Initialize the cloud provider with a reference to the clientBuilder only after token controller
	// has started in case the cloud provider uses the client builder.
	if ctx.Cloud != nil {
		ctx.Cloud.Initialize(ctx.ClientBuilder)
	}
	for controllerName, initFn := range controllers {
		if !ctx.IsControllerEnabled(controllerName) {
			glog.Warningf("%q is disabled", controllerName)
			continue
		}
		// Stagger start-up so the controllers don't all hit the apiserver at once.
		time.Sleep(wait.Jitter(ctx.Options.ControllerStartInterval.Duration, ControllerStartJitter))
		glog.V(1).Infof("Starting %q", controllerName)
		started, err := initFn(ctx)
		if err != nil {
			// Include the underlying error; the original message dropped it,
			// which made start-up failures hard to diagnose from the log.
			glog.Errorf("Error starting %q: %v", controllerName, err)
			return err
		}
		if !started {
			glog.Warningf("Skipping %q", controllerName)
			continue
		}
		glog.Infof("Started %q", controllerName)
	}
	return nil
}
// serviceAccountTokenControllerStarter is special because it must run first to set up permissions for other controllers.
// It cannot use the "normal" client builder, so it tracks its own. It must also avoid being included in the "normal"
// init map so that it can always run first.
type serviceAccountTokenControllerStarter struct {
	// rootClientBuilder supplies the full-power client used to mint tokens.
	rootClientBuilder controller.ControllerClientBuilder
}
// startServiceAccountTokenController launches the tokens controller when it is
// enabled and a service-account private key was provided. The boolean result
// reports whether the controller was actually started; errors after the
// enablement checks return true because the controller was supposed to run.
func (c serviceAccountTokenControllerStarter) startServiceAccountTokenController(ctx ControllerContext) (bool, error) {
	if !ctx.IsControllerEnabled(saTokenControllerName) {
		glog.Warningf("%q is disabled", saTokenControllerName)
		return false, nil
	}
	if len(ctx.Options.ServiceAccountKeyFile) == 0 {
		glog.Warningf("%q is disabled because there is no private key", saTokenControllerName)
		return false, nil
	}
	privateKey, err := certutil.PrivateKeyFromFile(ctx.Options.ServiceAccountKeyFile)
	if err != nil {
		return true, fmt.Errorf("error reading key for service account token controller: %v", err)
	}
	var rootCA []byte
	if ctx.Options.RootCAFile != "" {
		rootCA, err = ioutil.ReadFile(ctx.Options.RootCAFile)
		if err != nil {
			return true, fmt.Errorf("error reading root-ca-file at %s: %v", ctx.Options.RootCAFile, err)
		}
		// Validate the PEM data up front so a malformed file fails fast.
		if _, err := certutil.ParseCertsPEM(rootCA); err != nil {
			return true, fmt.Errorf("error parsing root-ca-file at %s: %v", ctx.Options.RootCAFile, err)
		}
	} else {
		// No explicit root CA file: fall back to the CA bundle from the client config.
		rootCA = c.rootClientBuilder.ConfigOrDie("tokens-controller").CAData
	}
	controller, err := serviceaccountcontroller.NewTokensController(
		ctx.InformerFactory.Core().V1().ServiceAccounts(),
		ctx.InformerFactory.Core().V1().Secrets(),
		c.rootClientBuilder.ClientOrDie("tokens-controller"),
		serviceaccountcontroller.TokensControllerOptions{
			TokenGenerator: serviceaccount.JWTTokenGenerator(privateKey),
			RootCA:         rootCA,
		},
	)
	if err != nil {
		return true, fmt.Errorf("error creating Tokens controller: %v", err)
	}
	go controller.Run(int(ctx.Options.ConcurrentSATokenSyncs), ctx.Stop)
	// start the first set of informers now so that other controllers can start
	ctx.InformerFactory.Start(ctx.Stop)
	return true, nil
}
| nhr/origin | vendor/k8s.io/kubernetes/cmd/kube-controller-manager/app/controllermanager.go | GO | apache-2.0 | 20,478 |
package android.qiao.androidlearn;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    /**
     * Creates a test case bound to the default {@link Application} class.
     */
    public ApplicationTest() {
        super(Application.class);
    }
} | qiao4/AndroidLearn | app/src/androidTest/java/android/qiao/androidlearn/ApplicationTest.java | Java | apache-2.0 | 368 |
// Copyright (c) Keith D Gregory
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Constants related to Spring implementations: classnames, locations, &c.
*/
package com.kdgregory.pathfinder.spring.context;
public class SpringConstants
{
    // Spring MVC entry points: the front-controller servlet and the root-context listener.
    public final static String CLASS_DISPATCHER_SERVLET = "org.springframework.web.servlet.DispatcherServlet";
    public final static String CLASS_CONTEXT_LISTENER = "org.springframework.web.context.ContextLoaderListener";
    // HandlerMapping implementations used to map URLs onto controller beans.
    public final static String CLASS_SIMPLE_URL_HANDLER = "org.springframework.web.servlet.handler.SimpleUrlHandlerMapping";
    public final static String CLASS_BEAN_NAME_HANDLER = "org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping";
    public final static String CLASS_CLASS_NAME_HANDLER = "org.springframework.web.servlet.mvc.support.ControllerClassNameHandlerMapping";
    // Interface implemented by classic (pre-annotation) Spring MVC controllers.
    public final static String INTF_CONTROLLER = "org.springframework.web.servlet.mvc.Controller";
    // Annotations used by annotation-driven controllers and request binding.
    public final static String ANNO_CONTROLLER = "org.springframework.stereotype.Controller";
    public final static String ANNO_COMPONENT = "org.springframework.stereotype.Component";
    public final static String ANNO_REQUEST_MAPPING = "org.springframework.web.bind.annotation.RequestMapping";
    public final static String ANNO_REQUEST_PARAM = "org.springframework.web.bind.annotation.RequestParam";
}
| kdgregory/pathfinder | lib-spring/src/main/java/com/kdgregory/pathfinder/spring/context/SpringConstants.java | Java | apache-2.0 | 1,960 |
package ca.corefacility.bioinformatics.irida.ria.web.exceptions;
import java.util.Map;
/**
 * Used by UI to contain internationalized constraint violations.
 */
public class UIConstraintViolationException extends Exception {
	// Map of field name -> localized (already internationalized) error message.
	private final Map<String, String> errors;
	/**
	 * @param errors
	 *            field-to-message violation map; stored by reference, not copied —
	 *            callers should not mutate it after construction.
	 */
	public UIConstraintViolationException(Map<String, String> errors) {
		this.errors = errors;
	}
	/**
	 * @return the field-to-message violation map supplied at construction.
	 */
	public Map<String, String> getErrors() {
		return errors;
	}
}
| phac-nml/irida | src/main/java/ca/corefacility/bioinformatics/irida/ria/web/exceptions/UIConstraintViolationException.java | Java | apache-2.0 | 432 |