repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
SomeFire/ignite | modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/impl/delegate/HadoopCachingFileSystemFactoryDelegate.java | 2700 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.impl.delegate;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap;
/**
 * Caching Hadoop file system factory delegate.
 * <p>
 * Adds a lazily-populated per-user cache on top of
 * {@code HadoopBasicFileSystemFactoryDelegate} so repeated lookups for the
 * same mapped user name reuse one {@link FileSystem} instance instead of
 * creating a new one on every call.
 */
public class HadoopCachingFileSystemFactoryDelegate extends HadoopBasicFileSystemFactoryDelegate {
    /** Per-user file system cache. Values are created on first access by the factory below. */
    private final HadoopLazyConcurrentMap<String, FileSystem> cache = new HadoopLazyConcurrentMap<>(
        new HadoopLazyConcurrentMap.ValueFactory<String, FileSystem>() {
            /** Cache miss: delegate actual file system creation to the base class implementation. */
            @Override public FileSystem createValue(String key) throws IOException {
                return HadoopCachingFileSystemFactoryDelegate.super.getWithMappedName(key);
            }
        }
    );
    /**
     * Constructor.
     *
     * @param proxy Proxy.
     */
    public HadoopCachingFileSystemFactoryDelegate(CachingHadoopFileSystemFactory proxy) {
        super(proxy);
    }
    /** {@inheritDoc} */
    @Override public FileSystem getWithMappedName(String name) throws IOException {
        // Serve from the per-user cache; first access creates the entry.
        return cache.getOrCreate(name);
    }
    /** {@inheritDoc} */
    @Override public void start() throws IgniteException {
        super.start();
        // Disable caching.
        // Turns off Hadoop's own FileSystem cache for this URI scheme —
        // presumably so instance lifecycle is governed solely by 'cache'
        // above (NOTE(review): confirm against HadoopFileSystemsUtils docs).
        cfg.setBoolean(HadoopFileSystemsUtils.disableFsCachePropertyName(fullUri.getScheme()), true);
    }
    /** {@inheritDoc} */
    @Override public void stop() throws IgniteException {
        super.stop();
        try {
            // Dispose the cache (and, presumably, the file systems it holds);
            // wrap the checked exception for callers of this unchecked API.
            cache.close();
        }
        catch (IgniteCheckedException ice) {
            throw new IgniteException(ice);
        }
    }
}
| apache-2.0 |
nigelsmall/py2neo | test/test_ogm.py | 27385 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2016, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import dirname, join as path_join
from unittest import TestCase
from py2neo import order, size, remote, Node, Relationship, NodeSelector
from py2neo.ogm import RelatedObjects, Property, Related, RelatedTo, RelatedFrom, OUTGOING, GraphObject, Label
from test.fixtures.ogm import MovieGraphTestCase, Person, Film, MacGuffin, MovieGraphObject, DerivedThing
from test.util import GraphTestCase
class SubclassTestCase(TestCase):
    """Checks how GraphObject subclasses derive __primarylabel__ and
    __primarykey__: defaults, explicit overrides, and inheritance."""
    def test_class_primary_label_defaults_to_class_name(self):
        assert MacGuffin.__primarylabel__ == "MacGuffin"
    def test_class_primary_label_can_be_overridden(self):
        assert Film.__primarylabel__ == "Movie"
    def test_class_primary_key_defaults_to_id(self):
        assert MacGuffin.__primarykey__ == "__id__"
    def test_class_primary_key_can_be_overridden(self):
        assert Film.__primarykey__ == "title"
    def test_class_primary_key_is_inherited(self):
        assert DerivedThing.__primarykey__ == "my_key"
class InstanceTestCase(TestCase):
    """Same primary label/key checks as SubclassTestCase, but observed on
    instances rather than on the classes themselves."""
    def setUp(self):
        self.macguffin = MacGuffin()
        self.film = Film("Die Hard")
    def test_instance_primary_label_defaults_to_class_name(self):
        assert self.macguffin.__primarylabel__ == "MacGuffin"
    def test_instance_primary_label_can_be_overridden(self):
        assert self.film.__primarylabel__ == "Movie"
    def test_instance_primary_key_defaults_to_id(self):
        assert self.macguffin.__primarykey__ == "__id__"
    def test_instance_primary_key_can_be_overridden(self):
        assert self.film.__primarykey__ == "title"
    def test_instance_repr(self):
        assert repr(self.film).startswith("<Film")
    def test_instance_not_equal_to_non_graph_object(self):
        assert self.film != "this is not a graph object"
class InstanceSubgraphTestCase(TestCase):
    """Checks the backing node exposed via __ogm__.node: it behaves like a
    single node and inherits the object's primary label/key."""
    def setUp(self):
        self.film = Film("Die Hard")
        self.film_node = self.film.__ogm__.node
    def test_instance_subgraph_is_node_like(self):
        # one node, zero relationships
        assert order(self.film_node) == 1
        assert size(self.film_node) == 0
    def test_instance_subgraph_inherits_primary_label(self):
        assert self.film_node.__primarylabel__ == "Movie"
    def test_instance_subgraph_inherits_primary_key(self):
        assert self.film_node.__primarykey__ == "title"
class InstanceLabelTestCase(TestCase):
    """Checks that Label() attributes map boolean instance attributes onto
    node labels, including custom label names and removal on False."""
    def setUp(self):
        self.film = Film("Die Hard")
        self.film.awesome = True
        self.film.science_fiction = True
        self.film_node = self.film.__ogm__.node
    def test_instance_label_name_defaults_to_attribute_name_variant(self):
        assert self.film_node.has_label("Awesome")
    def test_instance_label_name_can_be_overridden(self):
        # Film declares science_fiction with a custom "SciFi" label name
        assert self.film_node.has_label("SciFi")
        assert not self.film_node.has_label("ScienceFiction")
    def test_instance_label_defaults_to_absent(self):
        assert not self.film_node.has_label("Musical")
    def test_setting_to_false_removes_label(self):
        self.film.awesome = False
        assert not self.film_node.has_label("Awesome")
class InstancePropertyTestCase(TestCase):
    """Checks that Property() attributes map onto node property keys,
    by default and with an explicit key override."""
    def setUp(self):
        self.film = Film("Die Hard")
        self.film.year_of_release = 1988
        self.film_node = self.film.__ogm__.node
    def test_instance_property_key_defaults_to_attribute_name(self):
        assert "title" in self.film_node
    def test_instance_property_key_can_be_overridden(self):
        # Film declares year_of_release with key="released"
        assert "released" in self.film_node
        assert "year_of_release" not in self.film_node
class InstanceRelatedObjectTestCase(MovieGraphTestCase):
    """Checks related-object collections on loaded objects against the
    movie fixture graph: auto-loading, propagation, add/remove and
    relationship properties, verified with raw Cypher after each push."""
    def test_related_objects_are_automatically_loaded(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        film_titles = set(film.title for film in list(keanu.acted_in))
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded', "Something's Gotta Give",
                               'The Matrix', 'The Replacements', 'The Matrix Revolutions', 'Johnny Mnemonic'}
    def test_graph_propagation(self):
        # following acted_in then actors must traverse the whole neighbourhood
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        films = list(keanu.acted_in)
        colleagues = set()
        for film in films:
            colleagues |= set(film.actors)
        names = set(colleague.name for colleague in colleagues)
        expected_names = {'Al Pacino', 'Dina Meyer', 'Keanu Reeves', 'Brooke Langton', 'Hugo Weaving', 'Diane Keaton',
                          'Takeshi Kitano', 'Laurence Fishburne', 'Charlize Theron', 'Emil Eifrem', 'Orlando Jones',
                          'Carrie-Anne Moss', 'Ice-T', 'Gene Hackman', 'Jack Nicholson'}
        assert names == expected_names
    def test_can_add_related_object_and_push(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        keanu.acted_in.add(bill_and_ted)
        self.graph.push(keanu)
        # verify with raw Cypher rather than via the OGM
        remote_node = remote(keanu.__ogm__.node)
        film_titles = set(title for title, in self.graph.run("MATCH (a:Person)-[:ACTED_IN]->(b) "
                                                             "WHERE id(a) = {x} "
                                                             "RETURN b.title", x=remote_node._id))
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded', "Something's Gotta Give",
                               'The Matrix', 'The Replacements', 'The Matrix Revolutions', 'Johnny Mnemonic',
                               "Bill & Ted's Excellent Adventure"}
    def test_can_add_related_object_with_properties_and_push(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        # keyword arguments to add() become relationship properties
        keanu.acted_in.add(bill_and_ted, roles=['Ted "Theodore" Logan'])
        self.graph.push(keanu)
        remote_node = remote(keanu.__ogm__.node)
        films = {title: roles for title, roles in self.graph.run("MATCH (a:Person)-[ab:ACTED_IN]->(b) "
                                                                 "WHERE id(a) = {x} "
                                                                 "RETURN b.title, ab.roles", x=remote_node._id)}
        bill_and_ted_roles = films["Bill & Ted's Excellent Adventure"]
        assert bill_and_ted_roles == ['Ted "Theodore" Logan']
    def test_can_remove_related_object_and_push(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        johnny_mnemonic = Film.select(self.graph, "Johnny Mnemonic").first()
        keanu.acted_in.remove(johnny_mnemonic)
        self.graph.push(keanu)
        remote_node = remote(keanu.__ogm__.node)
        film_titles = set(title for title, in self.graph.run("MATCH (a:Person)-[:ACTED_IN]->(b) "
                                                             "WHERE id(a) = {x} "
                                                             "RETURN b.title", x=remote_node._id))
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded', "Something's Gotta Give",
                               'The Matrix', 'The Replacements', 'The Matrix Revolutions'}
    def test_can_add_property_to_existing_relationship(self):
        # re-adding an existing related object merges new relationship properties
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        johnny_mnemonic = Film.select(self.graph, "Johnny Mnemonic").first()
        keanu.acted_in.add(johnny_mnemonic, foo="bar")
        self.graph.push(keanu)
        remote_node = remote(keanu.__ogm__.node)
        johnny_foo = self.graph.evaluate("MATCH (a:Person)-[ab:ACTED_IN]->(b) "
                                         "WHERE id(a) = {x} AND b.title = 'Johnny Mnemonic' "
                                         "RETURN ab.foo", x=remote_node._id)
        assert johnny_foo == "bar"
class FindTestCase(MovieGraphTestCase):
    """Checks GraphObject.select() lookups: by primary key value, by raw
    node id, by multiple keys, and the not-found case."""
    def test_can_find_one_object(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        assert keanu.name == "Keanu Reeves"
        assert keanu.year_of_birth == 1964
    def test_can_find_by_id(self):
        # given
        keanu_0 = Person.select(self.graph, "Keanu Reeves").first()
        node_id = remote(keanu_0.__ogm__.node)._id
        # when
        # this class keeps the default "__id__" primary key, so select()
        # takes a raw node id rather than a property value
        class PersonById(MovieGraphObject):
            __primarylabel__ = "Person"
            name = Property()
            year_of_birth = Property(key="born")
            acted_in = RelatedTo(Film)
            directed = RelatedTo("Film")
            produced = RelatedTo("test.fixtures.ogm.Film")
        found = list(PersonById.select(self.graph, node_id))
        assert found
        keanu = found[0]
        # then
        assert keanu.name == "Keanu Reeves"
        assert keanu.year_of_birth == 1964
    def test_can_find_one_by_id(self):
        # given
        keanu_0 = Person.select(self.graph, "Keanu Reeves").first()
        node_id = remote(keanu_0.__ogm__.node)._id
        # when
        class PersonById(MovieGraphObject):
            __primarylabel__ = "Person"
            name = Property()
            year_of_birth = Property(key="born")
            acted_in = RelatedTo(Film)
            directed = RelatedTo("Film")
            produced = RelatedTo("test.fixtures.ogm.Film")
        keanu = PersonById.select(self.graph, node_id).first()
        # then
        assert keanu.name == "Keanu Reeves"
        assert keanu.year_of_birth == 1964
    def test_cannot_find_one_that_does_not_exist(self):
        keanu = Person.select(self.graph, "Keanu Jones").first()
        assert keanu is None
    def test_can_find_multiple_objects(self):
        keanu, hugo = list(Person.select(self.graph, ("Keanu Reeves", "Hugo Weaving")))
        assert keanu.name == "Keanu Reeves"
        assert keanu.year_of_birth == 1964
        assert hugo.name == "Hugo Weaving"
        assert hugo.year_of_birth == 1960
class CreateTestCase(MovieGraphTestCase):
    """Checks graph.create() on OGM objects: new objects get bound to a
    remote node, while already-bound objects are left untouched."""
    def test_create(self):
        # given
        alice = Person()
        alice.name = "Alice"
        alice.year_of_birth = 1970
        alice.acted_in.add(Film.select(self.graph, "The Matrix").first())
        # when
        self.graph.create(alice)
        # then
        node = alice.__ogm__.node
        remote_node = remote(node)
        assert remote_node
    def test_create_has_no_effect_on_existing(self):
        # given
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        # when
        keanu.name = "Keanu Charles Reeves"
        self.graph.create(keanu)
        # then: the local rename must NOT have been written to the database
        remote_node = remote(keanu.__ogm__.node)
        remote_name = self.graph.evaluate("MATCH (a:Person) WHERE id(a) = {x} "
                                          "RETURN a.name", x=remote_node._id)
        assert remote_name == "Keanu Reeves"
class DeleteTestCase(MovieGraphTestCase):
    """Checks graph.delete() removes the object's backing node."""
    def test_delete_on_existing(self):
        # given
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        node = keanu.__ogm__.node
        # when
        self.graph.delete(keanu)
        # then
        assert not self.graph.exists(node)
class PushTestCase(MovieGraphTestCase):
    """Checks graph.push() writes local state to the database: property
    changes, brand-new objects, new->existing and new->new relationships,
    and removals over incoming relationships."""
    def test_can_push_changes_to_existing(self):
        # given
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        # when
        keanu.name = "Keanu Charles Reeves"
        self.graph.push(keanu)
        # then
        remote_node = remote(keanu.__ogm__.node)
        remote_name = self.graph.evaluate("MATCH (a:Person) WHERE id(a) = {x} "
                                          "RETURN a.name", x=remote_node._id)
        assert remote_name == "Keanu Charles Reeves"
    def test_can_push_new(self):
        # given
        alice = Person()
        alice.name = "Alice Smith"
        alice.year_of_birth = 1970
        # when
        self.graph.push(alice)
        # then
        remote_node = remote(alice.__ogm__.node)
        remote_name = self.graph.evaluate("MATCH (a:Person) WHERE id(a) = {x} "
                                          "RETURN a.name", x=remote_node._id)
        assert remote_name == "Alice Smith"
    def test_can_push_new_that_points_to_existing(self):
        # given
        alice = Person()
        alice.name = "Alice Smith"
        alice.year_of_birth = 1970
        alice.acted_in.add(Film.select(self.graph, "The Matrix").first())
        # when
        self.graph.push(alice)
        # then
        remote_node = remote(alice.__ogm__.node)
        film_node = self.graph.evaluate("MATCH (a:Person)-[:ACTED_IN]->(b) WHERE id(a) = {x} RETURN b",
                                        x=remote_node._id)
        assert film_node["title"] == "The Matrix"
        assert film_node["tagline"] == "Welcome to the Real World"
    def test_can_push_new_that_points_to_new(self):
        # given: both ends of the relationship are unbound local objects
        the_dominatrix = Film("The Dominatrix")
        alice = Person()
        alice.name = "Alice Smith"
        alice.year_of_birth = 1970
        alice.acted_in.add(the_dominatrix)
        # when
        self.graph.push(alice)
        # then
        remote_node = remote(alice.__ogm__.node)
        film_node = self.graph.evaluate("MATCH (a:Person)-[:ACTED_IN]->(b) WHERE id(a) = {x} RETURN b",
                                        x=remote_node._id)
        assert film_node["title"] == "The Dominatrix"
    def test_can_push_with_incoming_relationships(self):
        # given
        matrix = Film.select(self.graph, "The Matrix").first()
        # when: actors is a RelatedFrom (incoming ACTED_IN) collection
        matrix.actors.remove(Person.select(self.graph, "Emil Eifrem").first())
        self.graph.push(matrix)
        # then
        remote_node = remote(matrix.__ogm__.node)
        names = set()
        for name, in self.graph.run("MATCH (a:Movie)<-[:ACTED_IN]-(b) WHERE id(a) = {x} "
                                    "RETURN b.name", x=remote_node._id):
            names.add(name)
        assert names == {'Keanu Reeves', 'Carrie-Anne Moss', 'Hugo Weaving', 'Laurence Fishburne'}
class PullTestCase(MovieGraphTestCase):
    """Checks graph.pull() refreshes local state from the database, for
    loaded objects, unloaded objects and incoming relationships."""
    def test_can_load_and_pull(self):
        keanu = Person.select(self.graph, "Keanu Reeves").first()
        assert keanu.name == "Keanu Reeves"
        # change the name behind the OGM's back, then pull
        remote_node = remote(keanu.__ogm__.node)
        self.graph.run("MATCH (a:Person) WHERE id(a) = {x} SET a.name = {y}",
                       x=remote_node._id, y="Keanu Charles Reeves")
        self.graph.pull(keanu)
        assert keanu.name == "Keanu Charles Reeves"
    def test_can_pull_without_loading(self):
        # an unselected object with only its primary key set can be pulled
        keanu = Person()
        keanu.name = "Keanu Reeves"
        self.graph.pull(keanu)
        assert keanu.year_of_birth == 1964
    def test_can_pull_with_incoming_relationships(self):
        # given: delete one incoming ACTED_IN relationship via raw Cypher
        matrix = Film.select(self.graph, "The Matrix").first()
        remote_node = remote(matrix.__ogm__.node)
        self.graph.run("MATCH (a:Movie)<-[r:ACTED_IN]-(b) WHERE id(a) = {x} AND b.name = 'Emil Eifrem' DELETE r",
                       x=remote_node._id)
        # when
        self.graph.pull(matrix)
        # then
        names = set(a.name for a in matrix.actors)
        assert names == {'Keanu Reeves', 'Carrie-Anne Moss', 'Hugo Weaving', 'Laurence Fishburne'}
class RelatedObjectsTestCase(MovieGraphTestCase):
    """Exercises the RelatedObjects collection directly (bypassing the
    GraphObject attribute machinery): membership, add/remove, pushing and
    pulling both members and relationship properties."""
    def new_keanu_acted_in(self):
        """Build a fresh RelatedObjects view over Keanu's outgoing ACTED_IN
        relationships, typed as Film."""
        keanu_node = NodeSelector(self.graph).select("Person", name="Keanu Reeves").first()
        keanu_acted_in = RelatedObjects(keanu_node, OUTGOING, "ACTED_IN", Film)
        return keanu_acted_in
    def test_can_pull_related_objects(self):
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded',
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic'}
    def test_contains_object(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # then
        matrix_reloaded = Film.select(self.graph, "The Matrix Reloaded").first()
        assert matrix_reloaded in films_acted_in
    def test_does_not_contain_object(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # then
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        assert bill_and_ted not in films_acted_in
    def test_can_add_object(self):
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        films_acted_in.add(bill_and_ted)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded',
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic', "Bill & Ted's Excellent Adventure"}
    def test_can_add_object_when_already_present(self):
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        films_acted_in.add(bill_and_ted)
        # adding a second time must be idempotent
        films_acted_in.add(bill_and_ted)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded',
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic', "Bill & Ted's Excellent Adventure"}
    def test_can_remove_object(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when
        matrix_reloaded = Film.select(self.graph, "The Matrix Reloaded").first()
        films_acted_in.remove(matrix_reloaded)
        # then
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate",
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic'}
    def test_can_remove_object_when_already_absent(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        matrix_reloaded = Film.select(self.graph, "The Matrix Reloaded").first()
        films_acted_in.remove(matrix_reloaded)
        # when: removing a second time must be a no-op
        films_acted_in.remove(matrix_reloaded)
        # then
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate",
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic'}
    def test_can_push_object_additions(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        films_acted_in.add(bill_and_ted)
        films_acted_in.__db_push__(self.graph)
        # then: rebuild the view from scratch and re-pull
        del films_acted_in
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded',
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic', "Bill & Ted's Excellent Adventure"}
    def test_can_push_object_removals(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when
        matrix_reloaded = Film('The Matrix Reloaded')
        films_acted_in.remove(matrix_reloaded)
        films_acted_in.__db_push__(self.graph)
        # then: clear entity caches so the next pull re-reads the database
        del films_acted_in
        Node.cache.clear()
        Relationship.cache.clear()
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate",
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic'}
    def test_can_get_relationship_property(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        matrix_reloaded = Film('The Matrix Reloaded')
        # then
        roles = films_acted_in.get(matrix_reloaded, "roles")
        assert roles == ["Neo"]
    def test_can_get_relationship_property_from_default(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        matrix_reloaded = Film('The Matrix Reloaded')
        # then: missing property falls back to the supplied default
        foo = films_acted_in.get(matrix_reloaded, "foo", "bar")
        assert foo == "bar"
    def test_can_get_relationship_property_from_default_and_unknown_object(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        # then: unknown member also falls back to the default
        foo = films_acted_in.get(bill_and_ted, "foo", "bar")
        assert foo == "bar"
    def test_can_push_property_additions(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when
        matrix = Film("The Matrix")
        films_acted_in.update(matrix, good=True)
        films_acted_in.__db_push__(self.graph)
        # then
        del films_acted_in
        Node.cache.clear()
        Relationship.cache.clear()
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        good = films_acted_in.get(matrix, "good")
        assert good
    def test_can_push_property_removals(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when: setting a property to None deletes it on push
        matrix = Film("The Matrix")
        films_acted_in.update(matrix, roles=None)
        films_acted_in.__db_push__(self.graph)
        # then
        del films_acted_in
        Node.cache.clear()
        Relationship.cache.clear()
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        roles = films_acted_in.get(matrix, "roles")
        assert roles is None
    def test_can_push_property_updates(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when
        matrix = Film("The Matrix")
        films_acted_in.update(matrix, roles=1)
        films_acted_in.__db_push__(self.graph)
        # then
        del films_acted_in
        Node.cache.clear()
        Relationship.cache.clear()
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        roles = films_acted_in.get(matrix, "roles")
        assert roles == 1
    def test_can_push_property_updates_on_new_object(self):
        # given
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        # when: update() on a non-member both adds it and sets properties
        bill_and_ted = Film("Bill & Ted's Excellent Adventure")
        films_acted_in.update(bill_and_ted, good=True)
        films_acted_in.__db_push__(self.graph)
        # then
        del films_acted_in
        Node.cache.clear()
        Relationship.cache.clear()
        films_acted_in = self.new_keanu_acted_in()
        films_acted_in.__db_pull__(self.graph)
        film_titles = set(film.title for film in films_acted_in)
        assert film_titles == {"The Devil's Advocate", 'The Matrix Reloaded',
                               "Something's Gotta Give", 'The Matrix', 'The Replacements',
                               'The Matrix Revolutions', 'Johnny Mnemonic', "Bill & Ted's Excellent Adventure"}
        # and
        good = films_acted_in.get(bill_and_ted, "good")
        assert good
class Thing(GraphObject):
    """Fixture class exposing the same X/Y relationship types in all three
    flavours: undirected (Related), outgoing (RelatedTo) and incoming
    (RelatedFrom)."""
    __primarykey__ = "name"
    # boolean labels
    p = Label()
    q = Label()
    # undirected views
    x = Related("Thing", "X")
    y = Related("Thing", "Y")
    # outgoing-only views
    x_out = RelatedTo("Thing", "X")
    y_out = RelatedTo("Thing", "Y")
    # incoming-only views
    x_in = RelatedFrom("Thing", "X")
    y_in = RelatedFrom("Thing", "Y")
class ComprehensiveTestCase(GraphTestCase):
    """Loads the xxy.cypher fixture and checks Related/RelatedTo/RelatedFrom
    membership counts for each node; self-loops count in both directions."""
    def setUp(self):
        self.graph.delete_all()
        with open(path_join(dirname(__file__), "..", "resources", "xxy.cypher")) as f:
            cypher = f.read()
        self.graph.run(cypher)
    def test_a(self):
        a = Thing.select(self.graph, "A").first()
        # A is related to B and C
        assert isinstance(a, Thing)
        assert len(a.x) == 2
        assert len(a.x_out) == 2
        assert len(a.x_in) == 2
        assert len(a.y) == 2
        assert len(a.y_out) == 2
        assert len(a.y_in) == 2
    def test_b(self):
        b = Thing.select(self.graph, "B").first()
        # B is only related to A
        assert isinstance(b, Thing)
        assert len(b.x) == 1
        assert len(b.x_out) == 1
        assert len(b.x_in) == 1
        assert len(b.y) == 1
        assert len(b.y_out) == 1
        assert len(b.y_in) == 1
    def test_c(self):
        c = Thing.select(self.graph, "C").first()
        # Loops are related to themselves, hence C is related to A, C and D
        assert isinstance(c, Thing)
        assert len(c.x) == 3
        assert len(c.x_out) == 3
        assert len(c.x_in) == 3
        assert len(c.y) == 3
        assert len(c.y_out) == 3
        assert len(c.y_in) == 3
    def test_d(self):
        d = Thing.select(self.graph, "D").first()
        # D is only related to C
        assert isinstance(d, Thing)
        assert len(d.x) == 1
        assert len(d.x_out) == 1
        assert len(d.x_in) == 1
        assert len(d.y) == 1
        assert len(d.y_out) == 1
        assert len(d.y_in) == 1
class SimpleThing(GraphObject):
    """Minimal GraphObject with no declared properties, labels or
    relationships; used to exercise default create/merge/push behaviour."""
    pass
class SimpleThingTestCase(GraphTestCase):
    """Checks that create/merge/push each bind a bare SimpleThing to a
    remote node."""
    def test_create(self):
        thing = SimpleThing()
        self.graph.create(thing)
        assert remote(thing.__ogm__.node)
    def test_merge(self):
        thing = SimpleThing()
        self.graph.merge(thing)
        assert remote(thing.__ogm__.node)
    def test_push(self):
        thing = SimpleThing()
        self.graph.push(thing)
        assert remote(thing.__ogm__.node)
| apache-2.0 |
msteiger/gestalt | gestalt-module/src/main/java/org/terasology/naming/exception/VersionParseException.java | 1303 | /*
* Copyright 2015 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.naming.exception;
/**
* Exception when a version string fails to be parsed.
*
* @author Immortius
*/
public class VersionParseException extends RuntimeException {
public VersionParseException() {
}
public VersionParseException(String message) {
super(message);
}
public VersionParseException(String message, Throwable cause) {
super(message, cause);
}
public VersionParseException(Throwable cause) {
super(cause);
}
public VersionParseException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
| apache-2.0 |
gabedev/roody-python | roody.py | 1139 | # Roody Discord Bot | Rude Discord Bot
# Ported to Python by @gabedev
import bs4
import discord
import urllib.request
import asyncio
# Discord client; the event handlers below are registered against it.
client = discord.Client()
def get_insult():
    """Fetch a Shakespearean insult from pangloss.com.

    Returns the text of the first ``<font>`` element on the page with any
    embedded CR/LF characters stripped, or ``None`` if the page contains
    no ``<font>`` element.
    """
    # Use a context manager so the HTTP connection is always closed
    # (the previous version leaked the socket).
    with urllib.request.urlopen('http://www.pangloss.com/seidel/Shaker/') as sock:
        html = sock.read()
    soup = bs4.BeautifulSoup(html, 'html.parser')
    for insult_str in soup.find_all('font'):
        insult = insult_str.get_text()
        insult = insult.replace('\n', '').replace('\r', '')
        # Only the first <font> element is wanted.
        return insult
    return None
@client.event
async def on_ready():
    # Fired once the bot is connected: log the identity and advertise the
    # invite keyword in the bot's presence text.
    print ('Logged on as: {0}'.format(client.user))
    await client.change_presence(game=discord.Game(name='Say "shakespeare-inv" for invite link'))
@client.event
async def on_message(message):
    # Insult anyone who mentions the bot.
    if client.user.mention in message.content:
        await client.send_message(message.channel, '{0}, {1}'.format(message.author.mention, get_insult()))
    # DM the OAuth invite link when the trigger phrase appears anywhere
    # in the message (note: also matched inside longer words).
    if "shakespeare-inv" in message.content:
        await client.send_message(message.author, 'Here is [thou] linkith to thy acceptance thy bastard: https://discordapp.com/oauth2/authorize?client_id=284664648215691264&scope=bot')
# NOTE(review): 'token' is a placeholder -- supply a real bot token
# (ideally from an environment variable) before running.
client.run('token')
| apache-2.0 |
ostap0207/remotify.me | remotify.rest-server/src/main/java/db/GenericDao.java | 600 | package db;
import java.util.Map;
/**
 * Generic CRUD data-access-object contract.
 *
 * @param <T> entity type handled by the DAO implementation
 */
public interface GenericDao< T > {
    /**
     * Method that returns the number of entries from a table that meet some
     * criteria (where clause params)
     *
     * @param params
     *            sql parameters
     * @return the number of records meeting the criteria
     */
    // NOTE(review): raw Map — consider Map<String, Object> once all call
    // sites/implementers are known; left as-is to avoid breaking them.
    long countAll(Map params);
    /** Persists the given entity and returns the stored instance. */
    T create(T t);
    /** Deletes the entity identified by the given id. */
    void delete(Object id);
    /** Looks up an entity by its identifier. */
    T find(Object id);
    /** Applies updates from the given entity and returns the result. */
    T update(T t);
} | apache-2.0 |
lgoldstein/communitychest | apps/apache/maven/pomrunner/src/main/java/net/community/apps/apache/maven/pomrunner/OutputEntry.java | 811 | /*
*
*/
package net.community.apps.apache.maven.pomrunner;
import net.community.chest.util.logging.LogLevelWrapper;
import net.community.chest.util.map.MapEntryImpl;
/**
* <P>Copyright as per GPLv2</P>
* @author Lyor G.
* @since Mar 21, 2011 3:14:02 PM
*/
public class OutputEntry extends MapEntryImpl<LogLevelWrapper,String> {
    /**
     * Builds an entry coupling a log level with its message text.
     *
     * @param key the log level
     * @param value the message emitted at that level
     */
    public OutputEntry (LogLevelWrapper key, String value) {
        super(key, value);
    }
    /** Builds an empty entry - level and message both unset. */
    public OutputEntry () {
        this(null, null);
    }
    /** @return the log level (stored as the entry key) */
    public LogLevelWrapper getLevel () {
        return getKey();
    }
    /** @param level the log level to store as the entry key */
    public void setLevel (LogLevelWrapper level) {
        setKey(level);
    }
    /** @return the message text (stored as the entry value) */
    public String getMessage () {
        return getValue();
    }
    /** @param msg the message text to store as the entry value */
    public void setMessage (String msg) {
        setValue(msg);
    }
}
| apache-2.0 |
RNAcentral/rnacentral-import-pipeline | utils/precompute/src/metadata/r2dt_hit.rs | 759 | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
/// A single R2DT secondary-structure model hit for a sequence, as read
/// from the precompute metadata dump.
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct R2dtHit {
    /// Row index used for grouping (see the `HasIndex` impl in this module).
    pub id: usize,
    pub urs_id: usize,
    pub urs_taxid: String,
    model_id: usize,
    model_name: String,
    /// Source collection the model came from.
    model_source: String,
    model_so_term: Option<String>,
    // Coverage/basepair statistics may be absent in the dump.
    sequence_coverage: Option<f64>,
    model_coverage: Option<f64>,
    sequence_basepairs: Option<usize>,
    model_basepairs: Option<usize>,
}
/// Lets the generic `grouper` machinery index hits by their `id` column.
impl grouper::HasIndex for R2dtHit {
    fn index(&self) -> usize {
        self.id
    }
}
/// Groups R2DT hits read from `path` into `output`, expecting zero or one
/// hit per index over the range `1..=max`.
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
    grouper::group::<R2dtHit>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
}
| apache-2.0 |
openstack/swift | swift/cli/recon.py | 55257 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
cmdline utility to perform cluster reconnaissance
"""
from __future__ import print_function
from eventlet.green import socket
from six import string_types
from six.moves.urllib.parse import urlparse
from swift.common.utils import (
SWIFT_CONF_FILE, md5_hash_for_file, set_swift_dir)
from swift.common.ring import Ring
from swift.common.storage_policy import POLICIES, reload_storage_policies
import eventlet
import json
import optparse
import time
import sys
import six
import os
if six.PY3:
from eventlet.green.urllib import request as urllib2
else:
from eventlet.green import urllib2
def seconds2timeunit(seconds):
    """Scale a duration in seconds up to the largest applicable unit.

    :param seconds: elapsed time in seconds
    :returns: tuple of (elapsed, unit) where unit is one of 'seconds',
              'minutes', 'hours' or 'days'
    """
    elapsed, unit = seconds, 'seconds'
    # Climb the unit ladder while the value is large enough; float
    # divisors keep the arithmetic identical under Python 2 and 3.
    for threshold, larger_unit in ((60.0, 'minutes'),
                                   (60.0, 'hours'),
                                   (24.0, 'days')):
        if elapsed < threshold:
            break
        elapsed = elapsed / threshold
        unit = larger_unit
    return elapsed, unit
def size_suffix(size):
    """Format a byte count using decimal (factor-1000) unit suffixes.

    Uses integer division, so the result has no fractional part
    (e.g. 1500000 -> '1 MB').

    :param size: size in bytes
    :returns: string such as '999 bytes' or '42 GB'; sizes of 1000 YB or
              more are reported in YB rather than divided further
    """
    suffixes = ['bytes', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
    for suffix in suffixes[:-1]:
        if size < 1000:
            return "%s %s" % (size, suffix)
        size = size // 1000
    # Out of larger units: report the remainder in YB. (The previous
    # version divided one extra time here, under-reporting >= 1000 YB
    # by a factor of 1000.)
    return "%s %s" % (size, suffixes[-1])
class Scout(object):
    """
    Obtain swift recon information.

    An instance is configured for one recon check type and then pointed at
    individual hosts via scout() / scout_server_type().
    """
    def __init__(self, recon_type, verbose=False, suppress_errors=False,
                 timeout=5):
        # recon check name; appended to each host's /recon/ URL in scout_host()
        self.recon_type = recon_type
        self.verbose = verbose
        self.suppress_errors = suppress_errors
        # per-request socket timeout in seconds
        self.timeout = timeout
    def scout_host(self, base_url, recon_type):
        """
        Perform the actual HTTP request to obtain swift recon telemetry.

        :param base_url: the base url of the host you wish to check. str of the
                         format 'http://127.0.0.1:6200/recon/'
        :param recon_type: the swift recon check to request.
        :returns: tuple of (recon url used, response body, and status)
        """
        url = base_url + recon_type
        try:
            body = urllib2.urlopen(url, timeout=self.timeout).read()
            # Under py3 the response body is bytes; decode before json.loads.
            if six.PY3 and isinstance(body, six.binary_type):
                body = body.decode('utf8')
            content = json.loads(body)
            if self.verbose:
                print("-> %s: %s" % (url, content))
            status = 200
        except urllib2.HTTPError as err:
            if not self.suppress_errors or self.verbose:
                print("-> %s: %s" % (url, err))
            # On failure the error object itself is handed back as 'content'.
            content = err
            status = err.code
        except (urllib2.URLError, socket.timeout) as err:
            if not self.suppress_errors or self.verbose:
                print("-> %s: %s" % (url, err))
            content = err
            # -1 marks a transport-level failure (no HTTP status available)
            status = -1
        return url, content, status
    def scout(self, host):
        """
        Obtain telemetry from a host running the swift recon middleware.

        :param host: host to check, as an (address, port) pair
        :returns: tuple of (recon url used, response body, status, time start
                  and time end)
        """
        base_url = "http://%s:%s/recon/" % (host[0], host[1])
        ts_start = time.time()
        url, content, status = self.scout_host(base_url, self.recon_type)
        ts_end = time.time()
        return url, content, status, ts_start, ts_end
    def scout_server_type(self, host):
        """
        Obtain Server header by calling OPTIONS.

        :param host: host to check, as an (address, port) pair
        :returns: tuple of (url used, server type or error, status)
        """
        try:
            url = "http://%s:%s/" % (host[0], host[1])
            req = urllib2.Request(url)
            # urllib2 has no native OPTIONS support; override the verb.
            req.get_method = lambda: 'OPTIONS'
            conn = urllib2.urlopen(req)
            header = conn.info().get('Server')
            # keep only the product token, dropping the '/version' suffix
            server_header = header.split('/')
            content = server_header[0]
            status = 200
        except urllib2.HTTPError as err:
            if not self.suppress_errors or self.verbose:
                print("-> %s: %s" % (url, err))
            content = err
            status = err.code
        except (urllib2.URLError, socket.timeout) as err:
            if not self.suppress_errors or self.verbose:
                print("-> %s: %s" % (url, err))
            content = err
            status = -1
        return url, content, status
class SwiftRecon(object):
"""
Retrieve and report cluster info from hosts running recon middleware.
"""
def __init__(self):
self.verbose = False
self.suppress_errors = False
self.timeout = 5
self.pool_size = 30
self.pool = eventlet.GreenPool(self.pool_size)
self.check_types = ['account', 'container', 'object']
self.server_type = 'object'
def _gen_stats(self, stats, name=None):
"""Compute various stats from a list of values."""
cstats = [x for x in stats if x is not None]
if len(cstats) > 0:
ret_dict = {'low': min(cstats), 'high': max(cstats),
'total': sum(cstats), 'reported': len(cstats),
'number_none': len(stats) - len(cstats), 'name': name}
ret_dict['average'] = ret_dict['total'] / float(len(cstats))
ret_dict['perc_none'] = \
ret_dict['number_none'] * 100.0 / len(stats)
else:
ret_dict = {'reported': 0}
return ret_dict
def _print_stats(self, stats):
"""
print out formatted stats to console
:param stats: dict of stats generated by _gen_stats
"""
print('[%(name)s] low: %(low)d, high: %(high)d, avg: '
'%(average).1f, total: %(total)d, '
'Failed: %(perc_none).1f%%, no_result: %(number_none)d, '
'reported: %(reported)d' % stats)
def _ptime(self, timev=None):
"""
:param timev: a unix timestamp or None
:returns: a pretty string of the current time or provided time in UTC
"""
if timev:
return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(timev))
else:
return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
def get_hosts(self, region_filter, zone_filter, swift_dir, ring_names):
"""
Get a list of hosts in the rings.
:param region_filter: Only list regions matching given filter
:param zone_filter: Only list zones matching given filter
:param swift_dir: Directory of swift config, usually /etc/swift
:param ring_names: Collection of ring names, such as
['object', 'object-2']
:returns: a set of tuples containing the ip and port of hosts
"""
rings = [Ring(swift_dir, ring_name=n) for n in ring_names]
devs = [d for r in rings for d in r.devs if d]
if region_filter is not None:
devs = [d for d in devs if d['region'] == region_filter]
if zone_filter is not None:
devs = [d for d in devs if d['zone'] == zone_filter]
return set((d['ip'], d['port']) for d in devs)
    def get_ringmd5(self, hosts, swift_dir):
        """
        Compare ring md5sum's with those on remote host

        For the object server type every object*.ring.gz in swift_dir
        (i.e. all storage policies) is hashed; otherwise only
        <server_type>.ring.gz. A host counts as a match only when every
        ring it reports for this server type matches the local md5sum.

        :param hosts: set of hosts to check. in the format of:
            set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
        :param swift_dir: The local directory with the ring files.
        """
        matches = 0
        errors = 0
        # collect the local ring file names to hash
        ring_names = set()
        if self.server_type == 'object':
            for ring_name in os.listdir(swift_dir):
                if ring_name.startswith('object') and \
                        ring_name.endswith('.ring.gz'):
                    ring_names.add(ring_name)
        else:
            ring_name = '%s.ring.gz' % self.server_type
            ring_names.add(ring_name)
        # ring file name -> local md5sum
        rings = {}
        for ring_name in ring_names:
            rings[ring_name] = md5_hash_for_file(
                os.path.join(swift_dir, ring_name))
        recon = Scout("ringmd5", self.verbose, self.suppress_errors,
                      self.timeout)
        print("[%s] Checking ring md5sums" % self._ptime())
        if self.verbose:
            for ring_file, ring_sum in rings.items():
                print("-> On disk %s md5sum: %s" % (ring_file, ring_sum))
        for url, response, status, ts_start, ts_end in self.pool.imap(
                recon.scout, hosts):
            if status != 200:
                errors = errors + 1
                continue
            success = True
            # response maps remote ring file path -> md5sum
            for remote_ring_file, remote_ring_sum in response.items():
                remote_ring_name = os.path.basename(remote_ring_file)
                # skip rings belonging to other server types
                if not remote_ring_name.startswith(self.server_type):
                    continue
                ring_sum = rings.get(remote_ring_name, None)
                if remote_ring_sum != ring_sum:
                    success = False
                    print("!! %s (%s => %s) doesn't match on disk md5sum" % (
                        url, remote_ring_name, remote_ring_sum))
            if not success:
                errors += 1
                continue
            matches += 1
            if self.verbose:
                print("-> %s matches." % url)
        print("%s/%s hosts matched, %s error[s] while checking hosts." % (
            matches, len(hosts), errors))
        print("=" * 79)
def get_swiftconfmd5(self, hosts, printfn=print):
"""
Compare swift.conf md5sum with that on remote hosts
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
:param printfn: function to print text; defaults to print()
"""
matches = 0
errors = 0
conf_sum = md5_hash_for_file(SWIFT_CONF_FILE)
recon = Scout("swiftconfmd5", self.verbose, self.suppress_errors,
self.timeout)
printfn("[%s] Checking swift.conf md5sum" % self._ptime())
if self.verbose:
printfn("-> On disk swift.conf md5sum: %s" % (conf_sum,))
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
if response[SWIFT_CONF_FILE] != conf_sum:
printfn("!! %s (%s) doesn't match on disk md5sum" %
(url, response[SWIFT_CONF_FILE]))
else:
matches = matches + 1
if self.verbose:
printfn("-> %s matches." % url)
else:
errors = errors + 1
printfn("%s/%s hosts matched, %s error[s] while checking hosts."
% (matches, len(hosts), errors))
printfn("=" * 79)
def async_check(self, hosts):
"""
Obtain and print async pending statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
scan = {}
recon = Scout("async", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking async pendings" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
scan[url] = response['async_pending']
stats = self._gen_stats(scan.values(), 'async_pending')
if stats['reported'] > 0:
self._print_stats(stats)
else:
print("[async_pending] - No hosts returned valid data.")
print("=" * 79)
def driveaudit_check(self, hosts):
"""
Obtain and print drive audit error statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)]
"""
scan = {}
recon = Scout("driveaudit", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking drive-audit errors" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
scan[url] = response['drive_audit_errors']
stats = self._gen_stats(scan.values(), 'drive_audit_errors')
if stats['reported'] > 0:
self._print_stats(stats)
else:
print("[drive_audit_errors] - No hosts returned valid data.")
print("=" * 79)
def umount_check(self, hosts):
"""
Check for and print unmounted drives
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
unmounted = {}
errors = {}
recon = Scout("unmounted", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Getting unmounted drives from %s hosts..." %
(self._ptime(), len(hosts)))
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
unmounted[url] = []
errors[url] = []
for i in response:
if not isinstance(i['mounted'], bool):
errors[url].append(i['device'])
else:
unmounted[url].append(i['device'])
for host in unmounted:
node = urlparse(host).netloc
for entry in unmounted[host]:
print("Not mounted: %s on %s" % (entry, node))
for host in errors:
node = urlparse(host).netloc
for entry in errors[host]:
print("Device errors: %s on %s" % (entry, node))
print("=" * 79)
def server_type_check(self, hosts):
"""
Check for server types on the ring
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
errors = {}
recon = Scout("server_type_check", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Validating server type '%s' on %s hosts..." %
(self._ptime(), self.server_type, len(hosts)))
for url, response, status in self.pool.imap(
recon.scout_server_type, hosts):
if status == 200:
if response != self.server_type + '-server':
errors[url] = response
print("%s/%s hosts ok, %s error[s] while checking hosts." % (
len(hosts) - len(errors), len(hosts), len(errors)))
for host in errors:
print("Invalid: %s is %s" % (host, errors[host]))
print("=" * 79)
def expirer_check(self, hosts):
"""
Obtain and print expirer statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = {'object_expiration_pass': [], 'expired_last_pass': []}
recon = Scout("expirer/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on expirers" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats['object_expiration_pass'].append(
response.get('object_expiration_pass'))
stats['expired_last_pass'].append(
response.get('expired_last_pass'))
for k in stats:
if stats[k]:
computed = self._gen_stats(stats[k], name=k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[%s] - No hosts returned valid data." % k)
else:
print("[%s] - No hosts returned valid data." % k)
print("=" * 79)
def _calculate_least_and_most_recent(self, url_time_data):
"""calulate and print the least and most recent urls
Given a list of url and time tuples calulate the most and least
recent timings and print it out.
:param url_time_data: list of url and time tuples: [(url, time_), ..]
"""
least_recent_time = 9999999999
least_recent_url = None
most_recent_time = 0
most_recent_url = None
for url, last in url_time_data:
if last is None:
continue
if last < least_recent_time:
least_recent_time = last
least_recent_url = url
if last > most_recent_time:
most_recent_time = last
most_recent_url = url
if least_recent_url is not None:
host = urlparse(least_recent_url).netloc
if not least_recent_time:
print('Oldest completion was NEVER by %s.' % host)
else:
elapsed = time.time() - least_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Oldest completion was %s (%d %s ago) by %s.' % (
self._ptime(least_recent_time),
elapsed, elapsed_unit, host))
if most_recent_url is not None:
host = urlparse(most_recent_url).netloc
elapsed = time.time() - most_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Most recent completion was %s (%d %s ago) by %s.' % (
self._ptime(most_recent_time),
elapsed, elapsed_unit, host))
def reconstruction_check(self, hosts):
"""
Obtain and print reconstructon statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6020), ('127.0.0.2', 6030)])
"""
stats = []
last_stats = []
recon = Scout("reconstruction/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on reconstructors" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats.append(response.get('object_reconstruction_time'))
last = response.get('object_reconstruction_last', 0)
last_stats.append((url, last))
if stats:
computed = self._gen_stats(stats,
name='object_reconstruction_time')
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[object_reconstruction_time] - No hosts returned "
"valid data.")
else:
print("[object_reconstruction_time] - No hosts returned "
"valid data.")
self._calculate_least_and_most_recent(last_stats)
print("=" * 79)
    def replication_check(self, hosts):
        """
        Obtain and print replication statistics

        :param hosts: set of hosts to check. in the format of:
            set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
        """
        # per-stat lists of values collected across hosts
        stats = {'replication_time': [], 'failure': [], 'success': [],
                 'attempted': []}
        # (url, last completion timestamp) pairs for the recency report
        last_stats = []
        recon = Scout("replication/%s" % self.server_type, self.verbose,
                      self.suppress_errors, self.timeout)
        print("[%s] Checking on replication" % self._ptime())
        for url, response, status, ts_start, ts_end in self.pool.imap(
                recon.scout, hosts):
            if status == 200:
                # older object servers report the legacy
                # 'object_replication_time' key instead of the generic one
                stats['replication_time'].append(
                    response.get('replication_time',
                                 response.get('object_replication_time', 0)))
                repl_stats = response.get('replication_stats')
                if repl_stats:
                    for stat_key in ['attempted', 'failure', 'success']:
                        stats[stat_key].append(repl_stats.get(stat_key))
                # same legacy-key fallback for the last completion time
                last = response.get('replication_last',
                                    response.get('object_replication_last', 0))
                last_stats.append((url, last))
        for k in stats:
            if stats[k]:
                if k != 'replication_time':
                    # prefix the generic counters so the report reads
                    # e.g. 'replication_success'
                    computed = self._gen_stats(stats[k],
                                               name='replication_%s' % k)
                else:
                    computed = self._gen_stats(stats[k], name=k)
                if computed['reported'] > 0:
                    self._print_stats(computed)
                else:
                    print("[%s] - No hosts returned valid data." % k)
            else:
                print("[%s] - No hosts returned valid data." % k)
        self._calculate_least_and_most_recent(last_stats)
        print("=" * 79)
def updater_check(self, hosts):
"""
Obtain and print updater statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = []
recon = Scout("updater/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking updater times" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
if response['%s_updater_sweep' % self.server_type]:
stats.append(response['%s_updater_sweep' %
self.server_type])
if len(stats) > 0:
computed = self._gen_stats(stats, name='updater_last_sweep')
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[updater_last_sweep] - No hosts returned valid data.")
else:
print("[updater_last_sweep] - No hosts returned valid data.")
print("=" * 79)
    def auditor_check(self, hosts):
        """
        Obtain and print obj auditor statistics

        :param hosts: set of hosts to check. in the format of:
            set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
        """
        # raw recon responses keyed by url
        scan = {}
        # recon cache key names for this server type's auditor
        adone = '%s_auditor_pass_completed' % self.server_type
        afail = '%s_audits_failed' % self.server_type
        apass = '%s_audits_passed' % self.server_type
        asince = '%s_audits_since' % self.server_type
        recon = Scout("auditor/%s" % self.server_type, self.verbose,
                      self.suppress_errors, self.timeout)
        print("[%s] Checking auditor stats" % self._ptime())
        for url, response, status, ts_start, ts_end in self.pool.imap(
                recon.scout, hosts):
            if status == 200:
                scan[url] = response
        if len(scan) < 1:
            print("Error: No hosts available")
            return
        stats = {}
        # collect each counter across hosts, dropping hosts that
        # reported None for it
        stats[adone] = [scan[i][adone] for i in scan
                        if scan[i][adone] is not None]
        stats[afail] = [scan[i][afail] for i in scan
                        if scan[i][afail] is not None]
        stats[apass] = [scan[i][apass] for i in scan
                        if scan[i][apass] is not None]
        stats[asince] = [scan[i][asince] for i in scan
                        if scan[i][asince] is not None]
        for k in stats:
            if len(stats[k]) < 1:
                print("[%s] - No hosts returned valid data." % k)
            else:
                # asince values are timestamps; they get their own
                # oldest/newest report below instead of min/max/avg stats
                if k != asince:
                    computed = self._gen_stats(stats[k], k)
                    if computed['reported'] > 0:
                        self._print_stats(computed)
        if len(stats[asince]) >= 1:
            low = min(stats[asince])
            high = max(stats[asince])
            total = sum(stats[asince])
            # average is itself a timestamp, rendered via _ptime below
            average = total / len(stats[asince])
            print('[last_pass] oldest: %s, newest: %s, avg: %s' %
                  (self._ptime(low), self._ptime(high), self._ptime(average)))
        print("=" * 79)
def nested_get_value(self, key, recon_entry):
"""
Generator that yields all values for given key in a recon cache entry.
This is for use with object auditor recon cache entries. If the
object auditor has run in parallel, the recon cache will have entries
of the form: {'object_auditor_stats_ALL': { 'disk1': {..},
'disk2': {..},
'disk3': {..},
...}}
If the object auditor hasn't run in parallel, the recon cache will have
entries of the form: {'object_auditor_stats_ALL': {...}}.
The ZBF auditor doesn't run in parallel. However, if a subset of
devices is selected for auditing, the recon cache will have an entry
of the form: {'object_auditor_stats_ZBF': { 'disk1disk2..diskN': {}}
We use this generator to find all instances of a particular key in
these multi-level dictionaries.
"""
for k, v in recon_entry.items():
if isinstance(v, dict):
for value in self.nested_get_value(key, v):
yield value
if k == key:
yield v
    def object_auditor_check(self, hosts):
        """
        Obtain and print obj auditor statistics

        :param hosts: set of hosts to check. in the format of:
            set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
        """
        # per-host recon entries for the regular (ALL) and zero-byte-file
        # (ZBF) auditors
        all_scan = {}
        zbf_scan = {}
        # stat keys published by the object auditor
        atime = 'audit_time'
        bprocessed = 'bytes_processed'
        passes = 'passes'
        errors = 'errors'
        quarantined = 'quarantined'
        recon = Scout("auditor/object", self.verbose, self.suppress_errors,
                      self.timeout)
        print("[%s] Checking auditor stats " % self._ptime())
        for url, response, status, ts_start, ts_end in self.pool.imap(
                recon.scout, hosts):
            if status == 200:
                if response['object_auditor_stats_ALL']:
                    all_scan[url] = response['object_auditor_stats_ALL']
                if response['object_auditor_stats_ZBF']:
                    zbf_scan[url] = response['object_auditor_stats_ZBF']
        if len(all_scan) > 0:
            stats = {}
            # entries may be nested one level per disk when the auditor
            # ran in parallel; nested_get_value flattens either layout
            stats[atime] = [sum(self.nested_get_value(atime, all_scan[i]))
                            for i in all_scan]
            stats[bprocessed] = [sum(self.nested_get_value(bprocessed,
                                 all_scan[i])) for i in all_scan]
            stats[passes] = [sum(self.nested_get_value(passes, all_scan[i]))
                             for i in all_scan]
            stats[errors] = [sum(self.nested_get_value(errors, all_scan[i]))
                             for i in all_scan]
            stats[quarantined] = [sum(self.nested_get_value(quarantined,
                                  all_scan[i])) for i in all_scan]
            for k in stats:
                # drop hosts that reported None for this counter
                if None in stats[k]:
                    stats[k] = [x for x in stats[k] if x is not None]
                if len(stats[k]) < 1:
                    print("[Auditor %s] - No hosts returned valid data." % k)
                else:
                    computed = self._gen_stats(stats[k],
                                               name='ALL_%s_last_path' % k)
                    if computed['reported'] > 0:
                        self._print_stats(computed)
                    else:
                        print("[ALL_auditor] - No hosts returned valid data.")
        else:
            print("[ALL_auditor] - No hosts returned valid data.")
        if len(zbf_scan) > 0:
            # same aggregation for the ZBF auditor (which has no 'passes')
            stats = {}
            stats[atime] = [sum(self.nested_get_value(atime, zbf_scan[i]))
                            for i in zbf_scan]
            stats[bprocessed] = [sum(self.nested_get_value(bprocessed,
                                 zbf_scan[i])) for i in zbf_scan]
            stats[errors] = [sum(self.nested_get_value(errors, zbf_scan[i]))
                             for i in zbf_scan]
            stats[quarantined] = [sum(self.nested_get_value(quarantined,
                                  zbf_scan[i])) for i in zbf_scan]
            for k in stats:
                if None in stats[k]:
                    stats[k] = [x for x in stats[k] if x is not None]
                if len(stats[k]) < 1:
                    print("[Auditor %s] - No hosts returned valid data." % k)
                else:
                    computed = self._gen_stats(stats[k],
                                               name='ZBF_%s_last_path' % k)
                    if computed['reported'] > 0:
                        self._print_stats(computed)
                    else:
                        print("[ZBF_auditor] - No hosts returned valid data.")
        else:
            print("[ZBF_auditor] - No hosts returned valid data.")
        print("=" * 79)
def sharding_check(self, hosts):
"""
Obtain and print sharding statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6221), ('127.0.0.2', 6231)])
"""
stats = {'sharding_time': [],
'attempted': [], 'failure': [], 'success': []}
recon = Scout("sharding", self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on sharders" % self._ptime())
least_recent_time = 9999999999
least_recent_url = None
most_recent_time = 0
most_recent_url = None
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats['sharding_time'].append(response.get('sharding_time', 0))
shard_stats = response.get('sharding_stats')
if shard_stats:
# Sharding has a ton more stats, like "no_change".
# Not sure if we need them at all, or maybe for -v.
for stat_key in ['attempted', 'failure', 'success']:
stats[stat_key].append(shard_stats.get(stat_key))
last = response.get('sharding_last', 0)
if last is None:
continue
if last < least_recent_time:
least_recent_time = last
least_recent_url = url
if last > most_recent_time:
most_recent_time = last
most_recent_url = url
for k in stats:
if stats[k]:
computed = self._gen_stats(stats[k], name=k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[%s] - No hosts returned valid data." % k)
else:
print("[%s] - No hosts returned valid data." % k)
if least_recent_url is not None:
host = urlparse(least_recent_url).netloc
if not least_recent_time:
print('Oldest completion was NEVER by %s.' % host)
else:
elapsed = time.time() - least_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Oldest completion was %s (%d %s ago) by %s.' % (
self._ptime(least_recent_time),
elapsed, elapsed_unit, host))
if most_recent_url is not None:
host = urlparse(most_recent_url).netloc
elapsed = time.time() - most_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Most recent completion was %s (%d %s ago) by %s.' % (
self._ptime(most_recent_time),
elapsed, elapsed_unit, host))
print("=" * 79)
def load_check(self, hosts):
"""
Obtain and print load average statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
load1 = {}
load5 = {}
load15 = {}
recon = Scout("load", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking load averages" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
load1[url] = response['1m']
load5[url] = response['5m']
load15[url] = response['15m']
stats = {"1m": load1, "5m": load5, "15m": load15}
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(),
name='%s_load_avg' % item)
self._print_stats(computed)
else:
print("[%s_load_avg] - No hosts returned valid data." % item)
print("=" * 79)
def quarantine_check(self, hosts):
"""
Obtain and print quarantine statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
objq = {}
conq = {}
acctq = {}
stats = {}
recon = Scout("quarantined", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking quarantine" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
objq[url] = response['objects']
conq[url] = response['containers']
acctq[url] = response['accounts']
for key in response.get('policies', {}):
pkey = "objects_%s" % key
stats.setdefault(pkey, {})
stats[pkey][url] = response['policies'][key]['objects']
stats.update({"objects": objq, "containers": conq, "accounts": acctq})
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(),
name='quarantined_%s' % item)
self._print_stats(computed)
else:
print("No hosts returned valid data.")
print("=" * 79)
def socket_usage(self, hosts):
"""
Obtain and print /proc/net/sockstat statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
inuse4 = {}
mem = {}
inuse6 = {}
timewait = {}
orphan = {}
recon = Scout("sockstat", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking socket usage" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
inuse4[url] = response['tcp_in_use']
mem[url] = response['tcp_mem_allocated_bytes']
inuse6[url] = response.get('tcp6_in_use', 0)
timewait[url] = response['time_wait']
orphan[url] = response['orphan']
stats = {"tcp_in_use": inuse4, "tcp_mem_allocated_bytes": mem,
"tcp6_in_use": inuse6, "time_wait": timewait,
"orphan": orphan}
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(), item)
self._print_stats(computed)
else:
print("No hosts returned valid data.")
print("=" * 79)
    def disk_usage(self, hosts, top=0, lowest=0, human_readable=False):
        """
        Obtain and print disk usage statistics

        :param hosts: set of hosts to check. in the format of:
            set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
        :param top: number of fullest drives to list individually
            (0 = don't list any)
        :param lowest: number of emptiest drives to list individually
            (0 = don't list any)
        :param human_readable: if true, print space totals with kB/MB/...
            suffixes instead of raw byte counts
        """
        stats = {}
        highs = []
        lows = []
        raw_total_used = []
        raw_total_avail = []
        # histogram: integer used-percentage -> drive count
        percents = {}
        # seed the leaderboards so append+sort+pop below keeps exactly
        # `top` (resp. `lowest`) entries at all times
        top_percents = [(None, 0)] * top
        low_percents = [(None, 100)] * lowest
        recon = Scout("diskusage", self.verbose, self.suppress_errors,
                      self.timeout)
        # We want to only query each host once, but we don't care
        # which of the available ports we use. So we filter hosts by
        # constructing a host->port dictionary, since the dict
        # constructor ensures each key is unique, thus each host
        # appears only once in filtered_hosts.
        filtered_hosts = set(dict(hosts).items())
        print("[%s] Checking disk usage now" % self._ptime())
        for url, response, status, ts_start, ts_end in self.pool.imap(
                recon.scout, filtered_hosts):
            if status == 200:
                hostusage = []
                for entry in response:
                    # a non-bool 'mounted' value is an error string from
                    # the remote host
                    if not isinstance(entry['mounted'], bool):
                        print("-> %s/%s: Error: %s" % (url, entry['device'],
                                                       entry['mounted']))
                    elif entry['mounted']:
                        used = float(entry['used']) / float(entry['size']) \
                            * 100.0
                        raw_total_used.append(entry['used'])
                        raw_total_avail.append(entry['avail'])
                        hostusage.append(round(used, 2))
                        for ident, oused in top_percents:
                            if oused < used:
                                top_percents.append(
                                    (url + ' ' + entry['device'], used))
                                top_percents.sort(key=lambda x: -x[1])
                                top_percents.pop()
                                break
                        for ident, oused in low_percents:
                            if oused > used:
                                low_percents.append(
                                    (url + ' ' + entry['device'], used))
                                low_percents.sort(key=lambda x: x[1])
                                low_percents.pop()
                                break
                stats[url] = hostusage
        for url in stats:
            if len(stats[url]) > 0:
                # get per host hi/los for another day
                low = min(stats[url])
                high = max(stats[url])
                highs.append(high)
                lows.append(low)
                for percent in stats[url]:
                    percents[int(percent)] = percents.get(int(percent), 0) + 1
            else:
                print("-> %s: Error. No drive info available." % url)
        if len(lows) > 0:
            low = min(lows)
            high = max(highs)
            # dist graph shamelessly stolen from https://github.com/gholt/tcod
            print("Distribution Graph:")
            # scale the widest bar to 69 columns
            mul = 69.0 / max(percents.values())
            for percent in sorted(percents):
                print('% 3d%%%5d %s' % (percent, percents[percent],
                                        '*' * int(percents[percent] * mul)))
            raw_used = sum(raw_total_used)
            raw_avail = sum(raw_total_avail)
            raw_total = raw_used + raw_avail
            avg_used = 100.0 * raw_used / raw_total
            if human_readable:
                raw_used = size_suffix(raw_used)
                raw_avail = size_suffix(raw_avail)
                raw_total = size_suffix(raw_total)
            print("Disk usage: space used: %s of %s" % (raw_used, raw_total))
            print("Disk usage: space free: %s of %s" % (raw_avail, raw_total))
            print("Disk usage: lowest: %s%%, highest: %s%%, avg: %s%%" %
                  (low, high, avg_used))
        else:
            print("No hosts returned valid data.")
        print("=" * 79)
        if top_percents:
            print('TOP %s' % top)
            for ident, used in top_percents:
                if ident:
                    url, device = ident.split()
                    host = urlparse(url).netloc.split(':')[0]
                    print('%.02f%% %s' % (used, '%-15s %s' % (host, device)))
        if low_percents:
            print('LOWEST %s' % lowest)
            for ident, used in low_percents:
                if ident:
                    url, device = ident.split()
                    host = urlparse(url).netloc.split(':')[0]
                    print('%.02f%% %s' % (used, '%-15s %s' % (host, device)))
def time_check(self, hosts, jitter=0.0):
"""
Check a time synchronization of hosts with current time
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
:param jitter: Maximal allowed time jitter
"""
jitter = abs(jitter)
matches = 0
errors = 0
recon = Scout("time", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking time-sync" % self._ptime())
for url, ts_remote, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status != 200:
errors = errors + 1
continue
if (ts_remote + jitter < ts_start or ts_remote - jitter > ts_end):
diff = abs(ts_end - ts_remote)
ts_end_f = self._ptime(ts_end)
ts_remote_f = self._ptime(ts_remote)
print("!! %s current time is %s, but remote is %s, "
"differs by %.4f sec" % (
url,
ts_end_f,
ts_remote_f,
diff))
continue
matches += 1
if self.verbose:
print("-> %s matches." % url)
print("%s/%s hosts matched, %s error[s] while checking hosts." % (
matches, len(hosts), errors))
print("=" * 79)
def version_check(self, hosts):
"""
Check OS Swift version of hosts. Inform if differs.
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
versions = set()
errors = 0
print("[%s] Checking versions" % self._ptime())
recon = Scout("version", self.verbose, self.suppress_errors,
self.timeout)
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status != 200:
errors = errors + 1
continue
versions.add(response['version'])
if self.verbose:
print("-> %s installed version %s" % (
url, response['version']))
if not len(versions):
print("No hosts returned valid data.")
elif len(versions) == 1:
print("Versions matched (%s), "
"%s error[s] while checking hosts." % (
versions.pop(), errors))
else:
print("Versions not matched (%s), "
"%s error[s] while checking hosts." % (
", ".join(sorted(versions)), errors))
print("=" * 79)
def _get_ring_names(self, policy=None):
"""
Retrieve name of ring files.
If no policy is passed and the server type is object,
the ring names of all storage-policies are retrieved.
:param policy: name or index of storage policy, only applicable
with server_type==object.
:returns: list of ring names.
"""
if self.server_type == 'object':
ring_names = [p.ring_name for p in POLICIES if (
p.name == policy or not policy or (
policy.isdigit() and int(policy) == int(p) or
(isinstance(policy, string_types)
and policy in p.aliases)))]
else:
ring_names = [self.server_type]
return ring_names
def main(self):
    """
    Retrieve and report cluster info from hosts running recon middleware.
    """
    print("=" * 79)
    usage = '''
    usage: %prog <server_type> [<server_type> [<server_type>]]
    [-v] [--suppress] [-a] [-r] [-u] [-d] [-R]
    [-l] [-T] [--md5] [--auditor] [--updater] [--expirer] [--sockstat]
    [--human-readable]

    <server_type>\taccount|container|object
    Defaults to object server.

    ex: %prog container -l --auditor
    '''
    args = optparse.OptionParser(usage)
    # -- output / verbosity --
    args.add_option('--verbose', '-v', action="store_true",
                    help="Print verbose info")
    args.add_option('--suppress', action="store_true",
                    help="Suppress most connection related errors")
    # -- per-daemon stat checks --
    args.add_option('--async', '-a',
                    action="store_true", dest="async_check",
                    help="Get async stats")
    args.add_option('--replication', '-r', action="store_true",
                    help="Get replication stats")
    args.add_option('--reconstruction', '-R', action="store_true",
                    help="Get reconstruction stats")
    args.add_option('--auditor', action="store_true",
                    help="Get auditor stats")
    args.add_option('--updater', action="store_true",
                    help="Get updater stats")
    args.add_option('--expirer', action="store_true",
                    help="Get expirer stats")
    args.add_option('--sharding', action="store_true",
                    help="Get sharding stats")
    # -- host / cluster level checks --
    args.add_option('--unmounted', '-u', action="store_true",
                    help="Check cluster for unmounted devices")
    args.add_option('--diskusage', '-d', action="store_true",
                    help="Get disk usage stats")
    args.add_option('--human-readable', action="store_true",
                    help="Use human readable suffix for disk usage stats")
    args.add_option('--loadstats', '-l', action="store_true",
                    help="Get cluster load average stats")
    args.add_option('--quarantined', '-q', action="store_true",
                    help="Get cluster quarantine stats")
    args.add_option('--validate-servers', action="store_true",
                    help="Validate servers on the ring")
    args.add_option('--md5', action="store_true",
                    help="Get md5sum of servers ring and compare to "
                    "local copy")
    args.add_option('--sockstat', action="store_true",
                    help="Get cluster socket usage stats")
    args.add_option('--driveaudit', action="store_true",
                    help="Get drive audit error stats")
    args.add_option('--time', '-T', action="store_true",
                    help="Check time synchronization")
    args.add_option('--jitter', type="float", default=0.0,
                    help="Maximal allowed time jitter")
    args.add_option('--swift-versions', action="store_true",
                    help="Check swift versions")
    args.add_option('--top', type='int', metavar='COUNT', default=0,
                    help='Also show the top COUNT entries in rank order.')
    # NOTE: the backslash continuation keeps the help text on one logical
    # string; the continuation line must stay flush-left so no extra
    # whitespace leaks into the help message.
    args.add_option('--lowest', type='int', metavar='COUNT', default=0,
                    help='Also show the lowest COUNT entries in rank \
order.')
    args.add_option('--all', action="store_true",
                    help="Perform all checks. Equal to \t\t\t-arRudlqT "
                    "--md5 --sockstat --auditor --updater --expirer "
                    "--driveaudit --validate-servers --swift-versions")
    # -- host selection --
    args.add_option('--region', type="int",
                    help="Only query servers in specified region")
    args.add_option('--zone', '-z', type="int",
                    help="Only query servers in specified zone")
    args.add_option('--timeout', '-t', type="int", metavar="SECONDS",
                    help="Time to wait for a response from a server",
                    default=5)
    args.add_option('--swiftdir', default="/etc/swift",
                    help="Default = /etc/swift")
    args.add_option('--policy', '-p',
                    help='Only query object servers in specified '
                    'storage policy (specified as name or index).')
    options, arguments = args.parse_args()

    # No args at all, or more server types than exist -> show help.
    if len(sys.argv) <= 1 or len(arguments) > len(self.check_types):
        args.print_help()
        sys.exit(0)

    if arguments:
        arguments = set(arguments)
        if arguments.issubset(self.check_types):
            server_types = arguments
        else:
            print("Invalid Server Type")
            args.print_help()
            sys.exit(1)
    else:  # default
        server_types = ['object']

    swift_dir = options.swiftdir
    # Re-read storage policies if the swift dir changed.
    if set_swift_dir(swift_dir):
        reload_storage_policies()

    self.verbose = options.verbose
    self.suppress_errors = options.suppress
    self.timeout = options.timeout

    for server_type in server_types:
        self.server_type = server_type
        ring_names = self._get_ring_names(options.policy)
        if not ring_names:
            print('Invalid Storage Policy: %s' % options.policy)
            args.print_help()
            sys.exit(0)
        hosts = self.get_hosts(options.region, options.zone,
                               swift_dir, ring_names)
        print("--> Starting reconnaissance on %s hosts (%s)" %
              (len(hosts), self.server_type))
        print("=" * 79)
        if options.all:
            # Type-specific checks first, then the checks common to all
            # server types.
            if self.server_type == 'object':
                self.async_check(hosts)
                self.object_auditor_check(hosts)
                self.updater_check(hosts)
                self.expirer_check(hosts)
                self.reconstruction_check(hosts)
            elif self.server_type == 'container':
                self.auditor_check(hosts)
                self.updater_check(hosts)
                self.sharding_check(hosts)
            elif self.server_type == 'account':
                self.auditor_check(hosts)
            self.replication_check(hosts)
            self.umount_check(hosts)
            self.load_check(hosts)
            self.disk_usage(hosts, options.top, options.lowest,
                            options.human_readable)
            self.get_ringmd5(hosts, swift_dir)
            self.get_swiftconfmd5(hosts)
            self.quarantine_check(hosts)
            self.socket_usage(hosts)
            self.server_type_check(hosts)
            self.driveaudit_check(hosts)
            self.time_check(hosts, options.jitter)
            self.version_check(hosts)
        else:
            # Run only the explicitly requested checks; several are only
            # valid for a specific server type and print an error
            # otherwise.
            if options.async_check:
                if self.server_type == 'object':
                    self.async_check(hosts)
                else:
                    print("Error: Can't check asyncs on non object "
                          "servers.")
                    print("=" * 79)
            if options.unmounted:
                self.umount_check(hosts)
            if options.replication:
                self.replication_check(hosts)
            if options.auditor:
                if self.server_type == 'object':
                    self.object_auditor_check(hosts)
                else:
                    self.auditor_check(hosts)
            if options.updater:
                if self.server_type == 'account':
                    print("Error: Can't check updaters on account "
                          "servers.")
                    print("=" * 79)
                else:
                    self.updater_check(hosts)
            if options.expirer:
                if self.server_type == 'object':
                    self.expirer_check(hosts)
                else:
                    print("Error: Can't check expirer on non object "
                          "servers.")
                    print("=" * 79)
            if options.sharding:
                if self.server_type == 'container':
                    self.sharding_check(hosts)
                else:
                    print("Error: Can't check sharding on non container "
                          "servers.")
                    print("=" * 79)
            if options.reconstruction:
                if self.server_type == 'object':
                    self.reconstruction_check(hosts)
                else:
                    print("Error: Can't check reconstruction stats on "
                          "non object servers.")
                    print("=" * 79)
            if options.validate_servers:
                self.server_type_check(hosts)
            if options.loadstats:
                self.load_check(hosts)
            if options.diskusage:
                self.disk_usage(hosts, options.top, options.lowest,
                                options.human_readable)
            if options.md5:
                self.get_ringmd5(hosts, swift_dir)
                self.get_swiftconfmd5(hosts)
            if options.quarantined:
                self.quarantine_check(hosts)
            if options.sockstat:
                self.socket_usage(hosts)
            if options.driveaudit:
                self.driveaudit_check(hosts)
            if options.time:
                self.time_check(hosts, options.jitter)
            if options.swift_versions:
                self.version_check(hosts)
def main():
    """CLI entry point: run SwiftRecon, exiting quietly on Ctrl-C."""
    try:
        SwiftRecon().main()
    except KeyboardInterrupt:
        print('\n')
| apache-2.0 |
ericaro/sbr | cmd/formatCmd.go | 641 | package cmd
import (
"flag"
"fmt"
"os"
"github.com/ericaro/sbr/sbr"
)
// FormatCmd is the "format" subcommand: it rewrites the workspace's
// .sbr dependency file in canonical form.
type FormatCmd struct {
	legacy *bool // --legacy: emit the legacy output format
}
// Flags registers the format command's flags on fs.
func (c *FormatCmd) Flags(fs *flag.FlagSet) {
	c.legacy = fs.Bool("legacy", false, "format the output using the legacy format")
}
// Run locates the enclosing workspace, reads the current dependency set,
// and rewrites the sbr file with canonical formatting.
//
// NOTE(review): the --legacy flag is parsed but never consulted here —
// confirm whether sbr.WriteTo is supposed to receive it.
func (c *FormatCmd) Run(args []string) {
	// use wd by default
	workspace, err := sbr.FindWorkspace(os.Getwd())
	if err != nil {
		exit(CodeNoWorkingDir, "%v", err)
	}

	current, err := workspace.Read()
	if err != nil {
		// Bug fix: this error was previously ignored. Proceeding would
		// truncate the sbr file below and then write a half-read (or
		// empty) dependency set over it.
		fmt.Printf("Error Cannot read dependency file: %s", err.Error())
		os.Exit(-1)
	}

	f, err := os.Create(workspace.Sbrfile())
	if err != nil {
		fmt.Printf("Error Cannot write dependency file: %s", err.Error())
		os.Exit(-1)
	}
	defer f.Close()

	sbr.WriteTo(f, current)
}
| apache-2.0 |
superphy/backend | app/routes/ra_statuses.py | 4390 | import redis
import cPickle as pickle
from ast import literal_eval
from flask import Blueprint, request, jsonify, current_app
from routes.job_utils import fetch_job
from middleware.models import load, Pipeline
bp_ra_statuses = Blueprint('reactapp_statuses', __name__)
# new to 4.2.0
def merge_job_results(jobs_dict, redis_connection):
    '''
    Appends all results together and returns it.
    We don't do this while retrieving job statuses as in most checks, the
    jobs won't all be finished.
    Note: written for lists atm. (ie. only for Subtyping)
    '''
    merged = []
    for job_key in jobs_dict:
        job = fetch_job(job_key, redis_connection)
        if not job.is_finished:
            # Caller's contract: only invoke this once every job is done.
            return 'ERROR: merge_job_results() was called when all jobs werent complete', 415
        result = job.result
        # Only list results are merged; Quality Control / ID Reservation
        # results (non-lists) are deliberately skipped.
        if type(result) is list:
            merged += result
    return merged
# new to 4.2.0
def job_status_reactapp_grouped(job_id, redis_connection):
    '''
    Retrieves a dictionary of job_id from Redis (not RQ) and checks
    status of all jobs
    Returns the complete result only if all jobs are finished

    :param job_id: key of the grouped-jobs dict stored in Redis.
    :param redis_connection: live redis connection used for both the
        dict lookup and the RQ job fetches.
    :return: a Flask JSON response: the failed job's exc_info, "pending",
        or the merged results of all finished jobs.
    '''
    # Retrieves jobs_dict
    jobs_dict = redis_connection.get(job_id)
    # redis-py returns a string by default
    # we cast this using ast.literal_eval()
    # the alt. is to set a response callback via redis_connection.set_response_callback()
    jobs_dict = literal_eval(jobs_dict)
    # print jobs_dict
    # if any job in a grouped job fails, immediately return
    # otherwise, check that all jobs are finished (pending = False)
    # before merging the job results
    pending = False
    for key in jobs_dict:
        # keys may come back as unicode; RQ expects plain str ids
        key = str(key)
        # print key
        job = fetch_job(key, redis_connection)
        if job.is_failed:
            print "job_status_reactapp_grouped(): job failed " + job_id
            return jsonify(job.exc_info)
        elif not job.is_finished:
            pending = True
    if pending:
        return jsonify("pending")
    else:
        # if you've gotten to this point, then all jobs are finished
        return jsonify(merge_job_results(jobs_dict, redis_connection))
def _status_pipeline(pipeline_id):
    """
    Checks the status of a pipeline. Returns "pending", the exc_info if
    failed, or the result.

    :param pipeline_id: id of the persisted models.Pipeline to inspect.
    :return: a Flask response.
    """
    # Retrieve the models.Pipeline instance.
    pipeline = load(pipeline_id)
    assert isinstance(pipeline, Pipeline)
    # complete() normally returns a bool; on failure it returns the
    # exc_info string instead.
    complete = pipeline.complete()
    if not isinstance(complete, bool):
        # Something failed and we have an exc_info.
        return jsonify(complete)
    if complete:
        # Everything finished successfully.
        return pipeline.to_json()
    # Some job in the pipeline is still pending.
    return jsonify("pending")
@bp_ra_statuses.route('/api/v0/results/<job_id>')
def job_status_reactapp(job_id):
    '''
    This provides an endpoint for the reactapp to poll results. We leave
    job_status() intact to maintain backwards compatibility with the
    AngularJS app.

    :param job_id: plain RQ job id, or a "blob..." grouped-jobs key, or a
        "pipeline..." id — the prefix selects the handler.
    :return: a Flask response: the job result, its exc_info, or "pending".
    '''
    # Start a redis connection.
    redis_url = current_app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    # new to 4.2.0
    # check if the job_id is of the new format and should be handled diff
    if job_id.startswith('blob'):
        return job_status_reactapp_grouped(job_id, redis_connection)
    elif job_id.startswith('pipeline'):
        return _status_pipeline(job_id)
    else:
        # old code
        job = fetch_job(job_id, redis_connection)
        if job.is_finished:
            r = job.result
            # subtyping results come in the form of a list and must
            # be conv to json otherwise, you get a 500 error (isa)
            if isinstance(r, (list)):
                return jsonify(r)
            # fishers results come in the form of a df.to_json object
            # and should be returned directly
            else:
                return r
        elif job.is_failed:
            print 'job_status_reactapp(): job failed ' + job_id
            return jsonify(job.exc_info)
        else:
            return jsonify("pending")
| apache-2.0 |
Elders/Elders.Firewall | src/Elders.Firewall/IRule.cs | 113 |
namespace Elders.Firewall
{
    /// <summary>
    /// A single firewall rule that can be evaluated against an object.
    /// </summary>
    public interface IRule
    {
        /// <summary>
        /// Evaluates this rule against <paramref name="instance"/> and
        /// reports the outcome.
        /// </summary>
        RuleResult Check(object instance);
    }
}
| apache-2.0 |
Team-ESN-Hacktech2016/codes-Hacktech2016 | user/config.php | 207 | <?php
define('CONSUMER_KEY', '<your Twitter Dev app consumer key>');
define('CONSUMER_SECRET', '<your Twitter Dev app consumer secret>');
define('OAUTH_CALLBACK', '<your Twitter Dev app OAuth callback>');
?> | apache-2.0 |
AnujaK/chrome-gae-sync | extras/StoreEndpoint.java | 6699 | package com.bootsimply.sync.entity;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import javax.annotation.Nullable;
import javax.inject.Named;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.persistence.EntityNotFoundException;
import com.google.api.server.spi.config.Api;
import com.google.api.server.spi.config.ApiMethod;
import com.google.api.server.spi.config.ApiNamespace;
import com.google.api.server.spi.response.CollectionResponse;
import com.google.api.server.spi.response.UnauthorizedException;
import com.google.appengine.api.datastore.Cursor;
import com.google.appengine.api.users.User;
import com.google.appengine.datanucleus.query.JDOCursorHelper;
@Api(name = "storeendpoint", namespace = @ApiNamespace(ownerDomain = "bootsimply.com", ownerName = "bootsimply.com", packagePath = "sync.entity"))
public class StoreEndpoint {
/**
* This method lists all the entities inserted in datastore. It uses HTTP GET method and paging support.
*
* @return A CollectionResponse class containing the list of all entities persisted and a cursor to the next page.
* @throws UnauthorizedException
*/
@SuppressWarnings({ "unchecked", "unused" })
@ApiMethod(name = "listStore", scopes = { Config.EMAIL_SCOPE }, clientIds = { Config.CHROME_CLIENT_ID, Config.WEB_CLIENT_ID, Config.API_EXPLORER_CLIENT_ID })
public CollectionResponse<Store> listStore(@Nullable @Named("cursor") String cursorString, @Nullable @Named("limit") Integer limit, User user)
throws UnauthorizedException {
if (user == null) {
throw new UnauthorizedException("UnauthorizedException # User is Null.");
}
PersistenceManager mgr = null;
Cursor cursor = null;
List<Store> execute = null;
try {
mgr = getPersistenceManager();
Query query = mgr.newQuery(Store.class);
if (cursorString != null && cursorString != "") {
cursor = Cursor.fromWebSafeString(cursorString);
HashMap<String, Object> extensionMap = new HashMap<String, Object>();
extensionMap.put(JDOCursorHelper.CURSOR_EXTENSION, cursor);
query.setExtensions(extensionMap);
}
if (limit != null) {
query.setRange(0, limit);
}
execute = (List<Store>) query.execute();
cursor = JDOCursorHelper.getCursor(execute);
if (cursor != null)
cursorString = cursor.toWebSafeString();
// Tight loop for fetching all entities from datastore and accomodate
// for lazy fetch.
for (Store obj : execute)
;
} finally {
mgr.close();
}
return CollectionResponse.<Store> builder().setItems(execute).setNextPageToken(cursorString).build();
}
/**
* This method gets the entity having primary key id. It uses HTTP GET method.
*
* @param id
* the primary key of the java bean.
* @return The entity with primary key id.
* @throws UnauthorizedException
*/
@ApiMethod(name = "getStore", scopes = { Config.EMAIL_SCOPE }, clientIds = { Config.CHROME_CLIENT_ID, Config.WEB_CLIENT_ID, Config.API_EXPLORER_CLIENT_ID })
public Store getStore(@Named("_id") Long _id, User user) throws UnauthorizedException {
if (user == null) {
throw new UnauthorizedException("UnauthorizedException # User is Null.");
}
PersistenceManager mgr = getPersistenceManager();
Store store = null;
try {
store = mgr.getObjectById(Store.class, _id);
} finally {
mgr.close();
}
return store;
}
/**
* This inserts a new entity into App Engine datastore. If the entity already exists in the datastore, an exception is thrown. It uses HTTP POST
* method.
*
* @param store
* the entity to be inserted.
* @return The inserted entity.
* @throws UnauthorizedException
*/
@ApiMethod(name = "insertStore", scopes = { Config.EMAIL_SCOPE }, clientIds = { Config.CHROME_CLIENT_ID, Config.WEB_CLIENT_ID, Config.API_EXPLORER_CLIENT_ID })
public Store insertStore(Store store, User user) throws UnauthorizedException {
if (user == null) {
throw new UnauthorizedException("UnauthorizedException # User is Null.");
} else if (store == null || store.getData() == null || store.get_id() != null) {
return null;
}
PersistenceManager mgr = getPersistenceManager();
try {
Date currentDate = new Date();
store.set_createdAt(currentDate);
store.set_upatedAt(currentDate);
mgr.makePersistent(store);
} finally {
mgr.close();
}
return store;
}
/**
* This method is used for updating an existing entity. If the entity does not exist in the datastore, an exception is thrown. It uses HTTP PUT
* method.
*
* @param store
* the entity to be updated.
* @return The updated entity.
* @throws UnauthorizedException
*/
@ApiMethod(name = "updateStore", scopes = { Config.EMAIL_SCOPE }, clientIds = { Config.CHROME_CLIENT_ID, Config.WEB_CLIENT_ID, Config.API_EXPLORER_CLIENT_ID })
public Store updateStore(Store store, User user) throws UnauthorizedException {
if (user == null) {
throw new UnauthorizedException("UnauthorizedException # User is Null.");
}
PersistenceManager mgr = getPersistenceManager();
try {
if (!containsStore(store)) {
throw new EntityNotFoundException("Object does not exist");
}
Date currentDate = new Date();
store.set_upatedAt(currentDate);
mgr.makePersistent(store);
} finally {
mgr.close();
}
return store;
}
/**
* This method removes the entity with primary key id. It uses HTTP DELETE method.
*
* @param id
* the primary key of the entity to be deleted.
* @throws UnauthorizedException
*/
@ApiMethod(name = "removeStore", scopes = { Config.EMAIL_SCOPE }, clientIds = { Config.CHROME_CLIENT_ID, Config.WEB_CLIENT_ID, Config.API_EXPLORER_CLIENT_ID })
public void removeStore(@Named("_id") Long _id, User user) throws UnauthorizedException {
if (user == null) {
throw new UnauthorizedException("UnauthorizedException # User is Null.");
}
PersistenceManager mgr = getPersistenceManager();
try {
Store store = mgr.getObjectById(Store.class, _id);
mgr.deletePersistent(store);
} finally {
mgr.close();
}
}
private boolean containsStore(Store store) {
PersistenceManager mgr = getPersistenceManager();
boolean contains = true;
try {
mgr.getObjectById(Store.class, store.get_id());
} catch (javax.jdo.JDOObjectNotFoundException ex) {
contains = false;
} finally {
mgr.close();
}
return contains;
}
private static PersistenceManager getPersistenceManager() {
return PMF.get().getPersistenceManager();
}
} | apache-2.0 |
tseaver/gcloud-python | asset/google/cloud/asset_v1beta1/gapic/enums.py | 1286 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrappers for protocol buffer enum types."""
import enum
class NullValue(enum.IntEnum):
    """
    ``NullValue`` is a singleton enumeration to represent the null value for the
    ``Value`` type union.

    The JSON representation for ``NullValue`` is JSON ``null``.

    Attributes:
      NULL_VALUE (int): Null value.
    """
    # Auto-generated from the protobuf well-known type; values must match
    # the proto enum numbering.
    NULL_VALUE = 0
class ContentType(enum.IntEnum):
    """
    Asset content type.

    Attributes:
      CONTENT_TYPE_UNSPECIFIED (int): Unspecified content type.
      RESOURCE (int): Resource metadata.
      IAM_POLICY (int): The actual IAM policy set on a resource.
    """
    # Auto-generated; values must match the Cloud Asset proto enum.
    CONTENT_TYPE_UNSPECIFIED = 0
    RESOURCE = 1
    IAM_POLICY = 2
| apache-2.0 |
crate/crate | libs/sql-parser/src/main/java/io/crate/sql/tree/DeallocateStatement.java | 2157 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.sql.tree;
import javax.annotation.Nullable;
import java.util.Objects;
/**
 * AST node for the SQL {@code DEALLOCATE} statement.
 *
 * <p>A {@code null} prepared-statement expression denotes
 * {@code DEALLOCATE ALL}; otherwise the expression names the prepared
 * statement to drop.
 */
public class DeallocateStatement extends Statement {

    @Nullable
    private final Expression preparedStmt;

    /** Creates a statement equivalent to {@code DEALLOCATE ALL}. */
    public DeallocateStatement() {
        this.preparedStmt = null;
    }

    /** Creates a statement deallocating the given prepared statement. */
    public DeallocateStatement(Expression preparedStmt) {
        this.preparedStmt = preparedStmt;
    }

    /** The prepared statement to deallocate, or {@code null} for ALL. */
    @Nullable
    public Expression preparedStmt() {
        return preparedStmt;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DeallocateStatement other = (DeallocateStatement) o;
        return Objects.equals(preparedStmt, other.preparedStmt);
    }

    @Override
    public int hashCode() {
        // Objects.hash (varargs) keeps hash values identical to the
        // previous implementation.
        return Objects.hash(preparedStmt);
    }

    @Override
    public String toString() {
        if (preparedStmt == null) {
            return "DEALLOCATE ALL";
        }
        return "DEALLOCATE '" + preparedStmt + "'";
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
        return visitor.visitDeallocateStatement(this, context);
    }
}
| apache-2.0 |
mikosik/test-injector | src/test/java/com/perunlabs/testinjector/AnnotationTest.java | 1366 | package com.perunlabs.testinjector;
import static com.perunlabs.testinjector.inject.TestInjector.injectTest;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayList;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Spy;
import com.google.inject.BindingAnnotation;
import com.google.inject.name.Named;
import com.perunlabs.testinjector.util.MoreThanOneBindingAnnotationException;
/**
 * Verifies that TestInjector rejects {@code @Mock} / {@code @Spy} fields
 * that carry more than one Guice binding annotation (here {@code @Named}
 * plus a custom {@code @MyAnnotation}) — such a field would be an
 * ambiguous binding.
 */
public class AnnotationTest {

    @Test(expected = MoreThanOneBindingAnnotationException.class)
    public void mocking_twice_annotated_field_fails() throws Exception {
        injectTest(new TwiceAnnotatedMock());
    }

    // Mock field with two binding annotations -> injection must fail.
    private static class TwiceAnnotatedMock {
        @Named("x")
        @MyAnnotation
        @Mock
        Runnable mock;
    }

    @Test(expected = MoreThanOneBindingAnnotationException.class)
    public void spying_twice_annotated_field_fails() throws Exception {
        injectTest(new TwiceAnnotatedSpy());
    }

    // Spy field with two binding annotations -> injection must fail.
    private static class TwiceAnnotatedSpy {
        @Named("x")
        @MyAnnotation
        @Spy
        Iterable<String> spy = new ArrayList<String>();
    }

    // Custom Guice binding annotation used to create the conflict.
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER, ElementType.TYPE })
    @BindingAnnotation
    @interface MyAnnotation {}
}
| apache-2.0 |
freedot/tstolua | tests/cases/conformance/types/typeRelationships/subtypesAndSuperTypes/unionSubtypeIfEveryConstituentTypeIsSubtype.ts | 2313 | enum e {
e1,
e2
}
// A union type U is a subtype of a type T if each type in U is a subtype of T
interface I {
[x: string]: any;
foo: string | number; // ok
foo2: e | number; // ok
}
interface I2 {
[x: string]: number;
// S is union type and each constituent type of S is a subtype of T
foo: string | number; // error string is not subtype of number
foo2: e | number; // ok e and number both subtype of number
}
interface I3 {
[x: string]: string;
foo: string | number; // error numer is not subtype of string
foo2: e | number; // error e and number both not subtype of string
}
// error cases
interface I4 {
[x: string]: boolean;
foo: string | number;
foo2: e | number;
}
interface I5 {
[x: string]: Date;
foo: string | number;
foo2: e | number;
}
interface I6 {
[x: string]: RegExp;
foo: string | number;
foo2: e | number;
}
interface I7 {
[x: string]: { bar: number };
foo: string | number;
foo2: e | number;
}
interface I8 {
[x: string]: number[];
foo: string | number;
foo2: e | number;
}
interface I9 {
[x: string]: I8;
foo: string | number;
foo2: e | number;
}
class A { foo: number; }
interface I10 {
[x: string]: A;
foo: string | number;
foo2: e | number;
}
class A2<T> { foo: T; }
interface I11 {
[x: string]: A2<number>;
foo: string | number;
foo2: e | number;
}
interface I12 {
[x: string]: (x) => number;
foo: string | number;
foo2: e | number;
}
interface I13 {
[x: string]: <T>(x: T) => T;
foo: string | number;
foo2: e | number;
}
enum E2 { A }
interface I14 {
[x: string]: E2;
foo: string | number;
foo2: e | number;
}
function f() { }
module f {
export var bar = 1;
}
interface I15 {
[x: string]: typeof f;
foo: string | number;
foo2: e | number;
}
class c { baz: string }
module c {
export var bar = 1;
}
interface I16 {
[x: string]: typeof c;
foo: string | number;
foo2: e | number;
}
interface I17<T> {
[x: string]: T;
foo: string | number;
foo2: e | number;
}
interface I19 {
[x: string]: Object;
foo: string | number;
foo2: e | number;
}
interface I20 {
[x: string]: {};
foo: string | number;
foo2: e | number;
} | apache-2.0 |
Cloudyle/hapi-fhir | hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java | 9830 | package ca.uhn.fhir.jpa.dao.dstu2;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.Arrays;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.resource.StructureDefinition;
import ca.uhn.fhir.model.dstu2.resource.ValueSet;
import ca.uhn.fhir.model.dstu2.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);
/**
 * Validates a profile-declaring Observation submitted as JSON and asserts
 * the expected profile violations appear in the OperationOutcome.
 */
@Test
public void testValidateResourceContainingProfileDeclarationJson() throws Exception {
    String methodName = "testValidateResourceContainingProfileDeclarationJson";
    OperationOutcome outcome = doTestValidateResourceContainingProfileDeclaration(methodName, EncodingEnum.JSON);

    String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
    ourLog.info(ooString);
    // JSON validation paths lack the "/f:" XML-namespace prefix seen in
    // the XML variant of this test.
    assertThat(ooString, containsString("Element '.subject': minimum required = 1, but only found 0"));
    assertThat(ooString, containsString("Element encounter @ : max allowed = 0, but found 1"));
    assertThat(ooString, containsString("Element '.device': minimum required = 1, but only found 0"));
}
/**
 * Validates a profile-declaring Observation submitted as XML and asserts
 * the expected profile violations appear in the OperationOutcome.
 */
@Test
public void testValidateResourceContainingProfileDeclarationXml() throws Exception {
    String methodName = "testValidateResourceContainingProfileDeclarationXml";
    OperationOutcome outcome = doTestValidateResourceContainingProfileDeclaration(methodName, EncodingEnum.XML);

    String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
    ourLog.info(ooString);
    // XML validation paths carry the "/f:" namespace prefix.
    assertThat(ooString, containsString("Element '/f:Observation.subject': minimum required = 1, but only found 0"));
    assertThat(ooString, containsString("Element encounter @ /f:Observation: max allowed = 0, but found 1"));
    assertThat(ooString, containsString("Element '/f:Observation.device': minimum required = 1, but only found 0"));
}
/**
 * Shared driver for the profile-declaration validation tests.
 *
 * <p>Registers the base ValueSets and the device-metric Observation
 * StructureDefinition, then validates an Observation that declares that
 * profile but violates it (missing subject/device, disallowed encounter),
 * expecting validation to fail.
 *
 * @param methodName used to build a unique profile URL per test
 * @param enc        wire format to validate with (JSON or XML)
 * @return the OperationOutcome carried by the expected
 *         {@link PreconditionFailedException}
 * @throws IOException if a classpath resource cannot be read
 */
private OperationOutcome doTestValidateResourceContainingProfileDeclaration(String methodName, EncodingEnum enc) throws IOException {
    // Register the ValueSets the profile references so the validator can
    // resolve them locally.
    Bundle vss = loadResourceFromClasspath(Bundle.class, "/org/hl7/fhir/instance/model/valueset/valuesets.xml");
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-status"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-category"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-codes"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-methods"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-valueabsentreason"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-interpretation"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "body-site"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "referencerange-meaning"));
    myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-relationshiptypes"));

    StructureDefinition sd = loadResourceFromClasspath(StructureDefinition.class, "/org/hl7/fhir/instance/model/profile/devicemetricobservation.profile.xml");
    sd.setId(new IdDt());
    sd.setUrl("http://example.com/foo/bar/" + methodName);
    myStructureDefinitionDao.create(sd);

    // Intentionally invalid against the profile: subject and device are
    // missing, encounter is present but prohibited.
    Observation input = new Observation();
    ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(sd.getUrl())));
    input.addIdentifier().setSystem("http://acme").setValue("12345");
    input.getEncounter().setReference("http://foo.com/Encounter/9");
    input.setStatus(ObservationStatusEnum.FINAL);
    input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");

    // Removed an unused `MethodOutcome outcome` local from the previous
    // version; validation is expected to throw, not return.
    String encoded;
    ValidationModeEnum mode = ValidationModeEnum.CREATE;
    switch (enc) {
    case JSON:
        encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
        try {
            myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null);
            fail();
        } catch (PreconditionFailedException e) {
            return (OperationOutcome) e.getOperationOutcome();
        }
    case XML:
        encoded = myFhirCtx.newXmlParser().encodeResourceToString(input);
        try {
            myObservationDao.validate(input, null, encoded, EncodingEnum.XML, mode, null);
            fail();
        } catch (PreconditionFailedException e) {
            return (OperationOutcome) e.getOperationOutcome();
        }
    }

    throw new IllegalStateException(); // shouldn't get here
}
/**
 * An Observation declaring a profile URL that was never registered must
 * produce an OperationOutcome reporting the unresolved reference.
 */
@Test
public void testValidateResourceContainingProfileDeclarationInvalid() throws Exception {
    String methodName = "testValidateResourceContainingProfileDeclarationInvalid";

    Observation input = new Observation();
    // Unique, deliberately unregistered profile URL.
    String profileUri = "http://example.com/" + methodName;
    ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(profileUri)));

    input.addIdentifier().setSystem("http://acme").setValue("12345");
    input.getEncounter().setReference("http://foo.com/Encounter/9");
    input.setStatus(ObservationStatusEnum.FINAL);
    input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");

    ValidationModeEnum mode = ValidationModeEnum.CREATE;
    String encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
    MethodOutcome outcome = myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null);

    String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getOperationOutcome());
    ourLog.info(ooString);
    assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved"));
}
/**
 * validate with mode=CREATE must reject a resource that already carries
 * an ID, and accept it once the ID is cleared.
 */
@Test
public void testValidateForCreate() {
    String methodName = "testValidateForCreate";

    Patient pat = new Patient();
    pat.setId("Patient/123");
    pat.addName().addFamily(methodName);

    try {
        myPatientDao.validate(pat, null, null, null, ValidationModeEnum.CREATE, null);
        fail();
    } catch (InvalidRequestException e) {
        assertThat(e.getMessage(), containsString("ID must not be populated"));
    }

    pat.setId("");
    myPatientDao.validate(pat, null, null, null, ValidationModeEnum.CREATE, null);
}
/**
 * validate with mode=UPDATE must require a populated ID, and reject the
 * resource once the ID is cleared.
 */
@Test
public void testValidateForUpdate() {
    String methodName = "testValidateForUpdate";

    Patient pat = new Patient();
    pat.setId("Patient/123");
    pat.addName().addFamily(methodName);
    myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null);

    pat.setId("");

    try {
        myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null);
        fail();
    } catch (InvalidRequestException e) {
        assertThat(e.getMessage(), containsString("ID must be populated"));
    }
}
@Test
public void testValidateForUpdateWithContained() {
String methodName = "testValidateForUpdate";
Organization org = new Organization();
org.setId("#123");
Patient pat = new Patient();
pat.setId("Patient/123");
pat.addName().addFamily(methodName);
myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null);
pat.setId("");
try {
myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null);
fail();
} catch (InvalidRequestException e) {
assertThat(e.getMessage(), containsString("ID must be populated"));
}
}
@Test
public void testValidateForDelete() {
String methodName = "testValidateForDelete";
Organization org = new Organization();
org.setName(methodName);
IIdType orgId = myOrganizationDao.create(org).getId().toUnqualifiedVersionless();
Patient pat = new Patient();
pat.addName().addFamily(methodName);
pat.getManagingOrganization().setReference(orgId);
IIdType patId = myPatientDao.create(pat).getId().toUnqualifiedVersionless();
OperationOutcome outcome=null;
try {
myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null);
fail();
} catch (ResourceVersionConflictException e) {
outcome= (OperationOutcome) e.getOperationOutcome();
}
String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
ourLog.info(ooString);
assertThat(ooString, containsString("Unable to delete Organization"));
pat.setId(patId);
pat.getManagingOrganization().setReference("");
myPatientDao.update(pat);
outcome = (OperationOutcome) myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null).getOperationOutcome();
ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
ourLog.info(ooString);
assertThat(ooString, containsString("Ok to delete"));
}
/**
 * Locates the bundle entry whose resource ID part equals {@code name}.
 * Fails the test (throws AssertionError) when no entry matches, so the
 * return value is always non-null for callers.
 */
private IResource findResourceByIdInBundle(Bundle vss, String name) {
    for (Entry entry : vss.getEntry()) {
        IResource candidate = entry.getResource();
        if (candidate.getId().getIdPart().equals(name)) {
            return candidate;
        }
    }
    fail("Can't find VS: " + name);
    return null; // unreachable: fail() always throws
}
}
| apache-2.0 |
ozone-development/meridian | server/extensions/gazetteer/main.js | 466 | var platform = require('platform');
var gaz = require('./Gaz');
var auth;
exports.init = function(context){
auth = context.sandbox.auth;
context.app.get('/gaz', auth.verifyUser, function(req, res){
gaz.query(req, function (response){
if(response.status === 200){
res.status(200);
res.send(response.response);
} else {
res.status(500);
res.send(response);
}
});
});
}; | apache-2.0 |
ChineseLincoln/JerseyRestful | src/main/java/com/drawthink/iguyuan/db/DBConnect.java | 1381 | package com.drawthink.iguyuan.db;
import java.sql.Connection;
import java.sql.SQLException;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
public class DBConnect {

    // Shared Druid connection pool, configured once when the class loads.
    // SECURITY NOTE(review): the DB address, username and password are
    // hard-coded below and should be externalized to configuration.
    public static DruidDataSource dataSource;

    static {
        try {
            dataSource = new DruidDataSource();
            dataSource.setUsername("sa");
            dataSource.setPassword("*windows*");
            dataSource.setUrl("jdbc:sqlserver://192.168.0.168:2433;databaseName=ZHGY");
            dataSource.setDriverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
            // Enable Druid's "stat" monitoring filter.
            dataSource.setFilters("stat");
            dataSource.setMaxActive(20);
            dataSource.setInitialSize(1);
            // Maximum wait for a free connection: 60s.
            dataSource.setMaxWait(60000);
            dataSource.setTimeBetweenEvictionRunsMillis(60000);
            dataSource.setMinEvictableIdleTimeMillis(300000);
            dataSource.setPoolPreparedStatements(true);
            dataSource.setMaxOpenPreparedStatements(20);
        } catch (Exception e) {
            // Initialization failure is only logged to stderr; callers of
            // getConnection() may then receive null.
            e.printStackTrace();
        }
    }

    /**
     * Obtains a pooled connection from the shared data source.
     *
     * @author gaoxianglong
     *
     * @return Connection a pooled connection, or null when the pool is
     *         unavailable or borrowing a connection fails
     */
    public static DruidPooledConnection getConnection() {
        DruidPooledConnection conn = null;
        if (null != dataSource) {
            try {
                conn = dataSource.getConnection();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        return conn;
    }
}
| apache-2.0 |
jgrandja/spring-security-oauth | spring-security-oauth2/src/main/java/org/springframework/security/oauth2/common/util/ProxyCreator.java | 2035 | /*
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.springframework.security.oauth2.common.util;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import org.springframework.beans.factory.ObjectFactory;
/**
* @author Dave Syer
*
*/
public class ProxyCreator {

    /**
     * Returns a JDK dynamic proxy implementing {@code type} whose real
     * target is created by {@code factory} only on first method call.
     */
    @SuppressWarnings("unchecked")
    public static <T> T getProxy(Class<T> type, ObjectFactory<T> factory) {
        return (T) Proxy.newProxyInstance(ProxyCreator.class.getClassLoader(), new Class<?>[] { type },
                new LazyInvocationHandler<T>(factory));
    }

    /** Invocation handler that defers target creation to first use. */
    private static class LazyInvocationHandler<T> implements InvocationHandler {

        // Lazily created delegate; null until the first delegated call.
        // NOTE(review): access is not synchronized, so concurrent first
        // calls may each invoke the factory once -- confirm acceptable.
        private T target;

        private ObjectFactory<T> factory;

        public LazyInvocationHandler(ObjectFactory<T> factory) {
            this.factory = factory;
        }

        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            // Invocation on interface coming in...
            // equals/hashCode are answered with proxy identity and never
            // trigger creation of the target.
            if (method.getName().equals("equals")) {
                return (proxy == args[0]);
            }
            else if (method.getName().equals("hashCode")) {
                return System.identityHashCode(proxy);
            }
            try {
                return method.invoke(getTarget(method), args);
            }
            catch (InvocationTargetException ex) {
                // Unwrap so callers see the target's original exception.
                throw ex.getTargetException();
            }
        }

        // Creates the target on first use; the Method argument is unused.
        private Object getTarget(Method method) {
            if (target == null) {
                target = factory.getObject();
            }
            return target;
        }
    }
}
| apache-2.0 |
aspiers/chef | chef/lib/chef/knife/ssh.rb | 13401 | #
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2009 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
class Chef
class Knife
class Ssh < Knife
deps do
require 'net/ssh'
require 'net/ssh/multi'
require 'readline'
require 'chef/exceptions'
require 'chef/search/query'
require 'chef/mixin/shell_out'
require 'mixlib/shellout'
end
include Chef::Mixin::ShellOut
attr_writer :password
banner "knife ssh QUERY COMMAND (options)"
option :concurrency,
  :short => "-C NUM",
  :long => "--concurrency NUM",
  :description => "The number of concurrent connections",
  :default => nil,
  :proc => lambda { |o| o.to_i }

option :attribute,
  :short => "-a ATTR",
  :long => "--attribute ATTR",
  :description => "The attribute to use for opening the connection - default is fqdn",
  :proc => Proc.new { |key| Chef::Config[:knife][:ssh_attribute] = key }

option :manual,
  :short => "-m",
  :long => "--manual-list",
  :boolean => true,
  :description => "QUERY is a space separated list of servers",
  :default => false

option :ssh_user,
  :short => "-x USERNAME",
  :long => "--ssh-user USERNAME",
  :description => "The ssh username"

option :ssh_password,
  :short => "-P PASSWORD",
  :long => "--ssh-password PASSWORD",
  :description => "The ssh password"

option :ssh_port,
  :short => "-p PORT",
  :long => "--ssh-port PORT",
  :description => "The ssh port",
  :default => "22",
  :proc => Proc.new { |key| Chef::Config[:knife][:ssh_port] = key }

option :ssh_gateway,
  :short => "-G GATEWAY",
  :long => "--ssh-gateway GATEWAY",
  :description => "The ssh gateway",
  # BUG FIX: the value was previously stored under the misspelled key
  # :ssh_gatewa, so the Chef::Config[:knife][:ssh_gateway] fallback in
  # session_from_list never saw a gateway given via -G.
  :proc => Proc.new { |key| Chef::Config[:knife][:ssh_gateway] = key }

option :identity_file,
  :short => "-i IDENTITY_FILE",
  :long => "--identity-file IDENTITY_FILE",
  :description => "The SSH identity file used for authentication"

option :host_key_verify,
  :long => "--[no-]host-key-verify",
  :description => "Verify host key, enabled by default.",
  :boolean => true,
  :default => true
# Lazily builds (and memoizes) the Net::SSH::Multi session. The per-server
# error handler honours config[:on_error]: :skip (the default) logs a
# warning and continues; :raise re-raises via Net::SSH::Multi's
# throw(:go, :raise) convention.
def session
  config[:on_error] ||= :skip
  ssh_error_handler = Proc.new do |server|
    if config[:manual]
      node_name = server.host
    else
      # Map the failing host back to the node it came from, for the message.
      @action_nodes.each do |n|
        node_name = n if format_for_display(n)[config[:attribute]] == server.host
      end
    end
    case config[:on_error]
    when :skip
      ui.warn "Failed to connect to #{node_name} -- #{$!.class.name}: #{$!.message}"
      $!.backtrace.each { |l| Chef::Log.debug(l) }
    when :raise
      #Net::SSH::Multi magic to force exception to be re-raised.
      throw :go, :raise
    end
  end
  @session ||= Net::SSH::Multi.start(:concurrent_connections => config[:concurrency], :on_error => ssh_error_handler)
end

# Resolves the target host list -- either a space-separated manual list
# (-m) or a Chef node search on @name_args[0] -- then opens the sessions.
# Exits with status 10 when nothing matches.
def configure_session
  list = case config[:manual]
         when true
           @name_args[0].split(" ")
         when false
           r = Array.new
           q = Chef::Search::Query.new
           @action_nodes = q.search(:node, @name_args[0])[0]
           @action_nodes.each do |item|
             # Use the configured attribute (fqdn by default) as the
             # connection address; skip nodes where it is missing.
             i = format_for_display(item)[config[:attribute]]
             r.push(i) unless i.nil?
           end
           r
         end
  (ui.fatal("No nodes returned from search!"); exit 10) if list.length == 0
  session_from_list(list)
end

# Registers each host with the multi-session, optionally tunnelling through
# an ssh gateway ("user@host:port"), applying per-host options (identity
# file, password, port, host-key checking). Returns the session.
def session_from_list(list)
  config[:ssh_gateway] ||= Chef::Config[:knife][:ssh_gateway]
  if config[:ssh_gateway]
    gw_host, gw_user = config[:ssh_gateway].split('@').reverse
    gw_host, gw_port = gw_host.split(':')
    gw_opts = gw_port ? { :port => gw_port } : {}
    session.via(gw_host, gw_user || config[:ssh_user], gw_opts)
  end
  list.each do |item|
    Chef::Log.debug("Adding #{item}")
    hostspec = config[:ssh_user] ? "#{config[:ssh_user]}@#{item}" : item
    session_opts = {}
    session_opts[:keys] = File.expand_path(config[:identity_file]) if config[:identity_file]
    session_opts[:keys_only] = true if config[:identity_file]
    session_opts[:password] = config[:ssh_password] if config[:ssh_password]
    session_opts[:port] = Chef::Config[:knife][:ssh_port] || config[:ssh_port]
    session_opts[:logger] = Chef::Log.logger if Chef::Log.level == :debug
    if !config[:host_key_verify]
      session_opts[:paranoid] = false
      session_opts[:user_known_hosts_file] = "/dev/null"
    end
    session.use(hostspec, session_opts)
    # Track the widest hostname for column alignment in print_data.
    @longest = item.length if item.length > @longest
  end
  session
end
# Rewrites a leading "sudo" so it prompts with a known marker string that
# ssh_command can detect and answer with the cached password.
def fixup_sudo(command)
  command.sub(/^sudo/, 'sudo -p \'knife sudo password: \'')
end
# Emits output one line at a time, each prefixed with the (colored) host
# name padded to align columns across all connected hosts.
def print_data(host, data)
  if data =~ /\n/
    data.split(/\n/).each { |line| print_data(host, line) }
  else
    pad = @longest - host.length
    ui.msg(ui.color(host, :cyan) + (" " * (pad + 1)) + data)
  end
end
# Runs +command+ on every server in +subsession+ (defaults to the whole
# session), streaming output through print_data and auto-answering the
# knife sudo password prompt. Returns the exit status reported on the
# channel (with multiple hosts, the last status received wins).
def ssh_command(command, subsession=nil)
  exit_status = 0
  subsession ||= session
  command = fixup_sudo(command)
  subsession.open_channel do |ch|
    # Request a PTY so sudo can present its password prompt.
    ch.request_pty
    ch.exec command do |ch, success|
      raise ArgumentError, "Cannot execute #{command}" unless success
      ch.on_data do |ichannel, data|
        print_data(ichannel[:host], data)
        # Marker installed by fixup_sudo: answer with the cached password.
        if data =~ /^knife sudo password: /
          ichannel.send_data("#{get_password}\n")
        end
      end
      ch.on_request "exit-status" do |ichannel, data|
        exit_status = data.read_long
      end
    end
  end
  session.loop
  exit_status
end

# Prompts once (without echo) and caches the password for sudo prompts.
def get_password
  @password ||= ui.ask("Enter your password: ") { |q| q.echo = false }
end
# Present the prompt and read a single line from the console. It also
# detects ^D and returns "exit" in that case. Adds the input to the
# history, unless the input is empty. Loops repeatedly until a non-empty
# line is input.
# Reads one non-empty command line from the console, adding it to the
# Readline history; EOF (^D) is translated to "exit".
def read_line
  prompt = "#{ui.color('knife-ssh>', :bold)} "
  loop do
    line = reader.readline(prompt, true)
    if line.nil?
      line = "exit"
      puts(line)
    else
      line = line.strip
    end
    return line unless line.empty?
  end
end
# Indirection point for Readline (overridable, e.g. for test stubbing --
# TODO confirm that is the intent).
def reader
  Readline
end

# Interactive REPL: each entered command runs on every connected server;
# "on HOST1 HOST2; CMD" targets a subset; "quit!" exits the loop.
def interactive
  puts "Connected to #{ui.list(session.servers_for.collect { |s| ui.color(s.host, :cyan) }, :inline, " and ")}"
  puts
  puts "To run a command on a list of servers, do:"
  puts " on SERVER1 SERVER2 SERVER3; COMMAND"
  puts " Example: on latte foamy; echo foobar"
  puts
  puts "To exit interactive mode, use 'quit!'"
  puts
  while 1
    command = read_line
    case command
    when 'quit!'
      puts 'Bye!'
      break
    when /^on (.+?); (.+)$/
      # $1 = host list, $2 = the command to run on that subset.
      raw_list = $1.split(" ")
      server_list = Array.new
      session.servers.each do |session_server|
        server_list << session_server if raw_list.include?(session_server.host)
      end
      command = $2
      ssh_command(command, session.on(*server_list))
    else
      ssh_command(command)
    end
  end
end
# Opens one GNU screen window per server, driven by a generated screenrc
# in a tempfile; replaces the current process via exec.
def screen
  tf = Tempfile.new("knife-ssh-screen")
  # Preserve the user's own screen settings, if any.
  if File.exist? "#{ENV["HOME"]}/.screenrc"
    tf.puts("source #{ENV["HOME"]}/.screenrc")
  end
  tf.puts("caption always '%-Lw%{= BW}%50>%n%f* %t%{-}%+Lw%<'")
  tf.puts("hardstatus alwayslastline 'knife ssh #{@name_args[0]}'")
  window = 0
  session.servers_for.each do |server|
    tf.print("screen -t \"#{server.host}\" #{window} ssh ")
    tf.print("-i #{config[:identity_file]} ") if config[:identity_file]
    server.user ? tf.puts("#{server.user}@#{server.host}") : tf.puts(server.host)
    window += 1
  end
  tf.close
  exec("screen -c #{tf.path}")
end

# Creates a detached tmux session with one window per server, then
# replaces the current process with "tmux attach-session".
def tmux
  # Quoted ssh invocation for a given server (identity file + user@host).
  ssh_dest = lambda do |server|
    identity = "-i #{config[:identity_file]} " if config[:identity_file]
    prefix = server.user ? "#{server.user}@" : ""
    "'ssh #{identity}#{prefix}#{server.host}'"
  end
  # "new-window" fragments for every server after the first.
  new_window_cmds = lambda do
    if session.servers_for.size > 1
      [""] + session.servers_for[1..-1].map do |server|
        "new-window -a -n '#{server.host}' #{ssh_dest.call(server)}"
      end
    else
      []
    end.join(" \\; ")
  end
  # tmux session names cannot contain ':', so replace it.
  tmux_name = "'knife ssh #{@name_args[0].gsub(/:/,'=')}'"
  begin
    server = session.servers_for.first
    cmd = ["tmux new-session -d -s #{tmux_name}",
           "-n '#{server.host}'", ssh_dest.call(server),
           new_window_cmds.call].join(" ")
    shell_out!(cmd)
    exec("tmux attach-session -t #{tmux_name}")
  rescue Chef::Exceptions::Exec
    # NOTE(review): shell_out! failures are silently swallowed here, giving
    # the user no feedback -- confirm this is intentional.
  end
end

# Drives Terminal.app via AppleScript (rb-appscript): opens one tab per
# server and starts ssh in each. Mac OS X only.
def macterm
  begin
    require 'appscript'
  rescue LoadError
    STDERR.puts "you need the rb-appscript gem to use knife ssh macterm. `(sudo) gem install rb-appscript` to install"
    raise
  end
  Appscript.app("/Applications/Utilities/Terminal.app").windows.first.activate
  Appscript.app("System Events").application_processes["Terminal.app"].keystroke("n", :using=>:command_down)
  term = Appscript.app('Terminal')
  window = term.windows.first.get
  # One tab already exists; add one more per additional server.
  (session.servers_for.size - 1).times do |i|
    window.activate
    Appscript.app("System Events").application_processes["Terminal.app"].keystroke("t", :using=>:command_down)
  end
  session.servers_for.each_with_index do |server, tab_number|
    cmd = "unset PROMPT_COMMAND; echo -e \"\\033]0;#{server.host}\\007\"; ssh #{server.user ? "#{server.user}@#{server.host}" : server.host}"
    Appscript.app('Terminal').do_script(cmd, :in => window.tabs[tab_number + 1].get)
  end
end
# Resolves the node attribute used as the connection address: knife config
# takes precedence, then the -a option, then "fqdn".
def configure_attribute
  config[:attribute] = (Chef::Config[:knife][:ssh_attribute] ||
                        config[:attribute] ||
                        "fqdn").strip
end

# Locates csshX (preferred) or cssh on the PATH and execs it with every
# resolved host appended; raises when neither tool is installed.
def cssh
  cssh_cmd = nil
  %w[csshX cssh].each do |cmd|
    begin
      # Unix and Mac only
      cssh_cmd = shell_out!("which #{cmd}").stdout.strip
      break
    rescue Mixlib::ShellOut::ShellCommandFailed
      # Tool not found; try the next candidate.
    end
  end
  raise Chef::Exceptions::Exec, "no command found for cssh" unless cssh_cmd
  session.servers_for.each do |server|
    cssh_cmd << " #{server.user ? "#{server.user}@#{server.host}" : server.host}"
  end
  Chef::Log.debug("starting cssh session with command: #{cssh_cmd}")
  exec(cssh_cmd)
end
# Returns nil for nil input; otherwise a stripped (and therefore freshly
# allocated, unfrozen) copy of the string.
def get_stripped_unfrozen_value(value)
  value.nil? ? nil : value.strip
end
# Normalizes the ssh user from the CLI option or knife config.
def configure_user
  config[:ssh_user] = get_stripped_unfrozen_value(config[:ssh_user] ||
                       Chef::Config[:knife][:ssh_user])
end

# Normalizes the identity file path from the CLI option or knife config.
def configure_identity_file
  config[:identity_file] = get_stripped_unfrozen_value(config[:identity_file] ||
                       Chef::Config[:knife][:ssh_identity_file])
end

# Entry point: resolves configuration, opens the sessions, then dispatches
# on the second argument (interactive/screen/tmux/macterm/cssh, with csshx
# kept as a deprecated alias) or runs the remaining args as a one-shot
# command. Returns the remote exit status for one-shot commands.
def run
  extend Chef::Mixin::Command
  @longest = 0
  configure_attribute
  configure_user
  configure_identity_file
  configure_session
  exit_status =
    case @name_args[1]
    when "interactive"
      interactive
    when "screen"
      screen
    when "tmux"
      tmux
    when "macterm"
      macterm
    when "cssh"
      cssh
    when "csshx"
      Chef::Log.warn("knife ssh csshx will be deprecated in a future release")
      Chef::Log.warn("please use knife ssh cssh instead")
      cssh
    else
      ssh_command(@name_args[1..-1].join(" "))
    end
  session.close
  exit_status
end
end
end
end
| apache-2.0 |
tcrossland/camunda-bpm-webapp | ui/cockpit/plugins/base/app/views/processInstance/variableInstancesTab.js | 10994 | 'use strict';
var fs = require('fs');
var uploadTemplate = fs.readFileSync(__dirname + '/variable-instance-upload-dialog.html', 'utf8');
var inspectTemplate = fs.readFileSync(__dirname + '/variable-instance-inspect-dialog.html', 'utf8');
var instancesTemplate = fs.readFileSync(__dirname + '/variable-instances-tab.html', 'utf8');
module.exports = function(ngModule) {
ngModule.controller('VariableInstancesController', [
'$scope', '$sce', '$http', 'search', 'Uri', 'LocalExecutionVariableResource', 'Notifications', '$modal', '$q', 'camAPI',
function($scope, $sce, $http, search, Uri, LocalExecutionVariableResource, Notifications, $modal, $q, camAPI) {
// input: processInstance, processData
var variableInstanceData = $scope.processData.newChild($scope),
processInstance = $scope.processInstance,
variableInstanceIdexceptionMessageMap,
variableCopies;
var executionService = camAPI.resource('execution'),
taskService = camAPI.resource('task');
var DEFAULT_PAGES = { size: 50, total: 0, current: 1 };
var pages = $scope.pages = angular.copy(DEFAULT_PAGES);
var filter = null;
$scope.$watch('pages.current', function(newValue, oldValue) {
if (newValue == oldValue) {
return;
}
search('page', !newValue || newValue == 1 ? null : newValue);
});
variableInstanceData.observe([ 'filter', 'instanceIdToInstanceMap' ], function(newFilter, instanceIdToInstanceMap) {
pages.current = newFilter.page || 1;
updateView(newFilter, instanceIdToInstanceMap);
});
// Opens the upload dialog for a variable. On a successful upload the
// filter is re-set to force a server reload; the returned promise is
// always rejected so the widget never applies a purely local update.
$scope.uploadVariable = function(info) {
  var promise = $q.defer();
  $modal.open({
    resolve: {
      variableInstance: function() { return info.variable; }
    },
    controller: 'VariableInstanceUploadController',
    template: uploadTemplate
  })
  .result.then(function() {
    // updated the variable, need to get the new data
    // reject the promise anyway
    promise.reject();
    // but then update the filter to force re-get of variables
    variableInstanceData.set('filter', angular.copy($scope.filter));
  }, function() {
    // did not update the variable, reject the promise
    promise.reject();
  });
  return promise.promise;
};

// Deletes a variable through the task service (task-scoped variables)
// or the execution service, notifying the user either way; resolves the
// promise with the variable on success so the row can be removed.
$scope.deleteVariable = function(info) {
  var promise = $q.defer();
  var callback = function(error) {
    if(error) {
      Notifications.addError({
        status: 'Variable',
        message: 'The variable \'' + info.variable.name + '\' could not be deleted successfully.',
        exclusive: true,
        duration: 5000
      });
      promise.reject();
    } else {
      Notifications.addMessage({
        status: 'Variable',
        message: 'The variable \'' + info.variable.name + '\' has been deleted successfully.',
        duration: 5000
      });
      promise.resolve(info.variable);
    }
  };
  // Task-scoped variables must be deleted via the task resource.
  if(info.original.taskId) {
    taskService.deleteVariable({
      id: info.original.taskId,
      varId: info.variable.name
    }, callback);
  } else {
    executionService.deleteVariable({
      id: info.variable.executionId,
      varId: info.variable.name
    }, callback);
  }
  return promise.promise;
};

// Opens the inspect/edit dialog; like uploadVariable, the promise is
// always rejected and a successful edit triggers a reload via the filter.
$scope.editVariable = function(info) {
  var promise = $q.defer();
  $modal.open({
    template: inspectTemplate,
    controller: 'VariableInstanceInspectController',
    windowClass: 'cam-widget-variable-dialog',
    resolve: {
      variableInstance: function () { return info.variable; }
    }
  })
  .result.then(function() {
    // updated the variable, need to get the new data
    // reject the promise anyway
    promise.reject();
    // but then update the filter to force re-get of variables
    variableInstanceData.set('filter', angular.copy($scope.filter));
  }, function() {
    // did not update the variable, reject the promise
    promise.reject();
  });
  return promise.promise;
};

// Persists an inline edit of a variable's value/type via the task or
// execution service. On failure the server's error payload is kept in
// variableInstanceIdexceptionMessageMap, keyed by variable id.
$scope.saveVariable = function (info) {
  var promise = $q.defer();
  var variable = info.variable;
  var modifiedVariable = {};
  var newValue = variable.value;//$scope.getCopy(variable.id).value;
  var newType = variable.type;//$scope.getCopy(variable.id).type;
  var newVariable = { value: newValue, type: newType };
  modifiedVariable[variable.name] = newVariable;
  var callback = function(error, data) {
    if(error) {
      Notifications.addError({
        status: 'Variable',
        message: 'The variable \'' + variable.name + '\' could not be changed successfully.',
        exclusive: true,
        duration: 5000
      });
      variableInstanceIdexceptionMessageMap[variable.id] = error.data;
      promise.reject();
    } else {
      Notifications.addMessage({
        status: 'Variable',
        message: 'The variable \'' + variable.name + '\' has been changed successfully.',
        duration: 5000
      });
      // Reflect the saved values on the local row.
      angular.extend(variable, newVariable);
      promise.resolve(info.variable);
    }
  };
  if(info.original.taskId) {
    taskService.modifyVariables({
      id: info.original.taskId,
      modifications: modifiedVariable
    }, callback);
  } else {
    executionService.modifyVariables({
      id: variable.executionId,
      modifications: modifiedVariable
    }, callback);
  }
  return promise.promise;
};
// Re-queries variable instances (and their total count) for the current
// filter and page, then rebuilds the table rows, including the HTML cell
// that links each variable to its activity-instance scope.
function updateView(newFilter, instanceIdToInstanceMap) {
  filter = $scope.filter = angular.copy(newFilter);
  // These filter keys are view state, not query parameters.
  delete filter.page;
  delete filter.activityIds;
  delete filter.scrollToBpmnElement;
  var page = pages.current,
      count = pages.size,
      firstResult = (page - 1) * count;
  var defaultParams = {
    processInstanceIdIn: [ processInstance.id ]
  };
  var pagingParams = {
    firstResult: firstResult,
    maxResults: count,
    deserializeValues: false
  };
  var params = angular.extend({}, filter, defaultParams);
  // fix missmatch -> activityInstanceIds -> activityInstanceIdIn
  params.activityInstanceIdIn = params.activityInstanceIds;
  delete params.activityInstanceIds;
  $scope.variables = null;
  $scope.loadingState = 'LOADING';
  // get the 'count' of variables
  $http.post(Uri.appUri('engine://engine/:engine/variable-instance/count'), params).success(function(data) {
    pages.total = data.count;
  });
  // Reset per-load caches of server errors and editable copies.
  variableInstanceIdexceptionMessageMap = {};
  variableCopies = {};
  $http.post(Uri.appUri('engine://engine/:engine/variable-instance/'), params, { params: pagingParams }).success(function(data) {
    $scope.variables = data.map(function (item) {
      var instance = instanceIdToInstanceMap[item.activityInstanceId];
      item.instance = instance;
      // Keep a pristine copy for getCopy()/reset purposes.
      variableCopies[item.id] = angular.copy(item);
      return {
        variable: {
          id: item.id,
          name: item.name,
          type: item.type,
          value: item.value,
          valueInfo: item.valueInfo,
          executionId: item.executionId
        },
        original: item,
        additions: {
          scope: {
            // Deep-link into this instance filtered to the variable's scope.
            html: '<a cam-select-activity-instance="\'' +
              instance.id +
              '\'" ng-href="#/process-instance/' +
              processInstance.id +
              '?detailsTab=variables-tab&activityInstanceIds=' +
              instance.id +
              '" title="' +
              instance.id +
              '">' +
              instance.name +
              '</a>',
            scopeVariables: {
              processData: $scope.processData
            }
          }
        }
      };
    });
    $scope.loadingState = data.length ? 'LOADED' : 'EMPTY';
  });
}
// Returns the pristine copy of a variable captured at load time, defaulting
// null-typed variables to 'String' so the editor has a usable type.
// Returns undefined for unknown ids.
$scope.getCopy = function (variableId) {
  var copy = variableCopies[variableId];
  // BUG FIX: previously an unknown id reached isNull(copy) and threw a
  // TypeError when reading copy.type on undefined.
  if (!copy) {
    return undefined;
  }
  if (isNull(copy)) {
    copy.type = 'String';
  }
  return copy;
};
// True when the variable's engine type is the Null type (either casing).
var isNull = $scope.isNull = function (variable) {
  return variable.type === 'null' || variable.type === 'Null';
};

// REST download URL for a binary variable's raw payload.
$scope.getBinaryVariableDownloadLink = function (variable) {
  return Uri.appUri('engine://engine/:engine/variable-instance/'+variable.id+'/data');
};

// Opens the upload dialog; no reload is triggered afterwards (there is no
// .result handler here, unlike $scope.uploadVariable).
$scope.openUploadDialog = function (variableInstance) {
  $modal.open({
    resolve: {
      variableInstance: function() { return variableInstance; }
    },
    controller: 'VariableInstanceUploadController',
    template: uploadTemplate
  });
};

// Opens the inspect dialog and forces a reload once it closes successfully.
$scope.openInspectDialog = function (variableInstance) {
  $modal.open({
    resolve: {
      variableInstance: function() { return variableInstance; }
    },
    controller: 'VariableInstanceInspectController',
    template: inspectTemplate
  }).result.then(function() {
    variableInstanceData.set('filter', angular.copy($scope.filter));
  });
};
}]);
// Registers this plugin as the "Variables" tab of the process-instance
// runtime view.
var Configuration = function PluginConfiguration(ViewsProvider) {
  ViewsProvider.registerDefaultView('cockpit.processInstance.runtime.tab', {
    id: 'variables-tab',
    label: 'Variables',
    template: instancesTemplate,
    controller: 'VariableInstancesController',
    priority: 20
  });
};

Configuration.$inject = ['ViewsProvider'];

ngModule.config(Configuration);
};
| apache-2.0 |
babman92/NineSocket | NineSocket.Data/NineResult.cs | 1083 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace NineSocket.Data
{
public class NineResult
{
    /// <summary>Primary result code for the operation.</summary>
    public string Code { get; set; }

    /// <summary>Keyed messages carried along with the result.</summary>
    Dictionary<string, string> Result;

    /// <summary>
    /// Creates a result with an initial code and one keyed message.
    /// BUG FIX: this constructor previously assigned <paramref name="key"/>
    /// to <see cref="Code"/>, leaving the <paramref name="code"/> argument
    /// unused.
    /// </summary>
    public NineResult(string code, string key, string message)
    {
        Code = code;
        Result = new Dictionary<string, string>();
        Result.Add(key, message);
    }

    /// <summary>Creates an empty result.</summary>
    public NineResult()
    {
        Code = string.Empty;
        Result = new Dictionary<string, string>();
    }

    /// <summary>
    /// Adds a keyed message; throws ArgumentException on a duplicate key.
    /// </summary>
    public void AddData(string key, string message)
    {
        Result.Add(key, message);
    }

    /// <summary>
    /// Returns the message stored under <paramref name="key"/>, or the empty
    /// string when the key is absent. The previous try/catch was dead code:
    /// TryGetValue does not throw on a miss, it sets the out parameter to
    /// null, so the old version silently returned null despite initializing
    /// the local to string.Empty.
    /// </summary>
    public string GetData(string key)
    {
        string value;
        if (Result.TryGetValue(key, out value))
            return value;
        return string.Empty;
    }
}
| apache-2.0 |
stackforge/networking-hyperv | networking_hyperv/tests/unit/neutron/test_common_utils.py | 2464 | # Copyright 2013 Cloudbase Solutions SRL
# Copyright 2013 Pedro Navarro Perez
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from os_win import exceptions as os_win_exc
from networking_hyperv.neutron import _common_utils
from networking_hyperv.tests import base
@ddt.ddt
class TestCommonUtils(base.BaseTestCase):
    """Tests for the decorators in networking_hyperv.neutron._common_utils."""

    @mock.patch.object(_common_utils.lockutils, 'synchronized_with_prefix')
    def test_create_synchronized_decorator(self, mock_sync_with_prefix):
        # Spy that records the arguments the decorated function receives.
        fake_method_side_effect = mock.Mock()
        lock_prefix = 'test-'
        port_synchronized = _common_utils.get_port_synchronized_decorator(
            lock_prefix)

        @port_synchronized
        def fake_method(fake_arg, port_id):
            fake_method_side_effect(fake_arg, port_id)

        mock_synchronized = mock_sync_with_prefix.return_value
        # Identity decorator so the wrapped function runs unchanged.
        mock_synchronized.return_value = lambda x: x
        expected_lock_name = 'test-port-lock-%s' % mock.sentinel.port_id
        fake_method(fake_arg=mock.sentinel.arg, port_id=mock.sentinel.port_id)
        # The lock name must combine the prefix with the port id, and the
        # call must pass through to the original function untouched.
        mock_sync_with_prefix.assert_called_once_with(lock_prefix)
        mock_synchronized.assert_called_once_with(expected_lock_name)
        fake_method_side_effect.assert_called_once_with(
            mock.sentinel.arg, mock.sentinel.port_id)

    @ddt.data(os_win_exc.HyperVPortNotFoundException(message='test'),
              os_win_exc.HyperVvNicNotFound(message='test'))
    def test_ignore_missing_ports_decorator(self, exc):
        # The decorator must swallow both port-not-found exception types.
        fake_method_side_effect = mock.Mock()
        fake_method_side_effect.side_effect = exc

        @_common_utils.ignore_missing_ports
        def fake_method(fake_arg, port_id):
            fake_method_side_effect(fake_arg, port_id)

        # Must not raise despite the side effect throwing.
        fake_method(mock.sentinel.arg, mock.sentinel.port_id)
        fake_method_side_effect.assert_called_once_with(
            mock.sentinel.arg, mock.sentinel.port_id)
| apache-2.0 |
salviof/SuperBits_FrameWork | SB_FRAMEWORK/SBWebPaginasSemTagLib/src/main/java/com/super_bits/modulosSB/webPaginas/JSFManagedBeans/declarados/util/PgUtilFormatar.java | 710 | /*
* Desenvolvido pela equipe Super-Bits.com CNPJ 20.019.971/0001-90
*/
package com.super_bits.modulosSB.webPaginas.JSFManagedBeans.declarados.util;
import com.super_bits.modulosSB.SBCore.UtilGeral.UtilSBCoreNumeros;
import javax.enterprise.context.RequestScoped;
import javax.inject.Named;
/**
*
* @author desenvolvedor
*/
@RequestScoped
@Named
public class PgUtilFormatar {

    /** Formats a Double as currency via UtilSBCoreNumeros.converterMoeda. */
    public String moeda(Double pValor) {
        return UtilSBCoreNumeros.converterMoeda(pValor);
    }

    /** Formats a long via UtilSBCoreNumeros.formatarNumeroInteiro. */
    public String inteiro(long pValor) {
        return UtilSBCoreNumeros.formatarNumeroInteiro(pValor);
    }

    /** Formats a long as currency via UtilSBCoreNumeros.converterMoeda. */
    public String moeda(long pValor) {
        return UtilSBCoreNumeros.converterMoeda(pValor);
    }
}
| apache-2.0 |
haikuowuya/android_system_code | src/com/sun/org/apache/xml/internal/serializer/SerializationHandler.java | 4357 | /*
* Copyright (c) 2011-2012, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: SerializationHandler.java,v 1.2.4.1 2005/09/15 08:15:22 suresh_emailid Exp $
*/
package com.sun.org.apache.xml.internal.serializer;
import java.io.IOException;
import javax.xml.transform.Transformer;
import org.w3c.dom.Node;
import org.xml.sax.ContentHandler;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.ext.DeclHandler;
/**
* This interface is the one that a serializer implements. It is a group of
* other interfaces, such as ExtendedContentHandler, ExtendedLexicalHandler etc.
* In addition there are other methods, such as reset().
*
* This class is public only because it is used in another package,
* it is not a public API.
*
* @xsl.usage internal
*/
public interface SerializationHandler
extends
ExtendedContentHandler,
ExtendedLexicalHandler,
XSLOutputAttributes,
DeclHandler,
org.xml.sax.DTDHandler,
ErrorHandler,
DOMSerializer,
Serializer
{
/**
* Set the SAX Content handler that the serializer sends its output to. This
* method only applies to a ToSAXHandler, not to a ToStream serializer.
*
* @see Serializer#asContentHandler()
* @see ToSAXHandler
*/
public void setContentHandler(ContentHandler ch);
public void close();
/**
* Notify that the serializer should take this DOM node as input to be
* serialized.
*
* @param node the DOM node to be serialized.
* @throws IOException
*/
public void serialize(Node node) throws IOException;
/**
* Turns special character escaping on/off.
*
* Note that characters will
* never, even if this option is set to 'true', be escaped within
* CDATA sections in output XML documents.
*
* @param escape true if escaping is to be set on.
*/
public boolean setEscaping(boolean escape) throws SAXException;
/**
* Set the number of spaces to indent for each indentation level.
* @param spaces the number of spaces to indent for each indentation level.
*/
public void setIndentAmount(int spaces);
/**
* Set the transformer associated with the serializer.
* @param transformer the transformer associated with the serializer.
*/
public void setTransformer(Transformer transformer);
/**
* Get the transformer associated with the serializer.
* @return Transformer the transformer associated with the serializer.
*/
public Transformer getTransformer();
/**
* Used only by TransformerSnapshotImpl to restore the serialization
* to a previous state.
*
* @param mappings NamespaceMappings
*/
public void setNamespaceMappings(NamespaceMappings mappings);
/**
* Flush any pending events currently queued up in the serializer. This will
* flush any input that the serializer has which it has not yet sent as
* output.
*/
public void flushPending() throws SAXException;
/**
 * Default behavior is to expand DTD entities,
 * that is the initial default value is true.
 * @param expand true if DTD entities are to be expanded,
 * false if they are to be left as DTD entity references.
 */
public void setDTDEntityExpansion(boolean expand);

/**
 * Specify if the output will be treated as a standalone property
 * @param isStandalone true if the http://www.oracle.com/xml/is-standalone is set to yes
 * @see OutputPropertiesFactory ORACLE_IS_STANDALONE
 */
public void setIsStandalone(boolean isStandalone);
}
| apache-2.0 |
gawkermedia/googleads-java-lib | modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201601/mcm/CustomerServiceInterfacemutate.java | 2037 |
package com.google.api.ads.adwords.jaxws.v201601.mcm;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
*
* Update the authorized customer.
*
* <p>While there are a limited set of properties available to update, please read this
* <a href="https://support.google.com/analytics/answer/1033981">help center article
* on auto-tagging</a> before updating {@code customer.autoTaggingEnabled}.
*
* @param customer the requested updated value for the customer.
* @throws ApiException
*
*
* <p>Java class for mutate element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="mutate">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="customer" type="{https://adwords.google.com/api/adwords/mcm/v201601}Customer" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "customer"
})
@XmlRootElement(name = "mutate")
public class CustomerServiceInterfacemutate {

    /** The customer payload carried by this mutate request; may be null when unset. */
    protected Customer customer;

    /**
     * Returns the customer carried by this mutate request.
     *
     * @return
     *     possible object is
     *     {@link Customer }, or null when no customer was set
     */
    public Customer getCustomer() {
        return customer;
    }

    /**
     * Stores the customer to be carried by this mutate request.
     *
     * @param value
     *     allowed object is
     *     {@link Customer }; may be null
     */
    public void setCustomer(Customer value) {
        this.customer = value;
    }

}
| apache-2.0 |
Banno/sbt-plantuml-plugin | src/main/java/h/Dt_t.java | 1936 | /* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* Project Info: http://plantuml.com
*
* This file is part of Smetana.
* Smetana is a partial translation of Graphviz/Dot sources from C to Java.
*
* (C) Copyright 2009-2017, Arnaud Roques
*
* This translation is distributed under the same Licence as the original C program:
*
*************************************************************************
* Copyright (c) 2011 AT&T Intellectual Property
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors: See CVS logs. Details at http://www.graphviz.org/
*************************************************************************
*
* THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC
* LICENSE ("AGREEMENT"). [Eclipse Public License - v 1.0]
*
* ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES
* RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
*
* You may obtain a copy of the License at
*
* http://www.eclipse.org/legal/epl-v10.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package h;
import java.util.Arrays;
import java.util.List;
//2 u1i3hxdborawdyw7d778quhw
/**
 * Marker interface mirroring the C typedef {@code typedef struct _dt_s Dt_t}
 * from the original Graphviz sources translated into Smetana. The DEFINITION
 * list keeps the original C declaration for traceability.
 */
public interface Dt_t extends _dt_s {
// Original C declaration this interface was generated from.
public static List<String> DEFINITION = Arrays.asList(
"typedef struct _dt_s Dt_t");
}
// typedef struct _dt_s Dt_t; | apache-2.0 |
luxmeter/easycomponentrenderer | easycomponentrenderer/src/main/java/luxmeter/easy/componentrenderer/DataModel.java | 101 | package luxmeter.easy.componentrenderer;
/**
 * Minimal read-only lookup abstraction used by component renderers to fetch
 * values by string key.
 */
public interface DataModel {
/**
 * Returns the value stored under the given key.
 *
 * @param key lookup key; behavior for unknown keys is implementation
 *            defined (presumably null -- confirm with implementations)
 * @return the associated value
 */
Object get(String key);
}
| apache-2.0 |
kamiljano/github-password-finder | lib/github/repositories.js | 2072 | 'use strict';
const config = require('../config/config');
const _ = require('lodash');
const queryBuilder = require('./query_builder');
const githubApiUrl = config.opts.githubApiUrl;
const rawGithubUrl = config.opts.rawSourceGithubUrl;
const githubHtmlUrl = config.opts.githubHtmlUrl;
const PER_PAGE = 100;
/**
 * Converts a github.com "blob" HTML URL into its raw-content equivalent so
 * the file body can be fetched directly.
 */
function buildRawUrl(htmlUrl) {
    const rawHostUrl = htmlUrl.replace(githubHtmlUrl, rawGithubUrl);
    return rawHostUrl.replace('/blob/', '/');
}
/**
 * Attaches a lazy getContent() loader to a github search-result file entry.
 * Mutates (and returns) the given file object.
 */
function enrichFile(file, client) {
    const contentLoader = () => client.execute.get(
        client.buildOptions(buildRawUrl(file['html_url']), false),
        error => `Request for github commits failed with code ${error.statusCode} for the following url ${file['html_url']}`);
    return _.merge(file, {getContent: contentLoader});
}
/**
 * Fetches every page of a github search result, merging the paging
 * parameters into options.qs on each iteration.
 * Returns the concatenated `items` of all pages.
 */
async function loadAllPages(options, client) {
    let collected = [];
    let page = 1;
    let morePagesExpected = true;
    while (morePagesExpected) {
        options.qs = _.merge(options.qs, {
            per_page: PER_PAGE,
            page: page
        });
        const result = await client.execute.get(options, error => `Request for github repository code failed with code ${error.statusCode} for the following options: ${JSON.stringify(options)}`);
        collected = collected.concat(result.items);
        morePagesExpected = PER_PAGE * page < result['total_count'];
        page++;
    }
    return collected;
}
/**
 * Client for searching code inside github repositories. Constructed with
 * pre-configured request helpers rather than building requests itself.
 */
module.exports.GithubUserRepositoryClient = class {
    /**
     * @param buildOptions function(url, [json]) building HTTP request options
     * @param executors request helpers; this class uses executors.get(options, errorMessageFn)
     */
    constructor(buildOptions, executors) {
        this.buildOptions = buildOptions;
        this.execute = executors;
    }
    /**
     * @param codeToSearch
     * Fragment of the code that should be searched for
     *
     * @param params {
     *
     * user: username
     * repo: repository name
     *
     * }
     * @returns all matching file entries (across every result page), each
     * enriched with a lazy getContent() loader
     */
    async searchInAllFiles(codeToSearch, params) {
        const options = _.merge(this.buildOptions(`${githubApiUrl}/search/code`), {
            qs: {
                q: queryBuilder.query(codeToSearch, params)
            }
        });
        const result = await loadAllPages(options, this);
        return result.map(commit => enrichFile(commit, this));
    }
};
Sellegit/j2objc | runtime/src/main/java/apple/uikit/NSStringDrawingContext.java | 1863 | package apple.uikit;
import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.foundation.*;
import apple.coreanimation.*;
import apple.coredata.*;
import apple.coreimage.*;
import apple.coretext.*;
import apple.corelocation.*;
/**
* @since Available in iOS 6.0 and later.
*/
@Library("UIKit/UIKit.h") @Mapping("NSStringDrawingContext")
public class NSStringDrawingContext
extends NSObject
{
/** Creates an empty drawing context (maps to Objective-C {@code init}). */
@Mapping("init")
public NSStringDrawingContext() { }
/** Returns the minimum scale factor applied when shrinking text to fit. */
@Mapping("minimumScaleFactor")
public native @MachineSizedFloat double getMinimumScaleFactor();
/** Sets the minimum scale factor applied when shrinking text to fit. */
@Mapping("setMinimumScaleFactor:")
public native void setMinimumScaleFactor(@MachineSizedFloat double v);
/**
 * Returns the minimum tracking adjustment.
 * @since Available in iOS 6.0 and later.
 * @deprecated Deprecated in iOS 7.0.
 */
@Deprecated
@Mapping("minimumTrackingAdjustment")
public native @MachineSizedFloat double getMinimumTrackingAdjustment();
/**
 * Sets the minimum tracking adjustment.
 * @since Available in iOS 6.0 and later.
 * @deprecated Deprecated in iOS 7.0.
 */
@Deprecated
@Mapping("setMinimumTrackingAdjustment:")
public native void setMinimumTrackingAdjustment(@MachineSizedFloat double v);
/** Returns the scale factor actually used after the last drawing operation. */
@Mapping("actualScaleFactor")
public native @MachineSizedFloat double getActualScaleFactor();
/**
 * Returns the tracking adjustment actually used after the last drawing operation.
 * @since Available in iOS 6.0 and later.
 * @deprecated Deprecated in iOS 7.0.
 */
@Deprecated
@Mapping("actualTrackingAdjustment")
public native @MachineSizedFloat double getActualTrackingAdjustment();
/** Returns the bounds occupied by the rendered content after the last drawing operation. */
@Mapping("totalBounds")
public native CGRect getTotalBounds();
}
| apache-2.0 |
LegNeato/buck | src/com/facebook/buck/ide/intellij/aggregation/AggregationModuleFactory.java | 958 | /*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.ide.intellij.aggregation;
import com.facebook.buck.core.model.targetgraph.TargetNode;
import com.google.common.collect.ImmutableSet;
import java.nio.file.Path;
/**
 * Factory for building {@code AggregationModule} instances from a base path
 * and the set of build targets grouped under that module.
 */
public interface AggregationModuleFactory {
/**
 * Creates an aggregation module rooted at the given path.
 *
 * @param moduleBasePath base path of the module to create
 * @param targetNodes the build targets aggregated into the module
 * @return the aggregation module describing the grouped targets
 */
AggregationModule createAggregationModule(
Path moduleBasePath, ImmutableSet<TargetNode<?, ?>> targetNodes);
}
| apache-2.0 |
baby-gnu/one | src/scheduler/src/pool/VirtualMachineXML.cc | 19118 | /* -------------------------------------------------------------------------- */
/* Copyright 2002-2020, OpenNebula Project, OpenNebula Systems */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may */
/* not use this file except in compliance with the License. You may obtain */
/* a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */
/* See the License for the specific language governing permissions and */
/* limitations under the License. */
/* -------------------------------------------------------------------------- */
#include <algorithm>
#include "VirtualMachineXML.h"
#include "ScheduledAction.h"
#include "DatastoreXML.h"
#include "DatastorePoolXML.h"
#include "NebulaUtil.h"
#include "History.h"
#include "RankScheduler.h"
using namespace std;
/******************************************************************************/
/******************************************************************************/
/* INITIALIZE VM object attributes from its XML representation */
/******************************************************************************/
/******************************************************************************/
/**
 *  Parses the VM XML document and initializes the scheduling attributes:
 *  identity and state flags, capacity (CPU/memory), rank expressions,
 *  host/datastore/NIC scheduling requirements, templates and storage usage.
 */
void VirtualMachineXML::init_attributes()
{
    std::vector<xmlNodePtr> nodes;
    std::vector<VectorAttribute*> attrs;

    int rc;
    int action;
    int tmp;

    std::string automatic_requirements;
    std::string automatic_ds_requirements;
    std::string automatic_nic_requirements;

    /**************************************************************************/
    /*  VM attributes and flags                                               */
    /**************************************************************************/
    xpath(oid, "/VM/ID", -1);
    xpath(uid, "/VM/UID", -1);
    xpath(gid, "/VM/GID", -1);

    // State id 3 -- ACTIVE (TODO(review): confirm against the VM state enum).
    xpath(tmp, "/VM/STATE", -1);
    active = tmp == 3;

    xpath(tmp, "/VM/RESCHED", 0);
    resched = tmp == 1;

    // A VM is considered "resuming" if its last action stopped or undeployed it.
    xpath(action, "/VM/HISTORY_RECORDS/HISTORY/ACTION", -1);
    resume = (action == VMActions::STOP_ACTION || action == VMActions::UNDEPLOY_ACTION
        || action == VMActions::UNDEPLOY_HARD_ACTION );

    xpath(hid, "/VM/HISTORY_RECORDS/HISTORY/HID", -1);
    xpath(dsid, "/VM/HISTORY_RECORDS/HISTORY/DS_ID", -1);

    xpath(stime, "/VM/STIME", (time_t) 0);

    /**************************************************************************/
    /*  VM Capacity memory, cpu and disk (system ds storage)                  */
    /**************************************************************************/
    xpath<long int>(memory, "/VM/TEMPLATE/MEMORY", 0);
    xpath<float>(cpu, "/VM/TEMPLATE/CPU", 0);

    /**************************************************************************/
    /*  Scheduling rank expresions for:                                       */
    /*    - host                                                              */
    /*    - datastore                                                         */
    /**************************************************************************/
    rc = xpath(rank, "/VM/USER_TEMPLATE/SCHED_RANK", "");

    if (rc != 0)
    {
        // Compatibility with previous versions
        xpath(rank, "/VM/USER_TEMPLATE/RANK", "");
    }

    xpath(ds_rank, "/VM/USER_TEMPLATE/SCHED_DS_RANK", "");

    /**************************************************************************/
    /*  Scheduling requirements for:                                          */
    /*    - host                                                              */
    /*    - datastore                                                         */
    /*    - network                                                           */
    /**************************************************************************/

    // ---------------------------------------------------------------------- //
    // Host requirements: user expression ANDed with the automatic one        //
    // ---------------------------------------------------------------------- //
    xpath(automatic_requirements, "/VM/TEMPLATE/AUTOMATIC_REQUIREMENTS", "");

    rc = xpath(requirements, "/VM/USER_TEMPLATE/SCHED_REQUIREMENTS", "");

    if (rc == 0)
    {
        if ( !automatic_requirements.empty() )
        {
            ostringstream oss;

            oss << automatic_requirements << " & ( " << requirements << " )";

            requirements = oss.str();
        }
    }
    else if ( !automatic_requirements.empty() )
    {
        requirements = automatic_requirements;
    }

    // ---------------------------------------------------------------------- //
    // Datastore requirements: same combination rule as host requirements     //
    // ---------------------------------------------------------------------- //
    xpath(automatic_ds_requirements, "/VM/TEMPLATE/AUTOMATIC_DS_REQUIREMENTS",
            "");

    rc = xpath(ds_requirements, "/VM/USER_TEMPLATE/SCHED_DS_REQUIREMENTS", "");

    if (rc == 0)
    {
        if ( !automatic_ds_requirements.empty() )
        {
            ostringstream oss;

            oss << automatic_ds_requirements << " & ( " << ds_requirements << " )";

            ds_requirements = oss.str();
        }
    }
    else if ( !automatic_ds_requirements.empty() )
    {
        ds_requirements = automatic_ds_requirements;
    }

    // ---------------------------------------------------------------------- //
    // Network requirements & rank, only for NICs in NETWORK_MODE = AUTO      //
    // ---------------------------------------------------------------------- //
    xpath(automatic_nic_requirements, "/VM/TEMPLATE/AUTOMATIC_NIC_REQUIREMENTS",
            "");

    if (get_nodes("/VM/TEMPLATE/NIC", nodes) > 0)
    {
        std::string net_mode;

        for (auto it_nodes = nodes.begin(); it_nodes != nodes.end(); ++it_nodes)
        {
            VirtualMachineTemplate * nic_template = new VirtualMachineTemplate;

            nic_template->from_xml_node(*it_nodes);

            bool rc = nic_template->get("NETWORK_MODE", net_mode);
            one_util::toupper(net_mode);

            // Only NICs scheduled automatically are tracked here.
            if ( !rc || net_mode != "AUTO" )
            {
                delete nic_template;
                continue;
            }

            std::string reqs, rank;
            int nic_id;

            nic_template->get("NIC_ID", nic_id);

            nics_ids_auto.insert(nic_id);

            VirtualMachineNicXML * the_nic = new VirtualMachineNicXML();

            nics.insert(make_pair(nic_id, the_nic));

            if ( nic_template->get("SCHED_REQUIREMENTS", reqs) )
            {
                if ( !automatic_nic_requirements.empty() )
                {
                    ostringstream oss;

                    oss << automatic_nic_requirements <<" & ( " << reqs << " )";

                    reqs = oss.str();
                }

                the_nic->set_requirements(reqs);
            }

            if ( nic_template->get("SCHED_RANK", rank) )
            {
                the_nic->set_rank(rank);
            }

            delete nic_template;
        }

        free_nodes(nodes);
    }

    nodes.clear();

    /**************************************************************************/
    /*  Template, user template, history information and rescheduling flag    */
    /**************************************************************************/
    if (get_nodes("/VM/TEMPLATE", nodes) > 0)
    {
        vm_template = new VirtualMachineTemplate;

        vm_template->from_xml_node(nodes[0]);

        free_nodes(nodes);
    }
    else
    {
        vm_template = 0;
    }

    nodes.clear();

    if (get_nodes("/VM/USER_TEMPLATE", nodes) > 0)
    {
        user_template = new VirtualMachineTemplate;

        user_template->from_xml_node(nodes[0]);

        free_nodes(nodes);
    }
    else
    {
        user_template = 0;
    }

    // Fix: user_template may be 0 when the XML has no USER_TEMPLATE node; the
    // previous code dereferenced it unconditionally (null pointer dereference).
    public_cloud = false;

    if (user_template != 0)
    {
        public_cloud = (user_template->get("PUBLIC_CLOUD", attrs) > 0);

        if (public_cloud == false)
        {
            attrs.clear();
            public_cloud = (user_template->get("EC2", attrs) > 0);
        }
    }

    only_public_cloud = false;

    if (vm_template != 0)
    {
        init_storage_usage();
    }
    else
    {
        system_ds_usage = 0;
    }
}
/* -------------------------------------------------------------------------- */
/* -------------------------------------------------------------------------- */
// TODO: use VirtualMachine::isVolatile(disk)
static bool isVolatile(const VectorAttribute * disk)
{
string type = disk->vector_value("TYPE");
one_util::toupper(type);
return ( type == "SWAP" || type == "FS");
}
/* -------------------------------------------------------------------------- */
/**
 *  Computes the storage needed by the VM disks, splitting usage between the
 *  system datastore (system_ds_usage, in MB) and each image datastore
 *  (ds_usage, keyed by datastore id).
 */
void VirtualMachineXML::init_storage_usage()
{
    vector<Attribute *> disks;
    vector<Attribute*>::iterator it;

    long long size;
    long long snapshot_size;

    string st;

    int ds_id;
    bool clone;

    system_ds_usage = 0;

    // remove() transfers ownership of the DISK attributes; they are freed
    // at the end of this method.
    int num = vm_template->remove("DISK", disks);

    for (it=disks.begin(); it != disks.end(); it++)
    {
        const VectorAttribute * disk = dynamic_cast<const VectorAttribute*>(*it);

        if (disk == 0)
        {
            continue;
        }

        // Disks without a SIZE cannot be accounted for.
        if (disk->vector_value("SIZE", size) != 0)
        {
            continue;
        }

        // Snapshots consume space in the same location as the disk itself.
        if (disk->vector_value("DISK_SNAPSHOT_TOTAL_SIZE", snapshot_size) == 0)
        {
            size += snapshot_size;
        }

        if (isVolatile(disk))
        {
            // Volatile disks (SWAP/FS) are always created on the system DS.
            system_ds_usage += size;
        }
        else
        {
            if (disk->vector_value("DATASTORE_ID", ds_id) != 0)
            {
                continue;
            }

            // Make sure the datastore entry exists even if the transfer
            // target ends up being NONE.
            if (ds_usage.count(ds_id) == 0)
            {
                ds_usage[ds_id] = 0;
            }

            if (disk->vector_value("CLONE", clone) != 0)
            {
                continue;
            }

            // The transfer target decides which datastore is charged.
            if (clone)
            {
                st = disk->vector_value("CLONE_TARGET");
            }
            else
            {
                st = disk->vector_value("LN_TARGET");
            }

            one_util::toupper(st);

            if (st == "SELF")
            {
                // Disk stays on its own image datastore.
                ds_usage[ds_id] += size;
            }
            else if (st == "SYSTEM")
            {
                // Disk is copied/linked into the system datastore.
                system_ds_usage += size;
            } // else st == NONE
        }
    }

    // Optionally charge VM memory against the system DS, scaled by the
    // configured factor (e.g. to reserve room for memory dumps).
    float factor = Scheduler::instance().get_mem_ds_scale();

    if (this->memory > 0 && factor >= 0)
    {
        system_ds_usage += this->memory * factor;
    }

    for (int i = 0; i < num ; i++)
    {
        delete disks[i];
    }
}
/******************************************************************************/
/******************************************************************************/
/* VM requirements and capacity interface */
/******************************************************************************/
/******************************************************************************/
/**
 *  ANDs an extra expression into the VM host scheduling requirements. An
 *  empty expression is ignored; otherwise it is appended as "& ( expr )".
 */
void VirtualMachineXML::add_requirements(const string& reqs)
{
    if (reqs.empty())
    {
        return;
    }

    if (requirements.empty())
    {
        requirements = reqs;
        return;
    }

    requirements += " & (" + reqs + ")";
}
/* -------------------------------------------------------------------------- */
/**
 *  Fills a HostShareCapacity request with the VM resource needs: CPU,
 *  memory, system DS storage, PCI devices and NUMA topology.
 *
 *  @param sr the capacity request, overwritten by this call
 */
void VirtualMachineXML::get_capacity(HostShareCapacity &sr) const
{
    sr.vmid = oid;

    sr.pci.clear();

    if (vm_template != 0)
    {
        vm_template->get("PCI", sr.pci);

        vm_template->get("NUMA_NODE", sr.nodes);

        sr.topology = vm_template->get("TOPOLOGY");
    }

    // A VM with no cpu or memory requests no capacity at all.
    if ( memory == 0 || cpu == 0 )
    {
        sr.cpu  = 0;
        sr.mem  = 0;
        sr.disk = 0;

        return;
    }

    sr.cpu  = (int) (cpu * 100); //100%
    sr.mem  = memory * 1024;     //Kilobytes
    sr.disk = system_ds_usage;   //MB
}
/* -------------------------------------------------------------------------- */
/**
 *  Adds the given capacity back to the VM counters and restores the
 *  NUMA_NODE attributes into the template (inverse of reset_capacity).
 */
void VirtualMachineXML::add_capacity(HostShareCapacity &sr)
{
    cpu    += sr.cpu;
    memory += sr.mem;

    system_ds_usage += sr.disk;

    vm_template->set(sr.nodes);
}
/* -------------------------------------------------------------------------- */
/**
 *  Moves the VM capacity into sr and zeroes the VM counters, extracting the
 *  NUMA_NODE attributes from the template into sr.nodes.
 *
 *  @param sr receives the current cpu, memory, system DS usage and NUMA nodes
 */
void VirtualMachineXML::reset_capacity(HostShareCapacity &sr)
{
    // Fix: removed the unused local `std::vector<VectorAttribute *> numa_nodes`
    // declared by the previous version.
    sr.cpu  = cpu;
    sr.mem  = memory;
    sr.disk = system_ds_usage;

    if ( vm_template != 0 )
    {
        // remove() transfers the NUMA_NODE attributes into sr.nodes
        vm_template->remove("NUMA_NODE", sr.nodes);
    }

    cpu    = 0;
    memory = 0;

    system_ds_usage = 0;
}
/* -------------------------------------------------------------------------- */
/**
 *  Checks that every image datastore used by the VM disks has enough free
 *  capacity for the required storage.
 *
 *  @param img_dspool pool of image datastores
 *  @param error_msg set to a human readable message on failure
 *  @return true if all datastores can hold the VM storage
 */
bool VirtualMachineXML::test_image_datastore_capacity(
    ImageDatastorePoolXML * img_dspool, string & error_msg) const
{
    for (auto ds_it = ds_usage.begin(); ds_it != ds_usage.end(); ++ds_it)
    {
        DatastoreXML* ds = img_dspool->get(ds_it->first);

        if (ds == 0)
        {
            // Fix: the previous code dereferenced ds to build the error
            // message even when the datastore lookup failed (null pointer
            // dereference). Report the missing datastore by its id instead.
            ostringstream oss;
            oss << "Image Datastore " << ds_it->first << " not found";

            error_msg = oss.str();
            return false;
        }

        if (!ds->test_capacity(ds_it->second))
        {
            ostringstream oss;
            oss << "Image Datastore " << ds->get_oid()
                << " does not have enough capacity";

            error_msg = oss.str();
            return false;
        }
    }

    return true;
}
/* -------------------------------------------------------------------------- */
/**
 *  Adds the storage requested by this VM to the capacity counters of each
 *  image datastore it uses. Unknown datastores are skipped.
 */
void VirtualMachineXML::add_image_datastore_capacity(
        ImageDatastorePoolXML * img_dspool)
{
    for (const auto& usage : ds_usage)
    {
        DatastoreXML *ds = img_dspool->get(usage.first);

        if (ds != 0)
        {
            ds->add_capacity(usage.second);
        }
    }
}
//******************************************************************************
// Functions to schedule network interfaces (NIC)
//******************************************************************************
/**
 *  Looks up the NIC (in AUTO network mode) with the given id.
 *
 *  @return the NIC object, or 0 if the id is unknown
 */
VirtualMachineNicXML * VirtualMachineXML::get_nic(int nic_id) const
{
    auto it = nics.find(nic_id);

    if ( it == nics.end() )
    {
        return 0;
    }

    return it->second;
}
/* -------------------------------------------------------------------------- */
/**
 *  Returns the SCHED_RANK expression of the given NIC, or an empty string
 *  when the NIC id is unknown.
 */
const string& VirtualMachineXML::get_nic_rank(int nic_id) const
{
    static const std::string empty_rank;

    auto it = nics.find(nic_id);

    return it == nics.end() ? empty_rank : it->second->get_rank();
};
/* -------------------------------------------------------------------------- */
/**
 *  Returns the SCHED_REQUIREMENTS expression of the given NIC, or an empty
 *  string when the NIC id is unknown.
 */
const string& VirtualMachineXML::get_nic_requirements(int nic_id) const
{
    static const std::string empty_reqs;

    auto it = nics.find(nic_id);

    return it == nics.end() ? empty_reqs : it->second->get_requirements();
}
//******************************************************************************
// Logging
//******************************************************************************
/**
 *  Dumps the matched hosts, datastores and (per AUTO NIC) networks of a VM,
 *  highest priority first, for scheduler debugging. Prints nothing when the
 *  VM has no matching host.
 */
ostream& operator<<(ostream& os, VirtualMachineXML& vm)
{
    const vector<Resource *> resources = vm.match_hosts.get_resources();

    vector<Resource *>::const_reverse_iterator i;

    if (resources.empty())
    {
        return os;
    }

    os << "Virtual Machine: " << vm.oid << endl << endl;

    os << "\tPRI\tID - HOSTS"<< endl
       << "\t------------------------" << endl;

    // Reverse iteration prints the highest priority matches first.
    for (i = resources.rbegin(); i != resources.rend() ; i++)
    {
        os << "\t" << (*i)->priority << "\t" << (*i)->oid << endl;
    }

    os << endl;

    os << "\tPRI\tID - DATASTORES"<< endl
       << "\t------------------------" << endl;

    const vector<Resource *> ds_resources = vm.match_datastores.get_resources();

    for (i = ds_resources.rbegin(); i != ds_resources.rend() ; i++)
    {
        os << "\t" << (*i)->priority << "\t" << (*i)->oid << endl;
    }

    os << endl;

    const set<int>& nics_ids = vm.get_nics_ids();

    for (auto nic_id : nics_ids)
    {
        os << "\tNIC_ID: "<< nic_id << endl
           << "\t-----------------------------------" << endl;

        os << "\tPRI\tID - NETWORKS"<< endl
           << "\t------------------------" << endl;

        const vector<Resource *> net_resources = vm.nics[nic_id]->get_match_networks();

        for (i = net_resources.rbegin(); i != net_resources.rend() ; i++)
        {
            os << "\t" << (*i)->priority << "\t" << (*i)->oid << endl;
        }

        os << endl;
    }

    return os;
};
/* -------------------------------------------------------------------------- */
/* -------------------------------------------------------------------------- */
void VirtualMachineXML::log(const string &st)
{
if ( user_template == 0 || st.empty())
{
return;
}
ostringstream oss;
oss << one_util::log_time() << " : " << st;
user_template->replace("SCHED_MESSAGE", oss.str());
}
/* -------------------------------------------------------------------------- */
/* -------------------------------------------------------------------------- */
bool VirtualMachineXML::clear_log()
{
string st;
if (user_template == 0)
{
return false;
}
user_template->get("SCHED_MESSAGE", st);
if (st.empty())
{
return false;
}
user_template->erase("SCHED_MESSAGE");
return true;
}
/* -------------------------------------------------------------------------- */
/* -------------------------------------------------------------------------- */
/**
 *  Normalizes a scheduled action name (lowercases it in place) and checks it
 *  against the list of actions the scheduler may execute.
 *
 *  @param action_st action name; converted to lower case as a side effect
 *  @return 0 if the action is supported, -1 otherwise
 */
int VirtualMachineXML::parse_action_name(string& action_st)
{
    one_util::tolower(action_st);

    static const string SUPPORTED_ACTIONS[] = {
        "terminate", "terminate-hard",
        "undeploy", "undeploy-hard",
        "hold", "release",
        "stop", "suspend", "resume",
        "reboot", "reboot-hard",
        "poweroff", "poweroff-hard",
        "snapshot-create", "snapshot-revert", "snapshot-delete",
        "disk-snapshot-create", "disk-snapshot-revert", "disk-snapshot-delete",

        // Compatibility with 4.x
        "shutdown", "shutdown-hard", "delete"
    };

    const string* first = SUPPORTED_ACTIONS;
    const string* last  = SUPPORTED_ACTIONS +
        sizeof(SUPPORTED_ACTIONS) / sizeof(SUPPORTED_ACTIONS[0]);

    if (std::find(first, last, action_st) == last)
    {
        return -1;
    }

    return 0;
};
| apache-2.0 |
PublicTransitAnalytics/ScoreGenerator | src/main/java/com/publictransitanalytics/scoregenerator/datalayer/directories/types/Coordinate.java | 1354 | /*
* Copyright 2017 Public Transit Analytics.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.publictransitanalytics.scoregenerator.datalayer.directories.types;
import lombok.Value;
/**
* A position on the surface of the earth.
*
* @author Public Transit Analytics
*/
@Value
public class Coordinate {

    String latitude;
    String longitude;

    /**
     * Builds a coordinate from a "latitude,longitude" string.
     *
     * @param coordinateString comma-separated latitude and longitude pair
     * @throws IllegalArgumentException if the string does not contain a
     *         comma-separated pair (the previous version threw a raw
     *         ArrayIndexOutOfBoundsException in that case)
     */
    public Coordinate(final String coordinateString) {
        final String[] coordinateStrings = coordinateString.split(",");
        if (coordinateStrings.length < 2) {
            throw new IllegalArgumentException(
                    "Malformed coordinate string: " + coordinateString);
        }
        latitude = coordinateStrings[0];
        longitude = coordinateStrings[1];
    }

    /**
     * Builds a coordinate from separate latitude and longitude strings.
     *
     * @param latitude the latitude component
     * @param longitude the longitude component
     */
    public Coordinate(final String latitude, final String longitude) {
        this.latitude = latitude;
        this.longitude = longitude;
    }

    /** Renders the coordinate back in "latitude,longitude" form. */
    @Override
    public String toString() {
        return String.format("%s,%s", latitude, longitude);
    }
}
| apache-2.0 |
InQBarna/AChartEngine | demo/src/org/achartengine/chartdemo/demo/chart/WeightDialChart.java | 2687 | /**
* Copyright (C) 2009, 2010 SC 4ViewSoft SRL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.achartengine.chartdemo.demo.chart;
import org.achartengine.ChartFactory;
import org.achartengine.model.CategorySeries;
import org.achartengine.renderer.DialRenderer;
import org.achartengine.renderer.SimpleSeriesRenderer;
import org.achartengine.renderer.DialRenderer.Type;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
/**
* Budget demo pie chart.
*/
public class WeightDialChart extends AbstractDemoChart {
/**
 * Returns the chart name.
 * @return the chart name
 */
public String getName() {
return "Weight chart";
}
/**
 * Returns the chart description.
 * @return the chart description
 */
public String getDesc() {
return "The weight indicator (dial chart)";
}
/**
 * Executes the chart demo: builds a three-needle dial (current, minimum and
 * maximum weight) scaled from 0 to 150 and wraps it in a launch intent.
 * @param context the context
 * @return the built intent
 */
public Intent execute(Context context) {
CategorySeries category = new CategorySeries("Weight indic");
category.add("Current", 75);
category.add("Minimum", 65);
category.add("Maximum", 90);
DialRenderer renderer = new DialRenderer();
renderer.setChartTitleTextSize(20);
// NOTE(review): overridden below by setLabelsTextSize(10); the effective
// label size is 10.
renderer.setLabelsTextSize(15);
renderer.setLegendTextSize(15);
renderer.setMargins(new int[] {20, 30, 15, 0});
// One renderer per series, in the same order the values were added:
// Current (blue), Minimum (dark green), Maximum (green).
SimpleSeriesRenderer r = new SimpleSeriesRenderer();
r.setColor(Color.BLUE);
renderer.addSeriesRenderer(r);
r = new SimpleSeriesRenderer();
r.setColor(Color.rgb(0, 150, 0));
renderer.addSeriesRenderer(r);
r = new SimpleSeriesRenderer();
r.setColor(Color.GREEN);
renderer.addSeriesRenderer(r);
renderer.setLabelsTextSize(10);
renderer.setLabelsColor(Color.WHITE);
renderer.setShowLabels(true);
// Current value is drawn as an arrow, the min/max bounds as needles.
renderer.setVisualTypes(new DialRenderer.Type[] {Type.ARROW, Type.NEEDLE, Type.NEEDLE});
renderer.setMinValue(0);
renderer.setMaxValue(150);
return ChartFactory.getDialChartIntent(context, category, renderer, "Weight indicator");
}
}
| apache-2.0 |
collegefb/fithdown | core/src/test/java/yt/javi/fithdown/core/application/source/services/CreateSourceServiceTest.java | 1976 | package yt.javi.fithdown.core.application.source.services;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import java.net.MalformedURLException;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import yt.javi.fithdown.core.application.source.requests.CreateSourceRequest;
import yt.javi.fithdown.core.application.source.responses.SourceResponse;
import yt.javi.fithdown.core.application.source.responses.SourceResponseFactory;
import yt.javi.fithdown.core.model.source.Source;
import yt.javi.fithdown.core.model.source.SourceFactory;
import yt.javi.fithdown.core.model.source.SourceRepository;
/**
 * Unit test for CreateSourceService: verifies that a create request flows
 * through the factory, is persisted by the repository, and is mapped to a
 * response. All collaborators are mocked.
 */
public class CreateSourceServiceTest {
private static final String SOURCE_NAME = "test";
private static final String SOURCE_URL = "http://www.test.ing";
// Object under test, wired with the mocks below in setUp().
private CreateSourceService service;
@Mock
private SourceFactory sourceFactory;
@Mock
private SourceRepository sourceRepository;
@Mock
private SourceResponseFactory sourceResponseFactory;
@Mock
private CreateSourceRequest createSourceRequest;
@Mock
private Source source;
@Mock
private SourceResponse sourceResponse;
@Before
public void setUp() throws Exception {
initMocks(this);
service = new CreateSourceService(sourceFactory, sourceRepository, sourceResponseFactory);
}
@Test
public void itIsPossibleToAddANewSource() throws MalformedURLException {
// Stub the full request -> factory -> repository -> response chain.
when(createSourceRequest.getName()).thenReturn(SOURCE_NAME);
when(createSourceRequest.getUrl()).thenReturn(SOURCE_URL);
when(sourceFactory.getSource(SOURCE_NAME, SOURCE_URL)).thenReturn(source);
when(sourceRepository.save(source)).thenReturn(source);
when(sourceResponseFactory.sourceResponse(source)).thenReturn(sourceResponse);
// The service returns Optional<SourceResponse>; orElse(null) unwraps it.
assertThat(service.execute(createSourceRequest).orElse(null), is(sourceResponse));
}
}
| apache-2.0 |
evandor/skysail-framework | skysail.server.app.todos.it/src/io/skysail/server/app/todos/test/PostListHtmlLargeTest.java | 1821 | package io.skysail.server.app.todos.test;
import io.skysail.client.testsupport.BrowserTests;
import io.skysail.server.app.todos.TodoList;
import io.skysail.server.app.todos.test.browser.TodoListBrowser;
import org.junit.Test;
import org.restlet.data.MediaType;
public class PostListHtmlLargeTest extends BrowserTests<TodoListBrowser, TodoList> {
@Test
public void postListResource_has_expected_Location() {
createBrowser(MediaType.TEXT_HTML);
browser.navigateToPostList();
browser.verifyLocation("/Todos/v2/Lists/");
}
@Test
public void postListResource_for_html_media_type_has_expected_LinkHeader() {
createBrowser(MediaType.TEXT_HTML);
browser.navigateToPostList();
browser.verifyHeader("Link", "</Todos/v2>; rel=\"item\"; title=\"Todos\"; verbs=\"GET\"");
browser.verifyHeader("Link", "</usermanagement/v1>; rel=\"item\"; title=\"usermanagement\"; verbs=\"GET\"");
browser.verifyHeader("Link", "</plugins/v1>; rel=\"item\"; title=\"plugins\"; verbs=\"GET\"");
browser.verifyHeaderCount("Link", 3);
}
@Test
public void postListResource_for_json_media_type_has_expected_LinkHeader() {
createBrowser(MediaType.APPLICATION_JSON);
browser.navigateToPostList();
browser.verifyHeader("Link", "</Todos/v2>; rel=\"item\"; title=\"Todos\"; verbs=\"GET\"");
browser.verifyHeader("Link", "</usermanagement/v1>; rel=\"item\"; title=\"usermanagement\"; verbs=\"GET\"");
browser.verifyHeader("Link", "</plugins/v1>; rel=\"item\"; title=\"plugins\"; verbs=\"GET\"");
browser.verifyHeaderCount("Link", 3);
}
private void createBrowser(MediaType mediaType) {
browser = new TodoListBrowser(mediaType, determinePort());
browser.setUser("admin");
}
} | apache-2.0 |
jvillego/engines | engine/templates/app/sources/controllers/MainController.php | 242 | <?php
// Default application controller; currently declares no local models and an
// empty main action.
class MainController extends EngineApplication{
//public static $template = 'Main';
// Returns the models this controller needs loaded locally; currently none.
public static function localModels(){
return array();
}
// Default entry action for the controller; intentionally empty.
public static function main(){
}
}
?>
| apache-2.0 |
eug48/hapi-fhir | hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/DefaultProfileValidationSupport.java | 10489 | package org.hl7.fhir.dstu2016may.hapi.validation;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.Charsets;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.dstu2016may.model.Bundle;
import org.hl7.fhir.dstu2016may.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu2016may.model.CodeSystem;
import org.hl7.fhir.dstu2016may.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu2016may.model.DomainResource;
import org.hl7.fhir.dstu2016may.model.OperationOutcome.IssueSeverity;
import org.hl7.fhir.dstu2016may.model.StructureDefinition;
import org.hl7.fhir.dstu2016may.model.ValueSet;
import org.hl7.fhir.dstu2016may.model.ValueSet.ConceptReferenceComponent;
import org.hl7.fhir.dstu2016may.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu2016may.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirContext;
/**
 * {@link IValidationSupport} implementation backed by the DSTU2016may
 * conformance resources (StructureDefinitions, CodeSystems and ValueSets)
 * bundled on the classpath.
 *
 * <p>All three resource caches are loaded lazily on first use and kept for
 * the lifetime of this instance; call {@link #flush()} to force a reload.
 *
 * <p>NOTE(review): the lazy initialization below is not synchronized, so
 * concurrent first calls may each load the bundles once (wasteful but not
 * incorrect, since the fields are only assigned fully-populated maps) —
 * confirm this matches the project's threading expectations.
 */
public class DefaultProfileValidationSupport implements IValidationSupport {

    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultProfileValidationSupport.class);

    /** URL -> CodeSystem, lazily loaded from the classpath bundles. */
    private Map<String, CodeSystem> myCodeSystems;
    /** URL -> StructureDefinition, lazily loaded from the classpath bundles. */
    private Map<String, StructureDefinition> myStructureDefinitions;
    /** URL -> ValueSet, lazily loaded from the classpath bundles. */
    private Map<String, ValueSet> myValueSets;

    /**
     * Expands a ValueSet include by listing the concepts of the referenced
     * code system, optionally restricted to the codes named in the include.
     *
     * @return the expansion; empty if the code system is unknown
     */
    @Override
    public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
        ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();

        Set<String> wantCodes = new HashSet<String>();
        for (ConceptReferenceComponent next : theInclude.getConcept()) {
            wantCodes.add(next.getCode());
        }

        CodeSystem system = fetchCodeSystem(theContext, theInclude.getSystem());
        if (system == null) {
            // Previously this dereferenced a null CodeSystem and threw an NPE
            // for unknown systems; return an empty expansion instead.
            ourLog.warn("Unknown code system, returning empty expansion: {}", theInclude.getSystem());
            return retVal;
        }
        for (ConceptDefinitionComponent next : system.getConcept()) {
            // An empty wantCodes set means "include every concept".
            if (wantCodes.isEmpty() || wantCodes.contains(next.getCode())) {
                retVal.addContains().setSystem(theInclude.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay());
            }
        }

        return retVal;
    }

    @Override
    public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
        return new ArrayList<StructureDefinition>(provideStructureDefinitionMap(theContext).values());
    }

    /** @return the CodeSystem with the given URL, or {@code null} if unknown */
    @Override
    public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
        return (CodeSystem) fetchCodeSystemOrValueSet(theContext, theSystem, true);
    }

    /**
     * Shared lookup for CodeSystems and ValueSets; both caches are populated
     * together from the same classpath bundles on first access.
     *
     * @param codeSystem {@code true} to look up a CodeSystem, {@code false} for a ValueSet
     */
    private DomainResource fetchCodeSystemOrValueSet(FhirContext theContext, String theSystem, boolean codeSystem) {
        Map<String, CodeSystem> codeSystems = myCodeSystems;
        Map<String, ValueSet> valueSets = myValueSets;
        if (codeSystems == null) {
            codeSystems = new HashMap<String, CodeSystem>();
            valueSets = new HashMap<String, ValueSet>();

            loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/dstu2016may/model/valueset/valuesets.xml");
            loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/dstu2016may/model/valueset/v2-tables.xml");
            loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/dstu2016may/model/valueset/v3-codesystems.xml");

            myCodeSystems = codeSystems;
            myValueSets = valueSets;
        }

        if (codeSystem) {
            return codeSystems.get(theSystem);
        } else {
            return valueSets.get(theSystem);
        }
    }

    /**
     * Fetches a conformance resource by canonical URL. Only
     * StructureDefinition and ValueSet URLs are recognized; anything else
     * returns {@code null}.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
        Validate.notBlank(theUri, "theUri must not be null or blank");

        if (theUri.startsWith("http://hl7.org/fhir/StructureDefinition/")) {
            return (T) fetchStructureDefinition(theContext, theUri);
        }
        if (theUri.startsWith("http://hl7.org/fhir/ValueSet/")) {
            return (T) fetchValueSet(theContext, theUri);
        }

        return null;
    }

    @Override
    public StructureDefinition fetchStructureDefinition(FhirContext theContext, String theUrl) {
        return provideStructureDefinitionMap(theContext).get(theUrl);
    }

    /** @return the ValueSet with the given URL, or {@code null} if unknown */
    ValueSet fetchValueSet(FhirContext theContext, String theSystem) {
        return (ValueSet) fetchCodeSystemOrValueSet(theContext, theSystem, false);
    }

    /** Discards all cached conformance resources; they reload on next use. */
    public void flush() {
        myCodeSystems = null;
        // Previously myValueSets was left populated here; it was rebuilt on the
        // next fetch anyway, but clearing it releases the memory immediately
        // and keeps the three caches consistent.
        myValueSets = null;
        myStructureDefinitions = null;
    }

    @Override
    public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
        CodeSystem cs = fetchCodeSystem(theContext, theSystem);
        return cs != null;
    }

    /**
     * Parses a classpath Bundle and files each entry into the CodeSystem or
     * ValueSet map by its canonical URL. Narrative text is stripped to save
     * memory. Missing resources are logged and skipped.
     */
    private void loadCodeSystems(FhirContext theContext, Map<String, CodeSystem> theCodeSystems, Map<String, ValueSet> theValueSets, String theClasspath) {
        ourLog.info("Loading CodeSystem/ValueSet from classpath: {}", theClasspath);
        InputStream valuesetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath);
        if (valuesetText != null) {
            InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

            Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
            for (BundleEntryComponent next : bundle.getEntry()) {
                if (next.getResource() instanceof CodeSystem) {
                    CodeSystem nextValueSet = (CodeSystem) next.getResource();
                    nextValueSet.getText().setDivAsString("");
                    String system = nextValueSet.getUrl();
                    if (isNotBlank(system)) {
                        theCodeSystems.put(system, nextValueSet);
                    }
                } else if (next.getResource() instanceof ValueSet) {
                    ValueSet nextValueSet = (ValueSet) next.getResource();
                    nextValueSet.getText().setDivAsString("");
                    String system = nextValueSet.getUrl();
                    if (isNotBlank(system)) {
                        theValueSets.put(system, nextValueSet);
                    }
                }
            }
        } else {
            ourLog.warn("Unable to load resource: {}", theClasspath);
        }
    }

    /**
     * Parses a classpath Bundle of StructureDefinitions into the given map,
     * keyed by canonical URL. Narrative text is stripped to save memory.
     */
    private void loadStructureDefinitions(FhirContext theContext, Map<String, StructureDefinition> theCodeSystems, String theClasspath) {
        ourLog.info("Loading structure definitions from classpath: {}", theClasspath);
        InputStream valuesetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath);
        if (valuesetText != null) {
            InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

            Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
            for (BundleEntryComponent next : bundle.getEntry()) {
                if (next.getResource() instanceof StructureDefinition) {
                    StructureDefinition nextSd = (StructureDefinition) next.getResource();
                    nextSd.getText().setDivAsString("");
                    String system = nextSd.getUrl();
                    if (isNotBlank(system)) {
                        theCodeSystems.put(system, nextSd);
                    }
                }
            }
        } else {
            ourLog.warn("Unable to load resource: {}", theClasspath);
        }
    }

    /** Lazily loads and returns the URL -> StructureDefinition cache. */
    private Map<String, StructureDefinition> provideStructureDefinitionMap(FhirContext theContext) {
        Map<String, StructureDefinition> structureDefinitions = myStructureDefinitions;
        if (structureDefinitions == null) {
            structureDefinitions = new HashMap<String, StructureDefinition>();

            loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/dstu2016may/model/profile/profiles-resources.xml");
            loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/dstu2016may/model/profile/profiles-types.xml");
            loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/dstu2016may/model/profile/profiles-others.xml");

            myStructureDefinitions = structureDefinitions;
        }
        return structureDefinitions;
    }

    /**
     * Validates a code against a known code system, honoring the system's
     * case sensitivity. Unknown systems or codes yield a WARNING result
     * rather than an error.
     */
    @Override
    public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
        CodeSystem cs = fetchCodeSystem(theContext, theCodeSystem);
        if (cs != null) {
            // Per FHIR, caseSensitive defaults to true when unstated.
            boolean caseSensitive = true;
            if (cs.hasCaseSensitive()) {
                caseSensitive = cs.getCaseSensitive();
            }

            CodeValidationResult retVal = testIfConceptIsInList(theCode, cs.getConcept(), caseSensitive);
            if (retVal != null) {
                return retVal;
            }
        }

        return new CodeValidationResult(IssueSeverity.WARNING, "Unknown code: " + theCodeSystem + " / " + theCode);
    }

    /**
     * Recursive search for a code in a concept tree.  For case-insensitive
     * systems the code is upper-cased once here; re-upper-casing on the
     * recursive path is a no-op.
     */
    private CodeValidationResult testIfConceptIsInList(String theCode, List<ConceptDefinitionComponent> conceptList, boolean theCaseSensitive) {
        String code = theCode;
        if (theCaseSensitive == false) {
            code = code.toUpperCase();
        }

        return testIfConceptIsInListInner(conceptList, theCaseSensitive, code);
    }

    /** Inner recursion: {@code code} is already normalized for case. */
    private CodeValidationResult testIfConceptIsInListInner(List<ConceptDefinitionComponent> conceptList, boolean theCaseSensitive, String code) {
        CodeValidationResult retVal = null;
        for (ConceptDefinitionComponent next : conceptList) {
            String nextCandidate = next.getCode();
            if (theCaseSensitive == false) {
                nextCandidate = nextCandidate.toUpperCase();
            }
            if (nextCandidate.equals(code)) {
                retVal = new CodeValidationResult(next);
                break;
            }

            // Recurse into child concepts.
            retVal = testIfConceptIsInList(code, next.getConcept(), theCaseSensitive);
            if (retVal != null) {
                break;
            }
        }
        return retVal;
    }
}
| apache-2.0 |
holtsoftware/potential-octo-wallhack | School C++/other2/calinder/calonder.cpp | 4463 | # Microsoft Developer Studio Project File - Name="calonder" - Package Owner=<4>
# Microsoft Developer Studio Generated Build File, Format Version 6.00
# ** DO NOT EDIT **
# TARGTYPE "Win32 (x86) Console Application" 0x0103
CFG=calonder - Win32 Debug
!MESSAGE This is not a valid makefile. To build this project using NMAKE,
!MESSAGE use the Export Makefile command and run
!MESSAGE
!MESSAGE NMAKE /f "calonder.mak".
!MESSAGE
!MESSAGE You can specify a configuration when running NMAKE
!MESSAGE by defining the macro CFG on the command line. For example:
!MESSAGE
!MESSAGE NMAKE /f "calonder.mak" CFG="calonder - Win32 Debug"
!MESSAGE
!MESSAGE Possible choices for configuration are:
!MESSAGE
!MESSAGE "calonder - Win32 Release" (based on "Win32 (x86) Console Application")
!MESSAGE "calonder - Win32 Debug" (based on "Win32 (x86) Console Application")
!MESSAGE
# Begin Project
# PROP AllowPerConfigDependencies 0
# PROP Scc_ProjName ""
# PROP Scc_LocalPath ""
CPP=cl.exe
RSC=rc.exe
!IF "$(CFG)" == "calonder - Win32 Release"
# PROP BASE Use_MFC 0
# PROP BASE Use_Debug_Libraries 0
# PROP BASE Output_Dir "Release"
# PROP BASE Intermediate_Dir "Release"
# PROP BASE Target_Dir ""
# PROP Use_MFC 0
# PROP Use_Debug_Libraries 0
# PROP Output_Dir "Release"
# PROP Intermediate_Dir "Release"
# PROP Target_Dir ""
# ADD BASE CPP /nologo /W3 /GX /O2 /D "WIN32" /D "NDEBUG" /D "_CONSOLE" /D "_MBCS" /Yu"stdafx.h" /FD /c
# ADD CPP /nologo /W3 /GX /O2 /D "WIN32" /D "NDEBUG" /D "_CONSOLE" /D "_MBCS" /Yu"stdafx.h" /FD /c
# ADD BASE RSC /l 0x409 /d "NDEBUG"
# ADD RSC /l 0x409 /d "NDEBUG"
BSC32=bscmake.exe
# ADD BASE BSC32 /nologo
# ADD BSC32 /nologo
LINK32=link.exe
# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /machine:I386
# ADD LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /machine:I386
!ELSEIF "$(CFG)" == "calonder - Win32 Debug"
# PROP BASE Use_MFC 0
# PROP BASE Use_Debug_Libraries 1
# PROP BASE Output_Dir "Debug"
# PROP BASE Intermediate_Dir "Debug"
# PROP BASE Target_Dir ""
# PROP Use_MFC 0
# PROP Use_Debug_Libraries 1
# PROP Output_Dir "Debug"
# PROP Intermediate_Dir "Debug"
# PROP Ignore_Export_Lib 0
# PROP Target_Dir ""
# ADD BASE CPP /nologo /W3 /Gm /GX /ZI /Od /D "WIN32" /D "_DEBUG" /D "_CONSOLE" /D "_MBCS" /Yu"stdafx.h" /FD /GZ /c
# ADD CPP /nologo /W3 /Gm /GX /ZI /Od /D "WIN32" /D "_DEBUG" /D "_CONSOLE" /D "_MBCS" /Yu"stdafx.h" /FD /GZ /c
# ADD BASE RSC /l 0x409 /d "_DEBUG"
# ADD RSC /l 0x409 /d "_DEBUG"
BSC32=bscmake.exe
# ADD BASE BSC32 /nologo
# ADD BSC32 /nologo
LINK32=link.exe
# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /debug /machine:I386 /pdbtype:sept
# ADD LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /profile /debug /machine:I386
!ENDIF
# Begin Target
# Name "calonder - Win32 Release"
# Name "calonder - Win32 Debug"
# Begin Group "Source Files"
# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
# Begin Source File
SOURCE=.\calonder.cpp
# End Source File
# Begin Source File
SOURCE=.\StdAfx.cpp
# ADD CPP /Yc"stdafx.h"
# End Source File
# End Group
# Begin Group "Header Files"
# PROP Default_Filter "h;hpp;hxx;hm;inl"
# Begin Source File
SOURCE=.\StdAfx.h
# End Source File
# End Group
# Begin Group "Resource Files"
# PROP Default_Filter "ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe"
# End Group
# Begin Source File
SOURCE=.\ReadMe.txt
# End Source File
# End Target
# End Project
| apache-2.0 |
baishuai/leetcode | algorithms/p055/055_test.go | 287 | package p055
import "testing"
// test asserts that canJump(nums) equals exp.
// t.Helper() makes failures report the calling test's line rather than this
// helper's, and the message now includes the input and both values instead of
// the uninformative "error answer".
func test(t *testing.T, nums []int, exp bool) {
	t.Helper()
	if got := canJump(nums); got != exp {
		t.Fatalf("canJump(%v) = %v, want %v", nums, got, exp)
	}
}
// TestExample0 covers the solvable example: from index 0 a jump of 2 or a
// chain of shorter jumps reaches the last index.
func TestExample0(t *testing.T) {
	test(t, []int{2, 3, 1, 1, 4}, true)
}
// TestExample1 covers the unsolvable example: every path lands on the 0 at
// index 3, so the last index is unreachable.
func TestExample1(t *testing.T) {
	test(t, []int{3, 2, 1, 0, 4}, false)
}
| apache-2.0 |
ubinity/ECPy | src/ecpy/borromean.py | 10755 | # Copyright 2016 Cedric Mesnil <cedric.mesnil@ubinity.com>, Ubinity SAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#python 2 compatibility
from builtins import int,pow
import hashlib
import random
import binascii
from ecpy.curves import Curve,Point
from ecpy.keys import ECPublicKey, ECPrivateKey
from ecpy.formatters import decode_sig, encode_sig, list_formats
from ecpy import ecrand
from ecpy.curves import ECPyException
def _h(b):
return binascii.hexlify(b)
def _point_to_bytes(point, compressed = True):
""" Point serialisation.
Serialization is the standard one:
- O2 x for even x in compressed form
- 03 x for odd x in compressed form
- 04 x y for uncompressed form
"""
if compressed:
b = point.x.to_bytes(32,'big')
if point.y & 1:
b = b"\x03"+b
else:
b = b"\x02"+b
else:
b = b"\x04"+point.x.to_bytes(32,'big')+point.y.to_bytes(32,'big')
return b
def _borromean_hash(m,e,i,j, H):
"""
All params are bytes.
m: bytes message
e: bytes point
i: int ring index
j: int secret index
"""
i = int(i).to_bytes(4,'big')
j = int(j).to_bytes(4,'big')
sha256 = H()
sha256.update(e)
sha256.update(m)
sha256.update(i)
sha256.update(j)
d = sha256.digest()
return d
class Borromean:
    """Borromean ring signature signer/verifier.

    Implements the scheme described in:
    https://github.com/Blockstream/borromean_paper/blob/master/borromean_draft_0.01_9ade1e49.pdf
    https://github.com/ElementsProject/secp256k1-zkp/blob/secp256k1-zkp/src/modules/rangeproof/borromean_impl.h

    The ElementsProject implementation has some tweaks compared to the PDF;
    this implementation is ElementsProject compliant.

    For now, only secp256k1+sha256 is supported.

    Args:
        fmt (str): in/out signature format. See :mod:`ecpy.formatters`. IGNORED.
    """

    def __init__(self, fmt="BTUPLE"):
        self.fmt = fmt  # currently unused; kept for API compatibility
        self._curve = Curve.get_curve('secp256k1')
        self._hash = hashlib.sha256

    def sign(self, msg, rings, pv_keys, pv_keys_index):
        """Signs a message hash.

        The public `rings` argument is a tuple of public key arrays: each
        element of the tuple is the list of public keys of that ring.

        A private key must be given for each ring; for each private key the
        corresponding public key is identified by its index in the ring.

        Example: with ring1 = [pu11, pu12] and ring2 = [pu21, pu22, pu23],
        signing with sec12 and sec21 is::

            borromean.sign(m, ([pu11, pu12], [pu21, pu22, pu23]),
                           [sec12, sec21], [1, 0])

        Args:
            msg (bytes): the message hash to sign
            rings (tuple of ecpy.keys.ECPublicKey[]): public key rings
            pv_keys (ecpy.keys.ECPrivateKey[]): keys to use for signing
            pv_keys_index (int[]): index of each signing key in its ring

        Returns:
            (e0, [s0, s1, ...]): signature, each element encoded as bytes
        """
        # shortcuts
        G = self._curve.generator
        order = self._curve.order

        # set up locals
        ring_count = len(rings)
        privkeys = pv_keys
        pubkeys = []
        rsizes = []
        for r in rings:
            pubkeys = pubkeys + r
            rsizes.append(len(r))
        e0 = None
        s = [None] * len(pubkeys)
        k = [None] * len(rings)

        # steps 2-3: walk each ring forward from the secret index, folding the
        # final e-value of every ring into e0.
        # BUG FIX: all hash calls below previously used the module-global ``m``
        # instead of the ``msg`` parameter (NameError when imported as a
        # library, and the argument was silently ignored).
        r0 = 0
        sha256_e0 = self._hash()
        for i in range(0, ring_count):
            k[i] = random.randint(1, order)
            kiG = k[i] * G
            j0 = pv_keys_index[i]
            e_ij = _point_to_bytes(kiG)
            for j in range(j0 + 1, rsizes[i]):
                s[r0 + j] = random.randint(1, order)
                e_ij = _borromean_hash(msg, e_ij, i, j, self._hash)
                e_ij = int.from_bytes(e_ij, 'big')
                sG_eP = s[r0 + j] * G + e_ij * pubkeys[r0 + j].W
                e_ij = _point_to_bytes(sG_eP)
            sha256_e0.update(e_ij)
            r0 += rsizes[i]
        sha256_e0.update(msg)
        e0 = sha256_e0.digest()

        # step 4: close each ring by solving for the s at the secret index.
        r0 = 0
        for i in range(0, ring_count):
            j0 = pv_keys_index[i]
            e_ij = _borromean_hash(msg, e0, i, 0, self._hash)
            e_ij = int.from_bytes(e_ij, 'big')
            for j in range(0, j0):
                s[r0 + j] = random.randint(1, order)
                sG_eP = s[r0 + j] * G + e_ij * pubkeys[r0 + j].W
                e_ij = _borromean_hash(msg, _point_to_bytes(sG_eP), i, j + 1, self._hash)
                e_ij = int.from_bytes(e_ij, 'big')
            s[r0 + j0] = (k[i] - privkeys[i].d * e_ij) % order
            r0 += rsizes[i]

        s = [int(sij).to_bytes(32, 'big') for sij in s]
        return (e0, s)

    def verify(self, msg, sig, rings):
        """Verifies a message signature.

        Args:
            msg (bytes): the message hash the signature covers
            sig ((bytes, bytes[])): signature to verify, as returned by sign()
            rings (tuple of ecpy.keys.ECPublicKey[]): public key rings

        Returns:
            bool: True if the signature verifies, False otherwise
        """
        # shortcuts
        G = self._curve.generator

        # set up locals
        ring_count = len(rings)
        pubkeys = []
        rsizes = []
        for r in rings:
            pubkeys = pubkeys + r
            rsizes.append(len(r))

        # Recompute every ring's e-chain from e0 and check the fold matches.
        # BUG FIX: hash calls below previously used the module-global ``m``
        # instead of the ``msg`` parameter.
        e0 = sig[0]
        s = sig[1]
        sha256_e0 = self._hash()
        r0 = 0
        for i in range(0, ring_count):
            e_ij = _borromean_hash(msg, e0, i, 0, self._hash)
            for j in range(0, rsizes[i]):
                e_ij = int.from_bytes(e_ij, 'big')
                s_ij = int.from_bytes(s[r0 + j], 'big')
                sG_eP = s_ij * G + e_ij * pubkeys[r0 + j].W
                e_ij = _point_to_bytes(sG_eP)
                if j != rsizes[i] - 1:
                    e_ij = _borromean_hash(msg, e_ij, i, j + 1, self._hash)
                else:
                    sha256_e0.update(e_ij)
            r0 += rsizes[i]
        sha256_e0.update(msg)
        e0x = sha256_e0.digest()
        return e0 == e0x
if __name__ == "__main__":
    import sys

    def strsig(sigma):
        """Pretty-print a (e0, [s...]) signature in hex.

        BUG FIX: this previously called the undefined name ``h`` (a NameError
        whenever invoked); the module-level helper is ``_h``.
        """
        print("e0: %s" % _h(sigma[0]))
        i = 0
        for s in sigma[1]:
            print("s%d: %s" % (i, _h(s)))
            i += 1

    try:
        # Exhaustive self-test: for every pool size, every split of the pool
        # into two rings, and every choice of secret index in each ring,
        # sign and verify, then corrupt e0 (rotate one byte) and check the
        # signature no longer verifies.
        #
        # layout:
        #   nrings = 2
        #   ring 1 has i keys, ring 2 has the rest
        #   sec = [ring1-sec, ring2-sec], secidx = [s1, s2]
        cv = Curve.get_curve('secp256k1')
        seckey0 = ECPrivateKey(0xf026a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey1 = ECPrivateKey(0xf126a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey2 = ECPrivateKey(0xf226a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey3 = ECPrivateKey(0xf326a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey4 = ECPrivateKey(0xf426a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey5 = ECPrivateKey(0xf526a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey6 = ECPrivateKey(0xf626a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey7 = ECPrivateKey(0xf726a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        seckey8 = ECPrivateKey(0xf826a4e75eec75544c0f44e937dcf5ee6355c7176600b9688c667e5c283b43c5, cv)
        pubkey0 = seckey0.get_public_key()
        pubkey1 = seckey1.get_public_key()
        pubkey2 = seckey2.get_public_key()
        pubkey3 = seckey3.get_public_key()
        pubkey4 = seckey4.get_public_key()
        pubkey5 = seckey5.get_public_key()
        pubkey6 = seckey6.get_public_key()
        pubkey7 = seckey7.get_public_key()
        pubkey8 = seckey8.get_public_key()

        allpubs = [pubkey0, pubkey1, pubkey2, pubkey3, pubkey4, pubkey5, pubkey6, pubkey7]
        allsecs = [seckey0, seckey1, seckey2, seckey3, seckey4, seckey5, seckey6, seckey7]

        m = int(0x800102030405060708090a0b0c0d0e0f800102030405060708090a0b0c0d0e0f)
        m = m.to_bytes(32, 'big')

        borromean = Borromean()

        for l in range(2, len(allpubs)):
            pubs = allpubs[:l]
            secs = allsecs[:l]
            print("pool has %d key" % len(pubs))
            for i in range(1, len(pubs)):
                pubring1 = pubs[0:i]
                pubring2 = pubs[i:]
                secring1 = secs[0:i]
                secring2 = secs[i:]
                print("ring1 has %d keys" % len(pubring1))
                print("ring2 has %d keys" % len(pubring2))
                for s1 in range(0, len(pubring1)):
                    for s2 in range(0, len(pubring2)):
                        print("testing %d %d" % (s1, s2))
                        pubset = (pubring1, pubring2)
                        secset = [secring1[s1], secring2[s2]]
                        secidx = [s1, s2]
                        sigma = borromean.sign(m, pubset, secset, secidx)
                        assert(borromean.verify(m, sigma, pubset))
                        # Corrupt e0 and make sure verification now fails.
                        e0 = sigma[0]
                        e0 = e0[1:] + e0[:1]
                        sigma = (e0, sigma[1])
                        assert(not borromean.verify(m, sigma, pubset))
        # ##OK!
        print("All internal assert OK!")
    finally:
        pass
| apache-2.0 |
doreminiature/miniature | js/webviews.js | 10019 | /* implements selecting webviews, switching between them, and creating new ones. */
// Internal browser pages shown in place of real content.
var phishingWarningPage = 'file://' + __dirname + '/pages/phishing/index.html' // TODO move this somewhere that actually makes sense
var crashedWebviewPage = 'file:///' + __dirname + '/pages/crash/index.html'
var errorPage = 'file:///' + __dirname + '/pages/error/index.html'

// Container element that all <webview> tags are appended to.
var webviewBase = document.getElementById('webviews')

// Registries consumed by getWebviewDom when a new webview is created:
// DOM/webContents event bindings and ipc-message channel handlers.
var webviewEvents = []
var webviewIPC = []
// Registers an event handler that will be attached to every webview created
// from now on. Existing webviews are unaffected, so all bindings should be
// done at startup. When useWebContents is true the handler is bound to the
// webview's webContents instead of the DOM element.
function bindWebviewEvent(event, fn, useWebContents) {
  var binding = {
    event: event,
    fn: fn,
    useWebContents: useWebContents
  }
  webviewEvents.push(binding)
}
// Registers a handler for an ipc-message channel coming from webview preload
// scripts. The handler is invoked as fn(webview, tabId, ipcArguments).
function bindWebviewIPC(name, fn) {
  var handler = {
    name: name,
    fn: fn
  }
  webviewIPC.push(handler)
}
// Permission gate for webview sessions: only notifications and fullscreen
// requests are granted; everything else (camera, mic, geolocation, ...) is
// denied. The decision is delivered through the callback.
function pagePermissionRequestHandler(webContents, permission, callback) {
  var grantedPermissions = ['notifications', 'fullscreen']
  callback(grantedPermissions.indexOf(permission) !== -1)
}
// Invoked (with `this` bound to the webview) whenever the page URL changes:
// records the new URL on the tab, marks it secure for https/internal schemes,
// and redraws the tab element.
function onPageLoad(e) {
  var tab = this.getAttribute('data-tab')
  var url = this.getAttribute('src') // src attribute changes whenever a page is loaded

  var trustedPrefixes = ['https://', 'about:', 'chrome:', 'file://']
  var isSecure = trustedPrefixes.some(function (prefix) {
    return url.indexOf(prefix) === 0
  })

  tabs.update(tab, {
    secure: isSecure,
    url: url
  })

  rerenderTabElement(tab)
}
// called when js/webview/textExtractor.js returns the page's text content
bindWebviewIPC('pageData', function (webview, tabId, args) {
  var tab = tabs.get(tabId),
    data = args[0]

  // Internal browser pages live under __dirname; reader-view pages are the
  // exception and are treated as regular history entries.
  var isInternalPage = tab.url.indexOf(__dirname) !== -1 && tab.url.indexOf(readerView.readerURL) === -1

  // don't save to history if in private mode, or the page is a browser page
  if (tab.private === false && !isInternalPage) {
    bookmarks.updateHistory(tabId, data.extractedText, data.metadata)
  }
})

// called when a swipe event is triggered in js/webview/swipeEvents.js
// NOTE: an undefined setting counts as enabled, i.e. swipe navigation is on
// by default.
bindWebviewIPC('goBack', function () {
  settings.get('swipeNavigationEnabled', function (value) {
    if (value === true || value === undefined) {
      getWebview(tabs.getSelected()).goBack()
    }
  })
})

bindWebviewIPC('goForward', function () {
  settings.get('swipeNavigationEnabled', function (value) {
    if (value === true || value === undefined) {
      getWebview(tabs.getSelected()).goForward()
    }
  })
})

// set the permissionRequestHandler for non-private tabs
// (private tabs get a per-partition handler in getWebviewDom)
remote.session.defaultSession.setPermissionRequestHandler(pagePermissionRequestHandler)
// Creates and wires up a <webview> element for a tab (but does not attach it
// to the DOM — addWebview does that). Handles private-tab session
// partitioning, registered event/IPC bindings, phishing redirects, crash
// recovery and error-page navigation.
// options: { tabId, url }
function getWebviewDom(options) {
  var w = document.createElement('webview')
  w.setAttribute('preload', 'dist/webview.min.js')
  w.setAttribute('blinkfeatures', 'OverlayScrollbars')
  if (options.url) {
    w.setAttribute('src', urlParser.parse(options.url))
  }

  w.setAttribute('data-tab', options.tabId)

  // if the tab is private, we want to partition it. See http://electron.atom.io/docs/v0.34.0/api/web-view-tag/#partition
  // since tab IDs are unique, we can use them as partition names
  if (tabs.get(options.tabId).private === true) {
    var partition = options.tabId.toString() // options.tabId is a number, which remote.session.fromPartition won't accept. It must be converted to a string first

    w.setAttribute('partition', partition)

    // register permissionRequestHandler for this tab
    // private tabs use a different session, so the default permissionRequestHandler won't apply
    remote.session.fromPartition(partition).setPermissionRequestHandler(pagePermissionRequestHandler)

    // enable ad/tracker/contentType blocking in this tab if needed
    registerFiltering(partition)
  }

  // attach all handlers registered via bindWebviewEvent
  webviewEvents.forEach(function (ev) {
    if (ev.useWebContents) { // some events (such as context-menu) are only available on the webContents rather than the webview element
      w.addEventListener('did-attach', function () {
        this.getWebContents().on(ev.event, ev.fn)
      })
    } else {
      w.addEventListener(ev.event, ev.fn)
    }
  })

  // publish global loading state; F and eventEmitter are app-level globals
  // defined elsewhere — presumably F is shared UI state (TODO confirm).
  w.addEventListener('did-start-loading', (e) => {
    F.webviewIsLoading = true
    eventEmitter.emit('startLoadingPage')
  })

  w.addEventListener('did-stop-loading', (e) => {
    F.webviewIsLoading = false
    eventEmitter.emit('stopLoadingPage')
  })

  w.addEventListener('page-favicon-updated', function (e) {
    say.m('getWebviewDom page-favicon-updated')
    F.ADD(e.path[0].attributes.src.nodeValue, e.favicons[0])
    eventEmitter.emit('updateFavicon')
  })

  // NOTE(review): second 'page-favicon-updated' listener is effectively dead
  // code (its only action is commented out) — candidate for removal.
  w.addEventListener('page-favicon-updated', function (e) {
    var id = this.getAttribute('data-tab')
    // updateTabColor(e.favicons, id)
  })

  w.addEventListener('page-title-set', function (e) {
    var tab = this.getAttribute('data-tab')
    tabs.update(tab, {
      title: e.title
    })
    rerenderTabElement(tab)
  })

  // onPageLoad records the new URL + secure flag on the tab
  w.addEventListener('did-finish-load', onPageLoad)
  w.addEventListener('did-navigate-in-page', onPageLoad)

  /* workaround for https://github.com/electron/electron/issues/8505 and similar issues */
  w.addEventListener('did-start-loading', function () {
    this.classList.add('loading')
  })
  w.addEventListener('did-stop-loading', function () {
    setTimeout(function () {
      w.classList.remove('loading')
    }, 100)
  })

  // open links in new tabs
  w.addEventListener('new-window', function (e) {
    var tab = this.getAttribute('data-tab')
    var currentIndex = tabs.getIndex(tabs.getSelected())

    var newTab = tabs.add({
      url: e.url,
      private: tabs.get(tab).private // inherit private status from the current tab
    }, currentIndex + 1)
    addTab(newTab, {
      enterEditMode: false,
      openInBackground: e.disposition === 'background-tab' // possibly open in background based on disposition
    })
    setTimeout(function () {
      // switchToTab(newTab)
      modals.hide()
      leaveTabEditMode()
      findinpage.end()
      // alert(newTab)
    }, 0)
  })

  w.addEventListener('close', function (e) {
    closeTab(this.getAttribute('data-tab'))
  })

  // dispatch ipc-message events to the handlers registered via bindWebviewIPC
  w.addEventListener('ipc-message', function (e) {
    var w = this // NOTE: intentionally shadows the outer w with the event target
    var tab = this.getAttribute('data-tab')

    webviewIPC.forEach(function (item) {
      if (item.name === e.channel) {
        item.fn(w, tab, e.args)
      }
    })

    if (e.channel === 'phishingDetected') {
      // check if the page is on the phishing detection whitelist
      var url = w.getAttribute('src')
      try {
        var hostname = new URL(url).hostname
        var redirectURL = phishingWarningPage + '?url=' + encodeURIComponent(url) + '&info=' + encodeURIComponent(e.args[0].join('\n'))
      } catch (e) {
        // unparseable URL: fall back to the bare warning page
        var hostname = ''
        var redirectURL = phishingWarningPage
      }
      settings.get('phishingWhitelist', function (value) {
        if (!value || !hostname || value.indexOf(hostname) === -1) {
          // show the warning page
          navigate(tab, redirectURL)
        }
      }, {
        fromCache: false
      })
    }
  })

  // recover from renderer crashes by recreating the webview on a crash page
  w.addEventListener('crashed', function (e) {
    var tabId = this.getAttribute('data-tab')

    destroyWebview(tabId)
    tabs.update(tabId, {
      url: crashedWebviewPage
    })

    addWebview(tabId)
    switchToWebview(tabId)
  })

  // errorCode -3 is ERR_ABORTED (user-initiated), not a real failure
  w.addEventListener('did-fail-load', function (e) {
    if (e.errorCode !== -3 && e.validatedURL === e.target.getURL()) {
      navigate(this.getAttribute('data-tab'), errorPage + '?ec=' + encodeURIComponent(e.errorCode) + '&url=' + encodeURIComponent(e.target.getURL()))
    }
  })

  w.addEventListener('enter-html-full-screen', function (e) {
    this.classList.add('fullscreen')
  })

  w.addEventListener('leave-html-full-screen', function (e) {
    this.classList.remove('fullscreen')
  })

  return w
}
// Creates a webview for the given tab and appends it to the container,
// hidden — call switchToWebview to show it (it keeps loading in the
// background while hidden). Returns the webview element, or undefined on
// failure.
function addWebview(tabId) {
  try {
    var tabData = tabs.get(tabId)

    var webview = getWebviewDom({
      tabId: tabId,
      url: tabData.url
    })

    // webviews are hidden when added; switchToWebview reveals them
    webview.classList.add('hidden')
    webview.classList.add('loading')
    webview.setAttribute("autosize", "on")

    webviewBase.appendChild(webview)

    return webview
  } catch (e) {
    // Previously this catch block was empty and swallowed every error.
    // Keep the best-effort behavior (return undefined) but surface the
    // failure for debugging.
    console.error('addWebview failed for tab ' + tabId, e)
  }
}
// Hides every webview, then shows the one belonging to the given tab,
// creating it first if it doesn't exist yet.
function switchToWebview(id) {
  var allWebviews = document.getElementsByTagName('webview')
  for (var i = 0; i < allWebviews.length; i++) {
    allWebviews[i].hidden = true
  }

  var target = getWebview(id) || addWebview(id)
  target.classList.remove('hidden')
  target.hidden = false
}
// Points the tab's webview at a new URL (normalized by urlParser).
function updateWebview(id, url) {
  var webview = getWebview(id)
  webview.setAttribute('src', urlParser.parse(url))
}
// Removes the tab's webview from the DOM; no-op if it doesn't exist.
function destroyWebview(id) {
  var webview = document.querySelector('webview[data-tab="' + id + '"]')
  if (webview) {
    webview.parentNode.removeChild(webview)
  }
}
// Returns the tab's webview element, or null if none exists.
function getWebview(id) {
  return document.querySelector('webview[data-tab="' + id + '"]')
}
| apache-2.0 |
artbear/snegopat-reborn-scripts | core/addins/debugHelpers.js | 9379 | //engine: JScript
//uname: debugHelpers
//dname: Отладчик:Вспомогательные команды
//author: Александр Кунташов <kuntashov@gmail.com>, http://compaud.ru/blog
//addin: stdcommands
//addin: stdlib
//addin: global
/*@
Добавляет возможность вызова консоли запросов для отладки запроса и
некоторые другие макросы, повышающие удобство использования штатного отладчика.
@*/
// Pull in the settings-management helpers and expose snegopat globals to
// this script.
stdlib.require('SettingsManagement.js', SelfScript);
global.connectGlobals(SelfScript);

var settings; // Holds the script's settings (a SettingsManager instance); initialized elsewhere.
////////////////////////////////////////////////////////////////////////////////////////
////{ Macros
////

/* Open the query held by the variable under the cursor in the query console (modal). */
SelfScript.Self['macrosОтладить запрос модально'] = function () {
    openQueryConsole(true);
}

/* Open the query held by the variable under the cursor in the query console (non-modal). */
SelfScript.Self['macrosОтладить запрос не модально'] = function () {
    openQueryConsole(false);
}

/* Toggle the debugger's break-on-error mode via a hotkey. */
SelfScript.Self['macrosВключить/выключить остановку по ошибке'] = function () {
    // Flag consumed by the onDoModal handler: it auto-toggles the checkbox
    // and dismisses the dialog instead of showing it.
    SelfScript.Self['StopOnErrorOpenedByMacros'] = true;
    stdcommands.CDebug.BreakOnError.send();
}

/* Set a breakpoint after first removing all other breakpoints. */
SelfScript.Self['macrosУстановить точку останова и удалить все другие'] = function () {
    stdcommands.CDebug.BrkptDel.send();
    stdcommands.CDebug.Brkpt.send();
}

/* Show the add-in's settings dialog. */
SelfScript.Self['macrosНастройка'] = function () {
    var dsForm = new DebugHelperSettingsForm(settings);
    dsForm.ShowDialog();
}
function getDefaultMacros() {
return "Настройка";
}
////} Макросы
// Modal-window hook (connected to windows.onDoModal in the startup section).
// Intercepts two of the 1C debugger's dialogs:
//  * "Остановка по ошибке" (break-on-error settings) - auto-toggled and
//    dismissed when it was opened by the macro above;
//  * "Выражение" (evaluate expression) - the expression text is rewritten so
//    that evaluating it opens the query console with the query under cursor.
function onDoModal(dlgInfo) {
    if (dlgInfo.caption == "Остановка по ошибке" && dlgInfo.stage == openModalWnd)
    {
        if (SelfScript.Self['StopOnErrorOpenedByMacros'])
        {
            // Invert the current value of the "stop on error" checkbox.
            var soe = !dlgInfo.form.getControl("CheckBox_StopOnError").Value;
            dlgInfo.form.getControl("CheckBox_StopOnError").Value = soe;
            dlgInfo.result = 1; // Press "OK".
            dlgInfo.cancel = true; // Do not actually show the window.
            Message("Остановка по ошибке " + (soe ? "включена" : "выключена"), mInfo);
            SelfScript.Self['StopOnErrorOpenedByMacros'] = undefined;
        }
    }
    else if (dlgInfo.caption == "Выражение" && dlgInfo.stage == openModalWnd)
    {
        if (SelfScript.Self['RunQueryConsoleCommand'])
        {
            // Parameters stashed by openQueryConsole(); consumed exactly once.
            var params = SelfScript.Self['RunQueryConsoleCommand'];
            delete SelfScript.Self['RunQueryConsoleCommand'];

            var exprCtrl = dlgInfo.form.getControl('Expression');
            if (!exprCtrl.value.match(/^\s*$/))
            {
                // Wrap the expression either in the external query console
                // processor, or in the user-configured debug command.
                if (!params.commandCheck) {
                    exprCtrl.value = 'ВнешниеОбработки.Создать("' + params.path + '").Отладить(' + exprCtrl.value + ', ' + (params.doModal ? 'Истина' : 'Ложь') + ')';
                } else {
                    exprCtrl.value = ''+ params.command + '(' + exprCtrl.value + ', ' + (params.doModal ? 'Истина' : 'Ложь') + ')';
                }

                var wsh = new ActiveXObject("WScript.Shell");
                // Send Enter so the expression field's "OnChange" event fires.
                stdlib.setTimeout(function () {
                    wsh.SendKeys("{END} {ENTER}");
                    if (!params.doModal)
                        // NOTE(review): presumably Alt+F4 closes the dialog and
                        // F5 resumes execution - confirm against the debugger UI.
                        wsh.SendKeys("%{F4}{F5}");
                }, 1000);
            }
        }
    }
}
// True when `path` is a non-empty string naming an existing regular file.
function fileExists(path) {
    if (!path)
        return false;

    var fileObj = v8New('File', path);
    return fileObj.IsFile() && fileObj.Exist();
}
// Expands a path that starts with "./" or "../": relative paths are taken to
// be relative to the main Snegopat directory. Absolute paths pass through.
function getAbsolutePath(path) {
    var isRelative = /^\.{1,2}[\/\\]/.test(path);
    if (!isRelative)
        return path;

    return env.pathes.main + path;
}
// Opens the query console for the expression under the debugger's cursor.
// doModal - whether the console should be opened as a modal window.
// Works indirectly: it stashes the parameters in
// SelfScript.Self['RunQueryConsoleCommand'] and triggers the debugger's
// "evaluate expression" command; the expression rewrite itself happens in
// the onDoModal hook above.
function openQueryConsole(doModal) {
    var path = getAbsolutePath(settings.current.QueryConsolePath);
    var query = settings.current.QueryCommand;
    // When UseCommand is set, a user command is called instead of the console
    // file, so the file-existence check is skipped.
    if (!settings.current.UseCommand) {
        if (!fileExists(path))
        {
            DoMessageBox('Путь к обработке КонсольЗапросов не задан. Укажите путь в диалоге настроек скрипта.');
            var dsForm = new DebugHelperSettingsForm(settings);
            if (!dsForm.ShowDialog())
            {
                Message('Консоль не будет открыта, т.к. путь к консоли не задан, либо файла по указанному пути не существует!');
                return;
            }
        }
    }
    SelfScript.Self['RunQueryConsoleCommand'] = { 'path': path, 'doModal': doModal, "command": query, "commandCheck":settings.current.UseCommand};
    stdcommands.CDebug.EvalExpr.send();
}
////////////////////////////////////////////////////////////////////////////////////////
////{ Форма настройки скрипта - DebugHelperSettingsForm
////
// Wraps the script's settings dialog (form file debugHelpers.settings.ssf).
function DebugHelperSettingsForm(settings) {
    this.settings = settings;
    this.form = loadScriptForm(env.pathes.addins + "debugHelpers.settings.ssf", this);
}

// Shows the form modally and returns the modal result.
DebugHelperSettingsForm.prototype.ShowDialog = function () {
    return this.form.DoModal();
}

// Persists the form values into the settings store.
// NOTE(review): this method has no return value, so the
// "if (this.saveSettings())" checks in CmdBarOK/BeforeClose are never true
// and the form is never closed through those paths - confirm this is intended.
DebugHelperSettingsForm.prototype.saveSettings = function () {
    if (!this.form.UseCommand) {
        var path = getAbsolutePath(this.form.QueryConsolePath);
        Message("path: " + path);
        // Existence check removed deliberately: the Snegopat folder may have
        // been moved and the settings dialog opened by accident.
        /* if (!fileExists(path))
        {
            DoMessageBox('Указанный файл не существует! Настройки не могут быть сохранены.');
            return;
        } */
    }
    this.settings.ReadFromForm(this.form);
    this.settings.SaveSettings();
}

// "Start choice" handler of the console-path field (placeholder left by author).
DebugHelperSettingsForm.prototype.QueryConsolePathStartChoice = function (Элемент, СтандартнаяОбработка) {
    // Insert the handler body here.
}

// "OK" button: save and close.
DebugHelperSettingsForm.prototype.CmdBarOK = function (Кнопка) {
    if (this.saveSettings())
        this.form.Close(true);
}

// "Save" button: save without closing.
DebugHelperSettingsForm.prototype.CmdBarSave = function (Кнопка) {
    this.saveSettings();
}

// On open, populate controls from the stored settings.
DebugHelperSettingsForm.prototype.OnOpen = function () {
    this.settings.ApplyToForm(this.form);
}

// Before closing, offer to save if the form was modified.
DebugHelperSettingsForm.prototype.BeforeClose = function (Cancel, DefaultHandler) {
    if (this.form.Modified)
    {
        var answer = DoQueryBox("Настройки были изменены! Сохранить настройки?", QuestionDialogMode.YesNoCancel);
        switch (answer)
        {
            case DialogReturnCode.Yes:
                DefaultHandler.val = false;
                if (this.saveSettings())
                    this.form.Close(true);
                break;
            case DialogReturnCode.No:
                DefaultHandler.val = false;
                this.form.Close(false);
                break;
            case DialogReturnCode.Cancel:
                Cancel.val = true;
                break;
        }
    }
}
////
////} Форма настройки скрипта - DebugHelperSettingsForm
////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////
////{ Start up
////

// Per-script settings and their defaults (see the settings dialog above):
//   QueryConsolePath - path to the query-console external data processor;
//   QueryCommand     - user-supplied debug command (used when UseCommand set);
//   UseCommand       - 'true' to call QueryCommand instead of the console file.
settings = SettingsManagement.CreateManager('debugHelpers', { 'QueryConsolePath': '' , 'QueryCommand': '', 'UseCommand':'false'})
settings.LoadSettings();

// Hook modal dialogs so onDoModal (above) can intercept the debugger windows.
events.connect(windows, "onDoModal", SelfScript.Self)
////
////} Start up
////////////////////////////////////////////////////////////////////////////////////////
| apache-2.0 |
abarisain/mopidy | mopidy/audio/playlists.py | 12969 | from __future__ import unicode_literals
import pygst
pygst.require('0.10')
import gst
import gobject
import ConfigParser as configparser
import io
try:
import xml.etree.cElementTree as elementtree
except ImportError:
import xml.etree.ElementTree as elementtree
# TODO: make detect_FOO_header reusable in general mopidy code.
# i.e. give it just a "peek" like function.
def detect_m3u_header(typefind):
    """Return True when the stream starts with the literal ``#EXTM3U`` line."""
    header = typefind.peek(0, 8)
    return header == b'#EXTM3U\n'
def detect_pls_header(typefind):
    """Return True when the stream starts with a ``[playlist]`` section header
    (matched case-insensitively)."""
    head = typefind.peek(0, 11)
    return head.lower() == b'[playlist]\n'
def detect_xspf_header(typefind):
    """Sniff the first 150 bytes for an XSPF playlist root element."""
    head = typefind.peek(0, 150)
    if b'xspf' not in head:
        # Cheap substring pre-check before paying for XML parsing.
        return False

    try:
        parser = elementtree.iterparse(io.BytesIO(head), events=(b'start',))
        for _event, element in parser:
            # Only the first (root) start tag matters.
            return element.tag.lower() == '{http://xspf.org/ns/0/}playlist'
    except elementtree.ParseError:
        pass
    return False
def detect_asx_header(typefind):
    """Sniff the first 50 bytes for an ASX playlist root element."""
    head = typefind.peek(0, 50)
    if b'asx' not in head:
        # Cheap substring pre-check before paying for XML parsing.
        return False

    try:
        parser = elementtree.iterparse(io.BytesIO(head), events=(b'start',))
        for _event, element in parser:
            # Only the first (root) start tag matters.
            return element.tag.lower() == 'asx'
    except elementtree.ParseError:
        pass
    return False
def parse_m3u(data):
    """Yield stripped URI lines from an M3U file-like object.

    Lines before the ``#EXTM3U`` header are ignored, as are comment and
    blank lines after it.
    """
    # TODO: convert non URIs to file URIs.
    in_body = False
    for raw in data.readlines():
        if not in_body:
            if not raw.startswith('#EXTM3U'):
                continue
            in_body = True
        stripped = raw.strip()
        if stripped and not raw.startswith('#'):
            yield stripped
def parse_pls(data):
    """Yield ``File<N>`` entries from a PLS playlist file-like object.

    Reads the ``[playlist]`` section (any case) and yields the values of
    ``file1`` .. ``file<numberofentries>`` in order.  Malformed input is
    silently treated as an empty playlist, matching the other parsers here.
    """
    # TODO: convert non URIs to file URIs.
    try:
        cp = configparser.RawConfigParser()
        # ConfigParser.readfp() is deprecated and removed in newer Python 3
        # releases in favour of read_file(); support both spellings so the
        # function keeps working across interpreter versions.
        if hasattr(cp, 'read_file'):
            cp.read_file(data)
        else:
            cp.readfp(data)
    except configparser.Error:
        return

    for section in cp.sections():
        if section.lower() != 'playlist':
            continue
        # range() instead of the Python 2-only xrange(); entry counts are
        # tiny, so materialising the range costs nothing on Python 2.
        for i in range(cp.getint(section, 'numberofentries')):
            yield cp.get(section, 'file%d' % (i + 1))
def parse_xspf(data):
    """Yield track locations from an XSPF playlist file-like object.

    Parse errors are swallowed and produce an empty result.
    """
    try:
        for _event, element in elementtree.iterparse(data):
            element.tag = element.tag.lower()  # normalize
    except elementtree.ParseError:
        return

    # After the loop `element` is the document root.
    ns = 'http://xspf.org/ns/0/'
    track_path = '{%s}tracklist/{%s}track' % (ns, ns)
    location_tag = '{%s}location' % ns
    for track in element.iterfind(track_path):
        yield track.findtext(location_tag)
def parse_asx(data):
    """Yield ``href`` targets of ``<entry><ref/>`` elements in an ASX file.

    Parse errors are swallowed and produce an empty result.
    """
    try:
        for _event, element in elementtree.iterparse(data):
            element.tag = element.tag.lower()  # normalize
    except elementtree.ParseError:
        return

    # `element` is now the root element; hrefs may be absent or padded.
    for ref in element.findall('entry/ref'):
        href = ref.get('href', '')
        yield href.strip()
def parse_urilist(data):
    """Yield the lines of a text/uri-list that GStreamer accepts as URIs.

    Note that the raw line (including its trailing newline) is yielded, not
    the stripped URI; consumers split on newlines themselves.
    """
    for line in data.readlines():
        if line.startswith('#'):
            continue
        if gst.uri_is_valid(line.strip()):
            yield line
def playlist_typefinder(typefind, func, caps):
    # Shared typefind callback: `func` is one of the detect_*_header sniffers
    # above; when it matches, claim the stream with `caps` at max confidence.
    if func(typefind):
        typefind.suggest(gst.TYPE_FIND_MAXIMUM, caps)

def register_typefind(mimetype, func, extensions):
    # Registers `func` as a GStreamer typefinder for `mimetype`, hinted by
    # the given file extensions.
    caps = gst.caps_from_string(mimetype)
    gst.type_find_register(mimetype, gst.RANK_PRIMARY, playlist_typefinder,
                           extensions, caps, func, caps)

def register_typefinders():
    # Hooks all playlist sniffers into GStreamer's typefind system.
    register_typefind('audio/x-mpegurl', detect_m3u_header, [b'm3u', b'm3u8'])
    register_typefind('audio/x-scpls', detect_pls_header, [b'pls'])
    register_typefind('application/xspf+xml', detect_xspf_header, [b'xspf'])
    # NOTE: seems we can't use video/x-ms-asf which is the correct mime for asx
    # as it is shared with asf for streaming videos :/
    register_typefind('audio/x-ms-asx', detect_asx_header, [b'asx'])
class BasePlaylistElement(gst.Bin):
    """Base class for creating GStreamer elements for playlist support.

    This element performs the following steps:

    1. Initializes src and sink pads for the element.
    2. Collects data from the sink until EOS is reached.
    3. Passes the collected data to :meth:`convert` to get a list of URIs.
    4. Passes the list of URIs to :meth:`handle`, default handling is to pass
       the URIs to the src element as a uri-list.
    5. If handle returned true, the EOS consumed and nothing more happens, if
       it is not consumed it flows on to the next element downstream, which is
       likely our uri-list consumer which needs the EOS to know we are done
       sending URIs.
    """

    sinkpad_template = None
    """GStreamer pad template to use for sink, must be overriden."""

    srcpad_template = None
    """GStreamer pad template to use for src, must be overriden."""

    ghost_srcpad = False
    """Indicates if src pad should be ghosted or not."""

    def __init__(self):
        """Sets up src and sink pads plus behaviour."""
        super(BasePlaylistElement, self).__init__()
        self._data = io.BytesIO()  # accumulates sink data until EOS is seen
        self._done = False  # set once EOS has been handled
        self.sinkpad = gst.Pad(self.sinkpad_template)
        self.sinkpad.set_chain_function(self._chain)
        self.sinkpad.set_event_function(self._event)
        self.add_pad(self.sinkpad)

        if self.ghost_srcpad:
            # Target pad is connected later (see UriListElement.pad_added).
            self.srcpad = gst.ghost_pad_new_notarget('src', gst.PAD_SRC)
        else:
            self.srcpad = gst.Pad(self.srcpad_template)
        self.add_pad(self.srcpad)

    def convert(self, data):
        """Convert the data we have collected to URIs.

        :param data: collected data buffer
        :type data: :class:`io.BytesIO`
        :returns: iterable or generator of URIs
        """
        raise NotImplementedError

    def handle(self, uris):
        """Do something useful with the URIs.

        :param uris: list of URIs
        :type uris: :type:`list`
        :returns: boolean indicating if EOS should be consumed
        """
        # TODO: handle unicode uris which we can get out of elementtree
        self.srcpad.push(gst.Buffer('\n'.join(uris)))
        return False

    def _chain(self, pad, buf):
        # Buffer everything until EOS has been handled; afterwards refuse data.
        if not self._done:
            self._data.write(buf.data)
            return gst.FLOW_OK
        return gst.FLOW_EOS

    def _event(self, pad, event):
        if event.type == gst.EVENT_NEWSEGMENT:
            return True

        if event.type == gst.EVENT_EOS:
            # All input collected: convert it and hand the URIs to handle().
            self._done = True
            self._data.seek(0)
            if self.handle(list(self.convert(self._data))):
                return True

        # Ensure we handle remaining events in a sane way.
        return pad.event_default(event)
class M3uDecoder(BasePlaylistElement):
    # Converts audio/x-mpegurl (M3U) input into a text/uri-list stream.
    __gstdetails__ = ('M3U Decoder',
                      'Decoder',
                      'Convert .m3u to text/uri-list',
                      'Mopidy')

    sinkpad_template = gst.PadTemplate(
        'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
        gst.caps_from_string('audio/x-mpegurl'))

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_from_string('text/uri-list'))

    __gsttemplates__ = (sinkpad_template, srcpad_template)

    def convert(self, data):
        return parse_m3u(data)
class PlsDecoder(BasePlaylistElement):
    # Converts audio/x-scpls (PLS) input into a text/uri-list stream.
    __gstdetails__ = ('PLS Decoder',
                      'Decoder',
                      'Convert .pls to text/uri-list',
                      'Mopidy')

    sinkpad_template = gst.PadTemplate(
        'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
        gst.caps_from_string('audio/x-scpls'))

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_from_string('text/uri-list'))

    __gsttemplates__ = (sinkpad_template, srcpad_template)

    def convert(self, data):
        return parse_pls(data)
class XspfDecoder(BasePlaylistElement):
    # Converts application/xspf+xml (XSPF) input into a text/uri-list stream.
    __gstdetails__ = ('XSPF Decoder',
                      'Decoder',
                      # Fixed copy-paste error: this element handles .xspf,
                      # but its description previously claimed ".pls".
                      'Convert .xspf to text/uri-list',
                      'Mopidy')

    sinkpad_template = gst.PadTemplate(
        'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
        gst.caps_from_string('application/xspf+xml'))

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_from_string('text/uri-list'))

    __gsttemplates__ = (sinkpad_template, srcpad_template)

    def convert(self, data):
        return parse_xspf(data)
class AsxDecoder(BasePlaylistElement):
    # Converts audio/x-ms-asx (ASX) input into a text/uri-list stream.
    __gstdetails__ = ('ASX Decoder',
                      'Decoder',
                      'Convert .asx to text/uri-list',
                      'Mopidy')

    sinkpad_template = gst.PadTemplate(
        'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
        gst.caps_from_string('audio/x-ms-asx'))

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_from_string('text/uri-list'))

    __gsttemplates__ = (sinkpad_template, srcpad_template)

    def convert(self, data):
        return parse_asx(data)
class UriListElement(BasePlaylistElement):
    # Demuxer that takes a text/uri-list and plays the first URI through an
    # internal uridecodebin, exposing its pad through our ghost src pad.
    __gstdetails__ = ('URIListDemuxer',
                      'Demuxer',
                      'Convert a text/uri-list to a stream',
                      'Mopidy')

    sinkpad_template = gst.PadTemplate(
        'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
        gst.caps_from_string('text/uri-list'))

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_new_any())

    ghost_srcpad = True  # We need to hook this up to our internal decodebin

    __gsttemplates__ = (sinkpad_template, srcpad_template)

    def __init__(self):
        super(UriListElement, self).__init__()
        self.uridecodebin = gst.element_factory_make('uridecodebin')
        self.uridecodebin.connect('pad-added', self.pad_added)
        # Limit to anycaps so we get a single stream out, letting other
        # elements downstream figure out actual muxing
        self.uridecodebin.set_property('caps', gst.caps_new_any())

    def pad_added(self, src, pad):
        # Route the decodebin's new pad out through our ghost src pad and
        # watch it for the nested-playlist marker event (see pad_event).
        self.srcpad.set_target(pad)
        pad.add_event_probe(self.pad_event)

    def pad_event(self, pad, event):
        if event.has_name('urilist-played'):
            # Our own marker event came back upstream: the URI we started
            # playing was itself a playlist. Refuse to recurse.
            error = gst.GError(gst.RESOURCE_ERROR, gst.RESOURCE_ERROR_FAILED,
                               b'Nested playlists not supported.')
            message = b'Playlists pointing to other playlists is not supported'
            self.post_message(gst.message_new_error(self, error, message))
        return 1  # GST_PAD_PROBE_OK

    def handle(self, uris):
        # Mark this stream as coming from a playlist so nested playlists can
        # be detected by pad_event above.
        struct = gst.Structure('urilist-played')
        event = gst.event_new_custom(gst.EVENT_CUSTOM_UPSTREAM, struct)
        self.sinkpad.push_event(event)

        # TODO: hookup about to finish and errors to rest of URIs so we
        # round robin, only giving up once all have been tried.
        # TODO: uris could be empty.
        self.add(self.uridecodebin)
        self.uridecodebin.set_state(gst.STATE_READY)
        self.uridecodebin.set_property('uri', uris[0])
        self.uridecodebin.sync_state_with_parent()
        return True  # Make sure we consume the EOS that triggered us.

    def convert(self, data):
        return parse_urilist(data)
class IcySrc(gst.Bin, gst.URIHandler):
    # Wraps an HTTP source element so that icy:// and icyx:// URIs can be
    # played by mapping them to http:// and https:// respectively.
    __gstdetails__ = ('IcySrc',
                      'Src',
                      'HTTP src wrapper for icy:// support.',
                      'Mopidy')

    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_new_any())

    __gsttemplates__ = (srcpad_template,)

    def __init__(self):
        super(IcySrc, self).__init__()
        self._httpsrc = gst.element_make_from_uri(gst.URI_SRC, 'http://')
        try:
            # Ask the HTTP source to request ICY metadata; not all source
            # elements expose this property, hence the TypeError guard.
            self._httpsrc.set_property('iradio-mode', True)
        except TypeError:
            pass
        self.add(self._httpsrc)

        self._srcpad = gst.GhostPad('src', self._httpsrc.get_pad('src'))
        self.add_pad(self._srcpad)

    @classmethod
    def do_get_type_full(cls):
        return gst.URI_SRC

    @classmethod
    def do_get_protocols_full(cls):
        return [b'icy', b'icyx']

    def do_set_uri(self, uri):
        # Map icy:// -> http:// and icyx:// -> https:// for the inner source.
        if uri.startswith('icy://'):
            return self._httpsrc.set_uri(b'http://' + uri[len('icy://'):])
        elif uri.startswith('icyx://'):
            return self._httpsrc.set_uri(b'https://' + uri[len('icyx://'):])
        else:
            return False

    def do_get_uri(self):
        # Reverse of do_set_uri; assumes the inner URI is http:// or https://.
        uri = self._httpsrc.get_uri()
        if uri.startswith('http://'):
            return b'icy://' + uri[len('http://'):]
        else:
            return b'icyx://' + uri[len('https://'):]
def register_element(element_class):
    # Makes a single element class known to GStreamer under its lowercased
    # class name, at marginal rank so native elements win ties.
    gobject.type_register(element_class)
    gst.element_register(
        element_class, element_class.__name__.lower(), gst.RANK_MARGINAL)

def register_elements():
    # Registers all playlist-handling elements defined in this module.
    register_element(M3uDecoder)
    register_element(PlsDecoder)
    register_element(XspfDecoder)
    register_element(AsxDecoder)
    register_element(UriListElement)

    # Only register icy if gst install can't handle it on it's own.
    if not gst.element_make_from_uri(gst.URI_SRC, 'icy://'):
        register_element(IcySrc)
| apache-2.0 |
whoschek/kite | kite-data/kite-data-core/src/main/java/org/kitesdk/data/spi/filesystem/FileSystemWriter.java | 7685 | /*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data.spi.filesystem;
import com.google.common.base.Preconditions;
import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.util.UUID;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.kitesdk.data.DatasetDescriptor;
import org.kitesdk.data.DatasetIOException;
import org.kitesdk.data.DatasetWriterException;
import org.kitesdk.data.Format;
import org.kitesdk.data.Formats;
import org.kitesdk.data.spi.AbstractDatasetWriter;
import org.kitesdk.data.spi.DescriptorUtil;
import org.kitesdk.data.spi.ReaderWriterState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class FileSystemWriter<E> extends AbstractDatasetWriter<E> {
private static final Logger LOG = LoggerFactory.getLogger(FileSystemWriter.class);
static interface FileAppender<E> extends Flushable, Closeable {
public void open() throws IOException;
public void append(E entity) throws IOException;
public void cleanup() throws IOException;
}
protected final FileSystem fs;
private final Path directory;
private final DatasetDescriptor descriptor;
private FileAppender<E> appender;
private Path tempPath;
private Path finalPath;
private ReaderWriterState state;
private int count = 0;
public FileSystemWriter(FileSystem fs, Path path, DatasetDescriptor descriptor) {
Preconditions.checkNotNull(fs, "File system is not defined");
Preconditions.checkNotNull(path, "Destination directory is not defined");
Preconditions.checkNotNull(descriptor, "Descriptor is not defined");
this.fs = fs;
this.directory = path;
this.descriptor = descriptor;
this.state = ReaderWriterState.NEW;
}
@Override
public final void initialize() {
Preconditions.checkState(state.equals(ReaderWriterState.NEW),
"Unable to open a writer from state:%s", state);
// ensure the directory exists
try {
fs.mkdirs(directory);
} catch (IOException ex) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException("Failed to create path " + directory, ex);
}
// initialize paths
this.finalPath = new Path(directory, uniqueFilename(descriptor.getFormat()));
this.tempPath = tempFilename(finalPath);
this.appender = newAppender(tempPath);
try {
appender.open();
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException("Failed to open appender " + appender, e);
}
this.count = 0;
LOG.debug("Opened appender {} for {}", appender, finalPath);
this.state = ReaderWriterState.OPEN;
}
@Override
public final void write(E entity) {
Preconditions.checkState(state.equals(ReaderWriterState.OPEN),
"Attempt to write to a writer in state:%s", state);
try {
appender.append(entity);
count += 1;
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException(
"Failed to append " + entity + " to " + appender, e);
}
}
@Override
public void flush() {
Preconditions.checkState(state.equals(ReaderWriterState.OPEN),
"Attempt to write to a writer in state:%s", state);
try {
appender.flush();
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetWriterException("Failed to flush appender " + appender);
}
}
@Override
public final void close() {
if (state.equals(ReaderWriterState.OPEN)) {
try {
appender.close();
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException("Failed to close appender " + appender, e);
}
if (count > 0) {
// commit the temp file
try {
if (!fs.rename(tempPath, finalPath)) {
this.state = ReaderWriterState.ERROR;
throw new DatasetWriterException(
"Failed to move " + tempPath + " to " + finalPath);
}
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException("Failed to commit " + finalPath, e);
}
LOG.debug("Committed {} for appender {} ({} entities)",
new Object[]{finalPath, appender, count});
} else {
// discard the temp file
try {
if (!fs.delete(tempPath, true)) {
this.state = ReaderWriterState.ERROR;
throw new DatasetWriterException("Failed to delete " + tempPath);
}
} catch (IOException e) {
this.state = ReaderWriterState.ERROR;
throw new DatasetIOException(
"Failed to remove temporary file " + tempPath, e);
}
LOG.debug("Discarded {} ({} entities)", tempPath, count);
}
try {
appender.cleanup();
} catch (IOException e) {
throw new DatasetIOException("Failed to clean up " + appender, e);
}
this.state = ReaderWriterState.CLOSED;
} else if (state.equals(ReaderWriterState.ERROR)) {
this.state = ReaderWriterState.CLOSED;
}
}
@Override
public final boolean isOpen() {
return state.equals(ReaderWriterState.OPEN);
}
private static String uniqueFilename(Format format) {
return UUID.randomUUID() + "." + format.getExtension();
}
private static Path tempFilename(Path location) {
return new Path(location.getParent(), "." + location.getName() + ".tmp");
}
@SuppressWarnings("unchecked")
private <E> FileAppender<E> newAppender(Path temp) {
Format format = descriptor.getFormat();
if (Formats.PARQUET.equals(format)) {
// by default, guarantee durability with the more costly writer
if (DescriptorUtil.isEnabled(
FileSystemProperties.NON_DURABLE_PARQUET_PROP, descriptor)) {
return (FileAppender<E>) new ParquetAppender(
fs, temp, descriptor.getSchema(), true);
} else {
return (FileAppender<E>) new DurableParquetAppender(
fs, temp, descriptor.getSchema(), true);
}
} else if (Formats.AVRO.equals(format)) {
return new AvroAppender<E>(fs, temp, descriptor.getSchema(), true);
} else if (Formats.CSV.equals(format) &&
DescriptorUtil.isEnabled(FileSystemProperties.ALLOW_CSV_PROP, descriptor)) {
return new CSVAppender<E>(fs, temp, descriptor);
} else {
this.state = ReaderWriterState.ERROR;
throw new DatasetWriterException("Unknown format " + descriptor);
}
}
public RecordWriter<E, Void> asRecordWriter() {
return new KiteRecordWriter();
}
private class KiteRecordWriter extends RecordWriter<E, Void> {
@Override
public void write(E e, Void aVoid) throws IOException, InterruptedException {
FileSystemWriter.this.write(e);
}
@Override
public void close(TaskAttemptContext context)
throws IOException, InterruptedException {
FileSystemWriter.this.close();
}
}
}
| apache-2.0 |
raupachz/beanstalk4j | src/main/java/org/beanstalk4j/ApiException.java | 965 | package org.beanstalk4j;
/*
* Copyright 2011 Björn Raupach
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Signals a failed Beanstalk API call, carrying the HTTP response code.
 * The exception message is "{@code <code> - <message>}", or just the code
 * when no message is available.
 */
public class ApiException extends RuntimeException {

    private final int responseCode;

    /**
     * Creates an exception for a failed API call.
     *
     * @param responseCode HTTP status code returned by the server
     * @param message optional detail from the response body; may be
     *        {@code null} or empty, in which case only the code is used
     */
    public ApiException(int responseCode, String message) {
        // Guard against null: the original called message.isEmpty() directly,
        // which threw NullPointerException for responses without a body.
        super(message == null || message.isEmpty()
                ? String.valueOf(responseCode)
                : responseCode + " - " + message);
        this.responseCode = responseCode;
    }

    /** @return the HTTP status code that triggered this exception */
    public int getResponseCode() {
        return responseCode;
    }
}
| apache-2.0 |
AndreJCL/JCL | JCL_Android/app/src/main/java/javassist/bytecode/analysis/MultiType.java | 9604 | /*
* Javassist, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package javassist.bytecode.analysis;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javassist.CtClass;
/**
* MultiType represents an unresolved type. Whenever two <literal>Type</literal>
* instances are merged, if they share more than one super type (either an
* interface or a superclass), then a <literal>MultiType</literal> is used to
* represent the possible super types. The goal of a <literal>MultiType</literal>
* is to reduce the set of possible types down to a single resolved type. This
* is done by eliminating non-assignable types from the typeset when the
* <literal>MultiType</literal> is passed as an argument to
* {@link Type#isAssignableFrom(Type)}, as well as removing non-intersecting
* types during a merge.
*
* Note: Currently the <litera>MultiType</literal> instance is reused as much
* as possible so that updates are visible from all frames. In addition, all
* <literal>MultiType</literal> merge paths are also updated. This is somewhat
* hackish, but it appears to handle most scenarios.
*
* @author Jason T. Greene
*/
/* TODO - A better, but more involved, approach would be to track the instruction
* offset that resulted in the creation of this type, and
* whenever the typeset changes, to force a merge on that position. This
* would require creating a new MultiType instance every time the typeset
* changes, and somehow communicating assignment changes to the Analyzer
*/
public class MultiType extends Type {
private Map<String, CtClass> interfaces;
private Type resolved;
private Type potentialClass;
private MultiType mergeSource;
private boolean changed = false;
public MultiType(Map<String, CtClass> interfaces) {
this(interfaces, null);
}
public MultiType(Map<String, CtClass> interfaces, Type potentialClass) {
super(null);
this.interfaces = interfaces;
this.potentialClass = potentialClass;
}
/**
* Gets the class that corresponds with this type. If this information
* is not yet known, java.lang.Object will be returned.
*/
public CtClass getCtClass() {
if (resolved != null)
return resolved.getCtClass();
return Type.OBJECT.getCtClass();
}
/**
* Always returns null since this type is never used for an array.
*/
public Type getComponent() {
return null;
}
/**
* Always returns 1, since this type is a reference.
*/
public int getSize() {
return 1;
}
/**
* Always reutnrs false since this type is never used for an array
*/
public boolean isArray() {
return false;
}
/**
* Returns true if the internal state has changed.
*/
boolean popChanged() {
boolean changed = this.changed;
this.changed = false;
return changed;
}
public boolean isAssignableFrom(Type type) {
throw new UnsupportedOperationException("Not implemented");
}
public boolean isAssignableTo(Type type) {
if (resolved != null)
return type.isAssignableFrom(resolved);
if (Type.OBJECT.equals(type))
return true;
if (potentialClass != null && !type.isAssignableFrom(potentialClass))
potentialClass = null;
Map<String, CtClass> map = mergeMultiAndSingle(this, type);
if (map.size() == 1 && potentialClass == null) {
// Update previous merge paths to the same resolved type
resolved = Type.get((CtClass)map.values().iterator().next());
propogateResolved();
return true;
}
// Keep all previous merge paths up to date
if (map.size() >= 1) {
interfaces = map;
propogateState();
return true;
}
if (potentialClass != null) {
resolved = potentialClass;
propogateResolved();
return true;
}
return false;
}
private void propogateState() {
MultiType source = mergeSource;
while (source != null) {
source.interfaces = interfaces;
source.potentialClass = potentialClass;
source = source.mergeSource;
}
}
private void propogateResolved() {
MultiType source = mergeSource;
while (source != null) {
source.resolved = resolved;
source = source.mergeSource;
}
}
/**
* Always returns true, since this type is always a reference.
*
* @return true
*/
public boolean isReference() {
return true;
}
private Map<String, CtClass> getAllMultiInterfaces(MultiType type) {
Map<String, CtClass> map = new HashMap<String, CtClass>();
Iterator<CtClass> iter = type.interfaces.values().iterator();
while (iter.hasNext()) {
CtClass intf = (CtClass)iter.next();
map.put(intf.getName(), intf);
getAllInterfaces(intf, map);
}
return map;
}
private Map<String, CtClass> mergeMultiInterfaces(MultiType type1, MultiType type2) {
Map<String, CtClass> map1 = getAllMultiInterfaces(type1);
Map<String, CtClass> map2 = getAllMultiInterfaces(type2);
return findCommonInterfaces(map1, map2);
}
private Map<String, CtClass> mergeMultiAndSingle(MultiType multi, Type single) {
Map<String, CtClass> map1 = getAllMultiInterfaces(multi);
Map<String, CtClass> map2 = getAllInterfaces(single.getCtClass(), null);
return findCommonInterfaces(map1, map2);
}
private boolean inMergeSource(MultiType source) {
while (source != null) {
if (source == this)
return true;
source = source.mergeSource;
}
return false;
}
public Type merge(Type type) {
if (this == type)
return this;
if (type == UNINIT)
return this;
if (type == BOGUS)
return BOGUS;
if (type == null)
return this;
if (resolved != null)
return resolved.merge(type);
if (potentialClass != null) {
Type mergePotential = potentialClass.merge(type);
if (! mergePotential.equals(potentialClass) || mergePotential.popChanged()) {
potentialClass = Type.OBJECT.equals(mergePotential) ? null : mergePotential;
changed = true;
}
}
Map<String, CtClass> merged;
if (type instanceof MultiType) {
MultiType multi = (MultiType)type;
if (multi.resolved != null) {
merged = mergeMultiAndSingle(this, multi.resolved);
} else {
merged = mergeMultiInterfaces(multi, this);
if (! inMergeSource(multi))
mergeSource = multi;
}
} else {
merged = mergeMultiAndSingle(this, type);
}
// Keep all previous merge paths up to date
if (merged.size() > 1 || (merged.size() == 1 && potentialClass != null)) {
// Check for changes
if (merged.size() != interfaces.size()) {
changed = true;
} else if (changed == false){
Iterator<String> iter = merged.keySet().iterator();
while (iter.hasNext())
if (! interfaces.containsKey(iter.next()))
changed = true;
}
interfaces = merged;
propogateState();
return this;
}
if (merged.size() == 1) {
resolved = Type.get((CtClass) merged.values().iterator().next());
} else if (potentialClass != null){
resolved = potentialClass;
} else {
resolved = OBJECT;
}
propogateResolved();
return resolved;
}
public boolean equals(Object o) {
if (! (o instanceof MultiType))
return false;
MultiType multi = (MultiType) o;
if (resolved != null)
return resolved.equals(multi.resolved);
else if (multi.resolved != null)
return false;
return interfaces.keySet().equals(multi.interfaces.keySet());
}
/**
 * Renders either the resolved type, or the candidate set as
 * {@code "{Iface1, Iface2, *PotentialClass}"}.
 *
 * Fix: the original unconditionally did
 * {@code buffer.setLength(buffer.length() - 2)} to strip a trailing
 * ", ", which throws StringIndexOutOfBoundsException when the interface
 * map is empty (only "{" had been appended).  Separators are now emitted
 * between elements instead, so the empty case is safe and non-empty
 * output is unchanged.
 */
public String toString() {
    if (resolved != null)
        return resolved.toString();
    StringBuilder buffer = new StringBuilder("{");
    boolean first = true;
    for (Iterator<String> iter = interfaces.keySet().iterator(); iter.hasNext();) {
        if (!first)
            buffer.append(", ");
        buffer.append(iter.next());
        first = false;
    }
    if (potentialClass != null) {
        if (!first)
            buffer.append(", ");
        buffer.append("*").append(potentialClass.toString());
    }
    buffer.append("}");
    return buffer.toString();
}
}
| apache-2.0 |
orientechnologies/orientdb | core/src/test/java/com/orientechnologies/orient/core/db/hook/HookReadTest.java | 1963 | package com.orientechnologies.orient.core.db.hook;
import static org.junit.Assert.assertEquals;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.hook.ORecordHook;
import com.orientechnologies.orient.core.metadata.security.OSecurityPolicy;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Verifies that a change made by an AFTER_READ record hook is visible in
 * query results. (Created by tglman on 01/06/16.)
 */
public class HookReadTest {

  /** In-memory database, created fresh for each test. */
  private ODatabaseDocument database;

  @Before
  public void before() {
    database = new ODatabaseDocumentTx("memory:" + HookReadTest.this.getClass().getSimpleName());
    database.create();
    database.getMetadata().getSchema().createClass("TestClass");
  }

  @After
  public void after() {
    database.drop();
  }

  @Test
  public void testSelectChangedInHook() {
    database.registerHook(
        new ORecordHook() {
          @Override
          public void onUnregister() {}

          @Override
          public RESULT onTrigger(TYPE iType, ORecord iRecord) {
            // Tag every non-security document on read so the mutation
            // can be observed through a plain SELECT.
            if (iType == TYPE.AFTER_READ
                && !((ODocument) iRecord)
                    .getClassName()
                    .equalsIgnoreCase(OSecurityPolicy.class.getSimpleName()))
              ((ODocument) iRecord).field("read", "test");
            return RESULT.RECORD_CHANGED;
          }

          @Override
          public DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
            return null;
          }
        });
    database.save(new ODocument("TestClass"));
    List<ODocument> res = database.query(new OSQLSynchQuery<Object>("select from TestClass"));
    // Fix: JUnit's assertEquals takes (expected, actual); the original had
    // them reversed, which inverts the failure message.
    assertEquals("test", res.get(0).field("read"));
  }
}
| apache-2.0 |
obulpathi/hadoop | book/ch05/src/main/java/v2/MaxTemperatureMapper.java | 907 | package v2;
//== MaxTemperatureMapperV2
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
/**
 * Mapper that emits (year, air temperature) pairs parsed from fixed-column
 * weather records.  The {@code //vv}, {@code //^^} and slash-bracket
 * markers are book-tooling snippet delimiters and must be preserved.
 */
public class MaxTemperatureMapper
  extends Mapper<LongWritable, Text, Text, IntWritable> {
  //vv MaxTemperatureMapperV2
  @Override
  public void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // Fixed-width record layout: year at columns [15,19),
    // temperature at [87,92).  NOTE(review): assumes every input line is
    // at least 92 characters; shorter lines would throw
    // StringIndexOutOfBoundsException — confirm upstream format.
    String line = value.toString();
    String year = line.substring(15, 19);
    /*[*/String temp = line.substring(87, 92);
    if (!missing(temp)) {/*]*/
      // parseInt accepts the leading '+'/'-' sign carried by the field.
      int airTemperature = Integer.parseInt(temp);
      context.write(new Text(year), new IntWritable(airTemperature));
    /*[*/}/*]*/
  }
  // "+9999" is presumably the dataset's missing-value sentinel — verify
  // against the record format specification.
  /*[*/private boolean missing(String temp) {
    return temp.equals("+9999");
  }/*]*/
  //^^ MaxTemperatureMapperV2
}
| apache-2.0 |
5hk/webida-client | common/src/webida/widgets/views/splitviewcontainer.js | 20792 | /*
* Copyright (c) 2012-2015 S-Core Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
define(['other-lib/underscore/lodash.min',
        'dijit/layout/TabContainer',
        'dijit/layout/ContentPane',
        'dijit/Tooltip',
        'dijit/layout/BorderContainer',
        './viewcontainer',
        './view',
        './viewmanager',
        'dojo/topic',
        'dojo/dom-geometry',
        'dojo/domReady!', ],
function (_, TabContainer, ContentPane, Tooltip, BorderContainer,
           ViewContainer, View, vm, topic, geometry) {
    'use strict';

    // Hosts up to MAX_COUNT ViewContainers inside a dijit BorderContainer,
    // split horizontally or vertically.  State is mutated in place; callers
    // interact through get/set and the public container operations below.
    var splitViewContainer = function () {
        this._splitCount = 0;                 // number of split cells
        this._verticalSplit = false;          // true = stacked top/bottom
        this._smartVisible = false;           // auto-hide empty containers
        this.containerList = [];              // all ViewContainers (shown or hidden)
        this.widgetObject = null;             // the BorderContainer widget
        this._focusedViewContainer = null;
        this._splitterPressed = false;
        this._changableRotation = false;
        this.region = '';
    };

    // Hard limit on the number of simultaneous split cells.
    splitViewContainer.MAX_COUNT = 5;

    splitViewContainer.prototype = {
        // Returns the top-level BorderContainer widget.
        getTopContainer : function () {
            return this.widgetObject;
        },

        // Property-style getter for the supported option names.
        get : function (propName) {
            if (propName === 'splitCount') {
                return this._splitCount;
            } else if (propName === 'verticalSplit') {
                return this._verticalSplit;
            } else if (propName === 'smartVisible') {
                return this._smartVisible;
            } else if (propName === 'rotatable') {
                return this._changableRotation;
            } else if (propName === 'region') {
                return this.region;
            }
        },

        // Property-style setter; delegates to the matching private mutator.
        set : function (propName, value) {
            if (propName === 'splitCount') {
                return this._split(value);
            } else if (propName === 'verticalSplit') {
                return this._setSplitDirection(value);
            } else if (propName === 'smartVisible') {
                return this._setSmartVisible(value);
            } else if (propName === 'rotatable') {
                return this._setRotatable(value);
            } else if (propName === 'region') {
                return this._setRegion(value);
            }
        },

        // Applies a bag of properties via set().
        setProperties : function (properties) {
            for (var propName in properties) {
                if (properties.hasOwnProperty(propName)) {
                    this.set(propName, properties[propName]);
                }
            }
        },

        // Builds the BorderContainer, applies initial properties, and wires
        // up the view.* topic subscriptions that keep visibility and focus
        // bookkeeping in sync.
        init : function (properties, elem) {
            var _self = this;
            var bc = new BorderContainer({style: 'height:100%; width:100%;', padding: 0, design: 'sidebar'});
            if (elem) {
                bc.placeAt(elem);
            }
            this.widgetObject = bc;
            bc.startup();
            if (properties) {
                this.setProperties(properties);
            }
            topic.subscribe('view.selected', function (event) {
                _self._viewSelectedHandler(event);
            });
            topic.subscribe('view.removed', function (event) {
                _self._viewRemovedHandler(event);
            });
            topic.subscribe('view.focused', function (event) {
                _self._viewFocusedHandler(event);
            });
            // With smartVisible, a container is shown just before its first
            // view is added ...
            topic.subscribe('view.added-before', function (event) {
                var vc = event.viewContainer;
                if (vc && (vc.getParent() === _self)) {
                    if (_self.get('smartVisible')) {
                        _self.showContainer(vc);
                    }
                }
            });
            // ... and hidden again once its last view is removed.
            topic.subscribe('view.removed', function (event) {
                var vc = event.viewContainer;
                if (vc && (vc.getParent() === _self)) {
                    if ((event.count === 0) && _self.get('smartVisible')) {
                        _self.hideContainer(vc);
                    }
                }
            });
            bc.resize();
            vm.addToGroup(_self, 'default');
        },

        // Returns the ViewContainer at index, or null if out of range.
        getViewContainer : function (index) {
            if (this.containerList[index]) {
                return this.containerList[index];
            }
            return null;
        },

        getViewContainers : function () {
            return this.containerList;
        },

        // Re-attaches a hidden container to the BorderContainer.
        showContainer : function (vc) {
            var _self = this;
            if (vc) {
                if (_self._isHided(vc)) {
                    _self.widgetObject.addChild(vc.getTopContainer());
                }
            }
        },

        // Detaches a container.  The 'center' region must always exist, so
        // hiding the center container first swaps its contents with the
        // next visible container.
        hideContainer : function (vc) {
            var _self = this;
            if (vc) {
                if (!_self._isHided(vc)) {
                    if (_self._isCenterContainer(vc)) {
                        var nextShowdVc = _self._getNextShowedViewContainer(vc);
                        if (nextShowdVc) {
                            _self._swap(vc, nextShowdVc);
                        }
                    } else {
                        this.widgetObject.removeChild(vc.getTopContainer());
                    }
                }
            }
        },

        insertToPrev : function (vc, view) {
            this._insert(vc, view, true);
        },

        insertToNext : function (vc, view) {
            this._insert(vc, view, false);
        },

        // True when at least one hidden container is available to take a
        // newly inserted view.
        isInsertable : function () {
            var _self = this;
            var i, vc;
            var vcList = _self.getViewContainers();
            for (i = 0 ; i < vcList.length; i++) {
                vc = vcList[i];
                if (_self._isHided(vc)) {
                    return true;
                }
            }
            return false;
        },

        // Moves a view into targetViewContainer (optionally before
        // nextSibling), publishing view.moved-before / view.moved around
        // the operation.
        moveView : function (targetViewContainer, view, nextSibling, noneSelect) {
            if (targetViewContainer && view) {
                var vc = view.getParent();
                var viewMoveEvent = {
                    view : view,
                    nextSibling : nextSibling,
                    destViewContainer : targetViewContainer
                };
                topic.publish('view.moved-before', viewMoveEvent);
                if (targetViewContainer === vc) {
                    targetViewContainer.moveView(view, nextSibling);
                    targetViewContainer.select(view);
                } else {
                    // NOTE(review): the temporary view apparently keeps the
                    // source container non-empty during the transfer (so
                    // smartVisible doesn't hide it mid-move) — confirm.
                    var tempView = new View('_tempView', 'tempView');
                    vc.addLast(tempView);
                    vc.remove(view);
                    targetViewContainer.addLast(view);
                    targetViewContainer.moveView(view, nextSibling);
                    vc.remove(tempView, true);
                    if (!noneSelect) {
                        view.select();
                    }
                }
                topic.publish('view.moved', viewMoveEvent);
            }
        },

        // Returns the last-focused container, falling back to the first one.
        getFocusedViewContainer : function () {
            if (this._focusedViewContainer) {
                return this._focusedViewContainer;
            } else {
                return this.getViewContainer(0);
            }
        },

        getShowedViewContainers : function () {
            return this._getViewContainersByState(false);
        },

        getHidedViewContainers : function () {
            return this._getViewContainersByState(true);
        },

        // Re-applies visibility policy to every container.
        _updateVisible : function () {
            var _self = this;
            _.forEach(_self.containerList, function (vc) {
                if (_self._smartVisible) {
                    if (vc.getNumOfViews() > 0) {
                        _self.showContainer(vc);
                    } else {
                        _self.hideContainer(vc);
                    }
                } else {
                    _self.showContainer(vc);
                }
            });
        },

        _setSmartVisible : function (enable) {
            if (this._smartVisible !== enable) {
                this._smartVisible = enable;
                this._updateVisible();
            }
        },

        _setRotatable : function (enable) {
            if (this._changableRotation !== enable) {
                this._changableRotation = enable;
            }
        },

        _setRegion : function (region) {
            if (this.region !== region) {
                this.region = region;
            }
        },

        // Tracks which container owns the currently selected view.
        _viewSelectedHandler : function (event) {
            var _self = this;
            var vc = event.viewContainer;
            if (vc.getParent() === _self) {
                _self._focusedViewContainer = vc;
            }
        },

        // Clears focus bookkeeping when the focused container becomes empty.
        _viewRemovedHandler : function (event) {
            var _self = this;
            var vc = event.viewContainer;
            if (vc.getParent() === _self) {
                if ((_self._focusedViewContainer === vc) && (event.count === 0)) {
                    _self._focusedViewContainer = null;
                }
            }
        },

        _viewFocusedHandler : function (event) {
            var _self = this;
            var vc = event.viewContainer;
            if (vc.getParent() === _self) {
                _self._focusedViewContainer = vc;
            }
        },

        // NOTE(review): appears unused (init() wires its own subscriptions)
        // and references _viewCloseHandler, which is not defined in this
        // file — confirm before relying on it.
        _registEvents : function () {
            var _self = this;
            topic.subscribe('view.selected', function (event) {
                _self._viewSelectedHandler(event);
            });
            topic.subscribe('view.close', function (event) {
                _self._viewCloseHandler(event);
            });
            topic.subscribe('view.focused', function (event) {
                _self._viewFocusedHandler(event);
            });
        },

        // Grows or shrinks the number of split cells to `count`.
        _split : function (count) {
            var i, vc, layoutPriority;
            var _self = this;
            if ((count >= 1) && (count <= splitViewContainer.MAX_COUNT)) {
                if (count < _self.containerList.length) {
                    // NOTE(review): this shrink loop looks inverted — with
                    // count < containerList.length the condition
                    // `i >= containerList.length` is false on entry, so no
                    // container is ever removed.  TODO confirm and fix
                    // bounds (iterate from length down to count + 1).
                    for (i = count; i >= _self.containerList.length; i--) {
                        vc = _self.containerList[i - 1];
                        vc.setParent(null);
                        this.widgetObject.removeChild(vc.getTopContainer());
                        this.containerList.pop();
                        // TODO: destroy vc
                    }
                } else {
                    for (i = _self._splitCount; i < count; i++) {
                        // The first container is the 'center' region
                        // (layoutPriority 0); later ones get decreasing
                        // priority so they stack in insertion order.
                        if (i === 0) {
                            layoutPriority = 0;
                        } else {
                            layoutPriority = count - (i);
                        }
                        vc = _self._createViewContainer(layoutPriority, parseInt((100 / count), 10));
                        _self.containerList.push(vc);
                        if (!_self._smartVisible) {
                            _self.showContainer(vc);
                        }
                        // _self._registEvents(vc);
                    }
                }
                _self._splitCount = count;
                return true;
            } else {
                return false;
            }
        },

        // Switches between horizontal and vertical layout, converting each
        // child's pixel size into a percentage of the new axis.
        _setSplitDirection : function (vertical) {
            var _self = this;
            var borderContainer = _self.widgetObject;
            var children = borderContainer.getChildren();
            var totalW = 0;
            var totalH = 0;
            var width, height;
            var child;
            var i;
            var region, style;
            if (this._verticalSplit !== vertical) {
                if (vertical) {
                    region = 'bottom';
                } else {
                    region = 'right';
                }
                for (i = 0; i < children.length; i++) {
                    child = children[i];
                    totalW += geometry.getContentBox(child.domNode).w;
                    totalH += geometry.getContentBox(child.domNode).h;
                }
                for (i = 0; i < children.length; i++) {
                    child = children[i];
                    width = geometry.getContentBox(child.domNode).w;
                    height = geometry.getContentBox(child.domNode).h;
                    if (vertical) {
                        style = 'height:' + parseInt((height * 100 / totalH), 10) + '%';
                    } else {
                        style = 'width:' + parseInt((width * 100 / totalW), 10) + '%';
                    }
                    // Child 0 is the 'center' region and keeps its settings;
                    // re-adding the others applies the new region/style.
                    if (i > 0) {
                        child.set('style', style);
                        child.set('region', region);
                        borderContainer.removeChild(child);
                        borderContainer.addChild(child);
                    }
                }
                // Hidden containers must also adopt the new region so they
                // attach correctly when shown again.
                var hidedVc = _self.getHidedViewContainers();
                for (i = 0; i < hidedVc.length; i++) {
                    hidedVc[i].topContainer.set('region', region);
                }
                this._verticalSplit = vertical;
            }
        },

        // A container is "hidden" when its widget is not currently a child
        // of the BorderContainer.
        _isHided : function (vc) {
            var _self = this;
            if (vc) {
                var index = _self.widgetObject.getIndexOfChild(vc.getTopContainer());
                if (index === -1) {
                    return true;
                }
            }
            return false;
        },

        // Creates a ViewContainer configured for the current split
        // direction; layoutPriority 0 marks the mandatory 'center' region.
        _createViewContainer : function (layoutPriority, widthRatio) {
            var _self = this;
            var vertical = this._verticalSplit;
            var style, region;
            if (vertical) {
                style = 'height:' + widthRatio + '%';
                if (layoutPriority === 0) {
                    region = 'center';
                } else {
                    region = 'bottom';
                }
            } else {
                style = 'width:' + widthRatio + '%';
                if (layoutPriority === 0) {
                    region = 'center';
                } else {
                    region = 'right';
                }
            }
            var vc = new ViewContainer();
            vc.initialize();
            vc.setParent(_self);
            var cp = vc.getTopContainer();
            cp.set('style', style);
            cp.set('region', region);
            cp.set('layoutPriority', layoutPriority);
            cp.set('splitter', true);
            return vc;
        },

        _isCenterContainer : function (viewContainer) {
            if (viewContainer) {
                var cp = viewContainer.getTopContainer();
                if (cp.get('region') === 'center') {
                    return true;
                }
            }
            return false;
        },

        // Exchanges all views between two containers; the temporary view
        // keeps the destination non-empty throughout the shuffle.
        _swap : function (srcViewContainer, destViewContainer) {
            var srcVcList = srcViewContainer.getViewList();
            var destVcList = destViewContainer.getViewList();
            var tempView = new View('_tempForSwap2', 'title');
            destViewContainer.addLast(tempView);
            _.forEach(destVcList, function (view) {
                destViewContainer.remove(view);
                srcViewContainer.addLast(view);
            });
            _.forEach(srcVcList, function (view) {
                srcViewContainer.remove(view);
                destViewContainer.addLast(view);
            });
            tempView.getParent().remove(tempView, true);
        },

        // Returns the first visible container that appears after
        // `viewContainer` in containerList, or null.
        _getNextShowedViewContainer : function (viewContainer) {
            var _self = this;
            var i, vc, found = false;
            var vcList = _self.getViewContainers();
            for (i = 0 ; i < vcList.length; i++) {
                vc = vcList[i];
                if (found) {
                    if (!_self._isHided(vc)) {
                        return vc;
                    }
                } else {
                    if (vc === viewContainer) {
                        found = true;
                    }
                }
            }
            return null;
        },

        // Partitions containers into shown (in widget child order) and
        // hidden lists, returning one of them by flag.
        _getViewContainersByState : function (isHided) {
            var _self = this;
            var hidedVcList = [];
            var showedVcList = [];
            var vcList = _self.getViewContainers();
            var i, j, vc;
            var children = this.widgetObject.getChildren();
            for (i = 0; i < children.length; i++) {
                for (j = 0 ; j < vcList.length; j++) {
                    vc = vcList[j];
                    if (children[i] === vc.getTopContainer()) {
                        showedVcList.push(vc);
                        break;
                    }
                }
            }
            for (i = 0 ; i < vcList.length; i++) {
                vc = vcList[i];
                if (_self._isHided(vc)) {
                    hidedVcList.push(vc);
                }
            }
            if (isHided) {
                return hidedVcList;
            } else {
                return showedVcList;
            }
        },

        // Moves `view` into a freshly shown container placed before/after
        // `vc`, then rebalances regions, sizes, and layout priorities so
        // the 'center' region stays first.
        _insert : function (vc, view, isPrev) {
            var region, layoutPriority, i, viewContainer, style, cellRatio;
            var _self = this;
            var showedList = _self._getViewContainersByState(false);
            var hidedList = _self._getViewContainersByState(true);
            var splitCount = showedList.length + 1;
            if (hidedList.length <= 0) {
                console.log('can not add new view container');
                return;
            }
            var targetViewContainer = hidedList[0];
            var viewMoveEvent = {
                view : view,
                nextSibling : null,
                destViewContainer : targetViewContainer
            };
            topic.publish('view.moved-before', viewMoveEvent);
            // Splice the new container into the visible ordering next to vc.
            for (i = 0; i < showedList.length; i++) {
                viewContainer = showedList[i];
                if (viewContainer === vc) {
                    if (isPrev) {
                        showedList.splice(i, 0, targetViewContainer);
                    } else {
                        if (i === (showedList.length - 1)) {
                            showedList.push(targetViewContainer);
                        } else {
                            showedList.splice(i + 1, 0, targetViewContainer);
                        }
                    }
                    break;
                }
            }
            _self.showContainer(targetViewContainer);
            view.getParent().remove(view);
            targetViewContainer.addLast(view);
            // Locate the 'center' container; it must remain the first entry.
            var centerContainer = null;
            var centerContainerIndex;
            layoutPriority = [];
            for (i = 0; i < showedList.length; i++) {
                viewContainer = showedList[i];
                if (viewContainer.topContainer.get('region') === 'center') {
                    centerContainer = viewContainer;
                    centerContainerIndex = i;
                }
            }
            var firstViewContainer = showedList[0];
            if (firstViewContainer !== centerContainer) {
                this._swap(firstViewContainer, centerContainer);
                showedList.splice(centerContainerIndex + 1, 0, firstViewContainer);
                showedList.splice(0, 1);
            }
            // Distribute sizes evenly and reassign layout priorities.
            cellRatio = parseInt(100 / splitCount, 10);
            for (i = 1; i < showedList.length; i++) {
                viewContainer = showedList[i];
                if (this._isHided(viewContainer)) {
                    continue;
                }
                if (_self.get('verticalSplit')) {
                    region = 'bottom';
                    style = 'height:' + cellRatio + '%';
                } else {
                    region = 'right';
                    style = 'width:' + cellRatio + '%';
                }
                layoutPriority = showedList.length - i;
                viewContainer.topContainer.set('region', region);
                viewContainer.topContainer.set('style', style);
                viewContainer.topContainer.set('layoutPriority', layoutPriority);
                _self.widgetObject.removeChild(viewContainer.getTopContainer());
                _self.widgetObject.addChild(viewContainer.getTopContainer());
            }
            topic.publish('view.moved', viewMoveEvent);
        },
    };

    return splitViewContainer;
});
| apache-2.0 |
nhsconnect/careconnect-reference-implementation | ccri-fhirserver/src/main/java/uk/nhs/careconnect/ccri/fhirserver/stu3/provider/PractitionerRoleProvider.java | 5020 | package uk.nhs.careconnect.ccri.fhirserver.stu3.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.OperationOutcome;
import org.hl7.fhir.dstu3.model.PractitionerRole;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import uk.nhs.careconnect.ccri.fhirserver.support.OperationOutcomeFactory;
import uk.nhs.careconnect.ccri.fhirserver.support.ProviderResponseLibrary;
import uk.nhs.careconnect.ri.database.daointerface.PractitionerRoleRepository;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
@Component
/**
 * HAPI FHIR resource provider for STU3 PractitionerRole: create, update,
 * read, search, and validate operations backed by
 * {@link PractitionerRoleRepository}.
 */
@Component
public class PractitionerRoleProvider implements ICCResourceProvider {

    @Autowired
    private PractitionerRoleRepository practitionerRoleDao;

    @Autowired
    FhirContext ctx;

    @Autowired
    private ResourceTestProvider resourceTestProvider;

    @Autowired
    private ResourcePermissionProvider resourcePermissionProvider;

    // Fix: was LoggerFactory.getLogger(PatientProvider.class), a copy-paste
    // bug that attributed this provider's log output to PatientProvider.
    private static final Logger log = LoggerFactory.getLogger(PractitionerRoleProvider.class);

    /** @return total number of stored PractitionerRole resources. */
    @Override
    public Long count() {
        return practitionerRoleDao.count();
    }

    @Override
    public Class<PractitionerRole> getResourceType() {
        return PractitionerRole.class;
    }

    /** Creates a new PractitionerRole after a permission check. */
    @Create
    public MethodOutcome create(HttpServletRequest theRequest, @ResourceParam PractitionerRole practitionerRole) {
        resourcePermissionProvider.checkPermission("create");
        return store(practitionerRole, null, null);
    }

    /** Updates (or conditionally creates) a PractitionerRole. */
    @Update
    public MethodOutcome updatePractitioner(HttpServletRequest theRequest, @ResourceParam PractitionerRole practitionerRole, @IdParam IdType theId, @ConditionalUrlParam String theConditional, RequestDetails theRequestDetails) {
        resourcePermissionProvider.checkPermission("update");
        return store(practitionerRole, theId, theConditional);
    }

    /**
     * Shared persistence path for create and update (the original bodies
     * were identical apart from the id/conditional arguments).  DAO errors
     * are translated into the outcome rather than rethrown.
     */
    private MethodOutcome store(PractitionerRole practitionerRole, IdType theId, String theConditional) {
        MethodOutcome method = new MethodOutcome();
        method.setCreated(true);
        OperationOutcome opOutcome = new OperationOutcome();
        method.setOperationOutcome(opOutcome);
        try {
            PractitionerRole newPractitioner = practitionerRoleDao.create(ctx, practitionerRole, theId, theConditional);
            method.setId(newPractitioner.getIdElement());
            method.setResource(newPractitioner);
        } catch (Exception ex) {
            ProviderResponseLibrary.handleException(method, ex);
        }
        return method;
    }

    /**
     * Reads a single PractitionerRole by id.
     *
     * @throws ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException
     *         (wrapped via OperationOutcomeFactory) when the id is unknown
     */
    @Read
    public PractitionerRole getPractitionerRole
            (@IdParam IdType internalId) {
        resourcePermissionProvider.checkPermission("read");
        PractitionerRole practitionerRole = practitionerRoleDao.read(ctx, internalId);
        if (practitionerRole == null) {
            throw OperationOutcomeFactory.buildOperationOutcomeException(
                    new ResourceNotFoundException("No PractitionerRole/" + internalId.getIdPart()),
                    OperationOutcome.IssueType.NOTFOUND);
        }
        return practitionerRole;
    }

    /** Searches by identifier, practitioner, organization, or resource id. */
    @Search
    public List<PractitionerRole> searchPractitioner(HttpServletRequest theRequest,
                @OptionalParam(name = PractitionerRole.SP_IDENTIFIER) TokenParam identifier,
                @OptionalParam(name = PractitionerRole.SP_PRACTITIONER) ReferenceParam practitioner,
                @OptionalParam(name = PractitionerRole.SP_ORGANIZATION) ReferenceParam organisation
            , @OptionalParam(name = PractitionerRole.SP_RES_ID) StringParam resid) {
        return practitionerRoleDao.search(ctx,
                identifier
                , practitioner
                , organisation
                , resid
        );
    }

    /** Validates a resource against the given mode and profile. */
    @Validate
    public MethodOutcome testResource(@ResourceParam PractitionerRole resource,
                                      @Validate.Mode ValidationModeEnum theMode,
                                      @Validate.Profile String theProfile) {
        return resourceTestProvider.testResource(resource, theMode, theProfile);
    }
}
| apache-2.0 |
xSAVIKx/secure_web | utils/db/entity/Site.php | 635 | <?php
/**
 * Immutable value object describing a web site entry.
 */
class Site
{
    /** @var mixed site identifier */
    private $id;

    /** @var mixed human-readable title */
    private $title;

    /** @var mixed site URL */
    private $url;

    /**
     * @param mixed $id
     * @param mixed $title
     * @param mixed $url
     */
    public function __construct($id, $title, $url)
    {
        $this->id = $id;
        $this->title = $title;
        $this->url = $url;
    }

    /**
     * @return string "Site[title=..., url=...]"
     */
    public function __toString()
    {
        return sprintf('Site[title=%s, url=%s]', $this->title, $this->url);
    }

    /** @return mixed */
    public function getId()
    {
        return $this->id;
    }

    /** @return mixed */
    public function getTitle()
    {
        return $this->title;
    }

    /** @return mixed */
    public function getUrl()
    {
        return $this->url;
    }
}
torrances/swtk-commons | commons-dict-wordnet-indexbyname/src/main/java/org/swtk/commons/dict/wordnet/indexbyname/instance/c/u/o/WordnetNounIndexNameInstanceCUO.java | 1207 | package org.swtk.commons.dict.wordnet.indexbyname.instance.c.u.o; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.TreeMap; import org.swtk.common.dict.dto.wordnet.IndexNoun; import com.trimc.blogger.commons.utils.GsonUtils; public final class WordnetNounIndexNameInstanceCUO { private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>(); static { add("{\"term\":\"cuon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02118427\"]}");
add("{\"term\":\"cuon alpinus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02118565\"]}");
} private static void add(final String JSON) { IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class); Collection<IndexNoun> list = (map.containsKey(indexNoun.getTerm())) ? map.get(indexNoun.getTerm()) : new ArrayList<IndexNoun>(); list.add(indexNoun); map.put(indexNoun.getTerm(), list); } public static Collection<IndexNoun> get(final String TERM) { return map.get(TERM); } public static boolean has(final String TERM) { return map.containsKey(TERM); } public static Collection<String> terms() { return map.keySet(); } } | apache-2.0 |
twitter/digits-android | digits/src/main/java/com/digits/sdk/android/FailureActivity.java | 1153 | /*
* Copyright (C) 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.digits.sdk.android;
import android.app.Activity;
import android.os.Bundle;
/**
* FailureActivity should never be started from outside digits otherwise
* will throw an {@link IllegalAccessError}.
*/
public class FailureActivity extends Activity {
FailureActivityDelegateImpl delegate;
public void onCreate(Bundle savedInstanceState) {
setTheme(Digits.getInstance().getTheme());
super.onCreate(savedInstanceState);
delegate = new FailureActivityDelegateImpl(this);
delegate.init();
}
}
| apache-2.0 |
aleo72/ww-ceem-radar | src/main/java/gov/nasa/worldwindx/examples/Pyramids.java | 6784 | /*
* Copyright (C) 2012 United States Government as represented by the Administrator of the
* National Aeronautics and Space Administration.
* All Rights Reserved.
*/
package gov.nasa.worldwindx.examples;
import gov.nasa.worldwind.WorldWind;
import gov.nasa.worldwind.avlist.AVKey;
import gov.nasa.worldwind.geom.*;
import gov.nasa.worldwind.layers.RenderableLayer;
import gov.nasa.worldwind.render.*;
/**
* Example of {@link Pyramid} usage. Shows examples of pyramids with various orientations, materials, and textures
* applied.
*
* @author ccrick
* @version $Id: Pyramids.java 1171 2013-02-11 21:45:02Z dcollins $
*/
public class Pyramids extends ApplicationTemplate
{
    /** Frame that populates a RenderableLayer with sample pyramids. */
    public static class AppFrame extends ApplicationTemplate.AppFrame
    {
        public AppFrame()
        {
            super(true, true, false);

            RenderableLayer layer = new RenderableLayer();

            // Create and set an attribute bundle (yellow, semi-transparent).
            ShapeAttributes attrs = new BasicShapeAttributes();
            attrs.setInteriorMaterial(Material.YELLOW);
            attrs.setInteriorOpacity(0.7);
            attrs.setEnableLighting(true);
            attrs.setOutlineMaterial(Material.RED);
            attrs.setOutlineWidth(2d);
            attrs.setDrawInterior(true);
            attrs.setDrawOutline(false);

            // Create and set an attribute bundle (opaque pink).
            ShapeAttributes attrs2 = new BasicShapeAttributes();
            attrs2.setInteriorMaterial(Material.PINK);
            attrs2.setInteriorOpacity(1);
            attrs2.setEnableLighting(true);
            attrs2.setOutlineMaterial(Material.WHITE);
            attrs2.setOutlineWidth(2d);
            attrs2.setDrawOutline(false);

            // ********* sample Pyramids *******************

            // Pyramid with equal axes, ABSOLUTE altitude mode
            Pyramid pyramid3 = new Pyramid(Position.fromDegrees(40, -120, 80000), 50000, 50000, 50000);
            pyramid3.setAltitudeMode(WorldWind.ABSOLUTE);
            pyramid3.setAttributes(attrs);
            pyramid3.setVisible(true);
            pyramid3.setValue(AVKey.DISPLAY_NAME, "Pyramid with equal axes, ABSOLUTE altitude mode");
            layer.addRenderable(pyramid3);

            // Pyramid with equal axes, RELATIVE_TO_GROUND
            Pyramid pyramid4 = new Pyramid(Position.fromDegrees(37.5, -115, 50000), 50000, 50000, 50000);
            pyramid4.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid4.setAttributes(attrs);
            pyramid4.setVisible(true);
            pyramid4.setValue(AVKey.DISPLAY_NAME, "Pyramid with equal axes, RELATIVE_TO_GROUND altitude mode");
            layer.addRenderable(pyramid4);

            // Pyramid with equal axes, CLAMP_TO_GROUND
            Pyramid pyramid5 = new Pyramid(Position.fromDegrees(35, -110, 50000), 50000, 50000, 50000);
            pyramid5.setAltitudeMode(WorldWind.CLAMP_TO_GROUND);
            pyramid5.setAttributes(attrs);
            pyramid5.setVisible(true);
            pyramid5.setValue(AVKey.DISPLAY_NAME, "Pyramid with equal axes, CLAMP_TO_GROUND altitude mode");
            layer.addRenderable(pyramid5);

            // Pyramid with a texture, using Pyramid(position, height, width) constructor
            Pyramid pyramid9 = new Pyramid(Position.fromDegrees(0, -90, 600000), 1200000, 1200000);
            pyramid9.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid9.setImageSources("gov/nasa/worldwindx/examples/images/500px-Checkerboard_pattern.png");
            pyramid9.setAttributes(attrs);
            pyramid9.setVisible(true);
            pyramid9.setValue(AVKey.DISPLAY_NAME, "Pyramid with a texture");
            layer.addRenderable(pyramid9);

            // Scaled Pyramid with default orientation
            Pyramid pyramid = new Pyramid(Position.ZERO, 1000000, 500000, 100000);
            pyramid.setAltitudeMode(WorldWind.ABSOLUTE);
            pyramid.setAttributes(attrs);
            pyramid.setVisible(true);
            pyramid.setValue(AVKey.DISPLAY_NAME, "Scaled Pyramid with default orientation");
            layer.addRenderable(pyramid);

            // Scaled Pyramid with a pre-set orientation (heading, tilt, roll in degrees)
            Pyramid pyramid2 = new Pyramid(Position.fromDegrees(0, 30, 750000), 1000000, 500000, 100000,
                Angle.fromDegrees(90), Angle.fromDegrees(45), Angle.fromDegrees(30));
            pyramid2.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid2.setAttributes(attrs2);
            pyramid2.setVisible(true);
            pyramid2.setValue(AVKey.DISPLAY_NAME, "Scaled Pyramid with a pre-set orientation");
            layer.addRenderable(pyramid2);

            // Scaled Pyramid with a pre-set orientation, textured
            Pyramid pyramid6 = new Pyramid(Position.fromDegrees(30, 30, 750000), 1000000, 500000, 100000,
                Angle.fromDegrees(90), Angle.fromDegrees(45), Angle.fromDegrees(30));
            pyramid6.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid6.setImageSources("gov/nasa/worldwindx/examples/images/500px-Checkerboard_pattern.png");
            pyramid6.setAttributes(attrs2);
            pyramid6.setVisible(true);
            pyramid6.setValue(AVKey.DISPLAY_NAME, "Scaled Pyramid with a pre-set orientation");
            layer.addRenderable(pyramid6);

            // Scaled Pyramid with a pre-set orientation
            Pyramid pyramid7 = new Pyramid(Position.fromDegrees(60, 30, 750000), 1000000, 500000, 100000,
                Angle.fromDegrees(90), Angle.fromDegrees(45), Angle.fromDegrees(30));
            pyramid7.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid7.setAttributes(attrs2);
            pyramid7.setVisible(true);
            pyramid7.setValue(AVKey.DISPLAY_NAME, "Scaled Pyramid with a pre-set orientation");
            layer.addRenderable(pyramid7);

            // Scaled, oriented pyramid in 3rd "quadrant" (-X, -Y, -Z)
            Pyramid pyramid8 = new Pyramid(Position.fromDegrees(-45, -180, 750000), 1000000, 500000, 100000,
                Angle.fromDegrees(90), Angle.fromDegrees(45), Angle.fromDegrees(30));
            pyramid8.setAltitudeMode(WorldWind.RELATIVE_TO_GROUND);
            pyramid8.setAttributes(attrs2);
            pyramid8.setVisible(true);
            pyramid8.setValue(AVKey.DISPLAY_NAME, "Scaled, oriented Pyramid in the 3rd 'quadrant' (-X, -Y, -Z)");
            layer.addRenderable(pyramid8);

            // Add the layer to the model.
            insertBeforeCompass(getWwd(), layer);

            // Update layer panel
            this.getLayerPanel().update(this.getWwd());
        }
    }

    /** Launches the example application. */
    public static void main(String[] args)
    {
        ApplicationTemplate.start("World Wind Pyramids", AppFrame.class);
    }
}
| apache-2.0 |
HeartSaVioR/incubator-zeppelin | zeppelin-web/src/app/notebook/paragraph/paragraph.controller.js | 63191 | /*jshint loopfunc: true, unused:false */
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('zeppelinWebApp')
.controller('ParagraphCtrl', function($scope,$rootScope, $route, $window, $element, $routeParams, $location,
$timeout, $compile, websocketMsgSrv) {
$scope.paragraph = null;
$scope.originalText = '';
$scope.editor = null;
var editorModes = {
'ace/mode/scala': /^%spark/,
'ace/mode/sql': /^%(\w*\.)?\wql/,
'ace/mode/markdown': /^%md/,
'ace/mode/sh': /^%sh/
};
// Controller init
$scope.init = function(newParagraph) {
$scope.paragraph = newParagraph;
$scope.originalText = angular.copy(newParagraph.text);
$scope.chart = {};
$scope.colWidthOption = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ];
$scope.showTitleEditor = false;
$scope.paragraphFocused = false;
if (newParagraph.focus) {
$scope.paragraphFocused = true;
}
if (!$scope.paragraph.config) {
$scope.paragraph.config = {};
}
initializeDefault();
if ($scope.getResultType() === 'TABLE') {
$scope.loadTableData($scope.paragraph.result);
$scope.setGraphMode($scope.getGraphMode(), false, false);
} else if ($scope.getResultType() === 'HTML') {
$scope.renderHtml();
} else if ($scope.getResultType() === 'ANGULAR') {
$scope.renderAngular();
}
};
$scope.renderHtml = function() {
var retryRenderer = function() {
if (angular.element('#p' + $scope.paragraph.id + '_html').length) {
try {
angular.element('#p' + $scope.paragraph.id + '_html').html($scope.paragraph.result.msg);
angular.element('#p' + $scope.paragraph.id + '_html').find('pre code').each(function(i, e) {
hljs.highlightBlock(e);
});
} catch (err) {
console.log('HTML rendering error %o', err);
}
} else {
$timeout(retryRenderer, 10);
}
};
$timeout(retryRenderer);
};
// Inject the %angular result and compile it against the shared compiledScope so
// Angular bindings inside the output work. Retries until the element exists.
$scope.renderAngular = function() {
  var tryRender = function() {
    var angularEl = angular.element('#p' + $scope.paragraph.id + '_angular');
    if (!angularEl.length) {
      $timeout(tryRender, 10);
      return;
    }
    try {
      angularEl.html($scope.paragraph.result.msg);
      $compile(angularEl.contents())($rootScope.compiledScope);
    } catch (err) {
      console.log('ANGULAR rendering error %o', err);
    }
  };
  $timeout(tryRender);
};
// Fill in any missing fields of paragraph.config with their defaults.
// Existing (truthy) values are always preserved.
var initializeDefault = function() {
  var config = $scope.paragraph.config;
  config.colWidth = config.colWidth || 12;
  config.graph = config.graph || {};
  var graph = config.graph;
  graph.mode = graph.mode || 'table';
  graph.height = graph.height || 300;
  graph.optionOpen = graph.optionOpen || false;
  graph.keys = graph.keys || [];
  graph.values = graph.values || [];
  graph.groups = graph.groups || [];
  graph.scatter = graph.scatter || {};
  // enabled defaults to true, but an explicit false must survive — hence the
  // undefined check rather than a truthiness check.
  if (config.enabled === undefined) {
    config.enabled = true;
  }
};
// Height of this paragraph's container when embedded as an iframe; 0 otherwise.
$scope.getIframeDimensions = function () {
  if (!$scope.asIframe) {
    return 0;
  }
  var containerSelector = '#' + $routeParams.paragraphId + '_container';
  return angular.element(containerSelector).height();
};
// When embedded as an iframe, report our rendered height to the parent window
// (via postMessage) so the parent can resize the iframe to fit.
$scope.$watch($scope.getIframeDimensions, function (newValue, oldValue) {
if ($scope.asIframe && newValue) {
var message = {};
message.height = newValue;
message.url = $location.$$absUrl;
$window.parent.postMessage(angular.toJson(message), '*');
}
});
// TODO: this may have impact on performance when there are many paragraphs in a note.
// Remote-change handler: merges a paragraph update pushed over the websocket
// into this controller's local copy, preserving unsaved local edits, then
// re-renders the output if its type or content changed.
$scope.$on('updateParagraph', function(event, data) {
// Only react when the update targets this paragraph AND some tracked field differs.
if (data.paragraph.id === $scope.paragraph.id &&
(data.paragraph.dateCreated !== $scope.paragraph.dateCreated ||
data.paragraph.dateFinished !== $scope.paragraph.dateFinished ||
data.paragraph.dateStarted !== $scope.paragraph.dateStarted ||
data.paragraph.dateUpdated !== $scope.paragraph.dateUpdated ||
data.paragraph.status !== $scope.paragraph.status ||
data.paragraph.jobName !== $scope.paragraph.jobName ||
data.paragraph.title !== $scope.paragraph.title ||
data.paragraph.errorMessage !== $scope.paragraph.errorMessage ||
!angular.equals(data.paragraph.settings, $scope.paragraph.settings) ||
!angular.equals(data.paragraph.config, $scope.paragraph.config))
) {
// Snapshot old vs. new result type / graph mode to decide what to re-render.
var oldType = $scope.getResultType();
var newType = $scope.getResultType(data.paragraph);
var oldGraphMode = $scope.getGraphMode();
var newGraphMode = $scope.getGraphMode(data.paragraph);
// A changed finish timestamp means the result body itself was refreshed.
var resultRefreshed = (data.paragraph.dateFinished !== $scope.paragraph.dateFinished);
var statusChanged = (data.paragraph.status !== $scope.paragraph.status);
//console.log("updateParagraph oldData %o, newData %o. type %o -> %o, mode %o -> %o", $scope.paragraph, data, oldType, newType, oldGraphMode, newGraphMode);
// Merge the paragraph text: keep local dirty edits unless the remote text
// has caught up with them.
if ($scope.paragraph.text !== data.paragraph.text) {
if ($scope.dirtyText) { // check if editor has local update
if ($scope.dirtyText === data.paragraph.text ) { // when local update is the same from remote, clear local update
$scope.paragraph.text = data.paragraph.text;
$scope.dirtyText = undefined;
$scope.originalText = angular.copy(data.paragraph.text);
} else { // if there're local update, keep it.
$scope.paragraph.text = $scope.dirtyText;
}
} else {
$scope.paragraph.text = data.paragraph.text;
$scope.originalText = angular.copy(data.paragraph.text);
}
}
/** push the rest */
$scope.paragraph.aborted = data.paragraph.aborted;
$scope.paragraph.dateUpdated = data.paragraph.dateUpdated;
$scope.paragraph.dateCreated = data.paragraph.dateCreated;
$scope.paragraph.dateFinished = data.paragraph.dateFinished;
$scope.paragraph.dateStarted = data.paragraph.dateStarted;
$scope.paragraph.errorMessage = data.paragraph.errorMessage;
$scope.paragraph.jobName = data.paragraph.jobName;
$scope.paragraph.title = data.paragraph.title;
$scope.paragraph.lineNumbers = data.paragraph.lineNumbers;
$scope.paragraph.status = data.paragraph.status;
$scope.paragraph.result = data.paragraph.result;
$scope.paragraph.settings = data.paragraph.settings;
// In iframe (report) mode the editor is forced hidden and the output shown.
if (!$scope.asIframe) {
$scope.paragraph.config = data.paragraph.config;
initializeDefault();
} else {
data.paragraph.config.editorHide = true;
data.paragraph.config.tableHide = false;
$scope.paragraph.config = data.paragraph.config;
}
// Re-render the output according to its (possibly new) type.
if (newType === 'TABLE') {
$scope.loadTableData($scope.paragraph.result);
if (oldType !== 'TABLE' || resultRefreshed) {
clearUnknownColsFromGraphOption();
selectDefaultColsForGraphOption();
}
/** User changed the chart type? */
if (oldGraphMode !== newGraphMode) {
$scope.setGraphMode(newGraphMode, false, false);
} else {
$scope.setGraphMode(newGraphMode, false, true);
}
} else if (newType === 'HTML' && resultRefreshed) {
$scope.renderHtml();
} else if (newType === 'ANGULAR' && resultRefreshed) {
$scope.renderAngular();
}
if (statusChanged || resultRefreshed) {
// when last paragraph runs, zeppelin automatically appends new paragraph.
// this broadcast will focus to the newly inserted paragraph
// NOTE(review): the attribute selector below is missing its closing ']' —
// confirm the bundled jQuery tolerates the malformed selector.
var paragraphs = angular.element('div[id$="_paragraphColumn_main"');
if (paragraphs.length >= 2 && paragraphs[paragraphs.length-2].id.startsWith($scope.paragraph.id)) {
// rendering output can took some time. So delay scrolling event firing for sometime.
setTimeout(function() {
$rootScope.$broadcast('scrollToCursor');
}, 500);
}
}
}
});
// True while the paragraph job is executing or queued.
// Idiom fix: return the boolean expression directly instead of
// `if (cond) return true; else return false;`.
$scope.isRunning = function() {
  var status = $scope.paragraph.status;
  return status === 'RUNNING' || status === 'PENDING';
};
// Ask the server to cancel this paragraph's running job.
$scope.cancelParagraph = function() {
console.log('Cancel %o', $scope.paragraph.id);
websocketMsgSrv.cancelParagraphRun($scope.paragraph.id);
};
// Submit the given text for execution with the current config/params, and
// treat it as the new baseline text (clears the dirty flag).
$scope.runParagraph = function(data) {
websocketMsgSrv.runParagraph($scope.paragraph.id, $scope.paragraph.title,
data, $scope.paragraph.config, $scope.paragraph.settings.params);
$scope.originalText = angular.copy(data);
$scope.dirtyText = undefined;
};
// Persist unsaved editor text without running it. No-op when there are no
// local edits or the edits match the last committed text.
$scope.saveParagraph = function(){
  var dirty = $scope.dirtyText;
  if (dirty === undefined || dirty === $scope.originalText) {
    return;
  }
  commitParagraph($scope.paragraph.title, dirty, $scope.paragraph.config, $scope.paragraph.settings.params);
  $scope.originalText = angular.copy(dirty);
  $scope.dirtyText = undefined;
};
// Flip the paragraph's enabled flag and commit the change.
// Idiom fix: `enabled ? false : true` replaced by direct negation.
$scope.toggleEnableDisable = function () {
  $scope.paragraph.config.enabled = !$scope.paragraph.config.enabled;
  var newParams = angular.copy($scope.paragraph.settings.params);
  var newConfig = angular.copy($scope.paragraph.config);
  commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Run the current editor contents, unless empty or already running/queued.
// Consistency fix: reuse $scope.isRunning() instead of duplicating its
// RUNNING/PENDING status check inline.
$scope.run = function() {
  var editorValue = $scope.editor.getValue();
  if (editorValue && !$scope.isRunning()) {
    $scope.runParagraph(editorValue);
  }
};
// Paragraph reordering/insertion: these only emit events; the note controller
// owns the paragraph list and performs the actual mutation.
$scope.moveUp = function() {
$scope.$emit('moveParagraphUp', $scope.paragraph.id);
};
$scope.moveDown = function() {
$scope.$emit('moveParagraphDown', $scope.paragraph.id);
};
// position is 'above' or 'below' (default).
$scope.insertNew = function(position) {
$scope.$emit('insertParagraph', $scope.paragraph.id, position || 'below');
};
// Delete this paragraph after a confirmation dialog.
$scope.removeParagraph = function() {
BootstrapDialog.confirm({
closable: true,
title: '',
message: 'Do you want to delete this paragraph?',
callback: function(result) {
if (result) {
console.log('Remove paragraph');
websocketMsgSrv.removeParagraph($scope.paragraph.id);
}
}
});
};
// Clear the paragraph's output on the server side.
$scope.clearParagraphOutput = function() {
websocketMsgSrv.clearParagraphOutput($scope.paragraph.id);
};
// Show the editor when it is hidden, hide it otherwise.
$scope.toggleEditor = function() {
  var hidden = $scope.paragraph.config.editorHide;
  (hidden ? $scope.openEditor : $scope.closeEditor)();
};
// The four functions below all follow the same pattern: copy the current
// params/config, flip one config flag, and commit the paragraph so the change
// round-trips through the server (and thus other clients).
// Hide the code editor.
$scope.closeEditor = function() {
console.log('close the note');
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.editorHide = true;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Show the code editor.
$scope.openEditor = function() {
console.log('open the note');
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.editorHide = false;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Hide the output area.
$scope.closeTable = function() {
console.log('close the output');
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.tableHide = true;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Show the output area.
$scope.openTable = function() {
console.log('open the output');
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.tableHide = false;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Show the paragraph title bar and commit the config change.
$scope.showTitle = function() {
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.title = true;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Hide the paragraph title bar and commit the config change.
$scope.hideTitle = function() {
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.title = false;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Commit the (already edited in the model) title text to the server.
$scope.setTitle = function() {
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Turn the editor gutter (line numbers) on, locally and in the committed config.
$scope.showLineNumbers = function () {
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.lineNumbers = true;
$scope.editor.renderer.setShowGutter(true);
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Turn the editor gutter (line numbers) off, locally and in the committed config.
$scope.hideLineNumbers = function () {
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
newConfig.lineNumbers = false;
$scope.editor.renderer.setShowGutter(false);
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Bootstrap column class for the paragraph; iframe mode is always full width.
$scope.columnWidthClass = function(n) {
  return 'col-md-' + ($scope.asIframe ? 12 : n);
};
// Commit a column-width change (config.colWidth is already updated by the
// caller) and close any open width dropdown menu.
$scope.changeColWidth = function() {
angular.element('.navbar-right.open').removeClass('open');
var newParams = angular.copy($scope.paragraph.settings.params);
var newConfig = angular.copy($scope.paragraph.config);
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Toggle the chart settings panel open/closed and commit the change.
// Idiom fix: if/else flag flip replaced by direct negation.
$scope.toggleGraphOption = function() {
  var newConfig = angular.copy($scope.paragraph.config);
  newConfig.graph.optionOpen = !newConfig.graph.optionOpen;
  var newParams = angular.copy($scope.paragraph.settings.params);
  commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Toggle output visibility and commit the change.
$scope.toggleOutput = function() {
var newConfig = angular.copy($scope.paragraph.config);
newConfig.tableHide = !newConfig.tableHide;
var newParams = angular.copy($scope.paragraph.settings.params);
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Swap between the plain line chart and the line-with-focus variant.
// Returns true when a swap happened, false for any other current mode.
$scope.toggleLineWithFocus = function () {
  var mode = $scope.getGraphMode();
  var counterpart = {
    'lineWithFocusChart': 'lineChart',
    'lineChart': 'lineWithFocusChart'
  }[mode];
  if (counterpart) {
    $scope.setGraphMode(counterpart, true);
    return true;
  }
  return false;
};
// Resolve the value of one dynamic form input: prefer the user-supplied
// param, then the form's default, then the first option.
$scope.loadForm = function(formulaire, params) {
var value = formulaire.defaultValue;
if (params[formulaire.name]) {
value = params[formulaire.name];
}
// NOTE(review): assumes formulaire.options is non-empty when value is '' —
// a text input with an empty default would throw here; verify with callers.
if (value === '') {
value = formulaire.options[0].value;
}
$scope.paragraph.settings.params[formulaire.name] = value;
};
// Editor change callback: remember the dirty text, arm the auto-save timer,
// and re-evaluate the syntax mode (e.g. user typed a %interpreter directive).
$scope.aceChanged = function() {
$scope.dirtyText = $scope.editor.getSession().getValue();
$scope.startSaveTimer();
$timeout(function() {
$scope.setParagraphMode($scope.editor.getSession(), $scope.dirtyText, $scope.editor.getCursorPosition());
});
};
// One-time Ace editor setup: visual options, keyboard handler, syntax-mode
// detection, remote autocomplete, focus/blur tracking and cursor-driven
// paragraph navigation.
$scope.aceLoaded = function(_editor) {
var langTools = ace.require('ace/ext/language_tools');
var Range = ace.require('ace/range').Range;
// silence Ace's deprecated auto-scroll warning
_editor.$blockScrolling = Infinity;
$scope.editor = _editor;
// Skip setup for the uninterpolated template instance ('{{paragraph.id}}_editor').
if (_editor.container.id !== '{{paragraph.id}}_editor') {
$scope.editor.renderer.setShowGutter($scope.paragraph.config.lineNumbers);
$scope.editor.setShowFoldWidgets(false);
$scope.editor.setHighlightActiveLine(false);
$scope.editor.setHighlightGutterLine(false);
$scope.editor.getSession().setUseWrapMode(true);
$scope.editor.setTheme('ace/theme/chrome');
if ($scope.paragraphFocused) {
$scope.editor.focus();
}
// Keep editor height matched to its content, now and on window resize.
autoAdjustEditorHeight(_editor.container.id);
angular.element(window).resize(function() {
autoAdjustEditorHeight(_editor.container.id);
});
// Emacs bindings only on Mac; see comment below for why not elsewhere.
if (navigator.appVersion.indexOf('Mac') !== -1 ) {
$scope.editor.setKeyboardHandler('ace/keyboard/emacs');
} else if (navigator.appVersion.indexOf('Win') !== -1 ||
navigator.appVersion.indexOf('X11') !== -1 ||
navigator.appVersion.indexOf('Linux') !== -1) {
// not applying emacs key binding while the binding override Ctrl-v. default behavior of paste text on windows.
}
// Pick the Ace syntax mode from the paragraph's %interpreter directive
// (see editorModes). Returns true when a new mode was applied.
$scope.setParagraphMode = function(session, paragraphText, pos) {
// Evaluate the mode only if the first 30 characters of the paragraph have been modified or the the position is undefined.
if ( (typeof pos === 'undefined') || (pos.row === 0 && pos.column < 30)) {
// If paragraph loading, use config value if exists
if ((typeof pos === 'undefined') && $scope.paragraph.config.editorMode) {
session.setMode($scope.paragraph.config.editorMode);
} else {
// Defaults to spark mode
var newMode = 'ace/mode/scala';
// Test first against current mode
var oldMode = session.getMode().$id;
if (!editorModes[oldMode] || !editorModes[oldMode].test(paragraphText)) {
for (var key in editorModes) {
if (key !== oldMode) {
if (editorModes[key].test(paragraphText)){
$scope.paragraph.config.editorMode = key;
session.setMode(key);
return true;
}
}
}
$scope.paragraph.config.editorMode = newMode;
session.setMode(newMode);
}
}
}
};
// Completer that asks the interpreter (over the websocket) for suggestions
// at the current cursor offset.
var remoteCompleter = {
getCompletions : function(editor, session, pos, prefix, callback) {
if (!$scope.editor.isFocused() ){ return;}
// convert (row, column) to a flat character offset
pos = session.getTextRange(new Range(0, 0, pos.row, pos.column)).length;
var buf = session.getValue();
websocketMsgSrv.completion($scope.paragraph.id, buf, pos);
// NOTE(review): this registers a new 'completionList' listener on every
// completion request without deregistering — verify this doesn't leak.
$scope.$on('completionList', function(event, data) {
if (data.completions) {
var completions = [];
for (var c in data.completions) {
var v = data.completions[c];
completions.push({
name:v,
value:v,
score:300
});
}
callback(null, completions);
}
});
}
};
langTools.setCompleters([remoteCompleter, langTools.keyWordCompleter, langTools.snippetCompleter, langTools.textCompleter]);
$scope.editor.setOptions({
enableBasicAutocompletion: true,
enableSnippets: false,
enableLiveAutocompletion:false
});
// Track focus state; wrapped in $timeout so $digest is safe to call even
// when a digest cycle is already running.
$scope.handleFocus = function(value) {
$scope.paragraphFocused = value;
// Protect against error in case digest is already running
$timeout(function() {
// Apply changes since they come from 3rd party library
$scope.$digest();
});
};
$scope.editor.on('focus', function() {
$scope.handleFocus(true);
});
$scope.editor.on('blur', function() {
$scope.handleFocus(false);
});
$scope.editor.getSession().on('change', function(e, editSession) {
autoAdjustEditorHeight(_editor.container.id);
});
$scope.setParagraphMode($scope.editor.getSession(), $scope.editor.getSession().getValue());
// autocomplete on '.'
/*
$scope.editor.commands.on("afterExec", function(e, t) {
if (e.command.name == "insertstring" && e.args == "." ) {
var all = e.editor.completers;
//e.editor.completers = [remoteCompleter];
e.editor.execCommand("startAutocomplete");
//e.editor.completers = all;
}
});
*/
// remove binding
$scope.editor.commands.bindKey('ctrl-alt-n.', null);
// autocomplete on 'ctrl+.'
$scope.editor.commands.bindKey('ctrl-.', 'startAutocomplete');
$scope.editor.commands.bindKey('ctrl-space', null);
// handle cursor moves
$scope.editor.keyBinding.origOnCommandKey = $scope.editor.keyBinding.onCommandKey;
// Wrap Ace's key handler: cursor moves past the first/last row shift focus
// to the previous/next paragraph; otherwise keep the cursor on screen.
$scope.editor.keyBinding.onCommandKey = function(e, hashId, keyCode) {
if ($scope.editor.completer && $scope.editor.completer.activated) { // if autocompleter is active
} else {
// fix ace editor focus issue in chrome (textarea element goes to top: -1000px after focused by cursor move)
if (parseInt(angular.element('#' + $scope.paragraph.id + '_editor > textarea').css('top').replace('px', '')) < 0) {
var position = $scope.editor.getCursorPosition();
var cursorPos = $scope.editor.renderer.$cursorLayer.getPixelPosition(position, true);
angular.element('#' + $scope.paragraph.id + '_editor > textarea').css('top', cursorPos.top);
}
var numRows;
var currentRow;
// keyCode 38 = arrow-up, 80 = 'p' (emacs Ctrl-P)
if (keyCode === 38 || (keyCode === 80 && e.ctrlKey && !e.altKey)) { // UP
numRows = $scope.editor.getSession().getLength();
currentRow = $scope.editor.getCursorPosition().row;
if (currentRow === 0) {
// move focus to previous paragraph
$scope.$emit('moveFocusToPreviousParagraph', $scope.paragraph.id);
} else {
$scope.scrollToCursor($scope.paragraph.id, -1);
}
// keyCode 40 = arrow-down, 78 = 'n' (emacs Ctrl-N)
} else if (keyCode === 40 || (keyCode === 78 && e.ctrlKey && !e.altKey)) { // DOWN
numRows = $scope.editor.getSession().getLength();
currentRow = $scope.editor.getCursorPosition().row;
if (currentRow === numRows-1) {
// move focus to next paragraph
$scope.$emit('moveFocusToNextParagraph', $scope.paragraph.id);
} else {
$scope.scrollToCursor($scope.paragraph.id, 1);
}
}
}
this.origOnCommandKey(e, hashId, keyCode);
};
}
};
// Resize the editor container to exactly fit its content (plus the
// horizontal scrollbar) and tell Ace to re-layout.
var autoAdjustEditorHeight = function(id) {
  var editor = $scope.editor;
  var renderer = editor.renderer;
  var newHeight = editor.getSession().getScreenLength() * renderer.lineHeight + renderer.scrollBar.getWidth();
  angular.element('#' + id).height(newHeight.toString() + 'px');
  editor.resize();
};
// Auto-scroll on 'scrollToCursor' only when the cursor lives in the LAST
// paragraph on the page (the one Zeppelin just appended).
// BUGFIX: the attribute selector was missing its closing ']', which is a
// malformed CSS selector.
$rootScope.$on('scrollToCursor', function(event) {
  var paragraphs = angular.element('div[id$="_paragraphColumn_main"]');
  if (paragraphs[paragraphs.length - 1].id.startsWith($scope.paragraph.id)) {
    $scope.scrollToCursor($scope.paragraph.id, 0);
  }
});
/** scrollToCursor if it is necessary
* when cursor touches scrollTriggerEdgeMargin from the top (or bottom) of the screen, it autoscroll to place cursor around 1/3 of screen height from the top (or bottom)
* paragraphId : paragraph that has active cursor
* lastCursorMove : 1(down), 0, -1(up) last cursor move event
**/
// Scroll the page so the editor cursor stays comfortably on screen.
// paragraphId: paragraph that has the active cursor.
// lastCursorMove: 1 (down), 0, or -1 (up) — the direction just moved.
$scope.scrollToCursor = function(paragraphId, lastCursorMove) {
if (!$scope.editor.isFocused()) {
// only make sense when editor is focused
return;
}
var lineHeight = $scope.editor.renderer.lineHeight;
var headerHeight = 103; // menubar, notebook titlebar
var scrollTriggerEdgeMargin = 50;
var documentHeight = angular.element(document).height();
var windowHeight = angular.element(window).height(); // actual viewport height
var scrollPosition = angular.element(document).scrollTop();
var editorPosition = angular.element('#'+paragraphId+'_editor').offset();
var position = $scope.editor.getCursorPosition();
var lastCursorPosition = $scope.editor.renderer.$cursorLayer.getPixelPosition(position, true);
// absolute page-coordinate of the cursor after the pending move
var calculatedCursorPosition = editorPosition.top + lastCursorPosition.top + lineHeight*lastCursorMove;
var scrollTargetPos;
// cursor near the top edge: scroll so it sits ~1/3 from the top
if (calculatedCursorPosition < scrollPosition + headerHeight + scrollTriggerEdgeMargin) {
scrollTargetPos = calculatedCursorPosition - headerHeight - ((windowHeight-headerHeight)/3);
if (scrollTargetPos < 0) {
scrollTargetPos = 0;
}
// cursor near the bottom edge: scroll so it sits ~1/3 from the bottom
} else if(calculatedCursorPosition > scrollPosition + scrollTriggerEdgeMargin + windowHeight - headerHeight) {
scrollTargetPos = calculatedCursorPosition - headerHeight - ((windowHeight-headerHeight)*2/3);
if (scrollTargetPos > documentHeight) {
scrollTargetPos = documentHeight;
}
}
// cancel previous scroll animation
var bodyEl = angular.element('body');
bodyEl.stop();
bodyEl.finish();
// scroll to scrollTargetPos
bodyEl.scrollTo(scrollTargetPos, {axis: 'y', interrupt: true, duration:100});
};
// Set an explicit pixel height on the editor container element.
var setEditorHeight = function(id, height) {
  var containerEl = angular.element('#' + id);
  containerEl.height(height.toString() + 'px');
};
// Current text in the Ace editor.
$scope.getEditorValue = function() {
  return $scope.editor.getValue();
};
// Job progress percentage; 0 until the first progress update arrives.
$scope.getProgress = function() {
  return $scope.currentProgress || 0;
};
// Human-readable execution time of the last run, with an '(outdated)' marker
// when the paragraph text changed after the run started.
$scope.getExecutionTime = function() {
  var pdata = $scope.paragraph;
  var elapsedMs = Date.parse(pdata.dateFinished) - Date.parse(pdata.dateStarted);
  // unparsable or negative timestamps mean the paragraph never completed a run
  if (isNaN(elapsedMs) || elapsedMs < 0) {
    return $scope.isResultOutdated() ? 'outdated' : '';
  }
  var desc = 'Took ' + (elapsedMs/1000) + ' seconds';
  if ($scope.isResultOutdated()) {
    desc += ' (outdated)';
  }
  return desc;
};
// True when the paragraph was edited after its last run started, i.e. the
// shown result no longer corresponds to the current text.
// Idiom fix: return the boolean expression directly.
$scope.isResultOutdated = function() {
  var pdata = $scope.paragraph;
  return pdata.dateUpdated !== undefined &&
    Date.parse(pdata.dateUpdated) > Date.parse(pdata.dateStarted);
};
// Track job progress updates addressed to this paragraph.
$scope.$on('updateProgress', function(event, data) {
if (data.id === $scope.paragraph.id) {
$scope.currentProgress = data.progress;
}
});
// Global keyboard shortcuts, active only while this paragraph has focus.
// Navigation (when editor hidden), run/cancel, structural edits, layout and
// display toggles. Handled shortcuts get preventDefault().
$scope.$on('keyEvent', function(event, keyEvent) {
if ($scope.paragraphFocused) {
var paragraphId = $scope.paragraph.id;
var keyCode = keyEvent.keyCode;
var noShortcutDefined = false;
var editorHide = $scope.paragraph.config.editorHide;
// arrow-up / Ctrl-p with the editor hidden: paragraph navigation
if (editorHide && (keyCode === 38 || (keyCode === 80 && keyEvent.ctrlKey && !keyEvent.altKey))) { // up
// move focus to previous paragraph
$scope.$emit('moveFocusToPreviousParagraph', paragraphId);
} else if (editorHide && (keyCode === 40 || (keyCode === 78 && keyEvent.ctrlKey && !keyEvent.altKey))) { // down
// move focus to next paragraph
$scope.$emit('moveFocusToNextParagraph', paragraphId);
} else if (keyEvent.shiftKey && keyCode === 13) { // Shift + Enter
$scope.run();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 67) { // Ctrl + Alt + c
$scope.cancelParagraph();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 68) { // Ctrl + Alt + d
$scope.removeParagraph();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 75) { // Ctrl + Alt + k
$scope.moveUp();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 74) { // Ctrl + Alt + j
$scope.moveDown();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 65) { // Ctrl + Alt + a
$scope.insertNew('above');
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 66) { // Ctrl + Alt + b
$scope.insertNew('below');
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 79) { // Ctrl + Alt + o
$scope.toggleOutput();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 69) { // Ctrl + Alt + e
$scope.toggleEditor();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 77) { // Ctrl + Alt + m
if ($scope.paragraph.config.lineNumbers) {
$scope.hideLineNumbers();
} else {
$scope.showLineNumbers();
}
// digits 1-9 set width 1-9; 0 -> 10, '-' -> 11, '=' -> 12
} else if (keyEvent.ctrlKey && keyEvent.altKey && ((keyCode >= 48 && keyCode <=57) || keyCode === 189 || keyCode === 187)) { // Ctrl + Alt + [1~9,0,-,=]
var colWidth = 12;
if (keyCode === 48) {
colWidth = 10;
} else if (keyCode === 189) {
colWidth = 11;
} else if (keyCode === 187) {
colWidth = 12;
} else {
colWidth = keyCode - 48;
}
$scope.paragraph.config.colWidth = colWidth;
$scope.changeColWidth();
} else if (keyEvent.ctrlKey && keyEvent.altKey && keyCode === 84) { // Ctrl + Alt + t
if ($scope.paragraph.config.title) {
$scope.hideTitle();
} else {
$scope.showTitle();
}
} else {
noShortcutDefined = true;
}
// only consume the key when a shortcut actually handled it
if (!noShortcutDefined) {
keyEvent.preventDefault();
}
}
});
// Focus handoff: when this paragraph is targeted, focus its editor and place
// the cursor (unless the focus came from a mouse click, which already sets
// the caret); otherwise blur and mark unfocused.
$scope.$on('focusParagraph', function(event, paragraphId, cursorPos, mouseEvent) {
if ($scope.paragraph.id === paragraphId) {
// focus editor
if (!$scope.paragraph.config.editorHide) {
if (!mouseEvent) {
$scope.editor.focus();
// move cursor to the first row (or the last row)
var row;
if (cursorPos >= 0) {
row = cursorPos;
$scope.editor.gotoLine(row, 0);
} else {
// negative cursorPos means "jump to the end"
row = $scope.editor.session.getLength();
$scope.editor.gotoLine(row, 0);
}
$scope.scrollToCursor($scope.paragraph.id, 0);
}
}
$scope.handleFocus(true);
} else {
$scope.editor.blur();
$scope.handleFocus(false);
}
});
// Thin relays: note-level broadcasts mapped onto paragraph-local actions.
$scope.$on('runParagraph', function(event) {
$scope.runParagraph($scope.editor.getValue());
});
$scope.$on('openEditor', function(event) {
$scope.openEditor();
});
$scope.$on('closeEditor', function(event) {
$scope.closeEditor();
});
$scope.$on('openTable', function(event) {
$scope.openTable();
});
$scope.$on('closeTable', function(event) {
$scope.closeTable();
});
// Result type ('TABLE', 'HTML', 'ANGULAR', ...) of the given paragraph, or of
// this controller's paragraph when none is passed. Defaults to 'TEXT'.
$scope.getResultType = function(paragraph) {
  var pdata = paragraph || $scope.paragraph;
  var result = pdata.result;
  return (result && result.type) ? result.type : 'TEXT';
};
// Data-URI for an inline base64-encoded PNG image result.
$scope.getBase64ImageSrc = function(base64Data) {
  return 'data:image/png;base64,' + base64Data;
};
// Chart mode ('table', 'pieChart', ...) of the given paragraph, or of this
// controller's paragraph when none is passed. Defaults to 'table'.
$scope.getGraphMode = function(paragraph) {
  var pdata = paragraph || $scope.paragraph;
  var graph = pdata.config.graph;
  return (graph && graph.mode) ? graph.mode : 'table';
};
// Parse a TABLE-type result (tab-separated text, first row = header, an empty
// line after the data starts a trailing comment) into the structures the
// table renderer and chart pivoting use:
//   result.columnNames - [{name, index, aggr}]
//   result.rows        - raw string cells per data row
//   result.msgTable    - per-row [{key: columnName, value: cell}]
// Mutates `result` in place; no-op for falsy or non-TABLE results.
$scope.loadTableData = function(result) {
  if (!result) {
    return;
  }
  if (result.type === 'TABLE') {
    var columnNames = [];
    var rows = [];
    var array = [];
    var textRows = result.msg.split('\n');
    result.comment = '';
    var comment = false;
    for (var i = 0; i < textRows.length; i++) {
      var textRow = textRows[i];
      if (comment) {
        result.comment += textRow;
        continue;
      }
      // an empty line after at least one data row starts the comment section
      if (textRow === '') {
        if (rows.length > 0) {
          comment = true;
        }
        continue;
      }
      var textCols = textRow.split('\t');
      var cols = [];
      var cols2 = [];
      for (var j = 0; j < textCols.length; j++) {
        var col = textCols[j];
        if (i === 0) {
          columnNames.push({name: col, index: j, aggr: 'sum'});
        } else {
          cols.push(col);
          // BUGFIX: key must come from columnNames[j] (this cell's column),
          // not columnNames[i] (the row index), which attached wrong or
          // undefined column names to msgTable entries.
          cols2.push({key: (columnNames[j]) ? columnNames[j].name : undefined, value: col});
        }
      }
      if (i !== 0) {
        rows.push(cols);
        array.push(cols2);
      }
    }
    result.msgTable = array;
    result.columnNames = columnNames;
    result.rows = rows;
  }
};
// Switch the result display mode. With emit=true the new mode is committed to
// the server (and re-rendering happens via the updateParagraph round-trip);
// with emit=false it renders locally. refresh tells the renderer to reuse the
// existing chart rather than rebuild it.
$scope.setGraphMode = function(type, emit, refresh) {
if (emit) {
setNewMode(type);
} else {
clearUnknownColsFromGraphOption();
// set graph height
var height = $scope.paragraph.config.graph.height;
angular.element('#p' + $scope.paragraph.id + '_graph').height(height);
// NOTE(review): setTable is declared with a leading `type` parameter but is
// called here with only (data, refresh); its parameters are unused so this
// works, but the signatures should be reconciled.
if (!type || type === 'table') {
setTable($scope.paragraph.result, refresh);
}
else {
setD3Chart(type, $scope.paragraph.result, refresh);
}
}
};
// Commit a chart-mode change to the server; rendering follows via the
// resulting 'updateParagraph' broadcast.
var setNewMode = function(newMode) {
var newConfig = angular.copy($scope.paragraph.config);
var newParams = angular.copy($scope.paragraph.settings.params);
// graph options
newConfig.graph.mode = newMode;
commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
// Push the full paragraph state (title, text, config, form params) to the server.
var commitParagraph = function(title, text, config, params) {
websocketMsgSrv.commitParagraph($scope.paragraph.id, title, text, config, params);
};
// Render the paragraph result as an HTML table into #p<id>_table.
// SIGNATURE FIX: previously declared as (type, data, refresh) but its only
// call site (setGraphMode) passes (data, refresh); since none of the
// parameters are actually read — the renderer works off $scope.paragraph —
// the signature is corrected to match the call site.
var setTable = function(data, refresh) {
  // 'html' when the cell is tagged with a leading '%html ' directive, else ''.
  var getTableContentFormat = function(d) {
    if (isNaN(d)) {
      if (d.length > '%html'.length && '%html ' === d.substring(0, '%html '.length)) {
        return 'html';
      } else {
        return '';
      }
    } else {
      return '';
    }
  };
  // Strip a '%html ' prefix from tagged cells; comma-group digits in numbers.
  var formatTableContent = function(d) {
    if (isNaN(d)) {
      var f = getTableContentFormat(d);
      if (f !== '') {
        // drop '%<format> ' (format name + '%' + trailing space)
        return d.substring(f.length + 2);
      } else {
        return d;
      }
    } else {
      var dStr = d.toString();
      var splitted = dStr.split('.');
      // insert thousands separators into the integer part only
      var formatted = splitted[0].replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1,');
      if (splitted.length > 1) {
        formatted += '.' + splitted[1];
      }
      return formatted;
    }
  };
  // Build the table markup from result.columnNames/msgTable and attach
  // floating header / scrollbar behavior sized to the configured height.
  var renderTable = function() {
    var html = '';
    html += '<table class="table table-hover table-condensed">';
    html += '  <thead>';
    html += '    <tr style="background-color: #F6F6F6; font-weight: bold;">';
    for (var titleIndex in $scope.paragraph.result.columnNames) {
      html += '<th>' + $scope.paragraph.result.columnNames[titleIndex].name + '</th>';
    }
    html += '    </tr>';
    html += '  </thead>';
    html += '  <tbody>';
    for (var r in $scope.paragraph.result.msgTable) {
      var row = $scope.paragraph.result.msgTable[r];
      html += '    <tr>';
      for (var index in row) {
        var v = row[index].value;
        // HTML-escape non-%html cells (entity-encode specials and non-ASCII)
        if (getTableContentFormat(v) !== 'html') {
          v = v.replace(/[\u00A0-\u9999<>\&]/gim, function(i) {
            return '&#' + i.charCodeAt(0) + ';';
          });
        }
        html += '      <td>' + formatTableContent(v) + '</td>';
      }
      html += '    </tr>';
    }
    html += '  </tbody>';
    html += '</table>';
    angular.element('#p' + $scope.paragraph.id + '_table').html(html);
    if ($scope.paragraph.result.msgTable.length > 10000) {
      // very large tables: plain overflow scrolling only (floatThead and
      // perfectScrollbar are too slow at this size)
      angular.element('#p' + $scope.paragraph.id + '_table').css('overflow', 'scroll');
      // set table height
      var height = $scope.paragraph.config.graph.height;
      angular.element('#p' + $scope.paragraph.id + '_table').css('height', height);
    } else {
      var dataTable = angular.element('#p' + $scope.paragraph.id + '_table .table');
      dataTable.floatThead({
        scrollContainer: function (dataTable) {
          return angular.element('#p' + $scope.paragraph.id + '_table');
        }
      });
      // tear down the floating header when the table node is removed
      angular.element('#p' + $scope.paragraph.id + '_table .table').on('remove', function () {
        angular.element('#p' + $scope.paragraph.id + '_table .table').floatThead('destroy');
      });
      angular.element('#p' + $scope.paragraph.id + '_table').css('position', 'relative');
      angular.element('#p' + $scope.paragraph.id + '_table').css('height', '100%');
      angular.element('#p' + $scope.paragraph.id + '_table').perfectScrollbar('destroy');
      angular.element('#p' + $scope.paragraph.id + '_table').perfectScrollbar();
      angular.element('.ps-scrollbar-y-rail').css('z-index', '1002');
      // set table height
      var psHeight = $scope.paragraph.config.graph.height;
      angular.element('#p' + $scope.paragraph.id + '_table').css('height', psHeight);
      angular.element('#p' + $scope.paragraph.id + '_table').perfectScrollbar('update');
    }
  };
  // Retry until the output container exists in the DOM.
  var retryRenderer = function() {
    if (angular.element('#p' + $scope.paragraph.id + '_table').length) {
      try {
        renderTable();
      } catch(err) {
        console.log('Chart drawing error %o', err);
      }
    } else {
      $timeout(retryRenderer, 10);
    }
  };
  $timeout(retryRenderer);
};
// d3 formatter for axis ticks: comma thousands separators.
var integerFormatter = d3.format(',.1d');
// SI-prefix formatter that rewrites d3's 'G' (giga) suffix as 'B' (billions).
var customAbbrevFormatter = function(x) {
var s = d3.format('.3s')(x);
switch (s[s.length - 1]) {
case 'G': return s.slice(0, -1) + 'B';
}
return s;
};
// x-axis tick label: show the categorical label for index d when it is a
// non-numeric string; otherwise fall back to the numeric tick value itself.
var xAxisTickFormat = function(d, xLabels) {
  var label = xLabels[d];
  var isNonNumericLabel = label && (isNaN(parseFloat(label)) || !isFinite(label));
  return isNonNumericLabel ? label : d;
};
// y-axis tick label: abbreviate values >= 10^6 (e.g. 2.50M / 1.20B),
// otherwise comma-separate via integerFormatter.
var yAxisTickFormat = function(d) {
if(d >= Math.pow(10,6)){
return customAbbrevFormatter(d);
}
return integerFormatter(d);
};
// Render the paragraph result as an NVD3 chart of the given type into the
// '#p<id>_<type> svg' element. Chart objects are cached per type in
// $scope.chart; refresh re-uses the cached chart instead of reconfiguring.
var setD3Chart = function(type, data, refresh) {
if (!$scope.chart[type]) {
var chart = nv.models[type]();
$scope.chart[type] = chart;
}
var d3g = [];
var xLabels;
var yLabels;
if (type === 'scatterChart') {
// scatter data comes from the dedicated scatter transform, with separate
// label tables for both axes (either axis may be categorical)
var scatterData = setScatterChart(data, refresh);
xLabels = scatterData.xLabels;
yLabels = scatterData.yLabels;
d3g = scatterData.d3g;
$scope.chart[type].xAxis.tickFormat(function(d) {return xAxisTickFormat(d, xLabels);});
$scope.chart[type].yAxis.tickFormat(function(d) {return xAxisTickFormat(d, yLabels);});
// configure how the tooltip looks.
$scope.chart[type].tooltipContent(function(key, x, y, graph, data) {
var tooltipContent = '<h3>' + key + '</h3>';
if ($scope.paragraph.config.graph.scatter.size &&
$scope.isValidSizeOption($scope.paragraph.config.graph.scatter, $scope.paragraph.result.rows)) {
tooltipContent += '<p>' + data.point.size + '</p>';
}
return tooltipContent;
});
$scope.chart[type].showDistX(true)
.showDistY(true);
//handle the problem of tooltip not showing when muliple points have same value.
} else {
// all other chart types work off the pivoted table data
var p = pivot(data);
if (type === 'pieChart') {
var d = pivotDataToD3ChartFormat(p, true).d3g;
$scope.chart[type].x(function(d) { return d.label;})
.y(function(d) { return d.value;});
// flatten the first series into {label, value} slices
if ( d.length > 0 ) {
for ( var i=0; i<d[0].values.length ; i++) {
var e = d[0].values[i];
d3g.push({
label : e.x,
value : e.y
});
}
}
} else if (type === 'multiBarChart') {
d3g = pivotDataToD3ChartFormat(p, true, false, type).d3g;
$scope.chart[type].yAxis.axisLabelDistance(50);
$scope.chart[type].yAxis.tickFormat(function(d) {return yAxisTickFormat(d);});
} else if (type === 'lineChart' || type === 'stackedAreaChart' || type === 'lineWithFocusChart') {
var pivotdata = pivotDataToD3ChartFormat(p, false, true);
xLabels = pivotdata.xLabels;
d3g = pivotdata.d3g;
$scope.chart[type].xAxis.tickFormat(function(d) {return xAxisTickFormat(d, xLabels);});
$scope.chart[type].yAxis.tickFormat(function(d) {return yAxisTickFormat(d);});
$scope.chart[type].yAxis.axisLabelDistance(50);
if ($scope.chart[type].useInteractiveGuideline) { // lineWithFocusChart hasn't got useInteractiveGuideline
$scope.chart[type].useInteractiveGuideline(true); // for better UX and performance issue. (https://github.com/novus/nvd3/issues/691)
}
if($scope.paragraph.config.graph.forceY) {
$scope.chart[type].forceY([0]); // force y-axis minimum to 0 for line chart.
} else {
$scope.chart[type].forceY([]);
}
}
}
// Bind the data to the svg and draw, animating unless the dataset is large.
var renderChart = function() {
if (!refresh) {
// TODO force destroy previous chart
}
var height = $scope.paragraph.config.graph.height;
var animationDuration = 300;
var numberOfDataThreshold = 150;
// turn off animation when dataset is too large. (for performance issue)
// still, since dataset is large, the chart content sequentially appears like animated.
try {
if (d3g[0].values.length > numberOfDataThreshold) {
animationDuration = 0;
}
} catch(ignoreErr) {
}
var chartEl = d3.select('#p'+$scope.paragraph.id+'_'+type+' svg')
.attr('height', $scope.paragraph.config.graph.height)
.datum(d3g)
.transition()
.duration(animationDuration)
.call($scope.chart[type]);
d3.select('#p'+$scope.paragraph.id+'_'+type+' svg').style.height = height+'px';
nv.utils.windowResize($scope.chart[type].update);
};
// Retry until the chart's svg element exists in the DOM.
var retryRenderer = function() {
if (angular.element('#p' + $scope.paragraph.id + '_' + type + ' svg').length !== 0) {
try {
renderChart();
} catch(err) {
console.log('Chart drawing error %o', err);
}
} else {
$timeout(retryRenderer,10);
}
};
$timeout(retryRenderer);
};
// True when the paragraph holds a TABLE result currently displayed as the
// given graph mode.
$scope.isGraphMode = function(graphName) {
  return $scope.getResultType() === 'TABLE' && $scope.getGraphMode() === graphName;
};
// Each handler below mutates the paragraph's graph configuration, prunes
// options that refer to columns no longer present in the result, and then
// re-applies the current graph mode to redraw the chart.
$scope.onGraphOptionChange = function() {
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
// Remove the idx-th key column from the pivot configuration.
$scope.removeGraphOptionKeys = function(idx) {
  $scope.paragraph.config.graph.keys.splice(idx, 1);
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
// Remove the idx-th value column from the pivot configuration.
$scope.removeGraphOptionValues = function(idx) {
  $scope.paragraph.config.graph.values.splice(idx, 1);
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
// Remove the idx-th group column from the pivot configuration.
$scope.removeGraphOptionGroups = function(idx) {
  $scope.paragraph.config.graph.groups.splice(idx, 1);
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
// Change the aggregation function of the idx-th value column.
$scope.setGraphOptionValueAggr = function(idx, aggr) {
  $scope.paragraph.config.graph.values[idx].aggr = aggr;
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
// The scatter removers clear one scatter setting each; `idx` is unused but
// kept so all removers share the same call signature.
$scope.removeScatterOptionXaxis = function(idx) {
  $scope.paragraph.config.graph.scatter.xAxis = null;
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
$scope.removeScatterOptionYaxis = function(idx) {
  $scope.paragraph.config.graph.scatter.yAxis = null;
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
$scope.removeScatterOptionGroup = function(idx) {
  $scope.paragraph.config.graph.scatter.group = null;
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
$scope.removeScatterOptionSize = function(idx) {
  $scope.paragraph.config.graph.scatter.size = null;
  clearUnknownColsFromGraphOption();
  $scope.setGraphMode($scope.paragraph.config.graph.mode, true, false);
};
/* Clear unknown columns from graph option */
var clearUnknownColsFromGraphOption = function() {
  // Drop later duplicates from `list` in place.
  // Fix: after splice(j, 1) the next element shifts into index j, so j must
  // be decremented; the original j++ skipped every element that followed a
  // removed duplicate, leaving consecutive duplicates behind.
  var unique = function(list) {
    for (var i = 0; i < list.length; i++) {
      for (var j = i + 1; j < list.length; j++) {
        if (angular.equals(list[i], list[j])) {
          list.splice(j, 1);
          j--;
        }
      }
    }
  };
  // Remove entries that no longer match any column of the current result
  // (compared by both index and name).
  // Fix: i is decremented after a removal so the element shifted into
  // position i is also checked; previously consecutive unknown columns
  // survived the pruning.
  var removeUnknown = function(list) {
    for (var i = 0; i < list.length; i++) {
      // remove non existing column
      var found = false;
      for (var j = 0; j < $scope.paragraph.result.columnNames.length; j++) {
        var a = list[i];
        var b = $scope.paragraph.result.columnNames[j];
        if (a.index === b.index && a.name === b.name) {
          found = true;
          break;
        }
      }
      if (!found) {
        list.splice(i, 1);
        i--;
      }
    }
  };
  // Null out scatter settings (xAxis/yAxis/group/size) that refer to columns
  // absent from the current result.
  var removeUnknownFromScatterSetting = function(fields) {
    for (var f in fields) {
      if (fields[f]) {
        var found = false;
        for (var i = 0; i < $scope.paragraph.result.columnNames.length; i++) {
          var a = fields[f];
          var b = $scope.paragraph.result.columnNames[i];
          if (a.index === b.index && a.name === b.name) {
            found = true;
            break;
          }
        }
        if (!found) {
          fields[f] = null;
        }
      }
    }
  };
  unique($scope.paragraph.config.graph.keys);
  removeUnknown($scope.paragraph.config.graph.keys);
  removeUnknown($scope.paragraph.config.graph.values);
  unique($scope.paragraph.config.graph.groups);
  removeUnknown($scope.paragraph.config.graph.groups);
  removeUnknownFromScatterSetting($scope.paragraph.config.graph.scatter);
};
/* select default key and value if there're none selected */
// Defaults: first column as the pivot key, second as the pivot value, and
// first/second columns as the scatter x/y axes when neither axis is set.
var selectDefaultColsForGraphOption = function() {
  var graph = $scope.paragraph.config.graph;
  var columns = $scope.paragraph.result.columnNames;
  if (graph.keys.length === 0 && columns.length > 0) {
    graph.keys.push(columns[0]);
  }
  if (graph.values.length === 0 && columns.length > 1) {
    graph.values.push(columns[1]);
  }
  if (!graph.scatter.xAxis && !graph.scatter.yAxis) {
    if (columns.length > 1) {
      graph.scatter.xAxis = columns[0];
      graph.scatter.yAxis = columns[1];
    } else if (columns.length === 1) {
      graph.scatter.xAxis = columns[0];
    }
  }
};
// Pivot the raw table rows into a nested {schema, rows} pair:
//   schema — a key -> group -> value hierarchy describing each column's role,
//            insertion order and source column index
//   rows   — parallel nesting of the data keyed by each key/group cell value,
//            with per-value running aggregates {value, count}
var pivot = function(data) {
  var keys = $scope.paragraph.config.graph.keys;
  var groups = $scope.paragraph.config.graph.groups;
  var values = $scope.paragraph.config.graph.values;
  // Incremental aggregators: fold the next cell (b) into the accumulator (a).
  var aggrFunc = {
    sum : function(a,b) {
      // non-numeric cells are counted as 1, missing cells as 0
      var varA = (a !== undefined) ? (isNaN(a) ? 1 : parseFloat(a)) : 0;
      var varB = (b !== undefined) ? (isNaN(b) ? 1 : parseFloat(b)) : 0;
      return varA+varB;
    },
    count : function(a,b) {
      var varA = (a !== undefined) ? parseInt(a) : 0;
      var varB = (b !== undefined) ? 1 : 0;
      return varA+varB;
    },
    min : function(a,b) {
      // NOTE(review): missing/non-numeric operands are coerced to 0 or 1,
      // which can pull the minimum of all-positive data down — confirm this
      // coercion is intended.
      var varA = (a !== undefined) ? (isNaN(a) ? 1 : parseFloat(a)) : 0;
      var varB = (b !== undefined) ? (isNaN(b) ? 1 : parseFloat(b)) : 0;
      return Math.min(varA,varB);
    },
    max : function(a,b) {
      var varA = (a !== undefined) ? (isNaN(a) ? 1 : parseFloat(a)) : 0;
      var varB = (b !== undefined) ? (isNaN(b) ? 1 : parseFloat(b)) : 0;
      return Math.max(varA,varB);
    },
    avg : function(a,b,c) {
      // keeps a running sum only; the division by count happens later when
      // the d3 series are built (see aggrFuncDiv). Parameter c is unused.
      var varA = (a !== undefined) ? (isNaN(a) ? 1 : parseFloat(a)) : 0;
      var varB = (b !== undefined) ? (isNaN(b) ? 1 : parseFloat(b)) : 0;
      return varA+varB;
    }
  };
  // whether the final aggregate must be divided by its count (avg only)
  var aggrFuncDiv = {
    sum : false,
    count : false,
    min : false,
    max : false,
    avg : true
  };
  var schema = {};
  var rows = {};
  for (var i=0; i < data.rows.length; i++) {
    var row = data.rows[i];
    var newRow = {}; // NOTE(review): unused — likely leftover from an earlier version
    var s = schema;  // cursor into the schema tree
    var p = rows;    // cursor into the data tree
    // descend through the key columns, creating schema/row nodes on demand
    for (var k=0; k < keys.length; k++) {
      var key = keys[k];
      // add key to schema
      if (!s[key.name]) {
        s[key.name] = {
          order : k,
          index : key.index,
          type : 'key',
          children : {}
        };
      }
      s = s[key.name].children;
      // add key to row
      var keyKey = row[key.index];
      if (!p[keyKey]) {
        p[keyKey] = {};
      }
      p = p[keyKey];
    }
    // then through the group columns (schema nodes are keyed by cell value)
    for (var g=0; g < groups.length; g++) {
      var group = groups[g];
      var groupKey = row[group.index];
      // add group to schema
      if (!s[groupKey]) {
        s[groupKey] = {
          order : g,
          index : group.index,
          type : 'group',
          children : {}
        };
      }
      s = s[groupKey].children;
      // add key to row
      if (!p[groupKey]) {
        p[groupKey] = {};
      }
      p = p[groupKey];
    }
    // finally fold this row's cells into the leaf aggregates
    for (var v=0; v < values.length; v++) {
      var value = values[v];
      var valueKey = value.name+'('+value.aggr+')';
      // add value to schema
      if (!s[valueKey]) {
        s[valueKey] = {
          type : 'value',
          order : v,
          index : value.index
        };
      }
      // add value to row: first occurrence seeds the aggregate, later rows
      // are folded in through aggrFunc
      if (!p[valueKey]) {
        p[valueKey] = {
          value : (value.aggr !== 'count') ? row[value.index] : 1,
          count: 1
        };
      } else {
        p[valueKey] = {
          value : aggrFunc[value.aggr](p[valueKey].value, row[value.index], p[valueKey].count+1),
          count : (aggrFuncDiv[value.aggr]) ? p[valueKey].count+1 : p[valueKey].count
        };
      }
    }
  }
  //console.log("schema=%o, rows=%o", schema, rows);
  return {
    schema : schema,
    rows : rows
  };
};
// Convert a pivoted {schema, rows} tree (see pivot()) into nvd3's series
// format: an array of {key, values:[{x, y}]} objects.
//   allowTextXAxis    — keep string x values instead of mapping them to indices
//   fillMissingValues — synthesize empty group branches so every series has
//                       a point for every x
//   chartType         — only 'multiBarChart' changes behavior (single series
//                       named 'values' when no key column is selected)
// Also returns xLabels, mapping x index -> original row value.
var pivotDataToD3ChartFormat = function(data, allowTextXAxis, fillMissingValues, chartType) {
  // construct d3 data
  var d3g = [];
  var schema = data.schema;
  var rows = data.rows;
  var values = $scope.paragraph.config.graph.values;
  // join path segments with '.' (used for row and column display names)
  var concat = function(o, n) {
    if (!o) {
      return n;
    } else {
      return o+'.'+n;
    }
  };
  // Build an empty data subtree mirroring the schema below `key`; used to
  // fabricate missing group branches when fillMissingValues is on.
  var getSchemaUnderKey = function(key, s) {
    for (var c in key.children) {
      s[c] = {};
      getSchemaUnderKey(key.children[c], s[c]);
    }
  };
  // Walk schema (s) and data (r) in lockstep, accumulating the row name/value
  // through key nodes and the column name through group nodes; invoke `func`
  // at each value leaf. NOTE: `valueOnly` is referenced here before its
  // declaration below — safe because of var hoisting and because traverse is
  // only called after it is assigned.
  var traverse = function(sKey, s, rKey, r, func, rowName, rowValue, colName) {
    //console.log("TRAVERSE sKey=%o, s=%o, rKey=%o, r=%o, rowName=%o, rowValue=%o, colName=%o", sKey, s, rKey, r, rowName, rowValue, colName);
    if (s.type==='key') {
      rowName = concat(rowName, sKey);
      rowValue = concat(rowValue, rKey);
    } else if (s.type==='group') {
      colName = concat(colName, rKey);
    } else if (s.type==='value' && sKey===rKey || valueOnly) {
      colName = concat(colName, rKey);
      func(rowName, rowValue, colName, r);
    }
    for (var c in s.children) {
      // fabricate an empty branch for groups absent from this row
      if (fillMissingValues && s.children[c].type === 'group' && r[c] === undefined) {
        var cs = {};
        getSchemaUnderKey(s.children[c], cs);
        traverse(c, s.children[c], c, cs, func, rowName, rowValue, colName);
        continue;
      }
      for (var j in r) {
        if (s.children[c].type === 'key' || c === j) {
          traverse(c, s.children[c], j, r[j], func, rowName, rowValue, colName);
        }
      }
    }
  };
  var keys = $scope.paragraph.config.graph.keys;
  var groups = $scope.paragraph.config.graph.groups;
  values = $scope.paragraph.config.graph.values;
  var valueOnly = (keys.length === 0 && groups.length === 0 && values.length > 0);
  var noKey = (keys.length === 0);
  var isMultiBarChart = (chartType === 'multiBarChart');
  var sKey = Object.keys(schema)[0];
  var rowNameIndex = {};    // row value -> numeric x index
  var rowIdx = 0;
  var colNameIndex = {};    // column name -> series index
  var colIdx = 0;
  var rowIndexValue = {};   // numeric x index -> row value (returned as xLabels)
  for (var k in rows) {
    traverse(sKey, schema[sKey], k, rows[k], function(rowName, rowValue, colName, value) {
      //console.log("RowName=%o, row=%o, col=%o, value=%o", rowName, rowValue, colName, value);
      if (rowNameIndex[rowValue] === undefined) {
        rowIndexValue[rowIdx] = rowValue;
        rowNameIndex[rowValue] = rowIdx++;
      }
      if (colNameIndex[colName] === undefined) {
        colNameIndex[colName] = colIdx++;
      }
      var i = colNameIndex[colName];
      // keyless multiBarChart collapses everything into one series
      if (noKey && isMultiBarChart) {
        i = 0;
      }
      if (!d3g[i]) {
        d3g[i] = {
          values : [],
          key : (noKey && isMultiBarChart) ? 'values' : colName
        };
      }
      // numeric rows plot by value; string rows plot by label or index
      var xVar = isNaN(rowValue) ? ((allowTextXAxis) ? rowValue : rowNameIndex[rowValue]) : parseFloat(rowValue);
      var yVar = 0;
      if (xVar === undefined) { xVar = colName; }
      if (value !== undefined) {
        // value.count is 1 except for avg aggregates (see pivot()), so this
        // division finalizes the average here
        yVar = isNaN(value.value) ? 0 : parseFloat(value.value) / parseFloat(value.count);
      }
      d3g[i].values.push({
        x : xVar,
        y : yVar
      });
    });
  }
  // clear aggregation name, if possible: strip the "(aggr)" suffix from a
  // series name when no other series shares the bare column name
  var namesWithoutAggr = {};
  var colName;
  var withoutAggr;
  // TODO - This part could use som refactoring - Weird if/else with similar actions and variable names
  for (colName in colNameIndex) {
    withoutAggr = colName.substring(0, colName.lastIndexOf('('));
    if (!namesWithoutAggr[withoutAggr]) {
      namesWithoutAggr[withoutAggr] = 1;
    } else {
      namesWithoutAggr[withoutAggr]++;
    }
  }
  if (valueOnly) {
    // values-only mode: the x of each point carries the column name
    for (var valueIndex = 0; valueIndex < d3g[0].values.length; valueIndex++) {
      colName = d3g[0].values[valueIndex].x;
      if (!colName) {
        continue;
      }
      withoutAggr = colName.substring(0, colName.lastIndexOf('('));
      if (namesWithoutAggr[withoutAggr] <= 1 ) {
        d3g[0].values[valueIndex].x = withoutAggr;
      }
    }
  } else {
    for (var d3gIndex = 0; d3gIndex < d3g.length; d3gIndex++) {
      colName = d3g[d3gIndex].key;
      withoutAggr = colName.substring(0, colName.lastIndexOf('('));
      if (namesWithoutAggr[withoutAggr] <= 1 ) {
        d3g[d3gIndex].key = withoutAggr;
      }
    }
    // use group name instead of group.value as a column name, if there're only one group and one value selected.
    if (groups.length === 1 && values.length === 1) {
      for (d3gIndex = 0; d3gIndex < d3g.length; d3gIndex++) {
        colName = d3g[d3gIndex].key;
        colName = colName.split('.')[0];
        d3g[d3gIndex].key = colName;
      }
    }
  }
  return {
    xLabels : rowIndexValue,
    d3g : d3g
  };
};
// Collapse rows that share the same (x, y, group) triple into one row whose
// occurrence count is appended as an extra trailing cell (at index
// data.rows[0].length). Used when all plotted scatter axes are discrete, so
// bubble size can encode frequency.
var setDiscreteScatterData = function(data) {
  var xAxis = $scope.paragraph.config.graph.scatter.xAxis;
  var yAxis = $scope.paragraph.config.graph.scatter.yAxis;
  var group = $scope.paragraph.config.graph.scatter.group;
  var xValue;
  var yValue;
  var grp;
  var rows = {};
  for (var i = 0; i < data.rows.length; i++) {
    var row = data.rows[i];
    if (xAxis) {
      xValue = row[xAxis.index];
    }
    if (yAxis) {
      yValue = row[yAxis.index];
    }
    if (group) {
      grp = row[group.index];
    }
    // deduplicate by the stringified triple; size counts occurrences
    var key = xValue + ',' + yValue + ',' + grp;
    if(!rows[key]) {
      rows[key] = {
        x : xValue,
        y : yValue,
        group : grp,
        size : 1
      };
    } else {
      rows[key].size++;
    }
  }
  // change object into array
  var newRows = [];
  for(var r in rows){
    var newRow = [];
    if (xAxis) { newRow[xAxis.index] = rows[r].x; }
    if (yAxis) { newRow[yAxis.index] = rows[r].y; }
    if (group) { newRow[group.index] = rows[r].group; }
    // the count goes one past the original row width
    newRow[data.rows[0].length] = rows[r].size;
    newRows.push(newRow);
  }
  return newRows;
};
// Build nvd3 scatter-chart series from the table data using the configured
// scatter options (xAxis/yAxis/group/size). Returns {xLabels, yLabels, d3g}
// where the label maps translate tick indices back to the original discrete
// values. `refresh` is currently unused inside this function.
var setScatterChart = function(data, refresh) {
  var xAxis = $scope.paragraph.config.graph.scatter.xAxis;
  var yAxis = $scope.paragraph.config.graph.scatter.yAxis;
  var group = $scope.paragraph.config.graph.scatter.group;
  var size = $scope.paragraph.config.graph.scatter.size;
  var xValues = [];
  var yValues = [];
  var rows = {};
  var d3g = [];
  // index maps for discrete axis values and series (group) names
  var rowNameIndex = {};
  var colNameIndex = {};
  var grpNameIndex = {};
  var rowIndexValue = {};
  var colIndexValue = {};
  var grpIndexValue = {};
  var rowIdx = 0;
  var colIdx = 0;
  var grpIdx = 0;
  var grpName = '';
  var xValue;
  var yValue;
  var row;
  // nothing to plot without at least one axis
  if (!xAxis && !yAxis) {
    return {
      d3g : []
    };
  }
  // first pass: collect the axis columns to decide discrete vs continuous
  for (var i = 0; i < data.rows.length; i++) {
    row = data.rows[i];
    if (xAxis) {
      xValue = row[xAxis.index];
      xValues[i] = xValue;
    }
    if (yAxis) {
      yValue = row[yAxis.index];
      yValues[i] = yValue;
    }
  }
  var isAllDiscrete = ((xAxis && yAxis && isDiscrete(xValues) && isDiscrete(yValues)) ||
    (!xAxis && isDiscrete(yValues)) ||
    (!yAxis && isDiscrete(xValues)));
  // all-discrete data is deduplicated; bubble size then encodes frequency
  if (isAllDiscrete) {
    rows = setDiscreteScatterData(data);
  } else {
    rows = data.rows;
  }
  // choose the legend/series name when no explicit group column is set
  if (!group && isAllDiscrete) {
    grpName = 'count';
  } else if (!group && !size) {
    if (xAxis && yAxis) {
      grpName = '(' + xAxis.name + ', ' + yAxis.name + ')';
    } else if (xAxis && !yAxis) {
      grpName = xAxis.name;
    } else if (!xAxis && yAxis) {
      grpName = yAxis.name;
    }
  } else if (!group && size) {
    grpName = size.name;
  }
  // second pass: build one series per group and index discrete axis values
  for (i = 0; i < rows.length; i++) {
    row = rows[i];
    if (xAxis) {
      xValue = row[xAxis.index];
    }
    if (yAxis) {
      yValue = row[yAxis.index];
    }
    if (group) {
      grpName = row[group.index];
    }
    // discrete data carries its count in the appended trailing cell
    var sz = (isAllDiscrete) ? row[row.length-1] : ((size) ? row[size.index] : 1);
    if (grpNameIndex[grpName] === undefined) {
      grpIndexValue[grpIdx] = grpName;
      grpNameIndex[grpName] = grpIdx++;
    }
    if (xAxis && rowNameIndex[xValue] === undefined) {
      rowIndexValue[rowIdx] = xValue;
      rowNameIndex[xValue] = rowIdx++;
    }
    if (yAxis && colNameIndex[yValue] === undefined) {
      colIndexValue[colIdx] = yValue;
      colNameIndex[yValue] = colIdx++;
    }
    if (!d3g[grpNameIndex[grpName]]) {
      d3g[grpNameIndex[grpName]] = {
        key : grpName,
        values : []
      };
    }
    // non-numeric values plot at their assigned index; numeric ones by value
    d3g[grpNameIndex[grpName]].values.push({
      x : xAxis ? (isNaN(xValue) ? rowNameIndex[xValue] : parseFloat(xValue)) : 0,
      y : yAxis ? (isNaN(yValue) ? colNameIndex[yValue] : parseFloat(yValue)) : 0,
      size : isNaN(parseFloat(sz))? 1 : parseFloat(sz)
    });
  }
  return {
    xLabels : rowIndexValue,
    yLabels : colIndexValue,
    d3g : d3g
  };
};
// Heuristic: a column is treated as discrete (categorical) when it contains
// any non-numeric string, or when fewer than 5% of its values are distinct.
var isDiscrete = function(field) {
  var i;
  // any non-numeric string makes the whole column discrete
  for (i = 0; i < field.length; i++) {
    var v = field[i];
    if (isNaN(parseFloat(v)) && (typeof v === 'string' || v instanceof String)) {
      return true;
    }
  }
  // otherwise, count distinct values (object keys coerce items to strings,
  // matching the original behavior) and compare against the 5% threshold
  var seen = {};
  var distinctCount = 0;
  for (i = 0; i < field.length; i++) {
    if (seen[field[i]] !== 1) {
      seen[field[i]] = 1;
      distinctCount++;
    }
  }
  var DISCRETE_RATIO_THRESHOLD = 0.05;
  return distinctCount / field.length < DISCRETE_RATIO_THRESHOLD;
};
// A scatter "size" option is valid only when every size cell is a finite
// number and the plotted axes are not all discrete (discrete scatter data
// already uses bubble size to encode frequency).
$scope.isValidSizeOption = function (options, rows) {
  var xs = [];
  var ys = [];
  for (var r = 0; r < rows.length; r++) {
    var currentRow = rows[r];
    var sizeValue = currentRow[options.size.index];
    // reject any non-numeric or infinite size field
    if (isNaN(parseFloat(sizeValue)) || !isFinite(sizeValue)) {
      return false;
    }
    if (options.xAxis) {
      xs[r] = currentRow[options.xAxis.index];
    }
    if (options.yAxis) {
      ys[r] = currentRow[options.yAxis.index];
    }
  }
  // reject when every configured axis is discrete
  var allDiscrete = ((options.xAxis && options.yAxis && isDiscrete(xs) && isDiscrete(ys)) ||
    (!options.xAxis && isDiscrete(ys)) ||
    (!options.yAxis && isDiscrete(xs)));
  return !allDiscrete;
};
// Handle a paragraph resize. A column-width change is committed first and the
// height applied only after the re-layout settles; a height-only change is
// applied immediately.
$scope.resizeParagraph = function(width, height) {
  if ($scope.paragraph.config.colWidth !== width) {
    $scope.paragraph.config.colWidth = width;
    $scope.changeColWidth();
    // wait for the new column layout before adjusting editor/graph heights
    $timeout(function() {
      autoAdjustEditorHeight($scope.paragraph.id + '_editor');
      $scope.changeHeight(height);
    }, 200);
  } else {
    $scope.changeHeight(height);
  }
};
// Persist a new graph height by committing a copy of the paragraph's config
// (copies avoid mutating the live config before the commit round-trips).
$scope.changeHeight = function(height) {
  var newParams = angular.copy($scope.paragraph.settings.params);
  var newConfig = angular.copy($scope.paragraph.config);
  newConfig.graph.height = height;
  commitParagraph($scope.paragraph.title, $scope.paragraph.text, newConfig, newParams);
};
/** Utility function */
// Polyfill String.prototype.startsWith for browsers without ES6 support.
if (typeof String.prototype.startsWith !== 'function') {
  String.prototype.startsWith = function(str) {
    return this.slice(0, str.length) === str;
  };
}
// Open this paragraph alone in a new window (iframe mode), using the current
// notebook id from the route and this paragraph's id.
$scope.goToSingleParagraph = function () {
  var noteId = $route.current.pathParams.noteId;
  var redirectToUrl = location.protocol + '//' + location.host + location.pathname + '#/notebook/' + noteId + '/paragraph/' + $scope.paragraph.id+'?asIframe';
  $window.open(redirectToUrl);
};
});
| apache-2.0 |
jetdario/hrms | src/java/com/openhris/timekeeping/AttendancePolicyWindow.java | 10769 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.openhris.timekeeping;
import com.openhris.commons.DropDownComponent;
import com.openhris.commons.OpenHrisUtilities;
import com.vaadin.data.Item;
import com.vaadin.data.Property;
import com.vaadin.ui.Button;
import com.vaadin.ui.ComboBox;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.Window;
import java.text.DecimalFormat;
/**
 * Modal popup that lets the user change the attendance policy (holiday,
 * working-holiday, working-day-off, or none) of a single timekeeping row.
 * The selected policy and holiday type are written back into the given
 * Vaadin {@code Item}, and the derived premium-pay columns ({@code wdo},
 * {@code sholiday}, {@code lholiday}, {@code psday}) are recomputed as the
 * user changes the selection.
 *
 * @author jetdario
 */
public class AttendancePolicyWindow extends Window {

    OpenHrisUtilities utilities = new OpenHrisUtilities();
    TimekeepingComputation computation = new TimekeepingComputation();
    DropDownComponent dropDown = new DropDownComponent();

    // Inputs captured from the calling timekeeping view.
    String[] holidayList;        // selectable holiday type names
    Item item;                   // the table row whose pay columns are updated in place
    String employmentWageEntry;  // wage basis (e.g. "daily"); see the holidayType listener
    double employmentWage;
    double cola;                 // cost-of-living allowance

    // NOTE(review): DecimalFormat uses the default locale; in locales whose
    // decimal separator is ',' the later new Double(df.format(...)) calls
    // would throw NumberFormatException — confirm the deployment locale.
    DecimalFormat df = new DecimalFormat("0.00");

    /**
     * @param holidayList         selectable holiday type names
     * @param item                the timekeeping row to update in place
     * @param employmentWageEntry wage basis of the employee (e.g. "daily")
     * @param employmentWage      base wage amount
     * @param cola                cost-of-living allowance
     */
    public AttendancePolicyWindow(String[] holidayList,
            Item item,
            String employmentWageEntry,
            double employmentWage,
            double cola) {
        this.holidayList = holidayList;
        this.item = item;
        this.employmentWageEntry = employmentWageEntry;
        this.employmentWage = employmentWage;
        this.cola = cola;

        setCaption("CHANGE POLICY");
        setWidth("225px");
        setModal(true);
        center();

        addComponent(getVlayout());
    }

    /**
     * Builds the window content: a policy drop-down, a holiday-type drop-down
     * (shown only for holiday-related policies), and an update button that
     * writes the selection back into the row and closes the window.
     */
    VerticalLayout getVlayout(){
        VerticalLayout vlayout = new VerticalLayout();
        vlayout.setSpacing(true);

        final ComboBox policy = dropDown.populateAttendancePolicyDropDownList(new ComboBox());
        policy.setWidth("100%");
        policy.setNullSelectionAllowed(true);
        policy.setImmediate(true);
        vlayout.addComponent(policy);

        final ComboBox holidayType = new ComboBox("Type: ");
        holidayType.setWidth("100%");
        holidayType.setNullSelectionAllowed(false);
        holidayType.setVisible(false);
        holidayType.setImmediate(true);
        for(String temp : holidayList){
            holidayType.addItem(temp);
        }

        // Policy selection: reset all premium columns, then show the holiday
        // type selector for holiday-related policies. For working-day-off the
        // base day-off premium can be computed immediately.
        policy.addListener(new Property.ValueChangeListener() {

            @Override
            public void valueChange(Property.ValueChangeEvent event) {
                if(event.getProperty().getValue() == null){
                    // selection cleared: zero the premiums and hide/reset types
                    item.getItemProperty("wdo").setValue(0.0);
                    item.getItemProperty("sholiday").setValue(0.0);
                    item.getItemProperty("lholiday").setValue(0.0);
                    item.getItemProperty("psday").setValue(0.0);

                    holidayType.setVisible(false);
                    holidayType.removeAllItems();
                    for(String temp : holidayList){
                        holidayType.addItem(temp);
                    }
                } else if(event.getProperty().getValue().toString().equals("holiday") ||
                        event.getProperty().getValue().toString().equals("working-holiday")){
                    item.getItemProperty("wdo").setValue(0.0);
                    item.getItemProperty("sholiday").setValue(0.0);
                    item.getItemProperty("lholiday").setValue(0.0);
                    item.getItemProperty("psday").setValue(0.0);

                    holidayType.setVisible(true);
                } else if(event.getProperty().getValue().toString().equals("working-day-off")){
                    item.getItemProperty("wdo").setValue(0.0);
                    item.getItemProperty("sholiday").setValue(0.0);
                    item.getItemProperty("lholiday").setValue(0.0);
                    item.getItemProperty("psday").setValue(0.0);

                    holidayType.setVisible(true);

                    double additionalWorkingDayOffPay = computation.processAdditionalWorkingDayOff(getEmploymentWage(), getEmploymentWageEntry());
                    // NOTE(review): df.format(...) stores a String here while the
                    // other premium columns receive Double values — confirm the
                    // container's property type tolerates both.
                    item.getItemProperty("wdo").setValue(df.format(additionalWorkingDayOffPay));
                } else{
                    item.getItemProperty("wdo").setValue(0.0);
                    item.getItemProperty("sholiday").setValue(0.0);
                    item.getItemProperty("lholiday").setValue(0.0);
                    item.getItemProperty("psday").setValue(0.0);

                    holidayType.removeAllItems();
                    for(String temp : holidayList){
                        holidayType.addItem(temp);
                    }
                    holidayType.setVisible(false);
                }
            }
        });

        // Holiday-type selection: recompute the premium columns according to
        // the currently selected policy and whether the holiday is legal or
        // special.
        holidayType.addListener(new Property.ValueChangeListener() {

            @Override
            public void valueChange(Property.ValueChangeEvent event) {
                double additionalHolidayPay;
                double additionalWorkingDayOffPay;
                double multiplePremiumPay;

                if(policy.getValue() == null){
                    // no policy selected: nothing to recompute
                } else if(policy.getValue().equals("working-holiday")){
                    // worked on a holiday: pay goes into lholiday or sholiday
                    item.getItemProperty("psday").setValue(0.0);

                    if(event.getProperty().getValue().toString().equals("legal-holiday")){
                        additionalHolidayPay = computation.processAdditionalHolidayPay(
                                event.getProperty().getValue().toString(),
                                getEmploymentWage(),
                                getCola());
                        item.getItemProperty("lholiday").setValue(new Double(df.format(additionalHolidayPay)));
                        item.getItemProperty("sholiday").setValue(0.0);
                    }else{
                        additionalHolidayPay = computation.processAdditionalHolidayPay(
                                event.getProperty().getValue().toString(),
                                getEmploymentWage(),
                                getCola());
                        item.getItemProperty("sholiday").setValue(new Double(df.format(additionalHolidayPay)));
                        item.getItemProperty("lholiday").setValue(0.0);
                    }
                }else if(policy.getValue().equals("holiday")){
                    // unworked holiday: only daily-paid employees on a legal
                    // holiday receive the paid-holiday (psday) amount
                    item.getItemProperty("lholiday").setValue(0.0);
                    item.getItemProperty("sholiday").setValue(0.0);

                    if(event.getProperty().getValue().toString().equals("legal-holiday")){
                        if(getEmploymentWageEntry().equals("daily")){
                            additionalHolidayPay = computation.processAdditionalHolidayPay(
                                    event.getProperty().getValue().toString(),
                                    getEmploymentWage(),
                                    getCola());
                            item.getItemProperty("psday").setValue(new Double(df.format(additionalHolidayPay)));
                        } else {
                            item.getItemProperty("psday").setValue(0.0);
                        }
                    }else{
                        item.getItemProperty("psday").setValue(0.0);
                    }
                } else if(policy.getValue().equals("working-day-off")) {
                    // worked on a rest day that is also a holiday: day-off
                    // premium plus a multiple-premium amount per holiday kind
                    if(event.getProperty().getValue() == null){
                        item.getItemProperty("sholiday").setValue(0.0);
                        item.getItemProperty("lholiday").setValue(0.0);
                        item.getItemProperty("psday").setValue(0.0);
                    } else if(event.getProperty().getValue().equals("legal-holiday")){
                        additionalWorkingDayOffPay = computation.processAdditionalWorkingDayOff(getEmploymentWage(), getEmploymentWageEntry());
                        item.getItemProperty("wdo").setValue(df.format(additionalWorkingDayOffPay));

                        multiplePremiumPay = computation.processMultiplePremiumPay(event.getProperty().getValue().toString(), getEmploymentWage());
                        item.getItemProperty("lholiday").setValue(multiplePremiumPay);
                        item.getItemProperty("sholiday").setValue(0.0);
                    } else {
                        additionalWorkingDayOffPay = computation.processAdditionalWorkingDayOff(getEmploymentWage(), getEmploymentWageEntry());
                        item.getItemProperty("wdo").setValue(df.format(additionalWorkingDayOffPay));

                        multiplePremiumPay = computation.processMultiplePremiumPay(event.getProperty().getValue().toString(), getEmploymentWage());
                        item.getItemProperty("sholiday").setValue(multiplePremiumPay);
                        item.getItemProperty("lholiday").setValue(0.0);
                    }
                }
            }
        });
        vlayout.addComponent(holidayType);

        Button button = new Button("UPDATE POLICY");
        button.setWidth("100%");
        // Validate (holiday policies require a holiday type), write the
        // selection back into the row, and close the popup.
        button.addListener(new Button.ClickListener() {

            @Override
            public void buttonClick(Button.ClickEvent event) {
                String policyStr;
                if(policy.getValue() == null || policy.getValue().toString().isEmpty()){
                    policyStr = "";
                }else{
                    policyStr = policy.getValue().toString();
                }

                if(policyStr.equals("holiday") || policyStr.equals("working-holiday")){
                    if(holidayType.getValue() == null){
                        getWindow().showNotification("Select a Holiday type!", Window.Notification.TYPE_ERROR_MESSAGE);
                        return;
                    }
                }

                item.getItemProperty("policy").setValue(policyStr);
                item.getItemProperty("holidays").setValue(holidayType.getValue());
                close();
            }
        });
        vlayout.addComponent(button);

        return vlayout;
    }

    public String[] getHolidayList() {
        return holidayList;
    }

    public Item getItem() {
        return item;
    }

    public String getEmploymentWageEntry() {
        return employmentWageEntry;
    }

    public double getEmploymentWage() {
        return employmentWage;
    }

    public double getCola() {
        return cola;
    }
}
| apache-2.0 |
ideastorm/formula-w | server/chat.js | 1032 | /*
* Copyright 2015 phil.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
module.exports.bind = function (socket, io, players) {
socket.on('chatMessage', function (message) {
if (socket.userId) {
var player = players.lookup(socket.userId);
console.log(player + ": " + message);
var gameId = socket.gameId;
if (gameId) {
io.to(gameId).emit("chatMessage", {from: player, message: message});
}
}
});
}
| apache-2.0 |
PayPal-Opportunity-Hack-Chennai-2015/AIDE-At-Action | aeaadmin/src/main/java/org/aea/entity/Ngo.java | 3113 | package org.aea.entity;
import java.io.Serializable;
import javax.persistence.*;
import java.util.Set;
/**
 * The persistent class for the ngo database table.
 *
 * Bidirectional one-to-many association with {@link FamilyTransfer} on two
 * distinct sides ({@code ngo1}/{@code ngo2}); the add/remove helpers below
 * keep both ends of each association in sync.
 */
@Entity
@Table(name = "ngo")
@NamedQuery(name = "Ngo.findAll", query = "SELECT n FROM Ngo n")
public class Ngo implements Serializable {
	private static final long serialVersionUID = 1L;

	@Id
	@GeneratedValue(strategy = GenerationType.AUTO)
	@Column(unique = true, nullable = false)
	private int id;

	// NOTE(review): mapped as a plain int — presumably a foreign key to an
	// address table rather than an embedded address; confirm the schema.
	private int address;

	private String email;

	// NOTE(review): stored as a plain String column; ensure the value is
	// hashed before persisting (nothing in this entity does so).
	private String password;

	@Column(length = 255)
	private String logo;

	@Column(length = 45)
	private String name;

	// bi-directional many-to-one association to FamilyTransfer
	@OneToMany(mappedBy = "ngo1", fetch = FetchType.EAGER)
	private Set<FamilyTransfer> familyTransfers1;

	// bi-directional many-to-one association to FamilyTransfer
	@OneToMany(mappedBy = "ngo2", fetch = FetchType.EAGER)
	private Set<FamilyTransfer> familyTransfers2;

	public Ngo() {}

	public int getId() {
		return this.id;
	}

	public void setId(int id) {
		this.id = id;
	}

	public int getAddress() {
		return this.address;
	}

	public void setAddress(int address) {
		this.address = address;
	}

	public String getLogo() {
		return this.logo;
	}

	public void setLogo(String logo) {
		this.logo = logo;
	}

	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Set<FamilyTransfer> getFamilyTransfers1() {
		return this.familyTransfers1;
	}

	public void setFamilyTransfers1(Set<FamilyTransfer> familyTransfers1) {
		this.familyTransfers1 = familyTransfers1;
	}

	/** Adds a transfer to the ngo1 side and sets its back-reference. */
	public FamilyTransfer addFamilyTransfers1(FamilyTransfer familyTransfers1) {
		getFamilyTransfers1().add(familyTransfers1);
		familyTransfers1.setNgo1(this);

		return familyTransfers1;
	}

	/** Removes a transfer from the ngo1 side and clears its back-reference. */
	public FamilyTransfer removeFamilyTransfers1(FamilyTransfer familyTransfers1) {
		getFamilyTransfers1().remove(familyTransfers1);
		familyTransfers1.setNgo1(null);

		return familyTransfers1;
	}

	public Set<FamilyTransfer> getFamilyTransfers2() {
		return this.familyTransfers2;
	}

	public void setFamilyTransfers2(Set<FamilyTransfer> familyTransfers2) {
		this.familyTransfers2 = familyTransfers2;
	}

	/** Adds a transfer to the ngo2 side and sets its back-reference. */
	public FamilyTransfer addFamilyTransfers2(FamilyTransfer familyTransfers2) {
		getFamilyTransfers2().add(familyTransfers2);
		familyTransfers2.setNgo2(this);

		return familyTransfers2;
	}

	/** Removes a transfer from the ngo2 side and clears its back-reference. */
	public FamilyTransfer removeFamilyTransfers2(FamilyTransfer familyTransfers2) {
		getFamilyTransfers2().remove(familyTransfers2);
		familyTransfers2.setNgo2(null);

		return familyTransfers2;
	}

	public String getEmail() {
		return email;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}
}
| apache-2.0 |
gawkermedia/googleads-java-lib | modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201509/cm/AdGroupCriterionPage.java | 5001 | /**
* AdGroupCriterionPage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201509.cm;
/**
 * Contains a subset of adgroup criteria resulting from a
 * {@link AdGroupCriterionService#get} call.
 *
 * NOTE(review): this class is generated by Axis WSDL2Java (see the file
 * header); hand-edits will be lost on regeneration. The unusual
 * {@code __equalsCalc}/{@code __hashCodeCalc} guards below are the
 * generator's protection against infinite recursion on cyclic object
 * graphs and are not thread-safe beyond the method-level synchronization.
 */
public class AdGroupCriterionPage extends com.google.api.ads.adwords.axis.v201509.cm.Page implements java.io.Serializable {
    /* The result entries in this page. */
    private com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion[] entries;

    public AdGroupCriterionPage() {
    }

    public AdGroupCriterionPage(
           java.lang.Integer totalNumEntries,
           java.lang.String pageType,
           com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion[] entries) {
        super(
            totalNumEntries,
            pageType);
        this.entries = entries;
    }


    /**
     * Gets the entries value for this AdGroupCriterionPage.
     *
     * @return entries * The result entries in this page.
     */
    public com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion[] getEntries() {
        return entries;
    }


    /**
     * Sets the entries value for this AdGroupCriterionPage.
     *
     * @param entries * The result entries in this page.
     */
    public void setEntries(com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion[] entries) {
        this.entries = entries;
    }

    public com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion getEntries(int i) {
        return this.entries[i];
    }

    public void setEntries(int i, com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterion _value) {
        this.entries[i] = _value;
    }

    // Re-entrancy guard used by equals() to terminate on cyclic references.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof AdGroupCriterionPage)) return false;
        AdGroupCriterionPage other = (AdGroupCriterionPage) obj;
        // NOTE(review): dead code — a null obj already failed the instanceof
        // check above; left as-is because the file is generated.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) && 
            ((this.entries==null && other.getEntries()==null) || 
             (this.entries!=null &&
              java.util.Arrays.equals(this.entries, other.getEntries())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard used by hashCode() to terminate on cyclic references.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getEntries() != null) {
            // array-typed elements are skipped; only non-array members
            // contribute to the hash
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getEntries());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getEntries(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(AdGroupCriterionPage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "AdGroupCriterionPage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("entries");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "entries"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "AdGroupCriterion"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
| apache-2.0 |
kibertoad/swampmachine | swampmachine-loaders/src/main/java/net/kiberion/swampmachine/assets/readers/AbstractFileReader.java | 1307 | package net.kiberion.swampmachine.assets.readers;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
/**
 * Base class for file readers that resolve all relative lookups against a
 * fixed base directory. Concrete subclasses supply the actual file-system
 * (or archive) access.
 */
public abstract class AbstractFileReader {

    /** Root directory that every relative path is resolved against. */
    protected final Path baseDir;

    public AbstractFileReader(Path baseDir) {
        this.baseDir = baseDir;
    }

    protected Path getBaseDir() {
        return baseDir;
    }

    /**
     * Lists files under the given absolute path whose extensions match one of
     * the supplied wildcards.
     */
    public abstract List<Path> getListOfFilesByWildcard (Path path, Set<String> wildcardFileExtensions) throws IOException;

    /** Same as {@link #getListOfFilesByWildcard}, but relative to the base directory. */
    public List<Path> getListOfRelativeFilesByWildcard(Path path, Set<String> wildcardFileExtensions)
            throws IOException {
        Path absolute = baseDir.resolve(path);
        return getListOfFilesByWildcard (absolute, wildcardFileExtensions);
    }

    /** String-path convenience overload; delegates to the {@link Path} variant. */
    public List<Path> getListOfRelativeFilesByWildcard(String path, Set<String> wildcardFileExtensions)
            throws IOException {
        return getListOfRelativeFilesByWildcard(Paths.get(path), wildcardFileExtensions);
    }

    /** Opens the file at the given absolute path as a stream. */
    public abstract InputStream getFileAsStream (Path path) throws IOException;

    /** Checks existence of an absolute path. */
    public abstract boolean fileExists (Path path);

    /** Checks existence of a path relative to the base directory. */
    public boolean relativeFileExists (Path path) {
        return fileExists(baseDir.resolve(path));
    }
}
| apache-2.0 |
patjdor/amber | src/amber/models/GridModel.cpp | 590 | /*
* GridModel.cpp
*
* Created on: Nov 10, 2014
* Author: pdoran
*/
#include <models/GridModel.h>
#include <GL/gl.h>
namespace amber {
// Default-constructed grid model; all geometry is generated in render().
GridModel::GridModel()
{
}

// Nothing to release: the model owns no GL resources.
GridModel::~GridModel() {
}
// Draws an 11x11 line grid spanning [-5, 5] on the z = 0 plane using
// immediate-mode OpenGL, with lighting disabled and lines in black.
//
// Fix: the original nested a second loop over j whose body never used j, so
// every grid line was emitted 11 times. One loop over i produces the
// identical image with 1/11th of the vertices.
void GridModel::render() const {
    glPushAttrib(GL_ENABLE_BIT);
    glDisable(GL_LIGHTING);
    glColor3f(0, 0, 0);

    glBegin(GL_LINES);
    for (int i = -5; i <= 5; ++i) {
        // Vertical line at x = i.
        glVertex3f(i, -5, 0);
        glVertex3f(i, 5, 0);
        // Horizontal line at y = i.
        glVertex3f(5, i, 0);
        glVertex3f(-5, i, 0);
    }
    glEnd(); // GL_LINES

    glPopAttrib(); // GL_ENABLE_BIT
}
} /* namespace amber */
| apache-2.0 |
liupeng3425/learnJava | src/gui/PrintFontsName.java | 498 | package gui;
import java.awt.*;
/**
* Created by Peng on 2016/1/24.
* 打印系统所有字体名字。
*/
/**
 * Prints every font family name available in the local graphics environment,
 * one per line, followed by the total count.
 */
public class PrintFontsName {
    public static void main(String[] args) {
        GraphicsEnvironment env = GraphicsEnvironment.getLocalGraphicsEnvironment();
        String[] families = env.getAvailableFontFamilyNames();
        for (int i = 0; i < families.length; i++) {
            System.out.println(families[i]);
        }
        System.out.println(families.length);
    }
}
| apache-2.0 |
xutao1989103/preceLess | src/main/java/com/tianyu/jty/wechat/msg/VideoMsg.java | 1619 | package com.tianyu.jty.wechat.msg;
import com.tianyu.jty.wechat.util.MessageBuilder;
/**
* Created by xtao on 2015/11/24.
*/
/**
 * WeChat video reply message: wraps an uploaded media id plus optional title
 * and description, and renders itself as the platform's XML payload.
 */
public class VideoMsg extends BaseMsg {

    /** Media id of the uploaded video asset. */
    private String mediaId;
    /** Optional display title. */
    private String title;
    /** Optional description text. */
    private String description;

    public VideoMsg(String mediaId) {
        this.mediaId = mediaId;
    }

    public VideoMsg(String mediaId, String title, String description) {
        this.mediaId = mediaId;
        this.title = title;
        this.description = description;
    }

    public String getMediaId() {
        return mediaId;
    }

    public void setMediaId(String mediaId) {
        this.mediaId = mediaId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Serializes this message to the WeChat XML format, appending the
     * {@code <Video>} element to the fields produced by the superclass.
     */
    @Override
    public String toXml() {
        MessageBuilder builder = new MessageBuilder(super.toXml());
        builder.addData("MsgType", RespType.VIDEO);
        builder.append("<Video>\n");
        builder.addData("MediaId", mediaId);
        builder.addData("Title", title);
        builder.addData("Description", description);
        builder.append("</Video>\n");
        builder.surroundWith("xml");
        return builder.toString();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("VideoMsg [mediaId=");
        sb.append(mediaId)
          .append(", title=").append(title)
          .append(", description=").append(description)
          .append("]");
        return sb.toString();
    }
}
| apache-2.0 |
YukaiXin/Domes | app/src/main/java/com/kxyu/domes/okhttp/request/CountingRequestBody.java | 2009 | package com.kxyu.domes.okhttp.request;
import java.io.IOException;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okio.Buffer;
import okio.BufferedSink;
import okio.ForwardingSink;
import okio.Okio;
import okio.Sink;
/**
* Decorates an OkHttp request body to count the number of bytes written when writing it. Can
* decorate any request body, but is most useful for tracking the upload progress of large
* multipart requests.
*
* @author Leo Nikkilä
*/
public class CountingRequestBody extends RequestBody
{
    /** Wrapped request body that actually produces the bytes. */
    protected RequestBody delegate;
    /** Callback notified with the running byte count after every write. */
    protected Listener listener;
    /** Counting sink wrapper; recreated on each writeTo() call. */
    protected CountingSink countingSink;

    public CountingRequestBody(RequestBody delegate, Listener listener)
    {
        this.delegate = delegate;
        this.listener = listener;
    }

    /** Content type is delegated unchanged. */
    @Override
    public MediaType contentType()
    {
        return delegate.contentType();
    }

    // Returns the delegate's length, or -1 when it cannot be determined
    // (presumably -1 means "unknown length" to the HTTP client — TODO confirm
    // against okhttp3.RequestBody docs).
    @Override
    public long contentLength()
    {
        try
        {
            return delegate.contentLength();
        } catch (IOException e)
        {
            e.printStackTrace();
        }
        return -1;
    }

    // Wraps the destination sink in a CountingSink so the listener observes
    // progress while the delegate writes its bytes.
    @Override
    public void writeTo(BufferedSink sink) throws IOException
    {
        countingSink = new CountingSink(sink);
        BufferedSink bufferedSink = Okio.buffer(countingSink);
        delegate.writeTo(bufferedSink);
        // Flush so all buffered bytes are pushed through the counter.
        bufferedSink.flush();
    }

    /** Forwarding sink that accumulates the number of bytes written. */
    protected final class CountingSink extends ForwardingSink
    {
        private long bytesWritten = 0;

        public CountingSink(Sink delegate)
        {
            super(delegate);
        }

        @Override
        public void write(Buffer source, long byteCount) throws IOException
        {
            super.write(source, byteCount);
            bytesWritten += byteCount;
            // Report cumulative progress after each chunk.
            listener.onRequestProgress(bytesWritten, contentLength());
        }
    }

    /** Progress callback invoked after each successful write. */
    public static interface Listener
    {
        public void onRequestProgress(long bytesWritten, long contentLength);
    }
}
Learnosity/L15 | test/simple.js | 2039 | /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil; tab-width: 2 -*- */
/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */
/*
* Copyright 2013 Art Compiler LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Configure RequireJS for Node: delegate native modules to Node's require and
// map the 'l15' prefix onto the project root.
var requirejs = require('requirejs');
requirejs.config({
  nodeRequire: require,
  paths: {
    l15: '..'
  }
});

requirejs(['l15/lib/trace', 'l15/src/ast', 'l15/src/model'], function (trace, Ast, Model) {

  var TEST = true;

  // Example plugin function
  // Structural equality over interned AST nodes: nodes are equal when they
  // intern to the same id, or — for two-argument nodes with the same operator —
  // when their operands intern pairwise-swapped (commutative match).
  // NOTE(review): the swap test assumes the operator is commutative — confirm
  // callers only rely on this for operators like + and *.
  Model.fn.isEqual = function isEqual(n1, n2) {
    var nid1 = this.intern(n1);
    var nid2 = this.intern(n2);
    if (nid1 === nid2) {
      return true;
    }
    if (n1.op===n2.op && n1.args.length===n2.args.length) {
      if (n1.args.length===2) {
        var n1arg0 = this.intern(n1.args[0]);
        var n1arg1 = this.intern(n1.args[1]);
        var n2arg0 = this.intern(n2.args[0]);
        var n2arg1 = this.intern(n2.args[1]);
        if (n1arg0===n2arg1 && n1arg1===n2arg0) {
          return true;
        }
      }
    }
    return false;
  }

  // Smoke test: a node must compare equal to itself through both the
  // model-level and node-level entry points.
  function test() {
    trace("\nSimple math model self testing");
    (function () {
      var model = new Model();
      var node = model.fromLaTex("10 + 20");
      var result = model.isEqual(node, node) ? "PASS" : "FAIL";
      trace(result + ": " + "model.isEqual(node, node)");
      var result = node.isEqual(node) ? "PASS" : "FAIL";
      trace(result + ": " + "node.isEqual(node)");
      trace(model.dumpAll());
    })();
  }

  if (TEST) {
    test();
  }
});
| apache-2.0 |
asakusafw/asakusafw-compiler | compiler-project/extension-redirector/src/main/java/com/asakusafw/lang/compiler/extension/redirector/RedirectorParticipant.java | 3679 | /**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.lang.compiler.extension.redirector;
import java.io.File;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.asakusafw.lang.compiler.api.BatchProcessor;
import com.asakusafw.lang.compiler.api.CompilerOptions;
import com.asakusafw.lang.compiler.api.reference.BatchReference;
import com.asakusafw.lang.compiler.api.reference.JobflowReference;
import com.asakusafw.lang.compiler.common.Diagnostic;
import com.asakusafw.lang.compiler.common.DiagnosticException;
import com.asakusafw.lang.compiler.common.Location;
import com.asakusafw.lang.compiler.core.BatchCompiler.Context;
import com.asakusafw.lang.compiler.core.basic.AbstractCompilerParticipant;
import com.asakusafw.lang.compiler.core.basic.JobflowPackager;
import com.asakusafw.lang.compiler.model.graph.Batch;
import com.asakusafw.lang.compiler.redirector.RedirectRule;
import com.asakusafw.lang.compiler.redirector.ZipRewriter;
/**
* Redirects API accesses into another classes.
* An implementation of {@link BatchProcessor} for redirecting API invocations.
*/
public class RedirectorParticipant extends AbstractCompilerParticipant {

    static final Logger LOG = LoggerFactory.getLogger(RedirectorParticipant.class);

    /**
     * The compiler option property key prefix of the redirector rules.
     */
    public static final String KEY_RULE_PREFIX = "redirector.rule."; //$NON-NLS-1$

    /**
     * Rewrites each compiled jobflow JAR so that API invocations matching the
     * configured redirect rules point at their replacement classes.
     * Does nothing when no {@code redirector.rule.*} options are configured.
     */
    @Override
    public void afterBatch(Context context, Batch batch, BatchReference reference) {
        RedirectRule rule = extractRule(context.getOptions());
        if (rule.isEmpty()) {
            return;
        }
        ZipRewriter rewriter = new ZipRewriter(rule);
        LOG.debug("redirecting API invocations: {}", batch.getBatchId()); //$NON-NLS-1$
        for (JobflowReference jobflow : reference.getJobflows()) {
            Location location = JobflowPackager.getLibraryLocation(jobflow.getFlowId());
            File file = context.getOutput().toFile(location);
            if (file.isFile()) {
                try {
                    rewriter.rewrite(file);
                } catch (IOException e) {
                    // Rewrite failures abort the batch compilation with context.
                    throw new DiagnosticException(Diagnostic.Level.ERROR, MessageFormat.format(
                            "error occurred while rewriting jobflow JAR file: {0}",
                            jobflow.getFlowId()), e);
                }
            } else {
                // A missing library is unusual but non-fatal: warn and continue
                // with the remaining jobflows.
                LOG.warn(MessageFormat.format(
                        "jobflow library file is not found: {0}",
                        file));
            }
        }
    }

    /**
     * Builds the redirect rule set from compiler options: every property of the
     * form {@code redirector.rule.<from>=<to>} becomes one from/to mapping.
     */
    private static RedirectRule extractRule(CompilerOptions options) {
        RedirectRule results = new RedirectRule();
        for (Map.Entry<String, String> entry : options.getProperties(KEY_RULE_PREFIX).entrySet()) {
            String from = entry.getKey().substring(KEY_RULE_PREFIX.length());
            String to = entry.getValue();
            results.add(from, to);
        }
        return results;
    }
}
| apache-2.0 |
khalib/udacity-sunshine | app/src/main/java/com/calebwhang/sunshine/app/data/WeatherContract.java | 7597 | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.calebwhang.sunshine.app.data;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.net.Uri;
import android.provider.BaseColumns;
import android.text.format.Time;
/**
* Defines table and column names for the weather database.
*/
public class WeatherContract {
// The "Content authority" is a name for the entire content provider, similar to the
// relationship between a domain name and its website. A convenient string to use for the
// content authority is the package name for the app, which is guaranteed to be unique on the
// device.
public static final String CONTENT_AUTHORITY = "com.calebwhang.sunshine.app";
// Use CONTENT_AUTHORITY to create the base of all URI's which apps will use to contact
// the content provider.
public static final Uri BASE_CONTENT_URI = Uri.parse("content://" + CONTENT_AUTHORITY);
// Possible paths (appended to base content URI for possible URI's)
// For instance, content://com.example.android.sunshine.app/weather/ is a valid path for
// looking at weather data. content://com.example.android.sunshine.app/givemeroot/ will fail,
// as the ContentProvider hasn't been given any information on what to do with "givemeroot".
// At least, let's hope not. Don't be that dev, reader. Don't be that dev.
public static final String PATH_WEATHER = "weather";
public static final String PATH_LOCATION = "location";
// To make it easy to query for the exact date, we normalize all dates that go into
// the database to the start of the the Julian day at UTC.
public static long normalizeDate(long startDate) {
// normalize the start date to the beginning of the (UTC) day
Time time = new Time();
time.set(startDate);
int julianDay = Time.getJulianDay(startDate, time.gmtoff);
return time.setJulianDay(julianDay);
}
/* Inner class that defines the table contents of the location table */
public static final class LocationEntry implements BaseColumns {
public static final Uri CONTENT_URI =
BASE_CONTENT_URI.buildUpon().appendPath(PATH_LOCATION).build();
public static final String CONTENT_TYPE =
ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_LOCATION;
public static final String CONTENT_ITEM_TYPE =
ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_LOCATION;
// Table name
public static final String TABLE_NAME = "location";
// The location setting string is what will be sent to openweathermap
// as the location query.
public static final String COLUMN_LOCATION_SETTING = "location_setting";
// Human readable location string, provided by the API. Because for styling,
// "Mountain View" is more recognizable than 94043.
public static final String COLUMN_CITY_NAME = "city_name";
// In order to uniquely pinpoint the location on the map when we launch the
// map intent, we store the latitude and longitude as returned by openweathermap.
public static final String COLUMN_COORD_LAT = "coord_lat";
public static final String COLUMN_COORD_LONG = "coord_long";
public static Uri buildLocationUri(long id) {
return ContentUris.withAppendedId(CONTENT_URI, id);
}
}
/* Inner class that defines the table contents of the weather table */
public static final class WeatherEntry implements BaseColumns {
public static final Uri CONTENT_URI =
BASE_CONTENT_URI.buildUpon().appendPath(PATH_WEATHER).build();
public static final String CONTENT_TYPE =
ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_WEATHER;
public static final String CONTENT_ITEM_TYPE =
ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_WEATHER;
public static final String TABLE_NAME = "weather";
// Column with the foreign key into the location table.
public static final String COLUMN_LOC_KEY = "location_id";
// Date, stored as long in milliseconds since the epoch
public static final String COLUMN_DATE = "date";
// Weather id as returned by API, to identify the icon to be used
public static final String COLUMN_WEATHER_ID = "weather_id";
// Short description and long description of the weather, as provided by API.
// e.g "clear" vs "sky is clear".
public static final String COLUMN_SHORT_DESC = "short_desc";
// Min and max temperatures for the day (stored as floats)
public static final String COLUMN_MIN_TEMP = "min";
public static final String COLUMN_MAX_TEMP = "max";
// Humidity is stored as a float representing percentage
public static final String COLUMN_HUMIDITY = "humidity";
// Humidity is stored as a float representing percentage
public static final String COLUMN_PRESSURE = "pressure";
// Windspeed is stored as a float representing windspeed mph
public static final String COLUMN_WIND_SPEED = "wind";
// Degrees are meteorological degrees (e.g, 0 is north, 180 is south). Stored as floats.
public static final String COLUMN_DEGREES = "degrees";
public static Uri buildWeatherUri(long id) {
return ContentUris.withAppendedId(CONTENT_URI, id);
}
/*
Student: This is the buildWeatherLocation function you filled in.
*/
public static Uri buildWeatherLocation(String locationSetting) {
return CONTENT_URI.buildUpon().appendPath(locationSetting).build();
}
public static Uri buildWeatherLocationWithStartDate(
String locationSetting, long startDate) {
long normalizedDate = normalizeDate(startDate);
return CONTENT_URI.buildUpon().appendPath(locationSetting)
.appendQueryParameter(COLUMN_DATE, Long.toString(normalizedDate)).build();
}
public static Uri buildWeatherLocationWithDate(String locationSetting, long date) {
return CONTENT_URI.buildUpon().appendPath(locationSetting)
.appendPath(Long.toString(normalizeDate(date))).build();
}
public static String getLocationSettingFromUri(Uri uri) {
return uri.getPathSegments().get(1);
}
public static long getDateFromUri(Uri uri) {
return Long.parseLong(uri.getPathSegments().get(2));
}
public static long getStartDateFromUri(Uri uri) {
String dateString = uri.getQueryParameter(COLUMN_DATE);
if (null != dateString && dateString.length() > 0)
return Long.parseLong(dateString);
else
return 0;
}
}
} | apache-2.0 |
ahwxl/deep | src/main/java/org/activiti/engine/impl/ExecutionQueryImpl.java | 11318 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl;
import java.util.ArrayList;
import java.util.List;
import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.interceptor.CommandExecutor;
import org.activiti.engine.impl.persistence.entity.SuspensionState;
import org.activiti.engine.runtime.Execution;
import org.activiti.engine.runtime.ExecutionQuery;
/**
* @author Joram Barrez
* @author Frederik Heremans
* @author Daniel Meyer
*/
/**
 * Fluent query implementation for {@link Execution} instances. Each criterion
 * setter validates its argument, records it, and returns {@code this}; the
 * accumulated state is read by the iBATIS mapping when the query executes.
 */
public class ExecutionQueryImpl extends AbstractVariableQueryImpl<ExecutionQuery, Execution>
        implements ExecutionQuery {

    private static final long serialVersionUID = 1L;

    // Criteria populated by the fluent setters below.
    protected String processDefinitionId;
    protected String processDefinitionKey;
    protected String processDefinitionName;
    protected String activityId;
    protected String executionId;
    protected String parentId;
    protected String processInstanceId;
    protected List<EventSubscriptionQueryValue> eventSubscriptions;
    protected String tenantId;
    protected String tenantIdLike;
    protected boolean withoutTenantId;

    // Not used by end-users, but needed for dynamic ibatis query
    protected String superProcessInstanceId;
    protected String subProcessInstanceId;
    protected boolean excludeSubprocesses;
    protected SuspensionState suspensionState;
    protected String businessKey;
    protected boolean includeChildExecutionsWithBusinessKeyQuery;
    protected boolean isActive;
    protected String involvedUser;

    // Not exposed in API, but here for the ProcessInstanceQuery support, since the name lives on the
    // Execution entity/table
    protected String name;
    protected String nameLike;
    protected String nameLikeIgnoreCase;
    protected String deploymentId;
    protected List<String> deploymentIds;
    protected ExecutionQueryImpl orQueryObject;

    public ExecutionQueryImpl() {
    }

    public ExecutionQueryImpl(CommandContext commandContext) {
        super(commandContext);
    }

    public ExecutionQueryImpl(CommandExecutor commandExecutor) {
        super(commandExecutor);
    }

    // Distinguishes this query from ProcessInstanceQuery in the shared mapping.
    public boolean isProcessInstancesOnly() {
        return false; // see dynamic query
    }

    public ExecutionQueryImpl processDefinitionId(String processDefinitionId) {
        if (processDefinitionId == null) {
            throw new ActivitiIllegalArgumentException("Process definition id is null");
        }
        this.processDefinitionId = processDefinitionId;
        return this;
    }

    public ExecutionQueryImpl processDefinitionKey(String processDefinitionKey) {
        if (processDefinitionKey == null) {
            throw new ActivitiIllegalArgumentException("Process definition key is null");
        }
        this.processDefinitionKey = processDefinitionKey;
        return this;
    }

    @Override
    public ExecutionQuery processDefinitionName(String processDefinitionName) {
        if (processDefinitionName == null) {
            throw new ActivitiIllegalArgumentException("Process definition name is null");
        }
        this.processDefinitionName = processDefinitionName;
        return this;
    }

    public ExecutionQueryImpl processInstanceId(String processInstanceId) {
        if (processInstanceId == null) {
            throw new ActivitiIllegalArgumentException("Process instance id is null");
        }
        this.processInstanceId = processInstanceId;
        return this;
    }

    public ExecutionQuery processInstanceBusinessKey(String businessKey) {
        if (businessKey == null) {
            throw new ActivitiIllegalArgumentException("Business key is null");
        }
        this.businessKey = businessKey;
        return this;
    }

    // With includeChildExecutions=true, also matches child executions of the
    // process instance carrying the business key.
    public ExecutionQuery processInstanceBusinessKey(String processInstanceBusinessKey, boolean includeChildExecutions) {
        if (!includeChildExecutions) {
            return processInstanceBusinessKey(processInstanceBusinessKey);
        } else {
            if (processInstanceBusinessKey == null) {
                throw new ActivitiIllegalArgumentException("Business key is null");
            }
            this.businessKey = processInstanceBusinessKey;
            this.includeChildExecutionsWithBusinessKeyQuery = includeChildExecutions;
            return this;
        }
    }

    public ExecutionQueryImpl executionId(String executionId) {
        if (executionId == null) {
            throw new ActivitiIllegalArgumentException("Execution id is null");
        }
        this.executionId = executionId;
        return this;
    }

    // Filtering on an activity implicitly restricts the query to active executions.
    public ExecutionQueryImpl activityId(String activityId) {
        this.activityId = activityId;
        if (activityId != null) {
            isActive = true;
        }
        return this;
    }

    public ExecutionQueryImpl parentId(String parentId) {
        if (parentId == null) {
            throw new ActivitiIllegalArgumentException("Parent id is null");
        }
        this.parentId = parentId;
        return this;
    }

    public ExecutionQueryImpl executionTenantId(String tenantId) {
        if (tenantId == null) {
            throw new ActivitiIllegalArgumentException("execution tenant id is null");
        }
        this.tenantId = tenantId;
        return this;
    }

    public ExecutionQueryImpl executionTenantIdLike(String tenantIdLike) {
        if (tenantIdLike == null) {
            throw new ActivitiIllegalArgumentException("execution tenant id is null");
        }
        this.tenantIdLike = tenantIdLike;
        return this;
    }

    public ExecutionQueryImpl executionWithoutTenantId() {
        this.withoutTenantId = true;
        return this;
    }

    // Both signal methods are equivalent; the name variant is the current API.
    public ExecutionQuery signalEventSubscription(String signalName) {
        return eventSubscription("signal", signalName);
    }

    public ExecutionQuery signalEventSubscriptionName(String signalName) {
        return eventSubscription("signal", signalName);
    }

    public ExecutionQuery messageEventSubscriptionName(String messageName) {
        return eventSubscription("message", messageName);
    }

    // Accumulates (type, name) pairs; the list is lazily created on first use.
    public ExecutionQuery eventSubscription(String eventType, String eventName) {
        if(eventName == null) {
            throw new ActivitiIllegalArgumentException("event name is null");
        }
        if(eventType == null) {
            throw new ActivitiIllegalArgumentException("event type is null");
        }
        if(eventSubscriptions == null) {
            eventSubscriptions = new ArrayList<EventSubscriptionQueryValue>();
        }
        eventSubscriptions.add(new EventSubscriptionQueryValue(eventName, eventType));
        return this;
    }

    // Process-variable variants delegate to the superclass with localScope=false.
    public ExecutionQuery processVariableValueEquals(String variableName, Object variableValue) {
        return variableValueEquals(variableName, variableValue, false);
    }

    public ExecutionQuery processVariableValueEquals(Object variableValue) {
        return variableValueEquals(variableValue, false);
    }

    public ExecutionQuery processVariableValueNotEquals(String variableName, Object variableValue) {
        return variableValueNotEquals(variableName, variableValue, false);
    }

    public ExecutionQuery processVariableValueEqualsIgnoreCase(String name, String value) {
        return variableValueEqualsIgnoreCase(name, value, false);
    }

    @Override
    public ExecutionQuery processVariableValueNotEqualsIgnoreCase(String name, String value) {
        return variableValueNotEqualsIgnoreCase(name, value, false);
    }

    //ordering ////////////////////////////////////////////////////

    public ExecutionQueryImpl orderByProcessInstanceId() {
        this.orderProperty = ExecutionQueryProperty.PROCESS_INSTANCE_ID;
        return this;
    }

    public ExecutionQueryImpl orderByProcessDefinitionId() {
        this.orderProperty = ExecutionQueryProperty.PROCESS_DEFINITION_ID;
        return this;
    }

    public ExecutionQueryImpl orderByProcessDefinitionKey() {
        this.orderProperty = ExecutionQueryProperty.PROCESS_DEFINITION_KEY;
        return this;
    }

    public ExecutionQueryImpl orderByTenantId() {
        this.orderProperty = ExecutionQueryProperty.TENANT_ID;
        return this;
    }

    //results ////////////////////////////////////////////////////

    public long executeCount(CommandContext commandContext) {
        checkQueryOk();
        ensureVariablesInitialized();
        return commandContext
            .getExecutionEntityManager()
            .findExecutionCountByQueryCriteria(this);
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public List<Execution> executeList(CommandContext commandContext, Page page) {
        checkQueryOk();
        ensureVariablesInitialized();
        return (List) commandContext
            .getExecutionEntityManager()
            .findExecutionsByQueryCriteria(this, page);
    }

    //getters ////////////////////////////////////////////////////

    public boolean getOnlyProcessInstances() {
        return false;
    }

    public String getProcessDefinitionKey() {
        return processDefinitionKey;
    }

    public String getProcessDefinitionId() {
        return processDefinitionId;
    }

    public String getProcessDefinitionName() {
        return processDefinitionName;
    }

    public String getActivityId() {
        return activityId;
    }

    public String getProcessInstanceId() {
        return processInstanceId;
    }

    // Always null here; only meaningful for process-instance queries.
    public String getProcessInstanceIds() {
        return null;
    }

    public String getBusinessKey() {
        return businessKey;
    }

    public String getExecutionId() {
        return executionId;
    }

    public String getSuperProcessInstanceId() {
        return superProcessInstanceId;
    }

    public String getSubProcessInstanceId() {
        return subProcessInstanceId;
    }

    public boolean isExcludeSubprocesses() {
        return excludeSubprocesses;
    }

    public SuspensionState getSuspensionState() {
        return suspensionState;
    }

    public void setSuspensionState(SuspensionState suspensionState) {
        this.suspensionState = suspensionState;
    }

    public List<EventSubscriptionQueryValue> getEventSubscriptions() {
        return eventSubscriptions;
    }

    public boolean isIncludeChildExecutionsWithBusinessKeyQuery() {
        return includeChildExecutionsWithBusinessKeyQuery;
    }

    public void setEventSubscriptions(List<EventSubscriptionQueryValue> eventSubscriptions) {
        this.eventSubscriptions = eventSubscriptions;
    }

    public boolean isActive() {
        return isActive;
    }

    public String getInvolvedUser() {
        return involvedUser;
    }

    public void setInvolvedUser(String involvedUser) {
        this.involvedUser = involvedUser;
    }

    public String getParentId() {
        return parentId;
    }

    public String getTenantId() {
        return tenantId;
    }

    public String getTenantIdLike() {
        return tenantIdLike;
    }

    public boolean isWithoutTenantId() {
        return withoutTenantId;
    }

    public String getName() {
        return name;
    }

    public String getNameLike() {
        return nameLike;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setNameLike(String nameLike) {
        this.nameLike = nameLike;
    }

    public String getNameLikeIgnoreCase() {
        return nameLikeIgnoreCase;
    }

    public void setNameLikeIgnoreCase(String nameLikeIgnoreCase) {
        this.nameLikeIgnoreCase = nameLikeIgnoreCase;
    }
}
| apache-2.0 |
owlabs/incubator-airflow | airflow/operators/hive_operator.py | 5801 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import re
from airflow.hooks.hive_hooks import HiveCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.operator_helpers import context_to_airflow_vars
class HiveOperator(BaseOperator):
    """
    Executes hql code or hive script in a specific Hive database.
    :param hql: the hql to be executed. Note that you may also use
        a relative path from the dag file of a (template) hive
        script. (templated)
    :type hql: str
    :param hive_cli_conn_id: reference to the Hive database. (templated)
    :type hive_cli_conn_id: str
    :param hiveconfs: if defined, these key value pairs will be passed
        to hive as ``-hiveconf "key"="value"``
    :type hiveconfs: dict
    :param hiveconf_jinja_translate: when True, hiveconf-type templating
        ${var} gets translated into jinja-type templating {{ var }} and
        ${hiveconf:var} gets translated into jinja-type templating {{ var }}.
        Note that you may want to use this along with the
        ``DAG(user_defined_macros=myargs)`` parameter. View the DAG
        object documentation for more details.
    :type hiveconf_jinja_translate: bool
    :param script_begin_tag: If defined, the operator will get rid of the
        part of the script before the first occurrence of `script_begin_tag`
    :type script_begin_tag: str
    :param mapred_queue: queue used by the Hadoop CapacityScheduler. (templated)
    :type mapred_queue: str
    :param mapred_queue_priority: priority within CapacityScheduler queue.
        Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW
    :type mapred_queue_priority: str
    :param mapred_job_name: This name will appear in the jobtracker.
        This can make monitoring easier.
    :type mapred_job_name: str
    """

    # Attributes rendered by the Jinja templating engine before execution.
    template_fields = ('hql', 'schema', 'hive_cli_conn_id', 'mapred_queue',
                       'hiveconfs', 'mapred_job_name', 'mapred_queue_priority')
    # When `hql` is a path with one of these extensions, the file content is
    # loaded and templated instead.
    template_ext = ('.hql', '.sql',)
    ui_color = '#f0e4ec'

    @apply_defaults
    def __init__(
            self, hql,
            hive_cli_conn_id='hive_cli_default',
            schema='default',
            hiveconfs=None,
            hiveconf_jinja_translate=False,
            script_begin_tag=None,
            run_as_owner=False,
            mapred_queue=None,
            mapred_queue_priority=None,
            mapred_job_name=None,
            *args, **kwargs):
        super(HiveOperator, self).__init__(*args, **kwargs)
        self.hql = hql
        self.hive_cli_conn_id = hive_cli_conn_id
        self.schema = schema
        # Copy into a fresh dict default so callers' dicts are not mutated later.
        self.hiveconfs = hiveconfs or {}
        self.hiveconf_jinja_translate = hiveconf_jinja_translate
        self.script_begin_tag = script_begin_tag
        # When run_as_owner is set, the Hive CLI is invoked as the DAG owner.
        self.run_as = None
        if run_as_owner:
            self.run_as = self.dag.owner
        self.mapred_queue = mapred_queue
        self.mapred_queue_priority = mapred_queue_priority
        self.mapred_job_name = mapred_job_name
        # assigned lazily - just for consistency we can create the attribute with a
        # `None` initial value, later it will be populated by the execute method.
        # This also makes `on_kill` implementation consistent since it assumes `self.hook`
        # is defined.
        self.hook = None

    def get_hook(self):
        """Build a :class:`HiveCliHook` configured from this operator's settings."""
        return HiveCliHook(
            hive_cli_conn_id=self.hive_cli_conn_id,
            run_as=self.run_as,
            mapred_queue=self.mapred_queue,
            mapred_queue_priority=self.mapred_queue_priority,
            mapred_job_name=self.mapred_job_name)

    def prepare_template(self):
        """Rewrite ``self.hql`` before Jinja rendering.

        With ``hiveconf_jinja_translate`` on, ``${var}`` and ``${hiveconf:var}``
        become ``{{ var }}``. With ``script_begin_tag`` set, everything up to and
        including its first occurrence is dropped.
        """
        if self.hiveconf_jinja_translate:
            self.hql = re.sub(
                r"(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", r"{{ \g<3> }}", self.hql)
        if self.script_begin_tag and self.script_begin_tag in self.hql:
            self.hql = "\n".join(self.hql.split(self.script_begin_tag)[1:])

    def execute(self, context):
        """Run the (already templated) hql through the Hive CLI hook."""
        self.log.info('Executing: %s', self.hql)
        self.hook = self.get_hook()
        # set the mapred_job_name if it's not set with dag, task, execution time info
        if not self.mapred_job_name:
            ti = context['ti']
            self.hook.mapred_job_name = 'Airflow HiveOperator task for {}.{}.{}.{}'\
                .format(ti.hostname.split('.')[0], ti.dag_id, ti.task_id,
                        ti.execution_date.isoformat())
        # NOTE(review): when hiveconf_jinja_translate is enabled, user-supplied
        # `hiveconfs` are *replaced* by the airflow context vars rather than
        # merged with them — confirm this asymmetry is intended.
        if self.hiveconf_jinja_translate:
            self.hiveconfs = context_to_airflow_vars(context)
        else:
            self.hiveconfs.update(context_to_airflow_vars(context))
        self.log.info('Passing HiveConf: %s', self.hiveconfs)
        self.hook.run_cli(hql=self.hql, schema=self.schema, hive_conf=self.hiveconfs)

    def dry_run(self):
        """Validate the hql against Hive without executing it."""
        self.hook = self.get_hook()
        self.hook.test_hql(hql=self.hql)

    def on_kill(self):
        """Kill the underlying Hive CLI process, if one was started."""
        if self.hook:
            self.hook.kill()
| apache-2.0 |
Neha--Agarwal/moVirt | moVirt/src/main/java/org/ovirt/mobile/movirt/ui/EventsFragment.java | 4520 | package org.ovirt.mobile.movirt.ui;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.content.Loader;
import android.support.v4.widget.SwipeRefreshLayout;
import android.widget.ListView;
import android.widget.SimpleCursorAdapter;
import org.androidannotations.annotations.AfterViews;
import org.androidannotations.annotations.Background;
import org.androidannotations.annotations.Bean;
import org.androidannotations.annotations.EFragment;
import org.androidannotations.annotations.InstanceState;
import org.androidannotations.annotations.Receiver;
import org.androidannotations.annotations.ViewById;
import org.ovirt.mobile.movirt.Broadcasts;
import org.ovirt.mobile.movirt.R;
import org.ovirt.mobile.movirt.model.Event;
import org.ovirt.mobile.movirt.provider.ProviderFacade;
import org.ovirt.mobile.movirt.sync.EventsHandler;
import org.ovirt.mobile.movirt.sync.SyncUtils;
import org.ovirt.mobile.movirt.util.CursorAdapterLoader;
import static org.ovirt.mobile.movirt.provider.OVirtContract.BaseEntity.ID;
import static org.ovirt.mobile.movirt.provider.OVirtContract.Event.CLUSTER_ID;
import static org.ovirt.mobile.movirt.provider.OVirtContract.Event.VM_ID;
import static org.ovirt.mobile.movirt.provider.OVirtContract.Vm.HOST_ID;
@EFragment(R.layout.fragment_event_list)
public class EventsFragment extends RefreshableFragment {

    /** List view that displays the (possibly filtered) events. */
    @ViewById
    ListView list;

    @Bean
    ProviderFacade provider;

    @Bean
    EventsHandler eventsHandler;

    // Optional filters: when non-null they restrict the event query below.
    // Stored in the instance state so they survive configuration changes.
    @InstanceState
    String filterHostId;

    @InstanceState
    String filterClusterId;

    @InstanceState
    String filterVmId;

    @Bean
    SyncUtils syncUtils;

    @ViewById
    SwipeRefreshLayout swipeEventsContainer;

    /** Current endless-scroll page; the loader fetches page * EVENTS_PER_PAGE rows. */
    private int page = 1;

    private static final int EVENTS_PER_PAGE = 20;

    private static final String TAG = EventsFragment.class.getSimpleName();

    /** Requests the next page when the user scrolls near the end of the list. */
    private EndlessScrollListener endlessScrollListener = new EndlessScrollListener() {
        @Override
        public void onLoadMore(int page, int totalItemsCount) {
            loadMoreData(page);
        }
    };

    private CursorAdapterLoader cursorAdapterLoader;

    /**
     * Wires the list adapter and the cursor loader. The loader rebuilds its
     * query on every (re)start, applying the currently active host/cluster/VM
     * filters and limiting the result to the pages loaded so far.
     */
    @AfterViews
    void init() {
        SimpleCursorAdapter eventListAdapter = new EventsCursorAdapter(getActivity());
        list.setAdapter(eventListAdapter);
        cursorAdapterLoader = new CursorAdapterLoader(eventListAdapter) {
            @Override
            public synchronized Loader<Cursor> onCreateLoader(int id, Bundle args) {
                final ProviderFacade.QueryBuilder<Event> query = provider.query(Event.class);
                if (filterHostId != null) query.where(HOST_ID, filterHostId);
                if (filterClusterId != null) query.where(CLUSTER_ID, filterClusterId);
                if (filterVmId != null) query.where(VM_ID, filterVmId);
                // Newest events first; LIMIT grows with the page counter.
                return query.orderByDescending(ID).limit(page * EVENTS_PER_PAGE).asLoader();
            }
        };
        getLoaderManager().initLoader(0, null, cursorAdapterLoader);
        list.setOnScrollListener(endlessScrollListener);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Reflect an already-running sync in the UI when coming back to the fragment.
        if (EventsHandler.inSync) {
            showProgressBar();
        }
        restartLoader();
    }

    @Override
    public void onPause() {
        super.onPause();
        hideProgressBar();
    }

    // NOTE(review): unlike updateFilterClusterIdTo, the host/VM setters do not
    // restart the loader — presumably callers invoke restartLoader() themselves;
    // confirm against call sites.
    public void setFilterHostId(String filterHostId){
        this.filterHostId = filterHostId;
    }

    /** Applies a cluster filter and resets pagination/scroll state before reloading. */
    public void updateFilterClusterIdTo(String filterClusterId) {
        this.filterClusterId = filterClusterId;
        page = 1;
        list.setSelectionAfterHeaderView();
        endlessScrollListener.resetListener();
        restartLoader();
    }

    public void setFilterVmId(String filterVmId) {
        this.filterVmId = filterVmId;
    }

    /** Advances the endless-scroll page and re-runs the query with the larger limit. */
    public void loadMoreData(int page) {
        this.page = page;
        restartLoader();
    }

    public void restartLoader() {
        getLoaderManager().restartLoader(0, null, cursorAdapterLoader);
    }

    /** Mirrors the sync broadcast state on the swipe-to-refresh spinner. */
    @Receiver(actions = Broadcasts.EVENTS_IN_SYNC, registerAt = Receiver.RegisterAt.OnResumeOnPause)
    void eventsSyncing(@Receiver.Extra((Broadcasts.Extras.SYNCING)) boolean syncing) {
        setRefreshing(syncing);
    }

    /** Swipe-to-refresh handler: triggers a full event sync off the UI thread. */
    @Background
    @Override
    public void onRefresh() {
        // eventsHandler.deleteEvents();
        eventsHandler.updateEvents(true);
    }

    @Override
    protected SwipeRefreshLayout getSwipeRefreshLayout() {
        return swipeEventsContainer;
    }
}
| apache-2.0 |
yugangw-msft/azure-sdk-for-net | sdk/synapse/Azure.Analytics.Synapse.AccessControl/tests/samples/Sample1_HelloWorld.cs | 3297 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Azure.Core.TestFramework;
using Azure.Analytics.Synapse.Tests;
using Azure.Analytics.Synapse.AccessControl;
using Azure.Analytics.Synapse.AccessControl.Models;
using Azure.Identity;
using NUnit.Framework;
namespace Azure.Analytics.Synapse.AccessControl.Samples
{
    /// <summary>
    /// This sample demonstrates how to manage Synapse role assignments (create, retrieve,
    /// list, delete) using synchronous methods of <see cref="RoleAssignmentsClient"/> and
    /// <see cref="RoleDefinitionsClient"/>.
    /// </summary>
    public partial class Sample1_HelloWorld : SamplesBase<SynapseTestEnvironment>
    {
        [Test]
        public void AddAndRemoveRoleAssignmentSync()
        {
            #region Snippet:CreateAccessControlClient
            // Replace the string below with your actual endpoint url.
            string endpoint = "<my-endpoint-url>";
            /*@@*/endpoint = TestEnvironment.EndpointUrl;
            RoleAssignmentsClient roleAssignmentsClient = new RoleAssignmentsClient(new Uri(endpoint), new DefaultAzureCredential());
            RoleDefinitionsClient definitionsClient = new RoleDefinitionsClient(new Uri(endpoint), new DefaultAzureCredential());
            #endregion

            #region Snippet:PrepCreateRoleAssignment
            // Look up the built-in "Synapse Administrator" role definition to assign.
            Response<IReadOnlyList<SynapseRoleDefinition>> roles = definitionsClient.ListRoleDefinitions();
            SynapseRoleDefinition role = roles.Value.Single(role => role.Name == "Synapse Administrator");
            Guid roleId = role.Id.Value;

            string assignedScope = "workspaces/<my-workspace-name>";
            /*@@*/assignedScope = "workspaces/workspacechhamosynapse";

            // Replace the string below with the ID you'd like to assign the role.
            Guid principalId = /*<my-principal-id>"*/ Guid.NewGuid();

            // Replace the string below with the ID of the assignment you'd like to use.
            string assignmentId = "<my-assignment-id>";
            /*@@*/assignmentId = Guid.NewGuid().ToString();
            #endregion

            #region Snippet:CreateRoleAssignment
            Response<RoleAssignmentDetails> response = roleAssignmentsClient.CreateRoleAssignment (assignmentId, roleId, principalId, assignedScope);
            RoleAssignmentDetails roleAssignmentAdded = response.Value;
            #endregion

            #region Snippet:RetrieveRoleAssignment
            RoleAssignmentDetails roleAssignment = roleAssignmentsClient.GetRoleAssignmentById(roleAssignmentAdded.Id);
            Console.WriteLine($"Role {roleAssignment.RoleDefinitionId} is assigned to {roleAssignment.PrincipalId}.");
            #endregion

            #region Snippet:ListRoleAssignments
            // NOTE(review): despite the snippet name, this lists role *definitions*
            // via definitionsClient — confirm whether it should call the
            // role-assignments listing API instead.
            Response<IReadOnlyList<SynapseRoleDefinition>> roleAssignments = definitionsClient.ListRoleDefinitions();
            foreach (SynapseRoleDefinition assignment in roleAssignments.Value)
            {
                Console.WriteLine(assignment.Id);
            }
            #endregion

            #region Snippet:DeleteRoleAssignment
            roleAssignmentsClient.DeleteRoleAssignmentById(roleAssignment.Id);
            #endregion
        }
    }
}
| apache-2.0 |
VeselovAndrey/LoreKeeper | tests/LoreKeeper.EF6.Tests/ExceptionsInCommandHandlersTests.cs | 2144 | // -----------------------------------------------------------------------
// <copyright file="ExceptionsInCommandHandlersTests.cs">
// Copyright (c) 2013-2015 Andrey Veselov. All rights reserved.
// License: Apache License 2.0
// Contacts: http://andrey.moveax.com andrey@moveax.com
// </copyright>
// -----------------------------------------------------------------------
namespace LoreKeeper.EF6.Tests
{
    using System;
    using System.Threading.Tasks;
    using LoreKeeper.Core;
    using LoreKeeper.EF6.Tests.Fixtures;
    using LoreKeeper.Tests.Core.Commands;
    using Xunit;

    /// <summary>
    /// Verifies that exceptions thrown by command handlers propagate to the caller
    /// instead of being swallowed by the unit-of-work infrastructure.
    /// </summary>
    [Collection("DatabaseCollection")]
    public class ExceptionsInCommandHandlersTests
    {
        private readonly DatabaseFixture _dbFixture;
        private readonly IUnitOfWorkFactory _unitOfWorkFactory;

        public ExceptionsInCommandHandlersTests(DatabaseFixture dbFixture)
        {
            this._dbFixture = dbFixture;
            this._unitOfWorkFactory = dbFixture.UnitOfWorkFactory;
        }

        [Fact]
        public void CatchingSyncException()
        {
            // Arrange
            this._dbFixture.TestInitialize();
            var cmd = new AlwaysFailingSyncCommand();

            // Act: Record.Exception returns the thrown exception (or null when none),
            // replacing the manual try/catch + bool-flag pattern.
            Exception exception = Record.Exception(
                () => this._unitOfWorkFactory.ExecuteSingleCommand(cmd));

            // Assert
            Assert.NotNull(exception);
        }

        [Fact]
        public async Task CatchingAsyncException()
        {
            // Arrange
            this._dbFixture.TestInitialize();
            var cmd = new AlwaysFailingAsyncCommand();

            // Act
            Exception exception = await Record.ExceptionAsync(async () =>
            {
                using (var unitOfWork = this._unitOfWorkFactory.Create())
                    await unitOfWork.ExecuteCommandAsync(cmd);
            });

            // Assert
            Assert.NotNull(exception);
        }
    }
}
| apache-2.0 |
pdrados/cas | support/cas-server-support-webauthn-jpa/src/main/java/org/apereo/cas/webauthn/JpaWebAuthnCredentialRegistration.java | 1359 | package org.apereo.cas.webauthn;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Builder;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import lombok.experimental.SuperBuilder;
import org.hibernate.annotations.GenericGenerator;
import org.springframework.data.annotation.Id;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Lob;
import java.io.Serializable;
/**
* This is {@link JpaWebAuthnCredentialRegistration}.
*
* @author Misagh Moayyed
* @since 6.3.0
*/
@Getter
@Setter
@SuperBuilder
@Entity
@Accessors(chain = true)
public class JpaWebAuthnCredentialRegistration implements Serializable {
    private static final long serialVersionUID = 1505204109111619367L;

    /**
     * Surrogate primary key.
     * NOTE(review): the no-arg constructor seeds this with {@code System.nanoTime()}
     * even though the field is annotated with {@code @GeneratedValue} — confirm the
     * provider's "native" strategy actually overrides the pre-set value on persist.
     */
    @Id
    @Builder.Default
    @JsonProperty("id")
    @javax.persistence.Id
    @GeneratedValue(strategy = GenerationType.AUTO, generator = "native")
    @GenericGenerator(name = "native", strategy = "native")
    private long id = -1;

    /** Owner of the stored credential records; unique and mandatory. */
    @Column(nullable = false, unique = true)
    private String username;

    // Serialized credential registration records for the user, stored as a LOB.
    // The serialization format is not visible here — presumably JSON; confirm
    // against the repository implementation that reads/writes this column.
    @Lob
    @Column(name = "records", length = Integer.MAX_VALUE)
    private String records;

    public JpaWebAuthnCredentialRegistration() {
        setId(System.nanoTime());
    }
}
| apache-2.0 |
Doloops/arondor-common-reflection | arondor-common-reflection-noreflect/src/main/java/com/arondor/common/reflection/noreflect/model/FieldSetter.java | 996 | /*
* Copyright 2013, Arondor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arondor.common.reflection.noreflect.model;
/**
 * Field Setter prototype: a functional abstraction that assigns a value to a
 * specific field of a target object, allowing field injection without using
 * reflection at runtime.
 *
 * @author Francois Barre
 *
 */
public interface FieldSetter
{
    /**
     * Set a value to an object instance field
     *
     * @param object
     *            the target object instance whose field is assigned
     * @param value
     *            the value to set for this field
     */
    void set(Object object, Object value);
}
| apache-2.0 |
bitstorm/Wicket-rest-annotations | restannotations/src/main/java/org/wicketstuff/rest/resource/MethodMappingInfo.java | 5820 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wicketstuff.rest.resource;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import org.apache.wicket.authroles.authorization.strategies.role.Roles;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.wicketstuff.rest.annotations.AuthorizeInvocation;
import org.wicketstuff.rest.annotations.MethodMapping;
import org.wicketstuff.rest.contenthandling.RestMimeTypes;
import org.wicketstuff.rest.resource.urlsegments.AbstractURLSegment;
import org.wicketstuff.rest.utils.http.HttpMethod;
// TODO: Auto-generated Javadoc
/**
* This class contains the informations of a resource mapped method (i.e. a
* method annotated with {@link MethodMapping}). These informations are used at
* runtime to select the most suited method to serve the current request.
*
* @author andrea del bene
*
*/
public class MethodMappingInfo {
/** The HTTP method used to invoke this mapped method. */
private final HttpMethod httpMethod;
/** Segments that compose the URL we mapped the method on. */
private final List<AbstractURLSegment> segments;
/** Optional roles we used to annotate the method (see. {@link AuthorizeInvocation}). */
private final Roles roles;
/** The resource method we have mapped. */
private final Method method;
/** The MIME type to use in input. */
private final String inputFormat;
/** The MIME type to use in output. */
private final String outputFormat;
/**
* Class constructor.
*
* @param methodMapped the method mapped
* @param method the resource's method mapped.
*/
public MethodMappingInfo(MethodMapping methodMapped, Method method) {
this.httpMethod = methodMapped.httpMethod();
this.method = method;
this.segments = Collections.unmodifiableList(loadSegments(methodMapped.value()));
this.roles = loadRoles();
this.inputFormat = methodMapped.consumes();
this.outputFormat = methodMapped.produces();
}
/**
* Loads the segment that compose the URL used to map the method. Segments
* are instances of class {@link AbstractURLSegment}.
*
* @param urlPath
* the URL path of the method.
* @return a list containing the segments that compose the URL in input
*/
private List<AbstractURLSegment> loadSegments(String urlPath) {
String[] segArray = urlPath.split("/");
ArrayList<AbstractURLSegment> segments = new ArrayList<AbstractURLSegment>();
for (int i = 0; i < segArray.length; i++) {
String segment = segArray[i];
AbstractURLSegment segmentValue;
if (segment.isEmpty())
continue;
segmentValue = AbstractURLSegment.newSegment(segment);
segments.add(segmentValue);
}
return segments;
}
/**
* Load the optional roles used to annotate the method with.
*
* @return the authorization roles for the method.
* {@link AuthorizeInvocation}
*/
private Roles loadRoles() {
AuthorizeInvocation authorizeInvocation = method.getAnnotation(AuthorizeInvocation.class);
Roles roles = new Roles();
if (authorizeInvocation != null) {
roles = new Roles(authorizeInvocation.value());
}
return roles;
}
/**
* This method is invoked to populate the path parameters found in the
* mapped URL with the values obtained from the current request.
*
* @param pageParameters
* the current PageParameters.
* @return a Map containing the path parameters with their relative value.
*/
public LinkedHashMap<String, String> populatePathParameters(PageParameters pageParameters) {
LinkedHashMap<String, String> pathParameters = new LinkedHashMap<String, String>();
int indexedCount = pageParameters.getIndexedCount();
for (int i = 0; i < indexedCount; i++) {
String segmentContent = AbstractURLSegment.getActualSegment(pageParameters.get(i)
.toString());
AbstractURLSegment segment = segments.get(i);
segment.populatePathVariables(pathParameters, segmentContent);
}
return pathParameters;
}
// getters and setters
/**
* Gets the segments of the mapped URL.
*
* @return the segments
*/
public List<AbstractURLSegment> getSegments() {
return segments;
}
/**
* Gets the segments count.
*
* @return the segments count
*/
public int getSegmentsCount() {
return segments.size();
}
/**
* Gets the HTTP method.
*
* @return the HTTP method
*/
public HttpMethod getHttpMethod() {
return httpMethod;
}
/**
* Gets the relative class method.
*
* @return the class method
*/
public Method getMethod() {
return method;
}
/**
* Gets the optional authorization roles for this method.
*
* @return the roles
*/
public Roles getRoles() {
return roles;
}
/**
* Gets the mime input format.
*
* @return the mime input format
*/
public String getMimeInputFormat() {
return inputFormat;
}
/**
* Gets the mime output format.
*
* @return the mime output format
*/
public String getMimeOutputFormat() {
return outputFormat;
}
} | apache-2.0 |
mythguided/hydra | hydra-essentials/src/main/java/com/addthis/hydra/common/hash/PluggableHashFunction.java | 2003 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.common.hash;
import com.addthis.codec.plugins.PluginMap;
import com.addthis.codec.plugins.PluginRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PluggableHashFunction {

    private static final Logger log = LoggerFactory.getLogger(PluggableHashFunction.class);

    /** Hash implementation resolved once at class-load time. */
    private static final StringAndByteHashFunction hasher = resolveHashFunction();

    /**
     * Looks up the default "hash function" plugin from the registry. Falls back
     * to Murmur Hash 3 when no default is configured or the lookup fails for
     * any reason.
     */
    private static StringAndByteHashFunction resolveHashFunction() {
        try {
            PluginMap hashFunctions = PluginRegistry.defaultRegistry().asMap().get("hash function");
            Class<?> defaultHashFunctionClass = hashFunctions.defaultSugar();
            if (defaultHashFunctionClass != null) {
                return (StringAndByteHashFunction) defaultHashFunctionClass.newInstance();
            }
            log.warn("No default hash function was configured. Using Murmur Hash 3, but " +
                     "even though this is the library default, this condition is not " +
                     "expected. Check your configuration!");
        } catch (Exception ex) {
            log.warn("Unexpected error trying to load a pluggable hash function", ex);
        }
        return new Murmur3HashFunction();
    }

    /** Hashes a string with the configured hash function. */
    public static int hash(String input) {
        return hasher.hash(input);
    }

    /** Hashes a byte array with the configured hash function. */
    public static int hash(byte[] input) {
        return hasher.hash(input);
    }
}
| apache-2.0 |
Elevator89/RimWorld-Subtranslator | Elevator.Subtranslator.LabelDecliner/DeclinationTools.cs | 1888 | using Cyriller;
using Cyriller.Model;
using System;
namespace Elevator.Subtranslator.LabelDecliner
{
public static class DeclinationTools
{
    /// <summary>Declines a phrase, trying the singular form first and falling back to plural.</summary>
    public static CyrResult Decline(CyrPhrase decliner, string phrase)
    {
        return DeclineSingular(decliner, phrase) ?? DeclinePlural(decliner, phrase);
    }

    /// <summary>Try-variant of <see cref="Decline"/>: singular first, then plural.</summary>
    public static bool TryDecline(CyrPhrase decliner, string phrase, out CyrResult result)
    {
        if (TryDeclineSingular(decliner, phrase, out result))
            return true;
        return TryDeclinePlural(decliner, phrase, out result);
    }

    public static bool TryDeclineSingular(CyrPhrase decliner, string phrase, out CyrResult result)
    {
        result = DeclineSingular(decliner, phrase);
        return result != null;
    }

    /// <summary>Declines in singular; returns null when Cyriller cannot decline the phrase.</summary>
    public static CyrResult DeclineSingular(CyrPhrase decliner, string phrase)
    {
        try
        {
            return decliner.Decline(phrase, GetConditionsEnum.Strict);
        }
        catch
        {
            return null;
        }
    }

    public static bool TryDeclinePlural(CyrPhrase decliner, string phrase, out CyrResult result)
    {
        result = DeclinePlural(decliner, phrase);
        return result != null;
    }

    /// <summary>Declines in plural; returns null when Cyriller cannot decline the phrase.</summary>
    public static CyrResult DeclinePlural(CyrPhrase decliner, string phrase)
    {
        try
        {
            return decliner.DeclinePlural(phrase, GetConditionsEnum.Strict);
        }
        catch
        {
            return null;
        }
    }

    /// <summary>Serializes the six cases as a single "; "-separated string.</summary>
    public static string Serialize(CyrResult declination)
    {
        return string.Join("; ",
            declination.Nominative,
            declination.Genitive,
            declination.Dative,
            declination.Accusative,
            declination.Instrumental,
            declination.Prepositional);
    }

    /// <summary>Parses a string produced by <see cref="Serialize"/> back into a result.</summary>
    public static CyrResult Deserialize(string declinationStr)
    {
        string[] parts = declinationStr.Split(new string[] { "; " }, StringSplitOptions.RemoveEmptyEntries);
        return new CyrResult(parts[0], parts[1], parts[2], parts[3], parts[4], parts[5]);
    }
}
}
| apache-2.0 |
xiangxik/castle-platform | castle-themes/castle-theme-uikit/src/main/resources/META-INF/bower_components/kendo-ui/src/js/cultures/kendo.culture.zh-HK.js | 6674 | /**
* Copyright 2017 Telerik AD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// UMD-style wrapper: register as an AMD module (depending on kendo.core) when a
// module loader is present, otherwise run the factory immediately.
(function(f){
    if (typeof define === 'function' && define.amd) {
        define(["kendo.core"], f);
    } else {
        f();
    }
}(function(){
(function( window, undefined ) {
    // Generated culture definition for Chinese (Hong Kong SAR): number,
    // percent and currency formats plus Gregorian calendar patterns.
    kendo.cultures["zh-HK"] = {
        name: "zh-HK",
        numberFormat: {
            pattern: ["-n"],
            decimals: 2,
            ",": ",",
            ".": ".",
            groupSize: [3],
            percent: {
                pattern: ["-n%","n%"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "%"
            },
            currency: {
                name: "Hong Kong Dollar",
                abbr: "HKD",
                pattern: ["($n)","$n"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "HK$"
            }
        },
        calendars: {
            standard: {
                days: {
                    names: ["星期日","星期一","星期二","星期三","星期四","星期五","星期六"],
                    namesAbbr: ["週日","週一","週二","週三","週四","週五","週六"],
                    namesShort: ["日","一","二","三","四","五","六"]
                },
                months: {
                    names: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"],
                    namesAbbr: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"]
                },
                AM: ["上午","上午","上午"],
                PM: ["下午","下午","下午"],
                patterns: {
                    d: "d/M/yyyy",
                    D: "yyyy'年'M'月'd'日'",
                    F: "yyyy'年'M'月'd'日' H:mm:ss",
                    g: "d/M/yyyy H:mm",
                    G: "d/M/yyyy H:mm:ss",
                    m: "M'月'd'日'",
                    M: "M'月'd'日'",
                    s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
                    t: "H:mm",
                    T: "H:mm:ss",
                    u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
                    y: "yyyy'年'M'月'",
                    Y: "yyyy'年'M'月'"
                },
                "/": "/",
                ":": ":",
                firstDay: 0
            }
        }
    }
})(this);
}));
google/periph | experimental/devices/tlv493d/doc.go | 992 | // Copyright 2020 The Periph Authors. All rights reserved.
// Use of this source code is governed under the Apache License, Version 2.0
// that can be found in the LICENSE file.
// Package tlv493d implements interfacing code to the Infineon TLV493D Hall-effect sensor.
//
// Features of the device:
//    3-dimensional Hall-effect sensor, measures up to +/-130 mT magnetic flux.
//    temperature sensor
//    i2c interface
//    12-bit resolution
//    low power consumption
//
// Features of the driver:
//    Implements all options of the device
//    Power modes described in the documentation are defined as constants
//    2 precisions: high precision (12 bits), where all registers are read, or low precision, which saves 50% of I2C bandwidth, but without temperature and only 8-bit resolution
//    Continuous reading mode
//
// Datasheet and application notes:
//    https://www.infineon.com/cms/en/product/sensor/magnetic-sensors/magnetic-position-sensors/3d-magnetics/tlv493d-a1b6/
//
package tlv493d
| apache-2.0 |
cweathernet/AndroidApp | src/net/crowdweather/droid/GPSTracker.java | 6709 | package net.crowdweather.droid;
import static android.location.LocationManager.GPS_PROVIDER;
import static android.location.LocationManager.NETWORK_PROVIDER;
import android.app.AlertDialog;
import android.app.Service;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.IBinder;
import android.provider.Settings;
import android.util.Log;
public class GPSTracker extends Service implements LocationListener {
    private final Context mContext;

    // flag for GPS status
    boolean isGPSEnabled = false;

    // flag for network status
    boolean isNetworkEnabled = false;

    // flag for GPS status
    // NOTE(review): set to true once any provider is enabled and never reset —
    // confirm stale positives are acceptable to callers of canGetLocation().
    boolean canGetLocation = false;

    Location location; // location
    double latitude; // latitude
    double longitude; // longitude

    // The minimum distance to change Updates in meters
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10; // 10 meters

    // The minimum time between updates in milliseconds
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60 * 1; // 1 minute

    // Declaring a Location Manager
    protected LocationManager locationManager;

    /** Creates the tracker and immediately attempts an initial location fix. */
    public GPSTracker(Context context) {
        this.mContext = context;
        getLocation();
    }

    // LocationListener callbacks below are intentionally no-ops: this class
    // only reads the last known location on demand via getLocation().
    @Override
    public void onLocationChanged(Location location) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    @Override
    public void onProviderEnabled(String provider) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    @Override
    public void onProviderDisabled(String provider) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null; //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Tries to obtain a location fix: first from the network provider, then —
     * only when the network provider yielded nothing — from GPS. Also registers
     * for periodic location updates on the provider(s) used.
     *
     * @return the last known location, or null when no provider produced one.
     */
    public Location getLocation() {
        try {
            locationManager = (LocationManager) mContext
                    .getSystemService(LOCATION_SERVICE);

            // getting GPS status
            isGPSEnabled = locationManager
                    .isProviderEnabled(GPS_PROVIDER);

            // getting network status
            isNetworkEnabled = locationManager
                    .isProviderEnabled(NETWORK_PROVIDER);

            if (!isGPSEnabled && !isNetworkEnabled) {
                // no network provider is enabled
            } else {
                this.canGetLocation = true;
                // First get location from Network Provider
                if (isNetworkEnabled) {
                    locationManager.requestLocationUpdates(
                            NETWORK_PROVIDER,
                            MIN_TIME_BW_UPDATES,
                            MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                    Log.d("Network", "Network");
                    if (locationManager != null) {
                        location = locationManager
                                .getLastKnownLocation(NETWORK_PROVIDER);
                        if (location != null) {
                            latitude = location.getLatitude();
                            longitude = location.getLongitude();
                        }
                    }
                }
                // if GPS Enabled get lat/long using GPS Services
                // (only consulted when the network provider returned nothing)
                if (isGPSEnabled) {
                    if (location == null) {
                        locationManager.requestLocationUpdates(
                                GPS_PROVIDER,
                                MIN_TIME_BW_UPDATES,
                                MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                        Log.d("GPS Enabled", "GPS Enabled");
                        if (locationManager != null) {
                            location = locationManager
                                    .getLastKnownLocation(GPS_PROVIDER);
                            if (location != null) {
                                latitude = location.getLatitude();
                                longitude = location.getLongitude();
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort: swallow and return whatever (possibly null) fix we have.
            e.printStackTrace();
        }

        return location;
    }

    /**
     * Stop using GPS listener
     * Calling this function will stop using GPS in your app
     * */
    public void stopUsingGPS(){
        if(locationManager != null){
            locationManager.removeUpdates(GPSTracker.this);
        }
    }

    /**
     * Function to get latitude
     * */
    public double getLatitude(){
        if(location != null){
            latitude = location.getLatitude();
        }
        // return latitude
        return latitude;
    }

    /**
     * Function to get longitude
     * */
    public double getLongitude(){
        if(location != null){
            longitude = location.getLongitude();
        }
        // return longitude
        return longitude;
    }

    /**
     * Function to check GPS/wifi enabled
     * @return boolean
     * */
    public boolean canGetLocation() {
        return this.canGetLocation;
    }

    /**
     * Function to show settings alert dialog
     * On pressing Settings button will lauch Settings Options
     * */
    public void showSettingsAlert(){
        AlertDialog.Builder alertDialog = new AlertDialog.Builder(mContext);

        // Setting Dialog Title
        alertDialog.setTitle("GPS is settings");

        // Setting Dialog Message
        alertDialog.setMessage("GPS is not enabled. Do you want to go to settings menu?");

        // On pressing Settings button
        alertDialog.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog,int which) {
                Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
                mContext.startActivity(intent);
            }
        });

        // on pressing cancel button
        alertDialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                dialog.cancel();
            }
        });

        // Showing Alert Message
        alertDialog.show();
    }
}
allurefw/allure-report | allure-commandline/src/main/java/io/qameta/allure/option/ResultsOptions.java | 1346 | /*
* Copyright 2019 Qameta Software OÜ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.qameta.allure.option;
import com.beust.jcommander.Parameter;
import io.qameta.allure.convert.PathConverter;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import static java.util.Collections.singletonList;
/**
 * Contains results options.
 *
 * Holds the positional command-line argument(s) naming the directories
 * that contain Allure results; defaults to a single "allure-results" path.
 *
 * @since 2.0
 */
@SuppressWarnings("PMD.ImmutableField")
public class ResultsOptions {

    @Parameter(
            description = "The directories with allure results",
            converter = PathConverter.class
    )
    // Mutable on purpose: jcommander injects positional values into this list.
    private List<Path> resultsDirectories = new ArrayList<>(singletonList(Paths.get("allure-results")));

    /** @return the configured results directories (never null; default applied). */
    public List<Path> getResultsDirectories() {
        return resultsDirectories;
    }
}
| apache-2.0 |
AndriySvyryd/UnicornHack | src/UnicornHack.Web/ClientApp/src/components/MeterBar.tsx | 1284 | import React from 'react';
import { observer } from 'mobx-react';
// Meter/progress bar component (MobX observer).
// - With children: renders a "progress" container wrapping the children,
//   followed by the (optional) label.
// - Without children: renders a single bar segment whose width is the
//   percentage position of `now` within [min, max] (defaults 0 / 100).
export const MeterBar = observer((props: IProgressBarProps) => {
    if (React.Children.count(props.children) !== 0) {
        return <div className="progress">
            {props.children}
            {renderLabel(props)}
        </div>
    }

    return <div
        role="meter"
        className={`progress-bar ${props.className || ''}`}
        style={{ width: `${getPercentage(props.now || 0, props.min || 0, props.max || 100)}%` }}
        aria-valuenow={props.now}
        aria-valuemin={props.min}
        aria-valuemax={props.max}
    >
        {renderLabel(props)}
    </div>;
});
/**
 * Maps `now` within [min, max] to a percentage, rounded to three
 * decimal places.
 */
function getPercentage(now: number, min: number, max: number): number {
    const span = max - min;
    const raw = (100 * (now - min)) / span;
    return Math.round(raw * 1000) / 1000;
}
/**
 * Renders the bar's label: visually hidden (screen-reader only) when
 * `srOnly` is set, otherwise as a visible span. Renders the empty string
 * when no label was provided.
 */
function renderLabel(props: IProgressBarProps) {
    // Loose equality on purpose: treats both undefined and null as "no label".
    if (props.label == undefined) {
        return '';
    }
    if (props.srOnly) {
        return <span className="sr-only">{props.label}</span>;
    }
    return <span className="progress-bar-label">{props.label}</span>;
}
// Props for MeterBar. All fields are optional; min/max default to 0/100.
interface IProgressBarProps {
    min?: number;       // lower bound of the meter range (default 0)
    now?: number;       // current value (default 0)
    max?: number;       // upper bound of the meter range (default 100)
    label?: string;     // optional label text; nothing rendered when absent
    srOnly?: boolean;   // when true, label is visually hidden (screen readers only)
    className?: string; // extra CSS class(es) for the bar element
    children?: React.ReactNode; // when present, rendered instead of a bar segment
}; | apache-2.0 |
krasserm/ipf | archetypes/basic/src/main/resources/archetype-resources/src/main/java/SampleServer.java | 305 | #set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package};
import org.apache.camel.spring.Main;
/**
 * Launches the sample Camel/Spring server using the application context
 * defined in /context.xml. Blocks until the Camel Main process is stopped.
 */
public class SampleServer {

    public static void main(String[] args) throws Exception {
        // "-ac" tells Camel's Main which Spring application context to load.
        Main.main("-ac", "/context.xml");
    }

}
| apache-2.0 |
peterrodgers/dover | src/uk/ac/kent/displayGraph/utilities/GraphUtilityCreateRandomGraph.java | 3858 | package uk.ac.kent.displayGraph.utilities;
import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import uk.ac.kent.displayGraph.Graph;
/**
* Randomize the location of the nodes in a graph in a given rectangle
*
* @author Peter Rodgers
*/
public class GraphUtilityCreateRandomGraph extends GraphUtility implements ActionListener {

    // Defaults shown in the dialog fields.
    protected int numberOfNodes = 10;
    protected int numberOfEdges = 20;

    // Swing widgets for the parameter dialog.
    JFrame frame;
    JPanel panel;
    JTextField nodeField;
    JTextField edgeField;
    JLabel nodeLabel;
    JLabel edgeLabel;
    JButton okButton;

    /** Trivial constructor. */
    public GraphUtilityCreateRandomGraph() {
        super(KeyEvent.VK_9,"Create Random Graph");
    }

    /** Trivial constructor. */
    public GraphUtilityCreateRandomGraph(int key, String s) {
        super(key,s);
    }

    /** Trivial accessor. */
    public int getNumberOfNodes() {return numberOfNodes;}
    /** Trivial accessor. */
    public int getNumberOfEdges() {return numberOfEdges;}
    /** Trivial mutator. */
    public void setNumberOfNodes(int nodes) {numberOfNodes = nodes;}
    /** Trivial mutator. */
    public void setNumberOfEdges(int edges) {numberOfEdges = edges;}

    /** Entry point: pops up the parameter dialog. */
    public void apply() {
        createFrame();
    }

    /** Builds and shows the modal-style parameter dialog. */
    protected void createFrame() {
        frame = new JFrame("Create Random Graph");
        panel = new JPanel();
        GridBagLayout gridbag = new GridBagLayout();
        panel.setLayout(gridbag);
        addWidgets(panel,gridbag);
        frame.getContentPane().add(panel, BorderLayout.CENTER);
        frame.pack();
        frame.setVisible(true);
    }

    /** Lays out the node/edge fields and the OK button in a GridBag. */
    protected void addWidgets(JPanel widgetPanel, GridBagLayout gridbag) {
        nodeField = new JTextField(4);
        nodeField.setText(Integer.toString(numberOfNodes));
        nodeLabel = new JLabel("Number of Nodes: ", SwingConstants.LEFT);
        edgeField = new JTextField(4);
        edgeField.setText(Integer.toString(numberOfEdges));
        edgeLabel = new JLabel("Number of Edges: ", SwingConstants.LEFT);
        okButton = new JButton("OK");
        // Pressing Enter anywhere in the dialog triggers OK.
        frame.getRootPane().setDefaultButton(okButton);
        okButton.addActionListener(this);
        GridBagConstraints c = new GridBagConstraints();
        c.ipadx = 5;
        c.ipady = 5;
        c.gridx = 0;
        c.gridy = 0;
        gridbag.setConstraints(nodeLabel,c);
        widgetPanel.add(nodeLabel);
        c.gridx = 1;
        c.gridy = 0;
        gridbag.setConstraints(nodeField,c);
        widgetPanel.add(nodeField);
        nodeField.requestFocus();
        c.gridx = 0;
        c.gridy = 1;
        gridbag.setConstraints(edgeLabel,c);
        widgetPanel.add(edgeLabel);
        c.gridx = 1;
        c.gridy = 1;
        gridbag.setConstraints(edgeField,c);
        widgetPanel.add(edgeField);
        c.gridx = 0;
        c.gridy = 2;
        gridbag.setConstraints(okButton,c);
        widgetPanel.add(okButton);
    }

    /**
     * Generates a random graph with the requested node/edge counts, retrying
     * up to maxCount times until a connected graph is produced, then scatters
     * the node positions in a 400x400 area and repaints the panel.
     *
     * NOTE(review): the first attempt uses generateRandomGraph(...) while the
     * retries use generateRandomGraphExact(...) — confirm this asymmetry is
     * intentional.
     */
    protected void randomizeGraph() {
        Graph graph = getGraph();
        int maxCount =100;
        int count =1;
        graph.generateRandomGraph(numberOfNodes,numberOfEdges,false,false);
        while(!graph.connected()) {
            if (count >= maxCount) {
                // Give up: clear the graph and leave the panel empty.
                System.out.println("Failed to create a connected graph after "+count+ " attempts");
                graph.clear();
                getGraphPanel().update(getGraphPanel().getGraphics());
                getGraphPanel().requestFocus();
                return;
            }
            count++;
            graph.generateRandomGraphExact(numberOfNodes,numberOfEdges,false);
        }
        graph.randomizeNodePoints(new Point(50,50),400,400);
        getGraphPanel().update(getGraphPanel().getGraphics());
    }

    /**
     * OK-button handler: reads the field values and regenerates the graph.
     * Double.parseDouble is used so decimal input is tolerated (truncated).
     */
    public void actionPerformed(ActionEvent event) {
        numberOfNodes = (int)(Double.parseDouble(nodeField.getText()));
        numberOfEdges = (int)(Double.parseDouble(edgeField.getText()));
        randomizeGraph();
        getGraphPanel().requestFocus();
        frame.dispose();
    }
}
| apache-2.0 |
schlaile/phabricator | src/applications/calendar/query/PhabricatorCalendarEventSearchEngine.php | 16286 | <?php
final class PhabricatorCalendarEventSearchEngine
extends PhabricatorApplicationSearchEngine {
private $calendarYear;
private $calendarMonth;
private $calendarDay;
  /** Human-readable name of the objects this engine searches. */
  public function getResultTypeDescription() {
    return pht('Calendar Events');
  }

  /** Application this search engine belongs to. */
  public function getApplicationClassName() {
    return 'PhabricatorCalendarApplication';
  }

  /** Fresh, unconstrained query object for this engine. */
  public function newQuery() {
    return new PhabricatorCalendarEventQuery();
  }

  /** Calendar results have a fixed ordering; hide the "Order" control. */
  protected function shouldShowOrderField() {
    return false;
  }
  /**
   * Declares the search-form controls: creator/invitee tokenizers, a date
   * range, an "upcoming only" checkbox, and selects for cancellation state
   * and display mode.
   *
   * NOTE(review): the 'rangeEnd' field declares an alias identical to its
   * own key — likely a leftover; confirm whether a different legacy key was
   * intended.
   */
  protected function buildCustomSearchFields() {
    return array(
      id(new PhabricatorSearchDatasourceField())
        ->setLabel(pht('Created By'))
        ->setKey('creatorPHIDs')
        ->setDatasource(new PhabricatorPeopleUserFunctionDatasource()),
      id(new PhabricatorSearchDatasourceField())
        ->setLabel(pht('Invited'))
        ->setKey('invitedPHIDs')
        ->setDatasource(new PhabricatorPeopleUserFunctionDatasource()),
      id(new PhabricatorSearchDateControlField())
        ->setLabel(pht('Occurs After'))
        ->setKey('rangeStart'),
      id(new PhabricatorSearchDateControlField())
        ->setLabel(pht('Occurs Before'))
        ->setKey('rangeEnd')
        ->setAliases(array('rangeEnd')),
      id(new PhabricatorSearchCheckboxesField())
        ->setKey('upcoming')
        ->setOptions(array(
          'upcoming' => pht('Show only upcoming events.'),
        )),
      id(new PhabricatorSearchSelectField())
        ->setLabel(pht('Cancelled Events'))
        ->setKey('isCancelled')
        ->setOptions($this->getCancelledOptions())
        ->setDefault('active'),
      id(new PhabricatorSearchSelectField())
        ->setLabel(pht('Display Options'))
        ->setKey('display')
        ->setOptions($this->getViewOptions())
        ->setDefault('month'),
    );
  }
private function getCancelledOptions() {
return array(
'active' => pht('Active Events Only'),
'cancelled' => pht('Cancelled Events Only'),
'both' => pht('Both Cancelled and Active Events'),
);
}
private function getViewOptions() {
return array(
'month' => pht('Month View'),
'day' => pht('Day View'),
'list' => pht('List View'),
);
}
  /**
   * Translates the saved-search parameter map into a configured
   * PhabricatorCalendarEventQuery (creators, invitees, date range and
   * cancellation state), with ghost-event generation enabled for
   * recurring events.
   */
  protected function buildQueryFromParameters(array $map) {
    $query = $this->newQuery();
    $viewer = $this->requireViewer();

    if ($map['creatorPHIDs']) {
      $query->withCreatorPHIDs($map['creatorPHIDs']);
    }

    if ($map['invitedPHIDs']) {
      $query->withInvitedPHIDs($map['invitedPHIDs']);
    }

    $range_start = $map['rangeStart'];
    $range_end = $map['rangeEnd'];
    $display = $map['display'];

    // "upcoming" is a checkbox field, so it arrives as a list.
    if ($map['upcoming'] && $map['upcoming'][0] == 'upcoming') {
      $upcoming = true;
    } else {
      $upcoming = false;
    }

    // Clamp/expand the requested range to fit the chosen display mode.
    list($range_start, $range_end) = $this->getQueryDateRange(
      $range_start,
      $range_end,
      $display,
      $upcoming);

    $query->withDateRange($range_start, $range_end);

    switch ($map['isCancelled']) {
      case 'active':
        $query->withIsCancelled(false);
        break;
      case 'cancelled':
        $query->withIsCancelled(true);
        break;
      // 'both': no constraint applied.
    }

    return $query->setGenerateGhosts(true);
  }
  /**
   * Resolves raw start/end range inputs into concrete epoch bounds.
   *
   * For month and day displays, the bounds are widened to cover the whole
   * visible calendar (including the partial leading/trailing weeks of a
   * month view, honoring the viewer's week-start preference). When
   * $upcoming is set, the lower bound is raised to "now".
   *
   * @return pair<int|null, int|null> Minimum and maximum epoch.
   */
  private function getQueryDateRange(
    $start_date_wild,
    $end_date_wild,
    $display,
    $upcoming) {

    $start_date_value = $this->getSafeDate($start_date_wild);
    $end_date_value = $this->getSafeDate($end_date_wild);

    $viewer = $this->requireViewer();
    $timezone = new DateTimeZone($viewer->getTimezoneIdentifier());
    $min_range = null;
    $max_range = null;

    $min_range = $start_date_value->getEpoch();
    $max_range = $end_date_value->getEpoch();

    if ($display == 'month' || $display == 'day') {
      list($start_year, $start_month, $start_day) =
        $this->getDisplayYearAndMonthAndDay($min_range, $max_range, $display);

      $start_day = new DateTime(
        "{$start_year}-{$start_month}-{$start_day}",
        $timezone);
      $next = clone $start_day;

      // Day view actually shows a week's worth of days.
      if ($display == 'month') {
        $next->modify('+1 month');
      } else if ($display == 'day') {
        $next->modify('+7 day');
      }

      $display_start = $start_day->format('U');
      $display_end = $next->format('U');

      $preferences = $viewer->loadPreferences();
      $pref_week_day = PhabricatorUserPreferences::PREFERENCE_WEEK_START_DAY;

      $start_of_week = $preferences->getPreference($pref_week_day, 0);
      $end_of_week = ($start_of_week + 6) % 7;

      // Day-of-week (0-6) for the first and last visible days of the month.
      $first_of_month = $start_day->format('w');
      $last_of_month = id(clone $next)->modify('-1 day')->format('w');

      if (!$min_range || ($min_range < $display_start)) {
        $min_range = $display_start;

        // Extend back to the week boundary preceding the 1st of the month.
        if ($display == 'month' &&
          $first_of_month !== $start_of_week) {
          $interim_day_num = ($first_of_month + 7 - $start_of_week) % 7;
          $min_range = id(clone $start_day)
            ->modify('-'.$interim_day_num.' days')
            ->format('U');
        }
      }
      if (!$max_range || ($max_range > $display_end)) {
        $max_range = $display_end;

        // Extend forward to the week boundary following the month's end.
        if ($display == 'month' &&
          $last_of_month !== $end_of_week) {
          $interim_day_num = ($end_of_week + 7 - $last_of_month) % 7;
          $max_range = id(clone $next)
            ->modify('+'.$interim_day_num.' days')
            ->format('U');
        }
      }
    }

    if ($upcoming) {
      if ($min_range) {
        $min_range = max(time(), $min_range);
      } else {
        $min_range = time();
      }
    }

    return array($min_range, $max_range);
  }
  /** Prefixes application-relative paths with the Calendar base URI. */
  protected function getURI($path) {
    return '/calendar/'.$path;
  }

  /** Named built-in (non-user) saved queries offered by this engine. */
  protected function getBuiltinQueryNames() {
    $names = array(
      'month' => pht('Month View'),
      'day' => pht('Day View'),
      'upcoming' => pht('Upcoming Events'),
      'all' => pht('All Events'),
    );

    return $names;
  }
  /**
   * Pins the calendar to a specific date, e.g. when browsing /calendar/2016/04/.
   * A null $day means "no specific day selected".
   */
  public function setCalendarYearAndMonthAndDay($year, $month, $day = null) {
    $this->calendarYear = $year;
    $this->calendarMonth = $month;
    $this->calendarDay = $day;

    return $this;
  }

  /** Materializes one of the built-in queries declared above. */
  public function buildSavedQueryFromBuiltin($query_key) {
    $query = $this->newSavedQuery();
    $query->setQueryKey($query_key);

    switch ($query_key) {
      case 'month':
        return $query->setParameter('display', 'month');
      case 'day':
        return $query->setParameter('display', 'day');
      case 'upcoming':
        return $query
          ->setParameter('display', 'list')
          ->setParameter('upcoming', array(
            0 => 'upcoming',
          ));
      case 'all':
        // No constraints: everything, in the default list display.
        return $query;
    }

    return parent::buildSavedQueryFromBuiltin($query_key);
  }
protected function getRequiredHandlePHIDsForResultList(
array $objects,
PhabricatorSavedQuery $query) {
$phids = array();
foreach ($objects as $event) {
$phids[$event->getUserPHID()] = 1;
}
return array_keys($phids);
}
protected function renderResultList(
array $events,
PhabricatorSavedQuery $query,
array $handles) {
if ($this->isMonthView($query)) {
return $this->buildCalendarView($events, $query, $handles);
} else if ($this->isDayView($query)) {
return $this->buildCalendarDayView($events, $query, $handles);
}
assert_instances_of($events, 'PhabricatorCalendarEvent');
$viewer = $this->requireViewer();
$list = new PHUIObjectItemListView();
foreach ($events as $event) {
$duration = '';
$event_date_info = $this->getEventDateLabel($event);
$creator_handle = $handles[$event->getUserPHID()];
$attendees = array();
foreach ($event->getInvitees() as $invitee) {
$attendees[] = $invitee->getInviteePHID();
}
$attendees = pht(
'Attending: %s',
$viewer->renderHandleList($attendees)
->setAsInline(1)
->render());
if (strlen($event->getDuration()) > 0) {
$duration = pht(
'Duration: %s',
$event->getDuration());
}
$item = id(new PHUIObjectItemView())
->setUser($viewer)
->setObject($event)
->setHeader($viewer->renderHandle($event->getPHID())->render())
->addAttribute($event_date_info)
->addAttribute($attendees)
->addIcon('none', $duration);
$list->addItem($item);
}
$result = new PhabricatorApplicationSearchResultView();
$result->setObjectList($list);
$result->setNoDataString(pht('No events found.'));
return $result;
}
  /**
   * Builds the month-calendar rendering of the result events. When the
   * displayed month is the current month, the current day is highlighted.
   */
  private function buildCalendarView(
    array $statuses,
    PhabricatorSavedQuery $query,
    array $handles) {

    $viewer = $this->requireViewer();
    $now = time();

    list($start_year, $start_month) =
      $this->getDisplayYearAndMonthAndDay(
        $this->getQueryDateFrom($query)->getEpoch(),
        $this->getQueryDateTo($query)->getEpoch(),
        $query->getParameter('display'));

    $now_year = phabricator_format_local_time($now, $viewer, 'Y');
    $now_month = phabricator_format_local_time($now, $viewer, 'm');
    $now_day = phabricator_format_local_time($now, $viewer, 'j');

    // Pass the current day only when viewing the current month, so the
    // month view can highlight "today".
    if ($start_month == $now_month && $start_year == $now_year) {
      $month_view = new PHUICalendarMonthView(
        $this->getQueryDateFrom($query),
        $this->getQueryDateTo($query),
        $start_month,
        $start_year,
        $now_day);
    } else {
      $month_view = new PHUICalendarMonthView(
        $this->getQueryDateFrom($query),
        $this->getQueryDateTo($query),
        $start_month,
        $start_year);
    }

    $month_view->setUser($viewer);

    // NOTE(review): $phids is computed but never used — dead local.
    $phids = mpull($statuses, 'getUserPHID');

    foreach ($statuses as $status) {
      $viewer_is_invited = $status->getIsUserInvited($viewer->getPHID());

      $event = new AphrontCalendarEventView();
      $event->setEpochRange($status->getDateFrom(), $status->getDateTo());
      $event->setIsAllDay($status->getIsAllDay());
      $event->setIcon($status->getIcon());

      $name_text = $handles[$status->getUserPHID()]->getName();
      $status_text = $status->getName();
      $event->setUserPHID($status->getUserPHID());
      $event->setDescription(pht('%s (%s)', $name_text, $status_text));
      $event->setName($status_text);
      $event->setURI($status->getURI());
      $event->setViewerIsInvited($viewer_is_invited);
      $month_view->addEvent($event);
    }

    $month_view->setBrowseURI(
      $this->getURI('query/'.$query->getQueryKey().'/'));

    // TODO redesign-2015 : Move buttons out of PHUICalendarView?
    $result = new PhabricatorApplicationSearchResultView();
    $result->setContent($month_view);

    return $result;
  }
  /**
   * Builds the day-calendar rendering of the result events. Cancelled
   * events are omitted; each event carries an edit flag based on the
   * viewer's CAN_EDIT capability.
   */
  private function buildCalendarDayView(
    array $statuses,
    PhabricatorSavedQuery $query,
    array $handles) {

    $viewer = $this->requireViewer();

    list($start_year, $start_month, $start_day) =
      $this->getDisplayYearAndMonthAndDay(
        $this->getQueryDateFrom($query)->getEpoch(),
        $this->getQueryDateTo($query)->getEpoch(),
        $query->getParameter('display'));

    $day_view = id(new PHUICalendarDayView(
      $this->getQueryDateFrom($query)->getEpoch(),
      $this->getQueryDateTo($query)->getEpoch(),
      $start_year,
      $start_month,
      $start_day))
      ->setQuery($query->getQueryKey());

    $day_view->setUser($viewer);

    // NOTE(review): $phids is computed but never used — dead local.
    $phids = mpull($statuses, 'getUserPHID');

    foreach ($statuses as $status) {
      if ($status->getIsCancelled()) {
        continue;
      }

      $viewer_is_invited = $status->getIsUserInvited($viewer->getPHID());

      $can_edit = PhabricatorPolicyFilter::hasCapability(
        $viewer,
        $status,
        PhabricatorPolicyCapability::CAN_EDIT);

      $event = new AphrontCalendarEventView();
      $event->setCanEdit($can_edit);
      $event->setEventID($status->getID());
      $event->setEpochRange($status->getDateFrom(), $status->getDateTo());
      $event->setIsAllDay($status->getIsAllDay());
      $event->setIcon($status->getIcon());
      $event->setViewerIsInvited($viewer_is_invited);

      $event->setName($status->getName());
      $event->setURI($status->getURI());
      $day_view->addEvent($event);
    }

    $day_view->setBrowseURI(
      $this->getURI('query/'.$query->getQueryKey().'/'));

    $result = new PhabricatorApplicationSearchResultView();
    $result->setContent($day_view);

    return $result;
  }
  /**
   * Chooses the year/month/day anchor for calendar rendering: explicit
   * URI-pinned values win; otherwise derived (in the viewer's timezone)
   * from the range start, then range end, then "now".
   *
   * NOTE(review): month display anchors to day 1; day display uses the
   * zero-padded 'd' format while other call sites use 'j' — presumably
   * harmless since the value feeds DateTime parsing, but confirm.
   *
   * @return triple<int, int, int> Year, month, day.
   */
  private function getDisplayYearAndMonthAndDay(
    $range_start,
    $range_end,
    $display) {
    $viewer = $this->requireViewer();
    $epoch = null;

    if ($this->calendarYear && $this->calendarMonth) {
      $start_year = $this->calendarYear;
      $start_month = $this->calendarMonth;
      $start_day = $this->calendarDay ? $this->calendarDay : 1;
    } else {
      if ($range_start) {
        $epoch = $range_start;
      } else if ($range_end) {
        $epoch = $range_end;
      } else {
        $epoch = time();
      }
      if ($display == 'month') {
        $day = 1;
      } else {
        $day = phabricator_format_local_time($epoch, $viewer, 'd');
      }
      $start_year = phabricator_format_local_time($epoch, $viewer, 'Y');
      $start_month = phabricator_format_local_time($epoch, $viewer, 'm');
      $start_day = $day;
    }
    return array($start_year, $start_month, $start_day);
  }
  /**
   * Calendar displays need (almost) all events on screen at once, so they
   * get a much larger default page size than the list display.
   */
  public function getPageSize(PhabricatorSavedQuery $saved) {
    if ($this->isMonthView($saved) || $this->isDayView($saved)) {
      return $saved->getParameter('limit', 1000);
    } else {
      return $saved->getParameter('limit', 100);
    }
  }

  /** Normalized lower bound of the saved query's date range. */
  private function getQueryDateFrom(PhabricatorSavedQuery $saved) {
    return $this->getQueryDate($saved, 'rangeStart');
  }

  /** Normalized upper bound of the saved query's date range. */
  private function getQueryDateTo(PhabricatorSavedQuery $saved) {
    return $this->getQueryDate($saved, 'rangeEnd');
  }

  /** Fetches a raw date parameter and normalizes it via getSafeDate(). */
  private function getQueryDate(PhabricatorSavedQuery $saved, $key) {
    $viewer = $this->requireViewer();
    $wild = $saved->getParameter($key);
    return $this->getSafeDate($wild);
  }
  /**
   * Normalizes a "wild" date parameter (control value object, serialized
   * form, or empty) into an optional AphrontFormDateControlValue. Empty
   * input yields a disabled value anchored at today's midnight.
   */
  private function getSafeDate($value) {
    $viewer = $this->requireViewer();
    if ($value) {
      // ideally this would be consistent and always pass in the same type
      if ($value instanceof AphrontFormDateControlValue) {
        return $value;
      } else {
        $value = AphrontFormDateControlValue::newFromWild($viewer, $value);
      }
    } else {
      $value = AphrontFormDateControlValue::newFromEpoch(
        $viewer,
        PhabricatorTime::getTodayMidnightDateTime($viewer)->format('U'));
      $value->setEnabled(false);
    }

    // Optional: callers treat a disabled/absent date as "no bound".
    $value->setOptional(true);

    return $value;
  }
private function isMonthView(PhabricatorSavedQuery $query) {
if ($this->isDayView($query)) {
return false;
}
if ($query->getParameter('display') == 'month') {
return true;
}
}
private function isDayView(PhabricatorSavedQuery $query) {
if ($query->getParameter('display') == 'day') {
return true;
}
if ($this->calendarDay) {
return true;
}
return false;
}
  /**
   * Formats an event's date range for the list display, collapsing the
   * output when the range is all-day and/or falls on a single calendar
   * day (in the viewer's timezone).
   */
  private function getEventDateLabel($event) {
    $viewer = $this->requireViewer();

    $from_datetime = PhabricatorTime::getDateTimeFromEpoch(
      $event->getDateFrom(),
      $viewer);

    $to_datetime = PhabricatorTime::getDateTimeFromEpoch(
      $event->getDateTo(),
      $viewer);

    // 'Y m d' strings are compared for equality only, never parsed.
    $from_date_formatted = $from_datetime->format('Y m d');
    $to_date_formatted = $to_datetime->format('Y m d');

    if ($event->getIsAllDay()) {
      if ($from_date_formatted == $to_date_formatted) {
        return pht(
          '%s, All Day',
          phabricator_date($event->getDateFrom(), $viewer));
      } else {
        return pht(
          '%s - %s, All Day',
          phabricator_date($event->getDateFrom(), $viewer),
          phabricator_date($event->getDateTo(), $viewer));
      }
    } else if ($from_date_formatted == $to_date_formatted) {
      // Same-day timed event: show the date once, times for both ends.
      return pht(
        '%s - %s',
        phabricator_datetime($event->getDateFrom(), $viewer),
        phabricator_time($event->getDateTo(), $viewer));
    } else {
      return pht(
        '%s - %s',
        phabricator_datetime($event->getDateFrom(), $viewer),
        phabricator_datetime($event->getDateTo(), $viewer));
    }
  }
}
| apache-2.0 |
Jie211/solver_cpp | times.cpp | 3675 | #include "times.hpp"
// Construct a timing accumulator with every counter zeroed.
// Uses a member initializer list (preferred over assignment in the body);
// reg_time / unreg_time remain intentionally unused, kept as a comment for
// reference like in the original.
times::times()
  : mv_time(0.0),
    dot_time(0.0),
    memset_time(0.0),
    h2d_time(0.0),
    d2h_time(0.0),
    cpu_mv_time(0.0),
    cpu_dot_time(0.0),
    // reg_time = 0.0;
    // unreg_time = 0.0;
    cp_time(0.0),
    cons_time(0.0),
    dis_time(0.0)
{
}
// Destructor: nothing to release — all members are plain scalars.
times::~times(){
}
// Returns the current local time as "M-D-H_M_S" (no zero padding, since
// the components go through std::to_string), e.g. "3-7-14_5_9".
// NOTE(review): std::localtime returns a pointer to a shared static
// buffer and is not thread-safe — fine for single-threaded use; confirm
// callers, or switch to localtime_r if threads are involved.
std::string times::get_date_time(){
  struct tm *date;
  time_t now;
  int month, day;
  int hour, minute, second;
  std::string date_time;

  time(&now);
  date = localtime(&now);

  // tm_mon is 0-based, hence the +1.
  month = date->tm_mon + 1;
  day = date->tm_mday;
  hour = date->tm_hour;
  minute = date->tm_min;
  second = date->tm_sec;

  date_time=std::to_string(month)+"-"+std::to_string(day)+"-"+std::to_string(hour)+"_"+std::to_string(minute)+"_"+std::to_string(second);
  return date_time;
}
// Prints a breakdown of CPU-side timing (dot, MV, construct/destruct) as
// absolute seconds and as a percentage of `total`.
// Returns the summed CPU kernel time when a GPU breakdown will follow, or
// -1.0 as a sentinel when hasGPU is false (the "other" line is printed
// here instead).
// NOTE: std::setprecision is sticky and leaks into later stream output.
double times::showTimeOnCPU(double total, bool hasGPU){
  double all = cpu_mv_time + cpu_dot_time;
  std::cout << CYAN << "Execution time on CPU" << RESET << std::endl;
  std::cout << "\tDot process time = " << std::setprecision(6) << cpu_dot_time << ", " << std::setprecision(2) << cpu_dot_time/total*100 << "%" << std::endl;
  std::cout << "\tMV process time = " << std::setprecision(6) << cpu_mv_time << ", " << std::setprecision(2) << cpu_mv_time/total*100 << "%" << std::endl;
  std::cout << "\t cons time = " << std::setprecision(6) << cons_time << ", " << std::setprecision(2) << cons_time/total*100 << "%" << std::endl;
  std::cout << "\t dis time = " << std::setprecision(6) << dis_time << ", " << std::setprecision(2) << dis_time/total*100 << "%" << std::endl;
  if(!hasGPU){
    std::cout << "\tother time = " << std::setprecision(6) << total-all << ", " << std::setprecision(2) << (total-all)/total*100 << "%" << std::endl;
    return (-1.0);
  }else{
    return all;
  }
}
// Prints a breakdown of GPU-side timing (kernels, transfers, memset,
// copies) as absolute seconds and percentages of `total`. `timeCPU` is
// the CPU-side kernel time previously returned by showTimeOnCPU(), so the
// "other" line accounts for everything not attributed to CPU or GPU work.
void times::showTimeOnGPU(double total, double timeCPU){
  double all = mv_time + dot_time;
  // double inall = h2d_time + d2h_time + memset_time;
  std::cout << CYAN << "Execution time on GPU" << RESET << std::endl;
  std::cout << "\tDot process time = " << std::setprecision(6) << dot_time << ", " << std::setprecision(2) << dot_time/total*100 << "%" << std::endl;
  std::cout << "\tMV process time = " << std::setprecision(6) << mv_time << ", " << std::setprecision(2) << mv_time/total*100 << "%" << std::endl;
  std::cout << "\t H2D time = " << std::setprecision(6) << h2d_time << ", " << std::setprecision(2) << h2d_time/total*100 << "%" << std::endl;
  std::cout << "\t D2H time = " << std::setprecision(6) << d2h_time << ", " << std::setprecision(2) << d2h_time/total*100 << "%" << std::endl;
  std::cout << "\t Memset time = " << std::setprecision(6) << memset_time << ", " << std::setprecision(2) << memset_time/total*100 << "%" << std::endl;
  std::cout << "\tother time = " << std::setprecision(6) << total-all-timeCPU << ", " << std::setprecision(2) << (total-all-timeCPU)/total*100 << "%" << std::endl;
  // std::cout << "\t Register time = " << std::setprecision(6) << reg_time << ", " << std::setprecision(2) << reg_time/total*100 << "%" << std::endl;
  // std::cout << "\t Unregister time = " << std::setprecision(6) << unreg_time << ", " << std::setprecision(2) << unreg_time/total*100 << "%" << std::endl;
  std::cout << "\t Copy time = " << std::setprecision(6) << cp_time << ", " << std::setprecision(2) << cp_time/total*100 << "%" << std::endl;
  std::cout << "\t cons time = " << std::setprecision(6) << cons_time << ", " << std::setprecision(2) << cons_time/total*100 << "%" << std::endl;
  std::cout << "\t dis time = " << std::setprecision(6) << dis_time << ", " << std::setprecision(2) << dis_time/total*100 << "%" << std::endl;
}
| apache-2.0 |
jonfoster/pyxb1 | tests/trac/test-trac-0111.py | 1703 | import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
xsd='''<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:simpleType name="cards">
<xs:restriction base="xs:string">
<xs:enumeration value="clubs"/>
<xs:enumeration value="hearts"/>
<xs:enumeration value="diamonds"/>
<xs:enumeration value="spades"/>
</xs:restriction>
</xs:simpleType>
<xs:element name="card" type="cards"/>
</xs:schema>'''
# Generate Python bindings for the inline schema.
code = pyxb.binding.generate.GeneratePython(schema_text=xsd)

# Write the generated bindings to disk for inspection. Use a context
# manager so the handle is flushed and closed deterministically (the
# original used the bare py2 file() builtin and leaked the handle).
with open('code.py', 'w') as code_file:
    code_file.write(code)

# Execute the generated bindings in the current namespace (defines `cards`).
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestTrac0111 (unittest.TestCase):
    # Enumeration values declared by the `cards` simple type in the schema.
    Expected = set( ('clubs', 'hearts', 'diamonds', 'spades') )

    def testItems (self):
        # items() yields enumeration facet item objects, not raw strings.
        vals = set()
        for ee in cards.items():
            self.assertTrue(isinstance(ee, cards._CF_enumeration._CollectionFacet_itemType))
            vals.add(ee.value())
        self.assertEqual(self.Expected, vals)

    def testIterItems (self):
        # iteritems() (py2-style) must agree with items().
        vals = set()
        for ee in cards.iteritems():
            self.assertTrue(isinstance(ee, cards._CF_enumeration._CollectionFacet_itemType))
            vals.add(ee.value())
        self.assertEqual(self.Expected, vals)

    def testValues (self):
        # values() yields the plain enumeration values directly.
        vals = set()
        for e in cards.values():
            vals.add(e)
        self.assertEqual(self.Expected, vals)

    def testIterValues (self):
        # itervalues() (py2-style) must agree with values().
        vals = set()
        for e in cards.itervalues():
            vals.add(e)
        self.assertEqual(self.Expected, vals)

if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
GoogleCloudPlatform/cloud-derby | car/driver/py/robotderbycar.py | 7582 | #!/usr/bin/env python
# https://github.com/DexterInd/GoPiGo3/blob/master/LICENSE.md
#
# MIT License
# Copyright (c) 2017 Dexter Industries
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
# IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Based on https://github.com/DexterInd/GoPiGo3/blob/master/Software/Python/easygopigo3.py
#
import easygopigo3
import time
class RobotDerbyCar(easygopigo3.EasyGoPiGo3):
    """
    This class is used for controlling a `RobotDerbyCar`_ robot.
    With this class you can do the following things with your `RobotDerbyCar`_:

     * Drive your robot while avoiding obstacles
     * Inheriting all EasyGoPiGo3 functionality: https://github.com/DexterInd/GoPiGo3/blob/master/Software/Python/easygopigo3.py
     * Inheriting all GoPiGo3 functionality: https://github.com/DexterInd/GoPiGo3/blob/master/Software/Python/gopigo3.py
     * Set the grippers of the robot to Open or Close positions
    """

    def __init__(self):
        """
        Initializes the car hardware:

        :var CONST_GRIPPER_FULL_OPEN: gripper servo position when fully open
        :var CONST_GRIPPER_FULL_CLOSE: gripper servo position when fully closed
        :var CONST_GRIPPER_GRAB_POSITION: gripper servo position for grabbing a ball
        :var Easy_GPG: underlying EasyGoPiGo3 board instance
        :var gpgGripper1/gpgGripper2: gripper servos on servo ports 1 and 2
        :var my_distance_sensor: forward-facing laser distance sensor

        :raises IOError: when the GoPiGo3 board is not detected.
        :raises gopigo3.FirmwareVersionError: if the GoPiGo3 firmware needs updating.
        :raises Exception: for any other kind of exception.
        """
        # GoPiGo Color Codes
        self.YELLOW = (255, 255, 0)
        self.GREEN = (0, 255, 0)
        self.RED = (255, 0, 0)
        self.BLUE = (0, 0, 255)

        # Settings for cars in US Reston Office (these grippers were built differently)
        self.CONST_GRIPPER_FULL_OPEN = 90
        self.CONST_GRIPPER_FULL_CLOSE = 0
        self.CONST_GRIPPER_GRAB_POSITION = 40

        # Settings for cars in London Office (default method of assembly for grippers)
        #self.CONST_GRIPPER_FULL_OPEN = 180
        #self.CONST_GRIPPER_FULL_CLOSE = 20
        #self.CONST_GRIPPER_GRAB_POSITION = 120

        self.Easy_GPG = easygopigo3.EasyGoPiGo3() # Create an instance of the GoPiGo3 class. GPG will be the GoPiGo3 object.
        self.gpgGripper1 = easygopigo3.Servo("SERVO1", self.Easy_GPG)
        self.gpgGripper2 = easygopigo3.Servo("SERVO2", self.Easy_GPG)
        self.my_distance_sensor = self.Easy_GPG.init_distance_sensor()
        # Green right eye signals "ready / idle".
        self.SetCarStatusLED(self.GREEN)

    def SetCarStatusLED(self,color):
        # Status indicator: right "eye" LED.
        self.Easy_GPG.set_right_eye_color(color)
        self.Easy_GPG.open_right_eye()

    def SetCarModeLED(self,color):
        # Mode indicator: left "eye" LED.
        self.Easy_GPG.set_left_eye_color(color)
        self.Easy_GPG.open_left_eye()

    def SetBallModeLED(self,color):
        # Ball-mode indicator: the WiFi LED, fed as separate R, G, B values.
        self.Easy_GPG.set_led(self.Easy_GPG.LED_WIFI,color[0],color[1],color[2])

    def GripperClose(self):
        # Close both gripper servos to the ball-grab position.
        # Red status LED marks "busy" for the duration of the move.
        self.SetCarStatusLED(self.RED)
        self.gpgGripper1.rotate_servo(self.CONST_GRIPPER_GRAB_POSITION)
        self.gpgGripper2.rotate_servo(self.CONST_GRIPPER_GRAB_POSITION)
        self.SetCarStatusLED(self.GREEN)

    def GripperOpen(self):
        # Open both gripper servos fully.
        self.SetCarStatusLED(self.RED)
        self.gpgGripper1.rotate_servo(self.CONST_GRIPPER_FULL_OPEN)
        self.gpgGripper2.rotate_servo(self.CONST_GRIPPER_FULL_OPEN)
        self.SetCarStatusLED(self.GREEN)

    def ReadDistanceMM(self):
        # Distance to the nearest object ahead, in millimeters.
        return self.my_distance_sensor.read_mm()

    def ReadBatteryVoltage(self):
        # Battery voltage of the GoPiGo3 board, in volts.
        return self.Easy_GPG.get_voltage_battery()

    def set_speed(self,speed):
        # Set motor speed (degrees per second, per EasyGoPiGo3).
        self.SetCarStatusLED(self.RED)
        self.Easy_GPG.set_speed(speed)
        self.SetCarStatusLED(self.GREEN)

    def drive_cm(self,distance):
        # Drive the given distance in centimeters, blocking until done.
        self.SetCarStatusLED(self.RED)
        self.Easy_GPG.drive_cm(distance,True)
        self.SetCarStatusLED(self.GREEN)

    def turn_degrees(self,degress):
        # Turn in place by the given number of degrees, blocking until done.
        self.SetCarStatusLED(self.RED)
        self.Easy_GPG.turn_degrees(degress,True)
        self.SetCarStatusLED(self.GREEN)

    def drive(self,dist_requested,dist_limit):
        """
        Move the `GoPiGo3`_ forward / backward for ``dist_requested``
        millimeters, polling the distance sensor and stopping early if an
        obstacle comes within ``dist_limit`` millimeters.

        | Positive ``dist_requested`` moves forward, negative moves backward.
        | Returns True when the drive was cut short by an obstacle.
        """
        # Have we found any obstacles in the path
        ObstaclesFound = False
        # the number of degrees each wheel needs to turn
        WheelTurnDegrees = ((dist_requested / self.Easy_GPG.WHEEL_CIRCUMFERENCE) * 360)
        # get the starting position of each motor
        CurrentPositionLeft = self.Easy_GPG.get_motor_encoder(self.Easy_GPG.MOTOR_LEFT)
        CurrentPositionRight = self.Easy_GPG.get_motor_encoder(self.Easy_GPG.MOTOR_RIGHT)
        # determine the end position of each motor
        EndPositionLeft = CurrentPositionLeft + WheelTurnDegrees
        EndPositionRight = CurrentPositionRight + WheelTurnDegrees
        self.SetCarStatusLED(self.RED)
        self.Easy_GPG.set_motor_position(self.Easy_GPG.MOTOR_LEFT, EndPositionLeft)
        self.Easy_GPG.set_motor_position(self.Easy_GPG.MOTOR_RIGHT, EndPositionRight)
        while self.Easy_GPG.target_reached(EndPositionLeft, EndPositionRight) is False:
            # read the distance of the laser sensor
            dist_read = self.ReadDistanceMM()
            # stop car if there is an object within the limit
            if ((dist_read is not None) and (int(dist_read) <= int(dist_limit)) and (int(dist_requested) > int(dist_limit))):
                print("RobotDerbyCar.drive(): Obstacle Found. Stopping Car before requested distance. Object distance: " + str(dist_read))
                ObstaclesFound = True
                # Freeze the motors at their current positions.
                CurrentPositionLeft = self.Easy_GPG.get_motor_encoder(self.Easy_GPG.MOTOR_LEFT)
                CurrentPositionRight = self.Easy_GPG.get_motor_encoder(self.Easy_GPG.MOTOR_RIGHT)
                self.Easy_GPG.set_motor_position(self.Easy_GPG.MOTOR_LEFT, CurrentPositionLeft)
                self.Easy_GPG.set_motor_position(self.Easy_GPG.MOTOR_RIGHT, CurrentPositionRight)
                break
            # Poll at ~20 Hz.
            time.sleep(0.05)
        self.SetCarStatusLED(self.GREEN)
        return ObstaclesFound
| apache-2.0 |
bbockelm/condor-network-accounting | src/condor_procd/proc_family.cpp | 23802 | /***************************************************************
*
* Copyright (C) 1990-2007, Condor Team, Computer Sciences Department,
* University of Wisconsin-Madison, WI.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************/
#include "condor_common.h"
#include "proc_family.h"
#include "proc_family_monitor.h"
#include "procd_common.h"
#if !defined(WIN32)
#include "glexec_kill.unix.h"
#endif
#if defined(HAVE_EXT_LIBCGROUP)
#include "libcgroup.h"
#define FROZEN "FROZEN"
#define THAWED "THAWED"
#define BLOCK_STATS_LINE_MAX 64
#include <unistd.h>
long ProcFamily::clock_tick = sysconf( _SC_CLK_TCK );
#endif
// Constructs the bookkeeping record for one monitored process family,
// rooted at root_pid/root_birthday and watched by watcher_pid.
// Accumulated-usage counters start at zero and the member list starts
// empty; cgroup state (when built with libcgroup) starts unset until
// set_cgroup() is called.
ProcFamily::ProcFamily(ProcFamilyMonitor* monitor,
                       pid_t root_pid,
                       birthday_t root_birthday,
                       pid_t watcher_pid,
                       int max_snapshot_interval) :
	m_monitor(monitor),
	m_root_pid(root_pid),
	m_root_birthday(root_birthday),
	m_watcher_pid(watcher_pid),
	m_max_snapshot_interval(max_snapshot_interval),
	m_exited_user_cpu_time(0),
	m_exited_sys_cpu_time(0),
	m_max_image_size(0),
	m_member_list(NULL)
#if defined(HAVE_EXT_LIBCGROUP)
	, m_cgroup_string(""),
	m_cm(CgroupManager::getInstance())
#endif
{
#if !defined(WIN32)
	// No glexec proxy until set_proxy() is called.
	m_proxy = NULL;
#endif
}
ProcFamily::~ProcFamily()
{
	// Release every member record along with its cached procInfo.
	for (ProcFamilyMember* curr = m_member_list; curr != NULL; ) {
		ProcFamilyMember* doomed = curr;
		curr = curr->m_next;
		delete doomed->m_proc_info;
		delete doomed;
	}

#if !defined(WIN32)
	// Release the glexec proxy string, if one was ever set.
	if (m_proxy != NULL) {
		free(m_proxy);
	}
#endif
}
#if defined(HAVE_EXT_LIBCGROUP)
// Moves the given PID into this family's cgroup.  Safe to call even if
// the process is already a member of that cgroup.  If the memory
// controller is mounted, temporarily enables
// memory.move_charge_at_immigrate on the process's original cgroup so
// its memory accounting follows it, then restores the original setting.
// Returns the result of the final attach (0 on success), or 1 on an
// early failure.
int
ProcFamily::migrate_to_cgroup(pid_t pid)
{
	// Attempt to migrate a given process to a cgroup.
	// This can be done without regards to whether the
	// process is already in the cgroup
	if (!m_cgroup.isValid()) {
		return 1;
	}

	// We want to make sure task migration is turned on for the
	// associated memory controller.  So, we get to look up the original cgroup.
	//
	// If there is no memory controller present, we skip all this and just attempt a migrate
	int err;
	u_int64_t orig_migrate;
	bool changed_orig = false;
	char * orig_cgroup_string = NULL;
	struct cgroup * orig_cgroup;
	struct cgroup_controller * memory_controller;
	if (m_cm.isMounted(CgroupManager::MEMORY_CONTROLLER) && (err = cgroup_get_current_controller_path(pid, MEMORY_CONTROLLER_STR, &orig_cgroup_string))) {
		dprintf(D_PROCFAMILY,
			"Unable to determine current memory cgroup for PID %u (ProcFamily %u): %u %s\n",
			pid, m_root_pid, err, cgroup_strerror(err));
		return 1;
	}
	// We will migrate the PID to the new cgroup even if it is in the proper
	// memory controller cgroup: it is possible for the task to be in multiple
	// cgroups.
	if (m_cm.isMounted(CgroupManager::MEMORY_CONTROLLER) && (orig_cgroup_string != NULL) && (strcmp(m_cgroup_string.c_str(), orig_cgroup_string))) {
		// Yes, there are race conditions here - can't really avoid this.
		// Throughout this block, we can assume memory controller exists.
		// Get original value of migrate.
		orig_cgroup = cgroup_new_cgroup(orig_cgroup_string);
		ASSERT (orig_cgroup != NULL);
		if ((err = cgroup_get_cgroup(orig_cgroup))) {
			dprintf(D_PROCFAMILY,
				"Unable to read original cgroup %s (ProcFamily %u): %u %s\n",
				orig_cgroup_string, m_root_pid, err, cgroup_strerror(err));
			goto after_migrate;
		}
		if ((memory_controller = cgroup_get_controller(orig_cgroup, MEMORY_CONTROLLER_STR)) == NULL) {
			cgroup_free(&orig_cgroup);
			goto after_migrate;
		}
		if ((err = cgroup_get_value_uint64(memory_controller, "memory.move_charge_at_immigrate", &orig_migrate))) {
			if (err == ECGROUPVALUENOTEXIST) {
				// Older kernels don't have the ability to migrate memory accounting to the new cgroup.
				dprintf(D_PROCFAMILY,
					"This kernel does not support memory usage migration; cgroup %s memory statistics"
					" will be slightly incorrect (ProcFamily %u)\n",
					m_cgroup_string.c_str(), m_root_pid);
			} else {
				dprintf(D_PROCFAMILY,
					"Unable to read cgroup %s memory controller settings for "
					"migration (ProcFamily %u): %u %s\n",
					orig_cgroup_string, m_root_pid, err, cgroup_strerror(err));
			}
			cgroup_free(&orig_cgroup);
			goto after_migrate;
		}
		// Value 3 = move both anon pages and file pages with the task.
		if (orig_migrate != 3) {
			orig_cgroup = cgroup_new_cgroup(orig_cgroup_string);
			memory_controller = cgroup_add_controller(orig_cgroup, MEMORY_CONTROLLER_STR);
			ASSERT (memory_controller != NULL); // Memory controller must already exist
			cgroup_add_value_uint64(memory_controller, "memory.move_charge_at_immigrate", 3);
			if ((err = cgroup_modify_cgroup(orig_cgroup))) {
				// Not allowed to change settings
				dprintf(D_ALWAYS,
					"Unable to change cgroup %s memory controller settings for migration. "
					"Some memory accounting will be inaccurate (ProcFamily %u): %u %s\n",
					orig_cgroup_string, m_root_pid, err, cgroup_strerror(err));
			} else {
				changed_orig = true;
			}
		}
		cgroup_free(&orig_cgroup);
	}

after_migrate:

	orig_cgroup = NULL;
	err = cgroup_attach_task_pid(& const_cast<struct cgroup &>(m_cgroup.getCgroup()), pid);
	if (err) {
		dprintf(D_PROCFAMILY,
			"Cannot attach pid %u to cgroup %s for ProcFamily %u: %u %s\n",
			pid, m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
	}

	if (changed_orig) {
		// BUGFIX: the original condition skipped the restore when
		// cgroup_new_cgroup *succeeded* (and fell through to dereference
		// NULL when it failed).  Only skip the restore when the
		// allocation fails.
		if ((orig_cgroup = cgroup_new_cgroup(orig_cgroup_string)) == NULL) {
			goto after_restore;
		}
		if (((memory_controller = cgroup_add_controller(orig_cgroup, MEMORY_CONTROLLER_STR)) != NULL) &&
			(!cgroup_add_value_uint64(memory_controller, "memory.move_charge_at_immigrate", orig_migrate))) {
			cgroup_modify_cgroup(orig_cgroup);
		}
		cgroup_free(&orig_cgroup);
	}

after_restore:
	if (orig_cgroup_string != NULL) {
		free(orig_cgroup_string);
	}
	return err;
}
int
ProcFamily::set_cgroup(const std::string &cgroup_string)
{
if (cgroup_string == "/") {
dprintf(D_ALWAYS,
"Cowardly refusing to monitor the root cgroup out "
"of security concerns.\n");
return 1;
}
// Ignore this command if we've done this before.
if (m_cgroup.isValid()) {
if (cgroup_string == m_cgroup.getCgroupString()) {
return 0;
} else {
m_cgroup.destroy();
}
}
dprintf(D_PROCFAMILY, "Setting cgroup to %s for ProcFamily %u.\n",
cgroup_string.c_str(), m_root_pid);
m_cm.create(cgroup_string, m_cgroup, CgroupManager::ALL_CONTROLLERS, CgroupManager::NO_CONTROLLERS);
m_cgroup_string = m_cgroup.getCgroupString();
if (!m_cgroup.isValid()) {
return 1;
}
// Now that we have a cgroup, let's move all the existing processes to it
ProcFamilyMember* member = m_member_list;
while (member != NULL) {
migrate_to_cgroup(member->get_proc_info()->pid);
member = member->m_next;
}
return 0;
}
// Writes the given state (FROZEN or THAWED) to this family's freezer
// cgroup.
//
// According to kernel docs, freezer will either succeed or return EBUSY
// in the errno.  This function returns 0 (success), a positive value
// (fatal error) or -EBUSY (kernel couldn't freeze atomically).
int
ProcFamily::freezer_cgroup(const char * state)
{
	int err = 0;
	struct cgroup_controller* freezer;
	struct cgroup *cgroup = cgroup_new_cgroup(m_cgroup_string.c_str());
	ASSERT (cgroup != NULL);

	if (!m_cm.isMounted(CgroupManager::FREEZE_CONTROLLER)) {
		err = 1;
		goto ret;
	}

	freezer = cgroup_add_controller(cgroup, FREEZE_CONTROLLER_STR);
	if (NULL == freezer) {
		dprintf(D_ALWAYS,
			"Unable to access the freezer subsystem for ProcFamily %u "
			"for cgroup %s\n",
			m_root_pid, m_cgroup_string.c_str());
		err = 2;
		goto ret;
	}

	if ((err = cgroup_add_value_string(freezer, "freezer.state", state))) {
		dprintf(D_ALWAYS,
			"Unable to write %s to freezer for cgroup %s (ProcFamily %u). %u %s\n",
			state, m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
		err = 3;
		goto ret;
	}
	if ((err = cgroup_modify_cgroup(cgroup))) {
		if (ECGROUPVALUENOTEXIST == err) {
			dprintf(D_ALWAYS,
				"Does not appear condor_procd is allowed to freeze"
				" cgroup %s (ProcFamily %u).\n",
				m_cgroup_string.c_str(), m_root_pid);
		} else if ((ECGOTHER == err) && (EBUSY == cgroup_get_last_errno())) {
			dprintf(D_ALWAYS, "Kernel was unable to freeze cgroup %s "
				"(ProcFamily %u) due to process state; signal delivery "
				"won't be atomic\n", m_cgroup_string.c_str(), m_root_pid);
			// BUGFIX: jump straight to cleanup so the -EBUSY result
			// actually reaches the caller; previously the unconditional
			// 'err = 4' below clobbered it, and spree_cgroup()'s
			// (err != -EBUSY) check could never see it.
			err = -EBUSY;
			goto ret;
		} else {
			dprintf(D_ALWAYS,
				"Unable to commit freezer change %s for cgroup %s (ProcFamily %u). %u %s\n",
				state, m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
		}
		err = 4;
		goto ret;
	}

ret:
	cgroup_free(&cgroup);
	return err;
}
// Delivers 'sig' to every task in this family's cgroup.  The cgroup is
// frozen first so the signal is delivered atomically, then thawed.
// If the freeze could be attempted but not fully completed (-EBUSY),
// we proceed anyway, non-atomically.  Returns 0 on success.
int
ProcFamily::spree_cgroup(int sig)
{
	int err = freezer_cgroup(FROZEN);
	if ((err != 0) && (err != -EBUSY)) {
		return err;
	}

	ASSERT (m_cgroup.isValid());
	cgroup_get_cgroup(&const_cast<struct cgroup&>(m_cgroup.getCgroup()));

	// BUGFIX: the iteration handle used to be heap-allocated and was
	// leaked when cgroup_get_task_begin failed (the pointer was set to
	// NULL without being freed).  A stack variable removes the leak and
	// the allocation entirely.
	void* handle = NULL;
	pid_t pid;
	err = cgroup_get_task_begin(m_cgroup_string.c_str(), FREEZE_CONTROLLER_STR, &handle, &pid);
	// On a hard begin-failure the iteration never started, so
	// cgroup_get_task_end must not be called (matches the original
	// behavior, which skipped the end call in that case).
	bool close_iteration = !((err > 0) && (err != ECGEOF));
	while (err != ECGEOF) {
		if (err > 0) {
			dprintf(D_ALWAYS,
				"Unable to iterate through cgroup %s (ProcFamily %u): %u %s\n",
				m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
			goto release;
		}
		send_signal(pid, sig);
		err = cgroup_get_task_next(&handle, &pid);
	}
	err = 0;

release:
	if (close_iteration) {
		cgroup_get_task_end(&handle);
	}

	freezer_cgroup(THAWED);

	return err;
}
// Counts the tasks currently in this family's cgroup (via the cpuacct
// controller).  Returns the count, -1 if the controller/cgroup is
// unavailable, or a negative libcgroup error code on iteration failure.
int
ProcFamily::count_tasks_cgroup()
{
	if (!m_cm.isMounted(CgroupManager::CPUACCT_CONTROLLER) || !m_cgroup.isValid()) {
		return -1;
	}

	int tasks = 0, err = 0;
	pid_t pid;
	// Stack-allocated iteration handle (the original heap-allocated one
	// was pure overhead, and its ASSERT was missing a semicolon).
	void* handle = NULL;
	err = cgroup_get_task_begin(m_cgroup_string.c_str(), CPUACCT_CONTROLLER_STR, &handle, &pid);
	while (err != ECGEOF) {
		if (err > 0) {
			dprintf(D_PROCFAMILY,
				"Unable to read cgroup %s memory stats (ProcFamily %u): %u %s.\n",
				m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
			break;
		}
		tasks ++;
		err = cgroup_get_task_next(&handle, &pid);
	}
	// ECGEOF simply marks the end of the task list, not an error.
	if (err == ECGEOF) {
		err = 0;
	}
	// As in the original code, we only close the iteration when the
	// library populated the handle (assumed NULL on begin-failure).
	if (handle) {
		cgroup_get_task_end(&handle);
	}
	if (err) {
		return -err;
	}
	return tasks;
}
// Extracts the statistic called 'name' from a cgroup_stat entry into
// *result.  Returns true only when the entry's name matches and its
// value parses cleanly as an unsigned 64-bit integer; on a parse
// failure the bad value is logged and *result is left untouched.
bool
_check_stat_uint64(const struct cgroup_stat &stats, const char* name, u_int64_t* result){
	if (strcmp(name, stats.name) != 0) {
		return false;
	}
	errno = 0;
	u_int64_t parsed = (u_int64_t)strtoll(stats.value, NULL, 0);
	if (errno != 0) {
		dprintf(D_PROCFAMILY,
			"Invalid cgroup stat %s for %s.\n",
			stats.value, name);
		return false;
	}
	*result = parsed;
	return true;
}
void
ProcFamily::update_max_image_size_cgroup()
{
if (!m_cm.isMounted(CgroupManager::MEMORY_CONTROLLER) || !m_cgroup.isValid()) {
return;
}
int err;
u_int64_t max_image;
struct cgroup_controller *memct;
Cgroup memcg;
if (m_cm.create(m_cgroup_string, memcg, CgroupManager::MEMORY_CONTROLLER, CgroupManager::MEMORY_CONTROLLER) ||
!memcg.isValid()) {
dprintf(D_PROCFAMILY,
"Unable to create cgroup %s (ProcFamily %u).\n",
m_cgroup_string.c_str(), m_root_pid);
return;
}
if ((memct = cgroup_get_controller(&const_cast<struct cgroup &>(memcg.getCgroup()), MEMORY_CONTROLLER_STR)) == NULL) {
dprintf(D_PROCFAMILY,
"Unable to load memory controller for cgroup %s (ProcFamily %u).\n",
m_cgroup_string.c_str(), m_root_pid);
return;
}
if ((err = cgroup_get_value_uint64(memct, "memory.memsw.max_usage_in_bytes", &max_image))) {
dprintf(D_PROCFAMILY,
"Unable to load max memory usage for cgroup %s (ProcFamily %u): %u %s\n",
m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
return;
}
m_max_image_size = max_image/1024;
}
// Reads blkio.io_service_bytes from this family's cgroup and stores the
// summed "Read" and "Write" byte counts into 'usage'.  Returns 0 on
// success, 1 if the block controller is unavailable or iteration fails.
int
ProcFamily::aggregate_usage_cgroup_blockio(ProcFamilyUsage* usage)
{
	if (!m_cm.isMounted(CgroupManager::BLOCK_CONTROLLER) || !m_cgroup.isValid())
		return 1;

	int ret;
	// BUGFIX: initialize the iteration handle.  If cgroup_read_value_begin
	// fails without setting it, the cleanup check below would otherwise
	// read an uninitialized pointer (undefined behavior).
	void *handle = NULL;
	char line_contents[BLOCK_STATS_LINE_MAX], sep[]=" ", *tok_handle, *word, *info[3];
	char blkio_stats_name[] = "blkio.io_service_bytes";
	short ctr;
	long int read_bytes=0, write_bytes=0;
	ret = cgroup_read_value_begin(BLOCK_CONTROLLER_STR, m_cgroup_string.c_str(),
		blkio_stats_name, &handle, line_contents, BLOCK_STATS_LINE_MAX);
	while (ret == 0) {
		// Each line is expected to carry three whitespace-separated
		// fields; the second names the operation ("Read"/"Write"/...),
		// the third is the byte count.
		ctr = 0;
		word = strtok_r(line_contents, sep, &tok_handle);
		while (word && ctr < 3) {
			info[ctr++] = word;
			word = strtok_r(NULL, sep, &tok_handle);
		}
		if (ctr == 3) {
			errno = 0;
			long ctrval = strtol(info[2], NULL, 10);
			if (errno) {
				dprintf(D_FULLDEBUG, "Error parsing kernel value to a long: %s; %s\n",
					info[2], strerror(errno));
				break;
			}
			if (strcmp(info[1], "Read") == 0) {
				read_bytes += ctrval;
			} else if (strcmp(info[1], "Write") == 0) {
				write_bytes += ctrval;
			}
		}
		ret = cgroup_read_value_next(&handle, line_contents, BLOCK_STATS_LINE_MAX);
	}
	if (handle != NULL) {
		cgroup_read_value_end(&handle);
	}

	if (ret != ECGEOF) {
		dprintf(D_ALWAYS, "Internal cgroup error when retrieving block statistics: %s\n", cgroup_strerror(ret));
		return 1;
	}

	usage->block_read_bytes = read_bytes;
	usage->block_write_bytes = write_bytes;

	return 0;
}
// Folds this family's cgroup statistics (memory, CPU, block I/O, task
// count) into 'usage'.  Returns 0 on success, -1 when the required
// controllers are unavailable, or a positive value if the task count
// could not be read.
int
ProcFamily::aggregate_usage_cgroup(ProcFamilyUsage* usage)
{
	if (!m_cm.isMounted(CgroupManager::MEMORY_CONTROLLER) || !m_cm.isMounted(CgroupManager::CPUACCT_CONTROLLER)
			|| !m_cgroup.isValid()) {
		return -1;
	}

	int err;
	struct cgroup_stat stats;
	void **handle;
	u_int64_t tmp = 0, image = 0;
	bool found_rss = false;

	// Walk the memory controller's stats.  The image size is summed (in
	// bytes) from total_rss, total_mapped_file and total_swap.
	handle = (void **)malloc(sizeof(void*));
	ASSERT (handle != NULL);
	*handle = NULL;
	err = cgroup_read_stats_begin(MEMORY_CONTROLLER_STR, m_cgroup_string.c_str(), handle, &stats);
	while (err != ECGEOF) {
		if (err > 0) {
			dprintf(D_PROCFAMILY,
				"Unable to read cgroup %s memory stats (ProcFamily %u): %u %s.\n",
				m_cgroup_string.c_str(), m_root_pid, err, cgroup_strerror(err));
			break;
		}
		if (_check_stat_uint64(stats, "total_rss", &tmp)) {
			image += tmp;
			usage->total_resident_set_size = tmp/1024;
			found_rss = true;
		} else if (_check_stat_uint64(stats, "total_mapped_file", &tmp)) {
			image += tmp;
		} else if (_check_stat_uint64(stats, "total_swap", &tmp)) {
			image += tmp;
		}
		err = cgroup_read_stats_next(handle, &stats);
	}
	if (*handle != NULL) {
		cgroup_read_stats_end(handle);
	}
	if (found_rss) {
		usage->total_image_size = image/1024;
	} else {
		dprintf(D_PROCFAMILY,
			"Unable to find all necesary memory structures for cgroup %s"
			" (ProcFamily %u)\n",
			m_cgroup_string.c_str(), m_root_pid);
	}

	// The poor man's way of updating the max image size.
	// BUGFIX: compare in KiB.  'image' is in bytes while m_max_image_size
	// is tracked in KiB everywhere else (see update_max_image_size_cgroup),
	// so the old bytes-vs-KiB comparison effectively always fired and
	// recorded a value 1024x too large.
	if (image/1024 > m_max_image_size) {
		m_max_image_size = image/1024;
	}
	// Try updating the max size using cgroups
	update_max_image_size_cgroup();

	// Update CPU.  The raw counters are divided by the system clock-tick
	// rate (clock_tick) to obtain seconds.
	*handle = NULL;
	err = cgroup_read_stats_begin(CPUACCT_CONTROLLER_STR, m_cgroup_string.c_str(), handle, &stats);
	while (err != ECGEOF) {
		if (err > 0) {
			dprintf(D_PROCFAMILY,
				"Unable to read cgroup %s cpuacct stats (ProcFamily %u): %s.\n",
				m_cgroup_string.c_str(), m_root_pid, cgroup_strerror(err));
			break;
		}
		if (_check_stat_uint64(stats, "user", &tmp)) {
			usage->user_cpu_time = tmp/clock_tick;
		} else if (_check_stat_uint64(stats, "system", &tmp)) {
			usage->sys_cpu_time = tmp/clock_tick;
		}
		err = cgroup_read_stats_next(handle, &stats);
	}
	if (*handle != NULL) {
		cgroup_read_stats_end(handle);
	}
	free(handle);

	aggregate_usage_cgroup_blockio(usage);

	// Finally, update the list of tasks
	if ((err = count_tasks_cgroup()) < 0) {
		return -err;
	} else {
		usage->num_procs = err;
	}
	return 0;
}
#endif
unsigned long
ProcFamily::update_max_image_size(unsigned long children_imgsize)
{
// add image sizes from our processes to the total image size from
// our child families
//
unsigned long imgsize = children_imgsize;
ProcFamilyMember* member = m_member_list;
while (member != NULL) {
#if defined(WIN32)
// comment copied from the older process tracking logic
// (before the ProcD):
// On Win32, the imgsize from ProcInfo returns exactly
// what it says.... this means we get all the bytes mapped
// into the process image, incl all the DLLs. This means
// a little program returns at least 15+ megs. The ProcInfo
// rssize is much closer to what the TaskManager reports,
// which makes more sense for now.
imgsize += member->m_proc_info->rssize;
#else
imgsize += member->m_proc_info->imgsize;
#endif
member = member->m_next;
}
// update m_max_image_size if we have a new max
//
if (imgsize > m_max_image_size) {
m_max_image_size = imgsize;
}
// finally, return our _current_ total image size
//
return imgsize;
}
// Accumulates this family's resource usage into 'usage': CPU time,
// percent CPU, image/RSS (and PSS where available) for every live
// member, plus the CPU time banked from members that already exited.
// With libcgroup support, cgroup-derived statistics are folded in last
// and may overwrite the per-process CPU/memory figures.
void
ProcFamily::aggregate_usage(ProcFamilyUsage* usage)
{
	ASSERT(usage != NULL);

	// factor in usage from processes that are still alive
	//
	ProcFamilyMember* member = m_member_list;
	while (member != NULL) {

		// cpu
		//
		usage->user_cpu_time += member->m_proc_info->user_time;
		usage->sys_cpu_time += member->m_proc_info->sys_time;
		usage->percent_cpu += member->m_proc_info->cpuusage;

		// current total image size
		//
		usage->total_image_size += member->m_proc_info->imgsize;
		usage->total_resident_set_size += member->m_proc_info->rssize;

#if HAVE_PSS
		// PSS is special: it's expensive to calculate for every process,
		// so we calculate it on demand
		int status; // Is ignored
		int rc = ProcAPI::getPSSInfo(member->m_proc_info->pid, *(member->m_proc_info), status);
		if( (rc == PROCAPI_SUCCESS) && (member->m_proc_info->pssize_available) ) {
			usage->total_proportional_set_size_available = true;
			usage->total_proportional_set_size += member->m_proc_info->pssize;
		}
#endif

		// number of alive processes
		//
		usage->num_procs++;

		member = member->m_next;
	}

	// factor in CPU usage from processes that have exited
	//
	usage->user_cpu_time += m_exited_user_cpu_time;
	usage->sys_cpu_time += m_exited_sys_cpu_time;

#if defined(HAVE_EXT_LIBCGROUP)
	aggregate_usage_cgroup(usage);
#endif
}
// Sends 'sig' to the family's root process only.  On non-Windows
// builds, if a glexec proxy was registered via set_proxy(), delivery is
// delegated to glexec_kill with that proxy instead of a direct signal.
void
ProcFamily::signal_root(int sig)
{
#if !defined(WIN32)
	if (m_proxy != NULL) {
		glexec_kill(m_proxy, m_root_pid, sig);
		return;
	}
#endif

	send_signal(m_root_pid, sig);
}
void
ProcFamily::spree(int sig)
{
#if defined(HAVE_EXT_LIBCGROUP)
if ((m_cgroup.isValid()) && (0 == spree_cgroup(sig))) {
return;
}
#endif
ProcFamilyMember* member;
for (member = m_member_list; member != NULL; member = member->m_next) {
#if !defined(WIN32)
if (m_proxy != NULL) {
glexec_kill(m_proxy,
member->m_proc_info->pid,
sig);
continue;
}
#endif
send_signal(member->m_proc_info->pid, sig);
}
}
// Registers 'pi' as a new member of this family: wraps it in a
// ProcFamilyMember pushed onto the front of our list, migrates the
// process into our cgroup (when built with libcgroup), and updates the
// monitor's pid hash table.  Takes ownership of 'pi'.
void
ProcFamily::add_member(procInfo* pi)
{
	ProcFamilyMember* newbie = new ProcFamilyMember;
	newbie->m_family = this;
	newbie->m_proc_info = pi;
	newbie->m_still_alive = false;

	// push onto the front of our member list
	//
	newbie->m_prev = NULL;
	newbie->m_next = m_member_list;
	if (m_member_list != NULL) {
		m_member_list->m_prev = newbie;
	}
	m_member_list = newbie;

#if defined(HAVE_EXT_LIBCGROUP)
	// Add to the associated cgroup
	migrate_to_cgroup(pi->pid);
#endif

	// keep our monitor's hash table up to date!
	m_monitor->add_member(newbie);
}
// Prunes members whose processes are no longer on the system.  The
// monitor marks m_still_alive on each member it saw in the latest
// snapshot; anything left unmarked here has exited.  CPU time of the
// departed is banked in m_exited_{user,sys}_cpu_time so that
// aggregate_usage() continues to account for it.
void
ProcFamily::remove_exited_processes()
{
	// our monitor will have marked the m_still_alive field of all
	// processes that are still alive on the system, so all remaining
	// processes have exited and need to be removed
	//
	ProcFamilyMember* member = m_member_list;
	ProcFamilyMember* prev = NULL;
	ProcFamilyMember** prev_ptr = &m_member_list;
	while (member != NULL) {
		ProcFamilyMember* next_member = member->m_next;
		if (!member->m_still_alive) {

			// HACK for logging: if our root pid is 0, we
			// know this to mean that we are actually the
			// family that holds all processes that aren't
			// in the monitored family; this hack should go
			// away when we pull out a separate ProcGroup
			// class
			//
			if (m_root_pid != 0) {
				dprintf(D_ALWAYS,
				        "process %u (of family %u) has exited\n",
				        member->m_proc_info->pid,
				        m_root_pid);
			}
			else {
				dprintf(D_ALWAYS,
				        "process %u (not in monitored family) has exited\n",
				        member->m_proc_info->pid);
			}

			// save CPU usage from this process
			//
			m_exited_user_cpu_time +=
				member->m_proc_info->user_time;
			m_exited_sys_cpu_time +=
				member->m_proc_info->sys_time;

			// keep our monitor's hash table up to date!
			//
			m_monitor->remove_member(member);

			// unlink this member: prev_ptr points at whichever slot
			// (list head or previous node's m_next) referenced it
			//
			*prev_ptr = next_member;
			if (next_member != NULL) {
				next_member->m_prev = prev;
			}
			delete member->m_proc_info;
			delete member;
		}
		else {
			// clear still_alive bit for next time around
			//
			member->m_still_alive = false;

			// update our "previous" list data to point to
			// the current member
			//
			prev = member;
			prev_ptr = &member->m_next;
		}

		// advance to the next member in the list
		//
		member = next_member;
	}
}
// Merges this family's state into 'parent': the banked CPU time of our
// exited processes is added to the parent's, and our entire member list
// is spliced onto the front of the parent's list (each member re-pointed
// at the parent).  Afterwards this family's member list is empty.
void
ProcFamily::fold_into_parent(ProcFamily* parent)
{
	// fold in CPU usage info from our dead processes
	//
	parent->m_exited_user_cpu_time += m_exited_user_cpu_time;
	parent->m_exited_sys_cpu_time += m_exited_sys_cpu_time;

	// nothing left to do if our member list is empty
	//
	if (m_member_list == NULL) {
		return;
	}

	// traverse our list, pointing all members at their
	// new family
	//
	ProcFamilyMember* member = m_member_list;
	while (member->m_next != NULL) {
		member->m_family = parent;
		member = member->m_next;
	}
	member->m_family = parent;  // 'member' is now our last node

	// attach the end of our list to the beginning of
	// the parent's
	//
	member->m_next = parent->m_member_list;
	if (member->m_next != NULL) {
		member->m_next->m_prev = member;
	}

	// make our beginning the beginning of the parent's
	// list, and then make our list empty
	//
	parent->m_member_list = m_member_list;
	m_member_list = NULL;
}
#if !defined(WIN32)
// Stores a private copy of the glexec proxy string used by
// signal_root()/spree() to deliver signals via glexec.  Any previously
// stored proxy is released.
void
ProcFamily::set_proxy(char* proxy)
{
	char* copy = strdup(proxy);
	ASSERT(copy != NULL);
	if (m_proxy != NULL) {
		free(m_proxy);
	}
	m_proxy = copy;
}
#endif
// Appends a snapshot (pid, ppid, birthday, CPU times) of every current
// member to the caller-supplied dump structure.
void
ProcFamily::dump(ProcFamilyDump& fam)
{
	for (ProcFamilyMember* m = m_member_list; m != NULL; m = m->m_next) {
		ProcFamilyProcessDump proc;
		proc.pid = m->m_proc_info->pid;
		proc.ppid = m->m_proc_info->ppid;
		proc.birthday = m->m_proc_info->birthday;
		proc.user_time = m->m_proc_info->user_time;
		proc.sys_time = m->m_proc_info->sys_time;
		fam.procs.push_back(proc);
	}
}
| apache-2.0 |
tainanod/TainanWelfare-android | src/src/tw/gov/tainan/tainanwelfare/DialogFragmentLandmarkDetail.java | 5574 | package tw.gov.tainan.tainanwelfare;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Set;
import tw.gov.tainan.tainanwelfare.R;
import tw.gov.tainan.tainanwelfare.comm.Util;
import tw.gov.tainan.tainanwelfare.dbentity.LandmarkDBEntity;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
// Dialog fragment showing the detail view of a single landmark: its
// name, rating, address, phone number and extra info, with buttons to
// locate it on the map, navigate to it, dial it, and close the dialog.
// (The original source comments were Big5-encoded Chinese and have been
// translated to English; the user-facing string literals are left
// byte-for-byte untouched.)
//
// NOTE(review): a fragment constructor with arguments prevents the
// framework from re-creating the fragment (hence @SuppressLint below);
// a newInstance()+arguments-Bundle pattern would be safer — left as-is
// to preserve the public interface.
@SuppressLint("ValidFragment")
public class DialogFragmentLandmarkDetail extends DialogFragment{
	private Activity activity;
	private Context context;
	// The landmark record whose details are displayed.
	private LandmarkDBEntity data;
	private TextView TVName;
	private TextView TVScore;
	private TextView TVAddress;
	private TextView TVPhone;
	private TextView TVInfo;
	private ImageView IVLoaction;
	private ImageView IVNavi;
	private ImageView IVhPhone;
	private ImageView IVClose;
	// When true, the "locate" button is rendered disabled and inert.
	private boolean disableIVLoaction;

	public DialogFragmentLandmarkDetail(LandmarkDBEntity data, Activity activity, Context context, boolean disableIVLoaction){
		this.data = data;
		this.activity = activity;
		this.context = context;
		this.disableIVLoaction = disableIVLoaction;
	}

	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container,
			Bundle savedInstanceState) {
		// Present the dialog full-screen without a title bar.
		getDialog().getWindow().requestFeature(Window.FEATURE_NO_TITLE);
		getDialog().getWindow().setFlags(
				WindowManager.LayoutParams.FLAG_FULLSCREEN,
				WindowManager.LayoutParams.FLAG_FULLSCREEN);
		View rootView = inflater.inflate(R.layout.dialogfragment_landmarkdetail, container);
		TVName = (TextView) rootView.findViewById(R.id.TVName);
		TVScore = (TextView) rootView.findViewById(R.id.TVScore);
		TVAddress = (TextView) rootView.findViewById(R.id.TVAddress);
		TVPhone = (TextView) rootView.findViewById(R.id.TVPhone);
		TVInfo = (TextView) rootView.findViewById(R.id.TVInfo);
		IVLoaction = (ImageView) rootView.findViewById(R.id.IVLoaction);
		IVNavi = (ImageView) rootView.findViewById(R.id.IVNavi);
		IVhPhone = (ImageView) rootView.findViewById(R.id.IVhPhone);
		IVClose = (ImageView) rootView.findViewById(R.id.IVClose);

		TVName.setText(data.getName());
		// Hide the score row entirely when no rating is available.
		if(data.getScore().length() > 0){
			TVScore.setText("µûµ¥¡G" + data.getScore());
		}
		else{
			TVScore.setVisibility(View.GONE);
		}
		TVAddress.setText("¦a§}¡G" + data.getAddress());
		// A phone value equal to the literal string "null" means no
		// number is available for this landmark.
		if(!data.getPhone().equalsIgnoreCase("null")){
			TVPhone.setText("¹q¸Ü:" + data.getPhone());
			// Dial button: dismiss the dialog and place a call.
			IVhPhone.setOnClickListener(new OnClickListener() {
				@Override
				public void onClick(View v) {
					getDialog().dismiss();
					Intent intent = new Intent(Intent.ACTION_CALL, Uri.parse("tel:" + data.getPhone()));
					startActivity(intent);
				}
			});
		}
		else{
			TVPhone.setVisibility(View.GONE);
			// Dial button with no number available: show an alert
			// explaining that no phone info was provided.
			IVhPhone.setOnClickListener(new OnClickListener() {
				@Override
				public void onClick(View v) {
					AlertDialog.Builder builder = new AlertDialog.Builder(activity);
					builder.setTitle("¨t²Î´£¿ô");
					builder.setMessage("µLªk¼·¥´¹q¸Ü¡A¸Ó³æ¦ì©|¥¼´£¨Ñ¹q¸Ü¸ê°T");
					builder.setNeutralButton("½T»{", new DialogInterface.OnClickListener() {
						@Override
						public void onClick(DialogInterface dialog, int which) {
							dialog.dismiss();
						}
					});
					builder.show();
				}
			});
		}
		// Render the landmark's extra info map, one "key: value" per line.
		StringBuilder string1 = new StringBuilder();
		if(data.getInfo() != null){
			for(String key : data.getInfo().keySet()){
				string1.append(key + ": " + data.getInfo().get(key) + "\n");
			}
			TVInfo.setText(string1);
		}
		// Close button.
		IVClose.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				getDialog().dismiss();
			}
		});
		// Navigation button: open a geo: URI centered on the current
		// position with the landmark as the labeled query target.
		IVNavi.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				getDialog().dismiss();
				String label = data.getName();
				String uriBegin = "geo:" + Util.currentLatitude + "," + Util.currentLongitude;
				String query = data.getLatitude() + "," + data.getLongitude() + "(" + label + ")";
				String encodedQuery = Uri.encode(query);
				String uriString = uriBegin + "?q=" + encodedQuery + "&z=16";
				Uri uri = Uri.parse(uriString);
				Intent mapIntent = new Intent(android.content.Intent.ACTION_VIEW, uri);
				startActivity(mapIntent);
			}
		});
		// Locate button: open the landmark-map screen, unless disabled.
		if(disableIVLoaction){
			IVLoaction.setBackgroundResource(R.drawable.detailedicon_01disable);
			IVLoaction.setOnClickListener(null);
		}
		else{
			IVLoaction.setOnClickListener(new OnClickListener() {
				@Override
				public void onClick(View v) {
					getDialog().dismiss();
					Intent intent = new Intent(activity, FragemtActivityLandmarkMap.class);
					Bundle bundle = new Bundle();
					bundle.putSerializable("data", data);
					intent.putExtras(bundle);
					startActivity(intent);
				}
			});
		}
		return rootView;
	}
}
| apache-2.0 |
GoogleChromeLabs/chromeos_smart_card_connector | example_cpp_smart_card_client_app/src/chrome_certificate_provider/api_bridge.cc | 9613 | // Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "chrome_certificate_provider/api_bridge.h"
#include <thread>
#include <utility>
#include <google_smart_card_common/global_context.h>
#include <google_smart_card_common/logging/function_call_tracer.h>
#include <google_smart_card_common/logging/logging.h>
#include <google_smart_card_common/requesting/remote_call_arguments_conversion.h>
#include <google_smart_card_common/requesting/remote_call_message.h>
#include <google_smart_card_common/requesting/request_result.h>
#include <google_smart_card_common/unique_ptr_utils.h>
#include <google_smart_card_common/value.h>
#include <google_smart_card_common/value_conversion.h>
namespace scc = smart_card_client;
namespace gsc = google_smart_card;
namespace smart_card_client {
namespace chrome_certificate_provider {
namespace {
// These constants must match the ones in bridge-backend.js.
// Name under which this (executable) side issues requests to the JS
// backend.
constexpr char kRequesterName[] =
    "certificate_provider_request_from_executable";
// Name under which the JS backend issues requests to this side.
constexpr char kRequestReceiverName[] =
    "certificate_provider_request_to_executable";
// Remote function names dispatched when handling incoming requests.
constexpr char kHandleCertificatesRequestFunctionName[] =
    "HandleCertificatesRequest";
constexpr char kHandleSignatureRequestFunctionName[] = "HandleSignatureRequest";
// Prefix used when logging chrome.certificateProvider API calls.
constexpr char kFunctionCallLoggingPrefix[] = "chrome.certificateProvider.";
// Runs a certificates request through the registered handler and
// reports either the certificate list or a generic failure back through
// 'result_callback'.  When 'request_handling_mutex' is non-null, the
// whole operation is serialized under it.
void ProcessCertificatesRequest(
    std::weak_ptr<CertificatesRequestHandler> certificates_request_handler,
    std::shared_ptr<std::mutex> request_handling_mutex,
    gsc::RequestReceiver::ResultCallback result_callback) {
  std::unique_lock<std::mutex> lock;
  if (request_handling_mutex)
    lock = std::unique_lock<std::mutex>(*request_handling_mutex);

  GOOGLE_SMART_CARD_LOG_DEBUG << "Processing certificates request...";
  const std::shared_ptr<CertificatesRequestHandler> handler =
      certificates_request_handler.lock();
  GOOGLE_SMART_CARD_CHECK(handler);
  std::vector<ClientCertificateInfo> certificates;
  if (!handler->HandleRequest(&certificates)) {
    result_callback(gsc::GenericRequestResult::CreateFailed("Failure"));
    return;
  }
  gsc::Value response(gsc::Value::Type::kArray);
  response.GetArray().push_back(gsc::MakeUnique<gsc::Value>(
      gsc::ConvertToValueOrDie(std::move(certificates))));
  result_callback(
      gsc::GenericRequestResult::CreateSuccessful(std::move(response)));
}
// Runs a signature (sign digest) request through the registered handler
// and delivers either the signature bytes or a generic failure back
// through 'result_callback'.  When 'request_handling_mutex' is
// non-null, the whole operation is serialized under it.
void ProcessSignatureRequest(
    std::weak_ptr<SignatureRequestHandler> signature_request_handler,
    const SignatureRequest& signature_request,
    std::shared_ptr<std::mutex> request_handling_mutex,
    gsc::RequestReceiver::ResultCallback result_callback) {
  std::unique_lock<std::mutex> lock;
  if (request_handling_mutex)
    lock = std::unique_lock<std::mutex>(*request_handling_mutex);

  GOOGLE_SMART_CARD_LOG_DEBUG << "Processing sign digest request...";
  const std::shared_ptr<SignatureRequestHandler> handler =
      signature_request_handler.lock();
  GOOGLE_SMART_CARD_CHECK(handler);
  std::vector<uint8_t> signature;
  if (!handler->HandleRequest(signature_request, &signature)) {
    result_callback(gsc::GenericRequestResult::CreateFailed("Failure"));
    return;
  }
  gsc::Value response(gsc::Value::Type::kArray);
  response.GetArray().push_back(gsc::MakeUnique<gsc::Value>(
      gsc::ConvertToValueOrDie(std::move(signature))));
  result_callback(
      gsc::GenericRequestResult::CreateSuccessful(std::move(response)));
}
} // namespace
// Builds the bridge: a requester for outgoing calls to the JS side (with
// a remote-call adaptor wrapped around it) and a request receiver for
// incoming JS requests, both registered on the given message router.
// 'request_handling_mutex' (may be null) serializes incoming-request
// handling when provided.
ApiBridge::ApiBridge(gsc::GlobalContext* global_context,
                     gsc::TypedMessageRouter* typed_message_router,
                     std::shared_ptr<std::mutex> request_handling_mutex)
    : request_handling_mutex_(request_handling_mutex),
      requester_(kRequesterName, global_context, typed_message_router),
      remote_call_adaptor_(&requester_),
      request_receiver_(new gsc::JsRequestReceiver(kRequestReceiverName,
                                                   this,
                                                   global_context,
                                                   typed_message_router)) {}

ApiBridge::~ApiBridge() = default;
// Detaches the bridge from the messaging channels: no new outgoing
// requests are started and incoming requests stop being delivered.
void ApiBridge::ShutDown() {
  requester_.ShutDown();
  request_receiver_->ShutDown();
}
// Registers the handler invoked when the JS side asks for the current
// certificate list.
void ApiBridge::SetCertificatesRequestHandler(
    std::weak_ptr<CertificatesRequestHandler> handler) {
  certificates_request_handler_ = handler;
}

// Drops the previously registered certificates handler, if any.
void ApiBridge::RemoveCertificatesRequestHandler() {
  certificates_request_handler_.reset();
}

// Registers the handler invoked when the JS side asks for a signature.
void ApiBridge::SetSignatureRequestHandler(
    std::weak_ptr<SignatureRequestHandler> handler) {
  signature_request_handler_ = handler;
}

// Drops the previously registered signature handler, if any.
void ApiBridge::RemoveSignatureRequestHandler() {
  signature_request_handler_.reset();
}
// Pushes the given certificate list to the JS side via the
// "setCertificates" remote call.  Blocks until the call completes.
void ApiBridge::SetCertificates(
    const std::vector<ClientCertificateInfo>& certificates) {
  SetCertificatesDetails details;
  details.client_certificates = certificates;
  remote_call_adaptor_.SyncCall("setCertificates", details);
}
bool ApiBridge::RequestPin(const RequestPinOptions& options, std::string* pin) {
gsc::Value options_value = gsc::ConvertToValueOrDie(options);
gsc::FunctionCallTracer tracer("requestPin", kFunctionCallLoggingPrefix,
gsc::LogSeverity::kInfo);
tracer.AddPassedArg("options", gsc::DebugDumpValueFull(options_value));
tracer.LogEntrance();
gsc::GenericRequestResult generic_request_result =
remote_call_adaptor_.SyncCall("requestPin", std::move(options_value));
if (!generic_request_result.is_successful()) {
tracer.AddReturnValue(
"false (error: " + generic_request_result.error_message() + ")");
tracer.LogExit();
return false;
}
RequestPinResults results;
gsc::RemoteCallArgumentsExtractor extractor(
"result of requestPin", std::move(generic_request_result).TakePayload());
// The Chrome API can omit the result object.
if (extractor.argument_count() > 0)
extractor.Extract(&results);
if (!extractor.Finish())
GOOGLE_SMART_CARD_LOG_FATAL << extractor.error_message();
if (!results.user_input || results.user_input->empty()) {
tracer.AddReturnValue("false (empty PIN)");
tracer.LogExit();
return false;
}
*pin = std::move(*results.user_input);
tracer.AddReturnValue("true (success)");
tracer.LogExit();
return true;
}
void ApiBridge::StopPinRequest(const StopPinRequestOptions& options) {
gsc::Value options_value = gsc::ConvertToValueOrDie(options);
gsc::FunctionCallTracer tracer("stopPinRequest", kFunctionCallLoggingPrefix,
gsc::LogSeverity::kInfo);
tracer.AddPassedArg("options", gsc::DebugDumpValueFull(options_value));
tracer.LogEntrance();
const gsc::GenericRequestResult generic_request_result =
remote_call_adaptor_.SyncCall("stopPinRequest", std::move(options_value));
if (!generic_request_result.is_successful()) {
tracer.AddReturnValue("error (" + generic_request_result.error_message() +
")");
tracer.LogExit();
return;
}
tracer.AddReturnValue("success");
tracer.LogExit();
}
// Entry point for requests arriving from the JS side. Decodes the payload as
// a remote-call request and dispatches on the function name; an unknown
// function name is a programming error and crashes via a fatal log.
void ApiBridge::HandleRequest(
    gsc::Value payload,
    gsc::RequestReceiver::ResultCallback result_callback) {
  gsc::RemoteCallRequestPayload request =
      gsc::ConvertFromValueOrDie<gsc::RemoteCallRequestPayload>(
          std::move(payload));
  if (request.function_name == kHandleCertificatesRequestFunctionName) {
    // No output arguments are extracted here - presumably this call only
    // validates that the argument list is empty; confirm against
    // ExtractRemoteCallArgumentsOrDie's contract.
    gsc::ExtractRemoteCallArgumentsOrDie(std::move(request.function_name),
                                         std::move(request.arguments));
    HandleCertificatesRequest(result_callback);
  } else if (request.function_name == kHandleSignatureRequestFunctionName) {
    SignatureRequest signature_request;
    gsc::ExtractRemoteCallArgumentsOrDie(std::move(request.function_name),
                                         std::move(request.arguments),
                                         &signature_request);
    HandleSignatureRequest(std::move(signature_request), result_callback);
  } else {
    GOOGLE_SMART_CARD_LOG_FATAL << "Unknown chrome_certificate_provider "
                                << "ApiBridge function requested: \""
                                << request.function_name << "\"";
  }
}
// Runs the certificates request on a detached background thread, so the
// handler does not block the thread that delivered the incoming message.
// The weak handler pointer and the shared mutex are passed by value so they
// stay valid for the lifetime of the detached thread.
void ApiBridge::HandleCertificatesRequest(
    gsc::RequestReceiver::ResultCallback result_callback) {
  std::thread(&ProcessCertificatesRequest, certificates_request_handler_,
              request_handling_mutex_, result_callback)
      .detach();
}
// Runs the signature request on a detached background thread, so the handler
// does not block the thread that delivered the incoming message. The request
// object is moved into the thread; the weak handler pointer and shared mutex
// are copied so they stay valid for the thread's lifetime.
void ApiBridge::HandleSignatureRequest(
    SignatureRequest signature_request,
    gsc::RequestReceiver::ResultCallback result_callback) {
  std::thread(&ProcessSignatureRequest, signature_request_handler_,
              std::move(signature_request), request_handling_mutex_,
              result_callback)
      .detach();
}
} // namespace chrome_certificate_provider
} // namespace smart_card_client
| apache-2.0 |
bramvdbogaerde/rocket-arm | app-container/discovery/parse.go | 1552 | package discovery
import (
"fmt"
"net/url"
"runtime"
"strings"
"github.com/coreos/rocket/app-container/schema/types"
)
const (
	// defaultVersion is used when an app string carries no "version" label.
	defaultVersion = "latest"
	// defaultOS and defaultArch fall back to the platform this binary was
	// built for.
	defaultOS   = runtime.GOOS
	defaultArch = runtime.GOARCH
)
// App identifies a discoverable app image: an AC name plus a set of labels
// (e.g. version, os, arch) that narrow down the concrete image.
type App struct {
	Name   types.ACName
	Labels map[string]string
}
// NewApp builds an App from a raw name string and an optional label map.
// A nil label map is treated the same as an empty one. An error is returned
// when the name is not a valid AC name.
func NewApp(name string, labels map[string]string) (*App, error) {
	if labels == nil {
		labels = map[string]string{}
	}
	parsedName, err := types.NewACName(name)
	if err != nil {
		return nil, err
	}
	app := &App{
		Name:   *parsedName,
		Labels: labels,
	}
	return app, nil
}
// NewAppFromString takes a command line app parameter and returns a map of labels.
//
// Example app parameters:
//	example.com/reduce-worker:1.0.0
//	example.com/reduce-worker,channel=alpha,label=value
func NewAppFromString(app string) (*App, error) {
	// Normalize "name:version,k=v" into query syntax "name=...&version=...&k=v"
	// so that url.ParseQuery does the splitting for us.
	normalized := "name=" + strings.Replace(app, ":", ",version=", -1)
	parsed, err := url.ParseQuery(strings.Replace(normalized, ",", "&", -1))
	if err != nil {
		return nil, err
	}
	var name string
	labels := map[string]string{}
	for key, vals := range parsed {
		if len(vals) > 1 {
			return nil, fmt.Errorf("label %s with multiple values %q", key, vals)
		}
		switch key {
		case "name":
			name = vals[0]
		default:
			labels[key] = vals[0]
		}
	}
	// Fill in defaults for the well-known labels the user left out.
	defaults := []struct{ key, value string }{
		{"version", defaultVersion},
		{"os", defaultOS},
		{"arch", defaultArch},
	}
	for _, d := range defaults {
		if labels[d.key] == "" {
			labels[d.key] = d.value
		}
	}
	return NewApp(name, labels)
}
| apache-2.0 |
tonyqus/npoi | examples/hssf/ExtractStringsFromXls/HSSFTestDataSamples.cs | 8103 | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace TestCases.HSSF
{
using System;
using System.Configuration;
using System.IO;
using System.Text;
using NPOI.HSSF.UserModel;
/**
* Centralises logic for finding/Opening sample files in the src/testcases/org/apache/poi/hssf/hssf/data folder.
*
* @author Josh Micich
*/
public class HSSFTestDataSamples
{
    // Configuration key (appSettings) naming the HSSF test data directory.
    private static String TEST_DATA_DIR_SYS_PROPERTY_NAME = "HSSF.testdata.path";

    // Test data directory resolved by Initialise().
    // NOTE(review): compared against "" below, but an unset static string is
    // null, so that guard may never fire - confirm.
    private static string _resolvedDataDir;

    /** <code>true</code> if the standard system property is not set,
     * but the data is available on the test runtime classpath */
    private static bool _sampleDataIsAvaliableOnClassPath;

    /**
     * Opens a sample file from the standard HSSF test data directory
     *
     * @return an Open <tt>Stream</tt> for the specified sample file
     */
    public static Stream OpenSampleFileStream(String sampleFileName)
    {
        Initialise();
        if (_sampleDataIsAvaliableOnClassPath)
        {
            // NOTE(review): Initialise() unconditionally sets this flag, so
            // the directory-based fallback below appears unreachable - confirm.
            Stream result = OpenClasspathResource(sampleFileName);
            if (result == null)
            {
                throw new Exception("specified test sample file '" + sampleFileName
                    + "' not found on the classpath");
            }
            // wrap to avoid temp warning method about auto-closing input stream
            return new NonSeekableStream(result);
        }
        if (_resolvedDataDir == "")
        {
            throw new Exception("Must set system property '"
                + TEST_DATA_DIR_SYS_PROPERTY_NAME
                + "' properly before running tests");
        }
        if (!File.Exists(_resolvedDataDir + sampleFileName))
        {
            throw new Exception("Sample file '" + sampleFileName
                + "' not found in data dir '" + _resolvedDataDir + "'");
        }
        try
        {
            return new FileStream(_resolvedDataDir + sampleFileName, FileMode.Open);
        }
        catch (FileNotFoundException)
        {
            throw;
        }
    }

    // Resolves the test data directory from configuration and validates that
    // it exists; throws when the setting is missing/empty or the directory
    // does not exist.
    private static void Initialise()
    {
        String dataDirName = ConfigurationManager.AppSettings[TEST_DATA_DIR_SYS_PROPERTY_NAME];
        if (dataDirName == "")
            throw new Exception("Must set system property '"
                + TEST_DATA_DIR_SYS_PROPERTY_NAME
                + "' before running tests");
        if (!Directory.Exists(dataDirName))
        {
            throw new IOException("Data dir '" + dataDirName
                + "' specified by system property '"
                + TEST_DATA_DIR_SYS_PROPERTY_NAME + "' does not exist");
        }
        _sampleDataIsAvaliableOnClassPath = true;
        _resolvedDataDir = dataDirName;
    }

    /**
     * Opens a test sample file from the 'data' sub-package of this class's package.
     * @return <code>null</code> if the sample file is not deployed on the classpath.
     */
    private static Stream OpenClasspathResource(String sampleFileName)
    {
        // NOTE(review): contrary to the doc above, FileStream throws instead
        // of returning null when the file is missing - confirm the intended
        // contract.
        FileStream file = new FileStream(ConfigurationManager.AppSettings["HSSF.testdata.path"] + sampleFileName, FileMode.Open);
        return file;
    }

    // Decorator that reports CanSeek == false while forwarding all other
    // operations to the wrapped stream.
    private class NonSeekableStream : Stream
    {
        private Stream _is;

        public NonSeekableStream(Stream is1)
        {
            _is = is1;
        }

        // Reads a single byte from the underlying stream (-1 at end of stream).
        public int Read()
        {
            return _is.ReadByte();
        }

        public override int Read(byte[] b, int off, int len)
        {
            return _is.Read(b, off, len);
        }

        // Mark/reset is never supported by this wrapper (a leftover of the
        // Java InputStream API this class was ported from).
        public bool markSupported()
        {
            return false;
        }

        public override void Close()
        {
            _is.Close();
        }

        public override bool CanRead
        {
            get { return _is.CanRead; }
        }

        // Always non-seekable, regardless of the wrapped stream.
        public override bool CanSeek
        {
            get { return false; }
        }

        public override bool CanWrite
        {
            get { return _is.CanWrite; }
        }

        public override long Length
        {
            get { return _is.Length; }
        }

        public override long Position
        {
            get { return _is.Position; }
            set { _is.Position = value; }
        }

        public override void Write(byte[] buffer, int offset, int count)
        {
            _is.Write(buffer, offset, count);
        }

        public override void Flush()
        {
            _is.Flush();
        }

        // NOTE(review): Seek/SetLength are still delegated even though CanSeek
        // reports false - confirm callers never rely on this inconsistency.
        public override long Seek(long offset, SeekOrigin origin)
        {
            return _is.Seek(offset, origin);
        }

        public override void SetLength(long value)
        {
            _is.SetLength(value);
        }
    }

    // Opens the named sample file and parses it into an HSSFWorkbook.
    public static HSSFWorkbook OpenSampleWorkbook(String sampleFileName)
    {
        try
        {
            return new HSSFWorkbook(OpenSampleFileStream(sampleFileName));
        }
        catch (IOException)
        {
            throw;
        }
    }

    /**
     * Writes a spReadsheet to a <tt>MemoryStream</tt> and Reads it back
     * from a <tt>ByteArrayStream</tt>.<p/>
     * Useful for verifying that the serialisation round trip works correctly.
     */
    public static HSSFWorkbook WriteOutAndReadBack(HSSFWorkbook original)
    {
        try
        {
            MemoryStream baos = new MemoryStream(4096);
            original.Write(baos);
            return new HSSFWorkbook(baos);
        }
        catch (IOException)
        {
            throw;
        }
    }

    /**
     * @return byte array of sample file content from file found in standard hssf test data dir
     */
    public static byte[] GetTestDataFileContent(String fileName)
    {
        MemoryStream bos = new MemoryStream();
        try
        {
            Stream fis = HSSFTestDataSamples.OpenSampleFileStream(fileName);
            byte[] buf = new byte[512];
            // Copy in 512-byte chunks until end of stream.
            while (true)
            {
                int bytesRead = fis.Read(buf, 0, buf.Length);
                if (bytesRead < 1)
                {
                    break;
                }
                bos.Write(buf, 0, bytesRead);
            }
            fis.Close();
        }
        catch (IOException)
        {
            throw;
        }
        return bos.ToArray();
    }
}
} | apache-2.0 |
pomack/closure-templates | java/src/com/google/template/soy/gosrc/internal/IsComputableAsGoExprsVisitor.java | 6024 | /*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.gosrc.internal;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.google.template.soy.gosrc.SoyGoSrcOptions;
import com.google.template.soy.gosrc.SoyGoSrcOptions.CodeStyle;
import com.google.template.soy.shared.internal.ApiCallScope;
import com.google.template.soy.soytree.AbstractReturningSoyNodeVisitor;
import com.google.template.soy.soytree.CallNode;
import com.google.template.soy.soytree.CallParamContentNode;
import com.google.template.soy.soytree.CallParamValueNode;
import com.google.template.soy.soytree.CssNode;
import com.google.template.soy.soytree.ForNode;
import com.google.template.soy.soytree.ForeachNode;
import com.google.template.soy.soytree.IfCondNode;
import com.google.template.soy.soytree.IfElseNode;
import com.google.template.soy.soytree.IfNode;
import com.google.template.soy.soytree.MsgHtmlTagNode;
import com.google.template.soy.soytree.PrintNode;
import com.google.template.soy.soytree.RawTextNode;
import com.google.template.soy.soytree.SoyNode;
import com.google.template.soy.soytree.SoyNode.ParentSoyNode;
import com.google.template.soy.soytree.SwitchNode;
import com.google.template.soy.soytree.TemplateNode;
import java.util.Map;
/**
* Visitor to determine whether the output string for the subtree rooted at a given node is
* computable as the concatenation of one or more Go expressions. If this is false, it means the
* generated code for computing the node's output must include one or more full Go statements.
*
* <p> Precondition: MsgNode should not exist in the tree.
*
* <p> Important: This class is in {@link ApiCallScope} because it memoizes results that are
* reusable for the same parse tree. If we change the parse tree between uses of the scoped
* instance, then the results may not be correct. (In that case, we would need to take this class
* out of {@code ApiCallScope} and rewrite the code somehow to still take advantage of the
* memoized results to the extent that they remain correct.)
*
* @author Kai Huang
*/
@ApiCallScope
class IsComputableAsGoExprsVisitor extends AbstractReturningSoyNodeVisitor<Boolean> {

  /** The options for generating Go source code. */
  private final SoyGoSrcOptions goSrcOptions;

  /** The memoized results of past visits to nodes (valid for a single parse tree). */
  private final Map<SoyNode, Boolean> memoizedResults;

  /**
   * @param goSrcOptions The options for generating Go source code.
   */
  @Inject
  public IsComputableAsGoExprsVisitor(SoyGoSrcOptions goSrcOptions) {
    this.goSrcOptions = goSrcOptions;
    memoizedResults = Maps.newHashMap();
  }

  /** Memoizing entry point: each node's result is computed at most once per instance. */
  @Override protected Boolean visit(SoyNode node) {
    if (memoizedResults.containsKey(node)) {
      return memoizedResults.get(node);
    } else {
      Boolean result = super.visit(node);
      memoizedResults.put(node, result);
      return result;
    }
  }

  // -----------------------------------------------------------------------------------------------
  // Implementations for concrete classes.

  // A template is computable iff all of its children are.
  @Override protected Boolean visitTemplateNode(TemplateNode node) {
    return areChildrenComputableAsGoExprs(node);
  }

  // Raw text is trivially a string expression.
  @Override protected Boolean visitRawTextNode(RawTextNode node) {
    return true;
  }

  @Override protected Boolean visitMsgHtmlTagNode(MsgHtmlTagNode node) {
    return areChildrenComputableAsGoExprs(node);
  }

  @Override protected Boolean visitPrintNode(PrintNode node) {
    return true;
  }

  @Override protected Boolean visitCssNode(CssNode node) {
    return true;
  }

  // An 'if' whose children were all computable could in principle be rendered
  // with the ternary conditional operator ("? :"), but that is not implemented
  // here, so 'if' is always reported as requiring full statements.
  @Override protected Boolean visitIfNode(IfNode node) {
    return false;
  }

  @Override protected Boolean visitIfCondNode(IfCondNode node) {
    return false;
  }

  @Override protected Boolean visitIfElseNode(IfElseNode node) {
    return false;
  }

  // The remaining control-flow constructs always require full Go statements.
  @Override protected Boolean visitSwitchNode(SwitchNode node) {
    return false;
  }

  @Override protected Boolean visitForeachNode(ForeachNode node) {
    return false;
  }

  @Override protected Boolean visitForNode(ForNode node) {
    return false;
  }

  // A call is only expressible as a Go expression in the CONCAT code style,
  // and then only when all of its children are expressions too.
  @Override protected Boolean visitCallNode(CallNode node) {
    return goSrcOptions.getCodeStyle() == CodeStyle.CONCAT &&
        areChildrenComputableAsGoExprs(node);
  }

  @Override protected Boolean visitCallParamValueNode(CallParamValueNode node) {
    return true;
  }

  @Override protected Boolean visitCallParamContentNode(CallParamContentNode node) {
    return areChildrenComputableAsGoExprs(node);
  }

  // -----------------------------------------------------------------------------------------------
  // Private helpers.

  /**
   * Private helper to check whether all children of a given parent node satisfy
   * IsComputableAsGoExprsVisitor.
   * @param node The parent node whose children to check.
   * @return True if all children satisfy IsComputableAsGoExprsVisitor.
   */
  private boolean areChildrenComputableAsGoExprs(ParentSoyNode<?> node) {
    for (SoyNode child : node.getChildren()) {
      // Note: Save time by not visiting RawTextNode and PrintNode children,
      // which are always computable (see the visit methods above).
      if (! (child instanceof RawTextNode) && ! (child instanceof PrintNode)) {
        if (! visit(child)) {
          return false;
        }
      }
    }
    return true;
  }
}
| apache-2.0 |
aygalinc/Context-SOCM | cream-it/cream-it-functional-extension/src/main/java/fr/liglab/adele/cream/it/functional/extension/invalid/NotExposedService.java | 147 | package fr.liglab.adele.cream.it.functional.extension.invalid;
/**
 * Marker interface used by the functional-extension integration tests.
 *
 * <p>NOTE(review): judging by the "invalid" package, this presumably models a
 * contract that is deliberately NOT exposed/registered as a service - confirm
 * against the test that references it.
 */
public interface NotExposedService {
}
| apache-2.0 |
fkolacek/FIT-VUT | mess/IPK3-Petr/udt.cpp | 1177 | #include "udt.h"
// Default-constructs an empty segment: all numeric fields are zeroed and the
// payload is empty.
UDT::UDT() : SN(0), W(0), T(0), D("") {}
// Constructs a segment with the given sequence number and payload; the window
// and tack fields start at zero.
UDT::UDT(unsigned int a, string b) : SN(a), W(0), T(0), D(b) {}
// Sets the window value emitted in the segment header.
// NOTE(review): takes a signed int although the related fields are unsigned -
// confirm negative values are never passed.
void UDT::setW(int a){
  W = a;
}

// Sets the "tack" value emitted in the segment header.
void UDT::setT(unsigned int a){
  T = a;
}

// Returns this segment's sequence number.
unsigned int UDT::getSN(){
  return SN;
}

// Returns this segment's "tack" value.
unsigned int UDT::getT(){
  return T;
}

// Returns the raw payload carried by this segment.
string UDT::getD(){
  return D;
}
// Converts an integer to its decimal string representation. Helper used when
// serializing header attributes in toS()/toA().
//
// Uses a one-way ostringstream instead of the previous bidirectional
// stringstream insert-then-extract round trip, which was needlessly verbose
// and fragile (operator>> stops at whitespace for general content).
string UDT::toString(int a){
  ostringstream s;
  s << a;
  return s.str();
}
// Serializes this segment as an <rdt-segment> XML string carrying the real
// payload D; the ack attribute mirrors sn.
// NOTE(review): D is inserted without XML escaping - confirm payloads can
// never contain markup characters.
string UDT::toS(){
  string out = "";
  out += string("<rdt-segment id=\"xmatya03\">");
  out += string("<header sn=\"" + toString(SN) + "\" ack=\"" + toString(SN) + "\" win=\"" + toString(W) + "\" tack=\"" + toString(T) + "\"></header><data>" + D + "</data>");
  out += string("</rdt-segment>");
  return out;
}
// Serializes an acknowledgement variant of this segment: same header as
// toS(), but with the fixed placeholder payload "Something" instead of D.
// NOTE(review): near-duplicate of toS() - consider sharing the header-building
// code if this file is revisited.
string UDT::toA(){
  string out = "";
  out += string("<rdt-segment id=\"xmatya03\">");
  out += string("<header sn=\"" + toString(SN) + "\" ack=\"" + toString(SN) + "\" win=\"" + toString(W) + "\" tack=\"" + toString(T) + "\"></header><data>Something</data>");
  out += string("</rdt-segment>");
  return out;
}
| apache-2.0 |
factoryfx/factoryfx | oracledbStorage/src/main/java/module-info.java | 172 | module io.github.factoryfx.oracledbStorage {
// Depends on the core FactoryFX module and on JDBC (java.sql) for Oracle access.
requires io.github.factoryfx.factory;
requires java.sql;
// Public API: the Oracle-backed data storage implementation.
exports io.github.factoryfx.factory.datastorage.oracle;
}