code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
package i18n; import jlibs.core.util.i18n.Message; import jlibs.core.util.i18n.ResourceBundle; /** * @author Santhosh Kumar T */ @ResourceBundle public interface MethodSignatureClash2Bundle{ @Message(key="EXECUTING_QUERY", value="executing {0}") public String executing(String query); }
santhosh-tekuri/jlibs
i18n-apt/src/test/resources/i18n/MethodSignatureClash2Bundle.java
Java
apache-2.0
299
# -*- coding: utf-8 -*- import copy import os from django.contrib import auth from django.contrib.auth.models import User from django.core.files.uploadedfile import SimpleUploadedFile from django.db.models import QuerySet from django.test import TestCase, Client, mock from django.urls import reverse from ..forms import AddBookForm from ..models import (TheUser, Category, Author, Language, Book, AddedBook, BookRating, BookComment, Post, SupportMessage, BookRelatedData) from .utils import Utils TEST_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA_DIR = os.path.join(TEST_DIR, 'fixtures') # ---------------------------------------------------------------------------------------------------------------------- class ModelTest(TestCase): # ------------------------------------------------------------------------------------------------------------------ @classmethod def setUpTestData(cls): cls.setup_users() cls.setup_categories() cls.setup_authors() cls.setup_languages() cls.setup_books() cls.setup_added_books() cls.setup_book_rating() cls.setup_book_comment() cls.setup_post_messages() cls.setup_support_messages() # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_users(cls): client = Client() cls.anonymous_user = auth.get_user(client) cls.user1 = User.objects.create_user('user1', 'user1@user1.com', 'testpassword1') cls.user2 = User.objects.create_user('user2', 'user2@user2.com', 'testpassword2') cls.user3 = User.objects.create_user('user3', 'user3@user3.com', 'testpassword3') cls.user4 = User.objects.create_user('user4', 'user4@user4.com', 'testpassword4') cls.user5 = User.objects.create_user('user5', 'user5@user5.com', 'testpassword5') cls.user6 = User.objects.create_user('user6', 'user6@user6.com', 'testpassword6') cls.the_user1 = TheUser.objects.get(id_user=cls.user1) cls.the_user2 = TheUser.objects.get(id_user=cls.user2) cls.the_user5 = 
TheUser.objects.get(id_user=cls.user5) cls.the_user6 = TheUser.objects.get(id_user=cls.user6) # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_categories(cls): cls.category1 = Category.objects.create(category_name='category1') cls.category2 = Category.objects.create(category_name='category2') # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_authors(cls): cls.author1 = Author.objects.create(author_name='Best Author 1') cls.author2 = Author.objects.create(author_name='trueAuthorNew') cls.author3 = Author.objects.create(author_name='zlast author') cls.author4 = Author.objects.create(author_name='<AuthorSpecialSymbols>&"') cls.author5 = Author.objects.create(author_name="O'Connor") # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_languages(cls): cls.language_en = Language.objects.create(language='English') cls.language_ru = Language.objects.create(language='Russian') # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_books(cls): test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf') test_book_image_path = os.path.join(TEST_DATA_DIR, 'test_book_image.png') books_setup = [ { 'name': 'First Book', 'author': cls.author1, 'category': cls.category1, 'language': cls.language_en, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()), 'who_added': cls.the_user1, 'private': True }, { 'name': 'Second Book', 'author': cls.author2, 'category': cls.category1, 'language': cls.language_en, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'who_added': cls.the_user2, 
'blocked_book': True }, { 'name': 'Third Book', 'author': cls.author2, 'category': cls.category1, 'language': cls.language_ru, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()), 'who_added': cls.the_user1, 'blocked_book': True }, { 'name': 'Fourth Book', 'author': cls.author1, 'category': cls.category1, 'language': cls.language_ru, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()), 'who_added': cls.the_user2, 'blocked_book': True }, { 'name': 'Fifth Book', 'author': cls.author1, 'category': cls.category2, 'language': cls.language_ru, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'who_added': cls.the_user1, 'private': True }, { 'name': 'Sixth Book', 'author': cls.author2, 'category': cls.category2, 'language': cls.language_en, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()), 'who_added': cls.the_user2 }, { 'name': 'Seventh Book<>&"', 'author': cls.author4, 'category': cls.category2, 'language': cls.language_en, 'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), 'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()), 'who_added': cls.the_user2 } ] for book in books_setup: Book.objects.create( book_name=book['name'], id_author=book['author'], id_category=book['category'], description='TEST description', language=book['language'], book_file=book['file'], photo=book.get('photo', False), who_added=book['who_added'], private_book=book.get('private', False), blocked_book=book.get('blocked_book', False) ) # 
------------------------------------------------------------------------------------------------------------------ @classmethod def setup_added_books(cls): AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Third Book')) AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Sixth Book')) AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Fourth Book')) AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Third Book')) AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Sixth Book')) AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Second Book')) AddedBook.objects.create(id_user=cls.the_user5, id_book=Book.objects.get(book_name='Sixth Book')) AddedBook.objects.create(id_user=cls.the_user6, id_book=Book.objects.get(book_name='Sixth Book')) # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_book_rating(cls): BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user1, rating=10) BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user2, rating=5) BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user5, rating=3) BookRating.objects.create(id_book=Book.objects.get(book_name='Fourth Book'), id_user=cls.the_user1, rating=7) BookRating.objects.create(id_book=Book.objects.get(book_name='Sixth Book'), id_user=cls.the_user1, rating=4) BookRating.objects.create(id_book=Book.objects.get(book_name='Second Book'), id_user=cls.the_user2, rating=7) # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_book_comment(cls): second_book = Book.objects.get(book_name='Second Book') third_book = 
Book.objects.get(book_name='Third Book') fourth_book = Book.objects.get(book_name='Fourth Book') BookComment.objects.create(id_book=second_book, id_user=cls.the_user1, text='Test book 2 user 1') BookComment.objects.create(id_book=second_book, id_user=cls.the_user2, text='Test book 2 user 2') BookComment.objects.create(id_book=third_book, id_user=cls.the_user1, text='Test book 3 user 1') BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user1, text='Test book 4 user 1') BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user5, text='Test book 4 user 5') # ------------------------------------------------------------------------------------------------------------------ @classmethod @mock.patch('app.signals.email_dispatch.apply_async', new=mock.Mock()) def setup_post_messages(cls): Post.objects.create(user=cls.the_user1, heading='post 1', text='Posted test text 1') Post.objects.create(user=cls.the_user1, heading='post 2', text='Posted test text 2') Post.objects.create(user=cls.the_user2, heading='post 3', text='Posted test text 3') # ------------------------------------------------------------------------------------------------------------------ @classmethod def setup_support_messages(cls): SupportMessage.objects.create(email='testemail1@mail.co', text='Test text1') SupportMessage.objects.create(email='testemail1@mail.co', text='Test text2') SupportMessage.objects.create(email='test_email22@mail.co', text='Test text3') SupportMessage.objects.create(email='test_email23@mail.co', text='Test text4') # ------------------------------------------------------------------------------------------------------------------ def test_the_user_str(self): self.assertEqual(str(self.the_user1), 'user1') self.assertEqual(str(self.the_user2), 'user2') # ------------------------------------------------------------------------------------------------------------------ def test_creating_the_user_objects(self): """ Must create 'app.models.TheUser' instance after 
django User instance was created. """ self.assertEqual(User.objects.all().count(), 6) self.assertEqual(User.objects.all().count(), TheUser.objects.all().count()) self.assertNotEqual(self.the_user1.auth_token, '') self.assertNotEqual(self.the_user1.auth_token, self.the_user2.auth_token) # ------------------------------------------------------------------------------------------------------------------ def test_the_user_get_api_reminders(self): reminders = self.the_user1.get_api_reminders() reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_rate'] self.assertTrue(isinstance(reminders, dict)) self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct)) # ------------------------------------------------------------------------------------------------------------------ def test_the_user_get_web_reminders(self): reminders = self.the_user1.get_web_reminders() reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_download'] self.assertTrue(isinstance(reminders, dict)) self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct)) # ------------------------------------------------------------------------------------------------------------------ def test_the_user_update_reminder(self): reminders = self.the_user1.get_web_reminders() self.assertTrue(isinstance(reminders, dict)) self.assertEqual(reminders['vk'], True) self.assertEqual(reminders['app_download'], True) self.the_user1.update_reminder('vk', False) self.the_user1.update_reminder('app_download', False) updated_reminders = self.the_user1.get_web_reminders() self.assertTrue(isinstance(updated_reminders, dict)) self.assertEqual(updated_reminders['vk'], False) self.assertEqual(updated_reminders['app_download'], False) # ------------------------------------------------------------------------------------------------------------------ def test_removing_user_objects(self): """ Must remove django User instance after 
'app.models.TheUser' objects was deleted. """ the_user3 = TheUser.objects.get(id_user__username='user3') the_user4 = TheUser.objects.get(id_user__email='user4@user4.com') the_user3.delete() the_user4.delete() self.assertEqual(User.objects.all().count(), 4) self.assertEqual(User.objects.all().count(), TheUser.objects.all().count()) # ------------------------------------------------------------------------------------------------------------------ def test_created_categories(self): self.assertEqual(Category.objects.all().count(), 2) self.assertNotEqual(self.category1, self.category2) # ------------------------------------------------------------------------------------------------------------------ def test_categories_str(self): self.assertEqual(str(self.category1), 'category1') self.assertEqual(str(self.category2), 'category2') # ------------------------------------------------------------------------------------------------------------------ def test_created_authors(self): self.assertEqual(Author.objects.all().count(), 5) self.assertNotEqual(self.author1, self.author2) # ------------------------------------------------------------------------------------------------------------------ def test_get_authors_list(self): """ Must return authors list depending on different letters/letter case/words/symbols. 
""" self.assertEqual(Author.get_authors_list('bEst'), ['Best Author 1']) self.assertEqual(Author.get_authors_list('1'), ['Best Author 1']) self.assertEqual(Author.get_authors_list(' '), ['Best Author 1', 'zlast author']) self.assertEqual(Author.get_authors_list('new'), ['trueAuthorNew']) self.assertEqual(Author.get_authors_list('TRUE'), ['trueAuthorNew']) self.assertEqual(Author.get_authors_list('Best Author 1'), ['Best Author 1']) self.assertEqual(Author.get_authors_list('trueAuthorNew'), ['trueAuthorNew']) # ------------------------------------------------------------------------------------------------------------------ def test_get_authors_list_with_escaping(self): self.assertEqual(Author.get_authors_list("'", True), ['O&#39;Connor']) self.assertEqual(Author.get_authors_list("Connor", True), ['O&#39;Connor']) self.assertEqual( Author.get_authors_list('b', True), ['Best Author 1', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;'] ) self.assertEqual( Author.get_authors_list('e', True), ['Best Author 1', 'trueAuthorNew', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;'] ) self.assertEqual( Author.get_authors_list('author', True), ['Best Author 1', 'trueAuthorNew', 'zlast author', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;'] ) # ------------------------------------------------------------------------------------------------------------------ def test_get_authors_list_without_escaping(self): self.assertEqual(Author.get_authors_list("'"), ["O'Connor"]) self.assertEqual(Author.get_authors_list("Connor", False), ["O'Connor"]) self.assertEqual(Author.get_authors_list('b'), ['Best Author 1', '<AuthorSpecialSymbols>&"']) self.assertEqual( Author.get_authors_list('e'), ['Best Author 1', 'trueAuthorNew', '<AuthorSpecialSymbols>&"'] ) self.assertEqual( Author.get_authors_list('author', False), ['Best Author 1', 'trueAuthorNew', 'zlast author', '<AuthorSpecialSymbols>&"'] ) # ------------------------------------------------------------------------------------------------------------------ 
def test_created_language(self): self.assertEqual(Language.objects.all().count(), 2) self.assertNotEqual(self.author1, self.author2) # ------------------------------------------------------------------------------------------------------------------ def test_created_books(self): books = Book.objects.all() self.assertEqual(books.count(), 7) self.assertEqual(books.filter(private_book=True).count(), 2) self.assertEqual(books.filter(id_category=self.category1).count(), 4) self.assertEqual(books.filter(id_author=self.author1).count(), 3) self.assertEqual(books.filter(language=self.language_en).count(), 4) self.assertEqual(books.filter(photo=False).count(), 2) self.assertEqual(books.filter(who_added=self.the_user1).count(), 3) self.assertEqual(books.filter(id_category=self.category2, id_author=self.author2).count(), 1) self.assertEqual(books.filter(id_category=self.category1, id_author=self.author2, language=self.language_ru, who_added=self.the_user1).count(), 1) self.assertEqual(books.filter(id_category=self.category1, id_author=self.author2, language=self.language_ru, who_added=self.the_user2).count(), 0) self.assertEqual(books.filter(blocked_book=True).count(), 3) # ------------------------------------------------------------------------------------------------------------------ def test_get_related_objects_for_create(self): test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf') form_data = { 'bookname': 'The new book', 'author': 'trueAuthorNew', 'category': 'category1', 'language': 'English', 'about': 'about book', 'bookfile': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()), } form_data_new_author = copy.deepcopy(form_data) form_data_new_author['author'] = 'super new author' self.assertEqual(Author.objects.all().count(), 5) form = AddBookForm(data=form_data) form.is_valid() form_with_new_author = AddBookForm(data=form_data_new_author) form_with_new_author.is_valid() related_data = Book.get_related_objects_for_create(self.user1.id, form) 
self.assertTrue(isinstance(related_data, BookRelatedData)) self.assertEqual(len(related_data), 4) self.assertEqual(related_data.author, Author.objects.get(author_name='trueAuthorNew')) self.assertEqual(Author.objects.all().count(), 5) related_data_new_author = Book.get_related_objects_for_create(self.user1.id, form_with_new_author) self.assertTrue(isinstance(related_data, BookRelatedData)) self.assertEqual(len(related_data_new_author), 4) self.assertEqual(related_data_new_author.author, Author.objects.get(author_name='super new author')) self.assertEqual(Author.objects.all().count(), 6) # ------------------------------------------------------------------------------------------------------------------ def test_get_related_objects_create_api(self): """ Must generate Book related data when creates a Book object for API calls. New author must be returned if it's name not present in the Author model. """ test_data = {'author': 'trueAuthorNew', 'category': 'category2', 'language': 'Russian'} test_data_new_author = {'author': 'NEW AUTHOR', 'category': 'category1', 'language': 'English'} self.assertEqual( Book.get_related_objects_create_api(self.the_user1, test_data), BookRelatedData(self.author2, self.category2, self.language_ru, None) ) self.assertEqual(Author.objects.all().count(), 5) self.assertEqual( Book.get_related_objects_create_api(self.the_user1, test_data_new_author), BookRelatedData(Author.objects.get(author_name='NEW AUTHOR'), self.category1, self.language_en, None) ) self.assertEqual(Author.objects.all().count(), 6) # ------------------------------------------------------------------------------------------------------------------ def test_get_related_objects_selected_book_unknown_user(self): """ Must generate selected book related data for unknown (anonymous) users. 
""" third_book = Book.objects.get(book_name='Third Book') sixth_book = Book.objects.get(book_name='Sixth Book') self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.anonymous_user, third_book.id), dict)) related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id) related_sixth_book = Book.get_related_objects_selected_book(self.anonymous_user, sixth_book.id) self.assertEqual(related_third_book['book'], third_book) self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0}) self.assertEqual(related_third_book['book_rating_count'], 3) self.assertEqual(related_third_book['added_book'], None) self.assertEqual(related_third_book['comments'].count(), 1) self.assertEqual(related_third_book['comments'][0], BookComment.objects.filter(id_book=third_book).order_by('-id')[0]) self.assertEqual(related_sixth_book['book'], sixth_book) self.assertEqual(related_sixth_book['avg_book_rating'], {'rating__avg': 4.0}) self.assertEqual(related_sixth_book['book_rating_count'], 1) self.assertEqual(related_sixth_book['added_book'], None) self.assertEqual(related_sixth_book['comments'].count(), 0) AddedBook.objects.create(id_user=self.the_user5, id_book=third_book) BookRating.objects.create(id_user=self.the_user6, id_book=third_book, rating=10) BookComment.objects.create(id_user=self.the_user6, id_book=third_book, text='TEST TEXT 2') related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id) self.assertEqual(related_third_book['book'], third_book) self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 7.0}) self.assertEqual(related_third_book['book_rating_count'], 4) self.assertEqual(related_third_book['added_book'], None) self.assertEqual(related_third_book['comments'].count(), 2) # ------------------------------------------------------------------------------------------------------------------ def test_get_related_objects_selected_book_added_user(self): """ This 
case is testing only 'added_book' param, because for user who is reading the book only this attribute will change relatively to function above. """ third_book = Book.objects.get(book_name='Third Book') sixth_book = Book.objects.get(book_name='Sixth Book') self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id), dict)) related_third_book = Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id) related_sixth_book = Book.get_related_objects_selected_book(self.the_user1.id_user, sixth_book.id) self.assertEqual(related_third_book['added_book'], AddedBook.objects.get(id_book=third_book, id_user=self.the_user1)) self.assertEqual(related_sixth_book['added_book'], AddedBook.objects.get(id_book=sixth_book, id_user=self.the_user1)) # ------------------------------------------------------------------------------------------------------------------ def test_get_related_objects_selected_book_with_user_key(self): """ Tests returning data for related objects for selected book with 'user_key' attribute, meaning that user is anonymous (i.e. not logged) but with using user key. Done for API requests access. 
""" third_book = Book.objects.get(book_name='Third Book') related_third_book = Book.get_related_objects_selected_book( self.anonymous_user, third_book.id, self.the_user1.auth_token ) self.assertEqual(related_third_book['book'], third_book) self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0}) self.assertEqual(related_third_book['book_rating_count'], 3) self.assertEqual(related_third_book['added_book'], AddedBook.objects.get(id_book=third_book, id_user=self.the_user1)) self.assertEqual(related_third_book['comments'].count(), 1) self.assertEqual(related_third_book['comments'][0], BookComment.objects.filter(id_book=third_book).order_by('-id')[0]) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_book_name_category1(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing first category. """ first_book = Book.objects.get(book_name='First Book') third_book = Book.objects.get(book_name='Third Book') fourth_book = Book.objects.get(book_name='Fourth Book') first_book_dict = Utils.generate_sort_dict(first_book) third_book_dict = Utils.generate_sort_dict(third_book) fourth_book_dict = Utils.generate_sort_dict(fourth_book) self.assertTrue(isinstance(Book.sort_by_book_name(self.anonymous_user, self.category1), list)) self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category1)), 3) self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[0], fourth_book_dict) self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[2], third_book_dict) self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category1)), 3) self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[0], fourth_book_dict) self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[2], third_book_dict) 
self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category1)), 4) self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[0], first_book_dict) self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[3], third_book_dict) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_book_name_category2(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing first category. """ fifth_book = Book.objects.get(book_name='Fifth Book') seventh_book = Book.objects.get(book_name='Seventh Book<>&"') fifth_book_dict = Utils.generate_sort_dict(fifth_book) seventh_book_dict = Utils.generate_sort_dict(seventh_book) self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category2)), 2) self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category2)[0], seventh_book_dict) self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category2)), 2) self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category2)[0], seventh_book_dict) self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category2)), 3) self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[0], fifth_book_dict) self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[1], seventh_book_dict) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_author_category1(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing returned book authors at first category. 
""" self.assertTrue(isinstance(Book.sort_by_author(self.anonymous_user, self.category1), list)) self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category1)), 3) self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[0]['author'], self.author1.author_name) self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[2]['author'], self.author2.author_name) self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category1)), 3) self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[0]['author'], self.author1.author_name) self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[2]['author'], self.author2.author_name) self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category1)), 4) self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[0]['author'], self.author1.author_name) self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[3]['author'], self.author2.author_name) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_author_category2(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing returned book authors at second category. 
""" escaped_author_name = '&lt;AuthorSpecialSymbols&gt;&amp;&quot;' self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category2)), 2) self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category2)[0]['author'], escaped_author_name) self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category2)), 2) self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category2)[0]['author'], escaped_author_name) self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category2)), 3) self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[0]['author'], escaped_author_name) self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[1]['author'], self.author1.author_name) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_estimation_category1(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing returned book rating at first category. 
""" self.assertTrue(isinstance(Book.sort_by_estimation(self.anonymous_user, self.category1), list)) self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category1)), 3) self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[0]['rating'], 7) self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[2]['rating'], 6) self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category1)), 3) self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[0]['rating'], 7) self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[2]['rating'], 6) self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category1)), 4) self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[0]['rating'], 7) self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[2]['rating'], 6) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_estimation_category2(self): """ Must generate correct dictionaries for anonymous users, users with private books and without. Testing returned book rating at second category. 
""" self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category2)), 2) self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category2)[0]['rating'], 4) self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category2)), 2) self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category2)[0]['rating'], 4) self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category2)), 3) self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[0]['rating'], 4) self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[1]['rating'], None) # ------------------------------------------------------------------------------------------------------------------ def test_sort_by_readable(self): """ Must generate correct data by most readable books for anonymous users and users with private books. Testing count of sorted books with and without selected categories. """ sorted_structure = Book.sort_by_readable(self.anonymous_user, self.category1) self.assertTrue(isinstance(sorted_structure, list)) self.assertTrue(isinstance(sorted_structure[0], dict)) self.assertEqual(set(sorted_structure[0].keys()), {'id', 'name', 'author', 'url'}) self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1)), 3) self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1, count=2)), 2) self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1)), 3) self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1, count=2)), 2) self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1)), 3) self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1, count=2)), 2) self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user)), 4) 
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, count=2)), 2) self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user)), 4) self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, count=3)), 3) self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user)), 4) self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, count=2)), 2) # ------------------------------------------------------------------------------------------------------------------ def test_generate_books(self): """ Must generate correct dictionaries for Book data. """ books = Book.objects.all() self.assertTrue(isinstance(Book.generate_books(books), list)) self.assertEqual(len(Book.generate_books(books)), 7) self.assertEqual(len(Book.generate_books(books)[0].keys()), 5) self.assertEqual(Book.generate_books(books)[0], Utils.generate_sort_dict(books[0])) self.assertEqual(Book.generate_books(books)[6], Utils.generate_sort_dict(books[6])) # ------------------------------------------------------------------------------------------------------------------ def test_fetch_books(self): """ Must generate list of dicts with Books data depending on different criteria. """ self.assertTrue(isinstance(Book.fetch_books('book'), list)) self.assertEqual(len(Book.fetch_books('Second Book')), 1) self.assertEqual(len(Book.fetch_books('book')), 7) self.assertEqual(len(Book.fetch_books('ook')), 7) self.assertEqual(len(Book.fetch_books('trueAuthorNew')), 3) self.assertEqual(len(Book.fetch_books('author')), 7) self.assertEqual(len(Book.fetch_books('new')), 3) self.assertEqual(len(Book.fetch_books('True')), 3) # ------------------------------------------------------------------------------------------------------------------ def test_generate_existing_books(self): """ Must generate list of dicts with Books data depending on different criteria and excluding private books. 
""" self.assertTrue(isinstance(Book.generate_existing_books('book'), list)) self.assertEqual(len(Book.generate_existing_books('book')), 5) self.assertEqual(len(Book.generate_existing_books('Book')), 5) self.assertEqual(len(Book.generate_existing_books('bOoK')), 5) fourth_book = Book.objects.get(book_name='Fourth Book') test_book = Book.generate_existing_books('fourth') self.assertEqual(len(test_book), 1) self.assertTrue(isinstance(test_book[0], dict)) self.assertEqual(test_book[0], {'url': reverse('book', args=[fourth_book.id]), 'name': fourth_book.book_name}) test_private_book = Book.generate_existing_books('fifth') self.assertEqual(len(test_private_book), 0) # ------------------------------------------------------------------------------------------------------------------ def test_exclude_private_books(self): """ Must generate query sets or lists with Books depending on user type. """ all_books = Book.objects.all() list_all_books = list(all_books) self.assertEqual(Book.exclude_private_books(self.the_user1.id_user, all_books).count(), 7) self.assertEqual(Book.exclude_private_books(self.the_user2.id_user, all_books).count(), 5) self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, all_books), QuerySet)) self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, all_books), QuerySet)) self.assertEqual(len(Book.exclude_private_books(self.the_user1.id_user, list_all_books)), 7) self.assertEqual(len(Book.exclude_private_books(self.the_user2.id_user, list_all_books)), 5) self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, list_all_books), list)) self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, list_all_books), list)) self.assertTrue(self.anonymous_user.is_anonymous) self.assertEqual(Book.exclude_private_books(self.anonymous_user, all_books).count(), 5) self.assertEqual(len(Book.exclude_private_books(self.anonymous_user, list_all_books)), 5) 
self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, all_books), QuerySet)) self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, list_all_books), list)) # ------------------------------------------------------------------------------------------------------------------ def test_added_books(self): self.assertEqual(AddedBook.objects.all().count(), 8) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 3) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2).count(), 3) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user5).count(), 1) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user6).count(), 1) self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 4) self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 2) self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Fifth Book')).count(), 0) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book')).count(), 1) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1, id_book=Book.objects.get(book_name='Sixth Book')).count(), 1) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book')).count(), 1) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2, id_book=Book.objects.get(book_name='Fourth Book')).count(), 0) # ------------------------------------------------------------------------------------------------------------------ def test_added_books_change(self): """ Must save book page after changing it. 
""" added_book3 = AddedBook.objects.get(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book')) added_book6 = AddedBook.objects.get(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book')) self.assertEqual(added_book3.last_page, 1) self.assertEqual(added_book6.last_page, 1) added_book3.last_page = 500 added_book3.save() self.assertEqual(added_book3.last_page, 500) self.assertEqual(added_book6.last_page, 1) added_book6.last_page = 256 added_book6.save() self.assertEqual(added_book3.last_page, 500) self.assertEqual(added_book6.last_page, 256) # ------------------------------------------------------------------------------------------------------------------ def test_added_books_delete(self): added_book_third = AddedBook.objects.get(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book')) added_book_sixth = AddedBook.objects.get(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book')) added_book_third.delete() added_book_sixth.delete() self.assertEqual(AddedBook.objects.all().count(), 6) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 2) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 2) self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 3) self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 1) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book')).count(), 0) self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book')).count(), 0) # ------------------------------------------------------------------------------------------------------------------ def test_get_user_added_book(self): """ Must generate list of books that added by user (reading by user). 
""" self.assertTrue(self.anonymous_user.is_anonymous) self.assertEqual(len(AddedBook.get_user_added_books(self.anonymous_user)), 0) self.assertEqual(AddedBook.get_user_added_books(self.anonymous_user), []) self.assertEqual(AddedBook.get_user_added_books(self.the_user1.id_user).count(), 3) self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 1) self.assertNotEqual(AddedBook.get_user_added_books(self.the_user1.id_user), []) removed_obj = AddedBook.objects.get(id_book=Book.objects.get(book_name='Sixth Book'), id_user=self.the_user5) removed_obj.delete() self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 0) self.assertNotEqual(AddedBook.get_user_added_books(self.the_user5.id_user), []) # ------------------------------------------------------------------------------------------------------------------ def test_get_count_added(self): """ Must return count how many users is reading some book. """ third_book = Book.objects.get(book_name='Third Book') sixth_book = Book.objects.get(book_name='Sixth Book') not_existing_id = 10000 self.assertEqual(AddedBook.get_count_added(third_book.id), 2) self.assertEqual(AddedBook.get_count_added(sixth_book.id), 4) self.assertEqual(AddedBook.get_count_added(not_existing_id), 0) removed_third = AddedBook.objects.filter(id_user=self.the_user1, id_book=third_book) removed_third.delete() removed_sixth = AddedBook.objects.filter(id_user=self.the_user1, id_book=sixth_book) removed_sixth.delete() self.assertEqual(AddedBook.get_count_added(third_book.id), 1) self.assertEqual(AddedBook.get_count_added(sixth_book.id), 3) self.assertEqual(AddedBook.get_count_added(not_existing_id), 0) # ------------------------------------------------------------------------------------------------------------------ def test_book_rating(self): self.assertEqual(BookRating.objects.all().count(), 6) self.assertEqual(BookRating.objects.filter(id_book=Book.objects.filter(book_name='Third Book')).count(), 3) 
self.assertEqual(BookRating.objects.filter(id_user=self.the_user1).count(), 3) self.assertEqual(BookRating.objects.filter(id_user=self.the_user2).count(), 2) self.assertEqual(BookRating.objects.filter(rating=7).count(), 2) self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'), id_user=self.the_user1).count(), 1) self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'), id_user=self.the_user6).count(), 0) self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Fourth Book'), id_user=self.the_user1, rating=7).count(), 1) # ------------------------------------------------------------------------------------------------------------------ def test_changed_book_rating(self): removed_rating = BookRating.objects.get(id_book=Book.objects.get(book_name='Third Book'), id_user=self.the_user1) removed_rating.delete() self.assertEqual(BookRating.objects.all().count(), 5) changed_rating1 = BookRating.objects.get(id_book=Book.objects.get(book_name='Second Book'), id_user=self.the_user2) changed_rating2 = BookRating.objects.get(id_book=Book.objects.get(book_name='Fourth Book'), id_user=self.the_user1) self.assertEqual(BookRating.objects.filter(rating=7).count(), 2) self.assertEqual(changed_rating1.rating, 7) self.assertEqual(changed_rating2.rating, 7) changed_rating1.rating = 4 changed_rating1.save() changed_rating2.rating = 3 changed_rating2.save() self.assertEqual(changed_rating1.rating, 4) self.assertEqual(changed_rating2.rating, 3) self.assertEqual(BookRating.objects.filter(rating=7).count(), 0) self.assertEqual(BookRating.objects.filter(rating=4).count(), 2) self.assertEqual(BookRating.objects.filter(rating=3).count(), 2) # ------------------------------------------------------------------------------------------------------------------ def test_book_comment(self): self.assertEqual(BookComment.objects.all().count(), 5) 
self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 3) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 2) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 2) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 0) self.assertEqual(BookComment.objects.filter(id_user=self.the_user6).count(), 0) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'), id_user=self.the_user1).count(), 1) BookComment.objects.create(id_book=Book.objects.get(book_name='Second Book'), id_user=self.the_user1, text='New comment user1 book 2') self.assertEqual(BookComment.objects.all().count(), 6) self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 4) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 3) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'), id_user=self.the_user1).count(), 2) deleted_comment = BookComment.objects.get(id_book=Book.objects.get(book_name='Fourth Book'), id_user=self.the_user5) deleted_comment.delete() self.assertEqual(BookComment.objects.all().count(), 5) self.assertEqual(BookComment.objects.filter(id_user=self.the_user5).count(), 0) self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 1) # ------------------------------------------------------------------------------------------------------------------ def test_post_messages(self): self.assertEqual(Post.objects.all().count(), 3) self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 2) self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1) deleted_post = Post.objects.get(user=self.the_user1, heading='post 2') deleted_post.delete() self.assertEqual(Post.objects.all().count(), 2) 
self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 1) self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1) # ------------------------------------------------------------------------------------------------------------------ def test_support_messages(self): self.assertEqual(SupportMessage.objects.all().count(), 4) self.assertEqual(SupportMessage.objects.filter(email='testemail1@mail.co').count(), 2) self.assertEqual(SupportMessage.objects.filter(email='test_email22@mail.co').count(), 1) self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 4) checked_message = SupportMessage.objects.get(email='testemail1@mail.co', text='Test text1') checked_message.is_checked = True checked_message.save() self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 3) # ------------------------------------------------------------------------------------------------------------------ def tearDown(self): for book in Book.objects.all(): if os.path.exists(book.book_file.path): os.remove(book.book_file.path) if book.photo and os.path.exists(book.photo.path): os.remove(book.photo.path)
OlegKlimenko/Plamber
app/tests/test_models.py
Python
apache-2.0
53,921
using System.Collections.Generic; using System.Linq; namespace Testify { /// <summary> /// Provides factory methods for creating anonymous arrays. /// </summary> /// <typeparam name="T">The array's element type.</typeparam> internal static class AnonymousArray<T> { /// <summary> /// Creates an anonymous array. /// </summary> /// <param name="anon">The anonymous data provider to use when creating items.</param> /// <returns>A new anonymous array.</returns> /// <exception cref="System.ArgumentNullException"><paramref name="anon"/> is <see langword="null"/>.</exception> /// <exception cref="AnonymousDataException">The specified type could not be created.</exception> internal static T[] AnyArray(IAnonymousData anon) { Argument.NotNull(anon, nameof(anon)); return AnyEnumerable(anon).ToArray(); } /// <summary> /// Creates an anonymous sequence of items. /// </summary> /// <param name="anon">The anonymous data provider to use when creating items.</param> /// <returns>An <see cref="IEnumerable{T}"/> sequence of new items.</returns> /// <exception cref="System.ArgumentNullException"><paramref name="anon"/> is <see langword="null"/>.</exception> /// <exception cref="AnonymousDataException">The specified type could not be created.</exception> internal static IEnumerable<T> AnyEnumerable(IAnonymousData anon) { Argument.NotNull(anon, nameof(anon)); return anon.AnyEnumerable<T>(); } } }
wekempf/testify
src/Testify/AnonymousArray{T}.cs
C#
apache-2.0
1,638
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import { PostOptions, SettingsSidebar, } from '@support/ui/component'; import {SearchResultPostScreen} from '@support/ui/screen'; class SavedMessagesScreen { testID = { savedMessagesScreen: 'saved_messages.screen', closeSettingsButton: 'close.settings.button', }; savedMessagesScreen = element(by.id(this.testID.savedMessagesScreen)); closeSettingsButton = element(by.id(this.testID.closeSettingsButton)); getSearchResultPostItem = (postId, text, postProfileOptions = {}) => { return SearchResultPostScreen.getPost(postId, text, postProfileOptions); }; toBeVisible = async () => { await expect(this.savedMessagesScreen).toBeVisible(); return this.savedMessagesScreen; }; open = async () => { // # Open saved messages screen await SettingsSidebar.savedMessagesAction.tap(); return this.toBeVisible(); }; close = async () => { await this.closeSettingsButton.tap(); await expect(this.savedMessagesScreen).not.toBeVisible(); }; openPostOptionsFor = async (postId, text) => { const {searchResultPostItem} = await this.getSearchResultPostItem(postId, text); await expect(searchResultPostItem).toBeVisible(); // # Open post options await searchResultPostItem.longPress(); await PostOptions.toBeVisible(); }; } const savedMessagesScreen = new SavedMessagesScreen(); export default savedMessagesScreen;
mattermost/mattermost-mobile
detox/e2e/support/ui/screen/saved_messages.js
JavaScript
apache-2.0
1,602
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.command.script; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.script.Bindings; import javax.script.ScriptEngine; import javax.script.ScriptEngineFactory; import javax.script.ScriptEngineManager; import com.orientechnologies.orient.core.command.script.formatter.OJSScriptFormatter; import com.orientechnologies.orient.core.command.script.formatter.ORubyScriptFormatter; import com.orientechnologies.orient.core.command.script.formatter.OScriptFormatter; import com.orientechnologies.orient.core.db.ODatabaseComplex; import com.orientechnologies.orient.core.db.record.ODatabaseRecordTx; import com.orientechnologies.orient.core.metadata.function.OFunction; import com.orientechnologies.orient.core.sql.OSQLScriptEngine; /** * Executes Script Commands. 
* * @see OCommandScript * @author Luca Garulli * */ public class OScriptManager { protected final String DEF_LANGUAGE = "javascript"; protected ScriptEngineManager scriptEngineManager; protected Map<String, ScriptEngine> engines; protected String defaultLanguage = DEF_LANGUAGE; protected Map<String, OScriptFormatter> formatters = new HashMap<String, OScriptFormatter>(); protected List<OScriptInjection> injections = new ArrayList<OScriptInjection>(); public OScriptManager() { if (engines == null) { engines = new HashMap<String, ScriptEngine>(); scriptEngineManager = new ScriptEngineManager(); for (ScriptEngineFactory f : scriptEngineManager.getEngineFactories()) { registerEngine(f.getLanguageName().toLowerCase(), f.getScriptEngine()); if (defaultLanguage == null) defaultLanguage = f.getLanguageName(); } if (!engines.containsKey(DEF_LANGUAGE)) { registerEngine(DEF_LANGUAGE, scriptEngineManager.getEngineByName(DEF_LANGUAGE)); defaultLanguage = DEF_LANGUAGE; } if (!engines.containsKey(OSQLScriptEngine.ENGINE)) registerEngine(DEF_LANGUAGE, scriptEngineManager.getEngineByName(DEF_LANGUAGE)); registerFormatter(DEF_LANGUAGE, new OJSScriptFormatter()); registerFormatter("ruby", new ORubyScriptFormatter()); } } public String getFunction(final OFunction iFunction) { final OScriptFormatter formatter = formatters.get(iFunction.getLanguage().toLowerCase()); if (formatter == null) throw new IllegalArgumentException("Cannot find script formatter for the language '" + DEF_LANGUAGE + "'"); return formatter.getFunction(iFunction); } /** * Format the library of functions for a language. 
* * @param db * Current database instance * @param iLanguage * Language as filter * @return String containing all the functions */ public String getLibrary(final ODatabaseComplex<?> db, final String iLanguage) { final StringBuilder code = new StringBuilder(); final String[] functions = db.getMetadata().getFunctionLibrary().getFunctionNames(); for (String fName : functions) { final OFunction f = db.getMetadata().getFunctionLibrary().getFunction(fName); if (f.getLanguage().equalsIgnoreCase(iLanguage)) { code.append(getFunction(f)); code.append("\n"); } } return code.toString(); } public ScriptEngine getEngine(final String iLanguage) { if (iLanguage == null) throw new OCommandScriptException("No language was specified"); final String lang = iLanguage.toLowerCase(); if (!engines.containsKey(lang)) throw new OCommandScriptException("Unsupported language: " + iLanguage + ". Supported languages are: " + engines); final ScriptEngine scriptEngine = engines.get(lang); if (scriptEngine == null) throw new OCommandScriptException("Cannot find script engine: " + iLanguage); return scriptEngine; } public Bindings bind(final ScriptEngine iEngine, final ODatabaseRecordTx db, final Map<String, Object> iContext, final Map<Object, Object> iArgs) { final Bindings binding = iEngine.createBindings(); for (OScriptInjection i : injections) i.bind(binding); // BIND FIXED VARIABLES binding.put("db", new OScriptDocumentDatabaseWrapper(db)); binding.put("gdb", new OScriptGraphDatabaseWrapper(db)); // BIND CONTEXT VARIABLE INTO THE SCRIPT if (iContext != null) { for (Entry<String, Object> a : iContext.entrySet()) binding.put(a.getKey(), a.getValue()); } // BIND PARAMETERS INTO THE SCRIPT if (iArgs != null) { for (Entry<Object, Object> a : iArgs.entrySet()) binding.put(a.getKey().toString(), a.getValue()); binding.put("params", iArgs); } else binding.put("params", new HashMap<Object, Object>()); return binding; } /** * Unbinds variables * * @param binding */ public void unbind(Bindings binding) 
{ for (OScriptInjection i : injections) i.unbind(binding); } public void registerInjection(final OScriptInjection iInj) { if (!injections.contains(iInj)) injections.add(iInj); } public void unregisterInjection(final OScriptInjection iInj) { injections.remove(iInj); } public OScriptManager registerEngine(final String iLanguage, final ScriptEngine iEngine) { engines.put(iLanguage, iEngine); return this; } public OScriptManager registerFormatter(final String iLanguage, final OScriptFormatter iFormatterImpl) { formatters.put(iLanguage, iFormatterImpl); return this; } }
redox/OrientDB
core/src/main/java/com/orientechnologies/orient/core/command/script/OScriptManager.java
Java
apache-2.0
6,461
package com.karenpownall.android.aca.forartssake; public class Links { @Override public String toString(){ return super.toString(); } }
knpFletcher/CapstoneProject
ForArtsSake/app/src/main/java/com/karenpownall/android/aca/forartssake/LInks.java
Java
apache-2.0
158
// // Copyright (C) 2013 OpenSim Ltd. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with this program; if not, see <http://www.gnu.org/licenses/>. // #include "inet/physicallayer/ideal/IdealTransmitter.h" #include "inet/physicallayer/ideal/IdealTransmission.h" #include "inet/mobility/contract/IMobility.h" namespace inet { namespace physicallayer { Define_Module(IdealTransmitter); IdealTransmitter::IdealTransmitter() : bitrate(sNaN), maxCommunicationRange(sNaN), maxInterferenceRange(sNaN), maxDetectionRange(sNaN) { } void IdealTransmitter::initialize(int stage) { if (stage == INITSTAGE_LOCAL) { bitrate = bps(par("bitrate")); maxCommunicationRange = m(par("maxCommunicationRange")); maxInterferenceRange = m(par("maxInterferenceRange")); maxDetectionRange = m(par("maxDetectionRange")); } } void IdealTransmitter::printToStream(std::ostream& stream) const { stream << "IdealTransmitter, " << "bitrate = " << bitrate << ", " << "maxCommunicationRange = " << maxCommunicationRange << ", " << "maxInterferenceRange = " << maxInterferenceRange << ", " << "maxDetectionRange = " << maxDetectionRange; } const ITransmission *IdealTransmitter::createTransmission(const IRadio *transmitter, const cPacket *macFrame, const simtime_t startTime) const { const simtime_t duration = (b(macFrame->getBitLength()) / bitrate).get(); const simtime_t endTime = startTime + duration; IMobility *mobility = transmitter->getAntenna()->getMobility(); const 
Coord startPosition = mobility->getCurrentPosition(); const Coord endPosition = mobility->getCurrentPosition(); const EulerAngles startOrientation = mobility->getCurrentAngularPosition(); const EulerAngles endOrientation = mobility->getCurrentAngularPosition(); return new IdealTransmission(transmitter, macFrame, startTime, endTime, startPosition, endPosition, startOrientation, endOrientation, maxCommunicationRange, maxInterferenceRange, maxDetectionRange); } } // namespace physicallayer } // namespace inet
googleinterns/vectio
inet/src/inet/physicallayer/ideal/IdealTransmitter.cc
C++
apache-2.0
2,660
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.plugins.elastic.planning.functions; import org.apache.calcite.rex.RexCall; import com.google.common.base.Preconditions; public class UnaryFunction extends ElasticFunction { public UnaryFunction(String commonName){ super(commonName, commonName); } public UnaryFunction(String dremioName, String elasticName){ super(dremioName, elasticName); } @Override public FunctionRender render(FunctionRenderer renderer, RexCall call) { Preconditions.checkArgument(call.getOperands().size() == 1, "Unary operation %s should only have one argument, but got %s.", dremioName, call.getOperands().size()); FunctionRender operand = call.getOperands().get(0).accept(renderer.getVisitor()); return new FunctionRender(String.format("%s(%s)", elasticName, operand.getScript()), operand.getNulls()); } }
dremio/dremio-oss
plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/functions/UnaryFunction.java
Java
apache-2.0
1,461
# frozen_string_literal: true require 'spec_helper' describe User do describe '#steam_profile_url' do it 'creates a steam profile url based on the uid' do subject.stub(uid: '123') subject.steam_profile_url.should eql 'http://steamcommunity.com/profiles/123' end end describe '.find_for_steam_auth' do before do @auth = double(provider: 'steam', uid: '321', info: double(name: 'Kees', nickname: 'Killer')) end context 'new user' do it "creates and returns a new user if it can't find an existing one" do expect { User.find_for_steam_auth(@auth) }.to change { User.count }.from(0).to(1) end # JRuby and utf8mb4 don't play well together it 'cleans up crazy names when trying to create a new user' do @auth.stub(info: double(name: 'this.XKLL 🎂', nickname: 'this.XKLL 🎂')) expect { User.find_for_steam_auth(@auth) }.to change { User.count }.from(0).to(1) end end context 'existing user' do it 'returns an existing user if it could find one by uid' do create(:user, uid: '321') expect { User.find_for_steam_auth(@auth) }.not_to change { User.count } end it 'updates an existing user with new information' do user = create(:user, name: 'Karel', uid: '321') expect { User.find_for_steam_auth(@auth) }.not_to change { User.count } user.reload.name.should eql 'Kees' end it 'cleans up the nickname when trying to update an existing user' do user = create(:user, name: 'Karel', uid: '321') @auth.stub(uid: '321', provider: 'steam', info: double(name: 'this.XKLL 🎂', nickname: 'this.XKLL 🎂')) expect { User.find_for_steam_auth(@auth) }.not_to change { User.count } user.reload.name.should eql 'this.XKLL 🎂' end end end describe '#total_reservation_seconds' do it 'calculates the amount of time a user has reserved servers' do user = create(:user) create(:reservation, user: user, starts_at: 1.hour.from_now, ends_at: 2.hours.from_now) user.total_reservation_seconds.should == 3600 end end describe '#top10?' 
do it 'returns if a user is in the top 10' do user = create(:user) create(:reservation, user: user) user.should be_top10 end end describe '#donator?' do it 'is no longer a donator if the membership expired' do user = create(:user) user.groups << Group.donator_group user.group_users.last.update_attribute(:expires_at, 1.day.ago) user.reload.should_not be_donator end it 'is a donator when the membership is eternal' do user = create(:user) user.groups << Group.donator_group user.group_users.last.update_attribute(:expires_at, nil) user.reload.should be_donator end it 'is a donator when the membership expires in future' do user = create(:user) user.groups << Group.donator_group user.group_users.last.update_attribute(:expires_at, 1.day.from_now) user.reload.should be_donator end end describe '#donator_until' do it 'knows how long it is still a donator' do user = create(:user) user.groups << Group.donator_group expiration = 1.day.from_now user.group_users.last.update_attribute(:expires_at, 1.day.from_now) user.donator_until.to_date.should == expiration.to_date end end describe '#admin?' do it 'is an admin when in the admin group' do user = create(:user) user.groups << Group.admin_group user.should be_admin end end describe '#league_admin?' do it 'is a league admin when in the league admin group' do user = create(:user) user.groups << Group.league_admin_group user.should be_league_admin end end end
Arie/serveme
spec/models/user_spec.rb
Ruby
apache-2.0
3,905
<?php /* All Emoncms code is released under the GNU Affero General Public License. See COPYRIGHT.txt and LICENSE.txt. --------------------------------------------------------------------- Emoncms - open source energy visualisation Part of the OpenEnergyMonitor project: http://openenergymonitor.org */ // no direct access defined('EMONCMS_EXEC') or die('Restricted access'); function cossmiccontrol_controller() { global $path, $session, $route; $result = false; // Load html,css,js pages to the client if ($route->format == 'html') { if ($route->action == 'view' && $session['write']) { if ($route->subaction == 'dashboard') $result = view("Modules/cossmiccontrol/Views/summary.php", array()); if ($route->subaction == 'appliances') $result = view("Modules/cossmiccontrol/Views/appliances.php", array()); if ($route->subaction == 'scheduler') $result = view("Modules/cossmiccontrol/Views/scheduler.php", array()); if ($route->subaction == 'history') $result = view("Modules/cossmiccontrol/Views/history.php", array()); if ($route->subaction == 'ranking') $result = view("Modules/cossmiccontrol/Views/ranking.php", array()); } } // JSON API if ($route->format == 'json') { } return array('content'=>$result); }
UbiCollab/PersuasiveCommunities
cossmicvagrant/data/emoncms/Modules/cossmiccontrol/cossmiccontrol_controller.php
PHP
apache-2.0
1,329
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.security.option; import java.util.Map; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; /** * The super share payoff style. */ @BeanDefinition public class SupersharePayoffStyle extends PayoffStyle { /** Serialization version. */ private static final long serialVersionUID = 1L; /** * The upper bound. */ @PropertyDefinition private double _lowerBound; /** * The lower bound. */ @PropertyDefinition private double _upperBound; /** * Creates an instance. */ private SupersharePayoffStyle() { } /** * Creates an instance. * * @param upperBound the upper bound * @param lowerBound the lower bound */ public SupersharePayoffStyle(final double upperBound, final double lowerBound) { setUpperBound(upperBound); setLowerBound(lowerBound); } //------------------------------------------------------------------------- @Override public <T> T accept(final PayoffStyleVisitor<T> visitor) { return visitor.visitSupersharePayoffStyle(this); } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code SupersharePayoffStyle}. 
* @return the meta-bean, not null */ public static SupersharePayoffStyle.Meta meta() { return SupersharePayoffStyle.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(SupersharePayoffStyle.Meta.INSTANCE); } @Override public SupersharePayoffStyle.Meta metaBean() { return SupersharePayoffStyle.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets the upper bound. * @return the value of the property */ public double getLowerBound() { return _lowerBound; } /** * Sets the upper bound. * @param lowerBound the new value of the property */ public void setLowerBound(double lowerBound) { this._lowerBound = lowerBound; } /** * Gets the the {@code lowerBound} property. * @return the property, not null */ public final Property<Double> lowerBound() { return metaBean().lowerBound().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the lower bound. * @return the value of the property */ public double getUpperBound() { return _upperBound; } /** * Sets the lower bound. * @param upperBound the new value of the property */ public void setUpperBound(double upperBound) { this._upperBound = upperBound; } /** * Gets the the {@code upperBound} property. 
* @return the property, not null */ public final Property<Double> upperBound() { return metaBean().upperBound().createProperty(this); } //----------------------------------------------------------------------- @Override public SupersharePayoffStyle clone() { return JodaBeanUtils.cloneAlways(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { SupersharePayoffStyle other = (SupersharePayoffStyle) obj; return JodaBeanUtils.equal(getLowerBound(), other.getLowerBound()) && JodaBeanUtils.equal(getUpperBound(), other.getUpperBound()) && super.equals(obj); } return false; } @Override public int hashCode() { int hash = 7; hash = hash * 31 + JodaBeanUtils.hashCode(getLowerBound()); hash = hash * 31 + JodaBeanUtils.hashCode(getUpperBound()); return hash ^ super.hashCode(); } @Override public String toString() { StringBuilder buf = new StringBuilder(96); buf.append("SupersharePayoffStyle{"); int len = buf.length(); toString(buf); if (buf.length() > len) { buf.setLength(buf.length() - 2); } buf.append('}'); return buf.toString(); } @Override protected void toString(StringBuilder buf) { super.toString(buf); buf.append("lowerBound").append('=').append(JodaBeanUtils.toString(getLowerBound())).append(',').append(' '); buf.append("upperBound").append('=').append(JodaBeanUtils.toString(getUpperBound())).append(',').append(' '); } //----------------------------------------------------------------------- /** * The meta-bean for {@code SupersharePayoffStyle}. */ public static class Meta extends PayoffStyle.Meta { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code lowerBound} property. */ private final MetaProperty<Double> _lowerBound = DirectMetaProperty.ofReadWrite( this, "lowerBound", SupersharePayoffStyle.class, Double.TYPE); /** * The meta-property for the {@code upperBound} property. 
*/ private final MetaProperty<Double> _upperBound = DirectMetaProperty.ofReadWrite( this, "upperBound", SupersharePayoffStyle.class, Double.TYPE); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "lowerBound", "upperBound"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 1200084733: // lowerBound return _lowerBound; case -1690761732: // upperBound return _upperBound; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends SupersharePayoffStyle> builder() { return new DirectBeanBuilder<SupersharePayoffStyle>(new SupersharePayoffStyle()); } @Override public Class<? extends SupersharePayoffStyle> beanType() { return SupersharePayoffStyle.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code lowerBound} property. * @return the meta-property, not null */ public final MetaProperty<Double> lowerBound() { return _lowerBound; } /** * The meta-property for the {@code upperBound} property. 
* @return the meta-property, not null */ public final MetaProperty<Double> upperBound() { return _upperBound; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case 1200084733: // lowerBound return ((SupersharePayoffStyle) bean).getLowerBound(); case -1690761732: // upperBound return ((SupersharePayoffStyle) bean).getUpperBound(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case 1200084733: // lowerBound ((SupersharePayoffStyle) bean).setLowerBound((Double) newValue); return; case -1690761732: // upperBound ((SupersharePayoffStyle) bean).setUpperBound((Double) newValue); return; } super.propertySet(bean, propertyName, newValue, quiet); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
McLeodMoores/starling
projects/financial-types/src/main/java/com/opengamma/financial/security/option/SupersharePayoffStyle.java
Java
apache-2.0
8,151
# Задача 5. Вариант 6. # Напишите программу, которая бы при запуске случайным образом отображала название одного из двух спутников Марса. # Velyan A. S. # 27.05.2016 print("\nназвание одного из двух спутников Марса:") import random satellite=["Фобос", "Деймос"] s=random.choice(satellite) print(s) input("Нажмите Enter для выхода")
Mariaanisimova/pythonintask
PMIa/2015/Velyan_A_S/task_5_6.py
Python
apache-2.0
503
namespace TrelloNet { public class AddAdminToOrganization : Notification { public NotificationData Data { get; set; } public class NotificationData { public OrganizationName Organization { get; set; } } } }
shootsoft/TrelloMailer
src/TrelloNet/Notifications/AddAdminToOrganization.cs
C#
apache-2.0
221
package com.cardpay.pccredit.intopieces.web; import java.io.IOException; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.multipart.MultipartFile; import com.cardpay.pccredit.QZBankInterface.model.Circle; import com.cardpay.pccredit.QZBankInterface.service.CircleService; import com.cardpay.pccredit.QZBankInterface.service.ECIFService; import com.cardpay.pccredit.QZBankInterface.web.IESBForECIFReturnMap; import com.cardpay.pccredit.customer.filter.VideoAccessoriesFilter; import com.cardpay.pccredit.customer.model.CustomerInfor; import com.cardpay.pccredit.customer.service.CustomerInforService; import com.cardpay.pccredit.intopieces.constant.Constant; import com.cardpay.pccredit.intopieces.filter.CustomerApplicationProcessFilter; import com.cardpay.pccredit.intopieces.model.CustomerApplicationInfo; import com.cardpay.pccredit.intopieces.model.CustomerApplicationProcess; import com.cardpay.pccredit.intopieces.model.QzApplnAttachmentList; import com.cardpay.pccredit.intopieces.model.QzApplnJyxx; import com.cardpay.pccredit.intopieces.model.QzApplnNbscyjb; import com.cardpay.pccredit.intopieces.model.VideoAccessories; import com.cardpay.pccredit.intopieces.service.AttachmentListService; import com.cardpay.pccredit.intopieces.service.CustomerApplicationInfoService; import 
com.cardpay.pccredit.intopieces.service.CustomerApplicationIntopieceWaitService; import com.cardpay.pccredit.intopieces.service.CustomerApplicationProcessService; import com.cardpay.pccredit.intopieces.service.IntoPiecesService; import com.cardpay.pccredit.intopieces.service.JyxxService; import com.cardpay.pccredit.intopieces.service.NbscyjbService; import com.cardpay.workflow.constant.ApproveOperationTypeEnum; import com.wicresoft.jrad.base.auth.IUser; import com.wicresoft.jrad.base.auth.JRadModule; import com.wicresoft.jrad.base.auth.JRadOperation; import com.wicresoft.jrad.base.constant.JRadConstants; import com.wicresoft.jrad.base.database.model.QueryResult; import com.wicresoft.jrad.base.web.JRadModelAndView; import com.wicresoft.jrad.base.web.controller.BaseController; import com.wicresoft.jrad.base.web.result.JRadPagedQueryResult; import com.wicresoft.jrad.base.web.result.JRadReturnMap; import com.wicresoft.jrad.base.web.security.LoginManager; import com.wicresoft.util.spring.Beans; import com.wicresoft.util.spring.mvc.mv.AbstractModelAndView; import com.wicresoft.util.web.RequestHelper; @Controller @RequestMapping("/intopieces/intopiecesxingzheng1/*") @JRadModule("intopieces.intopiecesxingzheng1") public class IntoPiecesXingzhengbeginControl extends BaseController { @Autowired private IntoPiecesService intoPiecesService; @Autowired private CustomerInforService customerInforService; @Autowired private CustomerApplicationIntopieceWaitService customerApplicationIntopieceWaitService; @Autowired private CustomerApplicationProcessService customerApplicationProcessService; @Autowired private CircleService circleService; @Autowired private ECIFService eCIFService; @Autowired private NbscyjbService nbscyjbService; @Autowired private JyxxService jyxxService; @Autowired private AttachmentListService attachmentListService; /** * 行政岗初进件页面 * * @param filter * @param request * @return */ @ResponseBody @RequestMapping(value = "xingzhengbegin.page", method = { 
RequestMethod.GET }) public AbstractModelAndView xingzhengbegin(@ModelAttribute CustomerApplicationProcessFilter filter, HttpServletRequest request) throws SQLException { filter.setRequest(request); IUser user = Beans.get(LoginManager.class).getLoggedInUser(request); String loginId = user.getId(); filter.setLoginId(loginId); filter.setNodeName(Constant.status_xingzheng1); QueryResult<CustomerApplicationIntopieceWaitForm> result = customerApplicationIntopieceWaitService.recieveIntopieceWaitForm(filter); JRadPagedQueryResult<CustomerApplicationIntopieceWaitForm> pagedResult = new JRadPagedQueryResult<CustomerApplicationIntopieceWaitForm>(filter, result); JRadModelAndView mv = new JRadModelAndView( "/intopieces/intopieces_wait/intopiecesApprove_xingzhengbegin", request); mv.addObject(PAGED_RESULT, pagedResult); mv.addObject("filter", filter); return mv; } /** * 进入上传扫描件页面 * * @param request * @return */ @ResponseBody @RequestMapping(value = "create_upload.page") public AbstractModelAndView createUpload(@ModelAttribute VideoAccessoriesFilter filter,HttpServletRequest request) { String appId = request.getParameter("appId"); //是否只读标记 String type = request.getParameter("type"); List<QzDcnrUploadForm> result =intoPiecesService.getUploadList(appId); JRadModelAndView mv = new JRadModelAndView("/intopieces/intopieces_wait/intopiecesApprove_xingzhengbegin_upload", request); mv.addObject("result", result); mv.addObject("appId",appId); mv.addObject("type",type); return mv; } /** * 调查内容保存 * * @param request * @return * @throws IOException */ @ResponseBody @RequestMapping(value = "saveYxzl.json",method = { RequestMethod.POST }) @JRadOperation(JRadOperation.CREATE) public Map<String,Object> saveYxzl(@RequestParam(value = "file", required = false) MultipartFile file,HttpServletRequest request,HttpServletResponse response){ Map<String,Object> map = new HashMap<String,Object>(); try { if(file==null||file.isEmpty()){ map.put(JRadConstants.SUCCESS, false); map.put(JRadConstants.MESSAGE, 
Constant.FILE_EMPTY); return map; } intoPiecesService.saveDcnrByCustomerId(request.getParameter("appId"),request.getParameter("reportId"),request.getParameter("remark"),file); map.put(JRadConstants.SUCCESS, true); map.put(JRadConstants.MESSAGE, Constant.SUCCESS_MESSAGE); } catch (Exception e) { e.printStackTrace(); map.put(JRadConstants.SUCCESS, false); map.put(JRadConstants.MESSAGE, Constant.FAIL_MESSAGE); return map; } return map; } /** * 申请件审批通过 * 从行政岗--授信审批岗 * @param filter * @param request * @return */ @ResponseBody @RequestMapping(value = "save_apply.json") public JRadReturnMap saveApply(HttpServletRequest request) throws SQLException { JRadReturnMap returnMap = new JRadReturnMap(); try { String appId = request.getParameter("id"); CustomerApplicationProcess process = customerApplicationProcessService.findByAppId(appId); request.setAttribute("serialNumber", process.getSerialNumber()); request.setAttribute("applicationId", process.getApplicationId()); request.setAttribute("applicationStatus", ApproveOperationTypeEnum.APPROVE.toString()); request.setAttribute("objection", "false"); //查找审批金额 Circle circle = circleService.findCircleByAppId(appId); request.setAttribute("examineAmount", circle.getContractAmt()); customerApplicationIntopieceWaitService.updateCustomerApplicationProcessBySerialNumberApplicationInfo1(request); returnMap.addGlobalMessage(CHANGE_SUCCESS); } catch (Exception e) { returnMap.addGlobalMessage("保存失败"); e.printStackTrace(); } return returnMap; } /** * 申请件退件 * 从行政岗--初审 * @param filter * @param request * @return */ @ResponseBody @RequestMapping(value = "returnAppln.json") public JRadReturnMap returnAppln(HttpServletRequest request) throws SQLException { JRadReturnMap returnMap = new JRadReturnMap(); try { String appId = request.getParameter("appId"); String operate = request.getParameter("operate"); String nodeName = request.getParameter("nodeName"); //退回客户经理和其他岗位不一致 if("1".equals(nodeName)){ intoPiecesService.checkDoNotToManager(appId,request); 
}else{ intoPiecesService.returnAppln(appId, request,nodeName); } returnMap.addGlobalMessage(CHANGE_SUCCESS); } catch (Exception e) { returnMap.addGlobalMessage("保存失败"); e.printStackTrace(); } return returnMap; } /** * 进入电核页面 * * @param request * @return */ @ResponseBody @RequestMapping(value = "in_applove.page") public AbstractModelAndView inApplove(HttpServletRequest request) { String appId = request.getParameter("appId"); String type = request.getParameter("type"); JRadModelAndView mv = new JRadModelAndView("/qzbankinterface/appIframeInfo/page7_change", request); QzApplnNbscyjb qzApplnNbscyjb = nbscyjbService.findNbscyjbByAppId(appId); mv = new JRadModelAndView("/qzbankinterface/appIframeInfo/page7_for_approve", request); mv.addObject("qzApplnNbscyjb", qzApplnNbscyjb); mv.addObject("type", type); CustomerInfor customerInfo = customerInforService.findCustomerInforById(intoPiecesService.findCustomerApplicationInfoByApplicationId(appId).getCustomerId()); mv.addObject("customerInfo", customerInfo); //修改为appid查询 QzApplnJyxx qzApplnJyxx = jyxxService.findJyxx(customerInfo.getId(), null); mv.addObject("qzApplnJyxx", qzApplnJyxx); IUser user = Beans.get(LoginManager.class).getLoggedInUser(request); String loginId = user.getLogin(); String displayName = user.getDisplayName(); mv.addObject("displayName", displayName); mv.addObject("loginId", loginId); return mv; } //iframe_approve(申请后) @ResponseBody @RequestMapping(value = "iframe_approve.page") public AbstractModelAndView iframeApprove(HttpServletRequest request) { JRadModelAndView mv = new JRadModelAndView("/qzbankinterface/appIframeInfo/iframe_approve", request); String customerInforId = RequestHelper.getStringValue(request, ID); String appId = RequestHelper.getStringValue(request, "appId"); if (StringUtils.isNotEmpty(customerInforId)) { CustomerInfor customerInfor = customerInforService.findCustomerInforById(customerInforId); mv.addObject("customerInfor", customerInfor); mv.addObject("customerId", 
customerInfor.getId()); mv.addObject("appId", appId); mv.addObject("operate", Constant.status_xingzheng1); } return mv; } }
wangxu2013/PCCREDIT_QZ
src/java/com/cardpay/pccredit/intopieces/web/IntoPiecesXingzhengbeginControl.java
Java
apache-2.0
10,908
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lambda.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lambda-2015-03-31/ListTags" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListTagsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * The list of tags assigned to the function. * </p> */ private com.amazonaws.internal.SdkInternalMap<String, String> tags; /** * <p> * The list of tags assigned to the function. * </p> * * @return The list of tags assigned to the function. */ public java.util.Map<String, String> getTags() { if (tags == null) { tags = new com.amazonaws.internal.SdkInternalMap<String, String>(); } return tags; } /** * <p> * The list of tags assigned to the function. * </p> * * @param tags * The list of tags assigned to the function. */ public void setTags(java.util.Map<String, String> tags) { this.tags = tags == null ? null : new com.amazonaws.internal.SdkInternalMap<String, String>(tags); } /** * <p> * The list of tags assigned to the function. * </p> * * @param tags * The list of tags assigned to the function. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ListTagsResult withTags(java.util.Map<String, String> tags) { setTags(tags); return this; } public ListTagsResult addTagsEntry(String key, String value) { if (null == this.tags) { this.tags = new com.amazonaws.internal.SdkInternalMap<String, String>(); } if (this.tags.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.tags.put(key, value); return this; } /** * Removes all the entries added into Tags. * * @return Returns a reference to this object so that method calls can be chained together. */ public ListTagsResult clearTagsEntries() { this.tags = null; return this; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListTagsResult == false) return false; ListTagsResult other = (ListTagsResult) obj; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); return hashCode; } @Override public ListTagsResult clone() { try { return (ListTagsResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
dagnir/aws-sdk-java
aws-java-sdk-lambda/src/main/java/com/amazonaws/services/lambda/model/ListTagsResult.java
Java
apache-2.0
4,500
package org.unipop.process.strategyregistrar; import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies; import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversalStrategies; import org.apache.tinkerpop.gremlin.structure.Graph; import org.unipop.process.coalesce.UniGraphCoalesceStepStrategy; import org.unipop.process.edge.EdgeStepsStrategy; import org.unipop.process.order.UniGraphOrderStrategy; import org.unipop.process.properties.UniGraphPropertiesStrategy; import org.unipop.process.repeat.UniGraphRepeatStepStrategy; import org.unipop.process.graph.UniGraphStepStrategy; import org.unipop.process.vertex.UniGraphVertexStepStrategy; import org.unipop.process.where.UniGraphWhereStepStrategy; public class StandardStrategyProvider implements StrategyProvider { @Override public TraversalStrategies get() { DefaultTraversalStrategies traversalStrategies = new DefaultTraversalStrategies(); traversalStrategies.addStrategies( new UniGraphStepStrategy(), new UniGraphVertexStepStrategy(), new EdgeStepsStrategy(), new UniGraphPropertiesStrategy(), new UniGraphCoalesceStepStrategy(), new UniGraphWhereStepStrategy(), new UniGraphRepeatStepStrategy(), new UniGraphOrderStrategy()); TraversalStrategies.GlobalCache.getStrategies(Graph.class).toList().forEach(traversalStrategies::addStrategies); return traversalStrategies; } }
unipop-graph/unipop
unipop-core/src/org/unipop/process/strategyregistrar/StandardStrategyProvider.java
Java
apache-2.0
1,536
package com.pepoc.joke.view.activity; import android.content.Intent; import android.os.Bundle; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.text.TextUtils; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.RelativeLayout; import android.widget.Toast; import com.pepoc.joke.R; import com.pepoc.joke.data.bean.JokeComment; import com.pepoc.joke.data.bean.JokeContent; import com.pepoc.joke.data.user.UserManager; import com.pepoc.joke.presenter.JokeContentPresenter; import com.pepoc.joke.util.Util; import com.pepoc.joke.view.adapter.JokeContentAdapter; import com.pepoc.joke.view.iview.IJokeContentView; import java.util.List; import butterknife.Bind; import butterknife.ButterKnife; public class JokeContentActivity extends BaseSwipeBackActivity implements View.OnClickListener, SwipeRefreshLayout.OnRefreshListener, IJokeContentView<JokeComment> { @Bind(R.id.toolbar) Toolbar toolbar; @Bind(R.id.recyclerview_joke_content) RecyclerView recyclerviewJokeContent; @Bind(R.id.swiperefresh_joke_content) SwipeRefreshLayout swiperefreshJokeContent; @Bind(R.id.et_joke_comment) EditText etJokeComment; @Bind(R.id.btn_send_comment) Button btnSendComment; @Bind(R.id.rl_comment) RelativeLayout rlComment; private JokeContent jokeContent; private JokeContentAdapter jokeContentAdapter; private JokeContentPresenter jokeContentPresenter; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_joke_content); ButterKnife.bind(this); jokeContentPresenter = new JokeContentPresenter(this); Intent intent = getIntent(); jokeContent = intent.getParcelableExtra("JokeContent"); init(); jokeContentPresenter.getComment(context, jokeContent.getJokeId()); } @Override public void init() { super.init(); 
toolbar.setTitle(R.string.activity_joke_content); setSupportActionBar(toolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); toolbar.setNavigationOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onBackPressed(); } }); swiperefreshJokeContent.setColorSchemeResources(R.color.colorAccent); swiperefreshJokeContent.setOnRefreshListener(this); LinearLayoutManager linearLayoutManager = new LinearLayoutManager(context); recyclerviewJokeContent.setLayoutManager(linearLayoutManager); jokeContentAdapter = new JokeContentAdapter(context, jokeContentPresenter); jokeContentAdapter.setJokeContent(jokeContent); recyclerviewJokeContent.setAdapter(jokeContentAdapter); btnSendComment.setOnClickListener(this); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.btn_send_comment: if (UserManager.getCurrentUser() != null) { String commentContent = etJokeComment.getText().toString(); if (TextUtils.isEmpty(commentContent)) { Toast.makeText(context, "评论内容不能为空", Toast.LENGTH_SHORT).show(); } else { jokeContentPresenter.comment(context, jokeContent.getJokeId(), commentContent); } } else { Toast.makeText(context, "登录后才能评论", Toast.LENGTH_SHORT).show(); } break; } } @Override public void onRefresh() { jokeContentPresenter.getComment(context, jokeContent.getJokeId()); } @Override public void showLoading() { } @Override public void hideLoading() { } @Override public void updateCommentData(List<JokeComment> datas) { swiperefreshJokeContent.setRefreshing(false); jokeContentAdapter.setJokeComments(datas); jokeContentAdapter.notifyDataSetChanged(); } @Override public void commentSuccess() { Toast.makeText(context, "comment success", Toast.LENGTH_SHORT).show(); etJokeComment.setText(""); Util.hiddenSoftKeyborad(etJokeComment, context); jokeContentPresenter.getComment(context, jokeContent.getJokeId()); } @Override public void onError() { swiperefreshJokeContent.setRefreshing(false); } }
pepoc/Joke
Joke/app/src/main/java/com/pepoc/joke/view/activity/JokeContentActivity.java
Java
apache-2.0
4,698
/* * Copyright (c) 2017 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.icecp.node.security.crypto.utils; import com.intel.icecp.core.security.crypto.key.Key; import com.intel.icecp.core.security.crypto.exception.hash.HashError; import com.intel.icecp.core.security.crypto.exception.key.InvalidKeyTypeException; import com.intel.icecp.node.security.SecurityConstants; import com.intel.icecp.core.security.crypto.key.asymmetric.PrivateKey; import com.intel.icecp.core.security.crypto.key.asymmetric.PublicKey; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Base64; /** * Collection of utility methods, used by crypto package classes * */ public class CryptoUtils { private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); /** * Computes an hash of the given input, using the specified algorithm. 
* * @param input * @param algorithm * @return The hash value, if the algorithm is supported * @throws HashError if something went wrong */ public static byte[] hash(byte[] input, String algorithm) throws HashError { try { MessageDigest m = MessageDigest.getInstance(algorithm); m.update(input); return m.digest(); } catch (NoSuchAlgorithmException ex) { throw new HashError("Unable to compute the hash of the given input.", ex); } } /** * Converts a given byte array into a HEX String representation * * @param bytes bytes to convert into HEX string * @return The corresponding HEX string */ public static String bytesToHex(byte[] bytes) { char[] hexChars = new char[bytes.length * 2]; for (int j = 0; j < bytes.length; j++) { int v = bytes[j] & 0xFF; hexChars[j * 2] = HEX_ARRAY[v >>> 4]; hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F]; } return new String(hexChars); } /** * Converts a HEX string into bytes * * @param hexString HEX String to convert into bytes * @return The corresponding bytes */ public static byte[] hexStringToByteArray(String hexString) { int len = hexString.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(hexString.charAt(i), 16) << 4) + Character.digit(hexString.charAt(i + 1), 16)); } return data; } /** * Base64 encoding using {@link Base64#getEncoder() } encoder * * @param data Data to encode * @return The Base64 encoded String * @throws IllegalArgumentException In case of error while encoding the given bytes */ public static byte[] base64Encode(byte[] data) throws IllegalArgumentException { return Base64.getEncoder().encode(data); } /** * Base64 decoding using {@link Base64#getDecoder() } decoder * * @param data Data to decode * @return The decoded bytes * @throws IllegalArgumentException In case of error in decoding the given bytes */ public static byte[] base64Decode(byte[] data) throws IllegalArgumentException { return Base64.getDecoder().decode(data); } /** * Compares the given two byte 
arrays * * @param first * @param second * @return True iif first == true */ public static boolean compareBytes(byte[] first, byte[] second) { return MessageDigest.isEqual(first, second); } /** * Given a key, returns a suitable signing algorithm (if exists). * * @param key * @return * @throws InvalidKeyTypeException */ public static String getPublicKeyAlgorithmFromKey(Key key) throws InvalidKeyTypeException { String keyType; if (key instanceof PrivateKey) { PrivateKey k = (PrivateKey) key; keyType = k.getKey().getAlgorithm(); } else if (key instanceof PublicKey) { PublicKey k = (PublicKey) key; keyType = k.getPublicKey().getAlgorithm(); } else { // All other types of keys are not valid asymmetric keys throw new InvalidKeyTypeException("Invalid key type " + key.getClass().getName()); } System.out.println("********" + keyType); if (SecurityConstants.SHA1withDSA.contains(keyType)) { return SecurityConstants.SHA1withDSA; } else if (SecurityConstants.SHA1withRSA.contains(keyType)) { return SecurityConstants.SHA1withRSA; } else if (SecurityConstants.SHA256withRSA.contains(keyType)) { return SecurityConstants.SHA256withRSA; } throw new InvalidKeyTypeException("Invalid key type " + keyType); } }
icecp/icecp
icecp-node/src/main/java/com/intel/icecp/node/security/crypto/utils/CryptoUtils.java
Java
apache-2.0
5,374
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.pinpoint.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Provides the results of a query that retrieved the data for a standard execution metric that applies to a journey * activity, and provides information about that query. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-2016-12-01/JourneyExecutionActivityMetricsResponse" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class JourneyExecutionActivityMetricsResponse implements Serializable, Cloneable, StructuredPojo { /** * <p> * The type of activity that the metric applies to. Possible values are: * </p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of two * paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. * </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants down * one of as many as five paths in a journey. 
* </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a specific date * and time before moving participants to the next activity in a journey. * </p> * </li> * </ul> */ private String activityType; /** * <p> * The unique identifier for the application that the metric applies to. * </p> */ private String applicationId; /** * <p> * The unique identifier for the activity that the metric applies to. * </p> */ private String journeyActivityId; /** * <p> * The unique identifier for the journey that the metric applies to. * </p> */ private String journeyId; /** * <p> * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the activity * and updated the data for the metric. * </p> */ private String lastEvaluatedTime; /** * <p> * A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon Pinpoint * Developer Guide</a>. * </p> */ private java.util.Map<String, String> metrics; /** * <p> * The type of activity that the metric applies to. Possible values are: * </p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of two * paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. 
* </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants down * one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a specific date * and time before moving participants to the next activity in a journey. * </p> * </li> * </ul> * * @param activityType * The type of activity that the metric applies to. Possible values are:</p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of * two paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. * </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of * participants down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a * specific date and time before moving participants to the next activity in a journey. * </p> * </li> */ public void setActivityType(String activityType) { this.activityType = activityType; } /** * <p> * The type of activity that the metric applies to. 
Possible values are: * </p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of two * paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. * </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants down * one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a specific date * and time before moving participants to the next activity in a journey. * </p> * </li> * </ul> * * @return The type of activity that the metric applies to. Possible values are:</p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of * two paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. * </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of * participants down one of as many as five paths in a journey. 
* </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a * specific date and time before moving participants to the next activity in a journey. * </p> * </li> */ public String getActivityType() { return this.activityType; } /** * <p> * The type of activity that the metric applies to. Possible values are: * </p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of two * paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. * </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants down * one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a specific date * and time before moving participants to the next activity in a journey. * </p> * </li> * </ul> * * @param activityType * The type of activity that the metric applies to. Possible values are:</p> * <ul> * <li> * <p> * CONDITIONAL_SPLIT - For a yes/no split activity, which is an activity that sends participants down one of * two paths in a journey. * </p> * </li> * <li> * <p> * HOLDOUT - For a holdout activity, which is an activity that stops a journey for a specified percentage of * participants. * </p> * </li> * <li> * <p> * MESSAGE - For an email activity, which is an activity that sends an email message to participants. 
* </p> * </li> * <li> * <p> * MULTI_CONDITIONAL_SPLIT - For a multivariate split activity, which is an activity that sends participants * down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * RANDOM_SPLIT - For a random split activity, which is an activity that sends specified percentages of * participants down one of as many as five paths in a journey. * </p> * </li> * <li> * <p> * WAIT - For a wait activity, which is an activity that waits for a certain amount of time or until a * specific date and time before moving participants to the next activity in a journey. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withActivityType(String activityType) { setActivityType(activityType); return this; } /** * <p> * The unique identifier for the application that the metric applies to. * </p> * * @param applicationId * The unique identifier for the application that the metric applies to. */ public void setApplicationId(String applicationId) { this.applicationId = applicationId; } /** * <p> * The unique identifier for the application that the metric applies to. * </p> * * @return The unique identifier for the application that the metric applies to. */ public String getApplicationId() { return this.applicationId; } /** * <p> * The unique identifier for the application that the metric applies to. * </p> * * @param applicationId * The unique identifier for the application that the metric applies to. * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withApplicationId(String applicationId) { setApplicationId(applicationId); return this; } /** * <p> * The unique identifier for the activity that the metric applies to. * </p> * * @param journeyActivityId * The unique identifier for the activity that the metric applies to. 
*/ public void setJourneyActivityId(String journeyActivityId) { this.journeyActivityId = journeyActivityId; } /** * <p> * The unique identifier for the activity that the metric applies to. * </p> * * @return The unique identifier for the activity that the metric applies to. */ public String getJourneyActivityId() { return this.journeyActivityId; } /** * <p> * The unique identifier for the activity that the metric applies to. * </p> * * @param journeyActivityId * The unique identifier for the activity that the metric applies to. * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withJourneyActivityId(String journeyActivityId) { setJourneyActivityId(journeyActivityId); return this; } /** * <p> * The unique identifier for the journey that the metric applies to. * </p> * * @param journeyId * The unique identifier for the journey that the metric applies to. */ public void setJourneyId(String journeyId) { this.journeyId = journeyId; } /** * <p> * The unique identifier for the journey that the metric applies to. * </p> * * @return The unique identifier for the journey that the metric applies to. */ public String getJourneyId() { return this.journeyId; } /** * <p> * The unique identifier for the journey that the metric applies to. * </p> * * @param journeyId * The unique identifier for the journey that the metric applies to. * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withJourneyId(String journeyId) { setJourneyId(journeyId); return this; } /** * <p> * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the activity * and updated the data for the metric. * </p> * * @param lastEvaluatedTime * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the * activity and updated the data for the metric. 
*/ public void setLastEvaluatedTime(String lastEvaluatedTime) { this.lastEvaluatedTime = lastEvaluatedTime; } /** * <p> * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the activity * and updated the data for the metric. * </p> * * @return The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the * activity and updated the data for the metric. */ public String getLastEvaluatedTime() { return this.lastEvaluatedTime; } /** * <p> * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the activity * and updated the data for the metric. * </p> * * @param lastEvaluatedTime * The date and time, in ISO 8601 format, when Amazon Pinpoint last evaluated the execution status of the * activity and updated the data for the metric. * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withLastEvaluatedTime(String lastEvaluatedTime) { setLastEvaluatedTime(lastEvaluatedTime); return this; } /** * <p> * A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon Pinpoint * Developer Guide</a>. * </p> * * @return A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon * Pinpoint Developer Guide</a>. */ public java.util.Map<String, String> getMetrics() { return metrics; } /** * <p> * A JSON object that contains the results of the query. 
The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon Pinpoint * Developer Guide</a>. * </p> * * @param metrics * A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon * Pinpoint Developer Guide</a>. */ public void setMetrics(java.util.Map<String, String> metrics) { this.metrics = metrics; } /** * <p> * A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon Pinpoint * Developer Guide</a>. * </p> * * @param metrics * A JSON object that contains the results of the query. The results vary depending on the type of activity * (ActivityType). For information about the structure and contents of the results, see the <a * href="https://docs.aws.amazon.com/pinpoint/latest/developerguide/analytics-standard-metrics.html">Amazon * Pinpoint Developer Guide</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse withMetrics(java.util.Map<String, String> metrics) { setMetrics(metrics); return this; } /** * Add a single Metrics entry * * @see JourneyExecutionActivityMetricsResponse#withMetrics * @returns a reference to this object so that method calls can be chained together. 
*/ public JourneyExecutionActivityMetricsResponse addMetricsEntry(String key, String value) { if (null == this.metrics) { this.metrics = new java.util.HashMap<String, String>(); } if (this.metrics.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.metrics.put(key, value); return this; } /** * Removes all the entries added into Metrics. * * @return Returns a reference to this object so that method calls can be chained together. */ public JourneyExecutionActivityMetricsResponse clearMetricsEntries() { this.metrics = null; return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getActivityType() != null) sb.append("ActivityType: ").append(getActivityType()).append(","); if (getApplicationId() != null) sb.append("ApplicationId: ").append(getApplicationId()).append(","); if (getJourneyActivityId() != null) sb.append("JourneyActivityId: ").append(getJourneyActivityId()).append(","); if (getJourneyId() != null) sb.append("JourneyId: ").append(getJourneyId()).append(","); if (getLastEvaluatedTime() != null) sb.append("LastEvaluatedTime: ").append(getLastEvaluatedTime()).append(","); if (getMetrics() != null) sb.append("Metrics: ").append(getMetrics()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof JourneyExecutionActivityMetricsResponse == false) return false; JourneyExecutionActivityMetricsResponse other = (JourneyExecutionActivityMetricsResponse) obj; if (other.getActivityType() == null ^ this.getActivityType() == null) return false; if (other.getActivityType() != null && 
other.getActivityType().equals(this.getActivityType()) == false) return false; if (other.getApplicationId() == null ^ this.getApplicationId() == null) return false; if (other.getApplicationId() != null && other.getApplicationId().equals(this.getApplicationId()) == false) return false; if (other.getJourneyActivityId() == null ^ this.getJourneyActivityId() == null) return false; if (other.getJourneyActivityId() != null && other.getJourneyActivityId().equals(this.getJourneyActivityId()) == false) return false; if (other.getJourneyId() == null ^ this.getJourneyId() == null) return false; if (other.getJourneyId() != null && other.getJourneyId().equals(this.getJourneyId()) == false) return false; if (other.getLastEvaluatedTime() == null ^ this.getLastEvaluatedTime() == null) return false; if (other.getLastEvaluatedTime() != null && other.getLastEvaluatedTime().equals(this.getLastEvaluatedTime()) == false) return false; if (other.getMetrics() == null ^ this.getMetrics() == null) return false; if (other.getMetrics() != null && other.getMetrics().equals(this.getMetrics()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getActivityType() == null) ? 0 : getActivityType().hashCode()); hashCode = prime * hashCode + ((getApplicationId() == null) ? 0 : getApplicationId().hashCode()); hashCode = prime * hashCode + ((getJourneyActivityId() == null) ? 0 : getJourneyActivityId().hashCode()); hashCode = prime * hashCode + ((getJourneyId() == null) ? 0 : getJourneyId().hashCode()); hashCode = prime * hashCode + ((getLastEvaluatedTime() == null) ? 0 : getLastEvaluatedTime().hashCode()); hashCode = prime * hashCode + ((getMetrics() == null) ? 
0 : getMetrics().hashCode()); return hashCode; } @Override public JourneyExecutionActivityMetricsResponse clone() { try { return (JourneyExecutionActivityMetricsResponse) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.pinpoint.model.transform.JourneyExecutionActivityMetricsResponseMarshaller.getInstance().marshall(this, protocolMarshaller); } }
aws/aws-sdk-java
aws-java-sdk-pinpoint/src/main/java/com/amazonaws/services/pinpoint/model/JourneyExecutionActivityMetricsResponse.java
Java
apache-2.0
26,522
// Copyright 2008 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gwtjsonrpc.client.impl.ser; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwtjsonrpc.client.impl.JsonSerializer; import com.google.gwtjsonrpc.client.impl.ResultDeserializer; /** Base class for generated JsonSerializer implementations. */ public abstract class ObjectSerializer<T extends Object> extends JsonSerializer<T> implements ResultDeserializer<T> { @Override public void printJson(final StringBuilder sb, final Object o) { sb.append("{"); printJsonImpl(0, sb, o); sb.append("}"); } protected abstract int printJsonImpl(int field, StringBuilder sb, Object o); @Override public T fromResult(JavaScriptObject responseObject) { final JavaScriptObject result = objectResult(responseObject); return result == null ? null : fromJson(result); } static native JavaScriptObject objectResult(JavaScriptObject responseObject) /*-{ return responseObject.result; }-*/ ; }
GerritCodeReview/gwtjsonrpc
src/main/java/com/google/gwtjsonrpc/client/impl/ser/ObjectSerializer.java
Java
apache-2.0
1,544
import PinSvg from './svg/PinSvg'; export default function ActivityInfo({title, price, stars, reviews, location}) { return ( <> <div className='h2 line-height-2 mb1'> <span className='travel-results-result-text'>{title}</span> <span className='travel-results-result-subtext h3'>&bull;</span> <span className='travel-results-result-subtext h3'>$&nbsp;</span> <span className='black bold'>{price}</span> </div> <div className='h4 line-height-2'> <div className='inline-block relative mr1 h3 line-height-2'> <div className='travel-results-result-stars green'>★★★★★</div> </div> <span className='travel-results-result-subtext mr1'>{reviews} Reviews</span> <span className='travel-results-result-subtext'> <PinSvg /> {location} </span> </div> </> ); }
ampproject/ampstart
templates/travel/components/ActivityInfo.js
JavaScript
apache-2.0
887
/* * Copyright 2018, Flávio Keglevich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.fkeglevich.rawdumper.camera.feature; import com.fkeglevich.rawdumper.camera.async.direct.AsyncParameterSender; import com.fkeglevich.rawdumper.camera.data.DataRange; import com.fkeglevich.rawdumper.camera.parameter.Parameter; import com.fkeglevich.rawdumper.camera.parameter.ParameterCollection; import com.fkeglevich.rawdumper.camera.parameter.value.ValueValidator; /** * TODO: Add class header * * Created by Flávio Keglevich on 09/05/17. */ public abstract class RangeFeature<T extends Comparable<T>> extends WritableFeature<T, DataRange<T>> { private final AsyncParameterSender asyncParameterSender; RangeFeature(AsyncParameterSender asyncParameterSender, Parameter<T> featureParameter, ParameterCollection parameterCollection, ValueValidator<T, DataRange<T>> validator) { super(featureParameter, parameterCollection, validator); this.asyncParameterSender = asyncParameterSender; } public abstract void setValueAsProportion(double proportion); void setValueAsync(T value) { checkFeatureAvailability(this); if (!getValidator().isValid(value)) throw new IllegalArgumentException(); asyncParameterSender.sendParameterAsync(parameter, value); } }
fkeglevich/Raw-Dumper
app/src/main/java/com/fkeglevich/rawdumper/camera/feature/RangeFeature.java
Java
apache-2.0
1,858
package ananas.waymq.droid.api; public interface ICoreApi { IBaseDirectory getBaseDirectory(); IMemberManager getMemberManager(); void save(); void load(); }
xukun0217/wayMQ
wayMQ-droid/src/ananas/waymq/droid/api/ICoreApi.java
Java
apache-2.0
168
package org.arquillian.algeron.pact.provider.core; import net.jcip.annotations.NotThreadSafe; import org.assertj.core.util.Arrays; import org.junit.Test; import java.net.URI; import java.net.URL; import static org.assertj.core.api.Assertions.assertThat; @NotThreadSafe public class StateTypeConverterTest { @Test public void should_convert_empty_string_to_empty_string_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); // when String[] convertedStringArray = typeConverter.convert("", String[].class); // then assertThat(convertedStringArray).isEmpty(); } @Test public void should_convert_blank_string_to_empty_string_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); // when String[] convertedStringArray = typeConverter.convert(" ", String[].class); // then assertThat(convertedStringArray).isEmpty(); } @Test public void should_convert_sequence_of_blank_strings_to_empty_string_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); String[] arrayOfEmptyStrings = Arrays.array("", "", "", "", ""); // when String[] convertedStringArray = typeConverter.convert(" , , , , ", String[].class); // then assertThat(convertedStringArray).isEqualTo(arrayOfEmptyStrings); } @Test public void should_convert_single_element_to_one_element_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); String[] singleElementArray = Arrays.array("one element"); // when String[] convertedStringArray = typeConverter.convert("one element", String[].class); // then assertThat(convertedStringArray).isEqualTo(singleElementArray); } @Test public void should_convert_single_element_with_delimiter_to_one_element_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); String[] singleElementArray = Arrays.array("one element"); // when String[] convertedStringArray = typeConverter.convert("one 
element,", String[].class); // then assertThat(convertedStringArray).isEqualTo(singleElementArray); } @Test public void should_convert_single_element_with_delimiters_to_one_element_array() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); String[] singleElementArray = Arrays.array("one element"); // when String[] convertedStringArray = typeConverter.convert("one element,,,,,,,", String[].class); // then assertThat(convertedStringArray).isEqualTo(singleElementArray); } @Test public void should_convert_blank_to_empty_string_when_appear_in_sequence_with_non_blanks() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); String[] expectedArray = Arrays.array("a", "", "test", "", "", "b"); // when String[] convertedStringArray = typeConverter.convert("a, , test , , , b ", String[].class); // then assertThat(convertedStringArray).isEqualTo(expectedArray); } @Test public void should_convert_string() throws Exception { // given String expectedString = "Hello"; StateTypeConverter typeConverter = new StateTypeConverter(); // when String convertedString = typeConverter.convert("Hello", String.class); // then assertThat(convertedString).isEqualTo(expectedString); } @Test public void should_convert_string_to_integer() throws Exception { // given Integer expectedInteger = Integer.valueOf(15); StateTypeConverter typeConverter = new StateTypeConverter(); // when Integer convertedInteger = typeConverter.convert("15", Integer.class); // then assertThat(convertedInteger).isEqualTo(expectedInteger); } @Test public void should_convert_string_to_double() throws Exception { // given Double expecteDouble = Double.valueOf("123"); StateTypeConverter typeConverter = new StateTypeConverter(); // when Double convertedDouble = typeConverter.convert("123", Double.class); // then assertThat(convertedDouble).isEqualTo(expecteDouble); } @Test public void should_convert_string_to_long() throws Exception { // given Long 
expectedLong = Long.valueOf(-456); StateTypeConverter typeConverter = new StateTypeConverter(); // when Long convertedLong = typeConverter.convert("-456", Long.class); // then assertThat(convertedLong).isEqualTo(expectedLong); } @Test public void should_convert_string_to_boolean() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); // when Boolean convertedBoolen = typeConverter.convert("True", Boolean.class); // then assertThat(convertedBoolen).isTrue(); } @Test public void should_convert_string_to_URL() throws Exception { // given URL expectedUrl = new URI("http://www.arquillian.org").toURL(); StateTypeConverter typeConverter = new StateTypeConverter(); // when URL convertedUrl = typeConverter.convert("http://www.arquillian.org", URL.class); // then assertThat(convertedUrl).isEqualTo(expectedUrl); } @Test public void should_convert_string_to_URI() throws Exception { // given URI expectedUri = new URI("http://www.arquillian.org"); StateTypeConverter typeConverter = new StateTypeConverter(); // when URI convertedUri = typeConverter.convert("http://www.arquillian.org", URI.class); // then assertThat(convertedUri).isEqualTo(expectedUri); } @Test(expected = IllegalArgumentException.class) public void should_throw_exception_for_unsupported_type() throws Exception { // given StateTypeConverter typeConverter = new StateTypeConverter(); // when typeConverter.convert("typeConverter", StateTypeConverter.class); // then // exception should be thrown } // ------------------------------------------------------------------------------------------- }
arquillian/arquillian-algeron
pact/provider/core/src/test/java/org/arquillian/algeron/pact/provider/core/StateTypeConverterTest.java
Java
apache-2.0
6,653
/* * Copyright (c) 2011-2014 The original author or authors * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.vertx.jdbcclient.impl.actions; import java.sql.JDBCType; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import io.vertx.core.impl.logging.Logger; import io.vertx.core.impl.logging.LoggerFactory; public interface JDBCPropertyAccessor<T> { Logger LOG = LoggerFactory.getLogger(JDBCColumnDescriptor.class); T get() throws SQLException; static <T> JDBCPropertyAccessor<T> create(JDBCPropertyAccessor<T> accessor) { return create(accessor, null); } static <T> JDBCPropertyAccessor<T> create(JDBCPropertyAccessor<T> accessor, T fallbackIfUnsupported) { return () -> { try { return accessor.get(); } catch (SQLFeatureNotSupportedException e) { LOG.debug("Unsupported access properties in SQL metadata", e); return fallbackIfUnsupported; } }; } static JDBCPropertyAccessor<Integer> jdbcType(JDBCPropertyAccessor<Integer> accessor) { return create(accessor, JDBCType.OTHER.getVendorTypeNumber()); } }
vert-x3/vertx-jdbc-client
src/main/java/io/vertx/jdbcclient/impl/actions/JDBCPropertyAccessor.java
Java
apache-2.0
1,633
#include "common/http/conn_manager_impl.h" #include <cstdint> #include <functional> #include <list> #include <memory> #include <string> #include <vector> #include "envoy/buffer/buffer.h" #include "envoy/common/time.h" #include "envoy/event/dispatcher.h" #include "envoy/network/drain_decision.h" #include "envoy/router/router.h" #include "envoy/ssl/connection.h" #include "envoy/stats/scope.h" #include "envoy/tracing/http_tracer.h" #include "common/buffer/buffer_impl.h" #include "common/common/assert.h" #include "common/common/empty_string.h" #include "common/common/enum_to_int.h" #include "common/common/fmt.h" #include "common/common/utility.h" #include "common/http/codes.h" #include "common/http/conn_manager_utility.h" #include "common/http/exception.h" #include "common/http/header_map_impl.h" #include "common/http/headers.h" #include "common/http/http1/codec_impl.h" #include "common/http/http2/codec_impl.h" #include "common/http/path_utility.h" #include "common/http/utility.h" #include "common/network/utility.h" #include "absl/strings/escaping.h" #include "absl/strings/match.h" namespace Envoy { namespace Http { namespace { template <class T> using FilterList = std::list<std::unique_ptr<T>>; // Shared helper for recording the latest filter used. template <class T> void recordLatestDataFilter(const typename FilterList<T>::iterator current_filter, T*& latest_filter, const FilterList<T>& filters) { // If this is the first time we're calling onData, just record the current filter. if (latest_filter == nullptr) { latest_filter = current_filter->get(); return; } // We want to keep this pointing at the latest filter in the filter list that has received the // onData callback. To do so, we compare the current latest with the *previous* filter. If they // match, then we must be processing a new filter for the first time. We omit this check if we're // the first filter, since the above check handles that case. 
// // We compare against the previous filter to avoid multiple filter iterations from reseting the // pointer: If we just set latest to current, then the first onData filter iteration would // correctly iterate over the filters and set latest, but on subsequent onData iterations // we'd start from the beginning again, potentially allowing filter N to modify the buffer even // though filter M > N was the filter that inserted data into the buffer. if (current_filter != filters.begin() && latest_filter == std::prev(current_filter)->get()) { latest_filter = current_filter->get(); } } } // namespace ConnectionManagerStats ConnectionManagerImpl::generateStats(const std::string& prefix, Stats::Scope& scope) { return { {ALL_HTTP_CONN_MAN_STATS(POOL_COUNTER_PREFIX(scope, prefix), POOL_GAUGE_PREFIX(scope, prefix), POOL_HISTOGRAM_PREFIX(scope, prefix))}, prefix, scope}; } ConnectionManagerTracingStats ConnectionManagerImpl::generateTracingStats(const std::string& prefix, Stats::Scope& scope) { return {CONN_MAN_TRACING_STATS(POOL_COUNTER_PREFIX(scope, prefix + "tracing."))}; } ConnectionManagerListenerStats ConnectionManagerImpl::generateListenerStats(const std::string& prefix, Stats::Scope& scope) { return {CONN_MAN_LISTENER_STATS(POOL_COUNTER_PREFIX(scope, prefix))}; } ConnectionManagerImpl::ConnectionManagerImpl(ConnectionManagerConfig& config, const Network::DrainDecision& drain_close, Runtime::RandomGenerator& random_generator, Http::Context& http_context, Runtime::Loader& runtime, const LocalInfo::LocalInfo& local_info, Upstream::ClusterManager& cluster_manager, Server::OverloadManager* overload_manager, TimeSource& time_source) : config_(config), stats_(config_.stats()), conn_length_(new Stats::Timespan(stats_.named_.downstream_cx_length_ms_, time_source)), drain_close_(drain_close), random_generator_(random_generator), http_context_(http_context), runtime_(runtime), local_info_(local_info), cluster_manager_(cluster_manager), listener_stats_(config_.listenerStats()), 
overload_stop_accepting_requests_ref_( overload_manager ? overload_manager->getThreadLocalOverloadState().getState( Server::OverloadActionNames::get().StopAcceptingRequests) : Server::OverloadManager::getInactiveState()), overload_disable_keepalive_ref_( overload_manager ? overload_manager->getThreadLocalOverloadState().getState( Server::OverloadActionNames::get().DisableHttpKeepAlive) : Server::OverloadManager::getInactiveState()), time_source_(time_source) {} const HeaderMapImpl& ConnectionManagerImpl::continueHeader() { CONSTRUCT_ON_FIRST_USE(HeaderMapImpl, {Http::Headers::get().Status, std::to_string(enumToInt(Code::Continue))}); } void ConnectionManagerImpl::initializeReadFilterCallbacks(Network::ReadFilterCallbacks& callbacks) { read_callbacks_ = &callbacks; stats_.named_.downstream_cx_total_.inc(); stats_.named_.downstream_cx_active_.inc(); if (read_callbacks_->connection().ssl()) { stats_.named_.downstream_cx_ssl_total_.inc(); stats_.named_.downstream_cx_ssl_active_.inc(); } read_callbacks_->connection().addConnectionCallbacks(*this); if (config_.idleTimeout()) { connection_idle_timer_ = read_callbacks_->connection().dispatcher().createTimer( [this]() -> void { onIdleTimeout(); }); connection_idle_timer_->enableTimer(config_.idleTimeout().value()); } read_callbacks_->connection().setDelayedCloseTimeout(config_.delayedCloseTimeout()); read_callbacks_->connection().setConnectionStats( {stats_.named_.downstream_cx_rx_bytes_total_, stats_.named_.downstream_cx_rx_bytes_buffered_, stats_.named_.downstream_cx_tx_bytes_total_, stats_.named_.downstream_cx_tx_bytes_buffered_, nullptr, &stats_.named_.downstream_cx_delayed_close_timeout_}); } ConnectionManagerImpl::~ConnectionManagerImpl() { stats_.named_.downstream_cx_destroy_.inc(); stats_.named_.downstream_cx_active_.dec(); if (read_callbacks_->connection().ssl()) { stats_.named_.downstream_cx_ssl_active_.dec(); } if (codec_) { if (codec_->protocol() == Protocol::Http2) { 
stats_.named_.downstream_cx_http2_active_.dec(); } else { stats_.named_.downstream_cx_http1_active_.dec(); } } conn_length_->complete(); user_agent_.completeConnectionLength(*conn_length_); } void ConnectionManagerImpl::checkForDeferredClose() { if (drain_state_ == DrainState::Closing && streams_.empty() && !codec_->wantsToWrite()) { read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWriteAndDelay); } } void ConnectionManagerImpl::doEndStream(ActiveStream& stream) { // The order of what happens in this routine is important and a little complicated. We first see // if the stream needs to be reset. If it needs to be, this will end up invoking reset callbacks // and then moving the stream to the deferred destruction list. If the stream has not been reset, // we move it to the deferred deletion list here. Then, we potentially close the connection. This // must be done after deleting the stream since the stream refers to the connection and must be // deleted first. bool reset_stream = false; // If the response encoder is still associated with the stream, reset the stream. The exception // here is when Envoy "ends" the stream by calling recreateStream at which point recreateStream // explicitly nulls out response_encoder to avoid the downstream being notified of the // Envoy-internal stream instance being ended. if (stream.response_encoder_ != nullptr && (!stream.state_.remote_complete_ || !stream.state_.local_complete_)) { // Indicate local is complete at this point so that if we reset during a continuation, we don't // raise further data or trailers. 
stream.state_.local_complete_ = true; stream.response_encoder_->getStream().resetStream(StreamResetReason::LocalReset); reset_stream = true; } if (!reset_stream) { doDeferredStreamDestroy(stream); } if (reset_stream && codec_->protocol() != Protocol::Http2) { drain_state_ = DrainState::Closing; } checkForDeferredClose(); // Reading may have been disabled for the non-multiplexing case, so enable it again. // Also be sure to unwind any read-disable done by the prior downstream // connection. if (drain_state_ != DrainState::Closing && codec_->protocol() != Protocol::Http2) { while (!read_callbacks_->connection().readEnabled()) { read_callbacks_->connection().readDisable(false); } } if (connection_idle_timer_ && streams_.empty()) { connection_idle_timer_->enableTimer(config_.idleTimeout().value()); } } void ConnectionManagerImpl::doDeferredStreamDestroy(ActiveStream& stream) { if (stream.stream_idle_timer_ != nullptr) { stream.stream_idle_timer_->disableTimer(); stream.stream_idle_timer_ = nullptr; } stream.disarmRequestTimeout(); stream.state_.destroyed_ = true; for (auto& filter : stream.decoder_filters_) { filter->handle_->onDestroy(); } for (auto& filter : stream.encoder_filters_) { // Do not call on destroy twice for dual registered filters. 
if (!filter->dual_filter_) { filter->handle_->onDestroy(); } } read_callbacks_->connection().dispatcher().deferredDelete(stream.removeFromList(streams_)); } StreamDecoder& ConnectionManagerImpl::newStream(StreamEncoder& response_encoder, bool is_internally_created) { if (connection_idle_timer_) { connection_idle_timer_->disableTimer(); } ENVOY_CONN_LOG(debug, "new stream", read_callbacks_->connection()); ActiveStreamPtr new_stream(new ActiveStream(*this)); new_stream->state_.is_internally_created_ = is_internally_created; new_stream->response_encoder_ = &response_encoder; new_stream->response_encoder_->getStream().addCallbacks(*new_stream); new_stream->buffer_limit_ = new_stream->response_encoder_->getStream().bufferLimit(); // If the network connection is backed up, the stream should be made aware of it on creation. // Both HTTP/1.x and HTTP/2 codecs handle this in StreamCallbackHelper::addCallbacks_. ASSERT(read_callbacks_->connection().aboveHighWatermark() == false || new_stream->high_watermark_count_ > 0); new_stream->moveIntoList(std::move(new_stream), streams_); return **streams_.begin(); } Network::FilterStatus ConnectionManagerImpl::onData(Buffer::Instance& data, bool) { if (!codec_) { codec_ = config_.createCodec(read_callbacks_->connection(), data, *this); if (codec_->protocol() == Protocol::Http2) { stats_.named_.downstream_cx_http2_total_.inc(); stats_.named_.downstream_cx_http2_active_.inc(); } else { stats_.named_.downstream_cx_http1_total_.inc(); stats_.named_.downstream_cx_http1_active_.inc(); } } bool redispatch; do { redispatch = false; try { codec_->dispatch(data); } catch (const CodecProtocolException& e) { // HTTP/1.1 codec has already sent a 400 response if possible. HTTP/2 codec has already sent // GOAWAY. ENVOY_CONN_LOG(debug, "dispatch error: {}", read_callbacks_->connection(), e.what()); stats_.named_.downstream_cx_protocol_error_.inc(); // In the protocol error case, we need to reset all streams now. 
Since we do a flush write and // delayed close, the connection might stick around long enough for a pending stream to come // back and try to encode. resetAllStreams(); read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWriteAndDelay); return Network::FilterStatus::StopIteration; } // Processing incoming data may release outbound data so check for closure here as well. checkForDeferredClose(); // The HTTP/1 codec will pause dispatch after a single message is complete. We want to // either redispatch if there are no streams and we have more data. If we have a single // complete non-WebSocket stream but have not responded yet we will pause socket reads // to apply back pressure. if (codec_->protocol() != Protocol::Http2) { if (read_callbacks_->connection().state() == Network::Connection::State::Open && data.length() > 0 && streams_.empty()) { redispatch = true; } if (!streams_.empty() && streams_.front()->state_.remote_complete_) { read_callbacks_->connection().readDisable(true); } } } while (redispatch); return Network::FilterStatus::StopIteration; } void ConnectionManagerImpl::resetAllStreams() { while (!streams_.empty()) { // Mimic a downstream reset in this case. 
streams_.front()->onResetStream(StreamResetReason::ConnectionTermination, absl::string_view()); } } void ConnectionManagerImpl::onEvent(Network::ConnectionEvent event) { if (event == Network::ConnectionEvent::LocalClose) { stats_.named_.downstream_cx_destroy_local_.inc(); } if (event == Network::ConnectionEvent::RemoteClose) { stats_.named_.downstream_cx_destroy_remote_.inc(); } if (event == Network::ConnectionEvent::RemoteClose || event == Network::ConnectionEvent::LocalClose) { if (connection_idle_timer_) { connection_idle_timer_->disableTimer(); connection_idle_timer_.reset(); } if (drain_timer_) { drain_timer_->disableTimer(); drain_timer_.reset(); } } if (!streams_.empty()) { if (event == Network::ConnectionEvent::LocalClose) { stats_.named_.downstream_cx_destroy_local_active_rq_.inc(); } if (event == Network::ConnectionEvent::RemoteClose) { stats_.named_.downstream_cx_destroy_remote_active_rq_.inc(); } stats_.named_.downstream_cx_destroy_active_rq_.inc(); user_agent_.onConnectionDestroy(event, true); resetAllStreams(); } } void ConnectionManagerImpl::onGoAway() { // Currently we do nothing with remote go away frames. In the future we can decide to no longer // push resources if applicable. } void ConnectionManagerImpl::onIdleTimeout() { ENVOY_CONN_LOG(debug, "idle timeout", read_callbacks_->connection()); stats_.named_.downstream_cx_idle_timeout_.inc(); if (!codec_) { // No need to delay close after flushing since an idle timeout has already fired. Attempt to // write out buffered data one last time and issue a local close if successful. 
read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWrite); } else if (drain_state_ == DrainState::NotDraining) { startDrainSequence(); } } void ConnectionManagerImpl::onDrainTimeout() { ASSERT(drain_state_ != DrainState::NotDraining); codec_->goAway(); drain_state_ = DrainState::Closing; checkForDeferredClose(); } void ConnectionManagerImpl::chargeTracingStats(const Tracing::Reason& tracing_reason, ConnectionManagerTracingStats& tracing_stats) { switch (tracing_reason) { case Tracing::Reason::ClientForced: tracing_stats.client_enabled_.inc(); break; case Tracing::Reason::NotTraceableRequestId: tracing_stats.not_traceable_.inc(); break; case Tracing::Reason::Sampling: tracing_stats.random_sampling_.inc(); break; case Tracing::Reason::ServiceForced: tracing_stats.service_forced_.inc(); break; default: throw std::invalid_argument( fmt::format("invalid tracing reason, value: {}", static_cast<int32_t>(tracing_reason))); } } ConnectionManagerImpl::ActiveStream::ActiveStream(ConnectionManagerImpl& connection_manager) : connection_manager_(connection_manager), snapped_route_config_(connection_manager.config_.routeConfigProvider().config()), stream_id_(connection_manager.random_generator_.random()), request_response_timespan_(new Stats::Timespan( connection_manager_.stats_.named_.downstream_rq_time_, connection_manager_.timeSource())), stream_info_(connection_manager_.codec_->protocol(), connection_manager_.timeSource()) { connection_manager_.stats_.named_.downstream_rq_total_.inc(); connection_manager_.stats_.named_.downstream_rq_active_.inc(); if (connection_manager_.codec_->protocol() == Protocol::Http2) { connection_manager_.stats_.named_.downstream_rq_http2_total_.inc(); } else { connection_manager_.stats_.named_.downstream_rq_http1_total_.inc(); } stream_info_.setDownstreamLocalAddress( connection_manager_.read_callbacks_->connection().localAddress()); stream_info_.setDownstreamDirectRemoteAddress( 
connection_manager_.read_callbacks_->connection().remoteAddress()); // Initially, the downstream remote address is the source address of the // downstream connection. That can change later in the request's lifecycle, // based on XFF processing, but setting the downstream remote address here // prevents surprises for logging code in edge cases. stream_info_.setDownstreamRemoteAddress( connection_manager_.read_callbacks_->connection().remoteAddress()); stream_info_.setDownstreamSslConnection(connection_manager_.read_callbacks_->connection().ssl()); if (connection_manager_.config_.streamIdleTimeout().count()) { idle_timeout_ms_ = connection_manager_.config_.streamIdleTimeout(); stream_idle_timer_ = connection_manager_.read_callbacks_->connection().dispatcher().createTimer( [this]() -> void { onIdleTimeout(); }); resetIdleTimer(); } if (connection_manager_.config_.requestTimeout().count()) { std::chrono::milliseconds request_timeout_ms_ = connection_manager_.config_.requestTimeout(); request_timer_ = connection_manager.read_callbacks_->connection().dispatcher().createTimer( [this]() -> void { onRequestTimeout(); }); request_timer_->enableTimer(request_timeout_ms_); } stream_info_.setRequestedServerName( connection_manager_.read_callbacks_->connection().requestedServerName()); } ConnectionManagerImpl::ActiveStream::~ActiveStream() { stream_info_.onRequestComplete(); // A downstream disconnect can be identified for HTTP requests when the upstream returns with a 0 // response code and when no other response flags are set. 
if (!stream_info_.hasAnyResponseFlag() && !stream_info_.responseCode()) { stream_info_.setResponseFlag(StreamInfo::ResponseFlag::DownstreamConnectionTermination); } connection_manager_.stats_.named_.downstream_rq_active_.dec(); for (const AccessLog::InstanceSharedPtr& access_log : connection_manager_.config_.accessLogs()) { access_log->log(request_headers_.get(), response_headers_.get(), response_trailers_.get(), stream_info_); } for (const auto& log_handler : access_log_handlers_) { log_handler->log(request_headers_.get(), response_headers_.get(), response_trailers_.get(), stream_info_); } if (stream_info_.healthCheck()) { connection_manager_.config_.tracingStats().health_check_.inc(); } if (active_span_) { Tracing::HttpTracerUtility::finalizeSpan(*active_span_, request_headers_.get(), stream_info_, *this); } if (state_.successful_upgrade_) { connection_manager_.stats_.named_.downstream_cx_upgrades_active_.dec(); } ASSERT(state_.filter_call_state_ == 0); } void ConnectionManagerImpl::ActiveStream::resetIdleTimer() { if (stream_idle_timer_ != nullptr) { // TODO(htuch): If this shows up in performance profiles, optimize by only // updating a timestamp here and doing periodic checks for idle timeouts // instead, or reducing the accuracy of timers. stream_idle_timer_->enableTimer(idle_timeout_ms_); } } void ConnectionManagerImpl::ActiveStream::onIdleTimeout() { connection_manager_.stats_.named_.downstream_rq_idle_timeout_.inc(); // If headers have not been sent to the user, send a 408. if (response_headers_ != nullptr) { // TODO(htuch): We could send trailers here with an x-envoy timeout header // or gRPC status code, and/or set H2 RST_STREAM error. 
connection_manager_.doEndStream(*this); } else { stream_info_.setResponseFlag(StreamInfo::ResponseFlag::StreamIdleTimeout); sendLocalReply( request_headers_ != nullptr && Grpc::Common::hasGrpcContentType(*request_headers_), Http::Code::RequestTimeout, "stream timeout", nullptr, is_head_request_, absl::nullopt); } } void ConnectionManagerImpl::ActiveStream::onRequestTimeout() { connection_manager_.stats_.named_.downstream_rq_timeout_.inc(); sendLocalReply(request_headers_ != nullptr && Grpc::Common::hasGrpcContentType(*request_headers_), Http::Code::RequestTimeout, "request timeout", nullptr, is_head_request_, absl::nullopt); } void ConnectionManagerImpl::ActiveStream::addStreamDecoderFilterWorker( StreamDecoderFilterSharedPtr filter, bool dual_filter) { ActiveStreamDecoderFilterPtr wrapper(new ActiveStreamDecoderFilter(*this, filter, dual_filter)); filter->setDecoderFilterCallbacks(*wrapper); wrapper->moveIntoListBack(std::move(wrapper), decoder_filters_); } void ConnectionManagerImpl::ActiveStream::addStreamEncoderFilterWorker( StreamEncoderFilterSharedPtr filter, bool dual_filter) { ActiveStreamEncoderFilterPtr wrapper(new ActiveStreamEncoderFilter(*this, filter, dual_filter)); filter->setEncoderFilterCallbacks(*wrapper); wrapper->moveIntoList(std::move(wrapper), encoder_filters_); } void ConnectionManagerImpl::ActiveStream::addAccessLogHandler( AccessLog::InstanceSharedPtr handler) { access_log_handlers_.push_back(handler); } void ConnectionManagerImpl::ActiveStream::chargeStats(const HeaderMap& headers) { uint64_t response_code = Utility::getResponseStatus(headers); stream_info_.response_code_ = response_code; if (stream_info_.health_check_request_) { return; } connection_manager_.stats_.named_.downstream_rq_completed_.inc(); connection_manager_.listener_stats_.downstream_rq_completed_.inc(); if (CodeUtility::is1xx(response_code)) { connection_manager_.stats_.named_.downstream_rq_1xx_.inc(); connection_manager_.listener_stats_.downstream_rq_1xx_.inc(); } else 
if (CodeUtility::is2xx(response_code)) { connection_manager_.stats_.named_.downstream_rq_2xx_.inc(); connection_manager_.listener_stats_.downstream_rq_2xx_.inc(); } else if (CodeUtility::is3xx(response_code)) { connection_manager_.stats_.named_.downstream_rq_3xx_.inc(); connection_manager_.listener_stats_.downstream_rq_3xx_.inc(); } else if (CodeUtility::is4xx(response_code)) { connection_manager_.stats_.named_.downstream_rq_4xx_.inc(); connection_manager_.listener_stats_.downstream_rq_4xx_.inc(); } else if (CodeUtility::is5xx(response_code)) { connection_manager_.stats_.named_.downstream_rq_5xx_.inc(); connection_manager_.listener_stats_.downstream_rq_5xx_.inc(); } } const Network::Connection* ConnectionManagerImpl::ActiveStream::connection() { return &connection_manager_.read_callbacks_->connection(); } // Ordering in this function is complicated, but important. // // We want to do minimal work before selecting route and creating a filter // chain to maximize the number of requests which get custom filter behavior, // e.g. registering access logging. // // This must be balanced by doing sanity checking for invalid requests (one // can't route select properly without full headers), checking state required to // serve error responses (connection close, head requests, etc), and // modifications which may themselves affect route selection. // // TODO(alyssawilk) all the calls here should be audited for order priority, // e.g. many early returns do not currently handle connection: close properly. void ConnectionManagerImpl::ActiveStream::decodeHeaders(HeaderMapPtr&& headers, bool end_stream) { request_headers_ = std::move(headers); if (Http::Headers::get().MethodValues.Head == request_headers_->Method()->value().getStringView()) { is_head_request_ = true; } ENVOY_STREAM_LOG(debug, "request headers complete (end_stream={}):\n{}", *this, end_stream, *request_headers_); // We end the decode here only if the request is header only. 
// NOTE(review): continuation of ActiveStream::decodeHeaders(HeaderMapPtr&&, bool);
// the function's opening (signature, header assignment) lies before this chunk.
// If we convert the request to a
// header only, the stream will be marked as done once a subsequent decodeData/decodeTrailers is
// called with end_stream=true.
  maybeEndDecode(end_stream);

  // Drop new requests when overloaded as soon as we have decoded the headers.
  if (connection_manager_.overload_stop_accepting_requests_ref_ ==
      Server::OverloadActionState::Active) {
    // In this one special case, do not create the filter chain. If there is a risk of memory
    // overload it is more important to avoid unnecessary allocation than to create the filters.
    state_.created_filter_chain_ = true;
    connection_manager_.stats_.named_.downstream_rq_overload_close_.inc();
    sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_),
                   Http::Code::ServiceUnavailable, "envoy overloaded", nullptr, is_head_request_,
                   absl::nullopt);
    return;
  }

  if (!connection_manager_.config_.proxy100Continue() && request_headers_->Expect() &&
      request_headers_->Expect()->value() == Headers::get().ExpectValues._100Continue.c_str()) {
    // Note in the case Envoy is handling 100-Continue complexity, it skips the filter chain
    // and sends the 100-Continue directly to the encoder.
    chargeStats(continueHeader());
    response_encoder_->encode100ContinueHeaders(continueHeader());
    // Remove the Expect header so it won't be handled again upstream.
    request_headers_->removeExpect();
  }

  connection_manager_.user_agent_.initializeFromHeaders(
      *request_headers_, connection_manager_.stats_.prefix_, connection_manager_.stats_.scope_);

  // Make sure we are getting a codec version we support.
  Protocol protocol = connection_manager_.codec_->protocol();
  if (protocol == Protocol::Http10) {
    // Assume this is HTTP/1.0. This is fine for HTTP/0.9 but this code will also affect any
    // requests with non-standard version numbers (0.9, 1.3), basically anything which is not
    // HTTP/1.1.
    //
    // The protocol may have shifted in the HTTP/1.0 case so reset it.
    stream_info_.protocol(protocol);
    if (!connection_manager_.config_.http1Settings().accept_http_10_) {
      // Send "Upgrade Required" if HTTP/1.0 support is not explicitly configured on.
      sendLocalReply(false, Code::UpgradeRequired, "", nullptr, is_head_request_, absl::nullopt);
      return;
    } else {
      // HTTP/1.0 defaults to single-use connections. Make sure the connection
      // will be closed unless Keep-Alive is present.
      state_.saw_connection_close_ = true;
      if (request_headers_->Connection() &&
          absl::EqualsIgnoreCase(request_headers_->Connection()->value().getStringView(),
                                 Http::Headers::get().ConnectionValues.KeepAlive)) {
        state_.saw_connection_close_ = false;
      }
    }
  }

  if (!request_headers_->Host()) {
    if ((protocol == Protocol::Http10) &&
        !connection_manager_.config_.http1Settings().default_host_for_http_10_.empty()) {
      // Add a default host if configured to do so.
      request_headers_->insertHost().value(
          connection_manager_.config_.http1Settings().default_host_for_http_10_);
    } else {
      // Require host header. For HTTP/1.1 Host has already been translated to :authority.
      sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::BadRequest, "",
                     nullptr, is_head_request_, absl::nullopt);
      return;
    }
  }

  // Reject requests whose serialized header block exceeds the configured limit.
  ASSERT(connection_manager_.config_.maxRequestHeadersKb() > 0);
  if (request_headers_->byteSize() > (connection_manager_.config_.maxRequestHeadersKb() * 1024)) {
    sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_),
                   Code::RequestHeaderFieldsTooLarge, "", nullptr, is_head_request_,
                   absl::nullopt);
    return;
  }

  // Currently we only support relative paths at the application layer. We expect the codec to have
  // broken the path into pieces if applicable. NOTE: Currently the HTTP/1.1 codec only does this
  // when the allow_absolute_url flag is enabled on the HCM.
  // https://tools.ietf.org/html/rfc7230#section-5.3 We also need to check for the existence of
  // :path because CONNECT does not have a path, and we don't support that currently.
  if (!request_headers_->Path() || request_headers_->Path()->value().getStringView().empty() ||
      request_headers_->Path()->value().getStringView()[0] != '/') {
    connection_manager_.stats_.named_.downstream_rq_non_relative_path_.inc();
    sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::NotFound, "",
                   nullptr, is_head_request_, absl::nullopt);
    return;
  }

  // Path sanitization should happen before any path access other than the above sanity check.
  if (!ConnectionManagerUtility::maybeNormalizePath(*request_headers_,
                                                   connection_manager_.config_)) {
    sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::BadRequest, "",
                   nullptr, is_head_request_, absl::nullopt);
    return;
  }

  // An explicit "Connection: close" on HTTP/1.1 means the peer wants a single-use connection.
  if (protocol == Protocol::Http11 && request_headers_->Connection() &&
      absl::EqualsIgnoreCase(request_headers_->Connection()->value().getStringView(),
                             Http::Headers::get().ConnectionValues.Close)) {
    state_.saw_connection_close_ = true;
  }

  if (!state_.is_internally_created_) { // Only sanitize headers on first pass.
    // Modify the downstream remote address depending on configuration and headers.
    stream_info_.setDownstreamRemoteAddress(ConnectionManagerUtility::mutateRequestHeaders(
        *request_headers_, connection_manager_.read_callbacks_->connection(),
        connection_manager_.config_, *snapped_route_config_, connection_manager_.random_generator_,
        connection_manager_.runtime_, connection_manager_.local_info_));
  }
  ASSERT(stream_info_.downstreamRemoteAddress() != nullptr);

  ASSERT(!cached_route_);
  refreshCachedRoute();

  const bool upgrade_rejected = createFilterChain() == false;

  // TODO if there are no filters when starting a filter iteration, the connection manager
  // should return 404. The current returns no response if there is no router filter.
  if (protocol == Protocol::Http11 && cached_route_.value()) {
    if (upgrade_rejected) {
      // Do not allow upgrades if the route does not support it.
      connection_manager_.stats_.named_.downstream_rq_ws_on_non_ws_route_.inc();
      sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::Forbidden, "",
                     nullptr, is_head_request_, absl::nullopt);
      return;
    }
    // Allow non websocket requests to go through websocket enabled routes.
  }

  if (cached_route_.value()) {
    const Router::RouteEntry* route_entry = cached_route_.value()->routeEntry();
    if (route_entry != nullptr && route_entry->idleTimeout()) {
      // A route-level idle timeout overrides any global stream idle timeout.
      idle_timeout_ms_ = route_entry->idleTimeout().value();
      if (idle_timeout_ms_.count()) {
        // If we have a route-level idle timeout but no global stream idle timeout, create a timer.
        if (stream_idle_timer_ == nullptr) {
          stream_idle_timer_ =
              connection_manager_.read_callbacks_->connection().dispatcher().createTimer(
                  [this]() -> void { onIdleTimeout(); });
        }
      } else if (stream_idle_timer_ != nullptr) {
        // If we had a global stream idle timeout but the route-level idle timeout is set to zero
        // (to override), we disable the idle timer.
        stream_idle_timer_->disableTimer();
        stream_idle_timer_ = nullptr;
      }
    }
  }

  // Check if tracing is enabled at all.
  if (connection_manager_.config_.tracingConfig()) {
    traceRequest();
  }

  decodeHeaders(nullptr, *request_headers_, end_stream);

  // Reset it here for both global and overridden cases.
  resetIdleTimer();
}

// Decides whether to trace this request, starts the active span, and applies any
// route decorator / decorator-operation header overrides to it.
void ConnectionManagerImpl::ActiveStream::traceRequest() {
  Tracing::Decision tracing_decision =
      Tracing::HttpTracerUtility::isTracing(stream_info_, *request_headers_);
  ConnectionManagerImpl::chargeTracingStats(tracing_decision.reason,
                                            connection_manager_.config_.tracingStats());

  active_span_ = connection_manager_.tracer().startSpan(*this, *request_headers_, stream_info_,
                                                        tracing_decision);

  if (!active_span_) {
    return;
  }

  // TODO: Need to investigate the following code based on the cached route, as may
  // be broken in the case a filter changes the route.

  // If a decorator has been defined, apply it to the active span.
  if (cached_route_.value() && cached_route_.value()->decorator()) {
    cached_route_.value()->decorator()->apply(*active_span_);

    // Cache decorated operation.
    if (!cached_route_.value()->decorator()->getOperation().empty()) {
      decorated_operation_ = &cached_route_.value()->decorator()->getOperation();
    }
  }

  if (connection_manager_.config_.tracingConfig()->operation_name_ ==
      Tracing::OperationName::Egress) {
    // For egress (outbound) requests, pass the decorator's operation name (if defined)
    // as a request header to enable the receiving service to use it in its server span.
    if (decorated_operation_) {
      request_headers_->insertEnvoyDecoratorOperation().value(*decorated_operation_);
    }
  } else {
    const HeaderEntry* req_operation_override = request_headers_->EnvoyDecoratorOperation();

    // For ingress (inbound) requests, if a decorator operation name has been provided, it
    // should be used to override the active span's operation.
    if (req_operation_override) {
      if (!req_operation_override->value().empty()) {
        // TODO(dnoe): Migrate setOperation to take string_view (#6580)
        active_span_->setOperation(std::string(req_operation_override->value().getStringView()));

        // Clear the decorated operation so won't be used in the response header, as
        // it has been overridden by the inbound decorator operation request header.
        decorated_operation_ = nullptr;
      }
      // Remove header so not propagated to service
      request_headers_->removeEnvoyDecoratorOperation();
    }
  }
}

// Runs the decode-headers filter iteration starting after |filter| (or at the first
// filter when |filter| is nullptr).
void ConnectionManagerImpl::ActiveStream::decodeHeaders(ActiveStreamDecoderFilter* filter,
                                                        HeaderMap& headers, bool end_stream) {
  // Headers filter iteration should always start with the next filter if available.
  std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
      commonDecodePrefix(filter, FilterIterationStartState::AlwaysStartFromNext);
  // Set to the filter at which a body was added to a header-only request, if any;
  // iteration is then resumed from it via continueDecoding() after the loop.
  std::list<ActiveStreamDecoderFilterPtr>::iterator continue_data_entry = decoder_filters_.end();

  for (; entry != decoder_filters_.end(); entry++) {
    ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeHeaders));
    state_.filter_call_state_ |= FilterCallState::DecodeHeaders;
    (*entry)->end_stream_ =
        decoding_headers_only_ || (end_stream && continue_data_entry == decoder_filters_.end());
    FilterHeadersStatus status = (*entry)->decodeHeaders(headers, (*entry)->end_stream_);
    ASSERT(!(status == FilterHeadersStatus::ContinueAndEndStream && (*entry)->end_stream_));
    state_.filter_call_state_ &= ~FilterCallState::DecodeHeaders;
    ENVOY_STREAM_LOG(trace, "decode headers called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    if (!(*entry)->commonHandleAfterHeadersCallback(status, decoding_headers_only_) &&
        std::next(entry) != decoder_filters_.end()) {
      // Stop iteration IFF this is not the last filter. If it is the last filter, continue with
      // processing since we need to handle the case where a terminal filter wants to buffer, but
      // a previous filter has added body.
      return;
    }

    // Here we handle the case where we have a header only request, but a filter adds a body
    // to it. We need to not raise end_stream = true to further filters during inline iteration.
    if (end_stream && buffered_request_data_ && continue_data_entry == decoder_filters_.end()) {
      continue_data_entry = entry;
    }
  }

  if (continue_data_entry != decoder_filters_.end()) {
    // We use the continueDecoding() code since it will correctly handle not calling
    // decodeHeaders() again. Fake setting StopSingleIteration since the continueDecoding() code
    // expects it.
    ASSERT(buffered_request_data_);
    (*continue_data_entry)->iteration_state_ =
        ActiveStreamFilterBase::IterationState::StopSingleIteration;
    (*continue_data_entry)->continueDecoding();
  }

  if (end_stream) {
    disarmRequestTimeout();
  }
}

// Codec-facing entry point for request body data; records byte counts and kicks
// off filter iteration from the first filter.
void ConnectionManagerImpl::ActiveStream::decodeData(Buffer::Instance& data, bool end_stream) {
  maybeEndDecode(end_stream);
  stream_info_.addBytesReceived(data.length());

  decodeData(nullptr, data, end_stream, FilterIterationStartState::CanStartFromCurrent);
}

void ConnectionManagerImpl::ActiveStream::decodeData(
    ActiveStreamDecoderFilter* filter, Buffer::Instance& data, bool end_stream,
    FilterIterationStartState filter_iteration_start_state) {
  resetIdleTimer();

  // If we previously decided to decode only the headers, do nothing here.
  if (decoding_headers_only_) {
    return;
  }

  // If a response is complete or a reset has been sent, filters do not care about further body
  // data. Just drop it.
  if (state_.local_complete_) {
    return;
  }

  auto trailers_added_entry = decoder_filters_.end();
  const bool trailers_exists_at_start = request_trailers_ != nullptr;
  // Filter iteration may start at the current filter.
  std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
      commonDecodePrefix(filter, filter_iteration_start_state);

  for (; entry != decoder_filters_.end(); entry++) {
    // If the filter pointed by entry has stopped for all frame types, return now.
    if (handleDataIfStopAll(**entry, data, state_.decoder_filters_streaming_)) {
      return;
    }
    // If end_stream_ is marked for a filter, the data is not for this filter and filters after.
    //
    // In following case, ActiveStreamFilterBase::commonContinue() could be called recursively and
    // its doData() is called with wrong data.
    //
    //  There are 3 decode filters and "wrapper" refers to ActiveStreamFilter object.
    //
    //  filter0->decodeHeaders(_, true)
    //    return STOP
    //  filter0->continueDecoding()
    //    wrapper0->commonContinue()
    //      wrapper0->decodeHeaders(_, _, true)
    //        filter1->decodeHeaders(_, true)
    //          filter1->addDecodeData()
    //          return CONTINUE
    //        filter2->decodeHeaders(_, false)
    //          return CONTINUE
    //        wrapper1->commonContinue() // Detects data is added.
    //          wrapper1->doData()
    //            wrapper1->decodeData()
    //              filter2->decodeData(_, true)
    //                return CONTINUE
    //      wrapper0->doData() // This should not be called
    //        wrapper0->decodeData()
    //          filter1->decodeData(_, true)  // It will cause assertions.
    //
    // One way to solve this problem is to mark end_stream_ for each filter.
    // If a filter is already marked as end_stream_ when decodeData() is called, bails out the
    // whole function. If just skip the filter, the codes after the loop will be called with
    // wrong data. For encodeData, the response_encoder->encode() will be called.
    if ((*entry)->end_stream_) {
      return;
    }
    ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeData));

    // We check the request_trailers_ pointer here in case addDecodedTrailers
    // is called in decodeData during a previous filter invocation, at which point we communicate to
    // the current and future filters that the stream has not yet ended.
    if (end_stream) {
      state_.filter_call_state_ |= FilterCallState::LastDataFrame;
    }

    recordLatestDataFilter(entry, state_.latest_data_decoding_filter_, decoder_filters_);

    state_.filter_call_state_ |= FilterCallState::DecodeData;
    (*entry)->end_stream_ = end_stream && !request_trailers_;
    FilterDataStatus status = (*entry)->handle_->decodeData(data, (*entry)->end_stream_);
    if ((*entry)->end_stream_) {
      (*entry)->handle_->decodeComplete();
    }
    state_.filter_call_state_ &= ~FilterCallState::DecodeData;
    if (end_stream) {
      state_.filter_call_state_ &= ~FilterCallState::LastDataFrame;
    }
    ENVOY_STREAM_LOG(trace, "decode data called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    // Remember the filter at which trailers were first added so they can be dispatched below.
    if (!trailers_exists_at_start && request_trailers_ &&
        trailers_added_entry == decoder_filters_.end()) {
      trailers_added_entry = entry;
    }

    if (!(*entry)->commonHandleAfterDataCallback(status, data, state_.decoder_filters_streaming_) &&
        std::next(entry) != decoder_filters_.end()) {
      // Stop iteration IFF this is not the last filter. If it is the last filter, continue with
      // processing since we need to handle the case where a terminal filter wants to buffer, but
      // a previous filter has added trailers.
      return;
    }
  }

  // If trailers were adding during decodeData we need to trigger decodeTrailers in order
  // to allow filters to process the trailers.
  if (trailers_added_entry != decoder_filters_.end()) {
    decodeTrailers(trailers_added_entry->get(), *request_trailers_);
  }

  if (end_stream) {
    disarmRequestTimeout();
  }
}

// Lazily creates the request trailer map so a filter can append trailers to a
// request that had none.
HeaderMap& ConnectionManagerImpl::ActiveStream::addDecodedTrailers() {
  // Trailers can only be added during the last data frame (i.e. end_stream = true).
  ASSERT(state_.filter_call_state_ & FilterCallState::LastDataFrame);

  // Trailers can only be added once.
  ASSERT(!request_trailers_);

  request_trailers_ = std::make_unique<HeaderMapImpl>();
  return *request_trailers_;
}

// Lets a decoder filter inject body data; buffers it or inline-dispatches to later
// filters depending on which filter callback is currently executing.
void ConnectionManagerImpl::ActiveStream::addDecodedData(ActiveStreamDecoderFilter& filter,
                                                         Buffer::Instance& data, bool streaming) {
  if (state_.filter_call_state_ == 0 ||
      (state_.filter_call_state_ & FilterCallState::DecodeHeaders) ||
      (state_.filter_call_state_ & FilterCallState::DecodeData)) {
    // Make sure if this triggers watermarks, the correct action is taken.
    state_.decoder_filters_streaming_ = streaming;
    // If no call is happening or we are in the decode headers/data callback, buffer the data.
    // Inline processing happens in the decodeHeaders() callback if necessary.
    filter.commonHandleBufferData(data);
  } else if (state_.filter_call_state_ & FilterCallState::DecodeTrailers) {
    // In this case we need to inline dispatch the data to further filters. If those filters
    // choose to buffer/stop iteration that's fine.
    decodeData(&filter, data, false, FilterIterationStartState::AlwaysStartFromNext);
  } else {
    // TODO(mattklein123): Formalize error handling for filters and add tests. Should probably
    // throw an exception here.
    NOT_IMPLEMENTED_GCOVR_EXCL_LINE;
  }
}

// Codec-facing entry point for request trailers; trailers always imply end of stream.
void ConnectionManagerImpl::ActiveStream::decodeTrailers(HeaderMapPtr&& trailers) {
  resetIdleTimer();
  maybeEndDecode(true);
  request_trailers_ = std::move(trailers);
  decodeTrailers(nullptr, *request_trailers_);
}

void ConnectionManagerImpl::ActiveStream::decodeTrailers(ActiveStreamDecoderFilter* filter,
                                                         HeaderMap& trailers) {
  // If we previously decided to decode only the headers, do nothing here.
  if (decoding_headers_only_) {
    return;
  }

  // See decodeData() above for why we check local_complete_ here.
  if (state_.local_complete_) {
    return;
  }

  // Filter iteration may start at the current filter.
  std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
      commonDecodePrefix(filter, FilterIterationStartState::CanStartFromCurrent);

  for (; entry != decoder_filters_.end(); entry++) {
    // If the filter pointed by entry has stopped for all frame type, return now.
    if ((*entry)->stoppedAll()) {
      return;
    }
    ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeTrailers));
    state_.filter_call_state_ |= FilterCallState::DecodeTrailers;
    FilterTrailersStatus status = (*entry)->handle_->decodeTrailers(trailers);
    (*entry)->handle_->decodeComplete();
    (*entry)->end_stream_ = true;
    state_.filter_call_state_ &= ~FilterCallState::DecodeTrailers;
    ENVOY_STREAM_LOG(trace, "decode trailers called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    if (!(*entry)->commonHandleAfterTrailersCallback(status)) {
      return;
    }
  }
  disarmRequestTimeout();
}

// Records that the downstream request has fully arrived when end_stream is set.
void ConnectionManagerImpl::ActiveStream::maybeEndDecode(bool end_stream) {
  ASSERT(!state_.remote_complete_);
  state_.remote_complete_ = end_stream;
  if (end_stream) {
    stream_info_.onLastDownstreamRxByteReceived();
    ENVOY_STREAM_LOG(debug, "request end stream", *this);
  }
}

void ConnectionManagerImpl::ActiveStream::disarmRequestTimeout() {
  if (request_timer_) {
    request_timer_->disableTimer();
  }
}

// Computes the encoder-filter iterator at which an encode pass should begin.
std::list<ConnectionManagerImpl::ActiveStreamEncoderFilterPtr>::iterator
ConnectionManagerImpl::ActiveStream::commonEncodePrefix(
    ActiveStreamEncoderFilter* filter, bool end_stream,
    FilterIterationStartState filter_iteration_start_state) {
  // Only do base state setting on the initial call. Subsequent calls for filtering do not touch
  // the base state.
  if (filter == nullptr) {
    ASSERT(!state_.local_complete_);
    state_.local_complete_ = end_stream;
    return encoder_filters_.begin();
  }

  if (filter_iteration_start_state == FilterIterationStartState::CanStartFromCurrent &&
      (*(filter->entry()))->iterate_from_current_filter_) {
    // The filter iteration has been stopped for all frame types, and now the iteration continues.
    // The current filter's encoding callback has not be called. Call it now.
    return filter->entry();
  }
  return std::next(filter->entry());
}

// Computes the decoder-filter iterator at which a decode pass should begin.
std::list<ConnectionManagerImpl::ActiveStreamDecoderFilterPtr>::iterator
ConnectionManagerImpl::ActiveStream::commonDecodePrefix(
    ActiveStreamDecoderFilter* filter, FilterIterationStartState filter_iteration_start_state) {
  if (!filter) {
    return decoder_filters_.begin();
  }
  if (filter_iteration_start_state == FilterIterationStartState::CanStartFromCurrent &&
      (*(filter->entry()))->iterate_from_current_filter_) {
    // The filter iteration has been stopped for all frame types, and now the iteration continues.
    // The current filter's callback function has not been called. Call it now.
    return filter->entry();
  }
  return std::next(filter->entry());
}

// Begins draining the connection: notify the codec (e.g. GOAWAY) and arm the drain timer.
void ConnectionManagerImpl::startDrainSequence() {
  ASSERT(drain_state_ == DrainState::NotDraining);
  drain_state_ = DrainState::Draining;
  codec_->shutdownNotice();
  drain_timer_ = read_callbacks_->connection().dispatcher().createTimer(
      [this]() -> void { onDrainTimeout(); });
  drain_timer_->enableTimer(config_.drainTimeout());
}

// Re-resolves the route from the snapped route config and refreshes the cached
// route entry and upstream cluster info.
void ConnectionManagerImpl::ActiveStream::refreshCachedRoute() {
  Router::RouteConstSharedPtr route;
  if (request_headers_ != nullptr) {
    route = snapped_route_config_->route(*request_headers_, stream_id_);
  }
  stream_info_.route_entry_ = route ? route->routeEntry() : nullptr;
  cached_route_ = std::move(route);
  if (nullptr == stream_info_.route_entry_) {
    cached_cluster_info_ = nullptr;
  } else {
    Upstream::ThreadLocalCluster* local_cluster =
        connection_manager_.cluster_manager_.get(stream_info_.route_entry_->clusterName());
    cached_cluster_info_ = (nullptr == local_cluster) ? nullptr : local_cluster->info();
  }
}

// Generates a local (Envoy-originated) response and pushes it through the encoder
// filter chain.
void ConnectionManagerImpl::ActiveStream::sendLocalReply(
    bool is_grpc_request, Code code, absl::string_view body,
    const std::function<void(HeaderMap& headers)>& modify_headers, bool is_head_request,
    const absl::optional<Grpc::Status::GrpcStatus> grpc_status) {
  ASSERT(response_headers_ == nullptr);
  // For early error handling, do a best-effort attempt to create a filter chain
  // to ensure access logging.
  if (!state_.created_filter_chain_) {
    createFilterChain();
  }
  Utility::sendLocalReply(
      is_grpc_request,
      [this, modify_headers](HeaderMapPtr&& headers, bool end_stream) -> void {
        if (modify_headers != nullptr) {
          modify_headers(*headers);
        }
        response_headers_ = std::move(headers);
        // TODO: Start encoding from the last decoder filter that saw the
        // request instead.
        encodeHeaders(nullptr, *response_headers_, end_stream);
      },
      [this](Buffer::Instance& data, bool end_stream) -> void {
        // TODO: Start encoding from the last decoder filter that saw the
        // request instead.
        encodeData(nullptr, data, end_stream, FilterIterationStartState::CanStartFromCurrent);
      },
      state_.destroyed_, code, body, grpc_status, is_head_request);
}

void ConnectionManagerImpl::ActiveStream::encode100ContinueHeaders(
    ActiveStreamEncoderFilter* filter, HeaderMap& headers) {
  resetIdleTimer();
  ASSERT(connection_manager_.config_.proxy100Continue());
  // Make sure commonContinue continues encode100ContinueHeaders.
  has_continue_headers_ = true;

  // Similar to the block in encodeHeaders, run encode100ContinueHeaders on each
  // filter.
  // This is simpler than that case because 100 continue implies no
  // end-stream, and because there are normal headers coming there's no need for
  // complex continuation logic.
  // 100-continue filter iteration should always start with the next filter if available.
  std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
      commonEncodePrefix(filter, false, FilterIterationStartState::AlwaysStartFromNext);
  for (; entry != encoder_filters_.end(); entry++) {
    ASSERT(!(state_.filter_call_state_ & FilterCallState::Encode100ContinueHeaders));
    state_.filter_call_state_ |= FilterCallState::Encode100ContinueHeaders;
    FilterHeadersStatus status = (*entry)->handle_->encode100ContinueHeaders(headers);
    state_.filter_call_state_ &= ~FilterCallState::Encode100ContinueHeaders;
    ENVOY_STREAM_LOG(trace, "encode 100 continue headers called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
    if (!(*entry)->commonHandleAfter100ContinueHeadersCallback(status)) {
      return;
    }
  }

  // Strip the T-E headers etc. Defer other header additions as well as drain-close logic to the
  // continuation headers.
  ConnectionManagerUtility::mutateResponseHeaders(headers, request_headers_.get(), EMPTY_STRING);

  // Count both the 1xx and follow-up response code in stats.
  chargeStats(headers);

  ENVOY_STREAM_LOG(debug, "encoding 100 continue headers via codec:\n{}", *this, headers);

  // Now actually encode via the codec.
  response_encoder_->encode100ContinueHeaders(headers);
}

// Runs the encode-headers filter iteration and, after the chain completes, applies
// connection-manager level response mutations before handing headers to the codec.
void ConnectionManagerImpl::ActiveStream::encodeHeaders(ActiveStreamEncoderFilter* filter,
                                                        HeaderMap& headers, bool end_stream) {
  resetIdleTimer();
  disarmRequestTimeout();

  // Headers filter iteration should always start with the next filter if available.
  std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
      commonEncodePrefix(filter, end_stream, FilterIterationStartState::AlwaysStartFromNext);
  // Set to the filter at which a body was added to a header-only response, if any;
  // iteration is then resumed from it via continueEncoding() after the headers are encoded.
  std::list<ActiveStreamEncoderFilterPtr>::iterator continue_data_entry = encoder_filters_.end();

  for (; entry != encoder_filters_.end(); entry++) {
    ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeHeaders));
    state_.filter_call_state_ |= FilterCallState::EncodeHeaders;
    (*entry)->end_stream_ =
        encoding_headers_only_ || (end_stream && continue_data_entry == encoder_filters_.end());
    FilterHeadersStatus status = (*entry)->handle_->encodeHeaders(headers, (*entry)->end_stream_);
    if ((*entry)->end_stream_) {
      (*entry)->handle_->encodeComplete();
    }
    state_.filter_call_state_ &= ~FilterCallState::EncodeHeaders;
    ENVOY_STREAM_LOG(trace, "encode headers called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    const auto continue_iteration =
        (*entry)->commonHandleAfterHeadersCallback(status, encoding_headers_only_);

    // If we're encoding a headers only response, then mark the local as complete. This ensures
    // that we don't attempt to reset the downstream request in doEndStream.
    if (encoding_headers_only_) {
      state_.local_complete_ = true;
    }

    if (!continue_iteration) {
      return;
    }

    // Here we handle the case where we have a header only response, but a filter adds a body
    // to it. We need to not raise end_stream = true to further filters during inline iteration.
    if (end_stream && buffered_response_data_ && continue_data_entry == encoder_filters_.end()) {
      continue_data_entry = entry;
    }
  }

  // Base headers.
  connection_manager_.config_.dateProvider().setDateHeader(headers);
  // Following setReference() is safe because serverName() is constant for the life of the
  // listener.
  headers.insertServer().value().setReference(connection_manager_.config_.serverName());
  ConnectionManagerUtility::mutateResponseHeaders(headers, request_headers_.get(),
                                                  connection_manager_.config_.via());

  // See if we want to drain/close the connection. Send the go away frame prior to encoding the
  // header block.
  if (connection_manager_.drain_state_ == DrainState::NotDraining &&
      connection_manager_.drain_close_.drainClose()) {
    // This doesn't really do anything for HTTP/1.1 other then give the connection another boost
    // of time to race with incoming requests. It mainly just keeps the logic the same between
    // HTTP/1.1 and HTTP/2.
    connection_manager_.startDrainSequence();
    connection_manager_.stats_.named_.downstream_cx_drain_close_.inc();
    ENVOY_STREAM_LOG(debug, "drain closing connection", *this);
  }

  if (connection_manager_.drain_state_ == DrainState::NotDraining &&
      state_.saw_connection_close_) {
    ENVOY_STREAM_LOG(debug, "closing connection due to connection close header", *this);
    connection_manager_.drain_state_ = DrainState::Closing;
  }

  if (connection_manager_.drain_state_ == DrainState::NotDraining &&
      connection_manager_.overload_disable_keepalive_ref_ ==
          Server::OverloadActionState::Active) {
    ENVOY_STREAM_LOG(debug, "disabling keepalive due to envoy overload", *this);
    connection_manager_.drain_state_ = DrainState::Closing;
    connection_manager_.stats_.named_.downstream_cx_overload_disable_keepalive_.inc();
  }

  // If we are destroying a stream before remote is complete and the connection does not support
  // multiplexing, we should disconnect since we don't want to wait around for the request to
  // finish.
  if (!state_.remote_complete_) {
    if (connection_manager_.codec_->protocol() != Protocol::Http2) {
      connection_manager_.drain_state_ = DrainState::Closing;
    }

    connection_manager_.stats_.named_.downstream_rq_response_before_rq_complete_.inc();
  }

  if (connection_manager_.drain_state_ == DrainState::Closing &&
      connection_manager_.codec_->protocol() != Protocol::Http2) {
    // If the connection manager is draining send "Connection: Close" on HTTP/1.1 connections.
    // Do not do this for H2 (which drains via GOAWAY) or Upgrade (as the upgrade
    // payload is no longer HTTP/1.1)
    if (!Utility::isUpgrade(headers)) {
      headers.insertConnection().value().setReference(Headers::get().ConnectionValues.Close);
    }
  }

  if (connection_manager_.config_.tracingConfig()) {
    if (connection_manager_.config_.tracingConfig()->operation_name_ ==
        Tracing::OperationName::Ingress) {
      // For ingress (inbound) responses, if the request headers do not include a
      // decorator operation (override), then pass the decorator's operation name (if defined)
      // as a response header to enable the client service to use it in its client span.
      if (decorated_operation_) {
        headers.insertEnvoyDecoratorOperation().value(*decorated_operation_);
      }
    } else if (connection_manager_.config_.tracingConfig()->operation_name_ ==
               Tracing::OperationName::Egress) {
      const HeaderEntry* resp_operation_override = headers.EnvoyDecoratorOperation();

      // For Egress (outbound) response, if a decorator operation name has been provided, it
      // should be used to override the active span's operation.
      if (resp_operation_override) {
        if (!resp_operation_override->value().empty() && active_span_) {
          active_span_->setOperation(
              std::string(resp_operation_override->value().getStringView()));
        }
        // Remove header so not propagated to service.
        headers.removeEnvoyDecoratorOperation();
      }
    }
  }

  chargeStats(headers);

  ENVOY_STREAM_LOG(debug, "encoding headers via codec (end_stream={}):\n{}", *this,
                   encoding_headers_only_ ||
                       (end_stream && continue_data_entry == encoder_filters_.end()),
                   headers);

  // Now actually encode via the codec.
  stream_info_.onFirstDownstreamTxByteSent();
  response_encoder_->encodeHeaders(
      headers,
      encoding_headers_only_ || (end_stream && continue_data_entry == encoder_filters_.end()));

  if (continue_data_entry != encoder_filters_.end()) {
    // We use the continueEncoding() code since it will correctly handle not calling
    // encodeHeaders() again. Fake setting StopSingleIteration since the continueEncoding() code
    // expects it.
    ASSERT(buffered_response_data_);
    (*continue_data_entry)->iteration_state_ =
        ActiveStreamFilterBase::IterationState::StopSingleIteration;
    (*continue_data_entry)->continueEncoding();
  } else {
    // End encoding if this is a header only response, either due to a filter converting it to one
    // or due to the upstream returning headers only.
    maybeEndEncode(encoding_headers_only_ || end_stream);
  }
}

// Passes response metadata through every encoder filter, then encodes it via the codec.
void ConnectionManagerImpl::ActiveStream::encodeMetadata(ActiveStreamEncoderFilter* filter,
                                                         MetadataMapPtr&& metadata_map_ptr) {
  resetIdleTimer();

  // Metadata currently go through all filters.
  ASSERT(filter == nullptr);
  std::list<ActiveStreamEncoderFilterPtr>::iterator entry = encoder_filters_.begin();
  for (; entry != encoder_filters_.end(); entry++) {
    FilterMetadataStatus status = (*entry)->handle_->encodeMetadata(*metadata_map_ptr);
    ENVOY_STREAM_LOG(trace, "encode metadata called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
  }
  // TODO(soya3129): update stats with metadata.

  // Now encode metadata via the codec.
  if (!metadata_map_ptr->empty()) {
    ENVOY_STREAM_LOG(debug, "encoding metadata via codec:\n{}", *this, *metadata_map_ptr);
    MetadataMapVector metadata_map_vector;
    metadata_map_vector.emplace_back(std::move(metadata_map_ptr));
    response_encoder_->encodeMetadata(metadata_map_vector);
  }
}

// Lazily creates the response trailer map so a filter can append trailers to a
// response that had none.
HeaderMap& ConnectionManagerImpl::ActiveStream::addEncodedTrailers() {
  // Trailers can only be added during the last data frame (i.e. end_stream = true).
  ASSERT(state_.filter_call_state_ & FilterCallState::LastDataFrame);

  // Trailers can only be added once.
  ASSERT(!response_trailers_);

  response_trailers_ = std::make_unique<HeaderMapImpl>();
  return *response_trailers_;
}

// Lets an encoder filter inject body data; mirrors addDecodedData() on the encode path.
void ConnectionManagerImpl::ActiveStream::addEncodedData(ActiveStreamEncoderFilter& filter,
                                                         Buffer::Instance& data, bool streaming) {
  if (state_.filter_call_state_ == 0 ||
      (state_.filter_call_state_ & FilterCallState::EncodeHeaders) ||
      (state_.filter_call_state_ & FilterCallState::EncodeData)) {
    // Make sure if this triggers watermarks, the correct action is taken.
    state_.encoder_filters_streaming_ = streaming;
    // If no call is happening or we are in the decode headers/data callback, buffer the data.
    // Inline processing happens in the decodeHeaders() callback if necessary.
    filter.commonHandleBufferData(data);
  } else if (state_.filter_call_state_ & FilterCallState::EncodeTrailers) {
    // In this case we need to inline dispatch the data to further filters. If those filters
    // choose to buffer/stop iteration that's fine.
    encodeData(&filter, data, false, FilterIterationStartState::AlwaysStartFromNext);
  } else {
    // TODO(mattklein123): Formalize error handling for filters and add tests. Should probably
    // throw an exception here.
    NOT_IMPLEMENTED_GCOVR_EXCL_LINE;
  }
}

// Runs the encode-data filter iteration and hands the (possibly modified) body to the codec.
void ConnectionManagerImpl::ActiveStream::encodeData(
    ActiveStreamEncoderFilter* filter, Buffer::Instance& data, bool end_stream,
    FilterIterationStartState filter_iteration_start_state) {
  resetIdleTimer();

  // If we previously decided to encode only the headers, do nothing here.
  if (encoding_headers_only_) {
    return;
  }

  // Filter iteration may start at the current filter.
  std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
      commonEncodePrefix(filter, end_stream, filter_iteration_start_state);
  auto trailers_added_entry = encoder_filters_.end();

  const bool trailers_exists_at_start = response_trailers_ != nullptr;
  for (; entry != encoder_filters_.end(); entry++) {
    // If the filter pointed by entry has stopped for all frame type, return now.
    if (handleDataIfStopAll(**entry, data, state_.encoder_filters_streaming_)) {
      return;
    }
    // If end_stream_ is marked for a filter, the data is not for this filter and filters after.
    // For details, please see the comment in the ActiveStream::decodeData() function.
    if ((*entry)->end_stream_) {
      return;
    }
    ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeData));

    // We check the response_trailers_ pointer here in case addEncodedTrailers
    // is called in encodeData during a previous filter invocation, at which point we communicate
    // to the current and future filters that the stream has not yet ended.
    state_.filter_call_state_ |= FilterCallState::EncodeData;
    if (end_stream) {
      state_.filter_call_state_ |= FilterCallState::LastDataFrame;
    }

    recordLatestDataFilter(entry, state_.latest_data_encoding_filter_, encoder_filters_);

    (*entry)->end_stream_ = end_stream && !response_trailers_;
    FilterDataStatus status = (*entry)->handle_->encodeData(data, (*entry)->end_stream_);
    if ((*entry)->end_stream_) {
      (*entry)->handle_->encodeComplete();
    }
    state_.filter_call_state_ &= ~FilterCallState::EncodeData;
    if (end_stream) {
      state_.filter_call_state_ &= ~FilterCallState::LastDataFrame;
    }
    ENVOY_STREAM_LOG(trace, "encode data called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    // Remember the filter at which trailers were first added so they can be dispatched below.
    if (!trailers_exists_at_start && response_trailers_ &&
        trailers_added_entry == encoder_filters_.end()) {
      trailers_added_entry = entry;
    }

    if (!(*entry)->commonHandleAfterDataCallback(status, data,
                                                 state_.encoder_filters_streaming_)) {
      return;
    }
  }

  ENVOY_STREAM_LOG(trace, "encoding data via codec (size={} end_stream={})", *this, data.length(),
                   end_stream);

  stream_info_.addBytesSent(data.length());

  // If trailers were adding during encodeData we need to trigger decodeTrailers in order
  // to allow filters to process the trailers.
  if (trailers_added_entry != encoder_filters_.end()) {
    response_encoder_->encodeData(data, false);
    encodeTrailers(trailers_added_entry->get(), *response_trailers_);
  } else {
    response_encoder_->encodeData(data, end_stream);
    maybeEndEncode(end_stream);
  }
}

// Runs the encode-trailers filter iteration, then encodes trailers via the codec and
// finishes the response.
void ConnectionManagerImpl::ActiveStream::encodeTrailers(ActiveStreamEncoderFilter* filter,
                                                         HeaderMap& trailers) {
  resetIdleTimer();

  // If we previously decided to encode only the headers, do nothing here.
  if (encoding_headers_only_) {
    return;
  }

  // Filter iteration may start at the current filter.
  std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
      commonEncodePrefix(filter, true, FilterIterationStartState::CanStartFromCurrent);
  for (; entry != encoder_filters_.end(); entry++) {
    // If the filter pointed by entry has stopped for all frame type, return now.
    if ((*entry)->stoppedAll()) {
      return;
    }
    ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeTrailers));
    state_.filter_call_state_ |= FilterCallState::EncodeTrailers;
    FilterTrailersStatus status = (*entry)->handle_->encodeTrailers(trailers);
    (*entry)->handle_->encodeComplete();
    (*entry)->end_stream_ = true;
    state_.filter_call_state_ &= ~FilterCallState::EncodeTrailers;
    ENVOY_STREAM_LOG(trace, "encode trailers called: filter={} status={}", *this,
                     static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));

    if (!(*entry)->commonHandleAfterTrailersCallback(status)) {
      return;
    }
  }

  ENVOY_STREAM_LOG(debug, "encoding trailers via codec:\n{}", *this, trailers);

  response_encoder_->encodeTrailers(trailers);
  maybeEndEncode(true);
}

// Finishes the response when end_stream is set: record timing and end the stream.
void ConnectionManagerImpl::ActiveStream::maybeEndEncode(bool end_stream) {
  if (end_stream) {
    stream_info_.onLastDownstreamTxByteSent();
    request_response_timespan_->complete();
    connection_manager_.doEndStream(*this);
  }
}

// If |filter| has stopped iteration for all frame types, buffers |data| into it and
// reports whether the caller should bail out of its iteration loop.
bool ConnectionManagerImpl::ActiveStream::handleDataIfStopAll(ActiveStreamFilterBase& filter,
                                                              Buffer::Instance& data,
                                                              bool& filter_streaming) {
  if (filter.stoppedAll()) {
    ASSERT(!filter.canIterate());
    filter_streaming =
        filter.iteration_state_ == ActiveStreamFilterBase::IterationState::StopAllWatermark;
    filter.commonHandleBufferData(data);
    return true;
  }
  return false;
}

void ConnectionManagerImpl::ActiveStream::onResetStream(StreamResetReason, absl::string_view) {
  // NOTE: This function gets called in all of the following cases:
  //       1) We TX an app level reset
  //       2) The codec TX a codec level reset
  //       3) The codec RX a reset
  //       If we need to differentiate we need to do it inside the codec. Can start with this.
connection_manager_.stats_.named_.downstream_rq_rx_reset_.inc(); connection_manager_.doDeferredStreamDestroy(*this); } void ConnectionManagerImpl::ActiveStream::onAboveWriteBufferHighWatermark() { ENVOY_STREAM_LOG(debug, "Disabling upstream stream due to downstream stream watermark.", *this); callHighWatermarkCallbacks(); } void ConnectionManagerImpl::ActiveStream::onBelowWriteBufferLowWatermark() { ENVOY_STREAM_LOG(debug, "Enabling upstream stream due to downstream stream watermark.", *this); callLowWatermarkCallbacks(); } Tracing::OperationName ConnectionManagerImpl::ActiveStream::operationName() const { return connection_manager_.config_.tracingConfig()->operation_name_; } const std::vector<Http::LowerCaseString>& ConnectionManagerImpl::ActiveStream::requestHeadersForTags() const { return connection_manager_.config_.tracingConfig()->request_headers_for_tags_; } bool ConnectionManagerImpl::ActiveStream::verbose() const { return connection_manager_.config_.tracingConfig()->verbose_; } void ConnectionManagerImpl::ActiveStream::callHighWatermarkCallbacks() { ++high_watermark_count_; for (auto watermark_callbacks : watermark_callbacks_) { watermark_callbacks->onAboveWriteBufferHighWatermark(); } } void ConnectionManagerImpl::ActiveStream::callLowWatermarkCallbacks() { ASSERT(high_watermark_count_ > 0); --high_watermark_count_; for (auto watermark_callbacks : watermark_callbacks_) { watermark_callbacks->onBelowWriteBufferLowWatermark(); } } void ConnectionManagerImpl::ActiveStream::setBufferLimit(uint32_t new_limit) { ENVOY_STREAM_LOG(debug, "setting buffer limit to {}", *this, new_limit); buffer_limit_ = new_limit; if (buffered_request_data_) { buffered_request_data_->setWatermarks(buffer_limit_); } if (buffered_response_data_) { buffered_response_data_->setWatermarks(buffer_limit_); } } bool ConnectionManagerImpl::ActiveStream::createFilterChain() { if (state_.created_filter_chain_) { return false; } bool upgrade_rejected = false; auto upgrade = request_headers_ ? 
request_headers_->Upgrade() : nullptr; state_.created_filter_chain_ = true; if (upgrade != nullptr) { const Router::RouteEntry::UpgradeMap* upgrade_map = nullptr; // We must check if the 'cached_route_' optional is populated since this function can be called // early via sendLocalReply(), before the cached route is populated. if (cached_route_.has_value() && cached_route_.value() && cached_route_.value()->routeEntry()) { upgrade_map = &cached_route_.value()->routeEntry()->upgradeMap(); } if (connection_manager_.config_.filterFactory().createUpgradeFilterChain( upgrade->value().getStringView(), upgrade_map, *this)) { state_.successful_upgrade_ = true; connection_manager_.stats_.named_.downstream_cx_upgrades_total_.inc(); connection_manager_.stats_.named_.downstream_cx_upgrades_active_.inc(); return true; } else { upgrade_rejected = true; // Fall through to the default filter chain. The function calling this // will send a local reply indicating that the upgrade failed. } } connection_manager_.config_.filterFactory().createFilterChain(*this); return !upgrade_rejected; } void ConnectionManagerImpl::ActiveStreamFilterBase::commonContinue() { // TODO(mattklein123): Raise an error if this is called during a callback. if (!canContinue()) { ENVOY_STREAM_LOG(trace, "cannot continue filter chain: filter={}", parent_, static_cast<const void*>(this)); return; } ENVOY_STREAM_LOG(trace, "continuing filter chain: filter={}", parent_, static_cast<const void*>(this)); ASSERT(!canIterate()); // If iteration has stopped for all frame types, set iterate_from_current_filter_ to true so the // filter iteration starts with the current filter instead of the next one. if (stoppedAll()) { iterate_from_current_filter_ = true; } allowIteration(); // Only resume with do100ContinueHeaders() if we've actually seen a 100-Continue. 
if (parent_.has_continue_headers_ && !continue_headers_continued_) { continue_headers_continued_ = true; do100ContinueHeaders(); // If the response headers have not yet come in, don't continue on with // headers and body. doHeaders expects request headers to exist. if (!parent_.response_headers_.get()) { return; } } // Make sure that we handle the zero byte data frame case. We make no effort to optimize this // case in terms of merging it into a header only request/response. This could be done in the // future. if (!headers_continued_) { headers_continued_ = true; doHeaders(complete() && !bufferedData() && !trailers()); } // TODO(mattklein123): If a filter returns StopIterationNoBuffer and then does a continue, we // won't be able to end the stream if there is no buffered data. Need to handle this. if (bufferedData()) { doData(complete() && !trailers()); } if (trailers()) { doTrailers(); } iterate_from_current_filter_ = false; } bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfter100ContinueHeadersCallback( FilterHeadersStatus status) { ASSERT(parent_.has_continue_headers_); ASSERT(!continue_headers_continued_); ASSERT(canIterate()); if (status == FilterHeadersStatus::StopIteration) { iteration_state_ = IterationState::StopSingleIteration; return false; } else { ASSERT(status == FilterHeadersStatus::Continue); continue_headers_continued_ = true; return true; } } bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterHeadersCallback( FilterHeadersStatus status, bool& headers_only) { ASSERT(!headers_continued_); ASSERT(canIterate()); if (status == FilterHeadersStatus::StopIteration) { iteration_state_ = IterationState::StopSingleIteration; return false; } else if (status == FilterHeadersStatus::StopAllIterationAndBuffer) { iteration_state_ = IterationState::StopAllBuffer; return false; } else if (status == FilterHeadersStatus::StopAllIterationAndWatermark) { iteration_state_ = IterationState::StopAllWatermark; return false; } else if 
(status == FilterHeadersStatus::ContinueAndEndStream) { // Set headers_only to true so we know to end early if necessary, // but continue filter iteration so we actually write the headers/run the cleanup code. headers_only = true; ENVOY_STREAM_LOG(debug, "converting to headers only", parent_); return true; } else { ASSERT(status == FilterHeadersStatus::Continue); headers_continued_ = true; return true; } } void ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleBufferData( Buffer::Instance& provided_data) { // The way we do buffering is a little complicated which is why we have this common function // which is used for both encoding and decoding. When data first comes into our filter pipeline, // we send it through. Any filter can choose to stop iteration and buffer or not. If we then // continue iteration in the future, we use the buffered data. A future filter can stop and // buffer again. In this case, since we are already operating on buffered data, we don't // rebuffer, because we assume the filter has modified the buffer as it wishes in place. 
if (bufferedData().get() != &provided_data) { if (!bufferedData()) { bufferedData() = createBuffer(); } bufferedData()->move(provided_data); } } bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterDataCallback( FilterDataStatus status, Buffer::Instance& provided_data, bool& buffer_was_streaming) { if (status == FilterDataStatus::Continue) { if (iteration_state_ == IterationState::StopSingleIteration) { commonHandleBufferData(provided_data); commonContinue(); return false; } else { ASSERT(headers_continued_); } } else { iteration_state_ = IterationState::StopSingleIteration; if (status == FilterDataStatus::StopIterationAndBuffer || status == FilterDataStatus::StopIterationAndWatermark) { buffer_was_streaming = status == FilterDataStatus::StopIterationAndWatermark; commonHandleBufferData(provided_data); } return false; } return true; } bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterTrailersCallback( FilterTrailersStatus status) { if (status == FilterTrailersStatus::Continue) { if (iteration_state_ == IterationState::StopSingleIteration) { commonContinue(); return false; } else { ASSERT(headers_continued_); } } else { return false; } return true; } const Network::Connection* ConnectionManagerImpl::ActiveStreamFilterBase::connection() { return parent_.connection(); } Event::Dispatcher& ConnectionManagerImpl::ActiveStreamFilterBase::dispatcher() { return parent_.connection_manager_.read_callbacks_->connection().dispatcher(); } StreamInfo::StreamInfo& ConnectionManagerImpl::ActiveStreamFilterBase::streamInfo() { return parent_.stream_info_; } Tracing::Span& ConnectionManagerImpl::ActiveStreamFilterBase::activeSpan() { if (parent_.active_span_) { return *parent_.active_span_; } else { return Tracing::NullSpan::instance(); } } Tracing::Config& ConnectionManagerImpl::ActiveStreamFilterBase::tracingConfig() { return parent_; } Upstream::ClusterInfoConstSharedPtr ConnectionManagerImpl::ActiveStreamFilterBase::clusterInfo() { // NOTE: 
Refreshing route caches clusterInfo as well. if (!parent_.cached_route_.has_value()) { parent_.refreshCachedRoute(); } return parent_.cached_cluster_info_.value(); } Router::RouteConstSharedPtr ConnectionManagerImpl::ActiveStreamFilterBase::route() { if (!parent_.cached_route_.has_value()) { parent_.refreshCachedRoute(); } return parent_.cached_route_.value(); } void ConnectionManagerImpl::ActiveStreamFilterBase::clearRouteCache() { parent_.cached_route_ = absl::optional<Router::RouteConstSharedPtr>(); parent_.cached_cluster_info_ = absl::optional<Upstream::ClusterInfoConstSharedPtr>(); } Buffer::WatermarkBufferPtr ConnectionManagerImpl::ActiveStreamDecoderFilter::createBuffer() { auto buffer = std::make_unique<Buffer::WatermarkBuffer>([this]() -> void { this->requestDataDrained(); }, [this]() -> void { this->requestDataTooLarge(); }); buffer->setWatermarks(parent_.buffer_limit_); return buffer; } HeaderMap& ConnectionManagerImpl::ActiveStreamDecoderFilter::addDecodedTrailers() { return parent_.addDecodedTrailers(); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::addDecodedData(Buffer::Instance& data, bool streaming) { parent_.addDecodedData(*this, data, streaming); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::injectDecodedDataToFilterChain( Buffer::Instance& data, bool end_stream) { parent_.decodeData(this, data, end_stream, ActiveStream::FilterIterationStartState::CanStartFromCurrent); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::continueDecoding() { commonContinue(); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::encode100ContinueHeaders( HeaderMapPtr&& headers) { // If Envoy is not configured to proxy 100-Continue responses, swallow the 100 Continue // here. This avoids the potential situation where Envoy strips Expect: 100-Continue and sends a // 100-Continue, then proxies a duplicate 100 Continue from upstream. 
if (parent_.connection_manager_.config_.proxy100Continue()) { parent_.continue_headers_ = std::move(headers); parent_.encode100ContinueHeaders(nullptr, *parent_.continue_headers_); } } void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeHeaders(HeaderMapPtr&& headers, bool end_stream) { parent_.response_headers_ = std::move(headers); parent_.encodeHeaders(nullptr, *parent_.response_headers_, end_stream); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeData(Buffer::Instance& data, bool end_stream) { parent_.encodeData(nullptr, data, end_stream, ActiveStream::FilterIterationStartState::CanStartFromCurrent); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeTrailers(HeaderMapPtr&& trailers) { parent_.response_trailers_ = std::move(trailers); parent_.encodeTrailers(nullptr, *parent_.response_trailers_); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeMetadata( MetadataMapPtr&& metadata_map_ptr) { parent_.encodeMetadata(nullptr, std::move(metadata_map_ptr)); } void ConnectionManagerImpl::ActiveStreamDecoderFilter:: onDecoderFilterAboveWriteBufferHighWatermark() { ENVOY_STREAM_LOG(debug, "Read-disabling downstream stream due to filter callbacks.", parent_); parent_.response_encoder_->getStream().readDisable(true); parent_.connection_manager_.stats_.named_.downstream_flow_control_paused_reading_total_.inc(); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::requestDataTooLarge() { ENVOY_STREAM_LOG(debug, "request data too large watermark exceeded", parent_); if (parent_.state_.decoder_filters_streaming_) { onDecoderFilterAboveWriteBufferHighWatermark(); } else { parent_.connection_manager_.stats_.named_.downstream_rq_too_large_.inc(); sendLocalReply(Code::PayloadTooLarge, CodeUtility::toString(Code::PayloadTooLarge), nullptr, absl::nullopt); } } void ConnectionManagerImpl::ActiveStreamDecoderFilter::requestDataDrained() { // If this is called it means the call to requestDataTooLarge() was a // streaming call, or a 
413 would have been sent. onDecoderFilterBelowWriteBufferLowWatermark(); } void ConnectionManagerImpl::ActiveStreamDecoderFilter:: onDecoderFilterBelowWriteBufferLowWatermark() { ENVOY_STREAM_LOG(debug, "Read-enabling downstream stream due to filter callbacks.", parent_); parent_.response_encoder_->getStream().readDisable(false); parent_.connection_manager_.stats_.named_.downstream_flow_control_resumed_reading_total_.inc(); } void ConnectionManagerImpl::ActiveStreamDecoderFilter::addDownstreamWatermarkCallbacks( DownstreamWatermarkCallbacks& watermark_callbacks) { // This is called exactly once per upstream-stream, by the router filter. Therefore, we // expect the same callbacks to not be registered twice. ASSERT(std::find(parent_.watermark_callbacks_.begin(), parent_.watermark_callbacks_.end(), &watermark_callbacks) == parent_.watermark_callbacks_.end()); parent_.watermark_callbacks_.emplace(parent_.watermark_callbacks_.end(), &watermark_callbacks); for (uint32_t i = 0; i < parent_.high_watermark_count_; ++i) { watermark_callbacks.onAboveWriteBufferHighWatermark(); } } void ConnectionManagerImpl::ActiveStreamDecoderFilter::removeDownstreamWatermarkCallbacks( DownstreamWatermarkCallbacks& watermark_callbacks) { ASSERT(std::find(parent_.watermark_callbacks_.begin(), parent_.watermark_callbacks_.end(), &watermark_callbacks) != parent_.watermark_callbacks_.end()); parent_.watermark_callbacks_.remove(&watermark_callbacks); } bool ConnectionManagerImpl::ActiveStreamDecoderFilter::recreateStream() { // Because the filter's and the HCM view of if the stream has a body and if // the stream is complete may differ, re-check bytesReceived() to make sure // there was no body from the HCM's point of view. if (!complete() || parent_.stream_info_.bytesReceived() != 0) { return false; } // n.b. we do not currently change the codecs to point at the new stream // decoder because the decoder callbacks are complete. 
It would be good to // null out that pointer but should not be necessary. HeaderMapPtr request_headers(std::move(parent_.request_headers_)); StreamEncoder* response_encoder = parent_.response_encoder_; parent_.response_encoder_ = nullptr; // This functionally deletes the stream (via deferred delete) so do not // reference anything beyond this point. parent_.connection_manager_.doEndStream(this->parent_); StreamDecoder& new_stream = parent_.connection_manager_.newStream(*response_encoder, true); new_stream.decodeHeaders(std::move(request_headers), true); return true; } Buffer::WatermarkBufferPtr ConnectionManagerImpl::ActiveStreamEncoderFilter::createBuffer() { auto buffer = new Buffer::WatermarkBuffer([this]() -> void { this->responseDataDrained(); }, [this]() -> void { this->responseDataTooLarge(); }); buffer->setWatermarks(parent_.buffer_limit_); return Buffer::WatermarkBufferPtr{buffer}; } void ConnectionManagerImpl::ActiveStreamEncoderFilter::addEncodedData(Buffer::Instance& data, bool streaming) { return parent_.addEncodedData(*this, data, streaming); } void ConnectionManagerImpl::ActiveStreamEncoderFilter::injectEncodedDataToFilterChain( Buffer::Instance& data, bool end_stream) { parent_.encodeData(this, data, end_stream, ActiveStream::FilterIterationStartState::CanStartFromCurrent); } HeaderMap& ConnectionManagerImpl::ActiveStreamEncoderFilter::addEncodedTrailers() { return parent_.addEncodedTrailers(); } void ConnectionManagerImpl::ActiveStreamEncoderFilter:: onEncoderFilterAboveWriteBufferHighWatermark() { ENVOY_STREAM_LOG(debug, "Disabling upstream stream due to filter callbacks.", parent_); parent_.callHighWatermarkCallbacks(); } void ConnectionManagerImpl::ActiveStreamEncoderFilter:: onEncoderFilterBelowWriteBufferLowWatermark() { ENVOY_STREAM_LOG(debug, "Enabling upstream stream due to filter callbacks.", parent_); parent_.callLowWatermarkCallbacks(); } void ConnectionManagerImpl::ActiveStreamEncoderFilter::continueEncoding() { commonContinue(); } void 
ConnectionManagerImpl::ActiveStreamEncoderFilter::responseDataTooLarge() { if (parent_.state_.encoder_filters_streaming_) { onEncoderFilterAboveWriteBufferHighWatermark(); } else { parent_.connection_manager_.stats_.named_.rs_too_large_.inc(); // If headers have not been sent to the user, send a 500. if (!headers_continued_) { // Make sure we won't end up with nested watermark calls from the body buffer. parent_.state_.encoder_filters_streaming_ = true; allowIteration(); Http::Utility::sendLocalReply( Grpc::Common::hasGrpcContentType(*parent_.request_headers_), [&](HeaderMapPtr&& response_headers, bool end_stream) -> void { parent_.response_headers_ = std::move(response_headers); parent_.response_encoder_->encodeHeaders(*parent_.response_headers_, end_stream); parent_.state_.local_complete_ = end_stream; }, [&](Buffer::Instance& data, bool end_stream) -> void { parent_.response_encoder_->encodeData(data, end_stream); parent_.state_.local_complete_ = end_stream; }, parent_.state_.destroyed_, Http::Code::InternalServerError, CodeUtility::toString(Http::Code::InternalServerError), absl::nullopt, parent_.is_head_request_); parent_.maybeEndEncode(parent_.state_.local_complete_); } else { resetStream(); } } } void ConnectionManagerImpl::ActiveStreamEncoderFilter::responseDataDrained() { onEncoderFilterBelowWriteBufferLowWatermark(); } void ConnectionManagerImpl::ActiveStreamFilterBase::resetStream() { parent_.connection_manager_.stats_.named_.downstream_rq_tx_reset_.inc(); parent_.connection_manager_.doEndStream(this->parent_); } uint64_t ConnectionManagerImpl::ActiveStreamFilterBase::streamId() { return parent_.stream_id_; } } // namespace Http } // namespace Envoy
dnoe/envoy
source/common/http/conn_manager_impl.cc
C++
apache-2.0
87,779
/* * Copyright 2004-2012 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package jp.fieldnotes.hatunatu.util.collection; import jp.fieldnotes.hatunatu.util.exception.SUnsupportedOperationException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import static org.hamcrest.CoreMatchers.*; import static org.junit.Assert.*; /** * @author wyukawa * */ public class EmptyIteratorTest { /** * @see org.junit.rules.ExpectedException */ @Rule public ExpectedException exception = ExpectedException.none(); /** * Test method for * {@link EmptyIterator#EmptyIterator()}. */ @Test public void testEmptyIterator() { EmptyIterator<String> emptyIterator = new EmptyIterator<String>(); assertThat(emptyIterator, is(notNullValue())); } /** * Test method for {@link EmptyIterator#remove()} * . */ @Test public void testRemove() { exception.expect(SUnsupportedOperationException.class); exception.expectMessage(is("remove")); EmptyIterator<String> emptyIterator = new EmptyIterator<String>(); emptyIterator.remove(); } /** * Test method for * {@link EmptyIterator#hasNext()}. */ @Test public void testHasNext() { EmptyIterator<String> emptyIterator = new EmptyIterator<String>(); assertThat(emptyIterator.hasNext(), is(false)); } /** * Test method for {@link EmptyIterator#next()}. 
*/ @Test public void testNext() { exception.expect(SUnsupportedOperationException.class); exception.expectMessage(is("next")); EmptyIterator<String> emptyIterator = new EmptyIterator<String>(); emptyIterator.next(); } }
azusa/hatunatu
hatunatu-util/src/test/java/jp/fieldnotes/hatunatu/util/collection/EmptyIteratorTest.java
Java
apache-2.0
2,329
/*
 * Copyright 2015 Google Inc
 *
 * Licensed under the Apache License, Version 2.0(the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Linq;
using Google.Apis.Dfareporting.v3_4;
using Google.Apis.Dfareporting.v3_4.Data;

namespace DfaReporting.Samples
{
    /// <summary>
    /// This example illustrates how to get a list of all the files for a report.
    /// </summary>
    class GetReportFiles : SampleBase
    {
        /// <summary>
        /// Returns a description about the code example.
        /// </summary>
        public override string Description
        {
            get
            {
                return "This example illustrates how to get a list of all the files" +
                       " for a report.\n";
            }
        }

        /// <summary>
        /// Main method, to run this code example as a standalone application.
        /// </summary>
        /// <param name="args">The command line arguments.</param>
        public static void Main(string[] args)
        {
            SampleBase sample = new GetReportFiles();
            Console.WriteLine(sample.Description);
            sample.Run(DfaReportingFactory.getInstance());
        }

        /// <summary>
        /// Run the code example.
        /// </summary>
        /// <param name="service">An initialized Dfa Reporting service object
        /// </param>
        public override void Run(DfareportingService service)
        {
            long reportId = long.Parse(_T("INSERT_REPORT_ID_HERE"));
            long profileId = long.Parse(_T("INSERT_USER_PROFILE_ID_HERE"));

            // Limit the fields returned.
            String fields = "nextPageToken,items(fileName,id,status)";

            FileList page;
            String pageToken = null;

            do
            {
                // Create and execute the report files list request.
                ReportsResource.FilesResource.ListRequest listRequest =
                    service.Reports.Files.List(profileId, reportId);
                listRequest.Fields = fields;
                listRequest.PageToken = pageToken;
                page = listRequest.Execute();

                foreach (File file in page.Items)
                {
                    Console.WriteLine(
                        "Report file with ID {0} and file name \"{1}\" has status \"{2}\".",
                        file.Id, file.FileName, file.Status);
                }

                // Update the next page token.
                pageToken = page.NextPageToken;
            } while (page.Items.Any() && !String.IsNullOrEmpty(pageToken));
        }
    }
}
googleads/googleads-dfa-reporting-samples
dotnet/v3.4/Reports/GetReportFiles.cs
C#
apache-2.0
2,743
package com.fishercoder.solutions; public class _42 { public static class Solution1 { /** * O(n) time and O(1) space, awesome! * * 1. first scan to find the max height index * 2. then scan from left up to max index and find all the water units up to the max height * 3. then scan from right down to max index and find all the water units down to the max height * 4. return the sum of those above two * * reference: https://discuss.leetcode.com/topic/22976/my-accepted-java-solution */ public int trap(int[] height) { if (height == null || height.length <= 2) { return 0; } int max = height[0]; int maxIndex = 0; for (int i = 0; i < height.length; i++) { if (height[i] > max) { max = height[i]; maxIndex = i; } } int water = 0; int leftMax = height[0]; for (int i = 0; i < maxIndex; i++) { if (height[i] > leftMax) { leftMax = height[i]; } else { water += leftMax - height[i]; } } int rightMax = height[height.length - 1]; for (int i = height.length - 1; i > maxIndex; i--) { if (height[i] > rightMax) { rightMax = height[i]; } else { water += rightMax - height[i]; } } return water; } } }
fishercoder1534/Leetcode
src/main/java/com/fishercoder/solutions/_42.java
Java
apache-2.0
1,640
package tafkacn.statsdclient; import com.lmax.disruptor.*; import java.io.IOException; import java.net.InetSocketAddress; import java.net.StandardSocketOptions; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.channels.DatagramChannel; import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; import java.nio.charset.CharsetEncoder; import java.nio.charset.CoderResult; import java.util.Random; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * A java statsd client that makes use of LMAX disruptor for smart batching * of counter updates to a statsd server * @author raymond.mak */ public class StatsdClient { final static private byte COUNTER_METRIC_TYPE = 0; final static private byte GAUGE_METRIC_TYPE = 1; final static private byte HISTOGRAM_METRIC_TYPE = 2; final static private byte METER_METRIC_TYPE = 3; final static private byte TIMER_METRIC_TYPE = 4; final static private short HUNDRED_PERCENT = (short)10000; final static private String[] METRIC_TYPE_STRINGS = new String[] { "|c", "|g", "|h", "|m", "|ms", }; static private class CounterEvent { private byte metricsType; private short samplingRate = HUNDRED_PERCENT; // In hundredth of a percentage point, 10000 being 100% private String key; private long magnitude; public CounterEvent() { super(); } public byte getMetricsType() { return metricsType; } public void setMetricsType(byte value) { metricsType = value; } public short getSamplingRate() { return samplingRate; } public void setSamplingRate(short value) { samplingRate = value; } public String getKey() { return key; } public void setKey(String value) { key = value; } public long getMagnitude() { return magnitude; } public void setMagnitude(long value) { magnitude = value; } final static public EventFactory<CounterEvent> EVENT_FACTORY = new EventFactory<CounterEvent>() { @Override public CounterEvent newInstance() { return new CounterEvent(); 
} }; } final static private ThreadLocal<Random> threadLocalSampler = new ThreadLocal<Random>() { @Override public Random initialValue() { return new Random(System.currentTimeMillis()); } }; private class CounterEventHandler implements EventHandler<CounterEvent> { final static private int MAX_MESSAGE_SIZE = 1000; // Assuming MTU being 1500 for typical datacenter Eternet implementation, cap message size to 1000 bytes to avoid fragmentation private DatagramChannel datagramChannel; private SocketChannel socketChannel; private ByteBuffer counterMessageBuffer; private int messagesInBuffer = 0; private CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder(); private WritableByteChannel outputChannel; private String statsdHostName; private int statsdPort; private long tcpConnectionRetryInterval = Long.MAX_VALUE; private long lastSuccessfulFlushTimestamp; private int timeDelayInMillisAfterIncompleteFlush; public CounterEventHandler( String statsdHostName, int statsdPort, boolean useTcp, int tcpMessageSize, long tcpConnectionRetryInterval, int timeDelayInMillisAfterIncompleteFlush) throws IOException { this.statsdHostName = statsdHostName; this.statsdPort = statsdPort; this.timeDelayInMillisAfterIncompleteFlush = timeDelayInMillisAfterIncompleteFlush; if (useTcp) { this.counterMessageBuffer = ByteBuffer.allocate(tcpMessageSize); this.tcpConnectionRetryInterval = tcpConnectionRetryInterval; openSocket(); } else { this.counterMessageBuffer = ByteBuffer.allocate(MAX_MESSAGE_SIZE); this.datagramChannel = DatagramChannel.open(); this.datagramChannel.connect( new InetSocketAddress(statsdHostName, statsdPort)); this.outputChannel = datagramChannel; } } private void openSocket() throws IOException { this.socketChannel = SocketChannel.open(); this.socketChannel.setOption(StandardSocketOptions.SO_KEEPALIVE, true); this.socketChannel.connect(new InetSocketAddress(statsdHostName, statsdPort)); this.outputChannel = socketChannel; this.socketChannel.shutdownInput(); } private void 
flush() throws IOException { onFlushMessageBuffer(); counterMessageBuffer.flip(); try { outputChannel.write(counterMessageBuffer); lastSuccessfulFlushTimestamp = System.currentTimeMillis(); } catch (Throwable e) { onFlushMessageFailure(e); if (socketChannel != null) { if (System.currentTimeMillis() - lastSuccessfulFlushTimestamp < tcpConnectionRetryInterval) { lastSuccessfulFlushTimestamp = System.currentTimeMillis(); if (socketChannel.isOpen()) { socketChannel.close(); } try { openSocket(); onReconnection(); } catch (Throwable innerException) {} } } } finally { counterMessageBuffer.clear(); messagesInBuffer = 0; } } private void addMessageToBuffer(String message) throws Exception { counterMessageBuffer.mark(); // Remember current position onCounterMessageReceived(); if (encoder.encode(CharBuffer.wrap(message), counterMessageBuffer, true) == CoderResult.OVERFLOW) { counterMessageBuffer.reset(); flush(); if (encoder.encode(CharBuffer.wrap(message), counterMessageBuffer, true) == CoderResult.OVERFLOW) { // Well the message is too big to fit in a single packet, throw it away counterMessageBuffer.clear(); onOversizedMessage(message); } else { messagesInBuffer++; } } else { messagesInBuffer++; } } @Override public void onEvent(CounterEvent t, long l, boolean bln) throws Exception { // Construct counter message String message = t.getKey().replace(":", "_") + ":" + Long.toString(t.getMagnitude()) + METRIC_TYPE_STRINGS[t.getMetricsType()] + (t.getSamplingRate() < HUNDRED_PERCENT ? 
String.format("|@0.%4d\n", t.getSamplingRate()/HUNDRED_PERCENT, t.getSamplingRate()%HUNDRED_PERCENT) : "\n"); addMessageToBuffer(message); if (bln && messagesInBuffer > 0) { flush(); if (timeDelayInMillisAfterIncompleteFlush > 0) { Thread.sleep(timeDelayInMillisAfterIncompleteFlush); } } } public void close() throws Exception { if (datagramChannel != null) { datagramChannel.close(); } if (socketChannel != null) { socketChannel.close(); } } } private RingBuffer<CounterEvent> ringBuffer; private ExecutorService eventProcessorExecutor; private BatchEventProcessor<CounterEvent> counterEventProcessor; private CounterEventHandler counterEventHandler; private short defaultSamplingRate; protected StatsdClient() {} public StatsdClient( int ringBufferSize, final String statsdHostName, final int statsdPort, final boolean useTcp, final int tcpMessageSize, final int tcpConnectionRetryInterval, final int timeDelayInMillisAfterIncompletFlush, short defaultSamplingRate) throws Exception { this(ringBufferSize, statsdHostName, statsdPort, useTcp, tcpMessageSize, tcpConnectionRetryInterval, timeDelayInMillisAfterIncompletFlush, defaultSamplingRate, new BlockingWaitStrategy()); } static private class NullClient extends StatsdClient { public NullClient() {} @Override public void sendCounterMessage(byte metricsType, short samplingRate, String key, long magnitude) {} } static final public StatsdClient NULL = new NullClient(); public StatsdClient( int ringBufferSize, final String statsdHostName, final int statsdPort, final boolean useTcp, final int tcpMessageSize, final int tcpConnectionRetryInterval, final int timeDelayInMillisAfterIncompletFlush, short defaultSamplingRate, WaitStrategy waitStrategy ) throws Exception { this.defaultSamplingRate = defaultSamplingRate; ringBuffer = new RingBuffer<>(CounterEvent.EVENT_FACTORY, new MultiThreadedClaimStrategy(ringBufferSize), waitStrategy); counterEventHandler = new CounterEventHandler(statsdHostName, statsdPort, useTcp, tcpMessageSize, 
tcpConnectionRetryInterval, timeDelayInMillisAfterIncompletFlush); counterEventProcessor = new BatchEventProcessor<>(ringBuffer, ringBuffer.newBarrier(), counterEventHandler); ringBuffer.setGatingSequences(counterEventProcessor.getSequence()); eventProcessorExecutor = Executors.newSingleThreadExecutor(); eventProcessorExecutor.submit(counterEventProcessor); } public void shutdown() throws Exception { if (counterEventProcessor != null) { counterEventProcessor.halt(); counterEventProcessor = null; } if (counterEventHandler != null) { counterEventHandler.close(); } if (eventProcessorExecutor != null) { eventProcessorExecutor.shutdown(); eventProcessorExecutor = null; } } public void incrementCounter(String key, long magnitude) { incrementCounter(key, magnitude, defaultSamplingRate); } public void incrementCounter(String key, long magnitude, short samplingRate) { sendCounterMessage(COUNTER_METRIC_TYPE, samplingRate, key, magnitude); } public void updateGauge(String key, long magnitude) { updateGauge(key, magnitude, defaultSamplingRate); } public void updateGauge(String key, long magnitude, short samplingRate) { sendCounterMessage(GAUGE_METRIC_TYPE, samplingRate, key, magnitude); } public void updateHistogram(String key, long magnitude) { updateHistogram(key, magnitude, defaultSamplingRate); } public void updateHistogram(String key, long magnitude, short samplingRate) { sendCounterMessage(HISTOGRAM_METRIC_TYPE, samplingRate, key, magnitude); } public void updateMeter(String key, long magnitude) { updateMeter(key, magnitude, defaultSamplingRate); } public void updateMeter(String key, long magnitude, short samplingRate) { sendCounterMessage(METER_METRIC_TYPE, samplingRate, key, magnitude); } public void updateTimer(String key, long magnitude) { updateTimer(key, magnitude, defaultSamplingRate); } public void updateTimer(String key, long magnitude, short samplingRate) { sendCounterMessage(TIMER_METRIC_TYPE, samplingRate, key, magnitude); } // Extension points for 
intercepting interesting events in the statsd client // such as a counter message being picked up by the consumer thread and // when the message buffer is flushed down the wire. protected void onCounterMessageReceived() {} protected void onFlushMessageBuffer() {} protected void onFlushMessageFailure(Throwable exception) {} protected void onReconnection() {} protected void onOversizedMessage(String message) {} protected void onRingBufferOverflow() {} public void sendCounterMessage(byte metricsType, short samplingRate, String key, long magnitude) { if (samplingRate < HUNDRED_PERCENT && threadLocalSampler.get().nextInt(HUNDRED_PERCENT) >= samplingRate) { return; } try { long sequence = ringBuffer.tryNext(1); CounterEvent event = ringBuffer.get(sequence); event.setMetricsType(metricsType); event.setSamplingRate(samplingRate); event.setKey(key); event.setMagnitude(magnitude); ringBuffer.publish(sequence); } catch (InsufficientCapacityException e) { onRingBufferOverflow(); } } }
tafkacn/statsdclient
src/main/java/tafkacn/StatsdClient.java
Java
apache-2.0
14,025
$(function() { /* ------------ 合作伙伴信息 ------------ */ $( "#partner_id" ).combobox( { url : ksa.buildUrl( "/data/combo", "bd-partner-all" ), onSelect : function( record ) { $grid.datagrid( "load", { id : record.id } ); } } ); // 确认选择 $("#btn_ok").click( function() { var results = $("#extra_grid").datagrid("getSelected"); parent.$.close( results ); return false; }); // 添加确认 $("#btn_extra_ok").click( function() { $("#btn_extra_ok").attr("disabled", "disabled"); var extra = $("#extra").val(); if( ! extra ) { top.$.messager.warning("请输入新建的抬头信息。"); $("#btn_extra_ok").attr("disabled", null ); return false; } else { // 保存 $.ajax({ url: ksa.buildUrl( "/component/bd", "partner-alias-insert" ), data: { "partner.id" : $("#partner_id").combobox("getValue"), extra : extra }, success: function( result ) { try { if (result.status == "success") { // 添加成功 parent.$.close( extra ); return false; } else { $.messager.error( result.message ); $("#btn_extra_ok").attr("disabled", null ); } } catch (e) { $("#btn_extra_ok").attr("disabled", null ); } } }); } } ); // 添加关闭 $("#btn_extra_close").click( function() { $("#extra_window").window("close"); } ); // 单位别名 var NEW_LINE = "\n"; $.fn.datagrid.defaults.loadEmptyMsg = '<span class="label label-warning">注意</span> 没有获取到任何数据,请选择新的合作单位。'; var $grid = $('#extra_grid').datagrid({ title : '抬头信息:' + PARTNER_NAME, url: ksa.buildUrl( "/data/grid", "bd-partner-extra" ), pagination : false, queryParams : { id : $("#partner_id").combobox("getValue") }, fit : true, onDblClickRow : function( i, data ) { parent.$.close( data ); return false; }, columns:[[ { field:'dump', checkbox:true }, { field:'name', title:'抬头', width:200, formatter:function(v,data,i) { var a = data; try { while( a.indexOf( NEW_LINE ) >= 0 ) { a = a.replace( NEW_LINE, "<br/>" ); } return a; } catch(e) { return data; } } } ]], toolbar:[{ text:'添加...', cls: 'btn-primary', iconCls:'icon-plus icon-white', handler:function(){ var id = $("#partner_id").combobox("getValue"); if( !id || id == "" 
) { top.$.messager.warning("请首先选择合作单位,再进行抬头信息的添加操作。"); return; } $("#extra_window").window("open"); $("#extra").val(""); try { $("#extra")[0].focus(); } catch(e){} } }, '-', { text:'删除', cls: 'btn-danger', iconCls:'icon-trash icon-white', handler:function(){ deleteExtra(); } }] }); // 删除 function deleteExtra() { var row = $grid.datagrid( "getSelected" ); if( ! row ) { top.$.messager.warning("请选择一条数据后,再进行删除操作。"); return; } $.messager.confirm( "确定删除所选抬头吗?", function( ok ){ if( ok ) { $.ajax({ url: ksa.buildUrl( "/component/bd", "partner-alias-delete" ), data: { "partner.id" : $("#partner_id").combobox("getValue"), extra : $grid.datagrid("getSelected") }, success: function( result ) { try { if (result.status == "success") { $.messager.success( result.message ); $grid.datagrid( "reload" ); } else { $.messager.error( result.message ); } } catch (e) { } } }); } } ); } });
xsocket/ksa
ksa-web-root/ksa-bd-web/src/main/resources/ui/bd/component/partner-alias-selection.js
JavaScript
apache-2.0
4,709
/* ****************************************************************************** * Copyright 2017 Tourmaline Labs, Inc. All rights reserved. * Confidential & Proprietary - Tourmaline Labs, Inc. ("TLI") * * The party receiving this software directly from TLI (the "Recipient") * may use this software as reasonably necessary solely for the purposes * set forth in the agreement between the Recipient and TLI (the * "Agreement"). The software may be used in source code form solely by * the Recipient's employees (if any) authorized by the Agreement. Unless * expressly authorized in the Agreement, the Recipient may not sublicense, * assign, transfer or otherwise provide the source code to any third * party. Tourmaline Labs, Inc. retains all ownership rights in and * to the software * * This notice supersedes any other TLI notices contained within the software * except copyright notices indicating different years of publication for * different portions of the software. This notice does not supersede the * application of any third party copyright notice to that third party's * code. 
******************************************************************************/ package com.tourmaline.example.activities; import android.Manifest; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import androidx.annotation.NonNull; import androidx.core.app.ActivityCompat; import androidx.core.content.ContextCompat; import androidx.localbroadcastmanager.content.LocalBroadcastManager; import android.os.PowerManager; import android.provider.Settings; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GoogleApiAvailability; import com.google.android.gms.common.GooglePlayServicesUtil; import com.tourmaline.context.CompletionListener; import com.tourmaline.context.Engine; import com.tourmaline.example.ExampleApplication; import com.tourmaline.example.R; import com.tourmaline.example.helpers.Monitoring; public class MainActivity extends Activity { private static final String TAG = "MainActivity"; private static final int PERMISSIONS_REQUEST_BACKGROUND = 210; private static final int PERMISSIONS_REQUEST_FOREGROUND = 211; private LinearLayout apiLayout; private TextView engStateTextView; private Button startAutomaticButton; private Button startManualButton; private Button stopButton; private LinearLayout alertLayout; private TextView alertGpsTextView; private TextView alertLocationTextView; private TextView alertMotionTextView; private TextView alertPowerTextView; private TextView alertBatteryTextView; private TextView alertSdkUpToDateTextView; private Monitoring.State 
targetMonitoringState; private boolean paused = true; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView( R.layout.activity_main); apiLayout = findViewById(R.id.api_layout); engStateTextView = findViewById(R.id.engine_State); startAutomaticButton = findViewById(R.id.start_button_automatic); startManualButton = findViewById(R.id.start_button_manual); stopButton = findViewById(R.id.stop_button); alertLayout = findViewById(R.id.alert_layout); alertGpsTextView = findViewById(R.id.alert_gps); alertLocationTextView = findViewById(R.id.alert_location); alertMotionTextView = findViewById(R.id.alert_motion); alertPowerTextView = findViewById(R.id.alert_power); alertBatteryTextView = findViewById(R.id.alert_battery); alertSdkUpToDateTextView = findViewById(R.id.alert_sdk); startAutomaticButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { tryToStartMonitoring(Monitoring.State.AUTOMATIC); } }); startManualButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { tryToStartMonitoring(Monitoring.State.MANUAL); } }); stopButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { stopMonitoring(); } }); final Button locationsButton = findViewById(R.id.locations_button); locationsButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { final Intent intent = new Intent(MainActivity.this, LocationsActivity.class); startActivity(intent); } }); final Button drivesButton = findViewById(R.id.drives_button); drivesButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { final Intent intent = new Intent(MainActivity.this, DrivesActivity.class); startActivity(intent); } }); final Button telematicsButton = findViewById(R.id.telematics_button); telematicsButton.setOnClickListener(new View.OnClickListener() { @Override public 
void onClick(View view) { final Intent intent = new Intent(MainActivity.this, TelematicsActivity.class); startActivity(intent); } }); registerEngineAlerts(); final Monitoring.State monitoring = Monitoring.getState(getApplicationContext()); makeUIChangesOnEngineMonitoring(monitoring); tryToStartMonitoring(monitoring); } @Override protected void onResume() { super.onResume(); paused = false; setAlerts(); } @Override protected void onPause() { paused = true; super.onPause(); } @Override protected void onDestroy() { unregisterEngineAlerts(); super.onDestroy(); } private void tryToStartMonitoring(final Monitoring.State monitoring) { if(monitoring == Monitoring.State.STOPPED) { stopMonitoring(); return; } final int googlePlayStat = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(this); if (googlePlayStat == ConnectionResult.SUCCESS) { //check GooglePlayServices targetMonitoringState = monitoring; if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) { if(ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)==PackageManager.PERMISSION_GRANTED) { //very old platform allow the permission from the manifest (no user request is needed) startMonitoring(targetMonitoringState); return; } //Implicit background location ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, PERMISSIONS_REQUEST_BACKGROUND); } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.Q) { //The system popup will show "Always Allow" for the location permission ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_BACKGROUND_LOCATION, Manifest.permission.ACTIVITY_RECOGNITION}, PERMISSIONS_REQUEST_BACKGROUND); } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.Q) { //Need to ask Foreground then Background location permission ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION, 
Manifest.permission.ACTIVITY_RECOGNITION}, PERMISSIONS_REQUEST_FOREGROUND); } } else { Log.i(TAG, "Google play status is " + googlePlayStat); stopMonitoring(); GooglePlayServicesUtil.showErrorDialogFragment(googlePlayStat, this, null, 0, new DialogInterface.OnCancelListener() { @Override public void onCancel(DialogInterface dialogInterface) {} }); } } private void startMonitoring(final Monitoring.State monitoring) { if(monitoring == Monitoring.State.STOPPED) { stopMonitoring(); return; } presentBatteryOptimisationSettings(); if (!Engine.IsInitialized()) { //check Engine State ((ExampleApplication)getApplication()).initEngine((monitoring==Monitoring.State.AUTOMATIC), new CompletionListener() { @Override public void OnSuccess() { makeUIChangesOnEngineMonitoring(monitoring); Monitoring.setState(getApplicationContext(), monitoring); } @Override public void OnFail(int i, String s) { Toast.makeText(MainActivity.this, "Error starting the Engine: " + i + " -> " + s, Toast.LENGTH_LONG).show(); stopMonitoring(); } }); } else { final Monitoring.State currentMonitoring = Monitoring.getState(getApplicationContext()); if(currentMonitoring==monitoring) { makeUIChangesOnEngineMonitoring(monitoring); } else { makeUIChangesOnEngineMonitoring(currentMonitoring); Toast.makeText(MainActivity.this, "Error can't switch monitoring state without stopping ", Toast.LENGTH_LONG).show(); } } } private void stopMonitoring() { ((ExampleApplication)getApplication()).destroyEngine(new CompletionListener() { @Override public void OnSuccess() { makeUIChangesOnEngineMonitoring(Monitoring.State.STOPPED); Monitoring.setState(getApplicationContext(), Monitoring.State.STOPPED); } @Override public void OnFail(int i, String s) { Toast.makeText(MainActivity.this, "Error destroying the Engine: " + i + " -> " + s, Toast.LENGTH_LONG).show(); } }); } private boolean permissionGranted(@NonNull String[] permissions, @NonNull int[] grantResults) { boolean permissionGranted = true; for ( int i = 0; i < 
grantResults.length; ++i ) { if( grantResults[i] != PackageManager.PERMISSION_GRANTED) { Log.e(TAG, "Failed to get grant results for " + permissions[i]); permissionGranted = false; } } return permissionGranted; } private boolean permissionGranted(@NonNull String permission, @NonNull String[] permissions, @NonNull int[] grantResults) { for ( int i = 0; i < grantResults.length; ++i ) { if( permission.equals(permissions[i]) && grantResults[i] == PackageManager.PERMISSION_GRANTED) { return true; } } return false; } @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { if(permissionGranted(permissions, grantResults) ) { Log.i( TAG, "Permissions granted for requestCode " + requestCode); } else { Log.i( TAG, "Permissions missing for requestCode " + requestCode); } if (Build.VERSION.SDK_INT > Build.VERSION_CODES.Q) { if (requestCode == PERMISSIONS_REQUEST_FOREGROUND) { if (permissionGranted(Manifest.permission.ACCESS_FINE_LOCATION, permissions, grantResults)) { ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_BACKGROUND_LOCATION}, PERMISSIONS_REQUEST_BACKGROUND); return; } startMonitoring(targetMonitoringState); } } if(requestCode == PERMISSIONS_REQUEST_BACKGROUND) { startMonitoring(targetMonitoringState); } } public boolean presentBatteryOptimisationSettings() { Log.d(TAG, "Battery settings"); PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE); if(Build.VERSION.SDK_INT < Build.VERSION_CODES.M || powerManager.isIgnoringBatteryOptimizations(getPackageName())) { return false; } //android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS Intent intent = new Intent(Settings.ACTION_REQUEST_IGNORE_BATTERY_OPTIMIZATIONS, Uri.parse("package:" + getPackageName())); if(intent.resolveActivity(getPackageManager()) == null) { intent = new Intent(Settings.ACTION_IGNORE_BATTERY_OPTIMIZATION_SETTINGS); if(intent.resolveActivity(getPackageManager()) == 
null) { return false; } } startActivity(intent); return true; } private void makeUIChangesOnEngineMonitoring(final Monitoring.State monitoring) { runOnUiThread( new Runnable() { @Override public void run() { switch (monitoring) { case STOPPED: { apiLayout.setVisibility(View.GONE); alertLayout.setVisibility(View.GONE); engStateTextView.setText(getResources().getString(R.string.not_monitoring)); startAutomaticButton.setEnabled(true); startManualButton.setEnabled(true); stopButton.setEnabled(false); break; } case AUTOMATIC: { apiLayout.setVisibility(View.VISIBLE); alertLayout.setVisibility(View.VISIBLE); engStateTextView.setText(getResources().getString(R.string.automatic_monitoring)); startAutomaticButton.setEnabled(false); startManualButton.setEnabled(false); stopButton.setEnabled(true); break; } case MANUAL: { apiLayout.setVisibility(View.VISIBLE); alertLayout.setVisibility(View.VISIBLE); engStateTextView.setText(getResources().getString(R.string.manual_monitoring)); startAutomaticButton.setEnabled(false); startManualButton.setEnabled(false); stopButton.setEnabled(true); Toast.makeText(MainActivity.this, "No drive will be detected until started by you! 
(click on DRIVES)", Toast.LENGTH_LONG).show(); break; } } } } ); } private BroadcastReceiver receiver; private void setAlerts() { if(paused) return; final ExampleApplication app = (ExampleApplication) getApplication(); showAlertGps(!app.isGpsEnable()); showAlertLocation(!app.isLocationPermissionGranted()); showAlertMotion(!app.isActivityRecognitionPermissionGranted()); showAlertBattery(app.isBatteryOptimisationEnable()); showAlertPower(app.isPowerSavingEnable()); showAlertSdkUpToDate(!app.isSdkUpToDate()); } private void registerEngineAlerts() { final LocalBroadcastManager mgr = LocalBroadcastManager.getInstance(this); receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent i) { int state = i.getIntExtra("state", Engine.INIT_SUCCESS); switch (state) { case Engine.GPS_ENABLED: case Engine.GPS_DISABLED: case Engine.LOCATION_PERMISSION_GRANTED: case Engine.LOCATION_PERMISSION_DENIED: case Engine.ACTIVITY_RECOGNITION_PERMISSION_GRANTED: case Engine.ACTIVITY_RECOGNITION_PERMISSION_DENIED: case Engine.POWER_SAVE_MODE_DISABLED: case Engine.POWER_SAVE_MODE_ENABLED: case Engine.BATTERY_OPTIMIZATION_DISABLED: case Engine.BATTERY_OPTIMIZATION_ENABLED: case Engine.BATTERY_OPTIMIZATION_UNKNOWN: case Engine.SDK_UP_TO_DATE: case Engine.SDK_UPDATE_AVAILABLE: case Engine.SDK_UPDATE_MANDATORY: { setAlerts(); break; } default: break; } setAlerts(); } }; mgr.registerReceiver(receiver, new IntentFilter(Engine.ACTION_LIFECYCLE)); } private void unregisterEngineAlerts() { if(receiver!=null) { final LocalBroadcastManager mgr = LocalBroadcastManager.getInstance(this); mgr.unregisterReceiver(receiver); } } private void showAlertGps(boolean show) { if(show) { alertGpsTextView.setText("GPS *** OFF"); alertGpsTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertGpsTextView.setText("GPS *** ON"); alertGpsTextView.setTextColor(getResources().getColor(R.color.blue)); } } private void showAlertLocation(boolean show) { if(show) { 
alertLocationTextView.setText("Location permission *** OFF"); alertLocationTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertLocationTextView.setText("Location permission *** ON"); alertLocationTextView.setTextColor(getResources().getColor(R.color.blue)); } } private void showAlertMotion(boolean show) { if(show) { alertMotionTextView.setText("Motion permission *** OFF"); alertMotionTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertMotionTextView.setText("Motion permission *** ON"); alertMotionTextView.setTextColor(getResources().getColor(R.color.blue)); } } private void showAlertPower(boolean show) { if(show) { alertPowerTextView.setText("Power saving mode *** ON"); alertPowerTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertPowerTextView.setText("Power saving mode *** OFF"); alertPowerTextView.setTextColor(getResources().getColor(R.color.blue)); } } private void showAlertBattery(boolean show) { if(show) { alertBatteryTextView.setText("Battery optimisation *** ON"); alertBatteryTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertBatteryTextView.setText("Battery optimisation *** OFF"); alertBatteryTextView.setTextColor(getResources().getColor(R.color.blue)); } } private void showAlertSdkUpToDate(boolean show) { if(show) { alertSdkUpToDateTextView.setText("SDK up to date *** NO"); alertSdkUpToDateTextView.setTextColor(getResources().getColor(R.color.red)); } else { alertSdkUpToDateTextView.setText("SDK up to date *** YES"); alertSdkUpToDateTextView.setTextColor(getResources().getColor(R.color.blue)); } } }
tourmalinelabs/AndroidTLKitExample
app/src/main/java/com/tourmaline/example/activities/MainActivity.java
Java
apache-2.0
19,865
/* Copyright 2017 Luis Pabón luis@portworx.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package sanity import ( "context" "crypto/rand" "fmt" "io/ioutil" "os" "os/exec" "strings" "testing" "time" "github.com/kubernetes-csi/csi-test/utils" yaml "gopkg.in/yaml.v2" "google.golang.org/grpc" . "github.com/onsi/ginkgo" "github.com/onsi/ginkgo/reporters" . "github.com/onsi/gomega" ) // CSISecrets consists of secrets used in CSI credentials. type CSISecrets struct { CreateVolumeSecret map[string]string `yaml:"CreateVolumeSecret"` DeleteVolumeSecret map[string]string `yaml:"DeleteVolumeSecret"` ControllerPublishVolumeSecret map[string]string `yaml:"ControllerPublishVolumeSecret"` ControllerUnpublishVolumeSecret map[string]string `yaml:"ControllerUnpublishVolumeSecret"` ControllerValidateVolumeCapabilitiesSecret map[string]string `yaml:"ControllerValidateVolumeCapabilitiesSecret"` NodeStageVolumeSecret map[string]string `yaml:"NodeStageVolumeSecret"` NodePublishVolumeSecret map[string]string `yaml:"NodePublishVolumeSecret"` CreateSnapshotSecret map[string]string `yaml:"CreateSnapshotSecret"` DeleteSnapshotSecret map[string]string `yaml:"DeleteSnapshotSecret"` } // Config provides the configuration for the sanity tests. It // needs to be initialized by the user of the sanity package. type Config struct { // TargetPath is the *parent* directory for NodePublishVolumeRequest.target_path. // It gets created and removed by csi-sanity. 
TargetPath string // StagingPath is the NodeStageVolumeRequest.staging_target_path. // It gets created and removed by csi-sanity. StagingPath string Address string ControllerAddress string SecretsFile string TestVolumeSize int64 // Target size for ExpandVolume requests. If not specified it defaults to TestVolumeSize + 1 GB TestVolumeExpandSize int64 TestVolumeParametersFile string TestVolumeParameters map[string]string TestNodeVolumeAttachLimit bool JUnitFile string // Callback functions to customize the creation of target and staging // directories. Returns the new paths for mount and staging. // If not defined, directories are created in the default way at TargetPath // and StagingPath on the host. // // Both functions can replace the suggested path. What the test then uses // is the path returned by them. // // Note that target and staging directory have different // semantics in the CSI spec: for NodeStateVolume, // CreateTargetDir must create the directory and return the // full path to it. For NodePublishVolume, CreateStagingDir // must create the *parent* directory of `path` (or some other // directory) and return the full path for an entry inside // that created directory. CreateTargetDir func(path string) (string, error) CreateStagingDir func(path string) (string, error) // Callback functions to customize the removal of the target and staging // directories. // If not defined, directories are removed in the default way at TargetPath // and StagingPath on the host. // // Both functions are passed the actual paths as used during the test. // // Note that RemoveTargetPath only needs to remove the *parent* of the // given path. The CSI driver should have removed the entry at that path // already. RemoveTargetPath func(path string) error RemoveStagingPath func(path string) error // Commands to be executed for customized creation of the target and staging // paths. This command must be available on the host where sanity runs. 
The // stdout of the commands are the paths for mount and staging. CreateTargetPathCmd string CreateStagingPathCmd string // Timeout for the executed commands for path creation. CreatePathCmdTimeout int // Commands to be executed for customized removal of the target and staging // paths. Thie command must be available on the host where sanity runs. RemoveTargetPathCmd string RemoveStagingPathCmd string // Timeout for the executed commands for path removal. RemovePathCmdTimeout int // IDGen is an optional interface for callers to provide a generator for // valid Volume and Node IDs. Defaults to DefaultIDGenerator which generates // generic string IDs IDGen IDGenerator } // SanityContext holds the variables that each test can depend on. It // gets initialized before each test block runs. type SanityContext struct { Config *Config Conn *grpc.ClientConn ControllerConn *grpc.ClientConn Secrets *CSISecrets connAddress string controllerConnAddress string // Target and staging paths derived from the sanity config. TargetPath string StagingPath string } // Test will test the CSI driver at the specified address by // setting up a Ginkgo suite and running it. 
func Test(t *testing.T, reqConfig *Config) {
	// Optional YAML file with volume parameters for CreateVolume requests.
	path := reqConfig.TestVolumeParametersFile
	if len(path) != 0 {
		yamlFile, err := ioutil.ReadFile(path)
		if err != nil {
			panic(fmt.Sprintf("failed to read file %q: %v", path, err))
		}
		err = yaml.Unmarshal(yamlFile, &reqConfig.TestVolumeParameters)
		if err != nil {
			panic(fmt.Sprintf("error unmarshaling yaml: %v", err))
		}
	}
	// Fall back to the generic ID generator unless the caller supplied one.
	if reqConfig.IDGen == nil {
		reqConfig.IDGen = &DefaultIDGenerator{}
	}
	sc := &SanityContext{
		Config: reqConfig,
	}
	registerTestsInGinkgo(sc)
	RegisterFailHandler(Fail)
	var specReporters []Reporter
	if reqConfig.JUnitFile != "" {
		junitReporter := reporters.NewJUnitReporter(reqConfig.JUnitFile)
		specReporters = append(specReporters, junitReporter)
	}
	RunSpecsWithDefaultAndCustomReporters(t, "CSI Driver Test Suite", specReporters)
	// The gRPC connection is shared across test cases (see Teardown);
	// close it once when the whole suite is done.
	if sc.Conn != nil {
		sc.Conn.Close()
	}
}

// GinkgoTest registers the sanity tests with an externally managed Ginkgo
// suite instead of running them directly (no reporters, no suite run here).
func GinkgoTest(reqConfig *Config) {
	sc := &SanityContext{
		Config: reqConfig,
	}
	registerTestsInGinkgo(sc)
}

// Setup prepares the context for one test block: it loads secrets (if a
// secrets file is configured), (re)connects to the CSI driver and its
// controller endpoint, and creates the target/staging mount directories.
func (sc *SanityContext) Setup() {
	var err error

	if len(sc.Config.SecretsFile) > 0 {
		sc.Secrets, err = loadSecrets(sc.Config.SecretsFile)
		Expect(err).NotTo(HaveOccurred())
	} else {
		// No secrets file: use an empty secrets object.
		sc.Secrets = &CSISecrets{}
	}

	// It is possible that a test sets sc.Config.Address
	// dynamically (and differently!) in a BeforeEach, so only
	// reuse the connection if the address is still the same.
	if sc.Conn == nil || sc.connAddress != sc.Config.Address {
		if sc.Conn != nil {
			sc.Conn.Close()
		}
		By("connecting to CSI driver")
		sc.Conn, err = utils.Connect(sc.Config.Address)
		Expect(err).NotTo(HaveOccurred())
		sc.connAddress = sc.Config.Address
	} else {
		By(fmt.Sprintf("reusing connection to CSI driver at %s", sc.connAddress))
	}

	if sc.ControllerConn == nil || sc.controllerConnAddress != sc.Config.ControllerAddress {
		// If controller address is empty, use the common connection.
		if sc.Config.ControllerAddress == "" {
			sc.ControllerConn = sc.Conn
			sc.controllerConnAddress = sc.Config.Address
		} else {
			sc.ControllerConn, err = utils.Connect(sc.Config.ControllerAddress)
			Expect(err).NotTo(HaveOccurred())
			sc.controllerConnAddress = sc.Config.ControllerAddress
		}
	} else {
		By(fmt.Sprintf("reusing connection to CSI driver controller at %s", sc.controllerConnAddress))
	}

	By("creating mount and staging directories")

	// If callback function for creating target dir is specified, use it.
	targetPath, err := createMountTargetLocation(sc.Config.TargetPath, sc.Config.CreateTargetPathCmd, sc.Config.CreateTargetDir, sc.Config.CreatePathCmdTimeout)
	Expect(err).NotTo(HaveOccurred(), "failed to create target directory %s", targetPath)
	sc.TargetPath = targetPath

	// If callback function for creating staging dir is specified, use it.
	stagingPath, err := createMountTargetLocation(sc.Config.StagingPath, sc.Config.CreateStagingPathCmd, sc.Config.CreateStagingDir, sc.Config.CreatePathCmdTimeout)
	Expect(err).NotTo(HaveOccurred(), "failed to create staging directory %s", stagingPath)
	sc.StagingPath = stagingPath
}

// Teardown removes the directories created by Setup. The gRPC connections
// are intentionally left open; see the comment at the end of this function.
func (sc *SanityContext) Teardown() {
	// Delete the created paths if any.
	removeMountTargetLocation(sc.TargetPath, sc.Config.RemoveTargetPathCmd, sc.Config.RemoveTargetPath, sc.Config.RemovePathCmdTimeout)

	removeMountTargetLocation(sc.StagingPath, sc.Config.RemoveStagingPathCmd, sc.Config.RemoveStagingPath, sc.Config.RemovePathCmdTimeout)

	// We intentionally do not close the connection to the CSI
	// driver here because the large amount of connection attempts
	// caused test failures
	// (https://github.com/kubernetes-csi/csi-test/issues/101). We
	// could fix this with retries
	// (https://github.com/kubernetes-csi/csi-test/pull/97) but
	// that requires more discussion, so instead we just connect
	// once per process instead of once per test case. This was
	// also said to be faster
	// (https://github.com/kubernetes-csi/csi-test/pull/98).
}

// createMountTargetLocation takes a target path parameter and creates the
// target path using a custom command, custom function or falls back to the
// default using mkdir and returns the new target path.
func createMountTargetLocation(targetPath string, createPathCmd string, customCreateDir func(string) (string, error), timeout int) (string, error) {
	// Return the target path if empty.
	if targetPath == "" {
		return targetPath, nil
	}

	var newTargetPath string

	if createPathCmd != "" {
		// Create the target path using the create path command.
		// The command gets at most `timeout` seconds to complete.
		ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
		defer cancel()

		cmd := exec.CommandContext(ctx, createPathCmd, targetPath)
		cmd.Stderr = os.Stderr
		out, err := cmd.Output()
		if err != nil {
			return "", fmt.Errorf("target path creation command %s failed: %v", createPathCmd, err)
		}
		// Set the command's stdout as the new target path.
		newTargetPath = strings.TrimSpace(string(out))
	} else if customCreateDir != nil {
		// Create the target path using the custom create dir function.
		newpath, err := customCreateDir(targetPath)
		if err != nil {
			return "", err
		}
		newTargetPath = newpath
	} else {
		// Create the target path. Only the directory itself
		// and not its parents get created, and it is an error
		// if the directory already exists.
		if err := os.Mkdir(targetPath, 0755); err != nil {
			return "", err
		}
		newTargetPath = targetPath
	}

	return newTargetPath, nil
}

// removeMountTargetLocation takes a target path parameter and removes the path
// using a custom command, custom function or falls back to the default removal
// by deleting the path on the host.
func removeMountTargetLocation(targetPath string, removePathCmd string, customRemovePath func(string) error, timeout int) error { if targetPath == "" { return nil } if removePathCmd != "" { ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second) defer cancel() cmd := exec.CommandContext(ctx, removePathCmd, targetPath) cmd.Stderr = os.Stderr _, err := cmd.Output() if err != nil { return fmt.Errorf("target path removal command %s failed: %v", removePathCmd, err) } } else if customRemovePath != nil { if err := customRemovePath(targetPath); err != nil { return err } } else { // It's an error if the directory is not empty by now. return os.Remove(targetPath) } return nil } func loadSecrets(path string) (*CSISecrets, error) { var creds CSISecrets yamlFile, err := ioutil.ReadFile(path) if err != nil { return &creds, fmt.Errorf("failed to read file %q: #%v", path, err) } err = yaml.Unmarshal(yamlFile, &creds) if err != nil { return &creds, fmt.Errorf("error unmarshaling yaml: #%v", err) } return &creds, nil } var uniqueSuffix = "-" + PseudoUUID() // PseudoUUID returns a unique string generated from random // bytes, empty string in case of error. func PseudoUUID() string { b := make([]byte, 8) if _, err := rand.Read(b); err != nil { // Shouldn't happen?! return "" } return fmt.Sprintf("%08X-%08X", b[0:4], b[4:8]) } // UniqueString returns a unique string by appending a random // number. In case of an error, just the prefix is returned, so it // alone should already be fairly unique. func UniqueString(prefix string) string { return prefix + uniqueSuffix }
libopenstorage/openstorage
vendor/github.com/kubernetes-csi/csi-test/pkg/sanity/sanity.go
GO
apache-2.0
12,700
/* * APDPlat - Application Product Development Platform * Copyright (c) 2013, 杨尚川, yang-shangchuan@qq.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.apdplat.superword.tools; import org.apache.commons.lang.StringUtils; import org.apdplat.superword.tools.WordLinker.Dictionary; import org.jsoup.Connection; import org.jsoup.Jsoup; import org.jsoup.nodes.Element; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * Created by ysc on 12/5/15. 
*/ public class Pronunciation { private static final Logger LOGGER = LoggerFactory.getLogger(Pronunciation.class); public static final String ICIBA_CSS_PATH = "div.base-speak span"; public static final String YOUDAO_CSS_PATH = "span.pronounce"; public static final String OXFORD_CSS_PATH = "header.entryHeader div.headpron"; public static final String WEBSTER_CSS_PATH = "div.word-attributes span.pr"; public static final String COLLINS_CSS_PATH = ""; public static final String CAMBRIDGE_CSS_PATH = ""; public static final String MACMILLAN_CSS_PATH = ""; public static final String HERITAGE_CSS_PATH = ""; public static final String WIKTIONARY_CSS_PATH = ""; public static final String WORDNET_CSS_PATH = ""; public static final String RANDOMHOUSE_CSS_PATH = ""; private static final String ACCEPT = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"; private static final String ENCODING = "gzip, deflate"; private static final String LANGUAGE = "zh-cn,zh;q=0.8,en-us;q=0.5,en;q=0.3"; private static final String CONNECTION = "keep-alive"; private static final String HOST = "www.iciba.com"; private static final String REFERER = "http://www.iciba.com/"; private static final String USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:36.0) Gecko/20100101 Firefox/36.0"; public static String getPronunciationString(Dictionary dictionary, String word, String joinString) { return concat(getPronunciation(dictionary, word), joinString); } public static String concat(List<String> list, String joinString){ if(list.isEmpty()){ return ""; } StringBuilder string = new StringBuilder(); list.forEach(d -> string.append(d).append(joinString)); int len = string.length()-joinString.length(); if(len < 1){ return ""; } string.setLength(len); return string.toString(); } public static List<String> getPronunciation(Dictionary dictionary, String word){ switch (dictionary){ case ICIBA: return getPronunciationForICIBA(word); case YOUDAO: return getPronunciationForYOUDAO(word); case 
COLLINS: return getPronunciationForCOLLINS(word); case WEBSTER: return getPronunciationForWEBSTER(word); case OXFORD: return getPronunciationForOXFORD(word); case CAMBRIDGE: return getPronunciationForCAMBRIDGE(word); case MACMILLAN: return getPronunciationForMACMILLAN(word); case HERITAGE: return getPronunciationForHERITAGE(word); case WIKTIONARY: return getPronunciationForWIKTIONARY(word); case WORDNET: return getPronunciationForWORDNET(word); case RANDOMHOUSE: return getPronunciationForRANDOMHOUSE(word); } return getPronunciationForICIBA(word); } public static List<String> getPronunciationForICIBA(String word){ return parsePronunciation(WordLinker.ICIBA + word, ICIBA_CSS_PATH, word, Dictionary.ICIBA); } public static List<String> getPronunciationForYOUDAO(String word){ return parsePronunciation(WordLinker.YOUDAO + word, YOUDAO_CSS_PATH, word, Dictionary.YOUDAO); } public static List<String> getPronunciationForCOLLINS(String word){ return parsePronunciation(WordLinker.COLLINS + word, COLLINS_CSS_PATH, word, Dictionary.COLLINS); } public static List<String> getPronunciationForWEBSTER(String word){ return parsePronunciation(WordLinker.WEBSTER + word, WEBSTER_CSS_PATH, word, Dictionary.WEBSTER); } public static List<String> getPronunciationForOXFORD(String word){ return parsePronunciation(WordLinker.OXFORD + word, OXFORD_CSS_PATH, word, Dictionary.OXFORD); } public static List<String> getPronunciationForCAMBRIDGE(String word){ return parsePronunciation(WordLinker.CAMBRIDGE + word, CAMBRIDGE_CSS_PATH, word, Dictionary.CAMBRIDGE); } public static List<String> getPronunciationForMACMILLAN(String word){ return parsePronunciation(WordLinker.MACMILLAN + word, MACMILLAN_CSS_PATH, word, Dictionary.MACMILLAN); } public static List<String> getPronunciationForHERITAGE(String word){ return parsePronunciation(WordLinker.HERITAGE + word, HERITAGE_CSS_PATH, word, Dictionary.HERITAGE); } public static List<String> getPronunciationForWIKTIONARY(String word){ return 
parsePronunciation(WordLinker.WIKTIONARY + word, WIKTIONARY_CSS_PATH, word, Dictionary.WIKTIONARY); } public static List<String> getPronunciationForWORDNET(String word){ return parsePronunciation(WordLinker.WORDNET + word, WORDNET_CSS_PATH, word, Dictionary.WORDNET); } public static List<String> getPronunciationForRANDOMHOUSE(String word){ return parsePronunciation(WordLinker.RANDOMHOUSE + word, RANDOMHOUSE_CSS_PATH, word, Dictionary.RANDOMHOUSE); } public static List<String> parsePronunciation(String url, String cssPath, String word, Dictionary dictionary){ String wordPronunciation = MySQLUtils.getWordPronunciation(word, dictionary.name()); if(StringUtils.isNotBlank(wordPronunciation)) { return Arrays.asList(wordPronunciation.split(" \\| ")); } String html = getContent(url); List<String> list = parsePronunciationFromHtml(html, cssPath, word, dictionary); if(!list.isEmpty()){ MySQLUtils.saveWordPronunciation(word, dictionary.name(), concat(list, " | ")); } return list; } public static List<String> parsePronunciationFromHtml(String html, String cssPath, String word, Dictionary dictionary){ List<String> list = new ArrayList<>(); try { for (Element element : Jsoup.parse(html).select(cssPath)) { String pronunciation = element.text(); if (StringUtils.isNotBlank(pronunciation)) { pronunciation = pronunciation.replace("Pronunciation:", ""); pronunciation = pronunciation.trim(); if(!list.contains(pronunciation)) { list.add(pronunciation); } } } } catch (Exception e){ LOGGER.error("解析音标出错:" + word, e); } return list; } public static String getContent(String url) { long start = System.currentTimeMillis(); String html = _getContent(url, 1000); LOGGER.info("获取拼音耗时: {}", TimeUtils.getTimeDes(System.currentTimeMillis()-start)); int times = 0; while(StringUtils.isNotBlank(html) && html.contains("非常抱歉,来自您ip的请求异常频繁")){ //使用新的IP地址 ProxyIp.toNewIp(); html = _getContent(url); if(++times > 2){ break; } } return html; } private static String _getContent(String url, int timeout) { 
Future<String> future = ThreadPool.EXECUTOR_SERVICE.submit(()->_getContent(url)); try { Thread.sleep(timeout); return future.get(1, TimeUnit.NANOSECONDS); } catch (Throwable e) { LOGGER.error("获取网页异常", e); } return ""; } private static String _getContent(String url) { Connection conn = Jsoup.connect(url) .header("Accept", ACCEPT) .header("Accept-Encoding", ENCODING) .header("Accept-Language", LANGUAGE) .header("Connection", CONNECTION) .header("Referer", REFERER) .header("Host", HOST) .header("User-Agent", USER_AGENT) .timeout(1000) .ignoreContentType(true); String html = ""; try { html = conn.post().html(); html = html.replaceAll("[\n\r]", ""); }catch (Exception e){ LOGGER.error("获取URL:" + url + "页面出错", e); } return html; } public static void main(String[] args) { System.out.println(getPronunciationString(Dictionary.ICIBA, "resume", " | ")); System.out.println(getPronunciationString(Dictionary.YOUDAO, "resume", " | ")); System.out.println(getPronunciationString(Dictionary.OXFORD, "resume", " | ")); System.out.println(getPronunciationString(Dictionary.WEBSTER, "resume", " | ")); } }
ysc/superword
src/main/java/org/apdplat/superword/tools/Pronunciation.java
Java
apache-2.0
9,799
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.eagle.security.hbase.parse;

import org.apache.eagle.dataproc.impl.storm.kafka.SpoutKafkaMessageDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;

/**
 * Kafka spout deserializer that turns one raw HBase audit log line into a
 * sorted field map for downstream processing.
 */
public class HbaseAuditLogKafkaDeserializer implements SpoutKafkaMessageDeserializer {
    private static final Logger LOG = LoggerFactory.getLogger(HbaseAuditLogKafkaDeserializer.class);

    private Properties props;

    public HbaseAuditLogKafkaDeserializer(Properties props){
        this.props = props;
    }

    /**
     * Parses one audit log line.
     *
     * @param arg0 raw message bytes from Kafka
     * @return a TreeMap of audit fields (action/host/status/request/scope/
     *         user/timestamp), or null when the line cannot be parsed
     */
    @Override
    public Object deserialize(byte[] arg0) {
        // Fix: decode with an explicit charset instead of the platform
        // default, so parsing does not vary between hosts/JVM settings.
        String logLine = new String(arg0, StandardCharsets.UTF_8);

        HbaseAuditLogParser parser = new HbaseAuditLogParser();
        try{
            HbaseAuditLogObject entity = parser.parse(logLine);
            if(entity == null) return null;
            Map<String, Object> map = new TreeMap<>();
            map.put("action", entity.action);
            map.put("host", entity.host);
            map.put("status", entity.status);
            map.put("request", entity.request);
            map.put("scope", entity.scope);
            map.put("user", entity.user);
            map.put("timestamp", entity.timestamp);
            return map;
        }catch(Exception ex){
            // Malformed lines are logged and dropped rather than crashing
            // the topology.
            LOG.error("Failing parse audit log:" + logLine, ex);
            return null;
        }
    }
}
rlugojr/incubator-eagle
eagle-security/eagle-security-hbase-securitylog/src/main/java/org/apache/eagle/security/hbase/parse/HbaseAuditLogKafkaDeserializer.java
Java
apache-2.0
2,222
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.psi.impl; import com.intellij.codeInsight.AnnotationTargetUtil; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.lang.ASTNode; import com.intellij.lang.FileASTNode; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.filters.ElementFilter; import com.intellij.psi.impl.light.LightClassReference; import com.intellij.psi.impl.light.LightJavaModule; import com.intellij.psi.impl.source.DummyHolder; import com.intellij.psi.impl.source.PsiClassReferenceType; import com.intellij.psi.impl.source.PsiImmediateClassType; import com.intellij.psi.impl.source.resolve.ResolveCache; import com.intellij.psi.impl.source.tree.*; import com.intellij.psi.javadoc.PsiDocComment; import com.intellij.psi.scope.ElementClassHint; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.scope.processor.FilterScopeProcessor; import com.intellij.psi.scope.util.PsiScopesUtil; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.PackageScope; import com.intellij.psi.search.SearchScope; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.PsiUtilCore; import com.intellij.util.IncorrectOperationException; import com.intellij.util.PairFunction; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NonNls; import 
org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.List; import java.util.Map; public class PsiImplUtil { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.PsiImplUtil"); private PsiImplUtil() { } @NotNull public static PsiMethod[] getConstructors(@NotNull PsiClass aClass) { List<PsiMethod> result = null; for (PsiMethod method : aClass.getMethods()) { if (method.isConstructor() && method.getName().equals(aClass.getName())) { if (result == null) result = ContainerUtil.newSmartList(); result.add(method); } } return result == null ? PsiMethod.EMPTY_ARRAY : result.toArray(PsiMethod.EMPTY_ARRAY); } @Nullable public static PsiAnnotationMemberValue findDeclaredAttributeValue(@NotNull PsiAnnotation annotation, @NonNls @Nullable String attributeName) { PsiNameValuePair attribute = AnnotationUtil.findDeclaredAttribute(annotation, attributeName); return attribute == null ? null : attribute.getValue(); } @Nullable public static PsiAnnotationMemberValue findAttributeValue(@NotNull PsiAnnotation annotation, @Nullable @NonNls String attributeName) { final PsiAnnotationMemberValue value = findDeclaredAttributeValue(annotation, attributeName); if (value != null) return value; if (attributeName == null) attributeName = "value"; final PsiJavaCodeReferenceElement referenceElement = annotation.getNameReferenceElement(); if (referenceElement != null) { PsiElement resolved = referenceElement.resolve(); if (resolved != null) { PsiMethod[] methods = ((PsiClass)resolved).findMethodsByName(attributeName, false); for (PsiMethod method : methods) { if (PsiUtil.isAnnotationMethod(method)) { return ((PsiAnnotationMethod)method).getDefaultValue(); } } } } return null; } @NotNull public static PsiTypeParameter[] getTypeParameters(@NotNull PsiTypeParameterListOwner owner) { final PsiTypeParameterList typeParameterList = owner.getTypeParameterList(); if (typeParameterList != null) { return 
typeParameterList.getTypeParameters(); } return PsiTypeParameter.EMPTY_ARRAY; } @NotNull public static PsiJavaCodeReferenceElement[] namesToPackageReferences(@NotNull PsiManager manager, @NotNull String[] names) { PsiJavaCodeReferenceElement[] refs = new PsiJavaCodeReferenceElement[names.length]; for (int i = 0; i < names.length; i++) { String name = names[i]; try { refs[i] = JavaPsiFacade.getElementFactory(manager.getProject()).createPackageReferenceElement(name); } catch (IncorrectOperationException e) { LOG.error(e); } } return refs; } public static int getParameterIndex(@NotNull PsiParameter parameter, @NotNull PsiParameterList parameterList) { PsiElement parameterParent = parameter.getParent(); assert parameterParent == parameterList : parameterList +"; "+parameterParent; PsiParameter[] parameters = parameterList.getParameters(); for (int i = 0; i < parameters.length; i++) { PsiParameter paramInList = parameters[i]; if (parameter.equals(paramInList)) return i; } String name = parameter.getName(); PsiParameter suspect = null; int i; for (i = parameters.length - 1; i >= 0; i--) { PsiParameter paramInList = parameters[i]; if (Comparing.equal(name, paramInList.getName())) { suspect = paramInList; break; } } String message = parameter + ":" + parameter.getClass() + " not found among parameters: " + Arrays.asList(parameters) + "." + " parameterList' parent: " + parameterList.getParent() + ";" + " parameter.isValid()=" + parameter.isValid() + ";" + " parameterList.isValid()= " + parameterList.isValid() + ";" + " parameterList stub: " + (parameterList instanceof StubBasedPsiElement ? ((StubBasedPsiElement)parameterList).getStub() : "---") + "; " + " parameter stub: "+(parameter instanceof StubBasedPsiElement ? ((StubBasedPsiElement)parameter).getStub() : "---") + ";" + " suspect: " + suspect +" (index="+i+"); " + (suspect==null?null:suspect.getClass()) + " suspect stub: "+(suspect instanceof StubBasedPsiElement ? 
((StubBasedPsiElement)suspect).getStub() : suspect == null ? "-null-" : "---"+suspect.getClass()) + ";" + " parameter.equals(suspect) = " + parameter.equals(suspect) + "; " + " parameter.getNode() == suspect.getNode(): " + (parameter.getNode() == (suspect==null ? null : suspect.getNode())) + "; " + "." ; LOG.error(message); return i; } public static int getTypeParameterIndex(@NotNull PsiTypeParameter typeParameter, @NotNull PsiTypeParameterList typeParameterList) { PsiTypeParameter[] typeParameters = typeParameterList.getTypeParameters(); for (int i = 0; i < typeParameters.length; i++) { if (typeParameter.equals(typeParameters[i])) return i; } LOG.error(typeParameter + " in " + typeParameterList); return -1; } @NotNull public static Object[] getReferenceVariantsByFilter(@NotNull PsiJavaCodeReferenceElement reference, @NotNull ElementFilter filter) { FilterScopeProcessor processor = new FilterScopeProcessor(filter); PsiScopesUtil.resolveAndWalk(processor, reference, null, true); return processor.getResults().toArray(); } public static boolean processDeclarationsInMethod(@NotNull final PsiMethod method, @NotNull final PsiScopeProcessor processor, @NotNull final ResolveState state, PsiElement lastParent, @NotNull final PsiElement place) { if (lastParent instanceof DummyHolder) lastParent = lastParent.getFirstChild(); boolean fromBody = lastParent instanceof PsiCodeBlock; PsiTypeParameterList typeParameterList = method.getTypeParameterList(); return processDeclarationsInMethodLike(method, processor, state, place, fromBody, typeParameterList); } public static boolean processDeclarationsInLambda(@NotNull final PsiLambdaExpression lambda, @NotNull final PsiScopeProcessor processor, @NotNull final ResolveState state, final PsiElement lastParent, @NotNull final PsiElement place) { final boolean fromBody = lastParent != null && lastParent == lambda.getBody(); return processDeclarationsInMethodLike(lambda, processor, state, place, fromBody, null); } private static boolean 
processDeclarationsInMethodLike(@NotNull final PsiParameterListOwner element, @NotNull final PsiScopeProcessor processor, @NotNull final ResolveState state, @NotNull final PsiElement place, final boolean fromBody, @Nullable final PsiTypeParameterList typeParameterList) { processor.handleEvent(PsiScopeProcessor.Event.SET_DECLARATION_HOLDER, element); if (typeParameterList != null) { final ElementClassHint hint = processor.getHint(ElementClassHint.KEY); if (hint == null || hint.shouldProcess(ElementClassHint.DeclarationKind.CLASS)) { if (!typeParameterList.processDeclarations(processor, state, null, place)) return false; } } if (fromBody) { final PsiParameter[] parameters = element.getParameterList().getParameters(); for (PsiParameter parameter : parameters) { if (!processor.execute(parameter, state)) return false; } } return true; } public static boolean processDeclarationsInResourceList(@NotNull final PsiResourceList resourceList, @NotNull final PsiScopeProcessor processor, @NotNull final ResolveState state, final PsiElement lastParent) { final ElementClassHint hint = processor.getHint(ElementClassHint.KEY); if (hint != null && !hint.shouldProcess(ElementClassHint.DeclarationKind.VARIABLE)) return true; for (PsiResourceListElement resource : resourceList) { if (resource == lastParent) break; if (resource instanceof PsiResourceVariable && !processor.execute(resource, state)) return false; } return true; } public static boolean hasTypeParameters(@NotNull PsiTypeParameterListOwner owner) { final PsiTypeParameterList typeParameterList = owner.getTypeParameterList(); return typeParameterList != null && typeParameterList.getTypeParameters().length != 0; } @NotNull public static PsiType[] typesByReferenceParameterList(@NotNull PsiReferenceParameterList parameterList) { PsiTypeElement[] typeElements = parameterList.getTypeParameterElements(); return typesByTypeElements(typeElements); } @NotNull public static PsiType[] typesByTypeElements(@NotNull PsiTypeElement[] 
typeElements) { PsiType[] types = PsiType.createArray(typeElements.length); for (int i = 0; i < types.length; i++) { types[i] = typeElements[i].getType(); } if (types.length == 1 && types[0] instanceof PsiDiamondType) { return ((PsiDiamondType)types[0]).resolveInferredTypes().getTypes(); } return types; } @NotNull public static PsiType getType(@NotNull PsiClassObjectAccessExpression classAccessExpression) { GlobalSearchScope resolveScope = classAccessExpression.getResolveScope(); PsiManager manager = classAccessExpression.getManager(); final PsiClass classClass = JavaPsiFacade.getInstance(manager.getProject()).findClass("java.lang.Class", resolveScope); if (classClass == null) { return new PsiClassReferenceType(new LightClassReference(manager, "Class", "java.lang.Class", resolveScope), null); } if (!PsiUtil.isLanguageLevel5OrHigher(classAccessExpression)) { //Raw java.lang.Class return JavaPsiFacade.getElementFactory(manager.getProject()).createType(classClass); } PsiSubstitutor substitutor = PsiSubstitutor.EMPTY; PsiType operandType = classAccessExpression.getOperand().getType(); if (operandType instanceof PsiPrimitiveType && !PsiType.NULL.equals(operandType)) { if (PsiType.VOID.equals(operandType)) { operandType = JavaPsiFacade.getElementFactory(manager.getProject()) .createTypeByFQClassName("java.lang.Void", classAccessExpression.getResolveScope()); } else { operandType = ((PsiPrimitiveType)operandType).getBoxedType(classAccessExpression); } } final PsiTypeParameter[] typeParameters = classClass.getTypeParameters(); if (typeParameters.length == 1) { substitutor = substitutor.put(typeParameters[0], operandType); } return new PsiImmediateClassType(classClass, substitutor); } @Nullable public static PsiAnnotation findAnnotation(@Nullable PsiAnnotationOwner annotationOwner, @NotNull String qualifiedName) { if (annotationOwner == null) return null; PsiAnnotation[] annotations = annotationOwner.getAnnotations(); if (annotations.length == 0) return null; String 
shortName = StringUtil.getShortName(qualifiedName); for (PsiAnnotation annotation : annotations) { PsiJavaCodeReferenceElement referenceElement = annotation.getNameReferenceElement(); if (referenceElement != null && shortName.equals(referenceElement.getReferenceName())) { if (qualifiedName.equals(annotation.getQualifiedName())) { return annotation; } } } return null; } /** @deprecated use {@link AnnotationTargetUtil#findAnnotationTarget(PsiAnnotation, PsiAnnotation.TargetType...)} (to be removed ion IDEA 17) */ @Deprecated public static PsiAnnotation.TargetType findApplicableTarget(@NotNull PsiAnnotation annotation, @NotNull PsiAnnotation.TargetType... types) { return AnnotationTargetUtil.findAnnotationTarget(annotation, types); } /** @deprecated use {@link AnnotationTargetUtil#findAnnotationTarget(PsiClass, PsiAnnotation.TargetType...)} (to be removed ion IDEA 17) */ @Deprecated public static PsiAnnotation.TargetType findApplicableTarget(@NotNull PsiClass annotationType, @NotNull PsiAnnotation.TargetType... types) { return AnnotationTargetUtil.findAnnotationTarget(annotationType, types); } /** @deprecated use {@link AnnotationTargetUtil#getTargetsForLocation(PsiAnnotationOwner)} (to be removed ion IDEA 17) */ @Deprecated @NotNull public static PsiAnnotation.TargetType[] getTargetsForLocation(@Nullable PsiAnnotationOwner owner) { return AnnotationTargetUtil.getTargetsForLocation(owner); } @Nullable public static ASTNode findDocComment(@NotNull CompositeElement element) { TreeElement node = element.getFirstChildNode(); while (node != null && isWhitespaceOrComment(node) && !(node.getPsi() instanceof PsiDocComment)) { node = node.getTreeNext(); } return node == null || node.getElementType() != JavaDocElementType.DOC_COMMENT ? 
null : node; } /** * Types should be proceed by the callers themselves */ @Deprecated public static PsiType normalizeWildcardTypeByPosition(@NotNull PsiType type, @NotNull PsiExpression expression) { PsiUtilCore.ensureValid(expression); PsiUtil.ensureValidType(type); PsiExpression topLevel = expression; while (topLevel.getParent() instanceof PsiArrayAccessExpression && ((PsiArrayAccessExpression)topLevel.getParent()).getArrayExpression() == topLevel) { topLevel = (PsiExpression)topLevel.getParent(); } if (topLevel instanceof PsiArrayAccessExpression && !PsiUtil.isAccessedForWriting(topLevel)) { return PsiUtil.captureToplevelWildcards(type, expression); } final PsiType normalized = doNormalizeWildcardByPosition(type, expression, topLevel); LOG.assertTrue(normalized.isValid(), type); if (normalized instanceof PsiClassType && !PsiUtil.isAccessedForWriting(topLevel)) { return PsiUtil.captureToplevelWildcards(normalized, expression); } return normalized; } private static PsiType doNormalizeWildcardByPosition(PsiType type, @NotNull PsiExpression expression, @NotNull PsiExpression topLevel) { if (type instanceof PsiWildcardType) { final PsiWildcardType wildcardType = (PsiWildcardType)type; if (PsiUtil.isAccessedForWriting(topLevel)) { return wildcardType.isSuper() ? 
wildcardType.getBound() : PsiCapturedWildcardType.create(wildcardType, expression); } else { if (wildcardType.isExtends()) { return wildcardType.getBound(); } return PsiType.getJavaLangObject(expression.getManager(), expression.getResolveScope()); } } if (type instanceof PsiArrayType) { final PsiType componentType = ((PsiArrayType)type).getComponentType(); final PsiType normalizedComponentType = doNormalizeWildcardByPosition(componentType, expression, topLevel); if (normalizedComponentType != componentType) { return normalizedComponentType.createArrayType(); } } return type; } @NotNull public static SearchScope getMemberUseScope(@NotNull PsiMember member) { PsiFile file = member.getContainingFile(); PsiElement topElement = file == null ? member : file; Project project = topElement.getProject(); final GlobalSearchScope maximalUseScope = ResolveScopeManager.getInstance(project).getUseScope(topElement); if (isInServerPage(file)) return maximalUseScope; PsiClass aClass = member.getContainingClass(); if (aClass instanceof PsiAnonymousClass && !(aClass instanceof PsiEnumConstantInitializer && member instanceof PsiMethod && member.hasModifierProperty(PsiModifier.PUBLIC) && ((PsiMethod)member).findSuperMethods().length > 0)) { //member from anonymous class can be called from outside the class PsiElement methodCallExpr = PsiUtil.isLanguageLevel8OrHigher(aClass) ? PsiTreeUtil.getTopmostParentOfType(aClass, PsiStatement.class) : PsiTreeUtil.getParentOfType(aClass, PsiMethodCallExpression.class); return new LocalSearchScope(methodCallExpr != null ? methodCallExpr : aClass); } PsiModifierList modifierList = member.getModifierList(); int accessLevel = modifierList == null ? 
PsiUtil.ACCESS_LEVEL_PUBLIC : PsiUtil.getAccessLevel(modifierList); if (accessLevel == PsiUtil.ACCESS_LEVEL_PUBLIC || accessLevel == PsiUtil.ACCESS_LEVEL_PROTECTED) { if (member instanceof PsiMethod && ((PsiMethod)member).isConstructor()) { PsiClass containingClass = member.getContainingClass(); if (containingClass != null) { //constructors cannot be overridden so their use scope can't be wider than their class's return containingClass.getUseScope(); } } return maximalUseScope; // class use scope doesn't matter, since another very visible class can inherit from aClass } if (accessLevel == PsiUtil.ACCESS_LEVEL_PRIVATE) { PsiClass topClass = PsiUtil.getTopLevelClass(member); return topClass != null ? new LocalSearchScope(topClass) : file == null ? maximalUseScope : new LocalSearchScope(file); } if (file instanceof PsiJavaFile) { PsiPackage aPackage = JavaPsiFacade.getInstance(project).findPackage(((PsiJavaFile)file).getPackageName()); if (aPackage != null) { SearchScope scope = PackageScope.packageScope(aPackage, false); return scope.intersectWith(maximalUseScope); } } return maximalUseScope; } public static boolean isInServerPage(@Nullable final PsiElement element) { return getServerPageFile(element) != null; } @Nullable private static ServerPageFile getServerPageFile(final PsiElement element) { final PsiFile psiFile = PsiUtilCore.getTemplateLanguageFile(element); return psiFile instanceof ServerPageFile ? 
(ServerPageFile)psiFile : null; } public static PsiElement setName(@NotNull PsiElement element, @NotNull String name) throws IncorrectOperationException { PsiManager manager = element.getManager(); PsiElementFactory factory = JavaPsiFacade.getElementFactory(manager.getProject()); PsiIdentifier newNameIdentifier = factory.createIdentifier(name); return element.replace(newNameIdentifier); } public static boolean isDeprecatedByAnnotation(@NotNull PsiModifierListOwner owner) { return AnnotationUtil.findAnnotation(owner, CommonClassNames.JAVA_LANG_DEPRECATED) != null; } public static boolean isDeprecatedByDocTag(@NotNull PsiJavaDocumentedElement owner) { PsiDocComment docComment = owner.getDocComment(); return docComment != null && docComment.findTagByName("deprecated") != null; } @Nullable public static PsiJavaDocumentedElement findDocCommentOwner(@NotNull PsiDocComment comment) { PsiElement parent = comment.getParent(); if (parent instanceof PsiJavaDocumentedElement) { PsiJavaDocumentedElement owner = (PsiJavaDocumentedElement)parent; if (owner.getDocComment() == comment) { return owner; } } return null; } @Nullable public static PsiAnnotationMemberValue setDeclaredAttributeValue(@NotNull PsiAnnotation psiAnnotation, @Nullable String attributeName, @Nullable PsiAnnotationMemberValue value, @NotNull PairFunction<? super Project, ? super String, ? 
extends PsiAnnotation> annotationCreator) { PsiAnnotationMemberValue existing = psiAnnotation.findDeclaredAttributeValue(attributeName); if (value == null) { if (existing == null) { return null; } existing.getParent().delete(); } else { if (existing != null) { ((PsiNameValuePair)existing.getParent()).setValue(value); } else { PsiNameValuePair[] attributes = psiAnnotation.getParameterList().getAttributes(); if (attributes.length == 1) { PsiNameValuePair attribute = attributes[0]; if (attribute.getName() == null) { PsiAnnotationMemberValue defValue = attribute.getValue(); assert defValue != null : attribute; attribute.replace(createNameValuePair(defValue, PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME + "=", annotationCreator)); } } boolean allowNoName = attributes.length == 0 && ("value".equals(attributeName) || null == attributeName); final String namePrefix = allowNoName ? "" : attributeName + "="; psiAnnotation.getParameterList().addBefore(createNameValuePair(value, namePrefix, annotationCreator), null); } } return psiAnnotation.findDeclaredAttributeValue(attributeName); } private static PsiNameValuePair createNameValuePair(@NotNull PsiAnnotationMemberValue value, @NotNull String namePrefix, @NotNull PairFunction<? super Project, ? super String, ? 
extends PsiAnnotation> annotationCreator) { return annotationCreator.fun(value.getProject(), "@A(" + namePrefix + value.getText() + ")").getParameterList().getAttributes()[0]; } @Nullable public static ASTNode skipWhitespaceAndComments(final ASTNode node) { return TreeUtil.skipWhitespaceAndComments(node, true); } @Nullable public static ASTNode skipWhitespaceCommentsAndTokens(final ASTNode node, @NotNull TokenSet alsoSkip) { return TreeUtil.skipWhitespaceCommentsAndTokens(node, alsoSkip, true); } public static boolean isWhitespaceOrComment(ASTNode element) { return TreeUtil.isWhitespaceOrComment(element); } @Nullable public static ASTNode skipWhitespaceAndCommentsBack(final ASTNode node) { if (node == null) return null; if (!isWhitespaceOrComment(node)) return node; ASTNode parent = node.getTreeParent(); ASTNode prev = node; while (prev instanceof CompositeElement) { if (!isWhitespaceOrComment(prev)) return prev; prev = prev.getTreePrev(); } if (prev == null) return null; ASTNode firstChildNode = parent.getFirstChildNode(); ASTNode lastRelevant = null; while (firstChildNode != prev) { if (!isWhitespaceOrComment(firstChildNode)) lastRelevant = firstChildNode; firstChildNode = firstChildNode.getTreeNext(); } return lastRelevant; } @Nullable public static ASTNode findStatementChild(@NotNull CompositePsiElement statement) { if (DebugUtil.CHECK_INSIDE_ATOMIC_ACTION_ENABLED) { ApplicationManager.getApplication().assertReadAccessAllowed(); } for (ASTNode element = statement.getFirstChildNode(); element != null; element = element.getTreeNext()) { if (element.getPsi() instanceof PsiStatement) return element; } return null; } @NotNull public static PsiStatement[] getChildStatements(@NotNull CompositeElement psiCodeBlock) { ApplicationManager.getApplication().assertReadAccessAllowed(); // no lock is needed because all chameleons are expanded already int count = 0; for (ASTNode child1 = psiCodeBlock.getFirstChildNode(); child1 != null; child1 = child1.getTreeNext()) { if 
(child1.getPsi() instanceof PsiStatement) { count++; } } PsiStatement[] result = PsiStatement.ARRAY_FACTORY.create(count); if (count == 0) return result; int idx = 0; for (ASTNode child = psiCodeBlock.getFirstChildNode(); child != null && idx < count; child = child.getTreeNext()) { PsiElement element = child.getPsi(); if (element instanceof PsiStatement) { result[idx++] = (PsiStatement)element; } } return result; } public static boolean isVarArgs(@NotNull PsiMethod method) { PsiParameter[] parameters = method.getParameterList().getParameters(); return parameters.length > 0 && parameters[parameters.length - 1].isVarArgs(); } public static PsiElement handleMirror(PsiElement element) { return element instanceof PsiMirrorElement ? ((PsiMirrorElement)element).getPrototype() : element; } @Nullable public static PsiModifierList findNeighbourModifierList(@NotNull PsiJavaCodeReferenceElement ref) { PsiElement parent = PsiTreeUtil.skipParentsOfType(ref, PsiJavaCodeReferenceElement.class); if (parent instanceof PsiTypeElement) { PsiElement grandParent = parent.getParent(); if (grandParent instanceof PsiModifierListOwner) { return ((PsiModifierListOwner)grandParent).getModifierList(); } } return null; } public static boolean isTypeAnnotation(@Nullable PsiElement element) { return element instanceof PsiAnnotation && AnnotationTargetUtil.isTypeAnnotation((PsiAnnotation)element); } public static void collectTypeUseAnnotations(@NotNull PsiModifierList modifierList, @NotNull List<? 
super PsiAnnotation> annotations) { for (PsiAnnotation annotation : modifierList.getAnnotations()) { if (AnnotationTargetUtil.isTypeAnnotation(annotation)) { annotations.add(annotation); } } } private static final Key<Boolean> TYPE_ANNO_MARK = Key.create("type.annotation.mark"); public static void markTypeAnnotations(@NotNull PsiTypeElement typeElement) { PsiElement left = PsiTreeUtil.skipSiblingsBackward(typeElement, PsiComment.class, PsiWhiteSpace.class, PsiTypeParameterList.class); if (left instanceof PsiModifierList) { for (PsiAnnotation annotation : ((PsiModifierList)left).getAnnotations()) { if (AnnotationTargetUtil.isTypeAnnotation(annotation)) { annotation.putUserData(TYPE_ANNO_MARK, Boolean.TRUE); } } } } public static void deleteTypeAnnotations(@NotNull PsiTypeElement typeElement) { PsiElement left = PsiTreeUtil.skipSiblingsBackward(typeElement, PsiComment.class, PsiWhiteSpace.class, PsiTypeParameterList.class); if (left instanceof PsiModifierList) { for (PsiAnnotation annotation : ((PsiModifierList)left).getAnnotations()) { if (TYPE_ANNO_MARK.get(annotation) == Boolean.TRUE) { annotation.delete(); } } } } public static boolean isLeafElementOfType(@Nullable PsiElement element, @NotNull IElementType type) { return element instanceof LeafElement && ((LeafElement)element).getElementType() == type; } public static boolean isLeafElementOfType(PsiElement element, @NotNull TokenSet tokenSet) { return element instanceof LeafElement && tokenSet.contains(((LeafElement)element).getElementType()); } public static PsiType buildTypeFromTypeString(@NotNull final String typeName, @NotNull final PsiElement context, @NotNull final PsiFile psiFile) { final PsiManager psiManager = psiFile.getManager(); if (typeName.indexOf('<') != -1 || typeName.indexOf('[') != -1 || typeName.indexOf('.') == -1) { try { return JavaPsiFacade.getElementFactory(psiManager.getProject()).createTypeFromText(typeName, context); } catch(Exception ignored) { } // invalid syntax will produce 
unresolved class type } PsiClass aClass = JavaPsiFacade.getInstance(psiManager.getProject()).findClass(typeName, context.getResolveScope()); PsiType resultType; if (aClass == null) { final LightClassReference ref = new LightClassReference( psiManager, PsiNameHelper.getShortClassName(typeName), typeName, PsiSubstitutor.EMPTY, psiFile ); resultType = new PsiClassReferenceType(ref, null); } else { PsiElementFactory factory = JavaPsiFacade.getElementFactory(psiManager.getProject()); PsiSubstitutor substitutor = factory.createRawSubstitutor(aClass); resultType = factory.createType(aClass, substitutor); } return resultType; } @NotNull public static <T extends PsiJavaCodeReferenceElement> JavaResolveResult[] multiResolveImpl(@NotNull T element, boolean incompleteCode, @NotNull ResolveCache.PolyVariantContextResolver<? super T> resolver) { FileASTNode fileElement = SharedImplUtil.findFileElement(element.getNode()); if (fileElement == null) { PsiUtilCore.ensureValid(element); LOG.error("fileElement == null!"); return JavaResolveResult.EMPTY_ARRAY; } PsiFile psiFile = SharedImplUtil.getContainingFile(fileElement); PsiManager manager = psiFile == null ? 
null : psiFile.getManager(); if (manager == null) { PsiUtilCore.ensureValid(element); LOG.error("getManager() == null!"); return JavaResolveResult.EMPTY_ARRAY; } boolean valid = psiFile.isValid(); if (!valid) { PsiUtilCore.ensureValid(element); LOG.error("psiFile.isValid() == false!"); return JavaResolveResult.EMPTY_ARRAY; } if (element instanceof PsiMethodReferenceExpression) { // method refs: do not cache results during parent conflict resolving, acceptable checks, etc final Map<PsiElement, PsiType> map = LambdaUtil.ourFunctionTypes.get(); if (map != null && map.containsKey(element)) { return (JavaResolveResult[])resolver.resolve(element, psiFile, incompleteCode); } } return multiResolveImpl(manager.getProject(), psiFile, element, incompleteCode, resolver); } @NotNull public static <T extends PsiJavaCodeReferenceElement> JavaResolveResult[] multiResolveImpl(@NotNull Project project, @NotNull PsiFile psiFile, @NotNull T element, boolean incompleteCode, @NotNull ResolveCache.PolyVariantContextResolver<? super T> resolver) { ResolveResult[] results = ResolveCache.getInstance(project).resolveWithCaching(element, resolver, true, incompleteCode, psiFile); return results.length == 0 ? JavaResolveResult.EMPTY_ARRAY : (JavaResolveResult[])results; } public static VirtualFile getModuleVirtualFile(@NotNull PsiJavaModule module) { return module instanceof LightJavaModule ? ((LightJavaModule)module).getRootVirtualFile() : module.getContainingFile().getVirtualFile(); } }
mdanielwork/intellij-community
java/java-psi-impl/src/com/intellij/psi/impl/PsiImplUtil.java
Java
apache-2.0
33,484
// Copyright 2014 Open Source Robotics Foundation, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "rclcpp/type_support_decl.hpp" #include "rclcpp/visibility_control.hpp" #include "rcl_interfaces/msg/list_parameters_result.hpp" #include "rcl_interfaces/msg/parameter_descriptor.hpp" #include "rcl_interfaces/msg/parameter_event.hpp" #include "rcl_interfaces/msg/set_parameters_result.hpp" #include "rcl_interfaces/srv/describe_parameters.hpp" #include "rcl_interfaces/srv/get_parameter_types.hpp" #include "rcl_interfaces/srv/get_parameters.hpp" #include "rcl_interfaces/srv/list_parameters.hpp" #include "rcl_interfaces/srv/set_parameters.hpp" #include "rcl_interfaces/srv/set_parameters_atomically.hpp" const rosidl_message_type_support_t * rclcpp::type_support::get_parameter_event_msg_type_support() { return rosidl_typesupport_cpp::get_message_type_support_handle< rcl_interfaces::msg::ParameterEvent >(); } const rosidl_message_type_support_t * rclcpp::type_support::get_set_parameters_result_msg_type_support() { return rosidl_typesupport_cpp::get_message_type_support_handle< rcl_interfaces::msg::SetParametersResult >(); } const rosidl_message_type_support_t * rclcpp::type_support::get_parameter_descriptor_msg_type_support() { return rosidl_typesupport_cpp::get_message_type_support_handle< rcl_interfaces::msg::ParameterDescriptor >(); } const rosidl_message_type_support_t * rclcpp::type_support::get_list_parameters_result_msg_type_support() { return 
rosidl_typesupport_cpp::get_message_type_support_handle< rcl_interfaces::msg::ListParametersResult >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_get_parameters_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::GetParameters >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_get_parameter_types_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::GetParameterTypes >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_set_parameters_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::SetParameters >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_list_parameters_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::ListParameters >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_describe_parameters_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::DescribeParameters >(); } const rosidl_service_type_support_t * rclcpp::type_support::get_set_parameters_atomically_srv_type_support() { return rosidl_typesupport_cpp::get_service_type_support_handle< rcl_interfaces::srv::SetParametersAtomically >(); }
ros2/rclcpp
rclcpp/src/rclcpp/type_support.cpp
C++
apache-2.0
3,465
/** * Copyright (C) 2016-2018 Harald Kuhn * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package rocks.bottery.bot.crypto; import java.io.InputStream; import java.security.KeyStore; import java.util.Base64; import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; /** * Utility for AES encrypt and decrypt * * keeps a transient copy of the key from the keystore for performance * * The password for the keystore and entry are always the same. It is retrieved from the system property * KEYSTORE_PASSWORD * * TO encrypt a value use this classes main method like this * * java ... 
AESHelper secretText -DKEYSTORE_PASSWORD=password * * @author hkuhn */ public class AESHelper { private static final String KEYSTORE_PASSWORD = "KEYSTORE_PASSWORD"; static final String ENCODING = "ISO-8859-1"; static final String KEYSTORE = "Bot.jks"; static final String ALIAS = "BotKey"; private static transient SecretKeySpec key; /** * Decrypt a string encrypted with this util * * @param value * the encrypted value * @return the decrypted value * @throws Exception * if something went wrong :) */ static String decrypt(String value) throws Exception { byte[] input = Base64.getDecoder().decode(value); byte[] result = doChiper(ALIAS, KEYSTORE, input, Cipher.DECRYPT_MODE); return new String(result, ENCODING); } /** * Encrypt a string * * @param value * the string to encrypt * @return the encrypted value * @throws Exception * if something went wrong :) */ static String encrypt(String value) throws Exception { byte[] input = value.getBytes(ENCODING); byte[] result = doChiper(ALIAS, KEYSTORE, input, Cipher.ENCRYPT_MODE); return Base64.getEncoder().encodeToString(result); } static byte[] doChiper(String alias, String keystore, byte[] value, int mode) throws Exception { Cipher cipher = Cipher.getInstance("AES"); SecretKeySpec spec = loadKey(alias, keystore); cipher.init(mode, spec); return cipher.doFinal(value); } static SecretKeySpec loadKey(String alias, String keystore) throws Exception { if (key != null) { return key; } InputStream is = null; try { is = Thread.currentThread().getContextClassLoader().getResourceAsStream(keystore); } catch (Exception e) { e.printStackTrace(); } String password = System.getProperty(KEYSTORE_PASSWORD); if (password == null || password.length() < 1) { throw new NullPointerException("password for keystore:" + keystore + " was not found"); } KeyStore ks = KeyStore.getInstance("JCEKS"); ks.load(is, password.toCharArray()); is.close(); key = (SecretKeySpec) ks.getKey(alias, password.toCharArray()); return key; } public static void main(String[] 
args) throws Exception { String encrypted = AESHelper.encrypt(args[0]); System.out.println(encrypted); String reborn = AESHelper.decrypt(encrypted); System.out.println(reborn); } }
hkuhn42/bottery
bottery.core/src/main/java/rocks/bottery/bot/crypto/AESHelper.java
Java
apache-2.0
3,630
/* * Copyright 2021 The Knative Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package v1alpha1 import ( "context" "testing" "github.com/google/go-cmp/cmp" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/utils/pointer" ) func TestConsumerGroupSetDefaults(t *testing.T) { tests := []struct { name string ctx context.Context given *ConsumerGroup want *ConsumerGroup }{ { name: "default replicas", ctx: context.Background(), given: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Template: ConsumerTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", }, }, }, }, want: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Template: ConsumerTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", }, }, Replicas: pointer.Int32Ptr(1), }, }, }, { name: "default selector", ctx: context.Background(), given: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Template: ConsumerTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Labels: map[string]string{"app": "app"}, }, }, Replicas: pointer.Int32Ptr(1), }, }, want: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Template: ConsumerTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Labels: map[string]string{"app": "app"}, }, }, Replicas: pointer.Int32Ptr(1), Selector: 
map[string]string{"app": "app"}, }, }, }, { name: "default namespace", ctx: context.Background(), given: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Replicas: pointer.Int32Ptr(1), }, }, want: &ConsumerGroup{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", }, Spec: ConsumerGroupSpec{ Template: ConsumerTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", }, }, Replicas: pointer.Int32Ptr(1), }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.given.SetDefaults(tt.ctx) if diff := cmp.Diff(tt.want, tt.given); diff != "" { t.Error("(-want, +got)", diff) } }) } }
knative-sandbox/eventing-kafka-broker
control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_group_defaults_test.go
GO
apache-2.0
3,174
#include "models/page-api.h" // #include <QtConcurrentRun> #include <QTimer> #include <QtMath> #include <utility> #include "functions.h" #include "image.h" #include "logger.h" #include "models/api/api.h" #include "models/filtering/post-filter.h" #include "models/page.h" #include "models/search-query/search-query.h" #include "models/site.h" #include "network/network-reply.h" #include "tags/tag.h" PageApi::PageApi(Page *parentPage, Profile *profile, Site *site, Api *api, SearchQuery query, int page, int limit, PostFilter postFiltering, bool smart, QObject *parent, int pool, int lastPage, qulonglong lastPageMinId, qulonglong lastPageMaxId, QString lastPageMinDate, QString lastPageMaxDate) : QObject(parent), m_parentPage(parentPage), m_profile(profile), m_site(site), m_api(api), m_query(std::move(query)), m_errors(QStringList()), m_postFiltering(std::move(postFiltering)), m_imagesPerPage(limit), m_lastPage(lastPage), m_lastPageMinId(lastPageMinId), m_lastPageMaxId(lastPageMaxId), m_lastPageMinDate(std::move(lastPageMinDate)), m_lastPageMaxDate(std::move(lastPageMaxDate)), m_smart(smart), m_reply(nullptr) { m_imagesCount = -1; m_maxImagesCount = -1; m_pagesCount = -1; m_imagesCountSafe = false; m_pagesCountSafe = false; m_page = page; m_pool = pool; m_format = m_api->getName(); updateUrls(); } void PageApi::setLastPage(Page *page) { if (!page->isValid()) { return; } m_lastPage = page->page(); m_lastPageMaxId = page->maxId(); m_lastPageMinId = page->minId(); m_lastPageMaxDate = page->maxDate(); m_lastPageMinDate = page->minDate(); if (!page->nextPage().isEmpty() && page->page() == m_page - 1) { m_url = page->nextPage(); } else if (!page->prevPage().isEmpty() && page->page() == m_page + 1) { m_url = page->prevPage(); } updateUrls(); } void PageApi::updateUrls() { QString url; m_errors.clear(); // URL searches if (m_query.urls.contains(m_api->getName())) { url = m_query.urls[m_api->getName()]; } else if (m_query.tags.count() == 1 && isUrl(m_query.tags.first())) { url = 
m_query.tags.first(); } else if (!m_url.isEmpty()) { url = m_url.toString(); } else { PageUrl ret; if (!m_query.gallery.isNull()) { ret = m_api->galleryUrl(m_query.gallery, m_page, m_imagesPerPage, m_site); } else { LastPageInformation lastPage; lastPage.page = m_lastPage; lastPage.minId = m_lastPageMinId; lastPage.minDate = m_lastPageMinDate; lastPage.maxId = m_lastPageMaxId; lastPage.maxDate = m_lastPageMaxDate; ret = m_api->pageUrl(m_query.tags.join(' '), m_page, m_imagesPerPage, lastPage, m_site); } if (!ret.error.isEmpty()) { m_errors.append(ret.error); } url = ret.url; m_headers = ret.headers; } // Add site information to URL url = m_site->fixUrl(url).toString(); m_originalUrl = QString(url); m_url = QString(url); } void PageApi::setReply(NetworkReply *reply) { if (m_reply != nullptr) { if (m_reply->isRunning()) { m_reply->abort(); } m_reply->deleteLater(); } m_reply = reply; } void PageApi::load(bool rateLimit, bool force) { if (m_loading) { if (!force) { return; } setReply(nullptr); } if (m_url.isEmpty() && !m_errors.isEmpty()) { for (const QString &err : qAsConst(m_errors)) { log(QStringLiteral("[%1][%2] %3").arg(m_site->url(), m_format, err), Logger::Warning); } emit finishedLoading(this, LoadResult::Error); return; } // Reading reply and resetting vars m_images.clear(); m_tags.clear(); m_loaded = false; m_loading = true; m_pageImageCount = 0; m_filteredImageCount = 0; m_imagesCount = -1; m_maxImagesCount = -1; m_pagesCount = -1; log(QStringLiteral("[%1][%2] Loading page `%3`").arg(m_site->url(), m_format, m_url.toString().toHtmlEscaped()), Logger::Info); Site::QueryType type = rateLimit ? 
Site::QueryType::Retry : Site::QueryType::List; setReply(m_site->get(m_url, type, QUrl(), "", nullptr, m_headers)); connect(m_reply, &NetworkReply::finished, this, &PageApi::parse); } void PageApi::abort() { if (m_reply != nullptr && m_reply->isRunning()) { m_reply->abort(); } } bool PageApi::addImage(const QSharedPointer<Image> &img) { if (img.isNull()) { return false; } m_pageImageCount++; QStringList filters = m_postFiltering.match(img->tokens(m_profile)); if (!filters.isEmpty()) { m_filteredImageCount++; img->deleteLater(); log(QStringLiteral("[%1][%2] Image filtered. Reason: %3.").arg(m_site->url(), m_format, filters.join(", ")), Logger::Info); return false; } m_images.append(img); return true; } void PageApi::parse() { if (m_reply == nullptr) { return; } log(QStringLiteral("[%1][%2] Receiving page `%3`").arg(m_site->url(), m_format, m_reply->url().toString().toHtmlEscaped()), Logger::Info); // Check redirection QUrl redir = m_reply->attribute(QNetworkRequest::RedirectionTargetAttribute).toUrl(); if (!redir.isEmpty()) { QUrl newUrl = m_site->fixUrl(redir.toString(), m_url); log(QStringLiteral("[%1][%2] Redirecting page `%3` to `%4`").arg(m_site->url(), m_format, m_url.toString().toHtmlEscaped(), newUrl.toString().toHtmlEscaped()), Logger::Info); // HTTP -> HTTPS redirects const bool ssl = m_site->setting("ssl", false).toBool(); if (!ssl && newUrl.path() == m_url.path() && newUrl.scheme() == "https" && m_url.scheme() == "http") { const bool notThisSite = m_site->setting("ssl_never_correct", false).toBool(); if (!notThisSite) { emit httpsRedirect(); } } m_url = newUrl; load(false, true); return; } // Detect HTTP 429 / 509 usage limit reached const int statusCode = m_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt(); if (statusCode == 429 || statusCode == 509) { log(QStringLiteral("[%1][%2] Limit reached (%3). 
New try.").arg(m_site->url(), m_format, QString::number(statusCode)), Logger::Warning); load(true, true); return; } // QtConcurrent::run(this, &PageApi::parseActual); parseActual(); } void PageApi::parseActual() { const bool isGallery = !m_query.gallery.isNull(); const bool parseErrors = isGallery ? m_api->parseGalleryErrors() : m_api->parsePageErrors(); const int statusCode = m_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt(); const int offset = (m_page - 1) * m_imagesPerPage; // Detect Cloudflare if ((statusCode == 403 || statusCode == 429 || statusCode == 503) && m_reply->rawHeader("server") == "cloudflare") { m_errors.append("Cloudflare wall"); log(QStringLiteral("[%1][%2] Cloudflare wall for '%3'").arg(m_site->url(), m_format, m_reply->url().toString()), Logger::Error); setReply(nullptr); m_loaded = true; m_loading = false; emit finishedLoading(this, LoadResult::Error); return; } // Try to read the reply m_source = m_reply->readAll(); if (m_source.isEmpty() || (m_reply->error() != NetworkReply::NetworkError::NoError && !parseErrors)) { if (m_reply->error() != NetworkReply::NetworkError::OperationCanceledError) { log(QStringLiteral("[%1][%2] Loading error: %3 (%4)").arg(m_site->url(), m_format, m_reply->errorString()).arg(m_reply->error()), Logger::Error); } setReply(nullptr); m_loaded = true; m_loading = false; emit finishedLoading(this, LoadResult::Error); return; } // Parse source ParsedPage page; if (isGallery) { page = m_api->parseGallery(m_parentPage, m_source, statusCode, offset); } else { page = m_api->parsePage(m_parentPage, m_source, statusCode, offset); } // Handle errors if (!page.error.isEmpty()) { m_errors.append(page.error); log(QStringLiteral("[%1][%2] %3").arg(m_site->url(), m_format, page.error), Logger::Warning); setReply(nullptr); m_loaded = true; m_loading = false; emit finishedLoading(this, LoadResult::Error); return; } // Fill data from parsing result if (page.pageCount >= 0) { setPageCount(page.pageCount, true); } if 
(page.imageCount >= 0) { setImageCount(page.imageCount, true); } for (const Tag &tag : qAsConst(page.tags)) { m_tags.append(tag); } for (const QSharedPointer<Image> &img : qAsConst(page.images)) { addImage(img); } if (page.urlNextPage.isValid()) { m_urlNextPage = page.urlNextPage; } if (page.urlPrevPage.isValid()) { m_urlPrevPage = page.urlPrevPage; } if (!page.wiki.isEmpty()) { m_wiki = fixCloudflareEmails(page.wiki); } // Link images to their respective galleries if (isGallery) { for (auto &img : m_images) { img->setParentGallery(m_query.gallery); } } // Complete image count information from tag count information if (m_imagesCount < 1 || !m_imagesCountSafe) { int found = 0; int min = -1; for (const Tag &tag : qAsConst(m_tags)) { if (m_query.tags.contains(tag.text())) { found++; if (min == -1 || min > tag.count()) { min = tag.count(); } } } int searchTagsCount = m_query.tags.count();; if (m_query.tags.count() > found) { const QStringList modifiers = QStringList() << "-" << m_api->modifiers(); for (const QString &search : qAsConst(m_query.tags)) { for (const QString &modifier : modifiers) { if (search.startsWith(modifier)) { searchTagsCount--; break; } } } } if (searchTagsCount == found) { if (m_query.tags.count() == 1) { const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? 
forcedLimit : m_imagesPerPage; const int expectedPageCount = qCeil(static_cast<qreal>(min) / perPage); setImageCount(min, m_pagesCountSafe && expectedPageCount == m_pagesCount); } setImageMaxCount(min); } } // Complete missing tag information from images' tags if necessary if (m_tags.isEmpty()) { QStringList tagsGot; for (const QSharedPointer<Image> &img : qAsConst(m_images)) { for (const Tag &tag : img->tags()) { if (tagsGot.contains(tag.text())) { const int index = tagsGot.indexOf(tag.text()); m_tags[index].setCount(m_tags[index].count() + 1); } else { m_tags.append(tag); tagsGot.append(tag.text()); } } } } // Remove first n images (according to site settings) int skip = m_site->setting("ignore/always", 0).toInt(); if (false && m_isAltPage) { // FIXME(Bionus): broken since move to Api class skip = m_site->setting("ignore/alt", 0).toInt(); } if (m_page == 1) { skip = m_site->setting("ignore/1", 0).toInt(); } if (m_api->getName() == QLatin1String("Html")) { if (m_images.size() >= skip) { for (int i = 0; i < skip; ++i) { m_images.removeFirst(); m_pageImageCount--; } } else { log(QStringLiteral("Wanting to skip %1 images but only %2 returned").arg(skip).arg(m_images.size()), Logger::Warning); } } // Virtual paging int firstImage = 0; int lastImage = m_smart ? 
m_imagesPerPage : m_images.size(); if (false && !m_originalUrl.contains("{page}") && !m_originalUrl.contains("{cpage}") && !m_originalUrl.contains("{pagepart}") && !m_originalUrl.contains("{pid}")) { // TODO(Bionus): add real virtual paging firstImage = m_imagesPerPage * (m_page - 1); lastImage = m_imagesPerPage; } while (firstImage > 0 && !m_images.isEmpty()) { m_images.removeFirst(); firstImage--; } while (m_images.size() > lastImage) { m_images.removeLast(); } log(QStringLiteral("[%1][%2] Parsed page `%3`: %4 images (%5), %6 tags (%7), %8 total (%9), %10 pages (%11)").arg(m_site->url(), m_format, m_reply->url().toString().toHtmlEscaped()).arg(m_images.count()).arg(m_pageImageCount).arg(page.tags.count()).arg(m_tags.count()).arg(imagesCount(false)).arg(imagesCount(true)).arg(pagesCount(false)).arg(pagesCount(true)), Logger::Info); setReply(nullptr); m_loaded = true; m_loading = false; emit finishedLoading(this, LoadResult::Ok); } void PageApi::clear() { m_images.clear(); m_pageImageCount = 0; m_filteredImageCount = 0; } const QList<QSharedPointer<Image>> &PageApi::images() const { return m_images; } const QUrl &PageApi::url() const { return m_url; } const QString &PageApi::source() const { return m_source; } const QString &PageApi::wiki() const { return m_wiki; } const QList<Tag> &PageApi::tags() const { return m_tags; } const QStringList &PageApi::errors() const { return m_errors; } const QUrl &PageApi::nextPage() const { return m_urlNextPage; } const QUrl &PageApi::prevPage() const { return m_urlPrevPage; } bool PageApi::isLoaded() const { return m_loaded; } int PageApi::imagesPerPage() const { return m_imagesPerPage; } int PageApi::page() const { return m_page; } int PageApi::pageImageCount() const { return m_pageImageCount; } int PageApi::filteredImageCount() const { return m_filteredImageCount; } int PageApi::highLimit() const { return m_api->maxLimit(); } bool PageApi::hasNext() const { int pageCount = pagesCount(); int maxPages = maxPagesCount(); if 
(pageCount <= 0 && maxPages > 0) { pageCount = maxPages; } return pageCount > m_page || (pageCount <= 0 && m_pageImageCount > 0); } bool PageApi::isImageCountSure() const { return m_imagesCountSafe; } int PageApi::imagesCount(bool guess) const { if (m_imagesCountSafe) { return m_imagesCount; } if (m_pagesCount == 1) { return m_pageImageCount; } if (!guess) { return -1; } if (m_imagesCount < 0 && m_pagesCount >= 0) { const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? forcedLimit : m_imagesPerPage; return m_pagesCount * perPage; } return m_imagesCount; } int PageApi::maxImagesCount() const { return m_maxImagesCount; } bool PageApi::isPageCountSure() const { return m_pagesCountSafe; } int PageApi::pagesCount(bool guess) const { if (m_pagesCountSafe) { return m_pagesCount; } if (!guess) { return -1; } if (m_pagesCount < 0 && m_imagesCount >= 0) { const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? forcedLimit : m_imagesPerPage; return qCeil(static_cast<qreal>(m_imagesCount) / perPage); } return m_pagesCount; } int PageApi::maxPagesCount() const { if (m_maxImagesCount < 0) { return -1; } const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? 
forcedLimit : m_imagesPerPage; return qCeil(static_cast<qreal>(m_maxImagesCount) / perPage); } qulonglong PageApi::maxId() const { qulonglong maxId = 0; for (const QSharedPointer<Image> &img : m_images) { if (img->id() > maxId || maxId == 0) { maxId = img->id(); } } return maxId; } qulonglong PageApi::minId() const { qulonglong minId = 0; for (const QSharedPointer<Image> &img : m_images) { if (img->id() < minId || minId == 0) { minId = img->id(); } } return minId; } QString PageApi::maxDate() const { QString maxDate; for (const QSharedPointer<Image> &img : m_images) { if (img->dateRaw() > maxDate || maxDate.isEmpty()) { maxDate = img->dateRaw(); } } return maxDate; } QString PageApi::minDate() const { QString minDate; for (const QSharedPointer<Image> &img : m_images) { if (img->dateRaw() < minDate || minDate.isEmpty()) { minDate = img->dateRaw(); } } return minDate; } void PageApi::setImageCount(int count, bool sure) { if (m_imagesCount <= 0 || (!m_imagesCountSafe && sure)) { m_imagesCount = count; m_imagesCountSafe = sure; if (sure) { const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? forcedLimit : m_imagesPerPage; setPageCount(qCeil(static_cast<qreal>(count) / perPage), true); } } } void PageApi::setImageMaxCount(int maxCount) { m_maxImagesCount = maxCount; } void PageApi::setPageCount(int count, bool sure) { if (m_pagesCount <= 0 || (!m_pagesCountSafe && sure)) { m_pagesCount = count; m_pagesCountSafe = sure; if (sure) { const int forcedLimit = m_api->forcedLimit(); const int perPage = forcedLimit > 0 ? forcedLimit : m_imagesPerPage; setImageCount(count * perPage, false); } } }
Bionus/imgbrd-grabber
src/lib/src/models/page-api.cpp
C++
apache-2.0
15,704
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved. // Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information. using System; using System.Linq; using System.Security.Claims; using IdentityModel; using IdentityServerAspNetIdentity.Data; using IdentityServerAspNetIdentity.Models; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.DependencyInjection; using Serilog; namespace IdentityServerAspNetIdentity { public class SeedData { public static void EnsureSeedData(string connectionString) { var services = new ServiceCollection(); services.AddLogging(); services.AddDbContext<ApplicationDbContext>(options => options.UseSqlite(connectionString)); services.AddIdentity<ApplicationUser, IdentityRole>() .AddEntityFrameworkStores<ApplicationDbContext>() .AddDefaultTokenProviders(); using (var serviceProvider = services.BuildServiceProvider()) { using (var scope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope()) { var context = scope.ServiceProvider.GetService<ApplicationDbContext>(); context.Database.Migrate(); var userMgr = scope.ServiceProvider.GetRequiredService<UserManager<ApplicationUser>>(); var alice = userMgr.FindByNameAsync("alice").Result; if (alice == null) { alice = new ApplicationUser { UserName = "alice" }; var result = userMgr.CreateAsync(alice, "Pass123$").Result; if (!result.Succeeded) { throw new Exception(result.Errors.First().Description); } result = userMgr.AddClaimsAsync(alice, new Claim[]{ new Claim(JwtClaimTypes.Name, "Alice Smith"), new Claim(JwtClaimTypes.GivenName, "Alice"), new Claim(JwtClaimTypes.FamilyName, "Smith"), new Claim(JwtClaimTypes.Email, "AliceSmith@email.com"), new Claim(JwtClaimTypes.EmailVerified, "true", ClaimValueTypes.Boolean), new Claim(JwtClaimTypes.WebSite, "http://alice.com"), new Claim(JwtClaimTypes.Address, @"{ 'street_address': 'One Hacker Way', 'locality': 'Heidelberg', 'postal_code': 
69118, 'country': 'Germany' }", IdentityServer4.IdentityServerConstants.ClaimValueTypes.Json) }).Result; if (!result.Succeeded) { throw new Exception(result.Errors.First().Description); } Log.Debug("alice created"); } else { Log.Debug("alice already exists"); } var bob = userMgr.FindByNameAsync("bob").Result; if (bob == null) { bob = new ApplicationUser { UserName = "bob" }; var result = userMgr.CreateAsync(bob, "Pass123$").Result; if (!result.Succeeded) { throw new Exception(result.Errors.First().Description); } result = userMgr.AddClaimsAsync(bob, new Claim[]{ new Claim(JwtClaimTypes.Name, "Bob Smith"), new Claim(JwtClaimTypes.GivenName, "Bob"), new Claim(JwtClaimTypes.FamilyName, "Smith"), new Claim(JwtClaimTypes.Email, "BobSmith@email.com"), new Claim(JwtClaimTypes.EmailVerified, "true", ClaimValueTypes.Boolean), new Claim(JwtClaimTypes.WebSite, "http://bob.com"), new Claim(JwtClaimTypes.Address, @"{ 'street_address': 'One Hacker Way', 'locality': 'Heidelberg', 'postal_code': 69118, 'country': 'Germany' }", IdentityServer4.IdentityServerConstants.ClaimValueTypes.Json), new Claim("location", "somewhere") }).Result; if (!result.Succeeded) { throw new Exception(result.Errors.First().Description); } Log.Debug("bob created"); } else { Log.Debug("bob already exists"); } } } } } }
MienDev/IdentityServer4
samples/Quickstarts/6_AspNetIdentity/src/IdentityServerAspNetIdentity/SeedData.cs
C#
apache-2.0
5,056
import {Timestamp} from "../src/IonTimestamp"; import {Decimal} from "../src/IonDecimal"; import {load, Value} from "../src/dom"; // Used as a default filtering predicate in functions below function acceptAnyValue(_: any) : boolean { return true; } // A common collection of JS values that can be reduced to a relevant subset using the provided filter function. export function exampleJsValuesWhere(filter: (v: any) => boolean = acceptAnyValue): any[] { return [ null, true, false, Number.MIN_SAFE_INTEGER, -7.5, -7, 0, 7, 7.5, Number.MAX_SAFE_INTEGER, "", "foo", new Date(0), Timestamp.parse('1970-01-01T00:00:00Z'), new Decimal('1.5'), [], [1, 2, 3], [{foo: "bar"}], {}, {foo: "bar", baz: 5}, {foo: [1, 2, 3]} ].filter(filter); } // A common collection of Ion dom.Value instances that can be reduced to a relevant subset using // the provided filter function. export function exampleIonValuesWhere(filter: (v: Value) => boolean = acceptAnyValue): Value[] { return [ load('null')!, // null load('null.string')!, // typed null load('true')!, // boolean load('1')!, // integer load('15e-1')!, // float load('15d-1')!, // decimal load('1970-01-01T00:00:00.000Z')!, // timestamp load('"Hello"')!, // string load('Hello')!, // symbol load('{{aGVsbG8gd29ybGQ=}}')!, // blob load('{{"February"}}')!, // clob load('[1, 2, 3]')!, // list load('(1 2 3)')!, // s-expression load('{foo: true, bar: "Hello", baz: 5, qux: null}')! 
// struct ].filter(filter); } const _exampleIsoStrings: string[] = [ "1970-01-01T00:00:00Z", '2020-02-28T23:00:00.000-01:00', "2020-02-29T00:00:00Z", "2020-02-29T00:00:00+01:00", "2020-02-29T00:00:00-01:00", '2020-03-01T00:00:00.000+01:00', "2020-03-19T03:17:59.999Z", "2020-03-19T03:17:59+03:21", "2020-03-19T23:59:59-05:00", "2020-03-19T23:01:01-08:00", "2020-03-19T11:30:30-08:00", "2020-03-19T11:30:30.5-08:00", "2020-03-19T11:30:30.50-08:00", "2020-03-19T11:30:30.500-08:00", "2020-03-22T11:30:30.22-08:00", "2020-03-27T00:00:00Z", "2020-03-27T00:00:00.000Z", "2020-03-27T12:00:00-05:00", "2020-03-27T12:00:00-08:00", "2020-03-27T12:00:00+01:00", "2020-03-27T19:00:00-05:00", "2020-03-27T16:00:00-08:00", "2020-03-27T16:00:00.5-08:00", "2020-03-28T01:00:00+01:00", "2020-03-28T01:00:00.123456+01:00", "2020-03-28T01:00:00.123456789+01:00", ]; // A common collection of Date values that can be reduced to a relevant subset using // the provided filter function. export function exampleDatesWhere(filter: (v: Date) => boolean = acceptAnyValue): Date[] { return _exampleIsoStrings .map((isoString) => new Date(isoString)) .filter(filter); } // A common collection of Timestamp values that can be reduced to a relevant subset using // the provided filter function. export function exampleTimestampsWhere(filter: (v: Timestamp) => boolean = acceptAnyValue): Timestamp[] { return _exampleIsoStrings .map((isoString) => Timestamp.parse(isoString)!) .filter(filter); }
amzn/ion-js
test/exampleValues.ts
TypeScript
apache-2.0
3,237
# # Cookbook Name:: chef-solo # Recipe:: default # # Copyright 2014, TENSOR # # All rights reserved - Do Not Redistribute # include_recipe 'logrotate' include_recipe 'chef-solo.redis::redis' cookbook_file "/etc/chef/solo.rb" do source "solo.rb" owner 'root' group 'root' action :create end %w[ node["coockbookdir"]/recipes node["coockbookdir"]/attributes/ ].each do |path| directory path do recursive true owner "root" group "root" end end [node["packdir"]["client"]].each do |path| directory path do recursive true owner "root" group "root" end end cookbook_file "/etc/chef/node.json" do source "node.json" owner 'root' group 'root' end cookbook_file "#{node["coockbookdir"]}metadata.rb" do owner 'root' group 'root' mode 0755 source "metadata.rb" action :create end cookbook_file "#{node["coockbookdir"]}CHANGELOG.md" do owner 'root' group 'root' mode 0755 source "CHANGELOG.md" action :create end cookbook_file "#{node["coockbookdir"]}recipes/default.rb" do source "default.rb" owner 'root' group 'root' end cookbook_file "#{node["coockbookdir"]}README.md" do source "README.md" action :create owner 'root' group 'root' end logrotate_app 'chef-solo' do cookbook 'logrotate' path '/var/log/chef/solo.log' frequency 'daily' rotate 7 options ['missingok', 'copytruncate', 'compress', 'delaycompress', 'notifempty'] end execute "copy attributes on client" do command "scp #{node.default["server"]}:/#{node.default["dirpro"]["server"]}attributes/default.rb #{node.default["dirpro"]["client"]}/attributes/" action :run user "root" end #src_filepath = "#{node.pakdir_client}" #bash "scp packages" do # user "root" # cwd ::File.dirname(src_filepath) # code <<-EOH # #rm -rf ./* # scp test-exploitation1.unix.tensor.ru:/root/chef-repo/cookbooks/chef-solo.test/files/packages/* ./ # EOH #end
alexey-pankratyev/chefcookbooks
cookbooks/chef-solo.redis/recipes/default.rb
Ruby
apache-2.0
1,937
<?php /** * SugarCLI * * PHP Version 5.3 -> 5.4 * SugarCRM Versions 6.5 - 7.6 * * @author Rémi Sauvat * @author Emmanuel Dyan * @copyright 2005-2015 iNet Process * * @package inetprocess/sugarcrm * * @license Apache License 2.0 * * @link http://www.inetprocess.com */ namespace SugarCli\Console\Command\Inventory; use Guzzle\Http\Exception\RequestException; use Guzzle\Service\Client as GClient; use Inet\Inventory\Agent; use Inet\Inventory\Facter\ArrayFacter; use Inet\Inventory\Facter\MultiFacterFacter; use Inet\Inventory\Facter\SugarFacter; use Inet\Inventory\Facter\SystemFacter; use Inet\SugarCRM\Exception\SugarException; use SugarCli\Console\ExitCode; use Symfony\Component\Console\Input\InputArgument; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Input\InputOption; use Symfony\Component\Console\Output\OutputInterface; class AgentCommand extends AbstractInventoryCommand { protected function configure() { parent::configure(); $this->setName('inventory:agent') ->setDescription('Gather facts and sends a report to an Inventory server') ->setHelp(<<<'EOHELP' Sends all facts gathered on the system and the SugarCRM instance to an Inventory server. 
EOHELP ) ->addArgument( 'server', InputArgument::REQUIRED, 'Url of the inventory server' ) ->addArgument( 'username', InputArgument::REQUIRED, 'Username for server authentication' ) ->addArgument( 'password', InputArgument::REQUIRED, 'Password for server authentication' ) ->addConfigOption( 'account.name', 'account-name', 'a', InputOption::VALUE_REQUIRED, 'Name of the account' ); } protected function execute(InputInterface $input, OutputInterface $output) { $logger = $this->getService('logger'); $account_name = $input->getOption('account-name'); try { $client = new GClient( $input->getArgument('server'), array('request.options' => array('auth' => array( $input->getArgument('username'), $input->getArgument('password'), ))) ); $agent = new Agent($logger, $client, $account_name); $agent->setFacter(new MultiFacterFacter(array( new SystemFacter(), new ArrayFacter($this->getCustomFacts($input, 'system')) )), Agent::SYSTEM); $agent->setFacter( new MultiFacterFacter(array( new SugarFacter( $this->getService('sugarcrm.application'), $this->getService('sugarcrm.pdo') ), new ArrayFacter($this->getCustomFacts($input, 'sugarcrm')) )), Agent::SUGARCRM ); $agent->sendAll(); $output->writeln('Successfuly sent report to inventory server.'); } catch (RequestException $e) { $logger->error('An error occured while contacting the inventory server.'); $logger->error($e->getMessage()); return ExitCode::EXIT_INVENTORY_ERROR; } catch (SugarException $e) { $logger->error('An error occured with the sugar application.'); $logger->error($e->getMessage()); return ExitCode::EXIT_UNKNOWN_SUGAR_ERROR; } } }
inetprocess/sugarcli
src/Console/Command/Inventory/AgentCommand.php
PHP
apache-2.0
3,719
import { imageCompare } from '../../common/image-compare'; import { InfiniteScrollImmediatePage } from './infinite-scroll-immediate.po.spec'; describe('Infinite Scroll (Immediate) Tests', () => { let page: InfiniteScrollImmediatePage; beforeEach(async () => { page = new InfiniteScrollImmediatePage(); await page.getPage(); }); it('should have correct initial states', async () => { // 20 visible employees expect(await page.getNumberOfEmployees()).toEqual(20); // loadMore not visible expect(await page.confirmLoadMoreIsVisible()).toBeFalsy(); expect(await imageCompare('infinite-scroll-immediate-initial')).toEqual(0); }); });
UXAspects/UXAspects
e2e/tests/components/infinite-scroll/immediate/infinite-scroll-immediate.e2e-spec.ts
TypeScript
apache-2.0
712
/* * QueryManager.java: an interface to manage multiple ObjectExchangers * * Copyright (c) 2006, 2014, 2015, 2016 Nozomi `James' Ytow * All rights reserved. */ /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.nomencurator.io; import java.util.Collection; import org.nomencurator.model.NamedObject; /** * {@code QueryManager} manages queries to multiple * {@code DataExchanger}s * * @version 02 July 2016 * @author Nozomi `James' Ytow */ public interface QueryManager <T extends NamedObject<?>, X extends ObjectExchanger<T>// //P extends QueryParameter<T> //Q extends ObjectQuery<T> > { public MultiplexQuery<T> getQuery(QueryParameter<T> parameter); public boolean addSource(X source); public boolean removeSource(X source); public boolean setSynchronous(boolean synchronous); public boolean isSynchronous(); }
nomencurator/taxonaut
src/main/java/org/nomencurator/io/QueryManager.java
Java
apache-2.0
1,410
/* * ObjectLab, http://www.objectlab.co.uk/open is sponsoring the ObjectLab Kit. * * Based in London, we are world leaders in the design and development * of bespoke applications for the securities financing markets. * * <a href="http://www.objectlab.co.uk/open">Click here to learn more</a> * ___ _ _ _ _ _ * / _ \| |__ (_) ___ ___| |_| | __ _| |__ * | | | | '_ \| |/ _ \/ __| __| | / _` | '_ \ * | |_| | |_) | | __/ (__| |_| |__| (_| | |_) | * \___/|_.__// |\___|\___|\__|_____\__,_|_.__/ * |__/ * * www.ObjectLab.co.uk * * $Id$ * * Copyright 2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package net.objectlab.kit.datecalc.common; /** * enum of Tenor Codes held by a {@link Tenor} * * @author Benoit Xhenseval * */ public enum TenorCode { OVERNIGHT("ON", false), SPOT("SP", false), TOM_NEXT("TN", false), SPOT_NEXT("SN", false), DAY("D", true), WEEK("W", true), MONTH("M", true), YEAR("Y", true); private final String code; private final boolean acceptUnits; private TenorCode(final String code, final boolean acceptUnits) { this.code = code; this.acceptUnits = acceptUnits; } // ----------------------------------------------------------------------- // // ObjectLab, world leaders in the design and development of bespoke // applications for the securities financing markets. 
// www.ObjectLab.co.uk // // ----------------------------------------------------------------------- /** * @return the string representation of this <code>TenorCode</code> */ public String getCode() { return code; } /** * @param code * string representation of the <code>TenorCode</code> * @return a <code>TenorCode</code> represented by the string argument */ public static TenorCode fromCode(final String code) { for (final TenorCode ct : TenorCode.values()) { if (ct.getCode().equals(code)) { return ct; } } return null; } /** * @return true if the TenorCode can have units e.g. 1 Day, 3 Week but not 6 OVERNIGHT or 5 SPOT/SP */ public boolean acceptUnits() { return acceptUnits; } } /* * ObjectLab, http://www.objectlab.co.uk/open is sponsoring the ObjectLab Kit. * * Based in London, we are world leaders in the design and development * of bespoke applications for the securities financing markets. * * <a href="http://www.objectlab.co.uk/open">Click here to learn more about us</a> * ___ _ _ _ _ _ * / _ \| |__ (_) ___ ___| |_| | __ _| |__ * | | | | '_ \| |/ _ \/ __| __| | / _` | '_ \ * | |_| | |_) | | __/ (__| |_| |__| (_| | |_) | * \___/|_.__// |\___|\___|\__|_____\__,_|_.__/ * |__/ * * www.ObjectLab.co.uk */
Appendium/objectlabkit
datecalc-common/src/main/java/net/objectlab/kit/datecalc/common/TenorCode.java
Java
apache-2.0
3,699
# -*- coding: utf-8 -*- DOCUMENTATION = ''' module: mt_system.py author: - "Valentin Gurmeza" version_added: "2.4" short_description: Manage mikrotik system endpoints requirements: - mt_api description: - manage mikrotik system parameters options: hostname: description: - hotstname of mikrotik router required: True username: description: - username used to connect to mikrotik router required: True password: description: - password used for authentication to mikrotik router required: True parameter: description: - sub enpoint for mikrotik system required: True options: - ntp_client - clock - logging - routerboard - identity settings: description: - All Mikrotik compatible parameters for this particular endpoint. Any yes/no values must be enclosed in double quotes state: description: - absent or present ''' EXAMPLES = ''' - mt_system: hostname: "{{ inventory_hostname }}" username: "{{ mt_user }}" password: "{{ mt_pass }}" parameter: identity settings: name: test_ansible ''' from ansible.module_utils.mt_common import clean_params, MikrotikIdempotent from ansible.module_utils.basic import AnsibleModule def main(): module = AnsibleModule( argument_spec = dict( hostname = dict(required=True), username = dict(required=True), password = dict(required=True, no_log=True), settings = dict(required=False, type='dict'), parameter = dict( required = True, choices = ['ntp_client', 'clock', 'identity', 'logging', 'routerboard_settings'], type = 'str' ), state = dict( required = False, choices = ['present', 'absent'], type = 'str' ), ), supports_check_mode=True ) params = module.params if params['parameter'] == 'routerboard_settings': params['parameter'] = 'routerboard/settings' if params['parameter'] == 'ntp_client': params['parameter'] = 'ntp/client' clean_params(params['settings']) mt_obj = MikrotikIdempotent( hostname = params['hostname'], username = params['username'], password = params['password'], state = params['state'], desired_params = params['settings'], 
idempotent_param= None, api_path = '/system/' + params['parameter'], check_mode = module.check_mode ) mt_obj.sync_state() if mt_obj.failed: module.fail_json( msg = mt_obj.failed_msg ) elif mt_obj.changed: module.exit_json( failed=False, changed=True, msg=mt_obj.changed_msg, diff={ "prepared": { "old": mt_obj.old_params, "new": mt_obj.new_params, }}, ) else: module.exit_json( failed=False, changed=False, #msg='', msg=params['settings'], ) if __name__ == '__main__': main()
zahodi/ansible-mikrotik
library/mt_system.py
Python
apache-2.0
2,989
var a04869 = [ [ "font_sample_count", "a04869.html#af23335c4319e0c5f010380d9de8f5a6d", null ], [ "Label", "a04869.html#ad4701118c2e75f005c1f7e4c53abb35d", null ], [ "List", "a04869.html#a848ab6ee611dbc860f80a47ecef2faa7", null ], [ "SampleCount", "a04869.html#aab481329945e65c4aeee79b145e4de51", null ] ];
stweil/tesseract-ocr.github.io
4.00.00dev/a04869.js
JavaScript
apache-2.0
321
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deephacks.confit.test.validation; import org.deephacks.confit.model.Bean; import org.deephacks.confit.model.BeanId; import java.io.PrintStream; import java.util.Arrays; import java.util.Deque; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; public class BinaryTreeUtils { public static Set<Bean> getTree(Integer root, List<Integer> children) { Node rootNode = new Node(root); for (int i : children) { rootNode.insert(i); } Set<Bean> beans = new HashSet<>(); rootNode.traverse(beans, null); return beans; } public static Bean getBean(int i, Set<Bean> beans) { for (Bean bean : beans) { if (new Integer(bean.getId().getInstanceId()).intValue() == i) { return bean; } } return null; } public static class Node { int value = 0; public Node left; public Node right; Node(int value) { this.value = value; } public Bean traverse(Set<Bean> beans, Bean parent) { Bean current = Bean.create(BeanId.create(value + "", "binarytree")); current.setProperty("value", value + ""); if (parent != null) { current.setReference("parent", parent.getId()); } if (left != null) { Bean leftBean = left.traverse(beans, current); current.setReference("left", leftBean.getId()); } if (right != null) { Bean rightBean = right.traverse(beans, current); current.setReference("right", rightBean.getId()); } beans.add(current); return current; } public void insert(int insert) { if (value == 
insert) { return; } if (value > insert) { if (left != null) { left.insert(insert); } else { left = new Node(insert); } } else { if (right != null) { right.insert(insert); } else { right = new Node(insert); } } } public Node delete(int delete) { if (value == delete) { if (left != null && right != null) { Node x = right; while (x.left != null) { x = x.left; } value = x.value; x.value = delete; right = right.delete(delete); return this; } else if (left != null) { return left; } else if (right != null) { return right; } else { return null; } } else if (value > delete) { left = (left == null) ? null : left.delete(delete); return this; } else { right = (right == null) ? null : right.delete(delete); return this; } } public String toString() { return value + ""; } } public static void printPretty(BinaryTree root) { TreePrinter.printPretty(root, 1, 0, new TreePrinter.PaddedWriter(System.out)); } private static class TreePrinter { // Search for the deepest part of the tree private static <T> int maxHeight(BinaryTree t) { if (t == null) return 0; int leftHeight = maxHeight(t.getLeft()); int rightHeight = maxHeight(t.getRight()); return (leftHeight > rightHeight) ? 
leftHeight + 1 : rightHeight + 1; } // Pretty formatting of a binary tree to the output stream public static <T> void printPretty(BinaryTree tree, int level, int indentSpace, PaddedWriter out) { int h = maxHeight(tree); int BinaryTreesInThisLevel = 1; int branchLen = 2 * ((int) Math.pow(2.0, h) - 1) - (3 - level) * (int) Math.pow(2.0, h - 1); int BinaryTreeSpaceLen = 2 + (level + 1) * (int) Math.pow(2.0, h); int startLen = branchLen + (3 - level) + indentSpace; Deque<BinaryTree> BinaryTreesQueue = new LinkedList<BinaryTree>(); BinaryTreesQueue.offerLast(tree); for (int r = 1; r < h; r++) { printBranches(branchLen, BinaryTreeSpaceLen, startLen, BinaryTreesInThisLevel, BinaryTreesQueue, out); branchLen = branchLen / 2 - 1; BinaryTreeSpaceLen = BinaryTreeSpaceLen / 2 + 1; startLen = branchLen + (3 - level) + indentSpace; printBinaryTrees(branchLen, BinaryTreeSpaceLen, startLen, BinaryTreesInThisLevel, BinaryTreesQueue, out); for (int i = 0; i < BinaryTreesInThisLevel; i++) { BinaryTree currBinaryTree = BinaryTreesQueue.pollFirst(); if (currBinaryTree != null) { BinaryTreesQueue.offerLast(currBinaryTree.getLeft()); BinaryTreesQueue.offerLast(currBinaryTree.getRight()); } else { BinaryTreesQueue.offerLast(null); BinaryTreesQueue.offerLast(null); } } BinaryTreesInThisLevel *= 2; } printBranches(branchLen, BinaryTreeSpaceLen, startLen, BinaryTreesInThisLevel, BinaryTreesQueue, out); printLeaves(indentSpace, level, BinaryTreesInThisLevel, BinaryTreesQueue, out); } private static <T> void printBranches(int branchLen, int BinaryTreeSpaceLen, int startLen, int BinaryTreesInThisLevel, Deque<BinaryTree> BinaryTreesQueue, PaddedWriter out) { Iterator<BinaryTree> iterator = BinaryTreesQueue.iterator(); for (int i = 0; i < BinaryTreesInThisLevel / 2; i++) { if (i == 0) { out.setw(startLen - 1); } else { out.setw(BinaryTreeSpaceLen - 2); } out.write(); BinaryTree next = iterator.next(); if (next != null) { out.write("/"); } else { out.write(" "); } out.setw(2 * branchLen + 2); 
out.write(); next = iterator.next(); if (next != null) { out.write("\\"); } else { out.write(" "); } } out.endl(); } // Print the branches and BinaryTree (eg, ___10___ ) private static <T> void printBinaryTrees(int branchLen, int BinaryTreeSpaceLen, int startLen, int BinaryTreesInThisLevel, Deque<BinaryTree> BinaryTreesQueue, PaddedWriter out) { Iterator<BinaryTree> iterator = BinaryTreesQueue.iterator(); BinaryTree currentBinaryTree; for (int i = 0; i < BinaryTreesInThisLevel; i++) { currentBinaryTree = iterator.next(); if (i == 0) { out.setw(startLen); } else { out.setw(BinaryTreeSpaceLen); } out.write(); if (currentBinaryTree != null && currentBinaryTree.getLeft() != null) { out.setfill('_'); } else { out.setfill(' '); } out.setw(branchLen + 2); if (currentBinaryTree != null) { out.write(currentBinaryTree.toString()); } else { out.write(); } if (currentBinaryTree != null && currentBinaryTree.getRight() != null) { out.setfill('_'); } else { out.setfill(' '); } out.setw(branchLen); out.write(); out.setfill(' '); } out.endl(); } // Print the leaves only (just for the bottom row) private static <T> void printLeaves(int indentSpace, int level, int BinaryTreesInThisLevel, Deque<BinaryTree> BinaryTreesQueue, PaddedWriter out) { Iterator<BinaryTree> iterator = BinaryTreesQueue.iterator(); BinaryTree currentBinaryTree; for (int i = 0; i < BinaryTreesInThisLevel; i++) { currentBinaryTree = iterator.next(); if (i == 0) { out.setw(indentSpace + 2); } else { out.setw(2 * level + 2); } if (currentBinaryTree != null) { out.write(currentBinaryTree.toString()); } else { out.write(); } } out.endl(); } public static class PaddedWriter { private int width = 0; private char fillChar = ' '; private final PrintStream writer; public PaddedWriter(PrintStream writer) { this.writer = writer; } void setw(int i) { width = i; } void setfill(char c) { fillChar = c; } void write(String str) { write(str.toCharArray()); } void write(char[] buf) { if (buf.length < width) { char[] pad = new 
char[width - buf.length]; Arrays.fill(pad, fillChar); writer.print(pad); } writer.print(buf); setw(0); } void write() { char[] pad = new char[width]; Arrays.fill(pad, fillChar); writer.print(pad); setw(0); } void endl() { writer.println(); setw(0); } } } }
deephacks/confit
tck/src/main/java/org/deephacks/confit/test/validation/BinaryTreeUtils.java
Java
apache-2.0
11,067
// bslmf_nthparameter.t.cpp -*-C++-*- #include <bslmf_nthparameter.h> #include <bslmf_integralconstant.h> #include <bslmf_issame.h> #include <bsls_bsltestutil.h> #include <stdio.h> #include <stdlib.h> #include <bsls_compilerfeatures.h> #if BSLS_COMPILERFEATURES_SIMULATE_CPP11_FEATURES // Include version that can be compiled with C++03 // Generated on Thu Oct 21 10:11:37 2021 // Command line: sim_cpp11_features.pl bslmf_nthparameter.t.cpp # define COMPILING_BSLMF_NTHPARAMETER_T_CPP # include <bslmf_nthparameter_cpp03.t.cpp> # undef COMPILING_BSLMF_NTHPARAMETER_T_CPP #else using namespace BloombergLP; //============================================================================= // TEST PLAN //----------------------------------------------------------------------------- // // //----------------------------------------------------------------------------- // ============================================================================ // STANDARD BSL ASSERT TEST FUNCTION // ---------------------------------------------------------------------------- namespace { int testStatus = 0; void aSsErT(bool condition, const char *message, int line) { if (condition) { printf("Error " __FILE__ "(%d): %s (failed)\n", line, message); if (0 <= testStatus && testStatus <= 100) { ++testStatus; } } } } // close unnamed namespace // ============================================================================ // STANDARD BSL TEST DRIVER MACRO ABBREVIATIONS // ---------------------------------------------------------------------------- #define ASSERT BSLS_BSLTESTUTIL_ASSERT #define ASSERTV BSLS_BSLTESTUTIL_ASSERTV #define LOOP_ASSERT BSLS_BSLTESTUTIL_LOOP_ASSERT #define LOOP0_ASSERT BSLS_BSLTESTUTIL_LOOP0_ASSERT #define LOOP1_ASSERT BSLS_BSLTESTUTIL_LOOP1_ASSERT #define LOOP2_ASSERT BSLS_BSLTESTUTIL_LOOP2_ASSERT #define LOOP3_ASSERT BSLS_BSLTESTUTIL_LOOP3_ASSERT #define LOOP4_ASSERT BSLS_BSLTESTUTIL_LOOP4_ASSERT #define LOOP5_ASSERT BSLS_BSLTESTUTIL_LOOP5_ASSERT #define LOOP6_ASSERT 
BSLS_BSLTESTUTIL_LOOP6_ASSERT #define Q BSLS_BSLTESTUTIL_Q // Quote identifier literally. #define P BSLS_BSLTESTUTIL_P // Print identifier and value. #define P_ BSLS_BSLTESTUTIL_P_ // P(X) without '\n'. #define T_ BSLS_BSLTESTUTIL_T_ // Print a tab (w/o newline). #define L_ BSLS_BSLTESTUTIL_L_ // current Line number //============================================================================= // GLOBAL HELPER FUNCTIONS FOR TESTING //----------------------------------------------------------------------------- template <int V> inline int integerConstTypeToInt(bsl::integral_constant<int, V>) { return V; } //============================================================================= // GLOBAL TYPEDEFS/CONSTANTS FOR TESTING //----------------------------------------------------------------------------- //============================================================================= // CLASSES FOR TESTING USAGE EXAMPLES //----------------------------------------------------------------------------- #if !BSLS_COMPILERFEATURES_SIMULATE_CPP11_FEATURES // We wish to implement a 'tuple'-like class that holds a heterogenous // collection of elements, each of which might have a different type. The // metafunction, 'my_tuple_element<I, my_tuple<ELEMS...>>::Type' would be type // of the 'I'th element in the tuple (where 'I' is zero-based). // // First, we define our 'my_tuple' class template. The body of the class is // unimportant for this usage examples: //.. template <class... ELEMS> class my_tuple { // ... }; //.. // Then, we use 'bslmf::NthParameter' to implement 'my_tuple_element': //.. // #include <bslmf_nthparameter.h> template <std::size_t I, class TUPLE> struct my_tuple_element; // Not defined template <std::size_t I, class... ELEMS> struct my_tuple_element<I, my_tuple<ELEMS...> > { typedef typename bslmf::NthParameter<I, ELEMS...>::Type Type; }; //.. // Finally, we test this implementation using 'bsl::is_same': //.. 
// #include <bslmf_issame.h> int usageExample1() { typedef my_tuple<int, short, char*> ttype; ASSERT((bsl::is_same<int, my_tuple_element<0, ttype>::Type>::value)); ASSERT((bsl::is_same<short, my_tuple_element<1, ttype>::Type>::value)); ASSERT((bsl::is_same<char *, my_tuple_element<2, ttype>::Type>::value)); ASSERT(! (bsl::is_same<short, my_tuple_element<0, ttype>::Type>::value)); return 0; } #endif //============================================================================= // MAIN PROGRAM //----------------------------------------------------------------------------- int main(int argc, char *argv[]) { int test = argc > 1 ? atoi(argv[1]) : 0; bool verbose = argc > 2; bool veryVerbose = argc > 3; bool veryVeryVerbose = argc > 4; bool veryVeryVeryVerbose = argc > 5; (void) veryVerbose; // eliminate unused variable warning (void) veryVeryVerbose; // eliminate unused variable warning (void) veryVeryVeryVerbose; // eliminate unused variable warning setbuf(stdout, NULL); // Use unbuffered output printf("TEST " __FILE__ " CASE %d\n", test); switch (test) { case 0: // Zero is always the leading case. case 3: { // -------------------------------------------------------------------- // USAGE EXAMPLE // // Concerns: Usage example compiles and runs successfully // // Plan: Copy usage example from component header literally into test // driver. // // Testing: Usage example // -------------------------------------------------------------------- if (verbose) printf("\nUSAGE EXAMPLE" "\n=============\n"); usageExample1(); } break; case 2: { // -------------------------------------------------------------------- // FULL TEST // // Concerns // o 'bslmf::NthParameter' can handle from 1 to 10 type arguments. // o 'bslmf::NthParameter' produces the correct 'Type' for 'N' in // range 0 to 9. // // Plan: // o Use 'integral_constant' to create 10 different types 'T0' to 'T9' // as aliases for 'integral_constant<int, 0>' to // 'integral_constant<int, 9>', respectively. 
// o Instantiate 'bslmf::NthParameter<0, T0>' and verify that the // resulting 'Type' is 'T0'. // o Repeat the test with an ever longer list of type arguments and // with every valid value of 'N' until we've tested // bslmf::NthParameter<9, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9>'. // // Testing: // bslmf::NthParameter<N, FIRST_PARAM, ...>::Type // -------------------------------------------------------------------- if (verbose) printf("\nFULL TEST" "\n=========\n"); typedef bsl::integral_constant<int, 0> T0; typedef bsl::integral_constant<int, 1> T1; typedef bsl::integral_constant<int, 2> T2; typedef bsl::integral_constant<int, 3> T3; typedef bsl::integral_constant<int, 4> T4; typedef bsl::integral_constant<int, 5> T5; typedef bsl::integral_constant<int, 6> T6; typedef bsl::integral_constant<int, 7> T7; typedef bsl::integral_constant<int, 8> T8; typedef bsl::integral_constant<int, 9> T9; #define DO_TEST(N, ...) { \ typedef bslmf::NthParameter<N, __VA_ARGS__>::Type Result; \ ASSERT(N == integerConstTypeToInt(Result())); \ } DO_TEST(0, T0); DO_TEST(0, T0, T1); DO_TEST(1, T0, T1); DO_TEST(0, T0, T1, T2); DO_TEST(1, T0, T1, T2); DO_TEST(2, T0, T1, T2); DO_TEST(0, T0, T1, T2, T3); DO_TEST(1, T0, T1, T2, T3); DO_TEST(2, T0, T1, T2, T3); DO_TEST(3, T0, T1, T2, T3); DO_TEST(0, T0, T1, T2, T3, T4); DO_TEST(1, T0, T1, T2, T3, T4); DO_TEST(2, T0, T1, T2, T3, T4); DO_TEST(3, T0, T1, T2, T3, T4); DO_TEST(4, T0, T1, T2, T3, T4); DO_TEST(0, T0, T1, T2, T3, T4, T5); DO_TEST(1, T0, T1, T2, T3, T4, T5); DO_TEST(2, T0, T1, T2, T3, T4, T5); DO_TEST(3, T0, T1, T2, T3, T4, T5); DO_TEST(4, T0, T1, T2, T3, T4, T5); DO_TEST(5, T0, T1, T2, T3, T4, T5); DO_TEST(0, T0, T1, T2, T3, T4, T5, T6); DO_TEST(1, T0, T1, T2, T3, T4, T5, T6); DO_TEST(2, T0, T1, T2, T3, T4, T5, T6); DO_TEST(3, T0, T1, T2, T3, T4, T5, T6); DO_TEST(4, T0, T1, T2, T3, T4, T5, T6); DO_TEST(5, T0, T1, T2, T3, T4, T5, T6); DO_TEST(6, T0, T1, T2, T3, T4, T5, T6); DO_TEST(0, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(1, T0, T1, 
T2, T3, T4, T5, T6, T7); DO_TEST(2, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(3, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(4, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(5, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(6, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(7, T0, T1, T2, T3, T4, T5, T6, T7); DO_TEST(0, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(1, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(2, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(3, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(4, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(5, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(6, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(7, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(8, T0, T1, T2, T3, T4, T5, T6, T7, T8); DO_TEST(0, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(1, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(2, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(3, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(4, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(5, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(6, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(7, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(8, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); DO_TEST(9, T0, T1, T2, T3, T4, T5, T6, T7, T8, T9); #undef DO_TEST } break; case 1: { // -------------------------------------------------------------------- // BREATHING TEST // // Concerns: Exercise basic functionality of component // // Plan: Invoke 'NthParameter' metafunction with a few different // combinations of parameters and confirm that it yields the // expected 'Type' data member. // -------------------------------------------------------------------- if (verbose) printf("\nBREATHING TEST" "\n==============\n"); #define TEST_NTH_PARAMETER(N, T, ...) 
\ ASSERT((bsl::is_same<T, \ bslmf::NthParameter<N, __VA_ARGS__>::Type>::value)) TEST_NTH_PARAMETER(0, int , int, char*, void, const short&); TEST_NTH_PARAMETER(1, char* , int, char*, void, const short&); TEST_NTH_PARAMETER(2, void , int, char*, void, const short&); TEST_NTH_PARAMETER(3, const short&, int, char*, void, const short&); TEST_NTH_PARAMETER(0, double , double); } break; default: { fprintf(stderr, "WARNING: CASE `%d' NOT FOUND.\n", test); testStatus = -1; } } if (testStatus > 0) { fprintf(stderr, "Error, non-zero test status = %d.\n", testStatus); } return testStatus; } #endif // End C++11 code // ---------------------------------------------------------------------------- // Copyright 2013 Bloomberg Finance L.P. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ----------------------------- END-OF-FILE ----------------------------------
bloomberg/bde
groups/bsl/bslmf/bslmf_nthparameter.t.cpp
C++
apache-2.0
13,000
#!/usr/bin/env python # -*- coding: utf-8 -*- """Simple utility to merge multiple Kerberos keytabs into one. This also cleans out duplicate and old keytab entries. """ import functools import struct # from http://pig.made-it.com/kerberos-etypes.html ETYPES = { 1: 'des-cbc-crc', 2: 'des-cbc-md4', 3: 'des-cbc-md5', 4: None, 5: 'des3-cbc-md5', 6: None, 7: 'des3-cbc-sha1', 9: 'dsaWithSHA1-CmsOID', 10: 'md5WithRSAEncryption-CmsOID', 11: 'sha1WithRSAEncryption-CmsOID', 12: 'rs2CBC-EnvOID', 13: 'rsaEncryption-EnvOID', 14: 'rsaES-OAEP-ENV-OID', 15: 'des-ede3-cbc-Env-OID', 16: 'des3-cbc-sha1-kd', 17: 'aes128-cts-hmac-sha1-96', 18: 'aes256-cts-hmac-sha1-96', 23: 'rc4-hmac', 24: 'rc4-hmac-exp', 65: 'subkey-experimental', } NTYPES = { 1: 'KRB5_NT_PRINCIPAL', 2: 'KRB5_NT_SRV_INST', 3: 'KRB5_NT_SRV_HST', 4: 'KRB5_NT_SRV_XHST', 5: 'KRB5_NT_UID', 6: 'KRB5_NT_X500_PRINCIPAL', 7: 'KRB5_NT_SMTP_NAME', 10: 'KRB5_NT_ENTERPRISE_PRINCIPAL', 11: 'KRB5_NT_WELLKNOWN', 4294967166: 'KRB5_NT_ENT_PRINCIPAL_AND_ID', 4294967167: 'KRB5_NT_MS_PRINCIPAL_AND_ID', 4294967168: 'KRB5_NT_MS_PRINCIPAL', } class KeytabEntry(object): """An entry in the Keytab.""" def __init__(self, data=None): self._data = data self._size = len(data) self._realm = None self._components = [] self._name_type = None self._timestamp = None self._vno8 = None self._key = None self._vno = None self._i = 0 if data: self._parse() def __base_check(self, other): if (self.name != other.name or self.realm != other.realm or self.keyblock['type'] != other.keyblock['type']): return False return True def __eq__(self, other): if not isinstance(other, KeytabEntry): return NotImplemented if self._data: return self._data.__eq__(other) return False def __hash__(self): return self._data.__hash__() def __str__(self): return '%s@%s/%s VNO:%d' % (self.name, self._realm, self.key_type, self.vno) def __repr__(self): return self.__str__() # The use of properties is mainly to reinforce that this is read-only @property def vno(self): return self._vno or 
self._vno8 @property def realm(self): return self._realm @property def timestamp(self): return self._timestamp @property def name(self): return '/'.join(self._components) @property def name_type(self): return NTYPES.get(self._name_type, self._name_type) @property def key(self): return self._key['key'] @property def key_type(self): return ETYPES.get(self._key['type'], self._key['type']) @property def ts(self): return self._timestamp def loads(self, data): self._data = data self._size = len(data) self._parse() def _encode_size(self): return struct.pack('!i', self._size) def dumps(self): value = struct.pack('!i', self._size) + self._data return value def _unpack(self, fmt, size): value = struct.unpack(fmt, self._data[self._i:self._i + size]) self._i += size return value[0] def _uint8(self): n = self._unpack('!B', 1) return n def _uint16(self): n = self._unpack('!H', 2) return n def _int32(self): n = self._unpack('!i', 4) return n def _uint32(self): n = self._unpack('!I', 4) return n def _counted_octet_string(self): size = self._uint16() counted_string = self._unpack('!%ds' % size, size) return counted_string def _keyblock(self): key = { 'type': self._uint16(), 'key': self._counted_octet_string() } return key def _parse(self): self._i = 0 n_components = self._uint16() self._realm = self._counted_octet_string() for i in range(n_components): self._components.append(self._counted_octet_string()) self._name_type = self._uint32() self._timestamp = self._uint32() self._vno8 = self._uint8() self._key = self._keyblock() # special case. 
may not be present if self._size - self._i >= 4: self._vno = self._uint32() class Keytab(object): def __init__(self, f=None): self.entries = {} self.format_version = None if f: self.load(f) def load(self, f): entries = set() format_version = struct.unpack('!H', f.read(2))[0] if format_version != 0x502: raise Exception("Unsupport file format %x" % format_version) self.format_version = format_version size_packed = f.read(4) while size_packed != '': size = struct.unpack('!i', size_packed)[0] if size > 0: entries.add(KeytabEntry(f.read(size))) else: f.read(-size) size_packed = f.read(4) self.add_entries(entries) def add_entry(self, entry): r = self.entries.setdefault(entry.realm, {}) n = r.setdefault(entry.name, {}) if entry.key_type in n: old_entry = n[entry.key_type] if entry.vno > old_entry.vno: self.entries[entry.realm][entry.name][entry.key_type] = entry else: n[entry.key_type] = entry def add_entries(self, entries): for e in entries: self.add_entry(e) def save(self, f): f.write(struct.pack('!H', 0x502)) for e in self.entry_list(): f.write(e.dumps()) def entry_list(self): entries = [] for realm in self.entries: for name in self.entries[realm]: for keytype in self.entries[realm][name]: entries.append(self.entries[realm][name][keytype]) return entries def main(main_args): merged_keytab = Keytab() for f in main_args.keytabs: merged_keytab.add_entries(Keytab(f).entry_list()) f.close() outfile = open(main_args.outfile, 'w') merged_keytab.save(outfile) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description='Merge keytabs') parser.add_argument('keytabs', metavar='ktfile', type=file, nargs='+', help='a kerberos keytab to read in') parser.add_argument('-o', '--outfile', dest='outfile', type=str, help='output file') args = parser.parse_args() main(args)
blaedd/miscutils
ktmerge/ktmerge.py
Python
apache-2.0
6,839
/* Copyright IBM Corp 2016 All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package main import ( "encoding/json" "errors" "fmt" "math/rand" "strconv" "strings" "time" "github.com/hyperledger/fabric/core/chaincode/shim" ) // SimpleChaincode example simple Chaincode implementation type SimpleChaincode struct { } var marbleIndexStr = "_marbleindex" //name for the key/value that will store a list of all known marbles var openTradesStr = "_opentrades" //name for the key/value that will store all open trades type Marble struct { Name string `json:"name"` //the fieldtags are needed to keep case from bouncing around Color string `json:"color"` Size int `json:"size"` User string `json:"user"` } type Description struct { Color string `json:"color"` Size int `json:"size"` } type AnOpenTrade struct { User string `json:"user"` //user who created the open trade order Timestamp int64 `json:"timestamp"` //utc timestamp of creation Want Description `json:"want"` //description of desired marble Willing []Description `json:"willing"` //array of marbles willing to trade away } type AllTrades struct { OpenTrades []AnOpenTrade `json:"open_trades"` } var addEmailStr = "_addEmail" var addLogStr = "_addLog" var addUserStr = "_addUser" var editUserStr = "_editUser" var getUserStr = "_getUser" var getAttachmentStr = "_getAttachment" var getLogStr = "_getLog" type Obj struct { ObjId string `json:obj_id` ObjType string `json:obj_type` Content string `json:content` UserId string `json:user_id` CrtDt string 
`json:crt_dt` } type AllObj struct { Objs []Obj `json:objs` } type User struct { UserId string `json:user_id` Password string `json:password` FirstName string `json:first_name` LastName string `json:last_name` ObjId []string `json:obj_id` } type AllUsers struct { Users []User `json:users` } type Attachment struct { UserId string `json:user_id` ObjId []string `json:obj_id` } type HashAttachments struct { HaId string `json:ha_id` Attachments []Attachment `json:attachments` } type AllHashAttachments struct { HashAttachments []HashAttachments `json:hash_attachments` } type Log struct { Action string `json:action` UserId string `json:user_id` ObjId string `json:obj_id` HaId string `json:ha_id` CrtDt string `json:crt_dt` } type AllLogs struct { Logs []Log `json:logs` } var objStr = "Obj" var objIndexStr = "AllObj" var haStr = "HashAtts" var allHaStr = "AllHashAtts" // ============================================================================================================================ // Main // ============================================================================================================================ func main() { err := shim.Start(new(SimpleChaincode)) if err != nil { fmt.Printf("Error starting Simple chaincode: %s", err) } } // ============================================================================================================================ // Init - reset all the things // ============================================================================================================================ func (t *SimpleChaincode) Init(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) { var Aval int var err error if len(args) != 1 { return nil, errors.New("Incorrect number of arguments. 
Expecting 1") } // Initialize the chaincode Aval, err = strconv.Atoi(args[0]) if err != nil { return nil, errors.New("Expecting integer value for asset holding") } // Write the state to the ledger err = stub.PutState("abc", []byte(strconv.Itoa(Aval))) //making a test var "abc", I find it handy to read/write to it right away to test the network if err != nil { return nil, err } var empty []string jsonAsBytes, _ := json.Marshal(empty) //marshal an emtpy array of strings to clear the index err = stub.PutState(marbleIndexStr, jsonAsBytes) if err != nil { return nil, err } var trades AllTrades jsonAsBytes, _ = json.Marshal(trades) //clear the open trade struct err = stub.PutState(openTradesStr, jsonAsBytes) if err != nil { return nil, err } var allObj AllObj jsonAsBytes, _ = json.Marshal(allObj) //clear the object struct err = stub.PutState(objIndexStr, jsonAsBytes) if err != nil { return nil, err } var allHa AllHashAttachments jsonAsBytes, _ = json.Marshal(allHa) //clear the hash attachment struct err = stub.PutState(allHaStr, jsonAsBytes) if err != nil { return nil, err } return nil, nil } // ============================================================================================================================ // Run - Our entry point for Invocations - [LEGACY] obc-peer 4/25/2016 // ============================================================================================================================ func (t *SimpleChaincode) Run(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) { fmt.Println("run is running " + function) return t.Invoke(stub, function, args) } // ============================================================================================================================ // Invoke - Our entry point for Invocations // ============================================================================================================================ func (t *SimpleChaincode) Invoke(stub shim.ChaincodeStubInterface, 
function string, args []string) ([]byte, error) { fmt.Println("invoke is running " + function) // Handle different functions if function == "init" { //initialize the chaincode state, used as reset return t.Init(stub, "init", args) } else if function == "delete" { //deletes an entity from its state res, err := t.Delete(stub, args) cleanTrades(stub) //lets make sure all open trades are still valid return res, err } else if function == "write" { //writes a value to the chaincode state return t.Write(stub, args) } else if function == "writeTest" { //writes a value to the chaincode state return t.WriteTest(stub, args) } else if function == "init_marble" { //create a new marble return t.init_marble(stub, args) } else if function == "set_user" { //change owner of a marble res, err := t.set_user(stub, args) cleanTrades(stub) //lets make sure all open trades are still valid return res, err } else if function == "open_trade" { //create a new trade order return t.open_trade(stub, args) } else if function == "perform_trade" { //forfill an open trade order res, err := t.perform_trade(stub, args) cleanTrades(stub) //lets clean just in case return res, err } else if function == "remove_trade" { //cancel an open trade order return t.remove_trade(stub, args) } else if function == "add_email" { //return t.addObject(stub, args[0], args[1], args[2], args[3]) return t.addEmail(stub, args[0], args[1]) } else if function == "add_attachment" { //return t.addObject(stub, args[0], args[1], args[2], args[3]) return t.addAttachment(stub, args[0], args[1], args[2]); } else if function == "verify_object" { return t.verifyObject(stub, args[0]) } else if function == "getEmailsListFromAttachment" { return t.getEmailsListFromAttachment(stub, args[0]) } fmt.Println("invoke did not find func: " + function) //error return nil, errors.New("Received unknown function invocation") } // 
============================================================================================================================ // Query - Our entry point for Queries // ============================================================================================================================ func (t *SimpleChaincode) Query(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) { fmt.Println("query is running " + function) // Handle different functions if function == "read" { //read a variable return t.read(stub, args) }else if function == "getEmailsListFromAttachment" { return t.getEmailsListFromAttachment(stub, args[0]) }else if function == "getEmailsListOfUser" { return t.getEmailsListOfUser(stub, args[0]) } fmt.Println("query did not find func: " + function) //error return nil, errors.New("Received unknown function query") } // ============================================================================================================================ // Read - read a variable from chaincode state // ============================================================================================================================ func (t *SimpleChaincode) read(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var name, jsonResp string var err error if len(args) != 1 { return nil, errors.New("Incorrect number of arguments. 
Expecting name of the var to query") } name = args[0] valAsbytes, err := stub.GetState(name) //get the var from chaincode state if err != nil { jsonResp = "{\"Error\":\"Failed to get state for " + name + "\"}" return nil, errors.New(jsonResp) } return valAsbytes, nil //send it onward } // ============================================================================================================================ // Delete - remove a key/value pair from state // ============================================================================================================================ func (t *SimpleChaincode) Delete(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { if len(args) != 1 { return nil, errors.New("Incorrect number of arguments. Expecting 1") } name := args[0] err := stub.DelState(name) //remove the key from chaincode state if err != nil { return nil, errors.New("Failed to delete state") } //get the marble index marblesAsBytes, err := stub.GetState(marbleIndexStr) if err != nil { return nil, errors.New("Failed to get marble index") } var marbleIndex []string json.Unmarshal(marblesAsBytes, &marbleIndex) //un stringify it aka JSON.parse() //remove marble from index for i, val := range marbleIndex { fmt.Println(strconv.Itoa(i) + " - looking at " + val + " for " + name) if val == name { //find the correct marble fmt.Println("found marble") marbleIndex = append(marbleIndex[:i], marbleIndex[i+1:]...) //remove it for x := range marbleIndex { //debug prints... 
fmt.Println(string(x) + " - " + marbleIndex[x]) } break } } jsonAsBytes, _ := json.Marshal(marbleIndex) //save new index err = stub.PutState(marbleIndexStr, jsonAsBytes) return nil, nil } // ============================================================================================================================ // WriteTest - write variable into chaincode state // ============================================================================================================================ func (t *SimpleChaincode) WriteTest(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var name, value string // Entities var err error fmt.Println("running write()") if len(args) != 2 { return nil, errors.New("Incorrect number of arguments. Expecting 2. name of the variable and value to set") } name = args[0] //rename for funsies value = args[1] err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state if err != nil { return nil, err } return nil, nil } // ============================================================================================================================ // Write - write variable into chaincode state // ============================================================================================================================ func (t *SimpleChaincode) Write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var name, value string // Entities var err error fmt.Println("running write()") if len(args) != 2 { return nil, errors.New("Incorrect number of arguments. Expecting 2. 
name of the variable and value to set") } name = args[0] //rename for funsies value = args[1] err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state if err != nil { return nil, err } return nil, nil } // ============================================================================================================================ // Init Marble - create a new marble, store into chaincode state // ============================================================================================================================ func (t *SimpleChaincode) init_marble(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var err error // 0 1 2 3 // "asdf", "blue", "35", "bob" if len(args) != 4 { return nil, errors.New("Incorrect number of arguments. Expecting 4") } //input sanitation fmt.Println("- start init marble") if len(args[0]) <= 0 { return nil, errors.New("1st argument must be a non-empty string") } if len(args[1]) <= 0 { return nil, errors.New("2nd argument must be a non-empty string") } if len(args[2]) <= 0 { return nil, errors.New("3rd argument must be a non-empty string") } if len(args[3]) <= 0 { return nil, errors.New("4th argument must be a non-empty string") } name := args[0] color := strings.ToLower(args[1]) user := strings.ToLower(args[3]) size, err := strconv.Atoi(args[2]) if err != nil { return nil, errors.New("3rd argument must be a numeric string") } //check if marble already exists marbleAsBytes, err := stub.GetState(name) if err != nil { return nil, errors.New("Failed to get marble name") } res := Marble{} json.Unmarshal(marbleAsBytes, &res) if res.Name == name { fmt.Println("This marble arleady exists: " + name) fmt.Println(res) return nil, errors.New("This marble arleady exists") //all stop a marble by this name exists } //build the marble json string manually str := `{"name": "` + name + `", "color": "` + color + `", "size": ` + strconv.Itoa(size) + `, "user": "` + user + `"}` err = stub.PutState(name, 
[]byte(str)) //store marble with id as key if err != nil { return nil, err } //get the marble index marblesAsBytes, err := stub.GetState(marbleIndexStr) if err != nil { return nil, errors.New("Failed to get marble index") } var marbleIndex []string json.Unmarshal(marblesAsBytes, &marbleIndex) //un stringify it aka JSON.parse() //append marbleIndex = append(marbleIndex, name) //add marble name to index list fmt.Println("! marble index: ", marbleIndex) jsonAsBytes, _ := json.Marshal(marbleIndex) err = stub.PutState(marbleIndexStr, jsonAsBytes) //store name of marble fmt.Println("- end init marble") return nil, nil } // ============================================================================================================================ // Set User Permission on Marble // ============================================================================================================================ func (t *SimpleChaincode) set_user(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var err error // 0 1 // "name", "bob" if len(args) < 2 { return nil, errors.New("Incorrect number of arguments. 
Expecting 2") } fmt.Println("- start set user") fmt.Println(args[0] + " - " + args[1]) marbleAsBytes, err := stub.GetState(args[0]) if err != nil { return nil, errors.New("Failed to get thing") } res := Marble{} json.Unmarshal(marbleAsBytes, &res) //un stringify it aka JSON.parse() res.User = args[1] //change the user jsonAsBytes, _ := json.Marshal(res) err = stub.PutState(args[0], jsonAsBytes) //rewrite the marble with id as key if err != nil { return nil, err } fmt.Println("- end set user") return nil, nil } // ============================================================================================================================ // Open Trade - create an open trade for a marble you want with marbles you have // ============================================================================================================================ func (t *SimpleChaincode) open_trade(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var err error var will_size int var trade_away Description // 0 1 2 3 4 5 6 //["bob", "blue", "16", "red", "16"] *"blue", "35* if len(args) < 5 { return nil, errors.New("Incorrect number of arguments. Expecting like 5?") } if len(args)%2 == 0 { return nil, errors.New("Incorrect number of arguments. Expecting an odd number") } size1, err := strconv.Atoi(args[2]) if err != nil { return nil, errors.New("3rd argument must be a numeric string") } open := AnOpenTrade{} open.User = args[0] open.Timestamp = makeTimestamp() //use timestamp as an ID open.Want.Color = args[1] open.Want.Size = size1 fmt.Println("- start open trade") jsonAsBytes, _ := json.Marshal(open) err = stub.PutState("_debug1", jsonAsBytes) for i := 3; i < len(args); i++ { //create and append each willing trade will_size, err = strconv.Atoi(args[i+1]) if err != nil { msg := "is not a numeric string " + args[i+1] fmt.Println(msg) return nil, errors.New(msg) } trade_away = Description{} trade_away.Color = args[i] trade_away.Size = will_size fmt.Println("! 
created trade_away: " + args[i]) jsonAsBytes, _ = json.Marshal(trade_away) err = stub.PutState("_debug2", jsonAsBytes) open.Willing = append(open.Willing, trade_away) fmt.Println("! appended willing to open") i++ } //get the open trade struct tradesAsBytes, err := stub.GetState(openTradesStr) if err != nil { return nil, errors.New("Failed to get opentrades") } var trades AllTrades json.Unmarshal(tradesAsBytes, &trades) //un stringify it aka JSON.parse() trades.OpenTrades = append(trades.OpenTrades, open) //append to open trades fmt.Println("! appended open to trades") jsonAsBytes, _ = json.Marshal(trades) err = stub.PutState(openTradesStr, jsonAsBytes) //rewrite open orders if err != nil { return nil, err } fmt.Println("- end open trade") return nil, nil } // ============================================================================================================================ // Perform Trade - close an open trade and move ownership // ============================================================================================================================ func (t *SimpleChaincode) perform_trade(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var err error // 0 1 2 3 4 5 //[data.id, data.closer.user, data.closer.name, data.opener.user, data.opener.color, data.opener.size] if len(args) < 6 { return nil, errors.New("Incorrect number of arguments. 
Expecting 6") } fmt.Println("- start close trade") timestamp, err := strconv.ParseInt(args[0], 10, 64) if err != nil { return nil, errors.New("1st argument must be a numeric string") } size, err := strconv.Atoi(args[5]) if err != nil { return nil, errors.New("6th argument must be a numeric string") } //get the open trade struct tradesAsBytes, err := stub.GetState(openTradesStr) if err != nil { return nil, errors.New("Failed to get opentrades") } var trades AllTrades json.Unmarshal(tradesAsBytes, &trades) //un stringify it aka JSON.parse() for i := range trades.OpenTrades { //look for the trade fmt.Println("looking at " + strconv.FormatInt(trades.OpenTrades[i].Timestamp, 10) + " for " + strconv.FormatInt(timestamp, 10)) if trades.OpenTrades[i].Timestamp == timestamp { fmt.Println("found the trade") marbleAsBytes, err := stub.GetState(args[2]) if err != nil { return nil, errors.New("Failed to get thing") } closersMarble := Marble{} json.Unmarshal(marbleAsBytes, &closersMarble) //un stringify it aka JSON.parse() //verify if marble meets trade requirements if closersMarble.Color != trades.OpenTrades[i].Want.Color || closersMarble.Size != trades.OpenTrades[i].Want.Size { msg := "marble in input does not meet trade requriements" fmt.Println(msg) return nil, errors.New(msg) } marble, e := findMarble4Trade(stub, trades.OpenTrades[i].User, args[4], size) //find a marble that is suitable from opener if e == nil { fmt.Println("! no errors, proceeding") t.set_user(stub, []string{args[2], trades.OpenTrades[i].User}) //change owner of selected marble, closer -> opener t.set_user(stub, []string{marble.Name, args[1]}) //change owner of selected marble, opener -> closer trades.OpenTrades = append(trades.OpenTrades[:i], trades.OpenTrades[i+1:]...) 
//remove trade jsonAsBytes, _ := json.Marshal(trades) err = stub.PutState(openTradesStr, jsonAsBytes) //rewrite open orders if err != nil { return nil, err } } } } fmt.Println("- end close trade") return nil, nil } // ============================================================================================================================ // findMarble4Trade - look for a matching marble that this user owns and return it // ============================================================================================================================ func findMarble4Trade(stub shim.ChaincodeStubInterface, user string, color string, size int) (m Marble, err error) { var fail Marble fmt.Println("- start find marble 4 trade") fmt.Println("looking for " + user + ", " + color + ", " + strconv.Itoa(size)) //get the marble index marblesAsBytes, err := stub.GetState(marbleIndexStr) if err != nil { return fail, errors.New("Failed to get marble index") } var marbleIndex []string json.Unmarshal(marblesAsBytes, &marbleIndex) //un stringify it aka JSON.parse() for i := range marbleIndex { //iter through all the marbles //fmt.Println("looking @ marble name: " + marbleIndex[i]); marbleAsBytes, err := stub.GetState(marbleIndex[i]) //grab this marble if err != nil { return fail, errors.New("Failed to get marble") } res := Marble{} json.Unmarshal(marbleAsBytes, &res) //un stringify it aka JSON.parse() //fmt.Println("looking @ " + res.User + ", " + res.Color + ", " + strconv.Itoa(res.Size)); //check for user && color && size if strings.ToLower(res.User) == strings.ToLower(user) && strings.ToLower(res.Color) == strings.ToLower(color) && res.Size == size { fmt.Println("found a marble: " + res.Name) fmt.Println("! 
end find marble 4 trade") return res, nil } } fmt.Println("- end find marble 4 trade - error") return fail, errors.New("Did not find marble to use in this trade") } // ============================================================================================================================ // Make Timestamp - create a timestamp in ms // ============================================================================================================================ func makeTimestamp() int64 { return time.Now().UnixNano() / (int64(time.Millisecond) / int64(time.Nanosecond)) } // ============================================================================================================================ // Remove Open Trade - close an open trade // ============================================================================================================================ func (t *SimpleChaincode) remove_trade(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) { var err error // 0 //[data.id] if len(args) < 1 { return nil, errors.New("Incorrect number of arguments. Expecting 1") } fmt.Println("- start remove trade") timestamp, err := strconv.ParseInt(args[0], 10, 64) if err != nil { return nil, errors.New("1st argument must be a numeric string") } //get the open trade struct tradesAsBytes, err := stub.GetState(openTradesStr) if err != nil { return nil, errors.New("Failed to get opentrades") } var trades AllTrades json.Unmarshal(tradesAsBytes, &trades) //un stringify it aka JSON.parse() for i := range trades.OpenTrades { //look for the trade //fmt.Println("looking at " + strconv.FormatInt(trades.OpenTrades[i].Timestamp, 10) + " for " + strconv.FormatInt(timestamp, 10)) if trades.OpenTrades[i].Timestamp == timestamp { fmt.Println("found the trade") trades.OpenTrades = append(trades.OpenTrades[:i], trades.OpenTrades[i+1:]...) 
//remove this trade jsonAsBytes, _ := json.Marshal(trades) err = stub.PutState(openTradesStr, jsonAsBytes) //rewrite open orders if err != nil { return nil, err } break } } fmt.Println("- end remove trade") return nil, nil } // ============================================================================================================================ // Clean Up Open Trades - make sure open trades are still possible, remove choices that are no longer possible, remove trades that have no valid choices // ============================================================================================================================ func cleanTrades(stub shim.ChaincodeStubInterface) (err error) { var didWork = false fmt.Println("- start clean trades") //get the open trade struct tradesAsBytes, err := stub.GetState(openTradesStr) if err != nil { return errors.New("Failed to get opentrades") } var trades AllTrades json.Unmarshal(tradesAsBytes, &trades) //un stringify it aka JSON.parse() fmt.Println("# trades " + strconv.Itoa(len(trades.OpenTrades))) for i := 0; i < len(trades.OpenTrades); { //iter over all the known open trades fmt.Println(strconv.Itoa(i) + ": looking at trade " + strconv.FormatInt(trades.OpenTrades[i].Timestamp, 10)) fmt.Println("# options " + strconv.Itoa(len(trades.OpenTrades[i].Willing))) for x := 0; x < len(trades.OpenTrades[i].Willing); { //find a marble that is suitable fmt.Println("! on next option " + strconv.Itoa(i) + ":" + strconv.Itoa(x)) _, e := findMarble4Trade(stub, trades.OpenTrades[i].User, trades.OpenTrades[i].Willing[x].Color, trades.OpenTrades[i].Willing[x].Size) if e != nil { fmt.Println("! errors with this option, removing option") didWork = true trades.OpenTrades[i].Willing = append(trades.OpenTrades[i].Willing[:x], trades.OpenTrades[i].Willing[x+1:]...) //remove this option x-- } else { fmt.Println("! this option is fine") } x++ fmt.Println("! 
x:" + strconv.Itoa(x)) if x >= len(trades.OpenTrades[i].Willing) { //things might have shifted, recalcuate break } } if len(trades.OpenTrades[i].Willing) == 0 { fmt.Println("! no more options for this trade, removing trade") didWork = true trades.OpenTrades = append(trades.OpenTrades[:i], trades.OpenTrades[i+1:]...) //remove this trade i-- } i++ fmt.Println("! i:" + strconv.Itoa(i)) if i >= len(trades.OpenTrades) { //things might have shifted, recalcuate break } } if didWork { fmt.Println("! saving open trade changes") jsonAsBytes, _ := json.Marshal(trades) err = stub.PutState(openTradesStr, jsonAsBytes) //rewrite open orders if err != nil { return err } } else { fmt.Println("! all open trades are fine") } fmt.Println("- end clean trades") return nil } const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" func RandStringBytes(n int) string { b := make([]byte, n) for i := range b { b[i] = letterBytes[rand.Intn(len(letterBytes))] } return string(b) } // var addEmailStr = "_addEmail" // var addLogStr = "_addLog" // var addUserStr = "_addUser" // var editUserStr = "_editUser" // var getUserStr = "_getUser" // var getAttachmentStr = "_getAttachment" // var getLogStr = "_getLog" // ObjId string `json:obj_id` // Objype string `json:obj_type` // Content string `json:content` // UserId string `json:user_id` // CrtDt string `json:crt_dt` func (t *SimpleChaincode) addObject(stub shim.ChaincodeStubInterface, objId string, objType string, content string, userId string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(objIndexStr) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []Obj json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() //append currentTime := time.Now() email := Obj{ObjId: objId, ObjType: objType, Content: content, UserId: userId, CrtDt: currentTime.String()} emailIndex = append(emailIndex, email) //add email name to index list fmt.Println("! 
email index: ", emailIndex) jsonAsBytes, _ := json.Marshal(emailIndex) err = stub.PutState(objIndexStr, jsonAsBytes) //store name of email if err != nil { return nil, err } fmt.Println("- end add email") valAsbytes, err := stub.GetState(objIndexStr) //get the var from chaincode state if err != nil { jsonResp := "{\"Error\":\"Failed to get state for " + objIndexStr + "\"}" return nil, errors.New(jsonResp) } return valAsbytes, nil //send it onward } func (t *SimpleChaincode) addEmail(stub shim.ChaincodeStubInterface, userid string, data string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(userid) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []string json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() //append //currentTime := time.Now() email := data emailIndex = append(emailIndex, email) //add email name to index list fmt.Println("! email index: ", emailIndex) jsonAsBytes, _ := json.Marshal(emailIndex) err = stub.PutState(userid, jsonAsBytes) //store name of email if err != nil { return nil, err } fmt.Println("- end add email") valAsbytes, err := stub.GetState(userid) //get the var from chaincode state if err != nil { jsonResp := "{\"Error\":\"Failed to get state for " + objIndexStr + "\"}" return nil, errors.New(jsonResp) } return valAsbytes, nil //send it onward } func (t *SimpleChaincode) addAttachment(stub shim.ChaincodeStubInterface, fileHash string, userid string, emailContent string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(fileHash) //userEmailAsBytes, err2 := stub.GetState(userid) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []string //var useremailIndex []string json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() //json.Unmarshal(userEmailAsBytes, &useremailIndex) //un stringify it aka JSON.parse() //append //currentTime := time.Now() 
//email := Obj{ObjId: objId, ObjType: objType, Content: content, UserId: userId, CrtDt: currentTime.String()} email := emailContent; emailIndex = append(emailIndex, email) //add email name to index list fmt.Println("! email index: ", emailIndex) jsonAsBytes, _ := json.Marshal(emailIndex) err = stub.PutState(fileHash, jsonAsBytes) //store hash+userid/email pair if err != nil { return nil, err } //useremailIndex = append(useremailIndex, email) //userjsonAsBytes, _ := json.Marshal(useremailIndex) //err = stub.PutState(userid, userjsonAsBytes) //store userid/mail pair //if err != nil { // return nil, err //} fmt.Println("- end add email") valAsbytes, err := stub.GetState(fileHash) //get the var from chaincode state if err != nil { jsonResp := "{\"Error\":\"Failed to get state for " + objIndexStr + "\"}" return nil, errors.New(jsonResp) } return valAsbytes, nil //send it onward } func (t *SimpleChaincode) addHashAttachment(stub shim.ChaincodeStubInterface, haId string, userId string, objId string) ([]byte, error) { var err error //get the email index allHaAsBytes, err := stub.GetState(allHaStr) if err != nil { return nil, errors.New("Failed to get email index") } var allHa AllHashAttachments var allHaIndex []HashAttachments json.Unmarshal(allHaAsBytes, &allHa) //un stringify it aka JSON.parse() allHaIndex = allHa.HashAttachments //append att := Attachment{UserId: userId, ObjId: []string{objId}} ha := HashAttachments{HaId: haId, Attachments: []Attachment{att}} allHaIndex = append(allHaIndex, ha) //add email name to index list allHa.HashAttachments = allHaIndex fmt.Println("! 
hash attachment index: ", allHa) jsonAsBytes, _ := json.Marshal(allHa) err = stub.PutState(allHaStr, jsonAsBytes) //store name of email if err != nil { return nil, err } fmt.Println("- end add email") valAsbytes, err := stub.GetState(allHaStr) //get the var from chaincode state if err != nil { jsonResp := "{\"Error\":\"Failed to get state for " + allHaStr + "\"}" return nil, errors.New(jsonResp) } return valAsbytes, nil //send it onward } func (t *SimpleChaincode) getEmailsListFromAttachment(stub shim.ChaincodeStubInterface, fileHash string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(fileHash) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []string //var obj Obj //found := false json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() /*for i := range emailIndex { if objId == emailIndex[i].ObjId { found = true obj = emailIndex[i] break } }*/ jsonAsBytes, _ := json.Marshal(emailIndex); return jsonAsBytes, nil } func (t *SimpleChaincode) getEmailsListOfUser(stub shim.ChaincodeStubInterface, userid string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(userid) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []string //var obj Obj //found := false json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() /*for i := range emailIndex { if objId == emailIndex[i].ObjId { found = true obj = emailIndex[i] break } }*/ jsonAsBytes, _ := json.Marshal(emailIndex); return jsonAsBytes, nil } func (t *SimpleChaincode) verifyObject(stub shim.ChaincodeStubInterface, objId string) ([]byte, error) { var err error //get the email index emailAsBytes, err := stub.GetState(objId) if err != nil { return nil, errors.New("Failed to get email index") } var emailIndex []Obj var obj Obj //found := false json.Unmarshal(emailAsBytes, &emailIndex) //un stringify it aka JSON.parse() /*for i := range 
emailIndex { if objId == emailIndex[i].ObjId { found = true obj = emailIndex[i] break } }*/ //if found == false { // return nil, errors.New("No email matches") //} jsonAsBytes, _ := json.Marshal(obj) return jsonAsBytes, nil }
manchunw/learn-chaincode
finished/chaincode_finished.go
GO
apache-2.0
35,047
//
// Cluster.cpp
// ginseng
//
// Created by Daniel Shiposha on 16/03/2018.
//
// A Cluster groups Sources and Interactors and is registered in a global
// name -> Cluster map. The public Cluster type is a handle over a shared
// implementation object (copies alias the same state), which erases itself
// from the registry once all of its flows complete.

#include <ginseng/tefri/abstract/Cluster.h>
#include <ginseng/tefri/abstract/Interactor.h>
#include <ginseng/tefri/abstract/Source.h>
#include <ginseng/tefri/Flow.h>
#include <ginseng/tefri/exception/ClusterNotFound.h>
#include <ginseng/tefri/exception/ClusterFinishingFailed.h>
#include <ginseng/tefri/implementation/MultipleFlowCompleterExecutor.h>
#include <ginseng/util/Util.h>

#include <map>
#include <vector>
#include <list>

namespace ginseng::tefri::abstract
{
    namespace implementation
    {
        // Global name -> Cluster registry and the syncable that guards it.
        std::map<std::string, ::ginseng::tefri::abstract::Cluster> registered_clusters;
        util::concurrent::Syncable clusters_syncable;

        // Shared state behind the public Cluster handle (pimpl).
        class Cluster
        {
        public:
            // `eraser` is a multi-flow completer: when every attached flow has
            // completed it unregisters this cluster by name and marks it
            // finished.
            Cluster()
                : is_enabled(new bool(true)),
                  eraser(complete([&]
                  {
                      implementation::clusters_syncable.sync([&]
                      {
                          implementation::registered_clusters.erase(name);
                      });
                      is_finished = true;
                  }))
            {}

            Root<bool>::Pointer is_enabled;   // shared enabled/disabled flag
            std::string name;                 // registry key; empty until registered
            ::ginseng::tefri::implementation::MultipleFlowCompleterExecutor eraser;
            std::vector<::ginseng::tefri::abstract::Interactor *> interactors; // non-owning
            std::list<ginseng::tefri::abstract::Source *> sources;             // non-owning
            bool is_finished = false;
        };

        // Look up a registered cluster by name; throws ClusterNotFound when absent.
        std::map<std::string, ::ginseng::tefri::abstract::Cluster>::iterator get_cluster(const std::string &name)
        {
            auto result = registered_clusters.find(name);
            if (result == registered_clusters.end())
                throw ::ginseng::tefri::exception::ClusterNotFound(name);
            return result;
        }

        bool is_cluster_exists(const std::string &name)
        {
            return registered_clusters.count(name) > 0;
        }

        util::concurrent::Syncable &get_registered_clusters_syncable()
        {
            return clusters_syncable;
        }
    }

    Cluster::Cluster()
        : self(new implementation::Cluster())
    {}

    // Copies alias the same implementation object (shared-handle semantics:
    // the copy constructor copies `self`, it does not clone the state).
    Cluster::Cluster(const Cluster &other)
        : self(other.self)
    {}

    Cluster::Cluster(Cluster &&other)
        : self(std::move(other.self))
    {}

    Cluster::~Cluster()
    {}

    bool Cluster::is_enabled() const
    {
        return *self->is_enabled;
    }

    bool Cluster::is_disabled() const
    {
        return !is_enabled();
    }

    void Cluster::enable()
    {
        sync([&] { *self->is_enabled = true; });
    }

    void Cluster::disable()
    {
        sync([&] { *self->is_enabled = false; });
    }

    // Disable the cluster, detach every interactor from the source that owns
    // it, mark the cluster finished and drop it from the global registry.
    // Throws ClusterFinishingFailed when an interactor could not be removed
    // from any source.
    void Cluster::finish()
    {
        sync([&]
        {
            if (self->is_finished)
                return;

            if (self->sources.empty())
            {
                // Nothing to tear down: just unregister.
                implementation::clusters_syncable.sync([&]
                {
                    implementation::registered_clusters.erase(self->name);
                });
                // NOTE(review): this returns from the lambda only, so the
                // erase after sync() below still runs — harmless second erase
                // of an already-removed key.
                return;
            }

            *self->is_enabled = false;

            bool is_interactor_removed = true;
            for (auto &&interactor : self->interactors)
            {
                // Stop at the first source that actually owned this interactor.
                for(auto &&source : self->sources)
                    if((is_interactor_removed = source->remove_interactor(*interactor)))
                        break;

                if(!is_interactor_removed)
                    throw exception::ClusterFinishingFailed();
            }

            self->is_finished = true;
        });

        implementation::clusters_syncable.sync([&]
        {
            implementation::registered_clusters.erase(self->name);
        });
    }

    // Register `cluster` under `name`. If the name is already taken, the
    // caller's handle is rebound to the existing implementation instead of
    // inserting a duplicate; otherwise the freshly inserted copy records the
    // name (only if it did not already have one).
    void Cluster::register_cluster(const std::string &name, Cluster &cluster)
    {
        implementation::clusters_syncable.sync([&]
        {
            auto exist_iterator = implementation::registered_clusters.find(name);
            if (exist_iterator != implementation::registered_clusters.end())
            {
                cluster.self = exist_iterator->second.self;
                return;
            }

            auto iterator = implementation::registered_clusters.insert(std::pair<std::string, Cluster>(name, cluster));
            if(iterator.first->second.self->name.empty())
                iterator.first->second.self->name = name;
        });
    }

    void Cluster::add_interactor(Interactor &interactor)
    {
        sync([&] { self->interactors.push_back(&interactor); });
    }

    // Attach `source` (idempotent: re-adding the same source is a no-op).
    // Wires two bookkeeping completers: `completer` ticks the shared eraser
    // when this source's flow completes, and `source_remover` drops the
    // source from this cluster's list.
    void Cluster::add_source(Source &source)
    {
        sync([&]
        {
            if(std::find(self->sources.begin(), self->sources.end(), &source) != self->sources.end())
                return; // already attached

            self->sources.push_back(&source);
            self->eraser.increase_flow_count();

            auto eraser = self->eraser;
            auto completer = complete([eraser]() mutable { eraser(); });

            // The shared impl is captured by value so the remover stays valid
            // even after this handle is gone; `source` is captured by
            // reference — assumes the Source outlives its flow. TODO confirm.
            auto self_copy = self;
            auto source_remover = complete([self_copy, &source]
            {
                self_copy->sources.erase
                (
                    std::remove(self_copy->sources.begin(), self_copy->sources.end(), &source),
                    self_copy->sources.end()
                );
            });

            self->interactors.push_back(completer.get());
            self->interactors.push_back(source_remover.get());

            // NOTE(review): `>>` binds tighter than the comma, so unless the
            // Flow DSL overloads `operator,` for pipeline results,
            // `source_remover` here is a discarded comma operand rather than
            // part of the pipeline — confirm against the Flow API.
            flow(source) >> completer, source_remover;
        });
    }

    Root<bool>::Pointer Cluster::get_state_variable() const
    {
        return self->is_enabled;
    }
}
Owldream/Ginseng
src/tefri/abstract/Cluster.cpp
C++
apache-2.0
5,808
module Cumulus
  module Common
    # A time source pinned to UTC: every value it hands out is in UTC,
    # regardless of the process's local time zone.
    class UTCTimeSource
      # Returns the current moment converted to UTC.
      def now
        Time.now.utc
      end

      # Builds a time from the given parts (year, month, day, ...),
      # interpreting them as UTC rather than local time.
      def local(*parts)
        Time.utc(*parts)
      end
    end
  end
end
lucidsoftware/cumulus
lib/common/models/UTCTimeSource.rb
Ruby
apache-2.0
258
{{-- Coming-soon landing page for "Watch Over Me": countdown timer plus an
     e-mail capture form that POSTs to the pages.subscribe route. Blade
     comments are stripped server-side, so rendered output is unchanged. --}}
<!DOCTYPE html>
<html lang="en">
<!-- {{ \Illuminate\Support\Facades\Config::get('custom.html_developer_description') }} -->
<head>
    <meta charset="utf-8" />
    <meta content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" name="viewport" />
    <meta content="" name="description" />
    <meta content="" name="author" />

    <!-- start: Mobile Specific -->
    <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
    <!-- end: Mobile Specific -->

    <!-- ================== BEGIN BASE CSS STYLE ================== -->
    <link href="http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700" rel="stylesheet">
    <link href="{{ asset('assets/plugins/jquery-ui/themes/base/minified/jquery-ui.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/plugins/bootstrap/css/bootstrap.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/plugins/font-awesome/css/font-awesome.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/css/animate.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/css/style.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/css/style-responsive.min.css') }}" rel="stylesheet" type="text/css" >
    <link href="{{ asset('assets/css/theme/default.css') }}" rel="stylesheet" type="text/css" id="theme">
    <!-- ================== END BASE CSS STYLE ================== -->

    <!-- ================== BEGIN PAGE CSS STYLE ================== -->
    <link href="{{ asset('assets/plugins/jquery.countdown/jquery.countdown.css') }}" rel="stylesheet" type="text/css">
    <!-- ================== END PAGE CSS STYLE ================== -->

    <!-- ================== BEGIN BASE JS ================== -->
    {{-- pace loads in <head> so the progress bar starts before the body renders --}}
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/pace/pace.min.js') }}"></script>
    <!-- ================== END BASE JS ================== -->

    <link rel="shortcut icon" href="img/favicon.ico">

    <title>Watch Over Me</title>
</head>
<body class="bg-white p-t-0 pace-top">
    <!-- begin #page-loader -->
    <div id="page-loader" class="fade in"><span class="spinner"></span></div>
    <!-- end #page-loader -->

    <!-- begin #page-container -->
    <div id="page-container" class="fade">
        <!-- begin coming-soon -->
        <div class="coming-soon">
            <div class="coming-soon-header">
                <div class="bg-cover"></div>
                <div class="brand">
                    <img src="{{ asset('img/overwatch.png') }}" width="70px">
                    WatchOver Me
                </div>
                <div class="timer">
                    {{-- filled in by the jquery.countdown plugin (ComingSoon.init below) --}}
                    <div id="timer"></div>
                </div>
                <div class="desc">
                    Nosso site está quase pronto e fornecerá boas ferramentas <br />para a comunidade de <b>OverWatch</b> brasileira.
                </div>
            </div>
            <div class="coming-soon-content">
                <div class="desc">
                    Cadastre-se agora para receber <b>benefícios</b> no lançamento.
                </div>
                <div class="input-group">
                    <input type="text" class="form-control" id="email" placeholder="Email Address" />
                    <div class="input-group-btn">
                        <button type="button" class="btn btn-success" id="subscribe-go">Quero</button>
                    </div>
                </div>
                <p class="help-block m-b-25"><i>Não fazemos spam. Seu email está seguro conosco.</i></p>
            </div>
        </div>
        <!-- end coming-soon -->
    </div>
    <!-- end page container -->

    <!-- ================== BEGIN BASE JS ================== -->
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery/jquery-1.9.1.min.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery/jquery-migrate-1.1.0.min.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery-ui/ui/minified/jquery-ui.min.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/bootstrap/js/bootstrap.min.js') }}"></script>
    <!--[if lt IE 9]>
        <script type="text/javascript" src="{{ URL::asset('assets/crossbrowserjs/html5shiv.js') }}"></script>
        <script type="text/javascript" src="{{ URL::asset('assets/crossbrowserjs/respond.min.js') }}"></script>
        <script type="text/javascript" src="{{ URL::asset('assets/crossbrowserjs/excanvas.min.js') }}"></script>
    <![endif]-->
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/slimscroll/jquery.slimscroll.min.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery-cookie/jquery.cookie.js') }}"></script>
    <!-- ================== END BASE JS ================== -->

    <!-- ================== BEGIN PAGE LEVEL JS ================== -->
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery.countdown/jquery.plugin.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/plugins/jquery.countdown/jquery.countdown.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/js/coming-soon.demo.min.js') }}"></script>
    <script type="text/javascript" src="{{ URL::asset('assets/js/apps.min.js') }}"></script>
    <!-- ================== END PAGE LEVEL JS ================== -->

    <script>
        $(document).ready(function() {
            App.init();
            ComingSoon.init();

            // Only a client-side length check; the server does the real validation.
            $('#subscribe-go').click(function () {
                var email = $('#email').val();

                if (email.length <= 5) {
                    alert('Por favor, digite um email válido!');
                } else {
                    // Get the user profile
                    $.ajax({
                        url: "{{ route('pages.subscribe') }}",
                        type: "POST",
                        data: {
                            _token: '{{ csrf_token() }}',
                            email: email
                        },
                    }).done(function(data) {
                        alert('Agradecemos seu cadastro!\nVocê receberá em breve um email com novas informações.');
                    });
                }
            });
        });
    </script>

    @if (getenv('APP_ENV') == 'production')
        <script type="text/javascript" src="{{ URL::asset('js/analytics/analytics.js') }}"></script>
    @endif
</body>
</html>
vsouto/watchoverme
resources/views/pages/coming-soon.blade.php
PHP
apache-2.0
6,343
{{-- Movie browse page: breadcrumb + filter bar, then a poster grid with a
     flip-card per title (rating, add-to-list actions), followed by
     pagination. Blade comments are stripped server-side; output unchanged. --}}
@extends('Main.Boilerplate')

@section('htmltag')
<html id="browse">
@stop

@section('bodytag')
<body class="padding nav" data-url="{{ url() }}">
@stop

@section('content')
<div class="browse container">
    <div class="row">
        <ol class="breadcrumb breadcrumb-arrow">
            <li><a href="{{ url('/') }}">{{ trans('main.home') }}</a></li>
            <li class="active"><span>{{ trans('main.movies') }}</span></li>
            <li class="breadcrumb-tools">
                @if(Helpers::hasAccess('titles.create'))
                    <a href="{{ url(Str::slug(trans('main.movies')) . '/create') }}" class="pull-right hidden-xs btn btn-inverse">{{ trans('main.create new') }}</a>
                @endif
            </li>
        </ol>
        @include('Partials.FilterBar', array('action' => Str::slug(head(Request::segments()))))
    </div>
    <div class="row">
        @include('Partials.Response')
    </div>
    <div class="row">
        <div class="col-xs-12 col-sm-6 col-md-8 browse-col">
            <div id="grid" class="browse-grid">
                @if ( ! $data->isEmpty())
                    @foreach($data as $k => $r)
                        {{-- data-* attributes are presumably consumed by the client-side
                             grid filter/sort (confirm in the browse JS); popularity falls
                             back metacritic votes -> imdb votes -> tmdb popularity --}}
                        <figure class="col-sm-4 col-lg-3 col-xs-8" ontouchstart="this.classList.toggle('hover');" data-filter-class='{{ Helpers::genreFilter($r->genre) }}' data-popularity="{{ $r['mc_num_of_votes'] ? $r['mc_num_of_votes'] : ($r['imdb_votes_num'] ? $r['imdb_votes_num'] : $r['tmdb_popularity'])}}" data-name="{{{ $r->title }}}" data-release="{{{ $r->year }}}">
                            <div class="img-container">
                                <a class="flip-container-vertical" href="{{Helpers::url($r['title'], $r['id'], $r['type'])}}">
                                    <div class="flipper">
                                        <img class ="img-responsive flip-front" src="{{str_replace('w185', 'w342', $r->poster) }}" alt="{{{ $r['title'] }}}">
                                        <div class="flip-back">
                                            <h5>{{ $r['title'] }}</h5>
                                        </div>
                                    </div>
                                </a>
                                <figcaption title="{{{ $r->title }}}" >
                                    <a href="{{Helpers::url($r['title'], $r['id'], $r['type'])}}"> {{ Helpers::shrtString($r['title']) }} </a>
                                    <section class="row action-buttons">
                                        @include('Partials.AddToListButtons')
                                        {{-- rating display falls back metacritic -> imdb -> tmdb --}}
                                        @if ($r['mc_critic_score'])
                                            <span class="pull-right">{{ substr($r['mc_critic_score'], 0, -1) . '/10' }}</span>
                                        @elseif ($r['imdb_rating'])
                                            <span class="pull-right">{{ ! str_contains($r['imdb_rating'], '.') ? $r['imdb_rating'] . '.0' : $r['imdb_rating'] . '/10'}} </span>
                                        @elseif ($r['tmdb_rating'])
                                            <span class="pull-right">{{ ! str_contains($r['tmdb_rating'], '.') ? $r['tmdb_rating'] . '.0' : $r['tmdb_rating'] . '/10'}}</span>
                                        @endif
                                    </section>
                                </figcaption>
                            </div>
                        </figure>
                    @endforeach
                @else
                    <div><h3 class="reviews-not-released"> {{ trans('main.no results') }}</h3></div>
                @endif
            </div>
            <div class="pagination-bottom">{{ $data->appends(array())->links() }}</div>
        </div>
        <div class="col-xs-6 col-md-4"></div>
    </div>
</div>
@stop
liverbool/n.com
app/views/Titles/Browse.blade.php
PHP
apache-2.0
3,528
package com.akexorcist.passapp;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Local JVM unit test (no Android device needed).
 * To work on unit tests, switch the Test Artifact in the Build Variants view.
 */
public class ExampleUnitTest {
    @Test
    public void addition_isCorrect() throws Exception {
        final int expected = 4;
        final int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}
akexorcist/Example-SamsungSDK
PassApp/app/src/test/java/com/akexorcist/passapp/ExampleUnitTest.java
Java
apache-2.0
315
'use strict';

/**
 * @ngdoc overview
 * @name hciApp
 * @description
 * # hciApp
 *
 * Main module of the application.
 */
angular
  .module('hciApp', [
    'ngAnimate',
    'ngCookies',
    'ngResource',
    'ngRoute',
    'ngSanitize',
    'ngTouch',
    'ui.bootstrap',
    'ngMaterial'
  ])
  .config(function ($routeProvider) {
    // Route table kept as data so adding a screen is a one-line change.
    var routes = [
      { path: '/main',       templateUrl: 'views/main.html',    controller: 'MainCtrl' },
      { path: '/main/:mode', templateUrl: 'views/main.html',    controller: 'MainCtrl' },
      { path: '/about',      templateUrl: 'views/about.html',   controller: 'AboutCtrl' },
      { path: '/ideas',      templateUrl: 'views/ideas.html',   controller: 'IdeasCtrl' },
      { path: '/details',    templateUrl: 'views/details.html', controller: 'DetailsCtrl' }
    ];

    routes.forEach(function (route) {
      $routeProvider.when(route.path, {
        templateUrl: route.templateUrl,
        controller: route.controller
      });
    });

    // Any unknown URL falls back to the main screen.
    $routeProvider.otherwise({ redirectTo: '/main' });
  });
admitriyev/hci-prototype
app/scripts/app.js
JavaScript
apache-2.0
952
/* * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
 *
 * END OF TERMS AND CONDITIONS
 *
 * Copyright 2017 Henryk Timur Domagalski
 *
 */

package net.henryco.opalette.application;

import android.app.Fragment;
import android.app.FragmentTransaction;
import android.app.NotificationManager;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.NotificationCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.preference.PreferenceManager;
import android.support.v7.widget.Toolbar;
import android.text.InputType;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.ToggleButton;

import com.google.firebase.analytics.FirebaseAnalytics;

import net.henryco.opalette.R;
import net.henryco.opalette.api.glES.glSurface.renderers.universal.OPallUniRenderer;
import net.henryco.opalette.api.glES.glSurface.renderers.universal.UniRenderer;
import net.henryco.opalette.api.glES.glSurface.view.OPallSurfaceView;
import net.henryco.opalette.api.utils.OPallAnimated;
import net.henryco.opalette.api.utils.Utils;
import net.henryco.opalette.api.utils.dialogs.OPallAlertDialog;
import net.henryco.opalette.api.utils.lambda.consumers.OPallConsumer;
import net.henryco.opalette.api.utils.requester.Request;
import net.henryco.opalette.api.utils.requester.RequestSender;
import net.henryco.opalette.api.utils.views.OPallViewInjector;
import net.henryco.opalette.application.conf.GodConfig;
import net.henryco.opalette.application.programs.ProgramPipeLine;
import net.henryco.opalette.application.proto.AppMainProto;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Main editing screen of the application.
 *
 * Hosts the GL render surface ({@link OPallSurfaceView}), a toolbar with three
 * mutually-exclusive toggles (image / palette / filter) that select one of
 * three option containers, and a fragment container that can temporarily
 * replace the scrollable options view. Rendering work is requested through
 * {@link RequestSender} messages consumed by the {@link UniRenderer}.
 */
public class MainActivity extends AppCompatActivity
        implements AppMainProto, ProgramPipeLine.AppProgramProtocol {

    // Analytics handle; stays null when the user disabled analytics in preferences.
    private FirebaseAnalytics firebaseAnalytics;
    // Dispatches Request messages to registered listeners (the GL renderer).
    private final RequestSender stateRequester = new RequestSender();
    // Fragment currently shown in the fragment container; null when the scroll options view is active.
    private Fragment actualFragment = null;
    // True while a fragment has replaced the scroll options view.
    private boolean optionsSwitched = false;

    // Toolbar toggles; exactly one is checked at a time (see checkToggle).
    private ToggleButton imageToggle, paletteToggle, filterToggle;
    private ToggleButton[] toggleGroup;
    // Option containers; exactly one is visible at a time (see showContainer).
    private View imageContainer, paletteContainer, filterContainer;
    private View[] containerGroup;

    // Share button (default action) and the per-fragment options button in the app bar.
    private MenuItem topBarButton;
    private MenuItem topOptButton;
    // Actions run when either app-bar button is tapped (see onOptionsItemSelected).
    private final List<Runnable> topBarButtonActions = new ArrayList<>();
    // Arbitrary but distinct menu-item ids.
    private final int topBarButtonId = 2137;
    private final int topOptButtonId = 7321;
    // Default app-bar action: request the rendered bitmap from the GL program
    // (get_bitmap_from_program presumably a constant of AppProgramProtocol -- TODO confirm).
    private final Runnable topBarButtonDefaultAction = () ->
            stateRequester.sendRequest(new Request(get_bitmap_from_program));

    /** @return the analytics instance, or {@code null} when analytics is disabled. */
    @Nullable
    @Override
    public FirebaseAnalytics getFireBase() {
        return firebaseAnalytics;
    }

    /**
     * Shows a preview dialog for the rendered bitmap with save / share / cancel
     * choices. Saving opens a nested dialog that lets the user edit the
     * auto-generated file name before writing.
     *
     * @param bitmap the bitmap produced by the render pipeline
     */
    @Override
    public void setResultBitmap(Bitmap bitmap) {
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        ImageView imageView = new ImageView(this);
        imageView.setImageBitmap(bitmap);
        String name = GodConfig.genDefaultImgFileName();
        new OPallAlertDialog()
                .title("")
                .content(imageView)
                .positive(getResources().getString(R.string.save), () -> {
                    // Inflate a one-line text field pre-filled with the generated name.
                    View v = new LinearLayout(this);
                    OPallViewInjector.inject(this, new OPallViewInjector<AppMainProto>(v, R.layout.textline) {
                        @Override
                        protected void onInject(AppMainProto context, View view) {
                            TextView imageNameLine = (TextView) view.findViewById(R.id.lineText);
                            imageNameLine.setInputType(InputType.TYPE_CLASS_TEXT);
                            imageNameLine.setText(name);
                            new OPallAlertDialog()
                                    .title(getResources().getString(R.string.save_as))
                                    .content(v)
                                    .positive(getResources().getString(R.string.save), () -> {
                                        Utils.saveBitmapAction(bitmap, imageNameLine.getText().toString(), context.getActivityContext());
                                        createSaveSuccessNotification(context.getActivityContext(), name);
                                    }).negative(getResources().getString(R.string.cancel))
                                    .show(getSupportFragmentManager(), "Bitmap save");
                        }
                    });
                })
                .negative(getResources().getString(R.string.share), () -> Utils.shareBitmapAction(
                        bitmap, name, this,
                        // Optionally also save after sharing, per user preference.
                        preferences.getBoolean(GodConfig.PREF_KEY_SAVE_AFTER, false),
                        () -> createSaveSuccessNotification(this, name)))
                .neutral(getResources().getString(R.string.cancel))
                .show(getSupportFragmentManager(), "Bitmap preview");
    }

    // Monotonically increasing notification id so successive saves don't replace each other.
    private static int notifications = 1;

    /** Posts a status-bar notification reporting that the file {@code name} was saved. */
    private static void createSaveSuccessNotification(Context context, String name) {
        String title = context.getResources().getString(R.string.notification_save_success);
        NotificationCompat.Builder b = new NotificationCompat.Builder(context);
        b.setAutoCancel(true)
                .setVibrate(new long[]{0}) // zero-length pattern: no vibration
                .setWhen(System.currentTimeMillis())
                .setSmallIcon(R.mipmap.opalette_logo)
                .setTicker("")
                .setContentTitle(title)
                .setContentText(name)
                .setContentInfo("");
        NotificationManager nm = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
        nm.notify(notifications++, b.build());
    }

    /**
     * Wires up the toolbar toggles, option containers, status bar color and the
     * GL surface; forwards the start-up bitmap to the render pipeline.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        if (preferences.getBoolean(GodConfig.PREF_KEY_ANALYTICS_ENABLE, false)) {
            Utils.log("ANALYTICS ENABLE");
            firebaseAnalytics = FirebaseAnalytics.getInstance(this);
        } else firebaseAnalytics = null;
        Utils.log("SAVE AFTER SHARE stat: "+preferences.getBoolean(GodConfig.PREF_KEY_SAVE_AFTER, false));
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        imageToggle = (ToggleButton) toolbar.findViewById(R.id.toolbarButtonImage);
        paletteToggle = (ToggleButton) toolbar.findViewById(R.id.toolbarButtonPalette);
        filterToggle = (ToggleButton) toolbar.findViewById(R.id.toolbarButtonFilter);
        toggleGroup = new ToggleButton[]{imageToggle, paletteToggle, filterToggle};
        imageContainer = findViewById(R.id.imageOptionsContainer);
        paletteContainer = findViewById(R.id.paletteOptionsContainer);
        filterContainer = findViewById(R.id.filterOptionsContainer);
        containerGroup = new View[]{imageContainer, paletteContainer, filterContainer};
        imageToggle.setOnClickListener(this::toggleImage);
        paletteToggle.setOnClickListener(this::togglePalette);
        filterToggle.setOnClickListener(this::toggleFilter);
        if (getSupportActionBar() != null){
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
            getSupportActionBar().setDisplayShowHomeEnabled(true);
        }
        // Colored status bar / flat fragment container need API 21+.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            getWindow().setStatusBarColor(ContextCompat.getColor(this, R.color.DARK));
            findViewById(R.id.fragmentSuperContainer).setElevation(0);
        }
        OPallUniRenderer renderer = new UniRenderer(this, new ProgramPipeLine());
        stateRequester.addRequestListener(renderer);
        OPallSurfaceView oPallSurfaceView = (OPallSurfaceView) findViewById(R.id.opallView);
        oPallSurfaceView.setDimProportions(OPallSurfaceView.DimensionProcessors.RELATIVE_SQUARE);
        oPallSurfaceView.setRenderer(renderer);
        // Hand the bitmap chosen on the start-up screen to the GL program once a
        // GL context exists (StartUpActivity.BitmapPack is presumably a static
        // holder filled by the previous activity -- TODO confirm).
        oPallSurfaceView.addToGLContextQueue(gl ->
                stateRequester.sendNonSyncRequest(new Request(send_bitmap_to_program, StartUpActivity.BitmapPack.get()))
        );
        switchToScrollOptionsView();
    }

    /** Checks {@code button} and unchecks every other toggle of the group. */
    private void checkToggle(ToggleButton button) {
        for (ToggleButton b: toggleGroup) {
            if (b == button) button.setChecked(true);
            else b.setChecked(false);
        }
    }

    /** Shows {@code view} and hides every other option container. */
    private void showContainer(View view) {
        for (View v: containerGroup) {
            if (v == view) view.setVisibility(View.VISIBLE);
            else v.setVisibility(View.GONE);
        }
    }

    // The three click handlers below animate the press, reveal the matching
    // container (only when the toggle became checked) and normalize the group.
    private void toggleImage(View v) {
        OPallAnimated.pressButton75_225(this, v, () -> {
            if (imageToggle.isChecked()) showContainer(imageContainer);
            checkToggle(imageToggle);
        });
    }

    private void togglePalette(View v) {
        OPallAnimated.pressButton75_225(this, v, () -> {
            if (paletteToggle.isChecked()) showContainer(paletteContainer);
            checkToggle(paletteToggle);
        });
    }

    private void toggleFilter(View v) {
        OPallAnimated.pressButton75_225(this, v, () -> {
            if (filterToggle.isChecked()) showContainer(filterContainer);
            checkToggle(filterToggle);
        });
    }

    /**
     * Replaces the scrollable options view with {@code fragment} and hides the
     * toolbar toggles. No-op when a fragment is already shown.
     */
    @Override
    public void switchToFragmentOptions(Fragment fragment) {
        if (!optionsSwitched) {
            wipeTopBarButton();
            if (fragment != null) {
                FragmentTransaction fragmentTransaction = getFragmentManager()
                        .beginTransaction().add(R.id.fragmentContainer, fragment);
                fragmentTransaction.commit();
            }
            findViewById(R.id.scrollOptionsView).setVisibility(View.GONE);
            findViewById(R.id.fragmentSuperContainer).setVisibility(View.VISIBLE);
            for (ToggleButton t: toggleGroup) t.setVisibility(View.GONE);
            optionsSwitched = true;
            actualFragment = fragment;
        }
    }

    /** Removes any active fragment and restores the scroll options view and toggles. */
    @Override
    public void switchToScrollOptionsView() {
        if (actualFragment != null) {
            getFragmentManager().beginTransaction().remove(actualFragment).commit();
        }
        findViewById(R.id.scrollOptionsView).setVisibility(View.VISIBLE);
        findViewById(R.id.fragmentSuperContainer).setVisibility(View.GONE);
        for (ToggleButton t: toggleGroup) t.setVisibility(View.VISIBLE);
        optionsSwitched = false;
        actualFragment = null;
        restoreTopBarButton();
    }

    /** @return the GL surface view hosting the render pipeline. */
    @Override
    public OPallSurfaceView getRenderSurface() {
        return (OPallSurfaceView) findViewById(R.id.opallView);
    }

    /** @return this activity, for callers holding only the {@link AppMainProto} interface. */
    @Override
    public AppCompatActivity getActivityContext() {
        return this;
    }

    /** Creates the share button and the (initially hidden) fragment options button. */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        topBarButton = menu.add(0, topBarButtonId, 0, "");
        topBarButton.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        topOptButton = menu.add(Menu.NONE, topOptButtonId, Menu.NONE, "");
        topOptButton.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        restoreTopBarButton();
        return super.onCreateOptionsMenu(menu);
    }

    /**
     * Lets a fragment configure the secondary app-bar button: {@code actions}
     * are appended to the click-action list and {@code buttonConsumer} receives
     * the {@link MenuItem} to style/show.
     */
    @Override
    public void setTopControlButton(OPallConsumer<MenuItem> buttonConsumer, Runnable ... actions) {
        Collections.addAll(topBarButtonActions, actions);
        buttonConsumer.consume(topOptButton);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == android.R.id.home) {
            // "Up" either asks to exit (options view) or backs out of the fragment.
            if (!optionsSwitched) startBackDialog();
            else switchToScrollOptionsView();
        } else if (item.getItemId() == topBarButtonId || item.getItemId() == topOptButtonId)
            // NOTE(review): actions must not mutate topBarButtonActions while
            // iterating, or this loop throws ConcurrentModificationException -- verify callers.
            for (Runnable action : topBarButtonActions) action.run();
        return super.onOptionsItemSelected(item);
    }

    /**
     * Back navigation: fragment -> options view -> image tab -> exit dialog.
     */
    @Override
    public void onBackPressed() {
        if (!optionsSwitched) {
            if (imageToggle.isChecked()) startBackDialog();
            else {
                showContainer(imageContainer);
                checkToggle(imageToggle);
            }
        } else switchToScrollOptionsView();
    }

    /** Actually leaves the activity (bypasses the exit dialog). */
    private void closeActivity() {
        super.onBackPressed();
        finish();
    }

    /** Asks the user to confirm leaving the editor. */
    private void startBackDialog() {
        new OPallAlertDialog()
                .title(getResources().getString(R.string.dialog_are_u_sure))
                .message(getResources().getString(R.string.dialog_exit_msg))
                .negative(getResources().getString(R.string.dialog_exit_decline))
                .positive(getResources().getString(R.string.dialog_exit_accept), this::closeActivity)
                .show(getSupportFragmentManager(), "ExitDialog");
    }

    /** Resets the app bar to its default state: share button visible, options button hidden. */
    private void restoreTopBarButton() {
        if (topBarButton != null) {
            topBarButtonActions.clear();
            topBarButtonActions.add(topBarButtonDefaultAction);
            topBarButton.setVisible(true).setEnabled(true);
            topBarButton.setIcon(R.drawable.ic_share_white_24dp);
            topOptButton.setVisible(false).setEnabled(false);
        }
    }

    /** Hides the share button and drops all registered actions (used while a fragment is shown). */
    private void wipeTopBarButton() {
        if (topBarButton != null) {
            topBarButton.setEnabled(false).setVisible(false);
            topBarButtonActions.clear();
        }
    }
}
henryco/OPalette
app/src/main/java/net/henryco/opalette/application/MainActivity.java
Java
apache-2.0
22,644
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.caching.internal.tasks;

import org.gradle.caching.BuildCacheKey;
import org.gradle.util.Path;

import javax.annotation.Nullable;

/**
 * A {@link BuildCacheKey} specialized for task outputs: it additionally
 * carries the identity of the task it was computed for and the inputs the
 * hash was derived from.
 */
public interface TaskOutputCachingBuildCacheKey extends BuildCacheKey {

    /** Returns the build-scoped path of the task this cache key belongs to. */
    Path getTaskPath();

    /** Returns the inputs that were combined to produce this key's hash. */
    BuildCacheKeyInputs getInputs();

    /**
     * Returns the raw hash code as bytes, or {@code null} — presumably when
     * the key is not valid (see {@link #isValid()}); TODO confirm against callers.
     */
    @Nullable
    byte[] getHashCodeBytes();

    /**
     * Whether this key can be used to retrieve or store task output entries.
     */
    boolean isValid();
}
lsmaira/gradle
subprojects/core/src/main/java/org/gradle/caching/internal/tasks/TaskOutputCachingBuildCacheKey.java
Java
apache-2.0
1,067
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This file is autogenerated by: // mojo/public/tools/bindings/mojom_bindings_generator.py // For: // third_party/WebKit/public/platform/modules/mediasession/media_session.mojom // package org.chromium.blink.mojom; import org.chromium.base.annotations.SuppressFBWarnings; import org.chromium.mojo.bindings.DeserializationException; public final class MediaSessionPlaybackState { public static final int NONE = 0; public static final int PAUSED = NONE + 1; public static final int PLAYING = PAUSED + 1; private static final boolean IS_EXTENSIBLE = false; public static boolean isKnownValue(int value) { switch (value) { case 0: case 1: case 2: return true; } return false; } public static void validate(int value) { if (IS_EXTENSIBLE || isKnownValue(value)) return; throw new DeserializationException("Invalid enum value."); } private MediaSessionPlaybackState() {} }
mogoweb/365browser
app/src/main/java/org/chromium/blink/mojom/MediaSessionPlaybackState.java
Java
apache-2.0
1,196
using System;
using System.Threading.Tasks;

namespace Rabbit.Transport.Simple.Tcp.Server
{
    /// <summary>
    /// Delegate-backed implementation of <see cref="ITcpSocketSaeaServerMessageDispatcher"/>:
    /// each dispatcher callback is forwarded to an optional delegate supplied at construction.
    /// Unset delegates make the corresponding callback a no-op.
    /// </summary>
    internal class InternalTcpSocketSaeaServerMessageDispatcherImplementation : ITcpSocketSaeaServerMessageDispatcher
    {
        private Func<TcpSocketSaeaSession, byte[], int, int, Task> _dataReceivedHandler;
        private Func<TcpSocketSaeaSession, Task> _sessionStartedHandler;
        private Func<TcpSocketSaeaSession, Task> _sessionClosedHandler;

        public InternalTcpSocketSaeaServerMessageDispatcherImplementation()
        {
        }

        public InternalTcpSocketSaeaServerMessageDispatcherImplementation(
            Func<TcpSocketSaeaSession, byte[], int, int, Task> onSessionDataReceived,
            Func<TcpSocketSaeaSession, Task> onSessionStarted,
            Func<TcpSocketSaeaSession, Task> onSessionClosed)
            : this()
        {
            _dataReceivedHandler = onSessionDataReceived;
            _sessionStartedHandler = onSessionStarted;
            _sessionClosedHandler = onSessionClosed;
        }

        /// <summary>Invoked when a session starts; forwards to the started delegate if set.</summary>
        public async Task OnSessionStarted(TcpSocketSaeaSession session)
        {
            var handler = _sessionStartedHandler;
            if (handler == null)
                return;
            await handler(session);
        }

        /// <summary>Invoked on inbound data; forwards buffer slice to the data delegate if set.</summary>
        public async Task OnSessionDataReceived(TcpSocketSaeaSession session, byte[] data, int offset, int count)
        {
            var handler = _dataReceivedHandler;
            if (handler == null)
                return;
            await handler(session, data, offset, count);
        }

        /// <summary>Invoked when a session closes; forwards to the closed delegate if set.</summary>
        public async Task OnSessionClosed(TcpSocketSaeaSession session)
        {
            var handler = _sessionClosedHandler;
            if (handler == null)
                return;
            await handler(session);
        }
    }
}
EsonXie/Rpc
src/extensions/transports/Rabbit.Transport.Simple/Tcp/Server/InternalTcpSocketSaeaServerMessageDispatcherImplementation.cs
C#
apache-2.0
1,666
// Copyright 2014-2015 Boundary, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.boundary.sdk.event.service.db; import java.util.List; import java.util.Map; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.Processor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.boundary.camel.component.ping.PingConfiguration; import com.boundary.camel.component.port.PortConfiguration; import com.boundary.camel.component.ssh.SshxConfiguration; import com.boundary.sdk.event.service.ServiceCheckRequest; import com.boundary.sdk.event.service.ServiceTest; import com.boundary.sdk.event.service.ping.PingServiceModel; import com.boundary.sdk.event.service.port.PortServiceModel; import com.boundary.sdk.event.service.ssh.SshxServiceModel; import com.boundary.sdk.event.service.url.UrlServiceDatabase; public class ServiceChecksDatabase implements Processor { //TODO: Separate class for handling constants private static final String PING = "ping"; private static final String PORT = "port"; private static final String SSH = "ssh"; private static final String URL = "url"; private static Logger LOG = LoggerFactory.getLogger(ServiceChecksDatabase.class); public ServiceChecksDatabase() { // TODO Auto-generated constructor stub } private void sendTestData(Exchange exchange) { Message message = exchange.getIn(); String sdnDirectorHost = "192.168.137.11"; String sdnDirectorName = "SDN Director"; ServiceCheckRequest request = 
new ServiceCheckRequest(); PingConfiguration sdnDirectorPingTest = new PingConfiguration(); sdnDirectorPingTest.setHost(sdnDirectorHost); PingServiceModel sdnDirectorPingModel = new PingServiceModel(); PortConfiguration sdnDirectorPortTest8080 = new PortConfiguration(); sdnDirectorPortTest8080.setHost(sdnDirectorHost); sdnDirectorPortTest8080.setPort(8080); PortServiceModel sdnDirectorPortModel8080 = new PortServiceModel(); SshxConfiguration plumgridProcessTest = new SshxConfiguration(); plumgridProcessTest.setHost(sdnDirectorHost); plumgridProcessTest.setCommand("status plumgrid"); plumgridProcessTest.setTimeout(10000); plumgridProcessTest.setUsername("plumgrid"); plumgridProcessTest.setPassword("plumgrid"); SshxServiceModel plumgridProcessModel = new SshxServiceModel(); plumgridProcessModel.setExpectedOutput("^plumgrid start/running, process\\s+\\d+"); SshxConfiguration plumgridSalProcessTest = new SshxConfiguration(); plumgridSalProcessTest.setHost(sdnDirectorHost); plumgridSalProcessTest.setCommand("status plumgrid-sal"); plumgridSalProcessTest.setTimeout(10000); plumgridSalProcessTest.setUsername("plumgrid"); plumgridSalProcessTest.setPassword("plumgrid"); SshxServiceModel plumgridSalProcessTestModel = new SshxServiceModel(); plumgridSalProcessTestModel.setExpectedOutput("^plumgrid-sal start/running, process\\s\\d+"); SshxConfiguration nginxProcessTest = new SshxConfiguration(); nginxProcessTest.setHost(sdnDirectorHost); nginxProcessTest.setCommand("status nginx"); nginxProcessTest.setTimeout(10000); nginxProcessTest.setUsername("plumgrid"); nginxProcessTest.setPassword("plumgrid"); SshxServiceModel nginxProcessModel = new SshxServiceModel(); nginxProcessModel.setExpectedOutput("^nginx start/running, process\\s\\d+"); ServiceTest<PingConfiguration,PingServiceModel> pingSDNDirector= new ServiceTest<PingConfiguration,PingServiceModel>( "host status","ping",sdnDirectorName,request.getRequestId(),sdnDirectorPingTest,sdnDirectorPingModel); 
request.addServiceTest(pingSDNDirector); ServiceTest<PortConfiguration,PortServiceModel> portSDNDirector8080 = new ServiceTest<PortConfiguration,PortServiceModel>( "8080 port status","port",sdnDirectorName,request.getRequestId(),sdnDirectorPortTest8080,sdnDirectorPortModel8080); request.addServiceTest(portSDNDirector8080); ServiceTest<SshxConfiguration,SshxServiceModel> sshPlumgridProcess = new ServiceTest<SshxConfiguration,SshxServiceModel>( "plumgrid process status","ssh",sdnDirectorName,request.getRequestId(),plumgridProcessTest,plumgridProcessModel); request.addServiceTest(sshPlumgridProcess); ServiceTest<SshxConfiguration,SshxServiceModel> sshPlumgridSalProcess = new ServiceTest<SshxConfiguration,SshxServiceModel>( "plumgrid-sal process status","ssh",sdnDirectorName,request.getRequestId(),plumgridProcessTest,plumgridProcessModel); request.addServiceTest(sshPlumgridSalProcess); ServiceTest<SshxConfiguration,SshxServiceModel> sshNginxProcess = new ServiceTest<SshxConfiguration,SshxServiceModel>( "nginx process status","ssh",sdnDirectorName,request.getRequestId(),plumgridProcessTest,plumgridProcessModel); request.addServiceTest(sshNginxProcess); message.setBody(request); } private void createPingServiceTest(ServiceCheckRequest request, Map<String,Object> row) { String pingHost = row.get("pingHost").toString(); int pingTimeout = Integer.parseInt(row.get("pingTimeout").toString()); PingConfiguration pingConfiguration = new PingConfiguration(); pingConfiguration.setHost(pingHost); pingConfiguration.setTimeout(pingTimeout); String serviceName = row.get("serviceName").toString(); String serviceTestName = row.get("serviceTestName").toString(); String serviceTypeName = row.get("serviceTypeName").toString(); PingServiceModel pingServiceModel = new PingServiceModel(); ServiceTest<PingConfiguration,PingServiceModel> pingServiceTest = new ServiceTest<PingConfiguration,PingServiceModel>(serviceTestName,serviceTypeName,serviceName, 
request.getRequestId(),pingConfiguration,pingServiceModel); request.addServiceTest(pingServiceTest); } private void createPortServiceTest(ServiceCheckRequest request, Map<String,Object> row) { String portHost = row.get("portHost").toString(); int port = Integer.parseInt(row.get("portPort").toString()); int portTimeout = Integer.parseInt(row.get("portTimeout").toString()); PortConfiguration portConfiguration = new PortConfiguration(); portConfiguration.setHost(portHost); portConfiguration.setPort(port); portConfiguration.setTimeout(portTimeout); String serviceName = row.get("serviceName").toString(); String serviceTestName = row.get("serviceTestName").toString(); String serviceTypeName = row.get("serviceTypeName").toString(); PortServiceModel portServiceModel = new PortServiceModel(); ServiceTest<PortConfiguration,PortServiceModel> portServicetest = new ServiceTest<PortConfiguration,PortServiceModel>(serviceTestName,serviceTypeName,serviceName, request.getRequestId(),portConfiguration,portServiceModel); request.addServiceTest(portServicetest); } private void createSshServiceTest(ServiceCheckRequest request, Map<String,Object> row) { String sshHost = row.get("sshHost").toString(); int sshPort = Integer.parseInt(row.get("sshPort").toString()); int sshTimeout = Integer.parseInt(row.get("sshTimeout").toString()); String sshUserName = row.get("sshUserName").toString(); String sshPassword = row.get("sshPassword").toString(); String sshCommand = row.get("sshCommand").toString(); SshxConfiguration sshConfiguration = new SshxConfiguration(); sshConfiguration.setHost(sshHost); sshConfiguration.setPort(sshPort); sshConfiguration.setTimeout(sshTimeout); sshConfiguration.setUsername(sshUserName); sshConfiguration.setPassword(sshPassword); sshConfiguration.setCommand(sshCommand); String serviceName = row.get("serviceName").toString(); String serviceTestName = row.get("serviceTestName").toString(); String serviceTypeName = row.get("serviceTypeName").toString(); SshxServiceModel 
sshServiceModel = new SshxServiceModel(); String expectedOutput = row.get("sshExpectedOutput").toString(); sshServiceModel.setExpectedOutput(expectedOutput); ServiceTest<SshxConfiguration,SshxServiceModel> sshServicetest = new ServiceTest<SshxConfiguration,SshxServiceModel>(serviceTestName,serviceTypeName,serviceName, request.getRequestId(),sshConfiguration,sshServiceModel); request.addServiceTest(sshServicetest); } private void createUrlServiceTest(ServiceCheckRequest request, Map<String,Object> row) { UrlServiceDatabase serviceUrl = new UrlServiceDatabase(); serviceUrl.populate(request,row); } @Override public void process(Exchange exchange) throws Exception { Message message = exchange.getIn(); ServiceCheckRequest request = new ServiceCheckRequest(); List<Map<String, Object>> list = message.getBody(List.class); for (Map<String,Object> row : list) { LOG.debug("Service Test Data: " + row.toString()); String serviceTestType = row.get("serviceTypeName").toString(); switch (serviceTestType) { case PING: createPingServiceTest(request,row); break; case PORT: createPortServiceTest(request,row); break; case SSH: createSshServiceTest(request,row); break; case URL: createUrlServiceTest(request,row); break; } } //TODO: How to handle if there are no service tests message.setBody(request); } }
boundary/boundary-event-sdk
src/main/java/com/boundary/sdk/event/service/db/ServiceChecksDatabase.java
Java
apache-2.0
9,687
/*
 * Copyright 2017 The Bazel Authors. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.idea.blaze.aspect;

import static com.google.common.truth.Truth.assertThat;
import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import com.google.idea.blaze.aspect.JarFilter.JarFilterOptions;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Unit tests for {@link JarFilter} */
@RunWith(JUnit4.class)
public class JarFilterTest {

  // All fixture files are created under a per-test temporary directory.
  @Rule public TemporaryFolder folder = new TemporaryFolder();

  /**
   * Verifies the class-name predicate: an entry is kept when it matches a
   * prefix exactly or as an inner class ({@code Foo$Inner}), but not when the
   * prefix is merely a package segment or appears at a deeper path.
   */
  @Test
  public void testFilterMethod() throws Exception {
    List<String> prefixes =
        ImmutableList.of("com/google/foo/Foo", "com/google/bar/Bar", "com/google/baz/Baz");
    assertThat(JarFilter.shouldKeepClass(prefixes, "com/google/foo/Foo.class")).isTrue();
    assertThat(JarFilter.shouldKeepClass(prefixes, "com/google/foo/Foo$Inner.class")).isTrue();
    assertThat(JarFilter.shouldKeepClass(prefixes, "com/google/bar/Bar.class")).isTrue();
    // Prefix must match a class name, not a package directory of the same name.
    assertThat(JarFilter.shouldKeepClass(prefixes, "com/google/foo/Foo/NotFoo.class")).isFalse();
    // Prefix must anchor at the start of the entry path.
    assertThat(JarFilter.shouldKeepClass(prefixes, "wrong/com/google/foo/Foo.class")).isFalse();
  }

  /**
   * End-to-end run of {@link JarFilter#main}: builds source files and source
   * jars naming the classes to keep, filters a class jar and a source jar, and
   * checks that exactly the matching entries survive.
   */
  @Test
  public void fullIntegrationTest() throws Exception {
    // Plain .java sources contribute com.google.foo.Foo (+ inner) and com.google.foo.bar.Bar.
    File fooJava = folder.newFile("Foo.java");
    Files.write("package com.google.foo; class Foo { class Inner {} }".getBytes(UTF_8), fooJava);
    File barJava = folder.newFile("Bar.java");
    Files.write("package com.google.foo.bar; class Bar {}".getBytes(UTF_8), barJava);

    // Source jars contribute the generated gen.Gen, gen.Gen2 and gen.Gen3 classes.
    // NOTE(review): entry paths and declared packages intentionally disagree here —
    // presumably JarFilter parses the package statement, not the entry path; confirm.
    File srcJar = folder.newFile("gen.srcjar");
    try (ZipOutputStream zo = new ZipOutputStream(new FileOutputStream(srcJar))) {
      zo.putNextEntry(new ZipEntry("com/google/foo/gen/Gen.java"));
      zo.write("package gen; class Gen {}".getBytes(UTF_8));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/gen/Gen2.java"));
      zo.write("package gen; class Gen2 {}".getBytes(UTF_8));
      zo.closeEntry();
    }
    File src3Jar = folder.newFile("gen3.srcjar");
    try (ZipOutputStream zo = new ZipOutputStream(new FileOutputStream(src3Jar))) {
      zo.putNextEntry(new ZipEntry("com/google/foo/gen/Gen3.java"));
      zo.write("package gen; class Gen3 {}".getBytes(UTF_8));
      zo.closeEntry();
    }

    // Class jar to be filtered: Foo2 has no corresponding source, so it must be dropped.
    File filterJar = folder.newFile("foo.jar");
    try (ZipOutputStream zo = new ZipOutputStream(new FileOutputStream(filterJar))) {
      zo.putNextEntry(new ZipEntry("com/google/foo/Foo.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/Foo$Inner.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/bar/Bar.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen2.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen3.class"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/Foo2.class"));
      zo.closeEntry();
    }

    // Source jar to be filtered: Foo2.java and Bar2.java must be dropped.
    File filterSrcJar = folder.newFile("foo-src.jar");
    try (ZipOutputStream zo = new ZipOutputStream(new FileOutputStream(filterSrcJar))) {
      zo.putNextEntry(new ZipEntry("com/google/foo/Foo.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/bar/Bar.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen2.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("gen/Gen3.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/Foo2.java"));
      zo.closeEntry();
      zo.putNextEntry(new ZipEntry("com/google/foo/bar/Bar2.java"));
      zo.closeEntry();
    }

    File filteredJar = folder.newFile("foo-filtered-gen.jar");
    File filteredSourceJar = folder.newFile("foo-filtered-gen-src.jar");

    // Drive JarFilter through its command-line entry point.
    String[] args =
        new String[] {
          "--keep_java_file",
          fooJava.getPath(),
          "--keep_java_file",
          barJava.getPath(),
          "--keep_source_jar",
          srcJar.getPath(),
          "--keep_source_jar",
          src3Jar.getPath(),
          "--filter_jar",
          filterJar.getPath(),
          "--filter_source_jar",
          filterSrcJar.getPath(),
          "--filtered_jar",
          filteredJar.getPath(),
          "--filtered_source_jar",
          filteredSourceJar.getPath()
        };
    JarFilterOptions options = JarFilter.parseArgs(args);
    JarFilter.main(options);

    // Collect the surviving entry names from both outputs.
    List<String> filteredJarNames = Lists.newArrayList();
    try (ZipFile zipFile = new ZipFile(filteredJar)) {
      Enumeration<? extends ZipEntry> entries = zipFile.entries();
      while (entries.hasMoreElements()) {
        ZipEntry zipEntry = entries.nextElement();
        filteredJarNames.add(zipEntry.getName());
      }
    }
    List<String> filteredSourceJarNames = Lists.newArrayList();
    try (ZipFile zipFile = new ZipFile(filteredSourceJar)) {
      Enumeration<? extends ZipEntry> entries = zipFile.entries();
      while (entries.hasMoreElements()) {
        ZipEntry zipEntry = entries.nextElement();
        filteredSourceJarNames.add(zipEntry.getName());
      }
    }

    assertThat(filteredJarNames)
        .containsExactly(
            "com/google/foo/Foo.class",
            "com/google/foo/Foo$Inner.class",
            "com/google/foo/bar/Bar.class",
            "gen/Gen.class",
            "gen/Gen2.class",
            "gen/Gen3.class");
    assertThat(filteredSourceJarNames)
        .containsExactly(
            "com/google/foo/Foo.java",
            "com/google/foo/bar/Bar.java",
            "gen/Gen.java",
            "gen/Gen2.java",
            "gen/Gen3.java");
  }
}
bazelbuild/intellij
aspect/tools/tests/unittests/com/google/idea/blaze/aspect/JarFilterTest.java
Java
apache-2.0
6,677
package org.collention; import java.text.Collator; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; enum Week { } public class CollentionsDemo { public static void main(String[] args) { List<Dog> dogs = new ArrayList<>(); dogs.add(new Dog("亚亚", "拉布拉多")); dogs.add(new Dog("偶偶", "雪纳瑞")); dogs.add(new Dog("飞飞", "拉布拉多")); dogs.add(new Dog("美美", "雪纳瑞")); // List<String> names = Arrays.asList("Tan", "Zhen", "Yu"); // Collections.sort(names, (String a, String b) -> a.compareTo(b)); Collections.sort(dogs, (Dog o1, Dog o2) ->{ return Collator.getInstance(Locale.CHINA).compare(o1.getName(), o2.getName()); }); dogs.forEach((Dog dog) -> {System.out.println(dog.getName() + "--->" + dog.getSt());}); } }
Liuxyly/Java_Learning
Java_Adv/src/org/collention/CollentionsDemo.java
Java
apache-2.0
874
/** * @license * Copyright 2016 Google Inc. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {SliceViewChunkSource, SliceViewChunkSpecification, SliceViewChunkSpecificationBaseOptions, SliceViewChunkSpecificationOptions, SliceViewSourceOptions} from 'neuroglancer/sliceview/base'; import {getCombinedTransform} from 'neuroglancer/sliceview/base'; export enum VectorGraphicsType { LINE, POINT } export interface RenderLayer { sources: VectorGraphicsChunkSource[][]|null; } export interface VectorGraphicsChunkSpecificationSourceOptions { vectorGraphicsSourceOptions: VectorGraphicsSourceOptions; } export interface VectorGraphicsSourceOptions extends SliceViewSourceOptions {} export interface VectorGraphicsChunkSource extends SliceViewChunkSource { spec: VectorGraphicsChunkSpecification; } export type VectorGraphicsChunkSpecificationOptions = SliceViewChunkSpecificationOptions; export interface VectorGraphicsChunkSpecificationDefaultChunkSizeOptions extends SliceViewChunkSpecificationBaseOptions {} /** * Specifies a chunk layout and voxel size. 
*/ export class VectorGraphicsChunkSpecification extends SliceViewChunkSpecification { constructor(options: VectorGraphicsChunkSpecificationOptions) { super(options); } static make(options: VectorGraphicsChunkSpecificationOptions& {vectorGraphicsSourceOptions: VectorGraphicsSourceOptions}) { return new VectorGraphicsChunkSpecification(Object.assign( {}, options, {transform: getCombinedTransform(options.transform, options.vectorGraphicsSourceOptions)})); } static fromObject(msg: any) { return new VectorGraphicsChunkSpecification(msg); } toObject(): SliceViewChunkSpecificationOptions { return super.toObject(); } } export const VECTOR_GRAPHICS_RPC_ID = 'vectorgraphics'; export const VECTOR_GRAPHICS_RENDERLAYER_RPC_ID = 'vectorgraphics/RenderLayer';
seung-lab/neuroglancer
src/neuroglancer/sliceview/vector_graphics/base.ts
TypeScript
apache-2.0
2,419
--TEST-- Test for bug #476: Exception chanining doesn't work --SKIPIF-- <?php if (!version_compare(phpversion(), "5.3", '>=')) echo "skip >= PHP 5.3 needed\n"; ?> --INI-- xdebug.default_enable=1 xdebug.dump.GET= xdebug.dump.SERVER= xdebug.show_local_vars=0 --FILE-- <?php function a() { throw new Exception('First exception'); } function b() { try { a(); } catch(Exception $e) { throw new Exception('Second exception', 0, $e); } } function c() { try { b(); } catch(Exception $e) { throw new Exception('Third exception', 0, $e); } } function d() { try { c(); } catch(Exception $e) { throw new Exception('Fourth exception', 0, $e); } } d(); echo "DONE\n"; ?> --EXPECTF-- Fatal error: Uncaught exception 'Exception' with message 'First exception' in %sbug00476-2.php on line 31 Exception: First exception in %sbug00476-2.php on line 5 Call Stack: %w%f %w%d 1. {main}() %sbug00476-2.php:0 %w%f %w%d 2. d() %sbug00476-2.php:35 %w%f %w%d 3. c() %sbug00476-2.php:29 %w%f %w%d 4. b() %sbug00476-2.php:20 %w%f %w%d 5. a() %sbug00476-2.php:11 Exception: Second exception in %sbug00476-2.php on line 13 Call Stack: %w%f %w%d 1. {main}() %sbug00476-2.php:0 %w%f %w%d 2. d() %sbug00476-2.php:35 %w%f %w%d 3. c() %sbug00476-2.php:29 %w%f %w%d 4. b() %sbug00476-2.php:20 Exception: Third exception in %sbug00476-2.php on line 22 Call Stack: %w%f %w%d 1. {main}() %sbug00476-2.php:0 %w%f %w%d 2. d() %sbug00476-2.php:35 %w%f %w%d 3. c() %sbug00476-2.php:29 Exception: Fourth exception in %sbug00476-2.php on line 31 Call Stack: %w%f %w%d 1. {main}() %sbug00476-2.php:0 %w%f %w%d 2. d() %sbug00476-2.php:35
mbutkereit/docker-php
php-fpm/php/5.4.x/config/xdebug/tests/bug00476-2.phpt
PHP
apache-2.0
1,662
export { default as Home} from './home'; export { default as Post} from './post'; export { default as Search} from './search'; export { default as About} from './about'; export { default as Author} from './author'; export { default as Comment} from './comment'; export { default as Setting} from './setting'; export { default as Offline} from './offline'; export { default as OfflinePost} from './offlinePost';
cloudfavorites/favorites
android/source/view/index.js
JavaScript
apache-2.0
410
package lejos.robotics.navigation; import lejos.robotics.RegulatedMotor; import lejos.robotics.DirectionFinder; import lejos.util.Delay; /* * WARNING: THIS CLASS IS SHARED BETWEEN THE classes AND pccomms PROJECTS. * DO NOT EDIT THE VERSION IN pccomms AS IT WILL BE OVERWRITTEN WHEN THE PROJECT IS BUILT. */ /** * A Pilot that keeps track of direction using a CompassSensor. * @deprecated This class will disappear in NXJ version 1.0. Compass should be added to a PoseProvider. * @see lejos.robotics.localization.PoseProvider#getPose() */ // TODO: Note @deprecated message above, I'm not sure PoseProvider is exactly right place to point users to yet. // Need to explain this more when we are sure how this will replace CompassPilot. - BB @Deprecated public class CompassPilot extends DifferentialPilot { protected DirectionFinder compass; protected Regulator regulator = new Regulator(); // inner regulator for thread protected float _heading; // desired heading protected float _estimatedHeading = 0; //estimated heading protected boolean _traveling = false; // state variable used by regulator protected float _distance; // set by travel() used by r egulator to stop protected byte _direction;// direction of travel = sign of _distance protected float _heading0 = 0;// heading when rotate immediate is called /** *returns returns true if the robot is travelling for a specific distance; **/ public boolean isTraveling() { return _traveling; } /** * Allocates a CompasPilot object, and sets the physical parameters of the NXT robot. <br> * Assumes Motor.forward() causes the robot to move forward); * Parameters * @param compass : a compass sensor; * @param wheelDiameter Diameter of the tire, in any convenient units. (The diameter in mm is usually printed on the tire). 
* @param trackWidth Distance between center of right tire and center of left tire, in same units as wheelDiameter * @param leftMotor * @param rightMotor */ public CompassPilot(DirectionFinder compass, float wheelDiameter, float trackWidth, RegulatedMotor leftMotor, RegulatedMotor rightMotor) { this(compass, wheelDiameter, trackWidth, leftMotor, rightMotor, false); } /** * Allocates a CompasPilot object, and sets the physical parameters of the NXT robot. <br> * Assumes Motor.forward() causes the robot to move forward); * Parameters * @param compass : a compass sensor; * @param wheelDiameter Diameter of the tire, in any convenient units. (The diameter in mm is usually printed on the tire). * @param trackWidth Distance between center of right tire and center of left tire, in same units as wheelDiameter * @param leftMotor * @param rightMotor * @param reverse if true of motor.forward() drives the robot backwards */ public CompassPilot(DirectionFinder compass, float wheelDiameter, float trackWidth, RegulatedMotor leftMotor, RegulatedMotor rightMotor, boolean reverse) { super(wheelDiameter, trackWidth, leftMotor, rightMotor, reverse); this.compass = compass; regulator.setDaemon(true); regulator.start(); } /** * Return the compass * @return the compass */ public DirectionFinder getCompass() { return compass; } /** * Returns the change in robot heading since the last call of reset() * normalized to be within -180 and _180 degrees */ public float getAngleIncrement() { return normalize(getCompassHeading() - _heading0); } /** * Returns direction of desired robot facing */ public float getHeading() { return _estimatedHeading; } /** * Method returns the current compass heading * @return Compass heading in degrees. 
*/ public float getCompassHeading() { return normalize(compass.getDegreesCartesian()); } /** * sets direction of desired robot facing in degrees */ public void setHeading(float angle) { _heading = angle; } /** * Rotates the robot 360 degrees while calibrating the compass * resets compass zero to heading at end of calibration */ public synchronized void calibrate() { setRotateSpeed(50); compass.startCalibration(); super.rotate(360, false); compass.stopCalibration(); } public void resetCartesianZero() { compass.resetCartesianZero(); _heading = 0; } /** * Determines the difference between actual compass direction and desired heading in degrees * @return error (in degrees) */ public float getHeadingError() { float err = compass.getDegreesCartesian() - _heading; // Handles the wrap-around problem: return normalize(err); } /** * Moves the NXT robot a specific distance. A positive value moves it forwards and * a negative value moves it backwards. The robot steers to maintain its compass heading. * @param distance The positive or negative distance to move the robot, same units as _wheelDiameter * @param immediateReturn iff true, the method returns immediately. */ public void travel(float distance, boolean immediateReturn) { movementStart(immediateReturn); _type = Move.MoveType.TRAVEL; super.travel(distance,true); _distance = distance; _direction = 1; if(_distance < 0 ) _direction = -1; _traveling = true; if (immediateReturn) { return; } while (isMoving()) { Thread.yield(); // regulator will call stop when distance is reached } } /** * Moves the NXT robot a specific distance;<br> * A positive distance causes forward motion; negative distance moves backward. * Robot steers to maintain its compass heading; * @param distance of robot movement. 
Unit of measure for distance must be same as wheelDiameter and trackWidth **/ public void travel(float distance) { travel(distance, false); } /** * robot rotates to the specified compass heading; * @param immediateReturn - if true, method returns immediately. * Robot stops when specified angle is reached or when stop() is called */ public void rotate(float angle, boolean immediateReturn) { movementStart(immediateReturn); _type = Move.MoveType.ROTATE; float heading0 = getCompassHeading(); super.rotate(angle, immediateReturn); // takes care of movement start if (immediateReturn) return; _heading = normalize(_heading +angle); _traveling = false; float error = getHeadingError(); while (Math.abs(error) > 2) { super.rotate(-error, false); error = getHeadingError(); } _heading0 = heading0;//needed for currect angle increment } /** * Rotates the NXT robot through a specific angle; Rotates left if angle is positive, right if negative, * Returns when angle is reached. * Wheels turn in opposite directions producing a zero radius turn. * @param angle degrees. Positive angle rotates to the left (clockwise); negative to the right. <br>Requires correct values for wheel diameter and track width. */ public void rotate(float angle) { rotate(angle, false); } public void reset() { _left.resetTachoCount(); _right.resetTachoCount(); _heading0 = getCompassHeading(); super.reset(); } // methods required to give regulator access to Pilot superclass protected void stopNow() { stop(); } /** * Stops the robot soon after the method is executed. 
(It takes time for the motors * to slow to a halt) */ public void stop() { super.stop(); _traveling = false; while (isMoving()) { super.stop(); Thread.yield(); } } protected float normalize(float angle) { while (angle > 180)angle -= 360; while (angle < -180)angle += 360; return angle; } /** * inner class to regulate heading during travel move * Proportional control of steering ; error is an average of heading change * from tacho counts and from compass change * @author Roger Glassey */ class Regulator extends Thread { public void run() { while (true) { while (!_traveling) { Thread.yield(); } { // travel started float toGo = _distance; // reamining trave distance float gain = -3f * _direction; float error = 0; float e0 = 0; float incr0 = 0; _estimatedHeading = _heading0; do // travel in progress { // use weighted average of heading from tacho count and compass // weights should be based on variance of compass error and tacho count error float incr = getAngleIncrement(); _estimatedHeading += (incr - incr0); //change in heading from tacho counts incr0 = incr; _estimatedHeading = normalize( 0.5f *normalize(compass.getDegreesCartesian()) + 0.5f*_estimatedHeading); error = normalize( _estimatedHeading - _heading); toGo =(_distance - getMovementIncrement()); if(Math.abs(error - e0) > 2) //only steer if large change in error > 2 deg { steerPrep(gain * error); e0 = error; } Delay.msDelay(12); // another arbitrary constant Thread.yield(); } while (Math.abs(toGo) > 3 ); // travel completed (almost) int delta = Math.round(toGo*_leftDegPerDistance); _left.rotate(delta,true); delta = Math.round(toGo*_rightDegPerDistance); _outside.rotate(delta); while(_left.isMoving())Thread.yield(); _traveling = false; } } } } }
AndrewZurn/sju-compsci-archive
CS200s/CS217b/OriginalFiles/lejos/robotics/navigation/CompassPilot.java
Java
apache-2.0
9,741
package tz.gzu.oip.admin.controller; import org.apache.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.Authentication; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.*; import tz.gzu.oip.admin.dto.ResourcesDto; import tz.gzu.oip.dm.bo.ops.MenusDto; import tz.gzu.oip.dm.bo.ops.ResourcesMenuDto; import tz.gzu.oip.dm.po.AuthorityResources; import tz.gzu.oip.dm.po.Resources; import tz.gzu.oip.dm.service.ops.AuthorityResourcesService; import tz.gzu.oip.dm.service.ops.ResourcesService; import tz.gzu.oip.security.jwt.support.SimpleResponse; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Date; import java.util.List; /* * * @author yuit 吴正荣 * * @create 17-10-18 * */ @RestController @RequestMapping("resources") public class ResourcesController { @Autowired private ResourcesService resourcesService; @Autowired private AuthorityResourcesService authorityResourcesService; private Logger logger = Logger.getLogger(getClass()); /** * 获取菜单 * * @param authentication 授权用户 * @return */ @GetMapping("/menus") public List<ResourcesMenuDto> menus(Authentication authentication) { return resourcesService.findMenusByName(authentication.getName()); } /** * 获取所有的菜单 * * @return */ @GetMapping("allMenus/{auid}") public List<ResourcesMenuDto> allMenus(@PathVariable String auid) throws InvocationTargetException, IllegalAccessException { List<ResourcesMenuDto> all = this.resourcesService.findAllMenus(); List<ResourcesMenuDto> userHave = this.resourcesService.finMenusByAuId(auid); for (ResourcesMenuDto item : userHave) { for (ResourcesMenuDto allItem : all) { if (item.getId().trim().equals(allItem.getId().trim())) { for (MenusDto child : item.getChildren()) { for (MenusDto _child : allItem.getChildren()) { if 
(child.getCid().trim().equals(_child.getCid().trim())) { _child.setHave(true); break; } } } break; } } } return all; } @PostMapping("modifyMenusByAuId/{auid}") @Transactional(propagation = Propagation.REQUIRED) public void modifyMenusByAuId(@PathVariable String auid, @RequestBody List<AuthorityResources> items) { List<String> rids = new ArrayList<>(); List<ResourcesMenuDto> menuDtos = this.resourcesService.finMenusByAuId(auid); for (ResourcesMenuDto item : menuDtos) { if (item.getChildren() == null) { rids.add(item.getId()); } } if (rids.size() < 1) { this.authorityResourcesService.deleteResourcesByRId(null, auid); } else { // 先删除该角色所拥有的菜单 this.authorityResourcesService.deleteResourcesByRId(rids, auid); } if (items.size() > 0) { this.authorityResourcesService.insertMenus(items); } } /** * 获取所有资源 * * @return */ @GetMapping("/{cPage}/{pSize}") public ResourcesDto allResources(@PathVariable int cPage, @PathVariable int pSize) { List<Resources> menuDtos = resourcesService.findAllResources(cPage, pSize); int total = resourcesService.count(); return new ResourcesDto(menuDtos, total); } @GetMapping("/menusParent") public List<Resources> menusParent() { return this.resourcesService.findAllMenuParent(); } /** * 添加资源 * * @param resources * @return */ @PutMapping public SimpleResponse addResources(@RequestBody Resources resources) { int flg = 0; resources.setTime(new Date()); if (resources != null) { flg = this.resourcesService.insert(resources); } if (flg > 0) { // this.authority_resourcesService.insert() return new SimpleResponse("添加成功"); } else { return new SimpleResponse("服务器内部错误"); } } /** * 通过Id删除资源 * * @param id */ @DeleteMapping("/{id}") public void deleteResource(@PathVariable String id) { this.resourcesService.deleteResourceById(id); } /** * 更新资源 * * @param resources */ @PostMapping public void modifyResources(@RequestBody Resources resources) { resources.setTime(new Date()); this.resourcesService.update(resources); } }
TZClub/OMIPlatform
oip-service/src/main/java/tz/gzu/oip/admin/controller/ResourcesController.java
Java
apache-2.0
5,006
package main import ( "encoding/json" "flag" "fmt" "gopkg.in/mgo.v2/bson" "os" "path/filepath" "strings" "time" "github.com/apache/thrift/lib/go/thrift" "github.com/go-kit/kit/log" thriftclient "github.com/banerwai/micros/command/workhistory/client/thrift" "github.com/banerwai/micros/command/workhistory/service" thriftworkhistory "github.com/banerwai/micros/command/workhistory/thrift/gen-go/workhistory" banerwaicrypto "github.com/banerwai/gommon/crypto" "github.com/banerwai/global/bean" ) func main() { var ( thriftAddr = flag.String("thrift.addr", "localhost:36080", "Address for Thrift server") thriftProtocol = flag.String("thrift.protocol", "binary", "binary, compact, json, simplejson") thriftBufferSize = flag.Int("thrift.buffer.size", 0, "0 for unbuffered") thriftFramed = flag.Bool("thrift.framed", false, "true to enable framing") _defaultObjectID = flag.String("default.user.ojbectid", "5707cb10ae6faa1d1071a189", "default user ojbectid") ) flag.Parse() if len(os.Args) < 2 { fmt.Fprintf(os.Stderr, "\n%s [flags] method arg1 arg2\n\n", filepath.Base(os.Args[0])) flag.Usage() os.Exit(1) } _instances := strings.Split(*thriftAddr, ",") _instancesRandomIndex := banerwaicrypto.GetRandomItNum(len(_instances)) method := flag.Arg(0) var logger log.Logger logger = log.NewLogfmtLogger(os.Stdout) logger = log.NewContext(logger).With("caller", log.DefaultCaller) var svc service.WorkHistoryService var protocolFactory thrift.TProtocolFactory switch *thriftProtocol { case "compact": protocolFactory = thrift.NewTCompactProtocolFactory() case "simplejson": protocolFactory = thrift.NewTSimpleJSONProtocolFactory() case "json": protocolFactory = thrift.NewTJSONProtocolFactory() case "binary", "": protocolFactory = thrift.NewTBinaryProtocolFactoryDefault() default: logger.Log("protocol", *thriftProtocol, "err", "invalid protocol") os.Exit(1) } var transportFactory thrift.TTransportFactory if *thriftBufferSize > 0 { transportFactory = 
thrift.NewTBufferedTransportFactory(*thriftBufferSize) } else { transportFactory = thrift.NewTTransportFactory() } if *thriftFramed { transportFactory = thrift.NewTFramedTransportFactory(transportFactory) } transportSocket, err := thrift.NewTSocket(_instances[_instancesRandomIndex]) if err != nil { logger.Log("during", "thrift.NewTSocket", "err", err) os.Exit(1) } trans := transportFactory.GetTransport(transportSocket) defer trans.Close() if err := trans.Open(); err != nil { logger.Log("during", "thrift transport.Open", "err", err) os.Exit(1) } cli := thriftworkhistory.NewWorkHistoryServiceClientFactory(trans, protocolFactory) svc = thriftclient.New(cli, logger) begin := time.Now() switch method { case "ping": v := svc.Ping() logger.Log("method", "Ping", "v", v, "took", time.Since(begin)) case "upsert": var _obj bean.WorkHistory _obj.ID = bson.ObjectIdHex(*_defaultObjectID) _obj.ProfileID = bson.ObjectIdHex(*_defaultObjectID) var lsWorkHistoryAndFeedbacks []bean.WorkHistoryAndFeedback var _WorkHistoryAndFeedback1 bean.WorkHistoryAndFeedback _WorkHistoryAndFeedback1.Title = "ceshi" _WorkHistoryAndFeedback1.WorkPeriod = "2016.01-2016.04" _WorkHistoryAndFeedback1.WorkHours = 40 var lsWorkFeedbacks []bean.WorkFeedback var _WorkFeedback1 bean.WorkFeedback _WorkFeedback1.WorkRate = 5 _WorkFeedback1.Feedback = "perfect" lsWorkFeedbacks = append(lsWorkFeedbacks, _WorkFeedback1) var _WorkFeedback2 bean.WorkFeedback _WorkFeedback2.WorkRate = 5 _WorkFeedback2.Feedback = "good job" lsWorkFeedbacks = append(lsWorkFeedbacks, _WorkFeedback2) _WorkHistoryAndFeedback1.WorkFeedbacks = lsWorkFeedbacks lsWorkHistoryAndFeedbacks = append(lsWorkHistoryAndFeedbacks, _WorkHistoryAndFeedback1) _obj.HistoryAndFeedbacks = lsWorkHistoryAndFeedbacks b, _ := json.Marshal(_obj) v := svc.UpdateWorkHistory(*_defaultObjectID, string(b)) logger.Log("method", "UpdateWorkHistory", "v", v, "took", time.Since(begin)) default: logger.Log("err", "invalid method "+method) os.Exit(1) } }
banerwai/micros
command/workhistory/client/main.go
GO
apache-2.0
4,118
/* Copyright 2012 Rogier van Dalen. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /** \file Define a parser that parses an unsigned integer value. */ #ifndef PARSE_LL_NUMBER_UNSIGNED_HPP_INCLUDED #define PARSE_LL_NUMBER_UNSIGNED_HPP_INCLUDED #include <stdexcept> #include "utility/returns.hpp" #include "range/core.hpp" #include "../core/core.hpp" #include "../core/repeat.hpp" #include "../core/named.hpp" #include "../core/no_skip.hpp" #include "./digit.hpp" namespace parse_ll { /** Turn a range of values for digits into an unsigned integer. \todo Different bases? \todo This could be neatly represented with a fold. \throw std::overflow_error iff the result does not fit in the integer type. */ template <class Result> struct collect_integer { template <class DigitValues> Result operator() (DigitValues values) const { // Assume an assignable homogeneous range. Result result = Result(); for (; !::range::empty (values); values = ::range::drop (values)) { Result new_result = result * 10 + ::range::first (values); if (new_result / 10 != result) throw std::overflow_error ("Overflow while parsing integer"); result = new_result; } return result; } }; PARSE_LL_DEFINE_NAMED_PARSER_TEMPLATE ("unsigned", unsigned_parser, Result, (no_skip [+digit] [collect_integer <Result>()])); template <typename Result> inline auto unsigned_as() RETURNS (unsigned_parser <Result>()); static const auto unsigned_ = unsigned_as <unsigned>(); } // namespace parse_ll #endif // PARSE_LL_NUMBER_UNSIGNED_HPP_INCLUDED
rogiervd/parse_ll
include/parse_ll/number/unsigned.hpp
C++
apache-2.0
2,139
package org.elasticsearch.common.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.FilterClause;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.lucene.docset.AllDocIdSet;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.NotDocIdSet;
import org.elasticsearch.common.lucene.docset.OrDocIdSet.OrBits;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

/**
 * Similar to {@link org.apache.lucene.queries.BooleanFilter}.
 * <p/>
 * Our own variance mainly differs by the fact that we pass the acceptDocs down to the filters
 * and don't filter based on them at the end. Our logic is a bit different, and we filter based on that
 * at the top level filter chain.
 */
public class XBooleanFilter extends Filter implements Iterable<FilterClause> {

    // Orders iterators from most to fewest matching docs (by cost() estimate).
    private static final Comparator<DocIdSetIterator> COST_DESCENDING = new Comparator<DocIdSetIterator>() {
        @Override
        public int compare(DocIdSetIterator o1, DocIdSetIterator o2) {
            return Long.compare(o2.cost(), o1.cost());
        }
    };

    // Orders iterators from fewest to most matching docs (by cost() estimate).
    private static final Comparator<DocIdSetIterator> COST_ASCENDING = new Comparator<DocIdSetIterator>() {
        @Override
        public int compare(DocIdSetIterator o1, DocIdSetIterator o2) {
            return Long.compare(o1.cost(), o2.cost());
        }
    };

    final List<FilterClause> clauses = new ArrayList<>();

    /**
     * Returns the a DocIdSetIterator representing the Boolean composition
     * of the filters that have been added.
     */
    @Override
    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
        final int maxDoc = context.reader().maxDoc();

        // the 0-clauses case is ambiguous because an empty OR filter should return nothing
        // while an empty AND filter should return all docs, so we handle this case explicitely
        if (clauses.isEmpty()) {
            return null;
        }

        // optimize single case...
        if (clauses.size() == 1) {
            FilterClause clause = clauses.get(0);
            DocIdSet set = clause.getFilter().getDocIdSet(context, acceptDocs);
            if (clause.getOccur() == Occur.MUST_NOT) {
                if (DocIdSets.isEmpty(set)) {
                    // excluding nothing == matching everything
                    return new AllDocIdSet(maxDoc);
                } else {
                    return new NotDocIdSet(set, maxDoc);
                }
            }
            // SHOULD or MUST, just return the set...
            if (DocIdSets.isEmpty(set)) {
                return null;
            }
            return set;
        }

        // We have several clauses, try to organize things to make it easier to process
        // Each clause is routed either to an iterator list (sequential access) or,
        // when its DocIdSet iterator is "broken" but random access bits exist, to a
        // Bits list that is applied as a post-filter.
        List<DocIdSetIterator> shouldIterators = new ArrayList<>();
        List<Bits> shouldBits = new ArrayList<>();
        boolean hasShouldClauses = false;

        List<DocIdSetIterator> requiredIterators = new ArrayList<>();
        List<DocIdSetIterator> excludedIterators = new ArrayList<>();

        List<Bits> requiredBits = new ArrayList<>();
        List<Bits> excludedBits = new ArrayList<>();

        for (FilterClause clause : clauses) {
            DocIdSet set = clause.getFilter().getDocIdSet(context, null);
            DocIdSetIterator it = null;
            Bits bits = null;
            if (DocIdSets.isEmpty(set) == false) {
                it = set.iterator();
                if (it != null) {
                    bits = set.bits();
                }
            }

            switch (clause.getOccur()) {
            case SHOULD:
                hasShouldClauses = true;
                if (it == null) {
                    // continue, but we recorded that there is at least one should clause
                    // so that if all iterators are null we know that nothing matches this
                    // filter since at least one SHOULD clause needs to match
                } else if (bits != null && DocIdSets.isBroken(it)) {
                    shouldBits.add(bits);
                } else {
                    shouldIterators.add(it);
                }
                break;
            case MUST:
                if (it == null) {
                    // no documents matched a clause that is compulsory, then nothing matches at all
                    return null;
                } else if (bits != null && DocIdSets.isBroken(it)) {
                    requiredBits.add(bits);
                } else {
                    requiredIterators.add(it);
                }
                break;
            case MUST_NOT:
                if (it == null) {
                    // ignore: excluding an empty set excludes nothing
                } else if (bits != null && DocIdSets.isBroken(it)) {
                    excludedBits.add(bits);
                } else {
                    excludedIterators.add(it);
                }
                break;
            default:
                throw new AssertionError();
            }
        }

        // Since BooleanFilter requires that at least one SHOULD clause matches,
        // transform the SHOULD clauses into a MUST clause
        if (hasShouldClauses) {
            if (shouldIterators.isEmpty() && shouldBits.isEmpty()) {
                // we had should clauses, but they all produced empty sets
                // yet BooleanFilter requires that at least one clause matches
                // so it means we do not match anything
                return null;
            } else if (shouldIterators.size() == 1 && shouldBits.isEmpty()) {
                requiredIterators.add(shouldIterators.get(0));
            } else {
                // apply high-cardinality should clauses first
                CollectionUtil.timSort(shouldIterators, COST_DESCENDING);

                BitDocIdSet.Builder shouldBuilder = null;
                for (DocIdSetIterator it : shouldIterators) {
                    if (shouldBuilder == null) {
                        shouldBuilder = new BitDocIdSet.Builder(maxDoc);
                    }
                    shouldBuilder.or(it);
                }

                if (shouldBuilder != null && shouldBits.isEmpty() == false) {
                    // we have both iterators and bits, there is no way to compute
                    // the union efficiently, so we just transform the iterators into
                    // bits
                    // add first since these are fast bits
                    shouldBits.add(0, shouldBuilder.build().bits());
                    shouldBuilder = null;
                }

                if (shouldBuilder == null) {
                    // only bits
                    assert shouldBits.size() >= 1;
                    if (shouldBits.size() == 1) {
                        requiredBits.add(shouldBits.get(0));
                    } else {
                        requiredBits.add(new OrBits(shouldBits.toArray(new Bits[shouldBits.size()])));
                    }
                } else {
                    assert shouldBits.isEmpty();
                    // only iterators, we can add the merged iterator to the list of required iterators
                    requiredIterators.add(shouldBuilder.build().iterator());
                }
            }
        } else {
            assert shouldIterators.isEmpty();
            assert shouldBits.isEmpty();
        }

        // From now on, we don't have to care about SHOULD clauses anymore since we upgraded
        // them to required clauses (if necessary)

        // cheap iterators first to make intersection faster
        CollectionUtil.timSort(requiredIterators, COST_ASCENDING);
        CollectionUtil.timSort(excludedIterators, COST_ASCENDING);

        // Intersect iterators
        BitDocIdSet.Builder res = null;
        for (DocIdSetIterator iterator : requiredIterators) {
            if (res == null) {
                res = new BitDocIdSet.Builder(maxDoc);
                res.or(iterator);
            } else {
                res.and(iterator);
            }
        }
        for (DocIdSetIterator iterator : excludedIterators) {
            if (res == null) {
                // start from "all docs" when there were no required iterators
                res = new BitDocIdSet.Builder(maxDoc, true);
            }
            res.andNot(iterator);
        }

        // Transform the excluded bits into required bits
        if (excludedBits.isEmpty() == false) {
            Bits excluded;
            if (excludedBits.size() == 1) {
                excluded = excludedBits.get(0);
            } else {
                excluded = new OrBits(excludedBits.toArray(new Bits[excludedBits.size()]));
            }
            requiredBits.add(new NotDocIdSet.NotBits(excluded));
        }

        // The only thing left to do is to intersect 'res' with 'requiredBits'

        // the main doc id set that will drive iteration
        DocIdSet main;
        if (res == null) {
            main = new AllDocIdSet(maxDoc);
        } else {
            main = res.build();
        }

        // apply accepted docs and compute the bits to filter with
        // accepted docs are added first since they are fast and will help not computing anything on deleted docs
        if (acceptDocs != null) {
            requiredBits.add(0, acceptDocs);
        }
        // the random-access filter that we will apply to 'main'
        Bits filter;
        if (requiredBits.isEmpty()) {
            filter = null;
        } else if (requiredBits.size() == 1) {
            filter = requiredBits.get(0);
        } else {
            filter = new AndDocIdSet.AndBits(requiredBits.toArray(new Bits[requiredBits.size()]));
        }
        return BitsFilteredDocIdSet.wrap(main, filter);
    }

    /**
     * Adds a new FilterClause to the Boolean Filter container
     *
     * @param filterClause A FilterClause object containing a Filter and an Occur parameter
     */
    public void add(FilterClause filterClause) {
        clauses.add(filterClause);
    }

    /** Convenience overload wrapping the filter and occur into a FilterClause. */
    public final void add(Filter filter, Occur occur) {
        add(new FilterClause(filter, occur));
    }

    /**
     * Returns the list of clauses
     */
    public List<FilterClause> clauses() {
        return clauses;
    }

    /**
     * Returns an iterator on the clauses in this query. It implements the {@link Iterable} interface to
     * make it possible to do:
     * <pre class="prettyprint">for (FilterClause clause : booleanFilter) {}</pre>
     */
    public final Iterator<FilterClause> iterator() {
        return clauses().iterator();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if ((obj == null) || (obj.getClass() != this.getClass())) {
            return false;
        }

        final XBooleanFilter other = (XBooleanFilter) obj;
        return clauses.equals(other.clauses);
    }

    @Override
    public int hashCode() {
        return 657153718 ^ clauses.hashCode();
    }

    /**
     * Prints a user-readable version of this Filter.
     */
    @Override
    public String toString(String field) {
        final StringBuilder buffer = new StringBuilder("BooleanFilter(");
        final int minLen = buffer.length();
        for (final FilterClause c : clauses) {
            if (buffer.length() > minLen) {
                buffer.append(' ');
            }
            buffer.append(c);
        }
        return buffer.append(')').toString();
    }

    // Bundles a clause's DocIdSet together with its optional random-access Bits
    // and a lazily-created iterator.
    static class ResultClause {
        public final DocIdSet docIdSet;
        public final Bits bits;
        public final FilterClause clause;

        DocIdSetIterator docIdSetIterator;

        ResultClause(DocIdSet docIdSet, Bits bits, FilterClause clause) {
            this.docIdSet = docIdSet;
            this.bits = bits;
            this.clause = clause;
        }

        /**
         * @return An iterator, but caches it for subsequent usage. Don't use if iterator is consumed in one invocation.
         */
        DocIdSetIterator iterator() throws IOException {
            if (docIdSetIterator != null) {
                return docIdSetIterator;
            } else {
                return docIdSetIterator = docIdSet.iterator();
            }
        }
    }

    // Returns true iff the (forward-only) iterator matches doc id `target`,
    // advancing it as needed; never rewinds, so targets must be non-decreasing
    // across calls on the same iterator.
    static boolean iteratorMatch(DocIdSetIterator docIdSetIterator, int target) throws IOException {
        assert docIdSetIterator != null;
        int current = docIdSetIterator.docID();
        if (current == DocIdSetIterator.NO_MORE_DOCS || target < current) {
            return false;
        } else {
            if (current == target) {
                return true;
            } else {
                return docIdSetIterator.advance(target) == target;
            }
        }
    }
}
Asimov4/elasticsearch
src/main/java/org/elasticsearch/common/lucene/search/XBooleanFilter.java
Java
apache-2.0
13,959
<?php
/**
 * EmailValidationRequest
 *
 * PHP version 7.2
 *
 * @category Class
 * @package  Infobip
 * @author   Infobip Support
 * @link     https://www.infobip.com
 */

/**
 * Infobip Client API Libraries OpenAPI Specification
 *
 * OpenAPI specification containing public endpoints supported in client API libraries.
 *
 * Contact: support@infobip.com
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * Do not edit the class manually.
 */

namespace Infobip\Model;

use \ArrayAccess;
use \Infobip\ObjectSerializer;

/**
 * EmailValidationRequest Class Doc Comment
 *
 * @category Class
 * @package  Infobip
 * @author   Infobip Support
 * @link     https://www.infobip.com
 * @implements \ArrayAccess<TKey, TValue>
 * @template TKey int|null
 * @template TValue mixed|null
 */
class EmailValidationRequest implements ModelInterface, ArrayAccess, \JsonSerializable
{
    public const DISCRIMINATOR = null;

    /**
     * The original name of the model.
     *
     * @var string
     */
    protected static $openAPIModelName = 'EmailValidationRequest';

    /**
     * Array of property to type mappings. Used for (de)serialization
     *
     * @var string[]
     */
    protected static $openAPITypes = [
        'to' => 'string'
    ];

    /**
     * Array of property to format mappings. Used for (de)serialization
     *
     * @var string[]
     * @phpstan-var array<string, string|null>
     * @psalm-var array<string, string|null>
     */
    protected static $openAPIFormats = [
        'to' => null
    ];

    /**
     * Array of property to type mappings. Used for (de)serialization
     *
     * @return array
     */
    public static function openAPITypes()
    {
        return self::$openAPITypes;
    }

    /**
     * Array of property to format mappings. Used for (de)serialization
     *
     * @return array
     */
    public static function openAPIFormats()
    {
        return self::$openAPIFormats;
    }

    /**
     * Array of attributes where the key is the local name,
     * and the value is the original name
     *
     * @var string[]
     */
    protected static $attributeMap = [
        'to' => 'to'
    ];

    /**
     * Array of attributes to setter functions (for deserialization of responses)
     *
     * @var string[]
     */
    protected static $setters = [
        'to' => 'setTo'
    ];

    /**
     * Array of attributes to getter functions (for serialization of requests)
     *
     * @var string[]
     */
    protected static $getters = [
        'to' => 'getTo'
    ];

    /**
     * Array of attributes where the key is the local name,
     * and the value is the original name
     *
     * @return array
     */
    public static function attributeMap()
    {
        return self::$attributeMap;
    }

    /**
     * Array of attributes to setter functions (for deserialization of responses)
     *
     * @return array
     */
    public static function setters()
    {
        return self::$setters;
    }

    /**
     * Array of attributes to getter functions (for serialization of requests)
     *
     * @return array
     */
    public static function getters()
    {
        return self::$getters;
    }

    /**
     * The original name of the model.
     *
     * @return string
     */
    public function getModelName()
    {
        return self::$openAPIModelName;
    }

    /**
     * Associative array for storing property values
     *
     * @var mixed[]
     */
    protected $container = [];

    /**
     * Constructor
     *
     * @param mixed[] $data Associated array of property values
     *                      initializing the model
     */
    public function __construct(array $data = null)
    {
        $this->container['to'] = $data['to'] ?? null;
    }

    /**
     * Show all the invalid properties with reasons.
     *
     * Fix over the generated code: the length checks are only performed when
     * 'to' is a non-null value. Previously mb_strlen() was called even when
     * 'to' was null, which raises a deprecation warning on PHP 8.1+ (and a
     * TypeError on newer versions) while also reporting spurious length errors.
     *
     * @return array invalid properties with reasons
     */
    public function listInvalidProperties()
    {
        $invalidProperties = [];

        if ($this->container['to'] === null) {
            $invalidProperties[] = "'to' can't be null";
        } else {
            if ((mb_strlen($this->container['to']) > 2147483647)) {
                $invalidProperties[] = "invalid value for 'to', the character length must be smaller than or equal to 2147483647.";
            }

            if ((mb_strlen($this->container['to']) < 1)) {
                $invalidProperties[] = "invalid value for 'to', the character length must be bigger than or equal to 1.";
            }
        }

        return $invalidProperties;
    }

    /**
     * Validate all the properties in the model
     * return true if all passed
     *
     * @return bool True if all properties are valid
     */
    public function valid()
    {
        return count($this->listInvalidProperties()) === 0;
    }

    /**
     * Gets to
     *
     * @return string
     */
    public function getTo()
    {
        return $this->container['to'];
    }

    /**
     * Sets to
     *
     * @param string $to Email address of the recipient.
     *
     * @return self
     */
    public function setTo($to)
    {
        if ((mb_strlen($to) > 2147483647)) {
            throw new \InvalidArgumentException('invalid length for $to when calling EmailValidationRequest., must be smaller than or equal to 2147483647.');
        }
        if ((mb_strlen($to) < 1)) {
            throw new \InvalidArgumentException('invalid length for $to when calling EmailValidationRequest., must be bigger than or equal to 1.');
        }

        $this->container['to'] = $to;

        return $this;
    }

    /**
     * Returns true if offset exists. False otherwise.
     *
     * @param integer $offset Offset
     *
     * @return boolean
     */
    public function offsetExists($offset)
    {
        return isset($this->container[$offset]);
    }

    /**
     * Gets offset.
     *
     * @param integer $offset Offset
     *
     * @return mixed|null
     */
    public function offsetGet($offset)
    {
        return $this->container[$offset] ?? null;
    }

    /**
     * Sets value based on offset.
     *
     * @param int|null $offset Offset
     * @param mixed    $value  Value to be set
     *
     * @return void
     */
    public function offsetSet($offset, $value)
    {
        if (is_null($offset)) {
            $this->container[] = $value;
        } else {
            $this->container[$offset] = $value;
        }
    }

    /**
     * Unsets offset.
     *
     * @param integer $offset Offset
     *
     * @return void
     */
    public function offsetUnset($offset)
    {
        unset($this->container[$offset]);
    }

    /**
     * Serializes the object to a value that can be serialized natively by json_encode().
     * @link https://www.php.net/manual/en/jsonserializable.jsonserialize.php
     *
     * @return mixed Returns data which can be serialized by json_encode(), which is a value
     * of any type other than a resource.
     */
    public function jsonSerialize()
    {
        return ObjectSerializer::sanitizeForSerialization($this);
    }

    /**
     * Gets the string presentation of the object
     *
     * @return string
     */
    public function __toString()
    {
        return json_encode(
            ObjectSerializer::sanitizeForSerialization($this),
            JSON_PRETTY_PRINT
        );
    }

    /**
     * Gets a header-safe presentation of the object
     *
     * @return string
     */
    public function toHeaderValue()
    {
        return json_encode(ObjectSerializer::sanitizeForSerialization($this));
    }
}
infobip/infobip-api-php-client
Infobip/Model/EmailValidationRequest.php
PHP
apache-2.0
7,609
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2012, Red Hat, Inc. and/or its affiliates, and individual
 * contributors by the @authors tag. See the copyright.txt in the
 * distribution for a full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.as.quickstart.deltaspike.partialbean;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;

import javax.enterprise.context.ApplicationScoped;

/**
 * Dynamic implementation backing a DeltaSpike Partial Bean.
 * <p>
 * The binding annotation (@ExamplePartialBeanBinding) associates this handler
 * with one or more abstract classes or interfaces; every abstract,
 * unimplemented method declared on those types is routed through
 * {@link #invoke(Object, Method, Object[])}.
 */
@ExamplePartialBeanBinding
@ApplicationScoped
public class ExamplePartialBeanImplementation implements InvocationHandler {

    /**
     * Handles every abstract method of the bound partial beans. In this
     * QuickStart it backs the "sayHello" method: the first argument is
     * greeted by name.
     *
     * @param proxy  the proxy instance the method was invoked on
     * @param method the abstract method being invoked
     * @param args   the arguments passed to the invoked method
     * @return the greeting built from the first argument
     */
    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        final Object name = args[0];
        return "Hello " + name;
    }
}
magro/jboss-as-quickstart
deltaspike-partialbean-basic/src/main/java/org/jboss/as/quickstart/deltaspike/partialbean/ExamplePartialBeanImplementation.java
Java
apache-2.0
1,893
package com.github.megatronking.svg.iconlibs;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Paint;

import com.github.megatronking.svg.support.SVGRenderer;

/**
 * AUTO-GENERATED FILE. DO NOT MODIFY.
 *
 * This class was automatically generated by the
 * SVG-Generator. It should not be modified by hand.
 */
public class ic_bluetooth_audio extends SVGRenderer {

    // Sets up a 24x24dp icon rendered fully opaque.
    public ic_bluetooth_audio(Context context) {
        super(context);
        mAlpha = 1.0f;
        mWidth = dip2px(24.0f);
        mHeight = dip2px(24.0f);
    }

    // Rebuilds the icon path each call, scales it from the 24x24 viewport to
    // the requested w x h, and fills it in opaque black (0xFF000000) through
    // the supplied color filter.
    @Override
    public void render(Canvas canvas, int w, int h, ColorFilter filter) {
        final float scaleX = w / 24.0f;
        final float scaleY = h / 24.0f;

        mPath.reset();
        mRenderPath.reset();

        mFinalPathMatrix.setValues(new float[]{1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f});
        mFinalPathMatrix.postScale(scaleX, scaleY);

        // Generated path data (from the source SVG); do not edit by hand.
        mPath.moveTo(14.24f, 12.01f);
        mPath.rLineTo(2.32f, 2.32f);
        mPath.rCubicTo(0.28f, -0.72f, 0.44f, -1.51f, 0.44f, -2.33f);
        mPath.rCubicTo(0.0f, -0.82f, -0.16f, -1.59f, -0.43f, -2.31f);
        mPath.rLineTo(-2.33f, 2.32f);
        mPath.close();
        mPath.moveTo(14.24f, 12.01f);
        mPath.rMoveTo(5.29f, -5.3f);
        mPath.rLineTo(-1.26f, 1.26f);
        mPath.rCubicTo(0.63f, 1.21f, 0.98f, 2.57f, 0.98f, 4.02f);
        mPath.rCubicTo(0.0f, 1.4499998f, -0.36f, 2.82f, -0.98f, 4.02f);
        mPath.rLineTo(1.2f, 1.2f);
        mPath.rCubicTo(0.97f, -1.54f, 1.54f, -3.36f, 1.54f, -5.31f);
        mPath.rCubicTo(-0.01f, -1.89f, -0.55f, -3.67f, -1.48f, -5.19f);
        mPath.close();
        mPath.moveTo(19.529999f, 6.71f);
        mPath.rMoveTo(-3.82f, 1.0f);
        mPath.lineTo(10.0f, 2.0f);
        mPath.lineTo(9.0f, 2.0f);
        mPath.rLineTo(0f, 7.59f);
        mPath.lineTo(4.41f, 5.0f);
        mPath.lineTo(3.0f, 6.41f);
        mPath.lineTo(8.59f, 12.0f);
        mPath.lineTo(3.0f, 17.59f);
        mPath.lineTo(4.41f, 19.0f);
        mPath.lineTo(9.0f, 14.41f);
        mPath.lineTo(9.0f, 22.0f);
        mPath.rLineTo(1.0f, 0f);
        mPath.rLineTo(5.71f, -5.71f);
        mPath.rLineTo(-4.3f, -4.29f);
        mPath.rLineTo(4.3f, -4.29f);
        mPath.close();
        mPath.moveTo(15.709999f, 7.71f);
        mPath.moveTo(11.0f, 5.83f);
        mPath.rLineTo(1.88f, 1.88f);
        mPath.lineTo(11.0f, 9.59f);
        mPath.lineTo(11.0f, 5.83f);
        mPath.close();
        mPath.moveTo(11.0f, 5.83f);
        mPath.rMoveTo(1.88f, 10.46f);
        mPath.lineTo(11.0f, 18.17f);
        mPath.rLineTo(0f, -3.76f);
        mPath.rLineTo(1.88f, 1.88f);
        mPath.close();
        mPath.moveTo(12.88f, 16.29f);

        mRenderPath.addPath(mPath, mFinalPathMatrix);

        // The fill paint is created lazily and reused across render calls.
        if (mFillPaint == null) {
            mFillPaint = new Paint();
            mFillPaint.setStyle(Paint.Style.FILL);
            mFillPaint.setAntiAlias(true);
        }
        mFillPaint.setColor(applyAlpha(-16777216, 1.0f));
        mFillPaint.setColorFilter(filter);
        canvas.drawPath(mRenderPath, mFillPaint);
    }

}
MegatronKing/SVG-Android
docs/notification/java/ic_bluetooth_audio.java
Java
apache-2.0
3,221
//
// Webble World 3.0 (IntelligentPad system for the web)
//
// Copyright (c) 2010-2015 Micke Nicander Kuwahara, Giannis Georgalis, Yuzuru Tanaka
//     in Meme Media R&D Group of Hokkaido University, Japan. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Additional restrictions may apply. See the LICENSE file for more information.
//

'use strict';

/**
 * Group management controller that drives the view groups.html
 *
 * @author Giannis Georgalis <jgeorgal@meme.hokudai.ac.jp>
 */
ww3Controllers.controller('GroupsCtrl', ['$scope', '$http', 'gettext', 'authService', 'confirm', 'Users', 'UserAccounts',
	function ($scope, $http, gettext, authService, confirm, Users, UserAccounts) {

	////////////////////////////////////////////////////////////////////
	// Utility functions
	//

	// Group to formatted visible value (f): read-only groups are greyed out.
	function gToF(g) {
		if (g.readonly) {
			return '<i style="color: #b0b8c5;">' + g.name + '</i>';
		}
		else
			return g.name;
	}

	// Group to a chart row: [formatted name cell, parent id, tooltip].
	function gToRow(g) {
		return [{ v: g.id, f: gToF(g) }, g.parent_id, g.description];
	}

	// Groups array to full chart data (header row + one row per group).
	function gsToData(gs) {
		var rows = [ ['Name', 'Parent Group', 'Tooltip'] ]; // cols
		gs.forEach(function(g) {
			rows.push(gToRow(g));
		});
		return rows;
	}

	//******************************************************************

	// Marks groups the current user does not belong to as readonly, then
	// regenerates the chart rows.
	function generateChartData(groupsArray, myGroupsArray) {
		if (myGroupsArray) {
			var myGroupsById = {};
			for (var i = 0; i < myGroupsArray.length; ++i)
				myGroupsById[myGroupsArray[i].id] = myGroupsArray[i];

			for (i = 0; i < groupsArray.length; ++i) {
				if (!myGroupsById.hasOwnProperty(groupsArray[i].id))
					groupsArray[i].readonly = true;
			}
		}
		return gsToData(groupsArray); // Regenerate if necessary
	}

	//******************************************************************

	// Reloads all groups from the server and refreshes the chart.
	function refreshAllGroups() {
		return $http.get('/api/groups').then(function(resp) {
			$scope.groups = resp.data;
			$scope.chartData = generateChartData($scope.groups, $scope.myGroups);
		});
	}

	////////////////////////////////////////////////////////////////////
	// Scope properties & initialization
	//

	/*
	$scope.users = Users.query();
	$scope.userAccounts = UserAccounts.query();
	*/

	$scope.tabs = {};
	$scope.editable = $scope.user && $scope.user.role === 'adm';

	$scope.groups = [];
	$scope.alerts = [];
	refreshAllGroups();

	$scope.selectedGroup = null;
	$scope.selectedGroupEdit = false;

	// Non-admins become editable if they own/belong to at least one group.
	if (!$scope.editable) {
		$http.get('/api/mygroups').then(function (resp) {
			$scope.myGroups = resp.data;
			if ($scope.myGroups.length > 0) {
				$scope.editable = true;

				if ($scope.groups) // Regenerate chart data if necessary
					$scope.chartData = generateChartData($scope.groups, $scope.myGroups);
			}
		});
	}

	$scope.groupData = {};

	// Available publication policies
	//
	$scope.availablePolicies = [
		{
			enum: 'open',
			name: gettext("Open for publications"),
			help: gettext("Members can publish and update freely under this group")
		},
		{
			enum: 'moderate_new',
			name: gettext("Moderate new publications"),
			help: gettext("Members can update freely but new publications require the group owner's approval")
		},
		{
			enum: 'moderate_updates',
			name: gettext("Moderate all updates"),
			help: gettext("All new publications and updates (by members) under this group require the owner's approval")
		},
		{
			enum: 'closed',
			name: gettext("Closed for publications"),
			help: gettext("All publications and updates under this group are suspended")
		}
	];
	$scope.availablePoliciesById = {
		open: $scope.availablePolicies[0],
		moderate_new: $scope.availablePolicies[1],
		moderate_updates: $scope.availablePolicies[2],
		closed: $scope.availablePolicies[3]
	};

	////////////////////////////////////////////////////////////////////
	// Public scope functions
	//

	$scope.closeAlert = function (index) {
		$scope.alerts.splice(index, 1);
	};

	// Typeahead source: queries users matching q (admin endpoint for admins).
	$scope.getUsers = function(q) {
		var url = $scope.user.role !== 'adm' ? '/api/users?limit=20&q=' : '/api/adm/users?limit=20&q=';
		return $http.get(url + encodeURIComponent(q)).then(function(resp){
			return resp.data;
		});
	};

	//******************************************************************

	// Creates a new group (optionally as a subgroup of the selected one).
	$scope.createGroup = function() {

		var url = $scope.groupData.subgroup && $scope.selectedGroup && $scope.selectedGroup.id ?
			'/api/groups/' + $scope.selectedGroup.id : '/api/groups';

		return $http.post(url, {
			group: $scope.groupData,
			owner: $scope.groupData.owner && $scope.groupData.owner.email
		}).then(function (resp) {

			var g = resp.data;
			$scope.groups.push(g);
			if ($scope.chartData)
				$scope.chartData.push(gToRow(g));

			$scope.onGroupSelected(null);
			$scope.alerts.push({ type: 'success', msg: gettext("Created Group") + ": " + g.name });
		}, function (err) {
			$scope.alerts.push({ type: 'danger', msg: err.data });
		});
	};

	// Saves edits of the currently selected group.
	$scope.modifySelectedGroup = function() {

		return $http.put('/api/groups/' + $scope.selectedGroup.id, {
			group: $scope.groupData,
			owner: ($scope.groupData.owner && $scope.groupData.owner.email) || undefined
		}).then(function(resp) {

			var g = resp.data;
			for (var i = 0; i < $scope.groups.length; ++i) {
				if ($scope.groups[i].id == g.id) {
					$scope.groups[i] = g;
					break;
				}
			}
			$scope.chartData = gsToData($scope.groups);

			$scope.onGroupSelected(null);
		}, function(err) {
			$scope.alerts.push({ type: 'danger', msg: err.data });
		});
	};

	$scope.createOrModifyGroup = function() {
		return $scope.selectedGroupEdit ? $scope.modifySelectedGroup() : $scope.createGroup();
	};

	// Deletes the selected group after confirmation.
	$scope.deleteGroup = function() {

		if ($scope.selectedGroup && $scope.selectedGroup.id) {

			// Capture the name now: onGroupSelected(null) below clears the
			// selection before the success alert is shown. (BUGFIX: the
			// original code referenced an undefined variable `g` here, which
			// threw a ReferenceError instead of showing the alert.)
			var deletedGroupName = $scope.selectedGroup.name;

			confirm.show(gettext("Delete Group:") + " " + $scope.selectedGroup.name,
				gettext("If you confirm, any subgroups will be transfered to the parent group and the group's reference will be revoked from all its published objects"),
				gettext("Delete Group"), gettext("Do Not Delete Group")).then(function () {

				$http.delete('/api/groups/' + $scope.selectedGroup.id).then(function() {

					refreshAllGroups().then(function() { // Group deletion may affect other groups also

						$scope.onGroupSelected(null);
						$scope.alerts.push({ type: 'success', msg: gettext("Deleted Group") + ": " + deletedGroupName });
					});
				}, function (err) {
					$scope.alerts.push({ type: 'danger', msg: err.data });
				});
			});
		}
	};

	// Loads the objects published under the selected group.
	$scope.retrievePublishedObjects = function() {

		$http.get('/api/groups/' + $scope.selectedGroup.id + '/objects').then(function(resp) {
			$scope.publishedObjects = resp.data;
		});
	};

	// Removes an object's membership from the selected group after confirmation.
	$scope.deauthorizePublishedObject = function(obj, index) {

		confirm.show(gettext("Deauthorize Object:") + " " + obj.repr,
			gettext("If you confirm, the selected object will no longer be member of the group."),
			gettext("Deauthorize"), gettext("Do Not Deauthorize")).then(function () {

			$http.put('/api/groups/' + $scope.selectedGroup.id + '/objects', {
				obj: obj.id,
				remove: true
			}).then(function() {
				$scope.publishedObjects.splice(index, 1);
			}, function (err) {
				$scope.alerts.push({ type: 'danger', msg: err.data });
			});
		});
	};

	//******************************************************************

	// Chart selection callback: selects a group by id, or clears selection.
	$scope.onGroupSelected = function(selectedItem) {

		$scope.publishedObjects = null; // Clear the publihsed object list in any case

		if (selectedItem) {

			for (var i = 0; i < $scope.groups.length; ++i) {

				if ($scope.groups[i].id == selectedItem) {

					$scope.selectedGroup = $scope.groups[i];

					if ($scope.selectedGroup.readonly)
						$scope.tabs.info = true;
					else if ($scope.selectedGroupEdit)
						$scope.groupData = angular.copy($scope.selectedGroup);
					break;
				}
			}
		}
		else {
			$scope.groupData = {};

			$scope.selectedGroup = null;
			$scope.selectedGroupEdit = false;

			// Jump to info tab
			$scope.tabs.info = true;
		}
	};

	// Toggles between "edit selected group" and "create new group" modes.
	$scope.toggleSelectedGroupEdit = function() {

		if (!$scope.selectedGroupEdit && $scope.selectedGroup && $scope.selectedGroup.id) {

			$scope.groupData = angular.copy($scope.selectedGroup);
			$scope.selectedGroupEdit = true;
		}
		else {
			$scope.groupData = {};
			$scope.selectedGroupEdit = false;
		}
	};

	// Adds the given user to the given group.
	$scope.addUserToGroup = function(user, group) {

		if (user && user.email && group && group.id) {

			//console.log("FAKE ADDING USER: ", user.name.full, "TO GROUP:", group.name);

			$http.put('/api/groups/' + group.id + '/users', { user: user.email || user.username })
				.then(function (resp) {
					$scope.alerts.push({ type: 'success', msg: gettext("User added to group") + ": " + group.name });
				}, function (err) {
					$scope.alerts.push({ type: 'danger', msg: err.data });
				});
		}
	};

	//******************************************************************
}]);
BogusCurry/wblwrld3
app/scripts/controllers/groups.js
JavaScript
apache-2.0
9,332
package main

import (
	"fmt"
)

// Demonstrates using a closed channel to signal "no more jobs":
// a worker goroutine drains the jobs channel, announces completion,
// and notifies main over the done channel.
func main() {
	jobs := make(chan int, 5)
	done := make(chan bool)

	go func() {
		// range keeps receiving until the channel is closed and drained.
		for j := range jobs {
			fmt.Println("received job", j)
		}
		fmt.Println("received all jobs")
		done <- true
	}()

	for j := 1; j <= 3; j++ {
		jobs <- j
		fmt.Println("sent job", j)
	}
	close(jobs)
	fmt.Println("sent all jobs")

	// Block until the worker has observed the close.
	<-done
}
jittakal/4go
gbe/cchannels/closing-channels.go
GO
apache-2.0
408
package io.leangen.graphql.services; import io.leangen.graphql.annotations.GraphQLArgument; import io.leangen.graphql.annotations.GraphQLComplexity; import io.leangen.graphql.annotations.GraphQLContext; import io.leangen.graphql.annotations.GraphQLId; import io.leangen.graphql.annotations.GraphQLMutation; import io.leangen.graphql.annotations.GraphQLQuery; import io.leangen.graphql.domain.Address; import io.leangen.graphql.domain.Education; import io.leangen.graphql.domain.Street; import io.leangen.graphql.domain.User; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; /** * Created by bojan.tomic on 3/5/16. */ public class UserService<T> { private Collection<Address> addresses = new ArrayList<>(); public UserService() { Address address1 = new Address(); address1.setTypes(Arrays.asList("residential", "home")); Street street11 = new Street("Fakestreet", 300); Street street12 = new Street("Realstreet", 123); address1.getStreets().add(street11); address1.getStreets().add(street12); Address address2 = new Address(); address2.setTypes(Collections.singletonList("office")); Street street21 = new Street("Oneway street", 100); Street street22 = new Street("Twowaystreet", 200); address2.getStreets().add(street21); address2.getStreets().add(street22); this.addresses.add(address1); this.addresses.add(address2); } @GraphQLQuery(name = "users") public List<User<String>> getUsersById(@GraphQLArgument(name = "id") @GraphQLId Integer id) { User<String> user = new User<>(); user.id = id; user.name = "Tatko"; user.uuid = UUID.randomUUID(); user.registrationDate = new Date(); user.addresses = addresses; User<String> user2 = new User<>(); user2.id = id + 1; user2.name = "Tzar"; user2.uuid = UUID.randomUUID(); user2.registrationDate = new Date(); user2.addresses = addresses; return 
Arrays.asList(user, user2); } @GraphQLQuery(name = "users") public List<User<String>> getUsersByEducation(@GraphQLArgument(name = "education") Education education) { return getUsersById(1); } // @GraphQLQuery(name = "user") // public <G> G getUsersByMagic(@GraphQLArgument(name = "magic") int magic) { // return (G)getUserById(magic); // } @GraphQLQuery(name = "users") public List<User<String>> getUsersByAnyEducation(@GraphQLArgument(name = "educations") List<? super T> educations) { return getUsersById(1); } @GraphQLQuery(name = "usersArr") @SuppressWarnings("unchecked") public User<String>[] getUsersByAnyEducationArray(@GraphQLArgument(name = "educations") T[] educations) { List<User<String>> users = getUsersById(1); return users.toArray(new User[0]); } @GraphQLQuery(name = "users") @GraphQLComplexity("2 * childScore") @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public List<User<String>> getUsersByRegDate(@GraphQLArgument(name = "regDate") Optional<Date> date) { return getUsersById(1); } @GraphQLQuery(name = "usersByDate") @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public List<User<String>> getUsersByDate(@GraphQLArgument(name = "regDate") Optional<Date> date) { return getUsersById(1); } @GraphQLMutation(name = "updateUsername") public User<String> updateUsername(@GraphQLContext User<String> user, @GraphQLArgument(name = "username") String username) { user.name = username; return user; } //TODO figure out how to deal with void returns :: return source object instead? 
// @GraphQLMutation(name="user") // public void updateTitle(@GraphQLContext User user, String title) { // user.title = title; // } @GraphQLQuery(name = "user") public User<String> getUserById(@GraphQLId(relayId = true) Integer wonkyName) { User<String> user = new User<>(); user.id = 1; user.name = "One Dude"; user.title = "The One"; user.uuid = UUID.randomUUID(); user.registrationDate = new Date(); user.addresses = addresses; return user; } @GraphQLQuery(name = "users") public List<User<String>> getUserByUuid(@GraphQLArgument(name = "uuid") UUID uuid) { return getUsersById(1); } @GraphQLQuery(name = "zmajs") public Collection<String> extraFieldAll(@GraphQLContext User<String> source) { return Arrays.asList("zmaj", "azdaha"); } @GraphQLQuery(name = "me") public Map<String, String> getCurrentUser() { Map<String, String> user = new HashMap<>(); user.put("id", "1000"); user.put("name", "Dyno"); return user; } @GraphQLMutation(name = "upMe") public Map<String, String> getUpdateCurrentUser(@GraphQLArgument(name = "updates") Map<String, String> updates) { return updates; } }
leangen/GraphQL-SPQR
src/test/java/io/leangen/graphql/services/UserService.java
Java
apache-2.0
5,197
/** * @license Apache-2.0 * * Copyright (c) 2022 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Benchmark Boost `TODO`. */ #include <stdlib.h> #include <stdio.h> #include <math.h> #include <time.h> #include <sys/time.h> #include <boost/random/mersenne_twister.hpp> #include <boost/random/uniform_real_distribution.hpp> #include <boost/TODO/TODO.hpp> using boost::random::uniform_real_distribution; using boost::random::mt19937; #define NAME "TODO" #define ITERATIONS 1000000 #define REPEATS 3 /** * Prints the TAP version. */ void print_version() { printf( "TAP version 13\n" ); } /** * Prints the TAP summary. * * @param total total number of tests * @param passing total number of passing tests */ void print_summary( int total, int passing ) { printf( "#\n" ); printf( "1..%d\n", total ); // TAP plan printf( "# total %d\n", total ); printf( "# pass %d\n", passing ); printf( "#\n" ); printf( "# ok\n" ); } /** * Prints benchmarks results. * * @param elapsed elapsed time in seconds */ void print_results( double elapsed ) { double rate = (double)ITERATIONS / elapsed; printf( " ---\n" ); printf( " iterations: %d\n", ITERATIONS ); printf( " elapsed: %0.9f\n", elapsed ); printf( " rate: %0.9f\n", rate ); printf( " ...\n" ); } /** * Returns a clock time. * * @return clock time */ double tic() { struct timeval now; gettimeofday( &now, NULL ); return (double)now.tv_sec + (double)now.tv_usec/1.0e6; } /** * Runs a benchmark. 
* * @return elapsed time in seconds */ double benchmark() { double elapsed; double x; double y; double t; int i; // Define a new pseudorandom number generator: mt19937 rng; // Define a uniform distribution for generating pseudorandom numbers as "doubles" between a minimum value (inclusive) and a maximum value (exclusive): uniform_real_distribution<> randu( 0.0, 1.0 ); t = tic(); for ( i = 0; i < ITERATIONS; i++ ) { x = randu( rng ); y = 0.0; // TODO if ( y != y ) { printf( "should not return NaN\n" ); break; } } elapsed = tic() - t; if ( y != y ) { printf( "should not return NaN\n" ); } return elapsed; } /** * Main execution sequence. */ int main( void ) { double elapsed; int i; print_version(); for ( i = 0; i < REPEATS; i++ ) { printf( "# cpp::boost::%s\n", NAME ); elapsed = benchmark(); print_results( elapsed ); printf( "ok %d benchmark finished\n", i+1 ); } print_summary( REPEATS, REPEATS ); return 0; }
stdlib-js/stdlib
tools/snippets/benchmark/cpp/boost/benchmark.cpp
C++
apache-2.0
2,966
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.identitymanagement.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.identitymanagement.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.StringUtils; /** * ChangePasswordRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ChangePasswordRequestMarshaller implements Marshaller<Request<ChangePasswordRequest>, ChangePasswordRequest> { public Request<ChangePasswordRequest> marshall(ChangePasswordRequest changePasswordRequest) { if (changePasswordRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } Request<ChangePasswordRequest> request = new DefaultRequest<ChangePasswordRequest>(changePasswordRequest, "AmazonIdentityManagement"); request.addParameter("Action", "ChangePassword"); request.addParameter("Version", "2010-05-08"); request.setHttpMethod(HttpMethodName.POST); if (changePasswordRequest.getOldPassword() != null) { request.addParameter("OldPassword", StringUtils.fromString(changePasswordRequest.getOldPassword())); } if (changePasswordRequest.getNewPassword() != null) { request.addParameter("NewPassword", StringUtils.fromString(changePasswordRequest.getNewPassword())); } return request; } }
jentfoo/aws-sdk-java
aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/ChangePasswordRequestMarshaller.java
Java
apache-2.0
2,157
package ru.revdaalex.lsp.chapterI.storage; import ru.revdaalex.lsp.chapterI.food.Food; import ru.revdaalex.lsp.chapterI.interfaces.Storage; import java.util.ArrayList; /** * Warehouse class. * Created by revdaalex on 04.07.2016. */ public class Warehouse implements Storage { /** * ArrayList Warehouse. */ private final ArrayList<Food> warehouse = new ArrayList<Food>(); /** * Implements interface method add in Warehouse. * @param food */ public void add(Food food) { this.warehouse.add(food); } /** * Implements interface method sortQuality in Warehouse. * @param food * @return */ public boolean sortQuality(Food food) { if (food.getExpiryDateInPercents() < 25){ return true; } return false; } public ArrayList<Food> getFood() { return warehouse; } }
revdaalex/learn_java
chapter3/LSP/ChapterI/src/main/java/ru/revdaalex/lsp/chapterI/storage/Warehouse.java
Java
apache-2.0
900
#!/usr/bin/python # # OpenStack Heat Plugin for interfacing with VMware Big Data Extensions # # Chris Mutchler - chris@virtualelephant.com # http://www.VirtualElephant.com # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time import json import base64 import requests import subprocess import pyVmomi from pyVim import connect from pyVim.connect import SmartConnect, Disconnect from pyVmomi import vmodl, vim from heat.engine import constraints, properties, resource from heat.openstack.common import log as logging from neutronclient.neutron import client logger = logging.getLogger(__name__) class BigDataExtensions(resource.Resource): PROPERTIES = ( BDE_ENDPOINT, VCM_SERVER, USERNAME, PASSWORD, CLUSTER_NAME, CLUSTER_TYPE, NETWORK, CLUSTER_PASSWORD, CLUSTER_RP, VIO_CONFIG, BDE_CONFIG, SECURITY_GROUP, SUBNET ) = ( 'bde_endpoint', 'vcm_server', 'username', 'password', 'cluster_name', 'cluster_type', 'network', 'cluster_password', 'cluster_rp', 'vio_config', 'bde_config', 'security_group', 'subnet' ) properties_schema = { BDE_ENDPOINT: properties.Schema( properties.Schema.STRING, required=True, default='bde.localdomain' ), VCM_SERVER: properties.Schema( properties.Schema.STRING, required=True, default='vcenter.localdomain' ), USERNAME: properties.Schema( properties.Schema.STRING, required=True, default='administrator@vsphere.local' ), PASSWORD: properties.Schema( properties.Schema.STRING, required=True, default='password' ), CLUSTER_NAME: properties.Schema( properties.Schema.STRING, 
required=True ), CLUSTER_TYPE: properties.Schema( properties.Schema.STRING, required=True ), NETWORK: properties.Schema( properties.Schema.STRING, required=True ), CLUSTER_PASSWORD: properties.Schema( properties.Schema.STRING, required=False ), CLUSTER_RP: properties.Schema( properties.Schema.STRING, required=True, default='openstackRP' ), VIO_CONFIG: properties.Schema( properties.Schema.STRING, required=True, default='/usr/local/bin/etc/vio.config' ), BDE_CONFIG: properties.Schema( properties.Schema.STRING, required=False, default='/usr/local/bin/etc/bde.config' ), SECURITY_GROUP: properties.Schema( properties.Schema.STRING, required=False, default='9d3ecec8-e0e3-4088-8c71-8c35cd67dd8b' ), SUBNET: properties.Schema( properties.Schema.STRING, required=True ) } def _open_connection(self): bde_server = self.properties.get(self.BDE_ENDPOINT) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) header = {'content-type': 'application/x-www-form-urlencoded'} prefix = 'https://' port = ':8443' auth_string = "/serengeti/j_spring_security_check" data = 'j_username=' + bde_user + '&j_password=' + bde_pass s = requests.session() url = prefix + bde_server + port + auth_string r = s.post(url, data, headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Authentication status code %s") % r.json) return s def _close_connection(self): bde_server = self.properties.get(self.BDE_ENDPOINT) header = {'content-type': 'application/x-www-form-urlencoded'} url = 'https://' + bde_server + ':8443/serengeti/j_spring_security_logout' s = requests.session() r = s.post(url, headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Log out status code %s") % r.json) return def _create_nsx_ports(self): # Load VIO environment variables from /usr/local/etc/vio.config in_file = "/usr/local/etc/vio.config" f = open(in_file, "ro") for line in f: if "OS_AUTH_URL" in line: trash, os_auth_url = map(str, line.split("=")) 
os_auth_url = os_auth_url.rstrip('\n') logger.info(_("VirtualElephant::VMware::BDE - DEBUG os_auth_url %s") % os_auth_url) elif "OS_TENANT_ID" in line: trash, os_tenant_id = map(str,line.split("=")) os_tenant_id = os_tenant_id.rstrip('\n') elif "OS_TENANT_NAME" in line: trash, os_tenant_name = map(str, line.split("=")) os_tenant_name = os_tenant_name.rstrip('\n') elif "OS_USERNAME" in line: trash, os_username = map(str, line.split("=")) os_username = os_username.rstrip('\n') elif "OS_PASSWORD" in line: trash, os_password = map(str, line.split("=")) os_password = os_password.rstrip('\n') elif "OS_URL" in line: trash, os_url = map(str, line.split("=")) os_url = os_url.rstrip('\n') elif "OS_TOKEN" in line: trash, os_token = map(str, line.split("=")) os_token = os_token.rstrip('\n') d = {} d['username'] = os_username d['password'] = os_password d['auth_url'] = os_auth_url d['tenant_name'] = os_tenant_name d['token'] = os_token d['url'] = os_url logger.info(_("VirtualElephant::VMware::BDE - Loaded VIO credentials - %s") % d) # Using BDE API and vSphere API return the MAC address # for the virtual machines created by BDE. 
bde_server = self.properties.get(self.BDE_ENDPOINT) vcm_server = self.properties.get(self.VCM_SERVER) admin_user = self.properties.get(self.USERNAME) admin_pass = self.properties.get(self.PASSWORD) cluster_name = self.properties.get(self.CLUSTER_NAME) network_id = self.properties.get(self.NETWORK) security_group = self.properties.get(self.SECURITY_GROUP) prefix = 'https://' port = ':8443' logger.info(_("VirtualElephant::VMware::BDE - Creating NSX ports for network %s") % network_id) # Get the node names for the cluster from BDE curr = self._open_connection() header = {'content-type': 'application/json'} api_call = '/serengeti/api/cluster/' + cluster_name url = prefix + bde_server + port + api_call r = curr.get(url, headers=header, verify=False) raw_json = json.loads(r.text) cluster_data = raw_json["nodeGroups"] # Open connect to the vSphere API si = SmartConnect(host=vcm_server, user=admin_user, pwd=admin_pass, port=443) search_index = si.content.searchIndex root_folder = si.content.rootFolder for ng in cluster_data: nodes = ng["instances"] for node in nodes: logger.info(_("VirtualElephant::VMware::BDE - Creating NSX port for %s") % node.get("name")) vm_name = node.get("name") vm_moId = node.get("moId") port_name = vm_name + "-port0" # moId is not in format we need to match (x,y,z) = vm_moId.split(":") vm_moId = "'vim." 
+ y + ":" + z + "'" # Go through each DC one at a time, in case there are multiple in vCenter for dc in root_folder.childEntity: content = si.content objView = content.viewManager.CreateContainerView(dc, [vim.VirtualMachine], True) vm_list = objView.view objView.Destroy() for instance in vm_list: # convert object to string so we can search i = str(instance.summary.vm) if vm_moId in i: # Matched the VM in BDE and vCenter logger.info(_("VirtualElephant::VMware::BDE - Match found for BDE node %s") % instance) for device in instance.config.hardware.device: if isinstance(device, vim.vm.device.VirtualEthernetCard): mac_address = str(device.macAddress) logger.info(_("VirtualElephant::VMware::BDE - Found MAC address %s") % mac_address) # If the node is already trying to get an IP address, # then a powercycle is required. #logger.info(_("VirtualElephant::VMware::BDE - Powercycling the node %s") % node.get("name")) #if instance.runtime.powerState == vim.VirtualMachinePowerState.poweredOn: # task = instance.PowerOff() # while task.info.state not in [vim.TaskInfo.State.success, # vim.TaskInfo.State.error]: # logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power off %s") % node.get("name")) # time.sleep(5) # task = instance.PowerOn() # while task.info.state not in [vim.TaskInfo.State.success, # vim.TaskInfo.State.error]: # logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power on %s") % node.get("name")) # time.sleep(5) # Create a new port through Neutron neutron = client.Client('2.0', username=os_username, password=os_password, auth_url=os_auth_url, tenant_name=os_tenant_name, endpoint_url=os_url, token=os_token) port_info = { "port": { "admin_state_up": True, "device_id": vm_name, "name": port_name, "mac_address": mac_address, "network_id": network_id } } logger.info(_("VirtualElephant::VMware::BDE - Neutron port string %s") % port_info) response = neutron.create_port(body=port_info) logger.info(_("VirtualElephant::VMware::BDE - NSX port creation 
response - %s") % response) return def handle_create(self): # REST API call to create a new VMware BDE cluster bde_server = self.properties.get(self.BDE_ENDPOINT) vcm_server = self.properties.get(self.VCM_SERVER) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) distro = self.properties.get(self.CLUSTER_TYPE) clusterName = self.properties.get(self.CLUSTER_NAME) network = self.properties.get(self.NETWORK) rp = self.properties.get(self.CLUSTER_RP) prefix = 'https://' port = ':8443' # hack because of Heat sends call before NSX network is created/assigned #time.sleep(60) # determine actual NSX portgroup created # hack - regex in Python is not a strength mob_string = '/mob/?moid=datacenter-2' curl_cmd = 'curl -k -u ' + bde_user + ':' + bde_pass + ' ' + prefix + vcm_server + mob_string grep_cmd = " | grep -oP '(?<=\(vxw).*(?=" + network + "\))' | grep -oE '[^\(]+$'" awk_cmd = " | awk '{print $0 \"" + network + "\"}'" full_cmd = curl_cmd + grep_cmd + awk_cmd p = subprocess.Popen(full_cmd, stdout=subprocess.PIPE, shell=True) (net_uid, err) = p.communicate() # Check to see if network_id is as we expect it if 'vxw' in net_uid: network_id = net_uid else: network_id = "vxw" + net_uid network_id = network_id.rstrip('\n') # Authenticate in a requests.session to the BDE server curr = self._open_connection() # Should check to see if network already exists as available network # This logs a big fat error message in /opt/serengeti/logs/serengeti.log # when the network doesn't exist. 
header = {'content-type': 'application/json'} api_call = '/serengeti/api/network/' + network url = prefix + bde_server + port + api_call r = curr.get(url, headers=header, verify=False) # Add new network to BDE as an available network if check fails payload = {"name" : network, "portGroup" : network_id, "isDhcp" : "true"} api_call = '/serengeti/api/networks' url = prefix + bde_server + port + api_call r = curr.post(url, data=json.dumps(payload), headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Network creation status code %s") % r.json) # Send the create cluster REST API call payload = {"name": clusterName, "distro": distro, "rpNames": [rp], "networkConfig": { "MGT_NETWORK": [network]}} api_call = '/serengeti/api/clusters' url = prefix + bde_server + port + api_call r = curr.post(url, data=json.dumps(payload), headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Create cluster status code %s") % r.json) # Arbitrary sleep value to allow for the nodes to be cloned sleep = 180 logger.info(_("VirtualElephant::VMware::BDE - Sleeping for %s seconds BDE to create nodes") % sleep) time.sleep(sleep) # Create ports for the BDE nodes on the NSX logical router nsx = self._create_nsx_ports() term = self._close_connection() return def handle_suspend(self): # REST API call to shutdown an existing VMware BDE cluster bde_server = self.properties.get(self.BDE_ENDPOINT) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) name = self.properties.get(self.CLUSTER_NAME) prefix = 'https://' port = ':8443' state = 'stop' curr = self._open_connection() header = {'content-type': 'application/json'} api_call = '/serengeti/api/cluster/' + name + '?state=' + state url = prefix + bde_server + port + api_call r = curr.post(url, headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Stop cluster status code %s") % r.json) term = self._close_connection() return def handle_resume(self): # 
REST API call to startup an existing VMware BDE cluster bde_server = self.properties.get(self.BDE_ENDPOINT) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) name = self.properties.get(self.CLUSTER_NAME) prefix = 'https://' port = ':8443' state = 'start' curr = self._open_connection() header = {'content-type': 'application/json'} api_call = '/serengeti/api/cluster/' + name + '?state=' + state url = prefix + bde_server + port + api_call r = curr.post(url, headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Start cluster status code %s") % r.json) term = self._close_connection() return def handle_delete(self): # REST API call to delete an existing VMware BDE cluster bde_server = self.properties.get(self.BDE_ENDPOINT) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) name = self.properties.get(self.CLUSTER_NAME) prefix = 'https://' port = ':8443' curr = self._open_connection() header = {'content-type': 'application/json'} api_call = '/serengeti/api/cluster/' + name url = prefix + bde_server + port + api_call r = curr.delete(url, headers=header, verify=False) logger.info(_("VirtualElephant::VMware::BDE - Delete cluster status code %s") % r.json) # Need to delete the NSX ports for clean-up term = self._close_connection() return def resource_mapping(): return { 'VirtualElephant::VMware::BDE': BigDataExtensions }
virtualelephant/openstack-heat-bde-plugin
plugin/BigDataExtensions.py
Python
apache-2.0
17,510
package net.virtualinfinity.telnet; import net.virtualinfinity.nio.EventLoop; import java.io.Closeable; /** * Provides access to the public aspects of the telnet session. * * @see SessionListener * @see ClientStarter#connect(EventLoop, String, SessionListener) * @see ClientStarter#connect(EventLoop, String, int, SessionListener) * * @author <a href='mailto:Daniel@coloraura.com'>Daniel Pitts</a> */ public interface Session extends Closeable { /** * @return an helper for managing the state of options on this session. */ Options options(); /** * @return the output data stream. */ OutputChannel outputChannel(); /** * @return the SubNegotiationOutputChannel for option sub-negotiation. */ SubNegotiationOutputChannel subNegotiationOutputChannel(); }
DanielPitts/net.virtualinfinity.telnet
src/main/java/net/virtualinfinity/telnet/Session.java
Java
apache-2.0
820
/* * The University of Wales, Cardiff Triana Project Software License (Based * on the Apache Software License Version 1.1) * * Copyright (c) 2007 University of Wales, Cardiff. All rights reserved. * * Redistribution and use of the software in source and binary forms, with * or without modification, are permitted provided that the following * conditions are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, * must include the following acknowledgment: "This product includes * software developed by the University of Wales, Cardiff for the Triana * Project (http://www.trianacode.org)." Alternately, this * acknowledgment may appear in the software itself, if and wherever * such third-party acknowledgments normally appear. * * 4. The names "Triana" and "University of Wales, Cardiff" must not be * used to endorse or promote products derived from this software * without prior written permission. For written permission, please * contact triana@trianacode.org. * * 5. Products derived from this software may not be called "Triana," nor * may Triana appear in their name, without prior written permission of * the University of Wales, Cardiff. * * 6. This software may not be sold, used or incorporated into any product * for sale to third parties. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN * NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. * * ------------------------------------------------------------------------ * * This software consists of voluntary contributions made by many * individuals on behalf of the Triana Project. For more information on the * Triana Project, please see. http://www.trianacode.org. * * This license is based on the BSD license as adopted by the Apache * Foundation and is governed by the laws of England and Wales. * */ package org.trianacode.gui.windows; import org.trianacode.gui.util.Env; import javax.swing.*; import javax.swing.border.EmptyBorder; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; /** * A dialog for selecting an item from a list * * @author Ian Wang * @version $Revision: 4048 $ */ public class ComboDialog extends JDialog implements ActionListener { /** * the main list */ private JComboBox combo = new JComboBox(new DefaultComboBoxModel()); /** * the label prompt */ private JLabel label = new JLabel(); /** * the ok and cancel buttons */ private JButton ok = new JButton(Env.getString("OK")); private JButton cancel = new JButton(Env.getString("Cancel")); /** * a flag indicating whether the ok button was clicked */ private boolean approve = false; /** * Constructs a modal combo dialog offering the specified item choices */ public ComboDialog(String[] items, Frame parent) { super(parent); initialise(items, false); } /** * Constructs a modal combo dialog offering the specified item choices 
*/ public ComboDialog(String[] items, Dialog parent) { super(parent); initialise(items, false); } /** * Constructs a modal combo dialog offering the specified item choices * * @param title the dialog title * @param editable a flag indicating whether the combo is editable */ public ComboDialog(String[] items, Frame parent, String title, boolean editable) { super(parent, title, true); initialise(items, editable); } /** * Constructs a modal combo dialog offering the specified item choices * * @param title the dialog title * @param editable a flag indicating whether the combo is editable */ public ComboDialog(String[] items, Dialog parent, String title, boolean editable) { super(parent, title, true); initialise(items, editable); } /** * Initialises the dialog */ private void initialise(String[] items, boolean editable) { getContentPane().setLayout(new BorderLayout()); DefaultComboBoxModel model = (DefaultComboBoxModel) combo.getModel(); combo.setEditable(editable); combo.setPrototypeDisplayValue("01234567890123456789"); for (int count = 0; count < items.length; count++) { model.addElement(items[count]); } JPanel listpanel = new JPanel(new BorderLayout(3, 0)); listpanel.add(label, BorderLayout.WEST); listpanel.add(combo, BorderLayout.CENTER); listpanel.setBorder(new EmptyBorder(3, 3, 3, 3)); getContentPane().add(listpanel, BorderLayout.CENTER); JPanel buttonpanel = new JPanel(); buttonpanel.add(ok); buttonpanel.add(cancel); ok.addActionListener(this); cancel.addActionListener(this); getContentPane().add(buttonpanel, BorderLayout.SOUTH); pack(); } /** * Sets the user prompt */ public void setLabel(String prompt) { label.setText(prompt); pack(); } /** * @return the user prompt */ public String getLabel() { return label.getText(); } /** * @return true if the ok button was clicked */ public boolean isApproved() { return approve; } /** * @return an array of the selected items, or null if the cancel button was clicked */ public String getSelectedItem() { if (!approve) { 
return null; } else { return (String) combo.getSelectedItem(); } } public void actionPerformed(ActionEvent event) { if (event.getSource() == ok) { approve = true; } setVisible(false); dispose(); } }
CSCSI/Triana
triana-gui/src/main/java/org/trianacode/gui/windows/ComboDialog.java
Java
apache-2.0
6,839
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.karaf.jaas.config.impl; import java.security.GeneralSecurityException; import java.security.SecureRandom; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLServerSocketFactory; import javax.net.ssl.SSLSocketFactory; import org.apache.karaf.jaas.config.KeystoreInstance; import org.apache.karaf.jaas.config.KeystoreIsLocked; import org.apache.karaf.jaas.config.KeystoreManager; /** * Implementation of KeystoreManager */ public class OsgiKeystoreManager implements KeystoreManager { private List<KeystoreInstance> keystores = new CopyOnWriteArrayList<KeystoreInstance>(); public void register(KeystoreInstance keystore, Map<String,?> properties) { keystores.add(keystore); } public void unregister(KeystoreInstance keystore, Map<String,?> properties) { keystores.remove(keystore); } public KeystoreInstance getKeystore(String name) { KeystoreInstance keystore = null; for (KeystoreInstance ks : keystores) { if (ks.getName().equals(name)) { if (keystore == null || keystore.getRank() < ks.getRank()) { keystore = ks; } } } return keystore; } public SSLContext createSSLContext(String provider, 
String protocol, String algorithm, String keyStore, String keyAlias, String trustStore) throws GeneralSecurityException { KeystoreInstance keyInstance = getKeystore(keyStore); if (keyInstance != null && keyInstance.isKeystoreLocked()) { throw new KeystoreIsLocked("Keystore '" + keyStore + "' is locked"); } if (keyInstance != null && keyInstance.isKeyLocked(keyAlias)) { throw new KeystoreIsLocked("Key '" + keyAlias + "' in keystore '" + keyStore + "' is locked"); } KeystoreInstance trustInstance = trustStore == null ? null : getKeystore(trustStore); if (trustInstance != null && trustInstance.isKeystoreLocked()) { throw new KeystoreIsLocked("Keystore '" + trustStore + "' is locked"); } SSLContext context; if (provider == null) { context = SSLContext.getInstance(protocol); } else { context = SSLContext.getInstance(protocol, provider); } context.init(keyInstance == null ? null : keyInstance.getKeyManager(algorithm, keyAlias), trustInstance == null ? null : trustInstance.getTrustManager(algorithm), new SecureRandom()); return context; } public SSLServerSocketFactory createSSLServerFactory(String provider, String protocol, String algorithm, String keyStore, String keyAlias, String trustStore) throws GeneralSecurityException { SSLContext context = createSSLContext(provider, protocol, algorithm, keyStore, keyAlias, trustStore); return context.getServerSocketFactory(); } public SSLSocketFactory createSSLFactory(String provider, String protocol, String algorithm, String keyStore, String keyAlias, String trustStore) throws GeneralSecurityException { SSLContext context = createSSLContext(provider, protocol, algorithm, keyStore, keyAlias, trustStore); return context.getSocketFactory(); } }
tonit/karafonexam2
jaas/config/src/main/java/org/apache/karaf/jaas/config/impl/OsgiKeystoreManager.java
Java
apache-2.0
4,137
// Package govalidator is package of validators and sanitizers for strings, structs and collections. package govalidator import ( "encoding/json" "fmt" "net" "net/url" "reflect" "regexp" "sort" "strings" "unicode" "unicode/utf8" ) var fieldsRequiredByDefault bool // SetFieldsRequiredByDefault causes validation to fail when struct fields // do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). // This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter): // type exampleStruct struct { // Name string `` // Email string `valid:"email"` // This, however, will only fail when Email is empty or an invalid email address: // type exampleStruct2 struct { // Name string `valid:"-"` // Email string `valid:"email"` // Lastly, this will only fail when Email is an invalid email address but not when it's empty: // type exampleStruct2 struct { // Name string `valid:"-"` // Email string `valid:"email,optional"` func SetFieldsRequiredByDefault(value bool) { fieldsRequiredByDefault = value } // IsEmail check if the string is an email. func IsEmail(str string) bool { // TODO uppercase letters are not supported return rxEmail.MatchString(str) } // IsURL check if the string is an URL. 
func IsURL(str string) bool { if str == "" || len(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") { return false } u, err := url.Parse(str) if err != nil { return false } if strings.HasPrefix(u.Host, ".") { return false } if u.Host == "" && (u.Path != "" && !strings.Contains(u.Path, ".")) { return false } return rxURL.MatchString(str) } // IsRequestURL check if the string rawurl, assuming // it was recieved in an HTTP request, is a valid // URL confirm to RFC 3986 func IsRequestURL(rawurl string) bool { url, err := url.ParseRequestURI(rawurl) if err != nil { return false //Couldn't even parse the rawurl } if len(url.Scheme) == 0 { return false //No Scheme found } return true } // IsRequestURI check if the string rawurl, assuming // it was recieved in an HTTP request, is an // absolute URI or an absolute path. func IsRequestURI(rawurl string) bool { _, err := url.ParseRequestURI(rawurl) return err == nil } // IsAlpha check if the string contains only letters (a-zA-Z). Empty string is valid. func IsAlpha(str string) bool { if IsNull(str) { return true } return rxAlpha.MatchString(str) } //IsUTFLetter check if the string contains only unicode letter characters. //Similar to IsAlpha but for all languages. Empty string is valid. func IsUTFLetter(str string) bool { if IsNull(str) { return true } for _, c := range str { if !unicode.IsLetter(c) { return false } } return true } // IsAlphanumeric check if the string contains only letters and numbers. Empty string is valid. func IsAlphanumeric(str string) bool { if IsNull(str) { return true } return rxAlphanumeric.MatchString(str) } // IsUTFLetterNumeric check if the string contains only unicode letters and numbers. Empty string is valid. func IsUTFLetterNumeric(str string) bool { if IsNull(str) { return true } for _, c := range str { if !unicode.IsLetter(c) && !unicode.IsNumber(c) { //letters && numbers are ok return false } } return true } // IsNumeric check if the string contains only numbers. 
Empty string is valid. func IsNumeric(str string) bool { if IsNull(str) { return true } return rxNumeric.MatchString(str) } // IsUTFNumeric check if the string contains only unicode numbers of any kind. // Numbers can be 0-9 but also Fractions ¾,Roman Ⅸ and Hangzhou 〩. Empty string is valid. func IsUTFNumeric(str string) bool { if IsNull(str) { return true } if strings.IndexAny(str, "+-") > 0 { return false } if len(str) > 1 { str = strings.TrimPrefix(str, "-") str = strings.TrimPrefix(str, "+") } for _, c := range str { if unicode.IsNumber(c) == false { //numbers && minus sign are ok return false } } return true } // IsUTFDigit check if the string contains only unicode radix-10 decimal digits. Empty string is valid. func IsUTFDigit(str string) bool { if IsNull(str) { return true } if strings.IndexAny(str, "+-") > 0 { return false } if len(str) > 1 { str = strings.TrimPrefix(str, "-") str = strings.TrimPrefix(str, "+") } for _, c := range str { if !unicode.IsDigit(c) { //digits && minus sign are ok return false } } return true } // IsHexadecimal check if the string is a hexadecimal number. func IsHexadecimal(str string) bool { return rxHexadecimal.MatchString(str) } // IsHexcolor check if the string is a hexadecimal color. func IsHexcolor(str string) bool { return rxHexcolor.MatchString(str) } // IsRGBcolor check if the string is a valid RGB color in form rgb(RRR, GGG, BBB). func IsRGBcolor(str string) bool { return rxRGBcolor.MatchString(str) } // IsLowerCase check if the string is lowercase. Empty string is valid. func IsLowerCase(str string) bool { if IsNull(str) { return true } return str == strings.ToLower(str) } // IsUpperCase check if the string is uppercase. Empty string is valid. func IsUpperCase(str string) bool { if IsNull(str) { return true } return str == strings.ToUpper(str) } // IsInt check if the string is an integer. Empty string is valid. 
func IsInt(str string) bool { if IsNull(str) { return true } return rxInt.MatchString(str) } // IsFloat check if the string is a float. func IsFloat(str string) bool { return str != "" && rxFloat.MatchString(str) } // IsDivisibleBy check if the string is a number that's divisible by another. // If second argument is not valid integer or zero, it's return false. // Otherwise, if first argument is not valid integer or zero, it's return true (Invalid string converts to zero). func IsDivisibleBy(str, num string) bool { f, _ := ToFloat(str) p := int64(f) q, _ := ToInt(num) if q == 0 { return false } return (p == 0) || (p%q == 0) } // IsNull check if the string is null. func IsNull(str string) bool { return len(str) == 0 } // IsByteLength check if the string's length (in bytes) falls in a range. func IsByteLength(str string, min, max int) bool { return len(str) >= min && len(str) <= max } // IsUUIDv3 check if the string is a UUID version 3. func IsUUIDv3(str string) bool { return rxUUID3.MatchString(str) } // IsUUIDv4 check if the string is a UUID version 4. func IsUUIDv4(str string) bool { return rxUUID4.MatchString(str) } // IsUUIDv5 check if the string is a UUID version 5. func IsUUIDv5(str string) bool { return rxUUID5.MatchString(str) } // IsUUID check if the string is a UUID (version 3, 4 or 5). func IsUUID(str string) bool { return rxUUID.MatchString(str) } // IsCreditCard check if the string is a credit card. 
func IsCreditCard(str string) bool { r, _ := regexp.Compile("[^0-9]+") sanitized := r.ReplaceAll([]byte(str), []byte("")) if !rxCreditCard.MatchString(string(sanitized)) { return false } var sum int64 var digit string var tmpNum int64 var shouldDouble bool for i := len(sanitized) - 1; i >= 0; i-- { digit = string(sanitized[i:(i + 1)]) tmpNum, _ = ToInt(digit) if shouldDouble { tmpNum *= 2 if tmpNum >= 10 { sum += ((tmpNum % 10) + 1) } else { sum += tmpNum } } else { sum += tmpNum } shouldDouble = !shouldDouble } if sum%10 == 0 { return true } return false } // IsISBN10 check if the string is an ISBN version 10. func IsISBN10(str string) bool { return IsISBN(str, 10) } // IsISBN13 check if the string is an ISBN version 13. func IsISBN13(str string) bool { return IsISBN(str, 13) } // IsISBN check if the string is an ISBN (version 10 or 13). // If version value is not equal to 10 or 13, it will be check both variants. func IsISBN(str string, version int) bool { r, _ := regexp.Compile("[\\s-]+") sanitized := r.ReplaceAll([]byte(str), []byte("")) var checksum int32 var i int32 if version == 10 { if !rxISBN10.MatchString(string(sanitized)) { return false } for i = 0; i < 9; i++ { checksum += (i + 1) * int32(sanitized[i]-'0') } if sanitized[9] == 'X' { checksum += 10 * 10 } else { checksum += 10 * int32(sanitized[9]-'0') } if checksum%11 == 0 { return true } return false } else if version == 13 { if !rxISBN13.MatchString(string(sanitized)) { return false } factor := []int32{1, 3} for i = 0; i < 12; i++ { checksum += factor[i%2] * int32(sanitized[i]-'0') } if (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0 { return true } return false } return IsISBN(str, 10) || IsISBN(str, 13) } // IsJSON check if the string is valid JSON (note: uses json.Unmarshal). func IsJSON(str string) bool { var js json.RawMessage return json.Unmarshal([]byte(str), &js) == nil } // IsMultibyte check if the string contains one or more multibyte chars. Empty string is valid. 
func IsMultibyte(str string) bool { if IsNull(str) { return true } return rxMultibyte.MatchString(str) } // IsASCII check if the string contains ASCII chars only. Empty string is valid. func IsASCII(str string) bool { if IsNull(str) { return true } return rxASCII.MatchString(str) } // IsPrintableASCII check if the string contains printable ASCII chars only. Empty string is valid. func IsPrintableASCII(str string) bool { if IsNull(str) { return true } return rxPrintableASCII.MatchString(str) } // IsFullWidth check if the string contains any full-width chars. Empty string is valid. func IsFullWidth(str string) bool { if IsNull(str) { return true } return rxFullWidth.MatchString(str) } // IsHalfWidth check if the string contains any half-width chars. Empty string is valid. func IsHalfWidth(str string) bool { if IsNull(str) { return true } return rxHalfWidth.MatchString(str) } // IsVariableWidth check if the string contains a mixture of full and half-width chars. Empty string is valid. func IsVariableWidth(str string) bool { if IsNull(str) { return true } return rxHalfWidth.MatchString(str) && rxFullWidth.MatchString(str) } // IsBase64 check if a string is base64 encoded. func IsBase64(str string) bool { return rxBase64.MatchString(str) } // IsFilePath check is a string is Win or Unix file path and returns it's type. 
func IsFilePath(str string) (bool, int) { if rxWinPath.MatchString(str) { //check windows path limit see: // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath if len(str[3:]) > 32767 { return false, Win } return true, Win } else if rxUnixPath.MatchString(str) { return true, Unix } return false, Unknown } // IsDataURI checks if a string is base64 encoded data URI such as an image func IsDataURI(str string) bool { dataURI := strings.Split(str, ",") if !rxDataURI.MatchString(dataURI[0]) { return false } return IsBase64(dataURI[1]) } // IsISO3166Alpha2 checks if a string is valid two-letter country code func IsISO3166Alpha2(str string) bool { for _, entry := range ISO3166List { if str == entry.Alpha2Code { return true } } return false } // IsISO3166Alpha3 checks if a string is valid three-letter country code func IsISO3166Alpha3(str string) bool { for _, entry := range ISO3166List { if str == entry.Alpha3Code { return true } } return false } // IsIP checks if a string is either IP version 4 or 6. func IsIP(str string) bool { return net.ParseIP(str) != nil } // IsIPv4 check if the string is an IP version 4. func IsIPv4(str string) bool { ip := net.ParseIP(str) return ip != nil && strings.Contains(str, ".") } // IsIPv6 check if the string is an IP version 6. func IsIPv6(str string) bool { ip := net.ParseIP(str) return ip != nil && strings.Contains(str, ":") } // IsMAC check if a string is valid MAC address. // Possible MAC formats: // 01:23:45:67:89:ab // 01:23:45:67:89:ab:cd:ef // 01-23-45-67-89-ab // 01-23-45-67-89-ab-cd-ef // 0123.4567.89ab // 0123.4567.89ab.cdef func IsMAC(str string) bool { _, err := net.ParseMAC(str) return err == nil } // IsMongoID check if the string is a valid hex-encoded representation of a MongoDB ObjectId. func IsMongoID(str string) bool { return rxHexadecimal.MatchString(str) && (len(str) == 24) } // IsLatitude check if a string is valid latitude. 
func IsLatitude(str string) bool { return rxLatitude.MatchString(str) } // IsLongitude check if a string is valid longitude. func IsLongitude(str string) bool { return rxLongitude.MatchString(str) } // ValidateStruct use tags for fields func ValidateStruct(s interface{}) (bool, error) { if s == nil { return true, nil } result := true var err error val := reflect.ValueOf(s) if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr { val = val.Elem() } // we only accept structs if val.Kind() != reflect.Struct { return false, fmt.Errorf("function only accepts structs; got %s", val.Kind()) } var errs Errors for i := 0; i < val.NumField(); i++ { valueField := val.Field(i) typeField := val.Type().Field(i) if typeField.PkgPath != "" { continue // Private field } resultField, err := typeCheck(valueField, typeField) if err != nil { errs = append(errs, err) } result = result && resultField } if len(errs) > 0 { err = errs } return result, err } // parseTag splits a struct field's tag into its // comma-separated options. func parseTag(tag string) tagOptions { split := strings.SplitN(tag, ",", -1) return tagOptions(split) } func isValidTag(s string) bool { if s == "" { return false } for _, c := range s { switch { case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): // Backslash and quote chars are reserved, but // otherwise any punctuation chars are allowed // in a tag name. default: if !unicode.IsLetter(c) && !unicode.IsDigit(c) { return false } } } return true } // IsSSN will validate the given string as a U.S. Social Security Number func IsSSN(str string) bool { if str == "" || len(str) != 11 { return false } return rxSSN.MatchString(str) } // ByteLength check string's length func ByteLength(str string, params ...string) bool { if len(params) == 2 { min, _ := ToInt(params[0]) max, _ := ToInt(params[1]) return len(str) >= int(min) && len(str) <= int(max) } return false } // StringMatches checks if a string matches a given pattern. 
func StringMatches(s string, params ...string) bool { if len(params) == 1 { pattern := params[0] return Matches(s, pattern) } return false } // StringLength check string's length (including multi byte strings) func StringLength(str string, params ...string) bool { if len(params) == 2 { strLength := utf8.RuneCountInString(str) min, _ := ToInt(params[0]) max, _ := ToInt(params[1]) return strLength >= int(min) && strLength <= int(max) } return false } // Contains returns whether checks that a comma-separated list of options // contains a particular substr flag. substr must be surrounded by a // string boundary or commas. func (opts tagOptions) contains(optionName string) bool { for i := range opts { tagOpt := opts[i] if tagOpt == optionName { return true } } return false } func checkRequired(v reflect.Value, t reflect.StructField, options tagOptions) (bool, error) { if options.contains("required") { err := fmt.Errorf("non zero value required") return false, Error{t.Name, err} } else if fieldsRequiredByDefault && !options.contains("optional") { err := fmt.Errorf("All fields are required to at least have one validation defined") return false, Error{t.Name, err} } // not required and empty is valid return true, nil } func typeCheck(v reflect.Value, t reflect.StructField) (bool, error) { if !v.IsValid() { return false, nil } tag := t.Tag.Get(tagName) // Check if the field should be ignored switch tag { case "": if !fieldsRequiredByDefault { return true, nil } err := fmt.Errorf("All fields are required to at least have one validation defined") return false, Error{t.Name, err} case "-": return true, nil } options := parseTag(tag) for i := range options { tagOpt := options[i] if ok := isValidTag(tagOpt); !ok { continue } if validatefunc, ok := CustomTypeTagMap[tagOpt]; ok { options = append(options[:i], options[i+1:]...) 
// we found our custom validator, so remove it from the options if result := validatefunc(v.Interface()); !result { return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), tagOpt)} } return true, nil } } if isEmptyValue(v) { // an empty value is not validated, check only required return checkRequired(v, t, options) } switch v.Kind() { case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64, reflect.String: // for each tag option check the map of validator functions for i := range options { tagOpt := options[i] negate := false // Check wether the tag looks like '!something' or 'something' if len(tagOpt) > 0 && tagOpt[0] == '!' { tagOpt = string(tagOpt[1:]) negate = true } if ok := isValidTag(tagOpt); !ok { err := fmt.Errorf("Unknown Validator %s", tagOpt) return false, Error{t.Name, err} } // Check for param validators for key, value := range ParamTagRegexMap { ps := value.FindStringSubmatch(tagOpt) if len(ps) > 0 { if validatefunc, ok := ParamTagMap[key]; ok { switch v.Kind() { case reflect.String: field := fmt.Sprint(v) // make value into string, then validate with regex if result := validatefunc(field, ps[1:]...); !result && !negate || result && negate { var err error if !negate { err = fmt.Errorf("%s does not validate as %s", field, tagOpt) } else { err = fmt.Errorf("%s does validate as %s", field, tagOpt) } return false, Error{t.Name, err} } default: //Not Yet Supported Types (Fail here!) 
err := fmt.Errorf("Validator %s doesn't support kind %s", tagOpt, v.Kind()) return false, Error{t.Name, err} } } } } if validatefunc, ok := TagMap[tagOpt]; ok { switch v.Kind() { case reflect.String: field := fmt.Sprint(v) // make value into string, then validate with regex if result := validatefunc(field); !result && !negate || result && negate { var err error if !negate { err = fmt.Errorf("%s does not validate as %s", field, tagOpt) } else { err = fmt.Errorf("%s does validate as %s", field, tagOpt) } return false, Error{t.Name, err} } default: //Not Yet Supported Types (Fail here!) err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", tagOpt, v.Kind(), v) return false, Error{t.Name, err} } } } return true, nil case reflect.Map: if v.Type().Key().Kind() != reflect.String { return false, &UnsupportedTypeError{v.Type()} } var sv stringValues sv = v.MapKeys() sort.Sort(sv) result := true for _, k := range sv { resultItem, err := ValidateStruct(v.MapIndex(k).Interface()) if err != nil { return false, err } result = result && resultItem } return result, nil case reflect.Slice: result := true for i := 0; i < v.Len(); i++ { var resultItem bool var err error if v.Index(i).Kind() != reflect.Struct { resultItem, err = typeCheck(v.Index(i), t) if err != nil { return false, err } } else { resultItem, err = ValidateStruct(v.Index(i).Interface()) if err != nil { return false, err } } result = result && resultItem } return result, nil case reflect.Array: result := true for i := 0; i < v.Len(); i++ { var resultItem bool var err error if v.Index(i).Kind() != reflect.Struct { resultItem, err = typeCheck(v.Index(i), t) if err != nil { return false, err } } else { resultItem, err = ValidateStruct(v.Index(i).Interface()) if err != nil { return false, err } } result = result && resultItem } return result, nil case reflect.Interface: // If the value is an interface then encode its element if v.IsNil() { return true, nil } return ValidateStruct(v.Interface()) case 
reflect.Ptr: // If the value is a pointer then check its element if v.IsNil() { return true, nil } return typeCheck(v.Elem(), t) case reflect.Struct: return ValidateStruct(v.Interface()) default: return false, &UnsupportedTypeError{v.Type()} } } func isEmptyValue(v reflect.Value) bool { switch v.Kind() { case reflect.String, reflect.Array: return v.Len() == 0 case reflect.Map, reflect.Slice: return v.Len() == 0 || v.IsNil() case reflect.Bool: return !v.Bool() case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return v.Int() == 0 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: return v.Uint() == 0 case reflect.Float32, reflect.Float64: return v.Float() == 0 case reflect.Interface, reflect.Ptr: return v.IsNil() } return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface()) } // ErrorByField returns error for specified field of the struct // validated by ValidateStruct or empty string if there are no errors // or this field doesn't exists or doesn't have any errors. func ErrorByField(e error, field string) string { if e == nil { return "" } return ErrorsByField(e)[field] } // ErrorsByField returns map of errors of the struct validated // by ValidateStruct or empty map if there are no errors. 
func ErrorsByField(e error) map[string]string { m := make(map[string]string) if e == nil { return m } // prototype for ValidateStruct switch e.(type) { case Error: m[e.(Error).Name] = e.(Error).Err.Error() case Errors: for _, item := range e.(Errors).Errors() { m[item.(Error).Name] = item.(Error).Err.Error() } } return m } // Error returns string equivalent for reflect.Type func (e *UnsupportedTypeError) Error() string { return "validator: unsupported type: " + e.Type.String() } func (sv stringValues) Len() int { return len(sv) } func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) } func (sv stringValues) get(i int) string { return sv[i].String() }
aleksandr-vin/go-swagger
vendor/github.com/asaskevich/govalidator/validator.go
GO
apache-2.0
22,585
/* * #%L * GwtMaterial * %% * Copyright (C) 2015 - 2017 GwtMaterialDesign * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package gwt.material.design.client.data.events; import com.google.gwt.event.shared.EventHandler; public interface RowsVisibleHandler extends EventHandler { void onRowsVisible(RowsVisibleEvent event); }
GwtMaterialDesign/gwt-material-table
src/main/java/gwt/material/design/client/data/events/RowsVisibleHandler.java
Java
apache-2.0
860
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.maddyhome.idea.copyright.pattern; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiFile; import com.maddyhome.idea.copyright.CopyrightManager; import org.apache.commons.collections.ExtendedProperties; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.VelocityEngine; import org.apache.velocity.runtime.log.SimpleLog4JLogSystem; import java.io.StringWriter; public class VelocityHelper { public static String evaluate(PsiFile file, Project project, Module module, String template) { VelocityEngine engine = getEngine(); VelocityContext vc = new VelocityContext(); vc.put("today", new DateInfo()); if (file != null) vc.put("file", new FileInfo(file)); if (project != null) vc.put("project", new ProjectInfo(project)); if (module != null) vc.put("module", new ModuleInfo(module)); vc.put("username", System.getProperty("user.name")); try { StringWriter sw = new StringWriter(); boolean stripLineBreak = false; if (template.endsWith("$")) { template += getVelocitySuffix(); stripLineBreak = true; } engine.evaluate(vc, sw, CopyrightManager.class.getName(), template); final String result = sw.getBuffer().toString(); return stripLineBreak ? 
StringUtil.trimEnd(result, getVelocitySuffix()) : result; } catch (Exception e) { return ""; } } private static String getVelocitySuffix() { return "\n"; } public static void verify(String text) throws Exception { VelocityEngine engine = getEngine(); VelocityContext vc = new VelocityContext(); vc.put("today", new DateInfo()); StringWriter sw = new StringWriter(); if (text.endsWith("$")) { text += getVelocitySuffix(); } engine.evaluate(vc, sw, CopyrightManager.class.getName(), text); } private static synchronized VelocityEngine getEngine() { if (instance == null) { try { VelocityEngine engine = new VelocityEngine(); ExtendedProperties extendedProperties = new ExtendedProperties(); extendedProperties.addProperty(VelocityEngine.RESOURCE_LOADER, "file"); extendedProperties.addProperty("file.resource.loader.class", "org.apache.velocity.runtime.resource.loader.FileResourceLoader"); extendedProperties.addProperty("file.resource.loader.path", PathManager.getPluginsPath() + "/Copyright/resources"); extendedProperties.addProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM_CLASS, SimpleLog4JLogSystem.class.getName()); extendedProperties .addProperty("runtime.log.logsystem.log4j.category", CopyrightManager.class.getName()); engine.setExtendedProperties(extendedProperties); engine.init(); instance = engine; } catch (Exception e) { } } return instance; } private VelocityHelper() { } private static VelocityEngine instance; }
joewalnes/idea-community
plugins/copyright/src/com/maddyhome/idea/copyright/pattern/VelocityHelper.java
Java
apache-2.0
4,033
from pylastica.query import Query from pylastica.aggregation.min import Min from pylastica.aggregation.nested import Nested from pylastica.doc_type.mapping import Mapping from pylastica.document import Document from tests.base import Base __author__ = 'Joe Linn' import unittest class NestedTest(unittest.TestCase, Base): def setUp(self): super(NestedTest, self).setUp() self._index = self._create_index("test_aggregation_nested") mapping = Mapping() mapping.set_properties({ "resellers": { "type": "nested", "properties": { "name": {"type": "string"}, "price": {"type": "double"} } } }) doc_type = self._index.get_doc_type("test") doc_type.mapping = mapping docs = [ Document(1, { "resellers": { "name": "spacely sprockets", "price": 5.55 } }), Document(2, { "resellers": { "name": "cogswell cogs", "price": 4.98 } }) ] doc_type.add_documents(docs) self._index.refresh() def tearDown(self): super(NestedTest, self).tearDown() self._index.delete() def test_nested_aggregation(self): agg = Nested("resellers", "resellers") agg.add_aggregation(Min("min_price").set_field("price")) query = Query() query.add_aggregation(agg) results = self._index.search(query).aggregations['resellers'] self.assertEqual(4.98, results['min_price']['value']) if __name__ == '__main__': unittest.main()
jlinn/pylastica
tests/aggregation/test_nested.py
Python
apache-2.0
1,762
// Copyright 2005 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry.listener; import java.util.Collection; import org.apache.tapestry.IActionListener; /** * @author Howard M. Lewis Ship */ public interface ListenerMap { /** * Gets a listener for the given name (which is both a property name and a method name). The * listener is created as needed, but is also cached for later use. The returned object * implements the {@link org.apache.tapestry.IActionListener}. * * @param name * the name of the method to invoke (the most appropriate method will be selected if * there are multiple overloadings of the same method name) * @returns an object implementing {@link IActionListener}. * @throws ApplicationRuntimeException * if the listener can not be created. */ public IActionListener getListener(String name); /** * Returns an unmodifiable collection of the names of the listeners implemented by the target * class. * * @since 1.0.6 */ public Collection getListenerNames(); /** * Returns true if this ListenerMapImpl can provide a listener with the given name. * * @since 2.2 */ public boolean canProvideListener(String name); }
apache/tapestry4
framework/src/java/org/apache/tapestry/listener/ListenerMap.java
Java
apache-2.0
1,865
using NUnit.Framework; using Spring.Objects.Factory; using Spring.Objects.Factory.Xml; /* * Copyright 2002-2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Spring.Aop.Target { /// <summary> Tests for pooling invoker interceptor /// TODO need to make these tests stronger: it's hard to /// make too many assumptions about a pool /// </summary> /// <author>Rod Johnson</author> /// <author>Federico Spinazzi (.Net)</author> [TestFixture] public class SimplePoolTargetSourceTests { /// <summary>Initial count value set in Object factory XML </summary> private const int INITIAL_COUNT = 10; private XmlObjectFactory objectFactory; [SetUp] public void SetUp() { objectFactory = new XmlObjectFactory(new ReadOnlyXmlTestResource("simplePoolTests.xml", GetType())); } /// <summary> We must simulate container shutdown, which should clear threads.</summary> [TearDown] public void TearDown() { // Will call pool.close() this.objectFactory.Dispose(); } private void Functionality(System.String name) { ISideEffectObject pooled = (ISideEffectObject) objectFactory.GetObject(name); Assert.AreEqual(INITIAL_COUNT, pooled.Count); pooled.doWork(); Assert.AreEqual(INITIAL_COUNT + 1, pooled.Count); pooled = (ISideEffectObject) objectFactory.GetObject(name); // Just check that it works--we can't make assumptions // about the count pooled.doWork(); //Assert.AreEqual(INITIAL_COUNT + 1, pooled.Count ); } [Test] public virtual void Functionality() { 
Functionality("pooled"); } [Test] public virtual void FunctionalityWithNoInterceptors() { Functionality("pooledNoInterceptors"); } [Test] public virtual void ConfigMixin() { ISideEffectObject pooled = (ISideEffectObject) objectFactory.GetObject("pooledWithMixin"); Assert.AreEqual(INITIAL_COUNT, pooled.Count); PoolingConfig conf = (PoolingConfig) objectFactory.GetObject("pooledWithMixin"); // TODO one invocation from setup // assertEquals(1, conf.getInvocations()); pooled.doWork(); // assertEquals("No objects active", 0, conf.getActive()); Assert.AreEqual(25, conf.MaxSize, "Correct target source"); // assertTrue("Some free", conf.getFree() > 0); //assertEquals(2, conf.getInvocations()); Assert.AreEqual(25, conf.MaxSize); } } }
spring-projects/spring-net
test/Spring/Spring.Aop.Tests/Aop/Target/SimplePoolTargetSourceTests.cs
C#
apache-2.0
3,017
/* * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Jakarta Element Construction Set", * "Jakarta ECS" , and "Apache Software Foundation" must not be used * to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache", * "Jakarta Element Construction Set" nor "Jakarta ECS" nor may "Apache" * appear in their names without prior written permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.ecs.vxml; /** This class implements the link element @author Written by <a href="mailto:jcarol@us.ibm.com">Carol Jones</a> */ public class Link extends VXMLElement { /** Basic constructor. You need to set the attributes using the set* methods. 
*/ public Link() { super("link"); } /** Sets the next="" attribute @param next the next="" attribute */ public Link setNext(String next) { addAttribute("next", next); return this; } /** Sets the expr="" attribute @param expr the expr="" attribute */ public Link setExpr(String expr) { addAttribute("expr", expr); return this; } /** Sets the event="" attribute @param event the event="" attribute */ public Link setEvent(String event) { addAttribute("event", event); return this; } /** Sets the caching="" attribute @param caching the caching="" attribute */ public Link setCaching(String caching) { addAttribute("caching", caching); return this; } /** Sets the fetchaudio="" attribute @param fetchaudio the fetchaudio="" attribute */ public Link setFetchaudio(String fetchaudio) { addAttribute("fetchaudio", fetchaudio); return this; } /** Sets the fetchint="" attribute @param fetchint the fetchint="" attribute */ public Link setFetchint(String fetchint) { addAttribute("fetchint", fetchint); return this; } /** Sets the fetchtimeout="" attribute @param fetchtimeout the fetchtimeout="" attribute */ public Link setFetchtimeout(String fetchtimeout) { addAttribute("fetchtimeout", fetchtimeout); return this; } }
jjYBdx4IL/misc
ecs/src/main/java/org/apache/ecs/vxml/Link.java
Java
apache-2.0
4,470
package eu.kandru.luna.util; import lombok.experimental.UtilityClass; import javax.servlet.http.HttpServletRequest; /** * Helper class for logging output. * * @author jko */ @UtilityClass public class LogHelper { /** * Formats a {@link HttpServletRequest} for logging by extracting the important information. * * @param request the request. * @return this can be logged. */ public String formatRequest(HttpServletRequest request) { StringBuilder sb = new StringBuilder(); sb.append("request from ") .append(request.getRemoteAddr()) .append(" via ") .append(request.getMethod()) .append(" to ") .append(request.getServletPath()); return sb.toString(); } }
Kandru/ts3luna
src/main/java/eu/kandru/luna/util/LogHelper.java
Java
apache-2.0
774
// Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy of // the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. couchTests.delayed_commits = function(debug) { var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"}); db.deleteDb(); db.createDb(); if (debug) debugger; run_on_modified_server( [{section: "couchdb", key: "delayed_commits", value: "true"}], function () { // By default, couchdb doesn't fully commit documents to disk right away, // it waits about a second to batch the full commit flush along with any // other updates. If it crashes or is restarted you may lose the most // recent commits. T(db.save({_id:"1",a:2,b:4}).ok); T(db.open("1") != null); restartServer(); T(db.open("1") == null); // lost the update. // note if we waited > 1 sec before the restart, the doc would likely // commit. // Retry the same thing but with full commits on. var db2 = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"true"}); T(db2.save({_id:"1",a:2,b:4}).ok); T(db2.open("1") != null); restartServer(); T(db2.open("1") != null); // You can update but without committing immediately, and then ensure // everything is commited in the last step. T(db.save({_id:"2",a:2,b:4}).ok); T(db.open("2") != null); T(db.ensureFullCommit().ok); restartServer(); T(db.open("2") != null); // However, it's possible even when flushed, that the server crashed between // the update and the commit, and you don't want to check to make sure // every doc you updated actually made it to disk. 
So record the instance // start time of the database before the updates and then check it again // after the flush (the instance start time is returned by the flush // operation). if they are the same, we know everything was updated // safely. // First try it with a crash. var instanceStartTime = db.info().instance_start_time; T(db.save({_id:"3",a:2,b:4}).ok); T(db.open("3") != null); restartServer(); var commitResult = db.ensureFullCommit(); T(commitResult.ok && commitResult.instance_start_time != instanceStartTime); // start times don't match, meaning the server lost our change T(db.open("3") == null); // yup lost it // retry with no server restart var instanceStartTime = db.info().instance_start_time; T(db.save({_id:"4",a:2,b:4}).ok); T(db.open("4") != null); var commitResult = db.ensureFullCommit(); T(commitResult.ok && commitResult.instance_start_time == instanceStartTime); // Successful commit, start times match! restartServer(); T(db.open("4") != null); }); // Now test that when we exceed the max_dbs_open, pending commits are safely // written. T(db.save({_id:"5",foo:"bar"}).ok); var max = 2; run_on_modified_server( [{section: "couchdb", key: "delayed_commits", value: "true"}, {section: "couchdb", key: "max_dbs_open", value: max.toString()}], function () { for(var i=0; i<max; i++) { var dbi = new CouchDB("test_suite_db" + i); dbi.deleteDb(); dbi.createDb(); } T(db.open("5").foo=="bar"); for(var i=0; i<max+1; i++) { var dbi = new CouchDB("test_suite_db" + i); dbi.deleteDb(); } }); };
yssk22/gaecouch
futon/script/test/delayed_commits.js
JavaScript
apache-2.0
3,920
document.addEventListener('deviceready', ondeviceready, false); function Notifi (){ cordova.plugins.notification.local.schedule({ id: 1, title: "Atento!!!!!", message: "Este evento se ha agregado a tu lista " }); }
polieduco/practicaaplicada20172
proyecto/www/js/notfica.js
JavaScript
apache-2.0
297
/** * */ package de.drtodolittle.firebase.impl; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.InputStream; import com.google.firebase.FirebaseApp; import com.google.firebase.FirebaseOptions; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseCredentials; import com.google.firebase.auth.FirebaseToken; import com.google.firebase.tasks.Task; import de.drtodolittle.firebase.api.TokenService; /** * @author Guenther_D * */ public class FirebaseTokenService implements TokenService { public static final String PID = "de.drtodolittle.firebase.firebasetokenservice"; public FirebaseTokenService(String servicePrivateKey, String databaseUrl) throws Exception { InputStream privateKeyStream = null; if (System.getenv("FIREBASE_TOKEN") != null) { privateKeyStream = new ByteArrayInputStream(System.getenv("FIREBASE_TOKEN").getBytes()); } else { privateKeyStream = new FileInputStream(servicePrivateKey); } FirebaseOptions options = new FirebaseOptions.Builder() .setCredential(FirebaseCredentials.fromCertificate(privateKeyStream)) .setDatabaseUrl(databaseUrl) .build(); FirebaseApp.initializeApp(options); } /* (non-Javadoc) * @see de.drtodolittle.firebase.api.TokenService#verify(java.lang.String) */ public String verify(String token) { String email = null; Task<FirebaseToken> task = FirebaseAuth.getInstance().verifyIdToken(token); while (!task.isComplete()) { try { Thread.sleep(50); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (task.isSuccessful()) { email = task.getResult().getEmail(); } else { task.getException().printStackTrace(); } return email; } }
drtodolittle/rest-api
firebase-util/src/main/java/de/drtodolittle/firebase/impl/FirebaseTokenService.java
Java
apache-2.0
1,784
#include <SFGUI/Engines/BREW.hpp> #include <SFGUI/Context.hpp> #include <SFGUI/Renderer.hpp> #include <SFGUI/Notebook.hpp> namespace sfg { namespace eng { RenderQueue* BREW::CreateNotebookDrawable( SharedPtr<const Notebook> notebook ) const { sf::Color border_color( GetProperty<sf::Color>( "BorderColor", notebook ) ); sf::Color border_color_light( border_color ); sf::Color border_color_dark( border_color ); int border_color_shift( GetProperty<int>( "BorderColorShift", notebook ) ); sf::Color background_color( GetProperty<sf::Color>( "BackgroundColor", notebook ) ); sf::Color background_color_dark( GetProperty<sf::Color>( "BackgroundColorDark", notebook ) ); sf::Color background_color_prelight( GetProperty<sf::Color>( "BackgroundColorPrelight", notebook ) ); float padding( GetProperty<float>( "Padding", notebook ) ); float border_width( GetProperty<float>( "BorderWidth", notebook ) ); float scroll_button_size( GetProperty<float>( "ScrollButtonSize", notebook ) ); sf::Color arrow_color( GetProperty<sf::Color>( "Color", notebook ) ); sf::Color scroll_button_prelight( GetProperty<sf::Color>( "ScrollButtonPrelightColor", notebook ) ); ShiftBorderColors( border_color_light, border_color_dark, border_color_shift ); RenderQueue* queue( new RenderQueue ); Notebook::IndexType page_count = notebook->GetPageCount(); if( !page_count ) { return queue; } Notebook::IndexType current_page = notebook->GetCurrentPage(); Notebook::IndexType prelight_tab = notebook->GetPrelightTab(); // Get size in the dimension all tabs have uniform size. sf::Vector2f tab_size( notebook->GetNthTabLabel( 0 )->GetAllocation().width, notebook->GetNthTabLabel( 0 )->GetAllocation().height ); // Get size in the dimension all children have uniform size. sf::Vector2f child_size( notebook->GetNthPage( 0 )->GetAllocation().width, notebook->GetNthPage( 0 )->GetAllocation().height ); if( notebook->GetTabPosition() == Notebook::TOP ) { // Tabs are positioned at top. // Pane. 
queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, tab_size.y + 2.f * ( border_width + padding ) ), sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ), border_width, background_color, border_color, border_color_shift ) ); // First tab label left border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, 0.f ), sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, tab_size.y + 3.f * border_width + 2.f * padding ), border_color_light, border_width ) ); // Tab labels for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) { Widget::Ptr label = notebook->GetNthTabLabel( index ); sf::FloatRect label_allocation = label->GetAllocation(); // Top border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left + label_allocation.width + padding, label_allocation.top - border_width - padding ), border_color_light, border_width ) ); // Right border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left + label_allocation.width - border_width + padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left + label_allocation.width - border_width + padding, label_allocation.top + label_allocation.height + border_width + padding ), border_color_dark, border_width ) ); if( index == current_page ) { // Active left border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + 2.f * 
border_width + padding ), border_color_light, border_width ) ); // Active background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, label_allocation.width + 2.f * padding - border_width, label_allocation.height + 2.f * ( border_width + padding ) ), background_color ) ); } else { // Inactive background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, label_allocation.width + 2.f * padding - border_width, label_allocation.height + 2.f * padding ), ( index == prelight_tab ) ? background_color_prelight : background_color_dark ) ); } } if( notebook->GetScrollable() ) { // Forward button if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( notebook->GetAllocation().width - scroll_button_size, 0.f ), sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ), border_width, notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingForward() ? -border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .33f, .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), arrow_color ) ); } // Backward button if( notebook->GetFirstDisplayedTab() != 0 ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, 0.f ), sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ), border_width, notebook->IsBackwardScrollPrelight() ? 
scroll_button_prelight : background_color, border_color, notebook->IsScrollingBackward() ? -border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( scroll_button_size * .66f, .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( scroll_button_size * .66f, .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( scroll_button_size * .33f, .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), arrow_color ) ); } } } else if( notebook->GetTabPosition() == Notebook::BOTTOM ) { // Tabs are positioned at bottom. // Pane. queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, 0.f ), sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ), border_width, background_color, border_color, border_color_shift ) ); // First tab label left border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f, child_size.y + 2.f * border_width + 2.f * padding ), sf::Vector2f( notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? 
scroll_button_size : 0.f, child_size.y + tab_size.y + 3.f * border_width + 4.f * padding ), border_color_light, border_width ) ); // Tab labels for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) { Widget::Ptr label = notebook->GetNthTabLabel( index ); sf::FloatRect label_allocation = label->GetAllocation(); // Bottom border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ), sf::Vector2f( label_allocation.left + label_allocation.width + padding, label_allocation.top + label_allocation.height + border_width + padding ), border_color_dark, border_width ) ); // Right border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left + label_allocation.width + padding - border_width, label_allocation.top - padding ), sf::Vector2f( label_allocation.left + label_allocation.width + padding - border_width, label_allocation.top + label_allocation.height + border_width + padding ), border_color_dark, border_width ) ); if( index == current_page ) { // Active left border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - padding ), sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ), border_color_light, border_width ) ); // Active background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding - border_width, label_allocation.width + 2.f * padding - border_width, label_allocation.height + 2.f * padding + 2.f * border_width ), background_color ) ); } else { // Inactive background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, 
label_allocation.width + 2.f * padding - border_width, label_allocation.height + 2.f * padding + border_width ), ( index == prelight_tab ) ? background_color_prelight : background_color_dark ) ); } } if( notebook->GetScrollable() ) { // Forward button if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( notebook->GetAllocation().width - scroll_button_size, notebook->GetAllocation().height - ( scroll_button_size + padding ) ), sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ), border_width, notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingForward() ? -border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( notebook->GetAllocation().width - scroll_button_size * .33f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), arrow_color ) ); } // Backward button if( notebook->GetFirstDisplayedTab() != 0 ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, notebook->GetAllocation().height - ( scroll_button_size + padding ) ), sf::Vector2f( scroll_button_size, tab_size.y + 2.f * ( padding + border_width ) ), border_width, notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingBackward() ? 
-border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .66f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( scroll_button_size * .66f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .33f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), sf::Vector2f( scroll_button_size * .33f, notebook->GetAllocation().height - ( scroll_button_size + padding ) + .5f * ( tab_size.y + 2.f * ( padding + border_width ) ) ), arrow_color ) ); } } } else if( notebook->GetTabPosition() == Notebook::LEFT ) { // Tabs are positioned at left. // Pane. queue->Add( Renderer::Get().CreatePane( sf::Vector2f( tab_size.x + 2.f * ( border_width + padding ), 0.f ), sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ), border_width, background_color, border_color, border_color_shift ) ); // First tab label top border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( 0.f, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ), sf::Vector2f( tab_size.x + 2.f * padding + 3.f * border_width, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? 
scroll_button_size : 0.f ), border_color_light, border_width ) ); // Tab labels for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) { Widget::Ptr label = notebook->GetNthTabLabel( index ); sf::FloatRect label_allocation = label->GetAllocation(); // Left border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + border_width + padding ), border_color_light, border_width ) ); // Bottom border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top + label_allocation.height + padding ), sf::Vector2f( label_allocation.left + label_allocation.width + ( index == current_page ? border_width : 0.f ) + border_width + padding, label_allocation.top + label_allocation.height + padding ), border_color_dark, border_width ) ); if( index == current_page ) { // Active top border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - border_width - padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left + label_allocation.width + 2.f * border_width + padding, label_allocation.top - border_width - padding ), border_color_light, border_width ) ); // Active background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, label_allocation.width + 2.f * ( border_width + padding ), label_allocation.height + 2.f * padding ), background_color ) ); } else { // Inactive background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, label_allocation.width + 2.f * padding, label_allocation.height + 2.f * padding - border_width ), ( 
index == prelight_tab ) ? background_color_prelight : background_color_dark ) ); } } if( notebook->GetScrollable() ) { // Forward button if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, notebook->GetAllocation().height - ( scroll_button_size ) ), sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ), border_width, notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingForward() ? -border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .66f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), arrow_color ) ); } // Backward button if( notebook->GetFirstDisplayedTab() != 0 ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, 0.f ), sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ), border_width, notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingBackward() ? 
-border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .66f * scroll_button_size ), sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .66f * scroll_button_size ), sf::Vector2f( ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .33f * scroll_button_size ), arrow_color ) ); } } } else if( notebook->GetTabPosition() == Notebook::RIGHT ) { // Tabs are positioned at right. // Pane. queue->Add( Renderer::Get().CreatePane( sf::Vector2f( 0.f, 0.f ), sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), child_size.y + 2.f * ( border_width + padding ) ), border_width, background_color, border_color, border_color_shift ) ); // First tab label top border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( child_size.x + 2.f * ( border_width + padding ), notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? scroll_button_size : 0.f ), sf::Vector2f( child_size.x + 4.f * padding + 3.f * border_width + tab_size.x, notebook->GetScrollable() && notebook->GetFirstDisplayedTab() != 0 ? 
scroll_button_size : 0.f ), border_color_light, border_width ) ); // Tab labels for( Notebook::IndexType index = notebook->GetFirstDisplayedTab(); index < notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount(); ++index ) { Widget::Ptr label = notebook->GetNthTabLabel( index ); sf::FloatRect label_allocation = label->GetAllocation(); // Right border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top + label_allocation.height + border_width + padding ), border_color_dark, border_width ) ); // Bottom border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - padding, label_allocation.top + label_allocation.height + padding ), sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top + label_allocation.height + padding ), border_color_dark, border_width ) ); if( index == current_page ) { // Active top border queue->Add( Renderer::Get().CreateLine( sf::Vector2f( label_allocation.left - padding - border_width, label_allocation.top - border_width - padding ), sf::Vector2f( label_allocation.left + label_allocation.width + border_width + padding, label_allocation.top - border_width - padding ), border_color_light, border_width ) ); // Active background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding - border_width, label_allocation.top - padding, label_allocation.width + 2.f * padding + 2.f * border_width, label_allocation.height + 2.f * padding ), background_color ) ); } else { // Inactive background queue->Add( Renderer::Get().CreateRect( sf::FloatRect( label_allocation.left - padding, label_allocation.top - padding, label_allocation.width + 2.f * padding + border_width, label_allocation.height + 2.f * padding ), 
(index == prelight_tab) ? background_color_prelight : background_color_dark ) ); } } if( notebook->GetScrollable() ) { // Forward button if( ( notebook->GetFirstDisplayedTab() + notebook->GetDisplayedTabCount() ) < notebook->GetPageCount() ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * padding + border_width ), notebook->GetAllocation().height - ( scroll_button_size ) ), sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ), border_width, notebook->IsForwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingForward() ? -border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .33f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .66f * scroll_button_size + notebook->GetAllocation().height - scroll_button_size ), arrow_color ) ); } // Backward button if( notebook->GetFirstDisplayedTab() != 0 ) { queue->Add( Renderer::Get().CreatePane( sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * padding + border_width ), 0.f ), sf::Vector2f( tab_size.x + 2.f * ( padding + border_width ), scroll_button_size ), border_width, notebook->IsBackwardScrollPrelight() ? scroll_button_prelight : background_color, border_color, notebook->IsScrollingBackward() ? 
-border_color_shift : border_color_shift ) ); queue->Add( Renderer::Get().CreateTriangle( sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .6f, .66f * scroll_button_size ), sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .4f, .66f * scroll_button_size ), sf::Vector2f( notebook->GetAllocation().width - ( tab_size.x + 2.f * ( padding + border_width ) ) * .5f, .33f * scroll_button_size ), arrow_color ) ); } } } return queue; } } }
pierotofy/glassomium
src/include/sfgui/src/SFGUI/Engines/BREW/Notebook.cpp
C++
apache-2.0
23,612
// Copyright 2007-2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================

#include "omaha/core/scheduler.h"
#include "omaha/base/debug.h"
#include "omaha/base/error.h"
#include "omaha/base/logging.h"

namespace omaha {

// Creates one scheduled work item and arms its first firing.
//
// timer_queue     - Win32 timer queue handle the item's QueueTimer attaches
//                   to; if NULL, no timer is created and the item stays inert.
// start_delay_ms  - delay before the first callback fires.
// interval_ms     - delay used when rescheduling after each firing (see
//                   TimerCallback, which calls ScheduleNext with interval_ms_).
// has_debug_timer - when true, a HighresTimer is created and passed to the
//                   work callback so callers can measure elapsed time.
// work            - the callback to run on each firing.
//
// Note: each firing is scheduled as a one-shot (WT_EXECUTEONLYONCE) and then
// re-armed from the callback, rather than using a periodic timer.
Scheduler::SchedulerItem::SchedulerItem(HANDLE timer_queue,
                                        int start_delay_ms,
                                        int interval_ms,
                                        bool has_debug_timer,
                                        ScheduledWorkWithTimer work)
    : start_delay_ms_(start_delay_ms),
      interval_ms_(interval_ms),
      work_(work) {
  if (has_debug_timer) {
    debug_timer_.reset(new HighresTimer());
  }

  if (timer_queue) {
    // The QueueTimer calls back into the static TimerCallback with |this|
    // as its context.
    timer_.reset(
        new QueueTimer(timer_queue, &SchedulerItem::TimerCallback, this));
    VERIFY_SUCCEEDED(
        ScheduleNext(timer_.get(), debug_timer_.get(), start_delay_ms));
  }
}

Scheduler::SchedulerItem::~SchedulerItem() {
  // QueueTimer dtor may block for pending callbacks. Destroying |timer_|
  // first therefore guarantees no callback can touch this item afterwards;
  // only then is it safe to tear down |debug_timer_|.
  if (timer_) {
    timer_.reset();
  }

  if (debug_timer_) {
    debug_timer_.reset();
  }
}

// static
// Arms |timer| for a single firing |start_after_ms| from now and (re)starts
// the optional |debug_timer| so the next callback can observe the elapsed
// time. Returns E_FAIL if |timer| is NULL, otherwise the HRESULT from
// QueueTimer::Start.
HRESULT Scheduler::SchedulerItem::ScheduleNext(QueueTimer* timer,
                                               HighresTimer* debug_timer,
                                               int start_after_ms) {
  if (!timer) {
    return E_FAIL;
  }

  if (debug_timer) {
    debug_timer->Start();
  }

  // One-shot timer: periodicity is achieved by re-arming from TimerCallback.
  const HRESULT hr = timer->Start(start_after_ms, 0, WT_EXECUTEONLYONCE);
  if (FAILED(hr)) {
    CORE_LOG(LE, (L"[can't start queue timer][0x%08x]", hr));
  }
  return hr;
}

// static
// Timer-queue callback. Recovers the owning SchedulerItem from the timer's
// context, runs the scheduled work, then re-arms the timer with the item's
// interval. Runs on a timer-queue thread, not the thread that created the
// Scheduler.
void Scheduler::SchedulerItem::TimerCallback(QueueTimer* timer) {
  ASSERT1(timer);
  if (!timer) {
    return;
  }

  SchedulerItem* item = reinterpret_cast<SchedulerItem*>(timer->ctx());
  ASSERT1(item);
  if (!item) {
    CORE_LOG(LE, (L"[Expected timer context to contain SchedulerItem]"));
    return;
  }

  // This may be long running, |item| may be deleted in the meantime,
  // however the dtor should block on deleting the |timer| and allow
  // pending callbacks to run.
  if (item && item->work_) {
    item->work_(item->debug_timer());
  }

  if (item) {
    // Re-arm for the next firing using the recurring interval (the initial
    // start delay applies only to the first firing, set in the ctor).
    const HRESULT hr = SchedulerItem::ScheduleNext(timer,
                                                   item->debug_timer(),
                                                   item->interval_ms());
    if (FAILED(hr)) {
      CORE_LOG(L1, (L"[Scheduling next timer callback failed][0x%08x]", hr));
    }
  }
}

// Creates the Win32 timer queue shared by all scheduled items. On failure
// |timer_queue_| stays NULL and subsequent DoStart calls fail.
Scheduler::Scheduler() {
  CORE_LOG(L1, (L"[Scheduler::Scheduler]"));
  timer_queue_ = ::CreateTimerQueue();
  if (!timer_queue_) {
    CORE_LOG(LE, (L"[Failed to create Timer Queue][%d]", ::GetLastError()));
  }
}

// Tears down all scheduled items first (each SchedulerItem dtor blocks on
// its own pending callback), then deletes the timer queue itself.
Scheduler::~Scheduler() {
  CORE_LOG(L1, (L"[Scheduler::~Scheduler]"));
  timers_.clear();

  if (timer_queue_) {
    // The destructor blocks on deleting the timer queue and it waits for
    // all timer callbacks to complete. INVALID_HANDLE_VALUE requests the
    // blocking form of DeleteTimerQueueEx.
    ::DeleteTimerQueueEx(timer_queue_, INVALID_HANDLE_VALUE);
    timer_queue_ = NULL;
  }
}

// Schedules |work| to run every |interval| ms, passing the work callback a
// HighresTimer measuring the time since it was last scheduled.
HRESULT Scheduler::StartWithDebugTimer(int interval,
                                       ScheduledWorkWithTimer work) const {
  return DoStart(interval, interval, work, true /*has_debug_timer*/);
}

// Schedules |work| to first run after |delay| ms and then every |interval|
// ms. std::bind adapts the no-argument callback: the bound call wrapper
// discards the HighresTimer* argument DoStart's work type carries.
HRESULT Scheduler::StartWithDelay(int delay,
                                  int interval,
                                  ScheduledWork work) const {
  return DoStart(delay, interval, std::bind(work));
}

// Schedules |work| to run every |interval| ms, first firing after one
// full interval.
HRESULT Scheduler::Start(int interval, ScheduledWork work) const {
  return DoStart(interval, interval, std::bind(work));
}

// Common implementation for the Start* overloads: creates a SchedulerItem
// in place, which arms its own timer. Returns an error HRESULT if the timer
// queue was never created.
// NOTE(review): HRESULTFromLastError() here reports whatever the thread's
// last error currently is, not necessarily the original CreateTimerQueue
// failure from the ctor — confirm callers only need a generic failure code.
HRESULT Scheduler::DoStart(int start_delay,
                           int interval,
                           ScheduledWorkWithTimer work_fn,
                           bool has_debug_timer) const {
  CORE_LOG(L1, (L"[Scheduler::Start]"));

  if (!timer_queue_) {
    return HRESULTFromLastError();
  }

  timers_.emplace_back(timer_queue_, start_delay, interval, has_debug_timer,
                       work_fn);
  return S_OK;
}

}  // namespace omaha
google/omaha
omaha/core/scheduler.cc
C++
apache-2.0
4,607