text stringlengths 1 1.05M |
|---|
var structarmnn_1_1_activation_descriptor =
[
[ "ActivationDescriptor", "structarmnn_1_1_activation_descriptor.xhtml#a6c7517bc11f580a0a443940bc5f81775", null ],
[ "operator==", "structarmnn_1_1_activation_descriptor.xhtml#a57980de0f72e982a3d0963f5fb557454", null ],
[ "m_A", "structarmnn_1_1_activation_descriptor.xhtml#a017b2990003a014234f13e999dc7c689", null ],
[ "m_B", "structarmnn_1_1_activation_descriptor.xhtml#a28c4c9cb15f6be3499abbc46b356060b", null ],
[ "m_Function", "structarmnn_1_1_activation_descriptor.xhtml#af10fa7883e3579950f477bee92a64844", null ]
]; |
<gh_stars>1-10
/**
*
*/
package net.abi.abisEngine.rendering.window;
import net.abi.abisEngine.rendering.image.AEImage;
import net.abi.abisEngine.util.exceptions.AECursorInitializationException;
/**
* @author abina
*
*/
public class AnimatedCursor implements CursorI {
	/** Unique identifier of this cursor. */
	String id;
	/** Native handle of the currently active cursor frame. */
	long cursor_handle;
	/** Frames of the cursor animation, in playback order. */
	AEImage animationStages[];
	/** Click point of the cursor image, in pixels from the top-left corner. */
	int yHotspot = 0, xHotspot = 0;

	@Override
	public void dispose() {
		// TODO: release the native cursor object(s) once create() allocates them.
	}

	/**
	 * Creates the native cursor resources for the animation stages.
	 *
	 * @return the native handle of the created cursor; currently always 0 (stub)
	 * @throws AECursorInitializationException if cursor creation fails
	 */
	@Override
	public long create() throws AECursorInitializationException {
		// TODO: build native cursors from animationStages and store the handle.
		return 0L;
	}

	@Override
	public String getID() {
		// Was returning null; expose the identifier field instead.
		return id;
	}

	@Override
	public long getHandle() {
		// Was returning 0; expose the native handle field instead.
		return cursor_handle;
	}

	@Override
	public StaticCursorResource getCursorResource() {
		// TODO: an animated cursor has no single static resource yet.
		return null;
	}
}
|
/**
 * Reads a File or Blob object and returns it as an ArrayBuffer.
 *
 * @param {Blob|File} blob The File or Blob data.
 * @param {Function} callback Success callback passed the array buffer.
 * @param {Function=} opt_errorCallback Optional error callback if the read fails.
 */
function fileToArrayBuffer( blob, callback, opt_errorCallback ) {
	const reader = new FileReader();
	reader.onload = e => {
		callback( e.target.result );
	};
	reader.onerror = e => {
		// Errors are silently dropped unless the caller supplied a handler.
		if ( opt_errorCallback ) {
			opt_errorCallback( e );
		}
	};
	reader.readAsArrayBuffer( blob );
}
// Find the primes between 10 and 100 whose units digit is 7.
//
// The following program implements this; fill in the blank below:
//
//#include <stdio.h>
//int isp(int n)
//{
// int i;
// if (n<2)
// return 0;
// for (i=2;i*i<=n;++i)
// {
// _______________;
// }
// return 1;
//}
//int main()
//{
// int i=17;
// while (i<=100)
// {
// if (isp(i))
// printf("%d ",i);
// i+=10;
// }
// printf("\n");
// return 0;
//}
//
// Created by ้นๆ้นๆณ_bitๆฃฎ on 2022/5/22.
//
#include <stdio.h>
/* Returns 1 when n is prime, 0 otherwise (trial division up to sqrt(n)). */
int isp(int n)
{
    if (n < 2)
        return 0;
    for (int d = 2; d * d <= n; d++)
    {
        if (n % d == 0)
            return 0;
    }
    return 1;
}
/* Prints the primes in [10, 100] whose units digit is 7, space-separated. */
int main(void)
{
    /* Only 17, 27, ..., 97 can qualify, so step by 10. */
    for (int candidate = 17; candidate <= 100; candidate += 10)
    {
        if (isp(candidate))
            printf("%d ", candidate);
    }
    printf("\n");
    return 0;
}
|
<filename>devilry/devilry_admin/tests/period/test_createassignment.py
import unittest
from datetime import timedelta
import htmls
import mock
from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from cradmin_legacy import cradmin_testhelpers
from cradmin_legacy import crinstance
from model_bakery import baker
from devilry.apps.core.models import Assignment, Candidate, Examiner, AssignmentGroup
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.apps.core.baker_recipes import ACTIVE_PERIOD_END, ACTIVE_PERIOD_START, OLD_PERIOD_START, FUTURE_PERIOD_END, \
ASSIGNMENT_FUTUREPERIOD_START_FIRST_DEADLINE
from devilry.devilry_group import devilry_group_baker_factories
from devilry.devilry_admin.views.period import createassignment
from devilry.utils import datetimeutils
from devilry.utils.datetimeutils import default_timezone_datetime
class TestCreateView(TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = createassignment.CreateView
def setUp(self):
    # Install the AssignmentGroup denormalized-cache triggers in the test
    # database before each test; the views under test rely on this custom SQL.
    AssignmentGroupDbCacheCustomSql().initialize()
def test_get_render_formfields(self):
    """The create form renders long_name, short_name and first_deadline inputs."""
    period = baker.make_recipe('devilry.apps.core.period_active')
    mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=period)
    for fieldname in ('long_name', 'short_name', 'first_deadline'):
        self.assertTrue(
            mockresponse.selector.exists('input[name={}]'.format(fieldname)))
def test_get_suggested_name_first_assignment(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertEqual(mockresponse.selector.one('input[name=long_name]').get('value', ''), '')
self.assertEqual(mockresponse.selector.one('input[name=short_name]').get('value', ''), '')
def test_get_suggested_name_previous_assignment_not_suffixed_with_number(self):
period = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
long_name='Test', short_name='test').period
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertEqual(mockresponse.selector.one('input[name=long_name]').get('value', ''), '')
self.assertEqual(mockresponse.selector.one('input[name=short_name]').get('value', ''), '')
def test_get_suggested_name_previous_assignment_suffixed_with_number(self):
period = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
long_name='Test1', short_name='test1').period
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertEqual(mockresponse.selector.one('input[name=long_name]').get('value', ''), 'Test2')
self.assertEqual(mockresponse.selector.one('input[name=short_name]').get('value', ''), 'test2')
@unittest.skip('Must be revised. Depends on Assignment.first_deadline being None.')
def test_get_suggested_name_previous_assignment_suffixed_with_number_namecollision_no_first_deadline(self):
period = baker.make_recipe('devilry.apps.core.period_active')
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
long_name='Test1', short_name='test1')
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
long_name='Test2', short_name='test2',
first_deadline=None)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertEqual(mockresponse.selector.one('input[name=long_name]').get('value', ''), '')
self.assertEqual(mockresponse.selector.one('input[name=short_name]').get('value', ''), '')
def test_get_suggested_name_previous_assignment_suffixed_with_number_namecollision_strange_order(self):
period = baker.make_recipe('devilry.apps.core.period_active')
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
long_name='Test1', short_name='test1',
first_deadline=ACTIVE_PERIOD_END)
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
long_name='Test2', short_name='test2',
first_deadline=ACTIVE_PERIOD_START)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertEqual(mockresponse.selector.one('input[name=long_name]').get('value', ''), '')
self.assertEqual(mockresponse.selector.one('input[name=short_name]').get('value', ''), '')
def test_get_suggested_deadlines_first_assignment(self):
    """No deadline suggestions are rendered when the period has no assignments."""
    period = baker.make_recipe('devilry.apps.core.period_active')
    mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=period)
    self.assertFalse(mockresponse.selector.exists(
        '#devilry_admin_createassignment_suggested_deadlines'))
def test_get_suggested_deadlines_not_first_assignment(self):
    """Deadline suggestions appear once the period has a previous assignment."""
    period = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start').period
    mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=period)
    self.assertTrue(mockresponse.selector.exists(
        '#devilry_admin_createassignment_suggested_deadlines'))
@unittest.skip('Must be revised. Depends on Assignment.first_deadline being None.')
def test_get_suggested_deadlines_not_first_assignment_no_previous_with_deadline(self):
period = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
first_deadline=None).period
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
self.assertFalse(mockresponse.selector.exists(
'#devilry_admin_createassignment_suggested_deadlines'))
def test_get_suggested_deadlines_render_values_previous_deadline_in_the_past(self):
period = baker.make_recipe('devilry.apps.core.period_active')
# Ignored by the suggestion system
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period)
# This should be the one that is used for suggestions
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
first_deadline=default_timezone_datetime(2015, 9, 2, 13, 30)) # Wed
timezonemock = mock.MagicMock()
timezonemock.now.return_value = default_timezone_datetime(2015, 9, 10, 22, 18) # Thursday
with mock.patch('devilry.devilry_admin.views.period.createassignment.timezone', timezonemock):
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
suggested_deadline_elements = mockresponse.selector.list(
'.devilry-admin-createassignment-suggested-deadline')
suggested_deadline_values = [element['cradmin-legacy-setfieldvalue']
for element in suggested_deadline_elements]
self.assertEqual(suggested_deadline_values, [
'2015-09-16 13:30',
'2015-09-23 13:30',
'2015-09-30 13:30',
'2015-10-07 13:30',
])
def test_get_suggested_deadlines_render_values_previous_deadline_in_the_future(self):
period = baker.make_recipe('devilry.apps.core.period_active')
# Ignored by the suggestion system
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period)
# This should be the one that is used for suggestions
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
first_deadline=default_timezone_datetime(3500, 9, 5, 13, 30))
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
suggested_deadline_elements = mockresponse.selector.list(
'.devilry-admin-createassignment-suggested-deadline')
suggested_deadline_values = [element['cradmin-legacy-setfieldvalue']
for element in suggested_deadline_elements]
self.assertEqual(suggested_deadline_values, [
'3500-09-12 13:30',
'3500-09-19 13:30',
'3500-09-26 13:30',
'3500-10-03 13:30',
])
def test_get_suggested_deadlines_render_labels(self):
period = baker.make_recipe('devilry.apps.core.period_active')
# Ignored by the suggestion system
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period)
# This should be the one that is used for suggestions
baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
first_deadline=default_timezone_datetime(3500, 9, 5, 13, 30))
with self.settings(DATETIME_FORMAT='D M j Y H:i', USE_L10N=False):
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
suggested_deadline_elements = mockresponse.selector.list(
'.devilry-admin-createassignment-suggested-deadline')
suggested_deadline_labels = [element.alltext_normalized
for element in suggested_deadline_elements]
self.assertEqual([
'Wed Sep 12 3500 13:30',
'Wed Sep 19 3500 13:30',
'Wed Sep 26 3500 13:30',
'Wed Oct 3 3500 13:30',
], suggested_deadline_labels)
def test_get_default_select_options_count(self):
    """With no previous assignments the student-import dropdown has 3 options."""
    period = baker.make_recipe('devilry.apps.core.period_active')
    mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=period)
    self.assertEqual(3, len(mockresponse.selector.list('option')))
def test_get_select_options_default_selected_no_value(self):
    """The first (default) dropdown option is empty-valued and pre-selected."""
    period = baker.make_recipe('devilry.apps.core.period_active')
    mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=period)
    first_option = mockresponse.selector.list('option')[0]
    self.assertEqual(first_option.get('value'), '')
    self.assertIsNotNone(first_option.get('selected', None))
def test_get_select_options_import_all_students_on_semester_exists(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
option_list = mockresponse.selector.list('option')
self.assertEqual(option_list[1].get('value'), 'all')
def test_get_select_options_import_no_students_exists(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
option_list = mockresponse.selector.list('option')
self.assertEqual(option_list[2].get('value'), 'none')
def test_get_select_options_period_has_one_assignment(self):
period = baker.make_recipe('devilry.apps.core.period_active')
assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
parentnode=period,
long_name='Test1', short_name='test1')
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
selector = htmls.S(mockresponse.selector.one('optgroup').prettify())
self.assertEqual(selector.one('optgroup').get('label'), assignment.long_name)
optgroup_options = selector.list('option')
self.assertEqual(optgroup_options[0].get('value'), '{}_all'.format(assignment.id))
self.assertEqual(optgroup_options[1].get('value'), '{}_passed'.format(assignment.id))
def test_get_select_options_period_has_multiple_assignments_ordered_by_first_deadline(self):
period = baker.make_recipe('devilry.apps.core.period_active')
assignment1 = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
first_deadline=timezone.now() + timezone.timedelta(days=1),
parentnode=period, long_name='Test1', short_name='test1')
assignment2 = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
first_deadline=timezone.now() + timezone.timedelta(days=4),
parentnode=period, long_name='Test2', short_name='test2')
assignment3 = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
first_deadline=timezone.now() + timezone.timedelta(days=2),
parentnode=period, long_name='Test3', short_name='test3')
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=period)
optgroup_list = mockresponse.selector.list('optgroup')
self.assertEqual(len(optgroup_list), 3)
self.assertEqual(optgroup_list[0].get('label'), assignment2.long_name)
self.assertEqual(optgroup_list[1].get('label'), assignment3.long_name)
self.assertEqual(optgroup_list[2].get('label'), assignment1.long_name)
def test_get_select_options_period_has_multiple_assignments_options(self):
    # Each assignment optgroup must offer exactly two import choices:
    # '<id>_all' (copy all students) and '<id>_passed' (copy students with a
    # passing grade). The sibling ordering test above shows optgroups are
    # sorted by first_deadline descending, so assignment2 comes first here.
    period = baker.make_recipe('devilry.apps.core.period_active')
    assignment1 = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
                                    first_deadline=timezone.now() + timezone.timedelta(days=1),
                                    parentnode=period, long_name='Test1', short_name='test1')
    assignment2 = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
                                    first_deadline=timezone.now() + timezone.timedelta(days=2),
                                    parentnode=period, long_name='Test2', short_name='test2')
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=period)
    optgroup_list = mockresponse.selector.list('optgroup')
    # Test assignment2 options
    assignment2_optgroup_selector = htmls.S(optgroup_list[0].prettify())
    assignment2_optgroup_options = assignment2_optgroup_selector.list('option')
    self.assertEqual(len(assignment2_optgroup_options), 2)
    self.assertEqual(assignment2_optgroup_options[0].get('value'), '{}_all'.format(assignment2.id))
    self.assertEqual(assignment2_optgroup_options[1].get('value'), '{}_passed'.format(assignment2.id))
    # Test assignment1 options
    assignment1_optgroup_selector = htmls.S(optgroup_list[1].prettify())
    assignment1_optgroup_options = assignment1_optgroup_selector.list('option')
    self.assertEqual(len(assignment1_optgroup_options), 2)
    self.assertEqual(assignment1_optgroup_options[0].get('value'), '{}_all'.format(assignment1.id))
    self.assertEqual(assignment1_optgroup_options[1].get('value'), '{}_passed'.format(assignment1.id))
def test_post_missing_short_name(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': '',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59))
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertEqual(
'This field is required.',
mockresponse.selector.one('#error_1_id_short_name').alltext_normalized)
def test_post_missing_long_name(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': '',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59))
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertEqual(
'This field is required.',
mockresponse.selector.one('#error_1_id_long_name').alltext_normalized)
def test_post_missing_first_deadline(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': '',
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertEqual(
'This field is required.',
mockresponse.selector.one('#error_1_id_first_deadline').alltext_normalized)
def test_post_missing_student_import_option(self):
period = baker.make_recipe('devilry.apps.core.period_active')
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(OLD_PERIOD_START),
'student_import_option': ''
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertEqual(
'This field is required.',
mockresponse.selector.one('#error_1_id_student_import_option').alltext_normalized)
def test_post_import_all_students_on_semester(self):
period = baker.make_recipe('devilry.apps.core.period_active')
relatedstudent1 = baker.make('core.RelatedStudent', period=period)
relatedstudent2 = baker.make('core.RelatedStudent', period=period)
relatedstudent3 = baker.make('core.RelatedStudent', period=period)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': 'all'
}
})
self.assertEqual(Assignment.objects.count(), 1)
created_assignment = Assignment.objects.get()
self.assertEqual(
Candidate.objects
.filter(assignment_group__parentnode=created_assignment, relatedstudent=relatedstudent1)
.count(), 1)
self.assertEqual(
Candidate.objects
.filter(assignment_group__parentnode=created_assignment, relatedstudent=relatedstudent2)
.count(), 1)
self.assertEqual(
Candidate.objects
.filter(assignment_group__parentnode=created_assignment, relatedstudent=relatedstudent3)
.count(), 1)
def test_post_import_all_students_on_semester_no_students_on_semester(self):
period = baker.make_recipe('devilry.apps.core.period_active')
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': 'all'
}
})
self.assertEqual(Assignment.objects.count(), 1)
created_assignment = Assignment.objects.get()
self.assertFalse(AssignmentGroup.objects.filter(parentnode=created_assignment).exists())
self.assertFalse(Candidate.objects.filter(assignment_group__parentnode=created_assignment).exists())
def test_post_import_no_students(self):
period = baker.make_recipe('devilry.apps.core.period_active')
baker.make('core.RelatedStudent', period=period, _quantity=10)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': 'none'
}
})
self.assertEqual(Assignment.objects.count(), 1)
created_assignment = Assignment.objects.get()
self.assertFalse(Candidate.objects.filter(assignment_group__parentnode=created_assignment).exists())
self.assertFalse(AssignmentGroup.objects.filter(parentnode=created_assignment).exists())
def test_post_copy_all_students_from_another_assignment(self):
period = baker.make_recipe('devilry.apps.core.period_active')
other_assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start', parentnode=period)
relatedstudent_user1 = baker.make(settings.AUTH_USER_MODEL)
relatedstudent_user2 = baker.make(settings.AUTH_USER_MODEL)
baker.make('core.Candidate', assignment_group__parentnode=other_assignment,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user1)
baker.make('core.Candidate', assignment_group__parentnode=other_assignment,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user2)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': '{}_all'.format(other_assignment.id)
}
})
self.assertEqual(Assignment.objects.filter(short_name='testassignment').count(), 1)
created_assignment = Assignment.objects.get(short_name='testassignment')
self.assertEqual(created_assignment.assignmentgroups.count(), 2)
self.assertEqual(
Candidate.objects.filter(
assignment_group__parentnode=created_assignment, relatedstudent__user=relatedstudent_user1).count(),
1)
self.assertEqual(
Candidate.objects.filter(
assignment_group__parentnode=created_assignment, relatedstudent__user=relatedstudent_user2).count(),
1)
def test_post_copy_all_students_from_another_assignment_same_group_structure(self):
    """Copying '<id>_all' preserves the group structure: groups with the same
    names and the same candidate memberships are created on the new assignment.

    Fix: the original asserted the group2 membership three times (copy-paste
    duplicates) and never checked which users ended up in group1.
    """
    period = baker.make_recipe('devilry.apps.core.period_active')
    other_assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start', parentnode=period)
    group1 = baker.make('core.AssignmentGroup', parentnode=other_assignment, name='group1')
    group2 = baker.make('core.AssignmentGroup', parentnode=other_assignment, name='group2')
    relatedstudent_user1_group1 = baker.make(settings.AUTH_USER_MODEL)
    relatedstudent_user2_group1 = baker.make(settings.AUTH_USER_MODEL)
    relatedstudent_user_group2 = baker.make(settings.AUTH_USER_MODEL)
    baker.make('core.Candidate', assignment_group=group1,
               relatedstudent__period=period, relatedstudent__user=relatedstudent_user1_group1)
    baker.make('core.Candidate', assignment_group=group1,
               relatedstudent__period=period, relatedstudent__user=relatedstudent_user2_group1)
    baker.make('core.Candidate', assignment_group=group2,
               relatedstudent__period=period, relatedstudent__user=relatedstudent_user_group2)
    self.mock_http302_postrequest(
        cradmin_role=period,
        requestkwargs={
            'data': {
                'long_name': 'Test assignment',
                'short_name': 'testassignment',
                'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
                'student_import_option': '{}_all'.format(other_assignment.id)
            }
        })
    self.assertEqual(Assignment.objects.filter(short_name='testassignment').count(), 1)
    created_assignment = Assignment.objects.get(short_name='testassignment')
    self.assertEqual(created_assignment.assignmentgroups.count(), 2)
    # group1: both of its candidates must be copied.
    self.assertEqual(Candidate.objects.filter(
        assignment_group__name='group1', assignment_group__parentnode=created_assignment).count(), 2)
    self.assertTrue(Candidate.objects.filter(
        assignment_group__name='group1',
        assignment_group__parentnode=created_assignment,
        relatedstudent__user=relatedstudent_user1_group1).exists())
    self.assertTrue(Candidate.objects.filter(
        assignment_group__name='group1',
        assignment_group__parentnode=created_assignment,
        relatedstudent__user=relatedstudent_user2_group1).exists())
    # group2: its single candidate must be copied.
    self.assertEqual(Candidate.objects.filter(
        assignment_group__name='group2', assignment_group__parentnode=created_assignment).count(), 1)
    self.assertTrue(Candidate.objects.filter(
        assignment_group__name='group2',
        assignment_group__parentnode=created_assignment,
        relatedstudent__user=relatedstudent_user_group2).exists())
def test_post_copy_with_passing_grade_from_another_assignment(self):
period = baker.make_recipe('devilry.apps.core.period_active')
other_assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start', parentnode=period)
group_passed = baker.make('core.AssignmentGroup', parentnode=other_assignment)
group_failed = baker.make('core.AssignmentGroup', parentnode=other_assignment)
relatedstudent_user1 = baker.make(settings.AUTH_USER_MODEL)
relatedstudent_user2 = baker.make(settings.AUTH_USER_MODEL)
baker.make('core.Candidate', assignment_group=group_passed,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user1)
baker.make('core.Candidate', assignment_group=group_failed,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user2)
devilry_group_baker_factories.feedbackset_first_attempt_published(
grading_points=1, group=group_passed)
devilry_group_baker_factories.feedbackset_first_attempt_published(
grading_points=0, group=group_failed)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': '{}_passed'.format(other_assignment.id)
}
})
self.assertEqual(Assignment.objects.filter(short_name='testassignment').count(), 1)
created_assignment = Assignment.objects.get(short_name='testassignment')
self.assertEqual(created_assignment.assignmentgroups.count(), 1)
self.assertEqual(Candidate.objects.filter(assignment_group__parentnode=created_assignment).count(), 1)
self.assertTrue(
Candidate.objects.filter(
assignment_group__parentnode=created_assignment, relatedstudent__user=relatedstudent_user1).exists())
def test_post_copy_with_passing_grade_from_another_assignment_same_group_structure(self):
period = baker.make_recipe('devilry.apps.core.period_active')
other_assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start', parentnode=period)
group = baker.make('core.AssignmentGroup', parentnode=other_assignment)
relatedstudent_user1 = baker.make(settings.AUTH_USER_MODEL)
relatedstudent_user2 = baker.make(settings.AUTH_USER_MODEL)
baker.make('core.Candidate', assignment_group=group,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user1)
baker.make('core.Candidate', assignment_group=group,
relatedstudent__period=period, relatedstudent__user=relatedstudent_user2)
devilry_group_baker_factories.feedbackset_first_attempt_published(
grading_points=1, group=group)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': '{}_passed'.format(other_assignment.id)
}
})
self.assertEqual(Assignment.objects.filter(short_name='testassignment').count(), 1)
created_assignment = Assignment.objects.get(short_name='testassignment')
self.assertEqual(created_assignment.assignmentgroups.count(), 1)
created_group = created_assignment.assignmentgroups.get()
self.assertEqual(Candidate.objects.filter(assignment_group=created_group).count(), 2)
self.assertTrue(
Candidate.objects.filter(
assignment_group=created_group, relatedstudent__user=relatedstudent_user1).exists())
self.assertTrue(
Candidate.objects.filter(
assignment_group=created_group, relatedstudent__user=relatedstudent_user2).exists())
def test_post_copy_with_passing_grade_from_another_examiners_copied(self):
period = baker.make_recipe('devilry.apps.core.period_active')
other_assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start', parentnode=period)
group = baker.make('core.AssignmentGroup', parentnode=other_assignment)
devilry_group_baker_factories.feedbackset_first_attempt_published(
grading_points=1, group=group)
self.mock_http302_postrequest(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': datetimeutils.isoformat_noseconds(default_timezone_datetime(3000, 12, 31, 23, 59)),
'student_import_option': '{}_passed'.format(other_assignment.id)
}
})
self.assertEqual(Assignment.objects.filter(short_name='testassignment').count(), 1)
created_assignment = Assignment.objects.get(short_name='testassignment')
self.assertEqual(created_assignment.assignmentgroups.count(), 1)
self.assertEqual(Examiner.objects.filter(assignmentgroup__parentnode=created_assignment).count(), 1)
def test_post_first_deadline_outside_period(self):
period = baker.make_recipe('devilry.apps.core.period_active')
first_deadline_isoformat = datetimeutils.isoformat_noseconds(FUTURE_PERIOD_END)
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': first_deadline_isoformat,
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertTrue(mockresponse.selector.exists('#error_1_id_first_deadline'))
self.assertIn('First deadline must be within',
mockresponse.selector.one('#error_1_id_first_deadline').alltext_normalized)
def test_post_first_deadline_before_publishing_time_hours(self):
period = baker.make_recipe('devilry.apps.core.period_active')
first_deadline_isoformat = datetimeutils.isoformat_noseconds(timezone.now())
with self.settings(DEVILRY_ASSIGNMENT_PUBLISHING_TIME_DELAY_MINUTES=60 * 3):
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': first_deadline_isoformat,
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertTrue(mockresponse.selector.exists('#error_1_id_first_deadline'))
self.assertEqual('First deadline must be at least 3 hours from now.',
mockresponse.selector.one('#error_1_id_first_deadline').alltext_normalized)
def test_post_first_deadline_before_publishing_time_minutes(self):
period = baker.make_recipe('devilry.apps.core.period_active')
first_deadline_isoformat = datetimeutils.isoformat_noseconds(timezone.now())
with self.settings(DEVILRY_ASSIGNMENT_PUBLISHING_TIME_DELAY_MINUTES=30):
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=period,
requestkwargs={
'data': {
'long_name': 'Test assignment',
'short_name': 'testassignment',
'first_deadline': first_deadline_isoformat,
}
})
self.assertEqual(Assignment.objects.count(), 0)
self.assertTrue(mockresponse.selector.exists('#error_1_id_first_deadline'))
self.assertEqual('First deadline must be at least 30 minutes from now.',
mockresponse.selector.one('#error_1_id_first_deadline').alltext_normalized)
def __valid_post_request(self, period=None, first_deadline=None,
                         publishing_time_delay_minutes=60, student_import_option='all'):
    # Helper: POST a valid create-assignment form and return the created
    # Assignment together with the mock (302 redirect) response.
    #
    # period: defaults to a freshly created active period.
    # first_deadline: defaults to a datetime far in the future (year 3000).
    # publishing_time_delay_minutes: applied through the
    #     DEVILRY_ASSIGNMENT_PUBLISHING_TIME_DELAY_MINUTES setting for the request.
    # student_import_option: posted as-is ('all', 'none', '<id>_all' or '<id>_passed').
    if not first_deadline:
        first_deadline = default_timezone_datetime(3000, 12, 31, 23, 59)
    if not period:
        period = baker.make_recipe('devilry.apps.core.period_active')
    with self.settings(DEVILRY_ASSIGNMENT_PUBLISHING_TIME_DELAY_MINUTES=publishing_time_delay_minutes):
        mockresponse = self.mock_http302_postrequest(
            cradmin_role=period,
            requestkwargs={
                'data': {
                    'long_name': 'Test assignment',
                    'short_name': 'testassignment',
                    'first_deadline': datetimeutils.isoformat_noseconds(first_deadline),
                    'student_import_option': student_import_option
                }
            })
    created_assignment = Assignment.objects.get(short_name='testassignment')
    return created_assignment, mockresponse
def test_post_sanity(self):
    # A valid POST creates exactly one assignment with the posted names.
    self.assertEqual(Assignment.objects.count(), 0)
    first_deadline = default_timezone_datetime(3000, 12, 31, 23, 59)
    created_assignment, mockresponse = self.__valid_post_request(first_deadline=first_deadline)
    self.assertEqual(Assignment.objects.count(), 1)
    self.assertEqual(created_assignment.long_name, 'Test assignment')
    self.assertEqual(created_assignment.short_name, 'testassignment')
    # NOTE(review): the posted value goes through isoformat_noseconds, yet the
    # stored deadline is expected with second=59 — presumably the view pads the
    # deadline to the end of the minute. TODO confirm against the view code.
    self.assertEqual(
        first_deadline.replace(second=59),
        created_assignment.first_deadline)
def test_post_success_redirect(self):
self.assertEqual(Assignment.objects.count(), 0)
created_assignment, mockresponse = self.__valid_post_request()
self.assertEqual(mockresponse.response['location'],
crinstance.reverse_cradmin_url(
instanceid='devilry_admin_assignmentadmin',
appname='overview',
roleid=created_assignment.id))
    def test_post_publishing_time(self):
        """publishing_time should be "now + delay".

        The exact timestamp cannot be known, so assert that it falls inside a
        +/- 1 minute window around the configured 60 minute delay.
        """
        created_assignment, mockresponse = self.__valid_post_request(publishing_time_delay_minutes=60)
        self.assertTrue(
            (timezone.now() + timedelta(minutes=59)) <
            created_assignment.publishing_time <
            (timezone.now() + timedelta(minutes=61))
        )
def test_post_future_period_sanity(self):
self.assertEqual(Assignment.objects.count(), 0)
period = baker.make_recipe('devilry.apps.core.period_future')
self.__valid_post_request(period=period, first_deadline=ASSIGNMENT_FUTUREPERIOD_START_FIRST_DEADLINE)
self.assertEqual(Assignment.objects.count(), 1)
def test_post_future_publishing_time(self):
period = baker.make_recipe('devilry.apps.core.period_future')
created_assignment, mockresponse = self.__valid_post_request(
period=period,
first_deadline=ASSIGNMENT_FUTUREPERIOD_START_FIRST_DEADLINE,
publishing_time_delay_minutes=60
)
self.assertEqual(created_assignment.publishing_time, (period.start_time + timedelta(minutes=60)))
def test_post_add_no_students(self):
period = baker.make_recipe('devilry.apps.core.period_active')
baker.make('core.RelatedStudent', period=period,
user__shortname='student1')
baker.make('core.RelatedStudent', period=period,
user__shortname='student2')
created_assignment, mockresponse = self.__valid_post_request(period=period, student_import_option='none')
self.assertEqual(created_assignment.assignmentgroups.count(), 0)
self.assertEqual(Candidate.objects.filter(assignment_group__parentnode=created_assignment).count(), 0)
def test_post_add_all_relatedstudents_on_period(self):
period = baker.make_recipe('devilry.apps.core.period_active')
baker.make('core.RelatedStudent', period=period,
user__shortname='student1')
baker.make('core.RelatedStudent', period=period,
user__shortname='student2')
created_assignment, mockresponse = self.__valid_post_request(period=period)
self.assertEqual(2, created_assignment.assignmentgroups.count())
candidatesqueryset = Candidate.objects.filter(assignment_group__parentnode=created_assignment)
self.assertEqual(2, candidatesqueryset.count())
self.assertTrue(candidatesqueryset.filter(relatedstudent__user__shortname='student1').exists())
self.assertTrue(candidatesqueryset.filter(relatedstudent__user__shortname='student2').exists())
def test_post_add_all_relatedstudents_on_period_no_students_on_period(self):
period = baker.make_recipe('devilry.apps.core.period_active')
created_assignment, mockresponse = self.__valid_post_request(period=period)
self.assertEqual(0, created_assignment.assignmentgroups.count())
self.assertEqual(Candidate.objects.filter(assignment_group__parentnode=created_assignment).count(), 0)
    def test_post_add_students_from_assignment(self):
        """The import option '<assignment_id>_all' copies the students of that
        other assignment into the newly created assignment."""
        period = baker.make_recipe('devilry.apps.core.period_active')
        other_assignment = baker.make('core.Assignment', parentnode=period)
        # Two candidates on the other assignment, all on the same period.
        baker.make('core.Candidate',
                   assignment_group__parentnode=other_assignment,
                   relatedstudent__period=period, _quantity=2)
        created_assignment, mockresponse = self.__valid_post_request(
            period=period, student_import_option='{}_all'.format(other_assignment.id))
        self.assertEqual(2, created_assignment.assignmentgroups.count())
        self.assertEqual(Candidate.objects.filter(assignment_group__parentnode=created_assignment).count(), 2)
|
#!/bin/bash

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Connect
# @raycast.mode compact

# Optional parameters:
# @raycast.icon ๐ก

# Documentation:
# @raycast.packageName VPN
# @raycast.description Start VPN connection.
# @raycast.author Alexandru Turcanu
# @raycast.authorURL https://github.com/Pondorasti

source vpn-config.sh
VPN=$VPN_NAME

# Source: https://superuser.com/a/736859
# Succeeds (exit 0) while the named VPN service is NOT yet connected.
# Fix: use the argument ($1) instead of silently reading the global $VPN.
function isnt_connected () {
    scutil --nc status "$1" | sed -n 1p | grep -qv Connected
}

# Polls the VPN status every 0.1s until connected or the timeout expires.
# Succeeds only if the service reached the Connected state in time.
function poll_until_connected () {
    local loops=0
    local max_loops=200  # 200 * 0.1s = 20 seconds; bash has no float arithmetic.
    while isnt_connected "$1"; do
        sleep 0.1  # cannot be a variable: bash arithmetic is integer-only
        loops=$((loops + 1))
        [ "$loops" -gt "$max_loops" ] && break
    done
    [ "$loops" -le "$max_loops" ]
}

networksetup -connectpppoeservice "$VPN"

if poll_until_connected "$VPN"; then
    echo "Connected to $VPN!"
else
    echo "Couldn't connect to $VPN"
    scutil --nc stop "$VPN"
    exit 1
fi
|
<reponame>dr-aryone/honeybadger-ruby
module Honeybadger
  # The current Honeybadger gem version, as a String.
  # Frozen so the constant cannot be mutated at runtime.
  VERSION = '4.3.1'.freeze
end
|
<filename>src/car/car.controller.ts
import { Controller } from '@nestjs/common';
/**
 * Handles HTTP requests routed under the `/car` path prefix.
 * No route handlers are defined yet; endpoints will be added here.
 */
@Controller('car')
export class CarController {}
|
# frozen_string_literal: true
# Controller concern that selects and paginates puzzles based on the
# `sort` query parameter.
module Sorting
  extend ActiveSupport::Concern

  included do
    # Populates @pagy/@puzzles according to params[:sort]:
    #   'solved' -> puzzles solved by the current user
    #   'author' -> puzzles authored by the current user
    # Any other value leaves @pagy and @puzzles unset.
    # NOTE(review): assumes `pagy` returns [pagination_metadata, records]
    # and that the records respond to `decorate` (draper) -- confirm.
    def sort_by_params
      case params[:sort]
      when 'solved'
        @pagy, @puzzles = pagy Puzzle.puzzles_solved_by_user(current_user), items: 5
        @puzzles = @puzzles.decorate
      when 'author'
        @pagy, @puzzles = pagy Puzzle.current_user_author_puzzles(current_user), items: 5
        @puzzles = @puzzles.decorate
      end
    end
    # Expose the method to views as a helper.
    helper_method :sort_by_params
  end
end
|
/*
Navicat Premium Data Transfer
Source Server : localhost_3306
Source Server Type : MySQL
Source Server Version : 100128
Source Host : localhost:3306
Source Schema : matm
Target Server Type : MySQL
Target Server Version : 100128
File Encoding : 65001
Date: 27/07/2020 19:34:40
*/
-- Session setup: use utf8mb4 for this connection and disable foreign key
-- checks so tables can be dropped/recreated in any order.
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- NOTE(review): all `update_at` columns below are auto-maintained by MySQL
-- via ON UPDATE CURRENT_TIMESTAMP; application code never needs to set them.
-- ----------------------------
-- Table structure for dt_diag_klsf
-- ----------------------------
DROP TABLE IF EXISTS `dt_diag_klsf`;
CREATE TABLE `dt_diag_klsf`  (
  `id_diag` int(64) NOT NULL AUTO_INCREMENT,
  `id_pasien` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
  `tp_diag` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `kls_antm` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `ket_antm` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `kls_rwyt` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `is_hiv` int(10) NULL DEFAULT NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_diag`) USING BTREE,
  INDEX `diag_pasien`(`id_pasien`) USING BTREE,
  CONSTRAINT `diag_pasien` FOREIGN KEY (`id_pasien`) REFERENCES `dt_pasien` (`id_pasien`) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_diag_klsf
-- ----------------------------
INSERT INTO `dt_diag_klsf` VALUES (1, 'f0ca9be1', '1', '1', 'ginjal', '1', 1, '2020-04-21 17:17:28', '2020-04-21 17:17:28');
-- ----------------------------
-- Table structure for dt_obat
-- ----------------------------
DROP TABLE IF EXISTS `dt_obat`;
CREATE TABLE `dt_obat`  (
  `id_obat` int(64) NOT NULL AUTO_INCREMENT,
  `id_proses` int(24) NULL DEFAULT NULL,
  `panduan` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `bentuk` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `sumber` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `batch` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `dosis` int(10) NULL DEFAULT NULL,
  `dosis_minum` int(10) NULL DEFAULT NULL,
  `tgl_pemberian` datetime(0) NULL DEFAULT NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_obat`) USING BTREE,
  INDEX `obat_proses`(`id_proses`) USING BTREE,
  CONSTRAINT `obat_proses` FOREIGN KEY (`id_proses`) REFERENCES `dt_proses` (`id_proses`) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_obat
-- ----------------------------
INSERT INTO `dt_obat` VALUES (1, 1, '1', '1', '1', '9725mvsaifd8036', 7, 1, '2020-07-18 00:00:00', '2020-07-18 10:35:57', '2020-07-18 10:35:57');
INSERT INTO `dt_obat` VALUES (2, 1, '0', '0', '2', 'ouwgfouwe289521984', 10, 2, '2020-07-15 00:00:00', '2020-07-18 10:39:41', '2020-07-18 10:39:41');
-- ----------------------------
-- Table structure for dt_pasien
-- ----------------------------
DROP TABLE IF EXISTS `dt_pasien`;
CREATE TABLE `dt_pasien`  (
  `id_pasien` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
  `nik_pasien` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
  `nm_pasien` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `jns_klm` int(10) NULL DEFAULT NULL,
  `alamat` text CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL,
  `is_hamil` int(10) NULL DEFAULT NULL,
  `tgl_lahir` datetime(6) NULL DEFAULT NULL,
  `umr_thn` int(16) NULL DEFAULT NULL,
  `umr_bln` int(16) NULL DEFAULT NULL,
  `brt_bdn` int(16) NULL DEFAULT NULL,
  `tg_bdn` int(16) NULL DEFAULT NULL,
  `telp` varchar(50) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `parut_bcg` int(2) NULL DEFAULT NULL,
  `skor_anak` int(10) NULL DEFAULT NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_pasien`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_pasien
-- ----------------------------
INSERT INTO `dt_pasien` VALUES ('f0ca9be1', '3521035408940001', 'Erlyan', 2, 'tinalan', 1, '1995-04-19 00:00:00.000000', 25, 10, 76, 170, '082213343435', 1, 0, '2020-03-23 19:47:33', '2020-03-23 19:47:33');
-- ----------------------------
-- Table structure for dt_pemeriksaan
-- ----------------------------
DROP TABLE IF EXISTS `dt_pemeriksaan`;
CREATE TABLE `dt_pemeriksaan`  (
  `id_periksa` int(64) NOT NULL AUTO_INCREMENT,
  `id_proses` int(24) NULL DEFAULT NULL,
  `tgl_periksa` datetime(0) NULL DEFAULT NULL,
  `noreg` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `jenis` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `nilai` varchar(50) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `ket` text CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_periksa`) USING BTREE,
  INDEX `proses_pemeriksaan`(`id_proses`) USING BTREE,
  CONSTRAINT `proses_pemeriksaan` FOREIGN KEY (`id_proses`) REFERENCES `dt_proses` (`id_proses`) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_pemeriksaan
-- ----------------------------
INSERT INTO `dt_pemeriksaan` VALUES (1, 1, '2020-07-24 00:00:00', '3714814-91lsdf', '0', '10', 'dahak nya banyak', '2020-07-18 11:43:21', '2020-07-18 11:43:21');
-- ----------------------------
-- Table structure for dt_pmo
-- ----------------------------
DROP TABLE IF EXISTS `dt_pmo`;
CREATE TABLE `dt_pmo`  (
  `id_pmo` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
  `nik_pmo` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
  `nm_pmo` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `jns_klm` int(10) NULL DEFAULT NULL,
  `alamat` text CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL,
  `telp` varchar(50) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `kota` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `prop` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `faskes` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `regtb03f` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `regtb03kt` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_pmo`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_pmo
-- ----------------------------
INSERT INTO `dt_pmo` VALUES ('7fffffff', '3571203983027490', 'Erlian', 2, 'purwoasri', '085736421099', 'Kab. Kediri', 'Jawa Timur', 'Puskesmas Pesantren', '1234567', '1232141', '2020-06-25 20:17:31', '2020-06-25 20:17:31');
-- ----------------------------
-- Table structure for dt_proses
-- ----------------------------
DROP TABLE IF EXISTS `dt_proses`;
CREATE TABLE `dt_proses`  (
  `id_proses` int(24) NOT NULL AUTO_INCREMENT,
  `id_pasien` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `bulan` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `tahap` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `status` varchar(255) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `tgl_mulai` datetime(0) NULL DEFAULT NULL,
  `tgl_selesai` datetime(0) NULL DEFAULT NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_proses`) USING BTREE,
  INDEX `pasien_proses`(`id_pasien`) USING BTREE,
  CONSTRAINT `pasien_proses` FOREIGN KEY (`id_pasien`) REFERENCES `dt_pasien` (`id_pasien`) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of dt_proses
-- ----------------------------
INSERT INTO `dt_proses` VALUES (1, 'f0ca9be1', '0', '0', '4', '2020-06-27 00:00:00', '2020-07-30 00:00:00', '2020-06-27 15:55:39', '2020-06-27 15:55:39');
-- ----------------------------
-- Table structure for dt_relasi_pasien_pmo
-- ----------------------------
-- NOTE(review): this join table has no primary key or foreign keys -- confirm
-- whether that is intentional before relying on referential integrity.
DROP TABLE IF EXISTS `dt_relasi_pasien_pmo`;
CREATE TABLE `dt_relasi_pasien_pmo`  (
  `id_pmo` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
  `id_pasien` varchar(125) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Table structure for dt_thpan
-- ----------------------------
DROP TABLE IF EXISTS `dt_thpan`;
CREATE TABLE `dt_thpan`  (
  `id_thp` int(64) NOT NULL AUTO_INCREMENT,
  `id_obat` int(64) NULL DEFAULT NULL,
  `id_proses` int(24) NULL DEFAULT NULL,
  `hari` int(10) NULL DEFAULT NULL,
  `sisa_obat` int(10) NULL DEFAULT NULL,
  `ket` text CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL,
  `create_at` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `update_at` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
  PRIMARY KEY (`id_thp`) USING BTREE,
  INDEX `proses_tahapan`(`id_proses`) USING BTREE,
  CONSTRAINT `proses_tahapan` FOREIGN KEY (`id_proses`) REFERENCES `dt_proses` (`id_proses`) ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Table structure for groups
-- ----------------------------
-- The tables below (groups, login_attempts, users, users_groups) follow the
-- CodeIgniter Ion Auth schema.
DROP TABLE IF EXISTS `groups`;
CREATE TABLE `groups`  (
  `id` mediumint(8) UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `description` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of groups
-- ----------------------------
INSERT INTO `groups` VALUES (1, 'admin', 'Administrator');
INSERT INTO `groups` VALUES (2, 'members', 'General User');
-- ----------------------------
-- Table structure for login_attempts
-- ----------------------------
DROP TABLE IF EXISTS `login_attempts`;
CREATE TABLE `login_attempts`  (
  `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,
  `ip_address` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `login` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `time` int(11) UNSIGNED NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Table structure for users
-- ----------------------------
DROP TABLE IF EXISTS `users`;
CREATE TABLE `users`  (
  `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,
  `ip_address` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `username` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `password` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `email` varchar(254) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `activation_selector` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `activation_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `forgotten_password_selector` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `forgotten_password_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `forgotten_password_time` int(11) UNSIGNED NULL DEFAULT NULL,
  `remember_selector` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `remember_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `created_on` int(11) UNSIGNED NOT NULL,
  `last_login` int(11) UNSIGNED NULL DEFAULT NULL,
  `active` tinyint(1) UNSIGNED NULL DEFAULT NULL,
  `first_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `last_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `company` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  `phone` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uc_email`(`email`) USING BTREE,
  UNIQUE INDEX `uc_activation_selector`(`activation_selector`) USING BTREE,
  UNIQUE INDEX `uc_forgotten_password_selector`(`forgotten_password_selector`) USING BTREE,
  UNIQUE INDEX `uc_remember_selector`(`remember_selector`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of users
-- ----------------------------
-- NOTE(review): '<EMAIL>' is a scrubbed placeholder from the dump tool;
-- replace with a real email (and rotate the password hash) before import.
INSERT INTO `users` VALUES (1, '127.0.0.1', 'administrator', '$2y$12$Hxp3YHHbG0x2KSQY9v80oucl75BOmrQ8DpX86CNhCL8U5us7caq36', '<EMAIL>', NULL, '', NULL, NULL, NULL, NULL, NULL, 1268889823, 1595841248, 1, 'Admin', 'istrator', 'ADMIN', '0');
-- ----------------------------
-- Table structure for users_groups
-- ----------------------------
DROP TABLE IF EXISTS `users_groups`;
CREATE TABLE `users_groups`  (
  `id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT,
  `user_id` int(11) UNSIGNED NOT NULL,
  `group_id` mediumint(8) UNSIGNED NOT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uc_users_groups`(`user_id`, `group_id`) USING BTREE,
  INDEX `fk_users_groups_users1_idx`(`user_id`) USING BTREE,
  INDEX `fk_users_groups_groups1_idx`(`group_id`) USING BTREE,
  CONSTRAINT `fk_users_groups_groups1` FOREIGN KEY (`group_id`) REFERENCES `groups` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION,
  CONSTRAINT `fk_users_groups_users1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
-- ----------------------------
-- Records of users_groups
-- ----------------------------
INSERT INTO `users_groups` VALUES (1, 1, 1);
INSERT INTO `users_groups` VALUES (2, 1, 2);
-- Re-enable foreign key enforcement now that all tables and rows exist.
SET FOREIGN_KEY_CHECKS = 1;
|
export default `flf2a$ 8 6 59 15 10 0 24463
Big by <NAME> 4/93 -- based on Standard
Includes ISO Latin-1
Greek characters by <NAME> <<EMAIL>>
figlet release 2.2 -- November 1996
Permission is hereby given to modify this font, as long as the
modifier's name is placed on a comment line.
Modified by <NAME> <<EMAIL>> 12/96 to include new parameter
supported by FIGlet and FIGWin. May also be slightly modified for better use
of new full-width/kern/smush alternatives, but default output is NOT changed.
$@
$@
$@
$@
$@
$@
$@
$@@
_ @
| |@
| |@
| |@
|_|@
(_)@
@
@@
_ _ @
( | )@
V V @
$ @
$ @
$ @
@
@@
_ _ @
_| || |_ @
|_ __ _|@
_| || |_ @
|_ __ _|@
|_||_| @
@
@@
_ @
| | @
/ __)@
\__ \@
( /@
|_| @
@
@@
_ __@
(_) / /@
/ / @
/ / @
/ / _ @
/_/ (_)@
@
@@
@
___ @
( _ ) @
/ _ \/\@
| (_> <@
\___/\/@
@
@@
_ @
( )@
|/ @
$ @
$ @
$ @
@
@@
__@
/ /@
| | @
| | @
| | @
| | @
\_\@
@@
__ @
\ \ @
| |@
| |@
| |@
| |@
/_/ @
@@
_ @
/\| |/\ @
\ \` ' / @
|_ _|@
/ , . \ @
\/|_|\/ @
@
@@
@
_ @
_| |_ @
|_ _|@
|_| @
$ @
@
@@
@
@
@
@
_ @
( )@
|/ @
@@
@
@
______ @
|______|@
$ @
$ @
@
@@
@
@
@
@
_ @
(_)@
@
@@
__@
/ /@
/ / @
/ / @
/ / @
/_/ @
@
@@
___ @
/ _ \ @
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
__ @
/_ |@
| |@
| |@
| |@
|_|@
@
@@
___ @
|__ \ @
$) |@
/ / @
/ /_ @
|____|@
@
@@
____ @
|___ \ @
__) |@
|__ < @
___) |@
|____/ @
@
@@
_ _ @
| || | @
| || |_ @
|__ _|@
| | @
|_| @
@
@@
_____ @
| ____|@
| |__ @
|___ \ @
___) |@
|____/ @
@
@@
__ @
/ / @
/ /_ @
| '_ \ @
| (_) |@
\___/ @
@
@@
______ @
|____ |@
$/ / @
/ / @
/ / @
/_/ @
@
@@
___ @
/ _ \ @
| (_) |@
> _ < @
| (_) |@
\___/ @
@
@@
___ @
/ _ \ @
| (_) |@
\__, |@
/ / @
/_/ @
@
@@
@
_ @
(_)@
$ @
_ @
(_)@
@
@@
@
_ @
(_)@
$ @
_ @
( )@
|/ @
@@
__@
/ /@
/ / @
< < @
\ \ @
\_\@
@
@@
@
______ @
|______|@
______ @
|______|@
@
@
@@
__ @
\ \ @
\ \ @
> >@
/ / @
/_/ @
@
@@
___ @
|__ \ @
) |@
/ / @
|_| @
(_) @
@
@@
@
____ @
/ __ \ @
/ / _\` |@
| | (_| |@
\ \__,_|@
\____/ @
@@
@
/\ @
/ \ @
/ /\ \ @
/ ____ \ @
/_/ \_\@
@
@@
____ @
| _ \ @
| |_) |@
| _ < @
| |_) |@
|____/ @
@
@@
_____ @
/ ____|@
| | $ @
| | $ @
| |____ @
\_____|@
@
@@
_____ @
| __ \ @
| | | |@
| | | |@
| |__| |@
|_____/ @
@
@@
______ @
| ____|@
| |__ @
| __| @
| |____ @
|______|@
@
@@
______ @
| ____|@
| |__ @
| __| @
| | @
|_| @
@
@@
_____ @
/ ____|@
| | __ @
| | |_ |@
| |__| |@
\_____|@
@
@@
_ _ @
| | | |@
| |__| |@
| __ |@
| | | |@
|_| |_|@
@
@@
_____ @
|_ _|@
| | @
| | @
_| |_ @
|_____|@
@
@@
_ @
| |@
| |@
_ | |@
| |__| |@
\____/ @
@
@@
_ __@
| |/ /@
| ' / @
| < @
| . \ @
|_|\_\@
@
@@
_ @
| | @
| | @
| | @
| |____ @
|______|@
@
@@
__ __ @
| \/ |@
| \ / |@
| |\/| |@
| | | |@
|_| |_|@
@
@@
_ _ @
| \ | |@
| \| |@
| . \` |@
| |\ |@
|_| \_|@
@
@@
____ @
/ __ \ @
| | | |@
| | | |@
| |__| |@
\____/ @
@
@@
_____ @
| __ \ @
| |__) |@
| ___/ @
| | @
|_| @
@
@@
____ @
/ __ \ @
| | | |@
| | | |@
| |__| |@
\___\_\@
@
@@
_____ @
| __ \ @
| |__) |@
| _ / @
| | \ \ @
|_| \_\@
@
@@
_____ @
/ ____|@
| (___ @
\___ \ @
____) |@
|_____/ @
@
@@
_______ @
|__ __|@
| | @
| | @
| | @
|_| @
@
@@
_ _ @
| | | |@
| | | |@
| | | |@
| |__| |@
\____/ @
@
@@
__ __@
\ \ / /@
\ \ / / @
\ \/ / @
\ / @
\/ @
@
@@
__ __@
\ \ / /@
\ \ /\ / / @
\ \/ \/ / @
\ /\ / @
\/ \/ @
@
@@
__ __@
\ \ / /@
\ V / @
> < @
/ . \ @
/_/ \_\@
@
@@
__ __@
\ \ / /@
\ \_/ / @
\ / @
| | @
|_| @
@
@@
______@
|___ /@
$/ / @
/ / @
/ /__ @
/_____|@
@
@@
___ @
| _|@
| | @
| | @
| | @
| |_ @
|___|@
@@
__ @
\ \ @
\ \ @
\ \ @
\ \ @
\_\@
@
@@
___ @
|_ |@
| |@
| |@
| |@
_| |@
|___|@
@@
/\ @
|/\|@
$ @
$ @
$ @
$ @
@
@@
@
@
@
@
@
$ @
______ @
|______|@@
_ @
( )@
\|@
$ @
$ @
$ @
@
@@
@
@
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
_ @
| | @
| |__ @
| '_ \ @
| |_) |@
|_.__/ @
@
@@
@
@
___ @
/ __|@
| (__ @
\___|@
@
@@
_ @
| |@
__| |@
/ _\` |@
| (_| |@
\__,_|@
@
@@
@
@
___ @
/ _ \@
| __/@
\___|@
@
@@
__ @
/ _|@
| |_ @
| _|@
| | @
|_| @
@
@@
@
@
__ _ @
/ _\` |@
| (_| |@
\__, |@
__/ |@
|___/ @@
_ @
| | @
| |__ @
| '_ \ @
| | | |@
|_| |_|@
@
@@
_ @
(_)@
_ @
| |@
| |@
|_|@
@
@@
_ @
(_)@
_ @
| |@
| |@
| |@
_/ |@
|__/ @@
_ @
| | @
| | __@
| |/ /@
| < @
|_|\_\@
@
@@
_ @
| |@
| |@
| |@
| |@
|_|@
@
@@
@
@
_ __ ___ @
| '_ \` _ \ @
| | | | | |@
|_| |_| |_|@
@
@@
@
@
_ __ @
| '_ \ @
| | | |@
|_| |_|@
@
@@
@
@
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
@
@
_ __ @
| '_ \ @
| |_) |@
| .__/ @
| | @
|_| @@
@
@
__ _ @
/ _\` |@
| (_| |@
\__, |@
| |@
|_|@@
@
@
_ __ @
| '__|@
| | @
|_| @
@
@@
@
@
___ @
/ __|@
\__ \@
|___/@
@
@@
_ @
| | @
| |_ @
| __|@
| |_ @
\__|@
@
@@
@
@
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
@
@
__ __@
\ \ / /@
\ V / @
\_/ @
@
@@
@
@
__ __@
\ \ /\ / /@
\ V V / @
\_/\_/ @
@
@@
@
@
__ __@
\ \/ /@
> < @
/_/\_\@
@
@@
@
@
_ _ @
| | | |@
| |_| |@
\__, |@
__/ |@
|___/ @@
@
@
____@
|_ /@
/ / @
/___|@
@
@@
__@
/ /@
| | @
/ / @
\ \ @
| | @
\_\@
@@
_ @
| |@
| |@
| |@
| |@
| |@
| |@
|_|@@
__ @
\ \ @
| | @
\ \@
/ /@
| | @
/_/ @
@@
/\/|@
|/\/ @
$ @
$ @
$ @
$ @
@
@@
_ _ @
(_)_(_) @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
_ _ @
(_)_(_)@
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
_ _ @
(_) (_)@
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
_ _ @
(_) (_)@
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
_ _ @
(_) (_)@
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
_ _ @
(_) (_)@
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
___ @
/ _ \ @
| | ) |@
| |< < @
| | ) |@
| ||_/ @
|_| @
@@
160 NO-BREAK SPACE
$@
$@
$@
$@
$@
$@
$@
$@@
161 INVERTED EXCLAMATION MARK
_ @
(_)@
| |@
| |@
| |@
|_|@
@
@@
162 CENT SIGN
@
_ @
| | @
/ __)@
| (__ @
\ )@
|_| @
@@
163 POUND SIGN
___ @
/ ,_\ @
_| |_ @
|__ __| @
| |____ @
(_,_____|@
@
@@
164 CURRENCY SIGN
@
/\___/\@
\ _ /@
| (_) |@
/ ___ \@
\/ \/@
@
@@
165 YEN SIGN
__ __ @
\ \ / / @
_\ V /_ @
|___ ___|@
|___ ___|@
|_| @
@
@@
166 BROKEN BAR
_ @
| |@
| |@
|_|@
_ @
| |@
| |@
|_|@@
167 SECTION SIGN
__ @
_/ _)@
/ \ \ @
\ \\ \@
\ \_/@
(__/ @
@
@@
168 DIAERESIS
_ _ @
(_) (_)@
$ $ @
$ $ @
$ $ @
$ $ @
@
@@
169 COPYRIGHT SIGN
________ @
/ ____ \ @
/ / ___| \ @
| | | |@
| | |___ |@
\ \____| / @
\________/ @
@@
170 FEMININE ORDINAL INDICATOR
__ _ @
/ _\` |@
| (_| |@
\__,_|@
|_____|@
$ @
@
@@
171 LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
____@
/ / /@
/ / / @
< < < @
\ \ \ @
\_\_\@
@
@@
172 NOT SIGN
@
@
______ @
|____ |@
|_|@
$ @
@
@@
173 SOFT HYPHEN
@
@
_____ @
|_____|@
$ @
$ @
@
@@
174 REGISTERED SIGN
________ @
/ ____ \ @
/ | _ \ \ @
| | |_) | |@
| | _ < |@
\ |_| \_\ / @
\________/ @
@@
175 MACRON
______ @
|______|@
$ @
$ @
$ @
$ @
@
@@
176 DEGREE SIGN
__ @
/ \ @
| () |@
\__/ @
$ @
$ @
@
@@
177 PLUS-MINUS SIGN
_ @
_| |_ @
|_ _|@
|_| @
_____ @
|_____|@
@
@@
178 SUPERSCRIPT TWO
___ @
|_ )@
/ / @
/___|@
$ @
$ @
@
@@
179 SUPERSCRIPT THREE
____@
|__ /@
|_ \@
|___/@
$ @
$ @
@
@@
180 ACUTE ACCENT
__@
/_/@
$ @
$ @
$ @
$ @
@
@@
181 MICRO SIGN
@
@
_ _ @
| | | |@
| |_| |@
| ._,_|@
| | @
|_| @@
182 PILCROW SIGN
______ @
/ |@
| (| || |@
\__ || |@
| || |@
|_||_|@
@
@@
183 MIDDLE DOT
@
@
_ @
(_)@
$ @
$ @
@
@@
184 CEDILLA
@
@
@
@
@
_ @
)_)@
@@
185 SUPERSCRIPT ONE
_ @
/ |@
| |@
|_|@
$ @
$ @
@
@@
186 MASCULINE ORDINAL INDICATOR
___ @
/ _ \ @
| (_) |@
\___/ @
|_____|@
$ @
@
@@
187 RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
____ @
\ \ \ @
\ \ \ @
> > >@
/ / / @
/_/_/ @
@
@@
188 VULGAR FRACTION ONE QUARTER
_ __ @
/ | / / @
| |/ / _ @
|_/ / | | @
/ /|_ _|@
/_/ |_| @
@
@@
189 VULGAR FRACTION ONE HALF
_ __ @
/ | / / @
| |/ /__ @
|_/ /_ )@
/ / / / @
/_/ /___|@
@
@@
190 VULGAR FRACTION THREE QUARTERS
____ __ @
|__ / / / @
|_ \/ / _ @
|___/ / | | @
/ /|_ _|@
/_/ |_| @
@
@@
191 INVERTED QUESTION MARK
_ @
(_) @
| | @
/ / @
| (__ @
\___|@
@
@@
192 LATIN CAPITAL LETTER A WITH GRAVE
__ @
\_\ @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
193 LATIN CAPITAL LETTER A WITH ACUTE
__ @
/_/ @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
194 LATIN CAPITAL LETTER A WITH CIRCUMFLEX
//\ @
|/_\| @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
195 LATIN CAPITAL LETTER A WITH TILDE
/\/| @
|/\/ @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
196 LATIN CAPITAL LETTER A WITH DIAERESIS
_ _ @
(_)_(_) @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
197 LATIN CAPITAL LETTER A WITH RING ABOVE
_ @
(o) @
/ \ @
/ _ \ @
/ ___ \ @
/_/ \_\@
@
@@
198 LATIN CAPITAL LETTER AE
_______ @
/ ____|@
/ |__ @
/ /| __| @
/ ___ |____ @
/_/ |______|@
@
@@
199 LATIN CAPITAL LETTER C WITH CEDILLA
_____ @
/ ____|@
| | $ @
| | $ @
| |____ @
\_____|@
)_) @
@@
200 LATIN CAPITAL LETTER E WITH GRAVE
__ @
_\_\_ @
| ____|@
| _| @
| |___ @
|_____|@
@
@@
201 LATIN CAPITAL LETTER E WITH ACUTE
__ @
_/_/_ @
| ____|@
| _| @
| |___ @
|_____|@
@
@@
202 LATIN CAPITAL LETTER E WITH CIRCUMFLEX
//\ @
|/ \| @
| ____|@
| _| @
| |___ @
|_____|@
@
@@
203 LATIN CAPITAL LETTER E WITH DIAERESIS
_ _ @
(_) (_)@
| ____|@
| _| @
| |___ @
|_____|@
@
@@
204 LATIN CAPITAL LETTER I WITH GRAVE
__ @
\_\ @
|_ _|@
| | @
| | @
|___|@
@
@@
205 LATIN CAPITAL LETTER I WITH ACUTE
__ @
/_/ @
|_ _|@
| | @
| | @
|___|@
@
@@
206 LATIN CAPITAL LETTER I WITH CIRCUMFLEX
//\ @
|/_\|@
|_ _|@
| | @
| | @
|___|@
@
@@
207 LATIN CAPITAL LETTER I WITH DIAERESIS
_ _ @
(_)_(_)@
|_ _| @
| | @
| | @
|___| @
@
@@
208 LATIN CAPITAL LETTER ETH
_____ @
| __ \ @
_| |_ | |@
|__ __|| |@
| |__| |@
|_____/ @
@
@@
209 LATIN CAPITAL LETTER N WITH TILDE
/\/| @
|/\/_ @
| \ | |@
| \| |@
| |\ |@
|_| \_|@
@
@@
210 LATIN CAPITAL LETTER O WITH GRAVE
__ @
\_\ @
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
211 LATIN CAPITAL LETTER O WITH ACUTE
__ @
/_/ @
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
212 LATIN CAPITAL LETTER O WITH CIRCUMFLEX
//\ @
|/_\| @
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
213 LATIN CAPITAL LETTER O WITH TILDE
/\/| @
|/\/ @
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
214 LATIN CAPITAL LETTER O WITH DIAERESIS
_ _ @
(_)_(_)@
/ _ \ @
| | | |@
| |_| |@
\___/ @
@
@@
215 MULTIPLICATION SIGN
@
@
/\/\@
> <@
\/\/@
$ @
@
@@
216 LATIN CAPITAL LETTER O WITH STROKE
_____ @
/ __// @
| | // |@
| |//| |@
| //_| |@
//___/ @
@
@@
217 LATIN CAPITAL LETTER U WITH GRAVE
__ @
_\_\_ @
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
218 LATIN CAPITAL LETTER U WITH ACUTE
__ @
_/_/_ @
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
219 LATIN CAPITAL LETTER U WITH CIRCUMFLEX
//\ @
|/ \| @
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
220 LATIN CAPITAL LETTER U WITH DIAERESIS
_ _ @
(_) (_)@
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
221 LATIN CAPITAL LETTER Y WITH ACUTE
__ @
__/_/__@
\ \ / /@
\ V / @
| | @
|_| @
@
@@
222 LATIN CAPITAL LETTER THORN
_ @
| |___ @
| __ \ @
| |__) |@
| ___/ @
|_| @
@
@@
223 LATIN SMALL LETTER SHARP S
___ @
/ _ \ @
| | ) |@
| |< < @
| | ) |@
| ||_/ @
|_| @
@@
224 LATIN SMALL LETTER A WITH GRAVE
__ @
\_\ @
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
225 LATIN SMALL LETTER A WITH ACUTE
__ @
/_/ @
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
226 LATIN SMALL LETTER A WITH CIRCUMFLEX
//\ @
|/ \| @
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
227 LATIN SMALL LETTER A WITH TILDE
/\/| @
|/\/ @
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
228 LATIN SMALL LETTER A WITH DIAERESIS
_ _ @
(_) (_)@
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
229 LATIN SMALL LETTER A WITH RING ABOVE
__ @
(()) @
__ _ @
/ _\` |@
| (_| |@
\__,_|@
@
@@
230 LATIN SMALL LETTER AE
@
@
__ ____ @
/ _\` _ \@
| (_| __/@
\__,____|@
@
@@
231 LATIN SMALL LETTER C WITH CEDILLA
@
@
___ @
/ __|@
| (__ @
\___|@
)_) @
@@
232 LATIN SMALL LETTER E WITH GRAVE
__ @
\_\ @
___ @
/ _ \@
| __/@
\___|@
@
@@
233 LATIN SMALL LETTER E WITH ACUTE
__ @
/_/ @
___ @
/ _ \@
| __/@
\___|@
@
@@
234 LATIN SMALL LETTER E WITH CIRCUMFLEX
//\ @
|/ \|@
___ @
/ _ \@
| __/@
\___|@
@
@@
235 LATIN SMALL LETTER E WITH DIAERESIS
_ _ @
(_) (_)@
___ @
/ _ \ @
| __/ @
\___| @
@
@@
236 LATIN SMALL LETTER I WITH GRAVE
__ @
\_\@
_ @
| |@
| |@
|_|@
@
@@
237 LATIN SMALL LETTER I WITH ACUTE
__@
/_/@
_ @
| |@
| |@
|_|@
@
@@
238 LATIN SMALL LETTER I WITH CIRCUMFLEX
//\ @
|/ \|@
_ @
| | @
| | @
|_| @
@
@@
239 LATIN SMALL LETTER I WITH DIAERESIS
_ _ @
(_) (_)@
_ @
| | @
| | @
|_| @
@
@@
240 LATIN SMALL LETTER ETH
/\/\ @
> < @
\/\ \ @
/ _\` |@
| (_) |@
\___/ @
@
@@
241 LATIN SMALL LETTER N WITH TILDE
/\/| @
|/\/ @
_ __ @
| '_ \ @
| | | |@
|_| |_|@
@
@@
242 LATIN SMALL LETTER O WITH GRAVE
__ @
\_\ @
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
243 LATIN SMALL LETTER O WITH ACUTE
__ @
/_/ @
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
244 LATIN SMALL LETTER O WITH CIRCUMFLEX
//\ @
|/ \| @
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
245 LATIN SMALL LETTER O WITH TILDE
/\/| @
|/\/ @
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
246 LATIN SMALL LETTER O WITH DIAERESIS
_ _ @
(_) (_)@
___ @
/ _ \ @
| (_) |@
\___/ @
@
@@
247 DIVISION SIGN
_ @
(_) @
_______ @
|_______|@
_ @
(_) @
@
@@
248 LATIN SMALL LETTER O WITH STROKE
@
@
____ @
/ _//\ @
| (//) |@
\//__/ @
@
@@
249 LATIN SMALL LETTER U WITH GRAVE
__ @
\_\ @
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
250 LATIN SMALL LETTER U WITH ACUTE
__ @
/_/ @
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
251 LATIN SMALL LETTER U WITH CIRCUMFLEX
//\ @
|/ \| @
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
252 LATIN SMALL LETTER U WITH DIAERESIS
_ _ @
(_) (_)@
_ _ @
| | | |@
| |_| |@
\__,_|@
@
@@
253 LATIN SMALL LETTER Y WITH ACUTE
__ @
/_/ @
_ _ @
| | | |@
| |_| |@
\__, |@
__/ |@
|___/ @@
254 LATIN SMALL LETTER THORN
_ @
| | @
| |__ @
| '_ \ @
| |_) |@
| .__/ @
| | @
|_| @@
255 LATIN SMALL LETTER Y WITH DIAERESIS
_ _ @
(_) (_)@
_ _ @
| | | |@
| |_| |@
\__, |@
__/ |@
|___/ @@
0x02BC MODIFIER LETTER APOSTROPHE
@
@
))@
@
@
@
@
@@
0x02BD MODIFIER LETTER REVERSED COMMA
@
@
((@
@
@
@
@
@@
0x037A GREEK YPOGEGRAMMENI
@
@
@
@
@
@
@
||@@
0x0387 GREEK ANO TELEIA
@
$ @
_ @
(_)@
@
$ @
@
@@
0x0391 GREEK CAPITAL LETTER ALPHA
___ @
/ _ \ @
| |_| |@
| _ |@
| | | |@
|_| |_|@
@
@@
0x0392 GREEK CAPITAL LETTER BETA
____ @
| _ \ @
| |_) )@
| _ ( @
| |_) )@
|____/ @
@
@@
0x0393 GREEK CAPITAL LETTER GAMMA
_____ @
| ___)@
| |$ @
| |$ @
| | @
|_| @
@
@@
0x0394 GREEK CAPITAL LETTER DELTA
@
/\ @
/ \ @
/ /\ \ @
/ /__\ \ @
/________\@
@
@@
0x0395 GREEK CAPITAL LETTER EPSILON
_____ @
| ___)@
| |_ @
| _) @
| |___ @
|_____)@
@
@@
0x0396 GREEK CAPITAL LETTER ZETA
______@
(___ /@
/ / @
/ / @
/ /__ @
/_____)@
@
@@
0x0397 GREEK CAPITAL LETTER ETA
_ _ @
| | | |@
| |_| |@
| _ |@
| | | |@
|_| |_|@
@
@@
0x0398 GREEK CAPITAL LETTER THETA
____ @
/ __ \ @
| |__| |@
| __ |@
| |__| |@
\____/ @
@
@@
0x0399 GREEK CAPITAL LETTER IOTA
___ @
( )@
| | @
| | @
| | @
(___)@
@
@@
0x039A GREEK CAPITAL LETTER KAPPA
_ __@
| | / /@
| |/ / @
| < @
| |\ \ @
|_| \_\@
@
@@
0x039B GREEK CAPITAL LETTER LAMDA
@
/\ @
/ \ @
/ /\ \ @
/ / \ \ @
/_/ \_\@
@
@@
0x039C GREEK CAPITAL LETTER MU
__ __ @
| \ / |@
| v |@
| |\_/| |@
| | | |@
|_| |_|@
@
@@
0x039D GREEK CAPITAL LETTER NU
_ _ @
| \ | |@
| \| |@
| |@
| |\ |@
|_| \_|@
@
@@
0x039E GREEK CAPITAL LETTER XI
_____ @
(_____)@
___ @
(___) @
_____ @
(_____)@
@
@@
0x039F GREEK CAPITAL LETTER OMICRON
___ @
/ _ \ @
| | | |@
| | | |@
| |_| |@
\___/ @
@
@@
0x03A0 GREEK CAPITAL LETTER PI
_______ @
( _ )@
| | | | @
| | | | @
| | | | @
|_| |_| @
@
@@
0x03A1 GREEK CAPITAL LETTER RHO
____ @
| _ \ @
| |_) )@
| __/ @
| | @
|_| @
@
@@
0x03A3 GREEK CAPITAL LETTER SIGMA
______ @
\ ___)@
\ \ @
> > @
/ /__ @
/_____)@
@
@@
0x03A4 GREEK CAPITAL LETTER TAU
_____ @
(_ _)@
| | @
| | @
| | @
|_| @
@
@@
0x03A5 GREEK CAPITAL LETTER UPSILON
__ __ @
(_ \ / _)@
\ v / @
| | @
| | @
|_| @
@
@@
0x03A6 GREEK CAPITAL LETTER PHI
_ @
_| |_ @
/ \ @
( (| |) )@
\_ _/ @
|_| @
@
@@
0x03A7 GREEK CAPITAL LETTER CHI
__ __@
\ \ / /@
\ v / @
> < @
/ ^ \ @
/_/ \_\@
@
@@
0x03A8 GREEK CAPITAL LETTER PSI
_ _ _ @
| || || |@
| \| |/ |@
\_ _/ @
| | @
|_| @
@
@@
0x03A9 GREEK CAPITAL LETTER OMEGA
____ @
/ __ \ @
| | | | @
| | | | @
_\ \/ /_ @
(___||___)@
@
@@
0x03B1 GREEK SMALL LETTER ALPHA
@
@
__ __@
/ \/ /@
( () < @
\__/\_\@
@
@@
0x03B2 GREEK SMALL LETTER BETA
___ @
/ _ \ @
| |_) )@
| _ < @
| |_) )@
| __/ @
| | @
|_| @@
0x03B3 GREEK SMALL LETTER GAMMA
@
@
_ _ @
( \ / )@
\ v / @
| | @
| | @
|_| @@
0x03B4 GREEK SMALL LETTER DELTA
__ @
/ _) @
\ \ @
/ _ \ @
( (_) )@
\___/ @
@
@@
0x03B5 GREEK SMALL LETTER EPSILON
@
@
___ @
/ __)@
> _) @
\___)@
@
@@
0x03B6 GREEK SMALL LETTER ZETA
_____ @
\__ ) @
/ / @
/ / @
| |__ @
\__ \ @
) )@
(_/ @@
0x03B7 GREEK SMALL LETTER ETA
@
@
_ __ @
| '_ \ @
| | | |@
|_| | |@
| |@
|_|@@
0x03B8 GREEK SMALL LETTER THETA
___ @
/ _ \ @
| |_| |@
| _ |@
| |_| |@
\___/ @
@
@@
0x03B9 GREEK SMALL LETTER IOTA
@
@
_ @
| | @
| | @
\_)@
@
@@
0x03BA GREEK SMALL LETTER KAPPA
@
@
_ __@
| |/ /@
| < @
|_|\_\@
@
@@
0x03BB GREEK SMALL LETTER LAMDA
__ @
\ \ @
\ \ @
> \ @
/ ^ \ @
/_/ \_\@
@
@@
0x03BC GREEK SMALL LETTER MU
@
@
_ _ @
| | | |@
| |_| |@
| ._,_|@
| | @
|_| @@
0x03BD GREEK SMALL LETTER NU
@
@
_ __@
| |/ /@
| / / @
|__/ @
@
@@
0x03BE GREEK SMALL LETTER XI
\=\__ @
> __) @
( (_ @
> _) @
( (__ @
\__ \ @
) )@
(_/ @@
0x03BF GREEK SMALL LETTER OMICRON
@
@
___ @
/ _ \ @
( (_) )@
\___/ @
@
@@
0x03C0 GREEK SMALL LETTER PI
@
@
______ @
( __ )@
| || | @
|_||_| @
@
@@
0x03C1 GREEK SMALL LETTER RHO
@
@
___ @
/ _ \ @
| |_) )@
| __/ @
| | @
|_| @@
0x03C2 GREEK SMALL LETTER FINAL SIGMA
@
@
____ @
/ ___)@
( (__ @
\__ \ @
_) )@
(__/ @@
0x03C3 GREEK SMALL LETTER SIGMA
@
@
____ @
/ ._)@
( () ) @
\__/ @
@
@@
0x03C4 GREEK SMALL LETTER TAU
@
@
___ @
( )@
| | @
\_)@
@
@@
0x03C5 GREEK SMALL LETTER UPSILON
@
@
_ _ @
| | | |@
| |_| |@
\___/ @
@
@@
0x03C6 GREEK SMALL LETTER PHI
_ @
| | @
_| |_ @
/ \ @
( (| |) )@
\_ _/ @
| | @
|_| @@
0x03C7 GREEK SMALL LETTER CHI
@
@
__ __@
\ \ / /@
\ v / @
> < @
/ ^ \ @
/_/ \_\@@
0x03C8 GREEK SMALL LETTER PSI
@
@
_ _ _ @
| || || |@
| \| |/ |@
\_ _/ @
| | @
|_| @@
0x03C9 GREEK SMALL LETTER OMEGA
@
@
__ __ @
/ / _ \ \ @
| |_/ \_| |@
\___^___/ @
@
@@
0x03D1 GREEK THETA SYMBOL
___ @
/ _ \ @
( (_| |_ @
_ \ _ _)@
| |___| | @
\_____/ @
@
@@
0x03D5 GREEK PHI SYMBOL
@
@
_ __ @
| | / \ @
| || || )@
\_ _/ @
| | @
|_| @@
0x03D6 GREEK PI SYMBOL
@
@
_________ @
( _____ )@
| |_/ \_| |@
\___^___/ @
@
@@
-0x0005
alpha = a, beta = b, gamma = g, delta = d, epsilon = e @
zeta = z, eta = h, theta = q, iota = i, lamda = l, mu = m@
nu = n, xi = x, omicron = o, pi = p, rho = r, sigma = s @
phi = f, chi = c, psi = y, omega = w, final sigma = V @
pi symbol = v, theta symbol = J, phi symbol = j @
middle dot = :, ypogegrammeni = _ @
rough breathing = (, smooth breathing = ) @
acute accent = ', grave accent = \`, dialytika = ^ @@
` |
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.configuration;
import net.jamsimulator.jams.configuration.event.ConfigurationNodeChangeEvent;
import net.jamsimulator.jams.gui.util.converter.ValueConverter;
import net.jamsimulator.jams.gui.util.converter.ValueConverters;
import net.jamsimulator.jams.utils.CollectionUtils;
import net.jamsimulator.jams.utils.Validate;
import org.json.JSONObject;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
* Represents a configuration node. This class is used to manage configuration data easily.
* <p>
* A configuration node may be a {@link RootConfiguration} or a
* node of a {@link RootConfiguration}.
*/
@SuppressWarnings("unchecked")
public class Configuration {
public static final Set<Class<?>> NATIVE_CLASSES = Set.of(
Byte.class,
Short.class,
Integer.class,
Long.class,
Float.class,
Double.class,
Character.class,
Boolean.class,
String.class);
/**
* Objects natively supported but that can be overriden by a converter.
*/
public static final Set<Class<?>> SECONDARY_NATIVE_CLASSES = Set.of(List.class, Map.class);
protected String name;
protected Map<String, Object> map;
protected RootConfiguration root;
/**
* Creates a configuration using an absolute name, a data map and a root.
*
* @param name the name.
* @param map the map.
* @param root the root configuration.
*/
public Configuration(String name, Map<String, Object> map, RootConfiguration root) {
Validate.notNull(map, "Map cannot be null!");
Validate.isTrue(root != null || this instanceof RootConfiguration, "Root cannot be found!");
this.name = name;
this.map = map;
this.root = root;
}
public static boolean isObjectNativelySupported(Object o) {
return NATIVE_CLASSES.stream().anyMatch(target -> target.isInstance(o));
}
public static boolean isObjectSeondaryNativelySupported(Object o) {
return SECONDARY_NATIVE_CLASSES.stream().anyMatch(target -> target.isInstance(o));
}
/**
* Returns the absolute name of this configuration.
* <p>
* This is the name that should be used to access this configuration from
* the {@link RootConfiguration}.
*
* @return the absolute name.
*/
public String getAbsoluteName() {
return name;
}
/**
* Returns the relative name of this configuration. This is equivalent to the last
* node of the absolute name.
* <p>
* For example, if the absolute name of a configuration is "a.b.c" the relative name will be "c".
*
* @return the relative node.
*/
public String getRelativeName() {
int index = name.lastIndexOf(".");
if (index == -1 || index == name.length() - 1) return name;
return name.substring(index + 1);
}
/**
* Returns the {@link RootConfiguration} of the configuration.
*
* @return the {@link RootConfiguration}.
*/
public RootConfiguration getRoot() {
return root;
}
/**
* Returns the value that matches the given key, if present.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> Optional<T> get(String key) {
//Checks the key.
if (key.isEmpty() || key.startsWith(".") || key.endsWith("."))
throw new IllegalArgumentException("Bad key format: " + key + ".");
String[] array = key.split("\\.");
try {
//If the array length is 1 return the value from the map.
if (array.length == 1) return Optional.ofNullable((T) parseMap(key, map.get(key)));
//Iterates the child nodes.
Object obj = map.get(array[0]);
if (!(obj instanceof Map)) return Optional.empty();
Map<String, Object> child = (Map<String, Object>) obj;
for (int i = 1; i < array.length - 1; i++) {
obj = child.get(array[i]);
if (!(obj instanceof Map)) return Optional.empty();
child = (Map<String, Object>) obj;
}
//Returns the value.
return Optional.ofNullable((T) parseMap(key, child.get(array[array.length - 1])));
} catch (ClassCastException ex) {
return Optional.empty();
}
}
/**
* Returns the value that matches the given key, if present, and converts it with the
* {@link ValueConverter}
* that matches the given name.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> Optional<T> getAndConvert(String key, String converter) {
var c = ValueConverters.getByName(converter);
try {
if (c.isEmpty()) {
Optional<Object> optional = get(key);
if (optional.isEmpty()) return Optional.empty();
return Optional.of((T) optional.get());
} else {
return (Optional<T>) c.get().load(this, key);
}
} catch (ClassCastException ex) {
return Optional.empty();
}
}
/**
* Returns the value that matches the given key, if present, and converts it with the
* {@link ValueConverter}
* that matches the given type.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> Optional<T> getAndConvert(String key, Class<?> type) {
var c = ValueConverters.getByType(type);
try {
if (c.isEmpty()) {
System.err.println("Couldn't find converter for type " + type + "!");
Optional<Object> optional = get(key);
if (optional.isEmpty()) return Optional.empty();
return Optional.of((T) optional.get());
} else {
return (Optional<T>) c.get().load(this, key);
}
} catch (ClassCastException ex) {
return Optional.empty();
}
}
/**
* Returns the number that matches the given key, if present.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
*
* @param key the key.
* @return the number, if present and matches the type.
*/
public Optional<Number> getNumber(String key) {
return get(key);
}
/**
* Returns the value that matches the given key or the value given if not present.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param orElse the value returned if no element was found.
* @param <T> the value type.
* @return the value if present and matches the type. Else, returns the given element.
*/
public <T> T getOrElse(String key, T orElse) {
Optional<T> optional = get(key);
return optional.orElse(orElse);
}
/**
* Returns the value that matches the given key and converts it with the
* {@link ValueConverter} that matches the given type.
* If the value is not present returns the given value.
*
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> T getAndConvertOrElse(String key, String converter, T orElse) {
Optional<T> optional = getAndConvert(key, converter);
return optional.orElse(orElse);
}
/**
* Returns the value that matches the given key and converts it with the
* {@link ValueConverter} that matches the given name.
* If the value is not present returns the given value.
*
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> T getAndConvertOrElse(String key, Class<?> type, T orElse) {
Optional<T> optional = getAndConvert(key, type);
return optional.orElse(orElse);
}
/**
* Returns the value that matches the given key and converts it with the
* {@link ValueConverter} that matches the type of the given default value.
* If the value is not present returns the given value.
*
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @param <T> the value type.
* @return the value, if present and matches the type.
*/
public <T> T getAndConvertOrElse(String key, T orElse) {
Optional<T> optional = getAndConvert(key, orElse.getClass());
return optional.orElse(orElse);
}
/**
* Returns the configuration that matches the given key, if present.
* <p>
* If no configuration is present, one new configuration will be created, replacing
* any previous value.
* <p>
* You can get configurations stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the configuration "data" inside the child "node", you must use the
* key "node.data".
*
* @param key the key.
* @return the configuration.
*/
public Configuration getOrCreateConfiguration(String key) {
Optional<Configuration> config = get(key);
if (config.isPresent()) return config.get();
set(key, new HashMap<>());
config = get(key);
return config.orElseThrow();
}
/**
* Returns the value that matches the given key, if present.
* If the value is not a {@link String}, returns it's {@link Object#toString()} representation.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
* <p>
* This method will never return {@link Map} instances, but {@link Configuration} objects.
*
* @param key the key.
* @return the value as a {@link String}, if present.
* @throws ClassCastException whether the value doesn't match the given value type.
*/
public Optional<String> getString(String key) {
Optional<Object> optional = get(key);
return optional.map(Object::toString);
}
/**
* Returns the value that matches the given key, if present.
* If the value is not a {@link Enum<T>}, returns {@code Optional.empty()}.
* <p>
* You can get values stored in the child nodes of this configuration using the separator ".".
* For example, if you want to get the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method won't modify the structure of the configuration. If the node that should contain the
* wanted value is not present, the method will immediately return {@code Optional.empty()}.
*
* @param key the key.
* @return the value as a {@link String}, if present.
* @throws ClassCastException whether the value doesn't match the given value type.
*/
public <T extends Enum<T>> Optional<T> getEnum(Class<T> clazz, String key) {
Optional<String> optional = getString(key);
if (optional.isEmpty()) return Optional.empty();
try {
return Optional.of(Enum.valueOf(clazz, optional.get()));
} catch (IllegalArgumentException ex) {
return Optional.empty();
}
}
/**
* Returns all the children of this configuration. Maps are wrapped inside a configuration.
* <p>
* The given {@link Map} is an unmodifiable {@link Map} and it cannot be edited.
* Any modification results in a {@link UnsupportedOperationException}.
* <p>
* Whether the boolean "deep" is true, the map will contain all it's children values, and not
* the child configs. The name of these children values will be split using the separator ".".
*
* @param deep whether the map should cointain deep values.
* @return the map
*/
public Map<String, Object> getAll(boolean deep) {
Map<String, Object> map = new HashMap<>();
if (!deep) {
this.map.forEach((key, value) -> map.put(key, parseMap(key, value)));
} else {
this.map.forEach((key, value) -> {
value = parseMap(key, value);
if (value instanceof Configuration cConfig) {
String relName = cConfig.getRelativeName();
cConfig.getAll(true).forEach((cKey, cValue) ->
map.put(relName + "." + cKey, cValue));
} else map.put(key, value);
});
}
return Collections.unmodifiableMap(map);
}
/**
* Sets the given value into the given key.
* <p>
* You can store values into the child nodes of this configuration using the separator ".".
* For example, if you want to store the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method modifies the structure of the configuration: it will make new {@link Map}s
* or override previous values that are not {@link Map}s to store the given object.
* <p>
* This method will never store {@link Configuration} or {@link Map} instances, but a deep copy of the {@link Map}s.
*
* @param key the key.
* @param value the value.
*/
public void set(String key, Object value) {
if (key.isEmpty() || key.startsWith(".") || key.endsWith("."))
throw new IllegalArgumentException("Bad key format: " + key + ".");
if (value instanceof Map) value = CollectionUtils.deepCopy(((Map<String, Object>) value));
else if (value instanceof Configuration) value = CollectionUtils.deepCopy(((Configuration) value).map);
String[] array = key.split("\\.");
Map<String, Object> current = map;
Object obj;
for (int i = 0; i < array.length - 1; i++) {
obj = current.get(array[i]);
if (!(obj instanceof Map)) {
if (value == null) return;
obj = new HashMap<>();
current.put(array[i], obj);
}
current = (Map<String, Object>) obj;
}
Object old = current.get(array[array.length - 1]);
String absoluteKey = name == null || name.isEmpty() ? key : name + "." + key;
ConfigurationNodeChangeEvent.Before before = root.callEvent(new ConfigurationNodeChangeEvent.Before(this, absoluteKey, old, value));
if (before.isCancelled()) return;
Object nValue = before.getNewValue().orElse(null);
if (value != nValue) {
if (nValue instanceof Map) value = CollectionUtils.deepCopy(((Map<String, Object>) nValue));
else if (nValue instanceof Configuration) value = CollectionUtils.deepCopy(((Configuration) nValue).map);
else value = nValue;
}
current.put(array[array.length - 1], value);
root.callEvent(new ConfigurationNodeChangeEvent.After(this, absoluteKey, old, value));
}
/**
* Sets the converted given value into the given key.
* The value is converted by the
* {@link ValueConverter}
* that matches the given type.
* <p>
* If the converted is not found, or it's not valid this method returns false.
* <p>
* You can store values into the child nodes of this configuration using the separator ".".
* For example, if you want to store the value "data" inside the child "node", you must use the
* key "node.data".
* <p>
* This method modifies the structure of the configuration: it will make new {@link Map}s
* or override previous values that are not {@link Map}s to store the given object.
* <p>
* This method will never store {@link Configuration} or {@link Map} instances, but a deep copy of the {@link Map}s.
*
* @param key the key.
* @param value the value.
*/
public boolean convertAndSet(String key, Object value, Class<?> converter) {
if (isObjectNativelySupported(value)) {
set(key, value);
return true;
}
var c = ValueConverters.getByType(converter);
if (c.isEmpty() || !c.get().conversionClass().isInstance(value)) {
if(isObjectSeondaryNativelySupported(value)) {
set(key, value);
return true;
}
return false;
}
c.get().save(this, key, value);
return true;
}
/**
* Removes the value that matches the given key from the configuration.
* <p>
* If the value is a {@link Configuration}, this will also remove all its children.
*
* @param key the key.
*/
public void remove(String key) {
set(key, null);
}
/**
* Adds all nodes that are present in the given {@link Configuration} but
* not in this instance.
* <p>
* This method won't override any present node, unless the old value is not a {@link Configuration}
* but the new one is.
*
* @param configuration the configuration.
*/
public void addNotPresentValues(Configuration configuration) {
configuration.getAll(false).forEach((key, value) -> {
if (value instanceof Configuration) {
if (!map.containsKey(key)) {
set(key, value);
} else {
Object tValue = get(key).orElse(null);
if (tValue instanceof Configuration) {
((Configuration) tValue).addNotPresentValues((Configuration) value);
} else set(key, value);
}
} else {
if (!map.containsKey(key)) {
map.put(key, value);
}
}
});
}
/**
* Removes this {@link Configuration} from the {@link RootConfiguration}.
* This will throw an {@link IllegalStateException} if this node is the root node.
* <p>
* Any further modification on this configuration won't cause any effect on the {@link RootConfiguration}.
*/
public void remove() {
if (root == this) throw new IllegalStateException("You cannot remove the root of a configuration!");
root.remove(name);
}
/**
* Removes all children from this configuration.
* Any further modification on any child configuration won't cause any effect on this configuration.
*/
public void clear() {
map.clear();
}
/**
* Saves this {@link Configuration} as a JSON string into the given file.
*
* @param useFormat whether the output text should be formatted.
* @param file the file.
* @throws IOException writer IOException.
*/
public void save(File file, boolean useFormat) throws IOException {
FileWriter writer = new FileWriter(file);
JSONObject object = new JSONObject(map);
if (useFormat)
writer.write(object.toString(1));
else writer.write(object.toString());
writer.close();
}
private Object parseMap(String key, Object o) {
if (o instanceof Map) {
String name = key;
if (this.name != null && !this.name.isEmpty())
name = this.name + "." + name;
return new Configuration(name, (Map<String, Object>) o, root);
}
return o;
}
@Override
public String toString() {
return "Configuration{" +
"name='" + name + '\'' +
", map=" + map +
'}';
}
}
|
<gh_stars>1000+
from dagster import check
from dagster.config.snap import ConfigSchemaSnapshot, snap_from_config_type
from dagster.core.definitions.pipeline import PipelineDefinition
from dagster.utils import merge_dicts
def build_config_schema_snapshot(pipeline_def):
check.inst_param(pipeline_def, "pipeline_def", PipelineDefinition)
all_config_snaps_by_key = {}
for mode in pipeline_def.available_modes:
run_config_schema = pipeline_def.get_run_config_schema(mode)
all_config_snaps_by_key = merge_dicts(
all_config_snaps_by_key,
{ct.key: snap_from_config_type(ct) for ct in run_config_schema.all_config_types()},
)
return ConfigSchemaSnapshot(all_config_snaps_by_key)
|
#!/usr/bin/env sh
# generated from catkin/python/catkin/environment_cache.py
# based on a snapshot of the environment before and after calling the setup script
# it emulates the modifications of the setup script without recurring computations
# new environment variables
# modified environment variables
export LD_LIBRARY_PATH="/opt/ros/kinetic/lib:/opt/ros/kinetic/lib/x86_64-linux-gnu"
export PATH="/opt/ros/kinetic/bin:/home/casch/bin:/home/casch/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin"
export PKG_CONFIG_PATH="/opt/ros/kinetic/lib/pkgconfig:/opt/ros/kinetic/lib/x86_64-linux-gnu/pkgconfig"
export PWD="/home/casch/turtle/build"
export PYTHONPATH="/opt/ros/kinetic/lib/python2.7/dist-packages" |
version=1.2.x
# Build docs, copy to correct docs folder, delete build
cd ../docs/src
sphinx-apidoc -o ./doc ../../dscribe
make html SPHINXOPTS="-D version=$version -D release=$version"
cp -a build/html/. ../latest
rm -r build
|
function generateUserHTML(array $userData): string {
$html = '';
foreach ($userData as $v) {
$html .= "<hr>\n";
$html .= "<p class=\"lead\">" . $v['username'] . "</p>\n";
$html .= "<p class=\"lead\">" . $v['level'] . "</p>\n";
$html .= "<p class=\"lead\">" . $v['hp'] . "</p>\n";
$html .= "<blockquote class=\"blockquote\">\n";
$html .= "<footer class=\"blockquote-footer\">" . $v['alamat'] . "</footer>\n";
$html .= "<br>\n";
$html .= "</blockquote>\n";
$html .= "<hr>\n";
}
return $html;
}
// Example usage
$userData = [
['username' => 'JohnDoe', 'level' => '5', 'hp' => '100', 'alamat' => '123 Main St'],
['username' => 'JaneSmith', 'level' => '3', 'hp' => '75', 'alamat' => '456 Elm St']
];
echo generateUserHTML($userData); |
import React, {Component} from 'react';
import * as firebase from 'firebase';
import { NavigationActions, StackNavigator, withNavigation} from 'react-navigation';
import{AsyncStorage, TouchableOpacity, Dimensions, View, Image, ScrollView, FlatList} from 'react-native';
import Icon from 'react-native-vector-icons/SimpleLineIcons';
import { Container, Body, Thumbnail, Text, List, Right, ListItem} from 'native-base';
import ConfigApp from '../utils/ConfigApp';
import ListEmpty from './ListEmpty';
import Ionicons from 'react-native-vector-icons/Ionicons';
import Strings from '../utils/Strings';
var styles = require('../../assets/files/Styles');
var {height, width} = Dimensions.get('window');
class DietFav extends React.Component {
constructor(props) {
super(props);
this.state = {
diets: []
}
}
_isMounted = false;
componentDidMount () {
this._isMounted = true;
if (this._isMounted) {
this.fetchDiets();
}
}
componentWillUnmount() {
this._isMounted = false;
}
DietDetails (item) {
const navigateAction = NavigationActions.navigate({
routeName: 'DietDetailsScreen',
params: {item}
});
this.props.navigation.dispatch(navigateAction);
}
renderFooterDiets = () => {
const diets = this.state.diets
if (diets.length != 0) return null;
return (
<ListEmpty title={Strings.ST67}/>
);
};
removeDiet = async (diet_id) => {
try {
var user = firebase.auth().currentUser;
uid = user.uid;
const diets = await AsyncStorage.getItem('diets');
let dietsFav = JSON.parse(diets);
dietsItems = dietsFav.filter(function(e){ return e.diet_id !== diet_id && e.userId == uid })
await AsyncStorage.setItem('diets', JSON.stringify(dietsItems));
this.setState({
...this.state,
diets: dietsItems || []
});
} catch(error) {
}};
render () {
return (
<ScrollView>
<View style={{margin: 5, marginTop: 5}}>
<List>
<FlatList
data={this.state.diets}
refreshing="true"
renderItem={({item, index}) =>
<ListItem style={{paddingLeft: 0, marginLeft: 0, backgroundColor:'#FFF', opacity: 1, borderColor: 'rgba(0,0,0,0.05)', borderBottomWidth: 0}} onPress={() => this.DietDetails(item)} >
<Thumbnail square size={80} source={{ uri: ConfigApp.URL+'images/'+item.diet_image }} style={{paddingLeft: 10, marginLeft: 10, borderRadius: 4}} />
<Body style={{paddingLeft: 0, marginLeft: 0}}>
<Text numberOfLines={2} style={{fontSize: 13, marginBottom: 3, fontWeight: 'bold'}}>
{item.diet_title.toUpperCase()}
</Text>
</Body>
<Right>
<TouchableOpacity onPress={this.removeDiet.bind(this, item.diet_id)} activeOpacity={1}>
<Ionicons name="md-close" style={{fontSize: 19, backgroundColor: '#fbfbfc', borderColor: '#eee', borderWidth: 1, borderRadius: 5, paddingVertical: 5, paddingHorizontal: 10}}/>
</TouchableOpacity>
</Right>
</ListItem>
}
keyExtractor={(item, index) => index.toString()}
ListFooterComponent={this.renderFooterDiets}
/>
</List>
</View>
</ScrollView>
)
}
async fetchDiets () {
var user = firebase.auth().currentUser;
uid = user.uid;
let dietsJSON= await AsyncStorage.getItem('diets');
let dietsFav = JSON.parse(dietsJSON);
dietsItems = dietsFav.filter(function(e){
return e.userId == uid
})
const dietsArray = dietsItems || [];
this.setState({
...this.state,
diets: dietsArray
});
}
}
export default withNavigation(DietFav);
|
<gh_stars>1-10
import React, { Component } from 'react';
import Players from '../components/Players';
import MoveDisplay from '../components/MoveDisplay';
import ClaimDisplay from '../components/ClaimDisplay';
import Timer from '../components/Timer';
import CardGroup from '../components/CardGroup';
import MakeMoveModal from '../components/MakeMoveModal';
import ClaimModal from '../components/ClaimModal';
import CorrectClaimModal from '../components/CorrectClaimModal';
import ScoreDisplay from '../components/ScoreDisplay';
import {
SET_INDICATORS,
CLAIMED,
UNCLAIMED,
SET_NAME_MAP,
PLAYER_UUID,
PLAYER_NAME,
PING_PONG_INTERVAL_MS
} from '../components/Constants';
import './Game.css';
class Game extends Component {
constructor(props) {
super(props);
this.state = {
uuid: '',
gameUuid: '',
hand: [],
nPlayers: 0,
showMakeMoveModal: false,
showClaimModal: false,
showFullClaim: false,
claims: this.allUnclaimed(),
lastClaim: {},
score: {
even: 0,
odd: 0,
discard: 0
},
playerNames: this.defaultNames()
};
const audioUrl = process.env.PUBLIC_URL + '/bell.mp3';
this.bell = new Audio(audioUrl);
setInterval(this.pingPong.bind(this), PING_PONG_INTERVAL_MS)
}
pingPong() {
this.sendMessage({
'action': 'ping_pong',
'game_uuid': this.state.gameUuid,
'payload': {
'key': this.state.uuid
}
})
}
defaultNames() {
const playerNames = {};
[...Array(8).keys()].forEach((p) => playerNames[p] = 'Player ' + p);
return playerNames;
}
allUnclaimed() {
const claims = {};
SET_INDICATORS.forEach((s) => claims[s] = UNCLAIMED);
return claims;
}
startGame() {
this.sendMessage({
action: 'start_game',
game_uuid: this.state.gameUuid,
payload: {
key: this.state.uuid
}
})
}
makeClaim(possessions) {
this.hideClaimModal();
this.sendMessage({
action: 'claim',
game_uuid: this.state.gameUuid,
payload: {
key: this.state.uuid,
possessions
}
})
}
playCard(card) {
// Make sure the user is looking at the make move modal.
if (this.state.showMakeMoveModal && card) {
this.makeMove(card, this.state.toBeRespondent);
}
}
hideClaimModal() {
this.setState({
showClaimModal: false
});
}
hideMakeMoveModal() {
this.setState({
showMakeMoveModal: false,
toBeRespondent: undefined
});
}
showMakeMoveModal(toBeRespondent) {
if (this.state.turn !== this.state.playerN) return;
if (this.state.playerN % 2 === toBeRespondent % 2) return;
this.setState({
showMakeMoveModal: true,
toBeRespondent
});
}
makeMove(card, toBeRespondent) {
this.sendMessage({
action: 'move',
game_uuid: this.state.gameUuid,
payload: {
key: this.state.uuid,
respondent: toBeRespondent,
card
}
})
this.setState({
showMakeMoveModal: false
});
}
register(payload) {
const {
player_uuid,
player_n,
n_players,
time_limit,
game_uuid,
player_names
} = payload;
localStorage.setItem(PLAYER_UUID, player_uuid);
this.setState({
uuid: player_uuid,
playerN: player_n,
nPlayers: n_players,
timeLimit: time_limit,
gameUuid: game_uuid,
claims: this.allUnclaimed(),
lastClaim: {},
score: {
even: 0,
odd: 0,
discard: 0
},
success: undefined,
card: undefined,
respondent: undefined,
interrogator: undefined,
moveTimestamp: undefined,
playerNames: { ...this.defaultNames(), ...player_names }
});
window.history.pushState({ gameUuid: game_uuid },
'Literature',
'/game/' + game_uuid);
if (player_n === -1) {
console.log('All seats are full in the room');
}
}
lastMove(payload) {
const {
n_cards,
move_timestamp,
turn,
success,
card,
respondent,
interrogator,
score
} = payload;
if (turn === this.state.playerN && turn !== this.state.turn)
this.bell.play();
this.setState({
nCards: n_cards,
moveTimestamp: move_timestamp,
turn,
success,
card,
respondent,
interrogator,
score: {
...this.state.score,
...score
}
})
if (turn !== this.state.playerN) this.hideMakeMoveModal();
}
claim(payload) {
const {
move_timestamp,
n_cards,
claim_by,
half_suit,
turn,
success,
truth,
score
} = payload;
if (turn === this.state.playerN && turn !== this.state.turn)
this.bell.play();
const claims = { ...this.state.claims };
claims[SET_NAME_MAP[half_suit.half] + half_suit.suit] = CLAIMED;
this.setState({
nCards: n_cards,
moveTimestamp: move_timestamp,
turn,
score: {
...this.state.score,
...score
},
claims,
lastClaim: {
claimBy: claim_by,
success,
truth,
halfSuit: half_suit
}
})
if (turn !== this.state.playerN) this.hideMakeMoveModal();
}
playerNames(payload) {
this.setState({ playerNames: payload.names });
}
handleMessage(message) {
let data = JSON.parse(message.data);
console.log('Received: ' + JSON.stringify(data));
switch (data.action) {
case 'register':
this.register(data.payload)
break;
case 'hand':
this.setState({
hand: data.payload
});
break;
case 'last_move':
this.lastMove(data.payload)
break;
case 'claim':
this.claim(data.payload)
break;
case 'player_names':
this.playerNames(data.payload)
break;
case 'ping_pong':
break;
default:
throw new Error('Unhandled action: ' + data.action);
}
}
sendMessage(payload) {
console.log('Sending: ' + JSON.stringify(payload))
this.state.sender.send(JSON.stringify(payload));
}
componentDidMount() {
let ws_scheme;
if (window.location.protocol === "https:") {
ws_scheme = "wss://";
} else {
ws_scheme = "ws://"
};
const queryParams = new URLSearchParams(window.location.search);
const pathParams = window.location.pathname.split('/');
const playerUuid = localStorage.getItem(PLAYER_UUID);
const storedUsername = localStorage.getItem(PLAYER_NAME);
const username = queryParams.get('username') || storedUsername
localStorage.setItem(PLAYER_NAME, username || '');
const sendParams = window.jQuery.param({
n_players: queryParams.get('n_players'),
time_limit: queryParams.get('time_limit'),
username,
game_uuid: pathParams[pathParams.length - 1],
player_uuid: playerUuid
});
const receiver = new window.ReconnectingWebSocket(
ws_scheme + window.location.host + "/receive?" + sendParams
);
receiver.onmessage = this.handleMessage.bind(this);
const sender = new window.ReconnectingWebSocket(
ws_scheme + window.location.host + "/submit"
);
this.setState({
'sender': sender
});
window.cards.playCard = (c) => { };
}
render() {
return (
<div>
<Players
nPlayers={this.state.nPlayers}
playerN={this.state.playerN}
playerNames={this.state.playerNames}
nCards={this.state.nCards}
turn={this.state.turn}
showModal={this.showMakeMoveModal.bind(this)} />
<MoveDisplay
success={this.state.success}
card={this.state.card}
playerNames={this.state.playerNames}
interrogator={this.state.interrogator}
respondent={this.state.respondent} />
<ClaimDisplay
success={this.state.lastClaim.success}
claimBy={this.state.lastClaim.claimBy}
playerNames={this.state.playerNames}
halfSuit={this.state.lastClaim.halfSuit}
showFullClaim={() => this.setState({ showFullClaim: true })}
/>
<Timer
moveTimestamp={this.state.moveTimestamp}
timeLimit={this.state.timeLimit}
switchTeam={() => this.sendMessage({
action: 'switch_team',
game_uuid: this.state.gameUuid
})}
turn={this.state.turn}
gameUuid={this.state.gameUuid}
playerNames={this.state.playerNames}
playerN={this.state.playerN} />
<CardGroup
handClass='Player-hand'
suitClass='vhand-compact'
cards={this.state.hand}
claims={this.state.claims} />
<ScoreDisplay score={this.state.score} />
{this.state.playerN !== -1 && this.state.moveTimestamp && <button
className='btn btn-secondary ClaimButton'
onClick={() => this.setState({ showClaimModal: true })}>Make Claim</button>}
{!this.state.moveTimestamp && <button
className='btn btn-secondary BotsButton'
onClick={this.startGame.bind(this)}>Fill With Bots</button>}
{this.state.showMakeMoveModal && <MakeMoveModal
hand={this.state.hand}
hideModal={this.hideMakeMoveModal.bind(this)}
playCard={this.playCard.bind(this)}
claims={this.state.claims} />}
{this.state.showClaimModal && <ClaimModal
playerN={this.state.playerN}
nPlayers={this.state.nPlayers}
playerNames={this.state.playerNames}
hand={this.state.hand}
claims={this.state.claims}
hideModal={this.hideClaimModal.bind(this)}
makeClaim={this.makeClaim.bind(this)} />}
{this.state.showFullClaim &&
<CorrectClaimModal
nPlayers={this.state.nPlayers}
correct={this.state.lastClaim.truth}
playerNames={this.state.playerNames}
set={SET_NAME_MAP[(this.state.lastClaim.halfSuit || {}).half] +
(this.state.lastClaim.halfSuit || {}).suit}
hideModal={() => { this.setState({ showFullClaim: false }) }}
/>}
</div>
);
}
}
export default Game;
|
#!/usr/bin/env bash
# shellcheck disable=SC2128
# shellcheck source=/dev/null
set -x
set +e
# ไธป่ฆๅจๅนณ่ก้พไธๆต่ฏ
source "./mainPubilcRelayerTest.sh"
source "./proxyVerifyTest.sh"
# shellcheck disable=SC2154
# Deploy the cross-chain relayer stack for an ETH-only setup: patch
# relayer.toml, start relayer A, deploy chain33 + ethereum contracts
# offline, create bridge tokens, then bring up relayers B/C/D.
function StartDockerRelayerDeploy_onlyETH() {
    echo -e "${GRE}=========== $FUNCNAME begin ===========${NOC}"

    # Rewrite relayer.toml for this deployment
    up_relayer_toml
    # Remove lines 16-23 (section not used by the ETH-only configuration)
    sed -i "16,23"'d' "./relayer.toml"
    # Start ebrelayer A and copy the deployment configs into its container
    start_docker_ebrelayerA
    docker cp "./deploy_chain33.toml" "${dockerNamePrefix}_ebrelayera_1":/root/deploy_chain33.toml
    docker cp "./deploy_ethereum.toml" "${dockerNamePrefix}_ebrelayera_1":/root/deploy_ethereum.toml

    # Deploy the chain33 contracts and capture the bridgeRegistry address
    OfflineDeploy_chain33
    # Patch the BridgeRegistryOnChain33 field in relayer.toml
    sed -i 's/^BridgeRegistryOnChain33=.*/BridgeRegistryOnChain33="'"${chain33BridgeRegistry}"'"/g' "./relayer.toml"
    # shellcheck disable=SC2154
    # shellcheck disable=SC2034
    {
        # Deploy the ethereum-side contracts and remember their addresses
        Boss4xCLI=${Boss4xCLIeth}
        CLIA=${CLIAeth}
        OfflineDeploy_ethereum "./deploy_ethereum.toml"
        ethereumBridgeBankOnETH="${ethereumBridgeBank}"
        ethereumBridgeRegistryOnETH="${ethereumBridgeRegistry}"
        ethereumMultisignAddrOnETH="${ethereumMultisignAddr}"
        sed -i '12,18s/BridgeRegistry=.*/BridgeRegistry="'"${ethereumBridgeRegistryOnETH}"'"/g' "./relayer.toml"
    }

    # Create the offline multisign chain33 address and fund its fees
    Chain33Cli=${MainCli}
    initMultisignChain33Addr
    transferChain33MultisignFee
    Chain33Cli=${Para8901Cli}

    docker cp "./relayer.toml" "${dockerNamePrefix}_ebrelayera_1":/root/relayer.toml
    InitRelayerA

    # Create the bridge-token / ERC20 addresses used by the tests
    # shellcheck disable=SC2154
    # shellcheck disable=SC2034
    {
        Boss4xCLI=${Boss4xCLIeth}
        CLIA=${CLIAeth}
        ethereumBridgeBank="${ethereumBridgeBankOnETH}"
        offline_create_bridge_token_chain33_symbol "USDT"
        chain33USDTBridgeTokenAddrOnETH="${chain33MainBridgeTokenAddr}"
        offline_create_bridge_token_chain33_symbol "ETH"
        chain33MainBridgeTokenAddrETH="${chain33MainBridgeTokenAddr}"
        offline_create_bridge_token_eth_BTY
        ethereumBtyBridgeTokenAddrOnETH="${ethereumBtyBridgeTokenAddr}"
        offline_deploy_erc20_create_tether_usdt_USDT "USDT"
        ethereumUSDTERC20TokenAddrOnETH="${ethereumUSDTERC20TokenAddr}"
    }

    # Copy the generated contract ABIs into relayer A's container
    # shellcheck disable=SC2086
    {
        docker cp "${chain33BridgeBank}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${chain33BridgeBank}.abi
        docker cp "${chain33BridgeRegistry}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${chain33BridgeRegistry}.abi
        docker cp "${chain33USDTBridgeTokenAddrOnETH}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${chain33USDTBridgeTokenAddrOnETH}.abi
        docker cp "${chain33MainBridgeTokenAddrETH}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${chain33MainBridgeTokenAddrETH}.abi
        docker cp "${ethereumBridgeBankOnETH}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${ethereumBridgeBankOnETH}.abi
        docker cp "${ethereumBridgeRegistryOnETH}.abi" "${dockerNamePrefix}_ebrelayera_1":/root/${ethereumBridgeRegistryOnETH}.abi
    }

    # Start ebrelayer B, C and D, then restart A with the final config
    updata_toml_start_bcd
    restart_ebrelayerA

    echo -e "${GRE}=========== $FUNCNAME end ===========${NOC}"
}
# shellcheck disable=SC2034
# shellcheck disable=SC2154
# End-to-end relayer test entry point for the ETH-only configuration.
# $1 (optional): maturityDegree override; $2 (optional): chain33 ID.
function AllRelayerMainTest() {
    echo -e "${GRE}=========== $FUNCNAME begin ===========${NOC}"
    set +e

    if [[ ${1} != "" ]]; then
        maturityDegree=${1}
        echo -e "${GRE}maturityDegree is ${maturityDegree} ${NOC}"
    fi

    # shellcheck disable=SC2120
    if [[ $# -ge 2 ]]; then
        chain33ID="${2}"
    fi

    get_cli

    # Initialise the chain33 validator and the parachain
    Chain33Cli=${MainCli}
    InitChain33Validator
    # para add
    initPara

    StartDockerRelayerDeploy_onlyETH

    # Run the lock/burn round-trip for the ETH-only deployment
    Boss4xCLI=${Boss4xCLIeth}
    CLIA=${CLIAeth}
    ethereumBridgeBank="${ethereumBridgeBankOnETH}"
    ethereumMultisignAddr="${ethereumMultisignAddrOnETH}"
    chain33MainBridgeTokenAddr="${chain33MainBridgeTokenAddrETH}"
    ethereumBtyBridgeTokenAddr="${ethereumBtyBridgeTokenAddrOnETH}"
    ethereumUSDTERC20TokenAddr="${ethereumUSDTERC20TokenAddrOnETH}"
    chain33USDTBridgeTokenAddr="${chain33USDTBridgeTokenAddrOnETH}"
    test_lock_and_burn "ETH" "USDT"

    # Proxy-withdraw test on the ethereum side
    start_docker_ebrelayerProxy
    setWithdraw_ethereum
    TestProxy

    echo_addrs
    echo -e "${GRE}=========== $FUNCNAME end ===========${NOC}"
}
|
<filename>include/archgraph/Geometry.h<gh_stars>0
#pragma once
#include <SM_Vector.h>
#include <polymesh3/Polytope.h>

#include <map>
#include <memory>
#include <string>
#include <vector>
namespace archgraph
{

class Variant;

// Node in the generated-geometry hierarchy: either a leaf wrapping a
// polytope, or an internal node holding child geometries. Carries an
// optional color, source filepath and a name->Variant attribute map.
class Geometry
{
public:
    // Leaf node wrapping a single polytope. The color starts out
    // invalid, meaning "no color assigned yet".
    Geometry(const pm3::PolytopePtr& poly)
        : m_poly(poly)
    {
        m_color.MakeInvalid();
    }

    // Internal node owning a list of children. Fix: also mark the color
    // invalid here — the original left it default-constructed, so the
    // two constructors produced objects in different color states.
    Geometry(const std::vector<std::shared_ptr<Geometry>>& children)
        : m_children(children)
    {
        m_color.MakeInvalid();
    }

    auto GetPoly() const { return m_poly; }

    auto& GetColor() const { return m_color; }
    void SetColor(const sm::vec3& color) { m_color = color; }

    auto& GetFilepath() const { return m_filepath; }
    void SetFilepath(const std::string& filepath) { m_filepath = filepath; }

    auto& GetChildren() const { return m_children; }

    // Attribute accessors (implemented out of line): Add inserts, Set
    // overwrites an existing entry (returning whether it existed), and
    // Query returns null when the name is absent.
    void AddAttr(const std::string& name, const std::shared_ptr<Variant>& value);
    bool SetAttr(const std::string& name, const std::shared_ptr<Variant>& value);
    std::shared_ptr<Variant> QueryAttr(const std::string& name) const;

private:
    pm3::PolytopePtr m_poly = nullptr;

    sm::vec3 m_color;       // invalid until SetColor is called

    std::string m_filepath; // source file this geometry came from, if any

    std::vector<std::shared_ptr<Geometry>> m_children;

    std::map<std::string, std::shared_ptr<Variant>> m_attrs;

}; // Geometry

}
import {MinimumSpanningTreeByPrim} from '../../../src/math/graphAlgorithms/MinimumSpanningTreeByPrim'
import {mkGraphOnEdgesArray} from '../../../src/structs/basicGraphOnEdges'
import {IntPair} from '../../../src/utils/IntPair'
// Rhombus 0-1-2-3 with the diagonal 0-2. Every rim edge costs 1 and the
// diagonal costs 2, so a minimum spanning tree must use exactly three
// rim edges and never the diagonal. (Fix: test title was misspelled
// "rombus with diagal".)
test('rhombus with a diagonal', () => {
  const edges = [
    new IntPair(0, 1),
    new IntPair(1, 2),
    new IntPair(2, 3),
    new IntPair(3, 0),
    new IntPair(0, 2),
  ]
  const graph = mkGraphOnEdgesArray<IntPair>(edges)
  const mstree = new MinimumSpanningTreeByPrim(
    graph,
    (e) => (e == edges[4] ? 2 : 1),
    1,
  )
  const tree = mstree.GetTreeEdges()
  // A spanning tree over 4 nodes has exactly 3 edges.
  expect(tree.length).toBe(3)
  // Every node must be touched by some tree edge.
  const nodes = new Set<number>()
  nodes.add(0)
  nodes.add(1)
  nodes.add(2)
  nodes.add(3)
  for (const e of tree) {
    nodes.delete(e.source)
    nodes.delete(e.target)
  }
  expect(nodes.size).toBe(0)
  // The heavy diagonal must be excluded...
  expect(tree.find((e) => e == edges[4])).toBe(undefined)
  // ...and at least one of the first two rim edges must be present.
  const e = tree.find((e) => e == edges[0] || e == edges[1])
  expect(e == undefined).toBe(false)
})
|
class Colorful extends React.Component {
render() {
return (
<div style={{ color: 'red', fontSize: 72 }}>Big Red</div>
)
}
}
|
#!/usr/bin/env sh
# Quantize the tiny-YOLO Caffe model with Ristretto: 100 scoring
# iterations on GPU 0, dynamic fixed-point trimming, 1% error margin.
./build/tools/ristretto quantize --model=models/yolotiny/yolo_tiny_deploy.prototxt \
	--weights=models/yolotiny/yolo_tiny.caffemodel \
	--model_quantized=models/yolotiny/RistrettoDemo/quantized.prototxt \
	--iterations=100 --gpu=0 --trimming_mode=dynamic_fixed_point --error_margin=1
|
def is_palindrome(string):
    """Return True if *string* reads the same forwards and backwards."""
    return all(string[i] == string[-1 - i] for i in range(len(string) // 2))
package io.smallrye.mutiny.operators;
import static io.smallrye.mutiny.helpers.EmptyUniSubscription.CANCELLED;
import static io.smallrye.mutiny.helpers.ParameterValidation.nonNull;
import static io.smallrye.mutiny.helpers.ParameterValidation.validate;
import java.time.Duration;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import io.smallrye.mutiny.Uni;
import io.smallrye.mutiny.subscription.UniSubscription;
/**
 * A {@link Uni} operator that delays the emission of the upstream item by a
 * fixed {@link Duration}, scheduling the downstream emission on the supplied
 * {@link ScheduledExecutorService}. Failures are forwarded without delay.
 * Cancelling the downstream subscription cancels both the upstream
 * subscription and any pending scheduled emission.
 */
public class UniDelayOnItem<T> extends UniOperator<T, T> {

    private final Duration duration;
    private final ScheduledExecutorService executor;

    public UniDelayOnItem(Uni<T> upstream, Duration duration, ScheduledExecutorService executor) {
        super(nonNull(upstream, "upstream"));
        this.duration = validate(duration, "duration");
        this.executor = nonNull(executor, "executor");
    }

    @Override
    protected void subscribing(UniSerializedSubscriber<? super T> subscriber) {
        // holder: the pending scheduled emission, if any.
        // reference: the upstream subscription; swapped to CANCELLED on cancel.
        AtomicReference<ScheduledFuture<?>> holder = new AtomicReference<>();
        AtomicReference<UniSubscription> reference = new AtomicReference<>();

        upstream().subscribe().withSubscriber(new UniDelegatingSubscriber<T, T>(subscriber) {
            @Override
            public void onSubscribe(UniSubscription subscription) {
                if (reference.compareAndSet(null, subscription)) {
                    // Hand downstream a subscription whose cancel() tears down
                    // upstream and aborts the scheduled task exactly once
                    // (guarded by the CAS to CANCELLED).
                    super.onSubscribe(() -> {
                        if (reference.compareAndSet(subscription, CANCELLED)) {
                            subscription.cancel();
                            ScheduledFuture<?> future = holder.getAndSet(null);
                            if (future != null) {
                                future.cancel(true);
                            }
                        }
                    });
                }
            }

            @Override
            public void onItem(T item) {
                // Skip scheduling entirely if the subscription was cancelled.
                if (reference.get() != CANCELLED) {
                    try {
                        ScheduledFuture<?> future = executor
                                .schedule(() -> super.onItem(item), duration.toMillis(), TimeUnit.MILLISECONDS);
                        holder.set(future);
                    } catch (RuntimeException e) {
                        // Typically, a rejected execution exception
                        super.onFailure(e);
                    }
                }
            }
        });
    }
}
|
class AvoidManager:
    """Decides which users/tags should be skipped and which reply users
    are auto-accepted, based on the configured avoid/accept lists."""

    def __init__(self, USER_AVOID_LIST, TAG_AVOID_LIST, DASH_TAG_AVOID_LIST, REPLY_USER_AUTO_ACCEPT_LIST):
        self.USER_AVOID_LIST = USER_AVOID_LIST
        self.TAG_AVOID_LIST = TAG_AVOID_LIST
        self.DASH_TAG_AVOID_LIST = DASH_TAG_AVOID_LIST
        self.REPLY_USER_AUTO_ACCEPT_LIST = REPLY_USER_AUTO_ACCEPT_LIST

    def should_avoid_user(self, username):
        """True if the user is on the avoid list (exact match)."""
        return username in self.USER_AVOID_LIST

    def should_avoid_tag(self, tag):
        """True if any avoid-tag is a case-insensitive substring of *tag*."""
        lowered = tag.lower()
        return any(avoid.lower() in lowered for avoid in self.TAG_AVOID_LIST)

    def should_avoid_dash_tag(self, tags):
        """True if any tag in *tags* contains any dash-avoid-tag
        (case-insensitive substring match)."""
        return any(
            avoid.lower() in tag.lower()
            for tag in tags
            for avoid in self.DASH_TAG_AVOID_LIST
        )

    def should_auto_accept_reply(self, username):
        """True if replies from this user are accepted automatically."""
        return username in self.REPLY_USER_AUTO_ACCEPT_LIST
import factory
# Fix: FuzzyDateTime lives in factory_boy's fuzzy module; the bare
# top-level `fuzzy` package is an unrelated phonetic-matching library
# that has no FuzzyDateTime attribute.
from factory import fuzzy
import pytz
from datetime import datetime, timedelta


class Event(factory.Factory):
    """Factory producing Event objects with a random UTC start time in
    [2000-01-01, 2020-01-01) and an end time exactly one hour later."""

    class Meta:
        # NOTE(review): factory_boy normally expects the model *class*
        # here rather than a string — confirm how 'Event' is resolved.
        model = 'Event'

    start_at = fuzzy.FuzzyDateTime(
        pytz.timezone('UTC').localize(datetime(2000, 1, 1)),
        pytz.timezone('UTC').localize(datetime(2020, 1, 1))
    )
    # Derived field: always start_at + 1 hour.
    end_at = factory.LazyAttribute(
        lambda o: o.start_at + timedelta(hours=1)
    )
#!/bin/bash
# exit this script if any commmand fails
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
COCOS2DX_ROOT="$DIR"/../..
HOST_NAME=""
CURL="curl --retry 999 --retry-max-time 0"
# Install the Android NDK (plus the SDK for full android targets) via
# setup_android.py; `retry` is a python dependency of that script.
function install_android_ndk()
{
    echo "Installing android ndk ..."
    # sudo curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
    # sudo python get-pip.py
    # sudo python -m pip install retry
    which python
    which pip
    pip install retry
    # Device builds need the full SDK+NDK; everything else only the NDK.
    if [ "$BUILD_TARGET" == "android" ]\
    || [ "$BUILD_TARGET" == "android_lua" ] ; then
        python $COCOS2DX_ROOT/tools/appveyor-scripts/setup_android.py
    else
        python $COCOS2DX_ROOT/tools/appveyor-scripts/setup_android.py --ndk_only
    fi
}
# Install the linux build dependencies (auto-answering "y" to prompts).
function install_linux_environment()
{
    echo "Installing linux dependence packages ..."
    echo -e "y" | bash $COCOS2DX_ROOT/install-deps-linux.sh
    echo "Installing linux dependence packages finished!"
}
# Download the cocos2d-x external dependencies non-interactively.
function download_deps()
{
    # install dpes
    pushd $COCOS2DX_ROOT
    python download-deps.py -r=yes
    popd
    echo "Downloading cocos2d-x dependence finished!"
}
# Python modules needed by the macOS build.
# NOTE(review): PyYAML installs without sudo but Cheetah uses sudo —
# confirm whether the asymmetry is intentional.
function install_python_module_for_osx()
{
    pip install PyYAML
    sudo pip install Cheetah
}
# set up environment according os and target
# set up environment according os and target
# Pull-request builds: install per-OS prerequisites, the NDK (its clang
# is used to generate binding code) and the cocos2d-x dependencies.
function install_environement_for_pull_request()
{
    echo "Building pull request ..."

    if [ "$TRAVIS_OS_NAME" == "linux" ]; then
        sudo apt-get update
        sudo apt-get install ninja-build
        ninja --version
        if [ "$BUILD_TARGET" == "linux" ]; then
            install_linux_environment
        fi
    fi

    if [ "$TRAVIS_OS_NAME" == "osx" ]; then
        install_python_module_for_osx
    fi

    # use NDK's clang to generate binding codes
    install_android_ndk
    download_deps
}
# should generate binding codes & cocos_files.json after merging
# should generate binding codes & cocos_files.json after merging
function install_environement_for_after_merge()
{
    if [ "$TRAVIS_OS_NAME" == "osx" ]; then
        install_python_module_for_osx
    fi

    echo "Building merge commit ..."
    install_android_ndk
    download_deps
}
# install newer python for android for ssl connection
if [ "$BUILD_TARGET" == "android" ]; then
    if [ $GITHUB_CI ]; then
        echo "Installing pyenv for github ci..."
        curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash
        export PATH="/home/runner/.pyenv/bin:$PATH"
    else
        # upgrade pyenv
        cd $(pyenv root) && git checkout master && git pull && cd -
    fi
    pyenv install --list
    pyenv install $PYENV_VERSION
    pyenv versions
    # pip install pyOpenSSL ndg-httpsclient pyasn1
    # set by PYENV_VERSION environment variable implicit
    # pyenv global $PYENV_VERSION
fi
python -V
cmake --version

# Stand-alone "new test" targets install their own deps and exit early.
if [ "$BUILD_TARGET" == "android_cocos_new_test" ]; then
    sudo apt-get update
    sudo apt-get install ninja-build
    ninja --version
    download_deps
    sudo pip install retry
    python $COCOS2DX_ROOT/tools/appveyor-scripts/setup_android.py
    exit 0
fi

if [ "$BUILD_TARGET" == "linux_cocos_new_test" ]; then
    download_deps
    install_linux_environment
    # linux new lua project, so need to install
    sudo pip install retry
    python $COCOS2DX_ROOT/tools/appveyor-scripts/setup_android.py --ndk_only
    exit 0
fi

# build pull request
if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
    install_environement_for_pull_request
fi

# run after merging
# - make cocos robot to send PR to cocos2d-x for new binding codes
# - generate cocos_files.json for template
if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
    # only one job need to send PR, linux virtual machine has better performance
    if [ $TRAVIS_OS_NAME == "linux" ] && [ x$GEN_BINDING_AND_COCOSFILE == x"true" ]; then
        install_environement_for_after_merge
    fi
fi
echo "before-install.sh execution finished!"
|
<reponame>polinazolotukhina/Shopping-smart
// Placeholder configuration: the real credentials live in the Firebase
// console project "ShopApp" and are intentionally not committed.
const firebaseConfig = {
  0: 'the info is in my firebase "ShopApp" '
};
export default firebaseConfig;
|
<gh_stars>0
var PI2 = Math.PI * 2;
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
// Networked player car. Extends EventEmitter so local input changes can
// be broadcast to peers (emits 'keyChanged' from keyChange()).
function Car(id, peer, color) {
  EventEmitter.call(this);
  this.id = id;
  this.peer = peer;
  this.color = color;
  this.x = 0;          // position (canvas coordinates)
  this.y = 0;
  this.sx = 0;         // velocity components
  this.sy = 0;
  this.speed = 0;      // forward thrust magnitude
  this.rotation = 0;   // heading in radians, kept within [0, 2*PI)
  this.timestamp = 0;  // last physics-update time (ms since epoch)
  this.keyTurn = 0;    // turn: 0 = false, -1 = cw, 1 = ccw
  this.keyAccel = 0;   // accelerating: 0 = false, 1 = true
  this.asset = null;   // offscreen canvas sprite (see _createAsset)
  this._createAsset();
}
inherits(Car, EventEmitter);
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Car.prototype.getState = function () {
  // Snapshot of the replicated state using short wire-format keys
  // (consumed by sync() on the remote side).
  var snapshot = {};
  snapshot.x = this.x;
  snapshot.y = this.y;
  snapshot.t = this.keyTurn;
  snapshot.a = this.keyAccel;
  snapshot.s = this.speed;
  snapshot.r = this.rotation;
  snapshot.sx = this.sx;
  snapshot.sy = this.sy;
  return snapshot;
};
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Car.prototype.sync = function (snapshot) {
  // Apply a remote state snapshot (wire format produced by getState)
  // and restamp the local physics clock.
  this.x = snapshot.x;
  this.y = snapshot.y;
  this.sx = snapshot.sx;
  this.sy = snapshot.sy;
  this.speed = snapshot.s;
  this.rotation = snapshot.r;
  this.keyTurn = snapshot.t;
  this.keyAccel = snapshot.a;
  this.timestamp = Date.now();
};
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Car.prototype._createAsset = function () {
var canvas = this.asset = document.createElement('canvas');
canvas.width = 40;
canvas.height = 40;
var ctx = canvas.getContext('2d');
ctx.font = '40px Verdana';
ctx.textAlign = 'center';
// ctx.fillStyle = '#FFF';
// ctx.fillRect(0,0,40,40)
ctx.fillStyle = this.color;
ctx.shadowColor = '#444';
ctx.shadowBlur = 4;
ctx.fillText('โ', 20, 35);
}
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Car.prototype.respawn = function (x, y) {
  // Drop the car at (x, y) with all motion zeroed; chainable.
  this.x = x;
  this.y = y;
  this.rotation = 0;
  this.speed = 0;
  this.sx = 0;
  this.sy = 0;
  this.timestamp = Date.now();
  return this;
};
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
// Draw the pre-rendered sprite centered on (x, y), rotated to the
// current heading. The save/restore pair isolates the transform.
Car.prototype.render = function (ctx) {
  ctx.save();
  ctx.translate(this.x, this.y);
  ctx.rotate(this.rotation);
  ctx.drawImage(this.asset, -20, -20);
  ctx.restore();
};
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Car.prototype.keyChange = function (direction, press) {
  // Map arrow-key codes to input state; emit 'keyChanged' only when a
  // recognised key actually changed something.
  var pressed = ~~press;
  switch (direction) {
    case 38: this.keyAccel = pressed; break;  // up: accelerate
    case 39: this.keyTurn = pressed; break;   // right
    case 37: this.keyTurn = -pressed; break;  // left
    default: return;                          // unhandled key: no event
  }
  this.emit('keyChanged');
};
//โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
// Advance the physics by the wall-clock time since the last update:
// integrate rotation, thrust with frame-rate-independent decay, then
// velocity with friction, and finally clamp to the 400x400 canvas,
// reflecting the velocity on impact.
// Fix: removed a stray empty statement (double semicolon) left after
// the decay line.
Car.prototype.update = function () {
  var now = Date.now();
  var dt = now - this.timestamp;
  this.timestamp = now;

  var da = dt * 0.005;

  // update rotation relatively to keyTurn
  this.rotation += this.keyTurn * da;
  if (this.rotation > PI2) this.rotation -= PI2;
  else if (this.rotation < 0) this.rotation += PI2;

  // update speed relatively to keyAccel, with exponential decay scaled
  // by dt so behaviour is independent of the frame rate
  this.speed += this.keyAccel * da;
  // this.speed *= 0.8;
  this.speed *= Math.pow(0.8, dt * 0.05);

  // apply friction and add the thrust along the current heading
  var friction = Math.pow(0.95, dt * 0.05);
  this.sx = this.sx * friction + Math.cos(this.rotation) * this.speed;
  this.sy = this.sy * friction + Math.sin(this.rotation) * this.speed;

  // update position
  this.x += this.sx;
  this.y += this.sy;

  // bound position to canvas
  // TODO: new position should take dt in account (calculate when collision occured)
  if (this.x > 400) { this.x = 400; if (this.sx > 0) this.sx *= -1; }
  if (this.x < 0) { this.x = 0; if (this.sx < 0) this.sx *= -1; }
  if (this.y > 400) { this.y = 400; if (this.sy > 0) this.sy *= -1; }
  if (this.y < 0) { this.y = 0; if (this.sy < 0) this.sy *= -1; }
};
|
<reponame>tanmba/10Vision
import { Component, OnInit } from '@angular/core';
import {ActivatedRoute, Router} from "@angular/router";
import {UserService} from "../core/user.service";
import {FirebaseUserModel} from "../core/user.model";
import { Location } from '@angular/common';
import {AuthService} from "../core/auth.service";
@Component({
  selector: 'app-user-details',
  templateUrl: './user-details.component.html',
  styleUrls: ['./user-details.component.scss']
})
export class UserDetailsComponent implements OnInit {

  /** Route parameter identifying the user being viewed. */
  userId: string;
  /**
   * The loaded user. Fix: this was typed `Observable<FirebaseUserModel>`
   * although `Observable` is never imported and getUser() assigns the
   * value *emitted* by the subscription, not the observable itself.
   */
  user: FirebaseUserModel;
  /** Whether the contact email is revealed (toggled by addGabi). */
  showEmail = false;
  /** The signed-in user, kept in sync with the auth stream. */
  currentUser: FirebaseUserModel;

  constructor(
    private route: ActivatedRoute,
    private userService: UserService,
    private location: Location,
    private authService: AuthService,
    private router: Router,
  ) { }

  ngOnInit() {
    this.userId = this.route.snapshot.params['id'];
    this.getUser();
    this.authService.user$.subscribe(user => {
      this.currentUser = user;
    });
    // NOTE(review): logged before the subscription above has fired, so
    // this prints the initial (undefined) value.
    console.log(this.currentUser);
  }

  /** Load the viewed user's profile when an id is present. */
  getUser() {
    if (this.userId) {
      this.userService.getUser(this.userId).subscribe(user => {
        this.user = user;
      });
    }
  }

  locationBack() {
    this.location.back();
  }

  /** Reveal the email when signed in; otherwise go to registration. */
  addGabi() {
    if (this.currentUser) {
      this.showEmail = !this.showEmail;
    } else {
      this.router.navigate(['/register']);
    }
  }
}
|
<filename>web/scm/src/app/home/side-panel/store/reducers.ts
import { createReducer, on, Action } from '@ngrx/store';
import { SidePanelState, initialState } from './state';
import * as SidepanelActions from './actions';
/**
 * Side-panel slice reducer: tracks the loading flag, the fetched panel
 * data and the last error across the init-data request lifecycle.
 */
const sidePanelReducer = createReducer(
  initialState,
  // Request fired: enter the loading state.
  on(SidepanelActions.sidePanelInitDataRequest, (state) => ({
    ...state,
    isLoading: true
  })),
  // Data arrived: store it and stop loading.
  on(SidepanelActions.sidePanelInitDataSuccess, (state, { sidePanel }) => ({
    ...state,
    sidePanelData: sidePanel,
    isLoading: false
  })),
  // Request failed: keep the error for the UI and stop loading.
  on(SidepanelActions.sidePanelInitDataFailure, (state, { error }) => ({
    ...state,
    error: error,
    isLoading: false
  })),
);

// Exported wrapper so the reducer reference survives AOT compilation.
export function reducer(state: SidePanelState | undefined, action: Action) {
  return sidePanelReducer(state, action);
}
|
<reponame>SkaarlK/Learning-Python
# Read a number and report whether it is prime (messages in Portuguese).
n = int(input("Insira um número para saber se é primo: "))

# Count divisors of n in 2..n-1; a prime has none. (Same counting as the
# original `for i in range(n): if i > 1:` loop, without the wasted
# iterations at 0 and 1.)
nope = 0
for i in range(2, n):
    if n % i == 0:
        nope += 1

# Fix: the original classified every negative n as prime, because the
# loop body never runs for n < 2 and only 0 and 1 were excluded.
# Primes must be greater than 1; behaviour for n >= 0 is unchanged.
if n > 1 and nope == 0:
    print("É primo")
    exit()
print("Não é primo.")
const request = require('request-promise')
/**
 * Minimal client for the BW mining-pool HTTP API. One instance is cached
 * per account name (see the static `create` factory).
 */
class BWMinerPool {
  constructor(options) {
    this.key = options.key
    this.secret = options.secret
    this.accountName = options.accountName
  }

  // POST a request and unwrap the API's `data` envelope.
  async makeRequest(options) {
    const result = await request.post(Object.assign({ json: true, proxy: null }, options))
    return result && result.data
  }

  // Sign accountName + key + nonce with HMAC-MD5, uppercase hex.
  makeSignature() {
    // Fix: `crypto` was used without ever being imported anywhere in
    // this module, which threw a ReferenceError at the first call.
    const crypto = require('crypto')
    const nonce = Date.now()
    const message = this.accountName + this.key + nonce
    const hmac = crypto.createHmac('md5', this.secret)
    const signature = hmac.update(message).digest('hex').toUpperCase()
    return { signature, nonce }
  }

  // Per-coin API endpoints, e.g. https://btc.bwpool.net/api/account
  normalizeUrl(action, coin) {
    return `https://${coin}.bwpool.net/api/${action}`
  }

  getAccountStats(coin) {
    const qs = Object.assign({ key: this.key, userName: this.accountName }, this.makeSignature())
    return this.makeRequest({ url: this.normalizeUrl('account', coin), qs })
  }

  getAccountHashrate(coin) {
    const qs = Object.assign({ key: this.key, userName: this.accountName }, this.makeSignature())
    return this.makeRequest({ url: this.normalizeUrl('hashrate', coin), qs })
  }

  getAccountHashrateByType(coin, type) {
    //FIXME
  }

  // NOTE(review): `params` is accepted but never forwarded, so this
  // always returns ALL workers; getWorkerHashrate relies on the
  // single-result shape — confirm whether the API supports filtering.
  getWorkers(coin, params = {}) {
    const qs = Object.assign({ key: this.key, userName: this.accountName }, this.makeSignature())
    return this.makeRequest({ url: this.normalizeUrl('workers', coin), qs })
  }

  async getWorkerHashrate(coin, workerName) {
    const result = await this.getWorkers(coin, { worker: workerName })
    return (result && result.workers && result.workers.length === 1) ?
      result.workers[0].hashrate : 0
  }

  getWorkerHashrateByType(coin, workerName, type) {
    // The BW pool does not support querying historical hashrate by type,
    // so fall back to the plain per-worker hashrate.
    // (Fix: the original comment was split mid-character across lines,
    // leaving a bare non-comment line that broke the file.)
    return this.getWorkerHashrate(coin, workerName)
  }

  getWorkerStatsByDay(coin, workerName) {
    //FIXME
  }

  calcWorkerDailyWage(coin, workerName) {
    //FIXME
  }

  // Cache one client per account name.
  static create(options) {
    if (!BWMinerPool.instance[options.accountName]) {
      BWMinerPool.instance[options.accountName] = new BWMinerPool(options)
    }
    return BWMinerPool.instance[options.accountName]
  }
}
BWMinerPool.instance = {}
module.exports = BWMinerPool
<filename>src/utils/websocket.js
import { getLocal } from "@/utils/local";
import store from "../store";
// websocket ๅฏไปฅๅฎ็ฐๅๅ้ไฟก๏ผ้ฟ้พๆฅ๏ผๆๅฟ่ทณๆฃๆต๏ผ h5ๆไพ็ๅฏไปฅๅฎๆถ้ไฟก
/**
 * Long-lived WebSocket wrapper: authenticates on open, monitors server
 * heartbeats, and reconnects automatically on error or missed heartbeat.
 * (Fix: several Chinese comments had been split mid-line, leaving bare
 * non-comment lines that were syntax errors; restored as comments.)
 */
class WS {
  constructor(config = {}) {
    this.url = config.url || "localhost";
    this.port = config.port || 4000;
    this.protocol = config.protocol || "ws";
    // Expected heartbeat interval from the server.
    this.time = config.time || 30 * 1000;
    this.ws = null;
  }

  onOpen = () => {
    // (Auth) By convention the first frame must be an object with two
    // fields: type and data. WebSocket rides on TCP and the initial
    // handshake is HTTP, but its headers cannot be modified, so the
    // token is sent in-band instead.
    this.ws.send(JSON.stringify({
      type: "auth",
      data: getLocal("token"),
    }));
  };

  onMessage = (e) => {
    let { type, data } = JSON.parse(e.data);
    switch (type) {
      case "noAuth":
        console.log("没有权限");
        break;
      case "heartCheck":
        // Server heartbeat: re-arm the watchdog and echo back.
        this.checkServer();
        // NOTE(review): the echoed type is "hearCheck" (missing 't') —
        // confirm against the server protocol before changing it.
        this.ws.send(JSON.stringify({ type: "hearCheck" }));
        break;
      default:
        // NOTE(review): `type` is a string here, so `type.SET_MESSAGE`
        // evaluates to undefined; this likely meant a mutation-type
        // constant — verify against the store's mutation names.
        store.commit(type.SET_MESSAGE, data)
        // console.log("ws message", data);
    }
    console.log(data);
  };

  onClose = () => {
    this.ws.close();
  };

  onError = () => {
    // Reconnect after a short back-off.
    setTimeout(() => {
      this.create();
    }, 1000);
  };

  create() {
    this.ws = new WebSocket(`${this.protocol}://${this.url}:${this.port}`);
    this.ws.onopen = this.onOpen;
    this.ws.onmessage = this.onMessage;
    this.ws.onclose = this.onClose;
    this.ws.onerror = this.onError;
  }

  checkServer() {
    // Reconnect watchdog: if no heartbeat arrives within `time` + 10s,
    // assume the server is gone, close the socket and reconnect.
    clearTimeout(this.timer); // debounce repeated heartbeats
    this.timer = setTimeout(() => {
      this.onClose(); // close / disconnect
      this.onError(); // reconnect
    }, this.time + 10000);
  }

  send = (msg) => {
    this.ws.send(JSON.stringify(msg));
  }
}
export default WS;
|
<gh_stars>1-10
/* Copyright (c) 2012, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef BSPWM_MESSAGES_H
#define BSPWM_MESSAGES_H
#include "types.h"
#include "subscribe.h"
/* Tokens recognized while parsing message arguments
 * (presumably option prefix, category separator and assignment). */
#define OPT_CHR '-'
#define CAT_CHR '.'
#define EQL_TOK "="
/* Entry points: receive a raw client message and dispatch it. Handlers
 * that produce output write it to the response stream `rsp`. */
int handle_message(char *msg, int msg_len, FILE *rsp);
int process_message(char **args, int num, FILE *rsp);
/* Per-command handlers operating on already-tokenized arguments. */
int cmd_window(char **args, int num);
int cmd_desktop(char **args, int num);
int cmd_monitor(char **args, int num);
int cmd_query(char **args, int num, FILE *rsp);
int cmd_rule(char **args, int num, FILE *rsp);
int cmd_pointer(char **args, int num);
int cmd_restore(char **args, int num);
int cmd_control(char **args, int num, FILE *rsp);
int cmd_config(char **args, int num, FILE *rsp);
int cmd_quit(char **args, int num);
/* Settings accessors addressed by a (monitor, desktop, node) location. */
int set_setting(coordinates_t loc, char *name, char *value);
int get_setting(coordinates_t loc, char *name, FILE* rsp);
/* Argument parsers: convert a textual value into the corresponding typed
 * out-parameter; each returns true on success. */
bool parse_subscriber_mask(char *s, subscriber_mask_t *mask);
bool parse_bool(char *value, bool *b);
bool parse_layout(char *s, layout_t *l);
bool parse_direction(char *s, direction_t *d);
bool parse_cycle_direction(char *s, cycle_dir_t *d);
bool parse_circulate_direction(char *s, circulate_dir_t *d);
bool parse_history_direction(char *s, history_dir_t *d);
bool parse_flip(char *s, flip_t *f);
bool parse_pointer_action(char *s, pointer_action_t *a);
bool parse_child_polarity(char *s, child_polarity_t *p);
bool parse_degree(char *s, int *d);
bool parse_window_id(char *s, long int *i);
bool parse_bool_declaration(char *s, char **key, bool *value, alter_state_t *state);
bool parse_index(char *s, int *i);
#endif
|
import * as React from 'react'
import { Line } from 'react-chartjs-2'
import { PacketIntervalData } from '../../types'
// Renders each consecutive pair of interval boundaries as a "lo ~ hi"
// label with four decimal places; n boundaries yield n-1 labels.
const packetIntervalLabels = (intervals: number[]) => {
  const labels: string[] = []
  for (let i = 0; i + 1 < intervals.length; i += 1) {
    labels.push(`${intervals[i].toFixed(4)} ~ ${intervals[i + 1].toFixed(4)}`)
  }
  return labels
}
// Builds the react-chartjs-2 config: interval labels on the x axis and the
// per-interval packet counts as a single teal line dataset.
const chartData = (data: PacketIntervalData) => {
  const labels =
    data.intervals.length > 0 ? packetIntervalLabels(data.intervals) : []
  const dataset = {
    label: 'Number of packets',
    fill: false,
    lineTension: 0.1,
    backgroundColor: 'rgba(75,192,192,0.4)',
    borderColor: 'rgba(75,192,192,1)',
    borderCapStyle: 'butt',
    borderDash: [],
    borderDashOffset: 0.0,
    borderJoinStyle: 'miter',
    pointBorderColor: 'rgba(75,192,192,1)',
    pointBackgroundColor: '#fff',
    pointBorderWidth: 1,
    pointHoverRadius: 5,
    pointHoverBackgroundColor: 'rgba(75,192,192,1)',
    pointHoverBorderColor: 'rgba(220,220,220,1)',
    pointHoverBorderWidth: 2,
    pointRadius: 1,
    pointHitRadius: 10,
    data: data.freq || [],
  }
  return { labels, datasets: [dataset] }
}
// Stateless component: renders packet-interval frequencies as a line chart.
export default (props: { data: PacketIntervalData }) => (
  <div className="packet-interval-chart">
    <Line data={chartData(props.data)} />
  </div>
)
|
-- Schema: classes, their students and teachers, and student grades.
-- BUG FIX: Classes is now created first — Student and Teacher reference it
-- via foreign keys, and engines that validate FK targets at CREATE TABLE
-- time reject the original ordering.
CREATE TABLE Classes (
    ClassId INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    ClassName VARCHAR(255) NOT NULL
);
CREATE TABLE Student (
    StudentId INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    StudentName VARCHAR(255) NOT NULL,
    ClassId INT NOT NULL,
    FOREIGN KEY (ClassId) REFERENCES Classes(ClassId)
);
CREATE TABLE Teacher (
    TeacherId INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    TeacherName VARCHAR(255) NOT NULL,
    ClassId INT NOT NULL,
    FOREIGN KEY (ClassId) REFERENCES Classes(ClassId)
);
-- Grades link a student with the teacher who graded them.
CREATE TABLE Grade (
    StudentId INT NOT NULL,
    TeacherId INT NOT NULL,
    Grade INT NOT NULL,
    FOREIGN KEY (StudentId) REFERENCES Student(StudentId),
    FOREIGN KEY (TeacherId) REFERENCES Teacher(TeacherId)
);
// Runtime configuration, each value overridable via environment variables.
export const NODE_ENV = process.env.NODE_ENV || "development";
export const PORT = process.env.PORT || 4000;
export const HOSTNAME = process.env.HOSTNAME || "localhost";
// Convenience host string, e.g. "localhost:4000".
export const HOST = `${HOSTNAME}:${PORT}`;
export const WRITE_API_ENDPOINT = process.env.WRITE_API_ENDPOINT || "https://iwwa-write-api-development.innowatio-aws.com";
|
<reponame>eliyap/pandoc-highlight
// mdast compiler extension for pandoc-style ==highlight== syntax:
// highlight tokens become 'mark' nodes, which may contain end-of-line content.
exports.canContainEols = ['mark']
exports.enter = {highlight: enterHighlight}
exports.exit = {highlight: exitHighlight}
// Opens a 'mark' mdast node when a highlight token starts.
// `this` is the mdast compiler context supplied by micromark.
function enterHighlight(token) {
  this.enter({type: 'mark', children: []}, token)
}
// Closes the current 'mark' node when the highlight token ends.
function exitHighlight(token) {
  this.exit(token)
}
|
user=$(whoami)
if [ "$user" != "root" ]; then
	echo "Necesitas tener permisos de root :("
	exit 1
fi
# Configuration:
#   DIR_APACHE     - base directory containing every user's home/site
#   GRUPO_SFTP     - SFTP group the users belong to (must already exist)
#   SUFIJO_USUARIO - username prefix; accounts are e.g. DAW101..DAW117
DIR_APACHE="/var/www/"
GRUPO_SFTP="ftpusers"
DOMINIO="raul.local"
SUFIJO_USUARIO="DAW"
# Abort if the users' base directory is missing.
# BUG FIX: this error path previously used `exit 0` (success status).
if [ ! -d "$DIR_APACHE" ]; then
	echo "Error!, el directorio de usuarios no existe"
	exit 1
fi
#Pedimos el nombre de usuario hasta que se introduza un nombre
#while [ "$USUARIO" == "" ]
#do
#	echo "Introduzca el nombre de usuario: "
#	read USUARIO
#done
CURSO=2
echo "CURSO $CURSO"
# Iterate over the course number ranges: course 1 -> 101..117, course 2 -> 201..217.
while [ "$CURSO" -lt 3 ]; do
	echo "CURSO: $CURSO "
	if [ "$CURSO" -eq 1 ]; then
		VALOR_INICIAL=101
		VALOR_FINAL=117
	else
		VALOR_INICIAL=201
		VALOR_FINAL=217
	fi
	let CURSO+=1
	echo "VALOR INICIAL $VALOR_INICIAL"
	for (( NUM=VALOR_INICIAL; NUM<=VALOR_FINAL; NUM++ ))
	do
		USUARIO=$SUFIJO_USUARIO$NUM
		echo "$USUARIO"
		# Remove the account itself.
		userdel "$USUARIO"
		if [ $? -eq 0 ]
		then
			echo "Usuario eliminado correctamente"
		else
			echo "Error al eliminar el usuario"
		#	exit 1
		fi
		# Remove the home directory (quoted so spaces/globs cannot expand).
		sudo rm -R "$DIR_APACHE$USUARIO"
		if [ $? -eq 0 ]
		then
			echo "Home del usuario eliminado correctamente"
		else
			echo "Error al eliminar el directorio del usuario"
		#	exit 1
		fi
		# Optionally disable and remove the Apache site configuration:
		#a2dissite "$USUARIO.conf"
		#rm /etc/apache2/sites-available/"$USUARIO".conf
		echo "Usuario $USUARIO eliminado correctamente!"
	done
done
|
#!/bin/bash
# Setup deploy user.
set -e
# BUG FIX: the original `useradd` + `mkdir` left /home/deploy and its .ssh
# directory owned by root, so the deploy user could not write its own keys.
# `useradd -m` creates the home with correct ownership in one step.
useradd -m -d /home/deploy deploy
mkdir -p /home/deploy/.ssh
chmod 700 /home/deploy/.ssh
chown -R deploy:deploy /home/deploy/.ssh
# Add deploy user to sudoers (password required for every command).
echo "deploy ALL=(ALL:ALL) ALL" >> /etc/sudoers
#!/bin/bash
# Launch the Node app from this script's own directory on port 8084,
# preferring a locally-installed node 0.10 toolchain on PATH.
export PATH=/home/tagomoris/local/node-v0.10/bin:$PATH
CWD=$(dirname $0)
cd $CWD
export PORT=8084
# exec replaces this shell so signals reach the node process directly.
exec node index.js
|
#include <stdio.h>

/* Volume of a cylinder: pi * r^2 * h.
 * Uses a more accurate PI than the original 3.14 constant. */
static float cylinder_volume(float r, float h)
{
    const float PI = 3.14159265f;
    return PI * r * r * h;
}

int main(void)
{
    float r, h;

    printf("Enter value of radius: ");
    /* BUG FIX: scanf results were ignored, so malformed input left r/h
     * uninitialized and the program printed garbage. */
    if (scanf("%f", &r) != 1) {
        fprintf(stderr, "Invalid radius\n");
        return 1;
    }
    printf("Enter value of height: ");
    if (scanf("%f", &h) != 1) {
        fprintf(stderr, "Invalid height\n");
        return 1;
    }
    printf("Volume of cylinder = %.2f", cylinder_volume(r, h));
    return 0;
}
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import os
from .models import *
from pkg_resources import resource_filename
LOCATION = resource_filename(__name__, 'metadata.csv')
class Database(object):
  """Access to the database's sample files and their CSV metadata."""

  def __init__(self):
    from .driver import Interface
    self.info = Interface()

    # Loads metadata
    import csv
    with open(LOCATION) as f:
      reader = csv.DictReader(f)
      self.metadata = [row for row in reader]

  def _sessions_from(self, path):
    """Reads a whitespace-separated list of session basedirs from ``path``."""
    with open(path, 'rt') as f:
      return f.read().split()

  def _files_in_sessions(self, sessions):
    """Returns :py:class:`File` objects whose ``basedir`` is in ``sessions``."""
    return [File(**k) for k in self.metadata if k['basedir'] in sessions]

  def objects(self, protocol='all', subset=None):
    """Returns a list of unique :py:class:`.File` objects for the specific
    query by the user.

    Parameters:

      protocol (:py:class:`str`, optional): If set, can take the value of
        either ``cvpr14`` or ``all``. ``cvpr14`` subselects samples used by Li
        et al. on their CVPR``14 paper for heart-rate estimation. If ``all`` is
        set, the complete database is selected.

      subset (:py:class:`str`, optional): If set, it could be either ``train``,
        ``dev`` or ``test`` or a combination of them (i.e. a list). If not set
        (default), the files from all these sets are retrieved for the ``all``
        protocol. Note that for the ``cvpr14`` protocol, this has no effect,
        since no training, development and test set have been defined in this
        case.

    Returns:

      list: A list of :py:class:`File` objects.
    """
    proto_basedir = os.path.join('data', 'protocols')

    if protocol == 'cvpr14':
      d = resource_filename(__name__, os.path.join(proto_basedir, 'cvpr14', 'li_samples_cvpr14.txt'))
      return self._files_in_sessions(self._sessions_from(d))

    # BUG FIX: the original tested ``protocol in ('all')``, which is a
    # substring test on the string 'all' (so 'a', 'l' and 'll' also matched);
    # an exact comparison is intended.
    if protocol == 'all':
      if not subset:
        return [File(**k) for k in self.metadata]

      files = []
      for name in ('train', 'dev', 'test'):
        if name in subset:
          d = resource_filename(__name__, os.path.join(proto_basedir, 'all', name + '.txt'))
          files += self._files_in_sessions(self._sessions_from(d))
      return files
    # NOTE: unknown protocols fall through and return None, as before.
# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
"""Says object was actually declared here, an not on the import module.
Parameters:
*args: An iterable of objects to modify
Resolves `Sphinx referencing issues
<https://github.com/sphinx-doc/sphinx/issues/3048>`
"""
for obj in args: obj.__module__ = __name__
# Re-home imported names (e.g. File) so Sphinx documents them here.
__appropriate__(
File,
)
# Export every public (non-underscore) name defined in this module.
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
#!/bin/bash
# az-vm-jumpbox-cli.sh
# This script was adapted from https://github.com/fouldsy/azure-mol-samples-2nd-ed/blob/master/05/azure_cli_sample.sh
# released under the MIT license. See https://github.com/fouldsy/azure-mol-samples-2nd-ed/blob/master/LICENSE
# explained in chapter 5 of the ebook "Learn Azure in a Month of Lunches - 2nd edition" (Manning Publications) by Iain Foulds,
# Purchase at https://www.manning.com/books/learn-azure-in-a-month-of-lunches-second-edition

set -o errexit

# MY_RG (the target resource group name) comes from ../setup.sh.
if [[ -z $MY_RG ]]; then
   source ../setup.sh  # in folder above this.
fi

# Create the resource group everything below lives in.
# BUG FIX: the script previously created a hard-coded group
# ("azuremolchapter5") and then placed every resource in "${MY_RB}", a
# variable that was never defined; both now consistently use "${MY_RG}".
az group create --name "${MY_RG}" --location eastus

# Create a virtual network and subnet
# The virtual network and subnet both create regular IP ranges assigned
# to them, just like an on-premises network
az network vnet create \
    --name vnetmol \
    --address-prefix 10.0.0.0/16 \
    --subnet-name websubnet \
    --subnet-prefix 10.0.1.0/24 \
    --resource-group "${MY_RG}"

# Define a unique DNS name
dnsName=azuremol$RANDOM

# Create a public IP address
# This public IP address gets assigned to a web server VM in a
# following step. We also assigned the DNS prefix generated above.
az network public-ip create \
    --name webpublicip \
    --dns-name "$dnsName" \
    --resource-group "${MY_RG}"

# Create a virtual network adapter
# All VMs need a virtual network interface card (vNIC) that connects them to a
# virtual network subnet. We assign the public IP address created in the
# previous step, along with a static internal IP address of 10.0.1.4
az network nic create \
    --name webvnic \
    --vnet-name vnetmol \
    --subnet websubnet \
    --public-ip-address webpublicip \
    --private-ip-address 10.0.1.4 \
    --resource-group "${MY_RG}"

# Create a network security group to filter inbound + outbound traffic.
az network nsg create \
    --name webnsg \
    --resource-group "${MY_RG}"

# Associate the network security group with the web subnet.
az network vnet subnet update \
    --vnet-name vnetmol \
    --name websubnet \
    --network-security-group webnsg \
    --resource-group "${MY_RG}"

# Allow inbound HTTP (port 80) to the web subnet.
az network nsg rule create \
    --nsg-name webnsg \
    --name allowhttp \
    --access allow \
    --protocol tcp \
    --direction inbound \
    --priority 100 \
    --source-address-prefix "*" \
    --source-port-range "*" \
    --destination-address-prefix "*" \
    --destination-port-range 80 \
    --resource-group "${MY_RG}"

# Create an additional network security group for remote access.
az network nsg create \
    --name remotensg \
    --resource-group "${MY_RG}"

# Allow SSH connections; the Azure CLI fills in smart defaults for the
# unspecified address prefixes, direction and destinations.
az network nsg rule create \
    --nsg-name remotensg \
    --name allowssh \
    --protocol tcp \
    --priority 100 \
    --destination-port-range 22 \
    --access allow \
    --resource-group "${MY_RG}"

# Create an additional subnet and associate the remote NSG with it at
# creation time (vs. the update used for the web subnet above).
az network vnet subnet create \
    --vnet-name vnetmol \
    --name remotesubnet \
    --address-prefix 10.0.2.0/24 \
    --network-security-group remotensg \
    --resource-group "${MY_RG}"

# Create a VM that will act as a web server, attached to the web vNIC.
az vm create \
    --name webvm \
    --nics webvnic \
    --image ubuntults \
    --size Standard_B1ms \
    --admin-username azuremol \
    --generate-ssh-keys \
    --resource-group "${MY_RG}"

# Create a VM that will act as our remote connection (jumpbox) VM,
# connected to the remote subnet.
az vm create \
    --name remotevm \
    --vnet-name vnetmol \
    --subnet remotesubnet \
    --nsg remotensg \
    --public-ip-address remotepublicip \
    --image ubuntults \
    --size Standard_B1ms \
    --admin-username azuremol \
    --generate-ssh-keys \
    --resource-group "${MY_RG}"

# Enable the SSH agent and add our SSH keys so the jumpbox can hop onward.
eval $(ssh-agent)
ssh-add

# Obtain the public IP address of the jumpbox VM.
# BUG FIX: "--resource-group" was previously placed outside the $( ... )
# command substitution, which broke the command; it belongs inside.
remotevmIp=$(az vm show \
    --name remotevm \
    --resource-group "${MY_RG}" \
    --show-details \
    --query publicIps \
    --output tsv)

# SSH to the remote VM, forwarding our SSH agent (-A).
ssh -A azuremol@$remotevmIp
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { createTenant, addUserToTenant, grantAccess } from '../actions/tenants';
// Admin UI with three forms: create a tenant, add a user to a tenant, and
// grant a tenant access to a resource. Each form calls the corresponding
// imported action directly.
class TenantManager extends Component {
  // Controlled-input values backing the three forms below.
  state = {
    tenantName: '',
    userName: '',
    resourceName: ''
  };
  // Generic controlled-input handler: mirrors the changed field into state
  // using the input's `name` attribute as the state key.
  handleChange = e => {
    const { name, value } = e.target;
    this.setState({ [name]: value });
  };
  // Creates the tenant, then clears the tenant-name field.
  handleCreateTenant = e => {
    e.preventDefault();
    createTenant(this.state.tenantName);
    this.setState({ tenantName: '' });
  };
  // Adds the named user to the named tenant, then clears the user field.
  handleAddUserToTenant = e => {
    e.preventDefault();
    addUserToTenant(this.state.tenantName, this.state.userName);
    this.setState({ userName: '' });
  };
  // Grants the tenant access to the named resource, then clears that field.
  handleGrantAccess = e => {
    e.preventDefault();
    grantAccess(this.state.tenantName, this.state.resourceName);
    this.setState({ resourceName: '' });
  };
  render() {
    return (
      <div className="tenant-manager">
        <h2>Create Tenant</h2>
        <form onSubmit={this.handleCreateTenant}>
          <label>Tenant Name</label>
          <input name="tenantName"
                 type="text"
                 value={this.state.tenantName}
                 onChange={this.handleChange} />
          <input type="submit" value="Create" />
        </form>
        <h2>Add User to Tenant</h2>
        <form onSubmit={this.handleAddUserToTenant}>
          <label>Tenant Name</label>
          <input name="tenantName"
                 type="text"
                 value={this.state.tenantName}
                 onChange={this.handleChange} />
          <label>User Name</label>
          <input name="userName"
                 type="text"
                 value={this.state.userName}
                 onChange={this.handleChange} />
          <input type="submit" value="Add" />
        </form>
        <h2>Grant Access to Resource</h2>
        <form onSubmit={this.handleGrantAccess}>
          <label>Tenant Name</label>
          <input name="tenantName"
                 type="text"
                 value={this.state.tenantName}
                 onChange={this.handleChange} />
          <label>Resource Name</label>
          <input name="resourceName"
                 type="text"
                 value={this.state.resourceName}
                 onChange={this.handleChange} />
          <input type="submit" value="Grant Access" />
        </form>
      </div>
    );
  }
}
// NOTE(review): these propTypes declare the action functions as required
// props, but the component calls the imported actions directly and never
// reads this.props — confirm whether props injection (e.g. via connect)
// was intended.
TenantManager.propTypes = {
  createTenant: PropTypes.func.isRequired,
  addUserToTenant: PropTypes.func.isRequired,
  grantAccess: PropTypes.func.isRequired
};
export default TenantManager;
#!/bin/sh
# macOS workstation bootstrap: installs Homebrew, then apps and tooling.

## Install HomeBrew
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
brew analytics off

## Web
brew install wget
brew cask install google-chrome
brew cask install firefox
brew cask install skype
brew cask install whatsapp
brew cask install zoomus

## Security
brew cask install 1password

## Dev - tools
brew install git bash-completion
echo "
## Git completion
[ -f /usr/local/etc/bash_completion ] && . /usr/local/etc/bash_completion || {
  # if not found in /usr/local/etc, try the brew --prefix location
  [ -f \"\$(brew --prefix)/etc/bash_completion.d/git-completion.bash\" ] && \\
    . \$(brew --prefix)/etc/bash_completion.d/git-completion.bash
}
## Git branch in prompt.
parse_git_branch() {
  git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
}
export PS1=\"\u@\h \W\[\033[32m\]\\\$(parse_git_branch)\[\033[00m\] \$ \"
" >> ~/.bash_profile

## Dev - Apps
brew cask install atom
brew cask install virtualbox
brew cask install virtualbox-extension-pack
brew cask install docker
brew cask install github
brew cask install slack
# BUG FIX: was "brew isntall awscli" (typo made this a no-op).
brew install awscli  ## might require some chown's

## Android
brew cask install horndis  ## Allow kernel module!!!
brew cask install android-file-transfer

## Dev - R
# BUG FIX: cask name was misspelled "xquatrz".
brew cask install xquartz
brew cask install r
brew cask install rstudio
## for package documentation
brew cask install mactex
# sudo tlmgr update --self
# sudo tlmgr update --all
# sudo tlmgr install titling framed inconsolata
# sudo tlmgr install collection-fontsrecommended

## Media
brew cask install flux
brew cask install gimp
brew cask install vlc
brew cask install calibre

## Spelling
# BUG FIX: hunspell and pkg-config are formulas, not casks; `brew cask
# install` would fail for them.
brew install hunspell pkg-config
# NOTE(review): this symlink pins hunspell 1.7.0 — verify against the
# actually-installed version.
ln -s /usr/local/Cellar/hunspell/1.7.0/lib/libhunspell-1.7.0.dylib /usr/local/Cellar/hunspell/1.7.0/lib/libhunspell.dylib
CFLAGS=$(pkg-config --cflags hunspell) LDFLAGS=$(pkg-config --libs hunspell) pip install hunspell

## Mount NTFS volumes
brew cask install osxfuse
brew install ntfs-3g
## Approve permissions for the kernel module
## To mount run (for example): sudo /usr/local/bin/ntfs-3g /dev/disk1s1 /Volumes/NTFS -olocal -oallow_other
## NOTE: file might not appear on Finder but will appear on terminal.

## From appstore: pocket

## From time to time run:
# brew update
# brew upgrade
# brew cask upgrade
|
import numpy as np
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import RandomForestClassifier
# Hyperparameter grid to search over (small, even-stepped ranges).
param_grid={
    'n_estimators': np.arange(2, 30, 2),
    'max_depth': np.arange(2, 12, 2),
    'min_samples_leaf': np.arange(1, 10, 2)
}
# Exhaustive 3-fold cross-validated search over the grid (verbose progress).
model = GridSearchCV(RandomForestClassifier(), param_grid, verbose=2, cv=3)
# Fit the search to the data.
# NOTE(review): X and y are not defined in this snippet — they must exist
# in the surrounding scope (e.g. a notebook) before this line runs.
model.fit(X, y)
# Print out the best estimator found by the search.
print(model.best_estimator_)
// Copyright 2020 The Chromium Embedded Framework Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CEF_LIBCEF_BROWSER_CHROME_CHROME_BROWSER_HOST_IMPL_H_
#define CEF_LIBCEF_BROWSER_CHROME_CHROME_BROWSER_HOST_IMPL_H_
#pragma once
#include <memory>
#include "libcef/browser/browser_host_base.h"
#include "libcef/browser/chrome/browser_delegate.h"
class Browser;
class ChromeBrowserDelegate;
// CefBrowser implementation for the chrome runtime. Method calls are delegated
// to the chrome Browser object or the WebContents as appropriate. See the
// ChromeBrowserDelegate documentation for additional details. All methods are
// thread-safe unless otherwise indicated.
class ChromeBrowserHostImpl : public CefBrowserHostBase {
 public:
  // CEF-specific parameters passed via Browser::CreateParams::cef_params and
  // possibly shared by multiple Browser instances.
  class DelegateCreateParams : public cef::BrowserDelegate::CreateParams {
   public:
    DelegateCreateParams(const CefBrowserCreateParams& create_params)
        : create_params_(create_params) {}

    CefBrowserCreateParams create_params_;
  };

  // Create a new Browser with a single tab (WebContents) and associated
  // ChromeBrowserHostImpl instance.
  static CefRefPtr<ChromeBrowserHostImpl> Create(
      const CefBrowserCreateParams& params);

  // Returns the browser associated with the specified RenderViewHost.
  static CefRefPtr<ChromeBrowserHostImpl> GetBrowserForHost(
      const content::RenderViewHost* host);
  // Returns the browser associated with the specified RenderFrameHost.
  static CefRefPtr<ChromeBrowserHostImpl> GetBrowserForHost(
      const content::RenderFrameHost* host);
  // Returns the browser associated with the specified WebContents.
  static CefRefPtr<ChromeBrowserHostImpl> GetBrowserForContents(
      const content::WebContents* contents);
  // Returns the browser associated with the specified global ID.
  static CefRefPtr<ChromeBrowserHostImpl> GetBrowserForGlobalId(
      const content::GlobalRenderFrameHostId& global_id);

  ~ChromeBrowserHostImpl() override;

  // CefBrowserContentsDelegate::Observer methods:
  void OnWebContentsDestroyed(content::WebContents* web_contents) override;

  // CefBrowserHostBase methods called from CefFrameHostImpl:
  void OnSetFocus(cef_focus_source_t source) override;

  // CefBrowserHost methods:
  void CloseBrowser(bool force_close) override;
  bool TryCloseBrowser() override;
  void SetFocus(bool focus) override;
  CefWindowHandle GetWindowHandle() override;
  CefWindowHandle GetOpenerWindowHandle() override;
  double GetZoomLevel() override;
  void SetZoomLevel(double zoomLevel) override;
  void RunFileDialog(FileDialogMode mode,
                     const CefString& title,
                     const CefString& default_file_path,
                     const std::vector<CefString>& accept_filters,
                     int selected_accept_filter,
                     CefRefPtr<CefRunFileDialogCallback> callback) override;
  void Print() override;
  void PrintToPDF(const CefString& path,
                  const CefPdfPrintSettings& settings,
                  CefRefPtr<CefPdfPrintCallback> callback) override;
  void Find(int identifier,
            const CefString& searchText,
            bool forward,
            bool matchCase,
            bool findNext) override;
  void StopFinding(bool clearSelection) override;
  void ShowDevTools(const CefWindowInfo& windowInfo,
                    CefRefPtr<CefClient> client,
                    const CefBrowserSettings& settings,
                    const CefPoint& inspect_element_at) override;
  void CloseDevTools() override;
  bool HasDevTools() override;
  // Off-screen rendering (OSR) related methods; see
  // IsWindowRenderingDisabled for whether OSR is active.
  bool IsWindowRenderingDisabled() override;
  void WasResized() override;
  void WasHidden(bool hidden) override;
  void NotifyScreenInfoChanged() override;
  void Invalidate(PaintElementType type) override;
  void SendExternalBeginFrame() override;
  void SendTouchEvent(const CefTouchEvent& event) override;
  void SendFocusEvent(bool setFocus) override;
  void SendCaptureLostEvent() override;
  void NotifyMoveOrResizeStarted() override;
  int GetWindowlessFrameRate() override;
  void SetWindowlessFrameRate(int frame_rate) override;
  // IME (input method editor) composition handling:
  void ImeSetComposition(const CefString& text,
                         const std::vector<CefCompositionUnderline>& underlines,
                         const CefRange& replacement_range,
                         const CefRange& selection_range) override;
  void ImeCommitText(const CefString& text,
                     const CefRange& replacement_range,
                     int relative_cursor_pos) override;
  void ImeFinishComposingText(bool keep_selection) override;
  void ImeCancelComposition() override;
  // Drag-and-drop event forwarding:
  void DragTargetDragEnter(CefRefPtr<CefDragData> drag_data,
                           const CefMouseEvent& event,
                           DragOperationsMask allowed_ops) override;
  void DragTargetDragOver(const CefMouseEvent& event,
                          DragOperationsMask allowed_ops) override;
  void DragTargetDragLeave() override;
  void DragTargetDrop(const CefMouseEvent& event) override;
  void DragSourceSystemDragEnded() override;
  void DragSourceEndedAt(int x, int y, DragOperationsMask op) override;
  void SetAudioMuted(bool mute) override;
  bool IsAudioMuted() override;
  void SetAccessibilityState(cef_state_t accessibility_state) override;
  void SetAutoResizeEnabled(bool enabled,
                            const CefSize& min_size,
                            const CefSize& max_size) override;
  CefRefPtr<CefExtension> GetExtension() override;
  bool IsBackgroundHost() override;

 protected:
  bool Navigate(const content::OpenURLParams& params) override;

 private:
  friend class ChromeBrowserDelegate;

  ChromeBrowserHostImpl(
      const CefBrowserSettings& settings,
      CefRefPtr<CefClient> client,
      std::unique_ptr<CefBrowserPlatformDelegate> platform_delegate,
      scoped_refptr<CefBrowserInfo> browser_info,
      CefRefPtr<CefRequestContextImpl> request_context);

  // Create a new Browser without initializing the WebContents.
  static Browser* CreateBrowser(const CefBrowserCreateParams& params);

  // Called from ChromeBrowserDelegate::CreateBrowser when this object is first
  // created. Must be called on the UI thread.
  void Attach(content::WebContents* web_contents,
              CefRefPtr<ChromeBrowserHostImpl> opener);

  // Called from ChromeBrowserDelegate::AddNewContents to take ownership of a
  // popup WebContents.
  void AddNewContents(std::unique_ptr<content::WebContents> contents);

  // Called when this object changes Browser ownership (e.g. initially created,
  // dragging between windows, etc). The old Browser, if any, will be cleared
  // before the new Browser is added. Must be called on the UI thread.
  void SetBrowser(Browser* browser);

  // CefBrowserHostBase methods:
  void WindowDestroyed() override;
  void DestroyBrowser() override;

  void DoCloseBrowser(bool force_close);

  // Returns the current tab index for the associated WebContents, or
  // TabStripModel::kNoTab if not found.
  int GetCurrentTabIndex() const;

  // The associated chrome Browser; nullptr when no Browser is currently
  // associated (see SetBrowser above).
  Browser* browser_ = nullptr;
};
#endif // CEF_LIBCEF_BROWSER_CHROME_CHROME_BROWSER_HOST_IMPL_H_
|
import UIKit
/// Maps a case-insensitive state name (e.g. "normal", "selected") to the
/// corresponding `UIControlState`. Unknown names fall back to `.normal`.
internal class UIControlStateConverter {
    static func fromString(controlState: String) -> UIControlState {
        // BUG FIX: the original mixed Swift 2 case names (.Normal) with the
        // Swift 3 String API `lowercased()`, which compiles on neither
        // version. Swift 3+ lowerCamelCase case names are used throughout.
        let controlStateMapping: [String: UIControlState] = [
            "normal": .normal,
            "highlighted": .highlighted,
            "disabled": .disabled,
            "selected": .selected,
            "focused": .focused,
            "application": .application,
            "reserved": .reserved
        ]
        // Dictionary lookup with a default replaces the if-let/fallback pair.
        return controlStateMapping[controlState.lowercased()] ?? .normal
    }
}
<reponame>moonryul/Ultraino
package acousticfield3d.scene;
import acousticfield3d.math.Frustrum;
import acousticfield3d.math.Ray;
import acousticfield3d.math.Vector3f;
import acousticfield3d.renderer.Texture;
import acousticfield3d.shapes.Mesh;
/**
*
* @author Asier
*/
/**
 * A renderable scene entity: a named mesh plus its texture, shader and
 * visibility flags, with ray-picking against the mesh's bounding volumes.
 *
 * @author Asier
 */
public class MeshEntity extends Entity{
    public float distanceToCamera;
    public int renderingOrder;

    // Key used to look the Mesh up in Resources.
    String mesh;
    Texture texture;
    int shader;
    boolean visible = true;
    boolean doubledSided = false;

    // Optional mesh supplied directly instead of via the Resources lookup.
    public Mesh customMesh;

    public MeshEntity() {
        super();
    }

    public MeshEntity(String mesh, Texture texture, int shader) {
        this.mesh = mesh;
        this.texture = texture;
        this.shader = shader;
    }

    /**
     * Intersects a world-space ray with this entity's bounding sphere.
     *
     * @param r ray in world space
     * @return distance from the ray origin to the hit point, or -1 if the
     *         mesh is missing or there is no intersection
     */
    public float rayToSphere(final Ray r){
        return rayToBound(r, true);
    }

    /**
     * Intersects a world-space ray with this entity's bounding box.
     *
     * @param r ray in world space
     * @return distance from the ray origin to the hit point, or -1 if the
     *         mesh is missing or there is no intersection
     */
    public float rayToBox(final Ray r){
        return rayToBound(r, false);
    }

    /**
     * Shared implementation of the two ray tests above (they previously
     * duplicated this logic): transform the ray into model space, intersect
     * it with the chosen bounding volume, then transform the hit point back
     * to world space and measure the distance.
     */
    private float rayToBound(final Ray r, final boolean useSphere){
        //transform the ray with inverse transform
        Ray rSpace = new Ray(r.origin, r.direction, false);
        transform.transformInversePoint(rSpace.origin, rSpace.origin);
        transform.transformInverseVector(rSpace.direction, rSpace.direction);

        Mesh m = Resources.get().getMesh(mesh);
        if(m == null) { return -1.0f; }
        Vector3f p = useSphere
                ? m.getbSphere().intersectPoint(rSpace)
                : m.getbBox().intersectPoint(rSpace);
        if (p == null){ return -1.0f; }

        //apply transform to the hit point and return the distance
        transform.transformPoint(p, p);
        return p.distance( r.origin );
    }

    /**
     * Frustum culling test. Not implemented: always returns false (the
     * original implementation was commented out).
     */
    public boolean boxInside(final Frustrum frustrum){
        return false;
    }

    public boolean isDoubledSided() {
        return doubledSided;
    }

    public void setDoubledSided(boolean isDoubledSided) {
        this.doubledSided = isDoubledSided;
    }

    public String getMesh() {
        return mesh;
    }

    public void setMesh(String mesh) {
        this.mesh = mesh;
    }

    public Texture getTexture() {
        return texture;
    }

    public void setTexture(Texture texture) {
        this.texture = texture;
    }

    public int getShader() {
        return shader;
    }

    public void setShader(int shader) {
        this.shader = shader;
    }

    public boolean isVisible() {
        return visible;
    }

    public void setVisible(boolean visible) {
        this.visible = visible;
    }
}
|
def sublist_check(lst, sub):
    """Return True if ``sub`` occurs as a contiguous sublist of ``lst``.

    An empty ``sub`` is a sublist of every list — including the empty list,
    a case the original got wrong (it returned False for ``([], [])``
    because its scan loop never executed).
    """
    n = len(sub)
    if n == 0:
        return True
    # Compare each length-n window via slicing; clearer and avoids the
    # original's per-element bounds checks.
    return any(lst[i:i + n] == sub for i in range(len(lst) - n + 1))
<reponame>Jack-Fraser16/onc-certification-g10-test-kit
require 'yaml'
module Inferno
  module Terminology
    module Tasks
      # Compares the terminology build output (the generated manifest.yml)
      # against the expected manifest shipped with the kit, and logs any
      # value sets whose code counts differ.
      class CheckBuiltTerminology
        # Mime-type based value sets are allowed to mismatch: their contents
        # track whichever `mime-types-data` gem version is installed.
        MIME_TYPE_SYSTEMS = [
          'http://hl7.org/fhir/ValueSet/mimetypes',
          'urn:ietf:bcp:13'
        ].freeze

        # Entry point: logs success, a benign mime-type-only mismatch, or a
        # real discrepancy, followed by one line per mismatched value set.
        def run
          if mismatched_value_sets.blank?
            Inferno.logger.info 'Terminology built successfully.'
            return
          end

          if only_mime_types_mismatch?
            Inferno.logger.info <<~MIME
              Terminology built successfully.

              Mime-type based terminology did not match, but this can be a
              result of using a newer version of the `mime-types-data` gem and
              does not necessarily reflect a problem with the terminology build.
              The expected mime-types codes were generated with version
              `mime-types-data` version `3.2021.0901`.
            MIME
          else
            Inferno.logger.info 'Terminology build results different than expected.'
          end

          mismatched_value_sets.each do |value_set|
            Inferno.logger.info mismatched_value_set_message(value_set)
          end
        end

        # The manifest shipped with this kit (the "known good" build).
        def expected_manifest
          YAML.load_file(File.join(__dir__, '..', 'expected_manifest.yml'))
        end

        # Path to the manifest produced by the local terminology build.
        def new_manifest_path
          @new_manifest_path ||=
            File.join(Dir.pwd, 'resources', 'terminology', 'validators', 'bloom', 'manifest.yml')
        end

        # The locally built manifest; empty when no build has been run yet.
        def new_manifest
          return [] unless File.exist? new_manifest_path

          YAML.load_file(new_manifest_path)
        end

        # Expected entries whose built counterpart is missing or differs.
        def mismatched_value_sets
          @mismatched_value_sets ||=
            expected_manifest.reject do |expected_value_set|
              url = expected_value_set[:url]
              new_value_set(url) == expected_value_set
            end
        end

        # Looks up the built manifest entry for a value-set URL.
        def new_value_set(url)
          new_manifest.find { |value_set| value_set[:url] == url }
        end

        # True when every mismatch is one of the tolerated mime-type sets.
        def only_mime_types_mismatch?
          mismatched_value_sets.all? { |value_set| MIME_TYPE_SYSTEMS.include? value_set[:url] }
        end

        # Human-readable "expected vs actual" line for one value set.
        def mismatched_value_set_message(expected_value_set)
          url = expected_value_set[:url]
          actual_value_set = new_value_set(url)

          "#{url}: Expected codes: #{expected_value_set[:count]} Actual codes: #{actual_value_set&.dig(:count) || 0}"
        end
      end
    end
  end
end
|
<reponame>JArmunia/Calendario<filename>Cal3/src/Vista/ContactosVista.java
/**
* ContactosVista.java
* Vista de los contactos
*
* @author <NAME>
* @version 25-8-2018
*/
package Vista;
import Modelo.CalendarioModelo;
import Modelo.Contacto;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.util.Map;
import javax.swing.DefaultListCellRenderer;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
/**
 * Non-modal dialog that lists the user's contacts, with buttons to create,
 * edit and delete them, and a detail panel showing the selected contact.
 * Implemented as a singleton; obtain the instance via
 * {@link #devolverInstancia(CalendarioVista, Localizacion, CalendarioModelo)}.
 */
public class ContactosVista extends JDialog {

    private final static int ANCHO_VENTANA = 500;
    private final static int LARGO_VENTANA = 400;

    private CalendarioModelo modelo;
    private CalendarioVista vista;
    private Localizacion localizacion;
    private JList list;
    private JButton nuevo;
    private JButton editar;
    private JButton eliminar;
    private static ContactosVista instancia = null;
    private JLabel campoId;
    private JLabel campoNombre;
    private JLabel campoApellido1;
    private JLabel campoApellido2;
    private JLabel campoTfno;
    private JLabel campoCorreo;
    private JPanel panelInfoContacto;

    /**
     * Builds the contacts view: button bar (new/edit/delete), a detail panel
     * on the east side and the scrollable contact list in the center.
     *
     * @param vista parent view, also used as the buttons' ActionListener
     * @param localizacion source of localized UI strings
     * @param modelo calendar model providing the contacts
     */
    private ContactosVista(CalendarioVista vista, Localizacion localizacion, CalendarioModelo modelo) {
        super(vista, false);
        this.vista = vista;
        this.localizacion = localizacion;
        this.modelo = modelo;
        setLayout(new BorderLayout());
        setSize(ANCHO_VENTANA, LARGO_VENTANA);
        setTitle(localizacion.devuelve(localizacion.TEXTO_MENU_CONTACTOS));
        nuevo = new JButton(localizacion.devuelve(localizacion.TEXTO_MENU_NUEVO));
        nuevo.addActionListener(vista);
        nuevo.setActionCommand(vista.NUEVO_CONTACTO);
        editar = new JButton(localizacion.devuelve(localizacion.TEXTO_MENU_EDITAR));
        editar.addActionListener(vista);
        editar.setActionCommand(vista.EDITAR_CONTACTO);
        editar.setEnabled(false);
        eliminar = new JButton(localizacion.devuelve(localizacion.TEXTO_MENU_ELIMINAR));
        eliminar.setActionCommand(vista.ELIMINAR_CONTACTO);
        eliminar.addActionListener(vista);
        eliminar.setEnabled(false);
        JPanel panelBotones = new JPanel();
        panelBotones.add(nuevo);
        panelBotones.add(editar);
        panelBotones.add(eliminar);
        add(panelBotones, BorderLayout.NORTH);
        panelInfoContacto = new JPanel(new GridBagLayout());
        panelInfoContacto.setVisible(false);
        add(panelInfoContacto, BorderLayout.EAST);
        GridBagConstraints etiqueta = new GridBagConstraints();
        GridBagConstraints campo = new GridBagConstraints();
        etiqueta.anchor = GridBagConstraints.FIRST_LINE_START;
        campo.weightx = 1.0;
        etiqueta.gridx = 0;
        etiqueta.insets = new Insets(10, 10, 10, 10);
        campo.insets = new Insets(0, 0, 0, 10);
        campo.fill = GridBagConstraints.HORIZONTAL;
        campo.gridx = 1;
        // One label per detail field, in the same order as the value labels
        // added below (id, first name, surname 1, surname 2, phone, e-mail).
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_ID)), etiqueta);
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_NOMBRE)), etiqueta);
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_APELLIDO_1)), etiqueta);
        // Fix: this row previously reused TEXTO_APELLIDO_1, mislabelling the
        // second-surname field; it must use TEXTO_APELLIDO_2.
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_APELLIDO_2)), etiqueta);
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_TELEFONO)), etiqueta);
        panelInfoContacto.add(new JLabel(localizacion.devuelve(localizacion.TEXTO_CORREO)), etiqueta);
        campoId = new JLabel();
        campoNombre = new JLabel();
        campoApellido1 = new JLabel();
        campoApellido2 = new JLabel();
        campoTfno = new JLabel();
        campoCorreo = new JLabel();
        panelInfoContacto.add(campoId, campo);
        panelInfoContacto.add(campoNombre, campo);
        panelInfoContacto.add(campoApellido1, campo);
        panelInfoContacto.add(campoApellido2, campo);
        panelInfoContacto.add(campoTfno, campo);
        panelInfoContacto.add(campoCorreo, campo);
        list = new JList();
        // Render each contact by its id rather than its toString().
        list.setCellRenderer(new DefaultListCellRenderer() {
            @Override
            public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
                value = (((Contacto) value).devuelveId());
                return super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
            }
        });
        listarContactos();
        add(new JScrollPane(list), BorderLayout.CENTER);
        list.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        // Keep the detail panel and edit/delete buttons in sync with the
        // current selection.
        list.addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent lse) {
                mostrarInfoContacto();
                editar.setEnabled(list.getSelectedValue() != null);
                eliminar.setEnabled(list.getSelectedValue() != null);
            }
        });
        setLocationRelativeTo(null);
        setVisible(false);
    }

    /**
     * Reloads the JList model from the contacts held by the model.
     */
    public void listarContactos() {
        DefaultListModel<Contacto> listaModelo = new DefaultListModel();
        for (Map.Entry<String, Contacto> entrada : modelo.devuelveContactos().entrySet()) {
            listaModelo.addElement(entrada.getValue());
        }
        list.setModel(listaModelo);
    }

    /**
     * Returns the singleton instance of the contacts view, creating it on
     * first use.
     *
     * @param vista parent view
     * @param localizacion source of localized UI strings
     * @param modelo calendar model providing the contacts
     * @return the shared ContactosVista instance
     */
    public static synchronized ContactosVista devolverInstancia(
            CalendarioVista vista, Localizacion localizacion, CalendarioModelo modelo) {
        if (instancia == null) {
            instancia = new ContactosVista(vista, localizacion, modelo);
        }
        return instancia;
    }

    /**
     * Makes the contacts view visible.
     */
    public void mostrar() {
        setVisible(true);
    }

    /**
     * Returns the currently selected contact, or {@code null} when nothing
     * is selected.
     *
     * @return the selected contact
     */
    public Contacto devuelveContacto() {
        return (Contacto) list.getSelectedValue();
    }

    /**
     * Shows the detail panel populated with the selected contact's data, or
     * hides it when no contact is selected.
     */
    public void mostrarInfoContacto() {
        Contacto contactoSeleccionado = devuelveContacto();
        if (contactoSeleccionado != null) {
            panelInfoContacto.setVisible(true);
            campoId.setText(contactoSeleccionado.devuelveId());
            campoNombre.setText(contactoSeleccionado.devuelveNombre());
            campoApellido1.setText(contactoSeleccionado.devuelveApellido1());
            campoApellido2.setText(contactoSeleccionado.devuelveApellido2());
            campoTfno.setText("" + contactoSeleccionado.devuelveTfno());
            campoCorreo.setText(contactoSeleccionado.devuelveCorreo());
        } else {
            panelInfoContacto.setVisible(false);
        }
    }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2019-2020 The XBit Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# CI environment for the Windows 64-bit cross-compile job: builds inside an
# Ubuntu bionic container with the mingw-w64 toolchain, using wine to run
# the produced binaries where needed.
export LC_ALL=C.UTF-8

export CONTAINER_NAME=ci_win64
export DOCKER_NAME_TAG=ubuntu:18.04 # Check that bionic can cross-compile to win64 (bionic is used in the gitian build as well)
# Target triplet for cross-compilation and the packages the toolchain needs.
export HOST=x86_64-w64-mingw32
export PACKAGES="python3 nsis g++-mingw-w64-x86-64 wine-binfmt wine64 file"
# Functional tests are skipped for this target; security tests still run.
# NOTE(review): RUN_FUNCTIONAL_TESTS is unquoted while RUN_SECURITY_TESTS is
# quoted — behaviourally identical, just inconsistent style.
export RUN_FUNCTIONAL_TESTS=false
export RUN_SECURITY_TESTS="true"
# Build goal and configure flags forwarded to the XBit build system.
export GOAL="deploy"
export XBIT_CONFIG="--enable-reduce-exports --disable-gui-tests --without-boost-process"
|
<filename>Include/KAI/Core/RootType.h
#pragma once
/// Strip all qualifiers from a given type.
template <class T>
struct RootType
{
typedef T Type;
};
template <class T>
struct RootType<T &>
{
typedef T Type;
};
template <class T>
struct RootType<const T&>
{
typedef T Type;
};
template <class T>
struct RootType<T&&>
{
typedef T Type;
};
|
<filename>pages/[rid]/create.tsx
import {useRouter} from "next/router";
import App from "../../components/App";
import store from "../../redux/store/Store";
import {Provider} from "react-redux";
import React from "react";
// Page component for the /[rid]/create route: renders the App in create
// mode for the route's `rid`, wrapped in the Redux store provider.
const Create = () => {
    const {rid} = useRouter().query;

    return (
        <Provider store={store}>
            <App createMode id={rid as string}/>
        </Provider>
    );
}

export default Create;
|
<reponame>edhowland/spec_wire
# Minimal value holder: wraps a single value passed at construction and
# exposes it read-only via #val1.
class Myfoo
  # Read-only accessor for the wrapped value.
  attr_reader :val1

  # @param val [Object] the value to store; kept as-is (no copy).
  def initialize(val)
    @val1 = val
  end
end
def sort_list(int_list):
    """Return a new list with the elements of ``int_list`` in ascending order.

    The input sequence is left unmodified; ``sorted`` builds a fresh list.
    """
    return sorted(int_list)


if __name__ == "__main__":
    # Demo usage. The original script referenced an undefined module-level
    # `int_list`, which raised NameError on execution/import; define a
    # sample input here and guard it so importing the module has no side
    # effects.
    int_list = [5, 2, 9, 1]
    print(sort_list(int_list))
#!/usr/bin/env bash

# Typography
# Cache the terminal escape sequences once (via tput) so the logging
# helpers below can colorize output without invoking tput on every call.
red=$(tput setaf 1)
green=$(tput setaf 2)
yellow=$(tput setaf 3)
lila=$(tput setaf 4)
pink=$(tput setaf 5)
blue=$(tput setaf 6)
white=$(tput setaf 7)
black=$(tput setaf 8)
bold=$(tput bold)
reset=$(tput sgr0)
# Log a section heading to stdout: "  ==> <msg>" with a lila arrow and the
# message in bold. printf '%s' keeps the output identical to echo here.
heading ()
{
    printf '%s\n' " ${lila}==>${reset}${bold} $1${reset}"
}
# Log a success message: green arrow, bold message text.
success ()
{
    printf '%s\n' " ${green}==>${reset}${bold} $1${reset}"
}
# Log an informational message: blue arrow, bold message text.
info ()
{
    printf '%s\n' " ${blue}==>${reset}${bold} $1${reset}"
}
# Log a warning message: yellow arrow, bold message text.
warning ()
{
    printf '%s\n' " ${yellow}==>${reset}${bold} $1${reset}"
}
# Log an error message: red arrow, bold message text.
error ()
{
    printf '%s\n' " ${red}==>${reset}${bold} $1${reset}"
}
# Detect pkg type
# A non-empty $DEB means an apt-based system (Debian/Ubuntu); a non-empty
# $RPM means a yum-based one (RHEL/CentOS). Only one is expected to resolve.
# NOTE(review): `which` is deprecated in favour of `command -v` — confirm
# before changing, as later checks test these variables for emptiness.
DEB=$(which apt-get)
RPM=$(which yum)
# Detect SystemV / SystemD
# Heuristic: on systemd distros /sbin/init is a symlink to systemd;
# otherwise assume a SysV-style init.
SYS=$([[ -L "/sbin/init" ]] && echo 'SystemD' || echo 'SystemV')
if [[ ! -z $DEB ]]; then
    success "Running install for Debian derivate"
elif [[ ! -z $RPM ]]; then
    success "Running install for RedHat derivate"
else
    # Neither apt-get nor yum found — unsupported distro, abort.
    heading "Not supported system"
    exit 1;
fi
# Ensure an en_US.UTF-8 locale is active for the current shell and persisted
# in ~/.bashrc, generating/installing the locale first when the system lacks
# it (locale name spelling differs between distros: UTF-8 vs utf8).
if [[ $(locale -a | grep ^en_US.UTF-8) ]] || [[ $(locale -a | grep ^en_US.utf8) ]]; then
    # Append the exports only if they are not already present. Fix: the
    # original used `if ! $(grep ...)`, which command-substitutes the
    # matched line and *executes* it as a command; test grep's exit status
    # quietly (-q) instead.
    if ! grep -qE "(en_US.UTF-8)" "$HOME/.bashrc"; then
        # Setting the bashrc locale
        echo "export LC_ALL=en_US.UTF-8" >> "$HOME/.bashrc"
        echo "export LANG=en_US.UTF-8" >> "$HOME/.bashrc"
        echo "export LANGUAGE=en_US.UTF-8" >> "$HOME/.bashrc"
        # Setting the current shell locale
        export LC_ALL="en_US.UTF-8"
        export LANG="en_US.UTF-8"
        export LANGUAGE="en_US.UTF-8"
    fi
else
    # Install en_US.UTF-8 Locale
    if [[ ! -z $DEB ]]; then
        sudo locale-gen en_US.UTF-8
        sudo update-locale LANG=en_US.UTF-8
    elif [[ ! -z $RPM ]]; then
        sudo localedef -c -i en_US -f UTF-8 en_US.UTF-8
    fi
    # Setting the current shell locale
    export LC_ALL="en_US.UTF-8"
    export LANG="en_US.UTF-8"
    export LANGUAGE="en_US.UTF-8"
    # Setting the bashrc locale
    echo "export LC_ALL=en_US.UTF-8" >> "$HOME/.bashrc"
    echo "export LANG=en_US.UTF-8" >> "$HOME/.bashrc"
    echo "export LANGUAGE=en_US.UTF-8" >> "$HOME/.bashrc"
fi
heading "Installing system dependencies..."
# Base tooling needed by later steps: git/curl to fetch sources and
# apt-transport-https for the HTTPS package repositories added below.
if [[ ! -z $DEB ]]; then
    sudo apt-get update
    sudo apt-get install -y git curl apt-transport-https update-notifier
elif [[ ! -z $RPM ]]; then
    sudo yum update -y
    sudo yum install git curl epel-release -y
fi
success "Installed system dependencies!"
heading "Installing node.js & npm..."
# Remove any previous global node/npm install to avoid version conflicts.
sudo rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/{npm*,node*,man1/node*}
sudo rm -rf ~/{.npm,.forever,.node*,.cache,.nvm}
if [[ ! -z $DEB ]]; then
    # Register the NodeSource repository (Node 11.x) and install from it.
    sudo wget --quiet -O - https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
    (echo "deb https://deb.nodesource.com/node_11.x $(lsb_release -s -c) main" | sudo tee /etc/apt/sources.list.d/nodesource.list)
    sudo apt-get update
    sudo apt-get install nodejs -y
elif [[ ! -z $RPM ]]; then
    sudo yum install gcc-c++ make -y
    # NOTE(review): this pipes a remote setup script straight into a root
    # shell — convenient but implies full trust in rpm.nodesource.com.
    curl -sL https://rpm.nodesource.com/setup_11.x | sudo -E bash - > /dev/null 2>&1
fi
success "Installed node.js & npm!"
heading "Installing Yarn..."
# Register the official Yarn package repository for the detected package
# manager, then install from it.
if [[ ! -z $DEB ]]; then
    curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
    (echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list)
    sudo apt-get update
    sudo apt-get install -y yarn
elif [[ ! -z $RPM ]]; then
    curl -sL https://dl.yarnpkg.com/rpm/yarn.repo | sudo tee /etc/yum.repos.d/yarn.repo
    sudo yum install yarn -y
fi
success "Installed Yarn!"
heading "Installing PM2..."
# PM2 process manager plus log rotation: cap each log at 500M, compress
# rotated files and keep 7 of them.
sudo yarn global add pm2
pm2 install pm2-logrotate
pm2 set pm2-logrotate:max_size 500M
pm2 set pm2-logrotate:compress true
pm2 set pm2-logrotate:retain 7
success "Installed PM2!"
heading "Installing program dependencies..."
# Native build dependencies for node modules (cairo, PostgreSQL client
# headers) plus jq for JSON handling in later scripts.
if [[ ! -z $DEB ]]; then
    sudo apt-get install build-essential libcairo2-dev pkg-config libtool autoconf automake python libpq-dev jq -y
elif [[ ! -z $RPM ]]; then
    sudo yum groupinstall "Development Tools" -y -q
    sudo yum install postgresql-devel jq -y -q
fi
success "Installed program dependencies!"
heading "Installing PostgreSQL..."
if [[ ! -z $DEB ]]; then
    sudo apt-get update
    sudo apt-get install postgresql postgresql-contrib -y
elif [[ ! -z $RPM ]]; then
    sudo yum install postgresql-server postgresql-contrib -y
    # RPM installs do not initialize the cluster automatically; do it here,
    # using the matching init system's tooling.
    if [[ "$SYS" == "SystemV" ]]; then
        sudo service postgresql initdb
        sudo service postgresql start
    else
        sudo postgresql-setup initdb
        sudo systemctl start postgresql
    fi
fi
success "Installed PostgreSQL!"
heading "Installing NTP..."
sudo timedatectl set-ntp off > /dev/null 2>&1 # disable the default systemd timesyncd service
if [[ ! -z $DEB ]]; then
    sudo apt-get install ntp -yyq
elif [[ ! -z $RPM ]]; then
    sudo yum install ntp -y -q
fi
# One-shot sync: -g allows a large initial offset, -q exits after setting.
sudo ntpd -gq
success "Installed NTP!"
heading "Installing system updates..."
# Bring the whole system up to date and clean package caches afterwards.
if [[ ! -z $DEB ]]; then
    sudo apt-get update
    sudo apt-get upgrade -yqq
    sudo apt-get dist-upgrade -yq
    sudo apt-get autoremove -yyq
    sudo apt-get autoclean -yq
elif [[ ! -z $RPM ]]; then
    # Fix: -y keeps this run non-interactive (matching every other yum call
    # in this script), and `yum clean` requires an argument — clean all
    # cached data explicitly.
    sudo yum update -y
    sudo yum clean all
fi
success "Installed system updates!"
heading "Installing ARK Core..."
# Make the `ark` alias usable in this script and persist the user-facing
# `examplechain` alias for interactive shells.
shopt -s expand_aliases
alias ark="$HOME/core-bridgechain/packages/core/bin/run"
echo 'alias examplechain="$HOME/core-bridgechain/packages/core/bin/run"' >> ~/.bashrc
# Fresh clone of the bridgechain core repository.
rm -rf "$HOME/core-bridgechain"
git clone "https://github.com/munich/examplechain.git" "$HOME/core-bridgechain" || FAILED="Y"
if [ "$FAILED" == "Y" ]; then
    echo "Failed to fetch core repo with origin 'https://github.com/munich/examplechain.git'"
    exit 1
fi
cd "$HOME/core-bridgechain"
# Prefer the bridgechain-changes branch when the remote provides it.
HAS_REMOTE=$(git branch -a | fgrep -o "remotes/origin/chore/bridgechain-changes")
if [ ! -z "$HAS_REMOTE" ]; then
    git checkout chore/bridgechain-changes
fi
# `yarn setup` can fail transiently (network, registry); retry until it
# succeeds.
YARN_SETUP="N"
while [ "$YARN_SETUP" == "N" ]; do
    YARN_SETUP="Y"
    yarn setup || YARN_SETUP="N"
done
# Drop stale per-user config from previous installs. (Fix: the original
# removed "$HOME/.config/@examplechain" twice in a row; once suffices.)
rm -rf "$HOME/.config/@examplechain"
rm -rf "$HOME/.config/examplechain-core"
echo 'export PATH=$(yarn global bin):$PATH' >> ~/.bashrc
export PATH=$(yarn global bin):$PATH
ark config:publish
success "Installed ARK Core!"
# setup postgres username, password and database
read -p "Would you like to configure the database? [y/N]: " choice
if [[ "$choice" =~ ^(yes|y|Y) ]]; then
    read -p "Enter the database username: " databaseUsername
    read -p "Enter the database password: " databasePassword
    read -p "Enter the database name: " databaseName
    # Persist the credentials in the core environment. Quote the values so
    # passwords containing spaces or glob characters survive word splitting.
    ark env:set CORE_DB_USERNAME "$databaseUsername"
    ark env:set CORE_DB_PASSWORD "$databasePassword"
    ark env:set CORE_DB_DATABASE "$databaseName"
    userExists=$(sudo -i -u postgres psql -c "SELECT * FROM pg_user WHERE usename = '${databaseUsername}'" | grep -c "1 row")
    databaseExists=$(sudo -i -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname = '${databaseName}'")
    if [[ $userExists == 1 ]]; then
        read -p "The database user ${databaseUsername} already exists, do you want to overwrite it? [y/N]: " choice
        if [[ "$choice" =~ ^(yes|y|Y) ]]; then
            # Reassign any existing database before dropping the user, then
            # recreate the user with the new password.
            if [[ $databaseExists == 1 ]]; then
                sudo -i -u postgres psql -c "ALTER DATABASE ${databaseName} OWNER TO postgres;"
            fi
            sudo -i -u postgres psql -c "DROP USER ${databaseUsername}"
            sudo -i -u postgres psql -c "CREATE USER ${databaseUsername} WITH PASSWORD '${databasePassword}' CREATEDB;"
        elif [[ "$choice" =~ ^(no|n|N) ]]; then
            # Keep the existing user. Fix: the original used `continue`
            # here, which is an error outside a loop; a no-op expresses the
            # intended "do nothing and fall through".
            :
        fi
    else
        sudo -i -u postgres psql -c "CREATE USER ${databaseUsername} WITH PASSWORD '${databasePassword}' CREATEDB;"
    fi
    if [[ $databaseExists == 1 ]]; then
        read -p "The database ${databaseName} already exists, do you want to overwrite it? [y/N]: " choice
        if [[ "$choice" =~ ^(yes|y|Y) ]]; then
            sudo -i -u postgres psql -c "DROP DATABASE ${databaseName};"
            sudo -i -u postgres psql -c "CREATE DATABASE ${databaseName} WITH OWNER ${databaseUsername};"
        elif [[ "$choice" =~ ^(no|n|N) ]]; then
            # Keep the database but hand ownership to the configured user.
            sudo -i -u postgres psql -c "ALTER DATABASE ${databaseName} OWNER TO ${databaseUsername};"
        fi
    else
        sudo -i -u postgres psql -c "CREATE DATABASE ${databaseName} WITH OWNER ${databaseUsername};"
    fi
fi
# Replace this shell so the new aliases and PATH take effect immediately.
exec "$BASH"
|
package com.github.coreyshupe.lb.api;
import com.github.coreyshupe.lb.api.cache.MemoryServerBalanceCache;
import com.github.coreyshupe.lb.api.cache.RedisServerBalanceCache;
import com.github.coreyshupe.lb.api.cache.ServerBalanceCache;
import com.github.coreyshupe.lb.api.config.ConfigProvider;
import com.github.coreyshupe.lb.api.config.LoadBalancerConfig;
import com.github.coreyshupe.lb.api.config.RedisConfig;
import java.io.IOException;
/**
 * Wires a {@link ConfigProvider} to a {@link ServerBalanceCache}: on
 * construction it writes a default config if none exists, picks a Redis- or
 * memory-backed cache based on multi-proxy support, and registers a reload
 * hook that swaps or reloads the cache whenever the configuration changes.
 */
public class LoadBalancerInstance {

    /** Fallback configuration written to disk when no config exists yet. */
    private static final LoadBalancerConfig DEFAULT_BALANCER_CONFIG = new LoadBalancerConfig(
            false,
            new String[]{},
            new RedisConfig("127.0.0.1", 6379, "admin", "password")
    );

    private final ConfigProvider configProvider;
    private ServerBalanceCache serverBalanceCache;

    /**
     * @param configProvider source of the load-balancer configuration
     * @throws IOException if the default config cannot be saved or the
     *         config cannot be loaded
     */
    public LoadBalancerInstance(ConfigProvider configProvider) throws IOException {
        this.configProvider = configProvider;
        this.configProvider.saveDefaultConfig(DEFAULT_BALANCER_CONFIG);

        LoadBalancerConfig config = this.configProvider.loadConfig();
        this.serverBalanceCache = config.multiProxySupport()
                ? new RedisServerBalanceCache(config)
                : new MemoryServerBalanceCache(config);

        this.configProvider.addReloadHook(this::onConfigReloaded);
    }

    /**
     * Reacts to a configuration reload: switches the cache implementation
     * when the multi-proxy setting changed (shutting down the old cache
     * first), otherwise just reloads the existing cache in place.
     */
    private void onConfigReloaded(LoadBalancerConfig newConfig) {
        boolean wantRedis = newConfig.multiProxySupport();
        if (wantRedis && !(this.serverBalanceCache instanceof RedisServerBalanceCache)) {
            this.serverBalanceCache.shutdown();
            this.serverBalanceCache = new RedisServerBalanceCache(newConfig);
        } else if (!wantRedis && !(this.serverBalanceCache instanceof MemoryServerBalanceCache)) {
            this.serverBalanceCache.shutdown();
            this.serverBalanceCache = new MemoryServerBalanceCache(newConfig);
        } else {
            this.serverBalanceCache.reload(newConfig);
        }
    }

    /** @return the configuration provider backing this instance */
    public ConfigProvider getConfigProvider() {
        return configProvider;
    }

    /** @return the currently active server balance cache */
    public ServerBalanceCache getServerBalanceCache() {
        return serverBalanceCache;
    }
}
|
<filename>src/test/java/voot/oidcng/OidcNGRemoteTokenServicesTest.java
package voot.oidcng;
import com.github.tomakehurst.wiremock.matching.EqualToPattern;
import org.junit.Test;
import org.springframework.http.MediaType;
import org.springframework.security.oauth2.common.exceptions.InvalidTokenException;
import voot.oauth.AbstractRemoteTokenServicesTest;
import voot.oauth.DecisionResourceServerTokenServices;
import java.util.UUID;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
import static java.util.stream.IntStream.range;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.springframework.http.HttpHeaders.CONTENT_TYPE;
import static voot.JWTAccessToken.jwtAccessToken;
/**
 * Tests for the OIDC-NG {@code RemoteTokenServices}: token introspection
 * against a WireMock-stubbed endpoint, JWT issuer matching, and
 * success/failure/error JSON fixtures.
 */
public class OidcNGRemoteTokenServicesTest extends AbstractRemoteTokenServicesTest {

    @Override
    protected DecisionResourceServerTokenServices getRemoteTokenServices() {
        // NOTE(review): the issuer list deliberately contains surrounding
        // whitespace around the comma-separated entries — presumably the
        // implementation trims them; confirm before "cleaning up" the string.
        return new RemoteTokenServices(
            "http://localhost:" + PORT + "/oidc/introspect",
            "clientId",
            "secret",
            "https://connect.test2.surfconext.nl , https://localhost.issuer ",
            "schac_home_organization");
    }

    @Override
    protected void stubCallToAuthorisationEndpoint(String responseJson) {
        // Stub the introspection endpoint: accept a form-encoded POST and
        // return the given JSON fixture with HTTP 200.
        stubFor(post(urlPathEqualTo("/oidc/introspect"))
            .withHeader(CONTENT_TYPE,
                new EqualToPattern(MediaType.APPLICATION_FORM_URLENCODED_VALUE, true))
            .willReturn(aResponse().withStatus(200)
                .withHeader("Content-Type", "application/json")
                .withBody(responseJson)));
    }

    @Override
    protected String getSuccesCheckTokenJsonPath() {
        return "json/oidcng/introspect.success.json";
    }

    @Override
    protected String getSuccesCheckTokenClientCredentialsJsonPath() {
        return "json/oidcng/introspect.client_credentials.json";
    }

    /** A JWT whose issuer is in the configured issuer list must be handled. */
    @Test
    public void testCanHandleJWT() {
        String accessToken = jwtAccessToken("https://localhost.issuer");
        assertTrue(getSubject().canHandle(accessToken));
    }

    @Override
    protected String getUnspecifiedNameId() {
        return "urn:collab:person:example.com:admin";
    }

    @Override
    protected String getClientId() {
        // NOTE(review): the "https@//" form looks like a typo but matches the
        // SURFconext convention of deriving client ids from entity ids —
        // verify against the fixture JSON before changing.
        return "https@//oidc.localhost.surfconext.nl";
    }

    @Override
    protected String getFailureCheckTokenJsonPath() {
        return "json/oidcng/introspect.failure.json";
    }

    @Override
    protected String getErrorCheckTokenJsonPath() {
        return "json/oidcng/introspect.error.json";
    }

    /** An inactive/failed introspection result must raise InvalidTokenException. */
    @Test(expected = InvalidTokenException.class)
    public void testLoadAuthenticationFailure() throws Exception {
        introspect(getFailureCheckTokenJsonPath());
    }

    /** Random UUID tokens (opaque, non-JWT) are not handled by this service. */
    @Test
    public void testCanHandle() {
        range(0, 10).forEach(nbr -> assertFalse(getSubject().canHandle(UUID.randomUUID().toString())));
    }

    /** Arbitrary non-JWT strings are rejected. */
    @Test
    public void testCanNotHandleJWT() {
        assertFalse(getSubject().canHandle("nope"));
    }
}
|
#!/bin/sh
# NOTE(review): this CocoaPods-generated script uses bash-only features
# (`set -o pipefail`, `function`, arrays, [[ ]]) under a /bin/sh shebang.
# On macOS /bin/sh is bash in sh-mode, so it works there, but it is not
# POSIX sh — confirm before running on other platforms.
set -e
set -u
set -o pipefail

# Report the script path and failing line number whenever `set -e` aborts.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
local bcsymbolmap_path="$1"
local destination="${BUILT_PRODUCTS_DIR}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks. The framework list is identical for Debug and
# Release, so a single combined condition replaces the two duplicate blocks.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/HTBaseControlKit/HTBaseControlKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/HTCategoryKit/HTCategoryKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/HTUIKit/HTUIKit.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SDAutoLayout/SDAutoLayout.framework"
fi
# When parallel signing was enabled, codesign jobs run in the background;
# wait for all of them before finishing the script phase.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<reponame>guidosantillan01/syder-ideas
import React from 'react';
import StrategyForm from './StrategyForm';
import StrategiesList from './StrategiesList';
export default function IdeaFeatures({ idea }) {
return (
<div>
<StrategyForm idea={idea} />
<StrategiesList idea={idea} />
</div>
);
}
|
<gh_stars>1-10
require_relative '../lib/grape-apidoc'
require 'grape'
require 'grape-entity'
require 'stringio'
require_relative './scenario/example'
|
require_relative '../logging'
require_relative '../helpers/iface_helper'
require_relative '../helpers/weave_helper'
module Kontena::NetworkAdapters
class Weave
include Celluloid
include Celluloid::Notifications
include Kontena::Observer::Helper
include Kontena::Helpers::IfaceHelper
include Kontena::Helpers::WeaveHelper
include Kontena::Logging
DEFAULT_NETWORK = 'kontena'.freeze
finalizer :finalizer
# Sets up internal state, subscribes to IPAM start notifications and —
# unless autostart is disabled (e.g. in tests) — kicks off the image pull
# and launch sequence asynchronously.
#
# @param autostart [Boolean] when true, ensure images and start immediately
def initialize(autostart = true)
  @images_exist = false
  @starting = false
  @started = false
  info 'initialized'
  subscribe('ipam:start', :on_ipam_start)
  async.ensure_images if autostart
  @ipam_client = IpamClient.new
  # Default size of pool is number of CPU cores, 2 for 1 core machine
  @executor_pool = WeaveExecutor.pool(args: [autostart])
  async.start if autostart
end
# Waits (via the observable) for node info to become available, then
# launches weave with that node's configuration; re-runs on updates.
def start
  observe(Actor[:node_info_worker].observable) do |node|
    async.launch(node)
  end
end
# Celluloid finalizer: tear down the executor pool when this actor dies.
def finalizer
  @executor_pool.terminate if @executor_pool.alive?
rescue
  # If Celluloid manages to terminate the pool (through GC or by explicit shutdown) it will raise
end
# Memoized Excon HTTP client for the local weave router API on port 6784.
def api_client
  @api_client ||= Excon.new("http://127.0.0.1:6784")
end
# @return [Boolean] true when the weave HTTP API answers /status with 200;
#   connection errors are treated as "not ready" rather than raised.
def weave_api_ready?
  # getting status should be pretty fast, set low timeouts to fail faster
  response = api_client.get(path: '/status', :connect_timeout => 5, :read_timeout => 5)
  response.status == 200
rescue Excon::Error
  false
end
# @return [Boolean]
def running?
return false unless weave_container_running?
return false unless weave_api_ready?
return false unless interface_ip('weave')
true
end
# @return [Boolean] true when the overlay network is fully usable: weave
#   itself is running and the IPAM plugin launcher actor reports running.
def network_ready?
  return false unless running?
  return false unless Actor[:ipam_plugin_launcher].running?
  true
end
# @return [Boolean]
def weave_container_running?
weave = Docker::Container.get('weave') rescue nil
return false if weave.nil?
return false unless weave.running?
true
end
# @return [Boolean]
def images_exist?
@images_exist == true
end
# @return [Boolean]
def already_started?
@started == true
end
# @return [Boolean]
def starting?
@starting == true
end
# Rewrites Docker container create options so the container starts through
# weave-wait and gets an overlay network address reserved from IPAM.
#
# The effective command is assembled as: explicit Entrypoint (or the
# image's default when none was given), followed by explicit Cmd (or the
# image's default Cmd). The container Entrypoint is then replaced with
# /w/w (provided by the weavewait volume), which blocks until the weave
# interface is up before exec'ing the assembled command.
#
# @param [Hash] opts Docker create options; mutated in place and returned
def modify_create_opts(opts)
  ensure_weave_wait

  image = Docker::Image.get(opts['Image'])
  image_config = image.info['Config']
  cmd = []
  # Explicit entrypoint first (normalized to an array)...
  if opts['Entrypoint']
    if opts['Entrypoint'].is_a?(Array)
      cmd = cmd + opts['Entrypoint']
    else
      cmd = cmd + [opts['Entrypoint']]
    end
  end
  # ...or fall back to the image's default entrypoint when none was given.
  if !opts['Entrypoint'] && image_config['Entrypoint'] && image_config['Entrypoint'].size > 0
    cmd = cmd + image_config['Entrypoint']
  end
  # Then the command: explicit Cmd wins over the image default.
  if opts['Cmd'] && opts['Cmd'].size > 0
    if opts['Cmd'].is_a?(Array)
      cmd = cmd + opts['Cmd']
    else
      cmd = cmd + [opts['Cmd']]
    end
  elsif image_config['Cmd'] && image_config['Cmd'].size > 0
    cmd = cmd + image_config['Cmd']
  end
  opts['Entrypoint'] = ['/w/w']
  opts['Cmd'] = cmd

  modify_host_config(opts)

  # IPAM
  overlay_cidr = @ipam_client.reserve_address(DEFAULT_NETWORK)
  info "Create container=#{opts['name']} in network=#{DEFAULT_NETWORK} with overlay_cidr=#{overlay_cidr}"
  opts['Labels']['io.kontena.container.overlay_cidr'] = overlay_cidr
  opts['Labels']['io.kontena.container.overlay_network'] = DEFAULT_NETWORK

  opts
end
# Adds the weavewait volume (providing /w/w) to the container's HostConfig
# and points its DNS at the docker0 bridge address, except for containers
# using the host network (which keep the host's DNS setup).
#
# @param [Hash] opts Docker create options; HostConfig is mutated in place
def modify_host_config(opts)
  host_config = opts['HostConfig'] || {}
  host_config['VolumesFrom'] ||= []
  host_config['VolumesFrom'] << "weavewait-#{WEAVE_VERSION}:ro"
  dns = interface_ip('docker0')
  if dns && host_config['NetworkMode'].to_s != 'host'.freeze
    host_config['Dns'] = [dns]
  end
  opts['HostConfig'] = host_config
end
# Notification handler run once the IPAM plugin has started: ensures the
# grid's default address pool exists, then broadcasts network readiness.
# @param [String] topic
# @param [Node] node
def on_ipam_start(topic, node)
ensure_default_pool(node.grid)
Celluloid::Notifications.publish('network:ready', nil)
end
# Ensure that the host weave bridge is exposed using the given CIDR address,
# and only the given CIDR address.
#
# @param [String] cidr '10.81.0.X/16'
def ensure_exposed(cidr)
  # Expose the wanted address first; weave adds it alongside existing ones.
  if @executor_pool.expose(cidr)
    info "Exposed host node at cidr=#{cidr}"
  else
    error "Failed to expose host node at cidr=#{cidr}"
  end

  # Then hide every previously exposed address that does not match.
  @executor_pool.ps('weave:expose') do |_name, _mac, *exposed|
    exposed.reject { |c| c == cidr }.each do |exposed_cidr|
      warn "Migrating host node from cidr=#{exposed_cidr}"
      @executor_pool.hide(exposed_cidr)
    end
  end
end
# Reserves the grid's default IPAM pool, using the upper half of the grid
# subnet as the dynamic allocation range.
def ensure_default_pool(grid_info)
  grid_subnet = IPAddr.new(grid_info['subnet'])
  iprange = grid_subnet.split.last
  info "network and ipam ready, ensuring default network with subnet=#{grid_subnet.to_cidr} iprange=#{iprange.to_cidr}"
  @default_pool = @ipam_client.reserve_pool(DEFAULT_NETWORK, grid_subnet.to_cidr, iprange.to_cidr)
end
# Launches (or re-launches) the weave router container and connects it to
# the node's peers. Publishes 'network_adapter:start' on first successful
# boot and 'network_adapter:restart' when an existing router was replaced.
# Sets @starting for the duration of the call and @started on success.
# Errors are logged (not re-raised) by the rescue at the bottom.
def launch(node)
wait_until("weave is ready to start") { images_exist? && !starting? }
@starting = true
restarting = false
weave = Docker::Container.get('weave') rescue nil
# Recreate the container if the weave image or launch flags have changed
if weave && config_changed?(weave, node)
info "weave image or configuration has been changed, restarting"
restarting = true
weave.delete(force: true)
weave = nil
end
peer_ips = node.peer_ips || []
trusted_subnets = node.grid['trusted_subnets']
# Retry launching until the router container is up; a failed attempt is
# cleaned up with 'weave --local reset' before the next try.
until weave && weave.running? do
exec_params = [
'--local', 'launch-router', '--ipalloc-range', '', '--dns-domain', 'kontena.local',
'--password', ENV['<PASSWORD>'], '--conn-limit', '0'
]
# NOTE(review): ENV['<PASSWORD>'] looks like a redacted secret key name —
# confirm the real environment variable used for the weave password.
exec_params += ['--trusted-subnets', trusted_subnets.join(',')] if trusted_subnets
@executor_pool.execute(exec_params)
weave = Docker::Container.get('weave') rescue nil
wait_until("weave started", timeout: 10, interval: 1) {
weave && weave.running?
}
if weave.nil? || !weave.running?
@executor_pool.execute(['--local', 'reset'])
end
end
attach_router unless interface_ip('weave')
connect_peers(peer_ips)
info "using trusted subnets: #{trusted_subnets.join(',')}" if trusted_subnets.size > 0 && !already_started?
post_start(node)
if !already_started?
# only publish once on agent boot, or after a crash and actor restart
Celluloid::Notifications.publish('network_adapter:start', node)
elsif restarting
Celluloid::Notifications.publish('network_adapter:restart', node)
end
@started = true
node
rescue => exc
error "#{exc.class.name}: #{exc.message}"
error exc.backtrace.join("\n")
ensure
@starting = false
end
# Attaches the host to the weave bridge via 'weave --local attach-router'.
def attach_router
info "attaching router"
@executor_pool.execute(['--local', 'attach-router'])
end
# Replaces the router's peer list with the given addresses.
# @param [Array<String>] peer_ips
def connect_peers(peer_ips)
  if peer_ips.empty?
    info "router does not have any known peers"
  else
    @executor_pool.execute(['--local', 'connect', '--replace'] + peer_ips)
    info "router connected to peers #{peer_ips.join(', ')}"
  end
end
# After the router is up, exposes this host on the weave bridge at its
# overlay address (overlay IP with the grid subnet's prefix length).
# @param [Node] node
def post_start(node)
  grid_subnet = IPAddr.new(node.grid['subnet'])
  overlay_ip = node.overlay_ip
  return unless grid_subnet && overlay_ip
  ensure_exposed("#{overlay_ip}/#{grid_subnet.prefixlen}")
end
# Checks whether the running weave container matches the wanted image
# version and launch flags.
#
# @param [Docker::Container] weave
# @param [Node] node
# @return [Boolean] true when the container must be recreated
def config_changed?(weave, node)
# image tag (text after ':') must match the wanted weave version
return true if weave.config['Image'].split(':')[1] != WEAVE_VERSION
# Cmd is a flat ['--flag', 'value', ...] list; pair it into a Hash
cmd = Hash[*weave.config['Cmd'].flatten(1)]
return true if cmd['--trusted-subnets'] != node.grid['trusted_subnets'].to_a.join(',')
# containers launched before --conn-limit was added must be recreated
return true if cmd['--conn-limit'].nil?
false
end
# Inspect current state of attached containers.
#
# @return [Hash<String, Array<String>>] container_id[0..12] => [overlay_cidr]
def get_containers
  {}.tap do |containers|
    @executor_pool.ps() do |id, _mac, *cidrs|
      containers[id] = cidrs unless id == 'weave:expose'
    end
  end
end
# Attach container to weave with given CIDR address (asynchronously,
# via the executor pool).
#
# @param [String] container_id
# @param [String] cidr '10.81.X.Y/16'
def attach_container(container_id, cidr)
info "Attach container=#{container_id} at cidr=#{cidr}"
@executor_pool.async.attach(container_id, cidr)
end
# Attach container to weave with the given CIDR, first detaching any
# existing mismatching addresses (weave refuses to attach an address that
# already exists with a different netmask).
#
# @param [String] container_id
# @param [String] cidr '10.81.X.Y/16'
# @param [Array<String>] attached_cidrs ['10.81.X.Y/19']
def migrate_container(container_id, cidr, attached_cidrs)
  attached_cidrs.reject { |c| c == cidr }.each do |attached_cidr|
    warn "Migrate container=#{container_id} from cidr=#{attached_cidr}"
    @executor_pool.detach(container_id, attached_cidr)
  end
  # attach with the correct address
  self.attach_container(container_id, cidr)
end
# Releases a removed container's overlay address back to IPAM. Release
# failures are only logged; periodic cleanup reclaims leaked addresses.
#
# @param [String] container_id may not exist anymore
# @param [String] overlay_network
# @param [String] overlay_cidr
def remove_container(container_id, overlay_network, overlay_cidr)
info "Remove container=#{container_id} from network=#{overlay_network} at cidr=#{overlay_cidr}"
@ipam_client.release_address(overlay_network, overlay_cidr)
rescue IpamError => error
# Cleanup will take care of these later on
warn "Failed to release container=#{container_id} from network=#{overlay_network} at cidr=#{overlay_cidr}: #{error}"
end
private
# Pulls the weave image unless it is already present locally, then marks
# images as available for the rest of the actor (see #images_exist?).
def ensure_images
  [weave_image].each do |image|
    next if Docker::Image.exist?(image)
    info "pulling #{image}"
    Docker::Image.create({'fromImage' => image})
    # Docker::Image.create returns before the pull is visible; poll for it
    sleep 1 until Docker::Image.exist?(image)
    info "image #{image} pulled "
  end
  @images_exist = true
end
# Creates the shared 'weavewait-<version>' volume container that attached
# containers mount /w/w from. Blocks until images have been pulled.
def ensure_weave_wait
  sleep 1 until images_exist?
  container_name = "weavewait-#{WEAVE_VERSION}"
  existing = Docker::Container.get(container_name) rescue nil
  return if existing
  Docker::Container.create(
    'name' => container_name,
    'Image' => weave_exec_image,
    'Entrypoint' => ['/bin/false'],
    'Labels' => {
      'weavevolumes' => ''
    },
    'Volumes' => {
      '/w' => {},
      '/w-noop' => {},
      '/w-nomcast' => {}
    }
  )
end
end
end
|
import gym


def main(steps=20):
    """Run CartPole-v0 with random actions, printing each transition."""
    env = gym.make('CartPole-v0')

    # Reset returns the initial observation (cart position/velocity,
    # pole angle/velocity for CartPole).
    print('-' * 50)
    print("Initial Observation")
    observation = env.reset()
    print(observation)
    print('-' * 50)

    for _ in range(steps):
        # Sample a random action from the environment's action space
        action = env.action_space.sample()
        # Step returns the classic 4-tuple (pre-gym-0.26 API)
        observation, reward, done, info = env.step(action)
        print('Observation: {}, Reward: {}, Done: {}, Info: {}'.format(observation, reward, done, info))


main()
/// Errors that can occur during the voting process.
#[derive(Debug)]
enum VotingError {
    /// The given voter ID is not present in the voter registry.
    VoterNotFound,
}

/// Looks up a voter by ID.
///
/// Returns `Ok(true)` when the voter is registered, or
/// `Err(VotingError::VoterNotFound)` otherwise.
///
/// NOTE: the original code had an unfinished placeholder expression here
/// (`let voter_found = /* logic */;`), which does not compile. A small
/// in-memory registry is used as a stand-in; replace it with the real
/// voter-database lookup.
fn find_voter(voter_id: u32) -> Result<bool, VotingError> {
    // TODO: replace with the actual voter lookup.
    const REGISTERED_VOTERS: &[u32] = &[123, 456, 789];
    if REGISTERED_VOTERS.contains(&voter_id) {
        Ok(true)
    } else {
        Err(VotingError::VoterNotFound)
    }
}

/// Casts a vote for `candidate` on behalf of `voter_id`.
///
/// Propagates `VotingError::VoterNotFound` when the voter is unknown.
fn vote(voter_id: u32, candidate: &str) -> Result<(), VotingError> {
    match find_voter(voter_id) {
        Ok(true) => {
            // Logic to allow the voter to cast their vote
            println!("Vote casted for candidate: {}", candidate);
            Ok(())
        }
        // BUG FIX: the original match omitted the Ok(false) arm and was
        // non-exhaustive (compile error). find_voter never returns
        // Ok(false) today, but treat it as not-found to stay total.
        Ok(false) | Err(VotingError::VoterNotFound) => Err(VotingError::VoterNotFound),
    }
}

/// Example usage of the voting process.
fn main() {
    let voter_id = 123;
    let candidate = "Candidate A";
    match vote(voter_id, candidate) {
        Ok(()) => println!("Voting successful"),
        Err(VotingError::VoterNotFound) => println!("Voter not found, please register to vote"),
    }
}
<gh_stars>0
'use strict';
var should = require('should'),
request = require('supertest'),
app = require('../../server'),
mongoose = require('mongoose'),
User = mongoose.model('User'),
Anagrafica = mongoose.model('Anagrafica'),
agent = request.agent(app);
/**
 * Globals
 */
var credentials, user, anagrafica;

/**
 * Anagrafica routes tests.
 *
 * BUG FIX throughout: error paths previously called `done(err)` without
 * `return`, so the test continued running and `done` could be called a
 * second time ("done() called multiple times" in Mocha). Every error
 * branch now returns immediately.
 */
describe('Anagrafica CRUD tests', function() {
	beforeEach(function(done) {
		// Create user credentials
		credentials = {
			username: 'username',
			password: 'password'
		};

		// Create a new user
		user = new User({
			firstName: 'Full',
			lastName: 'Name',
			displayName: 'Full Name',
			email: '<EMAIL>',
			username: credentials.username,
			// NOTE(review): restored from a redacted token; confirm the
			// original used the plain credentials password here.
			password: credentials.password,
			provider: 'local'
		});

		// Save a user to the test db and create new Anagrafica
		user.save(function() {
			anagrafica = {
				name: 'Anagrafica Name'
			};
			done();
		});
	});

	it('should be able to save Anagrafica instance if logged in', function(done) {
		agent.post('/auth/signin')
			.send(credentials)
			.expect(200)
			.end(function(signinErr, signinRes) {
				// Handle signin error
				if (signinErr) return done(signinErr);

				// Get the userId
				var userId = user.id;

				// Save a new Anagrafica
				agent.post('/anagraficas')
					.send(anagrafica)
					.expect(200)
					.end(function(anagraficaSaveErr, anagraficaSaveRes) {
						// Handle Anagrafica save error
						if (anagraficaSaveErr) return done(anagraficaSaveErr);

						// Get a list of Anagraficas
						agent.get('/anagraficas')
							.end(function(anagraficasGetErr, anagraficasGetRes) {
								// Handle Anagrafica list error
								if (anagraficasGetErr) return done(anagraficasGetErr);

								// Get Anagraficas list
								var anagraficas = anagraficasGetRes.body;

								// Set assertions
								(anagraficas[0].user._id).should.equal(userId);
								(anagraficas[0].name).should.match('Anagrafica Name');

								// Call the assertion callback
								done();
							});
					});
			});
	});

	it('should not be able to save Anagrafica instance if not logged in', function(done) {
		agent.post('/anagraficas')
			.send(anagrafica)
			.expect(401)
			.end(function(anagraficaSaveErr, anagraficaSaveRes) {
				// Call the assertion callback
				done(anagraficaSaveErr);
			});
	});

	it('should not be able to save Anagrafica instance if no name is provided', function(done) {
		// Invalidate name field
		anagrafica.name = '';

		agent.post('/auth/signin')
			.send(credentials)
			.expect(200)
			.end(function(signinErr, signinRes) {
				// Handle signin error
				if (signinErr) return done(signinErr);

				// Get the userId
				var userId = user.id;

				// Save a new Anagrafica
				agent.post('/anagraficas')
					.send(anagrafica)
					.expect(400)
					.end(function(anagraficaSaveErr, anagraficaSaveRes) {
						// Set message assertion
						(anagraficaSaveRes.body.message).should.match('Please fill Anagrafica name');

						// Handle Anagrafica save error
						done(anagraficaSaveErr);
					});
			});
	});

	it('should be able to update Anagrafica instance if signed in', function(done) {
		agent.post('/auth/signin')
			.send(credentials)
			.expect(200)
			.end(function(signinErr, signinRes) {
				// Handle signin error
				if (signinErr) return done(signinErr);

				// Get the userId
				var userId = user.id;

				// Save a new Anagrafica
				agent.post('/anagraficas')
					.send(anagrafica)
					.expect(200)
					.end(function(anagraficaSaveErr, anagraficaSaveRes) {
						// Handle Anagrafica save error
						if (anagraficaSaveErr) return done(anagraficaSaveErr);

						// Update Anagrafica name
						anagrafica.name = 'WHY YOU GOTTA BE SO MEAN?';

						// Update existing Anagrafica
						agent.put('/anagraficas/' + anagraficaSaveRes.body._id)
							.send(anagrafica)
							.expect(200)
							.end(function(anagraficaUpdateErr, anagraficaUpdateRes) {
								// Handle Anagrafica update error
								if (anagraficaUpdateErr) return done(anagraficaUpdateErr);

								// Set assertions
								(anagraficaUpdateRes.body._id).should.equal(anagraficaSaveRes.body._id);
								(anagraficaUpdateRes.body.name).should.match('WHY YOU GOTTA BE SO MEAN?');

								// Call the assertion callback
								done();
							});
					});
			});
	});

	it('should be able to get a list of Anagraficas if not signed in', function(done) {
		// Create new Anagrafica model instance
		var anagraficaObj = new Anagrafica(anagrafica);

		// Save the Anagrafica
		anagraficaObj.save(function() {
			// Request Anagraficas
			request(app).get('/anagraficas')
				.end(function(req, res) {
					// Set assertion
					res.body.should.be.an.Array.with.lengthOf(1);

					// Call the assertion callback
					done();
				});
		});
	});

	it('should be able to get a single Anagrafica if not signed in', function(done) {
		// Create new Anagrafica model instance
		var anagraficaObj = new Anagrafica(anagrafica);

		// Save the Anagrafica
		anagraficaObj.save(function() {
			request(app).get('/anagraficas/' + anagraficaObj._id)
				.end(function(req, res) {
					// Set assertion
					res.body.should.be.an.Object.with.property('name', anagrafica.name);

					// Call the assertion callback
					done();
				});
		});
	});

	it('should be able to delete Anagrafica instance if signed in', function(done) {
		agent.post('/auth/signin')
			.send(credentials)
			.expect(200)
			.end(function(signinErr, signinRes) {
				// Handle signin error
				if (signinErr) return done(signinErr);

				// Get the userId
				var userId = user.id;

				// Save a new Anagrafica
				agent.post('/anagraficas')
					.send(anagrafica)
					.expect(200)
					.end(function(anagraficaSaveErr, anagraficaSaveRes) {
						// Handle Anagrafica save error
						if (anagraficaSaveErr) return done(anagraficaSaveErr);

						// Delete existing Anagrafica
						agent.delete('/anagraficas/' + anagraficaSaveRes.body._id)
							.send(anagrafica)
							.expect(200)
							.end(function(anagraficaDeleteErr, anagraficaDeleteRes) {
								// Handle Anagrafica delete error
								if (anagraficaDeleteErr) return done(anagraficaDeleteErr);

								// Set assertions
								(anagraficaDeleteRes.body._id).should.equal(anagraficaSaveRes.body._id);

								// Call the assertion callback
								done();
							});
					});
			});
	});

	it('should not be able to delete Anagrafica instance if not signed in', function(done) {
		// Set Anagrafica user
		anagrafica.user = user;

		// Create new Anagrafica model instance
		var anagraficaObj = new Anagrafica(anagrafica);

		// Save the Anagrafica
		anagraficaObj.save(function() {
			// Try deleting Anagrafica
			request(app).delete('/anagraficas/' + anagraficaObj._id)
				.expect(401)
				.end(function(anagraficaDeleteErr, anagraficaDeleteRes) {
					// Set message assertion
					(anagraficaDeleteRes.body.message).should.match('User is not logged in');

					// Handle Anagrafica delete error
					done(anagraficaDeleteErr);
				});
		});
	});

	afterEach(function(done) {
		User.remove().exec();
		Anagrafica.remove().exec();
		done();
	});
});
package net.community.chest.net.proto.text.imap4;
import java.io.IOException;
import java.io.OutputStream;
import net.community.chest.io.output.OutputStreamEmbedder;
import net.community.chest.lang.StringUtil;
import net.community.chest.mail.address.MessageAddressType;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Mar 24, 2008 2:00:22 PM
*/
/**
 * Fetch-response handler that dumps the raw contents of a single message
 * part into the wrapped {@link OutputStream}. All callbacks that should
 * never fire for a raw part fetch return distinct negative error codes
 * (now named constants instead of inline magic numbers).
 */
public class IMAP4RawMsgPartHandler extends OutputStreamEmbedder implements IMAP4FetchResponseHandler {
    /** Unexpected header callback during a raw part fetch. */
    private static final int ERR_UNEXPECTED_HEADER = -2001;
    /** Unexpected address callback during a raw part fetch. */
    private static final int ERR_UNEXPECTED_ADDRESS = -2002;
    /** Unexpected internal-date callback during a raw part fetch. */
    private static final int ERR_UNEXPECTED_INTERNAL_DATE = -2005;
    /** Data arrived for a message part other than the requested one. */
    private static final int ERR_PART_MISMATCH = -2007;
    /** Writing the part data to the underlying stream failed. */
    private static final int ERR_WRITE_FAILURE = -2008;

    /** The message part (e.g. "1.2") whose raw bytes are being dumped. */
    private final String _msgPart;
    public final String getMsgPart ()
    {
        return _msgPart;
    }

    /**
     * @param outStream stream that receives the raw part bytes
     * @param realClosure whether closing this embedder closes the stream
     * @param msgPart non-empty identifier of the part to dump
     * @throws IllegalArgumentException if no message part is supplied
     */
    public IMAP4RawMsgPartHandler (OutputStream outStream, boolean realClosure, String msgPart)
    {
        super(outStream, realClosure);
        if ((null == (this._msgPart=msgPart)) || (msgPart.length() <= 0))
            throw new IllegalArgumentException("No message part supplied for raw message part dumper");
    }

    @Override
    public int handleFlagsStage (int msgSeqNo, boolean starting)
    {
        return 0;   // flags are ignored for a raw dump
    }

    @Override
    public int handleFlagValue (int msgSeqNo, String flagValue)
    {
        return 0;   // flags are ignored for a raw dump
    }

    @Override
    public int handleInternalDate (int msgSeqNo, String dateValue)
    {
        return ERR_UNEXPECTED_INTERNAL_DATE;    // unexpected call
    }

    @Override
    public int handleMsgPartAddress (int msgSeqNo, String msgPart, MessageAddressType addrType, String dispName, String addrVal)
    {
        return ERR_UNEXPECTED_ADDRESS;  // unexpected call
    }

    @Override
    public int handleMsgPartHeader (int msgSeqNo, String msgPart, String hdrName, String attrName, String attrValue)
    {
        return ERR_UNEXPECTED_HEADER;   // unexpected call
    }

    @Override
    public int handleMsgPartSize (int msgSeqNo, String msgPart, long partSize)
    {
        return 0;   // size is informational only
    }

    @Override
    public int handleMsgPartStage (int msgSeqNo, String msgPart, boolean starting)
    {
        return 0;
    }

    @Override
    public int handleMsgResponseState (int msgSeqNo, boolean starting)
    {
        return 0;
    }

    @Override
    public int handlePartData (int msgSeqNo, String msgPart, byte[] data, int offset, int len)
    {
        // make sure this is the requested message part
        if (StringUtil.compareDataStrings(getMsgPart(), msgPart, true) != 0)
            return ERR_PART_MISMATCH;

        try
        {
            write(data, offset, len);
            return 0;
        }
        catch(IOException ioe)
        {
            return ERR_WRITE_FAILURE;
        }
    }

    @Override
    public int handlePartDataStage (int msgSeqNo, String msgPart, boolean starting)
    {
        return 0;
    }

    @Override
    public int handleUID (int msgSeqNo, long msgUID)
    {
        return 0;   // UID is ignored for a raw dump
    }
}
|
<filename>serenity-screenplay-webdriver/src/main/java/net/serenitybdd/screenplay/questions/UIStateReaderWithNameBuilder.java
package net.serenitybdd.screenplay.questions;
import net.serenitybdd.screenplay.Actor;
import net.serenitybdd.screenplay.targets.Target;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Builder for {@code UIState} readers that carry an explicit display name.
 * Usage: {@code new UIStateReaderWithNameBuilder<>(target, type).named("age").viewedBy(actor)}.
 */
public class UIStateReaderWithNameBuilder<T> {

    protected final Target target;
    protected final Class<T> type;

    public UIStateReaderWithNameBuilder(Target target, Class<T> type) {
        this.target = target;
        this.type = type;
    }

    /**
     * @param name human-readable name attached to the reader
     * @return a primed builder ready to resolve the reader for an actor
     */
    public PrimedUIStateReaderWithNameBuilder<T> named(String name) {
        // FIX: use the diamond instead of a raw-type instantiation
        return new PrimedUIStateReaderWithNameBuilder<>(target, type, name);
    }

    public static class PrimedUIStateReaderWithNameBuilder<T> extends UIStateReaderWithNameBuilder<T> {

        private final String name;
        // FIX: loggers are stateless — make the instance field static final
        private static final Logger LOGGER = LoggerFactory.getLogger(PrimedUIStateReaderWithNameBuilder.class);

        public PrimedUIStateReaderWithNameBuilder(Target target, Class<T> type, String name) {
            super(target, type);
            this.name = name;
        }

        /**
         * Instantiates the reader via its (Target, Actor, String) constructor.
         * The method type parameter is renamed from {@code T} to {@code U} to
         * avoid shadowing the class type parameter (same erasure, callers
         * unaffected).
         *
         * @throws IllegalStateException if the reader cannot be instantiated
         */
        @SuppressWarnings("unchecked")
        public <U extends UIState> U viewedBy(Actor actor) {
            try {
                return (U) type.getConstructor(Target.class, Actor.class, String.class).newInstance(target, actor, name);
            } catch (Exception e) {
                LOGGER.error("Failed to instantiate UIStateReader of type " + type, e);
                throw new IllegalStateException("Failed to instantiate UIStateReader of type " + type, e);
            }
        }
    }
}
|
package com.bingor.router.node;
import android.app.Activity;
import android.content.Context;
import com.bingor.router.Util;
import com.bingor.router.exception.RouterNodeTypeNotMatchException;
import com.bingor.router.impl.CallBack;
import com.bingor.router.impl.RouterNodeExecutorActivity;
import com.bingor.router.impl.RouterNodeExecutorCallback;
import com.bingor.router.impl.RouterNodeExecutorContext;
import com.bingor.router.impl.RouterNodeExecutorNormal;
/**
 * Router node backed by a plain utility class (a non-UI executor).
 * Created by HXB on 2018/7/23.
 */
public class UtilRouterNode extends RouterNode {

    public UtilRouterNode(Class<?> cls) {
        super(cls);
    }

    /**
     * Executes the node as a {@link RouterNodeExecutorNormal} (no callback, no context).
     *
     * @throws RouterNodeTypeNotMatchException if {@code cls} does not implement the executor interface
     */
    public void executeNode(String jsonParams) throws RouterNodeTypeNotMatchException {
        if (!new Util().isMatchInterface(RouterNodeExecutorNormal.class, cls)) {
            // FIX: cls.getClass().getSimpleName() always returned "Class";
            // report the actual target class name instead.
            throw new RouterNodeTypeNotMatchException(cls.getSimpleName(), RouterNodeExecutorNormal.class.getSimpleName());
        }
        try {
            ((RouterNodeExecutorNormal) cls.newInstance()).executeNode(jsonParams);
        } catch (InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes the node as a {@link RouterNodeExecutorCallback}.
     *
     * @throws RouterNodeTypeNotMatchException if {@code cls} does not implement the executor interface
     */
    public void executeNode(String jsonParams, CallBack callBack) throws RouterNodeTypeNotMatchException {
        if (!new Util().isMatchInterface(RouterNodeExecutorCallback.class, cls)) {
            // FIX: expected-type name used to be copy-pasted as ...Normal
            throw new RouterNodeTypeNotMatchException(cls.getSimpleName(), RouterNodeExecutorCallback.class.getSimpleName());
        }
        try {
            ((RouterNodeExecutorCallback) cls.newInstance()).executeNode(jsonParams, callBack);
        } catch (InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes the node as a {@link RouterNodeExecutorActivity} with the calling activity.
     *
     * @throws RouterNodeTypeNotMatchException if {@code cls} does not implement the executor interface
     */
    public void executeNode(Activity activity, String jsonParams, CallBack callBack) throws RouterNodeTypeNotMatchException {
        if (!new Util().isMatchInterface(RouterNodeExecutorActivity.class, cls)) {
            // FIX: expected-type name used to be copy-pasted as ...Normal
            throw new RouterNodeTypeNotMatchException(cls.getSimpleName(), RouterNodeExecutorActivity.class.getSimpleName());
        }
        try {
            ((RouterNodeExecutorActivity) cls.newInstance()).executeNode(activity, jsonParams, callBack);
        } catch (InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes the node as a {@link RouterNodeExecutorContext} with an application context.
     *
     * @throws RouterNodeTypeNotMatchException if {@code cls} does not implement the executor interface
     */
    public void executeNode(Context context, String jsonParams, CallBack callBack) throws RouterNodeTypeNotMatchException {
        if (!new Util().isMatchInterface(RouterNodeExecutorContext.class, cls)) {
            // FIX: expected-type name used to be copy-pasted as ...Normal
            throw new RouterNodeTypeNotMatchException(cls.getSimpleName(), RouterNodeExecutorContext.class.getSimpleName());
        }
        try {
            ((RouterNodeExecutorContext) cls.newInstance()).executeNode(context, jsonParams, callBack);
        } catch (InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
    }
}
|
from collections import defaultdict
from typing import Tuple
class QueryHistoryStats:
    """Tracks query timestamps and answers simple frequency statistics."""

    def __init__(self):
        # All recorded query times, in insertion order.
        self.query_times = []
        # time -> number of queries recorded at that exact time.
        self.query_frequency = defaultdict(int)

    def record_query(self, query_time: int) -> None:
        """Record a single query issued at ``query_time``."""
        self.query_times.append(query_time)
        self.query_frequency[query_time] += 1

    def get_total_queries(self) -> int:
        """Return the total number of recorded queries."""
        return len(self.query_times)

    def get_query_frequency(self, start_time: int, end_time: int) -> int:
        """Return how many queries fall in the half-open window [start_time, end_time).

        Uses the per-time frequency map instead of rescanning every recorded
        query, so repeated timestamps cost O(distinct times) rather than O(n).
        """
        return sum(freq for time, freq in self.query_frequency.items()
                   if start_time <= time < end_time)

    def get_most_frequent_query_period(self) -> Tuple[int, int]:
        """Return (earliest, latest) timestamps sharing the maximum frequency.

        Raises:
            ValueError: if no queries have been recorded yet (previously this
                surfaced as an opaque ``max() arg is an empty sequence``).
        """
        if not self.query_frequency:
            raise ValueError("no queries recorded")
        max_frequency = max(self.query_frequency.values())
        peak_times = [t for t, f in self.query_frequency.items() if f == max_frequency]
        return (min(peak_times), max(peak_times))
#!/bin/bash
#Copyright Alex English June 2021
#This script comes with no warranty whatsoever. Use at your own risk.

#This script just spends all the funds on an address back to itself in chunks of the predetermined size.
#First arg is the address to operate on (no z-addresses)
#Second (optional) arg is the desired chunk size, which defaults to 5000

if ! source "$( dirname "${BASH_SOURCE[0]}" )"/config; then
    echo "Failed to source config file. Please make sure you have the whole VerusExtras repo or at least also have the config file."
    exit 1
fi

#Dependencies: jq (command-line json parser/editor), bc (command-line calculator)
if ! command -v jq &>/dev/null ; then
    echo "jq not found. please install using your package manager."
    exit 1
fi
if ! command -v bc &>/dev/null ; then
    echo "bc not found. please install using your package manager."
    exit 1
fi

set -e

ADDR=$1
CHUNK=${2-5000}

# Address sanity check: identities end in '@'; transparent (R...) and
# identity (i...) addresses are exactly 34 characters.
AC=${#ADDR}
if [ "${ADDR:AC-1:1}" != "@" ] && ( [ "$AC" -ne 34 ] || ( [ "${ADDR:0:1}" != "R" ] && [ "${ADDR:0:1}" != "i" ] )) ; then
    echo "Missing address or the address given is not valid or not supported." >&2
    echo "Supported address types are:" >&2
    echo "    Transparent addresses (R-addresses)" >&2
    echo "    Identity addresses (i-addresses)" >&2
    echo "    Identity names (ending in @)" >&2
    exit 1
fi

# Spendable amount = balance minus one default fee; whole chunks plus remainder.
BAL="$($VERUS z_getbalance "$ADDR")"
AMT="$(bc<<<"$BAL-$DEFAULT_FEE")"
CHUNKS="$(bc<<<"$AMT/$CHUNK")"
REM="$(printf "%.8f\n" "$(bc<<<"$AMT-($CHUNKS*$CHUNK)")")"

echo "Address: $ADDR"
echo "Breaking balance of $BAL into $CHUNKS of $CHUNK plus $REM"
echo

# Build the sendcurrency destination list: one JSON entry per full chunk.
DESTS='['
while [ "$CHUNKS" -gt 0 ] ; do
    CHUNKS=$((CHUNKS-1))
    DESTS="$DESTS"'{"address":"'"$ADDR"'","amount":'"$CHUNK"'},'
done
# BUG FIX: the old test compared bc's 0/1 output with `-ge 0`, which is
# always true, so a zero (or negative) remainder still produced a bogus
# destination entry. Append the remainder only when strictly positive.
if [ "$(bc<<<"$REM > 0")" -eq 1 ]; then
    DESTS="$DESTS"'{"address":"'"$ADDR"'","amount":'"$REM"'}]'
else
    DESTS="${DESTS%,}]"
fi

echo Running: verus sendcurrency "$ADDR" "$DESTS" >&2
echo

$VERUS sendcurrency "$ADDR" "$DESTS"
<reponame>The-Anton/Rocket.Chat<filename>app/lib/server/methods/restartServer.ts<gh_stars>1-10
import { Meteor } from 'meteor/meteor';
import { hasRole } from '../../../authorization/server';
// Privileged Meteor method that restarts the server process. The process
// exits with status 1 after a short grace period and is expected to be
// respawned by the surrounding process supervisor.
Meteor.methods({
// eslint-disable-next-line @typescript-eslint/camelcase
restart_server() {
const uid = Meteor.userId();
// Only authenticated users holding the admin role may restart the server.
if (!uid) {
throw new Meteor.Error('error-invalid-user', 'Invalid user', { method: 'restart_server' });
}
if (hasRole(uid, 'admin') !== true) {
throw new Meteor.Error('error-not-allowed', 'Not allowed', { method: 'restart_server' });
}
Meteor.setTimeout(() => {
// Watchdog: if process.exit(1) hangs (e.g. open handles keep the
// event loop alive), force-terminate via process.abort().
Meteor.setTimeout(() => {
console.warn('Call to process.exit() timed out, aborting.');
process.abort();
}, 1000);
process.exit(1);
}, 1000);
// i18n message key with the countdown (2 seconds) as its parameter.
return {
message: 'The_server_will_restart_in_s_seconds',
params: [2],
};
},
});
|
#!/bin/bash
# Last update: 08/28/2017
#
# This script is designed to be ran on the BB from the directory where the install files are located.
# This means that the user has already pulled a copy of the install files (including this script)
# onto the BB either through git or some other means.
#
# Start with a clean Debian build. The following Debian Image file was used:
# bone-debian-7.11-ixde-4gb-armhf-2016-06-15-4gb.img
#
# To make this a "flasher" image after installing the img file onto a uSDcard, a line at the end of
# the /boot/uEnv.txt file must be uncommented and changed to this:
# cmdline=init=/opt/scripts/tools/eMMC/init-eMMC-flasher-v3-bbg.sh
#
# After flashing the BBG, remove uSDcard and reboot. Then Load the install script and related files
# from git using the following command line (public):
# git clone https://github.com/keynotep/lc4500_pem
#
# NOTE: This install script does require superuser priveleges (default) for some operations.
#
# When run, it will perform several "install" operations for the following components:
# - Links will be created for the startup script using update-rc.d command in /etc/init.d/
# - compile the cape manager overlays (DTS) into DTBO files and place them into the cape manager folder
# - update aptitude database and install various libraries required by the application
# - Create folders needed by the LC4500-PEM application and make it auto-run on power-up
#
# A Reboot will be required after it completes
#
cur_dir=`pwd`
if [ -e /var/run/pem.pid ]; then
echo "Stopping current application"
sudo kill -15 `cat /var/run/pem.pid`
fi
cd
if [ -d nHD_pem_dev ]; then
rm -f -r nHD_pem_dev
fi
echo ========= Installing Startup Script and app ==========
# Now fix startup sequence
echo "Updating Boot-up scripts...."
cd /etc/init.d
# remove old startup scripts so we can rearrange them
sudo update-rc.d apache2 remove
sudo update-rc.d xrdp remove
sudo systemctl disable jekyll-autorun.service
sudo systemctl disable bonescript-autorun.service
# copy new versions with new priorities
#cp $cur_dir/StartupScripts/cron .
#cp $cur_dir/StartupScripts/dbus .
#cp $cur_dir/StartupScripts/rsync .
#cp $cur_dir/StartupScripts/udhcpd .
cp $cur_dir/StartupScripts/pem.sh .
sudo update-rc.d pem.sh start 10 1 2 3 4 5 . stop 0 0 6 .
echo ========= Installing Device Tree Overlays =============
echo "Updating Device Tree Overlay files...."
cd /lib/firmware
sudo cp $cur_dir/BB-BONE-NHD-00A0.dts .
# compile device tree overlay functions (SPI, HDMI etc.)
dtc -O dtb -o BB-BONE-NHD-00A0.dtbo -b 0 -@ BB-BONE-NHD-00A0.dts
echo ============= Updating the Cape Manager ================
cd /etc/default
cp $cur_dir/capemgr .
#echo ============= Adding USB device rules file ================
#cd /etc/udev/rules.d
#cp $cur_dir/dlpc350-hid.rules ./55-dlpc350-hid.rules
echo ============= Check uEnv.txt boot parameters ================
echo "Updating uEnv.txt file. Previous version saved in /boot/uEnv.old.txt"
cd $cur_dir
cp /boot/uEnv.txt /boot/uEnv.old.txt
cp /boot/uEnv.txt ./uEnv.old
if [ -s uEnv.old ]; then
echo "Using saved uEnv file"
cat uEnv.old | sed '/cmdline/s/quiet/quiet text/g' | sed '/BB-BONELT-HDMI,BB-BONELT-HDMIN/s/#cape_disable/cape_disable/g' | sed '/BB-BONE-EMMC-2G/s/cape_disable/#cape_disable/g' | awk '/uname/{print "optargs=\"consoleblank=0\""}1' > /boot/uEnv.txt
else
cat /boot/uEnv.txt | sed '/cmdline/s/quiet/quiet text/g' | sed '/BB-BONELT-HDMI,BB-BONELT-HDMIN/s/#cape_disable/cape_disable/g' | sed '/BB-BONE-EMMC-2G/s/cape_disable/#cape_disable/g' | awk '/uname/{print "optargs=\"consoleblank=0\""}1' > /boot/uEnv.txt
fi
echo ============= Check Network config ================
echo Check /etc/hostname to be sure the network name is correct:
echo " Type the new network hostname you want to use or just enter to use default."
echo " (you have 30 seconds or default will be automatically used): [nHD-pem] "
read -t 30 newhostname
if [ "$newhostname" == "" ] ; then
echo "Default network ID used: nHD-pem. Be careful if you have multiple units on your network!"
sudo echo "nHD-pem" > /etc/hostname
else
echo "OK, changing network ID to: " $newhostname
sudo echo $newhostname > /etc/hostname
fi
echo "Updating BeagleBone network IP address (/etc/network/interfaces)..."
cp interfaces /etc/network/.
echo "Updating Debitian libraries..."
echo ========= Running Aptitude Update ==========
sudo apt-get update
#echo ========= Running Aptitude Upgrade ==========
#sudo apt-get upgrade
echo ========= Removing unwanted drivers ==========
sudo apt-get remove apache2 -y
#removal of udhcp will prevent RNDIS from working
#sudo apt-get remove --auto-remove udhcpd -y
sudo apt-get remove dbus-x11 consolekit -y
sudo apt-get autoremove -y
echo ========= Installing new drivers ==========
sudo apt-get install libdrm2 -y
#sudo apt-get install udev -y
sudo apt-get install libudev-dev -y
sudo apt-get install libdrm-dev -y
#next 2 needed if using USB loop-back to control ASIC over USB instead of I2C
sudo apt-get install libusb-dev -y
sudo apt-get install libusb-1.0.0-dev
#commented out recompile of application for now
#echo "Building and installing new PEM application..."
cd $cur_dir
#make clean
#make all
sudo cp nHD_pem /usr/bin/.
#create solutions database directory
echo ========= Creating utility directories ==========
# Create Solution root directory (creating it implies creating the whole tree).
if [ -d /opt/nHDpem ] ; then
    echo "Solution directory exists"
else
    echo "Creating entire Solution directory tree"
    sudo mkdir /opt/nHDpem
    sudo mkdir /opt/nHDpem/data
    sudo mkdir /opt/nHDpem/data/bin
fi
# Create data directory
if [ -d /opt/nHDpem/data ] ; then
    echo "Solution directory exists"
else
    echo "Creating Solution directory"
    sudo mkdir /opt/nHDpem/data
fi
# Create/populate Script directory
if [ -d /opt/nHDpem/data/bin ] ; then
    echo "Utility directory exists"
else
    echo "Creating Utility directory"
    sudo mkdir /opt/nHDpem/data/bin
fi
#install solution manipulation scripts
cp Solutions/*.sh /opt/nHDpem/data/bin/.
# Create/populate Solution Archive directory
if [ -d /opt/nHDpem/data/archive ] ; then
    echo "Solution Archive directory exists"
else
    echo "Creating Solution Archive directory"
    sudo mkdir /opt/nHDpem/data/archive
fi
#cp Solutions/*.tar.gz /opt/nHDpem/data/archive/.
# Create actual Solutions directory
# BUGFIX: the existence test checked /opt/nHD/data/nHD while the mkdir below
# creates /opt/nHDpem/data/nHD, so the branch was taken (and mkdir errored)
# on every re-run. Test the same path that gets created.
if [ -d /opt/nHDpem/data/nHD ] ; then
    echo "Solutions directory exists"
else
    echo "Creating Solutions directory"
    sudo mkdir /opt/nHDpem/data/nHD
fi
echo ========= Installation complete ==========
# Verify the application binary was actually installed (exists and non-empty).
if [ -s /usr/bin/nHD_pem ] ; then
    echo "Installation Successful. Reboot now (enter y)?"
    # Wait up to 30 seconds for an answer; a timeout leaves user_boot empty.
    read -t 30 user_boot
    # BUGFIX: only reboot on an explicit yes answer (y/Y/yes...). Previously
    # ANY non-empty input -- including "n" -- triggered a reboot, contradicting
    # the "enter y" prompt.
    case "$user_boot" in
        [yY]*)
            echo "OK, rebooting..."
            sudo reboot
            ;;
        *)
            echo "Exiting, please reboot manually!"
            ;;
    esac
else
    echo "Installation script failed!"
fi
|
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Create a standalone toolchain package for Android.
# Pull in shared helper functions (register_var_option, register_option,
# extract_parameters, log, dump, run, copy_directory, ...) used throughout.
. `dirname $0`/prebuilt-common.sh
PROGRAM_PARAMETERS=""
PROGRAM_DESCRIPTION=\
"Generate a customized Android toolchain installation that includes
a working sysroot. The result is something that can more easily be
used as a standalone cross-compiler, e.g. to run configure and
make scripts."
# For now, this is the only toolchain that works reliably.
TOOLCHAIN_NAME=
register_var_option "--toolchain=<name>" TOOLCHAIN_NAME "Specify toolchain name"
LLVM_VERSION=
register_var_option "--llvm-version=<ver>" LLVM_VERSION "Specify LLVM version"
# C++ runtime to bundle: gnustl (default), stlport, or libc++/libcxx.
STL=gnustl
register_var_option "--stl=<name>" STL "Specify C++ STL"
ARCH=
register_option "--arch=<name>" do_arch "Specify target architecture" "arm"
# Option callback: records the --arch value.
do_arch () { ARCH=$1; }
# Default NDK root: three levels up from this script (it lives in build/tools).
NDK_DIR=`dirname $0`
NDK_DIR=`dirname $NDK_DIR`
NDK_DIR=`dirname $NDK_DIR`
register_var_option "--ndk-dir=<path>" NDK_DIR "Take source files from NDK at <path>"
# Prefer the native host tag when its prebuilts exist, else fall back to 32-bit.
if [ -d "$NDK_DIR/prebuilt/$HOST_TAG" ]; then
    SYSTEM=$HOST_TAG
else
    SYSTEM=$HOST_TAG32
fi
register_var_option "--system=<name>" SYSTEM "Specify host system"
PACKAGE_DIR=/tmp/ndk-$USER
register_var_option "--package-dir=<path>" PACKAGE_DIR "Place package file in <path>"
INSTALL_DIR=
register_var_option "--install-dir=<path>" INSTALL_DIR "Don't create package, install files to <path> instead."
PLATFORM=
register_option "--platform=<name>" do_platform "Specify target Android platform/API level." "android-3"
# Option callback: records the --platform value.
do_platform () { PLATFORM=$1; }
# Parse the actual command line against the options registered above.
extract_parameters "$@"
# Check NDK_DIR
if [ ! -d "$NDK_DIR/build/core" ] ; then
    echo "Invalid source NDK directory: $NDK_DIR"
    echo "Please use --ndk-dir=<path> to specify the path of an installed NDK."
    exit 1
fi
# Check ARCH.
# When --arch was omitted, derive it from the toolchain name prefix;
# otherwise normalize the user-supplied value by matching its suffix.
if [ -z "$ARCH" ]; then
    case $TOOLCHAIN_NAME in
        arm-*)
            ARCH=arm
            ;;
        x86-*)
            ARCH=x86
            ;;
        mipsel-*)
            ARCH=mips
            ;;
        aarch64-*)
            ARCH=arm64
            ;;
        x86_64-*)
            ARCH=x86_64
            ;;
        mips64el-*)
            ARCH=mips64
            ;;
        *)
            ARCH=arm
            ;;
    esac
    # ARCH_INC selects the sysroot include tree; it may be redirected below
    # to the NDK's "unknown" (bitcode) pseudo-arch.
    ARCH_INC=$ARCH
    log "Auto-config: --arch=$ARCH"
else
    ARCH_INC=$ARCH
    case $ARCH in
        *arm)
            ARCH=arm
            ;;
        *x86)
            ARCH=x86
            ;;
        *mips)
            ARCH=mips
            ;;
        *arm64)
            ARCH=arm64
            ;;
        *x86_64)
            ARCH=x86_64
            ;;
        *mips64)
            ARCH=mips64
            ;;
        *)
            ARCH=arm
            ;;
    esac
fi
# Sysroot library dir and STL library dir default to the native arch;
# both may be redirected below for bitcode builds.
ARCH_LIB=$ARCH
ARCH_STL=$ARCH
# A prefixed --arch value (ARCH_INC != ARCH) requests a bitcode toolchain.
if [ "$ARCH_INC" != "$ARCH" ]; then
    # NOTE(review): a "bc<arch>" prefix appears to request bitcode-to-native
    # translation (ndk-bc2native) -- confirm against prebuilt-common.sh.
    test -n "`echo $ARCH_INC | grep bc$ARCH`" && NEED_BC2NATIVE=yes
    test -z "`echo $ARCH_INC | grep $ARCH`" && NEED_BC_LIB=yes
    ARCH_INC=$(find_ndk_unknown_archs)
    test -z "$ARCH_INC" && ARCH_INC="$ARCH"
    test "$NEED_BC_LIB" = "yes" && ARCH_LIB=$ARCH_INC
    test "$NEED_BC_LIB" = "yes" -o "$NEED_BC2NATIVE" = "yes" && ARCH_STL=$ARCH_INC
fi
# Check toolchain name
if [ -z "$TOOLCHAIN_NAME" ]; then
    TOOLCHAIN_NAME=$(get_default_toolchain_name_for_arch $ARCH)
    echo "Auto-config: --toolchain=$TOOLCHAIN_NAME"
fi
# Bitcode STL builds only work with stlport; override any other --stl choice.
if [ "$ARCH_STL" != "$ARCH" ]; then
    if [ "$STL" != stlport ]; then
        echo "Force-config: --stl=stlport"
        STL=stlport
    fi
fi
# Bitcode builds need a target ABI for bc2native and a default LLVM version.
if [ "$ARCH_INC" != "$ARCH" ]; then
    TARGET_ABI=$(convert_arch_to_abi $ARCH | tr ',' '\n' | tail -n 1)
    if [ -z "$LLVM_VERSION" ]; then
        LLVM_VERSION=$DEFAULT_LLVM_VERSION
    fi
fi
# Detect LLVM version from toolchain name (a trailing "-clangX.Y" suffix
# means: use the default GCC toolchain for the arch plus that clang).
if [ -z "$LLVM_VERSION" ]; then
    LLVM_VERSION_EXTRACT=$(echo "$TOOLCHAIN_NAME" | grep 'clang[0-9]\.[0-9]$' | sed -e 's/.*-clang//')
    if [ -n "$LLVM_VERSION_EXTRACT" ]; then
        TOOLCHAIN_NAME=$(get_default_toolchain_name_for_arch $ARCH)
        LLVM_VERSION=$LLVM_VERSION_EXTRACT
        echo "Auto-config: --toolchain=$TOOLCHAIN_NAME, --llvm-version=$LLVM_VERSION"
    fi
fi
# Check PLATFORM: when none was given, pick the minimum API level that
# supports the arch (native archs only; bitcode builds default to android-9).
if [ -z "$PLATFORM" -a "$ARCH_INC" = "$ARCH" ] ; then
    case $ARCH in
        arm) PLATFORM=android-3
            ;;
        x86|mips)
            PLATFORM=android-9
            ;;
        arm64|x86_64|mips64)
            PLATFORM=android-20
            ;;
        *)
            # Unreachable in practice (ARCH was normalized above).
            # NOTE(review): no exit here, so execution would continue with an
            # empty PLATFORM if this branch were ever hit.
            dump "ERROR: Unsupported NDK architecture $ARCH!"
    esac
    log "Auto-config: --platform=$PLATFORM"
elif [ -z "$PLATFORM" ] ; then
    PLATFORM=android-9
    log "Auto-config: --platform=$PLATFORM"
fi
# Validate that the requested platform exists in this NDK.
if [ ! -d "$NDK_DIR/platforms/$PLATFORM" ] ; then
    echo "Invalid platform name: $PLATFORM"
    echo "Please use --platform=<name> with one of:" `(cd "$NDK_DIR/platforms" && ls)`
    exit 1
fi
# Check toolchain name
TOOLCHAIN_PATH="$NDK_DIR/toolchains/$TOOLCHAIN_NAME"
if [ ! -d "$TOOLCHAIN_PATH" ] ; then
    echo "Invalid toolchain name: $TOOLCHAIN_NAME"
    echo "Please use --toolchain=<name> with the name of a toolchain supported by the source NDK."
    echo "Try one of: " `(cd "$NDK_DIR/toolchains" && ls)`
    exit 1
fi
# Extract architecture from platform name
# NOTE(review): parse_toolchain_name presumably also sets
# ABI_CONFIGURE_TARGET and ABI used below -- confirm in prebuilt-common.sh.
parse_toolchain_name $TOOLCHAIN_NAME
# Check that there are any platform files for it!
(cd $NDK_DIR/platforms && ls -d */arch-$ARCH_INC >/dev/null 2>&1 )
if [ $? != 0 ] ; then
    echo "Platform $PLATFORM doesn't have any files for this architecture: $ARCH_INC"
    echo "Either use --platform=<name> or --toolchain=<name> to select a different"
    echo "platform or arch-dependent toolchain name (respectively)!"
    exit 1
fi
# Compute source sysroot (headers may come from a different arch dir than libs
# for bitcode builds -- see ARCH_INC vs ARCH_LIB above).
SRC_SYSROOT_INC="$NDK_DIR/platforms/$PLATFORM/arch-$ARCH_INC/usr/include"
SRC_SYSROOT_LIB="$NDK_DIR/platforms/$PLATFORM/arch-$ARCH_LIB/usr/lib"
if [ ! -d "$SRC_SYSROOT_INC" -o ! -d "$SRC_SYSROOT_LIB" ] ; then
    echo "No platform files ($PLATFORM) for this architecture: $ARCH"
    exit 1
fi
# Check that we have any prebuilts GCC toolchain here
if [ ! -d "$TOOLCHAIN_PATH/prebuilt" ] ; then
    echo "Toolchain is missing prebuilt files: $TOOLCHAIN_NAME"
    echo "You must point to a valid NDK release package!"
    exit 1
fi
if [ ! -d "$TOOLCHAIN_PATH/prebuilt/$SYSTEM" ] ; then
    echo "Host system '$SYSTEM' is not supported by the source NDK!"
    echo "Try --system=<name> with one of: " `(cd $TOOLCHAIN_PATH/prebuilt && ls) | grep -v gdbserver`
    exit 1
fi
# Sanity-check: the target gcc driver must exist in the prebuilt tree.
TOOLCHAIN_PATH="$TOOLCHAIN_PATH/prebuilt/$SYSTEM"
TOOLCHAIN_GCC=$TOOLCHAIN_PATH/bin/$ABI_CONFIGURE_TARGET-gcc
if [ ! -f "$TOOLCHAIN_GCC" ] ; then
    echo "Toolchain $TOOLCHAIN_GCC is missing!"
    exit 1
fi
# Validate the LLVM prebuilts when a clang version was requested/derived.
if [ -n "$LLVM_VERSION" ]; then
    LLVM_TOOLCHAIN_PATH="$NDK_DIR/toolchains/llvm-$LLVM_VERSION"
    # Check that we have any prebuilts LLVM toolchain here
    if [ ! -d "$LLVM_TOOLCHAIN_PATH/prebuilt" ] ; then
        echo "LLVM Toolchain is missing prebuilt files"
        echo "You must point to a valid NDK release package!"
        exit 1
    fi
    if [ ! -d "$LLVM_TOOLCHAIN_PATH/prebuilt/$SYSTEM" ] ; then
        echo "Host system '$SYSTEM' is not supported by the source NDK!"
        echo "Try --system=<name> with one of: " `(cd $LLVM_TOOLCHAIN_PATH/prebuilt && ls)`
        exit 1
    fi
    LLVM_TOOLCHAIN_PATH="$LLVM_TOOLCHAIN_PATH/prebuilt/$SYSTEM"
fi
# Get GCC_BASE_VERSION. Note that GCC_BASE_VERSION may be slightly different from GCC_VERSION.
# eg. In gcc4.6 GCC_BASE_VERSION is "4.6.x-google"
# Ask gcc itself where libgcc.a lives; its parent directory is named after
# the full base version string.
LIBGCC_PATH=`$TOOLCHAIN_GCC -print-libgcc-file-name`
LIBGCC_BASE_PATH=${LIBGCC_PATH%/*} # base path of libgcc.a
GCC_BASE_VERSION=${LIBGCC_BASE_PATH##*/} # stuff after the last /
# Create temporary directory
# NOTE(review): NDK_TMPDIR presumably comes from prebuilt-common.sh -- confirm.
TMPDIR=$NDK_TMPDIR/standalone/$TOOLCHAIN_NAME
dump "Copying prebuilt binaries..."
# Now copy the GCC toolchain prebuilt binaries
run copy_directory "$TOOLCHAIN_PATH" "$TMPDIR"
# Replace soft-link mcld by real file
ALL_LDS=`find $TMPDIR -name "*mcld"`
for LD in $ALL_LDS; do
    rm -f "$LD"
    cp -a "$NDK_DIR/toolchains/llvm-$DEFAULT_LLVM_VERSION/prebuilt/$SYSTEM/bin/ld.mcld" "$LD"
done
# Copy python-related to for gdb.exe
# PYTHON_x = "pythonN", PYTHON_xdotx = "pythonN.M" (from DEFAULT_PYTHON_VERSION).
PYTHON=python
PYTHON_x=python$(echo "$DEFAULT_PYTHON_VERSION" | cut -d . -f 1)
PYTHON_xdotx=python$(echo "$DEFAULT_PYTHON_VERSION" | cut -d . -f 1).$(echo "$DEFAULT_PYTHON_VERSION" | cut -d . -f 2)
copy_directory "$NDK_DIR/prebuilt/$SYSTEM/include/$PYTHON_xdotx" "$TMPDIR/include/$PYTHON_xdotx"
copy_directory "$NDK_DIR/prebuilt/$SYSTEM/lib/$PYTHON_xdotx" "$TMPDIR/lib/$PYTHON_xdotx"
copy_file_list "$NDK_DIR/prebuilt/$SYSTEM/bin" "$TMPDIR/bin" "$PYTHON$HOST_EXE" "$PYTHON_x$HOST_EXE" "$PYTHON_xdotx$HOST_EXE"
# Windows additionally needs the python DLL next to the binaries.
if [ "$HOST_TAG32" = "windows" ]; then
    copy_file_list "$NDK_DIR/prebuilt/$SYSTEM/bin" "$TMPDIR/bin" lib$PYTHON_xdotx.dll
fi
# Copy yasm for x86
if [ "$ARCH" = "x86" ]; then
    copy_file_list "$NDK_DIR/prebuilt/$SYSTEM/bin" "$TMPDIR/bin" "yasm$HOST_EXE"
fi
# Clang stuff
# Emit the epilogue appended to the generated clang/clang++ wrapper scripts.
# It is non-empty only for bitcode toolchains (NEED_BC2NATIVE=yes): the
# emitted code locates the -o output of a link step and post-processes it
# with ndk-bc2native into a native binary. Emits POSIX shell when HOST_EXE
# is empty, Windows batch otherwise.
# Uses globals: NEED_BC2NATIVE, HOST_EXE, TARGET_ABI, PLATFORM.
dump_extra_compile_commands () {
    if [ "$NEED_BC2NATIVE" != "yes" ]; then
        return
    fi
    if [ -z "$HOST_EXE" ]; then
        echo '# Call bc2native if needed'
        echo ''
        echo 'if [ -n "`echo $@ | grep '\'\\ \\-c\''`" ] || [ "$1" = "-c" ]; then'
        echo ' exit'
        echo 'fi'
        echo 'while [ -n "$1" ]; do'
        echo ' if [ "$1" = "-o" ]; then'
        echo ' output="$2"'
        echo ' break'
        echo ' fi'
        echo ' shift'
        echo 'done'
        echo 'test -z "$output" && output=a.out'
        echo 'if [ -f "`dirname $0`/ndk-bc2native" ]; then'
        echo ' `dirname $0`/ndk-bc2native --sysroot=`dirname $0`/../sysroot --abi='$TARGET_ABI' --platform='$PLATFORM' --file $output $output'
        echo 'else'
        echo ' export PYTHONPATH=`dirname $0`/../lib/python2.7/'
        echo ' `dirname $0`/python `dirname $0`/ndk-bc2native.py --sysroot=`dirname $0`/../sysroot --abi='$TARGET_ABI' --platform='$PLATFORM' --file $output $output'
        echo 'fi'
    else
        echo 'rem Call bc2native if needed'
        echo ''
        echo ' if not "%1" == "-c" goto :keep_going'
        echo ' echo %* | grep "\\ \\-c"'
        echo ' if ERRORLEVEL 1 goto :keep_going'
        echo ' exit'
        echo ':keep_going'
        echo ':keep_find_output'
        echo ' if not "%1" == "-o" goto :check_next'
        echo ' set output=%2'
        echo ':check_next'
        echo ' shift'
        # BUGFIX: keep scanning WHILE arguments remain ("if not ... == """).
        # The original looped back when %1 was EMPTY, which both spins forever
        # at end-of-args and stops scanning after the first argument. The
        # POSIX-shell branch above loops while [ -n "$1" ]; mirror that.
        echo ' if not "%1" == "" goto :keep_find_output'
        echo ' if not "%output%" == "" goto :check_done'
        echo ' set output=a.out'
        echo ':check_done'
        echo 'if exist %~dp0\\ndk-bc2native'$HOST_EXE' ('
        # BUGFIX: close the final variable reference (was a bare "%output",
        # which cmd.exe passes through literally instead of expanding).
        echo ' %~dp0\\ndk-bc2native'$HOST_EXE' --sysroot=%~dp0\\.\\sysroot --abi='$TARGET_ABI' --platform='$PLATFORM' --file %output% %output%'
        # BUGFIX: cmd.exe requires ") else (" on one line; a bare "else ("
        # after an unclosed block is a batch syntax error.
        echo ') else ('
        echo ' set PYTHONPATH=%~dp0\\..\\lib\\python2.7\\'
        echo ' %~dp0\\python'$HOST_EXE' %~dp0\\ndk-bc2native.py --sysroot=%~dp0\\..\\sysroot --abi='$TARGET_ABI' --platform='$PLATFORM' --file %output% %output%'
        echo ')'
    fi
}
if [ -n "$LLVM_VERSION" ]; then
    # Copy the clang/llvm toolchain prebuilt binaries
    run copy_directory "$LLVM_TOOLCHAIN_PATH" "$TMPDIR"
    # Move clang and clang++ to clang${LLVM_VERSION} and clang${LLVM_VERSION}++,
    # then create scripts linking them with predefined -target flag. This is to
    # make clang/++ easier drop-in replacement for gcc/++ in NDK standalone mode.
    # Note that the file name of "clang" isn't important, and the trailing
    # "++" tells clang to compile in C++ mode
    LLVM_TARGET=
    # Select the default -target triple and tool-name prefix per architecture.
    case "$ARCH" in
        arm) # Note: -target may change by clang based on the
             # presence of subsequent -march=armv7-a and/or -mthumb
            LLVM_TARGET=armv5te-none-linux-androideabi
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_arm
            ;;
        x86)
            LLVM_TARGET=i686-none-linux-android
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_x86
            ;;
        mips)
            LLVM_TARGET=mipsel-none-linux-android
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_mips
            ;;
        arm64)
            LLVM_TARGET=aarch64-linux-android
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_arm64
            ;;
        x86_64)
            LLVM_TARGET=x86_64-none-linux-android
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_x86_64
            ;;
        mips64)
            LLVM_TARGET=mips64el-none-linux-android
            TOOLCHAIN_PREFIX=$DEFAULT_ARCH_TOOLCHAIN_PREFIX_mips64
            ;;
        *)
            # Unreachable in practice (ARCH was normalized earlier); note
            # there is no exit here, so LLVM_TARGET would stay empty.
            dump "ERROR: Unsupported NDK architecture $ARCH!"
    esac
    # Need to remove '.' from LLVM_VERSION when constructing new clang name,
    # otherwise clang3.3++ may still compile *.c code as C, not C++, which
    # is not consistent with g++
    LLVM_VERSION_WITHOUT_DOT=$(echo "$LLVM_VERSION" | sed -e "s!\.!!")
    mv "$TMPDIR/bin/clang${HOST_EXE}" "$TMPDIR/bin/clang${LLVM_VERSION_WITHOUT_DOT}${HOST_EXE}"
    if [ -h "$TMPDIR/bin/clang++${HOST_EXE}" ] ; then
        ## clang++ is a link to clang. Remove it and reconstruct
        rm "$TMPDIR/bin/clang++${HOST_EXE}"
        ln -sf "clang${LLVM_VERSION_WITHOUT_DOT}${HOST_EXE}" "$TMPDIR/bin/clang${LLVM_VERSION_WITHOUT_DOT}++${HOST_EXE}"
    else
        mv "$TMPDIR/bin/clang++${HOST_EXE}" "$TMPDIR/bin/clang$LLVM_VERSION_WITHOUT_DOT++${HOST_EXE}"
    fi
    # Bitcode toolchains compile to LLVM IR against the le32 pseudo-target.
    EXTRA_CLANG_FLAGS=
    EXTRA_CLANGXX_FLAGS=
    if [ "$ARCH_STL" != "$ARCH" ]; then
        LLVM_TARGET=le32-none-ndk
        EXTRA_CLANG_FLAGS="-emit-llvm"
        EXTRA_CLANGXX_FLAGS="$EXTRA_CLANG_FLAGS -I\`dirname \$0\`/../include/c++/$GCC_BASE_VERSION"
    fi
    # Generate POSIX-shell wrappers. Heredoc bodies land in the wrapper files
    # verbatim; escaped \$ and \` expand when the WRAPPER runs, unescaped
    # $VARS and $(dump_extra_compile_commands) expand NOW, at generation time.
    cat > "$TMPDIR/bin/clang" <<EOF
if [ "\$1" != "-cc1" ]; then
\`dirname \$0\`/clang$LLVM_VERSION_WITHOUT_DOT -target $LLVM_TARGET "\$@" $EXTRA_CLANG_FLAGS
$(dump_extra_compile_commands)
else
# target/triple already spelled out.
\`dirname \$0\`/clang$LLVM_VERSION_WITHOUT_DOT "\$@" $EXTRA_CLANG_FLAGS
fi
EOF
    cat > "$TMPDIR/bin/clang++" <<EOF
if [ "\$1" != "-cc1" ]; then
\`dirname \$0\`/clang$LLVM_VERSION_WITHOUT_DOT++ -target $LLVM_TARGET "\$@" $EXTRA_CLANGXX_FLAGS
$(dump_extra_compile_commands)
else
# target/triple already spelled out.
\`dirname \$0\`/clang$LLVM_VERSION_WITHOUT_DOT++ "\$@" $EXTRA_CLANGXX_FLAGS
fi
EOF
    chmod 0755 "$TMPDIR/bin/clang" "$TMPDIR/bin/clang++"
    # Also install the prefixed (e.g. arm-linux-androideabi-clang) aliases.
    cp -a "$TMPDIR/bin/clang" "$TMPDIR/bin/$TOOLCHAIN_PREFIX-clang"
    cp -a "$TMPDIR/bin/clang++" "$TMPDIR/bin/$TOOLCHAIN_PREFIX-clang++"
    # On Windows hosts also generate .cmd batch wrappers.
    if [ -n "$HOST_EXE" ] ; then
        cat > "$TMPDIR/bin/clang.cmd" <<EOF
@echo off
if "%1" == "-cc1" goto :L
%~dp0\\clang${LLVM_VERSION_WITHOUT_DOT}${HOST_EXE} -target $LLVM_TARGET %* $EXTRA_CLANG_FLAGS
$(dump_extra_compile_commands)
if ERRORLEVEL 1 exit /b 1
goto :done
:L
rem target/triple already spelled out.
%~dp0\\clang${LLVM_VERSION_WITHOUT_DOT}${HOST_EXE} %* $EXTRA_CLANG_FLAGS
if ERRORLEVEL 1 exit /b 1
:done
EOF
        cat > "$TMPDIR/bin/clang++.cmd" <<EOF
@echo off
if "%1" == "-cc1" goto :L
%~dp0\\clang${LLVM_VERSION_WITHOUT_DOT}++${HOST_EXE} -target $LLVM_TARGET %* $EXTRA_CLANGXX_FLAGS
$(dump_extra_compile_commands)
if ERRORLEVEL 1 exit /b 1
goto :done
:L
rem target/triple already spelled out.
%~dp0\\clang${LLVM_VERSION_WITHOUT_DOT}++${HOST_EXE} %* $EXTRA_CLANGXX_FLAGS
if ERRORLEVEL 1 exit /b 1
:done
EOF
        chmod 0755 "$TMPDIR/bin/clang.cmd" "$TMPDIR/bin/clang++.cmd"
        cp -a "$TMPDIR/bin/clang.cmd" "$TMPDIR/bin/$TOOLCHAIN_PREFIX-clang.cmd"
        cp -a "$TMPDIR/bin/clang++.cmd" "$TMPDIR/bin/$TOOLCHAIN_PREFIX-clang++.cmd"
    fi
fi
dump "Copying sysroot headers and libraries..."
# Copy the sysroot under $TMPDIR/sysroot. The toolchain was built to
# expect the sysroot files to be placed there!
run copy_directory_nolinks "$SRC_SYSROOT_INC" "$TMPDIR/sysroot/usr/include"
run copy_directory_nolinks "$SRC_SYSROOT_LIB" "$TMPDIR/sysroot/usr/lib"
# x86_64 toolchain is built multilib.
if [ "$ARCH" = "x86_64" ]; then
    run copy_directory_nolinks "$SRC_SYSROOT_LIB/../lib64" "$TMPDIR/sysroot/usr/lib64"
    run copy_directory_nolinks "$SRC_SYSROOT_LIB/../libx32" "$TMPDIR/sysroot/usr/libx32"
fi
# Bitcode builds additionally get the gabi++/portable/gcc-unwind support libs.
# NOTE(review): $ABI is presumably set by parse_toolchain_name above -- confirm.
if [ "$ARCH_INC" != "$ARCH" ]; then
    cp -a $NDK_DIR/$GABIXX_SUBDIR/libs/$ABI/* $TMPDIR/sysroot/usr/lib
    cp -a $NDK_DIR/$LIBPORTABLE_SUBDIR/libs/$ABI/* $TMPDIR/sysroot/usr/lib
    cp -a $NDK_DIR/$GCCUNWIND_SUBDIR/libs/$ABI/* $TMPDIR/sysroot/usr/lib
    # compiler-rt prebuilts exist only for the 32-bit architectures.
    if [ "$ARCH" = "${ARCH%%64*}" ]; then
        cp -a $NDK_DIR/$COMPILER_RT_SUBDIR/libs/$ABI/* $TMPDIR/sysroot/usr/lib
    fi
fi
# When libs were taken from the bitcode pseudo-arch, the native crt* startup
# objects still need to come from the real arch's platform directory.
if [ "$ARCH_LIB" != "$ARCH" ]; then
    cp -a $NDK_DIR/platforms/$PLATFORM/arch-$ARCH/usr/lib/crt* $TMPDIR/sysroot/usr/lib
fi
dump "Copying libstdc++ headers and libraries..."
# Locations of the C++ runtime sources/prebuilts inside the source NDK.
GNUSTL_DIR=$NDK_DIR/$GNUSTL_SUBDIR/$GCC_VERSION
GNUSTL_LIBS=$GNUSTL_DIR/libs
STLPORT_DIR=$NDK_DIR/$STLPORT_SUBDIR
STLPORT_LIBS=$STLPORT_DIR/libs
LIBCXX_DIR=$NDK_DIR/$LIBCXX_SUBDIR
LIBCXX_LIBS=$LIBCXX_DIR/libs
SUPPORT_DIR=$NDK_DIR/$SUPPORT_SUBDIR
COMPILER_RT_DIR=$NDK_DIR/$COMPILER_RT_SUBDIR
COMPILER_RT_LIBS=$COMPILER_RT_DIR/libs
# Destination directories inside the standalone toolchain.
ABI_STL="$TMPDIR/$ABI_CONFIGURE_TARGET"
ABI_STL_INCLUDE="$TMPDIR/include/c++/$GCC_BASE_VERSION"
ABI_STL_INCLUDE_TARGET="$ABI_STL_INCLUDE/$ABI_CONFIGURE_TARGET"
# Copy the listed gabi++ headers into the STL include directory.
# $@: filenames of headers (relative to gabi++/include)
copy_gabixx_headers () {
    # BUGFIX: quote "$@" so each argument stays one word (unquoted $@ is
    # subject to word splitting and globbing).
    for header in "$@"; do
        (cd $ABI_STL_INCLUDE && cp -a ../../gabi++/include/$header $header)
    done
}
# Copy common STL headers (i.e. the non-arch-specific ones)
copy_stl_common_headers () {
    case $STL in
        gnustl)
            copy_directory "$GNUSTL_DIR/include" "$ABI_STL_INCLUDE"
            ;;
        libcxx|libc++)
            copy_directory "$LIBCXX_DIR/libcxx/include" "$ABI_STL_INCLUDE"
            copy_directory "$SUPPORT_DIR/include" "$ABI_STL_INCLUDE"
            # libc++ and stlport both rely on gabi++ for ABI/unwind headers.
            copy_directory "$STLPORT_DIR/../gabi++/include" "$ABI_STL_INCLUDE/../../gabi++/include"
            copy_gabixx_headers cxxabi.h unwind.h unwind-arm.h unwind-itanium.h gabixx_config.h
            ;;
        stlport)
            copy_directory "$STLPORT_DIR/stlport" "$ABI_STL_INCLUDE"
            copy_directory "$STLPORT_DIR/../gabi++/include" "$ABI_STL_INCLUDE/../../gabi++/include"
            copy_gabixx_headers cxxabi.h unwind.h unwind-arm.h unwind-itanium.h gabixx_config.h
            ;;
    esac
}
# Copy the selected STL's runtime libraries for one ABI into the toolchain.
# $1: Source ABI (e.g. 'armeabi')
# $2: Optional destination directory, default to empty (e.g. "", "thumb", "armv7-a/thumb")
# $3: Optional source directory, default to empty (e.g. "", "thumb", "armv7-a/thumb")
# $4: Optional "yes" (default) or "no" about whether to copy additional header (eg. include/bits)
copy_stl_libs () {
    local ABI=$1
    local DEST_DIR=$2
    local SRC_DIR=$3
    # BUGFIX: honor the documented 4th parameter. It was hard-coded to "yes",
    # so callers passing "no" (armeabi-v7a-hard below) still got the extra
    # include/bits headers copied.
    local COPY_ADDITIONAL_HEADER=${4:-yes}
    case $STL in
    gnustl)
        # gnustl has thumb version of libraries. Append ABI with basename($DEST_DIR) if $DEST_DIR contain '/'
        ABI_SRC_DIR=$ABI
        if [ -n "$SRC_DIR" ]; then
            ABI_SRC_DIR=$ABI/$SRC_DIR
        else
            if [ "$DEST_DIR" != "${DEST_DIR%%/*}" ] ; then
                ABI_SRC_DIR=$ABI/`basename $DEST_DIR`
            fi
        fi
        if [ "$COPY_ADDITIONAL_HEADER" != "no" ]; then
            copy_directory "$GNUSTL_LIBS/$ABI/include/bits" "$ABI_STL_INCLUDE_TARGET/$DEST_DIR/bits"
        fi
        copy_file_list "$GNUSTL_LIBS/$ABI_SRC_DIR" "$ABI_STL/lib/$DEST_DIR" "libgnustl_shared.so"
        copy_file_list "$GNUSTL_LIBS/$ABI_SRC_DIR" "$ABI_STL/lib/$DEST_DIR" "libsupc++.a"
        # Install the static gnustl as the toolchain's default libstdc++.a.
        cp -p "$GNUSTL_LIBS/$ABI_SRC_DIR/libgnustl_static.a" "$ABI_STL/lib/$DEST_DIR/libstdc++.a"
        ;;
    libcxx|libc++)
        # compiler-rt prebuilts exist only for 32-bit architectures.
        if [ "$ARCH" = "${ARCH%%64*}" ]; then
            copy_file_list "$COMPILER_RT_LIBS/$ABI" "$ABI_STL/lib/$DEST_DIR" "libcompiler_rt_shared.so" "libcompiler_rt_static.a"
        fi
        copy_file_list "$LIBCXX_LIBS/$ABI" "$ABI_STL/lib/$DEST_DIR" "libc++_shared.so"
        cp -p "$LIBCXX_LIBS/$ABI/libc++_static.a" "$ABI_STL/lib/$DEST_DIR/libstdc++.a"
        ;;
    stlport)
        if [ "$ARCH_STL" != "$ARCH" ]; then
            # Bitcode build: compile stlport for the "unknown" ABI on the fly.
            tmp_lib_dir=$TMPDIR/stl
            $NDK_DIR/build/tools/build-cxx-stl.sh --stl=stlport --out-dir=$tmp_lib_dir --abis=unknown
            cp -p "`ls $tmp_lib_dir/sources/cxx-stl/stlport/libs/*/libstlport_static.a`" "$ABI_STL/lib/$DEST_DIR/libstdc++.a"
            cp -p "`ls $tmp_lib_dir/sources/cxx-stl/stlport/libs/*/libstlport_shared.bc`" "$ABI_STL/lib/$DEST_DIR/libstlport_shared.so"
            rm -rf $tmp_lib_dir
        else
            copy_file_list "$STLPORT_LIBS/$ABI" "$ABI_STL/lib/$DEST_DIR" "libstlport_shared.so"
            cp -p "$STLPORT_LIBS/$ABI/libstlport_static.a" "$ABI_STL/lib/$DEST_DIR/libstdc++.a"
        fi
        ;;
    *)
        dump "ERROR: Unsupported STL: $STL"
        exit 1
        ;;
    esac
}
mkdir -p "$ABI_STL_INCLUDE_TARGET"
fail_panic "Can't create directory: $ABI_STL_INCLUDE_TARGET"
copy_stl_common_headers
# Install per-ABI STL libraries; ARM gets every ABI/instruction-mode flavor
# (armeabi, thumb, armv7-a, armv7-a hard-float), other arches just one.
case $ARCH in
    arm)
        copy_stl_libs armeabi ""
        copy_stl_libs armeabi "/thumb"
        copy_stl_libs armeabi-v7a "armv7-a"
        copy_stl_libs armeabi-v7a "armv7-a/thumb"
        copy_stl_libs armeabi-v7a-hard "armv7-a/hard" "." "no"
        copy_stl_libs armeabi-v7a-hard "armv7-a/thumb/hard" "thumb" "no"
        ;;
    arm64)
        copy_stl_libs arm64-v8a ""
        ;;
    x86|mips|mips64|x86_64)
        copy_stl_libs "$ARCH" ""
        ;;
    *)
        dump "ERROR: Unsupported NDK architecture: $ARCH"
        exit 1
        ;;
esac
# Install or Package
if [ -n "$INSTALL_DIR" ] ; then
    dump "Copying files to: $INSTALL_DIR"
    # Move when the target doesn't exist yet (cheap rename);
    # copy into it when it already does.
    if [ ! -d "$INSTALL_DIR" ]; then
        run move_directory "$TMPDIR" "$INSTALL_DIR"
    else
        run copy_directory "$TMPDIR" "$INSTALL_DIR"
    fi
else
    # No --install-dir: create a tarball named after the toolchain.
    PACKAGE_FILE="$PACKAGE_DIR/$TOOLCHAIN_NAME.tar.bz2"
    dump "Creating package file: $PACKAGE_FILE"
    pack_archive "$PACKAGE_FILE" "`dirname $TMPDIR`" "$TOOLCHAIN_NAME"
    fail_panic "Could not create tarball from $TMPDIR"
fi
dump "Cleaning up..."
run rm -rf $TMPDIR
dump "Done."
|
<reponame>KrishAmal/NimbleNearby
package com.studio.ak.nimblenearby.adapters;
import android.content.Context;
import android.graphics.Typeface;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.studio.ak.nimblenearby.R;
import com.studio.ak.nimblenearby.model.POIReviews;
import java.util.List;
/**
* Created by <NAME> on 13-12-2016.
*/
/**
 * RecyclerView adapter that renders a list of {@link POIReviews} entries:
 * author name, review text, and a "rating/5" label, one review_list_item
 * row per review.
 */
public class ReviewsAdapter extends RecyclerView.Adapter<ReviewsAdapter.ViewHolder> {

    private List<POIReviews> reviews;
    private Context context;

    /**
     * @param context caller's context (retained for parity with other adapters)
     * @param reviews reviews to display; the list is used as-is, not copied
     */
    public ReviewsAdapter(Context context, List<POIReviews> reviews) {
        this.reviews = reviews;
        this.context = context;
    }

    /**
     * Row holder: caches the three text views and applies the Roboto
     * typefaces once per holder instead of on every bind.
     */
    public static class ViewHolder extends RecyclerView.ViewHolder {
        TextView author;
        TextView content;
        TextView rating;

        public ViewHolder(View itemView) {
            super(itemView);
            author = (TextView) itemView.findViewById(R.id.review_name);
            content = (TextView) itemView.findViewById(R.id.review_content);
            rating = (TextView) itemView.findViewById(R.id.review_rating);
            content.setTypeface(Typeface.createFromAsset(itemView.getResources().getAssets(),
                    itemView.getContext().getResources().getString(R.string.roboto_light)));
            author.setTypeface(Typeface.createFromAsset(itemView.getResources().getAssets(),
                    itemView.getContext().getResources().getString(R.string.roboto_regular)));
            rating.setTypeface(Typeface.createFromAsset(itemView.getResources().getAssets(),
                    itemView.getContext().getResources().getString(R.string.roboto_light)));
        }
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        // BUGFIX: inflate against the parent with attachToRoot=false so the
        // row's layout_* attributes are resolved against the RecyclerView.
        // inflate(res, null) silently discards them.
        View layoutView = LayoutInflater.from(parent.getContext())
                .inflate(R.layout.review_list_item, parent, false);
        return new ViewHolder(layoutView);
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        POIReviews review = reviews.get(position);
        // Ratings are shown out of five, e.g. "4/5".
        String rate = review.getRating().concat("/5");
        holder.author.setText(review.getAuthor_name());
        holder.content.setText(review.getText());
        holder.rating.setText(rate);
    }

    @Override
    public int getItemCount() {
        return this.reviews.size();
    }
}
|
#ifndef RPCZ_SERVER_FUNCTION_H
#define RPCZ_SERVER_FUNCTION_H
#include <boost/function.hpp>
namespace rpcz {
class client_connection;
class message_iterator;
// Callback type used to dispatch an incoming request: receives the
// originating client connection and an iterator over the request's
// message parts.
typedef boost::function<void(const client_connection&, message_iterator&)>
server_function;
} // namespace rpcz
#endif // RPCZ_SERVER_FUNCTION_H
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-10 18:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine ``Area.stuffed`` as a BooleanField defaulting to False."""

    # Must run after the level/room location assignment migration.
    dependencies = [
        ('mapdata', '0085_assign_location_level_room'),
    ]

    operations = [
        migrations.AlterField(
            model_name='area',
            name='stuffed',
            field=models.BooleanField(default=False, verbose_name='stuffed area'),
        ),
    ]
|
package thelm.rslargepatterns.client.gui;
import com.google.common.primitives.Ints;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.text.translation.I18n;
import net.minecraftforge.items.ItemHandlerHelper;
import thelm.rslargepatterns.container.ContainerItemAmount;
import thelm.rslargepatterns.network.PacketHandler;
import thelm.rslargepatterns.network.packet.PacketSetItemStack;
/**
 * Amount-selection screen for an item stack: lets the player choose how many
 * items to use (bounded by {@code maxAmount}) and, on OK, pushes the resized
 * stack for the given container slot to the server.
 */
public class GuiItemAmount extends GuiAmount {

    /** Index of the slot in the parent container whose stack is being edited. */
    private int containerSlot;
    /** The stack whose amount is being edited. */
    private ItemStack stack;
    /** Largest amount the user is allowed to pick. */
    private int maxAmount;

    public GuiItemAmount(GuiBase parent, InventoryPlayer playerInventory, int containerSlot, ItemStack stack, int maxAmount) {
        super(parent, new ContainerItemAmount(playerInventory, stack));
        this.containerSlot = containerSlot;
        this.stack = stack;
        this.maxAmount = maxAmount;
    }

    @Override
    protected String getTitle() {
        return I18n.translateToLocal("gui.refinedstorage:item_amount");
    }

    @Override
    protected int[] getIncrements() {
        // First row of buttons adds 1/10/64, second row subtracts the same.
        return new int[] {1, 10, 64, -1, -10, -64};
    }

    @Override
    protected int getMaxAmount() {
        return maxAmount;
    }

    @Override
    protected int getDefaultAmount() {
        // Start the field at the stack's current size.
        return stack.getCount();
    }

    @Override
    protected void onOkButtonPressed(boolean shiftDown) {
        Integer chosen = Ints.tryParse(amountField.getText());
        if (chosen == null) {
            // Non-numeric input: keep the screen open so the user can retry.
            return;
        }
        PacketHandler.INSTANCE.sendToServer(
                new PacketSetItemStack((short) containerSlot, ItemHandlerHelper.copyStackWithSize(stack, chosen)));
        close();
    }
}
|
// Tailwind CSS configuration.
module.exports = {
  // Files scanned so unused utility classes are purged from production builds.
  purge: ['./src/**/*.{js,jsx,ts,tsx}', './public/index.html'],
  darkMode: false, // or 'media' or 'class'
  theme: {
    // Project font stacks (loaded elsewhere, e.g. via Google Fonts).
    fontFamily: {
      serif: 'Quicksand',
      sans: 'Montserrat',
      display: 'Megrim',
    },
    // Soft "neumorphic" dual light/dark shadow used via `shadow-neuShadow`.
    boxShadow: {
      neuShadow:
        '-4px -4px 10px rgb(255, 255, 255), 4px 4px 10px rgba(0, 0, 0, 0.219)',
    },
    // Responsive breakpoints: mobile / tablet / laptop / desktop.
    screens: {
      mb: '360px',
      tb: '640px',
      lp: '1024px',
      dp: '1280px',
    },
  },
  variants: {
    extend: {},
  },
  plugins: [],
};
|
#!/bin/csh
# '@(#)dbupdate.sh 22.1 03/24/08 1991-2003 '
#
#
# Copyright (C) 2015 University of Oregon
#
# You may distribute under the terms of either the GNU General Public
# License or the Apache License, as specified in the LICENSE file.
#
# For more information, see the LICENSE file.
#
#
#
# Script to start up managedb to update the database running under 'nice'.
# The optional arg "slow_ms" is used to slow things down internally so as
# not to use all of the cpu time available.
# slow_ms = 0 is full speed
# slow_ms = 1000 would be using about 2-5 % of the cpu
# Remember, it is running under 'nice' so that any other process will be
# able to take the cpu away from this update anyway. The default
# slow_ms for 'forever' is 1000. The default slow_ms for 'once' is 0.
# Require at least one argument: stop | once | forever.
# NOTE(review): usage errors exit 0; kept for compatibility with existing
# callers that may not check the exit status.
if ( $#argv < 1 ) then
    echo "usage: dbupdate stop | once [slow_ms] | forever [slow_ms]"
    exit 0
endif
if ( $1 == "stop" ) then
    # Get pids of java managedb.jar processes
    set pslist = `ps -A -o pid,args | \grep -w java | \grep -v grep| \grep managedb | awk '{ print $1 }'`
    if ( $#pslist ) then
        # Kill each matching process, one pid at a time.
        # (The redundant inner "if ( $#pslist )" was removed: the while
        # condition already guarantees the list is non-empty.)
        while ($#pslist)
            echo kill $pslist[1]
            kill $pslist[1]
            shift pslist
        end
        exit 0
    else
        echo "No dbupdate found"
        exit 0
    endif
endif
# Look for an already-running updater before starting a new one.
set pslist = `ps -A -o pid,args | \grep -w java | \grep -v grep| \grep managedb | awk '{ print $1 }'`
# if one is running and we were not asked to kill it, then error out
if ( $#pslist > 0 ) then
    echo "dbupdate is already running, cannot start another one"
    exit 0
else if ( $1 == "once" ) then
    # Run it once; optional $2 is the slow_ms throttle (default 0 = full speed).
    if ( $#argv > 1 ) then
        nice managedb update $2
    else
        nice managedb update
    endif
else if ( $1 == "forever" ) then
    # Run it in a loop; optional $2 is the slow_ms throttle (default 1000).
    if ( $#argv > 1 ) then
        nice managedb updateloop $2
    else
        nice managedb updateloop
    endif
else
    echo "usage: dbupdate stop | once [slow_ms] | forever [slow_ms]"
endif
|
<gh_stars>1-10
package water.webserver.jetty8;
import org.eclipse.jetty.plus.jaas.JAASLoginService;
import org.eclipse.jetty.security.Authenticator;
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.security.DefaultIdentityService;
import org.eclipse.jetty.security.HashLoginService;
import org.eclipse.jetty.security.IdentityService;
import org.eclipse.jetty.security.LoginService;
import org.eclipse.jetty.security.SpnegoLoginService;
import org.eclipse.jetty.security.authentication.BasicAuthenticator;
import org.eclipse.jetty.security.authentication.FormAuthenticator;
import org.eclipse.jetty.security.authentication.SpnegoAuthenticator;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.bio.SocketConnector;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.server.handler.HandlerWrapper;
import org.eclipse.jetty.server.session.HashSessionIdManager;
import org.eclipse.jetty.server.session.HashSessionManager;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.server.ssl.SslSocketConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import water.webserver.iface.H2OHttpConfig;
import water.webserver.iface.H2OHttpView;
import water.webserver.iface.LoginType;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collections;
class Jetty8Helper {
// HTTP configuration extracted from the view at construction time.
private final H2OHttpConfig config;
// View object providing configuration and request-handling hooks.
private final H2OHttpView h2oHttpView;

/**
 * @param h2oHttpView view supplying the server configuration; must not be null
 */
Jetty8Helper(H2OHttpView h2oHttpView) {
    this.h2oHttpView = h2oHttpView;
    this.config = h2oHttpView.getConfig();
}
/**
 * Builds a Jetty server with a single connector bound to {@code ip}:{@code port}.
 * TLS is used when a keystore (config.jks) is configured; otherwise plain HTTP.
 * The connector is sized via {@link #configureConnector(String, Connector)}.
 */
Server createJettyServer(String ip, int port) {
    // Lift Jetty's cap on form content so large POST bodies are accepted.
    System.setProperty("org.eclipse.jetty.server.Request.maxFormContentSize", Integer.toString(Integer.MAX_VALUE));

    final Server server = new Server();
    server.setSendServerVersion(false);

    final boolean useTls = config.jks != null;
    final String scheme = useTls ? "https" : "http";
    final Connector connector;
    if (useTls) {
        // A keystore is configured: serve over SSL/TLS.
        final SslContextFactory contextFactory = new SslContextFactory(config.jks);
        contextFactory.setKeyStorePassword(config.jks_pass);
        connector = new SslSocketConnector(contextFactory);
    } else {
        connector = new SocketConnector();
    }

    // A null ip means "bind all interfaces" (Jetty default).
    if (ip != null) {
        connector.setHost(ip);
    }
    connector.setPort(port);
    configureConnector(scheme, connector);

    server.setConnectors(new Connector[]{connector});
    return server;
}
// Configure connector sizing via overridable system properties
// (prefixed with the protocol, e.g. "https.requestHeaderSize").
// Request header/buffer sizes get larger defaults than Jetty's stock values
// in org.eclipse.jetty.http.HttpBuffersImpl; response sizes keep the
// connector's current defaults. See PUBDEV-5939 for details.
private void configureConnector(String scheme, Connector connector) {
    final int largeDefault = 32 * 1024;
    connector.setRequestHeaderSize(getSysPropInt(scheme + ".requestHeaderSize", largeDefault));
    connector.setRequestBufferSize(getSysPropInt(scheme + ".requestBufferSize", largeDefault));
    connector.setResponseHeaderSize(getSysPropInt(scheme + ".responseHeaderSize", connector.getResponseHeaderSize()));
    connector.setResponseBufferSize(getSysPropInt(scheme + ".responseBufferSize", connector.getResponseBufferSize()));
}
/**
 * Reads the integer system property {@code SYSTEM_PROP_PREFIX + suffix},
 * returning {@code defaultValue} when the property is absent or not a valid
 * integer ({@link Integer#getInteger(String, int)} semantics).
 */
private static int getSysPropInt(String suffix, int defaultValue) {
    final String propertyName = H2OHttpConfig.SYSTEM_PROP_PREFIX + suffix;
    return Integer.getInteger(propertyName, defaultValue);
}
/**
 * Installs session and authentication handling on the given server according
 * to {@code config.loginType}, returning the handler the caller should attach
 * the actual H2O request handlers to.
 *
 * With {@code LoginType.NONE} the server is returned unchanged. Otherwise the
 * chain built here is: server -> SessionHandler -> ConstraintSecurityHandler,
 * and the security handler is returned. The ordering of the setup calls below
 * is deliberate; see the inline Jetty 8 vs 9 notes.
 */
HandlerWrapper authWrapper(Server jettyServer) {
    if (config.loginType == LoginType.NONE) {
        return jettyServer;
    }
    // REFER TO http://www.eclipse.org/jetty/documentation/9.1.4.v20140401/embedded-examples.html#embedded-secured-hello-handler
    final LoginService loginService;
    final Authenticator primaryAuthenticator;
    // Pick the credential store and HTTP auth scheme for the login type.
    switch (config.loginType) {
        case HASH:
            loginService = new HashLoginService("H2O", config.login_conf);
            primaryAuthenticator = new BasicAuthenticator();
            break;
        case LDAP:
        case KERBEROS:
        case PAM:
            // All three authenticate through JAAS; the realm selects the backend.
            loginService = new JAASLoginService(config.loginType.jaasRealm);
            primaryAuthenticator = new BasicAuthenticator();
            break;
        case SPNEGO:
            loginService = new SpnegoLoginService(config.loginType.jaasRealm, config.spnego_properties);
            primaryAuthenticator = new SpnegoAuthenticator();
            break;
        default:
            throw new UnsupportedOperationException(config.loginType + ""); // this can never happen
    }
    final IdentityService identityService = new DefaultIdentityService();
    loginService.setIdentityService(identityService);
    jettyServer.addBean(loginService);
    // Set a security handler as the first handler in the chain.
    final ConstraintSecurityHandler security = new ConstraintSecurityHandler();
    // Set up a constraint to authenticate all calls, and allow certain roles in.
    final Constraint constraint = new Constraint();
    constraint.setName("auth");
    constraint.setAuthenticate(true);
    // Configure role stuff (to be disregarded). We are ignoring roles, and only going off the user name.
    //
    // Jetty 8 and prior.
    //
    // Jetty 8 requires the security.setStrict(false) and ANY_ROLE.
    security.setStrict(false);
    constraint.setRoles(new String[]{Constraint.ANY_ROLE});
    // Jetty 9 and later.
    //
    // Jetty 9 and later uses a different servlet spec, and ANY_AUTH gives the same behavior
    // for that API version as ANY_ROLE did previously. This required some low-level debugging
    // to figure out, so I'm documenting it here.
    // Jetty 9 did not require security.setStrict(false).
    //
    // constraint.setRoles(new String[]{Constraint.ANY_AUTH});
    final ConstraintMapping mapping = new ConstraintMapping();
    mapping.setPathSpec("/*"); // Lock down all API calls
    mapping.setConstraint(constraint);
    security.setConstraintMappings(Collections.singletonList(mapping));
    // Authentication / Authorization
    // When form auth is enabled, browsers get the login form while API
    // clients fall back to the primary (Basic/SPNEGO) scheme via the delegate.
    final Authenticator authenticator;
    if (config.form_auth) {
        FormAuthenticator formAuthenticator = new FormAuthenticator("/login", "/loginError", false);
        authenticator = new Jetty8DelegatingAuthenticator(primaryAuthenticator, formAuthenticator);
    } else {
        authenticator = primaryAuthenticator;
    }
    security.setLoginService(loginService);
    security.setAuthenticator(authenticator);
    final HashSessionIdManager idManager = new HashSessionIdManager();
    jettyServer.setSessionIdManager(idManager);
    final HashSessionManager manager = new HashSessionManager();
    // session_timeout is configured in minutes; Jetty expects seconds.
    // A value <= 0 keeps Jetty's default inactivity interval.
    if (config.session_timeout > 0) {
        manager.setMaxInactiveInterval(config.session_timeout * 60);
    }
    final SessionHandler sessionHandler = new SessionHandler(manager);
    sessionHandler.setHandler(security);
    // Pass-through to H2O if authenticated.
    jettyServer.setHandler(sessionHandler);
    return security;
}
/**
 * Hook up Jetty handlers.  Do this before start() is called.
 *
 * Security and session handlers already exist elsewhere, so this context is
 * deliberately created with NO_SECURITY | NO_SESSIONS -- we want exactly one
 * SessionHandler / SessionManager for the whole server.
 */
ServletContextHandler createServletContextHandler() {
    final ServletContextHandler context =
            new ServletContextHandler(ServletContextHandler.NO_SECURITY | ServletContextHandler.NO_SESSIONS);
    final String path = config.context_path;
    // Fall back to the root context when no context path is configured.
    context.setContextPath(path == null || path.isEmpty() ? "/" : path);
    return context;
}
/** @return a Jetty handler that runs the H2O authentication check on each request. */
Handler authenticationHandler() {
    final Handler handler = new AuthenticationHandler();
    return handler;
}
/**
 * Delegates authentication to {@code h2oHttpView}; when the view reports the
 * request as handled (e.g. rejected), processing stops here instead of falling
 * through to the H2O servlets.
 */
private class AuthenticationHandler extends AbstractHandler {
    @Override
    public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
            throws IOException {
        if (h2oHttpView.authenticationHandler(request, response)) {
            baseRequest.setHandled(true);
        }
    }
}
}
|
<filename>test/controllers/SubscriptionStatusControllerSpec.scala
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import connectors.SubscriptionStatusDESConnector
import exceptions.HttpStatusException
import generators.AmlsReferenceNumberGenerator
import models.des
import org.joda.time.LocalDateTime
import org.mockito.Matchers.{eq => eqTo, _}
import org.mockito.Mockito._
import play.api.libs.json.Json
import play.api.test.FakeRequest
import play.api.test.Helpers._
import utils.{AmlsBaseSpec, AuthAction, IterateeHelpers, SuccessfulAuthAction}
import scala.concurrent.Future
class SubscriptionStatusControllerSpec extends AmlsBaseSpec with IterateeHelpers with AmlsReferenceNumberGenerator {

  // A real connector instance only satisfies the constructor; the controller
  // under test overrides it with a Mockito mock below.
  lazy val ssConn = new SubscriptionStatusDESConnector(mockAppConfig, mockAuditConnector, mockHttpClient, mockMetrics)

  val authAction: AuthAction = SuccessfulAuthAction

  lazy val Controller: SubscriptionStatusController = new SubscriptionStatusController(ssConn, authAction, mockCC) {
    override val connector = mock[SubscriptionStatusDESConnector]
  }

  val request = FakeRequest().withHeaders(CONTENT_TYPE -> "application/json")

  "SubscriptionStatusController" must {

    "return a `BadRequest` response when the amls registration number is invalid" in {
      val expectedErrors = Json.obj("errors" -> Seq("Invalid AMLS Registration Number"))

      val result = Controller.get("test", "test", "test")(request)

      status(result) must be(BAD_REQUEST)
      contentAsJson(result) must be(expectedErrors)
    }

    "return a valid response when the amls registration number is valid" in {
      val readStatus = des.ReadStatusResponse(LocalDateTime.now(), "Approved", None, None, None, None, false)
      when(Controller.connector.status(eqTo(amlsRegistrationNumber))(any(), any(), any(), any()))
        .thenReturn(Future.successful(readStatus))

      val result = Controller.get("test", "test", amlsRegistrationNumber)(request)

      status(result) must be(OK)
      contentAsJson(result) must be(Json.toJson(readStatus))
    }

    "return an invalid response when the service fails" in {
      when(Controller.connector.status(eqTo(amlsRegistrationNumber))(any(), any(), any(), any()))
        .thenReturn(Future.failed(new HttpStatusException(INTERNAL_SERVER_ERROR, Some("message"))))

      whenReady(Controller.get("test", "test", amlsRegistrationNumber)(request).failed) {
        case HttpStatusException(statusCode, body) =>
          statusCode mustEqual INTERNAL_SERVER_ERROR
          body mustEqual Some("message")
      }
    }
  }
}
|
#!/bin/sh -e
#
# Update Linux kernel headers QEMU requires from a specified kernel tree.
#
# Copyright (C) 2011 Siemens AG
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
# See the COPYING file in the top-level directory.
# Working directory for the exported kernel headers.
tmpdir=$(mktemp -d)
linux="$1"
output="$2"

# A kernel source directory is mandatory as the first argument.
if [ -z "$linux" ] || ! [ -d "$linux" ]; then
    cat << EOF
usage: update-kernel-headers.sh LINUX_PATH [OUTPUT_PATH]

LINUX_PATH Linux kernel directory to obtain the headers from
OUTPUT_PATH output directory, usually the qemu source tree (default: $PWD)
EOF
    exit 1
fi

# The output directory defaults to the current working directory.
if [ -z "$output" ]; then
    output="$PWD"
fi
# Copy one kernel header into the QEMU tree, rewriting kernel-only constructs
# (__uN/__sN/__leN/__beN integer types, __attribute__((packed)), <linux/...>
# includes, ...) into their portable / QEMU equivalents.
#   $1  source header file
#   $2  destination directory
cp_portable() {
    f=$1
    to=$2
    # Refuse headers that pull in includes we do not know how to rewrite.
    if
        grep '#include' "$f" | grep -v -e 'linux/virtio' \
            -e 'linux/types' \
            -e 'stdint' \
            -e 'linux/if_ether' \
            -e 'input-event-codes' \
            -e 'sys/' \
            > /dev/null
    then
        echo "Unexpected #include in input file $f".
        exit 2
    fi

    header=$(basename "$f");
    # Fixed-width kernel types -> stdint types; kernel attributes and include
    # paths -> QEMU forms; drop sys/ioctl.h; rename SW_MAX to avoid clashes.
    sed -e 's/__u\([0-9][0-9]*\)/uint\1_t/g' \
        -e 's/__s\([0-9][0-9]*\)/int\1_t/g' \
        -e 's/__le\([0-9][0-9]*\)/uint\1_t/g' \
        -e 's/__be\([0-9][0-9]*\)/uint\1_t/g' \
        -e 's/"\(input-event-codes\.h\)"/"standard-headers\/linux\/\1"/' \
        -e 's/<linux\/\([^>]*\)>/"standard-headers\/linux\/\1"/' \
        -e 's/__bitwise__//' \
        -e 's/__attribute__((packed))/QEMU_PACKED/' \
        -e 's/__inline__/inline/' \
        -e '/sys\/ioctl.h/d' \
        -e 's/SW_MAX/SW_MAX_/' \
        "$f" > "$to/$header";
}
# This will pick up non-directories too (eg "Kconfig") but we will
# ignore them in the next loop.
ARCHLIST=$(cd "$linux/arch" && echo *)
for arch in $ARCHLIST; do
    # Discard anything which isn't a KVM-supporting architecture
    if ! [ -e "$linux/arch/$arch/include/asm/kvm.h" ] &&
        ! [ -e "$linux/arch/$arch/include/uapi/asm/kvm.h" ] ; then
        continue
    fi

    # Blacklist architectures which have KVM headers but are actually dead
    if [ "$arch" = "ia64" -o "$arch" = "mips" ]; then
        continue
    fi

    # Export the kernel's sanitized userspace headers for this arch into tmpdir.
    make -C "$linux" INSTALL_HDR_PATH="$tmpdir" SRCARCH=$arch headers_install

    # Copy the per-arch KVM headers verbatim under linux-headers/.
    rm -rf "$output/linux-headers/asm-$arch"
    mkdir -p "$output/linux-headers/asm-$arch"
    for header in kvm.h kvm_para.h unistd.h; do
        cp "$tmpdir/include/asm/$header" "$output/linux-headers/asm-$arch"
    done
    if [ $arch = powerpc ]; then
        cp "$tmpdir/include/asm/epapr_hcalls.h" "$output/linux-headers/asm-powerpc/"
    fi

    # Per-arch headers that are consumed portably go through cp_portable.
    rm -rf "$output/include/standard-headers/asm-$arch"
    mkdir -p "$output/include/standard-headers/asm-$arch"
    if [ $arch = s390 ]; then
        cp_portable "$tmpdir/include/asm/kvm_virtio.h" "$output/include/standard-headers/asm-s390/"
        cp_portable "$tmpdir/include/asm/virtio-ccw.h" "$output/include/standard-headers/asm-s390/"
    fi
    if [ $arch = x86 ]; then
        cp_portable "$tmpdir/include/asm/hyperv.h" "$output/include/standard-headers/asm-x86/"
        cp "$tmpdir/include/asm/unistd_32.h" "$output/linux-headers/asm-x86/"
        cp "$tmpdir/include/asm/unistd_x32.h" "$output/linux-headers/asm-x86/"
        cp "$tmpdir/include/asm/unistd_64.h" "$output/linux-headers/asm-x86/"
    fi
done
# Architecture-independent KVM/VFIO/vhost headers, copied verbatim.
rm -rf "$output/linux-headers/linux"
mkdir -p "$output/linux-headers/linux"
for header in kvm.h kvm_para.h vfio.h vhost.h \
    psci.h userfaultfd.h; do
    cp "$tmpdir/include/linux/$header" "$output/linux-headers/linux"
done
rm -rf "$output/linux-headers/asm-generic"
mkdir -p "$output/linux-headers/asm-generic"
for header in kvm_para.h; do
    cp "$tmpdir/include/asm-generic/$header" "$output/linux-headers/asm-generic"
done

# Prefer the COPYING from the real source tree when $linux is a build
# directory whose "source" symlink points back to it.
if [ -L "$linux/source" ]; then
    cp "$linux/source/COPYING" "$output/linux-headers"
else
    cp "$linux/COPYING" "$output/linux-headers"
fi

# Compatibility shims: these linux-headers files just forward to the
# portable standard-headers copies.
cat <<EOF >$output/linux-headers/asm-x86/hyperv.h
#include "standard-headers/asm-x86/hyperv.h"
EOF
cat <<EOF >$output/linux-headers/linux/virtio_config.h
#include "standard-headers/linux/virtio_config.h"
EOF
cat <<EOF >$output/linux-headers/linux/virtio_ring.h
#include "standard-headers/linux/virtio_ring.h"
EOF

# Portable virtio/input/PCI headers, rewritten via cp_portable.
rm -rf "$output/include/standard-headers/linux"
mkdir -p "$output/include/standard-headers/linux"
for i in "$tmpdir"/include/linux/*virtio*.h "$tmpdir/include/linux/input.h" \
    "$tmpdir/include/linux/input-event-codes.h" \
    "$tmpdir/include/linux/pci_regs.h"; do
    cp_portable "$i" "$output/include/standard-headers/linux"
done

cat <<EOF >$output/include/standard-headers/linux/types.h
/* For QEMU all types are already defined via osdep.h, so this
 * header does not need to do anything.
 */
EOF
cat <<EOF >$output/include/standard-headers/linux/if_ether.h
#define ETH_ALEN 6
EOF

rm -rf "$tmpdir"
|
#!/bin/bash
##
## Copyright 2019 International Business Machines
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#
# Jenkins Test for SNAP
#
# SNAP framework example
# Run all HDL-example sub-tests on one card; stop at the first failure and
# propagate its return code.
#   $1  card index
#   $2  accelerator type (for logging only)
function test_10142000
{
    local card=$1
    local accel=$2
    local t

    mytest="./actions/hdl_example"
    echo "TEST HDL Example Action on Accel: $accel[$card] ..."
    #mytest/tests/10140000_test.sh -C $card
    # The original repeated the same run/check/return sequence for every
    # sub-test; loop over the list instead.
    for t in hw_test.sh 10140000_ddr_test.sh 10140000_set_test.sh 10140000_nvme_test.sh; do
        $mytest/tests/$t -C $card
        RC=$?
        if [ $RC -ne 0 ]; then
            return $RC
        fi
    done
    return 0
}
# Discover every SNAP action loaded on the card and run the matching
# hardware test script for each.  Returns the RC of the last test run,
# or 99 when an action has no test script.
function test_all_actions() # $1 = card, $2 = accel
{
    local card=$1
    local accel=$2
    RC=0;

    # Get SNAP Action number from Card
    # MY_ACTION=`./software/tools/oc_maint -C $card -m 1`
    # oc_maint -m1 not working for the moment ==> Using the following as a workaround
    MY_ACTION=`./software/tools/oc_maint -C $card -v | grep -A10 -e "-+-" | grep -ve "-+-" | awk '{print $2}'`

    # Map each discovered action id to its hardware test script.
    for action in $MY_ACTION ; do
        run_test=1;
        case $action in
        *"10142000") # HDL Example
            cmd="./actions/hdl_example/tests/hw_test.sh"
            #test_10142000 $card $accel
            #RC=$?
            #run_test=0
            ;;
        *"10142002") # HDL Single Engine
            cmd="./actions/hdl_single_engine/tests/hw_test.sh"
            ;;
        *"1014300b") # HLS Memcopy
            cmd="./actions/hls_memcopy_1024/tests/hw_test.sh"
            ;;
        *"10143008") # HLS Hello World 512 bits wide bus
            cmd="./actions/hls_helloworld_512/tests/hw_test.sh"
            ;;
        *"10143009") # HLS Hello World 1024 bits wide bus
            cmd="./actions/hls_helloworld_1024/tests/hw_test.sh"
            ;;
        *)
            echo "Error: Action: $action is not valid !"
            run_test=0
        esac
        # Check run_test flag and check if test case is there
        if [ $run_test -eq 1 ]; then
            if [ -f $cmd ]; then
                cmd=$cmd" -C $card -d NORMAL"
                echo "RUN: $cmd on $accel[$card] Start"
                eval ${cmd}
                RC=$?
                echo "RUN: $cmd on $accel[$card] Done RC=$RC"
            fi
        else
            echo "Error: No Test case found for Action: $action on $accel[$card]"
            echo " Missing File: $cmd"
            RC=99
        fi
    done
    echo "$0 return code is : RC=$RC"
    return $RC
}
# Software-only test flow for one card: verify maintenance access first,
# then run every action test.
#   $1 = accel type, $2 = card index
function test_soft()
{
    local accel=$1
    local card=$2

    echo "Testing Software on Accel: $accel[$card] ..."

    # Sanity-check the card via the maintenance tool before running actions.
    ./software/tools/oc_maint -C $card -v
    RC=$?
    [ $RC -ne 0 ] && return $RC

    # Run the full per-action test suite; propagate its return code.
    test_all_actions $card $accel
    return $?
}
# Flash the given image onto a card, then run the full action test suite.
#   $1 accel type, $2 card index, $3 primary image, $4 optional SPI secondary image
# The image file is renamed afterwards to record the outcome
# (.good / .fault_flash / .fault_peek / .fault_config / .fault_test).
function test_hard()
{
    local accel=$1
    local card=$2
    local IMAGE=$3
    local IMAGE2=$4

    echo "`date` UPDATING Start"
    echo " Accel: $accel[$card] Image: $IMAGE"
    pushd ../oc-utils > /dev/null
    if [ $? -ne 0 ]; then
        echo "Error: Can not start oc-flash-script.sh"
        exit 1
    fi
    try_to_flash=0
    # Retry loop: RC=99 from the flash tool means the flash lock is held by a
    # parallel job; wait and retry up to 20 times.
    while [ 1 ]; do
        wait_flag=0
        if [[ $accel != "OC-AD9V3" ]] && [[ $accel != "OC-AD9H3" ]] && [[ $accel != "OC-AD9H7" ]]; then
            echo "executing non SPI case : sudo ./oc-flash-script.sh -f -C $card -f $IMAGE"
            sudo ./oc-flash-script.sh -f -C $card -f $IMAGE
        else
            echo "executing SPI case : sudo ./oc-flash-script.sh -f -C $card $IMAGE $IMAGE2"
            sudo ./oc-flash-script.sh -f -C $card $IMAGE $IMAGE2
        fi
        RC=$?
        if [ $RC -eq 0 ]; then
            break
        fi
        if [ $RC -eq 99 ]; then
            # I do get Busy from oc_flash tool if the flash lock is in use
            # Wait again or exit for Flashing
            # Flashing takes about 90 to 100 sec
            try_to_flash=$((try_to_flash+1))
            if [ $try_to_flash -gt 20 ]; then
                echo "`date` ERROR: Timeout While Waiting to Flash Accel: $accel[$card]"
                popd > /dev/null
                return $RC
            fi
            echo "`date` ($try_to_flash of 20) Wait: Other oc-flash-script.sh in progress"
            wait_flag=1
            sleep 10
        else
            echo "`date` ERROR: I was not able to Flash Image: $IMAGE on Accel: $accel[$card]"
            popd > /dev/null
            mv $IMAGE $IMAGE.fault_flash
            return $RC
        fi
    done
    popd > /dev/null
    echo "`date` UPDATING done for $accel[$card]"
    if [ $wait_flag -eq 1 ]; then
        echo "Delay some time because of pending Flash"
        sleep 15 # Allow other test to Flash
        echo "`date` Testing Accel: $accel[$card]"
    fi
    sleep 5 # Allow some time to recover cards

    # Basic register read to verify the card came back after flashing.
    ./software/tools/snap_peek -C $card 0x0 -d2
    RC=$?
    if [ $RC -ne 0 ]; then
        echo "moving $IMAGE to $IMAGE.fault_peek"
        mv $IMAGE $IMAGE.fault_peek
        return $RC
    fi
    echo "CONFIG Accel: $accel[$card] ..."
    ./software/tools/oc_maint -C $card -v
    RC=$?
    if [ $RC -ne 0 ]; then
        echo "moving $IMAGE to $IMAGE.fault_config"
        mv $IMAGE $IMAGE.fault_config
        return $RC
    fi
    test_all_actions $card $accel
    RC=$?
    if [ $RC -eq 0 ]; then
        echo "moving $IMAGE to $IMAGE.good"
        mv $IMAGE $IMAGE.good
    else
        echo "moving $IMAGE to $IMAGE.fault_test"
        mv $IMAGE $IMAGE.fault_test
    fi
    return $RC
}
# Print command-line help.
# Fixes copy-paste errors in the original text: the OC-AD9H3 / OC-AD9H7 lines
# showed "<OC-AD9V3>" as the option argument, and "-C" was missing its
# closing ">".
function usage() {
    echo "Usage: $PROGRAM -D [] -A [] -F [] -f []"
    echo " [-D <Target Dir>]"
    echo " [-A <OC-AD9V3> : Select AlphaData OC-AD9V3 Card"
    echo " [-A <OC-AD9H3> : Select AlphaData OC-AD9H3 Card"
    echo " [-A <OC-AD9H7> : Select AlphaData OC-AD9H7 Card"
    echo " <ALL> : Select ALL Cards"
    echo " [-F <Image> : Set Image file for Accelerator -A"
    echo " [-f <Image> : Set SPI secondary Image file for Accelerator -A"
    echo " -A ALL is not valid if -F is used"
    echo " [-C <0,1,2,3>]: Select Card 0,1,2 or 3"
    echo " Select the Card# for test."
    echo " [-h] Print this help"
    echo " Option -D must be set"
    echo " following combinations can happen"
    echo " 1.) Option -A [OC-AD9V3, OC-AD9H3, OC-AD9H7] and -F is set"
    echo " for Card in all Accelerators (-A)"
    echo " => Image will be flashed on Card (using oc-flash-script and reset routines)"
    echo " => and Software Test will then run on Card"
    echo " 2.) Option -A [OC-AD9V3, OC-AD9H3, OC-AD9H7]"
    echo " for Card in all given Accelerators (-A)"
    echo " => Software Test will run on Card (using current FPGA content)"
    echo " 3.) Option -A ALL"
    echo " for each Card and for all Accelerators"
    echo " => Software Test will run on Accelerator and Card"
}
#
# Main starts here
#
# Note: use bash option "set -f" when passing wildcards before
# starting this script.
#
# -------------------------------------------------------
VERSION=1.0 # creation for OC-AD9V3, OC-AD9H3, OC-AD9H7 cards
# --------------------------------------------------------

PROGRAM=$0
BINFILE=""    # primary FPGA image (-F)
BINFILE2=""   # secondary SPI image (-f)
accel="ALL"   # accelerator type to test (-A)
CARD="-1" # Select all Cards in System

echo "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< OC-JENKINS TEST>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo "oc_jenkins.sh version : $VERSION"
echo "`date` Test Starts On `hostname`"

# Echo the raw invocation for the Jenkins log.
echo "Arg#='$#'"
for i in "$@"
do
    echo "param'=$i"
done
echo ""

# Parse command-line options; see usage() for their meaning.
while getopts "D:A:F:f:C:h" opt; do
    case $opt in
    D)
        TARGET_DIR=$OPTARG;
        ;;
    A)
        accel=$OPTARG;
        # Only the three supported AlphaData cards or ALL are accepted.
        if [[ $accel != "OC-AD9V3" ]] &&
            [[ $accel != "OC-AD9H3" ]] &&
            [[ $accel != "OC-AD9H7" ]] &&
            [[ $accel != "ALL" ]]; then
            echo "Error: Option -A $OPTARG is not valid !" >&2
            echo "Expect: [OC-AD9V3, OC-AD9H3, OC-AD9H7 or ALL]" >&2
            exit 1
        fi
        ;;
    F)
        BINFILE=$OPTARG;
        ;;
    f)
        BINFILE2=$OPTARG;
        ;;
    C)
        CARD=$OPTARG;
        ;;
    h)
        usage;
        exit 0;
        ;;
    \?)
        echo "Invalid option: -$OPTARG" >&2
        ;;
    esac
done

MY_DIR=`basename $PWD`
echo "Testing in : $MY_DIR"
echo "Using Accel : $accel"
echo "Using Card# : $CARD"
echo "Using Image : $BINFILE"
if [[ $accel == "OC-AD9V3" ]] || [[ $accel == "OC-AD9H3" ]] || [[ $accel == "OC-AD9H7" ]]; then
    echo "Using sec Image : $BINFILE2"
fi

# Guard against running from the wrong checkout: -D must name this directory.
if [[ $TARGET_DIR != $MY_DIR ]] ; then
    echo "Target Dir: $TARGET_DIR"
    echo "Current Dir: $MY_DIR"
    echo "Error: Target and Current Dir must match. Please fix with -D Option"
    exit 1;
fi

echo "Source PATH and LD_LIBRARY_PATH"
. ./snap_path.sh

# Counts how many tests actually ran; checked at the end of every path.
test_done=0
if [[ $accel != "ALL" ]]; then
    if [[ $BINFILE != "" ]]; then
        # Flash-and-test path: -A <accel> together with -F <image>.
        echo "Flash and test Accel: $accel Card: $CARD using: $BINFILE"
        for IMAGE in `ls -tr $BINFILE 2>/dev/null`; do
            if [ ! -f $IMAGE ]; then
                echo "Error: Can not locate: $BINFILE"
                exit 1
            fi
            echo "---> Test Image# $test_done File: $IMAGE on $accel Card: $CARD"
            if [ $CARD -eq "-1" ]; then
                # Get all Cards in this System for Accel type i have to test
                MY_CARDS=`./software/tools/oc_find_card -A $accel`
                # NOTE(review): "$? -eq 0" is treated as the error case here;
                # oc_find_card apparently signals the number of found cards via
                # its exit code -- confirm before changing this pattern.
                if [ $? -eq 0 ]; then
                    echo "Error: Can not find $accel Card in `hostname` !"
                    exit 1;
                fi
                for card in $MY_CARDS ; do
                    # SPI-based cards need the secondary image as well.
                    if [[ $accel != "OC-AD9V3" ]] && [[ $accel != "OC-AD9H3" ]] && [[ $accel != "OC-AD9H7" ]]; then
                        test_hard $accel $card $BINFILE
                    else
                        test_hard $accel $card $BINFILE $BINFILE2
                    fi
                    if [ $? -ne 0 ]; then
                        exit 1
                    fi
                    test_done=$((test_done +1))
                done
            else
                # -C Option was set.
                # Make sure i did get the correct values for -A and -C
                # -t3 for detecting only OPENCAPI (CAPI3.0) card result
                accel_to_use=`./software/tools/oc_find_card -C $CARD -t3`
                echo "accel_to_use=$accel_to_use"
                echo "accel =$accel"
                echo "CARD =$CARD"
                if [ "$accel_to_use" == "$accel" ]; then
                    if [[ $accel != "OC-AD9V3" ]] && [[ $accel != "OC-AD9H3" ]] && [[ $accel != "OC-AD9H7" ]]; then
                        test_hard $accel $CARD $BINFILE
                    else
                        test_hard $accel $CARD $BINFILE $BINFILE2
                    fi
                    if [ $? -ne 0 ]; then
                        exit 1
                    fi
                    test_done=$((test_done +1))
                else
                    echo "Error: OpenCAPI Card: $CARD is not Accel Type: $accel"
                    echo " OpenCAPI Card: $CARD Accel Type is : $accel_to_use"
                    echo ""
                    exit 1
                fi
            fi
        done
        if [ $test_done -eq 0 ]; then
            echo "Error: Test of Image: $IMAGE failed !"
            echo " File: $BINFILE not found"
            exit 1
        fi
        echo "`date` Image Test on Accel: $accel was executed $test_done time(s)"
        exit 0
    fi
    # Run Software Test on one Type of Card
    echo "Test Software on: $accel Card: $CARD"
if [ $CARD -eq "-1" ]; then
# I will use all Cards if Card is set to -1
MY_CARDS=`./software/tools/oc_find_card -A $accel`
if [ $? -eq 0 ]; then
echo "Error: Can not find Accel: $accel"
exit 1;
fi
# MY_CARDS is a list of cards from type accel e.g: 0 1
echo "Testing on $accel[$MY_CARDS]"
for card in $MY_CARDS ; do
test_soft $accel $card
if [ $? -ne 0 ]; then
exit 1
fi
test_done=$((test_done + 1))
done
else
# -C Option was set:
# Make sure i did get the correct values for Card and Accel (-C and -A)
# -t3 for detecting only OPENCAPI (CAPI3.0) card result
accel_to_use=`./software/tools/oc_find_card -C $CARD` -t3
echo "accel_to_use=$accel_to_use"
echo "accel =$accel"
echo "CARD =$CARD"
if [ "$accel_to_use" == "$accel" ]; then
if [[ $accel != "OC-AD9V3" ]] && [[ $accel != "OC-AD9H3" ]] && [[ $accel != "OC-AD9H3" ]]; then
test_hard $accel $CARD $BINFILE
else
test_hard $accel $CARD $BINFILE $BINFILE2
fi
if [ $? -ne 0 ]; then
exit 1
fi
test_done=$((test_done +1))
else
echo "Error: OpenCAPI Card: $CARD is not Accel Type: $accel"
echo " OpenCAPI Card: $CARD Accel Type is : $accel_to_use"
exit 1
fi
fi
    # At least one card must have been exercised, otherwise fail the build.
    if [ $test_done -eq 0 ]; then
        echo "Error: Software Test on Accel: $accel[$card] failed"
        exit 1
    fi
    echo "Software Test on Accel: $accel was executed on $test_done Cards"
    exit 0
fi
# Run Software Test on ALL Cards
if [[ $BINFILE != "" ]]; then
    # Error: I can not use the same BINFILE for ALL cards
    echo "Error: Option -A $accel and -F $BINFILE is not valid"
    exit 1
fi
echo "Test Software on: $accel"
MY_CARDS=`./software/tools/oc_find_card -A ALL`
if [ $? -eq 0 ]; then
    echo "Error: No Accelerator Cards found."
    exit 1;
fi
echo "Found Accel#: [$MY_CARDS]"
for card in $MY_CARDS ; do
    # BUGFIX: -t3 must be part of the command substitution; it previously sat
    # outside the backticks and was executed as a separate command, so $accel
    # was detected without the OpenCAPI (CAPI3.0) filter.
    accel=`./software/tools/oc_find_card -C $card -t3`
    if [ $? -eq 0 ]; then
        echo "Can not find valid Accelerator for Card# $card"
        continue
    fi
    # oc_find_card also detects GZIP cards, i will skip this cards
    if [[ $accel != "OC-AD9V3" ]] && [[ $accel != "OC-AD9H3" ]] && [[ $accel != "OC-AD9H7" ]]; then
        echo "Invalid Accelerator $accel for Card $card, skip"
        continue
    fi
    test_soft $accel $card
    if [ $? -ne 0 ]; then
        exit 1
    fi
    test_done=$((test_done + 1))
done
# Check if test was run at least one time, set RC to bad if
# test did not find
# any valid card
if [ $test_done -eq 0 ]; then
    echo "Error: Software Test did not detect any card for test"
    exit 1
fi
echo "`date` Software Test was executed $test_done times"
exit 0
|
#!/usr/bin/env bash
# ==============================================================================
# Home Assistant Community Add-ons: Bashio
# Bashio is an bash function library for use with Home Assistant add-ons.
#
# It contains a set of commonly used operations and can be used
# to be included in add-on scripts to reduce code duplication across add-ons.
# ==============================================================================
# ------------------------------------------------------------------------------
# Updates the CLI to the latest version.
#
# Arguments:
# $1 Version to update to (optional)
# ------------------------------------------------------------------------------
# Updates the CLI to the latest version (or the given version), then flushes
# all cached CLI info since it is stale after an update.
#
# Arguments:
#   $1 Version to update to (optional)
function bashio::cli.update() {
    local version=${1:-}

    # Consistency fix: every other function in this file traces
    # "${FUNCNAME[0]}" without a trailing colon.
    bashio::log.trace "${FUNCNAME[0]}" "$@"

    if bashio::var.has_value "${version}"; then
        version=$(bashio::var.json version "${version}")
        bashio::api.supervisor POST /cli/update "${version}"
    else
        bashio::api.supervisor POST /cli/update
    fi
    bashio::cache.flush_all
}
# ------------------------------------------------------------------------------
# Returns a JSON object with generic version information about the CLI.
#
# Arguments:
# $1 Cache key to store results in (optional)
# $2 jq Filter to apply on the result (optional)
# ------------------------------------------------------------------------------
# Returns a JSON object with generic version information about the CLI,
# optionally filtered through jq, with two-level caching (raw info plus
# per-key filtered results).
#
# Arguments:
#   $1 Cache key to store results in (optional)
#   $2 jq Filter to apply on the result (optional)
function bashio::cli() {
    local cache_key=${1:-'cli.info'}
    local filter=${2:-}
    local info
    local response

    bashio::log.trace "${FUNCNAME[0]}" "$@"

    # Serve a previously cached answer for this exact key when available.
    if bashio::cache.exists "${cache_key}"; then
        bashio::cache.get "${cache_key}"
        return "${__BASHIO_EXIT_OK}"
    fi

    # Fetch (or reuse) the raw CLI info object from the Supervisor API.
    if bashio::cache.exists 'cli.info'; then
        info=$(bashio::cache.get 'cli.info')
    else
        info=$(bashio::api.supervisor GET /cli/info false)
        bashio::cache.set 'cli.info' "${info}"
    fi

    response="${info}"
    # Apply the optional jq filter before caching/returning the result.
    if bashio::var.has_value "${filter}"; then
        response=$(bashio::jq "${info}" "${filter}")
    fi

    bashio::cache.set "${cache_key}" "${response}"
    printf "%s" "${response}"
    return "${__BASHIO_EXIT_OK}"
}
# ------------------------------------------------------------------------------
# Returns the Home Assistant CLI version used.
# ------------------------------------------------------------------------------
function bashio::cli.version() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Delegate to bashio::cli with a jq filter selecting the installed version.
    bashio::cli 'cli.info.version' '.version'
}
# ------------------------------------------------------------------------------
# Returns the latest version of the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.version_latest() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Delegate to bashio::cli with a jq filter selecting the latest version.
    bashio::cli 'cli.info.version_latest' '.version_latest'
}
# ------------------------------------------------------------------------------
# Checks if there is an update available for the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.update_available() {
    bashio::log.trace "${FUNCNAME[0]}" "$@"
    # The "// false" jq fallback makes a missing field read as "no update".
    bashio::cli 'cli.info.update_available' '.update_available // false'
}
# ------------------------------------------------------------------------------
# List all available stats about the CLI.
#
# Arguments:
# $1 Cache key to store results in (optional)
# $2 jq Filter to apply on the result (optional)
# ------------------------------------------------------------------------------
# List all available stats about the CLI, optionally filtered through jq,
# with two-level caching (raw stats plus per-key filtered results).
#
# Arguments:
#   $1 Cache key to store results in (optional)
#   $2 jq Filter to apply on the result (optional)
function bashio::cli.stats() {
    local cache_key=${1:-'cli.stats'}
    local filter=${2:-}
    local info
    local response

    bashio::log.trace "${FUNCNAME[0]}" "$@"

    # Serve a previously cached answer for this exact key when available.
    if bashio::cache.exists "${cache_key}"; then
        bashio::cache.get "${cache_key}"
        return "${__BASHIO_EXIT_OK}"
    fi

    # Fetch (or reuse) the raw stats object from the Supervisor API.
    if bashio::cache.exists 'cli.stats'; then
        info=$(bashio::cache.get 'cli.stats')
    else
        info=$(bashio::api.supervisor GET /cli/stats false)
        bashio::cache.set 'cli.stats' "${info}"
    fi

    response="${info}"
    # Apply the optional jq filter before caching/returning the result.
    if bashio::var.has_value "${filter}"; then
        response=$(bashio::jq "${info}" "${filter}")
    fi

    bashio::cache.set "${cache_key}" "${response}"
    printf "%s" "${response}"
    return "${__BASHIO_EXIT_OK}"
}
# ------------------------------------------------------------------------------
# Returns CPU usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.cpu_percent() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.cpu_percent' '.cpu_percent'
}
# ------------------------------------------------------------------------------
# Returns memory usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.memory_usage() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.memory_usage' '.memory_usage'
}
# ------------------------------------------------------------------------------
# Returns memory limit from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.memory_limit() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.memory_limit' '.memory_limit'
}
# ------------------------------------------------------------------------------
# Returns memory usage in percent from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.memory_percent() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.memory_percent' '.memory_percent'
}
# ------------------------------------------------------------------------------
# Returns outgoing network usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.network_tx() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.network_tx' '.network_tx'
}
# ------------------------------------------------------------------------------
# Returns incoming network usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.network_rx() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.network_rx' '.network_rx'
}
# ------------------------------------------------------------------------------
# Returns disk read usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.blk_read() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.blk_read' '.blk_read'
}
# ------------------------------------------------------------------------------
# Returns disk write usage from the CLI.
# ------------------------------------------------------------------------------
function bashio::cli.blk_write() {
    bashio::log.trace "${FUNCNAME[0]}"
    # Extracted from the cached /cli/stats payload.
    bashio::cli.stats 'cli.stats.blk_write' '.blk_write'
}
|
package com.hannesdorfmann.collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This class brings the best of both, {@link List} and {@link Map}. So you can
* simply iterate, like you would do with any {@link List} or get a item by his
* list position. You can also search for an value by searching for his key
* (id), like you would do with any {@link Map} implementation (
* {@link #getByMapKey(Mappable)})
*
* @author <NAME>
*
* @param <K>
* @param <V>
*/
public interface ListMap<K, V extends Mappable<K>> extends List<V> {
/**
* Get the value by the key. The access will be as fast as accessing a
* {@link Map}. If there are more elements with the same key in the list,
* than the first one (with the lowest index) will be returned.
*
* @param id
* The values id.
* @return
*/
public Set<V> getByMapKey(K id);
/**
* Calls {@link #getByMapKey(Object)} but returns just the first one. Useful
* for scenarios, where you are absolutely sure that there is only one
* element with the given key, so you can use this short cut.
*
* @param id
* @return
*/
public V getFirstByMapKey(K id);
/**
* Remove all items in the list that matches have this key. More formally,
* removes all elements with the index i such that (o==null ? get(i)==null :
* o.equals(get(i))) (if such an element exists)
*
* @param id
* @return The first element that has been found to be removed or null, if
* there is no such element
*/
public Set<V> removeByMapKey(K id);
}
|
#!/bin/bash
# Print the script banner.
# The ASCII art is emitted via a quoted heredoc: in the original double-quoted
# echo lines the unescaped backticks opened command substitutions and the
# backslashes were consumed by the shell, mangling the output.
function print_banner {
    echo "---------------------------------------------------------------------------------"
    cat <<'BANNER'
 _____ _ _ _____ _
| ___| (_) | / ___(_)
| |__ _ ____ ___ _ __ ___ __| \ `--. _ __ _ _ __ ___ _ __
| __| '_ \ \ / / | '__/ _ \ / _` |`--. \ |/ _` | '_ \ / _ \ '__|
| |__| | | \ V /| | | | (_) | | (_| /\__/ / | (_| | | | | __/ |
\____/_| |_|\_/ |_|_| \___/ \__,_\____/|_|\__, |_| |_|\___|_|
 __/ |
 |___/
BANNER
    echo "---------------------------------------------------------------------------------"
    echo "User control script for CentOS 7.x https://github.com/daemonna/centos-cheatsheet"
    echo "---------------------------------------------------------------------------------"
}
|
#! /bin/bash

# Integration test: while the weave router container is not running, the
# Docker proxy must reject `docker run` with a clear error message.
. ./config.sh

start_suite "Proxy failure modes"

# docker run should fail when weave router is not running
weave_on $HOST1 launch-proxy
assert_raises "! proxy docker_on $HOST1 run --rm $SMALL_IMAGE true"
assert_raises "proxy docker_on $HOST1 run --rm $SMALL_IMAGE true 2>&1 1>/dev/null | grep 'Error response from daemon: weave container is not present. Have you launched it?'"

end_suite
|
./target/release/openschafkopf suggest-card --rules "Herz-Solo von 2" --cards-on-table "ga g7 hz g8 eu h7 eo ha gz g9 su gu sz h9 s7 s8 gk e8 so e9 hu go ez hk" --hand "ea sk" --simulate-hands all --branching "9,9" --verbose
|
// JavaScript Document
/**
 * Wallet-related functions.
 * NOTE(review): several Chinese comments/string literals in this file are
 * mojibake-corrupted (multi-byte characters split across lines); restore
 * them from the original encoding before shipping.
 */
vue.append({
	task_list:[],
	// Fetch the list of tasks currently in withdrawal and store it on the model.
	// NOTE(review): endpoint spelling "listInWithraw" presumably matches the
	// server route -- verify against the backend before "fixing" it.
	getTaskList:function(){
		$.post("/api/task/listInWithraw",{null:null},function(rs){
			if(rs.status){
				vue.task_list = rs.data;
			}
		}, 'json');
	}
});
vue.addInit('getTaskList');
//ๆ ็ญพ้กตๅ่ฝ
vue.append({
tab_index:0,//ๆ ็ญพ้กตๅท
changeTab:function(n){
this.tab_index = n;
},
});
//ๆ็ฐ
vue.append({
withdraw_method : 0,
withdraw_money : '',
changedWithdrawMoney:function(){
this.withdraw_money = this.withdraw_money.replace(/\..*$/,'').replace(/[^0-9]/,'');
},
changeWithdrawmethod:function(n){
if(n == 1 && !vue.user.alipay_bind){
vue.goto('wallet_bind_withdraw_alipay');
return;
}
this.withdraw_method = n;
vue.goto('wallet_withdraw');
},
withdrawGo:function(){
this.changedWithdrawMoney();
if(!/^[0-9]+$/.test(this.withdraw_money)){
// alert('่ฏท่พๅ
ฅๆๆ็้้ข๏ผ');
$.toast("่ฏท่พๅ
ฅๆๆ็้้ข", "text");
return false;
}
if(this.withdraw_money > this.can_withdraw){
// alert('ๆ็ฐ้้ขไธๅพๅคงไบๅฏๆ็ฐ้้ข๏ผ');
$.toast("ๆ็ฐ้้ขไธๅพๅคงไบๅฏๆ็ฐ้้ข๏ผ", "text");
return;
}
$.post("/api/user/withdraw",{
method : this.withdraw_method,
money : this.withdraw_money,
},function(rs){
alert(rs.msg);
vue.flushUserBase();
if(rs.status){
this.alertShow = true;
vue.goto('wallet');
}
}, 'json');
},
});
//็ปๅฎๆฏไปๅฎ
vue.append({
new_alipay_name : '',//ๆฐ็ปๅฎ็ๆฏไปๅฎ่ดฆๅท
bindAlipay:function(){
if(this.user.alipay_bind){
// alert('ๆจๅทฒ็ปๅฎ่ฟๆฏไปๅฎ๏ผๆ ๆณ้ๆฐ็ปๅฎใ');
$.toast("ๆจๅทฒ็ปๅฎ่ฟๆฏไปๅฎ๏ผๆ ๆณ้ๆฐ็ปๅฎใ", "text");
return false;
}
if(this.new_alipay_name == ''){
// alert('่ฏท่พๅ
ฅๆฏไปๅฎ่ดฆๅท');
$.toast("่ฏท่พๅ
ฅๆฏไปๅฎ่ดฆๅท", "text");
return false;
}
$.post("/api/user/bindAlipay",{v:this.new_alipay_name},function(rs){
if(rs.status){
vue.flushUserBase();
vue.goto('wallet_withdraw_methods');
}
}, 'json');
},
});
//ๆ็ฐๆๅๅผน็ช
vue.append({
alertShow:false,
colseCover:function(){
this.alertShow = false;
}
});
|
<reponame>anticipasean/girakkafunc<filename>func-pure/src/main/java/cyclops/pure/arrow/Cokleisli.java
package cyclops.pure.arrow;
import cyclops.function.higherkinded.Higher;
import cyclops.container.transformable.Transformable;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.function.enhanced.Function1;
import java.util.function.Function;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
/**
 * A co-Kleisli arrow: a function from a value inside a comonadic context
 * {@code Higher<W, T>} to a plain result {@code R}.
 * <p>
 * Instances are created through the static factories; the constructor is
 * private (generated by Lombok).
 *
 * @param <W> witness type of the context
 * @param <T> value type inside the context
 * @param <R> result type produced by the arrow
 */
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public class Cokleisli<W, T, R> implements Function1<Higher<W, T>, R>, Transformable<R> {

    /** The wrapped function from context to result. */
    public final Function1<Higher<W, T>, R> fn;

    /**
     * Wraps a plain function as a co-Kleisli arrow.
     */
    public static <W, T, R> Cokleisli<W, T, R> cokleisli(Function<? super Higher<W, T>, ? extends R> fn) {
        return new Cokleisli<>(Function1.narrow(fn));
    }

    /**
     * Alias for {@link #cokleisli(Function)}.
     */
    public static <W, T, R> Cokleisli<W, T, R> of(Function<? super Higher<W, T>, ? extends R> fn) {
        return cokleisli(fn);
    }

    /** Applies the wrapped function to the given context. */
    @Override
    public R apply(Higher<W, T> context) {
        return fn.apply(context);
    }

    /** Post-composes the arrow with {@code mapper}. */
    public <R1> Cokleisli<W, T, R1> mapFn(Function<? super R, ? extends R1> mapper) {
        return cokleisli(fn.andThen(mapper));
    }

    /** Alias for {@link #mapFn(Function)} (Transformable contract). */
    public <R1> Cokleisli<W, T, R1> map(Function<? super R, ? extends R1> mapper) {
        return mapFn(mapper);
    }

    /** Alias for {@link #product(Cokleisli)}. */
    public <R2> Cokleisli<W, T, Tuple2<R, R2>> fanout(Cokleisli<W, T, R2> other) {
        return product(other);
    }

    /** Runs both arrows on the same input and pairs their results. */
    public <R2> Cokleisli<W, T, Tuple2<R, R2>> product(Cokleisli<W, T, R2> other) {
        return cokleisli(fn.product(other));
    }
}
|
def find_pairs_sum_10(arr, target=10):
    """Return all pairs (arr[i], arr[j]) with i < j whose sum equals target.

    Generalized from the original hard-coded sum of 10: ``target`` defaults
    to 10, so existing single-argument callers behave identically.

    Args:
        arr: sequence of numbers to search.
        target: the desired pair sum (defaults to 10).

    Returns:
        List of (a, b) tuples in the order the pairs are discovered;
        duplicate values produce one pair per index combination.
    """
    pairs = []
    for i in range(len(arr)):
        # Only look at later elements so each index pair is considered once.
        for j in range(i + 1, len(arr)):
            if arr[i] + arr[j] == target:
                pairs.append((arr[i], arr[j]))
    return pairs
# Demo: find and print all pairs in the sample array that sum to 10.
arr = [2, 4, 5, 7, 8, 9]
pairs = find_pairs_sum_10(arr)
print(pairs)  # only 2 + 8 == 10 in this sample, so this prints [(2, 8)]
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/sql/v1/cloud_sql_ssl_certs.proto for package 'google.cloud.sql.v1'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/sql/v1/cloud_sql_ssl_certs_pb'
module Google
  module Cloud
    module Sql
      module V1
        module SqlSslCertsService
          # LINT: LEGACY_NAMES
          #
          # Service to manage SSL certs for Cloud SQL instances.
          class Service
            include ::GRPC::GenericService
            self.marshal_class_method = :encode
            self.unmarshal_class_method = :decode
            self.service_name = 'google.cloud.sql.v1.SqlSslCertsService'
            # Deletes the SSL certificate. For First Generation instances, the
            # certificate remains valid until the instance is restarted.
            rpc :Delete, ::Google::Cloud::Sql::V1::SqlSslCertsDeleteRequest, ::Google::Cloud::Sql::V1::Operation
            # Retrieves a particular SSL certificate. Does not include the private key
            # (required for usage). The private key must be saved from the response to
            # initial creation.
            rpc :Get, ::Google::Cloud::Sql::V1::SqlSslCertsGetRequest, ::Google::Cloud::Sql::V1::SslCert
            # Creates an SSL certificate and returns it along with the private key and
            # server certificate authority. The new certificate will not be usable until
            # the instance is restarted.
            rpc :Insert, ::Google::Cloud::Sql::V1::SqlSslCertsInsertRequest, ::Google::Cloud::Sql::V1::SslCertsInsertResponse
            # Lists all of the current SSL certificates for the instance.
            rpc :List, ::Google::Cloud::Sql::V1::SqlSslCertsListRequest, ::Google::Cloud::Sql::V1::SslCertsListResponse
          end
          # Client stub class for the service; instantiate with a channel and
          # credentials to invoke the RPCs declared above.
          Stub = Service.rpc_stub_class
        end
      end
    end
  end
end
|
/*
* Copyright 2008-2009 MOPAS(Ministry of Public Administration and Security).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.rte.fdl.idgnr.impl;
import java.math.BigDecimal;
import java.util.Locale;
import org.egovframe.rte.fdl.cmmn.exception.FdlException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
/**
* ID Generation ์๋น์ค๋ฅผ ์ํ Data Block ID Abstract Service
*
* @author ์คํํ๊ฒฝ ๊ฐ๋ฐํ ๊นํํธ
* @since 2009.02.01
* @version 1.0
* <pre>
* ๊ฐ์ ์ด๋ ฅ(Modification Information)
*
* ์์ ์ผ ์์ ์ ์์ ๋ด์ฉ
* ----------------------------------------------
* 2009.02.01 ๊นํํธ ์ต์ด ์์ฑ
* </pre>
*/
/**
 * Abstract block-allocating ID generation service.
 * <p>
 * Instead of asking the underlying store for a new ID on every request,
 * subclasses reserve a contiguous block of {@code blockSize} IDs at once and
 * this class hands them out one by one, fetching a new block only when the
 * current one is exhausted.
 * <p>
 * NOTE(review): {@code mAllocated}, {@code mFirstBigDecimal} and
 * {@code mFirstLong} are mutated here without synchronization; access is
 * presumably serialized by the caller or base class — confirm before using
 * from multiple threads.
 * <p>
 * (Original javadoc was in Korean; translated to English.)
 *
 * @since 2009.02.01
 * @version 1.0
 */
public abstract class AbstractDataBlockIdGnrService extends AbstractDataIdGnrService implements InitializingBean {

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDataBlockIdGnrService.class);

    /** First ID of the currently allocated BigDecimal block. */
    private BigDecimal mFirstBigDecimal;

    /** First ID of the currently allocated long block. */
    private long mFirstLong;

    /** Number of IDs already handed out from the current block. */
    private int mAllocated;

    /** Size of each allocated ID block. */
    protected int blockSize;

    /**
     * Allocates a new block of BigDecimal IDs of the given size.
     *
     * @param blockSize number of IDs to reserve
     * @return the first ID of the newly allocated block
     * @throws FdlException when the underlying store cannot allocate IDs
     */
    protected abstract BigDecimal allocateBigDecimalIdBlock(int blockSize) throws FdlException;

    /**
     * Allocates a new block of long IDs of the given size.
     *
     * @param blockSize number of IDs to reserve
     * @return the first ID of the newly allocated block
     * @throws FdlException when the underlying store cannot allocate IDs
     */
    protected abstract long allocateLongIdBlock(int blockSize) throws FdlException;

    /**
     * Returns the next unique BigDecimal ID.
     *
     * @return the next BigDecimal ID
     * @throws FdlException when a fresh block cannot be allocated
     */
    protected BigDecimal getNextBigDecimalIdInner() throws FdlException {
        // Refill when the current block is exhausted. afterPropertiesSet()
        // seeds mAllocated with Integer.MAX_VALUE so the very first call
        // always allocates a fresh block.
        if (mAllocated >= blockSize) {
            try {
                mFirstBigDecimal = allocateBigDecimalIdBlock(blockSize);
                mAllocated = 0;
            } catch (FdlException be) {
                // Mark the block as exhausted so the next call retries allocation.
                mAllocated = Integer.MAX_VALUE;
                throw be;
            }
        }
        BigDecimal id = mFirstBigDecimal.add(new BigDecimal(mAllocated));
        mAllocated++;
        return id;
    }

    /**
     * Returns the next unique long ID.
     *
     * @return the next long ID
     * @throws FdlException when a fresh block cannot be allocated, or when
     *         the long ID range has been exceeded
     */
    protected long getNextLongIdInner() throws FdlException {
        if (mAllocated >= blockSize) {
            try {
                mFirstLong = allocateLongIdBlock(blockSize);
                mAllocated = 0;
            } catch (FdlException e) {
                // Mark the block as exhausted so the next call retries allocation.
                mAllocated = Integer.MAX_VALUE;
                throw e;
            }
        }
        long id = mFirstLong + mAllocated;
        // A negative value means the long range overflowed.
        if (id < 0) {
            LOGGER.error(messageSource.getMessage("error.idgnr.greater.maxid", new String[] { "Long" }, Locale.getDefault()));
            throw new FdlException(messageSource, "error.idgnr.greater.maxid");
        }
        mAllocated++;
        return id;
    }

    /**
     * Sets the block size from the application context configuration.
     *
     * @param blockSize block size declared in the Spring configuration
     */
    public void setBlockSize(int blockSize) {
        this.blockSize = blockSize;
    }

    /**
     * Called by the container after properties are set; forces the first
     * {@code getNext*IdInner()} call to allocate a fresh block.
     *
     * @throws Exception on initialization failure
     */
    public void afterPropertiesSet() throws Exception {
        mAllocated = Integer.MAX_VALUE;
    }
}
|
<filename>src/main/java/mvc/task/Task.java
package mvc.task;
import lombok.val;
import mvc.notifications.Notification;
import mvc.notifications.NotificationDispatcher;
import java.util.Optional;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
/**
* @author <NAME>
*/
/**
 * Base class for observable units of work.
 * <p>
 * Lifecycle events are reported twice: to the optional {@link #delegate}
 * (if set) and as broadcast notifications through the shared
 * {@code NotificationDispatcher}. Note that the "did finish" event fires
 * even after a failure (the original code behaves the same way).
 */
public abstract class Task implements Runnable {

    public static final String NOTIFICATION_TASK_DID_START = "mvc.TaskDidStart";
    public static final String NOTIFICATION_TASK_DID_PROGRESS = "mvc.TaskDidProgress";
    public static final String NOTIFICATION_TASK_DID_FAIL = "mvc.TaskDidFail";
    public static final String NOTIFICATION_TASK_DID_FINISH = "mvc.TaskDidFinish";
    public static final String NOTIFICATION_TASK_EXCEPTION_KEY = "mvc.Task.Exception";
    public static final String NOTIFICATION_TASK_PROGRESS_KEY = "mvc.Task.Progress";

    /** Optional per-task observer; may be null. */
    public TaskDelegate delegate;

    /** Completion fraction last reported via {@link #setProgress(double)}. */
    private double progress;

    public double getProgress() {
        return progress;
    }

    /**
     * Records the new progress value and notifies the delegate and the
     * global dispatcher.
     */
    protected void setProgress(double progress) {
        this.progress = progress;
        TaskDelegate observer = delegate;
        if (observer != null) {
            observer.taskDidProgress(this);
        }
        NotificationDispatcher.shared().dispatch(new Notification(
                NOTIFICATION_TASK_DID_PROGRESS,
                unmodifiableMap(singletonMap(NOTIFICATION_TASK_PROGRESS_KEY, progress)),
                this));
    }

    /**
     * Runs {@link #execute()}, emitting start/fail/finish events around it.
     */
    @Override
    public void run() {
        progress = 0;
        TaskDelegate observer = delegate;
        if (observer != null) {
            observer.taskDidStart(this);
        }
        NotificationDispatcher.shared().dispatch(
                new Notification(NOTIFICATION_TASK_DID_START, null, this));
        try {
            execute();
        } catch (Exception e) {
            observer = delegate;
            if (observer != null) {
                observer.taskDidFail(this, e);
            }
            NotificationDispatcher.shared().dispatch(new Notification(
                    NOTIFICATION_TASK_DID_FAIL,
                    unmodifiableMap(singletonMap(NOTIFICATION_TASK_EXCEPTION_KEY, e)),
                    this));
        }
        observer = delegate;
        if (observer != null) {
            observer.taskDidFinish(this);
        }
        NotificationDispatcher.shared().dispatch(
                new Notification(NOTIFICATION_TASK_DID_FINISH, null, this));
    }

    /** The actual work; subclasses implement this. */
    public abstract void execute() throws Exception;
}
|
<filename>DriveBackup/src/main/java/ratismal/drivebackup/handler/listeners/PlayerListener.java
package ratismal.drivebackup.handler.listeners;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import ratismal.drivebackup.UploadThread;
import ratismal.drivebackup.config.Localization;
import ratismal.drivebackup.plugin.updater.UpdateChecker;
import ratismal.drivebackup.util.MessageUtil;
public class PlayerListener implements Listener {
private static boolean autoBackupsActive = false;
@EventHandler
public void onPlayerJoin(PlayerJoinEvent event) {
if (!autoBackupsActive) {
MessageUtil.Builder().mmText(Localization.intl("player-join-backup-enable")).send();
autoBackupsActive = true;
}
Player player = event.getPlayer();
if (UpdateChecker.isUpdateAvailable() && player.hasPermission("drivebackup.linkAccounts")) {
MessageUtil.Builder().mmText(Localization.intl("player-join-update-available")).to((CommandSender)player).toConsole(false).send();
}
if (!UploadThread.wasLastBackupSuccessful() && player.hasPermission("drivebackup.backup")) {
MessageUtil.Builder().mmText(Localization.intl("player-join-backup-failed")).to((CommandSender)player).toConsole(false).send();
}
}
public static boolean isAutoBackupsActive() {
return autoBackupsActive;
}
public static void setAutoBackupsActive(boolean autoBackupsActiveValue) {
autoBackupsActive = autoBackupsActiveValue;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.