text
stringlengths
1
1.05M
// ==UserScript==
// @name Bookmarks in TweetDeck
// @namespace https://ciffelia.com/
// @version 1.0.6
// @description 'Add Tweet to Bookmarks' in TweetDeck!
// @author Ciffelia <<EMAIL>> (https://ciffelia.com/)
// @license MIT
// @homepage https://github.com/ciffelia/tweetdeck-bookmarks#readme
// @supportURL https://github.com/ciffelia/tweetdeck-bookmarks/issues
// @include https://tweetdeck.twitter.com/
// @require https://unpkg.com/moduleraid@5.0.1/dist/moduleraid.iife.js
// ==/UserScript==
(function () {
  'use strict';

  // Extract Twitter's CSRF token from the `ct0` cookie.
  // FIX: the original called .split() on the result of .find() without a
  // check, so a missing cookie (e.g. logged out) crashed with a TypeError.
  // Throw a descriptive error instead.
  const getCsrfToken = () => {
    const row = document.cookie
      .split('; ')
      .find(r => r.startsWith('ct0='));
    if (row === undefined) {
      throw new Error('CSRF token cookie (ct0) not found; are you logged in?')
    }
    return row.split('=')[1]
  };

  // POST the tweet id to Twitter's bookmark endpoint.
  // Throws with the API error message (or HTTP status) on failure.
  const addTweetToBookmark = async tweetId => {
    const headers = {
      Accept: 'application/json',
      'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
      Authorization: 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA',
      'X-Csrf-Token': getCsrfToken()
    };
    const response = await fetch('https://api.twitter.com/1.1/bookmark/entries/add.json', {
      method: 'POST',
      headers,
      credentials: 'include',
      body: `tweet_id=${tweetId}`
    });
    const result = await response.json();
    if (!response.ok) {
      // FIX: guard against error responses without an `errors` array; the
      // original threw an unrelated TypeError instead of the API error.
      const errorMessage =
        (result.errors && result.errors[0] && result.errors[0].message) ||
        `HTTP ${response.status}`;
      throw new Error(errorMessage)
    }
  };

  // eslint-disable-next-line new-cap
  const mR = new moduleraid();
  const TDNotifications = mR.findModule('showNotification')[0];

  // Show a TweetDeck toast. type is 'info' or 'error'; anything else throws.
  const showNotification = (message, type = 'info') => {
    if (type === 'info') {
      TDNotifications.showNotification({ message });
    } else if (type === 'error') {
      TDNotifications.showErrorNotification({ message });
    } else {
      throw new Error(`Unknown notification type: ${type}`)
    }
  };

  const main = () => {
    // Inject a menu entry into the tweet "actions" dropdown mustache template.
    const menuItem = `
    <li class="is-selectable">
      <a href="#" data-action data-bookmark-tweet="{{chirp.id}}">Add Tweet to Bookmarks</a>
    </li>
    `;
    TD.mustaches['menus/actions.mustache'] =
      TD.mustaches['menus/actions.mustache'].replace(/{{\/chirp}}\s*<\/ul>/, `${menuItem}{{/chirp}}</ul>`);

    // Delegated click handler: a clicked retweet id is resolved to the
    // underlying tweet id via the column item's data-tweet-id attribute.
    document.body.addEventListener('click', async event => {
      const tweetOrRetweetId = event.target.dataset.bookmarkTweet;
      if (tweetOrRetweetId == null) { return }

      const tweetId = document.querySelector(`[data-key="${tweetOrRetweetId}"]`).dataset.tweetId;

      try {
        await addTweetToBookmark(tweetId);
      } catch (err) {
        showNotification(`Failed to add Tweet to Bookmarks: ${err.message}`, 'error');
        return
      }
      showNotification('Tweet added to your Bookmarks');
    });
  };

  main();
}());
// Compiled-CoffeeScript UI glue for a kanji-study web app: wires jQuery
// handlers for toolbars that slide in/out around a central kanji display,
// AJAX lookups against the app's endpoints (/kanji_info, /related, /info,
// /toggle_favorite, ...), and a lock mode that pins the toolbars open.
// Requires jQuery, tooltipster and humane to be loaded globally.
(function() {
  var left_slided, lock, locked, prepare_details, redirect, reload, right_slided, slide, slideToggle, toggle,
    __slice = [].slice;
  // UI state: `locked` pins the toolbars; *_slided track side-panel visibility.
  locked = false;
  left_slided = false;
  right_slided = false;
  reload = function() {
    return location.reload();
  };
  // Navigate to an app-relative route on the same origin.
  redirect = function(route) {
    return window.location = location.protocol + "//" + location.host + "/" + route;
  };
  // Keep `.fixed` elements 20px below the top until the page is scrolled past them.
  $(window).scroll(function() {
    return $('.fixed').css("top", Math.max(0, 20 - $(this).scrollTop()));
  });
  // Variadic helpers: each takes any number of selectors (compiled from
  // CoffeeScript splats, hence the __slice.call(arguments, ...) pattern).
  slideToggle = function() {
    var div, divs, _i, _len, _results;
    divs = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
    _results = [];
    for (_i = 0, _len = divs.length; _i < _len; _i++) {
      div = divs[_i];
      _results.push($(div).slideToggle(100));
    }
    return _results;
  };
  toggle = function() {
    var div, divs, _i, _len, _results;
    divs = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
    _results = [];
    for (_i = 0, _len = divs.length; _i < _len; _i++) {
      div = divs[_i];
      _results.push($(div).toggle());
    }
    return _results;
  };
  // Animate a horizontal slide; afterwards, only the LAST selector's display
  // is checked/promoted to 'table'. NOTE(review): `div` is read after the
  // loop, so only the final argument gets the display fix-up — looks like a
  // compiled-CoffeeScript artifact; confirm before relying on multi-arg calls.
  slide = function() {
    var div, divs, _i, _len;
    divs = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
    for (_i = 0, _len = divs.length; _i < _len; _i++) {
      div = divs[_i];
      $(div).animate({ width: 'toggle' }, 100);
    }
    if ($(div).css('display') === 'block') {
      return $(div).css('display', 'table');
    }
  };
  // Toggle the '<name>-locked' class on each '.<name>' element.
  lock = function() {
    var item, items, _i, _len, _results;
    items = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
    _results = [];
    for (_i = 0, _len = items.length; _i < _len; _i++) {
      item = items[_i];
      _results.push($('.' + item).toggleClass(item + '-locked'));
    }
    return _results;
  };
  // Build a <dl> HTML fragment from a { kanji: {on, kun, names, meanings} }
  // map. NOTE(review): appends '</dd>' without a matching opening '<dd>' —
  // browsers tolerate it, but the markup is unbalanced; confirm intent.
  prepare_details = function(data) {
    var details, info, kanji, meaning;
    details = '<dl>';
    for (kanji in data) {
      info = data[kanji];
      details += "<dt>" + kanji + "</dt>";
      details += '<hr/>';
      details += "" + info.on;
      if (info.kun) {
        details += " | " + info.kun;
      }
      if (info.names) {
        details += " | " + info.names;
      }
      meaning = info.meanings.replace(/[,\s]+$/g, '');
      details += "<br/><span class='meaning'>" + meaning + "</span>";
      details += '</dd>';
    }
    return details += '</dl>';
  };
  // Initialize tooltips.
  $(function() {
    return $('.tooltip').tooltipster({ theme: '.tooltipster-theme', delay: 0, speed: 250 });
  });
  // Restore persisted lock state from the server on load.
  $(function() {
    return $.ajax('/toggled', {
      type: 'GET',
      dataType: 'json',
      success: function(data, textStatus, jqXHR) {
        locked = data.status;
        if (locked) {
          return lock('kanji', 'circle');
        }
      }
    });
  });
  // Left-click on the circle toggles lock (persisted via /toggle) and closes
  // any open side toolbars; middle-click reloads the page.
  $(function() {
    return $('.circle').mousedown(function(event) {
      switch (event.which) {
        case 1:
          locked = !locked;
          $.ajax('/toggle', { type: 'GET' });
          lock('kanji', 'circle');
          if ($('.toolbar-right').css('display') === 'table') {
            slide('.toolbar-right');
            right_slided = !right_slided;
          }
          if ($('.toolbar-left').css('display') === 'table') {
            slide('.toolbar-left');
            return left_slided = !left_slided;
          }
          break;
        case 2:
          return location.reload();
      }
    });
  });
  // Reveal/hide top+bottom toolbars on hover (unless locked).
  $(function() {
    return $('.kanji').mouseover(function() {
      if ($('.toolbar-top').css('display') === 'none') {
        return slideToggle('.toolbar-top', '.toolbar-bottom');
      }
    });
  });
  $(function() {
    return $('.kanji').mouseout(function() {
      if ($('.toolbar-top').css('display') === 'block' && !locked) {
        return slideToggle('.toolbar-top', '.toolbar-bottom');
      }
    });
  });
  $(function() {
    return $('#roll').click(function() {
      return $.get('/lock');
    });
  });
  // Jump to the detail view of the currently shown kanji.
  $(function() {
    return $('#link').click(function() {
      var kanji;
      kanji = $('.kanji').text().trim();
      return redirect('view/' + kanji);
    });
  });
  // NOTE(review): this handler only assigns `kanji` and does nothing with
  // it — appears to be dead/unfinished code; confirm before removing.
  $(function() {
    return $('.lookup-button').click(function() {
      var kanji;
      return kanji = $('.kanji').text().trim();
    });
  });
  // Toggle the current kanji as a favorite; swaps the icon and shows a toast.
  $(function() {
    return $('#fav').click(function() {
      var kanji;
      kanji = $('.kanji').text().trim();
      return $.ajax('/toggle_favorite/' + kanji, {
        type: 'GET',
        dataType: 'json',
        success: function(data, textStatus, jqXHR) {
          if (data.result === 'fav') {
            $('#fav').removeClass('icon-check');
            $('#fav').addClass('icon-cancel');
            return humane.log("" + kanji + " added to favorites!", { timeout: 1500 });
          } else {
            $('#fav').removeClass('icon-cancel');
            $('#fav').addClass('icon-check');
            return humane.log("" + kanji + " removed from favorites!", { timeout: 1500 });
          }
        }
      });
    });
  });
  // Clicking a kanji in the grid fetches its details into the info overlay.
  $(function() {
    return $('.kanji-in-grid').click(function() {
      var kanji;
      kanji = $(this).text().trim();
      $('.loader-right').fadeToggle(250);
      return $.ajax('/kanji_info/' + kanji, {
        type: 'GET',
        dataType: 'json',
        success: function(data, textStatus, jqXHR) {
          var details;
          details = prepare_details(data.info);
          $('.kanji-info').html(details).fadeIn(150);
          return $('.loader-right').fadeToggle(100);
        }
      });
    });
  });
  // Clicking the overlay or the grid dismisses the overlay.
  $(function() {
    return $('.kanji-info').click(function() {
      return $('.kanji-info').fadeOut(100);
    });
  });
  $(function() {
    return $('.kanji-grid').click(function() {
      return $('.kanji-info').fadeOut(100);
    });
  });
  // Clicking a radical lists related kanji in the right toolbar.
  $(function() {
    return $('.rad').click(function() {
      var rad;
      rad = $(this).text().trim();
      $('.loader-left').fadeToggle(250);
      return $.ajax('/related/' + rad, {
        type: 'GET',
        dataType: 'json',
        success: function(data, textStatus, jqXHR) {
          var kanji, text, _i, _len;
          text = '<div class="related-kanji">';
          for (_i = 0, _len = data.length; _i < _len; _i++) {
            kanji = data[_i];
            text += '<span class="single-kanji">' + kanji + '</span>';
          }
          text += '</div>';
          if (!right_slided) {
            $('.content-right').html(text);
            slide('.toolbar-right');
            right_slided = !right_slided;
          } else {
            if ($('.toolbar-right').css('display') === 'table') {
              $('.content-right').fadeOut(150, (function() {
                return $(this).html(text).fadeIn(150);
              }));
            }
          }
          return $('.loader-left').fadeToggle(250);
        }
      });
    });
  });
  // Clicking a related kanji (delegated — the list is injected dynamically)
  // loads its details into the left toolbar.
  $(function() {
    return $('.content-right').on('click', '.single-kanji', function() {
      var kanji;
      kanji = $(this).text().trim();
      $('.loader-left').fadeToggle(250);
      return $.ajax('/kanji_info/' + kanji, {
        type: 'GET',
        dataType: 'json',
        success: function(data, textStatus, jqXHR) {
          var details;
          details = prepare_details(data.info);
          if ($('.toolbar-left').css('display') === 'table') {
            $('.content-left').fadeOut(150, (function() {
              return $(this).html(details).fadeIn(150);
            }));
          } else {
            $('.content-left').html(details);
          }
          if (!left_slided) {
            slide('.toolbar-left');
            left_slided = !left_slided;
          }
          return $('.loader-left').fadeToggle(100);
        }
      });
    });
  });
  // Clicking a ruby-annotated term loads example sentences (right panel) and
  // kanji details (left panel); the looked-up term is emphasized in each key.
  $(function() {
    return $('ruby').click(function() {
      var term;
      term = $(this).find('rb').text().trim();
      $('.loader-left').fadeToggle(250);
      return $.ajax('/info/' + term, {
        type: 'GET',
        dataType: 'json',
        success: function(data, textStatus, jqXHR) {
          var details, example, key, text, value, _i, _len, _ref;
          if (data.examples.length === 0) {
            humane.log('Ooops, no examples found!', { timeout: 2000 });
            $('.loader-left').fadeToggle(100);
            return;
          }
          text = '<dl>';
          _ref = data.examples;
          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
            example = _ref[_i];
            for (key in example) {
              value = example[key];
              key = key.replace(term, "<em>" + term + "</em>");
              text += "<dt>" + key + "</dt><dd>" + value + "</dd>";
            }
          }
          text += '</dl>';
          if ($('.toolbar-right').css('display') === 'table') {
            $('.content-right').fadeOut(150, (function() {
              return $(this).html(text).fadeIn(150);
            }));
          } else {
            $('.content-right').html(text);
          }
          if (!right_slided) {
            slide('.toolbar-right');
            right_slided = !right_slided;
          }
          details = prepare_details(data.details);
          if ($('.toolbar-left').css('display') === 'table') {
            $('.content-left').fadeOut(150, (function() {
              return $(this).html(details).fadeIn(150);
            }));
          } else {
            $('.content-left').html(details);
          }
          if (!left_slided) {
            slide('.toolbar-left');
            left_slided = !left_slided;
          }
          return $('.loader-left').fadeToggle(100);
        }
      });
    });
  });
}).call(this);
// src/config.js
// Site-wide configuration for the portfolio site: contact address, external
// profile links, navigation anchors, theme colors, and the ScrollReveal
// animation preset consumed by section components.
module.exports = {
  email: '<EMAIL>',
  // External profile links; rendered as social/contact icons.
  socialMedia: [
    {
      name: 'GitHub',
      url: 'https://github.com/DonnC',
    },
    {
      name: 'PlayStore',
      url: 'https://play.google.com/store/apps/developer?id=DonnC+Lab',
    },
    {
      name: 'Linkedin',
      url: 'https://www.linkedin.com/in/donald-chinhuru-9aa48211a/',
    },
    {
      name: 'Twitter',
      url: 'https://twitter.com/donix_22',
    },
    {
      name: 'Medium',
      url: 'https://donnclab.medium.com/',
    },
    {
      name: 'Python',
      url: 'https://pypi.org/user/DonaldC/',
    },
    {
      name: 'Flutter',
      url: 'https://pub.dev/packages?q=email%3Adonychinhuru%40gmail.com',
    },
    {
      name: 'StackOverflow',
      url: 'https://stackoverflow.com/users/15746605/donnc',
    },
    {
      name: 'Youtube',
      url: 'https://www.youtube.com/channel/UCdbjCQra85wNB4xlcA6rXUQ',
    },
    {
      name: 'External',
      url: 'https://octoprofile.now.sh/user?id=DonnC',
    },
  ],
  // In-page navigation anchors for the site header.
  navLinks: [
    {
      name: 'About',
      url: '/#about',
    },
    {
      name: 'Experience',
      url: '/#jobs',
    },
    {
      name: 'Work',
      url: '/#projects',
    },
    {
      name: 'Contact',
      url: '/#contact',
    },
  ],
  // Theme palette.
  colors: {
    green: '#64ffda',
    navy: '#0a192f',
    darkNavy: '#020c1b',
  },
  // ScrollReveal options factory: `delay` in ms before the reveal starts,
  // `viewFactor` is the fraction of the element that must be in view.
  // Returns a fresh options object on every call.
  srConfig: (delay = 200, viewFactor = 0.25) => ({
    origin: 'bottom',
    distance: '20px',
    duration: 500,
    delay,
    rotate: { x: 0, y: 0, z: 0 },
    opacity: 0,
    scale: 1,
    easing: 'cubic-bezier(0.645, 0.045, 0.355, 1)',
    mobile: true,
    reset: false,
    useDelay: 'always',
    viewFactor,
    viewOffset: { top: 0, right: 0, bottom: 0, left: 0 },
  }),
};
<filename>src/entry.c #include "entry.h" #include <ctype.h> #include <errno.h> #include <stdio.h> #include <stdlib.h> #include <string.h> size_t findLength(const char* line) { const char* orig = line; for (; *line && *line != '\n'; line++); return line - orig; } char* findEnd(char* line) { for (; *line && *line != '\n'; line++); return line; } char* findFirstOf(char* line, char c) { for (; *line && *line != '\n'; line++) { if (*line == c) { return line; } } return NULL; } char* findLastOf(char* line, char c) { char* last = NULL; for (; *line && *line != '\n'; line++) { if (*line == c) { last = line; } } return last; } char* findWhitespace(char* line) { for (; *line && *line != '\n'; line++) { if (isspace(*line)) { return line; } } return NULL; } char* skipWhitespace(char* line) { for (; *line && *line != '\n' && isspace(*line); line++); return line; } const char* modeToString(mode_t mode) { static char string[5] = "xxxx"; /*for (int i = 3; i >= 0; i--) { string[i] = (mode & 7) + '0'; mode >>= 3; } return string;*/ snprintf(string, sizeof string, "%04o", mode); return string; } const char* uint32ToString(uint32_t i) { static char buffer[11] = "xxxxxxxxxx"; /*char* cur = buffer; unsigned int d = 1000000000; while (d > 0) { *(cur++) = '0' + i / d; i %= d; d /= 10; } for (cur = buffer; cur < &buffer[9]; cur++) { if (*cur != '0') { return cur; } } return &buffer[9];*/ snprintf(buffer, sizeof buffer, "%u", i); return buffer; } const char* ownerIdToString(uid_t uid, gid_t gid) { static char string[22]; { const char* str = uint32ToString(uid & 0xFFFFFFFF); strcpy(&string[0], &str[0]); } size_t commaPos = strlen(string); string[commaPos] = ':'; { const char* str = uint32ToString(gid & 0xFFFFFFFF); strcpy(&string[commaPos + 1], &str[0]); } return string; } bool stringToMode(const char* string, size_t stringLen, mode_t* modeOut) { if (stringLen < 3 || stringLen > 4 || !isdigit(string[0])) { return false; } char num[5]; memcpy(num, string, stringLen); num[stringLen] = 0; errno 
= 0; char* end; mode_t mode = strtoul(num, &end, 8); if (end != num + stringLen || errno == ERANGE) { return false; } *modeOut = mode; return true; } bool stringToOwnerId(const char* string, size_t stringLen, gid_t* groupOut, uid_t* userOut) { char* colon = memchr(string, ':', stringLen); size_t size1 = colon - string; size_t size2 = string + stringLen - colon - 1; if (size1 > 10 || size2 > 10 || !isdigit(string[0]) || !isdigit(colon[1])) // safe to assume at most 32-bit { errno = ERANGE; return false; } char number[11]; memcpy(number, string, size1); number[size1] = 0; errno = 0; char* end; gid_t gid = strtoul(number, &end, 10); if (end != &number[size1] || errno == ERANGE) { return false; } memcpy(number, colon + 1, size2); number[size2] = 0; errno = 0; uid_t uid = strtoul(number, &end, 10); if (end != &number[size2] || errno == ERANGE) { errno = EINVAL; return false; } *groupOut = gid; *userOut = uid; return true; } bool stringToOwner(const char* string, size_t stringLen, const char** userOut) { char* colon = memchr(string, ':', stringLen); size_t size1 = colon - string; size_t size2 = string + stringLen - colon - 1; if (size1 > 32 || size2 > 32) { return false; } *userOut = colon + 1; return true; }
# ActiveModel attribute type that casts raw relationship values into frozen
# Nomis::Contact::Relationship value objects.
class ContactRelationshipType < ActiveModel::Type::Value
  # value: the raw relationship value to wrap.
  # Returns a frozen Nomis::Contact::Relationship built from it.
  def cast(value)
    Nomis::Contact::Relationship.new(value).freeze
  end
end
#!/bin/sh
# Test driver for ./pldate. Each helper runs one pldate invocation, prints
# "... OK" on a match, and prints the expected value and exits 9 on mismatch.

# Run `./pldate $From` where $From is a full (multi-word) argument string;
# $From is intentionally unquoted so it word-splits into arguments.
TestComplete () {
  From="$1" Expect="$2"
  To="$(./pldate $From)"
  if [ "$To" = "$Expect" ]; then
    printf "%s => %s OK\n" "$From" "$To"
  else
    printf "%s => %s *** expected %s \n" "$From" "$To" "$Expect"
    exit 9
  fi
}

# Run `./pldate set $From` and compare against $Expect.
TestSet () {
  From="$1" Expect="$2"
  To="$(./pldate set "$From")"
  if [ "$To" = "$Expect" ]; then
    printf "set %s => %s OK\n" "$From" "$To"
  else
    printf "set %s => %s *** expected %s \n" "$From" "$To" "$Expect"
    exit 9
  fi
}

# Generic single-operation check: `./pldate set $From $Op $Arg`.
# REFACTOR: the original defined twelve near-identical TestXxx functions
# differing only in the sub-command; they are now one-line wrappers around
# this helper. Output format is identical to the original functions.
RunOp () {
  Op="$1" From="$2" Arg="$3" Expect="$4"
  To="$(./pldate set "$From" "$Op" "$Arg")"
  if [ "$To" = "$Expect" ]; then
    printf "set %s %s %s => %s OK\n" "$From" "$Op" "$Arg" "$To"
  else
    printf "set %s %s %s => %s *** expected %s \n" "$From" "$Op" "$Arg" "$To" "$Expect"
    exit 9
  fi
}

TestAddDays ()   { RunOp add-days   "$1" "$2" "$3"; }
TestSubDays ()   { RunOp sub-days   "$1" "$2" "$3"; }
TestAddMonth ()  { RunOp add-month  "$1" "$2" "$3"; }
TestSubMonth ()  { RunOp sub-month  "$1" "$2" "$3"; }
TestAddYear ()   { RunOp add-year   "$1" "$2" "$3"; }
TestSubYear ()   { RunOp sub-year   "$1" "$2" "$3"; }
TestNextDow ()   { RunOp next-dow   "$1" "$2" "$3"; }
TestPrevDow ()   { RunOp prev-dow   "$1" "$2" "$3"; }
TestUpToDow ()   { RunOp upto-dow   "$1" "$2" "$3"; }
TestDownToDow () { RunOp downto-dow "$1" "$2" "$3"; }
TestSetMday ()   { RunOp set-mday   "$1" "$2" "$3"; }
TestSetYday ()   { RunOp set-yday   "$1" "$2" "$3"; }

# Round-trip between external date ($Ext) and internal day number ($Int).
TestInternal () {
  Ext="$1" Int="$2"
  Tmp1="$(./pldate set "$Ext" printf %I)"
  Tmp2="$(./pldate set-int "$Int" print)"
  if [ "$Tmp2" = "$Ext" -a "$Tmp1" = "$Int" ]; then
    printf "ext=%s int=%s OK\n" "$Ext" "$Int"
  else
    printf "ext=%s int=%s tmp1=%s tmp2=%s **** Fail\n" "$Ext" "$Int" "$Tmp1" "$Tmp2"
    exit 9
  fi
}

TestSet 16010101 16010101
TestSet 18480315 18480315
TestSet 19680309 19680309
TestSet 21000228 21000228
TestAddDays 23001231 0 23001231
TestAddDays 16010301 -1 16010228
TestAddDays 16040301 -1 16040229
TestAddDays 17000301 -1 17000228
TestAddDays 20000301 -1 20000229
TestAddDays 23000301 -1 23000228
TestAddDays 19991231 +1 20000101
TestAddDays 19991231 +61 20000301
TestAddDays 20000301 -61 19991231
TestAddDays 20001231 +60 20010301
TestAddDays 20010301 -60 20001231
TestSubDays 20011212 0 20011212
TestSubDays 20011212 200 20010526
TestSubDays 20011212 -200 20020630
TestNextDow 20190420 0 20190421
TestNextDow 20190420 1 20190422
TestNextDow 20190420 2 20190423
TestNextDow 20190420 3 20190424
TestNextDow 20190420 4 20190425
TestNextDow 20190420 5 20190426
TestNextDow 20190420 6 20190427
# here 'next-dow' and 'upto-dow' differ
TestNextDow 20190420 7 20190421
TestPrevDow 20190420 0 20190414
TestPrevDow 20190420 1 20190415
TestPrevDow 20190420 2 20190416
TestPrevDow 20190420 3 20190417
TestPrevDow 20190420 4 20190418
TestPrevDow 20190420 5 20190419
TestPrevDow 20190420 6 20190413
# here 'prev-dow' and 'downto-dow' differ
TestPrevDow 20190420 7 20190414
TestUpToDow 20190420 0 20190421
TestUpToDow 20190420 1 20190422
TestUpToDow 20190420 2 20190423
TestUpToDow 20190420 3 20190424
TestUpToDow 20190420 4 20190425
TestUpToDow 20190420 5 20190426
TestUpToDow 20190420 6 20190420
# here 'next-dow' and 'upto-dow' differ
TestUpToDow 20190420 7 20190421
TestDownToDow 20190420 0 20190414
TestDownToDow 20190420 1 20190415
TestDownToDow 20190420 2 20190416
TestDownToDow 20190420 3 20190417
TestDownToDow 20190420 4 20190418
TestDownToDow 20190420 5 20190419
TestDownToDow 20190420 6 20190420
# here 'prev-dow' and 'downto-dow' differ
TestDownToDow 20190420 7 20190414
TestSetMday 23000201 -31 23000201
TestSetMday 23000201 -30 23000201
TestSetMday 23000201 -27 23000202
TestSetMday 23000201 -10 23000219
TestSetMday 23000201 -1 23000228
TestSetMday 23000201 0 23000201
TestSetMday 23000201 1 23000201
TestSetMday 23000201 30 23000228
TestSetMday 23000201 31 23000228
TestSetYday 17890315 -1 17891231
TestSetYday 17890315 0 17890101
TestSetYday 17890315 365 17891231
TestAddMonth 20010131 -100 19920930
TestAddMonth 20010131 -12 20000131
TestAddMonth 20010131 -11 20000229
TestAddMonth 20010131 -1 20001231
TestAddMonth 20010131 0 20010131
TestAddMonth 20010131 1 20010228
TestAddMonth 20010131 11 20011231
TestAddMonth 20010131 12 20020131
TestAddMonth 20010131 100 20090531
TestSubMonth 19920930 -100 20010130
TestSubMonth 20001231 -1 20010131
TestSubMonth 20010131 0 20010131
TestSubMonth 20010228 1 20010128
TestSubMonth 20090531 100 20010131
TestAddYear 20000229 +300 23000228
TestAddYear 20000229 +400 24000229
TestSubYear 24001231 799 16011231
TestSubYear 16011231 -799 24001231
TestInternal 16010101 000000
TestInternal 17001231 036523
TestInternal 17010101 036524
TestInternal 20001231 146096
TestInternal 20000229 145790
TestInternal 20000301 145791
TestInternal 24001231 292193
TestComplete 'set 19010101' 19010101
TestComplete 'set 19010101 add-days 365' 19020101
TestComplete 'set 19010101 add-days 365 next-dow 0' 19020105
TestComplete 'set 19010101 add-days 365 prev-dow 0' 19011229
TestComplete 'set 19450404 set-mday 1 sub-days 1' 19450331
TestComplete 'set 19450404 set-mday -1 add-days 1' 19450501
TestComplete 'set 20010531 set-month 12' '20011231'
TestComplete 'set 20010531 set-month 9' '20010930'
TestComplete 'set 20010531 set-month 1' '20010131'
TestComplete 'set 20010531 next-month 1' '20020131'
TestComplete 'set 20010531 next-month 5' '20020531'
TestComplete 'set 20010531 prev-month 1' '20010131'
TestComplete 'set 20010531 prev-month 1' '20010131'
TestComplete 'set 20010531 upto-month 5' '20010531'
TestComplete 'set 20010531 upto-month 4' '20020430'
TestComplete 'set 20010531 downto-month 5' '20010531'
TestComplete 'set 20010531 downto-month 6' '20000630'
TestComplete 'set 20010531 downto-month 12' '20001231'
TestComplete 'set 19681226 add-days 1 prev-dow 1 printf %Y%m%d- next-dow 7 printf %Y%m%d' 19681223-19681229
TestComplete 'set 19681230 add-days 1 prev-dow 1 printf %Y%m%d.%w.%j- next-dow 7 printf %Y%m%d.%w.%j' 19681230.1.365-19690105.0.005
//===-- PlatformFreeBSD.h ---------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef liblldb_PlatformFreeBSD_h_
#define liblldb_PlatformFreeBSD_h_

// C Includes
// C++ Includes
// Other libraries and framework includes
// Project includes
#include "lldb/Target/Platform.h"

namespace lldb_private {
namespace platform_freebsd {

// LLDB Platform plugin for FreeBSD; covers both host-side debugging
// (is_host == true) and connections to a remote FreeBSD system via
// m_remote_platform_sp.
class PlatformFreeBSD : public Platform {
public:
  PlatformFreeBSD(bool is_host);

  ~PlatformFreeBSD() override;

  //------------------------------------------------------------
  // Class functions
  //------------------------------------------------------------
  static lldb::PlatformSP CreateInstance(bool force, const ArchSpec *arch);

  static void Initialize();

  static void Terminate();

  static ConstString GetPluginNameStatic(bool is_host);

  static const char *GetDescriptionStatic(bool is_host);

  //------------------------------------------------------------
  // lldb_private::PluginInterface functions
  //------------------------------------------------------------
  ConstString GetPluginName() override { return GetPluginNameStatic(IsHost()); }

  uint32_t GetPluginVersion() override { return 1; }

  const char *GetDescription() override {
    return GetDescriptionStatic(IsHost());
  }

  //------------------------------------------------------------
  // lldb_private::Platform functions
  //------------------------------------------------------------
  bool GetModuleSpec(const FileSpec &module_file_spec, const ArchSpec &arch,
                     ModuleSpec &module_spec) override;

  Error RunShellCommand(const char *command, const FileSpec &working_dir,
                        int *status_ptr, int *signo_ptr,
                        std::string *command_output,
                        uint32_t timeout_sec) override;

  Error ResolveExecutable(const ModuleSpec &module_spec,
                          lldb::ModuleSP &module_sp,
                          const FileSpecList *module_search_paths_ptr) override;

  size_t GetSoftwareBreakpointTrapOpcode(Target &target,
                                         BreakpointSite *bp_site) override;

  bool GetRemoteOSVersion() override;

  bool GetRemoteOSBuildString(std::string &s) override;

  bool GetRemoteOSKernelDescription(std::string &s) override;

  // Remote Platform subclasses need to override this function
  ArchSpec GetRemoteSystemArchitecture() override;

  bool IsConnected() const override;

  Error ConnectRemote(Args &args) override;

  Error DisconnectRemote() override;

  const char *GetHostname() override;

  const char *GetUserName(uint32_t uid) override;

  const char *GetGroupName(uint32_t gid) override;

  bool GetProcessInfo(lldb::pid_t pid, ProcessInstanceInfo &proc_info) override;

  uint32_t FindProcesses(const ProcessInstanceInfoMatch &match_info,
                         ProcessInstanceInfoList &process_infos) override;

  Error LaunchProcess(ProcessLaunchInfo &launch_info) override;

  lldb::ProcessSP Attach(ProcessAttachInfo &attach_info, Debugger &debugger,
                         Target *target, Error &error) override;

  // FreeBSD processes can not be launched by spawning and attaching.
  bool CanDebugProcess() override { return false; }

  // Only on PlatformMacOSX:
  Error GetFileWithUUID(const FileSpec &platform_file, const UUID *uuid,
                        FileSpec &local_file) override;

  Error GetSharedModule(const ModuleSpec &module_spec, Process *process,
                        lldb::ModuleSP &module_sp,
                        const FileSpecList *module_search_paths_ptr,
                        lldb::ModuleSP *old_module_sp_ptr,
                        bool *did_create_ptr) override;

  bool GetSupportedArchitectureAtIndex(uint32_t idx, ArchSpec &arch) override;

  void GetStatus(Stream &strm) override;

  void CalculateTrapHandlerSymbolNames() override;

protected:
  lldb::PlatformSP m_remote_platform_sp; // Allow multiple ways to connect to a
                                         // remote freebsd OS

private:
  DISALLOW_COPY_AND_ASSIGN(PlatformFreeBSD);
};

} // namespace platform_freebsd
} // namespace lldb_private

#endif // liblldb_PlatformFreeBSD_h_
/* global window, document */
// Utility module for a browser-embedded SEO audit tool: message-based fetch
// bridge to the parent frame, result-object factory, robots.txt parsing,
// and HTML-link helpers for showing captured source/data to the user.
import { isNumber } from 'lodash';
import simpleRobotParser from 'simple-functional-robots-txt-parser';

// Pending fetch() callbacks keyed by runId (see fetch below).
const callbacks = {};
let globals = {};

export const setGlobals = (object) => {
  globals = object;
};

export const getGlobals = () => {
  return globals;
};

// Build a normalized audit-result record. Accepts either a single options
// object or positional (label, message, type, what, priority) arguments.
export const createResult = (...args) => {
  const result = {
    label: null,
    message: null,
    type: 'info',
    what: null,
    priority: 0,
  };
  if (args.length > 1) {
    const [label, message, type = 'info', what, priority = 0] = args;
    return Object.assign(result, { label, message, type, what, priority });
  }
  return Object.assign(result, args[0]);
};

// Check `url` against a robots.txt body for the given user agent.
export const simpleRobotTxt = (txt, url, ua = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)") => {
  return simpleRobotParser(txt, url, ua);
};

export const htmlEntitiesEncode = (str) => {
  return String(str).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;');
};

// Single dispatcher for 'fetchResult' messages coming back from the parent
// frame. FIX: the original added a fresh listener inside every fetch() call,
// leaking one listener per request; registering once at module load keeps
// behavior identical while bounding listener count.
window.addEventListener('message', (event) => {
  const { command } = event.data;
  if (command === 'fetchResult') {
    const { runId, response } = event.data;
    if (callbacks[runId]) {
      callbacks[runId](response);
      delete callbacks[runId];
    }
  }
});

// Proxy a fetch through the parent frame: post a 'fetch' message tagged with
// a random runId and invoke `callback` when the matching result arrives.
export const fetch = (url, options, callback) => {
  const runId = Math.round(Math.random() * 10000000);
  callbacks[runId] = callback;
  window.parent.postMessage({ command: 'fetch', url, options, runId }, '*');
};

// POST `requestbody` to a Google Search Console API endpoint with a Bearer
// token. NOTE: exported name keeps its original typo — it is public API.
export const sinmpleGscFetch = (token, api, requestbody, callback) => {
  fetch(api, {
    method: "POST",
    mode: 'cors',
    headers: {
      'Content-Type': 'application/json',
      'Accept': 'application/json',
      'Authorization': 'Bearer ' + token
    },
    body: JSON.stringify(requestbody)
  }, callback); // BUG FIX: was `success`, an undefined identifier (ReferenceError at call time)
};

// Link that opens `str` in the tool's code-view page.
export const utf8TextLink = (str, anchor) => {
  str = str.trim();
  let str_enc = encodeURIComponent(str);
  return '<a href="' + globals.codeviewUrl + '?show=' + str_enc + '" target="_blank" >' + anchor + '</a>';
};

// Link embedding `str` as a base64 data: URL; the raw text is the title.
export const dataUrlTextLink = (str, anchor) => {
  // NOTE(review): '\"' === '"', so this replace is a no-op; left unchanged
  // because "fixing" it to an escaping replace would change output.
  str = String(str).replace(/"/g, '\"');
  return '<a href="data:text;base64, ' + btoa(str) + '" target="_blank" title="' + htmlEntitiesEncode(str) + '">' + anchor + '</a>';
};

// can't open file in _blank due to some security restrictions
export const blobUrlTextLink = (str, anchor) => {
  str = String(str).replace(/"/g, '\"');
  var blob = new Blob([str], { type: "text/plain;charset=utf-8" });
  // BUG FIX: the original called blob.close() here; Blob has no close()
  // method in current browsers, so the function always threw a TypeError
  // before returning.
  return '<a href="' + URL.createObjectURL(blob) + '" target="_blank" title="' + htmlEntitiesEncode(str) + '">' + anchor + '</a>';
};

export const isIterable = function (stuff) {
  if (stuff) {
    return typeof stuff[Symbol.iterator] === 'function';
  }
  return false;
};

export const isString = function (stuff) {
  return typeof stuff === 'string';
};

// Render DOM nodes / node lists / plain objects as displayable text.
// NOTE(review): patches Element/NodeList/HTMLCollection prototypes on every
// call; the isNodeList/isElement helpers are not used in this file — kept
// verbatim in case other modules rely on them.
export const nodeToString = (stuff) => {
  Element.prototype.isNodeList = function () { return false; };
  Element.prototype.isElement = function () { return true; };
  NodeList.prototype.isNodeList = HTMLCollection.prototype.isNodeList = function () { return true; };
  var temp_string = '';
  if (stuff === undefined || stuff === null) {
    return '';
  }
  if (!stuff) {
    return stuff;
  }
  if (stuff.outerHTML) {
    return stuff.outerHTML;
  }
  if (isIterable(stuff)) {
    if (isString(stuff)) { return stuff; }
    if (!Array.isArray(stuff)) {
      stuff = Array.from(stuff);
    }
    stuff.forEach(function (v) {
      if (v.outerHTML) {
        temp_string = temp_string + v.outerHTML + "\n";
      } else {
        temp_string = temp_string + v + "\n";
      }
    });
    return temp_string;
  }
  if (stuff !== null && typeof stuff === 'object') {
    var stuff_keys = Object.keys(stuff);
    stuff_keys.forEach(function (k) {
      temp_string = temp_string + k + ': ' + stuff[k] + "\n";
    });
    return temp_string;
  }
  return "Don't know how to transform this data into a data URL!";
};

// Join nodeToString() of each argument with newlines; false when empty.
export const allNodesToString = (...stuffs) => {
  if (!Array.isArray(stuffs)) {
    stuffs = Array.from(stuffs);
  }
  if (!stuffs) { return false; }
  if (stuffs.length === 0) { return false; }
  var s = '';
  stuffs.forEach(function (stuff) {
    s = s + "\n" + nodeToString(stuff);
  });
  return s;
};

export const partialCodeLink = (...nodes) => {
  var str = allNodesToString(...nodes);
  return ' ' + utf8TextLink(str, '<span class="show-partial-source Button Button--haptic Button--inline">&lt;/&gt;</span>');
};

export const partialStringifyLink = (obj) => {
  return ' ' + utf8TextLink(JSON.stringify(obj, null, 2), '<span class="show-partial-source Button Button--haptic Button--inline">&lt;/&gt;</span>');
};

export const stringifyLink = partialStringifyLink;

export const partialTextLink = (anchor, ...nodes) => {
  var str = allNodesToString(...nodes);
  return ' ' + utf8TextLink(str, anchor);
};

// Inline link that outlines every element matching `selector` in the top page.
export const highlightLink = (selector, anchortext = "Highlight", bordercss = "5px solid red") => {
  if (!selector) { return ""; }
  selector = selector.replace(/"/g, '\\"');
  selector = selector.replace(/'/g, "\\'");
  let link = " <a href='javascript://' onclick=\"for(e of window.top.document.querySelectorAll('" + htmlEntitiesEncode(selector) + "')){e.style.border='" + htmlEntitiesEncode(bordercss) + "';}\">" + anchortext + "</a>";
  return link;
};

// Today minus `minus` days as a zero-padded 'YYYY-MM-DD' string.
export const dateMinus = (minus = 0) => {
  let date = new Date();
  date.setDate(date.getDate() - minus);
  let y = date.getFullYear();
  let m = date.getMonth() + 1;
  let d = date.getDate();
  return '' + y + '-' + (m < 10 ? '0' : '') + m + '-' + (d < 10 ? '0' : '') + d;
};
using System;

/// <summary>Base type for a priced recipe ingredient.</summary>
public class Ingredient
{
    /// <summary>Kind of ingredient, e.g. "Flour".</summary>
    public string IngredientType { get; set; }

    /// <summary>Price in local currency (decimal avoids binary-float rounding).</summary>
    public decimal Price { get; set; }
}

/// <summary>Ingredient measured by weight.</summary>
public class SolidIngredient : Ingredient
{
    /// <summary>Weight in kilograms (see Main's output format).</summary>
    public double Weight { get; set; }
}

/// <summary>Ingredient measured by volume.</summary>
public class LiquidIngredient : Ingredient
{
    /// <summary>Volume in liters (see Main's output format).</summary>
    public double Volume { get; set; }
}

/// <summary>Demo entry point: constructs one solid and one liquid ingredient and prints both.</summary>
public class Program
{
    public static void Main()
    {
        SolidIngredient flour = new SolidIngredient { IngredientType = "Flour", Price = 5.99m, Weight = 2.5 };
        LiquidIngredient milk = new LiquidIngredient { IngredientType = "Milk", Price = 3.49m, Volume = 1.5 };

        Console.WriteLine($"Solid Ingredient: {flour.IngredientType}, Price: {flour.Price:C}, Weight: {flour.Weight} kg");
        Console.WriteLine($"Liquid Ingredient: {milk.IngredientType}, Price: {milk.Price:C}, Volume: {milk.Volume} liters");
    }
}
#!/bin/sh
#
#  Check prerequisites for using the Duktape makefile.  Exit with an error
#  and a useful error message for missing prerequisites.
#

# ERRORS are fatal (exit 1); WARNINGS only affect optional targets.
ERRORS=0
WARNINGS=0

# Not an error: just warn when the host is not Linux.
uname -a | grep -ni linux >/dev/null
RET=$?
if [ "x$RET" != "x0" ]; then
	echo "*** Based on uname, you're not running on Linux. Duktape developer"
	echo "    makefile is intended for Linux only; YMMV on other platforms."
	echo ""
	sleep 1
fi

NODEJS_VERSION=`nodejs -v 2>/dev/null`
if [ $? != 0 ]; then
	echo "*** Missing NodeJS:"
	echo "  $ sudo apt-get install nodejs nodejs-legacy npm  # may also be 'node'"
	echo ""
	ERRORS=1
fi
#echo "NodeJS version: $NODEJS_VERSION"

# some tools like uglifyjs require 'node', not 'nodejs'
NODE_VERSION=`node -v 2>/dev/null`
if [ $? != 0 ]; then
	echo "*** Missing NodeJS legacy ('node' command):"
	echo "  $ sudo apt-get install nodejs-legacy"
	echo ""
	ERRORS=1
fi
#echo "NodeJS 'node' version: $NODE_VERSION"

GIT_VERSION=`git --version 2>/dev/null`
if [ $? != 0 ]; then
	echo "*** Missing git:"
	echo "  $ sudo apt-get install git"
	echo ""
	ERRORS=1
fi
#echo "Git version: $GIT_VERSION"

UNZIP_VERSION=`unzip -v 2>/dev/null`
if [ $? != 0 ]; then
	echo "*** Missing unzip:"
	echo "  $ sudo apt-get install unzip"
	echo ""
	ERRORS=1
fi
#echo "UNZIP_VERSION: $UNZIP_VERSION"

PERL_VERSION=`perl -version 2>/dev/null`
if [ $? != 0 ]; then
	echo "*** Missing perl:"
	echo "  $ sudo apt-get install perl"
	echo ""
	ERRORS=1
fi
#echo "PERL_VERSION: $PERL_VERSION"

# java -version writes to stderr, hence 2>&1.
JAVA_VERSION=`java -version 2>&1`
if [ $? != 0 ]; then
	echo "*** Missing java:"
	echo "  $ sudo apt-get install openjdk-7-jre"
	echo ""
	ERRORS=1
fi
#echo "JAVA_VERSION: $JAVA_VERSION"

CLANG_VERSION=`clang -v 2>&1`
if [ $? != 0 ]; then
	echo "*** Missing clang (affects emscripten tests):"
	echo "  $ sudo apt-get install clang"
	echo ""
	WARNINGS=1
fi

LLVM_LINK_VERSION=`llvm-link --version 2>&1`  # exit code will be 1
# Presence is probed with `which` because --version exits non-zero.
which llvm-link 2>/dev/null >/dev/null
if [ $? != 0 ]; then
	echo "*** Missing llvm (affects emscripten tests):"
	echo "  $ sudo apt-get install llvm"
	echo ""
	WARNINGS=1
fi

python -c 'from bs4 import BeautifulSoup, Tag' 2>/dev/null
if [ $? != 0 ]; then
	echo "*** Missing BeautifulSoup (affects website build)"
	echo "  $ sudo apt-get install python-bs4"
	echo ""
	WARNINGS=1
fi

SOURCE_HIGHLIGHT_VERSION=`source-highlight --version`
if [ $? != 0 ]; then
	echo "*** Missing source-highlight (affects website build)"
	echo "  $ sudo apt-get install source-highlight"
	echo ""
	WARNINGS=1
fi

if [ "x$ERRORS" != "x0" ]; then
	echo "*** Errors found in system setup, see error messages above!"
	exit 1
fi
if [ "x$WARNINGS" != "x0" ]; then
	echo "*** Warnings found in system setup, see warnings above"
	exit 0
fi

# 'tidy' is intentionally not checked as it only relates to website development
# and is not mandatory to website build.

exit 0
#!/bin/bash
#
# Install WireGuard via apt, non-interactively.
#
# Robustness fixes over the original one-liner:
#  - fail fast on any error instead of continuing silently
#  - give a clear message when not run as root (apt-get/dpkg would
#    otherwise fail with an opaque permission error)
set -euo pipefail

if [ "$(id -u)" -ne 0 ]; then
    echo "This script must be run as root." >&2
    exit 1
fi

apt-get -y install wireguard
# Diff the locally maintained OpenTaal configuration/i18n files against a
# sibling checkout of upstream openthesaurus.
#
# Must be started from the opentaal-openthesaurus checkout itself.
# $PWD is quoted so paths containing spaces do not break the check.
if [ "$(basename "$PWD")" != 'opentaal-openthesaurus' ]; then
    echo 'ERROR: Start this script from the directory opentaal-openthesaurus'
    echo 'See https://github.com/OpenTaal/opentaal-openthesaurus'
    exit 1
fi

# The upstream checkout must sit next to this one.
if [ ! -e ../openthesaurus ]; then
    # typo fixed: "desitination" -> "destination"
    echo 'ERROR: Missing destination directory ../openthesaurus'
    echo 'See https://github.com/danielnaber/openthesaurus'
    exit 1
fi

diff -Nup ../openthesaurus/grails-app/conf/application.yml conf/application.yml
diff -Nup ../openthesaurus/grails-app/conf/application-development.properties conf/application-development.properties
diff -Nup ../openthesaurus/grails-app/conf/application-production.properties conf/application-production.properties
# note that only one of the next files has _nl
diff -Nup ../openthesaurus/grails-app/i18n/messages_nl.properties \
  i18n/messages.properties
<gh_stars>0 from .utils import * import elasticsearch import logging logger = logging.getLogger(__name__) def change_replicas(client, indices, replicas=None): """ Change the number of replicas, more or less, for the indicated indices. This method will ignore closed indices. :arg client: The Elasticsearch client connection :arg indices: A list of indices to act on :arg replicas: The number of replicas the indices should have :rtype: bool """ if replicas == None: logger.error('No replica count provided.') return False else: indices = prune_closed(client, indices) logger.info('Updating index setting: number_of_replicas={0}'.format(replicas)) try: client.indices.put_settings(index=to_csv(indices), body='number_of_replicas={0}'.format(replicas)) return True except Exception: logger.error("Error changing replica count. Check logs for more information.") return False def replicas(client, indices, replicas=None): """ Helper method called by the CLI. :arg client: The Elasticsearch client connection :arg indices: A list of indices to act on :arg replicas: The number of replicas the indices should have :rtype: bool """ return change_replicas(client, indices, replicas=replicas)
<gh_stars>0
// Minified AMD build artifact: re-exports the `push` member of the "./arr"
// module. Generated output — edit the unminified source, not this file.
define(["./arr"],function(n){return n.push});
//# sourceMappingURL=push.min.js.map
<gh_stars>0
// Doxygen-generated navigation data for depth_to_space_end_to_end_test_impl.hpp:
// [symbol name, anchor within the generated page, children]. Do not edit by hand.
var _depth_to_space_end_to_end_test_impl_8hpp =
[
    [ "DepthToSpaceEndToEnd", "_depth_to_space_end_to_end_test_impl_8hpp.xhtml#a210d03c5b3eb0ba7aade5dc2ebe8c42f", null ]
];
<gh_stars>0 import React, { Component } from 'react'; import './App.css'; import Home from './components/Home'; import { getPokemonList, getPokemonCharacteristicsList } from './data/Fetch'; class App extends Component { constructor(props) { super(props); this.state = { pokedex: [], pokeData: [], pokemonName: '' } this.searchPokemon = this.searchPokemon.bind(this); } componentDidMount() { this.getPokemon(); } getPokemon() { getPokemonList() .then(pokemons => { pokemons = pokemons.slice(0, 50) this.setState({ pokeData: pokemons }) const pokedata= this.state.pokeData; for (let i = 0; i < pokemons.length; i++) { getPokemonCharacteristicsList(pokedata[i].url) .then(response2 =>{ const pokemonData = { name: response2.name, image: response2.sprites.front_shiny, types: response2.types, id: response2.id } let pokemonCharacteristics = this.state.pokedex; pokemonCharacteristics.push(pokemonData); this.setState({ pokedex: pokemonCharacteristics }); }); }; }) } searchPokemon(event) { const pokemonName = event.currentTarget.value.toLowerCase(); this.setState({ pokemonName: pokemonName }) } render() { return ( <div className="App"> <div className="ear ear--left"></div> <div className="ear ear--right"></div> <div className="cheek cheek--left"></div> <div className="cheek cheek--right"></div> <Home pokedex={this.state.pokedex} searchPokemon={this.searchPokemon} pokemonName={this.state.pokemonName} /> </div> ); } } export default App;
declare namespace SKIT.Storage {
    /**
     * Key/value string storage abstraction with a synchronous and a
     * Promise-based variant of every operation.
     */
    export interface StorageAdapter {
        /** Returns every key currently stored. */
        keys(): string[];
        /** Async variant of keys(). */
        keysAsync(): Promise<string[]>;
        /** Returns the value stored under `key`, or null when absent. */
        get(key: string): string | null;
        /** Async variant of get(). */
        getAsync(key: string): Promise<string | null>;
        /** Stores `val` under `key`, overwriting any existing value. */
        set(key: string, val: string): void;
        /** Async variant of set(). */
        setAsync(key: string, val: string): Promise<void>;
        /** Removes `key` from storage. */
        remove(key: string): void;
        /** Async variant of remove(). */
        removeAsync(key: string): Promise<void>;
        /** Removes all entries. */
        clear(): void;
        /** Async variant of clear(). */
        clearAsync(): Promise<void>;
    }
}
package com.hapramp.utils;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Regex helpers that rewrite markdown image/link syntax and bare image URLs
 * into HTML tags.
 */
public class RegexUtils {

  /**
   * Matches a plain (unquoted) http(s) URL ending in an image extension.
   * FIX: the extension dots are escaped ("\\.png", ...) — the previous bare
   * dots matched any character, so e.g. "fooXpng" passed as an image.
   * Compiled once as an immutable constant; the former mutable static
   * pattern/matcher fields were not thread-safe and have been removed.
   */
  private static final Pattern PLAIN_IMAGE_PATTERN =
      Pattern.compile("(^|[^\"])((http(s|):.*?)(\\.png|\\.jpeg|\\.PNG|\\.gif|\\.jpg)(.*?))( |$|\\n|<)");

  /**
   * Replaces markdown image syntax {@code ![alt](url)} with an HTML img tag.
   *
   * @param body text possibly containing markdown images
   * @return text with images replaced
   */
  public static String replaceMarkdownImage(String body) {
    return body.replaceAll("!\\[(.*?)\\]\\((.*?)[)]", "<img alt=\"$1\" src=\"$2\"/>");
  }

  /**
   * Wraps bare image URLs in img tags.
   *
   * @param body text possibly containing plain image links
   * @return text with plain image links replaced
   */
  public static String replacePlainImageLinks(String body) {
    Matcher matcher = PLAIN_IMAGE_PATTERN.matcher(body);
    while (matcher.find() && matcher.group(5).length() > 0) {
      body = new StringBuilder(body)
          .replace(matcher.start(2), matcher.end(2), "<img src=\"" + matcher.group(2) + "\"/>")
          .toString();
      // FIX: re-scan the *modified* string. The original kept using the
      // matcher bound to the pre-replacement string, so subsequent offsets
      // no longer lined up with `body`. Termination is guaranteed because
      // the replacement prefixes the URL with src=" whose quote fails the
      // (^|[^"]) guard.
      matcher = PLAIN_IMAGE_PATTERN.matcher(body);
    }
    return body;
  }

  /**
   * Replaces markdown links {@code [text](url)} with HTML anchors.
   * (Currently unused internally; kept for API parity.)
   */
  private static String replaceMarkdownLinks(String body) {
    return body.replaceAll("\\[(.*?)\\]\\((.*?)\\)", "<a href=\"$2\">$1</a>");
  }
}
def construct_email_headers(smtp_mail_from, to, cc, bcc):
    """Build the header dict for an outgoing email.

    :param smtp_mail_from: sender address used for the ``From`` header.
    :param to: list of primary recipient addresses.
    :param cc: optional CC recipients; normalized via
        ``get_email_address_list`` when truthy.
    :param bcc: accepted for signature compatibility but intentionally never
        placed in the headers (BCC recipients must not be visible).
    :returns: dict with ``From`` and ``To`` keys, plus ``CC`` when given.
    """
    msg = {}
    msg["From"] = smtp_mail_from
    msg["To"] = ", ".join(to)
    # BUG FIX: the original did ``msg.preamble = ...`` on this plain dict,
    # which raises AttributeError (a leftover from a MIME message object).
    # The preamble never contributed to the returned headers, so it and the
    # equally-unused ``recipients`` accumulator are dropped.

    if cc:
        cc = get_email_address_list(cc)
        msg["CC"] = ", ".join(cc)

    # Construct the email headers
    email_headers = {
        "From": msg["From"],
        "To": msg["To"]
    }
    if "CC" in msg:
        email_headers["CC"] = msg["CC"]
    return email_headers
package com.yin.springboot.mybatis.domain;

import java.io.Serializable;
import java.util.Date;

import lombok.Data;

/**
 * User entity (getters/setters/equals/hashCode generated by Lombok's @Data).
 */
@Data
public class User implements Serializable {
    /**
     * Primary key id
     */
    private Integer id;

    /**
     * User id
     */
    private Integer userId;

    /**
     * User group id
     */
    private Integer groupId;

    /**
     * User name
     */
    private String name;

    /**
     * User mobile phone number.
     * NOTE(review): Integer overflows for real-world phone numbers
     * (max ~2.1e9); a String or Long is likely intended — confirm against
     * the database schema before changing.
     */
    private Integer mobile;

    /**
     * Address
     */
    private String address;

    /**
     * Gender: 1 = male, 2 = female
     */
    private Integer sex;

    /**
     * Logical deletion status: 1 = normal, 2 = deleted
     */
    private Integer status;

    /**
     * Creation time
     */
    private Date createdTime;

    /**
     * Update time
     */
    private Date updatedTime;

    /**
     * Optimistic lock version
     */
    private Integer revision;

    private static final long serialVersionUID = 1L;
}
# frozen_string_literal: true def check_environment! if !Rails.env.development? raise "Database commands are only supported in development environment" end ENV['SKIP_TEST_DATABASE'] = "1" ENV['SKIP_MULTISITE'] = "1" end desc 'Run db:migrate:reset task and populate sample content for development environment' task 'dev:reset' => ['db:load_config'] do |_, args| check_environment! Rake::Task['db:migrate:reset'].invoke Rake::Task['dev:config'].invoke Rake::Task['dev:populate'].invoke end desc 'Initialize development environment' task 'dev:config' => ['db:load_config'] do |_, args| DiscourseDev.config.update! end desc 'Populate sample content for development environment' task 'dev:populate' => ['db:load_config'] do |_, args| system("redis-cli flushall") Rake::Task['groups:populate'].invoke Rake::Task['users:populate'].invoke Rake::Task['categories:populate'].invoke Rake::Task['tags:populate'].invoke Rake::Task['topics:populate'].invoke end desc 'Repopulate sample datas in development environment' task 'dev:repopulate' => ['db:load_config'] do |_, args| require 'highline/import' answer = ask("Do you want to repopulate the database with fresh data? It will recreate DBs and run migration from scratch before generating all the samples. (Y/n) ") if (answer == "" || answer.downcase == 'y') Rake::Task['dev:reset'].invoke else puts "You can run `bin/rails dev:reset` to repopulate anytime." end end
import URLSafeBase64 from 'urlsafe-base64';

/**
 * Decodes the payload (second dot-separated segment) of a JWT.
 * Note: this does NOT verify the token's signature.
 *
 * (The exported name keeps the historical "Paylode" spelling for
 * backwards compatibility with existing importers.)
 *
 * @param {string} jwtData - the raw JWT string ("header.payload.signature").
 * @returns {object} the parsed payload claims.
 * @throws {Error} when the input is not a string or has no payload segment.
 */
function JWTPaylodeDecode(jwtData) {
  if (!jwtData || typeof jwtData !== 'string') {
    // Typo fixes in the user-facing message: "recieve"/"Recieved".
    throw new Error(`function "decodeJWTPayload" must receive a url-safe base-64 encoded argument\nReceived: ${jwtData}`);
  }
  const tempPayload = jwtData.split('.')[1];
  // Robustness fix: a string without a second segment is not a JWT; fail
  // with a clear message instead of passing undefined to the decoder.
  if (tempPayload === undefined) {
    throw new Error(`function "decodeJWTPayload" received a string without a JWT payload segment\nReceived: ${jwtData}`);
  }
  const payloadBuffer = URLSafeBase64.decode(tempPayload);
  const payload = JSON.parse(payloadBuffer.toString());
  return payload;
}

export { JWTPaylodeDecode };
"""Tests for letshelp.letshelp_certbot_apache.py""" import argparse import functools import os import pkg_resources import subprocess import tarfile import tempfile import unittest import mock # six is used in mock.patch() import six # pylint: disable=unused-import import letshelp_certbot.apache as letshelp_le_apache _PARTIAL_CONF_PATH = os.path.join("mods-available", "ssl.load") _PARTIAL_LINK_PATH = os.path.join("mods-enabled", "ssl.load") _CONFIG_FILE = pkg_resources.resource_filename( __name__, os.path.join("testdata", _PARTIAL_CONF_PATH)) _PASSWD_FILE = pkg_resources.resource_filename( __name__, os.path.join("testdata", "uncommonly_named_p4sswd")) _KEY_FILE = pkg_resources.resource_filename( __name__, os.path.join("testdata", "uncommonly_named_k3y")) _SECRET_FILE = pkg_resources.resource_filename( __name__, os.path.join("testdata", "super_secret_file.txt")) _MODULE_NAME = "letshelp_certbot.apache" _COMPILE_SETTINGS = """Server version: Apache/2.4.10 (Debian) Server built: Mar 15 2015 09:51:43 Server's Module Magic Number: 20120211:37 Server loaded: APR 1.5.1, APR-UTIL 1.5.4 Compiled using: APR 1.5.1, APR-UTIL 1.5.4 Architecture: 64-bit Server MPM: event threaded: yes (fixed thread count) forked: yes (variable process count) Server compiled with.... 
-D APR_HAS_SENDFILE -D APR_HAS_MMAP -D APR_HAVE_IPV6 (IPv4-mapped addresses enabled) -D APR_USE_SYSVSEM_SERIALIZE -D APR_USE_PTHREAD_SERIALIZE -D SINGLE_LISTEN_UNSERIALIZED_ACCEPT -D APR_HAS_OTHER_CHILD -D AP_HAVE_RELIABLE_PIPED_LOGS -D DYNAMIC_MODULE_LIMIT=256 -D HTTPD_ROOT="/etc/apache2" -D SUEXEC_BIN="/usr/lib/apache2/suexec" -D DEFAULT_PIDLOG="/var/run/apache2.pid" -D DEFAULT_SCOREBOARD="logs/apache_runtime_status" -D DEFAULT_ERRORLOG="logs/error_log" -D AP_TYPES_CONFIG_FILE="mime.types" -D SERVER_CONFIG_FILE="apache2.conf" """ class LetsHelpApacheTest(unittest.TestCase): @mock.patch(_MODULE_NAME + ".copy_config") def test_make_and_verify_selection(self, mock_copy_config): mock_copy_config.return_value = (["apache2.conf"], ["apache2"]) with mock.patch("six.moves.input") as mock_input: with mock.patch(_MODULE_NAME + ".sys.stdout"): mock_input.side_effect = ["Yes", "No"] letshelp_le_apache.make_and_verify_selection("root", "temp") self.assertRaises( SystemExit, letshelp_le_apache.make_and_verify_selection, "server_root", "temp_dir") def test_copy_config(self): tempdir = tempfile.mkdtemp() server_root = pkg_resources.resource_filename(__name__, "testdata") letshelp_le_apache.copy_config(server_root, tempdir) temp_testdata = os.path.join(tempdir, "testdata") self.assertFalse(os.path.exists(os.path.join( temp_testdata, os.path.basename(_PASSWD_FILE)))) self.assertFalse(os.path.exists(os.path.join( temp_testdata, os.path.basename(_KEY_FILE)))) self.assertFalse(os.path.exists(os.path.join( temp_testdata, os.path.basename(_SECRET_FILE)))) self.assertTrue(os.path.exists(os.path.join( temp_testdata, _PARTIAL_CONF_PATH))) self.assertTrue(os.path.exists(os.path.join( temp_testdata, _PARTIAL_LINK_PATH))) def test_copy_file_without_comments(self): dest = tempfile.mkstemp()[1] letshelp_le_apache.copy_file_without_comments(_PASSWD_FILE, dest) with open(_PASSWD_FILE) as original: with open(dest) as copy: for original_line, copied_line in zip(original, copy): 
self.assertEqual(original_line, copied_line) @mock.patch(_MODULE_NAME + ".subprocess.Popen") def test_safe_config_file(self, mock_popen): mock_popen().communicate.return_value = ("PEM RSA private key", None) self.assertFalse(letshelp_le_apache.safe_config_file("filename")) mock_popen().communicate.return_value = ("ASCII text", None) self.assertFalse(letshelp_le_apache.safe_config_file(_PASSWD_FILE)) self.assertFalse(letshelp_le_apache.safe_config_file(_KEY_FILE)) self.assertFalse(letshelp_le_apache.safe_config_file(_SECRET_FILE)) self.assertTrue(letshelp_le_apache.safe_config_file(_CONFIG_FILE)) @mock.patch(_MODULE_NAME + ".subprocess.Popen") def test_tempdir(self, mock_popen): mock_popen().communicate.side_effect = [ ("version", None), ("modules", None), ("vhosts", None)] args = _get_args() tempdir = letshelp_le_apache.setup_tempdir(args) with open(os.path.join(tempdir, "config_file")) as config_fd: self.assertEqual(config_fd.read(), args.config_file + "\n") with open(os.path.join(tempdir, "version")) as version_fd: self.assertEqual(version_fd.read(), "version") with open(os.path.join(tempdir, "modules")) as modules_fd: self.assertEqual(modules_fd.read(), "modules") with open(os.path.join(tempdir, "vhosts")) as vhosts_fd: self.assertEqual(vhosts_fd.read(), "vhosts") @mock.patch(_MODULE_NAME + ".subprocess.check_call") def test_verify_config(self, mock_check_call): args = _get_args() mock_check_call.side_effect = [ None, OSError, subprocess.CalledProcessError(1, "apachectl")] letshelp_le_apache.verify_config(args) self.assertRaises(SystemExit, letshelp_le_apache.verify_config, args) self.assertRaises(SystemExit, letshelp_le_apache.verify_config, args) @mock.patch(_MODULE_NAME + ".subprocess.Popen") def test_locate_config(self, mock_popen): mock_popen().communicate.side_effect = [ OSError, ("bad_output", None), (_COMPILE_SETTINGS, None)] self.assertRaises( SystemExit, letshelp_le_apache.locate_config, "ctl") self.assertRaises( SystemExit, 
letshelp_le_apache.locate_config, "ctl") server_root, config_file = letshelp_le_apache.locate_config("ctl") self.assertEqual(server_root, "/etc/apache2") self.assertEqual(config_file, "apache2.conf") @mock.patch(_MODULE_NAME + ".argparse") def test_get_args(self, mock_argparse): argv = ["-d", "/etc/apache2"] mock_argparse.ArgumentParser.return_value = _create_mock_parser(argv) self.assertRaises(SystemExit, letshelp_le_apache.get_args) server_root = "/etc/apache2" config_file = server_root + "/apache2.conf" argv = ["-d", server_root, "-f", config_file] mock_argparse.ArgumentParser.return_value = _create_mock_parser(argv) args = letshelp_le_apache.get_args() self.assertEqual(args.apache_ctl, "apachectl") self.assertEqual(args.server_root, server_root) self.assertEqual(args.config_file, os.path.basename(config_file)) server_root = "/etc/apache2" config_file = "/etc/httpd/httpd.conf" argv = ["-d", server_root, "-f", config_file] mock_argparse.ArgumentParser.return_value = _create_mock_parser(argv) self.assertRaises(SystemExit, letshelp_le_apache.get_args) def test_main_with_args(self): with mock.patch(_MODULE_NAME + ".get_args"): self._test_main_common() def test_main_without_args(self): with mock.patch(_MODULE_NAME + ".get_args") as get_args: args = _get_args() server_root, config_file = args.server_root, args.config_file args.server_root = args.config_file = None get_args.return_value = args with mock.patch(_MODULE_NAME + ".locate_config") as locate: locate.return_value = (server_root, config_file) self._test_main_common() def _test_main_common(self): with mock.patch(_MODULE_NAME + ".verify_config"): with mock.patch(_MODULE_NAME + ".setup_tempdir") as mock_setup: tempdir_path = tempfile.mkdtemp() mock_setup.return_value = tempdir_path with mock.patch(_MODULE_NAME + ".make_and_verify_selection"): testdir_basename = "test" os.mkdir(os.path.join(tempdir_path, testdir_basename)) letshelp_le_apache.main() tar = tarfile.open(os.path.join( tempdir_path, "config.tar.gz")) 
tempdir = tar.next() self.assertTrue(tempdir.isdir()) self.assertEqual(tempdir.name, ".") testdir = tar.next() self.assertTrue(testdir.isdir()) self.assertEqual(os.path.basename(testdir.name), testdir_basename) self.assertEqual(tar.next(), None) def _create_mock_parser(argv): parser = argparse.ArgumentParser() mock_parser = mock.MagicMock() mock_parser.add_argument = parser.add_argument mock_parser.parse_args = functools.partial(parser.parse_args, argv) return mock_parser def _get_args(): args = argparse.Namespace() args.apache_ctl = "apache_ctl" args.config_file = "config_file" args.server_root = "server_root" return args if __name__ == "__main__": unittest.main() # pragma: no cover
#!/bin/sh
#
# Cross-build FlapjackOS for the given target architecture and boot the
# resulting ISO under bochs.
set -eu

# BUG FIX: with `set -u`, a bare $1 aborts with "unbound variable" before
# the friendly usage message below can run; default it to empty instead.
TARGET=${1:-}

if [ -z "$TARGET" ]; then
	echo "Must specify a target, e.g., \"x86_64\"."
	exit 1
fi

PROJECT_DIR="$(pwd)"
# NOTE(review): FLAPJACKOS_BUILD_DIR must be exported by the caller; with
# `set -u` an unset value aborts here — confirm that is the intended UX.
BUILD_PREFIX="$FLAPJACKOS_BUILD_DIR/build"
CROSS_BUILD_DIR="$BUILD_PREFIX/$TARGET"

# Configure with the per-target cross toolchain, then build.
CONFIGURE="./scripts/do_configure_build.sh"
"$CONFIGURE" "$PROJECT_DIR" "$CROSS_BUILD_DIR" -DCMAKE_TOOLCHAIN_FILE="$TARGET-elf.toolchain.cmake"

BUILD="./scripts/do_build.sh"
"$BUILD" "$CROSS_BUILD_DIR"

# Render the bochs config from its template, reset the parallel-port log,
# and boot the freshly built ISO.
erb iso="$CROSS_BUILD_DIR/FlapjackOS.iso" bochsrc.erb > bochsrc
echo > parport.out
bochs -q -rc bochs_commands.txt
#!/bin/sh
###################
# This is property of eXtremeSHOK.com
# You are free to use, modify and distribute, however you may not remove this notice.
# Copyright (c) Adrian Jon Kriel :: admin@extremeshok.com
# License: BSD (Berkeley Software Distribution)
##################

# Emit the fixed acknowledgement line expected by the calling pipeline.
printf '%s\n' '.. forced OK'
/**
 * Development config file for webpack
 * Creates two bundles:
 * 1. bundle.js -> Our application
 * 2. vendor.js -> Vendor bundle containing libraries
 *
 * Injects webpack-dev-server to our page so hot loading
 * of our application is possible.
 */
const path = require('path');
const Webpack = require('webpack');

// All build artifacts are emitted into ./dist.
const buildPath = path.resolve(__dirname, 'dist');

module.exports = {
  /**
   * Key, value config defining the entry points to our application.
   * 1. Bundle entry contains our application entry point and webpack-dev-server entry points.
   * Dev-server is added to the bundle so our application can be hot reloaded while developing
   *
   * 2. vendor entry contains vendor libraries from node_modules. Every time for example react is
   * required/imported webpack replaces that with a module from our vendor bundle
   */
  entry: {
    index: [
      'webpack-dev-server/client?http://0.0.0.0:3000',
      'webpack/hot/dev-server',
      './app/front/index.js'
    ],
    vendor: ['core-js', 'react']
  },
  // Allow imports without an explicit .js/.jsx extension.
  resolve: {
    extensions: ['', '.js', '.jsx']
  },
  /**
   * Output files from our build process. [name].js (or [id].js) will create a file
   * based on the key value of entry point configuration.
   */
  output: {
    path: buildPath,
    filename: '[name].js',
    publicPath: '/static/dist/'
  },
  /**
   * Additional loaders that webpack will run against the bundle response creates.
   * For our production build we use babel and eslint.
   *
   * React hot loader implements hot loading functionality for our react components.
   * This way when running our development server (dev-server) we can modify files and
   * the dev-server will refresh our application keeping the state intact.
   *
   * Babel transpiles ES6 and JSX files to ES5 javascript so response is compatible
   * to current browser versions.
   *
   * Eslint runs static analysis against our code and errors or warns in case
   * we have written possibly bad code.
   */
  module: {
    loaders: [
      {test: /\.jsx?$/, exclude: /node_modules/, loader: 'react-hot-loader'},
      {test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel-loader'},
      {test: /\.css$/, exclude: /node_modules/, loader: 'style-loader!css-loader'},
      {test: /\.scss$/, exclude: /node_modules/, loader: 'style-loader!css-loader!sass-loader'},
      {test: /\.jsx?$/, exclude: /node_modules/, loader: 'eslint-loader'}
    ]
  },
  // Eslint config file location
  eslint: {
    configFile: './.eslintrc'
  },
  /**
   * We tell webpack to create a source map for our devtools. Source maps are supported
   * by both Chrome and Firefox.
   */
  devtool: 'source-map',
  /**
   * Our additional plugins to be used during the build process
   */
  plugins: [
    // HotModuleReplacement runs on our dev server and hot swap new code
    // when changes to the codebase is made during development
    new Webpack.HotModuleReplacementPlugin(),
    // Compile-time constants available to application code.
    new Webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: '"development"'
      },
      __CLIENT__: true,
      __SERVER__: false,
      __DEVELOPMENT__: false,
      __DEVTOOLS__: false
    }),
    // NoErrors plugin makes sure that build process is run only when
    // there are no errors in the code.
    new Webpack.NoErrorsPlugin()
  ]
};
#!/usr/bin/env bash
#please download dataset from tianchi
# tar -zxf tianchi_datasets.tgz
# rm *.tgz

# Robustness fix: abort on the first failure. Previously a failed python
# preprocessing step went unnoticed and the *.list_tfrecord files were still
# written, pointing at tfrecords that were never produced.
set -euo pipefail

# Convert each task's train/dev CSVs into tfrecords.
for TASK_NAME in TNEWS OCEMOTION OCNLI
do
  echo " task name is $TASK_NAME"
  python convert_csv_to_tfrecords.py --mode preprocess --config config/${TASK_NAME}_preprocess_train.json
  python convert_csv_to_tfrecords.py --mode preprocess --config config/${TASK_NAME}_preprocess_dev.json
done

# Build the train/dev list files (one absolute tfrecord path per line).
ls -d $PWD/tianchi_datasets/TNEWS/train.tfrecord > train.list_tfrecord
ls -d $PWD/tianchi_datasets/OCEMOTION/train.tfrecord >> train.list_tfrecord
ls -d $PWD/tianchi_datasets/OCNLI/train.tfrecord >> train.list_tfrecord

ls -d $PWD/tianchi_datasets/TNEWS/dev.tfrecord > dev.list_tfrecord
ls -d $PWD/tianchi_datasets/OCEMOTION/dev.tfrecord >> dev.list_tfrecord
ls -d $PWD/tianchi_datasets/OCNLI/dev.tfrecord >> dev.list_tfrecord
/**
 * Digi-Lib-Util - utility module of all Digi applications and libraries, containing various common routines
 *
 * Copyright (c) 2012-2013 <NAME> <EMAIL>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.digimead.digi.lib.util

import annotation.implicitNotFound
import scala.language.implicitConversions

/**
 * Write-once container: holds a value that may be assigned once via `:=`,
 * gated by a credential token obtained from `allowAssignment`.
 */
class SetOnce[T] {
  private[this] var value: Option[T] = None

  /** True once a final value has been assigned. */
  def isSet = value.isDefined
  /** Throws IllegalStateException when no value has been assigned yet. */
  def ensureSet { if (value.isEmpty) throwISE("uninitialized value") }
  /** Returns the value; throws IllegalStateException if unset. */
  def apply() = { ensureSet; value.get }
  /**
   * Assigns the final value. Requires an implicit credential issued by
   * `allowAssignment`.
   * NOTE(review): a credential captured before assignment could be reused to
   * overwrite the value, since `:=` does not re-check — confirm intent.
   */
  def :=(finalValue: T)(implicit credential: SetOnceCredential) {
    value = Some(finalValue)
  }
  /** Issues the assignment credential; throws if a value is already set. */
  def allowAssignment = {
    if (value.isDefined) throwISE("final value already set")
    else new SetOnceCredential
  }

  private def throwISE(msg: String) = throw new IllegalStateException(msg)

  @implicitNotFound(msg = "This value cannot be assigned without the proper credential token.")
  class SetOnceCredential private[SetOnce]
}

/** Implicitly unwraps a SetOnce to its value (throws when unset). */
object SetOnce {
  implicit def unwrap[A](wrapped: SetOnce[A]): A = wrapped()
}
#!/bin/bash
# Start the configured MySQL service and poll until it accepts connections.
# $1: number of attempts (default 60); $2: seconds between attempts (default 10).
service {{ service.service }} start

counter=${1:-60}
retries=0

while [ $counter -gt 0 ]
do
    # Try a root login first (fresh install, no password), then the
    # configured admin account.
    # NOTE(review): passing the password on the command line exposes it to
    # `ps`; consider --defaults-extra-file — confirm deployment constraints.
    if mysql -u root -e"quit" || mysql -u {{ service.admin.user }} -p{{ service.admin.password }} -e"quit"; then
        # Typo fixed in the status message: "Sucessfully" -> "Successfully".
        echo "Successfully connected to the MySQL service ($retries retries)."
        exit 0
    fi
    counter=$(( counter - 1 ))
    retries=$(( retries + 1 ))
    sleep ${2:-10}
done

echo "Failed to connect to the MySQL service after $retries retries."
exit 1
/*
 * This file is part of the BenGorCookies library.
 *
 * (c) <NAME> <<EMAIL>>
 * (c) <NAME> <<EMAIL>>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

import Ie9 from './../../../src/js/Templates/Ie9';

// Snapshot-style check: Ie9() must return the legacy-IE <table> markup with
// the supplied text injected. The expected string is whitespace-sensitive
// and is therefore kept byte-for-byte; do not reformat it.
test('Renders default template passing custom text', () => {
  expect(Ie9({text: 'This is a test cookies'})).toBe(
    `<div class="cookies bengor-cookies js-bengor-cookies"> <table class="bengor-cookies__content-wrapper" cellpadding="0" cellspacing="0" width="100%"> <tr> <td class="bengor-cookies__content"> <p class="bengor-cookies__text"> This is a test cookies <a href="/cookies" class="bengor-cookies__link" target="_blank">Cookies policy</a>. </p> </td> <td class="bengor-cookies__actions"> <a class="bengor-cookies__button js-bengor-cookies-accept">Accept</a> </td> </tr> </table> </div>`
  );
});
/*
 * Copyright (c) 2017 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.kebernet.configuration.server.model;

import com.google.common.base.Charsets;
import net.kebernet.configuration.client.model.SettingValue;

import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 *
 * A repository that store settings to the settings.properties file in the storage directory.
 *
 * Created by rcooper on 7/18/17.
 */
@Singleton
public class SettingValueRepository {
    private final File storageDirectory;
    private final Properties properties = new Properties();
    private final File settingsFile;
    private final String defaultDeviceName;
    /** Last-modified timestamp of the settings file when it was last read. */
    private long lastRead = Long.MIN_VALUE;

    @Inject
    public SettingValueRepository(@Named("storageDirectory") File storageDirectory,
                                  @Named("defaultDeviceName") String defaultDeviceName) {
        this.storageDirectory = storageDirectory;
        this.defaultDeviceName = defaultDeviceName;
        this.settingsFile = new File(storageDirectory, "settings.properties");
    }

    /**
     * Reloads the properties from disk if the file changed since the last load.
     *
     * @return true when a (re)load happened, false when the cache was current.
     * @throws IOException if the file cannot be read.
     */
    public synchronized boolean load() throws IOException {
        // BUG FIX: use the settingsFile field rather than rebuilding the
        // path into a shadowing local, and record the file's mtime after a
        // successful read — previously lastRead was never updated, so every
        // call re-read the file from disk.
        if (settingsFile.exists() && lastRead < settingsFile.lastModified()) {
            properties.clear();
            try (Reader r = new InputStreamReader(new FileInputStream(settingsFile), Charsets.UTF_8)) {
                properties.load(r);
            }
            lastRead = settingsFile.lastModified();
            return true;
        }
        return false;
    }

    /**
     * Returns every stored setting as a SettingValue.
     */
    public List<SettingValue> getValues() {
        return properties.entrySet()
                .stream()
                .map(e -> new SettingValue(e.getKey().toString(),
                        e.getValue() == null ? null : e.getValue().toString()))
                .collect(Collectors.toList());
    }

    /**
     * Applies the given values (null value removes the setting) and persists
     * the result to settings.properties.
     *
     * @throws IOException if the file cannot be created or written.
     */
    public synchronized void storeValues(List<SettingValue> values) throws IOException {
        values.forEach(v -> {
            if (v.getValue() == null) {
                properties.remove(v.getName());
            } else {
                properties.setProperty(v.getName(), v.getValue());
            }
        });
        if (!settingsFile.exists() && !settingsFile.createNewFile()) {
            throw new IOException("Failed to create " + settingsFile.getAbsolutePath());
        }
        try (Writer w = new OutputStreamWriter(new FileOutputStream(settingsFile), Charsets.UTF_8)) {
            properties.store(w, "Device settings");
        }
    }

    /** Returns the setting's value, or defaultValue when absent. */
    public String findValue(String settingName, String defaultValue) {
        return properties.getProperty(settingName, defaultValue);
    }

    /** Returns the configured host name, falling back to the injected default. */
    public String getDeviceName() {
        return findValue("host_name", defaultDeviceName);
    }
}
<gh_stars>1-10
// NOTE: this file is transpiled (Babel, ES5) build output of the undo/redo
// reducer enhancer; prefer editing the ES6 source over this artifact.

// Transpiled helper: shallow-merge of own enumerable props (Object.assign fallback).
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };

import { initialState } from '../diagram/reducer';

// Maximum number of past snapshots retained for undo.
var historyLimit = 50;

// Reducer enhancer: wraps `reducer` and maintains state.history
// ({ past, future, lastAction }) with entity/metaEntity snapshots,
// implementing UNDO/REDO and snapshot coalescing for repeated actions.
var history = function history(reducer) {
  return function (state, action) {
    if (!state) {
      state = initialState;
    }

    var nextState = reducer(state, action);

    switch (action.type) {
      // Tracking/init actions never create history entries.
      case 'rd/canvas/TRACK':
      case '@@INIT':
      case '@@redux/INIT':
        return nextState;

      case 'rd/history/UNDO':
        {
          // Pop the most recent past snapshot (if any) and push the current
          // entity state onto the future stack.
          var pastStep = nextState.history.past[nextState.history.past.length - 1];

          return pastStep ? _extends({}, nextState, {
            entity: pastStep.entity,
            metaEntity: pastStep.metaEntity,
            history: {
              past: nextState.history.past.slice(0, nextState.history.past.length - 1),
              future: [{
                entity: nextState.entity,
                metaEntity: nextState.metaEntity
              }].concat(nextState.history.future),
              lastAction: nextState.history.lastAction
            }
          }) : nextState;
        }

      case 'rd/history/REDO':
        {
          // Shift the first future snapshot (if any) and push the pre-action
          // entity state onto the past stack.
          var futureStep = nextState.history.future[0];

          return futureStep ? _extends({}, nextState, {
            entity: futureStep.entity,
            metaEntity: futureStep.metaEntity,
            history: {
              past: [].concat(nextState.history.past, [{
                entity: state.entity,
                metaEntity: state.metaEntity
              }]),
              future: nextState.history.future.slice(1),
              lastAction: nextState.history.lastAction
            }
          }) : nextState;
        }

      default:
        // Coalesce: repeating the same action type does not create a new
        // snapshot; otherwise record the pre-action state (capped at
        // historyLimit) and clear the redo stack.
        if (action.type === state.history.lastAction) {
          return nextState;
        } else {
          var newPast = [].concat(nextState.history.past, [{
            entity: state.entity,
            metaEntity: state.metaEntity
          }]);

          return _extends({}, nextState, {
            history: {
              past: newPast.length > historyLimit ? newPast.slice(1) : newPast,
              future: [],
              lastAction: action.type
            }
          });
        }
    }
  };
};

// Action creators for the history enhancer.
export var undo = function undo() {
  return {
    type: 'rd/history/UNDO',
    payload: undefined
  };
};

export var redo = function redo() {
  return {
    type: 'rd/history/REDO',
    payload: undefined
  };
};

export default history;
#!/bin/bash # strict mode set -euo pipefail IFS=$'\n\t' if [[ -z ${DRY_RUN:-} ]]; then PREFIX="" else PREFIX="echo" fi # input validation if [[ -z ${GITHUB_TOKEN:-} ]]; then echo "GITHUB_TOKEN environment variable must be set before running." >&2 exit 1 fi if [[ $# -ne 1 || $1 == "" ]]; then echo "This program requires one argument: the version number, in 'vM.N.P' format." >&2 exit 1 fi VERSION=$1 # Change to root of the repo cd "$(dirname "$0")/.." # GitHub release $PREFIX git tag "$VERSION" # make sure GITHUB_TOKEN is exported, for the benefit of this next command export GITHUB_TOKEN GO111MODULE=on $PREFIX make release # if that was successful, it could have touched go.mod and go.sum, so revert those $PREFIX git checkout go.mod go.sum # Docker release # make sure credentials are valid for later push steps; this might # be interactive since this will prompt for username and password # if there are no valid current credentials. $PREFIX docker login echo "$VERSION" > VERSION $PREFIX docker build -t "fullstorydev/grpcurl:${VERSION}" . rm VERSION # push to docker hub, both the given version as a tag and for "latest" tag $PREFIX docker push "fullstorydev/grpcurl:${VERSION}" $PREFIX docker tag "fullstorydev/grpcurl:${VERSION}" fullstorydev/grpcurl:latest $PREFIX docker push fullstorydev/grpcurl:latest # Homebrew release URL="https://github.com/fullstorydev/grpcurl/archive/${VERSION}.tar.gz" curl -L -o tmp.tgz "$URL" SHA="$(sha256sum < tmp.tgz | awk '{ print $1 }')" rm tmp.tgz HOMEBREW_GITHUB_API_TOKEN="$GITHUB_TOKEN" $PREFIX brew bump-formula-pr --url "$URL" --sha256 "$SHA" grpcurl
package SocketConnection;

import java.io.*;
import java.net.Socket;
import QueryParser.QueryWorker;

/**
 * ConnectionWorker is the main thread that handles incoming calls for data. It parses the incoming inputstream
 * from a socket as a string (because PHP sends strings through sockets) and processes them accordingly.
 */
public class ConnectionWorker implements Runnable{

    // Socket accepted by the listener; closed by this worker when done.
    private Socket connection;
    // Reply produced by a QueryWorker thread via setReturnQuery(); null until a query completes.
    // NOTE(review): written by another thread and polled here without volatile or
    // synchronization — visibility is not guaranteed by the JMM; confirm.
    private String returnQuery = null;
    private BufferedWriter bufferedWriter;
    private BufferedReader bufferedReader;

    ConnectionWorker(Socket connection){
        this.connection = connection;
        try {
            bufferedWriter = new BufferedWriter(new OutputStreamWriter(connection.getOutputStream()));
            bufferedReader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        }catch (IOException ioException){
            System.out.println("Error creating Buffered Reader or Writer:" + ioException.toString());
        }
    }

    /**
     * The run method of ConnectionWorker constantly looks for input over the socket. Once the PHP script sends a String
     * over the socket the run method will process it based on the String contents. If a request for data is made the
     * run method will create a new QueryWorker thread to handle the query request and terminate the connection after
     * the query has been successfully returned to the sender.
     */
    @Override
    public void run() {
        try{
            //Check the Semaphore to see if there is room for another connection
            Main.sem.attempt();
            String input;
            // NOTE(review): this loop polls ready()/returnQuery without sleeping,
            // so it busy-spins while waiting — confirm whether that is intended.
            while(connection.isConnected()){
                //If a String is send over the socket, handle it according to the contents of that String
                if ((bufferedReader.ready() && (input = bufferedReader.readLine()) != null)) {
                    //Process the standard list of countries when 'update' is sent over the socket
                    if (input.startsWith("update")) {
                        System.out.println("Query received for update");
                        String[] countries = {"FRANCE", "MEXICO", "UNITED STATES", "SPAIN", "NORTH POLE", "SOUTH POLE"};
                        Thread queryThread = new Thread(new QueryWorker(this, countries , 10, null));
                        queryThread.start();
                    //Process a custom list of countries and datacount if the socketcommand starts with 'fetch'
                    }else if(input.startsWith("fetch")){
                        System.out.println("Query received for fetch: " + input);
                        try {
                            //Split the fetch command into an array of countries and an integer for the data count
                            String[] arguments = input.split(";");
                            String[] countries = arguments[1].toUpperCase().split(",");
                            int count = Integer.parseInt(arguments[2]);
                            Thread queryThread = new Thread(new QueryWorker(this, countries, count, null));
                            queryThread.start();
                        //Process the standard list of countries from a given date if the socketcommand starts with 'history'
                        }catch (Exception exception){
                            writeOut("Error Parsing Query");
                            System.out.println(exception.toString());
                        }
                    }else if(input.startsWith("history")){
                        System.out.println("Query received for history: " + input);
                        //Split the fetch command into an array of countries and an integer for the data count
                        String[] arguments = input.split(";");
                        String date = arguments[1];
                        String[] countries = {"FRANCE", "MEXICO", "UNITED STATES", "SPAIN", "NORTH POLE", "SOUTH POLE"};
                        Thread queryThread = new Thread(new QueryWorker(this, countries , 10, Main.dayPath + date + ".csv"));
                        queryThread.start();
                    }else{
                        writeOut("Invalid Request");
                    }
                }
                // A QueryWorker has posted a reply: send it (empty string for "No Data")
                // and stop servicing this connection.
                if (returnQuery != null){
                    if(returnQuery.equals("No Data")){
                        writeOut("");
                    }else{
                        writeOut(returnQuery);
                    }
                    break;
                }
            }
            //After a reply has been sent over the socket, close the connection and update the Semaphore
            // NOTE(review): this release is not in a finally block, so an exception
            // above leaks the semaphore permit — confirm and consider try/finally.
            connection.close();
            Main.sem.close();
        }catch (InterruptedException ieException){
            System.out.println("Thread interruption error :" + ieException.toString());
        }catch (IOException ioException){
            System.out.println("IO Error 1:" + ioException.toString());
        }
    }

    /**
     * Used by the query parser thread to update the reply that needs to be sent back
     *
     * @param query The String that needs to be sent back over the socket as a reply
     */
    public void setReturnQuery(String query){
        returnQuery = query;
    }

    /**
     * Writes a message to the socket that was used to establish a connection
     *
     * @param outputString The String that needs to be written over the socket
     */
    private void writeOut(String outputString){
        try {
            bufferedWriter.write(outputString + "\r\n");
            bufferedWriter.flush();
        }catch (IOException ioException){
            System.out.println("IO Error :" + ioException.toString());
        }
    }
}
import { FormComponentConstructor, TypedValue, VoidValue, FormComponent, IFormComponentOptions, FieldModel, IFieldModelOptions, StringValue, IFieldModelState } from '.';
import { IFormComponentState } from './FormComponent';
import { ILookupModelState } from './LookupModel';

/** Options for a lookup button: the name of the lookup it should trigger. */
export interface ILookupButtonOptions extends IFieldModelOptions {
    lookup: string | undefined
}

/**
 * Value-less form field that, when activated, runs a named lookup
 * registered on the parent form.
 */
export class LookupButtonModel extends FieldModel<VoidValue, ILookupButtonOptions> {

    // The button carries no real value, so its persisted state is always
    // the default (null internal value).
    getState(): IFieldModelState {
        return { id: this.id, key: this.options.name, internalValue: null, valueIsDefault: true}
    }

    constructor(parent: FormComponent<IFormComponentOptions, IFormComponentState>, options: ILookupButtonOptions, state?: IFieldModelState|undefined) {
        super(VoidValue, parent, options, state);
        // NOTE(review): restoring from state replaces the value with a fresh
        // VoidValue — presumably because a void value has nothing to restore;
        // confirm against FieldModel's state handling.
        if (state) {
            this.value = new VoidValue();
        }
    }

    // Leaf component: no child containers.
    protected getChildContainers() {
        return[];
    }

    // A void field cannot be derived from text input.
    protected getDefaultValueFromText(text: StringValue): VoidValue {
        throw new Error('Method not implemented.');
    }

    protected async getDefaultValueAsync(): Promise<VoidValue> {
        return new VoidValue();
    }

    /**
     * Resolves the configured lookup on the parent form and executes it.
     * Does nothing when no lookup name is configured or the field has no
     * parent form; throws when the named lookup cannot be found.
     */
    async runLookupAsync() : Promise<void> {
        if (!this.options.lookup || !this.parentForm) {
            return;
        }
        let lookup = this.parentForm.getLookup(this.options.lookup);
        if (!lookup ) {
            throw Error(`Lookup ${this.options.lookup} not found`)
        }
        await lookup.getResultsAsync(this.parentForm);
    }
}
# Launch the webmin-docker image in a new interactive container
# (-i keeps STDIN open, -t allocates a pseudo-TTY).
sudo docker run -it webmin-docker
<gh_stars>0 import { Component, OnInit, Output, EventEmitter } from '@angular/core'; import {GameApiService} from '../game-list/services/game-api.service'; import {Categories} from '../interfaces/categories'; type TargetType = any | { name: string, value: string }; @Component({ selector: 'app-game-list-filter', templateUrl: './game-list-filter.component.html', styleUrls: ['./game-list-filter.component.scss'] }) export class GameListFilterComponent implements OnInit { @Output() filtered = new EventEmitter(); games: any; categories: Categories[]; constructor(private gameApi: GameApiService) { // Nothing to do here.. } form = { name: '', type: '', editor: '', }; setValue(target: TargetType) { event.preventDefault(); this.form[target.name] = target.value; console.log(this.form); } filter() { event.preventDefault(); console.log(this.form); this.filtered.emit(this.form); } razFilter() { const that = this.form; Object.keys(this.form).map((key, index) => { that[key] = ''; }); this.filtered.emit(this.form); } getCategoriesList() { this.gameApi.getAllCategories() .subscribe((data: Categories[]) => { this.categories = data; }); } ngOnInit() { this.getCategoriesList(); } }
// Aggregates every individual rule-config module in this directory and
// re-exports them as a single ordered list (order mirrors the imports).
import _arrayElementNewline_ from './array-element-newline'
import _braceStyle_ from './brace-style'
import _camelcase_ from './camelcase'
import _consistentReturn_ from './consistent-return'
import _curly_ from './curly'
import _idMatch_ from './id-match'
import _indent_ from './indent'
import _linesBetweenClassMembers_ from './lines-between-class-members'
import _newCap_ from './new-cap'
import _noCaseDeclarations_ from './no-case-declarations'
import _noFuncAssign_ from './no-func-assign'
import _noLabelVar_ from './no-label-var'
import _noMixedSpacesAndTabs_ from './no-mixed-spaces-and-tabs'
import _noNonoctalDecimalEscape_ from './no-nonoctal-decimal-escape'
import _noRestrictedProperties_ from './no-restricted-properties'
import _noReturnAssign_ from './no-return-assign'
import _noThisBeforeSuper_ from './no-this-before-super'
import _noUnexpectedMultiline_ from './no-unexpected-multiline'
import _noUnusedVars_ from './no-unused-vars'
import _noUselessCall_ from './no-useless-call'
import _noUselessRename_ from './no-useless-rename'
import _preferArrowCallback_ from './prefer-arrow-callback'
import _preferNumericLiterals_ from './prefer-numeric-literals'
import _preferRestParams_ from './prefer-rest-params'
import _preferSpread_ from './prefer-spread'
import _quotes_ from './quotes'
import _semiStyle_ from './semi-style'
import _semi_ from './semi'
import _sortKeys_ from './sort-keys'
import _spaceInParens_ from './space-in-parens'
import _wrapIife_ from './wrap-iife'
import _yoda_ from './yoda'

export default [
  _arrayElementNewline_,
  _braceStyle_,
  _camelcase_,
  _consistentReturn_,
  _curly_,
  _idMatch_,
  _indent_,
  _linesBetweenClassMembers_,
  _newCap_,
  _noCaseDeclarations_,
  _noFuncAssign_,
  _noLabelVar_,
  _noMixedSpacesAndTabs_,
  _noNonoctalDecimalEscape_,
  _noRestrictedProperties_,
  _noReturnAssign_,
  _noThisBeforeSuper_,
  _noUnexpectedMultiline_,
  _noUnusedVars_,
  _noUselessCall_,
  _noUselessRename_,
  _preferArrowCallback_,
  _preferNumericLiterals_,
  _preferRestParams_,
  _preferSpread_,
  _quotes_,
  _semiStyle_,
  _semi_,
  _sortKeys_,
  _spaceInParens_,
  _wrapIife_,
  _yoda_
]
#!/bin/bash set -e set -x if [[ "$(uname -s)" == 'Darwin' ]]; then brew update || brew update brew outdated pyenv || brew upgrade pyenv brew install pyenv-virtualenv brew install cmake || true brew install nasm || true if which pyenv > /dev/null; then eval "$(pyenv init -)" fi pyenv install 3.7.1 pyenv virtualenv 3.7.1 conan pyenv rehash pyenv activate conan else sudo apt-get update sudo apt-get install -y nasm autoconf dh-autoreconf fi pip install conan --upgrade pip install conan_package_tools bincrafters_package_tools conan user
#!/bin/sh # # Copyright (c) 2007 Eric Wong # # Don't run this test by default unless the user really wants it # I don't like the idea of taking a port and possibly leaving a # daemon running on a users system if the test fails. # Not all git users will need to interact with SVN. test_description='git svn dcommit new files over svn:// test' . ./lib-git-svn.sh require_svnserve test_expect_success 'start tracking an empty repo' ' svn_cmd mkdir -m "empty dir" "$svnrepo"/empty-dir && echo "[general]" > "$rawsvnrepo"/conf/svnserve.conf && echo anon-access = write >> "$rawsvnrepo"/conf/svnserve.conf && start_svnserve && git svn init svn://127.0.0.1:$SVNSERVE_PORT && git svn fetch ' test_expect_success 'create files in new directory with dcommit' " mkdir git-new-dir && echo hello > git-new-dir/world && git update-index --add git-new-dir/world && git commit -m hello && start_svnserve && git svn dcommit " test_done
/* * @(#)GlobalPanel.java 1.18 04/07/26 * * Copyright (c) 2004 Sun Microsystems, Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * -Redistribution of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * -Redistribution in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of Sun Microsystems, Inc. or the names of contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * This software is provided "AS IS," without a warranty of any kind. ALL * EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING * ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MIDROSYSTEMS, INC. ("SUN") * AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE * AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST * REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, * INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY * OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, * EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * * You acknowledge that this software is not designed, licensed or intended * for use in the design, construction, operation or maintenance of any * nuclear facility. 
*/

/*
 * @(#)GlobalPanel.java	1.18 04/07/26
 */

package java2d;

import java.awt.GridBagLayout;
import java.awt.BorderLayout;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.border.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

/**
 * Panel that holds the Demo groups, Controls and Monitors for each tab.
 * It's a special "always visible" panel for the Controls, MemoryMonitor &
 * PerformanceMonitor.
 */
public class GlobalPanel extends JPanel implements ChangeListener {

    // Side panel holding the controls plus the two monitors.
    private JPanel p;
    // Index into Java2Demo.group of the currently displayed demo group.
    private int index;

    public GlobalPanel() {
        setLayout(new BorderLayout());
        p = new JPanel(new GridBagLayout());
        EmptyBorder eb = new EmptyBorder(5,0,5,5);
        BevelBorder bb = new BevelBorder(BevelBorder.LOWERED);
        p.setBorder(new CompoundBorder(eb,bb));
        Java2Demo.addToGridBag(p,Java2Demo.controls,0,0,1,1,0,0);
        Java2Demo.addToGridBag(p,Java2Demo.memorymonitor,0,1,1,1,0,0);
        Java2Demo.addToGridBag(p,Java2Demo.performancemonitor,0,2,1,1,0,0);
        add(Java2Demo.intro);
    }

    /**
     * Tab-change handler: shuts down the previously visible group's demos,
     * then either shows the intro surface (tab 0) or swaps in the newly
     * selected group alongside the always-visible controls/monitor panel.
     */
    public void stateChanged(ChangeEvent e) {
        // Stop the demos of the group that was visible before this change.
        Java2Demo.group[index].shutDown(Java2Demo.group[index].getPanel());
        if (Java2Demo.tabbedPane.getSelectedIndex() == 0) {
            // Intro tab: monitors are hidden, so stop their animation surfaces.
            Java2Demo.memorymonitor.surf.stop();
            Java2Demo.performancemonitor.surf.stop();
            removeAll();
            add(Java2Demo.intro);
            Java2Demo.intro.start();
        } else {
            if (getComponentCount() == 1) {
                // Coming from the intro tab: replace it with the side panel
                // and restart whichever monitors are enabled.
                Java2Demo.intro.stop();
                remove(Java2Demo.intro);
                add(p, BorderLayout.EAST);
                if (Java2Demo.memoryCB.getState()) {
                    Java2Demo.memorymonitor.surf.start();
                }
                if (Java2Demo.perfCB.getState()) {
                    Java2Demo.performancemonitor.surf.start();
                }
            } else {
                remove(Java2Demo.group[index]);
            }
            // Tab 0 is the intro, so group indices are offset by one.
            index = Java2Demo.tabbedPane.getSelectedIndex()-1;
            add(Java2Demo.group[index]);
            Java2Demo.group[index].setup(false);
        }
        validate();
    }
}
import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.Cacheable; import org.springframework.stereotype.Service; @Service public class DictionaryService { @CacheEvict(value = "dictionaryCache", allEntries = true) public boolean insert(DictionaryDetail detail) { boolean ret = dictDetailMapper.insert(detail) > 0; return ret; } @CacheEvict(value = "dictionaryCache", key = "#detail.getDictId()") public void delCaches(Long dictId) { // Method to clear specific cache entry for the given dictionary ID } @CacheEvict(value = "dictionaryCache", allEntries = true) @Transactional(rollbackFor = Exception.class) public boolean removeByIds(Set<Long> ids) { // Method to remove dictionary details by their IDs } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package minersinstrument.ui;

/**
 * Deletion dialog that embeds an {@code ADelDialogPanel} inside the
 * generic {@code AUniversalDelDialog} frame.
 *
 * @author PKopychenko
 */
public class ADelDialog extends AUniversalDelDialog {

    private ADelDialogPanel contentPanel;

    public ADelDialog(java.awt.Frame parent, boolean modal) {
        super(parent, modal);
        contentPanel = new ADelDialogPanel();
        addPanel(contentPanel);
    }

    /** Forwards a parameter row to the embedded panel. */
    public void addPar(String sPar, Object o) {
        contentPanel.addParRow(sPar, o);
    }
}
import localForage from 'localforage';

/**
 * Generates a new UUID, persists it under the 'generatedUUID' key, and
 * resolves with the value once the write has completed; rejects when the
 * storage write fails.
 *
 * Fix: the original wrapped localForage's existing promise in a
 * `new Promise(...)` (explicit-construction anti-pattern); chaining the
 * promise directly is equivalent and simpler.
 */
export function createNewUUID(): Promise<string> {
  const uuid = generateUUID();
  return localForage.setItem('generatedUUID', uuid).then(() => uuid);
}

/**
 * Builds an RFC 4122 version-4 style UUID string.
 *
 * NOTE(review): Math.random() is not cryptographically secure; if these
 * IDs must be unguessable, switch to crypto.randomUUID() or
 * crypto.getRandomValues().
 */
function generateUUID(): string {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
<reponame>kkousounnis/University-WebPlatform package Classes; public class Lesson { private int grade; private String id_Lnumber; private String Lessonname; public int getGrade() { return grade; } public void setGrade(int grade) { this.grade = grade; } public String getId_Lnumber() { return id_Lnumber; } public void setId_Lnumber(String id_Lnumber) { this.id_Lnumber = id_Lnumber; } public String getLessonname() { return Lessonname; } public void setLessonname(String lessonname) { Lessonname = lessonname; } }
package com.pearson.docussandra.domain.objects;

/**
 * Kinds of index supported, distinguished by where they are maintained and
 * how consistent they are with the indexed data.
 *
 * @author https://github.com/tfredrich
 * @since Jan 8, 2015
 */
public enum IndexType
{
    /**
     * NOTE(review): undocumented in the original; presumably the index data
     * is stored inline with the owning document/row — confirm.
     */
    INLINE,

    /**
     * Synchronous, consistent, within the cluster.
     */
    LOCAL,

    /**
     * Asynchronous, eventually-consistent, across the region(s).
     */
    GLOBAL
}
def sum_without_operator(num1, num2):
    """Add two integers using only bitwise operations (no ``+``).

    Fix: the original looped forever for negative operands, because Python
    integers are unbounded so the carry never shifts out. Non-negative
    operands keep the original arbitrary-precision behavior; when either
    operand is negative, 32-bit two's-complement arithmetic is emulated
    (operands/results must then fit in a signed 32-bit range).

    :param num1: first addend
    :param num2: second addend
    :return: num1 + num2
    """
    if num1 >= 0 and num2 >= 0:
        # Original carry-propagation loop; safe because the carry is finite
        # for non-negative inputs.
        while num2 != 0:
            carry = num1 & num2
            num1 = num1 ^ num2
            num2 = carry << 1
        return num1

    # 32-bit two's-complement emulation so the carry eventually dies out.
    mask = 0xFFFFFFFF
    max_int = 0x7FFFFFFF
    a = num1 & mask
    b = num2 & mask
    while b:
        carry = ((a & b) << 1) & mask
        a = (a ^ b) & mask
        b = carry
    # Map the 32-bit pattern back to a (possibly negative) Python int.
    return a if a <= max_int else ~(a ^ mask)
def gcd(x, y):
    """Return the greatest common divisor of x and y (Euclid's algorithm).

    :param x: first value
    :param y: second value
    :return: greatest common divisor of x and y
    """
    while y:
        x, y = y, x % y
    return x


# Driver code
x = 75
y = 15
# Fix: the result used to be bound to the name `gcd`, shadowing the function
# and making it uncallable after this point.
result = gcd(x, y)
print("The GCD of", x, "and", y, "is", result)
# Output: The GCD of 75 and 15 is 15
# Start the Monero daemon detached in the background with light (level 1)
# logging, without advertising its port to peers, and printing time stats.
monerod --detach --log-level 1 --hide-my-port --show-time-stats 1
#!/bin/bash # sb2-web-create.sh # create springboot2 web artifact # ########################################################### # will cause error on macosx _file=$(readlink -f $0) _cdir=$(dirname $_file) _name=$(basename $_file) ########################################################### # Treat unset variables as an error set -o nounset # Treat any error as exit set -o errexit # where you put local jar libprefix="$_cdir/lib" if [ "${libprefix:0:10}" = "/cygdrive/" ]; then libprefix="${libprefix:10:1}:${libprefix:11}" fi ########################################################### # save with: https://start.spring.io/ # SpringBoot="2.1.8" # Project Metadata: Group="com.pepstack" Artifact="sb2-demo" # Options Name="sb2-demo" Description="Demo project for Spring Boot2" PackageName="com.pepstack.sb2demo" Packaging="Jar" Java="8"
def isArmstrongNumber(num):
    """Return True if ``num`` is an Armstrong (narcissistic) number.

    A number is an Armstrong number when the sum of each digit raised to
    the power of the digit count equals the number itself (e.g. 153).

    :param num: non-negative integer to test
    :return: True when num is an Armstrong number
    """
    order = len(str(num))
    result = 0
    # calculate the sum of nth power of every digit
    temp = num
    while temp > 0:
        digit = temp % 10
        result += digit ** order
        temp //= 10
    # check if sum is equal to the given num
    return result == num


if __name__ == "__main__":
    # Fix: the driver ran at import time, so merely importing this module
    # blocked on input(); guarding it keeps the function importable.
    num = int(input("Enter a number: "))
    if isArmstrongNumber(num):
        print(num, "is an Armstrong number")
    else:
        print(num, "is not an Armstrong number")
import { Link } from "gatsby"; import Img from "gatsby-image" import React from "react" import Layout from "../components/layout"; import SEO from "../components/seo"; import { documentToReactComponents } from "@contentful/rich-text-react-renderer" export const query = graphql` query($slug: String!) { contentfulBlogPost(slug: {eq: $slug}) { title publishedDate(formatString: "DD MMMM YYYY") featuredImage { fluid { ...GatsbyContentfulFluid } } body { json } } } `; const BlogPost = (props) => { console.log(props); return ( <Layout> <SEO title={props.data.contentfulBlogPost.title} /> <Link to="/">Visit the Blog Page</Link> <div className="content"> <h1>{props.data.contentfulBlogPost.title}</h1> <span className="meta"> Post on {props.data.contentfulBlogPost.publishedDate} </span> { props.data.contentfulBlogPost.featuredImage && ( <Img className="featured" fluid={props.data.contentfulBlogPost.featuredImage.fluid} alt={props.data.contentfulBlogPost.title} /> ) } {/*JSON.stringify(props.data.contentfulBlogPost.body.json)*/} {documentToReactComponents(props.data.contentfulBlogPost.body.json)} </div> </Layout> ) } export default BlogPost
import java.io.File; public class DirectoryCounter { // Count the number of files in a directory public static int countFiles(final File directory) { int totalFiles = 0; File[] filesList = directory.listFiles(); for (File file : filesList) { if (file.isFile()) { totalFiles++; } else if (file.isDirectory()) { totalFiles += countFiles(file); } } return totalFiles; } }
#!/bin/bash MAINSCRIPT=/home/gl/bin/Backups/what_changed.sh ${MAINSCRIPT} before /bin ${MAINSCRIPT} before /etc ${MAINSCRIPT} before /boot ${MAINSCRIPT} before /root ${MAINSCRIPT} before /sbin ${MAINSCRIPT} before /usr ${MAINSCRIPT} before /var ${MAINSCRIPT} before /lib ${MAINSCRIPT} before /opt ${MAINSCRIPT} before /selinux ${MAINSCRIPT} before /srv ${MAINSCRIPT} before /home
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.

/**
 * Javascript extensions for the External Tool activity editor.
 *
 * Wires the "External Tool" mod_form: matches the entered launch URL
 * against configured tool types (via AJAX), applies forced/delegated
 * privacy settings, groups the tool-type <select> into course/global
 * optgroups, and adds add/edit/delete icons for course-level tool types.
 *
 * @package mod
 * @subpackage lti
 * @copyright Copyright (c) 2011 Moodlerooms Inc. (http://www.moodlerooms.com)
 * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
(function(){
    // YUI instance, injected via init(); module-scoped so every method can use it.
    var Y;

    M.mod_lti = M.mod_lti || {};

    // Privacy-setting levels a tool configuration can impose on an activity:
    // NEVER = forced off, ALWAYS = forced on, DELEGATE = instructor's choice.
    M.mod_lti.LTI_SETTING_NEVER = 0;
    M.mod_lti.LTI_SETTING_ALWAYS = 1;
    M.mod_lti.LTI_SETTING_DELEGATE = 2;

    M.mod_lti.editor = {
        /**
         * Entry point called from the mod_form.
         *
         * @param {YUI} yui3 YUI instance to use for all DOM work.
         * @param {String} settings JSON-encoded editor settings (icon URLs,
         *        AJAX endpoints, course id, ...), parsed into this.settings.
         */
        init: function(yui3, settings){
            if(yui3){
                Y = yui3;
            }

            var self = this;
            this.settings = Y.JSON.parse(settings);

            // Caches for tool lookups keyed by URL and by tool-type id.
            this.urlCache = {};
            this.toolTypeCache = {};

            this.addOptGroups();

            // Re-run the automatic tool match for both URL fields.
            var updateToolMatches = function(){
                self.updateAutomaticToolMatch(Y.one('#id_toolurl'));
                self.updateAutomaticToolMatch(Y.one('#id_securetoolurl'));
            };

            var typeSelector = Y.one('#id_typeid');
            typeSelector.on('change', function(e){
                updateToolMatches();
                self.toggleEditButtons();

                // Tool proxies carry their own capability flags; mirror them
                // onto the privacy checkboxes.
                if (self.getSelectedToolTypeOption().getAttribute('toolproxy')){
                    var allowname = Y.one('#id_instructorchoicesendname');
                    allowname.set('checked', !self.getSelectedToolTypeOption().getAttribute('noname'));

                    var allowemail = Y.one('#id_instructorchoicesendemailaddr');
                    allowemail.set('checked', !self.getSelectedToolTypeOption().getAttribute('noemail'));

                    var allowgrades = Y.one('#id_instructorchoiceacceptgrades');
                    allowgrades.set('checked', !self.getSelectedToolTypeOption().getAttribute('nogrades'));

                    self.toggleGradeSection();
                }
            });

            this.createTypeEditorButtons();
            this.toggleEditButtons();

            // Fields whose edits can change which tool matches.
            var textAreas = new Y.NodeList([
                Y.one('#id_toolurl'),
                Y.one('#id_securetoolurl'),
                Y.one('#id_resourcekey'),
                Y.one('#id_password')
            ]);

            var debounce;
            textAreas.on('keyup', function(e){
                clearTimeout(debounce);

                // If no more changes within 2 seconds, look up the matching tool URL
                debounce = setTimeout(function(){
                    updateToolMatches();
                }, 2000);
            });

            var allowgrades = Y.one('#id_instructorchoiceacceptgrades');
            allowgrades.on('change', this.toggleGradeSection, this);

            updateToolMatches();
        },

        /**
         * Shows the standard grade fieldset only while "accept grades" is
         * checked. Usable both directly and as a YUI event handler.
         *
         * @param {Object} [e] Optional DOM event (default-prevented).
         */
        toggleGradeSection: function(e) {
            if (e) {
                e.preventDefault();
            }
            var allowgrades = Y.one('#id_instructorchoiceacceptgrades');
            var gradefieldset = Y.one('#id_modstandardgrade');
            if (!allowgrades.get('checked')) {
                gradefieldset.hide();
            } else {
                gradefieldset.show();
            }
        },

        /**
         * Drops both lookup caches (called when tool types are added,
         * edited or deleted, since cached matches may be stale).
         */
        clearToolCache: function(){
            this.urlCache = {};
            this.toolTypeCache = {};
        },

        /**
         * Updates the inline status indicator next to a URL field: which
         * tool configuration (if any) matches the entered URL / selected
         * tool type, and syncs the privacy checkboxes with that tool's
         * settings. Results are cached; otherwise an AJAX lookup runs.
         *
         * @param {Y.Node} field The #id_toolurl or #id_securetoolurl input.
         */
        updateAutomaticToolMatch: function(field){
            var self = this;
            var toolurl = field;
            var typeSelector = Y.one('#id_typeid');
            // A <span> indicator is created lazily right after the field.
            var id = field.get('id') + '_lti_automatch_tool';
            var automatchToolDisplay = Y.one('#' + id);

            if(!automatchToolDisplay){
                automatchToolDisplay = Y.Node.create('<span />')
                                        .set('id', id)
                                        .setStyle('padding-left', '1em');
                toolurl.insert(automatchToolDisplay, 'after');
            }

            var url = toolurl.get('value');

            // Hide the display if the url box is empty
            if(!url){
                automatchToolDisplay.setStyle('display', 'none');
            } else {
                automatchToolDisplay.set('innerHTML', '');
                automatchToolDisplay.setStyle('display', '');
            }

            var selectedToolType = parseInt(typeSelector.get('value'));
            var selectedOption = typeSelector.one('option[value="' + selectedToolType + '"]');

            // A specific tool type is selected (not "auto")
            // We still need to check with the server to get privacy settings
            if(selectedToolType > 0){
                // If the entered domain matches the domain of the tool configuration...
                var domainRegex = /(?:https?:\/\/)?(?:www\.)?([^\/]+)(?:\/|$)/i;
                var match = domainRegex.exec(url);
                if(match && match[1] && match[1].toLowerCase() === selectedOption.getAttribute('domain').toLowerCase()){
                    automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.green_check_icon_url + '" />' + M.util.get_string('using_tool_configuration', 'lti') + selectedOption.get('text'));
                } else {
                    // The entered URL does not match the domain of the tool configuration
                    automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.warning_icon_url + '" />' + M.util.get_string('domain_mismatch', 'lti'));
                }
            }

            var key = Y.one('#id_resourcekey');
            var secret = Y.one('#id_password');

            // Indicate the tool is manually configured
            // We still check the Launch URL with the server as course/site tools may override privacy settings
            if(key.get('value') !== '' && secret.get('value') !== ''){
                automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.green_check_icon_url + '" />' + M.util.get_string('custom_config', 'lti'));
            }

            // Shared tail: apply the looked-up toolInfo to privacy settings
            // and the indicator, whether it came from cache or AJAX.
            var continuation = function(toolInfo, inputfield){
                if (inputfield === undefined || (inputfield.get('id') != 'id_securetoolurl' || inputfield.get('value'))) {
                    self.updatePrivacySettings(toolInfo);
                }
                if(toolInfo.toolname){
                    automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.green_check_icon_url + '" />' + M.util.get_string('using_tool_configuration', 'lti') + toolInfo.toolname);
                } else if(!selectedToolType) {
                    // Inform them custom configuration is in use
                    if(key.get('value') === '' || secret.get('value') === ''){
                        automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.warning_icon_url + '" />' + M.util.get_string('tool_config_not_found', 'lti'));
                    }
                }
                if (toolInfo.cartridge) {
                    automatchToolDisplay.set('innerHTML', '<img style="vertical-align:text-bottom" src="' + self.settings.green_check_icon_url + '" />' + M.util.get_string('using_tool_cartridge', 'lti'));
                }
            };

            // Cache urls which have already been checked to increase performance
            // Don't use URL cache if tool type manually selected
            if(selectedToolType && self.toolTypeCache[selectedToolType]){
                return continuation(self.toolTypeCache[selectedToolType]);
            } else if(self.urlCache[url] && !selectedToolType){
                return continuation(self.urlCache[url]);
            } else if(!selectedToolType && !url) {
                // No tool type or url set
                return continuation({}, field);
            } else {
                self.findToolByUrl(url, selectedToolType, function(toolInfo){
                    if(toolInfo){
                        // Cache the result based on whether the URL or tool type was used to look up the tool
                        if(!selectedToolType){
                            self.urlCache[url] = toolInfo;
                        } else {
                            self.toolTypeCache[selectedToolType] = toolInfo;
                        }

                        Y.one('#id_urlmatchedtypeid').set('value', toolInfo.toolid);

                        continuation(toolInfo);
                    }
                });
            }
        },

        /**
         * Updates display of privacy settings to show course / site tool
         * configuration settings. NEVER/ALWAYS force and disable the
         * checkbox; DELEGATE re-enables it and restores the user's last
         * manually chosen value.
         *
         * @param {Object} toolInfo Tool lookup result; falsy or toolid-less
         *        values mean "no matching tool", i.e. everything delegated.
         */
        updatePrivacySettings: function(toolInfo){
            if(!toolInfo || !toolInfo.toolid){
                toolInfo = {
                    sendname: M.mod_lti.LTI_SETTING_DELEGATE,
                    sendemailaddr: M.mod_lti.LTI_SETTING_DELEGATE,
                    acceptgrades: M.mod_lti.LTI_SETTING_DELEGATE
                }
            }

            var setting, control;

            var privacyControls = {
                sendname: Y.one('#id_instructorchoicesendname'),
                sendemailaddr: Y.one('#id_instructorchoicesendemailaddr'),
                acceptgrades: Y.one('#id_instructorchoiceacceptgrades')
            };

            // Store a copy of user entered privacy settings as we may overwrite them
            if(!this.userPrivacySettings){
                this.userPrivacySettings = {};
            }

            for(setting in privacyControls){
                if(privacyControls.hasOwnProperty(setting)){
                    control = privacyControls[setting];

                    // Only store the value if it hasn't been forced by the editor
                    if(!control.get('disabled')){
                        this.userPrivacySettings[setting] = control.get('checked');
                    }
                }
            }

            // Update UI based on course / site tool configuration
            for(setting in privacyControls){
                if(privacyControls.hasOwnProperty(setting)){
                    var settingValue = toolInfo[setting];
                    control = privacyControls[setting];

                    if(settingValue == M.mod_lti.LTI_SETTING_NEVER){
                        control.set('disabled', true);
                        control.set('checked', false);
                        control.set('title', M.util.get_string('forced_help', 'lti'));
                    } else if(settingValue == M.mod_lti.LTI_SETTING_ALWAYS){
                        control.set('disabled', true);
                        control.set('checked', true);
                        control.set('title', M.util.get_string('forced_help', 'lti'));
                    } else if(settingValue == M.mod_lti.LTI_SETTING_DELEGATE){
                        control.set('disabled', false);

                        // Get the value out of the stored copy
                        control.set('checked', this.userPrivacySettings[setting]);
                        control.set('title', '');
                    }
                }
            }

            this.toggleGradeSection();
        },

        /**
         * @returns {Y.Node} The currently selected <option> of the
         *          tool-type selector.
         */
        getSelectedToolTypeOption: function(){
            var typeSelector = Y.one('#id_typeid');

            return typeSelector.one('option[value="' + typeSelector.get('value') + '"]');
        },

        /**
         * Separate tool listing into option groups. Server-side select control
         * doesn't seem to support this.
         */
        addOptGroups: function(){
            var typeSelector = Y.one('#id_typeid');

            if(typeSelector.one('option[courseTool=1]')){
                // One or more course tools exist
                var globalGroup = Y.Node.create('<optgroup />')
                                    .set('id', 'global_tool_group')
                                    .set('label', M.util.get_string('global_tool_types', 'lti'));

                var courseGroup = Y.Node.create('<optgroup />')
                                    .set('id', 'course_tool_group')
                                    .set('label', M.util.get_string('course_tool_types', 'lti'));

                var globalOptions = typeSelector.all('option[globalTool=1]').remove().each(function(node){
                    globalGroup.append(node);
                });

                var courseOptions = typeSelector.all('option[courseTool=1]').remove().each(function(node){
                    courseGroup.append(node);
                });

                if(globalOptions.size() > 0){
                    typeSelector.append(globalGroup);
                }

                if(courseOptions.size() > 0){
                    typeSelector.append(courseGroup);
                }
            }
        },

        /**
         * Adds buttons for creating, editing, and deleting tool types.
         * Javascript is a requirement to edit course level tools at this point.
         */
        createTypeEditorButtons: function(){
            var self = this;

            var typeSelector = Y.one('#id_typeid');

            // Builds one icon-link (<a><img/></a>) for the toolbar.
            var createIcon = function(id, tooltip, iconUrl){
                return Y.Node.create('<a />')
                        .set('id', id)
                        .set('title', tooltip)
                        .setStyle('margin-left', '.5em')
                        .set('href', 'javascript:void(0);')
                        .append(Y.Node.create('<img src="' + iconUrl + '" />'));
            }

            var addIcon = createIcon('lti_add_tool_type', M.util.get_string('addtype', 'lti'), this.settings.add_icon_url);
            var editIcon = createIcon('lti_edit_tool_type', M.util.get_string('edittype', 'lti'), this.settings.edit_icon_url);
            var deleteIcon = createIcon('lti_delete_tool_type', M.util.get_string('deletetype', 'lti'), this.settings.delete_icon_url);

            editIcon.on('click', function(e){
                var toolTypeId = typeSelector.get('value');

                // Only course-level ("editable") tools may be edited here.
                if(self.getSelectedToolTypeOption().getAttribute('editable')){
                    window.open(self.settings.instructor_tool_type_edit_url + '&action=edit&typeid=' + toolTypeId, 'edit_tool');
                } else {
                    alert(M.util.get_string('cannot_edit', 'lti'));
                }
            });

            addIcon.on('click', function(e){
                window.open(self.settings.instructor_tool_type_edit_url + '&action=add', 'add_tool');
            });

            deleteIcon.on('click', function(e){
                var toolTypeId = typeSelector.get('value');

                if(self.getSelectedToolTypeOption().getAttribute('editable')){
                    if(confirm(M.util.get_string('delete_confirmation', 'lti'))){
                        self.deleteTool(toolTypeId);
                    }
                } else {
                    alert(M.util.get_string('cannot_delete', 'lti'));
                }
            });

            typeSelector.insert(addIcon, 'after');
            addIcon.insert(editIcon, 'after');
            editIcon.insert(deleteIcon, 'after');
        },

        /**
         * Dims the edit/delete icons when the selected tool type is not
         * editable (course-level) by the current user.
         */
        toggleEditButtons: function(){
            var lti_edit_tool_type = Y.one('#lti_edit_tool_type');
            var lti_delete_tool_type = Y.one('#lti_delete_tool_type');

            // Make the edit / delete icons look enabled / disabled.
            // Does not work in older browsers, but alerts will catch those cases.
            if(this.getSelectedToolTypeOption().getAttribute('editable')){
                lti_edit_tool_type.setStyle('opacity', '1');
                lti_delete_tool_type.setStyle('opacity', '1');
            } else {
                lti_edit_tool_type.setStyle('opacity', '.2');
                lti_delete_tool_type.setStyle('opacity', '.2');
            }
        },

        /**
         * Inserts a freshly created course tool type into the selector,
         * selects it, and refreshes caches / matches. Called by the popup
         * opened from the "add" icon.
         *
         * @param {Object} toolType {id, name, tooldomain} of the new tool.
         */
        addToolType: function(toolType){
            var typeSelector = Y.one('#id_typeid');
            var course_tool_group = Y.one('#course_tool_group');

            var option = Y.Node.create('<option />')
                            .set('text', toolType.name)
                            .set('value', toolType.id)
                            .set('selected', 'selected')
                            .setAttribute('editable', '1')
                            .setAttribute('courseTool', '1')
                            .setAttribute('domain', toolType.tooldomain);

            if(course_tool_group){
                course_tool_group.append(option);
            } else {
                typeSelector.append(option);
            }

            // Adding the new tool may affect which tool gets matched automatically
            this.clearToolCache();
            this.updateAutomaticToolMatch(Y.one('#id_toolurl'));
            this.updateAutomaticToolMatch(Y.one('#id_securetoolurl'));
            this.toggleEditButtons();
            require(["core/notification"], function (notification) {
                notification.addNotification({
                    message: M.util.get_string('tooltypeadded', 'lti'),
                    type: "success"
                });
            });
        },

        /**
         * Applies an edited tool type's new name/domain to its <option>
         * and refreshes caches / matches. Called by the edit popup.
         *
         * @param {Object} toolType {id, name, tooldomain} after editing.
         */
        updateToolType: function(toolType){
            var typeSelector = Y.one('#id_typeid');

            var option = typeSelector.one('option[value="' + toolType.id + '"]');
            option.set('text', toolType.name)
                  .set('domain', toolType.tooldomain);

            // Editing the tool may affect which tool gets matched automatically
            this.clearToolCache();
            this.updateAutomaticToolMatch(Y.one('#id_toolurl'));
            this.updateAutomaticToolMatch(Y.one('#id_securetoolurl'));

            require(["core/notification"], function (notification) {
                notification.addNotification({
                    message: M.util.get_string('tooltypeupdated', 'lti'),
                    type: "success"
                });
            });
        },

        /**
         * Deletes a tool type on the server, then removes its <option>
         * and refreshes caches / matches. Failures only surface a
         * notification; nothing is removed locally.
         *
         * @param {String|Number} toolTypeId Id of the tool type to delete.
         */
        deleteTool: function(toolTypeId){
            var self = this;

            Y.io(self.settings.instructor_tool_type_edit_url + '&action=delete&typeid=' + toolTypeId, {
                on: {
                    success: function(){
                        self.getSelectedToolTypeOption().remove();

                        // Editing the tool may affect which tool gets matched automatically
                        self.clearToolCache();
                        self.updateAutomaticToolMatch(Y.one('#id_toolurl'));
                        self.updateAutomaticToolMatch(Y.one('#id_securetoolurl'));
                        require(["core/notification"], function (notification) {
                            notification.addNotification({
                                message: M.util.get_string('tooltypedeleted', 'lti'),
                                type: "success"
                            });
                        });
                    },
                    failure: function(){
                        require(["core/notification"], function (notification) {
                            notification.addNotification({
                                message: M.util.get_string('tooltypenotdeleted', 'lti'),
                                type: "problem"
                            });
                        });
                    }
                }
            });
        },

        /**
         * Asks the server which tool configuration matches the given URL
         * (or tool type id) and passes the parsed JSON result to callback.
         * AJAX failures are silently ignored (the indicator simply stays
         * unchanged).
         *
         * @param {String} url Entered launch URL.
         * @param {Number} toolId Selected tool type id, or falsy for auto.
         * @param {Function} callback Receives the toolInfo object.
         */
        findToolByUrl: function(url, toolId, callback){
            var self = this;

            Y.io(self.settings.ajax_url, {
                data: {action: 'find_tool_config',
                        course: self.settings.courseId,
                        toolurl: url,
                        toolid: toolId || 0
                },

                on: {
                    success: function(transactionid, xhr){
                        var response = xhr.response;

                        var toolInfo = Y.JSON.parse(response);

                        callback(toolInfo);
                    },
                    failure: function(){
                    }
                }
            });
        }
    };
})();
# Simple interactive coffee-machine simulator.
#
# `ingredients` tracks the unit price and remaining stock of each ingredient;
# `drinks` maps each menu item to the ingredient quantities it consumes.
ingredients = {
  coffee: { price: 0.75, units: 10 },
  decaf_coffee: { price: 0.75, units: 10 },
  sugar: { price: 0.25, units: 10 },
  cream: { price: 0.25, units: 10 },
  steamed_milk: { price: 0.35, units: 10 },
  foamed_milk: { price: 0.35, units: 10 },
  espresso: { price: 1.10, units: 10 },
  cocoa: { price: 0.90, units: 10 },
  whipped_cream: { price: 1.00, units: 10 }
}

drinks = {
  regular_coffee: { coffee: 3, sugar: 1, cream: 1 },
  decaf_coffee: { decaf_coffee: 3, sugar: 1, cream: 1 },
  caffe_latte: { espresso: 2, steamed_milk: 1 },
  caffe_americano: { espresso: 2 },
  caffe_mocha: { espresso: 1, cocoa: 1, steamed_milk: 1, whipped_cream: 1 },
  cappucino: { espresso: 2, steamed_milk: 1, foamed_milk: 1 }
}

# Reset every ingredient back to a full stock of 10 units.
def restock(ingredients)
  ingredients.each { |_ingredient, value| value[:units] = 10 }
  puts 'Restocked!'
end

# Dispense the drink at the given 1-based menu position, if stock allows.
def order_drink(drinks, ingredients, drink_number)
  keys = drinks.keys
  drink_name = keys[drink_number.to_i - 1]
  drink = drinks[drink_name]
  if can_make_drink?(drink, ingredients)
    puts "Dispensing: #{drink_name} "
    update_inventory(drink, ingredients)
  else
    puts "Out of stock: #{drink_name}"
  end
end

# True when every ingredient the drink requires is sufficiently stocked.
def can_make_drink?(drink, ingredients)
  drink.each do |ingredient, qty|
    return false if ingredients[ingredient][:units] < qty
  end
  true
end

# Deduct the drink's ingredient quantities from the inventory.
def update_inventory(drink, ingredients)
  drink.each do |ingredient, qty|
    ingredients[ingredient][:units] -= qty
  end
end

# Print the remaining units of every ingredient.
def display_inventory(ingredients)
  puts "Inventory: "
  ingredients.each do |ingredient, details|
    puts "#{ingredient}, #{details[:units]} units"
  end
end

# Print the numbered menu with price and availability per drink.
def display_menu(drinks, ingredients)
  puts "Menu: "
  drinks.each_with_index do |drink, index|
    drink_name = drink[0]
    puts "Nr. #{index + 1}, #{drink_name}, $#{cost(ingredients, drinks, drink_name)}, #{in_stock?(ingredients, drinks, drink_name)}"
  end
end

# Price of a drink: sum of (ingredient price * quantity), rounded to cents.
# Fix: removed a stray `p drink` debug print that polluted every menu listing.
def cost(ingredients, drinks, drink)
  cost = 0
  drinks[drink].each do |ingredient, qty|
    cost += ingredients[ingredient][:price] * qty
  end
  cost.round(2)
end

# Human-readable availability label used in the menu.
def in_stock?(ingredients, drinks, drink)
  if can_make_drink?(drinks[drink], ingredients)
    'in-stock'
  else
    'out-of-stock'
  end
end

def valid_restock_input(user_input)
  user_input.downcase == 'r'
end

def valid_quit_input(user_input)
  user_input.downcase == 'q'
end

# Accepts the menu positions 1..6; non-numeric input coerces to 0 and fails.
def valid_order_input(user_input)
  (1..6).cover?(user_input.to_i)
end

def invalid_user_input(user_input)
  !valid_restock_input(user_input) &&
    !valid_quit_input(user_input) &&
    !valid_order_input(user_input)
end

# Main REPL: show state, read one command ('r' restock, 'q' quit, 1-6 order).
loop do
  display_inventory(ingredients)
  display_menu(drinks, ingredients)

  user_input = gets.chomp

  restock(ingredients) if valid_restock_input(user_input)
  exit if valid_quit_input(user_input)
  order_drink(drinks, ingredients, user_input) if valid_order_input(user_input)
  puts "Invalid Selection: #{user_input}" if invalid_user_input(user_input)
end
from enum import Enum


class LiveViewDisplayRotation(Enum):
    NoRotation = 0
    RotateCCW = 1
    RotateCW = 2


class DisplayRotator:
    """Tracks the live-view display rotation as one of three cyclic states.

    The state cycles NoRotation -> RotateCCW -> RotateCW -> NoRotation as
    ``rotate_clockwise`` is applied (and the reverse for
    ``rotate_counterclockwise``).

    NOTE(review): pressing "clockwise" from NoRotation yields RotateCCW
    under the existing enum ordering; preserved as-is -- confirm the
    intended mapping with the UI owner.
    """

    def __init__(self):
        # Start in the neutral, unrotated state.
        self.current_rotation = LiveViewDisplayRotation.NoRotation

    def _step(self, delta):
        """Advance the state by ``delta`` positions, wrapping modulo 3."""
        next_value = (self.current_rotation.value + delta) % 3
        self.current_rotation = LiveViewDisplayRotation(next_value)

    def rotate_clockwise(self):
        """Move one step forward in the rotation cycle."""
        self._step(1)

    def rotate_counterclockwise(self):
        """Move one step backward in the rotation cycle."""
        self._step(-1)

    def reset_rotation(self):
        """Return immediately to the unrotated state."""
        self.current_rotation = LiveViewDisplayRotation.NoRotation

    def get_current_rotation(self):
        """Current rotation state as a LiveViewDisplayRotation member."""
        return self.current_rotation
// Auto-generated umbrella header for the Pods_FineNotifications framework
// (produced by CocoaPods; manual edits are overwritten on `pod install`).

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#endif

// Framework version metadata symbols exported by CocoaPods.
FOUNDATION_EXPORT double Pods_FineNotificationsVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_FineNotificationsVersionString[];
import time


class BackupRestorationManager:
    """Drives Cinder backup restores and tracks device-name assignments."""

    def __init__(self, cinder):
        # Client exposing `restores` and `volumes` managers (assumed to
        # follow the python-cinderclient API -- TODO confirm).
        self.cinder = cinder
        # Maps device names ("vda", "vdb", ...) to restored volume ids,
        # assigned in the order restores are initiated.
        self.block_device_mapping = {}

    def initiate_restore(self, backup_id, new_volume_id):
        """Start restoring `backup_id` into `new_volume_id`.

        Records the next free device name ("vda", "vdb", ...) for the
        volume and returns the volume id reported by the restore call.
        """
        vol_index = len(self.block_device_mapping)
        dev_name = "vd" + chr(ord('a') + vol_index)
        self.block_device_mapping[dev_name] = new_volume_id

        restore = self.cinder.restores.restore(backup_id=backup_id,
                                               volume_id=new_volume_id)
        return restore.volume_id

    def monitor_status(self, volume_id, poll_interval=5):
        """Poll the volume until it becomes 'available'.

        Raises RuntimeError if the volume reports an error status; the
        previous implementation polled forever in that case.
        """
        while True:
            restored_volume = self.cinder.volumes.get(volume_id)
            status = restored_volume.status
            if status == 'available':
                print("Restoration completed for volume", volume_id)
                break
            # Cinder failure statuses start with 'error'
            # (e.g. 'error', 'error_restoring').
            if status.startswith('error'):
                raise RuntimeError(
                    "Restoration failed for volume %s (status: %s)"
                    % (volume_id, status))
            time.sleep(poll_interval)

    def _wait_for(self, resource, expected_states, final_state):
        """Poll until `resource` leaves `expected_states`, then verify it
        reached `final_state`; raise otherwise."""
        while resource.status in expected_states:
            time.sleep(5)
            resource = self.cinder.volumes.get(resource.id)
        if resource.status != final_state:
            raise Exception("Failed to reach the final state: " + final_state)


# Usage example
# cinder = CinderAPI()  # Assume CinderAPI class for interacting with OpenStack Cinder API
# manager = BackupRestorationManager(cinder)
# new_volume_id = manager.initiate_restore("backup123", "newVolume456")
# manager.monitor_status(new_volume_id)
<filename>dandelion-upm/dandelion-upm-biz/src/main/java/cn/icepear/dandelion/upm/biz/service/impl/SysMenuServiceImpl.java package cn.icepear.dandelion.upm.biz.service.impl; import cn.hutool.core.collection.CollUtil; import cn.icepear.dandelion.upm.api.domain.dto.RoleInfo; import cn.icepear.dandelion.upm.api.domain.entity.SysMenu; import cn.icepear.dandelion.upm.api.domain.entity.SysRoleMenu; import cn.icepear.dandelion.upm.api.domain.vo.MenuVO; import cn.icepear.dandelion.upm.api.domain.vo.SystemToMenuVo; import cn.icepear.dandelion.upm.biz.mapper.SysMenuMapper; import cn.icepear.dandelion.upm.biz.mapper.SysRoleMenuMapper; import cn.icepear.dandelion.upm.biz.service.SysMenuService; import com.baomidou.mybatisplus.core.toolkit.Wrappers; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.Cacheable; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; /** * @author rim-wood * @description 菜单权限管理service实现 * @date Created on 2019-04-18. 
*/ @Service public class SysMenuServiceImpl extends ServiceImpl<SysMenuMapper, SysMenu> implements SysMenuService { @Autowired private SysRoleMenuMapper sysRoleMenuMapper; @Override @Cacheable(value = "menu_details", key = "'role-' + #roleId + '_menu'") public List<MenuVO> getMenuByRoleId(Long roleId) { return baseMapper.listMenusByRoleId(roleId); } @Override @Transactional(rollbackFor = Exception.class) @CacheEvict(value = "menu_details", allEntries = true) public boolean removeMenuById(Integer id) { // 查询父节点为当前节点的节点 List<SysMenu> menuList = this.list(Wrappers.<SysMenu>query() .lambda().eq(SysMenu::getParentId, id)); if (CollUtil.isNotEmpty(menuList)) { return false; } sysRoleMenuMapper.delete(Wrappers.<SysRoleMenu>query() .lambda().eq(SysRoleMenu::getMenuId, id)); //删除当前菜单及其子菜单 return this.removeById(id); } @Override @CacheEvict(value = "menu_details", allEntries = true) @Transactional(rollbackFor = Exception.class) public boolean updateMenuById(SysMenu sysMenu) { //如果修改菜单操作是修改DelFlag字段,即要删除菜单先删除其子菜单 if(sysMenu.getDelFlag() == 1){ List<SysMenu> sysMenus = baseMapper.sonMenuList(sysMenu.getMenuId()); sysMenus.forEach(menu -> { menu.setDelFlag(1); //删除与该菜单的角色关联 sysRoleMenuMapper.deleteByMenuId(menu.getMenuId()); this.updateById(menu); }); return true; } return this.updateById(sysMenu); } @Override public SysMenu getMenuByMenuId(Long menuId) { return this.getById(menuId); } /** * 缓存名加 - 和systemId为了防止重复 */ @Override @Cacheable(value = "menu_details", key = "#username + '_menuTreeList'") public List<SystemToMenuVo> getMenuTreeList(String username, List<RoleInfo> roles, String systemId, Boolean isAdmin) { for(RoleInfo roleInfo : roles) { if (isAdmin){ return baseMapper.getAdminMenuTreeList(null); } } return baseMapper.getMenuTreeList(roles, null); } @Override public SysMenu selectByName(String menuName, String path) { return baseMapper.selectByName(menuName, path); } @Override @CacheEvict(value = "menu_details", allEntries = true) public boolean save(SysMenu sysMenu) { 
return this.retBool(Integer.valueOf(this.baseMapper.insert(sysMenu))); } }
//
//  SeasonsViewController.h
//  Hiyoko
//
//  Created by 天々座理世 on 2018/08/30.
//  Copyright © 2018 MAL Updater OS X Group. All rights reserved.
//

#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

// Collection-view controller for the seasonal listing; acts as its own
// flow-layout delegate for cell sizing.
@interface SeasonsViewController : UICollectionViewController <UICollectionViewDelegateFlowLayout>
// Reloads the collection's contents; `refresh` presumably forces a re-fetch
// from the data source instead of reusing cached data -- confirm in the .m.
- (void)reloadData:(bool)refresh;
@end

NS_ASSUME_NONNULL_END
import React from 'react'; import { AccountInfo } from '~/utils/types'; import { displayAsGTU } from '~/utils/gtu'; import Card from '~/cross-app-components/Card'; import ScheduleList from '~/components/ScheduleList'; import SidedRow from '~/components/SidedRow'; import styles from './ShowReleaseSchedule.module.scss'; interface Props { accountInfo?: AccountInfo; } /** * Displays the account's release schedule: * Each release (amount and time) * and the total locked value. */ export default function ShowReleaseSchedule({ accountInfo }: Props) { if (!accountInfo) { return null; } const { schedule } = accountInfo.accountReleaseSchedule; return ( <Card className="flexColumn alignCenter relative pB0"> <h3 className={styles.releaseScheduleTitle}> Locked amount:{' '} {displayAsGTU(accountInfo.accountReleaseSchedule.total)} </h3> <SidedRow className={styles.releaseScheduleListHeader} left="Release date and time" right="Amount" /> <ScheduleList showIndex={false} className={styles.releaseSchedule} elementClassName={styles.releaseScheduleElement} schedule={schedule} /> {schedule.length === 0 ? ( <h3 className="flex justifyCenter pB20 mT10"> This account has no future releases. </h3> ) : null} </Card> ); }
#pragma once #include "archgraph/VarType.h" #include "archgraph/typedef.h" #include <cga/typedef.h> #include <rttr/type.h> namespace archgraph { struct RelativeFloat; class EvalContext; class EvalHelper { public: static bool SetPropVal(rttr::property prop, rttr::instance obj, const VarPtr& val); static VarType ResolveSizeVal(const cga::ExprNodePtr& expr, const EvalContext& ctx, RelativeFloat& out_flt, std::string& out_str); }; // EvalHelper }
#!/usr/bin/env bash
#TEST: Test basic Output DNS Adapter
#TEST: Start OpenDNSSEC and see if zone gets transferred and signed
#TEST: and see if NOTIFY messages are sent.
#
# The whole test is one &&-chain: the first failing step falls through to
# the cleanup block at the bottom, which returns 1.

# So we can use validns 0.7 it is installed from source so need to
# specify this path
case "$DISTRIBUTION" in
	redhat )
		append_path /usr/sbin
		;;
esac

# Switch configuration to the MySQL backend when available.
if [ -n "$HAVE_MYSQL" ]; then
	ods_setup_conf conf.xml conf-mysql.xml
fi &&

ods_reset_env 20 &&

## Start secondary name server
ods_ldns_testns 15353 ods.datafile &&

## Start OpenDNSSEC
ods_start_ods-control &&

## Wait for signed zone file
syslog_waitfor 60 'ods-signerd: .*\[STATS\] ods' &&

## Retry NOTIFY
syslog_waitfor 120 'ods-signerd: .*\[notify\] notify max retry for zone ods, 127\.0\.0\.1 unreachable' &&

## SOA query
log_this_timeout soa 10 drill -p 15354 @127.0.0.1 soa ods &&
log_grep soa stdout 'ods\..*3600.*IN.*SOA.*ns1\.ods\..*postmaster\.ods\..*1001.*9000.*4500.*1209600.*3600' &&

## See if we can transfer the signed zone
log_this_timeout axfr 10 drill -p 15354 @127.0.0.1 axfr ods &&
log_grep axfr stdout 'ods\..*3600.*IN.*SOA.*ns1\.ods\..*postmaster\.ods\..*1001.*9000.*4500.*1209600.*3600' &&
log_grep axfr stdout 'ods\..*600.*IN.*MX.*10.*mail\.ods\.' &&

## Occluded names should be part of transfer
log_grep axfr stdout 'below\.zonecut\.label4\.ods\..*600.*IN.*NS.*ns\.zonecut\.label4\.ods\.' &&

## See if we send overflow UDP if does not fit.
log_this_timeout ixfr 10 drill -p 15354 @127.0.0.1 ixfr ods &&
syslog_waitfor 10 'ods-signerd: .*\[axfr\] axfr fallback zone ods' &&
syslog_waitfor 10 'ods-signerd: .*\[axfr\] axfr udp overflow zone ods' &&
log_grep ixfr stdout 'ods\..*IN.*TYPE251' &&
log_grep ixfr stdout 'ods\..*3600.*IN.*SOA.*ns1\.ods\..*postmaster\.ods\..*1001.*9000.*4500.*1209600.*3600' &&
! (log_grep ixfr stdout 'ods\..*600.*IN.*MX.*10.*mail\.ods\.') &&

## See if we fallback to AXFR if IXFR not available.
log_this_timeout ixfr-tcp 10 drill -t -p 15354 @127.0.0.1 ixfr ods &&
log_grep ixfr-tcp stdout 'ods\..*3600.*IN.*SOA.*ns1\.ods\..*postmaster\.ods\..*1001.*9000.*4500.*1209600.*3600' &&
log_grep ixfr-tcp stdout 'ods\..*600.*IN.*MX.*10.*mail\.ods\.' &&

## Update zonefile to create journal
cp -- ./unsigned/ods.2 "$INSTALL_ROOT/var/opendnssec/unsigned/ods" &&
ods-signer sign ods &&
syslog_waitfor 10 'ods-signerd: .*\[STATS\] ods 1002 RR\[count=3 time*' &&

## See if we can get an IXFR back
log_this_timeout dig 10 dig -p 15354 @127.0.0.1 ixfr=1001 ods &&
log_grep dig stdout 'ods\..*3600.*IN.*SOA.*ns1\.ods\..*postmaster\.ods\..*1002.*9000.*4500.*1209600.*3600' &&
log_grep dig stdout 'label35\.ods\..*3600.*IN.*NS.*ns1\.label35\.ods\.' &&
log_grep dig stdout 'ns1\.label35\.ods\..*3600.*IN.*A.*192\.0\.2\.1' &&

# Validate the output on redhat
# case "$DISTRIBUTION" in
# 	redhat )
# 		dig -p 15354 @127.0.0.1 axfr ods > ods_axfr &&
# 		log_this validate-zone-ods validns -s -p cname-other-data -p dname -p dnskey -p nsec3param-not-apex -p mx-alias -p ns-alias -p rp-txt-exists -p tlsa-host ods_axfr &&
# 		log_grep validate-zone-ods stdout 'validation errors: 0'
# 	;;
# esac &&

## Stop
ods_stop_ods-control &&
ods_ldns_testns_kill &&
return 0

## Test failed. Kill stuff
ods_ldns_testns_kill
ods_kill
return 1
# Train the GCDE model (variant "type1") on the PEMSD4 traffic dataset from
# the model/ directory. Hyper-parameters: embedding 10, hidden sizes 64/64,
# 2 layers, lr 1e-3 with weight decay 1e-3, 200 epochs, TensorBoard logging
# enabled, running on GPU device 0.
cd model && python Run_cde.py --dataset='PEMSD4' --model='GCDE' --model_type='type1' --embed_dim=10 --hid_dim=64 --hid_hid_dim=64 --num_layers=2 --lr_init=0.001 --weight_decay=1e-3 --epochs=200 --tensorboard --comment="" --device=0
import pytest

from movies import managers


class TestUniqueNameManager:
    """Tests methods from the UniqueNameManager."""

    def test_get_or_create_from_names_works_with_one_name(self, mocker):
        """Verifies get_or_create_from_names supports receiving one name."""
        mock = mocker.patch(
            'movies.managers.UniqueNameManager.get_or_create',
            return_value=("name_object", False),
        )
        manager = managers.UniqueNameManager()

        results = manager.get_or_create_from_names('name1')

        assert len(results) == 1
        assert results[0] == "name_object"
        mock.assert_called_once_with(name='name1')

    def test_get_or_create_from_names_behaves_correctly_if_no_name(
        self, mocker
    ):
        """Verifies there is no problem if the get_or_create_from_names
        method received an empty string."""
        mock = mocker.patch(
            'movies.managers.UniqueNameManager.get_or_create',
        )
        manager = managers.UniqueNameManager()

        results = manager.get_or_create_from_names('')

        assert len(results) == 0
        mock.assert_not_called()

    @pytest.mark.parametrize(
        'names, expected_results',
        [
            ('name1,name2', ['name1', 'name2']),
            ('name1, name2', ['name1', 'name2']),
            ('name1,  name2', ['name1', 'name2']),
        ],
    )
    def test_get_or_create_from_names_splits_names_correctly(
        self, mocker, names, expected_results
    ):
        """Verifies get_or_create_from_names work as expected whatever the
        number of spaces between names."""
        mock = mocker.patch(
            'movies.managers.UniqueNameManager.get_or_create',
            side_effect=[
                (f"{name}_object", False) for name in expected_results
            ],
        )
        manager = managers.UniqueNameManager()

        results = manager.get_or_create_from_names(names)

        assert results == [f"{name}_object" for name in expected_results]
        # Previously the mock was created but never verified; check that
        # each name triggered exactly one lookup.
        assert mock.call_count == len(expected_results)


class TestMovieManager:
    # TODO: add coverage for MovieManager.
    pass
# Generated by Django 3.0.8 on 2020-08-17 16:08 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('definitions', '0009_auto_20200812_1614'), ] operations = [ migrations.AlterField( model_name='column', name='object_id', field=models.CharField(db_index=True, default=None, max_length=256, null=True), ), migrations.AlterField( model_name='index', name='object_id', field=models.CharField(db_index=True, default=None, max_length=256, null=True), ), migrations.AlterField( model_name='schema', name='object_id', field=models.CharField(db_index=True, default=None, max_length=256, null=True), ), migrations.AlterField( model_name='table', name='object_id', field=models.CharField(db_index=True, default=None, max_length=256, null=True), ), ]
# !/usr/bin/python
# -*- coding:utf-8 -*-

import numpy as np
import matplotlib.pyplot as plt
import sklearn.datasets as ds
import matplotlib.colors
from sklearn.cluster import MeanShift
from sklearn.metrics import euclidean_distances

if __name__ == "__main__":
    # Generate 4 Gaussian blobs with differing spreads to cluster.
    N = 1000
    centers = [[1, 2], [-1, -1], [1, -1], [-1, 1]]
    data, y = ds.make_blobs(N, n_features=2, centers=centers, cluster_std=[0.5, 0.25, 0.7, 0.5], random_state=0)

    # Use a CJK-capable font so the Chinese titles render correctly.
    matplotlib.rcParams['font.sans-serif'] = [u'SimHei']
    matplotlib.rcParams['axes.unicode_minus'] = False
    plt.figure(figsize=(10, 9), facecolor='w')

    # Baseline bandwidth: the median of the squared pairwise distances.
    m = euclidean_distances(data, squared=True)
    bw = np.median(m)
    print(bw)

    # Run MeanShift at several fractions of the baseline bandwidth and plot
    # each result in its own subplot.
    for i, mul in enumerate(np.linspace(0.1, 0.4, 4)):
        band_width = mul * bw
        model = MeanShift(bin_seeding=True, bandwidth=band_width)
        ms = model.fit(data)
        centers = ms.cluster_centers_
        y_hat = ms.labels_
        n_clusters = np.unique(y_hat).size
        print('带宽:', mul, band_width, '聚类簇的个数为:', n_clusters)

        plt.subplot(2, 2, i + 1)
        plt.title(u'带宽:%.2f,聚类簇的个数为:%d' % (band_width, n_clusters))
        clrs = []
        # One hex color per cluster, interpolated from red (0xFF0000)
        # to blue (0x0000FF).
        for c in np.linspace(16711680, 255, n_clusters):
            clrs.append('#%06x' % int(c))
        # clrs = plt.cm.Spectral(np.linspace(0, 1, n_clusters))
        print(clrs)
        for k, clr in enumerate(clrs):
            cur = (y_hat == k)
            plt.scatter(data[cur, 0], data[cur, 1], c=clr, edgecolors='none')
        # Mark cluster centers with stars.
        plt.scatter(centers[:, 0], centers[:, 1], s=150, c=clrs, marker='*', edgecolors='k')
        plt.grid(True)

    # Fix: `pad` is keyword-only in current Matplotlib (>= 3.5); the old
    # positional call `plt.tight_layout(2)` raises a TypeError there.
    plt.tight_layout(pad=2)
    plt.suptitle(u'MeanShift聚类', fontsize=20)
    plt.subplots_adjust(top=0.92)
    plt.show()
def filter_grade(df):
    """Return only the rows of ``df`` whose 'grade' column is at least 60.

    Non-mutating: the input frame is left untouched; a filtered selection
    is returned.
    """
    passing_mask = df['grade'] >= 60
    return df[passing_mask]
<reponame>claitonneri/nest-architecture<filename>src/users/users.service.ts import { Injectable } from '@nestjs/common'; import { User } from '@prisma/client'; import * as bcrypt from 'bcrypt'; import { CreateUserDto } from './dto/create-user.dto'; import { UpdateUserDto } from './dto/update-user.dto'; import { PrismaService } from '../prisma/prisma.service'; @Injectable() export class UsersService { constructor(private readonly prisma: PrismaService) {} async findAll(): Promise<User[]> { return this.prisma.user.findMany(); } async findById(id: string): Promise<User | null> { return this.prisma.user.findUnique({ where: { id, }, }); } async findByEmail(email: string): Promise<User | null> { return this.prisma.user.findUnique({ where: { email, }, }); } async create({ name, email, password }: CreateUserDto): Promise<User> { const passwordHashed = await bcrypt.hash(password, 10); return this.prisma.user.create({ data: { name, email, password: <PASSWORD>, }, }); } async update( id: string, { name, email, password }: UpdateUserDto, ): Promise<User> { return this.prisma.user.update({ where: { id, }, data: { name, email, password, }, }); } async remove(id: string): Promise<User> { return this.prisma.user.delete({ where: { id, }, }); } }
"""Leetcode 293. Flip Game (Premium) Easy URL: https://leetcode.com/problems/flip-game You are playing the following Flip Game with your friend: Given a string that contains only these two characters: + and -, you and your friend take turns to flip two consecutive "++" into "--". The game ends when a person can no longer make a move and therefore the other person will be the winner. Write a function to compute all possible states of the string after one valid move. Example: Input: s = "++++" Output: [ "--++", "+--+", "++--" ] Note: If there is no valid move, return an empty list []. """ class SolutionCheckCharAndNeighborIter(object): def generatePossibleNextMoves(self, s): """ :type s: str :rtype: List[str] Time complexity: O(n^2). Space complexity: O(n). """ # Edge cases. if len(s) <= 1: return [] n = len(s) possible_states = [] # Iterate through string to check if char and its next are '++'. i = 0 while i < n - 1: if s[i] == '+': while i < n - 1 and s[i + 1] == '+': possible_states.append(s[:i] + '--' + s[i+2:]) i += 1 i += 1 return possible_states def main(): # Input: s = "++++" # Output: # [ # "--++", # "+--+", # "++--" # ] s = '++++' print SolutionCheckCharAndNeighborIter().generatePossibleNextMoves(s) if __name__ == '__main__': main()
<reponame>Mayur2520/SAMPT<filename>src/app/services/credencials.ts
import { Injectable, EventEmitter } from '@angular/core';
import { Subscription } from 'rxjs/internal/Subscription';
import { Storage } from '@ionic/storage';
import { Observable } from 'rxjs';

/**
 * App-wide configuration values shared through Angular dependency injection.
 *
 * NOTE(review): EventEmitter, Subscription, Storage and Observable are
 * imported but not referenced here — presumably leftovers; confirm before
 * removing.
 */
@Injectable()
export class AppGlobals {
  // Base URL of the backend API.
  // NOTE(review): hard-coded LAN address and port — looks like a development
  // endpoint; consider moving it to environment configuration.
  readonly ApiLink: string = 'http://192.168.127.12:8897';
}
<filename>proposal/SpeedPlot.py
# Bar-chart comparison of CPU vs GPU runtimes for three obstacle benchmarks;
# writes the figure to cpu&gpu.pdf and displays it.

import matplotlib
import GUI
import VTKReader
import flow2D
import flow3D
import matplotlib.pyplot as plt
import numpy as np
# NOTE(review): GUI/VTKReader/flow2D/flow3D are not referenced below --
# presumably leftovers from an earlier version; confirm before removing.


def plot(cpu, gpu):
    """Render side-by-side CPU/GPU bars for three obstacles and save a PDF.

    cpu, gpu: length-3 sequences of mean runtimes in seconds.
    """
    data = [cpu, gpu]
    print(data)
    plt.rcParams.update({'font.size': 16})
    plt.rc('font', family='serif')
    plt.rc('axes', axisbelow=True)  # draw the grid behind the bars
    plt.grid(linestyle="--")
    X = np.arange(3)
    fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(16, 10)
    # fig = plt.figure()
    colors = ['indianred', 'darkseagreen']
    # Offset each series by half a bar width so the pairs sit side by side.
    plt.bar(X - 0.12, data[0], color=colors[0], width=0.24, label="CPUs")
    plt.bar(X + 0.12, data[1], color=colors[1], width=0.24, label="GPUs")
    # fig.set_size_inches(16, 10)
    plt.xticks([0, 1, 2], ["Obstacle_1", "Obstacle_2", "Obstacle_3"])
    plt.ylabel("Time (seconds)")
    plt.legend()
    fig.savefig("cpu&gpu.pdf")
    plt.show()


def main():
    # Three repeated wall-clock measurements per obstacle; averaged below.
    cpu = [[409.759, 405.922, 408.488], [403.077, 394.648, 394.496], [392.996, 394.119, 396.363]]
    gpu = [[49.039, 49.052, 49.407], [49.019, 49.628, 49.834], [49.682, 49.798, 49.678]]
    cpu = np.mean(cpu, axis=1)
    gpu = np.mean(gpu, axis=1)
    print(cpu, gpu)
    plot(cpu, gpu)
    # # filename = "profile1.txt"
    # # plot(filename, "CPU")


if __name__ == '__main__':
    main()
#!/usr/bin/env nix-shell
#!nix-shell -I nixpkgs=../../../../../ -i bash -p nix wget prefetch-yarn-deps nix-prefetch-git jq

# Regenerates pin.json (source revision + fixed-output hashes) for the
# SchildiChat desktop/web packages.

if [[ "$#" -gt 2 || "$1" == -* ]]; then
  echo "Regenerates packaging data for the SchildiChat packages."
  echo "Usage: $0 [git revision or tag] [version string override]"
  exit 1
fi

rev="$1"
version="$2"

set -euo pipefail

# Default to the most recent GitHub release tag when no revision was given.
if [ -z "$rev" ]; then
  rev="$(wget -O- "https://api.github.com/repos/SchildiChat/schildichat-desktop/releases?per_page=1" | jq -r '.[0].tag_name')"
fi

if [ -z "$version" ]; then
  # strip leading "v"
  version="${rev#v}"
fi

# Prefetch the source tree (including submodules) and record its store hash.
src_data=$(nix-prefetch-git https://github.com/SchildiChat/schildichat-desktop --fetch-submodules --rev $rev)
src=$(echo $src_data | jq -r .path)
src_hash=$(echo $src_data | jq -r .sha256)

# Hash the two yarn dependency trees (web and desktop).
web_yarn_hash=$(prefetch-yarn-deps $src/element-web/yarn.lock)
desktop_yarn_hash=$(prefetch-yarn-deps $src/element-desktop/yarn.lock)

cat > pin.json << EOF
{
  "version": "$version",
  "rev": "$rev",
  "srcHash": "$src_hash",
  "webYarnHash": "$web_yarn_hash",
  "desktopYarnHash": "$desktop_yarn_hash"
}
EOF
<filename>microservicio/infraestructura/src/test/java/com/ceiba/reserva/controlador/ConsultaControladorReservaTest.java
package com.ceiba.reserva.controlador;

import com.ceiba.ApplicationMock;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;

import java.util.ArrayList;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.Is.isA;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * Web-layer test harness for {@code ConsultaControladorReserva}, wired with
 * MockMvc against the mock application context.
 */
@RunWith(SpringRunner.class)
@ContextConfiguration(classes= ApplicationMock.class)
@WebMvcTest(ConsultaControladorReserva.class)
public class ConsultaControladorReservaTest {

    @Autowired
    private MockMvc mockMvc;

    /**
     * NOTE(review): this test body is empty, so it always passes without
     * exercising the controller. The unused imports (get/status/jsonPath and
     * the Hamcrest matchers) suggest a GET-plus-assertions test was planned;
     * confirm and implement it or remove the stub.
     */
    @Test
    public void aValidarListar() throws Exception {
    }
}
<gh_stars>1-10
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2012-2013 <NAME>. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

// The whole category is compiled out unless ADFlipTransition support is
// enabled via the kSupportADTransition build flag.
#if kSupportADTransition

#import <UIKit/UIKit.h>

/**
 * A convenience category on UITableViewController to easily present and dismiss
 * using ADFlipTransition.
 */
@interface UITableViewController (ADFlipTransition)

/**
 * Present a view controller modally from the current view controller using a
 * flip animation, beginning from a UITableViewCell.
 * @param destinationViewController The view controller to present
 * @param indexPath The location of the cell to flip from.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)flipToViewController:(UIViewController *)destinationViewController fromItemAtIndexPath:(NSIndexPath *)indexPath withCompletion:(void (^)(void))completion;

/**
 * Present a view controller modally from the current view controller using a
 * flip animation, beginning from a UITableViewCell.
 * @param destinationViewController The view controller to present
 * @param indexPath The location of the cell to flip from.
 * @param sourceSnapshot The placeholder image for the source view. Specifying
 * nil will take a snapshot just before the animation.
 * @param destinationSnapshot The placeholder image for the destination view.
 * Specifying nil will take a snapshot just before the animation.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)flipToViewController:(UIViewController *)destinationViewController fromItemAtIndexPath:(NSIndexPath *)indexPath withSourceSnapshotImage:(UIImage *)sourceSnapshot andDestinationSnapshot:(UIImage *)destinationSnapshot withCompletion:(void (^)(void))completion;

/**
 * Present a view controller modally from the current view controller using a
 * flip animation, beginning from a UITableViewCell.
 * @param destinationViewController The view controller to present
 * @param indexPath The location of the cell to flip from.
 * @param destinationSize The size for the destination view controller to take
 * up on the screen.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)flipToViewController:(UIViewController *)destinationViewController fromItemAtIndexPath:(NSIndexPath *)indexPath asChildWithSize:(CGSize)destinationSize withCompletion:(void (^)(void))completion;

/**
 * Present a view controller modally from the current view controller using a
 * flip animation, beginning from a UITableViewCell.
 * @param destinationViewController The view controller to present
 * @param indexPath The location of the cell to flip from.
 * @param destinationSize The size for the destination view controller to take
 * up on the screen.
 * @param sourceSnapshot The placeholder image for the source view. Specifying
 * nil will take a snapshot just before the animation.
 * @param destinationSnapshot The placeholder image for the destination view.
 * Specifying nil will take a snapshot just before the animation.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)flipToViewController:(UIViewController *)destinationViewController fromItemAtIndexPath:(NSIndexPath *)indexPath asChildWithSize:(CGSize)destinationSize withSourceSnapshotImage:(UIImage *)sourceSnapshot andDestinationSnapshot:(UIImage *)destinationSnapshot withCompletion:(void (^)(void))completion;

@end

#endif
#!/usr/bin/env bash

# Preserves keyboard layout when switching between windows.
# Handy if you, for example, use your native language in a messenger app,
# do some coding in an editor (Vim especially), and switch back and forth.
# Messenger stays Russian, Vim stays English.

# === Usage ===
#
# Put this script next to your `autostart` (~/.config/herbstluftwm/).
# Make sure it is executable:
#
#   chmod +x ~/.config/herbstluftwm/perclient_kb_layout.sh
#
# Install xkblayout-state (<https://github.com/nonpop/xkblayout-state>).
#
# Somewhere in your `autostart` add:
#
#   pkill -u $USER --full perclient_kb_layout
#   $(dirname "$0")/perclient_kb_layout.sh &

if ! command -v xkblayout-state &> /dev/null
then
    echo >&2 "$0 requires xkblayout-state to be on \$PATH";
    echo >&2 "Grab it from: https://github.com/nonpop/xkblayout-state";
    echo >&2 "BTW, if using Arch: https://aur.archlinux.org/packages/xkblayout-state-git/";
    exit 1;
fi

# Shorthand for talking to the herbstluftwm IPC.
hc() {
    herbstclient "$@"
}

FOCUS_WINID=$(hc attr clients.focus.winid)

# React to every focus change reported by herbstluftwm.
hc --idle focus_changed | while read hook winid name
do
    # Save the current keyboard layout for the window losing focus
    hc try silent new_attr int clients.${FOCUS_WINID}.my_kb_layout;
    hc silent attr clients.${FOCUS_WINID}.my_kb_layout "$(xkblayout-state print '%c')";

    # Save the currently focused win id to be able to refer to it as the one losing focus
    FOCUS_WINID=$winid

    # Restore the previously stored layout.
    # Fall back to the default (0'th) if the window is new and so has no stored attribute.
    # Redirect stderr to /dev/null to suppress:
    #     Object "clients.focus" has no attribute "my_kb_layout"
    # ..in this case
    xkblayout-state set $(hc attr clients.focus.my_kb_layout 2>/dev/null || echo 0);
done;
#!/usr/bin/env bash
#
# Code-generation driver for KubeVirt: regenerates Go clientsets, deep-copy
# and openapi code, manifests, example VMs, protobuf/gRPC stubs and mocks.

set -e

source $(dirname "$0")/common.sh
source $(dirname "$0")/config.sh

# generate clients
CLIENT_GEN_BASE=kubevirt.io/client-go/generated
rm -rf ${KUBEVIRT_DIR}/staging/src/${CLIENT_GEN_BASE}

# KubeVirt stuff
swagger-doc -in ${KUBEVIRT_DIR}/staging/src/kubevirt.io/client-go/apis/snapshot/v1alpha1/types.go

deepcopy-gen --input-dirs kubevirt.io/client-go/apis/snapshot/v1alpha1 \
    --bounding-dirs kubevirt.io/client-go/apis \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

openapi-gen --input-dirs kubevirt.io/client-go/apis/snapshot/v1alpha1,k8s.io/api/core/v1,k8s.io/apimachinery/pkg/apis/meta/v1,kubevirt.io/client-go/api/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package kubevirt.io/client-go/apis/snapshot/v1alpha1 \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

client-gen --clientset-name versioned \
    --input-base kubevirt.io/client-go/apis \
    --input snapshot/v1alpha1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/kubevirt/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

# dependencies
client-gen --clientset-name versioned \
    --input-base kubevirt.io/containerized-data-importer/pkg/apis \
    --input core/v1alpha1,upload/v1alpha1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/containerized-data-importer/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

client-gen --clientset-name versioned \
    --input-base github.com/coreos/prometheus-operator/pkg/apis \
    --input monitoring/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/prometheus-operator/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

client-gen --clientset-name versioned \
    --input-base github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/apis \
    --input k8s.cni.cncf.io/v1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/network-attachment-definition-client/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

client-gen --clientset-name versioned \
    --input-base github.com/kubernetes-csi/external-snapshotter/v2/pkg/apis \
    --input volumesnapshot/v1beta1 \
    --output-base ${KUBEVIRT_DIR}/staging/src \
    --output-package ${CLIENT_GEN_BASE}/external-snapshotter/clientset \
    --go-header-file ${KUBEVIRT_DIR}/hack/boilerplate/boilerplate.go.txt

# Drop previously generated files before regenerating them.
find ${KUBEVIRT_DIR}/pkg/ -name "*generated*.go" -exec rm {} -f \;

${KUBEVIRT_DIR}/hack/build-go.sh generate ${WHAT}
# NOTE(review): the leading "/" below yields "//<path>" when KUBEVIRT_DIR is
# absolute (harmless on Linux, but looks unintentional) — confirm.
/${KUBEVIRT_DIR}/hack/bootstrap-ginkgo.sh

(cd ${KUBEVIRT_DIR}/tools/openapispec/ && go_build)
${KUBEVIRT_DIR}/tools/openapispec/openapispec --dump-api-spec-path ${KUBEVIRT_DIR}/api/openapi-spec/swagger.json

(cd ${KUBEVIRT_DIR}/tools/resource-generator/ && go_build)
(cd ${KUBEVIRT_DIR}/tools/csv-generator/ && go_build)

rm -f ${KUBEVIRT_DIR}/manifests/generated/*
rm -f ${KUBEVIRT_DIR}/examples/*

# Regenerate the manifest templates ({{.Foo}} placeholders are filled later).
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=priorityclass >${KUBEVIRT_DIR}/manifests/generated/kubevirt-priority-class.yaml
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=kv >${KUBEVIRT_DIR}/manifests/generated/kv-resource.yaml
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=kv-cr --namespace={{.Namespace}} --pullPolicy={{.ImagePullPolicy}} >${KUBEVIRT_DIR}/manifests/generated/kubevirt-cr.yaml.in
${KUBEVIRT_DIR}/tools/resource-generator/resource-generator --type=operator-rbac --namespace={{.Namespace}} >${KUBEVIRT_DIR}/manifests/generated/rbac-operator.authorization.k8s.yaml.in

# used for Image fields in manifests
function getVersion() {
    echo "{{if $1}}@{{$1}}{{else}}:{{.DockerTag}}{{end}}"
}

virtapi_version=$(getVersion ".VirtApiSha")
virtcontroller_version=$(getVersion ".VirtControllerSha")
virthandler_version=$(getVersion ".VirtHandlerSha")
virtlauncher_version=$(getVersion ".VirtLauncherSha")
virtoperator_version=$(getVersion ".VirtOperatorSha")

# used as env var for operator
function getShasum() {
    echo "{{if $1}}@{{$1}}{{end}}"
}

# without the '@' symbol used in 'getShasum'
function getRawShasum() {
    echo "{{if $1}}{{$1}}{{end}}"
}

virtapi_sha=$(getShasum ".VirtApiSha")
virtcontroller_sha=$(getShasum ".VirtControllerSha")
virthandler_sha=$(getShasum ".VirtHandlerSha")
virtlauncher_sha=$(getShasum ".VirtLauncherSha")

virtapi_rawsha=$(getRawShasum ".VirtApiSha")
virtcontroller_rawsha=$(getRawShasum ".VirtControllerSha")
virthandler_rawsha=$(getRawShasum ".VirtHandlerSha")
virtlauncher_rawsha=$(getRawShasum ".VirtLauncherSha")

# The generation code for CSV requires a valid semver to be used.
# But we're trying to generate a template for a CSV here from code
# rather than an actual usable CSV. To work around this, we set the
# versions to something absurd and do a find/replace with our templated
# values after the file is generated.
_fake_replaces_csv_version="1111.1111.1111"
_fake_csv_version="2222.2222.2222"

${KUBEVIRT_DIR}/tools/csv-generator/csv-generator --namespace={{.CSVNamespace}} --dockerPrefix={{.DockerPrefix}} --operatorImageVersion="$virtoperator_version" --pullPolicy={{.ImagePullPolicy}} --verbosity={{.Verbosity}} --apiSha="$virtapi_rawsha" --controllerSha="$virtcontroller_rawsha" --handlerSha="$virthandler_rawsha" --launcherSha="$virtlauncher_rawsha" --kubevirtLogo={{.KubeVirtLogo}} --csvVersion="$_fake_csv_version" --replacesCsvVersion="$_fake_replaces_csv_version" --csvCreatedAtTimestamp={{.CreatedAt}} --kubeVirtVersion={{.DockerTag}} >${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in
sed -i "s/$_fake_csv_version/{{.CsvVersion}}/g" ${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in
sed -i "s/$_fake_replaces_csv_version/{{.ReplacesCsvVersion}}/g" ${KUBEVIRT_DIR}/manifests/generated/operator-csv.yaml.in

(cd ${KUBEVIRT_DIR}/tools/vms-generator/ && go_build)
vms_docker_prefix=${DOCKER_PREFIX:-registry:5000/kubevirt}
vms_docker_tag=${DOCKER_TAG:-devel}
${KUBEVIRT_DIR}/tools/vms-generator/vms-generator --container-prefix=${vms_docker_prefix} --container-tag=${vms_docker_tag} --generated-vms-dir=${KUBEVIRT_DIR}/examples

# Protobuf/gRPC stubs for the hook and handler/launcher communication APIs.
protoc --proto_path=pkg/hooks/info --go_out=plugins=grpc,import_path=kubevirt_hooks_info:pkg/hooks/info pkg/hooks/info/api.proto
protoc --proto_path=pkg/hooks/v1alpha1 --go_out=plugins=grpc,import_path=kubevirt_hooks_v1alpha1:pkg/hooks/v1alpha1 pkg/hooks/v1alpha1/api.proto
protoc --proto_path=pkg/hooks/v1alpha2 --go_out=plugins=grpc,import_path=kubevirt_hooks_v1alpha2:pkg/hooks/v1alpha2 pkg/hooks/v1alpha2/api.proto
protoc --go_out=plugins=grpc:. pkg/handler-launcher-com/notify/v1/notify.proto
protoc --go_out=plugins=grpc:. pkg/handler-launcher-com/notify/info/info.proto
protoc --go_out=plugins=grpc:. pkg/handler-launcher-com/cmd/v1/cmd.proto
protoc --go_out=plugins=grpc:. pkg/handler-launcher-com/cmd/info/info.proto

# Regenerate gomock mocks for the info services.
mockgen -source pkg/handler-launcher-com/notify/info/info.pb.go -package=info -destination=pkg/handler-launcher-com/notify/info/generated_mock_info.go
mockgen -source pkg/handler-launcher-com/cmd/info/info.pb.go -package=info -destination=pkg/handler-launcher-com/cmd/info/generated_mock_info.go

hack/sync-kubevirtci.sh
from unbundler import file_parser
import os
import shutil
import codecs
from collections import Counter
import json
import re


def get_nodes_from_file(file_target):
    """Parse an unbundled.json and assign each node a stable output filename.

    The entry-point node becomes "_entry_point-0000.js"; every other node is
    named after the most common path other modules use to reference it
    (normalized to a flat, underscore-separated name), suffixed with its id.
    """
    nodes = file_parser.parse_file(file_target=file_target)
    for n in nodes.values():
        if n._entry:
            id = 0
            name = "_entry_point"
        else:
            id = n.id
            # Most frequent incoming reference path, flattened to a filename.
            name = Counter(
                [
                    path.replace("../", "").replace("./", "").replace("/", "_")
                    for path in n.refs.values()
                ]
            ).most_common(1)[0][0]
        n.meta.name = "{name}-{id:04d}.js".format(
            id=id,
            name=name
        )
    return nodes


def fix_source(node, nodes):
    """Rewrite require('<bundle path>') calls to require the sibling output files.

    A temporary marker ("r~e~q~u~i~r~e") prevents an already-rewritten call
    from being matched again while the remaining deps are substituted; the
    marker is turned back into "require" at the end.
    """
    source = node.source
    require_str = "require('{path}')"
    require_str_t = "r~e~q~u~i~r~e('{path}')"
    for dep_id, dep_path in node.deps.items():
        source = source.replace(
            require_str.format(path=dep_path),
            require_str_t.format(path="./"+nodes[dep_id].meta.name))
    # source = re.sub("require\(.*\)", "undefined", source)
    source = source.replace("r~e~q~u~i~r~e", "require")
    return source


def unbundle(target):
    """Write every node of `_unbundled/<target>.spa` out as its own .js file."""
    file_target = '_unbundled/{}.spa/unbundled.json'.format(target)
    output_folder = 'unbundled_source/{}.spa/src'.format(target)

    nodes = get_nodes_from_file(file_target)
    print("got {} nodes".format(len(nodes)))

    for node in nodes.values():
        file_path = output_folder
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        # NOTE: rebinds the `target` parameter to the output file path; the
        # original argument is no longer needed at this point.
        target = file_path + '/' + node.meta.name
        with codecs.open(target, 'wb', 'utf-8') as f:
            f.write("// the following file was decompiled from a bundle\n")
            f.write("// with reference id %d\n" % node.id)
            f.write(fix_source(node, nodes))
    print("populated {}".format(output_folder))


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='take an unbundled.json and fill a folder with files.'
    )
    parser.add_argument('target', help='the target package')
    args = parser.parse_args()
    unbundle(args.target)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.kafka010

import java.util.UUID

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.kafka.common.security.auth.SecurityProtocol.SASL_PLAINTEXT

import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.security.HadoopDelegationTokenManager
import org.apache.spark.internal.config.{KEYTAB, PRINCIPAL}
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, StreamTest}
import org.apache.spark.sql.test.SharedSparkSession

/**
 * End-to-end test for Kafka delegation-token support: obtains tokens via
 * HadoopDelegationTokenManager and round-trips data through a SASL_PLAINTEXT
 * Kafka broker with Structured Streaming (write then read back).
 */
class KafkaDelegationTokenSuite extends StreamTest with SharedSparkSession with KafkaTest {

  import testImplicits._

  protected var testUtils: KafkaTestUtils = _

  // Disable the other credential providers and point Spark at the secured
  // test broker so only the Kafka token provider is exercised.
  protected override def sparkConf = super.sparkConf
    .set("spark.security.credentials.hadoopfs.enabled", "false")
    .set("spark.security.credentials.hbase.enabled", "false")
    .set(KEYTAB, testUtils.clientKeytab)
    .set(PRINCIPAL, testUtils.clientPrincipal)
    .set("spark.kafka.clusters.cluster1.auth.bootstrap.servers", testUtils.brokerAddress)
    .set("spark.kafka.clusters.cluster1.security.protocol", SASL_PLAINTEXT.name)

  override def beforeAll(): Unit = {
    // Second argument enables the secured (Kerberos-backed) test cluster.
    testUtils = new KafkaTestUtils(Map.empty, true)
    testUtils.setup()
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    try {
      if (testUtils != null) {
        testUtils.teardown()
        testUtils = null
      }
      // Reset the static Hadoop login state so later suites start clean.
      UserGroupInformation.reset()
    } finally {
      super.afterAll()
    }
  }

  // NOTE(review): disabled via ignore(...) — presumably flaky or
  // environment-dependent; confirm before re-enabling.
  ignore("Roundtrip") {
    val hadoopConf = new Configuration()
    val manager = new HadoopDelegationTokenManager(spark.sparkContext.conf, hadoopConf, null)
    val credentials = new Credentials()
    manager.obtainDelegationTokens(credentials)
    // Serialize/deserialize the credentials the way the scheduler does.
    val serializedCredentials = SparkHadoopUtil.get.serialize(credentials)
    SparkHadoopUtil.get.addDelegationTokens(serializedCredentials, spark.sparkContext.conf)

    val topic = "topic-" + UUID.randomUUID().toString
    testUtils.createTopic(topic, partitions = 5)

    withTempDir { checkpointDir =>
      val input = MemoryStream[String]

      val df = input.toDF()
      val writer = df.writeStream
        .outputMode(OutputMode.Append)
        .format("kafka")
        .option("checkpointLocation", checkpointDir.getCanonicalPath)
        .option("kafka.bootstrap.servers", testUtils.brokerAddress)
        .option("topic", topic)
        .start()

      try {
        input.addData("1", "2", "3", "4", "5")
        failAfter(streamingTimeout) {
          writer.processAllAvailable()
        }
      } finally {
        writer.stop()
      }
    }

    // Read the written records back and verify the +1 transformation.
    val streamingDf = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", testUtils.brokerAddress)
      .option("startingOffsets", s"earliest")
      .option("subscribe", topic)
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]
      .map(kv => kv._2.toInt + 1)

    testStream(streamingDf)(
      StartStream(),
      AssertOnQuery { q =>
        q.processAllAvailable()
        true
      },
      CheckAnswer(2, 3, 4, 5, 6),
      StopStream
    )
  }
}
#!/bin/bash -e
#
# Launches five seeds of the random_2 / 20% training configuration in
# parallel, each pinned to its own GPU, with per-seed logs under
# log/random_2/20/. Ctrl-C is forwarded to all child processes.

mkdir -p log/random_2/20

CUDA_VISIBLE_DEVICES=7 python -u train.py --ORDER='random_2' --PERCENTAGE=20 --SEED=0 > log/random_2/20/log_seed_0.txt &
PIDS[0]=$!
CUDA_VISIBLE_DEVICES=8 python -u train.py --ORDER='random_2' --PERCENTAGE=20 --SEED=1 > log/random_2/20/log_seed_1.txt &
PIDS[1]=$!
CUDA_VISIBLE_DEVICES=10 python -u train.py --ORDER='random_2' --PERCENTAGE=20 --SEED=2 > log/random_2/20/log_seed_2.txt &
PIDS[2]=$!
CUDA_VISIBLE_DEVICES=12 python -u train.py --ORDER='random_2' --PERCENTAGE=20 --SEED=3 > log/random_2/20/log_seed_3.txt &
PIDS[3]=$!
CUDA_VISIBLE_DEVICES=13 python -u train.py --ORDER='random_2' --PERCENTAGE=20 --SEED=42 > log/random_2/20/log_seed_42.txt &
PIDS[4]=$!

# On Ctrl-C, kill every background run instead of orphaning them.
trap "kill ${PIDS[*]}" SIGINT

wait
<gh_stars>0
package com.wumeng.jetpackproject;

import android.app.Application;

import com.wumeng.jetpackproject.lifecycle.ForegroundCallbacks;

/**
 * @author WuMeng
 * @date 2020/9/2
 * desc: Application entry point; registers the ForegroundCallbacks lifecycle
 * tracker as soon as the process starts.
 */
public class App extends Application {

    @Override
    public void onCreate() {
        super.onCreate();
        // Start observing activity lifecycles app-wide.
        ForegroundCallbacks.init(this);
    }
}
"use strict"; exports.__esModule = true; exports.StyledImage = void 0; var _styledComponents = _interopRequireWildcard(require("styled-components")); var _utils = require("../../utils"); var _defaultProps = require("../../default-props"); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj["default"] = obj; return newObj; } } var FIT_MAP = { cover: 'cover', contain: 'contain' }; var fitStyle = (0, _styledComponents.css)(["flex:1 1;overflow:hidden;object-fit:", ";"], function (props) { return FIT_MAP[props.fit]; }); var StyledImage = _styledComponents["default"].img.withConfig({ displayName: "StyledImage", componentId: "ey4zx9-0" })(["", " ", " ", " ", ""], _utils.genericStyles, function (props) { return props.fit && fitStyle; }, function (props) { return props.theme.image && props.theme.image.extend; }, function (props) { return props.opacityProp && "opacity: " + (props.opacityProp === true ? props.theme.global.opacity.medium : props.theme.global.opacity[props.opacityProp] || props.opacityProp) + ";\n "; }); exports.StyledImage = StyledImage; StyledImage.defaultProps = {}; Object.setPrototypeOf(StyledImage.defaultProps, _defaultProps.defaultProps);
import os
import shutil


def remove_build_directories(modules, base_dir="ProjectRoot"):
    """Delete the ``bin`` build directory of each module under *base_dir*.

    Args:
        modules: Iterable of module directory names (e.g. ``"Build.Tests"``).
        base_dir: Root directory containing the module folders. Defaults to
            ``"ProjectRoot"``, preserving the original hard-coded behaviour
            while allowing callers (and tests) to target another root.

    Side effects:
        Recursively removes each ``<base_dir>/<module>/bin`` directory that
        exists and prints one status line per module.
    """
    for module in modules:
        build_dir = os.path.join(base_dir, module, "bin")
        if os.path.exists(build_dir):
            shutil.rmtree(build_dir)
            print(f"Removed build directory for {module}")
        else:
            print(f"Build directory for {module} does not exist")


# Example usage
modules_to_remove = ["Build.Tests", "Build.Behave"]
remove_build_directories(modules_to_remove)
package ru.zzz.demo.sber.shs.server;

import io.netty.channel.socket.nio.NioServerSocketChannel;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.http.server.reactive.ReactorHttpHandlerAdapter;
import org.springframework.lang.NonNull;
import org.springframework.stereotype.Component;
import org.springframework.web.server.adapter.WebHttpHandlerBuilder;
import reactor.netty.DisposableServer;
import reactor.netty.http.server.HttpServer;
import ru.zzz.demo.sber.shs.config.ServerConfig;
import ru.zzz.demo.sber.shs.server.impl.NettyServerResources;

/**
 * Implements a simple and manageable server which theoretically may be stopped gracefully.
 *
 * <p>Lifecycle: the server is started from {@link #afterPropertiesSet()} (i.e. when this
 * bean is initialized by Spring) and stopped from {@link #destroy()} on context shutdown.
 *
 * @implNote
 * Spring Boot can start WebFlux server automatically has manage it with a set of customizers but I didn't
 * find a simple way to make it work after my customizations and I have much doubts about its ability to
 * shutdown gracefully.
 */
@Component("SHS.Infrastructure.HttpServer.HttpServerStarter")
public class HttpServerStarter implements InitializingBean, DisposableBean {
  private final ApplicationContext ctx;
  private final ServerConfig config;
  // Set in afterPropertiesSet()/createServer(); null until the server has started.
  private DisposableServer server;
  private NettyServerResources resources;

  @Autowired
  HttpServerStarter(ApplicationContext ctx, ServerConfig config) {
    this.ctx = ctx;
    this.config = config;
  }

  @Override
  public void afterPropertiesSet() {
    // Start the HTTP server once all dependencies of this bean are injected.
    server = createServer(config);
  }

  @Override
  public void destroy() {
    // Stop accepting connections first, then release the Netty event-loop groups.
    if (server != null) server.disposeNow();
    if (resources != null) resources.shutdown();
  }

  /**
   * Builds and binds a Reactor Netty HTTP server that serves the application's
   * WebFlux handler on the configured port, using the acceptor/selector event
   * loop groups from {@link NettyServerResources}.
   *
   * <p>Blocks until the bind completes; on failure the just-created resources
   * are shut down and a {@link RuntimeException} is thrown.
   */
  @NonNull
  private DisposableServer createServer(ServerConfig serverConfig) {
    resources = NettyServerResources.create(serverConfig);
    // Adapts the whole Spring application context into a single reactive HTTP handler.
    HttpHandler handler = WebHttpHandlerBuilder.applicationContext(ctx).build();
    return HttpServer.create()
        //.host(serverConfig.hostName())
        .port(serverConfig.port())
        .tcpConfiguration(tcpServer -> tcpServer.bootstrap(serverBootstrap -> {
          return serverBootstrap.group(resources.getAcceptorGroup(), resources.getSelectorGroup())
              .channel(NioServerSocketChannel.class);
        }))
        .handle(new ReactorHttpHandlerAdapter(handler))
        .bind()
        .blockOptional()
        .orElseThrow(() -> {
          // Bind failed: free the event loops we allocated above before failing the bean.
          resources.shutdown();
          return new RuntimeException("Cannot start WebFlux server");
        });
  }
}
<gh_stars>0 import React from "react" import Layout from "../components/layout" import SEO from "../components/seo" import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faGithub } from "@fortawesome/free-brands-svg-icons" const IndexPage = () => ( <Layout pageInfo={{ pageName: "Projects" }}> <SEO title="Projects" keywords={[`gatsby`, `react`, `bootstrap`]} /> <h2>Projects</h2> <h2 style={{marginTop: '2rem', marginBottom: '1.5rem'}}>Work in Progress</h2> <div class="row row-cols-1 row-cols-md-3 g-4"> <a href="https://github.com/kingchappers/adUserProvisioner" className="text-decoration-none" style={styles.cardLink}> <div class="card" style={styles.cardSize}> <div class="card-body"> <h5 class="card-title">Ad User Provisioner</h5> <p class="card-text">The idea of this program is to help automate the creation of users in Active Directory. This tool will use a configuration file allowing people to select from a list of approved options when it comes to things like department, office, etc. This should ensure that there is less chance of a user's configuration being a problem in the future. </p> <a className="nav-link mx-5" href="https://github.com/kingchappers/adUserProvisioner" target="_blank" rel="noopener noreferrer"><FontAwesomeIcon icon={faGithub} size='2x' color='black'/> <span className="sr-only"></span></a> </div> </div> </a> </div> <h2 style={{marginTop: '3rem', marginBottom: '1.5rem'}}>Finished Projects</h2> <div class="row row-cols-1 row-cols-md-3 g-4"> <a href="https://github.com/kingchappers/devBlog" className="text-decoration-none" style={styles.cardLink}> <div class="card" style={styles.cardSize}> <div class="card-body"> <h5 class="card-title">GatsbyJS</h5> <p class="card-text">GatsbyJS is the framework I've used for this site. 
I used the <a href="https://www.gatsbyjs.org/starters/jaxx2104/gatsby-starter-bootstrap/" target="_blank" rel="noopener noreferrer">gatsby-starter-bootstrap </a> starter, it's a fairly basic starter that incorporates bootstrap. The starter was created by <a href="https://github.com/jaxx2104" target="_blank" rel="noopener noreferrer">jaxx2104</a> so they deserve credit.</p> <a className="nav-link mx-5" href="https://github.com/kingchappers/devBlog" target="_blank" rel="noopener noreferrer"><FontAwesomeIcon icon={faGithub} size='2x' color='black'/> <span className="sr-only"></span></a> </div> </div> </a> <a href="https://github.com/kingchappers/dm-outrun" className="text-decoration-none" style={styles.cardLink}> <div class="card" style={styles.cardSize}> <div class="card-body"> <h5 class="card-title">rEFInd Theme - DM-Outrun</h5> <p class="card-text">rEFInd is a boot manager available for systems using UEFI. I created a theme for the loader based on <a href="https://github.com/mustaqimM/dm" target="_blank" rel="noopener noreferrer">mustaqimM's DM</a> theme. All I've really done to the theme is made it a little more Outrun.</p> <a className="nav-link mx-5" href="https://github.com/kingchappers/dm-outrun" target="_blank" rel="noopener noreferrer"><FontAwesomeIcon icon={faGithub} size='2x' color='black'/> <span className="sr-only"></span></a> </div> </div> </a> </div> <h2 style={{marginTop: '3rem', marginBottom: '1.5rem'}}>Discontinued Projects</h2> <div class="row row-cols-1 row-cols-md-3 g-4"> <a href="https://github.com/kingchappers/auto-cyber-range" className="text-decoration-none" style={styles.cardLink}> <div class="card" style={styles.cardSize}> <div class="card-body"> <h5 class="card-title">Athena</h5> <p class="card-text">The aim of this project is to allow people to configure and use a cyber range without having to go through the painstaking process of building a virtual network from scratch. 
In the initial stages I'll be creating a basic docker network, then I'll work on auto configuring some virtual machines, and finally move to make a front-end for the project. This one got discontinued as I found out about the GNS3 project which mostly did what I intended to do with this, I figured I'd move onto other things as I didn't want to replicate other peoples work.</p> <a className="nav-link mx-5" href="https://github.com/kingchappers/auto-cyber-range" target="_blank" rel="noopener noreferrer"><FontAwesomeIcon icon={faGithub} size='2x' color='black'/> <span className="sr-only"></span></a> </div> </div> </a> </div> </Layout> ) const styles ={ cardLink: { color: "#000000", }, cardSize: { maxWidth: '20rem', margin: '1rem', }, }; export default IndexPage
// Test bootstrap: configure Enzyme for React 16, then pull in every spec
// file under ./test via webpack's require.context.
const enzyme = require('enzyme');
const Adapter = require('enzyme-adapter-react-16');

const adapter = new Adapter();
enzyme.configure({
  adapter,
  disableLifecycleMethods: true,
});

// Recursively match every .js file in /test and require each one.
const testModules = require.context('./test', true, /\.js$/);
// eslint-disable-next-line lodash/prefer-lodash-method
testModules.keys().forEach(testModules);
/*
 * Copyright (C) 2020 The Dagger Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dagger.internal.codegen;

import static com.google.testing.compile.CompilationSubject.assertThat;
import static com.google.testing.compile.Compiler.javac;
import static dagger.internal.codegen.GeneratedLines.GENERATED_CODE_ANNOTATIONS;
import static java.util.stream.Collectors.joining;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.testing.compile.Compilation;
import com.google.testing.compile.Compiler;
import com.google.testing.compile.JavaFileObjects;
import java.util.Arrays;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Golden-file test for Dagger's component sharding: when a component has more
 * bindings than the configured shard size, the processor must split the
 * generated component into inner Shard classes. The expected source below is a
 * partial match checked with {@code containsElementsIn}, so it need not list
 * every generated member.
 */
@RunWith(JUnit4.class)
public class ComponentShardTest {
  // Shard size passed to the processor via -Adagger.keysPerComponentShard.
  private static final int BINDINGS_PER_SHARD = 10;

  @Test
  public void testNewShardCreated() {
    // Create 2N + 1 bindings: N in DaggerTestComponent, N in Shard1, and 1 in Shard2
    int numBindings = 2 * BINDINGS_PER_SHARD + 1;
    ImmutableList.Builder<JavaFileObject> javaFileObjects = ImmutableList.builder();
    ImmutableList.Builder<String> entryPoints = ImmutableList.builder();
    for (int i = 0; i < numBindings; i++) {
      String bindingName = "Binding" + i;
      // Each binding gets both a direct and a Provider entry point.
      entryPoints.add(String.format("%1$s get%1$s();", bindingName));
      entryPoints.add(String.format("Provider<%1$s> get%1$sProvider();", bindingName));

      // Add dependencies between main component and shard1: 9 -> 10 -> Provider<9>
      // Add dependencies between shard1 and shard2: 19 -> 20 -> Provider<19>
      switch (i) {
        case 9:
          javaFileObjects.add(createBinding(bindingName, "Binding10 dep"));
          break;
        case 10:
          javaFileObjects.add(createBinding(bindingName, "Provider<Binding9> dep"));
          break;
        case 19:
          javaFileObjects.add(createBinding(bindingName, "Binding20 dep"));
          break;
        case 20:
          javaFileObjects.add(createBinding(bindingName, "Provider<Binding19> dep"));
          break;
        default:
          javaFileObjects.add(createBinding(bindingName));
          break;
      }
    }
    javaFileObjects.add(createComponent(entryPoints.build()));

    // This generated component shows a couple things:
    //   1. Binding locations:
    //      * Binding #9 belongs to DaggerTestComponent
    //      * Binding #10 belongs to Shard1
    //      * Binding #20 belongs to Shard2
    //   2. DaggerTestComponent entry point methods:
    //      * Binding #9 implementation is inlined DaggerTestComponent.
    //      * Binding #10 implementation is delegated to Shard1.
    //      * Binding #20 implementation is delegated to Shard2.
    //   3. Dependencies between component and shard:
    //      * Binding #9 in DaggerTestComponent depends on #10 in Shard1.
    //      * Binding #10 in Shard1 depends on Provider<#9> in DaggerTestComponent.
    //   4. Dependencies between shard and shard:
    //      * Binding #19 in Shard1 depends on #20 in Shard2.
    //      * Binding #20 in Shard2 depends on Provider<#19> in Shard1.
    JavaFileObject generatedComponent =
        JavaFileObjects.forSourceLines(
            "dagger.internal.codegen.DaggerTestComponent",
            "package dagger.internal.codegen;",
            GENERATED_CODE_ANNOTATIONS,
            "final class DaggerTestComponent implements TestComponent {",
            "  private final Shard1 shard1 = new Shard1();",
            "",
            "  private volatile Provider<Binding9> binding9Provider;",
            "",
            "  private volatile Object binding9 = new MemoizedSentinel();",
            "",
            "  @Override",
            "  public Binding9 getBinding9() {",
            "    Object local = binding9;",
            "    if (local instanceof MemoizedSentinel) {",
            "      synchronized (local) {",
            "        local = binding9;",
            "        if (local instanceof MemoizedSentinel) {",
            "          local = new Binding9(DaggerTestComponent.this.shard1.binding10());",
            "          binding9 = DoubleCheck.reentrantCheck(binding9, local);",
            "        }",
            "      }",
            "    }",
            "    return (Binding9) local;",
            "  }",
            "",
            "  @Override",
            "  public Provider<Binding9> getBinding9Provider() {",
            "    Object local = binding9Provider;",
            "    if (local == null) {",
            "      local = new SwitchingProvider<>(9);",
            "      binding9Provider = (Provider<Binding9>) local;",
            "    }",
            "    return (Provider<Binding9>) local;",
            "  }",
            "",
            "  @Override",
            "  public Binding10 getBinding10() {",
            "    return DaggerTestComponent.this.shard1.binding10();",
            "  }",
            "",
            "  @Override",
            "  public Provider<Binding10> getBinding10Provider() {",
            "    return DaggerTestComponent.this.shard1.binding10Provider();",
            "  }",
            "",
            "  @Override",
            "  public Binding20 getBinding20() {",
            "    return DaggerTestComponent.this.shard2.binding20();",
            "  }",
            "",
            "  @Override",
            "  public Provider<Binding20> getBinding20Provider() {",
            "    return DaggerTestComponent.this.shard2.binding20Provider();",
            "  }",
            "",
            "  private final class Shard1 {",
            "    private volatile Object binding10 = new MemoizedSentinel();",
            "",
            "    private volatile Provider<Binding10> binding10Provider;",
            "",
            "    private volatile Provider<Binding19> binding19Provider;",
            "",
            "    private volatile Object binding19 = new MemoizedSentinel();",
            "",
            "    private Binding10 binding10() {",
            "      Object local = binding10;",
            "      if (local instanceof MemoizedSentinel) {",
            "        synchronized (local) {",
            "          local = binding10;",
            "          if (local instanceof MemoizedSentinel) {",
            "            local = new Binding10(",
            "                DaggerTestComponent.this.getBinding9Provider());",
            "            binding10 = DoubleCheck.reentrantCheck(binding10, local);",
            "          }",
            "        }",
            "      }",
            "      return (Binding10) local;",
            "    }",
            "",
            "    private Provider<Binding10> binding10Provider() {",
            "      Object local = binding10Provider;",
            "      if (local == null) {",
            "        local = new SwitchingProvider<>(10);",
            "        binding10Provider = (Provider<Binding10>) local;",
            "      }",
            "      return (Provider<Binding10>) local;",
            "    }",
            "",
            "    private Provider<Binding19> binding19Provider() {",
            "      Object local = binding19Provider;",
            "      if (local == null) {",
            "        local = new SwitchingProvider<>(19);",
            "        binding19Provider = (Provider<Binding19>) local;",
            "      }",
            "      return (Provider<Binding19>) local;",
            "    }",
            "",
            "    private Binding19 binding19() {",
            "      Object local = binding19;",
            "      if (local instanceof MemoizedSentinel) {",
            "        synchronized (local) {",
            "          local = binding19;",
            "          if (local instanceof MemoizedSentinel) {",
            "            local = new Binding19(DaggerTestComponent.this.shard2.binding20());",
            "            binding19 = DoubleCheck.reentrantCheck(binding19, local);",
            "          }",
            "        }",
            "      }",
            "      return (Binding19) local;",
            "    }",
            "  }",
            "",
            "  private final class Shard2 {",
            "    private volatile Object binding20 = new MemoizedSentinel();",
            "",
            "    private volatile Provider<Binding20> binding20Provider;",
            "",
            "    private Binding20 binding20() {",
            "      Object local = binding20;",
            "      if (local instanceof MemoizedSentinel) {",
            "        synchronized (local) {",
            "          local = binding20;",
            "          if (local instanceof MemoizedSentinel) {",
            "            local = new Binding20(",
            "                DaggerTestComponent.this.shard1.binding19Provider());",
            "            binding20 = DoubleCheck.reentrantCheck(binding20, local);",
            "          }",
            "        }",
            "      }",
            "      return (Binding20) local;",
            "    }",
            "",
            "    private Provider<Binding20> binding20Provider() {",
            "      Object local = binding20Provider;",
            "      if (local == null) {",
            "        local = new SwitchingProvider<>(20);",
            "        binding20Provider = (Provider<Binding20>) local;",
            "      }",
            "      return (Provider<Binding20>) local;",
            "    }",
            "  }",
            "}");
    Compilation compilation = compilerWithAndroidMode().compile(javaFileObjects.build());
    assertThat(compilation).succeededWithoutWarnings();
    assertThat(compilation)
        .generatedSourceFile("dagger.internal.codegen.DaggerTestComponent")
        .containsElementsIn(generatedComponent);
  }

  /** Builds a @Singleton @Inject class named {@code bindingName} whose constructor takes {@code deps}. */
  private static JavaFileObject createBinding(String bindingName, String... deps) {
    return JavaFileObjects.forSourceLines(
        "dagger.internal.codegen." + bindingName,
        "package dagger.internal.codegen;",
        "",
        "import javax.inject.Inject;",
        "import javax.inject.Provider;",
        "import javax.inject.Singleton;",
        "",
        "@Singleton",
        "final class " + bindingName + " {",
        "  @Inject",
        "  " + bindingName + "(" + Arrays.stream(deps).collect(joining(", ")) + ") {}",
        "}");
  }

  /** Builds the @Singleton @Component interface exposing the given entry-point method signatures. */
  private static JavaFileObject createComponent(ImmutableList<String> entryPoints) {
    return JavaFileObjects.forSourceLines(
        "dagger.internal.codegen.TestComponent",
        "package dagger.internal.codegen;",
        "",
        "import dagger.Component;",
        "import javax.inject.Provider;",
        "import javax.inject.Singleton;",
        "",
        "@Singleton",
        "@Component",
        "interface TestComponent {",
        "  " + entryPoints.stream().collect(joining("\n  ")),
        "}");
  }

  /** Compiler configured with fast-init mode and the small shard size used by this test. */
  private static Compiler compilerWithAndroidMode() {
    return javac()
        .withProcessors(new ComponentProcessor())
        .withOptions(
            ImmutableSet.builder()
                .add("-Adagger.keysPerComponentShard=" + BINDINGS_PER_SHARD)
                .addAll(CompilerMode.FAST_INIT_MODE.javacopts())
                .build());
  }
}
""" Function to induct crosslingual word embeddings. """ import os import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) from load_monolingual import save_clew from modify_dictionary import cut_dictionary_to_vocabulary from supervised_cle import Projection_based_clwe from evaluation_bli import Evaluator from unsupervised_cle import VecMap from text_encoders import TextEncoders from utils import normalize_matrix def clew_induction(path_source_language, path_target_language, train_translation_dict_path, train_translation_dict_1k_path, test_translation_dict_path, new_test_translation_path, name_translation, number_tokens=100000, save_embedding=False): """Induce Cross Lingual Word Embeddings (Proc, Proc-B, VecMap) and Evaluate them on BLI task. Args: path_source_language (path): Path to Source Embedding. path_target_language (path): Path to Target Embedding. train_translation_dict_path (path): Path to Translation dictionary 5k train_translation_dict_1k_path (path): Path to Translation dictionary 1k test_translation_dict_path (path): Path to Translation test dictionary new_test_translation_path (path): Path to Translation test dictionary name_translation (str): name of saved files number_tokens (int): number of tokens used for monolingual word embeddings save_embedding (boolean): To save or not save the created CLWE Returns: """ print("\nFirst, we cut the test dictionaries to the monolingual vocabularies:") cut_dictionary_to_vocabulary(path_source_language, path_target_language, test_translation_dict_path, new_test_translation_path, number_tokens=number_tokens) test_translation_dict_path = new_test_translation_path # PROC - 5K dictionary print("--------------------------------") print("\nCreate procrustes model with 5000 translation pairs") proc_algorithm = Projection_based_clwe(path_source_language, path_target_language, train_translation_dict_path, number_tokens=number_tokens) proc_algorithm.proc(source_to_target=True) Evaluator(proc_algorithm, 
test_translation_dict_path).evaluation_on_BLI() if save_embedding: save_clew(proc_algorithm, name_translation + "_proc_5k") del proc_algorithm # PROC - 1K dictionary print("--------------------------------") print("\nCreate procrustes model with 1000 translation pairs") proc_algorithm = Projection_based_clwe(path_source_language, path_target_language, train_translation_dict_1k_path, number_tokens=number_tokens) proc_algorithm.proc(source_to_target=True) Evaluator(proc_algorithm, test_translation_dict_path).evaluation_on_BLI() if save_embedding: save_clew(proc_algorithm, name_translation + "_proc_1k") del proc_algorithm # PROC-B - 1K dictionary print("--------------------------------") print("\nCreate procrustes bootstrapping model with 1000 translation pairs") proc_b_algorithm = Projection_based_clwe(path_source_language, path_target_language, train_translation_dict_1k_path, number_tokens=number_tokens) proc_b_algorithm.proc_bootstrapping(growth_rate=1.5, limit=10000) Evaluator(proc_b_algorithm, test_translation_dict_path).evaluation_on_BLI() if save_embedding: save_clew(proc_b_algorithm, name_translation + "_proc_b_1k") del proc_b_algorithm # Unsupervised VecMap print("--------------------------------") print("\nCreate VecMap model") vec_map = VecMap(path_source_language, path_target_language, number_tokens=100000) # Please use GPU if available and install cupy use_gpu = True vec_map.build_seed_dictionary(use_gpu) vec_map.training_loop(use_gpu) Evaluator(vec_map, test_translation_dict_path).evaluation_on_BLI() if save_embedding: vec_map.proj_embedding_source_target = normalize_matrix(vec_map.proj_embedding_source_target) vec_map.target_embedding_matrix = vec_map.norm_trg_embedding_matrix save_clew(vec_map, name_translation + "_vecmap") del vec_map # Text Encoder First Layer print("--------------------------------") print("\nCreate Text Encoder First Layer model") xlm_r = TextEncoders("xlm-r") xlm_r.create_source_target_embedding(test_translation_dict_path, 
use_layer=1) Evaluator(xlm_r, test_translation_dict_path).evaluation_on_BLI() del xlm_r # Text Encoder Last Layer print("--------------------------------") print("\nCreate Text Encoder Last Layer model") xlm_r_last_layer = TextEncoders("xlm-r") xlm_r_last_layer.create_source_target_embedding(test_translation_dict_path, use_layer=12) Evaluator(xlm_r_last_layer, test_translation_dict_path).evaluation_on_BLI() del xlm_r_last_layer
package net.cabezudo.sofia.geography;

import java.io.IOException;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import net.cabezudo.json.JSON;
import net.cabezudo.json.exceptions.ElementNotExistException;
import net.cabezudo.json.exceptions.JSONParseException;
import net.cabezudo.json.exceptions.PropertyNotExistException;
import net.cabezudo.json.values.JSONArray;
import net.cabezudo.json.values.JSONValue;
import net.cabezudo.sofia.core.cluster.ClusterException;
import net.cabezudo.sofia.core.cluster.ClusterManager;
import net.cabezudo.sofia.core.configuration.Configuration;
import net.cabezudo.sofia.core.database.sql.Database;
import net.cabezudo.sofia.core.exceptions.SofiaRuntimeException;
import net.cabezudo.sofia.core.geolocation.Latitude;
import net.cabezudo.sofia.core.geolocation.Longitude;
import net.cabezudo.sofia.core.languages.Language;
import net.cabezudo.sofia.geometry.Point;
import net.cabezudo.sofia.geometry.Polygon;
import net.cabezudo.sofia.geometry.Polygons;
import net.cabezudo.sofia.logger.Logger;

/**
 * Singleton manager for the tree of administrative divisions (country, state,
 * municipality, ...). Division rows live in SQL; each division's boundary
 * polygons live in a per-division JSON file under the system data path.
 * Lookup by coordinate walks the tree and tests point-in-polygon membership.
 *
 * @author <a href="http://cabezudo.net"><NAME></a>
 * @version 0.01.00, 2021.04.20
 */
public class AdministrativeDivisionManager {

  public static final String ADMINISTRATIVE_DIVISION_DATA_PATH_NAME = "AdministrativeDivisions";

  private static AdministrativeDivisionManager INSTANCE;

  // Directory holding one <fileId>.json polygon file per division.
  private final Path dataPath;

  // Lazily-created singleton accessor. NOTE(review): not synchronized —
  // presumably only called from a single thread at startup; verify.
  public static AdministrativeDivisionManager getInstance() {
    if (INSTANCE == null) {
      INSTANCE = new AdministrativeDivisionManager();
    }
    return INSTANCE;
  }

  private AdministrativeDivisionManager() {
    Path systemDataPath = Configuration.getInstance().getSystemDataPath();
    dataPath = systemDataPath.resolve(ADMINISTRATIVE_DIVISION_DATA_PATH_NAME);
  }

  /**
   * Finds the division of the given type containing the given coordinate,
   * searching from the root of the division tree (parent = null).
   */
  public AdministrativeDivision get(Longitude longitude, Latitude latitude, AdministrativeDivisionType administrativeDivisionType)
      throws JSONParseException, IOException, ClusterException, InvalidPolygonDataException {
    try {
      return get(null, longitude, latitude, administrativeDivisionType);
    } catch (PropertyNotExistException e) {
      // Not expected on this path; treat as a programming error.
      throw new SofiaRuntimeException(e);
    }
  }

  // Opens its own connection and delegates to the connection-taking overload.
  private List<AdministrativeDivision> getChilds(AdministrativeDivision parent) throws ClusterException {
    try (Connection connection = Database.getConnection()) {
      return getChils(connection, parent);
    } catch (SQLException e) {
      throw new ClusterException(e);
    }
  }

  /**
   * Loads the direct children of {@code parent} from the database; a null
   * parent selects the root divisions (parent id 0).
   * NOTE(review): method name keeps the original typo ("getChils") because it
   * is public API — renaming would break external callers.
   */
  public List<AdministrativeDivision> getChils(Connection connection, AdministrativeDivision parent) throws ClusterException {
    Logger.fine("Get childs for " + parent);
    String query
        = "SELECT id, type, code, fileId "
        + "FROM " + AdministrativeDivisionTable.DATABASE_NAME + "." + AdministrativeDivisionTable.NAME + " AS ad "
        + "WHERE parent = ?";
    ResultSet rs = null;
    try (PreparedStatement ps = connection.prepareStatement(query);) {
      ps.setInt(1, parent == null ? 0 : parent.getId());
      rs = ClusterManager.getInstance().executeQuery(ps);
      ArrayList<AdministrativeDivision> list = new ArrayList<>();
      while (rs.next()) {
        int id = rs.getInt("id");
        int typeId = rs.getInt("type");
        String code = rs.getString("code");
        int fileId = rs.getInt("fileId");
        AdministrativeDivisionType type = AdministrativeDivisionTypeManager.getInstance().get(typeId);
        list.add(new AdministrativeDivision(id, type, code, fileId, parent));
      }
      return list;
    } catch (SQLException e) {
      throw new ClusterException(e);
    } finally {
      ClusterManager.getInstance().close(rs);
    }
  }

  /**
   * Recursive coordinate lookup. For each child of {@code parent}:
   * fileId == 0 means the child has no polygon data, so recurse blindly;
   * otherwise parse the child's polygon file and test containment. On a hit,
   * return the child if it is of the requested type, else keep descending and
   * fall back to the containing child itself.
   */
  private AdministrativeDivision get(AdministrativeDivision parent, Longitude longitude, Latitude latitude, AdministrativeDivisionType administrativeDivisionType)
      throws JSONParseException, IOException, PropertyNotExistException, ClusterException, InvalidPolygonDataException {
    Point point = new Point(longitude.toDouble(), latitude.toDouble());
    List<AdministrativeDivision> list = getChilds(parent);
    for (AdministrativeDivision administrativeDivision : list) {
      int fileId = administrativeDivision.getFileId();
      if (fileId == 0) {
        AdministrativeDivision actualAdministrativeDivision = get(administrativeDivision, longitude, latitude, administrativeDivisionType);
        if (actualAdministrativeDivision != null) {
          return actualAdministrativeDivision;
        }
      } else {
        // TODO add a cache for the object generated
        Path administrativeDivisionDataFilePath = dataPath.resolve(fileId + ".json");
        JSONValue jsonAdministrativeDivisionData = JSON.parse(administrativeDivisionDataFilePath, Configuration.getDefaultCharset());
        JSONArray jsonjsonAdministrativeDivisionDataDataArray = jsonAdministrativeDivisionData.toJSONArray();
        Polygons polygons = createPolygon(jsonjsonAdministrativeDivisionDataDataArray);
        if (polygons.isInside(point)) {
          if (administrativeDivision.getType().equals(administrativeDivisionType)) {
            return administrativeDivision;
          }
          AdministrativeDivision result = get(administrativeDivision, longitude, latitude, administrativeDivisionType);
          if (result != null) {
            return result;
          }
          return administrativeDivision;
        }
      }
    }
    return null;
  }

  /**
   * Builds a Polygons collection from a JSON array of polygons, where each
   * polygon is a flat array of alternating longitude, latitude doubles.
   *
   * @throws InvalidPolygonDataException if a coordinate pair is incomplete.
   */
  private Polygons createPolygon(JSONArray data) throws InvalidPolygonDataException {
    Polygons polygons = new Polygons();
    for (JSONValue jsonValue : data) {
      JSONArray polygonData = jsonValue.toJSONArray();
      Polygon polygon = new Polygon();
      for (int i = 0; i < polygonData.size(); i += 2) {
        try {
          double longitude = polygonData.getDouble(i);
          double latitude = polygonData.getDouble(i + 1);
          Point point = new Point(longitude, latitude);
          polygon.add(point);
        } catch (ElementNotExistException e) {
          throw new InvalidPolygonDataException(e);
        }
      }
      polygons.add(polygon);
    }
    return polygons;
  }

  // Convenience overload that manages its own connection.
  public AdministrativeDivision add(AdministrativeDivisionType administrativeDivisionType, String code, Integer fileId, AdministrativeDivision parent) throws ClusterException {
    try (Connection connection = Database.getConnection()) {
      return add(connection, administrativeDivisionType, code, fileId, parent);
    } catch (SQLException e) {
      throw new ClusterException(e);
    }
  }

  /**
   * Inserts a division row (null fileId/parent stored as 0) and returns the
   * new object carrying the generated database id.
   */
  public AdministrativeDivision add(Connection connection, AdministrativeDivisionType type, String code, Integer fileId, AdministrativeDivision parent) throws ClusterException {
    String query = "INSERT INTO " + AdministrativeDivisionTable.DATABASE_NAME + "." + AdministrativeDivisionTable.NAME
        + " (`type`, `code`, `fileId`, `parent`) VALUES (?, ?, ?, ?)";
    ResultSet rs = null;
    try (PreparedStatement ps = connection.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);) {
      ps.setInt(1, type.getId());
      ps.setString(2, code);
      ps.setInt(3, fileId == null ? 0 : fileId);
      ps.setInt(4, parent == null ? 0 : parent.getId());
      ClusterManager.getInstance().executeUpdate(ps);
      rs = ps.getGeneratedKeys();
      if (rs.next()) {
        int id = rs.getInt(1);
        return new AdministrativeDivision(id, type, code, fileId, parent);
      }
      throw new SofiaRuntimeException("Can't get the generated key");
    } catch (SQLException e) {
      throw new ClusterException(e);
    } finally {
      ClusterManager.getInstance().close(rs);
    }
  }

  // Convenience overload for adding a localized name with its own connection.
  public void add(AdministrativeDivision administrativeDivision, Language language, String name) throws ClusterException {
    try (Connection connection = Database.getConnection()) {
      add(connection, administrativeDivision, language, name);
    } catch (SQLException e) {
      throw new ClusterException(e);
    }
  }

  /**
   * Inserts a localized name for an existing division.
   * NOTE(review): the local {@code rs} is declared but never assigned here;
   * the finally-close is a no-op kept for byte-identity.
   */
  public AdministrativeDivisionName add(Connection connection, AdministrativeDivision administrativeDivision, Language language, String name) throws ClusterException {
    int id = administrativeDivision.getId();
    String query = "INSERT INTO " + AdministrativeDivisionNameTable.DATABASE_NAME + "." + AdministrativeDivisionNameTable.NAME
        + " (`id`, `language`, `value`) VALUES (?, ?, ?)";
    ResultSet rs = null;
    try (PreparedStatement ps = connection.prepareStatement(query);) {
      ps.setInt(1, id);
      ps.setInt(2, language.getId());
      ps.setString(3, name);
      ClusterManager.getInstance().executeUpdate(ps);
      return new AdministrativeDivisionName(id, language, name);
    } catch (SQLException e) {
      throw new ClusterException(e);
    } finally {
      ClusterManager.getInstance().close(rs);
    }
  }
}
# Fix the program
# Reads a number; values above one hundred are reduced by one hundred before
# the closing messages are printed.
number = int(input("Please type in a number: "))

if number > 100:
    print("The number was greater than one hundred")
    number -= 100
    print("Now its value has decreased by one hundred")
    print("Its value is now " + str(number))

print(str(number) + " must be my lucky number!")
print("Have a nice day!")
#!/bin/bash
# Run CNN prediction for one kfold subfolder:
#   - creates output_cnn/<subfolder>/predict next to the repo root
#   - activates the conda env and runs main_master.py cnn_predict

# We need kfold subfolder
if [ $# -eq 0 ]; then
    echo "Arguments needed: cnn_subfolder"
    exit 1          # non-zero so callers can detect the usage error
fi
cnn_subfolder="$1"

# Resolve the repo layout relative to this script's real location.
# $(...) replaces backticks; quoting survives paths with spaces.
SCRIPT="$(realpath "$0")"
SCRIPTPATH="$(dirname "$SCRIPT")"
MAINPATH="$(dirname "$SCRIPTPATH")"

# Make directory to add nohup output log
output_path="$MAINPATH/output_cnn/$cnn_subfolder/predict"

echo "Saving output to folder: $cnn_subfolder"

# Run the commands directly instead of eval-ing strings: eval re-splits
# words and breaks on paths containing spaces or shell metacharacters.
mkdir -p "$output_path"

# Change conda environment
source activate 3dunet_36

# Start script
python "$MAINPATH/main_master.py" cnn_predict "$cnn_subfolder"
//
// This file was generated with the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.3.2
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
// Any modifications to this file will be lost when the source schema is recompiled.
// Generated: 2020.03.13 at 12:48:52 PM CET
//

package net.opengis.wfs._2;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for AllSomeType.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * &lt;simpleType name="AllSomeType"&gt;
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt;
 *     &lt;enumeration value="ALL"/&gt;
 *     &lt;enumeration value="SOME"/&gt;
 *   &lt;/restriction&gt;
 * &lt;/simpleType&gt;
 * </pre>
 *
 */
@XmlType(name = "AllSomeType")
@XmlEnum
public enum AllSomeType {

    ALL,
    SOME;

    // XML lexical value of this constant; identical to the enum name here.
    public String value() {
        return name();
    }

    // Inverse of value(); throws IllegalArgumentException for unknown values.
    public static AllSomeType fromValue(String v) {
        return valueOf(v);
    }

}
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.taxhistory.controllers

import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfterEach, OptionValues}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.mvc.ControllerComponents
import play.api.test.FakeRequest
import play.api.test.Helpers._
import uk.gov.hmrc.agentmtdidentifiers.model.Arn
import uk.gov.hmrc.domain.Nino
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.taxhistory.model.api.{IncomeSource, TaxAccount}
import uk.gov.hmrc.taxhistory.model.utils.TestUtil
import uk.gov.hmrc.taxhistory.services.EmploymentHistoryService
import uk.gov.hmrc.taxhistory.utils.{HttpErrors, TestRelationshipAuthService}
import uk.gov.hmrc.time.TaxYear

import java.util.UUID
import scala.concurrent.{ExecutionContext, Future}

/**
 * Unit spec for TaxAccountController: checks the HTTP status codes returned by
 * getTaxAccount and getIncomeSource for successful lookups, upstream errors,
 * and callers without an agent relationship.
 */
class TaxAccountControllerSpec
    extends AnyWordSpec
    with Matchers
    with OptionValues
    with GuiceOneServerPerSuite
    with MockitoSugar
    with TestUtil
    with BeforeAndAfterEach {

  private val mockEmploymentHistoryService = mock[EmploymentHistoryService]

  // One NINO with an agent relationship registered below, one without.
  private val ninoWithAgent    = randomNino()
  private val ninoWithoutAgent = randomNino()

  private val testTaxAccount = TaxAccount()
  private val testTaxYear    = TaxYear.current.previous.currentYear
  private val testTaxCode    = "1150L"

  val cc: ControllerComponents                    = stubControllerComponents()
  implicit val executionContext: ExecutionContext = cc.executionContext

  // Fixed: `beforeEach` is declared with an empty parameter list in
  // BeforeAndAfterEach, so the override must carry the parens too
  // (paren-less overrides of paren-ful methods are deprecated).
  override def beforeEach(): Unit =
    reset(mockEmploymentHistoryService)

  val testTaxAccountController = new TaxAccountController(
    employmentHistoryService = mockEmploymentHistoryService,
    relationshipAuthService = TestRelationshipAuthService(Map(ninoWithAgent -> Arn("TestArn"))),
    cc
  )

  "getTaxAccount" must {
    "respond with OK for successful get" in {
      when(mockEmploymentHistoryService.getTaxAccount(any[Nino], any[TaxYear])(any[HeaderCarrier]))
        .thenReturn(Future.successful(Some(testTaxAccount)))

      val result = testTaxAccountController.getTaxAccount(ninoWithAgent.nino, testTaxYear).apply(FakeRequest())
      status(result) shouldBe OK
    }

    "propagate error responses from upstream microservices" in {
      HttpErrors.toCheck.foreach { case (httpException, expectedStatus) =>
        when(mockEmploymentHistoryService.getTaxAccount(any(), any())(any[HeaderCarrier]))
          .thenReturn(Future.failed(httpException))

        val result = testTaxAccountController.getTaxAccount(ninoWithAgent.nino, testTaxYear).apply(FakeRequest())
        status(result) shouldBe expectedStatus
      }
    }

    "respond with UNAUTHORIZED Status for enrolments which is not HMRC Agent" in {
      when(mockEmploymentHistoryService.getTaxAccount(any[Nino], any[TaxYear])(any[HeaderCarrier]))
        .thenReturn(Future.successful(Some(testTaxAccount)))

      val result = testTaxAccountController.getTaxAccount(ninoWithoutAgent.nino, testTaxYear).apply(FakeRequest())
      status(result) shouldBe UNAUTHORIZED
    }
  }

  "getIncomeSource" must {
    // Fixed typo: testEmnploymentId -> testEmploymentId (local to this spec).
    val testEmploymentId = UUID.randomUUID().toString
    val testIncomeSource = IncomeSource(1, 1, None, List.empty, List.empty, testTaxCode, None, 1, "")

    "respond with OK for successful get" in {
      when(mockEmploymentHistoryService.getIncomeSource(any[Nino], any[TaxYear], any[String])(any[HeaderCarrier]))
        .thenReturn(Future.successful(Some(testIncomeSource)))

      val result =
        testTaxAccountController.getIncomeSource(ninoWithAgent.nino, testTaxYear, testEmploymentId).apply(FakeRequest())
      status(result) shouldBe OK
    }

    "propagate error responses from upstream microservices" in {
      HttpErrors.toCheck.foreach { case (httpException, expectedStatus) =>
        when(mockEmploymentHistoryService.getIncomeSource(any(), any(), any[String])(any[HeaderCarrier]))
          .thenReturn(Future.failed(httpException))

        val result =
          testTaxAccountController.getIncomeSource(ninoWithAgent.nino, testTaxYear, testEmploymentId).apply(FakeRequest())
        status(result) shouldBe expectedStatus
      }
    }

    "respond with UNAUTHORIZED Status for enrolments which is not HMRC Agent" in {
      when(mockEmploymentHistoryService.getIncomeSource(any[Nino], any[TaxYear], any[String])(any[HeaderCarrier]))
        .thenReturn(Future.successful(Some(testIncomeSource)))

      val result =
        testTaxAccountController.getIncomeSource(ninoWithoutAgent.nino, testTaxYear, testEmploymentId).apply(FakeRequest())
      status(result) shouldBe UNAUTHORIZED
    }
  }
}
#!/bin/sh
# shellcheck disable=SC2039
# shellcheck disable=SC2155

# Wrapper around the uqmi binary: runs it against /dev/$CDC_DEV under a
# timeout and only echoes the output if it is valid JSON.
# (The inner `uqmi` is exec'd by `timeout`, so this does not recurse.)
uqmi() {
  local JSON
  if JSON="$(timeout -s KILL "$LTESTAT_TIMEOUT" uqmi -d "/dev/$CDC_DEV" "$@")"; then
    if echo "$JSON" | jq -ea . > /dev/null 2>&1; then
      echo "$JSON"
      return 0
    fi
  fi
  return 1
}

# We prefer to use uqmi over qmicli as a CLI agent for QMI devices.
# However, some fields are available for retrieval only with qmicli.
qmicli() {
  timeout -s KILL "$LTESTAT_TIMEOUT" qmicli -p -d "/dev/$CDC_DEV" "$@"
}

# Emit WDS packet/byte/drop counters as a JSON object; missing fields
# default to 0.
qmi_get_packet_stats() {
  local STATS="$(qmicli --wds-get-packet-statistics)"
  local TXP=$(parse_modem_attr "$STATS" "TX packets OK")
  local TXB=$(parse_modem_attr "$STATS" "TX bytes OK")
  local TXD=$(parse_modem_attr "$STATS" "TX packets dropped")
  local RXP=$(parse_modem_attr "$STATS" "RX packets OK")
  local RXB=$(parse_modem_attr "$STATS" "RX bytes OK")
  local RXD=$(parse_modem_attr "$STATS" "RX packets dropped")
  json_struct \
    "$(json_attr tx-bytes "${TXB:-0}")" "$(json_attr tx-packets "${TXP:-0}")" "$(json_attr tx-drops "${TXD:-0}")" \
    "$(json_attr rx-bytes "${RXB:-0}")" "$(json_attr rx-packets "${RXP:-0}")" "$(json_attr rx-drops "${RXD:-0}")"
}

# Emit rssi/rsrq/rsrp/snr as JSON, substituting $UNAVAIL_SIGNAL_METRIC for
# any metric the modem did not report.
qmi_get_signal_info() {
  local INFO
  INFO="$(uqmi --get-signal-info)" || INFO="{}"
  # Fixed: FILTER was assigned without `local`, leaking an implicit global.
  local FILTER="{rssi: (if .rssi == null then $UNAVAIL_SIGNAL_METRIC else .rssi end),
                 rsrq: (if .rsrq == null then $UNAVAIL_SIGNAL_METRIC else .rsrq end),
                 rsrp: (if .rsrp == null then $UNAVAIL_SIGNAL_METRIC else .rsrp end),
                 snr:  (if .snr  == null then $UNAVAIL_SIGNAL_METRIC else .snr  end)}"
  echo "$INFO" | jq -c "$FILTER"
}

# qmi_get_op_mode returns one of: "" (aka unspecified), "online",
# "online-and-connected", "radio-off", "offline", "unrecognized"
qmi_get_op_mode() {
  local OP_MODE="$(qmicli --dms-get-operating-mode | sed -n "s/\s*Mode: '\(.*\)'/\1/p")"
  case "$OP_MODE" in
    "online")
      if [ "$(uqmi --get-data-status)" = '"connected"' ]; then
        echo "online-and-connected"
      else
        echo "online"
      fi
    ;;
    "offline") echo "$OP_MODE"
    ;;
    "low-power" | "persistent-low-power" | "mode-only-low-power") echo "radio-off"
    ;;
    *) echo "unrecognized"
    ;;
  esac
}

qmi_get_imei() {
  uqmi --get-imei | tr -d '"'
}

qmi_get_modem_model() {
  qmicli --dms-get-model | sed -n "s/\s*Model: '\(.*\)'/\1/p"
}

qmi_get_modem_revision() {
  qmicli --dms-get-revision | sed -n "s/\s*Revision: '\(.*\)'/\1/p"
}

# Scan for networks and emit a de-duplicated JSON array of providers with
# plmn ("MCC-MNC"), description, current-serving and roaming flags.
qmi_get_providers() {
  local PROVIDERS
  if ! PROVIDERS="$(uqmi --network-scan)"; then
    echo "[]"
    return 1
  fi
  # Fixed: FILTER was assigned without `local`, leaking an implicit global.
  local FILTER='[.network_info[] | { "plmn": [if .mcc == null then "000" else .mcc end,
                                              if .mnc == null then "000" else .mnc end] | join("-"),
                                     "description": .description,
                                     "current-serving": .status | contains(["current_serving"]),
                                     "roaming": .status | contains(["roaming"])} ] | unique'
  echo "$PROVIDERS" | jq -c "$FILTER"
}

get_get_sim_iccid() {
  local OUTPUT
  # Get ICCID from User Identity Module (UIM).
  # Please refer to ETSI/3GPP "TS 102 221" section 13.2 for the coding of this EF.
  if ! OUTPUT="$(qmicli --uim-read-transparent=0x3F00,0x2FE2)"; then
    return 1
  fi
  printf "%s" "$OUTPUT" | awk '
    BEGIN{FS=":"; ORS=""}
    /Read result:/ {target=NR+1}
    (NR==target) {
      for(i=1; i<=NF; i++) {
        gsub(/[ \tF]*/,"",$i);
        # Each byte contains 2 digits.
        # First digit of each pair is encoded by the less significant half of the byte.
        # For digits to be read from left to right, they need to be swapped.
        print substr($i, 2, 1);
        print substr($i, 1, 1);
      }
    }'
}

get_get_sim_imsi() {
  local OUTPUT
  # Get IMSI from User Identity Module (UIM).
  # Please refer to ETSI/3GPP "TS 31.102" section 4.2.2 for the coding of this EF.
  if ! OUTPUT="$(qmicli --uim-read-transparent=0x3F00,0x7FFF,0x6F07)"; then
    return 1
  fi
  printf "%s" "$OUTPUT" | awk '
    BEGIN{FS=":"; ORS=""}
    /Read result:/ {target=NR+1}
    (NR==target) {
      # We skip the first byte (starting with i=2) containing the IMSI length.
      for(i=2; i<=NF; i++) {
        gsub(/[ \tF]*/,"",$i);
        # Each byte contains 2 digits.
        # First digit of each pair is encoded by the less significant half of the byte.
        # For digits to be read from left to right, they need to be swapped.
        # Also, we skip the third digit (first substr for i=2) with parity check.
        if (i>2) print substr($i, 2, 1);
        print substr($i, 1, 1);
      }
    }'
}

# Emit a JSON array describing the SIM card (ICCID + IMSI).
qmi_get_sim_cards() {
  # FIXME XXX Limited to a single SIM card
  # Fixed: ICCID/IMSI/SIM were assigned without `local`, leaking globals.
  local ICCID IMSI SIM
  if ! ICCID="$(get_get_sim_iccid)"; then
    echo "[]"
    return 1
  fi
  if ! IMSI="$(get_get_sim_imsi)"; then
    echo "[]"
    return 1
  fi
  SIM="$(json_struct "$(json_str_attr "iccid" "$ICCID")" "$(json_str_attr "imsi" "$IMSI")")\n"
  printf "%b" "$SIM" | json_array
}

# Bring up the data session for $APN in raw-IP mode and record the packet
# data handle (pdh) for later teardown.
qmi_start_network() {
  echo "[$CDC_DEV] Starting network for APN ${APN}"
  ip link set "$IFACE" down
  echo Y > "/sys/class/net/$IFACE/qmi/raw_ip"
  ip link set "$IFACE" up

  uqmi --sync
  uqmi --start-network --apn "${APN}" --keep-client-id wds |\
    mbus_publish "pdh_$IFACE"
}

qmi_wait_for_sim() {
  # FIXME XXX this is only for MBIM for now
  :
}

qmi_wait_for_wds() {
  echo "[$CDC_DEV] Waiting for DATA services to connect"
  local CMD="uqmi --get-data-status | jq -r ."
  if ! wait_for connected "$CMD"; then
    echo "Timeout waiting for DATA services to connect" >&2
    return 1
  fi
}

qmi_wait_for_register() {
  echo "[$CDC_DEV] Waiting for the device to register on the network"
  local CMD="uqmi --get-serving-system | jq -r .registration"
  if ! wait_for registered "$CMD"; then
    echo "Timeout waiting for the device to register on the network" >&2
    return 1
  fi
}

qmi_wait_for_settings() {
  echo "[$CDC_DEV] Waiting for IP configuration for the $IFACE interface"
  local CMD="uqmi --get-current-settings"
  if ! wait_for connected "$CMD | jq -r .ipv4.ip | grep -q \"$IPV4_REGEXP\" && echo connected"; then
    echo "Timeout waiting for IP configuration for the $IFACE interface" >&2
    return 1
  fi
}

qmi_reset_modem() {
  # last ditch attempt to reset our modem -- not sure how effective :-(
  local PDH="$(cat "${BBS}/pdh_${IFACE}.json" 2>/dev/null)"
  local i

  # Stop the recorded session (and the catch-all 0xFFFFFFFF handle) both
  # before and after the DMS reset.
  for i in "$PDH" 0xFFFFFFFF ; do
    uqmi --stop-network "$i" --autoconnect || continue
  done

  qmicli --dms-reset

  for i in "$PDH" 0xFFFFFFFF ; do
    uqmi --stop-network "$i" --autoconnect || continue
  done
}

# Toggle the radio: "off" -> persistent low power, anything else -> online.
qmi_toggle_rf() {
  if [ "$1" = "off" ]; then
    echo "[$CDC_DEV] Disabling RF"
    uqmi --set-device-operating-mode "persistent_low_power"
  else
    echo "[$CDC_DEV] Enabling RF"
    uqmi --set-device-operating-mode "online"
  fi
}
#! /bin/bash aurora job killall example/root/staging19/tensorflow_benchmark_prediction.big--0
#!/bin/sh
# Bring up the iris_2 drone: request a telemetry stream, arm the vehicle,
# switch it to offboard mode, and start evaluation recording.

# MAVLink command 511 with params (32, 5000, ...) — presumably
# SET_MESSAGE_INTERVAL for message id 32 at a 5000 us interval; confirm
# against the MAVLink command list.
rosrun mavros mavcmd -n iris_2/mavros long 511 32 5000 0 0 0 0 0;
rosservice call /iris_2/mavros/cmd/arming '{value: true}';
# Fixed: the original argument '{base_mode: 0, custom_mode: 'offboard'}'
# nested single quotes inside single quotes, which terminates the outer
# quoting and leaves "offboard" unquoted in the YAML payload. Use double
# quotes on the outside so the inner single quotes survive.
rosservice call /iris_2/mavros/set_mode "{base_mode: 0, custom_mode: 'offboard'}";
rosservice call /eval/start_recording
<reponame>OSWeDev/oswedev import ModuleAccessPolicy from '../../../shared/modules/AccessPolicy/ModuleAccessPolicy'; import AccessPolicyGroupVO from '../../../shared/modules/AccessPolicy/vos/AccessPolicyGroupVO'; import AccessPolicyVO from '../../../shared/modules/AccessPolicy/vos/AccessPolicyVO'; import PolicyDependencyVO from '../../../shared/modules/AccessPolicy/vos/PolicyDependencyVO'; import ModulePerfMon from '../../../shared/modules/PerfMon/ModulePerfMon'; import PerfMonLineTypeVO from '../../../shared/modules/PerfMon/vos/PerfMonLineTypeVO'; import DefaultTranslationManager from '../../../shared/modules/Translation/DefaultTranslationManager'; import DefaultTranslation from '../../../shared/modules/Translation/vos/DefaultTranslation'; import ModuleTrigger from '../../../shared/modules/Trigger/ModuleTrigger'; import AccessPolicyServerController from '../AccessPolicy/AccessPolicyServerController'; import ModuleAccessPolicyServer from '../AccessPolicy/ModuleAccessPolicyServer'; import DAOPostCreateTriggerHook from '../DAO/triggers/DAOPostCreateTriggerHook'; import DAOPostDeleteTriggerHook from '../DAO/triggers/DAOPostDeleteTriggerHook'; import DAOPostUpdateTriggerHook from '../DAO/triggers/DAOPostUpdateTriggerHook'; import ModuleServerBase from '../ModuleServerBase'; import ModulesManagerServer from '../ModulesManagerServer'; import PerfMonConfController from './PerfMonConfController'; export default class ModulePerfMonServer extends ModuleServerBase { public static getInstance() { if (!ModulePerfMonServer.instance) { ModulePerfMonServer.instance = new ModulePerfMonServer(); } return ModulePerfMonServer.instance; } private static instance: ModulePerfMonServer = null; private constructor() { super(ModulePerfMon.getInstance().name); } public async configure() { DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation( { 'fr-fr': 'Types de performance' }, 'menu.menuelements.admin.perfmon_line_type.___LABEL___')); 
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation( { 'fr-fr': 'Performances' }, 'menu.menuelements.admin.PerfMonAdminVueModule.___LABEL___')); let postUpdateTrigger: DAOPostUpdateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostUpdateTriggerHook.DAO_POST_UPDATE_TRIGGER); postUpdateTrigger.registerHandler(PerfMonLineTypeVO.API_TYPE_ID, PerfMonConfController.getInstance().throttled_update_cached_perf_conf); let postDeleteTrigger: DAOPostDeleteTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostDeleteTriggerHook.DAO_POST_DELETE_TRIGGER); postDeleteTrigger.registerHandler(PerfMonLineTypeVO.API_TYPE_ID, PerfMonConfController.getInstance().throttled_update_cached_perf_conf); let postCreateTrigger: DAOPostCreateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostCreateTriggerHook.DAO_POST_CREATE_TRIGGER); postCreateTrigger.registerHandler(PerfMonLineTypeVO.API_TYPE_ID, PerfMonConfController.getInstance().throttled_update_cached_perf_conf); } /** * On définit les droits d'accès du module */ public async registerAccessPolicies(): Promise<void> { let group: AccessPolicyGroupVO = new AccessPolicyGroupVO(); group.translatable_name = ModulePerfMon.POLICY_GROUP; group = await ModuleAccessPolicyServer.getInstance().registerPolicyGroup(group, new DefaultTranslation({ 'fr-fr': 'PerfMon' })); let bo_access: AccessPolicyVO = new AccessPolicyVO(); bo_access.group_id = group.id; bo_access.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN; bo_access.translatable_name = ModulePerfMon.POLICY_BO_ACCESS; bo_access = await ModuleAccessPolicyServer.getInstance().registerPolicy(bo_access, new DefaultTranslation({ 'fr-fr': 'Administration des PerfMon' }), await ModulesManagerServer.getInstance().getModuleVOByName(this.name)); let admin_access_dependency: PolicyDependencyVO = new PolicyDependencyVO(); admin_access_dependency.default_behaviour = 
PolicyDependencyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED; admin_access_dependency.src_pol_id = bo_access.id; admin_access_dependency.depends_on_pol_id = AccessPolicyServerController.getInstance().get_registered_policy(ModuleAccessPolicy.POLICY_BO_ACCESS).id; admin_access_dependency = await ModuleAccessPolicyServer.getInstance().registerPolicyDependency(admin_access_dependency); } }
package mod.vemerion.leagueoflegendsbrand.renderer.champion;

import java.util.Random;

import com.mojang.blaze3d.matrix.MatrixStack;
import com.mojang.blaze3d.vertex.IVertexBuilder;

import mod.vemerion.leagueoflegendsbrand.Main;
import mod.vemerion.leagueoflegendsbrand.helper.Helper;
import mod.vemerion.leagueoflegendsbrand.model.BrandModel;
import mod.vemerion.leagueoflegendsbrand.model.CubeModel;
import mod.vemerion.leagueoflegendsbrand.renderer.GlowingBrandLayer;
import net.minecraft.client.entity.player.AbstractClientPlayerEntity;
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.entity.EntityRendererManager;
import net.minecraft.entity.Pose;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.util.HandSide;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.vector.Quaternion;
import net.minecraft.util.math.vector.Vector3d;

/**
 * Champion renderer for Brand: delegates all drawing to the inner
 * {@link Renderer} and maps the Q/W/E/R ability hooks to the corresponding
 * arm animations (Sear, Pillar of Flame, Conflagration, Pyroclasm).
 */
public class BrandRenderer extends ChampionRenderer {

	private final Renderer RENDERER;

	public BrandRenderer(EntityRendererManager renderManager) {
		RENDERER = new Renderer(renderManager);
	}

	@Override
	public void renderThirdPerson(AbstractClientPlayerEntity player, float yaw, float partialTicks, MatrixStack matrix,
			IRenderTypeBuffer buffer, int light) {
		RENDERER.render(player, yaw, partialTicks, matrix, buffer, light);
	}

	// R ability -> Pyroclasm arm animation.
	@Override
	protected boolean renderR(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
			AbstractClientPlayerEntity player, float partialTicks) {
		RENDERER.renderPyroclasm(side, progress, matrix, buffer, light, player, partialTicks);
		return true;
	}

	// E ability -> Conflagration arm animation.
	@Override
	protected boolean renderE(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
			AbstractClientPlayerEntity player, float partialTicks) {
		RENDERER.renderConflagration(side, progress, matrix, buffer, light, player, partialTicks);
		return true;
	}

	// W ability -> Pillar of Flame arm animation.
	@Override
	protected boolean renderW(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
			AbstractClientPlayerEntity player, float partialTicks) {
		RENDERER.renderPillarOfFlame(side, progress, matrix, buffer, light, player, partialTicks);
		return true;
	}

	// Q ability -> Sear arm animation.
	@Override
	protected boolean renderQ(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
			AbstractClientPlayerEntity player, float partialTicks) {
		RENDERER.renderSear(side, progress, matrix, buffer, light, player, partialTicks);
		return true;
	}

	@Override
	public void renderHand(HandSide side, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
			AbstractClientPlayerEntity player, float partialTicks, float swingProgress, float equipProgress) {
		RENDERER.renderHand(side, matrix, buffer, light, player, partialTicks, swingProgress, equipProgress);
	}

	/**
	 * The actual player renderer: Brand's model, a glowing layer, burning
	 * particles on head and arms, and the per-ability arm poses.
	 */
	public static class Renderer extends HumanRenderer {

		public static final ResourceLocation TEXTURES = new ResourceLocation(Main.MODID, "textures/entity/brand.png");

		public Renderer(EntityRendererManager renderManager) {
			super(renderManager);
			entityModel = new BrandModel(0);
			addLayer(new GlowingBrandLayer(this));
		}

		/**
		 * Mirror factor for hand-relative transforms: +1 for the right hand,
		 * -1 for the left. Extracted because it was duplicated in each of the
		 * four ability-animation methods.
		 */
		private static int handOffset(HandSide side) {
			return side == HandSide.RIGHT ? 1 : -1;
		}

		@Override
		public ResourceLocation getEntityTexture(AbstractClientPlayerEntity entity) {
			return TEXTURES;
		}

		@Override
		public void render(AbstractClientPlayerEntity entityIn, float entityYaw, float partialTicks,
				MatrixStack matrixStackIn, IRenderTypeBuffer bufferIn, int packedLightIn) {
			super.render(entityIn, entityYaw, partialTicks, matrixStackIn, bufferIn, packedLightIn);
			renderBurningHead(entityIn, partialTicks, matrixStackIn, bufferIn, packedLightIn);
		}

		@Override
		protected void preRenderArm(HandSide side, MatrixStack matrixStackIn, IRenderTypeBuffer bufferIn,
				int combinedLightIn, AbstractClientPlayerEntity playerIn, float partialTicks) {
			super.preRenderArm(side, matrixStackIn, bufferIn, combinedLightIn, playerIn, partialTicks);
			matrixStackIn.push();
			// Original used (side == LEFT ? 1 : -1), i.e. the negated offset.
			matrixStackIn.translate(0.35 * -handOffset(side), -0.35, 0.2);
			renderBurning(35, playerIn, matrixStackIn, bufferIn, partialTicks, combinedLightIn);
			matrixStackIn.pop();
		}

		// Draws the burning-particle effect around the head, except in poses
		// where the head transform would not line up (swimming/elytra).
		private void renderBurningHead(AbstractClientPlayerEntity player, float partialTicks, MatrixStack matrix,
				IRenderTypeBuffer bufferIn, int light) {
			Pose pose = player.getPose();
			float eyeHeight = player.getStandingEyeHeight(pose, player.getSize(pose));
			if (pose == Pose.SWIMMING || pose == Pose.FALL_FLYING) {
				return;
			}

			matrix.push();
			matrix.rotate(new Quaternion(-90, 0, 0, true));
			matrix.translate(0, -1, 0.1 + eyeHeight);
			renderBurning(35, player, matrix, bufferIn, partialTicks, light);
			matrix.pop();
		}

		// Q: quick forward jab that follows `progress`.
		private void renderSear(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer, int light,
				AbstractClientPlayerEntity player, float partialTicks) {
			int offset = handOffset(side);
			matrix.push();
			matrix.rotate(new Quaternion(-70, -offset * 5, offset * progress * 20, true));
			matrix.translate(offset * (1 - progress * 0.25), 0.2, -0.7);
			renderArm(side, matrix, buffer, light, player, partialTicks);
			matrix.pop();
		}

		// W: arm sweeps down/forward as `progress` advances.
		private void renderPillarOfFlame(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer,
				int light, AbstractClientPlayerEntity player, float partialTicks) {
			int offset = handOffset(side);
			matrix.push();
			matrix.rotate(new Quaternion(-90 * progress, -offset * 5, offset * (20 - progress * 20), true));
			matrix.translate(offset * (0.5 + progress), 0, 0.3 - 2 * progress);
			renderArm(side, matrix, buffer, light, player, partialTicks);
			matrix.pop();
		}

		// E: arm rotates outward while held forward.
		private void renderConflagration(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer,
				int light, AbstractClientPlayerEntity player, float partialTicks) {
			int offset = handOffset(side);
			matrix.push();
			matrix.rotate(new Quaternion(-70, -offset * 5, offset * (20 - progress * 90), true));
			matrix.translate(offset * (0.75 + progress * 0.25), 0.2, -0.7);
			renderArm(side, matrix, buffer, light, player, partialTicks);
			matrix.pop();
		}

		// R: big wind-up, arm raises as `progress` advances.
		private void renderPyroclasm(HandSide side, float progress, MatrixStack matrix, IRenderTypeBuffer buffer,
				int light, AbstractClientPlayerEntity player, float partialTicks) {
			int offset = handOffset(side);
			matrix.push();
			matrix.rotate(new Quaternion(-30 - 60 * progress, -offset * 5, offset * (20 - progress * 90), true));
			matrix.translate(offset * (0.75 + progress * 0.25), 0, -0.7);
			renderArm(side, matrix, buffer, light, player, partialTicks);
			matrix.pop();
		}

		// Renders `count` small animated cubes drifting along +z to simulate
		// flames. The fixed RNG seed (0) keeps positions stable across frames;
		// only the time-based lerp animates them.
		private static void renderBurning(int count, PlayerEntity player, MatrixStack matrix,
				IRenderTypeBuffer buffers, float partialTicks, int light) {
			float ageInTicks = player.ticksExisted + partialTicks;
			CubeModel model = new CubeModel();
			IVertexBuilder ivertexbuilder = buffers.getBuffer(model.getRenderType(model.getTexture()));
			Random random = new Random(0);
			for (int i = 0; i < count; i++) {
				int interval = random.nextInt(12) + 12;
				Vector3d offset = new Vector3d(random.nextDouble() * 0.4 - 0.20, random.nextDouble() * 0.4 - 0.20,
						random.nextDouble() * 0.4 - 0.20).add(0, 0, Helper.lerpRepeat(ageInTicks / interval, 0, 0.8f));
				float scale = Helper.lerpRepeat(ageInTicks / interval, 1, 0);
				model.render(random, ageInTicks, scale, offset, matrix, ivertexbuilder, light);
			}
		}
	}
}
-- Migration (down/cleanup): drop the foreign-key constraint that ties
-- crates_categories.category_id to its referenced table.
ALTER TABLE crates_categories
    DROP CONSTRAINT fk_crates_categories_category_id;