text
stringlengths
1
1.05M
// CBacnetBuildingIOPoints.cpp: 实现文件 // #include "stdafx.h" #include "T3000.h" #include "CBacnetBuildingIOPoints.h" #include "afxdialogex.h" // CBacnetBuildingIOPoints 对话框 IMPLEMENT_DYNAMIC(CBacnetBuildingIOPoints, CDialogEx) CBacnetBuildingIOPoints::CBacnetBuildingIOPoints(CWnd* pParent /*=nullptr*/) : CDialogEx(IDD_DIALOG_BACNET_BUILDING_IO_POINT, pParent) { } CBacnetBuildingIOPoints::~CBacnetBuildingIOPoints() { } void CBacnetBuildingIOPoints::DoDataExchange(CDataExchange* pDX) { CDialogEx::DoDataExchange(pDX); DDX_Control(pDX, IDC_LIST_BM_IO_POINTS, m_io_list); } BEGIN_MESSAGE_MAP(CBacnetBuildingIOPoints, CDialogEx) ON_MESSAGE(WM_REFRESH_BAC_BUILDING_IO_LIST, Fresh_Building_IO_List) END_MESSAGE_MAP() void CBacnetBuildingIOPoints::Fresh() { Initial_List(); Fresh_Building_IO_Test_List(); } // CBacnetBuildingIOPoints 消息处理程序 void CBacnetBuildingIOPoints::Initial_List() { m_io_list.ShowWindow(SW_HIDE); m_io_list.DeleteAllItems(); while (m_io_list.DeleteColumn(0)); m_io_list.ModifyStyle(0, LVS_SINGLESEL | LVS_REPORT | LVS_SHOWSELALWAYS); //m_io_list.SetExtendedStyle(m_io_list.GetExtendedStyle() |LVS_EX_FULLROWSELECT |LVS_EX_GRIDLINES); m_io_list.SetExtendedStyle(m_io_list.GetExtendedStyle() | LVS_EX_GRIDLINES & (~LVS_EX_FULLROWSELECT));//Not allow full row select. 
m_io_list.InsertColumn(BM_IO_NAME, _T("Name"), 80, ListCtrlEx::EditBox, LVCFMT_LEFT, ListCtrlEx::SortByDigit); m_io_list.InsertColumn(BM_IO_FULL_LABLE, _T("Full Description"), 150, ListCtrlEx::EditBox, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_AUTO_MANUAL, _T("Auto/Manual"), 80, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_VALUE, _T("Value"), 80, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_UNITE, _T("Units"), 80, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_RANGE, _T("Range"), 100, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_DECOM, _T("Status"), 60, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_io_list.InsertColumn(BM_IO_TYPE, _T("Type"), 100, ListCtrlEx::Normal, LVCFMT_LEFT, ListCtrlEx::SortByString); m_pragram_dlg_hwnd = this->m_hWnd; //g_hwnd_now = m_pragram_dlg_hwnd; m_io_list.SetListHwnd(this->m_hWnd); CRect list_rect, win_rect; m_io_list.GetWindowRect(list_rect); ScreenToClient(&list_rect); ::GetWindowRect(m_pragram_dlg_hwnd, win_rect); m_io_list.Set_My_WindowRect(win_rect); m_io_list.Set_My_ListRect(list_rect); m_io_list.ShowWindow(SW_SHOW); } void CBacnetBuildingIOPoints::Fresh_Building_IO_Test_List() { for (int i = 0; i < 5; i++) { CString csBM_IO_NAME; CString cs_IO_FULL_LABLE ; CString cs_IO_AUTO_MANUAL ; CString cs_IO_VALUE = _T("0.00"); CString cs_IO_UNITE = _T(""); CString cs_IO_RANGE = _T("Unused"); CString cs_IO_DECOM = _T("Normal"); CString cs_BM_IO_TYPE; csBM_IO_NAME.Format(_T("IN%d"), i + 1); cs_IO_FULL_LABLE.Format(_T("Input%d"), i + 1); if (i % 2 == 0) { cs_BM_IO_TYPE = _T("Digital"); cs_IO_AUTO_MANUAL = _T("Auto"); } else { cs_BM_IO_TYPE = _T("Analog"); cs_IO_AUTO_MANUAL = _T("Manual"); } CString temp_item; temp_item.Format(_T("%d"), i + 1); m_io_list.InsertItem(i, temp_item); m_io_list.SetItemText(i, BM_IO_NAME, 
csBM_IO_NAME); m_io_list.SetItemText(i, BM_IO_FULL_LABLE, cs_IO_FULL_LABLE); m_io_list.SetItemText(i, BM_IO_AUTO_MANUAL, cs_IO_AUTO_MANUAL); m_io_list.SetItemText(i, BM_IO_VALUE, cs_IO_VALUE); m_io_list.SetItemText(i, BM_IO_UNITE, cs_IO_UNITE); m_io_list.SetItemText(i, BM_IO_RANGE, cs_IO_RANGE); m_io_list.SetItemText(i, BM_IO_DECOM, cs_IO_DECOM); m_io_list.SetItemText(i, BM_IO_TYPE, cs_BM_IO_TYPE); } } LRESULT CBacnetBuildingIOPoints::Fresh_Building_IO_List(WPARAM wParam, LPARAM lParam) { // Str_in_point Get_Str_in_Point(int index); int Fresh_Item; int isFreshOne = (int)lParam; return 0; }
#!/bin/bash
# Build the dockerize template list from plugin config and apply settings.ini
# as environment variables before invoking dockerize.
set -e

echo "Dockerize config ...."

# Pick up the optional Tomcat version marker so templates can reference it.
# (A false [[ ]] inside a && list does not trip `set -e`.)
[[ -f /srv/TOMCAT_VERSION ]] && TOMCAT_VERSION=$( cat /srv/TOMCAT_VERSION )

# Gather every plugin's list of templatable files into one combined file.
find /srv/plugins -mindepth 2 -maxdepth 2 -type f -name config-templatable.txt -exec cat {} >>/srv/config/templatable.txt \;

# Expand any shell variables inside each template path, then register it
# with dockerize (rendered in place: source == destination).
for template in $( sort -u /srv/config/templatable.txt ); do
    template_evaled=$( eval echo "${template}" )
    echo "Adding template \"${template_evaled}\""
    DOCKERIZE_TEMPLATES="${DOCKERIZE_TEMPLATES} -template ${template_evaled}:${template_evaled}"
done

# BUGFIX: the original ran `cat`/`rm` on settings.ini unconditionally after
# the guarded read loop; with `set -e` that aborted the whole script whenever
# the file was absent.  All settings.ini handling now lives inside the guard.
if [[ -e /srv/config/settings.ini ]]; then
    # Export each KEY=VALUE line so dockerize templates can use them.
    while read -r val
    do
        export "${val}"
    done < "/srv/config/settings.ini"

    echo "Applying config:"
    sed 's/^/ /g' /srv/config/settings.ini
    rm /srv/config/settings.ini
fi

# Intentionally unquoted: DOCKERIZE_TEMPLATES holds multiple arguments.
dockerize ${DOCKERIZE_TEMPLATES}
-- Seed data for the public-transport ticketing schema (database sdi1600077).
-- Foreign-key ids referenced below follow each table's insert order.
USE `sdi1600077` ;

-- Passenger fare categories: 1 = full fare, 2 = reduced, 3 = free.
INSERT INTO user_category (name) VALUES ("KANONIKO ΕΙΣΙΤΗΡΙΟ");
INSERT INTO user_category (name) VALUES ("ΜΕΙΩΜΕΝΟ ΕΙΣΙΤΗΡΙΟ");
INSERT INTO user_category (name) VALUES ("ΔΩΡΕΑΝ ΕΙΣΙΤΗΡΙΟ");

-- Colours used to render transit lines.
INSERT INTO colour (colour) VALUES ("green");
INSERT INTO colour (colour) VALUES ("red");
INSERT INTO colour (colour) VALUES ("blue");
INSERT INTO colour (colour) VALUES ("chartreuse");
INSERT INTO colour (colour) VALUES ("deepskyblue");
INSERT INTO colour (colour) VALUES ("darkorange");

-- Transport modes: 1 = ISAP electric railway, 2 = Athens Metro,
-- 3 = Athens Tram, 4 = buses, 5 = trolleybuses.
INSERT INTO transport (name) VALUES ("Ηλεκτρικός Σιδηρόδρομος (ΗΣΑΠ)");
INSERT INTO transport (name) VALUES ("ΜΕΤΡΟ Αθήνας");
INSERT INTO transport (name) VALUES ("ΤΡΑΜ Αθήνας");
INSERT INTO transport (name) VALUES ("Λεωφορεία");
INSERT INTO transport (name) VALUES ("Τρόλεϊ");

-- Line operating statuses: 1 = delays, 2 = works in progress,
-- 3 = normal operation, 4 = strikes, 5 = out of service.
INSERT INTO line_status (status) VALUES ("Καθυστερήσεις");
INSERT INTO line_status (status) VALUES ("Έργα σε εξέλιξη");
INSERT INTO line_status (status) VALUES ("Ομαλή λειτουργία");
INSERT INTO line_status (status) VALUES ("Απεργίες");
INSERT INTO line_status (status) VALUES ("Εκτός λειτουργίας");

-- Transit lines: (name, transport mode, status, map colour).
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("ΓΡΑΜΜΗ 1", 1, 1, 1);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("ΓΡΑΜΜΗ 2", 2, 3, 2);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("ΓΡΑΜΜΗ 3", 2, 3, 3);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("Τ3 - ΘΟΥΚΥΔΙΔΗΣ", 3, 4, 4);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("Τ4 - ΑΡΙΣΤΟΤΕΛΗΣ", 3, 4, 4);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("Τ5 - ΠΛΑΤΩΝΑΣ", 3, 4, 4);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("224", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("608", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("Α8", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("Α5", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("220", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("221", 4, 2, 5);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("6", 5, 3, 6);
INSERT INTO line (name, idtransport, idline_status, idcolour) VALUES ("9", 5, 3, 6);

-- Areas: (district, city, postal code).
INSERT INTO area (area, city, postal_code) VALUES ("Κεντρικός τομέας Αθηνών", "Δήμος Αθηναίων", "10557");
INSERT INTO area (area, city, postal_code) VALUES ("Κολωνάκι", "Αθήνα", "10676");
INSERT INTO area (area, city, postal_code) VALUES ("Πλατεία Βικτωρίας", "Αθήνα", "10434");
-- NOTE(review): "Δήμος Αθηνάιων" below looks like a typo of "Δήμος Αθηναίων"
-- (spelled correctly in the other rows) — confirm before fixing the data.
INSERT INTO area (area, city, postal_code) VALUES ("Ομόνοια", "Δήμος Αθηνάιων", "10431");
INSERT INTO area (area, city, postal_code) VALUES ("Πλατεία Ασωμάτων", "Δήμος Αθηναίων", "11851");
INSERT INTO area (area, city, postal_code) VALUES ("Γκάζι", "Αθήνα", "11854");

-- Stations: (name, latitude, longitude, area id, wheelchair accessible flag).
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΚΕΡΑΜΕΙΚΟΣ", '37.978779', '23.710873', 6, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΒΙΚΤΩΡΙΑ", '37.993095', '23.729804', 3, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΟΜΟΝΟΙΑ", '37.984037', '23.728041', 4, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΠΑΝΕΠΙΣΤΗΜΙΟ", '37.980595', '23.73283', 1, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΜΟΝΑΣΤΗΡΑΚΙ", '37.976431', '23.725905', 1, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΘΗΣΕΙΟ", '37.976928', '23.720529', 5, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΣΥΝΤΑΓΜΑ", '37.975443', '23.735484', 1, 1);
INSERT INTO station (name, latitude, longitude, idarea, disability_access) VALUES ("ΕΥΑΓΓΕΛΙΣΜΟΣ", '37.975937', '23.746923', 2, 1);

-- Line/station membership (many-to-many join rows).
INSERT INTO line_has_station (idline, idstation) VALUES (1, 2);
INSERT INTO line_has_station (idline, idstation) VALUES (1, 3);
INSERT INTO line_has_station (idline, idstation) VALUES (1, 5);
INSERT INTO line_has_station (idline, idstation) VALUES (1, 6);
INSERT INTO line_has_station (idline, idstation) VALUES (2, 3);
INSERT INTO line_has_station (idline, idstation) VALUES (2, 4);
INSERT INTO line_has_station (idline, idstation) VALUES (2, 7);
INSERT INTO line_has_station (idline, idstation) VALUES (3, 1);
INSERT INTO line_has_station (idline, idstation) VALUES (3, 5);
INSERT INTO line_has_station (idline, idstation) VALUES (3, 7);
INSERT INTO line_has_station (idline, idstation) VALUES (3, 8);

-- Demo users; password values are pre-computed bcrypt hashes.
INSERT INTO user (username, first_name, last_name, email, dob, phone, password, iduser_category) VALUES ("giorgos", "Γιώργος", "Κουρσιούνης", "<EMAIL>", "1998-03-22", "6923456787", "$2y$10$RN8bHlZk.wClRP10.q6LbOngU9aYbXdh09ZqLrhdfkzjeHWbiYpHq", 2);
INSERT INTO user (username, first_name, last_name, email, dob, phone, password, iduser_category) VALUES ("maria", "Μαρία", "Καραμηνά", "<EMAIL>", "1998-07-21", "6922356582", "$2y$10$imJxzYgyzUy6lKYzJEtjX.sAKZATGshdyp8TGia21.Cz4.6cUkize", 1);
INSERT INTO user (username, first_name, last_name, email, dob, phone, password, iduser_category) VALUES ("vassilis", "Βασίλης", "Πουλόπουλος", "<EMAIL>", "1998-04-02", "2102734567", "$2y$10$QEdo.f7/pHVPqK9zmnVabOycoUgj6xUc8fN711Jd6udnljjiICrUK", 3);

-- Ticket products: (description, price in EUR, eligible user category).
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Ενιαίο εισιτήριο για όλα τα μέσα 90 λεπτών (εκτός γραμμών Αεροδρομίου & γραμμής Χ80)", "1.40", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Ημερήσιο εισιτήριο για όλα τα μέσα (εκτός γραμμων Αεροδρομίου)", "4.50", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Εισιτήριο πέντε ημερών για όλα τα μέσα (εκτός γραμμών Αεροδρομίου & γραμμής Χ80)", "9.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Τουριστικό εισιτήριο 3 ημερών για όλα τα μέσα (περιλαμβάνει 1 διαδρομή από & προς το Αεροδρόμιο)", "22.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Κανονικό εισιτήριο λεωφορείων EXPRESS", "6.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Κανονικό εισιτήριο ΜΕΤΡΟ γραμμών Αεροδρομίου", "10.00", 1);
-- NOTE(review): the next row duplicates the five-day ticket inserted above —
-- confirm whether this second copy is intentional.
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Εισιτήριο πέντε ημερών για όλα τα μέσα (εκτός γραμμών Αεροδρομίου & γραμμής Χ80)", "9.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Εισιτήριο Αεροδρομίου μετ' επιστροφής ΜΕΤΡΟ", "18.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Εισιτήριο Αεροδρομίου από & προς τους σταθμούς Παλλήνη - Κάντζα - Κορωπί ΜΕΤΡΟ", "6.00", 1);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Μειωμένο ενιαίο εισιτήριο για όλα τα μέσα 90 λεπτών (εκτός γραμμών Αεροδρομίου & γραμμής Χ80)", "0.60", 2);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Μειωμένο εισιτήριο λεωφορείων EXPRESS", "3.00", 2);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Μειωμένο εισιτήριο ΜΕΤΡΟ γραμμών Αεροδρομίου", "5.00", 2);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Μειωμένο εισιτήριο Αεροδρομίου από & προς τους σταθμούς Παλλήνη - Κάντζα - Κορωπί ΜΕΤΡΟ", "3.00", 2);
INSERT INTO ticket_category (name, price, iduser_category) VALUES ("Δωρεάν εισιτήριο για όλα τα μέσα 90 λεπτών (εκτός γραμμών Αεροδρομίου & γραμμής Χ80)", "0.0", 3);

-- "giorgos" has a card with an expired fee. He can not issue a new card but he can renew his current one
INSERT INTO card (idticket_category, date, pin, iduser, expired) VALUES (10, "2020-01-15", "$2y$10$ypFCVHcaVpnc4FmifA81/uFKvEwdQXLGTcV9.BKGNmyqEndFdkkPG", 1, 1);
-- "maria" has a valid card. She can not issue a new card and she can not renew her current one
INSERT INTO card (idticket_category, date, pin, iduser, expired) VALUES (12, "2020-01-11", "$2y$10$ypFCVHcaVpnc4FmifA81/uFKvEwdQXLGTcV9.BKGNmyqEndFdkkPG", 2, 0);
-- "vassilis" has a ticket with an expired fee. He can issue a new one or renew his current one
INSERT INTO ticket (date, iduser, idticket_category, expired) VALUES ("2020-01-05", 3, 11, 1);
import block from 'bem-cn';
import React from 'react';
import { Field, reduxForm, InjectedFormProps } from 'redux-form';
import { bind } from 'decko';
import { compose } from 'redux';
import { connect } from 'react-redux';

import { EditorField } from 'shared/view/redux-form';
import { Modal, Button } from 'shared/view/elements';
import { selectors } from '../../../redux';
import { IAppReduxStateEnrich } from '../../../namespace';

import './AnnouncementsForm.scss';

// Callbacks and flags supplied by the parent feature container.
interface IOwnProps {
  saving: boolean;
  addAnnouncement(): void;
  editAnnouncement(index: number | null): void;
  clearModal(): void;
  saveChanges(): void;
}

// Slice of redux state mapped in below: index of the announcement currently
// selected for editing, or null when nothing is being edited.
interface IStateProps {
  modalIndex: number | null;
}

type IProps = IOwnProps & IStateProps & InjectedFormProps;

const b = block('announcements-form');

/**
 * Redux-form backed announcements editor: an add/edit modal containing a
 * rich-text "content" field, plus action buttons for adding announcements
 * and persisting changes.  The modal opens automatically whenever
 * `modalIndex` transitions to a non-null value (an announcement was picked
 * for editing elsewhere in the feature).
 */
class AnnouncementsForm extends React.Component<IProps> {
  public state = { isModalOpened: false };

  public componentDidUpdate(prevProps: IProps) {
    // React to an edit selection made outside this component.
    if (prevProps.modalIndex !== this.props.modalIndex && this.props.modalIndex !== null) {
      this.showEditModal(this.props.modalIndex);
    }
  }

  public render() {
    const { saving } = this.props;
    return (
      <div className={b()}>
        <Modal
          className={b('modal-announcements')()}
          isOpen={this.state.isModalOpened}
          title={'ADD ANNOUNCEMENT'}
          hasCloseCross
          onClose={this.closeModal}
        >
          <div className={b('editor-field')()}>
            {/* Extra props are spread through to the underlying editor. */}
            <Field
              name="content"
              type="text"
              component={EditorField}
              {...{ initialValue: '', plugins: 'link code', toolbar: 'undo redo | bold italic | code', height: 250 }}
            />
          </div>
          <div className={b('modal-buttons')()}>
            <Button color="text-blue" onClick={this.closeModal}>CANCEL</Button>
            <Button color="text-blue" onClick={this.addAnnouncement}>CONFIRM</Button>
          </div>
        </Modal>
        <div className={b('action-buttons')()}>
          <Button color="text-blue" onClick={this.openModal}>ADD NEW ANNOUNCEMENT</Button>
          <Button color="text-blue" disabled={saving} onClick={this.saveChanges} isShowPreloader={saving}>
            SAVE CHANGES
          </Button>
        </div>
      </div>
    );
  }

  // Opens the modal when a concrete announcement index was selected.
  @bind
  public showEditModal(index: number | null): void {
    if (index !== null) {
      this.setState({
        isModalOpened: true,
      });
    }
  }

  @bind
  private openModal() {
    this.setState({
      isModalOpened: true,
    });
  }

  // Closes the modal and asks the container to reset the edit selection.
  @bind
  private closeModal() {
    this.setState({ isModalOpened: false });
    this.props.clearModal();
  }

  // CONFIRM handler: creates a new announcement, or commits the edit when an
  // index is currently selected, then closes the modal.
  @bind
  private addAnnouncement() {
    if (this.props.modalIndex === null) {
      this.props.addAnnouncement();
    } else {
      this.props.editAnnouncement(this.props.modalIndex);
    }
    this.setState({ isModalOpened: false });
  }

  @bind
  private saveChanges() {
    this.props.saveChanges();
  }
}

const mapStateToProps = (state: IAppReduxStateEnrich): IStateProps => {
  return {
    modalIndex: selectors.selectModalIndex(state)
  };
};

// reduxForm wraps first so `connect` supplies state to the decorated form.
const enhance = compose(
  reduxForm<void, IOwnProps>({ form: 'announcements' }),
  connect(mapStateToProps),
);

export default enhance(AnnouncementsForm);
// Unit tests for the shared select-component validator.
// (Removed a stray "<filename>..." dataset artifact that made the file
// syntactically invalid, and fixed the "matchs" typo in the test name.)
import expect from "expect";
import sinon from "sinon";

import { SelectorValidation } from "../../../../Utility/Functions/Validation";

describe("Common Validators", () => {
    describe("Select Component Validator", () => {
        it("checks that the selected field matches one of the select component options", () => {
            const testField = "foo";
            const testOptions = ['bar', 'foo', 'foobar'];

            // A field present in the options list is valid.
            const valid = SelectorValidation(testField, [], testOptions);
            expect(valid).toBe(true);

            // A field absent from the options list is invalid.
            const invalid = SelectorValidation('barfoo', [], testOptions);
            expect(invalid).toBe(false);
        })
    })
})
<filename>opentaps/crmsfa/src/com/opensourcestrategies/crmsfa/voip/VoIPServices.java /* * Copyright (c) Open Source Strategies, Inc. * * Opentaps is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Opentaps is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Opentaps. If not, see <http://www.gnu.org/licenses/>. */ package com.opensourcestrategies.crmsfa.voip; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.net.Socket; import java.util.Locale; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.ofbiz.base.util.Debug; import org.ofbiz.base.util.UtilMisc; import org.ofbiz.base.util.UtilProperties; import org.ofbiz.base.util.UtilValidate; import org.ofbiz.entity.GenericValue; import org.ofbiz.service.DispatchContext; import org.ofbiz.service.GenericServiceException; import org.ofbiz.service.LocalDispatcher; import org.ofbiz.service.ServiceUtil; import org.opentaps.common.util.UtilCommon; /** * Services for working with VoIP systems * * @author <a href="mailto:<EMAIL>"><NAME></a> * @version $Rev: 600 $ */ public class VoIPServices { public static final String module = VoIPServices.class.getName(); public static final String errorResource = "CRMSFAErrorLabels"; public static final String resource = "VoIP"; /** * Retrieves the latest call for the user from the FacetPhone server. 
Returns an error only if VoIP.properties is misconfigured - * otherwise returns failure if something goes wrong. Assumes FacetPhone userId is equal to OFBiz userLoginId. * @param dctx DispatchContext * @param context Map * @return Map, possibly containing a string representing the latest call for the user */ public static Map retrieveLatestCallFromFacetPhoneServer( DispatchContext dctx, Map context) { GenericValue userLogin = (GenericValue) context.get("userLogin"); Locale locale = UtilCommon.getLocale(context); // FacetPhone server requires and sends an EOT after each query and response byte endOfTransmission = 0x04; // Make sure FacetPhone integration is turned on boolean facetPhoneIntegrate = "true".equalsIgnoreCase( UtilProperties.getPropertyValue("VoIP", "facetPhone.integrate", "")); if (! facetPhoneIntegrate) return ServiceUtil.returnSuccess(); // Check the configuration settings String facetPhoneServerIP = UtilProperties.getPropertyValue("VoIP", "facetPhone.server.connect.ipAddress"); if ( UtilValidate.isEmpty(facetPhoneServerIP)) { return ServiceUtil.returnError(UtilProperties.getMessage(errorResource, "CrmErrorPropertyNotConfigured", UtilMisc.toMap("propertyName", "facetPhone.server.connect.ipAddress", "fileName", resource + ".properties"), locale)); } String facetPhoneServerPortStr = UtilProperties.getPropertyValue("VoIP", "facetPhone.server.connect.port", "6500"); int facetPhoneServerPort = Integer.parseInt(facetPhoneServerPortStr); String facetPhoneServerTimeoutStr = UtilProperties.getPropertyValue("VoIP", "facetPhone.server.connect.timeout", "10000"); int facetPhoneServerTimeout = Integer.parseInt(facetPhoneServerTimeoutStr); String queryString = UtilProperties.getMessage("VoIP", "facetPhone.cid.queryString", userLogin, locale); if (UtilValidate.isEmpty(queryString)) { return ServiceUtil.returnError(UtilProperties.getMessage(errorResource, "CrmErrorPropertyNotConfigured", UtilMisc.toMap("propertyName", "facetPhone.cid.queryString", "fileName", resource 
+ ".properties"), locale)); } Socket socket = null; BufferedOutputStream out = null; InputStreamReader in = null; StringBuffer latestCallDataBuf = new StringBuffer(); try { // Connect to the FacetPhone server socket = new Socket() ; socket.connect(new InetSocketAddress(facetPhoneServerIP, facetPhoneServerPort), facetPhoneServerTimeout); out = new BufferedOutputStream(socket.getOutputStream()); in = new InputStreamReader(socket.getInputStream()); // Send the query to retrieve the latest call for the userLoginId out.write(queryString.getBytes()); out.write(endOfTransmission); out.flush(); // The FacetPhone server doesn't make any response at all (not even an EOT) if the user // isn't recognized or has never had a call, so only wait a quarter of a second for a response long startTime = System.currentTimeMillis(); int responseTimeout = 250; while (System.currentTimeMillis() < startTime + responseTimeout) { if (in.ready()) { int responseByte = in.read(); if (responseByte != -1 && responseByte != endOfTransmission) { latestCallDataBuf.append((char) responseByte); } if (responseByte == endOfTransmission) { break; } } } } catch ( IOException e) { String errorMessage = UtilProperties.getMessage(errorResource, "CrmErrorVoIPUnableToConnectToFacetPhone" + ": " + e.getMessage(), locale); Debug.logError(e, module); return ServiceUtil.returnFailure(errorMessage); } finally { try { out.close(); in.close(); socket.close() ; } catch (IOException e) { Debug.logError(e, module); } } String latestCallData = latestCallDataBuf.toString(); Map result = null; if (UtilValidate.isNotEmpty(latestCallData)) { result = ServiceUtil.returnSuccess(); result.put("latestCallData", latestCallData); } else { String errorMessage = UtilProperties.getMessage(errorResource, "CrmErrorVoIPErrorResponseFromFacetPhone", locale); result = ServiceUtil.returnFailure(errorMessage); Debug.logError(errorMessage, module); } return result; } /** * Retrieves and parses the incoming number of the latest call for the 
user from the FacetPhone server, using the retrieveLatestCallFromFacetPhoneServer service. * * @param dctx * @param context * @return */ public static Map getCurrentIncomingNumberFromFacetPhoneServer( DispatchContext dctx, Map context) { GenericValue userLogin = (GenericValue) context.get("userLogin"); LocalDispatcher dispatcher = (LocalDispatcher) dctx.getDispatcher(); Locale locale = UtilCommon.getLocale(context); Map result = ServiceUtil.returnSuccess(); String callStateRegexp = UtilProperties.getPropertyValue("VoIP", "facetPhone.cid.callState.regexp"); if (UtilValidate.isEmpty(callStateRegexp)) { String message = UtilProperties.getMessage(errorResource, "CrmErrorPropertyNotConfigured", UtilMisc.toMap("propertyName", "facetPhone.cid.callState.regexp", "fileName", resource + ".properties"), locale); Debug.logError(message, module); return ServiceUtil.returnError(message); } String numberIdentifyRegexp = UtilProperties.getPropertyValue("VoIP", "facetPhone.cid.number.identify.regexp"); if (UtilValidate.isEmpty(numberIdentifyRegexp)) { String message = UtilProperties.getMessage(errorResource, "CrmErrorPropertyNotConfigured", UtilMisc.toMap("propertyName", "facetPhone.cid.number.identify.regexp", "fileName", resource + ".properties"), locale); Debug.logError(message, module); return ServiceUtil.returnError(message); } String numberParseRegexp = UtilProperties.getPropertyValue("VoIP", "voip.number.parse.regexp"); if (UtilValidate.isEmpty(numberParseRegexp)) { String message = UtilProperties.getMessage(errorResource, "CrmErrorPropertyNotConfigured", UtilMisc.toMap("propertyName", "voip.number.parse.regexp", "fileName", resource + ".properties"), locale); Debug.logError(message, module); return ServiceUtil.returnError(message); } // Call the retrieveLatestCallFromFacetPhoneServer service String latestCallData = null; Map retrieveLatestCallFromFacetPhoneServerMap = null; try { retrieveLatestCallFromFacetPhoneServerMap = 
dispatcher.runSync("retrieveLatestCallFromFacetPhoneServer", UtilMisc.toMap("userLogin", userLogin, "locale", locale)); } catch( GenericServiceException e ) { Debug.logError(ServiceUtil.getErrorMessage(retrieveLatestCallFromFacetPhoneServerMap), module); return ServiceUtil.returnFailure(ServiceUtil.getErrorMessage(retrieveLatestCallFromFacetPhoneServerMap)); } if (ServiceUtil.isError(retrieveLatestCallFromFacetPhoneServerMap) || ServiceUtil.isFailure(retrieveLatestCallFromFacetPhoneServerMap)) { Debug.logError(ServiceUtil.getErrorMessage(retrieveLatestCallFromFacetPhoneServerMap), module); return ServiceUtil.returnFailure(ServiceUtil.getErrorMessage(retrieveLatestCallFromFacetPhoneServerMap)); } latestCallData = (String) retrieveLatestCallFromFacetPhoneServerMap.get("latestCallData"); if (UtilValidate.isEmpty(latestCallData)) { String errorMessage = UtilProperties.getMessage(errorResource, "CrmErrorVoIPErrorLatestCallFromFacetPhone", locale); Debug.logError(errorMessage, module); return ServiceUtil.returnFailure(errorMessage); } // Check the state of the call by retrieving it from the latestCallData via regular expression - probably in the form <state=(completed|active)> String state = ""; Matcher matcher = Pattern.compile(callStateRegexp).matcher(latestCallData); while (matcher.find()) { if (matcher.group(1) != null) state = matcher.group(1); } // Ignore the results if there's no active call if (! 
"active".equalsIgnoreCase(state)) { String message = UtilProperties.getMessage(errorResource, "CrmErrorVoIPErrorNoCurrentCall", userLogin, locale); Debug.logVerbose(message, module); return ServiceUtil.returnSuccess(); } // Get the caller's number by retrieving it from the latestCallData via regular expression matcher = Pattern.compile(numberIdentifyRegexp).matcher(latestCallData); String number = null; while (matcher.find()) { if (matcher.group(1) != null) number = matcher.group(1); } // Ignore the results if there's no number for the call if (UtilValidate.isEmpty(number)) { String message = UtilProperties.getMessage(errorResource, "CrmErrorVoIPErrorNoNumberForCurrentCall", userLogin, locale); Debug.logVerbose(message, module); return ServiceUtil.returnSuccess(); } matcher = Pattern.compile(numberParseRegexp).matcher(number); int phoneNumberPatternCountryCodeGroup = Integer.parseInt(UtilProperties.getPropertyValue(resource, "voip.number.parse.regexp.group.countryCode")); int phoneNumberPatternAreaCodeGroup = Integer.parseInt(UtilProperties.getPropertyValue(resource, "voip.number.parse.regexp.group.areaCode")); int phoneNumberPatternPhoneNumberGroup = Integer.parseInt(UtilProperties.getPropertyValue(resource, "voip.number.parse.regexp.group.phoneNumber")); if (matcher.matches()) { if (UtilValidate.isNotEmpty(matcher.group(phoneNumberPatternCountryCodeGroup))) result.put("countryCode", matcher.group(phoneNumberPatternCountryCodeGroup)); if (UtilValidate.isNotEmpty(matcher.group(phoneNumberPatternAreaCodeGroup))) result.put("areaCode", matcher.group(phoneNumberPatternAreaCodeGroup)); if (UtilValidate.isNotEmpty(matcher.group(phoneNumberPatternPhoneNumberGroup))) result.put("contactNumber", matcher.group(phoneNumberPatternPhoneNumberGroup)); } else { String message = UtilProperties.getMessage(errorResource, "CrmErrorVoIPErrorNumberFromFacetPhone", UtilMisc.toMap("latestCallData", latestCallData), locale); Debug.logWarning(message, module); result.put("contactNumber", 
number); } return result; } }
#!/bin/bash
# Package Install
#
# Install SSH tooling and helpers used by the provisioning scripts.
# DEBIAN_FRONTEND=noninteractive is exported once so NO install can block on a
# debconf prompt (the original set it only on the final apt-get invocation),
# and the four separate apt-get runs are consolidated into one.
export DEBIAN_FRONTEND=noninteractive

# openssh-server/openssh-client : SSH daemon and client
# sshpass                       : non-interactive SSH password authentication
# curl                          : HTTP downloads
# procmail                      : mail processor — NOTE(review): presumably
#                                 installed for its lockfile(1) utility; confirm
apt-get install -y \
    openssh-server \
    openssh-client \
    sshpass \
    curl \
    procmail
#!/bin/bash
# Plot the %util column for one disk device from an iostat log via gnuplot.
#
# Usage: $0 <iostat-log> [device (default sda)] [poll-interval-seconds (default 1)]
inputFile=$1
diskDevice=${2:-sda}
pollInterval=${3:-1}

# Find the 1-based field index of the %util column in the iostat header line.
utilField=$(grep -m1 '^Device:' "$inputFile" | xargs echo | tr ' ' '\n' | grep -n '%util' | cut -d: -f1)

# Emit "<elapsed-seconds> <util>" pairs for the requested device.
# (The whole pipeline runs in one subshell, so counter accumulates correctly.)
counter=$pollInterval
grep "$diskDevice" "$inputFile" | awk -v u="$utilField" '{print $u}' | while read line; do
    echo "$counter $line"
    counter=$(($counter+$pollInterval))
done > "$inputFile-util.data"

datafile=$inputFile-util.data
outfile=$inputFile-util.jpg

# BUGFIX: the original chart title said "CPU usage", but the data plotted is
# the disk %util column — label the chart accordingly.
gnuplot << EOP
set terminal jpeg font arial 8 size 640,480
set output "$outfile"
set title "Disk utilization: $inputFile"
set grid x y
set xlabel "Time (sec)"
set ylabel "Utilization (%)"
plot "$datafile" using 1:2 title "utilization (%)" with lines
EOP

# The intermediate data file is only needed to feed gnuplot.
rm "$datafile"
#!/bin/bash #SBATCH -J Act_linear_1 #SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de #SBATCH --mail-type=FAIL #SBATCH -e /work/scratch/se55gyhe/log/output.err.%j #SBATCH -o /work/scratch/se55gyhe/log/output.out.%j #SBATCH -n 1 # Number of cores #SBATCH --mem-per-cpu=6000 #SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins #module load intel python/3.5 python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py linear 184 Adamax 2 0.36361723960604264 0.001956603119725283 runiform 0.3
<gh_stars>1-10 package com.a8plus1.seen.mainViewPagers; import android.net.Uri; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentTransaction; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.a8plus1.seen.R; import io.rong.imkit.fragment.ConversationListFragment; import io.rong.imlib.model.Conversation; public class IMlistFragment extends Fragment{ private Fragment mConversationList; private Fragment mConversationFragment = null; @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_imlist, container, false); mConversationList = initConversationList();//获取融云会话列表的对象 FragmentTransaction ft = getActivity().getSupportFragmentManager().beginTransaction(); ft.replace(R.id.conversationlist_fragment_conversa, mConversationList); ft.addToBackStack("fr6"); ft.commit(); return view; } private Fragment initConversationList() { // appendQueryParameter对具体的会话列表做展示 if (mConversationFragment == null) { ConversationListFragment listFragment = new ConversationListFragment(); Uri uri = Uri.parse("rong://" + getActivity().getApplicationInfo().packageName).buildUpon() .appendPath("conversationList") .appendQueryParameter(Conversation.ConversationType.PRIVATE.getName(), "false")//设置私聊会话是否聚合显示 .appendQueryParameter(Conversation.ConversationType.GROUP.getName(), "true") .appendQueryParameter(Conversation.ConversationType.DISCUSSION.getName(), "false")//设置私聊会话是否聚合显示 .appendQueryParameter(Conversation.ConversationType.SYSTEM.getName(), "false")//设置私聊会是否聚合显示 .build(); listFragment.setUri(uri); return listFragment; } else { return mConversationFragment; } } }
"use strict";

// Transpiled (Babel) FileHandler: wires drag/drop and file-selection events to
// a FileReader and forwards load/progress/error notifications to the supplied
// callbacks.  (A stray "<reponame>..." dataset artifact that preceded
// "use strict" and made the file syntactically invalid has been removed; the
// runtime behavior is otherwise unchanged.)
Object.defineProperty(exports, "__esModule", {
    value: true
});

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

var _helpersFileReaderHelper = require("../helpers/file-reader-helper");

var FileHandler = function FileHandler(onLoaded, onProgress, onError, fileFilter, maxFileSize, readAs, hoverClass, additionalData) {
    var _this = this;

    _classCallCheck(this, FileHandler);

    // Reads one File with the configured strategy:
    // "text" | "array" | "binary", anything else falls back to a data URL.
    this.readFile = function (file) {
        var reader = _helpersFileReaderHelper.FileReaderHelper.createReader(file, _this.onLoaded, _this.onProgress, _this.onError, _this.additionalData);
        if (_this.readAs == "text") {
            reader.readAsText(file);
        } else if (_this.readAs == "array") {
            reader.readAsArrayBuffer(file);
        } else if (_this.readAs == "binary") {
            reader.readAsBinaryString(file);
        } else {
            reader.readAsDataURL(file);
        }
    };

    // Toggles the hover CSS class while a file is dragged over the target.
    this.handleFileDrag = function (fileDragEvent) {
        fileDragEvent.stopPropagation();
        fileDragEvent.preventDefault();
        if (fileDragEvent.type == "dragover") {
            fileDragEvent.target.classList.add(_this.hoverClass);
        } else {
            fileDragEvent.target.classList.remove(_this.hoverClass);
        }
    };

    // Drop: clear the hover state, then treat the payload as a selection.
    this.handleDrop = function (fileDropEvent) {
        _this.handleFileDrag(fileDropEvent);
        _this.handleFileSelected(fileDropEvent);
    };

    // Validates each selected/dropped file against the MIME-type filter and
    // size limit, reporting rejects through onError, then reads the rest.
    this.handleFileSelected = function (fileSelectionEvent) {
        if (!fileSelectionEvent.target) {
            fileSelectionEvent.target = {};
        }
        if (!fileSelectionEvent.dataTransfer) {
            fileSelectionEvent.dataTransfer = {};
        }
        var files = fileSelectionEvent.target.files || fileSelectionEvent.dataTransfer.files || [];
        // Assignment in the loop condition is intentional: iterate until
        // files[i] is falsy (FileList has no forEach in older browsers).
        for (var i = 0, f = undefined; f = files[i]; i++) {
            if (_this.fileFilter && !f.type.match(_this.fileFilter)) {
                if (_this.onError) {
                    _this.onError(f, "File type does not match filter");
                }
                continue;
            }
            if (_this.maxFileSize && f.size >= _this.maxFileSize) {
                if (_this.onError) {
                    _this.onError(f, "File exceeds file size limit");
                }
                continue;
            }
            _this.readFile(f);
        }
    };

    this.onLoaded = onLoaded;
    this.onProgress = onProgress;
    this.onError = onError;
    this.fileFilter = fileFilter;
    this.maxFileSize = maxFileSize;
    this.readAs = readAs;
    this.hoverClass = hoverClass || "file-hover";
    this.additionalData = additionalData;
};

exports.FileHandler = FileHandler;
# Print the stat invocation that reports a file's octal permission bits for
# the given uname; returns 1 for unsupported platforms.
permission_cmd () {
  os="$1"
  if [ "$os" = "Linux" ]; then
    echo "stat --printf '%a'"
  elif [ "$os" = "Darwin" ] || [ "$os" = "FreeBSD" ]; then
    echo "stat -f '%Lp'"
  else
    return 1
  fi
}

# Same idea for directories: the printed command reports owner, group and
# octal permissions on separate lines; returns 1 for unsupported platforms.
permission_cmd_dir () {
  os="$1"
  if [ "$os" = "Linux" ]; then
    echo "stat --printf '%U\\n%G\\n%a'"
  elif [ "$os" = "Darwin" ] || [ "$os" = "FreeBSD" ]; then
    echo "stat -f '%Su%n%Sg%n%Lp'"
  else
    return 1
  fi
}
/*
 * (C) Copyright 2010-2018, by <NAME> and Contributors.
 *
 * JGraphT : a free Java graph-theory library
 *
 * This program and the accompanying materials are dual-licensed under
 * either
 *
 * (a) the terms of the GNU Lesser General Public License version 2.1
 * as published by the Free Software Foundation, or (at your option) any
 * later version.
 *
 * or (per the licensee's choosing)
 *
 * (b) the terms of the Eclipse Public License v1.0 as published by
 * the Eclipse Foundation.
 */
package org.jgrapht.experimental;

import java.util.*;

import org.jgrapht.*;

/**
 * Brown graph coloring algorithm.
 *
 * Exhaustive backtracking search for the chromatic number: the graph is
 * flattened into an adjacency array (positions are assigned in vertex-set
 * iteration order) and colors are tried vertex by vertex, pruning any branch
 * that already needs at least as many colors as the best coloring found.
 *
 * @param <V> the graph vertex type
 * @param <E> the graph edge type
 *
 * @author <NAME>
 */
public class BrownBacktrackColoring<V, E>
{
    // Vertices in position order; position i corresponds to _neighbors[i].
    private final List<V> _vertices;
    // Adjacency lists expressed as vertex positions.
    private final int[][] _neighbors;
    // Reverse mapping: vertex -> position in _vertices.
    private final Map<V, Integer> _vertexToPos;
    // Working state of the search (indexed by position):
    // current color per vertex (0 = uncolored), colors used so far per depth,
    // per-depth bitset of colors still permitted, and best color count found.
    private int[] _color;
    private int[] _colorCount;
    private BitSet[] _allowedColors;
    private int _chi;

    /**
     * Construct a new Brown backtracking algorithm.
     *
     * @param g the input graph
     */
    public BrownBacktrackColoring(final Graph<V, E> g)
    {
        final int numVertices = g.vertexSet().size();
        _vertices = new ArrayList<>(numVertices);
        _neighbors = new int[numVertices][];
        _vertexToPos = new HashMap<>(numVertices);
        // First pass: assign a position to every vertex and size its row.
        for (V vertex : g.vertexSet()) {
            _neighbors[_vertices.size()] = new int[g.edgesOf(vertex).size()];
            _vertexToPos.put(vertex, _vertices.size());
            _vertices.add(vertex);
        }
        // Second pass: fill each row with the neighbor positions.
        for (int i = 0; i < numVertices; i++) {
            int nbIndex = 0;
            final V vertex = _vertices.get(i);
            for (E e : g.edgesOf(vertex)) {
                _neighbors[i][nbIndex++] = _vertexToPos.get(Graphs.getOppositeVertex(g, e, vertex));
            }
        }
    }

    // Recursively try colors for the vertex at position `pos`, extending the
    // partial coloring of positions [0, pos). Updates _chi whenever a complete
    // coloring with fewer colors is found. Restores _color[pos] on exit.
    void recursiveColor(int pos)
    {
        _colorCount[pos] = _colorCount[pos - 1];
        _allowedColors[pos].set(0, _colorCount[pos] + 1);
        // Rule out colors already taken by colored neighbors.
        for (int i = 0; i < _neighbors[pos].length; i++) {
            final int nb = _neighbors[pos][i];
            if (_color[nb] > 0) {
                _allowedColors[pos].clear(_color[nb]);
            }
        }
        // Try each existing color; prune once we cannot beat the best (_chi).
        for (int i = 1; (i <= _colorCount[pos]) && (_colorCount[pos] < _chi); i++) {
            if (_allowedColors[pos].get(i)) {
                _color[pos] = i;
                if (pos < (_neighbors.length - 1)) {
                    recursiveColor(pos + 1);
                } else {
                    _chi = _colorCount[pos];
                }
            }
        }
        // Also try introducing one brand-new color, if it can still improve.
        if ((_colorCount[pos] + 1) < _chi) {
            _colorCount[pos]++;
            _color[pos] = _colorCount[pos];
            if (pos < (_neighbors.length - 1)) {
                recursiveColor(pos + 1);
            } else {
                _chi = _colorCount[pos];
            }
        }
        // Backtrack: mark this vertex uncolored again.
        _color[pos] = 0;
    }

    /**
     * Get the coloring.
     *
     * @param additionalData map which contains the color of each vertex
     * @return the number of colors used
     */
    public Integer getResult(Map<V, Integer> additionalData)
    {
        // Worst case: every vertex gets its own color.
        _chi = _neighbors.length;
        _color = new int[_neighbors.length];
        // Fix vertex 0 to color 1 (colorings are symmetric under relabeling).
        _color[0] = 1;
        _colorCount = new int[_neighbors.length];
        _colorCount[0] = 1;
        _allowedColors = new BitSet[_neighbors.length];
        for (int i = 0; i < _neighbors.length; i++) {
            _allowedColors[i] = new BitSet(1);
        }
        recursiveColor(1);
        if (additionalData != null) {
            for (int i = 0; i < _vertices.size(); i++) {
                additionalData.put(_vertices.get(i), _color[i]);
            }
        }
        return _chi;
    }
}

// End BrownBacktrackColoring.java
<gh_stars>10-100 package com.yoga.utility.baidu.aip.ao; import com.alibaba.fastjson.JSONObject; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @Getter @Setter @NoArgsConstructor @AllArgsConstructor public class BaiduAiqConfig { private String appId; private String apiKey; private String secretKey; @Override public String toString() { return JSONObject.toJSONString(this); } }
/* global describe, it, beforeEach, expect, jasmine, spyOn */
/* eslint camelcase: 0, no-invalid-this: 0 */

// Unit tests for EventRegistrar: registering handler methods with the game
// event emitter and unregistering them all at once.
const EventRegistrar = require('../../server/game/eventregistrar.js');

describe('EventRegistrar', function () {
    beforeEach(function() {
        this.gameSpy = jasmine.createSpyObj('game', ['on', 'removeListener']);

        this.context = {
            method: function() {},
            anotherMethod: function() {},
            finalMethod: function() {}
        };

        // Stub bind() so we can assert the exact bound handler is used.
        this.boundHandler = {};
        spyOn(this.context.method, 'bind').and.returnValue(this.boundHandler);

        this.events = new EventRegistrar(this.gameSpy, this.context);
    });

    describe('register()', function () {
        it('should throw when a handler method does not exist on the given context', function() {
            expect(() => {
                this.events.register(['thisMethodDoesNotExist']);
            }).toThrow();
        });

        it('should bind the event with the given context', function() {
            this.events.register(['method']);
            expect(this.context.method.bind).toHaveBeenCalledWith(this.context);
        });

        it('should register the event with the game', function() {
            this.events.register(['method']);
            expect(this.gameSpy.on).toHaveBeenCalledWith('method', this.boundHandler);
        });

        it('should handle multiple events', function() {
            this.events.register(['method', 'anotherMethod']);
            expect(this.gameSpy.on).toHaveBeenCalledWith('method', this.boundHandler);
            expect(this.gameSpy.on).toHaveBeenCalledWith('anotherMethod', jasmine.any(Function));
        });

        it('should accept an event-to-method mapping', function() {
            this.events.register(['anotherMethod', { 'foo': 'method', 'bar': 'finalMethod' }]);
            expect(this.gameSpy.on).toHaveBeenCalledWith('foo', this.boundHandler);
            expect(this.gameSpy.on).toHaveBeenCalledWith('bar', jasmine.any(Function));
            expect(this.gameSpy.on).toHaveBeenCalledWith('anotherMethod', jasmine.any(Function));
        });
    });

    describe('unregisterAll()', function() {
        beforeEach(function() {
            this.events.register(['method', 'anotherMethod']);
        });

        it('should remove the listeners from the game', function() {
            this.events.unregisterAll();
            expect(this.gameSpy.removeListener).toHaveBeenCalledWith('method', this.boundHandler);
            expect(this.gameSpy.removeListener).toHaveBeenCalledWith('anotherMethod', jasmine.any(Function));
        });

        it('should not unregister multiple times', function() {
            this.events.unregisterAll();
            this.gameSpy.removeListener.calls.reset();

            this.events.unregisterAll();
            expect(this.gameSpy.removeListener.calls.count()).toBe(0);
        });
    });
});
import application from 'focus-core/application';
import router from 'focus-core/router';
import HomeView from '../views/home';

// Legacy router for the home page: both the empty path and 'home' render HomeView.
export default router.extend({
    log: true,
    // Runs before every route handled by this router.
    beforeRoute() {
        application.changeRoute('home');
    },
    routes: {
        '': 'home',
        home: 'home'
    },
    // Render the home view into the page content area.
    home() {
        this._pageContent(HomeView);
    }
});
/**
 * Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// jest-circus public surface: builds the describe/test/hook globals, which
// all funnel into dispatch() events consumed by the circus state machine.

import chalk from 'chalk';
import {bind as bindEach} from 'jest-each';
import {formatExecError} from 'jest-message-util';
import {ErrorWithStack, isPromise} from 'jest-util';
import {Circus, Global} from '@jest/types';
import {dispatch} from './state';

type THook = (fn: Circus.HookFn, timeout?: number) => void;
type DescribeFn = (
  blockName: Circus.BlockName,
  blockFn: Circus.BlockFn,
) => void;

// IIFE so describe/only/skip can pass themselves to _dispatchDescribe
// (used as the stack-trace anchor) before being wired together.
const describe = (() => {
  const describe = (blockName: Circus.BlockName, blockFn: Circus.BlockFn) =>
    _dispatchDescribe(blockFn, blockName, describe);
  const only = (blockName: Circus.BlockName, blockFn: Circus.BlockFn) =>
    _dispatchDescribe(blockFn, blockName, only, 'only');
  const skip = (blockName: Circus.BlockName, blockFn: Circus.BlockFn) =>
    _dispatchDescribe(blockFn, blockName, skip, 'skip');

  describe.each = bindEach(describe, false);
  only.each = bindEach(only, false);
  skip.each = bindEach(skip, false);

  describe.only = only;
  describe.skip = skip;
  return describe;
})();

// Validates the describe arguments, emits start/finish definition events,
// and runs the block body synchronously in between.
const _dispatchDescribe = (
  blockFn: Circus.BlockFn,
  blockName: Circus.BlockName,
  describeFn: DescribeFn,
  mode?: Circus.BlockMode,
) => {
  const asyncError = new ErrorWithStack(undefined, describeFn);
  if (blockFn === undefined) {
    asyncError.message = `Missing second argument. It must be a callback function.`;
    throw asyncError;
  }
  if (typeof blockFn !== 'function') {
    asyncError.message = `Invalid second argument, ${blockFn}. It must be a callback function.`;
    throw asyncError;
  }
  dispatch({
    asyncError,
    blockName,
    mode,
    name: 'start_describe_definition',
  });
  const describeReturn = blockFn();

  // TODO throw in Jest 25
  // Returning a value (or a Promise) from describe is only warned about here.
  if (isPromise(describeReturn)) {
    console.log(
      formatExecError(
        new ErrorWithStack(
          chalk.yellow(
            'Returning a Promise from "describe" is not supported. Tests must be defined synchronously.\n' +
              'Returning a value from "describe" will fail the test in a future version of Jest.',
          ),
          describeFn,
        ),
        {rootDir: '', testMatch: []},
        {noStackTrace: false},
      ),
    );
  } else if (describeReturn !== undefined) {
    console.log(
      formatExecError(
        new ErrorWithStack(
          chalk.yellow(
            'A "describe" callback must not return a value.\n' +
              'Returning a value from "describe" will fail the test in a future version of Jest.',
          ),
          describeFn,
        ),
        {rootDir: '', testMatch: []},
        {noStackTrace: false},
      ),
    );
  }

  dispatch({blockName, mode, name: 'finish_describe_definition'});
};

// Shared implementation behind beforeEach/beforeAll/afterEach/afterAll.
const _addHook = (
  fn: Circus.HookFn,
  hookType: Circus.HookType,
  hookFn: THook,
  timeout?: number,
) => {
  const asyncError = new ErrorWithStack(undefined, hookFn);

  if (typeof fn !== 'function') {
    asyncError.message =
      'Invalid first argument. It must be a callback function.';

    throw asyncError;
  }

  dispatch({asyncError, fn, hookType, name: 'add_hook', timeout});
};

// Hooks have to pass themselves to the HOF in order for us to trim stack traces.
const beforeEach: THook = (fn, timeout) =>
  _addHook(fn, 'beforeEach', beforeEach, timeout);
const beforeAll: THook = (fn, timeout) =>
  _addHook(fn, 'beforeAll', beforeAll, timeout);
const afterEach: THook = (fn, timeout) =>
  _addHook(fn, 'afterEach', afterEach, timeout);
const afterAll: THook = (fn, timeout) =>
  _addHook(fn, 'afterAll', afterAll, timeout);

// Same pattern as describe: test/it plus its .only/.skip/.todo/.each variants.
const test: Global.It = (() => {
  const test = (
    testName: Circus.TestName,
    fn: Circus.TestFn,
    timeout?: number,
  ): void => _addTest(testName, undefined, fn, test, timeout);
  const skip = (
    testName: Circus.TestName,
    fn?: Circus.TestFn,
    timeout?: number,
  ): void => _addTest(testName, 'skip', fn, skip, timeout);
  const only = (
    testName: Circus.TestName,
    fn: Circus.TestFn,
    timeout?: number,
  ): void => _addTest(testName, 'only', fn, test.only, timeout);

  // test.todo takes only a description; any extra argument is an error.
  test.todo = (testName: Circus.TestName, ...rest: Array<any>): void => {
    if (rest.length > 0 || typeof testName !== 'string') {
      throw new ErrorWithStack(
        'Todo must be called with only a description.',
        test.todo,
      );
    }
    return _addTest(testName, 'todo', () => {}, test.todo);
  };

  // Validates arguments and dispatches the add_test event.
  const _addTest = (
    testName: Circus.TestName,
    mode: Circus.TestMode,
    fn: Circus.TestFn | undefined,
    testFn: (
      testName: Circus.TestName,
      fn: Circus.TestFn,
      timeout?: number,
    ) => void,
    timeout?: number,
  ) => {
    const asyncError = new ErrorWithStack(undefined, testFn);

    if (typeof testName !== 'string') {
      asyncError.message = `Invalid first argument, ${testName}. It must be a string.`;
      throw asyncError;
    }
    if (fn === undefined) {
      asyncError.message =
        'Missing second argument. It must be a callback function. Perhaps you want to use `test.todo` for a test placeholder.';
      throw asyncError;
    }
    if (typeof fn !== 'function') {
      asyncError.message = `Invalid second argument, ${fn}. It must be a callback function.`;
      throw asyncError;
    }

    return dispatch({
      asyncError,
      fn,
      mode,
      name: 'add_test',
      testName,
      timeout,
    });
  };

  test.each = bindEach(test);
  only.each = bindEach(only);
  skip.each = bindEach(skip);

  test.only = only;
  test.skip = skip;

  return test;
})();

// `it` is an alias for `test`.
const it: Global.It = test;

export type Event = Circus.Event;
export type State = Circus.State;
export {afterAll, afterEach, beforeAll, beforeEach, describe, it, test};
export default {
  afterAll,
  afterEach,
  beforeAll,
  beforeEach,
  describe,
  it,
  test,
};
<filename>development/src/main/java/net/community/chest/io/file/DateFileAttributeFilter.java /* * */ package net.community.chest.io.file; import java.util.Date; import net.community.chest.math.compare.ComparisonExecutor; /** * <P>Copyright GPLv2</P> * * @author <NAME>. * @since Apr 13, 2009 1:08:37 PM */ public class DateFileAttributeFilter extends ComparableFileAttributeFilter<Date> { public DateFileAttributeFilter (FileAttributeType a, ComparisonExecutor ce) throws IllegalArgumentException { super(Date.class, a, ce); } public void setComparedValue (Number n) { setComparedValue((null == n) ? null : new Date(n.longValue())); } public void setComparedValue (long n) { setComparedValue(new Date(n)); } }
class Dataset:
    """Minimal container for an in-memory collection of samples."""

    def __init__(self, data):
        """Keep a reference to ``data`` and cache its length.

        Args:
            data: any sized collection of samples.
        """
        self._data, self._num_samples = data, len(data)

    def num_samples(self):
        """Return the cached number of samples in this dataset."""
        return self._num_samples
#!/usr/bin/env bash
#
# Slurm arguments.
#
#SBATCH --cpus-per-task=4
#SBATCH --export=ALL
#SBATCH --gres=gpu:1
#SBATCH --job-name "STREAM_INFERENCE_ROC_RESNET_50_BN"
#SBATCH --mem-per-cpu=5000
#SBATCH --ntasks=1
#SBATCH --output "logging/roc_resnet_50_bn_%a.log"
#SBATCH --parsable
#SBATCH --requeue
#SBATCH --time="7-00:00:00"
#
# Runs diagnose-ratio.py for one Slurm array task against the best checkpoint
# of the marginalized ResNet-50 (batch-norm) ratio estimator.
# Expects BASE, EXPERIMENT_* and PROJECT_FORCE_RERUN in the environment
# (exported by the submitting script via --export=ALL).

# Marginalized
# Glob resolves to the run directory of the matching training configuration.
model_query="$BASE/out/coverage/$EXPERIMENT_BATCH_SIZE/marginalized/$EXPERIMENT_ACTIVATION/ratio-estimator-resnet-50-$EXPERIMENT_EPOCHS-dropout-$EXPERIMENT_DROPOUT-wd-$EXPERIMENT_WEIGHT_DECAY-batchnorm-1-*/best-model.th"
# Zero-pad the array index so output files sort lexicographically.
suffix=$(printf "%05d" $SLURM_ARRAY_TASK_ID)
out=$BASE/out/coverage/$EXPERIMENT_BATCH_SIZE/marginalized/$EXPERIMENT_ACTIVATION/roc-resnet-50-bn-$suffix.pickle
# Check if the architecture has already been trained.
if [ ! -f $out -o $PROJECT_FORCE_RERUN -ne 0 ]; then
    python -u diagnose-ratio.py \
        --model $model_query \
        --experiment $SLURM_ARRAY_TASK_ID \
        --out $out
fi

# # Not marginalized
# # (Disabled) Same diagnosis against the not-marginalized estimator; note it
# # uses EXPERIMENT_TASK_EPOCHS rather than EXPERIMENT_EPOCHS.
# model_query="$BASE/out/coverage/$EXPERIMENT_BATCH_SIZE/not-marginalized/$EXPERIMENT_ACTIVATION/ratio-estimator-resnet-50-$EXPERIMENT_TASK_EPOCHS-dropout-$EXPERIMENT_DROPOUT-wd-$EXPERIMENT_WEIGHT_DECAY-batchnorm-1-*/best-model.th"
# suffix=$(printf "%05d" $SLURM_ARRAY_TASK_ID)
# out=$BASE/out/coverage/$EXPERIMENT_BATCH_SIZE/not-marginalized/$EXPERIMENT_ACTIVATION/roc-resnet-50-bn-$suffix.pickle
# # Check if the architecture has already been trained.
# if [ ! -f $out -o $PROJECT_FORCE_RERUN -ne 0 ]; then
#     python -u diagnose-ratio.py \
#         --model $model_query \
#         --experiment $SLURM_ARRAY_TASK_ID \
#         --out $out
# fi
#!/usr/bin/env bash
# Environment configuration sourced before provisioning an OpenShift dev
# cluster on AWS. Only exports variables; performs no actions itself.

CURRENT_USER=$(id -un)
# Cluster id = <user>-<last dash-separated component of the working dir>.
export OPT_CLUSTER_ID=${CURRENT_USER}-${PWD##*-}
# Prefer python3 when available.
export PYTHON=$(which python3 || which python)
export KUBECONFIG=${PWD}/assets/auth/kubeconfig

##################################################
# Secrets
##################################################
export OPT_PULL_SECRET=~/pull-secret.txt # https://cloud.openshift.com/clusters/install, Step 4
export OPT_PRIVATE_KEY=${PWD}/../../shared-secrets/aws/openshift-dev.pem

##################################################
# Provision/Terminate
##################################################
export OPT_CLUSTER_DIR=${PWD}
export OPT_MASTER_COUNT=0
export OPT_COMPUTE_COUNT=1
export OPT_INFRA_COUNT=0
export OPT_PLATFORM_TYPE=centos # rhel/centos
export OPT_INSTANCE_TYPE=t2.medium
#export OPT_INSTANCE_TYPE=m4.xlarge
export AWS_PROFILE="openshift-dev"
export AWS_DEFAULT_REGION=us-east-2

##################################################
# Clone Ansible
##################################################
#export OPT_ANSIBLE_PRNUM=XXXXX
export OPT_ANSIBLE_TAG=v2.7.8
#export OPT_ANSIBLE_TAG=<commit_hash>

##################################################
# Clone OpenShift-Ansible
##################################################
#export OPT_OA_PRNUM=XXXXX
export OPT_OA_TAG=devel-40
/**
 * Plain data holder for a client record: name, tax number and profession.
 */
public class Client {

    private String clientName;
    private String taxNumber;
    private String profession;

    // Accessors below could equally be generated via
    // Source -> "Generate Getters and Setters...".

    /** @return the client's display name */
    public String getClientName() {
        return clientName;
    }

    /** @return the client's tax identification number */
    public String getTaxNumber() {
        return taxNumber;
    }

    /** @return the client's profession */
    public String getProfession() {
        return profession;
    }

    /** @param clientName the client's display name */
    public void setClientName(String clientName) {
        this.clientName = clientName;
    }

    /** @param taxNumber the client's tax identification number */
    public void setTaxNumber(String taxNumber) {
        this.taxNumber = taxNumber;
    }

    /** @param profession the client's profession */
    public void setProfession(String profession) {
        this.profession = profession;
    }
}
import httpStatus from 'http-status'
import createError from 'http-errors'
import bcrypt from 'bcrypt'
import jwt from 'jsonwebtoken'
import userRepo from '../../repositories/user.repository'
import response from '../../utils/response'

// POST /login — looks up the user by email, checks the bcrypt password hash,
// and issues a signed JWT on success. Failures are forwarded via next().
const login = async (req, res, next) => {
  try {
    const email = req.body.email
    const password = req.body.password

    const user = await userRepo.findByEmail(email)
    if (!user) {
      // 404: user not found
      return next(createError(404, '사용자를 찾을 수 없습니다'))
    }

    // NOTE: email: <EMAIL> / password: <PASSWORD>
    // Compare the submitted password against the stored bcrypt hash.
    const match = await bcrypt.compare(password, user.password)
    if (!match) {
      // 422: wrong password
      return next(createError(422, '비밀번호를 확인 해주세요'))
    }

    // JWT payload: the logged-in user's info (add fields here as needed).
    const payload = {
      email: user.email,
      uId: user.id,
      dbcode: user.dbcode
    }

    const token = jwt.sign(payload, process.env.JWT_SECRET, {
      expiresIn: process.env.JWT_EXPIRESIN
    })

    return response(res, { token })
  } catch (e) {
    next(e)
  }
}

// Echo endpoint: returns the user object attached to the request by the
// JWT middleware (useful for verifying a token is valid).
const tokenTest = async (req, res, next) => {
  try {
    // req.user is populated by jwt.middleware
    // console.log(req.user)
    return response(res, req.user)
  } catch (e) {
    next(e)
  }
}

export { login, tokenTest }
<gh_stars>0 import { AdminPostCollectionsReq, AdminCollectionsRes, AdminPostCollectionsCollectionReq, AdminCollectionsDeleteRes, AdminCollectionsListRes, AdminGetCollectionsParams } from "@medusajs/medusa"; import { ResponsePromise } from "../../typings"; import BaseResource from "../base"; declare class AdminCollectionsResource extends BaseResource { /** * @description Creates a collection. * @param payload * @returns Created collection. */ create(payload: AdminPostCollectionsReq): ResponsePromise<AdminCollectionsRes>; /** * @description Updates a collection * @param id id of the collection to update. * @param payload update to apply to collection. * @returns the updated collection. */ update(id: string, payload: AdminPostCollectionsCollectionReq): ResponsePromise<AdminCollectionsRes>; /** * @description deletes a collection * @param id id of collection to delete. * @returns Deleted response */ delete(id: string): ResponsePromise<AdminCollectionsDeleteRes>; /** * @description get a collection * @param id id of the collection to retrieve. * @returns the collection with the given id */ retrieve(id: string): ResponsePromise<AdminCollectionsRes>; /** * @description Lists collections matching a query * @param query Query for searching collections * @returns a list of collections matching the query. */ list(query?: AdminGetCollectionsParams): ResponsePromise<AdminCollectionsListRes>; } export default AdminCollectionsResource;
# Habitat plan for Consul 1.1.0: repackages the upstream prebuilt Linux
# amd64 binary from releases.hashicorp.com (no compilation performed).
pkg_origin=core
pkg_name=consul
pkg_version=1.1.0
pkg_maintainer='The Habitat Maintainers <humans@habitat.sh>'
pkg_license=("MPL-2.0")
pkg_description="Consul is a tool for service discovery, monitoring and configuration."
pkg_upstream_url=https://www.consul.io/
pkg_source=https://releases.hashicorp.com/${pkg_name}/${pkg_version}/${pkg_name}_${pkg_version}_linux_amd64.zip
pkg_shasum=09c40c8b5be868003810064916d8460bff334ccfb59a5046390224b27e052c45
pkg_filename=${pkg_name}-${pkg_version}_linux_amd64.zip
pkg_deps=()
pkg_build_deps=(lilian/unzip)
pkg_bin_dirs=(bin)
# Expose Consul's well-known ports to dependent services.
pkg_exports=(
  [port-dns]=ports.dns
  [port-http]=ports.http
  [port-serf_lan]=ports.serf_lan
  [port-serf_wan]=ports.serf_wan
  [port-server]=ports.server
)
pkg_exposes=(port-dns port-http port-serf_lan port-serf_wan port-server)
pkg_svc_user="hab"
pkg_svc_group="${pkg_svc_user}"

source ../defaults.sh

# Unzip the release archive into the conventional versioned source dir.
do_unpack() {
  cd "${HAB_CACHE_SRC_PATH}" || exit
  unzip ${pkg_filename} -d "${pkg_name}-${pkg_version}"
}

# Nothing to build — the upstream archive already contains the binary.
do_build() {
  return 0
}

# Install the single consul binary into the package's bin directory.
do_install() {
  install -D consul "${pkg_prefix}/bin/consul"
}
#SETUP IS WRONG THIS IS RUN
# Collects username, password and hostname interactively and appends them to
# setup.conf, but only when the existing config cannot be sourced.
# NOTE(review): the guard sources /root/CrummyArch/setup.conf while the values
# are appended to ${HOME}/CrummyArch/setup.conf — these only coincide when
# running as root; confirm the intended path.
if ! source /root/CrummyArch/setup.conf; then
    # Loop through user input until the user gives a valid username
    while true
    do
        read -p "Please enter username:" username
        # (fixed) a stray `username =` line here used to run a non-existent
        # command named `username` on every iteration.
        # username regex per response here https://unix.stackexchange.com/questions/157426/what-is-the-regex-to-validate-linux-users
        # lowercase the username to test regex
        if [[ "${username,,}" =~ ^[a-z_]([a-z0-9_-]{0,31}|[a-z0-9_-]{0,30}\$)$ ]]
        then
            break
        fi
        echo "Invalid username."
    done
    # convert name to lowercase before saving to setup.conf
    echo "username=${username,,}" >> ${HOME}/CrummyArch/setup.conf

    # Set Password; -s keeps the typed password from echoing to the terminal
    # (it is still stored in plaintext in setup.conf, as before).
    read -s -p "Please enter password:" password
    echo
    echo "password=$password" >> ${HOME}/CrummyArch/setup.conf

    # Loop through user input until the user gives a valid hostname, but allow the user to force save
    while true
    do
        read -p "Please name your machine:" nameofmachine
        # hostname regex (!!couldn't find spec for computer name!!)
        if [[ "${nameofmachine,,}" =~ ^[a-z][a-z0-9_.-]{0,62}[a-z0-9]$ ]]
        then
            break
        fi
        # if validation fails allow the user to force saving of the hostname
        read -p "Hostname doesn't seem correct. Do you still want to save it? (y/n)" force
        if [[ "${force,,}" = "y" ]]
        then
            break
        fi
    done
    echo "nameofmachine=$nameofmachine" >> ${HOME}/CrummyArch/setup.conf
fi
package org.hisp.dhis.api.mobile.support;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter;

/**
 * Spring bean post-processor that prepends a DataStreamSerializable message
 * converter to every AnnotationMethodHandlerAdapter, so mobile-serialized
 * payloads are tried before the framework's default converters.
 */
@Component
public class MessageConverterAddingPostProcessor
    implements BeanPostProcessor
{
    private final static Log logger = LogFactory.getLog( MessageConverterAddingPostProcessor.class );

    // The converter to install; position 0 gives it highest priority.
    private HttpMessageConverter<?> messageConverter = new DataStreamSerializableMessageConverter();

    // No-op: beans are only touched after initialization.
    public Object postProcessBeforeInitialization( Object bean, String beanName )
        throws BeansException
    {
        return bean;
    }

    public Object postProcessAfterInitialization( Object bean, String beanName )
        throws BeansException
    {
        // Only handler adapters are of interest; everything else passes through.
        if ( !(bean instanceof AnnotationMethodHandlerAdapter) )
        {
            return bean;
        }

        AnnotationMethodHandlerAdapter handlerAdapter = (AnnotationMethodHandlerAdapter) bean;
        HttpMessageConverter<?>[] converterArray = handlerAdapter.getMessageConverters();
        // Copy into a mutable list so the custom converter can be inserted first.
        List<HttpMessageConverter<?>> converters = new ArrayList<HttpMessageConverter<?>>( Arrays.asList( converterArray ) );
        converters.add( 0, messageConverter );
        converterArray = converters.toArray( new HttpMessageConverter<?>[converters.size()] );
        handlerAdapter.setMessageConverters( converterArray );
        log( converterArray );
        return handlerAdapter;
    }

    // Logs the resulting converter chain (trailing ", " trimmed).
    private void log( HttpMessageConverter<?>[] array )
    {
        StringBuilder sb = new StringBuilder("Converters after adding custom one: ");

        for ( HttpMessageConverter<?> httpMessageConverter : array )
        {
            sb.append( httpMessageConverter.getClass().getName() ).append( ", " );
        }
        String string = sb.toString();

        logger.info( string.substring( 0, string.length() - 2 ) );
    }
}
package sexp

import (
	"fmt"
	"regexp"
	"strconv"
)

// Item is a single lexed token: its type, byte offset in the input, and raw value.
type Item struct {
	Type     ItemType
	Position int
	Value    []byte
}

type ItemType int

// String renders an Item for debugging.
func (item Item) String() string {
	switch item.Type {
	case ItemError:
		return fmt.Sprintf("Error(%v)", item.Value)
	case ItemBracketLeft:
		return "("
	case ItemBracketRight:
		return ")"
	case ItemToken:
		return fmt.Sprintf("Token(%v)", item.Value)
	case ItemQuote:
		return fmt.Sprintf("Quote(%v)", item.Value)
	case ItemVerbatim:
		return fmt.Sprintf("Verbatim(%v)", item.Value)
	case ItemEOF:
		return "EOF"
	default:
		return "Unknown(%v)"
	}
}

const (
	ItemEOF ItemType = iota
	ItemError
	ItemBracketLeft  // (
	ItemBracketRight // )
	ItemToken        // abc Token.
	ItemQuote        // "abc" Quoted string. May also include length 3"abc"
	ItemVerbatim     // 3:abc Length prefixed "verbatim" encoding.
	// ItemHex // #616263# Hexidecimal string.
	// ItemBase64 // {MzphYmM=} Base64 of the verbatim encoding "3:abc"
	// ItemBase64Octet // |YWJj| Base64 encoding of the octet-string "abc"
)

// Anchored (^) patterns, each matched against the unconsumed remainder of the input.
var (
	reBracketLeft  = regexp.MustCompile(`^\(`)
	reBracketRight = regexp.MustCompile(`^\)`)
	reWhitespace   = regexp.MustCompile(`^\s+`)
	reVerbatim     = regexp.MustCompile(`^(\d+):`)
	reQuote        = regexp.MustCompile(`^(\d+)?"((?:[^\\"]|\\.)*)"`)
	// Strict(er) R.Rivset 1997 draft token + unicode letter support (hello 1997).
	// reToken = regexp.MustCompile(`^[\p{L}][\p{L}\p{N}\-./_:*+=]+`)
	// Instead a token can be anything including '(', ')' and ' ' so long as you escape them:
	reToken = regexp.MustCompile(`^(?:[^\\ ()]|\\.)+`)
)

// stateFn is the classic Pike-style lexer state function: each state returns
// the next state, or nil to stop.
type stateFn func(*lexer) stateFn

type lexer struct {
	input   []byte    // full input being lexed
	items   chan Item // lexed items delivered to the consumer
	start   int       // start offset of the current item
	pos     int       // current scan offset
	state   stateFn   // current state function
	matches [][]byte  // submatches of the most recent successful match()
	parens  int       // bracket-nesting balance, for error reporting
}

// emit sends the current [start, pos) span as an item, tracking bracket
// balance and converting unbalanced input into ItemError items.
func (l *lexer) emit(t ItemType) {
	switch t {
	case ItemBracketLeft:
		l.parens++
	case ItemBracketRight:
		l.parens--
	}
	if l.parens > 0 && t == ItemEOF {
		l.items <- Item{ItemError, l.start, []byte(fmt.Sprintf("Unexpected EOF, %d '(' unmatched", l.parens))}
	} else if l.parens < 0 {
		l.items <- Item{ItemError, l.start, []byte("Unmatched )")}
	} else {
		l.items <- Item{t, l.start, l.input[l.start:l.pos]}
	}
}

// Next blocks until the next lexed item is available.
func (l *lexer) Next() Item {
	item := <-l.items
	return item
}

// scan advances start/pos past the match when re matches at the current position.
func (l *lexer) scan(re *regexp.Regexp) bool {
	if l.match(re) {
		l.start = l.pos
		l.pos += len(l.matches[0])
		return true
	}
	return false
}

// match tests re at the current position, stashing submatches in l.matches.
func (l *lexer) match(re *regexp.Regexp) bool {
	if l.matches = re.FindSubmatch(l.input[l.pos:]); l.matches != nil {
		return true
	}
	return false
}

// run drives the state machine on its own goroutine and closes the channel when done.
func (l *lexer) run() {
	for l.state = lex; l.state != nil; {
		l.state = l.state(l)
	}
	close(l.items)
}

// errorf emits an error item and terminates the state machine.
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
	l.items <- Item{ItemError, l.start, []byte(fmt.Sprintf(format, args...))}
	return nil
}

// lex is the single lexing state: classify whatever starts at the current position.
func lex(l *lexer) stateFn {
	// The order is important here, reToken must come last because it'll match reVerbatim and
	// reQuote atoms as well.
	switch {
	case l.pos >= len(l.input):
		l.emit(ItemEOF)
		return nil
	case l.scan(reWhitespace):
		return lex
	case l.scan(reBracketLeft):
		l.emit(ItemBracketLeft)
		return lex
	case l.scan(reBracketRight):
		l.emit(ItemBracketRight)
		return lex
	case l.scan(reQuote):
		// TODO: errorf if length exists and doesn't line up with quote length.
		// Don't include quotes in Value.
		l.items <- Item{ItemQuote, l.start, []byte(l.matches[2])}
		return lex
	case l.scan(reVerbatim):
		// Consume exactly the declared number of bytes after "N:".
		bytes, _ := strconv.ParseInt(string(l.matches[1]), 10, 64)
		l.start = l.pos
		l.pos += int(bytes)
		l.emit(ItemVerbatim)
		return lex
	case l.scan(reToken):
		l.emit(ItemToken)
		return lex
	}
	return l.errorf("Unexpected byte at %d near '%s'.", l.pos, l.near())
}

// near returns up to 10 bytes of context around the current position for error messages.
func (l *lexer) near() []byte {
	from := l.pos - 5
	if from < 0 {
		from = 0
	}
	near := l.input[from:]
	if len(near) < 10 {
		return near[:len(near)]
	}
	return near[:10]
}

/* Lex S-Expressions. See http://people.csail.mit.edu/rivest/Sexp.txt
 * Unlike the R.Rivest 1997 draft tokens will match any unicode letters.
 * Canonical S-Expressions may have spaces between atoms which isn't strictly correct.
 */
func NewLexer(input []byte) *lexer {
	l := &lexer{input: input, items: make(chan Item)}
	go l.run()
	return l
}
# Renders @category_children as a JSON array of { id, name } objects.
json.array! @category_children do |child|
  json.id child.id
  json.name child.name
end
#!/bin/bash
### UGE (Univa Grid Engine) options ###
#$ -P GR06APR18
#$ -jc dma.LS
#$ -N valid
#$ -cwd
#$ -l h_rt=20:00:00
#$ -pe impi_pslots 560

# Load the Intel MPI/compiler environment, then run the Euler solver
# across the allocated MPI slots.
. /etc/profile.d/modules.sh
module load intel/2018.2.046

mpirun ./bin/Debug/EulerSolver2
# Evaluate the fine-tuned LM checkpoint (train-outputs/0+1024+512/13-model) on
# the WikiText-103 validation set, augmenting each example by keeping only
# nouns and verbs in the first third and scoring only the last element.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/0+1024+512/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/0+1024+512/13-512+512+512-N-VB-fill-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_third_full --eval_function last_element_eval
const http = require('http');

// Plain HTTP GET against www.example.com; logs status, headers and body.
const requestOptions = {
  hostname: 'www.example.com',
  port: 80,
  path: '/',
  method: 'GET'
};

// Response handler: stream the body as UTF-8 chunks and log each piece.
function handleResponse(res) {
  console.log(`STATUS: ${res.statusCode}`);
  console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
  res.setEncoding('utf8');
  res.on('data', (chunk) => {
    console.log(`BODY: ${chunk}`);
  });
  res.on('end', () => {
    console.log('No more data in response.');
  });
}

const req = http.request(requestOptions, handleResponse);

req.on('error', (e) => {
  console.error(`problem with request: ${e.message}`);
});

// write data to request body
req.write('data\n');
req.write('data\n');
req.end();
#!/bin/bash

# Copyright 2020 The IREE Authors
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Build the project with cmake using Kokoro.
# This variant builds with ASan enabled and runs ctest with driver- and
# test-specific exclusions for configurations known to fail under ASan.

set -e
set -x

# Print the UTC time when set -x is on
export PS4='[$(date -u "+%T %Z")] '

# Check these exist and print the versions for later debugging
export CMAKE_BIN="$(which cmake)"
"${CMAKE_BIN?}" --version
"${CC?}" --version
"${CXX?}" --version
python3 --version

./build_tools/kokoro/gcp_ubuntu/check_vulkan.sh

# Print SwiftShader git commit
cat /swiftshader/git-commit

echo "Initializing submodules"
./scripts/git/submodule_versions.py init

CMAKE_BUILD_DIR="${CMAKE_BUILD_DIR:-$HOME/build}"

CMAKE_ARGS=(
  "-G" "Ninja"
  "-DCMAKE_BUILD_TYPE=RelWithDebInfo"
  "-DIREE_ENABLE_ASAN=ON"
  "-B" "${CMAKE_BUILD_DIR?}"
)

echo "Configuring CMake"
"${CMAKE_BIN?}" "${CMAKE_ARGS[@]?}"

echo "Building with CMake"
"${CMAKE_BIN?}" --build "${CMAKE_BUILD_DIR?}"

# Respect the user setting, but default to as many jobs as we have cores.
export CTEST_PARALLEL_LEVEL=${CTEST_PARALLEL_LEVEL:-$(nproc)}

# Respect the user setting, but default to turning off the vulkan tests
# and turning on the llvmaot ones.
# TODO(#5716): Fix and enable Vulkan tests.
export IREE_VULKAN_DISABLE=${IREE_VULKAN_DISABLE:-1}
export IREE_LLVMAOT_DISABLE=${IREE_LLVMAOT_DISABLE:-0}
# CUDA is off by default.
export IREE_CUDA_DISABLE=${IREE_CUDA_DISABLE:-1}
# The VK_KHR_shader_float16_int8 extension is optional prior to Vulkan 1.2.
# We test on SwiftShader, which does not support this extension.
export IREE_VULKAN_F16_DISABLE=${IREE_VULKAN_F16_DISABLE:-1}

# Tests to exclude by label. In addition to any custom labels (which are carried
# over from Bazel tags), every test should be labeled with the directory it is
# in.
declare -a label_exclude_args=(
  # Exclude specific labels.
  # Put the whole label with anchors for exact matches.
  # For example:
  #   ^nokokoro$
  ^nokokoro$
  # Exclude all tests in a directory.
  # Put the whole directory with anchors for exact matches.
  # For example:
  #   ^bindings/python/iree/runtime$
  # Exclude all tests in some subdirectories.
  # Put the whole parent directory with only a starting anchor.
  # Use a trailing slash to avoid prefix collisions.
  # For example:
  #   ^bindings/
)

# Add label exclusions for each disabled driver/feature.
if [[ "${IREE_VULKAN_DISABLE?}" == 1 ]]; then
  label_exclude_args+=("^driver=vulkan$")
fi
if [[ "${IREE_LLVMAOT_DISABLE?}" == 1 ]]; then
  label_exclude_args+=("^driver=dylib$")
fi
if [[ "${IREE_CUDA_DISABLE?}" == 1 ]]; then
  label_exclude_args+=("^driver=cuda$")
  label_exclude_args+=("^uses_cuda_runtime$")
fi
if [[ "${IREE_VULKAN_F16_DISABLE?}" == 1 ]]; then
  label_exclude_args+=("^vulkan_uses_vk_khr_shader_float16_int8$")
fi

# Join on "|"
label_exclude_regex="($(IFS="|" ; echo "${label_exclude_args[*]?}"))"

# These tests currently have asan failures
# TODO(#5715): Fix these
declare -a excluded_tests=(
  "iree/base/internal/file_io_test"
  "iree/samples/static_library/static_library_demo_test"
  "bindings/tflite/smoke_test"
  "iree/hal/cts/allocator_test"
  "iree/hal/cts/buffer_mapping_test"
  "iree/hal/cts/command_buffer_test"
  "iree/hal/cts/descriptor_set_layout_test"
  "iree/hal/cts/driver_test"
  "iree/hal/cts/event_test"
  "iree/hal/cts/executable_layout_test"
  "iree/hal/cts/semaphore_submission_test"
  "iree/hal/cts/semaphore_test"
  "iree/modules/check/check_test"
  "iree/samples/simple_embedding/simple_embedding_vulkan_test"
)

# Prefix with `^` anchor
excluded_tests=( "${excluded_tests[@]/#/^}" )
# Suffix with `$` anchor
excluded_tests=( "${excluded_tests[@]/%/$}" )
# Join on `|` and wrap in parens
excluded_tests_regex="($(IFS="|" ; echo "${excluded_tests[*]?}"))"

cd ${CMAKE_BUILD_DIR?}

echo "Testing with ctest"
ctest --timeout 900 --output-on-failure \
  --label-exclude "^driver=cuda$|^driver=vulkan$" \
  --exclude-regex "${excluded_tests_regex?}"
# frozen_string_literal: true

require 'rails_helper'

# Component spec: the settings component shows one section heading per
# messaging channel the contributor is connected to.
RSpec.describe ContributorChannelSettings::ContributorChannelSettings, type: :component do
  subject { render_inline(described_class.new(**params)) }

  let(:contributor) { create(:contributor, email: nil, **attrs) }
  let(:attrs) { {} }
  let(:params) { { contributor: contributor } }

  # No channels configured => no section headings at all.
  it { should_not have_css('h2') }

  context 'given an email contributor' do
    let(:attrs) { { email: '<EMAIL>' } }
    it { should have_css('h2', text: 'E-Mail') }
  end

  context 'given a Telegram contributor' do
    let(:attrs) { { telegram_id: 12_345_678 } }
    it { should have_css('h2', text: 'Telegram') }
  end

  context 'given a Threema contributor' do
    let(:attrs) { { threema_id: 12_345_678 } }
    it { should have_css('h2', text: 'Threema') }
  end

  context 'given a Signal contributor' do
    let(:attrs) { { signal_phone_number: '+49123456789' } }
    it { should have_css('h2', text: 'Signal') }
  end
end
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.neighbors import KNeighborsClassifier

# Demo: predict a sale-price class for a car with k-nearest-neighbours.
#
# The original script could never run:
#   * it had a single sample, so train_test_split(test_size=0.2) raises
#     ValueError (the train split would be empty);
#   * the feature matrix mixed strings ('Petrol', 'Manual') with numbers,
#     which numpy stores as one string array that StandardScaler rejects;
#   * labels were 2-D, which classifiers warn about / reject.
#
# Features per row: [mileage_km, fuel_type, transmission, year, mpg, engine_cc]
# Categorical columns are integer-encoded: fuel_type 0=Petrol 1=Diesel,
# transmission 0=Manual 1=Automatic.
features = np.array([
    [110000, 0, 0, 1990, 30, 3000],
    [60000,  1, 1, 2005, 45, 2000],
    [30000,  0, 1, 2012, 50, 1600],
    [150000, 1, 0, 1998, 38, 2500],
    [20000,  0, 1, 2018, 55, 1400],
])
# One 1-D label per sample (price class); at least two classes are needed
# for the classifier to learn anything.
labels = np.array([1, 0, 0, 1, 0])

# Perform data preprocessing: hold out 20% for testing, then standardize.
# The scaler is fit on the training split only to avoid test-set leakage.
X_train, X_test, y_train, y_test = train_test_split(
    features, labels, test_size=0.2, random_state=0
)
scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)

# Create and fit the model.
model = KNeighborsClassifier(n_neighbors=3)
model.fit(X_train_scaled, y_train)

# Predict sale-price class for the whole (already 2-D) test split —
# no need to wrap a single row in another list.
predicted_price = model.predict(X_test_scaled)
print("Predicted sale price:", predicted_price[0])
#!/bin/bash
# Launches a batch of my_run2.sh experiments in parallel (one '&' job each).
# Argument order appears to be: <model> <id> <phase?> <suffix?> — confirm
# against my_run2.sh; the commented lines below are earlier phase-1 runs
# kept for reference.
#./my_run2.sh AttPool 0 1 4 &
#./my_run2.sh AttPool 1 1 4 &
#./my_run2.sh AttPool 2 1 4 &
#./my_run2.sh AttPool 3 1 5 &
#./my_run2.sh AttPool 4 1 5 &
#./my_run2.sh AttPool 5 1 5 &
#./my_run2.sh AttPool 6 1 6 &
# Active phase-2 runs; note ids 0/2 share suffix 6 and 4/6/8 share suffix 7.
./my_run2.sh AttPool 0 2 6 &
./my_run2.sh AttPool 2 2 6 &
#./my_run2.sh AttPool 9 1 "" &
./my_run2.sh AttPool 4 2 7 &
./my_run2.sh AttPool 6 2 7 &
./my_run2.sh AttPool 8 2 7 &
import configparser
import TestingUtils


def build_url_for_servers(server_name):
    """Build a download URL for a known server, embedding test credentials.

    Reads the ``downloads`` username/password from the test config and
    URL-encodes them before putting them in the query string — the original
    interpolated them raw, which produced broken URLs whenever a credential
    contained ``&``, ``=``, ``%`` or spaces.

    :param server_name: "server1" or "server2".
    :return: the download URL, or the string "Invalid server name" for any
             other input (sentinel kept for backward compatibility).
    """
    from urllib.parse import quote

    # Map of known servers to their base download endpoints.
    download_endpoints = {
        "server1": "http://server1.com/download",
        "server2": "http://server2.net/download",
    }
    base_url = download_endpoints.get(server_name)
    if base_url is None:
        return "Invalid server name"

    config = TestingUtils.get_test_config()
    # safe='' also encodes '/' so the credential cannot alter the URL path.
    downloads_username = quote(config.get('downloads', 'username'), safe='')
    downloads_password = quote(config.get('downloads', 'password'), safe='')
    return f"{base_url}?username={downloads_username}&password={downloads_password}"


# Example usage
print(build_url_for_servers("server1"))  # Output: http://server1.com/download?username=your_username&password=your_password
print(build_url_for_servers("server2"))  # Output: http://server2.net/download?username=your_username&password=your_password
#!/bin/bash
# Queries a CUCM (Cisco Unified Communications Manager) node's AXL SOAP API
# over a specific interface, posting three prepared XML request bodies.
curlp () {
    # All positional args form the target IP/hostname.
    ipaddr=$@
    echo
    echo $ipaddr
    # -k: skip TLS verification; -n: credentials from ~/.netrc;
    # request body is read from the local soap*.xml file (-d @file).
    curl --interface eth2 -k -n -H "Content-type: text/xml;charset=UTF-8" -H "SOAPAction:CUCM:DB ver=9.1" -d @./soapGetLuaInfo.xml https://$ipaddr:8443/axl/
    echo
    echo
    curl --interface eth2 -k -n -H "Content-type: text/xml;charset=UTF-8" -H "SOAPAction:CUCM:DB ver=9.1" -d @./soapListTrunk.xml https://$ipaddr:8443/axl/
    echo
    echo
    curl --interface eth2 -k -n -H "Content-type: text/xml;charset=UTF-8" -H "SOAPAction:CUCM:DB ver=9.1" -d @./soapGetTrunk.xml https://$ipaddr:8443/axl/
}
# Exported so the function is visible to subshells (e.g. xargs/parallel use).
export -f curlp
# Run once against the first script argument.
curlp $1
package org.community.domain;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * MyBatis-generator-style "Example" (criteria builder) class for the
 * local_file table. OR-groups of criteria are collected in
 * {@link #oredCriteria}; each {@link Criteria} holds AND-ed conditions.
 * NOTE: this looks like generated code — prefer regenerating over
 * hand-editing.
 */
public class LocalFileExample {
    // ORDER BY fragment appended verbatim to the generated SQL.
    protected String orderByClause;
    // Whether to emit SELECT DISTINCT.
    protected boolean distinct;
    // OR-combined criteria groups.
    protected List<Criteria> oredCriteria;

    public LocalFileExample() { oredCriteria = new ArrayList<Criteria>(); }

    public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; }

    public String getOrderByClause() { return orderByClause; }

    public void setDistinct(boolean distinct) { this.distinct = distinct; }

    public boolean isDistinct() { return distinct; }

    public List<Criteria> getOredCriteria() { return oredCriteria; }

    // Adds an already-built criteria group OR-ed with the existing ones.
    public void or(Criteria criteria) { oredCriteria.add(criteria); }

    // Creates, registers and returns a fresh OR-ed criteria group.
    public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; }

    // Like or(), but only registers the group when it is the first one.
    public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; }

    protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; }

    // Resets the example for reuse.
    public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; }

    /**
     * Base class holding the AND-ed conditions of one criteria group.
     * Each andXxx method appends one SQL condition and returns this
     * (as Criteria) for chaining.
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;

        protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); }

        public boolean isValid() { return criteria.size() > 0; }

        public List<Criterion> getAllCriteria() { return criteria; }

        public List<Criterion> getCriteria() { return criteria; }

        // No-value condition, e.g. "id is null".
        protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); }

        // Single-value condition; 'property' is only used in the error text.
        protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); }

        // Two-value (BETWEEN) condition.
        protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); }

        // ---- id (Integer) ----
        public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; }

        public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; }

        public Criteria andIdEqualTo(Integer value) { addCriterion("id =", value, "id"); return (Criteria) this; }

        public Criteria andIdNotEqualTo(Integer value) { addCriterion("id <>", value, "id"); return (Criteria) this; }

        public Criteria andIdGreaterThan(Integer value) { addCriterion("id >", value, "id"); return (Criteria) this; }

        public Criteria andIdGreaterThanOrEqualTo(Integer value) { addCriterion("id >=", value, "id"); return (Criteria) this; }

        public Criteria andIdLessThan(Integer value) { addCriterion("id <", value, "id"); return (Criteria) this; }

        public Criteria andIdLessThanOrEqualTo(Integer value) { addCriterion("id <=", value, "id"); return (Criteria) this; }

        public Criteria andIdIn(List<Integer> values) { addCriterion("id in", values, "id"); return (Criteria) this; }

        public Criteria andIdNotIn(List<Integer> values) { addCriterion("id not in", values, "id"); return (Criteria) this; }

        public Criteria andIdBetween(Integer value1, Integer value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; }

        public Criteria andIdNotBetween(Integer value1, Integer value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; }

        // ---- file_name (String) ----
        public Criteria andFileNameIsNull() { addCriterion("file_name is null"); return (Criteria) this; }

        public Criteria andFileNameIsNotNull() { addCriterion("file_name is not null"); return (Criteria) this; }

        public Criteria andFileNameEqualTo(String value) { addCriterion("file_name =", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameNotEqualTo(String value) { addCriterion("file_name <>", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameGreaterThan(String value) { addCriterion("file_name >", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameGreaterThanOrEqualTo(String value) { addCriterion("file_name >=", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameLessThan(String value) { addCriterion("file_name <", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameLessThanOrEqualTo(String value) { addCriterion("file_name <=", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameLike(String value) { addCriterion("file_name like", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameNotLike(String value) { addCriterion("file_name not like", value, "fileName"); return (Criteria) this; }

        public Criteria andFileNameIn(List<String> values) { addCriterion("file_name in", values, "fileName"); return (Criteria) this; }

        public Criteria andFileNameNotIn(List<String> values) { addCriterion("file_name not in", values, "fileName"); return (Criteria) this; }

        public Criteria andFileNameBetween(String value1, String value2) { addCriterion("file_name between", value1, value2, "fileName"); return (Criteria) this; }

        public Criteria andFileNameNotBetween(String value1, String value2) { addCriterion("file_name not between", value1, value2, "fileName"); return (Criteria) this; }

        // ---- path (String) ----
        public Criteria andPathIsNull() { addCriterion("path is null"); return (Criteria) this; }

        public Criteria andPathIsNotNull() { addCriterion("path is not null"); return (Criteria) this; }

        public Criteria andPathEqualTo(String value) { addCriterion("path =", value, "path"); return (Criteria) this; }

        public Criteria andPathNotEqualTo(String value) { addCriterion("path <>", value, "path"); return (Criteria) this; }

        public Criteria andPathGreaterThan(String value) { addCriterion("path >", value, "path"); return (Criteria) this; }

        public Criteria andPathGreaterThanOrEqualTo(String value) { addCriterion("path >=", value, "path"); return (Criteria) this; }

        public Criteria andPathLessThan(String value) { addCriterion("path <", value, "path"); return (Criteria) this; }

        public Criteria andPathLessThanOrEqualTo(String value) { addCriterion("path <=", value, "path"); return (Criteria) this; }

        public Criteria andPathLike(String value) { addCriterion("path like", value, "path"); return (Criteria) this; }

        public Criteria andPathNotLike(String value) { addCriterion("path not like", value, "path"); return (Criteria) this; }

        public Criteria andPathIn(List<String> values) { addCriterion("path in", values, "path"); return (Criteria) this; }

        public Criteria andPathNotIn(List<String> values) { addCriterion("path not in", values, "path"); return (Criteria) this; }

        public Criteria andPathBetween(String value1, String value2) { addCriterion("path between", value1, value2, "path"); return (Criteria) this; }

        public Criteria andPathNotBetween(String value1, String value2) { addCriterion("path not between", value1, value2, "path"); return (Criteria) this; }

        // ---- file_size (Long) ----
        public Criteria andFileSizeIsNull() { addCriterion("file_size is null"); return (Criteria) this; }

        public Criteria andFileSizeIsNotNull() { addCriterion("file_size is not null"); return (Criteria) this; }

        public Criteria andFileSizeEqualTo(Long value) { addCriterion("file_size =", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeNotEqualTo(Long value) { addCriterion("file_size <>", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeGreaterThan(Long value) { addCriterion("file_size >", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeGreaterThanOrEqualTo(Long value) { addCriterion("file_size >=", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeLessThan(Long value) { addCriterion("file_size <", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeLessThanOrEqualTo(Long value) { addCriterion("file_size <=", value, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeIn(List<Long> values) { addCriterion("file_size in", values, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeNotIn(List<Long> values) { addCriterion("file_size not in", values, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeBetween(Long value1, Long value2) { addCriterion("file_size between", value1, value2, "fileSize"); return (Criteria) this; }

        public Criteria andFileSizeNotBetween(Long value1, Long value2) { addCriterion("file_size not between", value1, value2, "fileSize"); return (Criteria) this; }

        // ---- file_total_size (Long) ----
        public Criteria andFileTotalSizeIsNull() { addCriterion("file_total_size is null"); return (Criteria) this; }

        public Criteria andFileTotalSizeIsNotNull() { addCriterion("file_total_size is not null"); return (Criteria) this; }

        public Criteria andFileTotalSizeEqualTo(Long value) { addCriterion("file_total_size =", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeNotEqualTo(Long value) { addCriterion("file_total_size <>", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeGreaterThan(Long value) { addCriterion("file_total_size >", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeGreaterThanOrEqualTo(Long value) { addCriterion("file_total_size >=", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeLessThan(Long value) { addCriterion("file_total_size <", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeLessThanOrEqualTo(Long value) { addCriterion("file_total_size <=", value, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeIn(List<Long> values) { addCriterion("file_total_size in", values, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeNotIn(List<Long> values) { addCriterion("file_total_size not in", values, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeBetween(Long value1, Long value2) { addCriterion("file_total_size between", value1, value2, "fileTotalSize"); return (Criteria) this; }

        public Criteria andFileTotalSizeNotBetween(Long value1, Long value2) { addCriterion("file_total_size not between", value1, value2, "fileTotalSize"); return (Criteria) this; }

        // ---- status (Integer) ----
        public Criteria andStatusIsNull() { addCriterion("status is null"); return (Criteria) this; }

        public Criteria andStatusIsNotNull() { addCriterion("status is not null"); return (Criteria) this; }

        public Criteria andStatusEqualTo(Integer value) { addCriterion("status =", value, "status"); return (Criteria) this; }

        public Criteria andStatusNotEqualTo(Integer value) { addCriterion("status <>", value, "status"); return (Criteria) this; }

        public Criteria andStatusGreaterThan(Integer value) { addCriterion("status >", value, "status"); return (Criteria) this; }

        public Criteria andStatusGreaterThanOrEqualTo(Integer value) { addCriterion("status >=", value, "status"); return (Criteria) this; }

        public Criteria andStatusLessThan(Integer value) { addCriterion("status <", value, "status"); return (Criteria) this; }

        public Criteria andStatusLessThanOrEqualTo(Integer value) { addCriterion("status <=", value, "status"); return (Criteria) this; }

        public Criteria andStatusIn(List<Integer> values) { addCriterion("status in", values, "status"); return (Criteria) this; }

        public Criteria andStatusNotIn(List<Integer> values) { addCriterion("status not in", values, "status"); return (Criteria) this; }

        public Criteria andStatusBetween(Integer value1, Integer value2) { addCriterion("status between", value1, value2, "status"); return (Criteria) this; }

        public Criteria andStatusNotBetween(Integer value1, Integer value2) { addCriterion("status not between", value1, value2, "status"); return (Criteria) this; }

        // ---- md5 (String) ----
        public Criteria andMd5IsNull() { addCriterion("md5 is null"); return (Criteria) this; }

        public Criteria andMd5IsNotNull() { addCriterion("md5 is not null"); return (Criteria) this; }

        public Criteria andMd5EqualTo(String value) { addCriterion("md5 =", value, "md5"); return (Criteria) this; }

        public Criteria andMd5NotEqualTo(String value) { addCriterion("md5 <>", value, "md5"); return (Criteria) this; }

        public Criteria andMd5GreaterThan(String value) { addCriterion("md5 >", value, "md5"); return (Criteria) this; }

        public Criteria andMd5GreaterThanOrEqualTo(String value) { addCriterion("md5 >=", value, "md5"); return (Criteria) this; }

        public Criteria andMd5LessThan(String value) { addCriterion("md5 <", value, "md5"); return (Criteria) this; }

        public Criteria andMd5LessThanOrEqualTo(String value) { addCriterion("md5 <=", value, "md5"); return (Criteria) this; }

        public Criteria andMd5Like(String value) { addCriterion("md5 like", value, "md5"); return (Criteria) this; }

        public Criteria andMd5NotLike(String value) { addCriterion("md5 not like", value, "md5"); return (Criteria) this; }

        public Criteria andMd5In(List<String> values) { addCriterion("md5 in", values, "md5"); return (Criteria) this; }

        public Criteria andMd5NotIn(List<String> values) { addCriterion("md5 not in", values, "md5"); return (Criteria) this; }

        public Criteria andMd5Between(String value1, String value2) { addCriterion("md5 between", value1, value2, "md5"); return (Criteria) this; }

        public Criteria andMd5NotBetween(String value1, String value2) { addCriterion("md5 not between", value1, value2, "md5"); return (Criteria) this; }

        // ---- create_time (Date) ----
        public Criteria andCreateTimeIsNull() { addCriterion("create_time is null"); return (Criteria) this; }

        public Criteria andCreateTimeIsNotNull() { addCriterion("create_time is not null"); return (Criteria) this; }

        public Criteria andCreateTimeEqualTo(Date value) { addCriterion("create_time =", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeNotEqualTo(Date value) { addCriterion("create_time <>", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeGreaterThan(Date value) { addCriterion("create_time >", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeGreaterThanOrEqualTo(Date value) { addCriterion("create_time >=", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeLessThan(Date value) { addCriterion("create_time <", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeLessThanOrEqualTo(Date value) { addCriterion("create_time <=", value, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeIn(List<Date> values) { addCriterion("create_time in", values, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeNotIn(List<Date> values) { addCriterion("create_time not in", values, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeBetween(Date value1, Date value2) { addCriterion("create_time between", value1, value2, "createTime"); return (Criteria) this; }

        public Criteria andCreateTimeNotBetween(Date value1, Date value2) { addCriterion("create_time not between", value1, value2, "createTime"); return (Criteria) this; }

        // ---- update_time (Date) ----
        public Criteria andUpdateTimeIsNull() { addCriterion("update_time is null"); return (Criteria) this; }

        public Criteria andUpdateTimeIsNotNull() { addCriterion("update_time is not null"); return (Criteria) this; }

        public Criteria andUpdateTimeEqualTo(Date value) { addCriterion("update_time =", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeNotEqualTo(Date value) { addCriterion("update_time <>", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeGreaterThan(Date value) { addCriterion("update_time >", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeGreaterThanOrEqualTo(Date value) { addCriterion("update_time >=", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeLessThan(Date value) { addCriterion("update_time <", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeLessThanOrEqualTo(Date value) { addCriterion("update_time <=", value, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeIn(List<Date> values) { addCriterion("update_time in", values, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeNotIn(List<Date> values) { addCriterion("update_time not in", values, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeBetween(Date value1, Date value2) { addCriterion("update_time between", value1, value2, "updateTime"); return (Criteria) this; }

        public Criteria andUpdateTimeNotBetween(Date value1, Date value2) { addCriterion("update_time not between", value1, value2, "updateTime"); return (Criteria) this; }

        // ---- description (String) ----
        public Criteria andDescriptionIsNull() { addCriterion("description is null"); return (Criteria) this; }

        public Criteria andDescriptionIsNotNull() { addCriterion("description is not null"); return (Criteria) this; }

        public Criteria andDescriptionEqualTo(String value) { addCriterion("description =", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionNotEqualTo(String value) { addCriterion("description <>", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionGreaterThan(String value) { addCriterion("description >", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionGreaterThanOrEqualTo(String value) { addCriterion("description >=", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionLessThan(String value) { addCriterion("description <", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionLessThanOrEqualTo(String value) { addCriterion("description <=", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionLike(String value) { addCriterion("description like", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionNotLike(String value) { addCriterion("description not like", value, "description"); return (Criteria) this; }

        public Criteria andDescriptionIn(List<String> values) { addCriterion("description in", values, "description"); return (Criteria) this; }

        public Criteria andDescriptionNotIn(List<String> values) { addCriterion("description not in", values, "description"); return (Criteria) this; }

        public Criteria andDescriptionBetween(String value1, String value2) { addCriterion("description between", value1, value2, "description"); return (Criteria) this; }

        public Criteria andDescriptionNotBetween(String value1, String value2) { addCriterion("description not between", value1, value2, "description"); return (Criteria) this; }

        // Case-insensitive LIKE variants: both column and value are upper-cased.
        public Criteria andFileNameLikeInsensitive(String value) { addCriterion("upper(file_name) like", value.toUpperCase(), "fileName"); return (Criteria) this; }

        public Criteria andPathLikeInsensitive(String value) { addCriterion("upper(path) like", value.toUpperCase(), "path"); return (Criteria) this; }

        public Criteria andMd5LikeInsensitive(String value) { addCriterion("upper(md5) like", value.toUpperCase(), "md5"); return (Criteria) this; }

        public Criteria andDescriptionLikeInsensitive(String value) { addCriterion("upper(description) like", value.toUpperCase(), "description"); return (Criteria) this; }
    }

    /** Concrete criteria group; exists so generated code can be extended. */
    public static class Criteria extends GeneratedCriteria {
        protected Criteria() { super(); }
    }

    /** One SQL condition: a fragment plus zero, one or two bound values. */
    public static class Criterion {
        private String condition;
        private Object value;
        private Object secondValue;
        // Exactly one of the following flags is set per instance.
        private boolean noValue;
        private boolean singleValue;
        private boolean betweenValue;
        private boolean listValue;
        // Optional MyBatis type handler name; null means default handling.
        private String typeHandler;

        public String getCondition() { return condition; }

        public Object getValue() { return value; }

        public Object getSecondValue() { return secondValue; }

        public boolean isNoValue() { return noValue; }

        public boolean isSingleValue() { return singleValue; }

        public boolean isBetweenValue() { return betweenValue; }

        public boolean isListValue() { return listValue; }

        public String getTypeHandler() { return typeHandler; }

        protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; }

        // Single value: lists are flagged listValue for IN/NOT IN expansion.
        protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } }

        protected Criterion(String condition, Object value) { this(condition, value, null); }

        // Two values: BETWEEN.
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; }

        protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); }
    }
}
#!/bin/bash
#= git-create-repo-user-git.sh
# (c)2016 John de Graaff @ FairFlowTech
# Creates a new bare git repository under /opt/git, owned by the 'git'
# user, when invoked remotely over ssh by a git client.
# usage from 'git client':
# ssh -t -p2221 jdg@vps3.fairflowlabs.com '/usr/local/syssetup/bin/git-create-repo-user-git.sh rtpc.git'
# ssh -t -p2221 jdg@vps5.dgt-bv.com '/usr/local/syssetup/bin/git-create-repo-user-git.sh rtpc.git'

USER="git"
DIR="/opt/git"
HOST="vps5.dgt-bv.com"
PORT="2221"
REPO="ssh://$USER@$HOST:$PORT$DIR"
ORIG="vps5"
BIN="/home/jdg/opensyssetup/bin/git-create-repo-user-git.sh"
REPNAME="$1"
# Target dir always gets a .git suffix; the arg must NOT include one.
REPDIR="$DIR/$REPNAME.git"
# Earlier attempts at building the client-side ssh command, kept for reference:
#CMD="ssh -t -p $PORT jdg@$HOST 'REPO=\"RepoName\" ; /usr/local/syssetup/bin/git-create-repo-user-git.sh \$REPO' "
#CMD="ssh -t -p $PORT jdg@$HOST '/usr/local/syssetup/bin/git-create-repo-user-git.sh $REPNAME'"
#CMD="ssh -t -p $PORT jdg@$HOST '/usr/local/syssetup/bin/git-create-repo-user-git.sh repo-name '"
#CMD="ssh -t -p $PORT jdg@$HOST '/bin/bash -c \"/usr/local/syssetup/bin/git-create-repo-user-git.sh $REPNAME\" '"
#CMD="echo \"/usr/local/syssetup/bin/git-create-repo-user-git.sh $REPNAME\" | ssh -t -p $PORT jdg@$HOST '/bin/bash -l -s' "
CMD="ssh -t -p $PORT jdg@$HOST '$BIN repo-name-without-.git '"
BASENAME=`basename $0`

# Prints invocation help (does not exit — callers decide).
usage() {
    echo
    # NOTE(review): the trailing ':' here is outside the quotes and is
    # echoed literally after the closing quote — possibly a typo; left as-is.
    echo "Usage: $BASENAME <repo-name>.git":
    echo " will create dir '/opt/git/repo-name.git/' and in it do 'git init --bare'"
    echo
    echo " usage from 'git client':"
    echo " $CMD "
    # echo " ssh -t -p $PORT jdg@$HOST 'REPO=\"RepoName\" ; /usr/local/syssetup/bin/git-create-repo-user-git.sh repo-name '"
    echo
    #exit 1
}
# Error helpers: error_usage shows help, error aborts, echo_msg_log syslogs.
error_usage() { echo "# $BASENAME: Error - $1"; usage ; }
error() { echo "# $BASENAME: Error - $1"; exit 1 ; }
echo_msg_log() { echo "# $BASENAME: Error - $1"; logger "$1" ; }

# If /opt/git is absent we are presumably not on the git server at all;
# just print help and bail out.
if [[ ! -d "$DIR" ]]; then
    usage;
    exit;
    #
    # echo "[running]> $CMD "
    # # $CMD
    # echo "/usr/local/syssetup/bin/git-create-repo-user-git.sh $REPNAME" | ssh -t -p $PORT jdg@$HOST '/bin/bash -l -s'
    # exit
fi
# Argument validation: name required, must not itself end in .git,
# and the target must not already exist.
[[ "$REPNAME" == "" ]] && error_usage "provide new Repo name (without .git)"
#[[ "$REPNAME" =~ "git" ]] && error_usage "provide new repo name (WITHOUT .git)"
echo "$REPNAME" | egrep -iq "\.git" && error_usage "provide new repo name (without .GIT)"
[[ -d "$REPDIR" ]] && error "directory '$REPDIR' already exists!"

# Requires passwordless sudo for mkdir and git (see visudo note):
# > visudo
# #jdg
# %sudo ALL=NOPASSWD:/bin/mkdir
# %sudo ALL=NOPASSWD:/usr/bin/git
echo "# OK we're good, I got this command with correct params:"
echo "# /usr/local/syssetup/bin/git-create-repo-user-git.sh $REPNAME.git' ..."
#exit
echo "# > sudo -u $USER /bin/mkdir -pv \"$REPDIR\" ..."
sudo -u $USER /bin/mkdir -pv "$REPDIR"
[[ ! -d "$REPDIR" ]] && error "could not create directory '$REPDIR' !"
echo "# > cd \"$REPDIR\" ..."
cd "$REPDIR"
echo "# > sudo -u $USER /usr/bin/git init --bare ..."
sudo -u $USER /usr/bin/git init --bare
# jdg: cant do this in bare repo... https://git-scm.com/docs/git-init
#FILE="empty.txt"
#sudo -u $USER touch $FILE
#sudo -u $USER /usr/bin/git add $FILE
#sudo -u $USER /usr/bin/git commit -am 'initial commit'
#sudo -u $USER /usr/bin/git push
#
echo "# done!"
echo
# Tell the user how to wire the new repo up as a remote on the client side.
echo "# next use these commands to add and setup remote:"
cat <<HERE
git-show-repos.sh
git remote add vps5 ssh://git@vps5.dgt-bv.com:2221/opt/git/$REPNAME.git
git push --set-upstream vps5 master
git branch --set-upstream-to=vps5/master master
HERE
echo "# that's it folks!"
#
import React from 'react' import { Box, Text } from '@rebass/emotion' import { colors } from '../globals' import { Link as GatsbyLink } from 'gatsby' import styled from '@emotion/styled' const Header = ({ visible }) => { return ( <BoxHeader width={1} visible={visible}> <Text>Logo</Text> <Box> <Link activeClassName='active' to='/'> Home </Link> <Link activeClassName='active' to='/contact'> Contact </Link> </Box> </BoxHeader> ) } export default Header const BoxHeader = styled(Box)` position:fixed; z-index:1000; top:0; left:0; background-color: ${colors['dark-blue']}; color: ${props => (props.visible ? colors.blacks[8] : colors.whites[8])}; padding: .5rem calc((100vw - 650px) / 2) ; display:flex; justify-content:space-between; ` const Link = styled(GatsbyLink)` color:${colors.whites[8]}; text-decoration:none; margin-right:1.5rem; &:last-of-type { margin-right:0; } &.active { color: ${colors.whites[10]}; border-bottom:1px solid ${colors.whites[10]}; } `
#!/usr/bin/env bash
# Tears down the netdata helm release, removes its persistent volume,
# and wipes the backing data directory on disk.

# Run relative to the repo root (one level above this script's directory).
cd "$(dirname "$0")" && cd ..
set -x

helm delete netdata

# Delete the PV in the FOREGROUND. The original backgrounded this with
# '&', which raced the deletion against the 'rm -rf' below: the backing
# directory could be wiped while kubectl was still tearing down the PV.
kubectl delete -f ./yamls/pv.yaml

sudo rm -rf /mnt/netdata/*
import os
import random
import shutil
from time import sleep
from datetime import datetime

import tweepy
import dropbox
from dropbox.exceptions import ApiError
import pandas as pd

from local_module.api_settings import (
    API_KEY,
    API_SECRET_KEY,
    ACCESS_TOKEN,
    ACCESS_SECRET,
    DB_ACCESS_TOKEN,
    UID,
)


class StreamListener(tweepy.StreamListener):
    """Twitter stream handler: when a follower mentions the target account,
    replies with a random subject image pulled from Dropbox."""

    def __init__(self, dbo, api, me, target):
        super().__init__(api=api)
        # Cached follower-id set; populated lazily by refresh_followers().
        self.followers = None
        # Screen name whose mentions trigger a reply.
        self.target = target
        # The bot's own user object (api.me()).
        self.me = me
        # Timestamp of the last follower refresh.
        self.dt = datetime.now()
        # DropBoxController instance used to fetch subjects.
        self.dropbox = dbo

    def refresh_followers(self):
        """Re-fetch the follower-id set when the cache is stale.

        NOTE(review): refresh_min is 0, so refresh_sec is 0 and the
        follower list is re-fetched on EVERY status — possibly intentional
        for a small account, but worth confirming (rate limits).
        """
        min_sec: int = 60
        refresh_min: int = 0
        refresh_sec: int = min_sec * refresh_min
        now = datetime.now()
        if self.followers is None or (now - self.dt).seconds >= refresh_sec:
            self.followers = {fid for fid in tweepy.Cursor(self.api.followers_ids, user_id=self.me.id).items()}
            self.dt = now

    def is_follower(self, uid: int) -> bool:
        # Membership check against the cached follower-id set.
        return uid in self.followers

    @staticmethod
    def retweet_id(status):
        """Return the retweeted status id, or the raised exception object
        when `status` is not a retweet (AttributeError on .retweeted_status).

        NOTE(review): returning the exception instead of None is unusual;
        callers detect "not a retweet" via `type(...) is not int`.
        """
        try:
            return status.retweeted_status.id
        except Exception as e:
            return e

    def tweet(self, status):
        """Reply to `status` with a randomly chosen subject image.

        Skips retweets (retweet_id returns an int only for retweets).
        The downloaded image file is always removed afterwards (finally).
        """
        requester: str = status.user.screen_name
        tweet_id: int = status.id
        file_path, tweet = self.dropbox.get_subject()
        tweet: str = f"@{requester} \n{tweet}"
        retweet_id = self.retweet_id(status)
        try:
            if type(retweet_id) is not int:
                self.api.update_with_media(
                    filename=file_path,
                    status=tweet,
                    in_reply_to_status_id=tweet_id,
                )
        finally:
            os.remove(file_path)

    def on_status(self, status):
        # Reply only to followers who mention the target account.
        self.refresh_followers()
        uid: int = status.user.id
        if self.is_follower(uid) and f"@{self.target}" in status.text:
            self.tweet(status)


class DropBoxController:
    """Picks a random Aozora-Bunko card image from Dropbox and builds the
    accompanying tweet text."""

    def __init__(self, db_obj, df):
        # Authenticated dropbox.Dropbox client.
        self.db_obj = db_obj
        # DataFrame of cards; rows look like [card_no, title, url, author]
        # (inferred from the index constants below — confirm against the TSV).
        self.df = df

    def _choice_subject(self) -> tuple:
        """Pick a random card row, then a random file entry from that
        card's Dropbox folder. Returns (dropbox entry, card row list)."""
        CARD_INDEX: int = 0
        card_list: list = random.choice(self.df.values.tolist())
        card_no: str = card_list[CARD_INDEX]
        subjects = self.db_obj.files_list_folder(
            f"/{card_no}", recursive=True).entries
        subject = random.choice(subjects)
        return subject, card_list

    def _dl_file(self, subject: str) -> str:
        """Download the given Dropbox path into ./tmp/ with a timestamped
        name and return the local file path."""
        now = datetime.now()
        file_path: str = f"./tmp/{now:%Y%m%d_%H%M%S}.png"
        with open(file_path, "wb") as f:
            _, res = self.db_obj.files_download(path=subject)
            f.write(res.content)
        return file_path

    @staticmethod
    def _get_card_name(path: str) -> str:
        # Hashtag built from a fixed slice of the Dropbox path
        # (drops the 4-char extension; assumes a fixed path layout — confirm).
        return f"card{path[-19:-4]}"

    def get_subject(self) -> tuple:
        """Return (local image path, tweet text) for a random card.

        Retries (with a 1s pause) on Dropbox ApiError, e.g. transient
        listing/download failures.
        """
        while True:
            try:
                subject, card_list = self._choice_subject()
                file_path = self._dl_file(subject.path_display)
                break
            except ApiError:
                sleep(1)
        card_name = self._get_card_name(subject.path_display)
        TITLE_INDEX: int = 1
        URL_INDEX: int = 2
        AUTHOR_INDEX: int = 3
        tweet_lines: list = [
            f"『{card_list[TITLE_INDEX]}』",
            card_list[AUTHOR_INDEX],
            f"CARD: https://www.aozora.gr.jp/{card_list[URL_INDEX]}",
            f"#{card_name}",
        ]
        tweet: str = "\n".join(tweet_lines)
        return file_path, tweet


def twitter_obj(api_key: str, api_secret_key: str, access_token: str, access_token_secret: str):
    """Factory-of-factories: returns a function that builds an
    authenticated tweepy.API with the captured credentials."""
    def gen_twitter_obj(**kwargs):
        auth = tweepy.OAuthHandler(api_key, api_secret_key)
        auth.set_access_token(access_token, access_token_secret)
        return tweepy.API(auth, wait_on_rate_limit=True, **kwargs)
    return gen_twitter_obj


def dropbox_obj(token: str):
    """Build a Dropbox client and verify the token with an account call."""
    dbx = dropbox.Dropbox(token)
    dbx.users_get_current_account()
    return dbx


def init_dir(dir_name: str):
    """Recreate `dir_name` empty (removes any existing contents)."""
    if os.path.exists(dir_name):
        shutil.rmtree(dir_name)
    os.mkdir(dir_name)


def main():
    """Wire up Dropbox + Twitter clients and start streaming mentions."""
    # Init
    init_dir("./tmp")
    # Dropbox
    dbx = dropbox_obj(DB_ACCESS_TOKEN)
    df = pd.read_csv("./db/random_shosha.tsv", sep="\t")
    dbc = DropBoxController(dbx, df)
    # Twitter
    gen_twi_obj = twitter_obj(
        API_KEY,
        API_SECRET_KEY,
        ACCESS_TOKEN,
        ACCESS_SECRET,
    )
    api = gen_twi_obj()
    my_info = api.me()
    streamListener = StreamListener(
        dbo=dbc,
        api=api,
        me=my_info,
        target=UID
    )
    stream = tweepy.Stream(auth=api.auth, listener=streamListener)
    print("Start!")
    # Non-blocking stream filtered to mentions of the bot's own handle.
    stream.filter(track=[f"@{UID}"], is_async=True, stall_warnings=True)


if __name__ == "__main__":
    main()
'use strict';

/**
 * Module dependencies.
 */
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

/**
 * Logo Schema
 *
 * Field definitions kept in a named object so the schema shape is easy
 * to scan: a required display name, the rendered logo output, a creation
 * timestamp, and a reference to the owning User document.
 */
var logoFields = {
  name: {
    type: String,
    default: '',
    required: 'Please fill Logo name',
    trim: false
  },
  outputLogo: {
    type: String,
    default: '',
    required: 'Fatal Error'
  },
  created: {
    type: Date,
    default: Date.now
  },
  user: {
    type: Schema.ObjectId,
    ref: 'User'
  }
};

var LogoSchema = new Schema(logoFields);

// Register the model so it can be retrieved via mongoose.model('Logo').
mongoose.model('Logo', LogoSchema);
import UIKit

/// Password-reset screen: collects an email address, validates its format,
/// and simulates sending a reset link (no real network call is made here).
class ResetPassword: UIViewController {

    // UI elements
    let emailTextField = UITextField()
    let submitButton = UIButton()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
    }

    /// Builds the view hierarchy and Auto Layout constraints in code.
    private func setupUI() {
        // Configure email text field
        emailTextField.placeholder = "Enter your email"
        emailTextField.keyboardType = .emailAddress
        emailTextField.borderStyle = .roundedRect
        emailTextField.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(emailTextField)

        // Configure submit button
        submitButton.setTitle("Submit", for: .normal)
        submitButton.addTarget(self, action: #selector(submitButtonTapped), for: .touchUpInside)
        submitButton.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(submitButton)

        // Layout constraints
        NSLayoutConstraint.activate([
            emailTextField.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            emailTextField.centerYAnchor.constraint(equalTo: view.centerYAnchor, constant: -50),
            emailTextField.widthAnchor.constraint(equalToConstant: 200),

            submitButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            submitButton.topAnchor.constraint(equalTo: emailTextField.bottomAnchor, constant: 20)
        ])
    }

    /// Validates the entered email and kicks off the (simulated) reset flow.
    @objc private func submitButtonTapped() {
        guard let email = emailTextField.text, !email.isEmpty else {
            displayError(message: "Please enter your email")
            return
        }

        if isValidEmail(email) {
            // Simulate sending password reset link
            sendPasswordResetLink(to: email)
        } else {
            displayError(message: "Invalid email format")
        }
    }

    /// Returns true when `email` matches a basic user@domain.tld pattern.
    private func isValidEmail(_ email: String) -> Bool {
        // Simple email format validation
        let emailRegex = "[A-Z0-9a-z._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}"
        return NSPredicate(format: "SELF MATCHES %@", emailRegex).evaluate(with: email)
    }

    /// Simulates the reset-link request with a 2s background delay, then
    /// reports success on the main queue.
    private func sendPasswordResetLink(to email: String) {
        // Simulate sending password reset link
        DispatchQueue.global().async {
            // Simulate network delay
            Thread.sleep(forTimeInterval: 2)

            DispatchQueue.main.async {
                // NOTE(review): `self` is captured strongly; acceptable for a
                // short simulated delay, but consider [weak self] for real work.
                self.displaySuccessMessage()
            }
        }
    }

    /// Shows the success alert after the simulated send completes.
    private func displaySuccessMessage() {
        let alert = UIAlertController(title: "Success", message: "Password reset link sent to your email", preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }

    /// Shows an error alert with the given message.
    private func displayError(message: String) {
        let alert = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
//
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#import <EarlGrey/GREYBaseAction.h>
#import <EarlGrey/GREYConstants.h>

NS_ASSUME_NONNULL_BEGIN

/**
 * Error domain used for pinch related NSError objects.
 */
GREY_EXTERN NSString *const kGREYPinchErrorDomain;

/**
 * Error codes for pinch related failures.
 */
typedef NS_ENUM(NSInteger, GREYPinchErrorCode) {
  kGREYPinchFailedErrorCode = 0,
};

/**
 * A @c GREYAction that performs the pinch gesture on the view on which it is called.
 */
@interface GREYPinchAction : GREYBaseAction

// Only the designated initializer is declared here; the action itself is
// performed through the GREYBaseAction interface.

/**
 * Performs a pinch action in the given @c direction for the @c duration. The start of outward
 * pinch is from the center of the view and stops before 20% margin of the view's bounding rect.
 *
 * For an inward pinch the start point is at a 20% margin of the view's bounding rect on either
 * side and stops at the center. The default angle of the pinch action is 30 degrees to closely
 * match the average pinch angle of a natural right handed pinch.
 *
 * @param direction  The direction of the pinch.
 * @param duration   The time interval for which the pinch takes place.
 * @param pinchAngle Angle of the vector in radians to which the pinch direction is pointing.
 *
 * @returns An instance of @c GREYPinchAction, initialized with a provided direction and
 *          duration and angle.
 */
- (instancetype)initWithDirection:(GREYPinchDirection)direction
                         duration:(CFTimeInterval)duration
                       pinchAngle:(double)pinchAngle;

@end

NS_ASSUME_NONNULL_END
<gh_stars>0 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; exports.default = safeMerge; /** * Takes a list of object and safely merges their properties into a new object. * Throws if any of the objects contain a duplicate key that is also in another object. * Does not modify the given objects. * @param {...Object} l Any number of objects to merge * @return {Object} Merged object */ function safeMerge() { for (var _len = arguments.length, l = Array(_len), _key = 0; _key < _len; _key++) { l[_key] = arguments[_key]; } // If the objects submitted in the list have duplicates in their key names, // abort the merge and tell the function's user to check his objects. if (doesObjectListHaveDuplicates(l)) { throw new Error('The objects you submitted for merging have duplicates. Merge aborted.'); } return _extends.apply(undefined, [{}].concat(l)); } /** * Checks a list of objects for key duplicates and returns a boolean */ function doesObjectListHaveDuplicates(l) { var mergedList = l.reduce(function (merged, obj) { return obj ? merged.concat(Object.keys(obj)) : merged; }, []); // Taken from: http://stackoverflow.com/a/7376645/1263876 // By casting the array to a Set, and then checking if the size of the array // shrunk in the process of casting, we can check if there were any duplicates return new Set(mergedList).size !== mergedList.length; } module.exports = exports['default'];
package com.cupshe.restclient.lb;

import com.cupshe.restclient.exception.NotFoundException;
import org.springframework.lang.NonNull;

import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Random (R)
 *
 * @author zxy
 */
public class RandomLoadBalancer implements LoadBalancer {

    private final List<String> services;

    public RandomLoadBalancer(@NonNull List<String> services) {
        this.services = services;
    }

    /**
     * Picks a service address uniformly at random, retrying (at most
     * {@code services.size()} times) when the chosen slot holds null.
     *
     * @return a non-null service address
     * @throws NotFoundException if the list is empty or no non-null
     *                           entry was drawn within the attempt budget
     */
    @Override
    public String next() {
        final int poolSize = services.size();
        int remainingAttempts = poolSize;
        while (remainingAttempts > 0) {
            String candidate = services.get(ThreadLocalRandom.current().nextInt(0, poolSize));
            if (candidate != null) {
                return candidate;
            }
            remainingAttempts--;
        }

        throw new NotFoundException();
    }
}
# Capistrano recipe mirroring the `rake elastics:*` index-management tasks
# on the deploy target.
Capistrano::Configuration.instance.load do
  namespace :elastics do
    # One cap task per rake task; each simply shells out to rake remotely.
    %w(create drop migrate reindex).each do |method|
      desc "rake elastics:#{method}"
      # Restrict to the primary :elastics host so index operations run once.
      task method, roles: :elastics, only: {primary: true} do
        bundle_cmd = fetch(:bundle_cmd, 'bundle')
        # Prefer :rack_env, fall back to :rails_env, default to 'production'.
        env = fetch(:rack_env, fetch(:rails_env, 'production'))
        # INDICES / ES_OPTIONS are forwarded from the local environment.
        run "cd #{current_path} && " \
          "#{bundle_cmd} exec rake elastics:#{method}[#{ENV['INDICES']}] #{ENV['ES_OPTIONS']} " \
          "RAILS_ENV=#{env}"
      end
    end
  end
end
def dropEvent(self, event) -> None: """ Overrides :meth:`QtWidgets.QMainWindow.dropEvent`. Handles the file drop event and triggers the plotting process. Parameters: - event: The event object representing the drop action. """ # Assuming self.app.plot is a method that initiates the plotting process self.app.plot(file=self._drag_drop_root, block=False) # Initiates plotting with the dropped file event.accept() # Indicates that the drop event has been successfully handled
<reponame>ooni/orchestra
package sched

import (
	"bytes"
	"database/sql"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"os"
	"sync"
	"time"

	"github.com/apex/log"
	"github.com/jmoiron/sqlx"
	"github.com/jmoiron/sqlx/types"
	"github.com/lib/pq"
	"github.com/ooni/orchestra/common"
	"github.com/satori/go.uuid"
	"github.com/spf13/viper"
)

// Package-level logger tagged with this package's identity.
var ctx = log.WithFields(log.Fields{
	"pkg": "sched",
	"cmd": "ooni-orchestrate",
})

// AlertData is the alert message
type AlertData struct {
	ID      string                 `json:"id"`
	Message string                 `json:"message" binding:"required"`
	Extra   map[string]interface{} `json:"extra"`
}

// TaskData is the data for the task
type TaskData struct {
	ID        string                 `json:"id"`
	TestName  string                 `json:"test_name" binding:"required"`
	Arguments map[string]interface{} `json:"arguments"`
	State     string
}

// JobTarget the target of a job
type JobTarget struct {
	ClientID  string
	TaskID    *string
	TaskData  *TaskData
	AlertData *AlertData
	Token     string
	Platform  string
}

// NewJobTarget create a new job target instance
func NewJobTarget(cID string, token string, plat string, tid *string, td *TaskData, ad *AlertData) *JobTarget {
	return &JobTarget{
		ClientID:  cID,
		TaskID:    tid,
		TaskData:  td,
		AlertData: ad,
		Token:     token,
		Platform:  plat,
	}
}

// Job container
type Job struct {
	ID       string
	Schedule Schedule
	Delay    int64
	Comment  string

	NextRunAt time.Time
	TimesRun  int64

	// lock guards the scheduling state above; jobTimer fires the next run.
	lock     sync.RWMutex
	jobTimer *time.Timer
	IsDone   bool
}

// NewJob create a new job
func NewJob(jID string, comment string, schedule Schedule, delay int64) *Job {
	return &Job{
		ID:        jID,
		Comment:   comment,
		Schedule:  schedule,
		Delay:     delay,
		TimesRun:  0,
		lock:      sync.RWMutex{},
		IsDone:    false,
		NextRunAt: schedule.StartTime,
	}
}

// CreateTask creates a new task and stores it in the JobDB
func (j *Job) CreateTask(cID string, t *TaskData, jDB *JobDB) (string, error) {
	tx, err := jDB.db.Begin()
	if err != nil {
		ctx.WithError(err).Error("failed to open createTask transaction")
		return "", err
	}

	var taskID = uuid.NewV4().String()
	{
		query := fmt.Sprintf(`INSERT INTO %s ( id, probe_id, job_id, test_name, arguments, state, progress, creation_time, notification_time, accept_time, done_time, last_updated ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`,
			pq.QuoteIdentifier(common.TasksTable))
		stmt, err := tx.Prepare(query)
		if err != nil {
			ctx.WithError(err).Error("failed to prepare task create query")
			return "", err
		}
		defer stmt.Close()

		taskArgsStr, err := json.Marshal(t.Arguments)
		ctx.Debugf("task args: %v", t.Arguments)
		if err != nil {
			ctx.WithError(err).Error("failed to serialise task arguments in createTask")
			return "", err
		}
		now := time.Now().UTC()
		// New tasks start in state "ready" with zero progress; the
		// notification/accept/done timestamps are filled in later.
		_, err = stmt.Exec(taskID, cID, j.ID, t.TestName, taskArgsStr, "ready", 0, now, nil, nil, nil, now)
		if err != nil {
			tx.Rollback()
			ctx.WithError(err).Error("failed to insert into tasks table")
			return "", err
		}
		if err = tx.Commit(); err != nil {
			ctx.WithError(err).Error("failed to commit transaction in tasks table, rolling back")
			return "", err
		}
	}
	return taskID, nil
}

// GetTargets returns all the targets for the job
func (j *Job) GetTargets(jDB *JobDB) []*JobTarget {
	var (
		err             error
		query           string
		targetCountries []string
		targetPlatforms []string
		targets         []*JobTarget
		taskNo          sql.NullInt64
		alertNo         sql.NullInt64
		rows            *sql.Rows
		taskData        *TaskData
		alertData       *AlertData
	)
	ctx.Debug("getting targets")

	query = fmt.Sprintf(`SELECT target_countries, target_platforms, task_no, alert_no FROM %s WHERE id = $1`,
		pq.QuoteIdentifier(common.JobsTable))
	err = jDB.db.QueryRow(query, j.ID).Scan(
		pq.Array(&targetCountries),
		pq.Array(&targetPlatforms),
		&taskNo, &alertNo)
	if err != nil {
		ctx.WithError(err).Error("failed to obtain targets")
		if err == sql.ErrNoRows {
			panic("could not find job with ID")
		}
		panic("other error in query")
	}

	// A job references either an alert or a task, never both.
	if alertNo.Valid {
		var (
			alertExtra types.JSONText
		)
		ad := AlertData{}
		query := fmt.Sprintf(`SELECT message, extra FROM %s WHERE alert_no = $1`,
			pq.QuoteIdentifier(common.JobAlertsTable))
		err = jDB.db.QueryRow(query, alertNo.Int64).Scan(
			&ad.Message,
			&alertExtra)
		if err != nil {
			ctx.WithError(err).Errorf("failed to get alert_no %d", alertNo.Int64)
			panic("failed to get alert_no")
		}
		err = alertExtra.Unmarshal(&ad.Extra)
		if err != nil {
			ctx.WithError(err).Error("failed to unmarshal json for alert")
			panic("invalid JSON in database")
		}
		alertData = &ad
	} else if taskNo.Valid {
		var (
			taskArgs types.JSONText
		)
		td := TaskData{}
		query := fmt.Sprintf(`SELECT test_name, arguments FROM %s WHERE task_no = $1`,
			pq.QuoteIdentifier(common.JobTasksTable))
		err = jDB.db.QueryRow(query, taskNo.Int64).Scan(
			&td.TestName,
			&taskArgs)
		if err != nil {
			ctx.WithError(err).Errorf("failed to get task_no %d", taskNo.Int64)
			panic("failed to get task_no")
		}
		err = taskArgs.Unmarshal(&td.Arguments)
		if err != nil {
			ctx.WithError(err).Error("failed to unmarshal json for task")
			panic("invalid JSON in database")
		}
		taskData = &td
	} else {
		panic("inconsistent database missing task_no or alert_no")
	}

	// XXX this is really ghetto. There is probably a much better way of doing
	// it.
	// Build the active-probe selection query, narrowing by country and/or
	// platform only when the job specifies them.
	query = fmt.Sprintf("SELECT id, token, platform FROM %s", pq.QuoteIdentifier(common.ActiveProbesTable))
	query += " WHERE is_token_expired = false AND token != ''"
	if len(targetCountries) > 0 && len(targetPlatforms) > 0 {
		query += " AND probe_cc = ANY($1) AND platform = ANY($2)"
		rows, err = jDB.db.Query(query, pq.Array(targetCountries), pq.Array(targetPlatforms))
	} else if len(targetCountries) > 0 || len(targetPlatforms) > 0 {
		if len(targetCountries) > 0 {
			query += " AND probe_cc = ANY($1)"
			rows, err = jDB.db.Query(query, pq.Array(targetCountries))
		} else {
			query += " AND platform = ANY($1)"
			rows, err = jDB.db.Query(query, pq.Array(targetPlatforms))
		}
	} else {
		rows, err = jDB.db.Query(query)
	}
	if err != nil {
		ctx.WithError(err).Errorf("failed to find targets '%s'", query)
		return targets
	}
	defer rows.Close()
	for rows.Next() {
		var (
			clientID string
			taskID   string
			token    string
			plat     string
		)
		err = rows.Scan(&clientID, &token, &plat)
		if err != nil {
			ctx.WithError(err).Error("failed to iterate over targets")
			return targets
		}
		if taskData != nil {
			// Task jobs get one task row per targeted probe.
			taskID, err = j.CreateTask(clientID, taskData, jDB)
			if err != nil {
				ctx.WithError(err).Error("failed to create task")
				return targets
			}
		}
		targets = append(targets, NewJobTarget(clientID, token, plat, &taskID, taskData, alertData))
	}
	return targets
}

// GetWaitDuration gets the amount of time to wait for the task to run next
func (j *Job) GetWaitDuration() time.Duration {
	var waitDuration time.Duration
	ctx.Debugf("calculating wait duration. ran already %d", j.TimesRun)
	now := time.Now().UTC()
	if j.IsDone {
		panic("IsDone should be false")
	}

	if now.Before(j.Schedule.StartTime) {
		ctx.Debug("before => false")
		waitDuration = time.Duration(j.Schedule.StartTime.UnixNano() - now.UnixNano())
	} else {
		waitDuration = time.Duration(j.NextRunAt.UnixNano() - now.UnixNano())
	}
	ctx.Debugf("waitDuration: %s", waitDuration)
	// Overdue jobs run immediately.
	if waitDuration < 0 {
		return 0
	}
	return waitDuration
}

// WaitAndRun will wait on the job and then run it when it's time
func (j *Job) WaitAndRun(jDB *JobDB) {
	ctx.Debugf("running job: \"%s\"", j.Comment)

	j.lock.Lock()
	defer j.lock.Unlock()

	waitDuration := j.GetWaitDuration()

	ctx.Debugf("will wait for: \"%s\"", waitDuration)
	jobRun := func() { j.Run(jDB) }
	j.jobTimer = time.AfterFunc(waitDuration, jobRun)
}

// NotifyReq is the reuqest for sending this particular notification message
// XXX this is duplicated in proteus-notify
type NotifyReq struct {
	ClientIDs []string               `json:"client_ids"`
	Event     map[string]interface{} `json:"event"`
}

// GoRushNotification contains all the notification metadata for gorush
type GoRushNotification struct {
	Tokens           []string               `json:"tokens"`
	Platform         int                    `json:"platform"`
	Message          string                 `json:"message"`
	Topic            string                 `json:"topic"`
	To               string                 `json:"to"`
	Data             map[string]interface{} `json:"data"`
	ContentAvailable bool                   `json:"content_available"`
	Notification     map[string]string      `json:"notification"`
}

// GoRushReq is a wrapper for a gorush notification request
type GoRushReq struct {
	Notifications []*GoRushNotification `json:"notifications"`
}

// GoRushLog contains details about the failure. It is available when core->sync
// in the gorush settings (https://github.com/appleboy/gorush#features) is true.
// For expired tokens Error will be:
// * "Unregistered" or "BadDeviceToken" on iOS
// https://stackoverflow.com/questions/42511476/what-are-the-possible-reasons-to-get-apns-responses-baddevicetoken-or-unregister
// https://github.com/sideshow/apns2/blob/master/response.go#L85
// * "NotRegistered" or "InvalidRegistration" on Android:
// See: https://github.com/appleboy/go-fcm/blob/master/response.go
type GoRushLog struct {
	Type     string `json:"type"`
	Platform string `json:"platform"`
	Token    string `json:"token"`
	Message  string `json:"message"`
	Error    string `json:"error"`
}

// GoRushResponse is a response from gorush on /api/push
type GoRushResponse struct {
	Counts  int         `json:"counts"`
	Success string      `json:"success"`
	Logs    []GoRushLog `json:"logs"`
}

// ErrExpiredToken not enough permissions
var ErrExpiredToken = errors.New("token is expired")

// gorushPush POSTs the notification payload to gorush's /api/push endpoint
// and maps token-expiry log errors onto ErrExpiredToken.
func gorushPush(baseURL *url.URL, notifyReq GoRushReq) error {
	jsonStr, err := json.Marshal(notifyReq)
	if err != nil {
		ctx.WithError(err).Error("failed to marshal data")
		return err
	}
	path, _ := url.Parse("/api/push")
	u := baseURL.ResolveReference(path)
	ctx.Debugf("sending notify request: %s", jsonStr)
	req, err := http.NewRequest("POST", u.String(), bytes.NewBuffer(jsonStr))
	if err != nil {
		ctx.WithError(err).Error("failed to send request")
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	if viper.IsSet("auth.gorush-basic-auth-user") {
		req.SetBasicAuth(viper.GetString("auth.gorush-basic-auth-user"),
			viper.GetString("auth.gorush-basic-auth-password"))
	}
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		ctx.WithError(err).Error("http request failed")
		return err
	}
	defer resp.Body.Close()
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		ctx.WithError(err).Error("failed to read response body")
		return err
	}
	// XXX do we also want to check the body?
	if resp.StatusCode != 200 {
		ctx.Debugf("got invalid status code: %d", resp.StatusCode)
		return errors.New("http request returned invalid status code")
	}
	var gorushResp GoRushResponse
	// NOTE(review): the Unmarshal error is ignored; a non-JSON body yields an
	// empty Logs slice and is treated as success — confirm that is intended.
	json.Unmarshal(data, &gorushResp)
	if len(gorushResp.Logs) > 0 {
		if len(gorushResp.Logs) > 1 {
			// This should never happen as we currently send one token per HTTP
			// request.
			ctx.Errorf("Found more than 1 log message. %v", gorushResp.Logs)
			return errors.New("inconsistent log message count")
		}
		errorStr := gorushResp.Logs[0].Error
		if errorStr == "Unregistered" ||
			errorStr == "BadDeviceToken" ||
			errorStr == "NotRegistered" ||
			errorStr == "InvalidRegistration" {
			return ErrExpiredToken
		}
		ctx.Errorf("Unhandled token error: %s", errorStr)
		return fmt.Errorf("Unhandled token error: %s", errorStr)
	}
	return nil
}

// NotifyGorush tell gorush to notify clients
func NotifyGorush(bu string, jt *JobTarget) error {
	var (
		err error
	)

	baseURL, err := url.Parse(bu)
	if err != nil {
		ctx.WithError(err).Error("invalid base url")
		return err
	}

	notification := &GoRushNotification{
		Tokens: []string{jt.Token},
	}

	if jt.AlertData != nil {
		var (
			notificationType = "default"
		)
		if _, ok := jt.AlertData.Extra["href"]; ok {
			notificationType = "open_href"
		}
		notification.Message = jt.AlertData.Message
		notification.Data = map[string]interface{}{
			"type":    notificationType,
			"payload": jt.AlertData.Extra,
		}
	} else if jt.TaskData != nil {
		notification.Data = map[string]interface{}{
			"type": "run_task",
			"payload": map[string]string{
				"task_id": *jt.TaskID,
			},
		}
		notification.ContentAvailable = true
	} else {
		return errors.New("either alertData or TaskData must be set")
	}

	notification.Notification = make(map[string]string)

	if jt.Platform == "ios" {
		notification.Platform = 1
		notification.Topic = viper.GetString("core.notify-topic-ios")
	} else if jt.Platform == "android" {
		notification.Notification["click_action"] = viper.GetString(
			"core.notify-click-action-android")
		notification.Platform = 2
		/* We don't need to send a topic on Android. As the response message of
		failed requests say: `Must use either "registration_ids" field or "to", not
		both`. And we need `registration_ids` because we send in multicast to many
		clients. More evidence, as usual, on SO:
		<https://stackoverflow.com/a/33440105>. */
	} else {
		return errors.New("unsupported platform")
	}

	notifyReq := GoRushReq{
		Notifications: []*GoRushNotification{notification},
	}
	return gorushPush(baseURL, notifyReq)
}

// ErrInconsistentState when you try to accept an already accepted task
var ErrInconsistentState = errors.New("task already accepted")

// ErrTaskNotFound could not find the referenced task
var ErrTaskNotFound = errors.New("task not found")

// ErrAccessDenied not enough permissions
var ErrAccessDenied = errors.New("access denied")

// GetTask returns the specified task with the ID
func GetTask(tID string, uID string, db *sqlx.DB) (TaskData, error) {
	var (
		err      error
		probeID  string
		taskArgs types.JSONText
	)
	task := TaskData{}
	query := fmt.Sprintf(`SELECT id, probe_id, test_name, arguments, COALESCE(state, 'ready') FROM %s WHERE id = $1`,
		pq.QuoteIdentifier(common.TasksTable))
	err = db.QueryRow(query, tID).Scan(
		&task.ID,
		&probeID,
		&task.TestName,
		&taskArgs,
		&task.State)
	if err != nil {
		if err == sql.ErrNoRows {
			return task, ErrTaskNotFound
		}
		ctx.WithError(err).Error("failed to get task")
		return task, err
	}
	// Only the probe that owns the task may read it.
	if probeID != uID {
		return task, ErrAccessDenied
	}
	err = taskArgs.Unmarshal(&task.Arguments)
	if err != nil {
		ctx.WithError(err).Error("failed to unmarshal json")
		return task, err
	}
	return task, nil
}

// SetTaskState sets the state of the task
func SetTaskState(tID string, uID string, state string, validStates []string, updateTimeCol string, db *sqlx.DB) error {
	var err error
	task, err := GetTask(tID, uID, db)
	if err != nil {
		return err
	}
	// The transition is only legal from one of the caller-supplied states.
	stateConsistent := false
	for _, s := range validStates {
		if task.State == s {
			stateConsistent = true
			break
		}
	}
	if !stateConsistent {
		return ErrInconsistentState
	}
	query := fmt.Sprintf(`UPDATE %s SET state = $2, %s = $3, last_updated = $3 WHERE id = $1`,
		pq.QuoteIdentifier(common.TasksTable),
		updateTimeCol)
	_, err = db.Exec(query, tID, state, time.Now().UTC())
	if err != nil {
		ctx.WithError(err).Error("failed to get task")
		return err
	}
	return nil
}

// SetTokenExpired marks the token of the uID as expired
func SetTokenExpired(db *sqlx.DB, uID string) error {
	query := fmt.Sprintf(`UPDATE %s SET is_token_expired = true WHERE id = $1`,
		pq.QuoteIdentifier(common.ActiveProbesTable))
	_, err := db.Exec(query, uID)
	if err != nil {
		ctx.WithError(err).Error("failed to set token as expired")
		return err
	}
	return nil
}

// Notify send a notification for the given JobTarget
func Notify(jt *JobTarget, jDB *JobDB) error {
	var err error
	if jt.Platform != "android" && jt.Platform != "ios" {
		ctx.Debugf("we don't support notifying to %s", jt.Platform)
		return nil
	}

	if viper.IsSet("core.gorush-url") {
		err = NotifyGorush(
			viper.GetString("core.gorush-url"),
			jt)
	} else if viper.IsSet("core.notify-url") {
		err = errors.New("proteus notify is no longer supported")
	} else {
		err = errors.New("no valid notification service found")
	}

	// An expired token is not fatal: record it and carry on.
	if err == ErrExpiredToken {
		err = SetTokenExpired(jDB.db, jt.ClientID)
		if err != nil {
			return err
		}
	} else if err != nil {
		return err
	}
	if jt.TaskData != nil {
		err = SetTaskState(jt.TaskData.ID,
			jt.ClientID,
			"notified",
			[]string{"ready"},
			"notification_time",
			jDB.db)
		if err != nil {
			ctx.WithError(err).Error("failed to update task state")
			return err
		}
	}
	return nil
}

// Run the given job
func (j *Job) Run(jDB *JobDB) {
	j.lock.Lock()
	defer j.lock.Unlock()

	if !j.ShouldRun() {
		ctx.Error("inconsitency in should run detected..")
		return
	}

	targets := j.GetTargets(jDB)
	lastRunAt := time.Now().UTC()
	for _, t := range targets {
		// XXX
		// In here shall go logic to connect to notification server and notify
		// them of the task
		ctx.Debugf("notifying %s", t.ClientID)
		err := Notify(t, jDB)
		if err != nil {
			ctx.WithError(err).Errorf("failed to notify %s", t.ClientID)
		}
	}
	ctx.Debugf("successfully ran at %s", lastRunAt)
	// XXX maybe move these elsewhere
	j.TimesRun = j.TimesRun + 1
	if j.Schedule.Repeat != -1 && j.TimesRun >= j.Schedule.Repeat {
		j.IsDone = true
	} else {
		d := j.Schedule.Duration.ToDuration()
		ctx.Debugf("adding %s", d)
		j.NextRunAt = lastRunAt.Add(d)
	}
	ctx.Debugf("next run will be at %s", j.NextRunAt)
	ctx.Debugf("times run %d", j.TimesRun)
	err := j.Save(jDB)
	if err != nil {
		ctx.Error("failed to save job state to DB")
	}
	if j.ShouldWait() {
		go j.WaitAndRun(jDB)
	}
}

// Save the job to the job database
func (j *Job) Save(jDB *JobDB) error {
	tx, err := jDB.db.Begin()
	if err != nil {
		ctx.WithError(err).Error("failed to open transaction")
		return err
	}
	query := fmt.Sprintf(`UPDATE %s SET times_run = $2, next_run_at = $3, is_done = $4 WHERE id = $1`,
		pq.QuoteIdentifier(common.JobsTable))
	stmt, err := tx.Prepare(query)
	if err != nil {
		ctx.WithError(err).Error("failed to prepare update jobs query")
		return err
	}
	_, err = stmt.Exec(j.ID,
		j.TimesRun,
		j.NextRunAt.UTC(),
		j.IsDone)
	if err != nil {
		tx.Rollback()
		ctx.WithError(err).Error("failed to jobs table, rolling back")
		return errors.New("failed to update jobs table")
	}
	if err = tx.Commit(); err != nil {
		ctx.WithError(err).Error("failed to commit transaction, rolling back")
		return err
	}
	return nil
}

// ShouldWait returns true if the job is not done
func (j *Job) ShouldWait() bool {
	if j.IsDone {
		return false
	}
	return true
}

// ShouldRun checks if we should run this job
func (j *Job) ShouldRun() bool {
	ctx.Debugf("should run? ran already %d", j.TimesRun)
	now := time.Now().UTC()
	if j.IsDone {
		ctx.Debug("isDone => false")
		return false
	}
	if now.Before(j.Schedule.StartTime) {
		ctx.Debug("before => false")
		return false
	}
	// XXX is this redundant and maybe can be included in the notion of
	// IsDone?
	if j.Schedule.Repeat != -1 && j.TimesRun >= j.Schedule.Repeat {
		ctx.Debug("repeat => false")
		return false
	}
	if now.After(j.NextRunAt) || now.Equal(j.NextRunAt) {
		return true
	}
	return false
}

// JobDB keep track of the Job database
type JobDB struct {
	db *sqlx.DB
}

// GetAll returns a list of all jobs in the database
func (db *JobDB) GetAll() ([]*Job, error) {
	allJobs := []*Job{}
	query := fmt.Sprintf(`SELECT id, comment, schedule, delay, times_run, next_run_at, is_done FROM %s WHERE state = 'active'`,
		pq.QuoteIdentifier(common.JobsTable))
	rows, err := db.db.Query(query)
	if err != nil {
		ctx.WithError(err).Error("failed to list jobs")
		return allJobs, err
	}
	defer rows.Close()
	for rows.Next() {
		var (
			j            Job
			schedule     string
			nextRunAtStr string
		)
		err := rows.Scan(&j.ID,
			&j.Comment,
			&schedule,
			&j.Delay,
			&j.TimesRun,
			&nextRunAtStr,
			&j.IsDone)
		if err != nil {
			ctx.WithError(err).Error("failed to iterate over jobs")
			return allJobs, err
		}
		j.NextRunAt, err = time.Parse(ISOUTCTimeLayout, nextRunAtStr)
		if err != nil {
			ctx.WithError(err).Error("invalid time string")
			return allJobs, err
		}
		j.Schedule, err = ParseSchedule(schedule)
		if err != nil {
			ctx.WithError(err).Error("invalid schedule")
			return allJobs, err
		}
		j.lock = sync.RWMutex{}
		allJobs = append(allJobs, &j)
	}
	return allJobs, nil
}

// Scheduler is the datastructure for the scheduler
type Scheduler struct {
	jobDB       JobDB
	runningJobs map[string]*Job
	stopped     chan os.Signal
}

// NewScheduler creates a new instance of the scheduler
func NewScheduler(db *sqlx.DB) *Scheduler {
	return &Scheduler{
		stopped:     make(chan os.Signal),
		runningJobs: make(map[string]*Job),
		jobDB:       JobDB{db: db}}
}

// DeleteJob will remove the job by removing it from the running jobs
func (s *Scheduler) DeleteJob(jobID string) error {
	job, ok := s.runningJobs[jobID]
	if !ok {
		return errors.New("Job is not part of the running jobs")
	}
	// Marking IsDone makes a pending timer's Run() bail out via ShouldRun.
	job.IsDone = true
	delete(s.runningJobs, jobID)
	return nil
}

// RunJob checks if we should wait on the job and if not will run it
func (s *Scheduler) RunJob(j *Job) {
	if j.ShouldWait() {
		j.WaitAndRun(&s.jobDB)
	}
}

// Start the scheduler
func (s *Scheduler) Start() {
	ctx.Debug("starting scheduler")
	// XXX currently when jobs are deleted the allJobs list will not be
	// updated. We should find a way to check this and stop triggering a job in
	// case it gets deleted.
	allJobs, err := s.jobDB.GetAll()
	if err != nil {
		ctx.WithError(err).Error("failed to list all jobs")
		return
	}
	for _, j := range allJobs {
		s.runningJobs[j.ID] = j
		s.RunJob(j)
	}
}

// Shutdown do all the shutdown logic
func (s *Scheduler) Shutdown() {
	os.Exit(0)
}
#!/bin/bash

# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A library of helper functions and constant for the local config.

# Use the config file specified in $KUBE_CONFIG_FILE, or default to
# config-default.sh.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
source "${KUBE_ROOT}/cluster/gce/${KUBE_CONFIG_FILE-"config-default.sh"}"

# Verify prereqs
function verify-prereqs {
  local cmd
  for cmd in gcloud gcutil gsutil; do
    which "${cmd}" >/dev/null || {
      echo "Can't find ${cmd} in PATH, please fix and retry. The Google Cloud "
      echo "SDK can be downloaded from https://cloud.google.com/sdk/."
      exit 1
    }
  done
}

# Create a temp dir that'll be deleted at the end of this bash session.
#
# Vars set:
#   KUBE_TEMP
function ensure-temp-dir {
  if [[ -z ${KUBE_TEMP-} ]]; then
    KUBE_TEMP=$(mktemp -d -t kubernetes.XXXXXX)
    trap 'rm -rf "${KUBE_TEMP}"' EXIT
  fi
}

# Verify and find the various tar files that we are going to use on the server.
# Checks the pre-staged ./server location first, then the local build output.
#
# Vars set:
#   SERVER_BINARY_TAR
#   SALT_TAR
function find-release-tars {
  SERVER_BINARY_TAR="${KUBE_ROOT}/server/kubernetes-server-linux-amd64.tar.gz"
  if [[ ! -f "$SERVER_BINARY_TAR" ]]; then
    SERVER_BINARY_TAR="${KUBE_ROOT}/_output/release-tars/kubernetes-server-linux-amd64.tar.gz"
  fi
  if [[ ! -f "$SERVER_BINARY_TAR" ]]; then
    echo "!!! Cannot find kubernetes-server-linux-amd64.tar.gz"
    exit 1
  fi

  SALT_TAR="${KUBE_ROOT}/server/kubernetes-salt.tar.gz"
  if [[ ! -f "$SALT_TAR" ]]; then
    SALT_TAR="${KUBE_ROOT}/_output/release-tars/kubernetes-salt.tar.gz"
  fi
  if [[ ! -f "$SALT_TAR" ]]; then
    echo "!!! Cannot find kubernetes-salt.tar.gz"
    exit 1
  fi
}

# Use the gcloud defaults to find the project. If it is already set in the
# environment then go with that.
#
# Vars set:
#   PROJECT
function detect-project () {
  if [[ -z "${PROJECT-}" ]]; then
    PROJECT=$(gcloud config list project | tail -n 1 | cut -f 3 -d ' ')
  fi

  if [[ -z "${PROJECT-}" ]]; then
    echo "Could not detect Google Cloud Platform project. Set the default project using " >&2
    echo "'gcloud config set project <PROJECT>'" >&2
    exit 1
  fi
  echo "Project: $PROJECT (autodetected from gcloud config)"
}

# Take the local tar files and upload them to Google Storage. They will then be
# downloaded by the master as part of the start up script for the master.
#
# Assumed vars:
#   PROJECT
#   SERVER_BINARY_TAR
#   SALT_TAR
# Vars set:
#   SERVER_BINARY_TAR_URL
#   SALT_TAR_URL
function upload-server-tars() {
  SERVER_BINARY_TAR_URL=
  SALT_TAR_URL=

  # Derive a short per-project hash for a stable, unique bucket name.
  # (md5 is the macOS tool, md5sum the Linux one; only the first 5 chars
  # are used, which also sidesteps md5sum's trailing " -" field.)
  local project_hash
  if which md5 > /dev/null 2>&1; then
    project_hash=$(md5 -q -s "$PROJECT")
  else
    project_hash=$(echo -n "$PROJECT" | md5sum)
  fi
  project_hash=${project_hash:0:5}

  local -r staging_bucket="gs://kubernetes-staging-${project_hash}"

  # Ensure the bucket is created
  if !
 gsutil ls "$staging_bucket" > /dev/null 2>&1 ; then
    echo "Creating $staging_bucket"
    gsutil mb "${staging_bucket}"
  fi

  local -r staging_path="${staging_bucket}/devel"

  echo "+++ Staging server tars to Google Storage: ${staging_path}"
  SERVER_BINARY_TAR_URL="${staging_path}/${SERVER_BINARY_TAR##*/}"
  gsutil -q cp "${SERVER_BINARY_TAR}" "${SERVER_BINARY_TAR_URL}"
  SALT_TAR_URL="${staging_path}/${SALT_TAR##*/}"
  gsutil -q cp "${SALT_TAR}" "${SALT_TAR_URL}"
}

# Detect the information about the minions
#
# Assumed vars:
#   MINION_NAMES
#   ZONE
# Vars set:
#   KUBE_MINION_IP_ADDRESS (array)
function detect-minions () {
  KUBE_MINION_IP_ADDRESSES=()
  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    # gcutil will print the "external-ip" column header even if no instances are found
    local minion_ip=$(gcutil listinstances --format=csv --sort=external-ip \
      --columns=external-ip --zone ${ZONE} --filter="name eq ${MINION_NAMES[$i]}" \
      | tail -n '+2' | tail -n 1)
    if [[ -z "${minion_ip-}" ]] ; then
      echo "Did not find ${MINION_NAMES[$i]}" >&2
    else
      echo "Found ${MINION_NAMES[$i]} at ${minion_ip}"
      KUBE_MINION_IP_ADDRESSES+=("${minion_ip}")
    fi
  done
  if [[ -z "${KUBE_MINION_IP_ADDRESSES-}" ]]; then
    echo "Could not detect Kubernetes minion nodes. Make sure you've launched a cluster with 'kube-up.sh'" >&2
    exit 1
  fi
}

# Detect the IP for the master
#
# Assumed vars:
#   MASTER_NAME
#   ZONE
# Vars set:
#   KUBE_MASTER
#   KUBE_MASTER_IP
function detect-master () {
  KUBE_MASTER=${MASTER_NAME}
  if [[ -z "${KUBE_MASTER_IP-}" ]]; then
    # gcutil will print the "external-ip" column header even if no instances are found
    KUBE_MASTER_IP=$(gcutil listinstances --format=csv --sort=external-ip \
      --columns=external-ip --zone ${ZONE} --filter="name eq ${MASTER_NAME}" \
      | tail -n '+2' | tail -n 1)
  fi
  if [[ -z "${KUBE_MASTER_IP-}" ]]; then
    echo "Could not detect Kubernetes master node. Make sure you've launched a cluster with 'kube-up.sh'" >&2
    exit 1
  fi
  echo "Using master: $KUBE_MASTER (external IP: $KUBE_MASTER_IP)"
}

# Ensure that we have a password created for validating to the master. Will
# read from $HOME/.kubernetres_auth if available.
#
# Vars set:
#   KUBE_USER
#   KUBE_PASSWORD
function get-password {
  local file="$HOME/.kubernetes_auth"
  if [[ -r "$file" ]]; then
    KUBE_USER=$(cat "$file" | python -c 'import json,sys;print json.load(sys.stdin)["User"]')
    KUBE_PASSWORD=$(cat "$file" | python -c 'import json,sys;print json.load(sys.stdin)["Password"]')
    return
  fi
  KUBE_USER=admin
  KUBE_PASSWORD=$(python -c 'import string,random; print "".join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(16))')

  # Store password for reuse.
  # Note: written in plaintext, so restrict to owner read/write (0600 below).
  cat << EOF > "$file"
{
  "User": "$KUBE_USER",
  "Password": "$KUBE_PASSWORD"
}
EOF
  chmod 0600 "$file"
}

# Instantiate a kubernetes cluster
#
# Assumed vars
#   KUBE_ROOT
#   <Various vars set in config file>
function kube-up {
  # Detect the project into $PROJECT if it isn't set
  detect-project

  # Make sure we have the tar files staged on Google Storage
  find-release-tars
  upload-server-tars

  ensure-temp-dir

  get-password
  python "${KUBE_ROOT}/third_party/htpasswd/htpasswd.py" \
    -b -c "${KUBE_TEMP}/htpasswd" "$KUBE_USER" "$KUBE_PASSWORD"
  local htpasswd
  htpasswd=$(cat "${KUBE_TEMP}/htpasswd")

  if ! gcutil getnetwork "${NETWORK}" >/dev/null 2>&1; then
    echo "Creating new network for: ${NETWORK}"
    # The network needs to be created synchronously or we have a race. The
    # firewalls can be added concurrent with instance creation.
gcutil addnetwork "${NETWORK}" --range "10.240.0.0/16" gcutil addfirewall "${NETWORK}-default-internal" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --network "${NETWORK}" \ --allowed_ip_sources "10.0.0.0/8" \ --allowed "tcp:1-65535,udp:1-65535,icmp" & gcutil addfirewall "${NETWORK}-default-ssh" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --network "${NETWORK}" \ --allowed_ip_sources "0.0.0.0/0" \ --allowed "tcp:22" & fi echo "Starting VMs and configuring firewalls" gcutil addfirewall "${MASTER_NAME}-https" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --network "${NETWORK}" \ --target_tags "${MASTER_TAG}" \ --allowed tcp:443 & ( echo "#! /bin/bash" echo "mkdir -p /var/cache/kubernetes-install" echo "cd /var/cache/kubernetes-install" echo "readonly MASTER_NAME='${MASTER_NAME}'" echo "readonly NODE_INSTANCE_PREFIX='${INSTANCE_PREFIX}-minion'" echo "readonly SERVER_BINARY_TAR_URL='${SERVER_BINARY_TAR_URL}'" echo "readonly SALT_TAR_URL='${SALT_TAR_URL}'" echo "readonly MASTER_HTPASSWD='${htpasswd}'" echo "readonly PORTAL_NET='${PORTAL_NET}'" grep -v "^#" "${KUBE_ROOT}/cluster/gce/templates/create-dynamic-salt-files.sh" grep -v "^#" "${KUBE_ROOT}/cluster/gce/templates/download-release.sh" grep -v "^#" "${KUBE_ROOT}/cluster/gce/templates/salt-master.sh" ) > "${KUBE_TEMP}/master-start.sh" gcutil addinstance "${MASTER_NAME}" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --zone "${ZONE}" \ --machine_type "${MASTER_SIZE}" \ --image "${IMAGE}" \ --tags "${MASTER_TAG}" \ --network "${NETWORK}" \ --service_account_scopes="storage-ro,compute-rw" \ --automatic_restart \ --metadata_from_file "startup-script:${KUBE_TEMP}/master-start.sh" & for (( i=0; i<${#MINION_NAMES[@]}; i++)); do ( echo "#! 
/bin/bash" echo "MASTER_NAME='${MASTER_NAME}'" echo "MINION_IP_RANGE='${MINION_IP_RANGES[$i]}'" grep -v "^#" "${KUBE_ROOT}/cluster/gce/templates/salt-minion.sh" ) > "${KUBE_TEMP}/minion-start-${i}.sh" gcutil addfirewall "${MINION_NAMES[$i]}-all" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --network "${NETWORK}" \ --allowed_ip_sources "${MINION_IP_RANGES[$i]}" \ --allowed "tcp,udp,icmp,esp,ah,sctp" & gcutil addinstance ${MINION_NAMES[$i]} \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --zone "${ZONE}" \ --machine_type "${MINION_SIZE}" \ --image "${IMAGE}" \ --tags "${MINION_TAG}" \ --network "${NETWORK}" \ --service_account_scopes "${MINION_SCOPES}" \ --automatic_restart \ --can_ip_forward \ --metadata_from_file "startup-script:${KUBE_TEMP}/minion-start-${i}.sh" & gcutil addroute "${MINION_NAMES[$i]}" "${MINION_IP_RANGES[$i]}" \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --network "${NETWORK}" \ --next_hop_instance "${ZONE}/instances/${MINION_NAMES[$i]}" & done local fail=0 local job for job in $(jobs -p); do wait "${job}" || fail=$((fail + 1)) done if (( $fail != 0 )); then echo "${fail} commands failed. Exiting." >&2 exit 2 fi detect-master > /dev/null echo "Waiting for cluster initialization." echo echo " This will continually check to see if the API for kubernetes is reachable." echo " This might loop forever if there was some uncaught error during start" echo " up." echo until curl --insecure --user "${KUBE_USER}:${KUBE_PASSWORD}" --max-time 5 \ --fail --output /dev/null --silent "https://${KUBE_MASTER_IP}/api/v1beta1/pods"; do printf "." sleep 2 done echo "Kubernetes cluster created." echo "Sanity checking cluster..." 
sleep 5 # Basic sanity checking local i local rc # Capture return code without exiting because of errexit bash option for (( i=0; i<${#MINION_NAMES[@]}; i++)); do # Make sure docker is installed gcutil ssh "${MINION_NAMES[$i]}" which docker >/dev/null || { echo "Docker failed to install on ${MINION_NAMES[$i]}. Your cluster is unlikely" >&2 echo "to work correctly. Please run ./cluster/kube-down.sh and re-create the" >&2 echo "cluster. (sorry!)" >&2 exit 1 } done echo echo "Kubernetes cluster is running. The master is running at:" echo echo " https://${KUBE_MASTER_IP}" echo echo "The user name and password to use is located in ~/.kubernetes_auth." echo local kube_cert=".kubecfg.crt" local kube_key=".kubecfg.key" local ca_cert=".kubernetes.ca.crt" (umask 077 gcutil ssh "${MASTER_NAME}" sudo cat /usr/share/nginx/kubecfg.crt >"${HOME}/${kube_cert}" 2>/dev/null gcutil ssh "${MASTER_NAME}" sudo cat /usr/share/nginx/kubecfg.key >"${HOME}/${kube_key}" 2>/dev/null gcutil ssh "${MASTER_NAME}" sudo cat /usr/share/nginx/ca.crt >"${HOME}/${ca_cert}" 2>/dev/null cat << EOF > ~/.kubernetes_auth { "User": "$KUBE_USER", "Password": "$KUBE_PASSWORD", "CAFile": "$HOME/$ca_cert", "CertFile": "$HOME/$kube_cert", "KeyFile": "$HOME/$kube_key" } EOF chmod 0600 ~/.kubernetes_auth "${HOME}/${kube_cert}" \ "${HOME}/${kube_key}" "${HOME}/${ca_cert}" ) } # Delete a kubernetes cluster function kube-down { # Detect the project into $PROJECT detect-project echo "Bringing down cluster" gcutil deletefirewall \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --force \ "${MASTER_NAME}-https" & gcutil deleteinstance \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --force \ --delete_boot_pd \ --zone "${ZONE}" \ "${MASTER_NAME}" & gcutil deletefirewall \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --force \ "${MINION_NAMES[@]/%/-all}" 
& gcutil deleteinstance \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --force \ --delete_boot_pd \ --zone "${ZONE}" \ "${MINION_NAMES[@]}" & gcutil deleteroute \ --project "${PROJECT}" \ --norespect_terminal_width \ --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \ --force \ "${MINION_NAMES[@]}" & wait } # Update a kubernetes cluster with latest source function kube-push { detect-project detect-master # Make sure we have the tar files staged on Google Storage find-release-tars upload-server-tars ( echo "#! /bin/bash" echo "mkdir -p /var/cache/kubernetes-install" echo "cd /var/cache/kubernetes-install" echo "readonly SERVER_BINARY_TAR_URL='${SERVER_BINARY_TAR_URL}'" echo "readonly SALT_TAR_URL='${SALT_TAR_URL}'" grep -v "^#" "${KUBE_ROOT}/cluster/gce/templates/download-release.sh" echo "echo Executing configuration" echo "sudo salt '*' mine.update" echo "sudo salt --force-color '*' state.highstate" ) | gcutil ssh --project "$PROJECT" --zone "$ZONE" "$KUBE_MASTER" sudo bash get-password echo echo "Kubernetes cluster is running. The master is running at:" echo echo " https://${KUBE_MASTER_IP}" echo echo "The user name and password to use is located in ~/.kubernetes_auth." echo } # ----------------------------------------------------------------------------- # Cluster specific test helpers used from hack/e2e-test.sh # Execute prior to running tests to build a release if required for env. # # Assumed Vars: # KUBE_ROOT function test-build-release { # Make a release "${KUBE_ROOT}/build/release.sh" } # Execute prior to running tests to initialize required structure. This is # called from hack/e2e-test.sh. 
#
# Assumed vars:
#   PROJECT
#   ALREADY_UP
#   Variables from config.sh
function test-setup {
  # Detect the project into $PROJECT if it isn't set
  # gce specific
  detect-project

  if [[ ${ALREADY_UP} -ne 1 ]]; then
    # Open up port 80 & 8080 so common containers on minions can be reached
    gcutil addfirewall \
      --project "${PROJECT}" \
      --norespect_terminal_width \
      --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \
      --target_tags "${MINION_TAG}" \
      --allowed tcp:80,tcp:8080 \
      --network "${NETWORK}" \
      "${MINION_TAG}-${INSTANCE_PREFIX}-http-alt"
  fi
}

# Execute after running tests to perform any required clean-up. This is called
# from hack/e2e-test.sh
#
# Assumed Vars:
#   PROJECT
function test-teardown {
  echo "Shutting down test cluster in background."
  # BUGFIX: this used to read `... || true > /dev/null`, which redirected the
  # output of `true` (a no-op) instead of gcutil's.  Redirect gcutil itself,
  # then ignore its exit status so teardown is best-effort.
  gcutil deletefirewall \
    --project "${PROJECT}" \
    --norespect_terminal_width \
    --sleep_between_polls "${POLL_SLEEP_INTERVAL}" \
    --force \
    "${MINION_TAG}-${INSTANCE_PREFIX}-http-alt" > /dev/null || true
  "${KUBE_ROOT}/cluster/kube-down.sh" > /dev/null
}

# SSH to a node by name ($1) and run a command ($2).
function ssh-to-node {
  local node="$1"
  local cmd="$2"
  gcutil --log_level=WARNING ssh --ssh_arg "-o LogLevel=quiet" "${node}" "${cmd}"
}

# Restart the kube-proxy on a node ($1)
function restart-kube-proxy {
  ssh-to-node "$1" "sudo /etc/init.d/kube-proxy restart"
}
class ExpressionError(Exception):
    """Raised when an expression cannot be rendered."""

    def __init__(self, message):
        super().__init__(message)


def render_expression(expression):
    """Attempt to render ``expression``.

    Raises:
        ExpressionError: always, once rendering fails, so that callers can
            catch and handle the failure themselves.

    BUGFIX: the previous version caught its own ``ExpressionError`` and
    printed it, swallowing the exception — the documented ``except
    ExpressionError`` in the example below could never trigger.  The
    exception now propagates to the caller as intended.
    """
    # Rendering is not implemented; report the failure to the caller.
    raise ExpressionError('Failed to render %s' % str(expression))


# Example usage — only runs when executed as a script, not on import.
if __name__ == '__main__':
    try:
        render_expression(5 + 3)
    except ExpressionError as e:
        print(f"Caught custom exception: {e}")
import logging

from modules.const import Keys
from modules.parser.cdi_const import RS_DISKLIST
import copy

logger = logging.getLogger(__name__)

"""Parse one line of the CrystalDiskInfo disk list.

Example input:
    "(01) WDC WD30EFRX-68EUZN0 : 3000.5 GB [X/0/0, mr]"
"""
# NOTE(review): removed the stray "<gh_stars>1-10" artifact that made this
# module a syntax error; comments translated from Japanese. Logic unchanged.


def parse_disklist(line):
    """Return a fresh copy of RS_DISKLIST populated from one disk-list line.

    Assumes the line follows the fixed CrystalDiskInfo layout shown above —
    "(", ":", "[", "]" must all be present or ValueError is raised by index().
    """
    ln = line.strip()
    result = copy.deepcopy(RS_DISKLIST)
    result["id"] = ln[0:4]  # e.g. "(01)"
    result["model"] = ln[5:ln.index(":")].strip()  # e.g. "WDC WD30EFRX-68AX9N0"
    # commandType: the text after the comma inside brackets, e.g. "mr" in "[X/0/0, mr]"
    ct = ln[ln.index("[") + 1:ln.index("]")].strip()  # "X/0/0, mr"
    result["commandType"] = ct[ct.index(",") + 1:].strip()
    # ssdVendorString: the "- vendor" suffix after "]" only appears for SSDs
    svs = ln[ln.index("]"):]
    if (svs.find("-") > 0):
        result["ssdVendorString"] = svs[svs.index("-") + 1:].strip()
    return result
import React, { Component } from 'react'; import PropTypes from 'prop-types'; import Label from '../label'; import { StatusGroupWrapper } from './statusGroup.style'; class StatusGroup extends Component { render() { const { title, status, className, ...props } = this.props; return ( <StatusGroupWrapper className={`statusGroup-wrapper ${className}`} {...props} > {title && ( <Label className="group-title" color="normal" size="big" weight="bold" > {title} </Label> )} <div className="status-group-content"> {status.map((data, index) => { const { title, value } = data; return ( <div className="status-value" key={index}> <Label size="tiny" color="normal" weight="bold"> {title} </Label> <Label size="big" color="normal"> {value} </Label> </div> ); })} </div> </StatusGroupWrapper> ); } } StatusGroup.propTypes = { title: PropTypes.string, status: PropTypes.array, className: PropTypes.string, }; StatusGroup.defaultProps = { title: '', status: [], className: '', }; export default StatusGroup;
<reponame>LaudateCorpus1/PCVT<gh_stars>1-10 //********************************************************************** // (C) Copyright 2020-2021 Hewlett Packard Enterprise Development LP // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR // OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, // ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. //********************************************************************** package hwManifestGen; public class Components { // ComponentClass values static String COMPCLASS_REGISTRY_TCG="2.23.133.18.3.1"; // static String COMPCLASS_BASEBOARD="00030003"; // these values are meant to be an example. check the component class registry. 
static String COMPCLASS_BIOS="00130003"; static String COMPCLASS_UEFI="00130002"; static String COMPCLASS_CHASSIS="00020001"; // TODO: chassis type is included in SMBIOS static String COMPCLASS_SERVER_CHASSIS="00020016"; // Rack Mount Chassis static String COMPCLASS_CPU="00010002"; static String COMPCLASS_HDD="00070002"; static String COMPCLASS_NIC="00090002"; static String COMPCLASS_RAM="00060001"; // TODO: memory type is included in SMBIOS static String COMPCLASS_RAM_SD="00060010"; // SDRAM Memory Synchronous DRAM memory component static String COMPCLASS_RAM_DDR4="00060016"; // DDR4 Memory- Double Data Rate RAM memory component (successor to DDR3) static String COMPCLASS_POWER_SUPPLY = "000A0002"; static String COMPCLASS_TPM = "00040009"; // A discrete Trusted Platform Module static String COMPCLASS_BMC = "00060003"; // Baseboard Management Controller static String COMPCLASS_UNKNOWN_COMPONENT="00030001"; // Unknown Component is an IC board but identity is unknown to the attesting party static String COMPCLASS_DAUGHTER_BOARD="00030002"; // Daughter board A board that extends the circuitry of another board static String COMPCLASS_MOTHERBOARD="00030003"; // Motherboard (includes processor, memory, and I/O) A board containing the principal components of a computer or other device // PCI Class related mappings: static String COMPCLASS_SAS_CONTROLLER = "00050009"; // PCI Class starting with 0x0107 static String COMPCLASS_SCSI_CONTROLLER = "00050003"; // PCI Class starting with 0x0100 static String COMPCLASS_RAID_CONTROLLER = "0005000B"; // PCI Class starting with 0x0104 static String COMPCLASS_PATA_CONTROLLER = "00050007"; // PCI Class starting with 0x0105 static String COMPCLASS_SATA_CONTROLLER = "00050008"; // PCI Class starting with 0x0106 static String COMPCLASS_STORAGE_CONTROLLER = "00070000"; // others starting with 0x01 static String COMPCLASS_ETHERNET_LAN_ADAPTER = "00090002"; // starting with 0x0200 static String COMPCLASS_OTHER="00030000"; // others starting 
with 0x02. Other Component is an IC board but identity does not match any of the supported values static String COMPCLASS_VIDEO_CONTROLLER = "00050002"; // others starting with 0x03 static String COMPCLASS_RISER_CARD="00030004"; // PCI Class starting with other than 0x01, 0x02 or 0x03. Riser Card A board that plugs into the system board and provides additional slots // JSON Structure Keywords; static String JSON_COMPONENTS="COMPONENTS"; static String JSON_COMPONENTSURI="COMPONENTSURI"; static String JSON_PROPERTIES="PROPERTIES"; static String JSON_PROPERTIESURI="PROPERTIESURI"; static String JSON_PLATFORM="PLATFORM"; // JSON Component Keywords; static String JSON_COMPONENTCLASS="COMPONENTCLASS"; static String JSON_COMPONENTCLASSREGISTRY="COMPONENTCLASSREGISTRY"; static String JSON_COMPONENTCLASSVALUE="COMPONENTCLASSVALUE"; static String JSON_MANUFACTURER="MANUFACTURER"; static String JSON_MODEL="MODEL"; static String JSON_SERIAL="SERIAL"; static String JSON_REVISION="REVISION"; static String JSON_MANUFACTURERID="MANUFACTURERID"; static String JSON_FIELDREPLACEABLE="FIELDREPLACEABLE"; static String JSON_ADDRESSES="ADDRESSES"; static String JSON_ETHERNETMAC="ETHERNETMAC"; static String JSON_WLANMAC="WLANMAC"; static String JSON_BLUETOOTHMAC="BLUETOOTHMAC"; static String JSON_COMPONENTPLATFORMCERT="PLATFORMCERT"; static String JSON_ATTRIBUTECERTIDENTIFIER="ATTRIBUTECERTIDENTIFIER"; static String JSON_GENERICCERTIDENTIFIER="GENERICCERTIDENTIFIER"; static String JSON_ISSUER="ISSUER"; static String JSON_COMPONENTPLATFORMCERTURI="PLATFORMCERTURI"; static String JSON_STATUS="STATUS"; // JSON Platform Keywords (Subject Alternative Name); static String JSON_PLATFORMMODEL="PLATFORMMODEL"; static String JSON_PLATFORMMANUFACTURERSTR="PLATFORMMANUFACTURERSTR"; static String JSON_PLATFORMVERSION="PLATFORMVERSION"; static String JSON_PLATFORMSERIAL="PLATFORMSERIAL"; static String JSON_PLATFORMMANUFACTURERID="PLATFORMMANUFACTURERID"; // JSON Platform URI Keywords; static String 
JSON_URI="UNIFORMRESOURCEIDENTIFIER"; static String JSON_HASHALG="HASHALGORITHM"; static String JSON_HASHVALUE="HASHVALUE"; // JSON Properties Keywords; static String JSON_NAME="NAME"; static String JSON_VALUE="VALUE"; public static String NOT_SPECIFIED="Not Specified"; }
// Unit test for GetArtistParser.parseArtistURLPage: checks the parsed result
// shape against a prototype object tree, then spot-checks a few values.
// FIX: removed the stray "<reponame>..." artifact that broke the module.
import test from 'tape';

import rawGetArtistURL from '../../dummy/rawGetArtistURL.json';
import { GetArtistParser } from '../../../parsers/getArtistParser.js';
import { WalkUtility } from '../../../resources/utilities/index.js';
import { Albums, ArtistContent, ArtistHeader, ArtistURL, Single, Videos } from '../../../resources/resultTypes/index.js';
import { Artist, Thumbnails } from '../../../resources/generalTypes/index.js';

test('unit_getArtistURLParser', (t) => {
	const result = GetArtistParser.parseArtistURLPage(rawGetArtistURL);
	// Prototype tree: each leaf uses a constructor result (String(), Number())
	// so walkAndAssertShape can compare types rather than values.
	const expected = [
		ArtistURL.from({
			artistContents: ArtistContent.from({
				albums: [
					Albums.from({
						browseId: String(),
						thumbnails: [
							Thumbnails.from({
								height: Number(),
								url: String(),
								width: Number(),
							}),
						],
						year: Number(),
						URL: String(),
						title: String(),
					}),
				],
				videos: [Videos.from({
					title: String(),
					URL: String(),
					thumbnails: [
						Thumbnails.from({
							height: Number(),
							url: String(),
							width: Number(),
						}),
					],
					author: [
						Artist.from({
							name: String(),
							url: String(),
							browseId: String(),
						}),
					],
					videoId: String(),
					playlistId: String(),
					views: Number(),
				})],
				single: [
					Single.from({
						browseId: String(),
						thumbnails: [
							Thumbnails.from({
								height: Number(),
								url: String(),
								width: Number(),
							}),
						],
						year: Number(),
						URL: String(),
						title: String(),
					}),
				],
			}),
			headers: ArtistHeader.from({
				thumbnails: [
					Thumbnails.from({
						height: Number(),
						url: String(),
						width: Number(),
					}),
				],
				totalSubscribers: String(),
				description: String(),
				artistName: String(),
			}),
		}),
	];
	t.true(WalkUtility
		.walkAndAssertShape(result, expected[0]), 'unit_getArtistURLParser result has expected shape');
	t.equals(result.headers.artistName, 'TWICE', 'title match');
	t.equals(result.headers.thumbnails[1].url, 'https://lh3.googleusercontent.com/lmbgcqGRdQLWqjyPgk7239RFd7bB2CRxf4JpS4ndFkZZzcVt1Ia6FHs6Kd4mbWwZQx6DLwHJVDRz3UrZ=w120-h120-l90-rj', 'thumbnails match');
	t.equals(result.artistContents.single[5].browseId, 'MPREb_AvQIOJGBHLA', 'random videoId check pass');
	t.end();
});
//# sourceMappingURL=unit_getArtistURLParser.js.map
public class Matrix { int[][] matrix; public Matrix(int[][] inMatrix){ this.matrix = inMatrix; } public Matrix multiply(Matrix otherMatrix){ int rowsA = this.matrix.length; int columnsA = this.matrix[0].length; int rowsB = otherMatrix.matrix.length; int columnsB = otherMatrix.matrix[0].length; int[][] result = new int[rowsA][columnsB]; for(int i=0; i<rowsA; i++){ for(int j=0; j<columnsB; j++){ for(int k=0; k<columnsA; k++){ result[i][j] += this.matrix[i][k] * otherMatrix.matrix[k][j]; } } } return new Matrix(result); } }
// Vuex store module for quotation/cargo state.
// FIXES: removed the "<reponame>/<gh_stars>" artifacts that broke the module,
// and corrected the commit name in getAllCustomers — it committed
// 'SET_CUSTOMERS_DETAILS', which does not exist; the mutation is
// 'SET_CUSTOMER_DETAILS', so customer data was silently never stored.
import MY_URLS from '../../configs/url';

const state = {
  cargoDetails: {},
  customerDetails: [],
  quotationServices: [],
  customerDetailsSet: false,
  shipmentSubType: '',
  shipmentType: '',
  errors: []
};

const getters = {
  cargoDetails: state => state.cargoDetails,
  customerDetails: state => state.customerDetails,
  quotationServices: state => state.quotationServices,
  customerDetailsSet: state => state.customerDetailsSet,
  shipmentSubType: state => state.shipmentSubType,
  shipmentType: state => state.shipmentType,
  errors: state => state.errors,
};

const mutations = {
  SET_CARGO_SHIPMENT_SUB_TYPE(state, data) {
    state.shipmentSubType = data
  },
  SET_CARGO_SHIPMENT_TYPE(state, data) {
    state.shipmentType = data
  },
  SET_CUSTOMER_DETAILS(state, data) {
    state.customerDetails = data
  },
  SET_CARGO_DETAILS(state, data) {
    state.cargoDetails = data
  },
  SET_CUSTOMER_DETAILS_SET(state, data) {
    state.customerDetailsSet = data
  },
  SET_QUOTATION_SERVICES(state, data) {
    state.quotationServices.push(data)
  },
  // Replaces the service at data.index with data.data (no-op if out of range).
  EDIT_QUOTATION_SERVICE(state, data) {
    let service = data.data;
    if (!_.isNil(state.quotationServices[data.index])) {
      state.quotationServices = state.quotationServices.map((value,key)=>key === data.index? service: value);
    }
  },
  DELETE_QUOTATION_SERVICES(state, data) {
    state.quotationServices.splice(_.indexOf(state.quotationServices, _.find(state.quotationServices, data)), 1);
  },
  SET_ERRORS(state, data) {
    state.errors = data
  },
};

const actions = {
  // Fetches all customers and stores them; errors go to state.errors.
  getAllCustomers(store) {
    axios.get(MY_URLS.CUSTOMER_URL).then((resp) => {
      store.commit('SET_CUSTOMER_DETAILS', resp.data);
    }).catch(error => store.commit('SET_ERRORS', error))
  },
};

export default {
  state,
  getters,
  mutations,
  actions
}
#!/bin/bash

# Timestamp used to name this run's log and timing files.
dt=$(date +%Y-%m-%d-%H_%M_%S)

# NOTE(review): the original comment claimed a lock prevents concurrent runs,
# but no locking is actually implemented here — confirm whether flock is
# handled by the caller.
# Cheap daily sync: with --fast-list and --size-only an unchanged tree needs
# only one GET request, so this is safe to schedule frequently.
/usr/bin/time /usr/bin/rclone sync -vv \
    --exclude "/logs/" \
    --dump headers \
    --fast-list \
    --size-only \
    --log-file="/logdir/sync-log-daily-${dt}.log" \
    --transfers 2 \
    --s3-chunk-size 16M \
    /mnt/borgdirectory remote:bucket-name > "/logdir/sync-time-daily-${dt}.out" 2>&1

# Archive every non-empty sync-* log into the backed-up directory.
/usr/bin/find /logdir -name "sync-*" -type f -size +0 -printf "/usr/bin/mv '%p' /mnt/borgdirectory/logs;" | bash
import React from 'react'; export default function SvgNext(props) { return ( <svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" viewBox="0 0 32 32" {...props} > <path data-name="\u9577\u65B9\u5F62 2 \u306E\u30B3\u30D4\u30FC 37" d="M20.7 16.716l-5.97 5.98a1.016 1.016 0 01-1.44-1.433L18.55 16l-5.26-5.263a1.016 1.016 0 011.44-1.434l5.97 5.98a1.024 1.024 0 010 1.433z" fillRule="evenodd" /> </svg> ); }
package evilcraft.blocks;

import java.util.List;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import evilcraft.api.Helpers;
import evilcraft.api.IInformationProvider;
import evilcraft.api.config.BlockConfig;
import evilcraft.api.config.ExtendedConfig;
import evilcraft.api.config.configurable.ConfigurableBlockContainer;
import evilcraft.entities.tileentities.TileEnvironmentalAccumulator;
import evilcraft.render.block.RenderEnvironmentalAccumulator;

/**
 * Block that can collect the weather and stuff.
 * Uses the config-driven singleton pattern shared by the mod's blocks:
 * {@link #initInstance(ExtendedConfig)} is called once from config loading.
 * @author immortaleeb
 *
 */
public class EnvironmentalAccumulator extends ConfigurableBlockContainer implements IInformationProvider {

    // Singleton instance, populated exactly once by initInstance().
    private static EnvironmentalAccumulator _instance = null;

    /**
     * Initialise the configurable.
     * @param eConfig The config.
     */
    public static void initInstance(ExtendedConfig<BlockConfig> eConfig) {
        if(_instance == null)
            _instance = new EnvironmentalAccumulator(eConfig);
        else
            eConfig.showDoubleInitError();
    }

    /**
     * Get the unique instance.
     * @return The instance.
     */
    public static EnvironmentalAccumulator getInstance() {
        return _instance;
    }

    /**
     * State indicating the environmental accumulator is idle.
     */
    public static final int STATE_IDLE = 0;
    /**
     * State indicating the environmental accumulator is currently
     * processing an item.
     */
    public static final int STATE_PROCESSING_ITEM = 1;
    /**
     * State indicating the environmental accumulator is cooling
     * down.
     */
    public static final int STATE_COOLING_DOWN = 2;
    /**
     * State indicating the environmental accumulator has just
     * finished processing an item. This state is necessary
     * because using this state we can put some delay between
     * processing an item and cooling down so that the client
     * gets a moment to show an effect when an item has finished
     * processing.
     */
    public static final int STATE_FINISHED_PROCESSING_ITEM = 3;

    // Per-face textures registered in registerBlockIcons().
    private IIcon sideIcon;
    private IIcon bottomIcon;
    private IIcon topIcon;

    // Private: construction only happens through initInstance().
    private EnvironmentalAccumulator(ExtendedConfig<BlockConfig> eConfig) {
        super(eConfig, Material.iron, TileEnvironmentalAccumulator.class);

        this.setRotatable(true);
        this.setStepSound(soundTypeMetal);
        this.setHardness(50.0F);
        this.setResistance(6000000.0F); // Can not be destroyed by explosions
    }

    // Not a full cube: neighbouring faces must still be rendered.
    @Override
    public boolean isOpaqueCube() {
        return false;
    }

    // Rendered by the mod's custom renderer instead of the standard block model.
    @Override
    public int getRenderType() {
        return RenderEnvironmentalAccumulator.ID;
    }

    @Override
    public boolean renderAsNormalBlock() {
        return false;
    }

    // Selects the texture per face: top, bottom, or the shared side icon.
    @SideOnly(Side.CLIENT)
    @Override
    public IIcon getIcon(int side, int meta) {
        if (side == ForgeDirection.UP.ordinal())
            return topIcon;

        if (side == ForgeDirection.DOWN.ordinal())
            return bottomIcon;

        return sideIcon;
    }

    @SideOnly(Side.CLIENT)
    @Override
    public void registerBlockIcons(IIconRegister iconRegister) {
        sideIcon = iconRegister.registerIcon(getTextureName() + "_side");
        bottomIcon = iconRegister.registerIcon(getTextureName() + "_bottom");
        topIcon = iconRegister.registerIcon(getTextureName() + "_top");
    }

    @Override
    public void onBlockHarvested(World world, int x, int y, int z, int metadata, EntityPlayer player) {
        // Environmental Accumulators should not drop upon breaking
        world.setBlockToAir(x, y, z);
    }

    @Override
    public String getInfo(ItemStack itemStack) {
        return Helpers.getLocalizedInfo(this);
    }

    // No extra tooltip information beyond getInfo().
    @SuppressWarnings("rawtypes")
    @Override
    public void provideInformation(ItemStack itemStack, EntityPlayer entityPlayer, List list, boolean par4) {}
}
#!/bin/bash # Copyright (c) 2018 Uber Technologies, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# Reference invocation / sweep ranges kept from earlier experiments:
# CUDA_VISIBLE_DEVICES=0 resman -r fnn_mnist_lrb_"$dim"_"$depth"_"$width" -- ./train.py /data/mnist/h5/train.h5 /data/mnist/h5/test.h5 -E 100 --vsize 10 --opt 'sgd' --lr 0.1 --l2 0.0001 --arch mnistfc --depth 2 --width 100
# for depth in {0,1,2,3,4,5}
# width in {10,50,100,200,400,600,800,1000}
# for dim in {0,10,50,100,150,200,250,300,350,400,450,500,550,600,650,700,750,800,850,900,950,1000}
#
# CUDA_VISIBLE_DEVICES=7 resman -r rnn_mnist_dir_"$dim"_"$depth"_"$width" -- ./train.py /data/mnist/h5/train.h5 /data/mnist/h5/test.h5 -E 100 --vsize $dim --opt 'sgd' --lr 0.1 --l2 0.0001 --arch mnist_rnn_dir --depth $depth --width $width &

# BUGFIX: the loops used `{1,}`, `{400,}`, `{0,}` — a trailing comma makes
# brace expansion emit the value AND an empty word, so every loop ran one
# extra bogus iteration with an empty variable.  Plain word lists are used
# for single values; restore brace lists (no trailing comma) when sweeping.
for depth in 1
do
    for width in 400
    do
        for dim in 0
        do
            # dim == 0 selects the direct (non-latent) IRNN run; otherwise LSTM.
            if [ "$dim" = 0 ]; then
                echo dir_"$dim"_"$depth"_"$width"
                CUDA_VISIBLE_DEVICES=7 python ./train.py /data/mnist/h5/train.h5 /data/mnist/h5/test.h5 -E 100 --vsize $dim --opt 'rmsprop' --lr 0.001 --l2 0.0001 --arch mnist_irnn_dir --depth $depth --width $width
            else
                echo lrb_"$dim"_"$depth"_"$width"
                CUDA_VISIBLE_DEVICES=7 python ./train.py /data/mnist/h5/train.h5 /data/mnist/h5/test.h5 -E 100 --vsize $dim --opt 'adam' --lr 0.0001 --l2 0.0001 --arch mnist_lstm --depth $depth --width $width
            fi
        done
    done
done
import {Injectable} from "@angular/core";

/**
 * Created by F1 on 2017/10/16.
 *
 * Injectable application-wide configuration: API endpoint, chart palette,
 * and default UI/theme settings.
 */
@Injectable()
export class Config{

    // Static app configuration consumed throughout the app.
    public appConfig:any = {
        baseUrl:"http://fcat.xfdmao.com/apis", // REST API root
        name: 'FCat',
        version: '4.0.0',
        testFlag:true, // NOTE(review): presumably toggles test/mock mode — confirm against consumers

        // for chart colors
        color: {
            primary: '#7266ba',
            info:    '#23b7e5',
            success: '#27c24c',
            warning: '#fad733',
            danger:  '#f05050',
            light:   '#e8eff0',
            dark:    '#3a3f51',
            black:   '#1c2b36'
        },

        // Default layout/theme flags applied at startup.
        settings: {
            themeID: 1,
            navbarHeaderColor: 'bg-black',
            navbarCollapseColor: 'bg-white-only',
            asideColor: 'bg-black',
            headerFixed: true,
            asideFixed: false,
            asideFolded: false,
            asideDock: false,
            container: false
        }
    };
}
package io.swagger.model.pheno;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;

/**
 * Column headers of an observation table; each constant carries the exact
 * wire string used in JSON (serialized via {@link #toString()}).
 */
public enum ObservationTableHeaderRowEnum {

    OBSERVATIONTIMESTAMP("observationTimeStamp"),
    OBSERVATIONUNITDBID("observationUnitDbId"),
    OBSERVATIONUNITNAME("observationUnitName"),
    STUDYDBID("studyDbId"),
    STUDYNAME("studyName"),
    GERMPLASMDBID("germplasmDbId"),
    GERMPLASMNAME("germplasmName"),
    POSITIONCOORDINATEX("positionCoordinateX"),
    POSITIONCOORDINATEY("positionCoordinateY"),
    YEAR("year"),
    FIELD("field"),
    PLOT("plot"),
    SUB_PLOT("sub-plot"),
    PLANT("plant"),
    POT("pot"),
    BLOCK("block"),
    ENTRY("entry"),
    REP("rep");

    // JSON wire representation of this header.
    private String value;

    ObservationTableHeaderRowEnum(String value) {
        this.value = value;
    }

    /** Serializes as the wire string rather than the Java constant name. */
    @Override
    @JsonValue
    public String toString() {
        return String.valueOf(value);
    }

    /**
     * Deserializes from the wire string; returns null when no constant
     * matches (including for null input), mirroring the generated-code
     * convention used across these models.
     */
    @JsonCreator
    public static ObservationTableHeaderRowEnum fromValue(String text) {
        for (ObservationTableHeaderRowEnum candidate : values()) {
            if (String.valueOf(candidate.value).equals(text)) {
                return candidate;
            }
        }
        return null;
    }
}
/* Token-pasting helper: LIST_DESC_FUNC(int_array, _get_, float) expands to
 * the symbol name _bal_list_int_array_get_float. */
#define LIST_DESC_FUNC(kind,mid,suffix) _bal_list_ ## kind ## mid ## suffix

/* Accessor combinations that cannot occur for a specialized array
 * representation resolve to NULL vtable slots (e.g. there is no float getter
 * on an int array). */
#define _bal_list_int_array_get_float NULL
#define _bal_list_byte_array_get_float NULL
#define _bal_list_float_array_get_int NULL

/* Nine-slot accessor vtable for representation `kind`:
 * {get, set, inexact-set} x {tagged, int, float}.
 * `inexact_set` names the middle token used for the inexact-set entries, so
 * specialized arrays can reuse their exact _set_ functions (see below). */
#define LIST_DESC_VTABLE(kind, inexact_set) \
    LIST_DESC_FUNC(kind, _get_, tagged),\
    LIST_DESC_FUNC(kind, _set_, tagged),\
    LIST_DESC_FUNC(kind, inexact_set, tagged),\
    LIST_DESC_FUNC(kind, _get_, int),\
    LIST_DESC_FUNC(kind, _set_, int),\
    LIST_DESC_FUNC(kind, inexact_set, int),\
    LIST_DESC_FUNC(kind, _get_, float),\
    LIST_DESC_FUNC(kind, _set_, float),\
    LIST_DESC_FUNC(kind, inexact_set, float)

/* Generic (tagged) list: distinct inexact-set entry points; member type
 * comes from the supplied bit set. */
#define LIST_DESC_INIT_GENERIC(tid, bitSet) {\
    tid, 0, 0,\
    LIST_DESC_VTABLE(generic, _inexact_set_),\
    BITSET_MEMBER_TYPE(bitSet)\
}

/* int array: inexact set is the same as exact set. */
#define LIST_DESC_INIT_INT_ARRAY(tid) {\
    tid, 0, 0,\
    LIST_DESC_VTABLE(int_array, _set_),\
    BITSET_MEMBER_TYPE(1 << TAG_INT)\
}

/* byte array: member type is passed in explicitly (byte subtypes vary). */
#define LIST_DESC_INIT_BYTE_ARRAY(tid, byteType) {\
    tid, 0, 0,\
    LIST_DESC_VTABLE(byte_array, _set_),\
    byteType\
}

/* float array: inexact set is the same as exact set. */
#define LIST_DESC_INIT_FLOAT_ARRAY(tid) {\
    tid, 0, 0,\
    LIST_DESC_VTABLE(float_array, _set_),\
    BITSET_MEMBER_TYPE(1 << TAG_FLOAT)\
}
/* * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES) * * This file is part of Orfeo Toolbox * * https://www.orfeo-toolbox.org/ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Software Guide : BeginCommandLineArgs // INPUTS: {ROISpot5.png} // OUTPUTS: {DerivativeImageFilterFloatOutput.hdr} // OUTPUTS: {DerivativeImageFilterOutput.png} // 1 0 // Software Guide : EndCommandLineArgs // Software Guide : BeginLatex // // The \doxygen{itk}{DerivativeImageFilter} is used for computing the partial // derivative of an image, the derivative of an image along a particular axial // direction. // // \index{itk::DerivativeImageFilter} // // Software Guide : EndLatex #include "otbImage.h" #include "otbImageFileReader.h" #include "otbImageFileWriter.h" #include "itkUnaryFunctorImageFilter.h" #include "itkRescaleIntensityImageFilter.h" // Software Guide : BeginLatex // // The header file corresponding to this filter should be included first. 
//
// \index{itk::DerivativeImageFilter!header}
//
// Software Guide : EndLatex

// Software Guide : BeginCodeSnippet
#include "itkDerivativeImageFilter.h"
// Software Guide : EndCodeSnippet

// Example driver: reads an image, writes its partial derivative as float and
// an 8-bit rescaled version of the same result.
// argv: [1] input image, [2] raw derivative output, [3] rescaled [0,255]
//       output, [4] derivative order, [5] axis along which to differentiate.
// NOTE(review): atoi() returns 0 on non-numeric input — the example performs
// no validation of the numeric arguments.
int main(int argc, char * argv[])
{
  if (argc < 6)
    {
    std::cerr << "Usage: " << std::endl;
    std::cerr << argv[0] << " inputImageFile outputImageFile normalizedOutputImageFile ";
    std::cerr << " derivativeOrder direction" << std::endl;
    return EXIT_FAILURE;
    }

  // Software Guide : BeginLatex
  //
  // Next, the pixel types for the input and output images must be defined and, with
  // them, the image types can be instantiated. Note that it is important to
  // select a signed type for the image, since the values of the derivatives
  // will be positive as well as negative.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef float InputPixelType;
  typedef float OutputPixelType;

  const unsigned int Dimension = 2;

  typedef otb::Image<InputPixelType, Dimension>  InputImageType;
  typedef otb::Image<OutputPixelType, Dimension> OutputImageType;
  // Software Guide : EndCodeSnippet

  typedef otb::ImageFileReader<InputImageType>  ReaderType;
  typedef otb::ImageFileWriter<OutputImageType> WriterType;

  ReaderType::Pointer reader = ReaderType::New();
  WriterType::Pointer writer = WriterType::New();

  reader->SetFileName(argv[1]);
  writer->SetFileName(argv[2]);

  // Software Guide : BeginLatex
  //
  // Using the image types, it is now possible to define the filter type
  // and create the filter object.
  //
  // \index{itk::DerivativeImageFilter!instantiation}
  // \index{itk::DerivativeImageFilter!New()}
  // \index{itk::DerivativeImageFilter!Pointer}
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef itk::DerivativeImageFilter<
      InputImageType, OutputImageType> FilterType;

  FilterType::Pointer filter = FilterType::New();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // The order of the derivative is selected with the \code{SetOrder()}
  // method. The direction along which the derivative will be computed is
  // selected with the \code{SetDirection()} method.
  //
  // \index{itk::DerivativeImageFilter!SetOrder()}
  // \index{itk::DerivativeImageFilter!SetDirection()}
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  filter->SetOrder(atoi(argv[4]));
  filter->SetDirection(atoi(argv[5]));
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // The input to the filter can be taken from any other filter, for example
  // a reader. The output can be passed down the pipeline to other filters,
  // for example, a writer. An update call on any downstream filter will
  // trigger the execution of the derivative filter.
  //
  // \index{itk::DerivativeImageFilter!SetInput()}
  // \index{itk::DerivativeImageFilter!GetOutput()}
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  filter->SetInput(reader->GetOutput());
  writer->SetInput(filter->GetOutput());
  writer->Update();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // \begin{figure}
  // \center
  // \includegraphics[width=0.44\textwidth]{ROISpot5.eps}
  // \includegraphics[width=0.44\textwidth]{DerivativeImageFilterOutput.eps}
  // \itkcaption[Effect of the Derivative filter.]{Effect of the
  // Derivative filter.}
  // \label{fig:DerivativeImageFilterOutput}
  // \end{figure}
  //
  // Figure \ref{fig:DerivativeImageFilterOutput} illustrates the effect of
  // the DerivativeImageFilter. The derivative
  // is taken along the $x$ direction. The sensitivity to noise in the image
  // is evident from this result.
  //
  // Software Guide : EndLatex

  // Second output: rescale the signed float derivative into [0, 255] so it
  // can be written as an ordinary 8-bit image (argv[3]).
  typedef otb::Image<unsigned char, Dimension> WriteImageType;

  typedef itk::RescaleIntensityImageFilter<
      OutputImageType, WriteImageType> NormalizeFilterType;

  typedef otb::ImageFileWriter<WriteImageType> NormalizedWriterType;

  NormalizeFilterType::Pointer  normalizer = NormalizeFilterType::New();
  NormalizedWriterType::Pointer normalizedWriter = NormalizedWriterType::New();

  normalizer->SetInput(filter->GetOutput());
  normalizedWriter->SetInput(normalizer->GetOutput());

  normalizer->SetOutputMinimum(0);
  normalizer->SetOutputMaximum(255);

  normalizedWriter->SetFileName(argv[3]);
  normalizedWriter->Update();

  return EXIT_SUCCESS;
}
#include <iostream> #include <vector> #include <string> using namespace std; void fillDP(vector<vector<int>>& dp, const string& r) { int n = r.length(); for (int i = n - 1; i >= 0; i--) { for (int j = 0; j < 10; j++) { dp[i][j] = 0; if (r[i] - '0' < j || (i + 1 < n && r[i] - '0' == j) || (i == 0 && j == 0)) continue; // Implement the logic to fill dp[i][j] based on the given conditions // Your code here // Example: dp[i][j] = some_value_based_on_conditions; } } } int main() { string r = "12345"; int n = r.length(); vector<vector<int>> dp(n, vector<int>(10, 0)); fillDP(dp, r); // Output the filled dp array for (int i = 0; i < n; i++) { for (int j = 0; j < 10; j++) { cout << dp[i][j] << " "; } cout << endl; } return 0; }
<reponame>bink81/java-experiments<filename>src/main/java/tools/javaparser/CodeUpdater.java package tools.javaparser; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.javaparser.JavaParser; import com.github.javaparser.ParseException; import com.github.javaparser.ast.CompilationUnit; import com.github.javaparser.ast.body.BodyDeclaration; import com.github.javaparser.ast.body.MethodDeclaration; import com.github.javaparser.ast.body.Parameter; import com.github.javaparser.ast.body.TypeDeclaration; public class CodeUpdater { private static final Logger logger = LoggerFactory.getLogger(CodeUpdater.class); public static void main(String[] args) throws Exception { CodeUpdater codeParser = new CodeUpdater(); Path path = Paths.get("src", "main", "resources", "tools", "javaparser", "TestJavaClass.java"); CompilationUnit compilationUnit = codeParser.parseFile(path.toFile()); codeParser.showMethods(compilationUnit); codeParser.updateMethods(compilationUnit); } private void updateMethods(CompilationUnit compilationUnit) { logger.info("Updating a method declaration..."); new MethodUpdater().visit(compilationUnit, "2"); logger.info("New code:\n{}", compilationUnit); } private CompilationUnit parseFile(File file) throws FileNotFoundException, ParseException, IOException { FileInputStream inputStream = new FileInputStream(file); logger.info("Parsing file {}", file); CompilationUnit compilationUnit; try { compilationUnit = JavaParser.parse(inputStream); } finally { inputStream.close(); } logger.info("Original code:\n{}", compilationUnit); return compilationUnit; } private void showMethods(CompilationUnit compilationUnit) { List<TypeDeclaration> typeDeclarations = compilationUnit.getTypes(); for (TypeDeclaration typeDeclaration : typeDeclarations) { 
List<BodyDeclaration> members = typeDeclaration.getMembers(); for (BodyDeclaration member : members) { if (member instanceof MethodDeclaration) { MethodDeclaration method = (MethodDeclaration) member; String methodName = method.getName(); List<Parameter> parameters = method.getParameters(); logger.info("Method name={}, parameters={}", methodName, parameters); } } } } }
#!/bin/sh
# Entrypoint: run supervisord in the foreground (-n) so the container stays
# alive. exec replaces this shell with supervisord, so termination signals
# (e.g. SIGTERM on container stop) reach supervisord directly instead of
# being swallowed by an intermediate shell.
exec supervisord -n -c /naftis/supervisord.conf
const userInfo = {
  name: "John Doe",
  age: 20,
  // Removes the user's identifying fields from this object in place.
  removeUser() {
    delete this.name;
    delete this.age;
  }
};

userInfo.removeUser();
// name and age are gone, but removeUser itself remains an own enumerable
// property, so this logs { removeUser: [Function: removeUser] } — not {}.
console.log(userInfo);
import random

# Draw one uniformly distributed integer from the inclusive range [1, 10]
# (randint includes both endpoints, unlike randrange) and print it.
random_number = random.randint(1, 10)
print(random_number)
#!/bin/bash
# Bootstrap a self-contained Python virtualenv ("dask_venv") on top of an LCG
# CVMFS release, install Dask and friends into it, then run the command passed
# as arguments (typically the dask scheduler/worker launch line).
source /cvmfs/sft.cern.ch/lcg/views/LCG_98python3/x86_64-centos7-gcc9-opt/setup.sh
python -m venv dask_venv
source dask_venv/bin/activate
# Clear the LCG-inherited PYTHONPATH so only the venv's site-packages is used.
export PYTHONPATH=""
export PATH=${PWD}/dask_venv/bin:${PATH}
export lcgprefix=/cvmfs/sft.cern.ch/lcg/views/LCG_98python3/x86_64-centos7-gcc9-opt/lib/python3.7/site-packages/
# XRootD ships compiled extensions; symlink the LCG builds into the venv
# instead of pip-installing them.
ln -sf ${lcgprefix}/pyxrootd dask_venv/lib/python3.7/site-packages/pyxrootd
ln -sf ${lcgprefix}/XRootD dask_venv/lib/python3.7/site-packages/XRootD
pip install --upgrade pip wheel --no-cache-dir
pip install dask[dataframe] distributed blosc lz4 --no-cache-dir
pip list
echo "Dask scheduler environment created at ${PWD}/dask_venv"
/bin/pwd
/bin/uname -a
/bin/hostname
# Quote "$@" so arguments containing spaces survive both the echo and the
# execution (a bare $@ re-splits them on whitespace).
echo "$@"
"$@"
#!/bin/bash set -e HOSTNAME_TO_CHECK=$1 HOSTNAME_FINGERPRINT=$2 # inspired by https://github.com/wercker/step-add-to-known_hosts # make sure $HOME/.ssh exists if [ ! -d "$HOME/.ssh" ]; then echo "$HOME/.ssh does not exists, creating it" mkdir -p "$HOME/.ssh" fi known_hosts_path="$HOME/.ssh/known_hosts" if [ ! -f "$known_hosts_path" ]; then echo "$known_hosts_path does not exists, touching it and chmod it to 644" touch "$known_hosts_path" chmod 644 "$known_hosts_path" fi # validate <hostname> exists if [ ! -n "$HOSTNAME_TO_CHECK" ] then echo "missing or empty hostname, usage keyscan.sh <hostname> <fingerprint>" exit 1 fi # validate <fingerprint> exists if [ ! -n "$HOSTNAME_FINGERPRINT" ] then echo "missing or empty fingerprint, usage keyscan.sh <hostname> <fingerprint>" exit 1 fi types="rsa,dsa,ecdsa" # Check if ssh-keyscan command exists set +e hash ssh-keyscan 2>/dev/null result=$? set -e if [ $result -ne 0 ] ; then echo "ssh-keyscan command not found. Cause: ssh-client software probably not installed." exit 1 fi ssh_keyscan_command="ssh-keyscan -t $types" ssh_keyscan_command="$ssh_keyscan_command $HOSTNAME_TO_CHECK" ssh_keyscan_result=$(mktemp) $ssh_keyscan_command > "$ssh_keyscan_result" echo "Searching for keys that match fingerprint $HOSTNAME_FINGERPRINT" # shellcheck disable=SC2162,SC2002 cat "$ssh_keyscan_result" | sed "/^ *#/d;s/#.*//" | while read ssh_key; do ssh_key_path=$(mktemp) echo "$ssh_key" > "$ssh_key_path" ssh_key_fingerprint=$(ssh-keygen -l -f "$ssh_key_path" | awk '{print $2}') if echo "$ssh_key_fingerprint" | grep -q "$HOSTNAME_FINGERPRINT" ; then echo "Added a key to $known_hosts_path" echo "$ssh_key" | tee -a "$known_hosts_path" else echo "Skipped adding a key to known_hosts, it did not match the fingerprint ($ssh_key_fingerprint)" fi rm -f "$ssh_key_path" done
/*
 * Public API Surface of ngx-guildy
 *
 * Everything re-exported here is part of the library's public API;
 * consumers import from the package root rather than from deep paths.
 */
// Core service
export * from './lib/ngx-guildy.service';
// Editor and component library UI
export * from './lib/guildy-editor/guildy-editor.component';
export * from './lib/component-library/component-library.component';
export * from './lib/component-library/component-item.directive';
// Component settings panel and its content projection directives
export * from './lib/component-settings/component-settings.component';
export * from './lib/component-settings/component-settings-header.directive';
export * from './lib/component-settings/component-settings-footer.directive';
// Structural/behavioral directives
export * from './lib/guildy-container.directive';
export * from './lib/contenteditable.directive';
export * from './lib/guildy-movable.directive';
// Module and decorator entry points
export * from './lib/ngx-guildy.module';
export * from './lib/guildy-component.decorator';
#!/bin/sh
# NOTE(review): this appears to be the CocoaPods-generated
# "Pods-*-frameworks.sh" embed-frameworks build phase script — if so, manual
# edits will be overwritten on the next `pod install`; confirm before changing.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # NOTE(review): the echoed command below says "${basename}.framework.dSYM"
      # but the executed rsync uses "${basename}.dSYM" — the log is misleading.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # NOTE(review): the quoting inside this echo is unbalanced (generated
    # quirk) — the executed rsync below is the authoritative command.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/JRACRequest/JRACRequest.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ReactiveObjC/ReactiveObjC.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/JRACRequest/JRACRequest.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ReactiveObjC/ReactiveObjC.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
package elasta.orm.upsert.impl;

import elasta.orm.upsert.*;

import java.util.Objects;

/**
 * Created by Jango on 2017-01-10.
 *
 * Upserts the row of a relation (join) table: populates it from the two
 * endpoint rows, marks it as new, and registers it with the upsert context
 * under its table/primary-key key.
 */
final public class RelationTableUpserFunctionImpl implements RelationTableUpserFunction {
    final RelationTableDataPopulator relationTableDataPopulator;

    public RelationTableUpserFunctionImpl(RelationTableDataPopulator relationTableDataPopulator) {
        // requireNonNull returns its argument, so validation and assignment
        // happen in one step.
        this.relationTableDataPopulator = Objects.requireNonNull(relationTableDataPopulator);
    }

    @Override
    public TableData upsert(TableData srcTableData, TableData dstTableData, UpsertContext upsertContext) {
        Objects.requireNonNull(srcTableData);
        Objects.requireNonNull(dstTableData);
        Objects.requireNonNull(upsertContext);

        final TableData populated =
            relationTableDataPopulator.populate(srcTableData, dstTableData).withIsNew(true);

        upsertContext.putOrMerge(
            UpsertUtils.toTableAndPrimaryColumnsKey(
                populated.getTable(),
                populated.getPrimaryColumns(),
                populated.getValues()
            ),
            populated
        );

        return populated;
    }
}
def median_of_two_sorted_arrays(array1, array2):
    """Return the median of the values in two sorted lists.

    The lists are concatenated and re-sorted; for an even total count the
    median is the mean of the two middle values, otherwise the single
    middle element.
    """
    merged = sorted(array1 + array2)
    total = len(merged)
    half = total // 2
    if total % 2:
        # Odd count: (total - 1) // 2 == total // 2, the middle element.
        return merged[half]
    # Even count: average the two elements straddling the midpoint.
    return (merged[half - 1] + merged[half]) / 2
#!/bin/sh
# Print the running kernel's release string (e.g. "5.15.0-91-generic").

# Retrieve the kernel version using uname
kernel_version=$(uname -r)

# Quote the expansion so the value is passed to echo verbatim (defensive:
# unquoted variables undergo word splitting and glob expansion).
echo "$kernel_version"
<gh_stars>0 """ defines a medium """ import os import json import hashlib import magic class Medium(): """ a Medium """ medium_id = "" filename = "" tags = [] def __init__(self, medium_id, filename, tags): super().__init__() self.filename = filename if medium_id is None: self.create_media_id() else: self.medium_id = medium_id self.load_tags(tags) def __str__(self): return self.filename def __repr__(self): string = """medium.Medium('{}', '{}', '{}')""".format( self.medium_id, self.filename, json.dumps(self.tags)) return string def __eq__(self, other): vals = [] vals.append(self.medium_id == other.medium_id) vals.append(self.tags == other.tags) vals.append(self.filename == other.filename) return all(vals) def mime_type(self): """ returns mime type """ return magic.from_file(self.filename, mime=True) def rename(self, name): """ renames the file """ os.rename(self.filename, name) self.filename = name def load_tags(self, tag_string): """ parse tags from string """ try: self.tags = json.loads(tag_string) except TypeError: self.tags = [] def add_tags(self, tag_string): """ add one ore more tags """ self.tags.append(tag_string) def get_tags(self): """ returns all tags as json """ return json.dumps(self.tags) def delete_tag(self, tag_string): """ delete a tag """ self.tags.remove(tag_string) def contains(self, tag_string): """ returns true when tag exists """ return tag_string in self.tags def create_media_id(self): """ creates new hash """ buffer_size = 65536 sha2 = hashlib.sha256() with open(self.filename, 'rb', buffering=0) as file: while True: data = file.read(buffer_size) if not data: break sha2.update(data) self.medium_id = sha2.hexdigest()
#!/bin/bash
# Aoi Desktop installer: installs the desktop packages via pacman and copies
# the bundled configuration into ~/.config. Run on a TTY as a sudoer.
echo Aoi Desktop Installer
echo
echo This script must be run on a TTY. Before executing further, please make sure that you have a stable internet connection and that the current account is a sudoer.
echo
# -n 1: return after a single keystroke (so the prompt's "any key" is true,
# not Enter-only); -s: don't echo the key; -r: don't mangle backslashes.
read -n 1 -s -r -p "Press any key to continue..."
echo

# Install the required pacman packages.
sudo pacman -Syu firefox plank ulauncher --noconfirm --needed

# Copy the required config files.
cp -rv config/* ~/.config

echo
echo Script finished. Please reboot before logging in to ensure that the changes are applied properly.
-- Retrieve all columns for rows that are active, red, medium-sized, and
-- priced above 10, listed most expensive first.
SELECT *
FROM table_name
WHERE status = 'active'
  AND color = 'red'
  AND size = 'medium'
  AND price > 10
ORDER BY price DESC;
import json
from django.core import serializers
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
from django.db.models import Count, Q
from django.db.models.functions import TruncDate

# Create your models here.


class CreatedModel(models.Model):
    """Abstract base adding a creation timestamp to every model."""
    created = models.DateTimeField(auto_now_add=True)

    def created_mins_ago(self):
        """Whole minutes elapsed since creation.

        Uses total_seconds() so spans longer than one day are counted
        correctly (timedelta.seconds wraps around every 24 hours, which made
        the old value wrong for objects older than a day).

        NOTE(review): this compares naive datetime.now() against the stored
        timestamp with tzinfo stripped; if USE_TZ is enabled the stored value
        is UTC and this mixes timezones — confirm against project settings.
        """
        now = datetime.now()
        then = self.created.replace(tzinfo=None)
        tdelta = now - then
        return abs(int(tdelta.total_seconds() / 60))

    class Meta:
        abstract = True


class Meme(CreatedModel):
    """A user-posted image with a title."""
    pic = models.ImageField(upload_to='images', null=False, blank=False)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    title = models.CharField(max_length=128)

    def comments_sum(self):
        """Total number of comments on this meme."""
        return Comment.objects.filter(meme=self).count()

    def likes_sum(self):
        """Total number of likes on this meme."""
        return MemeLike.objects.filter(meme=self).count()

    def get_likers(self):
        """Users who liked this meme."""
        return User.objects.filter(memelike__meme=self)

    def get_likes_sum_by_month(self):
        """Like counts bucketed for charting.

        Note: despite the name, TruncDate buckets by *day*; the name is kept
        for caller compatibility. Returns {'labels': [dates], 'values': [counts]}.
        """
        qs = MemeLike.objects.filter(meme=self).annotate(
            date=TruncDate('created')
        ).values('date').annotate(
            l=Count('pk')
        ).values('date', 'l').order_by('created')
        labels = []
        values = []
        for val in qs:
            labels.append(str(val['date']))
            values.append(val['l'])
        return {'labels': labels, 'values': values}

    def get_comments_sum_by_month(self):
        """Comment counts bucketed per day (see get_likes_sum_by_month)."""
        qs = Comment.objects.filter(meme=self).annotate(
            date=TruncDate('created')
        ).values('date').annotate(
            c=Count('pk')
        ).values('date', 'c').order_by('created')
        labels = []
        values = []
        for val in qs:
            labels.append(str(val['date']))
            values.append(val['c'])
        return {'labels': labels, 'values': values}


class Comment(CreatedModel):
    """A (possibly nested) comment on a meme; soft-deleted via `deleted`."""
    # on_delete=CASCADE is now explicit: it was the implicit default before
    # Django 2.0 and is mandatory from 2.0 on; matches the User FKs above.
    meme = models.ForeignKey(Meme, null=False, on_delete=models.CASCADE)
    message = models.CharField(max_length=256, blank=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False)
    reply_to = models.ForeignKey('self', null=True, on_delete=models.CASCADE)
    deleted = models.BooleanField(null=False, blank=False, default=False)

    def likes_sum(self):
        """Total number of likes on this comment."""
        return CommentLike.objects.filter(comment=self).count()

    def get_likers(self):
        """Users who liked this comment."""
        return User.objects.filter(commentlike__comment=self)


class MemeLike(CreatedModel):
    """One like of a meme by a user; at most one per (user, meme)."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=False)
    meme = models.ForeignKey(Meme, null=False, on_delete=models.CASCADE)

    class Meta:
        unique_together = ('user', 'meme')


class CommentLike(CreatedModel):
    """One like of a comment by a user; at most one per (user, comment)."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    comment = models.ForeignKey(Comment, null=False, on_delete=models.CASCADE)

    class Meta:
        unique_together = ('user', 'comment')
<reponame>myid999/javademo<gh_stars>0 package demo.java.log.logback.a; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * * * */ public class DummyA { private static final Logger logger=LoggerFactory.getLogger(DummyA.class.getName()); public static void printLogs(){ System.out.println("DummyA begin"); logger.error("DummyA error"); logger.warn("DummyA warn"); logger.info("DummyA info"); logger.trace("DummyA trace"); logger.debug("DummyA debug"); System.out.println("DummyA end"); } }
test = "Test InpUT"


def Swaperino(strInput):
    """Print strInput with the case of every cased character swapped.

    Equivalent to the original per-character loop (isupper -> lower(),
    everything else -> upper(), which leaves uncased characters unchanged);
    str.swapcase() performs the same transformation in one call and replaces
    the manual concatenation and dead commented-out code. Returns None.
    """
    print(strInput.swapcase())


Swaperino(test)
/*
 * Copyright 2022 International Business Machines Corp. and others
 *
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership. Licensed under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
package com.ibm.jbatch.tck.cdi.jobop;

import java.util.List;
import java.util.Properties;
import java.util.Set;

import jakarta.batch.operations.JobExecutionAlreadyCompleteException;
import jakarta.batch.operations.JobExecutionIsRunningException;
import jakarta.batch.operations.JobExecutionNotMostRecentException;
import jakarta.batch.operations.JobExecutionNotRunningException;
import jakarta.batch.operations.JobOperator;
import jakarta.batch.operations.JobRestartException;
import jakarta.batch.operations.JobSecurityException;
import jakarta.batch.operations.JobStartException;
import jakarta.batch.operations.NoSuchJobException;
import jakarta.batch.operations.NoSuchJobExecutionException;
import jakarta.batch.operations.NoSuchJobInstanceException;
import jakarta.batch.runtime.BatchRuntime;
import jakarta.batch.runtime.JobExecution;
import jakarta.batch.runtime.JobInstance;
import jakarta.batch.runtime.StepExecution;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.context.Dependent;
import jakarta.enterprise.inject.Produces;

/**
 * {@link JobOperator} that delegates every call, unmodified, to the runtime
 * operator obtained via {@link BatchRuntime#getJobOperator()} on each
 * invocation (no caching).
 *
 * NOTE(review): the ApplicationScoped/Dependent/Produces imports suggest this
 * wrapper is meant to be exposed as a CDI bean or producer elsewhere in the
 * TCK — confirm against the declaring beans.xml/producer class.
 */
public class TCKJobOperatorWrapper implements JobOperator {

    // All methods below are pure one-line delegations.

    @Override
    public Set<String> getJobNames() throws JobSecurityException {
        return BatchRuntime.getJobOperator().getJobNames();
    }

    @Override
    public int getJobInstanceCount(String jobName) throws NoSuchJobException, JobSecurityException {
        return BatchRuntime.getJobOperator().getJobInstanceCount(jobName);
    }

    @Override
    public List<JobInstance> getJobInstances(String jobName, int start, int count)
            throws NoSuchJobException, JobSecurityException {
        return BatchRuntime.getJobOperator().getJobInstances(jobName, start, count);
    }

    @Override
    public List<Long> getRunningExecutions(String jobName) throws NoSuchJobException, JobSecurityException {
        return BatchRuntime.getJobOperator().getRunningExecutions(jobName);
    }

    @Override
    public Properties getParameters(long executionId) throws NoSuchJobExecutionException, JobSecurityException {
        return BatchRuntime.getJobOperator().getParameters(executionId);
    }

    @Override
    public long start(String jobXMLName, Properties jobParameters) throws JobStartException, JobSecurityException {
        return BatchRuntime.getJobOperator().start(jobXMLName, jobParameters);
    }

    @Override
    public long restart(long executionId, Properties restartParameters)
            throws JobExecutionAlreadyCompleteException, NoSuchJobExecutionException,
            JobExecutionNotMostRecentException, JobRestartException, JobSecurityException {
        return BatchRuntime.getJobOperator().restart(executionId, restartParameters);
    }

    @Override
    public void stop(long executionId)
            throws NoSuchJobExecutionException, JobExecutionNotRunningException, JobSecurityException {
        BatchRuntime.getJobOperator().stop(executionId);
    }

    @Override
    public void abandon(long executionId)
            throws NoSuchJobExecutionException, JobExecutionIsRunningException, JobSecurityException {
        BatchRuntime.getJobOperator().abandon(executionId);
    }

    @Override
    public JobInstance getJobInstance(long executionId) throws NoSuchJobExecutionException, JobSecurityException {
        return BatchRuntime.getJobOperator().getJobInstance(executionId);
    }

    @Override
    public List<JobExecution> getJobExecutions(JobInstance instance)
            throws NoSuchJobInstanceException, JobSecurityException {
        return BatchRuntime.getJobOperator().getJobExecutions(instance);
    }

    @Override
    public JobExecution getJobExecution(long executionId) throws NoSuchJobExecutionException, JobSecurityException {
        return BatchRuntime.getJobOperator().getJobExecution(executionId);
    }

    @Override
    public List<StepExecution> getStepExecutions(long jobExecutionId)
            throws NoSuchJobExecutionException, JobSecurityException {
        return BatchRuntime.getJobOperator().getStepExecutions(jobExecutionId);
    }
}
package com.hapramp.notification.model;

/**
 * Notification payload for a newly created contest. Pure data holder: carries
 * the contest's schedule (startsAt/endsAt), content (description, prizes,
 * image, title) and identity (id, tag, type) fields with getters and setters.
 * All schedule fields are raw strings as delivered in the notification —
 * no date parsing happens here.
 */
public class ContestCreatedNotificationModel extends BaseNotificationModel {
    private String startsAt;
    private String description;
    private String endsAt;
    private String[] prizes;
    private String id;
    private String tag;
    private String type;
    private String image;
    private String title;

    public ContestCreatedNotificationModel(String startsAt, String description, String endsAt,
            String[] prizes, String id, String tag, String type, String image, String title) {
        this.startsAt = startsAt;
        this.description = description;
        this.endsAt = endsAt;
        this.prizes = prizes;
        this.id = id;
        this.tag = tag;
        this.type = type;
        this.image = image;
        this.title = title;
    }

    public String getStartsAt() { return startsAt; }

    public void setStartsAt(String startsAt) { this.startsAt = startsAt; }

    public String getDescription() { return description; }

    public void setDescription(String description) { this.description = description; }

    public String getEndsAt() { return endsAt; }

    public void setEndsAt(String endsAt) { this.endsAt = endsAt; }

    public String[] getPrizes() { return prizes; }

    public void setPrizes(String[] prizes) { this.prizes = prizes; }

    public String getId() { return id; }

    public void setId(String id) { this.id = id; }

    public String getTag() { return tag; }

    public void setTag(String tag) { this.tag = tag; }

    public String getType() { return type; }

    public void setType(String type) { this.type = type; }

    public String getImage() { return image; }

    public void setImage(String image) { this.image = image; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }
}
package hudson.plugins.accurev;

import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.TaskListener;
import hudson.plugins.accurev.AccurevSCM.AccurevServer;
import hudson.plugins.accurev.cmd.History;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOCase;
import org.apache.commons.lang.StringUtils;

/** SCM-polling helper: decides whether an AccuRev stream has new transactions. */
public class CheckForChanges {

  /**
   * @param server server
   * @param accurevEnv accurev environment
   * @param workspace workspace
   * @param listener listener
   * @param launcher launcher
   * @param stream stream
   * @param buildDate build Date
   * @param logger logger
   * @param scm Accurev SCm
   * @param version AccuRev server major version; 7+ supports a combined-type hist query
   * @return if there are any new transactions in the stream since the last build was done
   */
  // stream param is of type AccurevStream
  public static boolean checkStreamForChanges(
      AccurevServer server,
      EnvVars accurevEnv,
      FilePath workspace,
      TaskListener listener,
      Launcher launcher,
      AccurevStream stream,
      Date buildDate,
      Logger logger,
      AccurevSCM scm,
      int version) {
    String filterForPollSCM = scm.getFilterForPollSCM();
    String subPath = scm.getSubPath();
    // Workspaces and ordinary streams accept different sets of transaction types.
    List<String> validTransactionTypes;
    if (stream.getType().name().equalsIgnoreCase("workspace")) {
      validTransactionTypes = AccurevSCM.DEFAULT_VALID_WORKSPACE_TRANSACTION_TYPES;
    } else {
      validTransactionTypes = AccurevSCM.DEFAULT_VALID_STREAM_TRANSACTION_TYPES;
    }
    String transactionTypes = String.join(",", validTransactionTypes);
    listener
        .getLogger()
        .println( //
            "Checking transactions of type " + transactionTypes + //
                " in stream [" + stream.getName() + "]");
    boolean isTransLatestThanBuild = false;

    Set<String> serverPaths = new HashSet<String>();
    Set<String> pollingFilters = getListOfPollingFilters(filterForPollSCM, subPath);

    // AR version 7+ supports combined transaction type hist call.
    if (version < 7) {
      AccurevTransaction latestCodeChangeTransaction = new AccurevTransaction();
      latestCodeChangeTransaction.setDate(AccurevSCM.NO_TRANS_DATE);
      // query AccuRev for the latest transactions of each kind defined in transactionTypes using
      // getTimeOfLatestTransaction
      for (final String transactionType : validTransactionTypes) {
        try {
          final AccurevTransaction tempTransaction =
              History.getLatestTransaction(
                  scm,
                  server,
                  accurevEnv,
                  workspace,
                  listener,
                  launcher,
                  stream.getName(),
                  transactionType);
          if (tempTransaction != null) {
            listener
                .getLogger()
                .println(
                    "Last transaction of type [" + transactionType + "] is " + tempTransaction);
            if (latestCodeChangeTransaction.getDate().before(tempTransaction.getDate())) {
              // check the affected
              // NOTE(review): serverPaths accumulates across transaction types, so this
              // filter check also sees paths added by earlier iterations — confirm intended.
              serverPaths.addAll(tempTransaction.getAffectedPaths());
              if (tempTransaction.getAffectedPaths().size() > 0) {
                if (!changesMatchFilter(serverPaths, pollingFilters)) {
                  // Continue to next transaction (that may have a match)
                  continue;
                }
              }
            }
            // NOTE(review): this assignment runs even when tempTransaction is not newer
            // than the current latest — confirm intended.
            latestCodeChangeTransaction = tempTransaction;
            if (latestCodeChangeTransaction.getDate().equals(AccurevSCM.NO_TRANS_DATE)) {
              listener.getLogger().println("No last transaction found.");
            }
            // log last transaction information if retrieved
            if (buildDate != null && buildDate.before(latestCodeChangeTransaction.getDate())) {
              listener.getLogger().println("Last valid trans " + latestCodeChangeTransaction);
              isTransLatestThanBuild = true;
            }
          } else {
            listener.getLogger().println("No transactions of type [" + transactionType + "]");
          }
        } catch (Exception e) {
          final String msg =
              "getLatestTransaction failed when checking the stream "
                  + stream.getName()
                  + " for changes with transaction type "
                  + transactionType;
          listener.getLogger().println(msg);
          e.printStackTrace(listener.getLogger());
          logger.log(Level.WARNING, msg, e);
        }
      }
      return isTransLatestThanBuild;
    } else {
      SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
      String dateRange = formatter.format(buildDate);
      List<AccurevTransaction> tempTransaction;
      try {
        // history for all transaction types in time range from last build - now.
        tempTransaction =
            History.getTransactionsRange(
                scm,
                server,
                accurevEnv,
                workspace,
                listener,
                launcher,
                stream.getName(),
                transactionTypes,
                dateRange);
        if (null != tempTransaction && !tempTransaction.isEmpty()) {
          for (AccurevTransaction t : tempTransaction) {
            // Stream-level operations carry no affected paths but still count as a change.
            if (t.getAffectedPaths().isEmpty()
                && (t.getAction().equals("mkstream")
                    || t.getAction().equals("chstream")
                    || t.getAction().equals("defcomp"))) {
              listener.getLogger().println("Last valid transaction " + tempTransaction);
              isTransLatestThanBuild = true;
            } else {
              serverPaths.addAll(t.getAffectedPaths());
            }
          }
        }
        if (serverPaths.size() > 0) {
          if (changesMatchFilter(serverPaths, pollingFilters)) {
            isTransLatestThanBuild = true;
            listener.getLogger().println("Last valid transaction " + tempTransaction);
          }
        }
      } catch (IOException e) {
        // NOTE(review): message names getLatestTransaction, but this branch calls
        // History.getTransactionsRange — message text left as-is here.
        final String msg =
            "getLatestTransaction failed when checking the stream "
                + stream.getName()
                + " for changes with transaction type "
                + transactionTypes;
        listener.getLogger().println(msg);
        e.printStackTrace(listener.getLogger());
        logger.log(Level.WARNING, msg, e);
      }
      return isTransLatestThanBuild;
    }
  }

  /** True when any server path matches any wildcard filter (or when no filters are set). */
  public static boolean changesMatchFilter(
      Collection<String> serverPaths, Collection<String> filters) {
    if (CollectionUtils.isEmpty(filters)) {
      // No filters, so always a match.
      return true;
    }
    for (String path : serverPaths) {
      path = sanitizeSlashes(path);
      for (String filter : filters) {
        if (pathMatcher(path, filter)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Case-insensitive wildcard match of {@code path} against {@code wildcard}. */
  public static boolean pathMatcher(String path, String wildcard) {
    return FilenameUtils.wildcardMatch(path, wildcard, IOCase.INSENSITIVE);
  }

  // filterForPollSCM takes precedence over subPath when both are configured.
  private static Set<String> getListOfPollingFilters(String filterForPollSCM, String subPath) {
    if (StringUtils.isNotBlank(filterForPollSCM)) {
      return splitAndSanitizeFilters(filterForPollSCM);
    }
    return splitAndSanitizeFilters(subPath);
  }

  // Splits a comma-separated filter string, trimming whitespace and slashes.
  // Returns null for blank input; callers treat null as "no filters" via
  // CollectionUtils.isEmpty.
  private static Set<String> splitAndSanitizeFilters(String input) {
    if (StringUtils.isBlank(input)) {
      return null;
    }
    final char DELIMITER = ',';
    final String STRIP_CHARS = " \t\n\r/";
    String[] filters = StringUtils.split(sanitizeSlashes(input), DELIMITER);
    filters = StringUtils.stripAll(filters, STRIP_CHARS);
    return new HashSet<>(Arrays.asList(filters));
  }

  // Normalize Windows path separators to forward slashes.
  private static String sanitizeSlashes(String input) {
    return input.replace('\\', '/');
  }
}
# Forward this shim to the identically-named file under stdlib/irb/ext/.
shim_target = File.join('../../../stdlib/irb/ext', File.basename(__FILE__))
require_relative shim_target
// FIX: removed the leading "<reponame>..." scrape artifact, which is not
// JavaScript and made the module unparseable.
import express from 'express';
import planCreate from '../controller/planController/planCreate';
import planDelete from '../controller/planController/planDelete';
import planEdit from '../controller/planController/planEdit';
import planList from '../controller/planController/planList';
import { checkObjectId, checkPlanForm, checkedLogin } from '../middlewares';

// Router for trip-plan resources. Listing is public; create/edit/delete
// require a logged-in user, and id-based routes validate the ObjectId first.
const planRouter = express.Router();

planRouter.post('/', checkedLogin, checkPlanForm, planCreate);
planRouter.put('/:id', checkedLogin, checkObjectId, planEdit);
planRouter.delete('/:id', checkedLogin, checkObjectId, planDelete);
planRouter.get('/', planList);

export default planRouter;
#!/bin/bash
#: ◣ + O - Open generic URL
# the URL is built from env-var `XMCHORD_GENERIC_URL` with its sub-string
# `SELECTED_TEXT` replaced by the given text-selection

xdotool sleep 0.1

# Command name of the process owning the currently focused window.
focusApplication=\
$(cat /proc/"$(xdotool getwindowpid "$(xdotool getwindowfocus)")"/comm)

path_self="$( cd "$(dirname "$0")" >/dev/null 2>&1 || exit ; pwd -P )"
"$path_self"/utils/remove_unwanted_output.sh "$focusApplication"

xdotool key Ctrl+z # [$] keypress did remove selection: restore it
sleep 0.1
xdotool key Control_L+c # copy selected text
sleep 0.1
SELECTED_TEXT=$(xclip -o) # put into variable

# replace "SELECTED_TEXT" within URL preset from ENV var
GENERIC_URL="${XMCHORD_GENERIC_URL/SELECTED_TEXT/$SELECTED_TEXT}"
echo "$GENERIC_URL" | xclip -in -selection clipboard # put URL into clipboard

# focus or launch chromium
# FIX: was ">/dev/xnull" — not a device; the failed redirection made this test
# fail on every run, so the launch branch was always taken.
if pidof -s chromium-browse >/dev/null; then
  wmctrl -a Chromium
else
  me=$SUDO_USER
  # sudo -u "$me" nohup chromium-browser >/dev/null &
  sudo -u "$me" nohup chromium >/dev/null &
  unset me
fi

sleep 0.5
xdotool key Control_L+l # focus location field
sleep 0.1
xdotool key Control_L+v # paste
sleep 0.1
xdotool key Return # load
'''
This test case tests that the OpenCV python interface can detect a triangle
shape in an image using functions from the OpenCV CV component.
'''
import os

import cv2 as cv
import numpy as np

file = '/tmp/pic5.png'
if not os.path.exists(file):
    raise Exception('OpenCV: Failed to find image file (%s).' % file)

img = cv.imread(file)
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
thresh = cv.adaptiveThreshold(gray, 255, cv.ADAPTIVE_THRESH_MEAN_C,
                              cv.THRESH_BINARY, 3, 5)

# FIX: cv.findContours returns (image, contours, hierarchy) in OpenCV 3.x but
# only (contours, hierarchy) in 4.x; slicing the last two elements works with
# both versions (the original 3-tuple unpack crashes on OpenCV 4.x).
contours, hierarchy = cv.findContours(thresh, cv.RETR_LIST,
                                      cv.CHAIN_APPROX_SIMPLE)[-2:]

triangles = []
for cnt in contours:
    # Approximate the contour; a 3-vertex polygon counts as a triangle.
    approx = cv.approxPolyDP(cnt, 0.01 * cv.arcLength(cnt, True), True)
    if len(approx) == 3:
        triangles.append(approx)

print('number of triangles: ', len(triangles))
if len(triangles) < 1:
    raise Exception('OpenCV: Failed to find any triangle in the picture')
/*
 * Copyright (c) 2010, <NAME>
 * All rights reserved.
 *
 * Made available under the BSD license - see the LICENSE file
 */
// FIX: removed the leading "<gh_stars>0" scrape artifact, which is not Java
// and made the file uncompilable.

package sim.components;

import java.util.LinkedList;
import java.util.Set;

import javax.swing.JPanel;

import sim.Config;
import sim.traffic.TraceEvent;

/*
 * A node represents a single processor and router element (including links between)
 * in a network. Outgoing link components are updated() and copied() here
 */
public class Node implements Component {

    private int m_id;                   // unique node identifier
    private Router m_router;            // routing element
    private Processor m_processor;      // traffic source/sink
    private ProcessorLink m_procRouter; // processor -> router link
    private ProcessorLink m_routerProc; // router -> processor link
    private RouterLink[] m_outputLinks; // outgoing links; populated by finishConnecting()

    public Node(int id, int numInputs, int numOutputs) {
        m_id = id;
        m_procRouter = new ProcessorLink(1);
        m_routerProc = new ProcessorLink(1);
        m_router = new Router(id, m_procRouter, m_routerProc, numInputs, numOutputs);
        m_processor = new Processor(id, m_routerProc, m_procRouter);
        m_outputLinks = null;
    }

    /** Advance every sub-component (and outgoing links) by one simulation step. */
    public void update() {
        m_processor.update();
        m_router.update();
        m_procRouter.update();
        m_routerProc.update();
        for (Link l : m_outputLinks)
            l.update();
        if (Config.debugMode()) {
            m_processor.updateStateConsole();
            m_router.updateStateConsole();
        }
    }

    /** Commit the state computed by update() in every sub-component. */
    public void copy() {
        m_processor.copy();
        m_router.copy();
        m_procRouter.copy();
        m_routerProc.copy();
        for (Link l : m_outputLinks)
            l.copy();
    }

    /** Reset all sub-components (and their debug consoles) to initial state. */
    public void reset() {
        m_processor.reset();
        m_processor.resetConsoles();
        m_router.reset();
        m_router.resetConsoles();
        m_procRouter.reset();
        m_routerProc.reset();
        for (Link l : m_outputLinks)
            l.reset();
    }

    /** Wire this node's router output port to another node's router input port. */
    public void connectTo(Node to, int fromPort, int toPort) {
        m_router.connectTo(to.getRouter(), fromPort, toPort);
    }

    /** Snapshot the router's outgoing links; call once after all connectTo() calls. */
    public void finishConnecting() {
        Set<RouterLink> links = m_router.outgoingLinks();
        m_outputLinks = links.toArray(new RouterLink[m_router.outgoingLinks().size()]);
    }

    public int getId() {
        return m_id;
    }

    public JPanel getRouterConsole() {
        return m_router.getConsole();
    }

    public JPanel getProcConsole() {
        return m_processor.getConsole();
    }

    public Router getRouter() {
        return m_router;
    }

    /** Hand a traffic trace to the processor for replay. */
    public void addTrace(LinkedList<TraceEvent> trace) {
        m_processor.addTrace(trace);
    }

    // NOTE(review): this OVERLOADS Object.equals(Object) rather than overriding it —
    // collections will not use it. Signature kept unchanged for existing callers.
    public boolean equals(Node node) {
        return getId() == node.getId();
    }

    /*public void setRouting(RoutingFunction routing) { m_router.setRoutingFunction(routing); }
    public int getOutputPort(int downstreamId) { return m_router.getOutputPort(downstreamId); }
    public int inDegree() { return m_router.inDegree(); }
    public int outDegree() { return m_router.outDegree(); }
    public Set<RouterLink> incomingLinks() { return m_router.incomingLinks(); }
    public Set<RouterLink> outgoingLinks() { return m_router.outgoingLinks(); }
    public Set<RouterLink> linkSet() { return m_router.linkSet(); }*/
}
<filename>litfx-controls/src/main/java/lit/litfx/controls/menus/LitRadialMenu.java /** * Below is the original source code license header from the original version * found in JFXtras Labs. * * RadialMenu.java * <p> * Copyright (c) 2011-2015, JFXtras * All rights reserved. * <p> * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the organization nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * <p> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * @author Birdasaur * heavily adapted From Mr. LoNee's awesome RadialMenu example. Source for original * prototype can be found in JFXtras-labs project. 
* https://github.com/JFXtras/jfxtras-labs */ package lit.litfx.controls.menus; import java.util.ArrayList; import java.util.List; import javafx.animation.Animation; import javafx.animation.FadeTransition; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.ParallelTransition; import javafx.animation.Timeline; import javafx.beans.property.BooleanProperty; import javafx.beans.property.DoubleProperty; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.event.EventHandler; import javafx.scene.Group; import javafx.scene.Node; import javafx.scene.effect.Effect; import javafx.scene.image.ImageView; import javafx.scene.input.MouseButton; import javafx.scene.input.MouseEvent; import javafx.scene.paint.Color; import javafx.scene.paint.Paint; import javafx.scene.shape.Circle; import javafx.scene.text.Font; import javafx.scene.text.Text; import javafx.util.Duration; public class LitRadialMenu extends Group implements EventHandler<MouseEvent>, ChangeListener<Object> { public enum CenterVisibility { ALWAYS, WITH_MENU, NEVER } public static double DEFAULT_STROKE_WIDTH = 1.0; public static double DEFAULT_FONTSIZE = 16; protected List<LitRadialMenuItem> items = new ArrayList<LitRadialMenuItem>(); protected DoubleProperty innerRadius; protected DoubleProperty radius; protected DoubleProperty offset; protected DoubleProperty initialAngle; protected DoubleProperty itemFitWidth; protected DoubleProperty menuItemSize; protected ObjectProperty<Paint> backgroundFill; protected ObjectProperty<Paint> backgroundMouseOnFill; protected ObjectProperty<Paint> strokeMouseOnFill; protected ObjectProperty<Paint> strokeFill; protected DoubleProperty strokeWidth; protected BooleanProperty strokeVisible; protected 
ObjectProperty<Paint> outlineStrokeMouseOnFill; protected ObjectProperty<Paint> outlineStrokeFill; protected DoubleProperty outlineStrokeWidth; protected BooleanProperty outlineStrokeVisible; protected ObjectProperty<Effect> outlineEffect; protected BooleanProperty clockwise; protected BooleanProperty labelsVisible = new SimpleBooleanProperty(true); protected BooleanProperty backgroundVisible; protected ObjectProperty<CenterVisibility> centerVisibility; protected ObjectProperty<Node> centerGraphic; protected String text; protected Text textNode; protected Circle centerStrokeShape; protected Group centerGroup; protected Group itemGroup; private boolean mouseOn = false; protected BooleanProperty mouseOnProperty = new SimpleBooleanProperty(mouseOn); private double lastInitialAngleValue; private double lastOffsetValue; private boolean allowRedraw = true; private BooleanProperty hideMenuOnItemClick = new SimpleBooleanProperty(false); public LitRadialMenu() { } public LitRadialMenu(final double initialAngle, final double innerRadius, final double radius, final double offset, final Paint bgFill, final Paint bgMouseOnFill, final Paint strokeFill, final Paint strokeMouseOnFill, final boolean clockwise, final CenterVisibility centerVisibility, final Node centerGraphic) { itemGroup = new Group(); getChildren().add(itemGroup); itemGroup.toFront(); itemFitWidth = new SimpleDoubleProperty(innerRadius); itemFitWidth.addListener((ov,t,t1) -> { setGraphicsFitWidth(ov.getValue().doubleValue()); }); menuItemSize = new SimpleDoubleProperty(innerRadius); menuItemSize.addListener((ov,t,t1) -> { setMenuItemSize(ov.getValue().doubleValue()); }); this.initialAngle = new SimpleDoubleProperty(initialAngle); this.initialAngle.addListener((ov,t,t1) -> { setInitialAngle(ov.getValue().doubleValue()); }); this.innerRadius = new SimpleDoubleProperty(innerRadius); this.strokeFill = new SimpleObjectProperty<>(strokeFill); this.strokeFill.addListener(this); strokeWidth = new 
SimpleDoubleProperty(DEFAULT_STROKE_WIDTH); strokeWidth.addListener(this); this.strokeMouseOnFill = new SimpleObjectProperty<>(strokeMouseOnFill); this.strokeMouseOnFill.addListener(this); strokeVisible = new SimpleBooleanProperty(true); strokeVisible.addListener(this); outlineStrokeFill = new SimpleObjectProperty<>(strokeFill); outlineStrokeFill.addListener(this); outlineStrokeWidth = new SimpleDoubleProperty(DEFAULT_STROKE_WIDTH); outlineStrokeWidth.addListener(this); outlineStrokeMouseOnFill = new SimpleObjectProperty<>(strokeMouseOnFill); outlineStrokeMouseOnFill.addListener(this); outlineStrokeVisible = new SimpleBooleanProperty(true); outlineStrokeVisible.addListener(this); outlineEffect = new SimpleObjectProperty<>(null); outlineEffect.addListener(this); this.radius = new SimpleDoubleProperty(radius); this.offset = new SimpleDoubleProperty(offset); this.clockwise = new SimpleBooleanProperty(clockwise); labelsVisible.addListener(this); backgroundFill = new SimpleObjectProperty<>(bgFill); backgroundFill.addListener(this); backgroundMouseOnFill = new SimpleObjectProperty<>(bgMouseOnFill); backgroundMouseOnFill.addListener(this); backgroundVisible = new SimpleBooleanProperty(true); this.centerVisibility = new SimpleObjectProperty<>(centerVisibility); centerStrokeShape = new Circle(innerRadius); centerStrokeShape.radiusProperty().bind(innerRadiusProperty()); centerStrokeShape.setStroke(strokeFill); centerStrokeShape.setStrokeWidth(strokeWidth.get()); centerStrokeShape.setFill(bgFill); this.centerVisibility.addListener(this); backgroundVisible.addListener(this); centerGroup = new Group(); centerGroup.getChildren().add(centerStrokeShape); centerGroup.setOnMouseEntered(event -> { mouseOn = true; mouseOnProperty.set(mouseOn); redraw(); }); centerGroup.setOnMouseExited(event -> { mouseOn = false; mouseOnProperty.set(mouseOn); redraw(); }); centerGroup.setOnMouseClicked(event -> { if(!event.isControlDown()) { final boolean visible = itemGroup.isVisible(); if (visible) 
{ hideRadialMenu(); } else { showRadialMenu(); } } event.consume(); }); textNode = new Text(); textNode.visibleProperty().bind(labelsVisible); textNode.setFont(new Font(DEFAULT_FONTSIZE)); textNode.setFill(Color.ALICEBLUE.deriveColor(1, 1, 1, 0.75)); centerGroup.getChildren().add(textNode); getChildren().add(centerGroup); centerGroup.toFront(); this.centerGraphic = new SimpleObjectProperty<>(centerGraphic); setCenterGraphic(centerGraphic); saveStateBeforeAnimation(); } public void setGraphicsFitWidth(double fitWidth) { Node centerNode = getCenterGraphic(); if(centerNode instanceof ImageView) { ImageView civ = (ImageView)centerNode; civ.setFitWidth(fitWidth); civ.setTranslateX(-fitWidth / 2.0); civ.setTranslateY(-fitWidth / 2.0); } items.stream().forEach(item -> { Node node = item.getGraphic(); if(node instanceof ImageView) { ImageView iv = (ImageView)node; iv.setFitWidth(fitWidth); } }); } public void setMenuItemSize(double menuItemSize) { items.stream().forEach(item -> item.setMenuSize(menuItemSize)); } public void setOnMenuItemMouseClicked( final EventHandler<? 
super MouseEvent> paramEventHandler) { for (final LitRadialMenuItem item : items) { item.setOnMouseClicked(paramEventHandler); } } public void setInitialAngle(final double angle) { initialAngle.set(angle); double angleOffset = initialAngle.get(); for (final LitRadialMenuItem item : items) { item.setStartAngle(angleOffset); angleOffset = angleOffset + item.getMenuSize(); } } public void addMenuItem(final LitRadialMenuItem item) { item.visibleProperty().bind(visibleProperty()); item.backgroundColorProperty().bind(backgroundFill); item.backgroundMouseOnColorProperty().bind(backgroundMouseOnFill); item.backgroundVisibleProperty().bind(backgroundVisible); item.innerRadiusProperty().bind(innerRadius); item.radiusProperty().bind(radius); item.offsetProperty().bind(offset); item.strokeMouseOnColorProperty().bind(strokeMouseOnFill); item.strokeColorProperty().bind(strokeFill); item.strokeWidthProperty().bind(strokeWidth); item.strokeVisibleProperty().bind(strokeVisible); item.outlineStrokeMouseOnColorProperty().bind(outlineStrokeMouseOnFill); item.outlineStrokeColorProperty().bind(outlineStrokeFill); item.outlineStrokeWidthProperty().bind(outlineStrokeWidth); item.outlineStrokeVisibleProperty().bind(outlineStrokeVisible); item.outlineEffectProperty().bind(outlineEffect); item.hideMenuOnItemClickProperty().bind(hideMenuOnItemClick); item.clockwiseProperty().bind(clockwise); items.add(item); itemGroup.getChildren().add(itemGroup.getChildren().size(), item); double angleOffset = initialAngle.get(); for (final LitRadialMenuItem it : items) { it.setStartAngle(angleOffset); angleOffset = angleOffset + item.getMenuSize(); } item.setOnMouseClicked(this); } public void removeMenuItem(final LitRadialMenuItem item) { items.remove(item); itemGroup.getChildren().remove(item); item.visibleProperty().unbind(); item.backgroundColorProperty().unbind(); item.backgroundMouseOnColorProperty().unbind(); item.innerRadiusProperty().unbind(); item.radiusProperty().unbind(); 
item.offsetProperty().unbind(); item.clockwiseProperty().unbind(); item.backgroundVisibleProperty().unbind(); item.strokeMouseOnColorProperty().unbind(); item.strokeColorProperty().unbind(); item.strokeWidthProperty().unbind(); item.strokeVisibleProperty().unbind(); item.outlineStrokeMouseOnColorProperty().unbind(); item.outlineStrokeColorProperty().unbind(); item.outlineStrokeWidthProperty().unbind(); item.outlineStrokeVisibleProperty().unbind(); item.outlineEffectProperty().unbind(); item.hideMenuOnItemClickProperty().unbind(); item.removeEventHandler(MouseEvent.MOUSE_CLICKED, this); } public void removeMenuItem(final int itemIndex) { final LitRadialMenuItem item = items.get(itemIndex); removeMenuItem(item); } @Override public void handle(final MouseEvent event) { if (event.getButton() == MouseButton.PRIMARY) { final LitRadialMenuItem item = (LitRadialMenuItem) event.getSource(); item.setSelected(!item.isSelected()); for (final LitRadialMenuItem it : items) { if (it != item) { it.setSelected(false); } } if(hideMenuOnItemClick.get()) { if (!item.isSelected()) { hideRadialMenu(); } } event.consume(); } } public void hideRadialMenu() { saveStateBeforeAnimation(); final List<Animation> anim = new ArrayList<>(); final FadeTransition fadeItemGroup = new FadeTransition(Duration.millis(300), itemGroup); fadeItemGroup.setFromValue(1); fadeItemGroup.setToValue(0); fadeItemGroup.setOnFinished(event -> { itemGroup.setVisible(false); }); anim.add(fadeItemGroup); if (centerVisibility.get() == CenterVisibility.WITH_MENU) { final FadeTransition fadeCenter = new FadeTransition(Duration.millis(300), centerGroup); fadeCenter.setFromValue(1); fadeCenter.setToValue(0); fadeCenter.setOnFinished(event -> { centerGroup.setVisible(false); }); anim.add(fadeCenter); } final ParallelTransition transition = new ParallelTransition(anim.toArray(new Animation[]{})); transition.play(); } public void showRadialMenu() { final List<Animation> animationList = new ArrayList<>(); final FadeTransition 
fade = new FadeTransition(Duration.millis(400), itemGroup); fade.setFromValue(0); fade.setToValue(1.0); animationList.add(fade); final Animation offsetAnimation = new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(offsetProperty(), 0)), new KeyFrame(Duration.millis(300), new KeyValue(offsetProperty(), lastOffsetValue))); animationList.add(offsetAnimation); final Animation angle = new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(initialAngleProperty(), lastInitialAngleValue + 20)), new KeyFrame(Duration.millis(300), new KeyValue(initialAngleProperty(),lastInitialAngleValue))); animationList.add(angle); if (centerVisibility.get() == CenterVisibility.WITH_MENU) { final FadeTransition fadeCenter = new FadeTransition(Duration.millis(300), centerGroup); fadeCenter.setFromValue(0); fadeCenter.setToValue(1); animationList.add(fadeCenter); centerGroup.setVisible(true); } final ParallelTransition transition = new ParallelTransition(animationList.toArray(new Animation[]{})); itemGroup.setVisible(true); transition.play(); } private void saveStateBeforeAnimation() { lastInitialAngleValue = initialAngle.get(); lastOffsetValue = offset.get(); } @Override public void changed(final ObservableValue<? extends Object> arg0, final Object arg1, final Object arg2) { if(isAllowRedraw()) redraw(); } public void requestDraw() { redraw(); } private void redraw() { if (centerVisibility.get() == CenterVisibility.NEVER) { centerGroup.visibleProperty().set(false); } else if (centerVisibility.get() == CenterVisibility.ALWAYS) { centerGroup.visibleProperty().set(true); } else { centerGroup.visibleProperty().set(itemGroup.isVisible()); } centerStrokeShape.setFill(backgroundVisible.get() ? (mouseOn && backgroundMouseOnFill.get() != null ? backgroundMouseOnFill .get() : backgroundFill.get()) : Color.TRANSPARENT); centerStrokeShape.setStroke(strokeVisible.get() ? (mouseOn && strokeMouseOnFill.get() != null ? 
strokeMouseOnFill .get() : strokeFill.get()) : Color.TRANSPARENT); centerStrokeShape.setStrokeWidth(strokeWidth.get()); double width = textNode.getLayoutBounds().getMaxX() - textNode.getLayoutBounds().getMinX(); double height = textNode.getLayoutBounds().getMaxY() - textNode.getLayoutBounds().getMinY(); double graphicHeight = getCenterGraphic().getLayoutBounds().getMaxY() - getCenterGraphic().getLayoutBounds().getMinY(); textNode.setTranslateX(-width/2.0); textNode.setTranslateY(-(height/2.0 + graphicHeight/2.0)); } //<editor-fold defaultstate="collapsed" desc="Properties"> public Group getCenterGroup() { return centerGroup; } public BooleanProperty getMouseOnProperty() { return mouseOnProperty; } public Paint getBackgroundFill() { return backgroundFill.get(); } public void setBackgroundFill(final Paint backgroundFill) { this.backgroundFill.set(backgroundFill); } public ObjectProperty<Paint> backgroundFillProperty() { return backgroundFill; } public Paint getBackgroundMouseOnFill() { return backgroundMouseOnFill.get(); } public void setBackgroundMouseOnFill(final Paint backgroundMouseOnFill) { this.backgroundMouseOnFill.set(backgroundMouseOnFill); } public ObjectProperty<Paint> backgroundMouseOnFillProperty() { return backgroundMouseOnFill; } public Paint getStrokeMouseOnFill() { return strokeMouseOnFill.get(); } public void setStrokeMouseOnFill(final Paint backgroundMouseOnFill) { strokeMouseOnFill.set(backgroundMouseOnFill); } public ObjectProperty<Paint> strokeMouseOnFillProperty() { return strokeMouseOnFill; } public Paint getStrokeFill() { return strokeFill.get(); } public void setStrokeFill(final Paint strokeFill) { this.strokeFill.set(strokeFill); } public ObjectProperty<Paint> strokeFillProperty() { return strokeFill; } public double getStrokeWidth() { return strokeWidth.get(); } public void setStrokeWidth(final double width) { strokeWidth.set(width); } public DoubleProperty strokeWidthProperty() { return strokeWidth; } public BooleanProperty 
strokeVisibleProperty() { return strokeVisible; } public boolean isStrokeVisible() { return strokeVisible.get(); } public Paint getOutlineStrokeMouseOnFill() { return outlineStrokeMouseOnFill.get(); } public void setOutlineStrokeMouseOnFill(final Paint backgroundMouseOnFill) { outlineStrokeMouseOnFill.set(backgroundMouseOnFill); } public ObjectProperty<Paint> outlineStrokeMouseOnFillProperty() { return outlineStrokeMouseOnFill; } public Paint getOutlineStrokeFill() { return outlineStrokeFill.get(); } public void setOutlineStrokeFill(final Paint strokeFill) { outlineStrokeFill.set(strokeFill); } public ObjectProperty<Paint> outlineStrokeFillProperty() { return outlineStrokeFill; } public double getOutlineStrokeWidth() { return outlineStrokeWidth.get(); } public void setOutlineStrokeWidth(final double width) { outlineStrokeWidth.set(width); } public DoubleProperty outlineStrokeWidthProperty() { return outlineStrokeWidth; } public BooleanProperty outlineStrokeVisibleProperty() { return outlineStrokeVisible; } public boolean isOutlinStrokeVisible() { return outlineStrokeVisible.get(); } public Effect getOutlineEffect() { return outlineEffect.get(); } public void setOutlineEffect(final Effect outlineEffect) { this.outlineEffect.set(outlineEffect); } public ObjectProperty<Effect> outlineEffectProperty() { return outlineEffect; } public Node getCenterGraphic() { return centerGraphic.get(); } public void setCenterGraphic(final Node graphic) { if (centerGraphic.get() != null) { centerGroup.getChildren().remove(centerGraphic.get()); } if (graphic != null) { //CenterStrokeShape should always be zero... 
textNode always 2 centerGroup.getChildren().add(1, graphic); } centerGraphic.set(graphic); } public ObjectProperty<Node> centerGraphicProperty() { return centerGraphic; } public double getInitialAngle() { return initialAngle.get(); } public DoubleProperty initialAngleProperty() { return initialAngle; } public double getItemFitWidth() { return itemFitWidth.get(); } public DoubleProperty itemFitWidthProperty() { return itemFitWidth; } public double getMenuItemSize() { return menuItemSize.get(); } public DoubleProperty menuItemSizeProperty() { return menuItemSize; } public double getInnerRadius() { return innerRadius.get(); } public DoubleProperty innerRadiusProperty() { return innerRadius; } public double getRadius() { return radius.get(); } public DoubleProperty radiusProperty() { return radius; } public double getOffset() { return offset.get(); } public DoubleProperty offsetProperty() { return offset; } public boolean isClockwise() { return clockwise.get(); } public BooleanProperty clockwiseProperty() { return clockwise; } public BooleanProperty labelsVisibleProperty() { return labelsVisible; } public boolean isBackgroundVisible() { return backgroundVisible.get(); } public BooleanProperty backgroundVisibleProperty() { return backgroundVisible; } public ObjectProperty<CenterVisibility> centerVisibilityProperty() { return centerVisibility; } public CenterVisibility getCenterVisibility() { return centerVisibility.get(); } public void setCenterVisibility(final CenterVisibility visibility) { centerVisibility.set(visibility); } public void setInnerRadius(final double radius) { innerRadius.set(radius); } public void setRadius(final double radius) { this.radius.set(radius); } public void setOffset(final double offset) { this.offset.set(offset); } public void setBackgroundVisible(final boolean visible) { backgroundVisible.set(visible); } public void setStrokeVisible(final boolean visible) { strokeVisible.set(visible); } public void setBackgroundColor(final Paint color) { 
backgroundFill.set(color); } public void setBackgroundMouseOnColor(final Paint color) { backgroundMouseOnFill.set(color); } public void setStrokeMouseOnColor(final Paint color) { strokeMouseOnFill.set(color); } public void setStrokeColor(final Paint color) { strokeFill.set(color); } public void setClockwise(final boolean clockwise) { this.clockwise.set(clockwise); } /** * @return the allowRedraw */ public boolean isAllowRedraw() { return allowRedraw; } /** * @param allowRedraw the allowRedraw to set */ public void setAllowRedraw(boolean allowRedraw) { this.allowRedraw = allowRedraw; } public void setText(final String text) { this.text = text; textNode.setText(text); redraw(); } public String getText() { return text; } //</editor-fold> /** * @return the hideMenuOnItemClick */ public boolean isHideMenuOnItemClick() { return hideMenuOnItemClick.get(); } /** * @param hideMenuOnItemClick the hideMenuOnItemClick to set */ public void setHideMenuOnItemClick(boolean hideMenuOnItemClick) { this.hideMenuOnItemClick.set(hideMenuOnItemClick); } }
from typing import List


def maxRobbery(nums: List[int]) -> int:
    """Return the max sum of non-adjacent houses arranged in a circle.

    House Robber II: the first and last houses are adjacent, so the answer
    is the better of robbing the segment [0..n-2] or the segment [1..n-1].

    Args:
        nums: Amount of money in each house (circular arrangement).

    Returns:
        Maximum total that can be robbed without robbing two adjacent
        houses; 0 for an empty list.
    """
    if not nums:
        return 0
    if len(nums) == 1:
        return nums[0]

    def rob_line(start: int, end: int) -> int:
        # Standard linear house-robber DP over the INCLUSIVE range [start, end].
        prev, curr = 0, 0
        for i in range(start, end + 1):
            prev, curr = curr, max(curr, prev + nums[i])
        return curr

    # BUG FIX: the previous version used exclusive upper bounds
    # (range(start, end) with end = len-2 / len-1), silently dropping the
    # last house of each candidate segment, e.g. [1, 2, 3, 1] -> 3 instead
    # of the correct 4. The calls below use inclusive end indices.
    return max(rob_line(0, len(nums) - 2), rob_line(1, len(nums) - 1))
require "spreadsheet"

# Builds renewal-report spreadsheets of group ids for assisted or
# unassisted individual-market policies.
class RenewalReports
  def initialize(report_type = 'assisted')
    @report_type = report_type
  end

  # Delegates report serialization to the canonical renewal serializer.
  def process
    CanonicalVocabulary::RenewalSerializer.new(@report_type).serialize("#{@report_type}_groups.xls")
  end

  # Collects unique family ids of renewal-eligible policies that pass the
  # validity check, then writes them to "<report_type>_groups.xls".
  def generate_groupids
    market_scope = @report_type == 'assisted' ? 'insurance_assisted' : 'unassisted'
    eligible_policies = Policy.individual_market.send(market_scope).select { |pol| pol.active_and_renewal_eligible? }
    family_ids = eligible_policies.map(&:family_id).uniq.compact
    renewable_ids = family_ids.select { |fid| valid_family?(fid) }
    generate_spreadsheet(renewable_ids, "#{@report_type}_groups.xls")
  end

  # Writes one group id per row into an .xls workbook under Rails.root.
  def generate_spreadsheet(group_ids, file)
    puts group_ids.count
    workbook = Spreadsheet::Workbook.new
    sheet = workbook.create_worksheet :name => 'ids'
    group_ids.each_with_index do |group_id, row|
      sheet.row(row).concat [group_id.to_s]
    end
    workbook.write "#{Rails.root.to_s}/#{file}"
  end

  private

  # A family qualifies when it exists and every person has an authority
  # member assigned (no member with a blank authority_member).
  def valid_family?(group_id)
    family = Family.find(group_id)
    return false if family.nil?
    family.people.detect { |person| person.authority_member.blank? }.nil?
  end
end
// <reponame>timburks/gnostic-grpc
// Copyright 2019 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// descriptor_renderer generates a FileDescriptorSet from a gnostic output file.
package generator

import (
	"errors"
	"go/format"
	"path/filepath"
	"strings"

	"github.com/golang/protobuf/proto"
	openapiv3 "github.com/googleapis/gnostic/OpenAPIv3"
	plugins "github.com/googleapis/gnostic/plugins"
	surface "github.com/googleapis/gnostic/surface"
)

// This is the main function for the code generation plugin.
//
// It reads the plugin request from the gnostic environment, derives a proto
// package name from the source file name, runs the gRPC feature checker on
// the OpenAPI v3 document (if present), and renders .proto output from the
// surface model. Note: env.RespondAndExit* calls terminate the process.
func RunProtoGenerator() {
	env, err := plugins.NewEnvironment()
	env.RespondAndExitIfError(err)
	fileName := env.Request.SourceName
	// Strip ALL extensions (e.g. "api.yaml.pb" -> "api"), not just the last one.
	for {
		extension := filepath.Ext(fileName)
		if extension == "" {
			break
		}
		fileName = fileName[0 : len(fileName)-len(extension)]
	}
	packageName, err := resolvePackageName(fileName)
	env.RespondAndExitIfError(err)
	for _, model := range env.Request.Models {
		switch model.TypeUrl {
		case "openapi.v3.Document":
			// Lint the OpenAPI document for constructs the gRPC transcoder
			// cannot handle; collected messages go back in the response.
			openAPIdocument := &openapiv3.Document{}
			err := proto.Unmarshal(model.Value, openAPIdocument)
			if err == nil {
				featureChecker := NewGrpcChecker(openAPIdocument)
				env.Response.Messages = featureChecker.Run()
			}
		case "surface.v1.Model":
			surfaceModel := &surface.Model{}
			err = proto.Unmarshal(model.Value, surfaceModel)
			if err == nil {
				// Create the renderer.
				renderer := NewRenderer(surfaceModel)
				renderer.Package = packageName
				// Run the renderer to generate files and add them to the response object.
				err = renderer.Render(env.Response, packageName+".proto")
				env.RespondAndExitIfError(err)
				// Return with success.
				env.RespondAndExit()
			}
		}
	}
	// Reached only when no surface model was rendered above (RespondAndExit
	// would have terminated the process on success).
	err = errors.New("No generated code surface model is available.")
	env.RespondAndExitIfError(err)
}

// resolvePackageName converts a path to a valid package name or
// error if path can't be resolved or resolves to an invalid package name.
func resolvePackageName(p string) (string, error) {
	p, err := filepath.Abs(p)
	// Hyphens are illegal in Go/proto package identifiers.
	p = strings.Replace(p, "-", "_", -1)
	if err == nil {
		p = filepath.Base(p)
		// Validity check: the name must parse as a Go package clause.
		_, err = format.Source([]byte("package " + p))
	}
	if err != nil {
		return "", errors.New("invalid package name " + p)
	}
	return p, nil
}
<filename>node_modules/react-icons-kit/iconic/clipboard.js "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.clipboard = void 0; var clipboard = { "viewBox": "0 0 8 8", "children": [{ "name": "path", "attribs": { "d": "M3.5 0c-.28 0-.5.22-.5.5v.5h-.75c-.14 0-.25.11-.25.25v.75h3v-.75c0-.14-.11-.25-.25-.25h-.75v-.5c0-.28-.22-.5-.5-.5zm-3.25 1c-.14 0-.25.11-.25.25v6.5c0 .14.11.25.25.25h6.5c.14 0 .25-.11.25-.25v-6.5c0-.14-.11-.25-.25-.25h-.75v2h-5v-2h-.75z" } }] }; exports.clipboard = clipboard;
import { Component, OnInit } from '@angular/core';
import { NgForm } from '@angular/forms';
import { UserService } from '../user.service';
import { Router } from '@angular/router';
import { ViewChild, ElementRef} from '@angular/core';

@Component({
  selector: 'app-login',
  templateUrl: './login.component.html',
  styleUrls: ['./login.component.css']
})
export class LoginComponent implements OnInit {

  constructor(private userService:UserService,private route:Router) { }

  // Declared but never assigned in this file -- presumably set elsewhere
  // (e.g. by a geolocation callback); TODO confirm it is still used.
  geolocationPosition:any;

  ngOnInit() {
  }

  // Reference to the modal's close button so it can be dismissed
  // programmatically after a successful login.
  @ViewChild('closeAddExpenseModal') closeAddExpenseModal: ElementRef;

  // Template flags: `pass` is set when the response contains no "U"
  // (treated as a failed login here -- see note below); `error` is set
  // when the login call itself errors.
  pass:boolean=false;
  error:boolean=false;

  // Submits the login form. On a response containing "U" (assumed to be a
  // user id such as "U123" -- TODO confirm the backend contract), stores the
  // id in sessionStorage, closes the modal, and navigates away; otherwise
  // flags `pass` to surface a failure message in the template.
  submitLoginForm(form:NgForm){
    this.userService.login(form.value.email,form.value.password).subscribe(
      responseLoginStatus => {
        if (responseLoginStatus.search("U")==-1){
          this.pass=true;
        }
        else{
          sessionStorage.setItem('userId', responseLoginStatus);
          this.closeAddExpenseModal.nativeElement.click();
          // NOTE(review): '\home' is NOT an escape sequence -- it evaluates
          // to the relative path 'home'. If an absolute route was intended,
          // this should be '/home'; confirm against the route configuration.
          this.route.navigate(['\home']);
        }
      },
      responseLoginError => {
        this.error=true;
      },
      () => console.log("SubmitLoginForm method executed successfully")
    );
  }
}
"""Leetcode 109. Convert Sorted List to Binary Search Tree
Medium

URL: https://leetcode.com/problems/convert-sorted-list-to-binary-search-tree/

Given a singly linked list where elements are sorted in ascending order,
convert it to a height balanced BST.

For this problem, a height-balanced binary tree is defined as a binary tree
in which the depth of the two subtrees of every node never differ by more
than 1.

Example:
Given the sorted linked list: [-10,-3,0,5,9],
One possible answer is: [0,-3,9,-10,null,5], which represents the following
height balanced BST:
      0
     / \
   -3   9
   /   /
 -10  5
"""

from typing import List, Optional


# Definition for singly-linked list.
class ListNode(object):
    def __init__(self, val=0, next=None):
        self.val = val
        # BUG FIX: the `next` argument was previously ignored (always set to
        # None), so ListNode(1, tail) silently dropped the tail node.
        self.next = next


# Definition for a binary tree node.
class TreeNode(object):
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        # BUG FIX: the `left`/`right` arguments were previously ignored
        # (always set to None), so TreeNode(v, l, r) dropped both subtrees.
        self.left = left
        self.right = right


class SolutionConvert2ArrayTwoPointers(object):
    def _preorder(self, vals: List[int], left: int, right: int) -> Optional[TreeNode]:
        """Build a balanced BST from vals[left..right] (inclusive) recursively."""
        # Base case: empty range.
        if left > right:
            return None

        # Preorder traversal: root->left->right. Middle element becomes root
        # so the two halves stay balanced within one node.
        mid = left + (right - left) // 2
        root = TreeNode(vals[mid])
        root.left = self._preorder(vals, left, mid - 1)
        root.right = self._preorder(vals, mid + 1, right)
        return root

    def sortedListToBST(self, head: Optional[ListNode]) -> Optional[TreeNode]:
        """Copy the list into an array, then recurse on index ranges.

        Time complexity: O(n).
        Space complexity: O(logn) recursion (plus O(n) for the array copy).
        """
        if not head:
            return None

        # Convert linked list to array first for O(1) middle access.
        vals = []
        current = head
        while current:
            vals.append(current.val)
            current = current.next

        # Apply recursive preorder traversal with two pointer method.
        left, right = 0, len(vals) - 1
        return self._preorder(vals, left, right)


class SolutionPreorderSlowFastRecur(object):
    def _preorderSlowFast(
        self,
        left: Optional[ListNode],
        right: Optional[ListNode]
    ) -> Optional[TreeNode]:
        """Build a BST from the half-open node range [left, right)."""
        # Base case: empty range.
        if not left or left == right:
            return None

        # Run slow/fast pointers to locate the middle node of [left, right).
        fast, slow = left, left
        while fast.next != right and fast.next.next != right:
            slow = slow.next
            fast = fast.next.next

        # Preorder traversal: root->left->right. The middle node becomes the
        # root; the two sub-ranges exclude it.
        root = TreeNode(slow.val)
        root.left = self._preorderSlowFast(left, slow)
        root.right = self._preorderSlowFast(slow.next, right)
        return root

    def sortedListToBST(self, head: Optional[ListNode]) -> Optional[TreeNode]:
        """Recursive preorder build using slow/fast pointers (no array copy).

        Time complexity: O(nlogn), as for each node, traverse half nodes.
        Space complexity: O(logn).
        """
        # Edge case.
        if not head:
            return None

        left, right = head, None
        return self._preorderSlowFast(left, right)


class SolutionInorderRecur(object):
    def _inorder(self, left: int, right: int) -> Optional[TreeNode]:
        """Build a BST over index range [left, right], consuming self.current
        in list order (inorder of a BST visits sorted values in order)."""
        # Base case: empty range.
        if left > right:
            return None

        # Inorder traversal: left->root->right. Build the left subtree first,
        # then the node whose value is the next unconsumed list element.
        mid = left + (right - left) // 2
        root_left = self._inorder(left, mid - 1)

        root = TreeNode(self.current.val)
        root.left = root_left
        # Advance the shared cursor past the value just consumed.
        self.current = self.current.next

        root.right = self._inorder(mid + 1, right)
        return root

    def sortedListToBST(self, head: Optional[ListNode]) -> Optional[TreeNode]:
        """Single-pass inorder simulation over the list.

        Time complexity: O(n).
        Space complexity: O(logn).
        """
        # Edge case.
        if not head:
            return None

        # Get the size of linked list.
        current = head
        size = 0
        while current:
            size += 1
            current = current.next

        # Attach head to self for memorizing its current node.
        self.current = head

        left, right = 0, size - 1
        return self._inorder(left, right)


def main():
    # Input: [-10,-3,0,5,9].
    # Output:
    #       0
    #      / \
    #    -3   9
    #    /   /
    #  -10  5
    head = ListNode(-10)
    head.next = ListNode(-3)
    head.next.next = ListNode(0)
    head.next.next.next = ListNode(5)
    head.next.next.next.next = ListNode(9)

    root = SolutionConvert2ArrayTwoPointers().sortedListToBST(head)
    print(root.val, root.left.val, root.right.val,
          root.left.right.val, root.right.right.val)

    root = SolutionPreorderSlowFastRecur().sortedListToBST(head)
    print(root.val, root.left.val, root.right.val,
          root.left.right.val, root.right.right.val)

    root = SolutionInorderRecur().sortedListToBST(head)
    print(root.val, root.left.val, root.right.val,
          root.left.right.val, root.right.right.val)


if __name__ == '__main__':
    main()