text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
//src/app.ts
import express, { Application } from "express";
//importing our controller
import { Controller } from "./main.controller";
import bodyParser from "body-parser";
import cors from "cors";
/**
 * Application bootstrap: builds the Express instance, installs the global
 * middleware, and hands the app to the main controller so it can register
 * its routes. The configured Express app (not the wrapper) is exported.
 */
class App {
  // Underlying Express application, consumed by the server entry point.
  public app: Application;
  // Holds the controller so its route registrations stay alive.
  public mainController: Controller;

  constructor() {
    this.app = express();
    this.applyMiddleware();
    // The controller wires its endpoints onto the already-configured app.
    this.mainController = new Controller(this.app);
  }

  /** Global middleware: JSON + URL-encoded bodies (50mb cap) and CORS. */
  private applyMiddleware(): void {
    const { app } = this;
    app.use(bodyParser.json({ limit: "50mb" }));
    app.use(bodyParser.urlencoded({ limit: "50mb", extended: true }));
    app.use(cors());
  }
}

// Export the ready-to-listen Express app rather than the App wrapper.
export default new App().app;
|
import { ConfigParams } from 'pip-services-commons-node';
import { Descriptor } from 'pip-services-commons-node';
import { DirectClient } from 'pip-services-rpc-node';
import { PagingParams } from 'pip-services-commons-node';
import { DataPage } from 'pip-services-commons-node';
import { IFacetsClientV1 } from './IFacetsClientV1';
//import { IFacetsController } from 'pip-services-facets-node';
import { FacetV1 } from './FacetV1';
export class FacetsDirectClientV1 extends DirectClient<any> implements IFacetsClientV1 {
public constructor(config?: any) {
super();
this._dependencyResolver.put('controller', new Descriptor("pip-services-facets", "controller", "*", "*", "*"))
if (config != null)
this.configure(ConfigParams.fromValue(config));
}
public getFacetsByGroup(correlationId: string, group: string, paging: PagingParams,
callback: (err: any, page: DataPage<FacetV1>) => void): void {
let timing = this.instrument(correlationId, 'facets.get_facets_by_group');
this._controller.getFacetsByGroup(correlationId, group, paging, (err, page) => {
timing.endTiming();
callback(err, page);
});
}
public addFacet(correlationId: string, group: string, name: string,
callback: (err: any, item: FacetV1) => void): void {
let timing = this.instrument(correlationId, 'facets.add_facet');
this._controller.addFacet(correlationId, group, name, (err, item) => {
timing.endTiming();
callback(err, item);
});
}
public removeFacet(correlationId: string, group: string, name: string,
callback: (err: any, item: FacetV1) => void): void {
let timing = this.instrument(correlationId, 'facets.remove_facet');
this._controller.removeFacet(correlationId, group, name, (err, item) => {
timing.endTiming();
callback(err, item);
});
}
public deleteFacetsByGroup(correlationId: string, group: string,
callback?: (err: any) => void): void {
let timing = this.instrument(correlationId, 'facets.delete_facats_by_group');
this._controller.deleteFacetsByGroup(correlationId, group, (err) => {
timing.endTiming();
if (callback) callback(err);
});
}
public clear(correlationId: string, callback?: (err: any) => void): void {
let timing = this.instrument(correlationId, 'facets.clear');
this._controller.clear(correlationId, (err) => {
timing.endTiming();
if (callback) callback(err);
});
}
} |
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[89],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js&":
/*!**************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js& ***!
\**************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var vuex__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vuex */ "./node_modules/vuex/dist/vuex.esm.js");
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(source, true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(source).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
// Compiled <script> options of resources/assets/js/views/Invoice/Title.vue.
// NOTE(review): this is webpack build output — edit the .vue source file,
// not this bundle; any change here is overwritten on the next build.
/* harmony default export */ __webpack_exports__["default"] = ({
  // All props are optional; supplied by the parent invoice/quotation view.
  props: {
    logo: {
      required: false
    },
    type: {
      required: false
    },
    number: {
      required: false
    },
    quotation: {
      required: false
    },
    date: {
      required: false
    }
  },
  // No local reactive state.
  data: function data() {
    return {};
  },
  // Pulls companyInfo from the Vuex store for the header/VAT/phone rows.
  computed: _objectSpread({}, Object(vuex__WEBPACK_IMPORTED_MODULE_0__["mapGetters"])({
    companyInfo: 'companyInfo'
  })),
  created: function created() {}
});
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da&":
/*!******************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da& ***!
\******************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: " pr-2 pl-2 pb-0 mb-0" }, [
_c("div", { staticClass: "row" }, [
_c("div", { staticClass: "col-md-12" }, [
_c(
"div",
{ staticClass: "pull-left text-left" },
[_c("company-address")],
1
),
_vm._v(" "),
_c("div", { staticClass: "pull-right text-left" }, [
_c("address", { attrs: { id: "clients_address" } }, [
_c("table", [
_c("tr", [
_c("td", { staticClass: "pull-right" }),
_vm._v(" "),
_c("td"),
_vm._v(" "),
_c("td", [
_c("div", { staticClass: "form-group mb-0 mt-4 w-100" }, [
_c("h5", { staticClass: "mt-2 pull-right" }, [
_c("b", [_vm._v(_vm._s(_vm.type))]),
_vm._v(" "),
_c("hr")
])
])
])
]),
_vm._v(" "),
_c("tr", [
_c("td", { staticClass: "pull-right" }),
_vm._v(" "),
_c("td"),
_vm._v(" "),
_c("td", [_c("h5", [_vm._v(_vm._s(_vm.number))])])
]),
_vm._v(" "),
_c("tr", [
_vm._m(0),
_vm._v(" "),
_vm._m(1),
_vm._v(" "),
_c("td", [
_c("h5", [_vm._v(_vm._s(_vm.companyInfo.company_vat))])
])
]),
_vm._v(" "),
_c("tr", [
_vm._m(2),
_vm._v(" "),
_vm._m(3),
_vm._v(" "),
_c("td", [
_c("h5", [_vm._v(_vm._s(_vm.companyInfo.company_mobile))])
])
])
])
])
])
])
]),
_vm._v(" "),
_vm._m(4),
_vm._v(" "),
_c("div", { staticClass: "row" }, [
_c("div", { staticClass: "col-md-12" }, [
_c("div", { staticClass: "pull-left" }, [
_c("address", [
_vm._m(5),
_vm._v(" "),
_c("h5", [
_vm._v("Name : " + _vm._s(_vm.quotation.customer.Name) + " ")
]),
_vm._v(" "),
_c("h5", [
_vm._v(
"Contact Person : " +
_vm._s(_vm.quotation.customer.Contact_Person) +
" "
)
]),
_vm._v(" "),
_c("h5", [
_vm._v("Phone : " + _vm._s(_vm.quotation.customer.Telephone))
]),
_vm._v(" "),
_c("h5", [
_vm._v("Email : " + _vm._s(_vm.quotation.customer.EMail))
]),
_vm._v(" "),
_c("br"),
_vm._v(" "),
_c("p", [
_c("b", [_vm._v("Date : ")]),
_vm._v(" " + _vm._s(_vm.date))
])
])
]),
_vm._v(" "),
_c("div", { staticClass: "pull-right text-left" }, [
_c("table", [
_c("tr", [
_vm._m(6),
_vm._v(" "),
_vm._m(7),
_vm._v(" "),
_c("td", [_c("h5", [_vm._v(_vm._s(_vm.quotation.cargo.bl_no))])])
]),
_vm._v(" "),
_c("tr", [
_vm._m(8),
_vm._v(" "),
_vm._m(9),
_vm._v(" "),
_c("td", [
_c("h5", [_vm._v(_vm._s(_vm.quotation.cargo.cargo_name))])
])
]),
_vm._v(" "),
_c("tr", [
_c("td", { staticClass: "pull-right text-black" }, [
_vm._v("Vessel/DSTN")
]),
_vm._v(" "),
_vm._m(10),
_vm._v(" "),
_c("td", [
_c("h5", { staticClass: "text-grey" }, [
_vm._v(_vm._s(_vm.quotation.cargo.vessel_name))
])
])
]),
_vm._v(" "),
_c("tr", [
_c("td", { staticClass: "pull-right" }, [_vm._v("Quantity")]),
_vm._v(" "),
_vm._m(11),
_vm._v(" "),
_c("td", [
_c("h5", [_vm._v(_vm._s(_vm.quotation.cargo.cargo_qty))])
])
]),
_vm._v(" "),
_c("tr", [
_c("td", { staticClass: "pull-right" }, [_vm._v("Weight")]),
_vm._v(" "),
_vm._m(12),
_vm._v(" "),
_c("td", [
_c("h5", [_vm._v(_vm._s(_vm.quotation.cargo.cargo_weight))])
])
]),
_vm._v(" "),
_c("tr", [
_c("td", { staticClass: "pull-right" }, [_vm._v("Date")]),
_vm._v(" "),
_vm._m(13),
_vm._v(" "),
_c("td", [_c("h5", [_vm._v(_vm._s(_vm.date))])])
])
])
])
])
])
])
}
var staticRenderFns = [
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", { staticClass: "pull-right" }, [
_c("h5", [_vm._v("Tax Registration")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", { staticClass: "pull-right" }, [
_c("h5", [_vm._v("Telephone ")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: "row" }, [
_c("div", { staticClass: "col-sm-12" }, [_c("hr")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("h5", [_c("b", [_vm._v("To")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", { staticClass: "pull-right" }, [
_c("h5", [_vm._v("B/L NO")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", { staticClass: "pull-right" }, [
_c("h5", [_vm._v("Cargo ")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("td", [_c("h5", [_vm._v(" : ")])])
}
]
render._withStripped = true
/***/ }),
/***/ "./resources/assets/js/views/Invoice/Title.vue":
/*!*****************************************************!*\
!*** ./resources/assets/js/views/Invoice/Title.vue ***!
\*****************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Title.vue?vue&type=template&id=22d2a9da& */ "./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da&");
/* harmony import */ var _Title_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Title.vue?vue&type=script&lang=js& */ "./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
var component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Title_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__["render"],
_Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/assets/js/views/Invoice/Title.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ "./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js&":
/*!******************************************************************************!*\
!*** ./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js& ***!
\******************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Title_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib??ref--4-0!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Title.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Invoice/Title.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__["default"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Title_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da&":
/*!************************************************************************************!*\
!*** ./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da& ***!
\************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Title.vue?vue&type=template&id=22d2a9da& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Invoice/Title.vue?vue&type=template&id=22d2a9da&");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Title_vue_vue_type_template_id_22d2a9da___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/***/ })
}]); |
from typing import Optional, Tuple
from solo.server.statuses import Http4xx
from solo.server.definitions import HttpMethod
from solo.server.runtime.dependencies import Runtime
from ...server.request import Request
from .util import as_sorted_tuple
class RequestMethodPredicate:
    """View predicate that matches requests by their HTTP method.

    Predicates are constructed at
    ``solo.configurator.config.util.PredicateList.make()``.
    """

    def __init__(self, val: Tuple[HttpMethod, ...], config, raises: Optional[Http4xx] = None):
        """
        :param val: value passed to view_config/view_defaults
        :param config: configurator instance (not used by this predicate's body)
        :param raises: optional 4xx status associated with a mismatch
        """
        methods = as_sorted_tuple(val)
        # GET implies HEAD too
        if HttpMethod.GET in methods and HttpMethod.HEAD not in methods:
            methods = as_sorted_tuple(methods + (HttpMethod.HEAD,))
        self.val = methods
        self.raises = raises

    def text(self) -> str:
        """Human-readable form, also used as the predicate hash."""
        return 'request_method = %s' % ','.join(method.value for method in self.val)

    phash = text

    async def __call__(self, runtime: Runtime, request: Request) -> bool:
        """True when the incoming request's method is one of the accepted set."""
        return request.method in self.val
|
import React from 'react';
export default class LegalPage extends React.PureComponent {
render() {
return (
<div className="page building js-scroll" data-view="building">
<div className="building__gallery building__gallery--right">
<div className="gallery">
<div className="arrow gallery__arrow js-galleryArrow">
<svg viewBox="0 0 30 30">
<use
xmlnsXlink="http://www.w3.org/1999/xlink"
xlinkHref="#icon-arrow-down"
/>
</svg>
</div>
<ul className="gallery__nav">
<li className="gallery__nav__dot js-galleryDot">
<i>Image 1</i>
</li>
<li className="gallery__nav__dot js-galleryDot">
<i>Image 2</i>
</li>
<li className="gallery__nav__dot js-galleryDot">
<i>Image 3</i>
</li>
</ul>
<div className="gallery__images js-gallery">
<div className="gallery__image js-galleryImage gallery__image--scale">
<img
src="public/fit3.jpg"
srcSet="public/fit3.jpg 1000w, public/fit3.jpg 2000w"
alt
/>
</div>
<div className="gallery__image js-galleryImage ">
<div
className="image"
style={{
paddingTop: "111.11111111111%"
}}
>
<img
src="public/fit4.jpg"
srcSet="public/fit4.jpg 1000w, public/fit4.jpg 2000w"
alt
/>
</div>
</div>
<div className="gallery__image js-galleryImage">
<div
className="image"
style={{
paddingTop: "111.28775834658%"
}}
>
<img
src="public/fit5.jpg"
srcSet="public/fit5.jpg 1000w, public/fit5.jpg 2000w"
alt
/>
</div>
</div>
</div>
</div>
</div>
<div className="building__content building__content--left">
<div className="building__logo building__logo--left">
<img
className="is-loading"
alt="PunkBodies"
src="public/logo2.png"
width={109}
height={34}
/>
</div>
<div className="building__sections js-scroll">
<div className="building__section">
<h2 className="heading--2">Overview</h2>
<ul className="building__floors">
<li>
<a href>Etherscan</a>
</li>
<li>
<a href>License</a>
</li>
<li>
<a href>Terms & Conditions</a>
</li>
<li>
<a href>Privacy Policy</a>
</li>
</ul>
<div className="building__caption">select to download</div>
<h3 className="heading--2">Licensing</h3>
<div className="arrow arrow--building js-arrow">
<svg
width={24}
height={24}
xmlns="http://www.w3.org/2000/svg"
fillRule="evenodd"
clipRule="evenodd"
>
<path d="M12 0c6.623 0 12 5.377 12 12s-5.377 12-12 12-12-5.377-12-12 5.377-12 12-12zm0 1c6.071 0 11 4.929 11 11s-4.929 11-11 11-11-4.929-11-11 4.929-11 11-11zm5.247 8l-5.247 6.44-5.263-6.44-.737.678 6 7.322 6-7.335-.753-.665z" />
</svg>
</div>
</div>
<div className="building__section">
<div className="building__features">
<h3 className="heading--3">STRUCTURE</h3>
<p className="paragraph">
This is the first condition blah blah.
</p>
<h3 className="heading--3">TERMS</h3>
<p className="paragraph">
BLah blah blah blah
<br />
BVLah ahfhd
</p>
<h3 className="heading--3">NOTES</h3>
<p className="paragraph">dfsdfdfasd blah blah</p>
</div>
<div className="building__caption">Back to Top</div>
<div className="arrow arrow--back arrow--dark js-arrowBack">
<svg
width={24}
height={24}
xmlns="http://www.w3.org/2000/svg"
fillRule="evenodd"
clipRule="evenodd"
>
<path d="M12 0c6.623 0 12 5.377 12 12s-5.377 12-12 12-12-5.377-12-12 5.377-12 12-12zm0 1c6.071 0 11 4.929 11 11s-4.929 11-11 11-11-4.929-11-11 4.929-11 11-11zm5.247 8l-5.247 6.44-5.263-6.44-.737.678 6 7.322 6-7.335-.753-.665z" />
</svg>
</div>
</div>
</div>
</div>
</div>
);
}
}
LegalPage.propTypes = {
};
|
def get_adc_memory_address(channel):
    """Return the register address of an ADC channel as a hex string.

    :param channel: one of "_VSYS_ADC1", "_VSYS_ADC0", "_TS_ADC1", "_TS_ADC0"
    :returns: the address formatted by ``hex()``, e.g. "0x21"
    :raises ValueError: if ``channel`` is not a known ADC channel
    """
    adc_channels = {
        "_VSYS_ADC1": 0x1F,
        "_VSYS_ADC0": 0x20,
        "_TS_ADC1": 0x21,
        "_TS_ADC0": 0x22,
    }
    try:
        return hex(adc_channels[channel])
    except KeyError:
        # Name the offending channel so callers can spot typos immediately.
        raise ValueError("Invalid ADC channel: %r" % (channel,)) from None
package types
import (
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)
var (
ErrInvalid = sdkerrors.Register(ModuleName, 1, "error invalid")
ErrUnknownAccountAddress = sdkerrors.Register(ModuleName, 2, "account address does not exist")
ErrOutOfIssuance = sdkerrors.Register(ModuleName, 3, "mining reward reaches the issuance limit")
ErrWithdrawAmountNotPositive = sdkerrors.Register(ModuleName, 4, "withdraw amount is not positive")
ErrMissingNodeAddress = sdkerrors.Register(ModuleName, 5, "missing node address")
ErrMissingOwnerAddress = sdkerrors.Register(ModuleName, 6, "missing owner address")
ErrInsufficientMatureTotal = sdkerrors.Register(ModuleName, 7, "insufficient mature total")
ErrInsufficientFoundationAccBalance = sdkerrors.Register(ModuleName, 8, "insufficient foundation account balance")
ErrInsufficientUnissuedPrePayBalance = sdkerrors.Register(ModuleName, 9, "insufficient unissued prepay balance")
ErrInitialUOzonePrice = sdkerrors.Register(ModuleName, 10, "initial uOzone price must be positive")
ErrNotTheOwner = sdkerrors.Register(ModuleName, 11, "not the owner of the node")
ErrMatureEpoch = sdkerrors.Register(ModuleName, 12, "the value of epoch must be positive and greater than its previous one")
ErrEmptyFromAddr = sdkerrors.Register(ModuleName, 13, "missing from address")
ErrEmptyReporterAddr = sdkerrors.Register(ModuleName, 14, "missing reporter address")
ErrEmptyNodesVolume = sdkerrors.Register(ModuleName, 15, "nodes volume list empty")
ErrEpochNotPositive = sdkerrors.Register(ModuleName, 16, "report epoch is not positive")
ErrEmptyReportReference = sdkerrors.Register(ModuleName, 17, "missing report reference")
ErrEmptyReporterOwnerAddr = sdkerrors.Register(ModuleName, 18, "missing reporter owner address")
ErrNegativeVolume = sdkerrors.Register(ModuleName, 19, "report volume is negative")
)
|
# Compare A and B (defined elsewhere) and report the relationship.
# Bug fix: the original printed "B is greater than A" when A == B.
if A > B:
    print("A is greater than B")
elif A < B:
    print("B is greater than A")
else:
    print("A and B are equal")
#include <iostream>
#include <string>
#include <vector>
using namespace std;
// Counts the distinct subsequences of `str`, the empty subsequence included.
// Recurrence: dp[i] = 2 * dp[i-1], minus dp[prev-1] when the current
// character last appeared at 1-based position `prev` — doubling counts each
// old subsequence with and without the new character, and the subtraction
// removes the duplicates already produced at that previous occurrence.
//
// Bug fix: the original only corrected for *adjacent* repeats
// (str[i-2] == str[i-1]), which over-counts non-adjacent ones,
// e.g. "aba" -> 8 instead of the correct 7.
int distinct_subsequences(string str) {
    int len = str.length();
    // 64-bit intermediates: dp grows like 2^len and overflows int quickly.
    // (Return type stays int to preserve the existing interface.)
    vector<long long> dp(len + 1);
    dp[0] = 1;  // only the empty subsequence
    vector<int> last(256, 0);  // 1-based previous position per byte; 0 = unseen
    for (int i = 1; i <= len; i++) {
        dp[i] = 2 * dp[i - 1];
        unsigned char c = static_cast<unsigned char>(str[i - 1]);
        if (last[c] != 0) {
            dp[i] -= dp[last[c] - 1];
        }
        last[c] = i;
    }
    return static_cast<int>(dp[len]);
}
int main()
{
string str = "ABCDE";
cout << distinct_subsequences(str) << endl;
return 0;
} |
<reponame>ItsYoungDaddy/Templar-Cosmetics-Beta<gh_stars>10-100
package tae.cosmetics.mods;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.Date;

import net.minecraft.network.play.server.SPacketChat;
import net.minecraftforge.fml.common.eventhandler.EventPriority;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;

import tae.cosmetics.ColorCode;
import tae.cosmetics.settings.Setting;
import tae.packetevent.PacketEvent;
/**
 * Chat timestamp mod: intercepts incoming chat packets and re-sends the
 * message locally with a configurable, colour-coded "[time]" prefix.
 *
 * Modernized to java.time.LocalDateTime (the file already uses java.time),
 * removing the deprecated java.util.Date getters and the
 * {@code @SuppressWarnings("deprecation")} they required.
 */
public class TimeStampMod extends BaseMod {

    // User-facing toggles; the string keys double as persisted setting names.
    public static Setting<Boolean> enabled = new Setting<>("TSEnabled", false);
    public static Setting<Boolean> hour24 = new Setting<>("TS24 Hours", true);
    public static Setting<Boolean> date = new Setting<>("TSDate", false);
    public static Setting<Boolean> bold = new Setting<>("TSBold", false);
    public static Setting<Boolean> italic = new Setting<>("TSItalic", false);
    public static Setting<Boolean> underline = new Setting<>("TSUnderline", false);
    public static Setting<ColorCode> code = new Setting<>("TSColor Code", ColorCode.WHITE);

    /**
     * Cancels the vanilla chat packet and re-emits the same formatted text
     * with the timestamp prefix. LOW priority so other listeners see the
     * packet before it is cancelled here.
     */
    @SubscribeEvent(priority = EventPriority.LOW)
    public void getChatEvent(PacketEvent.Incoming event) {
        if (enabled.getValue() && event.getPacket() instanceof SPacketChat) {
            SPacketChat packet = (SPacketChat) event.getPacket();
            sendMessage(getFormattedTimeAsString() + " " + packet.getChatComponent().getFormattedText());
            event.setCanceled(true);
        }
    }

    /**
     * Builds the prefix, e.g. "&f[<25/12/2020> 09:05:07]" followed by a
     * colour reset, honouring the colour/style/24-hour/date settings.
     */
    public static String getFormattedTimeAsString() {
        LocalDateTime now = LocalDateTime.now();
        int hours = now.getHour();
        int minutes = now.getMinute();
        int seconds = now.getSecond();
        int year = now.getYear();
        int month = now.getMonthValue();
        int day = now.getDayOfMonth();

        // Start with the configured colour, then append any text styles.
        String time = code.getValue().getCode();
        if (bold.getValue()) {
            time += ColorCode.BOLD.getCode();
        }
        if (italic.getValue()) {
            time += ColorCode.ITALIC.getCode();
        }
        if (underline.getValue()) {
            time += ColorCode.UNDERLINE.getCode();
        }
        time += "[";
        if (date.getValue()) {
            time += "<" + day + "/";
            time += month + "/";
            time += year + "> ";
        }
        String ampm = "";
        boolean addampm = false;
        if (hour24.getValue()) {
            if (hours < 10) time += "0";  // zero-pad in 24-hour mode
            time += hours + ":";
        } else {
            if (hours < 12) {
                ampm = "AM";
            } else {
                ampm = "PM";
            }
            int hour = hours % 12;
            if (hour == 0) hour = 12;  // midnight/noon display as 12, not 0
            time += hour + ":";
            addampm = true;
        }
        if (minutes < 10) time += "0";
        time += minutes + ":";
        if (seconds < 10) time += "0";
        time += seconds;
        if (addampm) {
            time += ampm;
        }
        time += "]" + ColorCode.RESET.getCode();
        return time;
    }
}
|
#!/bin/sh
# Start the HDFS DataNode daemon in the background.
${HADOOP_HOME}/bin/hdfs --daemon start datanode
# Keep this script (and hence the container) in the foreground by following
# the DataNode log; awk echoes every line and exits as soon as a FATAL line
# appears, so a fatal DataNode error terminates the container.
tail -F ${HADOOP_HOME}/logs/hadoop-$(whoami)-datanode-$(hostname).log | awk '{print} /FATAL/{exit}'
<reponame>ustream/terraform-provider-calico-v3
package hclwrite
import (
"github.com/hashicorp/hcl2/hcl"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/zclconf/go-cty/cty"
)
// Body represents the content of an HCL body: the set of attribute/block
// items it contains plus surrounding non-semantic tokens, all tracked as
// nodes in the shared syntax tree.
type Body struct {
	inTree

	// items is the set of nodes whose content is an attribute or block.
	items nodeSet

	// indentLevel is the number of spaces that should appear at the start
	// of lines added within this body.
	indentLevel int
}

// appendItem appends the given content to the body's children and records
// it as an item, returning the wrapping node.
func (b *Body) appendItem(c nodeContent) *node {
	nn := b.children.Append(c)
	b.items.Add(nn)
	return nn
}

// appendItemNode attaches an existing node (which must not already belong
// to another tree) to this body and records it as an item.
func (b *Body) appendItemNode(nn *node) *node {
	nn.assertUnattached()
	b.children.AppendNode(nn)
	b.items.Add(nn)
	return nn
}

// AppendUnstructuredTokens appends raw tokens (whitespace, comments, etc.)
// that are not tracked in the item set.
func (b *Body) AppendUnstructuredTokens(ts Tokens) {
	b.inTree.children.Append(ts)
}

// GetAttribute returns the attribute from the body that has the given name,
// or returns nil if there is currently no matching attribute.
func (b *Body) GetAttribute(name string) *Attribute {
	// Linear scan: only item nodes whose content is an *Attribute qualify.
	for n := range b.items {
		if attr, isAttr := n.content.(*Attribute); isAttr {
			nameObj := attr.name.content.(*identifier)
			if nameObj.hasName(name) {
				// We've found it!
				return attr
			}
		}
	}
	return nil
}
// SetAttributeValue either replaces the expression of an existing attribute
// of the given name or adds a new attribute definition to the end of the block.
//
// The value is given as a cty.Value, and must therefore be a literal. To set
// a variable reference or other traversal, use SetAttributeTraversal.
//
// The return value is the attribute that was either modified in-place or
// created.
func (b *Body) SetAttributeValue(name string, val cty.Value) *Attribute {
	attr := b.GetAttribute(name)
	expr := NewExpressionLiteral(val)
	if attr != nil {
		attr.expr = attr.expr.ReplaceWith(expr)
	} else {
		// Bug fix: the original used `attr := newAttribute()` here, which
		// declared a new variable shadowing the outer `attr`, so the
		// function returned nil whenever it created a fresh attribute.
		attr = newAttribute()
		attr.init(name, expr)
		b.appendItem(attr)
	}
	return attr
}
// SetAttributeTraversal either replaces the expression of an existing attribute
// of the given name or adds a new attribute definition to the end of the block.
//
// The new expression is given as a hcl.Traversal, which must be an absolute
// traversal. To set a literal value, use SetAttributeValue.
//
// The return value is the attribute that was either modified in-place or
// created.
func (b *Body) SetAttributeTraversal(name string, traversal hcl.Traversal) *Attribute {
	// Deliberate stub: unimplemented in this vendored version; callers must
	// use SetAttributeValue for now.
	panic("Body.SetAttributeTraversal not yet implemented")
}
// Attribute represents a single "name = expr" item within a body. Each
// field points at the child node holding the corresponding piece of
// concrete syntax.
type Attribute struct {
	inTree

	leadComments *node
	name         *node
	expr         *node
	lineComments *node
}

// newAttribute allocates an empty, unattached attribute; callers must
// invoke init before appending it to a body.
func newAttribute() *Attribute {
	return &Attribute{
		inTree: newInTree(),
	}
}

// init populates an empty attribute with its name, "=" token, expression
// and trailing newline, wiring each piece into the attribute's child list.
// Child order mirrors the concrete syntax:
//   <lead comments> <name> "=" <expr> <line comments> "\n"
func (a *Attribute) init(name string, expr *Expression) {
	expr.assertUnattached()

	nameTok := newIdentToken(name)
	nameObj := newIdentifier(nameTok)
	a.leadComments = a.children.Append(newComments(nil))
	a.name = a.children.Append(nameObj)
	a.children.AppendUnstructuredTokens(Tokens{
		{
			Type:  hclsyntax.TokenEqual,
			Bytes: []byte{'='},
		},
	})
	a.expr = a.children.Append(expr)
	// The expression keeps a back-reference to the list it lives in so it
	// can later be replaced in place (see ReplaceWith).
	a.expr.list = a.children
	a.lineComments = a.children.Append(newComments(nil))
	a.children.AppendUnstructuredTokens(Tokens{
		{
			Type:  hclsyntax.TokenNewline,
			Bytes: []byte{'\n'},
		},
	})
}

// Expr returns the attribute's value expression.
func (a *Attribute) Expr() *Expression {
	return a.expr.content.(*Expression)
}
// Block represents a nested block construct: a type name, optional labels,
// and a brace-delimited body, each tracked as a child node.
type Block struct {
	inTree

	leadComments *node
	typeName     *node
	labels       nodeSet
	open         *node // the "{" token
	body         *node
	close        *node // the "}" token
}
|
<reponame>chrisprice/honesty-store-1
import history from '../history';
export const UNKNOWN_ITEM = 'UNKNOWN_ITEM';

// Action creator for an item that couldn't be resolved. The offending
// itemId is now carried on the action so reducers/logging can report it
// (previously the creator accepted nothing and the id was silently lost;
// the extra field is backward-compatible for existing reducers).
const unknown = itemId => {
  return {
    type: UNKNOWN_ITEM,
    itemId
  };
};

// Thunk: send the user back to the store page, then record the unknown item.
export const unknownItem = itemId => async dispatch => {
  history.push('/store');
  dispatch(unknown(itemId));
};
|
<gh_stars>0
import Reflux from 'reflux';
// Reflux actions for the release detail view: one triplet per async fetch
// (trigger / error / success).
export default Reflux.createActions([
  'loadRelease', // Singular as it loads 1 release
  'loadReleaseError',
  'loadReleaseSuccess',
  'loadDeploys', // Plural as it loads all deploys related to a release
  'loadDeploysError',
  'loadDeploysSuccess',
]);
|
package model;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.sql.*;
/**
 * Fetches the BLOB stored in student2.photo for id=5 and writes it to
 * vibhor.jpg in the working directory.
 *
 * Fixes: the original left the byte-copy loop commented out, so vibhor.jpg
 * was created but always empty, and none of the JDBC/stream resources were
 * closed. All resources are now managed with try-with-resources.
 */
public class RetriveImage {

    public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:mysql://localhost:3306/jsp", "root", "");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("select photo from student2 where id=5 ");
             FileOutputStream fos = new FileOutputStream("vibhor.jpg")) {
            while (rs.next()) {
                // Stream the BLOB to disk in chunks rather than loading it whole.
                try (InputStream input = rs.getBinaryStream("photo")) {
                    byte[] buffer = new byte[4096];
                    int read;
                    while ((read = input.read(buffer)) > 0) {
                        fos.write(buffer, 0, read);
                    }
                }
            }
        }
    }
}
|
from unittest import TestCase
class MoonshotTestCase:
    # Marker mixin: defines no behaviour of its own; presumably used to tag
    # test cases as belonging to the Moonshot suite (e.g. for isinstance
    # checks) — TODO confirm against the consuming test runner.
    pass

class CustomTestCase(TestCase, MoonshotTestCase):
    def runTest(self):
        # Default unittest entry point; trivially passes.
        # NOTE(review): unittest ignores return values, so `return True` has
        # no effect on the result — presumably a placeholder; confirm intent.
        return True
#!/bin/bash -xe
# $OS_TYPE $PUBLIC_IP $PRIVATE_IP $PUBLIC_HOSTNAME $BOULDER_URL
# are dynamically set at execution
#
# Self-upgrade test for letsencrypt-auto: install the pinned v0.1.0 client
# from a frozen pip index, switch back to the branch under test, and verify
# that letsencrypt-auto reports the branch's expected version afterwards.
cd letsencrypt
#git checkout v0.1.0 use --branch instead
# Point pip at the snapshot index serving the 0.1.0-era dependencies; the
# previous extra index is saved so it can be restored afterwards.
SAVE="$PIP_EXTRA_INDEX_URL"
unset PIP_EXTRA_INDEX_URL
export PIP_INDEX_URL="https://isnot.org/pip/0.1.0/"
#OLD_LEAUTO="https://raw.githubusercontent.com/letsencrypt/letsencrypt/5747ab7fd9641986833bad474d71b46a8c589247/letsencrypt-auto"
# git is required for the checkout dance below; try each distro's package
# manager in turn (ubuntu needs an apt-get update first).
if ! command -v git ; then
    if [ "$OS_TYPE" = "ubuntu" ] ; then
        sudo apt-get update
    fi
    if ! ( sudo apt-get install -y git || sudo yum install -y git-all || sudo yum install -y git || sudo dnf install -y git ) ; then
        echo git installation failed!
        exit 1
    fi
fi
# Remember the branch under test, install the old pinned client, then
# restore the pip environment and return to the branch.
BRANCH=`git rev-parse --abbrev-ref HEAD`
git checkout -f v0.1.0
./letsencrypt-auto -v --debug --version
unset PIP_INDEX_URL
export PIP_EXTRA_INDEX_URL="$SAVE"
git checkout -f "$BRANCH"
# The branch's letsencrypt-auto declares its own version; a successful
# self-upgrade must report exactly that version (self-upgrade disabled so
# the check observes the already-installed result).
EXPECTED_VERSION=$(grep -m1 LE_AUTO_VERSION letsencrypt-auto | cut -d\" -f2)
if ! ./letsencrypt-auto -v --debug --version --no-self-upgrade | grep $EXPECTED_VERSION ; then
    echo upgrade appeared to fail
    exit 1
fi
echo upgrade appeared to be successful
|
<reponame>wmn7/Traffic-Classification
'''
@Author: <NAME>
@Date: 2020-12-25 10:56:48
@Description:
@LastEditTime: 2020-12-25 15:10:16
'''
# <NAME>(<EMAIL>)
#
# 划分训练集和测试集.
# 对于每一个大的pcap文件中拆分出的若干个session:
# - 选出前6000个pcap, 其中10%作为测试集, 90%作为训练集
# - 同时对文件进行判断, 删除0byte的文件
# ==============================================================================
import os
import shutil  # used for copying files
import numpy as np

trainPath = """../1.DataSet/3_trainDataset/train"""  # training-set folder
testPath = """../1.DataSet/3_trainDataset/test"""  # test-set folder
dataPath = """../1.DataSet/2_pcap2session/"""  # raw data folder
SESSIONS_COUNT_LIMIT_MAX = 6000  # max sessions kept per source pcap

# Walk every folder under dataPath; each folder holds the session pcaps
# split out of one large capture.
for (root, dirs, files) in os.walk(dataPath):
    fileNum = 0  # number of usable files in this folder
    dtype = [('filePath', 'U1000'), ('filesize', 'int64')]
    fileList = []  # (path, size) records
    for Ufile in files:
        pcapPath = os.path.join(root, Ufile)  # full path of the candidate pcap
        pcapSize = os.path.getsize(pcapPath)  # file size in bytes
        if pcapSize > 0 and pcapSize < 104857600:  # keep non-empty files below 100 MB
            fileNum = fileNum + 1
            fileList = fileList + [(pcapPath, pcapSize)]
    pcapFile = root.split('/')[-1]  # name of the containing folder
    print(pcapFile)
    fileList = np.array(fileList, dtype=dtype)
    if fileNum > 0:
        if fileNum > SESSIONS_COUNT_LIMIT_MAX:
            # np.sort is ascending, so the negative slice below keeps the
            # LARGEST SESSIONS_COUNT_LIMIT_MAX files.
            fileList = np.sort(fileList, order='filesize')
            fileList = fileList[-SESSIONS_COUNT_LIMIT_MAX:]
            fileNum = SESSIONS_COUNT_LIMIT_MAX
        else:
            pass  # keep the original order unchanged
        # --------------
        # Copy the files to their split folders
        # --------------
        # Random 10% of indices become the test split.
        # NOTE(review): np.random is unseeded, so the split differs per run.
        inx = np.random.choice(np.arange(fileNum), size=int(fileNum/10), replace=False)
        testFiles = fileList[inx]  # 10% for testing
        trainFiles = fileList[list(set(np.arange(fileNum))-set(inx))]  # remaining 90% for training
        # Copy the test split
        for testFile in testFiles:
            fileName = testFile[0].split('/')[-1]  # e.g. chat/qq/xxx.pcap
            dst = '{}/{}'.format(testPath,fileName).replace('\\','/')
            # print(dst)
            os.makedirs(os.path.dirname(dst), exist_ok=True)  # create the folder if missing
            shutil.copy(testFile[0], dst)
        # Copy the training split
        for trainFile in trainFiles:
            fileName = trainFile[0].split('/')[-1]  # e.g. chat/qq/xxx.pcap
            dst = '{}/{}'.format(trainPath,fileName).replace('\\','/')
            # print(dst)
            os.makedirs(os.path.dirname(dst), exist_ok=True)  # create the folder if missing
            shutil.copy(trainFile[0], dst)
    print('-'*10)
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Build the memcopy benchmark, statically linked, using the configured
# (cross-)toolchain prefix.
${CROSS_COMPILE}gcc -static memcopy.c -o memcopy
|
#!/usr/bin/env bash
# Integration-test harness: boots $IMAGE in Docker, tunnels its port 7080
# to localhost via socat-over-docker-exec, waits for it to come up, then
# runs the gqltest suite against it. Cleanup runs on EXIT via trap.
cd "$(dirname "${BASH_SOURCE[0]}")"/../..
set -ex
if [ -z "$IMAGE" ]; then
    echo "Must specify \$IMAGE."
    exit 1
fi
URL="http://localhost:7080"
# Refuse to start if something is already listening on the target port.
if curl --output /dev/null --silent --head --fail $URL; then
    echo "❌ Can't run a new Sourcegraph instance on $URL because another instance is already running."
    echo "❌ The last time this happened, there was a runaway integration test run on the same Buildkite agent and the fix was to delete the pod and rebuild."
    exit 1
fi
echo "--- Running a daemonized $IMAGE as the test subject..."
CONTAINER="$(docker container run -d -e GOTRACEBACK=all "$IMAGE")"
# EXIT handler: kill background jobs (the socat tunnel), dump server logs,
# and remove the container and image.
function cleanup() {
    exit_status=$?
    if [ $exit_status -ne 0 ]; then
        # Expand the output if our run failed.
        echo "^^^ +++"
    fi
    jobs -p -r | xargs kill
    echo "--- server logs"
    docker logs --timestamps "$CONTAINER"
    echo "--- docker cleanup"
    docker container rm -f "$CONTAINER"
    docker image rm -f "$IMAGE"
    if [ $exit_status -ne 0 ]; then
        # This command will fail, so our last step will be expanded. We don't want
        # to expand "docker cleanup" so we add in a dummy section.
        echo "--- gqltest failed"
        echo "See go test section for test runner logs."
    fi
}
trap cleanup EXIT
docker exec "$CONTAINER" apk add --no-cache socat
# Connect the server container's port 7080 to localhost:7080 so that integration tests
# can hit it. This is similar to port-forwarding via SSH tunneling, but uses `docker exec`
# as the transport.
socat tcp-listen:7080,reuseaddr,fork system:"docker exec -i $CONTAINER socat stdio 'tcp:localhost:7080'" &
echo "--- Waiting for $URL to be up"
set +e
timeout 60s bash -c "until curl --output /dev/null --silent --head --fail $URL; do
    echo Waiting 5s for $URL...
    sleep 5
done"
# shellcheck disable=SC2181
if [ $? -ne 0 ]; then
    echo "^^^ +++"
    echo "$URL was not accessible within 60s. Here's the output of docker inspect and docker logs:"
    docker inspect "$CONTAINER"
    exit 1
fi
set -e
echo "Waiting for $URL... done"
echo '--- go test ./dev/gqltest -long'
go test ./dev/gqltest -long
|
#!/bin/bash
# Profile the AST data-preprocessing pipeline under cProfile; stats are
# written to perf.prof (inspect with pstats or snakeviz).
PYTHONPATH=. python3 -m cProfile -o perf.prof scripts/ast/data_process.py \
    --file_train_raw "data/ast/programs_training.json" \
    --file_eval_raw "data/ast/programs_eval.json" \
    --file_non_terminals "data/ast/non_terminals.json" \
    --file_terminals "data/ast/terminals.json" \
    --file_train_converted "data/ast/programs_training_seq.json" \
    --file_eval_converted "data/ast/programs_eval_seq.json" \
    --file_train "data/ast/file_train.json" \
    --file_eval "data/ast/file_eval.json" \
    --file_glove_map "data/ast/terminals_map.json" \
    --file_glove_vocab "data/ast/vocab.txt" \
    --file_glove_terminals "data/ast/glove_terminals.json" \
    --file_glove_non_terminals "data/ast/glove_non_terminals_corpus.txt" \
    --last_is_zero
#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This should be run on the host on which the CM server is to run.
# Set up some vars
# All deployment parameters are read from an ini-style config file.
config_file=clouderaconfig.ini
cm_server_host=`grep cm.host $config_file | awk -F'=' '{print $2}'`
ntp_server=`grep ntp.server $config_file | awk -F'=' '{print $2}'`
# This should be set to whatever HIVE_HMS_HOST is set to in deploycloudera.py
hive_metastore_host=`grep hive.metastore.host $config_file | awk -F'=' '{print $2}'`
hive_metastore_password=`grep hive.metastore.password $config_file | awk -F'=' '{print $2}'`
# Prep Cloudera repo
sudo yum -y install wget
wget http://archive.cloudera.com/cm5/redhat/6/x86_64/cm/cloudera-manager.repo
sudo mv cloudera-manager.repo /etc/yum.repos.d/
# Turn off firewall
sudo service iptables stop
# Turn off SELINUX
# NOTE(review): the redirection here runs as the invoking user, not root —
# sudo only applies to `echo`; confirm this actually disables SELinux.
sudo echo 0 >/selinux/enforce
# Set up NTP
sudo yum -y install ntp
sudo chkconfig ntpd on
sudo ntpdate $ntp_server
sudo /etc/init.d/ntpd start
# Set up python
sudo rpm -ivh http://dl.fedoraproject.org/pub/epel/6/i386/epel-release-6-8.noarch.rpm
sudo yum -y install python-pip
sudo pip install cm_api
# Set up MySQL
wget http://archive.cloudera.com/cdh5/cdh/5/hive-1.1.0-cdh5.4.0.tar.gz
tar zxf hive-1.1.0-cdh5.4.0.tar.gz
cp ./hive-1.1.0-cdh5.4.0/scripts/metastore/upgrade/mysql/hive-txn-schema-0.13.0.mysql.sql .
sudo yum -y install mysql-server expect
sudo service mysqld start
sudo /sbin/chkconfig mysqld on
# Scripted answers for mysql_secure_installation (set root password, etc.).
sudo /bin/echo -e "\nY\n$hive_metastore_password\n$hive_metastore_password\nY\nn\nY\nY\n" > /tmp/answers
sudo /usr/bin/mysql_secure_installation < /tmp/answers
sudo rm /tmp/answers
# Create the Hive metastore schema and a restricted 'hive' user.
# NOTE(review): -p$... puts the password on the command line, visible in the
# process list; consider a defaults file if this runs on shared hosts.
mysql -uroot -p$hive_metastore_password --execute="CREATE DATABASE metastore; USE metastore; SOURCE ./hive-1.1.0-cdh5.4.0/scripts/metastore/upgrade/mysql/hive-schema-1.1.0.mysql.sql;"
mysql -uroot -p$hive_metastore_password --execute="CREATE USER 'hive'@'%' IDENTIFIED BY '$hive_metastore_password';"
mysql -uroot -p$hive_metastore_password --execute="REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'hive'@'%';"
mysql -uroot -p$hive_metastore_password --execute="GRANT SELECT,INSERT,UPDATE,DELETE,LOCK TABLES,EXECUTE,CREATE ON metastore.* TO 'hive'@'%';"
mysql -uroot -p$hive_metastore_password --execute="FLUSH PRIVILEGES;"
mysql -uroot -p$hive_metastore_password --execute="create database oozie; grant all privileges on oozie.* to 'oozie'@'%' identified by '$hive_metastore_password'; grant all privileges on oozie.* to 'oozie'@'%' identified by '$hive_metastore_password';"
# Make sure DNS is set up properly so all nodes can find all other nodes
# For master
sudo yum -y install cloudera-manager-agent cloudera-manager-daemons cloudera-manager-server cloudera-manager-server-db-2
sudo service cloudera-scm-server-db start
sudo service cloudera-scm-server start
# Point the local agent at the CM server named in the config file.
sudo sed -i.bak -e"s%server_host=localhost%server_host=$cm_server_host%" /etc/cloudera-scm-agent/config.ini
sudo service cloudera-scm-agent start
# Prep work before calling the Cloudera provisioning script.
firehostdbpassword=`grep com.cloudera.cmf.ACTIVITYMONITOR.db.password /etc/cloudera-scm-server/db.mgmt.properties | awk -F'=' '{print $2}'`
navigatordbpassword=`grep com.cloudera.cmf.NAVIGATOR.db.password /etc/cloudera-scm-server/db.mgmt.properties | awk -F'=' '{print $2}'`
headlampdbpassword=`grep com.cloudera.cmf.REPORTSMANAGER.db.password /etc/cloudera-scm-server/db.mgmt.properties | awk -F'=' '{print $2}'`
# Sleep for a while to give the agents enough time to check in with the master.
# Or better yet, make a dependency so that the slave setup scripts don't start until now and the rest of this script doesn't finish until the slaves finish.
sleep_time=180
echo "Sleeping for $sleep_time seconds so managed cluster nodes can get set up."
sleep $sleep_time
echo "Done sleeping. Deploying cluster now."
# Execute script to deploy Cloudera cluster
sudo python deploycloudera.py -i$hive_metastore_password -f$firehostdbpassword -n$navigatordbpassword -r$headlampdbpassword
# Now stop the cluster gracefully if necessary; ie if all servers are automatically rebooted at the end of the provisioning process
#sudo python stopcloudera.py
|
<filename>vezbe06/ListaUlica.java
// Singly-linked list node holding one person's name.
class Osoba {
    String ime;  // person's name
    Osoba next;  // next person in the same house's list

    public Osoba(String ime) {
        this.ime = ime;
        this.next = null;
    }

    public String toString() {
        return ime;
    }
}
// A house: a node in the street's list that itself holds a linked list of
// occupants. New occupants are pushed at the head (stack-like).
class Kuca {
    int redniBr;  // house number (ordering key within the street)
    Osoba head;   // head of the occupants list
    Kuca next;    // next house in the street

    public Kuca(int redniBr) {
        this.redniBr = redniBr;
        this.head = null;
        this.next = null;
    }

    // True when the house has no occupants.
    public boolean isEmpty() {
        return this.head == null;
    }

    // Prepend a new occupant — O(1) push at the head.
    public void dodajOsobu(String ime) {
        Osoba nova = new Osoba(ime);
        nova.next = this.head;
        this.head = nova;
    }

    // Count occupants whose name is strictly longer than n characters.
    public int brojDuzihOdN(int n) {
        int broj = 0;
        for (Osoba curr = this.head; curr != null; curr = curr.next) {
            if (curr.ime.length() > n) {
                broj++;
            }
        }
        return broj;
    }

    // Unlink and return the LAST occupant; null when the house is empty.
    public Osoba obrisiPoslednji() {
        if (this.head == null) {
            return null;
        }
        if (this.head.next == null) {
            // Single occupant: the list becomes empty.
            Osoba ret = this.head;
            this.head = null;
            return ret;
        }
        Osoba prev;
        // Walk to the second-to-last node (empty loop body on purpose).
        for (prev = this.head; prev.next.next != null; prev = prev.next);
        Osoba ret = prev.next;
        prev.next = null;
        return ret;
    }

    public String toString() {
        String output = String.format("\t%d [\n", this.redniBr);
        output += "\t\tosobe [\n";
        for (Osoba curr = this.head; curr != null; curr = curr.next) {
            output += String.format("\t\t\t%s\n", curr);
        }
        output += "\t\t]\n\t]\n";
        return output;
    }
}
// A street: an ascending, duplicate-free singly-linked list of houses.
class Ulica {
    Kuca head;  // first house (smallest number)

    public Ulica() {
        this.head = null;
    }

    // Insert a house keeping the list sorted; duplicate numbers are ignored.
    public void dodajKucu(int redniBrojKuce) {
        if (this.head != null && this.head.redniBr == redniBrojKuce) {
            return;
        }
        Kuca nova = new Kuca(redniBrojKuce);
        // Empty list or new smallest number: insert at the head.
        if (this.head == null || this.head.redniBr > redniBrojKuce) {
            nova.next = this.head;
            this.head = nova;
            return;
        }
        Kuca prev = this.head;
        while (prev.next != null) {
            if (prev.next.redniBr == redniBrojKuce) {
                return;  // already present
            }
            // Splice in between prev and prev.next when the number fits there.
            if (prev.redniBr < redniBrojKuce && prev.next.redniBr > redniBrojKuce) {
                nova.next = prev.next;
                prev.next = nova;
                return;
            }
            prev = prev.next;
        }
        prev.next = nova;  // new largest number: append at the tail
    }

    // Linear search for the house numbered n; null when absent.
    private Kuca getKuca(int n) {
        for (Kuca curr = this.head; curr != null; curr = curr.next) {
            if (curr.redniBr == n) {
                return curr;
            }
        }
        return null;
    }

    // Add a person to house redniBrojKuce; silently ignored when the house
    // does not exist.
    public void dodajOsobu(String ime, int redniBrojKuce) {
        Kuca kuca = getKuca(redniBrojKuce);
        if (kuca != null) {
            kuca.dodajOsobu(ime);
        }
    }

    // Number of the house with the most occupants whose names are longer
    // than n characters (first such house wins ties); 0 for an empty street.
    public int najvecaSaVeceOdN(int n) {
        if (this.head == null) {
            return 0;
        }
        Kuca max = this.head;
        for (Kuca curr = this.head.next; curr != null; curr = curr.next) {
            if (curr.brojDuzihOdN(n) > max.brojDuzihOdN(n)) {
                max = curr;
            }
        }
        return max.redniBr;
    }

    // Move the last-listed occupant of house a into house b.
    // NOTE(review): the occupant is removed from house a BEFORE house b is
    // looked up, so when b does not exist the person is lost — confirm
    // this is the intended behaviour.
    public void prebaciAuB(int a, int b) {
        Kuca A = getKuca(a);
        if (A == null || A.isEmpty()) {
            return;
        }
        Osoba prenos = A.obrisiPoslednji();
        Kuca B = getKuca(b);
        if (B == null) {
            return;
        }
        B.dodajOsobu(prenos.ime);
    }

    public String toString() {
        String output = "ulica [\n";
        for (Kuca curr = this.head; curr != null; curr = curr.next) {
            output += curr;
        }
        output += "]";
        return output;
    }
}
|
from typing import Optional, Tuple
def process_input(input_str: str, x0: int, y0: int) -> Optional[Tuple[str, int, int]]:
    """Parse a "<click>,<x>,<y>" string and return the click label plus the
    offsets of (x, y) from the reference point (x0, y0).

    Args:
        input_str: Comma-separated "click,x,y" triple, e.g. "left,5,8".
        x0: Reference x-coordinate.
        y0: Reference y-coordinate.

    Returns:
        (click, dx, dy) on success, or None when the input is malformed.
        Fix over the original: the annotation claimed the tuple was always
        returned, while the error path silently fell through to None; the
        None return is now explicit and reflected in the annotation.
    """
    try:
        if ',' not in input_str:
            raise ValueError("Invalid input format")
        click, x, y = input_str.strip().split(",")
        print(click, x, y)  # Echo the raw fields (original behaviour kept)
        dx = int(x) - x0  # x-offset from the reference point
        dy = int(y) - y0  # y-offset from the reference point
        return click, dx, dy
    except ValueError as e:
        # Malformed input (wrong field count or non-numeric coordinates):
        # report and return None rather than raising.
        print(e)
        return None
# Test the function
# Demo: dx = 5 - 3 = 2, dy = 8 - 4 = 4.
input_str = "left,5,8"
x0 = 3
y0 = 4
print(process_input(input_str, x0, y0))  # Output: ('left', 2, 4)
def decimal_to_binary(num):
    """Return the binary representation (without the '0b' prefix) of the
    integer part of num.

    The fractional part is truncated toward zero, so 12.834 -> '1100'.
    Fix over the original: negative inputs are now rendered with a leading
    '-' ('-110' for -6); the old bin(...)[2:] slicing mangled them into
    strings like 'b110' because it sliced through '-0b'.
    """
    n = int(num)
    if n < 0:
        # bin(-6) == '-0b110'; skip the three-character '-0b' prefix.
        return '-' + bin(n)[3:]
    return bin(n)[2:]
# Calling above function
# Demo: the fractional part of 12.834 is truncated, so this prints '1100'.
result = decimal_to_binary(12.834)
print(result)
<reponame>nathanramli/frontend-film<gh_stars>0
import axios from 'axios';
// Additional
import {API_URL} from './constants';
// REST client for character resources rooted at API_URL.
export default class CharacterService{
    /**
     * Fetch the characters belonging to one film.
     * @param id_film film identifier interpolated into the URL path
     * @returns promise resolving to the response body (response.data)
     */
    getChara(id_film) {
        const url = `${API_URL}/api/chara/${id_film}`;
        return axios.get(url).then(response => response.data);
    }

    /**
     * Delete a single character by primary key.
     * @param pk character primary key
     * @returns axios promise for the DELETE request (full response)
     */
    deleteChara(pk) {
        const url = `${API_URL}/api/chara_detail/${pk}`;
        return axios.delete(url);
    }
}
<reponame>defudef/typexpress
import { TableColumn } from 'typeorm';
/**
 * Factory for the standard primary-key column used in migrations:
 * an auto-generated UUID named `id`.
 */
export default (): TableColumn =>
  new TableColumn({
    name: 'id',
    type: 'uuid',
    isPrimary: true,
    isGenerated: true,
    generationStrategy: 'uuid',
  });
|
#!/usr/bin/env bash
# Container entrypoint: start nginx (which daemonizes by default), then run
# the service jar in the foreground so this process keeps the container alive.
nginx
java -jar image-train-filters-service-assembly-1.0.0.jar
|
def count_programming_languages(metadata):
    """Tally the trailing segment of every "Programming Language ::" entry.

    Each matching classifier is split on "::" and its last segment
    (stripped of whitespace) is counted; all other entries are ignored.

    Args:
        metadata: Iterable of classifier strings.

    Returns:
        Dict mapping the final classifier segment to its occurrence count.
    """
    counts = {}
    prefix = "Programming Language ::"
    for entry in metadata:
        if not entry.startswith(prefix):
            continue
        segment = entry.split("::")[-1].strip()
        counts[segment] = counts.get(segment, 0) + 1
    return counts
#! /bin/sh
# Updates all Python files with license taken from README.md and copyright information obtained from the git log.
# For setup.py and every *.py under python_skeleton/ and test/: generate the
# copyright notice, prepend LICENSE.md, and rewrite the file's header in place.
for fn in setup.py `find python_skeleton test -name "*.py"`; do
    echo $fn
    python3 maintenance/copyright.py $fn | cat - LICENSE.md | python3 maintenance/replace_header.py $fn
done
#!/bin/sh
#
# Copyright (c) 2007 Stefan Kurtz <kurtz@zbh.uni-hamburg.de>
# Copyright (c) 2007 Center for Bioinformatics, University of Hamburg
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
set -e -x
USAGE="Usage: $0 [-memcheck]"
# Argument handling: no args -> normal run; exactly "-memcheck" -> pass the
# flag through to testsuite.rb (valgrind); anything else -> usage error.
if test $# -eq 0
then
    MC=""
else
    if test $# -eq 1
    then
        if test "$1" = "-memcheck"
        then
            MC="-memcheck"
        else
            echo ${USAGE}
            exit 1
        fi
    else
        echo ${USAGE}
        exit 1
    fi
fi
cd testsuite
# the make call normally used for development
# Each run uses a scrubbed environment (env -i) with memory bookkeeping on;
# -keywords selects one tool's tests per invocation.
# NOTE(review): `${GTTESTDATA}` on the -gttestdata lines expands in the
# OUTER shell, not from the GTTESTDATA=... prefix inside env -i — confirm
# it is exported before this script runs.
env -i GT_MEM_BOOKKEEPING=on GTTESTDATA=${HOME}/gttestdata ./testsuite.rb \
    ${MC} -keywords 'gt_extractseq' \
    -gttestdata ${GTTESTDATA}
env -i GT_MEM_BOOKKEEPING=on ./testsuite.rb ${MC} -keywords 'gt_suffixerator'
env -i GT_MEM_BOOKKEEPING=on GTTESTDATA=${HOME}/gttestdata ./testsuite.rb \
    ${MC} -keywords 'gt_suffixerator and gttestdata' \
    -gttestdata ${GTTESTDATA}
env -i GT_MEM_BOOKKEEPING=on ./testsuite.rb ${MC} -keywords 'gt_trieins'
env -i GT_MEM_BOOKKEEPING=on GTTESTDATA=${HOME}/gttestdata ./testsuite.rb \
    ${MC} -keywords 'gt_packedindex_at1MB' \
    -gttestdata ${GTTESTDATA}
env -i GT_MEM_BOOKKEEPING=on ./testsuite.rb ${MC} -keywords 'gt_packedindex'
cd ..
# optional -memcheck (run valgrind)
# -select 253 (run testcase 253)
# the following depends on vmatch-mini.x and mkvtree.x
# ../scripts/runmaxpairs.sh 14 ${GRUMBACH}/*.fna ../testdata/Duplicate.fna
sktest-vsvs.sh
sktest-match.sh
|
#!/bin/bash
# vim: set tw=0
# Joseph Harriott http://momentary.eu/ Wed 04 Mar 2015
# Convert images to Children's Color-In Outline Image
# ----------------------------------------------------
# as at http://www.imagemagick.org/Usage/photos/#texture
# haven't figured how to include this in jpgMagick.pl yet...
# Fix: all filename expansions are now double-quoted so names containing
# spaces or glob characters don't word-split or re-glob.
outd="Originals-$(date +%Y%m%d-%H%M%S)"
echo "$outd"
mkdir "$outd"
for inf in *jpg
do
    # Output name: original basename with a "-CI" suffix.
    outf="${inf%.*}-CI.jpg"
    echo -en "\r$outf"
    # Edge-detect, invert, normalize and grey-scale to produce the outline.
    convert "$inf" -edge 1 -negate -normalize -colorspace Gray -blur 0x.5 -contrast-stretch 0x50% "$outf"
    # Archive the processed original.
    mv "$inf" "$outd"
done
echo
|
def tokenize(sentence):
    """Split a sentence into tokens.

    Consecutive alphabetic characters are grouped into one word token,
    each numeric character becomes its own single-character token, and
    every other character (spaces, punctuation) only terminates the
    current word and is otherwise dropped.

    Args:
        sentence: The string to tokenize.

    Returns:
        List of word and single-digit tokens in order of appearance.
    """
    words = []
    word = ""
    for ch in sentence:
        if ch.isalpha():
            word += ch
            continue
        # Non-letter: flush any in-progress word first.
        if word:
            words.append(word)
            word = ""
        if ch.isnumeric():
            words.append(ch)
    if word:  # trailing word at end of input
        words.append(word)
    return words
# Demo: punctuation is dropped; alphabetic runs become word tokens.
print(tokenize('The quick brown fox jumped over the lazy dog.'))
# ['The', 'quick', 'brown', 'fox', 'jumped', 'over', 'the', 'lazy', 'dog']
Let x be one number and y be the other number.
x + y = 55
x^2 + y^2 = 1733
Then (x - y)^2 = 2(x^2 + y^2) - (x + y)^2 = 3466 - 3025 = 441, so x - y = 21 (taking x as the larger number).
x = (55 + 21)/2 = 38
y = (55 - 21)/2 = 17
=> x = 38 and y = 17
package io.strmprivacy.driver.client;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.strmprivacy.driver.domain.Config;
import io.strmprivacy.driver.domain.StrmPrivacyException;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentResponse;
import org.eclipse.jetty.client.util.StringContentProvider;
import org.eclipse.jetty.http.HttpHeader;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.time.Duration;
import java.time.Instant;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
/**
 * Manages STRM Privacy authentication tokens: fetches an initial token from
 * the STS on construction (blocking until it is available) and keeps it
 * fresh from a background {@link Timer} thread.
 */
class AuthService {
    private static final ObjectMapper MAPPER = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    private static final Logger log = LoggerFactory.getLogger(AuthService.class);

    private final String billingId;
    private final String clientId;
    private final String clientSecret;
    private final HttpClient httpClient;
    private final CountDownLatch latch;  // released after the first token fetch attempt
    private final Timer timer;           // drives the periodic refresh task
    private AuthProvider authProvider;   // current token holder; replaced on every (re)auth
    private final String authUri;
    private final String refreshUri;

    /**
     * Builds the service, starts the HTTP client, schedules the refresh
     * task (every 5 minutes) and blocks until the initial authentication
     * attempt has completed.
     *
     * @throws IllegalStateException if the HTTP client cannot start, the
     *         configured STS URIs are malformed, or the thread is
     *         interrupted while waiting for the first token.
     */
    public AuthService(String billingId, String clientId, String clientSecret, Config config) {
        this.billingId = billingId;
        this.clientId = clientId;
        this.clientSecret = clientSecret;
        this.httpClient = new HttpClient(new SslContextFactory.Client());
        try {
            this.httpClient.start();
        } catch (Exception e) {
            throw new IllegalStateException("An unexpected error occurred while starting a new AuthService for STRM Privacy.", e);
        }
        try {
            this.authUri = new URI(String.format("%s://%s%s", config.getStsScheme(), config.getStsHost(), config.getStsAuthEndpoint())).toString();
            this.refreshUri = new URI(String.format("%s://%s%s", config.getStsScheme(), config.getStsHost(), config.getStsRefreshEndpoint())).toString();
        } catch (URISyntaxException e) {
            throw new IllegalStateException("Malformed URI(s) for " + this.getClass().getCanonicalName(), e);
        }
        this.timer = new Timer();
        this.latch = new CountDownLatch(1);
        // Runs immediately (delay 0), then every 5 minutes.
        this.timer.schedule(new AuthProviderInitializerTask(), 0, Duration.ofMinutes(5).toMillis());
        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new IllegalStateException("Error while setting up authentication for STRM Privacy", e);
        }
    }

    // Returns the current id token. NOTE(review): if the initial
    // authentication HTTP call failed, authProvider may still be null here.
    public String getAccessToken() {
        return authProvider.getIdToken();
    }

    // Stops the refresh timer and the underlying HTTP client.
    public void stop() {
        try {
            this.timer.cancel();
            this.httpClient.stop();
        } catch (Exception e) {
            throw new StrmPrivacyException("Error stopping AuthService HttpClient", e);
        }
    }

    // Full authentication: POST credentials to the auth endpoint. Errors are
    // logged, not rethrown — authProvider stays unchanged on failure.
    private void authenticate(String billingId, String clientId, String clientSecret) {
        try {
            ObjectNode payload = MAPPER.createObjectNode()
                    .put("billingId", billingId)
                    .put("clientId", clientId)
                    .put("clientSecret", clientSecret);
            doPost(authUri, payload);
        } catch (IOException | InterruptedException | TimeoutException | ExecutionException e) {
            log.error("An error occurred while requesting an access token with clientId '{}' and billingId '{}'", clientId, billingId, e);
        }
    }

    // Token refresh: only the refreshToken is sent; the credential
    // parameters are used solely for the fallback full re-authentication
    // when the refresh call fails.
    private void refresh(String refreshToken, String billingId, String clientId, String clientSecret) {
        try {
            ObjectNode payload = MAPPER.createObjectNode();
            payload.put("refreshToken", refreshToken);
            doPost(refreshUri, payload);
        } catch (IOException | InterruptedException | TimeoutException | ExecutionException e) {
            log.debug("Failed to refresh token with clientId '{}' and billingId '{}'", clientId, billingId);
            log.debug("Trying to request a new token with clientId '{}' and billingId '{}'", clientId, billingId);
            authenticate(billingId, clientId, clientSecret);
        }
    }

    // Shared POST helper: sends the JSON payload and replaces authProvider
    // with the deserialized response body.
    private void doPost(String uri, ObjectNode payload) throws IOException, InterruptedException, TimeoutException, ExecutionException {
        ContentResponse response = httpClient.POST(uri)
                .content(new StringContentProvider(MAPPER.writeValueAsString(payload)))
                .header(HttpHeader.CONTENT_TYPE, "application/json; charset=UTF-8")
                .send();
        this.authProvider = MAPPER.readValue(response.getContentAsString(), AuthProvider.class);
    }

    // Timer task: authenticates on the first run (then releases the
    // constructor's latch), refreshes on later runs when the token is
    // within the slack window of expiring.
    private class AuthProviderInitializerTask extends TimerTask {
        private final long expirationSlackTimeSeconds = Duration.ofMinutes(10).getSeconds();

        public void run() {
            if (authProvider == null) {
                log.debug("Initializing a new Auth Provider");
                authenticate(billingId, clientId, clientSecret);
                latch.countDown();
            } else if (isAlmostExpired(authProvider.getExpiresAt())) {
                log.debug("Refreshing an existing Auth Provider");
                refresh(authProvider.getRefreshToken(), billingId, clientId, clientSecret);
            }
        }

        // expirationTime is compared against epoch seconds, with a 10-minute
        // slack so refresh happens before actual expiry.
        private boolean isAlmostExpired(long expirationTime) {
            long currentTime = Instant.now().getEpochSecond();
            return (currentTime + expirationSlackTimeSeconds) >= expirationTime;
        }
    }
}
|
<filename>src/main/scala/IPRepository/AXI/AXIPWMIn.scala<gh_stars>0
package IPRepository.AXI
import IPRepository.SAXILiteBridge.SAXILiteBridge1
import Interfaces.AxiLite
import chisel3._
// AXI-Lite peripheral that measures the high-time of up to 32 PWM inputs.
// Each enabled channel counts cycles per fixed `period` window; the latched
// high-count per channel is readable over AXI, and register 0 writes the
// per-channel enable ("power") mask.
class AXIPWMIn(val channelCount: Int, val period: Int) extends Module {
  assert(channelCount > 0)
  assert(channelCount < 33)
  // Address width: enough bits to index every channel register, plus 2.
  private val AXI_WIDTH = Math.ceil(Math.log(channelCount) / Math.log(2)).toInt + 2
  val io = IO(new Bundle {
    val s_axi = Flipped(new AxiLite(AXI_WIDTH, 32))
    val pwm = Input(Vec(channelCount, Bool()))
  })
  val bridge = Module(new SAXILiteBridge1(AXI_WIDTH, 4))
  bridge.io.s_axi <> io.s_axi
  // Per-channel enable bits, written via AXI register 0.
  val power = RegInit(0.U(channelCount.W))
  val counter_period = RegInit(VecInit(Seq.fill(channelCount)(1.U(32.W))))
  val counter_high = RegInit(VecInit(Seq.fill(channelCount)(0.U(32.W))))
  val result_high = RegInit(VecInit(Seq.fill(channelCount)(0.U(32.W))))
  // Two-stage synchronizer (plus one delay stage) for the async PWM inputs.
  val pwmDelay1 = RegInit(VecInit(Seq.fill(channelCount)(0.U(1.W))))
  val pwmCache1 = RegInit(VecInit(Seq.fill(channelCount)(0.U(1.W))))
  val pwmCache2 = RegInit(VecInit(Seq.fill(channelCount)(0.U(1.W))))
  // counter period will begin to count when power on
  for (i <- 0 until channelCount) {
    when (power(i).toBool()) {
      when (counter_period(i) === period.U) {
        counter_period(i) := 1.U  // wrap at the end of the measurement window
      } otherwise {
        counter_period(i) := counter_period(i) + 1.U
      }
    } .otherwise {
      counter_period(i) := 1.U  // held in reset while the channel is off
    }
  }
  // counter high: counts cycles where the synchronized PWM input is high,
  // cleared at each period boundary.
  for (i <- 0 until channelCount) {
    when (power(i).toBool()) {
      when (counter_period(i) === period.U) {
        counter_high(i) := 0.U
      } .elsewhen(pwmCache2(i) === 1.U) {
        counter_high(i) := counter_high(i) + 1.U
      }
    } .otherwise {
      counter_high(i) := 0.U
    }
  }
  // result: latch the high-counts at the end of the window.
  for (i <- 0 until channelCount) {
    when(power(i).toBool() & counter_period(i) === period.U) {
      // NOTE(review): this assigns the WHOLE result vector whenever any one
      // channel's window ends; result_high(i) := counter_high(i) may have
      // been intended — confirm against the hardware spec before changing.
      result_high := counter_high
    }
  }
  // pwm cache and delay
  for (i <- 0 until channelCount) {
    pwmDelay1(i) := io.pwm(i)
    pwmCache1(i) := pwmDelay1(i)
    pwmCache2(i) := pwmCache1(i)
  }
  // sei read: a read at address i returns the latched high-count of channel i.
  val readBuffer = RegInit(0.U(32.W))
  when(bridge.io.s_axi_simplified.rd.addr.valid) {
    for (i <- 0 until channelCount) {
      when(bridge.io.s_axi_simplified.rd.addr.bits === i.U) {
        readBuffer := result_high(i)
      }
    }
  }
  bridge.io.s_axi_simplified.rd.data := readBuffer
  // sei write: only address 0 is writable — the channel enable mask.
  when(bridge.io.s_axi_simplified.wr.valid & bridge.io.s_axi_simplified.wr.bits.addr === 0.U) {
    power := bridge.io.s_axi_simplified.wr.bits.data
  }
}
|
<filename>platform-shop/src/main/java/com/platform/controller/FlashgoodsController.java
package com.platform.controller;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.platform.entity.FlashgoodsEntity;
import com.platform.service.FlashgoodsService;
import com.platform.utils.PageUtils;
import com.platform.utils.Query;
import com.platform.utils.R;
/**
* Controller
*
* @author lipengjun
* @email 9399<EMAIL>
* @date 2018-09-29 14:36:33
*/
@RestController
@RequestMapping("flashgoods")
public class FlashgoodsController {
    @Autowired
    private FlashgoodsService flashgoodsService;

    /**
     * Paged list query.
     */
    @RequestMapping("/list")
    // NOTE(review): every other endpoint here uses the "flashgoods:*"
    // permission namespace; "timegoods:list" looks like a copy/paste
    // leftover — verify against the permission table before changing it.
    @RequiresPermissions("timegoods:list")
    public R list(@RequestParam Map<String, Object> params) {
        // Query the paged list data
        Query query = new Query(params);
        List<FlashgoodsEntity> flashgoodsList = flashgoodsService.queryList(query);
        int total = flashgoodsService.queryTotal(query);
        PageUtils pageUtil = new PageUtils(flashgoodsList, total, query.getLimit(), query.getPage());
        return R.ok().put("page", pageUtil);
    }

    /**
     * Fetch one record by id.
     */
    @RequestMapping("/info/{id}")
    @RequiresPermissions("flashgoods:info")
    public R info(@PathVariable("id") Integer id) {
        FlashgoodsEntity flashgoods = flashgoodsService.queryObject(id);
        return R.ok().put("flashgoods", flashgoods);
    }

    /**
     * Save a new record after validating the rebate (discount) value.
     */
    @RequestMapping("/save")
    @RequiresPermissions("flashgoods:save")
    public R save(@RequestBody FlashgoodsEntity flashgoods) {
        // NOTE(review): the check permits the boundary values 0 and 100
        // while the (Chinese) error message says the rebate must be
        // STRICTLY between 0 and 100 — confirm which bound is intended.
        if(flashgoods.getRebate()<0||flashgoods.getRebate()>100) {
            return R.error("折扣必须大于0且小于100");
        }
        flashgoodsService.save(flashgoods);
        return R.ok();
    }

    /**
     * Update an existing record (no rebate validation, unlike save).
     */
    @RequestMapping("/update")
    @RequiresPermissions("flashgoods:update")
    public R update(@RequestBody FlashgoodsEntity flashgoods) {
        flashgoodsService.update(flashgoods);
        return R.ok();
    }

    /**
     * Batch delete by id array.
     */
    @RequestMapping("/delete")
    @RequiresPermissions("flashgoods:delete")
    public R delete(@RequestBody Integer[] ids) {
        flashgoodsService.deleteBatch(ids);
        return R.ok();
    }

    /**
     * Unpaged list of all records matching the given params.
     * NOTE(review): unlike the other endpoints, this one declares no
     * {@code @RequiresPermissions} — confirm it is meant to be open.
     */
    @RequestMapping("/queryAll")
    public R queryAll(@RequestParam Map<String, Object> params) {
        List<FlashgoodsEntity> list = flashgoodsService.queryList(params);
        return R.ok().put("list", list);
    }
}
|
# Download and extract the Windows binary install
# Requires innoextract installed in the Dockerfile
mkdir r-win
wget https://cloud.r-project.org/bin/windows/base/R-3.6.1-win.exe \
    --output-document r-win/latest_r.exe
cd r-win
# Unpack the Inno Setup installer without running it (payload lands in app/).
innoextract -e latest_r.exe
# Hoist the extracted payload up into r-win/ and drop the installer.
mv app/* ../r-win
rm -r app latest_r.exe
# Remove unneccessary files TODO: What else?
rm -r doc tests
|
<filename>src/config-core/config_messaging_parsing.h
#ifndef __TCREWRITE_CONFIG_MESSAGING_PARSING__H_
#define __TCREWRITE_CONFIG_MESSAGING_PARSING__H_
#include <sys/types.h>
#include "apache_typedefs.h"
/* Sender identification carried on every wire message. */
typedef struct cfgm_wire_header{
    char* nodeName;     /* originating node */
    char* componentId;  /* component within that node */
    char* version;      /* version string */
}cfgm_wire_header;
/* A parsed wire message: a type tag, its header, and a parameter hash. */
typedef struct cfgm_wire_message{
    char* type;                /* message type tag */
    cfgm_wire_header* header;  /* sender identification */
    apr_hash_t* params;        /* key/value payload (APR hash) */
}cfgm_wire_message;
/* Constructors; each takes an APR pool for allocation. */
cfgm_wire_header* cfgm_newWireHeader(pool* p);
cfgm_wire_message* cfgm_newWireMessage(pool* p,cfgm_wire_header* header);
cfgm_wire_message* cfgm_newWireMessageType(pool* p,const char* type,cfgm_wire_header* header);
/* Serialize msg to its wire (string) form, allocating from p. */
char* cfgm_serializeMessage(apr_pool_t* p, cfgm_wire_message* msg);
#endif
|
/**
 * Sorts the given array in place, ascending, via bubble sort.
 * After each pass the largest remaining element has bubbled to the end,
 * so the inner bound shrinks by one each time. O(n^2) comparisons.
 *
 * @param arr array to sort; modified in place
 */
public static void bubbleSort(int[] arr) {
    for (int unsorted = arr.length - 1; unsorted > 0; unsorted--) {
        for (int k = 0; k < unsorted; k++) {
            if (arr[k] > arr[k + 1]) {
                // Swap the out-of-order neighbours.
                int tmp = arr[k + 1];
                arr[k + 1] = arr[k];
                arr[k] = tmp;
            }
        }
    }
}
// Demo: sort a small array in place and print it space-separated.
int arr[] = {10, 8, 6, 2};
bubbleSort(arr);
for (int i = 0; i < arr.length; i++) {
    System.out.print(arr[i] + " ");
}
#!/bin/bash
# Print the package names from the Build-Depends/Depends stanzas of a
# Debian control file as a space-separated list, excluding appscale-*
# packages. The control file is the first (required) argument.
: ${1:?ERROR: Please supply a control file to parse}
#
# Select range from Build-Depends|Depends, until last element in list (no comma)
# gather package names and output them as a space separated list.
#
sed -r -n '/^(Build-Depends|Depends):/,/[-.+a-zA-Z0-9]+$/ {
s/(Build-Depends|Depends):// # Remove stanza
s/[[:space:]]*([-+.a-zA-Z0-9]+).*/\1/ # Remove space and commas
/appscale-.*/d # Remove appscale packages
p # Print package name
}' "$@" | tr '\n' ' '
|
/*eslint-disable*/
/* global playCounter:true, w2:true, h2:true, changeColor:true, muteMusic:true, muteSoundEffects:true
firstTime:true, bgColors:true, bgColorsDark:true, Phaser, colorPlayers:true,
colorHex:true, colorHexDark:true, baseArea:true, chosenColor:true
modesLB:true, localStorage, scale:true, changingKeys:true, showMouse:true */
/*eslint-enable*/
// Boot state constructor: resets the cross-state globals (declared in the
// eslint-global header above) to their defaults before anything is loaded.
var boot = function (game) {
  playCounter = 0
  w2 = 0
  h2 = 0
  changeColor = false
  muteMusic = false
  muteSoundEffects = false
  showMouse = false
  firstTime = true
  scale = 1
  changingKeys = false
  controllersSet = false
}
// Phaser boot state: loads the minimal boot asset, restores persisted
// settings from localStorage, seeds the palettes, configures scaling and
// physics, then hands off to the PreloadMenu state.
boot.prototype = {
  preload: function () {
    this.game.load.image('bunny', 'assets/sprites/gui/bunny.png')
  },
  create: function () {
    w2 = this.game.width / 2
    h2 = this.game.height / 2
    this.game.stage.disableVisibilityChange = true
    // Restore persisted user settings (stored as strings in localStorage).
    if (localStorage.getItem('muteMusic') === 'true') {
      muteMusic = true
    }
    if (localStorage.getItem('muteSoundEffects') === 'true') {
      muteSoundEffects = true
    }
    if (localStorage.getItem('showMouse') === 'true') {
      showMouse = true
    }
    var timeScale = localStorage.getItem('timeScale')
    if (timeScale != null) this.game.time.slowMotion = timeScale;
    // Background colors
    // [green, red, purple, blue]
    bgColors = ['#76b83d', '#cf5e4f', '#805296', '#4c99b9']
    bgColorsDark = ['#3b5c1e', '#672f27', '#40294b', '#264c5c']
    // Leaderboard ids, one per game mode.
    modesLB = ['CgkIr97_oIgHEAIQCQ', 'CgkIr97_oIgHEAIQCg', 'CgkIr97_oIgHEAIQCw']
    // Pick one of the four background palettes at random for this session.
    chosenColor = this.game.rnd.integerInRange(0, 3)
    colorHex = bgColors[chosenColor]
    colorHexDark = bgColorsDark[chosenColor]
    document.body.style.background = '#45AA43'
    this.stage.backgroundColor = '#45AA43'
    // Player colors
    // [red, blue, pink, green, brown, cyan, purple, yellow]
    colorPlayers = ['#eb1c1c', '#4368e0', '#f07dc1', '#44c83a', '#9e432e', '#3dd6e0', '#9339e0', '#ebd90f']
    // this.game.forcesSingleUpdate = true
    this.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL
    this.scale.pageAlignHorizontally = true
    this.scale.pageAlignVertically = true
    // this.scale.setResizeCallback(this.resize, this)
    this.physics.startSystem(Phaser.Physics.ARCADE)
    this.stage.smoothed = true
    this.game.input.gamepad.start()
    this.state.start('PreloadMenu')
  }
}
|
<reponame>willfrey/ray
package io.ray.api.exception;
import io.ray.api.id.ActorId;
/**
* Indicates that the back pressure occurs when submitting an actor task.
*
* <p>This exception could happen probably because the caller calls the callee too frequently.
*/
public class PendingCallsLimitExceededException extends RayException {

  // ID of the actor whose pending-call queue exceeded the configured limit.
  // NOTE(review): never assigned within this class -- presumably populated by
  // the core worker / serialization layer; confirm before relying on it.
  public ActorId actorId;

  public PendingCallsLimitExceededException(String message) {
    super(message);
  }
}
|
#!/usr/bin/env bash
set -e
set -x

# First CLI argument: hostname of the MySQL server to wait for.
DB_HOST=$1

# Block until the server answers pings.
until mysqladmin ping -h"$DB_HOST" --silent; do
    echo "MYSQL NOT READY"
    sleep 1
done
echo "MYSQL READY"

# Run pending Doctrine migrations, auto-answering the confirmation prompt.
echo y | bin/console doctrine:migrations:migrate

# Execute the test suite.
composer exec phing test
<gh_stars>0
// Auto-generated Doxygen navigation data for armnn::GraphObservable.
// Each entry is [ member name, documentation anchor, children (null = leaf) ].
// Do not edit by hand; regenerate with Doxygen instead.
var classarmnn_1_1_graph_observable =
[
    [ "Iterator", "classarmnn_1_1_graph_observable.xhtml#a9d28ec37483e8c22231f7bbf0fa0abad", null ],
    [ "GraphObservable", "classarmnn_1_1_graph_observable.xhtml#a6378a5e7096305c780b445b9af484d55", null ],
    [ "~GraphObservable", "classarmnn_1_1_graph_observable.xhtml#a275f5638b4dedbcbd80b42e4210cb88e", null ],
    [ "begin", "classarmnn_1_1_graph_observable.xhtml#a2387033802383edbdc95f9bbb12a707e", null ],
    [ "Clear", "classarmnn_1_1_graph_observable.xhtml#aa71d36872f416feaa853788a7a7a7ef8", null ],
    [ "end", "classarmnn_1_1_graph_observable.xhtml#ab45dae688fc5d8983727abffa4389003", null ],
    [ "m_NotifyOnEvent", "classarmnn_1_1_graph_observable.xhtml#a5294e0648bebd5643078b96f32e5f9fe", null ],
    [ "m_ObservedObjects", "classarmnn_1_1_graph_observable.xhtml#a1aef6df25b42119780dc05b880f4a429", null ],
    [ "m_Subject", "classarmnn_1_1_graph_observable.xhtml#aec60d37066c40f5583a91535545a2469", null ]
];
package judge
import (
"io/ioutil"
"log"
"strconv"
proto "github.com/golang/protobuf/proto"
"github.com/hyperledger/fabric_judge/comparator"
cb "github.com/hyperledger/fabric_judge/protos/common"
validator "github.com/hyperledger/fabric_judge/validator"
"github.com/hyperledger/fabric_judge/verdicts"
)
//export VerifyConsistency
func VerifyConsistency(blockDir1 string, blockDir2 string, identity1 string, identity2 string, channelName string, kafkaPublicKey string, maxBatchSize int, preferredBlockSize int) {
	// Load and parse the block files delivered by each of the two peers.
	println("Reading and parsing received blocks")
	blocks1 := getBlocksFromDir(blockDir1, channelName)
	blocks2 := getBlocksFromDir(blockDir2, channelName)
	// One verifier per peer; each first checks its own chain independently.
	verifier1 := validator.NewVerifier(blocks1, kafkaPublicKey, identity1, maxBatchSize, preferredBlockSize)
	verifier2 := validator.NewVerifier(blocks2, kafkaPublicKey, identity2, maxBatchSize, preferredBlockSize)
	println("Blocks are successfully parsed\n")
	// Verify all merkle proofs, kafka signatures and whether the sequence numbers are incremented sequentially
	// Here, there are two possible verdicts:
	// 1. Peer accepts block containing invalid merkle proofs, kafka signatures or inconsistent seq. numbers
	//    In this case, we blame both orderer and peer
	// 2. Inconsistency is only shown in the last block:
	//    Here we assume, that the peer actually followed the protocol and shut down after receiving an invalid kafka message.
	println("Verifying Merkle-Proofs and signatures of all Kafka messages. Furthermore, we verify that the Kafka sequence numbers are sorted correctly")
	verdict := verifier1.VerifyKafkaMessages()
	evaluateVerdict(verdict) // exits the process if a verdict was rendered
	verdict = verifier2.VerifyKafkaMessages()
	evaluateVerdict(verdict)
	println("Verification successfully complete\n")
	// Here we can assume, that both peers received the kafka messages in the intended order (because kafka seq. numbers are sorted sequentially)
	// Thus we can now check, if the Kafka Cluster (viewed as a single entity) signed two different messages with the same sequence number
	// In this case, we obviously render a verdict against the Kafka Cluster
	println("Comparing Kafka messages of both blocks to check, if the same sequence number was used on different blocks")
	kafkaComparator := comparator.NewKafkaComparator(verifier1, verifier2)
	verdict = kafkaComparator.CompareKafkaMessages()
	evaluateVerdict(verdict)
	println("No irregularity was found\n")
	// At this point, the only thing left to do is to verify that the orderer cut his blocks according to the given Block-Cutting algorithm
	println("Verifying that the orderer has cut the blocks correctly")
	verdict = verifier1.VerifyBlockCuttingOfOrderer()
	evaluateVerdict(verdict)
	verdict = verifier2.VerifyBlockCuttingOfOrderer()
	evaluateVerdict(verdict)
	println("Orderer cut blocks following the Block-Cutting algorithm")
}
// evaluateVerdict logs every rendered verdict and terminates the process;
// a nil slice means "no irregularity found" and is a no-op.
func evaluateVerdict(verdict []*verdicts.Verdict) {
	if verdict == nil {
		return
	}
	for _, v := range verdict {
		log.Println(v.EvaluateVerdict())
	}
	log.Fatal("Inconsistency in blocks is ascertained, exiting...")
}
// getBlocksFromDir loads all blocks of the given channel from dir.
// Block i is expected at "<dir><channelName>_<i>.block"; any read or
// parse failure is fatal.
func getBlocksFromDir(dir string, channelName string) []*cb.Block {
	entries, err := ioutil.ReadDir(dir)
	if err != nil {
		log.Fatal(err)
	}
	// NOTE(review): assumes every directory entry is one of the numbered
	// block files -- confirm no other files can appear here.
	result := make([]*cb.Block, len(entries))
	for i := 0; i < len(entries); i++ {
		path := dir + channelName + "_" + strconv.Itoa(i) + ".block"
		block, parseErr := computeBlockFromFile(path)
		if parseErr != nil {
			log.Fatal(parseErr)
		}
		result[i] = block
	}
	return result
}
// computeBlockFromFile reads a serialized block from filePath and
// unmarshals it.  Returns a non-nil error when the file cannot be read
// or the contents cannot be parsed.
func computeBlockFromFile(filePath string) (*cb.Block, error) {
	blockData, err := ioutil.ReadFile(filePath)
	if err != nil {
		println("ERROR: Unable to read file: " + filePath)
		return nil, err
	}
	block := new(cb.Block)
	// BUG FIX: the unmarshal error was previously printed but discarded and
	// a partially initialized block was returned with a nil error.
	if err = proto.Unmarshal(blockData, block); err != nil {
		println("ERROR: Unable to parse block: " + filePath)
		return nil, err
	}
	return block, nil
}
|
/*
bash-5.1$ gcc -o cyril tabella_cyrillic.c converti_api.c
bash-5.1$ cyril
|0|1|2|3|4|5|6|7|8|9|A|B|C|D|E|F|
|---|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|
|41_|А|Б|В|Г|Д|Е|Ж|З|И|Й|К|Л|М|Н|О|П|
|42_|Р|С|Т|У|Ф|Х|Ц|Ч|Ш|Щ|Ъ|Ы|Ь|Э|Ю|Я|
|43_|а|б|в|г|д|е|ж|з|и|й|к|л|м|н|о|п|
|44_|р|с|т|у|ф|х|ц|ч|ш|щ|ъ|ы|ь|э|ю|я|
*/
#include <stdio.h>
#include <stdlib.h>
#include "converti_api.h"
/* Print a markdown table of the Cyrillic code points 0x410-0x44F, one row
 * per high nibble (41_-44_), converting each code point with converti_api. */
int main(int argc, char *argv[]) {
    char risultato[5];  /* output of converti_api for one code point (+ NUL) */
    char cyril[4];      /* 3 hex digits + NUL, e.g. "41A" */
    int row, col;

    /* Header row: hex column indices 0..F. */
    printf(" ");
    for (col = 0; col < 16; ++col) {
        printf("|%X", col);
    }
    printf("|\n|---");
    for (col = 0; col < 16; ++col) {
        printf("|-");
    }
    printf("|\n");

    /* Body: rows 0x41_ through 0x44_. */
    for (row = 65; row < 69; ++row) {
        printf("|%X_", row);
        for (col = 0; col < 16; ++col) {
            sprintf(cyril, "%3.3X", 16 * row + col);
            converti_api(cyril, risultato);
            printf("|%s", risultato);
        }
        printf("|\n");
    }
    return 0;
}
|
#!/bin/sh
set -e

# Destination bundle for compiled/copied resources.
mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain files to rsync later; truncate any leftover from a previous run.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
# Collected .xcassets paths, compiled with actool at the end of the script.
XCASSET_FILES=()
# Resolve a path to "<absolute-dir>/<basename>" without requiring the
# realpath(1) binary; the argument must contain at least one slash.
realpath() {
  printf '%s/%s\n' "$(cd "${1%/*}" && pwd)" "${1##*/}"
}
# Dispatch a single Pod resource by extension: interface files are compiled
# with ibtool, data models with momc/mapc, frameworks rsynced, xcassets
# queued for a single actool run, and everything else appended to the
# copy manifest ($RESOURCES_TO_COPY).
install_resource()
{
  case $1 in
    *.storyboard)
      # Compile to .storyboardc next to the other bundle resources.
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.xib)
      # Compile to .nib.
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.framework)
      # Frameworks go to the frameworks folder, not the resources folder.
      echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync -av ${PODS_ROOT}/$1 ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      rsync -av "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      # Core Data model -> compiled .mom.
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1"`.mom\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      # Versioned Core Data model -> compiled .momd bundle.
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      # Core Data mapping model -> compiled .cdm.
      echo "xcrun mapc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm\""
      xcrun mapc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Asset catalogs are batched and compiled in one actool invocation below.
      ABSOLUTE_XCASSET_FILE=$(realpath "${PODS_ROOT}/$1")
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    /*)
      # Already-absolute path: copy as-is.
      echo "$1"
      echo "$1" >> "$RESOURCES_TO_COPY"
      ;;
    *)
      # Path relative to PODS_ROOT: copy as-is.
      echo "${PODS_ROOT}/$1"
      echo "${PODS_ROOT}/$1" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Per-configuration resource lists generated by CocoaPods.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "MJRefresh/MJRefresh/MJRefresh.bundle"
  install_resource "WeiboSDK/libWeiboSDK/WeiboSDK.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "MJRefresh/MJRefresh/MJRefresh.bundle"
  install_resource "WeiboSDK/libWeiboSDK/WeiboSDK.bundle"
fi
mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Copy every manifest entry into the build products bundle...
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# ...and, for archive/install builds, into the install directory as well.
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile all collected asset catalogs with a single actool invocation,
# but only for bundle targets when actool is available.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
  # Map TARGETED_DEVICE_FAMILY onto actool --target-device flags.
  case "${TARGETED_DEVICE_FAMILY}" in
    1,2)
      TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
      ;;
    1)
      TARGET_DEVICE_ARGS="--target-device iphone"
      ;;
    2)
      TARGET_DEVICE_ARGS="--target-device ipad"
      ;;
    *)
      TARGET_DEVICE_ARGS="--target-device mac"
      ;;
  esac
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # BUG FIX: the glob '*' must be OUTSIDE the quotes; quoted, the pattern is
    # compared literally, so it never matched and Pods-owned catalogs were
    # re-added (duplicates) to XCASSET_FILES.
    if [[ $line != "$(realpath "$PODS_ROOT")"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  # NUL-separate the paths so names with spaces survive xargs.
  printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
#!/bin/sh
# Step 1 - Authenticate & retrieve a session token
# NOTE(review): 'devnetuser:Cisco123!' are the published credentials of the
# public Cisco DNA Center DevNet sandbox, not a private secret -- confirm
# before reusing this pattern with real credentials.
curl -X POST -iu 'devnetuser:Cisco123!' -H 'Content-Type: application/json' \
--url https://sandboxdnac.cisco.com/dna/system/api/v1/auth/token
|
/// A simple LIFO stack backed by an array; the array's tail is the top.
class Stack<T>: StackProtocol {
    /// Backing storage for the stacked elements.
    private var elements = [T]()

    /// Pushes `element` onto the top of the stack.
    func push(_ element: T) {
        elements.append(element)
    }

    /// Removes and returns the top element, or nil when the stack is empty.
    func pop() -> T? {
        return elements.popLast()
    }

    /// Returns the top element without removing it, or nil when empty.
    func peek() -> T? {
        return elements.last
    }

    /// True when the stack holds no elements.
    func isEmpty() -> Bool {
        return elements.isEmpty
    }
}
#!/bin/bash
# Package the calc_J binary as calc_J_<version>_<distro>_<distroVer>_<format>.tar.gz.

# Version string: 4th whitespace field of `calc_J --version`, newline stripped.
VERSION=$(/QC_Tools/build/calc_J --version | awk '{ print $4 }' |tr -d '\n')
# Binary format: 6th field of file(1) output with the trailing comma removed.
FORMAT=$(file /QC_Tools/build/calc_J | awk '{ print $6}' )
FORMAT=${FORMAT%?}
# Distro name/version from lsb_release ("Distributor ID:" / "Release:" lines).
DISTRO=$(lsb_release -a | grep ID | awk '{print $3}')
DISTRO_VERSION=$(lsb_release -a | grep Release | awk '{print $2}')
tar_file=$(echo calc_J_${VERSION}_${DISTRO}_${DISTRO_VERSION}_${FORMAT}.tar.gz)
echo "tar file name "${tar_file}
ls /QC_Tools/build
# NOTE(review): the tar member path is relative (QC_Tools/...) while all other
# references use /QC_Tools -- this only works when the script runs from /;
# confirm the intended working directory.
tar -zcvf "${tar_file}" QC_Tools/build/calc_J
mkdir TAR
mv *tar.gz TAR/
|
#!/bin/bash
#
# Vagrant Provisionner for PHP Dev
# - Apache
# - MySQL
# - PHP-fpm
#
# @author Akarun for KRKN <akarun@krkn.be> and Passtech <akarun@passtech.be>
# @since August 2014
#
# =============================================================================
# Positional arguments from the Vagrantfile:
#   $1 project name (lower-cased, spaces -> underscores), $2 host, $3 docroot.
PROJECT_NAME=$( echo $1 | sed -e 's/[A-Z]/\L&/g;s/ /_/g')
PROJECT_HOST=$2
PROJECT_ROOT=$3
LOG_FILE="/vagrant/.vagrant/deploy.log"
# =============================================================================
# CLL clears the current terminal line (80 spaces between carriage returns);
# SEP is an 80-dash ruler.
CLL="\r$(printf '%*s\n' 80)\r"
SEP="\r$(printf '%0.1s' "-"{1..80})"
# Status helpers: print the message and a colored [ OK ]/[ FAILED ]/... tag
# aligned at column 69 via ANSI escape sequences.
function echo_line { echo -en "${CLL}$*\n"; }
function echo_success { echo -en "${CLL}$*\033[69G\033[0;39m[ \033[1;32mOK\033[0;39m ]\n"; }
function echo_failure { echo -en "${CLL}$*\033[69G\033[0;39m[ \033[1;31mFAILED\033[0;39m ]\n"; }
function echo_warning { echo -en "${CLL}$*\033[69G\033[0;39m[ \033[1;33mWARNING\033[0;39m ]\n"; }
function echo_skip { echo -en "${CLL}$*\033[69G\033[0;39m[ \033[1;34mSKIPPED\033[0;39m ]\n"; }
function echo_done { echo -en "${CLL}$*\033[69G\033[0;39m[ \033[1;34mDONE\033[0;39m ]\n"; }
# Finish the provisioning run.  With no arguments we exit cleanly; any
# arguments signal failure, where $1 is the exit code and $2 the message.
function process_end {
    if (( $# == 0 )); then
        echo_line "${SEP}"
        exit 0
    fi
    echo_failure "ERROR($1) : $2"
    echo_line "${SEP}"
    exit 1
}
# =============================================================================
# Update and package list
echo_line "${SEP}"
# =============================================================================
# ELASTICSEARCH
echo_line "ElasticSearch\n"

# Install the Java runtime ElasticSearch needs (skipped when already present).
SLINE="\t- Install Java"
test $(which java) && echo_done $SLINE || ( apt-get install -y openjdk-7-jre-headless >>$LOG_FILE 2>&1 && echo_success $SLINE || echo_failure $SLINE)

# Download and install the elasticsearch .deb once, then register it as a
# boot service; subsequent runs skip this whole branch.
SLINE="\t- Download"
SLINE2="\t- Installation"
if [[ ! -f /etc/init.d/elasticsearch ]]; then
    pushd /tmp >>$LOG_FILE
    wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.4.deb >>$LOG_FILE 2>&1 && echo_success $SLINE || echo_failure $SLINE
    dpkg -i elasticsearch-1.4.4.deb >>$LOG_FILE 2>&1 && echo_success $SLINE2 || echo_failure $SLINE2
    # set as an auto boot service
    update-rc.d elasticsearch defaults >>$LOG_FILE 2>&1
    popd >>$LOG_FILE
else
    echo_skip $SLINE
    echo_done $SLINE2
fi

SLINE="\t- Start"
service elasticsearch start >>$LOG_FILE 2>&1 && echo_success $SLINE || echo_failure $SLINE

SLINE="\t- Install HEAD Plugin"
# BUG FIX: `test <literal-path>` tests a non-empty *string* and is therefore
# always true, so the HEAD plugin was never installed; `test -d` actually
# checks whether the plugin directory exists.
test -d /usr/share/elasticsearch/plugins/head && echo_done $SLINE || (/usr/share/elasticsearch/bin/plugin -install mobz/elasticsearch-head >>$LOG_FILE 2>&1 && echo_success $SLINE || echo_failure $SLINE)
# =============================================================================
# End
process_end
#!/usr/bin/env bash
set -e

# Start the containerized databases first; they take the longest to boot.
time docker-compose up -d sqlserver oracle
# import setup functions
. build/setup_db_scripts.sh
# load sbt deps to local repo
. build/oracle_setup/load_jdbc.sh
# run setup scripts for local databases
time setup_sqlite $SQLITE_SCRIPT 127.0.0.1
time setup_mysql $MYSQL_SCRIPT 127.0.0.1
time setup_postgres $POSTGRES_SCRIPT 127.0.0.1
# setup sqlserver in docker
send_script sqlserver $SQL_SERVER_SCRIPT sqlserver-schema.sql
send_script sqlserver ./build/setup_db_scripts.sh setup_db_scripts.sh
time docker-compose exec -T sqlserver bash -c ". setup_db_scripts.sh && setup_sqlserver sqlserver-schema.sql 127.0.0.1"
# Poll the Oracle container until its readiness probe prints the marker string.
until docker-compose exec -T oracle "/oracle_setup_local/external_check_script.sh" | grep "match_this_test_to_pass"; do
docker-compose exec -T oracle "/oracle_setup_local/external_check_script.sh"
echo "Waiting for Oracle"
sleep 5;
done
sleep 5;
echo "Oracle Setup Complete"
echo "Databases are ready!"
#!/usr/bin/env python
# circuits.py - convert a Boolean circuit to an equivalent Boolean formula
#
# Copyright 2016 <NAME> <<EMAIL>>.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""
========
Circuits
========
Convert a Boolean circuit to an equivalent Boolean formula.
A Boolean circuit can be exponentially more expressive than an
equivalent formula in the worst case, since the circuit can reuse
subcircuits multiple times, whereas a formula cannot reuse subformulas
more than once. Thus creating a Boolean formula from a Boolean circuit
in this way may be infeasible if the circuit is large.
"""
from networkx import dag_to_branching
from networkx import DiGraph
from networkx.utils import arbitrary_element
def circuit_to_formula(circuit):
    """Convert a Boolean circuit (a labeled DAG) into an equivalent formula.

    The formula is returned as a branching in which every node carries the
    'label' of the circuit node it was expanded from.  Shared subcircuits are
    duplicated, so the result can be exponentially larger than the circuit.
    """
    formula = dag_to_branching(circuit)
    # Transfer the operator or variable labels for each node from the
    # circuit to the formula.
    # FIX: Graph.node was removed in NetworkX 2.4; Graph.nodes (available
    # since 2.0, which this module's in_degree() view usage already requires)
    # is the supported spelling.
    for v in formula:
        source = formula.nodes[v]['source']
        formula.nodes[v]['label'] = circuit.nodes[source]['label']
    return formula
def formula_to_string(formula):
    """Return a parenthesized string for a Boolean formula.

    ``formula`` must be a branching (as produced by :func:`circuit_to_formula`)
    whose nodes carry a 'label' attribute holding a variable name or an
    operator symbol; unary labels are rendered as ``op(child)``.
    """
    def _to_string(formula, root):
        # If there are no children, this is a variable node.
        # FIX: Graph.node was removed in NetworkX 2.4; use Graph.nodes.
        label = formula.nodes[root]['label']
        if not formula[root]:
            return label
        # Otherwise, this is an operator.
        children = formula[root]
        # If one child, the label must be a NOT operator.
        if len(children) == 1:
            child = arbitrary_element(children)
            return '{}({})'.format(label, _to_string(formula, child))
        # NB "left" and "right" here are a little misleading: there is
        # no order on the children of a node. That's okay because the
        # Boolean AND and OR operators are symmetric. It just means that
        # the order of the operands cannot be predicted and hence the
        # function does not necessarily behave the same way on every
        # invocation.
        left, right = formula[root]
        left_subformula = _to_string(formula, left)
        right_subformula = _to_string(formula, right)
        return '({} {} {})'.format(left_subformula, label, right_subformula)

    # The root of a branching is the unique node without incoming edges.
    root = next(v for v, d in formula.in_degree() if d == 0)
    return _to_string(formula, root)
def main():
    """Build the example circuit ((x ∨ y) ∧ (y ∨ ¬z)) and print its formula.

    Layers: node 0 is the output ∧; nodes 1-2 are ∨ gates; nodes 3-5 are the
    inputs x, y and a ¬ gate; node 6 is the input z under the negation.  The
    variable y is shared by both ∨ gates, so it is duplicated in the formula.
    """
    circuit = DiGraph()
    # Add the labeled gate/variable nodes in the same order as before.
    for node, label in [(0, '∧'), (1, '∨'), (2, '∨'),
                        (3, 'x'), (4, 'y'), (5, '¬'), (6, 'z')]:
        circuit.add_node(node, label=label)
    # Wire the layers together (output -> gates -> inputs).
    circuit.add_edges_from([(0, 1), (0, 2),
                            (1, 3), (1, 4), (2, 4), (2, 5),
                            (5, 6)])
    # Convert the circuit to an equivalent formula.
    formula = circuit_to_formula(circuit)
    print(formula_to_string(formula))
|
package net.ossrs.yasea.rtmp.io;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.concurrent.atomic.AtomicInteger;
import android.util.Log;
import net.ossrs.yasea.rtmp.RtmpPublisher;
import net.ossrs.yasea.rtmp.amf.AmfMap;
import net.ossrs.yasea.rtmp.amf.AmfNull;
import net.ossrs.yasea.rtmp.amf.AmfNumber;
import net.ossrs.yasea.rtmp.amf.AmfObject;
import net.ossrs.yasea.rtmp.amf.AmfString;
import net.ossrs.yasea.rtmp.packets.Abort;
import net.ossrs.yasea.rtmp.packets.Acknowledgement;
import net.ossrs.yasea.rtmp.packets.Data;
import net.ossrs.yasea.rtmp.packets.Handshake;
import net.ossrs.yasea.rtmp.packets.Command;
import net.ossrs.yasea.rtmp.packets.Audio;
import net.ossrs.yasea.rtmp.packets.Video;
import net.ossrs.yasea.rtmp.packets.UserControl;
import net.ossrs.yasea.rtmp.packets.RtmpPacket;
import net.ossrs.yasea.rtmp.packets.WindowAckSize;
/**
* Main RTMP connection implementation class
*
* @author francois, leoma
*/
public class RtmpConnection implements RtmpPublisher, PacketRxHandler {
    private static final String TAG = "RtmpConnection";
    // Matches rtmp://host[:port]/app[/streamName]; groups 1, 3, 4 and 6 are used.
    private static final Pattern rtmpUrlPattern = Pattern.compile("^rtmp://([^/:]+)(:(\\d+))*/([^/]+)(/(.*))*$");
    // Receives connection/publish lifecycle callbacks.
    private RtmpPublisher.EventHandler mHandler;
    // Connection parameters parsed from the RTMP URL in connect().
    private String appName;
    private String streamName;
    private String publishType;
    private String swfUrl;
    private String tcUrl;
    private String pageUrl;
    private Socket socket;
    private RtmpSessionInfo rtmpSessionInfo;
    private ReadThread readThread;
    private WriteThread writeThread;
    // Packets produced by the read thread, consumed by handleRxPacketLoop().
    private final ConcurrentLinkedQueue<RtmpPacket> rxPacketQueue = new ConcurrentLinkedQueue<>();
    private final Object rxPacketLock = new Object();
    // Session state flags, written and read from multiple threads.
    private volatile boolean active = false;
    private volatile boolean connecting = false;
    private volatile boolean fullyConnected = false;
    private volatile boolean publishPermitted = false;
    // Monitors used to await the "connect" and "publish" results.
    private final Object connectingLock = new Object();
    private final Object publishLock = new Object();
    private AtomicInteger videoFrameCacheNumber = new AtomicInteger(0);
    // Stream id assigned by the server; -1 means no open stream.
    private int currentStreamId = -1;
    private int transactionIdCounter = 0;
    // Server identity values reported in the connect result.
    private AmfString serverIpAddr;
    private AmfNumber serverPid;
    private AmfNumber serverId;
    private int videoWidth;
    private int videoHeight;
    /**
     * @param handler callback sink for connection/publish lifecycle events
     */
    public RtmpConnection(RtmpPublisher.EventHandler handler) {
        mHandler = handler;
    }
    /**
     * Performs the RTMP C0/C1/C2 - S0/S1/S2 handshake on the given streams.
     * The exact write/flush/read order below matters; do not reorder.
     */
    private void handshake(InputStream in, OutputStream out) throws IOException {
        Handshake handshake = new Handshake();
        handshake.writeC0(out);
        handshake.writeC1(out); // Write C1 without waiting for S0
        out.flush();
        handshake.readS0(in);
        handshake.readS1(in);
        handshake.writeC2(out);
        handshake.readS2(in);
    }
    /**
     * Opens the TCP connection, performs the RTMP handshake, starts the
     * read/write threads and the rx handling loop, and finally issues the
     * RTMP "connect" invoke.
     *
     * @param url full RTMP URL, e.g. rtmp://host[:port]/application[/streamName]
     * @throws IOException if the socket, handshake or command I/O fails
     */
    @Override
    public void connect(String url) throws IOException {
        int port;
        String host;
        Matcher matcher = rtmpUrlPattern.matcher(url);
        if (matcher.matches()) {
            // tcUrl is everything up to (but excluding) the stream name.
            tcUrl = url.substring(0, url.lastIndexOf('/'));
            swfUrl = "";
            pageUrl = "";
            host = matcher.group(1);
            String portStr = matcher.group(3);
            port = portStr != null ? Integer.parseInt(portStr) : 1935; // 1935 = RTMP default port
            appName = matcher.group(4);
            streamName = matcher.group(6);
        } else {
            throw new IllegalArgumentException("Invalid RTMP URL. Must be in format: rtmp://host[:port]/application[/streamName]");
        }
        // socket connection
        Log.d(TAG, "connect() called. Host: " + host + ", port: " + port + ", appName: " + appName + ", publishPath: " + streamName);
        socket = new Socket();
        SocketAddress socketAddress = new InetSocketAddress(host, port);
        socket.connect(socketAddress, 3000); // 3-second connect timeout
        BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
        BufferedOutputStream out = new BufferedOutputStream(socket.getOutputStream());
        Log.d(TAG, "connect(): socket connection established, doing handhake...");
        handshake(in, out);
        active = true;
        Log.d(TAG, "connect(): handshake done");
        rtmpSessionInfo = new RtmpSessionInfo();
        readThread = new ReadThread(rtmpSessionInfo, in, this);
        writeThread = new WriteThread(rtmpSessionInfo, out, this);
        readThread.start();
        writeThread.start();
        // Start the "main" handling thread
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(TAG, "starting main rx handler loop");
                    handleRxPacketLoop();
                } catch (IOException ex) {
                    Logger.getLogger(RtmpConnection.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }).start();
        rtmpConnect();
    }
    /**
     * Builds and sends the RTMP "connect" invoke and marks the session as
     * connecting; the server's reply is handled asynchronously by the rx loop.
     *
     * @throws IllegalStateException if already connected or connecting
     */
    private void rtmpConnect() throws IOException, IllegalStateException {
        if (fullyConnected || connecting) {
            throw new IllegalStateException("Already connected or connecting to RTMP server");
        }
        // Mark session timestamp of all chunk stream information on connection.
        ChunkStreamInfo.markSessionTimestampTx();
        Log.d(TAG, "rtmpConnect(): Building 'connect' invoke packet");
        ChunkStreamInfo chunkStreamInfo = rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_COMMAND_CHANNEL);
        Command invoke = new Command("connect", ++transactionIdCounter, chunkStreamInfo);
        invoke.getHeader().setMessageStreamId(0);
        // Standard AMF "connect" argument object describing this client.
        AmfObject args = new AmfObject();
        args.setProperty("app", appName);
        args.setProperty("flashVer", "LNX 11,2,202,233"); // Flash player OS: Linux, version: 11.2.202.233
        args.setProperty("swfUrl", swfUrl);
        args.setProperty("tcUrl", tcUrl);
        args.setProperty("fpad", false);
        args.setProperty("capabilities", 239);
        args.setProperty("audioCodecs", 3575);
        args.setProperty("videoCodecs", 252);
        args.setProperty("videoFunction", 1);
        args.setProperty("pageUrl", pageUrl);
        args.setProperty("objectEncoding", 0);
        invoke.addData(args);
        writeThread.send(invoke);
        connecting = true;
        mHandler.onRtmpConnecting("connecting");
    }
    /**
     * Starts publishing with the given type (e.g. "live").  If a "connect"
     * invoke is still outstanding, first waits up to 5 seconds for the rx
     * loop to signal completion via connectingLock.
     */
    @Override
    public void publish(String type) throws IllegalStateException, IOException {
        if (connecting) {
            synchronized (connectingLock) {
                try {
                    connectingLock.wait(5000);
                } catch (InterruptedException ex) {
                    // do nothing
                }
            }
        }
        publishType = type;
        createStream();
    }
    /**
     * Issues the FMLE-style publish sequence (releaseStream, FCPublish,
     * createStream) and then blocks up to 5 seconds waiting for the
     * "NetStream.Publish.Start" result, which is handled by the rx loop.
     */
    private void createStream() {
        if (!fullyConnected) {
            throw new IllegalStateException("Not connected to RTMP server");
        }
        if (currentStreamId != -1) {
            throw new IllegalStateException("Current stream object has existed");
        }
        Log.d(TAG, "createStream(): Sending releaseStream command...");
        // transactionId == 2
        Command releaseStream = new Command("releaseStream", ++transactionIdCounter);
        releaseStream.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_STREAM_CHANNEL);
        releaseStream.addData(new AmfNull()); // command object: null for "releaseStream"
        releaseStream.addData(streamName); // name of the stream to release
        writeThread.send(releaseStream);
        Log.d(TAG, "createStream(): Sending FCPublish command...");
        // transactionId == 3
        Command FCPublish = new Command("FCPublish", ++transactionIdCounter);
        FCPublish.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_STREAM_CHANNEL);
        FCPublish.addData(new AmfNull()); // command object: null for "FCPublish"
        FCPublish.addData(streamName);
        writeThread.send(FCPublish);
        Log.d(TAG, "createStream(): Sending createStream command...");
        ChunkStreamInfo chunkStreamInfo = rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_COMMAND_CHANNEL);
        // transactionId == 4
        Command createStream = new Command("createStream", ++transactionIdCounter, chunkStreamInfo);
        createStream.addData(new AmfNull()); // command object: null for "createStream"
        writeThread.send(createStream);
        // Waiting for "NetStream.Publish.Start" response.
        synchronized (publishLock) {
            try {
                publishLock.wait(5000);
            } catch (InterruptedException ex) {
                // do nothing
            }
        }
    }
private void fmlePublish() throws IllegalStateException {
if (!fullyConnected) {
throw new IllegalStateException("Not connected to RTMP server");
}
if (currentStreamId == -1) {
throw new IllegalStateException("No current stream object exists");
}
Log.d(TAG, "fmlePublish(): Sending publish command...");
// transactionId == 0
Command publish = new Command("publish", 0);
publish.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_STREAM_CHANNEL);
publish.getHeader().setMessageStreamId(currentStreamId);
publish.addData(new AmfNull()); // command object: null for "publish"
publish.addData(streamName);
publish.addData(publishType);
writeThread.send(publish);
}
    /**
     * Sends the "@setDataFrame"/"onMetaData" data message describing the
     * stream on the current stream id.  Several properties are placeholder
     * zeros; width/height come from the configured video dimensions.
     */
    private void onMetaData() throws IllegalStateException {
        if (!fullyConnected) {
            throw new IllegalStateException("Not connected to RTMP server");
        }
        if (currentStreamId == -1) {
            throw new IllegalStateException("No current stream object exists");
        }
        Log.d(TAG, "onMetaData(): Sending empty onMetaData...");
        Data metadata = new Data("@setDataFrame");
        metadata.getHeader().setMessageStreamId(currentStreamId);
        metadata.addData("onMetaData");
        // ECMA array of stream properties (insertion order is serialized).
        AmfMap ecmaArray = new AmfMap();
        ecmaArray.setProperty("duration", 0);
        ecmaArray.setProperty("width", videoWidth);
        ecmaArray.setProperty("height", videoHeight);
        ecmaArray.setProperty("videodatarate", 0);
        ecmaArray.setProperty("framerate", 0);
        ecmaArray.setProperty("audiodatarate", 0);
        ecmaArray.setProperty("audiosamplerate", 44100);
        ecmaArray.setProperty("audiosamplesize", 16);
        ecmaArray.setProperty("stereo", true);
        ecmaArray.setProperty("filesize", 0);
        metadata.addData(ecmaArray);
        writeThread.send(metadata);
    }
@Override
public void closeStream() throws IllegalStateException {
if (!fullyConnected) {
throw new IllegalStateException("Not connected to RTMP server");
}
if (currentStreamId == -1) {
throw new IllegalStateException("No current stream object exists");
}
if (!publishPermitted) {
throw new IllegalStateException("Not get the _result(Netstream.Publish.Start)");
}
Log.d(TAG, "closeStream(): setting current stream ID to -1");
Command closeStream = new Command("closeStream", 0);
closeStream.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_STREAM_CHANNEL);
closeStream.getHeader().setMessageStreamId(currentStreamId);
closeStream.addData(new AmfNull());
writeThread.send(closeStream);
mHandler.onRtmpStopped("stopped");
}
    /**
     * Tears the connection down: stops both I/O threads, half-closes then
     * closes the socket, wakes the rx loop so it can exit, notifies the
     * handler, and finally resets all session state.  Safe to call when not
     * active (only the reset runs then).
     */
    @Override
    public void shutdown() {
        if (active) {
            readThread.shutdown();
            writeThread.shutdown();
            try {
                // It will invoke EOFException in read thread
                socket.shutdownInput();
                // It will invoke SocketException in write thread
                socket.shutdownOutput();
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
            // Join both threads; on interruption, forward the interrupt.
            try {
                readThread.join();
            } catch (InterruptedException ie) {
                ie.printStackTrace();
                readThread.interrupt();
            }
            try {
                writeThread.join();
            } catch (InterruptedException ie) {
                ie.printStackTrace();
                writeThread.interrupt();
            }
            // shutdown handleRxPacketLoop
            rxPacketQueue.clear();
            active = false;
            synchronized (rxPacketLock) {
                rxPacketLock.notify();
            }
            // shutdown socket as well as its input and output stream
            if (socket != null) {
                try {
                    socket.close();
                    Log.d(TAG, "socket closed");
                } catch (IOException ex) {
                    Log.e(TAG, "shutdown(): failed to close socket", ex);
                }
            }
            mHandler.onRtmpDisconnected("disconnected");
        }
        reset();
    }
private void reset() {
active = false;
connecting = false;
fullyConnected = false;
publishPermitted = false;
tcUrl = null;
swfUrl = null;
pageUrl = null;
appName = null;
streamName = null;
publishType = null;
currentStreamId = -1;
transactionIdCounter = 0;
videoFrameCacheNumber.set(0);
serverIpAddr = null;
serverPid = null;
serverId = null;
rtmpSessionInfo = null;
}
    /**
     * Called when the acknowledgement window is exhausted; replies with an
     * Acknowledgement carrying the running byte count.
     */
    @Override
    public void notifyWindowAckRequired(final int numBytesReadThusFar) {
        Log.i(TAG, "notifyWindowAckRequired() called");
        // Create and send window bytes read acknowledgement
        writeThread.send(new Acknowledgement(numBytesReadThusFar));
    }
@Override
public void publishAudioData(byte[] data) throws IllegalStateException {
if (!fullyConnected) {
throw new IllegalStateException("Not connected to RTMP server");
}
if (currentStreamId == -1) {
throw new IllegalStateException("No current stream object exists");
}
if (!publishPermitted) {
throw new IllegalStateException("Not get the _result(Netstream.Publish.Start)");
}
Audio audio = new Audio();
audio.setData(data);
audio.getHeader().setMessageStreamId(currentStreamId);
writeThread.send(audio);
mHandler.onRtmpAudioStreaming("audio streaming");
}
@Override
public void publishVideoData(byte[] data) throws IllegalStateException {
if (!fullyConnected) {
throw new IllegalStateException("Not connected to RTMP server");
}
if (currentStreamId == -1) {
throw new IllegalStateException("No current stream object exists");
}
if (!publishPermitted) {
throw new IllegalStateException("Not get the _result(Netstream.Publish.Start)");
}
Video video = new Video();
video.setData(data);
video.getHeader().setMessageStreamId(currentStreamId);
writeThread.send(video);
videoFrameCacheNumber.getAndIncrement();
mHandler.onRtmpVideoStreaming("video streaming");
}
@Override
public void handleRxPacket(RtmpPacket rtmpPacket) {
if (rtmpPacket != null) {
rxPacketQueue.add(rtmpPacket);
}
synchronized (rxPacketLock) {
rxPacketLock.notify();
}
}
    /**
     * Consumer loop for packets queued by {@link #handleRxPacket}: drains the
     * queue, dispatches by message type, then waits (up to 500 ms) on
     * rxPacketLock for more work.  Runs until {@code active} is cleared.
     */
    private void handleRxPacketLoop() throws IOException {
        // Handle all queued received RTMP packets
        while (active) {
            while (!rxPacketQueue.isEmpty()) {
                RtmpPacket rtmpPacket = rxPacketQueue.poll();
                //Log.d(TAG, "handleRxPacketLoop(): RTMP rx packet message type: " + rtmpPacket.getHeader().getMessageType());
                switch (rtmpPacket.getHeader().getMessageType()) {
                    case ABORT:
                        // Discard partially received chunks of the aborted chunk stream.
                        rtmpSessionInfo.getChunkStreamInfo(((Abort) rtmpPacket).getChunkStreamId()).clearStoredChunks();
                        break;
                    case USER_CONTROL_MESSAGE:
                        UserControl ping = (UserControl) rtmpPacket;
                        switch (ping.getType()) {
                            case PING_REQUEST:
                                // Reply with a PONG on the control channel.
                                ChunkStreamInfo channelInfo = rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CONTROL_CHANNEL);
                                Log.d(TAG, "handleRxPacketLoop(): Sending PONG reply..");
                                UserControl pong = new UserControl(ping, channelInfo);
                                writeThread.send(pong);
                                break;
                            case STREAM_EOF:
                                Log.i(TAG, "handleRxPacketLoop(): Stream EOF reached, closing RTMP writer...");
                                break;
                        }
                        break;
                    case WINDOW_ACKNOWLEDGEMENT_SIZE:
                        WindowAckSize windowAckSize = (WindowAckSize) rtmpPacket;
                        int size = windowAckSize.getAcknowledgementWindowSize();
                        Log.d(TAG, "handleRxPacketLoop(): Setting acknowledgement window size: " + size);
                        rtmpSessionInfo.setAcknowledgmentWindowSize(size);
                        // Set socket option
                        socket.setSendBufferSize(size);
                        break;
                    case SET_PEER_BANDWIDTH:
                        // Answer with our own window ack size on the control channel.
                        int acknowledgementWindowsize = rtmpSessionInfo.getAcknowledgementWindowSize();
                        final ChunkStreamInfo chunkStreamInfo = rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CONTROL_CHANNEL);
                        Log.d(TAG, "handleRxPacketLoop(): Send acknowledgement window size: " + acknowledgementWindowsize);
                        writeThread.send(new WindowAckSize(acknowledgementWindowsize, chunkStreamInfo));
                        break;
                    case COMMAND_AMF0:
                        handleRxInvoke((Command) rtmpPacket);
                        break;
                    default:
                        Log.w(TAG, "handleRxPacketLoop(): Not handling unimplemented/unknown packet of type: " + rtmpPacket.getHeader().getMessageType());
                        break;
                }
            }
            // Wait for next received packet
            synchronized (rxPacketLock) {
                try {
                    rxPacketLock.wait(500);
                } catch (InterruptedException ex) {
                    Log.w(TAG, "handleRxPacketLoop: Interrupted", ex);
                }
            }
        }
    }
/**
 * Dispatches an AMF0 command (invoke) received from the server.
 *
 * For "_result" messages the matching outgoing method name is looked up by
 * transaction id; it may be null if the id is unknown, so all comparisons
 * below are null-safe.
 *
 * BUGFIX: the original used String.contains(method) for createStream /
 * releaseStream / FCPublish, which (a) throws NullPointerException when the
 * transaction id is unknown (method == null) and (b) matches any substring
 * (e.g. an empty string). Exact equals() comparisons are used instead.
 *
 * @param invoke the decoded AMF0 command packet
 * @throws IOException propagated from follow-up sends (e.g. fmlePublish)
 */
private void handleRxInvoke(Command invoke) throws IOException {
    String commandName = invoke.getCommandName();
    if (commandName.equals("_result")) {
        // This is the result of one of the methods invoked by us
        String method = rtmpSessionInfo.takeInvokedCommand(invoke.getTransactionId());
        Log.d(TAG, "handleRxInvoke: Got result for invoked method: " + method);
        if ("connect".equals(method)) {
            // Capture server ip/pid/id information if any
            String serverInfo = onSrsServerInfo(invoke);
            mHandler.onRtmpConnected("connected" + serverInfo);
            // We can now send createStream commands
            connecting = false;
            fullyConnected = true;
            synchronized (connectingLock) {
                connectingLock.notifyAll();
            }
        } else if ("createStream".equals(method)) {
            // Get stream id
            currentStreamId = (int) ((AmfNumber) invoke.getData().get(1)).getValue();
            Log.d(TAG, "handleRxInvoke(): Stream ID to publish: " + currentStreamId);
            if (streamName != null && publishType != null) {
                fmlePublish();
            }
        } else if ("releaseStream".equals(method)) {
            Log.d(TAG, "handleRxInvoke(): 'releaseStream'");
        } else if ("FCPublish".equals(method)) {
            Log.d(TAG, "handleRxInvoke(): 'FCPublish'");
        } else {
            Log.w(TAG, "handleRxInvoke(): '_result' message received for unknown method: " + method);
        }
    } else if (commandName.equals("onBWDone")) {
        Log.d(TAG, "handleRxInvoke(): 'onBWDone'");
    } else if (commandName.equals("onFCPublish")) {
        Log.d(TAG, "handleRxInvoke(): 'onFCPublish'");
    } else if (commandName.equals("onStatus")) {
        String code = ((AmfString) ((AmfObject) invoke.getData().get(1)).getProperty("code")).getValue();
        if (code.equals("NetStream.Publish.Start")) {
            onMetaData();
            // We can now publish AV data
            publishPermitted = true;
            synchronized (publishLock) {
                publishLock.notifyAll();
            }
        }
    } else {
        Log.e(TAG, "handleRxInvoke(): Unknown/unhandled server invoke: " + invoke);
    }
}
/**
 * Extracts SRS-specific server info (ip/pid/id) from a connect _result,
 * caching it in fields, and returns a human-readable summary string.
 */
private String onSrsServerInfo(Command invoke) {
    // SRS servers attach an extra "data" AMF object to the connect result.
    AmfObject objData = (AmfObject) invoke.getData().get(1);
    final Object data = objData.getProperty("data");
    if (data instanceof AmfObject) {
        final AmfObject srsData = (AmfObject) data;
        serverIpAddr = (AmfString) srsData.getProperty("srs_server_ip");
        serverPid = (AmfNumber) srsData.getProperty("srs_pid");
        serverId = (AmfNumber) srsData.getProperty("srs_id");
    }
    // Build the summary from whatever fields are populated.
    final StringBuilder info = new StringBuilder();
    if (serverIpAddr != null) {
        info.append(" ip: ").append(serverIpAddr.getValue());
    }
    if (serverPid != null) {
        info.append(" pid: ").append((int) serverPid.getValue());
    }
    if (serverId != null) {
        info.append(" id: ").append((int) serverId.getValue());
    }
    return info.toString();
}
// Returns the counter of video frames queued via publishVideoData().
@Override
public AtomicInteger getVideoFrameCacheNumber() {
return videoFrameCacheNumber;
}
// Returns the handler that receives connection/streaming callbacks.
@Override
public EventHandler getEventHandler() {
return mHandler;
}
// Server-reported IP (SRS extension); null until the connect result arrives.
@Override
public final String getServerIpAddr() {
    if (serverIpAddr == null) {
        return null;
    }
    return serverIpAddr.getValue();
}
// Server-reported pid (SRS extension); 0 until the connect result arrives.
@Override
public final int getServerPid() {
    if (serverPid == null) {
        return 0;
    }
    return (int) serverPid.getValue();
}
// Server-reported id (SRS extension); 0 until the connect result arrives.
@Override
public final int getServerId() {
    if (serverId == null) {
        return 0;
    }
    return (int) serverId.getValue();
}
// Records the video dimensions, presumably consumed when metadata is sent
// (onMetaData) — TODO confirm against the metadata writer.
@Override
public void setVideoResolution(int width, int height) {
videoWidth = width;
videoHeight = height;
}
}
|
package main
import (
"bufio"
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"strconv"
"strings"
"sync"
"syscall"
)
var (
// NOTE(review): cls is never assigned or read in this file — looks dead;
// confirm before removing.
cls string
isWin = false // set true by init() on Windows hosts
)
// defaultThreads is used when the user presses Enter at the threads prompt.
const defaultThreads = 2048
// init flags Windows hosts so clear() can pick the right console command.
func init() {
	isWin = runtime.GOOS == "windows"
}
// main collects target/thread settings, starts the bot manager, and blocks
// until every worker finishes (deferred wg.Wait). Ctrl+C or SIGTERM asks the
// manager to stop, which in turn lets the WaitGroup drain.
func main() {
	wg := sync.WaitGroup{}
	defer wg.Wait()
	ip, port, threads := userInput()
	botManager := newBotManager(ip, port, threads, &wg)
	botManager.start()
	clear()
	fmt.Printf("Target: %s:%s\nThreads: %d\n\n(Ctrl + C) to quit\n", ip, port, threads)
	// BUGFIX: signal.Notify must not be given an unbuffered channel — a
	// signal arriving while no receiver is ready would be dropped (go vet
	// flags this). A 1-slot buffer guarantees delivery.
	c := make(chan os.Signal, 1)
	signal.Notify(c, os.Interrupt, syscall.SIGTERM)
	go func() {
		<-c
		botManager.stop()
	}()
}
// userInput prompts for target IP, port, and thread count on stdin.
// An empty, malformed, or non-positive thread entry falls back to
// defaultThreads.
func userInput() (ip string, port string, threads int) {
	scanner := bufio.NewScanner(os.Stdin)
	fmt.Print("Enter IP: ")
	scanner.Scan()
	ip = scanner.Text()
	fmt.Print("Enter Port: ")
	scanner.Scan()
	port = scanner.Text()
	fmt.Printf("Enter Threads (default: %d): ", defaultThreads)
	scanner.Scan()
	strThreads := strings.TrimSpace(scanner.Text())
	threads = defaultThreads
	if strThreads != "" {
		// BUGFIX: the Atoi error was ignored, so garbage input silently ran
		// with 0 threads. Keep the default unless we parsed a positive int.
		if n, err := strconv.Atoi(strThreads); err == nil && n > 0 {
			threads = n
		}
	}
	return
}
// clear wipes the terminal using the platform-appropriate console command.
func clear() {
	name, args := "clear", []string{}
	if isWin {
		name, args = "cmd", []string{"/c", "cls"}
	}
	cmd := exec.Command(name, args...)
	cmd.Stdout = os.Stdout
	cmd.Run()
}
|
#!/bin/bash
#
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test //external mechanisms
#
# Load the test setup defined in the parent directory
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${CURRENT_DIR}/../integration_test_setup.sh" \
|| { echo "integration_test_setup.sh not found!" >&2; exit 1; }
source "${CURRENT_DIR}/remote_helpers.sh" \
|| { echo "remote_helpers.sh not found!" >&2; exit 1; }
# Per-test setup: creates a fresh local maven repo directory and starts the
# file server over it. Paths are quoted since TEST_TMPDIR may contain spaces.
function set_up() {
  # Set up custom repository directory.
  m2="$TEST_TMPDIR/my-m2"
  rm -rf "$m2"
  mkdir -p "$m2"
  startup_server "$m2"
}
# Per-test teardown: stops the file server and removes the repo directory.
# $m2 is quoted so an unusual TEST_TMPDIR cannot turn rm -rf destructive.
function tear_down() {
  shutdown_server
  rm -rf "$m2"
}
# Runs the generate_workspace tool, forwarding all arguments verbatim.
# "$@" (quoted) preserves argument boundaries; bare $@ would re-split them.
function generate_workspace() {
  ${bazel_data}/src/tools/generate_workspace/generate_workspace "$@"
}
# Takes: groupId, artifactId, and version, extra arguments are dependencies.
# Takes: groupId, artifactId, and version, extra arguments are dependencies.
# Publishes a pom plus a one-class jar into the local $m2 repo, writes the
# jar's .sha1 sidecar, and echoes the sha1 so callers can assert on it.
# Dependencies are given as groupId:artifactId:version triples.
function make_artifact() {
local groupId=$1;
local artifactId=$2;
local version=$3;
shift; shift; shift;
local pkg_dir=${m2}/${groupId}/${artifactId}/${version}
local pom_file=${pkg_dir}/${artifactId}-${version}.pom
mkdir -p ${pkg_dir}
# Make the pom.xml.
cat > ${pom_file} <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
EOF
# Remaining args become <dependency> entries, split on ':'.
if [[ ${#@} > 0 ]]; then
echo '  <dependencies>' >> ${pom_file}
for artifact in $@; do
IFS=':' read -r -a dep <<< "$artifact"
cat >> ${pom_file} << EOF
<dependency>
<groupId>${dep[0]}</groupId>
<artifactId>${dep[1]}</artifactId>
<version>${dep[2]}</version>
</dependency>
EOF
done
echo '  </dependencies>' >> ${pom_file}
fi
echo "</project>" >> ${pom_file}
# Make the jar with one class (we use the groupId for the classname).
cat > $TEST_TMPDIR/$groupId.java <<EOF
public class $groupId {
public static void print() {
System.out.println("$artifactId");
}
}
EOF
local jar_path=$pkg_dir/$artifactId-$version.jar
${bazel_javabase}/bin/javac $TEST_TMPDIR/$groupId.java
${bazel_javabase}/bin/jar cf $jar_path $TEST_TMPDIR/$groupId.class
# Record the checksum next to the jar (maven convention) and echo it.
local sha1=$(shasum $jar_path | awk '{print $1}')
echo -n $sha1 > $jar_path.sha1
echo $sha1
}
# generate_workspace prints the WORKSPACE path on the second-to-last log line.
# (Dropped the useless `cat |` and quoted the log path.)
function get_workspace_file() {
  tail -n 2 "$TEST_log" | head -n 1
}
# generate_workspace prints the BUILD path on the last log line.
# (Dropped the useless `cat |` and quoted the log path.)
function get_build_file() {
  tail -n 1 "$TEST_log"
}
# Verifies that generate_workspace resolves a single-dependency pom into
# WORKSPACE/BUILD entries with the right artifact coordinates, repo URL,
# sha1, and rule names.
function test_pom() {
  # Create a maven repo
  local sha1=$(make_artifact blorp glorp 1.2.3)
  # Create a pom that references the artifacts.
  cat > $TEST_TMPDIR/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>my</groupId>
<artifactId>thing</artifactId>
<version>1.0</version>
<repositories>
<repository>
<id>my-repo1</id>
<name>a custom repo</name>
<url>http://localhost:$fileserver_port/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>blorp</groupId>
<artifactId>glorp</artifactId>
<version>1.2.3</version>
</dependency>
</dependencies>
</project>
EOF
  generate_workspace --maven_project=$TEST_TMPDIR &> $TEST_log \
    || fail "generating workspace failed"
  # Use the shared helpers instead of re-implementing their tail/head logic.
  cat "$(get_workspace_file)" > ws
  cat "$(get_build_file)" > build
  assert_contains "artifact = \"blorp:glorp:1.2.3\"," ws
  assert_contains "repository = \"http://localhost:$fileserver_port/\"," ws
  assert_contains "sha1 = \"$sha1\"," ws
  assert_contains "\"@blorp_glorp//jar\"," build
  assert_contains "name = \"blorp_glorp\"," build
}
# Verifies that a pom <exclusions> entry removes only the excluded transitive
# dependency (fish:trout) while keeping the non-excluded one (fish:guppy).
function test_pom_exclusions() {
  # Create a maven repo
  local sha1_guppy=$(make_artifact fish guppy 2.0)
  local sha1_trout=$(make_artifact fish trout 4.2)
  local sha1_glorp=$(make_artifact blorp glorp 1.2.3 fish:guppy:2.0)
  local sha1_mlorp=$(make_artifact blorp mlorp 3.2.1 fish:trout:4.2)
  # Create a pom that references the artifacts.
  cat > $TEST_TMPDIR/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>my</groupId>
<artifactId>thing</artifactId>
<version>1.0</version>
<repositories>
<repository>
<id>my-repo1</id>
<name>a custom repo</name>
<url>http://localhost:$fileserver_port/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>blorp</groupId>
<artifactId>glorp</artifactId>
<version>1.2.3</version>
</dependency>
<dependency>
<groupId>blorp</groupId>
<artifactId>mlorp</artifactId>
<version>3.2.1</version>
<exclusions>
<exclusion>
<groupId>fish</groupId>
<artifactId>trout</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</project>
EOF
  generate_workspace --maven_project=$TEST_TMPDIR &> $TEST_log \
    || fail "generating workspace failed"
  # Use the shared helpers instead of re-implementing their tail/head logic.
  cat "$(get_workspace_file)" > ws
  cat "$(get_build_file)" > build
  assert_contains "artifact = \"blorp:glorp:1.2.3\"," ws
  assert_contains "repository = \"http://localhost:$fileserver_port/\"," ws
  assert_contains "sha1 = \"$sha1_glorp\"," ws
  assert_contains "sha1 = \"$sha1_mlorp\"," ws
  assert_contains "sha1 = \"$sha1_guppy\"," ws
  assert_not_contains "sha1 = \"$sha1_trout\"," ws
  assert_contains "\"@blorp_glorp//jar\"," build
  assert_contains "\"@blorp_mlorp//jar\"," build
  assert_contains "name = \"blorp_glorp\"," build
  assert_contains "name = \"blorp_mlorp\"," build
  assert_contains "\"@fish_guppy//jar\"," build
  assert_not_contains "\"@fish_trout//jar\"," build
}
# Verifies generate_workspace's error reporting for a missing pom and for
# malformed XML (no assertions beyond the expected log messages).
function test_invalid_pom() {
# No pom file.
rm -f $TEST_TMPDIR/pom.xml
generate_workspace -m $TEST_TMPDIR &> $TEST_log
expect_log "Non-readable POM $TEST_TMPDIR/pom.xml"
# Invalid XML.
cat > $TEST_TMPDIR/pom.xml <<EOF
<project>
EOF
generate_workspace -m $TEST_TMPDIR &> $TEST_log
expect_log "expected end tag </project>"
}
# Smoke test: a pom with a <profiles> section must not crash
# generate_workspace (no output assertions, just a clean exit).
function test_profile() {
cat > $TEST_TMPDIR/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>my</groupId>
<artifactId>thing</artifactId>
<version>1.0</version>
<profiles>
<profile>
<id>my-profile</id>
<activation>
<property>
<name>makeThing</name>
<value>thing</value>
</property>
</activation>
</profile>
</profiles>
</project>
EOF
generate_workspace --maven_project=$TEST_TMPDIR &> $TEST_log \
    || fail "generating workspace failed"
}
# Verifies handling of a multi-module pom: submodule b1 depends on sibling
# b2, whose coordinates resolve to a local pom (not a repository URL), which
# generate_workspace must report while still emitting the artifact.
function test_submodules() {
cat > $TEST_TMPDIR/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<groupId>xyz</groupId>
<artifactId>a</artifactId>
<version>1.0</version>
<packaging>pom</packaging>
<modules>
<module>b1</module>
<module>b2</module>
</modules>
</project>
EOF
# Create submodules, version and group are inherited from parent.
mkdir -p $TEST_TMPDIR/{b1,b2}
cat > $TEST_TMPDIR/b1/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<artifactId>b1</artifactId>
<parent>
<groupId>xyz</groupId>
<artifactId>a</artifactId>
<version>1.0</version>
</parent>
<dependencies>
<dependency>
<groupId>xyz</groupId>
<artifactId>b2</artifactId>
<version>1.0</version>
</dependency>
</dependencies>
</project>
EOF
cat > $TEST_TMPDIR/b2/pom.xml <<EOF
<project>
<modelVersion>4.0.0</modelVersion>
<artifactId>b2</artifactId>
<parent>
<groupId>xyz</groupId>
<artifactId>a</artifactId>
<version>1.0</version>
</parent>
</project>
EOF
generate_workspace -m $TEST_TMPDIR/b1 &> $TEST_log || fail "generate failed"
expect_log "xyz_b2 was defined in $TEST_TMPDIR/b2/pom.xml which isn't a repository URL"
assert_contains "artifact = \"xyz:b2:1.0\"," $(get_workspace_file)
}
run_suite "maven tests"
|
//
// RSSFeed.h
// StreamCast
//
// Created by <NAME> on 12/08/2010.
// Copyright 2010 StreamGlider, Inc. All rights reserved.
//
// This program is free software if used non-commercially: you can redistribute it and/or modify
// it under the terms of the BSD 4 Clause License as published by
// the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// BSD 4 Clause License for more details.
//
// You should have received a copy of the BSD 4 Clause License
// along with this program. If not, see the README.md file with this program.
#import <Foundation/Foundation.h>
#import "Feed.h"
// Feed subclass for RSS sources; acts as its own XML parser delegate.
@interface RSSFeed : Feed <NSXMLParserDelegate>
// Channel <link> URL.
@property (nonatomic, copy) NSString *link;
// Channel <description> text.
@property (nonatomic, copy) NSString *feedDescription;
// Channel <title> text.
@property (nonatomic, copy) NSString *feedTitle;
// Channel <lastBuildDate>; NOTE(review): `copy` on immutable NSDate is harmless but unusual.
@property (nonatomic, copy) NSDate *lastBuildDate;
// URL string of the channel image, if any.
@property (nonatomic, copy) NSString *imageURL;
// Downloaded bytes of the channel image.
@property (nonatomic, copy) NSData *imageData;
// Returns `html` with markup stripped — presumably plain text; confirm against implementation.
- (NSString *)flattenHTML:(NSString *)html;
@end
|
<filename>server/routes/base-apis.js
const express = require("express");
const path = require("path");
const jsonServer = require("json-server");
const jsonGraphqlExpress = require("json-graphql-server");
const { apiLimits } = require("../utils/rateLimiterDefaults");
const { getFromFile } = require("../utils/utils");
const ApiList = require("../apiList");
const router = express.Router();
// API EndPoint Route
// For every configured API, mount two endpoints under its link:
//   /<link>/graphql — a GraphQL view over the JSON fixture (rate limited)
//   /<link>         — a json-server REST router over the same file
ApiList.forEach(({ link }) => {
const dataPath = path.join(__dirname, `../api/${link}.json`);
const data = getFromFile(dataPath);
try {
// json-graphql-server can throw at setup time; keep the REST route
// alive even when GraphQL setup fails for this fixture.
router.use(`/${link}/graphql`, apiLimits, jsonGraphqlExpress.default(data));
} catch (err) {
console.log(`Unable to set up /${link}/graphql`);
console.error(err);
}
router.use(`/${link}`, apiLimits, jsonServer.router(dataPath));
});
module.exports = router;
|
# Recreates the messaging schema in testDB and seeds fixture users/receivers.
# -e: abort on the first failing command (including a failed psql).
set -e
psql -d testDB -U test << EOSQL
DROP TABLE IF EXISTS m_user;
DROP TABLE IF EXISTS m_receiver;
DROP TABLE IF EXISTS t_message;
-- ユーザーマスタ
CREATE TABLE m_user(
unique_name varchar(255) NOT NULL,
password varchar(255) NOT NULL,
salt varchar(255) NOT NULL,
disabled boolean default false,
version integer default 1,
primary key(unique_name)
);
-- 受信者マスタ
CREATE TABLE m_receiver(
unique_name varchar(255) NOT NULL,
fullname varchar(255) NOT NULL,
display_list boolean default true,
is_admin_role boolean default false,
is_viewlist_role boolean default true,
version integer default 1,
primary key(unique_name)
);
-- メッセージテーブル
CREATE TABLE t_message(
id SERIAL NOT NULL,
send_to varchar(255) NOT NULL,
detail text NOT NULL,
primary key(id)
);
comment on table m_user is 'ユーザーマスタ';
comment on column m_user.unique_name is 'ユニークな略称';
comment on column m_user.password is '暗号化したパスワード';
comment on column m_user.salt is '暗号化パラメータ';
comment on column m_user.version is '更新バージョン';
comment on table m_receiver is '受信者マスタ';
comment on column m_receiver.unique_name is 'ユニークな略称';
comment on column m_receiver.fullname is '氏名';
comment on column m_receiver.display_list is 'リスト表示可否';
comment on column m_receiver.is_admin_role is '管理者権限';
comment on column m_receiver.is_viewlist_role is '一覧確認権限';
comment on column m_receiver.version is '更新バージョン';
comment on table t_message is 'メッセージテーブル';
comment on column t_message.id is '連番';
comment on column t_message.send_to is '送信対象の上司略称';
comment on column t_message.detail is '送信メッセージ';
-- ユーザーマスタ登録 (seed users; passwords are pre-hashed with per-row salts)
INSERT INTO m_user("unique_name", "password", "salt")
VALUES ('aa', '09Dg4qWzhtrppNMV6eNbRLb0Cdxy5kH47Qi8O9BzRqU=', 'E6b0z+WEzgNd9mZ6CS5/+w==');
INSERT INTO m_user("unique_name", "password", "salt")
VALUES ('bb', 'ti+jOc88+oWfm3aIkg9XGq0No6akBADu6S2fwAImVXQ=', '6toFigaiTXNUVHzBDlDSqw==');
INSERT INTO m_user("unique_name", "password", "salt")
VALUES ('cc', 'q6cQlhHmvFMBcmvnM3ON61a+qLGzCE3dfcFIpCS4nh4=', 'sXoo48uAMbxMJA9HYifoAA==');
INSERT INTO m_user("unique_name", "password", "salt")
VALUES ('guest', 'XN+YezObIVLTzVT3jBNtrQSfjtaHXL5zWqW/ci50u6g=', '7Hi5ZraAGng7gQRD6r/bqw==');
-- NOTE(review): the admin salt below begins with a space — confirm this is
-- intentional and matches the value used when the password hash was created.
INSERT INTO m_user("unique_name", "password", "salt")
VALUES ('admin', 're/aFXSjfuKtSeBkCoxAImeQ2Mb6fbunb30EmXpODLc=', ' R2ujDXTjQvUbD4BQRvtbdQ==');
-- 受信者マスタ登録
INSERT INTO m_receiver("unique_name", "fullname")
VALUES ('aa', 'Aさん');
INSERT INTO m_receiver("unique_name", "fullname")
VALUES ('bb', 'Bさん');
INSERT INTO m_receiver("unique_name", "fullname")
VALUES ('cc', 'Cさん');
INSERT INTO m_receiver("unique_name", "fullname", "display_list", "is_admin_role")
VALUES ('admin', '管理者', false, true);
EOSQL
<reponame>luisarizmendi/analysis
package org.larizmen.analysis.domain;
import io.quarkus.hibernate.orm.panache.PanacheEntityBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.*;
import java.time.Instant;
import java.util.*;
import java.util.stream.Stream;
@Entity
@Table(name = "Orders")
public class Order extends PanacheEntityBase {

    @Transient
    static Logger logger = LoggerFactory.getLogger(Order.class);

    // Business key; assigned a random UUID by the no-arg constructor.
    @Id
    @Column(nullable = false, unique = true, name = "order_id")
    private String orderId;

    private String patientId;

    private Instant timestamp;

    private String orderStatus;

    @OneToMany(fetch = FetchType.EAGER, mappedBy = "order", cascade = CascadeType.ALL)
    private List<LineItem> regularLineItems;

    @OneToMany(fetch = FetchType.EAGER, mappedBy = "order", cascade = CascadeType.ALL)
    private List<LineItem> virusLineItems;

    /**
     * Updates the lineItem corresponding to the ticket, creates the appropriate domain events,
     * creates value objects to notify the system, checks the order to see if all items are completed,
     * and updates the order if necessary.
     *
     * All corresponding objects are returned in an OrderEventResult.
     *
     * @param processTicket ticket identifying the fulfilled line item
     * @return OrderEventResult
     */
    public OrderEventResult applyProcessTicket(final ProcessTicket processTicket) {
        // Set the matching LineItem's new status.
        // BUGFIX: the previous code compared lineItem.getItemId() with itself,
        // which is always true and marked EVERY item FULFILLED; compare
        // against the ticket's line item id instead.
        markFulfilled(regularLineItems, processTicket);
        markFulfilled(virusLineItems, processTicket);

        // Check the status of the Order itself and update if necessary.
        // (Only when at least one list exists, matching the old behavior of
        // never touching the status of an order with no line items.)
        if ((getRegularLineItems().isPresent() || getVirusLineItems().isPresent()) && allItemsFulfilled()) {
            this.setOrderStatus("FULFILLED");
        }

        // create the domain event
        OrderUpdatedEvent orderUpdatedEvent = OrderUpdatedEvent.of(this);

        // create the update value object
        OrderUpdate orderUpdate = new OrderUpdate(processTicket.getOrderId(), processTicket.getLineItemId(), processTicket.getName(), processTicket.getItem(), "FULFILLED", processTicket.madeBy);

        OrderEventResult orderEventResult = new OrderEventResult();
        orderEventResult.setOrder(this);
        orderEventResult.addEvent(orderUpdatedEvent);
        orderEventResult.setOrderUpdates(new ArrayList<>() {{
            add(orderUpdate);
        }});
        return orderEventResult;
    }

    // Marks every line item in the (possibly null) list whose id matches the
    // ticket's line item id as FULFILLED.
    private static void markFulfilled(final List<LineItem> items, final ProcessTicket ticket) {
        if (items == null) {
            return;
        }
        for (LineItem lineItem : items) {
            if (lineItem.getItemId().equals(ticket.getLineItemId())) {
                lineItem.setLineItemStatus("FULFILLED");
            }
        }
    }

    // True when every line item across both (possibly null) lists is FULFILLED.
    private boolean allItemsFulfilled() {
        Stream<LineItem> regular = regularLineItems == null ? Stream.empty() : regularLineItems.stream();
        Stream<LineItem> virus = virusLineItems == null ? Stream.empty() : virusLineItems.stream();
        return Stream.concat(regular, virus)
                .allMatch(lineItem -> "FULFILLED".equals(lineItem.getLineItemStatus()));
    }

    /**
     * Creates and returns a new OrderEventResult containing the Order aggregate built from the PlaceOrderCommand
     * and an OrderCreatedEvent.
     *
     * @param placeOrderCommand PlaceOrderCommand
     * @return OrderEventResult
     */
    public static OrderEventResult process(final PlaceOrderCommand placeOrderCommand) {
        logger.debug("Processing: {}", placeOrderCommand.toString());

        // create the return value
        OrderEventResult orderEventResult = new OrderEventResult();

        // build the order from the PlaceOrderCommand
        Order order = new Order(placeOrderCommand.getId());
        order.setTimestamp(placeOrderCommand.getTimestamp());
        order.setOrderStatus("IN_PROGRESS");

        if (placeOrderCommand.getRegularLineItems().isPresent()) {
            logger.debug("createOrderFromCommand adding regular markers {}", placeOrderCommand.getRegularLineItems().get().size());
            logger.debug("adding Regular LineItems");
            placeOrderCommand.getRegularLineItems().get().forEach(commandItem -> {
                logger.debug("createOrderFromCommand adding regularItem from {}", commandItem.toString());
                LineItem lineItem = new LineItem(commandItem.getItem(), commandItem.getName(), "IN_PROGRESS", order);
                order.addRegularLineItem(lineItem);
                logger.debug("added LineItem: {}", order.getRegularLineItems().get().size());
                orderEventResult.addRegularTicket(new OrderTicket(order.getOrderId(), lineItem.getItemId(), lineItem.getItem(), lineItem.getName()));
                logger.debug("Added Regular Ticket to OrderEventResult: {}", orderEventResult.getRegularTickets().get().size());
                orderEventResult.addUpdate(new OrderUpdate(order.getOrderId(), lineItem.getItemId(), lineItem.getName(), lineItem.getItem(), "IN_PROGRESS"));
                logger.debug("Added Order Update to OrderEventResult: {}", orderEventResult.getOrderUpdates().size());
            });
        }

        logger.debug("adding Virus LineItems");
        if (placeOrderCommand.getVirusLineItems().isPresent()) {
            logger.debug("createOrderFromCommand adding virusOrders {}", placeOrderCommand.getVirusLineItems().get().size());
            placeOrderCommand.getVirusLineItems().get().forEach(commandItem -> {
                logger.debug("createOrderFromCommand adding virusItem from {}", commandItem.toString());
                LineItem lineItem = new LineItem(commandItem.getItem(), commandItem.getName(), "IN_PROGRESS", order);
                order.addVirusLineItem(lineItem);
                orderEventResult.addVirusTicket(new OrderTicket(order.getOrderId(), lineItem.getItemId(), lineItem.getItem(), lineItem.getName()));
                orderEventResult.addUpdate(new OrderUpdate(order.getOrderId(), lineItem.getItemId(), lineItem.getName(), lineItem.getItem(), "IN_PROGRESS"));
            });
        }

        orderEventResult.setOrder(order);
        orderEventResult.addEvent(OrderCreatedEvent.of(order));
        logger.debug("Added Order and OrderCreatedEvent to OrderEventResult: {}", orderEventResult);

        // BUGFIX: guard the optional instead of calling get() blindly, which
        // threw NoSuchElementException for commands without a patient id.
        placeOrderCommand.getPatientId().ifPresent(order::setPatientId);

        logger.debug("returning {}", orderEventResult);
        return orderEventResult;
    }

    /**
     * Convenience method to prevent Null Pointer Exceptions
     *
     * @param lineItem
     */
    public void addRegularLineItem(LineItem lineItem) {
        if (this.regularLineItems == null) {
            this.regularLineItems = new ArrayList<>();
        }
        lineItem.setOrder(this);
        this.regularLineItems.add(lineItem);
    }

    /**
     * Convenience method to prevent Null Pointer Exceptions
     *
     * @param lineItem
     */
    public void addVirusLineItem(LineItem lineItem) {
        if (this.virusLineItems == null) {
            this.virusLineItems = new ArrayList<>();
        }
        lineItem.setOrder(this);
        this.virusLineItems.add(lineItem);
    }

    public Optional<List<LineItem>> getRegularLineItems() {
        return Optional.ofNullable(regularLineItems);
    }

    public void setRegularItems(List<LineItem> regularLineItems) {
        this.regularLineItems = regularLineItems;
    }

    public Optional<List<LineItem>> getVirusLineItems() {
        return Optional.ofNullable(virusLineItems);
    }

    public void setVirusItems(List<LineItem> virusLineItems) {
        this.virusLineItems = virusLineItems;
    }

    public Optional<String> getPatientId() {
        return Optional.ofNullable(this.patientId);
    }

    public void setPatientId(String patientId) {
        this.patientId = patientId;
    }

    public Order() {
        this.orderId = UUID.randomUUID().toString();
        this.timestamp = Instant.now();
    }

    public Order(String orderId) {
        this.orderId = orderId;
        this.timestamp = Instant.now();
    }

    public Order(String orderId, String orderSource, String location, String patientId, Instant timestamp, String orderStatus, List<LineItem> regularLineItems, List<LineItem> virusLineItems) {
        // BUGFIX: honor the supplied orderId (it was silently replaced with a
        // random UUID before). orderSource and location are accepted for
        // signature compatibility but are not persisted on this entity.
        this.orderId = orderId;
        this.patientId = patientId;
        this.timestamp = timestamp;
        this.orderStatus = orderStatus;
        this.regularLineItems = regularLineItems;
        this.virusLineItems = virusLineItems;
    }

    @Override
    public String toString() {
        return new StringJoiner(", ", Order.class.getSimpleName() + "[", "]")
                .add("orderId='" + orderId + "'")
                .add("patientId='" + patientId + "'")
                .add("timestamp=" + timestamp)
                .add("orderStatus=" + orderStatus)
                .add("regularLineItems=" + regularLineItems)
                .add("virusLineItems=" + virusLineItems)
                .toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Order order = (Order) o;

        if (orderId != null ? !orderId.equals(order.orderId) : order.orderId != null) return false;
        if (patientId != null ? !patientId.equals(order.patientId) : order.patientId != null)
            return false;
        if (timestamp != null ? !timestamp.equals(order.timestamp) : order.timestamp != null) return false;
        // BUGFIX: orderStatus was compared with != (reference identity on
        // Strings); use value equality like every other field.
        if (orderStatus != null ? !orderStatus.equals(order.orderStatus) : order.orderStatus != null) return false;
        if (regularLineItems != null ? !regularLineItems.equals(order.regularLineItems) : order.regularLineItems != null)
            return false;
        return virusLineItems != null ? virusLineItems.equals(order.virusLineItems) : order.virusLineItems == null;
    }

    @Override
    public int hashCode() {
        int result = orderId != null ? orderId.hashCode() : 0;
        result = 31 * result + (patientId != null ? patientId.hashCode() : 0);
        result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0);
        result = 31 * result + (orderStatus != null ? orderStatus.hashCode() : 0);
        result = 31 * result + (regularLineItems != null ? regularLineItems.hashCode() : 0);
        result = 31 * result + (virusLineItems != null ? virusLineItems.hashCode() : 0);
        return result;
    }

    public String getOrderId() {
        return orderId;
    }

    public String getOrderStatus() {
        return orderStatus;
    }

    public void setOrderStatus(String orderStatus) {
        this.orderStatus = orderStatus;
    }

    public Instant getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Instant timestamp) {
        this.timestamp = timestamp;
    }
}
|
#!/bin/bash
#SBATCH -p veu # Partition to submit to
#SBATCH --gres=gpu:1
#SBATCH --mem=20G # Memory
#SBATCH --ignore-pbs
#SBATCH --output=/home/usuaris/veu/ksenia.kharitonova/tfm/log/generate-factored-wmt-one-sum-en-fr-synsets-new-corrected.log
# SLURM job: runs factored-translation generation (fairseq fork) on the
# en-fr Europarl BPE data using the last checkpoint of run 38.
WORKING_DIR="/home/usuaris/veu/ksenia.kharitonova/tfm/data/europarl/en-fr/en-fr-preprocessed-bpe"
CP_DIR="/home/usuaris/veu/ksenia.kharitonova/tfm/log/checkpoints38-fr-syn"
CP="checkpoint_last.pt"
PYTHON="python"
FAIRSEQ_DIR="/home/usuaris/veu/ksenia.kharitonova/tfm/src/fairseq-factored/"
source ~/.bashrc
conda activate myenv
#mkdir -p $CP_DIR
# stdbuf disables stdio buffering so the SLURM log updates in real time.
stdbuf -i0 -e0 -o0 $PYTHON $FAIRSEQ_DIR/generate.py $WORKING_DIR --path $CP_DIR/$CP \
 --beam 5 --batch-size 1 --lang-pairs en_tokensS-fr_tokensS,en_synsets_wo_at_lemmas-fr_tokensS --task factored_translation --remove-bpe --target-lang fr_tokensS --multiple-encoders False
|
package com.estevaodias.geekstore.infrastructure.security.orm;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.util.Optional;
import java.util.UUID;
/**
 * Spring Data repository for {@code UserEntity} keyed by UUID.
 */
interface UserRepository extends PagingAndSortingRepository<UserEntity, UUID> {
/** Looks up a user by username; empty when no such user exists. */
Optional<UserEntity> findByUsername(String username);
}
|
package chylex.hee.system.collections.weight;
/**
 * Implemented by objects that participate in weighted random selection.
 */
public interface IWeightProvider{
/** Relative weight of this entry; presumably non-negative — confirm at call sites. */
int getWeight();
}
|
<reponame>AbstractCoderX/padla
package ru.progrm_jarvis.ultimatemessenger.format.model;
import java.util.stream.Stream;
/**
 * Runs the shared {@code AbstractTextModelFactoryTest} suite against
 * {@code SimpleTextModelFactory}.
 */
class SimpleTextModelFactoryTest extends AbstractTextModelFactoryTest {

// Supplies the factory instances the inherited parameterized tests run on.
static Stream<TextModelFactory<User>> provideTestSubjects() {
return Stream.of(new SimpleTextModelFactory<>());
}
}
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import SitemapSvg from '@rsuite/icon-font/lib/legacy/Sitemap';
// Icon component wrapping the legacy "sitemap" glyph from @rsuite/icon-font.
// (File is script-generated — regenerate rather than hand-edit.)
const Sitemap = createSvgIcon({
as: SitemapSvg,
ariaLabel: 'sitemap',
category: 'legacy',
displayName: 'Sitemap'
});
export default Sitemap;
|
<gh_stars>100-1000
// https://open.kattis.com/problems/heritage
#include <bits/stdc++.h>
using namespace std;
using ll = long long;
using vi = vector<ll>;
using msi = unordered_map<string, ll>;
ll M = 1000000007;
// Word-break counting DP: counts (mod 1e9+7) the weighted number of ways to
// write w as a concatenation of the n dictionary words, where each word s
// carries multiplicity m[s] and may be reused.
int main() {
int n, k, l;
string w, s;
cin >> n >> w;
l = w.size();
// m: dictionary word -> multiplicity/weight.
msi m;
for (int i = 0; i < n; i++) {
cin >> s >> k;
m[s] = k;
}
// dp[i] = weighted count of ways to compose the prefix w[0..i).
vi dp(l + 1);
dp[0] = 1;
for (int i = 0; i < l; i++) {
for (auto it : m) {
s = it.first;
int j = s.size();
// If word s matches w starting at i, extend every composition of
// the i-prefix; weight by the word's multiplicity, all mod M.
if (i + j <= l && w.substr(i, j) == s)
(dp[i+j] += dp[i] * it.second % M) %= M;
}
}
cout << dp[l] << '\n';
}
|
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import ts from "typescript";
import { Block } from "./block";
import { Transformer } from "./transformer";
import { isErrorMessage } from "./utils";
import { validate, ValidationContext } from "./validation";
import { genIdentifier, UNDEFINED } from "./validation/utils";
/** Where a marker macro was invoked: an `as` cast or a function parameter. */
export const enum MacroCallContext {
    As,
    Parameter
}
/** Everything a marker macro needs to emit validation code for one call site. */
export interface MarkerCallData {
    /** Type arguments of the marker (e.g. the type to validate, error type). */
    parameters: Array<ts.Type>,
    /** Statement block the generated validation nodes are pushed into. */
    block: Block<unknown>,
    /** Whether the marker appeared in an `as` cast or a parameter position. */
    ctx: MacroCallContext,
    /** Set when the marked parameter is optional. */
    optional?: boolean,
    /** Expression (or binding name) being validated. */
    exp: ts.Expression | ts.BindingName
}
export type MacroFn = (transformer: Transformer, data: MarkerCallData) => ts.Expression|undefined;
/**
 * Built-in marker macros. `Assert` throws on invalid values; `EarlyReturn`
 * returns early instead. Both share the same shape: in Parameter context they
 * expand each (possibly destructured) parameter into validation statements;
 * in As context they validate the casted expression, hoisting it into a temp
 * const when it is not a simple (re-evaluable) reference.
 */
export const Markers: Record<string, MacroFn> = {
    Assert: (trans, {ctx, exp, block, parameters, optional}) => {
        if (ctx === MacroCallContext.Parameter) {
            // Parameter position: validate each bound name of the parameter.
            block.nodes.push(...genValidateForProp(exp, (i, patternType) => {
                // Inside a binding pattern the declared type must be re-read
                // from the element; otherwise use the marker's type argument.
                return validate(patternType !== undefined ? trans.checker.getTypeAtLocation(i) : parameters[0]!, i, new ValidationContext({
                    errorTypeName: parameters[1]?.symbol?.name,
                    transformer: trans,
                    depth: [],
                    propName: ts.isIdentifier(i) ? i.text : i
                }), optional);
            }));
            return undefined;
        } else {
            let callBy = exp as ts.Expression;
            // Hoist non-trivial expressions into a const so the generated
            // checks don't re-evaluate them (and their side effects).
            if (!ts.isIdentifier(callBy) && !ts.isPropertyAccessExpression(callBy) && !ts.isElementAccessExpression(callBy)) {
                const [decl, ident] = genIdentifier("temp", callBy as ts.Expression, ts.NodeFlags.Const);
                block.nodes.push(decl);
                callBy = ident;
            }
            block.nodes.push(...validate(parameters[0]!, callBy, new ValidationContext({
                errorTypeName: parameters[1]?.symbol?.name,
                transformer: trans,
                depth: [],
                // pos === -1 means a synthesized node with no source text.
                propName: callBy.pos === -1 ? "value" : callBy.getText()
            })));
            return callBy;
        }
    },
    EarlyReturn: (trans, { ctx, exp, block, parameters, optional}) => {
        // Second type argument selects the early-return payload: an error
        // message type returns the error, any other type returns its value,
        // and no argument returns undefined.
        const resultType = parameters[1] ? isErrorMessage(parameters[1]) ? { returnErr: true } : { return: trans.typeValueToNode(parameters[1], true) } : { return: UNDEFINED };
        if (ctx === MacroCallContext.Parameter) {
            block.nodes.push(...genValidateForProp(exp, (i, patternType) => {
                return validate(patternType !== undefined ? trans.checker.getTypeAtLocation(i) : parameters[0]!, i, new ValidationContext({
                    resultType,
                    transformer: trans,
                    depth: [],
                    propName: ts.isIdentifier(i) ? i.text : i
                }), optional);
            }));
            return undefined;
        } else {
            let callBy = exp as ts.Expression;
            // Same hoisting rule as Assert (see above).
            if (!ts.isIdentifier(callBy) && !ts.isPropertyAccessExpression(callBy) && !ts.isElementAccessExpression(callBy)) {
                const [decl, ident] = genIdentifier("temp", callBy as ts.Expression, ts.NodeFlags.Const);
                block.nodes.push(decl);
                callBy = ident;
            }
            block.nodes.push(...validate(parameters[0]!, callBy, new ValidationContext({
                resultType,
                transformer: trans,
                depth: [],
                propName: callBy.pos === -1 ? "value" : callBy.getText()
            })));
            return callBy;
        }
    }
};
/** Kind of destructuring pattern a validated binding element came from. */
export const enum BindingPatternTypes {
    Object,
    Array
}
/**
 * Walks a parameter binding (plain identifier or nested destructuring pattern)
 * and collects the validation statements produced by `cb` for every bound name.
 * Identifiers are forwarded together with the pattern kind of their direct
 * parent; omitted array elements (holes) are skipped.
 */
function genValidateForProp(prop: ts.Expression|ts.BindingName,
    cb: (i: ts.Expression, bindingPatternType?: BindingPatternTypes) => Array<ts.Statement>,
    parentType?: BindingPatternTypes) : Array<ts.Statement> {
    if (ts.isIdentifier(prop)) return cb(prop, parentType);
    if (ts.isObjectBindingPattern(prop)) {
        const statements: Array<ts.Statement> = [];
        for (const element of prop.elements) {
            statements.push(...genValidateForProp(element.name, cb, BindingPatternTypes.Object));
        }
        return statements;
    }
    if (ts.isArrayBindingPattern(prop)) {
        const statements: Array<ts.Statement> = [];
        for (const element of prop.elements) {
            if (ts.isOmittedExpression(element)) continue;
            statements.push(...genValidateForProp(element.name, cb, BindingPatternTypes.Array));
        }
        return statements;
    }
    return cb(prop);
}
/**
 * Makes sure the value matches the provided type by generating code which validates the value.
 * Throws a detailed error by using the `Error` constructor. You can specify a different class to use as the marker's
 * second parameter.
 *
 * This marker can be used in function parameters and in the `as` expression.
 *
 * @example
 * ```ts
 * function test(a: Assert<string>, b?: Assert<number, TypeError>) {
 *    // Your code
 * }
 * ```
 * ```js
 * function test(a, b) {
 *    if (typeof a !== "string") throw new Error("`a` must be of type `string`.");
 *    else if (b !== undefined && typeof b !== "number") throw new TypeError("`b` must be of type `number`.");
 *    // Your code
 * }
 * ```
 */
export type Assert<T, ErrorType = Error> = T & { __marker?: Assert<T, ErrorType> };
/**
 * Makes sure the value matches the provided type by generating code which validates the value. Returns the provided
 * `ReturnValue` (or `undefined` if a return value is not provided) if the value doesn't match the type.
 * You can provide the `ErrorMsg` type to make it return the error strings.
 *
 * This marker can be used in function parameters and in the `as` expression.
 *
 * @example
 * ```ts
 * function test(a: EarlyReturn<string>, b?: EarlyReturn<number, "Expected b to be number...">) {
 *    // Your code
 * }
 * ```
 * ```js
 * function test(a, b) {
 *    if (typeof a !== "string") return;
 *    else if (b !== undefined && typeof b !== "number") return "Expected b to be number...";
 *    // Your code
 * }
 * ```
 */
export type EarlyReturn<T, ReturnValue = undefined> = T & { __marker?: EarlyReturn<T, ReturnValue> };

/** Marker for `EarlyReturn`: makes failed validations return the generated error message string. */
export type ErrorMsg = { __error_msg: true }
/**
 * Validates if the value is a number and if it's between the specified range.
 *
 * @example
 * ```ts
 * const someNum = data.num as Assert<Range<1, 10>>;
 * ```
 *
 * ```js
 * // Generated code:
 * const __data = data.num;
 * if (typeof __data !== "number" || (__data < 1 || __data > 10)) throw new Error("Expected data.num to be Range<1, 10>.");
 * const someNum = __data;
 * ```
 * ```ts
 * //Sets only the max
 * const someNum = data.num as Assert<Range<number, 10>>;
 * ```
 * ```js
 * // Generated code:
 * const __data = data.num;
 * if (typeof __data !== "number" || __data > 10) throw new Error("Expected data.num to be Range<number, 10>.");
 * const someNum = __data;
 * ```
 */
export type Range<min extends number|Expr<"">, max extends number|Expr<"">> = number & { __utility?: Range<min, max> };
/**
 * Does not validate the type inside the marker.
 */
export type NoCheck<T> = T & { __utility?: NoCheck<T> };

/**
 * Validates if the provided value is a string and it matches the regex.
 *
 * @example
 * ```ts
 * function test(a: Assert<Matches<"/abc/">>) {
 *   // Your code...
 * }
 * ```
 * ```js
 * function test(a) {
 *    if (typeof a !== "string" || !/abc/.test(a)) throw new Error("Expected a to be Matches<\"/abc/\">.");
 *    // Your code...
 * }
 * ```
 */
export type Matches<Regex extends string|Expr<"">> = string & { __utility?: Matches<Regex> };
/**
 * Validates whether the value doesn't have any excessive properties.
 *
 * **Note:** This marker generates an if loop that goes over each property of the value,
 * so you might not want to use it if your object is big.
 *
 * @example
 * ```ts
 * function test(a: Assert<ExactProps<{a: number, b: string}>>) {
 *    // Your code...
 * }
 * ```
 * ```js
 * function test2(prop) {
 *     if (typeof prop !== "object") throw new Error("Expected prop to be { a: number; b: string; }.");
 *     if (typeof prop["a"] !== "number") throw new Error("Expected prop.a to be number.");
 *     if (typeof prop["b"] !== "string") throw new Error("Expected prop.b to be string.");
 *     for (let name_2 in prop) {
 *         if (name_2 !== "a" && name_2 !== "b") throw new Error("Property " + ("prop[" + name_2 + "]") + " is excessive.");
 *     }
 * }
 * ```
 */
export type ExactProps<Obj extends object> = Obj & { __utility?: ExactProps<Obj> };

/** Wraps a raw code string so it can be used where a type-level expression is expected (e.g. in `Range`/`Matches`). */
export type Expr<Expression extends string> = { __utility?: Expr<Expression> };
/**
 * Allows you to create custom comparisons. You can use `$self` in `Expression` - it will turn to value
 * that's currently being validated. If `FullCheck` is set to false, then any additional checks regarding the
 * type of the value will **not** be generated.
 *
 * @example
 * ```ts
 * type Assert123 = Assert<If<{a: number, b: string}, "$self.a === 123", true>>;
 *
 * function test(a?: Assert123) {
 *    return a;
 * }
 * ```
 * ```js
 * function text(a) {
 *     if (a !== undefined) {
 *         if (typeof a !== "object") throw new Error("Expected a to be { a: number; b: string; }.");
 *         if (typeof a["a"] !== "number") throw new Error("Expected a.a to be number.");
 *         if (typeof a["b"] !== "string") throw new Error("Expected a.b to be string.");
 *         if (a.a !== 123) throw new Error("Expected a to satisfy `self.a === 123`.");
 *     }
 *     return a;
 * }
 * ```
 */
export type If<Type, Expression extends string, FullCheck extends boolean = false> = Type & { __utility?: If<Type, Expression, FullCheck> };
/**
 * Utility function. Its calls get transpiled to a self-invoked arrow function which returns `true` if the value matches the type, `false` otherwise.
 * This is basically a tiny wrapper of the `EarlyReturn` type.
 *
 * @example
 * ```ts
 * interface Args {
 *    name: string,
 *    path: string,
 *    output: string,
 *    clusters?: number
 * }
 *
 * console.log(is<Range<1, 10>>(123));
 * ```
 * ```js
 * console.log((() => {
 *    const temp_1 = 123;
 *    if (typeof temp_1 !== "number" || (temp_1 < 1 || temp_1 > 10))
 *        return false;
 *    return true;
 * })());
 * ```
 */
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export declare function is<T, _M = { __is: true }>(prop: unknown) : prop is T;
#!/bin/bash
# Builds and packages the MNN Python bridge library in all four
# build-type / lib-type combinations (or a single one via -t).
# Resulting package layout:
# MNN
#  |--- Debug
#  |      |--- libmnnpybridge.a
#  |      |--- libmnnpybridge.so
#  |
#  |--- Release
#         |--- libmnnpybridge.a
#         |--- libmnnpybridge.so
set -e

usage() {
  echo "Usage: $0 -i mnn_path -o path [-t build_type -t lib_type]"
  echo -e "\t-i MNN library path"
  echo -e "\t-o package files output directory"
  echo -e "\t-t build type (debug/release), lib_type (dynamic/static), build all when unspecify"
  exit 1
}

# Parse options. -t may be given twice: once with the build type, once with
# the library type; giving any -t disables the "build everything" default.
build_all=true
while getopts "i:o:ft:h" opt; do
  case "$opt" in
    i ) mnn_path=$OPTARG ;;
    o ) path=$OPTARG ;;
    t ) build_all=""
        case "$OPTARG" in
          "debug"|"release" ) build_type=$OPTARG ;;
          "dynamic"|"static" ) lib_type=$OPTARG ;;
        esac ;;
    h|? ) usage ;;
  esac
done
if [ -z $build_all ] && ([ -z $build_type ] || [ -z $lib_type ]); then
  echo "build_type(debug/release) and lib_type(dynamic/static) should be set or not-set together"
  exit 1
fi

# Recreate the output package skeleton.
rm -rf $path && mkdir -p $path
pushd $path
mkdir -p include wrapper lib/Debug lib/Release
popd
PACKAGE_PATH=$(realpath $path)
MNN_PACKAGE_PATH=$(realpath $mnn_path)

# Stage the prebuilt MNN libraries and headers under pymnn/3rd_party.
pushd pymnn/3rd_party
rm -rf MNN && mkdir -p MNN/lib
cp -r $MNN_PACKAGE_PATH/* MNN/lib
cp -r ../../include MNN
popd
cp pymnn/src/MNNPyBridge.h $PACKAGE_PATH/include

# Prepare the Python wrapper: strip the tools/data/optim submodules (and their
# imports from __init__.py), byte-compile, then keep only the .pyc files.
rm -rf /tmp/mnn_py && mkdir -p /tmp/mnn_py
cp -r pymnn/pip_package/MNN /tmp/mnn_py
pushd /tmp/mnn_py
find . -name __pycache__ | xargs rm -rf
pushd MNN
rm -rf tools
cat __init__.py | sed '/from . import tools/d' > __init__.py.tmp
mv __init__.py.tmp __init__.py
rm -rf data
cat __init__.py | sed '/from . import data/d' > __init__.py.tmp
mv __init__.py.tmp __init__.py
rm -rf optim
cat __init__.py | sed '/from . import optim/d' > __init__.py.tmp
mv __init__.py.tmp __init__.py
python -c "import compileall; compileall.compile_dir('/tmp/mnn_py/MNN', force=True)"
find . -name "*.py" | xargs rm -rf
popd
cp -r MNN $PACKAGE_PATH/wrapper
popd

# Build the requested (or all) variants of libmnnpybridge.
CMAKE_ARGS="-DPYMNN_USE_ALINNPYTHON=ON -DPYMNN_RUNTIME_CHECK_VM=ON -DPYMNN_EXPR_API=ON -DPYMNN_NUMPY_USABLE=ON -DPYMNN_TRAIN_API=OFF"
rm -rf mnnpybridge_build && mkdir mnnpybridge_build
pushd mnnpybridge_build
log() {
  echo "==================================="
  echo "Build mnnpybridge $1"
  echo "==================================="
}
# Debug Dynamic
if [ $build_all ] || [ $build_type = "debug" -a $lib_type = "dynamic" ]; then
  log "debug + dynamic"
  [ -f CMakeCache.txt ] && rm CMakeCache.txt
  cmake $CMAKE_ARGS -DCMAKE_BUILD_TYPE=Debug -DMNN_BUILD_SHARED_LIBS=ON ../pymnn && make -j8
  cp libmnnpybridge.so $PACKAGE_PATH/lib/Debug
fi
# Debug Static
if [ $build_all ] || [ $build_type = "debug" -a $lib_type = "static" ]; then
  log "debug + static"
  [ -f CMakeCache.txt ] && rm CMakeCache.txt
  cmake $CMAKE_ARGS -DCMAKE_BUILD_TYPE=Debug -DMNN_BUILD_SHARED_LIBS=OFF ../pymnn && make -j8
  cp libmnnpybridge.a $PACKAGE_PATH/lib/Debug
fi
# Release Dynamic
if [ $build_all ] || [ $build_type = "release" -a $lib_type = "dynamic" ]; then
  log "release + dynamic"
  [ -f CMakeCache.txt ] && rm CMakeCache.txt
  cmake $CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release -DMNN_BUILD_SHARED_LIBS=ON ../pymnn && make -j8
  cp libmnnpybridge.so $PACKAGE_PATH/lib/Release
fi
# Release Static
if [ $build_all ] || [ $build_type = "release" -a $lib_type = "static" ]; then
  log "release + static"
  [ -f CMakeCache.txt ] && rm CMakeCache.txt
  cmake $CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release -DMNN_BUILD_SHARED_LIBS=OFF ../pymnn && make -j8
  cp libmnnpybridge.a $PACKAGE_PATH/lib/Release
fi
popd
|
class WorkflowTask:
    """A task tied to a single workflow execution, identified by its execution ID."""

    def __init__(self, xid):
        """Remember the workflow execution ID this task belongs to."""
        self._xid = xid

    def get_execution_id(self):
        """Return the workflow execution ID of the task."""
        return self._xid
<reponame>icecoolr1/springboot-
package edu.jluzh.test_layuimini.service;

import edu.jluzh.test_layuimini.bean.CarImg;

/**
 * @description: Service contract for managing car image records.
 * @author: icecool
 * @date: Created in 2021/5/26 19:14
 * @version:
 * @modified By:
 */
public interface ICarImgService {
    // Add a car image record
    void addCarImg(CarImg img);

    // Delete the image first, then delete the car record
    void deleteImg(int id);

    // Update a car image record
    void updateImg(CarImg img);

    // Look up the image (path) for the given car ID
    String findCarImgById(int carId);
}
|
import tensorflow as tf
import pandas as pd

# Load the data (the first CSV column is used as the index).
dataframe = pd.read_csv('data.csv', index_col=0)

# Split the data: 80% train / 20% test, seeded for reproducibility.
train_dataset = dataframe.sample(frac=0.8,random_state=0)
test_dataset = dataframe.drop(train_dataset.index)
# 'cost' is the regression target; pop it out of the feature frames.
train_labels = train_dataset.pop('cost')
test_labels = test_dataset.pop('cost')

# Build the model: a simple dense regression network with a single
# linear output unit, trained with MSE loss.
def build_model():
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(1024, activation='relu', input_shape=[len(train_dataset.keys())]),
        tf.keras.layers.Dense(512, activation='relu'),
        tf.keras.layers.Dense(256, activation='relu'),
        tf.keras.layers.Dense(1)
    ])
    optimizer = tf.keras.optimizers.RMSprop(0.001)
    model.compile(loss='mse',
                  optimizer=optimizer,
                  metrics=['mae', 'mse'])
    return model

# Train the model (silently; no validation split is used here).
model = build_model()
model.fit(train_dataset, train_labels, epochs=100, verbose=0)

# Evaluate the model on the held-out test set.
loss, mae, mse = model.evaluate(test_dataset, test_labels, verbose=0)

# Make predictions for a small sample batch.
example_batch = test_dataset[:10]
example_result = model.predict(example_batch)
// -!- C++ -!- //////////////////////////////////////////////////////////////
//
// System :
// Module :
// Object Name : $RCSfile$
// Revision : $Revision$
// Date : $Date$
// Author : $Author$
// Created By : <NAME>
// Created : Tue Jun 12 23:30:39 2018
// Last Modified : <180723.1334>
//
// Description
//
// Notes
//
// History
//
/////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 2018 <NAME> D/B/A Deepwoods Software
// 51 Lock<NAME>ill Road
// Wendell, MA 01379-9728
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
//
//
//
//////////////////////////////////////////////////////////////////////////////
static const char rcsid[] = "@(#) : $Id$";
#include "Mast.h"
void Mast::eval() {
//Serial.print("*** Mast::eval(): block->state is "); Serial.println(block->state);
if (block->state) {
stop->state = true;
approach->state = false;
clear->state = false;
} else if (next->state) {
stop->state = false;
approach->state = true;
clear->state = false;
} else {
stop->state = false;
approach->state = false;
clear->state = true;
}
//Serial.print("*** Mast::eval(): stop->state is "); Serial.println(stop->state);
//Serial.print("*** Mast::eval(): approach->state is "); Serial.println(approach->state);
//Serial.print("*** Mast::eval(): clear->state is "); Serial.println(clear->state);
// Set LED brightnesses...
if (stop->state) {
if (commonAnode) {
stop->setPWM(255-stopBrite);
approach->setPWM(255);
clear->setPWM(255);
} else {
stop->setPWM(stopBrite);
approach->setPWM(0);
clear->setPWM(0);
}
} else if (approach->state) {
if (biColorSearch) {
uint8_t green = approachBrite*(yellowHue/256.0);
uint8_t red = approachBrite*((255-yellowHue)/256.0);
if (commonAnode) {
stop->setPWM(255-red);
clear->setPWM(255-green);
} else {
stop->setPWM(red);
clear->setPWM(green);
}
} else {
if (commonAnode) {
stop->setPWM(255);
approach->setPWM(255-approachBrite);
clear->setPWM(255);
} else {
stop->setPWM(0);
approach->setPWM(approachBrite);
clear->setPWM(0);
}
}
} else if (clear->state) {
if (commonAnode) {
stop->setPWM(255);
approach->setPWM(255);
clear->setPWM(255-clearBrite);
} else {
stop->setPWM(0);
approach->setPWM(0);
clear->setPWM(clearBrite);
}
}
}
|
# Silent pushd/popd wrappers (suppress the directory-stack chatter).
pushd () {
    command pushd "$@" > /dev/null
}
popd () {
    command popd "$@" > /dev/null
}

set -e

# Resolve the repository root (parent of this script's directory) and make it
# importable from Python.
MAINPATH=$(dirname $(readlink -f $0))/..
export PYTHONPATH=$MAINPATH
echo setting \$PYTHONPATH to $PYTHONPATH
echo entering directory $MAINPATH
pushd $MAINPATH

# Refuse to clobber a pre-existing "output" directory unless it carries the
# marker file proving it was created by a previous run of this demo.
if [ -e output ]; then
    if [ -f output/.dynamic_triad ]; then
        rm -rf output
    else
        echo file/directory $MAINPATH/output already exists, please remove it before running the demo 1>&2
        popd
        exit 1
    fi
fi
mkdir -p output
touch output/.dynamic_triad

# Train the model on the toy academic dataset.
python . -I 10 -d data/academic_toy.pickle -n 15 -K 48 -l 4 -s 2 -o output --beta-smooth 1 --beta-triad 1 --datasetmod core.dataset.citation -m 1980 --cachefn /tmp/academic_raw -b 5000
# we have to use a different cache file because the file name and indexing are different between data/academic_toy and data/academic_toy.pickle,
# though they are actually the same dataset
python scripts/stdtests.py -f output -d data/academic_toy.pickle -m 1980 -s 4 -l 2 -n 15 -t all --datasetmod core.dataset.citation --cachefn /tmp/academic_raw
popd
|
<gh_stars>10-100
// Registers the discourse-node-list directive (attribute usage only).
angular.module("wust.elements").directive("discourseNodeList", discourseNodeList);

discourseNodeList.$inject = [];

// Directive factory: renders a list of discourse nodes with optional
// before/after templates and a loading indicator.
function discourseNodeList() {
    return {
        restrict: "A",
        templateUrl: "elements/node_list/discourse_node_list.html",
        scope: {
            nodeModel: "=",       // node list model (two-way bound)
            isLoading: "=",       // loading flag shown while the list resolves
            templateBefore: "@",  // template url rendered before each node
            templateAfter: "@",   // template url rendered after each node
        },
        controller: discourseNodeListCtrl,
        controllerAs: "vm",
        bindToController: true
    };
}
discourseNodeListCtrl.$inject = ["Post", "TagRelationEditService", "Auth"];

/**
 * Controller for the discourse-node-list directive. Exposes voting, removal
 * and connects-relation editing for the nodes of the bound node model.
 */
function discourseNodeListCtrl(Post, TagRelationEditService, Auth) {
    let vm = this;

    vm.Auth = Auth;
    vm.upvoteAnswer = upvoteAnswer;
    vm.remove = remove;
    vm.editFollowerConnects = editFollowerConnects;
    vm.editPredecessorConnects = editPredecessorConnects;

    // Lookup table: endNode.id -> connects relation of the root node.
    vm.connectsRelations = {};

    let deregisterCommit = vm.nodeModel.component.onCommit(() => {
        // Bug fix: this previously used `.reduce(_.merge, {})`. Array#reduce
        // invokes its callback with (accumulator, value, index, array), and
        // lodash's merge treats the extra index/array arguments as additional
        // merge sources — so the whole relations array got merged in, polluting
        // the result with numeric keys. Merge exactly one source per step.
        vm.connectsRelations = vm.nodeModel.component.rootNode.outRelations.map(rel => {
            return {
                [rel.endNode.id]: rel
            };
        }).reduce((acc, entry) => _.merge(acc, entry), {});
        // One-shot handler: deregister after the first commit.
        deregisterCommit();
    });

    // Toggles the "answer" upvote on a connectable node: a second call on an
    // already-voted node neutralizes the vote.
    function upvoteAnswer(connectable) {
        let connects = findConnects(vm.nodeModel.component.rootNode, connectable);
        vm.connectsRelations[connectable.id] = connects;
        let service = Post.$buildRaw(_.pick(vm.nodeModel.component.rootNode, "id")).connectsTo.$buildRaw(_.pick(connectable, "id"));
        if (connects.vote) {
            service.neutral.$create().$then(data => {
                connects.vote = undefined;
                connects.quality = data.quality;
                // humane.success("Unvoted post as answer");
            }, resp => humane.error(resp.$response.data));
        } else {
            service.up.$create().$then(data => {
                connects.vote = data.vote;
                connects.quality = data.quality;
                // humane.success("Upvoted post as answer");
            }, resp => humane.error(resp.$response.data));
        }
    }

    // Removes a node from the bound node model.
    function remove(node) {
        vm.nodeModel.remove(node);
    }

    // Finds the connects relation from startNode to endNode, if any.
    function findConnects(startNode, endNode) {
        return _.find(startNode.outRelations, h => h.endNode.id === endNode.id);
    }

    // Opens the relation editor for a connects relation pointing at this node.
    function editPredecessorConnects(node) {
        let connects = findConnects(vm.nodeModel.node, node);
        TagRelationEditService.show(connects, () => vm.nodeModel.remove(node));
        //TODO: update startNode, endNode and bigpost taglist
    }

    // Opens the relation editor for a connects relation originating from this node.
    function editFollowerConnects(node) {
        let connects = findConnects(node, vm.nodeModel.node);
        TagRelationEditService.show(connects, () => vm.nodeModel.remove(node));
        //TODO: update startNode, endNode and bigpost taglist
    }
}
|
#!/bin/bash
# Builds LibriSpeech-style transcript files: for each <speaker>/<chapter>
# directory under the dataset root, collects every per-utterance .txt next to
# its .mp3 and writes a combined <speaker>-<chapter>.trans.txt file.
DATASET="$(realpath -s $1)/*/"
for speakers in $DATASET
do
    if [ -d "${speakers}" ]; then
        speaker=$(basename "${speakers%*/}")
        echo "Found speaker: $speakers"
        PSPEAKERS="$(realpath -s $speakers)/*/"
        for chapters in $PSPEAKERS
        do
            if [ -d "$chapters" ]; then
                chapter=$(basename "$chapters")
                echo "Found chapter: $chapter"
                outfile="${chapters%/*}/${speaker}-${chapter}.trans.txt"
                echo "outfile: $outfile ..."
                declare -a trans
                audio="${chapters}/*.mp3"
                for f in $audio
                do
                    # Split the audio path into dir / filename / basename.
                    fpath=${f%/*}
                    fname=${f##*/}
                    fbase=${fname%.*}
                    #echo "Processing $fname file... on path $fpath"
                    #echo "Looking for ${fbase}.txt ..."
                    tpt="${fpath}/${fbase}.txt"
                    if [ -f "${tpt}" ]; then
                        cnt=$(<$tpt)
                        #echo "Found ${fbase}.txt ..."
                        # "${cnt^^}" upper-cases the transcript text (bash 4+).
                        trans+=("${fbase} ${cnt^^}")
                    else
                        echo "Did not find ${fbase}.txt ..."
                    fi
                done
                # Write all collected lines (or an empty file when none found).
                { [ "${#trans[@]}" -eq 0 ] || printf '%s\n' "${trans[@]}"; } > $outfile
                unset trans
            fi
        done
    fi
done
|
# !/usr/bin/env bash
# Uninstalls the ingress-nginx helm release and cleans up the leftover
# resources (leader configmap, admission secret, optionally the namespace).
set -e

# Acquire utils
SCRIPT_PATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
source "$SCRIPT_PATH/__utils.sh"
validate_is_master

# Validate pre-required commands
validate_command kubectl
validate_command helm

INGRESS_NAMESPACE=${INGRESS_NAMESPACE:-"ingress-nginx"}
INGRESS_NAMESPACE_REMOVABLE=${INGRESS_NAMESPACE_REMOVABLE:-"0"}
INGRESS_RELEASE_NAME=${INGRESS_RELEASE_NAME:-"ingress-nginx"}

echo "--------------------------------------------------------------------------------"
echo "NAMESPACE=$INGRESS_NAMESPACE"
echo "RELEASE_NAME=$INGRESS_RELEASE_NAME"
echo "--------------------------------------------------------------------------------"

# Returns the name(s) of the ingress-nginx controller pod(s) in the namespace.
function get_pod_name() {
    echo $(kubectl get pods -n $INGRESS_NAMESPACE -l app.kubernetes.io/name=ingress-nginx -o jsonpath='{.items[*].metadata.name}')
}

if command -v kubectl &> /dev/null
then
    POD_NAME=$(get_pod_name)
    # Only uninstall when a pod belonging to this release actually exists.
    if [[ $POD_NAME == "$INGRESS_RELEASE_NAME"* ]]
    then
        helm uninstall $INGRESS_RELEASE_NAME -n $INGRESS_NAMESPACE
        helm repo remove ingress-nginx

        # Remove redundant resources left behind by the chart
        kubectl delete configmap "ingress-controller-leader-nginx" -n $INGRESS_NAMESPACE
        NGINX_ADMISSION_NAME="ingress-nginx-admission"
        if [ $INGRESS_NAMESPACE != "ingress-nginx" ];then NGINX_ADMISSION_NAME="$INGRESS_NAMESPACE-$NGINX_ADMISSION_NAME";fi
        kubectl delete secret $NGINX_ADMISSION_NAME -n $INGRESS_NAMESPACE
        if [ $INGRESS_NAMESPACE_REMOVABLE == "1" ];then kubectl delete ns $INGRESS_NAMESPACE;fi
    fi
fi

echo "[Ingress] Uninstalled successfully"
echo "--------------------------------------------------------------------------------"
|
<gh_stars>0
/*
 * driver/soc/allwinner/sunxi_bootup_extend.c
 *
 * Copyright(c) 2013-2015 Allwinnertech Co., Ltd.
 *      http://www.allwinnertech.com
 *
 * allwinner sunxi platform bootup extend code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

/*
 * Bug fix: the include guard previously tested `_SUNXI_BOORUP_EXTEND_H_`
 * but defined `SUNXI_BOORUP_EXTEND_H_` (missing underscore), so the guard
 * never engaged and repeated inclusion was not prevented. Both macros now
 * use the same name; the leading underscore was also dropped because
 * identifiers starting with `_` + uppercase are reserved in C, and the
 * "BOORUP" typo was corrected.
 */
#ifndef SUNXI_BOOTUP_EXTEND_H_
#define SUNXI_BOOTUP_EXTEND_H_

/* Bootup-extend operation modes. */
#define SUNXI_BOOTUP_EXTEND_MODE_POWEROFF    (1)
#define SUNXI_BOOTUP_EXTEND_MODE_RESTART     (2)

/* Values written to the RTC scratch register for each mode. */
#define SUNXI_BOOTUP_EXTEND_RTC_POWEROFF     (0x02)
#define SUNXI_BOOTUP_EXTEND_RTC_RESTART      (0x0f)

#endif /* SUNXI_BOOTUP_EXTEND_H_ */
|
/*
* Copyright (C) 2009-2011 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parboiled.examples.calculators
import org.parboiled.scala._
/**
* A recognizer for a very simple calculator language supporting the 4 basic calculation types on integers.
* This grammar does not contain any actions, it only serves for determinining whether a given input conforms to
* the language. The SimpleCalculator1 adds the actual calculation actions.
*/
class SimpleCalculator0 extends Parser {

  // The whole input must parse as one expression followed by end-of-input.
  def InputLine = rule { Expression ~ EOI }

  // Expression <- Term (('+' | '-') Term)*
  def Expression: Rule0 = rule { Term ~ zeroOrMore(anyOf("+-") ~ Term) }

  // Term <- Factor (('*' | '/') Factor)*
  def Term = rule { Factor ~ zeroOrMore(anyOf("*/") ~ Factor) }

  // A factor is either a number or a parenthesized sub-expression.
  def Factor = rule { Digits | Parens }

  def Parens = rule { "(" ~ Expression ~ ")" }

  def Digits = rule { oneOrMore(Digit) }

  def Digit = rule { "0" - "9" }
}
|
/// Formats raw byte counts as human-readable strings such as "2.00 KB".
/// `countStyle` is the divisor between consecutive units (e.g. 1024 or 1000).
class FileSizeFormatter {
    var countStyle: Int

    init(countStyle: Int) {
        self.countStyle = countStyle
    }

    /// Renders `bytes` with two decimal places and the largest fitting unit.
    func stringValue(bytes: Int) -> String {
        let suffixes = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
        let divisor = Double(countStyle)
        var value = Double(bytes)
        var order = 0
        // Scale down until the value fits the current unit (or units run out).
        while value >= divisor && order < suffixes.count - 1 {
            value /= divisor
            order += 1
        }
        let rendered = String(format: "%.2f", value)
        return "\(rendered) \(suffixes[order])"
    }
}
// Example usage: binary (1024-based) units.
let formatter = FileSizeFormatter(countStyle: 1024)
print(formatter.stringValue(bytes: 2048)) // Output: "2.00 KB"
print(formatter.stringValue(bytes: 1048576)) // Output: "1.00 MB"
print(formatter.stringValue(bytes: 5368709120)) // Output: "5.00 GB"
<reponame>Affan007/Akevo-Customized
/**
 * @author v.lugovsky
 * created on 16.12.2015
 */
(function () {
  'use strict';

  // Sign-in page controller: submits the entered credentials through the
  // User service and redirects to the dashboard on success.
  angular.module('BlurAdmin.signin')
      .controller('SignInCtrl', SignInCtrl);

  function SignInCtrl($scope, $state, $window, $timeout,User,$location) {
    console.log("hi");
    $scope.Login = function() {
      var userdata={
        'username': $scope.UserName,
        'password': $scope.Password
      };
      // console.log(userdata);
      User.Login(userdata)
      .success(function(data) {
        // User.SetUser(data.);
        $location.path('/dashboard');
      })
      .error(function(data) {
        // console.log(data)
        // NOTE(review): login failures are silently ignored here — the user
        // gets no feedback; confirm whether an error message is intended.
      })
    }

    // Old hard-coded role-based login, kept for reference:
    // $scope.Login = function() {
    //
    //   if ($scope.UserName=='Owner'&& $scope.Password=='<PASSWORD>'){
    //     User.SetUser($scope.UserName);
    //     $location.path('/reseller/resellerMod');
    //   }
    //   else if($scope.UserName=='Partner'&& $scope.Password=='<PASSWORD>'){
    //     User.SetUser($scope.UserName);
    //     $location.path('/reseller/resellerMod');
    //   }
    //   else if($scope.UserName=='Reseller'&& $scope.Password=='<PASSWORD>'){
    //     User.SetUser($scope.UserName);
    //     $location.path('/reseller/resellerMod');
    //   }
    //   else{
    //     $location.path('/signin');
    //   }
    //
    // }
  }

  /** @ngInject */
})();
|
<reponame>maxmedia/circuit
require 'active_support/concern'
module SpecHelpers
module StoresCleaner
extend ActiveSupport::Concern
include CircuitBlueprints
included do
around :each do |example|
orig_site_store = Circuit::Storage::Sites.instance_variable_get(:@instance)
orig_node_store = Circuit::Storage::Nodes.instance_variable_get(:@instance)
orig_cru_path = Circuit.cru_path.dup
clear_storage
if @storage
Circuit.set_site_store @storage
Circuit.set_node_store @storage
ensure_blueprints
end
example.run
clear_storage
silence_warnings do
Circuit.set_site_store orig_site_store
Circuit.set_node_store orig_node_store
Circuit.cru_path = orig_cru_path
end
ensure_blueprints
end
end
module ClassMethods
def use_storage(val)
before(:all) { @storage = val }
end
end
private
def clear_storage
Circuit::Storage::Sites.instance_variable_set(:@instance, nil)
Circuit::Storage::Nodes.instance_variable_set(:@instance, nil)
end
end
end
|
<filename>sdk/sources/OpenAutomate/oatest/src/oacLogger.cpp
/*******************************************************************************
* Copyright 1993-2008 NVIDIA Corporation. All rights reserved.
*
* NOTICE TO USER:
*
* This source code is subject to NVIDIA ownership rights under U.S. and
* international Copyright laws.
*
* This software and the information contained herein is PROPRIETARY and
* CONFIDENTIAL to NVIDIA and is being provided under the terms and conditions
* of a Non-Disclosure Agreement. Any reproduction or disclosure to any third
* party without the express written consent of NVIDIA is prohibited.
*
* NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE CODE FOR
* ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF
* ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOURCE CODE,
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, AND
* FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL NVIDIA BE LIABLE FOR
* ANY SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE CODE.
*
* U.S. Government End Users. This source code is a "commercial item" as that
* term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of "commercial
* computer software" and "commercial computer software documentation" as such
* terms are used in 48 C.F.R. 12.212 (SEPT 1995) and is provided to the U.S.
* Government only as a commercial end item. Consistent with 48 C.F.R.12.212
* and 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all U.S. Government
* End Users acquire the source code with only those rights set forth herein.
*
******************************************************************************/
#include <oac/Logger.h>
// Destructor: the logger owns no resources that need explicit release.
oacLogger::~oacLogger()
{
}
|
<reponame>rwth-acis/las2peer-project-service
package i5.las2peer.services.projectService.util;
/**
 * Whether projects should be visible for everyone (read-access) or
 * only for the project members.
 * @author Philipp
 *
 */
public enum ProjectVisibility {
	// OWN: visible to project members only; ALL: readable by everyone.
	OWN, ALL
}
|
#!/bin/bash
# Builds a revision-tagged image: fetches the next revision number for
# IMAGE:TAG (with PREFIX) and bakes it into FINALIMAGE via Dockerfile.revision.
set -e

REVISIONIMAGE="$1"   # base image the revision Dockerfile builds FROM
IMAGE="$2"           # image whose revision counter is consulted
TAG="$3"             # tag of that image
FINALIMAGE="$4"      # name for the resulting image
PREFIX="$5"          # revision prefix passed to fetchnewrevision.sh

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/" && pwd )"
REVISION=$("$DIR/fetchnewrevision.sh" "$IMAGE" "$TAG" "$PREFIX")
docker build --build-arg image="$REVISIONIMAGE" --build-arg revision="$REVISION" -t "$FINALIMAGE" -f $DIR/Dockerfile.revision "$DIR"
|
using QuickBooks.Net.Data.Models.Fields.Line_Items.Invoice_Line.Line_Details;

namespace QuickBooks.Net.Data.Models.Fields.Line_Items.Invoice_Line
{
    /// <summary>Detail payload for an invoice line that sells an item. (Stub — not yet implemented.)</summary>
    public class SalesItemLineDetail
    {
        // Implementation for SalesItemLineDetail
    }

    /// <summary>Detail payload for a line that expands a group item. (Stub — not yet implemented.)</summary>
    public class GroupLineDetail
    {
        // Implementation for GroupLineDetail
    }

    /// <summary>Detail payload for a description-only line. (Stub — not yet implemented.)</summary>
    public class DescriptionOnly
    {
        // Implementation for DescriptionOnly
    }

    /// <summary>Detail payload for a discount line. (Stub — not yet implemented.)</summary>
    public class DiscountLineDetail
    {
        // Implementation for DiscountLineDetail
    }

    /// <summary>Detail payload for a subtotal line. (Stub — not yet implemented.)</summary>
    public class SubTotalLineDetail
    {
        // Implementation for SubTotalLineDetail
    }

    /// <summary>Detail payload for a tax line. (Stub — not yet implemented.)</summary>
    public class TaxLineDetail
    {
        // Implementation for TaxLineDetail
    }

    /// <summary>Detail payload for a deposit line. (Stub — not yet implemented.)</summary>
    public class DepositLineDetail
    {
        // Implementation for DepositLineDetail
    }
}
# Deploy: pull the production branch, rebuild, and hot-restart the server.
git pull origin production
yarn install
yarn run build -- --release
# Restart the already-registered pm2 process with the fresh build.
pm2 restart ../build/server.js
|
import { Maybe } from '../value/maybe.type';
import { FactoryWithInput, Getter } from './getter';
/**
 * Getter that returns a cached value.
 */
export type CachedGetter<T> = Getter<T> & {
  /**
   * Sets the value in the cache.
   *
   * @param value
   */
  set(value: T): void;

  /**
   * Resets/clears the cache.
   */
  reset(): void;

  /**
   * Re-initializes the getter and reloads the value from the source.
   */
  init(): void;
};

/** A CachedGetter whose source is a factory that may take an input argument. */
export type CachedFactoryWithInput<T, A = unknown> = CachedGetter<T> &
  FactoryWithInput<T, A> & {
    /**
     * Re-initializes the cache using the factory function.
     *
     * @param input
     */
    init(input?: A): void;
  };
/**
 * Wraps a Getter/factory so its value is computed at most once and then served
 * from a cache. `set` overrides the cached value, `reset` clears the cache so
 * the next call recomputes, and `init` eagerly (re)loads from the factory.
 *
 * @param getter
 * @returns
 */
export function cachedGetter<T>(getter: Getter<T>): CachedFactoryWithInput<T>;
export function cachedGetter<T, A = unknown>(factory: FactoryWithInput<T, A>): CachedFactoryWithInput<T, A>;
export function cachedGetter<T, A = unknown>(factory: FactoryWithInput<T, A>): CachedFactoryWithInput<T, A> {
  let cache: Maybe<{ value: T }>;

  // (Re)loads the cache from the factory with the given input.
  function load(input?: A): void {
    cache = { value: factory(input) };
  }

  const cached = ((input?: A) => {
    if (cache == null) {
      load(input);
    }
    return (cache as { value: T }).value;
  }) as CachedFactoryWithInput<T, A>;

  cached.set = (value: T) => {
    cache = { value };
  };
  cached.reset = () => {
    cache = undefined;
  };
  cached.init = load;

  return cached;
}
|
package kubernetes
import (
"flag"
"log"
"os/user"
"github.com/sukeesh/k8s-job-notify/env"
batchv1 "k8s.io/api/batch/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
)
// Client represents the wrapper of kubernetes API client
type Client struct {
	clientset kubernetes.Interface
}

// NewClient returns Client struct. The underlying configuration is resolved
// by getConfig (in-cluster config or the local kubeconfig file).
func NewClient() (*Client, error) {
	config, err := getConfig()
	if err != nil {
		return nil, err
	}

	clientSet, err := kubernetes.NewForConfig(config)
	if err != nil {
		return nil, err
	}

	return &Client{
		clientset: clientSet,
	}, nil
}
// getConfig resolves a Kubernetes REST client configuration: the
// in-cluster service-account config when running inside a cluster
// (per env.IsInCluster), otherwise the user's ~/.kube/config, whose
// path can be overridden with the -kubeconfig command-line flag.
//
// NOTE(review): flag.String registers a global flag; calling getConfig
// twice in one process would panic with "flag redefined" — confirm it
// is only invoked once (e.g. via NewClient at startup).
func getConfig() (config *rest.Config, err error) {
	if env.IsInCluster() {
		config, err = rest.InClusterConfig()
		if err != nil {
			return nil, err
		}
		log.Printf("using inClusterConfig")
	} else {
		// := here shadows the named return value err inside this block;
		// harmless because every failure path returns explicitly.
		usr, err := user.Current()
		if err != nil {
			return nil, err
		}
		filePath := usr.HomeDir + "/.kube/config"
		kubeconfig := flag.String("kubeconfig", filePath, "absolute path to file")
		flag.Parse()
		config, err = clientcmd.BuildConfigFromFlags("", *kubeconfig)
		if err != nil {
			return nil, err
		}
	}
	return config, nil
}
// ListJobs fetches every batch/v1 Job in the given namespace.
func (c *Client) ListJobs(namespace string) (*batchv1.JobList, error) {
	list, err := c.clientset.BatchV1().Jobs(namespace).List(metav1.ListOptions{})
	if err != nil {
		return nil, err
	}
	return list, nil
}
|
#!/bin/sh
# Launch an HTCondor container, inject a host-supplied local config and
# start the condor service inside it.
# Usage: <script> <path-to-condor_config.local>
if [ -z "$1" ]; then
    echo "usage: $0 <path-to-condor_config.local>" >&2
    exit 1
fi
docker rm -f condor_docker
docker pull kfang1233/condor:latest
# "$1" is quoted so config paths containing spaces mount correctly.
docker run -itd -P --name=condor_docker --net=host -v "$1":/condor_config.local kfang1233/condor /bin/bash
docker exec condor_docker cp /condor_config.local /etc/condor/condor_config.local
docker exec condor_docker /etc/init.d/condor start
|
// Initialize the Firebase Realtime Database
FirebaseDatabase database = FirebaseDatabase.getInstance();
// Create a reference for the users node
DatabaseReference users = database.getReference("users");
// Get the current user.
// NOTE(review): getCurrentUser() returns null when nobody is signed in —
// the child(currentUser.getUid()) calls below would then NPE; confirm a
// signed-in user is guaranteed at this point.
FirebaseUser currentUser = FirebaseAuth.getInstance().getCurrentUser();
// Create user data object (hard-coded sample values)
User user = new User("example@example.com", "John Doe");
// Write the user object under users/<uid>
users.child(currentUser.getUid()).setValue(user);
// Read the same node back once (single-value listener, fires one time).
// NOTE(review): setValue above is asynchronous — verify whether this read
// is expected to observe the write just issued.
users.child(currentUser.getUid()).addListenerForSingleValueEvent(new ValueEventListener() {
    @Override
    public void onDataChange(DataSnapshot dataSnapshot) {
        User user = dataSnapshot.getValue(User.class);
        // Update the user data here
    }
    @Override
    public void onCancelled(DatabaseError databaseError) {
        // Handle error
    }
});
#!/usr/bin/env bash
# Adapted from Brian's deploy script here:
# https://git.bytes.zone/bytes.zone/infrastructure/src/commit/ac14708d9f8804ebcb2b445dc101bc8a5e464fe2/gitea/deploy.sh
set -euo pipefail

# First argument: ssh host, also used as the nixosConfigurations attribute.
export HOST="${1:?'Pass host'}"

# Build the system closure locally and remember its store path.
nix build ".#nixosConfigurations.$HOST.config.system.build.toplevel" --out-link "$(pwd)/result"
STORE_PATH=$(realpath result)

# Create a persistent ssh connection that will be reused by follow-up commands
echo "Opening ssh connection..."
ssh -MNf "$HOST"

# Copy configuration
echo "Copying closure..."
nix-copy-closure --use-substitutes --to "$HOST" "$STORE_PATH"

echo -n 'keepassxc password:'
# -r keeps backslashes in the password literal (plain `read -s` would eat
# them); -s suppresses terminal echo.
read -rs PASSWORD

# Copy over secrets: extract each secret's Notes attribute from the
# keepassxc database and stream it to /run/secrets/<name> on the host.
write_secret () {
    SECRET=$1
    echo "$PASSWORD" | keepassxc-cli show \
        -y 1 \
        --show-protected \
        --attributes Notes \
        ~/docs/passwords.kdbx "$HOST/$SECRET" \
        | ssh "$HOST" -T "cat > /run/secrets/$SECRET"
}
export -f write_secret

ssh "$HOST" -- "mkdir -p /run/secrets"
# NOTE(review): relies on word-splitting of keepassxc-cli's listing —
# entry names containing whitespace would break this loop.
for pass in $(echo "$PASSWORD" | keepassxc-cli ls -y 1 ~/docs/passwords.kdbx "$HOST"); do
    write_secret "$pass"
done

# Activate new configuration
ssh "$HOST" -- "sudo nix-env --profile /nix/var/nix/profiles/system --set $STORE_PATH"
ssh "$HOST" -- "sudo /nix/var/nix/profiles/system/bin/switch-to-configuration switch"
curl -X POST -H 'Content-type: application/json' --data '{"text":"deploy fab4api"}' $1 |
#!/bin/bash
#readlength in=<infile>

# Print the embedded help text (description, flags, contact) verbatim.
usage(){
echo "
Written by Brian Bushnell
Last modified April 4, 2015
Description:  Generates a length histogram of input reads.
Usage:  readlength.sh in=<input file>
in=<file>     	The 'in=' flag is needed only if the input file is not the first parameter.  'in=stdin.fq' will pipe from standard in.
in2=<file>    	Use this if 2nd read of pairs are in a different file.
out=<file>    	Write the histogram to this file.  Default is stdout.
bin=10        	Set the histogram bin size.
max=80000     	Set the max read length to track.
round=f       	Places reads in the closest bin, rather than the highest bin of at least readlength.
nzo=f         	(nonzeroonly) Do not print empty bins.
reads=-1      	If nonnegative, stop after this many reads.
Please contact Brian Bushnell at bbushnell@lbl.gov if you encounter any problems.
"
}
# Resolve the directory containing this script, following symlinks,
# so the classpath below can point at the bundled "current/" directory.
pushd . > /dev/null
DIR="${BASH_SOURCE[0]}"
while [ -h "$DIR" ]; do
cd "$(dirname "$DIR")"
DIR="$(readlink "$(basename "$DIR")")"
done
cd "$(dirname "$DIR")"
DIR="$(pwd)/"
popd > /dev/null

#DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/"

# Java classpath and default JVM settings (heap may be overridden by calcXmx).
CP="$DIR""current/"
z="-Xmx400m"
z2="-Xmx400m"
EA="-ea"
set=0

# With no arguments or -h/--help, print the help text and quit.
if [ -z "$1" ] || [[ $1 == -h ]] || [[ $1 == --help ]]; then
usage
exit
fi
# Delegate to the shared calcmem.sh helper to parse any user-supplied
# -Xmx memory setting from the command line.
calcXmx () {
source "$DIR""/calcmem.sh"
parseXmx "$@"
}
calcXmx "$@"
# Run the histogram tool, loading site-specific modules when on the
# NERSC genepool host.
stats() {
if [[ $NERSC_HOST == genepool ]]; then
module unload oracle-jdk
module load oracle-jdk/1.7_64bit
module load pigz
fi
# Assemble the java invocation, then eval so quoted args expand as intended.
local CMD="java $EA $z -cp $CP jgi.MakeLengthHistogram $@"
#	echo $CMD >&2
eval $CMD
}
stats "$@"
|
<gh_stars>0
import React from 'react';
import Layout from 'layout/Layout';
import Seo from 'common/Seo';
import HeroBanner from 'containers/HeroBanner';
import RecentArticles from 'containers/RecentArticles';
import ShortBio from 'containers/ShortBio';
// Landing page: hero banner, recent articles and a short author bio,
// wrapped in the shared site layout with SEO metadata.
function Homepage() {
  return (
    <Layout>
      <Seo title="Homepage" />
      <HeroBanner />
      <RecentArticles />
      <ShortBio />
    </Layout>
  );
}

export default Homepage;
|
"use strict";
// TypeScript-emitted helper: copies own enumerable members from b onto d
// and wires d.prototype to an object whose prototype is b.prototype,
// so `d` behaves as a subclass of `b`.
var __extends = (this && this.__extends) || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var BaseTooltip_1 = require("../../../tooltip/BaseTooltip");
var index_1 = require("fgraphics/dist/index");
var FC_1 = require("../../FC");
// Compiled TypeScript output (see sourceMappingURL at end of file):
// tooltip with a title label, an optional body-text label and a
// rectangle background, all styled from FC.config.tooltipSettings.
var ConsoleTooltip = (function (_super) {
    __extends(ConsoleTooltip, _super);
    function ConsoleTooltip() {
        _super.call(this);
    }
    // Builds the display tree: background graphics first, then a content
    // container holding the centered title and text labels.
    ConsoleTooltip.prototype.construction = function () {
        _super.prototype.construction.call(this);
        this.bg = index_1.EngineAdapter.instance.createGraphicsWrapper();
        this.view.addChild(this.bg);
        this.contentCont = index_1.EngineAdapter.instance.createDisplayObjectContainerWrapper();
        this.view.addChild(this.contentCont);
        this.titleLabel = index_1.EngineAdapter.instance.createTextWrapper();
        this.contentCont.addChild(this.titleLabel);
        this.titleLabel.align = index_1.TextWrapperAlign.CENTER;
        this.titleLabel.color = FC_1.FC.config.tooltipSettings.titleLabelColor;
        this.titleLabel.size = FC_1.FC.config.tooltipSettings.titleLabelSize;
        this.textLabel = index_1.EngineAdapter.instance.createTextWrapper();
        this.contentCont.addChild(this.textLabel);
        this.textLabel.align = index_1.TextWrapperAlign.CENTER;
        this.textLabel.color = FC_1.FC.config.tooltipSettings.textLabelColor;
        this.textLabel.size = FC_1.FC.config.tooltipSettings.textLabelSize;
    };
    // Applies tooltipData to the labels; the body label is shown only when
    // there is body text, then the layout is refreshed via arrange().
    ConsoleTooltip.prototype.commitData = function () {
        _super.prototype.commitData.call(this);
        if (!this.tooltipData) {
            return;
        }
        this.titleLabel.text = this.tooltipData.title;
        this.textLabel.text = this.tooltipData.text;
        if (this.tooltipData.text) {
            this.textLabel.visible = true;
        }
        else {
            this.textLabel.visible = false;
        }
        this.arrange();
    };
    // Centers the labels horizontally (>> 1 is integer halving), redraws
    // the background rect and centers the content container inside it.
    ConsoleTooltip.prototype.arrange = function () {
        _super.prototype.arrange.call(this);
        if (this.textLabel.visible) {
            var labelMaxWidth = Math.max(this.titleLabel.width, this.textLabel.width);
            this.titleLabel.x = ((labelMaxWidth - this.titleLabel.width) >> 1);
            this.textLabel.x = ((labelMaxWidth - this.textLabel.width) >> 1);
            this.textLabel.y = this.titleLabel.y + this.titleLabel.height;
        }
        else {
            this.titleLabel.x = 0;
        }
        this.bg.clear();
        this.bg.beginFill(FC_1.FC.config.tooltipSettings.bgColor, FC_1.FC.config.tooltipSettings.bgAlpha);
        this.bg.lineStyle(FC_1.FC.config.tooltipSettings.borderWidth, FC_1.FC.config.tooltipSettings.borderColor, FC_1.FC.config.tooltipSettings.borderAlpha);
        this.bg.drawRect(0, 0, this.contentCont.width + FC_1.FC.config.tooltipSettings.bgToContentShift.x, this.contentCont.height + FC_1.FC.config.tooltipSettings.bgToContentShift.y);
        this.bg.endFill();
        this.contentCont.x = this.bg.x + ((this.bg.width - this.contentCont.width) >> 1);
        this.contentCont.y = this.bg.y + ((this.bg.height - this.contentCont.height) >> 1);
    };
    return ConsoleTooltip;
}(BaseTooltip_1.BaseTooltip));
exports.ConsoleTooltip = ConsoleTooltip;
//# sourceMappingURL=ConsoleTooltip.js.map |
'use strict';
import React, {
Component
} from 'react';
import {
TouchableOpacity,
View
} from 'react-native';
import shallowCompare from 'react-addons-shallow-compare';
// Thin wrapper around TouchableOpacity: lays out children in a row and
// forwards presses to the onPress prop.
class Button extends Component {
  // Skip re-renders when neither props nor state changed (shallow check).
  shouldComponentUpdate(nextProps, nextState) {
    return shallowCompare(this, nextProps, nextState);
  }

  // Class-property arrow keeps `this` bound when passed as a callback.
  handlePress = (event) => {
    this.props.onPress(event);
  }

  render() {
    const { style, children } = this.props;
    return (
      <View>
        <TouchableOpacity
          style={style}
          onPress={this.handlePress} >
          <View style={{ flexDirection: 'row', alignItems: 'center'}}>
            {children}
          </View>
        </TouchableOpacity>
      </View>
    );
  }
}
// NOTE(review): React.PropTypes was removed from the react package in
// React 16 — migrate to the standalone 'prop-types' package if this code
// runs on a modern React version.
Button.propTypes = {
  style : React.PropTypes.any,
  children : React.PropTypes.node.isRequired,
  onPress : React.PropTypes.func.isRequired
};

export default Button;
|
import { DfTxIndexer, DfTxTransaction } from '@src/module.indexer/model/dftx/_abstract'
import { AppointOracle, CAppointOracle } from '@defichain/jellyfish-transaction'
import { RawBlock } from '@src/module.indexer/model/_abstract'
import { Injectable } from '@nestjs/common'
import { OracleMapper } from '@src/module.model/oracle'
import { OracleTokenCurrencyMapper } from '@src/module.model/oracle.token.currency'
import { OracleHistoryMapper } from '@src/module.model/oracle.history'
import { HexEncoder } from '@src/module.model/_hex.encoder'
@Injectable()
export class AppointOracleIndexer extends DfTxIndexer<AppointOracle> {
  OP_CODE: number = CAppointOracle.OP_CODE

  constructor (
    private readonly oracleMapper: OracleMapper,
    private readonly oracleHistoryMapper: OracleHistoryMapper,
    private readonly oracleTokenCurrencyMapper: OracleTokenCurrencyMapper
  ) {
    super()
  }

  /**
   * Indexes AppointOracle transactions: writes one oracle record, one
   * history entry, and a token-currency row per declared price feed.
   */
  async index (block: RawBlock, txns: Array<DfTxTransaction<AppointOracle>>): Promise<void> {
    // Fresh block snapshot per document so no two records share an object.
    const blockOf = (): { hash: string, height: number, medianTime: number, time: number } =>
      ({ hash: block.hash, height: block.height, medianTime: block.mediantime, time: block.time })

    for (const entry of txns) {
      const { data } = entry.dftx
      const oracleId = entry.txn.txid

      await this.oracleMapper.put({
        id: oracleId,
        weightage: data.weightage,
        priceFeeds: data.priceFeeds,
        block: blockOf()
      })

      await this.oracleHistoryMapper.put({
        id: `${oracleId}-${block.height}-${entry.txn.txid}`,
        sort: HexEncoder.encodeHeight(block.height) + entry.txn.txid,
        oracleId: oracleId,
        weightage: data.weightage,
        priceFeeds: data.priceFeeds,
        block: blockOf()
      })

      for (const feed of data.priceFeeds) {
        await this.oracleTokenCurrencyMapper.put({
          id: `${feed.token}-${feed.currency}-${oracleId}`,
          key: `${feed.token}-${feed.currency}`,
          oracleId: oracleId,
          token: feed.token,
          currency: feed.currency,
          weightage: data.weightage,
          block: blockOf()
        })
      }
    }
  }

  /**
   * Reverses everything index() wrote for this block's transactions.
   */
  async invalidate (block: RawBlock, txns: Array<DfTxTransaction<AppointOracle>>): Promise<void> {
    for (const entry of txns) {
      const oracleId = entry.txn.txid
      await this.oracleMapper.delete(oracleId)
      await this.oracleHistoryMapper.delete(`${oracleId}-${block.height}-${entry.txn.txid}`)
      for (const feed of entry.dftx.data.priceFeeds) {
        await this.oracleTokenCurrencyMapper.delete(`${feed.token}-${feed.currency}-${oracleId}`)
      }
    }
  }
}
|
package client
import (
"bytes"
"encoding/json"
"net/http"
"github.com/danielkrainas/csense/api/v1"
)
// HooksAPI describes the hook-management operations of the csense API.
type HooksAPI interface {
	// CreateHook registers a new hook on the server.
	CreateHook(r *v1.NewHookRequest) error
}

// hooksAPI implements HooksAPI by embedding the base Client, reusing
// its request plumbing (urls(), do()).
type hooksAPI struct {
	*Client
}
// Hooks exposes the hook-management endpoints of the API.
func (c *Client) Hooks() HooksAPI {
	return &hooksAPI{Client: c}
}
// CreateHook serializes hr and submits it to the hooks endpoint.
//
// NOTE(review): the request uses PUT even though this is a create —
// confirm that matches the server's route definition. The response
// status code is not inspected (preserves existing behavior); consider
// surfacing non-2xx statuses as errors.
func (c *hooksAPI) CreateHook(hr *v1.NewHookRequest) error {
	body, err := json.Marshal(hr)
	if err != nil {
		return err
	}

	urlStr, err := c.urls().BuildHooks()
	if err != nil {
		return err
	}

	r, err := http.NewRequest(http.MethodPut, urlStr, bytes.NewBuffer(body))
	if err != nil {
		return err
	}

	resp, err := c.do(r)
	if err != nil {
		return err
	}
	// Close the body so the underlying connection can be reused.
	// (The original re-checked err here, but err is always nil on this
	// path, and the final `return err` was therefore always nil.)
	resp.Body.Close()
	return nil
}
|
#!/bin/bash
## Shorthand for git branch --list
##
# Strip the leading marker characters perl treats as non-word ("* " on the
# current branch, two spaces otherwise) so output is bare branch names.
git branch --list "$@" |
	exec perl -pe 's#^\W*##'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.