text
stringlengths 1
1.05M
|
|---|
function calculateRemainingDays($domainId) {
    // Look up the domain record; bail out early when it does not exist.
    $record = Domain::find($domainId);
    if (!$record) {
        return -1; // Domain not found
    }
    // Seconds from now until the stored SSL expiration timestamp.
    $secondsLeft = $record->ssltime - time();
    if ($secondsLeft <= 0) {
        return 0; // SSL expiration time has already passed
    }
    // Whole days remaining, rounded up so a partial day still counts.
    return ceil($secondsLeft / 86400);
}
|
import React, { Component } from 'react';

class Counter extends Component {
  constructor() {
    super();
    // Timestamp captured at construction; logged by every lifecycle hook
    // so the console output distinguishes instances from one another.
    this.ts = new Date().toLocaleString();
  }

  // Mount order is componentWillMount -> componentDidMount;
  // componentWillUnmount fires when this instance is removed from the tree.
  // NOTE(review): componentWillMount is deprecated in modern React.
  componentWillUnmount() {
    console.log(this.ts + ' componentWillUnmount');
  }
  componentWillMount() {
    console.log(this.ts + ' componentWillMount');
  }
  componentDidMount() {
    console.log(this.ts + ' componentDidMount');
  }
  render() {
    return (
      <div>Counter</div>
    )
  }
}
export default class Main extends Component {
constructor() {
super();
this.state = { show: true };
}
render() {
return (
<div>
<button onClick={() => this.setState({ show: !this.state.show })}>update</button>
{
this.state.show ? <div>
<Counter />
</div> : <span>
<Counter />
</span>
}
</div>
)
}
}
|
package cn.celess.blog.entity.model;
import cn.celess.blog.entity.Tag;
import lombok.Getter;
import lombok.Setter;
import java.util.List;
/**
* @author : xiaohai
* @date : 2019/04/23 12:02
*/
@Getter
@Setter
public class ArticleModel {
    private Long id;
    /**
     * Title.
     */
    private String title;
    /**
     * Summary / abstract.
     */
    private String summary;
    /**
     * Markdown body.
     */
    private String mdContent;
    /**
     * Article type: true(1) = original work, false(0) = repost.
     */
    private Boolean original;
    /**
     * If a repost, the URL of the source article.
     */
    private String url;
    /**
     * Publish time (pre-formatted string).
     */
    private String publishDateFormat;
    /**
     * Last-update time (pre-formatted string).
     */
    private String updateDateFormat;
    /**
     * Category.
     */
    private String category;
    /**
     * Tags.
     */
    private List<Tag> tags;
    /**
     * Author.
     */
    private UserModel author;
    // Adjacent articles for previous/next navigation.
    private ArticleModel preArticle;
    private ArticleModel nextArticle;
    /**
     * Read count.
     */
    private Long readingNumber;
    private Integer likeCount;
    private Integer dislikeCount;
    /**
     * Article visibility: true = public, false = private.
     */
    private Boolean open;
    // Soft-delete flag (primitive boolean, defaults to false).
    private boolean deleted;
}
|
#!/bin/bash
# Load the build environment (sourced so its variables apply here).
. ./setup.sh
# Abort instead of building on top of a failed clean step.
set -e
make clean
make
from dataclasses import dataclass
from typing import List
import httpx
from pydantic import BaseModel
# Base URL of the public Deck of Cards HTTP API.
API_BASE = "https://deckofcardsapi.com/api"
class NewDeckResponse(BaseModel):
    """Payload returned by the /deck/new endpoint."""

    success: bool
    deck_id: str       # opaque id used for subsequent draws
    remaining: int     # cards left in the deck
    shuffled: bool
class CardImagesResponse(BaseModel):
    """Alternative image URLs for a single card."""

    svg: str
    png: str
class CardResponse(BaseModel):
    """A single card as returned by the draw endpoint."""

    code: str          # e.g. "AS" for ace of spades
    image: str         # default image URL
    images: CardImagesResponse
    value: str
    suit: str
class DeckDrawResponse(BaseModel):
    """Payload returned by the /deck/<id>/draw endpoint."""

    success: bool
    deck_id: str
    remaining: int     # cards left after this draw
    cards: List[CardResponse]
class DeckOfCardsApi(BaseModel):
    """Thin async client for the Deck of Cards API."""

    @staticmethod
    async def _fetch(api_path):
        # One-shot client per request; fine for this simple helper.
        async with httpx.AsyncClient() as client:
            return await client.get(f"{API_BASE}{api_path}")

    @staticmethod
    def _ensure_api_sucess(response_json):
        # The API reports failure in-band via a "success" flag.
        if not response_json["success"]:
            raise Exception("Deck of Cards API returned non-sucess")

    @staticmethod
    async def new_deck(shuffle=True, count=1) -> NewDeckResponse:
        """Create `count` new deck(s), optionally pre-shuffled."""
        path = "/deck/new" + ("/shuffle" if shuffle else "") + f"?deck_count={count}"
        payload = (await DeckOfCardsApi._fetch(path)).json()
        DeckOfCardsApi._ensure_api_sucess(payload)
        return NewDeckResponse(**payload)

    @staticmethod
    async def deck_draw(deck_id, count=1) -> DeckDrawResponse:
        """Draw `count` card(s) from the deck identified by `deck_id`."""
        payload = (await DeckOfCardsApi._fetch(f"/deck/{deck_id}/draw/?count={count}")).json()
        DeckOfCardsApi._ensure_api_sucess(payload)
        return DeckDrawResponse(**payload)
if __name__ == "__main__":
    async def main():
        # Create a fresh shuffled deck, then draw one card from it.
        deck = await DeckOfCardsApi.new_deck()
        print(deck)
        card = await DeckOfCardsApi.deck_draw(deck.deck_id)
        print(card)

    # asyncio.run() is the supported entry point; the previous
    # get_event_loop().run_until_complete() pattern is deprecated.
    __import__("asyncio").run(main())
|
# Download solo-learn pretrained CIFAR-10 checkpoints (weights + args) for
# each self-supervised method into trained_models/cifar10/<method>/.
mkdir -p trained_models/cifar10
cd trained_models/cifar10 || exit 1

# download_method <dir> <gdrive_id>...
# Creates <dir> (idempotent), downloads each Google Drive file into it.
# Runs in a subshell so the caller's working directory is untouched.
download_method() {
    local dir=$1
    shift
    mkdir -p "$dir"
    (
        cd "$dir" || exit 1
        local id
        for id in "$@"; do
            gdown "https://drive.google.com/uc?id=${id}"
        done
    )
}

# Barlow Twins
download_method barlow_twins 1x7y44E05vuobibfObT4n3jqLI8QNVESV 1Mxfq2YGQ53bNRV2fNYzvYIneM5ZGeb2h
# BYOL
download_method byol 1zOE8O2yPyhE23LMoesMoDPdLyh1qbI8k 1l1XIWE1ailKzsQnUPGDgyvK0escOsta6
# DeepCluster V2
download_method deepclusterv2 13L_QlwrBRJhdeCaVdgkRYWfvoh4PIWwj 17jRJ-LC56uWRuNluWXecXHjTxomuGs_T
# DINO
download_method dino 1Wv9w5j22YitGAWi4p3IJYzLVo4fQkpSu 1PBElgMN5gjZsK3o1L55jNnb5A1ebbOvu
# MoCo V2+
download_method mocov2plus 1viIUTHmLdozDWtzMicV4oOyC50iL2QDU 1ZLpgK13N8rgBxvqRbyGFd_8mF03pStIx
# NNCLR
download_method nnclr 1zKReUmJ35vRnQxfSxn7yRVRW_oy3LUDF 1UyI9r19PoFGqHjd5r1UEpstCSTkleja7
# ReSSL
download_method ressl 1UdDWvgpyvj3VFVm0lq-WrGj0-GTcEpHq 1XkkYUuEI79__4GpCCDhuFEbv0BbRdCBh
# SimCLR
download_method simclr 15fI7gb9M92jZWBZoGLvarYDiNYK3RN2O 1HMJof4v2B5S-khepI_x8bgFv72I5KMc9
# Simsiam
download_method simsiam 1ZMGGTziK0DbCP43fDx2rPFrtJxCLJDmb 1hh1QrQiWfRej-8D6L67T_F7Je9-EUUg2
# SwAV
download_method swav 1CPok55wwN_4QecEjubdLeBo_9qWSJTHw 1t59f1Q8ifx8tAySGpD2pmvogNcR1USEo
# VIbCReg
download_method vibcreg 1dHsKrhCcwWIXFwQJ4oVPgLcEcT3SecQV 1OPsUf8VnKo5w6T8-rEQFaodUNxvQ8CTT
# VICReg
download_method vicreg 1TeliMNt5bOchqJj2u_JjB0_ahKB5LKi5 1dsdPL-5QNS9LyHypYN6VQfEuiNWLKJqN
# W-MSE
download_method wmse 1jTjpmVTi9rtzy3NPEEp_61py-jeHy5fi 1YLuqazfSDOruSiu4Kl6OAexDnt5LKEIT
|
"""Sensor platform for the GitHub integration."""
from __future__ import annotations
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from aiogithubapi import GitHubRepositoryModel
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, IssuesPulls
from .coordinator import (
CoordinatorKeyType,
DataUpdateCoordinators,
GitHubBaseDataUpdateCoordinator,
RepositoryCommitDataUpdateCoordinator,
RepositoryIssueDataUpdateCoordinator,
RepositoryReleaseDataUpdateCoordinator,
)
@dataclass
class GitHubSensorBaseEntityDescriptionMixin:
    """Mixin for required GitHub base description keys."""

    # Selects which coordinator in DataUpdateCoordinators backs the sensor.
    coordinator_key: CoordinatorKeyType
@dataclass
class GitHubSensorInformationEntityDescriptionMixin(
    GitHubSensorBaseEntityDescriptionMixin
):
    """Mixin for required GitHub information description keys."""

    # Extracts the state value from the repository information payload.
    value_fn: Callable[[GitHubRepositoryModel], StateType]
@dataclass
class GitHubSensorIssueEntityDescriptionMixin(GitHubSensorBaseEntityDescriptionMixin):
    """Mixin for required GitHub issue description keys."""

    # Extracts the state value from the IssuesPulls payload.
    value_fn: Callable[[IssuesPulls], StateType]
@dataclass
class GitHubSensorBaseEntityDescription(SensorEntityDescription):
    """Describes GitHub sensor entity default overrides."""

    icon: str = "mdi:github"
    # Sensors are opt-in: disabled in the entity registry unless overridden.
    entity_registry_enabled_default: bool = False
@dataclass
class GitHubSensorInformationEntityDescription(
    GitHubSensorBaseEntityDescription,
    GitHubSensorInformationEntityDescriptionMixin,
):
    """Describes GitHub information sensor entity."""
@dataclass
class GitHubSensorIssueEntityDescription(
    GitHubSensorBaseEntityDescription,
    GitHubSensorIssueEntityDescriptionMixin,
):
    """Describes GitHub issue sensor entity."""
# Static sensor descriptions. Entries with coordinator_key="information"
# read fields from the repository information payload; entries with
# coordinator_key="issue" read counts from the IssuesPulls payload.
SENSOR_DESCRIPTIONS: tuple[
    GitHubSensorInformationEntityDescription | GitHubSensorIssueEntityDescription,
    ...,
] = (
    GitHubSensorInformationEntityDescription(
        key="stargazers_count",
        name="Stars",
        icon="mdi:star",
        native_unit_of_measurement="Stars",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.stargazers_count,
        coordinator_key="information",
    ),
    GitHubSensorInformationEntityDescription(
        key="subscribers_count",
        name="Watchers",
        icon="mdi:glasses",
        native_unit_of_measurement="Watchers",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        # The API returns a watcher_count, but subscribers_count is more accurate
        value_fn=lambda data: data.subscribers_count,
        coordinator_key="information",
    ),
    GitHubSensorInformationEntityDescription(
        key="forks_count",
        name="Forks",
        icon="mdi:source-fork",
        native_unit_of_measurement="Forks",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.forks_count,
        coordinator_key="information",
    ),
    GitHubSensorIssueEntityDescription(
        key="issues_count",
        name="Issues",
        native_unit_of_measurement="Issues",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.issues_count,
        coordinator_key="issue",
    ),
    GitHubSensorIssueEntityDescription(
        key="pulls_count",
        name="Pull Requests",
        native_unit_of_measurement="Pull Requests",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.pulls_count,
        coordinator_key="issue",
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up GitHub sensor based on a config entry."""
    repositories: dict[str, DataUpdateCoordinators] = hass.data[DOMAIN]
    entities: list[GitHubSensorBaseEntity] = []
    # The "latest ..." entity classes each pick their own coordinator key.
    latest_entity_classes = (
        GitHubSensorLatestCommitEntity,
        GitHubSensorLatestIssueEntity,
        GitHubSensorLatestPullEntity,
        GitHubSensorLatestReleaseEntity,
    )
    for coordinators in repositories.values():
        repo_info = coordinators["information"].data
        for entity_cls in latest_entity_classes:
            entities.append(entity_cls(coordinators, repo_info))
        # One description-driven entity per static sensor description.
        for description in SENSOR_DESCRIPTIONS:
            entities.append(
                GitHubSensorDescriptionEntity(coordinators, description, repo_info)
            )
    async_add_entities(entities)
class GitHubSensorBaseEntity(CoordinatorEntity, SensorEntity):
    """Defines a base GitHub sensor entity."""

    _attr_attribution = "Data provided by the GitHub API"

    coordinator: GitHubBaseDataUpdateCoordinator

    def __init__(
        self,
        coordinator: GitHubBaseDataUpdateCoordinator,
        repository_information: GitHubRepositoryModel,
    ) -> None:
        """Initialize the sensor.

        All sensors for one repository share a single device, identified
        by the coordinator's repository string.
        """
        super().__init__(coordinator)
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self.coordinator.repository)},
            name=repository_information.full_name,
            manufacturer="GitHub",
            configuration_url=f"https://github.com/{self.coordinator.repository}",
            entry_type=DeviceEntryType.SERVICE,
        )

    @property
    def available(self) -> bool:
        """Return if entity is available (coordinator healthy and has data)."""
        return super().available and self.coordinator.data is not None
class GitHubSensorDescriptionEntity(GitHubSensorBaseEntity):
    """Defines a GitHub sensor entity based on entity descriptions."""

    coordinator: GitHubBaseDataUpdateCoordinator
    entity_description: GitHubSensorInformationEntityDescription | GitHubSensorIssueEntityDescription

    def __init__(
        self,
        coordinators: DataUpdateCoordinators,
        description: GitHubSensorInformationEntityDescription
        | GitHubSensorIssueEntityDescription,
        repository_information: GitHubRepositoryModel,
    ) -> None:
        """Initialize a GitHub sensor entity.

        The description's coordinator_key selects which coordinator in
        ``coordinators`` backs this entity.
        """
        super().__init__(
            coordinator=coordinators[description.coordinator_key],
            repository_information=repository_information,
        )
        self.entity_description = description
        self._attr_name = f"{repository_information.full_name} {description.name}"
        self._attr_unique_id = f"{repository_information.id}_{description.key}"

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor via the description's value_fn."""
        return self.entity_description.value_fn(self.coordinator.data)
class GitHubSensorLatestBaseEntity(GitHubSensorBaseEntity):
    """Defines a base GitHub latest sensor entity."""

    # Subclasses override these two to choose coordinator and display name.
    _name: str = "Latest"
    _coordinator_key: CoordinatorKeyType = "information"

    _attr_entity_registry_enabled_default = False
    _attr_icon = "mdi:github"

    def __init__(
        self,
        coordinators: DataUpdateCoordinators,
        repository_information: GitHubRepositoryModel,
    ) -> None:
        """Initialize a GitHub sensor entity."""
        super().__init__(
            coordinator=coordinators[self._coordinator_key],
            repository_information=repository_information,
        )
        self._attr_name = f"{repository_information.full_name} {self._name}"
        # e.g. "<repo id>_latest_release"
        self._attr_unique_id = (
            f"{repository_information.id}_{self._name.lower().replace(' ', '_')}"
        )
class GitHubSensorLatestReleaseEntity(GitHubSensorLatestBaseEntity):
    """Defines a GitHub latest release sensor entity."""

    _coordinator_key: CoordinatorKeyType = "release"
    _name: str = "Latest Release"

    # Unlike the other "latest" sensors, this one is enabled by default.
    _attr_entity_registry_enabled_default = True

    coordinator: RepositoryReleaseDataUpdateCoordinator

    @property
    def native_value(self) -> StateType:
        """Return the release name, truncated to 255 chars (HA state limit)."""
        return self.coordinator.data.name[:255]

    @property
    def extra_state_attributes(self) -> Mapping[str, str | None]:
        """Return the extra state attributes (release URL and tag)."""
        release = self.coordinator.data
        return {
            "url": release.html_url,
            "tag": release.tag_name,
        }
class GitHubSensorLatestIssueEntity(GitHubSensorLatestBaseEntity):
    """Defines a GitHub latest issue sensor entity."""

    _name: str = "Latest Issue"
    _coordinator_key: CoordinatorKeyType = "issue"

    coordinator: RepositoryIssueDataUpdateCoordinator

    @property
    def available(self) -> bool:
        """Return True if entity is available (repo has at least one issue)."""
        return super().available and self.coordinator.data.issues_count != 0

    @property
    def native_value(self) -> StateType:
        """Return the latest issue title, truncated to 255 chars."""
        if (issue := self.coordinator.data.issue_last) is None:
            return None
        return issue.title[:255]

    @property
    def extra_state_attributes(self) -> Mapping[str, str | int | None] | None:
        """Return the extra state attributes (issue URL and number)."""
        if (issue := self.coordinator.data.issue_last) is None:
            return None
        return {
            "url": issue.html_url,
            "number": issue.number,
        }
class GitHubSensorLatestPullEntity(GitHubSensorLatestBaseEntity):
    """Defines a GitHub latest pull sensor entity."""

    # Pull data is delivered by the shared "issue" coordinator.
    _coordinator_key: CoordinatorKeyType = "issue"
    _name: str = "Latest Pull Request"

    coordinator: RepositoryIssueDataUpdateCoordinator

    @property
    def available(self) -> bool:
        """Return True if entity is available (repo has at least one PR)."""
        return super().available and self.coordinator.data.pulls_count != 0

    @property
    def native_value(self) -> StateType:
        """Return the latest pull request title, truncated to 255 chars."""
        if (pull := self.coordinator.data.pull_last) is None:
            return None
        return pull.title[:255]

    @property
    def extra_state_attributes(self) -> Mapping[str, str | int | None] | None:
        """Return the extra state attributes (PR URL and number)."""
        if (pull := self.coordinator.data.pull_last) is None:
            return None
        return {
            "url": pull.html_url,
            "number": pull.number,
        }
class GitHubSensorLatestCommitEntity(GitHubSensorLatestBaseEntity):
    """Defines a GitHub latest commit sensor entity."""

    _coordinator_key: CoordinatorKeyType = "commit"
    _name: str = "Latest Commit"

    coordinator: RepositoryCommitDataUpdateCoordinator

    @property
    def native_value(self) -> StateType:
        """Return the first line of the commit message, capped at 255 chars."""
        return self.coordinator.data.commit.message.splitlines()[0][:255]

    @property
    def extra_state_attributes(self) -> Mapping[str, str | int | None]:
        """Return the extra state attributes (commit sha and URL)."""
        return {
            "sha": self.coordinator.data.sha,
            "url": self.coordinator.data.html_url,
        }
|
<reponame>asheerrizvifhm9j/Arsylk<gh_stars>0
package com.arsylk.mammonsmite.Live2D;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import net.rbgrn.android.glwallpaperservice.GLWallpaperService;
public class LiveWallpaperService extends GLWallpaperService {
    // Process-wide handle to the running service; set in onCreate and
    // cleared in onDestroy.
    private static LiveWallpaperService instance = null;

    public static LiveWallpaperService getInstance() {
        return instance;
    }

    private Live2DEngine live2DEngine = null;

    @Override
    public void onCreate() {
        super.onCreate();
        instance = this;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        instance = null;
    }

    /**
     * Reloads the wallpaper configuration on the current renderer, if an
     * engine and renderer exist.
     */
    public synchronized void requestReload() {
        if (live2DEngine != null && live2DEngine.getRenderer() != null) {
            live2DEngine.getRenderer().loadNewConfig(
                    new L2DConfig(getBaseContext(), L2DConfig.MODE_WALLPAPER));
        }
    }

    public Engine onCreateEngine() {
        live2DEngine = new Live2DEngine();
        return live2DEngine;
    }

    /** Wallpaper engine wiring touch input to the Live2D GL renderer. */
    public class Live2DEngine extends GLWallpaperService.GLEngine {
        private L2DWallpaperRenderer renderer = null;

        @Override
        public void onCreate(SurfaceHolder surfaceHolder) {
            super.onCreate(surfaceHolder);
            renderer = new L2DWallpaperRenderer(getApplicationContext());
            setRenderer(renderer);
            setRenderMode(RENDERMODE_CONTINUOUSLY);
        }

        @Override
        public void onVisibilityChanged(boolean visible) {
            super.onVisibilityChanged(visible);
        }

        @Override
        public void onTouchEvent(MotionEvent event) {
            switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    // FIX: capture the renderer in a local and null-check it.
                    // The queued Runnable executes later on the GL thread, and
                    // the field may have been nulled by onDestroy() in the
                    // meantime — the original risked a NullPointerException.
                    final L2DWallpaperRenderer r = renderer;
                    if (r != null) {
                        queueEvent(new Runnable() {
                            @Override
                            public void run() {
                                r.startAttackMotion();
                            }
                        });
                    }
                    break;
                case MotionEvent.ACTION_UP:
                    break;
                case MotionEvent.ACTION_MOVE:
                    break;
                case MotionEvent.ACTION_CANCEL:
                    break;
            }
        }

        @Override
        public void onDestroy() {
            super.onDestroy();
            renderer = null;
        }

        public L2DWallpaperRenderer getRenderer() {
            return renderer;
        }
    }
}
|
<gh_stars>0
/**
*
*/
package de.unirostock.sems.bives.webservice.client;
import java.util.List;
/**
* The Class BivesComparisonRequest to compare two versions using BiVeS web service.
*
* @author martin
*/
public class BivesComparisonRequest extends BivesRequest implements BivesComparisonCommands
{
	/**
	 * The Constructor to create a comparison request.
	 */
	public BivesComparisonRequest ()
	{
		super ();
	}

	/**
	 * The Constructor to create a comparison request.
	 *
	 * @param file1 the version one
	 * @param file2 the version two
	 */
	public BivesComparisonRequest (String file1, String file2)
	{
		super ();
		models.add (file1);
		models.add (file2);
	}

	/**
	 * The Constructor to create a comparison request.
	 *
	 * @param file1 the version one
	 * @param file2 the version two
	 * @param commands the commands to send
	 */
	public BivesComparisonRequest (String file1, String file2, List<String> commands)
	{
		super ();
		models.add (file1);
		models.add (file2);
		// BUG FIX: the original loop added each element of `commands` back
		// into the SAME parameter list (`commands.add (c)`), which throws
		// ConcurrentModificationException for any non-empty list and never
		// registered the commands with the request. The parameter shadows the
		// inherited field (cf. `models`, used unqualified above, and
		// isReady()'s "at least one command" contract via super.isReady()).
		// NOTE(review): assumes BivesRequest declares a protected `commands`
		// list like `models` — confirm against the superclass.
		for (String c : commands)
			this.commands.add (c);
	}

	/**
	 * Sets the models.
	 *
	 * @param file1 the version one
	 * @param file2 the version two
	 */
	public void setModels (String file1, String file2)
	{
		models.clear ();
		models.add (file1);
		models.add (file2);
	}

	/**
	 * Checks if the request is ready. (are there exactly two models and at least one command?)
	 *
	 * @see de.unirostock.sems.bives.webservice.client.BivesRequest#isReady()
	 */
	public boolean isReady ()
	{
		return models.size () == 2 && super.isReady ();
	}
}
|
#! /bin/bash
# Bootstrap the SQLite database from the example dump on first run,
# then start the Django development server.
if [ ! -f db.sqlite3 ]; then
    # Feed the dump straight to sqlite3 (no need to pipe through cat).
    sqlite3 db.sqlite3 < example_db.sql
    echo 'db.sqlite3 created'
fi
python manage.py runserver
|
<reponame>pharmer/openshift<gh_stars>100-1000
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
)
var (
	// Legacy, un-grouped API version ("" group, v1).
	legacyGroupVersion  = schema.GroupVersion{Group: "", Version: "v1"}
	legacySchemeBuilder = runtime.NewSchemeBuilder(addLegacyKnownTypes, corev1.AddToScheme)
	// Kept for callers that still install the un-grouped types.
	DeprecatedInstallWithoutGroup = legacySchemeBuilder.AddToScheme
)
// addLegacyKnownTypes registers the security API kinds under the legacy
// (un-grouped) v1 group version.
func addLegacyKnownTypes(scheme *runtime.Scheme) error {
	scheme.AddKnownTypes(legacyGroupVersion,
		&SecurityContextConstraints{},
		&SecurityContextConstraintsList{},
		&PodSecurityPolicySubjectReview{},
		&PodSecurityPolicySelfSubjectReview{},
		&PodSecurityPolicyReview{},
	)
	return nil
}
|
<filename>app/src/main/java/sample/listup/com/listupsample/activity/ListAllBookActivity.java<gh_stars>1-10
package sample.listup.com.listupsample.activity;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ListView;
import android.widget.Toast;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonArrayRequest;
import com.android.volley.toolbox.JsonObjectRequest;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import sample.listup.com.listupsample.R;
import sample.listup.com.listupsample.adapter.BookListAdapter;
import sample.listup.com.listupsample.models.Book;
import sample.listup.com.listupsample.utils.AppController;
import sample.listup.com.listupsample.utils.Helper;
public class ListAllBookActivity extends AppCompatActivity {
//Variabls
private ListView booksListView;
private BookListAdapter bookListAdapter;
private List<Book> bookList = new ArrayList<Book>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_list_all_book);
// Setting Listview and adapter.
booksListView = (ListView) findViewById(R.id.books_list_view);
bookListAdapter = new BookListAdapter(this,bookList);
booksListView.setAdapter(bookListAdapter);
// It is API fetches books
getBooks();
}
// It fetches all books stored..
private void getBooks() {
//GET request to fetch books
JsonArrayRequest booksRequest =
new JsonArrayRequest(Request.Method.GET, Helper.GET_BOOKS_URL,
new Response.Listener<JSONArray>() {
@Override
public void onResponse(JSONArray response) {
Toast.makeText(ListAllBookActivity.this, "List is loading..", Toast.LENGTH_SHORT).show();
for(int i=0;i<response.length();i++){
try {
JSONObject object = response.getJSONObject(i);
String bookImage = object.getString("image");
String bookTitle = object.getString("title");
String bookPrice = object.getString("amazonPrice");
int userPrice = object.getInt("userPrice");
String author = object.getString("author");
Book b = new Book(bookTitle,author,bookImage,bookPrice,userPrice);
bookList.add(b);
} catch (JSONException e) {
e.printStackTrace();
}
}
Log.d("books_size",bookList.size()+" ");
bookListAdapter.notifyDataSetChanged();
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
}
});
AppController.getInstance().addToRequestQueue(booksRequest);
}
}
|
package inmem
import (
"reflect"
"sync"
"time"
"github.com/armon/relay"
"github.com/armon/relay/broker"
)
// InmemBroker implements the Broker interface in-memory.
type InmemBroker struct {
	Closed bool
	// Queues maps queue name to its pending messages, oldest first.
	Queues map[string][]interface{}
	// lock guards Queues for concurrent publishers/consumers.
	lock sync.RWMutex
}
// InmemConsumer implements the Consumer interface.
type InmemConsumer struct {
	Broker *InmemBroker
	Queue  string
	Closed bool
	// NeedAck is true while a dequeued message awaits Ack/Nack.
	NeedAck bool
	// LastDequeue holds the pending message so Nack can requeue it.
	LastDequeue interface{}
}
// InmemPublisher implements the Publisher interface.
type InmemPublisher struct {
	Broker *InmemBroker
	Queue  string
	Closed bool
}
// NewInmemBroker constructs an empty in-memory broker ready for use.
func NewInmemBroker() *InmemBroker {
	return &InmemBroker{Queues: map[string][]interface{}{}}
}
// Close marks the broker as closed. Queued messages are not drained.
func (i *InmemBroker) Close() error {
	i.Closed = true
	return nil
}
// Consumer returns a consumer bound to queue q on this broker.
func (i *InmemBroker) Consumer(q string) (broker.Consumer, error) {
	c := &InmemConsumer{
		Broker: i,
		Queue:  q,
	}
	return c, nil
}
// Publisher returns a publisher bound to queue q on this broker.
func (i *InmemBroker) Publisher(q string) (broker.Publisher, error) {
	p := &InmemPublisher{
		Broker: i,
		Queue:  q,
	}
	return p, nil
}
// Close marks the publisher as closed; it does not affect the broker.
func (i *InmemPublisher) Close() error {
	i.Closed = true
	return nil
}
// Publish appends a message to the tail of this publisher's queue.
// Safe for concurrent use via the broker's lock.
func (i *InmemPublisher) Publish(in interface{}) error {
	i.Broker.lock.Lock()
	defer i.Broker.lock.Unlock()
	i.Broker.Queues[i.Queue] = append(i.Broker.Queues[i.Queue], in)
	return nil
}
// Close nacks any unacknowledged message (requeueing it) and marks the
// consumer closed.
func (i *InmemConsumer) Close() error {
	if i.NeedAck {
		i.Nack()
	}
	i.Closed = true
	return nil
}
// Consume blocks until a message is available (no timeout).
func (i *InmemConsumer) Consume(out interface{}) error {
	return i.ConsumeTimeout(out, 0)
}
// ConsumeAck consumes a single message and immediately acknowledges it.
func (i *InmemConsumer) ConsumeAck(out interface{}) error {
	if err := i.ConsumeTimeout(out, 0); err != nil {
		return err
	}
	return i.Ack()
}
// ConsumeTimeout polls the queue until a message arrives or the timeout
// elapses, then copies the message into out via reflection and marks the
// consumer as needing an Ack/Nack. A timeout of 0 means wait forever
// (timeoutCh stays nil, so that select case never fires).
func (i *InmemConsumer) ConsumeTimeout(out interface{}, timeout time.Duration) error {
	if i.NeedAck {
		panic("Consuming when NeedAck")
	}
	var timeoutCh <-chan time.Time
	if timeout > 0 {
		timeoutCh = time.After(timeout)
	}
	haveMsg := false
	var msg interface{}
	for {
		// Try to pop the head of the queue under the lock.
		i.Broker.lock.Lock()
		queue := i.Broker.Queues[i.Queue]
		if len(queue) > 0 {
			msg = queue[0]
			haveMsg = true
			// Shift remaining messages left and shrink the slice.
			copy(queue[0:], queue[1:])
			i.Broker.Queues[i.Queue] = queue[:len(queue)-1]
		}
		i.Broker.lock.Unlock()
		if haveMsg {
			break
		}
		// Busy-wait with a 1ms poll interval between attempts.
		select {
		case <-time.After(time.Millisecond):
			continue
		case <-timeoutCh:
			return relay.TimedOut
		}
	}
	// Set that we need ack
	i.NeedAck = true
	i.LastDequeue = msg
	// Set the message: copy the dequeued value into *out.
	dst := reflect.Indirect(reflect.ValueOf(out))
	src := reflect.Indirect(reflect.ValueOf(msg))
	dst.Set(src)
	return nil
}
// Ack confirms the last dequeued message, clearing the pending-ack state.
// Panics if no message is awaiting acknowledgement.
func (i *InmemConsumer) Ack() error {
	if !i.NeedAck {
		panic("Ack not needed")
	}
	i.NeedAck = false
	i.LastDequeue = nil
	return nil
}
// Nack returns the last dequeued message to the FRONT of the queue so it
// will be redelivered first, then clears the pending-ack state.
// Panics if no message is awaiting acknowledgement.
func (i *InmemConsumer) Nack() error {
	if !i.NeedAck {
		panic("Nack not needed")
	}
	i.Broker.lock.Lock()
	defer i.Broker.lock.Unlock()
	// Rebuild the queue with the unacknowledged message prepended.
	old := i.Broker.Queues[i.Queue]
	requeued := make([]interface{}, 0, len(old)+1)
	requeued = append(requeued, i.LastDequeue)
	requeued = append(requeued, old...)
	i.Broker.Queues[i.Queue] = requeued
	i.NeedAck = false
	i.LastDequeue = nil
	return nil
}
|
def factorial(n):
    """Return n! for a non-negative integer n.

    Args:
        n: the integer whose factorial to compute.

    Returns:
        n! (1 for n == 0 or n == 1).

    Raises:
        ValueError: if n is negative (the original silently returned 1).
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result
|
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.gson.Gson;
@WebServlet("/myServlet")
public class MyServlet extends HttpServlet {

    /**
     * Handles POST requests by serialising a payload to JSON and writing
     * it to the response body as UTF-8 "application/json".
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("application/json");
        response.setCharacterEncoding("UTF-8");
        // Get the data here (e.g. from the request)
        String payload = "My Data";
        response.getWriter().write(new Gson().toJson(payload));
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.curso.lambdas.interfaces;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Chema
*/
public class UsoInterfazGenericaTest {

    // System under test; recreated before every test by setUp().
    private UsoInterfazGenerica u;

    public UsoInterfazGenericaTest() {
    }

    @Before
    public void setUp() {
        u = new UsoInterfazGenerica();
    }

    /**
     * Test of metodo method, of class UsoInterfazGenerica: applies the given
     * lambda to the argument and, per the assertions, prefixes the result
     * with "El valor es ".
     */
    @Test
    public void testMetodo() {
        assertEquals(u.metodo(numero -> numero.toString(), 12), "El valor es 12");
        assertEquals(u.metodo(argumento -> "Resultado: " + argumento * 2, 12), "El valor es Resultado: 24");
    }

    /**
     * getSplitter() should return a Funcion splitting a string on newlines.
     */
    @Test
    public void splitterTest() {
        Funcion<String, List<String>> splitter = u.getSplitter();
        String texto = "uno\ndos";
        List<String> asList = Arrays.asList("uno", "dos");
        assertEquals(splitter.aplicar(texto), asList);
    }
}
|
#!/bin/bash
# Install Docker (docker-engine) on Ubuntu Trusty from the legacy
# apt.dockerproject.org repository, then verify and grant group access.
sudo apt-get update
sudo apt-get --assume-yes install apt-transport-https ca-certificates
# Add Docker's apt repository signing key.
sudo apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
sudo su -c 'echo "deb https://apt.dockerproject.org/repo ubuntu-trusty main" >> /etc/apt/sources.list.d/docker.list'
sudo apt-get update
# Remove the old lxc-docker package if present.
sudo apt-get purge lxc-docker
apt-cache policy docker-engine
# Kernel extras (AUFS support) and apparmor are recommended for Docker.
sudo apt-get --assume-yes install linux-image-extra-$(uname -r)
sudo apt-get --assume-yes install apparmor
sudo apt-get --assume-yes install docker-engine
sudo service docker start
# Smoke-test the installation.
sudo docker run hello-world
# Let the ubuntu user run docker without sudo (takes effect on re-login).
sudo groupadd docker
sudo usermod -aG docker ubuntu
|
#!/bin/bash
. ./script/util.sh
# Sync the local bridge branch from the remote bridge, push it to origin,
# then merge it into the branch that was checked out when we started.
# Relies on helpers/variables from script/util.sh (get_current_branch,
# check_if_*, echo_msg/echo_err, $remote_for_sync, $bridge_branch, ...).
pull_from_master_bridge() {
    CURRENT_BRANCH="$(get_current_branch)"
    echo_msg "current branch is: $CURRENT_BRANCH"
    check_if_remote_exists $remote_for_sync
    remote_exists_code=$?
    check_if_branch_exists $remote_bridge_branch
    remote_bridge_branch_exists_code=$?
    check_if_branch_exists $origin_bridge_branch
    bridge_branch_exists_code=$?
    if [ "$remote_exists_code" != 0 ] ; then
        echo_err "remote $remote_for_sync does not exist"
    elif [ "$remote_bridge_branch_exists_code" != 0 ] ; then
        echo_err "remote branch $remote_bridge_branch does not exist"
    # BUG FIX: the original read `!= 0]` (no space before `]`), which is a
    # test-syntax error, so this guard never worked.
    elif [ "$bridge_branch_exists_code" != 0 ] ; then
        echo_err "branch $origin_bridge_branch does not exist"
    else
        # Check out the bridge branch.
        echo_msg "git checkout $bridge_branch"
        git checkout $bridge_branch
        # Sync from the remote bridge.
        echo_msg "git merge $remote_bridge_branch"
        git fetch bridge $bridge_branch
        git merge $remote_bridge_branch
        echo_msg "git push -u origin $bridge_branch"
        git push -u origin $bridge_branch
        # Check out the original branch again.
        echo_msg "git checkout $CURRENT_BRANCH"
        git checkout $CURRENT_BRANCH
        git pull origin $CURRENT_BRANCH
        # Merge the bridge branch into the current branch.
        echo_msg "git merge $bridge_branch"
        git merge $bridge_branch
        echo_msg "git push -u origin $CURRENT_BRANCH"
        git push -u origin $CURRENT_BRANCH
    fi
}
# Check git status: only run the sync when the working tree is clean.
git_is_clean
git_is_clean_code=$?
if [ "$git_is_clean_code" == 0 ] ; then
    pull_from_master_bridge
fi
|
// Karma configuration: Jasmine (+jquery/sinon) tests run in PhantomJS.
module.exports = function (config) {
  config.set({
    basePath: '',
    frameworks: ['jasmine-jquery', 'jasmine', 'sinon'],
    // Libraries are loaded in order before the test sources.
    files: [
      'node_modules/lodash/lodash.js',
      'node_modules/jquery/dist/jquery.js',
      'node_modules/angular/angular.min.js',
      'angular-test-runner.js',
      'test/**/*.js'
    ],
    exclude: [],
    // Compile HTML templates into Angular's $templateCache.
    preprocessors: {
      '**/*.html': ['ng-html2js']
    },
    reporters: ['dots'],
    port: 9876,
    colors: true,
    logLevel: config.LOG_INFO,
    // Watch mode: rerun tests on change instead of a single pass.
    autoWatch: true,
    singleRun: false,
    browsers: ['PhantomJS']
  });
};
|
<gh_stars>0
def Welcome():
    """Return the ASCII-art startup banner (printed once at import time)."""
    return '''
\ \ / /__| | ___ ___ _ __ ___ ___
\ \ /\ / / _ \ |/ __/ _ \| _ ` _ \ / _ |
\ V V / __/ | (_| (_) | | | | | | __/
\_/\_/ \___|_|\___\___/|_| |_| |_|\___|
______________________
< PDF Metadata Locator >
----------------------
\ ^__^
\ (oo)\_______
(__)\ )\/\/
||----w |
|| ||
'''
print (Welcome())
def OnePDF():
    """Interactively analyse one PDF: prompt for a path, decrypt it via a
    password-list attack if needed, then print its document-info metadata.
    """
    try:
        import PyPDF2
    except ImportError:
        # FIX: the original printed and fell through, then crashed with a
        # NameError when using PyPDF2 below. Bail out cleanly instead.
        print('Sorry PyPDF2 not found. Please install this module to continue')
        return
    print('[*] Please enter the path of the file you would like to analyse:')
    PDFchoice = input(' >')
    try:
        PDFfile = open(PDFchoice, 'rb')
    except FileNotFoundError:
        print('Sorry, File not found. Try again')
        # FIX: return the retry's result — the original fell through after
        # the recursive call with PDFfile unbound.
        return OnePDF()
    targetpdf = PyPDF2.PdfFileReader(PDFfile)
    # If the pdf is encrypted, notify the user and ask whether to decrypt.
    if targetpdf.isEncrypted:
        print('The File You Entered Is Encrypted And Needs To Be Decrypted Before Further Analysis.')
        print('Decrypt File? Y/N')
        Decryptchoice = input(' > ')
        if Decryptchoice != 'Y':
            return
        eachfile = PyPDF2.PdfFileReader(PDFfile)
        print('Please specify the path to your password list. E.G /home/Brian/<Password List.txt>')
        passwordfilechoice = input(' > ')
        # Dictionary attack: try each word (newline stripped) in turn.
        with open(passwordfilechoice, 'r') as password_file:
            for password in password_file:
                if eachfile.decrypt(password.strip('\n')) == 1:
                    print(' [+] Password Successful: {}'.format(password))
                    results = eachfile.getDocumentInfo()
                    for M in results:
                        print('[*] ' + M + ': ' + results[M])
                    # FIX: return rather than exit(), so a calling menu
                    # keeps running after a successful extraction.
                    return
                print(' [+] Password Unsuccessful: {}'.format(password))
        # FIX: the original fell through and read metadata from the
        # still-encrypted file when every password failed.
        print('[-] Password list exhausted; could not decrypt the file.')
        return
    # Extract metadata from PDFs that were not encrypted.
    results = targetpdf.getDocumentInfo()
    for M in results:
        print('[*] ' + M + ': ' + results[M])
def ManyPDF():
    """Walk the current directory tree for files with a chosen extension,
    copy the matches into a user-named folder, and (for PDFs) print each
    file's metadata, brute-forcing encrypted ones from a wordlist.

    Interactive: all choices are read from stdin; results go to stdout.
    """
    try:
        import PyPDF2
    except ImportError:
        raise ImportError('<Unable to find PyPDF2. Please Install. >')
    import shutil
    import os
    print('What type of File would you like to search for: .pdf, .py .html? Please note that Metadata extraction only works for PDFs. Choosing any other types of file will copy them to a location of your choosing but will be unable to extract Metadata')
    choice = input(' > ')
    rootDir = '.'
    pdflist = []
    path = []
    # BUGFIX: finalpath was previously only assigned inside the match branch
    # and was unbound (NameError) when no files matched.
    finalpath = ''
    # Cycle through all the files in the directory and below.
    for dirName, subdirList, fileList in os.walk(rootDir, topdown=False):
        for fname in fileList:
            if fname.endswith(choice):
                # Join directory and file name for display.
                filewithpath = (dirName + '/' + fname)
                path.append(filewithpath)
                # One match per line for display.
                finalpath = ('\n'.join(path))
                pdflist.append(fname)
    # BUGFIX: computed after the walk so it is always defined (was unbound
    # when nothing matched).
    numberofpdfs = len(pdflist)
    print(' [*] Number of Files Found: {}'.format(numberofpdfs))
    print(finalpath)
    print('Proceed With Inspection of The Files?')
    inspectiondec = input(' > Y/N: ')
    #################### Cycling through the files again and moving matches ##########################
    if inspectiondec == 'Y':
        targetfolder = []
        print('This program will now create a folder to contain the PDF files before further inspection. Please specify where you would like this folder to be located. E.G /home/Brian/Desktop/<FolderName>')
        # Take the name and path the user has provided.
        pdffolder = input(' > ')
        # Create the folder to the specifications of the user.
        os.mkdir(pdffolder)
        # Re-cycle through all the directories.
        for dirName, subdirList, fileList in os.walk(rootDir, topdown=False):
            for fname in fileList:
                if fname.endswith(choice):
                    fullfilepath = (dirName + '/' + fname)
                    # Try to copy matches into the user's desired folder.
                    try:
                        shutil.copy2(fullfilepath, pdffolder)
                    except shutil.SameFileError:
                        pass
                    # Print the files which have been moved.
                    print(fullfilepath)
                    # Queue the copied files for metadata analysis.
                    targetfolder.append(fullfilepath)
        # BUGFIX: was `choice != 'pdf'`, which rejected the advertised
        # '.pdf' input; accept anything ending in 'pdf'.
        if not choice.endswith('pdf'):
            print('The file you have chosen cannot undergo metatdata extraction. Exiting...')
            exit()
        ################### Extract Metadata for each file ######################
        for eachfile in targetfolder:
            try:
                PDFfile = open(eachfile, 'rb')
            except OSError:
                # BUGFIX: was `except RuntimeError: pass`, which open() never
                # raises and which fell through to an unbound PDFfile; skip
                # unreadable files instead.
                continue
            targetpdf = PyPDF2.PdfFileReader(PDFfile)
            # If this file is encrypted, begin the decryption routine.
            if targetpdf.isEncrypted:
                print('{} Is Encrypted And Needs To Be Decrypted Before Further Analysis'.format(eachfile))
                print('Decrypt File? Y/N')
                Decryptchoice = input(' > ')
                if Decryptchoice == 'Y':
                    # Separate name so the path in `eachfile` stays intact
                    # for the report header below.
                    encryptedreader = PyPDF2.PdfFileReader(PDFfile)
                    # Ask for the path to the password list.
                    print('Please Specify The Path To Your Password List. E.G /home/Brian/<Password List.txt>')
                    passwordfilechoice = input(' > ')
                    password_file = open(passwordfilechoice, 'r')
                    # Brute-force the password.
                    for password in password_file:
                        # decrypt() returns 1 when the password is correct.
                        if encryptedreader.decrypt(password.strip('\n')) == 1:
                            print(' [+] Password successful: {}'.format(password))
                            results = encryptedreader.getDocumentInfo()
                            # For loop is just for formatting.
                            for Metadata in results:
                                print('[*] ' + Metadata + ': ' + results[Metadata])
                            exit()
                        else:
                            print(' [+] Password Unsuccessful: {}'.format(password))
                else:
                    exit()
            results = targetpdf.getDocumentInfo()
            # Print the metadata for PDFs which were not encrypted.
            print('[-------------] PDF Metadata for : {} [----------------] '.format(eachfile))
            for M in results:
                print('[*] ' + M + ': ' + results[M])
    else:
        exit()
# Top-level menu: dispatch to single-file or directory-wide analysis.
print('[*] Press [1] for targeting a single PDF ')
print('[*] Press [2] for all PDFs in the current directory and below')
choice = input(' > ')
actions = {'1': OnePDF, '2': ManyPDF}
handler = actions.get(choice)
if handler is not None:
    handler()
|
<gh_stars>0
//import apiLinks from '../vue-app/src/constants/apiLinks' ;
const axios = require('axios');
var express = require('express')
var router = express.Router()
const FormData = require('form-data');
const docparser = require('docparser-node');
const secretAPIKey = '<KEY>';
const client = new docparser.Client("419b9e2e239f4fc7c538f89afd994d57eb87dce7");
// Test function to check if docparser client is responding
// Not sure why the HTTP version does not work but client does?
// router.get('/ping', function(req, res) {
// try {
// let fileResponse = axios.get('https://api.docparser.com/v1/ping',{
// headers: {
// 'Content-Type': 'multipart/form-data'
// },
// auth: {
// username: 'f7dd6<PASSWORD>',
// password: ''
// },
// }).then(function() {
// console.log(fileResponse);
// })
//
// }
// catch(err){
// console.log(err);
// this.message = err.response.data.error
//
// }
// });
// GET /ping — verify the docparser credentials; responds "Pong" on success,
// "Failed" when authentication is rejected.
router.get('/ping', (req, res) => {
  client.ping()
    .then(() => {
      console.log('authentication succeeded!');
      res.json('Pong');
    })
    .catch(() => {
      console.log('authentication failed!');
      res.json('Failed');
    });
});
// GET /list-parsers — return the parsers configured for this docparser
// account as JSON.
router.get('/list-parsers', function(req, res) {
  client.getParsers()
    .then(function (parsers) {
      console.log("list of parsers retrieved")
      res.json(parsers)
    })
    .catch(function (err) {
      console.log(err)
      // BUGFIX: the error path previously sent no response, leaving the
      // client request hanging until timeout.
      res.status(500).json("Failed to retrieve parsers")
    })
});
// GET /upload-and-parse/:parserId/:filePath — stream a local file (e.g.
// './uploads/filename', URL-encoded in :filePath) to docparser and queue it
// for parsing with the given parser.
router.get('/upload-and-parse/:parserId/:filePath', function(req, res) {
  let parserId = req.params.parserId;
  let filePath = decodeURIComponent(req.params.filePath);
  console.log(parserId, filePath)
  client.uploadFileByStream(parserId, require('fs').createReadStream(filePath))
    .then(function (result) {
      console.log("Parsed!");
      res.json(result);
      // => {"id":"document_id","file_size":198989,"quota_used":16,"quota_left":34,"quota_refill":"1970-01-01T00:00:00+00:00"}
    })
    .catch(function (err) {
      console.log(err)
      // BUGFIX: the error path previously sent no response, leaving the
      // client request hanging until timeout.
      res.status(500).json("Upload failed")
    })
});
// router.post('/upload-and-parse/', function(req, res) {
// newFile.on('end', function() {
// const form_data = new FormData();
// form_data.append("file", newFile);
// const request_config = {
// method: "post",
// url: 'https://api.docparser.com/v1/document/upload/pyrhiwzgakvl',
// auth: {
// username: '6<PASSWORD>bcf<PASSWORD>',
// password: ''
// },
// headers: {
// "Content-Type": "multipart/form-data"
// },
// data: form_data
// };
// console.log(form_data);
// axios.post('https://api.docparser.com/v1/document/upload/pyrhiwzgakvl', form_data, {
// withCredentials: true,
// headers: form_data.getHeaders(),
// auth: {
// username: '6<PASSWORD>bcf<PASSWORD>'
// },
// });
// });
// });
// router.get('/json-info/:parserId/:documentId', function(req, res) {
// let parserId = req.params.parserId;
// let documentId = req.params.documentId;
// console.log(parserId, documentId);
// try {
// let fileResponse = axios.post('https://api.docparser.com/v1/results/'+parserId+'/'+documentId,{
// headers: {
// 'Content-Type': 'multipart/form-data'
// },
// auth: {
// username: '<PASSWORD>',
// password: ''
// },
// })
//
// }
// catch(err){
// console.log(err);
// this.message = err.response.data.error
//
// }
// });
// GET /get-parsed-json/:parserId/:documentId — fetch the parsed results for
// a previously uploaded document, as a JSON object.
router.get('/get-parsed-json/:parserId/:documentId', function(req, res) {
  let parserId = req.params.parserId;
  let documentId = req.params.documentId;
  client.getResultsByDocument(parserId, documentId, {format: 'object'})
    .then(function (result) {
      res.json(result)
      return result
    })
    .catch(function (err) {
      console.log(err)
      // BUGFIX: the error path previously sent no response, leaving the
      // client request hanging until timeout.
      res.status(500).json("Failed to fetch results")
    })
});
module.exports = router;
|
from django import forms
from django.core.exceptions import ValidationError
import ipaddr
from cyder.base.constants import IP_TYPES, IP_TYPE_4, IP_TYPE_6
from cyder.base.eav.forms import get_eav_form
from cyder.base.eav.models import Attribute
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.network.models import Network, NetworkAV
from cyder.cydhcp.site.models import Site
from cyder.cydhcp.vlan.models import Vlan
from cyder.management.commands.lib.utilities import long2ip
from cydns.ip.models import ipv6_to_longs
class NetworkForm(forms.ModelForm, UsabilityFormMixin):
    """Create/edit form for a Network, with optional auto-creation of the
    DHCP "routers" and "subnet-mask" attribute-value pairs on save.
    """
    site = forms.ModelChoiceField(
        queryset=Site.objects.all(),
        empty_label="(Defaults to parent's site.)",
        required=False,
        help_text="The site the network will be put into. "
                  "Defaults to parent network's site"
    )
    # When checked, save() creates the DHCP "routers"/"subnet-mask" AV pairs.
    routers = forms.BooleanField(label='Option "routers"', required=False,
                                 initial=True,
                                 help_text='Auto-create DHCP "routers" option')
    # Optional overrides for the values derived from the network itself.
    gateway = forms.CharField(
        label='Alternate Gateway', required=False,
        help_text=('Fill in this field to change the default gateway '
                   'in the "routers" DHCP option.'))
    subnet_mask = forms.CharField(
        label='Alternate Subnet Mask', required=False,
        help_text=('Fill in this field to change the default subnet mask '
                   'in the "routers" DHCP option.'))

    def __init__(self, *args, **kwargs):
        """Hide the routers/gateway/subnet_mask fields when editing an
        existing Network (they only apply at creation time)."""
        super(NetworkForm, self).__init__(*args, **kwargs)
        if kwargs.get('instance'):
            self.fields['routers'].initial = False
            for fieldname in ['routers', 'gateway', 'subnet_mask']:
                field = self.fields[fieldname]
                field.widget = field.hidden_widget()
        self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
        self.fields['dhcpd_raw_include'].widget.attrs.update(
            {'cols': '80',
             'style':
             'display: none; width: 680px;'})

    class Meta:
        model = Network
        # The packed upper/lower IP words are derived, not user-editable.
        exclude = ('start_upper', 'start_lower',
                   'end_upper', 'end_lower')
        widgets = {'ip_type': forms.RadioSelect}

    def clean(self):
        """Validate the CIDR string against ip_type and, when routers is
        checked, validate the optional gateway/subnet_mask overrides.

        Raises ValidationError for a bad IP type, malformed address/netmask,
        a gateway outside the network, or an invalid/too-small subnet mask.
        """
        cleaned_data = super(NetworkForm, self).clean()
        network_str = cleaned_data.get('network_str', '')
        try:
            ip_type = cleaned_data.get('ip_type')
            if ip_type not in IP_TYPES:
                raise ValidationError("IP type must be either IPv4 or IPv6.")
            # Parsing also validates network_str; ip_upper/ip_lower are the
            # packed representation (not used further in this method).
            if ip_type == IP_TYPE_4:
                network = ipaddr.IPv4Network(network_str)
                ip_upper, ip_lower = 0, int(network.network)
            elif ip_type == IP_TYPE_6:
                network = ipaddr.IPv6Network(network_str)
                ip_upper, ip_lower = ipv6_to_longs(network.network)
            if cleaned_data["routers"]:
                for key in ["gateway", "subnet_mask"]:
                    value = cleaned_data.get(key)
                    if value:
                        if ip_type == IP_TYPE_4:
                            value = ipaddr.IPv4Network(value)
                            size = 32
                        elif ip_type == IP_TYPE_6:
                            value = ipaddr.IPv6Network(value)
                            size = 128
                        if key == "gateway" and not network.overlaps(value):
                            raise ValidationError("Network does not contain "
                                                  "specified gateway.")
                        if key == "subnet_mask":
                            # A valid mask is a contiguous run of 1-bits
                            # followed by 0-bits, exactly `size` bits long,
                            # so "01" must never appear in the bit string.
                            binstring = bin(int(value.broadcast))
                            binstring = "".join(binstring[2:])
                            if "01" in binstring or len(binstring) != size:
                                raise ValidationError("Invalid subnet mask.")
                            prefixlen = binstring.count('1')
                            if prefixlen > network.prefixlen:
                                raise ValidationError(
                                    "Subnet mask is smaller than network.")
                            value = str(value.broadcast)
                        cleaned_data[key] = value
        except ipaddr.AddressValueError, e:
            raise ValidationError("Bad IP address {0}".format(e))
        except ipaddr.NetmaskValueError, e:
            raise ValidationError("Bad netmask {0}".format(e))
        return cleaned_data

    def save(self, *args, **kwargs):
        """Save the Network and, when requested, create the "routers" and
        "subnet-mask" attribute-value pairs, defaulting the gateway to the
        network address and the mask to the network's own netmask."""
        network = super(NetworkForm, self).save(*args, **kwargs)
        if self.cleaned_data['routers']:
            attr_routers = Attribute.objects.get(name="routers")
            attr_subnet_mask = Attribute.objects.get(name="subnet-mask")
            if not self.cleaned_data.get('gateway'):
                # Default gateway: the address part of "addr/prefix".
                gateway, _ = tuple(network.network_str.split("/"))
            else:
                gateway = self.cleaned_data['gateway']
            if not self.cleaned_data.get('subnet_mask'):
                subnet_mask = str(network.network.netmask)
            else:
                subnet_mask = self.cleaned_data['subnet_mask']
            NetworkAV.objects.create(
                entity=network, attribute=attr_routers, value=gateway)
            NetworkAV.objects.create(
                entity=network, attribute=attr_subnet_mask, value=subnet_mask)
        return network
# Attribute-value form for Network EAV pairs, built by the shared factory.
NetworkAVForm = get_eav_form(NetworkAV, Network)
class NetworkForm_network(forms.Form):
network = forms.CharField(
required=True,
help_text='Enter the address and mask in '
'CIDR notation (e.g. 10.0.0.0/24)')
ip_type = forms.ChoiceField(choices=IP_TYPES.items())
def clean(self):
cleaned_data = super(NetworkForm_network, self).clean()
network_str = cleaned_data.get('network', '')
try:
ip_type = cleaned_data.get('ip_type')
if ip_type not in IP_TYPES:
raise ValidationError("IP type must be either IPv4 or IPv6.")
elif ip_type == IP_TYPE_4:
network = ipaddr.IPv4Network(network_str)
ip_upper, ip_lower = 0, int(network.network)
elif ip_type == IP_TYPE_4:
network = ipaddr.IPv6Network(network_str)
ip_upper, ip_lower = ipv6_to_longs(network.network)
except ipaddr.AddressValueError, e:
raise ValidationError("Bad IP address {0}".format(e))
except ipaddr.NetmaskValueError, e:
raise ValidationError("Bad netmask {0}".format(e))
if (Network.objects.filter(ip_upper=ip_upper,
ip_lower=ip_lower).exists()):
raise ValidationError("This network has already been allocated.")
# TODO add parent calculaitons
return cleaned_data
class NetworkForm_site(forms.Form):
    """Form for picking an existing Site; rejects a missing selection."""
    site = forms.ModelChoiceField(
        queryset=Site.objects.all(),
        required=True
    )

    def clean(self):
        # Reject empty/unknown selections with the same error as before.
        cleaned_data = super(NetworkForm_site, self).clean()
        if not cleaned_data.get('site'):
            raise ValidationError("That site does not exist")
        return cleaned_data
class NetworkForm_vlan(forms.Form):
    """Form for attaching a network to a VLAN: reuse an existing one,
    create a new one, or skip VLAN assignment entirely."""
    # Existing VLAN to reuse (relevant to the 'existing' choice below).
    vlan = forms.ModelChoiceField(
        queryset=Vlan.objects.all(),
        required=True,
    )
    # Name and number for a brand-new VLAN (relevant to the 'new' choice).
    name = forms.CharField()
    number = forms.IntegerField()
    # NOTE(review): initial is 'e', which matches none of the choice values
    # ('existing'/'new'/'none') — confirm the intended default selection.
    create_choice = forms.ChoiceField(
        widget=forms.RadioSelect, initial='e', choices=(
            ('existing', 'Use existing VLAN template'),
            ('new', 'Create new VLAN'),
            ('none', "Don't assign a VLAN"),
        ))
|
// Thin re-export: expose the Library view as this module's default export.
export { default } from "~/views/Library";
|
#!/bin/bash
#SBATCH --exclude=hermes[1-4],trillian[1-3],artemis[1-7],qdata[1-8],nibbler[1-4],slurm[1-5]
#SBATCH --output=granger/60_nodes/script_59_246_250.out
#SBATCH --error=granger/60_nodes/script_59_246_250.err
#SBATCH --job-name="246-250"
# Slurm batch job: render frames 246-250 of Star-collapse-ntsc.blend with
# Blender on one thread. Host name and nanosecond timestamps bracket the
# render so wall-clock timing can be reconstructed from the output file.
hostname
date +%s%N
# -t 1: single render thread; -b: run headless; -s/-e: start/end frame;
# -a: render the animation. Blender's own console output is discarded.
time blender -t 1 -b Star-collapse-ntsc.blend -s 246 -e 250 -a &> /dev/null
date +%s%N
|
const express = require("express");
const router = express.Router();
const { Company } = require("../models/Company.js");
const { Member } = require("../models/Member.js");
router.get("/all", async (req, res) => {
console.log("---get all companies---");
Company.find()
.then((companys) => {
if (!companys) {
console.log("No such companys");
return res.status(404).send("No such companys");
}
console.log("Success in geting all companies");
console.log(companys);
res.send(companys);
})
.catch((err) => res.status(500).send("Server err"));
});
router.get("/company-members", async (req, res) => {
var companyId = req.query.company_id;
Company.findById(companyId)
.then((company) => {
if (!company) {
return res.status(404).send("No such company");
}
members = company.members;
Member.find(
{
_id: { $in: members },
},
function (err, docs) {
res.send({ members: docs });
}
);
})
.catch((err) => res.status(500).send("Server err" + err));
});
router.get("/:id", async (req, res) => {
console.log("---get company by id---");
const companyId = req.params.id;
Company.findById(companyId)
.then((company) => {
if (!company) {
console.log("No such companys");
return res.status(404).send("No such company");
}
console.log("Success in geting company in id");
console.log(company);
res.send(company);
})
.catch((err) => res.status(500).send("Server err"));
});
module.exports = router;
|
package Longest_Harmonious_Subsequence;
import java.util.*;
public class Solution {
    /**
     * Returns the length of the longest harmonious subsequence of nums —
     * a subsequence whose maximum and minimum differ by exactly 1.
     * Counts occurrences of each value, then pairs each value with the
     * next-higher one. Returns 0 when no such pair exists.
     */
    public int findLHS(int[] nums) {
        Map<Integer, Integer> counts = new HashMap<>();
        for (int value : nums) {
            Integer seen = counts.get(value);
            counts.put(value, seen == null ? 1 : seen + 1);
        }
        int best = 0;
        for (Map.Entry<Integer, Integer> entry : counts.entrySet()) {
            // Only value/value+1 pairs can form a harmonious subsequence.
            Integer higher = counts.get(entry.getKey() + 1);
            if (higher != null) {
                best = Math.max(best, entry.getValue() + higher);
            }
        }
        return best;
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.findLHS(new int[]{1,3,2,2,5,2,3,7}));
    }
}
|
#!/bin/bash
# Run the phpspec suite against each supported PHP interpreter in turn.
# (The original wrapped the run in a function named `test`, shadowing the
# shell builtin; inlined here instead.)
cd /app/layer-php-sdk

for version in 5.4 5.5 5.6; do
    echo "Testing PHP $version"
    "php-$version" bin/phpspec run
done
|
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import CaretDownSvg from '@rsuite/icon-font/lib/legacy/CaretDown';
// Icon component for the legacy "caret down" glyph, assembled via the shared
// SVG-icon factory. (This file is script-generated — do not hand-edit logic.)
const CaretDown = createSvgIcon({
  as: CaretDownSvg,
  ariaLabel: 'caret down',
  category: 'legacy',
  displayName: 'CaretDown'
});

export default CaretDown;
|
<gh_stars>0
/*
* $Header: /home/cvs/jakarta-tomcat-4.0/catalina/src/share/org/apache/catalina/startup/WebRuleSet.java,v 1.1 2001/10/17 00:44:02 craigmcc Exp $
* $Revision: 1.1 $
* $Date: 2001/10/17 00:44:02 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact <EMAIL>.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.catalina.startup;
import org.apache.catalina.Context;
import org.apache.catalina.Wrapper;
import org.apache.catalina.deploy.SecurityConstraint;
import org.apache.commons.digester.Digester;
import org.apache.commons.digester.Rule;
import org.apache.commons.digester.RuleSetBase;
import org.xml.sax.Attributes;
import java.lang.reflect.Method;
/**
* <p><strong>RuleSet</strong> for processing the contents of a web application
* deployment descriptor (<code>/WEB-INF/web.xml</code>) resource.</p>
*
* @author <NAME>
* @version $Revision: 1.1 $ $Date: 2001/10/17 00:44:02 $
*/
public class WebRuleSet extends RuleSetBase {

    // ----------------------------------------------------- Instance Variables

    /**
     * The matching pattern prefix to use for recognizing our elements.
     */
    protected String prefix = null;

    // ------------------------------------------------------------ Constructor

    /**
     * Construct an instance of this <code>RuleSet</code> with the default
     * matching pattern prefix.
     */
    public WebRuleSet() {
        this("");
    }

    /**
     * Construct an instance of this <code>RuleSet</code> with the specified
     * matching pattern prefix.
     *
     * @param prefix Prefix for matching pattern rules (including the
     *  trailing slash character)
     */
    public WebRuleSet(String prefix) {
        super();
        this.namespaceURI = null;
        this.prefix = prefix;
    }

    // --------------------------------------------------------- Public Methods

    /**
     * <p>Add the set of Rule instances defined in this RuleSet to the
     * specified <code>Digester</code> instance, associating them with
     * our namespace URI (if any).  This method should only be called
     * by a Digester instance.</p>
     *
     * @param digester Digester instance to which the new Rule instances
     *  should be added.
     */
    public void addRuleInstances(Digester digester) {

        // <web-app> root: record the descriptor's DTD public ID on the
        // object at the top of the digester stack.
        digester.addRule(prefix + "web-app",
                         new SetPublicIdRule(digester, "setPublicId"));

        // <context-param>: name/value pairs for the Context.
        digester.addCallMethod(prefix + "web-app/context-param",
                               "addParameter", 2);
        digester.addCallParam(prefix + "web-app/context-param/param-name", 0);
        digester.addCallParam(prefix + "web-app/context-param/param-value", 1);

        // <display-name> and <distributable>.
        digester.addCallMethod(prefix + "web-app/display-name",
                               "setDisplayName", 0);
        digester.addRule(prefix + "web-app/distributable",
                         new SetDistributableRule(digester));

        // <ejb-local-ref>: local EJB references.
        digester.addObjectCreate(prefix + "web-app/ejb-local-ref",
                                 "org.apache.catalina.deploy.ContextLocalEjb");
        digester.addSetNext(prefix + "web-app/ejb-local-ref",
                            "addLocalEjb",
                            "org.apache.catalina.deploy.ContextLocalEjb");
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/description",
                               "setDescription", 0);
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/ejb-link",
                               "setLink", 0);
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/ejb-ref-name",
                               "setName", 0);
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/ejb-ref-type",
                               "setType", 0);
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/local",
                               "setLocal", 0);
        digester.addCallMethod(prefix + "web-app/ejb-local-ref/local-home",
                               "setHome", 0);

        // <ejb-ref>: remote EJB references.
        digester.addObjectCreate(prefix + "web-app/ejb-ref",
                                 "org.apache.catalina.deploy.ContextEjb");
        digester.addSetNext(prefix + "web-app/ejb-ref",
                            "addEjb",
                            "org.apache.catalina.deploy.ContextEjb");
        digester.addCallMethod(prefix + "web-app/ejb-ref/description",
                               "setDescription", 0);
        digester.addCallMethod(prefix + "web-app/ejb-ref/ejb-link",
                               "setLink", 0);
        digester.addCallMethod(prefix + "web-app/ejb-ref/ejb-ref-name",
                               "setName", 0);
        digester.addCallMethod(prefix + "web-app/ejb-ref/ejb-ref-type",
                               "setType", 0);
        digester.addCallMethod(prefix + "web-app/ejb-ref/home",
                               "setHome", 0);
        digester.addCallMethod(prefix + "web-app/ejb-ref/remote",
                               "setRemote", 0);

        // <env-entry>: environment entries.
        digester.addObjectCreate(prefix + "web-app/env-entry",
                                 "org.apache.catalina.deploy.ContextEnvironment");
        digester.addSetNext(prefix + "web-app/env-entry",
                            "addEnvironment",
                            "org.apache.catalina.deploy.ContextEnvironment");
        digester.addCallMethod(prefix + "web-app/env-entry/description",
                               "setDescription", 0);
        digester.addCallMethod(prefix + "web-app/env-entry/env-entry-name",
                               "setName", 0);
        digester.addCallMethod(prefix + "web-app/env-entry/env-entry-type",
                               "setType", 0);
        digester.addCallMethod(prefix + "web-app/env-entry/env-entry-value",
                               "setValue", 0);

        // <error-page>: error code / exception type mappings.
        digester.addObjectCreate(prefix + "web-app/error-page",
                                 "org.apache.catalina.deploy.ErrorPage");
        digester.addSetNext(prefix + "web-app/error-page",
                            "addErrorPage",
                            "org.apache.catalina.deploy.ErrorPage");
        digester.addCallMethod(prefix + "web-app/error-page/error-code",
                               "setErrorCode", 0);
        digester.addCallMethod(prefix + "web-app/error-page/exception-type",
                               "setExceptionType", 0);
        digester.addCallMethod(prefix + "web-app/error-page/location",
                               "setLocation", 0);

        // <filter>: filter definitions and their init parameters.
        digester.addObjectCreate(prefix + "web-app/filter",
                                 "org.apache.catalina.deploy.FilterDef");
        digester.addSetNext(prefix + "web-app/filter",
                            "addFilterDef",
                            "org.apache.catalina.deploy.FilterDef");
        digester.addCallMethod(prefix + "web-app/filter/description",
                               "setDescription", 0);
        digester.addCallMethod(prefix + "web-app/filter/display-name",
                               "setDisplayName", 0);
        digester.addCallMethod(prefix + "web-app/filter/filter-class",
                               "setFilterClass", 0);
        digester.addCallMethod(prefix + "web-app/filter/filter-name",
                               "setFilterName", 0);
        digester.addCallMethod(prefix + "web-app/filter/large-icon",
                               "setLargeIcon", 0);
        digester.addCallMethod(prefix + "web-app/filter/small-icon",
                               "setSmallIcon", 0);
        digester.addCallMethod(prefix + "web-app/filter/init-param",
                               "addInitParameter", 2);
        digester.addCallParam(prefix + "web-app/filter/init-param/param-name",
                              0);
        digester.addCallParam(prefix + "web-app/filter/init-param/param-value",
                              1);

        // <filter-mapping>: bind filters to servlet names or URL patterns.
        digester.addObjectCreate(prefix + "web-app/filter-mapping",
                                 "org.apache.catalina.deploy.FilterMap");
        digester.addSetNext(prefix + "web-app/filter-mapping",
                            "addFilterMap",
                            "org.apache.catalina.deploy.FilterMap");
        digester.addCallMethod(prefix + "web-app/filter-mapping/filter-name",
                               "setFilterName", 0);
        digester.addCallMethod(prefix + "web-app/filter-mapping/servlet-name",
                               "setServletName", 0);
        digester.addCallMethod(prefix + "web-app/filter-mapping/url-pattern",
                               "setURLPattern", 0);

        // <listener>: application lifecycle listeners.
        digester.addCallMethod(prefix + "web-app/listener/listener-class",
                               "addApplicationListener", 0);

        // <login-config>: authentication configuration.
        digester.addObjectCreate(prefix + "web-app/login-config",
                                 "org.apache.catalina.deploy.LoginConfig");
        digester.addSetNext(prefix + "web-app/login-config",
                            "setLoginConfig",
                            "org.apache.catalina.deploy.LoginConfig");
        digester.addCallMethod(prefix + "web-app/login-config/auth-method",
                               "setAuthMethod", 0);
        digester.addCallMethod(prefix + "web-app/login-config/realm-name",
                               "setRealmName", 0);
        digester.addCallMethod(prefix + "web-app/login-config/form-login-config/form-error-page",
                               "setErrorPage", 0);
        digester.addCallMethod(prefix + "web-app/login-config/form-login-config/form-login-page",
                               "setLoginPage", 0);

        // <mime-mapping>: extension -> MIME type pairs.
        digester.addCallMethod(prefix + "web-app/mime-mapping",
                               "addMimeMapping", 2);
        digester.addCallParam(prefix + "web-app/mime-mapping/extension", 0);
        digester.addCallParam(prefix + "web-app/mime-mapping/mime-type", 1);

        // <resource-env-ref> and <resource-ref>: resource references.
        digester.addCallMethod(prefix + "web-app/resource-env-ref",
                               "addResourceEnvRef", 2);
        digester.addCallParam(prefix + "web-app/resource-env-ref/resource-env-ref-name", 0);
        digester.addCallParam(prefix + "web-app/resource-env-ref/resource-env-ref-type", 1);

        digester.addObjectCreate(prefix + "web-app/resource-ref",
                                 "org.apache.catalina.deploy.ContextResource");
        digester.addSetNext(prefix + "web-app/resource-ref",
                            "addResource",
                            "org.apache.catalina.deploy.ContextResource");
        digester.addCallMethod(prefix + "web-app/resource-ref/description",
                               "setDescription", 0);
        digester.addCallMethod(prefix + "web-app/resource-ref/res-auth",
                               "setAuth", 0);
        digester.addCallMethod(prefix + "web-app/resource-ref/res-ref-name",
                               "setName", 0);
        digester.addCallMethod(prefix + "web-app/resource-ref/res-sharing-scope",
                               "setScope", 0);
        digester.addCallMethod(prefix + "web-app/resource-ref/res-type",
                               "setType", 0);

        // <security-constraint>: constraints and their resource collections.
        digester.addObjectCreate(prefix + "web-app/security-constraint",
                                 "org.apache.catalina.deploy.SecurityConstraint");
        digester.addSetNext(prefix + "web-app/security-constraint",
                            "addConstraint",
                            "org.apache.catalina.deploy.SecurityConstraint");
        digester.addRule(prefix + "web-app/security-constraint/auth-constraint",
                         new SetAuthConstraintRule(digester));
        digester.addCallMethod(prefix + "web-app/security-constraint/auth-constraint/role-name",
                               "addAuthRole", 0);
        digester.addCallMethod(prefix + "web-app/security-constraint/display-name",
                               "setDisplayName", 0);
        digester.addCallMethod(prefix + "web-app/security-constraint/user-data-constraint/transport-guarantee",
                               "setUserConstraint", 0);
        digester.addObjectCreate(prefix + "web-app/security-constraint/web-resource-collection",
                                 "org.apache.catalina.deploy.SecurityCollection");
        digester.addSetNext(prefix + "web-app/security-constraint/web-resource-collection",
                            "addCollection",
                            "org.apache.catalina.deploy.SecurityCollection");
        digester.addCallMethod(prefix + "web-app/security-constraint/web-resource-collection/http-method",
                               "addMethod", 0);
        digester.addCallMethod(prefix + "web-app/security-constraint/web-resource-collection/url-pattern",
                               "addPattern", 0);
        digester.addCallMethod(prefix + "web-app/security-constraint/web-resource-collection/web-resource-name",
                               "setName", 0);

        // <security-role>: declared role names.
        digester.addCallMethod(prefix + "web-app/security-role/role-name",
                               "addSecurityRole", 0);

        // <servlet>: each servlet becomes a Wrapper child of the Context.
        digester.addRule(prefix + "web-app/servlet",
                         new WrapperCreateRule(digester));
        digester.addSetNext(prefix + "web-app/servlet",
                            "addChild",
                            "org.apache.catalina.Container");
        digester.addCallMethod(prefix + "web-app/servlet/init-param",
                               "addInitParameter", 2);
        digester.addCallParam(prefix + "web-app/servlet/init-param/param-name",
                              0);
        digester.addCallParam(prefix + "web-app/servlet/init-param/param-value",
                              1);
        digester.addCallMethod(prefix + "web-app/servlet/jsp-file",
                               "setJspFile", 0);
        digester.addCallMethod(prefix + "web-app/servlet/load-on-startup",
                               "setLoadOnStartupString", 0);
        digester.addCallMethod(prefix + "web-app/servlet/run-as/role-name",
                               "setRunAs", 0);
        // Note the parameter order: role-name is argument 0, role-link is 1.
        digester.addCallMethod(prefix + "web-app/servlet/security-role-ref",
                               "addSecurityReference", 2);
        digester.addCallParam(prefix + "web-app/servlet/security-role-ref/role-link", 1);
        digester.addCallParam(prefix + "web-app/servlet/security-role-ref/role-name", 0);
        digester.addCallMethod(prefix + "web-app/servlet/servlet-class",
                               "setServletClass", 0);
        digester.addCallMethod(prefix + "web-app/servlet/servlet-name",
                               "setName", 0);

        // <servlet-mapping>: url-pattern is argument 0, servlet-name is 1.
        digester.addCallMethod(prefix + "web-app/servlet-mapping",
                               "addServletMapping", 2);
        digester.addCallParam(prefix + "web-app/servlet-mapping/servlet-name", 1);
        digester.addCallParam(prefix + "web-app/servlet-mapping/url-pattern", 0);

        // <session-config>: timeout converted to a primitive int.
        digester.addCallMethod(prefix + "web-app/session-config/session-timeout",
                               "setSessionTimeout", 1,
                               new Class[]{Integer.TYPE});
        digester.addCallParam(prefix + "web-app/session-config/session-timeout", 0);

        // <taglib>: taglib-uri is argument 0, taglib-location is 1.
        digester.addCallMethod(prefix + "web-app/taglib",
                               "addTaglib", 2);
        digester.addCallParam(prefix + "web-app/taglib/taglib-location", 1);
        digester.addCallParam(prefix + "web-app/taglib/taglib-uri", 0);

        // <welcome-file-list>.
        digester.addCallMethod(prefix + "web-app/welcome-file-list/welcome-file",
                               "addWelcomeFile", 0);

    }

}
// ----------------------------------------------------------- Private Classes
/**
* A Rule that calls the <code>setAuthConstraint(true)</code> method of
* the top item on the stack, which must be of type
* <code>org.apache.catalina.deploy.SecurityConstraint</code>.
*/
final class SetAuthConstraintRule extends Rule {

    public SetAuthConstraintRule(Digester digester) {
        super(digester);
    }

    /**
     * Invoked at the start of an <code>&lt;auth-constraint&gt;</code>
     * element: marks the SecurityConstraint on top of the digester stack
     * as carrying an authorization constraint.
     */
    public void begin(Attributes attributes) throws Exception {
        SecurityConstraint securityConstraint =
            (SecurityConstraint) digester.peek();
        securityConstraint.setAuthConstraint(true);
        // Trace only when digester debugging is enabled.
        if (digester.getDebug() > 0)
            digester.log("Calling SecurityConstraint.setAuthConstraint(true)");
    }

}
/**
* Class that calls <code>setDistributable(true)</code> for the top object
* on the stack, which must be a <code>org.apache.catalina.Context</code>.
*/
/**
 * Digester rule that calls <code>setDistributable(true)</code> on the top
 * object of the stack, which must be an
 * <code>org.apache.catalina.Context</code>.
 */
final class SetDistributableRule extends Rule {

    public SetDistributableRule(Digester digester) {
        super(digester);
    }

    /**
     * Invoked when the matched element starts: marks the stack-top Context
     * as distributable.
     */
    public void begin(Attributes attributes) throws Exception {
        Context ctx = (Context) digester.peek();
        ctx.setDistributable(true);
        if (digester.getDebug() > 0) {
            digester.log(ctx.getClass().getName() +
                ".setDistributable( true)");
        }
    }

}
/**
* Class that calls a property setter for the top object on the stack,
* passing the public ID of the entity we are currently processing.
*/
/**
 * Digester rule that invokes a property setter on the top object of the
 * stack, passing the public ID of the entity currently being processed.
 * The setter name is supplied at construction time and must accept a single
 * String argument.
 */
final class SetPublicIdRule extends Rule {

    /** Name of the single-String setter to invoke on the stack top. */
    private String method = null;

    public SetPublicIdRule(Digester digester, String method) {
        super(digester);
        this.method = method;
    }

    public void begin(Attributes attributes) throws Exception {
        Object top = digester.peek();
        // FIX: use the String.class literal instead of the roundabout
        // "String".getClass(); also dropped an unused Context local that was
        // peeked off the stack and never referenced.
        Class[] paramClasses = new Class[] { String.class };
        String[] paramValues = new String[] { digester.getPublicId() };
        Method m = null;
        try {
            m = top.getClass().getMethod(method, paramClasses);
        } catch (NoSuchMethodException e) {
            // Log and skip rather than abort parsing when the target object
            // does not expose the expected setter.
            digester.log("Can't find method " + method + " in " + top +
                " CLASS " + top.getClass());
            return;
        }
        m.invoke(top, paramValues);
        if (digester.getDebug() >= 1)
            digester.log("" + top.getClass().getName() + "." + method +
                "(" + paramValues[0] + ")");
    }

}
/**
* A Rule that calls the factory method on the specified Context to
* create the object that is to be added to the stack.
*/
/**
 * Digester rule that asks the Context (one position below the stack top)
 * to manufacture a Wrapper via its factory method, pushes the new Wrapper
 * so that nested rules can configure it, and pops it again on end().
 */
final class WrapperCreateRule extends Rule {

    public WrapperCreateRule(Digester digester) {
        super(digester);
    }

    /** Creates the Wrapper from the enclosing Context and pushes it. */
    public void begin(Attributes attributes) throws Exception {
        Context owner = (Context) digester.peek(digester.getCount() - 1);
        Wrapper created = owner.createWrapper();
        digester.push(created);
        if (digester.getDebug() > 0) {
            digester.log("new " + created.getClass().getName());
        }
    }

    /** Pops the configured Wrapper off the stack. */
    public void end() throws Exception {
        Wrapper finished = (Wrapper) digester.pop();
        if (digester.getDebug() > 0) {
            digester.log("pop " + finished.getClass().getName());
        }
    }

}
|
package main
import (
"fmt"
"log"
"github.com/boltdb/bolt"
"github.com/labstack/echo"
"github.com/spf13/viper"
)
var db *bolt.DB
// main loads configuration via viper, opens the BoltDB store shared through
// the package-level db handle, registers the HTTP routes, and serves until
// the server stops or fails to start.
func main() {
	var dbErr error

	// Configuration file "nucleus.*" is searched for in $HOME/.nucleus and
	// then in the current working directory.
	viper.SetConfigName("nucleus")
	viper.AddConfigPath("$HOME/.nucleus")
	viper.AddConfigPath(".")
	err := viper.ReadInConfig()
	if err != nil {
		panic(fmt.Errorf("Fatal error config file: %s \n", err))
	}
	port := fmt.Sprintf(":%d", viper.GetInt("port"))

	db, dbErr = bolt.Open("app.db", 0644, nil)
	if dbErr != nil {
		log.Fatal(dbErr)
	}

	e := echo.New()
	api := e.Group("/api")
	api.GET("/read/:bucket/:key", HandleRead)
	api.POST("/write/:bucket/:key", HandleWrite)
	e.Static("/", "./app")

	// BUG FIX: the original used `defer db.Close()` followed by
	// e.Logger.Fatal(e.Start(port)). Fatal calls os.Exit, which never runs
	// deferred functions, so the database file was never closed cleanly.
	// Close explicitly before the fatal exit instead.
	serveErr := e.Start(port)
	db.Close()
	e.Logger.Fatal(serveErr)
}
|
#!/bin/bash
# Re-fetch the Weex iOS SDK and the Benmu iOS library, refresh CocoaPods
# dependencies, then open the Xcode workspace.
#
# FIX: fail fast. Without `set -e`, a failed `git clone` or `cd` let every
# following command (checkout, pod update, open) run in the wrong directory.
set -e

sudo rm -rf WeexiOSSDK/
sudo rm -rf Benmu-iOS-Library/
#sudo rm -rf Podfile.lock
#sudo rm -rf Pods/
git clone https://github.com/bmfe/WeexiOSSDK.git
#cd WeexiOSSDK
#git checkout 0.18.0
#cd ../
git clone https://github.com/bmfe/Benmu-iOS-Library.git
cd Benmu-iOS-Library
git checkout feature-components
pod update
echo =========================
echo 🍺 ios资源文件加载完成
echo =========================
open WeexEros.xcworkspace
|
# Launch the HOL Light checkpoint then load code from our project on top.
# The DMTCP restart script resumes a previously checkpointed HOL Light
# session; we feed it our prover setup file first, then "-" (stdin) so the
# session remains interactive after the setup code has been loaded.
cat scripts/prover_ready.ml - | ./hol-light/dmtcp_restart_script.sh
|
# Termux build recipe metadata for libXfixes (X11 'fixes' extension library).
TERMUX_PKG_HOMEPAGE=https://xorg.freedesktop.org/
TERMUX_PKG_DESCRIPTION="X11 miscellaneous 'fixes' extension library"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=5.0.3
TERMUX_PKG_REVISION=17
# Upstream tarball and its integrity checksum.
TERMUX_PKG_SRCURL=https://xorg.freedesktop.org/releases/individual/lib/libXfixes-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_SHA256=de1cd33aff226e08cefd0e6759341c2c8e8c9faf8ce9ac6ec38d43e287b22ad6
# Runtime dependency; proto headers and autoconf macros are build-time only.
TERMUX_PKG_DEPENDS="libx11"
TERMUX_PKG_BUILD_DEPENDS="xorgproto, xorg-util-macros"
|
#!/bin/bash
## with set -e in force, I expect a loop to abandon the whole script if
## a command inside the loop exits with non-zero. In MSys, the loop
## exits, but the script continues to success.
#
# Repro script: deliberately fails inside a pipeline-fed while loop to show
# how different shells handle `set -e` there. NOTE(review): the loop runs in
# a subshell (right-hand side of a pipe), so `exit 1` terminates only that
# subshell; whether the failing pipeline then aborts the whole script is the
# behavior under test.
# Runs on every script exit and reports the final exit status.
function exit_handler
{
echo exit_status $?
echo done
}
trap exit_handler EXIT
# Helper that always fails, to trigger the `|| exit 1` path below.
function nonzero_return_function
{
return 1
}
set -e # exit on error
echo loop_entry
echo hello | while read a
do
nonzero_return_function || exit 1
echo loop_continued
done
# $? here is the status of the pipeline (i.e. of the loop subshell).
echo loop_exit $?
|
package org.n3r.eql.impl;
import com.google.common.base.Joiner;
import freemarker.cache.StringTemplateLoader;
import freemarker.template.Configuration;
import freemarker.template.Template;
import org.n3r.eql.base.DynamicLanguageDriver;
import org.n3r.eql.parser.EqlBlock;
import org.n3r.eql.parser.FreemarkerSql;
import org.n3r.eql.parser.Sql;
import org.n3r.eql.parser.StaticSql;
import java.io.IOException;
import java.util.List;
/**
 * DynamicLanguageDriver backed by Freemarker. Joins the SQL lines of an
 * {@link EqlBlock} into one template string; if the text contains no
 * Freemarker directive marker ("&lt;#") it is returned as a plain
 * {@link StaticSql}, otherwise it is compiled into a {@link FreemarkerSql}
 * registered under the block's unique SQL id.
 */
public class FreemarkerDynamicLanguageDriver implements DynamicLanguageDriver {
    @Override
    public Sql parse(EqlBlock block, List<String> oneSqlLines) {
        String template = Joiner.on('\n').join(oneSqlLines);
        // Fast path: no Freemarker directive present, no template engine
        // needed. (FIX: idiomatic contains() instead of indexOf(...) < 0.)
        if (!template.contains("<#")) return new StaticSql(template);

        Configuration ftlConfig = new Configuration();
        StringTemplateLoader stringLoader = new StringTemplateLoader();
        ftlConfig.setTemplateLoader(stringLoader);
        // Register the template in-memory under the block's unique SQL id.
        // (FIX: renamed typo'd local uniquEQLIdStr.)
        String uniqueSqlId = block.getUniqueSqlIdStr();
        stringLoader.putTemplate(uniqueSqlId, template);

        Template temp;
        try {
            temp = ftlConfig.getTemplate(uniqueSqlId);
        } catch (IOException e) {
            // Should not occur for a string loader; surface as unchecked.
            throw new RuntimeException(e);
        }
        return new FreemarkerSql(ftlConfig, temp);
    }
}
|
#!/bin/csh
# '@(#)managedb.sh 22.1 03/24/08 1991-2005 '
#
#
# Copyright (C) 2015 University of Oregon
#
# You may distribute under the terms of either the GNU General Public
# License or the Apache License, as specified in the LICENSE file.
#
# For more information, see the LICENSE file.
#
#
# Launcher for the VnmrJ database manager (vnmr.ui.shuf.FillDBManager).
# Resolves the effective user, locates the PostgreSQL host/port settings,
# and invokes the Java tool; includes Interix (Windows SFU) workarounds.
#
# Extract the username from `id` output, e.g. "uid=500(joe) ..." -> "joe".
set id = `id | sed -e 's/[^(]*[(]\([^)]*\)[)].*/\1/'`
if ( $id == "root" ) then
echo "Cannot execute $0 as root"
exit
endif
set ostype=`uname -s`
# USER is not correct sometimes, so fix it
set USER = $id
# HOME, vnmruser and other env variables are wrong when executed
# from exec_asuser in Windows, so we need to fix them here.
if ( x$ostype == "xInterix" ) then
cd ~$USER
set HOME = "`pwd`"
source .vnmrenv
endif
if ( ! "$?vnmrsystem" ) then
set vnmrsystem = /vnmr
endif
set tmpdir = "$vnmrsystem/tmp"
if ( $id == $USER ) then
if ( "$?vnmruser" ) then
set tmpdir = "$vnmruser"
endif
endif
set debugargs=""
# The debug flag and its arg must be the last two things on the line.
# Check next to last arg to see if it is 'debug'
@ debugflag = $#argv
if ( $debugflag > 0 ) then
@ debugflag = $debugflag - 1
if($argv[$debugflag] == debug) then
set debugargs="$argv[$#argv]"
endif
endif
# Set the name of the host where the Database is located.
# Default to "localhost".
if ( $?PGHOST ) then
set dbhost=$PGHOST
else
set dbhost="localhost"
endif
# Get or default the port number for the database
# If the env variable PGPORT is set, use that, else default to 5432
if ( $?PGPORT ) then
set dbport=$PGPORT
else
set dbport="5432"
endif
# If the env variable PGNETWORK_SERVER is set, use its value, default to 'no'
if ( $?PGNETWORK_SERVER ) then
set dbnet_server=$PGNETWORK_SERVER
else
set dbnet_server="no"
endif
set shtoolcmd="/bin/sh"
set shtooloption="-c"
set sfudir=""
set sfudir_interix=""
set javacmd="$vnmrsystem/jre/bin/java"
set vjclasspath="$vnmrsystem/java/managedb.jar"
set sysdir="$vnmrsystem"
# On Interix, translate Unix paths to Windows form and use the SFU ksh
# wrapper; fall back to a java.exe on PATH if the bundled JRE is absent.
if ( x$ostype == "xInterix" ) then
set vjclasspath=`/bin/unixpath2win "$vjclasspath"`
set sfudir="$SFUDIR"
set sfudir_interix="$SFUDIR_INTERIX"
set shtoolcmd="$SFUDIR\\common\\ksh.bat"
set shtooloption="-lc"
set javacmd="$vnmrsystem/jre/bin/java.exe"
if ( ! -f "$javacmd" ) then
set javacmd="java.exe"
endif
set sysdir=`/bin/unixpath2win "$vnmrsystem"`
endif
# Launch the database manager, forwarding all script arguments; settings are
# passed as Java system properties (-D...).
$javacmd -mx256m -classpath $vjclasspath -Dsysdir="$sysdir" -Duserdir="$tmpdir" -Ddbhost=$dbhost -Ddbport=$dbport -Ddbnet_server=$dbnet_server -Djava.compiler=sunwjit -Dsfudirwindows="$sfudir" -Dsfudirinterix="$sfudir_interix" -Dshtoolcmd="$shtoolcmd" -Dshtooloption="$shtooloption" -Ddebug="$debugargs" -Duser.name=$id vnmr.ui.shuf.FillDBManager $argv[*]
# Make the shared log world-writable when it lives in the system tmp dir.
if ( "$tmpdir" == "$vnmrsystem/tmp" ) then
chmod 666 "$tmpdir"/ManagedbMsgLog
endif
|
package com.ing.baker.baas.state
import akka.actor.ActorSystem
import akka.cluster.pubsub.{DistributedPubSub, DistributedPubSubMediator}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.stream.Materializer
import com.ing.baker.baas.protocol.{BaaSProtocol, ProtocolDistributedEventPublishing}
import com.ing.baker.baas.protocol.BaaSProto._
import com.ing.baker.baas.protocol.MarshallingUtils._
import com.ing.baker.runtime.scaladsl.Baker
import com.ing.baker.runtime.serialization.{Encryption, SerializersProvider}
import scala.concurrent.{ExecutionContext, Future}
/**
 * Companion object: boots the BaaS state-node HTTP server on top of an
 * existing Baker instance and registers distributed pub-sub event listeners
 * for every recipe known to that instance.
 */
object BaaSServer {
// Binds the HTTP routes for `baker` on host:port after wiring event
// listeners for all currently known recipes.
// NOTE(review): encryption is hard-coded to NoEncryption here — confirm
// this is intended for production deployments.
def run(baker: Baker, host: String, port: Int)(implicit system: ActorSystem, mat: Materializer): Future[Http.ServerBinding] = {
import system.dispatcher
val encryption = Encryption.NoEncryption
val server = new BaaSServer()(system, mat, baker, encryption)
for {
_ <- initializeEventListeners(baker, system)
binding <- Http().bindAndHandle(server.route, host, port)
} yield binding
}
// Registers a Baker event listener that republishes every event of the
// named recipe onto the Akka distributed pub-sub topic derived from the
// recipe name, so remote nodes can subscribe.
// NOTE(review): the println below looks like leftover debug output —
// consider replacing it with a proper logger.
private[state] def registerEventListenerForRemote(recipeName: String, baker: Baker, system: ActorSystem): Future[Unit] = {
println(Console.YELLOW + s"Event listener for: $recipeName" + Console.RESET)
baker.registerEventListener(recipeName, (metadata, event) => {
val eventsTopic: String =
ProtocolDistributedEventPublishing.eventsTopic(recipeName)
DistributedPubSub(system).mediator ! DistributedPubSubMediator.Publish(eventsTopic, ProtocolDistributedEventPublishing.Event(metadata, event))
})
}
// Fans registerEventListenerForRemote out over every recipe currently
// known to the Baker instance.
private[state] def initializeEventListeners(baker: Baker, system: ActorSystem)(implicit ec: ExecutionContext): Future[Unit] =
for {
recipes <- baker.getAllRecipes
_ <- Future.traverse(recipes.toList) { case (_, recipe) => registerEventListenerForRemote(recipe.compiledRecipe.name, baker, system) }
} yield ()
}
/**
 * HTTP facade exposing a Baker instance under the "api/v3" prefix. Each
 * endpoint decodes a BaaSProtocol request entity, delegates to the
 * underlying Baker, and completes with the serialized response (Baker
 * failures included, via completeWithBakerFailures).
 */
class BaaSServer(implicit system: ActorSystem, mat: Materializer, baker: Baker, encryption: Encryption) {
import system.dispatcher
// Serialization support for protocol messages, derived from the actor
// system and the configured encryption.
implicit private val serializersProvider: SerializersProvider =
SerializersProvider(system, encryption)
// Aggregate route: every endpoint below is mounted under api/v3.
def route: Route = concat(pathPrefix("api" / "v3")(concat(health, addRecipe, getRecipe, getAllRecipes, bake,
fireEventAndResolveWhenReceived, fireEventAndResolveWhenCompleted, fireEventAndResolveOnEvent, fireEvent,
getAllRecipeInstancesMetadata, getRecipeInstanceState, getVisualState, retryInteraction, resolveInteraction,
stopRetryingInteraction
)))
// Liveness probe: always 200 OK.
private def health: Route = pathPrefix("health")(get(complete(StatusCodes.OK)))
// Adds a compiled recipe, then registers the remote event listener for it.
private def addRecipe: Route = post(path("addRecipe") {
entity(as[BaaSProtocol.AddRecipeRequest]) { request =>
val result = for {
recipeId <- baker.addRecipe(request.compiledRecipe)
_ <- BaaSServer.registerEventListenerForRemote(request.compiledRecipe.name, baker, system)
} yield BaaSProtocol.AddRecipeResponse(recipeId)
completeWithBakerFailures(result)
}
})
// Looks up a single recipe by id.
private def getRecipe: Route = post(path("getRecipe") {
entity(as[BaaSProtocol.GetRecipeRequest]) { request =>
completeWithBakerFailures(baker.getRecipe(request.recipeId).map(BaaSProtocol.GetRecipeResponse))
}
})
// Returns every recipe registered with this Baker.
private def getAllRecipes: Route = post(path("getAllRecipes") {
completeWithBakerFailures(baker.getAllRecipes.map(BaaSProtocol.GetAllRecipesResponse))
})
// Creates a new recipe instance of the given recipe.
private def bake: Route = post(path("bake") {
entity(as[BaaSProtocol.BakeRequest]) { request =>
completeWithBakerFailures(baker.bake(request.recipeId, request.recipeInstanceId))
}
})
// Fires an event and answers as soon as Baker has received it.
private def fireEventAndResolveWhenReceived: Route = post(path("fireEventAndResolveWhenReceived") {
entity(as[BaaSProtocol.FireEventAndResolveWhenReceivedRequest]) { request =>
completeWithBakerFailures(baker.fireEventAndResolveWhenReceived(request.recipeInstanceId, request.event, request.correlationId)
.map(BaaSProtocol.FireEventAndResolveWhenReceivedResponse))
}
})
// Fires an event and answers once all resulting processing has completed.
private def fireEventAndResolveWhenCompleted: Route = post(path("fireEventAndResolveWhenCompleted") {
entity(as[BaaSProtocol.FireEventAndResolveWhenCompletedRequest]) { request =>
completeWithBakerFailures(baker.fireEventAndResolveWhenCompleted(request.recipeInstanceId, request.event, request.correlationId)
.map(BaaSProtocol.FireEventAndResolveWhenCompletedResponse))
}
})
// Fires an event and answers when the specified follow-up event occurs.
private def fireEventAndResolveOnEvent: Route = post(path("fireEventAndResolveOnEvent") {
entity(as[BaaSProtocol.FireEventAndResolveOnEventRequest]) { request =>
completeWithBakerFailures(baker.fireEventAndResolveOnEvent(request.recipeInstanceId, request.event, request.onEvent, request.correlationId)
.map(BaaSProtocol.FireEventAndResolveOnEventResponse))
}
})
// Fire-and-forget variant; response payload is unresolved (see TODO).
private def fireEvent: Route = post(path("fireEvent") {
entity(as[BaaSProtocol.FireEventRequest]) { request =>
complete(baker.fireEvent(request.recipeInstanceId, request.event, request.correlationId).resolveWhenReceived
.map(_ => "TODO")) // TODO figure out what to do here with the 2 different futures
}
})
// Lists metadata for every recipe instance.
private def getAllRecipeInstancesMetadata: Route = post(path("getAllRecipeInstancesMetadata") {
completeWithBakerFailures(baker.getAllRecipeInstancesMetadata
.map(BaaSProtocol.GetAllRecipeInstancesMetadataResponse))
})
// Returns the current state of one recipe instance.
private def getRecipeInstanceState: Route = post(path("getRecipeInstanceState") {
entity(as[BaaSProtocol.GetRecipeInstanceStateRequest]) { request =>
completeWithBakerFailures(baker.getRecipeInstanceState(request.recipeInstanceId)
.map(BaaSProtocol.GetRecipeInstanceStateResponse))
}
})
// Returns a visual (graph) representation of one recipe instance.
private def getVisualState: Route = post(path("getVisualState") {
entity(as[BaaSProtocol.GetVisualStateRequest]) { request =>
completeWithBakerFailures(baker.getVisualState(request.recipeInstanceId)
.map(BaaSProtocol.GetVisualStateResponse))
}
})
// Retries a blocked interaction of a recipe instance.
private def retryInteraction: Route = post(path("retryInteraction") {
entity(as[BaaSProtocol.RetryInteractionRequest]) { request =>
completeWithBakerFailures(baker.retryInteraction(request.recipeInstanceId, request.interactionName))
}
})
// Resolves a blocked interaction by supplying its event directly.
private def resolveInteraction: Route = post(path("resolveInteraction") {
entity(as[BaaSProtocol.ResolveInteractionRequest]) { request =>
completeWithBakerFailures(baker.resolveInteraction(request.recipeInstanceId, request.interactionName, request.event))
}
})
// Stops the retry loop of a failing interaction.
private def stopRetryingInteraction: Route = post(path("stopRetryingInteraction") {
entity(as[BaaSProtocol.StopRetryingInteractionRequest]) { request =>
completeWithBakerFailures(baker.stopRetryingInteraction(request.recipeInstanceId, request.interactionName))
}
})
}
|
#!/bin/bash
# Wrapper around the cockroachdb/builder Docker image: builds/pulls/pushes
# the image, or (default) runs the given command inside it with the host's
# Go workspace mounted in.
set -eu
image="cockroachdb/builder"
# Build the builder image from the inline Dockerfile (heredoc below).
function init() {
docker build --tag="${image}" - <<EOF
FROM golang:1.5
RUN apt-get update -y && \
apt-get dist-upgrade -y && \
apt-get install --no-install-recommends --auto-remove -y git build-essential file npm nodejs && \
apt-get clean autoclean && \
apt-get autoremove -y && \
rm -rf /tmp/* && \
ln -s /usr/bin/nodejs /usr/bin/node
RUN go get golang.org/x/tools/cmd/vet
CMD ["/bin/bash"]
EOF
}
# Subcommands: "pull", "init", "push" short-circuit; anything else is run
# inside the container below.
if [ "${1-}" = "pull" ]; then
docker pull "${image}"
exit 0
fi
if [ "${1-}" = "init" ]; then
init
exit 0
fi
if [ "${1-}" = "push" ]; then
init
tag="$(date +%Y%m%d-%H%M%S)"
docker tag "${image}" "${image}:${tag}"
docker push "${image}"
exit 0
fi
# First entry of (possibly colon-separated) GOPATH.
gopath0="${GOPATH%%:*}"
if [ "${CIRCLECI-}" = "true" ]; then
# HACK: Removal of docker containers fails on circleci with the
# error: "Driver btrfs failed to remove root filesystem". So if
# we're running on circleci, just leave the containers around.
rm=""
else
rm="--rm"
fi
# Only allocate a TTY when stdin is a terminal (keeps CI pipelines happy).
if [ -t 0 ]; then
tty="--tty"
fi
# Run our build container with a set of volumes mounted that will
# allow the container to store persistent build data on the host
# computer.
# -i causes some commands (including `git diff`) to attempt to use
# a pager, so we override $PAGER to disable.
docker run -i ${tty-} ${rm} \
--volume="${gopath0}/src:/go/src" \
--volume="${PWD}:/go/src/github.com/cockroachdb/cockroach" \
--volume="${gopath0}/pkg:/go/pkg" \
--volume="${gopath0}/pkg/linux_amd64_netgo:/usr/src/go/pkg/linux_amd64_netgo" \
--volume="${gopath0}/pkg/linux_amd64_race:/usr/src/go/pkg/linux_amd64_race" \
--volume="${gopath0}/bin/linux_amd64:/go/bin" \
--workdir="/go/src/github.com/cockroachdb/cockroach" \
--env="CACHE=/go/pkg/cache" \
--env="PAGER=cat" \
"${image}" "$@"
|
#!/bin/bash -eu
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# oss-fuzz build script for Envoy: queries all bazel fuzz targets, builds
# them as driverless libFuzzer binaries, and copies binaries, corpora and
# dictionaries into ${OUT}.
export CFLAGS="$CFLAGS"
export CXXFLAGS="$CXXFLAGS"
# FIX: the inner pattern was written as "..." inside an already double-quoted
# string, which silently ended and restarted the shell quoting. Use single
# quotes consistently so the query text is what it appears to be.
declare -r FUZZER_TARGETS=$(bazel query "attr('tags', 'fuzz_target', '...') except attr('tags', 'no_fuzz', '...')")
FUZZER_DICTIONARIES="\
"
# Copy $CFLAGS and $CXXFLAGS into Bazel command-line flags, for both
# compilation and linking.
#
# Some flags, such as `-stdlib=libc++`, generate warnings if used on a C source
# file. Since the build runs with `-Werror` this will cause it to break, so we
# use `--conlyopt` and `--cxxopt` instead of `--copt`.
#
# NOTE: We ignore -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION. All envoy fuzz
# targets link this flag through their build target rule. Passing this in via CLI
# will pass this to genrules that build unit tests that rely on production
# behavior. Ignore this flag so these unit tests don't fail by using a modified
# RE2 library.
# TODO(asraa): Figure out how to work around this better.
CFLAGS=${CFLAGS//"-DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION"/}
CXXFLAGS=${CXXFLAGS//"-DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION"/}
declare -r EXTRA_BAZEL_FLAGS="$(
for f in ${CFLAGS}; do
echo "--conlyopt=${f}" "--linkopt=${f}"
done
for f in ${CXXFLAGS}; do
echo "--cxxopt=${f}" "--linkopt=${f}"
done
if [ "$SANITIZER" = "undefined" ]
then
# Bazel uses clang to link binary, which does not link clang_rt ubsan library for C++ automatically.
# See issue: https://github.com/bazelbuild/bazel/issues/8777
echo "--linkopt=\"$(find $(llvm-config --libdir) -name libclang_rt.ubsan_standalone_cxx-x86_64.a | head -1)\""
elif [ "$SANITIZER" = "address" ]
then
echo "--copt -D__SANITIZE_ADDRESS__" "--copt -DADDRESS_SANITIZER=1"
fi
)"
declare BAZEL_BUILD_TARGETS=""
declare BAZEL_CORPUS_TARGETS=""
for t in ${FUZZER_TARGETS}
do
BAZEL_BUILD_TARGETS+="${t}_driverless "
BAZEL_CORPUS_TARGETS+="${t}_corpus_tar "
done
# Build driverless libraries.
# Benchmark about 3 GB per CPU (10 threads for 28.8 GB RAM)
# TODO(asraa): Remove deprecation warnings when Envoy and deps moves to C++17
bazel build --verbose_failures --dynamic_mode=off --spawn_strategy=standalone \
--local_cpu_resources=HOST_CPUS*0.32 \
--genrule_strategy=standalone --strip=never \
--copt=-fno-sanitize=vptr --linkopt=-fno-sanitize=vptr \
--define tcmalloc=disabled --define signal_trace=disabled \
--define ENVOY_CONFIG_ASAN=1 --config libc++ \
--copt -D_LIBCPP_DISABLE_DEPRECATION_WARNINGS \
--define force_libcpp=enabled --build_tag_filters=-no_asan \
--linkopt=-lc++ --linkopt=-pthread ${EXTRA_BAZEL_FLAGS} \
${BAZEL_BUILD_TARGETS[*]} ${BAZEL_CORPUS_TARGETS[*]}
# Profiling with coverage requires that we resolve+copy all Bazel symlinks and
# also remap everything under proc/self/cwd to correspond to Bazel build paths.
if [ "$SANITIZER" = "coverage" ]
then
# The build invoker looks for sources in $SRC, but it turns out that we need
# to not be buried under src/, paths are expected at out/proc/self/cwd by
# the profiler.
declare -r REMAP_PATH="${OUT}/proc/self/cwd"
mkdir -p "${REMAP_PATH}"
# For .cc, we only really care about source/ today.
rsync -av "${SRC}"/envoy/source "${REMAP_PATH}"
rsync -av "${SRC}"/envoy/test "${REMAP_PATH}"
# Remove filesystem loop manually.
rm -rf "${SRC}"/envoy/bazel-envoy/external/envoy
# Clean up symlinks with a missing referrant.
find "${SRC}"/envoy/bazel-envoy/external -follow -type l -ls -delete || echo "Symlink cleanup soft fail"
rsync -avLk "${SRC}"/envoy/bazel-envoy/external "${REMAP_PATH}"
# For .h, and some generated artifacts, we need bazel-out/. Need to heavily
# filter out the build objects from bazel-out/. Also need to resolve symlinks,
# since they don't make sense outside the build container.
declare -r RSYNC_FILTER_ARGS=("--include" "*.h" "--include" "*.cc" "--include" \
"*.hpp" "--include" "*.cpp" "--include" "*.c" "--include" "*/" "--exclude" "*")
rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${SRC}"/envoy/bazel-out "${REMAP_PATH}"
rsync -avLkR "${RSYNC_FILTER_ARGS[@]}" "${HOME}" "${OUT}"
rsync -avLkR "${RSYNC_FILTER_ARGS[@]}" /tmp "${OUT}"
fi
# Copy out test driverless binaries from bazel-bin/.
for t in ${FUZZER_TARGETS}
do
TARGET_PATH=${t/://}
TARGET_BASE="$(expr "$TARGET_PATH" : '.*/\(.*\)_fuzz_test')"
TARGET_DRIVERLESS=bazel-bin/"${TARGET_PATH:2}"_driverless
echo "Copying fuzzer $t"
cp "${TARGET_DRIVERLESS}" "${OUT}"/"${TARGET_BASE}"_fuzz_test
done
# Zip up related test corpuses.
# TODO(htuch): just use the .tar directly when
# https://github.com/google/oss-fuzz/issues/1918 is fixed.
CORPUS_UNTAR_PATH="${PWD}"/_tmp_corpus
for t in ${FUZZER_TARGETS}
do
echo "Extracting and zipping fuzzer $t corpus"
TARGET_PATH=${t/://}
rm -rf "${CORPUS_UNTAR_PATH}"
mkdir -p "${CORPUS_UNTAR_PATH}"
tar -C "${CORPUS_UNTAR_PATH}" -xvf bazel-bin/"${TARGET_PATH:2}"_corpus_tar.tar
TARGET_BASE="$(expr "$TARGET_PATH" : '.*/\(.*\)_fuzz_test')"
# There may be *.dict files in this folder that need to be moved into the OUT dir.
# FIX: quote the -name pattern; an unquoted *.dict would be glob-expanded by
# the shell if any .dict files existed in the current directory.
find "${CORPUS_UNTAR_PATH}" -type f -name '*.dict' -exec mv -n {} "${OUT}"/ \;
zip "${OUT}/${TARGET_BASE}"_fuzz_test_seed_corpus.zip \
"${CORPUS_UNTAR_PATH}"/*
done
rm -rf "${CORPUS_UNTAR_PATH}"
# Copy dictionaries and options files to $OUT/
for d in $FUZZER_DICTIONARIES; do
cp "$d" "${OUT}"/
done
# Cleanup bazel- symlinks to avoid oss-fuzz trying to copy out of the build
# cache.
rm -f bazel-*
|
# tasks/gemspec.rake (gh_stars: 1-10)
require "rake/gempackagetask"
# Gem specification for datagrammer; the :gemspec task below writes it out
# as a standalone .gemspec file (useful for github).
# NOTE(review): `has_rdoc` and rake/gempackagetask are obsolete on modern
# RubyGems/Rake — confirm the targeted tool versions before upgrading.
spec = Gem::Specification.new do |s|
s.name = 'datagrammer'
s.version = '0.4'
s.summary = "UDP without the pain"
s.description = "Sends and receives UDP packets in an OSC-compatable encoded format."
s.author = "<NAME>"
s.email = "<EMAIL>"
s.homepage = "http://github.com/mattly/datagrammer"
# code
s.require_path = "lib"
s.files = %w( README.mkdn Rakefile ) + Dir["{spec,lib}/**/*"]
# rdoc
s.has_rdoc = true
s.extra_rdoc_files = ['README.mkdn']
# Requirements
s.required_ruby_version = ">= 1.8.6"
s.platform = Gem::Platform::RUBY
end
desc "create .gemspec file (useful for github)"
# Serializes the spec to Ruby source and writes <name>.gemspec in CWD.
task :gemspec do
filename = "#{spec.name}.gemspec"
File.open(filename, "w") do |f|
f.puts spec.to_ruby
end
end
|
#!/bin/bash
#Created by Sam Gleske (sag47@drexel.edu)
#Created Thu Nov 29 16:42:12 EST 2012
#Red Hat Enterprise Linux Server release 5.5 (Tikanga)
#Linux 2.6.18-194.11.4.el5 x86_64 GNU/Linux
#JBoss EWP 5.1.0.GA
#java version "1.6.0_13"
#
#Description:
# This is an automatic deployer for a production jboss instance.
# To be run by an admin from the terminal rather than configuring
# automated deployments from a web interface. Web interface
# automated deployments are for test machines only.
#
# Note the variables are flexible enough that you could configure
# this for most app servers. All you need is an init.d script
# for the app server which can start, stop, and status the server.
# Here's a sample config for tomcat:
# appsprofile=/opt/tomcat
# deploydir="webapps"
# initd_script=/etc/init.d/tomcat
#
# For analyzing the code: environment variables are defined at the
# top, functions are defined after that, and executing the
# deployment occurs at the very end of the script.
#
#Dependencies:
# coreutils-5.97-23.el5_4.2
# bash-3.2-24.el5
# tar-1.15.1-30.el5
# JBoss EWP 5.x
# Java 1.6.x
#
# Also depends on my custom /etc/init.d/jboss script.
export PATH="${PATH}:/bin"
########### USER CONFIGURATION
#If values from user configuration are set then they will be used.
#Otherwise the DEFAULT CONFIGURATION will use ${var:-default_value}
#It is recommended that you go through each default value and specify your configuration
#source env.sh required for continuous integration; be sure to enable continuous_integration setting.
#source /app/stage/env.sh
#stage=""
#second_stage=""
#continuous_integration="false"
#war_files=""
#lib_files=""
#appsprofile=""
#appsuser=""
#runas_appsuser=false
#deploydir=""
#libdir=""
#backupdir=""
#initd_script=""
#force_restart=false
#move_or_copy="mv"
#timeout=0
#debug=false
#dryrun=false
#enable_colors=false
########### END USER CONFIGURATION
########### DEFAULT CONFIGURATION
#cd to the staging directory where files to be deployed are kept;this will become the working directory
stage="${stage:-/opt/staging}"
#secondary staging directory. Check here if deployment files not in $stage. Useful for cluster deployments.
second_stage="${second_stage:-}"
#CI server writes an env.sh file which is used as part of deployment in the stage directory, env.sh will be archived.
continuous_integration="${continuous_integration:-false}"
#war files to deploy to deploy directory; add space separated list of war files
war_files="${war_files:-}"
#jar files to deploy to lib directory; add space separated list of jar files
lib_files="${lib_files:-}"
#app server profile for app server
appsprofile="${appsprofile:-/opt/jboss/server/default}"
#app server user
appsuser="${appsuser:-jboss}"
#assume will be run as apps user otherwise root is assumed (it will basically just avoid the chown command)
runas_appsuser="${runas_appsuser:-false}"
# deploy directory (relative to ${appsprofile})
deploydir="${deploydir:-deploy}"
# lib directory (relative to ${appsprofile})
libdir="${libdir:-lib}"
#backup copies for deployments
backupdir="${backupdir:-/opt/jboss/server/backup}"
#path to init.d service script
initd_script="${initd_script:-/etc/init.d/jboss}"
#force JBoss restart every time (false=allow hot deploy, true=force a restart)
force_restart="${force_restart:-false}"
#move (mv) or copy (cp)... valid values include: mv cp
move_or_copy="${move_or_copy:-mv}"
#set a timeout failure if the app server doesn't shut down after $timeout seconds. 0 is unlimited or no timeout.
timeout="${timeout:-0}"
#debug output (true=enabled debugging, false=disabled debugging)
debug="${debug:-false}"
#simulates a deployment without executing changes (true=simulate deployment, false=execute deployment)
dryrun="${dryrun:-false}"
#just some colored output eye candy in the terminal
enable_colors="${enable_colors:-false}"
########### END DEFAULT CONFIGURATION
#clean up user defined paths in variables vars (basically remove trailing slash if there is one with parameter expansion)
stage="${stage%/}"
second_stage="${second_stage%/}"
appsprofile="${appsprofile%/}"
deploydir="${deploydir%/}"
libdir="${libdir%/}"
backupdir="${backupdir%/}"
#COLORS DOCUMENTATION
# black - 30
# red - 31
# green - 32
# brown - 33
# blue - 34
# magenta - 35
# cyan - 36
# lightgray - 37
#
# * 'm' character at the end of each of the following sentences is used as a stop character, where the system should stop and parse the \033[ syntax.
#
# \033[0m - is the default color for the console
# \033[0;#m - is the color of the text, where # is one of the codes mentioned above
# \033[1m - makes text bold
# \033[1;#m - makes colored text bold**
# \033[2;#m - colors text according to # but a bit darker
# \033[4;#m - colors text in # and underlines
# \033[7;#m - colors the background according to #
# \033[9;#m - colors text and strikes it
# \033[A - moves cursor one line above (careful: it does not erase the previously written line)
# \033[B - moves cursor one line under
# \033[C - moves cursor one spacing to the right
# \033[D - moves cursor one spacing to the left
# \033[E - don't know yet
# \033[F - don't know yet
#
# \033[2K - erases everything written on line before this.
#Colors variables
#Each variable holds a whole command that emits the ANSI escape when executed.
SETCOLOR_GREEN="echo -en \\033[0;32m"
SETCOLOR_RED="echo -en \\033[0;31m"
SETCOLOR_YELLOW="echo -en \\033[0;33m"
SETCOLOR_NORMAL="echo -en \\033[0;39m"
SETSTYLE_BOLD="echo -en \\033[1m"
SETSTYLE_UNDERLINE="echo -en \\033[4m"
SETSTYLE_NORMAL="echo -en \\033[0m"
#export environment variables
#Marks every name listed in GLOBAL_VARS for export to child processes.
GLOBAL_VARS="appsprofile appsuser continuous_integration backupdir debug deploydir dryrun enable_colors force_restart initd_script libdir lib_files move_or_copy second_stage stage timeout war_files"
export ${GLOBAL_VARS}
#same as echo function except the whole text line is red
function red_echo() {
#same as echo, but prints the whole line in red when enable_colors is true.
#in order for the -n functionality to work properly $2 must be quoted when called in case of spaces
if "${enable_colors}";then
if [ "$1" = "-n" ];then
${SETCOLOR_RED} && echo -n "$2" && ${SETCOLOR_NORMAL}
else
${SETCOLOR_RED} && echo "$*" && ${SETCOLOR_NORMAL}
fi
else
if [ "$1" = "-n" ];then
echo -n "$2"
else
echo "$*"
fi
fi
#FIX: explicit success status, consistent with yellow_echo/bold_echo/underline_echo
return 0
}
#same as echo function except the whole text line is green
function green_echo() {
#same as echo, but prints the whole line in green when enable_colors is true.
#in order for the -n functionality to work properly $2 must be quoted when called in case of spaces
if "${enable_colors}";then
if [ "$1" = "-n" ];then
${SETCOLOR_GREEN} && echo -n "$2" && ${SETCOLOR_NORMAL}
else
${SETCOLOR_GREEN} && echo "$*" && ${SETCOLOR_NORMAL}
fi
else
if [ "$1" = "-n" ];then
echo -n "$2"
else
echo "$*"
fi
fi
#FIX: explicit success status, consistent with yellow_echo/bold_echo/underline_echo
return 0
}
#same as echo function except the whole text line is yellow
function yellow_echo() {
#echo wrapper: prints the line in yellow when enable_colors is true.
#in order for the -n functionality to work properly $2 must be quoted when called in case of spaces
if [ "$1" = "-n" ];then
#-n form: only the (quoted) second argument is printed, without newline
if "${enable_colors}";then
${SETCOLOR_YELLOW} && echo -n "$2" && ${SETCOLOR_NORMAL}
else
echo -n "$2"
fi
else
#plain form: echo all arguments
if "${enable_colors}";then
${SETCOLOR_YELLOW} && echo "$*" && ${SETCOLOR_NORMAL}
else
echo "$*"
fi
fi
return 0
}
#same as echo function except output bold text
function bold_echo() {
#echo wrapper: prints the line in bold when enable_colors is true.
#in order for the -n functionality to work properly $2 must be quoted when called in case of spaces
if [ "$1" = "-n" ];then
#-n form: only the (quoted) second argument is printed, without newline
if "${enable_colors}";then
${SETSTYLE_BOLD} && echo -n "$2" && ${SETSTYLE_NORMAL}
else
echo -n "$2"
fi
else
#plain form: echo all arguments
if "${enable_colors}";then
${SETSTYLE_BOLD} && echo "$*" && ${SETSTYLE_NORMAL}
else
echo "$*"
fi
fi
return 0
}
#same as echo function except output underlined text
function underline_echo() {
#echo wrapper: prints the line underlined when enable_colors is true.
#in order for the -n functionality to work properly $2 must be quoted when called in case of spaces
if [ "$1" = "-n" ];then
#-n form: only the (quoted) second argument is printed, without newline
if "${enable_colors}";then
${SETSTYLE_UNDERLINE} && echo -n "$2" && ${SETSTYLE_NORMAL}
else
echo -n "$2"
fi
else
#plain form: echo all arguments
if "${enable_colors}";then
${SETSTYLE_UNDERLINE} && echo "$*" && ${SETSTYLE_NORMAL}
else
echo "$*"
fi
fi
return 0
}
#reads stdin and highlights deploy specific environment variables
#reads stdin and highlights deploy specific environment variables
#For each input line: if it starts with one of the GLOBAL_VARS names, the
#variable name is underlined and its current shell value is appended after
#"="; otherwise the line passes through unchanged.
#NOTE(review): the match is a prefix match ("^$var"), so a variable whose
#name is a prefix of another (e.g. "stage" vs "stagehand") could over-match.
function colorize_env() {
while read line;do
MATCH=0
for var in ${GLOBAL_VARS};do
if [ ! -z "$(echo "$line" | grep -e "^$var")" ];then
MATCH="${var}"
break
fi
done
if [ ! "${MATCH}" = "0" ];then
#$(eval "echo \$$var")
#eval is needed to expand the variable whose NAME is stored in MATCH
underline_echo -n "${MATCH}"
echo "=$(eval "echo \$${MATCH}")"
else
echo "$line"
fi
done
}
#show environment if debugging enabled
#show environment if debugging enabled
#Dumps the CI env.sh file and the full environment (with deploy variables
#highlighted via colorize_env), plus a legend of the color conventions.
#No-op unless $debug is set to something other than "false".
function if_debug_print_environment() {
if [ ! "${debug}" = "false" ];then
echo "===== $HOSTNAME env.sh file ====="
echo ""
#NOTE(review): hardcoded CI path; cat fails (non-fatally) if absent.
cat /app/stage/env.sh
echo ""
bold_echo "== ENVIRONMENT VARIABLES =="
if "${enable_colors}";then
echo "debug mode style:"
echo -n " " && underline_echo -n "underlined text" && echo " is used to highlight env vars specific for deployment"
fi
echo ""
env | colorize_env
echo ""
bold_echo "== EXECUTE DEPLOYMENT =="
if "${enable_colors}";then
echo "debug mode style:"
green_echo -n " green text" && echo " is used to highlight normal stdout output"
yellow_echo -n " yellow text" && echo " is used to highlight output which might be interesting"
red_echo -n " red text" && echo " is used to highlight changes which affect the running system"
fi
fi
}
#run through tests and determine what to deploy (or fail if none)
#sets globals: isdeploy (war files staged), islib (lib files staged)
#returns 0 when it is safe to proceed, 1 otherwise
function preflight_check() {
  #START CRITICAL CHECKS (checks which are depended on by the preflight_check in itself)
  #test debug environment variable (must be bool)
  if [ ! "${debug}" = "true" ] && [ ! "${debug}" = "false" ];then
    red_echo "debug=${debug} is not a valid option for debug! Must be true or false." 1>&2
    echo "Preflight test failed... Aborting." 1>&2
    return 1
  fi
  #test dryrun environment variable (must be bool)
  if [ ! "${dryrun}" = "true" ] && [ ! "${dryrun}" = "false" ];then
    red_echo "dryrun=${dryrun} is not a valid option for dryrun! Must be true or false." 1>&2
    echo "Preflight test failed... Aborting." 1>&2
    return 1
  fi
  #test runas_appsuser environment variable (must be bool)
  if [ ! "${runas_appsuser}" = "true" ] && [ ! "${runas_appsuser}" = "false" ];then
    echo "runas_appsuser=${runas_appsuser} is not a valid option for runas_appsuser! Must be true or false." 1>&2
    echo "Preflight test failed... Aborting." 1>&2
    return 1
  fi
  #END CRITICAL CHECKS
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  #test for /etc/init.d service script
  if [ ! -f "${initd_script}" ];then
    echo "There is no \${initd_script} ${initd_script}." 1>&2
    echo " |- At a minimum the script must be able to: start, stop, and status the app server." 1>&2
    STATUS=1
  fi
  #test to make sure /etc/init.d service script is executable
  if [ ! -x "${initd_script}" ];then
    echo "\${initd_script} ${initd_script} is not executable." 1>&2
    STATUS=1
  fi
  #test continuous_integration environment variable (must be bool)
  if [ ! "${continuous_integration}" = "true" ] && [ ! "${continuous_integration}" = "false" ];then
    echo "continuous_integration=${continuous_integration} is not a valid option for continuous_integration! Must be true or false." 1>&2
    STATUS=1
  fi
  #test force_restart environment variable (must be bool)
  if [ ! "${force_restart}" = "true" ] && [ ! "${force_restart}" = "false" ];then
    echo "force_restart=${force_restart} is not a valid option for force_restart! Must be true or false." 1>&2
    STATUS=1
  fi
  #test move_or_copy environment variable (limited string values)
  if [ ! "${move_or_copy}" = "mv" ] && [ ! "${move_or_copy}" = "cp" ];then
    echo "move_or_copy=${move_or_copy} is not a valid option for move_or_copy! Must be mv or cp." 1>&2
    STATUS=1
  fi
  #test enable_colors environment variable (must be bool)
  if [ ! "${enable_colors}" = "true" ] && [ ! "${enable_colors}" = "false" ];then
    #don't really care if there's something wrong with this
    echo "WARNING: enable_colors=${enable_colors} is not a valid option for enable_colors! Must be true or false." 1>&2
    echo " |- setting enable_colors=false"
    enable_colors="false"
  fi
  #test timeout environment variable (must be number)
  #fix: the regex must be unquoted inside [[ =~ ]]; a quoted pattern is
  #matched as a literal string, which rejected every valid numeric timeout
  if ! [[ "${timeout}" =~ ^[0-9]+$ ]];then
    echo "timeout=${timeout} is not a valid option for timeout! Must be number >= 0." 1>&2
    STATUS=1
  fi
  #test if $runas_appsuser set make sure the script is actually running as the $appsuser.
  if "${runas_appsuser}";then
    if [ ! "${appsuser}" = "${USER}" ];then
      echo "runas_appsuser is true. The appsuser=${appsuser} however you're currently running as ${USER}" 1>&2
      STATUS=1
    fi
  else
    if [ ! "${USER}" = "root" ];then
      echo "Trying to run deploy.sh as user ${USER}. Must be run as root or choose the runas_appsuser option." 1>&2
      STATUS=1
    fi
  fi
  isdeploy=0
  islib=0
  #test check all the war files and be sure at least one exists otherwise don't deploy war files
  if [ ! -z "${war_files}" ];then
    for x in ${war_files};do
      if [ -f "${x}" ] || [ -f "${second_stage%/}/${x}" ];then
        if "${debug}";then
          yellow_echo "stage file exists: ${x}" 1>&2
        fi
        isdeploy=1
        break
      elif "${debug}";then
        echo "not exist: ${x}" 1>&2
      fi
    done
  fi
  #test check all lib files and be sure at least one exists otherwise don't deploy lib files
  if [ ! -z "${lib_files}" ];then
    for x in ${lib_files};do
      if [ -f "${x}" ] || [ -f "${second_stage%/}/${x}" ];then
        if "${debug}";then
          green_echo "stage file exists: ${x}" 1>&2
        fi
        islib=1
        break
      elif "${debug}";then
        echo "not exist: ${x}" 1>&2
      fi
    done
  fi
  #test there is at least something to deploy, otherwise no need to continue the script
  if [ "${isdeploy}" = "0" -a "${islib}" = "0" ];then
    echo "No deployments happened. There was nothing to deploy." 1>&2
    STATUS=1
  fi
  #test the app server profile exists
  if [ ! -d "${appsprofile}" ];then
    red_echo "\${appsprofile} dir does not exist: ${appsprofile}" 1>&2
    STATUS=1
  fi
  #test that the backup directory exists. If not create it. Eventually a backup will be taken before deployment
  if [ ! -d "${backupdir}" ];then
    yellow_echo "WARNING: \${backupdir} ${backupdir} does not exist." 1>&2
    echo -n "Creating directory..." 1>&2
    if "${dryrun}";then
      echo "DRYRUN: mkdir -p \"${backupdir}\" " 1>&2
    else
      mkdir -p "${backupdir}" && echo "Done." 1>&2 || echo "Failed." 1>&2
    fi
  fi
  #test that the backup directory exists. If not create it. Eventually a backup will be taken before deployment
  if [ ! -d "${backupdir}/${deploydir}" ];then
    yellow_echo "WARNING: \${backupdir} ${backupdir}/${deploydir} does not exist." 1>&2
    echo -n "Creating directory..." 1>&2
    if "${dryrun}";then
      red_echo "DRYRUN: mkdir -p \"${backupdir}/${deploydir}\" " 1>&2
    else
      mkdir -p "${backupdir}/${deploydir}" && echo "Done." 1>&2 || echo "Failed." 1>&2
    fi
  fi
  #test that the backup directory exists. If not create it. Eventually a backup will be taken before deployment
  if [ ! -d "${backupdir}/${libdir}" ];then
    yellow_echo "WARNING: \${backupdir} ${backupdir}/${libdir} does not exist." 1>&2
    echo -n "Creating directory..." 1>&2
    if "${dryrun}";then
      red_echo "DRYRUN: mkdir -p \"${backupdir}/${libdir}\"" 1>&2
    else
      mkdir -p "${backupdir}/${libdir}" && echo "Done." 1>&2 || echo "Failed." 1>&2
    fi
  fi
  #final test that the backup directory exists or was successfully created
  if [ ! -d "${backupdir}" ];then
    if ! "${dryrun}";then
      echo "Something went wrong with creating \${backupdir} ${backupdir}." 1>&2
    fi
    STATUS=1
  fi
  #final test that the backup directory exists or was successfully created
  if [ ! -d "${backupdir}/${deploydir}" ];then
    if ! "${dryrun}";then
      echo "Something went wrong with creating \${backupdir}/${deploydir} ${backupdir}/${deploydir}." 1>&2
    fi
    STATUS=1
  fi
  #final test that the backup directory exists or was successfully created
  if [ ! -d "${backupdir}/${libdir}" ];then
    if ! "${dryrun}";then
      echo "Something went wrong with creating \${backupdir}/${libdir} ${backupdir}/${libdir}." 1>&2
    fi
    STATUS=1
  fi
  #if there was any failure in all of the above tests let the user know nothing is going to happen
  if [ ! "${STATUS}" -eq "0" ];then
    echo "Preflight test failed... Aborting." 1>&2
  fi
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#run through and backup everything
#creates timestamped tar.gz archives of ${deploydir} and/or ${libdir}
#(depending on the isdeploy/islib flags set by preflight_check)
function backup_directories() {
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  echo "Creating backups..."
  #custom timestamp for backup archives (used as part of the name)
  TIME="$(date +%Y-%m-%d-%s)"
  pushd "${appsprofile}" > /dev/null
  if "${dryrun}";then
    yellow_echo "DRYRUN: Changed working directory: $PWD" 1>&2
  fi
  if "${continuous_integration}";then
    #stage a copy of env.sh so it can be included in the archives
    if "${dryrun}";then
      red_echo "DRYRUN: cp -f \"${stage}/env.sh\" ./"
    else
      cp -f "${stage}/env.sh" ./
    fi
  fi
  if [ "${isdeploy}" = "1" ];then
    if "${dryrun}";then
      green_echo "backup ${deploydir}: ${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz"
      if "${continuous_integration}";then
        red_echo "DRYRUN: tar -czf \"${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz\" \"${deploydir}\" \"./env.sh\"" 1>&2
      else
        red_echo "DRYRUN: tar -czf \"${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz\" \"${deploydir}\"" 1>&2
      fi
    else
      echo "backup ${deploydir}: ${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz"
      if "${continuous_integration}";then
        tar -czf "${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz" "${deploydir}" "./env.sh"
      else
        tar -czf "${backupdir}/${deploydir}/${deploydir}_${TIME}.tar.gz" "${deploydir}"
      fi
      if [ ! "$?" -eq "0" ];then
        echo "Backup FAILED!" 1>&2
        STATUS=1
      fi
    fi
  fi
  if [ "${islib}" = "1" ];then
    if "${dryrun}";then
      green_echo "${libdir} backup: ${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz"
      #fix: dryrun messages expanded the undefined ${libdir_}; the real
      #command below uses ${libdir}_${TIME}, so the messages now match it
      if "${continuous_integration}";then
        red_echo "DRYRUN: tar -czf \"${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz\" \"${libdir}\" \"./env.sh\"" 1>&2
      else
        red_echo "DRYRUN: tar -czf \"${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz\" \"${libdir}\"" 1>&2
      fi
    else
      echo "${libdir} backup: ${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz"
      if "${continuous_integration}";then
        tar -czf "${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz" "${libdir}" "./env.sh"
      else
        tar -czf "${backupdir}/${libdir}/${libdir}_${TIME}.tar.gz" "${libdir}"
      fi
      if [ ! "$?" -eq "0" ];then
        echo "Backup FAILED!" 1>&2
        STATUS=1
      fi
    fi
  fi
  if "${continuous_integration}";then
    #remove the staged env.sh copy again
    if "${dryrun}";then
      red_echo "DRYRUN: rm -f \"./env.sh\""
    else
      rm -f "./env.sh"
    fi
  fi
  popd > /dev/null
  if "${dryrun}";then
    yellow_echo "DRYRUN: Changed working directory: $PWD" 1>&2
  fi
  echo "Done."
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#check to see if server shutdown is required
#a stop is only performed when libs are being deployed or a restart is forced
function conditional_shutdown() {
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  if [ "${islib}" = "1" ] || "${force_restart}";then
    if "${dryrun}";then
      red_echo "DRYRUN: \"${initd_script}\" stop" 1>&2
      green_echo "DRYRUN: app server shutdown executed."
    elif [ "${timeout}" -eq "0" ];then
      #no timeout configured; let the init script take as long as it needs
      if ! "${initd_script}" stop;then
        red_echo "Failed shutting down the app server." 1>&2
        STATUS=1
      fi
    else
      #bound the stop operation by ${timeout} seconds
      if ! timeout ${timeout} "${initd_script}" stop;then
        echo "timeout=${timeout} not necessarily related to shutdown failure."
        red_echo "Failed shutting down the app server." 1>&2
        STATUS=1
      fi
    fi
  fi
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#deployment logic
#moves/copies each staged war file into ${appsprofile}/${deploydir};
#stops on the first failed deployment
function deploy_wars() {
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  for x in ${war_files};do
    #if war file does not exist in the current $stage then try to fall back to $second_stage
    if [ ! -z "${second_stage%/}" ] && [ ! -e "${x}" ] && [ -e "${second_stage}/${x}" ];then
      x="${second_stage}/${x}"
      if "${debug}";then
        yellow_echo "Falling back to \$second_stage: ${x}" 1>&2
      fi
    fi
    #try to deploy
    if [ -e "${x}" ];then
      if "${dryrun}";then
        #parameter expansion to the rescue for removing the second_stage from the ${x} variable like ${x#${second_stage}/}!
        red_echo "DRYRUN: ${move_or_copy} -f \"${x}\" \"${appsprofile}/${deploydir}/${x#${second_stage}/}\"" 1>&2
        green_echo "DRYRUN: ${x} deployed."
      else
        #Start of deploy command list
        if "${runas_appsuser}";then
          #fix: the expansion below was ${x#${second_stage}} (no trailing /),
          #inconsistent with every other expansion in this function
          chmod 644 "${x}" && \
          ${move_or_copy} -f "${x}" "${appsprofile}/${deploydir}/${x#${second_stage}/}" && \
          touch "${appsprofile}/${deploydir}/${x#${second_stage}/}" && \
          green_echo "${x} deployed."
        else
          chown ${appsuser}\: "${x}" && \
          chmod 644 "${x}" && \
          ${move_or_copy} -f "${x}" "${appsprofile}/${deploydir}/${x#${second_stage}/}" && \
          touch "${appsprofile}/${deploydir}/${x#${second_stage}/}" && \
          green_echo "${x} deployed."
        fi
        #End of deploy command list
      fi
      #test the status output from the deploy command list for errors
      if [ ! "$?" -eq "0" ];then
        red_echo "${x} deployment FAILED!" 1>&2
        STATUS=1
        break
      fi
    elif "${debug}";then
      echo "not exist: ${x}" 1>&2
    fi
  done
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#deployment logic
#moves/copies each staged lib file into ${appsprofile}/${libdir};
#stops on the first failed deployment
function deploy_libs() {
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  for x in ${lib_files};do
    #if lib file does not exist in the current $stage then try to fall back to $second_stage
    #consistency fix: trim a trailing / like deploy_wars does, and emit the
    #same debug message on fallback
    if [ ! -z "${second_stage%/}" ] && [ ! -e "${x}" ] && [ -e "${second_stage}/${x}" ];then
      x="${second_stage}/${x}"
      if "${debug}";then
        yellow_echo "Falling back to \$second_stage: ${x}" 1>&2
      fi
    fi
    #try to deploy
    if [ -e "${x}" ];then
      if "${dryrun}";then
        #parameter expansion to the rescue for removing the second_stage from the ${x} variable like ${x#${second_stage}/}!
        red_echo "DRYRUN: ${move_or_copy} -f \"${x}\" \"${appsprofile}/${libdir}/${x#${second_stage}/}\"" 1>&2
        green_echo "DRYRUN: ${x} deployed."
      else
        #Start of deploy command list
        if "${runas_appsuser}";then
          #fix: the expansion below was ${x#${second_stage}} (no trailing /),
          #inconsistent with every other expansion in this function
          chmod 644 "${x}" && \
          ${move_or_copy} -f "${x}" "${appsprofile}/${libdir}/${x#${second_stage}/}" && \
          touch "${appsprofile}/${libdir}/${x#${second_stage}/}" && \
          green_echo "${x} deployed."
        else
          chown ${appsuser}\: "${x}" && \
          chmod 644 "${x}" && \
          ${move_or_copy} -f "${x}" "${appsprofile}/${libdir}/${x#${second_stage}/}" && \
          touch "${appsprofile}/${libdir}/${x#${second_stage}/}" && \
          green_echo "${x} deployed."
        fi
        #End of deploy command list
      fi
      #test the status output from the deploy command list for errors
      if [ ! "$?" -eq "0" ];then
        red_echo "${x} deployment FAILED!" 1>&2
        STATUS=1
        break
      fi
    elif "${debug}";then
      echo "not exist: ${x}" 1>&2
    fi
  done
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#check to see if server startup is required
#a start is only performed when libs were deployed or a restart is forced;
#after a successful start the init script's status is re-checked
function conditional_startup() {
  if "${debug}";then
    echo "enter function ${FUNCNAME}" 1>&2
  fi
  STATUS=0
  if [ "${islib}" = "1" ] || "${force_restart}";then
    if "${dryrun}";then
      red_echo "DRYRUN: \"${initd_script}\" start" 1>&2
      green_echo "DRYRUN: app server startup executed."
    else
      if [ "${timeout}" -eq "0" ];then
        if ! "${initd_script}" start;then
          red_echo "Failed to start the app server." 1>&2
          STATUS=1
        #fix: '! sleep 2 && status' only negated sleep (always false), so the
        #post-start status check never executed; sleep first, then negate status
        elif sleep 2 && ! "${initd_script}" status &> /dev/null;then
          red_echo "App server failed after apparent successful startup." 1>&2
          STATUS=1
        fi
      else
        if ! timeout ${timeout} "${initd_script}" start;then
          red_echo "Failed to start the app server." 1>&2
          STATUS=1
        elif sleep 2 && ! "${initd_script}" status &> /dev/null;then
          red_echo "App server failed after apparent successful startup." 1>&2
          STATUS=1
        fi
      fi
    fi
  fi
  if "${debug}";then
    echo "exit function ${FUNCNAME} return STATUS=${STATUS}" 1>&2
  fi
  return ${STATUS}
}
#execute deployments in a safe order; each step depends on a previous
#stderr will be used for error and debug messages
#stdout will be used for successful status updates
#the script will exit with a meaningful status code
if_debug_print_environment 1>&2
if [ ! -d "${stage}" ];then
  red_echo "stage=${stage} directory does not exist!" 1>&2
  echo "Preflight test failed... Aborting." 1>&2
  #fix: actually abort as the message promises instead of falling through
  #to the deploy chain below
  exit 1
fi
cd "${stage}" &> /dev/null && \
preflight_check && \
backup_directories && \
conditional_shutdown && \
deploy_wars && \
deploy_libs && \
conditional_startup
STATUS=$?
if [ "${debug}" = "true" ];then
  echo "exit STATUS=${STATUS}" 1>&2
fi
echo "running as user $USER"
exit ${STATUS}
|
<reponame>seidu626/vumi<filename>vumi/transports/smpp/tests/test_smpp_transport.py
# -*- coding: utf-8 -*-
import logging
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.task import Clock
from smpp.pdu_builder import DeliverSM, SubmitSMResp
from vumi.config import ConfigError
from vumi.message import TransportUserMessage
from vumi.tests.helpers import VumiTestCase
from vumi.tests.utils import LogCatcher
from vumi.transports.smpp.smpp_transport import (
message_key, remote_message_key, multipart_info_key, sequence_number_key,
SmppTransceiverTransport, SmppTransmitterTransport, SmppReceiverTransport,
SmppTransceiverTransportWithOldConfig)
from vumi.transports.smpp.pdu_utils import (
pdu_ok, short_message, command_id, seq_no, pdu_tlv, unpacked_pdu_opts)
from vumi.transports.smpp.processors import SubmitShortMessageProcessor
from vumi.transports.smpp.tests.fake_smsc import FakeSMSC
from vumi.transports.tests.helpers import TransportHelper
class TestSmppTransportConfig(VumiTestCase):
    # Config-parsing tests that need no running transport or fake SMSC.

    def test_host_port_fallback(self):
        """
        Old-style 'host' and 'port' fields are still supported in configs.
        """
        def parse_config(extra_config):
            # Minimal static config; overlay the endpoint fields under test.
            config = {
                'transport_name': 'name',
                'system_id': 'foo',
                'password': '<PASSWORD>',
            }
            config.update(extra_config)
            return SmppTransceiverTransport.CONFIG_CLASS(config, static=True)
        # If we don't provide an endpoint config, we get an error.
        self.assertRaises(ConfigError, parse_config, {})
        # If we do provide an endpoint config, we get an endpoint.
        cfg = {'twisted_endpoint': 'tcp:host=example.com:port=1337'}
        self.assertNotEqual(parse_config(cfg).twisted_endpoint.connect, None)
        # If we provide host and port configs, we get an endpoint.
        cfg = {'host': 'example.com', 'port': 1337}
        self.assertNotEqual(parse_config(cfg).twisted_endpoint.connect, None)
class SmppTransportTestCase(VumiTestCase):
    # Delivery-report content templates; %s is the remote message id.
    DR_TEMPLATE = ("id:%s sub:... dlvrd:... submit date:200101010030"
                   " done date:200101020030 stat:DELIVRD err:... text:Meep")
    DR_MINIMAL_TEMPLATE = "id:%s stat:DELIVRD text:Meep"
    # Set by concrete subclasses to the transport class under test.
    transport_class = None

    def setUp(self):
        # Deterministic reactor clock and an in-process fake SMSC endpoint.
        self.clock = Clock()
        self.fake_smsc = FakeSMSC()
        self.tx_helper = self.add_helper(TransportHelper(self.transport_class))
        self.default_config = {
            'transport_name': self.tx_helper.transport_name,
            'worker_name': self.tx_helper.transport_name,
            'twisted_endpoint': self.fake_smsc.endpoint,
            'delivery_report_processor': 'vumi.transports.smpp.processors.'
            'DeliveryReportProcessor',
            'deliver_short_message_processor': (
                'vumi.transports.smpp.processors.'
                'DeliverShortMessageProcessor'),
            'system_id': 'foo',
            'password': '<PASSWORD>',
            'deliver_short_message_processor_config': {
                # data_coding 0 is decoded as UTF-8 in these tests
                'data_coding_overrides': {
                    0: 'utf-8',
                }
            }
        }

    def _get_transport_config(self, config):
        """
        This is overridden in a subclass.
        """
        cfg = self.default_config.copy()
        cfg.update(config)
        return cfg

    @inlineCallbacks
    def get_transport(self, config={}, bind=True):
        # NOTE(review): mutable default `config={}` is shared across calls;
        # it is only read here, but confirm before ever mutating it.
        cfg = self._get_transport_config(config)
        transport = yield self.tx_helper.get_transport(cfg, start=False)
        # Install the deterministic clock before the worker is started.
        transport.clock = self.clock
        yield transport.startWorker()
        self.clock.advance(0)
        if bind:
            yield self.fake_smsc.bind()
        returnValue(transport)
class SmppTransceiverTransportTestCase(SmppTransportTestCase):
transport_class = SmppTransceiverTransport
    @inlineCallbacks
    def test_setup_transport(self):
        """
        The transport's protocol only reports bound after the fake SMSC
        completes the bind handshake.
        """
        transport = yield self.get_transport(bind=False)
        protocol = yield transport.service.get_protocol()
        self.assertEqual(protocol.is_bound(), False)
        yield self.fake_smsc.bind()
        self.assertEqual(protocol.is_bound(), True)

    @inlineCallbacks
    def test_mo_sms(self):
        """
        A deliver_sm is acked and dispatched as an inbound sms message
        preserving content and addresses.
        """
        yield self.get_transport()
        self.fake_smsc.send_mo(
            sequence_number=1, short_message='foo', source_addr='123',
            destination_addr='456')
        deliver_sm_resp = yield self.fake_smsc.await_pdu()
        self.assertTrue(pdu_ok(deliver_sm_resp))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], 'foo')
        self.assertEqual(msg['from_addr'], '123')
        self.assertEqual(msg['to_addr'], '456')
        self.assertEqual(msg['transport_type'], 'sms')
    @inlineCallbacks
    def test_mo_sms_empty_sms_allowed(self):
        """
        An empty short_message is accepted and dispatched when
        allow_empty_messages is configured.
        """
        yield self.get_transport({
            'deliver_short_message_processor_config': {
                'allow_empty_messages': True,
            }
        })
        self.fake_smsc.send_mo(
            sequence_number=1, short_message='', source_addr='123',
            destination_addr='456')
        deliver_sm_resp = yield self.fake_smsc.await_pdu()
        self.assertTrue(pdu_ok(deliver_sm_resp))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], '')

    @inlineCallbacks
    def test_mo_sms_empty_sms_disallowed(self):
        """
        By default an empty short_message is rejected (non-OK response) and
        neither an inbound message nor an event is dispatched.
        """
        yield self.get_transport()
        with LogCatcher(message=r"^(Not all parts|WARNING)") as lc:
            self.fake_smsc.send_mo(
                sequence_number=1, short_message='', source_addr='123',
                destination_addr='456')
            deliver_sm_resp = yield self.fake_smsc.await_pdu()
        self.assertFalse(pdu_ok(deliver_sm_resp))
        # check that failure to process delivery report was logged
        self.assertEqual(lc.messages(), [
            "WARNING: Not decoding `None` message with data_coding=1",
            "Not all parts of the PDU were able to be decoded. "
            "Responding with ESME_RDELIVERYFAILURE.",
        ])
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        inbound = self.tx_helper.get_dispatched_inbound()
        self.assertEqual(inbound, [])
        events = self.tx_helper.get_dispatched_events()
        self.assertEqual(events, [])
    @inlineCallbacks
    def test_mo_delivery_report_pdu_opt_params(self):
        """
        We always treat a message with the optional PDU params set as a
        delivery report.
        """
        transport = yield self.get_transport()
        # map remote SMSC id 'foo' to our local message id 'bar'
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        pdu = DeliverSM(sequence_number=1, esm_class=4)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        # message_state 2 yields a 'delivered' status (asserted below)
        pdu.add_optional_parameter('message_state', 2)
        yield self.fake_smsc.handle_pdu(pdu)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_pdu_opt_params_esm_class_not_set(self):
        """
        We always treat a message with the optional PDU params set as a
        delivery report, even if ``esm_class`` is not set.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        # same as the previous test, but without esm_class=4
        pdu = DeliverSM(sequence_number=1)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        pdu.add_optional_parameter('message_state', 2)
        yield self.fake_smsc.handle_pdu(pdu)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_pdu_esm_class_not_set(self):
        """
        We treat a content-based DR as a normal message if the ``esm_class``
        flags are not set.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=self.DR_TEMPLATE % ('foo',),
            source_addr='123', destination_addr='456')
        # dispatched as an ordinary inbound message, not an event
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], self.DR_TEMPLATE % ('foo',))
        self.assertEqual(msg['from_addr'], '123')
        self.assertEqual(msg['to_addr'], '456')
        self.assertEqual(msg['transport_type'], 'sms')
        events = yield self.tx_helper.get_dispatched_events()
        self.assertEqual(events, [])
    @inlineCallbacks
    def test_mo_delivery_report_esm_class_with_full_content(self):
        """
        If ``esm_class`` and content are both set appropriately, we process the
        DR.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=self.DR_TEMPLATE % ('foo',),
            source_addr='123', destination_addr='456', esm_class=4)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_esm_class_with_short_status(self):
        """
        If the delivery report has a shorter status field, the default regex
        still matches.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        # stat:FAILED produces a 'failed' delivery status (asserted below)
        short_message = (
            "id:foo sub:... dlvrd:... submit date:200101010030"
            " done date:200101020030 stat:FAILED err:042 text:Meep")
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=short_message,
            source_addr='123', destination_addr='456', esm_class=4)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'failed')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_esm_class_with_minimal_content(self):
        """
        If ``esm_class`` and content are both set appropriately, we process the
        DR even if the minimal subset of the content regex matches.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        self.fake_smsc.send_mo(
            sequence_number=1, source_addr='123', destination_addr='456',
            short_message=self.DR_MINIMAL_TEMPLATE % ('foo',), esm_class=4)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_content_with_nulls(self):
        """
        If ``esm_class`` and content are both set appropriately, we process the
        DR even if some content fields contain null values.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        content = (
            "id:%s sub:null dlvrd:null submit date:200101010030"
            " done date:200101020030 stat:DELIVRD err:null text:Meep")
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=content % ("foo",),
            source_addr='123', destination_addr='456', esm_class=4)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')
    @inlineCallbacks
    def test_mo_delivery_report_esm_class_with_bad_content(self):
        """
        If ``esm_class`` indicates a DR but the regex fails to match, we log a
        warning and do nothing.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        lc = LogCatcher(message="esm_class 4 indicates")
        with lc:
            # 'foo' does not match the delivery-report content regex
            self.fake_smsc.send_mo(
                sequence_number=1, source_addr='123', destination_addr='456',
                short_message="foo", esm_class=4)
            yield self.fake_smsc.await_pdu()
        # check that failure to process delivery report was logged
        [warning] = lc.logs
        self.assertEqual(
            warning["message"][0],
            "esm_class 4 indicates delivery report, but content does not"
            " match regex: 'foo'")
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        inbound = self.tx_helper.get_dispatched_inbound()
        self.assertEqual(inbound, [])
        events = self.tx_helper.get_dispatched_events()
        self.assertEqual(events, [])

    @inlineCallbacks
    def test_mo_delivery_report_esm_class_with_no_content(self):
        """
        If ``esm_class`` indicates a DR but the content is empty, we log a
        warning and do nothing.
        """
        transport = yield self.get_transport()
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        lc = LogCatcher(message="esm_class 4 indicates")
        with lc:
            self.fake_smsc.send_mo(
                sequence_number=1, source_addr='123', destination_addr='456',
                short_message=None, esm_class=4)
            yield self.fake_smsc.await_pdu()
        # check that failure to process delivery report was logged
        [warning] = lc.logs
        self.assertEqual(
            warning["message"][0],
            "esm_class 4 indicates delivery report, but content does not"
            " match regex: None")
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        inbound = self.tx_helper.get_dispatched_inbound()
        self.assertEqual(inbound, [])
        events = self.tx_helper.get_dispatched_events()
        self.assertEqual(events, [])
    @inlineCallbacks
    def test_mo_delivery_report_esm_disabled_with_full_content(self):
        """
        If ``esm_class`` checking is disabled and the content is set
        appropriately, we process the DR.
        """
        transport = yield self.get_transport({
            "delivery_report_processor_config": {
                "delivery_report_use_esm_class": False,
            }
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        # esm_class=0: content alone identifies this as a DR
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=self.DR_TEMPLATE % ('foo',),
            source_addr='123', destination_addr='456', esm_class=0)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_esm_disabled_with_minimal_content(self):
        """
        If ``esm_class`` checking is disabled and the content is set
        appropriately, we process the DR even if the minimal subset of the
        content regex matches.
        """
        transport = yield self.get_transport({
            "delivery_report_processor_config": {
                "delivery_report_use_esm_class": False,
            }
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        self.fake_smsc.send_mo(
            sequence_number=1, source_addr='123', destination_addr='456',
            short_message=self.DR_MINIMAL_TEMPLATE % ('foo',), esm_class=0)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')

    @inlineCallbacks
    def test_mo_delivery_report_esm_disabled_content_with_nulls(self):
        """
        If ``esm_class`` checking is disabled and the content is set
        appropriately, we process the DR even if some content fields contain
        null values.
        """
        transport = yield self.get_transport({
            "delivery_report_processor_config": {
                "delivery_report_use_esm_class": False,
            }
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        content = (
            "id:%s sub:null dlvrd:null submit date:200101010030"
            " done date:200101020030 stat:DELIVRD err:null text:Meep")
        self.fake_smsc.send_mo(
            sequence_number=1, short_message=content % ("foo",),
            source_addr='123', destination_addr='456', esm_class=0)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'delivery_report')
        self.assertEqual(event['delivery_status'], 'delivered')
        self.assertEqual(event['user_message_id'], 'bar')
    @inlineCallbacks
    def test_mo_sms_unicode(self):
        """
        UTF-8 bytes in short_message are decoded via the configured
        data_coding override (0 -> 'utf-8') before dispatch.
        """
        yield self.get_transport()
        self.fake_smsc.send_mo(
            sequence_number=1, short_message='Zo\xc3\xab', data_coding=0)
        deliver_sm_resp = yield self.fake_smsc.await_pdu()
        self.assertTrue(pdu_ok(deliver_sm_resp))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], u'Zoë')

    @inlineCallbacks
    def test_mo_sms_multipart_long(self):
        """
        Content delivered via the message_payload optional parameter is
        dispatched as a single inbound message.
        """
        yield self.get_transport()
        content = '1' * 255
        pdu = DeliverSM(sequence_number=1)
        # message_payload is hex-encoded on the wire
        pdu.add_optional_parameter('message_payload', content.encode('hex'))
        self.fake_smsc.send_pdu(pdu)
        deliver_sm_resp = yield self.fake_smsc.await_pdu()
        self.assertEqual(1, seq_no(deliver_sm_resp))
        self.assertTrue(pdu_ok(deliver_sm_resp))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], content)
    @inlineCallbacks
    def test_mo_sms_multipart_udh(self):
        """
        Three UDH-prefixed segments are each acked and reassembled into a
        single inbound message.
        """
        yield self.get_transport()
        deliver_sm_resps = []
        # UDH header bytes: ref 0xff, total 3 parts, part number in last byte
        self.fake_smsc.send_mo(
            sequence_number=1, short_message="\x05\x00\x03\xff\x03\x01back")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.fake_smsc.send_mo(
            sequence_number=2, short_message="\x05\x00\x03\xff\x03\x02 at")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.fake_smsc.send_mo(
            sequence_number=3, short_message="\x05\x00\x03\xff\x03\x03 you")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.assertEqual([1, 2, 3], map(seq_no, deliver_sm_resps))
        self.assertTrue(all(map(pdu_ok, deliver_sm_resps)))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], u'back at you')

    @inlineCallbacks
    def test_mo_sms_multipart_udh_out_of_order(self):
        """
        UDH segments arriving out of order are still reassembled in the
        correct order.
        """
        yield self.get_transport()
        deliver_sm_resps = []
        self.fake_smsc.send_mo(
            sequence_number=1, short_message="\x05\x00\x03\xff\x03\x01back")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        # part 3 arrives before part 2
        self.fake_smsc.send_mo(
            sequence_number=3, short_message="\x05\x00\x03\xff\x03\x03 you")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.fake_smsc.send_mo(
            sequence_number=2, short_message="\x05\x00\x03\xff\x03\x02 at")
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.assertEqual([1, 3, 2], map(seq_no, deliver_sm_resps))
        self.assertTrue(all(map(pdu_ok, deliver_sm_resps)))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], u'back at you')

    @inlineCallbacks
    def test_mo_sms_multipart_sar(self):
        """
        Segments described by sar_* optional parameters are reassembled into
        a single inbound message.
        """
        yield self.get_transport()
        deliver_sm_resps = []
        pdu1 = DeliverSM(sequence_number=1, short_message='back')
        pdu1.add_optional_parameter('sar_msg_ref_num', 1)
        pdu1.add_optional_parameter('sar_total_segments', 3)
        pdu1.add_optional_parameter('sar_segment_seqnum', 1)
        self.fake_smsc.send_pdu(pdu1)
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        pdu2 = DeliverSM(sequence_number=2, short_message=' at')
        pdu2.add_optional_parameter('sar_msg_ref_num', 1)
        pdu2.add_optional_parameter('sar_total_segments', 3)
        pdu2.add_optional_parameter('sar_segment_seqnum', 2)
        self.fake_smsc.send_pdu(pdu2)
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        pdu3 = DeliverSM(sequence_number=3, short_message=' you')
        pdu3.add_optional_parameter('sar_msg_ref_num', 1)
        pdu3.add_optional_parameter('sar_total_segments', 3)
        pdu3.add_optional_parameter('sar_segment_seqnum', 3)
        self.fake_smsc.send_pdu(pdu3)
        deliver_sm_resps.append((yield self.fake_smsc.await_pdu()))
        self.assertEqual([1, 2, 3], map(seq_no, deliver_sm_resps))
        self.assertTrue(all(map(pdu_ok, deliver_sm_resps)))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['content'], u'back at you')
    @inlineCallbacks
    def test_mo_bad_encoding(self):
        """
        A deliver_sm whose body can't be decoded is dropped (the
        UnicodeDecodeError is logged) without blocking later messages.
        """
        yield self.get_transport()
        # data_coding=1 selects an ASCII codec here; \xa7 is not valid
        # ASCII, so this PDU's body fails to decode.
        bad_pdu = DeliverSM(555,
                            short_message="SMS from server containing \xa7",
                            destination_addr="2772222222",
                            source_addr="2772000000",
                            data_coding=1)
        good_pdu = DeliverSM(555,
                             short_message="Next message",
                             destination_addr="2772222222",
                             source_addr="2772000000",
                             data_coding=1)
        yield self.fake_smsc.handle_pdu(bad_pdu)
        yield self.fake_smsc.handle_pdu(good_pdu)
        # Only the decodable message reaches the application.
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['message_type'], 'user_message')
        self.assertEqual(msg['transport_name'], self.tx_helper.transport_name)
        self.assertEqual(msg['content'], "Next message")
        # The bad message is not published as a failure ...
        dispatched_failures = self.tx_helper.get_dispatched_failures()
        self.assertEqual(dispatched_failures, [])
        # ... but the decode error is logged with the offending byte.
        [failure] = self.flushLoggedErrors(UnicodeDecodeError)
        message = failure.getErrorMessage()
        codec, rest = message.split(' ', 1)
        self.assertEqual(codec, "'ascii'")
        self.assertTrue(
            rest.startswith("codec can't decode byte 0xa7 in position 27"))
@inlineCallbacks
def test_mo_sms_failed_remote_id_lookup(self):
yield self.get_transport()
lc = LogCatcher(message="Failed to retrieve message id")
with lc:
yield self.fake_smsc.handle_pdu(
DeliverSM(sequence_number=1, esm_class=4,
short_message=self.DR_TEMPLATE % ('foo',)))
# check that failure to send delivery report was logged
[warning] = lc.logs
expected_msg = (
"Failed to retrieve message id for delivery report. Delivery"
" report from %s discarded.") % (self.tx_helper.transport_name,)
self.assertEqual(warning['message'], (expected_msg,))
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
@inlineCallbacks
def test_mt_sms(self):
yield self.get_transport()
msg = self.tx_helper.make_outbound('hello world')
yield self.tx_helper.dispatch_outbound(msg)
pdu = yield self.fake_smsc.await_pdu()
self.assertEqual(command_id(pdu), 'submit_sm')
self.assertEqual(short_message(pdu), 'hello world')
@inlineCallbacks
def test_mt_sms_bad_to_addr(self):
yield self.get_transport()
msg = yield self.tx_helper.make_dispatch_outbound(
'hello world', to_addr=u'+\u2000')
[event] = self.tx_helper.get_dispatched_events()
self.assertEqual(event['event_type'], 'nack')
self.assertEqual(event['user_message_id'], msg['message_id'])
self.assertEqual(event['nack_reason'], u'Invalid to_addr: +\u2000')
@inlineCallbacks
def test_mt_sms_bad_from_addr(self):
yield self.get_transport()
msg = yield self.tx_helper.make_dispatch_outbound(
'hello world', from_addr=u'+\u2000')
[event] = self.tx_helper.get_dispatched_events()
self.assertEqual(event['event_type'], 'nack')
self.assertEqual(event['user_message_id'], msg['message_id'])
self.assertEqual(event['nack_reason'], u'Invalid from_addr: +\u2000')
@inlineCallbacks
def test_mt_sms_submit_sm_encoding(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'submit_sm_encoding': 'latin1',
}
})
yield self.tx_helper.make_dispatch_outbound(u'Zoë destroyer of Ascii!')
submit_sm_pdu = yield self.fake_smsc.await_pdu()
self.assertEqual(
short_message(submit_sm_pdu),
u'Zoë destroyer of Ascii!'.encode('latin-1'))
@inlineCallbacks
def test_mt_sms_submit_sm_null_message(self):
"""
We can successfully send a message with null content.
"""
yield self.get_transport()
msg = self.tx_helper.make_outbound(None)
yield self.tx_helper.dispatch_outbound(msg)
pdu = yield self.fake_smsc.await_pdu()
self.assertEqual(command_id(pdu), 'submit_sm')
self.assertEqual(short_message(pdu), None)
@inlineCallbacks
def test_submit_sm_data_coding(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'submit_sm_data_coding': 8
}
})
yield self.tx_helper.make_dispatch_outbound("hello world")
submit_sm_pdu = yield self.fake_smsc.await_pdu()
params = submit_sm_pdu['body']['mandatory_parameters']
self.assertEqual(params['data_coding'], 8)
@inlineCallbacks
def test_mt_sms_ack(self):
yield self.get_transport()
msg = self.tx_helper.make_outbound('hello world')
yield self.tx_helper.dispatch_outbound(msg)
submit_sm_pdu = yield self.fake_smsc.await_pdu()
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
message_id='foo'))
[event] = yield self.tx_helper.wait_for_dispatched_events(1)
self.assertEqual(event['event_type'], 'ack')
self.assertEqual(event['user_message_id'], msg['message_id'])
self.assertEqual(event['sent_message_id'], 'foo')
    @inlineCallbacks
    def assert_no_events(self):
        """
        Helper: assert that no events are pending by dispatching a
        sentinel message that fails, then checking that the sentinel's
        nack is the only event dispatched.
        """
        # NOTE: We can't test for the absence of an event in isolation but we
        # can test that for the presence of a second event only.
        fail_msg = self.tx_helper.make_outbound('hello fail')
        yield self.tx_helper.dispatch_outbound(fail_msg)
        submit_sm_fail_pdu = yield self.fake_smsc.await_pdu()
        # Distinctive remote id marks this as the sentinel response.
        self.fake_smsc.send_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_fail_pdu),
                         message_id='__assert_no_events__',
                         command_status='ESME_RINVDSTADR'))
        # Exactly one event (the sentinel's nack) must appear; any earlier
        # pending event would arrive first and break the unpack.
        [fail] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(fail['event_type'], 'nack')
@inlineCallbacks
def test_mt_sms_disabled_ack(self):
yield self.get_transport({'disable_ack': True})
msg = self.tx_helper.make_outbound('hello world')
yield self.tx_helper.dispatch_outbound(msg)
submit_sm_pdu = yield self.fake_smsc.await_pdu()
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
message_id='foo'))
yield self.assert_no_events()
@inlineCallbacks
def test_mt_sms_nack(self):
yield self.get_transport()
msg = self.tx_helper.make_outbound('hello world')
yield self.tx_helper.dispatch_outbound(msg)
submit_sm_pdu = yield self.fake_smsc.await_pdu()
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
message_id='foo', command_status='ESME_RINVDSTADR'))
[event] = yield self.tx_helper.wait_for_dispatched_events(1)
self.assertEqual(event['event_type'], 'nack')
self.assertEqual(event['user_message_id'], msg['message_id'])
self.assertEqual(event['nack_reason'], 'ESME_RINVDSTADR')
    @inlineCallbacks
    def test_mt_sms_failure(self):
        """
        A submit_sm_resp with a failure status publishes a nack and a
        failure message carrying the SMPP error code and the original
        payload -- even when the response PDU has no body.
        """
        yield self.get_transport()
        message = yield self.tx_helper.make_dispatch_outbound(
            "message", message_id='446')
        submit_sm = yield self.fake_smsc.await_pdu()
        response = SubmitSMResp(seq_no(submit_sm), "3rd_party_id_3",
                                command_status="ESME_RSUBMITFAIL")
        # A failure PDU might not have a body.
        response.obj.pop('body')
        self.fake_smsc.send_pdu(response)
        # There should be a nack
        [nack] = yield self.tx_helper.wait_for_dispatched_events(1)
        # The failure publisher receives the error code and full payload.
        [failure] = yield self.tx_helper.get_dispatched_failures()
        self.assertEqual(failure['reason'], 'ESME_RSUBMITFAIL')
        self.assertEqual(failure['message'], message.payload)
@inlineCallbacks
def test_mt_sms_failure_with_no_reason(self):
yield self.get_transport()
message = yield self.tx_helper.make_dispatch_outbound(
"message", message_id='446')
submit_sm = yield self.fake_smsc.await_pdu()
yield self.fake_smsc.handle_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm),
message_id='foo',
command_status=None))
# There should be a nack
[nack] = yield self.tx_helper.wait_for_dispatched_events(1)
self.assertEqual(nack['user_message_id'], message['message_id'])
self.assertEqual(nack['nack_reason'], 'Unspecified')
[failure] = yield self.tx_helper.get_dispatched_failures()
self.assertEqual(failure['reason'], 'Unspecified')
    @inlineCallbacks
    def test_mt_sms_seq_num_lookup_failure(self):
        """
        A submit_sm_resp whose sequence number matches no known message is
        logged and discarded, and no bogus mapping is stored.
        """
        transport = yield self.get_transport()
        lc = LogCatcher(message="Failed to retrieve message id")
        with lc:
            # 0xbad was never allocated to any outbound message.
            yield self.fake_smsc.handle_pdu(
                SubmitSMResp(sequence_number=0xbad, message_id='bad'))
        # Make sure we didn't store 'None' in redis.
        message_stash = transport.message_stash
        message_id = yield message_stash.get_internal_message_id('bad')
        self.assertEqual(message_id, None)
        # check that failure to send ack/nack was logged
        [warning] = lc.logs
        expected_msg = (
            "Failed to retrieve message id for deliver_sm_resp. ack/nack"
            " from %s discarded.") % (self.tx_helper.transport_name,)
        self.assertEqual(warning['message'], (expected_msg,))
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
    @inlineCallbacks
    def test_mt_sms_throttled(self):
        """
        An ESME_RTHROTTLED response puts the transport into the throttled
        state; the message is retried after throttle_delay and acked once
        the retry succeeds.
        """
        transport = yield self.get_transport()
        transport_config = transport.get_static_config()
        msg = self.tx_helper.make_outbound('hello world')
        yield self.tx_helper.dispatch_outbound(msg)
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        with LogCatcher(message="Throttling outbound messages.") as lc:
            yield self.fake_smsc.handle_pdu(
                SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
                             message_id='foo',
                             command_status='ESME_RTHROTTLED'))
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        # After the throttle delay, the message is retried automatically.
        self.clock.advance(transport_config.throttle_delay)
        submit_sm_pdu_retry = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu_retry),
                         message_id='bar',
                         command_status='ESME_ROK'))
        # The retry is a fresh PDU (new sequence number, same content).
        self.assertTrue(seq_no(submit_sm_pdu_retry) > seq_no(submit_sm_pdu))
        self.assertEqual(short_message(submit_sm_pdu), 'hello world')
        self.assertEqual(short_message(submit_sm_pdu_retry), 'hello world')
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        # We're still throttled until our next attempt to unthrottle finds no
        # messages to retry. After a non-throttle submit_sm_resp, that happens
        # with no delay.
        with LogCatcher(message="No longer throttling outbound") as lc:
            self.clock.advance(0)
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
    @inlineCallbacks
    def test_mt_sms_multipart_throttled(self):
        """
        When parts of a multipart message are throttled, we retry only those
        PDUs.
        """
        transport = yield self.get_transport({
            'submit_short_message_processor_config': {
                'send_multipart_udh': True,
            }
        })
        transport_config = transport.get_static_config()
        msg = self.tx_helper.make_outbound('a' * 350)  # Three parts.
        yield self.tx_helper.dispatch_outbound(msg)
        [pdu1, pdu2, pdu3] = yield self.fake_smsc.await_pdus(3)
        # UDH octets 4-5 are (total parts, this part's index).
        self.assertEqual(short_message(pdu1)[4:6], "\x03\x01")
        self.assertEqual(short_message(pdu2)[4:6], "\x03\x02")
        self.assertEqual(short_message(pdu3)[4:6], "\x03\x03")
        # Let two parts through.
        yield self.fake_smsc.submit_sm_resp(pdu1)
        yield self.fake_smsc.submit_sm_resp(pdu2)
        self.assertEqual(transport.throttled, False)
        # Throttle the third part.
        yield self.fake_smsc.submit_sm_resp(
            pdu3, command_status='ESME_RTHROTTLED')
        self.assertEqual(transport.throttled, True)
        self.clock.advance(transport_config.throttle_delay)
        retry_pdu = yield self.fake_smsc.await_pdu()
        # Assume nothing else is incrementing sequence numbers.
        self.assertEqual(seq_no(retry_pdu), seq_no(pdu3) + 1)
        # The retry should be identical to pdu3 except for the sequence number.
        pdu3_retry = dict((k, v.copy()) for k, v in pdu3.iteritems())
        pdu3_retry['header']['sequence_number'] = seq_no(retry_pdu)
        self.assertEqual(retry_pdu, pdu3_retry)
        # Let the retry through.
        yield self.fake_smsc.submit_sm_resp(retry_pdu)
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        self.assertEqual(transport.throttled, True)
        # Prod the clock to notice there are no more retries and unthrottle.
        self.clock.advance(0)
        self.assertEqual(transport.throttled, False)
    @inlineCallbacks
    def test_mt_sms_throttle_while_throttled(self):
        """
        Throttle responses received for retries keep the transport
        throttled; each pending message is retried in turn until it
        succeeds.
        """
        transport = yield self.get_transport()
        transport_config = transport.get_static_config()
        msg1 = self.tx_helper.make_outbound('hello world 1')
        msg2 = self.tx_helper.make_outbound('hello world 2')
        yield self.tx_helper.dispatch_outbound(msg1)
        yield self.tx_helper.dispatch_outbound(msg2)
        [ssm_pdu1, ssm_pdu2] = yield self.fake_smsc.await_pdus(2)
        # Both messages get throttled.
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu1),
                         message_id='foo1', command_status='ESME_RTHROTTLED'))
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu2),
                         message_id='foo2', command_status='ESME_RTHROTTLED'))
        # Advance clock, still throttled.
        self.clock.advance(transport_config.throttle_delay)
        ssm_pdu1_retry1 = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu1_retry1),
                         message_id='bar1',
                         command_status='ESME_RTHROTTLED'))
        # Advance clock, message no longer throttled.
        self.clock.advance(transport_config.throttle_delay)
        ssm_pdu2_retry1 = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu2_retry1),
                         message_id='bar2',
                         command_status='ESME_ROK'))
        # Prod clock, message no longer throttled.
        self.clock.advance(0)
        ssm_pdu1_retry2 = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu1_retry2),
                         message_id='baz1',
                         command_status='ESME_ROK'))
        self.assertEqual(short_message(ssm_pdu1), 'hello world 1')
        self.assertEqual(short_message(ssm_pdu2), 'hello world 2')
        self.assertEqual(short_message(ssm_pdu1_retry1), 'hello world 1')
        self.assertEqual(short_message(ssm_pdu2_retry1), 'hello world 2')
        self.assertEqual(short_message(ssm_pdu1_retry2), 'hello world 1')
        # msg2's ack arrives before msg1's, since msg1 was retried twice.
        [event2, event1] = yield self.tx_helper.wait_for_dispatched_events(2)
        self.assertEqual(event1['event_type'], 'ack')
        self.assertEqual(event1['user_message_id'], msg1['message_id'])
        self.assertEqual(event2['event_type'], 'ack')
        self.assertEqual(event2['user_message_id'], msg2['message_id'])
    @inlineCallbacks
    def test_mt_sms_reconnect_while_throttled(self):
        """
        If we reconnect while throttled, we don't try to unthrottle before the
        connection is in a suitable state.
        """
        transport = yield self.get_transport(bind=False)
        yield self.fake_smsc.bind()
        transport_config = transport.get_static_config()
        msg = self.tx_helper.make_outbound('hello world')
        yield self.tx_helper.dispatch_outbound(msg)
        ssm_pdu = yield self.fake_smsc.await_pdu()
        # Throttle the message to start the retry cycle.
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu),
                         message_id='foo1',
                         command_status='ESME_RTHROTTLED'))
        # Drop SMPP connection and check throttling.
        yield self.fake_smsc.disconnect()
        with LogCatcher(message="Can't check throttling while unbound") as lc:
            self.clock.advance(transport_config.throttle_delay)
        [logmsg] = lc.logs
        self.assertEqual(
            logmsg["message"][0],
            "Can't check throttling while unbound, trying later.")
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        # Fast-forward to reconnect (but don't bind) and check throttling.
        self.clock.advance(transport.service.delay)
        bind_pdu = yield self.fake_smsc.await_pdu()
        self.assertTrue(
            bind_pdu["header"]["command_id"].startswith("bind_"))
        with LogCatcher(message="Can't check throttling while unbound") as lc:
            self.clock.advance(transport_config.throttle_delay)
        [logmsg] = lc.logs
        self.assertEqual(
            logmsg["message"][0],
            "Can't check throttling while unbound, trying later.")
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        # Bind and check throttling.
        yield self.fake_smsc.bind(bind_pdu)
        with LogCatcher(message="Can't check throttling while unbound") as lc:
            self.clock.advance(transport_config.throttle_delay)
        # Bound now, so the throttle check proceeds without complaint.
        self.assertEqual(lc.logs, [])
        ssm_pdu_retry = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(ssm_pdu_retry),
                         message_id='foo',
                         command_status='ESME_ROK'))
        self.assertEqual(short_message(ssm_pdu), 'hello world')
        self.assertEqual(short_message(ssm_pdu_retry), 'hello world')
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
    @inlineCallbacks
    def test_mt_sms_tps_limits(self):
        """
        When the mt_tps limit is reached, further sends are held back until
        the next one-second tick, at which point the transport unthrottles.
        """
        transport = yield self.get_transport({'mt_tps': 2})
        with LogCatcher(message="Throttling outbound messages.") as lc:
            yield self.tx_helper.make_dispatch_outbound('hello world 1')
            yield self.tx_helper.make_dispatch_outbound('hello world 2')
            # Third message exceeds the 2-per-second limit; its dispatch
            # Deferred stays unresolved for now.
            msg3_d = self.tx_helper.make_dispatch_outbound('hello world 3')
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        self.assertTrue(transport.throttled)
        # Only the first two messages went to the SMSC.
        [submit_sm_pdu1, submit_sm_pdu2] = yield self.fake_smsc.await_pdus(2)
        self.assertEqual(short_message(submit_sm_pdu1), 'hello world 1')
        self.assertEqual(short_message(submit_sm_pdu2), 'hello world 2')
        self.assertNoResult(msg3_d)
        with LogCatcher(message="No longer throttling outbound") as lc:
            self.clock.advance(1)
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        self.assertFalse(transport.throttled)
        yield msg3_d
        submit_sm_pdu3 = yield self.fake_smsc.await_pdu()
        self.assertEqual(short_message(submit_sm_pdu3), 'hello world 3')
    @inlineCallbacks
    def test_mt_sms_tps_limits_multipart(self):
        """
        TPS throttling counts PDUs, but finishes sending the current message.
        """
        transport = yield self.get_transport({
            'mt_tps': 3,
            'submit_short_message_processor_config': {
                'send_multipart_udh': True,
            },
        })
        self.assertEqual(transport.throttled, False)
        with LogCatcher(message="Throttling outbound messages.") as lc:
            # Each message splits into two PDUs, so the second message
            # crosses the 3-PDU limit but is still sent in full.
            yield self.tx_helper.make_dispatch_outbound('1' * 200 + 'a')
            yield self.tx_helper.make_dispatch_outbound('2' * 200 + 'b')
            msg3_d = self.tx_helper.make_dispatch_outbound('3' * 200 + 'c')
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        self.assertEqual(transport.throttled, True)
        # All four PDUs of the first two messages went out.
        [pdu1_1, pdu1_2, pdu2_1, pdu2_2] = yield self.fake_smsc.await_pdus(4)
        self.assertEqual(short_message(pdu1_1)[-5:], '11111')
        self.assertEqual(short_message(pdu1_2)[-5:], '1111a')
        self.assertEqual(short_message(pdu2_1)[-5:], '22222')
        self.assertEqual(short_message(pdu2_2)[-5:], '2222b')
        self.assertNoResult(msg3_d)
        with LogCatcher(message="No longer throttling outbound") as lc:
            self.clock.advance(1)
        [logmsg] = lc.logs
        self.assertEqual(logmsg['logLevel'], logging.INFO)
        self.assertEqual(transport.throttled, False)
        for l in lc.logs:
            self.assertEqual(l['system'], 'sphex')
        yield msg3_d
        [pdu3_1, pdu3_2] = yield self.fake_smsc.await_pdus(2)
        self.assertEqual(short_message(pdu3_1)[-5:], '33333')
        self.assertEqual(short_message(pdu3_2)[-5:], '3333c')
@inlineCallbacks
def test_mt_sms_reconnect_while_tps_throttled(self):
"""
If we reconnect while throttled due to the tps limit, we don't try to
unthrottle before the connection is in a suitable state.
"""
transport = yield self.get_transport({'mt_tps': 2})
with LogCatcher(message="Throttling outbound messages.") as lc:
yield self.tx_helper.make_dispatch_outbound('hello world 1')
yield self.tx_helper.make_dispatch_outbound('hello world 2')
msg3_d = self.tx_helper.make_dispatch_outbound('hello world 3')
[logmsg] = lc.logs
self.assertEqual(logmsg['logLevel'], logging.INFO)
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
self.assertTrue(transport.throttled)
[submit_sm_pdu1, submit_sm_pdu2] = yield self.fake_smsc.await_pdus(2)
self.assertEqual(short_message(submit_sm_pdu1), 'hello world 1')
self.assertEqual(short_message(submit_sm_pdu2), 'hello world 2')
self.assertNoResult(msg3_d)
# Drop SMPP connection and check throttling.
yield self.fake_smsc.disconnect()
with LogCatcher(message="Can't stop throttling while unbound") as lc:
self.clock.advance(1)
[logmsg] = lc.logs
self.assertEqual(logmsg['logLevel'], logging.INFO)
self.assertTrue(transport.throttled)
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
# Fast-forward to reconnect (but don't bind) and check throttling.
self.clock.advance(transport.service.delay)
bind_pdu = yield self.fake_smsc.await_pdu()
self.assertTrue(
bind_pdu["header"]["command_id"].startswith("bind_"))
with LogCatcher(message="Can't stop throttling while unbound") as lc:
self.clock.advance(1)
[logmsg] = lc.logs
self.assertEqual(logmsg['logLevel'], logging.INFO)
self.assertTrue(transport.throttled)
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
# Bind and check throttling.
yield self.fake_smsc.bind(bind_pdu)
with LogCatcher(message="No longer throttling outbound") as lc:
self.clock.advance(1)
[logmsg] = lc.logs
self.assertEqual(logmsg['logLevel'], logging.INFO)
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
self.assertFalse(transport.throttled)
submit_sm_pdu2 = yield self.fake_smsc.await_pdu()
self.assertEqual(short_message(submit_sm_pdu2), 'hello world 3')
    @inlineCallbacks
    def test_mt_sms_queue_full(self):
        """
        ESME_RMSGQFUL is handled like throttling: the message is retried
        after throttle_delay and acked when the retry succeeds.
        """
        transport = yield self.get_transport()
        transport_config = transport.get_static_config()
        msg = self.tx_helper.make_outbound('hello world')
        yield self.tx_helper.dispatch_outbound(msg)
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
                         message_id='foo',
                         command_status='ESME_RMSGQFUL'))
        # After the delay, the message is retried and accepted.
        self.clock.advance(transport_config.throttle_delay)
        submit_sm_pdu_retry = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu_retry),
                         message_id='bar',
                         command_status='ESME_ROK'))
        # The retry is a fresh PDU with the same content.
        self.assertTrue(seq_no(submit_sm_pdu_retry) > seq_no(submit_sm_pdu))
        self.assertEqual(short_message(submit_sm_pdu), 'hello world')
        self.assertEqual(short_message(submit_sm_pdu_retry), 'hello world')
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
@inlineCallbacks
def test_mt_sms_remote_id_stored_only_on_rok(self):
transport = yield self.get_transport()
yield self.tx_helper.make_dispatch_outbound("msg1")
submit_sm1 = yield self.fake_smsc.await_pdu()
response = SubmitSMResp(
seq_no(submit_sm1), "remote_1", command_status="ESME_RSUBMITFAIL")
self.fake_smsc.send_pdu(response)
yield self.tx_helper.make_dispatch_outbound("msg2")
submit_sm2 = yield self.fake_smsc.await_pdu()
response = SubmitSMResp(
seq_no(submit_sm2), "remote_2", command_status="ESME_ROK")
self.fake_smsc.send_pdu(response)
yield self.tx_helper.wait_for_dispatched_events(2)
self.assertFalse(
(yield transport.redis.exists(remote_message_key('remote_1'))))
self.assertTrue(
(yield transport.redis.exists(remote_message_key('remote_2'))))
@inlineCallbacks
def test_mt_sms_unicode(self):
yield self.get_transport()
msg = self.tx_helper.make_outbound(u'Zoë')
yield self.tx_helper.dispatch_outbound(msg)
pdu = yield self.fake_smsc.await_pdu()
self.assertEqual(command_id(pdu), 'submit_sm')
self.assertEqual(short_message(pdu), 'Zo\xc3\xab')
@inlineCallbacks
def test_mt_sms_multipart_long(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'send_long_messages': True,
}
})
# SMPP specifies that messages longer than 254 bytes should
# be put in the message_payload field using TLVs
content = '1' * 255
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
submit_sm = yield self.fake_smsc.await_pdu()
self.assertEqual(pdu_tlv(submit_sm, 'message_payload').decode('hex'),
content)
    @inlineCallbacks
    def test_mt_sms_multipart_udh(self):
        """
        Sufficiently long messages are split into multiple PDUs with a UDH at
        the front of each.
        """
        transport = yield self.get_transport({
            'submit_short_message_processor_config': {
                'send_multipart_udh': True,
            }
        })
        content = '1' * 161
        msg = self.tx_helper.make_outbound(content)
        yield self.tx_helper.dispatch_outbound(msg)
        [submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
        # esm_class 0x40 flags the presence of a UDH in the message body.
        self.assertEqual(
            submit_sm1["body"]["mandatory_parameters"]["esm_class"], 0x40)
        self.assertEqual(
            submit_sm2["body"]["mandatory_parameters"]["esm_class"], 0x40)
        # UDH octets: header length, IE tag, IE length, message reference,
        # total parts, this part's sequence number.
        udh_hlen, udh_tag, udh_len, udh_ref, udh_tot, udh_seq = [
            ord(octet) for octet in short_message(submit_sm1)[:6]]
        self.assertEqual(5, udh_hlen)
        self.assertEqual(0, udh_tag)
        self.assertEqual(3, udh_len)
        self.assertEqual(udh_tot, 2)
        self.assertEqual(udh_seq, 1)
        # The second part shares the same message reference number.
        _, _, _, ref_to_udh_ref, _, udh_seq = [
            ord(octet) for octet in short_message(submit_sm2)[:6]]
        self.assertEqual(ref_to_udh_ref, udh_ref)
        self.assertEqual(udh_seq, 2)
        # Our multipart_info Redis hash should contain the number of parts and
        # have an appropriate TTL.
        mstash = transport.message_stash
        multipart_info = yield mstash.get_multipart_info(msg['message_id'])
        self.assertEqual(multipart_info, {"parts": "2"})
        mpi_ttl = yield mstash.redis.ttl(multipart_info_key(msg['message_id']))
        self.assertTrue(
            mpi_ttl <= mstash.config.submit_sm_expiry,
            "mpi_ttl (%s) > submit_sm_expiry (%s)" % (
                mpi_ttl, mstash.config.submit_sm_expiry))
@inlineCallbacks
def test_mt_sms_multipart_udh_one_part(self):
"""
Messages that fit in a single part should not have a UDH.
"""
yield self.get_transport({
'submit_short_message_processor_config': {
'send_multipart_udh': True,
}
})
content = "1" * 158
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
submit_sm = yield self.fake_smsc.await_pdu()
self.assertEqual(
submit_sm["body"]["mandatory_parameters"]["esm_class"], 0)
self.assertEqual(short_message(submit_sm), "1" * 158)
@inlineCallbacks
def test_mt_sms_multipart_sar(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'send_multipart_sar': True,
}
})
content = '1' * 161
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
[submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
ref_num = pdu_tlv(submit_sm1, 'sar_msg_ref_num')
self.assertEqual(pdu_tlv(submit_sm1, 'sar_total_segments'), 2)
self.assertEqual(pdu_tlv(submit_sm1, 'sar_segment_seqnum'), 1)
self.assertEqual(pdu_tlv(submit_sm2, 'sar_msg_ref_num'), ref_num)
self.assertEqual(pdu_tlv(submit_sm2, 'sar_total_segments'), 2)
self.assertEqual(pdu_tlv(submit_sm2, 'sar_segment_seqnum'), 2)
@inlineCallbacks
def test_mt_sms_multipart_sar_one_part(self):
"""
Messages that fit in a single part should not have SAR params set.
"""
yield self.get_transport({
'submit_short_message_processor_config': {
'send_multipart_sar': True,
}
})
content = '1' * 158
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
submit_sm = yield self.fake_smsc.await_pdu()
self.assertEqual(unpacked_pdu_opts(submit_sm), {})
self.assertEqual(short_message(submit_sm), "1" * 158)
    @inlineCallbacks
    def test_mt_sms_multipart_ack(self):
        """
        When all PDUs of a multipart message have been successfully
        acknowledged, we clean up the relevant transient state and send an ack.
        """
        transport = yield self.get_transport({
            'submit_short_message_processor_config': {
                'send_multipart_udh': True,
            }
        })
        content = '1' * 161
        msg = self.tx_helper.make_outbound(content)
        yield self.tx_helper.dispatch_outbound(msg)
        [submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
        # Our multipart_info Redis hash should contain the number of parts and
        # have an appropriate TTL.
        mstash = transport.message_stash
        multipart_info = yield mstash.get_multipart_info(msg['message_id'])
        self.assertEqual(multipart_info, {"parts": "2"})
        mpi_ttl = yield mstash.redis.ttl(multipart_info_key(msg['message_id']))
        self.assertTrue(
            mpi_ttl <= mstash.config.submit_sm_expiry,
            "mpi_ttl (%s) > submit_sm_expiry (%s)" % (
                mpi_ttl, mstash.config.submit_sm_expiry))
        # We get one response per PDU, so we only send the ack after receiving
        # both responses.
        self.fake_smsc.send_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm1), message_id='foo'))
        self.fake_smsc.send_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm2), message_id='bar'))
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        # sent_message_id joins the remote ids of all parts with commas.
        self.assertEqual(event['sent_message_id'], 'bar,foo')
        # After all parts are acknowledged, our multipart_info hash should have
        # the details of the responses and a much shorter TTL.
        mstash = transport.message_stash
        multipart_info = yield mstash.get_multipart_info(msg['message_id'])
        self.assertEqual(multipart_info, {
            "parts": "2",
            "event_counter": "2",
            "part:foo": "ack",
            "part:bar": "ack",
        })
        mpi_ttl = yield mstash.redis.ttl(multipart_info_key(msg['message_id']))
        self.assertTrue(
            mpi_ttl <= mstash.config.completed_multipart_info_expiry,
            "mpi_ttl (%s) > completed_multipart_info_expiry (%s)" % (
                mpi_ttl, mstash.config.completed_multipart_info_expiry))
    @inlineCallbacks
    def test_mt_sms_multipart_fail_first_part(self):
        """
        When all PDUs of a multipart message have been acknowledged and at
        least one of them failed, we clean up the relevant transient state and
        send a nack.
        """
        transport = yield self.get_transport({
            'submit_short_message_processor_config': {
                'send_multipart_udh': True,
            }
        })
        content = '1' * 161
        msg = self.tx_helper.make_outbound(content)
        yield self.tx_helper.dispatch_outbound(msg)
        [submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
        # Our multipart_info Redis hash should contain the number of parts and
        # have an appropriate TTL.
        mstash = transport.message_stash
        multipart_info = yield mstash.get_multipart_info(msg['message_id'])
        self.assertEqual(multipart_info, {"parts": "2"})
        mpi_ttl = yield mstash.redis.ttl(multipart_info_key(msg['message_id']))
        self.assertTrue(
            mpi_ttl <= mstash.config.submit_sm_expiry,
            "mpi_ttl (%s) > submit_sm_expiry (%s)" % (
                mpi_ttl, mstash.config.submit_sm_expiry))
        # We get one response per PDU, so we only send the nack after receiving
        # both responses. Here the *first* part fails.
        self.fake_smsc.send_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm1),
                         message_id='foo', command_status='ESME_RSUBMITFAIL'))
        self.fake_smsc.send_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm2), message_id='bar'))
        [event] = yield self.tx_helper.wait_for_dispatched_events(1)
        self.assertEqual(event['event_type'], 'nack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        # After all parts are acknowledged, our multipart_info hash should have
        # the details of the responses and a much shorter TTL.
        mstash = transport.message_stash
        multipart_info = yield mstash.get_multipart_info(msg['message_id'])
        self.assertEqual(multipart_info, {
            "parts": "2",
            "event_counter": "2",
            "part:foo": "fail",
            "part:bar": "ack",
            "event_result": "fail",
        })
        mpi_ttl = yield mstash.redis.ttl(multipart_info_key(msg['message_id']))
        self.assertTrue(
            mpi_ttl <= mstash.config.completed_multipart_info_expiry,
            "mpi_ttl (%s) > completed_multipart_info_expiry (%s)" % (
                mpi_ttl, mstash.config.completed_multipart_info_expiry))
@inlineCallbacks
def test_mt_sms_multipart_fail_second_part(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'send_multipart_udh': True,
}
})
content = '1' * 161
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
[submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm1), message_id='foo'))
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm2),
message_id='bar', command_status='ESME_RSUBMITFAIL'))
[event] = yield self.tx_helper.wait_for_dispatched_events(1)
self.assertEqual(event['event_type'], 'nack')
self.assertEqual(event['user_message_id'], msg['message_id'])
@inlineCallbacks
def test_mt_sms_multipart_fail_no_remote_id(self):
yield self.get_transport({
'submit_short_message_processor_config': {
'send_multipart_udh': True,
}
})
content = '1' * 161
msg = self.tx_helper.make_outbound(content)
yield self.tx_helper.dispatch_outbound(msg)
[submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm1),
message_id='', command_status='ESME_RINVDSTADR'))
self.fake_smsc.send_pdu(
SubmitSMResp(sequence_number=seq_no(submit_sm2),
message_id='', command_status='ESME_RINVDSTADR'))
[event] = yield self.tx_helper.wait_for_dispatched_events(1)
self.assertEqual(event['event_type'], 'nack')
self.assertEqual(event['user_message_id'], msg['message_id'])
    @inlineCallbacks
    def test_message_persistence(self):
        """
        Cached messages are stored in redis with a TTL bounded by
        submit_sm_expiry, can be retrieved intact, and can be deleted.
        """
        transport = yield self.get_transport()
        message_stash = transport.message_stash
        config = transport.get_static_config()
        msg = self.tx_helper.make_outbound("hello world")
        yield message_stash.cache_message(msg)
        ttl = yield transport.redis.ttl(message_key(msg['message_id']))
        self.assertTrue(0 < ttl <= config.submit_sm_expiry)
        retrieved_msg = yield message_stash.get_cached_message(
            msg['message_id'])
        self.assertEqual(msg, retrieved_msg)
        # After deletion the lookup returns None.
        yield message_stash.delete_cached_message(msg['message_id'])
        self.assertEqual(
            (yield message_stash.get_cached_message(msg['message_id'])),
            None)
    @inlineCallbacks
    def test_message_clearing(self):
        """
        A successful submit_sm_resp removes the cached copy of the
        corresponding outbound message.
        """
        transport = yield self.get_transport()
        message_stash = transport.message_stash
        msg = self.tx_helper.make_outbound('hello world')
        yield message_stash.set_sequence_number_message_id(
            3, msg['message_id'])
        yield message_stash.cache_message(msg)
        yield self.fake_smsc.handle_pdu(SubmitSMResp(
            sequence_number=3, message_id='foo', command_status='ESME_ROK'))
        self.assertEqual(
            None,
            (yield message_stash.get_cached_message(msg['message_id'])))
    @inlineCallbacks
    def test_sequence_number_persistence(self):
        """
        We create sequence_number to message_id mappings with an appropriate
        TTL and can delete them when we're done.
        """
        transport = yield self.get_transport()
        message_stash = transport.message_stash
        config = transport.get_static_config()
        yield message_stash.set_sequence_number_message_id(12, "abc")
        # The mapping expires no later than a cached submit_sm would.
        ttl = yield transport.redis.ttl(sequence_number_key(12))
        self.assertTrue(0 < ttl <= config.submit_sm_expiry)
        message_id = yield message_stash.get_sequence_number_message_id(12)
        self.assertEqual(message_id, "abc")
        yield message_stash.delete_sequence_number_message_id(12)
        message_id = yield message_stash.get_sequence_number_message_id(12)
        self.assertEqual(message_id, None)
    @inlineCallbacks
    def test_sequence_number_clearing(self):
        """
        When we finish processing a PDU response, the mapping gets deleted.
        """
        transport = yield self.get_transport()
        message_stash = transport.message_stash
        yield message_stash.set_sequence_number_message_id(37, "def")
        message_id = yield message_stash.get_sequence_number_message_id(37)
        self.assertEqual(message_id, "def")
        # Processing the response consumes (deletes) the mapping.
        yield self.fake_smsc.handle_pdu(SubmitSMResp(
            sequence_number=37, message_id='foo', command_status='ESME_ROK'))
        message_id = yield message_stash.get_sequence_number_message_id(37)
        self.assertEqual(message_id, None)
    @inlineCallbacks
    def test_link_remote_message_id(self):
        """
        A submit_sm_resp links the SMSC's remote message id to our
        internal message id, with a TTL of third_party_id_expiry.
        """
        transport = yield self.get_transport()
        config = transport.get_static_config()
        msg = self.tx_helper.make_outbound('hello world')
        yield self.tx_helper.dispatch_outbound(msg)
        pdu = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(pdu),
                         message_id='foo',
                         command_status='ESME_ROK'))
        self.assertEqual(
            msg['message_id'],
            (yield transport.message_stash.get_internal_message_id('foo')))
        ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(0 < ttl <= config.third_party_id_expiry)
    @inlineCallbacks
    def test_out_of_order_responses(self):
        """
        Acks are correlated by sequence number, so responses arriving
        out of order are still matched to the right messages.
        """
        yield self.get_transport()
        yield self.tx_helper.make_dispatch_outbound("msg 1", message_id='444')
        submit_sm1 = yield self.fake_smsc.await_pdu()
        response1 = SubmitSMResp(seq_no(submit_sm1), "3rd_party_id_1")
        yield self.tx_helper.make_dispatch_outbound("msg 2", message_id='445')
        submit_sm2 = yield self.fake_smsc.await_pdu()
        response2 = SubmitSMResp(seq_no(submit_sm2), "3rd_party_id_2")
        # respond out of order - just to keep things interesting
        yield self.fake_smsc.handle_pdu(response2)
        yield self.fake_smsc.handle_pdu(response1)
        [ack1, ack2] = yield self.tx_helper.wait_for_dispatched_events(2)
        self.assertEqual(ack1['user_message_id'], '445')
        self.assertEqual(ack1['sent_message_id'], '3rd_party_id_2')
        self.assertEqual(ack2['user_message_id'], '444')
        self.assertEqual(ack2['sent_message_id'], '3rd_party_id_1')
    @inlineCallbacks
    def test_delivery_report_for_unknown_message(self):
        """
        A delivery report for a remote id we have no mapping for is
        discarded with a logged warning.
        """
        dr = self.DR_TEMPLATE % ('foo',)
        deliver = DeliverSM(1, short_message=dr, esm_class=4)
        yield self.get_transport()
        with LogCatcher(message="Failed to retrieve message id") as lc:
            yield self.fake_smsc.handle_pdu(deliver)
        [warning] = lc.logs
        # LogCatcher stores the log message as a tuple.
        self.assertEqual(warning['message'],
                         ("Failed to retrieve message id for delivery "
                          "report. Delivery report from %s "
                          "discarded." % self.tx_helper.transport_name,))
    @inlineCallbacks
    def test_delivery_report_delivered_delete_stored_remote_id(self):
        """
        A final 'delivered' delivery report shortens the stored remote
        id mapping's TTL to final_dr_third_party_id_expiry.
        """
        transport = yield self.get_transport({
            'final_dr_third_party_id_expiry': 23,
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl > 23,
            "remote_id_ttl (%s) <= final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        # message_state 2 == DELIVERED.
        pdu = DeliverSM(sequence_number=1, esm_class=4)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        pdu.add_optional_parameter('message_state', 2)
        yield self.fake_smsc.handle_pdu(pdu)
        [dr] = yield self.tx_helper.wait_for_dispatched_events(1)
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl <= 23,
            "remote_id_ttl (%s) > final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        self.assertEqual(dr['event_type'], u'delivery_report')
        self.assertEqual(dr['delivery_status'], u'delivered')
        self.assertEqual(dr['transport_metadata'], {
            u'smpp_delivery_status': u'DELIVERED',
        })
    @inlineCallbacks
    def test_delivery_report_failed_delete_stored_remote_id(self):
        """
        A final 'failed' delivery report shortens the stored remote id
        mapping's TTL to final_dr_third_party_id_expiry.
        """
        transport = yield self.get_transport({
            'final_dr_third_party_id_expiry': 23,
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl > 23,
            "remote_id_ttl (%s) <= final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        # message_state 8 == REJECTED, reported as a 'failed' status.
        pdu = DeliverSM(sequence_number=1, esm_class=4)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        pdu.add_optional_parameter('message_state', 8)
        yield self.fake_smsc.handle_pdu(pdu)
        [dr] = yield self.tx_helper.wait_for_dispatched_events(1)
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl <= 23,
            "remote_id_ttl (%s) > final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        self.assertEqual(dr['event_type'], u'delivery_report')
        self.assertEqual(dr['delivery_status'], u'failed')
        self.assertEqual(dr['transport_metadata'], {
            u'smpp_delivery_status': u'REJECTED',
        })
    @inlineCallbacks
    def test_delivery_report_pending_keep_stored_remote_id(self):
        """
        A non-final ('pending') delivery report leaves the stored remote
        id mapping's TTL untouched.
        """
        transport = yield self.get_transport({
            'final_dr_third_party_id_expiry': 23,
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl > 23,
            "remote_id_ttl (%s) <= final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        # message_state 1 == ENROUTE, a non-final state.
        pdu = DeliverSM(sequence_number=1, esm_class=4)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        pdu.add_optional_parameter('message_state', 1)
        yield self.fake_smsc.handle_pdu(pdu)
        [dr] = yield self.tx_helper.wait_for_dispatched_events(1)
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl > 23,
            "remote_id_ttl (%s) <= final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        self.assertEqual(dr['event_type'], u'delivery_report')
        self.assertEqual(dr['delivery_status'], u'pending')
        self.assertEqual(dr['transport_metadata'], {
            u'smpp_delivery_status': u'ENROUTE',
        })
    @inlineCallbacks
    def test_disable_delivery_report_delivered_delete_stored_remote_id(self):
        """
        With delivery reports disabled, no event is published but a
        final report still shortens the remote id mapping's TTL.
        """
        transport = yield self.get_transport({
            'final_dr_third_party_id_expiry': 23,
            'disable_delivery_report': True,
        })
        yield transport.message_stash.set_remote_message_id('bar', 'foo')
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl > 23,
            "remote_id_ttl (%s) <= final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
        # message_state 2 == DELIVERED.
        pdu = DeliverSM(sequence_number=1, esm_class=4)
        pdu.add_optional_parameter('receipted_message_id', 'foo')
        pdu.add_optional_parameter('message_state', 2)
        yield self.fake_smsc.handle_pdu(pdu)
        yield self.fake_smsc.await_pdu()
        yield self.assert_no_events()
        remote_id_ttl = yield transport.redis.ttl(remote_message_key('foo'))
        self.assertTrue(
            remote_id_ttl <= 23,
            "remote_id_ttl (%s) > final_dr_third_party_id_expiry (23)"
            % (remote_id_ttl,))
    @inlineCallbacks
    def test_reconnect(self):
        """
        The outbound consumer is paused whenever the transport is not
        bound, and resumed once a bind completes.
        """
        transport = yield self.get_transport(bind=False)
        connector = transport.connectors[transport.transport_name]
        # Unbound and disconnected.
        self.assertEqual(connector._consumers['outbound'].paused, True)
        # Connect and bind.
        yield self.fake_smsc.bind()
        self.assertEqual(connector._consumers['outbound'].paused, False)
        # Disconnect.
        yield self.fake_smsc.disconnect()
        self.assertEqual(connector._consumers['outbound'].paused, True)
        # Wait for reconnect, but don't bind.
        self.clock.advance(transport.service.delay)
        yield self.fake_smsc.await_connected()
        self.assertEqual(connector._consumers['outbound'].paused, True)
        # Bind.
        yield self.fake_smsc.bind()
        self.assertEqual(connector._consumers['outbound'].paused, False)
    @inlineCallbacks
    def test_bind_params(self):
        """
        Configured bind parameters are passed through verbatim in the
        bind PDU's mandatory parameters.
        """
        yield self.get_transport({
            'system_id': 'myusername',
            'password': '<PASSWORD>',
            'system_type': 'SMPP',
            'interface_version': '33',
            'address_range': '*12345',
        }, bind=False)
        bind_pdu = yield self.fake_smsc.await_pdu()
        # This test runs for multiple bind types, so we only assert on the
        # common prefix of the command.
        self.assertEqual(bind_pdu['header']['command_id'][:5], 'bind_')
        self.assertEqual(bind_pdu['body'], {'mandatory_parameters': {
            'system_id': 'myusername',
            'password': '<PASSWORD>',
            'system_type': 'SMPP',
            'interface_version': '33',
            'address_range': '*12345',
            'addr_ton': 'unknown',
            'addr_npi': 'unknown',
        }})
@inlineCallbacks
def test_bind_params_long_password(self):
lc = LogCatcher(message="Password longer than 8 characters,")
with lc:
yield self.get_transport({
'worker_name': 'sphex',
'system_id': 'myusername',
'password': '<PASSWORD>',
'system_type': 'SMPP',
'interface_version': '33',
'address_range': '*12345',
}, bind=False)
bind_pdu = yield self.fake_smsc.await_pdu()
# This test runs for multiple bind types, so we only assert on the
# common prefix of the command.
self.assertEqual(bind_pdu['header']['command_id'][:5], 'bind_')
self.assertEqual(bind_pdu['body'], {'mandatory_parameters': {
'system_id': 'myusername',
'password': '<PASSWORD>',
'system_type': 'SMPP',
'interface_version': '33',
'address_range': '*12345',
'addr_ton': 'unknown',
'addr_npi': 'unknown',
}})
# Check that the truncation was logged.
[warning] = lc.logs
expected_msg = "Password longer than 8 characters, truncating."
self.assertEqual(warning['message'], (expected_msg,))
for l in lc.logs:
self.assertEqual(l['system'], 'sphex')
    @inlineCallbacks
    def test_default_bind_params(self):
        """
        With no bind parameters configured, the bind PDU carries the
        documented defaults.
        """
        yield self.get_transport(bind=False)
        bind_pdu = yield self.fake_smsc.await_pdu()
        # This test runs for multiple bind types, so we only assert on the
        # common prefix of the command.
        self.assertEqual(bind_pdu['header']['command_id'][:5], 'bind_')
        self.assertEqual(bind_pdu['body'], {'mandatory_parameters': {
            'system_id': 'foo',  # Mandatory param, defaulted by helper.
            'password': '<PASSWORD>',  # Mandatory param, defaulted by helper.
            'system_type': '',
            'interface_version': '34',
            'address_range': '',
            'addr_ton': 'unknown',
            'addr_npi': 'unknown',
        }})
    @inlineCallbacks
    def test_startup_with_backlog(self):
        """
        Messages queued while disconnected are sent in order once the
        transport connects and binds.
        """
        yield self.get_transport(bind=False)
        # Disconnected.
        for i in range(2):
            msg = self.tx_helper.make_outbound('hello world %s' % (i,))
            yield self.tx_helper.dispatch_outbound(msg)
        # Connect and bind.
        yield self.fake_smsc.bind()
        [submit_sm1, submit_sm2] = yield self.fake_smsc.await_pdus(2)
        self.assertEqual(short_message(submit_sm1), 'hello world 0')
        self.assertEqual(short_message(submit_sm2), 'hello world 1')
    @inlineCallbacks
    def test_starting_status(self):
        """
        The SMPP bind process emits three status events:
        starting -> binding -> bound.
        """
        yield self.get_transport({'publish_status': True})
        msgs = yield self.tx_helper.wait_for_dispatched_statuses()
        [msg_starting, msg_binding, msg_bound] = msgs
        self.assertEqual(msg_starting['status'], 'down')
        self.assertEqual(msg_starting['component'], 'smpp')
        self.assertEqual(msg_starting['type'], 'starting')
        self.assertEqual(msg_starting['message'], 'Starting')
        self.assertEqual(msg_binding['status'], 'down')
        self.assertEqual(msg_binding['component'], 'smpp')
        self.assertEqual(msg_binding['type'], 'binding')
        self.assertEqual(msg_binding['message'], 'Binding')
        self.assertEqual(msg_bound['status'], 'ok')
        self.assertEqual(msg_bound['component'], 'smpp')
        self.assertEqual(msg_bound['type'], 'bound')
        self.assertEqual(msg_bound['message'], 'Bound')
    @inlineCallbacks
    def test_connect_status(self):
        """
        A reconnect after a dropped connection publishes a 'binding'
        status event.
        """
        transport = yield self.get_transport(
            {'publish_status': True}, bind=False)
        # disconnect
        yield self.fake_smsc.disconnect()
        self.tx_helper.clear_dispatched_statuses()
        # reconnect
        self.clock.advance(transport.service.delay)
        yield self.fake_smsc.await_connected()
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'down')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'binding')
        self.assertEqual(msg['message'], 'Binding')
    @inlineCallbacks
    def test_unbinding_status(self):
        """
        An explicit unbind publishes a 'down'/'unbinding' status event.
        """
        transport = yield self.get_transport({'publish_status': True})
        self.tx_helper.clear_dispatched_statuses()
        yield transport.service.get_protocol().unbind()
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'down')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'unbinding')
        self.assertEqual(msg['message'], 'Unbinding')
    @inlineCallbacks
    def test_bind_status(self):
        """
        A successful bind publishes an 'ok'/'bound' status event.
        """
        yield self.get_transport({'publish_status': True}, bind=False)
        self.tx_helper.clear_dispatched_statuses()
        yield self.fake_smsc.bind()
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'ok')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'bound')
        self.assertEqual(msg['message'], 'Bound')
    @inlineCallbacks
    def test_bind_timeout_status(self):
        """
        If no bind response arrives within smpp_bind_timeout, a
        'bind_timeout' status event is published.
        """
        yield self.get_transport({
            'publish_status': True,
            'smpp_bind_timeout': 3,
        }, bind=False)
        # wait for bind pdu
        yield self.fake_smsc.await_pdu()
        self.tx_helper.clear_dispatched_statuses()
        # Advance past the bind timeout without responding.
        self.clock.advance(3)
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'down')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'bind_timeout')
        self.assertEqual(msg['message'], 'Timed out awaiting bind')
        yield self.fake_smsc.disconnect()
@inlineCallbacks
def test_connection_lost_status(self):
yield self.get_transport({'publish_status': True})
self.tx_helper.clear_dispatched_statuses()
yield self.fake_smsc.disconnect()
[msg] = yield self.tx_helper.wait_for_dispatched_statuses()
self.assertEqual(msg['status'], 'down')
self.assertEqual(msg['status'], 'down')
self.assertEqual(msg['component'], 'smpp')
self.assertEqual(msg['type'], 'connection_lost')
self.assertEqual(
msg['message'],
'Connection was closed cleanly: Connection done.')
    @inlineCallbacks
    def test_smsc_throttle_status(self):
        """
        An ESME_RTHROTTLED response publishes a 'throttled' status; a
        later successful retry publishes 'throttled_end'.
        """
        yield self.get_transport({
            'publish_status': True,
            'throttle_delay': 3
        })
        self.tx_helper.clear_dispatched_statuses()
        msg = self.tx_helper.make_outbound("throttle me")
        yield self.tx_helper.dispatch_outbound(msg)
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
                         message_id='foo',
                         command_status='ESME_RTHROTTLED'))
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'degraded')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'throttled')
        self.assertEqual(msg['message'], 'Throttled')
        self.tx_helper.clear_dispatched_statuses()
        # Advance past the throttle delay so the message is retried.
        self.clock.advance(3)
        submit_sm_pdu_retry = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu_retry),
                         message_id='bar',
                         command_status='ESME_ROK'))
        self.clock.advance(0)
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'ok')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'throttled_end')
        self.assertEqual(msg['message'], 'No longer throttled')
    @inlineCallbacks
    def test_smsc_throttle_reconnect_status(self):
        """
        SMSC throttling survives a reconnect: after binding again the
        transport republishes the 'throttled' status.
        """
        transport = yield self.get_transport({
            'publish_status': True,
        })
        self.tx_helper.clear_dispatched_statuses()
        msg = self.tx_helper.make_outbound("throttle me")
        yield self.tx_helper.dispatch_outbound(msg)
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        yield self.fake_smsc.handle_pdu(
            SubmitSMResp(sequence_number=seq_no(submit_sm_pdu),
                         message_id='foo',
                         command_status='ESME_RTHROTTLED'))
        yield self.fake_smsc.disconnect()
        self.tx_helper.clear_dispatched_statuses()
        self.clock.advance(transport.service.delay)
        yield self.fake_smsc.bind()
        msgs = yield self.tx_helper.wait_for_dispatched_statuses()
        [msg1, msg2, msg3] = msgs
        self.assertEqual(msg1['type'], 'binding')
        self.assertEqual(msg2['type'], 'bound')
        self.assertEqual(msg3['status'], 'degraded')
        self.assertEqual(msg3['component'], 'smpp')
        self.assertEqual(msg3['type'], 'throttled')
        self.assertEqual(msg3['message'], 'Throttled')
    @inlineCallbacks
    def test_tps_throttle_status(self):
        """
        Exceeding the mt_tps limit publishes a 'throttled' status, and
        'throttled_end' follows once the window rolls over.
        """
        yield self.get_transport({
            'publish_status': True,
            'mt_tps': 2
        })
        self.tx_helper.clear_dispatched_statuses()
        yield self.tx_helper.make_dispatch_outbound('hello world 1')
        yield self.tx_helper.make_dispatch_outbound('hello world 2')
        # Third message exceeds mt_tps; deliberately not yielded.
        self.tx_helper.make_dispatch_outbound('hello world 3')
        yield self.fake_smsc.await_pdus(2)
        # We can't wait here because that requires throttling to end.
        [msg] = self.tx_helper.get_dispatched_statuses()
        self.assertEqual(msg['status'], 'degraded')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'throttled')
        self.assertEqual(msg['message'], 'Throttled')
        self.tx_helper.clear_dispatched_statuses()
        self.clock.advance(1)
        [msg] = yield self.tx_helper.wait_for_dispatched_statuses()
        self.assertEqual(msg['status'], 'ok')
        self.assertEqual(msg['component'], 'smpp')
        self.assertEqual(msg['type'], 'throttled_end')
        self.assertEqual(msg['message'], 'No longer throttled')
    @inlineCallbacks
    def test_tps_throttle_reconnect_status(self):
        """
        TPS throttling survives a reconnect: after binding again the
        transport republishes the 'throttled' status.
        """
        transport = yield self.get_transport({
            'publish_status': True,
            'mt_tps': 2
        })
        self.tx_helper.clear_dispatched_statuses()
        yield self.tx_helper.make_dispatch_outbound('hello world 1')
        yield self.tx_helper.make_dispatch_outbound('hello world 2')
        # Third message exceeds mt_tps; deliberately not yielded.
        self.tx_helper.make_dispatch_outbound('hello world 3')
        yield self.fake_smsc.await_pdus(2)
        yield self.fake_smsc.disconnect()
        self.tx_helper.clear_dispatched_statuses()
        self.clock.advance(transport.service.delay)
        yield self.fake_smsc.bind()
        msgs = yield self.tx_helper.wait_for_dispatched_statuses()
        [msg1, msg2, msg3] = msgs
        self.assertEqual(msg1['type'], 'binding')
        self.assertEqual(msg2['type'], 'bound')
        self.assertEqual(msg3['status'], 'degraded')
        self.assertEqual(msg3['component'], 'smpp')
        self.assertEqual(msg3['type'], 'throttled')
        self.assertEqual(msg3['message'], 'Throttled')
class SmppTransmitterTransportTestCase(SmppTransceiverTransportTestCase):
    # Re-runs the full transceiver suite against a transmitter-only bind.
    transport_class = SmppTransmitterTransport
class SmppReceiverTransportTestCase(SmppTransceiverTransportTestCase):
    # Re-runs the full transceiver suite against a receiver-only bind.
    transport_class = SmppReceiverTransport
class SmppTransceiverTransportWithOldConfigTestCase(
        SmppTransceiverTransportTestCase):
    """
    Re-runs the transceiver test suite with the old-style (flat) config
    layout by flattening processor sub-configs in _get_transport_config().
    """
    transport_class = SmppTransceiverTransportWithOldConfig

    def setUp(self):
        self.clock = Clock()
        self.fake_smsc = FakeSMSC()
        self.tx_helper = self.add_helper(TransportHelper(self.transport_class))
        self.default_config = {
            'transport_name': self.tx_helper.transport_name,
            'worker_name': self.tx_helper.transport_name,
            'twisted_endpoint': self.fake_smsc.endpoint,
            'system_id': 'foo',
            'password': '<PASSWORD>',
            'data_coding_overrides': {
                0: 'utf-8',
            }
        }

    def _get_transport_config(self, config):
        """
        The test cases assume the new config, this flattens the
        config key word arguments value to match an old config
        layout without the processor configs.
        """
        cfg = self.default_config.copy()
        # Fixed: work on a copy -- the original popped keys directly off
        # the caller's dict, mutating the argument as a side effect.
        config = dict(config)
        processor_config_keys = [
            'submit_short_message_processor_config',
            'deliver_short_message_processor_config',
            'delivery_report_processor_config',
        ]
        for config_key in processor_config_keys:
            processor_config = config.pop(config_key, {})
            for name, value in processor_config.items():
                cfg[name] = value
        # Update with all remaining (non-processor) config values.
        cfg.update(config)
        return cfg
class TataUssdSmppTransportTestCase(SmppTransportTestCase):
    """
    Tests for USSD-over-SMPP session handling using the ussd_service_op
    and its_session_info optional TLV parameters.
    """
    transport_class = SmppTransceiverTransport

    @inlineCallbacks
    def test_submit_and_deliver_ussd_continue(self):
        """
        its_session_info '0000' maps to a SESSION_RESUME (continue) event
        in both directions.
        """
        yield self.get_transport()
        yield self.tx_helper.make_dispatch_outbound(
            "hello world", transport_type="ussd")
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        self.assertEqual(command_id(submit_sm_pdu), 'submit_sm')
        self.assertEqual(pdu_tlv(submit_sm_pdu, 'ussd_service_op'), '02')
        self.assertEqual(pdu_tlv(submit_sm_pdu, 'its_session_info'), '0000')
        # Server delivers a USSD message to the Client
        pdu = DeliverSM(seq_no(submit_sm_pdu) + 1, short_message="reply!")
        pdu.add_optional_parameter('ussd_service_op', '02')
        pdu.add_optional_parameter('its_session_info', '0000')
        yield self.fake_smsc.handle_pdu(pdu)
        [mess] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(mess['content'], "reply!")
        self.assertEqual(mess['transport_type'], "ussd")
        self.assertEqual(mess['session_event'],
                         TransportUserMessage.SESSION_RESUME)

    @inlineCallbacks
    def test_submit_and_deliver_ussd_close(self):
        """
        its_session_info '0001' maps to a SESSION_CLOSE event in both
        directions.
        """
        yield self.get_transport()
        yield self.tx_helper.make_dispatch_outbound(
            "hello world", transport_type="ussd",
            session_event=TransportUserMessage.SESSION_CLOSE)
        submit_sm_pdu = yield self.fake_smsc.await_pdu()
        self.assertEqual(command_id(submit_sm_pdu), 'submit_sm')
        self.assertEqual(pdu_tlv(submit_sm_pdu, 'ussd_service_op'), '02')
        self.assertEqual(pdu_tlv(submit_sm_pdu, 'its_session_info'), '0001')
        # Server delivers a USSD message to the Client
        pdu = DeliverSM(seq_no(submit_sm_pdu) + 1, short_message="reply!")
        pdu.add_optional_parameter('ussd_service_op', '02')
        pdu.add_optional_parameter('its_session_info', '0001')
        yield self.fake_smsc.handle_pdu(pdu)
        [mess] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(mess['content'], "reply!")
        self.assertEqual(mess['transport_type'], "ussd")
        self.assertEqual(mess['session_event'],
                         TransportUserMessage.SESSION_CLOSE)
class TestSubmitShortMessageProcessorConfig(VumiTestCase):
    """
    Validation tests for SubmitShortMessageProcessor's config class,
    specifically the mutually-exclusive long-message options.
    """

    def get_config(self, config_dict):
        # Build a config instance directly from a plain dict.
        return SubmitShortMessageProcessor.CONFIG_CLASS(config_dict)

    def assert_config_error(self, config_dict):
        # Expect ConfigError and return its message for assertion.
        try:
            self.get_config(config_dict)
            self.fail("ConfigError not raised.")
        except ConfigError as err:
            return err.args[0]

    def test_long_message_params(self):
        # Each option is valid on its own ...
        self.get_config({})
        self.get_config({'send_long_messages': True})
        self.get_config({'send_multipart_sar': True})
        self.get_config({'send_multipart_udh': True})
        # ... but combining them is rejected.
        errmsg = self.assert_config_error({
            'send_long_messages': True,
            'send_multipart_sar': True,
        })
        self.assertEqual(errmsg, (
            "The following parameters are mutually exclusive: "
            "send_long_messages, send_multipart_sar"))
        errmsg = self.assert_config_error({
            'send_long_messages': True,
            'send_multipart_sar': True,
            'send_multipart_udh': True,
        })
        self.assertEqual(errmsg, (
            "The following parameters are mutually exclusive: "
            "send_long_messages, send_multipart_sar, send_multipart_udh"))
|
<reponame>FreeeBird/blog_admin
import request from '../utils/request';
const URL = '/admin/statistics'
/**
 * Fetch overall admin statistics.
 * @returns {AxiosPromise}
 */
export function fetchStat() {
  return request({
    url: URL,
    method: 'GET',
  })
}
/**
 * Fetch daily statistics.
 * (The previous comment incorrectly described this as fetching all
 * categories.)
 * @returns {AxiosPromise}
 */
export function fetchDaily(){
  return request({
    url: URL +'/daily',
    method: 'GET',
  })
}
/**
 * Fetch monthly statistics.
 * @returns {AxiosPromise}
 */
export function fetchMonthly() {
  return request({
    url: URL +'/monthly',
    method: 'GET',
  })
}
/**
 * Fetch weekly statistics.
 * @returns {AxiosPromise}
 */
export function fetchWeekly() {
  return request({
    url: URL +'/weekly',
    method: 'GET',
  })
}
|
<filename>packages/ts-tool-type-starter/src/index.ts
// Smoke-test entry point: confirms the starter package builds and runs.
console.log("ts-tool-type-starter is ready!");
|
#!/bin/sh
# Refresh the MongoDB dump under /data: remove any previous dump and
# create a new one with mongodump (writes to ./dump by default).
#
# Guard the cd: if /data is missing, we must NOT fall through and run
# `rm -rf dump` in whatever directory the script happened to start in.
cd /data || exit 1
rm -rf dump
mongodump
|
from typing import List


def manageDamageSkins(skin_ids: List[int]) -> None:
    """
    Adds the specified damage skins to the player's collection and prints a message for each skin added or failed to add.

    Args:
        skin_ids (List[int]): A list of unique identifiers of the damage skins to be added.

    Returns:
        None
    """
    for skin_id in skin_ids:
        # Attempt the add and report the outcome immediately.
        if addDamageSkin(skin_id):
            print(f"The damage skin with ID {skin_id} has been added to your account's collection.")
        else:
            print(f"Failed to add damage skin with ID {skin_id}.")
|
<reponame>JohnCKangwa/RingedOrb<filename>RingedOrb/User Mode/roUserComponent.cpp
#include "roUserComponent.h"
#include "..\Graphics\GUI\roGUIData.h"
#include "..\Core\roWindow.h"
#include "..\Core\roFileSystem.h"
// Registers this component on the event queue and subscribes it to
// model events (E_ID_MODEL), which are handled in OnEvent().
roUserComponent::roUserComponent(roEventQueue* eventQueue) : roEventNode(eventQueue) {
	roEventNode::SubscribeForEvent(roEVENT_ID::E_ID_MODEL);
}
// Handles model events: once a model has been STORED, its name is made
// available in the GUI mesh list. All other events are ignored.
void roUserComponent::OnEvent(roEvent* evt) {
	if (evt->EventID != E_ID_MODEL)
		return;
	auto* modelEvt = static_cast<roModelEvent*>(evt);
	if (modelEvt->Type == roModelEvent::roEVENT_MODEL_TYPE::STORED)
		roGUIData::AddGUIMeshName(modelEvt->Name);
}
// Per-frame update: reacts to GUI state by emitting model / render-entity
// creation events.
void roUserComponent::Update() {
	// "Open model" was requested in the GUI: show a file dialog and
	// publish a CREATE model event for the chosen .obj path.
	if (roGUIData::GetOpenModelDialogState()) {
		// NOTE(review): `path` is function-static, so it persists across
		// calls; it is overwritten before use here, but confirm the
		// static storage is intentional rather than a leftover.
		static std::wstring path;
		path = RingedOrbFileSystem::OpenFileDialog(RingedOrbFileSystem::g_ModelsRootDir, roWindow::GetWindowHandle(),
			RingedOrbFileSystem::roFileExtension::F_EXT_OBJ);
		//send message
		roModelEvent modelEvent;
		modelEvent.ModelPath = path;
		modelEvent.Type = roModelEvent::CREATE;
		PushEvent(modelEvent);
	}
	// "Add entity" was requested: publish a CREATE render-entity event
	// referencing the first selected mesh in the GUI mesh data.
	if (roGUIData::GetAddEnityState()) {
		std::string modelName;
		for (auto &iter : roGUIData::sm_MeshData) {
			if (iter.second) {
				modelName = iter.first;
				break;
			}
		}
		roREntityEvent REEvent;
		REEvent.MeshRefName = modelName;
		REEvent.Type = roREntityEvent::CREATE;
		PushEvent(REEvent);
	}
}
|
/**
 * Simple mutable data holder for an employee's name and salary.
 */
public class Employee {

    /** Employee display name. */
    private String name;

    /** Salary in whole currency units. */
    private int salary;

    /**
     * Creates an employee record.
     *
     * @param name   the employee's name
     * @param salary the employee's salary
     */
    public Employee(String name, int salary) {
        this.name = name;
        this.salary = salary;
    }

    /** @return the employee's name */
    public String getName() {
        return name;
    }

    /** @return the employee's salary */
    public int getSalary() {
        return salary;
    }

    /** @param name the new name */
    public void setName(String name) {
        this.name = name;
    }

    /** @param salary the new salary */
    public void setSalary(int salary) {
        this.salary = salary;
    }
}
|
# Evaluate the 512+512+512-rare LM checkpoint on WikiText-103 validation,
# batch size 1, using the identity_first_third_old_sixth augmentation and
# scoring only the last sixth of each input (last_sixth_eval).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-rare/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-rare/512+512+512-old-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function identity_first_third_old_sixth --eval_function last_sixth_eval
|
<reponame>bencmbrook/compat
#! /usr/bin/env node
// compat CLI entry point: parses command-line flags (plus an optional
// JSON config file) describing which files to check against which
// JavaScript/HTML environments.
let yargs = require('yargs')
let fs = require('fs')
let os = require('os')
let path = require('path')
let compat = require('./compat.js')
let output = require('./output.js')
let createTable = require('./tableUpdater/createTable.js')

const argv =
  yargs
    .alias({
      'target': 't',
      'jsEnvs': 'j',
      'htmlEnvs': 'h',
      'features': 'f',
      'ignoreFeatures': 'i',
      'recursive': 'r',
      'config': 'c'
    })
    .array(['target', 'jsEnvs', 'htmlEnvs', 'features', 'ignoreFeatures'])
    .boolean(['supportedFeatures', 'supportedFeatureGroups', 'enabledFeatures', 'supportedEnvs'])
    .describe({
      'target': 'file(s) and directories containing files to check for compatibility',
      'jsEnvs': 'environment(s) to check for JavaScript compatiblity',
      'htmlEnvs': 'environment(s) to check for HTML compatiblity',
      'features': 'feature(s) and/or feature group(s) to check for',
      'ignoreFeatures': 'feature(s) and/or feature group(s) to ignore',
      'recursive': 'enters directories specified in target recursively',
      'supportedEnvs': 'prints out all supported environments',
      'supportedFeatures': 'prints out tree of supported features',
      'supportedFeatureGroups': 'prints out supported feature groups and their features',
      'enabledFeatures': 'prints out the features that will be enabled for detection with the provided flags',
      'config': 'path to config file (must have .json extension)'
    })
    .default({
      'target': ['.'],
      'jsEnvs': ['ie11', 'chrome56', 'firefox51', 'edge13', 'edge14', 'safari9', 'safari10'],
      'htmlEnvs': ['ie11', 'chrome56', 'firefox51', 'edge13', 'edge14', 'safari9', 'safari10'],
      'features': ['js', 'html'],
      'ignoreFeatures': [],
      'config': './.compatrc.json'
    })
    // Loads extra options from the JSON file named by --config.
    // (A block of dead, commented-out experimentation with a custom
    // config loader was removed here.)
    .config()
    .version()
    .help(false)
    .argv
// The compatibility table is cached in the user's home directory.
const compatTableLocation = path.join(os.homedir(), '/.compat-data/compatTable.json')

// Informational flags short-circuit the normal check: print the requested
// capability listings instead of running compat over the targets.
if (argv.supportedFeatures ||
    argv.supportedFeatureGroups ||
    argv.supportedEnvs
) {
  if (argv.supportedFeatures) {
    const supportedFeatures = compat.getSupportedFeatures()
    output.outputSupportedFeatures(supportedFeatures)
  }
  if (argv.supportedFeatureGroups) {
    const supportedFeatureGroups = compat.getSupportedFeatureGroups()
    output.outputSupportedFeatureGroups(supportedFeatureGroups)
  }
  if (argv.supportedEnvs) {
    // Env listing needs the table, so make sure it is loaded first.
    afterLoadingTable(compatTableLocation, (compatTableLocation) => {
      const supportedEnvs = compat.getSupportedEnvs(compatTableLocation)
      output.outputSupportedEnvs(supportedEnvs)
    })
  }
} else {
  afterLoadingTable(compatTableLocation, (compatTableLocation) => {
    runCompat(compatTableLocation)
  })
}
// Expands the --target arguments into a flat list of .html/.js files,
// runs the compatibility check against the configured environments, and
// prints errors plus a coverage summary.
function runCompat (compatTableLocation) {
  const nonExistentTargets = []
  const filesToCheck =
    [].concat.apply([],
      argv.target.filter((fileName) => {
        if (fs.existsSync(fileName)) {
          return true
        }
        // Collect missing targets so they can be reported together.
        nonExistentTargets.push(fileName)
        return false
      }).map((fileName) => {
        const fileStat = fs.lstatSync(fileName)
        if (fileStat.isDirectory()) {
          // Only .html and .js files inside directories are checked.
          return getFilesInDirectory(fileName, argv.recursive)
            .filter((fileName) => {
              return fileName.endsWith('.html') || fileName.endsWith('.js')
            })
        } else {
          return [fileName]
        }
      })
    )
  output.outputNonExistentTargets(nonExistentTargets)
  const undefinedEnvs = compat.getUndefinedEnvs(compatTableLocation, argv.jsEnvs, argv.htmlEnvs)
  output.outputUndefinedEnvs(undefinedEnvs)
  if (argv.enabledFeatures) {
    const enabledFeatures = compat.getEnabledFeatures(argv.features, argv.ignoreFeatures)
    output.outputEnabledFeatures(enabledFeatures)
  }
  const errors = compat.check(filesToCheck, argv.jsEnvs, argv.htmlEnvs, argv.features, argv.ignoreFeatures, compatTableLocation)
  output.outputErrors(errors)
  output.outputCoverage(argv.jsEnvs, argv.htmlEnvs, errors)
}
// Lists the files under `dirPath` (recursively when `recursive` is true),
// returning each as a path prefixed with `dirPath`.
// Fixes vs. the original: the parameter no longer shadows the required
// `path` module, and stat paths no longer contain a double slash
// (`dir//file`) from concatenating '/' onto an already-slash-terminated
// directory.
function getFilesInDirectory (dirPath, recursive) {
  // Normalise to a trailing slash so plain concatenation works below.
  if (!dirPath.endsWith('/')) {
    dirPath = dirPath + '/'
  }
  let filesInDir = fs.readdirSync(dirPath)
  return [].concat.apply([],
    filesInDir.map((file) => {
      const isDir = fs.lstatSync(dirPath + file).isDirectory()
      if (isDir) {
        if (recursive) {
          return getFilesInDirectory(dirPath + file + '/', recursive)
        } else {
          return []
        }
      } else {
        return [dirPath + file]
      }
    })
  )
}
// Ensures the compatibility table exists (updating it when possible),
// then invokes `func` with its location. When the update fails, falls
// back to a previously cached table; when none exists, reports failure.
function afterLoadingTable (compatTableLocation, func) {
  createTable.createTable(compatTableLocation)
    .then((updated) => {
      if (updated) {
        console.log('Compatibility table updated.')
      } else {
        console.log('Compatibility table already up to date.')
      }
      func(compatTableLocation)
    })
    .catch((err) => {
      console.log(err)
      if (fs.existsSync(compatTableLocation)) {
        console.log('Using previous compatability table.')
        func(compatTableLocation)
      } else {
        console.log('No previous compatibility table. Exiting.')
        // Fixed: the message said "Exiting." but the process previously
        // terminated with a success status; signal failure instead.
        process.exitCode = 1
      }
    })
}
|
<filename>src/main/java/malte0811/controlengineering/logic/clock/ClockTypes.java
package malte0811.controlengineering.logic.clock;
import malte0811.controlengineering.ControlEngineering;
import malte0811.controlengineering.items.CEItems;
import malte0811.controlengineering.util.typereg.TypedRegistry;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.util.Unit;
import net.minecraft.world.item.Item;
import java.util.Map;
/**
 * Registry of the built-in clock generator types and helpers for looking
 * up their companion items.
 */
public class ClockTypes {
    // The four built-in generators; registered under namespaced ids in
    // the static initializer below.
    public static final ClockGenerator<Unit> ALWAYS_ON = new FreeClock();
    public static final ClockGenerator<Boolean> RISING_EDGE = new EdgeClock();
    public static final ClockGenerator<Unit> WHILE_RS_ON = new StateClock();
    public static final ClockGenerator<Unit> NEVER = new NoneClock();
    public static final TypedRegistry<ClockGenerator<?>> REGISTRY = new TypedRegistry<>();

    /**
     * Registers a clock generator under the given id and returns it.
     */
    public static <T extends ClockGenerator<?>> T register(ResourceLocation name, T generator) {
        return REGISTRY.register(name, generator);
    }

    /** @return all registered generators keyed by their registry id */
    public static Map<ResourceLocation, ClockGenerator<?>> getGenerators() {
        return REGISTRY.getEntries();
    }

    /** @return the item associated with the given clock generator */
    public static Item getItem(ClockGenerator<?> clock) {
        return CEItems.CLOCK_GENERATORS.get(clock.getRegistryName()).get();
    }

    // Field initializers run before this static block, so the generator
    // instances above are guaranteed to exist when registered here.
    static {
        register(new ResourceLocation(ControlEngineering.MODID, "clock_free"), ALWAYS_ON);
        register(new ResourceLocation(ControlEngineering.MODID, "clock_edge"), RISING_EDGE);
        register(new ResourceLocation(ControlEngineering.MODID, "clock_state"), WHILE_RS_ON);
        register(new ResourceLocation(ControlEngineering.MODID, "clock_none"), NEVER);
    }
}
|
#!/bin/bash -xe
# Provisions a GKE cluster and installs Jenkins on it with Helm v2 (Tiller).
# -x traces every command; -e aborts on the first failure.
# Dedicated auto-mode VPC network for the cluster.
gcloud compute networks create jenkins --subnet-mode auto
# Two-node cluster with storage read/write and project-hosting OAuth scopes.
# NOTE(review): --cluster-version 1.12 is pinned and long past GKE support;
# confirm before reusing this script.
gcloud container clusters create jenkins-cd \
--machine-type n1-standard-2 \
--num-nodes 2 \
--network jenkins \
--scopes "https://www.googleapis.com/auth/projecthosting,storage-rw" \
--cluster-version 1.12
gcloud container clusters list
# Fetch kubeconfig credentials and sanity-check connectivity.
gcloud container clusters get-credentials jenkins-cd
kubectl cluster-info
# Download a pinned Helm v2 client into the working directory.
HELM_VERSION=2.9.1
wget https://storage.googleapis.com/kubernetes-helm/helm-v$HELM_VERSION-linux-amd64.tar.gz
tar zxfv helm-v$HELM_VERSION-linux-amd64.tar.gz
cp linux-amd64/helm .
# Grant the current gcloud user, then the Tiller service account,
# cluster-admin rights, and deploy Tiller into kube-system.
kubectl create clusterrolebinding cluster-admin-binding --clusterrole=cluster-admin --user=$(gcloud config get-value account)
kubectl create serviceaccount tiller --namespace kube-system
kubectl create clusterrolebinding tiller-admin-binding --clusterrole=cluster-admin --serviceaccount=kube-system:tiller
./helm init --service-account=tiller
./helm update
# Give tiller a chance to start up
until ./helm version; do sleep 10;done
# Install the Jenkins chart with the repo's values file; --wait blocks until
# the release's resources are ready.
./helm install -n cd stable/jenkins -f jenkins/values.yaml --version 1.2.2 --wait
# Poll pod status a few times, then block until the Jenkins pod runs.
for i in `seq 1 5`;do kubectl get pods; sleep 60;done
until kubectl get pods -l app=cd-jenkins | grep Running; do sleep 10;done
# Cleanup resources
./helm delete --purge cd
|
import java.util.ArrayList;
import java.util.List;
/** Callback contract for observers of system property events. */
interface PropertyEventListener {
/**
 * Invoked when a property event is dispatched.
 *
 * @param propertyName  name of the property concerned
 * @param propertyValue value associated with the event
 */
void onPropertyEvent(String propertyName, String propertyValue);
}
/**
 * Maintains a list of {@link PropertyEventListener}s and dispatches property
 * events to them.
 *
 * <p>Dispatch iterates over a snapshot of the listener list so a listener may
 * add or remove listeners (including itself) from inside its callback without
 * triggering a {@link java.util.ConcurrentModificationException}, which the
 * original live-list iteration was vulnerable to.
 */
class SystemPropertyManager {
    private final List<PropertyEventListener> listeners;

    public SystemPropertyManager() {
        this.listeners = new ArrayList<>();
    }

    /** Registers a listener; duplicates are allowed and notified once per registration. */
    public void addListener(PropertyEventListener listener) {
        listeners.add(listener);
    }

    /** Removes the first matching registration of the listener, if present. */
    public void removeListener(PropertyEventListener listener) {
        listeners.remove(listener);
    }

    /**
     * Notifies every currently registered listener of an event.
     *
     * @param propertyName  property name passed through to listeners
     * @param propertyValue property value passed through to listeners
     */
    public void notifyListeners(String propertyName, String propertyValue) {
        // Snapshot first: a callback that mutates 'listeners' must not break
        // the iteration in progress.
        for (PropertyEventListener listener : new ArrayList<>(listeners)) {
            listener.onPropertyEvent(propertyName, propertyValue);
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.servlets ;
/**
 * Servlet for operations directly on a dataset - REST(ish) behaviour on the
 * dataset URI.
 *
 * <p>This base class rejects every HTTP verb with a "method not allowed"
 * error; concrete subclasses override the operations they actually support.
 */
public abstract class REST_Quads extends SPARQL_GSP
{
/** Default constructor; delegates to {@code SPARQL_GSP}. */
public REST_Quads() {
super() ;
}
// Each handler below answers with a "method not allowed" error.
@Override
protected void doOptions(HttpAction action) {
ServletOps.errorMethodNotAllowed("OPTIONS") ;
}
@Override
protected void doHead(HttpAction action) {
ServletOps.errorMethodNotAllowed("HEAD") ;
}
@Override
protected void doPost(HttpAction action) {
ServletOps.errorMethodNotAllowed("POST") ;
}
@Override
protected void doPut(HttpAction action) {
ServletOps.errorMethodNotAllowed("PUT") ;
}
@Override
protected void doDelete(HttpAction action) {
ServletOps.errorMethodNotAllowed("DELETE") ;
}
@Override
protected void doPatch(HttpAction action) {
ServletOps.errorMethodNotAllowed("PATCH") ;
}
}
|
#!/bin/bash
# Cross-compiles kcptun client/server binaries for a matrix of OS/arch
# targets, optionally compresses them with UPX, and packages checksummed
# release tarballs.

# Pick a SHA-1 tool: prefer sha1sum (GNU coreutils), fall back to shasum
# (macOS/BSD).
sum="sha1sum"
echo "If you need reproducible build, export GO111MODULE=on first"
if ! hash sha1sum 2>/dev/null; then
    if ! hash shasum 2>/dev/null; then
        echo "I can't see 'sha1sum' or 'shasum'"
        echo "Please install one of them!"
        # Fixed: exit non-zero so callers/CI detect the missing prerequisite
        # (the original bare `exit` reported success).
        exit 1
    fi
    sum="shasum"
fi

# Compress binaries only when UPX is available on PATH.
UPX=false
if hash upx 2>/dev/null; then
    UPX=true
fi

# Date-stamped version baked into the binaries; -s -w strips symbol/debug info.
VERSION=`date -u +%Y%m%d`
LDFLAGS="-X main.VERSION=$VERSION -s -w"
GCFLAGS=""

# AMD64
OSES=(linux darwin windows freebsd)
for os in ${OSES[@]}; do
    suffix=""
    if [ "$os" == "windows" ]
    then
        suffix=".exe"
    fi
    env CGO_ENABLED=0 GOOS=$os GOARCH=amd64 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_${os}_amd64${suffix} github.com/xtaci/kcptun/client
    env CGO_ENABLED=0 GOOS=$os GOARCH=amd64 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_${os}_amd64${suffix} github.com/xtaci/kcptun/server
    if $UPX; then upx -9 client_${os}_amd64${suffix} server_${os}_amd64${suffix};fi
    tar -zcf kcptun-${os}-amd64-$VERSION.tar.gz client_${os}_amd64${suffix} server_${os}_amd64${suffix}
    $sum kcptun-${os}-amd64-$VERSION.tar.gz
done

# 386
OSES=(linux windows)
for os in ${OSES[@]}; do
    suffix=""
    if [ "$os" == "windows" ]
    then
        suffix=".exe"
    fi
    env CGO_ENABLED=0 GOOS=$os GOARCH=386 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_${os}_386${suffix} github.com/xtaci/kcptun/client
    env CGO_ENABLED=0 GOOS=$os GOARCH=386 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_${os}_386${suffix} github.com/xtaci/kcptun/server
    if $UPX; then upx -9 client_${os}_386${suffix} server_${os}_386${suffix};fi
    tar -zcf kcptun-${os}-386-$VERSION.tar.gz client_${os}_386${suffix} server_${os}_386${suffix}
    $sum kcptun-${os}-386-$VERSION.tar.gz
done

# ARM v5/v6/v7 (Linux only)
ARMS=(5 6 7)
for v in ${ARMS[@]}; do
    env CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=$v go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_linux_arm$v github.com/xtaci/kcptun/client
    env CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=$v go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_linux_arm$v github.com/xtaci/kcptun/server
    if $UPX; then upx -9 client_linux_arm$v server_linux_arm$v;fi
    tar -zcf kcptun-linux-arm$v-$VERSION.tar.gz client_linux_arm$v server_linux_arm$v
    $sum kcptun-linux-arm$v-$VERSION.tar.gz
done

# ARM64
env CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_linux_arm64 github.com/xtaci/kcptun/client
env CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_linux_arm64 github.com/xtaci/kcptun/server
# Fixed: dropped the stray trailing '*' after server_linux_arm64 that made
# this upx call depend on accidental shell glob expansion.
if $UPX; then upx -9 client_linux_arm64 server_linux_arm64;fi
tar -zcf kcptun-linux-arm64-$VERSION.tar.gz client_linux_arm64 server_linux_arm64
$sum kcptun-linux-arm64-$VERSION.tar.gz

# MIPS32 little-endian and big-endian (softfloat)
env CGO_ENABLED=0 GOOS=linux GOARCH=mipsle GOMIPS=softfloat go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_linux_mipsle github.com/xtaci/kcptun/client
env CGO_ENABLED=0 GOOS=linux GOARCH=mipsle GOMIPS=softfloat go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_linux_mipsle github.com/xtaci/kcptun/server
env CGO_ENABLED=0 GOOS=linux GOARCH=mips GOMIPS=softfloat go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o client_linux_mips github.com/xtaci/kcptun/client
env CGO_ENABLED=0 GOOS=linux GOARCH=mips GOMIPS=softfloat go build -ldflags "$LDFLAGS" -gcflags "$GCFLAGS" -o server_linux_mips github.com/xtaci/kcptun/server
# The globs here are intentional: they cover both the mips and mipsle binaries.
if $UPX; then upx -9 client_linux_mips* server_linux_mips*;fi
tar -zcf kcptun-linux-mipsle-$VERSION.tar.gz client_linux_mipsle server_linux_mipsle
tar -zcf kcptun-linux-mips-$VERSION.tar.gz client_linux_mips server_linux_mips
$sum kcptun-linux-mipsle-$VERSION.tar.gz
$sum kcptun-linux-mips-$VERSION.tar.gz
|
// Bitwig Studio controller script stub for the Novation Launchkey 25.
loadAPI(1);
host.defineController("Novation", "Launchkey 25", "1.0", "11516a34-29d9-4126-9a79-db4be58c4c15");
// Declares two MIDI input and two MIDI output ports, matched to the device's
// two "Launchkey 25 MIDI" endpoints for auto-discovery.
host.defineMidiPorts(2, 2);
host.addDeviceNameBasedDiscoveryPair(["Launchkey 25 MIDI 1", "Launchkey 25 MIDI 2"], ["Launchkey 25 MIDI 1", "Launchkey 25 MIDI 2"]);
// Called once when the controller script starts: wire port 0's MIDI and
// sysex streams to the handler stubs below.
function init()
{
host.getMidiInPort(0).setMidiCallback(onMidi);
host.getMidiInPort(0).setSysexCallback(onSysex);
}
// Called when the script is stopped; nothing to clean up yet.
function exit()
{
}
// MIDI message handler stub (status/data1/data2 raw bytes); not implemented.
function onMidi(status, data1, data2)
{
}
// Sysex message handler stub; not implemented.
function onSysex(data)
{
}
|
<filename>exercise tracker/exercise.js
const express = require("express");
const Users = require("../Models/user");
const router = express.Router();
const moment = require("moment");
/* adding a new user */
// POST /new-user: creates a user with an empty exercise list and responds
// with { username, _id }. On failure, logs the error and sends a message.
router.post("/new-user", async (req, res) => {
  try {
    const newUser = new Users({
      userName: req.body.username,
      exercise: [],
    });
    const user = await newUser.save();
    res.json({ username: user.userName, _id: user.id });
  } catch (err) {
    console.error(err);
    // Fixed user-facing typo: "occured" -> "occurred".
    res.send("Error occurred while creating a new user");
  }
});
/* TO SHOW ALL THE USERS */
// GET /users: responds with every user as { username, _id }.
router.get("/users", async (req, res) => {
  try {
    const users = await Users.find({});
    console.log(users);
    // Project each document down to the two public fields.
    const allUsers = users.map((user) => ({
      username: user.userName,
      _id: user._id,
    }));
    res.json(allUsers);
  } catch {
    res.send("no user found");
  }
});
/* add an exercise */
// POST /add: appends an exercise ({description, duration, date}) to the
// user identified by req.body.userId and echoes the stored entry back.
router.post("/add", async (req, res) => {
// Date handling: blank -> today (YYYY-MM-DD); anything else must parse
// strictly as YYYY-MM-DD or the request is rejected.
let recDate = req.body.date;
// validate Date input
if (recDate === '') {
let newDate = Date.now();
recDate = moment(newDate).format('YYYY-MM-DD');
}
else if (!moment(req.body.date, 'YYYY-MM-DD',true).isValid()) {
res.json(`${recDate} is an invalid date. Please enter a valid date in the format YYYY-MM-DD`);
return;
}
// NOTE(review): checkVaidation is async but not awaited here, so its error
// responses can race with the success response below -- confirm intent.
checkVaidation(req, res); // validating the data
try {
const existUser = await Users.findById(req.body.userId);
// Build the exercise entry and append it to the user's embedded list.
const dataObject = {
description: req.body.description,
duration: req.body.duration,
date: recDate,
};
existUser.exercise = existUser.exercise.concat([dataObject]);
await existUser.save();
// Echo the saved exercise (plus user identity) back to the client.
res.json({
_id: existUser.id,
username: existUser.userName,
date: recDate,
duration: req.body.duration,
description: req.body.description,
});
} catch (err) {
console.error(err);
res.send("ERROR whie saving the exercise");
}
});
/* logs */
// GET /log?userId=...&from=...&to=...&limit=...
// Returns the user's exercise log, optionally filtered by date range/limit.
// Fixed: the route pattern was "/log?:userId" (the '?' is a path-to-regexp
// modifier, not a query-string separator); the userId is read from req.query
// inside findLogs, so the plain "/log" path is the intended route.
router.get("/log", (req, res) => {
  // Fixed: `req.query == {}` compares object identity and is always false;
  // test for the absence of query keys explicitly instead.
  if (Object.keys(req.query).length === 0) {
    findLogs(req, res); // if from and todate query is not given
  } else {
    findLogs(req, res, true); // if from and todate query is given
  }
});
/**
 * Builds the exercise-log response for the user in req.query.userId.
 * queryValue=false: return the full log; queryValue=true: apply the optional
 * from/to date filters and numeric limit found in req.query.
 * Sends an error message if the lookup or date parsing fails.
 */
async function findLogs(req, res, queryValue = false) {
try {
const user = await Users.findById(req.query.userId);
if (queryValue == false) {
res.json({
_id: req.query.userId,
username: user.userName,
count: user.exercise.length,
log: user.exercise,
});
} else {
console.log(req.query);
let results = user.exercise;
let fromDate = new Date(req.query.from); // from date (Invalid Date when absent)
let toDate = new Date(req.query.to); // to date (Invalid Date when absent)
let limit = req.query.limit; // limit (string or undefined)
// Both bounds applied when "to" is valid.
// NOTE(review): if only "to" is given, fromDate is Invalid Date and the
// >= comparison is always false, yielding an empty result -- confirm.
if (isValidDate(toDate)) {
results = results.filter(
(item) => item.date >= fromDate && item.date <= toDate
);
//check if just from defined
} else if (isValidDate(fromDate)) {
results = results.filter((item) => item.date >= fromDate);
}
//apply limit if defined and applicable
if (!isNaN(limit) && results.length > limit) {
results = results.slice(0, limit);
}
res.json({
_id: req.query.userId,
username: user.userName,
count: results.length,
log: results,
});
}
} catch (err) {
console.error(err);
res.send("invlaid uid or date params");
}
}
/* validate the date */
// True only for Date instances whose time value is a real number
// (an "Invalid Date" has a NaN time value).
function isValidDate(d) {
  if (!(d instanceof Date)) {
    return false;
  }
  return !Number.isNaN(d.getTime());
}
/**
 * Validates the /add request body (userId, duration, description).
 *
 * Fixed: stops at the FIRST failing check. The original fell through and
 * could call res.send() several times, which throws "Cannot set headers
 * after they are sent" on the second call.
 * NOTE(review): the caller does not await this async function, so the main
 * handler may still proceed concurrently -- confirm intended flow.
 */
async function checkVaidation(req, res) {
  // userId must reference an existing user in the database
  const user = await Users.findById(req.body.userId).exec();
  if (!user) {
    res.send("userId does not exist");
    return;
  }
  // duration must be numeric and non-empty (note: +'' coerces to 0, so an
  // empty or zero duration both fail the == "" comparison, as before)
  const duration = +req.body.duration;
  if (isNaN(duration) || duration == "") {
    res.send("Duration should be a numbers or not be left empty");
    return;
  }
  // description must not be empty
  if (req.body.description == "") {
    res.send("Description should not be left empty");
    return;
  }
}
module.exports = router;
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort on errors, on unset variables, and on failures anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
# Repository root, resolved relative to this script's location.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
kube::golang::setup_env
# Build the swagger type-doc generator used for the checks below.
make -C "${KUBE_ROOT}" WHAT=cmd/genswaggertypedocs
# Find binary
genswaggertypedocs=$(kube::util::find-binary "genswaggertypedocs")
gen_swagger_result=0
result=0
# Lists versioned API type files, pruning build output, vendored and
# third-party trees.
find_files() {
find . -not \( \
\( \
-wholename './output' \
-o -wholename './_output' \
-o -wholename './_gopath' \
-o -wholename './release' \
-o -wholename './target' \
-o -wholename '*/third_party/*' \
-o -wholename '*/vendor/*' \
\) -prune \
\) \
\( -wholename '*pkg/apis/*/v*/types.go' \
-o -wholename '*pkg/api/unversioned/types.go' \
\)
}
# Check the files passed on the command line, or discover them all.
if [[ $# -eq 0 ]]; then
versioned_api_files=$(find_files | egrep "pkg/.[^/]*/((v.[^/]*)|unversioned)/types\.go") || true
else
versioned_api_files="${*}"
fi
for file in $versioned_api_files; do
# Per the message below, a non-zero status is the count of missing
# field descriptions in the file.
$genswaggertypedocs -v -s "${file}" -f - || gen_swagger_result=$?
if [[ "${gen_swagger_result}" -ne "0" ]]; then
echo "API file: ${file} is missing: ${gen_swagger_result} descriptions"
result=1
fi
# Descriptions belong in doc comments, not in json struct tags.
if grep json: "${file}" | grep -v // | grep description: ; then
echo "API file: ${file} should not contain descriptions in struct tags"
result=1
fi
# Whitespace around omitempty silently breaks the JSON tag directive.
if grep json: "${file}" | grep -Ee ",[[:space:]]+omitempty|omitempty[[:space:]]+" ; then
echo "API file: ${file} should not contain leading or trailing spaces for omitempty directive"
result=1
fi
done
# Internal (unversioned) API types must never carry descriptions.
internal_types_files="${KUBE_ROOT}/pkg/apis/core/types.go ${KUBE_ROOT}/pkg/apis/extensions/types.go"
for internal_types_file in $internal_types_files; do
if [[ ! -e $internal_types_file ]]; then
echo "Internal types file ${internal_types_file} does not exist"
result=1
continue
fi
if grep json: "${internal_types_file}" | grep -v // | grep description: ; then
echo "Internal API types should not contain descriptions"
result=1
fi
done
exit ${result}
|
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include <gtest/gtest.h>
#include "ModelFixture.hpp"
#include "../CurveTriquadratic.hpp"
#include <cmath>
using namespace openstudio;
using namespace openstudio::model;
// Smoke test: constructing a CurveTriquadratic in a fresh Model must not
// crash. Run as a death test so a crash in construction is isolated from the
// main test process.
TEST_F(ModelFixture, CurveTriquadratic_DefaultConstructors) {
::testing::FLAGS_gtest_death_test_style = "threadsafe";
ASSERT_EXIT(
{
Model m;
CurveTriquadratic curve(m);
exit(0);
},
::testing::ExitedWithCode(0), "");
}
// Exercises every coefficient getter/setter of CurveTriquadratic, the x/y/z
// input limits, the output limits, and evaluate()'s clamping behaviour
// against a reference implementation of the 27-term polynomial.
TEST_F(ModelFixture, CurveTriquadratic_GetterSetters_evaluate) {
Model m;
CurveTriquadratic curve(m);
// A freshly constructed curve has no coefficients or limits set.
EXPECT_FALSE(curve.coefficient1Constant());
EXPECT_FALSE(curve.coefficient2xPOW2());
EXPECT_FALSE(curve.coefficient3x());
EXPECT_FALSE(curve.coefficient4yPOW2());
EXPECT_FALSE(curve.coefficient5y());
EXPECT_FALSE(curve.coefficient6zPOW2());
EXPECT_FALSE(curve.coefficient7z());
EXPECT_FALSE(curve.coefficient8xPOW2TIMESYPOW2());
EXPECT_FALSE(curve.coefficient9xTIMESY());
EXPECT_FALSE(curve.coefficient10xTIMESYPOW2());
EXPECT_FALSE(curve.coefficient11xPOW2TIMESY());
EXPECT_FALSE(curve.coefficient12xPOW2TIMESZPOW2());
EXPECT_FALSE(curve.coefficient13xTIMESZ());
EXPECT_FALSE(curve.coefficient14xTIMESZPOW2());
EXPECT_FALSE(curve.coefficient15xPOW2TIMESZ());
EXPECT_FALSE(curve.coefficient16yPOW2TIMESZPOW2());
EXPECT_FALSE(curve.coefficient17yTIMESZ());
EXPECT_FALSE(curve.coefficient18yTIMESZPOW2());
EXPECT_FALSE(curve.coefficient19yPOW2TIMESZ());
EXPECT_FALSE(curve.coefficient20xPOW2TIMESYPOW2TIMESZPOW2());
EXPECT_FALSE(curve.coefficient21xPOW2TIMESYPOW2TIMESZ());
EXPECT_FALSE(curve.coefficient22xPOW2TIMESYTIMESZPOW2());
EXPECT_FALSE(curve.coefficient23xTIMESYPOW2TIMESZPOW2());
EXPECT_FALSE(curve.coefficient24xPOW2TIMESYTIMESZ());
EXPECT_FALSE(curve.coefficient25xTIMESYPOW2TIMESZ());
EXPECT_FALSE(curve.coefficient26xTIMESYTIMESZPOW2());
EXPECT_FALSE(curve.coefficient27xTIMESYTIMESZ());
EXPECT_FALSE(curve.minimumValueofx());
EXPECT_FALSE(curve.maximumValueofx());
EXPECT_FALSE(curve.minimumValueofy());
EXPECT_FALSE(curve.maximumValueofy());
EXPECT_FALSE(curve.minimumValueofz());
EXPECT_FALSE(curve.maximumValueofz());
// Distinct coefficient values 1..27 so each polynomial term is traceable.
double c1 = 1.0;
double c2 = 2.0;
double c3 = 3.0;
double c4 = 4.0;
double c5 = 5.0;
double c6 = 6.0;
double c7 = 7.0;
double c8 = 8.0;
double c9 = 9.0;
double c10 = 10.0;
double c11 = 11.0;
double c12 = 12.0;
double c13 = 13.0;
double c14 = 14.0;
double c15 = 15.0;
double c16 = 16.0;
double c17 = 17.0;
double c18 = 18.0;
double c19 = 19.0;
double c20 = 20.0;
double c21 = 21.0;
double c22 = 22.0;
double c23 = 23.0;
double c24 = 24.0;
double c25 = 25.0;
double c26 = 26.0;
double c27 = 27.0;
// Input limits used later to verify evaluate() clamps out-of-range inputs.
double min_x = 0.1;
double max_x = 3.0;
double min_y = 4.0;
double max_y = 6.0;
double min_z = 8.0;
double max_z = 10.0;
// Reference implementation of the full 27-term triquadratic polynomial.
auto calc = [=](double x, double y, double z) {
return c1 + c2 * std::pow(x, 2) + c3 * x + c4 * std::pow(y, 2) + c5 * y + c6 * std::pow(z, 2) + c7 * z + c8 * std::pow(x, 2) * std::pow(y, 2)
+ c9 * x * y + c10 * x * std::pow(y, 2) + c11 * std::pow(x, 2) * y + c12 * std::pow(x, 2) * std::pow(z, 2) + c13 * x * z
+ c14 * x * std::pow(z, 2) + c15 * std::pow(x, 2) * z + c16 * std::pow(y, 2) * std::pow(z, 2) + c17 * y * z + c18 * y * std::pow(z, 2)
+ c19 * std::pow(y, 2) * z + c20 * std::pow(x, 2) * std::pow(y, 2) * std::pow(z, 2) + c21 * std::pow(x, 2) * std::pow(y, 2) * z
+ c22 * std::pow(x, 2) * y * std::pow(z, 2) + c23 * x * std::pow(y, 2) * std::pow(z, 2) + c24 * std::pow(x, 2) * y * z
+ c25 * x * std::pow(y, 2) * z + c26 * x * y * std::pow(z, 2) + c27 * x * y * z;
};
// All setters should accept the values...
EXPECT_TRUE(curve.setCoefficient1Constant(c1));
EXPECT_TRUE(curve.setCoefficient2xPOW2(c2));
EXPECT_TRUE(curve.setCoefficient3x(c3));
EXPECT_TRUE(curve.setCoefficient4yPOW2(c4));
EXPECT_TRUE(curve.setCoefficient5y(c5));
EXPECT_TRUE(curve.setCoefficient6zPOW2(c6));
EXPECT_TRUE(curve.setCoefficient7z(c7));
EXPECT_TRUE(curve.setCoefficient8xPOW2TIMESYPOW2(c8));
EXPECT_TRUE(curve.setCoefficient9xTIMESY(c9));
EXPECT_TRUE(curve.setCoefficient10xTIMESYPOW2(c10));
EXPECT_TRUE(curve.setCoefficient11xPOW2TIMESY(c11));
EXPECT_TRUE(curve.setCoefficient12xPOW2TIMESZPOW2(c12));
EXPECT_TRUE(curve.setCoefficient13xTIMESZ(c13));
EXPECT_TRUE(curve.setCoefficient14xTIMESZPOW2(c14));
EXPECT_TRUE(curve.setCoefficient15xPOW2TIMESZ(c15));
EXPECT_TRUE(curve.setCoefficient16yPOW2TIMESZPOW2(c16));
EXPECT_TRUE(curve.setCoefficient17yTIMESZ(c17));
EXPECT_TRUE(curve.setCoefficient18yTIMESZPOW2(c18));
EXPECT_TRUE(curve.setCoefficient19yPOW2TIMESZ(c19));
EXPECT_TRUE(curve.setCoefficient20xPOW2TIMESYPOW2TIMESZPOW2(c20));
EXPECT_TRUE(curve.setCoefficient21xPOW2TIMESYPOW2TIMESZ(c21));
EXPECT_TRUE(curve.setCoefficient22xPOW2TIMESYTIMESZPOW2(c22));
EXPECT_TRUE(curve.setCoefficient23xTIMESYPOW2TIMESZPOW2(c23));
EXPECT_TRUE(curve.setCoefficient24xPOW2TIMESYTIMESZ(c24));
EXPECT_TRUE(curve.setCoefficient25xTIMESYPOW2TIMESZ(c25));
EXPECT_TRUE(curve.setCoefficient26xTIMESYTIMESZPOW2(c26));
EXPECT_TRUE(curve.setCoefficient27xTIMESYTIMESZ(c27));
// ...all optionals are now populated...
ASSERT_TRUE(curve.coefficient1Constant());
ASSERT_TRUE(curve.coefficient2xPOW2());
ASSERT_TRUE(curve.coefficient3x());
ASSERT_TRUE(curve.coefficient4yPOW2());
ASSERT_TRUE(curve.coefficient5y());
ASSERT_TRUE(curve.coefficient6zPOW2());
ASSERT_TRUE(curve.coefficient7z());
ASSERT_TRUE(curve.coefficient8xPOW2TIMESYPOW2());
ASSERT_TRUE(curve.coefficient9xTIMESY());
ASSERT_TRUE(curve.coefficient10xTIMESYPOW2());
ASSERT_TRUE(curve.coefficient11xPOW2TIMESY());
ASSERT_TRUE(curve.coefficient12xPOW2TIMESZPOW2());
ASSERT_TRUE(curve.coefficient13xTIMESZ());
ASSERT_TRUE(curve.coefficient14xTIMESZPOW2());
ASSERT_TRUE(curve.coefficient15xPOW2TIMESZ());
ASSERT_TRUE(curve.coefficient16yPOW2TIMESZPOW2());
ASSERT_TRUE(curve.coefficient17yTIMESZ());
ASSERT_TRUE(curve.coefficient18yTIMESZPOW2());
ASSERT_TRUE(curve.coefficient19yPOW2TIMESZ());
ASSERT_TRUE(curve.coefficient20xPOW2TIMESYPOW2TIMESZPOW2());
ASSERT_TRUE(curve.coefficient21xPOW2TIMESYPOW2TIMESZ());
ASSERT_TRUE(curve.coefficient22xPOW2TIMESYTIMESZPOW2());
ASSERT_TRUE(curve.coefficient23xTIMESYPOW2TIMESZPOW2());
ASSERT_TRUE(curve.coefficient24xPOW2TIMESYTIMESZ());
ASSERT_TRUE(curve.coefficient25xTIMESYPOW2TIMESZ());
ASSERT_TRUE(curve.coefficient26xTIMESYTIMESZPOW2());
ASSERT_TRUE(curve.coefficient27xTIMESYTIMESZ());
// ...and round-trip the exact values that were set.
EXPECT_EQ(c1, curve.coefficient1Constant().get());
EXPECT_EQ(c2, curve.coefficient2xPOW2().get());
EXPECT_EQ(c3, curve.coefficient3x().get());
EXPECT_EQ(c4, curve.coefficient4yPOW2().get());
EXPECT_EQ(c5, curve.coefficient5y().get());
EXPECT_EQ(c6, curve.coefficient6zPOW2().get());
EXPECT_EQ(c7, curve.coefficient7z().get());
EXPECT_EQ(c8, curve.coefficient8xPOW2TIMESYPOW2().get());
EXPECT_EQ(c9, curve.coefficient9xTIMESY().get());
EXPECT_EQ(c10, curve.coefficient10xTIMESYPOW2().get());
EXPECT_EQ(c11, curve.coefficient11xPOW2TIMESY().get());
EXPECT_EQ(c12, curve.coefficient12xPOW2TIMESZPOW2().get());
EXPECT_EQ(c13, curve.coefficient13xTIMESZ().get());
EXPECT_EQ(c14, curve.coefficient14xTIMESZPOW2().get());
EXPECT_EQ(c15, curve.coefficient15xPOW2TIMESZ().get());
EXPECT_EQ(c16, curve.coefficient16yPOW2TIMESZPOW2().get());
EXPECT_EQ(c17, curve.coefficient17yTIMESZ().get());
EXPECT_EQ(c18, curve.coefficient18yTIMESZPOW2().get());
EXPECT_EQ(c19, curve.coefficient19yPOW2TIMESZ().get());
EXPECT_EQ(c20, curve.coefficient20xPOW2TIMESYPOW2TIMESZPOW2().get());
EXPECT_EQ(c21, curve.coefficient21xPOW2TIMESYPOW2TIMESZ().get());
EXPECT_EQ(c22, curve.coefficient22xPOW2TIMESYTIMESZPOW2().get());
EXPECT_EQ(c23, curve.coefficient23xTIMESYPOW2TIMESZPOW2().get());
EXPECT_EQ(c24, curve.coefficient24xPOW2TIMESYTIMESZ().get());
EXPECT_EQ(c25, curve.coefficient25xTIMESYPOW2TIMESZ().get());
EXPECT_EQ(c26, curve.coefficient26xTIMESYTIMESZPOW2().get());
EXPECT_EQ(c27, curve.coefficient27xTIMESYTIMESZ().get());
// Input limits: min/max for x, y and z are stored and retrieved.
EXPECT_TRUE(curve.setMinimumValueofx(min_x));
EXPECT_TRUE(curve.setMaximumValueofx(max_x));
ASSERT_TRUE(curve.minimumValueofx());
ASSERT_TRUE(curve.maximumValueofx());
EXPECT_EQ(min_x, curve.minimumValueofx().get());
EXPECT_EQ(max_x, curve.maximumValueofx().get());
EXPECT_TRUE(curve.setMinimumValueofy(min_y));
EXPECT_TRUE(curve.setMaximumValueofy(max_y));
ASSERT_TRUE(curve.minimumValueofy());
ASSERT_TRUE(curve.maximumValueofy());
EXPECT_EQ(min_y, curve.minimumValueofy().get());
EXPECT_EQ(max_y, curve.maximumValueofy().get());
EXPECT_TRUE(curve.setMinimumValueofz(min_z));
EXPECT_TRUE(curve.setMaximumValueofz(max_z));
ASSERT_TRUE(curve.minimumValueofz());
ASSERT_TRUE(curve.maximumValueofz());
EXPECT_EQ(min_z, curve.minimumValueofz().get());
EXPECT_EQ(max_z, curve.maximumValueofz().get());
// No output limits yet.
EXPECT_FALSE(curve.minimumCurveOutput());
EXPECT_FALSE(curve.maximumCurveOutput());
// x, y and z in range, no output limit
double x = 0.5;
double y = 5.0;
double z = 9.0;
EXPECT_DOUBLE_EQ(calc(x, y, z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(92296.75, curve.evaluate(x, y, z));
// x < min_x: the input is clamped to min_x before evaluation
x = 0.05;
EXPECT_DOUBLE_EQ(calc(min_x, y, z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(52520.19, curve.evaluate(x, y, z));
// x > max_x
x = 20.0;
EXPECT_DOUBLE_EQ(calc(max_x, y, z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(751098.0, curve.evaluate(x, y, z));
// y < min_y
x = 0.5;
y = 3.5;
EXPECT_DOUBLE_EQ(calc(x, min_y, z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(62231.25, curve.evaluate(x, y, z));
// y > max_y
y = 40.0;
EXPECT_DOUBLE_EQ(calc(x, max_y, z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(128310.75, curve.evaluate(x, y, z));
// z < min_z
x = 0.5;
y = 5.0;
z = 3.0;
EXPECT_DOUBLE_EQ(calc(x, y, min_z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(73991.25, curve.evaluate(x, y, z));
// z > max_z
z = 40.0;
EXPECT_DOUBLE_EQ(calc(x, y, max_z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(112624.25, curve.evaluate(x, y, z));
// x < min_x, y < min_y, z < min_z: all three inputs clamped together
x = -5.0;
y = -5.0;
z = -5.0;
EXPECT_DOUBLE_EQ(calc(min_x, min_y, min_z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(28346.4, curve.evaluate(x, y, z));
// x > max_x, y, z > max_y
x = 10.0;
y = 10.0;
z = 10.0;
EXPECT_DOUBLE_EQ(calc(max_x, max_y, max_z), curve.evaluate(x, y, z));
EXPECT_DOUBLE_EQ(1273160.0, curve.evaluate(x, y, z));
// Set output limits
double min_output = 40000.0;
double max_output = 100000.0;
EXPECT_TRUE(curve.setMinimumCurveOutput(min_output));
EXPECT_TRUE(curve.setMaximumCurveOutput(max_output));
ASSERT_TRUE(curve.minimumCurveOutput());
ASSERT_TRUE(curve.maximumCurveOutput());
EXPECT_EQ(min_output, curve.minimumCurveOutput().get());
EXPECT_EQ(max_output, curve.maximumCurveOutput().get());
// out < min output: result is clamped up to the minimum output
EXPECT_DOUBLE_EQ(min_output, curve.evaluate(min_x, min_y, min_z));
// out > max output: result is clamped down to the maximum output
EXPECT_DOUBLE_EQ(max_output, curve.evaluate(max_x, max_y, max_z));
// Wrong number of arguments
// EXPECT_THROW(curve.evaluate(1.0), openstudio::Exception);
// EXPECT_THROW(curve.evaluate(1.0, 2.0), openstudio::Exception);
}
|
<reponame>godbobo/Admin
package com.aqzscn.www.global.domain.vo;
import com.aqzscn.www.global.config.validation.ValidationGroup3;
import com.aqzscn.www.global.mapper.User;
import com.fasterxml.jackson.annotation.JsonFilter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import javax.validation.constraints.NotBlank;
/**
* @author Godbobo
* @date 2019/5/27
*/
@ApiModel("用户请求实体类")
@JsonFilter("UserFilter")
@Data
@EqualsAndHashCode(callSuper = true)
public class UserRequest extends User {
// Role name; must be non-blank when validated under ValidationGroup3.
@ApiModelProperty("角色名")
@NotBlank(message = "角色名不能为空", groups = {ValidationGroup3.class})
private String roleName;
// E-mail verification code (optional; no validation constraint applied).
@ApiModelProperty("邮箱验证码")
private String mailCode;
}
|
const mysql = require('mysql2/promise');
/**
 * Connects to MySQL and prints row-count and size statistics for every table
 * in the configured database.
 */
async function main() {
  const connection = await mysql.createConnection({
    host: 'HOST_NAME',
    user: 'USERNAME',
    database: 'DATABASE_NAME',
    password: 'PASSWORD'
  });
  try {
    // Get list of tables in the database
    const [tables] = await connection.execute('SHOW TABLES');
    for (const table of tables) {
      // Each SHOW TABLES row has a single column named "Tables_in_<dbname>".
      // Fixed: the original read table['Tables_in_' + process.env.DATABASE],
      // which is undefined unless an unrelated DATABASE env var happens to
      // match the configured database; take the row's only value instead.
      const tableName = Object.values(table)[0];
      // Parameterized query: tableName is bound, never interpolated into SQL.
      const [size] = await connection.execute(
        `SELECT table_rows,data_length,index_length,table_name FROM
        information_schema.tables WHERE table_name = ?`,
        [tableName]);
      // Print out the size
      console.log(size);
    }
  } finally {
    // Fixed: mysql2/promise connections are closed with end(); close() is
    // not part of the promise API. Always runs, even after a query error.
    await connection.end();
  }
}
// Fixed: surface failures instead of an unhandled promise rejection.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
|
#include "App.h"
#include "Input.h"
#include "Textures.h"
#include "Audio.h"
#include "Render.h"
#include "Window.h"
#include "Scene.h"
#include "Defs.h"
#include "Log.h"
#define MARGIN 50
// Constructor: registers this module's identifier with the engine.
Scene::Scene() : Module()
{
name.Create("scene");
}
// Destructor; intentionally empty (no cleanup visible here --
// NOTE(review): heap-allocated Bullets are not freed in this view, confirm
// they are released elsewhere).
Scene::~Scene()
{}
// Called before render is available
// Nothing to configure yet; logs and reports success.
bool Scene::Awake()
{
LOG("Loading Scene");
bool ret = true;
return ret;
}
// Called before the first frame
// Loads the scene's textures and laser sound effect, caches texture
// dimensions, and centres the bullet spawn point ("player") on screen.
bool Scene::Start()
{
// Load Textures and Fx
bg = app->tex->Load("Assets/Textures/speaker.png");
laserR = app->tex->Load("Assets/Textures/bullet_red.png");
laserB = app->tex->Load("Assets/Textures/bullet_blue.png");
laserFx = app->audio->LoadFx("Assets/Audio/Fx/fx_laser.wav");
// Get dimension of textures
SDL_QueryTexture(bg, NULL, NULL, &dimensionBg.x, &dimensionBg.y);
SDL_QueryTexture(laserR, NULL, NULL, &dimensionLaserR.x, &dimensionLaserR.y);
SDL_QueryTexture(laserB, NULL, NULL, &dimensionLaserB.x, &dimensionLaserB.y);
// Centre on screen, offset by half the red-laser sprite size so the sprite
// itself (not its corner) sits at the screen centre.
player.x = (WINDOW_WIDTH * 0.5) - (dimensionLaserR.x * 0.5);
player.y = (WINDOW_HIGHT * 0.5) - (dimensionLaserR.y * 0.5);
return true;
}
// Called each loop iteration
bool Scene::PreUpdate()
{
ListItem<Bullet*>* item;
for (item = bullets.start; item != NULL; item = item->next)
{
if(item->data->pendingToDelete == true)
DeleteBody(item->data);
}
return true;
}
// Called each loop iteration
// Handles input (spawn bullets, play music), advances bullet positions, and
// flags off-screen bullets for deletion in the next PreUpdate().
bool Scene::Update(float dt)
{
ListItem<Bullet*>* item;
// Create one bullet
// NOTE(review): each "Play Fx" loop below replays the laser sound once per
// LIVE bullet, not once per new bullet -- confirm this is intended.
if (app->input->GetKey(SDL_SCANCODE_SPACE) == KEY_DOWN)
{
AddBullet();
// Play Fx
for (item = bullets.start; item != NULL; item = item->next)
{
app->audio->PlayFx(item->data->channel, laserFx);
}
}
// Explosion bullets all direction (360 degrees in offsetAngle steps)
if (app->input->GetKey(SDL_SCANCODE_A) == KEY_DOWN)
{
for (int i = 0; i < 360 / offsetAngle; i++)
{
AddBullet();
// Play Fx
for (item = bullets.start; item != NULL; item = item->next)
{
app->audio->PlayFx(item->data->channel, laserFx);
}
}
}
// Create one bullet in the direction of the mouse
if (app->input->GetMouseButtonDown(SDL_BUTTON_LEFT)==KEY_DOWN)
{
iPoint posMouse;
app->input->GetMousePosition(posMouse.x, posMouse.y);
// atan2 gives the player->mouse angle in degrees; +180 flips it so the
// bullet travels from the player toward the mouse.
float angle = atan2(player.y - posMouse.y, player.x - posMouse.x) * 180 / PI;
AddBullet(angle + 180);
// Play Fx
for (item = bullets.start; item != NULL; item = item->next)
{
app->audio->PlayFx(item->data->channel, laserFx);
}
}
// Update entities positions: advance each bullet along its angle.
for (item = bullets.start; item != NULL; item = item->next)
{
item->data->pos.x += speed * cos(item->data->angle * PI/180);
item->data->pos.y += speed * sin(item->data->angle * PI/180);
}
// TODO 5: Update distance and direction
// 0 = directly in front / 90 = directly to the right / 180 = directly behind / 270 = directly to the left.
for (item = bullets.start; item != NULL; item = item->next)
{
}
// TODO 7: Up/Down Music
// NOTE(review): these four braceless `if`s nest -- each one guards the next
// statement, so the Z-key block below only runs when ALL of KP_PLUS,
// KP_MINUS, UP, DOWN and Z are pressed in the same frame. Presumably they
// are placeholders for the TODO; confirm before relying on the music keys.
if (app->input->GetKey(SDL_SCANCODE_KP_PLUS) == KEY_DOWN)
if (app->input->GetKey(SDL_SCANCODE_KP_MINUS) == KEY_DOWN)
if (app->input->GetKey(SDL_SCANCODE_UP) == KEY_DOWN)
if (app->input->GetKey(SDL_SCANCODE_DOWN) == KEY_DOWN)
// Play Music
if (app->input->GetKey(SDL_SCANCODE_Z) == KEY_DOWN)
{
app->audio->PlayMusic("Assets/Audio/Music/galactic_empire.ogg");
}
if (app->input->GetKey(SDL_SCANCODE_X) == KEY_DOWN)
{
app->audio->PlayMusic("Assets/Audio/Music/imperial_march.ogg");
}
// Check if the bullet is off-camera (with a MARGIN-pixel grace band) and
// flag it; PreUpdate() destroys flagged bullets next frame.
for (item = bullets.start; item != NULL; item = item->next)
{
if (item->data->pos.x > WINDOW_WIDTH + MARGIN || item->data->pos.x < 0 - MARGIN
|| item->data->pos.y > WINDOW_HIGHT + MARGIN || item->data->pos.y < 0 - MARGIN)
{
item->data->pendingToDelete = true;
}
}
// TODO 6: If an entity has been released a channel too
// TODO 6: If no channel is playing reassign the channels
return true;
}
// Called each loop iteration
bool Scene::PostUpdate()
{
bool ret = true;
// Draw background
app->render->DrawTexture(bg, (WINDOW_WIDTH * 0.5) - (dimensionBg.x * 0.5), (WINDOW_HIGHT * 0.5) - (dimensionBg.y * 0.5));
// Draw all bullets
ListItem<Bullet*>* item;
for (item = bullets.start; item != NULL; item = item->next)
{
app->render->DrawTexture(item->data->laserTex, item->data->pos.x, item->data->pos.y, 0, 1, item->data->angle);
}
if(app->input->GetKey(SDL_SCANCODE_ESCAPE) == KEY_DOWN)
ret = false;
return ret;
}
// Spawns one bullet at the player's position.
// angle: travel direction in degrees. A value of -1 (presumably the
// default in the header -- confirm) means "auto": the first bullet fires
// at 0 degrees and each later one is offset from the previously spawned
// bullet by offsetAngle.
void Scene::AddBullet(float angle)
{
	// Create new entity
	Bullet* b = new Bullet;
	bullets.Add(b);
	// Alternate textures: even bullet count -> red laser, odd -> blue
	if (bullets.Count() % 2 == 0)
		b->laserTex = laserR;
	else
		b->laserTex = laserB;
	// Assign position
	b->pos.x = player.x;
	b->pos.y = player.y;
	// Assign direction
	if (angle == -1)
	{
		if (bullets.Count() == 1)
			b->angle = 0;
		else
			// bullets.end is the just-added node, so end->prev holds the
			// previously spawned bullet.
			b->angle = bullets.end->prev->data->angle + offsetAngle;
	}
	else
		b->angle = angle;
	// TODO 3: Assign new channel
}
// Euclidean distance between the player and a channel emitter position,
// truncated to an int.
// NOTE(review): the float components of `channel` are truncated when the
// differences are stored into the int-based iPoint -- confirm this loss
// of precision is intended.
int Scene::DistanceToListener(iPoint player, fPoint channel)
{
	iPoint pos;
	pos.x = player.x - channel.x;
	pos.y = player.y - channel.y;
	return sqrt(pow(pos.x,2)+pow(pos.y,2));
}
// Delete one bullet
// Removes the list node that owns `body` from the bullet list.
// NOTE(review): the Bullet object itself is not deleted here -- confirm
// List::Del releases the data, otherwise this leaks.
void Scene::DeleteBody(Bullet* body)
{
	ListItem<Bullet*>* item;
	for (item = bullets.start; item != NULL; item = item->next)
	{
		if (item->data == body)
		{
			bullets.Del(item);
			// TODO 6: Notify the audio manager that a channel can be released
			break;
		}
	}
}
// Called before quitting
// Releases the scene textures and clears the bullet list.
// NOTE(review): bullets.Clear() clears the list nodes; confirm the Bullet
// objects allocated in AddBullet are freed somewhere.
bool Scene::CleanUp()
{
	LOG("Freeing scene");
	app->tex->UnLoad(bg);
	app->tex->UnLoad(laserR);
	app->tex->UnLoad(laserB);
	bullets.Clear();
	return true;
}
|
"use strict";

/**
 * Renames a scope binding from `oldName` to `newName`.
 *
 * Updates, in order: every constant-violation binding identifier, every
 * reference path whose node still carries the old name, and finally the
 * binding identifier itself plus the scope's bookkeeping. No-op when the
 * two names are identical.
 *
 * @param {Object} scope   Babel scope owning the binding.
 * @param {string} oldName Current binding name.
 * @param {string} newName Replacement binding name.
 */
function rename(scope, oldName, newName) {
  if (oldName === newName) {
    return;
  }

  const binding = scope.getBinding(oldName);

  // Rename all constant violations.
  for (const violation of binding.constantViolations) {
    const bindingIds = violation.getBindingIdentifierPaths(true, false);
    for (const name in bindingIds) {
      if (name !== oldName) {
        continue;
      }
      for (const idPath of bindingIds[name]) {
        idPath.node.name = newName;
      }
    }
  }

  // Rename all references.
  for (const refPath of binding.referencePaths) {
    if (refPath.node.name === oldName) {
      refPath.node.name = newName;
    }
  }

  // Rename the binding identifier, and update the scope.
  scope.removeOwnBinding(oldName);
  scope.bindings[newName] = binding;
  binding.identifier.name = newName;
}

module.exports = rename;
|
#!/bin/bash
# Run the masking step only when the target device is iOS.
case "$DEV" in
  iOS)
    python acpython.py "$HOME/Desktop/$APPNM.$IMGEXT" --mask
    ;;
esac
|
#!/bin/bash
# genconfig: Generate configuration files for the system
# Load the Alexandria environment (provides BASECONFIG, LOCALCONFIG,
# VENVPY, ABINDIR, AVARDIR, ARUNDIR, ALEXANDRIAPATH).
. /etc/alexandria-env
# TODO: Firstrun code.
# Base invocation of the templating tool; each call below adds -outfile
# plus the template name(s) to render.
TOOL="$VENVPY $ABINDIR/cfgtool.py -baseconfig ${BASECONFIG} -localconfig ${LOCALCONFIG}"
# We're going to start by configuring Debian's interface file and getting things
# set up here.
# Exported so the per-service .cfg scripts run below can reuse them.
export CFGTOOL=$TOOL
export AVARDIR
export ABINDIR
export ARUNDIR
export ALEXANDRIAPATH
export VENVPY
# We need to generate the config for supervisor here.
# To do that, we need to generate the configuration for services.
$TOOL -outfile ${ARUNDIR}/services
# Source the freshly generated file to populate $SERVICES.
. ${ARUNDIR}/services
$TOOL -outfile /etc/network/interfaces debian_interfaces
#$TOOL -outfile /etc/network/interfaces \
#	debian_loopback \
#	debian_interface:wlan0 \
#	debian_interface:eth0
$TOOL -outfile ${AVARDIR}/dnsmasq.conf dnsmasq
$TOOL -outfile ${AVARDIR}/hostapd.conf hostapd
# later: $AVARDIR/conf.d will contain some things we care about.
# These include things that may have been generated by another page here.
# Run each service's own configuration hook, if it ships one; failures are
# reported to stderr but do not abort the remaining services.
for service in $SERVICES; do
	if [ -f ${ALEXANDRIAPATH}/services.d/${service}.cfg ]; then
		# try to run it.
		${ALEXANDRIAPATH}/services.d/${service}.cfg || echo "Failed to run configuration script for ${service}">/dev/stderr
	fi
done
# now, make sure supervisord has the right information
$TOOL -outfile ${ARUNDIR}/supervisord.conf supervisord_services
# now we run the configure script for mounting the disks.
${ABINDIR}/mount-media.sh
# End genconfig
|
import React, {Component} from 'react'
import {connect} from 'react-redux'
import {AlbumViewList, AlbumViewCard, LoadingScreen} from '../components'
import {withRouter} from 'react-router'
import {getRecs, sortAlbums, getBest} from '../store'
import Nav from 'react-bootstrap/Nav'
import NavDropdown from 'react-bootstrap/NavDropdown'
import Container from 'react-bootstrap/Container'
import Tabs from 'react-bootstrap/Tabs'
import Tab from 'react-bootstrap/Tab'
import history from '../history'
// Discover page: shows recommended or chart albums in a grid or list,
// selectable via tabs ("for-you", "best-rated", "most-popular") driven by
// the :page route param.
export class Discover extends Component {
  constructor() {
    super()
    // display: 'grid' | 'list' rendering mode; isLoading gates the
    // loading screen until the first fetch resolves.
    this.state = {
      display: 'grid',
      isLoading: true
    }
    this.setDisplay = this.setDisplay.bind(this)
    this.handleSort = this.handleSort.bind(this)
  }

  // Fetch albums for the current route page, then hide the loading screen.
  async componentDidMount() {
    const page = this.props.match.params.page
    if (page === 'for-you') await this.props.getRecs()
    else await this.props.getBest(page)
    this.setState({
      isLoading: false
    })
  }

  // Re-fetch when the route's page segment changes; also clears a stale
  // isLoading flag after any update.
  async componentDidUpdate(prevProps) {
    if (prevProps.match.params.page !== this.props.match.params.page) {
      const page = this.props.match.params.page
      if (page === 'for-you') await this.props.getRecs()
      else await this.props.getBest(page)
    }
    if (this.state.isLoading) {
      this.setState({
        isLoading: false
      })
    }
  }

  // Switch between 'grid' and 'list' rendering (NavDropdown eventKey).
  setDisplay(display) {
    this.setState({
      display: display
    })
  }

  // Dispatch a store-level sort by the chosen key ('artist' | 'name').
  handleSort(sortKey) {
    this.props.sortAlbums(sortKey)
  }

  // Tab selection navigates to the matching /discover/:page route.
  // (Not bound in the constructor: it uses no `this`.)
  handleSelect(eventKey) {
    history.push(`/discover/${eventKey}`)
  }

  render() {
    const page = this.props.match.params.page
    if (this.state.isLoading) {
      return <LoadingScreen />
    } else
      return (
        <div>
          <br />
          <Container>
            <h2>Discover</h2>
            <Tabs
              activeKey={page}
              id="uncontrolled-tab-example"
              onSelect={this.handleSelect}
            >
              <Tab eventKey="for-you" title="For You" />
              <Tab eventKey="best-rated" title="Best Rated" />
              <Tab eventKey="most-popular" title="Most Popular" />
            </Tabs>
          </Container>
          {this.props.albums.length === 0 ? (
            <Container>
              <br />
              <h1>There are no recommendations</h1>
            </Container>
          ) : (
            <>
              <Container>
                <br />
                <Nav>
                  <NavDropdown
                    className="view-dropdown"
                    title="View"
                    id="nav-dropdown-view"
                    onSelect={this.setDisplay}
                  >
                    <NavDropdown.Item eventKey="list">List</NavDropdown.Item>
                    <NavDropdown.Item eventKey="grid">Grid</NavDropdown.Item>
                  </NavDropdown>
                  <NavDropdown
                    className="sort-dropdown"
                    title="Sort"
                    id="nav-dropdown-sort"
                    onSelect={this.handleSort}
                  >
                    <NavDropdown.Item eventKey="artist">
                      Artist Name
                    </NavDropdown.Item>
                    <NavDropdown.Item eventKey="name">
                      Album Name
                    </NavDropdown.Item>
                  </NavDropdown>
                </Nav>
                <br />
              </Container>
              {this.state.display === 'list' ? (
                <Container>
                  <AlbumViewList discover={true} />
                </Container>
              ) : (
                <Container>
                  <AlbumViewCard discover={true} />
                </Container>
              )}
            </>
          )}
        </div>
      )
  }
}
// Expose the albums slice of the redux store as a component prop.
const mapState = ({albums}) => ({albums})
// Bind the store thunks to dispatch and expose them as props.
const mapDispatch = dispatch => {
  return {
    getRecs: () => dispatch(getRecs()),
    sortAlbums: sortKey => dispatch(sortAlbums(sortKey)),
    getBest: pageKey => dispatch(getBest(pageKey))
  }
}

export default withRouter(connect(mapState, mapDispatch)(Discover))
|
#!/bin/sh
# Generate images.html for every test-set category and print a short
# summary (file listing plus first 10 lines) for each.
# Refactored: the original repeated the same four-line stanza verbatim
# for each category; paths are now quoted so spaces cannot break them.
base="$FLERE_IMSAHO/data/playlists/output/test-set"
for category in true-positives true-negatives false-positives false-negatives; do
    directory="$base/$category"
    #echo "$directory"
    python3 "$FLERE_IMSAHO/python/generate-html.py" "$directory"
    theFile="$directory/images.html"
    ls -l "$theFile"
    head -n 10 "$theFile"
done
|
def pairSums(arr, k):
    """Return the set of distinct pairs from ``arr`` that sum to ``k``.

    Each pair is ordered as ``(earlier_element, later_element)`` by index,
    matching the original nested-loop output exactly.

    Args:
        arr: Sequence of hashable numbers (may be empty).
        k: Target sum.

    Returns:
        set[tuple]: Distinct ``(a, b)`` tuples with ``a + b == k`` where
        ``a`` occurs before ``b`` in ``arr``.
    """
    result_set = set()
    # Single pass with a "seen" set replaces the original O(n^2) double
    # loop: (k - value) was seen earlier iff some earlier index pairs with
    # the current one, which yields the same (earlier, later) tuples.
    seen = set()
    for value in arr:
        complement = k - value
        if complement in seen:
            result_set.add((complement, value))
        seen.add(value)
    return result_set
|
<reponame>meetbill/baichuan_go_worker
package gores
import (
"encoding/json"
"fmt"
"os"
)
// Config contains the configuration parameters for running Gores.
// Field names double as the JSON keys expected in the config file
// decoded by InitConfig.
type Config struct {
	// Authetication for Redis connection
	RedisURL      string
	RedisPassword string
	// Maximum number of idle connections in the Redis pool
	RedisMaxIdle int
	// Redigo closes connection after it remains idle for this duration
	RedisIdleTimeout int
	// Conn blocks for this duration when trying to pop items from several queues from Redis
	BlpopMaxBlockTime int
	// Maximum number of workers needed for Gores
	MaxWorkers int
	// names of queues to fetch jobs from
	Queues []string
	// Dispatcher returns after it did not have jobs to dispatch for this duration
	DispatcherTimeout int
	// Worker returns after it did not have job to work on after this duration
	WorkerTimeout int
}
// InitConfig creates a new Config instance decoded from the JSON file at
// confPath. On failure a pointer to a zero-value Config is returned
// together with a wrapping error.
func InitConfig(confPath string) (*Config, error) {
	config := Config{}
	configFile, err := os.Open(confPath)
	if err != nil {
		return &config, fmt.Errorf("init config failed: %s", err)
	}
	// FIX: the original never closed the file, leaking the descriptor on
	// both the success and the decode-error paths.
	defer configFile.Close()
	decoder := json.NewDecoder(configFile)
	err = decoder.Decode(&config)
	if err != nil {
		return &config, fmt.Errorf("init config failed: %s", err)
	}
	return &config, nil
}
|
#!/usr/bin/env bash
# Scaffold the Python package layout under machine_learning_with_python:
# creates each package directory and drops an __init__.py inside it.
# Refactored: the original repeated mkdir/touch pairs by hand (and the
# `|| true` after `mkdir -p` was redundant -- -p never fails on existing
# directories).
cd machine_learning_with_python

# Packages that get both a directory and an __init__.py.
packages=(
  db db/migrations db/queries db/repositories
  models models/domain models/schemas
  resources services downloaders
)

# EG: https://github.com/bergran/places/blob/master/apps/places/generators/places.py
# generators intentionally gets no __init__.py (matches original behavior).
mkdir -p generators

for pkg in "${packages[@]}"; do
  mkdir -p "$pkg"
  touch "$pkg/__init__.py"
done

cd -
|
/**
 * Greatest common divisor via the Euclidean algorithm, iteratively.
 * Mirrors the recursive original exactly, including the {@code a + b}
 * shortcut when either argument reaches zero (so gcd(x, 0) == x and
 * gcd(0, 0) == 0).
 */
public static long gcd(long a, long b) {
    while (a != 0 && b != 0) {
        long remainder = a % b;
        a = b;
        b = remainder;
    }
    return a + b;
}
|
def html_to_text(html_string):
    """Strip everything between '<' and '>' from ``html_string``.

    Naive character scanner: once a '<' is seen, characters are dropped
    until the next '>'. No entity decoding is performed, and text after
    an unclosed '<' is discarded entirely (matching the original).

    Args:
        html_string: Markup text to strip.

    Returns:
        str: The input with tag spans removed.
    """
    kept = []
    inside_tag = False
    for ch in html_string:
        if ch == '<':
            inside_tag = True
        elif ch == '>':
            inside_tag = False
        elif not inside_tag:
            kept.append(ch)
    return ''.join(kept)
|
#!/bin/bash -x
# Demo of string comparison with test/[ ]. `number` is assigned the empty
# string, so the else branch always fires; -x traces each command.
number=
if [ "$number" = "1" ]; then
    echo "Number equals 1"
else
    echo "Number does not equal 1"
fi
|
#!/bin/bash
# Renames Boost to Autoboost to avoid namespace collisions
# Usage: script [directory]  (defaults to the current directory)
if [[ -n "$1" ]]; then
    directory=$1
else
    directory="."
fi
# Collect all C/C++ headers and sources under the target directory.
# NOTE(review): the unquoted backtick expansion word-splits, so file
# paths containing whitespace would break -- confirm the tree has none.
filelist=`find ${directory} -iname "*.h" -o -iname "*.cpp" -o -iname "*.hpp" -o -iname "*.ipp"`
# Replace boost -> autoboost (and BOOST -> AUTOBOOST) in place, but only
# when the match is not preceded by '.' or 'o' (resp. 'O'), per the
# character classes below.
sed -ri 's/([^\.o])boost/\1autoboost/g' ${filelist}
sed -ri 's/([^\.O])BOOST/\1AUTOBOOST/g' ${filelist}
# Strip trailing whitespace from every line.
sed -ri 's/[[:space:]]*$//g' ${filelist}
|
import { Component, OnInit, OnDestroy, Injectable } from '@angular/core';
import { Router } from '@angular/router';
import { Response } from '@angular/http';
import { DataTable, Column, Header, Footer} from 'primeng/primeng';
import { LazyLoadEvent } from 'primeng/primeng';
import { Message, Growl } from 'primeng/primeng';
import { Permission } from './permission';
import { PermissionService } from './permission.service';
@Component({
    selector: 'permission',
    templateUrl: 'resources/angular2/login/menu_app/permission/permission.html',
    styleUrls: ['resources/angular2/login/menu_app/permission/permission.css'],
    directives: [ DataTable, Column, Growl ],
    providers: [ PermissionService ]
})
// CRUD listing screen for permissions backed by a PrimeNG lazy-loaded
// DataTable; row clicks navigate to the edit view.
export class PermissionComponent implements OnInit
{
    title = 'Permissions';
    // Current page of permissions shown in the table.
    permissions: Permission[];
    selectedPermission: Permission;
    // Total row count reported by the server (drives table paging).
    totalRecords: number = 0;
    // Growl messages displayed to the user.
    msgs: Message[] = [];
    // Last lazy-load request, kept so the table can be refreshed in place
    // after a delete.
    lastLazyLoadEvent: LazyLoadEvent = null;

    constructor(private permissionService : PermissionService, private router: Router)
    {
    }

    ngOnInit()
    {
    }

    // Navigate to the edit form for the clicked row.
    onRowClick(event)
    {
        this.router.navigate(['/permissions/edit/', event.data.id]);
    }

    onCreate(event)
    {
        this.router.navigate(['/permissions/create']);
    }

    // Delete a permission; stops row-click propagation, removes it from
    // the local list, then reloads the current page from the server.
    onDelete(perm: Permission, event: any)
    {
        event.stopPropagation();
        this.permissionService
            .delete(perm)
            .then((result: {}) => {
                this.permissions = this.permissions.filter(p => p !== perm);
                if (this.selectedPermission === perm)
                    this.selectedPermission = null;
                if (this.lastLazyLoadEvent)
                    this.loadData(this.lastLazyLoadEvent);
            }).catch(error => this.msgs.push({severity:'error', summary:'Error Message', detail:error.error}));
    }

    // Fetch one page of permissions for the DataTable.
    loadData(event: LazyLoadEvent)
    {
        //event.first = First row offset
        //event.rows = Number of rows per page
        //event.sortField = Field name to sort in single sort mode
        //event.sortOrder = Sort order as number, 1 for asc and -1 for dec in single sort mode
        //multiSortMeta: An array of SortMeta objects used in multiple columns sorting. Each SortMeta has field and order properties.
        //filters: Filters object having field as key and filter value, filter matchMode as value
        this.lastLazyLoadEvent = event;
        this.permissionService.getObjects(event.first, event.rows, event.sortField, event.sortOrder, event.filters)
            .subscribe(result => {this.permissions = result.data; this.totalRecords = result.total;},
                       error => this.msgs.push({severity:'error', summary:'Error Message', detail:error}));
    }
}
|
<reponame>blinkcat/LeetCode
/**
 * LeetCode 399 "Evaluate Division", solved with one DFS per connected
 * component of the division graph.
 *
 * Each node is assigned a representative root and a weight relative to
 * that root, so a query a/b inside one component is vals(a) / vals(b);
 * queries across components (or with unknown variables) yield -1.
 *
 * @param {string[][]} equations
 * @param {number[]} values
 * @param {string[][]} queries
 * @return {number[]}
 */
var calcEquation = function(equations, values, queries) {
    // Adjacency: node -> Map(neighbor -> value of node / neighbor).
    const edges = new Map();
    // node -> representative root of its component.
    const root = new Map();
    // node -> weight relative to the component root.
    const vals = new Map();
    // Nodes already assigned a root/weight; skip them in later DFS runs.
    const seen = new Set();

    const addEdge = (from, to, weight) => {
        if (!edges.has(from)) {
            edges.set(from, new Map());
        }
        edges.get(from).set(to, weight);
    };

    for (let i = 0; i < equations.length; i++) {
        const [a, b] = equations[i];
        addEdge(a, b, values[i]);
        addEdge(b, a, 1 / values[i]);
    }

    const dfs = (node, rep, weight) => {
        vals.set(node, weight);
        root.set(node, rep);
        seen.add(node);
        for (const next of edges.get(node).keys()) {
            if (!seen.has(next)) {
                dfs(next, rep, weight * edges.get(next).get(node));
            }
        }
    };

    for (const node of edges.keys()) {
        if (!seen.has(node)) {
            dfs(node, node, 1);
        }
    }

    return queries.map(([a, b]) => {
        if (!root.has(a) || !root.has(b) || root.get(a) != root.get(b)) {
            return -1;
        }
        return vals.get(a) / vals.get(b);
    });
};
/**
* @see https://leetcode.com/problems/evaluate-division/discuss/278276/Java-Union-find-and-DFS
* @see https://leetcode.com/problems/evaluate-division/discuss/147281/Java-Union-Find-solution-faster-than-99
* 第一种方法,dfs。
*/
|
// Define the AutocompleteProvider protocol
protocol AutocompleteProvider {
    // Returns autocomplete suggestions for the given prefix.
    func suggestions(for prefix: String) -> [String]
}

// FIX: the original extended `AutocompleteSuggestionProvider`, a name that
// was never declared, so the file did not compile. Declare it as the
// typealias the comments referred to.
typealias AutocompleteSuggestionProvider = AutocompleteProvider

// Default implementation of the suggestions method for every conformer.
extension AutocompleteSuggestionProvider {
    func suggestions(for prefix: String) -> [String] {
        // Demonstration data source; a real implementation would query an
        // index or backing store.
        let words = ["apple", "application", "apricot", "banana", "bat", "cat", "dog", "elephant"]
        let suggestedWords = words.filter { $0.hasPrefix(prefix) }
        return suggestedWords
    }
}

// FIX: a protocol cannot be instantiated (`AutocompleteProvider()` did not
// compile); a concrete adopter relying on the default implementation is
// needed instead.
struct DefaultAutocompleteProvider: AutocompleteSuggestionProvider {}

// Usage example
let autocompleteProvider: AutocompleteSuggestionProvider = DefaultAutocompleteProvider()
let prefix = "app"
let suggestions = autocompleteProvider.suggestions(for: prefix)
print("Autocomplete suggestions for prefix '\(prefix)': \(suggestions)")
|
package com.devin.client.mysise.model.parse;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.devin.client.mysise.model.base.WebBody;
import com.devin.client.mysise.model.bean.DetailSubject;
import com.devin.client.mysise.model.bean.OpenSubject;
import com.devin.client.mysise.model.bean.OpenSubjects;
import com.devin.client.mysise.model.url.SiseURL;
import com.devin.client.mysise.model.url.Url;
// Scrapes the "open subjects" (selectable courses) table from the SISE
// student portal using Jsoup.
// NOTE(review): all state is static and mutable -- repeated calls to
// getOpenSubjects() keep appending to the same OpenSubjects instance;
// confirm that is intended before reusing this class.
public class ParseOpenSubject {
    private static OpenSubjects openSubjects = new OpenSubjects();
    private static Document document;

    // Loads and parses the course-selection page, returning the
    // accumulated subject list.
    public static OpenSubjects getOpenSubjects(){
        init();
        return openSubjects;
    }

    // Fetches the page as the logged-in student and parses every row.
    private static void init(){
        WebBody.initStudent(Url.selectClassCourseURL);
        document = WebBody.getDocument();
        parseAllOpenSubject();
    }

    // One table row with class "tablebody" cells == one open subject.
    private static void parseAllOpenSubject(){
        Elements elements = document.select("table").select("tr");
        for(Element element : elements){
            if(element.select("td[class=tablebody]").hasText())
                openSubjects.getSubjects().add(getOpenSubject(element.select("td[class=tablebody]")));
        }
    }

    // Maps the row's cells (id, name, score, exam mode, preview) plus the
    // detail link into an OpenSubject. The relative href is rebased onto
    // SiseURL.URL.
    private static OpenSubject getOpenSubject(Elements elements){
        OpenSubject openSubject = new OpenSubject();
        String sub = elements.select("a").attr("href");
        openSubject.setUrl(SiseURL.URL + sub.substring(1, sub.length()));
        openSubject.setId(elements.get(0).text());
        openSubject.setName(elements.get(1).text());
        openSubject.setScore(elements.get(2).text());
        openSubject.setExammode(elements.get(3).text());
        openSubject.setPreview(elements.get(4).text());
        return openSubject;
    }

    //android onItemClick
    // Resolves the subject at the tapped list position to its detail page.
    public static DetailSubject getDetailSubject(int position){
        String url = openSubjects.getSubjects().get(position).getUrl();
        return ParseDetailSubject.getDetailSubject(url);
    }
}
|
// Barrel module: re-export the collider implementations from one entry point.
export * from './BoxCollider';
export * from './SweptBoxCollider';
|
<gh_stars>0
package com.example.myshoppingmall;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import java.util.List;
// RecyclerView adapter for reward coupons. Two layouts are supported:
// the full rewards list and a compact "mini" variant used inside the
// product-details coupon dialog (useMiniLayout == true).
public class MyRewardsAdapter extends RecyclerView.Adapter<MyRewardsAdapter.Viewholder> {
    private List<RewardModel> rewardModelList;
    // true -> inflate mini_rewards_item_layout and make rows clickable.
    private Boolean useMiniLayout = false;

    public MyRewardsAdapter(List<RewardModel> rewardModelList, Boolean useMiniLayout) {
        this.rewardModelList = rewardModelList;
        this.useMiniLayout = useMiniLayout;
    }

    @NonNull
    @Override
    public Viewholder onCreateViewHolder(@NonNull ViewGroup viewGroup, int i) {
        View view;
        // Pick the row layout according to the adapter mode.
        if(useMiniLayout){
            view = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.mini_rewards_item_layout, viewGroup, false);
        } else {
            view = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.rewards_item_layout, viewGroup, false);
        }
        return new Viewholder(view);
    }

    @Override
    public void onBindViewHolder(@NonNull Viewholder viewholder, int position) {
        String title = rewardModelList.get(position).getTitle();
        String date = rewardModelList.get(position).getExpiryDate();
        String body = rewardModelList.get(position).getCouponBody();
        viewholder.setData(title, date, body);
    }

    @Override
    public int getItemCount() {
        return rewardModelList.size();
    }

    public class Viewholder extends RecyclerView.ViewHolder{
        private TextView couponTitle;
        private TextView couponExpiryDate;
        private TextView couponBody;

        public Viewholder(@NonNull View itemView) {
            super(itemView);
            couponTitle = itemView.findViewById(R.id.coupon_title);
            couponExpiryDate = itemView.findViewById(R.id.coupon_validity);
            couponBody = itemView.findViewById(R.id.coupon_body);
        }

        // Fills the row; in mini mode a click copies the coupon into the
        // ProductDetailsActivity and opens its dialog.
        // NOTE(review): writing to ProductDetailsActivity's static views
        // couples this adapter to that activity's lifecycle -- confirm the
        // views are non-null whenever mini rows are clickable.
        private void setData(final String title, final String date, final String body){
            couponTitle.setText(title);
            couponExpiryDate.setText(date);
            couponBody.setText(body);
            if(useMiniLayout){
                itemView.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        ProductDetailsActivity.couponTitle.setText(title);
                        ProductDetailsActivity.couponExpiryDate.setText(date);
                        ProductDetailsActivity.couponBody.setText(body);
                        ProductDetailsActivity.showDialogRecyclerView();
                    }
                });
            }
        }
    }
}
|
# Fit a simple linear regression over toy housing data.
import numpy as np
from sklearn import linear_model

# Each row: three numeric house features -- presumably
# [square footage, bedrooms, bathrooms]; confirm against the data source.
house_features = [[1050, 3, 1], [1350, 5, 2], [1500, 4, 2]]
# Target prices for the three houses (units unspecified -- TODO confirm).
target_prices = [750, 1000, 950]

model = linear_model.LinearRegression()
model.fit(house_features, target_prices)
|
<gh_stars>1-10
'use strict'
const SLACK = require('slack')
const HDB = require('hearthstone-db');
const _ = require('lodash')
const CONFIG = require('./config')
const BOT = SLACK.rtm.client()
// Capture the bot's own identity from the RTM start payload so incoming
// messages that @-mention the bot can be recognised below.
BOT.started((payload) => {
  this.self = payload.self
})

// Main message handler: replies to "@bot: card <name>" with the card's
// image URL, otherwise with the default flavour line.
BOT.message((msg) => {
  let msgPost = 'We must cleanse the Sunwell';
  if (!msg.user) return
  // Ignore messages that do not @-mention this bot.
  if (!_.includes(msg.text.match(/<@([A-Z0-9])+>/igm), `<@${this.self.id}>`)) return
  // Expected format "<@BOT>: command args..." -- take the part after ':'.
  // NOTE(review): a mention without a colon makes split(':')[1] undefined
  // and .trim() throw -- confirm upstream guarantees the format.
  const slackMessage = msg.text.split(':')[1].trim();
  const command = slackMessage.split(' ')[0];
  let cardName = [];
  if (command == 'card') {
    // Everything after the command word is the card name.
    cardName = slackMessage.split(' ').slice(1).join(' ');
    msgPost = fetchCardImage(cardName);
  }
  SLACK.chat.postMessage({
    token: CONFIG('SLACK_TOKEN'),
    icon_emoji: CONFIG('ICON_EMOJI'),
    channel: msg.channel,
    username: '<NAME>',
    text: msgPost
  }, (err, data) => {
    if (err) throw err
    let txt = _.truncate(data.message.text)
    console.log(`🤖 Listen, young one: I responded with "${txt}"`)
  })
})
/**
 * Looks up a Hearthstone card by exact name and returns its image URL.
 * FIX: the original dereferenced `cardInfo.image_url` unconditionally and
 * threw a TypeError whenever no card matched; reply with a friendly
 * message instead.
 *
 * @param {string} cardName Exact card name to look up.
 * @returns {string} Image URL, or a "not found" message.
 */
function fetchCardImage(cardName) {
  const cardInfo = _.find(HDB.allCards, (card) => {
    return card.name === cardName
  })
  if (!cardInfo) {
    return `Card not found: ${cardName}`
  }
  return cardInfo.image_url
}

module.exports = BOT
|
// https://open.kattis.com/problems/toilet
#include <iostream>
using namespace std;
typedef unsigned char u8;
// Flip count when every user leaves the seat in `target` position: one
// flip whenever the seat differs from what the user needs, plus one flip
// back to `target` when the used position differs from it. After the
// first visit the seat is always left at `target`.
static int flipsWithPolicy(const std::string& s, char target) {
    int flips = 0;
    unsigned char status = s[0];
    for (size_t i = 1; i < s.size(); i++) {
        if (status != (unsigned char)s[i]) {
            flips++;
        }
        if (s[i] != target) {
            flips++;
        }
        status = (unsigned char)target;
    }
    return flips;
}

// Flip count when users simply leave the seat as they used it: one flip
// per adjacent pair of differing requests.
static int flipsLeaveAsIs(const std::string& s) {
    int flips = 0;
    unsigned char status = s[0];
    for (size_t i = 1; i < s.size(); i++) {
        if (status != (unsigned char)s[i]) {
            flips++;
        }
        status = (unsigned char)s[i];
    }
    return flips;
}

// Kattis "toilet": prints the flip counts for the three seat policies
// (always leave Up, always leave Down, leave as used).
// Refactored: the original repeated three near-identical loops inline.
int main() {
    std::string s;
    std::cin >> s;
    std::cout << flipsWithPolicy(s, 'U') << std::endl;
    std::cout << flipsWithPolicy(s, 'D') << std::endl;
    std::cout << flipsLeaveAsIs(s) << std::endl;
}
|
module Capcoauth
  module OAuth
    # Thin wrapper over the configured cache store mapping access tokens
    # to user ids, expiring after the configured token_verify_ttl.
    class TTLCache
      # Returns the cached user id for the token (nil when absent/expired).
      def self.user_id_for(access_token)
        store.fetch(key_for(access_token))
      end

      # Caches the token -> user id mapping with the configured TTL.
      def self.update(access_token, user_id)
        store.write(key_for(access_token), user_id, expires_in: Capcoauth.configuration.token_verify_ttl)
      end

      # Evicts a single token from the cache.
      # FIX: the original used delete_matched, which treats its argument
      # as a glob/regexp pattern and scans the store; the key here is
      # exact, so plain delete is both correct and cheaper.
      def self.remove(access_token)
        store.delete(key_for(access_token))
      end

      # Cache key namespace for access tokens.
      def self.key_for(access_token)
        "capcoauth_token:#{access_token}"
      end

      # The application-configured ActiveSupport-style cache store.
      def self.store
        Capcoauth.configuration.cache_store
      end
    end
  end
end
|
#ifndef PARTICLE_RPSO_H
#define PARTICLE_RPSO_H
#include<vector>
#include"Particle.h"
#include"PSO_RPSO.h"
#include"PSO.h"
using namespace std;
// RPSO (repulsive PSO) particle: the velocity update direction flips to
// repulsion (+1) when swarm diversity exceeds dHigh and attraction (-1)
// when it falls below dLow.
class Particle_RPSO : public Particle
{
private:
    // Diversity thresholds and velocity coefficients.
    // NOTE(review): never initialized in this class -- presumably
    // assigned elsewhere before update() runs; confirm.
    float dLow, dHigh, w, c1, c2, cN;
public:
    Particle_RPSO(int numberOfFeatures) : Particle(numberOfFeatures)
    {}

    // Updates velocity/position from the global best, the particle's own
    // best, and (when provided) the neighborhood best, then clamps both
    // to their configured bounds.
    // FIX: the declaration used the qualified name `Particle::update`,
    // a non-standard MSVC extension inside a class body; standard C++
    // requires the unqualified name.
    virtual void update(vector<float>& gBest, vector<float>& lBest, PSO_AlgorithmParam* psoAlg)
    {
        PSO_RPSO* rpso = (PSO_RPSO*)psoAlg;
        rpso->setParameters(dLow, dHigh, w, c1, c2, cN);
        int NOfFeatures = features.size();
        vector<float> pBestF = pBest->getFeatures();
        for (int i = 0; i < NOfFeatures; ++i)
        {
            // dir: -1 attracts toward the bests, +1 repels from them.
            int dir = -1;
            float diversity = getDiversity();
            if (diversity < dLow)
                dir = -1;
            else if (diversity > dHigh)
                dir = 1;
            velocity[i] = w*velocity[i] + dir * c1 * (rand() / (float)RAND_MAX)* (gBest[i] - features[i])
                + dir * c2 * (rand() / (float)RAND_MAX)*(pBestF[i] - features[i])
                + (lBest.size() ? (dir * cN * (rand() / (float)RAND_MAX)*(lBest[i] - features[i])) : 0);
            features[i] += velocity[i];
            // Clamp velocity, then position, to their bounds.
            velocity[i] = velocity[i] > maxVelocity[i] ? maxVelocity[i] : velocity[i];
            velocity[i] = velocity[i]<minVelocity[i] ? minVelocity[i] : velocity[i];
            features[i] = features[i]>maxPosition[i] ? maxPosition[i] : features[i];
            features[i] = features[i] < minPosition[i] ? minPosition[i] : features[i];
        }
    }

    virtual Particle* updateFittness() = 0; //return this

    // Sum of each particle's distance from the swarm centroid.
    // NOTE(review): the computed sumSq is discarded and 0 is returned
    // (looks like leftover debugging), which pins dir above to -1
    // whenever dLow > 0 -- confirm before changing the behavior.
    float getDiversity()
    {
        vector<Particle*> particles = PSO::getParticles();
        int NOfFeatures = features.size();
        float NOfParticles = particles.size();
        vector<float> meanP;
        for (int i = 0; i < NOfFeatures; ++i){
            float meanI = 0;
            for (int j = 0; j <NOfParticles; ++j)
                meanI += particles[j]->getFeatures()[i];
            meanP.push_back(meanI / NOfParticles);
        }
        float sumSq = 0;
        for (int i = 0; i <NOfParticles; ++i)
        {
            float subSum = 0;
            for (int j = 0; j < NOfFeatures; ++j)
                subSum += (particles[i]->getFeatures()[j] - meanP[j])*(particles[i]->getFeatures()[j] - meanP[j]);
            sumSq += sqrt(subSum);
        }
        return 0;// sumSq;
    }
};
#endif
|
#!/bin/sh
# Build the UI, then build, tag, and push the Docker image under both the
# package version and `latest`.
# FIX: abort on the first failure -- the original kept going and pushed
# images even when the npm or docker build had failed.
set -e

npm run build

REMOTE_ARTIFACT="nickw444/docker-ui"
# jq can read the file directly; no need to pipe through cat.
PACKAGE_VERSION=$(jq -r '.version' package.json)

docker build -t "$REMOTE_ARTIFACT:$PACKAGE_VERSION" .
docker tag "$REMOTE_ARTIFACT:$PACKAGE_VERSION" "$REMOTE_ARTIFACT:latest"
docker push "$REMOTE_ARTIFACT:latest"
docker push "$REMOTE_ARTIFACT:$PACKAGE_VERSION"
|
import {Protocol} from 'devtools-protocol';
import {NodeRef, getNodeRefFromBackendId} from './node_ref';
import {CDPSession, Page, JSHandle} from 'puppeteer';
/**
 * Gets strings containing the HTML markup for the Nodes used to compute
 * the accessible name for NodeRef, once as computed by Chrome and once as
 * computed by the accname library, keyed by implementation name.
 * @param nodeRef - Reference to Node whose name is being computed
 * @param client - CDPSession for page
 * @param page - Page containing Node referenced by NodeRef
 */
export async function getHTMLUsed(
  nodeRef: NodeRef,
  client: CDPSession,
  page: Page
): Promise<{[implementation: string]: string}> {
  const chromeHandles = await getNodesUsedByChrome(nodeRef, client, page);
  const htmlUsedByChrome = await getHTMLFromHandles(chromeHandles, page);
  // NOTE(review): nodeRef.selector is interpolated into the evaluated
  // string verbatim; a selector containing quotes would break the
  // expression -- confirm selectors are generated, not user-supplied.
  const accnameHandles = (await page.evaluateHandle(`
    Array.from(accname.getNameComputationDetails(document.querySelector('${nodeRef.selector}')).nodesUsed);
  `)) as JSHandle<Element[]>;
  const htmlUsedByAccname = await getHTMLFromHandles(accnameHandles, page);
  return {chrome: htmlUsedByChrome, accname: htmlUsedByAccname};
}
/**
 * Calculate the HTML snippet containing the array of elements referenced
 * by a given JSHandle. Elements whose markup is already contained in an
 * ancestor element of the same array are omitted, and the remaining
 * outerHTML strings are joined with newlines.
 * @param handles - The ElementHandles for whom a HTML snippet is being computed
 * @param page - The page containing the ElementHandles.
 */
async function getHTMLFromHandles(
  handles: JSHandle<Element[]>,
  page: Page
): Promise<string> {
  // Get the outerHTML of the nodes used by Chrome
  const htmlString = await page.evaluate((nodes: Element[]) => {
    // Find 'redundant' nodes: nodes whose outerHTML is included in that of
    // an ancestor node.
    // NOTE(review): pairwise containment is O(n^2); presumably the node
    // lists here are small -- confirm before using on large pages.
    const redundantNodes = new Array<Element>();
    for (const nodeA of nodes) {
      for (const nodeB of nodes) {
        if (nodeA.contains(nodeB) && nodeA !== nodeB) {
          redundantNodes.push(nodeB);
        }
      }
    }
    // Keep only the top-most nodes, one markup snippet per line.
    return nodes
      .filter(node => !redundantNodes.includes(node))
      .map(node => node.outerHTML)
      .join('\n');
  }, handles);
  return htmlString;
}
/**
 * Gets a JSHandle containing the array of nodes used by Chrome
 * to compute the accessible name for nodeRef. Performs an iterative DFS
 * over label relations (aria-labelledby and native <label>) reported by
 * the CDP accessibility tree, collecting each visited node.
 * @param nodeRef - Node whose accessible name is being computed.
 * @param client - CDPSession for page.
 * @param page - Page containing nodeRef.
 */
async function getNodesUsedByChrome(
  nodeRef: NodeRef,
  client: CDPSession,
  page: Page
): Promise<JSHandle<Element[]>> {
  const stack: NodeRef[] = [];
  // Create a JSHandle containing an empty array
  const nodesUsedHandle = await page.evaluateHandle('[]');
  // Track backendIds of visited nodes to avoid infinite cycle.
  const visitedNodes: Protocol.DOM.BackendNodeId[] = [];
  stack.push(nodeRef);
  // Iterative DFS traverses nodes connected by label references
  while (stack.length > 0) {
    const currentNodeRef = stack.pop()!;
    // Add current Node to nodesUsed array
    await page.evaluate(
      (node, nodesUsed) => nodesUsed.push(node),
      currentNodeRef.handle,
      nodesUsedHandle
    );
    const axTree = (await client.send('Accessibility.getPartialAXTree', {
      backendNodeId: currentNodeRef.backendId,
    })) as Protocol.Accessibility.GetPartialAXTreeResponse;
    // Find the index of the currentNodeRef's corresponding AXNode
    const indexOfCurrentNode = axTree.nodes.findIndex(
      axNode => axNode.backendDOMNodeId === currentNodeRef?.backendId
    );
    // Contains AXNodes descendant of currentNodeRef's corresponding AXNode
    // (the partial tree lists descendants before the queried node).
    const descandantNodes = axTree.nodes.slice(0, indexOfCurrentNode + 1);
    // Check if any descendant AXNodes are labelled
    for (const axNode of descandantNodes) {
      let labelNodes: Protocol.Accessibility.AXRelatedNode[] = [];
      const sources: Protocol.Accessibility.AXValueSource[] =
        axNode.name?.sources ?? [];
      for (const source of sources) {
        if (source.type === 'relatedElement') {
          // Handles nodes connected by attribute value (aria-labelleby)
          if (source.attributeValue?.relatedNodes) {
            labelNodes = source.attributeValue.relatedNodes;
            // Handles nodes connected natively (<label>)
          } else if (source.nativeSourceValue?.relatedNodes) {
            labelNodes = source.nativeSourceValue.relatedNodes;
          }
        }
      }
      // Repeat the process for all unvisited label nodes.
      for (const labelNode of labelNodes) {
        if (!visitedNodes.includes(labelNode.backendDOMNodeId)) {
          const labelNodeRef = await getNodeRefFromBackendId(
            labelNode.backendDOMNodeId,
            client,
            page
          );
          if (labelNodeRef) {
            stack.push(labelNodeRef);
          }
          visitedNodes.push(labelNode.backendDOMNodeId);
        }
      }
    }
  }
  return nodesUsedHandle;
}
|
#!/bin/bash
# FIX: the script uses bash-only constructs (`[[ ... ]]`) throughout, so
# the original `#!/bin/sh` shebang fails on systems where sh is dash.
#
# Rebuilds the PAG baseline from the main branch when the cached baseline
# version differs, stashing any local changes around the branch switch.
{
  CACHE_VERSION_FILE=./test/baseline/.cache/version.json
  # Skip entirely when the cached baseline already matches main.
  if [ -f "$CACHE_VERSION_FILE" ]; then
    HAS_DIFF=$(git diff --name-only main:test/baseline/version.json $CACHE_VERSION_FILE)
    if [[ ${HAS_DIFF} == "" ]]; then
      exit 0
    fi
  fi
  echo "~~~~~~~~~~~~~~~~~~~Update Baseline Start~~~~~~~~~~~~~~~~~~~~~"
  CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
  # Stash local changes; comparing the stash list tells us later whether
  # the push actually created a new stash entry to pop.
  STASH_LIST_BEFORE=$(git stash list)
  git stash push --quiet
  STASH_LIST_AFTER=$(git stash list)
  git switch main --quiet
  # $1 == "1" selects the coverage build in ./build; otherwise use the
  # IDE's cmake-build-debug directory.
  if [[ $1 == "1" ]]; then
    BUILD_DIR=build
  else
    BUILD_DIR=cmake-build-debug
  fi
  if [ ! -d "./${BUILD_DIR}" ]; then
    mkdir ${BUILD_DIR}
  fi
  cd ${BUILD_DIR}
  # Reuse the cmake binary recorded in an existing CMake cache, if any.
  if [ -f "./CMakeCache.txt" ]; then
    TEXT=$(cat ./CMakeCache.txt)
    TEXT=${TEXT#*CMAKE_COMMAND:INTERNAL=}
    for line in ${TEXT}; do
      CMAKE_COMMAND=$line
      break
    done
  fi
  if [ ! -f "$CMAKE_COMMAND" ]; then
    CMAKE_COMMAND="cmake"
  fi
  echo $CMAKE_COMMAND
  if [[ $1 == "1" ]]; then
    $CMAKE_COMMAND -DCMAKE_CXX_FLAGS="-fprofile-arcs -ftest-coverage -g -O0" -DPAG_USE_SWIFTSHADER=ON -DCMAKE_BUILD_TYPE=Debug ../
  else
    $CMAKE_COMMAND -DCMAKE_BUILD_TYPE=Debug ../
  fi
  $CMAKE_COMMAND --build . --target PAGBaseline -- -j 12
  ./PAGBaseline
  cd ..
  # Restore the original branch and any stashed local changes.
  git switch $CURRENT_BRANCH --quiet
  if [[ $STASH_LIST_BEFORE != "$STASH_LIST_AFTER" ]]; then
    git stash pop --index --quiet
  fi
  echo "~~~~~~~~~~~~~~~~~~~Update Baseline END~~~~~~~~~~~~~~~~~~~~~"
  exit
}
|
<filename>annotation/annotation-processing/src/main/java/com/serenity/annotation/processor/StepDataProcessor.java
package com.serenity.annotation.processor;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import java.util.stream.Collectors;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.tools.JavaFileObject;
import com.google.auto.service.AutoService;
@SupportedAnnotationTypes("com.serenity.annotation.processor.StepData")
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public class StepDataProcessor extends AbstractProcessor {

    /**
     * For every class containing {@code @StepData}-annotated members, generates a
     * companion "&lt;ClassName&gt;Ex" source file holding an extended class plus a
     * nested step-style Builder. Steps are ordered by {@code StepData#order()}.
     *
     * @return {@code true} — the StepData annotations are claimed by this processor.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        for (TypeElement annotation : annotations) {
            Set<? extends Element> annotatedElements = roundEnv.getElementsAnnotatedWith(annotation);
            // Group the annotated members by the fully-qualified name of their declaring class.
            Map<String, List<Element>> annotatedMethods = annotatedElements.stream()
                    .collect(Collectors.groupingBy(element ->
                            ((TypeElement) element.getEnclosingElement()).getQualifiedName().toString()));
            if (annotatedMethods.isEmpty()) {
                continue;
            }
            annotatedMethods.forEach((key, setters) -> {
                // @GetBuilder on the declaring class changes how the generated
                // builder exposes its first step (see writeBuilderFile).
                boolean isGetBuilder = setters.get(0).getEnclosingElement().getAnnotation(GetBuilder.class) != null;
                String className = ((TypeElement) setters.get(0).getEnclosingElement()).getQualifiedName().toString();
                // NOTE(review): assumes the declaring class implements at least one
                // interface (used as the builder's terminal return type) — confirm.
                String classInterfaceName = ((TypeElement) setters.get(0).getEnclosingElement()).getInterfaces().get(0).toString();
                // Order the steps by their declared position.
                // (Fix: the original line ended with a stray double semicolon.)
                setters = setters.stream()
                        .sorted(Comparator.comparing(setter -> setter.getAnnotation(StepData.class).order()))
                        .collect(Collectors.toList());
                // Each entry: [step method name, parameter type, parameter/field name].
                List<List<String>> listSetter = new ArrayList<>();
                for (Element setter : setters) {
                    listSetter.add(Arrays.asList(
                            setter.getAnnotation(StepData.class).value(),
                            setter.asType().toString(),
                            setter.getSimpleName().toString()
                    ));
                }
                try {
                    writeBuilderFile(className, classInterfaceName, isGetBuilder, listSetter);
                } catch (IOException e) {
                    // Best-effort: a failed generation is logged and does not abort the round.
                    e.printStackTrace();
                }
            });
        }
        return true;
    }

    /**
     * Writes the generated "&lt;className&gt;Ex" source file: a subclass whose
     * constructor assigns every step value, plus a nested step Builder that ends
     * with {@code Tasks.instrumented(...)}.
     *
     * @param className          fully-qualified name of the annotated class
     * @param classInterfaceName the interface used as the build() return type
     * @param isGetBuilder       whether the declaring class carried @GetBuilder
     * @param listSetter         ordered [name, type, param] triples for each step
     * @throws IOException if the Filer cannot create or write the source file
     */
    private void writeBuilderFile(String className, String classInterfaceName, boolean isGetBuilder, List<List<String>> listSetter) throws IOException {
        String packageName = null;
        int lastDot = className.lastIndexOf('.');
        if (lastDot > 0) {
            packageName = className.substring(0, lastDot);
        }
        String simpleClassName = className.substring(lastDot + 1);
        String simpleBuilderClassName = "Builder";
        String extendClassName = className + "Ex";
        String extendSimpleClassName = extendClassName.substring(lastDot + 1);
        JavaFileObject builderFile = processingEnv.getFiler().createSourceFile(extendClassName);
        try (PrintWriter out = new PrintWriter(builderFile.openWriter())) {
            // Split the triples into parallel lists for easy string assembly.
            List<String> methodList = new ArrayList<>();
            List<String> typeList = new ArrayList<>();
            List<String> paramList = new ArrayList<>();
            List<String> typeParamList = new ArrayList<>();
            for (List<String> setter : listSetter) {
                methodList.add(setter.get(0));
                typeList.add(setter.get(1));
                paramList.add(setter.get(2));
                typeParamList.add(setter.get(1) + " " + setter.get(2));
            }
            String params = String.join(", ", paramList);
            String typeParams = String.join(", ", typeParamList);
            if (packageName != null) {
                out.print("package ");
                out.print(packageName);
                out.println(";");
                out.println();
            }
            out.println("import net.serenitybdd.screenplay.Tasks;");
            out.println();
            // The extended class: a constructor that assigns every step value.
            out.println("class " + extendSimpleClassName + " extends " + simpleClassName + " {");
            out.println("    public " + extendSimpleClassName + "(" + typeParams + ") {");
            for (List<String> setter : listSetter) {
                out.print("        this." + setter.get(2));
                out.print(" = ");
                out.print(setter.get(2));
                out.println(";");
            }
            out.println("    }");
            // Without @GetBuilder, the first step doubles as the static entry point.
            if(!isGetBuilder) {
                out.println("    public static " + simpleBuilderClassName + " " + methodList.get(0) + "(" + typeList.get(0) + " " + paramList.get(0) + ") {");
                out.println("        return new " + simpleBuilderClassName + "(" + paramList.get(0) + ");");
                out.println("    }");
            }
            out.println("    public static class " + simpleBuilderClassName + " {");
            out.println("        private " + classInterfaceName + " build() {");
            out.println("            return Tasks.instrumented(" + extendSimpleClassName + ".class, " + params + ");");
            out.println("        }");
            // One field + one chained setter per step; the last step returns build().
            for(int i=0; i < methodList.size(); i++) {
                out.println("        private " + typeList.get(i) + " " + paramList.get(i) + ";");
                String returnType = simpleBuilderClassName;
                String returnCode = "this";
                String methodName = "";
                if(i > 0 || isGetBuilder) {
                    methodName = " " + methodList.get(i);
                    if(i==methodList.size()-1) {
                        returnType = classInterfaceName;
                        returnCode = "build()";
                    }
                }
                out.println("        public " + returnType + methodName + "(" + typeList.get(i) + " " + paramList.get(i) + ") {");
                out.println("            this." + paramList.get(i) + " = " + paramList.get(i) + ";");
                if(!methodName.isEmpty())
                    out.println("            return " + returnCode + ";");
                out.println("        }");
            }
            out.println("    }");
            out.println("}");
        }
    }
}
|
#!/bin/bash
# Starts the DataWorkCloud EngineManager JVM as a background daemon, guarded
# by a PID file so a second start while it is running is a no-op.
cd "$(dirname "$0")" || exit 1
cd ..
HOME=$(pwd)
export DWS_ENGINE_MANAGER_HOME=$HOME
export DWS_ENGINE_MANAGER_PID=$HOME/bin/linkis-enginemanager.pid
# If a PID file exists and that process is still alive, don't start a second instance.
if [[ -f "${DWS_ENGINE_MANAGER_PID}" ]]; then
    pid=$(cat "${DWS_ENGINE_MANAGER_PID}")
    if kill -0 "${pid}" >/dev/null 2>&1; then
        echo "Dataworkcloud Database is already running."
        # Fix: the original used "return 0;", which is invalid outside a
        # function and would have let the script fall through and start anyway.
        exit 0
    fi
fi
export DWS_ENGINE_MANAGER_LOG_PATH=$HOME/logs
mkdir -p "$DWS_ENGINE_MANAGER_LOG_PATH"
export DWS_ENGINE_MANAGER_HEAP_SIZE="5G"
# NOTE(review): -XX:MaxPermSize is ignored (and warned about) on Java 8+ —
# confirm the target JVM version before keeping it.
export DWS_ENGINE_MANAGER_JAVA_OPTS="-Xms$DWS_ENGINE_MANAGER_HEAP_SIZE -Xmx$DWS_ENGINE_MANAGER_HEAP_SIZE -XX:+UseG1GC -XX:MaxPermSize=500m"
# Fixes:
#  - "2>&1 > file" sent stderr to the terminal; "> file 2>&1" logs both streams.
#  - quoting the -cp argument stops the shell from glob-expanding lib/* into
#    multiple words (which breaks the classpath); java expands the wildcard itself.
nohup java $DWS_ENGINE_MANAGER_JAVA_OPTS -cp "$HOME/conf:$HOME/lib/*" com.webank.wedatasphere.linkis.DataWorkCloudApplication > "$DWS_ENGINE_MANAGER_LOG_PATH/linkis-database.out" 2>&1 &
pid=$!
if [[ -z "${pid}" ]]; then
    echo "Dataworkcloud EngineManager start failed!"
    exit 1
else
    echo "Dataworkcloud EngineManager start succeeded!"
    echo $pid > "$DWS_ENGINE_MANAGER_PID"
    sleep 1
fi
|
<reponame>acvos/hyperlane<gh_stars>10-100
// Tiny comparison predicates, exported individually so they can be passed
// around as first-class functions.

export const eq = function (x, y) {
  return x === y;
};

export const neq = function (x, y) {
  return x !== y;
};

export const gt = function (x, y) {
  return x > y;
};

export const lt = function (x, y) {
  return x < y;
};

export const gte = function (x, y) {
  return x >= y;
};

export const lte = function (x, y) {
  return x <= y;
};

export const isDefined = function (x) {
  return x !== undefined;
};

export const isUndefined = function (x) {
  return x === undefined;
};
|
#!/bin/sh
# Splits each input file by model: lines mentioning a model name are written
# to split/<model>/<file>.
#
# Usage: split.sh FILE [FILE...]
for var in "$@"
do
    for model in "classroom" "cv" "graphs" "lts" "production" "trash"
    do
        mkdir -p "split/$model"
        # Fix: quoting "$var" (unquoted in the original) keeps file names with
        # spaces intact. NOTE(review): assumes plain file names; an input path
        # containing "/" would need its directory created first — confirm.
        grep "${model}" "$var" > "split/$model/$var"
    done
done
|
<gh_stars>0
package pe.com.optical.middleware.crm.domain.composite;
import java.io.Serializable;
import java.util.Objects;
public class DocumentoTareaPostVentaId implements Serializable {
private static final long serialVersionUID = -7468961223715973189L;
private Long tareaPostVenta;
private Long documento;
public Long getTareaPostVenta() {
return tareaPostVenta;
}
public void setTareaPostVenta(Long tareaPostVenta) {
this.tareaPostVenta = tareaPostVenta;
}
public Long getDocumento() {
return documento;
}
public void setDocumento(Long documento) {
this.documento = documento;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (!(o instanceof DocumentoTareaPostVentaId))
return false;
DocumentoTareaPostVentaId that = (DocumentoTareaPostVentaId) o;
return Objects.equals(getTareaPostVenta(), that.getTareaPostVenta())
&& Objects.equals(getDocumento(), that.getDocumento());
}
@Override
public int hashCode() {
return Objects.hash(getTareaPostVenta(), getDocumento());
}
}
|
package com.example.licio.moringaeats.ui;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.ListView;
import android.widget.TextView;
import com.example.licio.moringaeats.Constants;
import com.example.licio.moringaeats.R;
import com.example.licio.moringaeats.adapters.RecipeListAdapter;
import com.example.licio.moringaeats.models.Recipe;
import com.example.licio.moringaeats.services.YummlyService;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.miguelcatalan.materialsearchview.MaterialSearchView;
import java.io.IOException;
import java.util.ArrayList;
import butterknife.BindView;
import butterknife.ButterKnife;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class Home extends AppCompatActivity {
    public static final String TAG = Home.class.getSimpleName();

    // Recipes currently displayed; replaced wholesale on every search response.
    public ArrayList<Recipe> recipes = new ArrayList<>();
    private SharedPreferences mSharedPreferences;
    private SharedPreferences.Editor mEditor;
    private String mRecentIngredient;

    @BindView(R.id.recyclerView)
    RecyclerView mRecyclerView;
    private RecipeListAdapter mAdapter;

    /**
     * Sets up the recipe list and issues exactly one initial search: the
     * ingredients passed via the launching Intent win; otherwise the most
     * recently saved search (SharedPreferences) is reused.
     *
     * Fix: the original fired getRecipes() twice back-to-back (once with the
     * possibly-null Intent extra and once with the saved ingredient), wasting
     * a network call and letting the second response clobber the first adapter.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_home);
        ButterKnife.bind(this);

        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        mRecentIngredient = mSharedPreferences.getString(Constants.PREFERENCES_INGREDIENTS_KEY, null);

        String ingredients = getIntent().getStringExtra("ingredients");
        if (ingredients == null) {
            ingredients = mRecentIngredient;
        }
        if (ingredients != null) {
            getRecipes(ingredients);
        }
    }

    /**
     * Inflates the search menu and wires the SearchView: a submitted query is
     * persisted as the most recent search and triggers a new recipe fetch.
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.menu_search, menu);
        ButterKnife.bind(this);
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        mEditor = mSharedPreferences.edit();
        MenuItem menuItem = menu.findItem(R.id.action_search);
        SearchView searchView = (SearchView) MenuItemCompat.getActionView(menuItem);
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                addToSharedPreferences(query);
                getRecipes(query);
                return false;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                // Live-typing is ignored; searches run only on submit.
                return false;
            }
        });
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }

    /**
     * Queries the Yummly API for recipes matching {@code ingredients} and, on
     * success, swaps in a fresh RecyclerView adapter on the UI thread.
     */
    private void getRecipes(String ingredients) {
        final YummlyService yummlyService = new YummlyService();
        yummlyService.findRecipes(ingredients, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                recipes = yummlyService.processResults(response);
                Home.this.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mAdapter = new RecipeListAdapter(getApplicationContext(), recipes);
                        mRecyclerView.setAdapter(mAdapter);
                        RecyclerView.LayoutManager layoutManager =
                                new LinearLayoutManager(Home.this);
                        mRecyclerView.setLayoutManager(layoutManager);
                        mRecyclerView.setHasFixedSize(true);
                    }
                });
            }
        });
    }

    /**
     * Saves the latest searched ingredients as the "most recent" search.
     * Only called from the SearchView listener, after onCreateOptionsMenu has
     * initialized {@code mEditor}.
     */
    private void addToSharedPreferences(String ingredients) {
        mEditor.putString(Constants.PREFERENCES_INGREDIENTS_KEY, ingredients).apply();
    }
}
|
<gh_stars>1-10
import fs from 'fs';
import path from 'path';
import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
import _ from 'lodash';
import Stopwatch from '../stopwatch';
import { getSettings } from '../config';
import StringBuffer from '../stringBuffer';
import * as triggerLoader from './triggerLoader';
import * as wsServer from './wsServer';
import { getServerIpAddress } from '../ip';
import { ProcessBufferName, ServerStatuses } from '../commonTypes';
// Application settings; also re-read inside start()/stop() so config edits
// apply without restarting this manager process.
let config = getSettings('appsettings');
// Extract the SteamAppId value out of the stock server batch file so it can
// be injected into the spawned server's environment (see start()).
const batchFileText = fs.readFileSync(path.join(config.valheim.serverWorkingDirectory, config.valheim.serverBatchFile)).toString();
const steamAppIdRefText = 'SteamAppId=';
const steamAppIdRef = batchFileText.indexOf(steamAppIdRefText);
// NOTE(review): assumes CRLF line endings in the batch file — confirm; with
// LF-only endings this indexOf returns -1 and the slice below is corrupted.
const newLine = batchFileText.indexOf('\r\n', steamAppIdRef);
const steamAppId = batchFileText.slice(steamAppIdRef + steamAppIdRefText.length, newLine);
// Handle to the running server child process; unset until start() is called.
let serverProc: ChildProcessWithoutNullStreams;
// Lifecycle flags: "started" = child process spawned; "ready" = the server
// log reported "Game server connected" (set in start()).
let started = false;
let ready = false;
// Uptime since ready, and uptime accumulated only while players are connected.
let stopwatch: Stopwatch;
let activeStopwatch: Stopwatch;
// Safely reads a stopwatch: a missing watch or a NaN reading counts as zero.
function readStopwatch(sw: Stopwatch): number {
    if (!sw) {
        return 0;
    }
    const elapsed = sw.read();
    return isNaN(elapsed) ? 0 : elapsed;
}
// Rolling capture of the most recent server output chunks (filled in start()).
let stdoutBuffer: StringBuffer;
let stderrBuffer: StringBuffer;
// A connected player; the stopwatch tracks the length of their session.
type ConnectedPlayer = { id: string, name: string, stopwatch: Stopwatch };
let connectedPlayers: ConnectedPlayer[];
// Local status enum. NOTE(review): nothing in this file reads Statuses —
// getStatus() returns the shared ServerStatuses instead. Looks like a leftover
// kept only for external importers; confirm before removing.
export enum Statuses {
	stopped,
	starting,
	ready
}
/** Derives the server lifecycle status from the `ready`/`started` flags. */
export function getStatus() {
    if (ready) {
        return ServerStatuses.ready;
    }
    return started ? ServerStatuses.starting : ServerStatuses.stopped;
}
// Spawns the Valheim dedicated server as a child process and wires up
// stdout/stderr capture, websocket fan-out and ready detection.
// No-op unless the server is currently stopped.
export function start() {
    if (getStatus() !== ServerStatuses.stopped) return;
    // Re-read settings so configuration edits take effect on restart.
    config = getSettings('appsettings');
    serverProc = spawn(
        config.valheim.serverExecutable,
        [
            '-nographics',
            '-batchmode',
            `-name "${config.valheim.name}"`,
            `-port ${config.valheim.port}`,
            `-world "${config.valheim.world}"`,
            `-password "${config.valheim.password}"`,
            `-public ${config.valheim.public}`
        ], {
            shell: true,
            cwd: config.valheim.serverWorkingDirectory,
            // The SteamAppId parsed from the batch file at module load must be
            // present in the child environment.
            env: _.extend(process.env, { SteamAppId: steamAppId })
        }
    );
    started = true;
    connectedPlayers = [];
    // Keep the last 25 chunks of each output stream for later inspection.
    stdoutBuffer = new StringBuffer(25);
    stderrBuffer = new StringBuffer(25);
    serverProc.stderr.on('data', data => {
        const dataString = data.toString();
        stderrBuffer.add(dataString);
        wsServer.sendMessage('stderr', dataString);
    });
    let startEventSent = false;
    serverProc.stdout.on('data', data => {
        const dataString = data.toString();
        wsServer.sendMessage('stdout', dataString);
        console.log(dataString);
        stdoutBuffer.add(dataString);
        // The first "Game server connected" log line marks the server ready:
        // start the uptime stopwatches and broadcast the public address.
        if (dataString.indexOf('Game server connected') > 0 && !startEventSent) {
            stopwatch = new Stopwatch(true);
            activeStopwatch = new Stopwatch();
            startEventSent = true;
            ready = true;
            wsServer.sendMessage('started', `${getServerIpAddress()}:${config.valheim.port}`);
        } else triggerLoader.handleOutput(dataString);
    });
}
export function stop() {
config = getSettings('appsettings');
return new Promise<void>(resolve => {
if (getStatus() === ServerStatuses.stopped) {
resolve();
return;
}
serverProc.on('close', (code, signal) => {
console.log(`Valheim server child process exited with code ${code} (${signal})`);
resolve();
});
spawn('taskkill', [ '/IM', config.valheim.serverExecutable ]);
});
}
/** Milliseconds since the server reported ready (0 when never started). */
export function getServerUptime(): number {
    return readStopwatch(stopwatch);
}

/** Uptime accumulated only while at least one player was connected. */
export function getServerActiveUptime(): number {
    return readStopwatch(activeStopwatch);
}
/** Returns the captured output buffer for the given stream name, if any. */
export function getBuffer(name: ProcessBufferName) {
    if (name === 'stdout') {
        return stdoutBuffer;
    }
    if (name === 'stderr') {
        return stderrBuffer;
    }
}
/**
 * Registers a newly connected player. When the server goes from empty to
 * occupied, the active-uptime stopwatch is restarted, carrying over the
 * time accumulated so far.
 */
export function addPlayer(id: string, name: string) {
    const wasEmpty = connectedPlayers.length === 0;
    if (wasEmpty) {
        activeStopwatch = new Stopwatch(true, readStopwatch(activeStopwatch));
    }
    connectedPlayers.push({ id, name, stopwatch: new Stopwatch(true) });
}
/** Looks up a connected player by id; undefined when not connected. */
export function findPlayer(id: string) {
    for (const player of connectedPlayers) {
        if (player.id === id) {
            return player;
        }
    }
    return undefined;
}
/**
 * Drops a player from the connected list; when the last player leaves,
 * the active-uptime stopwatch is paused.
 */
export function removePlayer(id: string) {
    connectedPlayers = connectedPlayers.filter(player => player.id !== id);
    if (!connectedPlayers.length) {
        activeStopwatch.stop();
    }
}
/** Returns the live array of connected players (not a copy). */
export function getPlayers() {
    return connectedPlayers;
}
|
// Profile.h
//
// The viewer profile enum.
//
// Copyright (c) 2022 <NAME>.
// Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby
// granted, provided that the above copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
// AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#pragma once
namespace Viewer
{
	// The available viewer profiles. NumProfiles is a count sentinel used to
	// size the name tables below, not a selectable profile.
	enum class Profile
	{
		Main,
		Basic,
		NumProfiles
	};
	// Short and long human-readable names for a profile (defined inline below).
	const char* GetProfileName(Profile);
	const char* GetProfileNameLong(Profile);
	// Name tables indexed by int(profile). NOTE(review): extern — presumably
	// defined in a matching translation unit; confirm every Profile value
	// (except NumProfiles) has an entry.
	extern const char* ProfileNames[int(Profile::NumProfiles)];
	extern const char* ProfileNamesLong[int(Profile::NumProfiles)];
}
// Implementation only below.
// Maps a profile to its short display name. No bounds checking is done;
// callers must not pass Profile::NumProfiles.
inline const char* Viewer::GetProfileName(Profile profile)
{
	return ProfileNames[static_cast<int>(profile)];
}
// Maps a profile to its long display name. No bounds checking is done;
// callers must not pass Profile::NumProfiles.
inline const char* Viewer::GetProfileNameLong(Profile profile)
{
	return ProfileNamesLong[static_cast<int>(profile)];
}
|
package fpinscala.datastructures
import org.scalatest.{FlatSpec, Matchers}
class TreeSpec extends FlatSpec with Matchers {

  // Exercise 25: size counts every node, branches and leaves alike.
  "A Tree" should "calculate the size of Leaf as 1" in {
    val single = Leaf("A")
    Tree.size(single) shouldBe 1
    Tree.size2(single) shouldBe 1
  }

  it should "calculate the size of a tree with branches" in {
    val t = Branch(Leaf(1), Branch(Leaf(2), Leaf(3)))
    Tree.size(t) shouldBe 5
    Tree.size2(t) shouldBe 5
  }

  // Exercise 26: maximum over all leaf values.
  it should "find maximum element in a Leaf" in {
    Tree.maximum(Leaf(1)) shouldBe 1
    Tree.maximum2(Leaf(1)) shouldBe 1
  }

  it should "find maximum element in a tree with branches" in {
    val t = Branch(Leaf(5), Branch(Leaf(2), Leaf(3)))
    Tree.maximum(t) shouldBe 5
    Tree.maximum2(t) shouldBe 5
  }

  // Exercise 27: depth is the longest root-to-leaf path length.
  it should "calculate the depth of Leaf as 0" in {
    Tree.depth(Leaf("just me")) shouldBe 0
    Tree.depth2(Leaf("just me")) shouldBe 0
  }

  it should "calculate the depth of a tree with branches" in {
    val lopsided =
      Branch(Leaf(1), Branch(Leaf(2), Branch(Leaf(3), Leaf(4))))
    Tree.depth(lopsided) shouldBe 3
    Tree.depth2(lopsided) shouldBe 3
  }

  // Exercise 28: map transforms every leaf while preserving shape.
  it should "map elements of a tree" in {
    val words = Branch(Leaf("I"), Branch(Leaf("haz"), Leaf("a cat")))
    val expected = Branch(Leaf(1), Branch(Leaf(3), Leaf(5)))
    Tree.map(words)(_.length) shouldBe expected
    Tree.map2(words)(_.length) shouldBe expected
  }

  // Exercise 29: fold reduces a tree with a leaf function and a branch combiner.
  it should "fold a Leaf into a value" in {
    // The branch combiner must never be invoked for a lone Leaf.
    Tree.fold(Leaf(2))(_ / 2)((_, _) => fail()) shouldBe 1
  }

  it should "fold a tree into a value" in {
    Tree.fold(Branch(Leaf(1), Branch(Leaf(2), Leaf(3))))(_ + 1)(_ - _) shouldBe 3
  }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2022: Jacob.Lundqvist@gmail.com
# License: MIT
#
# Part of https://github.com/jaclu/man_asdf
#
# Version: 1.0.2 2022-04-14
#
# Runs shellcheck over every relevant script in this project, stopping at
# the first one that produces warnings.
#
# shellcheck disable=SC1007
CURRENT_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)
cd "$CURRENT_DIR" || return

# Scripts in this repo that should stay lint-clean (this one included).
checkables=(
    shellchecker.sh
    man_asdf
    deploy
)

for script in "${checkables[@]}"; do
    echo "Checking: $script"
    # -x follows sourced files; bail out on the first failing script.
    shellcheck -x "$script" || exit 1
done
|
import {Component, Input, OnInit} from '@angular/core';
import { ApiService } from '../../services/api.service';
import { AuthService } from '../../services/auth.service';
@Component ({
selector: 'comments-view',
templateUrl: './comments-view.component.html',
styleUrls: ['./comments-view.component.scss']
})
export default class CommentsViewComponent{
@Input() show;
current_user: any;
com: any;
constructor(private API: ApiService, private auth: AuthService){}
ngOnInit(){
this.comLoad();
this.auth.checkLoggedStatus();
this.auth.getLoggedStatus()
.subscribe((response)=>{
this.current_user = response;
});
}
comDelete(id: number){
const confirmation = confirm('This comment will be removed');
if(confirmation)
this.API.delete('comments', id)
.subscribe((response)=>{
console.log(response);
this.comLoad();
});
}
comLoad(){
this.API.get('comments')
.subscribe((response)=> this.com = response);
}
}
|
package br.com.scgf.squarecrazygame.service;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.widget.Button;
import androidx.annotation.RequiresApi;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.net.URISyntaxException;
import java.util.function.Consumer;
import br.com.scgf.squarecrazygame.R;
import io.socket.client.IO;
import io.socket.client.Socket;
public class WebSocketService {
    // Socket.IO endpoint URL; populated from the Firebase "apiHost" node.
    private static String host = "";
    // Process-wide shared socket; null until the first successful connect.
    private static Socket socket = null;

    /**
     * Supplies a connected Socket.IO client to {@code callback}. On first use
     * the endpoint is read asynchronously from Firebase and a new socket is
     * created and connected; afterwards the cached socket is handed over
     * synchronously.
     *
     * NOTE(review): the ValueEventListener stays registered and re-fires on
     * every "apiHost" change, and concurrent first calls could race on the
     * {@code socket == null} check — confirm single-threaded usage.
     */
    @RequiresApi(api = Build.VERSION_CODES.N)
    public static void getConnectWebSocket(Consumer<Socket> callback) {
        FirebaseDatabase database = FirebaseDatabase.getInstance();
        DatabaseReference myRef = database.getReference("apiHost");
        if(socket == null){
            myRef.addValueEventListener(new ValueEventListener() {
                @RequiresApi(api = Build.VERSION_CODES.N)
                @Override
                public void onDataChange(DataSnapshot dataSnapshot) {
                    // The node stores the plain URL of the Socket.IO server.
                    String value = dataSnapshot.getValue(String.class);
                    host = value;
                    // Re-check: a later change event must not replace a live socket.
                    if(socket == null) {
                        try {
                            socket = IO.socket(host);
                            socket.connect();
                            callback.accept(socket);
                        } catch (URISyntaxException e) {
                            // Malformed host URL; logged, callback never fires.
                            e.printStackTrace();
                        }
                    }
                }
                @Override
                public void onCancelled(DatabaseError error) {
                    // Firebase read failed; surfaced in the log only.
                    Log.w("TAG", "Failed to read value.", error.toException());
                }
            });
        } else {
            callback.accept(socket);
        }
        //socket.emit("register", "{'username': " + username + "}");
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.