text
stringlengths 27
775k
|
|---|
# -*- coding:utf-8 -*-
"""
weibo serializer
"""
from rest_framework import serializers
from account.models import User
from weibo.models.weibo import Image, Weibo, Comment
class ImageModelSerializer(serializers.ModelSerializer):
    """Writable serializer for the full Image model.

    The ``user`` field is addressed by username and is optional on input.
    """
    user = serializers.SlugRelatedField(
        slug_field="username",
        required=False,
        read_only=False,
        queryset=User.objects.all(),
    )

    class Meta:
        model = Image
        fields = ("id", "user", "file", "filename", "storage", "url", "description")
class ImageInfoModelSerializer(serializers.ModelSerializer):
    """Compact Image serializer exposing only storage-related fields
    (no owner or description); used for read-only nesting.
    """
    class Meta:
        model = Image
        fields = ("id", "file", "filename", "storage", "url")
class CommentModelSerializer(serializers.ModelSerializer):
    """Weibo comment serializer.

    On create, the comment is attributed to the requesting user (or stored
    anonymously) and stamped with the client IP address.
    """
    user = serializers.SlugRelatedField(
        slug_field="username",
        required=False,
        read_only=False,
        queryset=User.objects.all(),
    )

    def create(self, validated_data):
        """Create a comment, filling ``user`` and ``address`` from the request.

        Anonymous requests produce a comment with ``user=None``.
        """
        request = self.context["request"]
        user = request.user
        validated_data["user"] = user if user.is_authenticated else None
        # Prefer the reverse-proxy header; fall back to the direct peer address.
        validated_data["address"] = request.META.get(
            "HTTP_X_REAL_IP", request.META.get("REMOTE_ADDR"))
        return super().create(validated_data=validated_data)

    class Meta:
        model = Comment
        fields = ("id", "user", "weibo", "content", "time_added", "address", "is_deleted")
class WeiboModelSerializer(serializers.ModelSerializer):
    """Full weibo serializer.

    Accepts raw uploads through the write-side ``files`` list field and
    exposes the stored ``images`` and ``comments`` read-only.
    """
    user = serializers.SlugRelatedField(required=False, slug_field="username",
                                        read_only=False, queryset=User.objects.all())
    # Upload channel: raw image files posted together with the weibo.
    files = serializers.ListField(
        child=serializers.ImageField(
            allow_empty_file=False, use_url=False, max_length=1024000),
        allow_null=True,
        allow_empty=True,
        required=False
    )
    images = ImageInfoModelSerializer(required=False, many=True, read_only=True)
    comments = CommentModelSerializer(required=False, many=True, read_only=True,
                                      source="comment_set")

    def create(self, validated_data):
        """Create a weibo, attaching owner, client IP and uploaded images.

        ``request.user`` is an AnonymousUser for unauthenticated requests and
        cannot be saved as a foreign key, so it is normalized to None once and
        the same owner is reused for the Image rows (previously the raw user
        object was passed to Image.objects.create even when anonymous).
        """
        request = self.context["request"]
        user = request.user
        owner = user if user.is_authenticated else None
        validated_data["user"] = owner
        # Prefer the reverse-proxy header; fall back to the direct peer address.
        validated_data["address"] = request.META.get(
            "HTTP_X_REAL_IP", request.META.get("REMOTE_ADDR"))
        # Turn each uploaded file into an Image row owned by the same user.
        # allow_null=True means "files" may arrive as None; treat that as empty.
        files = validated_data.pop("files", None) or []
        validated_data["images"] = [
            Image.objects.create(file=file_i, filename=file_i.name, user=owner)
            for file_i in files
        ]
        return super().create(validated_data=validated_data)

    class Meta:
        model = Weibo
        fields = ("id", "user", "content", "images", "video", "link", "address",
                  "is_public", "time_added", "is_deleted", "files", "comments")
class WeiboSimpleModelSerializer(serializers.ModelSerializer):
    """Read-only weibo serializer without address/comments, used where a
    lightweight nested representation is enough.
    """
    user = serializers.SlugRelatedField(
        required=False, read_only=True, slug_field="username")
    images = ImageInfoModelSerializer(
        required=False, read_only=True, many=True)

    class Meta:
        model = Weibo
        fields = ("id", "user", "content", "images", "video", "link",
                  "is_public", "time_added", "is_deleted")
class CommentDetailModelSerializer(serializers.ModelSerializer):
    """Comment serializer that embeds the parent weibo as a nested,
    read-only object (for detail views).
    """
    user = serializers.SlugRelatedField(
        required=False, read_only=True, slug_field="username")
    weibo = WeiboSimpleModelSerializer(read_only=True)

    class Meta:
        model = Comment
        fields = ("id", "weibo", "user", "content", "address", "time_added")
|
<?php
/*
* This file is part of the lxpgw/logger.
*
* (c) lichunqiang <light-li@hotmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace lxpgw\logger;
use Yii;
use yii\log\Logger;
use yii\log\Target;
/**
* ~~~
* 'log' => [
* 'traceLevel' => 3,
* 'targets' => [
* [
* 'class' => LogTarget::class,
* 'categories' => ['category1'],
* 'exportInterval' => 1, //send every message
* 'logVars' => [],
* ]
* ]
* ]
* ~~~.
*
* @version 0.0.0
*
* @author lichunqaing <light-li@hotmail.com>
*/
class LogTarget extends Target
{
    /**
     * @var string The channel the messages are delivered to.
     */
    public $channel;
    /**
     * @var string Title used for outgoing messages; defaults to "Logger".
     */
    public $title;
    /**
     * @var Pubu Robot instance that performs the actual delivery.
     */
    public $robot;

    /**
     * {@inheritdoc}
     */
    public function init()
    {
        parent::init();
        // Build the delivery robot from the configured channel.
        $this->robot = Yii::createObject([
            'class' => Pubu::class,
            'remote' => $this->channel,
        ]);
    }

    /**
     * Exports log [[messages]] to the configured channel.
     */
    public function export()
    {
        $this->robot->send($this->title ?: 'Logger', $this->getAttachments());
    }

    /**
     * Builds one attachment per buffered message, e.g.
     *
     * ~~~
     * [
     *     'title' => 'this is title',
     *     'description' => 'this is description',
     *     'url' => 'http://www.baidu.com',
     *     'color' => 'success'
     * ]
     * ~~~.
     *
     * @return array
     */
    private function getAttachments()
    {
        $attachments = [];
        // The array index of each message is not needed, only the message itself.
        foreach ($this->messages as $message) {
            $attachments[] = [
                'title' => $message[2], // category as title
                'description' => htmlspecialchars($this->formatMessage($message)),
                'color' => $this->getLevelColor($message[1]),
            ];
        }
        return $attachments;
    }

    /**
     * Maps a Yii log level to a display color name.
     *
     * @param int $level one of the Logger::LEVEL_* constants
     *
     * @return string color name; unknown levels fall back to 'success'
     */
    private function getLevelColor($level)
    {
        $colors = [
            Logger::LEVEL_ERROR => 'error',
            Logger::LEVEL_WARNING => 'warning',
            Logger::LEVEL_INFO => 'info',
            Logger::LEVEL_PROFILE => 'primary',
            Logger::LEVEL_TRACE => 'muted',
        ];
        return isset($colors[$level]) ? $colors[$level] : 'success';
    }
}
|
require 'frap/version'
require 'frap/create_app'
require 'frap/create_resource'
require 'frap/commands/generate'
require 'frap/generators/config'
require 'frap/generators/flutter_config'
require 'frap/generators/flutter_resource'
require 'thor'
require 'yaml'
module Frap
  # Command-line entry point for the frap executable, built on Thor.
  class CLI < Thor
    # Fixed: the description said "MyGem" (template leftover); this gem is Frap.
    desc 'version', 'Display Frap version'
    map %w[-v --version] => :version
    class_option 'verbose', type: :boolean, default: false

    # Prints the gem version.
    def version
      say "Frap #{Frap::VERSION}"
    end

    desc 'new APP_NAME', 'Lower cased underscored app name'
    long_desc <<-LONGDESC
      Using the "app_name", it will create the parent directory and the Rails and Flutter sub directories,
      "app_name_server" and "app_name_ui"
    LONGDESC
    # Scaffolds the parent directory plus the Rails and Flutter sub-projects.
    def new(app_name)
      CreateApp.new(app_name).build
    end

    desc 'generate GENERATOR RESOURCE_NAME', 'Specify the generator and a name'
    long_desc <<-LONGDESC
      Generate all the boiler plate code such as controllers, pages and blocks for Rails and Flutter.
      run `frap g help` to see a list of existing generators
    LONGDESC
    subcommand 'generate', Frap::Commands::Generate
  end
end
|
package models
import (
"time"
"github.com/omnibuildplatform/omni-manager/util"
)
// Lifecycle states recorded in BaseImages.Status.
const (
	ImageStatusStart       string = "created"
	ImageStatusDownloading string = "downloading"
	ImageStatusDone        string = "succeed"
	ImageStatusFailed      string = "failed"
)
// BaseImagesKickStart pairs a base image with a kickstart file selection.
// NOTE(review): the struct tags carry a leading space before "description"
// — confirm whatever reads these tags tolerates that.
type BaseImagesKickStart struct {
	Label            string ` description:"name"`
	Desc             string ` description:"desc"`
	BaseImageID      string ` description:"BaseImages ID"`
	KickStartID      string ` description:"KickStart ID"`
	KickStartContent string ` description:"KickStart Content"`
	KickStartName    string ` description:"KickStart name"`
}
// BaseImages is the GORM model backing the base_images table
// (see TableName below).
type BaseImages struct {
	ID       int    ` description:"id" gorm:"primaryKey"`
	Name     string ` description:"name"`
	ExtName  string ` description:"ext name"`
	Desc     string ` description:"desc"`
	Checksum string ` description:"checksum"`
	// Url is excluded from persistence (gorm:"-"); filled in at runtime.
	Url        string    ` description:"url" gorm:"-"`
	Arch       string    ` description:"arch"`
	Status     string    ` description:"status"`
	UserId     int       ` description:"user id"`
	CreateTime time.Time ` description:"create time"`
}
// TableName maps BaseImages to the "base_images" database table.
func (t *BaseImages) TableName() string {
	return "base_images"
}
// AddBaseImages inserts m as a new row in the base_images table and
// reports any database error.
func AddBaseImages(m *BaseImages) (err error) {
	db := util.GetDB()
	err = db.Create(m).Error
	return
}
// GetBaseImagesByID loads the BaseImages row with the given primary key.
// NOTE(review): when no row matches, this returns (nil, tx.Error) and
// tx.Error may itself be nil, so callers can receive (nil, nil) — confirm
// all callers check the pointer, not only the error.
func GetBaseImagesByID(id int) (v *BaseImages, err error) {
	o := util.GetDB()
	v = new(BaseImages)
	v.ID = id
	tx := o.Model(v).Find(v)
	if tx.RowsAffected == 0 {
		return nil, tx.Error
	}
	return v, tx.Error
}
// GetMyBaseImages returns one page of the base images owned by userid,
// newest (highest id) first, together with the total row count.
// Fixed: the named return err was never assigned, so database errors from
// Count and Scan were silently dropped.
func GetMyBaseImages(userid int, offset int, limit int) (total int64, ml []*BaseImages, err error) {
	o := util.GetDB()
	tx := o.Model(new(BaseImages)).Where("user_id", userid)
	if err = tx.Count(&total).Error; err != nil {
		return
	}
	err = tx.Limit(limit).Offset(offset).Order("id desc").Scan(&ml).Error
	return
}
// DeleteBaseImagesById
func DeleteBaseImagesById(userid, id int) (deleteNum int, err error) {
o := util.GetDB()
m := new(BaseImages)
m.ID = id
result := o.Debug().Model(m).Where("user_id", userid).Delete(m)
return int(result.RowsAffected), result.Error
}
// UpdateBaseImages persists the listed columns of m and then re-reads the
// row so m reflects the stored state.
func UpdateBaseImages(m *BaseImages) (err error) {
	db := util.GetDB()
	if err = db.Model(m).Select("checksum", "name", "desc", "url", "arch", "status").Updates(m).Error; err != nil {
		return err
	}
	return db.Find(m).Error
}
// UpdateBaseImagesStatus updates only the status column of m.
// Simplified: the previous nil-check branch returned result.Error in both
// arms, so the error is now returned unconditionally.
func UpdateBaseImagesStatus(m *BaseImages) (err error) {
	o := util.GetDB()
	return o.Debug().Model(m).Select("status").Updates(m).Error
}
|
package com.android.iam.retrofittutorial;
import java.util.List;
import retrofit.Callback;
import retrofit.http.Field;
import retrofit.http.FormUrlEncoded;
import retrofit.http.POST;
public interface ApiInterface {

    /**
     * Registers a new user by POSTing a form-url-encoded body to
     * {@code /retrofit/register.php}.
     *
     * Each {@code @Field} sets one form key; the {@code callback} receives
     * the parsed {@link SignUpResponse} (or the failure) asynchronously.
     *
     * Note: the redundant {@code public} modifier was removed — interface
     * methods are implicitly public.
     */
    @FormUrlEncoded // request body is sent as application/x-www-form-urlencoded
    @POST("/retrofit/register.php") // API endpoint
    void registration(@Field("name") String name,
                      @Field("email") String email,
                      @Field("password") String password,
                      @Field("logintype") String logintype,
                      Callback<SignUpResponse> callback);
}
|
/*
* Copyright 2016 ksilin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.fapi
import java.time.{ Instant, LocalDateTime, ZoneOffset }
import java.util.Date
import io.getquill.ast.Desc
import io.getquill.naming.Literal
import io.getquill.sources.sql.idiom.H2Dialect
import io.getquill.{ JdbcSourceConfig, QueryProbing, _ }
import org.joda.time.{ DateTime, DateTimeComparator, DateTimeZone }
import scala.util.Random
package object data {
  // Column codecs: java.util.Date <-> joda DateTime for quill encoding.
  implicit val decodeDateTime = mappedEncoding[Date, DateTime](new DateTime(_))
  implicit val encodeDateTime = mappedEncoding[DateTime, Date](_.toDate)
  // Column codecs: java.util.Date <-> java.time.LocalDateTime, pinned to UTC.
  implicit val encodeDate = mappedEncoding[Date, LocalDateTime](d =>
    LocalDateTime.ofInstant(Instant.ofEpochMilli(d.getTime), ZoneOffset.UTC))
  implicit val decodeDate = mappedEncoding[LocalDateTime, Date](ldt =>
    Date.from(ldt.toInstant(ZoneOffset.UTC)))

  // Comparison operators usable inside quill quotations; lowered to raw SQL
  // via infix so date comparisons happen in the database.
  implicit class ForLocalDateTime(ldt: LocalDateTime) {
    def > = quote((arg: LocalDateTime) => infix"$ldt > $arg".as[Boolean])
    def < = quote((arg: LocalDateTime) => infix"$ldt < $arg".as[Boolean])
  }
  implicit class ForDateTime(ldt: DateTime) {
    def > = quote((arg: DateTime) => infix"$ldt > $arg".as[Boolean])
    def < = quote((arg: DateTime) => infix"$ldt < $arg".as[Boolean])
  }

  // private val dbName: String = "h2DB"
  // H2-backed quill source, configured under the "h2DB" config key.
  val h2DB = source(new JdbcSourceConfig[H2Dialect, Literal]("h2DB"))
  // val h2DBWithQueryProbing = source(new JdbcSourceConfig[H2Dialect, Literal](dbName) with QueryProbing)

  // One sampled load measurement for a machine.
  case class Load(
      machine: String,
      cpu: Int = 0,
      mem: Int = 0,
      records: Long = 0L,
      time: DateTime = DateTime.now()
  )

  // Pre-quoted queries over Load.
  val loads = quote {
    query[Load]
  }
  val deleteLoads = quote {
    query[Load].delete
  }
  // Deletes every load sampled strictly before t (uses the infix `<` above).
  val deleteLoadsBefore = quote { (t: DateTime) =>
    query[Load].filter(_.time < t).delete
  }
  val insertload = quote { (l: Load) =>
    query[Load].insert(l)
  }
  // Loads for one machine sampled strictly after t.
  val loadsAfterFor = quote { (t: DateTime, machine: String) =>
    query[Load].filter(_.time > t).filter(_.machine == machine)
  }
  // The `count` most recent loads for a machine, newest first.
  val xlastLoadsFor = quote { (machine: String, count: Int) =>
    query[Load]
      .withFilter(_.machine == machine)
      .sortBy(_.time)(Ord.descNullsLast)
      .take(count)
  }

  // Task lifecycle records; ids are random 16-char alphanumeric strings.
  case class Task(
      name: String,
      createdAt: DateTime = DateTime.now(),
      modifiedAt: Option[DateTime] = None,
      active: Boolean = true,
      id: String = Random.alphanumeric.take(16).mkString
  )
  case class TaskStart(
      name: String,
      startedAt: DateTime = DateTime.now(),
      modifiedAt: Option[DateTime] = None,
      id: String = Random.alphanumeric.take(16).mkString
  )
  case class TaskEnd(
      name: String,
      doneAt: DateTime = DateTime.now(),
      successful: Boolean = true,
      msg: Option[String],
      id: String = Random.alphanumeric.take(16).mkString
  )
  case class TaskRun(
      name: String,
      createdAt: DateTime = DateTime.now(),
      startedAt: Option[DateTime] = None,
      doneAt: Option[DateTime] = None,
      successful: Boolean = true,
      msg: Option[String] = None,
      id: String = Random.alphanumeric.take(16).mkString
  )
}
|
package com.nepal.adversify.domain.model;
/**
 * Plain data holder for a geographic position.
 */
public class LocationModel {
    // Identifier of this location record.
    public int id;
    // Latitude in decimal degrees.
    public double lat;
    // Longitude in decimal degrees.
    public double lon;
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import dunit.Host;
import dunit.VM;
import java.util.Properties;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.util.test.TestUtil;
/**
* @author gthombar This class tests regions created by xml files
*/
public class PartitionedRegionCacheXMLExampleDUnitTest extends
    PartitionedRegionDUnitTestCase {

  // Shared across the runnables executed inside each VM.
  protected static Cache cache;

  public PartitionedRegionCacheXMLExampleDUnitTest(String name) {
    super(name);
  }

  /**
   * Creates two root partitioned regions from PartitionRegionCacheExample1.xml
   * in two VMs, validates puts/gets and the cache loader, then disconnects
   * one VM and re-creates the cache from XML to validate recovery.
   */
  public void testExampleWithBothRootRegion() {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    // Boots a cache in the target VM from the example XML file.
    CacheSerializableRunnable createCache = new CacheSerializableRunnable(
        "createCache") {
      public void run2() throws CacheException {
        Properties props = new Properties();
        String xmlfilepath = TestUtil.getResourcePath(getClass(), "PartitionRegionCacheExample1.xml");
        props.setProperty("cache-xml-file", xmlfilepath);
        getSystem(props);
        cache = getCache();
      }
    };
    // Verifies both root partitioned regions declared in the XML are usable.
    CacheSerializableRunnable validateRegion = new CacheSerializableRunnable(
        "validateRegion") {
      public void run2() throws CacheException {
        PartitionedRegion pr1 = (PartitionedRegion) cache
            .getRegion(Region.SEPARATOR + "firstPartitionRegion");
        assertNotNull("Partiton region cannot be null", pr1);
        // A get for an absent key should fall through to the cache loader.
        Object obj = pr1.get("1");
        assertNotNull("CacheLoader is not invoked", obj);
        pr1.put("key1", "value1");
        assertEquals(pr1.get("key1"), "value1");
        PartitionedRegion pr2 = (PartitionedRegion) cache
            .getRegion(Region.SEPARATOR + "secondPartitionedRegion");
        assertNotNull("Partiton region cannot be null", pr2);
        pr2.put("key2", "value2");
        assertEquals(pr2.get("key2"), "value2");
      }
    };
    CacheSerializableRunnable disconnect = new CacheSerializableRunnable(
        "disconnect") {
      public void run2() throws CacheException {
        closeCache();
        disconnectFromDS();
      }
    };
    vm0.invoke(createCache);
    vm1.invoke(createCache);
    vm0.invoke(validateRegion);
    // Disconnecting and again creating the PR from xml file
    vm1.invoke(disconnect);
    vm1.invoke(createCache);
    vm1.invoke(validateRegion);
  }

  /**
   * Creates a partitioned region nested under /root from
   * PartitionRegionCacheExample2.xml in two VMs and validates it.
   */
  public void testExampleWithSubRegion() {
    Host host = Host.getHost(0);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    CacheSerializableRunnable createCacheSubregion = new CacheSerializableRunnable(
        "createCacheSubregion") {
      public void run2() throws CacheException {
        Properties props = new Properties();
        String xmlfilepath = TestUtil.getResourcePath(getClass(), "PartitionRegionCacheExample2.xml");
        props.setProperty("cache-xml-file", xmlfilepath);
        getSystem(props);
        cache = getCache();
      }
    };
    CacheSerializableRunnable validateSubRegion = new CacheSerializableRunnable(
        "validateRegion") {
      public void run2() throws CacheException {
        PartitionedRegion pr = (PartitionedRegion) cache
            .getRegion(Region.SEPARATOR + "root" + Region.SEPARATOR
                + "PartitionedSubRegion");
        assertNotNull("Partiton region cannot be null", pr);
        assertTrue(PartitionedRegionHelper
            .isSubRegion(pr.getFullPath()));
        // A get for an absent key should fall through to the cache loader.
        Object obj = pr.get("1");
        assertNotNull("CacheLoader is not invoked", obj);
        pr.put("key1", "value1");
        assertEquals(pr.get("key1"), "value1");
      }
    };
    vm2.invoke(createCacheSubregion);
    vm3.invoke(createCacheSubregion);
    vm2.invoke(validateSubRegion);
  }
}
|
import 'dart:async';
import '../data/data.dart';
import 'dart:collection';
import 'search_result.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
/// Page that lets the user pick a city, with text search and a
/// first-letter index bar for fast jumping.
class SelectorPage extends StatefulWidget {
  SelectorPage({Key key, this.title}) : super(key: key);

  /// Title supplied by the embedding route (currently unused in the state).
  final String title;

  @override
  _SelectorPageState createState() => new _SelectorPageState();
}
class _SelectorPageState extends State<SelectorPage> {
  // First letter -> index of the first city with that letter in china_cities_data.
  Map<String, dynamic> cityUpWordStartIndex = new HashMap();
  // Ordered distinct first letters; backs the right-hand index bar.
  List<String> cityUpWordArr = new List();
  ScrollController scrollController = new ScrollController();
  // Height of a grey section-title row in the city list.
  double tileTileHeight = 32.0;
  // Height of one entry in the index bar.
  double selectUpWordTileHeight = 24.0;
  // Number of rows the hot-city grid occupies; used in scroll-offset math.
  int hotCityHeightScale = 1;
  // Letter shown in the centered overlay / last letter slid over.
  String selectedUpWord = "", curSelectedUpWord = "";
  bool isSearchResultPage = false;
  // Current search hits, passed to SearchResult.
  List<Map> searchMap = new List();

  @override
  void initState() {
    super.initState();
    // Scan the (pinyin-sorted) city list once to build the letter index.
    for (int i = 0; i < china_cities_data.length; i++) {
      String currentFLetter = getFirstLetter(china_cities_data[i]["pinyin"]);
      String preFLetter =
          i >= 1 ? getFirstLetter(china_cities_data[i - 1]["pinyin"]) : "";
      if (currentFLetter != preFLetter) {
        cityUpWordStartIndex[currentFLetter] = i;
        cityUpWordArr.add(currentFLetter);
      }
    }
  }

  @override
  Widget build(BuildContext context) {
    return new Scaffold(
      appBar: new AppBar(
        title: _getTopSearchWidget(),
      ),
      body: _getContentWidget(),
    );
  }

  // Either the search-result list or the indexed city list with overlays.
  Widget _getContentWidget() {
    if (isSearchResultPage) {
      return new SearchResult(searchMap);
    } else {
      return new Stack(
        alignment: Alignment.topRight,
        children: <Widget>[
          _getCityList(),
          _getWordSelectWidget(),
          _getShowSelectedCenterWidget()
        ],
      );
    }
  }

  // Search box shown inside the app bar.
  Widget _getTopSearchWidget() {
    return new Row(
      children: <Widget>[
        new Expanded(
            child: new Container(
          alignment: Alignment.centerLeft,
          height: 32.0,
          padding: const EdgeInsets.only(left: 12.0, right: 12.0),
          margin: const EdgeInsets.only(right: 20.0),
          child: new TextField(
            onChanged: _search,
            style: new TextStyle(fontSize: 14.0, color: Colors.black),
            decoration: new InputDecoration.collapsed(
                hintText: Strings.SEARCH_HINT_TEXT),
          ),
          decoration: new BoxDecoration(color: Colors.grey[200]),
        )),
      ],
    );
  }

  // Dispatches a query: CJK input searches the "name" field, otherwise "pinyin".
  void _search(String name) {
    if (name != null && name.length > 0) {
      if (name.contains(new RegExp("^[\u4e00-\u9fff]"))) {
        _startSearch(name, "name");
      } else {
        _startSearch(name, "pinyin");
      }
    } else {
      // Empty query: drop back to the full indexed list.
      _setListState();
    }
  }

  void _startSearch(String name, String key) {
    searchWord(name, key).then((List<Map> map) {
      searchMap = map;
      _setSearchResultState();
    });
  }

  // Substring match of `name` against the given field of every city record.
  Future<List<Map>> searchWord(String name, String key) async {
    List<Map> result = new List();
    for (int i = 0; i < china_cities_data.length; i++) {
      if (china_cities_data[i][key].toString().contains(name)) {
        result.add(china_cities_data[i]);
      }
    }
    return result;
  }

  _setSearchResultState() {
    setState(() {
      isSearchResultPage = true;
      selectedUpWord = "";
    });
  }

  _setListState() {
    setState(() {
      isSearchResultPage = false;
      selectedUpWord = "";
    });
  }

  Widget _getCityList() {
    return new ListView(
      children: _getCityItem(),
      controller: scrollController,
    );
  }

  Widget _getHotCityList() {
    return new Container(
        padding: const EdgeInsets.only(right: 20.0),
        child: new Column(
          children: _getHotCityItem(),
        ));
  }

  // Builds the hot-city grid, 3 tiles per row, padding the last row with
  // empty placeholders; records the row count in hotCityHeightScale.
  List<Widget> _getHotCityItem() {
    int rowNum = 3;
    int columnNum = 1;
    columnNum = (china_cities_hot_data.length % rowNum > 0)
        ? (china_cities_hot_data.length / rowNum + 1).toInt()
        : (china_cities_hot_data.length / rowNum).toInt();
    if (columnNum >= 1) {
      hotCityHeightScale = columnNum;
    }
    List<Widget> arr = new List();
    for (int i = 0; i < columnNum; i++) {
      List<Widget> row = new List();
      for (int j = 0; j < rowNum; j++) {
        if ((i * rowNum + j) <= (china_cities_hot_data.length - 1)) {
          row.add(new GestureDetector(
            child: new Container(
              alignment: Alignment.center,
              color: Colors.grey[200],
              child: new Text(china_cities_hot_data[i * rowNum + j]["name"]),
              width: 76.0,
              height: 32.0,
            ),
            onTap: () {
              _onTileClick(china_cities_hot_data[i * rowNum + j]["name"]);
            },
          ));
        } else {
          // Placeholder keeps the last row evenly spaced.
          row.add(new Container(
            width: 76.0,
            height: 32.0,
          ));
        }
      }
      arr.add(new Container(
        child: new Row(
          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
          children: row,
        ),
        height: Values.TILE_HEIGHT,
      ));
    }
    return arr;
  }

  // Builds the full list: a grey header whenever the first letter changes,
  // then either the hot-city grid (pseudo-letter "0") or a tappable city row.
  List<Widget> _getCityItem() {
    List<Widget> arr = new List();
    for (int i = 0; i < china_cities_data.length; i++) {
      String currentFLetter = getFirstLetter(china_cities_data[i]["pinyin"]);
      String preFLetter =
          i >= 1 ? getFirstLetter(china_cities_data[i - 1]["pinyin"]) : "";
      if (currentFLetter != preFLetter) {
        String tempTitle = currentFLetter;
        if (_isHotCityDes(tempTitle)) {
          tempTitle = Strings.HOT_CITY_TITLE;
        } else if (_isLocationCityDes(tempTitle)) {
          tempTitle = Strings.LOCATION_CITY_TITLE;
        }
        arr.add(new Container(
          alignment: Alignment.centerLeft,
          color: Colors.grey[200],
          height: tileTileHeight,
          padding: const EdgeInsets.only(left: 24.0),
          child: new Text(tempTitle),
        ));
      }
      if (_isHotCityDes(currentFLetter)) {
        arr.add(_getHotCityList());
      } else {
        arr.add(new GestureDetector(
          behavior: HitTestBehavior.opaque,
          child: new Container(
            height: Values.TILE_HEIGHT,
            alignment: Alignment.centerLeft,
            padding: const EdgeInsets.only(left: 24.0),
            child: new Text(china_cities_data[i]["name"]),
          ),
          onTap: () {
            _onTileClick(china_cities_data[i]["name"]);
          },
        ));
      }
    }
    return arr;
  }

  // Right-hand letter index bar; supports tap and vertical drag.
  Widget _getWordSelectWidget() {
    return new SizedBox(
      child: new ListView.builder(
        itemBuilder: (BuildContext c, int index) {
          return new GestureDetector(
            behavior: HitTestBehavior.opaque,
            onTapUp: (TapUpDetails detail) {
              _setUpWordUpState();
            },
            onTapDown: (TapDownDetails detail) {
              _setUpWordDownState(cityUpWordArr[index]);
            },
            onVerticalDragUpdate: (DragUpdateDetails detail) {
              // Convert the global drag position into the bar's local
              // coordinates to find which letter is under the finger.
              RenderSliverList sliverList = c.findRenderObject();
              RenderBox getBox = sliverList.firstChild;
              var local = getBox.globalToLocal(detail.globalPosition);
              debugPrint(
                  local.toString() + "|" + detail.globalPosition.toString());
              _setSlideState(local.dy);
            },
            onVerticalDragEnd: (DragEndDetails detail) {
              _setUpWordUpState();
            },
            child: new Container(
              height: selectUpWordTileHeight,
              alignment: Alignment.center,
              child: new Text(_isHotCityDes(cityUpWordArr[index])
                  ? Strings.HOT_CITY_UP_WORD
                  : (_isLocationCityDes(cityUpWordArr[index])
                      ? Strings.LOCATION_CITY_UP_WORD
                      : cityUpWordArr[index])),
            ),
          );
        },
        itemCount: cityUpWordArr.length,
      ),
      width: 44.0,
    );
  }

  // Centered overlay showing the letter currently pressed on the index bar.
  Widget _getShowSelectedCenterWidget() {
    if (selectedUpWord.length > 0) {
      return new Center(
        child: new Container(
          color: Colors.grey,
          child: new SizedBox(
            width: 100.0,
            height: 100.0,
            child: new Center(
              child: new Text(
                selectedUpWord,
                style: new TextStyle(color: Colors.white, fontSize: 44.0),
              ),
            ),
          ),
        ),
      );
    }
    return new Container();
  }

  _setUpWordDownState(String word) {
    setState(() {
      selectedUpWord = _isHotCityDes(word)
          ? Strings.HOT_CITY_UP_WORD
          : (_isLocationCityDes(word) ? Strings.LOCATION_CITY_UP_WORD : word);
    });
    _setScrollToWord(word);
  }

  _setUpWordUpState() {
    setState(() {
      selectedUpWord = "";
    });
  }

  // Maps a drag y-offset within the index bar to a letter and scrolls there;
  // only reacts when the letter under the finger changes.
  _setSlideState(double localPosition) {
    double minHeight = 0.0;
    double maxHeight = cityUpWordArr.length * selectUpWordTileHeight;
    if (localPosition >= minHeight && localPosition <= maxHeight) {
      double index = localPosition / selectUpWordTileHeight;
      if (index >= 0) {
        String slideToWord = cityUpWordArr[index.toInt()];
        if (curSelectedUpWord != slideToWord) {
          setState(() {
            selectedUpWord = _isHotCityDes(slideToWord)
                ? Strings.HOT_CITY_UP_WORD
                : (_isLocationCityDes(slideToWord)
                    ? Strings.LOCATION_CITY_UP_WORD
                    : slideToWord);
          });
          _setScrollToWord(slideToWord);
          curSelectedUpWord = slideToWord;
        }
      }
    }
  }

  // Computes the pixel offset of the first entry for `word`:
  // city rows seen so far (hot-city grid counted as hotCityHeightScale rows)
  // plus one section header per earlier letter.
  _setScrollToWord(String word) {
    double value = 0.0;
    if (cityUpWordArr.indexOf(word) == 0) {
    } else {
      value = (cityUpWordStartIndex[word] +
              (hotCityHeightScale - 1) * (_isHotCityDes(word) ? 0 : 1)) *
              Values.TILE_HEIGHT +
          cityUpWordArr.indexOf(word) * tileTileHeight;
    }
    scrollController.animateTo(value,
        duration: new Duration(milliseconds: 10), curve: Curves.ease);
  }

  // Returns the chosen city name to the caller of this route.
  _onTileClick(String name) {
    Navigator.of(context).pop(name);
  }

  String getFirstLetter(String pinyin) {
    return pinyin.substring(0, 1).toUpperCase();
  }

  // "0" is the pseudo-pinyin marking the hot-city section.
  bool _isHotCityDes(String pinyin) {
    return pinyin == "0";
  }

  // "1" is the pseudo-pinyin marking the located-city section.
  bool _isLocationCityDes(String pinyin) {
    return pinyin == "1";
  }
}
|
package org.pyx.base;
import com.google.common.base.CaseFormat;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import static org.junit.Assert.assertEquals;
/**
* Created by pyx on 2018/7/13.
*/
//@RunWith(SpringRunner.class)
//@SpringBootTest(classes = Application.class)
public class CaseFormatTest {

    /** lower-hyphen to lower_underscore conversion. */
    @Test
    public void testLowerHyphenToLowerUnderscore() {
        assertEquals("foo", CaseFormat.LOWER_HYPHEN.to(CaseFormat.LOWER_UNDERSCORE, "foo"));
        assertEquals("foo_bar", CaseFormat.LOWER_HYPHEN.to(CaseFormat.LOWER_UNDERSCORE, "foo-bar"));
    }

    /**
     * lower_underscore to UPPER_UNDERSCORE conversion.
     *
     * Fixed: this method was missing its {@code @Test} annotation and
     * contained leftover debug code ({@code if (true) throw ...}) that made
     * the assertions unreachable.
     */
    @Test
    public void testLowerUnderscoreToUpperUnderscore() {
        assertEquals("FOO", CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, "foo"));
        assertEquals("FOO_BAR", CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, "foo_bar"));
    }

    /**
     * lowerCamel to lower-hyphen conversion; every upper-case character
     * starts a new segment, so "HTTP" becomes "h-t-t-p".
     * Fixed: was missing its {@code @Test} annotation.
     */
    @Test
    public void testLowerCamelToLowerHyphen() {
        assertEquals("foo", CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, "foo"));
        assertEquals("foo-bar", CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, "fooBar"));
        assertEquals("h-t-t-p", CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, "HTTP"));
    }
}
|
/// Transmission delay of a synapse, measured in whole ticks.
/// Constructed through `SynapseDelay::new`, which enforces a positive value.
#[derive(Debug, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
pub struct SynapseDelay(u8);
impl SynapseDelay {
pub fn new(delay: u8) -> Self {
assert!(delay > 0);
Self(delay)
}
pub fn get(self) -> u8 {
self.0
}
}
|
#include "Graphics.Layouts.h"
#include "Data.JSON.h"
#include <algorithm>
#include "Graphics.Types.h"
#include "Common.Properties.h"
#include "Graphics.Properties.h"
namespace graphics::Layout { void Draw(SDL_Renderer*, const nlohmann::json&); }
namespace graphics::Layouts
{
	// Layout name -> parsed layout JSON; populated once by Start().
	static std::map<std::string, nlohmann::json> layouts;
	// Table file mapping layout names to their JSON file names.
	static nlohmann::json table;

	// Loads the layout table from fileName, then loads every referenced
	// layout file into the `layouts` cache.
	void Start(const std::string& fileName)
	{
		table = data::JSON::Load(fileName);
		for (auto& item : table.items())
		{
			layouts[item.key()] = data::JSON::Load(item.value());
		}
	}

	// Renders the named layout with the given renderer.
	void Draw(SDL_Renderer* renderer, const std::string& layoutName)
	{
		graphics::Layout::Draw(renderer, layouts[layoutName]);
	}

	// Returns the currently selected value of the menu `menuId` inside
	// `layoutName`, or an empty optional when no such menu exists.
	std::optional<int> GetMenuValue(const std::string& layoutName, const std::string& menuId)
	{
		for (auto& thingie : layouts[layoutName])
		{
			if (thingie[common::Properties::TYPE] == graphics::Types::MENU &&
				thingie[graphics::Properties::MENU_ID] == menuId)
			{
				int index = thingie[graphics::Properties::INDEX];//TODO: this cannot be proxied!
				return (int)thingie[graphics::Properties::MENU_ITEMS][index][graphics::Properties::VALUE];
			}
		}
		return std::optional<int>();
	}

	// Moves the menu's selected index by delta, wrapping in both directions
	// (itemCount is added before the modulo so delta = -1 cannot go negative).
	static void ChangeMenuIndex(const std::string& layoutName, const std::string& menuId, int delta)
	{
		for (auto& thingie : layouts[layoutName])
		{
			if (thingie[common::Properties::TYPE] == graphics::Types::MENU &&
				thingie[graphics::Properties::MENU_ID] == menuId)
			{
				auto itemCount = thingie[graphics::Properties::MENU_ITEMS].size();
				//TODO: this cannot be proxied! vv
				thingie[graphics::Properties::INDEX] = (thingie[graphics::Properties::INDEX] + itemCount + delta) % itemCount;
			}
		}
	}

	// Advances the menu selection by one item (wraps to the first).
	void NextMenuIndex(const std::string& layoutName, const std::string& menuId)
	{
		ChangeMenuIndex(layoutName, menuId, 1);
	}

	// Moves the menu selection back by one item (wraps to the last).
	void PreviousMenuIndex(const std::string& layoutName, const std::string& menuId)
	{
		ChangeMenuIndex(layoutName, menuId, -1);
	}

	// Rewrites the text of every menu item matching menuItemId across all
	// menus in the layout.
	void SetMenuItemText(const std::string& layoutName, const std::string& menuItemId, const std::string& text)
	{
		for (auto& thingie : layouts[layoutName])
		{
			if (thingie[common::Properties::TYPE] == graphics::Types::MENU)
			{
				for (auto& menuItem : thingie[graphics::Properties::MENU_ITEMS])
				{
					if (menuItem.count(graphics::Properties::MENU_ITEM_ID) > 0 &&
						menuItem[graphics::Properties::MENU_ITEM_ID]==menuItemId)
					{
						menuItem[graphics::Properties::TEXT] = text;
					}
				}
			}
		}
	}

	// Rewrites the content of every text element matching textId.
	void SetTextText(const std::string& layoutName, const std::string& textId, const std::string& text)
	{
		for (auto& thingie : layouts[layoutName])
		{
			if (thingie[common::Properties::TYPE] == graphics::Types::TEXT)
			{
				if (thingie.count(graphics::Properties::TEXT_ID) > 0 &&
					thingie[graphics::Properties::TEXT_ID] == textId)
				{
					thingie[graphics::Properties::TEXT] = text;
				}
			}
		}
	}
}
|
package com.epam.drill.admin.api.agent
import com.epam.drill.admin.api.plugin.*
import kotlinx.serialization.Serializable
/** Supported agent runtimes, each with its human-readable notation. */
enum class AgentType(val notation: String) {
    JAVA("Java"),
    DOTNET(".NET"),
    NODEJS("Node.js")
}
/** Connection/registration state of an agent as seen by the admin service. */
enum class AgentStatus {
    NOT_REGISTERED,
    ONLINE,
    OFFLINE,
    BUSY;
}
/** Agent-level system settings; all fields default to "unset". */
@Serializable
data class SystemSettingsDto(
    val packages: List<String> = emptyList(),
    val sessionIdHeaderName: String = "",
    val targetHost: String = ""
)
/** Full agent view returned by the admin API (read model). */
@Serializable
data class AgentInfoDto(
    val id: String,
    val serviceGroup: String,
    val instanceIds: Set<String>,
    val name: String,
    val description: String = "",
    val environment: String = "",
    val status: AgentStatus,
    val buildVersion: String,
    val adminUrl: String = "",
    val ipAddress: String = "",
    val activePluginsCount: Int = 0,
    // Free-form type string here, unlike AgentCreationDto's typed AgentType.
    val agentType: String,
    val agentVersion: String,
    val systemSettings: SystemSettingsDto = SystemSettingsDto(),
    val plugins: Set<PluginDto> = emptySet()
)
/** Payload for creating a new agent; plugins are referenced by id. */
@Serializable
data class AgentCreationDto(
    val id: String,
    val agentType: AgentType,
    val name: String,
    val serviceGroup: String = "",
    val environment: String = "",
    val description: String = "",
    val systemSettings: SystemSettingsDto = SystemSettingsDto(),
    val plugins: Set<String> = emptySet()
)
/** Payload for registering an already-connected agent. */
@Serializable
data class AgentRegistrationDto(
    val name: String,
    val description: String = "",
    val environment: String = "",
    val systemSettings: SystemSettingsDto = SystemSettingsDto(),
    val plugins: List<String> = emptyList()
)
/** Payload for editing an agent's mutable metadata. */
@Serializable
data class AgentUpdateDto(
    val name: String,
    val description: String = "",
    val environment: String = ""
)
|
<?php
namespace afrizalmy\BWI;
use afrizalmy\BWI\trait\KumpulanKata;
class BadWord
{
    use KumpulanKata;

    /**
     * Menggunakan jarak Levenshtein distance untuk menghitung kemiripan kata.
     * (Uses Levenshtein distance <= 1 to detect near matches of a bad word.)
     *
     * @param array $wordCollect
     * @param string $word
     * @return boolean
     */
    public function Levenshtein(array $wordCollect, string $word): bool
    {
        foreach ($wordCollect as $bad) {
            if (levenshtein($word, $bad) <= 1) {
                return true;
            }
        }
        return false;
    }

    /**
     * Cek apakah kalimat mengandung kata kotor atau jorok atau kurang pantas.
     * (Checks whether the sentence contains profanity, including leetspeak
     * digits and near-miss spellings.)
     *
     * @param string $kalimat
     * @return boolean
     */
    public static function cek(string $kalimat): bool
    {
        $self = new self;
        $wordCollect = $self->kata();
        foreach (explode(' ', $kalimat) as $word) {
            // Undo leetspeak substitutions (e.g. digits standing in for vowels).
            foreach ($self->numToChar() as $key => $vokal) {
                $word = str_replace($key, $vokal, $word);
            }
            if (in_array(strtolower($word), $wordCollect)) {
                return true;
            }
            if ($self->Levenshtein($wordCollect, $word)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Mengganti huruf vocal dalam kata atau kalimat dengan karakter masking.
     * (Masks the vowels of any offending word with the masking character.)
     *
     * Fixed: the result check used `!strpos(...)`, which also treats a mask
     * at position 0 as "not masked" because strpos() returns int(0) there;
     * the comparison is now strict against false. The return type was also
     * documented as void although a string is returned.
     *
     * @param string $kata
     * @param string $masking
     * @param array $custom_word extra words to mask besides the built-in list
     * @return string
     */
    public static function masking(string $kata, $masking = '*', array $custom_word = [])
    {
        $self = new self;
        $bad_words = $self->kata();
        $new_words = [];
        foreach (explode(' ', $kata) as $word) {
            $tmpword = $word;
            // Collapse repeated characters ("anjiiing" -> "anjing").
            $word = preg_replace('/(.)\\1+/', "$1", $word);
            // Undo leetspeak substitutions before matching.
            foreach ($self->numToChar() as $key => $vokal) {
                $word = str_replace($key, $vokal, $word);
            }
            $isBad = in_array(strtolower($word), $bad_words)
                || $self->Levenshtein($bad_words, $word);
            // In the original code the custom-word path re-tested in_array and
            // had an unreachable else branch; only the membership test matters.
            $isCustom = count($custom_word) > 0
                && in_array(strtolower($word), $custom_word);
            if ($isBad || $isCustom) {
                $replaceString = str_ireplace(['a', 'i', 'u', 'e', 'o'], $masking, $tmpword);
                // Strict false check: strpos() returning 0 means the mask IS
                // present (at the first character).
                if (strpos($replaceString, $masking) === false) {
                    // Word without vowels: mask its last character instead.
                    $new_words[] = substr_replace($word, $masking, -1);
                } else {
                    $new_words[] = $replaceString;
                }
            } else {
                $new_words[] = $tmpword;
            }
        }
        return implode(' ', $new_words);
    }
}
|
/* eslint-env jest */
import React from 'react';
import { render, cleanup } from 'ink-testing-library';
import webpack from 'webpack';
import View from '../View';
// date-fns is mocked so snapshots contain a fixed, deterministic build time.
jest.mock('date-fns', () => ({
  format: (_builtAt: number, formatString: string) =>
    formatString.replace('HH:mm:ss', '07:58:40').replace(/'/g, ''),
}));

describe('BuildCommand#View', () => {
  afterEach(cleanup);

  // No rc file loaded: View must render its "missing rc" message.
  it('should render missing rc view', () => {
    const { lastFrame } = render(
      <View building={true} watch={false} results={null} rc={null} />,
    );
    expect(lastFrame()).toMatchSnapshot();
  });

  // Build in progress (building=true, no results yet).
  it('should render building view', () => {
    const { lastFrame } = render(
      <View
        building={true}
        watch={false}
        results={null}
        rc={{
          dir: 'src',
          webpack: {
            entry: 'client.js',
            output: {
              path: 'dist',
              filename: '[name].js',
            },
          },
        }}
      />,
    );
    expect(lastFrame()).toMatchSnapshot();
  });

  // Build failed: results is an Error instance.
  it('should render error view', () => {
    const results = new Error('Cannot compile.');
    const { lastFrame } = render(
      <View
        building={false}
        watch={false}
        results={results}
        rc={{
          dir: 'src',
          webpack: {
            entry: 'client.js',
            output: {
              path: 'dist',
              filename: '[name].js',
            },
          },
        }}
      />,
    );
    expect(lastFrame()).toMatchSnapshot();
  });

  // Successful build: results is a stubbed webpack.Stats object.
  it('should render built view', () => {
    const results = {
      compilation: {} as any,
      hasErrors() {
        return false;
      },
      hasWarnings() {
        return false;
      },
      toJson() {
        return {
          _showErrors: true,
          _showWarnings: true,
          assets: [{} as any],
          builtAt: 1580129920074,
          warnings: [],
          errors: [],
        };
      },
      toString() {
        return 'Compilation results!';
      },
    } as webpack.Stats;
    const { lastFrame } = render(
      <View
        building={false}
        watch={false}
        results={results}
        rc={{
          dir: 'src',
          webpack: {
            entry: 'client.js',
            output: {
              path: 'dist',
              filename: '[name].js',
            },
          },
        }}
      />,
    );
    expect(lastFrame()).toMatchSnapshot();
  });
});
|
package com.vanniktech.rxriddles.solutions
import io.reactivex.rxjava3.core.Observable
object Riddle23Solution {
    /** Casts every emission of [source] to [String], failing the stream on a mismatch. */
    fun solve(source: Observable<Any>): Observable<String> =
        source.cast(String::class.java)
}
|
package pl.lodz.p.michalsosn.rest.support;
import pl.lodz.p.michalsosn.domain.sound.filter.Filter;
import java.io.IOException;
import java.util.stream.IntStream;
import static pl.lodz.p.michalsosn.entities.ResultEntity.SoundFilterResultEntity;
/**
* @author Michał Sośnicki
*/
public class SoundFilterChartPack {

    // Target number of plotted points; longer spans are decimated down to this.
    private static final int PLOT_SIZE = 1000;

    private final double[] values;   // sampled (possibly decimated) filter values
    private final double startTime;  // clamped start time of the plotted span
    private final double duration;   // time between consecutive plotted values

    /** Builds a chart pack covering the filter's whole time span. */
    public SoundFilterChartPack(SoundFilterResultEntity result) throws IOException {
        this(result, null, null);
    }

    /**
     * Builds a chart pack for [startTime, endTime], clamped to the filter's
     * own domain; a null bound means "use the filter's own bound".
     */
    public SoundFilterChartPack(
            SoundFilterResultEntity result, Double startTime, Double endTime
    ) throws IOException {
        final Filter filter = result.getFilter();
        final int positiveLength = filter.getPositiveLength();
        final int negativeLength = filter.getNegativeLength();
        final double sampleDuration = filter.getSamplingTime().getDuration();
        // Clamp the requested window to [-negativeLength, positiveLength] samples.
        if (startTime == null || startTime < -negativeLength * sampleDuration) {
            startTime = -negativeLength * sampleDuration;
        }
        if (endTime == null || endTime > positiveLength * sampleDuration) {
            endTime = positiveLength * sampleDuration;
        }
        // Convert the time window into sample indices (inner-rounded so only
        // samples fully inside the window are taken).
        int sampleStart = Math.max(-negativeLength,
                (int) Math.ceil(startTime / sampleDuration));
        int sampleEnd = Math.min(positiveLength,
                (int) Math.floor(endTime / sampleDuration));
        int spanLength = sampleEnd - sampleStart;
        // Decimation step so that roughly PLOT_SIZE points are emitted.
        int step = (int) Math.floor((double) spanLength / PLOT_SIZE);
        if (step < 0) {
            // Inverted span: nothing to plot.
            this.values = new double[0];
            this.duration = Double.NaN;
        } else if (step <= 1) {
            // Short span: plot every sample.
            this.values = IntStream.range(sampleStart, sampleEnd)
                    .mapToDouble(filter::getValue).toArray();
            this.duration = sampleDuration;
        } else {
            // Long span: take every step-th sample.
            int resultLength = (int) Math.ceil((double) spanLength / step);
            this.values = IntStream.range(0, resultLength)
                    .map(i -> i * step + sampleStart)
                    .mapToDouble(filter::getValue)
                    .toArray();
            this.duration = sampleDuration * step;
        }
        this.startTime = startTime;
    }

    /** Plotted values, already decimated. */
    public double[] getValues() {
        return values;
    }

    /** Clamped start time of the plotted span. */
    public double getStartTime() {
        return startTime;
    }

    /** Time between consecutive plotted values; NaN when nothing is plotted. */
    public double getDuration() {
        return duration;
    }
}
|
package java2bash.java2bash.commands.yum;
import java.util.ArrayList;
import java.util.List;
import java2bash.java2bash.commands.Snippet;
import java2bash.java2bash.commands.conditions.IfCondition;
import java2bash.java2bash.common.BashString;
/**
 * Snippet that branches on whether a yum package is installed, by emitting a
 * helper shell function plus an if/else around the two given actions.
 */
public class YumIsInstalled implements Snippet {

    /*
     * Constants
     */
    final static String FUNCTION_NAME_IS_INSTALLED = "_java2bash_yum_isinstalled_";
    final static String isInstalledFunction = "function " + FUNCTION_NAME_IS_INSTALLED + " {\n" +
            " if yum list installed \"$@\" >/dev/null 2>&1; then\n" +
            " true\n" +
            " else\n" +
            " false\n" +
            " fi\n" +
            "}";

    /*
     * Member variables
     */
    final IfCondition ifCondition;

    /**
     * Builds the conditional; null actions default to {@link Snippet#NOOP}.
     *
     * @param packageName        package to test for
     * @param actionInstalled    snippet to run when the package is installed
     * @param actionNotInstalled snippet to run when it is not
     */
    public YumIsInstalled(BashString packageName, Snippet actionInstalled, Snippet actionNotInstalled) {
        final String condition = FUNCTION_NAME_IS_INSTALLED + " " + packageName.toString();
        final Snippet whenInstalled = (actionInstalled == null) ? Snippet.NOOP : actionInstalled;
        final Snippet whenMissing = (actionNotInstalled == null) ? Snippet.NOOP : actionNotInstalled;
        ifCondition = new IfCondition(condition, whenInstalled, whenMissing);
    }

    @Override
    public List<String> getIncludesList() {
        // Inner condition's includes first, then our helper function.
        final List<String> includes = new ArrayList<String>(ifCondition.getIncludesList());
        includes.add(isInstalledFunction);
        return includes;
    }

    @Override
    public String getCode() {
        return ifCondition.getCode();
    }

    @Override
    public String getCleanupCode() {
        return ifCondition.getCleanupCode();
    }
}
|
<!-- Create SubCategory -->
<div class="reveal reveal-update" id="createitem-<?php echo e($id); ?>" data-reveal data-close-on-click="false"
data-close-on-esc="false">
<div class="notification callout"></div>
<h3>Create Subcategory</h3>
<h2> for the <?php echo e($name); ?> Category</h2>
<form action="/admin/products/subcategories/<?php echo e($id); ?>/create" method="post">
<div class="input-group">
<input type="text" id="createitem-name-<?php echo e($id); ?>" name="name" >
<div>
<input type="submit" class="button create-subcategory"
data-token="<?php echo e(App\Classes\CSRFHandler::getToken()); ?>" id="<?php echo e($id); ?>" value="Create">
</div>
</div>
</form>
    <a href="/admin/products/categories" class="close-button" data-close aria-label="Close modal" type="button">
<span aria-hidden="true">×</span>
</a>
</div><?php /**PATH D:\Ecomsite\resources\views/admin/includes/createmodal.blade.php ENDPATH**/ ?>
|
package net.gesekus.newsaswebapp.domainmodel.inmemoryimpl
import net.gesekus.newsaswebapp.domainmodel._
import scala.util.Success
import scala.util.Try
import scala.util.Failure
// In-memory ChatRepository implementation backed by an immutable Map held in a var.
// NOTE(review): `chats` is mutated without synchronisation — assumes
// single-threaded access; confirm before using concurrently.
object ChatRepository extends ChatRepository {
  var chats: Map[ChatId, Chat] = Map()

  // Looks up a chat, failing with IllegalArgumentException when absent.
  def find(chatId: ChatId): Try[Chat] = {
    chats.get(chatId).map(Success.apply(_)).getOrElse(chatIdNotFoundFailure(chatId))
  }

  // Inserts or replaces the chat under its own id.
  def store(chat: Chat) = {
    chats = chats + (chat.id -> chat)
    Success(chat)
  }

  // Removes the chat, failing when the id is unknown.
  def remove(chatId: ChatId) = {
    if (chats.contains(chatId)) {
      chats = chats - chatId
      Success(chatId)
    } else {
      chatIdNotFoundFailure(chatId)
    }
  }

  // Membership test; always succeeds.
  def contains(chatId: ChatId) = {
    Success(chats.contains(chatId))
  }

  // Shared failure for unknown chat ids.
  private def chatIdNotFoundFailure(chatId: ChatId) = {
    Failure(new IllegalArgumentException("No chat found"))
  }
}
|
using Codelyzer.Analysis.Common;
using Codelyzer.Analysis.Model;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
namespace Codelyzer.Analysis.CSharp.Handlers
{
public class StructDeclarationHandler : UstNodeHandler
{
private StructDeclaration StructDeclaration { get => (StructDeclaration)UstNode; }
public StructDeclarationHandler(CodeContext context,
StructDeclarationSyntax syntaxNode)
: base(context, syntaxNode, new StructDeclaration())
{
var structSymbol = SemanticModel.GetDeclaredSymbol(syntaxNode);
StructDeclaration.Identifier = syntaxNode.Identifier.ToString();
if (structSymbol != null)
{
StructDeclaration.Reference.Namespace = GetNamespace(structSymbol);
StructDeclaration.Reference.Assembly = GetAssembly(structSymbol);
StructDeclaration.Reference.AssemblySymbol = structSymbol.ContainingAssembly;
}
}
}
}
|
import logging
import threading
import queue
from performance.driver.core.config import Configurable
from performance.driver.core.eventbus import EventBusSubscriber
from performance.driver.core.events import TeardownEvent
from performance.driver.core import fsm
class State(fsm.State):
    """
    The policy state provides some policy-specific functionality to the FSM.

    Both default handlers below sink the machine into its terminal ``End``
    state, so policies that have not implemented their own handling still
    terminate cleanly.
    """

    def onInterruptEvent(self, event):
        """
        When a policy receives an interrupt event it should sink to the end as
        soon as possible; this placeholder does exactly that.
        """
        self._sinkToEnd("Sinking to FSM.End due to interrupt event")

    def onStalledEvent(self, event):
        """
        When a policy receives a stalled event it should either continue the
        tests or, if that is not possible, be properly sinked; this default
        sinks.
        """
        self._sinkToEnd("Sinking to FSM.End due to stalled event")

    def _sinkToEnd(self, debugMessage):
        """Log ``debugMessage`` and jump to the owning FSM's End state."""
        self.logger.debug(debugMessage)
        self.goto(type(self._fsm).End)
class PolicyFSM(fsm.FSM, Configurable, EventBusSubscriber):
    """
    A policy-oriented FSM.

    Events from the event bus are queued and replayed on a dedicated thread so
    the FSM implementation only ever runs single-threaded. Subclasses must
    define an ``End`` state; construction fails otherwise.
    """

    def __init__(self, config, eventbus, parameterBatch):
        fsm.FSM.__init__(self)
        Configurable.__init__(self, config)
        EventBusSubscriber.__init__(self, eventbus)
        self.logger = logging.getLogger('Policy<{}>'.format(type(self).__name__))
        self.parameterBatch = parameterBatch

        # Receive events from the bus through a queue drained by a dedicated
        # thread (single-threaded FSM safety).
        self.active = True
        self.eventQueue = queue.Queue()
        self.thread = threading.Thread(target=self.handlerThread, name="policy-event-drain")

        # Every policy must be able to sink into a terminal 'End' state.
        if 'End' not in self.states:
            raise TypeError('A policy FSM must contain an \'End\' state')

        # Start thread and subscribe to event handlers
        self.thread.start()
        eventbus.subscribe(self.handleEventSync)
        eventbus.subscribe(self.handleTeardown, events=(TeardownEvent, ))

    def handleTeardown(self, event):
        """
        Stop event handler thread at teardown.

        A ``None`` sentinel unblocks the drain thread so ``join`` returns.
        """
        self.active = False
        self.eventQueue.put(None)
        self.thread.join()

    def handlerThread(self):
        """
        A dedicated thread that passes events received from the event bus to
        the FSM, in order to satisfy single-threaded safety of the
        implementation.
        """
        self.logger.debug('Policy event thread started')
        while self.active:
            event = self.eventQueue.get()

            # None event exits the loop
            if event is None:
                break

            # Handle the event synchronously in the FSM
            self.handleEvent(event)

            # Flush any parameter update(s) that occurred during the op handling
            self.parameterBatch.flush()
        self.logger.debug('Policy event thread exited')

    def handleEventSync(self, event):
        """
        Enqueues events that are going to be handled by the dedicated thread.
        """
        self.eventQueue.put(event)

    def setStatus(self, value):
        """
        Set status is a shorthand for setting the 'status' flag.
        """
        self.setFlag('status', value)

    def setFlag(self, flag, value=True):
        """
        Set a flag for this run.
        """
        return self.parameterBatch.setFlag(flag, value)

    def setParameter(self, name, value):
        """
        Set a value for a test parameter.
        """
        return self.parameterBatch.setParameter(name, value)

    def setParameters(self, parameters):
        """
        Set more than one parameter at once.
        """
        return self.parameterBatch.setParameters(parameters)
|
<?php
declare(strict_types=1);
namespace common\validators;
/**
* @inheritdoc
*
* @author Залатов Александр <zalatov.ao@gmail.com>
*/
class EachValidator extends \yii\validators\EachValidator {
    // Name of the option key holding the embedded per-item validation rule.
    const ATTR_RULE = 'rule';
}
|
#include "QuadTree.h"
// Builds a quad tree whose root AABB is centred on the origin; `size` is the
// full extent, halved here to obtain the half-extents.
QuadTree::QuadTree(glm::vec3 size)
{
    size /= 2.0f;
    _root = new QuadTreeNode(AABB(-size, size));
}
// Destroys the root node (which recursively owns the rest of the tree).
QuadTree::~QuadTree()
{
    delete _root;
}
// Inserts a render object, delegating placement to the root node.
void QuadTree::addObject(RenderObject* object)
{
    _root->addObject(object);
}
// Inserts a light, delegating placement to the root node.
void QuadTree::addLight(Light* light)
{
    _root->addLight(light);
}
// Collects into objectsList every object whose node intersects the frustum;
// the trailing `false` means the root is not assumed fully inside.
void QuadTree::getObjectsInFrustum(std::list<RenderObject*>* objectsList, Frustum& frustum)
{
    _root->getObjectsInFrustum(objectsList, frustum, false);
}
|
package br.com.zup.beta.microServico.repository;
import br.com.zup.beta.microServico.model.bloqueio.BloqueioCartao;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/**
 * Spring Data JPA repository for {@link BloqueioCartao} entities; inherits
 * the standard CRUD operations from {@link JpaRepository}.
 */
@Repository
public interface BloqueioRepository extends JpaRepository<BloqueioCartao, Long> {
}
|
using GraphQL.Types;
namespace Bench.GraphQLDotNet.Types
{
    /// <summary>
    /// GraphQL interface type "Character" shared by the concrete character
    /// types; declares the common fields of the schema.
    /// </summary>
    public class CharacterType : InterfaceGraphType
    {
        public CharacterType()
        {
            Name = "Character";
            Field<NonNullGraphType<IdGraphType>>("id");
            Field<StringGraphType>("name");
            Field<ListGraphType<CharacterType>>("friends");
            Field<ListGraphType<EpisodeType>>("appearsIn");
            // "height" takes an optional "unit" argument for unit conversion.
            Field<FloatGraphType>("height", arguments:
                new QueryArguments(
                    new QueryArgument<UnitType> { Name = "unit" }));
        }
    }
|
package com.example.productmanagement
import android.os.Bundle
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.ArrayAdapter
import androidx.core.content.contentValuesOf
import java.sql.PreparedStatement
import java.sql.SQLException
import java.sql.Statement
import kotlinx.android.synthetic.main.add_produ_name.*
import kotlinx.android.synthetic.main.super_manager_layout.toolbar
import org.jetbrains.anko.selector
/**
 * Screen for adding product names per process type. Names are stored in a
 * local SQLite table and mirrored to a remote MySQL table on a worker thread.
 */
class AddProduName : BaseActivity() {

    // Local SQLite database holding per-process product name tables.
    val dbHelper = ProduDatabaseHelper(this, "nativeBases", 1)

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.add_produ_name)
        setSupportActionBar(toolbar)
        initTypeName() // initialise the process-name selector

        // "Add product" button: insert the typed name for the selected type.
        addNewPM.setOnClickListener {
            addNewProdu()
            "添加产品名称成功!".showToast()
        }

        // "Show catalogue" button: list the names for the selected type.
        listPM.setOnClickListener {
            localTableFor(typeName.text)?.let { listProduNames(it) }
        }
    }

    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        menuInflater.inflate(R.menu.toolbar, menu)
        return true
    }

    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        when (item.itemId) {
            R.id.backup -> finish()                      // back to previous screen
            R.id.finish -> ActivityCollector.finishAll() // close the whole app
        }
        return true
    }

    /** Populates the process-type selector from the local nativeType table. */
    private fun initTypeName() {
        val typeList = ArrayList<String>()
        val db = dbHelper.writableDatabase
        val cursor = db.query("nativeType", null, null, null, null, null, null)
        if (cursor.moveToFirst()) {
            do {
                typeList.add(cursor.getString(cursor.getColumnIndex("typeName")))
            } while (cursor.moveToNext())
        }
        cursor.close()
        typeName.text = ""
        typeName.setOnClickListener {
            selector("选择工序名称", typeList) { i ->
                typeName.text = typeList[i]
            }
        }
    }

    /** Maps a process-type label to its local SQLite table, or null when unknown. */
    private fun localTableFor(type: CharSequence?): String? = when (type) {
        "瓶坯注塑" -> "nativeZhusu"
        "非瓶坯注塑" -> "nativeZhusuOther"
        "吹塑" -> "nativeChuisu"
        "挤出" -> "nativeJichu"
        "其他" -> "nativeOther"
        else -> null
    }

    /**
     * Loads all product names from [table], sorts them and shows them in the
     * list view. FIX: the original called sort() on the still-empty list and
     * attached the adapter before any row was read, so the list was unsorted.
     */
    private fun listProduNames(table: String) {
        val db = dbHelper.writableDatabase
        val cursor = db.query(table, null, null, null, null, null, null)
        if (cursor.moveToFirst()) {
            val nameList = ArrayList<String>()
            do {
                nameList.add(cursor.getString(cursor.getColumnIndex("produName")))
            } while (cursor.moveToNext())
            nameList.sort() // sort AFTER loading
            listProduNMView.adapter =
                ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, nameList)
        }
        cursor.close()
    }

    /** Routes the add action to the local/remote table pair for the selected type. */
    private fun addNewProdu() {
        when (typeName.text) {
            "瓶坯注塑" -> insertProduName("nativeZhusu", "zhusuprodunametable")
            // FIX: originally wrote to nativeZhusu, so 非瓶坯注塑 names never
            // appeared in their own listing (which reads nativeZhusuOther).
            "非瓶坯注塑" -> insertProduName("nativeZhusuOther", "zhusuothertable")
            "吹塑" -> insertProduName("nativeChuisu", "chuisuprodunametable")
            "挤出" -> insertProduName("nativeJichu", "jichuprodunametable")
            "其他" -> insertProduName("nativeOther", "otherprodunametable")
        }
    }

    /**
     * Inserts the typed product name into the local SQLite [localTable] and,
     * on a background thread, into the remote [remoteTable].
     * FIX: uses a PreparedStatement so quotes in the name cannot break the
     * SQL (the original concatenated the raw name into the statement), and
     * the zero-rows branch now logs a failure instead of a copied success.
     */
    private fun insertProduName(localTable: String, remoteTable: String) {
        val newName = editNewPM.text.toString()
        val db = dbHelper.writableDatabase
        db.insert(localTable, null, contentValuesOf("produName" to newName))
        Log.d("AddProduName", "inserted '$newName' into local table $localTable")
        try {
            Thread {
                val conn = DBUtil().conection()
                try {
                    val statement: PreparedStatement =
                        conn!!.prepareStatement("insert into $remoteTable values (null,?)")
                    statement.setString(1, newName)
                    val num = statement.executeUpdate()
                    if (num > 0) {
                        Log.d("AddProduName", "inserted '$newName' into remote table $remoteTable")
                    } else {
                        Log.e("AddProduName", "remote insert into $remoteTable affected no rows")
                    }
                } catch (e: SQLException) {
                    Log.e("AddProduName", "remote insert into $remoteTable failed", e)
                }
                // Close the remote connection regardless of the insert outcome.
                try {
                    conn!!.close()
                    Log.d("AddProduName", "remote connection closed")
                } catch (e: SQLException) {
                    Log.d("AddProduName", "closing remote connection failed")
                }
            }.start()
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }
}
|
package de.faweizz.topicservice.service.transformation.step
import de.faweizz.topicservice.service.transformation.TransformationStepData
class TransformationStepFactory {
    /**
     * Reflectively instantiates the [TransformationStep] whose class name is
     * given in [transformationStepData], invoking its (Map) constructor with
     * the step's data.
     */
    fun instantiate(transformationStepData: TransformationStepData): TransformationStep {
        val stepClass = Class.forName(transformationStepData.transformationStepName)
        return stepClass
            .getConstructor(Map::class.java)
            .newInstance(transformationStepData.data) as TransformationStep
    }
}
|
# Compilador de uma linguagem chamada ***Portugolo***
- Contém:
- [x] Analisador Léxico.
- [x] Analisador Sintático.
- [x] Analisador Semântico.
## Gramática da linguagem PortuGolo.

### Padrões para números, literais e identificadores do PortuGolo:
```
digito = [0-9]
letra = [A-Z | a-z]
Numerico = digito+(.digito+)?
ID = letra (letra | digito)*
Literal = pelo menos um dos 256 caracteres do conjunto ASCII entre aspas duplas
```
#### Outras características de PortuGolo:
- Regras:
- As palavras-chave de PortuGolo são reservadas.
- A linguagem possui comentários de mais de uma linha. Um comentário começa com `/*` e deve terminar com `*/`.
- A linguagem possui comentários de uma linha. Um comentário começa com `//`.
- A linguagem não é case-sensitive.
- Cada tabulação, deverá contar como 3 espaços em branco.
|
/*
* Copyright 2017, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.architecture.blueprints.todoapp.data.source
import com.example.android.architecture.blueprints.todoapp.data.Task
import java.util.*
/**
* Concrete implementation to load tasks from the data sources into a cache.
*
*
* For simplicity, this implements a dumb synchronisation between locally persisted data and data
* obtained from the server, by using the remote data source only if the local database doesn't
* exist or is empty.
*/
class TasksRepository(
    val tasksRemoteDataSource: TasksDataSource,
    val tasksLocalDataSource: TasksDataSource
) : TasksDataSource {

    /**
     * This variable has public visibility so it can be accessed from tests.
     */
    var cachedTasks: LinkedHashMap<String, Task> = LinkedHashMap()

    /**
     * Marks the cache as invalid, to force an update the next time data is requested. This variable
     * has package local visibility so it can be accessed from tests.
     */
    var cacheIsDirty = false

    /**
     * Gets tasks from cache, local data source (SQLite) or remote data source, whichever is
     * available first.
     */
    override suspend fun getTasks(): Result<List<Task>> {
        // Respond immediately with cache if available and not dirty
        if (cachedTasks.isNotEmpty() && !cacheIsDirty) {
            return Result.Success(cachedTasks.values.toList())
        }
        return if (cacheIsDirty) {
            // If the cache is dirty we need to fetch new data from the network.
            getTasksFromRemoteDataSource()
        } else {
            // Query the local storage if available. If not, query the network.
            val result = tasksLocalDataSource.getTasks()
            when (result) {
                is Result.Success -> {
                    refreshCache(result.data)
                    Result.Success(cachedTasks.values.toList())
                }
                is Result.Error -> getTasksFromRemoteDataSource()
            }
        }
    }

    /** Saves the task to both data sources and to the in-memory cache. */
    override suspend fun saveTask(task: Task) {
        // Do in memory cache update to keep the app UI up to date
        cache(task).let {
            tasksRemoteDataSource.saveTask(it)
            tasksLocalDataSource.saveTask(it)
        }
    }

    /** Marks the task completed in cache and both data sources. */
    override suspend fun completeTask(task: Task) {
        // Do in memory cache update to keep the app UI up to date
        cache(task).let {
            it.isCompleted = true
            tasksRemoteDataSource.completeTask(it)
            tasksLocalDataSource.completeTask(it)
        }
    }

    /** Id-based overload; a no-op when the id is not in the cache. */
    override suspend fun completeTask(taskId: String) {
        getTaskWithId(taskId)?.let {
            completeTask(it)
        }
    }

    /** Marks the task active (not completed) in cache and both data sources. */
    override suspend fun activateTask(task: Task) {
        // Do in memory cache update to keep the app UI up to date
        cache(task).let {
            it.isCompleted = false
            tasksRemoteDataSource.activateTask(it)
            tasksLocalDataSource.activateTask(it)
        }
    }

    /** Id-based overload; a no-op when the id is not in the cache. */
    override suspend fun activateTask(taskId: String) {
        getTaskWithId(taskId)?.let {
            activateTask(it)
        }
    }

    /** Removes completed tasks everywhere, keeping only active tasks cached. */
    override suspend fun clearCompletedTasks() {
        tasksRemoteDataSource.clearCompletedTasks()
        tasksLocalDataSource.clearCompletedTasks()
        cachedTasks = cachedTasks.filterValues {
            !it.isCompleted
        } as LinkedHashMap<String, Task>
    }

    /**
     * Gets tasks from local data source (sqlite) unless the table is new or empty. In that case it
     * uses the network data source. This is done to simplify the sample.
     */
    override suspend fun getTask(taskId: String): Result<Task> {
        val taskInCache = getTaskWithId(taskId)

        // Respond immediately with cache if available
        if (taskInCache != null) {
            return Result.Success(taskInCache)
        }

        // Load from server/persisted if needed.

        // Is the task in the local data source? If not, query the network.
        val localResult = tasksLocalDataSource.getTask(taskId)
        return when (localResult) {
            is Result.Success -> Result.Success(cache(localResult.data))
            is Result.Error -> {
                val remoteResult = tasksRemoteDataSource.getTask(taskId)
                when (remoteResult) {
                    is Result.Success -> Result.Success(cache(remoteResult.data))
                    is Result.Error -> Result.Error(RemoteDataNotFoundException())
                }
            }
        }
    }

    /** Only flags the cache dirty; the refetch happens on the next getTasks(). */
    override suspend fun refreshTasks() {
        cacheIsDirty = true
    }

    /** Deletes everything from both data sources and the cache. */
    override suspend fun deleteAllTasks() {
        tasksRemoteDataSource.deleteAllTasks()
        tasksLocalDataSource.deleteAllTasks()
        cachedTasks.clear()
    }

    /** Deletes one task from both data sources and the cache. */
    override suspend fun deleteTask(taskId: String) {
        tasksRemoteDataSource.deleteTask(taskId)
        tasksLocalDataSource.deleteTask(taskId)
        cachedTasks.remove(taskId)
    }

    /** Fetches from remote, then refreshes both the cache and local storage. */
    private suspend fun getTasksFromRemoteDataSource(): Result<List<Task>> {
        val result = tasksRemoteDataSource.getTasks()
        return when (result) {
            is Result.Success -> {
                refreshCache(result.data)
                refreshLocalDataSource(result.data)
                Result.Success(ArrayList(cachedTasks.values))
            }
            is Result.Error -> Result.Error(RemoteDataNotFoundException())
        }
    }

    /** Replaces the cache content and clears the dirty flag. */
    private fun refreshCache(tasks: List<Task>) {
        cachedTasks.clear()
        tasks.forEach {
            cache(it)
        }
        cacheIsDirty = false
    }

    /** Replaces all local storage content with the given tasks. */
    private suspend fun refreshLocalDataSource(tasks: List<Task>) {
        tasksLocalDataSource.deleteAllTasks()
        for (task in tasks) {
            tasksLocalDataSource.saveTask(task)
        }
    }

    private fun getTaskWithId(id: String) = cachedTasks[id]

    /** Stores a defensive copy of the task in the cache and returns the copy. */
    private fun cache(task: Task): Task {
        val cachedTask = Task(task.title, task.description, task.id).apply {
            isCompleted = task.isCompleted
        }
        cachedTasks.put(cachedTask.id, cachedTask)
        return cachedTask
    }

    companion object {
        private var INSTANCE: TasksRepository? = null

        /**
         * Returns the single instance of this class, creating it if necessary.
         * NOTE(review): not thread-safe; assumes main-thread initialisation.
         * @param tasksRemoteDataSource the backend data source
         * *
         * @param tasksLocalDataSource the device storage data source
         * *
         * @return the [TasksRepository] instance
         */
        @JvmStatic
        fun getInstance(tasksRemoteDataSource: TasksDataSource,
                        tasksLocalDataSource: TasksDataSource): TasksRepository {
            return INSTANCE ?: TasksRepository(tasksRemoteDataSource, tasksLocalDataSource)
                .apply { INSTANCE = this }
        }

        /**
         * Used to force [getInstance] to create a new instance
         * next time it's called.
         */
        @JvmStatic
        fun destroyInstance() {
            INSTANCE = null
        }
    }
}
|
r-light .nav-tabs.control-sidebar-tabs > li > a:hover,
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li > a:focus,
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li > a:active {
background: #eff1f7;
}
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li.active > a,
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li.active > a:hover,
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li.active > a:focus,
.control-sidebar-light .nav-tabs.control-sidebar-tabs > li.active > a:active {
background: #f9fafc;
color: #111;
}
.control-sidebar-light .control-sidebar-heading,
.control-sidebar-light .control-sidebar-subheading {
color: #111;
}
.control-sidebar-light .control-sidebar-menu {
margin-left: -14px;
}
.control-sidebar-light .control-sidebar-menu > li > a:hover {
background: #f4f4f5;
}
.control-sidebar-light .control-sidebar-menu > li > a .menu-info > p {
color: #5e5e5e;
}
/*
* Component: Dropdown menus
* -------------------------
*/
/*Dropdowns in general*/
.dropdown-menu {
box-shadow: none;
border-color: #eee;
}
.dropdown-menu > li > a {
color: #777;
}
.dropdown-menu > li > a > .glyphicon,
.dropdown-menu > li > a > .fa,
.dropdown-menu > li > a > .ion {
margin-right: 10px;
}
.dropdown-menu > li > a:hover {
background-color: #e1e3e9;
color: #333;
}
.dropdown-menu > .divider {
background-color: #eee;
}
.navbar-nav > .notifications-menu > .dropdown-menu,
.navbar-nav > .messages-menu > .dropdown-menu,
.navbar-nav > .tasks-menu > .dropdown-menu {
width: 280px;
padding: 0 0 0 0;
margin: 0;
top: 100%;
}
.n
|
class ImageUploader < CarrierWave::Uploader::Base
# Include RMagick or MiniMagick support:
# include CarrierWave::RMagick
# include CarrierWave::MiniMagick
include Cloudinary::CarrierWave unless ENV['CLOUDINARY_URL'].nil?
cloudinary_transformation transformation: [
{ width: 845, height: 597, crop: :limit }
] unless ENV['CLOUDINARY_URL'].nil?
def extension_allowlist
%w[png jpg jpeg]
end
def store_dir
"uploads/#{model.class.name.pluralize.downcase}/#{model.id}"
end
end
|
import { expect, assert } from "chai";
import axios, { AxiosRequestConfig, AxiosResponse } from 'axios'
import * as prep from "./testUtil"
import { testConfig } from './testConfig'
let config = {
headers: { "content-type": "application/ld+json" },
auth: testConfig.auth
}
async function createEntity() {
const entity = {
"id": "urn:xdatatogo:TrafficRestriction:test",
"type": "TrafficRestriction",
"dateValidFrom": {
"type": "Property",
"value": {
"@type": "DateTime",
"@value": "2021-02-08T06:00:00.000Z"
}
},
"dateValidUntil": {
"type": "Property",
"value": {
"@type": "DateTime",
"@value": "2021-02-08T06:00:00.000Z"
}
},
"location": {
"type": "GeoProperty",
"value": {
"type": "Point",
"coordinates": [50, 50]
}
},
"maxSpeed": {
"type": "Property",
"value": 123
},
"maxVehicleAxleLoad": {
"type": "Property",
"value": 123
},
"maxVehicleHeight": {
"type": "Property",
"value": 123
},
"maxVehicleWeight": {
"type": "Property",
"value": 123
},
"maxVehicleWidth": {
"type": "Property",
"value": 123
}
}
// Create entity:
let response = await axios.post(testConfig.base_url + "entities/", entity, config).catch((e) => {
console.log(e)
})
}
describe('6.23.3.1 POST entityOperations/query', function () {
before(async () => {
await prep.deleteAllEntities()
await createEntity()
})
after(async () => {
await prep.deleteAllEntities()
})
it("should return the entities that match the property query", async function () {
const query = {
"@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld",
"q": "maxSpeed==123",
"type": "Query"
}
let response = await axios.post(testConfig.base_url + "entityOperations/query", query, config).catch((e) => {
console.log(e)
}) as AxiosResponse
expect(response.data.length).equals(1)
//console.log(response.data)
expect(response.data[0].id).equals("urn:xdatatogo:TrafficRestriction:test")
})
it("should return an empty array because no existing entities match the query", async function () {
const query = {
"@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld",
geoQ: {
"georel": "near;maxDistance==1000",
"geometry": "Point",
// NOTE: Actual coordinates of existing entity are [50,50]
"coordinates": [0, 0],
"geoproperty": "location"
},
"type": "Query"
}
let response = await axios.post(testConfig.base_url + "entityOperations/query/", query, config).catch((e) => {
console.log(e)
}) as AxiosResponse
assert(response)
expect(response.data).instanceOf(Array)
expect(response.data.length).equals(0)
return new Promise<void>((resolve, reject) => {
resolve()
})
})
it("should return and array with one existing entity that matches the geo query", async function () {
const query = {
"@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld",
geoQ: {
"georel": "near;maxDistance==0",
"geometry": "Point",
// NOTE: Actual coordinates of existing entity are [50,50]
"coordinates": [50, 50],
"geoproperty": "location"
},
"type": "Query"
}
let response = await axios.post(testConfig.base_url + "entityOperations/query/", query, config).catch((e) => {
console.log(e)
}) as AxiosResponse
expect(response.data).instanceOf(Array)
expect(response.data[0].id).equals("urn:xdatatogo:TrafficRestriction:test")
})
});
|
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/socket.h>
#include "cerver/http/response.h"
#include "cerver/http/json.h"
// TODO: get the size fo this when we start the server!!
char *default_header = "HTTP/1.1 200 OK\r\n\n";
HttpResponse *http_response_new (void) {
HttpResponse *res = (HttpResponse *) malloc (sizeof (HttpResponse));
if (res) {
memset (res, 0, sizeof (HttpResponse));
res->data = NULL;
res->header = NULL;
res->res = NULL;
}
return res;
}
HttpResponse *http_response_create (unsigned int status, const char *header, size_t header_len,
const char *data, size_t data_len) {
HttpResponse *res = NULL;
if (data) {
res = (HttpResponse *) malloc (sizeof (HttpResponse));
if (res) {
memset (res, 0, sizeof (HttpResponse));
res->status = status;
res->data = (char *) calloc (data_len, sizeof (char));
memcpy (res->data, data, data_len);
res->data_len = data_len;
if (header) {
res->header_len = header_len;
res->header = (char *) calloc (header_len, sizeof (char));
memcpy (res->header, header, header_len);
}
else {
res->header_len = strlen (default_header);
res->header = (char *) calloc (res->header_len, sizeof (default_header));
memcpy (res->header, default_header, res->header_len);
}
}
}
return res;
}
void http_respponse_delete (HttpResponse *res) {
if (res) {
if (res->data) free (res->data);
if (res->header) free (res->header);
if (res->res) free (res->res);
free (res);
}
}
// merge the response header and the data into the final response
void http_response_compile (HttpResponse *res) {
if (res) {
if (res->header) {
res->res_len = res->header_len;
if (res->data) res->res_len += res->data_len;
res->res = (char *) calloc (res->res_len, sizeof (char));
memcpy (res->res, res->header, res->res_len);
if (res->data) strcat (res->res, res->data);
}
}
}
int http_response_send_to_socket (const HttpResponse *res, const int socket_fd) {
int retval = 1;
if (res && res->res) {
retval = send (socket_fd, res->res, res->res_len, 0) <= 0 ? 1 : 0;
}
return retval;
}
HttpResponse *http_response_json_error (const char *error_msg) {
HttpResponse *res = NULL;
if (error_msg) {
String *error = str_new (error_msg);
JsonKeyValue *jkvp = json_key_value_create ("error", error, VALUE_TYPE_STRING);
size_t json_len;
char *json = json_create_with_one_pair (jkvp, &json_len);
json_key_value_delete (jkvp);
res = http_response_create (200, NULL, 0, json, json_len);
free (json); // we copy the data into the response
}
return res;
}
|
require 'rails_helper'
describe 'Zohoho::Connection' do
before :each do
@apikey = 'dummy_api_key'
@conn = Zohoho::Connection.new('CRM', @apikey, true)
vcr_configure('connection')
end
it 'should make a simple call' do
VCR.use_cassette('call', record: :new_episodes) do
@result = @conn.call('Contacts', 'getRecords')
end
expect(@result.size).to eq(20)
end
end
|
import { APP_BASE_HREF, LocationStrategy, PathLocationStrategy, PlatformLocation } from "@angular/common";
import { Inject, Injectable, Optional } from "@angular/core";
import { MICRO_APP_NAME } from "./micro-app-name.token";
@Injectable()
export class MicroAppLocationStrategy extends PathLocationStrategy implements LocationStrategy {
constructor(
platformLocation: PlatformLocation,
@Inject(MICRO_APP_NAME) private microAppName: string,
@Optional() @Inject(APP_BASE_HREF) href?: string) {
super(platformLocation, href);
}
override pushState(state: any, title: string, url: string, queryParams: string): void {
super.pushState({ ...state, microApp: this.microAppName }, title, url, queryParams);
}
override replaceState(state: any, title: string, url: string, queryParams: string): void {
super.replaceState({ ...state, microApp: this.microAppName }, title, url, queryParams);
}
}
|
// SPDX-License-Identifier: BSD-3-Clause
/* Copyright 2020, Intel Corporation */
/*
* ep-listen.c -- the endpoint unit tests
*
* APIs covered:
* - rpma_ep_listen()
* - rpma_ep_shutdown()
*/
#include "librpma.h"
#include "ep-common.h"
#include "cmocka_headers.h"
#include "test-common.h"
/*
* listen__peer_NULL - NULL peer is invalid
*/
static void
listen__peer_NULL(void **unused)
{
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(NULL, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
assert_null(ep);
}
/*
* listen__addr_NULL - NULL addr is invalid
*/
static void
listen__addr_NULL(void **unused)
{
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, NULL, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
assert_null(ep);
}
/*
* listen__port_NULL - NULL port is invalid
*/
static void
listen__port_NULL(void **unused)
{
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, NULL, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
assert_null(ep);
}
/*
* listen__ep_ptr_NULL - NULL ep_ptr is invalid
*/
static void
listen__ep_ptr_NULL(void **unused)
{
/* run test */
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, NULL);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
}
/*
* listen__peer_addr_port_ep_ptr_NULL - peer, addr, port
* and ep_ptr == NULL are invalid
*/
static void
listen__peer_addr_port_ep_ptr_NULL(void **unused)
{
/* run test */
int ret = rpma_ep_listen(NULL, NULL, NULL, NULL);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
}
/*
* listen__create_evch_EAGAIN - rdma_create_event_channel() fails
* with EAGAIN
*/
static void
listen__create_evch_EAGAIN(void **unused)
{
/*
* configure mocks for:
* - constructing:
*/
will_return(rdma_create_event_channel, NULL);
will_return(rdma_create_event_channel, EAGAIN);
/* - things which may happen: */
will_return_maybe(rpma_info_new, MOCK_INFO);
will_return_maybe(__wrap__test_malloc, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_PROVIDER);
assert_int_equal(rpma_err_get_provider_error(), EAGAIN);
assert_null(ep);
}
/*
* listen__create_id_EAGAIN - rdma_create_id() fails with EAGAIN
*/
static void
listen__create_id_EAGAIN(void **unused)
{
/*
* configure mocks:
* - constructing
*/
struct rdma_event_channel evch;
will_return(rdma_create_event_channel, &evch);
will_return(rdma_create_id, NULL);
will_return(rdma_create_id, EAGAIN);
/* - things which may happen: */
will_return_maybe(rpma_info_new, MOCK_INFO);
will_return_maybe(__wrap__test_malloc, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_PROVIDER);
assert_int_equal(rpma_err_get_provider_error(), EAGAIN);
assert_null(ep);
}
/*
* listen__info_new_E_NOMEM - rpma_info_new() fails with
* RPMA_E_NOMEM
*/
static void
listen__info_new_E_NOMEM(void **unused)
{
/*
* configure mocks for:
* - constructing
*/
will_return(rpma_info_new, NULL);
will_return(rpma_info_new, RPMA_E_NOMEM);
/* - things which may happen: */
struct rdma_event_channel evch;
will_return_maybe(rdma_create_event_channel, &evch);
struct rdma_cm_id id;
will_return_maybe(rdma_create_id, &id);
will_return_maybe(rdma_destroy_id, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_NOMEM);
assert_null(ep);
}
/*
* listen__info_bind_addr_E_PROVIDER - rpma_info_bind_addr() fails
* with RPMA_E_PROVIDER
*/
static void
listen__info_bind_addr_E_PROVIDER(void **unused)
{
/*
* configure mocks for:
* - constructing
*/
struct rdma_event_channel evch;
will_return(rdma_create_event_channel, &evch);
struct rdma_cm_id id;
will_return(rdma_create_id, &id);
will_return(rpma_info_new, MOCK_INFO);
will_return(rpma_info_bind_addr, MOCK_ERRNO);
/* - deconstructing */
will_return(rdma_destroy_id, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_PROVIDER);
assert_int_equal(rpma_err_get_provider_error(), MOCK_ERRNO);
assert_null(ep);
}
/*
* listen__listen_EAGAIN - rdma_listen() fails with EAGAIN
*/
static void
listen__listen_EAGAIN(void **unused)
{
/*
* configure mocks for:
* - constructing
*/
struct rdma_event_channel evch;
will_return(rdma_create_event_channel, &evch);
struct rdma_cm_id id;
will_return(rdma_create_id, &id);
will_return(rpma_info_new, MOCK_INFO);
will_return(rpma_info_bind_addr, MOCK_OK);
will_return(rdma_listen, EAGAIN);
/* - deconstructing */
will_return(rdma_destroy_id, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_PROVIDER);
assert_int_equal(rpma_err_get_provider_error(), EAGAIN);
assert_null(ep);
}
/*
* listen__malloc_ENOMEM - malloc() fails with ENOMEM
*/
static void
listen__malloc_ENOMEM(void **unused)
{
/*
* configure mocks for:
* - constructing
*/
will_return(__wrap__test_malloc, ENOMEM);
/* - things which may happen: */
struct rdma_event_channel evch;
will_return_maybe(rdma_create_event_channel, &evch);
struct rdma_cm_id id;
will_return_maybe(rdma_create_id, &id);
will_return_maybe(rpma_info_new, MOCK_INFO);
will_return_maybe(rpma_info_bind_addr, MOCK_OK);
will_return_maybe(rdma_listen, MOCK_OK);
will_return_maybe(rdma_destroy_id, MOCK_OK);
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_NOMEM);
assert_null(ep);
}
/*
* listen__malloc_ENOMEM_destroy_id_EAGAIN - malloc() fails with ENOMEM
* rdma_destroy_id() fails consequently during the handling of the first error
*
* Note: test assumes rdma_create_id() is called before the first failing
* malloc()
*/
static void
listen__malloc_ENOMEM_destroy_id_EAGAIN(void **unused)
{
/*
* configure mocks for:
* - constructing
*/
struct rdma_event_channel evch;
will_return(rdma_create_event_channel, &evch);
struct rdma_cm_id id;
will_return(rdma_create_id, &id);
will_return(rpma_info_new, MOCK_INFO);
will_return(rpma_info_bind_addr, MOCK_OK);
will_return(rdma_listen, MOCK_OK);
will_return(__wrap__test_malloc, ENOMEM); /* first error */
/* - deconstructing */
will_return(rdma_destroy_id, MOCK_ERRNO); /* second error */
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_listen(MOCK_PEER, MOCK_ADDR, MOCK_PORT, &ep);
/* verify the results */
assert_int_equal(ret, RPMA_E_NOMEM);
assert_null(ep);
}
/*
* shutdown__ep_ptr_NULL - NULL ep_ptr is invalid
*/
static void
shutdown__ep_ptr_NULL(void **unused)
{
/* run test */
int ret = rpma_ep_shutdown(NULL);
/* verify the results */
assert_int_equal(ret, RPMA_E_INVAL);
}
/*
* shutdown__ep_NULL - NULL ep is valid
*/
static void
shutdown__ep_NULL(void **unused)
{
/* run test */
struct rpma_ep *ep = NULL;
int ret = rpma_ep_shutdown(&ep);
/* verify the results */
assert_int_equal(ret, MOCK_OK);
assert_null(ep);
}
/*
* ep__lifecycle - happy day scenario
*/
static void
ep__lifecycle(void **unused)
{
/*
* The thing is done by setup__ep_listen()
* and teardown__ep_shutdown().
*/
}
/*
* shutdown__destroy_id_EAGAIN -- rdma_destroy_id() fails with EAGAIN
*/
static void
shutdown__destroy_id_EAGAIN(void **estate_ptr)
{
struct ep_test_state *estate = *estate_ptr;
/* configure mocks */
expect_value(rdma_destroy_id, id, &estate->cmid);
will_return(rdma_destroy_id, EAGAIN);
/* run test */
int ret = rpma_ep_shutdown(&estate->ep);
/* verify the result */
assert_int_equal(ret, RPMA_E_PROVIDER);
assert_int_equal(rpma_err_get_provider_error(), EAGAIN);
assert_non_null(estate->ep);
assert_int_equal(memcmp(&estate->cmid, &Cmid_zero,
sizeof(estate->cmid)), 0);
assert_int_equal(memcmp(&estate->evch, &Evch_zero,
sizeof(estate->evch)), 0);
}
int
main(int argc, char *argv[])
{
/* prepare prestates */
struct ep_test_state prestate_conn_cfg_default;
prestate_init(&prestate_conn_cfg_default, NULL);
const struct CMUnitTest tests[] = {
/* rpma_ep_listen() unit tests */
cmocka_unit_test(listen__peer_NULL),
cmocka_unit_test(listen__addr_NULL),
cmocka_unit_test(listen__port_NULL),
cmocka_unit_test(listen__ep_ptr_NULL),
cmocka_unit_test(listen__peer_addr_port_ep_ptr_NULL),
cmocka_unit_test(listen__create_evch_EAGAIN),
cmocka_unit_test(listen__create_id_EAGAIN),
cmocka_unit_test(listen__info_new_E_NOMEM),
cmocka_unit_test(listen__info_bind_addr_E_PROVIDER),
cmocka_unit_test(listen__listen_EAGAIN),
cmocka_unit_test(listen__malloc_ENOMEM),
cmocka_unit_test(
listen__malloc_ENOMEM_destroy_id_EAGAIN),
/* rpma_ep_listen()/_shutdown() lifecycle */
cmocka_unit_test_prestate_setup_teardown(ep__lifecycle,
setup__ep_listen, teardown__ep_shutdown,
&prestate_conn_cfg_default),
/* rpma_ep_shutdown() unit tests */
cmocka_unit_test(shutdown__ep_ptr_NULL),
cmocka_unit_test(shutdown__ep_NULL),
cmocka_unit_test_prestate_setup_teardown(
shutdown__destroy_id_EAGAIN,
setup__ep_listen, teardown__ep_shutdown,
&prestate_conn_cfg_default),
};
return cmocka_run_group_tests(tests, NULL, NULL);
}
|
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
import helpers from 'test/specs/helpers';
import '../history/history_srv';
import { versions, restore } from './history_mocks';
describe('historySrv', function() {
var ctx = new helpers.ServiceTestContext();
var versionsResponse = versions();
var restoreResponse = restore;
beforeEach(angularMocks.module('logdisplayplatform.core'));
beforeEach(angularMocks.module('logdisplayplatform.services'));
beforeEach(
angularMocks.inject(function($httpBackend) {
ctx.$httpBackend = $httpBackend;
$httpBackend.whenRoute('GET', 'api/dashboards/id/:id/versions').respond(versionsResponse);
$httpBackend
.whenRoute('POST', 'api/dashboards/id/:id/restore')
.respond(function(method, url, data, headers, params) {
const parsedData = JSON.parse(data);
return [200, restoreResponse(parsedData.version)];
});
})
);
beforeEach(ctx.createService('historySrv'));
function wrapPromise(ctx, angularPromise) {
return new Promise((resolve, reject) => {
angularPromise.then(resolve, reject);
ctx.$httpBackend.flush();
});
}
describe('getHistoryList', function() {
it('should return a versions array for the given dashboard id', function() {
return wrapPromise(
ctx,
ctx.service.getHistoryList({ id: 1 }).then(function(versions) {
expect(versions).to.eql(versionsResponse);
})
);
});
it('should return an empty array when not given an id', function() {
return wrapPromise(
ctx,
ctx.service.getHistoryList({}).then(function(versions) {
expect(versions).to.eql([]);
})
);
});
it('should return an empty array when not given a dashboard', function() {
return wrapPromise(
ctx,
ctx.service.getHistoryList().then(function(versions) {
expect(versions).to.eql([]);
})
);
});
});
describe('restoreDashboard', function() {
it('should return a success response given valid parameters', function() {
let version = 6;
return wrapPromise(
ctx,
ctx.service.restoreDashboard({ id: 1 }, version).then(function(response) {
expect(response).to.eql(restoreResponse(version));
})
);
});
it('should return an empty object when not given an id', function() {
return wrapPromise(
ctx,
ctx.service.restoreDashboard({}, 6).then(function(response) {
expect(response).to.eql({});
})
);
});
});
});
|
#!/usr/bin/env bash
if [ ! -f "$1" ]; then
cat <<EOF
File name is required!"
Syntax is
$0 <class file>
EOF
exit 1
fi
FILENAME=$1
[ $DEBUG ] && echo "Converting $FILENAME"
printf "\x00\x00\x00\x32" | dd of=$FILENAME seek=4 bs=1 count=4 conv=notrunc 2> /dev/null
KLAZZNAME="$(echo $1 | sed -e "s/.class$//")"
[ $DEBUG ] && echo "Verifying class $KLAZZNAME"
$JAVA_6_HOME/bin/javap $KLAZZNAME > /dev/null || ( echo "Verification failed: $KLAZZNAME"; exit -1 )
|
/*
Convenient alternative is to declare an object (conventionally named exports) and add
properties to that whenever we are defining something that needs to be exported.
In the following example, the module function takes its interface object as an argument,
allowing code outside of the function to create it and store it in a variable.
Note: Outside of a function, this refers to the global scope object.
*/
(function (exports) {
var names = ["Sunday", "Monday", "Tuesday", "Wednesday",
"Thursday", "Friday", "Saturday"];
exports.name = function (number) {
return names[number];
};
exports.number = function (name) {
return names.indexOf(name);
};
})(this.daniTeku = {});
console.log(daniTeku.name(daniTeku.number("Saturday")));
// → Saturday
|
# Encoding: UTF-8
[{name: "Completions",
scope: "source.actionscript.2",
settings:
{completions:
["__proto__",
"_accProps",
"_accProps",
"_alpha",
"_currentframe",
"_droptarget",
"_focusrect",
"_framesloaded",
"_global",
"_height",
"_level",
"_lockroot",
"_name",
"_parent",
"_quality",
"_root",
"_rotation",
"_soundbuftime",
"_target",
"_targetInstanceName",
"_totalframes",
"_url",
"_visible",
"_width",
"_x",
"_xmouse",
"_xscale",
"_y",
"_ymouse",
"_yscale",
"abs",
"acos",
"activePlayControl",
"activityLevel",
"addColumn",
"addColumnAt",
"addCuePoint",
"addEventListener",
"addItem",
"addItemAt",
"addListener",
"addMenu",
"addMenuAt",
"addMenuItem",
"addMenuItemAt",
"addPage",
"addProperty",
"addRequestHeader",
"addTreeNode",
"addTreeNodeAt",
"album",
"align",
"align",
"allowDomain",
"allowInsecureDomain",
"appendChild",
"apply",
"arguments",
"artist",
"asin",
"aspectRatio",
"associateController",
"associateDisplay",
"atan",
"atan2",
"attachMovie",
"attachSound",
"attachVideo",
"attributes",
"autoLoad",
"autoPlay",
"autoSize",
"autoSize",
"avHardwareDisable",
"background",
"backgroundColor",
"backgroundStyle",
"BACKSPACE",
"bandwidth",
"beginFill",
"beginGradientFill",
"blockIndent",
"bold",
"border",
"borderColor",
"bottom",
"bottomScroll",
"broadcastMessage",
"bufferLength",
"bufferTime",
"bullet",
"buttonHeight",
"buttonStyleDeclaration",
"buttonWidth",
"bytesLoaded",
"bytesLoaded",
"bytesTotal",
"bytesTotal",
"call",
"cancelLabel",
"CAPSLOCK",
"caption",
"CASEINSENSITIVE",
"ceil",
"cellRenderer",
"charAt",
"charCodeAt",
"childNodes",
"clear",
"clearInterval",
"cloneNode",
"close",
"close",
"closeButton",
"color",
"columnCount",
"columnNames",
"comment",
"concat",
"condenseWhite",
"connect",
"constructor",
"content",
"contentPath",
"contentType",
"CONTROL",
"controllerPolicy",
"controlPlacement",
"conversion",
"copy",
"cos",
"create",
"createChild",
"createChildren",
"createClassObject",
"createElement",
"createEmptyMovieClip",
"createEmptyObject",
"createLabel",
"createMenu",
"createObject",
"createPopUp",
"createSegment",
"createTextField",
"createTextNode",
"cuePoints",
"currentFps",
"curveTo",
"customItems",
"data",
"data",
"dataProvider",
"dateFormatter",
"dayNames",
"deblocking",
"delete",
"DELETEKEY",
"deletePopUp",
"DESCENDING",
"destroyChildAt",
"destroyObject",
"direction",
"disabledDays",
"disabledRanges",
"dispatchEvent",
"displayedMonth",
"displayedYear",
"displayFull",
"displayNormal",
"docTypeDecl",
"doLater",
"doLayout",
"doLayout",
"domain",
"DOWN",
"draw",
"drawRect",
"dropdown",
"dropdownWidth",
"duplicateMovieClip",
"duration",
"E",
"editable",
"editField",
"embedFonts",
"enabled",
"enabled",
"END",
"endFill",
"ENTER",
"ESCAPE",
"eval",
"exactSettings",
"exp",
"findText",
"firstChild",
"firstDayOfWeek",
"firstVisibleNode",
"floor",
"flush",
"focusedCell",
"focusEnabled",
"font",
"fps",
"fromCharCode",
"gain",
"genre",
"getAscii",
"getBeginIndex",
"getBounds",
"getBytesLoaded",
"getBytesLoaded",
"getBytesTotal",
"getBytesTotal",
"getCaretIndex",
"getChildAt",
"getCode",
"getColumnAt",
"getColumnIndex",
"getCount",
"getCuePoints",
"getDate",
"getDay",
"getDepth",
"getDisplayIndex",
"getEndIndex",
"getFocus",
"getFocus",
"getFontList",
"getFullYear",
"getHours",
"getInstanceAtDepth",
"getIsBranch",
"getIsOpen",
"getItemAt",
"getLocal",
"getMenuAt",
"getMenuEnabledAt",
"getMenuItemAt",
"getMilliseconds",
"getMinutes",
"getMonth",
"getNewTextFormat",
"getNextHighestDepth",
"getNodeDisplayedAt",
"getPan",
"getProgress",
"getProperty",
"getRGB",
"getSeconds",
"getSelected",
"getSelectedText",
"getSize",
"getStyle",
"getStyle",
"getStyleNames",
"getSWFVersion",
"getText",
"getTextExtent",
"getTextFormat",
"getTextSnapshot",
"getTime",
"getTimer",
"getTimezoneOffset",
"getTransform",
"getTreeNodeAt",
"getURL",
"getUTCDate",
"getUTCDay",
"getUTCFullYear",
"getUTCHours",
"getUTCMilliseconds",
"getUTCMinutes",
"getUTCMonth",
"getUTCSeconds",
"getVersion",
"getVolume",
"getYear",
"globalToLocal",
"gotoAndPlay",
"gotoAndStop",
"groupName",
"handleEvent",
"hasAccessibility",
"hasAudio",
"hasAudioEncoder",
"hasChildNodes",
"hasEmbeddedVideo",
"hasMP3",
"hasOwnProperty",
"hasPrinting",
"hasScreenBroadcast",
"hasScreenPlayback",
"hasStreamingAudio",
"hasStreamingVideo",
"hasVideoEncoder",
"headerHeight",
"height",
"height",
"hide",
"hide",
"hideBuiltInItems",
"hitArea",
"hitTest",
"hitTestTextNearPos",
"hLineScrollSize",
"HOME",
"horizontal",
"hPageScrollSize",
"hPosition",
"hscroll",
"hScrollPolicy",
"html",
"html",
"htmlText",
"icon",
"iconField",
"iconFunction",
"ignoreWhite",
"indent",
"indeterminate",
"index",
"indexOf",
"indexOf",
"init",
"initialize",
"initLayout",
"INSERT",
"insertBefore",
"install",
"invalidate",
"invalidate",
"isActive",
"isDebugger",
"isDown",
"isFinite",
"isNaN",
"isToggled",
"italic",
"join",
"keyDown",
"keyUp",
"label",
"labelField",
"labelFunction",
"labelPlacement",
"language",
"lastChild",
"lastIndexOf",
"leading",
"left",
"LEFT",
"leftMargin",
"length",
"length",
"lineScrollSize",
"lineStyle",
"lineTo",
"list",
"LN2",
"LN10",
"load",
"load",
"loadClip",
"loaded",
"loadMovie",
"loadMovieNum",
"loadSound",
"loadVariables",
"loadVariablesNum",
"LoadVars",
"localFileReadDisable",
"localToGlobal",
"log",
"LOG2E",
"LOG10E",
"manufacturer",
"max",
"MAX_VALUE",
"maxChars",
"maxChars",
"maxHPosition",
"maxhscroll",
"maximum",
"maxscroll",
"maxVPosition",
"mediaType",
"menu",
"message",
"messageStyleDeclaration",
"min",
"MIN_VALUE",
"minimum",
"mode",
"monthNames",
"motionLevel",
"motionTimeOut",
"mouseWheelEnabled",
"move",
"move",
"moveTo",
"multiline",
"multipleSelection",
"muted",
"name",
"names",
"NaN",
"NEGATIVE_INFINITY",
"nextFrame",
"nextScene",
"nextSibling",
"nextValue",
"nodeName",
"nodeType",
"nodeValue",
"noLabel",
"noScale",
"numChildren",
"NUMERIC",
"okLabel",
"onActivity",
"onChanged",
"onClipEvent",
"onClose",
"onConnect",
"onData",
"onDragOut",
"onDragOver",
"onEnterFrame",
"onHTTPStatus",
"onID3",
"onKeyDown",
"onKeyUp",
"onKillFocus",
"onLoad",
"onLoadComplete",
"onLoadError",
"onLoadInit",
"onLoadProgress",
"onLoadStart",
"onMouseDown",
"onMouseMove",
"onMouseUp",
"onMouseWheel",
"onPress",
"onRelease",
"onReleaseOutside",
"onResize",
"onRollOut",
"onRollOver",
"onScroller",
"onSelect",
"onSetFocus",
"onSoundComplete",
"onStatus",
"onSync",
"onUnload",
"onUpdate",
"onXML",
"open",
"orientation",
"os",
"pageHeight",
"pageScrollSize",
"pageWidth",
"paperHeight",
"paperWidth",
"parentNode",
"parseCSS",
"parseFloat",
"parseInt",
"parseXML",
"password",
"password",
"pause",
"pause",
"percentComplete",
"percentLoaded",
"PGDN",
"PGUP",
"PI",
"pixelAspectRatio",
"play",
"play",
"playerType",
"playheadTime",
"playing",
"pop",
"position",
"POSITIVE_INFINITY",
"pow",
"preferredHeight",
"preferredWidth",
"pressFocus",
"prevFrame",
"previousSibling",
"previousValue",
"prevScene",
"print",
"printAsBitmap",
"printAsBitmapNum",
"printNum",
"prototype",
"pullDown",
"push",
"quality",
"random",
"rate",
"redraw",
"redraw",
"refresh",
"refreshPane",
"registerClass",
"releaseFocus",
"removeAll",
"removeAllColumns",
"removeAllCuePoints",
"removeColumnAt",
"removeCuePoint",
"removeEventListener",
"removeItemAt",
"removeListener",
"removeMenuAt",
"removeMenuItem",
"removeMenuItemAt",
"removeMovieClip",
"removeNode",
"removeTextField",
"removeTreeNodeAt",
"replaceItemAt",
"replaceSel",
"replaceText",
"resizableColumns",
"restrict",
"restrict",
"RETURNINDEXEDARRAY",
"reverse",
"right",
"RIGHT",
"rightMargin",
"round",
"rowCount",
"rowHeight",
"scaleContent",
"scaleMode",
"scaleX",
"scaleY",
"screenColor",
"screenDPI",
"screenResolutionX",
"screenResolutionY",
"scroll",
"scrollDrag",
"scrollPosition",
"seek",
"selectable",
"selectable",
"selectableRange",
"selected",
"selectedChild",
"selectedData",
"selectedDate",
"selectedIndex",
"selectedIndices",
"selectedItem",
"selectedItems",
"selectedNode",
"selectedNodes",
"selection",
"send",
"sendAndLoad",
"separatorBefore",
"serverString",
"setBufferTime",
"setClipboard",
"setDate",
"setFocus",
"setFocus",
"setFullYear",
"setGain",
"setHours",
"setHPosition",
"setIcon",
"setInterval",
"setIsBranch",
"setIsOpen",
"setMask",
"setMedia",
"setMenuEnabledAt",
"setMenuItemEnabled",
"setMenuItemSelected",
"setMilliseconds",
"setMinutes",
"setMode",
"setMonth",
"setMotionLevel",
"setNewTextFormat",
"setPan",
"setProgress",
"setPropertiesAt",
"setProperty",
"setRate",
"setRGB",
"setScrollProperties",
"setScrollTarget",
"setSeconds",
"setSelectColor",
"setSelected",
"setSelection",
"setSilenceLevel",
"setSize",
"setSize",
"setStyle",
"setStyle",
"setTextFormat",
"setTime",
"setTransform",
"setUseEchoSuppression",
"setUTCDate",
"setUTCFullYear",
"setUTCHours",
"setUTCMilliseconds",
"setUTCMinutes",
"setUTCMonth",
"setUTCSeconds",
"setVolume",
"setYear",
"shift",
"SHIFT",
"show",
"show",
"showHeaders",
"showMenu",
"showSettings",
"showToday",
"silenceLevel",
"silenceTimeOut",
"sin",
"size",
"size",
"slice",
"smoothing",
"songtitle",
"sort",
"sortableColumns",
"sortItems",
"sortItemsBy",
"sortOn",
"source",
"SPACE",
"spaceColumnsEqually",
"splice",
"split",
"sqrt",
"SQRT1_2",
"SQRT2",
"start",
"startDrag",
"static",
"status",
"stepSize",
"stop",
"stop",
"stopAllSounds",
"stopDrag",
"styleName",
"styleSheet",
"styleSheet",
"substr",
"substring",
"swapDepths",
"TAB",
"tabChildren",
"tabEnabled",
"tabIndex",
"tabIndex",
"tabStops",
"tan",
"target",
"text",
"textColor",
"textField",
"textHeight",
"textWidth",
"title",
"titleStyleDeclaration",
"toggle",
"toLowerCase",
"top",
"toString",
"totalTime",
"toUpperCase",
"trace",
"track",
"trackAsMenu",
"transform",
"type",
"typeof",
"underline",
"unescape",
"uninstall",
"UNIQUESORT",
"unloadClip",
"unloadMovie",
"unloadMovieNum",
"unshift",
"unwatch",
"UP",
"updateAfterEvent",
"updateProperties",
"url",
"useCodepage",
"useEchoSuppression",
"useHandCursor",
"UTC",
"value",
"valueOf",
"variable",
"version",
"visible",
"vLineScrollSize",
"volume",
"vPageScrollSize",
"vPosition",
"vScrollPolicy",
"watch",
"width",
"width",
"windowlessDisable",
"wordWrap",
"wordWrap",
"x",
"xmlDecl",
"y",
"year",
"yesLabel"]},
uuid: "8C552A99-B28E-47FB-A5EE-F534066E4DD7"},
{name: "Miscellaneous",
scope: "source.actionscript.2",
settings: {increaseIndentPattern: "^.*(\\{[^}\"']*|\\([^)\"']*)$"},
uuid: "C20EEED3-1491-44BF-86B6-106CAE648B5F"}]
|
# Math.nCr(n,r) borrowed from Brian Candler's
# post in ruby_talk
def Math.nCr(n,r)
a, b = r, n-r
a, b = b, a if a < b # a is the larger
numer = (a+1..n).inject(1) { |t,v| t*v } # n!/r!
denom = (2..b).inject(1) { |t,v| t*v } # (n-r)!
numer/denom
end
class DiceAtLeastFive
def self.int_to_roll( serial, numdice )
# good for reality, too slow for a quiz - #pow is expensive.
# raise if serial >= 6**numdice
serial.to_s(6).rjust(numdice,"0").split(//).map!{|ele| ele.to_i + 1 }.reverse
end
def self.total_rolls(numdice)
6**numdice
end
def self.desirable(numdice, min_fives)
(min_fives..numdice).inject(0) { |tot, val| tot + (Math.nCr(numdice,val) * 5**(numdice-val)) }
end
end
require 'optparse'
options = {}
OptionParser.new do |opts|
opts.banner = "Usage: example.rb [options]"
opts.on("-v", "--[no-]verbose", "Run verbosely") do |v|
options[:iter_step] = 1
end
opts.on("-s", "--sample", "Run in sample mode") do |s|
options[:iter_step] = 50000
end
end.parse!
total_dice, at_least_5 = Integer(ARGV[0]), Integer(ARGV[1])
des=DiceAtLeastFive.desirable(total_dice,at_least_5)
tot=DiceAtLeastFive.total_rolls(total_dice)
if options[:iter_step]
(0..tot-1).step(options[:iter_step]) { |serial|
marker = ""
roll = DiceAtLeastFive.int_to_roll(serial, total_dice)
if roll.select{|die| die==5}.length >= at_least_5
marker = " <=="
end
puts "#{serial+1} #{roll.inspect} #{marker}"
}
end
puts "\nNumber of desirable outcomes is #{des}"
puts "Number of possible outcomes is #{tot}"
puts "\nProbability is %16.16f" %(des.to_f/tot)
|
module Steps
module Respondent
# Form object capturing a respondent's contact details.
#
# Postcode, mobile phone and email are each required unless the matching
# "*_unknown" flag is set; residence history is required only when the
# residence requirement is answered "no".
class ContactDetailsForm < BaseForm
attribute :address, StrippedString
attribute :postcode, StrippedString
attribute :postcode_unknown, Boolean
attribute :home_phone, StrippedString
attribute :mobile_phone, StrippedString
attribute :mobile_phone_unknown, Boolean
attribute :email, NormalisedEmail
attribute :email_unknown, Boolean
attribute :residence_requirement_met, YesNoUnknown
attribute :residence_history, String
validates_presence_of :postcode, unless: :postcode_unknown?
validates_presence_of :mobile_phone, unless: :mobile_phone_unknown?
validates_presence_of :email, unless: :email_unknown?
validates_inclusion_of :residence_requirement_met, in: GenericYesNoUnknown.values
validates_presence_of :residence_history, if: -> { residence_requirement_met&.no? }
private
# Persists the form attributes onto a new or existing respondent record.
# NOTE(review): assumes `c100_application`, `record_id` and `attributes_map`
# come from BaseForm — confirm against that class.
def persist!
raise C100ApplicationNotFound unless c100_application
respondent = c100_application.respondents.find_or_initialize_by(id: record_id)
respondent.update(
attributes_map
)
end
end
end
end
|
<?php
/**
* Created by PhpStorm.
* User: usuario
* Date: 28/12/17
* Time: 11:11
*/
namespace Tests\AppBundle\Controller;
use AbstractBundle\Test\ApiTestCase;
/**
 * Functional tests for the /login endpoint.
 */
class LoginControllerTest extends ApiTestCase
{
    protected function setUp()
    {
        parent::setUp();
        $this->client = $this->createClient();
        $this->createUserAdmin(['email' => 'admin@email.com', 'password' => '654321']);
    }

    /**
     * Registers an admin user through the API so the login tests have a
     * known account to authenticate against.
     */
    public function createUserAdmin($user)
    {
        $payload = ['data' => json_encode($user)];
        $this->client->request('POST', '/user/insert', $payload);
    }

    /**
     * Valid credentials must log in successfully, with and without the
     * 'hash' option in the payload.
     */
    public function testLogin()
    {
        foreach ([false, true] as $withHash) {
            $credentials = ['email' => 'admin@email.com', 'password' => '654321'];
            if ($withHash) {
                $credentials['hash'] = true;
            }
            $this->client->request('POST', '/login', ['data' => json_encode($credentials)]);
            $response = $this->getData();
            $this->assertTrue($this->getResponse()->isSuccessful());
            $this->assertEquals(200, $this->getResponse()->getStatusCode());
            $this->assertArrayHasKey('content', $response);
        }
    }

    /**
     * Invalid credentials must be rejected.
     */
    public function testLoginError()
    {
        // Unknown user -> server error.
        $this->client->request('POST', '/login', [
            'data' => json_encode(['email' => 'teste@email.com', 'password' => '654321'])
        ]);
        $this->assertEquals(500, $this->getResponse()->getStatusCode());

        // Known user, wrong password -> unauthorized.
        $this->client->request('POST', '/login', [
            'data' => json_encode(['email' => 'admin@email.com', 'password' => '65432'])
        ]);
        $this->assertEquals(401, $this->getResponse()->getStatusCode());
    }
}
|
/* tag::catalog[]
Title:: Adding nodes to a subnet running threshold ECDSA
Goal:: Test whether adding subnet nodes impacts the threshold ECDSA feature
Runbook::
. Setup:
. System subnet comprising N nodes, necessary NNS canisters, and with the ecdsa feature enabled.
. Adding the unassigned nodes to the subnet via proposal.
. Assert that node membership has changed.
. Assert that ecdsa signing continues to work with the same public key as before.
Success::
. Status endpoints of newly added nodes are reachable.
. ECDSA signature succeeds with the same public key as before.
end::catalog[] */
use crate::driver::ic::{InternetComputer, Subnet};
use crate::tecdsa::tecdsa_signature_test::{enable_ecdsa_signing, make_key};
use crate::{
nns::{submit_external_proposal_with_test_id, vote_execute_proposal_assert_executed, NnsExt},
tecdsa::tecdsa_signature_test::{get_public_key, get_signature, verify_signature, KEY_ID1},
util::*,
};
use canister_test::{Canister, Cycles};
use ic_fondue::ic_manager::IcHandle;
use ic_nns_constants::GOVERNANCE_CANISTER_ID;
use ic_nns_governance::pb::v1::NnsFunction;
use ic_registry_subnet_features::SubnetFeatures;
use ic_registry_subnet_type::SubnetType;
use ic_types::Height;
use registry_canister::mutations::do_add_nodes_to_subnet::AddNodesToSubnetPayload;
use std::time::{Duration, Instant};
use super::tecdsa_signature_test::DKG_INTERVAL;
const NODES_COUNT: usize = 4;
const UNASSIGNED_NODES_COUNT: i32 = 3;

/// Builds the test topology: one System subnet with the ECDSA-signatures
/// feature enabled, plus a pool of unassigned nodes to be added later.
pub fn config() -> InternetComputer {
    let features = SubnetFeatures {
        ecdsa_signatures: true,
        ..SubnetFeatures::default()
    };
    let subnet = Subnet::new(SubnetType::System)
        .with_dkg_interval_length(Height::from(DKG_INTERVAL))
        .add_nodes(NODES_COUNT)
        .with_features(features);
    InternetComputer::new()
        .add_subnet(subnet)
        .with_unassigned_nodes(UNASSIGNED_NODES_COUNT)
}
/// End-to-end check that adding unassigned nodes to the ECDSA-enabled NNS
/// subnet keeps the subnet's threshold-ECDSA public key stable and that
/// signing still succeeds afterwards.
pub fn test(handle: IcHandle, ctx: &ic_fondue::pot::Context) {
// Setup: install all necessary NNS canisters.
ctx.install_nns_canisters(&handle, true);
// Make sure unassigned nodes are ready.
// NOTE(review): helper name contains a typo ("unassinged"); it is defined
// elsewhere in this crate, so it must stay spelled this way here.
let unassigned_nodes_endpoints = get_unassinged_nodes_endpoints(&handle);
assert_eq!(
unassigned_nodes_endpoints.len(),
UNASSIGNED_NODES_COUNT as usize
);
let unassigned_node_ids: Vec<_> = unassigned_nodes_endpoints
.iter()
.map(|ep| ep.node_id)
.collect();
block_on(assert_all_ready(unassigned_nodes_endpoints.as_slice(), ctx));
// Initial run to get public key
let mut rng = ctx.rng.clone();
let nns_endpoint = get_random_nns_node_endpoint(&handle, &mut rng);
// Fixed message hash signed both before and after the membership change.
let message_hash = [0xabu8; 32];
// Enable ECDSA signatures via a governance proposal.
block_on(async {
nns_endpoint.assert_ready(ctx).await;
let nns = runtime_from_url(nns_endpoint.url.clone());
let governance = Canister::new(&nns, GOVERNANCE_CANISTER_ID);
enable_ecdsa_signing(
&governance,
nns_endpoint.subnet.as_ref().unwrap().id,
make_key(KEY_ID1),
)
.await;
});
// Create a universal canister and record the ECDSA public key so it can be
// compared after the new nodes have joined.
let (canister_id, public_key) = block_on(async {
let agent = assert_create_agent(nns_endpoint.url.as_str()).await;
let uni_can = UniversalCanister::new(&agent).await;
let public_key = get_public_key(make_key(KEY_ID1), &uni_can, ctx)
.await
.unwrap();
(uni_can.canister_id(), public_key)
});
// Send a proposal for the nodes to join NNS subnet via the governance canister.
let nns_runtime = runtime_from_url(nns_endpoint.url.clone());
let governance_canister = canister_test::Canister::new(&nns_runtime, GOVERNANCE_CANISTER_ID);
let proposal_payload = AddNodesToSubnetPayload {
subnet_id: nns_endpoint.subnet_id().unwrap().get(),
node_ids: unassigned_node_ids,
};
let proposal_id = block_on(submit_external_proposal_with_test_id(
&governance_canister,
NnsFunction::AddNodeToSubnet,
proposal_payload,
));
// Explicitly vote for the proposal to add nodes to NNS subnet.
block_on(vote_execute_proposal_assert_executed(
&governance_canister,
proposal_id,
));
// Endpoints for the freshly assigned nodes, retagged with the NNS subnet.
let newly_assigned_nodes: Vec<_> = unassigned_nodes_endpoints
.iter()
.map(|ep| ep.recreate_with_subnet(nns_endpoint.clone().subnet.unwrap()))
.collect();
// Sleep and assert that new nodes are reachable (via http call).
block_on(async {
tokio::time::sleep(Duration::from_secs(80)).await;
for ep in newly_assigned_nodes.iter() {
ep.assert_ready_with_start(Instant::now(), ctx).await;
}
});
// Run through ecdsa signature test: the key must match the one recorded
// before the membership change, and the signature must verify.
block_on(async {
let agent = assert_create_agent(nns_endpoint.url.as_str()).await;
let uni_can = UniversalCanister::from_canister_id(&agent, canister_id);
let public_key_ = get_public_key(make_key(KEY_ID1), &uni_can, ctx)
.await
.unwrap();
assert_eq!(public_key, public_key_);
let signature = get_signature(
&message_hash,
Cycles::zero(),
make_key(KEY_ID1),
&uni_can,
ctx,
)
.await
.unwrap();
verify_signature(&message_hash, &public_key, &signature);
});
}
|
#include "AsyncEventQueue.hpp"
#include "jet/live/Utility.hpp"
namespace jet
{
    // Mutex-guarded accessors for the two event queues. Each method takes
    // the corresponding lock for its entire body.

    void AsyncEventQueue::addLog(LogSeverity severity, std::string&& message)
    {
        std::lock_guard<std::mutex> guard(m_logQueueMutex);
        m_logQueue.push(jet::make_unique<LogEvent>(severity, std::move(message)));
    }

    LogEvent* AsyncEventQueue::getLogEvent()
    {
        std::lock_guard<std::mutex> guard(m_logQueueMutex);
        if (m_logQueue.empty()) {
            return nullptr;
        }
        return m_logQueue.front().get();
    }

    void AsyncEventQueue::popLogEvent()
    {
        std::lock_guard<std::mutex> guard(m_logQueueMutex);
        if (m_logQueue.empty()) {
            return;
        }
        m_logQueue.pop();
    }

    void AsyncEventQueue::addEvent(std::unique_ptr<IEvent>&& event)
    {
        std::lock_guard<std::mutex> guard(m_queueMutex);
        m_queue.push(std::move(event));
    }

    IEvent* AsyncEventQueue::getEvent()
    {
        std::lock_guard<std::mutex> guard(m_queueMutex);
        if (m_queue.empty()) {
            return nullptr;
        }
        // m_queue exposes top() rather than front() — presumably a
        // priority-ordered container; confirm against the header.
        return m_queue.top().get();
    }

    void AsyncEventQueue::popEvent()
    {
        std::lock_guard<std::mutex> guard(m_queueMutex);
        if (m_queue.empty()) {
            return;
        }
        m_queue.pop();
    }
}
|
{-- snippet all --}
-- posixtime.hs
import System.Posix.Files
import System.Time
import System.Posix.Types
-- | Given a path, returns (atime, mtime, ctime)
getTimes :: FilePath -> IO (ClockTime, ClockTime, ClockTime)
getTimes fp =
    fmap times (getFileStatus fp)
  where
    times stat = ( toct (accessTime stat)
                 , toct (modificationTime stat)
                 , toct (statusChangeTime stat) )
-- | Convert an EpochTime to a ClockTime
toct :: EpochTime -> ClockTime
toct et = TOD seconds 0
  where seconds = truncate (toRational et)
|
<?php namespace ConnorVG\WolframAlpha;
/**
* The Wolfram Alpha Info Object
* @package WolframAlpha
*/
class WAInfo {
    // define the sections of a response
    public $text = '';

    /**
     * Constructor.
     *
     * Replaces the original PHP 4-style constructor (a method named after
     * the class): in a namespaced class that method is never treated as a
     * constructor, and the old-style form was removed in PHP 8.
     */
    public function __construct() {
    }
}
|
---
title: Frequently Asked Questions
aliases:
- "/start/faq/"
---
The following are short, sometimes superficial, answers to some of the most commonly asked questions about the Fluid
Framework.
## What is the Fluid Framework?
The Fluid Framework is a collection of client libraries for building applications with distributed state. These libraries
allow multiple clients to create and operate on shared, synchronized distributed data structures (DDSes) using coding
patterns similar to those used to work with local data. The Fluid Framework manages connections to services and keeps all
clients in sync so that developers can focus on the client experience.
The Fluid Framework was designed with performance and ease of development as top priorities.
## Distributed Data Structures
### What is a DDS?
DDS is short for *distributed data structure*. DDSes are the foundation of the Fluid Framework. They are designed such
that the Fluid runtime is able to keep them in sync across clients while each client operates on the DDSes in largely
the same way they would operate on local data. The data source for a Fluid solution can represent numerous DDSes.
There are many types of DDSes including a SharedMap that is a distributed version of a JavaScript Map and a SharedString
that is designed to enable real-time editing of text data by multiple clients simultaneously. Developers can use the
DDSes included with the Fluid Framework or develop new ones.
Any practical limits on the types of data and size of a DDS will be specific to the implementation of that DDS. DDSes
can contain text, images, and other binary data and can effectively be any size. However, managing scale on the client
requires thought, just as it does when working with local data.
### Where is the data stored?
There are two classes of data storage to discuss when answering this question: **session storage** and
**persistent storage**.
**Session storage** is managed by the Fluid service and is, essentially, a central record of all the operations (ops)
performed on the DDSes. This record is used by the Fluid clients to produce identical local instances of the DDSes.
Session storage also includes ops that summarize all past operations to improve performance for clients that join
sessions later and for efficiencies when saving to persistent storage.
**Persistent storage** is a record of ops (and summary ops) saved outside of the Fluid service. This could be a
database, blob storage, or a file. Using persistent storage allows a Fluid solution to persist across sessions.
For example, current Microsoft 365 Fluid experiences save ops in *.fluid* files in SharePoint and OneDrive.
It is important to note that these files share many of the properties of a normal file such as permissions and a
location in a file structure, but because these experiences rely on the Fluid service, downloading the files and
working locally is not supported.
### How is data synced?
In order to keep all clients in sync, they must be connected to a Fluid service. This service's core
responsibility is sequencing all the incoming Fluid operations and then broadcasting them to all clients. Because
the ops are ordered, and because each client is running the same code, the DDSes in each client eventually end up in an
identical state.
Note, there isn't a centralized Fluid service for all Fluid experiences. But for each Fluid experience, there is only
one Fluid service.
Fluid clients connect to the Fluid service using the WebSocket protocol. However, the Fluid runtime manages
all of the connections so that Fluid client developers can focus on local experiences.
## Scale
### How many concurrent users does this support?
It depends. Because the Fluid service is extremely lightweight, even a simple implementation of the service can
support 100s of concurrent users. A more sophisticated implementation can distribute the work and support 1000s. The
experience on the client will vary depending on the Fluid data store and local device. When considering scale for
Fluid solutions, consider how well the client can handle and render changes, not whether the service is able to
distribute them efficiently.
Also, there is a significant difference in capacity depending on whether users are purely viewers vs. editors.
Adding viewers scales far more than adding editors because each editor increases the volume of changes and viewers
do not.
### How do you design Fluid Framework solutions to scale?
When thinking about scale there are two key factors: service scale and client scale. The Fluid service is designed
from the ground up to be extremely scalable. While there is the potential to refine the service to the point where
it is staggeringly scalable, for most Fluid developers the larger concern will be client scale.
When tackling client scale, developers need to consider how they will manage inbound changes, especially when the
volume of changes is high. The specific strategies developers should consider start when considering which DDS types
to use and how the data is structured. From there developers can look at using virtualization to limit updates to
parts of the view that are currently in scope. Another strategy could be to throttle inbound changes to limit the
number of updates that are required. Of course, the right strategies will depend enormously on the specific scenario.
## Fluid Technology
### What's the difference between Fluid Framework and SignalR?
Where SignalR is a technology principally aimed at simplifying real-time communication between servers and clients,
the Fluid Framework further abstracts that communication and, more significantly, focuses on distributing state between
multiple clients. So, while you might use Fluid to solve some of the same problems you solve with SignalR today,
the two are not interchangeable. Notably, the server component of a Fluid solution is lightweight and general-purpose
while a SignalR solution designed to distribute state would require additional server development.
### Does Fluid use operational transforms?
Fluid does not use Operational Transforms (OT), but we learned a tremendous amount from the literature on OT. While
OT uses operations that can be applied out of order by transforming operations to account for recent changes, Fluid
relies on a Total Order Broadcast to guarantee that all operations are applied in a specific order.
### Does Fluid use CRDT?
Fluid does not use Conflict-Free Replicated Data Types (CRDTs), but our model is more similar to CRDT than OT.
The Fluid Framework relies on update-based operations that are ordered using our Total Order Broadcast to prevent
conflicts. This allows us to have non-commutative operations because there is an explicit ordering.
## Use Cases
### What kind of support is there for real-time editing of text?
This is the scenario that Fluid was first designed to support. Consequently, the Fluid Framework is an ideal foundation
for rich text editors that support simultaneous editing by multiple clients. The SharedString DDS is
tailor-made for this scenario.
### Turn-based games?
DDSes can be used to distribute state for games, including whose turn it is. It's up to the client to enforce the rules
of a game so there may be some interesting problems to solve around preventing cheating but the Fluid team has already
prototyped several games.
### Presence, including mouse cursor?
Keeping track of and sharing each user's position in a grid, a document, or some other virtual space is an ideal
task for the Fluid Framework because it is designed to enable extraordinary performance.
## Fluid Service
### What needs to be running on the server?
The Fluid Framework requires a Fluid service to sync data between clients. The role of the server is very simple:
it orders operations and broadcasts them to all clients. It's also responsible for saving operations to
persistent data storage.
The Fluid service is general-purpose and, as a rule, Fluid solutions will work with any Fluid service. Developers of
Fluid solutions can use a local server or a "test quality" server for development and trust that their solution
will work against whatever production server their solution is pointed at.
The Fluid Framework includes a reference implementation of the Fluid service called Routerlicious that you can use for
development or as the basis for a production quality server.
### Where is the shared data stored?
The specifics of data storage (both session data and persistent data) will depend on the implementation of
the Fluid service. There is a great deal of flexibility here and developers of Fluid services may choose to offer
options around where and how data is stored.
### Is there a dedicated cloud service for syncing the clients?
Microsoft has developed an M365-specific Fluid service designed to enable solutions powered by Fluid within that
ecosystem. There will be ways for Fluid Framework developers to operate in M365 but those integration points are
not available yet.
Microsoft has also
[announced](https://developer.microsoft.com/en-us/office/blogs/whats-new-in-microsoft-365-platform-at-build-2021/) Azure
Fluid Relay, a fully managed Fluid service, at Build 2021.
### Besides SharePoint, where else can we store .fluid files?
.fluid files are a specific file format understood by Fluid solutions integrated with M365. They are designed to operate
exclusively in the cloud (never locally) and currently are only supported by OneDrive and SharePoint.
### Can we use Fluid Framework standalone with no dependencies on other services?
Yes. The Fluid Framework is designed to stand alone. It has no dependencies on other services.
### Can the Fluid Framework be used in a situation without access to the internet?
There are two angles to this question. One is whether the client must be connected to the internet. The other is
whether an organization could run the Fluid service on-site to support an intranet.
Clients do have to be connected to the Fluid service. Fluid can tolerate brief network outages and continue operating
but eventually the promise of being able to merge local changes weakens. We are investigating ways to improve this using
other merging techniques designed to reason over large deltas but no final solution is in place today.
In principle there is nothing preventing an organization from hosting a Fluid service on an intranet. However, Microsoft
has no plans to support that scenario directly.
### Is the Fluid reference server implementation production-ready?
No. Routerlicious on its own is not production-ready. Using it would require more thought about storage, scale,
security, and other typical considerations when building out a service on the internet. It is our expectation that most
Fluid developers will be able to leverage existing Fluid services that will emerge as we approach version 1.0.
### How are Fluid solutions deployed?
Fluid solutions are, at the end of the day, simple JavaScript. At Microsoft, Fluid solutions are deployed to CDNs like
any other static resource. Because Fluid is very client-centric, deployment is very simple.
## Conflicts and History
### How does Fluid Framework deal with conflict resolution?
This depends a great deal on the specific DDS. But, regardless of the final state of the data, operations are stored
in the Fluid ops stream. So, in cases where a client is unhappy with the final state, there are approaches for
achieving consensus that can be built into the DDS or handled by the client.
### Can we create custom strategies to handle update collisions to the distributed data structure?
Yes. You can design your own DDSes with your own strategies for handling merge. You also have access to all
operations and can write client code to reason over state in whatever way best suits your scenario.
### Can we have history of the changes?
Yes. Fluid inherently keeps all changes and these are accessible through the framework. The only caveat is that for
performance and storage efficiency, operations need to be summarized from time to time. This may cause a loss of
granularity.
### Is there any way to know which user caused each change?
Yes. Operations can be attributed to users. This is an implementation choice and not something built directly into
the Fluid Framework.
## UX Frameworks
### Can I use React, Angular, VUE, Svelte, or some other UX framework?
Yes. You can use any UX framework designed for the web.
### What is the relationship with Fluent UI?
Both Fluent and Fluid Framework come from Microsoft. And inside Microsoft many Fluid projects also use Fluent.
But there is no relationship other than the names are similar.
### Is Fluid trying to be a competitor to UX frameworks?
Not at all. The Fluid Framework is unopinionated about UX.
## Coding Frameworks
### Can I use ASP.NET, ASP.NET Core, and C\#?
The Fluid Framework is written in TypeScript but we don't want it to be limited to the web. You can use the Fluid Framework
with non-web technologies by leveraging a JavaScript runtime to host the Fluid code. Ultimately it is critical that the same
code be running in all clients to ensure eventual consistency of data so it is impractical to port Fluid to other coding
frameworks.
This also applies to Blazor, Xamarin, MAUI, and other mobile frameworks.
## Browsers
### What browsers are supported?
{{% include file="_includes/browsers.md" %}}
|
<?php
namespace Relhub\BuildBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
/**
 * Symfony bundle class for the Relhub build bundle.
 *
 * Intentionally empty: registering the class is sufficient for Symfony
 * to load the bundle's resources.
 */
class RelhubBuildBundle extends Bundle
{
}
|
import cx from 'classnames'
import PropTypes from 'prop-types'
import React from 'react'
import {
customPropTypes,
getElementType,
getUnhandledProps,
META,
SUI,
useKeyOnly,
useKeyOrValueAndKey,
useMultipleProp,
useTextAlignProp,
useVerticalAlignProp,
useWidthProp,
} from '../../lib'
import GridColumn from './GridColumn'
import GridRow from './GridRow'
/**
* A grid is used to harmonize negative space in a layout.
*/
function Grid(props) {
const {
celled,
centered,
children,
className,
columns,
container,
divided,
doubling,
inverted,
padded,
relaxed,
reversed,
stackable,
stretched,
textAlign,
verticalAlign,
} = props
const classes = cx(
'ui',
useKeyOnly(centered, 'centered'),
useKeyOnly(container, 'container'),
useKeyOnly(doubling, 'doubling'),
useKeyOnly(inverted, 'inverted'),
useKeyOnly(stackable, 'stackable'),
useKeyOnly(stretched, 'stretched'),
useKeyOrValueAndKey(celled, 'celled'),
useKeyOrValueAndKey(divided, 'divided'),
useKeyOrValueAndKey(padded, 'padded'),
useKeyOrValueAndKey(relaxed, 'relaxed'),
useMultipleProp(reversed, 'reversed'),
useTextAlignProp(textAlign),
useVerticalAlignProp(verticalAlign),
useWidthProp(columns, 'column', true),
'grid',
className,
)
const rest = getUnhandledProps(Grid, props)
const ElementType = getElementType(Grid, props)
return <ElementType {...rest} className={classes}>{children}</ElementType>
}
Grid.Column = GridColumn
Grid.Row = GridRow
Grid._meta = {
name: 'Grid',
type: META.TYPES.COLLECTION,
}
Grid.propTypes = {
/** An element type to render as (string or function). */
as: customPropTypes.as,
/** A grid can have rows divided into cells. */
celled: PropTypes.oneOfType([
PropTypes.bool,
PropTypes.oneOf(['internally']),
]),
/** A grid can have its columns centered. */
centered: PropTypes.bool,
/** Primary content. */
children: PropTypes.node,
/** Additional classes. */
className: PropTypes.string,
/** Represents column count per row in Grid. */
columns: PropTypes.oneOf([...SUI.WIDTHS, 'equal']),
/** A grid can be combined with a container to use the available layout and alignment. */
container: PropTypes.bool,
/** A grid can have dividers between its columns. */
divided: PropTypes.oneOfType([
PropTypes.bool,
PropTypes.oneOf(['vertically']),
]),
/** A grid can double its column width on tablet and mobile sizes. */
doubling: PropTypes.bool,
/** A grid's colors can be inverted. */
inverted: PropTypes.bool,
/** A grid can preserve its vertical and horizontal gutters on first and last columns. */
padded: PropTypes.oneOfType([
PropTypes.bool,
PropTypes.oneOf(['horizontally', 'vertically']),
]),
/** A grid can increase its gutters to allow for more negative space. */
relaxed: PropTypes.oneOfType([
PropTypes.bool,
PropTypes.oneOf(['very']),
]),
/** A grid can specify that its columns should reverse order at different device sizes. */
reversed: customPropTypes.multipleProp([
'computer', 'computer vertically', 'mobile', 'mobile vertically', 'tablet', 'tablet vertically',
]),
/** A grid can have its columns stack on-top of each other after reaching mobile breakpoints. */
stackable: PropTypes.bool,
/** A grid can stretch its contents to take up the entire grid height. */
stretched: PropTypes.bool,
/** A grid can specify its text alignment. */
textAlign: PropTypes.oneOf(SUI.TEXT_ALIGNMENTS),
/** A grid can specify its vertical alignment to have all its columns vertically centered. */
verticalAlign: PropTypes.oneOf(SUI.VERTICAL_ALIGNMENTS),
}
export default Grid
|
# self join (inner): only customer pairs where a.last_name matches b.first_name
select a.customer_id, a.first_name, a.last_name, b.customer_id, b.first_name, b.last_name from customer a
inner join customer b on a.last_name = b.first_name;
# self left join: every customer from a, with matching b rows where present
select a.customer_id, a.first_name, a.last_name, b.customer_id, b.first_name, b.last_name from customer a
left join customer b on a.last_name = b.first_name
order by a.customer_id;
# self right join: every customer from b, with matching a rows where present
select a.customer_id, a.first_name, a.last_name, b.customer_id, b.first_name, b.last_name from customer a
right join customer b on a.last_name = b.first_name
order by b.first_name;
|
package com.twitter.finagle.zookeeper
import com.google.common.collect.ImmutableSet
import com.twitter.common.net.pool.DynamicHostSet
import com.twitter.common.net.pool.DynamicHostSet.MonitorException
import com.twitter.common.zookeeper.{ServerSet, ServerSetImpl}
import com.twitter.concurrent.{Broker, Offer}
import com.twitter.finagle.addr.StabilizingAddr
import com.twitter.finagle.stats.DefaultStatsReceiver
import com.twitter.finagle.{Group, Resolver, InetResolver, Addr}
import com.twitter.thrift.Endpoint
import com.twitter.thrift.ServiceInstance
import com.twitter.thrift.Status.ALIVE
import com.twitter.util.{Future, Return, Throw, Try, Var}
import java.net.{InetSocketAddress, SocketAddress}
import scala.collection.JavaConverters._
import scala.collection.mutable
// Raised when a "zk!..." destination string cannot be parsed or resolved.
class ZkResolverException(msg: String) extends Exception(msg)
// Note: this is still used by finagle-memcached.
// Watches a ZooKeeper ServerSet on a daemon thread and mirrors its
// membership into a Var-backed Group of ServiceInstances.
private[finagle] class ZkGroup(serverSet: ServerSet, path: String)
extends Thread("ZkGroup(%s)".format(path))
with Group[ServiceInstance]
{
setDaemon(true)
start()
// Current membership snapshot; updated on every watch notification.
protected[finagle] val set = Var(Set[ServiceInstance]())
override def run() {
serverSet.watch(new DynamicHostSet.HostChangeMonitor[ServiceInstance] {
// Re-publish the full membership set on each change.
def onChange(newSet: ImmutableSet[ServiceInstance]) = synchronized {
set() = newSet.asScala.toSet
}
})
}
}
// Adapts a ServerSet watch into an Offer of membership snapshots.
// Runs on a daemon thread; each change notification is forwarded through
// a Broker so consumers can synchronize on new snapshots.
private class ZkOffer(serverSet: ServerSet, path: String)
extends Thread("ZkOffer(%s)".format(path)) with Offer[Set[ServiceInstance]] {
setDaemon(true)
start()
private val inbound = new Broker[Set[ServiceInstance]]
override def run() {
var ok = false
// Keep retrying until the watch is successfully installed.
while (!ok) {
try {
serverSet.watch(new DynamicHostSet.HostChangeMonitor[ServiceInstance] {
def onChange(newSet: ImmutableSet[ServiceInstance]) {
inbound !! newSet.asScala.toSet
}
})
ok = true
} catch {
case exc: MonitorException =>
// There are certain path permission checks in the serverset library
// that can cause exceptions here. We'll send an empty set (which
// becomes a negative resolution), but keep on trying.
inbound !! Set.empty
Thread.sleep(5000)
}
}
}
def prepare() = inbound.recv.prepare()
}
// Resolver for the "zk" scheme: resolves serverset paths stored in
// ZooKeeper into Vars of bound socket addresses.
class ZkResolver(factory: ZkClientFactory) extends Resolver {
val scheme = "zk"
// With the current serverset client, instances are maintained
// forever; additional resource leaks aren't created by caching
// instances here.
private type CacheKey = (Set[InetSocketAddress], String, Option[String], Option[Int])
private val cache = new mutable.HashMap[CacheKey, Var[Addr]]
def this() = this(DefaultZkClientFactory)
// Resolves a serverset path, optionally filtered by endpoint name and
// shard id. Results are cached per (hosts, path, endpoint, shardId).
def resolve(
zkHosts: Set[InetSocketAddress],
path: String,
endpoint: Option[String] = None,
shardId: Option[Int] = None): Var[Addr] =
synchronized {
cache.getOrElseUpdate(
(zkHosts, path, endpoint, shardId),
newVar(zkHosts, path, newToSocketAddrs(endpoint, shardId)))
}
// Builds the ServiceInstance => SocketAddress projection implied by the
// optional endpoint-name and shard-id filters.
private def newToSocketAddrs(endpoint: Option[String], shardId: Option[Int]) = {
val getEndpoint: PartialFunction[ServiceInstance, Endpoint] = endpoint match {
case Some(epname) => {
case inst if inst.getAdditionalEndpoints.containsKey(epname) =>
inst.getAdditionalEndpoints.get(epname)
}
case None => {
case inst: ServiceInstance => inst.getServiceEndpoint()
}
}
val filterShardId: PartialFunction[ServiceInstance, ServiceInstance] = shardId match {
case Some(id) => {
case inst if inst.isSetShard && inst.shard == id => inst
}
case None => { case x => x }
}
val toSocketAddr: Endpoint => SocketAddress = (ep: Endpoint) =>
new InetSocketAddress(ep.getHost, ep.getPort)
(insts: Set[ServiceInstance]) =>
insts.collect(filterShardId).collect(getEndpoint).map(toSocketAddr)
}
// Creates a Var tracking serverset membership, stabilized against
// transient flaps via StabilizingAddr.
private def newVar(
zkHosts: Set[InetSocketAddress],
path: String, toSocketAddrs: Set[ServiceInstance] => Set[SocketAddress]) = {
val (zkClient, zkHealthHandler) = factory.get(zkHosts)
val zkOffer = new ZkOffer(new ServerSetImpl(zkClient, path), path)
val addrOffer = zkOffer map { newSet =>
val sockaddrs = toSocketAddrs(newSet)
// An empty membership becomes a negative resolution.
if (sockaddrs.nonEmpty) Addr.Bound(sockaddrs)
else Addr.Neg
}
val stable = StabilizingAddr(
addrOffer,
zkHealthHandler,
factory.sessionTimeout,
DefaultStatsReceiver.scope("zkGroup"))
val v = Var[Addr](Addr.Pending)
stable foreach { newAddr =>
v() = newAddr
}
v
}
// Parses the ZK host list, failing fast when it resolves to nothing.
private[this] def zkHosts(hosts: String) = {
val zkHosts = factory.hostSet(hosts)
if (zkHosts.isEmpty) {
throw new ZkResolverException(
"ZK client address \"%s\" resolves to nothing".format(hosts))
}
zkHosts
}
// Accepted destination forms: "hosts!path" and "hosts!path!endpoint".
def bind(arg: String) = arg.split("!") match {
// zk!host:2181!/path
case Array(hosts, path) =>
resolve(zkHosts(hosts), path, None)
// zk!host:2181!/path!endpoint
case Array(hosts, path, endpoint) =>
resolve(zkHosts(hosts), path, Some(endpoint))
case _ =>
throw new ZkResolverException("Invalid address \"%s\"".format(arg))
}
}
|
package main
import (
"context"
"encoding/json"
"flag"
"fmt"
"io"
"log"
"net"
"net/http"
"os"
"os/signal"
"path/filepath"
"strings"
"time"
"unicode"
"github.com/sixt/gomodproxy/pkg/api"
"expvar"
_ "net/http/pprof"
)
// prometheusExpose writes one expvar value to w in the Prometheus text
// exposition format. Metric names are lower-cased and sanitized to
// [a-z0-9_]. A "_total" suffix marks the metric as a counter; everything
// else is exposed as a gauge. Maps are flattened recursively by joining
// the parent name and the key.
func prometheusExpose(w io.Writer, name string, v interface{}) {
	// replace all invalid symbols with underscores
	name = strings.Map(func(r rune) rune {
		r = unicode.ToLower(r)
		if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') {
			return r
		}
		return '_'
	}, name)
	// expvar does not have concepts of counters and gauges,
	// so we tell one from another based on the name suffix.
	counter := strings.HasSuffix(name, "_total")
	if f, ok := v.(float64); ok {
		if counter {
			fmt.Fprintf(w, "# TYPE %s counter\n", name)
		} else {
			fmt.Fprintf(w, "# TYPE %s gauge\n", name)
		}
		fmt.Fprintf(w, "%s %f\n", name, f)
	} else if m, ok := v.(map[string]interface{}); ok {
		// Values that are neither float64 nor maps are silently dropped.
		for k, v := range m {
			// for composite maps we construct metric names by joining the parent map
			// name and the key name.
			s := strings.TrimSuffix(name, "_total") + "_" + k
			if counter {
				s = s + "_total"
			}
			prometheusExpose(w, s, v)
		}
	}
}
// prometheusHandler exposes all published expvars as Prometheus metrics.
// Each expvar is round-tripped through JSON to obtain plain float64/map
// values; variables that fail to parse are silently skipped.
func prometheusHandler(w http.ResponseWriter, r *http.Request) {
	expvar.Do(func(kv expvar.KeyValue) {
		var v interface{}
		if err := json.Unmarshal([]byte(kv.Value.String()), &v); err != nil {
			return
		}
		prometheusExpose(w, kv.Key, v)
	})
}
// prettyLog logs an optional leading message followed by key=value pairs.
// With an odd number of arguments, the first one is treated as the message
// and the remainder as alternating keys and values.
func prettyLog(v ...interface{}) {
	msg := ""
	if len(v)%2 != 0 {
		msg = fmt.Sprintf("%s", v[0])
		v = v[1:]
	}
	var b strings.Builder
	fmt.Fprintf(&b, "%20s ", msg)
	for i := 0; i+1 < len(v); i += 2 {
		fmt.Fprintf(&b, "%v=%v ", v[i], v[i+1])
	}
	log.Println(b.String())
}
// jsonLog emits one JSON object per call to stdout. With an odd number of
// arguments the first becomes the "msg" field; the rest are key/value pairs.
func jsonLog(v ...interface{}) {
	fields := make(map[string]interface{})
	if len(v)%2 != 0 {
		fields["msg"] = v[0]
		v = v[1:]
	}
	for i := 0; i+1 < len(v); i += 2 {
		fields[fmt.Sprintf("%v", v[i])] = v[i+1]
	}
	json.NewEncoder(os.Stdout).Encode(fields)
}
// listFlag collects repeated occurrences of a command-line flag.
type listFlag []string

// String renders the collected values space-separated (flag.Value interface).
func (f *listFlag) String() string { return strings.Join(*f, " ") }

// Set appends one more value (flag.Value interface); it never fails.
func (f *listFlag) Set(s string) error {
	*f = append(*f, s)
	return nil
}
// main wires up the gomodproxy HTTP server: flag parsing, logging,
// VCS/cache options, optional Prometheus and debug endpoints, and a
// graceful shutdown on SIGINT.
//
// Fix: the server goroutine treated every error from srv.Serve as fatal.
// After srv.Shutdown, Serve returns http.ErrServerClosed, so log.Fatal
// killed the process before the graceful shutdown could complete. That
// sentinel error is now ignored.
func main() {
	gitPaths := listFlag{}
	vcsPaths := listFlag{}
	addr := flag.String("addr", ":0", "http server address")
	verbose := flag.Bool("v", false, "verbose logging")
	prometheus := flag.String("prometheus", "", "prometheus address")
	debug := flag.Bool("debug", false, "enable debug HTTP API (pprof/expvar)")
	json := flag.Bool("json", false, "json structured logging")
	dir := flag.String("dir", filepath.Join(os.Getenv("HOME"), ".gomodproxy/cache"), "modules cache directory")
	gitdir := flag.String("gitdir", filepath.Join(os.Getenv("HOME"), ".gomodproxy/git"), "git cache directory")
	memLimit := flag.Int64("mem", 256, "in-memory cache size in MB")
	workers := flag.Int("workers", 1, "number of parallel VCS workers")
	flag.Var(&gitPaths, "git", "list of git settings")
	flag.Var(&vcsPaths, "vcs", "list of custom VCS handlers")
	flag.Parse()
	// Bind early so a bad -addr fails fast; ":0" picks a free port.
	ln, err := net.Listen("tcp", *addr)
	if err != nil {
		log.Fatal("net.Listen:", err)
	}
	defer ln.Close()
	fmt.Println("Listening on", ln.Addr())
	options := []api.Option{}
	// Logging is a no-op unless -v or -json was given.
	logger := func(...interface{}) {}
	if *verbose || *json {
		if *json {
			logger = jsonLog
		} else {
			logger = prettyLog
		}
	}
	options = append(options, api.Log(logger))
	// -git entries are "prefix:value" pairs.
	for _, path := range gitPaths {
		kv := strings.SplitN(path, ":", 2)
		if len(kv) != 2 {
			log.Fatal("bad git path:", path)
		}
		options = append(options, api.Git(kv[0], kv[1]))
	}
	// -vcs entries are "prefix:value" pairs.
	for _, path := range vcsPaths {
		kv := strings.SplitN(path, ":", 2)
		if len(kv) != 2 {
			log.Fatal("bad VCS syntax:", path)
		}
		options = append(options, api.CustomVCS(kv[0], kv[1]))
	}
	options = append(options,
		api.VCSWorkers(*workers),
		api.GitDir(*gitdir),
		api.Memory(logger, *memLimit*1024*1024),
		api.CacheDir(*dir),
	)
	// Shut down cleanly on Ctrl-C.
	sigc := make(chan os.Signal, 1)
	signal.Notify(sigc, os.Interrupt)
	mux := http.NewServeMux()
	mux.Handle("/", api.New(options...))
	if *prometheus != "" {
		// Same address as the proxy: serve /metrics from the shared mux;
		// otherwise run a dedicated metrics server.
		if *prometheus == *addr {
			mux.HandleFunc("/metrics", prometheusHandler)
		} else {
			srv := &http.Server{Handler: http.HandlerFunc(prometheusHandler), Addr: *prometheus}
			go srv.ListenAndServe()
		}
	}
	if *debug {
		// expvar and net/http/pprof register themselves on
		// http.DefaultServeMux via their imports.
		mux.Handle("/debug/vars", http.DefaultServeMux)
		mux.Handle("/debug/pprof/heap", http.DefaultServeMux)
		mux.Handle("/debug/pprof/profile", http.DefaultServeMux)
		mux.Handle("/debug/pprof/block", http.DefaultServeMux)
		mux.Handle("/debug/pprof/trace", http.DefaultServeMux)
	}
	srv := &http.Server{Handler: mux}
	go func() {
		if err := srv.Serve(ln); err != nil && err != http.ErrServerClosed {
			log.Fatal(err)
		}
	}()
	<-sigc
	// Allow in-flight requests up to 5s to complete.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	srv.Shutdown(ctx)
}
|
# Agent type for the epidemic/behavior model (Agents.jl). Carries position,
# personality traits, social-norm dynamics, and disease-progression timers.
mutable struct Citizen <: Agents.AbstractAgent
    id::Int64                        # unique agent id (required by Agents.jl)
    pos::Int64                       # current position/node
    home::Int64                      # home position (set to the starting pos)
    neuroticism::Float64             # trait sampled from a truncated normal
    trust_authorities::Float64       # trait sampled from a truncated normal
    fear::Float64
    socialnorm::Float64
    # NOTE(review): keyword constructor receives Array{Float64, 1} here but
    # the field is Array{Any, 1} — confirm whether Any is intentional.
    socialnorm_memory::Array{Any, 1}
    behavior::Bool                   # currently adopted protective behavior
    behavior_buffer::Bool            # next-step behavior, applied later
    quarantined::Bool
    state::Symbol                    # disease state — presumably :S/:E/:I/... ; confirm against the stepping code
    ticks_exposed::Int64
    ticks_infected::Int64
    ticks_quarantined::Int64
    incubation_period::Int64         # drawn via rand_incubation()
    infection_duration::Int64        # drawn via rand_duration()
end
# Keyword-argument convenience constructor: forwards every field positionally
# to the default constructor so call sites can name their arguments.
Citizen(;
    id::Int64, pos::Int64, home::Int64,
    neuroticism::Float64, trust_authorities::Float64, fear::Float64,
    socialnorm::Float64, socialnorm_memory::Array{Float64, 1},
    behavior::Bool, behavior_buffer::Bool,
    quarantined::Bool, state::Symbol,
    ticks_exposed::Int64, ticks_infected::Int64, ticks_quarantined::Int64,
    incubation_period::Int64, infection_duration::Int64
) = Citizen(
    id, pos, home,
    neuroticism, trust_authorities, fear,
    socialnorm, socialnorm_memory,
    behavior, behavior_buffer,
    quarantined, state,
    ticks_exposed, ticks_infected, ticks_quarantined,
    incubation_period, infection_duration
)
# Minimal constructor for a fresh agent: traits drawn from a truncated normal,
# all timers zeroed, incubation/duration sampled from their distributions,
# and `home` defaulting to the starting position.
Citizen(id, pos, state) = Citizen(
    id=id, pos=pos, home=pos,
    neuroticism=randn_unit(), trust_authorities=randn_unit(), fear=0.0,
    socialnorm=0.0, socialnorm_memory=Float64[],
    behavior=false, behavior_buffer=false,
    quarantined=false, state=state,
    ticks_exposed=0, ticks_infected=0, ticks_quarantined=0,
    incubation_period=rand_incubation(), infection_duration=rand_duration()
)
# Draw from Normal(0.5, 0.2) truncated to [0, 1] by rejection sampling.
#
# Fixes over the original:
#  - Resampling is iterative instead of recursive, so a long unlucky streak
#    cannot grow the call stack.
#  - Uses short-circuit `||` instead of bitwise `|` for the scalar test,
#    which is the idiomatic boolean operator here.
function randn_unit()
    normal_distribution = Distributions.Normal(0.5, 0.2)
    random_normal_number = Distributions.rand(normal_distribution)
    while (random_normal_number < 0) || (random_normal_number > 1)
        random_normal_number = Distributions.rand(normal_distribution)
    end
    return random_normal_number
end
# Draw an incubation period (in model ticks) from a Gamma(α, θ) distribution,
# scaled by 10 and truncated to an integer.
function rand_incubation(α=2.14532, θ=1.5626)
    sample = Distributions.rand(Distributions.Gamma(α, θ))
    return trunc(Int64, round(sample * 10))
end
# Draw an infection duration (in model ticks) from a Poisson(λ) distribution,
# scaled by 10 and truncated to an integer.
function rand_duration(λ=17.5)
    sample = Distributions.rand(Distributions.Poisson(λ))
    return trunc(Int64, round(sample * 10))
end
|
#!/bin/bash
# Stop any node servers managed by pm2.
# Fix: the original first line was "# !bin/bash" — a plain comment, not a
# shebang — so the script ran under whatever shell happened to invoke it.
echo "stopping existing node servers"
pm2 kill
# pkill node
|
#include "FWCore/Framework/interface/ESProducer.h"
#include "FWCore/Utilities/interface/ESGetToken.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "CalibFormats/HcalObjects/interface/HcalDbService.h"
#include "CalibFormats/HcalObjects/interface/HcalDbRecord.h"
#include "DataFormats/HcalDetId/interface/HcalGenericDetId.h"
#include "DataFormats/HcalDetId/interface/HcalSubdetector.h"
#include "CondFormats/HcalObjects/interface/HcalRecoParams.h"
#include "CondFormats/DataRecord/interface/HcalRecoParamsRcd.h"
#include "Geometry/CaloGeometry/interface/CaloGeometry.h"
#include "Geometry/Records/interface/CaloGeometryRecord.h"
#include "Geometry/CaloTopology/interface/HcalTopology.h"
#include "Geometry/HcalTowerAlgo/interface/HcalGeometry.h"
#include "RecoLocalCalo/HcalRecAlgos/interface/HcalSeverityLevelComputer.h"
#include "RecoLocalCalo/HcalRecAlgos/interface/HcalSeverityLevelComputerRcd.h"
#include "RecoLocalCalo/HcalRecAlgos/interface/HcalChannelProperties.h"
#include "RecoLocalCalo/HcalRecAlgos/interface/HcalChannelPropertiesAuxRecord.h"
#include "RecoLocalCalo/HcalRecAlgos/interface/HcalChannelPropertiesRecord.h"
/**
 * ESProducer that precomputes per-channel reconstruction properties for HCAL.
 *
 * Two products are made:
 *  - produce1 publishes a copy of HcalRecoParams with the HcalTopology
 *    attached (under HcalChannelPropertiesAuxRecord).
 *  - produce2 publishes a dense HcalChannelPropertiesVec holding, for every
 *    valid HB/HE/HF cell: calibrations, ADC coder/shape, SiPM parameters,
 *    per-capId pedestals/gains, and a bad-channel flag.
 */
class HcalChannelPropertiesEP : public edm::ESProducer {
public:
  typedef std::unique_ptr<HcalRecoParams> ReturnType1;
  typedef std::unique_ptr<HcalChannelPropertiesVec> ReturnType2;

  inline HcalChannelPropertiesEP(const edm::ParameterSet&) {
    // Register both producer callbacks and collect the consumes tokens
    // from their respective collectors.
    auto cc1 = setWhatProduced(this, &HcalChannelPropertiesEP::produce1);
    topoToken_ = cc1.consumes();
    paramsToken_ = cc1.consumes();

    auto cc2 = setWhatProduced(this, &HcalChannelPropertiesEP::produce2);
    // The channel-quality product is requested by label "withTopo".
    edm::ESInputTag qTag("", "withTopo");
    condToken_ = cc2.consumes();
    myParamsToken_ = cc2.consumes();
    sevToken_ = cc2.consumes();
    qualToken_ = cc2.consumes(qTag);
    geomToken_ = cc2.consumes();
  }

  inline ~HcalChannelPropertiesEP() override {}

  // Copy the reco params and attach the topology so downstream code can
  // navigate detector ids from the same object.
  ReturnType1 produce1(const HcalChannelPropertiesAuxRecord& rcd) {
    using namespace edm;
    const HcalTopology& htopo = rcd.getRecord<HcalRecNumberingRecord>().get(topoToken_);
    const HcalRecoParams& params = rcd.getRecord<HcalRecoParamsRcd>().get(paramsToken_);
    ReturnType1 prod = std::make_unique<HcalRecoParams>(params);
    prod->setTopo(&htopo);
    return prod;
  }

  ReturnType2 produce2(const HcalChannelPropertiesRecord& rcd) {
    // There appears to be no easy way to trace the internal
    // dependencies of HcalDbService. So, rebuild the product
    // every time anything changes in the parent records.
    // This means that we are sometimes going to rebuild the
    // whole table on the lumi block boundaries instead of
    // just updating the list of bad channels.
    using namespace edm;

    // Retrieve various event setup records and data products
    const HcalDbRecord& dbRecord = rcd.getRecord<HcalDbRecord>();
    const HcalDbService& cond = dbRecord.get(condToken_);
    const HcalRecoParams& params = rcd.getRecord<HcalChannelPropertiesAuxRecord>().get(myParamsToken_);
    const HcalSeverityLevelComputer& severity = rcd.getRecord<HcalSeverityLevelComputerRcd>().get(sevToken_);
    const HcalChannelQuality& qual = dbRecord.getRecord<HcalChannelQualityRcd>().get(qualToken_);
    const CaloGeometry& geom = rcd.getRecord<CaloGeometryRecord>().get(geomToken_);

    // HcalTopology is taken from "params" created by the "produce1" method
    const HcalTopology& htopo(*params.topo());

    // Build the product
    ReturnType2 prod = std::make_unique<HcalChannelPropertiesVec>(htopo.ncells());
    std::array<HcalPipelinePedestalAndGain, 4> pedsAndGains;
    const HcalSubdetector subdetectors[3] = {HcalBarrel, HcalEndcap, HcalForward};
    for (HcalSubdetector subd : subdetectors) {
      const HcalGeometry* hcalGeom = static_cast<const HcalGeometry*>(geom.getSubdetectorGeometry(DetId::Hcal, subd));
      const std::vector<DetId>& ids = hcalGeom->getValidDetIds(DetId::Hcal, subd);
      for (const auto cell : ids) {
        const auto rawId = cell.rawId();

        // ADC decoding tools, etc
        const HcalRecoParam* param_ts = params.getValues(rawId);
        const HcalQIECoder* channelCoder = cond.getHcalCoder(cell);
        const HcalQIEShape* shape = cond.getHcalShape(channelCoder);
        const HcalSiPMParameter* siPMParameter = cond.getHcalSiPMParameter(cell);

        // Pedestals and gains
        const HcalCalibrations& calib = cond.getHcalCalibrations(cell);
        const HcalCalibrationWidths& calibWidth = cond.getHcalCalibrationWidths(cell);
        for (int capid = 0; capid < 4; ++capid) {
          pedsAndGains[capid] = HcalPipelinePedestalAndGain(calib.pedestal(capid),
                                                            calibWidth.pedestal(capid),
                                                            calib.effpedestal(capid),
                                                            calibWidth.effpedestal(capid),
                                                            calib.respcorrgain(capid),
                                                            calibWidth.gain(capid));
        }

        // Channel quality
        const HcalChannelStatus* digistatus = qual.getValues(rawId);
        const bool taggedBadByDb = severity.dropChannel(digistatus->getValue());

        // Fill the table entry, addressed by the dense (linear) channel id.
        const unsigned linearId = htopo.detId2denseId(cell);
        prod->at(linearId) =
            HcalChannelProperties(&calib, param_ts, channelCoder, shape, siPMParameter, pedsAndGains, taggedBadByDb);
      }
    }
    return prod;
  }

  HcalChannelPropertiesEP() = delete;
  HcalChannelPropertiesEP(const HcalChannelPropertiesEP&) = delete;
  HcalChannelPropertiesEP& operator=(const HcalChannelPropertiesEP&) = delete;

private:
  edm::ESGetToken<HcalDbService, HcalDbRecord> condToken_;
  edm::ESGetToken<HcalTopology, HcalRecNumberingRecord> topoToken_;
  edm::ESGetToken<HcalRecoParams, HcalRecoParamsRcd> paramsToken_;
  edm::ESGetToken<HcalSeverityLevelComputer, HcalSeverityLevelComputerRcd> sevToken_;
  edm::ESGetToken<HcalChannelQuality, HcalChannelQualityRcd> qualToken_;
  edm::ESGetToken<CaloGeometry, CaloGeometryRecord> geomToken_;
  edm::ESGetToken<HcalRecoParams, HcalChannelPropertiesAuxRecord> myParamsToken_;
};
DEFINE_FWK_EVENTSETUP_MODULE(HcalChannelPropertiesEP);
|
using System;
using UnityEngine;
namespace CustomScripts.Gamemode.GMDebug
{
    /// <summary>
    /// Debug helper that, once started, moves this object and the player
    /// forward at <see cref="Speed"/> units per second in lockstep.
    /// </summary>
    public class MoveTest : MonoBehaviour
    {
        public bool isMoving = false;
        public float Speed;

        /// <summary>Begins the forward movement; it never stops on its own.</summary>
        public void StartMoving()
        {
            isMoving = true;
        }

        private void Update()
        {
            if (!isMoving)
                return;

            // Compute the frame displacement once and apply it to both
            // transforms so they cannot drift apart (the original repeated
            // the Speed * Time.deltaTime expression for each object).
            Vector3 delta = Vector3.forward * (Speed * Time.deltaTime);
            transform.position += delta;
            GameReferences.Instance.Player.transform.position += delta;
        }
    }
}
|
// FIR_COMPARISON
// Completion test fixture: `compareTo` is constrained to T : Comparable<T>,
// and A is not Comparable, so the extension must NOT be offered at the caret
// (checked by the ABSENT directive below).
class SomeObject<T, U>() {
    var field : T? = null
}
class A {}
class C {}
fun <T: Comparable<T>, U> SomeObject<T, U>.compareTo(other : SomeObject<T, U>) : Int {
    return 0;
}
fun some() {
    val test = SomeObject<A, A>
    test.<caret>
}
// ABSENT: compareTo
|
using System;
using System.Collections.Generic;
namespace Northwind.Models
{
    //Model for NORTHWIND.EMPLOYEES (0 rows)
    /// <summary>
    /// POCO entity mapped to the NORTHWIND.EMPLOYEES table. Members are
    /// virtual so an ORM can generate lazy-loading proxies; the mapped
    /// column name is noted next to each property.
    /// </summary>
    public class Employees
    {
        public virtual int Id { get; set; } //EMPLOYEE_ID
        public virtual string Lastname { get; set; } //LASTNAME
        public virtual string Firstname { get; set; } //FIRSTNAME
        public virtual string Title { get; set; } //TITLE
        public virtual string TitleOfCourtesy { get; set; } //TITLE_OF_COURTESY
        public virtual DateTime? Birthdate { get; set; } //BIRTHDATE
        public virtual DateTime? Hiredate { get; set; } //HIREDATE
        public virtual string Address { get; set; } //ADDRESS
        public virtual string City { get; set; } //CITY
        public virtual string Region { get; set; } //REGION
        public virtual string PostalCode { get; set; } //POSTAL_CODE
        public virtual string Country { get; set; } //COUNTRY
        public virtual string HomePhone { get; set; } //HOME_PHONE
        public virtual string Extension { get; set; } //EXTENSION
        public virtual string Photo { get; set; } //PHOTO
        public virtual string Notes { get; set; } //NOTES
        public virtual int? ReportsTo { get; set; } //REPORTS_TO
        //CHILDREN
        public virtual IList<Orders> Orders { get; set; } //EMPLOYEE_ID [1:*] ORDERS.EMPLOYEE_ID
    }
}
|
require 'national_identification_number/swedish'
require 'national_identification_number/finnish'
require 'national_identification_number/norwegian'
require 'national_identification_number/danish'
|
class jigsaw_prj_t(object):
    """Plain container for JIGSAW projection settings.

    Attributes:
        radii: sphere radius used by the projection (defaults to 1.0).
        prjID: projection identifier string, initially empty.
        xbase: projection x-origin (defaults to 0.0).
        ybase: projection y-origin (defaults to 0.0).
    """

    def __init__(self):
        self.radii = 1.0
        self.prjID = ""
        self.xbase = 0.0
        self.ybase = 0.0
|
import { BASE_PATH, TEST_BASE_PATH } from "@/client/context";
import { insertUrlParam } from "@/lib/helper";
import React from "react";
// Shape of the app-mode context: the active base path, whether test mode is
// on, and a callback that navigates to the equivalent path in the other mode.
type AppModeType = {
  basePath: string;
  testMode?: boolean;
  switchMode: () => void;
};
/** Whether the given pathname points into the test-mode section of the app. */
export function computeMode(pathname?: string) {
  return pathname?.startsWith(TEST_BASE_PATH);
}
/** Base path matching the current mode: the test base in test mode, else the default. */
export function computeBasePath(pathname?: string) {
  const inTestMode = pathname?.startsWith(TEST_BASE_PATH);
  return inTestMode ? TEST_BASE_PATH : BASE_PATH;
}
// Init the APP client mode
export const AppMode = React.createContext<AppModeType>({} as AppModeType);
// Provides the app-mode context (live vs test) derived from the current
// pathname, plus a switchMode callback that performs a full page navigation
// to the equivalent path in the other mode.
const AppModeProvider: React.FC<{ pathname: string }> = ({ children, pathname }) => {
  const switchMode = () => {
    const isTestMode = computeMode(pathname);
    // NOTE(review): presumably resets the URL query parameters before
    // switching — confirm insertUrlParam({}) semantics in lib/helper.
    insertUrlParam({});
    if (isTestMode) window.location.pathname = pathname.replace(TEST_BASE_PATH, '');
    else window.location.replace(TEST_BASE_PATH + pathname);
  };
  return (
    <AppMode.Provider value={{
      testMode: computeMode(pathname),
      basePath: computeBasePath(pathname),
      switchMode
    }}>
      {children}
    </AppMode.Provider>
  );
};
export default AppModeProvider;
|
## 题目
**39. 组合总和**
>中等
给定一个无重复元素的数组 candidates 和一个目标数 target ,找出 candidates 中所有可以使数字和为 target 的组合。
candidates 中的数字可以`无限制重复`被选取。
说明:
* 所有数字(包括 target)都是正整数。
* 解集不能包含重复的组合。
示例 1:
```
输入:candidates = [2,3,6,7], target = 7,
所求解集为:
[
[7],
[2,2,3]
]
```
示例 2:
```
输入:candidates = [2,3,5], target = 8,
所求解集为:
[
[2,2,2,2],
[2,3,3],
[3,5]
]
```
提示:
* 1 <= candidates.length <= 30
* 1 <= candidates[i] <= 200
* candidate 中的每个元素都是独一无二的。
* 1 <= target <= 500
>来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/combination-sum
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
## 解法:回溯+剪枝
### 思路
先对数组排序,`递归`做选择,当`所选值candidates[i]>目标值target`则退出,`candidates[i+1]`即后面的值肯定大于target,直接退出
**注意:和40题的区别**
* 每个数字都是独一无二
* 每个数字都可以被无限制重复选择
### 代码
```js
/**
*
* @param {number[]} candidates
* @param {number} target
* @return {number[][]}
*/
var combinationSum = function(candidates, target) {
if (candidates.length <= 0) {
return [];
}
// 从小到大排序,是剪枝的关键
candidates.sort((a, b) => parseInt(a) - parseInt(b));
/**
*
* @param {*} candidates 原数组
* @param {*} target 此时要组合成的目标值
* @param {*} begin 数组开始遍历的索引
* @param {*} track 每次选择的结果数组
*/
this.backtrack = function(candidates, target, begin, track) {
// 终止条件,找到符合的数组则保存数组结果
if (target == 0) {
// 注意这里要存复制的数组,因为存引用会改变
res.push(track.slice());
return;
}
// 循环做选择
for (let i = begin; i < candidates.length; i++) {
// 剪枝,选择的数大于目标值,则后面的数也大于目标值,直接退出即可
if (target - candidates[i] < 0) {
break;
}
// 选择
track.push(candidates[i]);
// 回溯,i位置的数可以重复被选择
backtrack(candidates, target - candidates[i], i, track);
// 撤销选择
track.pop();
}
}
let res = [];
// 存每次选择的结果数组
let track = [];
backtrack(candidates, target, 0, track);
return res;
};
```
### 复杂度
* 时间O(n*2^n)
* 空间O(target),取决于递归的栈深度
|
#pragma once
#define MAX_CHAR 256
#define MAX_BIT 8
typedef unsigned int UINT;
typedef unsigned char UCHAR;
namespace JF
{
	namespace JFStudy
	{
		// One symbol of the input alphabet with its occurrence count.
		struct SymbolInfo
		{
			UCHAR Symbol;
			int Frequency;
		};
		// Node of the Huffman prefix tree; leaves carry the symbol data.
		struct HuffmanNode
		{
			SymbolInfo Data;
			HuffmanNode* pLeft;
			HuffmanNode* pRight;
		};
		// Growable bit stream holding encoded output.
		// NOTE(review): whether nSize counts bits or bytes is not visible
		// from the declarations — confirm in the implementation.
		struct BitBuffer
		{
			UCHAR* pBuffer;
			UINT nSize;
		};
		// Code word for one symbol (one 0/1 value per Code[] entry).
		struct HuffmanCode
		{
			UCHAR Code[MAX_BIT];
			int nSize;
		};
		HuffmanNode* CreateNode(SymbolInfo _NewData);
		void DestroyNode(HuffmanNode* _pNode);
		void DestroyTree(HuffmanNode* _pNode);
		void AddBit(BitBuffer* _pBuffer, char _Bit);
		void Encode(HuffmanNode** _ppTree, UCHAR* _pSource, BitBuffer* _pEncoded, HuffmanCode codeTable[MAX_CHAR]);
		// NOTE(review): parameter is a single pointer despite the _pp prefix.
		void Decode(HuffmanNode* _ppTree, BitBuffer* _pEncoded, UCHAR* _pDecoded);
		void BuildPrefixTree(HuffmanNode** _ppTree, SymbolInfo _SymbolInfoTable[MAX_CHAR]);
		void BuildCodeTable(HuffmanNode* _pTree, HuffmanCode _CodeTable[MAX_CHAR], UCHAR _Code[MAX_BIT], int _nSize);
		void PrintBinary(BitBuffer* _pBuffer);
	}
}
|
// https://www.geeksforgeeks.org/count-possible-paths-top-left-bottom-right-nxm-matrix/
#include <bits/stdc++.h>
using namespace std;
// Count monotone (down/right) paths from the top-left to the bottom-right
// cell of an r x c grid, by plain recursion. Exponential time; see the DP
// variant for larger grids.
int matrix_paths_rec(int r, int c) {
    // A 1-wide or 1-tall grid admits exactly one path.
    if (r == 1) return 1;
    if (c == 1) return 1;
    const int fromAbove = matrix_paths_rec(r - 1, c);
    const int fromLeft  = matrix_paths_rec(r, c - 1);
    return fromAbove + fromLeft;
}
// Dynamic-programming count of monotone (down/right) paths in an r x c grid.
// O(r*c) time and space.
int matrix_paths_dp(int r, int c) {
    // paths[i][j] = number of paths reaching cell (i, j). Cells in the first
    // row/column are reachable exactly one way, so initialize everything to 1
    // and only recompute the interior.
    std::vector<std::vector<int>> paths(r, std::vector<int>(c, 1));
    for (int i = 1; i < r; ++i) {
        for (int j = 1; j < c; ++j) {
            paths[i][j] = paths[i - 1][j] + paths[i][j - 1];
        }
    }
    return paths[r - 1][c - 1];
}
/*
Another Approach:(Using combinatorics)
In this approach We have to calculate m+n-2 C n-1
here which will be (m+n-2)! / (n-1)! (m-1)!
*/
// Demo driver: print the 4x4 path count via both implementations.
int main() {
    const int rows = 4;
    const int cols = 4;
    std::cout << matrix_paths_rec(rows, cols) << std::endl;
    std::cout << matrix_paths_dp(rows, cols) << std::endl;
    return 0;
}
|
using System;
namespace Immersion.Utility
{
    /// <summary>
    /// Bit flags naming the physics collision layers in use.
    /// </summary>
    [Flags]
    public enum CollisionLayers : uint
    {
        /// <summary>
        /// No layers. Added because [Flags] enums should define a zero value
        /// (framework design guideline CA1008); existing member values are
        /// unchanged, so this is backward compatible.
        /// </summary>
        None     = 0,
        Tracking = 0b00000010,
        World    = 0b00000100,
        Players  = 0b00001000,
        Entities = 0b00010000,
        Items    = 0b00100000,
    }
}
|
<?php
namespace App\Repository\Interfaces;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Support\Collection;
/**
 * Repository contract for User models. Inherits the generic Eloquent CRUD
 * surface from IEloquentRepository; declared separately so user-specific
 * query methods can be added without touching the base interface.
 */
interface IUserRepository extends IEloquentRepository {
}
|
// Barrel re-export: the component as default plus its named identifier
// types, from a single statement.
export { default, identifierTypes } from './ReactUtterances'
|
package emul
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
)
// Graph describes single-directed connections: conn[from][to] = edge delay.
type Graph map[int32]map[int32]float32

// Config provides network topology and functions configuration
type Config struct {
	conn          Graph                               // adjacency map with per-edge delays
	workFunctions map[string]func(*Process, *Message) // named node behaviors
	init          []configInit                        // initial messages read from the config file
}
// AddWorkFunction adds new work function to the list
func (c *Config) AddWorkFunction(name string, f func(*Process, *Message)) {
if c.workFunctions == nil {
c.workFunctions = map[string]func(*Process, *Message){}
}
c.workFunctions[name] = f
}
// AddEdgeDirected adds directed edge from one node to another and sets its delay
func (c *Config) AddEdgeDirected(from, to int32, delay float32) {
if c.conn == nil {
c.conn = Graph{}
}
if _, ok := c.conn[from]; !ok {
c.conn[from] = make(map[int32]float32)
}
c.conn[from][to] = delay
}
// AddEdgeUndirected adds edge between two nodes and sets its delay
func (c *Config) AddEdgeUndirected(from, to int32, delay float32) {
c.AddEdgeDirected(from, to, delay)
c.AddEdgeDirected(to, from, delay)
}
// configFile mirrors the on-disk JSON layout: network topology plus the
// initial messages to deliver.
type configFile struct {
	Network []configConnection
	Init    []configInit
}

// configConnection describes a (possibly fan-out) edge set: every From
// node is connected to every To node with the given Delay; Directed
// selects one-way vs two-way edges.
type configConnection struct {
	Directed bool
	From     []int32
	To       []int32
	Delay    float32
}

// configInit is one initial message Msg addressed to each node in To.
type configInit struct {
	To  []int32
	Msg string
}
// LoadFromFile loads config file and constructs Config instance accordingly
func (c *Config) LoadFromFile(filename string) {
jsonFile, err := os.Open(filename)
if err != nil {
fmt.Println(err)
}
fmt.Printf("[Config] Loading from \"%s\"\n", filename)
byteValue, _ := ioutil.ReadAll(jsonFile)
defer jsonFile.Close()
var config configFile
err = json.Unmarshal([]byte(byteValue), &config)
if err != nil {
fmt.Println(err)
}
for _, conn := range config.Network {
for _, from := range conn.From {
for _, to := range conn.To {
if conn.Directed {
c.AddEdgeDirected(from, to, conn.Delay)
} else {
c.AddEdgeUndirected(from, to, conn.Delay)
}
}
}
}
c.init = config.Init
}
|
package com.xiangronglin.novel.rest.application.service.file
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.core.io.InputStreamResource
import org.springframework.core.io.Resource
import org.springframework.http.MediaType
import org.springframework.http.ResponseEntity
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.PathVariable
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RestController
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.fileSize
import kotlin.io.path.inputStream
@ExperimentalPathApi
@RestController
@RequestMapping("/file")
class FileController(
    @Autowired private val fileService: FileService
) {
    companion object {
        private val LOG = LoggerFactory.getLogger(FileController::class.java)
    }

    /**
     * GET /file/{id}: stream the stored novel identified by [id] back to the
     * client as an octet-stream attachment. Returns 404 when the id is
     * unknown to the file service.
     */
    @GetMapping("/{id}")
    fun downloadFile(@PathVariable(name = "id") id: String): ResponseEntity<Resource> {
        LOG.info("GET file/$id")
        val file = fileService.getNovel(id) ?: return ResponseEntity.notFound().build()
        // Wrap the path's stream so Spring copies it to the response lazily.
        val resource = InputStreamResource(file.inputStream())
        return ResponseEntity.ok()
            .header("Content-Disposition", "attachment; filename=$id")
            .contentLength(file.fileSize())
            .contentType(MediaType.APPLICATION_OCTET_STREAM)
            .body(resource)
    }
}
|
---
layout: single
title: "Constructions"
excerpt: "Grammar constructions"
permalink: /constructions/
sidebar:
nav: korean
---
[~ㄹ/을 수
있/없다](https://www.howtostudykorean.com/unit-2-lower-intermediate-korean-grammar/unit-2-lessons-42-50/lesson-45/#451)
is used to create the meaning of "one can..." or "one cannot..."
[~고 있다](https://www.howtostudykorean.com/unit1/unit-1-lessons-17-25-2/lesson-18/#ppt) is attached
to verbs to give it a meaning of "I am ___ing."
[~(으)면](https://www.howtostudykorean.com/unit-2-lower-intermediate-korean-grammar/unit-2-lessons-42-50/lesson-43/#431)
is used to create the meaning of "when" or "if."
[~ㄹ/을수록](https://www.howtostudykorean.com/unit-6/lessons-126-133/lesson-132/#1321) is attached
to verbs, adjectives and 이다 to indicate that, as one situation changes to some degree, another
situation is affected and also changes to that degree.
[~스럽다](https://www.howtostudykorean.com/unit1/unit-1-lessons-9-16/lesson-16/#s2) can be added to
some nouns to change them into an adjective.
|
module Queries
  # Autocomplete query for Containers: runs a sequence of increasingly fuzzy
  # identifier-based queries (most exact first) and returns up to 40 unique
  # records.
  class Container::Autocomplete < Queries::Query

    # @return [Arel::Table] the containers table
    def table
      ::Container.arel_table
    end

    # @return [ActiveRecord::Relation] base scope for autocomplete queries
    def base_query
      ::Container.select('containers.*')
    end

    # @return [Array<ActiveRecord::Relation>]
    #   candidate queries in priority order, each scoped to the current
    #   project when a project_id is set.
    def base_queries
      # Fix: the original called `queries.compact!` and then tested
      # `queries.nil?` — compact! mutates in place, so the local variable
      # could never be nil and that early return was dead code.
      queries = [
        autocomplete_identifier_cached_exact,
        autocomplete_identifier_identifier_exact,
        autocomplete_exact_id,
        autocomplete_identifier_cached_like,
      ].compact

      queries.map do |q|
        project_id ? q.where(project_id: project_id) : q
      end
    end

    # @return [Array<Container>] up to 40 unique matches; later (fuzzier)
    #   queries are only executed while fewer than 40 results are collected.
    def autocomplete
      result = []
      base_queries.each do |q|
        result += q.to_a
        result.uniq!
        break if result.count > 39
      end
      result[0..39]
    end

  end
end
|
package kekmech.ru.mpeiapp.deeplink.di
import kekmech.ru.common_di.ModuleProvider
import kekmech.ru.mpeiapp.deeplink.DeeplinkHandler
import kekmech.ru.mpeiapp.deeplink.DeeplinkHandlersProcessor
import kekmech.ru.mpeiapp.deeplink.handlers.*
// DI module that assembles every DeeplinkHandler implementation into a
// single DeeplinkHandlersProcessor (a fresh instance per factory call).
// NOTE(review): list order presumably determines handler priority —
// confirm in DeeplinkHandlersProcessor.
object DeeplinkModule : ModuleProvider({
    factory {
        val handlers = listOf<DeeplinkHandler>(
            BarsDeeplinkHandler(get(), get()),
            MainScreenDeeplinkHandler(get(), get()),
            MapDeeplinkHandler(get(), get(), get()),
            ScheduleDeeplinkHandler(get(), get()),
            SearchScreenDeeplinkHandler(get(), get(), get()),
            SettingsScreenDeeplinkHandler(get(), get(), get())
        )
        DeeplinkHandlersProcessor(handlers)
    }
})
|
import React, { Component } from 'react';
import { Text, View, StyleSheet, Button, TextInput,TouchableOpacity,Icon } from 'react-native';
import { Constants } from 'expo';
export default class App extends Component {
state = {
inputValue1: "New Password",
inputValue: "Confirm Password"
};
_handleTextChange = inputValue => {
this.setState({ inputValue });
};
_handleTextChange1 = inputValue1 => {
this.setState({ inputValue1 });
};
render() {
return (
<View style={styles.container}>
<Text style={styles.yellow}>New Password</Text>
<TextInput
value={this.state.inputValue1}
onChangeText={this._handleTextChange1}
style={{height: 44, padding: 8 }}
/>
<Text style={styles.yellow}>Confirm Password</Text>
<TextInput
value={this.state.inputValue}
onChangeText={this._handleTextChange}
style={{height: 44, padding: 8, justifyContent: 'center' }}
/>
<View style={styles.alternativeLayoutButtonContainer}>
<Button
onPress={this._onPressButton}
title="Change"
color="#f1c40f"
/>
</View>
</View>
);
}
}
// Shared styles for the password screen.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
  },
  alternativeLayoutButtonContainer: {
    margin: 20,
    flexDirection: 'row',
    // NOTE(review): textAlign is a Text style; it appears to have no effect
    // on a View container — confirm whether it can be removed.
    textAlign: 'center',
    justifyContent: 'center'
  },
  // Yellow bold labels matching the button color (#f1c40f).
  yellow:{
    margin: 10,
    color: '#f1c40f',
    fontWeight: 'bold',
    fontSize: 15
  },
});
|
# There is no DOM without doom.
## Para empezar
```bash
npm install
npm run start
```
## Resumen
Interacción con el DOM desde JS vanilla.
Especial atención a:
* Adecuada refactorización.
* Validación de tipos y propiedades en los elementos de DOM manipulados.
* Reducción de la recursividad sobre el DOM.
* Manejo de los errores.
|
# 821. Time Intersection
Difficulty: Medium
http://www.lintcode.com/en/problem/time-intersection/
Give two users' ordered online time series, and each section records the user's login time point x and offline time point y. Find out the time periods when both users are online at the same time, and output in ascending order.
**Notice**
* We guarantee that the length of online time series meet 1 <= len <= 1e6.
* For a user's online time series, any two of its sections do not intersect.
**Example**
Given seqA = [[1,2],[5,100]], seqB = [[1,6]], return [[1,2],[5,6]].
```
Explanation:
In these two time periods [1,2],[5,6], both users are online at the same time.
```
Given seqA = [[1,2],[10,15]], seqB = [[3,5],[7,9]], return [].
```
Explanation:
There is no time period, both users are online at the same time.
```
|
import BaseShapes from './baseshapes';
import {SquareRootOfTwo} from '../math';
import { DIAMOND as SVG_DIAMOND } from './svgshapefactory';
import { DIAMOND as CANVAS_DIAMOND } from './bitmapshapefactory';
/**
 * Halftone shape generator that lays out diamonds on a staggered lattice.
 * Sizing fields (W, H, A) and pixel bucketing come from BaseShapes.
 */
export class Diamonds extends BaseShapes {
  static get ShapeName() { return 'diamonds'; }

  /**
   * Walk the image area on a staggered lattice: advance across each row in
   * steps of A*sqrt(2); when a row overflows the width, drop down by
   * A/sqrt(2) and alternate the horizontal offset. Each visited point is
   * recorded into its pixel bucket.
   */
  processPixels() {
    const colStep = this.A * SquareRootOfTwo;
    const rowStep = this.A / SquareRootOfTwo;
    let x = 0;
    let y = 0;
    let staggered = true;
    while (y < this.H) {
      this.pushToBucket(x, y);
      x += colStep;
      if (x > this.W) {
        y += rowStep;
        x = staggered ? rowStep : 0;
        staggered = !staggered;
      }
    }
  }

  /**
   * Shape radius scaled by the square root of the requested fill rate.
   * @param wantRate desired fill fraction
   */
  calculateR(wantRate) {
    return this.A * Math.sqrt(wantRate);
  }

  /**
   * Produce SVG markup for one diamond at (cx, cy) with radius r.
   */
  renderSVGShape(cx, cy, r) {
    return SVG_DIAMOND(cx, cy, r);
  }

  /**
   * Draw one diamond onto the output canvas context.
   */
  renderBitmapShape(cx, cy, r) {
    CANVAS_DIAMOND(this.outputCanvasContext, cx, cy, r);
  }
}
|
#!/bin/sh
set -e

# Pairwise iperf bandwidth tests between prod nodes across three zones.
# Each run's output is indented into a code block and appended to a
# timestamped markdown report.
#
# Fix: all variable expansions are now quoted (ShellCheck SC2086) so the
# script stays correct even if IFS or the values ever change.

prod_zone_a1="10.100.100.135"
prod_zone_a2="10.100.101.234"
prod_zone_b1="10.100.100.111"
prod_zone_b2="10.100.101.14"
prod_zone_c1="10.100.100.158"
prod_zone_c2="10.100.101.177"

filename="iperf_prod_$(date +%Y-%m-%d_%Hh%Mm%Ss).md"
touch "$filename"

# run_test <client-ip> <server-ip>: run one iperf pass in the cluster,
# indent its output for markdown, and append it to the report.
run_test() {
    ./iperf-k8s.sh -n esa-csc-s2-prd-cce-rook-ceph -c "$1" -s "$2" --run 2>&1 | sed -e 's/^/    /' | tee -a "$filename"
    sleep 5
}

echo "# Zone a to b" >> "$filename"
run_test "$prod_zone_a1" "$prod_zone_b1"
echo "# Zone b to a" >> "$filename"
run_test "$prod_zone_b2" "$prod_zone_a2"
echo "# Zone a to c" >> "$filename"
run_test "$prod_zone_a1" "$prod_zone_c1"
echo "# Zone c to a" >> "$filename"
run_test "$prod_zone_c2" "$prod_zone_a2"
echo "# Zone b to c" >> "$filename"
run_test "$prod_zone_b1" "$prod_zone_c1"
echo "# Zone c to b" >> "$filename"
run_test "$prod_zone_c2" "$prod_zone_b2"
echo "# Zone a to a" >> "$filename"
run_test "$prod_zone_a1" "$prod_zone_a2"
echo "# Zone b to b" >> "$filename"
run_test "$prod_zone_b1" "$prod_zone_b2"
echo "# Zone c to c" >> "$filename"
run_test "$prod_zone_c1" "$prod_zone_c2"
|
require 'ruby_event_store'
require "ruby_event_store/outbox"
require "ruby_event_store/outbox/cli"
require "ruby_event_store/outbox/metrics/null"
require "ruby_event_store/outbox/metrics/influx"
require_relative '../../../support/helpers/rspec_defaults'
require_relative '../../../support/helpers/schema_helper'
require_relative '../../../support/helpers/time_enrichment'
require_relative './support/db'
require 'rails'
require 'active_support/testing/time_helpers.rb'
# Global RSpec setup: time-travel helpers roll back after every example,
# and Redis-tagged examples start from a flushed database.
RSpec.configure do |config|
  config.include ActiveSupport::Testing::TimeHelpers
  config.after(:each) { travel_back }
  config.before(:each, redis: true) do |example|
    # `redis` is expected to be defined by the including spec (e.g. a let).
    redis.flushdb
  end
end

# VERBOSE env var toggles schema-load chatter; default DB is in-memory SQLite.
$verbose = ENV.has_key?('VERBOSE') ? true : false
ActiveRecord::Schema.verbose = $verbose
ENV['DATABASE_URL'] ||= 'sqlite3::memory:'
# Assigns a small stable integer id to each worker process spawned during
# mutant runs, using a Redis list as the coordination point. The process
# that constructed the generator (the "main" process) always gets
# `value_for_main_pid`; every other pid gets its 1-based position in the
# Redis list, appending itself on first sight.
class MutantIdGenerator
  def initialize(redis_url, value_for_main_pid, name)
    @main_pid = Process.pid
    @redis = Redis.new(url: redis_url)
    @value_for_main_pid = value_for_main_pid
    @redis_key = "mutant-something-#{name}"
    # Start from a clean list so ids from a previous run don't leak in.
    @redis.del(@redis_key)
  end

  # Stable id for the calling process (see class comment).
  def id_for_current_pid
    pid = Process.pid
    if pid == @main_pid
      @value_for_main_pid
    else
      get_id_for_pid(pid) || set_id_for_pid(pid)
    end
  end

  private

  # 1-based position of pid in the list, or nil when not yet registered.
  def get_id_for_pid(pid)
    position = @redis.lpos(@redis_key, pid)
    position.nil? ? nil : position + 1
  end

  # Registers pid; RPUSH returns the new list length, which equals the
  # 1-based position of the freshly appended pid.
  def set_id_for_pid(pid)
    length_of_list_after_push = @redis.rpush(@redis_key, pid)
    length_of_list_after_push
  end
end
# Shared generator: the main process maps to Redis DB 0.
RedisMutantIdGenerator = MutantIdGenerator.new("redis://localhost:6379/0", 0, "redis")

# Per-process Redis database selection so concurrent mutant workers do not
# trample each other's keys. REDIS_URL, when set, overrides the isolation.
module RedisIsolation
  def self.redis_url
    ENV["REDIS_URL"] || per_process_redis_url_for_mutant_runs
  end

  private

  # Each worker gets its own numbered Redis database.
  def self.per_process_redis_url_for_mutant_runs
    "redis://localhost:6379/#{RedisMutantIdGenerator.id_for_current_pid}"
  end
end
# Deterministic clock for tests: each call to #now returns a time `tick_by`
# seconds after the previous one, starting at `start`.
class TickingClock
  def initialize(start: Time.now.utc, tick_by: 1)
    # NOTE(review): `change` is ActiveSupport; `start.change(usec: start.usec)`
    # reads like a normalizing copy of `start` — confirm it isn't meant to
    # truncate sub-microsecond precision instead.
    @start = start.change(usec: start.usec)
    @next = @start.dup
    @tick_by = tick_by
  end

  # Return the current tick and advance the clock.
  def now
    current = @next.dup
    @next += tick_by
    current
  end

  # Time at a given tick index, without advancing the clock.
  def tick(index)
    start.dup + index * tick_by
  end

  attr_reader :start, :tick_by
end
|
-- South Africa
BEGIN;
UPDATE dim_calendar
SET hol_za = FALSE;
-- 1 January New Year's Day 1910
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 1
AND EXTRACT( MONTH FROM calendar_date ) = 1
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- 21 March Human Rights Day 1990[2]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 21
AND EXTRACT( MONTH FROM calendar_date ) = 3
AND EXTRACT( YEAR FROM calendar_date ) >= 1990
;
-- The Friday before Easter Sunday Good Friday 1910
UPDATE dim_calendar
SET hol_za = TRUE
WHERE calc_western_good_fri = TRUE
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- The Monday following Easter Sunday Family Day 1910
UPDATE dim_calendar
SET hol_za = TRUE
WHERE calc_western_easter_mon = TRUE
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- 27 April Freedom Day 1994[2]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 27
AND EXTRACT( MONTH FROM calendar_date ) = 4
AND EXTRACT( YEAR FROM calendar_date ) >= 1994
;
-- 1 May Workers' Day 1910[3]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 1
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- 16 June Youth Day 1995[4]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 16
AND EXTRACT( MONTH FROM calendar_date ) = 6
AND EXTRACT( YEAR FROM calendar_date ) >= 1995
;
-- 9 August National Women's Day 1995[5]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 9
AND EXTRACT( MONTH FROM calendar_date ) = 8
AND EXTRACT( YEAR FROM calendar_date ) >= 1995
;
-- 24 September Heritage Day 1995[6]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 24
AND EXTRACT( MONTH FROM calendar_date ) = 9
AND EXTRACT( YEAR FROM calendar_date ) >= 1995
;
-- 16 December Day of Reconciliation 1995 / Dingaan's Day /Day of the Covenant /Day of the Vow/ Day of Reconciliation
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 16
AND EXTRACT( MONTH FROM calendar_date ) = 12
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- 25 December Christmas Day 1910
UPDATE dim_calendar
SET hol_za = TRUE
WHERE calc_western_christmas = TRUE
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- 26 December Day of Goodwill (formerly Boxing Day) 1910
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 26
AND EXTRACT( MONTH FROM calendar_date ) = 12
AND EXTRACT( YEAR FROM calendar_date ) >= 1910
;
-- The Public Holidays Act (Act No 36 of 1994)[7] states that whenever a public holiday falls on a Sunday,
-- the Monday following it will be a public holiday.
WITH cte AS (
SELECT calendar_date AS sunday_hol,
calendar_date + INTERVAL '1 DAY' as following_mon
FROM dim_calendar AS dc2
WHERE hol_za = TRUE
AND EXTRACT(YEAR FROM calendar_date) >= 1994
AND EXTRACT(DOW FROM calendar_date) = 0
)
UPDATE dim_calendar
SET hol_za = TRUE
FROM cte
WHERE dim_calendar.calendar_date = cte.following_mon
;
-- Since 1994 election days have been declared ad hoc public holidays:
-- National and provincial government elections – 2 June 1999[8]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 2
AND EXTRACT( MONTH FROM calendar_date ) = 6
AND EXTRACT( YEAR FROM calendar_date ) = 1999
;
-- National and provincial government elections – 14 April 2004[9]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 14
AND EXTRACT( MONTH FROM calendar_date ) = 4
AND EXTRACT( YEAR FROM calendar_date ) = 2004
;
-- Local government elections – 1 March 2006[10]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 1
AND EXTRACT( MONTH FROM calendar_date ) = 3
AND EXTRACT( YEAR FROM calendar_date ) = 2006
;
-- National and provincial government elections – 22 April 2009[11]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 22
AND EXTRACT( MONTH FROM calendar_date ) = 4
AND EXTRACT( YEAR FROM calendar_date ) = 2009
;
-- Local government elections – 18 May 2011[12]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 18
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) = 2011
;
-- National and provincial government elections – 7 May 2014[13]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 7
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) = 2014
;
-- Local government elections – 3 August 2016[14]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 3
AND EXTRACT( MONTH FROM calendar_date ) = 8
AND EXTRACT( YEAR FROM calendar_date ) = 2016
;
-- 31 December 1999 and 2 January 2000 were declared public holidays to accommodate the Y2K changeover,
-- and 3 January 2000 was automatically a public holiday because the previous holiday was a Sunday.[15]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 31
AND EXTRACT( MONTH FROM calendar_date ) = 12
AND EXTRACT( YEAR FROM calendar_date ) = 1999
;
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) IN (2,3)
AND EXTRACT( MONTH FROM calendar_date ) = 1
AND EXTRACT( YEAR FROM calendar_date ) = 2000
;
-- 2 May 2008 was declared a public holiday when Human Rights Day and Good Friday coincided on 21 March 2008.[16]
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 2
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) = 2008
;
-- 27 December 2011 was declared a holiday by (acting) president Kgalema Motlanthe & 2016
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 27
AND EXTRACT( MONTH FROM calendar_date ) = 12
AND EXTRACT( YEAR FROM calendar_date ) IN (2011, 2016)
;
-- Historical public holidays
-- 1 January New Year's Day 1910–present
-- The Friday before Easter Sunday Good Friday 1910–present
-- The Monday following Easter Sunday Easter Monday / Family Day 1910–1979 / 1980–present
-- 6 April Van Riebeeck's Day (1952-1974)
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 6
AND EXTRACT( MONTH FROM calendar_date ) = 4 -- fixed: was 5 (May); the holiday is 6 April per the comment above
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1952 AND 1974
;
-- Founder's Day (6 April, successor of Van Riebeeck's Day)
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 6
AND EXTRACT( MONTH FROM calendar_date ) = 4 -- fixed: was 5 (May); Founder's Day fell on 6 April
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1982 AND 1994
;
-- 1st Friday in May Workers' Day 1987–1989
WITH cte AS (
SELECT EXTRACT(YEAR FROM dc2.calendar_date) AS yr,
MIN(calendar_date) AS first_fri_may
FROM dim_calendar AS dc2
WHERE EXTRACT(MONTH FROM calendar_date) = 5
AND EXTRACT(DOW FROM calendar_date) = 5
AND EXTRACT(YEAR FROM calendar_date) BETWEEN 1987 AND 1989
GROUP BY EXTRACT(YEAR FROM dc2.calendar_date)
)
UPDATE dim_calendar
SET hol_za = TRUE
FROM cte
WHERE dim_calendar.calendar_date = cte.first_fri_may
;
-- 1 May Workers' Day 1990–present
-- 40th day after Easter Ascension Day 1910–1993
UPDATE dim_calendar
SET hol_za = TRUE
WHERE calc_western_ascension_thu = TRUE
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1910 AND 1993
;
-- 24 May Victoria Day / Empire Day 1910–1951
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 24
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1910 AND 1951
;
-- 31 May Union Day / Republic Day
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 31
AND EXTRACT( MONTH FROM calendar_date ) = 5
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1910 AND 1993
;
-- 2nd Monday in July Queen's Birthday 1952–1960
WITH cte AS (
SELECT EXTRACT(YEAR FROM dc2.calendar_date) AS yr,
MIN(calendar_date) AS second_mon_jul
FROM dim_calendar AS dc2
WHERE EXTRACT(MONTH FROM calendar_date) = 7
AND EXTRACT( DAY FROM calendar_date ) > 7
AND EXTRACT( DOW FROM calendar_date) = 1
AND EXTRACT( YEAR FROM calendar_date) BETWEEN 1952 AND 1960
GROUP BY EXTRACT(YEAR FROM dc2.calendar_date)
)
UPDATE dim_calendar
SET hol_za = TRUE
FROM cte
WHERE dim_calendar.calendar_date = cte.second_mon_jul
;
-- 10 July Family Day 1961–1974
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 10
AND EXTRACT( MONTH FROM calendar_date ) = 7
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1961 AND 1974
;
-- 1st Monday in August King's Birthday 1910–1951
WITH cte AS (
SELECT EXTRACT(YEAR FROM dc2.calendar_date) AS yr,
MIN(calendar_date) AS first_mon_aug
FROM dim_calendar AS dc2
WHERE EXTRACT(MONTH FROM calendar_date) = 8
AND EXTRACT(DOW FROM calendar_date) = 1
AND EXTRACT(YEAR FROM calendar_date) BETWEEN 1910 AND 1951
GROUP BY EXTRACT(YEAR FROM dc2.calendar_date)
)
UPDATE dim_calendar
SET hol_za = TRUE
FROM cte
WHERE dim_calendar.calendar_date = cte.first_mon_aug
;
-- 1st Monday in September Settlers' Day 1952–1979
WITH cte AS (
SELECT EXTRACT(YEAR FROM dc2.calendar_date) AS yr,
MIN(calendar_date) AS first_mon_sep
FROM dim_calendar AS dc2
WHERE EXTRACT(MONTH FROM calendar_date) = 9
AND EXTRACT(DOW FROM calendar_date) = 1
AND EXTRACT(YEAR FROM calendar_date) BETWEEN 1952 AND 1979
GROUP BY EXTRACT(YEAR FROM dc2.calendar_date)
)
UPDATE dim_calendar
SET hol_za = TRUE
FROM cte
WHERE dim_calendar.calendar_date = cte.first_mon_sep
;
-- 10 October Kruger Day 1952–1993
UPDATE dim_calendar
SET hol_za = TRUE
WHERE EXTRACT( DAY FROM calendar_date ) = 10
AND EXTRACT( MONTH FROM calendar_date ) = 10
AND EXTRACT( YEAR FROM calendar_date ) BETWEEN 1952 AND 1993
;
-- 16 December Dingaan's Day /Day of the Covenant /Day of the Vow/ Day of Reconciliation
-- 25 December Christmas Day 1910–present
-- 26 December Boxing Day / Day of Goodwill
COMMIT;
|
#!/usr/bin/env bash
# Rebuild the movies-service image from scratch: remove the running
# container and old image, prune dangling images and unused volumes,
# then build a fresh image from the local Dockerfile.
docker rm -f movies-service
docker rmi movies-service
# -f skips the interactive "Are you sure?" prompt so the script does not
# hang when run non-interactively (CI, cron, etc.).
docker image prune -f
docker volume prune -f
docker build -t movies-service .
|
import React from "react";
import Post from './Post';
import posts from '../data/posts.json'
class Posts extends React.Component {
//todo fetch the first 8-9 posts to get the links to the pages
//todo - auto limi the number of posts from the fetch helper rather than from here in the Component
render() {
//console.log(links);
let limit;
if(this.props.limit != undefined){
limit = Number(this.props.limit)
}
return (
<div className="Posts" >
{posts.map((post,index)=>{
if(!limit||index<limit){
return <Post description={post.description} author={post.author}/>
}
})}
</div>
);
}
}
export default Posts;
|
#![cfg(feature = "test")]
use std::sync::Arc;

use sentry::{
    protocol::{Breadcrumb, Level},
    test::TestTransport,
    ClientOptions, Hub,
};
use sentry_tower::SentryLayer;
// fixed: crate name is `tower`, not `tower_` (would not compile)
use tower::{ServiceBuilder, ServiceExt};
// Verifies that SentryLayer binds its own hub per request: events and
// breadcrumbs recorded inside the service go to the layer's hub/transport,
// while events recorded outside stay on the ambient hub.
#[test]
fn test_tower_hub() {
    // Create a fake transport for new hubs
    let transport = TestTransport::new();
    let opts = ClientOptions {
        dsn: Some("https://public@sentry.invalid/1".parse().unwrap()),
        transport: Some(Arc::new(transport.clone())),
        ..Default::default()
    };
    let events = sentry::test::with_captured_events(|| {
        // This breadcrumb should be in all subsequent requests
        sentry::add_breadcrumb(Breadcrumb {
            message: Some("Starting service...".to_owned()),
            level: Level::Info,
            ..Default::default()
        });
        sentry::capture_message("Started service", Level::Info);
        #[allow(clippy::redundant_closure)]
        let hub = Arc::new(Hub::with(|hub| Hub::new_from_top(hub)));
        hub.bind_client(Some(Arc::new(opts.into())));
        let service = ServiceBuilder::new()
            .layer(SentryLayer::new(hub))
            .service_fn(|req: String| async move {
                // This breadcrumb should not be seen in any other hub
                sentry::add_breadcrumb(Breadcrumb {
                    message: Some(format!("Got request with arg: {}", req)),
                    level: Level::Info,
                    ..Default::default()
                });
                sentry::capture_message("Request failed", Level::Error);
                Err::<(), _>(format!("Can't greet {}, sorry.", req))
            });
        // Single-threaded runtime is enough; the service future is driven
        // to completion synchronously via block_on.
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap();
        let res = rt.block_on(service.oneshot("World".to_owned()));
        assert_eq!(res, Err("Can't greet World, sorry.".to_owned()));
    });
    // Ambient hub saw only the "Started service" event and the first crumb.
    assert_eq!(events.len(), 1);
    let event = events.into_iter().next().unwrap();
    assert_eq!(event.message, Some("Started service".into()));
    assert_eq!(event.breadcrumbs.len(), 1);
    assert_eq!(
        event.breadcrumbs[0].message,
        Some("Starting service...".into())
    );
    // The layer's hub (new_from_top of the ambient hub) saw the in-request
    // event, carrying both the inherited crumb and the per-request crumb.
    let events = transport.fetch_and_clear_events();
    assert_eq!(events.len(), 1);
    let event = events.into_iter().next().unwrap();
    assert_eq!(event.message, Some("Request failed".into()));
    assert_eq!(event.breadcrumbs.len(), 2);
    assert_eq!(
        event.breadcrumbs[0].message,
        Some("Starting service...".into())
    );
    assert_eq!(
        event.breadcrumbs[1].message,
        Some("Got request with arg: World".into())
    );
}
|
using antlr.collections.impl;
namespace antlr
{
/// <summary>
/// A token stream wrapper that silently drops tokens whose type is in a
/// configurable discard mask.
/// </summary>
public class TokenStreamBasicFilter : TokenStream
{
    protected internal BitSet discardMask;
    protected internal TokenStream input;

    public TokenStreamBasicFilter(TokenStream input)
    {
        this.input = input;
        this.discardMask = new BitSet();
    }

    /// <summary>Adds a single token type to the discard mask.</summary>
    public virtual void discard(int ttype)
    {
        discardMask.add(ttype);
    }

    /// <summary>Replaces the discard mask wholesale.</summary>
    public virtual void discard(BitSet mask)
    {
        discardMask = mask;
    }

    /// <summary>
    /// Returns the next token from the underlying stream whose type is not
    /// in the discard mask, or null when the stream is exhausted.
    /// </summary>
    public virtual IToken nextToken()
    {
        IToken candidate;
        do
        {
            candidate = input.nextToken();
        }
        while (candidate != null && discardMask.member(candidate.Type));
        return candidate;
    }
}
}
|
#region Copyright Syncfusion Inc. 2001-2021.
// Copyright Syncfusion Inc. 2001-2021. All rights reserved.
// Use of this code is subject to the terms of our license.
// A copy of the current license can be obtained at any time by e-mailing
// licensing@syncfusion.com. Any infringement will be prosecuted under
// applicable laws.
#endregion
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xamarin.Forms.Internals;
namespace SampleBrowser.SfListView
{
[Preserve(AllMembers = true)]
/// <summary>
/// Supplies the sample to-do items shown by the SfListView demo, each
/// bucketed into a due-date category.
/// </summary>
public class ToDoListRepository
{
    #region Constructor
    public ToDoListRepository()
    {
    }
    #endregion
    #region Methods
    /// <summary>
    /// Builds the full list of to-do items, pairing each name with its
    /// position-based category.
    /// </summary>
    internal ObservableCollection<ToDoItem> GetToDoList()
    {
        // Note: the unused Random instance from the original code was
        // removed; item order and categories are fully deterministic.
        var toDoItems = new ObservableCollection<ToDoItem>();
        // Length avoids the needless LINQ Count() enumeration on an array.
        for (int i = 0; i < toDoLists.Length; i++)
        {
            toDoItems.Add(new ToDoItem()
            {
                Name = toDoLists[i],
                CategoryName = GetCategoryList(i)
            });
        }
        return toDoItems;
    }
    /// <summary>
    /// Maps an item position to a category: 0-3 "This Week", 4-7
    /// "Next Week", 8-12 "Next Month", everything else "Later".
    /// </summary>
    private string GetCategoryList(int pos)
    {
        string toDoCategory;
        if (pos < 4)
            toDoCategory = toDoCategoryLists[0];
        else if (pos < 8)
            toDoCategory = toDoCategoryLists[1];
        else if (pos < 13)
            toDoCategory = toDoCategoryLists[2];
        else
            toDoCategory = toDoCategoryLists[3];
        return toDoCategory;
    }
    string[] toDoLists = new string[]
    {
        "Reserve party venue",
        "Choose party attire",
        "Compile guest list",
        "Choose invitation",
        "Create wedding website",
        "Buy wedding ring",
        "Apply marriage license",
        "Hire photographer",
        "Buy wedding dress",
        "Refine guest list",
        "Send invitations",
        "Hire florist",
        "Shop for decorations",
        "Hire musicians",
        "Arrange catering",
        "Shop for groceries",
        "Book hotel for guest",
        "Plan honeymoon",
        "Book transportation",
        "Order wedding cake",
    };
    string[] toDoCategoryLists = new string[]
    {
        "This Week",
        "Next Week",
        "Next Month",
        "Later"
    };
    #endregion
}
}
|
/*
* Copyright 2020 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package geotrellis.server.ogc.ows
import java.net.URI
/** Service-level metadata; roughly corresponds to ows service identification and service providers */
/** Top-level OWS service metadata: identification plus provider details. */
case class ServiceMetadata(
  identification: Identification,
  provider: Provider
)
/** Roughly corresponds to ows:ServiceIdentification. */
case class Identification(
  title: String,
  description: String,
  keywords: List[String],
  profile: List[URI],
  fees: Option[String],
  accessConstraints: List[String]
)
/** Roughly corresponds to ows:ServiceProvider. */
case class Provider(
  name: String,
  site: Option[String],
  contact: Option[ResponsiblePartySubset]
)
/** corresponds roughly to opengis.ows.ResponsiblePartySubsetType */
case class ResponsiblePartySubset(
  name: Option[String],
  position: Option[String],
  role: Option[String]
)
|
pub mod pattern {
    use std::io::Write;

    /// Writes every line of `content` that contains `pattern` to `writer`,
    /// one matching line per output line.
    ///
    /// Returns any I/O error raised while writing.
    pub fn find_matches(content: &str, pattern: &str, mut writer: impl Write) -> Result<(), std::io::Error> {
        for line in content.lines() {
            if line.contains(pattern) {
                writeln!(writer, "{}", line)?;
            }
        }
        Ok(())
    }

    #[test]
    fn find_a_match() {
        // Write into an in-memory buffer so the test actually asserts on
        // the output (the old version printed to stdout and silently
        // dropped the must_use Result).
        let mut out = Vec::new();
        find_matches("lorem ipsum\ndolor sit amet", "lorem", &mut out).unwrap();
        assert_eq!(out, b"lorem ipsum\n");
    }

    #[test]
    fn find_a_match_use_string() {
        let mut result = Vec::new();
        find_matches("lorem ipsum\ndolor sit amet", "lorem", &mut result).unwrap();
        assert_eq!(result, b"lorem ipsum\n");
        // The bytes written are valid UTF-8 and round-trip to a String.
        let result = String::from_utf8(result).unwrap();
        assert_eq!(result, "lorem ipsum\n");
    }
}
|
package de.sscholz.util
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.graphics.OrthographicCamera
import com.badlogic.gdx.math.Matrix4
import com.badlogic.gdx.math.Vector2
import com.badlogic.gdx.utils.viewport.FitViewport
import com.badlogic.gdx.utils.viewport.Viewport
import de.sscholz.Global
import de.sscholz.extensions.toVector2
import de.sscholz.extensions.toVector3
import ktx.math.plus
import ktx.math.vec2
// Singleton wrapper around a libGDX OrthographicCamera plus a FitViewport.
// The camera covers only the "physics" region of the screen (everything
// below the HUD strip at the top); all coordinates below are world units
// unless a name says pixels.
object camera {
    val physicsScreenWidth by lazy { screenWidth }
    // HUD occupies the top of the screen, so the playable area is shorter.
    val physicsScreenHeight by lazy { screenHeight - hudTopTotalHeight }
    val physicsScreenDyBottom by lazy { 0f }
    val physicsScreenDx by lazy { 0f }
    private val orthoCam = OrthographicCamera(physicsScreenWidth, physicsScreenHeight)
    val heightToWidthRatio: Float
        get() = orthoCam.viewportHeight / orthoCam.viewportWidth
    val viewportWidth: Float get() = orthoCam.viewportWidth
    val viewportHeight: Float get() = orthoCam.viewportHeight
    val combinedMatrix: Matrix4 get() = orthoCam.combined
    // Camera center in world coordinates (z is always 0).
    val position: Vector2 get() = vec2(orthoCam.position.x, orthoCam.position.y)
    private lateinit var viewport: Viewport
    // Converts a screen-pixel position (origin top-left, y down) to world
    // coordinates, accounting for the HUD offset and the camera position.
    fun screenToWorldCoordinates(screenXInPx: Float, screenYInPx: Float): Vector2 {
        val xNormalized = (screenXInPx - physicsScreenDx) / physicsScreenWidth - 0.5f
        // Flip y: screen pixels grow downward, world y grows upward.
        val yNormalized = (Gdx.graphics.height - screenYInPx - 1 -
                physicsScreenDyBottom) / physicsScreenHeight - 0.5f
        return vec2(xNormalized * viewportWidth, yNormalized * viewportHeight) + position
    }
    fun currentMouseWorldCoordinates(): Vector2 {
        return screenToWorldCoordinates(Gdx.input.x.toFloat(), Gdx.input.y.toFloat())
    }
    // Centers the camera on the given world position.
    fun moveTo(newWorldXy: Vector2) {
        orthoCam.position.set(newWorldXy.x, newWorldXy.y, 0f)
        update()
    }
    // factor > 1.0 -> zoom out, factor < 1.0 -> zoom in
    // Zooms while keeping targetPositionInWorldCoordinates visually fixed
    // by lerping the camera center toward/away from the target.
    fun zoomOut(factor: Float, targetPositionInWorldCoordinates: Vector2) {
        orthoCam.viewportWidth *= factor
        orthoCam.viewportHeight *= factor
        orthoCam.position.set(Vector2(targetPositionInWorldCoordinates).apply {
            lerp(orthoCam.position.toVector2(), factor)
        }.toVector3())
        update()
    }
    // updates camera matrix and assign viewport width/height info to viewport instance
    private fun update() {
        orthoCam.update()
        viewport.worldWidth = orthoCam.viewportWidth
        viewport.worldHeight = orthoCam.viewportHeight
    }
    // Applies the viewport and pushes the camera matrix to the shared
    // shape renderer; call once per frame before drawing world content.
    fun apply() {
        update()
        viewport.apply()
        Global.shapeRenderer.projectionMatrix = orthoCam.combined
    }
    // Repositions the camera and resizes the viewport, preserving the
    // current aspect ratio.
    fun setNewViewportWorldWidth(newPosition: Vector2, newViewportWidthInUnits: Float) {
        log("camera. set viewport width")
        orthoCam.viewportHeight = newViewportWidthInUnits * 1f * heightToWidthRatio
        orthoCam.viewportWidth = newViewportWidthInUnits
        orthoCam.position.set(newPosition.x, newPosition.y, 0f)
        update()
    }
    // Must be called once at startup before any other camera method that
    // touches `viewport` (which is lateinit).
    fun initCamera(viewportWidthInUnits: Float) {
        // Derive height from the physics area's aspect ratio.
        orthoCam.viewportHeight = viewportWidthInUnits * 1f * physicsScreenHeight / physicsScreenWidth
        orthoCam.viewportWidth = viewportWidthInUnits
        viewport = FitViewport(orthoCam.viewportWidth, orthoCam.viewportHeight, orthoCam)
        viewport.setScreenBounds(physicsScreenDx.toInt(), physicsScreenDyBottom.toInt(),
                physicsScreenWidth.toInt(), physicsScreenHeight.toInt())
        update()
    }
}
|
#!/bin/sh
echo "Harden the managed servers"
# Reference:
# http://geekeasier.com/protect-ssh-server-with-installing-fail2ban-on-linuxubuntu/3774/

# One-time git-crypt setup: the .git-crypt directory exists once
# `git crypt init` has been run, so skip the block on later runs.
[ -d .git-crypt ] || (
  git crypt init
  git-crypt add-gpg-user ludovic.claude@chuv.ch
  echo "Setup .gitattributes, see https://www.agwa.name/projects/git-crypt/"
  echo "Check that your files are well protected using"
  echo " git check-attr -a <path to file to encrypt>"
  echo "The file should have diff: git-crypt and filter: git-crypt present"
)

DATACENTER=main
# Quote substitutions so paths containing spaces survive word splitting.
ansible-playbook --ask-become-pass -i "envs/$DATACENTER/etc/ansible/" \
  -e "play_dir=$(pwd)" \
  -e "datacenter=$DATACENTER" \
  common/playbooks/secure-system.yml
|
// rseip
//
// rseip - Ethernet/IP (CIP) in pure Rust.
// Copyright: 2021, Joylei <leingliu@gmail.com>
// License: MIT
use crate::epath::*;
use bytes::{BufMut, BytesMut};
use rseip_core::codec::{Encode, Encoder};
impl Encode for PortSegment {
    // Encodes a CIP port segment: a header byte carrying the port number
    // (or 0x0F when the port needs the extended 16-bit form), an optional
    // link-address length byte, the optional extended port, the link
    // address bytes, and a trailing pad byte to keep the segment even-sized.
    #[inline]
    fn encode_by_ref<A: Encoder>(
        &self,
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        // Bit 4 of the header flags a multi-byte link address.
        const EXTENDED_LINKED_ADDRESS_SIZE: u16 = 1 << 4; // 0x10
        let link_addr_len = self.link.len();
        // Link-address length must fit in the single size byte.
        debug_assert!(link_addr_len < u8::MAX as usize);
        let start_pos = buf.len();
        // Ports above 14 cannot fit in the low nibble; 0x0F signals that
        // the real port number follows as a little-endian u16.
        let mut segment_byte = if self.port > 14 { 0x0F } else { self.port };
        if link_addr_len > 1 {
            segment_byte |= EXTENDED_LINKED_ADDRESS_SIZE;
        }
        buf.put_u8(segment_byte as u8);
        if link_addr_len > 1 {
            buf.put_u8(link_addr_len as u8);
        }
        if self.port > 14 {
            buf.put_u16(self.port);
        }
        buf.put_slice(&self.link);
        let end_pos = buf.len();
        // Pad odd-length segments to an even byte count.
        if (end_pos - start_pos) % 2 != 0 {
            buf.put_u8(0);
        }
        Ok(())
    }
    // Mirrors encode_by_ref: header byte, optional size byte, optional
    // extended port, link bytes, plus `count % 2` for the pad byte.
    #[inline]
    fn bytes_count(&self) -> usize {
        let link_addr_len = self.link.len();
        let mut count = 1;
        if link_addr_len > 1 {
            count += 1;
        }
        if self.port > 14 {
            count += 2;
        }
        count += link_addr_len;
        count + count % 2
    }
}
impl Segment {
    /// Writes a logical segment carrying a `u16` value.
    ///
    /// The compact form (`single_fmt` + 1 value byte) is used when the value
    /// fits in a byte; otherwise the extended form (`extended_fmt`, a pad
    /// byte, then the value little-endian) is emitted. Shared by the class,
    /// instance and attribute encoders, which previously triplicated this.
    #[inline]
    fn put_logical_u16(single_fmt: u8, extended_fmt: u8, v: u16, buf: &mut BytesMut) {
        if v <= u8::MAX as u16 {
            buf.put_u8(single_fmt);
            buf.put_u8(v as u8);
        } else {
            buf.put_u8(extended_fmt);
            buf.put_u8(0);
            buf.put_u16_le(v);
        }
    }
    /// Encodes a class-id logical segment (0x20 compact / 0x21 extended).
    #[inline]
    fn encode_class<A: Encoder>(
        v: u16,
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        Self::put_logical_u16(0x20, 0x21, v, buf);
        Ok(())
    }
    /// Encodes an instance-id logical segment (0x24 compact / 0x25 extended).
    #[inline]
    fn encode_instance<A: Encoder>(
        v: u16,
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        Self::put_logical_u16(0x24, 0x25, v, buf);
        Ok(())
    }
    /// Encodes an attribute-id logical segment (0x30 compact / 0x31 extended).
    #[inline]
    fn encode_attribute<A: Encoder>(
        v: u16,
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        Self::put_logical_u16(0x30, 0x31, v, buf);
        Ok(())
    }
    /// Encodes a member/element logical segment in 8-bit (0x28), 16-bit
    /// (0x29) or 32-bit (0x2A) form, picking the smallest that fits.
    #[inline]
    fn encode_element<A: Encoder>(
        elem: u32,
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        match elem {
            v if v <= (u8::MAX as u32) => {
                buf.put_u8(0x28);
                buf.put_u8(v as u8);
            }
            v if v <= (u16::MAX as u32) => {
                buf.put_u8(0x29);
                buf.put_u8(0);
                buf.put_u16_le(v as u16);
            }
            v => {
                buf.put_u8(0x2A);
                buf.put_u8(0);
                buf.put_u32_le(v);
            }
        }
        Ok(())
    }
    /// Encodes an ANSI extended symbolic segment (0x91): type byte, length
    /// byte, the symbol bytes, and a pad byte when the length is odd.
    #[inline]
    fn encode_symbol<A: Encoder>(
        symbol: &[u8],
        buf: &mut BytesMut,
        _encoder: &mut A,
    ) -> Result<(), A::Error> {
        let char_count = symbol.len();
        // The length field is a single byte.
        assert!(char_count <= u8::MAX as usize);
        buf.put_u8(0x91);
        buf.put_u8(char_count as u8);
        buf.put_slice(symbol);
        if char_count % 2 != 0 {
            buf.put_u8(0);
        }
        Ok(())
    }
}
impl Encode for Segment {
    /// Consuming encode: forwards to the by-reference implementation,
    /// which produces exactly the same bytes.
    #[inline]
    fn encode<A: Encoder>(self, buf: &mut BytesMut, encoder: &mut A) -> Result<(), A::Error> {
        self.encode_by_ref(buf, encoder)
    }
    /// Dispatches each segment variant to its dedicated encoder.
    #[inline]
    fn encode_by_ref<A: Encoder>(
        &self,
        buf: &mut BytesMut,
        encoder: &mut A,
    ) -> Result<(), A::Error> {
        match *self {
            Segment::Class(v) => Self::encode_class(v, buf, encoder),
            Segment::Instance(v) => Self::encode_instance(v, buf, encoder),
            Segment::Attribute(v) => Self::encode_attribute(v, buf, encoder),
            Segment::Element(v) => Self::encode_element(v, buf, encoder),
            Segment::Port(ref port) => port.encode_by_ref(buf, encoder),
            Segment::Symbol(ref symbol) => Self::encode_symbol(symbol.as_bytes(), buf, encoder),
        }
    }
    /// Number of bytes encode/encode_by_ref will write for this segment.
    #[inline]
    fn bytes_count(&self) -> usize {
        match self {
            Segment::Class(v) | Segment::Instance(v) | Segment::Attribute(v) => {
                // Compact logical form is 2 bytes, extended form is 4.
                if *v <= u8::MAX as u16 {
                    2
                } else {
                    4
                }
            }
            Segment::Element(v) if *v <= u8::MAX as u32 => 2,
            Segment::Element(v) if *v <= u16::MAX as u32 => 4,
            Segment::Element(_) => 6,
            Segment::Port(port) => port.bytes_count(),
            Segment::Symbol(symbol) => {
                // Type byte + length byte + data, padded to an even count.
                let n = symbol.as_bytes().len();
                2 + n + n % 2
            }
        }
    }
}
impl Encode for EPath {
    /// Consuming encode: every segment is encoded in order.
    #[inline]
    fn encode<A: Encoder>(self, buf: &mut BytesMut, encoder: &mut A) -> Result<(), A::Error> {
        self.into_iter().try_for_each(|segment| segment.encode(buf, encoder))
    }
    /// By-reference encode: identical output to `encode`.
    #[inline]
    fn encode_by_ref<A: Encoder>(
        &self,
        buf: &mut BytesMut,
        encoder: &mut A,
    ) -> Result<(), A::Error> {
        self.iter().try_for_each(|segment| segment.encode_by_ref(buf, encoder))
    }
    /// Total byte count: the sum of all segment byte counts.
    #[inline]
    fn bytes_count(&self) -> usize {
        self.iter().map(|segment| segment.bytes_count()).sum()
    }
}
impl From<PortSegment> for EPath {
fn from(port: PortSegment) -> Self {
Self::from(vec![Segment::Port(port)])
}
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::epath::EPATH_CONNECTION_MANAGER;
    use rseip_core::tests::EncodeExt;
    // Even-length symbol: 0x91, length 0x0A, then "TotalCount" — no padding.
    #[test]
    fn test_epath_symbol() {
        let epath = EPath::from_symbol("TotalCount");
        assert_eq!(epath.bytes_count(), 12);
        let buf = epath.try_into_bytes().unwrap();
        assert_eq!(
            &buf[..],
            &[0x91, 0x0A, 0x54, 0x6F, 0x74, 0x61, 0x6C, 0x43, 0x6F, 0x75, 0x6E, 0x74]
        );
    }
    // Odd-length symbol: a trailing 0x00 pad keeps the segment even-sized.
    #[test]
    fn test_epath_symbol_odd() {
        let epath = EPath::from_symbol("TotalCountt");
        assert_eq!(epath.bytes_count(), 14);
        let buf = epath.try_into_bytes().unwrap();
        assert_eq!(
            &buf[..],
            &[0x91, 0x0B, 0x54, 0x6F, 0x74, 0x61, 0x6C, 0x43, 0x6F, 0x75, 0x6E, 0x74, 0x74, 0x00]
        );
    }
    // Class 0x06 / instance 0x01 matches the Connection Manager path constant.
    #[test]
    fn test_epath_unconnected_send() {
        let epath = EPath::from(vec![Segment::Class(0x06), Segment::Instance(0x1)]);
        assert_eq!(epath.bytes_count(), 4);
        let buf = epath.try_into_bytes().unwrap();
        assert_eq!(&buf[..], EPATH_CONNECTION_MANAGER);
    }
    // Default port segment (backplane, single-byte link) encodes to 2 bytes.
    #[test]
    fn test_epath_router_path() {
        let epath = EPath::from(vec![Segment::Port(PortSegment::default())]);
        assert_eq!(epath.bytes_count(), 2);
        let buf = epath.try_into_bytes().unwrap();
        assert_eq!(&buf[..], &[01, 00]);
    }
}
|
### ESS scaling schedule Example
This example launches an ESS scheduled task, which creates ECS instances at the scheduled time.
### Get up and running
* Planning phase
terraform plan
* Apply phase
terraform apply
* Destroy
terraform destroy
|
(defproject client "0.1.0"
  :description "Sharingio client: Web frontend for sharingio pair box creation"
  :url "https://sharing.io"
  :min-lein-version "2.0.0"
  :dependencies [[org.clojure/clojure "1.10.0"]
                 [org.clojure/test.check "1.1.0"]
                 [com.gfredericks/test.chuck "0.2.10"]
                 [cheshire "5.10.0"]
                 [io.forward/yaml "1.0.10"]
                 [hiccup "1.0.5"]
                 [compojure "1.6.1"]
                 [clj-http "3.10.3"]
                 [http-kit "2.5.0"]
                 [org.clojure/tools.logging "1.1.0"]
                 [ring/ring-defaults "0.3.2"]
                 [ring/ring-core "1.8.2"]
                 [ring/ring-jetty-adapter "1.8.2"]
                 [clojure.java-time "0.3.2"]
                 [environ "1.2.0"]]
  :plugins [[lein-ring "0.12.5"]
            [environ/environ.lein "0.2.1"]]
  :ring {:handler client.web/app
         :port 5000
         :nrepl {:start? true}
         :autoload? true}
  :target-path "target/%s"
  ;; :uberjar must be a key inside :profiles for Leiningen to apply it when
  ;; building an uberjar; as a top-level project key it was silently ignored,
  ;; so `lein uberjar` produced a jar without AOT compilation or a main class.
  :profiles
  {:dev {:dependencies [[javax.servlet/servlet-api "2.5"]
                        [ring/ring-mock "0.3.2"]]}
   :uberjar {:aot :all
             :main client.web}})
|
package com.github.xmlparser.util;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import java.io.File;
// PER_CLASS lifecycle allows the non-static @BeforeAll setup method below.
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class CommonUtilTest {
    // Sample input file resolved from the test-resources location.
    // NOTE(review): the field is named `csv` but is built from
    // TestConstants.XML_SAMPLE — confirm which format is intended.
    File csv;
    @BeforeAll
    public void setup() {
        csv = new File(new File(TestConstants.LOCATION).getAbsolutePath() + TestConstants.CHAR_FORWARD_SLASH
                + TestConstants.XML_SAMPLE);
    }
    // Smoke test only: prints the prefix instead of asserting on it.
    // NOTE(review): consider asserting the expected prefix value so the
    // test can actually fail on a regression.
    @Test
    void getFileNamePrefix() {
        System.out.println(CommonUtil.getFileNamePrefix(csv));
    }
}
|
#!/usr/bin/env bash
# Build and run the example binary, then show the world package docs.
go build -o packages.exe main.go
./packages.exe
printf "======"
printf "\nworld package documentation (look at run.sh)\n"
printf "======\n"
go doc ./world
go doc ./world.PrintStartRoom
# Trailing \n added so the next shell prompt is not glued to the URL.
printf "Open => http://localhost:6060\n"
#godoc -http=:6060
#ss -lptn 'sport = :6060'
|
#!/usr/bin/env bash
# bash (not POSIX sh) is required: the script below relies on the
# bash-only [[ ... ]] test construct, which fails under dash/sh.
DOTFILES=~/.dotfiles
# Clone dotfiles repo
if [ ! -d "$DOTFILES" ]; then
env git clone https://github.com/josemarluedke/dotfiles.git $DOTFILES || {
echo "Error: git clone of dotfiles repo failed"
exit 1
}
fi
# Add global gitconfig
git config --global include.path $DOTFILES/config/global.gitconfig
# Install Brew
echo "Checking if homebrew is installed."
type brew > /dev/null
rc=$?; if [[ $rc != 0 ]]; then
echo "Installing homebrew."
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
echo "Verifying homebrew installation."
type brew
rc=$?; if [[ $rc != 0 ]]; then
echo "Cannot find homebrew installation. Check the error log."
exit $rc
else
echo "Homebrew installation is verified."
fi
else
echo "Homebrew is already installed."
fi
# Install Volta
echo "Checking if volta is installed."
type volta > /dev/null
rc=$?; if [[ $rc != 0 ]]; then
echo "Installing Volta."
/usr/local/bin/brew install volta
echo "Verifying volta installation."
type volta
rc=$?; if [[ $rc != 0 ]]; then
echo "Cannot find volta installation."
exit $rc
else
echo "Volta installation is verified."
fi
else
echo "Volta is already installed."
fi
# Install Node & Yarn using Volta
type volta > /dev/null
rc=$?; if [[ $rc == 0 ]]; then
echo "Installing Node using Volta."
/usr/local/bin/volta install node@latest
echo "Installing Yarn using Volta."
/usr/local/bin/volta install yarn
fi
# Brew bundle
echo "Do you want to run brew bundle from dotfiles? [Y/N]"
read choice
if [[ $choice = 'Y' ]] || [[ $choice = 'y' ]]; then
brew tap Homebrew/bundle
brew bundle --file=$DOTFILES/Brewfile
else
echo "Brew bundle skipped."
fi
# Install oh my zsh
if [ ! -n "$ZSH" ]; then
hijack_env() {
if [[ "$1" != "zsh" ]]; then
env "$@"
fi
}
alias env="hijack_env"
curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh -so - | sh
unalias env
fi
# Install Spaceship zsh theme
ZSH_CUSTOM=~/.oh-my-zsh/custom
git clone https://github.com/denysdovhan/spaceship-prompt.git "$ZSH_CUSTOM/themes/spaceship-prompt"
ln -s "$ZSH_CUSTOM/themes/spaceship-prompt/spaceship.zsh-theme" "$ZSH_CUSTOM/themes/spaceship.zsh-theme" > /dev/null
# Install powerline fonts
echo "Do you want to install Powerline fonts? [Y/N]"
read choice
if [[ $choice = 'Y' ]] || [[ $choice = 'y' ]]; then
TMP_POWERLINE_FONTS=tmp-install-fonts
env git clone --depth=1 https://github.com/powerline/fonts.git $TMP_POWERLINE_FONTS
cd $TMP_POWERLINE_FONTS
sh ./install.sh
cd ..
rm -rf $TMP_POWERLINE_FONTS
echo "Powerline fonts installed."
else
echo "Powerline fonts skipped."
fi
# Symlink files
echo "Symlinking dotfiles."
for i in `find ~/.dotfiles/dotfiles/* -maxdepth 1`; do
original_file=`basename $i`
if [[ "$original_file" != "dotfiles" ]]; then
ln -snfv $i ~/.${original_file} > /dev/null
fi
done
ln -snfv ~/.dotfiles/custom.zsh $ZSH_CUSTOM/custom.zsh > /dev/null
mkdir -p ~/.config/oni
ln -s ~/.dotfiles/config/oni.tsx ~/.config/oni/config.tsx
env zsh
|
/**
* Copyright Soramitsu Co., Ltd. All Rights Reserved.
* SPDX-License-Identifier: GPL-3.0
*/
package jp.co.soramitsu.feature_main_impl.presentation.personaldataedit
import androidx.lifecycle.LiveData
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.viewModelScope
import jp.co.soramitsu.common.interfaces.WithProgress
import jp.co.soramitsu.common.presentation.viewmodel.BaseViewModel
import jp.co.soramitsu.feature_main_api.launcher.MainRouter
import jp.co.soramitsu.feature_main_impl.domain.MainInteractor
import kotlinx.coroutines.launch
class PersonalDataEditViewModel(
    private val interactor: MainInteractor,
    private val router: MainRouter,
    private val progress: WithProgress,
) : BaseViewModel(), WithProgress by progress {

    // Backing-field pattern: mutable inside the VM, read-only LiveData to the UI.
    private val _accountNameLiveData = MutableLiveData<String>()
    val accountNameLiveData: LiveData<String> = _accountNameLiveData

    private val _nextButtonEnableLiveData = MutableLiveData<Boolean>()
    val nextButtonEnableLiveData: LiveData<Boolean> = _nextButtonEnableLiveData

    init {
        // Preload the stored account name so the edit field starts filled in.
        viewModelScope.launch {
            runCatching { interactor.getAccountName() }
                .onSuccess { _accountNameLiveData.value = it }
                .onFailure { onError(it) }
        }
    }

    /** Navigates back without saving anything. */
    fun backPressed() = router.popBackStack()

    /** Persists [accountName], then leaves the screen on success. */
    fun saveData(accountName: String) {
        viewModelScope.launch {
            showProgress()
            runCatching { interactor.saveAccountName(accountName) }
                .onSuccess {
                    hideProgress()
                    router.popBackStack()
                }
                .onFailure {
                    onError(it)
                    hideProgress()
                }
        }
    }

    /** Enables the "next" button only while the field is non-empty. */
    fun accountNameChanged(accountName: String) {
        _nextButtonEnableLiveData.value = accountName.isNotEmpty()
    }
}
|
#[sht] command_prompt = py>
#[sht] command_shell = python -c
py> print('hello')
hello
|
2020年07月28日20时数据
Status: 200
1.新冠确诊疑似患者医保支付12亿
微博热度:2947954
2.陈学冬否认参加中国新说唱
微博热度:1681168
3.三十而已细节
微博热度:1250865
4.中印部队已在大多数地点实现脱离接触
微博热度:965050
5.药水哥参加中国新说唱
微博热度:801412
6.明星作家的治愈书单
微博热度:730356
7.孙红雷夸张艺兴是骄傲
微博热度:730356
8.王漫妮辞职
微博热度:639879
9.最新中国百强县
微博热度:570567
10.20年后打老师男子想当面给老师道歉
微博热度:565311
11.周扬青回应与罗志祥好友合影
微博热度:551372
12.北京公布一批终生禁驾人员名单
微博热度:537207
13.外交部称南海不是美国夏威夷
微博热度:480055
14.马来西亚前总理纳吉布被判12年监禁
微博热度:457322
15.韩国植物园立安倍下跪谢罪雕像
微博热度:404151
16.特斯拉上海工厂大规模招聘
微博热度:403633
17.张朝阳伊能静合跳无价之姐
微博热度:401408
18.微博关闭35个百万粉丝违规账号
微博热度:399359
19.辽宁无人岛最新价格
微博热度:397857
20.CNN记者成都直播时的背景音乐
微博热度:396992
21.谢霆锋回归中国好声音
微博热度:394504
22.公交司机低头捡东西碾死行人
微博热度:390289
23.亲爱的爸妈开机
微博热度:355092
24.为了恋爱学会了PS
微博热度:313024
25.外交部称美国是退群世界冠军
微博热度:277875
26.金星直拍版无价之姐
微博热度:256902
27.无价之姐中国风爵士扇子舞
微博热度:243668
28.字节跳动AI副总裁离职
微博热度:216163
29.大连6例本土确诊病例行动轨迹
微博热度:197961
30.天问一号探测器传回地月合影
微博热度:178593
31.蔡徐坤的站姐
微博热度:177401
32.杨洋胸肌
微博热度:176634
33.日本议员联盟建议禁用中国App
微博热度:174250
34.陕西一幼儿园校车与货车相撞
微博热度:173365
35.草莓音乐节
微博热度:158467
36.EDG处罚Aodi
微博热度:158346
37.黄金
微博热度:156885
38.八一厂纪录片版国庆大阅兵预告
微博热度:154808
39.美国波特兰抗议已持续2个月
微博热度:153437
40.上半年GDP十强城市
微博热度:143742
41.张靓颖加盟中国新说唱
微博热度:134379
42.南京玄武湖连续出现2株并蒂莲
微博热度:130368
43.中国驻印度使馆回应APP被限制
微博热度:121804
44.北京拒绝燃气入户安检可停气
微博热度:120365
45.张子枫我的姐姐开机
微博热度:119875
46.香港暂停与加澳英3国移交逃犯协定
微博热度:113451
47.香港新增本地病例近半为聚集性感染
微博热度:110590
48.上海暴雨
微博热度:100541
49.香港连续一周单日新增确诊破百
微博热度:87830
50.大众已向美国车主赔偿98亿美元
微博热度:87157
|
import 'package:flutter/material.dart';
class Constants {
  /// Heading text style: dark navy, bold, slightly loose line height.
  static const kTitleStyle = TextStyle(
    color: Color(0xFF212B46),
    fontWeight: FontWeight.w700,
    fontSize: 19.0,
    height: 1.3,
  );

  /// Secondary/body text style: black, semi-bold, tight line height.
  static const kSubtitleStyle = TextStyle(
    color: Colors.black,
    fontWeight: FontWeight.w600,
    fontSize: 16.0,
    height: 1.0,
  );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.