repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
owenzhou/ginrbac
|
config/config.go
|
<filename>config/config.go
package config
import (
"github.com/spf13/viper"
"github.com/owenzhou/ginrbac/contracts"
"os"
)
// newConfig resolves the shared viper instance from the application
// container and points it at the project's config.yaml file.
//
// It returns nil when config.yaml does not exist (or cannot be stat'ed),
// or when the container does not hold a *viper.Viper under the key
// "viper" (the original code panicked on a failed type assertion).
func newConfig(app contracts.IApplication) *viper.Viper {
	const configPath = "config.yaml"
	if _, err := os.Stat(configPath); err != nil {
		// No readable config file: nothing to load.
		return nil
	}
	// Use a short local name instead of shadowing the viper package.
	v, ok := app.Make("viper").(*viper.Viper)
	if !ok {
		// Container entry absent or of an unexpected type.
		return nil
	}
	v.SetConfigFile(configPath)
	v.SetConfigType("yaml")
	return v
}
|
FlippingBinary/wqxlib
|
wqxlib-python/wqxlib/wqx_v3_0/ActivityDescription.py
|
from ..common import WQXException
from .MeasureCompact import MeasureCompact
from .SimpleContent import (
ActivityConductingOrganizationText,
ActivityEndDate,
ActivityIdentifier,
ActivityIdentifierUserSupplied,
ActivityMediaName,
ActivityMediaSubdivisionName,
ActivityRelativeDepthName,
ActivityStartDate,
ActivityTypeCode,
CommentText,
DepthAltitudeReferencePointText,
MonitoringLocationIdentifier,
ProjectIdentifier,
SamplingComponentName
)
from .WQXTime import WQXTime
from typing import List, Union
from yattag import Doc
class ActivityDescription:
    """Basic identification information for an activity conducted within a project.

    Can be constructed from another ActivityDescription (copied without
    typechecking), from a dict of attribute values, or from keyword
    arguments (both routed through the typechecking property setters).
    """

    __activityIdentifier: ActivityIdentifier
    __activityIdentifierUserSupplied: ActivityIdentifierUserSupplied
    __activityTypeCode: ActivityTypeCode
    __activityMediaName: ActivityMediaName
    __activityMediaSubdivisionName: ActivityMediaSubdivisionName
    __activityStartDate: ActivityStartDate
    __activityStartTime: WQXTime
    __activityEndDate: ActivityEndDate
    __activityEndTime: WQXTime
    __activityRelativeDepthName: ActivityRelativeDepthName
    __activityDepthHeightMeasure: MeasureCompact
    __activityTopDepthHeightMeasure: MeasureCompact
    __activityBottomDepthHeightMeasure: MeasureCompact
    __activityDepthAltitudeReferencePointText: DepthAltitudeReferencePointText
    __projectIdentifier: ProjectIdentifier
    __activityConductingOrganizationText: List[ActivityConductingOrganizationText]
    __monitoringLocationIdentifier: MonitoringLocationIdentifier
    __samplingComponentName: SamplingComponentName
    __activityCommentText: CommentText

    def __init__(self, o=None, *,
        activityIdentifier: ActivityIdentifier = None,
        activityIdentifierUserSupplied: ActivityIdentifierUserSupplied = None,
        activityTypeCode: ActivityTypeCode = None,
        activityMediaName: ActivityMediaName = None,
        activityMediaSubdivisionName: ActivityMediaSubdivisionName = None,
        activityStartDate: ActivityStartDate = None,
        activityStartTime: WQXTime = None,
        activityEndDate: ActivityEndDate = None,
        activityEndTime: WQXTime = None,
        activityRelativeDepthName: ActivityRelativeDepthName = None,
        activityDepthHeightMeasure: MeasureCompact = None,
        activityTopDepthHeightMeasure: MeasureCompact = None,
        activityBottomDepthHeightMeasure: MeasureCompact = None,
        activityDepthAltitudeReferencePointText: DepthAltitudeReferencePointText = None,
        projectIdentifier: ProjectIdentifier = None,
        activityConductingOrganizationText: List[ActivityConductingOrganizationText] = None,
        monitoringLocationIdentifier: MonitoringLocationIdentifier = None,
        samplingComponentName: SamplingComponentName = None,
        activityCommentText: CommentText = None
    ):
        if isinstance(o, ActivityDescription):
            # Copy attributes from another ActivityDescription without typechecking
            self.__activityIdentifier = o.activityIdentifier
            self.__activityIdentifierUserSupplied = o.activityIdentifierUserSupplied
            self.__activityTypeCode = o.activityTypeCode
            self.__activityMediaName = o.activityMediaName
            self.__activityMediaSubdivisionName = o.activityMediaSubdivisionName
            self.__activityStartDate = o.activityStartDate
            self.__activityStartTime = o.activityStartTime
            self.__activityEndDate = o.activityEndDate
            self.__activityEndTime = o.activityEndTime
            self.__activityRelativeDepthName = o.activityRelativeDepthName
            self.__activityDepthHeightMeasure = o.activityDepthHeightMeasure
            self.__activityTopDepthHeightMeasure = o.activityTopDepthHeightMeasure
            self.__activityBottomDepthHeightMeasure = o.activityBottomDepthHeightMeasure
            self.__activityDepthAltitudeReferencePointText = o.activityDepthAltitudeReferencePointText
            self.__projectIdentifier = o.projectIdentifier
            self.__activityConductingOrganizationText = o.activityConductingOrganizationText
            self.__monitoringLocationIdentifier = o.monitoringLocationIdentifier
            self.__samplingComponentName = o.samplingComponentName
            self.__activityCommentText = o.activityCommentText
        elif isinstance(o, dict):
            # Assign attributes from a dict with typechecking (via setters).
            # BUGFIX: dict.get() takes its default positionally; the previous
            # keyword form get(key, default=None) raised TypeError.
            self.activityIdentifier = o.get('activityIdentifier')
            self.activityIdentifierUserSupplied = o.get('activityIdentifierUserSupplied')
            self.activityTypeCode = o.get('activityTypeCode')
            self.activityMediaName = o.get('activityMediaName')
            self.activityMediaSubdivisionName = o.get('activityMediaSubdivisionName')
            self.activityStartDate = o.get('activityStartDate')
            self.activityStartTime = o.get('activityStartTime')
            self.activityEndDate = o.get('activityEndDate')
            self.activityEndTime = o.get('activityEndTime')
            self.activityRelativeDepthName = o.get('activityRelativeDepthName')
            self.activityDepthHeightMeasure = o.get('activityDepthHeightMeasure')
            self.activityTopDepthHeightMeasure = o.get('activityTopDepthHeightMeasure')
            self.activityBottomDepthHeightMeasure = o.get('activityBottomDepthHeightMeasure')
            self.activityDepthAltitudeReferencePointText = o.get('activityDepthAltitudeReferencePointText')
            self.projectIdentifier = o.get('projectIdentifier')
            self.activityConductingOrganizationText = o.get('activityConductingOrganizationText', [])
            self.monitoringLocationIdentifier = o.get('monitoringLocationIdentifier')
            self.samplingComponentName = o.get('samplingComponentName')
            self.activityCommentText = o.get('activityCommentText')
        else:
            # Assign attributes from named keywords with typechecking
            self.activityIdentifier = activityIdentifier
            self.activityIdentifierUserSupplied = activityIdentifierUserSupplied
            self.activityTypeCode = activityTypeCode
            self.activityMediaName = activityMediaName
            self.activityMediaSubdivisionName = activityMediaSubdivisionName
            self.activityStartDate = activityStartDate
            self.activityStartTime = activityStartTime
            self.activityEndDate = activityEndDate
            self.activityEndTime = activityEndTime
            self.activityRelativeDepthName = activityRelativeDepthName
            self.activityDepthHeightMeasure = activityDepthHeightMeasure
            self.activityTopDepthHeightMeasure = activityTopDepthHeightMeasure
            self.activityBottomDepthHeightMeasure = activityBottomDepthHeightMeasure
            self.activityDepthAltitudeReferencePointText = activityDepthAltitudeReferencePointText
            self.projectIdentifier = projectIdentifier
            self.activityConductingOrganizationText = activityConductingOrganizationText
            self.monitoringLocationIdentifier = monitoringLocationIdentifier
            self.samplingComponentName = samplingComponentName
            self.activityCommentText = activityCommentText

    @property
    def activityIdentifier(self) -> ActivityIdentifier:
        return self.__activityIdentifier
    @activityIdentifier.setter
    def activityIdentifier(self, val: ActivityIdentifier) -> None:
        self.__activityIdentifier = None if val is None else ActivityIdentifier(val)

    @property
    def activityIdentifierUserSupplied(self) -> ActivityIdentifierUserSupplied:
        return self.__activityIdentifierUserSupplied
    @activityIdentifierUserSupplied.setter
    def activityIdentifierUserSupplied(self, val: ActivityIdentifierUserSupplied) -> None:
        self.__activityIdentifierUserSupplied = None if val is None else ActivityIdentifierUserSupplied(val)

    @property
    def activityTypeCode(self) -> ActivityTypeCode:
        return self.__activityTypeCode
    @activityTypeCode.setter
    def activityTypeCode(self, val: ActivityTypeCode) -> None:
        self.__activityTypeCode = None if val is None else ActivityTypeCode(val)

    @property
    def activityMediaName(self) -> ActivityMediaName:
        return self.__activityMediaName
    @activityMediaName.setter
    def activityMediaName(self, val: ActivityMediaName) -> None:
        self.__activityMediaName = None if val is None else ActivityMediaName(val)

    @property
    def activityMediaSubdivisionName(self) -> ActivityMediaSubdivisionName:
        return self.__activityMediaSubdivisionName
    @activityMediaSubdivisionName.setter
    def activityMediaSubdivisionName(self, val: ActivityMediaSubdivisionName) -> None:
        self.__activityMediaSubdivisionName = None if val is None else ActivityMediaSubdivisionName(val)

    @property
    def activityStartDate(self) -> ActivityStartDate:
        return self.__activityStartDate
    @activityStartDate.setter
    def activityStartDate(self, val: ActivityStartDate) -> None:
        # Rebuilt from date components; val must be date-like (year/month/day).
        self.__activityStartDate = None if val is None else ActivityStartDate(year=val.year, month=val.month, day=val.day)

    @property
    def activityStartTime(self) -> WQXTime:
        """The measure of clock time when the field activity began."""
        return self.__activityStartTime
    @activityStartTime.setter
    def activityStartTime(self, val: WQXTime) -> None:
        """The measure of clock time when the field activity began."""
        self.__activityStartTime = None if val is None else WQXTime(val)

    @property
    def activityEndDate(self) -> ActivityEndDate:
        return self.__activityEndDate
    @activityEndDate.setter
    def activityEndDate(self, val: ActivityEndDate) -> None:
        self.__activityEndDate = None if val is None else ActivityEndDate(year=val.year, month=val.month, day=val.day)

    @property
    def activityEndTime(self) -> WQXTime:
        """The measure of clock time when the field activity ended."""
        return self.__activityEndTime
    @activityEndTime.setter
    def activityEndTime(self, val: WQXTime) -> None:
        """The measure of clock time when the field activity ended."""
        self.__activityEndTime = None if val is None else WQXTime(val)

    @property
    def activityRelativeDepthName(self) -> ActivityRelativeDepthName:
        return self.__activityRelativeDepthName
    @activityRelativeDepthName.setter
    def activityRelativeDepthName(self, val: ActivityRelativeDepthName) -> None:
        self.__activityRelativeDepthName = None if val is None else ActivityRelativeDepthName(val)

    @property
    def activityDepthHeightMeasure(self) -> MeasureCompact:
        """A measurement of the vertical location (measured from a reference point) at which an activity occurred."""
        return self.__activityDepthHeightMeasure
    @activityDepthHeightMeasure.setter
    def activityDepthHeightMeasure(self, val: MeasureCompact) -> None:
        """A measurement of the vertical location (measured from a reference point) at which an activity occurred."""
        self.__activityDepthHeightMeasure = None if val is None else MeasureCompact(val)

    @property
    def activityTopDepthHeightMeasure(self) -> MeasureCompact:
        """A measurement of the upper vertical location of a vertical location range (measured from a reference point) at which an activity occurred."""
        return self.__activityTopDepthHeightMeasure
    @activityTopDepthHeightMeasure.setter
    def activityTopDepthHeightMeasure(self, val: MeasureCompact) -> None:
        """A measurement of the upper vertical location of a vertical location range (measured from a reference point) at which an activity occurred."""
        self.__activityTopDepthHeightMeasure = None if val is None else MeasureCompact(val)

    @property
    def activityBottomDepthHeightMeasure(self) -> MeasureCompact:
        """A measurement of the lower vertical location of a vertical location range (measured from a reference point) at which an activity occurred."""
        return self.__activityBottomDepthHeightMeasure
    @activityBottomDepthHeightMeasure.setter
    def activityBottomDepthHeightMeasure(self, val: MeasureCompact) -> None:
        """A measurement of the lower vertical location of a vertical location range (measured from a reference point) at which an activity occurred."""
        self.__activityBottomDepthHeightMeasure = None if val is None else MeasureCompact(val)

    @property
    def activityDepthAltitudeReferencePointText(self) -> DepthAltitudeReferencePointText:
        """The reference used to indicate the datum or reference used to establish the depth/altitude of an activity."""
        return self.__activityDepthAltitudeReferencePointText
    @activityDepthAltitudeReferencePointText.setter
    def activityDepthAltitudeReferencePointText(self, val: DepthAltitudeReferencePointText) -> None:
        """The reference used to indicate the datum or reference used to establish the depth/altitude of an activity."""
        self.__activityDepthAltitudeReferencePointText = None if val is None else DepthAltitudeReferencePointText(val)

    @property
    def projectIdentifier(self) -> ProjectIdentifier:
        return self.__projectIdentifier
    @projectIdentifier.setter
    def projectIdentifier(self, val: ProjectIdentifier) -> None:
        self.__projectIdentifier = None if val is None else ProjectIdentifier(val)

    @property
    def activityConductingOrganizationText(self) -> List[ActivityConductingOrganizationText]:
        return self.__activityConductingOrganizationText
    @activityConductingOrganizationText.setter
    def activityConductingOrganizationText(self, val: Union[ActivityConductingOrganizationText, List[ActivityConductingOrganizationText]]) -> None:
        # Accepts a single value or a list; always stores a (possibly empty) list.
        if val is None:
            self.__activityConductingOrganizationText = []
        elif isinstance(val, list):
            r: List[ActivityConductingOrganizationText] = []
            for x in val:
                r.append(ActivityConductingOrganizationText(x))
            self.__activityConductingOrganizationText = r
        else:
            self.__activityConductingOrganizationText = [ActivityConductingOrganizationText(val)]

    @property
    def monitoringLocationIdentifier(self) -> MonitoringLocationIdentifier:
        return self.__monitoringLocationIdentifier
    @monitoringLocationIdentifier.setter
    def monitoringLocationIdentifier(self, val: MonitoringLocationIdentifier) -> None:
        self.__monitoringLocationIdentifier = None if val is None else MonitoringLocationIdentifier(val)

    @property
    def samplingComponentName(self) -> SamplingComponentName:
        return self.__samplingComponentName
    @samplingComponentName.setter
    def samplingComponentName(self, val: SamplingComponentName) -> None:
        self.__samplingComponentName = None if val is None else SamplingComponentName(val)

    @property
    def activityCommentText(self) -> CommentText:
        """General comments concerning the activity."""
        return self.__activityCommentText
    @activityCommentText.setter
    def activityCommentText(self, val: CommentText) -> None:
        """General comments concerning the activity."""
        self.__activityCommentText = None if val is None else CommentText(val)

    def generateXML(self, name: str = 'ActivityDescription') -> str:
        """Serialize this object to a WQX XML fragment wrapped in *name*.

        :param name: tag name of the enclosing element.
        :raises WQXException: when a required attribute is missing/empty.
        """
        doc, tag, text, line = Doc().ttl()
        with tag(name):
            if self.__activityIdentifier is None:
                raise WQXException("Attribute 'activityIdentifier' is required.")
            line('ActivityIdentifier', self.__activityIdentifier)
            if self.__activityIdentifierUserSupplied is not None:
                line('ActivityIdentifierUserSupplied', self.__activityIdentifierUserSupplied)
            if self.__activityTypeCode is None:
                raise WQXException("Attribute 'activityTypeCode' is required.")
            line('ActivityTypeCode', self.__activityTypeCode)
            if self.__activityMediaName is None:
                raise WQXException("Attribute 'activityMediaName' is required.")
            line('ActivityMediaName', self.__activityMediaName)
            if self.__activityMediaSubdivisionName is not None:
                line('ActivityMediaSubdivisionName', self.__activityMediaSubdivisionName)
            if self.__activityStartDate is None:
                raise WQXException("Attribute 'activityStartDate' is required.")
            line('ActivityStartDate', str(self.__activityStartDate))
            if self.__activityStartTime is not None:
                doc.asis(self.__activityStartTime.generateXML('ActivityStartTime'))
            if self.__activityEndDate is not None:
                line('ActivityEndDate', str(self.__activityEndDate))
            if self.__activityEndTime is not None:
                doc.asis(self.__activityEndTime.generateXML('ActivityEndTime'))
            if self.__activityRelativeDepthName is not None:
                line('ActivityRelativeDepthName', self.__activityRelativeDepthName)
            if self.__activityDepthHeightMeasure is not None:
                doc.asis(self.__activityDepthHeightMeasure.generateXML('ActivityDepthHeightMeasure'))
            if self.__activityTopDepthHeightMeasure is not None:
                doc.asis(self.__activityTopDepthHeightMeasure.generateXML('ActivityTopDepthHeightMeasure'))
            if self.__activityBottomDepthHeightMeasure is not None:
                doc.asis(self.__activityBottomDepthHeightMeasure.generateXML('ActivityBottomDepthHeightMeasure'))
            if self.__activityDepthAltitudeReferencePointText is not None:
                line('ActivityDepthAltitudeReferencePointText', self.__activityDepthAltitudeReferencePointText)
            # BUGFIX: projectIdentifier is a single value here, not a list;
            # the old message claimed a list of ProjectIdentifier objects.
            if self.__projectIdentifier is None or len(self.__projectIdentifier) < 1:
                raise WQXException("Attribute 'projectIdentifier' is required.")
            line('ProjectIdentifier', self.__projectIdentifier)
            for x in self.__activityConductingOrganizationText:
                line('ActivityConductingOrganizationText', x)
            if self.__monitoringLocationIdentifier is not None:
                line('MonitoringLocationIdentifier', self.__monitoringLocationIdentifier)
            if self.__samplingComponentName is not None:
                line('SamplingComponentName', self.__samplingComponentName)
            if self.__activityCommentText is not None:
                line('ActivityCommentText', self.__activityCommentText)
        return doc.getvalue()
|
dharshatharan/web-book
|
api/app/models/user.rb
|
<gh_stars>0
# A registered account.  Users have a personal website, can follow other
# websites and tags, and carry an attached avatar image.
class User < ApplicationRecord
  # IdentityCache: cache-backed fetch helpers for this model.
  include IdentityCache

  # Bcrypt Secure Password: adds password / password_confirmation handling.
  has_secure_password

  # Validations
  validates :email, presence: true, uniqueness: true
  validates :username, presence: true, uniqueness: true
  validates :role, presence: true, inclusion: { in: ["public", "admin"] }

  # Relations
  # Destroying a user also destroys their personal website; the join rows
  # for follows are bulk-deleted (delete_all skips callbacks).
  belongs_to :personal_website, class_name: "Website", dependent: :destroy, optional: true
  has_many :user_follows_website, dependent: :delete_all
  has_many :followed_websites, through: :user_follows_website, source: :website
  has_many :user_follows_tag, dependent: :delete_all
  has_many :followed_tags, through: :user_follows_tag, source: :tag

  # Active Storage
  has_one_attached :avatar

  # Methods

  # Falls back to the username when no explicit display name is stored.
  # NOTE(review): `super` presumably reads a display_name column -- confirm schema.
  def display_name
    super || username
  end

  # Cached URL path for the attached avatar blob; nil when none is attached.
  def avatar_url
    if avatar.attached?
      Rails.cache.fetch([cache_key, __method__]) do
        Rails.application.routes.url_helpers
          .rails_blob_url(avatar, only_path: true)
      end
    end
  end
end
|
best08618/asylo
|
gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/always_inline3.c
|
/* { dg-do compile } */
/* { dg-options "-O2 -fgnu89-inline" } */
int do_something_evil (void);
inline __attribute__ ((always_inline)) void
q2(void) /* { dg-error "recursive inlining" } */
{
if (do_something_evil ())
return;
q2(); /* { dg-message "called from here" } */
q2(); /* With -O2 we don't warn here, it is eliminated by tail recursion. */
}
|
mwiesenberger/feltor-dev.github.io
|
doc/exblas/html/search/all_0.js
|
// Client-side search index for the generated API documentation
// (appears to be Doxygen output -- regenerate rather than edit by hand).
// Each entry: [id, [display text, [page anchor, flag, tooltip, ...]]].
var searchData=
[
  ['accumulate_0',['Accumulate',['../namespacedg_1_1exblas_1_1gpu.html#ae8560d81d2a195d7fae6fee57db801f0',1,'dg::exblas::gpu::Accumulate()'],['../namespacedg_1_1exblas_1_1cpu.html#aeb5ba33e37102969daf45883c7582d0d',1,'dg::exblas::cpu::Accumulate()']]],
  ['accumulate_2ecuh_1',['accumulate.cuh',['../accumulate_8cuh.html',1,'']]],
  ['accumulate_2eh_2',['accumulate.h',['../accumulate_8h.html',1,'']]]
];
|
congscallion/ds
|
src/test/java/io/ds/congscallion/iopackages/RandomAccessFileDemo.java
|
package io.ds.congscallion.iopackages;
import org.junit.Test;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.BasicFileAttributes;
/**
 * Demonstrates how {@link RandomAccessFile#setLength}, {@code seek()} and
 * {@code write()} affect the file pointer and the on-disk size.
 */
public class RandomAccessFileDemo {

    @Test
    public void test1() throws IOException {
        Path path = Paths.get("b.txt");
        // try-with-resources releases the file handle even when an I/O call
        // throws; the original version leaked the RandomAccessFile.
        try (RandomAccessFile raf = new RandomAccessFile("b.txt", "rw")) {
            raf.setLength(10);
            System.out.println("=================raf.setLength(10);==================");
            System.out.println("after raf.setLength(10), filePointer: " + raf.getFilePointer());
            print(path);
            System.out.println("before seek(1000), filePointer: " + raf.getFilePointer());
            print(path);
            System.out.println("=================raf.seek(1000);==================");
            // Seeking past EOF is legal; the file grows only on the next write.
            raf.seek(1000);
            System.out.println("after seek(1000), filePointer: " + raf.getFilePointer());
            print(path);
            raf.write("abc".getBytes());
            System.out.println("after write(\"abc\".getBytes()), filePointer: " + raf.getFilePointer());
            System.out.println("length: " + raf.length());
            print(path);
            System.out.println("=================raf.setLength(1024 * 10);==================");
            raf.setLength(1024 * 10);
            System.out.println("after setLength(1024 * 10), filePointer: " + raf.getFilePointer());
            System.out.println("length: " + raf.length());
            print(path);
        } finally {
            // Always remove the scratch file, even when the demo fails midway.
            Files.deleteIfExists(path);
        }
    }

    /** Prints the file size both via its attribute view and via Files.size(). */
    private void print(Path path) throws IOException {
        BasicFileAttributeView fileAttributeView = Files.getFileAttributeView(path, BasicFileAttributeView.class);
        BasicFileAttributes basicFileAttributes = fileAttributeView.readAttributes();
        System.out.println("basic:size = " + basicFileAttributes.size());
        long size = Files.size(path);
        System.out.println("Files:size = " + size);
    }
}
|
yndu13/credentials-java
|
src/test/java/utils/ParameterHelperTest.java
|
package utils;
import com.aliyun.credentials.utils.ParameterHelper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.text.ParseException;
import java.util.Date;
/** Unit tests for {@code ParameterHelper}. */
public class ParameterHelperTest {

    @Before
    public void init() {
        // Instantiated only to exercise the constructor (all methods under
        // test are static); the instance is deliberately discarded.
        new ParameterHelper();
    }

    /** Two consecutive nonces must differ. */
    @Test
    public void getUniqueNonce() {
        String nonce = ParameterHelper.getUniqueNonce();
        Assert.assertNotEquals(nonce, ParameterHelper.getUniqueNonce());
    }

    /** Parsing a UTC timestamp and re-formatting it must round-trip. */
    @Test
    public void getISO8601Time() {
        Date d2 = ParameterHelper.getUTCDate("2018-12-18T16:39:38Z");
        Assert.assertEquals("2018-12-18T16:39:38Z", ParameterHelper.getISO8601Time(d2));
    }
}
|
atul-vyshnav/2021_IBM_Code_Challenge_StockIT
|
src/StockIT-v1-release_source_from_JADX/sources/com/google/android/gms/clearcut/C1525R.java
|
package com.google.android.gms.clearcut;
/* renamed from: com.google.android.gms.clearcut.R */
// Non-instantiable holder class (decompiler output; see the "renamed from"
// note above -- this was originally the generated R resources class).
public final class C1525R {
    // Private constructor prevents instantiation.
    private C1525R() {
    }
}
|
18406611280/oil
|
grainoil-service-api/grainoil-system-api/src/main/java/com/grainoil/system/domain/vo/point_management/StorehouseGet.java
|
package com.grainoil.system.domain.vo.point_management;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Response view returned when editing a storehouse: the display data for a
 * storehouse looked up by id.
 */
@Data
@ApiModel(value = "StorehouseGet", description = "根据id获取仓房信息")
public class StorehouseGet implements Serializable {

    // NOTE(review): serialVersionUID is conventionally private; verify no
    // external code reads this public constant before tightening it.
    public static final long serialVersionUID = 1L;

    /** Storehouse id. */
    @ApiModelProperty(name = "storehouseId", value = "仓房id")
    private Long storehouseId;

    /** Storehouse code. */
    @ApiModelProperty(name = "storehouseCode", value = "仓房编码")
    private String storehouseCode;

    /** Storehouse type (dictionary-coded value). */
    @ApiModelProperty(name = "storehouseType", value = "仓房类型(数字字典)")
    private String storehouseType;

    /** Designed capacity of the storehouse. */
    @ApiModelProperty(name = "storehouseCapacity", value = "仓房设计容量")
    private Double storehouseCapacity;

    /** Floor area of the storehouse. */
    @ApiModelProperty(name = "storehouseArea", value = "仓房面积")
    private Double storehouseArea;

    /** Year of construction. */
    @ApiModelProperty(name = "storehouseYear", value = "建设年份")
    private Long storehouseYear;

    /** Organizations using this storehouse. */
    // NOTE(review): field is package-private unlike its siblings -- likely
    // an omission; confirm nothing relies on package access before adding
    // the private modifier.
    @ApiModelProperty(name = "organizeVoList", value = "使用企业集合")
    List<OrganizeVo> organizeVoList;
}
|
nickgros/SynapseWebClient
|
src/main/java/org/sagebionetworks/web/client/widget/evaluation/EvaluationFinderViewImpl.java
|
package org.sagebionetworks.web.client.widget.evaluation;
import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.Modal;
import org.gwtbootstrap3.client.ui.html.Div;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
/**
 * GWT view for the evaluation finder dialog: a modal containing an alert
 * area, a pageable evaluation list, and Select/Cancel buttons.  Layout is
 * defined in the companion UiBinder (.ui.xml) template.
 */
public class EvaluationFinderViewImpl implements EvaluationFinderView {

    /** Binds this class to its UiBinder template. */
    public interface Binder extends UiBinder<Widget, EvaluationFinderViewImpl> {
    }

    private static Binder uiBinder = GWT.create(Binder.class);

    // Fields injected from the template by UiBinder.
    @UiField
    Modal modal;
    @UiField
    Div synAlertContainer;
    @UiField
    Div paginationWidgetContainer;
    @UiField
    Button selectButton;
    @UiField
    Button cancelButton;
    @UiField
    Div evaluationListContainer;

    private Presenter presenter;
    Widget widget;

    public EvaluationFinderViewImpl() {
        widget = uiBinder.createAndBindUi(this);
        // Cancel just dismisses the modal without notifying the presenter.
        cancelButton.addClickHandler(event -> {
            modal.hide();
        });
        // Select delegates to the presenter, which owns the selection logic.
        selectButton.addClickHandler(event -> {
            presenter.onOk();
        });
    }

    @Override
    public void setPresenter(Presenter presenter) {
        this.presenter = presenter;
    }

    /** Replaces the evaluation list widget. */
    @Override
    public void setEvaluationList(IsWidget w) {
        evaluationListContainer.clear();
        evaluationListContainer.add(w);
    }

    /** Replaces the pagination widget. */
    @Override
    public void setPaginationWidget(IsWidget w) {
        paginationWidgetContainer.clear();
        paginationWidgetContainer.add(w);
    }

    @Override
    public Widget asWidget() {
        return widget;
    }

    @Override
    public void show() {
        modal.show();
    }

    @Override
    public void hide() {
        modal.hide();
    }

    /** Replaces the alert widget shown for errors/warnings. */
    @Override
    public void setSynAlert(IsWidget w) {
        synAlertContainer.clear();
        synAlertContainer.add(w);
    }
}
|
mahaplatform/mahaplatform.com
|
src/apps/datasets/admin/views/records/show/details.js
|
<reponame>mahaplatform/mahaplatform.com
import Content from '@apps/forms/admin/tokens/content'
import { Audit, Comments, List, Panel } from '@admin'
import PropTypes from 'prop-types'
import React from 'react'
// Read-only details view for a single dataset record: shows the dataset
// title, the record type, then one row per field value.
class Details extends React.PureComponent {

  static contextTypes = {
    router: PropTypes.object
  }

  static propTypes = {
    dataset: PropTypes.object,
    fields: PropTypes.array,
    record: PropTypes.object,
    type: PropTypes.object
  }

  render() {
    return <List { ...this._getList() } />
  }

  // Builds the <List/> props: two fixed rows (Dataset, Type) followed by a
  // row for each field, rendered through the shared <Content/> token.
  _getList() {
    const { dataset, fields, record, type } = this.props
    return {
      items: [
        { label: 'Dataset', content: dataset.title },
        { label: 'Type', content: type.title },
        ...fields.map(field => ({
          label: field.name.value, content: <Content data={ record.values } field={ field } />
        }))
      ]
    }
  }

}
// Wraps <Details/> in the admin Panel chrome, with an "Edit Record" task
// entry (its modal is currently a placeholder <div/>).
const mapPropsToPanel = (props, context) => ({
  title: 'Record',
  panel: <Details dataset={ props.dataset } fields={ props.fields } type={ props.type } record={ props.record } />,
  tasks: {
    icon: 'ellipsis-v',
    items: [
      { label: 'Edit Record', modal: <div /> }
    ]
  }
})

export default Panel(null, mapPropsToPanel)
|
wayshall/onetwo
|
core/modules/boot/src/main/java/org/onetwo/boot/plugin/core/ByPluginNameEanbledCondition.java
|
<filename>core/modules/boot/src/main/java/org/onetwo/boot/plugin/core/ByPluginNameEanbledCondition.java<gh_stars>10-100
package org.onetwo.boot.plugin.core;
import org.onetwo.boot.core.condition.EnabledKeyCondition;
import org.onetwo.common.reflect.ReflectUtils;
import org.onetwo.common.spring.SpringUtils;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.env.Environment;
import org.springframework.core.type.AnnotatedTypeMetadata;
/**
* @author wayshall
* <br/>
*/
/**
 * Spring condition that enables a bean only when the property
 * {@code <property>.<pluginName>.enabled} is set; plugins are opt-in
 * (absent property means disabled).
 *
 * @author wayshall
 * <br/>
 */
public class ByPluginNameEanbledCondition extends EnabledKeyCondition {

    // Lazily-created plugin instance used to resolve the plugin name.
    private WebPlugin webPlugin;
    // Optional key prefix, e.g. "jfish."; currently empty.
    private final String prefix = "";

    public ByPluginNameEanbledCondition() {
        super();
        // Default to disabled so plugins must be explicitly switched on.
        this.setDefaultEnabledValue(false);
    }

    /**
     * Builds the enabling property key, e.g. {@code jfish.swagger.<pluginName>.enabled}.
     * (Parameter name fixed from the original misspelling "attrubutes".)
     */
    @Override
    protected String getEnabledKey(Environment environment, AnnotationAttributes attributes) {
        PluginMeta meta = parsePlugin(attributes).getPluginMeta();
        String property = attributes.getString("property");
        return prefix + property + "." + meta.getName() + ".enabled";
    }

    /** Resolves annotation attributes with @AliasFor support. */
    protected AnnotationAttributes getAnnotationAttributes(AnnotatedTypeMetadata metadata) {
        return SpringUtils.getAnnotationAttributes(metadata, EnabledByPluginNameProperty.class);
    }

    /**
     * Instantiates (once) the plugin class named in the annotation's
     * {@code pluginClass} attribute and caches it on this condition.
     */
    private WebPlugin parsePlugin(AnnotationAttributes attributes) {
        WebPlugin plugin = this.webPlugin;
        if (plugin == null) {
            Class<? extends WebPlugin> pluginClass = attributes.getClass("pluginClass");
            plugin = ReflectUtils.newInstance(pluginClass);
            this.webPlugin = plugin;
        }
        return plugin;
    }
}
|
itsmevanessi/Competitive-Programming
|
Codeforces/Codeforces Global Round 4/A.cpp
|
<reponame>itsmevanessi/Competitive-Programming<filename>Codeforces/Codeforces Global Round 4/A.cpp
#include <bits/stdc++.h>
using namespace std;
// parties: (votes, 1-based index) for every party except Alice's;
// ans: the coalition built so far, always starting with Alice (index 1).
vector< pair<int, int> > parties, ans;

// Prints the coalition: its size, then the party indices, and returns.
// Extracted to remove the duplicated print block from main().
static void printCoalition() {
    printf("%d\n", (int)ans.size());
    for (auto z : ans) {
        printf("%d ", z.second);
    }
}

int main(void) {
    int p, t, all = 0, alice = 0, col = 0;
    scanf("%d", &p);
    for (int i = 0; i < p; ++i) {
        scanf("%d", &t);
        all += t;
        if (i > 0) {
            parties.push_back(make_pair(t, i + 1));
        } else if (i == 0) {
            // Party 1 is Alice's: its vote count seeds all three totals.
            all = alice = col = t;
        }
    }
    // Greedily add the largest parties that Alice still dominates
    // (at most half her own votes) until the coalition holds a strict
    // majority of all votes.
    sort(parties.rbegin(), parties.rend());
    ans.push_back(make_pair(alice, 1));
    for (auto x : parties) {
        if (col > all / 2) {
            printCoalition();
            return 0;
        }
        if (x.first <= alice / 2) {
            ans.push_back(x);
            col += x.first;
        }
    }
    if (col > all / 2) {
        printCoalition();
        return 0;
    }
    // No valid coalition exists.
    puts("0");
}
|
TheSoftwareFactory/lokki-android
|
App/src/main/java/cc/softwarefactory/lokki/android/services/ApiService.java
|
<gh_stars>1-10
package cc.softwarefactory.lokki.android.services;
import android.content.Context;
import android.util.Log;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import cc.softwarefactory.lokki.android.constants.Constants;
import cc.softwarefactory.lokki.android.utilities.PreferenceUtils;
/**
* Base class for all API services. Implementing classes should handle objects CRUD-operations and cache.
*/
public abstract class ApiService {
protected Context context;
public static String apiUrl = Constants.API_URL;
abstract String getTag();
abstract String getCacheKey();
public ApiService(Context context) {
this.context = context;
}
private String generateUrl(String urlSuffix) {
String userId = PreferenceUtils.getString(context, PreferenceUtils.KEY_USER_ID);
String url = apiUrl + "user/" + userId + "/" + urlSuffix;
return url;
}
private void authorize(AjaxCallback<String> callback) {
String authorizationToken = PreferenceUtils.getString(context, PreferenceUtils.KEY_AUTH_TOKEN);
callback.header("authorizationtoken", authorizationToken);
}
protected void createAjax(String methodName, String uri, AjaxCallback<String> callback) {
Log.d(getTag(), uri);
String url = generateUrl(uri);
authorize(callback);
try {
Method method = AQuery.class.getMethod(methodName, String.class, Class.class, AjaxCallback.class);
method.invoke(new AQuery(context), url, String.class, callback);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
Log.e(getTag(), "Reflecting ajax method '" + methodName + "' failed");
e.printStackTrace();
}
}
protected void createAjaxWithBody(String methodName, String uri, AjaxCallback<String> callback, JSONObject body) {
Log.d(getTag(), uri);
String url = generateUrl(uri);
authorize(callback);
try {
Method method = AQuery.class.getMethod(methodName, String.class, JSONObject.class, Class.class, AjaxCallback.class);
method.invoke(new AQuery(context), url, body, String.class, callback);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
Log.e(getTag(), "Reflecting ajax method failed");
e.printStackTrace();
}
}
protected void get(String uri, AjaxCallback<String> callback) {
createAjax("ajax", uri, callback);
}
protected void put(String uri, JSONObject param, AjaxCallback<String> callback) {
createAjaxWithBody("put", uri, callback, param);
}
//TODO construct a put without body using HttpEntity instead of sending empty object
protected void put(String uri, AjaxCallback<String> callback) {
createAjaxWithBody("put", uri, callback, new JSONObject());
}
protected void delete(String uri, AjaxCallback<String> callback) {
createAjax("delete", uri, callback);
}
/**
 * Issues an authorized POST request with a JSON body.
 *
 * NOTE(review): the declared JsonProcessingException/JSONException are not
 * thrown by the visible call chain; confirm whether callers catch them before
 * removing (dropping checked exceptions can break caller catch blocks).
 *
 * @param uri      endpoint suffix relative to the current user
 * @param param    JSON request body
 * @param callback callback receiving the raw String response
 */
protected void post(String uri, JSONObject param, AjaxCallback<String> callback) throws JsonProcessingException, JSONException {
    createAjaxWithBody("post", uri, callback, param);
}
/**
 * Stores the given serialized JSON payload in shared preferences under this
 * service's cache key.
 *
 * @param json serialized response to cache
 */
public void updateCache(String json) {
    PreferenceUtils.setString(context, getCacheKey(), json);
}
}
|
schallerdavid/perses
|
perses/dispersed/smc.py
|
<gh_stars>10-100
import openmmtools.cache as cache
import os
import copy
from perses.dispersed.utils import *
from openmmtools.states import ThermodynamicState, CompoundThermodynamicState, SamplerState
import numpy as np
import mdtraj as md
import simtk.unit as unit
import logging
import time
from collections import namedtuple
from perses.annihilation.lambda_protocol import LambdaProtocol
from perses.annihilation.lambda_protocol import RelativeAlchemicalState
from perses.dispersed import *
import random
import pymbar
from perses.dispersed.parallel import Parallelism
from openmmtools import utils
# Instantiate logger.
# NOTE(review): calling basicConfig at import time configures the root logger
# for any application importing this module -- confirm this is intended.
logging.basicConfig(level = logging.NOTSET)
_logger = logging.getLogger("sMC")
_logger.setLevel(logging.INFO)
# Pin the global OpenMM context cache to the fastest available platform.
cache.global_context_cache.platform = configure_platform(utils.get_fastest_platform().getName())
# Work unit for equilibrium FEP tasks.
# NOTE(review): the typename 'EquilibriumInput' does not match the variable name
# EquilibriumFEPTask; renaming would change repr/pickle-by-name -- confirm first.
EquilibriumFEPTask = namedtuple('EquilibriumInput', ['sampler_state', 'inputs', 'outputs'])
# Tolerance constant; not referenced in this chunk -- presumably used elsewhere
# in the module for comparing distributed results (TODO confirm).
DISTRIBUTED_ERROR_TOLERANCE = 1e-4
class SequentialMonteCarlo():
    """
    This class represents an sMC particle that runs a nonequilibrium switching protocol.
    It is a batteries-included engine for conducting sequential Monte Carlo sampling.

    WARNING: take care in writing trajectory file as saving positions to memory is costly. Either do not write the configuration or save sparse positions.
    """
    # Resampling scheme name -> resampling function (imported from perses.dispersed.utils).
    supported_resampling_methods = {'multinomial': multinomial_resample}
    # Observable name -> callable; used as trailblazing/resampling criteria.
    supported_observables = {'ESS': ESS, 'CESS': CESS}
    def __init__(self,
                 factory,
                 lambda_protocol = 'default',
                 temperature = 300 * unit.kelvin,
                 trajectory_directory = 'test',
                 trajectory_prefix = 'out',
                 atom_selection = 'not water',
                 timestep = 1 * unit.femtoseconds,
                 collision_rate = 1 / unit.picoseconds,
                 eq_splitting_string = 'V R O R V',
                 neq_splitting_string = 'V R O R V',
                 ncmc_save_interval = None,
                 measure_shadow_work = False,
                 neq_integrator = 'langevin',
                 compute_endstate_correction = True,
                 external_parallelism = None,
                 internal_parallelism = {'library': ('dask', 'LSF'),
                                         'num_processes': 2}
                 ):
        """
        Parameters
        ----------
        factory : perses.annihilation.relative.HybridTopologyFactory - compatible object
        lambda_protocol : str, default 'default'
            the flavor of scalar lambda protocol used to control electrostatic, steric, and valence lambdas
        temperature : float unit.Quantity
            Temperature at which to perform the simulation, default 300K
        trajectory_directory : str, default 'test'
            Where to write out trajectories resulting from the calculation. If None, no writing is done.
        trajectory_prefix : str, default None
            What prefix to use for this calculation's trajectory files. If none, no writing is done.
        atom_selection : str, default not water
            MDTraj selection syntax for which atomic coordinates to save in the trajectories. Default strips
            all water.
        timestep : float unit.Quantity, default 1 * units.femtoseconds
            the timestep for running MD
        collision_rate : float unit.Quantity, default 1 / unit.picoseconds
            the collision rate for running MD
        eq_splitting_string : str, default 'V R O R V'
            The integrator splitting to use for equilibrium simulation
        neq_splitting_string : str, default 'V R O R V'
            The integrator splitting to use for nonequilibrium switching simulation
        ncmc_save_interval : int, default None
            interval with which to write ncmc trajectory. If None, trajectory will not be saved.
            We will assert that the n_lambdas % ncmc_save_interval = 0; otherwise, the protocol will not be complete
        measure_shadow_work : bool, default False
            whether to measure the shadow work of the integrator.
            WARNING : this is not currently supported
        neq_integrator : str, default 'langevin'
            which integrator to use
        compute_endstate_correction : bool, default True
            whether to compute the importance weight to the alchemical endstates
        external_parallelism : dict('parallelism': perses.dispersed.parallel.Parallelism, 'available_workers': list(str)), default None
            an external parallelism dictionary;
            external_parallelism is used if the entire SequentialMonteCarlo class is allocated workers by an external client (i.e.
            there exists a Parallelism.client object that is allocating distributed workers to several SequentialMonteCarlo classes simultaneously)
        internal_parallelism : dict, default {'library': ('dask', 'LSF'), 'num_processes': 2}
            dictionary of parameters to instantiate a client and run parallel computation internally. internal parallelization is handled by default
            if None, external worker arguments have to be specified, otherwise, no parallel computation will be conducted, and annealing will be conducted locally.
            internal_parallelism is used when the SequentialMonteCarlo class is allowed to create its own Parallelism.client object to allocate workers on a
            cluster.
        """
        # NOTE(review): internal_parallelism is a mutable (dict) default argument;
        # this is safe only as long as it is never mutated in place -- consider
        # switching to None with an in-body default.
        _logger.info(f"Initializing SequentialMonteCarlo")
        #pull necessary attributes from factory
        self.factory = factory
        #context cache
        self.context_cache = cache.global_context_cache
        #use default protocol
        self.lambda_protocol = lambda_protocol
        #handle both eq and neq parameters
        self.temperature = temperature
        self.timestep = timestep
        self.collision_rate = collision_rate
        self.measure_shadow_work = measure_shadow_work
        self.neq_integrator = neq_integrator
        # shadow-work measurement is explicitly unsupported; fail fast.
        if measure_shadow_work:
            raise Exception(f"measure_shadow_work is not currently supported. Aborting!")
        #handle equilibrium parameters
        self.eq_splitting_string = eq_splitting_string
        #handle storage and names
        self.trajectory_directory = trajectory_directory
        self.trajectory_prefix = trajectory_prefix
        self.atom_selection = atom_selection
        #handle neq parameters
        self.neq_splitting_string = neq_splitting_string
        self.ncmc_save_interval = ncmc_save_interval
        #lambda states: endstate lambda values for each annealing direction
        self.lambda_endstates = {'forward': [0.0,1.0], 'reverse': [1.0, 0.0]}
        #instantiate trajectory filenames (writing requires both a directory and a prefix)
        if self.trajectory_directory and self.trajectory_prefix:
            self.write_traj = True
            self.eq_trajectory_filename = {lambda_state: os.path.join(os.getcwd(), self.trajectory_directory, f"{self.trajectory_prefix}.eq.lambda_{lambda_state}.h5") for lambda_state in self.lambda_endstates['forward']}
            self.neq_traj_filename = {direct: os.path.join(os.getcwd(), self.trajectory_directory, f"{self.trajectory_prefix}.neq.lambda_{direct}") for direct in self.lambda_endstates.keys()}
            self.topology = self.factory.hybrid_topology
        else:
            self.write_traj = False
            # NOTE(review): keys are ints here vs. floats (0.0/1.0) in the branch
            # above; Python dict lookup treats 0 == 0.0, so this is benign.
            self.eq_trajectory_filename = {0: None, 1: None}
            self.neq_traj_filename = {'forward': None, 'reverse': None}
            self.topology = None
        # subset the topology appropriately:
        self.atom_selection_string = atom_selection
        if self.atom_selection_string is not None:
            atom_selection_indices = self.factory.hybrid_topology.select(self.atom_selection_string)
            self.atom_selection_indices = atom_selection_indices
        else:
            self.atom_selection_indices = None
        # instantiating equilibrium file/rp collection dicts
        self._eq_dict = {0: [], 1: [], '0_decorrelated': None, '1_decorrelated': None, '0_reduced_potentials': [], '1_reduced_potentials': []}
        self._eq_files_dict = {0: [], 1: []}
        self._eq_timers = {0: [], 1: []}
        self._neq_timers = {'forward': [], 'reverse': []}
        #instantiate nonequilibrium work dicts: the keys indicate from which equilibrium thermodynamic state the neq_switching is conducted FROM (as opposed to TO)
        self.cumulative_work = {'forward': [], 'reverse': []}
        self.incremental_work = copy.deepcopy(self.cumulative_work)
        self.shadow_work = copy.deepcopy(self.cumulative_work)
        self.nonequilibrium_timers = copy.deepcopy(self.cumulative_work)
        self.total_jobs = 0
        #self.failures = copy.deepcopy(self.cumulative_work)
        self.dg_EXP = copy.deepcopy(self.cumulative_work)
        self.dg_BAR = None
        #instantiate thermodynamic state at lambda = 0 with the chosen lambda protocol
        lambda_alchemical_state = RelativeAlchemicalState.from_system(self.factory.hybrid_system)
        lambda_alchemical_state.set_alchemical_parameters(0.0, LambdaProtocol(functions = self.lambda_protocol))
        self.thermodynamic_state = CompoundThermodynamicState(ThermodynamicState(self.factory.hybrid_system, temperature = self.temperature),composable_states = [lambda_alchemical_state])
        # set the SamplerState for the lambda 0 and 1 equilibrium simulations
        sampler_state = SamplerState(self.factory.hybrid_positions,
                                     box_vectors=self.factory.hybrid_system.getDefaultPeriodicBoxVectors())
        self.sampler_states = {0: copy.deepcopy(sampler_state), 1: copy.deepcopy(sampler_state)}
        #endstate corrections?
        self.compute_endstate_correction = compute_endstate_correction
        #implement the appropriate parallelism
        self.implement_parallelism(external_parallelism = external_parallelism,
                                   internal_parallelism = internal_parallelism)
def implement_parallelism(self, external_parallelism, internal_parallelism):
"""
Function to implement the approprate parallelism given input arguments.
This is exposed as a method in case the class already exists and the user wants to change the parallelism scheme.
Parameters
----------
external_parallelism : dict('parallelism': perses.dispersed.parallel.Parallelism, 'available_workers': list(str)), default None
an external parallelism dictionary
internal_parallelism : dict, default {'library': ('dask', 'LSF'), 'num_processes': 2}
dictionary of parameters to instantiate a client and run parallel computation internally. internal parallelization is handled by default
if None, external worker arguments have to be specified, otherwise, no parallel computation will be conducted, and annealing will be conducted locally.
"""
#parallelism implementables
if external_parallelism is not None and internal_parallelism is not None:
raise Exception(f"external parallelism were given, but an internal parallelization scheme was also specified. Aborting!")
if external_parallelism is not None:
self.external_parallelism, self.internal_parallelism = True, False
self.parallelism, self.workers = external_parallelism['parallelism'], external_parallelism['workers']
self.parallelism_parameters = None
assert self.parallelism.client is not None, f"the external parallelism class has not yet an activated client."
elif internal_parallelism is not None:
self.external_parallelism, self.internal_parallelism = False, True
self.parallelism, self.workers = Parallelism(), internal_parallelism['num_processes']
self.parallelism_parameters = internal_parallelism
else:
_logger.warning(f"both internal and external parallelisms are unspecified. Defaulting to not_parallel.")
self.external_parallelism, self.internal_parallelism = False, True
self.parallelism_parameters = {'library': None, 'num_processes': None}
self.parallelism, self.workers = Parallelism(), 0
if external_parallelism is not None and internal_parallelism is not None:
raise Exception(f"external parallelism were given, but an internal parallelization scheme was also specified. Aborting!")
def _activate_annealing_workers(self):
"""
wrapper to distribute workers and create appropriate worker attributes for annealing
"""
_logger.debug(f"activating annealing workers...")
if self.internal_parallelism:
_logger.debug(f"found internal parallelism; activating client with the following parallelism parameters: {self.parallelism_parameters}")
#we have to activate the client
self.parallelism.activate_client(library = self.parallelism_parameters['library'],
num_processes = self.parallelism_parameters['num_processes'])
workers = list(self.parallelism.workers.values())
elif self.external_parallelism:
#the client is already active
workers = self.parallelism_parameters['available_workers']
else:
raise Exception(f"either internal or external parallelism must be True.")
#now client.run to broadcast the vars
broadcast_remote_worker = 'remote' if self.parallelism.client is not None else self
addresses = self.parallelism.run_all(func = activate_LocallyOptimalAnnealing, #func
arguments = (copy.deepcopy(self.thermodynamic_state), #arg: thermodynamic state
broadcast_remote_worker, #arg: remote worker
self.lambda_protocol, #arg: lambda protocol
self.timestep, #arg: timestep
self.collision_rate, #arg: collision_rate
self.temperature, #arg: temperature
self.neq_splitting_string, #arg: neq_splitting string
self.ncmc_save_interval, #arg: ncmc_save_interval
self.topology, #arg: topology
self.atom_selection_indices, #arg: subset atoms
self.measure_shadow_work, #arg: measure_shadow_work
self.neq_integrator, #arg: integrator,
self.compute_endstate_correction #arg: compute_endstate_correction
),
workers = workers) #workers
def _deactivate_annealing_workers(self):
"""
wrapper to deactivate workers and delete appropriate worker attributes for annealing
"""
if self.internal_parallelism:
_logger.debug(f"\t\tfound internal parallelism; deactivating client.")
#we have to deactivate the client
if self.parallelism.client is None:
#then we are running local annealing
deactivate_worker_attributes(remote_worker = self)
self.parallelism.deactivate_client()
elif self.external_parallelism:
#the client is already active; we don't have the authority to deactivate
workers = self.parallelism_parameters['available_workers']
pass_remote_worker = 'remote' if self.parallelism.client is not None else self
deactivate_worker_attributes(remote_worker = pass_remote_worker)
else:
raise Exception(f"either internal or external parallelism must be True.")
    def AIS(self,
            num_particles,
            protocols = {'forward': np.linspace(0,1, 1000), 'reverse': np.linspace(1,0,1000)},
            num_integration_steps = 1,
            return_timer = False,
            rethermalize = False):
        """
        Conduct vanilla AIS (i.e. nonequilibrium switching FEP) with a given protocol (for each direction), specified annealing time per lambda, and support for rethermalization (i.e. velocity resampling)
        NOTE: AIS is NaN-safe

        Results are stored on self (particle_failures, endstate_corrections,
        sMC_timers) and free energies are computed via self.compute_sMC_free_energy.

        Parameters
        ----------
        num_particles : int
            number of particles to run in each direction
        protocols : dict of {direction : np.array}, default {'forward': np.linspace(0,1, 1000), 'reverse': np.linspace(1,0,1000)},
            the dictionary of forward and reverse protocols. if None, the protocols will be trailblazed.
        num_integration_steps : int
            number of integration steps per proposal
        return_timer : bool, default False
            whether to time the annealing protocol
        rethermalize : bool, default False
            whether to rethermalize velocities after proposal
        """
        _logger.debug(f"conducting vanilla AIS")
        directions = list(protocols.keys())
        for _direction in directions:
            assert _direction in ['forward', 'reverse'], f"direction {_direction} is not an appropriate direction"
        self.protocols = protocols
        self._activate_annealing_workers()
        if self.internal_parallelism:
            workers = None
        elif self.external_parallelism:
            # NOTE(review): parallelism_parameters is set to None in external mode by
            # implement_parallelism, so this line looks like a TypeError waiting to
            # happen; presumably self.workers was intended -- confirm.
            workers = self.parallelism_parameters['available_workers']
        _logger.debug(f"\tin choosing the remote worker, the parallelism client is: {self.parallelism.client}")
        remote_worker = 'remote' if self.parallelism.client is not None else self
        _logger.debug(f"\tthe remote worker is: {remote_worker}")
        sMC_futures = {_direction: None for _direction in directions} # initialize futures with None objects (we only collect these once)
        # pull starting equilibrium snapshots (one per particle) at each direction's starting lambda
        sMC_sampler_states = {_direction: np.array([self.pull_trajectory_snapshot(int(self.protocols[_direction][0])) for _ in range(num_particles)]) for _direction in directions}
        #Note: we can also add functionality to launch jobs on-the-fly, but for now we just randomly pull equilibrium snapshots from a pre-computed equilibrium distribution
        self.sMC_timers = {_direction: None for _direction in directions} #the timers are collected once per particle
        sMC_cumulative_works = {_direction : None for _direction in directions} #again, theses are only collected once
        worker_retrieval = {} #this is an on-the-fly timer for each direction...
        self.particle_failures = {_direction: None for _direction in directions} #log the particle failures
        self.endstate_corrections = {_direction: None for _direction in directions} # log the endstate corrections
        for _direction in directions:
            worker_retrieval[_direction] = time.time()
            _logger.info(f"entering {_direction} direction to launch annealing jobs.")
            #make iterable lists for anneal deployment; positional order must match call_anneal_method's signature
            iterables = []
            iterables.append([remote_worker] * num_particles) #remote_worker
            iterables.append(list(sMC_sampler_states[_direction])) #sampler_state
            iterables.append([self.protocols[_direction]] * num_particles) #lambdas
            iterables.append([None]*num_particles) #noneq_trajectory_filename
            iterables.append([num_integration_steps] * num_particles) #num_integration_steps
            iterables.append([return_timer] * num_particles) #return timer
            iterables.append([False] * num_particles) # presumably return_sampler_state (sMC passes True here) -- TODO confirm
            iterables.append([rethermalize] * num_particles) #rethermalize
            _compute_incremental_works = [True] * num_particles #only compute incremental work remotely if it is AIS
            iterables.append(_compute_incremental_works) # whether to compute incremental works
            for job in range(num_particles):
                if self.ncmc_save_interval is not None: #check if we should make 'trajectory_filename' not None
                    iterables[3][job] = self.neq_traj_filename[_direction] + f".iteration_{job:04}.h5"
            scattered_futures = [self.parallelism.scatter(iterable) for iterable in iterables]
            sMC_futures[_direction] = self.parallelism.deploy(func = call_anneal_method,
                                                              arguments = tuple(scattered_futures),
                                                              workers = workers)
            assert len(sMC_futures[_direction]) == num_particles, f"num_particles ({num_particles}) and the length of futures ({len(sMC_futures[_direction])}) do not match"
        #collect futures into one list and see progress
        all_futures = [item for sublist in list(sMC_futures.values()) for item in sublist]
        self.parallelism.progress(futures = all_futures)
        #now we collect the finished futures
        _collected_observables = {}
        for _direction in directions:
            _logger.debug(f"collecting annealing jobs in direction {_direction}...")
            # omit_errors drops failed futures (errors are flagged per-particle via _passes below)
            _futures = self.parallelism.gather_results(futures = sMC_futures[_direction], omit_errors = True)
            if remote_worker == 'remote':
                assert len(_futures) == num_particles, f"num_particles ({num_particles}) and the length of the collected futures ({len(_futures)}) do not match. _all_anneal_method is supposed to be safe!"
            #collect tuple results: (incremental_work, sampler_state, timer, pass_flag, endstate_correction)
            _incremental_works = [_iter[0] for _iter in _futures]
            _sampler_states = [_iter[1] for _iter in _futures]
            _timers = [_iter[2] for _iter in _futures]
            _passes = [_iter[3] for _iter in _futures]
            return_endstate_corrections = [_iter[4] for _iter in _futures]
            # separate passing and failing particles (NaN-safety: failures are logged, not raised)
            pass_indices = [index for index in range(num_particles) if _passes[index] == True]
            successful_incremental_works = [item for index, item in enumerate(_incremental_works) if _passes[index] == True]
            failed_annealing_jobs = [index for index, item in enumerate(_incremental_works) if _passes[index] == False]
            assert all(q is not None for q in successful_incremental_works), f"all passing annealing jobs have been filtered but are still returning NoneType objects"
            _logger.debug(f"\tfailed annealing jobs: {failed_annealing_jobs}")
            self.particle_failures[_direction] = failed_annealing_jobs if len(failed_annealing_jobs) > 0 else None
            self.endstate_corrections[_direction] = [item for index, item in enumerate(return_endstate_corrections) if _passes[index] == True]
            #append the incremental works (prepend a zero column so cumsum starts from 0)
            _logger.debug(f"\tincremental works for direction {_direction}: {np.array(_incremental_works).shape}")
            _concatenated_incremental_work = np.concatenate((np.array([np.zeros(len(successful_incremental_works))]).T, np.array(successful_incremental_works)), axis = 1)
            sMC_cumulative_works[_direction] = np.cumsum(_concatenated_incremental_work, axis = 1)
            _logger.debug(f"\tsMC cumulative works for direction {_direction}: {sMC_cumulative_works[_direction]}")
            # sanity check: work variance should grow (or stay flat) along the protocol
            assert np.std(sMC_cumulative_works[_direction][:,0]) <= np.std(sMC_cumulative_works[_direction][:,-1]), f"the variance of the particle weights is not increasing..."
            #append the _timers
            self.sMC_timers[_direction] = _timers if return_timer else None
            print(f"\t{_direction} retrieval time: {time.time() - worker_retrieval[_direction]}")
        _logger.debug(f"deactivating annealing workers...")
        self._deactivate_annealing_workers()
        self.compute_sMC_free_energy(sMC_cumulative_works)
        _logger.debug(f"terminating AIS successfully!")
def sMC(self,
num_particles,
protocols = {'forward': np.linspace(0,1, 1000), 'reverse': np.linspace(1,0,1000)},
trailblaze = None,
resample = None,
directions = ['forward','reverse'],
num_integration_steps = 1,
return_timer = False,
rethermalize = False):
"""
Conduct SequentialMonteCarlo sampling with a trailblazed protocol. Resampling is supported.
Parameters
----------
num_particles : int
number of particles to run in each direction
protocols : dict of {direction : np.array}, default {'forward': np.linspace(0,1, 1000), 'reverse': np.linspace(1,0,1000)},
the dictionary of forward and reverse protocols. if None, the protocols will be trailblazed.
trailblaze : dict, default None
which observable/criterion to use for trailblazing and the threshold
if None, trailblazing is not conducted;
else: the dict must have the following format:
{'criterion': str, 'threshold': float}
resample : dict, default None
the resample dict specifies the resampling criterion and threshold, as well as the resampling method used. if None, no resampling is conduced;
otherwise, the resample dict must take the following form:
{'criterion': str, 'method': str, 'threshold': float}
the directions to run.
num_integration_steps : int
number of integration steps per proposal
return_timer : bool, default False
whether to time the annealing protocol
rethermalize : bool, default False
whether to rethermalize velocities after proposal
"""
_logger.debug(f"conducting generalized sMC...")
_logger.debug(f"conducting argument assertions...")
#direction assertions
for _direction in directions:
assert _direction in ['forward', 'reverse'], f"direction {_direction} is not an appropriate direction"
starting_lines, finish_lines = {}, {}
#check protocols:
if protocols is not None:
assert trailblaze is None, f"neither 'protocols' nor 'trailblaze' is None. Conflict!"
assert all(_key in directions for _key in protocols), f"{_key} is not a supported direction. Supported directions include {directions}"
assert all(type(_val) == np.ndarray for _val in protocols.values()), f"all dictionary values in 'protocols' must be np.ndarrays"
_trailblaze = False
else:
_logger.debug(f"protocols is None; attempting to parse 'trailblaze'")
assert trailblaze is not None, f"both 'protocols' and 'trailblaze' are None; there is no annealing to conduct."
if trailblaze is not None:
assert set(trailblaze.keys()) == set(['criterion', 'threshold']), f"the trailblaze keys are not supported"
assert trailblaze['criterion'] in list(self.supported_observables.keys()), f"the specified trailblazing criterion is not supported"
assert type(trailblaze['threshold']) == float, f"the specified trailblaze threshold is not a float"
_trailblaze = True
#create end-to-ends
_logger.debug(f"conducting end-to-end builds...")
if 'forward' in directions:
assert protocols['forward'][0] == 0.0 and protocols['forward'][-1] == 1.0, f"the forward protocol must start at 0.0 and end at 1.0"
starting_lines['forward'] = 0.0
finish_lines['forward'] = 1.0
if 'reverse' in directions:
assert protocols['reverse'][0] == 1.0 and protocols['reverse'][-1] == 0.0, f"the reverse protocol must start at 1.0 and end at 0.0"
starting_lines['reverse'] = 1.0
finish_lines['reverse'] = 0.0
#check resample
if resample is not None:
_logger.debug(f"resample is not None; conducting resampling assertions...")
assert set(resample.keys()) == set(['criterion', 'method', 'threshold']), f"'resample does not contain the appropriate keys. see documentation'"
assert resample['criterion'] in list(self.supported_observables.keys()), f"the specified resampling criterion is not supported"
assert resample['method'] in list(self.supported_resampling_methods), f"the specified resampling method is not supported."
assert type(resample['threshold'] == float), f"the resampling thrshold must be a float"
_resample = True
else:
_logger.debug(f"resampling is None")
_resample = False
#initialize the new protocols
_logger.debug(f"initializing protocols...")
self.protocols = {_direction : [starting_lines[_direction]] for _direction in directions}
_logger.debug(f"\tinitial protocols: {self.protocols}")
_logger.debug(f"activating annealing workers")
self._activate_annealing_workers()
if self.internal_parallelism:
workers = None
elif self.external_parallelism:
workers = self.parallelism_parameters['available_workers']
_logger.debug(f"\tin choosing the remote worker, the parallelism client is: {self.parallelism.client}")
remote_worker = 'remote' if self.parallelism.client is not None else self
_logger.debug(f"\tthe remote worker is: {remote_worker}")
sMC_futures = {_direction: None for _direction in directions}
_logger.debug(f"\tsMC_futures: {sMC_futures}")
sMC_sampler_states = {_direction: np.array([self.pull_trajectory_snapshot(int(self.protocols[_direction][0])) for _ in range(num_particles)]) for _direction in directions}
sMC_sampler_states = {_direction: None for _direction in directions}
_logger.debug(f"\tsMC_sampler_states: {sMC_sampler_states}")
sMC_timers = {_direction: [] for _direction in directions}
_logger.debug(f"sMC_timers: {sMC_timers}")
sMC_incremental_works = {_direction: None for _direction in directions}
_logger.debug(f"\tsMC_incremental_works: {sMC_incremental_works}")
sMC_cumulative_works = {_direction : [np.zeros(num_particles)] for _direction in directions}
_logger.debug(f"\tsMC_cumulative_works: {sMC_cumulative_works}")
sMC_observables = {_direction : [] for _direction in directions}
_logger.debug(f"\tsMC_observables: {sMC_observables}")
sMC_particle_ancestries = {_direction : [np.arange(num_particles)] for _direction in directions}
_logger.debug(f"\tsMC_particle_ancestries: {sMC_particle_ancestries}")
worker_retrieval = {}
_lambdas = {}
#now we can launch annealing jobs and manage them on-the-fly
current_lambdas = starting_lines
iteration_number = 0
_logger.debug(f"commencing annealing...")
while current_lambdas != finish_lines:
_logger.debug(f"entering iteration {iteration_number}; current_lambdas are {current_lambdas}")
start_timer = time.time()
#sample/resample
_logger.debug(f"\tattempting sampling/resampling...")
for _direction in directions:
if current_lambdas[_direction] == finish_lines[_direction]: #if this direction is done...
_logger.info(f"\tdirection {_direction} is complete. omitting resample.")
continue
if iteration_number == 0: #this is the first iteration and we have to pull sampler states unbiasedly
sMC_sampler_states.update({_direction: np.array([self.pull_trajectory_snapshot(int(self.protocols[_direction][0])) for _ in range(num_particles)])})
elif _resample:
_logger.debug(f"\tattempting to resample particles...")
#Note: the cumulative works we pull for resampling are not the last cumulative works, but the second to last.
#the last cumulative works are the penultimate cumulative works plus the incremental works;
#however, often, the resampling criteria (if conditional, require the separation of cumulative and incremental works)
#implicitly, self._resample will compute the ultimate cumulative works
normalized_observable_value, resampled_works, resampled_indices, resample_bool = self._resample(incremental_works = sMC_incremental_works[_direction],
cumulative_works = sMC_cumulative_works[_direction][-2],
observable = resample['criterion'],
resampling_method = resample['method'],
resample_observable_threshold = resample['threshold'])
if resample_bool:
_logger.debug(f"\tresample is True")
sMC_observables[_direction][-1] = normalized_observable_value #update the previous observables with the resampled observable
sMC_cumulative_works[_direction][-1] = resampled_works #update the ultimate cumulative work
#we need a deepcopy to prevent annealing over the same sampler state in a single iteration with local annealing
new_sampler_states = np.array([copy.deepcopy(sMC_sampler_states[_direction][i]) for i in resampled_indices])
sMC_sampler_states.update({_direction: new_sampler_states})
else:
#we don't need to update the ultimate observables, cumulative works, or sampler_states
pass
#however, we do need to update the particle ancestries...
new_particle_ancestries = np.array([sMC_particle_ancestries[_direction][-1][i] for i in resampled_indices])
sMC_particle_ancestries[_direction].append(new_particle_ancestries)
else: #we are not resampling
#the sampler states are updated by gathering the workers' sampler states
#the observables are calculated in the trailblaze attempt pass
#the cumulative works are unchanged
#we do not return particle ancestries if no resampling is conducted
_logger.debug(f"not resampling. omitting sMC updates")
#attempt to trailblaze lambdas and launch workers
_logger.debug(f"\tincrementing lambdas...")
for _direction in directions:
if current_lambdas[_direction] == finish_lines[_direction]: #if this direction is done...
_logger.debug(f"\tdirection {_direction} is complete. omitting trailblazing.")
continue
if _trailblaze:
_logger.debug(f"\ttrailblazing lambdas in {_direction} direction")
#gather sampler states and cumulative works in a concurrent manner (i.e. flatten them)
sampler_states = sMC_sampler_states[_direction]
cumulative_works = sMC_cumulative_works[_direction][-1]
if iteration_number == 0:
initial_guess = None
else:
initial_guess = min([2 * self.protocols[_direction][-1] - self.protocols[_direction][-2], 1.0]) if _direction == 'forward' else max([2 * self.protocols[_direction][-1] - self.protocols[_direction][-2], 0.0])
_new_lambda, normalized_observable, incremental_works = self.binary_search(sampler_states = sampler_states,
cumulative_works = cumulative_works,
start_val = current_lambdas[_direction],
end_val = finish_lines[_direction],
observable = self.supported_observables[trailblaze['criterion']],
observable_threshold = trailblaze['threshold'] * sMC_observables[_direction][-1],
initial_guess = initial_guess)
sMC_incremental_works.update({_direction: incremental_works})
_logger.info(f"\t\tlambda increments: {current_lambdas[_direction]} to {_new_lambda}.")
_logger.info(f"\t\tnormalized observable: {normalized_observable}. Observable threshold is {trailblaze['threshold'] * sMC_observables[_direction][-1]}")
self.protocols[_direction].append(_new_lambda)
sMC_observables[_direction].append(normalized_observable)
_lambdas.update({_direction: np.array([current_lambdas[_direction], _new_lambda])})
#the current lambdas will be updated at the end of the loop
else:
start_val, end_val = self.protocols[_direction][iteration_number], self.protocols[_direction][iteration_number + 1]
_logger.debug(f"\tnot trailblazing; annealing lambda from {start_val} to {end_val}")
self.thermodynamic_state.set_alchemical_parameters(start_val, LambdaProtocol(functions = self.lambda_protocol))
current_rps = np.array([compute_reduced_potential(self.thermodynamic_state, sampler_state) for sampler_state in sampler_states])
#if we are not trailblazing, then the local observable is computed from the resampling observable
normalized_observable, incremental_works = compute_lambda_increment(new_val, sMC_sampler_states[_direction], resample['criterion'], current_rps, sMC_cumulative_works[_direction][-1])
sMC_incremental_works.update({_direction: incremental_works})
sMC_observables[_direction].append(normalized_observable)
_lambdas.update({_direction: np.array(start_val, end_val)})
#the current lambdas will be updated at the end of the loop
#now we want to execute distributed/local annealing depending on the remote worker
_logger.debug(f"\tconducting annealing execution...")
for _direction in directions:
if current_lambdas[_direction] == finish_lines[_direction]:
_logger.info(f"\tdirection {_direction} is complete. omitting annealing")
continue
worker_retrieval[_direction] = time.time()
_logger.info(f"\t\tentering {_direction} direction to launch annealing jobs.")
_logger.info(f"\t\tthe current lambdas for annealing are {_lambdas[_direction]}")
#make construct iterable list for distributed annealing
iterables = []
iterables.append([remote_worker] * num_particles) #remote_worker
iterables.append(list(sMC_sampler_states[_direction])) #sampler_state
iterables.append([_lambdas[_direction]] * num_particles) #lambdas
iterables.append([None]*num_particles) #noneq_trajectory_filename
iterables.append([num_integration_steps] * num_particles) #num_integration_steps
iterables.append([return_timer] * num_particles) #return timer
iterables.append([True] * num_particles) #return_sampler_state
iterables.append([rethermalize] * num_particles) #rethermalize
iterables.append([True] * num_particles) # whether to compute incremental works
for job in range(num_particles):
if self.ncmc_save_interval is not None: #check if we should make 'trajectory_filename' not None
iterables[2][job] = self.neq_traj_filename[_direction] + f".iteration_{job:04}.h5"
scattered_futures = [self.parallelism.scatter(iterable) for iterable in iterables]
sMC_futures.update({_direction: self.parallelism.deploy(func = call_anneal_method,
arguments = tuple(scattered_futures),
workers = workers)})
#collect futures into one list and see progress
all_futures = [item for sublist in list(sMC_futures.values()) for item in sublist]
self.parallelism.progress(futures = all_futures)
#now we collect the finished futures
_logger.debug(f"\tretreiving annealing executions...")
for _direction in directions:
if current_lambdas[_direction] == finish_lines[_direction]:
_logger.info(f"\tdirection {_direction} is complete. omitting job collection")
continue
_logger.debug(f"\t\tcollecting annealing jobs in direction {_direction}...")
_futures = self.parallelism.gather_results(futures = sMC_futures[_direction])
#collect tuple results
_incremental_works = [_iter[0] for _iter in _futures]
_sampler_states = [_iter[1] for _iter in _futures]
_timers = [_iter[2] for _iter in _futures]
#make sure incremental works are the same on distributed annealing as they are locally with trailblaze/not-trailblaze
assert all(abs(i - j) < DISTRIBUTED_ERROR_TOLERANCE for i, j in zip(np.array(_incremental_works).flatten(), sMC_incremental_works[_direction])), f"the incremental works between the local and distributed platforms do not match"
#if this is true, we can update the cumulative work dict
sMC_cumulative_works[_direction].append(np.add(sMC_cumulative_works[_direction][-1], sMC_incremental_works[_direction]))
#append the sampler_states
sMC_sampler_states[_direction] = np.array(_sampler_states)
#append the _timers
sMC_timers[_direction].append(_timers)
print(f"\t{_direction} retrieval time: {time.time() - worker_retrieval[_direction]}")
#report the updated logger dicts and observables
for _direction in directions:
if current_lambdas[_direction] != finish_lines[_direction]:
current_lambdas[_direction] = _lambdas[_direction][-1] #update the lambda with the current lambda
end_timer = time.time() - start_timer
iteration_number += 1
_logger.info(f"iteration took {end_timer} seconds.")
_logger.debug(f"\n")
_logger.debug(f"deactivating annealing workers...")
self._deactivate_annealing_workers()
for _direction in directions:
_lst = sMC_cumulative_works[_direction]
sMC_cumulative_works.update({_direction: np.array(_lst).T}) #the cumulative work dimensions should be num_particles * num_iterations
self.compute_sMC_free_energy(sMC_cumulative_works)
self.sMC_observables = sMC_observables
if _resample:
_logger.debug(f"computing particle ancestries and survival rates...")
self.survival_rate = compute_survival_rate(sMC_particle_ancestries)
self.particle_ancestries = {_direction : np.array([q.flatten() for q in sMC_particle_ancestries[_direction]]) for _direction in sMC_particle_ancestries.keys()}
else:
_logger.debug(f"omitting particles ancestries and survivial rates (no resampling)...")
self.survival_rate = None
self.particle_ancestries = None
def compute_sMC_free_energy(self, cumulative_work_dict):
    """
    Compute free-energy estimates from sMC cumulative-work matrices (AIS or
    generalized sMC). Results are stored on the instance: ``self.cumulative_work``
    (per direction), ``self.dg_EXP`` (per-iteration EXP estimates), and
    ``self.dg_BAR`` when both directions are present.

    Parameters
    ----------
    cumulative_work_dict : dict
        {direction <str>: np.ndarray of shape (num_particles, iterations)}
        where direction is 'forward' or 'reverse'.
    """
    _logger.debug(f"computing free energies...")
    self.cumulative_work = {}
    self.dg_EXP = {}
    for direction, work_matrix in cumulative_work_dict.items():
        self.cumulative_work[direction] = work_matrix
        # one EXP estimate per sMC iteration (column of the work matrix)
        exp_estimates = [pymbar.EXP(work_matrix[:, column]) for column in range(work_matrix.shape[1])]
        self.dg_EXP[direction] = np.array(exp_estimates)
        _logger.debug(f"cumulative_work for {direction}: {self.cumulative_work[direction]}")
    # BAR needs terminal works from both the forward and reverse directions
    if len(list(self.cumulative_work.keys())) == 2:
        forward_final = self.cumulative_work['forward'][:, -1]
        reverse_final = self.cumulative_work['reverse'][:, -1]
        self.dg_BAR = pymbar.BAR(forward_final, reverse_final)
def minimize_sampler_states(self):
    """Energy-minimize the stored sampler states at each lambda endstate (0.0 and 1.0)."""
    endstate_lambdas = self.lambda_endstates['forward']  # [0.0, 1.0]
    for endstate_lambda in endstate_lambdas:
        # pin the alchemical state to the endstate, then minimize the matching sampler state
        protocol = LambdaProtocol(functions = self.lambda_protocol)
        self.thermodynamic_state.set_alchemical_parameters(endstate_lambda, protocol)
        minimize(self.thermodynamic_state, self.sampler_states[int(endstate_lambda)])
def pull_trajectory_snapshot(self, endstate):
    """
    Draw a single random decorrelated snapshot from the cached equilibrium data.

    Parameters
    ----------
    endstate : int
        lambda endstate from which to extract an equilibrated snapshot, either 0 or 1

    Returns
    -------
    sampler_state : openmmtools.states.SamplerState
        sampler state carrying the snapshot positions and box vectors (if applicable)
    """
    assert endstate in [0,1], f"the endstate ({endstate}) is not 0 or 1"
    # pick a decorrelated global snapshot index at random
    index = random.choice(self._eq_dict[f"{endstate}_decorrelated"])
    # exactly one trajectory file should own that index
    matching_files = [name for name, indices in self._eq_files_dict[endstate].items() if index in indices]
    assert len(matching_files) == 1, f"files: {matching_files} doesn't have one entry; index: {index}, eq_files_dict: {self._eq_files_dict[endstate]}"
    traj_file = matching_files[0]
    # translate the global index into a frame number within that file
    frame_number = self._eq_files_dict[endstate][traj_file].index(index)
    # load just that frame and wrap it as a SamplerState
    frame = md.load_frame(traj_file, frame_number)
    positions = frame.openmm_positions(0)
    box_vectors = frame.openmm_boxes(0)
    return SamplerState(positions, box_vectors = box_vectors)
def equilibrate(self,
                n_equilibration_iterations = 1,
                n_steps_per_equilibration = 5000,
                endstates = [0,1],
                max_size = 1024*1e3,
                decorrelate=False,
                timer = False,
                minimize = False):
    """
    Run the equilibrium simulations a specified number of times at the lambda 0, 1 states. This can be used to equilibrate
    the simulation before beginning the free energy calculation.

    Parameters
    ----------
    n_equilibration_iterations : int; default 1
        number of equilibrium simulations to run, each for lambda = 0, 1.
    n_steps_per_equilibration : int, default 5000
        number of integration steps to take in an equilibration iteration
    endstates : list, default [0,1]
        at which endstate(s) to conduct n_equilibration_iterations (either [0] ,[1], or [0,1])
    max_size : float, default 1.024e6 (bytes)
        number of bytes allotted to the current writing-to file before it is finished and a new equilibrium file is initiated.
    decorrelate : bool, default False
        whether to parse all written files serially and remove correlated snapshots; this returns an ensemble of iid samples in theory.
    timer : bool, default False
        whether to trigger the timing in the equilibration; this adds an item to the EquilibriumResult, which is a list of times for various
        processes in the feptask equilibration scheme.
    minimize : bool, default False
        Whether to minimize the sampler state before conducting equilibration. This is passed directly to feptasks.run_equilibration

    Returns
    -------
    equilibrium_result : perses.dispersed.feptasks.EquilibriumResult
        equilibrium result namedtuple

    NOTE(review): as written, results are folded into instance state
    (self._eq_dict, self.sampler_states, self._eq_timers) and nothing is
    returned — confirm whether the documented return value is stale.
    """
    _logger.debug(f"conducting equilibration")
    # validate the requested endstates before doing any work
    for endstate in endstates:
        assert endstate in [0, 1], f"the endstates contains {endstate}, which is not in [0, 1]"

    # run a round of equilibrium
    _logger.debug(f"iterating through endstates to submit equilibrium jobs")
    EquilibriumFEPTask_list = []
    for state in endstates: #iterate through the specified endstates (0 or 1) to create appropriate EquilibriumFEPTask inputs
        _logger.debug(f"\tcreating lambda state {state} EquilibriumFEPTask")
        # pin the hybrid system to the pure endstate before sampling
        self.thermodynamic_state.set_alchemical_parameters(float(state), lambda_protocol = LambdaProtocol(functions = self.lambda_protocol))
        input_dict = {'thermodynamic_state': copy.deepcopy(self.thermodynamic_state),
                      'nsteps_equil': n_steps_per_equilibration,
                      'topology': self.factory.hybrid_topology,
                      'n_iterations': n_equilibration_iterations,
                      'splitting': self.eq_splitting_string,
                      'atom_indices_to_save': None,
                      'trajectory_filename': None,
                      'max_size': max_size,
                      'timer': timer,
                      '_minimize': minimize,
                      'file_iterator': 0,
                      'timestep': self.timestep}

        if self.write_traj:
            _logger.debug(f"\twriting traj to {self.eq_trajectory_filename[state]}")
            equilibrium_trajectory_filename = self.eq_trajectory_filename[state]
            input_dict['trajectory_filename'] = equilibrium_trajectory_filename
        else:
            _logger.debug(f"\tnot writing traj")

        # resume file numbering from the last written equilibrium file, if any;
        # the index is parsed from the '....NNNN.h5' suffix of the last filename.
        # NOTE(review): when self._eq_dict[state] is empty, 'file_iterator'
        # keeps its default of 0 set in input_dict above — confirm intended.
        if self._eq_dict[state] == []:
            _logger.debug(f"\tself._eq_dict[{state}] is empty; initializing file_iterator at 0 ")
        else:
            last_file_num = int(self._eq_dict[state][-1][0][-7:-3])
            _logger.debug(f"\tlast file number: {last_file_num}; initiating file iterator as {last_file_num + 1}")
            file_iterator = last_file_num + 1
            input_dict['file_iterator'] = file_iterator
        task = EquilibriumFEPTask(sampler_state = self.sampler_states[state], inputs = input_dict, outputs = None)
        EquilibriumFEPTask_list.append(task)

    _logger.debug(f"scattering and mapping run_equilibrium task")
    #we need not concern ourselves with _adaptive here since we are only running vanilla MD on 1 or 2 endstates
    if self.external_parallelism:
        #the client is already active
        #we only run a max of 2 parallel runs at once, so we can pull 2 workers
        num_available_workers = min(len(endstates), len(self.parallelism_parameters['available_workers']))
        workers = np.random.choice(self.parallelism_parameters['available_workers'], size = num_available_workers, replace = False)
        scatter_futures = self.parallelism.scatter(EquilibriumFEPTask_list, workers = workers)
        futures = self.parallelism.deploy(run_equilibrium, (scatter_futures,), workers = workers)
    elif self.internal_parallelism:
        #we have to activate the client
        if self.parallelism_parameters['library'] is None: #then we are running locally
            _parallel_processes = 0
        else:
            _parallel_processes = min(len(endstates), self.parallelism_parameters['num_processes'])
        self.parallelism.activate_client(library = self.parallelism_parameters['library'], num_processes = _parallel_processes)
        scatter_futures = self.parallelism.scatter(EquilibriumFEPTask_list)
        futures = self.parallelism.deploy(run_equilibrium, (scatter_futures,))
    else:
        raise Exception(f"either internal or external parallelism must be True.")

    self.parallelism.progress(futures)
    eq_results = self.parallelism.gather_results(futures)

    if self.internal_parallelism:
        #deactivte the client
        self.parallelism.deactivate_client()
    else:
        #we do not deactivate the external parallelism because the current class has no authority over it. It simply borrows the allotted workers for a short time
        pass

    #the rest of the function is independent of the dask workers...
    # fold each worker's outputs back into the per-endstate bookkeeping
    for state, eq_result in zip(endstates, eq_results):
        _logger.debug(f"\tcomputing equilibrium task future for state = {state}")
        self._eq_dict[state].extend(eq_result.outputs['files'])
        self._eq_dict[f"{state}_reduced_potentials"].extend(eq_result.outputs['reduced_potentials'])
        self.sampler_states.update({state: eq_result.sampler_state})
        self._eq_timers[state].append(eq_result.outputs['timers'])

    _logger.debug(f"collections complete.")
    if decorrelate: # if we want to decorrelate all sample
        _logger.debug(f"decorrelating data")
        for state in endstates:
            _logger.debug(f"\tdecorrelating lambda = {state} data.")
            traj_filename = self.eq_trajectory_filename[state]
            if os.path.exists(traj_filename[:-2] + f'0000' + '.h5'):
                _logger.debug(f"\tfound traj filename: {traj_filename[:-2] + f'0000' + '.h5'}; proceeding...")
                # statistical-inefficiency analysis over the pooled reduced potentials
                [t0, g, Neff_max, A_t, uncorrelated_indices] = compute_timeseries(np.array(self._eq_dict[f"{state}_reduced_potentials"]))
                _logger.debug(f"\tt0: {t0}; Neff_max: {Neff_max}; uncorrelated_indices: {uncorrelated_indices}")
                self._eq_dict[f"{state}_decorrelated"] = uncorrelated_indices

                #now we just have to turn the file tuples into an array
                _logger.debug(f"\treorganizing decorrelated data; files w/ num_snapshots are: {self._eq_dict[state]}")
                iterator, corrected_dict = 0, {}
                # map each file to the subset of its global snapshot indices that survived decorrelation
                for tupl in self._eq_dict[state]:
                    new_list = [i + iterator for i in range(tupl[1])]
                    iterator += len(new_list)
                    decorrelated_list = [i for i in new_list if i in uncorrelated_indices]
                    corrected_dict[tupl[0]] = decorrelated_list
                self._eq_files_dict[state] = corrected_dict
                _logger.debug(f"\t corrected_dict for state {state}: {corrected_dict}")
def _resample(self,
              incremental_works,
              cumulative_works,
              observable = 'ESS',
              resampling_method = 'multinomial',
              resample_observable_threshold = 0.5):
    """
    Attempt a particle resampling step driven by an observable diagnostic.

    Parameters
    ----------
    incremental_works : np.array() of floats
        incremental work accumulated from importance sampling at time t
    cumulative_works : np.array() of floats
        cumulative work accumulated from importance sampling from time t = 1 : t-1
    observable : str, default 'ESS'
        key into self.supported_observables used as the resampling diagnostic
    resampling_method : str, default 'multinomial'
        key into self.supported_resampling_methods
    resample_observable_threshold : float, default 0.5
        threshold at or below which a resampling event is triggered

    Returns
    -------
    observable_value : float
        value of the observable (reset to 1.0 after a resample)
    resampled_works : np.array() floats
        resampled total works at iteration t
    resampled_indices : np.array() int
        resampled particle indices
    resample_bool : bool
        whether resampling was actually conducted
    """
    num_particles = incremental_works.shape[0]
    # diagnose with the requested observable on the pre-update works
    observable_fn = self.supported_observables[observable]
    observable_value = observable_fn(cumulative_works, incremental_works)
    _logger.debug(f"\t\tstart resampled normalized observable value: {observable_value}")
    total_works = np.add(cumulative_works, incremental_works)

    #decide whether to resample
    _logger.debug(f"\t\tnormalized observable value: {observable_value}")
    resample_bool = observable_value <= resample_observable_threshold
    if resample_bool:
        _logger.debug(f"\t\tnormalized observable value ({observable_value}) <= {resample_observable_threshold}. Resampling")
        resampler = self.supported_resampling_methods[resampling_method]
        resampled_works, resampled_indices = resampler(total_works = total_works,
                                                       num_resamples = num_particles)
        # after a resample the particle population is balanced again
        observable_value = 1.0
    else:
        _logger.debug(f"\t\tnormalized observable value ({observable_value}) > {resample_observable_threshold}. Skipping resampling.")
        resampled_works = total_works
        resampled_indices = np.arange(num_particles)

    _logger.debug(f"\t\tfinal resampled normalized observable_value: {observable_value}")
    return observable_value, resampled_works, resampled_indices, resample_bool
def compute_lambda_increment(self, new_val, sampler_states, observable, current_rps, cumulative_works):
    """
    Locally advance the alchemical state to ``new_val`` and report the
    resampling observable together with the per-particle incremental works
    (new reduced potentials minus ``current_rps``).
    """
    self.thermodynamic_state.set_alchemical_parameters(new_val, LambdaProtocol(functions = self.lambda_protocol))
    new_rps = np.array([compute_reduced_potential(self.thermodynamic_state, sampler_state) for sampler_state in sampler_states])
    incremental_works = new_rps - current_rps
    _observable = observable(cumulative_works, incremental_works)
    return _observable, incremental_works
def binary_search(self,
                  sampler_states,
                  cumulative_works,
                  start_val,
                  end_val,
                  observable,
                  observable_threshold,
                  max_iterations=100,
                  initial_guess = None,
                  precision_threshold = 1e-6):
    """
    Given corresponding start_val and end_val of observables, conduct a binary search to find min value for which the observable threshold
    is exceeded.

    Parameters
    ----------
    sampler_states : np.array(openmmtools.states.SamplerState)
        numpy array of sampler states
    cumulative_works : np.array(float)
        cumulative works of corresponding sampler states
    start_val : float
        start value of binary search
    end_val : float
        end value of binary search
    observable : function
        function to compute an observable
    observable_threshold : float
        the threshold of the observable used to satisfy the binary search criterion
    max_iterations : int, default 100
        maximum number of iterations to conduct
    initial_guess : float, default None
        guess where the threshold is achieved (used only on the first iteration)
    precision_threshold : float, default 1e-6
        precision threshold below which the iteration will break early;
        if None, the search always runs for max_iterations

    Returns
    -------
    midpoint : float
        maximum value that doesn't exceed threshold
    _observable : float
        observed value of observable at the returned midpoint
    _incremental_works : np.ndarray of floats
        the incremental works of the lambda update
    """
    right_bound = end_val
    left_bound = start_val
    _logger.debug(f"\t\tmin, max values: {start_val}, {end_val}. ")
    # reduced potentials at the start lambda are the reference for all incremental works
    self.thermodynamic_state.set_alchemical_parameters(start_val, LambdaProtocol(functions = self.lambda_protocol))
    current_rps = np.array([compute_reduced_potential(self.thermodynamic_state, sampler_state) for sampler_state in sampler_states])

    # seed the first probe with the caller's guess when provided
    if initial_guess is not None:
        midpoint = initial_guess
    else:
        midpoint = (left_bound + right_bound) * 0.5

    for iteration in range(max_iterations):
        if iteration != 0:
            midpoint = (left_bound + right_bound) * 0.5
        _observable, _incremental_works = self.compute_lambda_increment(new_val = midpoint,
                                                                        sampler_states = sampler_states,
                                                                        observable = observable,
                                                                        current_rps = current_rps,
                                                                        cumulative_works = cumulative_works)
        # threshold exceeded -> step too big, shrink from the right; otherwise grow from the left
        if _observable <= observable_threshold:
            right_bound = midpoint
        else:
            left_bound = midpoint

        if precision_threshold is not None:
            if abs(right_bound - left_bound) <= precision_threshold:
                # converged: settle on the right bound and recompute its observable/works
                midpoint = right_bound
                _observable, _incremental_works = self.compute_lambda_increment(new_val = midpoint,
                                                                                sampler_states = sampler_states,
                                                                                observable = observable,
                                                                                current_rps = current_rps,
                                                                                cumulative_works = cumulative_works)
                break

    return midpoint, _observable, _incremental_works
|
Team-60/ColorSwitch
|
src/gameEngine/customExceptions/FallOutException.java
|
<filename>src/gameEngine/customExceptions/FallOutException.java
package gameEngine.customExceptions;
/** Game-over condition raised when the ball drops out of the visible screen area. */
public class FallOutException extends GameOverException {

    /** User-facing message describing the fall-out condition. */
    private static final String MESSAGE = "Ball fell out of the screen";

    /** Creates the exception with the standard fall-out message. */
    public FallOutException() {
        super(MESSAGE);
    }

    /** Returns the simple name of this exception type. */
    @Override
    public String toString() {
        return "FallOutException";
    }
}
|
davidvancleve/web.dev
|
src/site/_data/paths/index.js
|
<reponame>davidvancleve/web.dev
const accessible = require('./accessible.json');
const angular = require('./angular.json');
const devices = require('./devices.json');
const discoverable = require('./discoverable.json');
const fast = require('./fast.json');
const learnWebVitals = require('./learn-web-vitals.json');
const lighthouseAccessibility = require('./lighthouse-accessibility.json');
const lighthouseBestPractices = require('./lighthouse-best-practices.json');
const lighthousePerformance = require('./lighthouse-performance.json');
const lighthousePwa = require('./lighthouse-pwa.json');
const lighthouseSeo = require('./lighthouse-seo.json');
const media = require('./media.json');
const metrics = require('./metrics.json');
const notifications = require('./notifications.json');
const payments = require('./payments.json');
const progressiveWebApps = require('./progressive-web-apps.json');
const react = require('./react.json');
const reliable = require('./reliable.json');
const secure = require('./secure.json');
const animations = require('./animations.json');
module.exports = {
accessible,
angular,
animations,
devices,
discoverable,
fast,
'learn-web-vitals': learnWebVitals,
'lighthouse-accessibility': lighthouseAccessibility,
'lighthouse-best-practices': lighthouseBestPractices,
'lighthouse-performance': lighthousePerformance,
'lighthouse-pwa': lighthousePwa,
'lighthouse-seo': lighthouseSeo,
media,
metrics,
notifications,
payments,
'progressive-web-apps': progressiveWebApps,
react,
reliable,
secure,
};
|
iikirilov/teku
|
data/provider/src/main/java/tech/pegasys/artemis/api/schema/BeaconState.java
|
/*
* Copyright 2020 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.artemis.api.schema;
import com.google.common.primitives.UnsignedLong;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.tuweni.bytes.Bytes32;
import tech.pegasys.artemis.util.SSZTypes.Bitvector;
/**
 * REST API schema mirror of
 * {@code tech.pegasys.artemis.datastructures.state.BeaconState}: an immutable,
 * field-for-field copy built from the internal datastructure so it can be
 * serialized for API responses.
 */
public class BeaconState {
  public final UnsignedLong genesis_time;
  public final UnsignedLong slot;
  public final Fork fork;
  public final BeaconBlockHeader latest_block_header;
  public final List<Bytes32> block_roots;
  public final List<Bytes32> state_roots;
  public final List<Bytes32> historical_roots;
  public final Eth1Data eth1_data;
  public final List<Eth1Data> eth1_data_votes;
  public final UnsignedLong eth1_deposit_index;
  public final List<Validator> validators;
  public final List<UnsignedLong> balances;
  public final List<Bytes32> randao_mixes;
  public final List<UnsignedLong> slashings;
  public final List<PendingAttestation> previous_epoch_attestations;
  public final List<PendingAttestation> current_epoch_attestations;
  public final Bitvector justification_bits;
  public final Checkpoint previous_justified_checkpoint;
  public final Checkpoint current_justified_checkpoint;
  public final Checkpoint finalized_checkpoint;

  /**
   * Builds the API view from the internal beacon state, copying list contents
   * and wrapping nested structures in their API schema counterparts.
   *
   * @param beaconState the internal datastructure to mirror
   */
  public BeaconState(final tech.pegasys.artemis.datastructures.state.BeaconState beaconState) {
    this.genesis_time = beaconState.getGenesis_time();
    this.slot = beaconState.getSlot();
    this.fork = new Fork(beaconState.getFork());
    this.latest_block_header = new BeaconBlockHeader(beaconState.getLatest_block_header());
    // Root lists are copied into plain java.util.Lists via streams.
    this.block_roots = beaconState.getBlock_roots().stream().collect(Collectors.toList());
    this.state_roots = beaconState.getState_roots().stream().collect(Collectors.toList());
    this.historical_roots = beaconState.getHistorical_roots().stream().collect(Collectors.toList());
    this.eth1_data = new Eth1Data(beaconState.getEth1_data());
    this.eth1_data_votes =
        beaconState.getEth1_data_votes().stream().map(Eth1Data::new).collect(Collectors.toList());
    this.eth1_deposit_index = beaconState.getEth1_deposit_index();
    this.validators =
        beaconState.getValidators().stream().map(Validator::new).collect(Collectors.toList());
    this.balances = beaconState.getBalances().stream().collect(Collectors.toList());
    this.randao_mixes = beaconState.getRandao_mixes().stream().collect(Collectors.toList());
    this.slashings = beaconState.getSlashings().stream().collect(Collectors.toList());
    this.previous_epoch_attestations =
        beaconState.getPrevious_epoch_attestations().stream()
            .map(PendingAttestation::new)
            .collect(Collectors.toList());
    this.current_epoch_attestations =
        beaconState.getCurrent_epoch_attestations().stream()
            .map(PendingAttestation::new)
            .collect(Collectors.toList());
    this.justification_bits = beaconState.getJustification_bits();
    this.previous_justified_checkpoint =
        new Checkpoint(beaconState.getPrevious_justified_checkpoint());
    this.current_justified_checkpoint =
        new Checkpoint(beaconState.getCurrent_justified_checkpoint());
    this.finalized_checkpoint = new Checkpoint(beaconState.getFinalized_checkpoint());
  }
}
|
shaojiankui/iOS10-Runtime-Headers
|
PrivateFrameworks/OfficeImport.framework/ODDNodePoint.h
|
<reponame>shaojiankui/iOS10-Runtime-Headers<filename>PrivateFrameworks/OfficeImport.framework/ODDNodePoint.h
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport
*/
// Data point that participates in a node hierarchy: it holds an ordered list
// of children plus transition points. NOTE(review): this is a
// RuntimeBrowser-generated header; field/method semantics below are inferred
// from names — confirm against the framework before relying on them.
@interface ODDNodePoint : ODDDataPoint {
    NSMutableArray * mChildren;               // child nodes, ordered (see -addChild:order:)
    ODDNodePoint * mParent;                   // presumably the owning node — TODO confirm
    ODDTransitionPoint * mParentTransition;   // transition associated with the parent link
    ODDTransitionPoint * mSiblingTransition;  // transition associated with the sibling link
}

// Inserts a child at the given position in the child order.
- (void)addChild:(id)arg1 order:(unsigned long long)arg2;
- (id)children;
- (void)dealloc;
- (id)parent;
- (id)parentTransition;
- (void)setParentTransition:(id)arg1;
- (void)setSiblingTransition:(id)arg1;
- (void)setType:(int)arg1;
- (id)siblingTransition;

@end
|
moorecoin/MooreCoinMiningAlgorithm
|
src/secp256k1/src/java/org_moorecoin_nativesecp256k1.h
|
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_moorecoin_nativesecp256k1 */

#ifndef _Included_org_moorecoin_nativesecp256k1
#define _Included_org_moorecoin_nativesecp256k1
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     org_moorecoin_nativesecp256k1
 * Method:    secp256k1_ecdsa_verify
 * Signature: (Ljava/nio/ByteBuffer;)I
 */
/* NOTE(review): the JNI keywords (JNIEXPORT/JNICALL/JNIEnv, extern "C") were
 * case-mangled in this file and are restored per the JNI spec. The lowercase
 * class-name segment of the symbol is kept as found — confirm it matches the
 * actual Java class name, since JNI symbol resolution is case-sensitive. */
JNIEXPORT jint JNICALL Java_org_moorecoin_nativesecp256k1_secp256k1_1ecdsa_1verify
  (JNIEnv *, jclass, jobject);

#ifdef __cplusplus
}
#endif
#endif
|
mkinsner/llvm
|
lldb/unittests/Process/Utility/RegisterContextNetBSDTest_x86_64.cpp
|
<filename>lldb/unittests/Process/Utility/RegisterContextNetBSDTest_x86_64.cpp
//===-- RegisterContextNetBSDTest_x86_64.cpp ------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#if defined(__x86_64__)
// clang-format off
#include <sys/types.h>
#include <amd64/reg.h>
// clang-format on
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "Plugins/Process/Utility/lldb-x86-register-enums.h"
#include "Plugins/Process/Utility/RegisterContextNetBSD_i386.h"
#include "Plugins/Process/Utility/RegisterContextNetBSD_x86_64.h"
using namespace lldb;
using namespace lldb_private;
// Look up the (byte offset, byte size) pair recorded for register `reg`
// in the given register-info table.
static std::pair<size_t, size_t> GetRegParams(RegisterInfoInterface &ctx,
                                              uint32_t reg) {
  const RegisterInfo &reg_info = ctx.GetRegisterInfo()[reg];
  return std::make_pair(reg_info.byte_offset, reg_info.byte_size);
}
// Checks a register's (offset, size) against an offset relative to
// `base_offset`, which the test sets per register block (FPR, DBR).
#define EXPECT_OFF(regname, offset, size)                                      \
  EXPECT_THAT(GetRegParams(reg_ctx, lldb_##regname),                           \
              ::testing::Pair(offset + base_offset, size))

// Checks a general-purpose register against the NetBSD `reg` struct layout.
#define EXPECT_GPR_X86_64(regname, regconst)                                   \
  EXPECT_THAT(                                                                 \
      GetRegParams(reg_ctx, lldb_##regname##_x86_64),                          \
      ::testing::Pair(offsetof(reg, regs[regconst]),                           \
                      sizeof(reg::regs[regconst])))

// Checks a debug register against the NetBSD `dbreg` struct layout.
#define EXPECT_DBR_X86_64(num)                                                 \
  EXPECT_OFF(dr##num##_x86_64, offsetof(dbreg, dr[num]), sizeof(dbreg::dr[num]))

// Verifies that RegisterContextNetBSD_x86_64's register table agrees with the
// NetBSD kernel's reg/FXSAVE/dbreg layouts for GPRs, FPRs, SSE and debug regs.
TEST(RegisterContextNetBSDTest, x86_64) {
  ArchSpec arch{"x86_64-unknown-netbsd"};
  RegisterContextNetBSD_x86_64 reg_ctx{arch};

  EXPECT_GPR_X86_64(rdi, _REG_RDI);
  EXPECT_GPR_X86_64(rsi, _REG_RSI);
  EXPECT_GPR_X86_64(rdx, _REG_RDX);
  EXPECT_GPR_X86_64(rcx, _REG_RCX);
  EXPECT_GPR_X86_64(r8, _REG_R8);
  EXPECT_GPR_X86_64(r9, _REG_R9);
  EXPECT_GPR_X86_64(r10, _REG_R10);
  EXPECT_GPR_X86_64(r11, _REG_R11);
  EXPECT_GPR_X86_64(r12, _REG_R12);
  EXPECT_GPR_X86_64(r13, _REG_R13);
  EXPECT_GPR_X86_64(r14, _REG_R14);
  EXPECT_GPR_X86_64(r15, _REG_R15);
  EXPECT_GPR_X86_64(rbp, _REG_RBP);
  EXPECT_GPR_X86_64(rbx, _REG_RBX);
  EXPECT_GPR_X86_64(rax, _REG_RAX);
  EXPECT_GPR_X86_64(gs, _REG_GS);
  EXPECT_GPR_X86_64(fs, _REG_FS);
  EXPECT_GPR_X86_64(es, _REG_ES);
  EXPECT_GPR_X86_64(ds, _REG_DS);
  EXPECT_GPR_X86_64(rip, _REG_RIP);
  EXPECT_GPR_X86_64(cs, _REG_CS);
  EXPECT_GPR_X86_64(rflags, _REG_RFLAGS);
  EXPECT_GPR_X86_64(rsp, _REG_RSP);
  EXPECT_GPR_X86_64(ss, _REG_SS);

  // fctrl is the first FPR field, it is used to determine offset of the whole
  // FPR struct
  size_t base_offset = reg_ctx.GetRegisterInfo()[lldb_fctrl_x86_64].byte_offset;

  // assert against FXSAVE struct
  EXPECT_OFF(fctrl_x86_64, 0x00, 2);
  EXPECT_OFF(fstat_x86_64, 0x02, 2);
  // TODO: This is a known bug, abridged ftag should is 8 bits in length.
  EXPECT_OFF(ftag_x86_64, 0x04, 2);
  EXPECT_OFF(fop_x86_64, 0x06, 2);
  // NB: Technically fiseg/foseg are 16-bit long and the higher 16 bits
  // are reserved.  However, LLDB defines them to be 32-bit long for backwards
  // compatibility, as they were used to reconstruct FIP/FDP before explicit
  // register entries for them were added.  Also, this is still how GDB does it.
  EXPECT_OFF(fioff_x86_64, 0x08, 4);
  EXPECT_OFF(fiseg_x86_64, 0x0C, 4);
  EXPECT_OFF(fip_x86_64, 0x08, 8);
  EXPECT_OFF(fooff_x86_64, 0x10, 4);
  EXPECT_OFF(foseg_x86_64, 0x14, 4);
  EXPECT_OFF(fdp_x86_64, 0x10, 8);
  EXPECT_OFF(mxcsr_x86_64, 0x18, 4);
  EXPECT_OFF(mxcsrmask_x86_64, 0x1C, 4);
  // x87 stack registers (10-byte extended precision)
  EXPECT_OFF(st0_x86_64, 0x20, 10);
  EXPECT_OFF(st1_x86_64, 0x30, 10);
  EXPECT_OFF(st2_x86_64, 0x40, 10);
  EXPECT_OFF(st3_x86_64, 0x50, 10);
  EXPECT_OFF(st4_x86_64, 0x60, 10);
  EXPECT_OFF(st5_x86_64, 0x70, 10);
  EXPECT_OFF(st6_x86_64, 0x80, 10);
  EXPECT_OFF(st7_x86_64, 0x90, 10);
  // MMX registers alias the low 8 bytes of the x87 stack slots
  EXPECT_OFF(mm0_x86_64, 0x20, 8);
  EXPECT_OFF(mm1_x86_64, 0x30, 8);
  EXPECT_OFF(mm2_x86_64, 0x40, 8);
  EXPECT_OFF(mm3_x86_64, 0x50, 8);
  EXPECT_OFF(mm4_x86_64, 0x60, 8);
  EXPECT_OFF(mm5_x86_64, 0x70, 8);
  EXPECT_OFF(mm6_x86_64, 0x80, 8);
  EXPECT_OFF(mm7_x86_64, 0x90, 8);
  EXPECT_OFF(xmm0_x86_64, 0xA0, 16);
  EXPECT_OFF(xmm1_x86_64, 0xB0, 16);
  EXPECT_OFF(xmm2_x86_64, 0xC0, 16);
  EXPECT_OFF(xmm3_x86_64, 0xD0, 16);
  EXPECT_OFF(xmm4_x86_64, 0xE0, 16);
  EXPECT_OFF(xmm5_x86_64, 0xF0, 16);
  EXPECT_OFF(xmm6_x86_64, 0x100, 16);
  EXPECT_OFF(xmm7_x86_64, 0x110, 16);
  EXPECT_OFF(xmm8_x86_64, 0x120, 16);
  EXPECT_OFF(xmm9_x86_64, 0x130, 16);
  EXPECT_OFF(xmm10_x86_64, 0x140, 16);
  EXPECT_OFF(xmm11_x86_64, 0x150, 16);
  EXPECT_OFF(xmm12_x86_64, 0x160, 16);
  EXPECT_OFF(xmm13_x86_64, 0x170, 16);
  EXPECT_OFF(xmm14_x86_64, 0x180, 16);
  EXPECT_OFF(xmm15_x86_64, 0x190, 16);

  // debug registers are checked relative to dr0's offset
  base_offset = reg_ctx.GetRegisterInfo()[lldb_dr0_x86_64].byte_offset;
  EXPECT_DBR_X86_64(0);
  EXPECT_DBR_X86_64(1);
  EXPECT_DBR_X86_64(2);
  EXPECT_DBR_X86_64(3);
  EXPECT_DBR_X86_64(4);
  EXPECT_DBR_X86_64(5);
  EXPECT_DBR_X86_64(6);
  EXPECT_DBR_X86_64(7);
}
#endif // defined(__x86_64__)
|
da07ng/dipper
|
src/other/webmap/widgets/_StyledCheckboxTreeNode.js
|
/*
* @Author: Esri
* @Date: 2016-09-10 15:41:43
* @Last Modified by: Esri
* @Last Modified time: 2016-09-10 16:10:23
*/
define([
    "dojo/dom-style",
    "dojo/_base/declare",
    "dijit/_TemplatedMixin",
    "dijit/_WidgetsInTemplateMixin",
    "dojo/text!./templates/template_styled_treenode_checkbox.html"
], function(domStyle, declare, TemplatedMixin, _WidgetsInTemplateMixin, template) {
    // Tree node variant that renders a checkbox alongside the label.
    return declare("mapwidgets._StyledCheckboxTreeNode", [dijit._TreeNode, TemplatedMixin, _WidgetsInTemplateMixin], {
        // Custom node template (contains the checkbox markup).
        templateString: template,

        // Mirror the tree's selection state onto the checkbox element.
        setSelected: function(selected) {
            this.nodeCheck.checked = selected ? 'checked' : '';
        }
    });
});
|
antarikshray/websiterudra
|
node_modules/react-icons/ti/adjust-brightness.js
|
// Auto-generated icon component (Typicons "adjust brightness" glyph).
import React from 'react'
import Icon from 'react-icon-base'

// Renders the 40x40 glyph; all props (size, color, style, ...) are forwarded
// to the react-icon-base <Icon> wrapper. The path data is generated — do not
// edit by hand.
const TiAdjustBrightness = props => (
    <Icon viewBox="0 0 40 40" {...props}>
        <g><path d="m20 11.6l1.7-4.9c0.1-0.4 0.1-0.8 0-1.2-0.3-0.9-1.3-1.4-2.3-1.1-0.9 0.3-1.4 1.3-1.1 2.3l1.7 4.9z m-13.3 6.7c-0.4-0.1-0.8-0.1-1.2 0-0.9 0.3-1.4 1.3-1.1 2.3 0.3 0.9 1.3 1.4 2.3 1.1l4.9-1.7-4.9-1.7z m13.3 10.1l-1.7 4.9c-0.1 0.4-0.1 0.8 0 1.2 0.3 0.9 1.3 1.4 2.3 1.1 0.9-0.3 1.4-1.3 1.1-2.3l-1.7-4.9z m15.6-9c-0.3-0.9-1.3-1.4-2.3-1.1l-4.9 1.7 4.9 1.7c0.4 0.1 0.8 0.1 1.2 0 0.9-0.3 1.4-1.4 1.1-2.3z m-26.2-7.6l4.6 2.2-2.2-4.6c-0.2-0.3-0.5-0.6-0.9-0.8-0.8-0.4-1.9-0.1-2.3 0.8-0.4 0.9-0.1 1.9 0.8 2.4z m-0.8 17.3c-0.4 0.8-0.1 1.9 0.8 2.3 0.9 0.4 1.9 0.1 2.3-0.8l2.3-4.6-4.6 2.3c-0.3 0.1-0.6 0.4-0.8 0.8z m22-0.9l-4.6-2.2 2.2 4.6c0.2 0.3 0.5 0.6 0.9 0.8 0.8 0.4 1.9 0.1 2.3-0.8s0.1-1.9-0.8-2.4z m0.8-17.3c0.4-0.8 0.1-1.9-0.8-2.3-0.9-0.4-1.9-0.1-2.4 0.8l-2.2 4.6 4.6-2.3c0.3-0.1 0.6-0.4 0.8-0.8z m-11.4 1.6c-4.1 0-7.5 3.4-7.5 7.5s3.4 7.5 7.5 7.5 7.5-3.4 7.5-7.5-3.4-7.5-7.5-7.5z"/></g>
    </Icon>
)

export default TiAdjustBrightness
|
vidkidz/crossbridge
|
llvm-2.9/tools/clang/test/CodeGenCXX/devirtualize-virtual-function-calls.cpp
|
<filename>llvm-2.9/tools/clang/test/CodeGenCXX/devirtualize-virtual-function-calls.cpp
// RUN: %clang_cc1 %s -emit-llvm -o - | FileCheck %s
// A is polymorphic (virtual f), but in several calls below the dynamic
// type is statically known, so the compiler may devirtualize.
struct A {
    virtual void f();
    A h();
};

A g();

// Verifies which calls to A::f are emitted as direct calls vs. calls
// through the vtable (the FileCheck patterns above each call).
void f(A a, A *ap, A& ar) {
    // This should not be a virtual function call.
    // CHECK: call void @_ZN1A1fEv(%struct.A* %a)
    a.f();

    // CHECK: call void %
    ap->f();

    // CHECK: call void %
    ar.f();

    // CHECK: call void @_ZN1A1fEv
    A().f();

    // CHECK: call void @_ZN1A1fEv
    g().f();

    // CHECK: call void @_ZN1A1fEv
    a.h().f();
}
// Like A, but with a user-declared destructor; temporaries of B still
// have a known dynamic type, so the calls devirtualize.
struct B {
    virtual void f();
    ~B();
    B h();
};

void f() {
    // CHECK: call void @_ZN1B1fEv
    B().f();

    // CHECK: call void @_ZN1B1fEv
    B().h().f();
}
|
chinhpv95/52n-core
|
src/js/Styling/directives/barToggler.js
|
angular.module('n52.core.style')
    .directive('swcBarToggler', function () {
        // Controller backing the bar/interval toggler modal: exposes the
        // available intervals and applies the user's choice.
        var barTogglerController = ['$scope', 'styleService', function ($scope, styleService) {
            // List of selectable intervals shown in the template.
            $scope.intervals = styleService.intervalList;

            // Apply the chosen interval to the timeseries, then dismiss
            // the enclosing modal.
            $scope.setInterval = function (ts, interval) {
                styleService.updateInterval(ts, interval);
                $scope.modalInstance.close();
            };
        }];

        // Element directive rendering the bar toggler template.
        return {
            restrict: 'E',
            templateUrl: 'templates/styling/bar-toggler.html',
            controller: barTogglerController
        };
    });
|
eoogbe/violit
|
src/server/users/controllers/__tests__/createUserCollection.test.js
|
<reponame>eoogbe/violit
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as USER from '__fixtures__/userFixture';
import routes from 'server/api/routes';

import createUserCollection from '../createUserCollection';

// Verifies that createUserCollection builds the hypermedia document for
// a user: the version/href envelope, the navigational links, and the
// user item taken from the fixture.
it('constructs the collection for a user', () => {
  const collection = createUserCollection({ routes })(USER.credentials);
  expect(collection).toEqual({
    version: '1.0',
    href: USER.userUrl,
    links: [
      { rel: 'home', href: 'http://localhost:4000/api' },
      { rel: 'authenticate', href: 'http://localhost:4000/api/auth' },
      { rel: 'register-user', href: 'http://localhost:4000/api/users' },
    ],
    items: USER.userApi,
  });
});
|
KingFarGrace/blog_system
|
back-end/src/main/java/com/kingfar/blog/entity/ArticleData.java
|
package com.kingfar.blog.entity;

import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;

import java.io.Serializable;
import java.util.Date;

/**
 * Blog article entity exposed to the API layer. Lombok's {@code @Data}
 * generates getters/setters, equals/hashCode and toString for all fields.
 *
 * @author ZHANGKAIHENG
 */
@Data
public class ArticleData implements Serializable {

    // Article (blog) id.
    private int bid;

    private String title;

    private String author;

    private String content;

    // Creation time; serialized and parsed as "yyyy-MM-dd HH:mm:ss" in
    // GMT+8 for both JSON (Jackson) and form binding (Spring).
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date ctime;
}
|
thefool76/hacktoberfest2021
|
DSA/AVL Trees/Insertion.cpp
|
<filename>DSA/AVL Trees/Insertion.cpp
//Insertion in AVL Tree
//AVL trees are balanced Binary Search Trees.
//Wee have a new Term call balanced factor
//balanced factor = height of left subtree - height of right subtree
//If balanced factor ranges in {-1, 0, 1} for each node, then the tree is balanced
//Otherwise it's imbalanced.
//AVL Trees use LL-Rotation, RR-Rotation, LR-Roation, and RR-Rotation to generate a balanced Tree while inserting
//each node after checking for it's balancing factor and identifying the imbalanced nodes.
//In order to save time, we take an extra variable height for each node containing the height of the node or
//maximum levels beneath it.
#include <iostream>
#define endl "\n"
using namespace std;
// AVL tree node. `height` caches the height of the subtree rooted here
// (a leaf has height 1) so balance factors are O(1) to compute.
struct Node{
    Node *lchild;
    int data;
    int height;
    Node *rchild;
}*root = NULL;  // global tree root; updated by the rotation helpers
// Height of the subtree rooted at p; an empty subtree has height 0.
// A node's height is 1 + the taller of its children's cached heights.
int NodeHeight(Node *p){
    int lh = (p && p->lchild) ? p->lchild->height : 0;
    int rh = (p && p->rchild) ? p->rchild->height : 0;
    if(lh > rh)
        return lh + 1;
    return rh + 1;
}
//Finding the Balanced Factor: height(left) - height(right), using the
//cached child heights. In {-1, 0, 1} the node is balanced; +/-2 means a
//rotation is required.
int BalanceFactor(Node *p){
    int lh, rh;
    lh = p&&p->lchild?p->lchild->height:0;
    rh = p&&p->rchild?p->rchild->height:0;
    return lh-rh;
}
// LL Rotation: single right rotation around p, used when p's left
// subtree is left-heavy. Returns the new subtree root (p's former left
// child) and updates the global `root` if p was the root.
Node* LLRotation(Node *p){
    Node *pl = p->lchild;
    Node *plr = pl->rchild;  // subtree that changes parent

    pl->rchild = p;
    p->lchild = plr;

    // Refresh cached heights bottom-up: p is now pl's child, so p's
    // height must be recomputed BEFORE pl's. (The previous order read
    // p's stale pre-rotation height, leaving pl->height too large and
    // corrupting balance factors further up the tree.)
    p->height = NodeHeight(p);
    pl->height = NodeHeight(pl);

    if(root == p)
        root = pl;
    return pl;
}
// LR Rotation: double rotation (left around pl, then right around p),
// used when p's left subtree is right-heavy. plr becomes the new
// subtree root; updates the global `root` if p was the root.
Node* LRRotation(Node *p){
    Node *pl = p->lchild;
    Node* plr = pl->rchild;

    pl->rchild = plr->lchild;
    p->lchild = plr->rchild;
    plr->lchild = pl;
    plr->rchild = p;

    // Refresh cached heights bottom-up: both children (pl and p) must
    // be recomputed BEFORE the new root plr. (The previous order
    // computed plr->height from p's stale pre-rotation height.)
    pl->height = NodeHeight(pl);
    p->height = NodeHeight(p);
    plr->height = NodeHeight(plr);

    if(root == p)
        root = plr;
    return plr;
}
//RR Rotation: single left rotation around p, used when p's right
//subtree is right-heavy. Returns the new subtree root (p's former right
//child) and updates the global `root` if p was the root. Heights are
//refreshed bottom-up (p before pr), which is the correct order.
Node* RRRotation(Node *p){
    Node *pr = p->rchild;
    Node *prl = pr->lchild;

    p->rchild = prl;
    pr->lchild = p;

    p->height = NodeHeight(p);
    pr->height = NodeHeight(pr);

    if(root == p)
        root = pr;
    return pr;
}
//RL Rotation: double rotation (right around pr, then left around p),
//used when p's right subtree is left-heavy. prl becomes the new subtree
//root; updates the global `root` if p was the root. Heights are
//refreshed bottom-up (both children before prl), the correct order.
Node* RLRotation(Node *p){
    Node *pr = p->rchild;
    Node *prl = pr->lchild;

    p->rchild = prl->lchild;
    pr->lchild = prl->rchild;
    prl->lchild = p;
    prl->rchild = pr;

    p->height = NodeHeight(p);
    pr->height = NodeHeight(pr);
    prl->height = NodeHeight(prl);

    if(root == p)
        root = prl;
    return prl;
}
// Recursively inserts `key` into the subtree rooted at p, rebalancing
// on the way back up. Returns the (possibly new) subtree root.
// Duplicate keys are silently ignored.
Node* RInsert(Node *p, int key){
    if(p == NULL){
        p = new Node;
        p->data = key;
        p->lchild = p->rchild = NULL;
        p->height = 1;
        // A freshly created leaf is trivially balanced; return now and
        // skip the redundant comparison/rebalancing work below.
        return p;
    }

    if(key < p->data)
        p->lchild = RInsert(p->lchild, key);
    else if(key > p->data)
        p->rchild = RInsert(p->rchild, key);
    // key == p->data: duplicate, nothing inserted.

    // Refresh this node's cached height after the insertion below it.
    p->height = NodeHeight(p);

    // Rebalance: +2 means left-heavy, -2 means right-heavy; the child's
    // balance factor selects the single vs. double rotation.
    if(BalanceFactor(p) == 2 && BalanceFactor(p->lchild) == 1)
        return LLRotation(p);
    else if(BalanceFactor(p) == 2 && BalanceFactor(p->lchild) == -1)
        return LRRotation(p);
    else if(BalanceFactor(p) == -2 && BalanceFactor(p->rchild) == -1)
        return RRRotation(p);
    else if(BalanceFactor(p) == -2 && BalanceFactor(p->rchild) == 1)
        return RLRotation(p);

    return p;
}
//Dummy Making it easier: convenience overload that inserts starting at
//the global root. The caller must assign the returned node back to
//`root` (the rotations only fix `root` for rebalancing cases).
Node* RInsert(int key){
    return RInsert(root, key);
}
// Demo driver: inserts a fixed key sequence that exercises all four
// rotation cases.
int main()
{
    int a[] = {10, 20, 30, 25, 28, 27, 5};
    int n = sizeof(a)/sizeof(a[0]);

    // Insert every element through the same path; the old code
    // hard-coded the first key (RInsert(10)) separately, duplicating
    // a[0] and breaking silently if the array were edited.
    for(int i = 0; i < n; i++)
        root = RInsert(a[i]);

    return 0;
}
|
giordanna/mycoldlogicapp
|
client/src/assets/FeedbackNeutral.js
|
import React from 'react';

// Neutral-face feedback icon (circle outline, two eyes, straight
// mouth). The fill color comes from props.color; all other SVG
// attributes are fixed.
const FeedbackNeutral = (props) => {
  return <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
    <title>NeutralIcon</title>
    <path
      d="M50,0C22.4,0,0.1,22.4,0.1,50s22.4,49.9,50,49.9s49.9-22.4,49.9-50C100,22.3,77.7,0,50.2,0H50z M50,90
    c-22.1,0-40-17.9-40-40s17.9-40,40-40s40,17.9,40,40S72.1,90,50,90z M39.8,39.8c0,3.1-2.5,5.6-5.6,5.6s-5.6-2.5-5.6-5.6
    c0-3.1,2.5-5.6,5.6-5.6S39.8,36.7,39.8,39.8z M71.3,39.8c0,3.1-2.5,5.6-5.6,5.6c-3.1,0-5.6-2.5-5.6-5.6c0-3.1,2.5-5.6,5.6-5.6
    C68.8,34.3,71.3,36.7,71.3,39.8z M63.8,71H36.3v-7.8h27.5V71z" fill={props.color}/>
  </svg>
};

export default FeedbackNeutral
|
joshayoung/joshayoung_website
|
src/snippets/vim.js
|
<reponame>joshayoung/joshayoung_website<filename>src/snippets/vim.js
import React from "react";
export default () => (
<dl>
<dt>Remove all folds</dt>
<dd>zR</dd>
<dt>Toggle fold inside current indent</dt>
<dd>za</dd>
<dt>Find Character Before</dt>
<dd>
<code>t}</code>
</dd>
<dt>Find Character Before Backwards</dt>
<dd>
<code>T{"{"}</code>
</dd>
<dt> Change to Found Char Before</dt>
<dd>
<code>ct}</code>
</dd>
<dt>Find Character</dt>
<dd>
<code>f}</code>
</dd>
<dt>Find Character Backwards</dt>
<dd>
<code>F{"{"}</code>
</dd>
<dt>Change to Found Char</dt>
<dd>
<code>cf}</code>
</dd>
<dt>Re-run the last 'f' or 't' command</dt>
<dd>
<code>;</code>
</dd>
<dt>Re-run the last 'F' or 'T' command</dt>
<dd>
<code>,</code>
</dd>
<dt>End of Current Word</dt>
<dd>
<code>ea</code>
</dd>
<dt>Change Line</dt>
<dd>
<code>cc</code>
</dd>
<dt>Split Editor</dt>
<dd>
<code>:split(:sp) new_file_name</code>
</dd>
<dt>Vertical Split Editor</dt>
<dd>
<code>:vsplit(:vs)</code> new_file_name
</dd>
<dt>Switch Between Splits</dt>
<dd>
<code>Ctrl + w</code>
</dd>
<dt>Mark Current Line</dt>
<dd>
<code>m[a-z]</code> (i.e. <code>ma</code>)
</dd>
<dt>Go to Mark "h"</dt>
<dd>
<code>'h</code>
</dd>
<dt>Show all marks</dt>
<dd>
<code>:marks</code>
</dd>
<dt>Mark Across Files</dt>
<dd>
Use an uppercase mark: <code>m[A-Z]</code>
</dd>
<dt>Previous location (insert mode)</dt>
<dd>
<code>gi</code>
</dd>
<dt>Go to file under cursor</dt>
<dd>
<code>gf</code>
</dd>
<dt>Open shell from vi</dt>
<dd>
<code>:shell</code>
</dd>
<dt>Close shell</dt>
<dd>
<code>Ctrl + d</code>
</dd>
<dt>Open file browser</dt>
<dd>
<code>:E</code>
</dd>
<dt>Show Registers</dt>
<dd>
<code>:reg</code>
</dd>
<dt>Paste from # registers</dt>
<dd>
<code>"[0-9]p</code> (i.e. <code>0p</code>, <code>"1p</code>, etc.)
</dd>
<dt>Yank to register</dt>
<dd>
<code>"[a-z]yy</code> (then: <code>"ap</code> to paste)
</dd>
<dt>Append to register</dt>
<dd>
<code>"[A-Z]yy</code> (then: <code>"Ap</code> to paste)
</dd>
<dt>Paste from Clipboard</dt>
<dd>
<code>"+p</code>
</dd>
<dt>Record Macro</dt>
<dd>
<code>q[a-z]</code> (i.e. <code>qm</code> - records to <code>m</code>)
</dd>
<dt>Open Multiple Files</dt>
<dd>
<code>vim *</code>
<code>:bn - next file</code>
</dd>
<dt>Stop Macro Recording</dt>
<dd>
<code>q</code>
</dd>
<dt>Execute Macro</dt>
<dd>
<code>@[a-z]</code> (i.e. <code>@m</code> or <code>20 @m</code> to run 20
times
</dd>
<dt>Increment Number</dt>
<dd>
<code>Ctrl + a</code>
</dd>
<dt>Search / Replace Globally</dt>
<dd>
<code>:%s/searchfor/replacewith/g</code>
</dd>
<dt>Search / Replace Current Line</dt>
<dd>
<code>:s/searchfor/replacewith/g</code>
</dd>
<dt>Search / Replace (confirmation)</dt>
<dd>
<code>:%s/searchfor/replacewith/gc</code>
</dd>
<dt>High, Middle, Low</dt>
<dd>
<code>H, M, L</code>
</dd>
<dt>Move up/down</dt>
<dd>
<code>Ctrl + U / Ctrl + D</code>
</dd>
<dt>Move to line 11</dt>
<dd>
<code>11 + G</code>
</dd>
<dt>Indent to surroundings</dt>
<dd>
<code>==</code>
</dd>
<dt>Delete around and within</dt>
<dd>
<code>daw / diw</code>
</dd>
<dt>Delete inner paragraph</dt>
<dd>
<code>dip</code>
</dd>
<dt>Indent a paragraph</dt>
<dd>
<code>>ip</code>
</dd>
<dt>Reverse paragraph indent</dt>
<dd>
<code>=ip</code>
</dd>
<dt>Change between html tags</dt>
<dd>
<code>cit</code>
</dd>
<dt>Delete html tag</dt>
<dd>
<code>dat</code>
</dd>
<dt>Open New Window</dt>
<dd>
<code>:new</code> file_name.txt
</dd>
<dt>Open Vertical Window</dt>
<dd>
<code>:vnew</code> file_name.txt
</dd>
<dt>Go to file</dt>
<dd>
<code>gf</code>
</dd>
<dt>Go Back to prev file</dt>
<dd>
<code>Ctrl + o</code>
</dd>
<dt>Open New Tab</dt>
<dd>
<code>:tabnew</code>
</dd>
<dt>Edit File in Tab</dt>
<dd>
<code>:tabedit</code> file_name.txt
</dd>
<dt>Move forward in tabs</dt>
<dd>
<code>gt</code>
</dd>
<dt>Move backward in tabs</dt>
<dd>
<code>gT</code>
</dd>
<dt>Open Visual Block mode</dt>
<dd>
<code>Ctrl + V</code>
</dd>
<dt>Move to beggining of line</dt>
<dd>
<code>0</code>
</dd>
<dt>Open NETRW file exporer</dt>
<dd>
<code>:e .</code>
</dd>
<dt>Open a file at a line number</dt>
<dd>vim /path/to/file.conf +120</dd>
</dl>
);
|
chrisidefix/devide
|
modules/filters/polyDataConnect.py
|
import gen_utils
from module_base import ModuleBase
from module_mixins import ScriptedConfigModuleMixin
import module_utils
import vtk
# Human-readable labels for vtkPolyDataConnectivityFilter's extraction
# modes, keyed by the filter's 1-based mode constants (the module config
# stores the mode 0-based; see logic_to_config/config_to_logic).
EMODES = {
    1: 'Point seeded regions',
    2: 'Cell seeded regions',
    3: 'Specified regions',
    4: 'Largest region',
    5: 'All regions',
    6: 'Closest point region'
}
class polyDataConnect(ScriptedConfigModuleMixin, ModuleBase):
def __init__(self, module_manager):
# call parent constructor
ModuleBase.__init__(self, module_manager)
self._polyDataConnect = vtk.vtkPolyDataConnectivityFilter()
# we're not going to use this feature just yet
self._polyDataConnect.ScalarConnectivityOff()
#
self._polyDataConnect.SetExtractionModeToPointSeededRegions()
module_utils.setup_vtk_object_progress(self, self._polyDataConnect,
'Finding connected surfaces')
# default is point seeded regions (we store zero-based)
self._config.extraction_mode = 0
self._config.colour_regions = 0
config_list = [
('Extraction mode:', 'extraction_mode', 'base:int',
'choice',
'What kind of connected regions should be extracted.',
[EMODES[i] for i in range(1,7)]),
('Colour regions:', 'colour_regions', 'base:int',
'checkbox',
'Should connected regions be coloured differently.')
]
# and the mixin constructor
ScriptedConfigModuleMixin.__init__(
self, config_list,
{'Module (self)' : self,
'vtkPolyDataConnectivityFilter' : self._polyDataConnect})
# we'll use this to keep a binding (reference) to the passed object
self._input_points = None
# this will be our internal list of points
self._seedIds = []
self.sync_module_logic_with_config()
def close(self):
# we play it safe... (the graph_editor/module_manager should have
# disconnected us by now)
self.set_input(0, None)
# don't forget to call the close() method of the vtkPipeline mixin
ScriptedConfigModuleMixin.close(self)
ModuleBase.close(self)
# get rid of our reference
del self._polyDataConnect
def get_input_descriptions(self):
return ('vtkPolyData', 'Seed points')
def set_input(self, idx, inputStream):
if idx == 0:
# will work for None and not-None
self._polyDataConnect.SetInput(inputStream)
else:
self._input_points = inputStream
def get_output_descriptions(self):
return (self._polyDataConnect.GetOutput().GetClassName(),)
def get_output(self, idx):
return self._polyDataConnect.GetOutput()
def logic_to_config(self):
# extractionmodes in vtkPolyDataCF start at 1
# we store it as 0-based
emode = self._polyDataConnect.GetExtractionMode()
self._config.extraction_mode = emode - 1
self._config.colour_regions = \
self._polyDataConnect.GetColorRegions()
def config_to_logic(self):
# extractionmodes in vtkPolyDataCF start at 1
# we store it as 0-based
self._polyDataConnect.SetExtractionMode(
self._config.extraction_mode + 1)
self._polyDataConnect.SetColorRegions(
self._config.colour_regions)
def execute_module(self):
if self._polyDataConnect.GetExtractionMode() == 1:
self._sync_pdc_to_input_points()
self._polyDataConnect.Update()
def _sync_pdc_to_input_points(self):
# extract a list from the input points
temp_list = []
if self._input_points and self._polyDataConnect.GetInput():
for i in self._input_points:
id = self._polyDataConnect.GetInput().FindPoint(i['world'])
if id > 0:
temp_list.append(id)
if temp_list != self._seedIds:
self._seedIds = temp_list
# I'm hoping this clears the list
self._polyDataConnect.InitializeSeedList()
for seedId in self._seedIds:
self._polyDataConnect.AddSeed(seedId)
print "adding %d" % (seedId)
|
waynezhang87/MCommon
|
sdk/src/main/java/com/waynezhang/mcommon/template/interfaces/FieldTemplateImageCallback.java
|
<filename>sdk/src/main/java/com/waynezhang/mcommon/template/interfaces/FieldTemplateImageCallback.java
package com.waynezhang.mcommon.template.interfaces;

import com.waynezhang.mcommon.template.model.TemplateImageItem;
import com.waynezhang.mcommon.template.response.TemplateResponse;
import com.waynezhang.mcommon.template.widget.TemplateImageView;
import com.waynezhang.mcommon.xwidget.SimpleArrayAdapter;

/**
 * Callbacks fired by a template image field so the host can react to
 * image add/delete actions and long presses on an image item. The
 * {@link TemplateImageCallback.AfterCallback} lets the host signal when
 * its (possibly asynchronous) handling has finished.
 *
 * Created by don on 1/27/16.
 */
public interface FieldTemplateImageCallback {
    /** Invoked after the user picks an image to add to the field. */
    public void addImgCallback(TemplateResponse.TemplateField templateField, TemplateImageItem item, TemplateImageCallback.AfterCallback afterCallback);

    /** Invoked when the user removes an image from the field. */
    public void deleteImgCallback(TemplateResponse.TemplateField templateField, TemplateImageItem item, TemplateImageCallback.AfterCallback afterCallback);

    /** Invoked on a long press on an image item; the adapter is provided so the host can refresh it. */
    public void OnItemLongClickListener(TemplateResponse.TemplateField templateField, TemplateImageItem item, SimpleArrayAdapter<TemplateImageItem, TemplateImageView.TemplateImageItemView> PicAdapter);
}
|
codelieche/cronjob
|
tutorial/etcd/operation/main.go
|
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/coreos/etcd/clientv3"
)
// main demonstrates etcd v3 "Op"-style requests: it builds a
// clientv3.Op for a put and a prefix get, executes them through kv.Do,
// and prints the responses.
func main() {
	// etcd client configuration (local single-node endpoint).
	config := clientv3.Config{
		Endpoints:   []string{"127.0.0.1:2379"},
		DialTimeout: 20 * time.Second,
	}

	// Connect to etcd.
	client, err := clientv3.New(config)
	if err != nil {
		log.Panic(err)
	}

	// Create the KV accessor.
	kv := clientv3.NewKV(client)

	fmt.Println("=== Operation Put ===")
	// Build the put op; WithPrevKV asks the server to also return the
	// previous key/value pair.
	opPut := clientv3.OpPut("/study/name1", "value 0001", clientv3.WithPrevKV())
	// Execute the op.
	if opResponse, err := kv.Do(context.TODO(), opPut); err != nil {
		log.Println(err.Error())
	} else {
		// Unpack the put response.
		putResp := opResponse.Put()
		prevKv := putResp.PrevKv
		fmt.Printf("\tHeaders:%s\n", putResp.Header)
		// PrevKv is nil when the key had no previous revision (e.g. the
		// very first run); dereferencing it unconditionally panicked.
		if prevKv != nil {
			fmt.Printf("\t上一个版本的Value是:%s\n\n", prevKv.Value)
		}
	}

	fmt.Println("=== Operation Get ===")
	// Build the get op; WithPrefix fetches every key under /study/.
	opGet := clientv3.OpGet("/study/", clientv3.WithPrefix())
	// Execute the op.
	if opResponse, err := kv.Do(context.TODO(), opGet); err != nil {
		log.Println(err.Error())
	} else {
		// Print each key/value pair returned.
		getResp := opResponse.Get()
		for _, item := range getResp.Kvs {
			fmt.Printf("\tKey: %s \t Value: %s\t Version: %d\n", item.Key, item.Value, item.Version)
		}
	}
}
|
lordio/insanity
|
src/Windows/CWindowsWin32Window.cpp
|
#define INSANITY_BUILDING_LIBRARY
#include "CWindowsWin32Window.hpp"
#if defined(PLATFORM_MSWINDOWS)
#include <IThread.hpp>
#include <IGarbageCollector.hpp>
#include <IApplication.hpp>
#include <TRectangle.hpp>
#include <IConfigObject.hpp>
#include "WindowsStringConversion.hpp"
#include <windowsx.h>
#include <CommCtrl.h>
#include <iostream>
#include <string>
namespace
{
	// Converts a desired *client-area* rectangle into the full window
	// rectangle (borders/caption included) that CreateWindowEx expects.
	// Styles are hard-coded to the overlapped-window styles used by
	// _InitWindow below.
	RECT _GetAdjustedRect(Insanity::TRectangle<Insanity::s16, Insanity::u16> const & rect)
	{
		RECT ret;
		ret.left = rect.GetLeft();
		ret.right = rect.GetRight();
		ret.top = rect.GetTop();
		ret.bottom = rect.GetBottom();

		// AdjustWindowRectEx grows the rect in place to account for the
		// non-client area.
		AdjustWindowRectEx(&ret, WS_OVERLAPPEDWINDOW, FALSE, WS_EX_OVERLAPPEDWINDOW);

		return ret;
	}
}
namespace Insanity
{
IWindow * IWindow::Create(IWindow * ext, IConfigObject const * cfg)
{
return new CWindowsWin32Window(ext, cfg);
}
bool CWindowsWin32Window::s_windowClassRegistered{};
Ptr<CWindowsWin32EventPumpTask> CWindowsWin32Window::s_pumpTask{};
u64 CWindowsWin32Window::s_winCount{};
u64 CWindowsWin32Window::GetWindowCount()
{
return s_winCount;
}
CWindowsWin32Window::CWindowsWin32Window(IWindow * ext, IConfigObject const * cfg) :
_rect{}, _ext{ ext }, _win{ NULL }
{
HMODULE hInst{ GetModuleHandle(nullptr) };
_InitWindowClass(hInst);
_InitEventPump();
_InitWindow(hInst, cfg);
s_winCount++;
}
CWindowsWin32Window::~CWindowsWin32Window()
{
if(--s_winCount == 0)
{
s_pumpTask = nullptr; //The windows don't need it anymore.
//the pump task will not be deleted until it's dequeued from the thread task list.
}
//A comment on SetWindowSubclass says to remove the subclass before destroying the window.
RemoveWindowSubclass(_win,WindowProc,0);
DestroyWindow(_win);
}
void CWindowsWin32Window::_InitWindowClass(HINSTANCE hInst)
{
if (!s_windowClassRegistered)
{
WNDCLASSEXW wcex;
ZeroMemory(&wcex, sizeof(wcex));
wcex.cbClsExtra = 0; //Won't be able to access
wcex.cbSize = sizeof(wcex); //Constant
wcex.cbWndExtra = 0; //Won't be able to access
wcex.hbrBackground = (HBRUSH) (COLOR_WINDOW);//More or less constant.
wcex.hCursor = (HCURSOR) LoadImage(hInst, IDC_ARROW, IMAGE_CURSOR, 0, 0, LR_SHARED); //Can be assigned later
wcex.hIcon = (HICON) LoadImage(hInst, IDI_APPLICATION, IMAGE_ICON, 0, 0, LR_SHARED); //Can be assigned later
wcex.hIconSm = (HICON) LoadImage(hInst, IDI_APPLICATION, IMAGE_ICON, 0, 0, LR_SHARED); //Can be assigned later
wcex.hInstance = hInst; //Constant
wcex.lpfnWndProc = InitialWindowProc; //Will assign WindowProc after window creation
wcex.lpszClassName = L"InsanityWindowClass";//Constant
wcex.lpszMenuName = nullptr; //Can be assigned if/when a Menu is added.
wcex.style = CS_HREDRAW | CS_VREDRAW | CS_DBLCLKS; //First two needed, third needed for uniformity with other platforms.
RegisterClassExW(&wcex);
s_windowClassRegistered = true;
}
}
void CWindowsWin32Window::_InitWindow(HINSTANCE hInst, IConfigObject const * cfg)
{
//_rect stores the dimensions of the client area of the window.
_rect.SetX(static_cast<s16>(cfg->GetProperty("dims.x", s64{})));
_rect.SetY(static_cast<s16>(cfg->GetProperty("dims.y", s64{})));
_rect.SetWidth(static_cast<u16>(cfg->GetProperty("dims.width", s64{ 640 })));
_rect.SetHeight(static_cast<u16>(cfg->GetProperty("dims.height", s64{ 480 })));
//Need to adjust for non-client area of window for CreateWindow.
RECT&& adj = _GetAdjustedRect(_rect);
//convert the title to a wchar_t string
std::wstring wtitle{};
_title = cfg->GetProperty("title", "");
atow(_title, wtitle);
_win = CreateWindowExW(WS_EX_OVERLAPPEDWINDOW, //will either want this or options for fullscreen
L"InsanityWindowClass", //Constant
wtitle.c_str(), //Good candidate for Config file
WS_OVERLAPPEDWINDOW, //as dwExStyle
adj.left, adj.top,
adj.right - adj.left, adj.bottom - adj.top, //Dimensions are another good candidate
HWND_DESKTOP, //If in Config file, would need a way to specify another window
NULL, //Menu can be assigned later
hInst, //Constant
nullptr); //Constant (unused)
SetWindowSubclass(_win, WindowProc, 0, reinterpret_cast<DWORD_PTR>(this));
ShowWindow(_win, SW_SHOWDEFAULT);
}
void CWindowsWin32Window::_InitEventPump()
{
if (s_pumpTask == nullptr)
{
//creates a pump task, assigns it to the static pointer, and passes it to RegisterTask.
IApplication::GetInstance()->RegisterTask(s_pumpTask = new CWindowsWin32EventPumpTask());
}
//Other platforms have to register an event procedure with the event pump task.
// Since that's basically an emulation of Windows' event setup, we use the native setup here.
}
LRESULT CALLBACK CWindowsWin32Window::InitialWindowProc(HWND wnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
//this will receive at least WM_NCCREATE, WM_NCCALCSIZE, and WM_CREATE before assigning WindowProc subclass.
return DefWindowProcW(wnd,msg,wParam,lParam);
}
LRESULT CALLBACK CWindowsWin32Window::WindowProc(HWND wnd, UINT msg, WPARAM wParam, LPARAM lParam, UINT_PTR uSubclassId, DWORD_PTR dwRefData)
{
//this is the main method for processing window messages.
WeakPtr<CWindowsWin32Window> self{ reinterpret_cast<CWindowsWin32Window*>(dwRefData) };
WeakPtr<IWindow> call{ static_cast<IWindow*>(self->_ext ? self->_ext.Get() : self.Get()) };
POINTS pt = MAKEPOINTS(lParam);
WORD highWParam{ HIWORD(wParam) };
switch(msg)
{
case WM_LBUTTONDOWN:
call->MouseHandler(EMouseButton::Left, EMouseButtonState::Down, pt.x, pt.y);
break;
case WM_LBUTTONUP:
call->MouseHandler(EMouseButton::Left, EMouseButtonState::Up, pt.x, pt.y);
break;
case WM_LBUTTONDBLCLK:
call->MouseHandler(EMouseButton::Left, EMouseButtonState::DoubleClick, pt.x, pt.y);
break;
case WM_RBUTTONDOWN:
call->MouseHandler(EMouseButton::Right, EMouseButtonState::Down, pt.x, pt.y);
break;
case WM_RBUTTONUP:
call->MouseHandler(EMouseButton::Right, EMouseButtonState::Up, pt.x, pt.y);
break;
case WM_RBUTTONDBLCLK:
call->MouseHandler(EMouseButton::Right, EMouseButtonState::DoubleClick, pt.x, pt.y);
break;
case WM_MBUTTONDOWN:
call->MouseHandler(EMouseButton::Middle, EMouseButtonState::Down, pt.x, pt.y);
break;
case WM_MBUTTONUP:
call->MouseHandler(EMouseButton::Middle, EMouseButtonState::Up, pt.x, pt.y);
break;
case WM_MBUTTONDBLCLK:
call->MouseHandler(EMouseButton::Middle, EMouseButtonState::DoubleClick, pt.x, pt.y);
break;
case WM_XBUTTONDOWN:
call->MouseHandler((highWParam == 1 ? EMouseButton::X1 : EMouseButton::X2), EMouseButtonState::Down, pt.x, pt.y);
break;
case WM_XBUTTONUP:
call->MouseHandler((highWParam == 1 ? EMouseButton::X1 : EMouseButton::X2), EMouseButtonState::Up, pt.x, pt.y);
break;
case WM_XBUTTONDBLCLK:
call->MouseHandler((highWParam == 1 ? EMouseButton::X1 : EMouseButton::X2), EMouseButtonState::DoubleClick, pt.x, pt.y);
break;
case WM_KEYUP:
call->KeyHandler((EKey) wParam, EKeyState::Up);
break;
case WM_KEYDOWN:
call->KeyHandler((EKey) wParam, EKeyState::Down);
break;
case WM_CLOSE:
call->CloseHandler();
break;
case WM_SHOWWINDOW:
call->ShowHandler(wParam == TRUE);
break;
case WM_DESTROY:
//should only be sent when the window object is destroyed.
//should do any code on window destruction in the dtor, not here or a handler.
break;
case WM_MOVE:
//coordinates reported are the upper-left corner of the client area in screen coordinates.
call->MoveHandler(pt.x, pt.y);
break;
case WM_SIZE:
call->ResizeHandler(pt.x, pt.y);
break;
case WM_MOUSEMOVE:
call->MouseHandler(EMouseButton::Null, EMouseButtonState::Null, pt.x, pt.y);
break;
case WM_MOUSEWHEEL:
{
//need the sign from highWParam
SHORT delta{ (SHORT) highWParam };
//the second parameter to ScrollHandler is a simple magnitude (unsigned), so take the absolute value.
call->ScrollHandler((delta > 0 ? EMouseScrollDirection::Up : EMouseScrollDirection::Down), abs(delta / WHEEL_DELTA));
}
break;
}
return DefSubclassProc(wnd,msg,wParam,lParam);
}
HWND CWindowsWin32Window::GetWindow() const
{
return _win;
}
//=====================================================
//Interface: IWindow
//=====================================================
TRectangle<s16,u16> const & CWindowsWin32Window::GetRect() const
{
return _rect;
}
char const * CWindowsWin32Window::GetTitle() const
{
return _title.c_str();
}
void CWindowsWin32Window::SetTitle(char const * title)
{
_title = title;
std::wstring wtitle{};
atow(_title, wtitle);
SetWindowTextW(_win, wtitle.c_str());
}
void CWindowsWin32Window::MouseHandler(EMouseButton button, EMouseButtonState state, u16 x, u16 y)
{
}
void CWindowsWin32Window::KeyHandler(EKey key, EKeyState state)
{
}
void CWindowsWin32Window::ScrollHandler(EMouseScrollDirection dir, u16 delta)
{
}
void CWindowsWin32Window::ShowHandler(bool show)
{
}
void CWindowsWin32Window::MoveHandler(s16 x, s16 y)
{
_rect.SetX(x);
_rect.SetY(y);
}
void CWindowsWin32Window::ResizeHandler(u16 width, u16 height)
{
_rect.SetWidth(width);
_rect.SetHeight(height);
}
void CWindowsWin32Window::CloseHandler()
{
}
	// Synthesizes a mouse event by posting the corresponding WM_* message
	// to this window's queue (asynchronous; it is handled later by
	// WindowProc like a real event).
	void CWindowsWin32Window::Mouse(EMouseButton button, EMouseButtonState state, u16 x, u16 y)
	{
		UINT msg{};
		WORD xButton{}; // 1 or 2 for the X buttons; goes in the high word of wParam

		// Pick the ...DOWN message for the button; state is applied below.
		switch (button)
		{
		case EMouseButton::Left:
			msg = WM_LBUTTONDOWN;
			break;
		case EMouseButton::Middle:
			msg = WM_MBUTTONDOWN;
			break;
		case EMouseButton::Right:
			msg = WM_RBUTTONDOWN;
			break;
		case EMouseButton::X1:
			xButton = 1;
			msg = WM_XBUTTONDOWN;
			break;
		case EMouseButton::X2:
			xButton = 2;
			msg = WM_XBUTTONDOWN;
			break;
		case EMouseButton::Null:
			msg = WM_MOUSEMOVE;
			break;
		}

		if (button != EMouseButton::Null) //ignore the state; should be Null, but don't count on it.
		{
			// Relies on Win32's contiguous numbering for each button:
			// WM_xBUTTONUP == WM_xBUTTONDOWN + 1 and
			// WM_xBUTTONDBLCLK == WM_xBUTTONDOWN + 2.
			switch (state)
			{
			case EMouseButtonState::Down:
				//no-op, Down is already set.
				break;
			case EMouseButtonState::Up:
				msg += 1;
				break;
			case EMouseButtonState::DoubleClick:
				msg += 2;
				break;
			}
		}

		// Coordinates travel in lParam (x in the low word, y in the high
		// word), mirroring the layout MAKEPOINTS unpacks in WindowProc.
		DWORD tmpX{ x };
		DWORD tmpY{ y };

		PostMessage(_win, msg, xButton << 16, MAKELPARAM(tmpX, tmpY));
	}
	// Synthesizes a key press/release by posting WM_KEYDOWN/WM_KEYUP.
	void CWindowsWin32Window::Key(EKey key, EKeyState state)
	{
		//LPARAM is a bunch of state flags we have no way of tracking, so ignore it.
		PostMessage(_win, (state == EKeyState::Down ? WM_KEYDOWN : WM_KEYUP), (WPARAM)key, 0);
	}
	// Synthesizes a mouse-wheel event. The signed wheel delta is packed
	// into the high word of wParam, matching how WindowProc reads it back
	// out with HIWORD.
	void CWindowsWin32Window::Scroll(EMouseScrollDirection dir, u16 delta)
	{
		//mask off the sign bit (shouldn't cause problems)
		SHORT postDelta{ delta & 0x7fff } ;
		// Direction supplies the sign: Up is positive, Down negative.
		postDelta *= (dir == EMouseScrollDirection::Up ? 1 : -1);

		//Last parameter should be the current mouse position, but that's not provided nor tracked internally.
		PostMessage(_win, WM_MOUSEWHEEL, (static_cast<DWORD>(postDelta)) << 16, 0);
	}
void CWindowsWin32Window::Show(bool show)
{
PostMessage(_win, WM_SHOWWINDOW, show, 0);
}
void CWindowsWin32Window::Move(s16 x, s16 y)
{
PostMessage(_win, WM_MOVE, 0, MAKELPARAM(x, y));
}
void CWindowsWin32Window::Resize(u16 width, u16 height)
{
PostMessage(_win, WM_SIZE, SIZE_RESTORED, MAKELPARAM(width, height));
}
void CWindowsWin32Window::Close()
{
PostMessage(_win, WM_CLOSE, 0, 0);
}
}
#endif
|
xiaojieay/mybatis-study
|
src/main/java/org/apache/ibatis/type/FloatTypeHandler.java
|
package org.apache.ibatis.type;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * MyBatis type handler mapping SQL FLOAT columns to {@link Float}.
 * JDBC's getFloat() returns 0 for SQL NULL, so each getter checks
 * wasNull() — but only when the value is 0, to avoid the extra JDBC
 * call on the common non-zero path.
 *
 * @author <NAME>
 */
public class FloatTypeHandler extends BaseTypeHandler<Float> {

    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, Float parameter, JdbcType jdbcType)
            throws SQLException {
        ps.setFloat(i, parameter);
    }

    @Override
    public Float getNullableResult(ResultSet rs, String columnName)
            throws SQLException {
        float result = rs.getFloat(columnName);
        // 0 may mean SQL NULL; wasNull() disambiguates.
        return result == 0 && rs.wasNull() ? null : result;
    }

    @Override
    public Float getNullableResult(ResultSet rs, int columnIndex)
            throws SQLException {
        float result = rs.getFloat(columnIndex);
        return result == 0 && rs.wasNull() ? null : result;
    }

    @Override
    public Float getNullableResult(CallableStatement cs, int columnIndex)
            throws SQLException {
        float result = cs.getFloat(columnIndex);
        return result == 0 && cs.wasNull() ? null : result;
    }
}
|
NullPopPoLab/Play--Framework
|
include/StreamBitStream.h
|
<gh_stars>10-100
#pragma once

#include <stdexcept>
#include "Types.h"
#include "Stream.h"
#include "BitStream.h"

namespace Framework
{
	// Bit-level reader layered over a byte-oriented CStream. Keeps a
	// small internal byte buffer that is refilled from the stream on
	// demand (see the .cpp for the refill logic).
	class CStreamBitStream : public CBitStream
	{
	public:
		// The referenced stream must outlive this object.
		CStreamBitStream(Framework::CStream&);
		virtual ~CStreamBitStream();

		// Consumes the given number of bits.
		virtual void Advance(uint8) override;
		// Bit offset within the current byte.
		virtual uint8 GetBitIndex() const override;
		// Peek up to 32 bits without consuming them, LSB-first or
		// MSB-first; presumably returns false when not enough bits are
		// available — confirm against the implementation.
		virtual bool TryPeekBits_LSBF(uint8, uint32&) override;
		virtual bool TryPeekBits_MSBF(uint8, uint32&) override;

	private:
		enum
		{
			BUFFER_SIZE = 16,
		};

		uint8 m_buffer[BUFFER_SIZE];
		// NOTE(review): units of m_cursor (bytes vs. bits into m_buffer)
		// are not visible from this header — verify in the .cpp.
		unsigned int m_cursor;
		unsigned int m_availableBits;
		Framework::CStream& m_stream;
	};
}
|
emptyland/nyaa
|
src/system/random-zone-system.cc
|
#include "system/random-zone-system.h"
#include "component/zone-component.h"
#include "resource/cube-library.h"
#include "resource/sprite-library.h"
#include "game/game.h"
namespace nyaa {
namespace sys {
// Convenience overload: regenerate the region owned by the zone.
void RandomZoneSystem::Update(com::ZoneComponent *zone) {
    Update(zone->mutable_region());
}
// Procedurally fills a region: clears every floor, randomizes the
// surface and the level above it (including tree placement), then fills
// the underground levels. Uses the global ::rand() stream, so results
// depend on the caller's seeding.
void RandomZoneSystem::Update(com::RegionComponent *region) {
    res::SpriteLibrary *sprite_lib = Game::This()->sprite_lib();

    // (1), fill sky cube — zero every floor (cube kind 0 = empty/air).
    for (int i = 0; i < kTerrainMaxLevels; i++) { ::memset(region->floor(i), 0, sizeof(com::RegionComponent::Floor)); }

    // (2), fill surface level with randomly mixed dirt/grass tiles.
    com::RegionComponent::Floor *surface = region->floor(kTerrainSurfaceLevel);
    for (int y = 0; y < kRegionSize; y++) {
        for (int x = 0; x < kRegionSize; x++) {
            int seed = ::rand() & 0x7;  // 1-in-8 odds per special tile
            switch (seed) {
                case 0: surface->cubes[x][y].set_kind(res::Cube::CUBE_DIRT_1); break;
                // case 1: surface->cubes[x][y].set_kind(res::Cube::CUBE_STONE_1); break;
                case 2: surface->cubes[x][y].set_kind(res::Cube::CUBE_DIRT_2); break;
                case 3: surface->cubes[x][y].set_kind(res::Cube::CUBE_GRASS_2); break;
                default: surface->cubes[x][y].set_kind(res::Cube::CUBE_GRASS_1); break;
            }
            surface->cubes[x][y].set_hardness(4);
        }
    }

    // (3) fill terrain one level above the surface: occasional grass and,
    // on seed==2, a plant (tree) with a three-cube-tall stub underneath.
    surface = region->floor(kTerrainSurfaceLevel + 1);
    for (int y = 0; y < kRegionSize; y++) {
        for (int x = 0; x < kRegionSize; x++) {
            int seed = ::rand() & 0x1f;  // 1-in-32 odds per case
            switch (seed) {
                // case 0: surface->cubes[x][y].set_kind(res::Cube::CUBE_STONE_1); break;
                case 0: surface->cubes[x][y].set_kind(res::Cube::CUBE_GRASS_2); break;
                case 1: surface->cubes[x][y].set_kind(res::Cube::CUBE_GRASS_1); break;
                case 2: {
                    // Append a new plant entry and jitter its y position
                    // slightly (adjust < 0.25) so trees don't align on a grid.
                    size_t pos = region->plants_size();
                    region->mutable_plants()->resize(pos + 1);
                    com::PlantComponent *plant = region->plant(pos);

                    float adjust = static_cast<float>(rand() & 0xff) / 1024;
                    float xx = x, yy = y + adjust;
                    // Clamp into the region (defensive; x/y already in range).
                    if (xx >= kRegionSize) { xx = kRegionSize - 1; }
                    if (xx < 0) { xx = 0; }
                    if (yy >= kRegionSize) { yy = kRegionSize - 1; }
                    if (yy < 0) { yy = 0; }

                    plant->set_position({xx, yy, kTerrainSurfaceLevel + 0.0f});
                    // Pick one of two tree sprites (resource ids 100000/100010).
                    plant->set_sprite(sprite_lib->FindOrNull(ResourceId::Of((rand() & 0x1) ? 100000 : 100010)));
                    // DLOG(INFO) << plant->sprite();

                    // Solid trunk: three stacked stub cubes above the surface.
                    for (int i = 0; i < 3; i++) {
                        com::CubeComponent *cube = &region->floor(kTerrainSurfaceLevel + 1 + i)->cubes[x][y];
                        cube->set_hardness(2);
                        cube->set_kind(res::Cube::CUBE_TREE_STUB);
                    }
                } break;
                default: surface->cubes[x][y].set_kind(res::Cube::CUBE_AIR); break;
            }
            if (surface->cubes[x][y].kind() != res::Cube::CUBE_AIR) { surface->cubes[x][y].set_hardness(4); }
        }
    }

    // (4) fill under ground: every level below the surface is solid
    // dirt/stone.
    for (int level = 0; level < kTerrainSurfaceLevel; level++) {
        surface = region->floor(level);
        for (int y = 0; y < kRegionSize; y++) {
            for (int x = 0; x < kRegionSize; x++) {
                int seed = ::rand() & 0x3;
                switch (seed) {
                    case 0: surface->cubes[x][y].set_kind(res::Cube::CUBE_DIRT_1); break;
                    case 1: surface->cubes[x][y].set_kind(res::Cube::CUBE_STONE_1); break;
                    default: surface->cubes[x][y].set_kind(res::Cube::CUBE_DIRT_2); break;
                }
                surface->cubes[x][y].set_hardness(4);
            }
        }
    }
}
} // namespace sys
} // namespace nyaa
|
ita-social-projects/ZeroWaste
|
db/migrate/20210428101446_change_products.rb
|
class ChangeProducts < ActiveRecord::Migration[6.1]
  # Titles of the products this migration creates (and removes on rollback).
  PRODUCT_TITLES = ['Diapers', 'Reusable diapers'].freeze

  # Creates the two diaper product rows under the "Diapers" product type.
  def up
    diapers_type.products.create(PRODUCT_TITLES.map { |title| { title: title } })
  end

  # Removes only the rows created by +up+. The previous implementation
  # called +destroy_all+ on the whole association, which would have wiped
  # every product of the type -- not just the two added above.
  def down
    diapers_type.products.where(title: PRODUCT_TITLES).destroy_all
  end

  private

  # Looks up the "Diapers" product type. +find_by!+ raises a clear
  # ActiveRecord::RecordNotFound (instead of the NoMethodError that
  # +find_by(...).products+ produced) when the type is missing, e.g. on
  # an unseeded database.
  def diapers_type
    ProductType.find_by!(title: 'Diapers')
  end
end
|
npocmaka/Windows-Server-2003
|
inetcore/connectionwizard/icwconn2/debug.h
|
<reponame>npocmaka/Windows-Server-2003
/*-----------------------------------------------------------------------------
debug.h
Declarations for debug features
Copyright (C) 1996 Microsoft Corporation
All rights reserved
Authors:
ChrisK <NAME>
History:
7/22/96 ChrisK Cleaned and formatted
-----------------------------------------------------------------------------*/
#ifndef _PHBKDEBUG
#define _PHBKDEBUG

/* Printf-style debug trace output; declared for all build flavors. */
void Dprintf(LPCTSTR pcsz, ...);

#ifdef DEBUG
/* Shows an assertion-failure dialog for the given file/line/message.
   Returns TRUE when the user asks to break into the debugger (consumed
   by the AssertSzFlg macro below). */
BOOL FAssertProc(LPCTSTR szFile, DWORD dwLine, LPCTSTR szMsg, DWORD dwFlags);
/* Writes a string to the debugger output stream. */
void DebugSz(LPCTSTR psz);
/* Asserts that f is true; on failure reports sz (with flags dwFlg) and
   optionally breaks into the debugger.
   NOTE(review): the inner ternary mixes DebugBreak() (void) with the int
   operand 1 -- accepted by the era's MSVC, rejected by conforming
   compilers; confirm before building with a modern toolchain. */
#define AssertSzFlg(f, sz, dwFlg) ( (f) ? 0 : FAssertProc(__FILE__, __LINE__, sz, dwFlg) ? DebugBreak() : 1 )
/* Assert with a custom message and no flags. */
#define AssertSz(f, sz) AssertSzFlg(f, sz, 0)
/* Assert that stringizes the failing expression as its message. */
#define Assert(f) AssertSz((f), "!(" #f ")")
#else
/* Retail builds: all assertion/trace macros compile away to nothing. */
#define DebugSz(x)
#define AssertSzFlg(f, sz, dwFlg)
#define AssertSz(f, sz)
#define Assert(f)
#endif
#endif //_PHBKDEBUG
|
long-grass/mikey
|
universal_api_project_template/api/controllers/user.js
|
// // Route Dependencies
// import express from 'express';
// let router = express.Router();
// import app from '../../server';
// import bcrypt from 'bcrypt';
//
// // Services
// import AuthServices from '../services/AuthService';
//
// // Routes ----------------------------
//
// // Base Path: /user->
//
// //Authentication -----------------------------------------------------------------
// // Create User
// router.post('/signup', function(req, res) {
// // Do the passwords match?
// if (req.body.password === req.body.passwordConfirmation) {
// if (req.body.password.length >= 8) {
// delete req.body.passwordConfirmation
// // Create User in Database
// app.models.user.create(req.body, function(error, model) {
// if (error) {
// return res.status(500).json({
// success: false,
// error: error.invalidAttributes
// });
// }
// res.json(
// { success: true,
// message: {
// created: [{
// rule: 'created',
// message: 'Account created successfully.'
// }]
// }
// }
// );
// // Probly configure email confirmation here eventually
// });
// } else {
// // Incoming password is too short error.
// return res.status(500).json(
// { success: false, error: { password: [{ rule: 'passwordLength', message: 'Passwords need to be at least 8 characters in length.' }] } }
// );
// }
// } else {
// // Passwords do not match error.
// return res.status(500).json(
// { success: false, error: { password: [{ rule: 'password', message: 'Passwords do not match.' }] } }
// );
// }
// });
//
// router.post('/signin', function(req, res){
// app.models.user.findOneByEmail(req.body.email).exec(function(error, user){
// if (error) {
// // Send error if find results in error
// res.status(500).json({
// success: false,
// error: {
// signIn: [{
// rule: 'errorFindingUser',
// message: 'Error when trying to find user.'
// }]
// }
// });
// }
// if (user) {
// // Found user by email, now compare the passwords
// bcrypt.compare(req.body.password, user.password, function(error, match){
// if (match) {
// var token = AuthServices.generateUserToken(user);
// if (token != null) {
// // JWT is good, send token and some credentials
// delete user.password;
// res.status(200).json({
// success: true, message: {
// signedIn: [{
// rule: 'signedIn',
// message: `Account signed in successfully. Welcome back ${user.username}!`,
// }]
// },
// user: {
// id: user.id,
// email: user.email,
// username: user.username,
// avatar: user.avatar
// },
// token: token
// });
// } else {
// // Something is wrong with the JWT
// res.status(401).json({ success: false, error: { signIn: [{ rule: 'jwt', message: "Error creating a session." }] } });
// }
// } else {
// // If it's an invalid password, send an error response
// res.status(401).json({ success: false, error: { signIn: [{ rule: 'invalidCredentials', message: "Email or password incorrect." }] } });
// }
// })
// } else {
// // If it's an invalid email, send an error response
// res.status(401).json({ success: false, error: { signIn: [{ rule: 'invalidCredentials', message: "Email or password incorrect." }] } });
// }
// });
// });
//
// // For retrieving user data after login, can add picture and other info later.
// router.get('/authenticate', function(req, res) {
// AuthServices.getUser(req.headers.access_token).exec((err, user) => {
// if (user != null) {
// res.status(200).json({
// id: user.id,
// username: user.username,
// email: user.email,
// avatar: user.avatar
// });
// } else {
// res.status(401).json({ success: false, error: { signIn: [{ rule: 'jwt', message: "Error authenticating session." }] } })
// }
// });
// });
// // End Authentication ----------------------------------------------------------------------
//
// // ------------------------------------
//
// module.exports = router;
|
osmar-jr/orange-talents-07-template-ecommerce
|
src/main/java/br/com/zupacademy/osmarjunior/mercadolivre/controller/form/LoginForm.java
|
<filename>src/main/java/br/com/zupacademy/osmarjunior/mercadolivre/controller/form/LoginForm.java
package br.com.zupacademy.osmarjunior.mercadolivre.controller.form;
import br.com.zupacademy.osmarjunior.mercadolivre.annotation.ExistsId;
import br.com.zupacademy.osmarjunior.mercadolivre.model.Usuario;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.hibernate.validator.constraints.Length;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
/**
 * Login request payload (e-mail + password) deserialized from JSON by
 * Jackson. Bean Validation runs before the controller sees it: the login
 * must be a non-blank, well-formed e-mail that -- via the custom
 * {@code @ExistsId} constraint -- already exists as a {@code Usuario}
 * login, and the password must be at least 6 characters long.
 */
public class LoginForm {

    /** User login (e-mail); must already exist in the Usuario table. */
    @NotBlank
    @Email
    @ExistsId(classDomain = Usuario.class, attributeName = "login")
    private String login;

    /** Raw password as submitted; minimum length 6. */
    @NotBlank @Length(min = 6)
    private String senha;

    /**
     * Property-based creator used by Jackson when binding the JSON
     * request body to this form.
     */
    @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
    public LoginForm(@NotBlank @Email String login,
                     @NotBlank @Length(min = 6) String senha) {
        this.login = login;
        this.senha = senha;
    }

    /** No-arg constructor kept for frameworks only; do not use directly. */
    @Deprecated
    public LoginForm() {
    }

    /**
     * Wraps the submitted credentials in the unauthenticated Spring
     * Security token consumed by the AuthenticationManager.
     */
    public UsernamePasswordAuthenticationToken toUsernamePasswordAuthenticationToken() {
        return new UsernamePasswordAuthenticationToken(this.login, this.senha);
    }
}
|
prem-chand/Cassie_CFROST
|
Digit_Example/opt_two_step/gen/opt/Js_dxDiscreteMapLeftImpact.cc
|
/*
* Automatically Generated from Mathematica.
* Fri 5 Nov 2021 16:27:00 GMT-04:00
*/
#ifdef MATLAB_MEX_FILE
#include <stdexcept>
#include <cmath>
#include<math.h>
/**
 * Copied from Wolfram Mathematica C Definitions file mdefs.hpp
 * Changed macros to inline functions (<NAME>)
 *
 * Thin wrappers mapping Mathematica's capitalized function names onto
 * the C math library, so expressions emitted by the code generator
 * compile unchanged.
 */
inline double Power(double x, double y) { return pow(x, y); }
inline double Sqrt(double x) { return sqrt(x); }
inline double Abs(double x) { return fabs(x); }
inline double Exp(double x) { return exp(x); }
inline double Log(double x) { return log(x); }
inline double Sin(double x) { return sin(x); }
inline double Cos(double x) { return cos(x); }
inline double Tan(double x) { return tan(x); }
inline double ArcSin(double x) { return asin(x); }
inline double ArcCos(double x) { return acos(x); }
inline double ArcTan(double x) { return atan(x); }
/* update ArcTan function to use atan2 instead.
   Two-argument overload: Mathematica's ArcTan[x, y] corresponds to
   atan2(y, x), hence the swapped argument order. */
inline double ArcTan(double x, double y) { return atan2(y,x); }
inline double Sinh(double x) { return sinh(x); }
inline double Cosh(double x) { return cosh(x); }
inline double Tanh(double x) { return tanh(x); }

/* Mathematical constants matching Mathematica's E, Pi and Degree
   (Degree = Pi/180, for degree-to-radian conversion). */
const double E = 2.71828182845904523536029;
const double Pi = 3.14159265358979323846264;
const double Degree = 0.01745329251994329576924;

inline double Sec(double x) { return 1/cos(x); }
inline double Csc(double x) { return 1/sin(x); }
#endif
/*
* Sub functions
*/
static void output1(double *p_output1,const double *var1)
{
double _NotUsed;
NULL;
p_output1[0]=1;
p_output1[1]=1;
p_output1[2]=1;
p_output1[3]=1;
p_output1[4]=1;
p_output1[5]=1;
p_output1[6]=1;
p_output1[7]=1;
p_output1[8]=1;
p_output1[9]=1;
p_output1[10]=1;
p_output1[11]=1;
p_output1[12]=1;
p_output1[13]=1;
p_output1[14]=1;
p_output1[15]=1;
p_output1[16]=1;
p_output1[17]=1;
p_output1[18]=1;
p_output1[19]=1;
p_output1[20]=1;
p_output1[21]=1;
p_output1[22]=1;
p_output1[23]=1;
p_output1[24]=1;
p_output1[25]=1;
p_output1[26]=1;
p_output1[27]=1;
p_output1[28]=1;
p_output1[29]=1;
p_output1[30]=1;
p_output1[31]=1;
p_output1[32]=1;
p_output1[33]=1;
p_output1[34]=1;
p_output1[35]=1;
p_output1[36]=1;
p_output1[37]=1;
p_output1[38]=1;
p_output1[39]=1;
p_output1[40]=1;
p_output1[41]=1;
p_output1[42]=1;
p_output1[43]=1;
p_output1[44]=1;
p_output1[45]=1;
p_output1[46]=1;
p_output1[47]=1;
p_output1[48]=1;
p_output1[49]=1;
p_output1[50]=1;
p_output1[51]=1;
p_output1[52]=1;
p_output1[53]=1;
p_output1[54]=1;
p_output1[55]=1;
p_output1[56]=1;
p_output1[57]=1;
p_output1[58]=1;
p_output1[59]=1;
p_output1[60]=1;
p_output1[61]=1;
p_output1[62]=1;
p_output1[63]=1;
p_output1[64]=1;
p_output1[65]=1;
p_output1[66]=1;
p_output1[67]=1;
p_output1[68]=1;
p_output1[69]=1;
p_output1[70]=1;
p_output1[71]=1;
p_output1[72]=1;
p_output1[73]=1;
p_output1[74]=1;
p_output1[75]=1;
p_output1[76]=1;
p_output1[77]=1;
p_output1[78]=1;
p_output1[79]=1;
p_output1[80]=1;
p_output1[81]=1;
p_output1[82]=1;
p_output1[83]=1;
p_output1[84]=1;
p_output1[85]=1;
p_output1[86]=1;
p_output1[87]=1;
p_output1[88]=1;
p_output1[89]=1;
p_output1[90]=1;
p_output1[91]=1;
p_output1[92]=1;
p_output1[93]=1;
p_output1[94]=1;
p_output1[95]=1;
p_output1[96]=1;
p_output1[97]=1;
p_output1[98]=1;
p_output1[99]=1;
p_output1[100]=1;
p_output1[101]=1;
p_output1[102]=2;
p_output1[103]=2;
p_output1[104]=2;
p_output1[105]=2;
p_output1[106]=2;
p_output1[107]=2;
p_output1[108]=2;
p_output1[109]=2;
p_output1[110]=2;
p_output1[111]=2;
p_output1[112]=2;
p_output1[113]=2;
p_output1[114]=2;
p_output1[115]=2;
p_output1[116]=2;
p_output1[117]=2;
p_output1[118]=2;
p_output1[119]=2;
p_output1[120]=2;
p_output1[121]=2;
p_output1[122]=2;
p_output1[123]=2;
p_output1[124]=2;
p_output1[125]=2;
p_output1[126]=2;
p_output1[127]=2;
p_output1[128]=2;
p_output1[129]=2;
p_output1[130]=2;
p_output1[131]=2;
p_output1[132]=2;
p_output1[133]=2;
p_output1[134]=2;
p_output1[135]=2;
p_output1[136]=2;
p_output1[137]=2;
p_output1[138]=2;
p_output1[139]=2;
p_output1[140]=2;
p_output1[141]=2;
p_output1[142]=2;
p_output1[143]=2;
p_output1[144]=2;
p_output1[145]=2;
p_output1[146]=2;
p_output1[147]=2;
p_output1[148]=2;
p_output1[149]=2;
p_output1[150]=2;
p_output1[151]=2;
p_output1[152]=2;
p_output1[153]=2;
p_output1[154]=2;
p_output1[155]=2;
p_output1[156]=2;
p_output1[157]=2;
p_output1[158]=2;
p_output1[159]=2;
p_output1[160]=2;
p_output1[161]=2;
p_output1[162]=2;
p_output1[163]=2;
p_output1[164]=2;
p_output1[165]=2;
p_output1[166]=2;
p_output1[167]=2;
p_output1[168]=2;
p_output1[169]=2;
p_output1[170]=2;
p_output1[171]=2;
p_output1[172]=2;
p_output1[173]=2;
p_output1[174]=2;
p_output1[175]=2;
p_output1[176]=2;
p_output1[177]=2;
p_output1[178]=2;
p_output1[179]=2;
p_output1[180]=2;
p_output1[181]=2;
p_output1[182]=2;
p_output1[183]=2;
p_output1[184]=2;
p_output1[185]=2;
p_output1[186]=2;
p_output1[187]=2;
p_output1[188]=2;
p_output1[189]=2;
p_output1[190]=2;
p_output1[191]=2;
p_output1[192]=2;
p_output1[193]=2;
p_output1[194]=2;
p_output1[195]=2;
p_output1[196]=2;
p_output1[197]=2;
p_output1[198]=2;
p_output1[199]=2;
p_output1[200]=2;
p_output1[201]=2;
p_output1[202]=2;
p_output1[203]=2;
p_output1[204]=3;
p_output1[205]=3;
p_output1[206]=3;
p_output1[207]=3;
p_output1[208]=3;
p_output1[209]=3;
p_output1[210]=3;
p_output1[211]=3;
p_output1[212]=3;
p_output1[213]=3;
p_output1[214]=3;
p_output1[215]=3;
p_output1[216]=3;
p_output1[217]=3;
p_output1[218]=3;
p_output1[219]=3;
p_output1[220]=3;
p_output1[221]=3;
p_output1[222]=3;
p_output1[223]=3;
p_output1[224]=3;
p_output1[225]=3;
p_output1[226]=3;
p_output1[227]=3;
p_output1[228]=3;
p_output1[229]=3;
p_output1[230]=3;
p_output1[231]=3;
p_output1[232]=3;
p_output1[233]=3;
p_output1[234]=3;
p_output1[235]=3;
p_output1[236]=3;
p_output1[237]=3;
p_output1[238]=3;
p_output1[239]=3;
p_output1[240]=3;
p_output1[241]=3;
p_output1[242]=3;
p_output1[243]=3;
p_output1[244]=3;
p_output1[245]=3;
p_output1[246]=3;
p_output1[247]=3;
p_output1[248]=3;
p_output1[249]=3;
p_output1[250]=3;
p_output1[251]=3;
p_output1[252]=3;
p_output1[253]=3;
p_output1[254]=3;
p_output1[255]=3;
p_output1[256]=3;
p_output1[257]=3;
p_output1[258]=3;
p_output1[259]=3;
p_output1[260]=3;
p_output1[261]=3;
p_output1[262]=3;
p_output1[263]=3;
p_output1[264]=3;
p_output1[265]=3;
p_output1[266]=3;
p_output1[267]=3;
p_output1[268]=3;
p_output1[269]=3;
p_output1[270]=3;
p_output1[271]=3;
p_output1[272]=3;
p_output1[273]=3;
p_output1[274]=3;
p_output1[275]=3;
p_output1[276]=3;
p_output1[277]=3;
p_output1[278]=3;
p_output1[279]=3;
p_output1[280]=3;
p_output1[281]=3;
p_output1[282]=3;
p_output1[283]=3;
p_output1[284]=3;
p_output1[285]=3;
p_output1[286]=3;
p_output1[287]=3;
p_output1[288]=3;
p_output1[289]=3;
p_output1[290]=3;
p_output1[291]=3;
p_output1[292]=3;
p_output1[293]=3;
p_output1[294]=3;
p_output1[295]=3;
p_output1[296]=3;
p_output1[297]=3;
p_output1[298]=3;
p_output1[299]=3;
p_output1[300]=3;
p_output1[301]=3;
p_output1[302]=3;
p_output1[303]=3;
p_output1[304]=3;
p_output1[305]=3;
p_output1[306]=4;
p_output1[307]=4;
p_output1[308]=4;
p_output1[309]=4;
p_output1[310]=4;
p_output1[311]=4;
p_output1[312]=4;
p_output1[313]=4;
p_output1[314]=4;
p_output1[315]=4;
p_output1[316]=4;
p_output1[317]=4;
p_output1[318]=4;
p_output1[319]=4;
p_output1[320]=4;
p_output1[321]=4;
p_output1[322]=4;
p_output1[323]=4;
p_output1[324]=4;
p_output1[325]=4;
p_output1[326]=4;
p_output1[327]=4;
p_output1[328]=4;
p_output1[329]=4;
p_output1[330]=4;
p_output1[331]=4;
p_output1[332]=4;
p_output1[333]=4;
p_output1[334]=4;
p_output1[335]=4;
p_output1[336]=4;
p_output1[337]=4;
p_output1[338]=4;
p_output1[339]=4;
p_output1[340]=4;
p_output1[341]=4;
p_output1[342]=4;
p_output1[343]=4;
p_output1[344]=4;
p_output1[345]=4;
p_output1[346]=4;
p_output1[347]=4;
p_output1[348]=4;
p_output1[349]=4;
p_output1[350]=4;
p_output1[351]=4;
p_output1[352]=4;
p_output1[353]=4;
p_output1[354]=4;
p_output1[355]=4;
p_output1[356]=4;
p_output1[357]=4;
p_output1[358]=4;
p_output1[359]=4;
p_output1[360]=4;
p_output1[361]=4;
p_output1[362]=4;
p_output1[363]=4;
p_output1[364]=4;
p_output1[365]=4;
p_output1[366]=4;
p_output1[367]=4;
p_output1[368]=4;
p_output1[369]=4;
p_output1[370]=4;
p_output1[371]=4;
p_output1[372]=4;
p_output1[373]=4;
p_output1[374]=4;
p_output1[375]=4;
p_output1[376]=4;
p_output1[377]=4;
p_output1[378]=4;
p_output1[379]=4;
p_output1[380]=4;
p_output1[381]=4;
p_output1[382]=4;
p_output1[383]=4;
p_output1[384]=4;
p_output1[385]=4;
p_output1[386]=4;
p_output1[387]=4;
p_output1[388]=4;
p_output1[389]=4;
p_output1[390]=4;
p_output1[391]=4;
p_output1[392]=4;
p_output1[393]=4;
p_output1[394]=4;
p_output1[395]=4;
p_output1[396]=4;
p_output1[397]=4;
p_output1[398]=4;
p_output1[399]=4;
p_output1[400]=4;
p_output1[401]=4;
p_output1[402]=4;
p_output1[403]=4;
p_output1[404]=4;
p_output1[405]=4;
p_output1[406]=4;
p_output1[407]=4;
p_output1[408]=4;
p_output1[409]=4;
p_output1[410]=4;
p_output1[411]=4;
p_output1[412]=4;
p_output1[413]=4;
p_output1[414]=4;
p_output1[415]=4;
p_output1[416]=4;
p_output1[417]=4;
p_output1[418]=4;
p_output1[419]=4;
p_output1[420]=4;
p_output1[421]=4;
p_output1[422]=5;
p_output1[423]=5;
p_output1[424]=5;
p_output1[425]=5;
p_output1[426]=5;
p_output1[427]=5;
p_output1[428]=5;
p_output1[429]=5;
p_output1[430]=5;
p_output1[431]=5;
p_output1[432]=5;
p_output1[433]=5;
p_output1[434]=5;
p_output1[435]=5;
p_output1[436]=5;
p_output1[437]=5;
p_output1[438]=5;
p_output1[439]=5;
p_output1[440]=5;
p_output1[441]=5;
p_output1[442]=5;
p_output1[443]=5;
p_output1[444]=5;
p_output1[445]=5;
p_output1[446]=5;
p_output1[447]=5;
p_output1[448]=5;
p_output1[449]=5;
p_output1[450]=5;
p_output1[451]=5;
p_output1[452]=5;
p_output1[453]=5;
p_output1[454]=5;
p_output1[455]=5;
p_output1[456]=5;
p_output1[457]=5;
p_output1[458]=5;
p_output1[459]=5;
p_output1[460]=5;
p_output1[461]=5;
p_output1[462]=5;
p_output1[463]=5;
p_output1[464]=5;
p_output1[465]=5;
p_output1[466]=5;
p_output1[467]=5;
p_output1[468]=5;
p_output1[469]=5;
p_output1[470]=5;
p_output1[471]=5;
p_output1[472]=5;
p_output1[473]=5;
p_output1[474]=5;
p_output1[475]=5;
p_output1[476]=5;
p_output1[477]=5;
p_output1[478]=5;
p_output1[479]=5;
p_output1[480]=5;
p_output1[481]=5;
p_output1[482]=5;
p_output1[483]=5;
p_output1[484]=5;
p_output1[485]=5;
p_output1[486]=5;
p_output1[487]=5;
p_output1[488]=5;
p_output1[489]=5;
p_output1[490]=5;
p_output1[491]=5;
p_output1[492]=5;
p_output1[493]=5;
p_output1[494]=5;
p_output1[495]=5;
p_output1[496]=5;
p_output1[497]=5;
p_output1[498]=5;
p_output1[499]=5;
p_output1[500]=5;
p_output1[501]=5;
p_output1[502]=5;
p_output1[503]=5;
p_output1[504]=5;
p_output1[505]=5;
p_output1[506]=5;
p_output1[507]=5;
p_output1[508]=5;
p_output1[509]=5;
p_output1[510]=5;
p_output1[511]=5;
p_output1[512]=5;
p_output1[513]=5;
p_output1[514]=5;
p_output1[515]=5;
p_output1[516]=5;
p_output1[517]=5;
p_output1[518]=5;
p_output1[519]=5;
p_output1[520]=5;
p_output1[521]=5;
p_output1[522]=5;
p_output1[523]=5;
p_output1[524]=5;
p_output1[525]=5;
p_output1[526]=5;
p_output1[527]=5;
p_output1[528]=5;
p_output1[529]=5;
p_output1[530]=5;
p_output1[531]=5;
p_output1[532]=5;
p_output1[533]=5;
p_output1[534]=5;
p_output1[535]=5;
p_output1[536]=5;
p_output1[537]=5;
p_output1[538]=5;
p_output1[539]=6;
p_output1[540]=6;
p_output1[541]=6;
p_output1[542]=6;
p_output1[543]=6;
p_output1[544]=6;
p_output1[545]=6;
p_output1[546]=6;
p_output1[547]=6;
p_output1[548]=6;
p_output1[549]=6;
p_output1[550]=6;
p_output1[551]=6;
p_output1[552]=6;
p_output1[553]=6;
p_output1[554]=6;
p_output1[555]=6;
p_output1[556]=6;
p_output1[557]=6;
p_output1[558]=6;
p_output1[559]=6;
p_output1[560]=6;
p_output1[561]=6;
p_output1[562]=6;
p_output1[563]=6;
p_output1[564]=6;
p_output1[565]=6;
p_output1[566]=6;
p_output1[567]=6;
p_output1[568]=6;
p_output1[569]=6;
p_output1[570]=6;
p_output1[571]=6;
p_output1[572]=6;
p_output1[573]=6;
p_output1[574]=6;
p_output1[575]=6;
p_output1[576]=6;
p_output1[577]=6;
p_output1[578]=6;
p_output1[579]=6;
p_output1[580]=6;
p_output1[581]=6;
p_output1[582]=6;
p_output1[583]=6;
p_output1[584]=6;
p_output1[585]=6;
p_output1[586]=6;
p_output1[587]=6;
p_output1[588]=6;
p_output1[589]=6;
p_output1[590]=6;
p_output1[591]=6;
p_output1[592]=6;
p_output1[593]=6;
p_output1[594]=6;
p_output1[595]=6;
p_output1[596]=6;
p_output1[597]=6;
p_output1[598]=6;
p_output1[599]=6;
p_output1[600]=6;
p_output1[601]=6;
p_output1[602]=6;
p_output1[603]=6;
p_output1[604]=6;
p_output1[605]=6;
p_output1[606]=6;
p_output1[607]=6;
p_output1[608]=6;
p_output1[609]=6;
p_output1[610]=6;
p_output1[611]=6;
p_output1[612]=6;
p_output1[613]=6;
p_output1[614]=6;
p_output1[615]=6;
p_output1[616]=6;
p_output1[617]=6;
p_output1[618]=6;
p_output1[619]=6;
p_output1[620]=6;
p_output1[621]=6;
p_output1[622]=6;
p_output1[623]=6;
p_output1[624]=6;
p_output1[625]=6;
p_output1[626]=6;
p_output1[627]=6;
p_output1[628]=6;
p_output1[629]=6;
p_output1[630]=6;
p_output1[631]=6;
p_output1[632]=6;
p_output1[633]=6;
p_output1[634]=6;
p_output1[635]=6;
p_output1[636]=6;
p_output1[637]=6;
p_output1[638]=6;
p_output1[639]=6;
p_output1[640]=6;
p_output1[641]=6;
p_output1[642]=6;
p_output1[643]=6;
p_output1[644]=6;
p_output1[645]=6;
p_output1[646]=6;
p_output1[647]=6;
p_output1[648]=6;
p_output1[649]=6;
p_output1[650]=6;
p_output1[651]=6;
p_output1[652]=6;
p_output1[653]=6;
p_output1[654]=6;
p_output1[655]=6;
p_output1[656]=7;
p_output1[657]=7;
p_output1[658]=7;
p_output1[659]=7;
p_output1[660]=7;
p_output1[661]=7;
p_output1[662]=7;
p_output1[663]=7;
p_output1[664]=7;
p_output1[665]=7;
p_output1[666]=7;
p_output1[667]=7;
p_output1[668]=7;
p_output1[669]=7;
p_output1[670]=7;
p_output1[671]=7;
p_output1[672]=7;
p_output1[673]=7;
p_output1[674]=7;
p_output1[675]=7;
p_output1[676]=7;
p_output1[677]=7;
p_output1[678]=7;
p_output1[679]=7;
p_output1[680]=7;
p_output1[681]=7;
p_output1[682]=7;
p_output1[683]=7;
p_output1[684]=7;
p_output1[685]=7;
p_output1[686]=7;
p_output1[687]=7;
p_output1[688]=7;
p_output1[689]=7;
p_output1[690]=7;
p_output1[691]=7;
p_output1[692]=7;
p_output1[693]=7;
p_output1[694]=7;
p_output1[695]=7;
p_output1[696]=7;
p_output1[697]=7;
p_output1[698]=7;
p_output1[699]=7;
p_output1[700]=7;
p_output1[701]=7;
p_output1[702]=7;
p_output1[703]=7;
p_output1[704]=7;
p_output1[705]=7;
p_output1[706]=7;
p_output1[707]=7;
p_output1[708]=7;
p_output1[709]=7;
p_output1[710]=7;
p_output1[711]=7;
p_output1[712]=8;
p_output1[713]=8;
p_output1[714]=8;
p_output1[715]=8;
p_output1[716]=8;
p_output1[717]=8;
p_output1[718]=8;
p_output1[719]=8;
p_output1[720]=8;
p_output1[721]=8;
p_output1[722]=8;
p_output1[723]=8;
p_output1[724]=8;
p_output1[725]=8;
p_output1[726]=8;
p_output1[727]=8;
p_output1[728]=8;
p_output1[729]=8;
p_output1[730]=8;
p_output1[731]=8;
p_output1[732]=8;
p_output1[733]=8;
p_output1[734]=8;
p_output1[735]=8;
p_output1[736]=8;
p_output1[737]=8;
p_output1[738]=8;
p_output1[739]=8;
p_output1[740]=8;
p_output1[741]=8;
p_output1[742]=8;
p_output1[743]=8;
p_output1[744]=8;
p_output1[745]=8;
p_output1[746]=8;
p_output1[747]=8;
p_output1[748]=8;
p_output1[749]=8;
p_output1[750]=8;
p_output1[751]=8;
p_output1[752]=8;
p_output1[753]=8;
p_output1[754]=8;
p_output1[755]=8;
p_output1[756]=8;
p_output1[757]=8;
p_output1[758]=8;
p_output1[759]=8;
p_output1[760]=8;
p_output1[761]=8;
p_output1[762]=8;
p_output1[763]=8;
p_output1[764]=8;
p_output1[765]=8;
p_output1[766]=8;
p_output1[767]=8;
p_output1[768]=9;
p_output1[769]=9;
p_output1[770]=9;
p_output1[771]=9;
p_output1[772]=9;
p_output1[773]=9;
p_output1[774]=9;
p_output1[775]=9;
p_output1[776]=9;
p_output1[777]=9;
p_output1[778]=9;
p_output1[779]=9;
p_output1[780]=9;
p_output1[781]=9;
p_output1[782]=9;
p_output1[783]=9;
p_output1[784]=9;
p_output1[785]=9;
p_output1[786]=9;
p_output1[787]=9;
p_output1[788]=9;
p_output1[789]=9;
p_output1[790]=9;
p_output1[791]=9;
p_output1[792]=9;
p_output1[793]=9;
p_output1[794]=9;
p_output1[795]=9;
p_output1[796]=9;
p_output1[797]=9;
p_output1[798]=9;
p_output1[799]=9;
p_output1[800]=9;
p_output1[801]=9;
p_output1[802]=9;
p_output1[803]=9;
p_output1[804]=9;
p_output1[805]=9;
p_output1[806]=9;
p_output1[807]=9;
p_output1[808]=9;
p_output1[809]=9;
p_output1[810]=9;
p_output1[811]=9;
p_output1[812]=9;
p_output1[813]=9;
p_output1[814]=9;
p_output1[815]=9;
p_output1[816]=9;
p_output1[817]=9;
p_output1[818]=9;
p_output1[819]=9;
p_output1[820]=9;
p_output1[821]=9;
p_output1[822]=9;
p_output1[823]=9;
p_output1[824]=10;
p_output1[825]=10;
p_output1[826]=10;
p_output1[827]=10;
p_output1[828]=10;
p_output1[829]=10;
p_output1[830]=10;
p_output1[831]=10;
p_output1[832]=10;
p_output1[833]=10;
p_output1[834]=10;
p_output1[835]=10;
p_output1[836]=10;
p_output1[837]=10;
p_output1[838]=10;
p_output1[839]=10;
p_output1[840]=10;
p_output1[841]=10;
p_output1[842]=10;
p_output1[843]=10;
p_output1[844]=10;
p_output1[845]=10;
p_output1[846]=10;
p_output1[847]=10;
p_output1[848]=10;
p_output1[849]=10;
p_output1[850]=10;
p_output1[851]=10;
p_output1[852]=10;
p_output1[853]=10;
p_output1[854]=10;
p_output1[855]=10;
p_output1[856]=10;
p_output1[857]=10;
p_output1[858]=10;
p_output1[859]=10;
p_output1[860]=10;
p_output1[861]=10;
p_output1[862]=10;
p_output1[863]=10;
p_output1[864]=10;
p_output1[865]=10;
p_output1[866]=10;
p_output1[867]=10;
p_output1[868]=10;
p_output1[869]=10;
p_output1[870]=10;
p_output1[871]=10;
p_output1[872]=10;
p_output1[873]=10;
p_output1[874]=10;
p_output1[875]=10;
p_output1[876]=10;
p_output1[877]=10;
p_output1[878]=10;
p_output1[879]=10;
p_output1[880]=11;
p_output1[881]=11;
p_output1[882]=11;
p_output1[883]=11;
p_output1[884]=11;
p_output1[885]=11;
p_output1[886]=11;
p_output1[887]=11;
p_output1[888]=11;
p_output1[889]=11;
p_output1[890]=11;
p_output1[891]=11;
p_output1[892]=11;
p_output1[893]=11;
p_output1[894]=11;
p_output1[895]=11;
p_output1[896]=11;
p_output1[897]=11;
p_output1[898]=11;
p_output1[899]=11;
p_output1[900]=11;
p_output1[901]=11;
p_output1[902]=11;
p_output1[903]=11;
p_output1[904]=11;
p_output1[905]=11;
p_output1[906]=11;
p_output1[907]=11;
p_output1[908]=11;
p_output1[909]=11;
p_output1[910]=11;
p_output1[911]=11;
p_output1[912]=11;
p_output1[913]=11;
p_output1[914]=11;
p_output1[915]=11;
p_output1[916]=11;
p_output1[917]=11;
p_output1[918]=11;
p_output1[919]=11;
p_output1[920]=11;
p_output1[921]=11;
p_output1[922]=11;
p_output1[923]=11;
p_output1[924]=11;
p_output1[925]=11;
p_output1[926]=11;
p_output1[927]=11;
p_output1[928]=11;
p_output1[929]=11;
p_output1[930]=11;
p_output1[931]=11;
p_output1[932]=11;
p_output1[933]=11;
p_output1[934]=11;
p_output1[935]=11;
p_output1[936]=11;
p_output1[937]=12;
p_output1[938]=12;
p_output1[939]=12;
p_output1[940]=12;
p_output1[941]=12;
p_output1[942]=12;
p_output1[943]=12;
p_output1[944]=12;
p_output1[945]=12;
p_output1[946]=12;
p_output1[947]=12;
p_output1[948]=12;
p_output1[949]=12;
p_output1[950]=12;
p_output1[951]=12;
p_output1[952]=12;
p_output1[953]=12;
p_output1[954]=12;
p_output1[955]=12;
p_output1[956]=12;
p_output1[957]=12;
p_output1[958]=12;
p_output1[959]=12;
p_output1[960]=12;
p_output1[961]=12;
p_output1[962]=12;
p_output1[963]=12;
p_output1[964]=12;
p_output1[965]=12;
p_output1[966]=12;
p_output1[967]=12;
p_output1[968]=12;
p_output1[969]=12;
p_output1[970]=12;
p_output1[971]=12;
p_output1[972]=12;
p_output1[973]=12;
p_output1[974]=12;
p_output1[975]=12;
p_output1[976]=12;
p_output1[977]=12;
p_output1[978]=12;
p_output1[979]=12;
p_output1[980]=12;
p_output1[981]=12;
p_output1[982]=12;
p_output1[983]=12;
p_output1[984]=12;
p_output1[985]=12;
p_output1[986]=12;
p_output1[987]=12;
p_output1[988]=12;
p_output1[989]=12;
p_output1[990]=12;
p_output1[991]=12;
p_output1[992]=12;
p_output1[993]=13;
p_output1[994]=13;
p_output1[995]=13;
p_output1[996]=13;
p_output1[997]=13;
p_output1[998]=13;
p_output1[999]=13;
p_output1[1000]=13;
p_output1[1001]=13;
p_output1[1002]=13;
p_output1[1003]=13;
p_output1[1004]=13;
p_output1[1005]=13;
p_output1[1006]=13;
p_output1[1007]=13;
p_output1[1008]=13;
p_output1[1009]=13;
p_output1[1010]=13;
p_output1[1011]=13;
p_output1[1012]=13;
p_output1[1013]=13;
p_output1[1014]=13;
p_output1[1015]=13;
p_output1[1016]=13;
p_output1[1017]=13;
p_output1[1018]=13;
p_output1[1019]=13;
p_output1[1020]=13;
p_output1[1021]=13;
p_output1[1022]=13;
p_output1[1023]=13;
p_output1[1024]=13;
p_output1[1025]=13;
p_output1[1026]=13;
p_output1[1027]=13;
p_output1[1028]=13;
p_output1[1029]=13;
p_output1[1030]=13;
p_output1[1031]=14;
p_output1[1032]=14;
p_output1[1033]=14;
p_output1[1034]=14;
p_output1[1035]=14;
p_output1[1036]=14;
p_output1[1037]=14;
p_output1[1038]=14;
p_output1[1039]=14;
p_output1[1040]=14;
p_output1[1041]=14;
p_output1[1042]=14;
p_output1[1043]=14;
p_output1[1044]=14;
p_output1[1045]=14;
p_output1[1046]=14;
p_output1[1047]=14;
p_output1[1048]=14;
p_output1[1049]=14;
p_output1[1050]=14;
p_output1[1051]=14;
p_output1[1052]=14;
p_output1[1053]=14;
p_output1[1054]=14;
p_output1[1055]=14;
p_output1[1056]=14;
p_output1[1057]=14;
p_output1[1058]=14;
p_output1[1059]=14;
p_output1[1060]=14;
p_output1[1061]=14;
p_output1[1062]=14;
p_output1[1063]=14;
p_output1[1064]=14;
p_output1[1065]=14;
p_output1[1066]=14;
p_output1[1067]=15;
p_output1[1068]=15;
p_output1[1069]=15;
p_output1[1070]=15;
p_output1[1071]=15;
p_output1[1072]=15;
p_output1[1073]=15;
p_output1[1074]=15;
p_output1[1075]=15;
p_output1[1076]=15;
p_output1[1077]=15;
p_output1[1078]=15;
p_output1[1079]=15;
p_output1[1080]=15;
p_output1[1081]=15;
p_output1[1082]=15;
p_output1[1083]=15;
p_output1[1084]=15;
p_output1[1085]=15;
p_output1[1086]=15;
p_output1[1087]=15;
p_output1[1088]=15;
p_output1[1089]=15;
p_output1[1090]=15;
p_output1[1091]=15;
p_output1[1092]=15;
p_output1[1093]=15;
p_output1[1094]=15;
p_output1[1095]=15;
p_output1[1096]=15;
p_output1[1097]=15;
p_output1[1098]=15;
p_output1[1099]=15;
p_output1[1100]=15;
p_output1[1101]=15;
p_output1[1102]=15;
p_output1[1103]=16;
p_output1[1104]=16;
p_output1[1105]=16;
p_output1[1106]=16;
p_output1[1107]=16;
p_output1[1108]=16;
p_output1[1109]=16;
p_output1[1110]=16;
p_output1[1111]=16;
p_output1[1112]=16;
p_output1[1113]=16;
p_output1[1114]=16;
p_output1[1115]=16;
p_output1[1116]=16;
p_output1[1117]=16;
p_output1[1118]=16;
p_output1[1119]=16;
p_output1[1120]=16;
p_output1[1121]=16;
p_output1[1122]=16;
p_output1[1123]=16;
p_output1[1124]=16;
p_output1[1125]=16;
p_output1[1126]=16;
p_output1[1127]=16;
p_output1[1128]=16;
p_output1[1129]=16;
p_output1[1130]=16;
p_output1[1131]=16;
p_output1[1132]=16;
p_output1[1133]=16;
p_output1[1134]=16;
p_output1[1135]=16;
p_output1[1136]=16;
p_output1[1137]=16;
p_output1[1138]=16;
p_output1[1139]=16;
p_output1[1140]=16;
p_output1[1141]=16;
p_output1[1142]=16;
p_output1[1143]=16;
p_output1[1144]=16;
p_output1[1145]=16;
p_output1[1146]=16;
p_output1[1147]=16;
p_output1[1148]=16;
p_output1[1149]=17;
p_output1[1150]=17;
p_output1[1151]=17;
p_output1[1152]=17;
p_output1[1153]=17;
p_output1[1154]=17;
p_output1[1155]=17;
p_output1[1156]=17;
p_output1[1157]=17;
p_output1[1158]=17;
p_output1[1159]=17;
p_output1[1160]=17;
p_output1[1161]=17;
p_output1[1162]=17;
p_output1[1163]=17;
p_output1[1164]=17;
p_output1[1165]=17;
p_output1[1166]=17;
p_output1[1167]=17;
p_output1[1168]=17;
p_output1[1169]=17;
p_output1[1170]=17;
p_output1[1171]=17;
p_output1[1172]=17;
p_output1[1173]=17;
p_output1[1174]=17;
p_output1[1175]=17;
p_output1[1176]=17;
p_output1[1177]=17;
p_output1[1178]=17;
p_output1[1179]=17;
p_output1[1180]=17;
p_output1[1181]=17;
p_output1[1182]=17;
p_output1[1183]=17;
p_output1[1184]=17;
p_output1[1185]=17;
p_output1[1186]=17;
p_output1[1187]=17;
p_output1[1188]=17;
p_output1[1189]=18;
p_output1[1190]=18;
p_output1[1191]=18;
p_output1[1192]=18;
p_output1[1193]=18;
p_output1[1194]=18;
p_output1[1195]=18;
p_output1[1196]=18;
p_output1[1197]=18;
p_output1[1198]=18;
p_output1[1199]=18;
p_output1[1200]=18;
p_output1[1201]=18;
p_output1[1202]=18;
p_output1[1203]=18;
p_output1[1204]=18;
p_output1[1205]=18;
p_output1[1206]=18;
p_output1[1207]=18;
p_output1[1208]=18;
p_output1[1209]=18;
p_output1[1210]=18;
p_output1[1211]=18;
p_output1[1212]=18;
p_output1[1213]=18;
p_output1[1214]=18;
p_output1[1215]=18;
p_output1[1216]=19;
p_output1[1217]=19;
p_output1[1218]=19;
p_output1[1219]=19;
p_output1[1220]=19;
p_output1[1221]=19;
p_output1[1222]=19;
p_output1[1223]=19;
p_output1[1224]=19;
p_output1[1225]=19;
p_output1[1226]=19;
p_output1[1227]=19;
p_output1[1228]=19;
p_output1[1229]=19;
p_output1[1230]=19;
p_output1[1231]=19;
p_output1[1232]=19;
p_output1[1233]=19;
p_output1[1234]=19;
p_output1[1235]=19;
p_output1[1236]=19;
p_output1[1237]=19;
p_output1[1238]=19;
p_output1[1239]=19;
p_output1[1240]=19;
p_output1[1241]=19;
p_output1[1242]=19;
p_output1[1243]=20;
p_output1[1244]=20;
p_output1[1245]=20;
p_output1[1246]=20;
p_output1[1247]=20;
p_output1[1248]=20;
p_output1[1249]=20;
p_output1[1250]=20;
p_output1[1251]=20;
p_output1[1252]=20;
p_output1[1253]=20;
p_output1[1254]=20;
p_output1[1255]=20;
p_output1[1256]=20;
p_output1[1257]=20;
p_output1[1258]=20;
p_output1[1259]=20;
p_output1[1260]=20;
p_output1[1261]=20;
p_output1[1262]=20;
p_output1[1263]=20;
p_output1[1264]=20;
p_output1[1265]=20;
p_output1[1266]=20;
p_output1[1267]=20;
p_output1[1268]=20;
p_output1[1269]=20;
p_output1[1270]=21;
p_output1[1271]=21;
p_output1[1272]=21;
p_output1[1273]=21;
p_output1[1274]=21;
p_output1[1275]=21;
p_output1[1276]=21;
p_output1[1277]=21;
p_output1[1278]=21;
p_output1[1279]=21;
p_output1[1280]=21;
p_output1[1281]=21;
p_output1[1282]=21;
p_output1[1283]=21;
p_output1[1284]=21;
p_output1[1285]=21;
p_output1[1286]=21;
p_output1[1287]=21;
p_output1[1288]=21;
p_output1[1289]=21;
p_output1[1290]=21;
p_output1[1291]=21;
p_output1[1292]=21;
p_output1[1293]=21;
p_output1[1294]=21;
p_output1[1295]=21;
p_output1[1296]=21;
p_output1[1297]=22;
p_output1[1298]=22;
p_output1[1299]=22;
p_output1[1300]=22;
p_output1[1301]=22;
p_output1[1302]=22;
p_output1[1303]=22;
p_output1[1304]=22;
p_output1[1305]=22;
p_output1[1306]=22;
p_output1[1307]=22;
p_output1[1308]=22;
p_output1[1309]=22;
p_output1[1310]=22;
p_output1[1311]=22;
p_output1[1312]=22;
p_output1[1313]=22;
p_output1[1314]=22;
p_output1[1315]=22;
p_output1[1316]=22;
p_output1[1317]=22;
p_output1[1318]=22;
p_output1[1319]=22;
p_output1[1320]=22;
p_output1[1321]=22;
p_output1[1322]=22;
p_output1[1323]=22;
p_output1[1324]=22;
p_output1[1325]=22;
p_output1[1326]=22;
p_output1[1327]=22;
p_output1[1328]=22;
p_output1[1329]=22;
p_output1[1330]=22;
p_output1[1331]=22;
p_output1[1332]=22;
p_output1[1333]=22;
p_output1[1334]=22;
p_output1[1335]=22;
p_output1[1336]=22;
p_output1[1337]=22;
p_output1[1338]=22;
p_output1[1339]=22;
p_output1[1340]=22;
p_output1[1341]=22;
p_output1[1342]=22;
p_output1[1343]=22;
p_output1[1344]=22;
p_output1[1345]=22;
p_output1[1346]=22;
p_output1[1347]=22;
p_output1[1348]=23;
p_output1[1349]=23;
p_output1[1350]=23;
p_output1[1351]=23;
p_output1[1352]=23;
p_output1[1353]=23;
p_output1[1354]=23;
p_output1[1355]=23;
p_output1[1356]=23;
p_output1[1357]=23;
p_output1[1358]=23;
p_output1[1359]=23;
p_output1[1360]=23;
p_output1[1361]=23;
p_output1[1362]=23;
p_output1[1363]=23;
p_output1[1364]=23;
p_output1[1365]=23;
p_output1[1366]=23;
p_output1[1367]=23;
p_output1[1368]=23;
p_output1[1369]=23;
p_output1[1370]=23;
p_output1[1371]=23;
p_output1[1372]=23;
p_output1[1373]=23;
p_output1[1374]=23;
p_output1[1375]=23;
p_output1[1376]=23;
p_output1[1377]=23;
p_output1[1378]=23;
p_output1[1379]=23;
p_output1[1380]=23;
p_output1[1381]=23;
p_output1[1382]=23;
p_output1[1383]=23;
p_output1[1384]=23;
p_output1[1385]=23;
p_output1[1386]=23;
p_output1[1387]=23;
p_output1[1388]=23;
p_output1[1389]=23;
p_output1[1390]=23;
p_output1[1391]=23;
p_output1[1392]=23;
p_output1[1393]=23;
p_output1[1394]=23;
p_output1[1395]=23;
p_output1[1396]=23;
p_output1[1397]=23;
p_output1[1398]=23;
p_output1[1399]=24;
p_output1[1400]=24;
p_output1[1401]=24;
p_output1[1402]=24;
p_output1[1403]=24;
p_output1[1404]=24;
p_output1[1405]=24;
p_output1[1406]=24;
p_output1[1407]=24;
p_output1[1408]=24;
p_output1[1409]=24;
p_output1[1410]=24;
p_output1[1411]=24;
p_output1[1412]=24;
p_output1[1413]=24;
p_output1[1414]=24;
p_output1[1415]=24;
p_output1[1416]=24;
p_output1[1417]=24;
p_output1[1418]=24;
p_output1[1419]=24;
p_output1[1420]=24;
p_output1[1421]=24;
p_output1[1422]=24;
p_output1[1423]=24;
p_output1[1424]=24;
p_output1[1425]=24;
p_output1[1426]=24;
p_output1[1427]=24;
p_output1[1428]=24;
p_output1[1429]=24;
p_output1[1430]=24;
p_output1[1431]=24;
p_output1[1432]=24;
p_output1[1433]=24;
p_output1[1434]=24;
p_output1[1435]=24;
p_output1[1436]=24;
p_output1[1437]=24;
p_output1[1438]=24;
p_output1[1439]=24;
p_output1[1440]=24;
p_output1[1441]=24;
p_output1[1442]=24;
p_output1[1443]=24;
p_output1[1444]=24;
p_output1[1445]=24;
p_output1[1446]=24;
p_output1[1447]=24;
p_output1[1448]=24;
p_output1[1449]=24;
p_output1[1450]=25;
p_output1[1451]=25;
p_output1[1452]=25;
p_output1[1453]=25;
p_output1[1454]=25;
p_output1[1455]=25;
p_output1[1456]=25;
p_output1[1457]=25;
p_output1[1458]=25;
p_output1[1459]=25;
p_output1[1460]=25;
p_output1[1461]=25;
p_output1[1462]=25;
p_output1[1463]=25;
p_output1[1464]=25;
p_output1[1465]=25;
p_output1[1466]=25;
p_output1[1467]=25;
p_output1[1468]=25;
p_output1[1469]=25;
p_output1[1470]=25;
p_output1[1471]=25;
p_output1[1472]=25;
p_output1[1473]=25;
p_output1[1474]=25;
p_output1[1475]=25;
p_output1[1476]=25;
p_output1[1477]=25;
p_output1[1478]=25;
p_output1[1479]=25;
p_output1[1480]=25;
p_output1[1481]=25;
p_output1[1482]=25;
p_output1[1483]=25;
p_output1[1484]=25;
p_output1[1485]=25;
p_output1[1486]=25;
p_output1[1487]=25;
p_output1[1488]=25;
p_output1[1489]=25;
p_output1[1490]=25;
p_output1[1491]=25;
p_output1[1492]=25;
p_output1[1493]=25;
p_output1[1494]=25;
p_output1[1495]=25;
p_output1[1496]=25;
p_output1[1497]=25;
p_output1[1498]=25;
p_output1[1499]=25;
p_output1[1500]=25;
p_output1[1501]=26;
p_output1[1502]=26;
p_output1[1503]=26;
p_output1[1504]=26;
p_output1[1505]=26;
p_output1[1506]=26;
p_output1[1507]=26;
p_output1[1508]=26;
p_output1[1509]=26;
p_output1[1510]=26;
p_output1[1511]=26;
p_output1[1512]=26;
p_output1[1513]=26;
p_output1[1514]=26;
p_output1[1515]=26;
p_output1[1516]=26;
p_output1[1517]=26;
p_output1[1518]=26;
p_output1[1519]=26;
p_output1[1520]=26;
p_output1[1521]=26;
p_output1[1522]=26;
p_output1[1523]=26;
p_output1[1524]=26;
p_output1[1525]=26;
p_output1[1526]=26;
p_output1[1527]=26;
p_output1[1528]=26;
p_output1[1529]=26;
p_output1[1530]=26;
p_output1[1531]=26;
p_output1[1532]=26;
p_output1[1533]=26;
p_output1[1534]=26;
p_output1[1535]=26;
p_output1[1536]=26;
p_output1[1537]=26;
p_output1[1538]=26;
p_output1[1539]=26;
p_output1[1540]=26;
p_output1[1541]=26;
p_output1[1542]=26;
p_output1[1543]=26;
p_output1[1544]=26;
p_output1[1545]=26;
p_output1[1546]=26;
p_output1[1547]=26;
p_output1[1548]=26;
p_output1[1549]=26;
p_output1[1550]=26;
p_output1[1551]=26;
p_output1[1552]=26;
p_output1[1553]=27;
p_output1[1554]=27;
p_output1[1555]=27;
p_output1[1556]=27;
p_output1[1557]=27;
p_output1[1558]=27;
p_output1[1559]=27;
p_output1[1560]=27;
p_output1[1561]=27;
p_output1[1562]=27;
p_output1[1563]=27;
p_output1[1564]=27;
p_output1[1565]=27;
p_output1[1566]=27;
p_output1[1567]=27;
p_output1[1568]=27;
p_output1[1569]=27;
p_output1[1570]=27;
p_output1[1571]=27;
p_output1[1572]=27;
p_output1[1573]=27;
p_output1[1574]=27;
p_output1[1575]=27;
p_output1[1576]=27;
p_output1[1577]=27;
p_output1[1578]=27;
p_output1[1579]=27;
p_output1[1580]=27;
p_output1[1581]=27;
p_output1[1582]=27;
p_output1[1583]=27;
p_output1[1584]=27;
p_output1[1585]=27;
p_output1[1586]=27;
p_output1[1587]=27;
p_output1[1588]=27;
p_output1[1589]=27;
p_output1[1590]=27;
p_output1[1591]=27;
p_output1[1592]=27;
p_output1[1593]=27;
p_output1[1594]=27;
p_output1[1595]=27;
p_output1[1596]=27;
p_output1[1597]=27;
p_output1[1598]=27;
p_output1[1599]=27;
p_output1[1600]=27;
p_output1[1601]=27;
p_output1[1602]=27;
p_output1[1603]=27;
p_output1[1604]=28;
p_output1[1605]=28;
p_output1[1606]=28;
p_output1[1607]=28;
p_output1[1608]=28;
p_output1[1609]=28;
p_output1[1610]=28;
p_output1[1611]=28;
p_output1[1612]=28;
p_output1[1613]=28;
p_output1[1614]=28;
p_output1[1615]=28;
p_output1[1616]=28;
p_output1[1617]=28;
p_output1[1618]=28;
p_output1[1619]=28;
p_output1[1620]=28;
p_output1[1621]=28;
p_output1[1622]=28;
p_output1[1623]=28;
p_output1[1624]=28;
p_output1[1625]=28;
p_output1[1626]=28;
p_output1[1627]=28;
p_output1[1628]=28;
p_output1[1629]=28;
p_output1[1630]=28;
p_output1[1631]=28;
p_output1[1632]=28;
p_output1[1633]=28;
p_output1[1634]=28;
p_output1[1635]=28;
p_output1[1636]=28;
p_output1[1637]=28;
p_output1[1638]=28;
p_output1[1639]=28;
p_output1[1640]=28;
p_output1[1641]=28;
p_output1[1642]=29;
p_output1[1643]=29;
p_output1[1644]=29;
p_output1[1645]=29;
p_output1[1646]=29;
p_output1[1647]=29;
p_output1[1648]=29;
p_output1[1649]=29;
p_output1[1650]=29;
p_output1[1651]=29;
p_output1[1652]=29;
p_output1[1653]=29;
p_output1[1654]=29;
p_output1[1655]=29;
p_output1[1656]=29;
p_output1[1657]=29;
p_output1[1658]=29;
p_output1[1659]=29;
p_output1[1660]=29;
p_output1[1661]=29;
p_output1[1662]=29;
p_output1[1663]=29;
p_output1[1664]=29;
p_output1[1665]=29;
p_output1[1666]=29;
p_output1[1667]=29;
p_output1[1668]=29;
p_output1[1669]=29;
p_output1[1670]=29;
p_output1[1671]=29;
p_output1[1672]=29;
p_output1[1673]=29;
p_output1[1674]=29;
p_output1[1675]=29;
p_output1[1676]=29;
p_output1[1677]=29;
p_output1[1678]=30;
p_output1[1679]=30;
p_output1[1680]=30;
p_output1[1681]=30;
p_output1[1682]=30;
p_output1[1683]=30;
p_output1[1684]=30;
p_output1[1685]=30;
p_output1[1686]=30;
p_output1[1687]=30;
p_output1[1688]=30;
p_output1[1689]=30;
p_output1[1690]=30;
p_output1[1691]=30;
p_output1[1692]=30;
p_output1[1693]=30;
p_output1[1694]=30;
p_output1[1695]=30;
p_output1[1696]=30;
p_output1[1697]=30;
p_output1[1698]=30;
p_output1[1699]=30;
p_output1[1700]=30;
p_output1[1701]=30;
p_output1[1702]=30;
p_output1[1703]=30;
p_output1[1704]=30;
p_output1[1705]=30;
p_output1[1706]=30;
p_output1[1707]=30;
p_output1[1708]=30;
p_output1[1709]=30;
p_output1[1710]=30;
p_output1[1711]=30;
p_output1[1712]=30;
p_output1[1713]=30;
p_output1[1714]=31;
p_output1[1715]=31;
p_output1[1716]=31;
p_output1[1717]=31;
p_output1[1718]=31;
p_output1[1719]=31;
p_output1[1720]=31;
p_output1[1721]=31;
p_output1[1722]=31;
p_output1[1723]=31;
p_output1[1724]=31;
p_output1[1725]=31;
p_output1[1726]=31;
p_output1[1727]=31;
p_output1[1728]=31;
p_output1[1729]=31;
p_output1[1730]=31;
p_output1[1731]=31;
p_output1[1732]=31;
p_output1[1733]=31;
p_output1[1734]=31;
p_output1[1735]=31;
p_output1[1736]=31;
p_output1[1737]=31;
p_output1[1738]=31;
p_output1[1739]=31;
p_output1[1740]=31;
p_output1[1741]=31;
p_output1[1742]=31;
p_output1[1743]=31;
p_output1[1744]=31;
p_output1[1745]=31;
p_output1[1746]=31;
p_output1[1747]=31;
p_output1[1748]=31;
p_output1[1749]=31;
p_output1[1750]=31;
p_output1[1751]=31;
p_output1[1752]=31;
p_output1[1753]=31;
p_output1[1754]=31;
p_output1[1755]=32;
p_output1[1756]=32;
p_output1[1757]=32;
p_output1[1758]=32;
p_output1[1759]=32;
p_output1[1760]=32;
p_output1[1761]=32;
p_output1[1762]=32;
p_output1[1763]=32;
p_output1[1764]=32;
p_output1[1765]=32;
p_output1[1766]=32;
p_output1[1767]=32;
p_output1[1768]=32;
p_output1[1769]=32;
p_output1[1770]=32;
p_output1[1771]=32;
p_output1[1772]=32;
p_output1[1773]=32;
p_output1[1774]=32;
p_output1[1775]=32;
p_output1[1776]=32;
p_output1[1777]=32;
p_output1[1778]=32;
p_output1[1779]=32;
p_output1[1780]=32;
p_output1[1781]=32;
p_output1[1782]=32;
p_output1[1783]=32;
p_output1[1784]=32;
p_output1[1785]=32;
p_output1[1786]=32;
p_output1[1787]=32;
p_output1[1788]=32;
p_output1[1789]=32;
p_output1[1790]=33;
p_output1[1791]=33;
p_output1[1792]=33;
p_output1[1793]=33;
p_output1[1794]=33;
p_output1[1795]=33;
p_output1[1796]=33;
p_output1[1797]=33;
p_output1[1798]=33;
p_output1[1799]=33;
p_output1[1800]=33;
p_output1[1801]=33;
p_output1[1802]=33;
p_output1[1803]=33;
p_output1[1804]=33;
p_output1[1805]=33;
p_output1[1806]=33;
p_output1[1807]=33;
p_output1[1808]=33;
p_output1[1809]=33;
p_output1[1810]=33;
p_output1[1811]=33;
p_output1[1812]=33;
p_output1[1813]=33;
p_output1[1814]=33;
p_output1[1815]=33;
p_output1[1816]=33;
p_output1[1817]=34;
p_output1[1818]=34;
p_output1[1819]=34;
p_output1[1820]=34;
p_output1[1821]=34;
p_output1[1822]=34;
p_output1[1823]=34;
p_output1[1824]=34;
p_output1[1825]=34;
p_output1[1826]=34;
p_output1[1827]=34;
p_output1[1828]=34;
p_output1[1829]=34;
p_output1[1830]=34;
p_output1[1831]=34;
p_output1[1832]=34;
p_output1[1833]=34;
p_output1[1834]=34;
p_output1[1835]=34;
p_output1[1836]=34;
p_output1[1837]=34;
p_output1[1838]=34;
p_output1[1839]=34;
p_output1[1840]=34;
p_output1[1841]=34;
p_output1[1842]=34;
p_output1[1843]=34;
p_output1[1844]=35;
p_output1[1845]=35;
p_output1[1846]=35;
p_output1[1847]=35;
p_output1[1848]=35;
p_output1[1849]=35;
p_output1[1850]=35;
p_output1[1851]=35;
p_output1[1852]=35;
p_output1[1853]=35;
p_output1[1854]=35;
p_output1[1855]=35;
p_output1[1856]=35;
p_output1[1857]=35;
p_output1[1858]=35;
p_output1[1859]=35;
p_output1[1860]=35;
p_output1[1861]=35;
p_output1[1862]=35;
p_output1[1863]=35;
p_output1[1864]=35;
p_output1[1865]=35;
p_output1[1866]=35;
p_output1[1867]=35;
p_output1[1868]=35;
p_output1[1869]=35;
p_output1[1870]=35;
p_output1[1871]=36;
p_output1[1872]=36;
p_output1[1873]=36;
p_output1[1874]=36;
p_output1[1875]=36;
p_output1[1876]=36;
p_output1[1877]=36;
p_output1[1878]=36;
p_output1[1879]=36;
p_output1[1880]=36;
p_output1[1881]=36;
p_output1[1882]=36;
p_output1[1883]=36;
p_output1[1884]=36;
p_output1[1885]=36;
p_output1[1886]=36;
p_output1[1887]=36;
p_output1[1888]=36;
p_output1[1889]=36;
p_output1[1890]=36;
p_output1[1891]=36;
p_output1[1892]=36;
p_output1[1893]=36;
p_output1[1894]=36;
p_output1[1895]=36;
p_output1[1896]=36;
p_output1[1897]=36;
p_output1[1898]=1;
p_output1[1899]=2;
p_output1[1900]=3;
p_output1[1901]=4;
p_output1[1902]=5;
p_output1[1903]=6;
p_output1[1904]=7;
p_output1[1905]=8;
p_output1[1906]=9;
p_output1[1907]=10;
p_output1[1908]=11;
p_output1[1909]=12;
p_output1[1910]=13;
p_output1[1911]=14;
p_output1[1912]=15;
p_output1[1913]=16;
p_output1[1914]=18;
p_output1[1915]=19;
p_output1[1916]=20;
p_output1[1917]=21;
p_output1[1918]=22;
p_output1[1919]=23;
p_output1[1920]=24;
p_output1[1921]=25;
p_output1[1922]=26;
p_output1[1923]=27;
p_output1[1924]=28;
p_output1[1925]=29;
p_output1[1926]=30;
p_output1[1927]=31;
p_output1[1928]=33;
p_output1[1929]=34;
p_output1[1930]=35;
p_output1[1931]=36;
p_output1[1932]=40;
p_output1[1933]=41;
p_output1[1934]=42;
p_output1[1935]=43;
p_output1[1936]=44;
p_output1[1937]=45;
p_output1[1938]=46;
p_output1[1939]=47;
p_output1[1940]=48;
p_output1[1941]=49;
p_output1[1942]=50;
p_output1[1943]=51;
p_output1[1944]=52;
p_output1[1945]=53;
p_output1[1946]=54;
p_output1[1947]=55;
p_output1[1948]=56;
p_output1[1949]=57;
p_output1[1950]=58;
p_output1[1951]=59;
p_output1[1952]=60;
p_output1[1953]=61;
p_output1[1954]=62;
p_output1[1955]=63;
p_output1[1956]=64;
p_output1[1957]=65;
p_output1[1958]=66;
p_output1[1959]=67;
p_output1[1960]=68;
p_output1[1961]=69;
p_output1[1962]=70;
p_output1[1963]=71;
p_output1[1964]=72;
p_output1[1965]=73;
p_output1[1966]=74;
p_output1[1967]=75;
p_output1[1968]=76;
p_output1[1969]=77;
p_output1[1970]=78;
p_output1[1971]=79;
p_output1[1972]=80;
p_output1[1973]=81;
p_output1[1974]=82;
p_output1[1975]=83;
p_output1[1976]=84;
p_output1[1977]=85;
p_output1[1978]=86;
p_output1[1979]=87;
p_output1[1980]=88;
p_output1[1981]=90;
p_output1[1982]=91;
p_output1[1983]=92;
p_output1[1984]=93;
p_output1[1985]=94;
p_output1[1986]=95;
p_output1[1987]=96;
p_output1[1988]=97;
p_output1[1989]=98;
p_output1[1990]=99;
p_output1[1991]=100;
p_output1[1992]=101;
p_output1[1993]=102;
p_output1[1994]=103;
p_output1[1995]=105;
p_output1[1996]=106;
p_output1[1997]=107;
p_output1[1998]=108;
p_output1[1999]=119;
p_output1[2000]=1;
p_output1[2001]=2;
p_output1[2002]=3;
p_output1[2003]=4;
p_output1[2004]=5;
p_output1[2005]=6;
p_output1[2006]=7;
p_output1[2007]=8;
p_output1[2008]=9;
p_output1[2009]=10;
p_output1[2010]=11;
p_output1[2011]=12;
p_output1[2012]=13;
p_output1[2013]=14;
p_output1[2014]=15;
p_output1[2015]=16;
p_output1[2016]=18;
p_output1[2017]=19;
p_output1[2018]=20;
p_output1[2019]=21;
p_output1[2020]=22;
p_output1[2021]=23;
p_output1[2022]=24;
p_output1[2023]=25;
p_output1[2024]=26;
p_output1[2025]=27;
p_output1[2026]=28;
p_output1[2027]=29;
p_output1[2028]=30;
p_output1[2029]=31;
p_output1[2030]=33;
p_output1[2031]=34;
p_output1[2032]=35;
p_output1[2033]=36;
p_output1[2034]=40;
p_output1[2035]=41;
p_output1[2036]=42;
p_output1[2037]=43;
p_output1[2038]=44;
p_output1[2039]=45;
p_output1[2040]=46;
p_output1[2041]=47;
p_output1[2042]=48;
p_output1[2043]=49;
p_output1[2044]=50;
p_output1[2045]=51;
p_output1[2046]=52;
p_output1[2047]=53;
p_output1[2048]=54;
p_output1[2049]=55;
p_output1[2050]=56;
p_output1[2051]=57;
p_output1[2052]=58;
p_output1[2053]=59;
p_output1[2054]=60;
p_output1[2055]=61;
p_output1[2056]=62;
p_output1[2057]=63;
p_output1[2058]=64;
p_output1[2059]=65;
p_output1[2060]=66;
p_output1[2061]=67;
p_output1[2062]=68;
p_output1[2063]=69;
p_output1[2064]=70;
p_output1[2065]=71;
p_output1[2066]=72;
p_output1[2067]=73;
p_output1[2068]=74;
p_output1[2069]=75;
p_output1[2070]=76;
p_output1[2071]=77;
p_output1[2072]=78;
p_output1[2073]=79;
p_output1[2074]=80;
p_output1[2075]=81;
p_output1[2076]=82;
p_output1[2077]=83;
p_output1[2078]=84;
p_output1[2079]=85;
p_output1[2080]=86;
p_output1[2081]=87;
p_output1[2082]=88;
p_output1[2083]=90;
p_output1[2084]=91;
p_output1[2085]=92;
p_output1[2086]=93;
p_output1[2087]=94;
p_output1[2088]=95;
p_output1[2089]=96;
p_output1[2090]=97;
p_output1[2091]=98;
p_output1[2092]=99;
p_output1[2093]=100;
p_output1[2094]=101;
p_output1[2095]=102;
p_output1[2096]=103;
p_output1[2097]=105;
p_output1[2098]=106;
p_output1[2099]=107;
p_output1[2100]=108;
p_output1[2101]=120;
p_output1[2102]=1;
p_output1[2103]=2;
p_output1[2104]=3;
p_output1[2105]=4;
p_output1[2106]=5;
p_output1[2107]=6;
p_output1[2108]=7;
p_output1[2109]=8;
p_output1[2110]=9;
p_output1[2111]=10;
p_output1[2112]=11;
p_output1[2113]=12;
p_output1[2114]=13;
p_output1[2115]=14;
p_output1[2116]=15;
p_output1[2117]=16;
p_output1[2118]=18;
p_output1[2119]=19;
p_output1[2120]=20;
p_output1[2121]=21;
p_output1[2122]=22;
p_output1[2123]=23;
p_output1[2124]=24;
p_output1[2125]=25;
p_output1[2126]=26;
p_output1[2127]=27;
p_output1[2128]=28;
p_output1[2129]=29;
p_output1[2130]=30;
p_output1[2131]=31;
p_output1[2132]=33;
p_output1[2133]=34;
p_output1[2134]=35;
p_output1[2135]=36;
p_output1[2136]=40;
p_output1[2137]=41;
p_output1[2138]=42;
p_output1[2139]=43;
p_output1[2140]=44;
p_output1[2141]=45;
p_output1[2142]=46;
p_output1[2143]=47;
p_output1[2144]=48;
p_output1[2145]=49;
p_output1[2146]=50;
p_output1[2147]=51;
p_output1[2148]=52;
p_output1[2149]=53;
p_output1[2150]=54;
p_output1[2151]=55;
p_output1[2152]=56;
p_output1[2153]=57;
p_output1[2154]=58;
p_output1[2155]=59;
p_output1[2156]=60;
p_output1[2157]=61;
p_output1[2158]=62;
p_output1[2159]=63;
p_output1[2160]=64;
p_output1[2161]=65;
p_output1[2162]=66;
p_output1[2163]=67;
p_output1[2164]=68;
p_output1[2165]=69;
p_output1[2166]=70;
p_output1[2167]=71;
p_output1[2168]=72;
p_output1[2169]=73;
p_output1[2170]=74;
p_output1[2171]=75;
p_output1[2172]=76;
p_output1[2173]=77;
p_output1[2174]=78;
p_output1[2175]=79;
p_output1[2176]=80;
p_output1[2177]=81;
p_output1[2178]=82;
p_output1[2179]=83;
p_output1[2180]=84;
p_output1[2181]=85;
p_output1[2182]=86;
p_output1[2183]=87;
p_output1[2184]=88;
p_output1[2185]=90;
p_output1[2186]=91;
p_output1[2187]=92;
p_output1[2188]=93;
p_output1[2189]=94;
p_output1[2190]=95;
p_output1[2191]=96;
p_output1[2192]=97;
p_output1[2193]=98;
p_output1[2194]=99;
p_output1[2195]=100;
p_output1[2196]=101;
p_output1[2197]=102;
p_output1[2198]=103;
p_output1[2199]=105;
p_output1[2200]=106;
p_output1[2201]=107;
p_output1[2202]=108;
p_output1[2203]=121;
p_output1[2204]=1;
p_output1[2205]=2;
p_output1[2206]=3;
p_output1[2207]=4;
p_output1[2208]=5;
p_output1[2209]=6;
p_output1[2210]=7;
p_output1[2211]=8;
p_output1[2212]=9;
p_output1[2213]=10;
p_output1[2214]=11;
p_output1[2215]=12;
p_output1[2216]=13;
p_output1[2217]=14;
p_output1[2218]=15;
p_output1[2219]=16;
p_output1[2220]=17;
p_output1[2221]=18;
p_output1[2222]=19;
p_output1[2223]=20;
p_output1[2224]=21;
p_output1[2225]=22;
p_output1[2226]=23;
p_output1[2227]=24;
p_output1[2228]=25;
p_output1[2229]=26;
p_output1[2230]=27;
p_output1[2231]=28;
p_output1[2232]=29;
p_output1[2233]=30;
p_output1[2234]=31;
p_output1[2235]=32;
p_output1[2236]=33;
p_output1[2237]=34;
p_output1[2238]=35;
p_output1[2239]=36;
p_output1[2240]=40;
p_output1[2241]=41;
p_output1[2242]=42;
p_output1[2243]=43;
p_output1[2244]=44;
p_output1[2245]=45;
p_output1[2246]=46;
p_output1[2247]=47;
p_output1[2248]=48;
p_output1[2249]=49;
p_output1[2250]=50;
p_output1[2251]=51;
p_output1[2252]=52;
p_output1[2253]=53;
p_output1[2254]=54;
p_output1[2255]=55;
p_output1[2256]=56;
p_output1[2257]=57;
p_output1[2258]=58;
p_output1[2259]=59;
p_output1[2260]=60;
p_output1[2261]=61;
p_output1[2262]=62;
p_output1[2263]=63;
p_output1[2264]=64;
p_output1[2265]=65;
p_output1[2266]=66;
p_output1[2267]=67;
p_output1[2268]=68;
p_output1[2269]=69;
p_output1[2270]=70;
p_output1[2271]=71;
p_output1[2272]=72;
p_output1[2273]=73;
p_output1[2274]=74;
p_output1[2275]=75;
p_output1[2276]=76;
p_output1[2277]=77;
p_output1[2278]=78;
p_output1[2279]=79;
p_output1[2280]=80;
p_output1[2281]=81;
p_output1[2282]=82;
p_output1[2283]=83;
p_output1[2284]=84;
p_output1[2285]=85;
p_output1[2286]=86;
p_output1[2287]=87;
p_output1[2288]=88;
p_output1[2289]=89;
p_output1[2290]=90;
p_output1[2291]=91;
p_output1[2292]=92;
p_output1[2293]=93;
p_output1[2294]=94;
p_output1[2295]=95;
p_output1[2296]=96;
p_output1[2297]=97;
p_output1[2298]=98;
p_output1[2299]=99;
p_output1[2300]=100;
p_output1[2301]=101;
p_output1[2302]=102;
p_output1[2303]=103;
p_output1[2304]=104;
p_output1[2305]=105;
p_output1[2306]=106;
p_output1[2307]=107;
p_output1[2308]=108;
p_output1[2309]=113;
p_output1[2310]=114;
p_output1[2311]=115;
p_output1[2312]=116;
p_output1[2313]=117;
p_output1[2314]=118;
p_output1[2315]=119;
p_output1[2316]=120;
p_output1[2317]=122;
p_output1[2318]=123;
p_output1[2319]=124;
p_output1[2320]=1;
p_output1[2321]=2;
p_output1[2322]=3;
p_output1[2323]=4;
p_output1[2324]=5;
p_output1[2325]=6;
p_output1[2326]=7;
p_output1[2327]=8;
p_output1[2328]=9;
p_output1[2329]=10;
p_output1[2330]=11;
p_output1[2331]=12;
p_output1[2332]=13;
p_output1[2333]=14;
p_output1[2334]=15;
p_output1[2335]=16;
p_output1[2336]=17;
p_output1[2337]=18;
p_output1[2338]=19;
p_output1[2339]=20;
p_output1[2340]=21;
p_output1[2341]=22;
p_output1[2342]=23;
p_output1[2343]=24;
p_output1[2344]=25;
p_output1[2345]=26;
p_output1[2346]=27;
p_output1[2347]=28;
p_output1[2348]=29;
p_output1[2349]=30;
p_output1[2350]=31;
p_output1[2351]=32;
p_output1[2352]=33;
p_output1[2353]=34;
p_output1[2354]=35;
p_output1[2355]=36;
p_output1[2356]=40;
p_output1[2357]=41;
p_output1[2358]=42;
p_output1[2359]=43;
p_output1[2360]=44;
p_output1[2361]=45;
p_output1[2362]=46;
p_output1[2363]=47;
p_output1[2364]=48;
p_output1[2365]=49;
p_output1[2366]=50;
p_output1[2367]=51;
p_output1[2368]=52;
p_output1[2369]=53;
p_output1[2370]=54;
p_output1[2371]=55;
p_output1[2372]=56;
p_output1[2373]=57;
p_output1[2374]=58;
p_output1[2375]=59;
p_output1[2376]=60;
p_output1[2377]=61;
p_output1[2378]=62;
p_output1[2379]=63;
p_output1[2380]=64;
p_output1[2381]=65;
p_output1[2382]=66;
p_output1[2383]=67;
p_output1[2384]=68;
p_output1[2385]=69;
p_output1[2386]=70;
p_output1[2387]=71;
p_output1[2388]=72;
p_output1[2389]=73;
p_output1[2390]=74;
p_output1[2391]=75;
p_output1[2392]=76;
p_output1[2393]=77;
p_output1[2394]=78;
p_output1[2395]=79;
p_output1[2396]=80;
p_output1[2397]=81;
p_output1[2398]=82;
p_output1[2399]=83;
p_output1[2400]=84;
p_output1[2401]=85;
p_output1[2402]=86;
p_output1[2403]=87;
p_output1[2404]=88;
p_output1[2405]=89;
p_output1[2406]=90;
p_output1[2407]=91;
p_output1[2408]=92;
p_output1[2409]=93;
p_output1[2410]=94;
p_output1[2411]=95;
p_output1[2412]=96;
p_output1[2413]=97;
p_output1[2414]=98;
p_output1[2415]=99;
p_output1[2416]=100;
p_output1[2417]=101;
p_output1[2418]=102;
p_output1[2419]=103;
p_output1[2420]=104;
p_output1[2421]=105;
p_output1[2422]=106;
p_output1[2423]=107;
p_output1[2424]=108;
p_output1[2425]=113;
p_output1[2426]=114;
p_output1[2427]=115;
p_output1[2428]=116;
p_output1[2429]=117;
p_output1[2430]=118;
p_output1[2431]=119;
p_output1[2432]=120;
p_output1[2433]=121;
p_output1[2434]=122;
p_output1[2435]=123;
p_output1[2436]=124;
p_output1[2437]=1;
p_output1[2438]=2;
p_output1[2439]=3;
p_output1[2440]=4;
p_output1[2441]=5;
p_output1[2442]=6;
p_output1[2443]=7;
p_output1[2444]=8;
p_output1[2445]=9;
p_output1[2446]=10;
p_output1[2447]=11;
p_output1[2448]=12;
p_output1[2449]=13;
p_output1[2450]=14;
p_output1[2451]=15;
p_output1[2452]=16;
p_output1[2453]=17;
p_output1[2454]=18;
p_output1[2455]=19;
p_output1[2456]=20;
p_output1[2457]=21;
p_output1[2458]=22;
p_output1[2459]=23;
p_output1[2460]=24;
p_output1[2461]=25;
p_output1[2462]=26;
p_output1[2463]=27;
p_output1[2464]=28;
p_output1[2465]=29;
p_output1[2466]=30;
p_output1[2467]=31;
p_output1[2468]=32;
p_output1[2469]=33;
p_output1[2470]=34;
p_output1[2471]=35;
p_output1[2472]=36;
p_output1[2473]=40;
p_output1[2474]=41;
p_output1[2475]=42;
p_output1[2476]=43;
p_output1[2477]=44;
p_output1[2478]=45;
p_output1[2479]=46;
p_output1[2480]=47;
p_output1[2481]=48;
p_output1[2482]=49;
p_output1[2483]=50;
p_output1[2484]=51;
p_output1[2485]=52;
p_output1[2486]=53;
p_output1[2487]=54;
p_output1[2488]=55;
p_output1[2489]=56;
p_output1[2490]=57;
p_output1[2491]=58;
p_output1[2492]=59;
p_output1[2493]=60;
p_output1[2494]=61;
p_output1[2495]=62;
p_output1[2496]=63;
p_output1[2497]=64;
p_output1[2498]=65;
p_output1[2499]=66;
p_output1[2500]=67;
p_output1[2501]=68;
p_output1[2502]=69;
p_output1[2503]=70;
p_output1[2504]=71;
p_output1[2505]=72;
p_output1[2506]=73;
p_output1[2507]=74;
p_output1[2508]=75;
p_output1[2509]=76;
p_output1[2510]=77;
p_output1[2511]=78;
p_output1[2512]=79;
p_output1[2513]=80;
p_output1[2514]=81;
p_output1[2515]=82;
p_output1[2516]=83;
p_output1[2517]=84;
p_output1[2518]=85;
p_output1[2519]=86;
p_output1[2520]=87;
p_output1[2521]=88;
p_output1[2522]=89;
p_output1[2523]=90;
p_output1[2524]=91;
p_output1[2525]=92;
p_output1[2526]=93;
p_output1[2527]=94;
p_output1[2528]=95;
p_output1[2529]=96;
p_output1[2530]=97;
p_output1[2531]=98;
p_output1[2532]=99;
p_output1[2533]=100;
p_output1[2534]=101;
p_output1[2535]=102;
p_output1[2536]=103;
p_output1[2537]=104;
p_output1[2538]=105;
p_output1[2539]=106;
p_output1[2540]=107;
p_output1[2541]=108;
p_output1[2542]=113;
p_output1[2543]=114;
p_output1[2544]=115;
p_output1[2545]=116;
p_output1[2546]=117;
p_output1[2547]=118;
p_output1[2548]=119;
p_output1[2549]=120;
p_output1[2550]=121;
p_output1[2551]=122;
p_output1[2552]=123;
p_output1[2553]=124;
p_output1[2554]=1;
p_output1[2555]=2;
p_output1[2556]=3;
p_output1[2557]=4;
p_output1[2558]=5;
p_output1[2559]=6;
p_output1[2560]=7;
p_output1[2561]=8;
p_output1[2562]=9;
p_output1[2563]=10;
p_output1[2564]=11;
p_output1[2565]=12;
p_output1[2566]=13;
p_output1[2567]=14;
p_output1[2568]=15;
p_output1[2569]=16;
p_output1[2570]=17;
p_output1[2571]=40;
p_output1[2572]=41;
p_output1[2573]=42;
p_output1[2574]=43;
p_output1[2575]=44;
p_output1[2576]=45;
p_output1[2577]=46;
p_output1[2578]=47;
p_output1[2579]=48;
p_output1[2580]=49;
p_output1[2581]=50;
p_output1[2582]=51;
p_output1[2583]=52;
p_output1[2584]=53;
p_output1[2585]=73;
p_output1[2586]=74;
p_output1[2587]=75;
p_output1[2588]=76;
p_output1[2589]=77;
p_output1[2590]=78;
p_output1[2591]=79;
p_output1[2592]=80;
p_output1[2593]=81;
p_output1[2594]=82;
p_output1[2595]=83;
p_output1[2596]=84;
p_output1[2597]=85;
p_output1[2598]=86;
p_output1[2599]=87;
p_output1[2600]=88;
p_output1[2601]=89;
p_output1[2602]=113;
p_output1[2603]=115;
p_output1[2604]=116;
p_output1[2605]=119;
p_output1[2606]=120;
p_output1[2607]=121;
p_output1[2608]=122;
p_output1[2609]=124;
p_output1[2610]=1;
p_output1[2611]=2;
p_output1[2612]=3;
p_output1[2613]=4;
p_output1[2614]=5;
p_output1[2615]=6;
p_output1[2616]=7;
p_output1[2617]=8;
p_output1[2618]=9;
p_output1[2619]=10;
p_output1[2620]=11;
p_output1[2621]=12;
p_output1[2622]=13;
p_output1[2623]=14;
p_output1[2624]=15;
p_output1[2625]=16;
p_output1[2626]=17;
p_output1[2627]=40;
p_output1[2628]=41;
p_output1[2629]=42;
p_output1[2630]=43;
p_output1[2631]=44;
p_output1[2632]=45;
p_output1[2633]=46;
p_output1[2634]=47;
p_output1[2635]=48;
p_output1[2636]=49;
p_output1[2637]=50;
p_output1[2638]=51;
p_output1[2639]=52;
p_output1[2640]=53;
p_output1[2641]=73;
p_output1[2642]=74;
p_output1[2643]=75;
p_output1[2644]=76;
p_output1[2645]=77;
p_output1[2646]=78;
p_output1[2647]=79;
p_output1[2648]=80;
p_output1[2649]=81;
p_output1[2650]=82;
p_output1[2651]=83;
p_output1[2652]=84;
p_output1[2653]=85;
p_output1[2654]=86;
p_output1[2655]=87;
p_output1[2656]=88;
p_output1[2657]=89;
p_output1[2658]=113;
p_output1[2659]=115;
p_output1[2660]=116;
p_output1[2661]=119;
p_output1[2662]=120;
p_output1[2663]=121;
p_output1[2664]=122;
p_output1[2665]=124;
p_output1[2666]=1;
p_output1[2667]=2;
p_output1[2668]=3;
p_output1[2669]=4;
p_output1[2670]=5;
p_output1[2671]=6;
p_output1[2672]=7;
p_output1[2673]=8;
p_output1[2674]=9;
p_output1[2675]=10;
p_output1[2676]=11;
p_output1[2677]=12;
p_output1[2678]=13;
p_output1[2679]=14;
p_output1[2680]=15;
p_output1[2681]=16;
p_output1[2682]=17;
p_output1[2683]=40;
p_output1[2684]=41;
p_output1[2685]=42;
p_output1[2686]=43;
p_output1[2687]=44;
p_output1[2688]=45;
p_output1[2689]=46;
p_output1[2690]=47;
p_output1[2691]=48;
p_output1[2692]=49;
p_output1[2693]=50;
p_output1[2694]=51;
p_output1[2695]=52;
p_output1[2696]=53;
p_output1[2697]=73;
p_output1[2698]=74;
p_output1[2699]=75;
p_output1[2700]=76;
p_output1[2701]=77;
p_output1[2702]=78;
p_output1[2703]=79;
p_output1[2704]=80;
p_output1[2705]=81;
p_output1[2706]=82;
p_output1[2707]=83;
p_output1[2708]=84;
p_output1[2709]=85;
p_output1[2710]=86;
p_output1[2711]=87;
p_output1[2712]=88;
p_output1[2713]=89;
p_output1[2714]=113;
p_output1[2715]=115;
p_output1[2716]=116;
p_output1[2717]=119;
p_output1[2718]=120;
p_output1[2719]=121;
p_output1[2720]=122;
p_output1[2721]=124;
p_output1[2722]=1;
p_output1[2723]=2;
p_output1[2724]=3;
p_output1[2725]=4;
p_output1[2726]=5;
p_output1[2727]=6;
p_output1[2728]=7;
p_output1[2729]=8;
p_output1[2730]=9;
p_output1[2731]=10;
p_output1[2732]=11;
p_output1[2733]=12;
p_output1[2734]=13;
p_output1[2735]=14;
p_output1[2736]=15;
p_output1[2737]=16;
p_output1[2738]=17;
p_output1[2739]=40;
p_output1[2740]=41;
p_output1[2741]=42;
p_output1[2742]=43;
p_output1[2743]=44;
p_output1[2744]=45;
p_output1[2745]=46;
p_output1[2746]=47;
p_output1[2747]=48;
p_output1[2748]=49;
p_output1[2749]=50;
p_output1[2750]=51;
p_output1[2751]=52;
p_output1[2752]=53;
p_output1[2753]=73;
p_output1[2754]=74;
p_output1[2755]=75;
p_output1[2756]=76;
p_output1[2757]=77;
p_output1[2758]=78;
p_output1[2759]=79;
p_output1[2760]=80;
p_output1[2761]=81;
p_output1[2762]=82;
p_output1[2763]=83;
p_output1[2764]=84;
p_output1[2765]=85;
p_output1[2766]=86;
p_output1[2767]=87;
p_output1[2768]=88;
p_output1[2769]=89;
p_output1[2770]=113;
p_output1[2771]=115;
p_output1[2772]=116;
p_output1[2773]=119;
p_output1[2774]=120;
p_output1[2775]=121;
p_output1[2776]=122;
p_output1[2777]=124;
p_output1[2778]=1;
p_output1[2779]=2;
p_output1[2780]=3;
p_output1[2781]=4;
p_output1[2782]=5;
p_output1[2783]=6;
p_output1[2784]=7;
p_output1[2785]=8;
p_output1[2786]=9;
p_output1[2787]=10;
p_output1[2788]=11;
p_output1[2789]=12;
p_output1[2790]=13;
p_output1[2791]=14;
p_output1[2792]=15;
p_output1[2793]=16;
p_output1[2794]=17;
p_output1[2795]=40;
p_output1[2796]=41;
p_output1[2797]=42;
p_output1[2798]=43;
p_output1[2799]=44;
p_output1[2800]=45;
p_output1[2801]=46;
p_output1[2802]=47;
p_output1[2803]=48;
p_output1[2804]=49;
p_output1[2805]=50;
p_output1[2806]=51;
p_output1[2807]=52;
p_output1[2808]=53;
p_output1[2809]=73;
p_output1[2810]=74;
p_output1[2811]=75;
p_output1[2812]=76;
p_output1[2813]=77;
p_output1[2814]=78;
p_output1[2815]=79;
p_output1[2816]=80;
p_output1[2817]=81;
p_output1[2818]=82;
p_output1[2819]=83;
p_output1[2820]=84;
p_output1[2821]=85;
p_output1[2822]=86;
p_output1[2823]=87;
p_output1[2824]=88;
p_output1[2825]=89;
p_output1[2826]=109;
p_output1[2827]=113;
p_output1[2828]=115;
p_output1[2829]=116;
p_output1[2830]=119;
p_output1[2831]=120;
p_output1[2832]=121;
p_output1[2833]=122;
p_output1[2834]=124;
p_output1[2835]=1;
p_output1[2836]=2;
p_output1[2837]=3;
p_output1[2838]=4;
p_output1[2839]=5;
p_output1[2840]=6;
p_output1[2841]=7;
p_output1[2842]=8;
p_output1[2843]=9;
p_output1[2844]=10;
p_output1[2845]=11;
p_output1[2846]=12;
p_output1[2847]=13;
p_output1[2848]=14;
p_output1[2849]=15;
p_output1[2850]=16;
p_output1[2851]=17;
p_output1[2852]=40;
p_output1[2853]=41;
p_output1[2854]=42;
p_output1[2855]=43;
p_output1[2856]=44;
p_output1[2857]=45;
p_output1[2858]=46;
p_output1[2859]=47;
p_output1[2860]=48;
p_output1[2861]=49;
p_output1[2862]=50;
p_output1[2863]=51;
p_output1[2864]=52;
p_output1[2865]=53;
p_output1[2866]=73;
p_output1[2867]=74;
p_output1[2868]=75;
p_output1[2869]=76;
p_output1[2870]=77;
p_output1[2871]=78;
p_output1[2872]=79;
p_output1[2873]=80;
p_output1[2874]=81;
p_output1[2875]=82;
p_output1[2876]=83;
p_output1[2877]=84;
p_output1[2878]=85;
p_output1[2879]=86;
p_output1[2880]=87;
p_output1[2881]=88;
p_output1[2882]=89;
p_output1[2883]=113;
p_output1[2884]=115;
p_output1[2885]=116;
p_output1[2886]=119;
p_output1[2887]=120;
p_output1[2888]=121;
p_output1[2889]=122;
p_output1[2890]=124;
p_output1[2891]=1;
p_output1[2892]=2;
p_output1[2893]=3;
p_output1[2894]=4;
p_output1[2895]=5;
p_output1[2896]=6;
p_output1[2897]=7;
p_output1[2898]=8;
p_output1[2899]=9;
p_output1[2900]=10;
p_output1[2901]=11;
p_output1[2902]=12;
p_output1[2903]=13;
p_output1[2904]=40;
p_output1[2905]=41;
p_output1[2906]=42;
p_output1[2907]=43;
p_output1[2908]=44;
p_output1[2909]=45;
p_output1[2910]=46;
p_output1[2911]=47;
p_output1[2912]=48;
p_output1[2913]=49;
p_output1[2914]=73;
p_output1[2915]=74;
p_output1[2916]=75;
p_output1[2917]=76;
p_output1[2918]=77;
p_output1[2919]=78;
p_output1[2920]=79;
p_output1[2921]=80;
p_output1[2922]=81;
p_output1[2923]=82;
p_output1[2924]=83;
p_output1[2925]=84;
p_output1[2926]=85;
p_output1[2927]=110;
p_output1[2928]=113;
p_output1[2929]=1;
p_output1[2930]=2;
p_output1[2931]=3;
p_output1[2932]=4;
p_output1[2933]=5;
p_output1[2934]=6;
p_output1[2935]=7;
p_output1[2936]=8;
p_output1[2937]=9;
p_output1[2938]=10;
p_output1[2939]=11;
p_output1[2940]=12;
p_output1[2941]=14;
p_output1[2942]=40;
p_output1[2943]=41;
p_output1[2944]=42;
p_output1[2945]=43;
p_output1[2946]=44;
p_output1[2947]=45;
p_output1[2948]=46;
p_output1[2949]=47;
p_output1[2950]=48;
p_output1[2951]=50;
p_output1[2952]=73;
p_output1[2953]=74;
p_output1[2954]=75;
p_output1[2955]=76;
p_output1[2956]=77;
p_output1[2957]=78;
p_output1[2958]=79;
p_output1[2959]=80;
p_output1[2960]=81;
p_output1[2961]=82;
p_output1[2962]=83;
p_output1[2963]=84;
p_output1[2964]=86;
p_output1[2965]=1;
p_output1[2966]=2;
p_output1[2967]=3;
p_output1[2968]=4;
p_output1[2969]=5;
p_output1[2970]=6;
p_output1[2971]=7;
p_output1[2972]=8;
p_output1[2973]=9;
p_output1[2974]=10;
p_output1[2975]=11;
p_output1[2976]=12;
p_output1[2977]=15;
p_output1[2978]=40;
p_output1[2979]=41;
p_output1[2980]=42;
p_output1[2981]=43;
p_output1[2982]=44;
p_output1[2983]=45;
p_output1[2984]=46;
p_output1[2985]=47;
p_output1[2986]=48;
p_output1[2987]=51;
p_output1[2988]=73;
p_output1[2989]=74;
p_output1[2990]=75;
p_output1[2991]=76;
p_output1[2992]=77;
p_output1[2993]=78;
p_output1[2994]=79;
p_output1[2995]=80;
p_output1[2996]=81;
p_output1[2997]=82;
p_output1[2998]=83;
p_output1[2999]=84;
p_output1[3000]=87;
p_output1[3001]=1;
p_output1[3002]=2;
p_output1[3003]=3;
p_output1[3004]=4;
p_output1[3005]=5;
p_output1[3006]=6;
p_output1[3007]=7;
p_output1[3008]=8;
p_output1[3009]=9;
p_output1[3010]=10;
p_output1[3011]=11;
p_output1[3012]=12;
p_output1[3013]=16;
p_output1[3014]=17;
p_output1[3015]=40;
p_output1[3016]=41;
p_output1[3017]=42;
p_output1[3018]=43;
p_output1[3019]=44;
p_output1[3020]=45;
p_output1[3021]=46;
p_output1[3022]=47;
p_output1[3023]=48;
p_output1[3024]=52;
p_output1[3025]=53;
p_output1[3026]=73;
p_output1[3027]=74;
p_output1[3028]=75;
p_output1[3029]=76;
p_output1[3030]=77;
p_output1[3031]=78;
p_output1[3032]=79;
p_output1[3033]=80;
p_output1[3034]=81;
p_output1[3035]=82;
p_output1[3036]=83;
p_output1[3037]=84;
p_output1[3038]=88;
p_output1[3039]=89;
p_output1[3040]=115;
p_output1[3041]=116;
p_output1[3042]=119;
p_output1[3043]=120;
p_output1[3044]=121;
p_output1[3045]=122;
p_output1[3046]=124;
p_output1[3047]=4;
p_output1[3048]=5;
p_output1[3049]=6;
p_output1[3050]=7;
p_output1[3051]=8;
p_output1[3052]=9;
p_output1[3053]=10;
p_output1[3054]=11;
p_output1[3055]=12;
p_output1[3056]=16;
p_output1[3057]=17;
p_output1[3058]=40;
p_output1[3059]=41;
p_output1[3060]=42;
p_output1[3061]=43;
p_output1[3062]=44;
p_output1[3063]=45;
p_output1[3064]=46;
p_output1[3065]=47;
p_output1[3066]=48;
p_output1[3067]=52;
p_output1[3068]=53;
p_output1[3069]=76;
p_output1[3070]=77;
p_output1[3071]=78;
p_output1[3072]=79;
p_output1[3073]=80;
p_output1[3074]=81;
p_output1[3075]=82;
p_output1[3076]=83;
p_output1[3077]=84;
p_output1[3078]=88;
p_output1[3079]=89;
p_output1[3080]=115;
p_output1[3081]=116;
p_output1[3082]=119;
p_output1[3083]=120;
p_output1[3084]=121;
p_output1[3085]=122;
p_output1[3086]=124;
p_output1[3087]=1;
p_output1[3088]=2;
p_output1[3089]=3;
p_output1[3090]=4;
p_output1[3091]=5;
p_output1[3092]=6;
p_output1[3093]=18;
p_output1[3094]=19;
p_output1[3095]=20;
p_output1[3096]=21;
p_output1[3097]=40;
p_output1[3098]=41;
p_output1[3099]=42;
p_output1[3100]=54;
p_output1[3101]=55;
p_output1[3102]=56;
p_output1[3103]=57;
p_output1[3104]=73;
p_output1[3105]=74;
p_output1[3106]=75;
p_output1[3107]=76;
p_output1[3108]=77;
p_output1[3109]=78;
p_output1[3110]=90;
p_output1[3111]=91;
p_output1[3112]=92;
p_output1[3113]=93;
p_output1[3114]=1;
p_output1[3115]=2;
p_output1[3116]=3;
p_output1[3117]=4;
p_output1[3118]=5;
p_output1[3119]=6;
p_output1[3120]=18;
p_output1[3121]=19;
p_output1[3122]=20;
p_output1[3123]=21;
p_output1[3124]=40;
p_output1[3125]=41;
p_output1[3126]=42;
p_output1[3127]=54;
p_output1[3128]=55;
p_output1[3129]=56;
p_output1[3130]=57;
p_output1[3131]=73;
p_output1[3132]=74;
p_output1[3133]=75;
p_output1[3134]=76;
p_output1[3135]=77;
p_output1[3136]=78;
p_output1[3137]=90;
p_output1[3138]=91;
p_output1[3139]=92;
p_output1[3140]=93;
p_output1[3141]=1;
p_output1[3142]=2;
p_output1[3143]=3;
p_output1[3144]=4;
p_output1[3145]=5;
p_output1[3146]=6;
p_output1[3147]=18;
p_output1[3148]=19;
p_output1[3149]=20;
p_output1[3150]=21;
p_output1[3151]=40;
p_output1[3152]=41;
p_output1[3153]=42;
p_output1[3154]=54;
p_output1[3155]=55;
p_output1[3156]=56;
p_output1[3157]=57;
p_output1[3158]=73;
p_output1[3159]=74;
p_output1[3160]=75;
p_output1[3161]=76;
p_output1[3162]=77;
p_output1[3163]=78;
p_output1[3164]=90;
p_output1[3165]=91;
p_output1[3166]=92;
p_output1[3167]=93;
p_output1[3168]=1;
p_output1[3169]=2;
p_output1[3170]=3;
p_output1[3171]=4;
p_output1[3172]=5;
p_output1[3173]=6;
p_output1[3174]=18;
p_output1[3175]=19;
p_output1[3176]=20;
p_output1[3177]=21;
p_output1[3178]=40;
p_output1[3179]=41;
p_output1[3180]=42;
p_output1[3181]=54;
p_output1[3182]=55;
p_output1[3183]=56;
p_output1[3184]=57;
p_output1[3185]=73;
p_output1[3186]=74;
p_output1[3187]=75;
p_output1[3188]=76;
p_output1[3189]=77;
p_output1[3190]=78;
p_output1[3191]=90;
p_output1[3192]=91;
p_output1[3193]=92;
p_output1[3194]=93;
p_output1[3195]=1;
p_output1[3196]=2;
p_output1[3197]=3;
p_output1[3198]=4;
p_output1[3199]=5;
p_output1[3200]=6;
p_output1[3201]=22;
p_output1[3202]=23;
p_output1[3203]=24;
p_output1[3204]=25;
p_output1[3205]=26;
p_output1[3206]=27;
p_output1[3207]=28;
p_output1[3208]=29;
p_output1[3209]=30;
p_output1[3210]=31;
p_output1[3211]=32;
p_output1[3212]=40;
p_output1[3213]=41;
p_output1[3214]=42;
p_output1[3215]=58;
p_output1[3216]=59;
p_output1[3217]=60;
p_output1[3218]=61;
p_output1[3219]=62;
p_output1[3220]=63;
p_output1[3221]=64;
p_output1[3222]=65;
p_output1[3223]=66;
p_output1[3224]=67;
p_output1[3225]=68;
p_output1[3226]=73;
p_output1[3227]=74;
p_output1[3228]=75;
p_output1[3229]=76;
p_output1[3230]=77;
p_output1[3231]=78;
p_output1[3232]=94;
p_output1[3233]=95;
p_output1[3234]=96;
p_output1[3235]=97;
p_output1[3236]=98;
p_output1[3237]=99;
p_output1[3238]=100;
p_output1[3239]=101;
p_output1[3240]=102;
p_output1[3241]=103;
p_output1[3242]=104;
p_output1[3243]=114;
p_output1[3244]=117;
p_output1[3245]=118;
p_output1[3246]=1;
p_output1[3247]=2;
p_output1[3248]=3;
p_output1[3249]=4;
p_output1[3250]=5;
p_output1[3251]=6;
p_output1[3252]=22;
p_output1[3253]=23;
p_output1[3254]=24;
p_output1[3255]=25;
p_output1[3256]=26;
p_output1[3257]=27;
p_output1[3258]=28;
p_output1[3259]=29;
p_output1[3260]=30;
p_output1[3261]=31;
p_output1[3262]=32;
p_output1[3263]=40;
p_output1[3264]=41;
p_output1[3265]=42;
p_output1[3266]=58;
p_output1[3267]=59;
p_output1[3268]=60;
p_output1[3269]=61;
p_output1[3270]=62;
p_output1[3271]=63;
p_output1[3272]=64;
p_output1[3273]=65;
p_output1[3274]=66;
p_output1[3275]=67;
p_output1[3276]=68;
p_output1[3277]=73;
p_output1[3278]=74;
p_output1[3279]=75;
p_output1[3280]=76;
p_output1[3281]=77;
p_output1[3282]=78;
p_output1[3283]=94;
p_output1[3284]=95;
p_output1[3285]=96;
p_output1[3286]=97;
p_output1[3287]=98;
p_output1[3288]=99;
p_output1[3289]=100;
p_output1[3290]=101;
p_output1[3291]=102;
p_output1[3292]=103;
p_output1[3293]=104;
p_output1[3294]=114;
p_output1[3295]=117;
p_output1[3296]=118;
p_output1[3297]=1;
p_output1[3298]=2;
p_output1[3299]=3;
p_output1[3300]=4;
p_output1[3301]=5;
p_output1[3302]=6;
p_output1[3303]=22;
p_output1[3304]=23;
p_output1[3305]=24;
p_output1[3306]=25;
p_output1[3307]=26;
p_output1[3308]=27;
p_output1[3309]=28;
p_output1[3310]=29;
p_output1[3311]=30;
p_output1[3312]=31;
p_output1[3313]=32;
p_output1[3314]=40;
p_output1[3315]=41;
p_output1[3316]=42;
p_output1[3317]=58;
p_output1[3318]=59;
p_output1[3319]=60;
p_output1[3320]=61;
p_output1[3321]=62;
p_output1[3322]=63;
p_output1[3323]=64;
p_output1[3324]=65;
p_output1[3325]=66;
p_output1[3326]=67;
p_output1[3327]=68;
p_output1[3328]=73;
p_output1[3329]=74;
p_output1[3330]=75;
p_output1[3331]=76;
p_output1[3332]=77;
p_output1[3333]=78;
p_output1[3334]=94;
p_output1[3335]=95;
p_output1[3336]=96;
p_output1[3337]=97;
p_output1[3338]=98;
p_output1[3339]=99;
p_output1[3340]=100;
p_output1[3341]=101;
p_output1[3342]=102;
p_output1[3343]=103;
p_output1[3344]=104;
p_output1[3345]=114;
p_output1[3346]=117;
p_output1[3347]=118;
p_output1[3348]=1;
p_output1[3349]=2;
p_output1[3350]=3;
p_output1[3351]=4;
p_output1[3352]=5;
p_output1[3353]=6;
p_output1[3354]=22;
p_output1[3355]=23;
p_output1[3356]=24;
p_output1[3357]=25;
p_output1[3358]=26;
p_output1[3359]=27;
p_output1[3360]=28;
p_output1[3361]=29;
p_output1[3362]=30;
p_output1[3363]=31;
p_output1[3364]=32;
p_output1[3365]=40;
p_output1[3366]=41;
p_output1[3367]=42;
p_output1[3368]=58;
p_output1[3369]=59;
p_output1[3370]=60;
p_output1[3371]=61;
p_output1[3372]=62;
p_output1[3373]=63;
p_output1[3374]=64;
p_output1[3375]=65;
p_output1[3376]=66;
p_output1[3377]=67;
p_output1[3378]=68;
p_output1[3379]=73;
p_output1[3380]=74;
p_output1[3381]=75;
p_output1[3382]=76;
p_output1[3383]=77;
p_output1[3384]=78;
p_output1[3385]=94;
p_output1[3386]=95;
p_output1[3387]=96;
p_output1[3388]=97;
p_output1[3389]=98;
p_output1[3390]=99;
p_output1[3391]=100;
p_output1[3392]=101;
p_output1[3393]=102;
p_output1[3394]=103;
p_output1[3395]=104;
p_output1[3396]=114;
p_output1[3397]=117;
p_output1[3398]=118;
p_output1[3399]=1;
p_output1[3400]=2;
p_output1[3401]=3;
p_output1[3402]=4;
p_output1[3403]=5;
p_output1[3404]=6;
p_output1[3405]=22;
p_output1[3406]=23;
p_output1[3407]=24;
p_output1[3408]=25;
p_output1[3409]=26;
p_output1[3410]=27;
p_output1[3411]=28;
p_output1[3412]=29;
p_output1[3413]=30;
p_output1[3414]=31;
p_output1[3415]=32;
p_output1[3416]=40;
p_output1[3417]=41;
p_output1[3418]=42;
p_output1[3419]=58;
p_output1[3420]=59;
p_output1[3421]=60;
p_output1[3422]=61;
p_output1[3423]=62;
p_output1[3424]=63;
p_output1[3425]=64;
p_output1[3426]=65;
p_output1[3427]=66;
p_output1[3428]=67;
p_output1[3429]=68;
p_output1[3430]=73;
p_output1[3431]=74;
p_output1[3432]=75;
p_output1[3433]=76;
p_output1[3434]=77;
p_output1[3435]=78;
p_output1[3436]=94;
p_output1[3437]=95;
p_output1[3438]=96;
p_output1[3439]=97;
p_output1[3440]=98;
p_output1[3441]=99;
p_output1[3442]=100;
p_output1[3443]=101;
p_output1[3444]=102;
p_output1[3445]=103;
p_output1[3446]=104;
p_output1[3447]=111;
p_output1[3448]=114;
p_output1[3449]=117;
p_output1[3450]=118;
p_output1[3451]=1;
p_output1[3452]=2;
p_output1[3453]=3;
p_output1[3454]=4;
p_output1[3455]=5;
p_output1[3456]=6;
p_output1[3457]=22;
p_output1[3458]=23;
p_output1[3459]=24;
p_output1[3460]=25;
p_output1[3461]=26;
p_output1[3462]=27;
p_output1[3463]=28;
p_output1[3464]=29;
p_output1[3465]=30;
p_output1[3466]=31;
p_output1[3467]=32;
p_output1[3468]=40;
p_output1[3469]=41;
p_output1[3470]=42;
p_output1[3471]=58;
p_output1[3472]=59;
p_output1[3473]=60;
p_output1[3474]=61;
p_output1[3475]=62;
p_output1[3476]=63;
p_output1[3477]=64;
p_output1[3478]=65;
p_output1[3479]=66;
p_output1[3480]=67;
p_output1[3481]=68;
p_output1[3482]=73;
p_output1[3483]=74;
p_output1[3484]=75;
p_output1[3485]=76;
p_output1[3486]=77;
p_output1[3487]=78;
p_output1[3488]=94;
p_output1[3489]=95;
p_output1[3490]=96;
p_output1[3491]=97;
p_output1[3492]=98;
p_output1[3493]=99;
p_output1[3494]=100;
p_output1[3495]=101;
p_output1[3496]=102;
p_output1[3497]=103;
p_output1[3498]=104;
p_output1[3499]=114;
p_output1[3500]=117;
p_output1[3501]=118;
p_output1[3502]=1;
p_output1[3503]=2;
p_output1[3504]=3;
p_output1[3505]=4;
p_output1[3506]=5;
p_output1[3507]=6;
p_output1[3508]=22;
p_output1[3509]=23;
p_output1[3510]=24;
p_output1[3511]=25;
p_output1[3512]=26;
p_output1[3513]=27;
p_output1[3514]=28;
p_output1[3515]=40;
p_output1[3516]=41;
p_output1[3517]=42;
p_output1[3518]=58;
p_output1[3519]=59;
p_output1[3520]=60;
p_output1[3521]=61;
p_output1[3522]=62;
p_output1[3523]=63;
p_output1[3524]=64;
p_output1[3525]=73;
p_output1[3526]=74;
p_output1[3527]=75;
p_output1[3528]=76;
p_output1[3529]=77;
p_output1[3530]=78;
p_output1[3531]=94;
p_output1[3532]=95;
p_output1[3533]=96;
p_output1[3534]=97;
p_output1[3535]=98;
p_output1[3536]=99;
p_output1[3537]=100;
p_output1[3538]=112;
p_output1[3539]=114;
p_output1[3540]=1;
p_output1[3541]=2;
p_output1[3542]=3;
p_output1[3543]=4;
p_output1[3544]=5;
p_output1[3545]=6;
p_output1[3546]=22;
p_output1[3547]=23;
p_output1[3548]=24;
p_output1[3549]=25;
p_output1[3550]=26;
p_output1[3551]=27;
p_output1[3552]=29;
p_output1[3553]=40;
p_output1[3554]=41;
p_output1[3555]=42;
p_output1[3556]=58;
p_output1[3557]=59;
p_output1[3558]=60;
p_output1[3559]=61;
p_output1[3560]=62;
p_output1[3561]=63;
p_output1[3562]=65;
p_output1[3563]=73;
p_output1[3564]=74;
p_output1[3565]=75;
p_output1[3566]=76;
p_output1[3567]=77;
p_output1[3568]=78;
p_output1[3569]=94;
p_output1[3570]=95;
p_output1[3571]=96;
p_output1[3572]=97;
p_output1[3573]=98;
p_output1[3574]=99;
p_output1[3575]=101;
p_output1[3576]=1;
p_output1[3577]=2;
p_output1[3578]=3;
p_output1[3579]=4;
p_output1[3580]=5;
p_output1[3581]=6;
p_output1[3582]=22;
p_output1[3583]=23;
p_output1[3584]=24;
p_output1[3585]=25;
p_output1[3586]=26;
p_output1[3587]=27;
p_output1[3588]=30;
p_output1[3589]=40;
p_output1[3590]=41;
p_output1[3591]=42;
p_output1[3592]=58;
p_output1[3593]=59;
p_output1[3594]=60;
p_output1[3595]=61;
p_output1[3596]=62;
p_output1[3597]=63;
p_output1[3598]=66;
p_output1[3599]=73;
p_output1[3600]=74;
p_output1[3601]=75;
p_output1[3602]=76;
p_output1[3603]=77;
p_output1[3604]=78;
p_output1[3605]=94;
p_output1[3606]=95;
p_output1[3607]=96;
p_output1[3608]=97;
p_output1[3609]=98;
p_output1[3610]=99;
p_output1[3611]=102;
p_output1[3612]=1;
p_output1[3613]=2;
p_output1[3614]=3;
p_output1[3615]=4;
p_output1[3616]=5;
p_output1[3617]=6;
p_output1[3618]=22;
p_output1[3619]=23;
p_output1[3620]=24;
p_output1[3621]=25;
p_output1[3622]=26;
p_output1[3623]=27;
p_output1[3624]=31;
p_output1[3625]=32;
p_output1[3626]=40;
p_output1[3627]=41;
p_output1[3628]=42;
p_output1[3629]=58;
p_output1[3630]=59;
p_output1[3631]=60;
p_output1[3632]=61;
p_output1[3633]=62;
p_output1[3634]=63;
p_output1[3635]=67;
p_output1[3636]=68;
p_output1[3637]=73;
p_output1[3638]=74;
p_output1[3639]=75;
p_output1[3640]=76;
p_output1[3641]=77;
p_output1[3642]=78;
p_output1[3643]=94;
p_output1[3644]=95;
p_output1[3645]=96;
p_output1[3646]=97;
p_output1[3647]=98;
p_output1[3648]=99;
p_output1[3649]=103;
p_output1[3650]=104;
p_output1[3651]=117;
p_output1[3652]=118;
p_output1[3653]=4;
p_output1[3654]=5;
p_output1[3655]=6;
p_output1[3656]=22;
p_output1[3657]=23;
p_output1[3658]=24;
p_output1[3659]=25;
p_output1[3660]=26;
p_output1[3661]=27;
p_output1[3662]=31;
p_output1[3663]=32;
p_output1[3664]=40;
p_output1[3665]=41;
p_output1[3666]=42;
p_output1[3667]=58;
p_output1[3668]=59;
p_output1[3669]=60;
p_output1[3670]=61;
p_output1[3671]=62;
p_output1[3672]=63;
p_output1[3673]=67;
p_output1[3674]=68;
p_output1[3675]=76;
p_output1[3676]=77;
p_output1[3677]=78;
p_output1[3678]=94;
p_output1[3679]=95;
p_output1[3680]=96;
p_output1[3681]=97;
p_output1[3682]=98;
p_output1[3683]=99;
p_output1[3684]=103;
p_output1[3685]=104;
p_output1[3686]=117;
p_output1[3687]=118;
p_output1[3688]=1;
p_output1[3689]=2;
p_output1[3690]=3;
p_output1[3691]=4;
p_output1[3692]=5;
p_output1[3693]=6;
p_output1[3694]=33;
p_output1[3695]=34;
p_output1[3696]=35;
p_output1[3697]=36;
p_output1[3698]=40;
p_output1[3699]=41;
p_output1[3700]=42;
p_output1[3701]=69;
p_output1[3702]=70;
p_output1[3703]=71;
p_output1[3704]=72;
p_output1[3705]=73;
p_output1[3706]=74;
p_output1[3707]=75;
p_output1[3708]=76;
p_output1[3709]=77;
p_output1[3710]=78;
p_output1[3711]=105;
p_output1[3712]=106;
p_output1[3713]=107;
p_output1[3714]=108;
p_output1[3715]=1;
p_output1[3716]=2;
p_output1[3717]=3;
p_output1[3718]=4;
p_output1[3719]=5;
p_output1[3720]=6;
p_output1[3721]=33;
p_output1[3722]=34;
p_output1[3723]=35;
p_output1[3724]=36;
p_output1[3725]=40;
p_output1[3726]=41;
p_output1[3727]=42;
p_output1[3728]=69;
p_output1[3729]=70;
p_output1[3730]=71;
p_output1[3731]=72;
p_output1[3732]=73;
p_output1[3733]=74;
p_output1[3734]=75;
p_output1[3735]=76;
p_output1[3736]=77;
p_output1[3737]=78;
p_output1[3738]=105;
p_output1[3739]=106;
p_output1[3740]=107;
p_output1[3741]=108;
p_output1[3742]=1;
p_output1[3743]=2;
p_output1[3744]=3;
p_output1[3745]=4;
p_output1[3746]=5;
p_output1[3747]=6;
p_output1[3748]=33;
p_output1[3749]=34;
p_output1[3750]=35;
p_output1[3751]=36;
p_output1[3752]=40;
p_output1[3753]=41;
p_output1[3754]=42;
p_output1[3755]=69;
p_output1[3756]=70;
p_output1[3757]=71;
p_output1[3758]=72;
p_output1[3759]=73;
p_output1[3760]=74;
p_output1[3761]=75;
p_output1[3762]=76;
p_output1[3763]=77;
p_output1[3764]=78;
p_output1[3765]=105;
p_output1[3766]=106;
p_output1[3767]=107;
p_output1[3768]=108;
p_output1[3769]=1;
p_output1[3770]=2;
p_output1[3771]=3;
p_output1[3772]=4;
p_output1[3773]=5;
p_output1[3774]=6;
p_output1[3775]=33;
p_output1[3776]=34;
p_output1[3777]=35;
p_output1[3778]=36;
p_output1[3779]=40;
p_output1[3780]=41;
p_output1[3781]=42;
p_output1[3782]=69;
p_output1[3783]=70;
p_output1[3784]=71;
p_output1[3785]=72;
p_output1[3786]=73;
p_output1[3787]=74;
p_output1[3788]=75;
p_output1[3789]=76;
p_output1[3790]=77;
p_output1[3791]=78;
p_output1[3792]=105;
p_output1[3793]=106;
p_output1[3794]=107;
p_output1[3795]=108;
}
#ifdef MATLAB_MEX_FILE
#include "mex.h"
/*
 * Main function: MATLAB MEX gateway.  Validates the single input (var1),
 * allocates the output matrix, and delegates the computation to the
 * generated output1() subroutine defined earlier in this file.
 */
void mexFunction( int nlhs, mxArray *plhs[],
int nrhs, const mxArray *prhs[] )
{
size_t mrows, ncols;
double *var1;
double *p_output1;
/* Check for proper number of arguments. */
if( nrhs != 1)
{
mexErrMsgIdAndTxt("MATLAB:MShaped:invalidNumInputs", "One input(s) required (var1).");
}
else if( nlhs > 1)
{
mexErrMsgIdAndTxt("MATLAB:MShaped:maxlhs", "Too many output arguments.");
}
/* The input must be a noncomplex double vector or scaler. */
mrows = mxGetM(prhs[0]);
ncols = mxGetN(prhs[0]);
/* NOTE(review): the two size clauses below are identical, so the whole
 * test reduces to "reject anything that is not 1x1".  Code-generator
 * templates of this kind normally emit
 *   !(mrows == N && ncols == 1) && !(mrows == 1 && ncols == N)
 * for an N-element vector input -- confirm the expected dimension of
 * var1 against the generator before relying on this check. */
if( !mxIsDouble(prhs[0]) || mxIsComplex(prhs[0]) ||
( !(mrows == 1 && ncols == 1) &&
!(mrows == 1 && ncols == 1)))
{
mexErrMsgIdAndTxt( "MATLAB:MShaped:inputNotRealVector", "var1 is wrong.");
}
/* Assign pointers to each input. */
var1 = mxGetPr(prhs[0]);
/* Create matrices for return arguments. */
/* 1898x2 column-major buffer; output1() fills all 3796 entries. */
plhs[0] = mxCreateDoubleMatrix((mwSize) 1898, (mwSize) 2, mxREAL);
p_output1 = mxGetPr(plhs[0]);
/* Call the calculation subroutine. */
output1(p_output1,var1);
}
#else // MATLAB_MEX_FILE
#include "Js_dxDiscreteMapLeftImpact.hh"
namespace LeftImpact
{
/* Plain C++ entry point used when not building as a MEX file; a thin
 * wrapper around the generated output1() subroutine. */
void Js_dxDiscreteMapLeftImpact_raw(double *p_output1, const double *var1)
{
// Call Subroutines
output1(p_output1, var1);
}
}
#endif // MATLAB_MEX_FILE
|
RMRicketts/covid19Back
|
actions/index.js
|
'use strict';
const _ = require('lodash');
const fs = require('fs').promises;
const path = require('path');
/**
 * Recursively collect route modules from every `.js` file under
 * `filePath` (excluding `index.js` files), deep-merging each module's
 * exports into the accumulator.
 *
 * @param {Object} merge    accumulator the discovered modules are merged into
 * @param {string} filePath directory to scan
 * @returns {Promise<Object>} the accumulator with all modules merged in
 */
let getActions = async (merge, filePath) => {
  try {
    let dir = await fs.opendir(filePath);
    for await (let dirent of dir) {
      let {name} = dirent;
      if (dirent.isDirectory()) {
        // Descend into subdirectories; their modules are merged in too.
        merge = await getActions(merge, path.join(filePath, name));
      }
      // Only plain .js modules; skip index.js (this loader itself).
      // endsWith() replaces the deprecated substr() length arithmetic.
      if (dirent.isFile() && name.endsWith('.js') && name !== 'index.js') {
        let m = require(path.join(filePath, name));
        merge = _.merge(merge, m);
      }
    }
  } catch (e) {
    // Best-effort: log and return whatever has been collected so far.
    console.log(e);
  }
  return merge;
};
module.exports = async server => {
let merged = await getActions({}, __dirname);
for (let i of Object.keys(merged)) {
server.route(merged[i]);
}
};
|
rjw57/tiw-computer
|
emulator/src/devices/bus/dmv/ram.cpp
|
<gh_stars>1-10
// license:BSD-3-Clause
// copyright-holders:<NAME>
/***************************************************************************
K200 64K RAM expansion
K202 192K RAM expansion
K208 448K RAM expansion
***************************************************************************/
#include "emu.h"
#include "ram.h"
/***************************************************************************
IMPLEMENTATION
***************************************************************************/
//**************************************************************************
// GLOBAL VARIABLES
//**************************************************************************
// Device type registrations: one global type object per supported RAM board.
DEFINE_DEVICE_TYPE(DMV_K200, dmv_k200_device, "dmv_k200", "K200 64K RAM expansion")
DEFINE_DEVICE_TYPE(DMV_K202, dmv_k202_device, "dmv_k202", "K202 192K RAM expansion")
DEFINE_DEVICE_TYPE(DMV_K208, dmv_k208_device, "dmv_k208", "K208 448K RAM expansion")
//**************************************************************************
// LIVE DEVICE
//**************************************************************************
//-------------------------------------------------
// dmv_ram_device_base - constructor
//-------------------------------------------------
// Common base for the K2xx RAM boards.  `size` is the board capacity in
// 64KB banks (1, 3 or 7); device_start() allocates size * 0x10000 bytes.
dmv_ram_device_base::dmv_ram_device_base(const machine_config &mconfig, device_type type, uint32_t size, const char *tag, device_t *owner, uint32_t clock)
: device_t(mconfig, type, tag, owner, clock)
, device_dmvslot_interface(mconfig, *this)
, m_ram(nullptr)  // backing storage; allocated in device_start()
, m_size(size)    // capacity in 64KB banks
{
}
//-------------------------------------------------
// dmv_k200_device - constructor
//-------------------------------------------------
// K200: one 64KB bank.
dmv_k200_device::dmv_k200_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock)
: dmv_ram_device_base(mconfig, DMV_K200, 1, tag, owner, clock)
{
}
//-------------------------------------------------
// dmv_k202_device - constructor
//-------------------------------------------------
// K202: three 64KB banks (192KB total).
dmv_k202_device::dmv_k202_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock)
: dmv_ram_device_base(mconfig, DMV_K202, 3, tag, owner, clock)
{
}
//-------------------------------------------------
// dmv_k208_device - constructor
//-------------------------------------------------
// K208: seven 64KB banks (448KB total).
dmv_k208_device::dmv_k208_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock)
: dmv_ram_device_base(mconfig, DMV_K208, 7, tag, owner, clock)
{
}
//-------------------------------------------------
// device_start - device-specific startup
//-------------------------------------------------
void dmv_ram_device_base::device_start()
{
// Allocate the expansion RAM as a byte-wide memory region:
// m_size banks of 64KB (0x10000) each.
m_ram = machine().memory().region_alloc( "expram", m_size * 0x10000, 1, ENDIANNESS_LITTLE )->base();
}
//-------------------------------------------------
// read
//-------------------------------------------------
void dmv_ram_device_base::ram_read(uint8_t cas, offs_t offset, uint8_t &data)
{
if (cas && cas <= m_size)
data = m_ram[((cas - 1) << 16) | (offset & 0xffff)];
}
//-------------------------------------------------
// write
//-------------------------------------------------
void dmv_ram_device_base::ram_write(uint8_t cas, offs_t offset, uint8_t data)
{
if (cas && cas <= m_size)
m_ram[((cas - 1) << 16) | (offset & 0xffff)] = data;
}
|
White-116/xlsxd
|
libxlsxwriter/test/functional/src/test_autofilter01.c
|
/*****************************************************************************
* Test cases for libxlsxwriter.
*
* Test to compare output against Excel files.
*
* Copyright 2014-2019, <NAME>, <EMAIL>
*
*/
#include "xlsxwriter.h"
/* Build a 4-column sample table (Region, Item, Volume, Month) in a new
 * workbook and apply an autofilter over it.  The test harness compares
 * the resulting .xlsx against a reference file produced by Excel. */
int main() {
lxw_workbook *workbook = workbook_new("test_autofilter01.xlsx");
lxw_worksheet *worksheet = workbook_add_worksheet(workbook, NULL);
uint16_t i;
/* One source row of the table to be filtered. */
struct row {
char region[16];
char item[16];
int volume;
char month[16];
};
/* 50 data rows, written beneath the header row below. */
struct row data[] = {
{"East", "Apple", 9000, "July" },
{"East", "Apple", 5000, "July" },
{"South", "Orange", 9000, "September" },
{"North", "Apple", 2000, "November" },
{"West", "Apple", 9000, "November" },
{"South", "Pear", 7000, "October" },
{"North", "Pear", 9000, "August" },
{"West", "Orange", 1000, "December" },
{"West", "Grape", 1000, "November" },
{"South", "Pear", 10000, "April" },
{"West", "Grape", 6000, "January" },
{"South", "Orange", 3000, "May" },
{"North", "Apple", 3000, "December" },
{"South", "Apple", 7000, "February" },
{"West", "Grape", 1000, "December" },
{"East", "Grape", 8000, "February" },
{"South", "Grape", 10000, "June" },
{"West", "Pear", 7000, "December" },
{"South", "Apple", 2000, "October" },
{"East", "Grape", 7000, "December" },
{"North", "Grape", 6000, "April" },
{"East", "Pear", 8000, "February" },
{"North", "Apple", 7000, "August" },
{"North", "Orange", 7000, "July" },
{"North", "Apple", 6000, "June" },
{"South", "Grape", 8000, "September" },
{"West", "Apple", 3000, "October" },
{"South", "Orange", 10000, "November" },
{"West", "Grape", 4000, "July" },
{"North", "Orange", 5000, "August" },
{"East", "Orange", 1000, "November" },
{"East", "Orange", 4000, "October" },
{"North", "Grape", 5000, "August" },
{"East", "Apple", 1000, "December" },
{"South", "Apple", 10000, "March" },
{"East", "Grape", 7000, "October" },
{"West", "Grape", 1000, "September" },
{"East", "Grape", 10000, "October" },
{"South", "Orange", 8000, "March" },
{"North", "Apple", 4000, "July" },
{"South", "Orange", 5000, "July" },
{"West", "Apple", 4000, "June" },
{"East", "Apple", 5000, "April" },
{"North", "Pear", 3000, "August" },
{"East", "Grape", 9000, "November" },
{"North", "Orange", 8000, "October" },
{"East", "Apple", 10000, "June" },
{"South", "Pear", 1000, "December" },
{"North", "Grape", 10000, "July" },
{"East", "Grape", 6000, "February" }
};
/* Write the column headers. */
worksheet_write_string(worksheet, 0, 0, "Region", NULL);
worksheet_write_string(worksheet, 0, 1, "Item", NULL);
worksheet_write_string(worksheet, 0, 2, "Volume" , NULL);
worksheet_write_string(worksheet, 0, 3, "Month", NULL);
/* Write the row data, one spreadsheet row per table entry (offset by
 * one to leave row 0 for the headers). */
for (i = 0; i < sizeof(data)/sizeof(struct row); i++) {
worksheet_write_string(worksheet, i + 1, 0, data[i].region, NULL);
worksheet_write_string(worksheet, i + 1, 1, data[i].item, NULL);
worksheet_write_number(worksheet, i + 1, 2, data[i].volume , NULL);
worksheet_write_string(worksheet, i + 1, 3, data[i].month, NULL);
}
/* Filter range A1:D51 -- the header row plus all 50 data rows. */
worksheet_autofilter(worksheet, 0, 0, 50, 3);
/* workbook_close() writes the file and frees the workbook. */
return workbook_close(workbook);
}
|
baojie/neo4j
|
enterprise/ha/src/main/java/org/neo4j/kernel/ha/HighlyAvailableGraphDatabase.java
|
<reponame>baojie/neo4j
/**
* Copyright (c) 2002-2013 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.ha;
import static org.neo4j.kernel.ha.DelegateInvocationHandler.snapshot;
import java.io.File;
import java.lang.reflect.Proxy;
import java.net.URI;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.transaction.Transaction;
import org.neo4j.cluster.ClusterSettings;
import org.neo4j.cluster.client.ClusterClient;
import org.neo4j.cluster.com.NetworkInstance;
import org.neo4j.cluster.member.ClusterMemberAvailability;
import org.neo4j.cluster.member.ClusterMemberEvents;
import org.neo4j.cluster.member.paxos.MemberIsAvailable;
import org.neo4j.cluster.member.paxos.PaxosClusterMemberAvailability;
import org.neo4j.cluster.member.paxos.PaxosClusterMemberEvents;
import org.neo4j.cluster.protocol.cluster.ClusterConfiguration;
import org.neo4j.cluster.protocol.cluster.ClusterListener;
import org.neo4j.cluster.protocol.election.DefaultElectionCredentialsProvider;
import org.neo4j.cluster.protocol.election.ElectionCredentialsProvider;
import org.neo4j.cluster.protocol.election.NotElectableElectionCredentialsProvider;
import org.neo4j.graphdb.DependencyResolver;
import org.neo4j.graphdb.TransactionFailureException;
import org.neo4j.graphdb.factory.GraphDatabaseSettings;
import org.neo4j.graphdb.index.IndexProvider;
import org.neo4j.helpers.Predicate;
import org.neo4j.helpers.collection.Iterables;
import org.neo4j.kernel.ha.com.master.DefaultSlaveFactory;
import org.neo4j.kernel.ha.com.master.Master;
import org.neo4j.kernel.ha.com.RequestContextFactory;
import org.neo4j.kernel.ha.com.master.Slaves;
import org.neo4j.kernel.ha.management.ClusterDatabaseInfoProvider;
import org.neo4j.kernel.ha.management.HighlyAvailableKernelData;
import org.neo4j.kernel.IdGeneratorFactory;
import org.neo4j.kernel.InternalAbstractGraphDatabase;
import org.neo4j.kernel.KernelData;
import org.neo4j.kernel.extension.KernelExtensionFactory;
import org.neo4j.kernel.ha.cluster.HighAvailabilityMemberChangeEvent;
import org.neo4j.kernel.ha.cluster.HighAvailabilityMemberContext;
import org.neo4j.kernel.ha.cluster.HighAvailabilityMemberListener;
import org.neo4j.kernel.ha.cluster.HighAvailabilityMemberState;
import org.neo4j.kernel.ha.cluster.HighAvailabilityMemberStateMachine;
import org.neo4j.kernel.ha.cluster.HighAvailabilityModeSwitcher;
import org.neo4j.kernel.ha.cluster.SimpleHighAvailabilityMemberContext;
import org.neo4j.kernel.ha.cluster.member.ClusterMembers;
import org.neo4j.kernel.ha.cluster.member.HighAvailabilitySlaves;
import org.neo4j.kernel.ha.cluster.zoo.ZooKeeperHighAvailabilityEvents;
import org.neo4j.kernel.ha.id.HaIdGeneratorFactory;
import org.neo4j.kernel.ha.lock.LockManagerModeSwitcher;
import org.neo4j.kernel.ha.switchover.Switchover;
import org.neo4j.kernel.ha.transaction.OnDiskLastTxIdGetter;
import org.neo4j.kernel.ha.transaction.TxHookModeSwitcher;
import org.neo4j.kernel.ha.transaction.TxIdGeneratorModeSwitcher;
import org.neo4j.kernel.impl.cache.CacheProvider;
import org.neo4j.kernel.impl.core.Caches;
import org.neo4j.kernel.impl.core.RelationshipTypeCreator;
import org.neo4j.kernel.impl.core.TransactionState;
import org.neo4j.kernel.impl.core.WritableTransactionState;
import org.neo4j.kernel.impl.transaction.LockManager;
import org.neo4j.kernel.impl.transaction.TransactionStateFactory;
import org.neo4j.kernel.impl.transaction.TxHook;
import org.neo4j.kernel.impl.transaction.TxManager;
import org.neo4j.kernel.impl.transaction.XaDataSourceManager;
import org.neo4j.kernel.impl.transaction.xaframework.ForceMode;
import org.neo4j.kernel.impl.transaction.xaframework.TransactionInterceptorProvider;
import org.neo4j.kernel.impl.transaction.xaframework.TxIdGenerator;
import org.neo4j.kernel.lifecycle.LifeSupport;
import org.neo4j.kernel.lifecycle.Lifecycle;
import org.neo4j.kernel.lifecycle.LifecycleAdapter;
import org.neo4j.kernel.logging.ClassicLoggingService;
import org.neo4j.kernel.logging.LogbackService;
import org.neo4j.kernel.logging.Logging;
import ch.qos.logback.classic.LoggerContext;
/**
 * Graph database wired for Neo4j High Availability (HA).
 *
 * One cluster member acts as master, the rest as slaves. Most components below
 * (Master, TxHook, TxIdGenerator, LockManager, ...) are JDK dynamic proxies whose
 * delegates are swapped by *ModeSwitcher components whenever this member's HA
 * state changes, so callers keep stable references across role switches.
 * Also supports a ZooKeeper "compatibility mode" used when migrating from the
 * legacy ZK coordination to the Paxos cluster (see paxosLife / compatibilityLifecycle).
 */
public class HighlyAvailableGraphDatabase extends InternalAbstractGraphDatabase
{
    private RequestContextFactory requestContextFactory;
    private Slaves slaves;
    private ClusterMembers members;
    // Proxy target for the Master interface; re-pointed on every master switch.
    private DelegateInvocationHandler masterDelegateInvocationHandler;
    private LoggerContext loggerContext;
    private Master master;
    // Gates transactions until the instance has settled into MASTER or SLAVE state.
    private InstanceAccessGuard accessGuard;
    private HighAvailabilityMemberStateMachine memberStateMachine;
    private UpdatePuller updatePuller;
    private LastUpdateTime lastUpdateTime;
    private HighAvailabilityMemberContext memberContext;
    private ClusterClient clusterClient;
    private ClusterMemberEvents clusterEvents;
    private ClusterMemberAvailability clusterMemberAvailability;
    // How long beginTx() waits for a master election before giving up.
    private long stateSwitchTimeoutMillis;
    /*
     * TODO the following are in place of a proper abstraction of component dependencies, in which the compatibility
     * layer would be an optional component and the paxos layer would depend on it. Since we currently don't have one,
     * we need to fake it with this life and the accompanying boolean.
     */
    /*
     * paxosLife holds stuff that must be added in global life if we are not in compatibility mode. If in compatibility
     * mode they will be started only on switchover.
     */
    private final LifeSupport paxosLife = new LifeSupport();
    /*
     * compatibilityMode is true if we are in ZK compatibility mode. If false, paxosLife is added to the global life.
     */
    private boolean compatibilityMode = false;
    /*
     * compatibilityLifecycle holds stuff that needs to be shutdown when switching. They can be restarted by adding
     * them to paxosLife too.
     */
    List<Lifecycle> compatibilityLifecycle = new LinkedList<Lifecycle>();
    private DelegateInvocationHandler clusterEventsDelegateInvocationHandler;
    private DelegateInvocationHandler memberContextDelegateInvocationHandler;
    private DelegateInvocationHandler clusterMemberAvailabilityDelegateInvocationHandler;

    /**
     * Creates and immediately starts (via run()) an HA graph database over the
     * given store directory, registering the HA/cluster settings classes.
     */
    public HighlyAvailableGraphDatabase( String storeDir, Map<String, String> params,
            Iterable<IndexProvider> indexProviders,
            Iterable<KernelExtensionFactory<?>> kernelExtensions,
            Iterable<CacheProvider> cacheProviders,
            Iterable<TransactionInterceptorProvider> txInterceptorProviders )
    {
        super( storeDir, params, Iterables.<Class<?>,Class<?>>iterable( GraphDatabaseSettings.class, HaSettings.class,
                NetworkInstance.Configuration.class, ClusterSettings.class ), indexProviders, kernelExtensions,
                cacheProviders, txInterceptorProviders );
        run();
    }

    /**
     * Builds the HA-specific object graph around the base database: the Master
     * proxy, the access guard, the update puller, and (outside compatibility
     * mode) the paxos lifecycle. Called from the base class's run().
     */
    protected void create()
    {
        life.add( new BranchedDataMigrator( storeDir ) );
        masterDelegateInvocationHandler = new DelegateInvocationHandler();
        master = (Master) Proxy.newProxyInstance( Master.class.getClassLoader(), new Class[]{Master.class},
                masterDelegateInvocationHandler );
        accessGuard = new InstanceAccessGuard();
        super.create();
        kernelEventHandlers.registerKernelEventHandler( new HaKernelPanicHandler( xaDataSourceManager,
                (TxManager) txManager ) );
        life.add( updatePuller = new UpdatePuller( (HaXaDataSourceManager) xaDataSourceManager, master,
                requestContextFactory, txManager, accessGuard, lastUpdateTime, config, msgLog ) );
        stateSwitchTimeoutMillis = config.get( HaSettings.state_switch_timeout );
        // In compatibility mode paxosLife is started only on ZK->Paxos switchover.
        if ( !compatibilityMode )
        {
            life.add( paxosLife );
        }
        life.add( new StartupWaiter() );
        diagnosticsManager.appendProvider( new HighAvailabilityDiagnostics( memberStateMachine, clusterClient ) );
    }

    public void start()
    {
        life.start();
    }

    public void stop()
    {
        life.stop();
    }

    @Override
    protected org.neo4j.graphdb.Transaction beginTx( ForceMode forceMode )
    {
        // TODO first startup ever we don't have a proper db, so don't even serve read requests
        // if this is a startup for where we have been a member of this cluster before we
        // can server (possibly quite outdated) read requests.
        if (!accessGuard.await( stateSwitchTimeoutMillis ))
        {
            throw new TransactionFailureException( "Timeout waiting for cluster to elect master" );
        }
        return super.beginTx( forceMode );
    }

    /**
     * Uses Logback when it is on the classpath, otherwise falls back to the
     * classic logging service.
     */
    protected Logging createLogging()
    {
        try
        {
            getClass().getClassLoader().loadClass( "ch.qos.logback.classic.LoggerContext" );
            loggerContext = new LoggerContext();
            return life.add( new LogbackService( config, loggerContext ) );
        }
        catch ( ClassNotFoundException e )
        {
            return life.add( new ClassicLoggingService( config ) );
        }
    }

    @Override
    protected TransactionStateFactory createTransactionStateFactory()
    {
        return new TransactionStateFactory( logging )
        {
            @Override
            public TransactionState create( Transaction tx )
            {
                // snapshot(...) pins the current delegate of each proxied
                // component for the lifetime of this transaction.
                return new WritableTransactionState( snapshot( lockManager ),
                        propertyIndexManager, nodeManager, logging, tx, snapshot( txHook ),
                        snapshot( txIdGenerator ) );
            }
        };
    }

    @Override
    protected XaDataSourceManager createXaDataSourceManager()
    {
        XaDataSourceManager toReturn = new HaXaDataSourceManager( logging.getLogger( HaXaDataSourceManager.class ) );
        requestContextFactory = new RequestContextFactory( config.get( HaSettings.server_id ), toReturn,
                dependencyResolver );
        return toReturn;
    }

    /**
     * Creates the TxHook proxy and, as a side effect, wires up the whole
     * cluster layer (events, member context, availability) — either the Paxos
     * implementations directly, or ZK-backed ones plus a prepared switchover
     * when coordinators are configured (compatibility mode).
     */
    @Override
    protected TxHook createTxHook()
    {
        clusterEventsDelegateInvocationHandler = new DelegateInvocationHandler();
        memberContextDelegateInvocationHandler = new DelegateInvocationHandler();
        clusterMemberAvailabilityDelegateInvocationHandler = new DelegateInvocationHandler();
        clusterEvents = (ClusterMemberEvents) Proxy.newProxyInstance( ClusterMemberEvents.class.getClassLoader(),
                new Class[]{ClusterMemberEvents.class, Lifecycle.class}, clusterEventsDelegateInvocationHandler );
        memberContext = (HighAvailabilityMemberContext) Proxy.newProxyInstance(
                HighAvailabilityMemberContext.class.getClassLoader(),
                new Class[]{HighAvailabilityMemberContext.class}, memberContextDelegateInvocationHandler );
        clusterMemberAvailability = (ClusterMemberAvailability) Proxy.newProxyInstance(
                ClusterMemberAvailability.class.getClassLoader(),
                new Class[]{ClusterMemberAvailability.class}, clusterMemberAvailabilityDelegateInvocationHandler );
        /*
         * We need to create these anyway since even in compatibility mode we'll use them for switchover. If it turns
         * out we are not going to need zookeeper, just assign them to the class fields. The difference is in when
         * they start().
         */
        ElectionCredentialsProvider electionCredentialsProvider = config.get( HaSettings.slave_only ) ?
                new NotElectableElectionCredentialsProvider() :
                new DefaultElectionCredentialsProvider( config.get( HaSettings.server_id ),
                        new OnDiskLastTxIdGetter( new File( getStoreDir() ) ) );
        clusterClient = new ClusterClient( ClusterClient.adapt( config ), logging, electionCredentialsProvider );
        PaxosClusterMemberEvents localClusterEvents = new PaxosClusterMemberEvents( clusterClient, clusterClient,
                clusterClient, logging, new Predicate<PaxosClusterMemberEvents.ClusterMembersSnapshot>()
        {
            @Override
            public boolean accept( PaxosClusterMemberEvents.ClusterMembersSnapshot item )
            {
                // Refuse snapshots that contain another instance claiming our
                // server id — joining would corrupt cluster state.
                for ( MemberIsAvailable member : item.getCurrentAvailableMembers() )
                {
                    if ( member.getRoleUri().getScheme().equals( "ha" ) )
                    {
                        if ( HighAvailabilityModeSwitcher.getServerId( member.getRoleUri() ) ==
                                config.get( HaSettings.server_id ) )
                        {
                            msgLog.error( String.format( "Instance %s has the same serverId as ours (%d) - will not join this cluster",
                                    member.getRoleUri(), config.get( HaSettings.server_id ) ) );
                            return false;
                        }
                    }
                }
                return true;
            }
        } );
        // Force a reelection after we enter the cluster
        // and when that election is finished refresh the snapshot
        clusterClient.addClusterListener( new ClusterListener.Adapter()
        {
            @Override
            public void enteredCluster( ClusterConfiguration clusterConfiguration )
            {
                clusterClient.performRoleElections();
            }

            @Override
            public void elected( String role, URI electedMember )
            {
                if (role.equals( ClusterConfiguration.COORDINATOR ))
                {
                    clusterClient.refreshSnapshot();
                    // One-shot listener: job done once a coordinator is elected.
                    clusterClient.removeClusterListener( this );
                }
            }
        });
        HighAvailabilityMemberContext localMemberContext = new SimpleHighAvailabilityMemberContext( clusterClient );
        PaxosClusterMemberAvailability localClusterMemberAvailability = new PaxosClusterMemberAvailability(
                clusterClient, clusterClient, logging );
        // Here we decide whether to start in compatibility mode or not
        if ( !config.get( HaSettings.coordinators ).isEmpty() &&
                !config.get( HaSettings.coordinators ).get( 0 ).toString().trim().equals( "" ) )
        {
            compatibilityMode = true;
            compatibilityLifecycle = new LinkedList<Lifecycle>();
            Switchover switchover = new ZooToPaxosSwitchover( life, paxosLife, compatibilityLifecycle,
                    clusterEventsDelegateInvocationHandler, memberContextDelegateInvocationHandler,
                    clusterMemberAvailabilityDelegateInvocationHandler, localClusterEvents,
                    localMemberContext, localClusterMemberAvailability );
            ZooKeeperHighAvailabilityEvents zkEvents =
                    new ZooKeeperHighAvailabilityEvents( logging, config, switchover );
            compatibilityLifecycle.add( zkEvents );
            memberContextDelegateInvocationHandler.setDelegate(
                    new SimpleHighAvailabilityMemberContext( zkEvents ) );
            clusterEventsDelegateInvocationHandler.setDelegate( zkEvents );
            clusterMemberAvailabilityDelegateInvocationHandler.setDelegate( zkEvents );
            // Paxos Events added to life, won't be stopped because it isn't started yet
            paxosLife.add( localClusterEvents );
        }
        else
        {
            memberContextDelegateInvocationHandler.setDelegate( localMemberContext );
            clusterEventsDelegateInvocationHandler.setDelegate( localClusterEvents );
            clusterMemberAvailabilityDelegateInvocationHandler.setDelegate( localClusterMemberAvailability );
        }
        memberStateMachine = new HighAvailabilityMemberStateMachine( memberContext, accessGuard, clusterEvents,
                logging.getLogger( HighAvailabilityMemberStateMachine.class ) );
        if ( compatibilityMode )
        {
            /*
             * In here goes stuff that needs to stop when switching. If added in paxosLife too they will be restarted.
             * Adding to life starts them when life.start is called - adding them to compatibilityLifeCycle shuts them
             * down on switchover
             */
            compatibilityLifecycle.add( memberStateMachine );
//            compatibilityLifecycle.add( highAvailabilityModeSwitcher );
            compatibilityLifecycle.add( (Lifecycle) clusterEvents );
            life.add( memberStateMachine );
//            life.add( highAvailabilityModeSwitcher );
            life.add( clusterEvents );
        }
        /*
         * Here goes stuff that needs to start when paxos kicks in:
         * In Normal (non compatibility mode): That means they start normally
         * In Compatibility Mode: That means they start when switchover happens. If added to life too they will be
         * restarted
         */
        paxosLife.add( memberStateMachine );
        paxosLife.add( clusterEvents );
        // highAvailabilityModeSwitcher left for reference, has been moved to createTxIdGenerator
//        paxosLife.add( highAvailabilityModeSwitcher );
        paxosLife.add( clusterClient );
        paxosLife.add( localClusterMemberAvailability );
        DelegateInvocationHandler<TxHook> txHookDelegate = new DelegateInvocationHandler<TxHook>();
        TxHook txHook = (TxHook) Proxy.newProxyInstance( TxHook.class.getClassLoader(), new Class[]{TxHook.class},
                txHookDelegate );
        new TxHookModeSwitcher( memberStateMachine, txHookDelegate,
                master, new TxHookModeSwitcher.RequestContextFactoryResolver()
        {
            @Override
            public RequestContextFactory get()
            {
                // Resolved lazily: requestContextFactory is assigned in createXaDataSourceManager().
                return requestContextFactory;
            }
        }, dependencyResolver );
        return txHook;
    }

    /**
     * Creates the TxIdGenerator proxy and the slave bookkeeping (ClusterMembers,
     * HighAvailabilitySlaves) its mode switcher needs.
     */
    @Override
    protected TxIdGenerator createTxIdGenerator()
    {
        DelegateInvocationHandler<TxIdGenerator> txIdGeneratorDelegate = new DelegateInvocationHandler<TxIdGenerator>();
        TxIdGenerator txIdGenerator =
                (TxIdGenerator) Proxy.newProxyInstance( TxIdGenerator.class.getClassLoader(),
                        new Class[]{TxIdGenerator.class}, txIdGeneratorDelegate );
        members = new ClusterMembers( clusterClient, clusterClient, clusterClient, clusterEvents );
        slaves = life.add( new HighAvailabilitySlaves( members, clusterClient, new DefaultSlaveFactory(
                xaDataSourceManager, logging, config.get( HaSettings.max_concurrent_channels_per_slave ),
                config.get( HaSettings.com_chunk_size ).intValue() ) ) );
        new TxIdGeneratorModeSwitcher( memberStateMachine, txIdGeneratorDelegate,
                (HaXaDataSourceManager) xaDataSourceManager, master, requestContextFactory, msgLog, config, slaves );
        return txIdGenerator;
    }

    @Override
    protected IdGeneratorFactory createIdGeneratorFactory()
    {
        idGeneratorFactory = new HaIdGeneratorFactory( master, memberStateMachine, logging );
        HighAvailabilityModeSwitcher highAvailabilityModeSwitcher = new HighAvailabilityModeSwitcher( masterDelegateInvocationHandler,
                clusterMemberAvailability, memberStateMachine, this, (HaIdGeneratorFactory) idGeneratorFactory, config,
                logging );
        /*
         * We always need the mode switcher and we need it to restart on switchover. So:
         * 1) if in compatibility mode, it must be added in all 3 - to start on start and restart on switchover
         * 2) if not in compatibility mode it must be added in paxosLife, which is started anyway.
         */
        paxosLife.add( highAvailabilityModeSwitcher );
        if ( compatibilityMode )
        {
            compatibilityLifecycle.add( 1, highAvailabilityModeSwitcher );
            life.add( highAvailabilityModeSwitcher );
        }
        return idGeneratorFactory;
    }

    @Override
    protected LockManager createLockManager()
    {
        DelegateInvocationHandler<LockManager> lockManagerDelegate = new DelegateInvocationHandler<LockManager>();
        LockManager lockManager =
                (LockManager) Proxy.newProxyInstance( LockManager.class.getClassLoader(),
                        new Class[]{LockManager.class}, lockManagerDelegate );
        new LockManagerModeSwitcher( memberStateMachine, lockManagerDelegate, txManager, txHook,
                (HaXaDataSourceManager) xaDataSourceManager, master, requestContextFactory, accessGuard, config );
        return lockManager;
    }

    @Override
    protected RelationshipTypeCreator createRelationshipTypeCreator()
    {
        DelegateInvocationHandler<RelationshipTypeCreator> relationshipTypeCreatorDelegate =
                new DelegateInvocationHandler<RelationshipTypeCreator>();
        RelationshipTypeCreator relationshipTypeCreator =
                (RelationshipTypeCreator) Proxy.newProxyInstance( RelationshipTypeCreator.class.getClassLoader(),
                        new Class[]{RelationshipTypeCreator.class}, relationshipTypeCreatorDelegate );
        new RelationshipTypeCreatorModeSwitcher( memberStateMachine, relationshipTypeCreatorDelegate,
                (HaXaDataSourceManager) xaDataSourceManager, master, requestContextFactory );
        return relationshipTypeCreator;
    }

    @Override
    protected Caches createCaches()
    {
        return new HaCaches( msgLog );
    }

    @Override
    protected void createNeoDataSource()
    {
        // no op, we must wait to join the cluster to do stuff
    }

    @Override
    protected KernelData createKernelData()
    {
        this.lastUpdateTime = new LastUpdateTime();
        return new HighlyAvailableKernelData( this, members,
                new ClusterDatabaseInfoProvider( members, new OnDiskLastTxIdGetter( new File( getStoreDir() ) ),
                        lastUpdateTime ) );
    }

    /**
     * Runs recovery follow-up once the member has fully become MASTER or SLAVE.
     * On failure, bounces the member state machine to force a re-election.
     */
    @Override
    protected void registerRecovery()
    {
        memberStateMachine.addHighAvailabilityMemberListener( new HighAvailabilityMemberListener()
        {
            @Override
            public void masterIsElected( HighAvailabilityMemberChangeEvent event )
            {
            }

            @Override
            public void masterIsAvailable( HighAvailabilityMemberChangeEvent event )
            {
                // Only react to the final TO_MASTER -> MASTER transition.
                if ( event.getOldState().equals( HighAvailabilityMemberState.TO_MASTER ) && event.getNewState().equals(
                        HighAvailabilityMemberState.MASTER ) )
                {
                    doAfterRecoveryAndStartup();
                }
            }

            @Override
            public void slaveIsAvailable( HighAvailabilityMemberChangeEvent event )
            {
                // Only react to the final TO_SLAVE -> SLAVE transition.
                if ( event.getOldState().equals( HighAvailabilityMemberState.TO_SLAVE ) && event.getNewState().equals(
                        HighAvailabilityMemberState.SLAVE ) )
                {
                    doAfterRecoveryAndStartup();
                }
            }

            @Override
            public void instanceStops( HighAvailabilityMemberChangeEvent event )
            {
            }

            private void doAfterRecoveryAndStartup()
            {
                try
                {
                    synchronized ( xaDataSourceManager )
                    {
                        HighlyAvailableGraphDatabase.this.doAfterRecoveryAndStartup();
                    }
                }
                catch ( Throwable throwable )
                {
                    msgLog.error( "Post recovery error", throwable );
                    // Best-effort restart of the state machine so the instance re-joins the cluster.
                    try
                    {
                        memberStateMachine.stop();
                    }
                    catch ( Throwable throwable1 )
                    {
                        msgLog.warn( "Could not stop", throwable1 );
                    }
                    try
                    {
                        memberStateMachine.start();
                    }
                    catch ( Throwable throwable1 )
                    {
                        msgLog.warn( "Could not start", throwable1 );
                    }
                }
            }
        } );
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "[" + storeDir + "]";
    }

    /** Returns the name of the current HA member state (e.g. MASTER, SLAVE). */
    public String getInstanceState()
    {
        return memberStateMachine.getCurrentState().name();
    }

    /** True when this instance is currently the cluster master. */
    public boolean isMaster()
    {
        return memberStateMachine.getCurrentState() == HighAvailabilityMemberState.MASTER;
    }

    /**
     * Wraps the base resolver, additionally serving the HA-specific components
     * created by this class when the base resolver cannot satisfy the request.
     */
    @Override
    public DependencyResolver getDependencyResolver()
    {
        return new DependencyResolver()
        {
            @Override
            public <T> T resolveDependency( Class<T> type ) throws IllegalArgumentException
            {
                T result;
                try
                {
                    result = dependencyResolver.resolveDependency( type );
                }
                catch ( IllegalArgumentException e )
                {
                    if ( ClusterMemberEvents.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( clusterEvents );
                    }
                    else if ( ClusterMemberAvailability.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( clusterMemberAvailability );
                    }
                    else if ( UpdatePuller.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( updatePuller );
                    }
                    else if ( Slaves.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( slaves );
                    }
                    else if ( ClusterClient.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( clusterClient );
                    }
                    else if ( ClusterMembers.class.isAssignableFrom( type ) )
                    {
                        result = type.cast( members );
                    }
                    else
                    {
                        throw e;
                    }
                }
                return result;
            }
        };
    }

    /**
     * At end of startup, wait for instance to become either master or slave.
     * <p/>
     * This helps users who expect to be able to access the instance after
     * the constructor is run.
     */
    private class StartupWaiter extends LifecycleAdapter
    {
        @Override
        public void start() throws Throwable
        {
            accessGuard.await( stateSwitchTimeoutMillis );
        }
    }
}
|
qaz4042/beBetter
|
3-common/src/main/java/bebetter/basejpa/cfg/sub/IRealm.java
|
package bebetter.basejpa.cfg.sub;
import cn.hutool.crypto.digest.DigestUtil;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
/**
 * Base Shiro realm: concrete realms implement authentication and authorization,
 * and share the password-hashing helper defined here.
 */
public abstract class IRealm extends AuthorizingRealm {
    /**
     * Hashes a plaintext password.
     *
     * NOTE(review): MD5 is cryptographically weak for password storage; it is
     * kept because stored credentials depend on it — changing the digest would
     * require re-hashing every existing password.
     *
     * @param password plaintext password, either a String or a char[]
     * @return MD5 hex digest of the password
     */
    public String passwordEncoder(Object password) {
        String passwordStr;
        if (password instanceof String) {
            passwordStr = (String) password;
        } else {
            passwordStr = new String((char[]) password);// Shiro hands credentials over as char[] by default
        }
        return DigestUtil.md5Hex(passwordStr);
    }
    /**
     * Authenticates a login attempt and verifies the password.
     *
     * @param authenticationToken parameters from the LoginController request
     * @return authentication info for the subject
     */
    @Override
    abstract protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken authenticationToken) throws AuthenticationException;
    /**
     * Populates role and permission information for the given principal.
     */
    @Override
    abstract protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principalCollection);
}
|
dakoto747/datastructuresandalgorithms
|
src/main/java/com/dakoto/datastructuresandalgorithms/dynamic/programming/DynamicProgrammingPrimerOld.java
|
<filename>src/main/java/com/dakoto/datastructuresandalgorithms/dynamic/programming/DynamicProgrammingPrimerOld.java
package com.dakoto.datastructuresandalgorithms.dynamic.programming;
/**
 * Personal study notes on dynamic programming (based on the MIT 6.006 lectures
 * linked below). Every method simply returns a block of prose — there is no
 * algorithmic behavior here; the strings are the payload and must not change.
 */
public class DynamicProgrammingPrimerOld {
    //also think about backtracking
    //n-choose-k
    /**
     YOU MUST REWATCH ALL THE VIDEOS!!!!
     https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-006-introduction-to-algorithms-fall-2011/lecture-videos/lecture-19-dynamic-programming-i-fibonacci-shortest-paths/
     https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-006-introduction-to-algorithms-fall-2011/lecture-videos/lecture-20-dynamic-programming-ii-text-justification-blackjack/
     https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-006-introduction-to-algorithms-fall-2011/lecture-videos/lecture-21-dp-iii-parenthesization-edit-distance-knapsack/
     https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-006-introduction-to-algorithms-fall-2011/lecture-videos/lecture-22-dp-iv-guitar-fingering-tetris-super-mario-bros/
     */
    // One-line summary of the recurring DP intuition.
    public String main_intuition = "Permutations: k in n ways to do things; the guesses usually have to do with the k things";
    /** Short checklist form of the DP recipe. */
    public String dpSteps(){
        return "define subproblems"+
                "\n define guesses (all choices and solves part of subproblem)"+
                "\n define recurrence; define DP"+
                "\n memoize or build bottom up"+
                "\n Remember, we guess EVERYTHING!"+
                "\n parent pointers: remember which guess was best"+
                "\n validate with topological sort: for bottom-up approaches only!";
    }
    /** Expanded five-step DP recipe with commentary. */
    public String general5Steps(){
        return "\n Step 1. define subproblems"+
                "\n Step 2. define guesses (all choices and solves part of solution)"+
                "\n Step 3. define recurrence; define the Dynamic Program"+
                "\n Step 4. Recurse or memoize| check that the recurrence is acyclic, validate with topo-sort if you're doing bottom up "+
                "\n Step 5. Solve the original problem!"+
                "\n ...."+
                "\n Complexity = time per sub-problem * number of subproblems"+
                "\n ...."+
                "\n validate with topological sort: for bottom-up approaches only!"+
                "\n ...."+
                "\n Step 1 is one of the hardest parts of DP ... with some practise, Step 2 is pretty easy ... Step 1 is usually where most of the insight comes in. "+
                "\n ...."+
                "\n Note that he often starts with Step 2 first"+
                "\n ...."+
                "";
    }
    /** Placeholder for subproblem-selection tips on strings/sequences (not yet filled in). */
    public String generalTipsForStep1StringsOrSequences(){
        return "\n Tips for Step 1: how to choose subproblems for STRINGS or SEQUENCES"+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "";
    }
    /** Placeholder for general subproblem-selection tips (not yet filled in). */
    public String generalTipsForStep1(){
        return "\n Tips for Step 1: how to choose subproblems"+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "\n ...."+
                "";
    }
    /** Longer-form intuition notes about recursion and guessing. */
    public String moreIntuition(){
        return "Intuition of of recursion is remembering that there is a call STACK. and that function calls are references on that stack"+
                "\n Recursion: I want to do this thing again and get the output and add it to this."+
                "\n because I've calculated(this calculations is sometimes the guessing in DP) the 1/nth answer here, I need the 1/n-1th answer to complete this answer(nth answer)"+
                "\n or, assuming n-1th part of my problem is solved, how do we solve 'n'?"+
                "\n Sometimes we want to work from back to front of 'N' or front to back: Assume part of it is solved, how to we solve the next bit? this gives insight on the kind of input and recurrence we want to make "+
                "\n Solving the next bit usually involves the bit of 'doing ALL possible guesses'"+
                "\n Guessing works because we memoize it"+
                "\n Visualize a Directe Acyclic Graph(DAG) of the solution. Nodes are decisions and paths/edges usually have weightings of the reward for the decision and we want to figure the shortest path. ";
    }
    /** Common subproblem shapes. */
    public String methods(){
        return "suffixes, prefixes, substrings(same as suffixes? changing size)";
    }
    /** Typical problem domains where DP applies. */
    public String uses(){
        return "Strings, sequences, edit distance(dna, spelling/auto-correct, largest subsequence),"
                + "\n usually find the best way to do things situation";
    }
}
class MinimumCoinsForAmountProblem{
    // Coin denominations available to the greedy algorithm, sorted ascending.
    // NOTE(review): the original left this array undeclared/uninitialised; US-style
    // denominations are assumed here — confirm against the intended coin system.
    private static final int[] SORTED_COINS = {1, 5, 10, 25};

    /**
     * Number of coins a largest-coin-first greedy strategy uses to make up
     * {@code amount}.
     *
     * Greedy is optimal for canonical coin systems such as {1, 5, 10, 25}; for
     * arbitrary denominations a DP formulation would be required.
     * (The original did not compile: it divided an int by an array, referenced an
     * undefined variable, and recursed without a base case.)
     *
     * @param amount non-negative amount to make change for
     * @return total number of coins used (0 for a non-positive amount)
     */
    public static int minimumCoinsForAmountProblem(int amount){
        int remaining = amount;
        int coinCount = 0;
        // Walk denominations from largest to smallest, taking as many of each as fit.
        for (int i = SORTED_COINS.length - 1; i >= 0 && remaining > 0; i--){
            coinCount += remaining / SORTED_COINS[i];
            remaining = remaining % SORTED_COINS[i];
        }
        return coinCount;
    }
}
class LongestCommonSubsequenceProblem{
    /**
     * Length of the longest common subsequence of the two strings, computed by
     * plain recursion.
     *
     * Recurrence: if the first characters match, they contribute 1 and both
     * strings advance; otherwise take the best of advancing either string.
     * Runs in exponential time without memoisation — fine for short inputs only.
     * (Original defects fixed: missing empty-string base case threw
     * StringIndexOutOfBoundsException, and a trailing comma inside Math.max did
     * not compile.)
     *
     * @param string1 first string (may be empty)
     * @param string2 second string (may be empty)
     * @return length of the longest common subsequence
     */
    public static int longestSubsequence(String string1, String string2){
        // Base case: an empty string shares no subsequence with anything.
        if (string1.isEmpty() || string2.isEmpty()){
            return 0;
        }
        if (string1.charAt(0) == string2.charAt(0)){
            return 1 + longestSubsequence(string1.substring(1), string2.substring(1));
        }
        return Math.max(
                longestSubsequence(string1, string2.substring(1)),
                longestSubsequence(string1.substring(1), string2));
    }
}
// Placeholder for the text-justification DP problem (MIT 6.006 lecture 20); no implementation yet.
class TextJustification{
}
class TowerOfHanoi{
/*
HAVE FUN WITH IT.
The towers are represented using stacks.
what are we guessing?
-
what is recurrence?
- recursion - 1/nth part of problem + 1/n-1th part of problem
given the next disk and the stack, where to place it.
given disk i and it's current stack, figure out where to put it next.
given the top and length of each stack, find out the best movement: i.e. what stat
guess every move and calculate the minimum move.
~~~~~> some people think through code; it's okay to mention that.
guess
- assuming we want to figure t
s
*/
int cost(int disk, Stack stack){
if(stack.peek() >= disk){
return 0;
}else{
return Integer.MAXIMUM_VALUE;
}
}
public Stack[] nextMove(Stack[] stacks){
Stack stack1 = stacks[0];
Stack stack2 = stacks[1];
Stack stack3 = stacks[2];
int cost = cost(stack1.pop(), stack2);
nextMove = next
}
}
// Placeholder: no knapsack implementation yet, only the author's DP reminders below.
class KnapsackProblem {
    /*
    Recursion: I want to do this thing again and get the output and add it to this.
    Because I've calculated (this calculation is sometimes the guessing in DP) the 1/nth answer here,
    I need the 1/(n-1)th answer to complete this answer (the nth answer) —
    or, assuming the (n-1)th part of my problem is solved, how do we solve "n"?
    Sometimes we want to work from back to front of "N" or front to back: assume part of it is solved,
    how do we solve the next bit? This gives insight on the kind of input and recurrence we want to make.
    Solving the next bit usually involves "doing ALL possible guesses".
    Visualize a Directed Acyclic Graph (DAG) of the solution. Nodes are decisions and edges carry the
    reward for each decision; we want to figure out the shortest path.
    */
}
class LongestWords{
    /*
    Given a list of words, write a program to find the longest word made of other words in the list.
    e.g. "para", "sit", "parasit", "parasitic", "ic"
    Sketch: put the words in a hash map with words as keys, then test candidates longest-first.
    */
    /**
     * Returns a copy of {@code words} sorted by descending length, so the
     * longest candidates can be checked first.
     *
     * NOTE(review): the original declaration had no return type (it did not
     * compile) and the sorting loop was empty; the actual "made of other words"
     * composition check is still TODO.
     *
     * @param words input words (not modified)
     * @return new array ordered longest-first
     */
    static String[] baseCase(String[] words){
        String[] sorted = words.clone();
        Arrays.sort(sorted, (a, b) -> b.length() - a.length());
        return sorted;
    }
}
/**
 * Orders a wrapped string against other strings by length only: negative when
 * the wrapped string is shorter, positive when longer, zero when equal length.
 * The result is the raw length difference (not clamped to -1/0/1).
 */
class StringLengthComparator implements Comparable<String>{
    String string = "";

    /** Wraps the string whose length will be compared. */
    public StringLengthComparator(String value){
        this.string = value;
    }

    @Override
    public int compareTo(String other){
        int ownLength = this.string.length();
        int otherLength = other.length();
        return ownLength - otherLength;
    }
}
|
likuisuper/Java-Notes
|
JavaSourceLearn/javafx-src/com/sun/javafx/tk/quantum/PerformanceTrackerImpl.java
|
/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.javafx.tk.quantum;
import com.sun.javafx.perf.PerformanceTracker;
/**
 * Class containing implementation for logging, and performance tracking.
 * Pure delegator: every operation forwards to the shared
 * PerformanceTrackerHelper instance; no state is kept here beyond that reference.
 */
final class PerformanceTrackerImpl extends PerformanceTracker {
    // Process-wide helper that owns the actual logging/timing state.
    final PerformanceTrackerHelper helper =
            PerformanceTrackerHelper.getInstance();

    public PerformanceTrackerImpl() {
        // Mirror the helper's logging flag so the base class gates events consistently.
        setPerfLoggingEnabled(helper.isPerfLoggingEnabled());
    }
    /** Records a single named performance event via the helper. */
    @Override public void doLogEvent(String s) {
        helper.logEvent(s);
    }
    /** Flushes the accumulated event log via the helper. */
    @Override public void doOutputLog() {
        helper.outputLog();
    }
    /** Timestamp source used for event timing, supplied by the helper. */
    @Override public long nanoTime() {
        return helper.nanoTime();
    }
}
|
UniStuttgart-VISUS/OcclusionCulling
|
CPUT/CPUTTextureDX11.cpp
|
<reponame>UniStuttgart-VISUS/OcclusionCulling
////////////////////////////////////////////////////////////////////////////////
// Copyright 2017 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
////////////////////////////////////////////////////////////////////////////////
#include "CPUTTextureDX11.h"
#include "DDSTextureLoader.h"
// --- BEGIN dds.h
// On-disk pixel-format descriptor embedded in the DDS header (DDS_HEADER::ddspf).
// Field meanings follow the Microsoft DDS file-format layout — see MSDN DDS_PIXELFORMAT.
struct DDS_PIXELFORMAT
{
    DWORD dwSize;        // structure size in bytes (always sizeof(DDS_PIXELFORMAT); see DDSPF_* tables below)
    DWORD dwFlags;       // which members are valid: DDS_FOURCC, DDS_RGB, DDS_RGBA, ...
    DWORD dwFourCC;      // compressed-format code (e.g. 'DXT1') when DDS_FOURCC is set
    DWORD dwRGBBitCount; // bits per pixel for uncompressed (RGB/RGBA) formats
    DWORD dwRBitMask;    // red channel bit mask within a pixel
    DWORD dwGBitMask;    // green channel bit mask
    DWORD dwBBitMask;    // blue channel bit mask
    DWORD dwABitMask;    // alpha channel bit mask (used with DDS_RGBA layouts)
};
// ---- Pixel-format flag bits (subset of the DDPF_* flags from ddraw.h). ----
#define DDS_FOURCC      0x00000004  // DDPF_FOURCC
#define DDS_RGB         0x00000040  // DDPF_RGB
#define DDS_RGBA        0x00000041  // DDPF_RGB | DDPF_ALPHAPIXELS
// ---- Canonical pixel formats for the legacy (non-DX10) DDS header. ----
const DDS_PIXELFORMAT DDSPF_DXT1 =
    { sizeof(DDS_PIXELFORMAT), DDS_FOURCC, MAKEFOURCC('D','X','T','1'), 0, 0, 0, 0, 0 };
const DDS_PIXELFORMAT DDSPF_DXT2 =
    { sizeof(DDS_PIXELFORMAT), DDS_FOURCC, MAKEFOURCC('D','X','T','2'), 0, 0, 0, 0, 0 };
const DDS_PIXELFORMAT DDSPF_DXT3 =
    { sizeof(DDS_PIXELFORMAT), DDS_FOURCC, MAKEFOURCC('D','X','T','3'), 0, 0, 0, 0, 0 };
const DDS_PIXELFORMAT DDSPF_DXT4 =
    { sizeof(DDS_PIXELFORMAT), DDS_FOURCC, MAKEFOURCC('D','X','T','4'), 0, 0, 0, 0, 0 };
const DDS_PIXELFORMAT DDSPF_DXT5 =
    { sizeof(DDS_PIXELFORMAT), DDS_FOURCC, MAKEFOURCC('D','X','T','5'), 0, 0, 0, 0, 0 };
const DDS_PIXELFORMAT DDSPF_A8R8G8B8 =
    { sizeof(DDS_PIXELFORMAT), DDS_RGBA, 0, 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 };
const DDS_PIXELFORMAT DDSPF_A1R5G5B5 =
    { sizeof(DDS_PIXELFORMAT), DDS_RGBA, 0, 16, 0x00007c00, 0x000003e0, 0x0000001f, 0x00008000 };
const DDS_PIXELFORMAT DDSPF_A4R4G4B4 =
    { sizeof(DDS_PIXELFORMAT), DDS_RGBA, 0, 16, 0x00000f00, 0x000000f0, 0x0000000f, 0x0000f000 };
const DDS_PIXELFORMAT DDSPF_R8G8B8 =
    { sizeof(DDS_PIXELFORMAT), DDS_RGB, 0, 24, 0x00ff0000, 0x0000ff00, 0x000000ff, 0x00000000 };
const DDS_PIXELFORMAT DDSPF_R5G6B5 =
    { sizeof(DDS_PIXELFORMAT), DDS_RGB, 0, 16, 0x0000f800, 0x000007e0, 0x0000001f, 0x00000000 };
// ---- DDS_HEADER::dwHeaderFlags bits. ----
#define DDS_HEADER_FLAGS_TEXTURE    0x00001007  // DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT
#define DDS_HEADER_FLAGS_MIPMAP     0x00020000  // DDSD_MIPMAPCOUNT
#define DDS_HEADER_FLAGS_VOLUME     0x00800000  // DDSD_DEPTH
#define DDS_HEADER_FLAGS_PITCH      0x00000008  // DDSD_PITCH
#define DDS_HEADER_FLAGS_LINEARSIZE 0x00080000  // DDSD_LINEARSIZE
// ---- DDS_HEADER::dwSurfaceFlags bits. ----
#define DDS_SURFACE_FLAGS_TEXTURE 0x00001000 // DDSCAPS_TEXTURE
#define DDS_SURFACE_FLAGS_MIPMAP  0x00400008 // DDSCAPS_COMPLEX | DDSCAPS_MIPMAP
#define DDS_SURFACE_FLAGS_CUBEMAP 0x00000008 // DDSCAPS_COMPLEX
// ---- DDS_HEADER::dwCubemapFlags bits: which cube faces are present. ----
#define DDS_CUBEMAP_POSITIVEX 0x00000600 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX
#define DDS_CUBEMAP_NEGATIVEX 0x00000a00 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX
#define DDS_CUBEMAP_POSITIVEY 0x00001200 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY
#define DDS_CUBEMAP_NEGATIVEY 0x00002200 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY
#define DDS_CUBEMAP_POSITIVEZ 0x00004200 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ
#define DDS_CUBEMAP_NEGATIVEZ 0x00008200 // DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ
#define DDS_CUBEMAP_ALLFACES ( DDS_CUBEMAP_POSITIVEX | DDS_CUBEMAP_NEGATIVEX |\
                               DDS_CUBEMAP_POSITIVEY | DDS_CUBEMAP_NEGATIVEY |\
                               DDS_CUBEMAP_POSITIVEZ | DDS_CUBEMAP_NEGATIVEZ )
#define DDS_FLAGS_VOLUME 0x00200000 // DDSCAPS2_VOLUME
// On-disk DDS file header (follows the 'DDS ' magic number); layout must match
// the Microsoft DDS file format exactly — do not reorder or resize fields.
struct DDS_HEADER
{
    DWORD dwSize;            // structure size in bytes
    DWORD dwHeaderFlags;     // DDS_HEADER_FLAGS_* bits describing which fields are valid
    DWORD dwHeight;          // surface height in pixels
    DWORD dwWidth;           // surface width in pixels
    DWORD dwPitchOrLinearSize; // row pitch (DDS_HEADER_FLAGS_PITCH) or total size (DDS_HEADER_FLAGS_LINEARSIZE)
    DWORD dwDepth;  // only if DDS_HEADER_FLAGS_VOLUME is set in dwHeaderFlags
    DWORD dwMipMapCount;     // number of mip levels (valid with DDS_HEADER_FLAGS_MIPMAP)
    DWORD dwReserved1[11];   // unused padding, kept for on-disk layout
    DDS_PIXELFORMAT ddspf;   // pixel format of the stored surface
    DWORD dwSurfaceFlags;    // DDS_SURFACE_FLAGS_* capability bits
    DWORD dwCubemapFlags;    // DDS_CUBEMAP_* / DDS_FLAGS_VOLUME bits
    DWORD dwReserved2[3];    // unused padding, kept for on-disk layout
};
// ---- END dds.h
// TODO: Would be nice to find a better place for this decl. But, not another file just for this.
// Names for every DXGI_FORMAT value, indexed directly by the enum value
// (see GetDXGIFormatString()). Entry order must therefore match the
// DXGI_FORMAT enum exactly, from DXGI_FORMAT_UNKNOWN (0) through
// DXGI_FORMAT_BC7_UNORM_SRGB.
const cString gDXGIFormatNames[] =
{
    _L("DXGI_FORMAT_UNKNOWN"),
    _L("DXGI_FORMAT_R32G32B32A32_TYPELESS"),
    _L("DXGI_FORMAT_R32G32B32A32_FLOAT"),
    _L("DXGI_FORMAT_R32G32B32A32_UINT"),
    _L("DXGI_FORMAT_R32G32B32A32_SINT"),
    _L("DXGI_FORMAT_R32G32B32_TYPELESS"),
    _L("DXGI_FORMAT_R32G32B32_FLOAT"),
    _L("DXGI_FORMAT_R32G32B32_UINT"),
    _L("DXGI_FORMAT_R32G32B32_SINT"),
    _L("DXGI_FORMAT_R16G16B16A16_TYPELESS"),
    _L("DXGI_FORMAT_R16G16B16A16_FLOAT"),
    _L("DXGI_FORMAT_R16G16B16A16_UNORM"),
    _L("DXGI_FORMAT_R16G16B16A16_UINT"),
    _L("DXGI_FORMAT_R16G16B16A16_SNORM"),
    _L("DXGI_FORMAT_R16G16B16A16_SINT"),
    _L("DXGI_FORMAT_R32G32_TYPELESS"),
    _L("DXGI_FORMAT_R32G32_FLOAT"),
    _L("DXGI_FORMAT_R32G32_UINT"),
    _L("DXGI_FORMAT_R32G32_SINT"),
    _L("DXGI_FORMAT_R32G8X24_TYPELESS"),
    _L("DXGI_FORMAT_D32_FLOAT_S8X24_UINT"),
    _L("DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS"),
    _L("DXGI_FORMAT_X32_TYPELESS_G8X24_UINT"),
    _L("DXGI_FORMAT_R10G10B10A2_TYPELESS"),
    _L("DXGI_FORMAT_R10G10B10A2_UNORM"),
    _L("DXGI_FORMAT_R10G10B10A2_UINT"),
    _L("DXGI_FORMAT_R11G11B10_FLOAT"),
    _L("DXGI_FORMAT_R8G8B8A8_TYPELESS"),
    _L("DXGI_FORMAT_R8G8B8A8_UNORM"),
    _L("DXGI_FORMAT_R8G8B8A8_UNORM_SRGB"),
    _L("DXGI_FORMAT_R8G8B8A8_UINT"),
    _L("DXGI_FORMAT_R8G8B8A8_SNORM"),
    _L("DXGI_FORMAT_R8G8B8A8_SINT"),
    _L("DXGI_FORMAT_R16G16_TYPELESS"),
    _L("DXGI_FORMAT_R16G16_FLOAT"),
    _L("DXGI_FORMAT_R16G16_UNORM"),
    _L("DXGI_FORMAT_R16G16_UINT"),
    _L("DXGI_FORMAT_R16G16_SNORM"),
    _L("DXGI_FORMAT_R16G16_SINT"),
    _L("DXGI_FORMAT_R32_TYPELESS"),
    _L("DXGI_FORMAT_D32_FLOAT"),
    _L("DXGI_FORMAT_R32_FLOAT"),
    _L("DXGI_FORMAT_R32_UINT"),
    _L("DXGI_FORMAT_R32_SINT"),
    _L("DXGI_FORMAT_R24G8_TYPELESS"),
    _L("DXGI_FORMAT_D24_UNORM_S8_UINT"),
    _L("DXGI_FORMAT_R24_UNORM_X8_TYPELESS"),
    _L("DXGI_FORMAT_X24_TYPELESS_G8_UINT"),
    _L("DXGI_FORMAT_R8G8_TYPELESS"),
    _L("DXGI_FORMAT_R8G8_UNORM"),
    _L("DXGI_FORMAT_R8G8_UINT"),
    _L("DXGI_FORMAT_R8G8_SNORM"),
    _L("DXGI_FORMAT_R8G8_SINT"),
    _L("DXGI_FORMAT_R16_TYPELESS"),
    _L("DXGI_FORMAT_R16_FLOAT"),
    _L("DXGI_FORMAT_D16_UNORM"),
    _L("DXGI_FORMAT_R16_UNORM"),
    _L("DXGI_FORMAT_R16_UINT"),
    _L("DXGI_FORMAT_R16_SNORM"),
    _L("DXGI_FORMAT_R16_SINT"),
    _L("DXGI_FORMAT_R8_TYPELESS"),
    _L("DXGI_FORMAT_R8_UNORM"),
    _L("DXGI_FORMAT_R8_UINT"),
    _L("DXGI_FORMAT_R8_SNORM"),
    _L("DXGI_FORMAT_R8_SINT"),
    _L("DXGI_FORMAT_A8_UNORM"),
    _L("DXGI_FORMAT_R1_UNORM"),
    _L("DXGI_FORMAT_R9G9B9E5_SHAREDEXP"),
    _L("DXGI_FORMAT_R8G8_B8G8_UNORM"),
    _L("DXGI_FORMAT_G8R8_G8B8_UNORM"),
    _L("DXGI_FORMAT_BC1_TYPELESS"),
    _L("DXGI_FORMAT_BC1_UNORM"),
    _L("DXGI_FORMAT_BC1_UNORM_SRGB"),
    _L("DXGI_FORMAT_BC2_TYPELESS"),
    _L("DXGI_FORMAT_BC2_UNORM"),
    _L("DXGI_FORMAT_BC2_UNORM_SRGB"),
    _L("DXGI_FORMAT_BC3_TYPELESS"),
    _L("DXGI_FORMAT_BC3_UNORM"),
    _L("DXGI_FORMAT_BC3_UNORM_SRGB"),
    _L("DXGI_FORMAT_BC4_TYPELESS"),
    _L("DXGI_FORMAT_BC4_UNORM"),
    _L("DXGI_FORMAT_BC4_SNORM"),
    _L("DXGI_FORMAT_BC5_TYPELESS"),
    _L("DXGI_FORMAT_BC5_UNORM"),
    _L("DXGI_FORMAT_BC5_SNORM"),
    _L("DXGI_FORMAT_B5G6R5_UNORM"),
    _L("DXGI_FORMAT_B5G5R5A1_UNORM"),
    _L("DXGI_FORMAT_B8G8R8A8_UNORM"),
    _L("DXGI_FORMAT_B8G8R8X8_UNORM"),
    _L("DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM"),
    _L("DXGI_FORMAT_B8G8R8A8_TYPELESS"),
    _L("DXGI_FORMAT_B8G8R8A8_UNORM_SRGB"),
    _L("DXGI_FORMAT_B8G8R8X8_TYPELESS"),
    _L("DXGI_FORMAT_B8G8R8X8_UNORM_SRGB"),
    _L("DXGI_FORMAT_BC6H_TYPELESS"),
    _L("DXGI_FORMAT_BC6H_UF16"),
    _L("DXGI_FORMAT_BC6H_SF16"),
    _L("DXGI_FORMAT_BC7_TYPELESS"),
    _L("DXGI_FORMAT_BC7_UNORM"),
    _L("DXGI_FORMAT_BC7_UNORM_SRGB")
};
const cString *gpDXGIFormatNames = gDXGIFormatNames;
//-----------------------------------------------------------------------------
// Loads a texture from disk, wraps it in a CPUTTextureDX11, and registers it
// with the asset library under its absolute path so later lookups share the
// same object. Returns the new texture.
CPUTTexture *CPUTTextureDX11::CreateTexture( const cString &name, const cString &absolutePathAndFilename, bool loadAsSRGB )
{
    // TODO: Delegate to derived class. We don't currently have CPUTTextureDX11
    ID3D11ShaderResourceView *pShaderResourceView = NULL;
    ID3D11Resource           *pTexture = NULL;
    ID3D11Device             *pD3dDevice = CPUT_DX11::GetDevice();
    CPUTResult result = CreateNativeTexture( pD3dDevice, absolutePathAndFilename, &pShaderResourceView, &pTexture, loadAsSRGB );
    ASSERT( CPUTSUCCESS(result), _L("Error loading texture: '")+absolutePathAndFilename );

    CPUTTextureDX11 *pNewTexture = new CPUTTextureDX11();
    pNewTexture->mName = name;
    pNewTexture->SetTextureAndShaderResourceView( pTexture, pShaderResourceView );

    // Drop our local references (pNewTexture is expected to hold its own).
    // Guard against NULL: if the load failed, ASSERT above compiles out in
    // release builds and the original unconditional Release() would crash.
    if( pTexture )            { pTexture->Release(); }
    if( pShaderResourceView ) { pShaderResourceView->Release(); }

    CPUTAssetLibrary::GetAssetLibrary()->AddTexture( absolutePathAndFilename, pNewTexture );
    return pNewTexture;
}
//-----------------------------------------------------------------------------
// RAII wrapper around a read-only Win32 memory-mapped file.
//
// Open() returns a pointer to the mapped view, or NULL on failure (all
// partially-acquired handles are released). NOTE: CreateFileMapping() fails
// for zero-length files, so Open() returns NULL for empty files as well.
// The mapping is released in Close()/the destructor.
class MappedFile
{
    HANDLE hfile;
    HANDLE hmapping;
    void  *view;

    // Non-copyable: copying would let two instances close the same handles.
    // (Declared private and left unimplemented, C++03 style, to match the
    // rest of this codebase.)
    MappedFile(const MappedFile&);
    MappedFile &operator=(const MappedFile&);

public:
    MappedFile() : hfile(INVALID_HANDLE_VALUE), hmapping(NULL), view(0) {}
    ~MappedFile()
    {
        Close();
    }

    // Maps the named file read-only. Returns the view pointer or NULL.
    void *Open(LPCWSTR filename)
    {
        Close();
        if ((hfile = CreateFileW(filename, GENERIC_READ, FILE_SHARE_READ, 0, OPEN_EXISTING, 0, 0)) == INVALID_HANDLE_VALUE ||
            (hmapping = CreateFileMapping(hfile, 0, PAGE_READONLY, 0, 0, NULL)) == NULL ||
            (view = MapViewOfFile(hmapping, FILE_MAP_READ, 0, 0, 0)) == NULL)
            Close();  // releases whichever handles were acquired; leaves view == NULL
        return view;
    }

    // Releases the view, the mapping object, and the file handle (in that
    // order, as required by the Win32 file-mapping API). Safe to call twice.
    void Close()
    {
        if (view != 0)
        {
            UnmapViewOfFile(view);
            view = 0;
        }
        if (hmapping != NULL)
        {
            CloseHandle(hmapping);
            hmapping = NULL;
        }
        if (hfile != INVALID_HANDLE_VALUE)
        {
            CloseHandle(hfile);
            hfile = INVALID_HANDLE_VALUE;
        }
    }
};
// Creates the underlying D3D11 texture and shader-resource view from a .dds
// file using DirectXTK's DDS loader. On success the caller owns one reference
// on each of *ppTexture and *ppShaderResourceView.
CPUTResult CPUTTextureDX11::CreateNativeTexture(
    ID3D11Device *pD3dDevice,
    const cString &fileName,                         // full path to the .dds file
    ID3D11ShaderResourceView **ppShaderResourceView, // out: SRV for binding
    ID3D11Resource **ppTexture,                      // out: the texture resource
    bool ForceLoadAsSRGB                             // request sRGB view of the data
){
    HRESULT hr;
    // NOTE(review): the 8th argument position is the loader's forceSRGB/load
    // flags slot; passing a bool here assumes a DirectXTK revision where that
    // parameter is `bool forceSRGB` (newer revisions take DDS_LOADER_FLAGS) —
    // confirm against the DirectXTK version vendored with this project.
    hr = DirectX::CreateDDSTextureFromFileEx(
        pD3dDevice,
        fileName.c_str(),
        0,//maxsize
        D3D11_USAGE_DEFAULT,
        D3D11_BIND_SHADER_RESOURCE,
        0,
        0,
        ForceLoadAsSRGB,
        ppTexture,
        ppShaderResourceView);
    ASSERT( SUCCEEDED(hr), _L("Failed to load texture: ") + fileName );
    CPUTSetDebugName( *ppTexture, fileName );
    // NOTE(review): this re-checks the same hr as above — the SRV is created
    // inside CreateDDSTextureFromFileEx, so there is no second HRESULT.
    ASSERT( SUCCEEDED(hr), _L("Failed to create texture shader resource view.") );
    CPUTSetDebugName( *ppShaderResourceView, fileName );
    return CPUT_SUCCESS;
}
//-----------------------------------------------------------------------------
// Maps a DXGI format to its sRGB sibling. Both the UNORM and the UNORM_SRGB
// member of a pair map to the sRGB member, so the function is idempotent.
// Returns CPUT_SUCCESS and writes sRGBFormat on a match, otherwise returns
// CPUT_ERROR_UNSUPPORTED_SRGB_IMAGE_FORMAT (sRGBFormat untouched).
CPUTResult CPUTTextureDX11::GetSRGBEquivalent(DXGI_FORMAT inFormat, DXGI_FORMAT& sRGBFormat)
{
    // {non-sRGB, sRGB} sibling pairs; matching either column yields column 1.
    static const DXGI_FORMAT pairs[][2] =
    {
        { DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB },
        { DXGI_FORMAT_B8G8R8X8_UNORM, DXGI_FORMAT_B8G8R8X8_UNORM_SRGB },
        { DXGI_FORMAT_BC1_UNORM,      DXGI_FORMAT_BC1_UNORM_SRGB },
        { DXGI_FORMAT_BC2_UNORM,      DXGI_FORMAT_BC2_UNORM_SRGB },
        { DXGI_FORMAT_BC3_UNORM,      DXGI_FORMAT_BC3_UNORM_SRGB },
        { DXGI_FORMAT_BC7_UNORM,      DXGI_FORMAT_BC7_UNORM_SRGB },
    };
    const int numPairs = (int)(sizeof(pairs)/sizeof(pairs[0]));
    for( int i = 0; i < numPairs; i++ )
    {
        if( inFormat == pairs[i][0] || inFormat == pairs[i][1] )
        {
            sRGBFormat = pairs[i][1];
            return CPUT_SUCCESS;
        }
    }
    return CPUT_ERROR_UNSUPPORTED_SRGB_IMAGE_FORMAT;
}
// This function returns the DXGI string equivalent of the DXGI format for
// error reporting/display purposes
//-----------------------------------------------------------------------------
const cString &CPUTTextureDX11::GetDXGIFormatString(DXGI_FORMAT format)
{
    // Derive the upper bound from the table itself so the range check cannot
    // silently drift out of sync when entries are added to gDXGIFormatNames
    // (the original hard-coded DXGI_FORMAT_BC7_UNORM_SRGB here).
    const int numFormats = (int)(sizeof(gDXGIFormatNames)/sizeof(gDXGIFormatNames[0]));
    ASSERT( ((int)format >= 0) && ((int)format < numFormats), _L("Invalid DXGI Format.") );
    return gpDXGIFormatNames[format];
}
// Given a certain DXGI texture format, does it even have an equivalent sRGB one
//-----------------------------------------------------------------------------
bool CPUTTextureDX11::DoesExistEquivalentSRGBFormat(DXGI_FORMAT inFormat)
{
    // Simply probe the lookup; the out-parameter value is discarded.
    DXGI_FORMAT unused;
    return CPUT_ERROR_UNSUPPORTED_SRGB_IMAGE_FORMAT != GetSRGBEquivalent(inFormat, unused);
}
//-----------------------------------------------------------------------------
D3D11_MAPPED_SUBRESOURCE CPUTTextureDX11::MapTexture( CPUTRenderParameters ¶ms, eCPUTMapType type, bool wait )
{
// Mapping for DISCARD requires dynamic buffer. Create dynamic copy?
// Could easily provide input flag. But, where would we specify? Don't like specifying in the .set file
// Because mapping is something the application wants to do - it isn't inherent in the data.
// Could do Clone() and pass dynamic flag to that.
// But, then we have two. Could always delete the other.
// Could support programatic flag - apply to all loaded models in the .set
// Could support programatic flag on model. Load model first, then load set.
// For now, simply support CopyResource mechanism.
HRESULT hr;
ID3D11Device *pD3dDevice = CPUT_DX11::GetDevice();
CPUTRenderParametersDX *pParamsDX11 = (CPUTRenderParametersDX*)¶ms;
ID3D11DeviceContext *pContext = pParamsDX11->mpContext;
if( !mpTextureStaging )
{
// Annoying. We need to create the texture differently, based on dimension.
D3D11_RESOURCE_DIMENSION dimension;
mpTexture->GetType(&dimension);
switch( dimension )
{
case D3D11_RESOURCE_DIMENSION_TEXTURE1D:
{
D3D11_TEXTURE1D_DESC desc;
((ID3D11Texture1D*)mpTexture)->GetDesc( &desc );
desc.Usage = D3D11_USAGE_STAGING;
switch( type )
{
case CPUT_MAP_READ:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
desc.BindFlags = 0;
break;
case CPUT_MAP_READ_WRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
case CPUT_MAP_WRITE:
case CPUT_MAP_WRITE_DISCARD:
case CPUT_MAP_NO_OVERWRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
};
hr = pD3dDevice->CreateTexture1D( &desc, NULL, (ID3D11Texture1D**)&mpTextureStaging );
ASSERT( SUCCEEDED(hr), _L("Failed to create staging texture") );
break;
}
case D3D11_RESOURCE_DIMENSION_TEXTURE2D:
{
D3D11_TEXTURE2D_DESC desc;
((ID3D11Texture2D*)mpTexture)->GetDesc( &desc );
desc.Usage = D3D11_USAGE_STAGING;
switch( type )
{
case CPUT_MAP_READ:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
desc.BindFlags = 0;
break;
case CPUT_MAP_READ_WRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
case CPUT_MAP_WRITE:
case CPUT_MAP_WRITE_DISCARD:
case CPUT_MAP_NO_OVERWRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
};
hr = pD3dDevice->CreateTexture2D( &desc, NULL, (ID3D11Texture2D**)&mpTextureStaging );
ASSERT( SUCCEEDED(hr), _L("Failed to create staging texture") );
break;
}
case D3D11_RESOURCE_DIMENSION_TEXTURE3D:
{
D3D11_TEXTURE3D_DESC desc;
((ID3D11Texture3D*)mpTexture)->GetDesc( &desc );
desc.Usage = D3D11_USAGE_STAGING;
switch( type )
{
case CPUT_MAP_READ:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
desc.BindFlags = 0;
break;
case CPUT_MAP_READ_WRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
case CPUT_MAP_WRITE:
case CPUT_MAP_WRITE_DISCARD:
case CPUT_MAP_NO_OVERWRITE:
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.BindFlags = 0;
break;
};
hr = pD3dDevice->CreateTexture3D( &desc, NULL, (ID3D11Texture3D**)&mpTextureStaging );
ASSERT( SUCCEEDED(hr), _L("Failed to create staging texture") );
break;
}
default:
ASSERT(0, _L("Unkown texture dimension") );
break;
}
}
else
{
ASSERT( mMappedType == type, _L("Mapping with a different CPU access than creation parameter.") );
}
D3D11_MAPPED_SUBRESOURCE info;
switch( type )
{
case CPUT_MAP_READ:
case CPUT_MAP_READ_WRITE:
// TODO: Copying and immediately mapping probably introduces a stall.
// Expose the copy externally?
// TODO: copy only if changed?
// Copy only first time?
// Copy the GPU version before we read from it.
pContext->CopyResource( mpTextureStaging, mpTexture );
break;
};
hr = pContext->Map( mpTextureStaging, wait ? 0 : D3D11_MAP_FLAG_DO_NOT_WAIT, (D3D11_MAP)type, 0, &info );
mMappedType = type;
return info;
} // CPUTTextureDX11::Map()
//-----------------------------------------------------------------------------
// Unmaps the staging texture previously mapped by MapTexture(). If the map
// granted write access, the staging contents are copied back to the GPU
// texture so the changes become visible to rendering.
void CPUTTextureDX11::UnmapTexture( CPUTRenderParameters &params )
{
    ASSERT( mMappedType != CPUT_MAP_UNDEFINED, _L("Can't unmap a render target that isn't mapped.") );

    CPUTRenderParametersDX *pParamsDX11 = (CPUTRenderParametersDX*)&params;
    ID3D11DeviceContext *pContext = pParamsDX11->mpContext;

    pContext->Unmap( mpTextureStaging, 0 );

    // If we were mapped for write, then copy staging buffer to GPU
    switch( mMappedType )
    {
    case CPUT_MAP_READ:
        // Read-only map: nothing to copy back.
        break;
    case CPUT_MAP_READ_WRITE:
    case CPUT_MAP_WRITE:
    case CPUT_MAP_WRITE_DISCARD:
    case CPUT_MAP_NO_OVERWRITE:
        pContext->CopyResource( mpTexture, mpTextureStaging );
        break;
    };
} // CPUTTextureDX11::Unmap()
|
mariusj/org.openntf.domino
|
domino/core/src/sample/java/org/openntf/conference/graph/examples/GraphExamples.java
|
package org.openntf.conference.graph.examples;
import org.openntf.conference.graph.Attendee;
import org.openntf.conference.graph.ConferenceGraph;
import org.openntf.domino.Session;
import org.openntf.domino.graph2.impl.DGraph;
import org.openntf.domino.utils.Factory;
import org.openntf.domino.utils.Factory.SessionType;
import com.tinkerpop.frames.FramedGraph;
/**
 * Convenience entry points for working with the conference graph:
 * lazily creates the shared {@link ConferenceGraph} and resolves the
 * current session user to an {@link Attendee}.
 */
public class GraphExamples {
	private ConferenceGraph theConference_;

	public GraphExamples() {
	}

	/** Returns the shared conference graph, creating it on first access. */
	public ConferenceGraph getConference() {
		ConferenceGraph conference = theConference_;
		if (conference == null) {
			conference = new ConferenceGraph();
			theConference_ = conference;
		}
		return conference;
	}

	/** Returns the framed graph backing the conference graph. */
	public FramedGraph<DGraph> getGraph() {
		return getConference().getFramedGraph();
	}

	/** Resolves the current session's effective user name to an Attendee, creating it if absent. */
	public Attendee getMe() {
		Session session = Factory.getSession(SessionType.CURRENT);
		return getConference().getAttendee(session.getEffectiveUserName(), true);
	}
}
|
openstreetcam/android
|
app/src/main/java/com/telenav/osv/data/user/model/details/gamification/GamificationLevel.java
|
package com.telenav.osv.data.user.model.details.gamification;
/**
* Class representing information related to level achieved by the user using gamification feature.
* @author horatiuf
*/
public class GamificationLevel {
/**
* Current level number.
*/
private int level;
/**
* The next level which is targeted.
*/
private int target;
/**
* The progress of the current level.
*/
private int progress;
/**
* The name of the level.
*/
private String name;
/**
* Default constructor for the current class.
* @param level {@code int} representing {@link #level}.
* @param target {@code int} representing {@link #target}.
* @param progress {@code int} representing {@link #progress}.
* @param name {@code String} representing {@link #name}.
*/
public GamificationLevel(int level, int target, int progress, String name) {
this.level = level;
this.target = target;
this.progress = progress;
this.name = name;
}
/**
* @return {@code int} representing {@link #level}.
*/
public int getLevel() {
return level;
}
/**
* @return {@code int} representing {@link #target}.
*/
public int getTarget() {
return target;
}
/**
* @return {@code int} representing {@link #progress}.
*/
public int getProgress() {
return progress;
}
/**
* @return {@code String} representing {@link #name}.
*/
public String getName() {
return name;
}
}
|
krattai/AEBL
|
blades/seafile/server/processors/recvcommit-v2-proc.c
|
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#include "common.h"
#include <fcntl.h>
#include <ccnet.h>
#include "net.h"
#include "utils.h"
#include "seafile-session.h"
#include "recvcommit-v2-proc.h"
#include "processors/objecttx-common.h"
#include "seaf-utils.h"
/* Processor states: INIT until the client's session token is verified,
 * then RECV_OBJECT while commit objects stream in. */
enum {
    INIT,
    RECV_OBJECT
};

/* Per-processor private data. */
typedef struct {
    guint32 writer_id;    /* async-write handle into the commit object store */
    gboolean registered;  /* TRUE once writer_id is valid and must be unregistered */
} RecvcommitPriv;

#define GET_PRIV(o)  \
   (G_TYPE_INSTANCE_GET_PRIVATE ((o), SEAFILE_TYPE_RECVCOMMIT_V2_PROC, RecvcommitPriv))

#define USE_PRIV \
    RecvcommitPriv *priv = GET_PRIV(processor);

static int recv_commit_start (CcnetProcessor *processor, int argc, char **argv);
static void handle_update (CcnetProcessor *processor,
                           char *code, char *code_msg,
                           char *content, int clen);
static void
write_done_cb (OSAsyncResult *res, void *cb_data);

G_DEFINE_TYPE (SeafileRecvcommitV2Proc, seafile_recvcommit_v2_proc, CCNET_TYPE_PROCESSOR)
/* Processor teardown: unregister our async writer (if we got far enough to
 * register one) before chaining up to the parent class. */
static void
release_resource (CcnetProcessor *processor)
{
    USE_PRIV;

    if (priv->registered)
        seaf_obj_store_unregister_async_write (seaf->commit_mgr->obj_store,
                                               priv->writer_id);

    CCNET_PROCESSOR_CLASS (seafile_recvcommit_v2_proc_parent_class)->release_resource (processor);
}
/* GObject class init: wire the ccnet processor vtable entries. */
static void
seafile_recvcommit_v2_proc_class_init (SeafileRecvcommitV2ProcClass *klass)
{
    CcnetProcessorClass *proc_class = CCNET_PROCESSOR_CLASS (klass);

    proc_class->name = "recvcommit-v2-proc";
    proc_class->start = recv_commit_start;
    proc_class->handle_update = handle_update;
    proc_class->release_resource = release_resource;

    g_type_class_add_private (klass, sizeof (RecvcommitPriv));
}

/* GObject instance init: no per-instance setup needed (priv is zeroed). */
static void
seafile_recvcommit_v2_proc_init (SeafileRecvcommitV2Proc *processor)
{
}
/* Protocol start: argv[1] must be a session token previously issued to this
 * peer. On success, acknowledge with SC_OK, switch to RECV_OBJECT, and
 * register an async writer for incoming commit objects. On bad args or a
 * failed token check, respond with the matching error code and terminate. */
static int
recv_commit_start (CcnetProcessor *processor, int argc, char **argv)
{
    USE_PRIV;
    char *session_token;

    if (argc != 2) {
        ccnet_processor_send_response (processor, SC_BAD_ARGS, SS_BAD_ARGS, NULL, 0);
        ccnet_processor_done (processor, FALSE);
        return -1;
    }

    session_token = argv[1];
    if (seaf_token_manager_verify_token (seaf->token_mgr,
                                         processor->peer_id,
                                         session_token, NULL) == 0) {
        ccnet_processor_send_response (processor, SC_OK, SS_OK, NULL, 0);
        processor->state = RECV_OBJECT;

        priv->writer_id =
            seaf_obj_store_register_async_write (seaf->commit_mgr->obj_store,
                                                 write_done_cb,
                                                 processor);
        priv->registered = TRUE;

        return 0;
    } else {
        ccnet_processor_send_response (processor,
                                       SC_ACCESS_DENIED, SS_ACCESS_DENIED,
                                       NULL, 0);
        ccnet_processor_done (processor, FALSE);
        return -1;
    }
}
/* Completion callback for each async commit-object write. Only the failure
 * path is handled: report SC_BAD_OBJECT and terminate the processor.
 * Successful writes are currently silent (see FIXME below). */
static void
write_done_cb (OSAsyncResult *res, void *cb_data)
{
    CcnetProcessor *processor = cb_data;

    if (!res->success) {
        ccnet_processor_send_response (processor, SC_BAD_OBJECT, SS_BAD_OBJECT,
                                       NULL, 0);
        g_warning ("[recvcommit] Failed to write commit object.\n");
        ccnet_processor_done (processor, FALSE);
    }

    /* FIXME: need to send ACK if success. */
}
/* Queue an async write of the received commit object.
 * NOTE(review): the payload length is `len - 41`; this assumes ObjectPack
 * begins with a 41-byte id field (40-char SHA-1 hex + NUL) followed by the
 * object body — confirm against the ObjectPack definition in
 * objecttx-common.h. */
static int
save_commit (CcnetProcessor *processor, ObjectPack *pack, int len)
{
    USE_PRIV;

    return seaf_obj_store_async_write (seaf->commit_mgr->obj_store,
                                       priv->writer_id,
                                       pack->id,
                                       pack->object,
                                       len - 41);
}
/* Handle one SC_OBJECT update: validate that the payload is at least large
 * enough to contain an ObjectPack header, then queue it for async write.
 * Any failure reports SC_BAD_OBJECT and terminates the processor. */
static void
receive_commit (CcnetProcessor *processor, char *content, int clen)
{
    ObjectPack *pack = (ObjectPack *)content;

    /* Reject payloads too small to even hold the pack header. */
    if (clen < sizeof(ObjectPack)) {
        g_warning ("[recvcommit] invalid object id.\n");
        goto bad;
    }

    g_debug ("[recvcommit] recv commit object %s\n", pack->id);

    if (save_commit (processor, pack, clen) < 0) {
        goto bad;
    }

    return;

bad:
    ccnet_processor_send_response (processor, SC_BAD_OBJECT, SS_BAD_OBJECT,
                                   NULL, 0);
    g_warning ("[recvcommit] Failed to write commit object.\n");
    ccnet_processor_done (processor, FALSE);
}
/* Dispatch incoming protocol updates while in RECV_OBJECT state:
 *   SC_OBJECT -> store the commit object,
 *   SC_END    -> transfer complete, finish successfully,
 *   anything else -> protocol error, respond and terminate.
 * Updates in any other state indicate a programming error
 * (g_return_if_reached). */
static void handle_update (CcnetProcessor *processor,
                           char *code, char *code_msg,
                           char *content, int clen)
{
    switch (processor->state) {
    case RECV_OBJECT:
        /* Codes are 3-character strings; compare only the code prefix. */
        if (strncmp(code, SC_OBJECT, 3) == 0) {
            receive_commit (processor, content, clen);
        } else if (strncmp(code, SC_END, 3) == 0) {
            g_debug ("[recvcommit] Recv commit end.\n");
            ccnet_processor_done (processor, TRUE);
        } else {
            g_warning ("[recvcommit] Bad update: %s %s\n", code, code_msg);
            ccnet_processor_send_response (processor,
                                           SC_BAD_UPDATE_CODE, SS_BAD_UPDATE_CODE,
                                           NULL, 0);
            ccnet_processor_done (processor, FALSE);
        }
        break;
    default:
        g_return_if_reached ();
    }
}
|
jaredsburrows/Project-Euler
|
java/src/main/java/leetcode/Problem686RepeatedStringMatch.java
|
package leetcode;
/**
* https://leetcode.com/problems/repeated-string-match
*/
public final class Problem686RepeatedStringMatch {
  /**
   * Returns the smallest number of times A must be repeated so that B is a
   * substring of the repetition, or -1 if no repetition works.
   *
   * Repeats A until the buffer is at least |B| long; B must then appear in
   * that buffer or in the buffer plus one more copy of A (any later copy
   * could not start a new match).
   *
   * Fix: the original looped forever when A was empty and B non-empty,
   * because the buffer never grew; that case now returns -1 up front.
   *
   * Time - O(N), Space - O(N)
   */
  public int repeatedStringMatch(String A, String B) {
    if (A == null || B == null) {
      return -1;
    }
    if (B.isEmpty()) {
      return 0; // empty B is a substring of zero repetitions (matches original behavior)
    }
    if (A.isEmpty()) {
      return -1; // no repetition of "" can contain a non-empty B
    }
    StringBuilder repeated = new StringBuilder();
    int count = 0;
    while (repeated.length() < B.length()) {
      repeated.append(A);
      count++;
    }
    if (repeated.toString().contains(B)) {
      return count;
    }
    if (repeated.append(A).toString().contains(B)) {
      return count + 1;
    }
    return -1;
  }
}
|
sidneys/yt-playlist-hero
|
app/scripts/main/components/application.js
|
'use strict';

// Electron main-process bootstrap: sets up the app root path, loads all
// main-process modules (menus, windows, services), and wires app lifecycle
// events. Most requires are for their side effects only (jshint-ignored).

/**
 * Modules
 * Node
 * @constant
 */
const path = require('path');

/**
 * Modules
 * Electron
 * @constant
 */
const electron = require('electron');
const { app } = electron;

/**
 * Modules
 * External
 * @constant
 */
const appRootPath = require('app-root-path');

/**
 * Modules
 * Configuration
 */
// NOTE(review): 0 removes the max-listeners cap entirely, which also
// suppresses EventEmitter leak warnings process-wide — presumably
// intentional here, but worth confirming.
require('events').EventEmitter.defaultMaxListeners = 0;
appRootPath.setPath(path.join(__dirname, '..', '..', '..', '..'));

/**
 * Modules
 * Internal
 * @constant
 */
const logger = require(path.join(appRootPath.path, 'lib', 'logger'))({ write: true });
const appMenu = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'menus', 'app-menu')); // jshint ignore:line
const mainWindow = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'windows', 'main-window')); // jshint ignore:line
const configurationManager = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'managers', 'configuration-manager')); // jshint ignore:line
const trayMenu = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'menus', 'tray-menu')); // jshint ignore:line
const updaterService = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'services', 'updater-service')); // jshint ignore:line
const powerService = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'services', 'power-service')); // jshint ignore:line
const debugService = require(path.join(appRootPath.path, 'app', 'scripts', 'main', 'services', 'debug-service')); // jshint ignore:line

/**
 * Disable GPU
 */
app.disableHardwareAcceleration();

/**
 * @listens Electron.App#before-quit
 */
// Flag real quits so close handlers can distinguish quit from window-close.
app.on('before-quit', () => {
    logger.debug('app#before-quit');

    app.isQuitting = true;
});

/**
 * @listens Electron.App#ready
 */
app.once('ready', () => {
    logger.debug('app#ready');
});
|
mcarcaso/foam2
|
src/foam/dao/FixedSizeDAO.js
|
<gh_stars>0
/**
* @license
* Copyright 2018 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
foam.CLASS({
  package: 'foam.dao',
  name: 'FixedSizeDAO',
  extends: 'foam.dao.ProxyDAO',

  documentation: `
    Manages the size of a dao by purging on each put.  Elements that match
    the predicate are removed.
    NOTE: this DAO must delegate to an MDAO so the remove operations only
    affect memory.  Install via EasyDAO to it is installed in the correct
    place.`,

  properties: [
    {
      // Ordering used to decide which elements survive: the first `size`
      // elements in this order are kept.
      class: 'FObjectProperty',
      of: 'foam.mlang.order.Comparator',
      name: 'comparator'
    },
    {
      // Only elements matching this predicate are eligible for purging.
      //class: 'foam.mlang.predicate.PredicateProperty',
      class: 'FObjectProperty',
      of: 'foam.mlang.predicate.Predicate',
      name: 'predicate'
    },
    {
      // Maximum number of matching elements to retain.
      class: 'IntProperty',
      name: 'size'
    }
  ],

  methods: [
    {
      name: 'put_',
      // Java implementation: after delegating the put, drop every matching
      // element beyond the first `size` in comparator order.
      javaCode: `
    obj = getDelegate().put_(x, obj);
    this.getDelegate()
      .where(getPredicate())
      .orderBy(getComparator())
      .skip(getSize())
      .removeAll();
    return obj;
      `
    },
  ]
});
|
jk-ozlabs/op-test-framework
|
testcases/OpTestEM.py
|
#!/usr/bin/env python2
# IBM_PROLOG_BEGIN_TAG
# This is an automatically generated prolog.
#
# $Source: op-test-framework/testcases/OpTestEM.py $
#
# OpenPOWER Automated Test Project
#
# Contributors Listed Below - COPYRIGHT 2015
# [+] International Business Machines Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# IBM_PROLOG_END_TAG
# @package OpTestEM
# Energy Management package for OpenPower testing.
#
# This class will test the functionality of following drivers
# 1. powernv cpuidle driver
# 2. powernv cpufreq driver
import time
import subprocess
import re
import random
import decimal
import unittest
import OpTestConfiguration
from common.OpTestUtil import OpTestUtil
from common.OpTestSystem import OpSystemState
from common.Exceptions import CommandFailed
from common.OpTestIPMI import IPMIConsoleState
import common.OpTestQemu as OpTestQemu
from testcases.DeviceTreeValidation import DeviceTreeValidation
import logging
import OpTestLogger
log = OpTestLogger.optest_logger_glob.get_logger(__name__)
class OpTestEM():
    """Energy Management test helpers (Python 2).

    Base class (mixed into unittest.TestCase subclasses below) providing
    set/verify operations for the powernv cpufreq and cpuidle drivers via
    a console to either the host OS or the skiroot (Petitboot) shell.
    Subclasses set ``self.test`` to "host" or "skiroot" before set_up().
    """

    def setUp(self):
        # Gather handles to host, BMC/IPMI, and system from the global config.
        conf = OpTestConfiguration.conf
        self.cv_HOST = conf.host()
        self.cv_IPMI = conf.ipmi()
        self.cv_SYSTEM = conf.system()
        self.util = OpTestUtil()
        # Parses "key: value" lines from `ppc64_cpu --frequency` output.
        self.ppc64cpu_freq_re = re.compile(r"([a-z]+):\s+([\d.]+)")
        self.c = None # use this for tearDown

    def set_up(self):
        """Boot to the target environment and return a console/connection.

        Returns a console (skiroot) or SSH connection (host) stored in
        ``self.c``; tearDown relies on self.c being None if we never got one.
        """
        self.c = None # clear this, we may not get back from goto and tearDown relies on
        if self.test == "skiroot":
            self.cv_SYSTEM.goto_state(OpSystemState.PETITBOOT_SHELL)
            self.c = self.cv_SYSTEM.console
        elif self.test == "host":
            self.cv_SYSTEM.goto_state(OpSystemState.OS)
            self.c = self.cv_SYSTEM.cv_HOST.get_ssh_connection()
        else:
            raise Exception("Unknown test type")
        return self.c

    def tearDown(self):
        """Restore a sane EM configuration: powersave governor and all idle
        states enabled. Skips anything the kernel doesn't expose."""
        if self.c == None: # unable to proceed
            return
        cpu_num = self.get_first_available_cpu()
        # Check cpufreq driver enabled
        cpufreq = False
        try:
            self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpufreq/" % cpu_num)
            cpufreq = True
        except CommandFailed:
            pass
        # return back to sane cpu governor
        if cpufreq:
            self.set_cpu_gov("powersave")
        # Check cpuidle driver enabled
        cpuidle = False
        try:
            self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpuidle/" % cpu_num)
            cpuidle = True
        except CommandFailed:
            pass
        if not cpuidle:
            return
        # and then re-enable all idle states
        idle_states = self.get_idle_states()
        for i in idle_states:
            self.enable_idle_state(i)

    def get_idle_states(self):
        """Return the list of cpuidle state numbers exposed in sysfs."""
        return self.c.run_command("find /sys/devices/system/cpu/cpu*/cpuidle/state* -type d | cut -d'/' -f8 | sort -u | sed -e 's/^state//'")

    def get_first_available_cpu(self):
        """Return the first present CPU number (as a string from sysfs)."""
        cmd = "cat /sys/devices/system/cpu/present | cut -d'-' -f1"
        try:
            res = self.c.run_command(cmd)
            return res[0]
        except Exception as e:
            raise e

    ##
    # @brief sets the cpu frequency with i_freq value
    #
    # @param i_freq @type str: this is the frequency of cpu to be set
    #
    # @return BMC_CONST.FW_SUCCESS or raise OpTestError
    #
    def set_cpu_freq(self, i_freq):
        # Requires the "userspace" governor to be active on all CPUs.
        l_cmd = "for i in /sys/devices/system/cpu/cpu*/cpufreq/scaling_setspeed; do echo %s > $i; done" % i_freq
        self.c.run_command(l_cmd)

    ##
    # @brief verify the cpu frequency with i_freq value
    #
    # @param i_freq @type str: this is the frequency to be verified with cpu frequency
    def verify_cpu_freq(self, i_freq, and_measure=True):
        # Sysfs check first; optionally cross-check with ppc64_cpu measurement.
        l_cmd = "cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq"
        cur_freq = self.c.run_command(l_cmd)
        if not cur_freq[0] == i_freq:
            # (According to Vaidy) it may take milliseconds to have the
            # request for a frequency change to come into effect.
            # So, if we happen to be *really* quick checking the result,
            # we may have checked before it has taken effect. So, we
            # sleep for a (short) amount of time and retry.
            time.sleep(0.2)
            cur_freq = self.c.run_command(l_cmd)
        self.assertEqual(cur_freq[0], i_freq,
                         "CPU frequency not changed to %s" % i_freq)
        if not and_measure:
            return
        frequency_output = self.c.run_command("ppc64_cpu --frequency")
        freq = {}
        for f in frequency_output:
            m = re.match(self.ppc64cpu_freq_re, f)
            if m:
                # ppc64_cpu reports GHz; convert to kHz to match sysfs units.
                freq[m.group(1)] = int(decimal.Decimal(m.group(2)) * 1000000)
        # Frequencies are in KHz
        log.debug(repr(freq))
        # Allow 1% tolerance between requested and measured frequency.
        delta = int(i_freq) / (100)
        log.debug("# Set %d, Measured %d, Allowed Delta %d" % (int(i_freq),freq["avg"],delta))
        self.assertAlmostEqual(freq["min"], freq["max"], delta=(freq["avg"]/100),
                               msg="ppc64_cpu measured CPU Frequency differs between min/max when frequency set explicitly")
        self.assertAlmostEqual(freq["avg"], freq["max"], delta=(freq["avg"]/100),
                               msg="ppc64_cpu measured CPU Frequency differs between avg/max when frequency set explicitly")
        self.assertAlmostEqual(freq["avg"], int(i_freq), delta=delta,
                               msg="Set and measured CPU frequency differ too greatly")

    # This function verifies CPU frequency against a single or list of frequency's provided
    def verify_cpu_freq_almost(self, i_freq):
        """Pass if the current frequency is within 1% of i_freq, or of any
        entry when i_freq is a list (e.g. governor may settle on several)."""
        l_cmd = "cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq"
        cur_freq = self.c.run_command(l_cmd)
        if not type(i_freq) is list:
            if not cur_freq[0] == i_freq:
                # Same settle-time retry as verify_cpu_freq().
                time.sleep(0.2)
                cur_freq = self.c.run_command(l_cmd)
            if int(cur_freq[0]) == int(i_freq):
                return
        achieved = False
        if not type(i_freq) is list:
            freq_list = [i_freq]
        else:
            freq_list = i_freq
        for freq in freq_list:
            delta = int(freq) / (100)
            try:
                self.assertAlmostEqual(int(cur_freq[0]), int(freq), delta=delta,
                                       msg="CPU frequency not changed to %s" % i_freq)
                achieved = True
                break
            except AssertionError:
                pass
        self.assertTrue(achieved, "CPU failed to achieve any one of the frequency in %s" % freq_list)

    ##
    # @brief sets the cpu governer with i_gov governer
    #
    # @param i_gov @type str: this is the governer to be set for all cpu's
    def set_cpu_gov(self, i_gov):
        l_cmd = "for i in /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; do echo %s > $i; done" % i_gov
        self.c.run_command(l_cmd)

    ##
    # @brief verify the cpu governer with i_gov governer
    #
    # @param i_gov @type str: this is the governer to be verified with cpu governer
    def verify_cpu_gov(self, i_gov):
        l_cmd = "cat /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor"
        cur_gov = self.c.run_command(l_cmd)
        self.assertEqual(cur_gov[0], i_gov, "CPU governor not changed to %s" % i_gov)

    ##
    # @brief enable cpu idle state i_idle
    #
    # @param i_idle @type str: this is the cpu idle state to be enabled
    def enable_idle_state(self, i_idle):
        # Prefer cpupower on the host; fall back to raw sysfs writes
        # (skiroot has no cpupower).
        sysfs_cmd = "for i in /sys/devices/system/cpu/cpu*/cpuidle/state%s/disable; do echo 0 > $i; done" % i_idle
        if self.test == "host":
            l_cmd = "cpupower idle-set -e %s" % i_idle
        elif self.test == "skiroot":
            l_cmd = "for i in /sys/devices/system/cpu/cpu*/cpuidle/state%s/disable; do echo 0 > $i; done" % i_idle
        try:
            self.c.run_command(l_cmd)
        except CommandFailed:
            self.c.run_command(sysfs_cmd)

    ##
    # @brief disable cpu idle state i_idle
    #
    # @param i_idle @type str: this is the cpu idle state to be disabled
    #
    # @return BMC_CONST.FW_SUCCESS or raise OpTestError
    #
    def disable_idle_state(self, i_idle):
        # Same cpupower-then-sysfs fallback strategy as enable_idle_state().
        sysfs_cmd = "for i in /sys/devices/system/cpu/cpu*/cpuidle/state%s/disable; do echo 1 > $i; done" % i_idle
        if self.test == "host":
            l_cmd = "cpupower idle-set -d %s" % i_idle
        elif self.test == "skiroot":
            l_cmd = "for i in /sys/devices/system/cpu/cpu*/cpuidle/state%s/disable; do echo 1 > $i; done" % i_idle
        try:
            self.c.run_command(l_cmd)
        except CommandFailed:
            self.c.run_command(sysfs_cmd)

    ##
    # @brief verify whether cpu idle state i_idle enabled
    #
    # @param i_idle @type str: this is the cpu idle state to be verified for enable
    def verify_enable_idle_state(self, i_idle):
        l_cmd = "cat /sys/devices/system/cpu/cpu0/cpuidle/state%s/disable" % i_idle
        cur_value = self.c.run_command(l_cmd)
        self.assertEqual(cur_value[0], "0", "CPU state%s not enabled" % i_idle)

    ##
    # @brief verify whether cpu idle state i_idle disabled
    #
    # @param i_idle @type str: this is the cpu idle state to be verified for disable
    def verify_disable_idle_state(self, i_idle):
        l_cmd = "cat /sys/devices/system/cpu/cpu0/cpuidle/state%s/disable" % i_idle
        cur_value = self.c.run_command(l_cmd)
        self.assertEqual(cur_value[0], "1", "CPU state%s not disabled" % i_idle)

    def get_pstate_limits(self):
        """Return (min, max, nominal) cpufreq limits (kHz, as strings) from
        the first present CPU's sysfs entries. Raises CommandFailed if the
        cpufreq driver is not enabled."""
        cpu_num = self.get_first_available_cpu()
        # Check cpufreq driver enabled
        self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpufreq/" % cpu_num)
        pstate_min = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/cpuinfo_min_freq" % cpu_num)[0]
        pstate_max = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/cpuinfo_max_freq" % cpu_num)[0]
        pstate_nom = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/cpuinfo_nominal_freq" % cpu_num)[0]
        return pstate_min, pstate_max, pstate_nom
class slw_info(OpTestEM, unittest.TestCase):
    """Informational test: dump SLW/stop idle-state info from the host."""

    def setUp(self):
        self.test = "host"
        super(slw_info, self).setUp()

    # @brief This function just gathers the host CPU SLW info
    def runTest(self):
        self.c = self.set_up()
        self.c.run_command("uname -a")
        self.c.run_command("cat /etc/os-release")
        proc_gen = self.cv_HOST.host_get_proc_gen()
        # POWER8 exposes idle states via ibm,enabled-idle-states; POWER9
        # renamed the firmware messages from "slw" to "stop".
        if proc_gen in ["POWER8", "POWER8E"]:
            self.c.run_command("hexdump -c /proc/device-tree/ibm,enabled-idle-states")
        try:
            if proc_gen in ["POWER8", "POWER8E"]:
                self.c.run_command("cat /sys/firmware/opal/msglog | grep -i slw")
            elif proc_gen in ["POWER9"]:
                self.c.run_command("cat /sys/firmware/opal/msglog | grep -i stop")
        except CommandFailed as cf:
            pass # we may have no slw entries in msglog
class cpu_freq_states_host(OpTestEM, unittest.TestCase):
    """Exercise the cpufreq driver on the host by setting every advertised
    scaling frequency under the userspace governor, then spot-checking
    randomly chosen frequencies with verification."""

    def setUp(self):
        self.test = "host"
        super(cpu_freq_states_host, self).setUp()

    # Knobs for how many set/verify operations the sweep performs.
    NR_FREQUENCIES_SET = 100
    NR_FREQUENCIES_VERIFIED = 10

    # @brief This function will cover following test steps
    # 2. Check the cpupower utility is available in host.
    # 3. Get available cpu scaling frequencies
    # 4. Set the userspace governer for all cpu's
    # 5. test the cpufreq driver by set/verify cpu frequency
    def runTest(self):
        self.c = self.set_up()
        if isinstance(self.c, OpTestQemu.QemuConsole):
            raise self.skipTest("OpTestSystem running QEMU frequency checks not applicable")
        self.c.run_command("uname -a")
        self.c.run_command("cat /etc/os-release")
        cpu_num = self.get_first_available_cpu()
        # Check cpufreq driver enabled
        self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpufreq/" % cpu_num)
        # Get available cpu scaling frequencies
        l_res = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/scaling_available_frequencies" % cpu_num)
        log.debug(l_res)
        freq_list = l_res[0].split(' ')[:-1]  # remove empty entry at end
        log.debug(freq_list)
        # Set the cpu governer to userspace
        self.set_cpu_gov("userspace")
        self.verify_cpu_gov("userspace")
        # Sweep every advertised frequency once without strict verification.
        for i_freq in freq_list:
            self.set_cpu_freq(i_freq)
            self.verify_cpu_freq(i_freq, False)
        # NOTE(review): range(1, N) performs N-1 verified checks, not
        # NR_FREQUENCIES_VERIFIED — confirm whether that is intended.
        for i in range(1, self.NR_FREQUENCIES_VERIFIED):
            i_freq = random.choice(freq_list)
            self.set_cpu_freq(i_freq)
            self.verify_cpu_freq(i_freq, True)
        pass
class cpu_freq_states_skiroot(cpu_freq_states_host):
    """Same frequency-state sweep, run from the skiroot environment."""

    def setUp(self):
        self.test = "skiroot"
        # Deliberately passes the PARENT class to super() so that
        # cpu_freq_states_host.setUp (which would overwrite self.test with
        # "host") is skipped and the grandparent setUp runs instead.
        super(cpu_freq_states_host, self).setUp()
class cpu_freq_gov_host(OpTestEM, DeviceTreeValidation, unittest.TestCase):
    """Verify the performance and powersave cpufreq governors: performance
    should reach a boost/turbo frequency, powersave should settle at the
    minimum pstate."""

    def setUp(self):
        self.test = "host"
        super(cpu_freq_gov_host, self).setUp()

    def runTest(self):
        self.c = self.set_up()
        if isinstance(self.c, OpTestQemu.QemuConsole):
            raise self.skipTest("OpTestSystem running QEMU frequency governor checks not applicable")
        self.c.run_command("uname -a")
        self.c.run_command("cat /etc/os-release")
        pstate_min, pstate_max, pstate_nom = self.get_pstate_limits()
        log.debug("Pstate min:{} max:{} nom:{}".format(pstate_min,pstate_max, pstate_nom))
        # Equal turbo/ultra-turbo pstates in the device tree means the
        # platform has no Workload Optimized Frequency (WoF) table.
        turbo = self.dt_prop_read_u32_arr("/ibm,opal/power-mgt/ibm,pstate-turbo")[0]
        ultra_turbo = self.dt_prop_read_u32_arr("/ibm,opal/power-mgt/ibm,pstate-ultra-turbo")[0]
        cpu_num = self.get_first_available_cpu()
        if turbo == ultra_turbo:
            log.debug("No WoF frequencies")
            freq_list = [pstate_max]
        else:
            # Add boost frequencies
            l_res = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/scaling_boost_frequencies" % cpu_num)
            freq_list = l_res[0].split(' ')[:-1]  # remove empty entry at end
            # Add turbo frequency
            l_res = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/scaling_available_frequencies" % cpu_num)
            fre_list = l_res[0].split(' ')[:-1]
            # NOTE(review): fre_list holds strings, so max() compares
            # lexicographically — correct only while all frequencies have
            # the same digit count. Confirm.
            freq_list.append(max(fre_list))
        # performance(Pstate_max),
        # ondemand(Workload based),
        # userspace(User request),
        # powersave(Pstate_min)
        self.set_cpu_gov("performance")
        self.verify_cpu_gov("performance")
        self.verify_cpu_freq_almost(freq_list)
        log.debug("CPU successfully achieved one of the boost or turbo freuency when performance governor set")
        self.set_cpu_gov("powersave")
        self.verify_cpu_gov("powersave")
        self.verify_cpu_freq_almost(pstate_min)
        # Leave the system in the performance governor when done.
        self.set_cpu_gov("performance")
class cpu_freq_gov_skiroot(cpu_freq_gov_host):
    """Same governor checks, run from the skiroot environment."""

    def setUp(self):
        self.test = "skiroot"
        # Deliberately passes the PARENT class to super() so that
        # cpu_freq_gov_host.setUp (which forces self.test = "host") is
        # skipped and the grandparent setUp runs instead.
        super(cpu_freq_gov_host, self).setUp()
class cpu_boost_freqs_host(OpTestEM, DeviceTreeValidation, unittest.TestCase):
    """With the performance governor and a single-core workload, verify the
    CPU reaches one of the advertised boost (WoF) frequencies, as measured
    by perf."""

    def setUp(self):
        self.test = "host"
        super(cpu_boost_freqs_host, self).setUp()

    def runTest(self):
        self.c = self.set_up()
        self.c.run_command("uname -a")
        self.c.run_command("cat /etc/os-release")
        pstate_min, pstate_max, pstate_nom = self.get_pstate_limits()
        cpu_num = self.get_first_available_cpu()
        # Check cpufreq driver enabled
        self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpufreq/" % cpu_num)
        # Equal turbo/ultra-turbo pstates means no WoF table on this box.
        turbo = self.dt_prop_read_u32_arr("/ibm,opal/power-mgt/ibm,pstate-turbo")[0]
        ultra_turbo = self.dt_prop_read_u32_arr("/ibm,opal/power-mgt/ibm,pstate-ultra-turbo")[0]
        if turbo == ultra_turbo:
            self.skipTest("No WoF frequencies available to test")
        # Get available cpu boost frequencies
        try:
            l_res = self.c.run_command("cat /sys/devices/system/cpu/cpu%s/cpufreq/scaling_boost_frequencies" % cpu_num)
        except CommandFailed:
            self.assertTrue(False, "No scaling_boost_frequencies file got created")
        freq_list = l_res[0].split(' ')[:-1]  # remove empty entry at end
        log.debug("Boost frequencies: {}".format(freq_list))
        # Boost frequencies will achieve only when cpufreq governor is performance
        self.set_cpu_gov("performance")
        self.verify_cpu_gov("performance")
        achieved_freq = ""
        # Run the workload only on one active core so it should achieve one of boost frequencies
        res = self.c.run_command_ignore_fail("perf stat timeout 10 yes > /dev/null")
        for line in res:
            if "cycles" in line and "GHz" in line:
                # perf prints e.g. "... cycles # 3.800 GHz"; GHz * 1e6 = kHz,
                # matching the sysfs frequency unit.
                achieved_freq = int(decimal.Decimal(line.split()[3]) * 1000000)
                break
        if not achieved_freq:
            self.assertTrue(False, "Failed to get CPU achieved frequency")
        achieved = False
        # Accept a measurement within 1% of any advertised boost frequency.
        for freq in freq_list:
            delta = int(freq) / (100)
            try:
                self.assertAlmostEqual(int(freq), achieved_freq, delta=delta,
                                       msg="Set and measured CPU frequency differ too greatly")
                achieved = True
                break
            except AssertionError:
                pass
        self.assertTrue(achieved, "CPU failed to achieve any one of the frequency in boost frequenies(WoF) range")
        log.debug("CPU successfully achieved one of the boost freuency")
        log.debug("Achieved freq: %d, near by WoF freq: %d" % (int(achieved_freq), int(freq)))
class cpu_idle_states_host(OpTestEM, unittest.TestCase):
    """For each cpuidle state in turn: enable only that state, run a
    per-CPU sha1sum workload, and assert (via the sysfs usage/time
    counters) that nearly all CPUs actually entered the state."""

    def setUp(self):
        self.test = "host"
        super(cpu_idle_states_host, self).setUp()

    ##
    # @brief This function will cover following test steps
    # 1. It will get the OS and kernel versions.
    # 2. Check the cpupower utility is available in host.
    # 3. Set the userspace governer for all cpu's
    # 4. test the cpuidle driver by enable/disable/verify the idle states
    def runTest(self):
        self.c = self.set_up()
        if isinstance(self.c, OpTestQemu.QemuConsole):
            raise self.skipTest("OpTestSystem running QEMU cpu idle state checks not applicable")
        # The workload below needs taskset; skip early if it is missing.
        try:
            self.c.run_command("taskset")
        except CommandFailed as cf:
            if 'not found' in ''.join(cf.output):
                self.skipTest("Taskset command not found")
        self.c.run_command("uname -a")
        self.c.run_command("cat /etc/os-release")
        cpu_num = self.get_first_available_cpu()
        # Check cpuidle driver enabled
        try:
            self.c.run_command("ls --color=never /sys/devices/system/cpu/cpu%s/cpuidle/" % cpu_num)
        except CommandFailed:
            self.assertTrue(False, "cpuidle driver is not enabled in kernel")
        nrcpus = self.c.run_command("grep -c 'processor.*: ' /proc/cpuinfo")
        nrcpus = int(nrcpus[0])
        self.assertGreater(nrcpus, 0, "You can't have 0 CPUs")
        # Copy /dev/urandom into temptext.txt
        self.c.run_command("dd if=/dev/urandom bs=1024 count=30000 2> /dev/null 1> temptext.txt")
        # Setting workloads. Skiroot does not have the -c option for taskset.
        # This means it must use the CPU affinity which will limit at cpu63 as
        # printf '%x' $(( 1 << 64 )) becomes 0
        workload = """for cpu in {0..%d};do taskset -c $cpu sha1sum temptext.txt & done; wait""" % (nrcpus - 1)
        if self.test == "skiroot":
            if nrcpus > 60:
                nrcpus = 60
            workload = """for cpu in `seq 0 1 %d`;do taskset 0x`printf '%%x' $(( 1 << $cpu ))` sha1sum temptext.txt & done; wait""" % (nrcpus - 1)
        # TODO: Check the expected idle states (/proc/device-tree/ibm,opal/power-mgt)
        # in runtime idle states (idle_state_names)
        idle_states = self.get_idle_states()
        log.debug("Discovered idle states: {}".format(repr(idle_states)))
        names = self.c.run_command("cat /sys/devices/system/cpu/cpu0/cpuidle/state*/name")
        # NOTE(review): each name is wrapped in a single-element list —
        # presumably for log formatting; confirm this is intentional.
        names = [[a] for a in names]
        idle_state_names = {}
        for i in range(len(idle_states)):
            idle_state_names[idle_states[i]] = names[i]
        # We first disable everything
        for i in idle_states:
            self.disable_idle_state(i)
            self.verify_disable_idle_state(i)
        # With all idle disabled, gather current usage and total time spent in idle
        # state (as a baseline)
        before_usage = {}
        before_time = {}
        for i in idle_states:
            before_usage[i] = self.c.run_command("cat /sys/devices/system/cpu/cpu*/cpuidle/state%s/usage" % (i))
            before_usage[i] = [int(a) for a in before_usage[i]]
            before_time[i] = self.c.run_command("cat /sys/devices/system/cpu/cpu*/cpuidle/state%s/time" % (i))
            before_time[i] = [int(a) for a in before_time[i]]
        # Enable one idle state, check residency, disable and repeat.
        after_usage = {}
        after_time = {}
        for i in idle_states:
            success = 0
            total = 0
            self.enable_idle_state(i)
            self.verify_enable_idle_state(i)
            self.c.run_command(workload)
            after_usage[i] = self.c.run_command("cat /sys/devices/system/cpu/cpu*/cpuidle/state%s/usage" % i)
            after_usage[i] = [int(a) for a in after_usage[i]]
            after_time[i] = self.c.run_command("cat /sys/devices/system/cpu/cpu*/cpuidle/state%s/time" % i)
            after_time[i] = [int(a) for a in after_time[i]]
            # Each CPU scores 0.5 for entering the state at all and another
            # 0.5 for accumulating residency time in it.
            for c in range(nrcpus):
                log.debug("# CPU %d entered idle state %s %u times" % (c, idle_state_names[i], after_usage[i][c] - before_usage[i][c]))
                log.debug("# CPU %d entered idle state %s for %u microseconds" % (c, idle_state_names[i], after_time[i][c] - before_time[i][c]))
                if after_usage[i][c] > before_usage[i][c]:
                    success += 0.5
                if after_time[i][c] > before_time[i][c]:
                    success += 0.5
                total += 1
            log.debug("CPUs entered idle state %s for %d/%d of the times" % (idle_state_names[i], success, total))
            self.assertGreater(success/total, 0.95, "CPUs entered idle state %s for %d/%d of the times" % (idle_state_names[i], success, total))
            self.disable_idle_state(i)
        # Remove added temptext.txt file. Idle states are re-enabled during tearDown
        self.c.run_command("rm temptext.txt")
        pass
class cpu_idle_states_skiroot(cpu_idle_states_host):
    """Same cpuidle residency checks, run from the skiroot environment."""

    def setUp(self):
        self.test = "skiroot"
        # Deliberately passes the PARENT class to super() so that
        # cpu_idle_states_host.setUp (which forces self.test = "host") is
        # skipped and the grandparent setUp runs instead.
        super(cpu_idle_states_host, self).setUp()
def host_suite():
    """Return a TestSuite containing every host-side energy-management test."""
    suite = unittest.TestSuite()
    for case in (slw_info, cpu_freq_states_host, cpu_freq_gov_host,
                 cpu_boost_freqs_host, cpu_idle_states_host):
        suite.addTest(case())
    return suite
def skiroot_suite():
    """Return a TestSuite containing the skiroot variants of the EM tests."""
    suite = unittest.TestSuite()
    for case in (cpu_freq_states_skiroot, cpu_freq_gov_skiroot,
                 cpu_idle_states_skiroot):
        suite.addTest(case())
    return suite
|
tqrg-bot/onos
|
apps/openstacktelemetry/app/src/main/java/org/onosproject/openstacktelemetry/codec/TinaStatsInfoByteBufferCodec.java
|
<filename>apps/openstacktelemetry/app/src/main/java/org/onosproject/openstacktelemetry/codec/TinaStatsInfoByteBufferCodec.java
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacktelemetry.codec;
import org.onosproject.openstacktelemetry.api.ByteBufferCodec;
import org.onosproject.openstacktelemetry.api.StatsInfo;
import org.onosproject.openstacktelemetry.impl.DefaultStatsInfo;
import java.nio.ByteBuffer;
/**
* StatsInfo ByteBuffer Codec.
*/
/**
 * Fixed-layout {@link ByteBuffer} codec for {@link StatsInfo} records.
 * The encoded form is a 48-byte record; {@link #decode(ByteBuffer)} reads
 * the fields back in exactly the order {@link #encode(StatsInfo)} wrote them.
 */
public class TinaStatsInfoByteBufferCodec extends ByteBufferCodec<StatsInfo> {

    // 4 longs (32) + 3 ints (12) + 2 shorts (4) = 48 bytes per record.
    private static final int MESSAGE_SIZE = 48;

    @Override
    public ByteBuffer encode(StatsInfo statsInfo) {
        final ByteBuffer buffer = ByteBuffer.allocate(MESSAGE_SIZE);
        buffer.putLong(statsInfo.startupTime());
        buffer.putLong(statsInfo.fstPktArrTime());
        buffer.putInt(statsInfo.lstPktOffset());
        buffer.putLong(statsInfo.prevAccBytes());
        buffer.putInt(statsInfo.prevAccPkts());
        buffer.putLong(statsInfo.currAccBytes());
        buffer.putInt(statsInfo.currAccPkts());
        buffer.putShort(statsInfo.errorPkts());
        buffer.putShort(statsInfo.dropPkts());
        return buffer;
    }

    @Override
    public StatsInfo decode(ByteBuffer byteBuffer) {
        // Field order must mirror encode() exactly; the relative get*()
        // calls are evaluated left-to-right along the builder chain.
        final DefaultStatsInfo.DefaultBuilder builder = new DefaultStatsInfo.DefaultBuilder();
        builder.withStartupTime(byteBuffer.getLong())
                .withFstPktArrTime(byteBuffer.getLong())
                .withLstPktOffset(byteBuffer.getInt())
                .withPrevAccBytes(byteBuffer.getLong())
                .withPrevAccPkts(byteBuffer.getInt())
                .withCurrAccBytes(byteBuffer.getLong())
                .withCurrAccPkts(byteBuffer.getInt())
                .withErrorPkts(byteBuffer.getShort())
                .withDropPkts(byteBuffer.getShort());
        return builder.build();
    }
}
|
jhunt/ssg
|
pkg/ssg/provider/compressor.go
|
package provider
import (
"fmt"
"compress/zlib"
"github.com/jhunt/ssg/pkg/meter"
)
// Compress wraps ul so that data written through the returned Uploader is
// compressed with alg before reaching ul. An alg of "none" or "" returns
// ul unchanged; "zlib" layers a zlib writer over ul. Any other value
// yields an error.
func Compress(ul Uploader, alg string) (Uploader, error) {
	switch alg {
	case "none", "":
		// No compression requested; hand back the original uploader.
		return ul, nil
	case "zlib":
		return &ZlibUploader{
			w:     zlib.NewWriter(ul),
			inner: ul,
		}, nil
	default:
		// Fixed typo in the error message ("algorithem" -> "algorithm").
		return nil, fmt.Errorf("unsupported compression algorithm: '%s'", alg)
	}
}
// Decompress wraps dl so that data read through the returned Downloader is
// decompressed with alg. An alg of "none" or "" returns dl unchanged;
// "zlib" layers a metered zlib reader over dl. Any other value yields an
// error.
func Decompress(dl Downloader, alg string) (Downloader, error) {
	switch alg {
	case "none", "":
		// No decompression requested; hand back the original downloader.
		return dl, nil
	case "zlib":
		// zlib.NewReader reads the stream header immediately, so it can
		// fail here rather than on first Read.
		zr, err := zlib.NewReader(dl)
		if err != nil {
			return nil, err
		}
		return &ZlibDownloader{
			r:     meter.NewReader(zr),
			inner: dl,
		}, nil
	default:
		// Fixed typo in the error message ("algorithem" -> "algorithm").
		return nil, fmt.Errorf("unsupported compression algorithm: '%s'", alg)
	}
}
|
kiranmai-sfdev/aura
|
aura-components/src/test/components/valueChange/simpleValueChange/simpleValueChangeTest.js
|
<gh_stars>100-1000
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Tests for change-event propagation on model values. The "ballot"
// component counts change events per model key; the "index"/"value" divs
// render the index and value from the most recent change event.
({
    testString : {
        test : function(cmp) {
            var ballot = cmp.find("ballot");
            var idxDiv = cmp.find("index").getElement();
            var valDiv = cmp.find("value").getElement();
            // Zero the vote counter, then trigger exactly one change.
            ballot.set("m.candidates.string", 0);
            cmp.set("m.string", "hi");
            // $A.test.assertEquals("undefined", $A.test.getText(idxDiv));
            $A.test.assertEquals("hi", $A.test.getText(valDiv));
            $A.test.assertEquals(1, ballot.get("m.candidates.string"));
        }
    },
    testMap : {
        test : function(cmp) {
            var ballot = cmp.find("ballot");
            var idxDiv = cmp.find("index").getElement();
            var valDiv = cmp.find("value").getElement();
            ballot.set("m.candidates.map", 0);
            // Replacing the whole map counts as a single change.
            var map = cmp.get("m.map");
            map["hi"] = "there";
            cmp.set("m.map", map);
            $A.test.assertEquals(1, ballot.get("m.candidates.map"));
            // JBUCH: HALO: TODO: NEED TO DECIDE WHAT TO DO ABOUT THIS
            // $A.test.assertEquals("hi", $A.test.getText(idxDiv));
            // $A.test.assertEquals("there", $A.test.getText(valDiv));
            // A targeted single-key set fires a second change and renders it.
            cmp.set("m.map.hi", "yo");
            $A.test.assertEquals(2, ballot.get("m.candidates.map"));
            $A.test.assertEquals("hi", $A.test.getText(idxDiv));
            $A.test.assertEquals("yo", $A.test.getText(valDiv));
            map = cmp.get("m.map");
            map["hi"] = undefined;
            cmp.set("m.map", map);
            $A.test.assertEquals(3, ballot.get("m.candidates.map"));
            // JBUCH: HALO: TODO: NEED TO DECIDE WHAT TO DO ABOUT THIS
            // $A.test.assertEquals("hi", $A.test.getText(idxDiv));
            // $A.test.assertEquals("undefined", $A.test.getText(valDiv));
            // Changing an unrelated key must not bump the map's counter.
            cmp.set("m.string", "hi");
            $A.test.assertEquals(3, ballot.get("m.candidates.map"));
            $A.test.assertEquals("undefined", $A.test.getText(idxDiv));
            $A.test.assertEquals("hi", $A.test.getText(valDiv));
        }
    },
    testList : {
        test : function(cmp) {
            var ballot = cmp.find("ballot");
            var idxDiv = cmp.find("index").getElement();
            var valDiv = cmp.find("value").getElement();
            ballot.set("m.candidates.string", 0);
            // Appending via whole-list set counts as one change.
            var list = cmp.get("m.list");
            list.push("hey");
            cmp.set("m.list", list);
            $A.test.assertEquals(1, ballot.get("m.candidates.list"));
            $A.test.assertEquals("undefined", $A.test.getText(idxDiv));
            $A.test.assertEquals("hey", $A.test.getText(valDiv));
            // Indexed set renders the numeric index of the changed element.
            cmp.set("m.list.0", "yo");
            $A.test.assertEquals("0", $A.test.getText(idxDiv));
            $A.test.assertEquals("yo", $A.test.getText(valDiv));
            $A.test.assertEquals(2, ballot.get("m.candidates.list"));
            // Changing an unrelated key must not bump the list's counter.
            cmp.set("m.string", "hi");
            $A.test.assertEquals(2, ballot.get("m.candidates.list"));
            cmp.set("m.list", [ "yoeeee" ]);
            $A.test.assertEquals(3, ballot.get("m.candidates.list"));
            list = cmp.get("m.list");
            list.push("hey");
            cmp.set("m.list", list);
            $A.test.assertEquals(4, ballot.get("m.candidates.list"));
            //KRIS: HALO:
            // Why Push an empty object and not validate?
            list = cmp.get("m.list");
            list.push({});
            cmp.set("m.list", list);
        }
    },
    testSingleChain : {
        test : function(cmp) {
            // A change handler chain rewrites "start" to "finished".
            cmp.set("m.chained", "start");
            var idxDiv = cmp.find("index").getElement();
            var valDiv = cmp.find("value").getElement();
            $A.test.assertEquals("undefined", $A.test.getText(idxDiv));
            $A.test.assertEquals("finished", $A.test.getText(valDiv));
        }
    },
    //
    // FIXME: W-1296937 this should cause an error that we can check. This is a
    // simple
    // infinite recursion. If you uncomment this, it gives a different result
    // depending on the browser
    //
    _testRecurseSimple : {
        test : function(cmp) {
            cmp.set("m.recurseA", "start");
        }
    },
    //
    // FIXME: W-1296937 this should cause an error that we can check. This is a
    // ping-pong
    // infinite recursion. If you uncomment this, it gives a different result
    // depending on the browser
    //
    _testRecursePingPong : {
        test : function(cmp) {
            cmp.set("m.recurseB", "start");
        }
    }
})
|
wanderwaltz/Frostbit
|
FrostbitTests/FRBTestAttributedStringInlineStylesApplication.h
|
//
// FRBTestAttributedStringInlineStylesApplication.h
// Frostbit
//
// Created by <NAME> on 22/04/13.
// Copyright (c) 2013 <NAME>. All rights reserved.
//
#import "FRBAttributedStringTest.h"
#pragma mark -
#pragma mark FRBTestAttributedStringInlineStylesApplication interface
// Test-case class exercising application of inline styles on attributed
// strings; all behavior is inherited from FRBAttributedStringTest.
@interface FRBTestAttributedStringInlineStylesApplication : FRBAttributedStringTest
@end
|
c-base/diaspora
|
spec/javascripts/app/views/comment_stream_view_spec.js
|
// Unit tests for app.views.CommentStream (the comment stream rendered
// beneath a post).
describe("app.views.CommentStream", function(){
  beforeEach(function(){
    this.view = new app.views.CommentStream({model : factory.post()})
  })

  describe("postRenderTemplate", function(){
    it("applies infield labels", function(){
      // Spy on the jQuery plugin and confirm it was invoked on "label".
      spyOn($.fn, "inFieldLabels")
      this.view.postRenderTemplate()
      expect($.fn.inFieldLabels).toHaveBeenCalled()
      expect($.fn.inFieldLabels.mostRecentCall.object.selector).toBe("label")
    })

    it("autoResizes the new comment textarea", function(){
      spyOn($.fn, "autoResize")
      this.view.postRenderTemplate()
      expect($.fn.autoResize).toHaveBeenCalled()
      expect($.fn.autoResize.mostRecentCall.object.selector).toBe("textarea")
    })
  })

  describe("createComment", function(){
    it("clears the new comment textarea", function(){
      // Seed the comment box with text, then verify createComment empties it.
      $(this.view.el).html($("<textarea/>", {"class" : 'comment_box'}).val("hey"))
      this.view.createComment()
      expect(this.view.$(".comment_box").val()).toBe("")
    })
  })
})
|
leandromoreira/edash-packager
|
packager/media/base/audio_stream_info.cc
|
<reponame>leandromoreira/edash-packager
// Copyright 2014 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "packager/media/base/audio_stream_info.h"
#include "packager/base/logging.h"
#include "packager/base/strings/string_number_conversions.h"
#include "packager/base/strings/stringprintf.h"
#include "packager/media/base/limits.h"
namespace edash_packager {
namespace media {
namespace {
// Maps an AudioCodec enum value to a human-readable name for logging.
// Unrecognized values hit NOTIMPLEMENTED() and fall back to
// "UnknownAudioCodec".
std::string AudioCodecToString(AudioCodec audio_codec) {
  switch (audio_codec) {
    case kCodecAAC:
      return "AAC";
    case kCodecMP3:
      return "MP3";
    case kCodecPCM:
      return "PCM";
    case kCodecVorbis:
      return "Vorbis";
    case kCodecFLAC:
      return "FLAC";
    case kCodecAMR_NB:
      return "AMR_NB";
    case kCodecAMR_WB:
      return "AMR_WB";
    case kCodecPCM_MULAW:
      return "PCM_MULAW";
    case kCodecGSM_MS:
      return "GSM_MS";
    case kCodecPCM_S16BE:
      return "PCM_S16BE";
    case kCodecPCM_S24BE:
      return "PCM_S24BE";
    case kCodecOpus:
      return "Opus";
    case kCodecEAC3:
      return "EAC3";
    default:
      NOTIMPLEMENTED() << "Unknown Audio Codec: " << audio_codec;
      return "UnknownAudioCodec";
  }
}
} // namespace
// Forwards the stream-level fields to the StreamInfo base class (tagged
// kStreamAudio) and stores the audio-specific fields.
AudioStreamInfo::AudioStreamInfo(int track_id,
                                 uint32_t time_scale,
                                 uint64_t duration,
                                 AudioCodec codec,
                                 const std::string& codec_string,
                                 const std::string& language,
                                 uint8_t sample_bits,
                                 uint8_t num_channels,
                                 uint32_t sampling_frequency,
                                 const uint8_t* extra_data,
                                 size_t extra_data_size,
                                 bool is_encrypted)
    : StreamInfo(kStreamAudio,
                 track_id,
                 time_scale,
                 duration,
                 codec_string,
                 language,
                 extra_data,
                 extra_data_size,
                 is_encrypted),
      codec_(codec),
      sample_bits_(sample_bits),
      num_channels_(num_channels),
      sampling_frequency_(sampling_frequency) {
}

AudioStreamInfo::~AudioStreamInfo() {}
// A configuration is valid when the codec is known and each numeric field
// is non-zero and within its limits:: bound.
bool AudioStreamInfo::IsValidConfig() const {
  if (codec_ == kUnknownAudioCodec)
    return false;
  if (num_channels_ == 0 || num_channels_ > limits::kMaxChannels)
    return false;
  if (sample_bits_ == 0 || sample_bits_ > limits::kMaxBitsPerSample)
    return false;
  return sampling_frequency_ > 0 &&
         sampling_frequency_ <= limits::kMaxSampleRate;
}
// Appends the audio-specific fields to the base StreamInfo description.
// Intended for logging/debugging only.
std::string AudioStreamInfo::ToString() const {
  return base::StringPrintf(
      "%s codec: %s\n sample_bits: %d\n num_channels: %d\n "
      "sampling_frequency: %d\n",
      StreamInfo::ToString().c_str(),
      AudioCodecToString(codec_).c_str(),
      sample_bits_,
      num_channels_,
      sampling_frequency_);
}
// Returns the codec string (RFC 6381 style) for |codec|.
// |audio_object_type| is only consulted for AAC, where it forms the final
// component of "mp4a.40.<object type>".
std::string AudioStreamInfo::GetCodecString(AudioCodec codec,
                                            uint8_t audio_object_type) {
  switch (codec) {
    case kCodecVorbis:
      return "vorbis";
    case kCodecOpus:
      return "opus";
    case kCodecAAC:
      return "mp4a.40." + base::UintToString(audio_object_type);
    default:
      NOTIMPLEMENTED() << "Codec: " << codec;
      return "unknown";
  }
}
} // namespace media
} // namespace edash_packager
|
ellios/hedwig
|
hedwig-http/src/main/java/me/ellios/hedwig/http/server/HttpServerFactory.java
|
package me.ellios.hedwig.http.server;
import me.ellios.hedwig.rpc.core.ServiceSchema;
import me.ellios.hedwig.rpc.core.ServiceType;
import me.ellios.hedwig.rpc.server.RpcServer;
import me.ellios.hedwig.rpc.server.RpcServerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* We use this factory to create rpc server.
* And the server's schema is {@link ServiceSchema#HTTP}. We do NOT respect the type {@link ServiceType}, because the
* way we implement the HTTP schema can support them both at the same time.
*
* @author <NAME>
* @since 4/26/13 4:53 PM
*/
public class HttpServerFactory implements RpcServerFactory {

    private static final Logger LOG = LoggerFactory.getLogger(HttpServerFactory.class);

    /**
     * Accepts only the HTTP schema. The service type does not influence the
     * decision because the HTTP implementation supports every type at once.
     */
    @Override
    public boolean accept(ServiceSchema schema, ServiceType type) {
        final boolean accepted = (schema == ServiceSchema.HTTP);
        LOG.info("We can safely ignore the type {}, because we can support them all at the same time.", type);
        return accepted;
    }

    /** Creates a fresh {@link HttpServer} instance. */
    @Override
    public RpcServer create() {
        return new HttpServer();
    }
}
|
majormoses/superhosting
|
lib/superhosting/helper/i18n.rb
|
<filename>lib/superhosting/helper/i18n.rb
module Superhosting
  module Helper
    # Bootstrap helper for the global ::I18n backend.
    module I18n
      # Registers this gem's net_status.yml translations with ::I18n,
      # reloads the backend, and forces the :en locale.
      def i18n_initialize
        ::I18n.load_path << "#{::File.dirname(::File.dirname(__FILE__))}/config/net_status.yml"
        ::I18n.reload!
        ::I18n.locale = :en
      end
    end
  end
end

# Extend the module with itself so i18n_initialize is callable directly as
# Superhosting::Helper::I18n.i18n_initialize.
Superhosting::Helper::I18n.send(:extend, Superhosting::Helper::I18n)
|
nguyenthaiphuong/calling
|
gulp/tasks/stylelint.js
|
'use strict';

import gulp from 'gulp';
import stylelint from 'gulp-stylelint';
import { APP_SCSS } from '../const';

// Lint the application SCSS (theme files excluded), reporting results as
// plain strings on the console.
gulp.task('stylelint', () => {
  const sources = [APP_SCSS, '!src/styles/theme/**/*'];
  const lintOptions = {
    reporters: [{ formatter: 'string', console: true }]
  };
  return gulp.src(sources).pipe(stylelint(lintOptions));
});
|
nhnent/EAT
|
examples/exampleCustomAPI/src/main/java/com/nhnent/eat/sampleCustomAPI/customAPI.java
|
<reponame>nhnent/EAT
package com.nhnent.eat.sampleCustomAPI;
import co.paralleluniverse.fibers.SuspendExecution;
import com.nhnent.eat.customScenario.BaseCustomAPI;
import com.nhnent.eat.entity.ScenarioExecutionResult;
import com.nhnent.eat.entity.ScenarioUnit;
import com.nhnent.eat.entity.ScenarioUnitType;
import javafx.util.Pair;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Sample custom-API implementation for EAT scenarios. Dispatches a
 * scenario's extraFunctionName to one of the handlers defined below.
 */
public class customAPI extends BaseCustomAPI {
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Dispatches by scenario.extraFunctionName:
     * "getUserNickname" stores the nickname in runtimeVar under
     * scenario.returnVariableName; "illegalCustomApi" returns FALSE to
     * signal failure; "RequestGameDecision" sends a decision packet.
     * Returns TRUE for every other (including unknown) name.
     */
    @Override
    public Boolean executeExtraFunction(ScenarioExecutionResult scenarioResult, ScenarioUnit scenario)
            throws SuspendExecution, InterruptedException {
        this.scenarioResult = scenarioResult;
        if (scenario.extraFunctionName.equals("getUserNickname")) {
            String userNickname = getUserNickname();
            logger.info("User Nickname is {}", userNickname);
            logger.info("Set runtimeVar (Key:{}, Value:{})", scenario.returnVariableName, userNickname);
            runtimeVar.put(scenario.returnVariableName, userNickname);
        }
        if (scenario.extraFunctionName.equals("illegalCustomApi")) {
            return Boolean.FALSE;
        }
        if(scenario.extraFunctionName.equals("RequestGameDecision")) {
            RequestGameDecision();
        }
        return Boolean.TRUE;
    }

    /**
     * Blocks until a "ResponseLogin" packet arrives, then extracts and
     * returns the user nickname from its JSON body; returns null if an
     * exception occurs while receiving/decoding.
     */
    public String getUserNickname() throws SuspendExecution {
        logger.info("Custom function called: <getUserNickname()>");
        try {
            Pair<String, byte[]> recvPck;
            String userNickname;
            // Drain packets until the login response shows up.
            while (true) {
                recvPck = recvBodyPacket();
                if (recvPck.getKey().equals("ResponseLogin")) {
                    String realJson = this.decodePacket(recvPck.getKey(), recvPck.getValue());
                    userNickname = Util.extractUserNickName(realJson);
                    break;
                }
            }
            logger.info("<Packet - ResponseLogin> UserNickname : {}", userNickname);
            return userNickname;
        } catch (Exception e) {
            logger.error(ExceptionUtils.getStackTrace(e));
        }
        return null;
    }

    /** Sends a hard-coded RequestGameDecision packet (decision = 2). */
    public void RequestGameDecision() throws SuspendExecution {
        ScenarioUnit scenarioUnit = new ScenarioUnit();
        scenarioUnit.type = ScenarioUnitType.Send;
        scenarioUnit.packageName = "tutorial";
        scenarioUnit.name = "RequestGameDecision";
//        scenarioUnit.json = "{\n" +
//                "    \"decision\" : 2\n" +
//                "  }";
        scenarioUnit.json = "{ \"decision\" : 2 }";
//        {
//            "decision" : 2
//        }
        sendPacketToServer(scenarioUnit);
    }
}
|
StartupWichita/startupwichita.com
|
app/models/user.rb
|
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0), not null
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime
# updated_at :datetime
#
class User < ActiveRecord::Base
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable,
         :omniauthable, :omniauth_providers => [:facebook, :twitter]

  has_one :person
  has_many :topics

  accepts_nested_attributes_for :person

  acts_as_voter

  # Users flagged as administrators. (Hash condition replaces the string
  # SQL fragment; the redundant `.all` on the relation was dropped.)
  scope :admins, -> { where(admin: true) }

  # Finds or creates a user from an OmniAuth auth hash (Facebook/Twitter).
  # OAuth users never type a password, so a random one is generated.
  def self.from_omniauth(auth)
    where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
      user.email = auth.info.email
      # BUGFIX: the corrupted placeholder "<PASSWORD>_<PASSWORD>[0,20]" was
      # not valid Ruby; Devise's standard omniauth recipe assigns a random
      # friendly token here.
      user.password = Devise.friendly_token[0, 20]
      #user.person.last_name = auth.info.last_name # assuming the user model has a name
      #user.image = auth.info.image # assuming the user model has an image
    end
  end

  # Display name, falling back when no person record is associated.
  def name
    return "Unknown Person" if person.nil?
    person.full_name
  end

  # Pre-fills the email from a pending Facebook OmniAuth session so the
  # signup form doesn't ask for it again.
  def self.new_with_session(params, session)
    super.tap do |user|
      if data = session["devise.facebook_data"] && session["devise.facebook_data"]["extra"]["raw_info"]
        user.email = data["email"] if user.email.blank?
      end
    end
  end
end
|
nimbus-org/nimbus
|
src/test/java/jp/ossc/nimbus/service/http/httpclient/HttpRequestImplTest.java
|
package jp.ossc.nimbus.service.http.httpclient;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import jp.ossc.nimbus.core.ServiceManagerFactory;
import jp.ossc.nimbus.service.http.HttpClientFactory;
import jp.ossc.nimbus.service.http.HttpRequestCreateException;
import jp.ossc.nimbus.service.http.httpclient.HttpClientFactoryService.HttpClientImpl;
import junit.framework.TestCase;
public class HttpRequestImplTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(HttpRequestImplTest.class); }
    /** @param arg0 test case name, passed through to the JUnit base class. */
    public HttpRequestImplTest(String arg0) {
        super(arg0);
    }
    /**
     * Tests that header information is applied to an HttpRequest.
     * <p>
     * Conditions:
     * <ul>
     * <li>Load a definition file with the following content and create an
     * HttpClientFactoryService instance</li>
     * <li>RequestContentType: application/xml</li>
     * <li>RequestCharacterEncoding: Shift_JIS</li>
     * <li>RequestStreamConverterServiceName: #DataSetXMLConverter</li>
     * <li>ResponseStreamConverterServiceName: #ResponseStreamConverter</li>
     * <li>ResponseHeaders: ContentType=application/xml</li>
     * <li>Proxy: #localhost:8280</li>
     * <li>Define request information for the logical action name "login"</li>
     * <li>Run HttpClientFactoryService#createHttpClient() to create an HttpClient</li>
     * <li>Run HttpRequestImpl#createRequest(logical action name) to create an HttpRequest</li>
     * <li>Build the DataSet to send with the following content and set it via
     * HttpRequest#setObject()<BR>
     * schema ::name,java.lang.String,,,\n:age,int,,,<BR>
     * values :name=hoge,age=25</li>
     * <li>Call setContentType() on the created HttpRequest to set the ContentType header</li>
     * <li>Call setHeader()/addHeader() on the created HttpRequest to set header information</li>
     * <li>Run HttpClientImpl#executeRequest(request) with the created HttpRequest</li>
     * </ul>
     * Expectations:
     * <ul>
     * <li>The header set via setContentType() takes precedence</li>
     * <li>Header information set via setHeader()/addHeader() is reflected</li>
     * </ul>
     */
    public void testRequestSetHeader() {
        try {
            if (!ServiceManagerFactory
                    .loadManager("jp/ossc/nimbus/service/http/httpclient/service-clientTest2.xml")) {
                System.exit(-1);
            }
            final HttpClientFactory factory = (HttpClientFactory) ServiceManagerFactory
                    .getServiceObject("HttpClientFactory");
            HttpClientImpl client = (HttpClientImpl) factory.createHttpClient();
//            DataSet requestDs = new DataSet("Login");
//            requestDs.setHeaderSchema(
//                "UserInfo",
//                ":name,java.lang.String,,,\n"
//                 + ":age,int,,,"
//            );
//            Header userInfo = requestDs.getHeader("UserInfo");
//            userInfo.setProperty("name", "hoge");
//            userInfo.setProperty("age", 25);
            HttpRequestImpl request = (HttpRequestImpl)factory.createRequest("login");
//            request.setObject(requestDs);
            // Set header information
            request.setContentType("text/html");
            request.addHeader("Accept","text/html");
            request.addHeader("Accept","text/html");
            request.setHeader("Accept-Language","jp");
            // Verify setter/getter behavior
            request.setHttpVersion("1.1");
            assertEquals("1.1", request.getHttpVersion());
            request.setDoAuthentication(true);
            assertTrue(request.isDoAuthentication());
            request.setFollowRedirects(false);
            assertFalse(request.isFollowRedirects());
            request.setHttpMethodParam("TEST", "test");
            request.setHttpMethodParam("TEST1", "test1");
            assertEquals("test", request.getHttpMethodParam("TEST"));
            assertEquals("test1", request.getHttpMethodParam("TEST1"));
            assertTrue(request.getHttpMethodParamNameSet().contains("TEST"));
            assertTrue(request.getHttpMethodParamNameSet().contains("TEST1"));
            client.executeRequest(request);
            /* Verify the HTTP request data by inspecting the output file of
             * the proxy test program
             * (jp.ossc.nimbus.service.http.proxy.TestHttpProcessService).
             */
            BufferedReader br = new BufferedReader(
                    new FileReader("target/temp/jp/ossc/nimbus/service/http/httpclient/help_output.txt"));
            String s;
            StringBuffer sb = new StringBuffer();
            // Verify request headers
            while((s = br.readLine()) != null){
                if(s.startsWith("Content-Type:")){
                    assertTrue(s.endsWith("text/html;charset=Shift_JIS"));
                }
                if(s.startsWith("Accept:")){
                    assertTrue(s.endsWith("text/html"));
                }
                if(s.startsWith("Accept-Language:")){
                    assertTrue(s.endsWith("jp"));
                }
                sb.append(s);
            }
            br.close();
            // Verify DataSet content
            assertTrue(sb.toString().endsWith("sectionCode=022&account=059641&password=<PASSWORD>"));
        } catch (HttpRequestCreateException e) {
            e.printStackTrace();
            fail("例外発生");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            fail("例外発生");
        } catch (IOException e) {
            e.printStackTrace();
            fail("例外発生");
        } finally {
            ServiceManagerFactory
                    .unloadManager("jp/ossc/nimbus/service/http/httpclient/service-clientTest2.xml");
        }
    }
/**
 * Test that sets parameter information and an input stream on an HttpRequest.
 * <p>
 * Conditions:
 * <ul>
 * <li>Load a definition file with the following content and create an HttpClientFactoryService instance</li>
 * <li>RequestContentType: application/xml</li>
 * <li>RequestCharacterEncoding: Shift_JIS</li>
 * <li>RequestStreamConverterServiceName: #DataSetXMLConverter</li>
 * <li>ResponseStreamConverterServiceName: #ResponseStreamConverter</li>
 * <li>ResponseHeaders: ContentType=application/xml</li>
 * <li>Proxy: #localhost:8280</li>
 * <li>Define request information for the logical action name "login"</li>
 * <li>Invoke HttpClientFactoryService#createHttpClient() to create an HttpClient</li>
 * <li>Invoke HttpRequestImpl#createRequest(logical action name) to create an HttpRequest</li>
 * <li>Create the DataSet to send as an XML input stream and set it via HttpRequest#setInputStream()<BR>
 * <li>Call setParameter()/setParameters() on the created HttpRequest to set parameter information</li>
 * </ul>
 * Expected:
 * <ul>
 * <li>The configured parameters and the input stream are reflected correctly</li>
 * </ul>
 */
public void testRequestSetParamQuely() {
    try {
        if (!ServiceManagerFactory
            .loadManager("jp/ossc/nimbus/service/http/httpclient/service-clientTest2.xml")) {
            System.exit(-1);
        }
        final HttpClientFactory factory = (HttpClientFactory) ServiceManagerFactory
            .getServiceObject("HttpClientFactory");
        HttpClientImpl client = (HttpClientImpl) factory.createHttpClient();
        // Set the input stream to be sent as the request body.
        String inxml = "TEST1234567890";
        InputStream is = new ByteArrayInputStream(inxml.getBytes());
        HttpRequestImpl request = (HttpRequestImpl)factory.createRequest("login");
        request.setInputStream(is);
        // Set request parameters.
        request.setContentType("text/html");
        request.setParameter("nameA","valueA");
        request.setParameter("nameA","valueB");
        String[] vals = new String[]{"valueB1","valueB2"};
        request.setParameters("nameB", vals);
        // Verify the parameters. Note: after setting "nameA" twice,
        // getParameter still yields the first value, as asserted below.
        assertEquals("valueA", request.getParameter("nameA"));
        String[] getvals = request.getParameters("nameB");
        assertEquals(vals[0], getvals[0]);
        assertEquals(vals[1], getvals[1]);
        client.executeRequest(request);
        // Verify that the input stream was transmitted correctly.
        /* Check the contents of the file written by the proxy test program
         * (jp.ossc.nimbus.service.http.proxy.TestHttpProcessService) and
         * validate the HTTP request data.
         */
        BufferedReader br = new BufferedReader(
                new FileReader("target/temp/jp/ossc/nimbus/service/http/httpclient/help_output.txt"));
        String s;
        StringBuffer sb = new StringBuffer();
        // Collect the recorded request for inspection.
        while((s = br.readLine()) != null){
            sb.append(s);
        }
        br.close();
        assertTrue(sb.toString().endsWith("TEST1234567890"));
    } catch (HttpRequestCreateException e) {
        e.printStackTrace();
        fail("例外発生");
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        fail("例外発生");
    } catch (IOException e) {
        e.printStackTrace();
        fail("例外発生");
    } finally {
        ServiceManagerFactory
            .unloadManager("jp/ossc/nimbus/service/http/httpclient/service-clientTest2.xml");
    }
}
}
|
josh33901/F1-SDK
|
DynamicNetvars.hh
|
<reponame>josh33901/F1-SDK
#pragma once
#include <memory>
#include <unordered_map>
#include <vector>
#include "baseHeaders.hh"
#include "dt_recv2.hh"
// TODO: move into netvar class
// Tree of networked-variable ("netvar") offsets keyed by datatable and
// property name. Built once from the client's RecvTable hierarchy via
// init(), then queried through get_offset()/get_prop().
class netvar_tree
{
    struct node;
    using map_type = std::unordered_map<std::string, std::shared_ptr<node>>;

    // One entry per property: its byte offset relative to the enclosing
    // table, the backing engine RecvProp, and (for table-typed props) the
    // child property map.
    struct node
    {
        node (int offset, RecvProp *p)
            : offset (offset), prop (p)
        {
        }
        map_type nodes;
        int offset;
        RecvProp *prop;
    };

    // Top level of the tree: one branch per datatable name.
    map_type nodes;

public:
    // netvar_tree ( );
    void init ();

private:
    void populate_nodes (class RecvTable *recv_table, map_type *map);

    /**
     * get_offset_recursive - Return the offset of the final node
     * @map: Node map to scan
     * @acc: Offset accumulator
     * @name: Netvar name to search for
     *
     * Get the offset of the last netvar from map and return the sum of it and accum
     */
    int get_offset_recursive (map_type &map, int acc, const char *name)
    {
        return acc + map[name]->offset;
    }

    /**
     * get_offset_recursive - Recursively grab an offset from the tree
     * @map: Node map to scan
     * @acc: Offset accumulator
     * @name: Netvar name to search for
     * @args: Remaining netvar names
     *
     * Perform tail recursion with the nodes of the specified branch of the tree passed for map
     * and the offset of that branch added to acc
     */
    template <typename... args_t>
    int get_offset_recursive (map_type &map, int acc, const char *name, args_t... args)
    {
        const auto &node = map[name];
        return get_offset_recursive (node->nodes, acc + node->offset, args...);
    }

    // Base case: resolve the RecvProp of the last name in the chain.
    RecvProp *get_prop_recursive (map_type &map, const char *name)
    {
        return map[name]->prop;
    }

    // Recursive case: descend into the named branch, then continue with the
    // remaining names.
    template <typename... args_t>
    RecvProp *get_prop_recursive (map_type &map, const char *name, args_t... args)
    {
        const auto &node = map[name];
        return get_prop_recursive (node->nodes, args...);
    }

public:
    /**
     * get_offset - Get the offset of a netvar given a list of branch names
     * @name: Top level datatable name
     * @args: Remaining netvar names
     *
     * Initiate a recursive search down the branch corresponding to the specified datable name
     */
    template <typename... args_t>
    int get_offset (const char *name, args_t... args)
    {
        const auto &node = nodes[name];
        return get_offset_recursive (node->nodes, node->offset, args...);
    }

    // Like get_offset, but returns the engine RecvProp descriptor instead of
    // the accumulated byte offset.
    template <typename... args_t>
    RecvProp *get_prop (const char *name, args_t... args)
    {
        const auto &node = nodes[name];
        return get_prop_recursive (node->nodes, args...);
    }
};
// Global netvar tree shared by all Netvar<T> instances.
extern netvar_tree gNetvars;

// Typed accessor for a single networked variable. The byte offset is
// resolved once at construction from gNetvars and cached; GetValue/SetValue
// then apply it to an arbitrary entity base address.
template <typename T>
class Netvar
{
    // Cached byte offset of the variable within its entity.
    DWORD off;
    using sT = typename std::remove_reference<T>::type;

public:
    template <typename... args_t>
    Netvar (args_t... a)
    {
        // just assume these wont be accessed until after netvars has been inited
        // TODO: adding a onetime check here will add very little overhead as this only gets called
        // once per netvar
        off = gNetvars.get_offset (a...);
    }

    // Variant adding a fixed extra offset on top of the resolved one.
    template <typename... args_t>
    Netvar (int offset, args_t... a)
    {
        off = gNetvars.get_offset (a...) + offset;
    }

    // Convenience overload: accepts any pointer-like base and forwards to
    // the PVOID overload below.
    template <typename B>
    T &GetValue (B base) const
    {
        return GetValue ((PVOID)base);
    }

    // Reinterpret (base + off) as a reference to the stored type.
    T &GetValue (PVOID base) const
    {
        return *reinterpret_cast<sT *> ((DWORD)base + (DWORD)off);
    }

    template <typename B>
    void SetValue (B base, T val) const
    {
        return SetValue ((PVOID)base, val);
    }

    // Write val at (base + off).
    void SetValue (const PVOID base, T val) const
    {
        *reinterpret_cast<sT *> ((DWORD) (base) + ((DWORD) (off))) = val;
    }

    DWORD GetOffset () const
    {
        return off;
    }
};
// Declare a function-local, lazily-resolved netvar accessor named `name`.
#define NETVAR(name, type, ...) static Netvar<type> name (__VA_ARGS__)
// Declare an accessor and immediately return the value read from `base`.
#define NETVAR_RETURN(type, base, ...) \
  NETVAR (n, type, __VA_ARGS__); \
  return n.GetValue (base)
// As NETVAR_RETURN, but reads relative to `this` and yields a reference.
#define NETVAR_RETURN_THIS(type, ...) \
  NETVAR (n, type &, __VA_ARGS__); \
  return n.GetValue (this)
// Variants that add a fixed extra offset on top of the resolved one.
#define NETVAR_OFF(name, type, offset, ...) static Netvar<type> name (offset, __VA_ARGS__)
#define NETVAR_OFF_RETURN(type, base, offset, ...) \
  NETVAR_OFF (n, type, offset, __VA_ARGS__); \
  return n.GetValue (base)
|
rsumnerz/refined-github
|
source/features/copy-on-y.js
|
<filename>source/features/copy-on-y.js
import select from 'select-dom';
import ghInjection from 'github-injection';
import copyToClipboard from 'copy-text-to-clipboard';
import * as pageDetect from '../libs/page-detect';
// Keyboard handler: pressing `y` (outside of text inputs) copies the
// permanent link of the current file, keeping any line-hash fragment.
const handler = event => {
	const {key, target} = event;
	if (key !== 'y' || target.nodeName === 'INPUT') {
		return;
	}
	const {href} = select('.js-permalink-shortcut');
	copyToClipboard(href + location.hash);
};
// Feature entry point, re-evaluated on every GitHub AJAX navigation.
// The listener is only active on single-file pages; elsewhere it is
// detached, so at most one copy of `handler` is ever registered.
export default function () {
	ghInjection(() => {
		const method = pageDetect.isSingleFile() ? 'addEventListener' : 'removeEventListener';
		window[method]('keyup', handler);
	});
}
|
Ankita-Palekar/mojito
|
tests/func/common/testacpartialrenderclient.js
|
<reponame>Ankita-Palekar/mojito
/*
* This is a basic func test for a Common application.
*/
// Functional test: clicking the button triggers a partial render on the
// client and the rendered output appears in #subdata.
YUI.add('common-testacpartialinvokeclient-tests', function (Y) {
	var suite = new Y.Test.Suite("Common: ACPartailRenderClient");
	suite.add(new Y.Test.Case({
		"test ACPartailRenderClient": function() {
			var that = this;
			Y.one('#partialRenderButton').simulate('click');
			// Wait up to 4s for the asynchronous partial render to land.
			// NOTE(review): match() returns an array; the assertion relies on
			// areEqual's loose comparison coercing it to a string — confirm.
			that.wait(function(){
				Y.Assert.areEqual('this is my data: data not from url', Y.one('#subdata').get('innerHTML').match(/this is my data: data not from url/gi));
			}, 4000);
		}
	}));
	Y.Test.Runner.add(suite);
}, '0.0.1', { requires: [
	'node', 'node-event-simulate', 'test', 'console'
]});
|
AhmedNasser1601/ProblemSolving
|
URI Judge/URI Judge/1154.cpp
|
//#include <iostream>
//#include <stdio.h>
//#include <iomanip>
//#include <string>
//
//using namespace std;
//
//int main() {
// int x = 0, y = -1;
// float z = 0;
//
// while (x >= 0)
// {
// cin >> x;
//
// if (x >= 0)
// z += x;
//
// y++;
// }
//
// cout << fixed << setprecision(2) << z / y << endl;
//
// return 0;
//}
|
philgamevy/loupe
|
test/objects.js
|
<filename>test/objects.js
import loupe from '../index'
import { expect } from 'chai'
// Run the same suite twice: once against plain objects, once against
// prototype-less objects (Object.create(null)) rebuilt from the fixture.
for (const [suite, inspect] of Object.entries({
  objects: loupe,
  'objects (Object.create(null))': (obj, ...rest) => loupe(Object.assign(Object.create(null), obj), ...rest),
})) {
  describe(suite, () => {
    it('returns `{}` for empty objects', () => {
      expect(inspect({})).to.equal('{}')
    })
    it('quotes a key if it contains special chars', () => {
      expect(inspect({ 'a.b': 1 })).to.equal("{ 'a.b': 1 }")
      expect(inspect({ 'a b': 1 })).to.equal("{ 'a b': 1 }")
    })
    it('quotes a key if it is empty', () => {
      expect(inspect({ '': 1 })).to.equal("{ '': 1 }")
    })
    it('quotes a key if it contains a single quote', () => {
      expect(inspect({ "'": 1 })).to.equal("{ '\\'': 1 }")
    })
    it('quotes a key if it contains a double quote', () => {
      expect(inspect({ '"': 1 })).to.equal("{ '\"': 1 }")
    })
    // Circular-reference detection is only exercised for plain objects:
    // the null-prototype wrapper copies the fixture, breaking the cycle.
    if (suite === 'objects') {
      it('detects circular references', () => {
        const main = {}
        main.a = main
        expect(inspect(main)).to.equal('{ a: [Circular] }')
      })
    }
    it('returns `{}` for empty objects with an anonoymous prototype', () => {
      expect(inspect(Object.create({ a: 1 }))).to.equal('{}')
    })
    it("shows objects' own properties for objects with an anonoymous prototype", () => {
      const obj = Object.create({ a: 1 })
      obj.b = 2
      expect(inspect(obj)).to.equal('{ b: 2 }')
    })
    // Sweep every truncate length from 20 down to 1 to pin the exact
    // thresholds at which entries collapse into the "…(n)" placeholder.
    describe('truncate', () => {
      it('returns the full representation when truncate is over string length', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 20 })).to.equal('{ a: 1, b: 2, c: 3 }')
      })
      it('truncates object values longer than truncate (19)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 19 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (18)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 18 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (17)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 17 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (16)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 16 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (15)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 15 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (14)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 14 })).to.equal('{ a: 1, …(2) }')
      })
      it('truncates object values longer than truncate (13)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 13 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (12)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 12 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (11)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 11 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (10)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 10 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (9)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 9 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (8)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 8 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (7)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 7 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (6)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 6 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (5)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 5 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (4)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 4 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (3)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 3 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (2)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 2 })).to.equal('{ …(3) }')
      })
      it('truncates object values longer than truncate (1)', () => {
        expect(inspect({ a: 1, b: 2, c: 3 }, { truncate: 1 })).to.equal('{ …(3) }')
      })
    })
  })
}
|
raghavkedia/CS308_VOOGASalad
|
src/engine/backend/game_object/Mode.java
|
/**
*
* @author mario_oliver93
*
*/
package engine.backend.game_object;
import java.util.HashMap;
import java.util.Map;
/**
 * A named game mode: an ordered collection of levels together with the
 * statistics gathered while playing them.
 */
public class Mode {

	// Maps the index at which a level is played to the Level object itself.
	private Map<Integer, Level> myLevels;
	private String myName;
	private GameStatistics myGameStatistics;
	private int index;

	/**
	 * Authoring Environment Constructor.
	 */
	public Mode(String name, GameStatistics stats, Map<Integer, Level> levels) {
		myName = name;
		myGameStatistics = stats;
		myLevels = levels;
	}

	/**
	 * Engine Testing Constructor. Starts with an empty level map.
	 */
	public Mode(String name) {
		myName = name;
		myLevels = new HashMap<Integer, Level>();
	}

	public Map<Integer, Level> getLevels() {
		return myLevels;
	}

	public void setLevelInMap(int levelIndex, Level level) {
		myLevels.put(levelIndex, level);
	}

	public GameStatistics getGameStatistics() {
		return myGameStatistics;
	}

	public void setGameStatistics(GameStatistics stats) {
		myGameStatistics = stats;
	}

	public String getName() {
		return myName;
	}

	/**
	 * Engine Testing Method. Registers the level under its own index.
	 */
	public void addLevel(Level level) {
		myLevels.put(level.getIndex(), level);
	}

	public int getIndex() {
		return index;
	}

	public void setIndex(int index) {
		this.index = index;
	}

	@Override
	public String toString() {
		return "Mode [levels=" + myLevels + "]";
	}
}
|
sidorovpavel/writing-javascript-actions
|
.github/actions/joke-action/node_modules/lodash/fp/bindAll.js
|
// Auto-generated lodash/fp shim: wraps `_.bindAll` with the fp `convert`
// helper (auto-curried, iteratee-first/data-last calling convention).
var convert = require('lodash/fp/convert'),
    func = convert('bindAll', require('lodash/bindAll'));

// Expose the fp placeholder so partial application with `_` works.
func.placeholder = require('lodash/fp/placeholder');
module.exports = func;
|
windrunner123/sentinel-cpp
|
sentinel-core/flow/flow_rule.h
|
#pragma once
#include <functional>
#include <memory>
#include <string>
#include <vector>
#include "sentinel-core/common/constants.h"
#include "sentinel-core/common/rule.h"
#include "sentinel-core/flow/flow_rule_constants.h"
namespace Sentinel {
namespace Flow {
// A single flow-control rule for a resource. Field names in the trailing
// comments map to the corresponding Sentinel (Java) rule attributes.
struct FlowRule : public Rule {
 public:
  FlowRule() = default;
  virtual ~FlowRule() = default;

  explicit FlowRule(const std::string& resource)
      : resource_(resource), limit_origin_(Constants::kLimitOriginDefault) {}
  FlowRule(const std::string& resource, const std::string& limit_origin)
      : resource_(resource), limit_origin_(limit_origin) {}

  // Read accessors.
  const std::string& resource() const { return resource_; }
  const std::string& limit_origin() const { return limit_origin_; }
  FlowMetricType metric_type() const { return metric_type_; }
  double count() const { return count_; }
  FlowRelationStrategy strategy() const { return strategy_; }
  const std::string& ref_resource() const { return ref_resource_; }
  FlowControlBehavior control_behavior() const { return control_behavior_; }
  int32_t warm_up_period_sec() const { return warm_up_period_sec_; }
  int32_t max_queueing_time_ms() const { return max_queueing_time_ms_; }
  bool cluster_mode() const { return cluster_mode_; }

  // Write accessors.
  void set_resource(const std::string& resource) { resource_ = resource; }
  void set_limit_origin(const std::string& limit_origin) {
    limit_origin_ = limit_origin;
  }
  // C-string overload: a null pointer leaves the current value unchanged.
  void set_limit_origin(const char* limit_origin) {
    if (limit_origin != nullptr) {
      limit_origin_ = limit_origin;
    }
  }
  void set_metric_type(FlowMetricType metric_type) {
    metric_type_ = metric_type;
  }
  void set_count(double count) { count_ = count; }
  void set_strategy(FlowRelationStrategy strategy) { strategy_ = strategy; }
  void set_ref_resource(const std::string& r) { ref_resource_ = r; }
  void set_control_behavior(FlowControlBehavior cb) { control_behavior_ = cb; }
  void set_warm_up_period_sec(int32_t w) { warm_up_period_sec_ = w; }
  void set_max_queueing_time_ms(int32_t q) { max_queueing_time_ms_ = q; }
  void set_cluster_mode(bool cluster_mode) { cluster_mode_ = cluster_mode; }

  bool operator==(const FlowRule& rule) const;
  std::string ToString() const;

 private:
  std::string resource_;  // resource
  std::string limit_origin_{Constants::kLimitOriginDefault};  // limitApp
  FlowMetricType metric_type_{FlowMetricType::kQps};  // grade
  double count_ = 0;  // count
  FlowRelationStrategy strategy_{FlowRelationStrategy::kDirect};  // strategy
  FlowControlBehavior control_behavior_{
      FlowControlBehavior::kReject};  // controlBehavior
  std::string ref_resource_{};  // refResource
  int32_t warm_up_period_sec_ = 10;  // warmUpPeriodSec
  int32_t max_queueing_time_ms_ = 500;  // maxQueueingTimeMs
  bool cluster_mode_ = false;  // clusterMode
};
using FlowRulePtr = std::shared_ptr<FlowRule>;
using FlowRuleList = std::vector<FlowRule>;
// Hash functor for FlowRule: folds the identity-relevant fields into a
// single value using the conventional 31-based polynomial accumulation.
struct FlowRuleHash {
  std::size_t operator()(const FlowRule& rule) const noexcept {
    constexpr std::size_t kPrime = 31;
    std::size_t h = std::hash<std::string>{}(rule.resource());
    // A default limit origin contributes nothing, so rules that only
    // differ by "default vs. unset" hash identically.
    const std::string& origin = rule.limit_origin();
    if (!origin.empty() && origin != Constants::kLimitOriginDefault) {
      h = kPrime * h + std::hash<std::string>{}(origin);
    }
    h = kPrime * h + static_cast<int>(rule.metric_type());
    h = kPrime * h + std::hash<double>{}(rule.count());
    h = kPrime * h + static_cast<int>(rule.strategy());
    h = kPrime * h + static_cast<int>(rule.control_behavior());
    h = kPrime * h + std::hash<std::string>{}(rule.ref_resource());
    h = kPrime * h + rule.warm_up_period_sec();
    h = kPrime * h + rule.max_queueing_time_ms();
    h = kPrime * h + std::hash<bool>{}(rule.cluster_mode());
    return h;
  }
};
} // namespace Flow
} // namespace Sentinel
|
pcl/spark-js-sdk
|
packages/plugin-encryption/src/kms-error.js
|
<reponame>pcl/spark-js-sdk
/**!
*
* Copyright (c) 2015-2016 Cisco Systems, Inc. See LICENSE file.
* @private
*/
import extendError from 'extend-error';
/**
 * Error subtype for failures reported by the KMS. Built with `extend-error`;
 * `parseFn` receives the raw response, attaches the structured fields as
 * non-enumerable properties, and returns the human-readable reason, which
 * becomes the Error message.
 * @class
 */
const KmsError = extendError({
  /**
   * @param {Object} body
   * @returns {string}
   */
  parseFn(body) {
    // Responses may arrive wrapped ({body: {...}}) or bare.
    body = body.body || body;
    // Attach the raw payload and key fields without making them enumerable.
    Object.defineProperties(this, {
      body: {
        enumerable: false,
        value: body
      },
      reason: {
        enumerable: false,
        value: body.reason
      },
      requestId: {
        enumerable: false,
        value: body.requestId
      },
      status: {
        enumerable: false,
        value: body.status
      }
    });
    return body.reason;
  },
  properties: {
    defaultMessage: `An error was received while communicating with the KMS`
  },
  subTypeName: `KmsError`
});

export default KmsError;
|
bobgeis/js-space-rocks
|
src/reducers/game-reducer.js
|
import { Map } from 'immutable';
import undoable from 'redux-undo';
import * as types from '../action-types';
import * as modes from '../mode-types';
import { initialStore } from '../store';
import { updateGame } from '../update/game-update';
// Root reducer for the game state (an Immutable.Map).
const gameReducer = (state = Map(), action) => {
	if (action.type === types.TICK) {
		return updateGame(state, action.keys);
	}
	if (action.type === types.INIT_STORE) {
		return state.merge(initialStore);
	}
	return state;
};
const undoConfig = {
	// Snapshot only while actively playing, and only once per second —
	// the game ticks 60 times per second.
	filter: (action, currentState, previousHistory) =>
		currentState.get('mode') === modes.PLAY &&
		currentState.get('ticks') % 60 === 0,
	// 14 snapshots total: the present plus a past of length 13.
	limit: 14
};

export default undoable(gameReducer, undoConfig);
|
marco-brandizi/ondex-knet-builder
|
ondex-desktop/ovtk2/src/main/java/net/sourceforge/ondex/ovtk2/ui/dialog/DialogDataSource.java
|
package net.sourceforge.ondex.ovtk2.ui.dialog;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.beans.PropertyVetoException;
import javax.swing.BorderFactory;
import javax.swing.GroupLayout;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.JTextPane;
import javax.swing.LayoutStyle;
import javax.swing.border.TitledBorder;
import net.sourceforge.ondex.core.DataSource;
import net.sourceforge.ondex.core.ONDEXGraph;
import net.sourceforge.ondex.ovtk2.config.Config;
import net.sourceforge.ondex.ovtk2.ui.OVTK2Dialog;
/**
 * DataSource properties dialog. Supports two modes: creating a new
 * DataSource (editable fields, Apply button) and viewing an existing one
 * (read-only fields, Cancel only).
 *
 * @author taubertj
 *
 */
public class DialogDataSource extends OVTK2Dialog {

	private static final String APPLY = "apply";

	private static final String CANCEL = "cancel";

	// generated
	private static final long serialVersionUID = 1598605227584612734L;

	// current AbstractONDEXGraph
	private ONDEXGraph aog = null;

	// current DialogConcept
	private DialogConcept conceptDialog = null;

	// id input field
	private JTextField id = new JTextField();

	// fullname input field
	private JTextField fullname = new JTextField();

	// description input field
	private JTextPane description = new JTextPane();

	/**
	 * Constructs user input to add a DataSource.
	 *
	 * @param aog
	 *            AbstractONDEXGraph to add to
	 * @param conceptDialog
	 *            Concept Properties Dialog, notified of the new id on Apply
	 */
	public DialogDataSource(ONDEXGraph aog, DialogConcept conceptDialog) {
		super("Dialog.DataSource.Title", "Properties16.gif");

		this.aog = aog;
		this.conceptDialog = conceptDialog;

		this.getContentPane().setLayout(new BorderLayout());
		this.getContentPane().add(makeProperties(), BorderLayout.CENTER);
		this.getContentPane().add(makeButtonsPanel("Dialog.DataSource.Apply", "Dialog.DataSource.Cancel"), BorderLayout.SOUTH);
		this.pack();
	}

	/**
	 * Constructs user input to view an existing DataSource (read-only).
	 *
	 * @param aog
	 *            AbstractONDEXGraph to add to
	 * @param dataSource
	 *            DataSource to use
	 */
	public DialogDataSource(ONDEXGraph aog, DataSource dataSource) {
		super("Dialog.DataSource.Title", "Properties16.gif");

		this.aog = aog;

		// set existing information
		id.setText(dataSource.getId());
		fullname.setText(dataSource.getFullname());
		description.setText(dataSource.getDescription());

		// set everything to disabled
		id.setEditable(false);
		fullname.setEditable(false);
		description.setEnabled(false);

		this.getContentPane().setLayout(new BorderLayout());
		this.getContentPane().add(makeProperties(), BorderLayout.CENTER);
		// no Apply button in view-only mode
		this.getContentPane().add(makeButtonsPanel(null, "Dialog.DataSource.Cancel"), BorderLayout.SOUTH);
		this.pack();
	}

	/**
	 * Creates the properties panel for cv.
	 *
	 * @return JPanel with id, fullname and description fields in a GroupLayout
	 */
	private JPanel makeProperties() {

		// init properties layout
		JPanel properties = new JPanel();
		GroupLayout layout = new GroupLayout(properties);
		properties.setLayout(layout);

		TitledBorder propertiesBorder = BorderFactory.createTitledBorder(Config.language.getProperty("Dialog.DataSource.DataSource"));
		properties.setBorder(propertiesBorder);

		// DataSource id
		JLabel idLabel = new JLabel(Config.language.getProperty("Dialog.DataSource.ID"));
		properties.add(idLabel);
		id.setPreferredSize(new Dimension(this.getFieldWidth(), this.getFieldHeight()));
		// highlighted background marks the field as required
		id.setBackground(this.getRequiredColor());
		properties.add(id);

		// DataSource fullname
		JLabel fullnameLabel = new JLabel(Config.language.getProperty("Dialog.DataSource.FullName"));
		properties.add(fullnameLabel);
		fullname.setPreferredSize(new Dimension(this.getFieldWidth(), this.getFieldHeight()));
		properties.add(fullname);

		// DataSource description
		JLabel descriptionLabel = new JLabel(Config.language.getProperty("Dialog.DataSource.Description"));
		properties.add(descriptionLabel);
		JScrollPane scroll = new JScrollPane(description);
		scroll.setPreferredSize(new Dimension(this.getFieldWidth(), this.getFieldHeight() * 2));
		properties.add(scroll);

		// two-column GroupLayout: labels left, input fields right
		layout.setHorizontalGroup(layout.createSequentialGroup().addGroup(layout.createParallelGroup().addComponent(idLabel).addComponent(fullnameLabel).addComponent(descriptionLabel)).addPreferredGap(LayoutStyle.ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE).addGroup(layout.createParallelGroup().addComponent(id, 0, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE).addComponent(fullname, 0, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE).addComponent(scroll, 0, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)));

		layout.setVerticalGroup(layout.createSequentialGroup().addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE).addComponent(idLabel).addComponent(id)).addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE).addComponent(fullnameLabel).addComponent(fullname)).addGroup(layout.createParallelGroup(GroupLayout.Alignment.BASELINE).addComponent(descriptionLabel).addComponent(scroll)));

		return properties;
	}

	/**
	 * Validate data entry: the id must be non-blank, contain no spaces and
	 * must not already exist in the graph's metadata.
	 *
	 * @return true if data is valid
	 */
	private boolean validateEntry() {
		if (id.getText().trim().length() == 0 || id.getText().contains(" ")) {
			JOptionPane.showInternalMessageDialog(this, Config.language.getProperty("Dialog.DataSource.InvalidID"), Config.language.getProperty("Dialog.DataSource.InvalidTitle"), JOptionPane.ERROR_MESSAGE);
			return false;
		} else if (aog.getMetaData().checkDataSource(id.getText())) {
			JOptionPane.showInternalMessageDialog(this, Config.language.getProperty("Dialog.DataSource.DuplicateID"), Config.language.getProperty("Dialog.DataSource.DuplicateTitle"), JOptionPane.ERROR_MESSAGE);
			return false;
		}
		return true;
	}

	/**
	 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
	 */
	public void actionPerformed(ActionEvent arg0) {
		String cmd = arg0.getActionCommand();

		// create new DataSource
		if (cmd.equals(APPLY)) {
			if (validateEntry()) {
				aog.getMetaData().createDataSource(id.getText(), fullname.getText(), description.getText());
				try {
					this.setClosed(true);
				} catch (PropertyVetoException e) {
					// ignore
				}
				// push the freshly created id back into the concept dialog
				conceptDialog.initDataSource(id.getText());
			}
		}

		// cancel dialog
		else if (cmd.equals(CANCEL)) {
			try {
				this.setClosed(true);
			} catch (PropertyVetoException e) {
				// ignore
			}
		}
	}
}
|
Aurelien7877/noodl-mapbox-module
|
node_modules/turf-jsts/src/org/locationtech/jts/geom/LineSegment.js
|
import NotRepresentableException from '../algorithm/NotRepresentableException'
import CGAlgorithms from '../algorithm/CGAlgorithms'
import Coordinate from './Coordinate'
import Double from '../../../../java/lang/Double'
import Comparable from '../../../../java/lang/Comparable'
import RobustLineIntersector from '../algorithm/RobustLineIntersector'
import HCoordinate from '../algorithm/HCoordinate'
import Serializable from '../../../../java/io/Serializable'
export default class LineSegment {
constructor () {
this.p0 = null
this.p1 = null
if (arguments.length === 0) {
this.p0 = new Coordinate()
this.p1 = new Coordinate()
} else if (arguments.length === 1) {
const ls = arguments[0]
this.p0 = new Coordinate(ls.p0)
this.p1 = new Coordinate(ls.p1)
} else if (arguments.length === 2) {
this.p0 = arguments[0]
this.p1 = arguments[1]
} else if (arguments.length === 4) {
const x0 = arguments[0]
const y0 = arguments[1]
const x1 = arguments[2]
const y1 = arguments[3]
this.p0 = new Coordinate(x0, y0)
this.p1 = new Coordinate(x1, y1)
}
}
minX () {
return Math.min(this.p0.x, this.p1.x)
}
orientationIndex () {
if (arguments[0] instanceof LineSegment) {
let seg = arguments[0]
var orient0 = CGAlgorithms.orientationIndex(this.p0, this.p1, seg.p0)
var orient1 = CGAlgorithms.orientationIndex(this.p0, this.p1, seg.p1)
if (orient0 >= 0 && orient1 >= 0) return Math.max(orient0, orient1)
if (orient0 <= 0 && orient1 <= 0) return Math.max(orient0, orient1)
return 0
} else if (arguments[0] instanceof Coordinate) {
let p = arguments[0]
return CGAlgorithms.orientationIndex(this.p0, this.p1, p)
}
}
toGeometry (geomFactory) {
return geomFactory.createLineString([this.p0, this.p1])
}
isVertical () {
return this.p0.x === this.p1.x
}
equals (o) {
if (!(o instanceof LineSegment)) {
return false
}
var other = o
return this.p0.equals(other.p0) && this.p1.equals(other.p1)
}
intersection (line) {
var li = new RobustLineIntersector()
li.computeIntersection(this.p0, this.p1, line.p0, line.p1)
if (li.hasIntersection()) return li.getIntersection(0)
return null
}
project () {
if (arguments[0] instanceof Coordinate) {
let p = arguments[0]
if (p.equals(this.p0) || p.equals(this.p1)) return new Coordinate(p)
var r = this.projectionFactor(p)
var coord = new Coordinate()
coord.x = this.p0.x + r * (this.p1.x - this.p0.x)
coord.y = this.p0.y + r * (this.p1.y - this.p0.y)
return coord
} else if (arguments[0] instanceof LineSegment) {
let seg = arguments[0]
var pf0 = this.projectionFactor(seg.p0)
var pf1 = this.projectionFactor(seg.p1)
if (pf0 >= 1.0 && pf1 >= 1.0) return null
if (pf0 <= 0.0 && pf1 <= 0.0) return null
var newp0 = this.project(seg.p0)
if (pf0 < 0.0) newp0 = this.p0
if (pf0 > 1.0) newp0 = this.p1
var newp1 = this.project(seg.p1)
if (pf1 < 0.0) newp1 = this.p0
if (pf1 > 1.0) newp1 = this.p1
return new LineSegment(newp0, newp1)
}
}
normalize () {
if (this.p1.compareTo(this.p0) < 0) this.reverse()
}
angle () {
return Math.atan2(this.p1.y - this.p0.y, this.p1.x - this.p0.x)
}
getCoordinate (i) {
if (i === 0) return this.p0
return this.p1
}
distancePerpendicular (p) {
return CGAlgorithms.distancePointLinePerpendicular(p, this.p0, this.p1)
}
minY () {
return Math.min(this.p0.y, this.p1.y)
}
midPoint () {
return LineSegment.midPoint(this.p0, this.p1)
}
projectionFactor (p) {
if (p.equals(this.p0)) return 0.0
if (p.equals(this.p1)) return 1.0
var dx = this.p1.x - this.p0.x
var dy = this.p1.y - this.p0.y
var len = dx * dx + dy * dy
if (len <= 0.0) return Double.NaN
var r = ((p.x - this.p0.x) * dx + (p.y - this.p0.y) * dy) / len
return r
}
closestPoints (line) {
var intPt = this.intersection(line)
if (intPt !== null) {
return [intPt, intPt]
}
var closestPt = new Array(2).fill(null)
var minDistance = Double.MAX_VALUE
var dist = null
var close00 = this.closestPoint(line.p0)
minDistance = close00.distance(line.p0)
closestPt[0] = close00
closestPt[1] = line.p0
var close01 = this.closestPoint(line.p1)
dist = close01.distance(line.p1)
if (dist < minDistance) {
minDistance = dist
closestPt[0] = close01
closestPt[1] = line.p1
}
var close10 = line.closestPoint(this.p0)
dist = close10.distance(this.p0)
if (dist < minDistance) {
minDistance = dist
closestPt[0] = this.p0
closestPt[1] = close10
}
var close11 = line.closestPoint(this.p1)
dist = close11.distance(this.p1)
if (dist < minDistance) {
minDistance = dist
closestPt[0] = this.p1
closestPt[1] = close11
}
return closestPt
}
closestPoint (p) {
var factor = this.projectionFactor(p)
if (factor > 0 && factor < 1) {
return this.project(p)
}
var dist0 = this.p0.distance(p)
var dist1 = this.p1.distance(p)
if (dist0 < dist1) return this.p0
return this.p1
}
maxX () {
return Math.max(this.p0.x, this.p1.x)
}
getLength () {
return this.p0.distance(this.p1)
}
compareTo (o) {
var other = o
var comp0 = this.p0.compareTo(other.p0)
if (comp0 !== 0) return comp0
return this.p1.compareTo(other.p1)
}
reverse () {
var temp = this.p0
this.p0 = this.p1
this.p1 = temp
}
equalsTopo (other) {
return this.p0.equals(other.p0) &&
(this.p1.equals(other.p1) || this.p0.equals(other.p1)) &&
this.p1.equals(other.p0)
}
lineIntersection (line) {
try {
var intPt = HCoordinate.intersection(this.p0, this.p1, line.p0, line.p1)
return intPt
} catch (ex) {
if (ex instanceof NotRepresentableException) {} else throw ex
} finally {}
return null
}
maxY () {
return Math.max(this.p0.y, this.p1.y)
}
  /**
   * Computes the point lying segmentLengthFraction along this segment and
   * then offsetDistance away from it, perpendicular to the segment
   * (the offset vector is the segment direction rotated 90° CCW, so
   * positive offsets fall to the left of the p0->p1 direction).
   *
   * @throws Error if a non-zero offset is requested from a zero-length segment
   */
  pointAlongOffset (segmentLengthFraction, offsetDistance) {
    // Base point on the segment at the requested fraction.
    var segx = this.p0.x + segmentLengthFraction * (this.p1.x - this.p0.x)
    var segy = this.p0.y + segmentLengthFraction * (this.p1.y - this.p0.y)
    var dx = this.p1.x - this.p0.x
    var dy = this.p1.y - this.p0.y
    var len = Math.sqrt(dx * dx + dy * dy)
    var ux = 0.0
    var uy = 0.0
    if (offsetDistance !== 0.0) {
      if (len <= 0.0) throw new Error('Cannot compute offset from zero-length line segment')
      // Direction vector scaled to the requested offset length.
      ux = offsetDistance * dx / len
      uy = offsetDistance * dy / len
    }
    // (-uy, +ux) is the scaled direction rotated 90 degrees.
    var offsetx = segx - uy
    var offsety = segy + ux
    var coord = new Coordinate(offsetx, offsety)
    return coord
  }
setCoordinates () {
if (arguments.length === 1) {
const ls = arguments[0]
this.setCoordinates(ls.p0, ls.p1)
} else if (arguments.length === 2) {
const p0 = arguments[0]
const p1 = arguments[1]
this.p0.x = p0.x
this.p0.y = p0.y
this.p1.x = p1.x
this.p1.y = p1.y
}
}
segmentFraction (inputPt) {
var segFrac = this.projectionFactor(inputPt)
if (segFrac < 0.0) segFrac = 0.0; else if (segFrac > 1.0 || Double.isNaN(segFrac)) segFrac = 1.0
return segFrac
}
toString () {
return 'LINESTRING( ' + this.p0.x + ' ' + this.p0.y + ', ' + this.p1.x + ' ' + this.p1.y + ')'
}
isHorizontal () {
return this.p0.y === this.p1.y
}
  /**
   * Overloaded distance computation:
   * - distance(LineSegment): minimum distance between the two segments
   * - distance(Coordinate): distance from the point to this segment
   *
   * NOTE(review): any other argument type falls through both branches and
   * yields undefined — confirm callers only pass these two types.
   */
  distance () {
    if (arguments[0] instanceof LineSegment) {
      const ls = arguments[0]
      return CGAlgorithms.distanceLineLine(this.p0, this.p1, ls.p0, ls.p1)
    } else if (arguments[0] instanceof Coordinate) {
      const p = arguments[0]
      return CGAlgorithms.distancePointLine(p, this.p0, this.p1)
    }
  }
pointAlong (segmentLengthFraction) {
const coord = new Coordinate()
coord.x = this.p0.x + segmentLengthFraction * (this.p1.x - this.p0.x)
coord.y = this.p0.y + segmentLengthFraction * (this.p1.y - this.p0.y)
return coord
}
  /**
   * Hash code combining the bit patterns of both endpoints' ordinates,
   * ported from the JTS Java implementation.
   *
   * NOTE(review): '>> 32' is a no-op in JavaScript (shift counts are taken
   * modulo 32), so only the low 32 bits of each value contribute here —
   * confirm this matches the intended Java semantics.
   */
  hashCode () {
    let bits0 = Double.doubleToLongBits(this.p0.x)
    bits0 ^= Double.doubleToLongBits(this.p0.y) * 31
    let hash0 = Math.trunc(bits0) ^ Math.trunc(bits0 >> 32)
    let bits1 = Double.doubleToLongBits(this.p1.x)
    bits1 ^= Double.doubleToLongBits(this.p1.y) * 31
    let hash1 = Math.trunc(bits1) ^ Math.trunc(bits1 >> 32)
    return hash0 ^ hash1
  }
  // Marker interfaces carried over from the Java original.
  interfaces_ () {
    return [Comparable, Serializable]
  }
  // Java-style runtime class accessor used by the jsts type system.
  getClass () {
    return LineSegment
  }
  /** Returns the midpoint of the segment p0-p1 as a new Coordinate. */
  static midPoint (p0, p1) {
    return new Coordinate((p0.x + p1.x) / 2, (p0.y + p1.y) / 2)
  }
  // Serialization id carried over from the JTS Java source.
  static get serialVersionUID () { return 3252005833466256227 }
}
|
jgretz/pghbeer
|
api/src/features/users/index.js
|
// Barrel module: re-exports the user feature's controllers and handlers.
export * from './users.controller';
export * from './userByWebUserId.controller';
export {FindUserByWebUserIdHandler} from './findUserByWebUserId';
|
spencercjh/sync-leetcode-today-problem-cpp-example
|
cousins_in_binary_tree.cpp
|
package leetcode
/**
* https://leetcode-cn.com/problems/cousins-in-binary-tree/
*
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode() : val(0), left(nullptr), right(nullptr) {}
* TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
* TreeNode(int x, TreeNode *left, TreeNode *right) : val(x), left(left), right(right) {}
* };
*/
class CousinsInBinaryTree {
public:
bool isCousins(TreeNode* root, int x, int y) {
}
};
|
golfstream83/JavaCourse
|
Module4/SRP/src/test/java/ru/tulin/InteractCalculatorTest.java
|
package ru.tulin;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
/**
* @author <NAME>
* @version 1
* @since 29.10.2016
*/
public class InteractCalculatorTest {

    /**
     * Drives one interactive-calculator operation with mocked numeric input
     * and asserts the produced result. Extracted from four copy-pasted
     * test bodies that differed only in operands and action index.
     *
     * @param actionIndex index passed to select(): 0=add, 1=subtract,
     *                    2=divide, 3=multiply (per fillActions() ordering)
     * @param first       value returned for the first prompt
     * @param second      value returned for the second prompt
     * @param expected    expected calculation result
     */
    private void assertOperation(int actionIndex, double first, double second, double expected) {
        Calculator calculator = new Calculator();
        ValidateInput input = mock(ValidateInput.class);
        Print print = new Print();
        InteractCalculator interactCalc = new InteractCalculator(input, calculator, print);
        when(input.askNumber("Please, enter the first number: ")).thenReturn(first);
        when(input.askNumber("Please, enter the second number: ")).thenReturn(second);
        interactCalc.fillActions();
        interactCalc.select(actionIndex);
        assertThat(interactCalc.getPrevResult(), is(expected));
    }

    @Test
    public void whenAddTwoNumbersThenGetSum() {
        // 2 + 2 = 4
        assertOperation(0, 2.0, 2.0, 4d);
    }

    @Test
    public void whenSubstructNumberThenGetDifference() {
        // 8 - 4 = 4
        assertOperation(1, 8.0, 4.0, 4d);
    }

    @Test
    public void whenDivisionNumberThenGetQuotient() {
        // 16 / 4 = 4
        assertOperation(2, 16.0, 4.0, 4d);
    }

    @Test
    public void whenMultipleNumberThenGetProduct() {
        // 2 * 2 = 4
        assertOperation(3, 2.0, 2.0, 4d);
    }
}
|
amirisback/pemrograman-perangkat-bergerak
|
project/MOBPRO/MOBPRO7_JURNAL_6706160014_REZA/MOBPRO7_JURNAL_6706160014_REZA/Jurnal5/app/src/main/java/id/frogobox/amirisback/jurnal5/LangkaActivity.java
|
package id.frogobox.amirisback.jurnal5;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
/**
* Created by Praktikan on 22/02/2018.
*/
/**
 * Screen listing rare ("langka") Indonesian animals; tapping a row plays
 * that animal's sound clip through a MediaPlayer with audio-focus handling.
 */
public class LangkaActivity extends AppCompatActivity {
    ListView listViews;
    MediaPlayer mMediaPlayer;
    private AudioManager mAudioManager;
    // Pauses and rewinds on transient focus loss, restarts on regain, and
    // fully releases the player when focus is lost permanently.
    private AudioManager.OnAudioFocusChangeListener mOnAudioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
        @Override
        public void onAudioFocusChange(int focusChange) {
            if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK){
                mMediaPlayer.pause();
                mMediaPlayer.seekTo(0);
            } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN){
                mMediaPlayer.start();
            } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS){
                releaseMediaPlayer();
            }
        }
    };
    // Frees the player as soon as a clip finishes.
    private MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mediaPlayer) {
            releaseMediaPlayer();
        }
    };
    // Resolved raw-resource id of the currently selected sound clip.
    private int resIdSuara;
    // Parallel arrays (same index = same animal): raw audio resource name,
    // Indonesian display name, and scientific name.
    private String resource[] = {"anoa", "bekantan", "jalak_bali", "enggang_gading", "maleo_senkawor", "mandar_dengkur", "mentilin", "rusa_timor", "tangkasi"};
    private String hewans[] = {"Anoa","Bekantan","Jalak Bali","Enggang Gading","Maleo Senkawor","<NAME>","Mentilin","Rusa Timor","Tangkasi"};
    private String english[] = {"Anoa depressicornis","Nasalis larvatus","Leucopsar rotschildi","Rhinoplax vigil","Macrocephalon maleo","Aramidopsis plateni","Tarsius bancanus", "Cervus timorensis", "Tarsius tarsier"};
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.word_activity);
        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        listViews = (ListView)findViewById(R.id.list_isi);
        // Build one NewWord entry per animal from the parallel arrays.
        final ArrayList<NewWord> arrayWords = new ArrayList<>();
        for (int i = 0 ; i < hewans.length ; i++) {
            resIdSuara = getResources().getIdentifier(resource[i], "raw", getPackageName());
            arrayWords.add(new NewWord(hewans[i], english[i], resIdSuara));
        }
        WordAdapter call = new WordAdapter(this, arrayWords, R.color.category_langka);
        listViews.setAdapter(call);
        listViews.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View parent, int position, long id) {
                // Stop any clip already playing before requesting focus
                // for the newly selected one.
                releaseMediaPlayer();
                int result = mAudioManager.requestAudioFocus(mOnAudioFocusChangeListener, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
                if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                    mMediaPlayer = MediaPlayer.create(LangkaActivity.this, arrayWords.get(position).getSuara());
                    mMediaPlayer.start();
                    mMediaPlayer.setOnCompletionListener(mCompletionListener);
                }
            }
        });
    }
    @Override
    protected void onStop(){
        super.onStop();
        // Release playback resources whenever the activity leaves the screen.
        releaseMediaPlayer();
    }
    // Safely releases the MediaPlayer; null afterwards marks "not playing".
    private void releaseMediaPlayer(){
        if (mMediaPlayer !=null) {
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }
}
|
rkanavath/ossim18
|
src/ossim/base/ossimDuration.cpp
|
<reponame>rkanavath/ossim18<filename>src/ossim/base/ossimDuration.cpp
//*******************************************************************
//
// License: MIT
//
// See LICENSE.txt file in the top level directory for more details.
//
// Author: <NAME>
//
//*************************************************************************
// $Id$
#include <ossim/base/ossimDuration.h>
#include <ossim/base/ossimCommon.h>
#include <sstream>
#include <cctype>
// Constructs a duration, optionally initialized from an ISO-8601 duration
// string (e.g. "P1Y2M3DT4H5M6S").  An empty string leaves every field at
// its zero default with a positive sign.
ossimDuration::ossimDuration(const ossimString& iso8601Duration)
:theSign(1),
theYears(0),
theMonths(0),
theWeeks(0),
theDays(0),
theHours(0),
theMinutes(0),
theSeconds(0)
{
   if(!iso8601Duration.empty())
   {
      setByIso8601DurationString(iso8601Duration);
   }
}
// Resets this duration to zero: positive sign and all date/time fields 0.
void ossimDuration::clearFields()
{
   theSign = 1;
   theYears = theMonths = theWeeks = theDays = 0;
   theHours = theMinutes = 0;
   theSeconds = 0.0;
}
// True for the blank characters that terminate an ISO-8601 duration parse:
// space, newline, carriage return, and tab.
static bool isWhiteSpace(int c)
{
   switch (c)
   {
      case ' ':
      case '\n':
      case '\r':
      case '\t':
         return true;
      default:
         return false;
   }
}
// Parses an ISO-8601 duration (e.g. "-P1Y2M3DT4H5M6.5S") from the stream
// into this object's fields.  Parsing stops at the first whitespace or at
// end of stream.
//
// NOTE(review): the return value is badParseFlag — i.e. true when the parse
// FAILED and false when it succeeded, the opposite of the usual bool
// convention.  setByIso8601DurationString() forwards this value unchanged;
// confirm against callers before changing or relying on it.
bool ossimDuration::readIso8601Encoding(std::istream& in)
{
   clearFields();
   // Optional leading sign.
   if(in.peek()=='-')
   {
      theSign = -1;
      in.ignore();
   }
   // Every ISO-8601 duration starts with the period designator 'P'.
   if(in.peek()!='P')
   {
      return false;
   }
   // skip the period indicater
   in.ignore();
   // now we start parsing the date portion and the time portion
   ossimString value;
   bool doneFlag = false;
   bool badParseFlag = false;
   bool doingTimeFlag = false;
   while(!doneFlag)
   {
      int c = in.peek();
      if(!in)
      {
         doneFlag = true;
      }
      else
      {
         if(isWhiteSpace(c))
         {
            doneFlag = true; // parse til blank character is met
         }
         else if(isalpha(c))
         {
            // A letter ends the digit run accumulated in 'value' and tells
            // us which field that run belongs to.
            //
            // check to see if was a Time seaprator of value
            // 'T'
            if(c == 'T')
            {
               // then it was a time separator so do nothing
               value = ""; // go ahead and null it out for now
               doingTimeFlag = true; // now in time parsing portion
            }
            else
            {
               if(doingTimeFlag)
               {
                  // check time values
                  if(c == 'H')
                  {
                     theHours = value.toUInt64();
                  }
                  else if(c == 'M')
                  {
                     theMinutes = value.toUInt64();
                  }
                  else if(c == 'S')
                  {
                     theSeconds = value.toFloat64();
                  }
                  else
                  {
                     doneFlag = true;
                     badParseFlag = true;
                  }
                  value = ""; // reset the value
               }
               else // check date characters instead
               {
                  if(c == 'Y')
                  {
                     theYears = value.toUInt64();
                  }
                  else if(c == 'M')
                  {
                     theMonths = value.toUInt64();
                  }
                  else if(c == 'W')
                  {
                     theWeeks = value.toUInt64();
                  }
                  else if(c == 'D')
                  {
                     theDays = value.toUInt64();
                  }
                  else
                  {
                     doneFlag = true;
                     badParseFlag = true;
                  }
                  value = ""; // reset the value
               }
            }
         }
         else if(isdigit(c)||(c=='.'))// not an alphabetic character so add it to the value string
         {
            value += static_cast<char>(c);
         }
         else
         {
            // Anything that is neither letter, digit, '.' nor whitespace
            // is a parse error.
            doneFlag = true;
            badParseFlag = true;
         }
      }
      if(!doneFlag)
      {
         in.ignore();
      }
   }
   return badParseFlag;
}
// Initializes this duration from an ISO-8601 duration string.  An empty
// string resets every field and reports true; otherwise the result of
// readIso8601Encoding(...) is returned unchanged.
bool ossimDuration::setByIso8601DurationString(const ossimString& iso8601Duration)
{
   if (iso8601Duration.empty())
   {
      clearFields();
      return true;
   }

   std::istringstream stream(iso8601Duration);
   return readIso8601Encoding(stream);
}
// Formats this duration as an ISO-8601 duration string into 'result'
// (e.g. "P1Y2MT3H").  Zero-valued fields are omitted entirely; an all-zero
// duration produces an empty string rather than "P".
void ossimDuration::toIso8601DurationString(ossimString& result)
{
   result = "";
   bool hasDatePeriod = ((theYears!=0)||
                         (theMonths!=0)||
                         (theWeeks!=0)||
                         (theDays!=0));
   // Seconds are floating point, so compare against zero with a tolerance.
   bool hasTimePeriod = ((theHours!=0)||
                         (theMinutes!=0)||
                         (!ossim::almostEqual(theSeconds, 0.0, .00000000001)));
   // if no time or date period present then return empty
   if(!(hasDatePeriod || hasTimePeriod))
   {
      return;
   }
   if(theSign < 0)
   {
      result += "-";
   }
   result += "P";
   if(hasDatePeriod)
   {
      if(theYears > 0)
      {
         result+=ossimString::toString(theYears);
         result+="Y";
      }
      if(theMonths>0)
      {
         result+=ossimString::toString(theMonths);
         result+="M";
      }
      if(theWeeks>0)
      {
         result+=ossimString::toString(theWeeks);
         result+="W";
      }
      if(theDays>0)
      {
         result+=ossimString::toString(theDays);
         result+="D";
      }
   }
   if(hasTimePeriod)
   {
      // 'T' separates the date fields from the time fields.
      result+="T";
      if(theHours>0)
      {
         result+=ossimString::toString(theHours);
         result+="H";
      }
      if(theMinutes>0)
      {
         result+=ossimString::toString(theMinutes);
         result+="M";
      }
      if(theSeconds>0)
      {
         result+=ossimString::toString(theSeconds, 15);
         result+="S";
      }
   }
}
// Converts the stored fields to a signed number of seconds.
// NOTE(review): as in the original, theYears and theMonths do not
// contribute (they have no fixed length in seconds here).
ossim_float64 ossimDuration::toSeconds()const
{
   ossim_float64 total = theSeconds
                       + theMinutes * 60.0
                       + theHours   * 3600.0
                       + theDays    * 86400.0
                       + theWeeks   * 604800.0;
   return (theSign < 0) ? -total : total;
}
|
xiaotuoapi/yyblog
|
src/main/java/net/laoyeye/yyblog/service/AboutService.java
|
package net.laoyeye.yyblog.service;
import net.laoyeye.yyblog.common.YYBlogResult;
import net.laoyeye.yyblog.model.AboutDO;
/**
 * Service API for the blog's "about" page content, keyed by tab.
 */
public interface AboutService {
    /**
     * Updates the about entry identified by its tab.
     *
     * @param about entity carrying the tab key and the new content
     * @return operation result wrapper
     */
    YYBlogResult updateByTab(AboutDO about);
    /**
     * Looks up the about entry for the given tab.
     *
     * @param tab tab key of the about section
     * @return the matching about entity
     */
    AboutDO getAboutByTab(String tab);
}
|
mxalbert1996/homebrew-fonts
|
Formula/font-sura.rb
|
<gh_stars>10-100
# Homebrew formula installing the Google Fonts "Sura" typeface (regular and
# bold weights) into the Homebrew share/fonts directory.
class FontSura < Formula
  head "https://github.com/google/fonts/trunk/ofl/sura", verified: "github.com/google/fonts/", using: :svn
  desc "Sura"
  homepage "https://fonts.google.com/specimen/Sura"
  def install
    (share/"fonts").install "Sura-Bold.ttf"
    (share/"fonts").install "Sura-Regular.ttf"
  end
  # Intentionally empty: a font formula has no runtime behaviour to verify.
  test do
  end
end
|
AdmiralBulldogTv/VodApi
|
src/api/gql.go
|
package api
import (
"bytes"
"context"
"net/url"
"time"
"github.com/99designs/gqlgen/graphql"
"github.com/99designs/gqlgen/graphql/handler/extension"
"github.com/AdmiralBulldogTv/VodApi/graph/generated"
"github.com/AdmiralBulldogTv/VodApi/src/api/cache"
"github.com/AdmiralBulldogTv/VodApi/src/api/complexity"
"github.com/AdmiralBulldogTv/VodApi/src/api/helpers"
"github.com/AdmiralBulldogTv/VodApi/src/api/loaders"
"github.com/AdmiralBulldogTv/VodApi/src/api/middleware"
"github.com/AdmiralBulldogTv/VodApi/src/api/resolvers"
"github.com/AdmiralBulldogTv/VodApi/src/api/types"
"github.com/AdmiralBulldogTv/VodApi/src/global"
"github.com/AdmiralBulldogTv/VodApi/src/svc/redis"
"github.com/AdmiralBulldogTv/VodApi/src/utils"
"github.com/dyninc/qstring"
"github.com/sirupsen/logrus"
"github.com/valyala/fasthttp"
)
// gqlRequest is the GraphQL request envelope accepted by the gateway,
// decoded from either the query string (GET) or the JSON body (POST).
//
// NOTE(review): standard GraphQL clients send "operationName" (camelCase);
// confirm the snake_case "operation_name"/"request_id" tags match the
// clients actually in use.
type gqlRequest struct {
	Query string `json:"query"`
	Variables map[string]interface{} `json:"variables"`
	OperationName string `json:"operation_name"`
	RequestID string `json:"request_id"`
}
// GqlHandler builds the fasthttp handler serving the GraphQL API: it wires
// resolvers, directives and complexity rules into the executable schema,
// then decodes each incoming request (GET query string or POST JSON body)
// and executes it against the schema.
func GqlHandler(gCtx global.Context) func(ctx *fasthttp.RequestCtx) {
	schema := NewWrapper(generated.NewExecutableSchema(generated.Config{
		Resolvers: resolvers.New(types.Resolver{Ctx: gCtx}),
		Directives: middleware.New(gCtx),
		Complexity: complexity.New(gCtx),
	}))
	// Reject queries whose computed complexity exceeds the fixed budget.
	schema.Use(&extension.ComplexityLimit{
		Func: func(ctx context.Context, rc *graphql.OperationContext) int {
			// we can define limits here
			return 75
		},
	})
	schema.Use(extension.Introspection{})
	// Automatic persisted queries, cached in Redis for 6 hours.
	schema.Use(extension.AutomaticPersistedQuery{
		Cache: cache.NewRedisCache(gCtx, redis.RedisPrefix+":", time.Hour*6),
	})
	// Resolver panics are logged and surfaced as a generic 500-style error.
	schema.SetRecoverFunc(func(ctx context.Context, err interface{}) (userMessage error) {
		logrus.Error("panic in handler: ", err)
		return helpers.ErrInternalServerError
	})
	loader := loaders.New(gCtx)
	return func(ctx *fasthttp.RequestCtx) {
		req := gqlRequest{}
		// Permissive CORS headers for browser clients.
		ctx.Response.Header.Set("Access-Control-Allow-Origin", "*")
		ctx.Response.Header.Set("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
		ctx.Response.Header.Set("Access-Control-Allow-Headers", "Content-Type")
		ctx.Response.Header.Set("Access-Control-Max-Age", "86400")
		switch utils.B2S(ctx.Method()) {
		case "GET":
			// GET requests carry the GraphQL request in the query string;
			// normalize it into a JSON body so one decode path serves both.
			query, _ := url.ParseQuery(ctx.QueryArgs().String())
			if err := qstring.Unmarshal(query, &req); err != nil {
				ctx.SetStatusCode(400)
				return
			}
			data, _ := json.Marshal(req)
			ctx.Request.SetBody(data)
		case "POST":
		case "OPTIONS":
			// CORS preflight: headers above are the whole answer.
			ctx.SetStatusCode(204)
			return
		default:
			ctx.SetStatusCode(fasthttp.StatusMethodNotAllowed)
			return
		}
		decoder := json.NewDecoder(bytes.NewReader(ctx.Request.Body()))
		// UseNumber avoids lossy float64 coercion of numeric variables.
		decoder.UseNumber()
		if err := decoder.Decode(&req); err != nil {
			ctx.SetStatusCode(400)
			return
		}
		// Execute the query
		result := schema.Process(context.WithValue(ctx, loaders.LoadersKey, loader), graphql.RawParams{
			Query: req.Query,
			OperationName: req.OperationName,
			Variables: req.Variables,
		})
		ctx.SetStatusCode(result.Status)
		ctx.SetContentType("application/json")
		data, _ := json.Marshal(result.Response)
		ctx.SetBody(data)
	}
}
|
wjchenge/JavaCourseCodes
|
03nio/src/test/java/nio/wjchenge/netty/gateway/v3/router/HttpEndpointRouterTest.java
|
<reponame>wjchenge/JavaCourseCodes
package nio.wjchenge.netty.gateway.v3.router;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
/**
* @Author wj
* @Date 2021/10/12 9:32
*/
class HttpEndpointRouterTest {
    /** Server name -> routing weight, shared by every algorithm test. */
    public static final Map<String, Integer> WEIGHT_MAP = new HashMap<>();
    static {
        WEIGHT_MAP.put("server1", 20);
        WEIGHT_MAP.put("server2", 30);
        WEIGHT_MAP.put("server3", 50);
    }
    /**
     * Random routing algorithm test.
     */
    @Test
    void RandomHttpEndpointRouterTest() {
        RandomHttpEndpointRouter router = new RandomHttpEndpointRouter();
        this.routerTest(router);
    }
    /**
     * Round-robin routing algorithm test.
     */
    @Test
    void RoundRibbonHttpEndpointRouterTest() {
        RoundRibbonHttpEndpointRouter router = new RoundRibbonHttpEndpointRouter();
        this.routerTest(router);
    }
    /**
     * Weighted-random routing algorithm test.
     */
    @Test
    void WeightRandomHttpEndpointRouterTest() {
        WeightRandomHttpEndpointRouter router = new WeightRandomHttpEndpointRouter();
        this.routerTest(router);
    }
    /**
     * Routes 10 batches of 10,000 requests through the given router and
     * prints per-server hit counts so the distribution can be inspected.
     */
    private void routerTest(HttpEndpointRouter router) {
        for (int i = 0; i < 10; i++) {
            Map<String, Integer> statisticMap = new HashMap<>();
            for (int j = 0; j < 10000; j++) {
                String server = router.route(WEIGHT_MAP);
                // merge() collapses the original containsKey/get/put triple
                // into one map operation: insert 1 on first sight, otherwise
                // add 1 to the running count.
                statisticMap.merge(server, 1, Integer::sum);
            }
            System.out.println(statisticMap);
        }
    }
}
|
LandingJobs/conundrum
|
spec/models/question_spec.rb
|
require 'spec_helper'
# Model specs for Question: factory validity and required-field validation.
describe Question do
  it 'has a valid factory' do
    expect(Fabricate(:question)).to be_valid
  end
  it 'is invalid without a question text' do
    expect(Fabricate.build(:question, question_text:nil)).not_to be_valid
  end
end
|
evanmok2401/Makanbook
|
src/test/java/seedu/address/model/accounting/UniqueDebtListTest.java
|
package seedu.address.model.accounting;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static seedu.address.testutil.TypicalDebts.DEBT_A;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import seedu.address.model.accounting.exception.DebtNotFoundException;
import seedu.address.model.accounting.exception.DuplicateDebtException;
/**
 * Unit tests for {@code UniqueDebtList}: null handling, duplicate rejection,
 * membership checks, removal, and the unmodifiable list view.
 */
public class UniqueDebtListTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();
    private final UniqueDebtList debts = new UniqueDebtList();
    /**
     * contains(null) must throw NullPointerException.
     */
    @Test
    public void contains_nullDebt_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        debts.contains(null);
    }
    /**
     * contains returns false for a debt that is not in the list.
     */
    @Test
    public void contains_debtNotInList_returnsFalse() {
        assertFalse(debts.contains(DEBT_A));
    }
    /**
     * contains returns true for a debt that was added to the list.
     */
    @Test
    public void contains_debtInList_returnsTrue() {
        debts.add(DEBT_A);
        assertTrue(debts.contains(DEBT_A));
    }
    /**
     * add(null) must throw NullPointerException.
     */
    @Test
    public void add_nullDebt_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        debts.add(null);
    }
    /**
     * Adding the same debt twice must throw DuplicateDebtException.
     */
    @Test
    public void add_duplicateDebt_throwsDuplicateDebtException() {
        debts.add(DEBT_A);
        thrown.expect(DuplicateDebtException.class);
        debts.add(DEBT_A);
    }
    /**
     * remove(null) must throw NullPointerException.
     */
    @Test
    public void remove_nullDebt_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        debts.remove(null);
    }
    /**
     * Removing a debt that was never added must throw DebtNotFoundException.
     */
    @Test
    public void remove_debtDoesNotExist_throwsDebtNotFoundException() {
        thrown.expect(DebtNotFoundException.class);
        debts.remove(DEBT_A);
    }
    /**
     * Adding then removing a debt leaves the list equal to an empty list.
     */
    @Test
    public void remove_existingDebt_removesDebt() {
        debts.add(DEBT_A);
        debts.remove(DEBT_A);
        UniqueDebtList expectedUniqueDebtList = new UniqueDebtList();
        assertEquals(expectedUniqueDebtList, debts);
    }
    /**
     * Mutating the unmodifiable observable view must throw
     * UnsupportedOperationException.
     */
    @Test
    public void asUnmodifiableObservableList_modifyList_throwsUnsupportedOperationException() {
        thrown.expect(UnsupportedOperationException.class);
        debts.asUnmodifiableObservableList().remove(0);
    }
}
|
maidiHaitai/haitaibrowser
|
third_party/catapult/perf_insights/perf_insights/endpoints/upload.py
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import re
import webapp2
import uuid
from perf_insights import trace_info
from perf_insights import cloud_config
import cloudstorage as gcs
from google.appengine.api import datastore_errors
# Default GCS retry policy: exponential backoff starting at 0.2s with a 2x
# factor, capped at 5s per attempt and 15s total retry window.
default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                       max_delay=5.0,
                                       backoff_factor=2,
                                       max_retry_period=15)
gcs.set_default_retry_params(default_retry_params)
class UploadPage(webapp2.RequestHandler):
  """Accepts trace uploads, stores them in GCS and records their metadata."""

  def get(self):
    # Minimal HTML form for uploading a trace by hand.
    self.response.out.write("""
      <html><body>
        <head><title>Performance Insights - Trace Uploader</title></head>
        <form action="/upload" enctype="multipart/form-data" method="post">
          <div><input type="file" name="trace"/></div>
          <div><input type="submit" value="Upload"></div>
        </form><hr>
      </body></html>""")

  def post(self):
    # Each upload gets a fresh UUID which keys both the GCS object and the
    # datastore entity; the UUID is echoed back to the caller.
    trace_uuid = str(uuid.uuid4())
    gcs_path = '/%s/%s.gz' % (
        cloud_config.Get().trace_upload_bucket, trace_uuid)
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()
    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    # Copy any recognized form fields (dashes mapped to underscores) onto
    # the entity, silently skipping values the datastore rejects.
    for arg in self.request.arguments():
      arg_key = arg.replace('-', '_').lower()
      if arg_key in trace_object._properties:
        try:
          setattr(trace_object, arg_key, self.request.get(arg))
        except datastore_errors.BadValueError:
          pass
    scenario_config = self.request.get('config')
    if scenario_config:
      config_json = json.loads(scenario_config)
      if 'scenario_name' in config_json:
        trace_object.scenario_name = config_json['scenario_name']
    tags_string = self.request.get('tags')
    if tags_string:
      # Tags are comma separated and should only include alphanumeric + '-'.
      if re.match('^[a-zA-Z0-9-,]+$', tags_string):
        trace_object.tags = tags_string.split(',')
      else:
        logging.warning('The provided tags string includes one or more invalid'
                        ' characters and will be ignored')
    trace_object.ver = self.request.get('product-version')
    trace_object.put()
    self.response.write(trace_uuid)
app = webapp2.WSGIApplication([('/upload', UploadPage)])
|
tanxinzheng/bms-webapp
|
src/main/java/com/xmomen/module/order/entity/TbPackingExample.java
|
<filename>src/main/java/com/xmomen/module/order/entity/TbPackingExample.java
package com.xmomen.module.order.entity;
import com.xmomen.framework.mybatis.model.BaseMybatisExample;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class TbPackingExample extends BaseMybatisExample {
protected String orderByClause;
protected boolean distinct;
protected List<Criteria> oredCriteria;
public TbPackingExample() {
oredCriteria = new ArrayList<Criteria>();
}
public void setOrderByClause(String orderByClause) {
this.orderByClause = orderByClause;
}
public String getOrderByClause() {
return orderByClause;
}
public void setDistinct(boolean distinct) {
this.distinct = distinct;
}
public boolean isDistinct() {
return distinct;
}
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
public void or(Criteria criteria) {
oredCriteria.add(criteria);
}
public Criteria or() {
Criteria criteria = createCriteriaInternal();
oredCriteria.add(criteria);
return criteria;
}
public Criteria createCriteria() {
Criteria criteria = createCriteriaInternal();
if (oredCriteria.size() == 0) {
oredCriteria.add(criteria);
}
return criteria;
}
protected Criteria createCriteriaInternal() {
Criteria criteria = new Criteria();
return criteria;
}
public void clear() {
oredCriteria.clear();
orderByClause = null;
distinct = false;
}
protected abstract static class GeneratedCriteria {
protected List<Criterion> criteria;
protected GeneratedCriteria() {
super();
criteria = new ArrayList<Criterion>();
}
public boolean isValid() {
return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
return criteria;
}
public List<Criterion> getCriteria() {
return criteria;
}
protected void addCriterion(String condition) {
if (condition == null) {
throw new RuntimeException("Value for condition cannot be null");
}
criteria.add(new Criterion(condition));
}
protected void addCriterion(String condition, Object value, String property) {
if (value == null) {
throw new RuntimeException("Value for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value));
}
protected void addCriterion(String condition, Object value1, Object value2, String property) {
if (value1 == null || value2 == null) {
throw new RuntimeException("Between values for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value1, value2));
}
public Criteria andIdIsNull() {
addCriterion("ID is null");
return (Criteria) this;
}
public Criteria andIdIsNotNull() {
addCriterion("ID is not null");
return (Criteria) this;
}
public Criteria andIdEqualTo(Integer value) {
addCriterion("ID =", value, "id");
return (Criteria) this;
}
public Criteria andIdNotEqualTo(Integer value) {
addCriterion("ID <>", value, "id");
return (Criteria) this;
}
public Criteria andIdGreaterThan(Integer value) {
addCriterion("ID >", value, "id");
return (Criteria) this;
}
public Criteria andIdGreaterThanOrEqualTo(Integer value) {
addCriterion("ID >=", value, "id");
return (Criteria) this;
}
public Criteria andIdLessThan(Integer value) {
addCriterion("ID <", value, "id");
return (Criteria) this;
}
public Criteria andIdLessThanOrEqualTo(Integer value) {
addCriterion("ID <=", value, "id");
return (Criteria) this;
}
public Criteria andIdIn(List<Integer> values) {
addCriterion("ID in", values, "id");
return (Criteria) this;
}
public Criteria andIdNotIn(List<Integer> values) {
addCriterion("ID not in", values, "id");
return (Criteria) this;
}
public Criteria andIdBetween(Integer value1, Integer value2) {
addCriterion("ID between", value1, value2, "id");
return (Criteria) this;
}
public Criteria andIdNotBetween(Integer value1, Integer value2) {
addCriterion("ID not between", value1, value2, "id");
return (Criteria) this;
}
public Criteria andPackingNoIsNull() {
addCriterion("PACKING_NO is null");
return (Criteria) this;
}
public Criteria andPackingNoIsNotNull() {
addCriterion("PACKING_NO is not null");
return (Criteria) this;
}
public Criteria andPackingNoEqualTo(String value) {
addCriterion("PACKING_NO =", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoNotEqualTo(String value) {
addCriterion("PACKING_NO <>", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoGreaterThan(String value) {
addCriterion("PACKING_NO >", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoGreaterThanOrEqualTo(String value) {
addCriterion("PACKING_NO >=", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoLessThan(String value) {
addCriterion("PACKING_NO <", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoLessThanOrEqualTo(String value) {
addCriterion("PACKING_NO <=", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoLike(String value) {
addCriterion("PACKING_NO like", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoNotLike(String value) {
addCriterion("PACKING_NO not like", value, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoIn(List<String> values) {
addCriterion("PACKING_NO in", values, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoNotIn(List<String> values) {
addCriterion("PACKING_NO not in", values, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoBetween(String value1, String value2) {
addCriterion("PACKING_NO between", value1, value2, "packingNo");
return (Criteria) this;
}
public Criteria andPackingNoNotBetween(String value1, String value2) {
addCriterion("PACKING_NO not between", value1, value2, "packingNo");
return (Criteria) this;
}
public Criteria andPackingTimeIsNull() {
addCriterion("PACKING_TIME is null");
return (Criteria) this;
}
public Criteria andPackingTimeIsNotNull() {
addCriterion("PACKING_TIME is not null");
return (Criteria) this;
}
public Criteria andPackingTimeEqualTo(Date value) {
addCriterion("PACKING_TIME =", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeNotEqualTo(Date value) {
addCriterion("PACKING_TIME <>", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeGreaterThan(Date value) {
addCriterion("PACKING_TIME >", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeGreaterThanOrEqualTo(Date value) {
addCriterion("PACKING_TIME >=", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeLessThan(Date value) {
addCriterion("PACKING_TIME <", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeLessThanOrEqualTo(Date value) {
addCriterion("PACKING_TIME <=", value, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeIn(List<Date> values) {
addCriterion("PACKING_TIME in", values, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeNotIn(List<Date> values) {
addCriterion("PACKING_TIME not in", values, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeBetween(Date value1, Date value2) {
addCriterion("PACKING_TIME between", value1, value2, "packingTime");
return (Criteria) this;
}
public Criteria andPackingTimeNotBetween(Date value1, Date value2) {
addCriterion("PACKING_TIME not between", value1, value2, "packingTime");
return (Criteria) this;
}
public Criteria andPackingStatusIsNull() {
addCriterion("PACKING_STATUS is null");
return (Criteria) this;
}
public Criteria andPackingStatusIsNotNull() {
addCriterion("PACKING_STATUS is not null");
return (Criteria) this;
}
public Criteria andPackingStatusEqualTo(Integer value) {
addCriterion("PACKING_STATUS =", value, "packingStatus");
return (Criteria) this;
}
public Criteria andPackingStatusNotEqualTo(Integer value) {
addCriterion("PACKING_STATUS <>", value, "packingStatus");
return (Criteria) this;
}
public Criteria andPackingStatusGreaterThan(Integer value) {
addCriterion("PACKING_STATUS >", value, "packingStatus");
return (Criteria) this;
}
public Criteria andPackingStatusGreaterThanOrEqualTo(Integer value) {
addCriterion("PACKING_STATUS >=", value, "packingStatus");
return (Criteria) this;
}
public Criteria andPackingStatusLessThan(Integer value) {
addCriterion("PACKING_STATUS <", value, "packingStatus");
return (Criteria) this;
}
// MyBatis-generated: adds "PACKING_STATUS <= ?" to the WHERE clause.
public Criteria andPackingStatusLessThanOrEqualTo(Integer value) {
addCriterion("PACKING_STATUS <=", value, "packingStatus");
return (Criteria) this;
}
// MyBatis-generated: adds "PACKING_STATUS in (...)" over the given values.
public Criteria andPackingStatusIn(List<Integer> values) {
addCriterion("PACKING_STATUS in", values, "packingStatus");
return (Criteria) this;
}
// MyBatis-generated: adds "PACKING_STATUS not in (...)" over the given values.
public Criteria andPackingStatusNotIn(List<Integer> values) {
addCriterion("PACKING_STATUS not in", values, "packingStatus");
return (Criteria) this;
}
// MyBatis-generated: adds "PACKING_STATUS between ? and ?" (inclusive bounds).
public Criteria andPackingStatusBetween(Integer value1, Integer value2) {
addCriterion("PACKING_STATUS between", value1, value2, "packingStatus");
return (Criteria) this;
}
// MyBatis-generated: adds "PACKING_STATUS not between ? and ?".
public Criteria andPackingStatusNotBetween(Integer value1, Integer value2) {
addCriterion("PACKING_STATUS not between", value1, value2, "packingStatus");
return (Criteria) this;
}
}
// MyBatis-generated concrete criteria type. It exists so the fluent
// andXxx() builder methods in GeneratedCriteria can cast and return
// this subclass, enabling method chaining.
public static class Criteria extends GeneratedCriteria {
protected Criteria() {
super();
}
}
// MyBatis-generated value holder for a single WHERE-clause condition.
// Exactly one of the noValue/singleValue/betweenValue/listValue flags is
// set by the constructors, telling the mapper how many placeholders the
// condition binds (0, 1, 2, or a list).
public static class Criterion {
// SQL fragment such as "PACKING_STATUS =" or "PACKING_TIME between".
private String condition;
// First (or only) bound value; holds a List when listValue is true.
private Object value;
// Second bound value; used only for "between"-style conditions.
private Object secondValue;
// True for conditions with no placeholder (e.g. "is null").
private boolean noValue;
// True when exactly one value is bound.
private boolean singleValue;
// True for two-value "between"/"not between" conditions.
private boolean betweenValue;
// True when value is a List used for "in"/"not in".
private boolean listValue;
// Optional MyBatis type handler name applied when binding the value(s).
private String typeHandler;
public String getCondition() {
return condition;
}
public Object getValue() {
return value;
}
public Object getSecondValue() {
return secondValue;
}
public boolean isNoValue() {
return noValue;
}
public boolean isSingleValue() {
return singleValue;
}
public boolean isBetweenValue() {
return betweenValue;
}
public boolean isListValue() {
return listValue;
}
public String getTypeHandler() {
return typeHandler;
}
// Zero-value condition, e.g. "X is null".
protected Criterion(String condition) {
super();
this.condition = condition;
this.typeHandler = null;
this.noValue = true;
}
// Single-value or list condition; the runtime type of value decides which.
protected Criterion(String condition, Object value, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.typeHandler = typeHandler;
if (value instanceof List<?>) {
this.listValue = true;
} else {
this.singleValue = true;
}
}
// Single-value or list condition without a type handler.
protected Criterion(String condition, Object value) {
this(condition, value, null);
}
// Two-value ("between") condition.
protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.secondValue = secondValue;
this.typeHandler = typeHandler;
this.betweenValue = true;
}
// Two-value ("between") condition without a type handler.
protected Criterion(String condition, Object value, Object secondValue) {
this(condition, value, secondValue, null);
}
}
}
|
sumervardhan/SlogoIDE
|
src/view/FeatureReceiver.java
|
<reponame>sumervardhan/SlogoIDE
package view;
import controller.interfaces.Language;
import java.util.List;
import java.util.Map;
/**
* This interface follows the observer design pattern.
* This is given to classes which implement the Feature interface so that there can be
* two way communication between the View and the features
*
* @author <NAME>
* @author <NAME>
*/
public interface FeatureReceiver extends StaticView, Language {
/**
 * Asks the observed view for the variables currently defined.
 *
 * @return map whose keys are variable names and whose values are the
 *         variables' current values, both as strings
 */
Map<String, String> getCurrentVariables();
/**
 * Asks the observed view for the most recent command history.
 *
 * @return list of strings containing the history of commands
 */
List<String> getHistory();
/**
 * Asks the observed view for the most recent set of user-defined commands.
 *
 * @return list of strings holding the most recent user-defined commands
 */
List<String> getUserCommands();
}
|
Who2232/mymapofdaworld
|
src/render/ImageTile.js
|
<filename>src/render/ImageTile.js
/*
* Copyright 2015-2017 WorldWind Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @exports ImageTile
*/
define([
        '../error/ArgumentError',
        '../util/Logger',
        '../render/TextureTile',
        '../util/Tile'
    ],
    function (ArgumentError,
              Logger,
              TextureTile,
              Tile) {
        "use strict";

        /**
         * Constructs an image tile.
         * @alias ImageTile
         * @constructor
         * @classdesc Represents an image applied to a portion of a globe's terrain. Applications typically do not
         * interact with this class.
         * @augments TextureTile
         * @param {Sector} sector The sector this tile covers.
         * @param {Level} level The level this tile is associated with.
         * @param {Number} row This tile's row in the associated level.
         * @param {Number} column This tile's column in the associated level.
         * @param {String} imagePath The full path to the image.
         * @throws {ArgumentError} If the specified sector or level is null or undefined, the row or column arguments
         * are less than zero, or the specified image path is null, undefined or empty.
         */
        var ImageTile = function (sector, level, row, column, imagePath) {
            if (!imagePath || (imagePath.length < 1)) {
                throw new ArgumentError(
                    Logger.logMessage(Logger.LEVEL_SEVERE, "ImageTile", "constructor",
                        "The specified image path is null, undefined or zero length."));
            }

            TextureTile.call(this, sector, level, row, column); // args are checked in the superclass' constructor

            /**
             * This tile's image path.
             * @type {String}
             */
            this.imagePath = imagePath;

            /**
             * The tile whose texture to use when this tile's texture is not available.
             * @type {ImageTile}
             */
            this.fallbackTile = null;

            // Assign imagePath to gpuCacheKey (inherited from TextureTile).
            this.gpuCacheKey = imagePath;
        };

        ImageTile.prototype = Object.create(TextureTile.prototype);
        // Object.create replaces the prototype object, so restore the constructor reference.
        ImageTile.prototype.constructor = ImageTile;

        /**
         * Returns the size of this tile in bytes.
         * @returns {Number} The size of this tile in bytes, not including the associated texture size.
         */
        ImageTile.prototype.size = function () {
            // Call the superclass implementation via TextureTile.prototype rather than
            // this.__proto__.__proto__: __proto__ is deprecated, and the double-hop form
            // would recurse infinitely if ImageTile were ever subclassed.
            return TextureTile.prototype.size.call(this) + this.imagePath.length + 8;
        };

        /**
         * Causes this tile's texture to be active. Implements [SurfaceTile.bind]{@link SurfaceTile#bind}.
         * @param {DrawContext} dc The current draw context.
         * @returns {Boolean} true if the texture was bound successfully, otherwise false.
         */
        ImageTile.prototype.bind = function (dc) {
            // Attempt to bind this tile's own texture first (explicit superclass call; see size()).
            var isBound = TextureTile.prototype.bind.call(this, dc);
            if (isBound) {
                return true;
            }

            // Fall back to an ancestor tile's texture when available.
            if (this.fallbackTile) {
                return this.fallbackTile.bind(dc);
            }

            return false;
        };

        /**
         * If this tile's fallback texture is used, applies the appropriate texture transform to a specified matrix.
         * @param {DrawContext} dc The current draw context.
         * @param {Matrix} matrix The matrix to apply the transform to.
         */
        ImageTile.prototype.applyInternalTransform = function (dc, matrix) {
            if (this.fallbackTile && !(dc.gpuResourceCache.resourceForKey(this.imagePath))) {
                // Must apply a texture transform to map the tile's sector into its fallback's image.
                this.applyFallbackTransform(matrix);
            }
        };

        // Intentionally not documented.
        ImageTile.prototype.applyFallbackTransform = function (matrix) {
            var deltaLevel = this.level.levelNumber - this.fallbackTile.level.levelNumber;
            if (deltaLevel <= 0)
                return;

            var fbTileDeltaLat = this.fallbackTile.sector.deltaLatitude(),
                fbTileDeltaLon = this.fallbackTile.sector.deltaLongitude(),
                sx = this.sector.deltaLongitude() / fbTileDeltaLon,
                sy = this.sector.deltaLatitude() / fbTileDeltaLat,
                tx = (this.sector.minLongitude - this.fallbackTile.sector.minLongitude) / fbTileDeltaLon,
                ty = (this.sector.minLatitude - this.fallbackTile.sector.minLatitude) / fbTileDeltaLat;

            // Apply a transform to the matrix that maps texture coordinates for this tile to texture coordinates for the
            // fallback tile. Rather than perform the full set of matrix operations, a single multiply is performed with the
            // precomputed non-zero values:
            //
            // Matrix trans = Matrix.fromTranslation(tx, ty, 0);
            // Matrix scale = Matrix.fromScale(sxy, sxy, 1);
            // matrix.multiply(trans);
            // matrix.multiply(scale);
            matrix.multiply(
                sx, 0, 0, tx,
                0, sy, 0, ty,
                0, 0, 1, 0,
                0, 0, 0, 1);
        };

        return ImageTile;
    });
|
cawel/vinifera
|
app/controllers/application_controller.rb
|
<gh_stars>1-10
# Shared controller behaviour: authentication, locale selection, SEO
# defaults, mailer setup, and the sidebar statistics used on every page.
class ApplicationController < ActionController::Base
  include AuthenticatedSystem

  helper :all

  # Filter order is preserved from the original declarations.
  before_filter :seo_defaults, :configure_mailers, :localizate,
                :top_wines, :top_contributers, :cellar_count, :review_count

  # Forces every request into Canadian French.
  def localizate
    #I18n.locale = params[:locale] || I18n.default_locale
    I18n.locale = 'fr-CA'
  end

  # Most active reviewers, shown in the sidebar.
  def top_contributers
    @top_contributers = Review.top_contributers
  end

  # Highest-rated wines, shown in the sidebar.
  def top_wines
    @top_wines = Wine.top_wines
  end

  # Number of wines in the current user's cellar (0 when not signed in).
  def cellar_count
    @cellar_count = if logged_in?
                      CellarWine.for_person(current_person).count
                    else
                      0
                    end
  end

  # Number of reviews by the current user (0 when not signed in).
  def review_count
    @review_count = if logged_in?
                      Review.for_person(current_person).count
                    else
                      0
                    end
  end

  # Always report the request as non-local so the public error pages render.
  def local_request?
    false
  end

  # Renders a friendly 404 for routing errors and a 500 otherwise, then
  # reports the exception to Hoptoad.
  def rescue_action_in_public(exception)
    routing_error = exception.is_a?(ActionController::RoutingError)
    if routing_error
      render :template => "/layouts/404.html.erb", :layout => true, :status => 404
    else
      render :template => "/layouts/500.html.erb", :layout => true, :status => 500
    end
    notify_hoptoad(exception)
  end

  # See ActionController::Base for details
  # Uncomment this to filter the contents of submitted sensitive data parameters
  # from your application log (in this case, all fields with names like "password").
  # filter_parameter_logging :password

  protected

  # Points the password-reset mailer at the current request's host.
  def configure_mailers
    PasswordResetMailer.configure(request)
  end

  # Default meta keywords/description for pages that do not set their own.
  def seo_defaults
    @keywords = "tastevin, vin, critique, critiques, degustation, SAQ"
    @description = "Le Tastevin: nos critiques de vin."
  end
end
|
inqwell/inq
|
src/main/java/com/inqwell/any/client/swing/FileChooserListener.java
|
/**
* Copyright (C) 2011 Inqwell Ltd
*
* You may distribute under the terms of the Artistic License, as specified in
* the README file.
*/
/*
* $Archive: /src/com/inqwell/any/client/swing/FileChooserListener.java $
* $Author: sanderst $
* $Revision: 1.2 $
* $Date: 2011-04-07 22:18:22 $
*/
package com.inqwell.any.client.swing;
import java.util.EventListener;
/**
 * Observer interface notified of the outcome of a file-chooser dialog:
 * one callback for an approved selection and one for a cancellation.
 */
public interface FileChooserListener extends EventListener
{
// Invoked when the user approves the chooser's current selection.
public void fileChooserApprove(FileChooserEvent e);
// Invoked when the user cancels the chooser.
public void fileChooserCancel(FileChooserEvent e);
}
|
arfusop/weather
|
src/layout/views/CurrentWeather.js
|
import React from 'react'
import { useSelector } from 'react-redux'
import { format } from 'date-fns'
import WeatherCard from '../../components/card'
import WeatherSpan from '../../components/weatherSpan'
import { getWeatherIcon } from '../../helpers'
import StyledCurrentWeather from './styled/StyledCurrentWeather'
const CurrentWeather = () => {
const {
location,
weather: { currently, daily, hourly }
} = useSelector(state => state.app)
const currentDay = daily.data[0]
const date = format(new Date(), 'E, MMM d')
return (
<WeatherCard>
<StyledCurrentWeather>
<div className="currentInfo">
<div className="tempContainer">
<div className="currentTemp">
<WeatherSpan
size="large"
temp={currently.temperature}
/>
</div>
<div className="tempExtremes">
<WeatherSpan
style={{
position: 'relative',
bottom: '0.3rem'
}}
temp={currentDay.temperatureHigh}
/>
<span className="slash">/</span>
<WeatherSpan
style={{
position: 'relative',
top: '0.3rem'
}}
temp={currentDay.temperatureLow}
/>
</div>
</div>
<i
className={`wi largeIcon ${getWeatherIcon(
currently.icon
)}`}
/>
</div>
<div className="date">{date}</div>
<div className="location">
{location?.formatted_address || null}
</div>
<div className="hourlySummary">
{hourly.summary}{' '}
<i className={`wi ${getWeatherIcon(hourly.icon)}`} />
</div>
<div className="dailySummary">
{daily.summary}{' '}
<i className={`wi ${getWeatherIcon(daily.icon)}`} />
</div>
</StyledCurrentWeather>
</WeatherCard>
)
}
export default CurrentWeather
|
qrsforever/workspace
|
cpp/test/getopt/Test.cpp
|
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

#include <string>
// Prints the command-line synopsis for this getopt demo.
// Output is byte-identical to the original two-printf version; the
// adjacent string literals are concatenated by the compiler.
void usage(const char* progname)
{
    printf("Usage: %s [Bd]\n"
           "\t -d: configure dir\t -t -B: number, enable bridge and set sleep time\n",
           progname);
}
// Re-parses the full option set (-B -d -t -h -u -k) and prints what it
// found. main() dispatches here when it sees -B, after resetting optind.
// Returns 0 on success; exits the process on an unknown option.
//
// Fix: removed the dead enable_bridge flag — it was set on 'B' but never
// read, so 'B' is now simply consumed. Output is unchanged.
int test(int argc, char *argv[])
{
    int sleeptime = 0;
    int opt;
    std::string rootdir;
    std::string hueconf;
    std::string uid;
    std::string key;
    printf("##test##########argc = %d\n", argc);
    while ((opt = getopt(argc, argv, "Bd:t:h:u:k:")) != -1) {
        printf("-test---------> %c\n", opt);
        switch(opt) {
            case 'd':
                rootdir = optarg;
                break;
            case 'B':
                // Accepted but has no effect in this pass.
                break;
            case 't':
                sleeptime = atoi(optarg);
                break;
            case 'h':
                hueconf = optarg;
                break;
            case 'u':
                uid = optarg;
                break;
            case 'k':
                key = optarg;
                break;
            default:
                usage(argv[0]);
                exit(1);
        }
    }
    printf("sleeptime = %d roodir = %s %s %s %s\n", sleeptime, rootdir.c_str(), hueconf.c_str(), uid.c_str(), key.c_str());
    return 0;
}
// Parses the option set -B -d: -t: -h: -u: -k:. When -B is seen, the
// whole command line is re-parsed from scratch by test() to demonstrate
// restarting getopt; otherwise the collected options are printed.
// Returns 0 on success; exits with status 1 on an unknown option.
//
// Fix: removed the dead enable_bridge flag and its commented-out
// remnants (set but never read). Behaviour and output are unchanged.
int main(int argc, char *argv[])
{
    int sleeptime = 0;
    int opt;
    std::string rootdir;
    std::string hueconf;
    std::string uid;
    std::string key;
    printf("############argc = %d\n", argc);
    while ((opt = getopt(argc, argv, "Bd:t:h:u:k:")) != -1) {
        printf("-----------> %c\n", opt);
        switch(opt) {
            case 'd':
                rootdir = optarg;
                break;
            case 'B':
                // Restart option scanning from the beginning and hand the
                // entire command line to test(). NOTE(review): glibc documents
                // optind = 0 as the full re-initialization trigger; optind = 1
                // appears sufficient for this option string — confirm on other
                // libc implementations.
                optind = 1;
                return test(argc, argv);
            case 't':
                sleeptime = atoi(optarg);
                break;
            case 'h':
                hueconf = optarg;
                break;
            case 'u':
                uid = optarg;
                break;
            case 'k':
                key = optarg;
                break;
            default:
                usage(argv[0]);
                exit(1);
        }
    }
    printf("sleeptime = %d roodir = %s %s %s %s\n", sleeptime, rootdir.c_str(), hueconf.c_str(), uid.c_str(), key.c_str());
    return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.