repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
z8g/app
|
file.zxy97.com/src/java/com/zxy97/download/servlet/DownloadServlet.java
|
<reponame>z8g/app
package com.zxy97.download.servlet;
import static com.zxy97.download.util.Download.download;
import static com.zxy97.download.util.Download.getFileName;
import com.zxy97.download.util.GetPath;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
 * Downloads the resource named by the "url" request parameter into a
 * per-session folder under WEB-INF/downloads, then redirects the user to
 * download.jsp, which serves the saved file (its path is stored in the session).
 */
public class DownloadServlet extends HttpServlet {

    /**
     * Handles requests for both GET and POST methods.
     *
     * @param request  servlet request; must carry a non-empty "url" parameter
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException      if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        request.setCharacterEncoding("UTF-8");
        HttpSession session = request.getSession(true);
        String sessionId = session.getId();
        String url = request.getParameter("url");
        // BUGFIX: getParameter returns null when the parameter is absent, which
        // previously caused a NullPointerException inside getFileName(url).
        if (url == null || url.trim().isEmpty()) {
            try (PrintWriter printWriter = response.getWriter()) {
                printWriter.println("url参数不能为空!");
            }
            return;
        }
        String webRootPath = new GetPath().getWebRootPath();
        // Each session gets its own sub-folder so downloads don't collide.
        String folderName = webRootPath + "WEB-INF/downloads/" + sessionId + "/";
        File folder = new File(folderName);
        String folderPath = folder.getAbsolutePath();
        // Prefix the file name with the session id to keep names unique per session.
        String fileName = sessionId + getFileName(url);
        boolean isDownload = download(folderPath, fileName, url);
        if (!isDownload) {
            try (PrintWriter printWriter = response.getWriter()) {
                printWriter.println(url + "下载失败!");
            }
            return;
        }
        String saveFilePath = folderPath + File.separator + fileName;
        File file = new File(saveFilePath);
        String downloadPath = file.getAbsolutePath();
        // download.jsp reads "downloadPath" from the session to serve the file.
        session.setAttribute("downloadPath", downloadPath);
        response.sendRedirect("download.jsp");
    }

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    @Override
    public String getServletInfo() {
        return "根据指定url下载,然后转发给用户。";
    }
}
|
parthjoshi2007/pydantic
|
docs/examples/types_choices.py
|
from enum import Enum, IntEnum
from pydantic import BaseModel, ValidationError
class FruitEnum(str, Enum):
    """String-valued enum; the str mixin lets raw strings like 'pear' validate."""
    pear = 'pear'
    banana = 'banana'
class ToolEnum(IntEnum):
    """Integer-valued enum; raw ints (1, 2) validate to the members."""
    spanner = 1
    wrench = 2
class CookingModel(BaseModel):
    """Model demonstrating enum-typed fields with default members."""
    fruit: FruitEnum = FruitEnum.pear
    tool: ToolEnum = ToolEnum.spanner
# Omitted fields fall back to their declared defaults.
print(CookingModel())
# Raw values are converted to enum members during validation
# ('banana' -> FruitEnum.banana, 2 -> ToolEnum.wrench).
print(CookingModel(tool=2, fruit='banana'))
try:
    # 'other' is not a FruitEnum value, so validation raises.
    CookingModel(fruit='other')
except ValidationError as e:
    print(e)
|
l81893521/design-pattern-example
|
src/main/java/abstract_factory/Test.java
|
<gh_stars>1-10
package abstract_factory;
import abstract_factory.apple.AppleFactory;
import abstract_factory.xiaomi.XiaomiFactory;
/**
* 抽象工厂模式测试类
* @author zhangjiawei
*
*/
/**
 * Demo for the abstract factory pattern: each concrete factory produces a
 * whole family of related products behind the shared SuningFactory interface.
 *
 * @author zhangjiawei
 */
public class Test {

    public static void main(String[] args) {
        // The Apple factory hands out Apple-family products (phone, pad)
        // without the caller naming any concrete product class.
        SuningFactory appleFactory = new AppleFactory();
        Phone applePhone = appleFactory.getPhone();
        Pad applePad = appleFactory.getPad();

        // Likewise the Xiaomi factory yields the matching Xiaomi products.
        SuningFactory xiaomiFactory = new XiaomiFactory();
        Phone xiaomiPhone = xiaomiFactory.getPhone();
        Pad xiaomiPad = xiaomiFactory.getPad();

        // Power everything on to check the products work.
        applePhone.open();
        applePad.open();
        xiaomiPhone.open();
        xiaomiPad.open();
    }
}
|
NickyMateev/compass
|
components/kyma-environment-broker/internal/appinfo/runtime_info_test.go
|
<reponame>NickyMateev/compass
package appinfo_test
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/appinfo"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/appinfo/automock"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/broker"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/httputil"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/logger"
"github.com/kyma-incubator/compass/components/kyma-environment-broker/internal/storage"
"github.com/pivotal-cf/brokerapi/v7/domain"
"github.com/sebdah/goldie"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
// TestRuntimeInfoHandlerSuccess drives the runtime-info HTTP handler against
// an in-memory storage seeded per test case and compares the JSON response
// with a golden file named after the test (see assertJSONWithGoldenFile).
func TestRuntimeInfoHandlerSuccess(t *testing.T) {
	tests := map[string]struct {
		instances     []internal.Instance
		provisionOp   []internal.ProvisioningOperation
		deprovisionOp []internal.DeprovisioningOperation
	}{
		"no instances": {
			instances: []internal.Instance{},
		},
		"instances without operations": {
			instances: []internal.Instance{
				// NOTE(review): fixInstance(2) appears twice — possibly meant
				// fixInstance(3); confirm against the golden file before changing.
				fixInstance(1), fixInstance(2), fixInstance(2),
			},
		},
		"instances without service and plan name should have defaults": {
			instances: func() []internal.Instance {
				i := fixInstance(1)
				i.ServicePlanName = ""
				i.ServiceName = ""
				// selecting servicePlanName based on existing real planID
				i.ServicePlanID = broker.GCPPlanID
				return []internal.Instance{i}
			}(),
		},
		"instances with provision operation": {
			instances: []internal.Instance{
				fixInstance(1), fixInstance(2), fixInstance(3),
			},
			provisionOp: []internal.ProvisioningOperation{
				fixProvisionOperation(1), fixProvisionOperation(2),
			},
		},
		"instances with deprovision operation": {
			instances: []internal.Instance{
				fixInstance(1), fixInstance(2), fixInstance(3),
			},
			deprovisionOp: []internal.DeprovisioningOperation{
				fixDeprovisionOperation(1), fixDeprovisionOperation(2),
			},
		},
		"instances with provision and deprovision operations": {
			instances: []internal.Instance{
				fixInstance(1), fixInstance(2), fixInstance(3),
			},
			provisionOp: []internal.ProvisioningOperation{
				fixProvisionOperation(1), fixProvisionOperation(2),
			},
			deprovisionOp: []internal.DeprovisioningOperation{
				fixDeprovisionOperation(1), fixDeprovisionOperation(2),
			},
		},
	}
	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			// given
			var (
				fixReq     = httptest.NewRequest("GET", "http://example.com/foo", nil)
				respSpy    = httptest.NewRecorder()
				writer     = httputil.NewResponseWriter(logger.NewLogDummy(), true)
				memStorage = newInMemoryStorage(t, tc.instances, tc.provisionOp, tc.deprovisionOp)
			)
			handler := appinfo.NewRuntimeInfoHandler(memStorage.Instances(), writer)
			// when
			handler.ServeHTTP(respSpy, fixReq)
			// then: 200 + JSON content type, body matches golden file
			assert.Equal(t, http.StatusOK, respSpy.Result().StatusCode)
			assert.Equal(t, "application/json", respSpy.Result().Header.Get("Content-Type"))
			assertJSONWithGoldenFile(t, respSpy.Body.Bytes())
		})
	}
}
// TestRuntimeInfoHandlerFailures asserts that a storage error surfaces as an
// HTTP 500 with a generic message and the underlying error in "details".
func TestRuntimeInfoHandlerFailures(t *testing.T) {
	// given
	var (
		fixReq  = httptest.NewRequest("GET", "http://example.com/foo", nil)
		respSpy = httptest.NewRecorder()
		writer  = httputil.NewResponseWriter(logger.NewLogDummy(), true)
		expBody = `{
		  "status": 500,
		  "requestId": "",
		  "message": "Something went very wrong. Please try again.",
		  "details": "while fetching all instances: ups.. internal info"
		}`
	)
	storageMock := &automock.InstanceFinder{}
	defer storageMock.AssertExpectations(t)
	storageMock.On("FindAllJoinedWithOperations", mock.Anything).Return(nil, errors.New("ups.. internal info"))
	handler := appinfo.NewRuntimeInfoHandler(storageMock, writer)
	// when
	handler.ServeHTTP(respSpy, fixReq)
	// then: JSONEq ignores whitespace, so the expected body above is compared structurally
	assert.Equal(t, http.StatusInternalServerError, respSpy.Result().StatusCode)
	assert.Equal(t, "application/json", respSpy.Result().Header.Get("Content-Type"))
	assert.JSONEq(t, expBody, respSpy.Body.String())
}
// assertJSONWithGoldenFile unmarshals the raw response and compares it against
// the golden file named after the running test (suffix ".golden.json").
func assertJSONWithGoldenFile(t *testing.T, gotRawJSON []byte) {
	t.Helper()
	g := goldie.New(t, goldie.WithNameSuffix(".golden.json"))
	var jsonGoType interface{}
	require.NoError(t, json.Unmarshal(gotRawJSON, &jsonGoType))
	g.AssertJson(t, t.Name(), jsonGoType)
}
// fixTime returns a fixed reference timestamp (2020-04-21 00:00:23 UTC plus
// 42 nanoseconds) so that all fixtures are deterministic.
func fixTime() time.Time {
	return time.Date(2020, 04, 21, 0, 0, 23, 42, time.UTC)
}
// fixInstance builds a deterministic Instance fixture; idx is embedded in every
// field and offsets the timestamps, so distinct indices yield distinct rows.
func fixInstance(idx int) internal.Instance {
	return internal.Instance{
		InstanceID:             fmt.Sprintf("InstanceID field. IDX: %d", idx),
		RuntimeID:              fmt.Sprintf("RuntimeID field. IDX: %d", idx),
		GlobalAccountID:        fmt.Sprintf("GlobalAccountID field. IDX: %d", idx),
		SubAccountID:           fmt.Sprintf("SubAccountID field. IDX: %d", idx),
		ServiceID:              fmt.Sprintf("ServiceID field. IDX: %d", idx),
		ServiceName:            fmt.Sprintf("ServiceName field. IDX: %d", idx),
		ServicePlanID:          fmt.Sprintf("ServicePlanID field. IDX: %d", idx),
		ServicePlanName:        fmt.Sprintf("ServicePlanName field. IDX: %d", idx),
		DashboardURL:           fmt.Sprintf("DashboardURL field. IDX: %d", idx),
		ProvisioningParameters: fmt.Sprintf("ProvisioningParameters field. IDX: %d", idx),
		CreatedAt:              fixTime().Add(time.Duration(idx) * time.Second),
		UpdatedAt:              fixTime().Add(time.Duration(idx) * time.Minute),
		DeletedAt:              fixTime().Add(time.Duration(idx) * time.Hour),
	}
}
// newInMemoryStorage seeds an in-memory broker storage with the given instances
// and (de)provisioning operations, failing the test on any insert error.
func newInMemoryStorage(t *testing.T,
	instances []internal.Instance,
	provisionOp []internal.ProvisioningOperation,
	deprovisionOp []internal.DeprovisioningOperation) storage.BrokerStorage {
	t.Helper()
	memStorage := storage.NewMemoryStorage()
	for _, i := range instances {
		require.NoError(t, memStorage.Instances().Insert(i))
	}
	for _, op := range provisionOp {
		require.NoError(t, memStorage.Operations().InsertProvisioningOperation(op))
	}
	for _, op := range deprovisionOp {
		require.NoError(t, memStorage.Operations().InsertDeprovisioningOperation(op))
	}
	return memStorage
}
// fixProvisionOperation wraps a succeeded base operation as a provisioning one.
func fixProvisionOperation(idx int) internal.ProvisioningOperation {
	return internal.ProvisioningOperation{
		Operation: fixSucceededOperation(idx),
	}
}
// fixDeprovisionOperation wraps a succeeded base operation as a deprovisioning one.
func fixDeprovisionOperation(idx int) internal.DeprovisioningOperation {
	return internal.DeprovisioningOperation{
		Operation: fixSucceededOperation(idx),
	}
}
// fixSucceededOperation builds a deterministic base Operation in the Succeeded
// state; idx is embedded in the IDs and offsets the timestamps by whole days.
func fixSucceededOperation(idx int) internal.Operation {
	return internal.Operation{
		ID:                     fmt.Sprintf("Operation ID field. IDX: %d", idx),
		Version:                0,
		CreatedAt:              fixTime().Add(time.Duration(idx) * 24 * time.Hour),
		UpdatedAt:              fixTime().Add(time.Duration(idx) * 48 * time.Hour),
		InstanceID:             fmt.Sprintf("InstanceID field. IDX: %d", idx),
		ProvisionerOperationID: fmt.Sprintf("ProvisionerOperationID field. IDX: %d", idx),
		State:                  domain.Succeeded,
		Description:            fmt.Sprintf("esc for succeeded op.. IDX: %d", idx),
	}
}
|
xingmeichen/spring-cloud-shop
|
shop-job/shop-job-api/src/main/java/quick/pager/shop/trigger/JobTrigger.java
|
<gh_stars>100-1000
package quick.pager.shop.trigger;
import com.google.common.collect.Lists;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.EnumUtils;
import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.TriggerKey;
import org.quartz.impl.matchers.GroupMatcher;
import java.util.List;
import java.util.Set;
import quick.pager.shop.job.enums.JobEnums;
import quick.pager.shop.job.enums.JobStatusEnums;
import quick.pager.shop.model.JobInfo;
import quick.pager.shop.quartz.JobQuartzJobBean;
/**
* 任务调度工具类
*
* @author siguiyang
*/
@Slf4j
public final class JobTrigger {
/**
* 根据job名称和job组名称获取触发key
*
* @param jobName jobName
* @param jobGroup jobGroup
*/
private static TriggerKey getTriggerKey(String jobName, String jobGroup) {
return TriggerKey.triggerKey(jobName, jobGroup);
}
/**
* 获取表达式触发器
*
* @param scheduler 调度器
* @param jobName job名称
* @param jobGroup job组
*/
public static CronTrigger getCronTrigger(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
log.info("执行的定时任务 JobName = {}, JobGroup = {}", jobName, jobGroup);
return (CronTrigger) scheduler.getTrigger(getTriggerKey(jobName, jobGroup));
}
/**
* 创建任务
*
* @param scheduler scheduler
* @param job job
* @param params params
*/
public static void createJob(Scheduler scheduler, JobInfo job, Map<String, Object> params) throws SchedulerException {
Class<? extends Job> jobClass = JobQuartzJobBean.class;
// 构建job信息
JobDetail jobDetail = JobBuilder.newJob(jobClass).withIdentity(job.getJobName(), job.getJobGroup()).build();
// 表达式调度构建器
CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule(job.getCron());
// 按新的cron表达式构建一个新的trigger
Trigger trigger = TriggerBuilder.newTrigger().withIdentity(job.getJobName(), job.getJobGroup())
.withSchedule(scheduleBuilder).build();
scheduler.scheduleJob(jobDetail, trigger);
}
/**
* 创建定时任务
*
* @param scheduler scheduler
* @param job job
*/
public static void createJob(Scheduler scheduler, JobInfo job) throws SchedulerException {
Class<? extends Job> jobClass = JobQuartzJobBean.class;
TriggerKey key = getTriggerKey(job.getJobName(), job.getJobGroup());
CronTrigger trigger = (CronTrigger) scheduler.getTrigger(key);
JobDataMap jobDataMap = new JobDataMap();
jobDataMap.put("jobId", job.getId());
jobDataMap.put("jobGroupId", job.getJobGroupId());
jobDataMap.put("jobName", job.getJobName());
jobDataMap.put("params", job.getParams());
jobDataMap.put("jobEnums", JobEnums.EXECUTE);
if (null == trigger) {
// 构建job信息
JobDetail jobDetail = JobBuilder.newJob(jobClass)
.withIdentity(job.getJobName(), job.getJobGroup())
.usingJobData(jobDataMap)
.build();
// 表达式调度构建器
CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule(job.getCron());
// 按新的cron表达式构建一个新的trigger
trigger = TriggerBuilder.newTrigger().withIdentity(job.getJobName(), job.getJobGroup())
.withSchedule(scheduleBuilder).build();
scheduler.scheduleJob(jobDetail, trigger);
} else {
CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule(job.getCron());
trigger = TriggerBuilder.newTrigger()
.withIdentity(key)
.usingJobData(jobDataMap)
.withSchedule(scheduleBuilder)
.build();
scheduler.rescheduleJob(key, trigger);
}
}
/**
* 获取所有job任务
*/
public static List<JobInfo> getJobs(Scheduler scheduler) throws SchedulerException {
GroupMatcher<JobKey> matcher = GroupMatcher.anyJobGroup();
List<JobInfo> jobs = Lists.newArrayList();
Set<JobKey> jobKeys = scheduler.getJobKeys(matcher);
for (JobKey jobKey : jobKeys) {
List<? extends Trigger> triggers = scheduler.getTriggersOfJob(jobKey);
for (Trigger trigger : triggers) {
JobInfo event = new JobInfo();
event.setJobName(jobKey.getName());
event.setJobGroup(jobKey.getGroup());
event.setDescription(String.format("触发器 ======== %s", trigger.getKey()));
Trigger.TriggerState state = scheduler.getTriggerState(trigger.getKey());
event.setJobStatus(EnumUtils.getEnum(JobStatusEnums.class, state.name()).getCode());
if (trigger instanceof CronTrigger) {
CronTrigger cronTrigger = (CronTrigger) trigger;
event.setCron(cronTrigger.getCronExpression());
jobs.add(event);
}
}
}
return jobs;
}
/**
* 获取正在运行的job任务
*
* @param scheduler scheduler
*/
public static List<JobInfo> getRunningJobs(Scheduler scheduler) throws SchedulerException {
return Optional.ofNullable(scheduler.getCurrentlyExecutingJobs()).orElse(Collections.emptyList()).stream().map(context -> {
JobDetail jobDetail = context.getJobDetail();
Trigger trigger = context.getTrigger();
JobKey jobKey = jobDetail.getKey();
JobInfo job = new JobInfo();
job.setJobName(jobKey.getName());
job.setJobGroup(jobKey.getGroup());
job.setDescription(String.format("触发器 ======== %s", trigger.getKey()));
try {
Trigger.TriggerState state = scheduler.getTriggerState(trigger.getKey());
job.setJobStatus(EnumUtils.getEnum(JobStatusEnums.class, state.name()).getCode());
if (trigger instanceof CronTrigger) {
CronTrigger cronTrigger = (CronTrigger) trigger;
job.setCron(cronTrigger.getCronExpression());
return job;
}
} catch (SchedulerException e) {
e.printStackTrace();
}
return null;
}).filter(Objects::nonNull).collect(Collectors.toList());
}
/**
* 运行一次任务
*
* @param scheduler scheduler
* @param jobName jobName
* @param jobGroup jobGroup
*/
public static void runOnce(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
JobKey jobKey = JobKey.jobKey(jobName, jobGroup);
scheduler.triggerJob(jobKey);
}
/**
* 暂停任务
*
* @param scheduler scheduler
* @param jobName jobName
* @param jobGroup jobGroup
*/
public static void pauseJob(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
log.info("暂定定时任务 jobName = {}, jobGroup = {}", jobName, jobGroup);
JobKey jobKey = JobKey.jobKey(jobName, jobGroup);
scheduler.pauseJob(jobKey);
}
/**
* 恢复任务
*
* @param scheduler scheduler
* @param jobName jobName
* @param jobGroup jobGroup
*/
public static void resumeJob(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
log.info("恢复定时任务 jobName = {}, jobGroup = {}", jobName, jobGroup);
JobKey jobKey = JobKey.jobKey(jobName, jobGroup);
scheduler.resumeJob(jobKey);
}
/**
* 获取jobKey
*
* @param jobName the job name
* @param jobGroup the job group
* @return the job key
*/
private static JobKey getJobKey(String jobName, String jobGroup) {
return JobKey.jobKey(jobName, jobGroup);
}
/**
* 更新定时任务
*
* @param scheduler the scheduler
*/
public static void updateJob(Scheduler scheduler, JobInfo job) throws SchedulerException {
updateJob(scheduler, job.getJobName(), job.getJobGroup(), job.getCron(), job);
}
/**
* 更新定时任务
*
* @param scheduler the scheduler
* @param jobName the job name
* @param jobGroup the job group
* @param cronExpression the cron expression
* @param param the param
*/
private static void updateJob(Scheduler scheduler, String jobName, String jobGroup, String cronExpression, Object param) throws SchedulerException {
// 同步或异步
Class<? extends Job> jobClass = JobQuartzJobBean.class;
JobDetail jobDetail = scheduler.getJobDetail(getJobKey(jobName, jobGroup));
jobDetail = jobDetail.getJobBuilder().ofType(jobClass).build();
// 更新参数 实际测试中发现无法更新
JobDataMap jobDataMap = jobDetail.getJobDataMap();
jobDataMap.put("JobAdapter", param);
jobDetail.getJobBuilder().usingJobData(jobDataMap);
TriggerKey triggerKey = getTriggerKey(jobName, jobGroup);
// 表达式调度构建器
CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule(cronExpression);
CronTrigger trigger = (CronTrigger) scheduler.getTrigger(triggerKey);
// 按新的cronExpression表达式重新构建trigger
trigger = trigger.getTriggerBuilder().withIdentity(triggerKey).withSchedule(scheduleBuilder).build();
Trigger.TriggerState triggerState = scheduler.getTriggerState(trigger.getKey());
// 忽略状态为PAUSED的任务,解决集群环境中在其他机器设置定时任务为PAUSED状态后,集群环境启动另一台主机时定时任务全被唤醒的bug
if (!JobEnums.PAUSE.name().equalsIgnoreCase(triggerState.name())) {
// 按新的trigger重新设置job执行
scheduler.rescheduleJob(triggerKey, trigger);
}
}
/**
* 删除定时任务
*
* @param scheduler scheduler
* @param jobName jobName
* @param jobGroup jobGroup
*/
public static void deleteJob(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
log.info("删除定时任务 jobName = {}, jobGroup = {}", jobName, jobGroup);
scheduler.deleteJob(getJobKey(jobName, jobGroup));
}
}
|
nikkieverett/recipe-app
|
src/components/RecipeCard/RecipeCard.styles.js
|
<filename>src/components/RecipeCard/RecipeCard.styles.js
import { makeStyles } from '@material-ui/core/styles'
// Material-UI style hook for the RecipeCard component.
// All colors derive from the active theme palette except the fixed warm
// red/salmon card background and shadow.
const recipeCardStyles = makeStyles(theme => ({
  // Card container: translucent warm background, lifted shadow on hover.
  root: {
    position: 'relative',
    cursor: 'pointer',
    height: '100%',
    padding: '0',
    textTransform: 'capitalize',
    backgroundColor: 'rgba(255, 152, 138, .2)',
    boxShadow: '0px 8px 10px -7px rgba(203, 101, 87, 1)',
    '&:hover': {
      boxShadow: '0px 11px 15px -7px rgba(203, 101, 87, 1)'
    },
    marginBottom: '20px'
  },
  cardRoot: {
    padding: 0
  },
  // Fixed-height strip holding the recipe photo (or placeholder).
  cardImageContainer: {
    height: '150px',
    width: '100%'
  },
  cardImage: {
    backgroundColor: theme.palette.primary.light,
    height: '100%',
    width: '100%',
    position: 'relative'
  },
  // Placeholder icon, centered inside cardImage via the translate trick.
  noPhotoIcon: {
    color: theme.palette.primary.dark,
    position: 'absolute',
    top: '50%',
    left: '50%',
    transform: 'translate(-50%, -50%)'
  },
  cardTitle: {
    fontWeight: 300,
    lineHeight: 1.2,
    color: theme.palette.primary.dark,
    marginBottom: 10
  },
  // Icon pinned to the left of each body row (row leaves 40px for it).
  cardBodyIcon: {
    color: theme.palette.secondary.contrastText,
    position: 'absolute',
    left: 10
  },
  cardBodyItem: {
    position: 'relative',
    paddingLeft: 40,
    fontSize: 14,
    paddingTop: 5,
    color: theme.palette.secondary.contrastText,
    textTransform: 'uppercase'
  },
  cardHeader: {
    backgroundColor: theme.palette.primary.light,
    padding: '15px 15px',
    borderRadius: '0 0 4px 4px',
    textTransform: 'uppercase',
    marginBottom: '10px'
  },
  // Footer badges anchored to the card's bottom-left corner.
  cardFooter: {
    position: 'absolute',
    left: 20,
    bottom: 10,
    color: theme.palette.secondary.contrastText
  },
  cardBadgeIcon: {
    fontSize: 20,
    float: 'left',
    padding: '0 3px 0 0'
  },
  cardBadgeText: {
    fontSize: 12,
    float: 'left',
    padding: 3
  },
  // Full-card link overlay so the whole card is clickable.
  link: {
    textDecoration: 'none',
    textTransform: 'uppercase',
    padding: '10px',
    fontWeight: 'bold',
    width: '100%',
    height: '100%',
    display: 'block',
    color: theme.palette.primary.dark,
    '&:hover': {
      color: `rgba(${theme.palette.primary.contrastText}, .5)`
    }
  }
}))
export default recipeCardStyles
|
kojitominaga/scratch
|
270k/eb/plot5.py
|
import os
import numpy as np
import pandas as pd
# import pg8000
# from sqlalchemy import create_engine
import datetime
# import scipy.optimize
# import scipy.interpolate
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import matplotlib as mpl
def _make_map(ax):
    """Transverse-Mercator Basemap over Scandinavia (one per panel).

    The projection parameters were duplicated verbatim for the three panels;
    this helper is the single source of truth for them now.
    """
    return Basemap(lon_0=17.5, lat_0=63.2, width=1.45e6, height=1.85e6,
                   # llcrnrlon=5.0, llcrnrlat=40.0, urcrnrlon=50.0, urcrnrlat=70.0,
                   resolution='i', projection='tmerc',
                   k_0=0.9996, rsphere=(6378137.00, 6356752.314245179),
                   ax=ax)


# Lake geography table, indexed by lake id ('ebint').
geog = pd.read_csv('9k_geography.csv')
geog = geog.set_index('ebint')

fig = plt.figure()
ax1 = fig.add_subplot(1, 3, 1)
ax2 = fig.add_subplot(1, 3, 2)
ax3 = fig.add_subplot(1, 3, 3)

map1 = _make_map(ax1)
map2 = _make_map(ax2)
map3 = _make_map(ax3)
for m in (map1, map2, map3):
    # Gray land / light-blue ocean background; lakes are plotted as points.
    m.drawlsmask(land_color='gray', ocean_color='lightblue',
                 lakes=False, resolution='f', grid=1.25)

# Panel 1: marker size scales with sqrt(lake area).
map1.scatter(geog.longitude.values, geog.latitude.values,
             s=np.sqrt(geog.area.values) / 200.0 + 0.5,
             marker='o',
             edgecolors=[0.1, 0.1, 0.1, 0.8],
             facecolors=mpl.cm.Blues(0.01, alpha=0.8),
             linewidths=0.2,
             latlon=True)
ax1.set_title('lake area\ncan we tell lakes\nsmall in NO and\nlarge in FI?')

# Panel 2: color encodes maximum depth, capped at 50 m on the YlGnBu ramp.
colours2 = [mpl.cm.YlGnBu(min(1, depth / 50.0), alpha=0.7)
            for depth in geog.maxdepth.values]
map2.scatter(geog.longitude.values, geog.latitude.values,
             s=15.0,
             c=colours2,
             marker='.',
             linewidths=0,
             latlon=True)
ax2.set_title('lake maximum depth\n[shallow]yellow-green-blue[deep]\nI think we can see Norway\ndeep lakes')

# Panel 3: color encodes k-means cluster id (clamped into [0.05, 0.95] on Accent).
colours3 = [mpl.cm.Accent(min(0.95, max(0.05, cl / 30 * 0.8)), alpha=0.7)
            for cl in geog.clust30a.values]
map3.scatter(geog.longitude.values, geog.latitude.values,
             s=15.0,
             c=colours3,
             marker='.',
             linewidths=0,
             latlon=True)
ax3.set_title('k-means clusters\nbasis for selecting 9000 lakes\n(not sure if we present this)')

fig.set_figheight(6)
fig.set_figwidth(11)
# High-resolution figure plus a smaller preview copy.
fig.savefig('lake morphology.png', dpi=600)
fig.savefig('lake morphology low DPI.png', dpi=150)
|
andy-sheng/leetcode
|
proj/alog/885. Spiral Matrix III/885. Spiral Matrix III.h
|
<reponame>andy-sheng/leetcode
//
// 885. Spiral Matrix III.h
// leetcode
//
// Created by andysheng on 2019/10/23.
// Copyright © 2019 Andy. All rights reserved.
//
#ifndef _85__Spiral_Matrix_III_h
#define _85__Spiral_Matrix_III_h
#include <vector>
using namespace std;
namespace SpiralMatrixIII {

// LeetCode 885: starting at (r0, c0), walk an ever-growing clockwise spiral
// over an unbounded grid and record each visited coordinate that lies inside
// the R x C matrix, until all four matrix corners have been visited (which
// implies every cell has been reported).
class Solution {
public:
    std::vector<std::vector<int>> spiralMatrixIII(int R, int C, int r0, int c0) {
        // Corner flags: once all four corners are reached, the spiral has
        // covered the whole matrix and we can stop.
        bool topLeft = (r0 == 0 && c0 == 0);
        bool topRight = (r0 == 0 && c0 == C - 1);
        bool bottomLeft = (r0 == R - 1 && c0 == 0);
        bool bottomRight = (r0 == R - 1 && c0 == C - 1);
        std::vector<std::vector<int>> ret {{r0, c0}};
        // Leg lengths grow by one after every two turns: the east leg uses
        // `step`, then `step` is bumped before the west leg (direction 2),
        // and the outer for-increment bumps it again before the next lap.
        for (int step = 1; !topLeft || !topRight || !bottomLeft || !bottomRight; step += 1) {
            for (int direction = 0; direction < 4; ++direction) {
                int deltaR = 0;
                int deltaC = 0;
                if (direction == 0) {            // east
                    deltaR = 0;
                    deltaC = 1;
                } else if (direction == 1) {     // south
                    deltaR = 1;
                    deltaC = 0;
                } else if (direction == 2) {     // west, one cell longer
                    ++step;
                    deltaR = 0;
                    deltaC = -1;
                } else if (direction == 3) {     // north
                    deltaR = -1;
                    deltaC = 0;
                }
                for (int i = 1; i <= step; ++i) {
                    r0 += deltaR;
                    c0 += deltaC;
                    // BUGFIX: removed a stray debug `cout` here — it was left
                    // over from debugging and did not even compile (<iostream>
                    // was never included).
                    if (r0 > -1 && r0 < R && c0 > -1 && c0 < C) {
                        ret.push_back(std::vector<int>{r0, c0});
                        // BUGFIX: replaced the GNU `a ?: b` extension with the
                        // equivalent standard `a || b` (flags are monotonic).
                        topLeft = topLeft || (r0 == 0 && c0 == 0);
                        topRight = topRight || (r0 == 0 && c0 == C - 1);
                        bottomLeft = bottomLeft || (r0 == R - 1 && c0 == 0);
                        bottomRight = bottomRight || (r0 == R - 1 && c0 == C - 1);
                    }
                }
            }
            if (topLeft && topRight && bottomLeft && bottomRight) {
                break;
            }
        }
        return ret;
    }
};
}
#endif /* _85__Spiral_Matrix_III_h */
|
Parcons/Torque3D
|
Engine/source/T3D/vehicles/flyingVehicle.h
|
<filename>Engine/source/T3D/vehicles/flyingVehicle.h<gh_stars>100-1000
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _FLYINGVEHICLE_H_
#define _FLYINGVEHICLE_H_
#ifndef _VEHICLE_H_
#include "T3D/vehicles/vehicle.h"
#endif
#ifndef _CLIPPEDPOLYLIST_H_
#include "collision/clippedPolyList.h"
#endif
class ParticleEmitter;
class ParticleEmitterData;
//----------------------------------------------------------------------------
// Shared datablock for FlyingVehicle instances: flight-model tuning constants,
// jet particle emitters and engine/jet sound assets. Replicated from server to
// client via packData/unpackData.
struct FlyingVehicleData: public VehicleData {
   typedef VehicleData Parent;

   // Indices into the FlyingSounds asset array.
   enum Sounds {
      JetSound,
      EngineSound,
      MaxSounds,
   };
   DECLARE_SOUNDASSET_ARRAY(FlyingVehicleData, FlyingSounds, Sounds::MaxSounds);
   DECLARE_ASSET_ARRAY_SETGET(FlyingVehicleData, FlyingSounds);

   enum Jets {
      // These enums index into a static name list.
      ForwardJetEmitter,      // Thrust forward
      BackwardJetEmitter,     // Thrust backward
      DownwardJetEmitter,     // Thrust down
      TrailEmitter,           // Contrail
      MaxJetEmitters,
   };
   ParticleEmitterData* jetEmitter[MaxJetEmitters];
   F32 minTrailSpeed;   // presumably the speed threshold for the contrail — TODO confirm in .cpp

   // Flight-model tuning constants (semantics defined by updateForces in the .cpp).
   F32 maneuveringForce;
   F32 horizontalSurfaceForce;
   F32 verticalSurfaceForce;
   F32 autoInputDamping;
   F32 steeringForce;
   F32 steeringRollForce;
   F32 rollForce;
   F32 autoAngularForce;
   F32 rotationalDrag;
   F32 maxAutoSpeed;
   F32 autoLinearForce;
   F32 hoverHeight;
   F32 createHoverHeight;
   F32 vertThrustMultiple;

   // Initialized in preload
   ClippedPolyList rigidBody;
   F32 maxSpeed;

   enum JetNodes {
      // These enums index into a static name list.
      ForwardJetNode,
      ForwardJetNode1,
      BackwardJetNode,
      BackwardJetNode1,
      DownwardJetNode,
      DownwardJetNode1,
      //
      TrailNode,
      TrailNode1,
      TrailNode2,
      TrailNode3,
      //
      MaxJetNodes,
      MaxDirectionJets = 2,
      ThrustJetStart = ForwardJetNode,
      NumThrustJets = TrailNode,
      MaxTrails = 4,
   };
   static const char *sJetNode[MaxJetNodes];  // node names resolved on the model
   S32 jetNode[MaxJetNodes];                  // resolved node indices (see preload)

   //
   FlyingVehicleData();
   DECLARE_CONOBJECT(FlyingVehicleData);
   static void initPersistFields();
   bool preload(bool server, String &errorStr);
   void packData(BitStream* stream);
   void unpackData(BitStream* stream);
};
//----------------------------------------------------------------------------
// Hover/jet vehicle. Runtime state (thrust direction, jet animation threads,
// emitters, engine/jet sound sources) lives here; tuning lives in the
// FlyingVehicleData datablock.
class FlyingVehicle: public Vehicle
{
   typedef Vehicle Parent;

   FlyingVehicleData* mDataBlock;
   SFXSource* mJetSound;
   SFXSource* mEngineSound;

   // Dirty bits for network replication (see packUpdate/unpackUpdate).
   enum NetMaskBits {
      InitMask = BIT(0),
      HoverHeight = BIT(1)
   };
   bool createHeightOn;
   F32 mCeilingFactor;

   enum ThrustDirection {
      // Enums index into sJetActivationTable
      ThrustForward,
      ThrustBackward,
      ThrustDown,
      NumThrustDirections,
      NumThrustBits = 3
   };
   Point2F mThrust;
   ThrustDirection mThrustDirection;

   // Jet Threads
   enum Jets {
      // These enums index into a static name list.
      BackActivate,
      BackMaintain,
      BottomActivate,
      BottomMaintain,
      JetAnimCount
   };
   static const char* sJetSequence[FlyingVehicle::JetAnimCount];
   TSThread* mJetThread[JetAnimCount];
   S32 mJetSeq[JetAnimCount];
   bool mBackMaintainOn;
   bool mBottomMaintainOn;

   // Jet Particles
   struct JetActivation {
      // Convert thrust direction into nodes & emitters
      S32 node;
      S32 emitter;
   };
   static JetActivation sJetActivation[NumThrustDirections];
   SimObjectPtr<ParticleEmitter> mJetEmitter[FlyingVehicleData::MaxJetNodes];

   //
   bool onNewDataBlock(GameBaseData* dptr,bool reload);
   void updateMove(const Move *move);
   void updateForces(F32);
   // bool collideBody(const MatrixF& mat,Collision* info);
   F32 getHeight();

   // Client sounds & particles
   void updateJet(F32 dt);
   void updateEngineSound(F32 level);
   void updateEmitter(bool active,F32 dt,ParticleEmitterData *emitter,S32 idx,S32 count);
   U32 getCollisionMask();

public:
   DECLARE_CONOBJECT(FlyingVehicle);
   static void initPersistFields();

   FlyingVehicle();
   ~FlyingVehicle();

   bool onAdd();
   void onRemove();
   void advanceTime(F32 dt);

   // Network replication
   void writePacketData(GameConnection *conn, BitStream *stream);
   void readPacketData(GameConnection *conn, BitStream *stream);
   U32 packUpdate(NetConnection *conn, U32 mask, BitStream *stream);
   void unpackUpdate(NetConnection *conn, BitStream *stream);

   void useCreateHeight(bool val);
};
#endif
|
06keito/study-atcoder
|
src/abc194_b.py
|
<gh_stars>1-10
def solve(pairs):
    """AtCoder ABC194 B: minimum completion time over worker assignments.

    pairs[i] = [A_i, B_i] where A_i is person i's time for job A and B_i their
    time for job B. If the same person does both jobs the total is A + B
    (sequential); otherwise the two run in parallel and the cost is max(A, B).
    Returns the minimum over all (possibly equal) pairs of people.
    """
    best = 10 ** 9
    n = len(pairs)
    for a in range(n):
        for b in range(n):
            time_a, time_b = pairs[a][0], pairs[b][1]
            if a == b:
                # Same person: jobs run back to back.
                best = min(best, time_a + time_b)
            else:
                # Different people: jobs run in parallel.
                best = min(best, max(time_a, time_b))
    return best


def main():
    """Read N and the N (A_i, B_i) rows from stdin; print the answer."""
    n = int(input())
    li = [list(map(int, input().split())) for _ in range(n)]
    print(solve(li))


# BUGFIX: I/O was previously at module level, which ran (and blocked on stdin)
# on import; guarding it keeps script behavior identical while making the
# logic importable and testable.
if __name__ == "__main__":
    main()
|
nbbull/RIDE
|
src/robotide/editor/listeditor.py
|
<reponame>nbbull/RIDE
# Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin
from robotide.controller.commands import MoveUp, MoveDown, DeleteItem
from robotide.utils import RideEventHandler
from robotide.widgets import PopupMenu, PopupMenuItems, ButtonWithHandler, Font
from robotide.context.platform import ctrl_or_cmd, bind_keys_to_evt_menu, IS_WINDOWS
class ListEditorBase(wx.Panel):
_menu = ['Edit', 'Move Up\tCtrl-Up', 'Move Down\tCtrl-Down', '---', 'Delete']
_buttons = []
def __init__(self, parent, columns, controller):
    """Build the list-editor panel and wire up mouse/keyboard handlers."""
    wx.Panel.__init__(self, parent)
    self._controller = controller
    # No row selected initially.
    self._selection = wx.NOT_FOUND
    self._create_ui(columns, controller)
    self._make_bindings()
    self._bind_keys()
def _create_ui(self, columns, data):
    """Lay out the list control (stretching) and the button column side by side."""
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    self._list = self._create_list(columns, data)
    sizer.Add(self._list, 1, wx.EXPAND)
    sizer.Add((5, 0))  # small horizontal spacer
    sizer.Add(self._create_buttons())
    sizer.Add((5, 0))
    self.SetSizer(sizer)
    sizer.Layout()
def _create_list(self, columns, data):
return AutoWidthColumnList(self, columns, data)
def _create_buttons(self):
sizer = wx.BoxSizer(wx.VERTICAL)
for label in self._buttons:
sizer.Add(ButtonWithHandler(self, label, width=120), 0, wx.ALL, 1)
return sizer
def _make_bindings(self):
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnItemSelected)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED , self.OnItemDeselected)
self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnEdit)
self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK, self.OnRightClick)
if IS_WINDOWS:
self.Bind(wx.EVT_COMMAND_LEFT_CLICK, self.OnLeftClick)
else:
self._list.Bind(wx.EVT_LEFT_UP, self.OnLeftClick)
def OnItemSelected(self, event):
self._selection = event.GetIndex()
def OnItemDeselected(self, event):
self._selection = wx.NOT_FOUND
def OnEdit(self, event):
pass
def OnRightClick(self, event):
PopupMenu(self, PopupMenuItems(self, self._menu))
def OnLeftClick(self, event):
pass
def _bind_keys(self):
bind_keys_to_evt_menu(self, self._get_bind_keys())
def _get_bind_keys(self):
return [(ctrl_or_cmd(), wx.WXK_UP, self.OnMoveUp),
(ctrl_or_cmd(), wx.WXK_DOWN, self.OnMoveDown),
(wx.ACCEL_NORMAL, wx.WXK_WINDOWS_MENU, self.OnRightClick),
(wx.ACCEL_NORMAL, wx.WXK_DELETE, self.OnDelete)]
def OnMoveUp(self, event):
if self._selection < 1:
return
self._controller.execute(MoveUp(self._selection))
self.update_data()
self._list.Select(self._selection-1, True)
def OnMoveDown(self, event):
if self._selection == self._list.GetItemCount() - 1 or not self.is_selected:
return
self._controller.execute(MoveDown(self._selection))
self.update_data()
self._list.Select(self._selection+1, True)
def OnDelete(self, event):
if self.is_selected:
self._with_column_width_preservation(self._delete_selected)
def _with_column_width_preservation(self, func):
widths = []
for i in range(self._list.GetColumnCount()):
widths.append(self._list.GetColumnWidth(i))
func()
for i in range(self._list.GetColumnCount()):
self._list.SetColumnWidth(i, widths[i])
def _delete_selected(self):
self._controller.execute(DeleteItem(self._selection))
self._calculate_selection()
self.update_data()
def _calculate_selection(self):
self._selection = min(self._selection,
sum(1 for _ in self._controller)-1)
@property
def is_selected(self):
return self._selection != wx.NOT_FOUND
def update_data(self):
self._list.DeleteAllItems()
self._list.insert_data(self._controller)
self._list.select_and_ensure_visibility(self._selection)
def select(self, text):
self._list.select(text)
def has_link_target(self, controller):
return False
# Concrete list editor: ListEditorBase behaviour plus RIDE event handling.
class ListEditor(ListEditorBase, RideEventHandler): pass
class AutoWidthColumnList(wx.ListCtrl, ListCtrlAutoWidthMixin):
    """Report-mode list control whose columns auto-size between the
    plugin-configured min/max widths, and whose rows render as blue
    underlined links when the parent reports a link target for them.
    """

    def __init__(self, parent, columns, data=()):
        # Fix: previous default was the shared mutable `[]`; an immutable
        # empty tuple avoids the mutable-default-argument pitfall while
        # remaining iterable for populate().
        wx.ListCtrl.__init__(self, parent,
                             style=wx.LC_REPORT|wx.NO_BORDER|wx.LC_SINGLE_SEL|wx.LC_HRULES)
        ListCtrlAutoWidthMixin.__init__(self)
        self._parent = parent
        self.populate(columns, data)

    def populate(self, columns, data):
        """Create the columns and fill in the initial rows."""
        for i, name in enumerate(columns):
            self.InsertColumn(i, name)
        self.insert_data(data)

    def insert_data(self, data):
        """Append one row per item in data, then re-fit the column widths."""
        self._insert_data(data)
        self._set_column_widths()

    def _insert_data(self, data):
        for row, item in enumerate(data):
            rowdata = self._parent.get_column_values(item)
            self.InsertStringItem(row, rowdata[0])
            for col in range(1, len(rowdata)):
                # Show missing (None) values as ''. The previous
                # `x is not None and x or ''` idiom also blanked legitimate
                # falsy values, and rebound the name `data`, shadowing the
                # parameter being iterated.
                cell = rowdata[col] if rowdata[col] is not None else ''
                self.SetStringItem(row, col, cell)
            self._add_link_style(row, item)

    def _set_column_widths(self):
        """Auto-size each column, clamped to the plugin's configured bounds."""
        min_width = self._parent.Parent.plugin.global_settings['list col min width']
        max_width = self._parent.Parent.plugin.global_settings['list col max width']
        for i in range(self.ColumnCount):
            self.SetColumnWidth(i, -1)  # -1 = autosize to contents
            if self.GetColumnWidth(i) < min_width:
                self.SetColumnWidth(i, min_width)
            if self.GetColumnWidth(i) > max_width:
                self.SetColumnWidth(i, max_width)

    def _add_link_style(self, row, item):
        if self._parent.has_link_target(item):
            list_item = self.GetItem(row)
            list_item.SetFont(self._underlined_font())
            list_item.SetTextColour(wx.BLUE)
            self.SetItem(list_item)

    def _underlined_font(self):
        font = Font().underlined
        if IS_WINDOWS:
            # presumably the underlined font renders too large on Windows
            # by default — TODO confirm
            font.SetPointSize(8)
        return font

    def select(self, text):
        """Select and scroll to the first row whose text matches exactly."""
        index = self.FindItem(0, text)
        self.select_and_ensure_visibility(index)

    def select_and_ensure_visibility(self, index):
        if index >= 0:
            self.Select(index, on=True)
            self.EnsureVisible(index)
            self.Focus(index)
|
yangbajing/akka-fusion
|
fusion-security/src/main/scala/fusion/security/aes/Crypto.scala
|
<reponame>yangbajing/akka-fusion
/*
* Copyright 2019 <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fusion.security.aes
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.util
import java.util.Base64
import javax.crypto.spec.SecretKeySpec
import javax.crypto.{ Cipher, Mac }
object Crypto {
  import SessionUtil._

  // NOTE(review): a bare "AES" transformation resolves to AES/ECB/PKCS5Padding;
  // ECB leaks plaintext patterns. Kept as-is for compatibility with existing
  // ciphertexts — consider migrating to AES/GCM with a random IV.
  val CRYPTO_NAME = "AES"
  val KEY_SPEC_NAME = "AES"

  /** Hex-encoded HMAC-SHA1 of `message` keyed with `secret` (both UTF-8). */
  def signHmacSHA1Hex(message: String, secret: String): String = {
    val key = secret.getBytes(StandardCharsets.UTF_8)
    val mac = Mac.getInstance("HmacSHA1")
    mac.init(new SecretKeySpec(key, "HmacSHA1"))
    toHexString(mac.doFinal(message.getBytes(StandardCharsets.UTF_8)))
  }

  /** URL-safe, unpadded Base64 HMAC-SHA256 of `message` keyed with `secret`. */
  def signHmacSHA256Base64(message: String, secret: String): String = {
    val key = secret.getBytes(StandardCharsets.UTF_8)
    val mac = Mac.getInstance("HmacSHA256")
    mac.init(new SecretKeySpec(key, "HmacSHA256"))
    Base64.getUrlEncoder.withoutPadding().encodeToString(mac.doFinal(message.getBytes(StandardCharsets.UTF_8)))
  }

  /** AES-encrypts `value`; the key is `secret`'s UTF-8 bytes truncated or
   *  zero-padded to 16 bytes. Returns a hex string.
   */
  def encryptAES(value: String, secret: String): String = {
    val raw = util.Arrays.copyOf(secret.getBytes(StandardCharsets.UTF_8), 16)
    val skeySpec = new SecretKeySpec(raw, KEY_SPEC_NAME)
    val cipher = Cipher.getInstance(CRYPTO_NAME)
    cipher.init(Cipher.ENCRYPT_MODE, skeySpec)
    toHexString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8)))
  }

  /** Inverse of [[encryptAES]].
   *  Fix: decode the plaintext bytes explicitly as UTF-8. The previous
   *  `new String(bytes)` used the platform default charset and could corrupt
   *  non-ASCII plaintext that was encrypted from UTF-8 bytes.
   */
  def decryptAES(value: String, secret: String): String = {
    val raw = util.Arrays.copyOf(secret.getBytes(StandardCharsets.UTF_8), 16)
    val skeySpec = new SecretKeySpec(raw, KEY_SPEC_NAME)
    val cipher = Cipher.getInstance(CRYPTO_NAME)
    cipher.init(Cipher.DECRYPT_MODE, skeySpec)
    new String(cipher.doFinal(hexStringToByte(value)), StandardCharsets.UTF_8)
  }

  /** Hex-encoded SHA-256 digest of `value` (UTF-8). */
  def hashSHA256(value: String): String = {
    val digest = MessageDigest.getInstance("SHA-256")
    toHexString(digest.digest(value.getBytes(StandardCharsets.UTF_8)))
  }
}
|
McJty/ImmersiveCraft
|
src/main/java/mcjty/immcraft/setup/ClientProxy.java
|
<gh_stars>1-10
package mcjty.immcraft.setup;
import mcjty.immcraft.ImmersiveCraft;
import mcjty.immcraft.blocks.ModBlocks;
import mcjty.immcraft.blocks.bundle.BundleModelLoader;
import mcjty.immcraft.events.ClientForgeEventHandlers;
import mcjty.immcraft.input.InputHandler;
import mcjty.immcraft.input.KeyBindings;
import mcjty.lib.font.FontLoader;
import mcjty.lib.font.TrueTypeFont;
import mcjty.lib.setup.DefaultClientProxy;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.model.ModelLoaderRegistry;
import net.minecraftforge.client.model.obj.OBJLoader;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import java.awt.Font;
public class ClientProxy extends DefaultClientProxy {

    /** Shared TrueType fonts loaded during client init (regular/bold/italic). */
    public static TrueTypeFont font;
    public static TrueTypeFont font_bold;
    public static TrueTypeFont font_italic;

    @Override
    public void preInit(FMLPreInitializationEvent e) {
        super.preInit(e);
        MinecraftForge.EVENT_BUS.register(new ClientForgeEventHandlers());
        OBJLoader.INSTANCE.addDomain(ImmersiveCraft.MODID);
        ModelLoaderRegistry.registerLoader(new BundleModelLoader());
    }

    @Override
    public void init(FMLInitializationEvent e) {
        super.init(e);
        FMLCommonHandler.instance().bus().register(new InputHandler());
        KeyBindings.init();
        // Same size/flags/extra glyphs for all three faces; only the file differs.
        font = loadFont("fonts/ubuntu.ttf");
        font_bold = loadFont("fonts/ubuntu_bold.ttf");
        font_italic = loadFont("fonts/ubuntu_italic.ttf");
        ModBlocks.initItemModels();
    }

    /** Loads a 64pt mod font, pre-registering the bullet and em-dash glyphs. */
    private static TrueTypeFont loadFont(String path) {
        return FontLoader.createFont(new ResourceLocation(ImmersiveCraft.MODID, path), 64, false,
                Font.TRUETYPE_FONT, new char[] { '\u2022', '\u2014' });
    }
}
|
besom/bbossgroups-3.5
|
bboss-taglib/src/com/frameworkset/common/tag/pager/tags/ParamTag.java
|
/*****************************************************************************
* *
* This file is part of the tna framework distribution. *
* Documentation and updates may be get from biaoping.yin the author of *
* this framework *
* *
* Sun Public License Notice: *
* *
* The contents of this file are subject to the Sun Public License Version *
* 1.0 (the "License"); you may not use this file except in compliance with *
* the License. A copy of the License is available at http://www.sun.com *
* *
* The Original Code is tag. The Initial Developer of the Original *
* Code is <NAME>. Portions created by biaoping yin are Copyright *
* (C) 2000. All Rights Reserved. *
* *
* GNU Public License Notice: *
* *
* Alternatively, the contents of this file may be used under the terms of *
* the GNU Lesser General Public License (the "LGPL"), in which case the *
* provisions of LGPL are applicable instead of those above. If you wish to *
* allow use of your version of this file only under the terms of the LGPL *
* and not to allow others to use your version of this file under the SPL, *
* indicate your decision by deleting the provisions above and replace *
* them with the notice and other provisions required by the LGPL. If you *
* do not delete the provisions above, a recipient may use your version of *
* this file under either the SPL or the LGPL. *
* *
* biaoping.yin (<EMAIL>) *
* Author of Learning Java *
* *
*****************************************************************************/
package com.frameworkset.common.tag.pager.tags;
import java.io.OutputStream;
import javax.servlet.jsp.JspException;
/**
*
 * JSP tag that supplies a named parameter (from a literal value, a default,
 * or the request) to the enclosing pager/list tag.
*
* @author biaoping.yin
* @version 1.0
* 2005-2-3
*/
public final class ParamTag extends PagerTagSupport {
	// Parameter name and literal value (a literal value wins over a request lookup).
	private String name = null;
	private String value = null;
	/**
	 * Number of consecutive URL-encoding passes applied to the value.
	 */
	private int encodecount = 1;
	// Fallback used when no literal value / request value is available.
	private String defaultValue = null;
	// Whether the value should be URL-encoded (encodecount times).
	private boolean encode = false;
	public void setEncode(boolean encode)
	{
		this.encode = encode;
	}
	public boolean getEncode()
	{
		return this.encode;
	}
	/**
	 * Where to read the value from when no literal value is given:
	 * "parameter" (request parameter) or "attribute" (request attribute) —
	 * matched against pagerContext.PARAMETER / pagerContext.ATTRIBUTE.
	 */
	private String type = null;
	public final void setName(String val) {
		name = val;
	}
	public final String getName() {
		return name;
	}
	public final void setValue(String val) {
		value = val;
	}
	public final String getValue() {
		return value;
	}
	/**
	 * Resets all tag state before delegating to the superclass, so pooled
	 * tag instances do not leak values between uses.
	 */
	public int doEndTag()throws JspException
	{
		this.defaultValue = null;
		this.encode = false;
		this.name = null;
		this.type = null;
		this.value = null;
		encodecount = 1;
		return super.doEndTag();
	}
	/**
	 * Registers this parameter with the enclosing pager context. A literal
	 * value (or an absent/unrecognised type) is added directly; otherwise
	 * the value is pulled from the request by name according to type.
	 */
	public int doStartTag() throws JspException {
		super.doStartTag();
		if(pagerContext != null)
		{
			// Literal value given, or type missing/unknown: add directly.
			if(value != null ||
				type == null ||
				(!type.equals(pagerContext.ATTRIBUTE) && !type.equals(pagerContext.PARAMETER)))
			{
				pagerContext.addParam(name, value,defaultValue,encode,encodecount);
			}
			else
			{
				pagerContext.addParamByRequest(name,type,defaultValue,encode,encodecount);
			}
			return EVAL_BODY_INCLUDE;
		}
		else
		/*
		 * When this param tag appears inside a list tag, handle it as below.
		 * If a pager tag also exists and the param is placed inside the list
		 * tag, that combination is illegal, but no check is performed here.
		 */
		{
			PagerDataSet listTag = (PagerDataSet) findAncestorWithClass(this, PagerDataSet.class);
			// Only register once, while the list is on its first row.
			if(listTag != null && listTag.getRowid() == 0)
			{
				pagerContext = listTag.getPagerContext();
				if(value != null ||
					type == null ||
					(!type.equals(pagerContext.ATTRIBUTE) && !type.equals(pagerContext.PARAMETER)))
				{
					pagerContext.addParam(name, value,defaultValue,encode,encodecount);
				}
				else
				{
					pagerContext.addParamByRequest(name,type,defaultValue,encode,encodecount);
				}
				return EVAL_BODY_INCLUDE;
			}
			else
			{
				return SKIP_BODY;
			}
		}
	}
	/**
	 * Tag-pool cleanup. NOTE(review): only name and value are cleared here;
	 * type/defaultValue/encode/encodecount are reset in doEndTag() instead.
	 */
	public void release() {
		name = null;
		value = null;
		super.release();
	}
	/* (non-Javadoc)
	 * @see com.frameworkset.common.tag.BaseTag#generateContent()
	 */
	public String generateContent() {
		// Not used by this tag.
		return null;
	}
	/* (non-Javadoc)
	 * @see com.frameworkset.common.tag.BaseTag#write(java.io.OutputStream)
	 */
	public void write(OutputStream output) {
		// Not used by this tag.
	}
	/**
	 * @return the configured lookup type ("parameter"/"attribute"), or null.
	 */
	public String getType() {
		return type;
	}
	/**
	 * Sets the lookup type ("parameter"/"attribute").
	 */
	public void setType(String string) {
		type = string;
	}
	/**
	 * @return the fallback value used when no value could be resolved.
	 */
	public String getDefaultValue() {
		return defaultValue;
	}
	/**
	 * Sets the fallback value.
	 */
	public void setDefaultValue(String string) {
		defaultValue = string;
	}
	public int getEncodecount() {
		return encodecount;
	}
	public void setEncodecount(int encodecount) {
		this.encodecount = encodecount;
	}
}
/* vim:set ts=4 sw=4: */
|
curaga/curaga
|
db/migrate/20200508174125_create_documents.rb
|
# frozen_string_literal: true
# Creates the documents table: a title plus editor content stored as jsonb.
class CreateDocuments < ActiveRecord::Migration[6.0]
  def change
    create_table :documents do |t|
      t.text :title, null: false, default: ''
      # Default is a JSON string literal; the database casts it to jsonb.
      # The shape {"doc": {"type":"doc"}} looks like an empty ProseMirror
      # document — TODO confirm against the editor integration.
      t.jsonb :content, null: false, default: '{"doc": {"type":"doc"}}'
      t.timestamps
    end
  end
end
|
Juny4541/GitDemo
|
app/src/main/java/com/juny/cashiersystem/business/cashiertab/presenter/CashierPresenter.java
|
<reponame>Juny4541/GitDemo
package com.juny.cashiersystem.business.cashiertab.presenter;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import com.juny.cashiersystem.base.BasePresenter;
import com.juny.cashiersystem.bean.CategoryBean;
import com.juny.cashiersystem.bean.GoodsBean;
import com.juny.cashiersystem.bean.MemberBean;
import com.juny.cashiersystem.bean.OrderBean;
import com.juny.cashiersystem.business.cashiertab.contract.ICashierContract;
import com.juny.cashiersystem.business.cashiertab.model.CashierRepository;
import com.juny.cashiersystem.widget.AddDialog;
import io.realm.RealmResults;
/**
* <br> ClassName:
* <br> Description:
* <br>
* <br> Author:
* <br> Date: 2018/4/8 17:18
*/
public class CashierPresenter extends BasePresenter<ICashierContract.IView>
        implements ICashierContract.IPresenter {
    // Dialog type ids used by showDeleteDialog to decide what to delete.
    public final static int DIALOG_TYPE_GOODS_DELETE = 1;
    public final static int DIALOG_TYPE_CATEGORY_DELETE = 2;
    // Realm-backed data access layer.
    private CashierRepository mRepository;
    public CashierPresenter() {
        mRepository = new CashierRepository();
    }
    /** Loads the category list and pushes it to the attached view. */
    @Override
    public void getCategoryData() {
        if (isViewAttached()) {
            getView().showCategoryData(mRepository.searchCategoryData());
        }
    }
    /** Loads the goods of one category and pushes them to the attached view. */
    @Override
    public void getGoodsData(int categoryId) {
        if (isViewAttached()) {
            getView().showGoodsData(mRepository.searchGoodsData(categoryId));
        }
    }
    /** Persists a completed order. */
    @Override
    public void addOrder(OrderBean orderBean) {
        mRepository.addOrder(orderBean);
    }
    /**
     * <br> Description: Shows the dialog for adding a category or goods entry.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/10 15:02
     */
    public void showAddDialog(Activity activity, int dialogType, String tag, final int categoryId) {
        AddDialog dialog = new AddDialog();
        dialog.setDialogType(dialogType); // set the dialog type before showing so the matching custom layout is used
        dialog.show(activity.getFragmentManager(), tag);
        dialog.setOnCashierAddListener(new AddDialog.OnCashierAddListener() {
            @Override
            public void onCategoryAdd(String name) {
                CategoryBean categoryBean = new CategoryBean();
                categoryBean.setCategoryName(name);
                categoryBean.setSelect("false"); // selection state defaults to "false"
                mRepository.addCategory(categoryBean); // persist to the database
            }
            @Override
            public void onGoodsAdd(String name, int price, int repertory) {
                GoodsBean goodsBean = new GoodsBean();
                goodsBean.setName(name);
                goodsBean.setPrice(price);
                goodsBean.setRepertory(repertory);
                mRepository.addGoods(goodsBean, categoryId); // persist to the database
            }
        });
    }
    /**
     * <br> Description: Shows the delete-confirmation dialog and deletes the
     * goods entry or category identified by id when confirmed.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/16 17:06
     */
    public void showDeleteDialog(Activity activity, final int dialogType, final int id, String content) {
        new AlertDialog.Builder(activity)
                .setMessage(content)// message shown in the dialog
                .setPositiveButton("确定", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        if (DIALOG_TYPE_GOODS_DELETE == dialogType) {
                            mRepository.deleteGoods(id);
                        }
                        if (DIALOG_TYPE_CATEGORY_DELETE == dialogType) {
                            mRepository.deleteCategory(id);
                        }
                        dialog.dismiss();
                    }
                })
                .setNegativeButton("取消", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                }).show();
    }
    /**
     * <br> Description: Updates a category's selected state and returns the
     * updated bean.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/15 11:04
     */
    public CategoryBean updateCategorySelected(int categoryId, String isSelect) {
        return mRepository.updateCategorySelected(categoryId, isSelect);
    }
    /**
     * <br> Description: Finds a member by id.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/17 9:44
     */
    public MemberBean searchMemberById(int memberId) {
        return mRepository.searchMemberById(memberId);
    }
    /**
     * <br> Description: Updates a member's balance.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/17 9:44
     */
    public void updateMemberBalance(MemberBean member, int newBalance) {
        mRepository.updateMemberBalance(member, newBalance);
    }
    /**
     * <br> Description: Queries the full member list.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/17 9:48
     */
    public RealmResults<MemberBean> getMembers() {
        return mRepository.searchMembers();
    }
    /**
     * <br> Description: Closes the underlying Realm database resources.
     * <br> Author: chenrunfang
     * <br> Date: 2018/5/10 15:44
     */
    public void closeRealm() {
        mRepository.closeRealm();
    }
}
|
BrunoAOR/get-out
|
get-out/Interactable.h
|
<gh_stars>0
#ifndef H_INTERACTABLE
#define H_INTERACTABLE
#include "Entity.h"
#include "EntityFactory.h"
// Entity with a dedicated "inspect" description; may be invisible in the dark.
// Construction/destruction are restricted to EntityFactory::createEntity.
class Interactable :
	public Entity
{
	friend Entity* EntityFactory::createEntity(EntityInfo);
private:
	// Private: only the EntityFactory friend may create/destroy instances.
	Interactable(int id, const std::string& name, const std::string& description, const std::string& inspectDescription, bool isVisibleInDark = false);
	virtual ~Interactable();
public:
	// Non-copyable.
	Interactable(const Interactable& source) = delete;
	Interactable& operator=(const Interactable& source) = delete;
	// Entity overrides
	virtual std::string getDetailedDescription() const override;
private:
	// Extra description text (presumably shown on inspect — see getDetailedDescription).
	std::string m_inspectDescription;
	// Whether this entity can be seen without a light source.
	bool m_isVisibleInDark;
	// Entity overrides
	virtual bool canAddChild(const Entity* child) const override;
};
#endif // !H_INTERACTABLE
|
prudywsh/steganography_conf_website
|
client/components/nav.js
|
import { h, Component } from 'preact'
import { connect } from 'preact-redux'
import reduce from '../reducer'
import * as actions from '../actions'
import NavItem from './navItem'
@connect(reduce, actions)
class Nav extends Component {
onScroll = () => {
this.setState({
black: window.pageYOffset >= 100
})
}
componentDidMount () {
addEventListener('scroll', this.onScroll)
}
componentWillUnmount () {
removeEventListener('scroll', this.onScroll)
}
redirectToBlog () {
window.location.replace(`${window.location.origin}/blog.html`)
}
render () {
return (
<nav class={`navbar navbar-expand-lg navbar-light fixed-top ${this.state.black ? 'navbar-shrink' : ''}`} id="mainNav">
<a class="navbar-brand" href="#top">ALASKA</a>
<button class="navbar-toggler navbar-toggler-right" onClick={this.props.toggleSideNav}>
<i class="oi oi-menu"></i>
</button>
<div class={`collapse navbar-collapse ${this.props.showSideNav ? 'show' : ''}`}>
<ul class="navbar-nav mr-auto">
<NavItem itemId='about' text='About' />
<NavItem itemId='timeline' text='Timeline' />
<NavItem itemId='material' text='Material' />
<NavItem itemId='rules' text='Rules' />
<NavItem itemId='submit' text='Submit an answer' />
<NavItem itemId='leaderboard' text='LeaderBoard' />
<NavItem itemId='acknowledgements' text='Acknowledgements' />
<li class="nav-item">
<a class="nav-link" onClick={this.redirectToBlog}>
Blog
</a>
</li>
</ul>
{
this.props.jwtPayload
?
<button onClick={this.props.logoutUI} class="btn btn-outline-danger" type="button">
Logout
</button>
:
<button onClick={this.props.toggleLoginModal} class="btn btn-outline-light" type="button">
Login
</button>
}
</div>
</nav>
)
}
}
export default Nav
|
arthur-noseda/spring-hateoas
|
src/main/java/org/springframework/hateoas/support/WebStack.java
|
<reponame>arthur-noseda/spring-hateoas
/*
* Copyright 2019-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.hateoas.support;
import java.util.ArrayList;
import java.util.List;
import org.springframework.util.ClassUtils;
/**
* Utility to glean what web stack is currently available.
*
* @author <NAME>
*/
public enum WebStack {

	WEBMVC("org.springframework.web.servlet.DispatcherServlet", //
			"org.springframework.hateoas.config.WebMvcHateoasConfiguration", //
			"org.springframework.web.client.RestTemplate", //
			"org.springframework.hateoas.config.RestTemplateHateoasConfiguration"),

	WEBFLUX("org.springframework.web.reactive.DispatcherHandler", //
			"org.springframework.hateoas.config.WebFluxHateoasConfiguration", //
			"org.springframework.web.reactive.function.client.WebClient", //
			"org.springframework.hateoas.config.WebClientHateoasConfiguration");

	private final boolean isServerAvailable;
	private final String serverConfiguration;
	private final boolean isClientAvailable;
	private final String clientConfiguration;

	/**
	 * Probes the classpath for the stack's server and client marker classes and
	 * remembers which HATEOAS configuration classes should be registered for each.
	 */
	WebStack(String serverClass, String serverConfigClass, String clientClass, String clientConfigClass) {

		this.isServerAvailable = ClassUtils.isPresent(serverClass, null);
		this.serverConfiguration = serverConfigClass;
		this.isClientAvailable = ClassUtils.isPresent(clientClass, null);
		this.clientConfiguration = clientConfigClass;
	}

	/**
	 * Returns the configuration class names to register, based on which
	 * client/server components are present on the classpath.
	 */
	public List<String> getAvailableConfigurations() {

		List<String> configurations = new ArrayList<>(2);

		if (isServerAvailable) {
			configurations.add(serverConfiguration);
		}

		if (isClientAvailable) {
			configurations.add(clientConfiguration);
		}

		return configurations;
	}

	/**
	 * Whether this web stack's server side is on the classpath.
	 *
	 * @deprecated Will be removed in 1.2 in light of {@link #getAvailableConfigurations()}.
	 */
	@Deprecated
	public boolean isAvailable() {
		return isServerAvailable;
	}
}
|
cloudfoundry-incubator/garden-linux
|
linux_container/cgroups_manager/fake_cgroups_manager/fake_cgroups_manager.go
|
<reponame>cloudfoundry-incubator/garden-linux<gh_stars>10-100
package fake_cgroups_manager
import (
"path"
)
// FakeCgroupsManager is an in-memory test double for a cgroups manager.
// It records Add/Set/SubsystemPath calls and lets tests stub errors
// (SetError/AddError) and per-key results (WhenGetting/WhenSetting).
type FakeCgroupsManager struct {
	cgroupsPath string
	id string
	// Stubbed errors returned verbatim by Set and Add.
	SetError error
	AddError error
	// Recorded calls, in order.
	setValues []SetValue
	addValues []AddValue
	// Registered stubs consulted by Get and Set.
	getCallbacks []GetCallback
	setCallbacks []SetCallback
	subsystemPathCalls []string
}
// AddValue records one Add call: the pid and the subsystems it was added to.
type AddValue struct {
	Pid int
	Subsystems []string
}
// SetValue records one Set call.
type SetValue struct {
	Subsystem string
	Name string
	Value string
}
// GetCallback stubs the result of Get for a subsystem/name pair.
type GetCallback struct {
	Subsystem string
	Name string
	Callback func() (string, error)
}
// SetCallback stubs the result of Set for a subsystem/name pair.
type SetCallback struct {
	Subsystem string
	Name string
	Callback func() error
}
// New builds a FakeCgroupsManager rooted at cgroupsPath for container id.
func New(cgroupsPath, id string) *FakeCgroupsManager {
	fake := new(FakeCgroupsManager)
	fake.cgroupsPath = cgroupsPath
	fake.id = id
	return fake
}
// Add records the pid and subsystems, unless a stubbed AddError is set.
func (m *FakeCgroupsManager) Add(pid int, subsystems ...string) error {
	if err := m.AddError; err != nil {
		return err
	}
	recorded := AddValue{Pid: pid, Subsystems: subsystems}
	m.addValues = append(m.addValues, recorded)
	return nil
}
// Set records a Set call, honouring a stubbed error or WhenSetting callback first.
func (m *FakeCgroupsManager) Set(subsystem, name, value string) error {
	if err := m.SetError; err != nil {
		return err
	}
	for _, callback := range m.setCallbacks {
		if callback.Subsystem != subsystem || callback.Name != name {
			continue
		}
		return callback.Callback()
	}
	m.setValues = append(m.setValues, SetValue{Subsystem: subsystem, Name: name, Value: value})
	return nil
}
// Get returns the stubbed value for (subsystem, name): a WhenGetting callback
// wins, otherwise the first matching value recorded by Set, otherwise "".
// (Fix: parameter was misspelled "subsytem".)
func (m *FakeCgroupsManager) Get(subsystem, name string) (string, error) {
	for _, cb := range m.getCallbacks {
		if cb.Subsystem == subsystem && cb.Name == name {
			return cb.Callback()
		}
	}
	for _, val := range m.setValues {
		if val.Subsystem == subsystem && val.Name == name {
			return val.Value, nil
		}
	}
	return "", nil
}
// SubsystemPath records the call and returns the fake per-instance cgroup
// path <cgroupsPath>/<subsystem>/instance-<id>. It never fails.
func (m *FakeCgroupsManager) SubsystemPath(subsystem string) (string, error) {
	m.subsystemPathCalls = append(m.subsystemPathCalls, subsystem)
	return path.Join(m.cgroupsPath, subsystem, "instance-"+m.id), nil
}
// SubsystemPathCallCount reports how many times SubsystemPath was called.
func (m *FakeCgroupsManager) SubsystemPathCallCount() int {
	return len(m.subsystemPathCalls)
}
// SubsystemArgsForCall returns the subsystem argument of the index-th
// SubsystemPath call (panics if index is out of range).
func (m *FakeCgroupsManager) SubsystemArgsForCall(index int) string {
	return m.subsystemPathCalls[index]
}
// AddedValues returns every recorded Add call, in order.
func (m *FakeCgroupsManager) AddedValues() []AddValue {
	return m.addValues
}
// SetValues returns every recorded Set call, in order.
func (m *FakeCgroupsManager) SetValues() []SetValue {
	return m.setValues
}
// WhenGetting stubs the result of Get for the given subsystem/name pair.
func (m *FakeCgroupsManager) WhenGetting(subsystem, name string, callback func() (string, error)) {
	m.getCallbacks = append(m.getCallbacks, GetCallback{subsystem, name, callback})
}
// WhenSetting stubs the result of Set for the given subsystem/name pair.
func (m *FakeCgroupsManager) WhenSetting(subsystem, name string, callback func() error) {
	m.setCallbacks = append(m.setCallbacks, SetCallback{subsystem, name, callback})
}
|
BabyMelvin/Linux-Api
|
driver/weidongshan/100ask/first_session/009_nor_flash/timer.c
|
<gh_stars>1-10
#include "s3c2440_soc.h"
/* Timer0 interrupt handler: drives the LEDs on GPF4..GPF6 from a counter. */
void timer_irq(void)
{
    static int cnt = 0;   /* tick counter, incremented each interrupt */
    int tmp;

    cnt ++;
    tmp = ~cnt;           /* invert: LEDs look active-low — TODO confirm */
    tmp &= 7;             /* keep only the 3 LED bits */

    GPFDAT &= ~(7 << 4);  /* clear GPF4..GPF6 */
    /* Fix: the old code did `GPFDAT |= ~(tmp << 4)`, OR-ing an unmasked
     * complement — that set every GPFDAT bit outside 4..6 as well (only
     * bits 4..6 were cleared above). OR in the prepared pattern only. */
    GPFDAT |= (tmp << 4);
}
/* Configure S3C2440 TIMER0 for a periodic 0.5 s interrupt. */
void timer_init (void)
{
    /*
     * TIMER0 clock:
     * Timer clk = PCLK / (prescaler value + 1) / (divider value)
     *           = 50000000 / (99 + 1) / 16
     *           = 31250 Hz
     */
    TCFG0 = 99; /* prescaler 0 = 99, shared by timer0 and timer1 */
    TCFG1 &= ~0xf;
    TCFG1 |= 3; /* MUX0: divider = 1/16 */
    /* Initial count: 31250 Hz / 15625 -> one interrupt every 0.5 s */
    TCNTB0 = 15625;
    /* Latch the initial value: pulse the manual-update bit ... */
    TCON |= (1 << 1); /* load from TCNTB0 & TCMPB0 */
    /* ... then clear it and start with auto-reload */
    TCON &= ~(1 << 1);
    TCON |= (1 << 0) | (1 << 3); /* bit0: start, bit3: auto reload */
    /* Route timer0 interrupts (offset 10) to our handler */
    register_irq(10, timer_irq);
}
|
muhammad-masood-ur-rehman/Skillrack
|
Python Programs/value-equals-previous-two.py
|
<filename>Python Programs/value-equals-previous-two.py
Value Equals Previous Two
An array of N integers is passed as the input. The program must find the combination of integers forming a sequence whose length is at least 4 which satisfies the below conditions.
- The ith index must satisfy arr[i] = arr[i-1] + arr[i-2]
- The length of the sequence must be the maximum possible
- If there are more than one sequences satisfying above two conditions, then print the sequence which contains the smaller value integers.
If there is no such combination of integers, then the program must print -1.
Boundary Condition(s):
1 <= N <= 25
Input Format:
The first line contains N.
The second line contains the N integer values separated by a space.
Output Format:
The first line contains the integer values in the sequence or -1.
Example Input/Output 1:
Input:
9
4 2 7 5 3 8 10 11 19
Output:
2 3 5 8
Explanation:
2 3 5 8 and 3 8 11 19 are the two sequences having same length. But as 2 3 5 8 contains the smaller values, it is printed as the output.
Example Input/Output 2:
Input:
4
1 5 6 10
Output:
-1
Explanation:
Here the longest possible sequence 1 5 6 has length 3, which is less than 4. Hence -1 is printed.
# Read N and the integers; sort so sequences are built from smaller values
# first (ties on length then resolve to the smaller-valued sequence).
num=int(input())
lis1=list(map(int,input().split()))
lis1.sort()
lis2,lis3=[],[]
# Try every ordered pair (ele, bar) of sorted values as the first two terms,
# then greedily extend: each later value must equal the sum of the previous
# two (arr[i] = arr[i-1] + arr[i-2]); t1/t2 index the last two terms.
for ele in range(num-1):
    for bar in range(ele+1,num):
        lis2.append(lis1[ele])
        lis2.append(lis1[bar])
        t1=0
        t2=1
        for foo in range(bar+1,num):
            if(lis1[foo]==(lis2[t1]+lis2[t2])):
                lis2.append(lis1[foo])
                t1+=1
                t2+=1
        lis3.append(lis2)
        lis2=[]
lis4,lis5=[],[]
# A valid answer needs a sequence of length >= 4; otherwise print -1.
for ele in lis3:
    lis4.append(len(ele))
maxima=max(lis4)
if(maxima<4):
    print(-1)
    exit()
# Among the maximum-length sequences, pick the one with the smallest sum
# (i.e. the smaller-valued integers) and print it.
for ele in lis3:
    if len(ele)==maxima:
        lis5.append(sum(ele))
for ele in lis3:
    if sum(ele)==min(lis5):
        print(*ele)
        break
|
diogocs1/comps
|
web/openerp/addons/base/res/res_users.py
|
<filename>web/openerp/addons/base/res/res_users.py
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2014 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import itertools
import logging
from functools import partial
from itertools import repeat
from lxml import etree
from lxml.builder import E
import openerp
from openerp import SUPERUSER_ID, models
from openerp import tools
import openerp.exceptions
from openerp.osv import fields, osv, expression
from openerp.tools.translate import _
from openerp.http import request
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Basic res.groups and res.users
#----------------------------------------------------------
class res_groups(osv.osv):
_name = "res.groups"
_description = "Access Groups"
_rec_name = 'full_name'
_order = 'name'
def _get_full_name(self, cr, uid, ids, field, arg, context=None):
res = {}
for g in self.browse(cr, uid, ids, context):
if g.category_id:
res[g.id] = '%s / %s' % (g.category_id.name, g.name)
else:
res[g.id] = g.name
return res
def _search_group(self, cr, uid, obj, name, args, context=None):
operand = args[0][2]
operator = args[0][1]
lst = True
if isinstance(operand, bool):
domains = [[('name', operator, operand)], [('category_id.name', operator, operand)]]
if operator in expression.NEGATIVE_TERM_OPERATORS == (not operand):
return expression.AND(domains)
else:
return expression.OR(domains)
if isinstance(operand, basestring):
lst = False
operand = [operand]
where = []
for group in operand:
values = filter(bool, group.split('/'))
group_name = values.pop().strip()
category_name = values and '/'.join(values).strip() or group_name
group_domain = [('name', operator, lst and [group_name] or group_name)]
category_domain = [('category_id.name', operator, lst and [category_name] or category_name)]
if operator in expression.NEGATIVE_TERM_OPERATORS and not values:
category_domain = expression.OR([category_domain, [('category_id', '=', False)]])
if (operator in expression.NEGATIVE_TERM_OPERATORS) == (not values):
sub_where = expression.AND([group_domain, category_domain])
else:
sub_where = expression.OR([group_domain, category_domain])
if operator in expression.NEGATIVE_TERM_OPERATORS:
where = expression.AND([where, sub_where])
else:
where = expression.OR([where, sub_where])
return where
_columns = {
'name': fields.char('Name', required=True, translate=True),
'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
'rule_groups': fields.many2many('ir.rule', 'rule_group_rel',
'group_id', 'rule_group_id', 'Rules', domain=[('global', '=', False)]),
'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
'view_access': fields.many2many('ir.ui.view', 'ir_ui_view_group_rel', 'group_id', 'view_id', 'Views'),
'comment' : fields.text('Comment', size=250, translate=True),
'category_id': fields.many2one('ir.module.category', 'Application', select=True),
'full_name': fields.function(_get_full_name, type='char', string='Group Name', fnct_search=_search_group),
}
_sql_constraints = [
('name_uniq', 'unique (category_id, name)', 'The name of the group must be unique within an application!')
]
    def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
        """Overridden to support ordering on ``full_name``, a computed,
        non-stored field the database cannot sort on directly.
        """
        # add explicit ordering if search is sorted on full_name
        if order and order.startswith('full_name'):
            # Fetch all matching ids, sort in Python on the computed field,
            # then apply offset/limit manually.
            # NOTE(review): the `count` flag is ignored on this path — TODO confirm.
            ids = super(res_groups, self).search(cr, uid, args, context=context)
            gs = self.browse(cr, uid, ids, context)
            gs.sort(key=lambda g: g.full_name, reverse=order.endswith('DESC'))
            gs = gs[offset:offset+limit] if limit else gs[offset:]
            return map(int, gs)
        return super(res_groups, self).search(cr, uid, args, offset, limit, order, context, count)
def copy(self, cr, uid, id, default=None, context=None):
group_name = self.read(cr, uid, [id], ['name'])[0]['name']
default.update({'name': _('%s (copy)')%group_name})
return super(res_groups, self).copy(cr, uid, id, default, context)
    def write(self, cr, uid, ids, vals, context=None):
        """Forbid group names starting with '-' (reserved marker) and clear
        the permission caches, since group changes affect access rights.
        """
        if 'name' in vals:
            if vals['name'].startswith('-'):
                raise osv.except_osv(_('Error'),
                        _('The name of the group can not start with "-"'))
        res = super(res_groups, self).write(cr, uid, ids, vals, context=context)
        # permissions may have changed: invalidate ACL and has_group caches
        self.pool['ir.model.access'].call_cache_clearing_methods(cr)
        self.pool['res.users'].has_group.clear_cache(self.pool['res.users'])
        return res
class res_users(osv.osv):
    """ User class. A res.users record models an OpenERP user and is different
        from an employee.

        res.users class now inherits from res.partner. The partner model is
        used to store the data related to the partner: lang, name, address,
        avatar, ... The user model is now dedicated to technical data.
    """
    __admin_ids = {}
    # per-database cache of already-verified {uid: password} pairs, see check()
    _uid_cache = {}
    _inherits = {
        'res.partner': 'partner_id',
    }
    _name = "res.users"
    _description = 'Users'
    def _set_new_password(self, cr, uid, id, name, value, args, context=None):
        """Inverse of the ``new_password`` function field: persist the new
        password when a non-False value is submitted.
        """
        if value is False:
            # Do not update the password if no value is provided, ignore silently.
            # For example web client submits False values for all empty fields.
            return
        if uid == id:
            # To change their own password users must use the client-specific change password wizard,
            # so that the new password is immediately used for further RPC requests, otherwise the user
            # will face unexpected 'Access Denied' exceptions.
            raise osv.except_osv(_('Operation Canceled'), _('Please use the change password wizard (in User Preferences or User menu) to change your own password.'))
        self.write(cr, uid, id, {'password': value})
def _get_password(self, cr, uid, ids, arg, karg, context=None):
return dict.fromkeys(ids, '')
_columns = {
'id': fields.integer('ID'),
'login_date': fields.date('Latest connection', select=1, copy=False),
'partner_id': fields.many2one('res.partner', required=True,
string='Related Partner', ondelete='restrict',
help='Partner-related data of the user', auto_join=True),
'login': fields.char('Login', size=64, required=True,
help="Used to log into the system"),
'password': fields.char('Password', size=64, invisible=True, copy=False,
help="Keep empty if you don't want the user to be able to connect on the system."),
'new_password': fields.function(_get_password, type='char', size=64,
fnct_inv=_set_new_password, string='Set Password',
help="Specify a value only when creating a user or if you're "\
"changing the user's password, otherwise leave empty. After "\
"a change of password, the user has to login again."),
'signature': fields.html('Signature'),
'active': fields.boolean('Active'),
'action_id': fields.many2one('ir.actions.actions', 'Home Action', help="If specified, this action will be opened at log on for this user, in addition to the standard menu."),
'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
# Special behavior for this field: res.company.search() will only return the companies
# available to the current user (should be the user's companies?), when the user_preference
# context is set.
'company_id': fields.many2one('res.company', 'Company', required=True,
help='The company this user is currently working for.', context={'user_preference': True}),
'company_ids':fields.many2many('res.company','res_company_users_rel','user_id','cid','Companies'),
}
# overridden inherited fields to bypass access rights, in case you have
# access to the user but not its corresponding partner
name = openerp.fields.Char(related='partner_id.name', inherited=True)
email = openerp.fields.Char(related='partner_id.email', inherited=True)
def on_change_login(self, cr, uid, ids, login, context=None):
if login and tools.single_email_re.match(login):
return {'value': {'email': login}}
return {}
    def onchange_state(self, cr, uid, ids, state_id, context=None):
        """Forward the state onchange to the users' related partners."""
        partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
        return self.pool.get('res.partner').onchange_state(cr, uid, partner_ids, state_id, context=context)
    def onchange_type(self, cr, uid, ids, is_company, context=None):
        """ Wrapper on the user.partner onchange_type, because some calls to the
            partner form view applied to the user may trigger the
            partner.onchange_type method, but applied to the user object.
        """
        # translate user ids into their partner ids before delegating
        partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
        return self.pool['res.partner'].onchange_type(cr, uid, partner_ids, is_company, context=context)
    def onchange_address(self, cr, uid, ids, use_parent_address, parent_id, context=None):
        """ Wrapper on the user.partner onchange_address, because some calls to the
            partner form view applied to the user may trigger the
            partner.onchange_type method, but applied to the user object.
        """
        # translate user ids into their partner ids before delegating
        partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
        return self.pool['res.partner'].onchange_address(cr, uid, partner_ids, use_parent_address, parent_id, context=context)
    def _check_company(self, cr, uid, ids, context=None):
        """Constraint: the active company must belong to the allowed
        companies, unless the allowed list is empty."""
        return all(((this.company_id in this.company_ids) or not this.company_ids) for this in self.browse(cr, uid, ids, context))

    _constraints = [
        (_check_company, 'The chosen company is not in the allowed companies for this user', ['company_id', 'company_ids']),
    ]

    _sql_constraints = [
        ('login_key', 'UNIQUE (login)',  'You can not have two users with the same login !')
    ]
    def _get_company(self,cr, uid, context=None, uid2=False):
        """Return the company id of ``uid2`` (defaults to the calling user),
        or False when none is set."""
        if not uid2:
            uid2 = uid
        # Use read() to compute default company, and pass load=_classic_write to
        # avoid useless name_get() calls. This will avoid prefetching fields
        # while computing default values for new db columns, as the
        # db backend may not be fully initialized yet.
        user_data = self.pool['res.users'].read(cr, uid, uid2, ['company_id'],
                                                context=context, load='_classic_write')
        comp_id = user_data['company_id']
        return comp_id or False
def _get_companies(self, cr, uid, context=None):
c = self._get_company(cr, uid, context)
if c:
return [c]
return False
    def _get_group(self,cr, uid, context=None):
        """Default groups for newly created users: Employee and Partner
        Manager, when those XML IDs still exist."""
        dataobj = self.pool.get('ir.model.data')
        result = []
        try:
            dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_user')
            result.append(group_id)
            dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_partner_manager')
            result.append(group_id)
        except ValueError:
            # if these groups do not exist anymore, fall back to no defaults
            pass
        return result
    def _get_default_image(self, cr, uid, context=None):
        """Default avatar: delegate to res.partner's colorized placeholder."""
        return self.pool['res.partner']._get_default_image(cr, uid, False, colorize=True, context=context)

    _defaults = {
        'password': '',
        'active': True,
        'customer': False,
        'company_id': _get_company,
        'company_ids': _get_companies,
        'groups_id': _get_group,
        'image': _get_default_image,
    }
# User can write on a few of his own fields (but not his groups for example)
SELF_WRITEABLE_FIELDS = ['password', 'signature', 'action_id', 'company_id', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz']
# User can read a few of his own fields
SELF_READABLE_FIELDS = ['signature', 'company_id', 'login', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz', 'tz_offset', 'groups_id', 'partner_id', '__last_update']
    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        """Read user records, with two special behaviors:

        - a user reading only SELF_READABLE_FIELDS about himself is served
          as superuser, bypassing access rights;
        - the stored password is masked for callers without write access
          on res.users.
        """
        def override_password(o):
            # mask the password of every record but the caller's own
            if 'password' in o and ('id' not in o or o['id'] != uid):
                o['password'] = '********'
            return o

        if fields and (ids == [uid] or ids == uid):
            for key in fields:
                if not (key in self.SELF_READABLE_FIELDS or key.startswith('context_')):
                    break
            else:
                # safe fields only, so we read as super-user to bypass access rights
                uid = SUPERUSER_ID

        result = super(res_users, self).read(cr, uid, ids, fields=fields, context=context, load=load)
        canwrite = self.pool['ir.model.access'].check(cr, uid, 'res.users', 'write', False)
        if not canwrite:
            # single-id reads return a dict, multi-id reads return a list
            if isinstance(ids, (int, long)):
                result = override_password(result)
            else:
                result = map(override_password, result)
        return result
    def create(self, cr, uid, vals, context=None):
        """Create a user; if the related partner already has a company set,
        align it with the user's company."""
        user_id = super(res_users, self).create(cr, uid, vals, context=context)
        user = self.browse(cr, uid, user_id, context=context)
        if user.partner_id.company_id:
            user.partner_id.write({'company_id': user.company_id.id})
        return user_id
def write(self, cr, uid, ids, values, context=None):
if not hasattr(ids, '__iter__'):
ids = [ids]
if ids == [uid]:
for key in values.keys():
if not (key in self.SELF_WRITEABLE_FIELDS or key.startswith('context_')):
break
else:
if 'company_id' in values:
user = self.browse(cr, SUPERUSER_ID, uid, context=context)
if not (values['company_id'] in user.company_ids.ids):
del values['company_id']
uid = 1 # safe fields only, so we write as super-user to bypass access rights
res = super(res_users, self).write(cr, uid, ids, values, context=context)
if 'company_id' in values:
for user in self.browse(cr, uid, ids, context=context):
# if partner is global we keep it that way
if user.partner_id.company_id and user.partner_id.company_id.id != values['company_id']:
user.partner_id.write({'company_id': user.company_id.id})
# clear caches linked to the users
self.pool['ir.model.access'].call_cache_clearing_methods(cr)
clear = partial(self.pool['ir.rule'].clear_cache, cr)
map(clear, ids)
db = cr.dbname
if db in self._uid_cache:
for id in ids:
if id in self._uid_cache[db]:
del self._uid_cache[db][id]
self.context_get.clear_cache(self)
self.has_group.clear_cache(self)
return res
def unlink(self, cr, uid, ids, context=None):
if 1 in ids:
raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the admin user as it is used internally for resources created by Odoo (updates, module installation, ...)'))
db = cr.dbname
if db in self._uid_cache:
for id in ids:
if id in self._uid_cache[db]:
del self._uid_cache[db][id]
return super(res_users, self).unlink(cr, uid, ids, context=context)
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args=[]
if not context:
context={}
ids = []
if name and operator in ['=', 'ilike']:
ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit, context=context)
return self.name_get(cr, user, ids, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a user, suffixing both name and login with "(copy)"
        unless the caller supplied explicit values."""
        user2copy = self.read(cr, uid, [id], ['login','name'])[0]
        # work on a copy so the caller's dict is never mutated
        default = dict(default or {})
        if ('name' not in default) and ('partner_id' not in default):
            default['name'] = _("%s (copy)") % user2copy['name']
        if 'login' not in default:
            default['login'] = _("%s (copy)") % user2copy['login']
        return super(res_users, self).copy(cr, uid, id, default, context)
    @tools.ormcache(skiparg=2)
    def context_get(self, cr, uid, context=None):
        """Build the user's session context: every ``context_*`` column
        (stripped of its prefix) plus ``lang`` and ``tz``.  Cached per uid.
        """
        user = self.browse(cr, SUPERUSER_ID, uid, context)
        result = {}
        for k in self._all_columns.keys():
            if k.startswith('context_'):
                context_key = k[8:]
            elif k in ['lang', 'tz']:
                context_key = k
            else:
                context_key = False
            if context_key:
                res = getattr(user, k) or False
                if isinstance(res, models.BaseModel):
                    # keep the context serializable: store the id, not the record
                    res = res.id
                result[context_key] = res or False
        return result
    def action_get(self, cr, uid, context=None):
        """Return the id of the "My Preferences" client action
        (base.action_res_users_my)."""
        dataobj = self.pool['ir.model.data']
        data_id = dataobj._get_id(cr, SUPERUSER_ID, 'base', 'action_res_users_my')
        return dataobj.browse(cr, uid, data_id, context=context).res_id
def check_super(self, passwd):
if passwd == tools.config['admin_passwd']:
return True
else:
raise openerp.exceptions.AccessDenied()
    def check_credentials(self, cr, uid, password):
        """ Override this method to plug additional authentication methods"""
        # default implementation: direct comparison against the stored
        # 'password' column via a search as superuser
        # NOTE(review): this implies passwords are stored/compared in clear
        # text in this base implementation — confirm before relying on it
        res = self.search(cr, SUPERUSER_ID, [('id','=',uid),('password','=',password)])
        if not res:
            raise openerp.exceptions.AccessDenied()
    def _login(self, db, login, password):
        """Return the uid matching (login, password) on database ``db``, or
        False when authentication fails.  Also refreshes ``login_date``
        on a best-effort basis."""
        if not password:
            return False
        user_id = False
        cr = self.pool.cursor()
        try:
            # autocommit: our single update request will be performed atomically.
            # (In this way, there is no opportunity to have two transactions
            # interleaving their cr.execute()..cr.commit() calls and have one
            # of them rolled back due to a concurrent access.)
            cr.autocommit(True)
            # check if user exists
            res = self.search(cr, SUPERUSER_ID, [('login','=',login)])
            if res:
                user_id = res[0]
                # check credentials
                self.check_credentials(cr, user_id, password)
                # We effectively unconditionally write the res_users line.
                # Even w/ autocommit there's a chance the user row will be locked,
                # in which case we can't delay the login just for the purpose of
                # update the last login date - hence we use FOR UPDATE NOWAIT to
                # try to get the lock - fail-fast
                # Failing to acquire the lock on the res_users row probably means
                # another request is holding it. No big deal, we don't want to
                # prevent/delay login in that case. It will also have been logged
                # as a SQL error, if anyone cares.
                try:
                    # NO KEY introduced in PostgreSQL 9.3 http://www.postgresql.org/docs/9.3/static/release-9-3.html#AEN115299
                    update_clause = 'NO KEY UPDATE' if cr._cnx.server_version >= 90300 else 'UPDATE'
                    cr.execute("SELECT id FROM res_users WHERE id=%%s FOR %s NOWAIT" % update_clause, (user_id,), log_exceptions=False)
                    cr.execute("UPDATE res_users SET login_date = now() AT TIME ZONE 'UTC' WHERE id=%s", (user_id,))
                    self.invalidate_cache(cr, user_id, ['login_date'], [user_id])
                except Exception:
                    _logger.debug("Failed to update last_login for db:%s login:%s", db, login, exc_info=True)
        except openerp.exceptions.AccessDenied:
            _logger.info("Login failed for db:%s login:%s", db, login)
            user_id = False
        finally:
            cr.close()

        return user_id
    def authenticate(self, db, login, password, user_agent_env):
        """Verifies and returns the user ID corresponding to the given
          ``login`` and ``password`` combination, or False if there was
          no matching user.

           :param str db: the database on which user is trying to authenticate
           :param str login: username
           :param str password: user password
           :param dict user_agent_env: environment dictionary describing any
               relevant environment attributes
        """
        uid = self._login(db, login, password)
        if uid == openerp.SUPERUSER_ID:
            # Successfully logged in as admin!
            # Attempt to guess the web base url...
            if user_agent_env and user_agent_env.get('base_location'):
                cr = self.pool.cursor()
                try:
                    base = user_agent_env['base_location']
                    ICP = self.pool['ir.config_parameter']
                    if not ICP.get_param(cr, uid, 'web.base.url.freeze'):
                        ICP.set_param(cr, uid, 'web.base.url', base)
                        cr.commit()
                except Exception:
                    _logger.exception("Failed to update web.base.url configuration parameter")
                finally:
                    cr.close()
        return uid
def check(self, db, uid, passwd):
"""Verifies that the given (uid, password) is authorized for the database ``db`` and
raise an exception if it is not."""
if not passwd:
# empty passwords disallowed for obvious security reasons
raise openerp.exceptions.AccessDenied()
if self._uid_cache.get(db, {}).get(uid) == passwd:
return
cr = self.pool.cursor()
try:
self.check_credentials(cr, uid, passwd)
if self._uid_cache.has_key(db):
self._uid_cache[db][uid] = passwd
else:
self._uid_cache[db] = {uid:passwd}
finally:
cr.close()
    def change_password(self, cr, uid, old_passwd, new_passwd, context=None):
        """Change current user password. Old password must be provided explicitly
        to prevent hijacking an existing user session, or for cases where the cleartext
        password is not used to authenticate requests.

        :return: True
        :raise: openerp.exceptions.AccessDenied when old password is wrong
        :raise: except_osv when new password is not set or empty
        """
        # re-authenticate before allowing the change
        self.check(cr.dbname, uid, old_passwd)
        if new_passwd:
            return self.write(cr, uid, uid, {'password': new_passwd})
        raise osv.except_osv(_('Warning!'), _("Setting empty passwords is not allowed for security reasons!"))
def preference_save(self, cr, uid, ids, context=None):
return {
'type': 'ir.actions.client',
'tag': 'reload_context',
}
def preference_change_password(self, cr, uid, ids, context=None):
return {
'type': 'ir.actions.client',
'tag': 'change_password',
'target': 'new',
}
    @tools.ormcache(skiparg=2)
    def has_group(self, cr, uid, group_ext_id):
        """Checks whether user belongs to given group.

        :param str group_ext_id: external ID (XML ID) of the group.
           Must be provided in fully-qualified form (``module.ext_id``), as there
           is no implicit module to use.
        :return: True if the current user is a member of the group with the
           given external ID (XML ID), else False.
        """
        assert group_ext_id and '.' in group_ext_id, "External ID must be fully qualified"
        module, ext_id = group_ext_id.split('.')
        # one SQL round-trip instead of going through the ORM; result is
        # cached by ormcache and invalidated from res_groups.write()
        cr.execute("""SELECT 1 FROM res_groups_users_rel WHERE uid=%s AND gid IN
                        (SELECT res_id FROM ir_model_data WHERE module=%s AND name=%s)""",
                   (uid, module, ext_id))
        return bool(cr.fetchone())
#----------------------------------------------------------
# Implied groups
#
# Extension of res.groups and res.users with a relation for "implied"
# or "inherited" groups. Once a user belongs to a group, it
# automatically belongs to the implied groups (transitively).
#----------------------------------------------------------
class cset(object):
    """ A cset (constrained set) is a set of elements that may be constrained to
        be a subset of other csets. Elements added to a cset are automatically
        added to its supersets. Cycles in the subset constraints are supported.
    """
    def __init__(self, xs):
        # csets that must contain at least every element of this one
        self.supersets = set()
        self.elements = set(xs)

    def subsetof(self, other):
        """Constrain this cset to be a subset of ``other``, pushing the
        current elements up immediately."""
        if other is self:
            return
        self.supersets.add(other)
        other.update(self.elements)

    def update(self, xs):
        """Add elements, propagating them transitively to all supersets."""
        fresh = set(xs) - self.elements
        if not fresh:
            # nothing new — this early exit is also what terminates cycles
            return
        self.elements |= fresh
        for superset in self.supersets:
            superset.update(fresh)

    def __iter__(self):
        return iter(self.elements)
# flatten one level of nesting, lazily: concat([[1, 2], [3]]) -> 1, 2, 3
concat = itertools.chain.from_iterable
class groups_implied(osv.osv):
    _inherit = 'res.groups'

    def _get_trans_implied(self, cr, uid, ids, field, arg, context=None):
        "computes the transitive closure of relation implied_ids"
        memo = {} # use a memo for performance and cycle avoidance

        def computed_set(g):
            # cset of every group transitively implied by g
            if g not in memo:
                memo[g] = cset(g.implied_ids)
                for h in g.implied_ids:
                    # h's closure must flow into g's closure
                    computed_set(h).subsetof(memo[g])
            return memo[g]

        res = {}
        for g in self.browse(cr, SUPERUSER_ID, ids, context):
            res[g.id] = map(int, computed_set(g))
        return res

    _columns = {
        'implied_ids': fields.many2many('res.groups', 'res_groups_implied_rel', 'gid', 'hid',
            string='Inherits', help='Users of this group automatically inherit those groups'),
        # computed transitive closure of implied_ids (not stored)
        'trans_implied_ids': fields.function(_get_trans_implied,
            type='many2many', relation='res.groups', string='Transitively inherits'),
    }

    def create(self, cr, uid, values, context=None):
        # route the users through write() so implied groups get granted too
        users = values.pop('users', None)
        gid = super(groups_implied, self).create(cr, uid, values, context)
        if users:
            # delegate addition of users to add implied groups
            self.write(cr, uid, [gid], {'users': users}, context)
        return gid

    def write(self, cr, uid, ids, values, context=None):
        res = super(groups_implied, self).write(cr, uid, ids, values, context)
        if values.get('users') or values.get('implied_ids'):
            # add all implied groups (to all users of each group)
            for g in self.browse(cr, uid, ids, context=context):
                gids = map(int, g.trans_implied_ids)
                vals = {'users': [(4, u.id) for u in g.users]}
                super(groups_implied, self).write(cr, uid, gids, vals, context)
        return res
class users_implied(osv.osv):
    _inherit = 'res.users'

    def create(self, cr, uid, values, context=None):
        """Create a user; group assignment is delegated to write() so that
        transitively implied groups are granted as well."""
        groups = values.pop('groups_id', None)
        user_id = super(users_implied, self).create(cr, uid, values, context)
        if groups:
            # delegate addition of groups to add implied groups
            self.write(cr, uid, [user_id], {'groups_id': groups}, context)
            self.pool['ir.ui.view'].clear_cache()
        return user_id

    def write(self, cr, uid, ids, values, context=None):
        """Write user values; whenever groups change, additionally grant
        every group transitively implied by the new group set."""
        if not isinstance(ids,list):
            ids = [ids]
        res = super(users_implied, self).write(cr, uid, ids, values, context)
        if values.get('groups_id'):
            # add implied groups for all users
            # (fix: forward the context to browse(), for consistency with
            # every other browse call in this file)
            for user in self.browse(cr, uid, ids, context=context):
                gs = set(concat(g.trans_implied_ids for g in user.groups_id))
                vals = {'groups_id': [(4, g.id) for g in gs]}
                super(users_implied, self).write(cr, uid, [user.id], vals, context)
            self.pool['ir.ui.view'].clear_cache()
        return res
#----------------------------------------------------------
# Virtual checkbox and selection fields for the res.users form view
#
# Extension of res.groups and res.users for the special groups view in the users
# form. This extension presents groups with selection and boolean widgets:
# - Groups are shown by application, with boolean and/or selection fields.
# Selection fields typically defines a role "Name" for the given application.
# - Uncategorized groups are presented as boolean fields and grouped in a
# section "Others".
#
# The user form view is modified by an inherited view (base.user_groups_view);
# the inherited view replaces the field 'groups_id' by a set of reified group
# fields (boolean or selection fields). The arch of that view is regenerated
# each time groups are changed.
#
# Naming conventions for reified groups fields:
# - boolean field 'in_group_ID' is True iff
# ID is in 'groups_id'
# - selection field 'sel_groups_ID1_..._IDk' is ID iff
# ID is in 'groups_id' and ID is maximal in the set {ID1, ..., IDk}
#----------------------------------------------------------
def name_boolean_group(id):
    """Name of the reified boolean field for group ``id``."""
    return 'in_group_%s' % id
def name_selection_groups(ids):
    """Name of the reified selection field covering the given group ids."""
    suffix = '_'.join(str(gid) for gid in ids)
    return 'sel_groups_' + suffix
def is_boolean_group(name):
    """True for field names produced by name_boolean_group()."""
    return name[:len('in_group_')] == 'in_group_'
def is_selection_groups(name):
    """True for field names produced by name_selection_groups()."""
    return name[:len('sel_groups_')] == 'sel_groups_'
def is_reified_group(name):
    """True for any reified group field name (boolean or selection form)."""
    # prefixes inlined so this predicate is self-contained
    return name.startswith('in_group_') or name.startswith('sel_groups_')
def get_boolean_group(name):
    """Extract the group id from an 'in_group_ID' field name."""
    return int(name[len('in_group_'):])
def get_selection_groups(name):
    """Extract the group ids from a 'sel_groups_ID1_..._IDk' field name."""
    # the first two '_'-separated tokens are the 'sel'/'groups' prefix
    return map(int, name.split('_')[2:])
def partition(f, xs):
    "return a pair equivalent to (filter(f, xs), filter(lambda x: not f(x), xs))"
    matched, rest = [], []
    for item in xs:
        bucket = matched if f(item) else rest
        bucket.append(item)
    return matched, rest
def parse_m2m(commands):
    """return a list of ids corresponding to a many2many value

    Supported command forms (see BaseModel.write x2many commands):
      (1, id, values)  update linked record  -> keep id
      (4, id)          link existing record  -> keep id
      (5,)             unlink all            -> reset accumulator
      (6, 0, ids)      replace with id list  -> take ids
    Bare integers are taken as ids; other commands (0/2/3) contribute no
    id and are ignored, as before.
    """
    ids = []
    for command in commands:
        if isinstance(command, (tuple, list)):
            if command[0] in (1, 4):
                # fix: the record id is the SECOND element of the command.
                # command[2] is the values dict for code 1, and does not
                # exist at all for a plain (4, id) pair (IndexError).
                ids.append(command[1])
            elif command[0] == 5:
                ids = []
            elif command[0] == 6:
                ids = list(command[2])
        else:
            ids.append(command)
    return ids
class groups_view(osv.osv):
    _inherit = 'res.groups'

    def create(self, cr, uid, values, context=None):
        # any group change may alter the generated user form view
        res = super(groups_view, self).create(cr, uid, values, context)
        self.update_user_groups_view(cr, uid, context)
        return res

    def write(self, cr, uid, ids, values, context=None):
        res = super(groups_view, self).write(cr, uid, ids, values, context)
        self.update_user_groups_view(cr, uid, context)
        return res

    def unlink(self, cr, uid, ids, context=None):
        res = super(groups_view, self).unlink(cr, uid, ids, context)
        self.update_user_groups_view(cr, uid, context)
        return res

    def update_user_groups_view(self, cr, uid, context=None):
        """Regenerate the arch of the inherited view 'base.user_groups_view',
        which replaces the raw groups_id widget on the user form with the
        reified boolean/selection fields, one section per application.
        """
        # the view with id 'base.user_groups_view' inherits the user form view,
        # and introduces the reified group fields
        # we have to try-catch this, because at first init the view does not exist
        # but we are already creating some basic groups
        view = self.pool['ir.model.data'].xmlid_to_object(cr, SUPERUSER_ID, 'base.user_groups_view', context=context)
        if view and view.exists() and view._name == 'ir.ui.view':
            # xml1: selection-style applications; xml2: boolean checkboxes
            xml1, xml2 = [], []
            xml1.append(E.separator(string=_('Application'), colspan="4"))
            for app, kind, gs in self.get_groups_by_application(cr, uid, context):
                # hide groups in category 'Hidden' (except to group_no_one)
                attrs = {'groups': 'base.group_no_one'} if app and app.xml_id == 'base.module_category_hidden' else {}
                if kind == 'selection':
                    # application name with a selection field
                    field_name = name_selection_groups(map(int, gs))
                    xml1.append(E.field(name=field_name, **attrs))
                    xml1.append(E.newline())
                else:
                    # application separator with boolean fields
                    app_name = app and app.name or _('Other')
                    xml2.append(E.separator(string=app_name, colspan="4", **attrs))
                    for g in gs:
                        field_name = name_boolean_group(g.id)
                        xml2.append(E.field(name=field_name, **attrs))

            xml = E.field(*(xml1 + xml2), name="groups_id", position="replace")
            xml.addprevious(etree.Comment("GENERATED AUTOMATICALLY BY GROUPS"))
            xml_content = etree.tostring(xml, pretty_print=True, xml_declaration=True, encoding="utf-8")
            view.write({'arch': xml_content})
        return True

    def get_application_groups(self, cr, uid, domain=None, context=None):
        # NOTE(review): `context` is accepted but not forwarded to search —
        # TODO confirm whether that is intentional
        return self.search(cr, uid, domain or [])

    def get_groups_by_application(self, cr, uid, context=None):
        """ return all groups classified by application (module category), as a list of pairs:
                [(app, kind, [group, ...]), ...],
            where app and group are browse records, and kind is either 'boolean' or 'selection'.
            Applications are given in sequence order.  If kind is 'selection', the groups are
            given in reverse implication order.
        """
        def linearized(gs):
            # return gs sorted so each group follows the groups it implies,
            # or None when the implication order is not total
            gs = set(gs)
            # determine sequence order: a group should appear after its implied groups
            order = dict.fromkeys(gs, 0)
            for g in gs:
                for h in gs.intersection(g.trans_implied_ids):
                    order[h] -= 1
            # check whether order is total, i.e., sequence orders are distinct
            if len(set(order.itervalues())) == len(gs):
                return sorted(gs, key=lambda g: order[g])
            return None

        # classify all groups by application
        gids = self.get_application_groups(cr, uid, context=context)
        by_app, others = {}, []
        for g in self.browse(cr, uid, gids, context):
            if g.category_id:
                by_app.setdefault(g.category_id, []).append(g)
            else:
                others.append(g)
        # build the result
        res = []
        apps = sorted(by_app.iterkeys(), key=lambda a: a.sequence or 0)
        for app in apps:
            gs = linearized(by_app[app])
            if gs:
                res.append((app, 'selection', gs))
            else:
                res.append((app, 'boolean', by_app[app]))
        if others:
            res.append((False, 'boolean', others))
        return res
class users_view(osv.osv):
    _inherit = 'res.users'

    def create(self, cr, uid, values, context=None):
        # translate reified group fields back into groups_id commands
        values = self._remove_reified_groups(values)
        return super(users_view, self).create(cr, uid, values, context)

    def write(self, cr, uid, ids, values, context=None):
        values = self._remove_reified_groups(values)
        return super(users_view, self).write(cr, uid, ids, values, context)

    def _remove_reified_groups(self, values):
        """ return `values` without reified group fields """
        add, rem = [], []
        values1 = {}
        for key, val in values.iteritems():
            if is_boolean_group(key):
                # checkbox: True -> link the group, False -> unlink it
                (add if val else rem).append(get_boolean_group(key))
            elif is_selection_groups(key):
                # selection: unlink every alternative, link the chosen one
                rem += get_selection_groups(key)
                if val:
                    add.append(val)
            else:
                values1[key] = val
        if 'groups_id' not in values and (add or rem):
            # remove group ids in `rem` and add group ids in `add`
            values1['groups_id'] = zip(repeat(3), rem) + zip(repeat(4), add)
        return values1

    def default_get(self, cr, uid, fields, context=None):
        """Compute default values, synthesizing any requested reified group
        fields from groups_id and honoring context['default_groups_ref']
        (a list of fully-qualified group XML IDs to preselect)."""
        group_fields, fields = partition(is_reified_group, fields)
        fields1 = (fields + ['groups_id']) if group_fields else fields
        values = super(users_view, self).default_get(cr, uid, fields1, context)
        self._add_reified_groups(group_fields, values)

        # add "default_groups_ref" inside the context to set default value for group_id with xml values
        if 'groups_id' in fields and isinstance(context.get("default_groups_ref"), list):
            groups = []
            ir_model_data = self.pool.get('ir.model.data')
            for group_xml_id in context["default_groups_ref"]:
                group_split = group_xml_id.split('.')
                if len(group_split) != 2:
                    raise osv.except_osv(_('Invalid context value'), _('Invalid context default_groups_ref value (model.name_id) : "%s"') % group_xml_id)
                try:
                    temp, group_id = ir_model_data.get_object_reference(cr, uid, group_split[0], group_split[1])
                except ValueError:
                    # unresolved XML ID: keep a False placeholder
                    group_id = False
                groups += [group_id]
            values['groups_id'] = groups
        return values

    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        """Read records, synthesizing the reified group fields from
        groups_id when they are requested."""
        # determine whether reified groups fields are required, and which ones
        fields1 = fields or self.fields_get(cr, uid, context=context).keys()
        group_fields, other_fields = partition(is_reified_group, fields1)

        # read regular fields (other_fields); add 'groups_id' if necessary
        drop_groups_id = False
        if group_fields and fields:
            if 'groups_id' not in other_fields:
                other_fields.append('groups_id')
                drop_groups_id = True
        else:
            other_fields = fields

        res = super(users_view, self).read(cr, uid, ids, other_fields, context=context, load=load)

        # post-process result to add reified group fields
        if group_fields:
            for values in (res if isinstance(res, list) else [res]):
                self._add_reified_groups(group_fields, values)
                if drop_groups_id:
                    # groups_id was only fetched to compute the reified fields
                    values.pop('groups_id', None)
        return res

    def _add_reified_groups(self, fields, values):
        """ add the given reified group fields into `values` """
        gids = set(parse_m2m(values.get('groups_id') or []))
        for f in fields:
            if is_boolean_group(f):
                values[f] = get_boolean_group(f) in gids
            elif is_selection_groups(f):
                # pick the last selected group of the series, or False
                selected = [gid for gid in get_selection_groups(f) if gid in gids]
                values[f] = selected and selected[-1] or False

    def fields_get(self, cr, uid, allfields=None, context=None, write_access=True):
        res = super(users_view, self).fields_get(cr, uid, allfields, context, write_access)
        # add reified groups fields
        for app, kind, gs in self.pool['res.groups'].get_groups_by_application(cr, uid, context):
            if kind == 'selection':
                # selection group field
                tips = ['%s: %s' % (g.name, g.comment) for g in gs if g.comment]
                res[name_selection_groups(map(int, gs))] = {
                    'type': 'selection',
                    'string': app and app.name or _('Other'),
                    'selection': [(False, '')] + [(g.id, g.name) for g in gs],
                    'help': '\n'.join(tips),
                    'exportable': False,
                    'selectable': False,
                }
            else:
                # boolean group fields
                for g in gs:
                    res[name_boolean_group(g.id)] = {
                        'type': 'boolean',
                        'string': g.name,
                        'help': g.comment,
                        'exportable': False,
                        'selectable': False,
                    }
        return res
#----------------------------------------------------------
# change password wizard
#----------------------------------------------------------
class change_password_wizard(osv.TransientModel):
    """
        A wizard to manage the change of users' passwords
    """
    _name = "change.password.wizard"
    _description = "Change Password Wizard"
    _columns = {
        'user_ids': fields.one2many('change.password.user', 'wizard_id', string='Users'),
    }

    def _default_user_ids(self, cr, uid, context=None):
        # one wizard line per user selected in the originating list view
        if context is None:
            context = {}
        user_model = self.pool['res.users']
        user_ids = context.get('active_model') == 'res.users' and context.get('active_ids') or []
        return [
            (0, 0, {'user_id': user.id, 'user_login': user.login})
            for user in user_model.browse(cr, uid, user_ids, context=context)
        ]

    _defaults = {
        'user_ids': _default_user_ids,
    }

    def change_password_button(self, cr, uid, ids, context=None):
        """Apply the new passwords; reload the client when the caller
        changed his own password (his session credentials are now stale)."""
        wizard = self.browse(cr, uid, ids, context=context)[0]
        need_reload = any(uid == user.user_id.id for user in wizard.user_ids)
        line_ids = [user.id for user in wizard.user_ids]
        self.pool.get('change.password.user').change_password_button(cr, uid, line_ids, context=context)
        if need_reload:
            return {
                'type': 'ir.actions.client',
                'tag': 'reload'
            }
        return {'type': 'ir.actions.act_window_close'}
class change_password_user(osv.TransientModel):
    """
        A model to configure users in the change password wizard
    """
    _name = 'change.password.user'
    _description = 'Change Password Wizard User'
    _columns = {
        'wizard_id': fields.many2one('change.password.wizard', string='Wizard', required=True),
        'user_id': fields.many2one('res.users', string='User', required=True),
        'user_login': fields.char('User Login', readonly=True),
        # fix: restore the human-readable field label, which had been
        # replaced by a "<PASSWORD>" redaction placeholder
        'new_passwd': fields.char('New Password'),
    }
    _defaults = {
        'new_passwd': '',
    }

    def change_password_button(self, cr, uid, ids, context=None):
        """Write each line's new password onto its user, then wipe the
        temporary cleartext values from the transient table."""
        for line in self.browse(cr, uid, ids, context=context):
            line.user_id.write({'password': line.new_passwd})
        # don't keep temporary passwords in the database longer than necessary
        self.write(cr, uid, ids, {'new_passwd': False}, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
yang-xiansen/drp
|
src/main/webapp/drp/base/view/WestView.js
|
<filename>src/main/webapp/drp/base/view/WestView.js<gh_stars>100-1000
/**
 * West-side navigation panel of the DRP application.
 *
 * An accordion of four groups (inventory, stock in/out, system management,
 * personal center), each holding a tree panel. Every leaf node carries an
 * id prefixed with "menu_"; those ids identify the work area to open when
 * the node is clicked (handled by the controller, not here).
 */
Ext.define("drp.base.view.WestView", {
    extend : 'Ext.panel.Panel',
    alias : 'widget.westview',
    collapsible : true,
    split : true,
    border : 0,
    margins : '0 2 0 0',
    width : 180,
    titleAlign: 'center',
    title : "业务导航",
    align: 'center',
    layout : 'accordion',
    layoutConfig : {
        titleCollapse : false,
        animate : true,
        activeOnTop : true
    },
    items : [{
        title : "库存管理",
        titleAlign: 'center',
        autoScroll : true,
        items:[{
            xtype : "treepanel",
            rootVisible : false, // do not display the root node
            displayField : "text",
            margin : '0 0 5 0',
            border : 0,
            root : {
                expanded : true,
                children : [{
                    text : "<span style='font-weight:bold'>实时库存</span>",
                    id : 'menu_current_inventory',
                    leaf : true
                }/*, {
                    text : "<span style='font-weight:bold'>每日总账</span>",
                    id : 'menu_monthend_inventory',
                    leaf : true
                }*/]
            }
        }]
    }, {
        title : "出入库管理",
        titleAlign: 'center',
        autoScroll : true,
        items:[{
            xtype : "treepanel",
            rootVisible : false, // do not display the root node
            displayField : "text",
            margin : '0 0 5 0',
            border : 0,
            root : {
                expanded : true,
                children : [{
                    text : "<span style='font-weight:bold'>入库单管理</span>",
                    id : 'menu_stockin',
                    leaf : true
                }, {
                    text : "<span style='font-weight:bold'>出库单管理</span>",
                    id : 'menu_stockout',
                    leaf : true
                }]
            }
        }]
    }, {
        title : "系统管理",
        titleAlign: 'center',
        autoScroll : true,
        items:[{
            xtype : "treepanel",
            rootVisible : false,
            displayField : "text",
            margin : '0 0 0 0',
            border : 0,
            root : {
                expanded : true,
                children : [ {
                    text : "商品管理",
                    leaf : false,
                    children : [{
                        text : "<span style='font-weight:bold'>商品类别</span>",
                        id : 'menu_ware_category',
                        leaf : true
                    }, {
                        text : "<span style='font-weight:bold'>商品</span>",
                        id : 'menu_ware',
                        leaf : true
                    }, {
                        text : "<span style='font-weight:bold'>供应商</span>",
                        id : 'menu_vendor',
                        leaf : true
                    }]
                }, {
                    text : "用户管理",
                    leaf : false,
                    children : [{
                        text : "<span style='font-weight:bold'>负责人</span>",
                        id : 'menu_manager', // menu item ids always take the "menu_" prefix
                        leaf : true
                    }, {
                        text : "<span style='font-weight:bold'>库管员</span>",
                        id : 'menu_warekeeper',
                        leaf : true
                    }, {
                        text : "<span style='font-weight:bold'>经手人</span>",
                        id : 'menu_regulator',
                        leaf : true
                    }]
                }]
            }
        }]
    }, {
        title : "个人中心",
        titleAlign: 'center',
        autoScroll : true,
        items:[{
            xtype : "treepanel",
            rootVisible : false, // do not display the root node
            displayField : "text",
            margin : '0 0 5 0',
            border : 0,
            root : {
                expanded : true,
                children : [{
                    text : "<span style='font-weight:bold'>修改密码</span>",
                    id : 'menu_update_password',
                    leaf : true
                }]
            }
        }]
    }]
});
|
wrmlab/wrmos
|
krn/thread.h
|
//##################################################################################################
//
// Thread implementation.
//
//##################################################################################################
#ifndef THREAD_H
#define THREAD_H
#include "list.h"
#include "sys_eframe.h"
#include "sys_stack.h"
#include "l4_types.h"
#include "l4_syscalls.h"
#include "task.h"
#include "sysclock.h"
#include "thrid.h"
#include "tmaccount.h"
#include "arch.h"
#include "wlibc_assert.h"
class Thread_t;
typedef list_t <Thread_t*, Kcfg::Threads_max> threads_t;
// helper for threads_t:  linear scan, true if 'thr' is a member of 'list'
static inline bool is_exist(threads_t* list, Thread_t* thr)
{
	threads_t::iter_t it = list->begin();
	const threads_t::iter_t end = list->end();
	while (it != end)
	{
		if (*it == thr)
			return true;
		++it;
	}
	return false;
}
// helper for threads_t:  linear scan, true if iterator 'iter' belongs to 'list'
static inline bool is_exist(threads_t* list, threads_t::iter_t iter)
{
	threads_t::iter_t it = list->begin();
	const threads_t::iter_t end = list->end();
	while (it != end)
	{
		if (it == iter)
			return true;
		++it;
	}
	return false;
}
/*
static void dump(threads_t* list)
{
unsigned cnt = 0;
printk(" list=0x%x, sz=%u:\n", list, list->size());
for (threads_t::iter_t it=list->begin(); it!=list->end(); ++it, ++cnt)
printk(" %u. thr=%s, prio=%u.\n", cnt, (*it)->name(), (*it)->prio());
}
*/
// helpers to call Threads_t members
threads_t::iter_t threads_add_ready(Thread_t* thr);
threads_t::iter_t threads_del_ready(threads_t::iter_t it);
threads_t::iter_t threads_add_send(Thread_t* thr);
threads_t::iter_t threads_del_send(threads_t::iter_t it);
threads_t::iter_t threads_add_rcv_timeout_waiting(Thread_t* thr);
threads_t::iter_t threads_del_rcv_timeout_waiting(threads_t::iter_t it);
threads_t::iter_t threads_add_snd_timeout_waiting(Thread_t* thr);
threads_t::iter_t threads_del_snd_timeout_waiting(threads_t::iter_t it);
threads_t::iter_t threads_timeslice_expired();
Thread_t* threads_find(L4_thrid_t id);
// helpers to call Sched_t members
Thread_t* cur_thr();
const char* cur_thr_name();
//--------------------------------------------------------------------------------------------------
// Kernel representation of one L4 thread:  kernel stack, UTCB mapping, IPC state
// (normal / pagefault / exception), scheduling state with priority inheritance,
// and time accounting.  Instances are placed via the placement operator new below.
class Thread_t
{
	enum
	{
		Stack_32bit_val = 0xa5a5a5a5,  // fill pattern, used by unused_kstack_sz() to measure stack usage
		Stack_sz = Cfg_page_sz,        // kernel stack size, one page
	};

public:

	enum
	{
		Prio_min = 0,
		Prio_max = 255,
		Snd_mask = 0x10,   // bit set in every send-blocked state
		Rcv_mask = 0x20    // bit set in every receive-blocked state
	};

	enum state_t
	{
		Idle              = 0,             // unused thread
		Inactive          = 1,             // inactive thread
		Active            = 2,             // active thread, start by receiving a start msg from pager
		Ready             = 3,             // thread ready to execute
		Send_ipc          = Snd_mask | 0,  // thread blocked in IpcSnd phase
		Send_pfault       = Snd_mask | 1,  // thread blocked for send pfault_msg to pager
		Send_exception    = Snd_mask | 2,  // thread blocked for send except_msg to exc-handler
		Receive_ipc       = Rcv_mask | 0,  // thread blocked in IpcRcv phase
		Receive_pfault    = Rcv_mask | 1,  // thread blocked for receive map/grant msg from pager
		Receive_exception = Rcv_mask | 2   // thread blocked for receive except_msg from exc-handler
	};

private:

	addr_t _kstack_area;   // kernel stack area, remapped by alloc_kstack() with guard between stacks

	// TODO: may be enough store 'timeout' and 'partner'
	// State of the current normal-IPC operation (send and/or receive phase).
	struct Ipc_t
	{
		uint64_t timeout;       // systime in usec, timeout for current operation (snd/rcv)
		L4_thrid_t to;          // dst for snd phase
		L4_thrid_t from_spec;   // src pattern for rcv phase
		Ipc_t() : timeout(0), to(L4_thrid_t::Nil), from_spec(L4_thrid_t::Nil) {}
		void clear() { timeout = 0; to = from_spec = L4_thrid_t::Nil; }
	};

	// page fault data
	struct Pfault_t
	{
		word_t addr;     // pfault address
		word_t inst;     // pfault instruction
		word_t access;   // access permission
		Pfault_t() : addr(0), inst(0), access(0) {}
		void clear() { addr = inst = access = 0; }
	};

	// exception fault data
	struct Efault_t
	{
		int type;                     // exc type
		word_t pfault_addr_and_acc;   // pfault addr and access
		Efault_t() : type(0), pfault_addr_and_acc(0) {}
		void clear() { type = pfault_addr_and_acc = 0; }
	};

	static unsigned _counter;   // for set id
	unsigned _id;               // XXX: is it need?
	addr_t _ksp;                // kernel stack pointer
	char _name[8];              // thread's name for debug
	unsigned _flags;            // now use only 1 flag: FPU=1
	unsigned _fpu_in_use;       // is need store/restore FPU context ?
	Float_frame_t _float_frame __attribute__((aligned(16)));   // saved FPU register frame

	// aspace data
	Task_t* _task;      // thread's task
	addr_t _utcb_uva;   // UTCB va for user address space
	addr_t _utcb_kva;   // UTCB va for kernel address space
	paddr_t _utcb_pa;   // UTCB location

	// L4 data
	L4_thrid_t _glob_id;    // global thread ID
	L4_thrid_t _sched_id;   // sched thread ID, global or local
	L4_thrid_t _pager_id;   // pager thread ID, global or local
	Thread_t* _sched;       // resolved scheduler thread, set in start()
	Thread_t* _pager;       // resolved pager thread, set in start()
	state_t _state;         // current scheduling/IPC state, changed only via state()
	Ipc_t _ipc;             // store active normal-ipc operation data
	Pfault_t _pfault;       // store active pfault-ipc operation data
	Efault_t _efault;       // store active exc-ipc operation data

	// sched data
	// One priority inherited from 'owner' (priority-inversion avoidance).
	struct InheritedPrio_t
	{
		L4_thrid_t owner;
		unsigned prio;
		InheritedPrio_t(L4_thrid_t o, unsigned p) : owner(o), prio(p) {}
		inline bool operator < (InheritedPrio_t other) const { return prio < other.prio; };
	};
	typedef list_t <InheritedPrio_t, 2> inhprios_t;
	unsigned _prio;                // thread priority
	inhprios_t _inherited_prios;   // some threads inherit priority for prio inversion
	L4_thrid_t _prio_heir;         // this thread inherited self prio to _prio_heir

	// Wrm extention: signal
	bool _signal_pending;   // flag: is signal pending

	int _entry_type;   // 1 - syscall, 2 - pfault, 3 - kpfault, 4 - irq

	threads_t::iter_t _iter;              // iterator for current threads list
	L4_clock_t _update_timeslice_point;   // time point
	unsigned _remaning_timeslice;         // time stamp

	// time accounting
	Time_account_t _tmaccount;   // for accounting timeslice and profile

public:

	// Dump the whole kernel stack contents (debug aid).
	void print_kstack()
	{
		printf("stack: sz=%u:\n", Stack_sz);
		for (unsigned i=0; i<Stack_sz/sizeof(uint32_t); ++i)
			printf(" 0x%08x %8x\n", _kstack_area + i * sizeof(uint32_t), ((uint32_t*)(_kstack_area))[i]);
		printf("\n");
	}

	// Bytes of kernel stack never written, found by scanning for the fill pattern
	// installed by setup_kstack().
	unsigned unused_kstack_sz()
	{
		for (unsigned i=0; i<Stack_sz/sizeof(uint32_t); ++i)
			if (((uint32_t*)(_kstack_area))[i] != Stack_32bit_val)
				return i * sizeof(uint32_t);
		return Stack_sz;
	}

	// for replacement new in list_t
	static void* operator new(size_t sz, Thread_t* thr)
	{
		(void) sz;
		return thr;
	}

	explicit Thread_t() : _kstack_area(0), _id(_counter++), _ksp(0/*(addr_t)_kstack + sizeof(_kstack)*/), _flags(0), _fpu_in_use(0),
	                      _task(0), _utcb_uva(-1), _utcb_kva(-1), _utcb_pa(-1),
	                      _glob_id(L4_thrid_t::Nil), _sched_id(L4_thrid_t::Nil), _pager_id(L4_thrid_t::Nil),
	                      _sched(0), _pager(0), _state(Idle),
	                      _ipc(), _pfault(), _efault(),
	                      _prio(0), _prio_heir(L4_thrid_t::Nil), _signal_pending(false),
	                      _update_timeslice_point(0), _remaning_timeslice(0), _tmaccount(_name)
	{
		_name[0] = 0;
		//printk("Thread::ctor: id=%d, _kstack=0x%x, sz=%u, _ksp=0x%x.\n", _id, _kstack_area, Stack_sz, _ksp);
	}

	// Set the debug name (truncated to sizeof(_name)-1) and mirror its first
	// 4 bytes into the UTCB tls area when the kernel-side UTCB mapping exists.
	void name(const char* n, size_t sz = sizeof(_name)-1)
	{
		unsigned len = min(sizeof(_name)-1, sz);
		strncpy(_name, n, len);
		_name[len] = '\0';
		// XXX: set user accesible thread name (wrm extention)
		if (kutcb() != (void*)-1)
			memcpy(kutcb()->tls, _name, 4);
	}

	// allocate kstack and remap it to separate vspace with guard between stacks
	void alloc_kstack()
	{
		wassert(!_kstack_area && !_ksp);
		addr_t va = kmem_alloc(Stack_sz, Cfg_page_sz);
		paddr_t pa = kmem_paddr(va, Stack_sz);
		addr_t new_va = Aspace::kmap_kstack(pa, Stack_sz);
		// TODO: Aspace::kunmap(va, Stack_sz);
		_kstack_area = new_va;
	}

	// Point _ksp at the stack top and fill the area with the debug pattern.
	void setup_kstack()
	{
		wassert(_kstack_area);
		_ksp = _kstack_area + Stack_sz;
		// init stack area for debug
		for (unsigned i=0; i<Stack_sz/sizeof(uint32_t); ++i)
			((uint32_t*)(_kstack_area))[i] = Stack_32bit_val;
	}

	// trivial getters
	inline unsigned id() const { return _id; }
	inline addr_t ksp() const { return _ksp; }
	inline addr_t kstack_area() const { return _kstack_area; }
	inline size_t kstack_sz() const { return Stack_sz; }
	inline addr_t kentry_sp() const { return _kstack_area + Stack_sz; }
	inline const char* name() const { return _name; }
	inline unsigned flags() const { return _flags; }
	inline unsigned fpu_in_use() const { return _fpu_in_use; }
	inline Task_t* task() const { return _task; }
	inline L4_thrid_t globid() const { return _glob_id; }
	inline L4_thrid_t localid() const { return _utcb_uva; }   // local id == user UTCB address
	inline L4_thrid_t schedid() const { return _sched_id; }
	inline L4_thrid_t pagerid() const { return _pager_id; }
	inline Thread_t* sched() const { return _sched; }
	inline Thread_t* pager() const { return _pager; }
	inline bool is_active() const { return _state != Inactive; }
	inline uint8_t prio() const { return _prio; }
	inline L4_thrid_t prio_heir() const { return _prio_heir; }
	inline bool signal_pending() const { return _signal_pending; }
	inline int entry_type() const { return _entry_type; }
	threads_t::iter_t iter() const { return _iter; }

	// trivial setters
	inline void ksp(addr_t v) { _ksp = v; }
	inline void fpu_in_use(unsigned v) { _fpu_in_use = v; }
	inline void task(Task_t* v) { _task = v; }
	inline void globid(L4_thrid_t v) { _glob_id = v; }
	inline void schedid(L4_thrid_t v) { _sched_id = v; }
	inline void pagerid(L4_thrid_t v) { _pager_id = v; }
	inline void sched(Thread_t* v) { _sched = v; }
	inline void pager(Thread_t* v) { _pager = v; }
	inline void prio_heir(L4_thrid_t v) { _prio_heir = v; }
	inline void signal_pending(bool v) { _signal_pending = v; }
	inline void entry_type(int v) { _entry_type = v; }
	inline void iter(threads_t::iter_t v) { _iter = v; }

	// timeslice account
	unsigned timeslice() const { return _remaning_timeslice; }
	void timeslice(unsigned v) { _remaning_timeslice = v; }
	void timeslice_start(L4_clock_t now) { _update_timeslice_point = now; }
	// Consume elapsed time from the remaining timeslice (clamped at 0) and stop measuring.
	void timeslice_stop(L4_clock_t now)
	{
		L4_clock_t exec_time = now - _update_timeslice_point;
		_remaning_timeslice -= min(exec_time, _remaning_timeslice);
		_update_timeslice_point = 0;
	}
	// Consume elapsed time and restart measuring from 'now'.
	void timeslice_update(L4_clock_t now)
	{
		L4_clock_t exec_time = now - _update_timeslice_point;
		_remaning_timeslice -= min(exec_time, _remaning_timeslice);
		_update_timeslice_point = now;
	}

	// time account funcs — thin forwarders to Time_account_t
	inline L4_clock_t tmspan_exec() const { return _tmaccount.tmspan_exec(); }
	inline L4_clock_t tmspan_uexec() const { return _tmaccount.tmspan_uexec(); }
	inline L4_clock_t tmspan_kexec() const { return _tmaccount.tmspan_kexec(); }
	inline L4_clock_t tmspan_kentry() const { return _tmaccount.tmspan_kentry(); }
	inline L4_clock_t tmspan_kwork() const { return _tmaccount.tmspan_kwork(); }
	inline L4_clock_t tmspan_kexit() const { return _tmaccount.tmspan_kexit(); }
	inline L4_clock_t tmspan_kwork1() const { return _tmaccount.tmspan_kwork1(); }
	inline L4_clock_t tmspan_kwork2() const { return _tmaccount.tmspan_kwork2(); }
	inline L4_clock_t tmspan_kwork3() const { return _tmaccount.tmspan_kwork3(); }
	inline L4_clock_t tmpoint_suspend() const { return _tmaccount.tmpoint_suspend(); }
	inline void tmevent_tick(L4_clock_t c) { return _tmaccount.tmevent_tick(c); }
	inline void tmevent_resume(L4_clock_t c) { return _tmaccount.tmevent_resume(c); }
	inline void tmevent_kexit_start(L4_clock_t c) { return _tmaccount.tmevent_kexit_start(c); }
	inline void tmevent_kentry_end(L4_clock_t c) { return _tmaccount.tmevent_kentry_end(c); }
	inline void tmevent_suspend(L4_clock_t c) { return _tmaccount.tmevent_suspend(c); }
	inline void tmevent_kwork_1s(L4_clock_t c) { return _tmaccount.tmevent_kwork_1s(c); }
	inline void tmevent_kwork_1e(L4_clock_t c) { return _tmaccount.tmevent_kwork_1e(c); }
	inline void tmevent_kwork_2s(L4_clock_t c) { return _tmaccount.tmevent_kwork_2s(c); }
	inline void tmevent_kwork_2e(L4_clock_t c) { return _tmaccount.tmevent_kwork_2e(c); }
	inline void tmevent_kwork_3s(L4_clock_t c) { return _tmaccount.tmevent_kwork_3s(c); }
	inline void tmevent_kwork_3e(L4_clock_t c) { return _tmaccount.tmevent_kwork_3e(c); }

	// Change base priority, migrating between per-priority ready lists if Ready.
	void prio(uint8_t v)
	{
		// ready list is unique for every priority
		// delete item for current ready list
		if (_state == Ready)
			_iter = threads_del_ready(_iter);
		_prio = v;
		// and add to new ready list
		if (_state == Ready)
			_iter = threads_add_ready(this);
	}

	// Replace the flags word; clearing the FPU flag also disables FPU context handling.
	void flags(word_t f)
	{
		if ((_flags & L4_flags_fpu)  &&  !(f & L4_flags_fpu))
		{
			// disable FPU for thread
			fpu_in_use(false);
			entry_frame()->disable_fpu();
		}
		//force_printk_uart("flags: 0x%lx -> 0x%lx\n", _flags, f);
		_flags = f;
	}

	static const char* state_str(int state)
	{
		switch (state)
		{
			case Idle:               return "idle";
			case Inactive:           return "inactive";
			case Active:             return "active";
			case Ready:              return "ready";
			case Send_ipc:           return "send_ipc";
			case Send_pfault:        return "send_pfault";
			case Send_exception:     return "send_exc";
			case Receive_ipc:        return "receive_ipc";
			case Receive_pfault:     return "receive_pfault";
			case Receive_exception:  return "receive_exc";
		}
		return "__unknown_state__";
	}
	const char* state_str() const { return state_str(_state); }

	bool inline is_snd_state() const { return _state & Snd_mask; }
	bool inline is_rcv_state() const { return _state & Rcv_mask; }

	inline void ipc_to(L4_thrid_t v) { _ipc.to = v; }
	inline void ipc_from_spec(L4_thrid_t v) { _ipc.from_spec = v; }
	inline L4_thrid_t ipc_to() const { return _ipc.to; }
	inline L4_thrid_t ipc_from_spec() const { return _ipc.from_spec; }

	// Record a pending receive phase:  absolute expiry time (-1 == never) and
	// the sender pattern, then move to Receive_ipc.
	inline void save_rcv_phase(L4_time_t timeout, L4_thrid_t from_spec)
	{
		wassert(timeout.is_rel());
		_ipc.timeout = timeout.is_never() ? -1 : SystemClock_t::sys_clock(__func__) + timeout.rel_usec();
		_ipc.from_spec = from_spec;
		state(Receive_ipc);
		//printk("-- from_spec=0x%x/%u, timeout=%llu, expired=%llu.\n",
		//	from_spec.raw(), from_spec.number(), timeout.rel_usec(), _ipc.timeout);
	}

	// Record a pending send phase:  absolute expiry time (-1 == never) and
	// the destination, then move to Send_ipc.
	inline void save_snd_phase(L4_time_t timeout, L4_thrid_t to)
	{
		wassert(timeout.is_rel());
		_ipc.timeout = timeout.is_never() ? -1 : SystemClock_t::sys_clock(__func__) + timeout.rel_usec();
		_ipc.to = to;
		state(Send_ipc);
		//printk("-- to=0x%x/%u, timeout=%llu, expired=%llu.\n",
		//	to.raw(), to.number(), timeout.rel_usec(), _ipc.timeout);
	}

	inline L4_clock_t timeout() { return _ipc.timeout; }

	// Store page-fault details for the pending pfault IPC to the pager.
	void pf_save(word_t fault_addr, word_t fault_access, word_t fault_inst)
	{
		_pfault.addr = fault_addr;
		_pfault.access = fault_access;
		_pfault.inst = fault_inst;
		//_ipc.to = _pager_id;
	}

	inline word_t pf_addr() const { return _pfault.addr; }
	inline word_t pf_inst() const { return _pfault.inst; }
	inline word_t pf_access() const { return _pfault.access; }

	// Store exception details for the pending exception IPC to the exc-handler.
	inline void exc_save(int exc_type, word_t pfault_addr_and_acc)
	{
		_efault.type = exc_type;
		_efault.pfault_addr_and_acc = pfault_addr_and_acc;
	}

	inline int exc_type() const { return _efault.type; }
	inline word_t exc_pf() const { return _efault.pfault_addr_and_acc; }

	void inherit_prio_dump()
	{
		for (inhprios_t::iter_t it=_inherited_prios.begin(); it!=_inherited_prios.end(); ++it)
			printk("inh_prio_dump: owner=%u, prio=%u.\n", it->owner.number(), it->prio);
	}

	inline inhprios_t::iter_t inherit_prio_find(L4_thrid_t owner)
	{
		for (inhprios_t::iter_t it=_inherited_prios.begin(); it!=_inherited_prios.end(); ++it)
			if (it->owner == owner)
				return it;
		return _inherited_prios.end();
	}

	// Record a priority inherited from 'owner'; list is kept sorted so that
	// prio_max() can read the maximum from the back.
	void inherit_prio_add(L4_thrid_t owner, unsigned priority)
	{
		printk("inh_prio_add: iam=%u: owner=%u, prio=%u, inh_prio_list_sz=%zu.\n",
			globid().number(), owner.number(), priority, _inherited_prios.size());
		wassert(owner != globid());
		wassert(inherit_prio_find(owner) == _inherited_prios.end());
		_inherited_prios.insert_sort(InheritedPrio_t(owner, priority));
	}

	// Remove the priority previously inherited from 'owner'.
	void inherit_prio_del(L4_thrid_t owner, unsigned priority)
	{
		printk("inh_prio_del: iam=%u: owner=%u, prio=%u, inh_prio_list_sz=%zu.\n",
			globid().number(), owner.number(), priority, _inherited_prios.size());
		(void) priority;
		inhprios_t::iter_t it = inherit_prio_find(owner);
		wassert(owner != globid());
		wassert(it != _inherited_prios.end());
		wassert(it->prio == priority);
		_inherited_prios.erase(it);
	}

	// Effective priority:  own priority or the highest inherited one.
	inline unsigned prio_max()
	{
		if (_inherited_prios.empty())
			return _prio;
		unsigned inh_max = _inherited_prios.back().prio;
		return max(_prio, inh_max);
	}

	// Does this receiving thread accept an IPC from 'snd' according to its
	// from_spec pattern?  On acceptance *use_local_id tells whether the 'from'
	// field should be reported as a local id.
	bool is_good_sender(const Thread_t* snd, bool* use_local_id) const
	{
		wassert(state() == Receive_ipc);
		wassert(_ipc.from_spec != L4_thrid_t::Nil);

		*use_local_id = false;
		bool local = task() == snd->task();  // is 'snd' local thread

		// check rcv from
		if (_ipc.from_spec.is_any()                     ||  // 'from' is any thread
			(local  &&  _ipc.from_spec.is_any_local())  ||  // 'from' is any_local thread
			(local  &&  _ipc.from_spec == snd->localid()) ||  // 'from' is local_id
			(           _ipc.from_spec == snd->globid()))   // 'from' is global_id
			return true;

		// set local or global flag for 'from' field
		// NOTE(review): this looks unreachable — every condition below already
		// returned true above, so *use_local_id can never be set here; verify
		// whether this assignment was meant to run before the return.
		if (local  &&  (_ipc.from_spec.is_any_local()  ||  _ipc.from_spec == snd->localid()))
			*use_local_id = true;

		return false;
	}

	// Does this receiving thread accept an interrupt IPC for IRQ 'irq'?
	bool is_irq_acceptable(unsigned irq)
	{
		wassert(state() == Receive_ipc);
		wassert(_ipc.from_spec != L4_thrid_t::Nil);

		// check rcv from
		if (_ipc.from_spec.is_any()  ||                       // 'from' is any thread
			_ipc.from_spec == L4_thrid_t::create_irq(irq))    // 'from' is equal
			return true;

		return false;
	}

	// Activate an inactive thread:  map its UTCB into the task's user space
	// and move it to Active (it then waits for a start message from the pager).
	void activate()
	{
		printk("activate thread id=%u.\n", _id);
		wassert(!is_active() && "activate: already active.");
		wassert(_utcb_pa && "activate: no utcb location.");
		wassert(_task->is_configured() && "activate: aspace has not been configured.");

		// map utcb
		addr_t utcb_uva = task()->alloc_utcb_uspace();
		wassert(utcb_uva);
		_task->map(utcb_uva, _utcb_pa, Cfg_page_sz, Acc_utcb, Cachable);
		_utcb_uva = utcb_uva;

		state(Active);
	}

	// Start an Active thread:  resolve pager/scheduler (except for sigma0),
	// build the initial stack frame and move to Ready.
	void start(addr_t entry_ip, addr_t sp)
	{
		wassert(_state==Active && "Attemt to start inactivate thread.");

		if (_id != 1)  // is not sigma0
		{
			wassert(!_pager_id.is_nil() && "Attemt to start thread without pager.");
			wassert(!_sched_id.is_nil() && "Attemt to start thread without scheduler.");

			// XXX
			// define pager, scheduler
			_pager = threads_find(_pager_id);
			_sched = threads_find(_sched_id);
			wassert(_pager && _sched);

			L4_utcb_t* p = utcb();
			p->global_id(globid());
		}
		set_initial_stack_frame(entry_ip, sp);
		state(Ready);
	}

	inline addr_t user_kip()
	{
		return task()->user_kip();
	}

	inline state_t state() const { return _state; }

	// Central state transition:  removes the thread from the scheduler list of
	// the old state, installs the new state, adds it to the matching list, and
	// drops any priority this thread had lent out (_prio_heir).
	inline void state(state_t s)
	{
		printk("%s: %u: state: %s -> %s.\n", _name, globid().number(), state_str(), state_str(s));
		wassert(_state != s);

		// delete from cur sched-list if need
		if (_state == Ready)
			_iter = threads_del_ready(_iter);
		else
		if (_state == Send_ipc  &&  _ipc.timeout != -1)
			_iter = threads_del_snd_timeout_waiting(_iter);
		else
		if (_state == Send_ipc /**/ || _state==Send_pfault || _state==Send_exception /*~*/)
			_iter = threads_del_send(_iter);
		else
		if (_state == Receive_ipc  &&  _ipc.timeout != -1)
			_iter = threads_del_rcv_timeout_waiting(_iter);

		_state = s;

		// add to sched-new list if need
		if (s == Ready)
		{
			_ipc.clear();
			_pfault.clear();
			timeslice(Kcfg::Timeslice_usec);
			_iter = threads_add_ready(this);
		}
		else
		if (s == Send_ipc  &&  _ipc.timeout != -1)
			_iter = threads_add_snd_timeout_waiting(this);
		else
		if (s == Send_ipc /**/ || s==Send_pfault || s==Send_exception /*~*/)
			_iter = threads_add_send(this);
		else
		if (s == Receive_ipc  &&  _ipc.timeout != -1)
			_iter = threads_add_rcv_timeout_waiting(this);

		// remove inherited prio if need
		if (!prio_heir().is_nil())
		{
			Thread_t* thr = threads_find(prio_heir());
			wassert(thr);
			thr->inherit_prio_del(globid(), prio_max());
			prio_heir(L4_thrid_t::Nil);
		}
	}

	inline L4_utcb_t* uutcb() const { return (L4_utcb_t*) _utcb_uva; }  // used for intra aspace access
	inline L4_utcb_t* kutcb() const { return (L4_utcb_t*) _utcb_kva; }  // used for other aspace access, don't forget flush dcache
	inline L4_utcb_t* utcb()  const { return task()->is_cur_aspace() ? uutcb() : kutcb(); }
	inline paddr_t utcb_location() { return _utcb_pa; }

	inline void utcb(addr_t va)
	{
		wassert(!is_active() && "Attemt to set utcb_va for already activate thread.");
		_utcb_uva = va;
	}

	// Set the UTCB physical location and map it into kernel space.
	inline void utcb_location(paddr_t pa)
	{
		wassert(!is_active() && "Attemt to set utcb_pa for already activate thread.");
		_utcb_pa = pa;
		_utcb_kva = Aspace::kmap_utcb(pa);
		wassert(_utcb_kva);

		// set user accesible thread name (wrm extention)
		memcpy(kutcb()->tls, _name, 4);
	}

	inline void free_utcb_kspace()
	{
		if (_utcb_kva)
		{
			Aspace::kunmap_utcb(_utcb_kva);
			_utcb_pa = -1;
			_utcb_kva = -1;
		}
	}

	// Entry frame saved at the top of the kernel stack on kernel entry.
	Entry_frame_t* entry_frame() const
	{
		addr_t addr = (addr_t)_kstack_area + Stack_sz - sizeof(Entry_frame_t);
		return (Entry_frame_t*) addr;
	}

	static void user_invoke()
	{
		arch_user_invoke();
	}

	// Prepare the kernel stack so the first context switch lands in
	// user_invoke(), which consumes flags/sp/entry/utcb from the stack.
	void set_initial_stack_frame(addr_t entry, addr_t sp)
	{
		printk("%s: entry=%lx, sp=%lx, _ksp=%lx, utcb=%lx.\n", __func__, entry, sp, _ksp, _utcb_uva);
		wassert(_ksp);
		wassert(_utcb_uva);
		wassert(entry);
		Stack::push(&_ksp, _flags);              // user_invoke() will use it from the stack
		Stack::push(&_ksp, sp);                  // user_invoke() will use it from the stack
		Stack::push(&_ksp, entry);               // user_invoke() will use it from the stack
		Stack::push(&_ksp, _utcb_uva);           // user_invoke() will use it from the stack
		Stack::push(&_ksp, (word_t)user_invoke); // context_switch() will use it from the stack
	}

	void store_floats()
	{
		if (fpu_in_use() /*TESTME*/ &&  entry_frame()->is_fpu_enabled()/*~TESTME*/)
			arch_store_floats(&_float_frame);
	}

	void restore_floats()
	{
		if (fpu_in_use())
			arch_restore_floats(&_float_frame);
	}

	void context_switch(Thread_t* next);
};
//--------------------------------------------------------------------------------------------------
// Kernel object representing one interrupt source as an L4 "interrupt thread":
// it holds the IRQ number, the synthetic global thread id for that IRQ, the
// user thread registered as handler, and a pending flag.
class Int_thread_t
{
	unsigned   _intno;        // int number
	L4_thrid_t _globid;       // int thread id
	L4_thrid_t _handler;      // global id of the registered handler thread
	bool       _pending;      // has the interrupt fired and not been delivered yet?
	static unsigned _counter; // to set int number

public:

	Int_thread_t() :
		_intno(_counter++),
		_globid(L4_thrid_t::create_irq(_intno)),
		_handler(L4_thrid_t::Nil),
		_pending(false) {}

	// Attach a handler thread, or detach with Nil; detaching masks the IRQ.
	inline void handler(L4_thrid_t h)
	{
		wassert(h.is_nil() || thrid_is_global_user(h));
		_pending = false;
		_handler = h;
		if (h.is_nil())
			Intc::mask(_intno);
	}

	inline void pending(bool p) { _pending = p; }

	inline unsigned   intno()      const { return _intno; }
	inline L4_thrid_t globid()     const { return _globid; }
	inline L4_thrid_t handler()    const { return _handler; }
	inline bool       is_active()  const { return !_handler.is_nil(); }
	inline bool       is_pending() const { return _pending; }
};
#endif // THREAD_H
|
rbouadjenek/DQBioinformatics
|
DNorm-5.4.0/src/dnorm/PollDNorm.java
|
package dnorm;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.stream.XMLStreamException;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.XMLConfiguration;
import dnorm.core.DiseaseNameAnalyzer;
import dnorm.core.Lexicon;
import dnorm.core.MEDICLexiconLoader;
import dnorm.core.SynonymTrainer;
import dnorm.core.SynonymTrainer.LookupResult;
import dnorm.types.FullRankSynonymMatrix;
import dnorm.util.AbbreviationIdentifier;
import dnorm.util.AbbreviationResolver;
import dnorm.util.PubtatorReader;
import dnorm.util.PubtatorReader.Abstract;
import dragon.nlp.tool.Tagger;
import dragon.nlp.tool.lemmatiser.EngLemmatiser;
import banner.eval.BANNER;
import banner.postprocessing.PostProcessor;
import banner.tagging.CRFTagger;
import banner.tagging.dictionary.DictionaryTagger;
import banner.tokenization.Tokenizer;
import banner.types.Mention;
import banner.types.Sentence;
import banner.types.Mention.MentionType;
import banner.util.RankedList;
import banner.util.SentenceBreaker;
import bioc.BioCAnnotation;
import bioc.BioCCollection;
import bioc.BioCDocument;
import bioc.BioCPassage;
import bioc.io.BioCDocumentWriter;
import bioc.io.BioCFactory;
import bioc.io.woodstox.ConnectorWoodstox;
import dragon.util.EnvVariable;
public class PollDNorm {
private static SentenceBreaker breaker;
private static AbbreviationIdentifier abbrev;
private static CRFTagger tagger;
private static Tokenizer tokenizer;
private static PostProcessor postProcessor;
private static SynonymTrainer syn;
public static void main(String[] args) throws ConfigurationException, XMLStreamException, IOException {
if (args.length != 7) {
usage();
System.exit(0);
}
String configurationFilename = args[0];
String lexiconFilename = args[1];
String matrixFilename = args[2];
String abbreviationDirectory = args[3];
String tempDirectory = args[4];
String input = args[5];
String output = args[6];
DiseaseNameAnalyzer analyzer = DiseaseNameAnalyzer.getDiseaseNameAnalyzer(true, true, false, true);
Lexicon lex = new Lexicon(analyzer);
MEDICLexiconLoader loader = new MEDICLexiconLoader();
loader.loadLexicon(lex, lexiconFilename);
lex.prepare();
FullRankSynonymMatrix matrix = FullRankSynonymMatrix.load(new File(matrixFilename));
syn = new SynonymTrainer(lex, matrix, 1000);
HierarchicalConfiguration config = new XMLConfiguration(configurationFilename);
EnvVariable.setDragonHome(".");
EnvVariable.setCharSet("US-ASCII");
EngLemmatiser lemmatiser = BANNER.getLemmatiser(config);
Tagger posTagger = BANNER.getPosTagger(config);
HierarchicalConfiguration localConfig = config.configurationAt(BANNER.class.getPackage().getName());
String modelFilename = localConfig.getString("modelFilename");
tagger = CRFTagger.load(new File(modelFilename), lemmatiser, posTagger);
tokenizer = BANNER.getTokenizer(config);
postProcessor = BANNER.getPostProcessor(config);
abbrev = new AbbreviationIdentifier("./identify_abbr", abbreviationDirectory, tempDirectory, 1000);
breaker = new SentenceBreaker();
// Process file(s)
File inFile = new File(input);
File outFile = new File(output);
if (inFile.isDirectory()) {
if (!outFile.isDirectory()) {
usage();
throw new IllegalArgumentException();
}
if (!input.endsWith("/"))
input = input + "/";
if (!output.endsWith("/"))
output = output + "/";
boolean error = false;
System.out.println("Waiting for input");
while (!error) {
List<String> reportFilenames = getUnlockedFiles(input);
for (String filename : reportFilenames) {
String reportFilename = input + filename;
String annotationFilename = output + filename;
String lockFilename = output + "." + filename + ".lck";
(new OutputStreamWriter(new FileOutputStream(lockFilename), "UTF-8")).close();
if (filename.endsWith(".xml")) {
processFile_BioC(reportFilename, annotationFilename);
} else {
processFile_PubTator(reportFilename, annotationFilename);
}
(new File(lockFilename)).delete();
(new File(reportFilename)).delete();
System.out.println("Waiting for input");
}
try {
Thread.sleep(500);
} catch (InterruptedException e) {
System.err.println("Interrupted while polling:");
e.printStackTrace();
error = true;
}
}
} else {
usage();
throw new IllegalArgumentException();
}
System.out.println("Done.");
}
private static List<String> getUnlockedFiles(String input) {
List<String> reportFilenames = new ArrayList<String>();
Set<String> lockFilenames = new HashSet<String>();
File[] listOfFiles = (new File(input)).listFiles();
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
String filename = listOfFiles[i].getName();
if (filename.endsWith(".lck")) {
lockFilenames.add(filename);
} else {
reportFilenames.add(filename);
}
}
}
List<String> unlockedReportFilenames = new ArrayList<String>();
for (String filename : reportFilenames) {
String lockFilename = "." + filename + ".lck";
if (!lockFilenames.contains(lockFilename)) {
unlockedReportFilenames.add(filename);
}
}
return unlockedReportFilenames;
}
private static void usage() {
System.out.println("Usage:");
System.out.println("\tPollDNorm configurationFilename lexiconFilename matrixFilename Ab3P_Directory tempDirectory inputDirectory outputDirectory");
}
private static void processFile_PubTator(String inputFilename, String outputFilename) throws IOException {
System.out.println("Reading input");
PubtatorReader reader = new PubtatorReader(inputFilename);
System.out.println("Processing & writing output");
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFilename), "UTF-8"));
for (Abstract a : reader.getAbstracts()) {
writer.write(a.getId() + "|t|");
if (a.getTitleText() != null)
writer.write(a.getTitleText());
writer.newLine();
writer.write(a.getId() + "|a|");
if (a.getAbstractText() != null)
writer.write(a.getAbstractText());
writer.newLine();
List<DNormResult> results = process(a);
Collections.sort(results, new Comparator<DNormResult>() {
@Override
public int compare(DNormResult r1, DNormResult r2) {
return r1.getStartChar() - r2.getStartChar();
}
});
for (DNormResult r : results) {
writer.write(a.getId() + "\t" + r.getStartChar() + "\t" + r.getEndChar() + "\t" + r.getMentionText() + "\tDisease");
if (r.getConceptID() != null) {
writer.write("\t" + r.getConceptID());
}
writer.newLine();
}
writer.newLine();
}
writer.close();
}
private static List<DNormResult> process(Abstract a) throws IOException {
String text = a.getText();
System.out.println("Text received: " + text);
if (text == null)
return new ArrayList<DNormResult>();
Map<String, String> abbreviationMap = abbrev.getAbbreviations(a.getId(), text);
List<DNormResult> found = processText(a, abbreviationMap);
System.out.println("Mentions found:");
for (DNormResult result : found)
System.out.println("\t" + result.toString());
if (abbreviationMap == null)
return found;
List<DNormResult> returned = extendResults(text, found, abbreviationMap);
System.out.println("Mentions added:");
List<DNormResult> added = new ArrayList<DNormResult>(returned);
added.removeAll(found);
for (DNormResult result : added)
System.out.println("\t" + result.toString());
System.out.println("Mentions removed:");
List<DNormResult> removed = new ArrayList<DNormResult>(found);
removed.removeAll(returned);
for (DNormResult result : removed)
System.out.println("\t" + result.toString());
return returned;
}
/**
 * Runs the BANNER tagger over each sentence of the abstract and looks up each
 * found mention in the synonym lexicon, attaching the best-ranked concept ID.
 *
 * @param a the abstract whose sentences are tagged
 * @param abbreviationMap short-form to long-form abbreviation map (may be null)
 * @return one DNormResult per found mention, with document-level char offsets
 */
private static List<DNormResult> processText(Abstract a, Map<String, String> abbreviationMap) {
List<DNormResult> results = new ArrayList<DNormResult>();
// Running character offset of the current sentence within the full text.
int index = 0;
List<String> sentences = a.getSentenceTexts();
for (int i = 0; i < sentences.size(); i++) {
String sentence = sentences.get(i);
// Length is captured before trimming so offset accumulation tracks the
// original (untrimmed) sentence boundaries.
int length = sentence.length();
sentence = sentence.trim();
// NOTE(review): mention offsets are relative to the trimmed sentence, but
// `index` accumulates untrimmed lengths — if a sentence has leading
// whitespace the reported start/end offsets shift; confirm upstream
// sentence texts never start with whitespace.
Sentence sentence1 = new Sentence(a.getId() + "-" + i, a.getId(), sentence);
Sentence sentence2 = BANNER.process(tagger, tokenizer, postProcessor, sentence1);
for (Mention mention : sentence2.getMentions(MentionType.Found)) {
int start = index + mention.getStartChar();
int end = start + mention.getText().length();
DNormResult result = new DNormResult(start, end, mention.getText());
// Expand any abbreviations before the lexicon lookup so short forms
// normalize the same way as their long forms.
String lookupText = result.getMentionText();
lookupText = AbbreviationResolver.expandAbbreviations(lookupText, abbreviationMap);
RankedList<LookupResult> lookup = syn.lookup(lookupText);
if (lookup.size() > 0) {
// Keep only the top-ranked concept.
result.setConceptID(lookup.getObject(0).getConceptId(), lookup.getValue(0));
}
results.add(result);
}
index += length;
}
return results;
}
/**
 * Expands normalized results with the abbreviation-equivalent forms of each
 * mention (short form to long form and vice versa), then re-locates every
 * candidate mention string in the full text.
 *
 * @param text full abstract text the mentions are located in
 * @param results normalized results produced by the tagger
 * @param shortLongAbbrevMap map from abbreviation to its expanded form
 * @return relocated results covering original mentions plus abbreviation variants
 */
private static List<DNormResult> extendResults(String text, List<DNormResult> results, Map<String, String> shortLongAbbrevMap) {
    // Invert the short->long map so both directions can be looked up.
    Map<String, String> longShortAbbrevMap = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : shortLongAbbrevMap.entrySet()) {
        longShortAbbrevMap.put(entry.getValue(), entry.getKey());
    }
    // Collect position-independent candidates (-1 offsets) to search for.
    Set<DNormResult> unlocalized = new HashSet<DNormResult>();
    for (DNormResult result : results) {
        if (result.getConceptID() == null) {
            continue; // only normalized mentions are propagated
        }
        String mention = result.getMentionText();
        unlocalized.add(new DNormResult(-1, -1, mention, result.getConceptID(), result.getScore()));
        if (shortLongAbbrevMap.containsKey(mention)) {
            // TODO Verify the long form realistically normalizes to the
            // concept intended, or we will drop the original result.
            unlocalized.add(new DNormResult(-1, -1, shortLongAbbrevMap.get(mention), result.getConceptID(), result.getScore()));
        }
        if (longShortAbbrevMap.containsKey(mention)) {
            unlocalized.add(new DNormResult(-1, -1, longShortAbbrevMap.get(mention), result.getConceptID(), result.getScore()));
        }
    }
    return localizeResults(text, unlocalized);
}
/**
 * Locates every whole-word occurrence of each candidate mention string in the
 * text, then filters out any occurrence that overlaps a strictly longer one.
 *
 * @param text full abstract text to search
 * @param unlocalizedResults candidate mentions with placeholder (-1) offsets
 * @return localized results with real offsets, longest-match preferred
 */
private static List<DNormResult> localizeResults(String text, Set<DNormResult> unlocalizedResults) {
    // Add a result for each whole-word occurrence of each mention string.
    List<DNormResult> located = new ArrayList<DNormResult>();
    for (DNormResult candidate : unlocalizedResults) {
        Pattern mentionPattern = Pattern.compile("\\b" + Pattern.quote(candidate.getMentionText()) + "\\b");
        Matcher matcher = mentionPattern.matcher(text);
        while (matcher.find()) {
            located.add(new DNormResult(matcher.start(), matcher.end(), candidate.getMentionText(), candidate.getConceptID(), candidate.getScore()));
        }
    }
    // Drop any occurrence that overlaps a strictly longer mention; equal
    // lengths are all kept.
    List<DNormResult> kept = new ArrayList<DNormResult>();
    for (int i = 0; i < located.size(); i++) {
        DNormResult current = located.get(i);
        boolean keep = true;
        for (int j = 0; keep && j < located.size(); j++) {
            if (i == j) {
                continue;
            }
            DNormResult other = located.get(j);
            if (current.overlaps(other) && other.getMentionText().length() > current.getMentionText().length()) {
                keep = false;
            }
        }
        if (keep) {
            kept.add(current);
        }
    }
    return kept;
}
/**
 * Value object pairing a located disease mention with its normalized concept
 * identifier and lookup score. Used as a HashSet element, so equals/hashCode
 * cover all five fields.
 */
private static class DNormResult {
    // Character offsets of the mention within the abstract text
    // (-1 when the result has not yet been localized).
    private int startChar;
    private int endChar;
    private String mentionText;
    // Normalized concept identifier; null when no lexicon match was found.
    private String conceptID;
    private double score;

    /** Creates an un-normalized result (no concept, zero score). */
    public DNormResult(int startChar, int endChar, String mentionText) {
        this(startChar, endChar, mentionText, null, 0.0);
    }

    /** Creates a fully-populated result. */
    public DNormResult(int startChar, int endChar, String mentionText, String conceptID, double score) {
        this.startChar = startChar;
        this.endChar = endChar;
        this.mentionText = mentionText;
        this.conceptID = conceptID;
        this.score = score;
    }

    public String getConceptID() {
        return conceptID;
    }

    /** Sets the normalized concept and its lookup score together. */
    public void setConceptID(String conceptID, double score) {
        this.conceptID = conceptID;
        this.score = score;
    }

    public int getStartChar() {
        return startChar;
    }

    public int getEndChar() {
        return endChar;
    }

    public String getMentionText() {
        return mentionText;
    }

    public double getScore() {
        return score;
    }

    /** Half-open interval intersection: [startChar, endChar) vs the other's span. */
    public boolean overlaps(DNormResult result) {
        return endChar > result.startChar && startChar < result.endChar;
    }

    @Override
    public String toString() {
        return mentionText + " (" + startChar + ", " + endChar + ") -> " + conceptID + " @ " + score;
    }

    // Field order of the hash (conceptID, endChar, mentionText, score bits,
    // startChar) is fixed so hash values stay stable.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (conceptID == null ? 0 : conceptID.hashCode());
        result = prime * result + endChar;
        result = prime * result + (mentionText == null ? 0 : mentionText.hashCode());
        long scoreBits = Double.doubleToLongBits(score);
        result = prime * result + (int) (scoreBits ^ (scoreBits >>> 32));
        result = prime * result + startChar;
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        DNormResult other = (DNormResult) obj;
        if (startChar != other.startChar || endChar != other.endChar)
            return false;
        // Bit-level comparison so NaN scores compare consistently.
        if (Double.doubleToLongBits(score) != Double.doubleToLongBits(other.score))
            return false;
        if (conceptID == null ? other.conceptID != null : !conceptID.equals(other.conceptID))
            return false;
        return mentionText == null ? other.mentionText == null : mentionText.equals(other.mentionText);
    }
}
/**
 * Streams a BioC collection from inXML, annotates every passage of every
 * document, and writes the annotated collection to outXML.
 *
 * @param inXML path to the input BioC XML file
 * @param outXML path for the annotated output BioC XML file
 * @throws IOException on file access failure
 * @throws XMLStreamException on malformed BioC XML
 */
private static void processFile_BioC(String inXML, String outXML) throws IOException, XMLStreamException {
ConnectorWoodstox connector = new ConnectorWoodstox();
BioCCollection collection = connector.startRead(new InputStreamReader(new FileInputStream(inXML), "UTF-8"));
String parser = BioCFactory.WOODSTOX;
BioCFactory factory = BioCFactory.newFactory(parser);
BioCDocumentWriter writer = factory.createBioCDocumentWriter(new OutputStreamWriter(new FileOutputStream(outXML), "UTF-8"));
// NOTE(review): writer (and the input reader) are not closed if an
// exception is thrown mid-stream — consider try-with-resources /
// try-finally if BioCDocumentWriter is Closeable; confirm against the
// BioC API.
writer.writeCollectionInfo(collection);
while (connector.hasNext()) {
BioCDocument document = connector.next();
String documentId = document.getID();
System.out.println("ID=" + documentId);
// Abbreviations are resolved per passage, then each passage is annotated
// in place before the document is written back out.
for (BioCPassage passage : document.getPassages()) {
Map<String, String> abbreviationMap = abbrev.getAbbreviations(documentId, passage.getText());
processPassage(documentId, passage, abbreviationMap);
}
writer.writeDocument(document);
System.out.println();
}
writer.close();
}
/**
 * Annotates one BioC passage in place: tags each sentence with BANNER,
 * normalizes each mention against the lexicon, and appends the resulting
 * annotations to the passage.
 *
 * @param documentId ID of the enclosing document (used in sentence IDs)
 * @param passage passage to annotate; annotations are added in place
 * @param abbreviationMap short-form to long-form abbreviation map (may be null)
 */
private static void processPassage(String documentId, BioCPassage passage, Map<String, String> abbreviationMap) {
// Figure out the correct next annotation ID to use: start one past the
// largest existing numeric annotation ID (non-numeric IDs are ignored).
int nextId = 0;
for (BioCAnnotation annotation : passage.getAnnotations()) {
String annotationIdString = annotation.getID();
if (annotationIdString.matches("[0-9]+")) {
int annotationId = Integer.parseInt(annotationIdString);
if (annotationId > nextId)
nextId = annotationId;
}
}
// Process the passage text
breaker.setText(passage.getText());
// `offset` tracks the passage-relative start of the current sentence;
// the passage's own offset makes annotation locations document-relative.
int offset = passage.getOffset();
List<String> sentences = breaker.getSentences();
for (int i = 0; i < sentences.size(); i++) {
String sentenceText = sentences.get(i);
String sentenceTextTrim = sentenceText.trim();
// Number of leading characters removed by trim(); added back when
// computing annotation locations below.
int trimOffset = sentenceText.indexOf(sentenceTextTrim);
if (sentenceTextTrim.length() > 0) {
// Zero-pad single-digit sentence indices so IDs sort lexically.
String sentenceId = Integer.toString(i);
if (sentenceId.length() < 2)
sentenceId = "0" + sentenceId;
sentenceId = documentId + "-" + sentenceId;
Sentence sentence = new Sentence(sentenceId, documentId, sentenceText);
sentence = BANNER.process(tagger, tokenizer, postProcessor, sentence);
for (Mention mention : sentence.getMentions()) {
BioCAnnotation annotation = new BioCAnnotation();
nextId++;
annotation.setID(Integer.toString(nextId));
// Convert ALL-CAPS entity types (e.g. "DISEASE") to capitalized
// form ("Disease"); mixed-case types pass through unchanged.
String entityType = mention.getEntityType().getText();
if (entityType.matches("[A-Z]+")) {
entityType = entityType.toLowerCase();
String first = entityType.substring(0, 1);
entityType = entityType.replaceFirst(first, first.toUpperCase());
}
annotation.putInfon("type", entityType);
String mentionText = mention.getText();
annotation.setLocation(offset + trimOffset + mention.getStartChar(), mentionText.length());
annotation.setText(mentionText);
String lookupText = AbbreviationResolver.expandAbbreviations(mentionText, abbreviationMap);
RankedList<LookupResult> results = syn.lookup(lookupText);
if (results.size() > 0) {
String conceptId = results.getObject(0).getConceptId();
// Cut off "MESH:" and "OMIM:"
// The prefix becomes the infon key (e.g. "MESH" -> id); a
// prefix-less concept ID is stored under the literal key "id".
String id = "id";
int index = conceptId.indexOf(":");
if (index != -1) {
id = conceptId.substring(0, index);
conceptId = conceptId.substring(index + 1);
}
annotation.putInfon(id, conceptId);
}
passage.addAnnotation(annotation);
}
}
offset += sentenceText.length();
}
}
}
|
Jenny19880324/suitesparse-metis-for-windows
|
SuiteSparse/GraphBLAS/Demo/Source/bfs5m.c
|
<reponame>Jenny19880324/suitesparse-metis-for-windows
//------------------------------------------------------------------------------
// GraphBLAS/Demo/Source/bfs5m.c: breadth first search (vxm and assign/reduce)
//------------------------------------------------------------------------------
// Modified from the GraphBLAS C API Specification, by <NAME>, Timothy
// Mattson, <NAME>, <NAME>, <NAME>. Based on "GraphBLAS
// Mathematics" by <NAME>.
// This method has been updated as of Version 2.2 of SuiteSparse:GraphBLAS.
// It now assumes the matrix is held by row (GxB_BY_ROW) and uses GrB_vxm
// instead of GrB_mxv. It now more closely matches the BFS example in the
// GraphBLAS C API Specification.
#include "demos.h"
//------------------------------------------------------------------------------
// bfs5m: breadth first search using a Boolean semiring
//------------------------------------------------------------------------------
// Given a n x n adjacency matrix A and a source node s, performs a BFS
// traversal of the graph and sets v[i] to the level in which node i is
// visited (v[s] == 1). If i is not reacheable from s, then v[i] = 0. (Vector
// v should be empty on input.) The graph A need not be Boolean on input;
// if it isn't Boolean, the semiring will properly typecast it to Boolean.
GrB_Info bfs5m              // BFS of a graph (using vector assign & reduce)
(
    GrB_Vector *v_output,   // v [i] is the BFS level of node i in the graph
    const GrB_Matrix A,     // input graph, treated as if boolean in semiring
    GrB_Index s             // starting node of the BFS
)
{
    //--------------------------------------------------------------------------
    // set up the semiring and initialize the vector v
    //--------------------------------------------------------------------------
    // NOTE(review): demo-quality code — GrB_* return codes are not checked;
    // a mid-function failure still returns GrB_SUCCESS and may leak the
    // objects allocated below. Acceptable for a demo; confirm before reuse.
    GrB_Index n ;                          // # of nodes in the graph
    GrB_Vector q = NULL ;                  // nodes visited at each level
    GrB_Vector v = NULL ;                  // result vector
    GrB_Monoid Lor = NULL ;                // Logical-or monoid
    GrB_Semiring Boolean = NULL ;          // Boolean semiring
    GrB_Descriptor desc = NULL ;           // Descriptor for vxm
    GrB_Matrix_nrows (&n, A) ;             // n = # of rows of A
    GrB_Vector_new (&v, GrB_INT32, n) ;    // Vector<int32_t> v(n) = 0
    // This is a little faster if the whole graph is expected to be searched,
    // but slower if only a small part of the graph is reached:
    // for (int32_t i = 0 ; i < n ; i++) GrB_Vector_setElement (v, 0, i) ;
    GrB_Vector_new (&q, GrB_BOOL, n) ;     // Vector<bool> q(n) = false
    GrB_Vector_setElement (q, true, s) ;   // q[s] = true, false elsewhere
    GrB_Monoid_new (&Lor, GrB_LOR, (bool) false) ;
    GrB_Semiring_new (&Boolean, Lor, GrB_LAND) ;
    GrB_Descriptor_new (&desc) ;
    GrB_Descriptor_set (desc, GrB_MASK, GrB_SCMP) ;     // invert the mask
    GrB_Descriptor_set (desc, GrB_OUTP, GrB_REPLACE) ;  // clear q first
    //--------------------------------------------------------------------------
    // BFS traversal and label the nodes
    //--------------------------------------------------------------------------
    // Loop terminates when a level finds no new nodes, or after n levels
    // (the longest possible path). Unreached nodes keep an implicit 0 in v.
    bool successor = true ; // true when some successor found
    for (int32_t level = 1 ; successor && level <= n ; level++)
    {
        // v<q> = level, using vector assign with q as the mask
        GrB_assign (v, q, NULL, level, GrB_ALL, n, NULL) ;
        // q<!v> = q ||.&& A ; finds all the unvisited
        // successors from current q, using !v as the mask
        GrB_vxm (q, v, NULL, Boolean, q, A, desc) ;
        // successor = ||(q)
        GrB_reduce (&successor, NULL, Lor, q, NULL) ;
    }
    // Caller takes ownership of v; all other workspace objects are freed.
    *v_output = v ;         // return result
    GrB_free (&q) ;
    GrB_free (&Lor) ;
    GrB_free (&Boolean) ;
    GrB_free (&desc) ;
    return (GrB_SUCCESS) ;
}
|
elveahuang/spring-samples
|
spring-boot-samples/spring-boot-data-sample/src/test/java/cn/elvea/samples/spring/boot/data/datasource/DataSourceTests.java
|
package cn.elvea.samples.spring.boot.data.datasource;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
* 读写分离单元测试
*
* @author elvea
*/
@ExtendWith(SpringExtension.class)
@SpringBootTest
public class DataSourceTests {
    @Autowired
    DataSource dataSource;

    /**
     * Verifies the routing datasource is wired and can hand out a live
     * connection.
     *
     * @throws SQLException if the datasource cannot provide a connection
     */
    @Test
    public void test() throws SQLException {
        Assertions.assertNotNull(dataSource);
        // try-with-resources returns the connection to the pool; the original
        // leaked it, which can exhaust small pools across a test suite.
        try (Connection connection = dataSource.getConnection()) {
            Assertions.assertNotNull(connection);
        }
    }
}
|
maheshlalu/OdishaNews
|
Odisha360/OGOdishaSHKConfigurator.h
|
//
// OGOdishaSHKConfigurator.h
// OnGO
//
// Created by <NAME> on 20/02/16.
// Copyright © 2016 <NAME>. All rights reserved.
//
#import <ShareKit/ShareKit.h>
#import "SHKConfiguration.h"
#import "DefaultSHKConfigurator.h"
// ShareKit configurator subclass: override DefaultSHKConfigurator methods here
// to supply the app's sharing-service settings (keys, app name, etc.).
@interface OGOdishaSHKConfigurator : DefaultSHKConfigurator
@end
|
heaths/azure-sdk-for-go
|
services/cognitiveservices/v1.0/entitysearch/client.go
|
<filename>services/cognitiveservices/v1.0/entitysearch/client.go
// Package entitysearch implements the Azure ARM Entitysearch service API version 1.0.
//
// The Entity Search API lets you send a search query to Bing and get back search results that include entities and
// places. Place results include restaurants, hotel, or other local businesses. For places, the query can specify the
// name of the local business or it can ask for a list (for example, restaurants near me). Entity results include
// persons, places, or things. Place in this context is tourist attractions, states, countries, etc.
package entitysearch
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/Azure/go-autorest/autorest"
)
const (
	// DefaultEndpoint is the default value for endpoint
	DefaultEndpoint = "https://api.cognitive.microsoft.com"
)

// BaseClient is the base client for Entitysearch.
// Note: this file is AutoRest-generated (see header); hand edits are lost on
// regeneration.
type BaseClient struct {
	autorest.Client
	// Endpoint is the Bing service endpoint requests are sent to.
	Endpoint string
}

// New creates an instance of the BaseClient client using DefaultEndpoint.
func New() BaseClient {
	return NewWithoutDefaults(DefaultEndpoint)
}

// NewWithoutDefaults creates an instance of the BaseClient client targeting
// the given endpoint.
func NewWithoutDefaults(endpoint string) BaseClient {
	return BaseClient{
		Client:   autorest.NewClientWithUserAgent(UserAgent()),
		Endpoint: endpoint,
	}
}
|
ikostan/python
|
markdown/markdown.py
|
<filename>markdown/markdown.py
import re
def parse(markdown):
    """Convert a markdown string into a single HTML string.

    Each line runs through the replacement helpers in a fixed order: headers,
    lists, paragraph wrapping, then strong (double underscore) before em
    (single underscore) so '__' is not consumed as two '_' spans.
    """
    # Split source string in to list by new line
    lines = markdown.split('\n')
    results = list()
    # Tracks whether the current line is inside an open <ul>.
    in_list = False
    # Process the list line by line and replace
    # patterns into HTML tags
    for line in lines:
        line = replace_header(line)
        line, in_list = replace_list(line, in_list)
        line = replace_paragraph(line)
        line = replace_strong(line)
        line = replace_em(line)
        results.append(line)
    # NOTE(review): lines are joined without separators, and a list that is
    # still open at end-of-input is never closed (see replace_list) — verify
    # output for lists with other than exactly two items.
    return ''.join(results)
def replace_list(line: str, in_list: bool) -> (str, bool):
    """Convert a '* item' markdown line into <ul>/<li> markup.

    Returns the (possibly rewritten) line and the updated in-list flag.
    """
    # Replace list items
    m = re.match(r'\* (.*)', line)
    if m:
        curr = m.group(1)
        if not in_list:
            # First item opens the list.
            in_list = True
            line = ''.join(('<ul><li>',
                            curr,
                            '</li>'))
        else:
            # List item
            # NOTE(review): every item after the first emits '</li></ul>', so
            # the list is closed at the second item and re-closed for each
            # later one — lists of three or more items produce malformed HTML,
            # and in_list is never reset to False. Not fixed here because a
            # correct fix also requires parse() to close a trailing list.
            line = ''.join(('<li>',
                            curr,
                            '</li></ul>'))
    return line, in_list
def replace_paragraph(line: str) -> str:
    """Wrap a line in <p> tags unless it already starts with block markup.

    Lines already beginning with a header, list, paragraph, or list-item tag
    pass through unchanged.
    """
    if line.startswith(('<h', '<ul', '<p', '<li')):
        return line
    return '<p>{}</p>'.format(line)
def replace_em(line: str) -> str:
    """Convert a single-underscore span into an <em> element.

    Only the final underscore-delimited span is converted (the leading group
    is greedy); lines without a matching pair pass through unchanged.
    """
    match = re.match('(.*)_(.*)_(.*)', line)
    if match is None:
        return line
    before, inner, after = match.groups()
    return '{}<em>{}</em>{}'.format(before, inner, after)
def replace_strong(line: str) -> str:
    """Convert a double-underscore span into a <strong> element.

    Must run before replace_em so '__' is not consumed as two '_' spans;
    lines without a matching pair pass through unchanged.
    """
    match = re.match('(.*)__(.*)__(.*)', line)
    if match is None:
        return line
    before, inner, after = match.groups()
    return '{}<strong>{}</strong>{}'.format(before, inner, after)
def replace_header(line: str) -> str:
    """Convert an ATX header line ('# ' .. '###### ') into <h1>-<h6> markup.

    The original checked `'#' in line[:7]`, which misfired on lines with a
    '#' anywhere in the first seven characters, accepted seven hashes
    (emitting invalid <h7>), and did not require a space after the hashes.
    This version only converts lines that start with 1-6 '#' characters
    followed by a single space; anything else (including '#######') is
    returned unchanged so parse() wraps it as a paragraph.
    """
    match = re.match(r'(#{1,6}) (.*)', line)
    if match is None:
        return line
    level = len(match.group(1))
    return '<h{0}>{1}</h{0}>'.format(level, match.group(2))
|
AonanHe/LeetCode
|
Easy/monotonic-array.js
|
/**
* Problem: Monotonic Array
* Difficulty: Easy
* Runtime: 200 ms
* Date: 2019/10/27
* Author: <NAME>
*/
/**
* @param {number[]} A
* @return {boolean}
*/
/**
 * Returns true when A is entirely non-decreasing or entirely non-increasing.
 * Single pass: O(n) time, O(1) space. (The original sorted two full copies of
 * the array — O(n log n) time and O(n) extra space — just to compare.)
 * Empty and single-element arrays are monotonic by definition.
 * @param {number[]} A
 * @return {boolean}
 */
var isMonotonic = function(A) {
  let increasing = true
  let decreasing = true
  for (let i = 1; i < A.length; i++) {
    if (A[i] > A[i - 1]) decreasing = false
    if (A[i] < A[i - 1]) increasing = false
  }
  return increasing || decreasing
}
|
npocmaka/Windows-Server-2003
|
admin/wmi/wbem/scripting/test/jscript/arrayoob.js
|
<reponame>npocmaka/Windows-Server-2003
//***************************************************************************
//This script tests array out-of-bounds conditions on properties and
//qualifiers
//***************************************************************************
// Runs under Windows Script Host against the WMI scripting API (SWbemServices
// via GetObject); not runnable in a browser or Node environment.
var Service = GetObject("winmgmts:root/default");
var Class = Service.Get();
// Name the new (empty) class before adding properties to it.
Class.Path_.Class = "ARRAYPROP00";
// Add an array-valued property; 19 is presumably wbemCimtypeUint32 — TODO
// confirm against the WbemCimtypeEnum documentation. Third arg = isArray.
var Property = Class.Properties_.Add ("p1", 19, true);
Property.Value = new Array (12, 787, 34124);
var Qualifier = Property.Qualifiers_.Add("wibble", new Array ("fred", "the", "hamster"));
//************************************************************
// PROPERTY
//************************************************************
//Out-of-bounds write ; should expand the array
Class.Properties_("p1")(3) = 783837;
//Now read should be in bounds
WScript.Echo ("Value of ARRAYPROP00.Class.p1(3) is [783837]:",
(new VBArray(Class.Properties_("p1").Value).toArray ())[3]);
//Out-of-bounds write ; should expand the array
// Same write, this time via the direct-property shorthand on the class.
Class.p1(4) = 783844;
//Now read should be in bounds
WScript.Echo ("Value of ARRAYPROP00.Class.p1(4) is [783844]:",
(new VBArray(Class.Properties_("p1").Value).toArray ())[4]);
//Complete value dump
// Note: loop index `i` is an implicit global (no var declaration) — harmless
// in this standalone script.
var arrayVal = new VBArray(Class.Properties_("p1").Value).toArray ();
for (i = 0; i < arrayVal.length; i++)
WScript.Echo(arrayVal[i]);
//************************************************************
// QUALIFIER
//************************************************************
//Out-of-bounds write ; should expand the array
Property.Qualifiers_("wibble")(3) = "jam";
//Now read should be in bounds
WScript.Echo ("Value of qualifier(3) is [jam]:",
(new VBArray(Property.Qualifiers_("wibble").Value).toArray())[3]);
//Complete value dump
// Re-declaring arrayVal with var is benign (JS var hoisting).
var arrayVal = new VBArray(Property.Qualifiers_("wibble").Value).toArray ();
for (i = 0; i < arrayVal.length; i++)
WScript.Echo(arrayVal[i]);
|
jacadcaps/webkitty
|
Source/WebCore/html/FeaturePolicy.cpp
|
/*
* Copyright (C) 2019 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "FeaturePolicy.h"
#include "DOMWindow.h"
#include "Document.h"
#include "HTMLIFrameElement.h"
#include "HTMLNames.h"
#include "HTMLParserIdioms.h"
#include "SecurityOrigin.h"
namespace WebCore {
using namespace HTMLNames;
// Maps a feature-policy type to the human-readable name used in console error
// messages. Kept as an exhaustive enum switch (no default) so the compiler
// flags any newly-added Type value.
static const char* policyTypeName(FeaturePolicy::Type type)
{
    switch (type) {
    case FeaturePolicy::Type::Camera:
        return "Camera";
    case FeaturePolicy::Type::Microphone:
        return "Microphone";
    case FeaturePolicy::Type::DisplayCapture:
        return "DisplayCapture";
    case FeaturePolicy::Type::SyncXHR:
        return "SyncXHR";
    case FeaturePolicy::Type::Fullscreen:
        return "Fullscreen";
#if ENABLE(WEBXR)
    case FeaturePolicy::Type::XRSpatialTracking:
        return "XRSpatialTracking";
#endif
    }
    ASSERT_NOT_REACHED();
    return "";
}
// Returns true only if every <iframe> boundary between `document` and the top
// document allows feature `type` for the corresponding ancestor's origin.
// Optionally logs a console error describing the failing boundary.
bool isFeaturePolicyAllowedByDocumentAndAllOwners(FeaturePolicy::Type type, const Document& document, LogFeaturePolicyFailure logFailure)
{
    auto& topDocument = document.topDocument();
    auto* ancestorDocument = &document;
    // Walk the ownership chain from the requesting document up to the top.
    while (ancestorDocument != &topDocument) {
        if (!ancestorDocument) {
            // Chain broke before reaching the top document (e.g. a detached
            // frame): deny rather than assume an allow.
            if (logFailure == LogFeaturePolicyFailure::Yes && document.domWindow())
                document.domWindow()->printErrorMessage(makeString("Feature policy '", policyTypeName(type), "' check failed."));
            return false;
        }
        auto* ownerElement = ancestorDocument->ownerElement();
        // Only <iframe> owners carry an `allow` policy; other embedding
        // elements are skipped here.
        if (is<HTMLIFrameElement>(ownerElement)) {
            const auto& featurePolicy = downcast<HTMLIFrameElement>(ownerElement)->featurePolicy();
            if (!featurePolicy.allows(type, ancestorDocument->securityOrigin().data())) {
                if (logFailure == LogFeaturePolicyFailure::Yes && document.domWindow()) {
                    auto& allowValue = downcast<HTMLIFrameElement>(ownerElement)->attributeWithoutSynchronization(HTMLNames::allowAttr);
                    document.domWindow()->printErrorMessage(makeString("Feature policy '", policyTypeName(type), "' check failed for iframe with origin '", document.securityOrigin().toString(), "' and allow attribute '", allowValue, "'."));
                }
                return false;
            }
        }
        ancestorDocument = ancestorDocument->parentDocument();
    }
    return true;
}
// Evaluates a single allow rule against an origin: None denies all, All
// permits all, List permits only explicitly listed origins.
static bool isAllowedByFeaturePolicy(const FeaturePolicy::AllowRule& rule, const SecurityOriginData& origin)
{
    switch (rule.type) {
    case FeaturePolicy::AllowRule::Type::None:
        return false;
    case FeaturePolicy::AllowRule::Type::All:
        return true;
    case FeaturePolicy::AllowRule::Type::List:
        return rule.allowedList.contains(origin);
    }
    ASSERT_NOT_REACHED();
    return false;
}
// Folds one allow-list token into `rule`. 'none' is sticky: once the rule type
// is None, every later token in the same list is ignored (early return below).
static inline void processOriginItem(Document& document, FeaturePolicy::AllowRule& rule, StringView item)
{
    if (rule.type == FeaturePolicy::AllowRule::Type::None)
        return;
    item = item.stripLeadingAndTrailingMatchedCharacters(isHTMLSpace<UChar>);
    // FIXME: Support 'src'.
    if (item == "'src'")
        return;
    if (item == "*") {
        rule.type = FeaturePolicy::AllowRule::Type::All;
        return;
    }
    if (item == "'self'") {
        // 'self' resolves to the embedding document's own origin.
        rule.allowedList.add(document.securityOrigin().data());
        return;
    }
    if (item == "'none'") {
        rule.type = FeaturePolicy::AllowRule::Type::None;
        return;
    }
    // Any other token is treated as an origin URL; invalid URLs are silently
    // dropped.
    URL url { { }, item.toString() };
    if (url.isValid())
        rule.allowedList.add(SecurityOriginData::fromURL(url));
}
// Splits the space-separated value of one allow-list entry into tokens and
// feeds each to processOriginItem. Empty tokens produced by repeated spaces
// are harmless — processOriginItem strips them to nothing and matches no rule.
static inline void updateList(Document& document, FeaturePolicy::AllowRule& rule, StringView value)
{
    // We keep the empty string value equivalent to '*' for existing websites.
    if (value.isEmpty()) {
        rule.type = FeaturePolicy::AllowRule::Type::All;
        return;
    }
    while (!value.isEmpty()) {
        auto position = value.find(isHTMLSpace<UChar>);
        if (position == notFound) {
            // Last token.
            processOriginItem(document, rule, value);
            return;
        }
        processOriginItem(document, rule, value.substring(0, position));
        value = value.substring(position + 1).stripLeadingAndTrailingMatchedCharacters(isHTMLSpace<UChar>);
    }
}
// Parses an iframe's `allow` attribute into a FeaturePolicy. Entries are
// semicolon-separated "feature [allowlist]" items; features absent from the
// attribute fall back to their spec defaults at the bottom of this function.
FeaturePolicy FeaturePolicy::parse(Document& document, const HTMLIFrameElement& iframe, StringView allowAttributeValue)
{
    FeaturePolicy policy;
    bool isCameraInitialized = false;
    bool isMicrophoneInitialized = false;
    bool isDisplayCaptureInitialized = false;
    bool isSyncXHRInitialized = false;
    bool isFullscreenInitialized = false;
#if ENABLE(WEBXR)
    bool isXRSpatialTrackingInitialized = false;
#endif
    for (auto allowItem : allowAttributeValue.split(';')) {
        auto item = allowItem.stripLeadingAndTrailingMatchedCharacters(isHTMLSpace<UChar>);
        // Each substring() below skips the feature keyword (plus, for most
        // features, its trailing space) to isolate the allowlist value.
        // NOTE(review): "sync-xhr" (8) and "xr-spatial-tracking" (19) skip
        // only the keyword, not the following space — updateList's stripping
        // absorbs the difference, but confirm the inconsistency is deliberate.
        if (item.startsWith("camera")) {
            isCameraInitialized = true;
            updateList(document, policy.m_cameraRule, item.substring(7));
            continue;
        }
        if (item.startsWith("microphone")) {
            isMicrophoneInitialized = true;
            updateList(document, policy.m_microphoneRule, item.substring(11));
            continue;
        }
        if (item.startsWith("display-capture")) {
            isDisplayCaptureInitialized = true;
            updateList(document, policy.m_displayCaptureRule, item.substring(16));
            continue;
        }
        if (item.startsWith("sync-xhr")) {
            isSyncXHRInitialized = true;
            updateList(document, policy.m_syncXHRRule, item.substring(8));
            continue;
        }
        if (item.startsWith("fullscreen")) {
            isFullscreenInitialized = true;
            updateList(document, policy.m_fullscreenRule, item.substring(11));
            continue;
        }
#if ENABLE(WEBXR)
        if (item.startsWith("xr-spatial-tracking")) {
            isXRSpatialTrackingInitialized = true;
            updateList(document, policy.m_xrSpatialTrackingRule, item.substring(19));
            continue;
        }
#endif
    }
    // By default, camera, microphone, display-capture, fullscreen and
    // xr-spatial-tracking policy is 'self'.
    if (!isCameraInitialized)
        policy.m_cameraRule.allowedList.add(document.securityOrigin().data());
    if (!isMicrophoneInitialized)
        policy.m_microphoneRule.allowedList.add(document.securityOrigin().data());
    if (!isDisplayCaptureInitialized)
        policy.m_displayCaptureRule.allowedList.add(document.securityOrigin().data());
#if ENABLE(WEBXR)
    if (!isXRSpatialTrackingInitialized)
        policy.m_xrSpatialTrackingRule.allowedList.add(document.securityOrigin().data());
#endif
    // https://w3c.github.io/webappsec-feature-policy/#process-feature-policy-attributes
    // 9.5 Process Feature Policy Attributes
    // 3.1 If element’s allowfullscreen attribute is specified, and container policy does
    // not contain an allowlist for fullscreen,
    if (!isFullscreenInitialized) {
        if (iframe.hasAttribute(allowfullscreenAttr) || iframe.hasAttribute(webkitallowfullscreenAttr)) {
            // 3.1.1 Construct a new declaration for fullscreen, whose allowlist is the special value *.
            policy.m_fullscreenRule.type = FeaturePolicy::AllowRule::Type::All;
        } else {
            // https://fullscreen.spec.whatwg.org/#feature-policy-integration
            // The default allowlist is 'self'.
            policy.m_fullscreenRule.allowedList.add(document.securityOrigin().data());
        }
    }
    // sync-xhr defaults to allowed for everyone.
    if (!isSyncXHRInitialized)
        policy.m_syncXHRRule.type = AllowRule::Type::All;
    return policy;
}
// Dispatches an allow query for `origin` to the rule matching the requested
// feature type. Exhaustive enum switch — no default — so new Type values get a
// compiler warning here.
bool FeaturePolicy::allows(Type type, const SecurityOriginData& origin) const
{
    switch (type) {
    case Type::Camera:
        return isAllowedByFeaturePolicy(m_cameraRule, origin);
    case Type::Microphone:
        return isAllowedByFeaturePolicy(m_microphoneRule, origin);
    case Type::DisplayCapture:
        return isAllowedByFeaturePolicy(m_displayCaptureRule, origin);
    case Type::SyncXHR:
        return isAllowedByFeaturePolicy(m_syncXHRRule, origin);
    case Type::Fullscreen:
        return isAllowedByFeaturePolicy(m_fullscreenRule, origin);
#if ENABLE(WEBXR)
    case Type::XRSpatialTracking:
        return isAllowedByFeaturePolicy(m_xrSpatialTrackingRule, origin);
#endif
    }
    ASSERT_NOT_REACHED();
    return false;
}
}
|
lingfish/stackstorm-vsphere
|
actions/guest_file_upload.py
|
<filename>actions/guest_file_upload.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from vmwarelib.guest import GuestAction
from pyVmomi import vim # pylint: disable-msg=E0611
import os
import requests
class InitiateFileTransferToGuest(GuestAction):
    """Uploads a local file into a guest VM directory via VMware guest operations."""

    def run(self, vm_id, username, password, guest_directory, local_path,
            vsphere=None):
        """
        Upload a file to a directory inside a guest.
        Args:
        - vm_id: MOID of the Virtual Machine
        - username: guest username to perform the operation as
        - password: password for that guest user
        - guest_directory: Directory name in the guest to store the file
        - local_path: The full path to the local file, or a path
        - relative to the packs directory when prefixed
        - with pack:
        - If this pack is a development pack (inside packs.dev)
        - then the "pack:" prefix indicates packs.dev as the
        - relative starting point.
        - examples: /opt/stackstorm/packs/mypack/path/to/file
        - pack:mypack/path-inside-pack/to/file
        - vsphere: Pre-configured vsphere connection details (config.yaml)
        Returns the full path of the file inside the guest.
        """
        self.prepare_guest_operation(vsphere, vm_id, username, password)
        if (local_path.startswith("pack:")):
            # "pack:" paths resolve relative to the packs directory, three
            # levels above this actions/ file.
            packsdir =\
                os.path.dirname(
                    os.path.dirname(
                        os.path.dirname(os.path.abspath(__file__))))
            # 5 == len("pack:") — strip the prefix.
            full_local_path = os.path.join(packsdir, local_path[5:])
        else:
            full_local_path = local_path
        # NOTE(review): reads the entire file into memory before upload; very
        # large files could exhaust RAM — confirm expected file sizes.
        with open(full_local_path, 'rb') as myfile:
            file_contents = myfile.read()
        guest_filename = os.path.basename(full_local_path)
        file_attribute = vim.vm.guest.FileManager.FileAttributes()
        full_path = self.joinpath(guest_directory, guest_filename)
        url = self.guest_file_manager.InitiateFileTransferToGuest(
            self.vm, self.guest_credentials, full_path, file_attribute,
            len(file_contents), True)
        # NOTE(review): verify=False disables TLS certificate verification for
        # the upload URL — acceptable only for self-signed ESXi endpoints;
        # flagged for security review.
        response = requests.put(url, data=file_contents, verify=False)
        response.raise_for_status()  # raise if status_code is not 200
        return full_path
|
Matthew-Griffith/ringteki-client
|
server/game/cards/02.2-FHaG/NorthernWallSensei.js
|
<gh_stars>100-1000
const DrawCard = require('../../drawcard.js');
const { Players, CardTypes } = require('../../Constants');
// Northern Wall Sensei — action: while this character is participating in a
// conflict, choose a participating friendly character with at least one
// attachment and grant it immunity to event cards (as a lasting effect).
class NorthernWallSensei extends DrawCard {
    setupCardAbilities(ability) {
        this.action({
            title: 'Grant immunity to events',
            // Usable only while this card itself is in the conflict.
            condition: context => context.source.isParticipating(),
            target: {
                cardType: CardTypes.Character,
                controller: Players.Self,
                // Target must also be participating and carry >= 1 attachment.
                cardCondition: card => card.isParticipating() && card.attachments.size() > 0,
                gameAction: ability.actions.cardLastingEffect({
                    effect: ability.effects.immunity({ restricts: 'events' })
                })
            },
            effect: 'grant immunity to events to {0}'
        });
    }
}

NorthernWallSensei.id = 'northern-wall-sensei';

module.exports = NorthernWallSensei;
|
ichitaso/TwitterListEnabler
|
Twitter-Dumped/7.60.6/T1FollowsYouView.h
|
<reponame>ichitaso/TwitterListEnabler<gh_stars>1-10
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <UIKit/UIView.h>
@class NSString, UIColor, UIFont, UILabel;
// "Follows you" badge view: a UIView wrapping a single padded UILabel whose
// layout metrics are driven by contentEdgeInsets. Header reconstructed by
// class-dump (see file header) — do not edit by hand.
@interface T1FollowsYouView : UIView
{
    UILabel *_label;
    struct UIEdgeInsets _contentEdgeInsets;
}
+ (id)font;
+ (struct CGSize)sizeForText:(id)arg1 font:(id)arg2 contentEdgeInsets:(struct UIEdgeInsets)arg3;
@property(retain, nonatomic) UILabel *label; // @synthesize label=_label;
@property(nonatomic) struct UIEdgeInsets contentEdgeInsets; // @synthesize contentEdgeInsets=_contentEdgeInsets;
- (void).cxx_destruct;
- (void)layoutSubviews;
- (struct CGSize)intrinsicContentSize;
- (void)sizeToFit;
@property(nonatomic) double baseline;
@property(nonatomic) double cornerRadius;
@property(retain, nonatomic) UIColor *textColor;
@property(retain, nonatomic) UIFont *font;
@property(nonatomic) long long textAlignment;
@property(copy, nonatomic) NSString *text;
- (void)_setupWithLabel:(id)arg1;
- (id)initWithFrame:(struct CGRect)arg1;
- (id)initWithCoder:(id)arg1;
@end
|
asheraryam/ETEngine
|
Engine/source/EtRendering/GlobalRenderingSystems/PrimitiveRenderer.cpp
|
<reponame>asheraryam/ETEngine
#include "stdafx.h"
#include "PrimitiveRenderer.h"
namespace et {
namespace render {
//Abstract
//*********
// Lazily creates the GPU resources on first use, then issues the draw call.
void PrimitiveGeometry::RootDraw()
{
	if (!m_IsInitialized)
	{
		// First draw: upload vertex data / create VAO before drawing.
		Initialize();
		m_IsInitialized = true;
	}

	Draw();
}
// Registers the default primitive set (renderer takes ownership of each).
PrimitiveRenderer::PrimitiveRenderer()
{
	AddGeometry(new primitives::Quad());
	AddGeometry(new primitives::Cube());
	// Icosphere at four subdivision levels (0 = coarsest).
	AddGeometry(new primitives::IcoSphere<0>());
	AddGeometry(new primitives::IcoSphere<1>());
	AddGeometry(new primitives::IcoSphere<2>());
	AddGeometry(new primitives::IcoSphere<3>());
}
// Frees every registered geometry (owned by this renderer).
PrimitiveRenderer::~PrimitiveRenderer()
{
	for (PrimitiveGeometry* geometry : m_pTypes)
	{
		SafeDelete(geometry);
	}

	m_pTypes.clear();
}
// Takes ownership of pGeometry. If a geometry of the same type is already
// registered, the incoming instance is deleted and the registry is unchanged.
void PrimitiveRenderer::AddGeometry(PrimitiveGeometry* pGeometry)
{
	bool alreadyRegistered = false;
	for (PrimitiveGeometry* const registered : m_pTypes)
	{
		if (registered->GetType() == pGeometry->GetType())
		{
			alreadyRegistered = true;
			break;
		}
	}

	if (alreadyRegistered)
	{
		// Duplicate type -> discard the new instance, keep the existing one.
		SafeDelete(pGeometry);
		return;
	}

	m_pTypes.push_back(pGeometry);
}
//Specifications
//***************
//Unit Quad
// Releases the quad's GPU buffer and vertex array.
primitives::Quad::~Quad()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->DeleteBuffer(m_VBO);
	api->DeleteVertexArray(m_VAO);
}
// Draws the quad as a 4-vertex triangle strip.
void primitives::Quad::Draw()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->BindVertexArray(m_VAO);
	api->DrawArrays(E_DrawMode::TriangleStrip, 0, 4);
	api->BindVertexArray(0);
}
// Uploads the fullscreen-quad vertex data and configures its VAO.
// Layout per vertex: position (3 floats) + texcoord (2 floats), interleaved.
void primitives::Quad::Initialize()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	float quadVertices[] =
	{
		// Positions        Texture Coords
		-1.0f,  1.0f, 0.0f, 0.0f, 1.0f,
		 1.0f,  1.0f, 0.0f, 1.0f, 1.0f,
		-1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
		 1.0f, -1.0f, 0.0f, 1.0f, 0.0f,
	};

	// Setup plane VAO
	m_VAO = api->CreateVertexArray();
	m_VBO = api->CreateBuffer();
	api->BindVertexArray(m_VAO);
	api->BindBuffer(E_BufferType::Vertex, m_VBO);
	// Pass the decayed array pointer (same address as &quadVertices, but
	// consistent with Cube::Initialize's style).
	api->SetBufferData(E_BufferType::Vertex, sizeof(quadVertices), quadVertices, E_UsageHint::Static);
	api->SetVertexAttributeArrayEnabled(0, true);
	api->DefineVertexAttributePointer(0, 3, E_DataType::Float, false, 5 * sizeof(float), 0);
	api->SetVertexAttributeArrayEnabled(1, true);
	api->DefineVertexAttributePointer(1, 2, E_DataType::Float, false, 5 * sizeof(float), (3 * sizeof(float)));

	// Fix: unbind buffer and VAO so no bind state leaks out of initialization
	// (Cube::Initialize and IcoSphere::Initialize already do this).
	api->BindBuffer(E_BufferType::Vertex, 0);
	api->BindVertexArray(0);
}
//Unit cube
// Releases the cube's GPU buffer and vertex array.
primitives::Cube::~Cube()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->DeleteBuffer(m_VBO);
	api->DeleteVertexArray(m_VAO);
}
// Draws the cube as 36 triangle vertices (12 triangles, 6 faces).
void primitives::Cube::Draw()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->BindVertexArray(m_VAO);
	api->DrawArrays(E_DrawMode::Triangles, 0, 36);
	api->BindVertexArray(0);
}
// Uploads the unit-cube vertex data and configures its VAO.
// Layout per vertex: position (3) + normal (3) + texcoord (2), interleaved.
void primitives::Cube::Initialize()
{
	float vertices[] =
	{
		// Back face
		-1.0f, -1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, // Bottom-left
		1.0f, 1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 1.0f, 1.0f, // top-right
		1.0f, -1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, // bottom-right
		1.0f, 1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 1.0f, 1.0f, // top-right
		-1.0f, -1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, // bottom-left
		-1.0f, 1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f,// top-left
		// Front face
		-1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom-left
		1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, // bottom-right
		1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, // top-right
		1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, // top-right
		-1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, // top-left
		-1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom-left
		// Left face
		-1.0f, 1.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // top-right
		-1.0f, 1.0f, -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top-left
		-1.0f, -1.0f, -1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, // bottom-left
		-1.0f, -1.0f, -1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, // bottom-left
		-1.0f, -1.0f, 1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // bottom-right
		-1.0f, 1.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // top-right
		// Right face
		1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // top-left
		1.0f, -1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, // bottom-right
		1.0f, 1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top-right
		1.0f, -1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, // bottom-right
		1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // top-left
		1.0f, -1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // bottom-left
		// Bottom face
		-1.0f, -1.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, // top-right
		1.0f, -1.0f, -1.0f, 0.0f, -1.0f, 0.0f, 1.0f, 1.0f, // top-left
		1.0f, -1.0f, 1.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f,// bottom-left
		1.0f, -1.0f, 1.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f, // bottom-left
		-1.0f, -1.0f, 1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, // bottom-right
		-1.0f, -1.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, // top-right
		// Top face
		-1.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,// top-left
		1.0f, 1.0f , 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom-right
		1.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, // top-right
		1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom-right
		-1.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,// top-left
		-1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f // bottom-left
	};

	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	m_VAO = api->CreateVertexArray();
	m_VBO = api->CreateBuffer();

	// Fill buffer
	api->BindBuffer(E_BufferType::Vertex, m_VBO);
	api->SetBufferData(E_BufferType::Vertex, sizeof(vertices), vertices, E_UsageHint::Static);

	// Link vertex attributes: 0 = position, 1 = normal, 2 = texcoord
	api->BindVertexArray(m_VAO);
	api->SetVertexAttributeArrayEnabled(0, true);
	api->DefineVertexAttributePointer(0, 3, E_DataType::Float, false, 8 * sizeof(float), 0);
	api->SetVertexAttributeArrayEnabled(1, true);
	api->DefineVertexAttributePointer(1, 3, E_DataType::Float, false, 8 * sizeof(float), (3 * sizeof(float)));
	api->SetVertexAttributeArrayEnabled(2, true);
	api->DefineVertexAttributePointer(2, 2, E_DataType::Float, false, 8 * sizeof(float), (6 * sizeof(float)));

	// Unbind so no state leaks out of initialization.
	api->BindBuffer(E_BufferType::Vertex, 0);
	api->BindVertexArray(0);
}
//Unit sphere with variable detail
// Releases the sphere's GPU buffer and vertex array.
template<int32 level>
primitives::IcoSphere<level>::~IcoSphere()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->DeleteBuffer(m_VBO);
	api->DeleteVertexArray(m_VAO);
}
// Draws the subdivided sphere; vertex count was computed in Initialize().
template<int32 level>
void primitives::IcoSphere<level>::Draw()
{
	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	api->BindVertexArray(m_VAO);
	api->DrawArrays(E_DrawMode::Triangles, 0, m_NumVerts);
	api->BindVertexArray(0);
}
// Builds a unit sphere by recursively subdividing an icosahedron 'level'
// times, then uploads the resulting triangle soup (positions only).
template<int32 level>
void primitives::IcoSphere<level>::Initialize()
{
	auto ico = math::GetIcosahedronPositions(1);
	auto indices = math::GetIcosahedronIndicesBFC();

	std::vector<vec3> vertices;
	for (size_t i = 0; i < indices.size(); i += 3)
	{
		vec3 a = ico[indices[i]];
		vec3 b = ico[indices[i+1]];
		vec3 c = ico[indices[i+2]];

		// Subdivide each base triangle down to the template level.
		SubAndPush(vertices, 0, a, b, c);
	}

	m_NumVerts = (int32)vertices.size();

	I_GraphicsContextApi* const api = ContextHolder::GetRenderContext();

	m_VAO = api->CreateVertexArray();
	m_VBO = api->CreateBuffer();

	// Fill buffer
	api->BindBuffer(E_BufferType::Vertex, m_VBO);
	api->SetBufferData(E_BufferType::Vertex, sizeof(vec3)*vertices.size(), vertices.data(), E_UsageHint::Static);

	// Link vertex attributes (attribute 0 = position)
	api->BindVertexArray(m_VAO);
	api->SetVertexAttributeArrayEnabled(0, true);
	api->DefineVertexAttributePointer(0, 3, E_DataType::Float, false, 3 * sizeof(float), 0);

	api->BindBuffer(E_BufferType::Vertex, 0);
	api->BindVertexArray(0);
}
// Recursively subdivides triangle (a, b, c) until 'level' is reached, with
// midpoints re-normalized back onto the unit sphere; leaf triangles are
// appended to 'vertices' (three entries per triangle).
template<int32 level>
void primitives::IcoSphere<level>::SubAndPush(std::vector<vec3> &vertices, const int32 lev, const vec3 &a, const vec3 &b, const vec3 &c) const
{
	if (lev < level)
	{
		//find midpoints (normalized so they lie on the sphere surface)
		vec3 A = math::normalize(b + ((c - b)*0.5f));
		vec3 B = math::normalize(c + ((a - c)*0.5f));
		vec3 C = math::normalize(a + ((b - a)*0.5f));

		//Make 4 new triangles
		int32 nLevel = lev + 1;
		SubAndPush(vertices, nLevel, B, A, c);
		SubAndPush(vertices, nLevel, b, A, C);
		SubAndPush(vertices, nLevel, B, a, C);
		SubAndPush(vertices, nLevel, A, B, C);
	}
	else
	{
		// Base case: emit the triangle.
		vertices.push_back(a);
		vertices.push_back(b);
		vertices.push_back(c);
	}
}
} // namespace render
} // namespace et
|
naparuba/opsbro
|
data/global-configuration/packs/rabbitmq/collectors/collector_rabbitmq.py
|
<filename>data/global-configuration/packs/rabbitmq/collectors/collector_rabbitmq.py<gh_stars>10-100
import traceback
from opsbro.httpclient import get_http_exceptions, httper
from opsbro.collector import Collector
from opsbro.parameters import StringParameter
from opsbro.jsonmgr import jsoner
# TODO: look at all available at to learn how rabbitmq is working https://github.com/nagios-plugins-rabbitmq/nagios-plugins-rabbitmq
class RabbitMQ(Collector):
    """Collector that scrapes the RabbitMQ management HTTP API overview
    endpoint and returns the parsed JSON document as the collector result."""

    # Connection parameters. NOTE(review): the password default was anonymized
    # upstream to '<PASSWORD>'; RabbitMQ's stock default is 'guest' — confirm.
    parameters = {
        'uri' : StringParameter(default='http://localhost:15672/api/overview'),
        'user' : StringParameter(default='guest'),
        'password': StringParameter(default='<PASSWORD>'),
    }

    def launch(self):
        # Entry point called by the collector framework. Returns the parsed
        # overview dict on success, False on error, and None (implicit) when
        # this node is not in the 'rabbitmq' group.
        logger = self.logger
        logger.debug('getRabbitMQStatus: start')

        if not self.is_in_group('rabbitmq'):
            self.set_not_eligible('Please add the rabbitmq group to enable this collector.')
            return

        try:
            uri = self.get_parameter('uri')
            user = self.get_parameter('user')
            password = self.get_parameter('password')
            # Short timeout so a stuck management API does not block collection.
            response = httper.get(uri, timeout=3, user=user, password=password)
        except get_http_exceptions() as e:
            self.set_error('Unable to get RabbitMQ status - HTTPError = %s' % e)
            return False
        except Exception:
            self.set_error('Unable to get RabbitMQ status - Exception = %s' % traceback.format_exc())
            return False

        try:
            status = jsoner.loads(response)
        except Exception as exp:
            self.set_error("Rabbitmq: parsing json: %s" % exp)
            return False

        return status
|
trayanmomkov/jos
|
src/main/java/info/trekto/jos/core/model/impl/SimulationObjectImpl.java
|
<gh_stars>0
package info.trekto.jos.core.model.impl;
import info.trekto.jos.core.model.ImmutableSimulationObject;
import info.trekto.jos.core.model.SimulationObject;
import info.trekto.jos.core.numbers.Number;
import static info.trekto.jos.core.numbers.NumberFactoryProxy.TRIPLE_ZERO;
import static info.trekto.jos.core.numbers.NumberFactoryProxy.ZERO;
import static java.awt.Color.BLUE;
/**
 * Mutable default implementation of {@link SimulationObject}: position,
 * velocity, acceleration, radius, mass, color and an id.
 *
 * <p>Deliberately uses identity-based equals/hashCode because instances are
 * used as HashMap keys (see the overrides below).
 *
 * @author <NAME>
 * 3 Mar 2016
 */
public class SimulationObjectImpl implements SimulationObject {
    // Default render colors: full RGB value and a simplified single channel.
    public static final int DEFAULT_COLOR = BLUE.getRGB();
    public static final int DEFAULT_COLOR_SIMPLIFIED = BLUE.getBlue();

    // Position components.
    private Number x;
    private Number y;
    private Number z;
    private TripleNumber velocity;
    private TripleNumber acceleration;
    private Number radius;
    private int color;
    private Number mass;
    /* Whether the object is static */
    boolean motionless = false;
    String id;

    /**
     * Creates an object at the origin with zero velocity/acceleration and the
     * default color. Radius, mass and id are left unset (null).
     */
    public SimulationObjectImpl() {
        this.x = ZERO;
        this.y = ZERO;
        this.z = ZERO;
        this.velocity = TRIPLE_ZERO;
        this.acceleration = TRIPLE_ZERO;
        color = DEFAULT_COLOR;
    }

    /**
     * Copy constructor from the immutable view.
     * NOTE(review): 'motionless' is not copied here — presumably not exposed
     * by {@code ImmutableSimulationObject}; confirm this is intentional.
     */
    public SimulationObjectImpl(ImmutableSimulationObject simulationObject) {
        this.color = simulationObject.getColor();
        this.id = simulationObject.getId();
        this.mass = simulationObject.getMass();
        this.radius = simulationObject.getRadius();
        this.velocity = simulationObject.getVelocity();
        this.acceleration = simulationObject.getAcceleration();
        this.x = simulationObject.getX();
        this.y = simulationObject.getY();
        this.z = simulationObject.getZ();
    }

    // Plain accessors below; no validation is performed.
    @Override
    public Number getX() {
        return x;
    }

    @Override
    public void setX(Number x) {
        this.x = x;
    }

    @Override
    public Number getY() {
        return y;
    }

    @Override
    public void setY(Number y) {
        this.y = y;
    }

    @Override
    public Number getZ() {
        return z;
    }

    @Override
    public void setZ(Number z) {
        this.z = z;
    }

    @Override
    public TripleNumber getVelocity() {
        return velocity;
    }

    @Override
    public TripleNumber getAcceleration() {
        return acceleration;
    }

    @Override
    public void setVelocity(TripleNumber velocity) {
        this.velocity = velocity;
    }

    @Override
    public void setAcceleration(TripleNumber acceleration) {
        this.acceleration = acceleration;
    }

    @Override
    public Number getRadius() {
        return radius;
    }

    @Override
    public void setRadius(Number radius) {
        this.radius = radius;
    }

    @Override
    public int getColor() {
        return color;
    }

    @Override
    public void setColor(int color) {
        this.color = color;
    }

    @Override
    public Number getMass() {
        return mass;
    }

    @Override
    public void setMass(Number mass) {
        this.mass = mass;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    /** Compact debug representation (id, position, velocity only). */
    @Override
    public String toString() {
        return "{" +
                "id='" + id + '\'' +
                ", x=" + x +
                ", y=" + y +
                ", velocity=" + velocity +
                '}';
    }

    /**
     * Do not change! {@link SimulationObjectImpl} is used as HashMap key.
     * This method is created only to show that usage of Object's equals is intentional.
     */
    @Override
    public boolean equals(Object obj) {
        return super.equals(obj);
    }

    /**
     * Do not change! {@link SimulationObjectImpl} is used as HashMap key.
     * This method is created only to show that usage of Object's hashCode is intentional.
     */
    @Override
    public int hashCode() {
        return super.hashCode();
    }
}
|
delftdata/s-query
|
hazelcast/hazelcast-sql-core/src/test/java/com/hazelcast/sql/support/model/person/Person.java
|
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.sql.support.model.person;
import java.io.Serializable;
/**
 * Simple serializable person model used by SQL support tests.
 *
 * <p>The no-arg constructor exists for serialization frameworks; the
 * parameterized constructor populates all fields. Read-only after creation
 * (getters only).
 */
public class Person implements Serializable {
    private static final long serialVersionUID = -3664451191867982502L;

    private String name;
    private int age;
    private long salary;
    // Id of the person's city. NOTE(review): presumably keys into a City
    // model elsewhere in the test fixtures — confirm against callers.
    private long cityId;
    private String deptTitle;

    public Person() {
        // No-op.
    }

    /** Creates a fully populated person. */
    public Person(String name, int age, long salary, long cityId, String deptTitle) {
        this.name = name;
        this.age = age;
        this.salary = salary;
        this.cityId = cityId;
        this.deptTitle = deptTitle;
    }

    public String getName() {
        return name;
    }

    public int getAge() {
        return age;
    }

    public long getSalary() {
        return salary;
    }

    public long getCityId() {
        return cityId;
    }

    public String getDeptTitle() {
        return deptTitle;
    }
}
|
meterXu/DogIcon
|
src/packages/dog-icon/components/iconPark/ComputerOne.js
|
<gh_stars>1-10
/**
* @file ComputerOne 计算机
* @author Auto Generated by IconPark
*/
/* tslint:disable: max-line-length */
/* eslint-disable max-len */
import {IconWrapper} from '../index';
// Renders the 48x48 "computer" icon: a device outline, a top line, and a
// 3x3 grid of 2px dots, styled via the colors/stroke props from IconWrapper.
export default IconWrapper(
    'ComputerOne',
    true,
    (h, props) => (
        <svg
            width={props.size}
            height={props.size}
            viewBox="0 0 48 48"
            fill="none"
        >
            {/* device outline */}
            <path
                d="M10 6C10 4.89543 10.8954 4 12 4H36C37.1046 4 38 4.89543 38 6V42C38 43.1046 37.1046 44 36 44H12C10.8954 44 10 43.1046 10 42L10 6Z"
                stroke={props.colors[0]}
                stroke-width={props.strokeWidth}
            />
            {/* horizontal line near the top */}
            <path
                d="M17 12L31 12"
                stroke={props.colors[0]}
                stroke-width={props.strokeWidth}
                stroke-linecap={props.strokeLinecap}
                stroke-linejoin={props.strokeLinejoin}
            />
            {/* 3x3 grid of filled dots (columns x=17,24,31; rows y=21,27,33) */}
            <circle
                cx="17"
                cy="21"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="17"
                cy="27"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="17"
                cy="33"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="24"
                cy="21"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="24"
                cy="27"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="24"
                cy="33"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="31"
                cy="21"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="31"
                cy="27"
                r="2"
                fill={props.colors[0]}
            />
            <circle
                cx="31"
                cy="33"
                r="2"
                fill={props.colors[0]}
            />
        </svg>
    )
);
|
spcl/dace-onnx
|
tests/pure_expansions/test_conv_expansion.py
|
import pytest
import dace
import daceml.onnx as donnx
import torch
import torch.nn.functional as F
import numpy as np
@pytest.mark.parametrize("implementation", ["pure", "im2col"])
@pytest.mark.parametrize("num_in_channels, kernel_size, num_filters, bias",
                         [(1, (3, 3), 8, True), (8, (3, 3), 3, False),
                          (8, (5, 5), 3, True), (8, (4, 4), 3, False)])
@pytest.mark.pure
def test_conv_simple(num_in_channels, kernel_size, num_filters, bias,
                     implementation):
    """Compare donnx.ONNXConv against torch.nn.functional.conv2d for several
    channel/kernel/bias combinations under the given expansion implementation.
    """
    if implementation == "im2col":
        pytest.skip("pure im2col is currently broken")

    old_implementation = donnx.ONNXConv.default_implementation
    donnx.ONNXConv.default_implementation = implementation
    # Fix: restore the global default even if the test body raises or the
    # assertion fails; the original leaked the overridden implementation.
    try:
        batch_size = 8

        X = np.random.rand(batch_size, num_in_channels, 32, 32).astype(np.float32)
        W = np.random.rand(num_filters, num_in_channels,
                           *kernel_size).astype(np.float32)

        if bias:
            B = np.random.rand(num_filters).astype(np.float32)
            # Reference result from PyTorch.
            torch_Z = F.conv2d(torch.from_numpy(X),
                               torch.from_numpy(W),
                               bias=torch.from_numpy(B)).numpy()
        else:
            B = None
            torch_Z = F.conv2d(torch.from_numpy(X), torch.from_numpy(W)).numpy()
        dace_Z = np.zeros_like(torch_Z)

        # Two program variants because the B input is optional on ONNXConv.
        if bias:

            @dace.program
            def conv(X_: dace.float32[tuple(X.shape)],
                     W_: dace.float32[tuple(W.shape)],
                     B_: dace.float32[tuple(B.shape)],
                     Z_: dace.float32[tuple(torch_Z.shape)]):
                donnx.ONNXConv(X=X_, W=W_, B=B_, Y=Z_)
        else:

            @dace.program
            def conv(X_: dace.float32[tuple(X.shape)],
                     W_: dace.float32[tuple(W.shape)],
                     Z_: dace.float32[tuple(torch_Z.shape)]):
                donnx.ONNXConv(X=X_, W=W_, Y=Z_)

        sdfg = conv.to_sdfg()
        sdfg.expand_library_nodes()

        if bias:
            sdfg(X_=X, W_=W, Z_=dace_Z, B_=B)
        else:
            sdfg(X_=X, W_=W, Z_=dace_Z)

        # (Removed the stray debug print of the difference matrix.)
        assert np.allclose(torch_Z, dace_Z)
    finally:
        donnx.ONNXConv.default_implementation = old_implementation
|
czankel/cne
|
cli/common_test.go
|
<reponame>czankel/cne
package cli
import (
"testing"
"bytes"
"io"
"os"
)
// compareStrings returns -1 when l and r are identical, otherwise the index
// of the first mismatching byte. If one string is a strict prefix of the
// other, the length of the shorter string is returned.
func compareStrings(l, r string) int {
	shorter := len(l)
	if len(r) < shorter {
		shorter = len(r)
	}
	for i := 0; i < shorter; i++ {
		if l[i] != r[i] {
			return i
		}
	}
	if len(l) == len(r) {
		return -1
	}
	// Same prefix but different lengths: mismatch at the shorter length.
	return shorter
}
// compareFuncOutput compares the output printed to stdout from the provided function with the
// provided expected string. It returns -1 if the strings match or the position where they
// mismatch, and also returns the generated string from the function.
//
// It temporarily redirects os.Stdout to a pipe; the read end is drained by a
// goroutine so a large print cannot fill the pipe and deadlock.
func compareFuncOutput(printFunc func(), expected string) (int, string) {
	oldStdout := os.Stdout
	r, w, _ := os.Pipe()
	os.Stdout = w

	outC := make(chan string)
	// copy the output in a separate goroutine so printing can't block indefinitely
	go func() {
		var buf bytes.Buffer
		io.Copy(&buf, r)
		outC <- buf.String()
	}()

	printFunc()

	// Flush and close the write end so the copier goroutine sees EOF.
	w.Sync()
	w.Close()
	os.Stdout = oldStdout // restoring the real stdout
	out := <-outC

	return compareStrings(out, expected), out
}
// TestPrintValueSimpleString tests printValue for a simple string value
func TestPrintValueSimpleString(t *testing.T) {
	testString := "TestString"

	// A scalar is printed as a single header + name/value row.
	const expected = "NAME VALUE\nVarName TestString\n"
	errPos, out := compareFuncOutput(
		func() { printValue("NAME", "VALUE", "VarName", &testString) }, expected)
	if errPos != -1 {
		t.Errorf("Failed to print simple structure (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueSimpleStruct tests printValue for a simple (non-nested) structure
func TestPrintValueSimpleStruct(t *testing.T) {
	testStruct := struct {
		FieldA   string
		FieldAB  string
		fieldABC string // unexported: expected to be skipped by printValue
	}{
		FieldA:   "ValueA",
		FieldAB:  "ValueAB",
		fieldABC: "ValueABC",
	}

	const expected = "NAME VALUE\nFieldA ValueA\nFieldAB ValueAB\n"
	errPos, out := compareFuncOutput(
		func() { printValue("NAME", "VALUE", "", &testStruct) }, expected)
	if errPos != -1 {
		t.Errorf("Failed to print simple structure (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueSimpleStructWithPrefix tests printValue for a simple structure with
// a provided prefix string
func TestPrintValueSimpleStructWithPrefix(t *testing.T) {
	testStruct := struct {
		FieldA   string
		FieldAB  string
		fieldABC string // unexported: expected to be skipped by printValue
	}{
		FieldA:   "ValueA",
		FieldAB:  "ValueAB",
		fieldABC: "ValueABC",
	}

	// Each field name is expected to be joined to the prefix with '/'.
	const expected = "NAME VALUE\nPrefix/FieldA ValueA\nPrefix/FieldAB ValueAB\n"
	errPos, out := compareFuncOutput(
		func() { printValue("NAME", "VALUE", "Prefix", &testStruct) }, expected)
	if errPos != -1 {
		t.Errorf("Failed to print simple structure with prefix (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueNestedStruct tests printValue for a nested structure
func TestPrintValueNestedStruct(t *testing.T) {
	type testSubStruct struct {
		FieldAA string
		FieldAB string
	}
	testStruct := struct {
		FieldA testSubStruct
	}{
		FieldA: testSubStruct{
			FieldAA: "ValueAA",
			FieldAB: "ValueAB",
		},
	}

	// Nested fields are expected to be flattened as Outer/Inner paths.
	const expected = "NAME VALUE\nFieldA/FieldAA ValueAA\nFieldA/FieldAB ValueAB\n"
	errPos, out := compareFuncOutput(
		func() { printValue("NAME", "VALUE", "", &testStruct) }, expected)
	if errPos != -1 {
		t.Errorf("Failed to print nested structure (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueStructMap tests printValue for a map of a structure
func TestPrintValueStructMap(t *testing.T) {
	type testStruct struct {
		FieldA string
		FieldB string
	}
	testMap := map[string]testStruct{
		"KeyA": testStruct{
			FieldA: "ValueAA",
			FieldB: "ValueAB",
		},
		"KeyB": testStruct{
			FieldA: "ValueBA",
			FieldB: "ValueBB",
		},
	}

	// Expected output lists KeyA before KeyB — printValue presumably sorts
	// map keys for deterministic output; confirm in its implementation.
	const expected = `NAME VALUE
Prefix/KeyA/FieldA ValueAA
Prefix/KeyA/FieldB ValueAB
Prefix/KeyB/FieldA ValueBA
Prefix/KeyB/FieldB ValueBB
`
	errPos, out := compareFuncOutput(
		func() { printValue("NAME", "VALUE", "Prefix", &testMap) }, expected)
	if errPos != -1 {
		t.Errorf("Failed to print map (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueList tests printList for a slice of a simple structure
func TestPrintValueList(t *testing.T) {
	type testStruct struct {
		FieldA   string
		FieldBB  string
		FieldCCC string
	}
	testList := []testStruct{
		testStruct{
			FieldA:   "ValueAA",
			FieldBB:  "ValueAB",
			FieldCCC: "ValueAC",
		},
		testStruct{
			FieldA:   "ValueBA",
			FieldBB:  "ValueBB",
			FieldCCC: "ValueBC",
		},
	}

	const expected = `FIELDA FIELDBB FIELDCCC
ValueAA ValueAB ValueAC
ValueBA ValueBB ValueBC
`
	errPos, out := compareFuncOutput(
		func() { printList(testList, false) }, expected)
	if errPos != -1 {
		// Fix: failure message previously said "simple structure with
		// prefix" — copy-pasted from a different test.
		t.Errorf("Failed to print list (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// TestPrintValueSlice tests printValue for a slice of structures: elements
// are expected to be flattened with their index as the path prefix.
func TestPrintValueSlice(t *testing.T) {
	type testStruct struct {
		FieldA  string
		FieldBB string
	}
	testSlice := []testStruct{
		testStruct{
			FieldA:  "ValueAA",
			FieldBB: "ValueABB",
		},
		testStruct{
			FieldA:  "ValueBA",
			FieldBB: "ValueXYZ",
		},
	}

	const expected = `FIELD VALUE
0/FieldA ValueAA
0/FieldBB ValueABB
1/FieldA ValueBA
1/FieldBB ValueXYZ
`
	errPos, out := compareFuncOutput(
		func() { printValue("Field", "Value", "", testSlice) }, expected)
	if errPos != -1 {
		// Fix: failure message previously said "simple structure with
		// prefix" — copy-pasted from a different test.
		t.Errorf("Failed to print slice (pos %d)", errPos)
		t.Errorf("\n" + out)
	}
}
// compareCommands runs scanLine on the given line and compares the resulting
// commands and their arguments against exp, reporting mismatches on t.
// It returns true when everything matches.
// TODO: implement splitting a command line into slices of arguments
func compareCommands(t *testing.T, desc string, line string, exp [][]string) bool {
	commands := scanLine(line)
	if len(commands) != len(exp) {
		t.Errorf("Test '%s' failed: different length %d, should be %d",
			desc, len(commands), len(exp))
		return false
	}
	for i := range commands {
		if len(commands[i].Args) != len(exp[i]) {
			t.Errorf("Test '%s' failed in line %d: number of arguments mismatch",
				desc, i)
			// Fix: bail out instead of falling through — indexing exp[i][j]
			// below would panic when commands[i].Args is the longer slice.
			return false
		}
		for j, arg := range commands[i].Args {
			if arg != exp[i][j] {
				t.Errorf("Test '%s' failed in line %d, index %d: '%s' (exp: '%s')",
					desc, i, j, arg, exp[i][j])
				return false
			}
		}
	}
	return true
}
// TestCliScanArgs checks scanLine's splitting of a line into comma-separated
// commands. Note: per the TODO above compareCommands, per-argument splitting
// is not implemented yet, so each expected command is a single string.
func TestCliScanArgs(t *testing.T) {
	testLine := ""
	testCmds := [][]string{}
	compareCommands(t, "empty line", testLine, testCmds)

	testLine = "cmd1 arg11 arg12"
	testCmds = [][]string{{"cmd1 arg11 arg12"}}
	compareCommands(t, "single line", testLine, testCmds)

	// Leading/trailing whitespace should be stripped.
	testLine = " cmd1 arg11 arg12 "
	testCmds = [][]string{{"cmd1 arg11 arg12"}}
	compareCommands(t, "single line, extra spaces", testLine, testCmds)

	testLine = "cmd1 arg11, cmd2 arg21"
	testCmds = [][]string{{"cmd1 arg11"}, {"cmd2 arg21"}}
	compareCommands(t, "multi line, attached delim", testLine, testCmds)

	testLine = "cmd1 arg11 , cmd2 arg21"
	testCmds = [][]string{{"cmd1 arg11"}, {"cmd2 arg21"}}
	compareCommands(t, "multi line", testLine, testCmds)

	// Repeated/empty delimiters should not produce empty commands.
	testLine = "cmd1 arg11 , ,,, cmd2 arg21"
	testCmds = [][]string{{"cmd1 arg11"}, {"cmd2 arg21"}}
	compareCommands(t, "multi line, multi delims", testLine, testCmds)
}
|
Manny27nyc/azure-sdk-for-java
|
sdk/avs/azure-resourcemanager-avs/src/samples/java/com/azure/resourcemanager/avs/HcxEnterpriseSitesCreateOrUpdateSamples.java
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.avs;
/** Samples for HcxEnterpriseSites CreateOrUpdate. */
/** Samples for HcxEnterpriseSites CreateOrUpdate (AutoRest-generated). */
public final class HcxEnterpriseSitesCreateOrUpdateSamples {
    /**
     * Sample code: HcxEnterpriseSites_CreateOrUpdate.
     *
     * @param avsManager Entry point to AvsManager. Azure VMware Solution API.
     */
    public static void hcxEnterpriseSitesCreateOrUpdate(com.azure.resourcemanager.avs.AvsManager avsManager) {
        // Creates (or updates) HCX enterprise site "site1" in private cloud
        // "cloud1" of resource group "group1" using the fluent builder.
        avsManager.hcxEnterpriseSites().define("site1").withExistingPrivateCloud("group1", "cloud1").create();
    }
}
|
Zirias/pocas
|
src/bin/test/gui_internal.h
|
<filename>src/bin/test/gui_internal.h
#ifndef GUI_INTERNAL_H
#define GUI_INTERNAL_H

/* Opaque GUI handle; the struct definition is private to the implementation. */
typedef struct Gui Gui;

/* Creates a new Gui instance. NOTE(review): presumably returns NULL on
 * failure — confirm in the implementation. */
Gui *Gui_create(void);

/* Runs the GUI; returns a status/exit code. */
int Gui_run(Gui *self);

/* NOTE(review): the dispose/destroy split suggests dispose releases internal
 * resources while destroy also frees the handle — confirm before relying. */
void Gui_dispose(Gui *self);
void Gui_destroy(Gui *self);

#endif
|
SVEChina/SVEngine
|
SVEngine/src/node/SVSpriteNode.h
|
//
// SVSpriteNode.h
// SVEngine
// Copyright 2017-2020
// <NAME>,<NAME>,<NAME>,<NAME>,<NAME>,<NAME>
//
#ifndef SV_SPRITE_H
#define SV_SPRITE_H
#include "SVNode.h"
namespace sv {
namespace node{
/*
 Sprite node: a 2D textured quad scene node with size, texcoords,
 material/texture setters and JSON (de)serialization.
 */
class SVSpriteNode : public SVNode {
public:
    SVSpriteNode(SVInst *_app);
    SVSpriteNode(SVInst *_app,f32 _w,f32 _h);

    ~SVSpriteNode();

    void update(f32 _dt);

    void render();

    virtual void setSize(f32 _w,f32 _h);

    virtual void setTexcoord(f32 x,f32 y);

    // Adjusts the node size to match the bound texture's dimensions.
    void syncTexSize();

    f32 getRelativeWidth();

    f32 getRelativeHeight();

    f32 getWidth();

    f32 getHeight();

    virtual void setMaterial(SVMtlCorePtr _mtl);

    SVMtlCorePtr getMaterial();

    cptr8 getTexturePath();

    // Texture can be set by path, engine texture type, texture object, or raw pixels.
    virtual void setTexture(cptr8 _path, bool enableMipMap = false);

    virtual void setTexture(SVTEXTYPE _textype);

    virtual void setTexture(SVTexturePtr _tex);

    virtual void setTexture(void * _data, s32 _w, s32 _h, bool enableMipMap = false);

    virtual void setMesh(SVRenderMeshPtr _mesh);

    //serialization interface
    void toJSON(RAPIDJSON_NAMESPACE::Document::AllocatorType &_allocator, RAPIDJSON_NAMESPACE::Value &_objValue);

    void fromJSON(RAPIDJSON_NAMESPACE::Value &item);

protected:
    f32 m_width;
    f32 m_height;
    f32 m_texcoordX;
    f32 m_texcoordY;
    SVString m_pTexPath;
    SVTEXTYPE m_inTexType;

    //runtime state (not serialized)
    SVTexturePtr m_pTex;
    SVRenderMeshPtr m_pMesh;
    SVRenderObjectPtr m_pRenderObj;

private:
    SVString m_pTexName;
};
}//!namespace node
}//!namespace sv
#endif //SV_SPRITE_H
|
rweyrauch/AoSSimulator
|
include/stormcast/VanguardHunters.h
|
<reponame>rweyrauch/AoSSimulator<gh_stars>1-10
/*
* Warhammer Age of Sigmar battle simulator.
*
* Copyright (C) 2019 by <NAME> - <EMAIL>
*
* This code is licensed under the MIT license (MIT) (http://opensource.org/licenses/MIT)
*/
#pragma once
#include <stormcast/StormcastEternals.h>
#include <Weapon.h>
namespace StormcastEternals {
// Vanguard-Hunters unit. Models the weapon choice (handaxe vs sabre) and the
// optional astral compass; registration follows the project's static
// Create/Init factory pattern.
class VanguardHunters : public StormcastEternal {
public:

    enum WeaponOption {
        Shock_Handaxe,
        Storm_Sabre,
    };

    static Unit *Create(const ParameterList &parameters);

    static std::string ValueToString(const Parameter &parameter);

    static int EnumStringToInt(const std::string &enumString);

    static int ComputePoints(const ParameterList& parameters);

    static void Init();

    VanguardHunters(Stormhost stormhost, int numModels, WeaponOption weapons, bool astralCompass, int points);

    ~VanguardHunters() override = default;

protected:

private:

    WeaponOption m_weaponOption = Storm_Sabre;
    bool m_astralCompass = false;

    // Separate profiles for the unit and its prime (leader) model.
    Weapon m_boltstormPistol,
            m_boltstormPistolPrime,
            m_shockHandaxe,
            m_shockHandaxePrime,
            m_stormSabre,
            m_stormSabrePrime;

    static bool s_registered;
};
//
// Abilities Implemented
// -------------------------------------------
// Astral Compass TODO
// Tireless Hunters Yes
//
} // namespace StormcastEternals
|
projectpai/paipass
|
frontend/src/components/shared/Header/index.js
|
import React, { Component } from 'react';
import { withStyles } from '@material-ui/core/styles';
import PaiPassLogo from 'assets/logo.png';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import IconButton from '@material-ui/core/IconButton';
import MenuIcon from '@material-ui/icons/Menu';
import Drawer from '@material-ui/core/Drawer';
import MenuItems from 'components/shared/menuItems';
import LinearProgress from '@material-ui/core/LinearProgress';
// import classNames from 'classnames';
// Style overrides for the slide-out drawer (fixed 38% viewport width).
const drawerStyle = theme => ({
    paper: {
        width: '38vw',
    },
});

// Shared styles for the header bar, subtitle and progress indicator.
const styles = theme => ({
    header: {
        paddingRight: '48px',
    },
    title: {
        color: ' #1590ea',
        textTransform: 'capitalize',
        fontSize: '18px',
    },
    root: {
        position: 'absolute',
    },
});

// Drawer pre-bound with the width override above.
const StyledDrawer = withStyles(drawerStyle)(Drawer);
// App header. With `withDrawer` it renders the app bar, loading indicator,
// subtitle and a menu drawer; otherwise a centered logo + subtitle only.
class Header extends Component {
    // NOTE(review): pdp2_sub_active appears unused in this component — confirm
    // before removing.
    state = { drawer: false,
        pdp2_sub_active: false};

    // Toggles the drawer open/closed.
    onClickMenu = () => {
        this.setState({ drawer: !this.state.drawer });
    };

    render() {
        const { hidden = true } = this.props;
        // NOTE(review): pdp2_sub_status is hardcoded to 'Inactive' here —
        // verify whether MenuItems should receive a live value instead.
        const these_props = {location: this.props.location, pdp2_sub_status: 'Inactive'};
        return this.props.withDrawer ? (
            <>
                <LinearProgress
                    classes={{ root: this.props.classes.root }}
                    className="loading"
                    hidden={hidden}
                />
                <AppBar position="static" color="default" className="app-bar">
                    <Toolbar className={'mt-2'}>
                        <IconButton
                            color="primary"
                            aria-label="Menu"
                            onClick={this.onClickMenu}
                        >
                            <MenuIcon />
                        </IconButton>
                        <div className={`flex-grow-1 ${this.props.classes.header}`}>
                            <div>
                                <img
                                    className="paipass-logo d-block full-width"
                                    src={PaiPassLogo}
                                    alt="PaiPass Logo"
                                />
                            </div>
                        </div>
                    </Toolbar>
                </AppBar>
                <div>
                    <h2 className={`title text-center m-0 ${this.props.classes.title}`}>
                        {this.props.subtitle}
                    </h2>
                </div>
                {this.props.children}
                <StyledDrawer open={this.state.drawer} onClose={this.onClickMenu}>
                    <MenuItems {...these_props}/>
                </StyledDrawer>
            </>
        ) : (
            <div className="row justify-content-center align-items-center">
                <div className="col-xl-4 col-sm-8">
                    <img
                        className="paipass-logo d-block"
                        src={PaiPassLogo}
                        alt="PaiPass Logo"
                    />
                    <div>
                        <h2 className={`title text-center m-0 ${this.props.classes.title}`}>
                            {this.props.subtitle}
                        </h2>
                    </div>
                </div>
            </div>
        );
    }
}

export default withStyles(styles)(Header);
|
Praneethvvs/CircleCi_FastApi
|
general_dir/sorting_algorithms.py
|
<gh_stars>0
from abc import ABC, abstractmethod
class A(ABC):
    """Demo abstract base class with one abstract hook and one concrete helper."""

    @abstractmethod
    def myfun(self):
        """Abstract hook; concrete subclasses must override this."""
        return 1

    def testfun(self):
        # Concrete helper inherited by all subclasses.
        print(1)


class C(A):
    """Concrete subclass of A."""

    def myfun(self):
        # Fix: C previously declared no override of the abstract method
        # `myfun` (it was just `pass`), so `C()` raised TypeError ("Can't
        # instantiate abstract class") and the call below crashed at import.
        # Delegate to the base implementation to preserve the demo's intent.
        return super().myfun()


C().testfun()
|
lumos675/themecolor
|
mutable-theme/src/main/java/com/stardust/theme/app/ThemeColorAppCompatActivity.java
|
<reponame>lumos675/themecolor
package com.stardust.theme.app;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.stardust.theme.ThemeColorManager;
/**
* Created by Stardust on 2017/3/5.
*/
/**
 * Base activity that registers its status bar with {@link ThemeColorManager}
 * on creation so it follows the current theme color.
 */
public class ThemeColorAppCompatActivity extends AppCompatActivity {

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Subscribe this activity's status bar to theme color updates.
        ThemeColorManager.addActivityStatusBar(this);
    }
}
|
atveit/vespa
|
document/src/vespa/document/datatype/referencedatatype.cpp
|
<gh_stars>0
// Copyright 2017 <NAME>. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "referencedatatype.h"
#include <vespa/document/fieldvalue/referencefieldvalue.h>
#include <vespa/vespalib/util/exceptions.h>
using vespalib::make_string;
using vespalib::IllegalArgumentException;
namespace document {

// Builds a reference type named "Reference<TargetName>" with the given
// datatype id. Only a reference to targetDocType is kept, so the target
// type must outlive this object (presumably guaranteed by the type
// registry — confirm in the header).
ReferenceDataType::ReferenceDataType(const DocumentType& targetDocType, int id)
    : DataType(vespalib::make_string("Reference<%s>", targetDocType.getName().c_str()), id),
      _targetDocType(targetDocType)
{
}

ReferenceDataType::~ReferenceDataType() {
}

// Creates a (initially unset) field value bound to this reference type.
std::unique_ptr<FieldValue> ReferenceDataType::createFieldValue() const {
    return std::make_unique<ReferenceFieldValue>(*this);
}

// Writes a one-line description; `verbose` and `indent` are part of the
// print interface but deliberately unused here.
void ReferenceDataType::print(std::ostream& os, bool verbose, const std::string& indent) const {
    (void) verbose;
    (void) indent;
    os << "ReferenceDataType(" << _targetDocType.getName()
       << ", id " << getId() << ')';
}

ReferenceDataType* ReferenceDataType::clone() const {
    return new ReferenceDataType(_targetDocType, getId());
}

// Reference fields are leaves: any remaining field-path component is an
// error, reported with the offending suffix.
void ReferenceDataType::onBuildFieldPath(FieldPath &, const vespalib::stringref& remainingFieldName) const {
    if ( ! remainingFieldName.empty() ) {
        throw IllegalArgumentException(make_string("Reference data type does not support further field recursion: '%s'",
                                                   remainingFieldName.c_str()), VESPA_STRLOC);
    }
}

} // document
|
XpressAI/frovedis
|
src/foreign_if/python/examples/spectral_clustering_demo.py
|
#!/usr/bin/env python
"""Frovedis spectral clustering demo.

Trains on a dense matrix loaded from disk, reuses the resulting affinity
matrix as 'precomputed' input for a second fit, then saves, reloads and
releases the model. argv[1] must be the frovedis_server launch command.
"""
import sys
import numpy as np
from frovedis.exrpc.server import FrovedisServer
from frovedis.matrix.dense import FrovedisRowmajorMatrix
from frovedis.mllib.cluster import SpectralClustering

# initializing the Frovedis server (launch command taken from argv[1])
argvs = sys.argv
argc = len(argvs)
if argc < 2:
    print('Please give frovedis_server calling command as the first argument \n(e.g. "mpirun -np 2 /opt/nec/frovedis/ve/bin/frovedis_server")')
    # Fix: sys.exit(1) instead of quit() — quit() is injected by the
    # `site` module and is absent under `python -S`; a non-zero exit code
    # also correctly signals the usage error.
    sys.exit(1)
FrovedisServer.initialize(argvs[1])

#train_mat = FrovedisRowmajorMatrix(dtype=np.float64).load("./input/spectral_data.txt")
train_mat = np.loadtxt("./input/spectral_data.txt")

# creating spectral clustering object
spec = SpectralClustering(n_clusters=2, verbose=0)

# fitting the training matrix on spectral clustering object
spec.fit(train_mat)
spec.debug_print()

# extracting the affinity matrix from the trained model
aff = spec.affinity_matrix_
#aff.debug_print()

# fitting the precomputed affinity matrix for new model creation
spec.affinity = 'precomputed'
spec.fit(aff)
spec.debug_print()

# saving the trained model
spec.save("./out/MySpecClusteringModel")
#aff.release()
spec.release()

# loading the same trained model back from disk
spec.load("./out/MySpecClusteringModel", dtype=np.float64)
spec.debug_print()
spec.release()

FrovedisServer.shut_down()
|
halleyzhao/alios-mm
|
test/cow/player/pipeline_player_test.cc
|
/**
* Copyright (C) 2017 Alibaba Group Holding Limited. All Rights Reserved.
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "pipeline_player_test.h"
#include "multimedia/component_factory.h"
#include "multimedia/media_attr_str.h"
#include "multimedia/mm_debug.h"
namespace YUNOS_MM {
MM_LOG_DEFINE_MODULE_NAME("COW-PLPT")
#define FUNC_TRACK() FuncTracker tracker(MM_LOG_TAG, __FUNCTION__, __LINE__)
// #define FUNC_TRACK()
// One attribute parsed from the ATTRIBUTES: section of the config file.
typedef struct {
    int32_t index;     // config-file index of the component this applies to
    MediaMetaSP meta;  // parsed key/value payload
} ComponentAttr;

// Static description of one pipeline node parsed from the GRAPH: section.
// NOTE(review): this file-local ComponentInfo shares its name with the
// pipeline's own ComponentInfo member type used in prepareInternal() —
// confusing but intentional-looking shadowing.
class ComponentInfo{
public:
    ComponentInfo() : isEncoder(false) {}
    virtual ~ComponentInfo() { index = -1; }
    int32_t index;               // unique index in pipeline
    std::string name;            // component name
    std::string mime;            // mimetype
    bool isEncoder;              // true when the "isEncoder" field equals "true"
    std::vector<int32_t> pulls;  // indices of components this one pulls data from (addSource)
    std::vector<int32_t> pushes; // indices of components this one pushes data to (addSink)
};

// Parse results, consumed and cleared by prepareInternal().
static std::vector<ComponentAttr> compAttrs;
static const char *pipeGraphFile = "/data/pipeline.cfg";
static std::vector<ComponentInfo> compInfos;
// Parses a colon-separated component-index list, either parenthesised
// ("(1:2:3)") or bare ("1:2:3"), appending each non-negative index to
// |components|. NOTE: |comps| (for the bare form) is modified in place
// by strtok_r.
void parseLinkedComps(char* comps, std::vector<int32_t> &components)
{
    char tmp[20];
    char *ptr = comps, *saveptr = NULL, *gotptr = NULL;
    memset(tmp, 0, sizeof(tmp));
    // Bug fix: bound the scanset to 19 chars + NUL; the previous
    // unbounded "%[^)]" could overflow tmp[20] on a long payload.
    int ret = sscanf(comps, "(%19[^)])", tmp);
    if (ret == 1)
        ptr = tmp; // parenthesised form: tokenize the extracted copy
    gotptr = strtok_r(ptr, ":", &saveptr);
    while (gotptr) {
        int pull = atoi(gotptr);
        if (pull >= 0)
            components.push_back(pull);
        gotptr = strtok_r(NULL, ":", &saveptr);
    }
}
// Parses the pipeline description file (/data/pipeline.cfg) into the
// file-level compInfos (GRAPH: section) and compAttrs (ATTRIBUTES:
// section) tables. Returns false when the file is missing or a GRAPH
// line is malformed. The file is read in BUFFER_SIZE chunks; a partial
// trailing line is moved to the buffer front before the next read.
static bool parsePipelineInfo()
{
    FILE *fp = NULL;
    fp = fopen(pipeGraphFile, "r");
    if (!fp)
        return false;

#define BUFFER_SIZE 1023
#define MAX_LINE_SIZE 128
    char buffer[BUFFER_SIZE+1]; // plus one char for '\0'
    char* bufferFillStart = buffer;
    uint32_t bufferEmptySize = BUFFER_SIZE;
    bool readToEOF = false;
    // Section currently being parsed; GRAPH until a header line says otherwise.
    enum {
        GRAPH_SECTION,
        ATTRIBUTES_SECTION,
    } sectionMode = GRAPH_SECTION;

    while(1) {
        // read file to buffer
        uint32_t readSize = fread(bufferFillStart, 1, bufferEmptySize, fp);
        if (readSize < bufferEmptySize)
            readToEOF = true;
        ASSERT(bufferFillStart+readSize <= &buffer[BUFFER_SIZE+1]);
        *(bufferFillStart+readSize) = 0; // set '\0'

        // parse the buffered data line by line
        char *pos = NULL;
        char *line = strtok_r(buffer, "\n", &pos);
        while(1) {
            DEBUG("one line data: %s", line);
            if (!line)
                break;
            if (*line == '\r') // ignore '\r'
                line++;
            do { // make it easier to continue next line reading, without goto
                if ((line[0] == '/' && line[1] == '/'))
                    break; // comment, ignore
                if (!strcmp(line, "GRAPH:")) {
                    sectionMode = GRAPH_SECTION;
                    break;
                } else if (!strcmp(line, "ATTRIBUTES:")) {
                    sectionMode = ATTRIBUTES_SECTION;
                    break;
                }
#define BLANK "%*[ ]"
                int ret = -1;
                if (sectionMode == GRAPH_SECTION) {
                    // GRAPH line: {index, compName, mimeType, isEncoder, (pulls), (pushes)}
                    int index = -1;
                    char compName[20];
                    char mimeType[20];
                    char isEncoder[10];
                    char pulls[20];
                    char pushes[20];
                    memset(compName, 0, sizeof(compName));
                    memset(mimeType, 0, sizeof(mimeType));
                    memset(isEncoder, 0, sizeof(isEncoder));
                    memset(pulls, 0, sizeof(pulls));
                    memset(pushes, 0, sizeof(pushes));
                    ret = sscanf(line, "{" BLANK "%d," BLANK "%[^,]," BLANK "%[^,]," BLANK "%[^,]," BLANK "%[^,]," BLANK "%[^}]}",
                                 &index, compName, mimeType, isEncoder, pulls, pushes);
                    DEBUG("ret: %d, index: %d, compName: %s, mimeType: %s, isEncoder: %s, pulls: %s, pushes: %s",
                          ret, index, compName, mimeType, isEncoder, pulls, pushes);
                    if (ret < 6) {
                        // A malformed GRAPH line aborts the whole parse.
                        ERROR("incorrect line data: %s", line);
                        fclose(fp);
                        return false;
                    }
                    ComponentInfo compInfo;
                    compInfo.index = index;
                    compInfo.name = compName;
                    compInfo.mime = mimeType;
                    compInfo.isEncoder = !strcmp(isEncoder, "true");
                    // parse uplink/downlink components (self in master mode)
                    parseLinkedComps(pulls, compInfo.pulls);
                    parseLinkedComps(pushes, compInfo.pushes);
                    compInfos.push_back(compInfo);
                } else if (sectionMode == ATTRIBUTES_SECTION) {
                    // ATTRIBUTES line: {index, name, type, value}
                    int index = -1;
                    // FIXME, boundary check
                    char name[20];
                    char type[20];
                    char value[20];
                    memset(name, 0, sizeof(name));
                    memset(type, 0, sizeof(type));
                    memset(value, 0, sizeof(value));
                    ret = sscanf(line, "{" BLANK "%d," BLANK "%[^,]," BLANK "%[^,]," BLANK "%[^}],", &index, name, type, value);
                    DEBUG("ret: %d, index: %d, name: %s, type: %s, value: %s", ret, index, name, type, value);
                    if (ret < 4) {
                        // Malformed attribute lines are skipped, not fatal.
                        ERROR("incorrect line data: %s", line);
                        break;
                    }
                    ComponentAttr attr;
                    attr.index = index;
                    MediaMetaSP meta = MediaMeta::create();
                    if (!strcasecmp(type, "STRING")) {
                        meta->setString(name, value);
                    } else if (!strcasecmp(type, "INT32")) {
                        meta->setInt32(name, atoi(value));
                    } else if (!strcasecmp(type, "INT64")) {
                        // NOTE(review): INT64 values are parsed with atoi and
                        // stored via setInt32 — values beyond 32 bits are
                        // truncated; presumably setInt64/atoll was intended.
                        meta->setInt32(name, atoi(value));
                    }
                    attr.meta = meta;
                    meta->dump();
                    compAttrs.push_back(attr);
                }
#undef BLANK
            } while (0);
            if (!readToEOF) {
                if (pos + MAX_LINE_SIZE > &buffer[BUFFER_SIZE]) // may not enough data for one complete line
                    break;
            }
            line = strtok_r(NULL, "\n", &pos);
        }
        if (readToEOF)
            break;
        // Move the unparsed tail (a possibly incomplete line starting at
        // pos) to the buffer front and refill the freed space next round.
        bufferEmptySize = pos - buffer;
        uint32_t remainingDataSize = BUFFER_SIZE - bufferEmptySize;
        memcpy(buffer, pos, remainingDataSize);
        bufferFillStart = buffer + remainingDataSize;
    }
    fclose(fp);
    return true;
}
// Builds the player pipeline from the parsed config file: prepares the
// demuxer source, instantiates each configured component, wires their
// addSource/addSink links, applies per-component attributes, then pushes
// global media meta (surface etc.) to every component.
mm_status_t PipelinePlayerTest::prepareInternal()
{
    FUNC_TRACK();
    mm_status_t status = MM_ERROR_SUCCESS;

    compInfos.reserve(5);
    if (!parsePipelineInfo()) {
        ERROR("fail to parse pipeline config file: %s", pipeGraphFile);
        return MM_ERROR_NO_PIPELINE;
    }

    // MMAutoLock locker(mLock); NO big lock
    setState(mState, kComponentStatePreparing);

    // Prepare the source (demuxer) first; a MM_ERROR_ASYNC result means we
    // must wait until the demuxer reports the prepared state.
    PlaySourceComponent* source = getSourceComponent();
    ASSERT_RET(source, MM_ERROR_NO_COMPONENT);
    status = source->prepare();
    ASSERT_RET(status == MM_ERROR_SUCCESS || status == MM_ERROR_ASYNC, status);
    if (status == MM_ERROR_ASYNC) {
        status = waitUntilCondition(mComponents[mDemuxIndex].state, kComponentStatePrepared, false/*pipeline state*/);
    }
    if (status != MM_ERROR_SUCCESS)
        return status;

    status = updateTrackInfo();
    ASSERT_RET(status == MM_ERROR_SUCCESS, status);

    // Maps config-file component index -> slot in mComponents (slot 0 is
    // the pre-existing demuxer, hence the i+1 offsets below).
    std::map<int32_t, int32_t> compIndexMap;
    uint32_t i = 0;
    // AVDemuxer index
    compIndexMap[0] = 0;
    for (i=0; i<compInfos.size(); i++) {
        ComponentSP comp = createComponentHelper(compInfos[i].name.c_str(), compInfos[i].mime.c_str());
        ASSERT(comp);
        // NOTE(review): ComponentInfo here is the pipeline's member type,
        // not the file-local parser class with the same name.
        mComponents.push_back(ComponentInfo(comp, ComponentInfo::kComponentTypeSource));
        compIndexMap[compInfos[i].index] = i+1;
    }

    // Wire each component to its upstream (addSource) and downstream
    // (addSink) peers; media type is inferred from the mime prefix.
    for (i=0; i<compInfos.size(); i++) {
        Component::MediaType mediaType = Component::kMediaTypeVideo;
        DEBUG("config component: %s", mComponents[i+1].component->name());
        if (!strncmp(compInfos[i].mime.c_str(), "audio/", strlen("audio/")))
            mediaType = Component::kMediaTypeAudio;
        uint32_t j=0;
        for (j=0; j<compInfos[i].pulls.size(); j++) {
            std::map<int32_t, int32_t>::iterator it = compIndexMap.find(compInfos[i].pulls[j]);
            ASSERT(it != compIndexMap.end());
            DEBUG("i+1: %d, compInfos[i].pulls: %d, source data from: %d, %s", i+1, compInfos[i].pulls[j], it->second, mComponents[it->second].component->name());
            status = mComponents[i+1].component->addSource(mComponents[it->second].component.get(), mediaType);
            ASSERT_RET(status == MM_ERROR_SUCCESS, MM_ERROR_COMPONENT_CONNECT_FAILED);
        }
        for (j=0; j<compInfos[i].pushes.size(); j++) {
            std::map<int32_t, int32_t>::iterator it = compIndexMap.find(compInfos[i].pushes[j]);
            ASSERT(it != compIndexMap.end());
            DEBUG("i+1: %d, compInfos[i].pushes: %d, sink data to: %d", i+1, compInfos[i].pushes[j], it->second);
            status = mComponents[i+1].component->addSink(mComponents[it->second].component.get(), mediaType);
            ASSERT_RET(status == MM_ERROR_SUCCESS, MM_ERROR_COMPONENT_CONNECT_FAILED);
        }
    }

    // Apply per-component attributes from the ATTRIBUTES: section.
    for (i=0; i<compAttrs.size(); i++) {
        std::map<int32_t, int32_t>::iterator it = compIndexMap.find(compAttrs[i].index);
        ASSERT(it != compIndexMap.end());
        mComponents[it->second].component->setParameter(compAttrs[i].meta);
    }

    mMediaMeta->setInt32(MEDIA_ATTR_VARIABLE_RATE_SUPPORT, false);
    if (mSurface) {
        if (mIsSurfaceTexture)
            mMediaMeta->setPointer(MEDIA_ATTR_VIDEO_SURFACE_TEXTURE, mSurface);
        else
            mMediaMeta->setPointer(MEDIA_ATTR_VIDEO_SURFACE, mSurface);
    }

    MMAutoLock locker(mLock);
    //protect mSurface
    for (i=0; i<compInfos.size(); i++) {
        mComponents[i].component->setParameter(mMediaMeta);
    }

    // FIXME
#if 0
    if (videoSink) {
        ClockSP clock = audioSink->provideClock();
        videoSink->setClock(clock);
    }
#endif
    compAttrs.clear();
    compInfos.clear();
    return status;
}
} // YUNOS_MM
|
avinfinity/UnmanagedCodeSnippets
|
Segmentation/utils.cpp
|
#include "iostream"
#include "src\plugins\Application\ZeissViewer\SegmentationInterface\ZeissSegmentationInterface.hpp"
#include "itkImage.h"
#include "itkImageFileWriter.h"
#include "itkPNGImageIO.h"
#include "eigenincludes.h"
#include "VolumeInfo.h"
typedef unsigned short InternalPixelType;
typedef Eigen::Vector3f GradientPixelType;
typedef unsigned char BinaryPixelType;
typedef Eigen::Vector3i IndexType;
typedef Eigen::Vector3f ContIndexType;
const float PI = 3.1415927;
//Assumes that the index provided is inside the volume
template<typename T>
T TrilinearInterpolation(typename const T* volume, const int* volumeSize, const ContIndexType index)
{
const size_t zStep = volumeSize[0] * volumeSize[1], yStep = volumeSize[0];
int x0 = floor(index[0]), x1 = ceil(index[0]), y0 = floor(index[1]), y1 = ceil(index[1]), z0 = floor(index[2]), z1 = ceil(index[2]);
float xd = (x0 == x1) ? 0 : ((index[0] - x0) / (x1 - x0));
float yd = (y0 == y1) ? 0 : ((index[1] - y0) / (y1 - y0));
float zd = (z0 == z1) ? 0 : ((index[2] - z0) / (z1 - z0));
float c00 = *(volume + x0 + y0*yStep + z0*zStep) * (1 - xd) + *(volume + x1 + y0*yStep + z0*zStep)*xd;
float c01 = *(volume + x0 + y0*yStep + z1*zStep) * (1 - xd) + *(volume + x1 + y0*yStep + z1*zStep)*xd;
float c10 = *(volume + x0 + y1*yStep + z0*zStep) * (1 - xd) + *(volume + x1 + y1*yStep + z0*zStep)*xd;
float c11 = *(volume + x0 + y1*yStep + z1*zStep) * (1 - xd) + *(volume + x1 + y1*yStep + z1*zStep)*xd;
float c0 = c00*(1 - yd) + c10*yd;
float c1 = c01*(1 - yd) + c11*yd;
return c0*(1 - zd) + c1*zd;
}
//Assumes that the index provided is inside the volume
template<typename T>
T VectorTrilinearInterpolation(typename const T* volume, const int* volumeSize, const ContIndexType index)
{
const size_t zStep = volumeSize[0] * volumeSize[1], yStep = volumeSize[0];
int x0 = floor(index[0]), x1 = ceil(index[0]), y0 = floor(index[1]), y1 = ceil(index[1]), z0 = floor(index[2]), z1 = ceil(index[2]);
float xd = (x0 == x1) ? 0 : ((index[0] - x0) / (x1 - x0));
float yd = (y0 == y1) ? 0 : ((index[1] - y0) / (y1 - y0));
float zd = (z0 == z1) ? 0 : ((index[2] - z0) / (z1 - z0));
T c00 = *(volume + x0 + y0*yStep + z0*zStep) * (1 - xd) + *(volume + x1 + y0*yStep + z0*zStep)*xd;
T c01 = *(volume + x0 + y0*yStep + z1*zStep) * (1 - xd) + *(volume + x1 + y0*yStep + z1*zStep)*xd;
T c10 = *(volume + x0 + y1*yStep + z0*zStep) * (1 - xd) + *(volume + x1 + y1*yStep + z0*zStep)*xd;
T c11 = *(volume + x0 + y1*yStep + z1*zStep) * (1 - xd) + *(volume + x1 + y1*yStep + z1*zStep)*xd;
T c0 = c00*(1 - yd) + c10*yd;
T c1 = c01*(1 - yd) + c11*yd;
T c = c0*(1 - zd) + c1*zd;
return c;
}
template<typename T>
void saveSlice(typename const T *volData, int* volSize, std::string name)
{
typedef typename itk::Image<T, 2> Image2D;
typedef typename Image2D::IndexType ImageIndexType;
typedef typename Image2D::RegionType ImageRegionType;
typedef typename Image2D::RegionType::SizeType ImageSizeType;
typedef typename itk::ImageFileWriter<Image2D> ImageWriter;
unsigned int yStep = volSize[0], zStep = volSize[1] * volSize[0];
std::ostringstream out;
for (unsigned int xx = 0; xx < volSize[0]; xx++)
{
out.str(""); out.clear();
out << name << xx << ".png";
ImageWriter::Pointer TestWriter = ImageWriter::New();
TestWriter->SetImageIO(itk::PNGImageIO::New());
ImageIndexType SliceStart; ImageSizeType SliceSize; ImageRegionType SliceRegion;
SliceStart.Fill(0); SliceSize[0] = volSize[1]; SliceSize[1] = volSize[2];
SliceRegion.SetIndex(SliceStart); SliceRegion.SetSize(SliceSize);
Image2D::Pointer Slice = Image2D::New();
Slice->SetRegions(SliceRegion); Slice->Allocate();
for (unsigned int yy = 0; yy < volSize[1]; yy++)
for (unsigned int zz = 0; zz < volSize[2]; zz++)
{
ImageIndexType temp;
temp[0] = yy; temp[1] = zz;
Slice->SetPixel(temp, *(volData + xx + yy*yStep + zz*zStep));
}
TestWriter->SetInput(Slice);
TestWriter->SetFileName(out.str());
try
{
TestWriter->Update();
}
catch (itk::ExceptionObject &err)
{
std::cout << "Error Occured During Writing Slice:" << xx << std::endl;
std::cout << err;
return;
}
}
}
// Per-material intensity statistics and clamped segmentation limits,
// derived from the detected material regions and the volume histogram.
// NOTE(review): the five new[] arrays are never freed (no destructor),
// and the implicit copy constructor would share the raw pointers —
// rule-of-three violation / memory leak; std::vector members would fix
// both. Flagged only, since callers may rely on the public layout.
struct Regions
{
public:
    InternalPixelType *deviation, *LowerLimits, *UpperLimits, *means, *meanDeviation;
    std::vector<MaterialRegion> regions;

    // For each region, computes: the mean intensity, the deviation
    // around the region's peak, the deviation around the mean, and the
    // [LowerLimits, UpperLimits] window — the region bounds clamped to
    // peak +/- deviation. The first region always starts at 0 and the
    // last extends to the maximum representable intensity.
    Regions(Materials M, unsigned long long* histogram)
    {
        regions = M.regions;
        //deviation = paramDeviation;
        LowerLimits = new InternalPixelType[regions.size()];
        UpperLimits = new InternalPixelType[regions.size()];
        deviation = new InternalPixelType[regions.size()];
        means = new InternalPixelType[regions.size()];
        meanDeviation = new InternalPixelType[regions.size()];
        for (int i = 0; i < regions.size(); i++)
        {
            // Effective histogram bounds for this region.
            InternalPixelType LowerBound, UpperBound;
            if (i == 0)
            {
                LowerBound = 0;
                UpperBound = regions[i].upper_bound;
            }
            else if (i == regions.size() - 1)
            {
                LowerBound = regions[i].lower_bound;
                UpperBound = std::numeric_limits<InternalPixelType>::max();
            }
            else
            {
                LowerBound = regions[i].lower_bound;
                UpperBound = regions[i].upper_bound;
            }
            // Mean and peak-centered deviation over the histogram window.
            // The (x >= LowerBound) term guards against x++ wrapping
            // around when UpperBound is the type's maximum value.
            float sum = 0, mean = 0, dev = 0;
            for (InternalPixelType x = LowerBound; (x <= UpperBound) && (x >= LowerBound); x++)
            {
                sum += histogram[x];
                mean += x*histogram[x];
                dev += pow((x - 1.0*regions[i].peak), 2)*histogram[x];
            }
            means[i] = ceil(mean / sum);
            deviation[i] = ceil(sqrt(dev / sum));
            // Second pass: deviation around the computed mean.
            dev = 0;
            for (InternalPixelType x = LowerBound; (x <= UpperBound) && (x >= LowerBound); x++)
            {
                dev += pow((x - 1.0*means[i]), 2)*histogram[x];
            }
            meanDeviation[i] = ceil(sqrt(dev / sum));
            // Clamp the region limits to peak +/- deviation, except at the
            // outer edges of the first and last regions.
            if (i == 0)
            {
                LowerLimits[i] = 0;
                //UpperLimits[i] = regions[i].upper_bound;
                if (regions[i].upper_bound < (regions[i].peak + deviation[i]))
                    UpperLimits[i] = regions[i].upper_bound;
                else
                    UpperLimits[i] = regions[i].peak + deviation[i];
            }
            else if (i == (regions.size() - 1))
            {
                //LowerLimits[i] = regions[i].lower_bound;
                if (regions[i].lower_bound >(regions[i].peak - deviation[i]))
                    LowerLimits[i] = regions[i].lower_bound;
                else
                    LowerLimits[i] = regions[i].peak - deviation[i];
                UpperLimits[i] = std::numeric_limits <InternalPixelType>::max();
            }
            else
            {
                if (regions[i].lower_bound > (regions[i].peak - deviation[i]))
                    LowerLimits[i] = regions[i].lower_bound;
                else
                    LowerLimits[i] = regions[i].peak - deviation[i];
                if (regions[i].upper_bound < (regions[i].peak + deviation[i]))
                    UpperLimits[i] = regions[i].upper_bound;
                else
                    UpperLimits[i] = regions[i].peak + deviation[i];
            }
        }
    }

    // Returns the index of the region whose [LowerLimits, UpperLimits]
    // window contains `intensity`, or -1 if none does.
    int getRegion(InternalPixelType intensity) //returns -1 if doesn't belong to region
    {
        for (int i = 0; i < regions.size(); i++)
        {
            if ((intensity >= LowerLimits[i]) && (intensity <= UpperLimits[i]))
                return i;
        }
        return -1;
    }
};
// Records one pending voxel relabeling: the voxel's 3D index and the
// label value to assign to it.
struct ValueChange
{
public:
    IndexType index;
    unsigned char value;
    ValueChange(IndexType paramIndex, unsigned char paramValue) : index(paramIndex), value(paramValue) { }
};
//InternalPixelType* calculateDeviation(Materials M, unsigned long long* histogram)
//{
// InternalPixelType* deviation = new InternalPixelType[M.regions.size()];
// for (unsigned int i = 0; i < M.regions.size(); i++)
// {
// InternalPixelType LowerBound, peak, UpperBound;
// LowerBound = M.regions[i].lower_bound;
// UpperBound = M.regions[i].upper_bound;
// peak = M.regions[i].peak;
// double materialDeviation = 0;
// double voxelCount = 0;
// for (unsigned int j = LowerBound; j <= UpperBound; j++)
// {
// voxelCount += histogram[j];
// materialDeviation += pow((j - 1.0*peak), 2)*histogram[j];
// }
// deviation[i] = ceil(sqrt(materialDeviation / voxelCount));
// }
// return deviation;
//}
//BinaryPixelType MaxLogHMRFLikelihood(int xx, int yy, int zz,size_t yStep, size_t zStep, imt::volume::VolumeInfo& volInfo, BinaryPixelType* Segmentation, Regions& regions, float beta)
//{
// int num_labels = regions.regions.size();
// InternalPixelType* VolumeData = (InternalPixelType*)volInfo.mVolumeData;
// float minimum = std::numeric_limits<float>::max();
// BinaryPixelType currentSegmentation = *(Segmentation + xx + yy*yStep + zz*zStep);
// BinaryPixelType neigh = 0;
//
// for (int dz = zz - 1; dz <= zz + 1; dz++)
// for (int dy = yy - 1; dy <= yy + 1; dy++)
// for (int dx = xx - 1; dx <= xx + 1; dx++)
// {
// neigh |= (1 << *(Segmentation + dx + dy*yStep + dz*zStep));
// }
//
// for (int i = 0; i < num_labels; i++)
// {
// if ((neigh >> i) & 1)
// {
// float logLikelihood = std::log(regions.meanDeviation[i]) + std::pow(*(VolumeData + xx + yy*yStep + zz*zStep) - (1.0)*regions.means[i], 2) / (2 * std::pow(regions.meanDeviation[i], 2));
//
// for (int dz = zz - 1; dz <= zz + 1; dz++)
// for (int dy = yy - 1; dy <= yy + 1; dy++)
// for (int dx = xx - 1; dx <= xx + 1; dx++)
// {
// if ((dx == xx) && (dy == yy) && (dz == zz))
// continue;
// if (i == *(Segmentation + dx + dy*yStep + dz*zStep))
// logLikelihood -= beta;
// }
//
// if (logLikelihood < minimum)
// {
// minimum = logLikelihood;
// currentSegmentation = i;
// }
// }
// }
// return currentSegmentation;
//}
|
YukkaSarasti/pythonintask
|
IVTp/2014/Shcherbakov_R_A/task_07_22.py
|
<filename>IVTp/2014/Shcherbakov_R_A/task_07_22.py
# Task 7. Variant 22.
# Build a scoring system for task 6 in which the player earns more
# points the fewer attempts they use.
# <NAME>.
# 22.05.2016
import random
# Prompt (Russian): "The computer picked the name of one of the two
# brothers who founded Rome — try to guess it."
print("Комп загадал имя одного из двух братьев основавшие рим, попробуй угадать что ли")
name='Рем', 'Ромул'  # the candidates: ("Remus", "Romulus")
rand=random.randint(0,1)  # index of the secret name
ugad=""  # most recent guess
bal=6  # score: starts at 6, decremented once per attempt
while ugad!=name[rand]:
    bal=bal-1
    ugad=input("Введите слово:")  # prompt: "Enter a word:"
# Final message: "You finally guessed it! Your score: <bal>"
input("\nВы наконец то угадали!\nВаше количество баллов: "+str(bal)+"\n\nok")
|
DigitalInnovation/cucumber-jvm
|
java/src/test/java/io/cucumber/java/JavaDefaultParameterTransformerDefinitionTest.java
|
<gh_stars>1-10
package io.cucumber.java;
import io.cucumber.core.backend.Lookup;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Map;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Tests for {@code JavaDefaultParameterTransformerDefinition}: a
 * {@code @DefaultParameterTransformer} method must take
 * {@code (String|Object fromValue, Type toValueType)} and return non-void;
 * any other signature is rejected with {@code InvalidMethodSignatureException}.
 */
class JavaDefaultParameterTransformerDefinitionTest {

    // Lookup stub that resolves every glue class to this test instance, so
    // the fixture transformer methods below are invoked on `this`.
    private final Lookup lookup = new Lookup() {
        @Override
        @SuppressWarnings("unchecked")
        public <T> T getInstance(Class<T> glueClass) {
            return (T) JavaDefaultParameterTransformerDefinitionTest.this;
        }
    };

    // Valid signature: (String, Type) -> Object.
    @Test
    void can_transform_string_to_type() throws Throwable {
        Method method = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("transform_string_to_type", String.class, Type.class);
        JavaDefaultParameterTransformerDefinition definition = new JavaDefaultParameterTransformerDefinition(method, lookup);
        Object transformed = definition.parameterByTypeTransformer().transform("something", String.class);
        assertThat(transformed, is("transform_string_to_type"));
    }

    // Fixture glue method; returns its own name so tests can tell which
    // method was invoked.
    public Object transform_string_to_type(String fromValue, Type toValueType) {
        return "transform_string_to_type";
    }

    // Valid signature: (Object, Type) -> Object.
    @Test
    void can_transform_object_to_type() throws Throwable {
        Method method = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("transform_object_to_type", Object.class, Type.class);
        JavaDefaultParameterTransformerDefinition definition = new JavaDefaultParameterTransformerDefinition(method, lookup);
        String transformed = (String) definition.parameterByTypeTransformer().transform("something", String.class);
        assertThat(transformed, is("transform_object_to_type"));
    }

    public Object transform_object_to_type(Object fromValue, Type toValueType) {
        return "transform_object_to_type";
    }

    // void return is rejected, and the error message names the offending method.
    @Test
    void must_have_non_void_return() throws Throwable {
        Method method = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("transforms_string_to_void", String.class, Type.class);
        InvalidMethodSignatureException exception = assertThrows(InvalidMethodSignatureException.class, () -> new JavaDefaultParameterTransformerDefinition(method, lookup));
        assertThat(exception.getMessage(), startsWith("" +
            "A @DefaultParameterTransformer annotated method must have one of these signatures:\n" +
            " * public Object defaultDataTableEntry(String fromValue, Type toValueType)\n" +
            " * public Object defaultDataTableEntry(Object fromValue, Type toValueType)\n" +
            "at io.cucumber.java.JavaDefaultParameterTransformerDefinitionTest.transforms_string_to_void(String,Type) in"
        ));
    }

    public void transforms_string_to_void(String fromValue, Type toValueType) {
    }

    // Exactly two parameters are required: one and three both fail.
    @Test
    void must_have_two_arguments() throws Throwable {
        Method oneArg = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("one_argument", String.class);
        assertThrows(InvalidMethodSignatureException.class, () -> new JavaDefaultParameterTransformerDefinition(oneArg, lookup));
        Method threeArg = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("three_arguments", String.class, Type.class, Object.class);
        assertThrows(InvalidMethodSignatureException.class, () -> new JavaDefaultParameterTransformerDefinition(threeArg, lookup));
    }

    public Object one_argument(String fromValue) {
        return "one_arguments";
    }

    public Object three_arguments(String fromValue, Type toValueType, Object extra) {
        return "three_arguments";
    }

    // First parameter must be String or Object — Map is rejected.
    @Test
    void must_have_string_or_object_as_from_value() throws Throwable {
        Method threeArg = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("map_as_from_value", Map.class, Type.class);
        assertThrows(InvalidMethodSignatureException.class, () -> new JavaDefaultParameterTransformerDefinition(threeArg, lookup));
    }

    public Object map_as_from_value(Map<String, String> fromValue, Type toValueType) {
        return "map_as_from_value";
    }

    // Second parameter must be java.lang.reflect.Type — Object is rejected.
    @Test
    void must_have_type_as_to_value_type() throws Throwable {
        Method threeArg = JavaDefaultParameterTransformerDefinitionTest.class.getMethod("object_as_to_value_type", String.class, Object.class);
        assertThrows(InvalidMethodSignatureException.class, () -> new JavaDefaultParameterTransformerDefinition(threeArg, lookup));
    }

    public Object object_as_to_value_type(String fromValue, Object toValueType) {
        return "object_as_to_value_type";
    }
}
|
qrac/musubii
|
src/components/previews/preview-badge.js
|
import React from "react"
import beautify from "js-beautify"
import DemoOption from "~/components/parts/demo-option"
import DemoOptionBoxRadios from "~/components/parts/demo-option-box-radios"
import DemoOptionBoxCheckbox from "~/components/parts/demo-option-box-checkbox"
import DemoPre from "~/components/parts/demo-pre"
// Sample badges rendered in every preview: `role` is the color modifier
// class, `text` the (Japanese) label, `icon` the Font Awesome icon name.
const items = [
  { id: 0, role: "", text: "任意", icon: "tag" },
  { id: 1, role: "is-primary", text: "推奨", icon: "check" },
  { id: 2, role: "is-info", text: "情報", icon: "info" },
  { id: 3, role: "is-success", text: "重要", icon: "check" },
  { id: 4, role: "is-warning", text: "注意", icon: "info" },
  { id: 5, role: "is-danger", text: "必須", icon: "minus" },
]
// Selectable wrapper tags for the generated markup.
const tags = [
  { id: 0, text: "Span", value: "span" },
  { id: 1, text: "Div", value: "div" },
]
// Selectable visual patterns (filled vs. outlined).
const patterns = [
  { id: 0, text: "Plain", value: "is-plain" },
  { id: 1, text: "Outline", value: "is-outline" },
]
// js-beautify options for the HTML snippet shown in <DemoPre>.
const beautifyHtmlOptions = {
  inline: ["i"],
  indent_size: 2,
}
// Interactive preview of basic badges: the user picks the wrapper tag,
// visual pattern and option flags (strong/round/disabled); the component
// renders the live markup and its beautified HTML source.
export class PreviewBadgeBasic extends React.Component {
  constructor(props) {
    super(props)
    // Demo option state, driven by the controls below.
    this.state = {
      tag: "span",
      pattern: "is-plain",
      strong: false,
      round: false,
      disabled: false,
    }
    this.changeTag = this.changeTag.bind(this)
    this.changePattern = this.changePattern.bind(this)
    this.toggleStrong = this.toggleStrong.bind(this)
    this.toggleRound = this.toggleRound.bind(this)
    this.toggleDisabled = this.toggleDisabled.bind(this)
  }
  changeTag(value) {
    this.setState({ tag: value })
  }
  changePattern(value) {
    this.setState({ pattern: value })
  }
  toggleStrong() {
    this.setState({ strong: !this.state.strong })
  }
  toggleRound() {
    this.setState({ round: !this.state.round })
  }
  toggleDisabled() {
    this.setState({ disabled: !this.state.disabled })
  }
  render() {
    const tag = this.state.tag
    const pattern = this.state.pattern
    const strong = this.state.strong ? "is-strong" : ""
    const round = this.state.round ? "is-round" : ""
    const disabled = this.state.disabled
    const disabledClass = disabled ? "is-disabled" : ""
    // Build one badge element per sample item, then collapse the
    // whitespace introduced by the multi-line template literal.
    const contents = items
      .map(
        (item) =>
          `<${tag} class="badge ${pattern} ${item.role} ${strong} ${round} ${disabledClass}"
          >${item.text}</${tag}>`
      )
      .join("")
      .replace(/\s+/g, " ")
      .replace(/\s\"/g, '"')
    const formattedCode = beautify.html(contents, beautifyHtmlOptions)
    return (
      <div className="demo-box is-preview">
        <div className="demo-options-wrap">
          <div className="demo-options">
            <DemoOption title={"Tag"}>
              <DemoOptionBoxRadios
                patterns={tags}
                name="radio-badge-basic-tag"
                parentChange={(value) => this.changeTag(value)}
                checked={this.state.tag}
              />
            </DemoOption>
            <DemoOption title={"Pattern"}>
              <DemoOptionBoxRadios
                patterns={patterns}
                name="radio-badge-basic-pattern"
                parentChange={(value) => this.changePattern(value)}
                checked={this.state.pattern}
              />
            </DemoOption>
          </div>
          <div className="demo-options">
            <DemoOption title={"Other"}>
              <DemoOptionBoxCheckbox
                text={"Strong"}
                parentChange={() => this.toggleStrong()}
                checked={this.state.strong}
              />
              <DemoOptionBoxCheckbox
                text={"Round"}
                parentChange={() => this.toggleRound()}
                checked={this.state.round}
              />
              <DemoOptionBoxCheckbox
                text={"Disabled"}
                parentChange={() => this.toggleDisabled()}
                checked={this.state.disabled}
              />
            </DemoOption>
          </div>
        </div>
        {/* Live rendering of the generated badge markup. */}
        <div className="demo-box is-line">
          <div
            className="box is-flex is-space-right-xs is-space-bottom-xs is-margin-right-minus-xs is-margin-bottom-minus-xs"
            dangerouslySetInnerHTML={{ __html: formattedCode }}
          ></div>
        </div>
        {/* Beautified HTML source of the same markup. */}
        <DemoPre language="html" code={formattedCode} />
      </div>
    )
  }
}
// Preview of square icon badges (`is-square`): same structure as
// PreviewBadgeBasic, but each badge renders only a Font Awesome icon
// and there are no strong/round options.
export class PreviewBadgeSquare extends React.Component {
  constructor(props) {
    super(props)
    // Demo option state, driven by the controls below.
    this.state = {
      tag: "span",
      pattern: "is-plain",
      disabled: false,
    }
    this.changeTag = this.changeTag.bind(this)
    this.changePattern = this.changePattern.bind(this)
    this.toggleDisabled = this.toggleDisabled.bind(this)
  }
  changeTag(value) {
    this.setState({ tag: value })
  }
  changePattern(value) {
    this.setState({ pattern: value })
  }
  toggleDisabled() {
    this.setState({ disabled: !this.state.disabled })
  }
  render() {
    const tag = this.state.tag
    const pattern = this.state.pattern
    const disabled = this.state.disabled
    const disabledClass = disabled ? "is-disabled" : ""
    // One icon badge per sample item; collapse template-literal whitespace.
    const contents = items
      .map(
        (item) =>
          `<${tag} class="badge ${pattern} is-square ${item.role} ${disabledClass}"
          ><i aria-hidden="true" class="fas fa-${item.icon}"></i></${tag}>`
      )
      .join("")
      .replace(/\s+/g, " ")
      .replace(/\s\"/g, '"')
    const formattedCode = beautify.html(contents, beautifyHtmlOptions)
    return (
      <div className="demo-box is-preview">
        <div className="demo-options-wrap">
          <div className="demo-options">
            <DemoOption title={"Tag"}>
              <DemoOptionBoxRadios
                patterns={tags}
                name="radio-badge-square-tag"
                parentChange={(value) => this.changeTag(value)}
                checked={this.state.tag}
              />
            </DemoOption>
            <DemoOption title={"Pattern"}>
              <DemoOptionBoxRadios
                patterns={patterns}
                name="radio-badge-square-pattern"
                parentChange={(value) => this.changePattern(value)}
                checked={this.state.pattern}
              />
            </DemoOption>
          </div>
          <div className="demo-options">
            <DemoOption title={"Other"}>
              <DemoOptionBoxCheckbox
                text={"Disabled"}
                parentChange={() => this.toggleDisabled()}
                checked={this.state.disabled}
              />
            </DemoOption>
          </div>
        </div>
        {/* Live rendering of the generated badge markup. */}
        <div className="demo-box is-line">
          <div
            className="box is-flex is-space-right-xs is-space-bottom-xs is-margin-bottom-minus-xs"
            dangerouslySetInnerHTML={{ __html: formattedCode }}
          ></div>
        </div>
        {/* Beautified HTML source of the same markup. */}
        <DemoPre language="html" code={formattedCode} />
      </div>
    )
  }
}
// Preview of circular icon badges (`is-circle`): identical to
// PreviewBadgeSquare except for the shape modifier class and the radio
// group names.
export class PreviewBadgeCircle extends React.Component {
  constructor(props) {
    super(props)
    // Demo option state, driven by the controls below.
    this.state = {
      tag: "span",
      pattern: "is-plain",
      disabled: false,
    }
    this.changeTag = this.changeTag.bind(this)
    this.changePattern = this.changePattern.bind(this)
    this.toggleDisabled = this.toggleDisabled.bind(this)
  }
  changeTag(value) {
    this.setState({ tag: value })
  }
  changePattern(value) {
    this.setState({ pattern: value })
  }
  toggleDisabled() {
    this.setState({ disabled: !this.state.disabled })
  }
  render() {
    const tag = this.state.tag
    const pattern = this.state.pattern
    const disabled = this.state.disabled
    const disabledClass = disabled ? "is-disabled" : ""
    // One icon badge per sample item; collapse template-literal whitespace.
    const contents = items
      .map(
        (item) =>
          `<${tag} class="badge ${pattern} is-circle ${item.role} ${disabledClass}"
          ><i aria-hidden="true" class="fas fa-${item.icon}"></i></${tag}>`
      )
      .join("")
      .replace(/\s+/g, " ")
      .replace(/\s\"/g, '"')
    const formattedCode = beautify.html(contents, beautifyHtmlOptions)
    return (
      <div className="demo-box is-preview">
        <div className="demo-options-wrap">
          <div className="demo-options">
            <DemoOption title={"Tag"}>
              <DemoOptionBoxRadios
                patterns={tags}
                name="radio-badge-circle-tag"
                parentChange={(value) => this.changeTag(value)}
                checked={this.state.tag}
              />
            </DemoOption>
            <DemoOption title={"Pattern"}>
              <DemoOptionBoxRadios
                patterns={patterns}
                name="radio-badge-circle-pattern"
                parentChange={(value) => this.changePattern(value)}
                checked={this.state.pattern}
              />
            </DemoOption>
          </div>
          <div className="demo-options">
            <DemoOption title={"Other"}>
              <DemoOptionBoxCheckbox
                text={"Disabled"}
                parentChange={() => this.toggleDisabled()}
                checked={this.state.disabled}
              />
            </DemoOption>
          </div>
        </div>
        {/* Live rendering of the generated badge markup. */}
        <div className="demo-box is-line">
          <div
            className="box is-flex is-space-right-xs is-space-bottom-xs is-margin-bottom-minus-xs"
            dangerouslySetInnerHTML={{ __html: formattedCode }}
          ></div>
        </div>
        {/* Beautified HTML source of the same markup. */}
        <DemoPre language="html" code={formattedCode} />
      </div>
    )
  }
}
|
unparalleled/kcards
|
app/src/main/java/com/mrkevinthomas/kcards/card_swipe/CardSwipeActivity.java
|
package com.mrkevinthomas.kcards.card_swipe;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.widget.Toast;
import com.lorentzos.flingswipe.SwipeFlingAdapterView;
import com.mrkevinthomas.kcards.BaseActivity;
import com.mrkevinthomas.kcards.CardViewActivity;
import com.mrkevinthomas.kcards.R;
import com.mrkevinthomas.kcards.ThisApp;
import com.mrkevinthomas.kcards.models.Card;
import com.mrkevinthomas.kcards.models.Deck;
public class CardSwipeActivity extends CardViewActivity {
public static void launchActivity(Activity parent, Deck deck) {
if (deck.getCards().isEmpty()) {
Toast.makeText(parent, R.string.no_cards_to_practice, Toast.LENGTH_LONG).show();
return;
}
Intent intent = new Intent(parent, CardSwipeActivity.class);
intent.putExtra(BaseActivity.ARG_DECK, deck);
parent.startActivityForResult(intent, BaseActivity.REQUEST_DECK);
}
private SwipeFlingAdapterView swipeFlingAdapterView;
private CardSwipeAdapter cardSwipeAdapter;
@Override
protected int getViewId() {
return R.layout.card_swipe;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
cardSwipeAdapter = new CardSwipeAdapter(this, deck, isSwapped, isHidden);
swipeFlingAdapterView = (SwipeFlingAdapterView) findViewById(R.id.swipe_container);
swipeFlingAdapterView.setAdapter(cardSwipeAdapter);
swipeFlingAdapterView.setFlingListener(new SwipeFlingAdapterView.onFlingListener() {
@Override
public void removeFirstObjectInAdapter() {
cardSwipeAdapter.removeFirst();
}
@Override
public void onLeftCardExit(Object o) {
Card card = (Card) o;
card.incrementIncorrect();
ThisApp.get().showToastView(R.layout.toast_incorrect);
}
@Override
public void onRightCardExit(Object o) {
Card card = (Card) o;
card.incrementCorrect();
ThisApp.get().showToastView(R.layout.toast_correct);
}
@Override
public void onAdapterAboutToEmpty(int i) {
cardSwipeAdapter.chooseCardsToShow();
}
@Override
public void onScroll(float v) {
}
});
}
@Override
protected void handleSwapActionClicked() {
super.handleSwapActionClicked();
swipeFlingAdapterView.removeAllViewsInLayout(); // workaround for https://github.com/Diolor/Swipecards/issues/29
cardSwipeAdapter.setSwapped(isSwapped);
}
@Override
protected void handleShowHideActionClicked() {
super.handleShowHideActionClicked();
swipeFlingAdapterView.removeAllViewsInLayout(); // workaround for https://github.com/Diolor/Swipecards/issues/29
cardSwipeAdapter.setHidden(isHidden);
}
public SwipeFlingAdapterView getSwipeFlingAdapterView() {
return swipeFlingAdapterView;
}
}
|
robbypambudi/Struktur-Data
|
Tugas [6]/Exercises_9_No_4/bst_empty.c
|
<reponame>robbypambudi/Struktur-Data<gh_stars>1-10
// Returns true when the BST holds no nodes (its root is unset), false otherwise.
#include "header.h"

bool bst_empty(BST *bst)
{
    return NULL == bst->_root;
}
|
aicas/s2n-tls
|
tests/unit/s2n_certificate_extensions_test.c
|
<reponame>aicas/s2n-tls
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <string.h>
#include <stdio.h>
#include <s2n.h>
#include "s2n_test.h"
#include "testlib/s2n_testlib.h"
#include "tls/extensions/s2n_extension_list.h"
#include "tls/s2n_cipher_suites.h"
#include "tls/s2n_tls.h"
#include "tls/s2n_tls13.h"
#include "error/s2n_errno.h"
#include "stuffer/s2n_stuffer.h"
#include "utils/s2n_safety.h"
s2n_cert_public_key public_key;
s2n_pkey_type actual_cert_pkey_type;
/* Consumes the 3-byte total chain length prefix and checks that it matches
 * the number of bytes left in the stuffer. */
static int s2n_skip_cert_chain_size(struct s2n_stuffer *stuffer)
{
    uint32_t chain_size = 0;
    GUARD(s2n_stuffer_read_uint24(stuffer, &chain_size));
    eq_check(chain_size, s2n_stuffer_data_available(stuffer));
    return S2N_SUCCESS;
}
/* Consumes one certificate entry: its 3-byte length prefix followed by
 * that many bytes of certificate data. */
static int s2n_skip_cert(struct s2n_stuffer *stuffer)
{
    uint32_t size = 0;
    GUARD(s2n_stuffer_read_uint24(stuffer, &size));
    GUARD(s2n_stuffer_skip_read(stuffer, size));
    return S2N_SUCCESS;
}
/* Skips the chain-length prefix, then feeds the remaining raw chain bytes to
 * s2n_x509_validator_validate_cert_chain. The extracted public key (stored in
 * the file-level |public_key|) is freed before returning. */
static int s2n_x509_validator_validate_cert_chain_test(struct s2n_connection *conn, struct s2n_stuffer *stuffer)
{
    GUARD(s2n_skip_cert_chain_size(stuffer));
    uint32_t remaining = s2n_stuffer_data_available(stuffer);
    uint8_t *chain_data = s2n_stuffer_raw_read(stuffer, remaining);
    notnull_check(chain_data);
    GUARD(s2n_x509_validator_validate_cert_chain(&conn->x509_validator, conn,
            chain_data, remaining, &actual_cert_pkey_type, &public_key));
    GUARD(s2n_pkey_free(&public_key));
    return S2N_SUCCESS;
}
/* Writes the head certificate of |chain_and_key| into |stuffer| in wire
 * format: a 3-byte length followed by the raw DER bytes. */
static int s2n_write_test_cert(struct s2n_stuffer *stuffer, struct s2n_cert_chain_and_key *chain_and_key)
{
    struct s2n_blob *raw_cert = &chain_and_key->cert_chain->head->raw;
    GUARD(s2n_stuffer_write_uint24(stuffer, raw_cert->size));
    GUARD(s2n_stuffer_write_bytes(stuffer, raw_cert->data, raw_cert->size));
    return S2N_SUCCESS;
}
/* Creates a TLS1.3 server connection wired to |chain_and_key| that requests
 * OCSP stapling and accepts all response extensions. On success the new
 * connection is returned through |conn|. */
static int s2n_setup_connection_for_ocsp_validate_test(struct s2n_connection **conn, struct s2n_cert_chain_and_key *chain_and_key)
{
    struct s2n_connection *server_conn = s2n_connection_new(S2N_SERVER);
    notnull_check(server_conn);
    server_conn->actual_protocol_version = S2N_TLS13;
    server_conn->handshake_params.our_chain_and_key = chain_and_key;
    GUARD(s2n_connection_allow_all_response_extensions(server_conn));
    server_conn->status_type = S2N_STATUS_REQUEST_OCSP;
    *conn = server_conn;
    return S2N_SUCCESS;
}
/* Exercises certificate-message extensions: that s2n_send_cert_chain emits
 * them (TLS1.3 only, first cert only) and that the validator consumes them.
 *
 * Note: helper invocations inside main use EXPECT_SUCCESS, not GUARD — GUARD
 * would silently `return -1` from main on failure instead of reporting the
 * failure through the test harness. */
int main(int argc, char **argv)
{
    BEGIN_TEST();
    EXPECT_SUCCESS(s2n_enable_tls13());
    struct s2n_config *config;
    EXPECT_NOT_NULL(config = s2n_config_new());
    EXPECT_SUCCESS(s2n_pkey_zero_init(&public_key));
    /* Initialize cert chain */
    struct s2n_cert_chain_and_key *chain_and_key;
    EXPECT_SUCCESS(s2n_test_cert_chain_and_key_new(&chain_and_key,
            S2N_DEFAULT_TEST_CERT_CHAIN, S2N_DEFAULT_TEST_PRIVATE_KEY));
    EXPECT_SUCCESS(s2n_config_add_cert_chain_and_key_to_store(config, chain_and_key));
    /* Initialize cert extension data: the same bytes double as OCSP response
     * and SCT list so both extensions can be verified with one buffer. */
    uint8_t data[] = "extension data";
    EXPECT_SUCCESS(s2n_cert_chain_and_key_set_ocsp_data(chain_and_key, data, s2n_array_len(data)));
    EXPECT_SUCCESS(s2n_cert_chain_and_key_set_sct_list(chain_and_key, data, s2n_array_len(data)));
    /* Test: s2n_send_cert_chain sends extensions */
    {
        /* Test: extensions only sent for >= TLS1.3 */
        {
            struct s2n_connection *conn;
            EXPECT_NOT_NULL(conn = s2n_connection_new(S2N_SERVER));
            conn->handshake_params.our_chain_and_key = chain_and_key;
            EXPECT_SUCCESS(s2n_connection_allow_all_response_extensions(conn));
            conn->status_type = S2N_STATUS_REQUEST_OCSP;
            /* TLS1.2 does NOT send extensions */
            {
                DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
                EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
                conn->actual_protocol_version = S2N_TLS12;
                EXPECT_SUCCESS(s2n_send_cert_chain(conn, &stuffer, chain_and_key));
                s2n_parsed_extensions_list extensions;
                EXPECT_SUCCESS(s2n_skip_cert_chain_size(&stuffer));
                EXPECT_SUCCESS(s2n_skip_cert(&stuffer));
                EXPECT_FAILURE_WITH_ERRNO(s2n_extension_list_parse(&stuffer, &extensions),
                        S2N_ERR_BAD_MESSAGE);
            }
            /* TLS1.3 DOES send extensions */
            {
                DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
                EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
                conn->actual_protocol_version = S2N_TLS13;
                EXPECT_SUCCESS(s2n_send_cert_chain(conn, &stuffer, chain_and_key));
                s2n_parsed_extensions_list extensions;
                EXPECT_SUCCESS(s2n_skip_cert_chain_size(&stuffer));
                EXPECT_SUCCESS(s2n_skip_cert(&stuffer));
                EXPECT_SUCCESS(s2n_extension_list_parse(&stuffer, &extensions));
                EXPECT_PARSED_EXTENSION_LIST_NOT_EMPTY(extensions);
            }
            EXPECT_SUCCESS(s2n_connection_free(conn));
        }
        /* Test: extensions only sent on first certificate */
        {
            struct s2n_connection *conn;
            EXPECT_NOT_NULL(conn = s2n_connection_new(S2N_SERVER));
            conn->handshake_params.our_chain_and_key = chain_and_key;
            DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
            EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
            EXPECT_SUCCESS(s2n_connection_allow_all_response_extensions(conn));
            conn->status_type = S2N_STATUS_REQUEST_OCSP;
            conn->actual_protocol_version = S2N_TLS13;
            EXPECT_SUCCESS(s2n_send_cert_chain(conn, &stuffer, chain_and_key));
            s2n_parsed_extensions_list extensions;
            EXPECT_SUCCESS(s2n_skip_cert_chain_size(&stuffer));
            /* First cert includes extensions */
            EXPECT_SUCCESS(s2n_skip_cert(&stuffer));
            EXPECT_SUCCESS(s2n_extension_list_parse(&stuffer, &extensions));
            EXPECT_PARSED_EXTENSION_LIST_NOT_EMPTY(extensions);
            /* Other certs do not include extensions */
            do {
                EXPECT_SUCCESS(s2n_skip_cert(&stuffer));
                EXPECT_SUCCESS(s2n_extension_list_parse(&stuffer, &extensions));
                EXPECT_PARSED_EXTENSION_LIST_EMPTY(extensions);
            } while(s2n_stuffer_data_available(&stuffer));
            EXPECT_SUCCESS(s2n_connection_free(conn));
        }
    }
    /* Test: s2n_x509_validator_validate_cert_chain handles the output of s2n_send_cert_chain */
    {
        /* Test: with no extensions */
        {
            struct s2n_connection *conn;
            EXPECT_NOT_NULL(conn = s2n_connection_new(S2N_SERVER));
            conn->actual_protocol_version = S2N_TLS13;
            conn->handshake_params.our_chain_and_key = chain_and_key;
            DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
            EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
            EXPECT_SUCCESS(s2n_send_cert_chain(conn, &stuffer, chain_and_key));
            EXPECT_SUCCESS(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
            EXPECT_SUCCESS(s2n_connection_free(conn));
        }
        /* Test: with extensions */
        {
            struct s2n_connection *conn;
            EXPECT_NOT_NULL(conn = s2n_connection_new(S2N_SERVER));
            conn->actual_protocol_version = S2N_TLS13;
            conn->handshake_params.our_chain_and_key = chain_and_key;
            DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
            EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
            EXPECT_SUCCESS(s2n_connection_allow_all_response_extensions(conn));
            conn->status_type = S2N_STATUS_REQUEST_OCSP;
            conn->ct_level_requested = S2N_CT_SUPPORT_REQUEST;
            EXPECT_SUCCESS(s2n_send_cert_chain(conn, &stuffer, chain_and_key));
            EXPECT_SUCCESS(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
            /* OCSP extension processed */
            EXPECT_EQUAL(conn->status_response.size, s2n_array_len(data));
            EXPECT_BYTEARRAY_EQUAL(conn->status_response.data, data, s2n_array_len(data));
            /* SCT extension processed */
            EXPECT_EQUAL(conn->ct_response.size, s2n_array_len(data));
            EXPECT_BYTEARRAY_EQUAL(conn->ct_response.data, data, s2n_array_len(data));
            EXPECT_SUCCESS(s2n_connection_free(conn));
        }
    }
    /* Test: s2n_x509_validator_validate_cert_chain receives extensions */
    {
        /* Test: extensions only processed for >= TLS1.3 */
        {
            struct s2n_connection *setup_conn;
            EXPECT_SUCCESS(s2n_setup_connection_for_ocsp_validate_test(&setup_conn, chain_and_key));
            DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
            EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
            struct s2n_stuffer_reservation size = {0};
            EXPECT_SUCCESS(s2n_stuffer_reserve_uint24(&stuffer, &size));
            EXPECT_SUCCESS(s2n_write_test_cert(&stuffer, chain_and_key));
            EXPECT_SUCCESS(s2n_extension_list_send(S2N_EXTENSION_LIST_CERTIFICATE, setup_conn, &stuffer));
            EXPECT_SUCCESS(s2n_stuffer_write_vector_size(&size));
            /* TLS1.2 does NOT process extensions */
            {
                struct s2n_connection *conn;
                EXPECT_SUCCESS(s2n_setup_connection_for_ocsp_validate_test(&conn, chain_and_key));
                EXPECT_SUCCESS(s2n_stuffer_reread(&stuffer));
                conn->actual_protocol_version = S2N_TLS12;
                EXPECT_FAILURE(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
                EXPECT_EQUAL(conn->status_response.size, 0);
                EXPECT_EQUAL(conn->status_response.data, NULL);
                EXPECT_SUCCESS(s2n_connection_free(conn));
            }
            /* TLS1.3 DOES process extensions */
            {
                struct s2n_connection *conn;
                EXPECT_SUCCESS(s2n_setup_connection_for_ocsp_validate_test(&conn, chain_and_key));
                EXPECT_SUCCESS(s2n_stuffer_reread(&stuffer));
                conn->actual_protocol_version = S2N_TLS13;
                EXPECT_SUCCESS(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
                EXPECT_EQUAL(conn->status_response.size, s2n_array_len(data));
                EXPECT_BYTEARRAY_EQUAL(conn->status_response.data, data, s2n_array_len(data));
                EXPECT_SUCCESS(s2n_connection_free(conn));
            }
            EXPECT_SUCCESS(s2n_connection_free(setup_conn));
        }
        /* Test: extensions only processed on first certificate */
        {
            struct s2n_stuffer_reservation size = {0};
            /* Extensions on second cert ignored */
            {
                struct s2n_connection *conn;
                EXPECT_SUCCESS(s2n_setup_connection_for_ocsp_validate_test(&conn, chain_and_key));
                DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
                EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
                EXPECT_SUCCESS(s2n_stuffer_reserve_uint24(&stuffer, &size));
                EXPECT_SUCCESS(s2n_write_test_cert(&stuffer, chain_and_key));
                EXPECT_SUCCESS(s2n_extension_list_send(S2N_EXTENSION_LIST_EMPTY, conn, &stuffer));
                EXPECT_SUCCESS(s2n_write_test_cert(&stuffer, chain_and_key));
                EXPECT_SUCCESS(s2n_extension_list_send(S2N_EXTENSION_LIST_CERTIFICATE, conn, &stuffer));
                EXPECT_SUCCESS(s2n_stuffer_write_vector_size(&size));
                EXPECT_SUCCESS(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
                EXPECT_EQUAL(conn->status_response.size, 0);
                EXPECT_EQUAL(conn->status_response.data, NULL);
                EXPECT_SUCCESS(s2n_connection_free(conn));
            }
            /* Extensions on first cert processed */
            {
                struct s2n_connection *conn;
                EXPECT_SUCCESS(s2n_setup_connection_for_ocsp_validate_test(&conn, chain_and_key));
                DEFER_CLEANUP(struct s2n_stuffer stuffer, s2n_stuffer_free);
                EXPECT_SUCCESS(s2n_stuffer_growable_alloc(&stuffer, 0));
                EXPECT_SUCCESS(s2n_stuffer_reserve_uint24(&stuffer, &size));
                EXPECT_SUCCESS(s2n_write_test_cert(&stuffer, chain_and_key));
                EXPECT_SUCCESS(s2n_extension_list_send(S2N_EXTENSION_LIST_CERTIFICATE, conn, &stuffer));
                EXPECT_SUCCESS(s2n_write_test_cert(&stuffer, chain_and_key));
                EXPECT_SUCCESS(s2n_extension_list_send(S2N_EXTENSION_LIST_EMPTY, conn, &stuffer));
                EXPECT_SUCCESS(s2n_stuffer_write_vector_size(&size));
                EXPECT_SUCCESS(s2n_x509_validator_validate_cert_chain_test(conn, &stuffer));
                EXPECT_EQUAL(conn->status_response.size, s2n_array_len(data));
                EXPECT_BYTEARRAY_EQUAL(conn->status_response.data, data, s2n_array_len(data));
                EXPECT_SUCCESS(s2n_connection_free(conn));
            }
        }
    }
    EXPECT_SUCCESS(s2n_cert_chain_and_key_free(chain_and_key));
    EXPECT_SUCCESS(s2n_config_free(config));
    END_TEST();
    return 0;
}
|
ScheintodX/AXON-E-Tools
|
src/test/java/de/axone/cache/ng/TestValueProviderTest.java
|
<gh_stars>0
package de.axone.cache.ng;
import static org.testng.Assert.*;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.List;
import org.testng.annotations.Test;
import de.axone.cache.ng.TestValueProvider.Range;
/**
 * Tests for {@link TestValueProvider}: basic lookup, range limiting,
 * artificial delays, and (non-)thread-safety behavior.
 */
@Test( groups="testng.testvalueprovider" )
public class TestValueProviderTest {
 // Number of concurrent workers used by testMultithreaded.
 private static final int NUM_THREADS = 1000;
 /**
  * Plain provider maps every key k to "value_" + k; ranged() returns null
  * outside [0,10]; delayed( 10 ) adds an artificial pause to each lookup.
  */
 public void testValueProvider() {
  TestValueProvider<Integer,String> t1 = new TestValueProvider<Integer,String>( k -> "value_" + k );
  assertEquals( t1.get( 0 ), "value_0" );
  assertEquals( t1.get( 10 ), "value_10" );
  assertEquals( t1.get( 11 ), "value_11" );
  // Restrict to [0,10]: keys outside the range yield null.
  TestValueProvider<Integer,String> t2 = t1.ranged( Range.of( 0, 10 ) );
  assertEquals( t2.get( 0 ), "value_0" );
  assertEquals( t2.get( 10 ), "value_10" );
  assertNull( t2.get( 11 ) );
  TestValueProvider<Integer,String> t3 = t2.delayed( 10 );
  long start = System.currentTimeMillis();
  assertEquals( t3.get( 0 ), "value_0" );
  assertEquals( t3.get( 10 ), "value_10" );
  assertNull( t3.get( 11 ) );
  long end = System.currentTimeMillis();
  // Three delayed lookups should take at least 20 ms in total
  // (presumably ~10 ms each for the in-range hits — TODO confirm whether
  // the out-of-range lookup is delayed as well).
  assertTrue( (end-start) >= 20, "Takes some Time: " + (end-start) + " >= 20" );
 }
 /**
  * Hammers a provider marked threadsafe(false) from many threads; each
  * worker expects either the correct value or a
  * ConcurrentModificationException.
  *
  * NOTE(review): assertion failures thrown inside the worker threads are
  * not propagated back to TestNG's main thread — a failing assertEquals in
  * ValueChecker.run() would not fail this test. Verify whether that is
  * intended.
  */
 public void testMultithreaded() throws Exception {
  TestValueProvider<Integer,String> t1 = new TestValueProvider<Integer,String>( k -> "value_" + k )
    .threadsafe( false );
  List<Thread> threads = new ArrayList<>( NUM_THREADS );
  for( int i = 0; i < NUM_THREADS; i++ ) {
   threads.add( new Thread( new ValueChecker( t1, ConcurrentModificationException.class, i ) ) );
  }
  // Start all workers first, then wait for every one to finish.
  for( Thread t : threads ) t.start();
  for( Thread t : threads ) t.join();
 }
 /**
  * Worker that looks up one key and tolerates exactly one expected
  * exception type (if configured).
  */
 static class ValueChecker implements Runnable {
  final int i;                                        // key to look up
  final TestValueProvider<Integer,String> tvp;        // provider under test
  final Class<? extends Throwable> expectException;   // tolerated failure, or null
  ValueChecker( TestValueProvider<Integer,String> tvp, Class<? extends Throwable> expectException, int i ) {
   this.tvp = tvp;
   this.i = i;
   this.expectException = expectException;
  }
  @Override
  public void run() {
   if( expectException != null ) {
    try {
     assertEquals( tvp.get( i ), "value_" + i );
    } catch( Throwable t ) {
     // Any thrown error must be exactly the expected exception type.
     assertEquals( t.getClass(), expectException );
    }
   } else {
    assertEquals( tvp.get( i ), "value_" + i );
   }
  }
 }
}
|
digi-embedded/android_platform_system_bt
|
service/test/low_energy_scanner_unittest.cc
|
<filename>service/test/low_energy_scanner_unittest.cc<gh_stars>0
//
// Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include <base/macros.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <memory>
#include "service/adapter.h"
#include "service/hal/fake_bluetooth_gatt_interface.h"
#include "service/low_energy_scanner.h"
#include "stack/include/bt_types.h"
#include "stack/include/hcidefs.h"
#include "test/mock_adapter.h"
using ::testing::_;
using ::testing::Return;
using ::testing::Pointee;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::SaveArg;
namespace bluetooth {
namespace {
// GoogleMock implementation of the HAL BleScannerInterface so tests can set
// expectations on scanner registration and scanning without real hardware.
class MockScannerHandler : public BleScannerInterface {
 public:
  MockScannerHandler() {}
  ~MockScannerHandler() override = default;
  MOCK_METHOD1(RegisterScanner, void(BleScannerInterface::RegisterCallback));
  MOCK_METHOD1(Unregister, void(int));
  MOCK_METHOD1(Scan, void(bool));
  MOCK_METHOD5(ScanFilterParamSetupImpl,
               void(uint8_t client_if, uint8_t action, uint8_t filt_index,
                    btgatt_filt_param_setup_t* filt_param,
                    FilterParamSetupCallback cb));
  MOCK_METHOD2(ScanFilterClear, void(int filt_index, FilterConfigCallback cb));
  MOCK_METHOD2(ScanFilterEnable, void(bool enable, EnableCallback cb));
  MOCK_METHOD3(SetScanParameters,
               void(int scan_interval, int scan_window, Callback cb));
  MOCK_METHOD5(BatchscanConfigStorage,
               void(int client_if, int batch_scan_full_max,
                    int batch_scan_trunc_max, int batch_scan_notify_threshold,
                    Callback cb));
  MOCK_METHOD6(BatchscanEnable,
               void(int scan_mode, int scan_interval, int scan_window,
                    int addr_type, int discard_rule, Callback cb));
  MOCK_METHOD1(BatchscanDisable, void(Callback cb));
  MOCK_METHOD2(BatchscanReadReports, void(int client_if, int scan_mode));
  MOCK_METHOD7(StartSync, void(uint8_t, bt_bdaddr_t, uint16_t, uint16_t,
                               StartSyncCb, SyncReportCb, SyncLostCb));
  MOCK_METHOD1(StopSync, void(uint16_t));
  // Plain no-op stub: the tests in this file never exercise filter
  // add/remove, so it is not mocked.
  void ScanFilterAddRemove(int action, int filt_type, int filt_index,
                           int company_id, int company_id_mask,
                           const bt_uuid_t* p_uuid,
                           const bt_uuid_t* p_uuid_mask,
                           const bt_bdaddr_t* bd_addr, char addr_type,
                           std::vector<uint8_t> data,
                           std::vector<uint8_t> p_mask,
                           FilterConfigCallback cb){};
  // The interface passes the filter params by unique_ptr, which gmock cannot
  // mock directly; forward the raw pointer to the mockable *Impl method.
  void ScanFilterParamSetup(
      uint8_t client_if, uint8_t action, uint8_t filt_index,
      std::unique_ptr<btgatt_filt_param_setup_t> filt_param,
      FilterParamSetupCallback cb) {
    ScanFilterParamSetupImpl(client_if, action, filt_index, filt_param.get(),
                             std::move(cb));
  }
};
// Records scan results delivered to a LowEnergyScanner so tests can inspect
// the most recent result and how many results arrived in total.
class TestDelegate : public LowEnergyScanner::Delegate {
 public:
  TestDelegate() = default;
  ~TestDelegate() override = default;

  // Number of results received so far.
  int scan_result_count() const { return scan_result_count_; }
  // The most recently delivered result.
  const ScanResult& last_scan_result() const { return last_scan_result_; }

  void OnScanResult(LowEnergyScanner* scanner, const ScanResult& scan_result) {
    ASSERT_TRUE(scanner);
    last_scan_result_ = scan_result;
    ++scan_result_count_;
  }

 private:
  int scan_result_count_ = 0;
  ScanResult last_scan_result_;
  DISALLOW_COPY_AND_ASSIGN(TestDelegate);
};
// Base fixture: installs a FakeBluetoothGattInterface backed by
// MockScannerHandler and builds a LowEnergyScannerFactory over a mock adapter.
class LowEnergyScannerTest : public ::testing::Test {
 public:
  LowEnergyScannerTest() = default;
  ~LowEnergyScannerTest() override = default;
  void SetUp() override {
    // Only set |mock_handler_| if a test hasn't set it.
    if (!mock_handler_) mock_handler_.reset(new MockScannerHandler());
    fake_hal_gatt_iface_ = new hal::FakeBluetoothGattInterface(
        nullptr, std::static_pointer_cast<BleScannerInterface>(mock_handler_),
        nullptr, nullptr);
    // The fake HAL must be registered before the factory is created, since
    // the factory talks to BluetoothGattInterface.
    hal::BluetoothGattInterface::InitializeForTesting(fake_hal_gatt_iface_);
    ble_factory_.reset(new LowEnergyScannerFactory(mock_adapter_));
  }
  void TearDown() override {
    // Destroy the factory before tearing down the HAL it depends on.
    ble_factory_.reset();
    hal::BluetoothGattInterface::CleanUp();
  }
 protected:
  hal::FakeBluetoothGattInterface* fake_hal_gatt_iface_;
  testing::MockAdapter mock_adapter_;
  std::shared_ptr<MockScannerHandler> mock_handler_;
  std::unique_ptr<LowEnergyScannerFactory> ble_factory_;
 private:
  DISALLOW_COPY_AND_ASSIGN(LowEnergyScannerTest);
};
// Used for tests that operate on a pre-registered scanner.
class LowEnergyScannerPostRegisterTest : public LowEnergyScannerTest {
public:
LowEnergyScannerPostRegisterTest() : next_scanner_id_(0) {}
~LowEnergyScannerPostRegisterTest() override = default;
void SetUp() override {
LowEnergyScannerTest::SetUp();
auto callback = [&](std::unique_ptr<LowEnergyScanner> scanner) {
le_scanner_ = std::move(scanner);
};
RegisterTestScanner(callback);
}
void TearDown() override {
EXPECT_CALL(*mock_handler_, Unregister(_)).Times(1).WillOnce(Return());
le_scanner_.reset();
LowEnergyScannerTest::TearDown();
}
void RegisterTestScanner(
const std::function<void(std::unique_ptr<LowEnergyScanner> scanner)>
callback) {
UUID uuid = UUID::GetRandom();
auto api_callback = [&](BLEStatus status, const UUID& in_uuid,
std::unique_ptr<BluetoothInstance> in_scanner) {
CHECK(in_uuid == uuid);
CHECK(in_scanner.get());
CHECK(status == BLE_STATUS_SUCCESS);
callback(std::unique_ptr<LowEnergyScanner>(
static_cast<LowEnergyScanner*>(in_scanner.release())));
};
BleScannerInterface::RegisterCallback reg_scanner_cb;
EXPECT_CALL(*mock_handler_, RegisterScanner(_))
.Times(1)
.WillOnce(SaveArg<0>(®_scanner_cb));
ble_factory_->RegisterInstance(uuid, api_callback);
reg_scanner_cb.Run(next_scanner_id_++, BT_STATUS_SUCCESS);
::testing::Mock::VerifyAndClearExpectations(mock_handler_.get());
}
protected:
std::unique_ptr<LowEnergyScanner> le_scanner_;
private:
int next_scanner_id_;
DISALLOW_COPY_AND_ASSIGN(LowEnergyScannerPostRegisterTest);
};
// Verifies scanner registration: success and failure paths, duplicate-UUID
// rejection, and automatic Unregister when the scanner is destroyed.
//
// Fix: two SaveArg arguments were corrupted to "®_scanner_cb1/2" (an HTML
// "&reg" entity decoded into '®'); restored to "&reg_scanner_cb1/2".
TEST_F(LowEnergyScannerTest, RegisterInstance) {
  BleScannerInterface::RegisterCallback reg_scanner_cb1;
  EXPECT_CALL(*mock_handler_, RegisterScanner(_))
      .Times(1)
      .WillOnce(SaveArg<0>(&reg_scanner_cb1));
  // These will be asynchronously populated with a result when the callback
  // executes.
  BLEStatus status = BLE_STATUS_SUCCESS;
  UUID cb_uuid;
  std::unique_ptr<LowEnergyScanner> scanner;
  int callback_count = 0;
  auto callback = [&](BLEStatus in_status, const UUID& uuid,
                      std::unique_ptr<BluetoothInstance> in_scanner) {
    status = in_status;
    cb_uuid = uuid;
    scanner = std::unique_ptr<LowEnergyScanner>(
        static_cast<LowEnergyScanner*>(in_scanner.release()));
    callback_count++;
  };
  UUID uuid0 = UUID::GetRandom();
  // HAL returns success.
  EXPECT_TRUE(ble_factory_->RegisterInstance(uuid0, callback));
  EXPECT_EQ(0, callback_count);
  // Calling twice with the same UUID should fail with no additional call into
  // the stack.
  EXPECT_FALSE(ble_factory_->RegisterInstance(uuid0, callback));
  ::testing::Mock::VerifyAndClearExpectations(mock_handler_.get());
  // Call with a different UUID while one is pending.
  UUID uuid1 = UUID::GetRandom();
  BleScannerInterface::RegisterCallback reg_scanner_cb2;
  EXPECT_CALL(*mock_handler_, RegisterScanner(_))
      .Times(1)
      .WillOnce(SaveArg<0>(&reg_scanner_cb2));
  EXPECT_TRUE(ble_factory_->RegisterInstance(uuid1, callback));
  // |uuid0| succeeds.
  int scanner_if0 = 2;  // Pick something that's not 0.
  reg_scanner_cb1.Run(scanner_if0, BT_STATUS_SUCCESS);
  EXPECT_EQ(1, callback_count);
  ASSERT_TRUE(scanner.get() !=
              nullptr);  // Assert to terminate in case of error
  EXPECT_EQ(BLE_STATUS_SUCCESS, status);
  EXPECT_EQ(scanner_if0, scanner->GetInstanceId());
  EXPECT_EQ(uuid0, scanner->GetAppIdentifier());
  EXPECT_EQ(uuid0, cb_uuid);
  // The scanner should unregister itself when deleted.
  EXPECT_CALL(*mock_handler_, Unregister(scanner_if0))
      .Times(1)
      .WillOnce(Return());
  scanner.reset();
  ::testing::Mock::VerifyAndClearExpectations(mock_handler_.get());
  // |uuid1| fails.
  int scanner_if1 = 3;
  reg_scanner_cb2.Run(scanner_if1, BT_STATUS_FAIL);
  EXPECT_EQ(2, callback_count);
  ASSERT_TRUE(scanner.get() ==
              nullptr);  // Assert to terminate in case of error
  EXPECT_EQ(BLE_STATUS_FAILURE, status);
  EXPECT_EQ(uuid1, cb_uuid);
}
// Verifies StartScan/StopScan: both are rejected while the adapter is
// disabled and forwarded to the HAL Scan(bool) call when enabled.
TEST_F(LowEnergyScannerPostRegisterTest, ScanSettings) {
  // First IsEnabled() query reports the adapter as off, all later ones as on.
  EXPECT_CALL(mock_adapter_, IsEnabled())
      .WillOnce(Return(false))
      .WillRepeatedly(Return(true));
  ScanSettings settings;
  std::vector<ScanFilter> filters;
  // Adapter is not enabled.
  EXPECT_FALSE(le_scanner_->StartScan(settings, filters));
  // TODO(jpawlowski): add tests checking settings and filter parsing when
  // implemented
  // These should succeed and result in a HAL call
  EXPECT_CALL(*mock_handler_, Scan(true)).Times(1).WillOnce(Return());
  EXPECT_TRUE(le_scanner_->StartScan(settings, filters));
  // These should succeed and result in a HAL call
  EXPECT_CALL(*mock_handler_, Scan(false)).Times(1).WillOnce(Return());
  EXPECT_TRUE(le_scanner_->StopScan());
  ::testing::Mock::VerifyAndClearExpectations(mock_handler_.get());
}
// Verifies scan-result delivery to the delegate: results are dropped before a
// scan starts, and record payloads are forwarded once scanning is active.
TEST_F(LowEnergyScannerPostRegisterTest, ScanRecord) {
  TestDelegate delegate;
  le_scanner_->SetDelegate(&delegate);
  EXPECT_EQ(0, delegate.scan_result_count());
  // Three payloads: a 4-byte record, a 1-byte record, and a 62-byte record.
  std::vector<uint8_t> kTestRecord0({0x02, 0x01, 0x00, 0x00});
  std::vector<uint8_t> kTestRecord1({0x00});
  std::vector<uint8_t> kTestRecord2(
      {0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01,
       0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00,
       0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01,
       0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00,
       0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01,
       0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00});
  const bt_bdaddr_t kTestAddress = {{0x01, 0x02, 0x03, 0x0A, 0x0B, 0x0C}};
  const char kTestAddressStr[] = "01:02:03:0A:0B:0C";
  const int kTestRssi = 64;
  // Scan wasn't started. Result should be ignored.
  fake_hal_gatt_iface_->NotifyScanResultCallback(kTestAddress, kTestRssi,
                                                 kTestRecord0);
  EXPECT_EQ(0, delegate.scan_result_count());
  // Start a scan session for |le_scanner_|.
  EXPECT_CALL(mock_adapter_, IsEnabled()).Times(1).WillOnce(Return(true));
  EXPECT_CALL(*mock_handler_, Scan(_))
      .Times(2)
      .WillOnce(Return())
      .WillOnce(Return());
  ScanSettings settings;
  std::vector<ScanFilter> filters;
  ASSERT_TRUE(le_scanner_->StartScan(settings, filters));
  // The 4-byte record is reported with size 3 — presumably trailing 0x00
  // padding is stripped during parsing; confirm against ScanResult parsing.
  fake_hal_gatt_iface_->NotifyScanResultCallback(kTestAddress, kTestRssi,
                                                 kTestRecord0);
  EXPECT_EQ(1, delegate.scan_result_count());
  EXPECT_EQ(kTestAddressStr, delegate.last_scan_result().device_address());
  EXPECT_EQ(kTestRssi, delegate.last_scan_result().rssi());
  EXPECT_EQ(3U, delegate.last_scan_result().scan_record().size());
  // A single 0x00 byte yields an empty record.
  fake_hal_gatt_iface_->NotifyScanResultCallback(kTestAddress, kTestRssi,
                                                 kTestRecord1);
  EXPECT_EQ(2, delegate.scan_result_count());
  EXPECT_EQ(kTestAddressStr, delegate.last_scan_result().device_address());
  EXPECT_EQ(kTestRssi, delegate.last_scan_result().rssi());
  EXPECT_TRUE(delegate.last_scan_result().scan_record().empty());
  fake_hal_gatt_iface_->NotifyScanResultCallback(kTestAddress, kTestRssi,
                                                 kTestRecord2);
  EXPECT_EQ(3, delegate.scan_result_count());
  EXPECT_EQ(kTestAddressStr, delegate.last_scan_result().device_address());
  EXPECT_EQ(kTestRssi, delegate.last_scan_result().rssi());
  EXPECT_EQ(62U, delegate.last_scan_result().scan_record().size());
  le_scanner_->SetDelegate(nullptr);
}
} // namespace
} // namespace bluetooth
|
knightjdr/prohits-viz-ap
|
app/actions/news/get-news-articles.test.js
|
<gh_stars>1-10
import addMongoDate from '../../utils/add-mongo-date.js';
import find from '../../helpers/database/find.js';
import getNewsArticles from './get-news-articles.js';
import logger from '../../helpers/logging/logger.js';
// Replace the date, database, and logging helpers with Jest auto-mocks so
// the handler can run without real dependencies.
jest.mock('../../utils/add-mongo-date');
jest.mock('../../helpers/database/find');
jest.mock('../../helpers/logging/logger.js');

// Minimal Express-style request/response doubles.
const req = {};
const res = {
  end: jest.fn(),
  send: jest.fn(),
  status: jest.fn(),
};

// Canned fixtures: `find` resolves with raw articles; `addMongoDate.arr`
// returns the same articles decorated with a `dbDate` field.
const returnValues = {
  news: {
    addDate: [{ headline: 1, dbDate: 'a' }, { headline: 2, dbDate: 'b' }],
    find: [{ headline: 1 }, { headline: 2 }],
  },
};
describe('News list', () => {
  describe('when find returns response object', () => {
    beforeAll(async () => {
      // Reset call history, then queue one successful database read and one
      // date-decoration pass before invoking the handler.
      res.send.mockClear();
      res.status.mockClear();
      find.mockResolvedValueOnce(returnValues.news.find);
      addMongoDate.arr.mockReturnValueOnce(returnValues.news.addDate);
      await getNewsArticles(req, res);
    });
    it('should return default status', () => {
      // No explicit status call means the framework default is used.
      expect(res.status).not.toHaveBeenCalled();
    });
    it('should return data object', () => {
      // The handler sends the date-decorated articles, not the raw find rows.
      expect(res.send).toHaveBeenCalledWith(returnValues.news.addDate);
    });
  });
  describe('when there is a news list error', () => {
    beforeAll(async () => {
      logger.error.mockClear();
      res.end.mockClear();
      res.status.mockClear();
      // Queue a database failure for this invocation only.
      find.mockRejectedValueOnce(new Error('cannot access news'));
      await getNewsArticles(req, res);
    });
    afterAll(() => {
      find.mockClear();
    });
    it('should log error', () => {
      expect(logger.error).toHaveBeenCalledWith('news articles - Error: cannot access news');
    });
    it('should return 500 status', () => {
      expect(res.status).toHaveBeenCalledWith(500);
    });
    it('should end response', () => {
      expect(res.end).toHaveBeenCalled();
    });
  });
});
|
BernardoFuret/async-tajs
|
resources/hostenv/nodejs/modules/fs.js
|
// Abstract model of Node's fs.Stats for the TAJS analysis host environment.
// Callers populate every field with abstract values (TAJS_make('AnyNum') /
// TAJS_make('AnyStr')); the constructor just copies each argument onto the
// correspondingly named property (note atim_msec -> atimeMs etc.).
function Stats(
  dev,
  mode,
  nlink,
  uid,
  gid,
  rdev,
  blksize,
  ino,
  size,
  blocks,
  atim_msec,
  mtim_msec,
  ctim_msec,
  birthtim_msec,
  atime,
  mtime,
  ctime,
  birthtime
) {
  this.dev = dev;
  this.mode = mode;
  this.nlink = nlink;
  this.uid = uid;
  this.gid = gid;
  this.rdev = rdev;
  this.blksize = blksize;
  this.ino = ino;
  this.size = size;
  this.blocks = blocks;
  this.atimeMs = atim_msec;
  this.mtimeMs = mtim_msec;
  this.ctimeMs = ctim_msec;
  this.birthtimeMs = birthtim_msec;
  this.atime = atime;
  this.mtime = mtime;
  this.ctime = ctime;
  this.birthtime = birthtime;
}
// Directory and symlink checks are modeled as "any boolean" — the analysis
// cannot know the answer, so both outcomes are considered possible.
Stats.prototype.isDirectory = function () {
    return TAJS_make('AnyBool');
};
Stats.prototype.isSymbolicLink = function () {
    return TAJS_make('AnyBool');
};
// Model of fs.access: the callback (argument index 2) receives either
// undefined or a generic error, expressed as a TAJS join of the two.
function access(filename, mode, callback) {
    TAJS_makeContextSensitive(access, 2);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    TAJS_addAsyncIOCallback(callback, err);
}
// Model of fs.close: callback (index 1) gets undefined or a generic error.
function close(fd, callback) {
    TAJS_makeContextSensitive(close, 1);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    TAJS_addAsyncIOCallback(callback, err);
}
// Model of fs.open: callback (index 3) gets (err, fd) where the file
// descriptor is undefined or any number.
function open(filename, flags, mode, callback) {
    TAJS_makeContextSensitive(open, 3);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    var fd = TAJS_join(TAJS_make('Undef'), TAJS_make('AnyNum'));
    TAJS_addAsyncIOCallback(callback, err, fd);
}
// Model of fs.readdir: callback gets (err, names) where names is undefined,
// an empty array, or an array of any strings.
function readdir(path, options, callback) {
    TAJS_makeContextSensitive(readdir, 2);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    var data = TAJS_join(TAJS_make('Undef'), [], [TAJS_make('AnyStr')]);
    TAJS_addAsyncIOCallback(callback, err, data);
}
// Model of fs.readFile: callback gets (err, contents) with contents being
// undefined or any string.
function readFile(filename, options, callback) {
    TAJS_makeContextSensitive(readFile, 2);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    var data = TAJS_join(TAJS_make('Undef'), TAJS_make('AnyStr'));
    TAJS_addAsyncIOCallback(callback, err, data);
}
// Model of fs.realpath: callback gets (err, resolvedPath) with the path
// being undefined or any string.
function realPath(filename, options, callback) {
    TAJS_makeContextSensitive(realPath, 2);
    var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
    var data = TAJS_join(TAJS_make('Undef'), TAJS_make('AnyStr'));
    TAJS_addAsyncIOCallback(callback, err, data);
}
// Model of fs.stat (reused for fs.lstat in the exports): on success the
// callback receives a Stats object whose numeric fields are abstract
// numbers and whose four time fields are abstract strings.
function stat(path, options, callback) {
TAJS_makeContextSensitive(stat, 2);
var err = TAJS_join(TAJS_make('Undef'), TAJS_makeGenericError());
var stats = new Stats(
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'));
var data = TAJS_join(TAJS_make('Undef'), stats);
TAJS_addAsyncIOCallback(callback, err, data);
}
// Model of fs.watch: the listener receives (eventType, filename), both
// abstract strings; the real API has no error argument here either.
function watch(filename, options, callback) {
TAJS_makeContextSensitive(watch, 2);
var eventType = TAJS_make('AnyStr');
var file = TAJS_make('AnyStr');
TAJS_addAsyncIOCallback(callback, eventType, file);
}
// Model of fs.watchFile: the listener receives two fully-abstract Stats
// objects (current, previous).
function watchFile(filename, options, callback) {
TAJS_makeContextSensitive(watchFile, 2);
var current = new Stats(
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'));
var previous = new Stats(
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'));
TAJS_addAsyncIOCallback(callback, current, previous);
}
// No-op model: unwatchFile has no observable effect in the analysis.
function unwatchFile(file) { }
// Synchronous functions
// These return the same abstract values that the corresponding async
// models pass to their callbacks.
function readdirSync(path, options) {
return TAJS_join(TAJS_make('Undef'), [], [TAJS_make('AnyStr')]);
}
function realpathSync(filename, options) {
return TAJS_join(TAJS_make('Undef'), TAJS_make('AnyStr'));
}
// statSync (also exported as lstatSync): a Stats object whose numeric
// fields are abstract numbers and whose time fields are abstract strings.
function statSync(fd, options) {
return new Stats(
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyNum'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'),
TAJS_make('AnyStr'));
}
// Public fs API surface of this model. Note that lstat/lstatSync
// deliberately reuse the stat models: this analysis does not
// distinguish symbolic links from regular files.
module.exports = {
access: access,
close: close,
lstat: stat,
open: open,
readFile : readFile,
readdir: readdir,
realpath: realPath,
stat: stat,
watch: watch,
watchFile: watchFile,
unwatchFile: unwatchFile,
Stats: Stats,
lstatSync: statSync,
readdirSync: readdirSync,
realpathSync: realpathSync,
statSync: statSync
};
|
adohe/Homework
|
LeetCode/src/com/xqbase/java/ZigConv.java
|
<gh_stars>1-10
package com.xqbase.java;
/**
 * LeetCode 6 -- Zigzag Conversion.
 *
 * Writes the characters of a string row by row in a zigzag pattern over
 * {@code numRows} rows, then reads the result line by line.
 */
public class ZigConv {

    public static void main(String[] args) {
        System.out.println(convert("PAYPALISHIRING", 3));
    }

    /**
     * Converts a string to its zigzag-row reading.
     *
     * @param s       the input string
     * @param numRows the number of rows in the zigzag pattern
     * @return the characters of {@code s} read row by row
     */
    public static String convert(String s, int numRows) {
        // A single row (or fewer) leaves the string unchanged.
        if (numRows <= 1 || s.length() == 0)
            return s;

        // Walk the string once, dropping each character into the builder
        // for its row while bouncing between row 0 and row numRows-1.
        StringBuilder[] rows = new StringBuilder[numRows];
        for (int r = 0; r < numRows; ++r) {
            rows[r] = new StringBuilder();
        }

        int row = 0;
        int step = 1; // +1 while moving down, -1 while moving up
        for (int i = 0; i < s.length(); ++i) {
            rows[row].append(s.charAt(i));
            if (row == 0) {
                step = 1;
            } else if (row == numRows - 1) {
                step = -1;
            }
            row += step;
        }

        // Concatenate the rows top to bottom.
        StringBuilder result = new StringBuilder(s.length());
        for (StringBuilder r : rows) {
            result.append(r);
        }
        return result.toString();
    }
}
|
svidoso/ipopo
|
pelix/rsa/topologymanagers/basic.py
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
BasicTopologyManager implements TopologyManager API
:author: <NAME>
:copyright: Copyright 2020, <NAME>
:license: Apache License 2.0
:version: 1.0.1
..
Copyright 2020 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
try:
# pylint: disable=W0611
from typing import Any, Dict
from pelix.framework import ServiceEvent
from pelix.rsa import RemoteServiceAdminEvent
except ImportError:
pass
from pelix.ipopo.decorators import ComponentFactory, Instantiate
from pelix.rsa import ECF_ENDPOINT_CONTAINERID_NAMESPACE
from pelix.rsa.providers.discovery import EndpointEvent
from pelix.rsa.topologymanagers import TopologyManager
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# Standard logging
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@ComponentFactory("basic-topology-manager-factory")
# Tell iPOPO to instantiate a component instance as soon as the file is loaded
@Instantiate(
    "basic-topology-manager",
    {
        TopologyManager.ENDPOINT_LISTENER_SCOPE: "({0}=*)".format(
            ECF_ENDPOINT_CONTAINERID_NAMESPACE
        )
    },
)
class BasicTopologyManager(TopologyManager):
    """
    BasicTopologyManager extends TopologyManager api
    """

    def event(self, service_event, listener_dict):
        # type: (ServiceEvent, Dict[Any, Any]) -> None
        """
        Implementation of EventListenerHook. Called by local
        service registry when a service is registered, unregistered
        or modified. Will be called by thread doing registration/unregister
        service
        """
        self._handle_event(service_event)

    def endpoint_changed(self, endpoint_event, matched_filter):
        # type: (EndpointEvent, Any) -> None
        """
        Implementation of discovery API EndpointEventListener.
        Called by discovery provider when an endpoint change
        ADDED,REMOVED,MODIFIED is detected. May be called
        by arbitrary thread.
        """
        event_type = endpoint_event.get_type()
        ed = endpoint_event.get_endpoint_description()
        ed_id = ed.get_id()

        if event_type == EndpointEvent.ADDED:
            # if it's an add event, we call handle_endpoint_added
            imported_reg = self._import_added_endpoint(ed)
            # get exception from ImportRegistration
            exc = imported_reg.get_exception()
            # if there was exception on import, log it with its traceback
            if exc:
                # BUG FIX: _logger.exception() is only valid inside an
                # "except" block (it logs sys.exc_info(), which is empty
                # here). The failure comes from the import registration,
                # so attach it explicitly via exc_info instead.
                _logger.error(
                    "BasicTopologyManager import failed for endpoint.id=%s",
                    ed_id,
                    exc_info=exc,
                )
            else:
                _logger.debug(
                    "BasicTopologyManager: service imported! "
                    "endpoint.id=%s, service_ref=%s",
                    ed_id,
                    imported_reg.get_reference(),
                )
        elif event_type == EndpointEvent.REMOVED:
            self._unimport_removed_endpoint(ed)
            _logger.debug(
                "BasicTopologyManager: endpoint removed. endpoint.id=%s", ed_id
            )
        elif event_type == EndpointEvent.MODIFIED:
            self._update_imported_endpoint(ed)
            _logger.debug(
                "BasicTopologyManager: endpoint updated. endpoint.id=%s", ed_id
            )
|
wcalandro/kythe
|
kythe/cxx/indexer/cxx/testdata/tvar_template/template_arg_multiple_typename.cc
|
// Checks that templates can accept multiple typename arguments.
template
//- @T defines/binding TT
//- @S defines/binding TS
<typename T, typename S>
//- @C defines/binding ACDecl1
class C;
template
//- @N defines/binding TN
//- @V defines/binding TV
<typename N, typename V>
//- @C defines/binding ACDecl2
class C;
template
//- @W defines/binding TW
//- @X defines/binding TX
<typename W, typename X>
//- @C defines/binding ACDefn
//- @C completes/uniquely ACDecl2
//- @C completes/uniquely ACDecl1
class C { };
//- CDecl1 childof ACDecl1
//- CDecl2 childof ACDecl2
//- CDefn childof ACDefn
//- CDecl1 tparam.0 TT
//- CDecl1 tparam.1 TS
//- CDecl2 tparam.0 TN
//- CDecl2 tparam.1 TV
//- CDefn tparam.0 TW
//- CDefn tparam.1 TX
|
ajb85/coopers-site
|
src/components/Gallery/Gallery.js
|
<filename>src/components/Gallery/Gallery.js
import React, { useEffect, useContext } from 'react';
import { useParams } from 'react-router-dom';
import MainImage from '../MainImage/';
import SideMenu from '../SideMenu/';
import BottomMenu from '../BottomMenu/';
import { ImagesContext } from 'Providers/Images.js';
import { WindowContext } from 'Providers/Window.js';
import history from 'history.js';
import styles from './styles.module.scss';
function Gallery(props) {
const {
image,
nextImage,
prevImage,
active,
setActive,
setLastImage
} = useContext(ImagesContext);
const { windowSize, showMenu, setShowMenu } = useContext(WindowContext);
const { id } = useParams;
console.log('ID: ', id);
useEffect(() => {
const removeListener = () =>
window.removeEventListener('keydown', keyPress);
function keyPress({ code }) {
if (code === 'ArrowLeft') {
prevImage();
} else if (code === 'ArrowRight') {
nextImage();
}
}
if (!active) {
id ? setActive(id) : setLastImage();
}
removeListener();
window.addEventListener('keydown', keyPress);
return removeListener;
}, [nextImage, prevImage, active, setActive, id, setLastImage]);
if (!image) {
return <div>Loading...</div>;
}
if (active) {
history.push('/gallery/' + active);
}
return (
<div className={styles.Gallery}>
{active && (
<MainImage
windowSize={windowSize}
image={image}
showMenu={showMenu}
setShowMenu={setShowMenu}
/>
)}
{windowSize.isMobile ? <BottomMenu /> : <SideMenu />}
</div>
);
}
export default Gallery;
|
sergeishay/Screenters
|
source-pack/pro/InputRange/index.js
|
// Barrel file: re-export the InputRange component (default) and all of
// its named exports so consumers can import from this directory.
export { default } from './InputRange';
export * from './InputRange';
|
C14427818/CollegeYr1
|
Algorithm and Design/Assignment/bubblenum.c
|
<reponame>C14427818/CollegeYr1
#include <stdio.h>
/*
 * Reads up to 15 student IDs from stdin and prints them in ascending
 * order using bubble sort.
 */
int main(void)
{
    int stuarray[15], num, i, j, swap;

    printf("Enter number of students\n");
    if (scanf("%d", &num) != 1 || num < 0) {
        printf("Invalid number of students\n");
        return 1;
    }
    /* Clamp to the array capacity: the original wrote past stuarray[14]
     * (a stack buffer overflow) whenever num > 15. */
    if (num > 15) {
        num = 15;
    }

    printf("Enter %d student ID's\n", num);
    for (i = 0; i < num; i++) {
        if (scanf("%d", &stuarray[i]) != 1) {
            printf("Invalid student ID\n");
            return 1;
        }
    }

    /* Classic bubble sort: after pass i the i+1 largest values are in
     * their final positions at the end of the array. */
    for (i = 0; i < (num - 1); i++) {
        for (j = 0; j < num - i - 1; j++) {
            if (stuarray[j] > stuarray[j + 1]) { /* For decreasing order use < */
                swap = stuarray[j];
                stuarray[j] = stuarray[j + 1];
                stuarray[j + 1] = swap;
            }
        }
    }

    printf("Sorted list in ascending order:\n");
    for (i = 0; i < num; i++) {
        printf("%d\n", stuarray[i]);
    }
    return 0;
}
|
brycewang-microsoft/iot-sdks-e2e-fx
|
test-runner/exc_thread.py
|
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
import threading
class ExcThread(threading.Thread):
    """Thread that captures any exception raised by its target.

    After ``join()`` returns, ``self.exc`` holds the exception the target
    raised, or ``None`` if it completed normally.
    """

    def __init__(self, target, args=None):
        threading.Thread.__init__(self)
        self.target = target
        # Treat a missing/empty argument sequence as "no arguments".
        self.args = args if args else []
        # Set by run() when the target raises.
        self.exc = None

    def run(self):
        try:
            self.target(*self.args)
        except Exception as error:
            # self.exc = sys.exc_info()
            self.exc = error
|
shubha-rajan/civiform
|
universal-application-tool-0.0.1/app/services/program/ProgramBlockDefinitionNotFoundException.java
|
package services.program;
/**
* ProgramBlockDefinitionNotFoundException is thrown when the specified block definition is not
* found in this program.
*/
/**
 * Thrown when the specified block definition does not exist in the given
 * program.
 */
public class ProgramBlockDefinitionNotFoundException extends Exception {

  public ProgramBlockDefinitionNotFoundException(long programId, long blockDefinitionId) {
    super(
        String.format(
            "Block not found in Program (ID %d) for block definition ID %d",
            programId, blockDefinitionId));
  }
}
|
doorsrom/com.doors.edge
|
app/src/main/java/org/chromium/chrome/browser/widget/selection/SelectableBottomSheetContent.java
|
<reponame>doorsrom/com.doors.edge<filename>app/src/main/java/org/chromium/chrome/browser/widget/selection/SelectableBottomSheetContent.java
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.widget.selection;
import android.support.annotation.CallSuper;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.TextView;
import org.chromium.base.CollectionUtil;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.toolbar.BottomToolbarPhone;
import org.chromium.chrome.browser.widget.bottomsheet.BottomSheet;
import org.chromium.chrome.browser.widget.bottomsheet.BottomSheet.BottomSheetContent;
import java.util.List;
/**
* An abstract {@link BottomSheetContent} for selectable list UI's displayed in the BottomSheet.
*
* @param <E> The type of the selectable items this bottom sheet content displays.
*/
public abstract class SelectableBottomSheetContent<E> implements BottomSheetContent {
/**
 * The class managing the selectable list. Used to retrieve various pieces of the
 * selectable list UI.
 * @param <E> The type of the selectable items the manager manages.
 */
public interface SelectableBottomSheetContentManager<E> {
/**
 * @return The view that shows the list UI.
 */
View getView();
/**
 * See {@link SelectableListLayout#detachToolbarView()}.
 */
SelectableListToolbar<E> detachToolbarView();
/**
 * @return The {@link RecyclerView} that contains the list of selectable items.
 */
RecyclerView getRecyclerView();
/**
 * @return The {@link TextView} shown when there are no selectable items to be shown.
 */
TextView getEmptyView();
/**
 * Called when the bottom sheet content is destroyed.
 */
void onDestroyed();
}
// Manager backing this sheet content; set in initialize() and nulled in destroy().
private SelectableBottomSheetContentManager<E> mManager;
// Toolbar detached from the selectable list layout; returned by getToolbarView().
private SelectableListToolbar<E> mToolbarView;
/**
 * Initialize the {@link SelectableBottomSheetContent}.
 * @param activity The activity displaying the bottom sheet that will hold this content.
 * @param manager The {@link SelectableBottomSheetContentManager} managing the selectable list.
 */
public void initialize(
final ChromeActivity activity, SelectableBottomSheetContentManager<E> manager) {
mManager = manager;
mToolbarView = manager.detachToolbarView();
mToolbarView.setActionBarDelegate(activity.getBottomSheet().getActionBarDelegate());
// Keep the sheet's handle tint in sync with the toolbar theme, and expand
// the sheet fully when the user starts a search.
mToolbarView.addObserver(new SelectableListToolbar.SelectableListToolbarObserver() {
@Override
public void onThemeColorChanged(boolean isLightTheme) {
activity.getBottomSheet().updateHandleTint();
}
@Override
public void onStartSearch() {
activity.getBottomSheet().setSheetState(BottomSheet.SHEET_STATE_FULL, true);
}
});
// Hand this content's toolbar to the phone toolbar for styling.
((BottomToolbarPhone) activity.getToolbarManager().getToolbar())
.setOtherToolbarStyle(mToolbarView);
}
// Subclasses report their concrete content type.
@Override
public abstract int getType();
@Override
public View getContentView() {
return mManager.getView();
}
// Both the list and its empty placeholder need sheet padding applied.
@Override
public List<View> getViewsForPadding() {
return CollectionUtil.newArrayList(mManager.getRecyclerView(), mManager.getEmptyView());
}
@Override
public View getToolbarView() {
return mToolbarView;
}
@Override
public boolean isUsingLightToolbarTheme() {
return mToolbarView.isLightTheme();
}
@Override
public boolean isIncognitoThemedContent() {
return false;
}
@Override
public int getVerticalScrollOffset() {
return mManager.getRecyclerView().computeVerticalScrollOffset();
}
@Override
public boolean applyDefaultTopPadding() {
return false;
}
@Override
public void scrollToTop() {
mManager.getRecyclerView().smoothScrollToPosition(0);
}
@Override
@CallSuper
public void destroy() {
// Release the manager; this content must not be used after destroy()
// (mManager is nulled, so further calls would throw NPE).
mManager.onDestroyed();
mManager = null;
}
}
|
NOAA-EMC/ioda
|
src/engines/ioda/src/ioda/Engines/HH/HH/HH-types.h
|
#pragma once
/*
* (C) Copyright 2017-2020 <NAME> (<EMAIL>)
* (C) Copyright 2020-2021 UCAR
*
* This software is licensed under the terms of the Apache Licence Version 2.0
* which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
*/
/*! \addtogroup ioda_internals_engines_hh
*
* @{
* \file HH-types.h
* \brief HDF5 engine implementation of ioda::detail::Type_Provider.
*/
#include "./Handles.h"
#include "ioda/Types/Type.h"
#include "ioda/Types/Type_Provider.h"
#include "ioda/defs.h"
namespace ioda {
namespace detail {
namespace Engines {
namespace HH {
/// \brief This is the implementation of Type_Provider using HDF5. Do not use outside of IODA.
/// \ingroup ioda_internals_engines_hh
class IODA_DL HH_Type_Provider : public detail::Type_Provider {
public:
virtual ~HH_Type_Provider();
/// Map a C++ type (via its type_index) to the matching HDF5 type handle.
static HH_hid_t getFundamentalHHType(std::type_index type);
Type makeFundamentalType(std::type_index type) const final;
Type makeArrayType(std::initializer_list<Dimensions_t> dimensions, std::type_index typeOuter,
std::type_index typeInner) const final;
Type makeStringType(std::type_index typeOuter, size_t string_length, StringCSet cset) const final;
/// Accessor for the shared provider instance.
static HH_Type_Provider* instance();
};
/// \brief This is the implementation of ioda::Type using HDF5. Do not use outside of IODA.
/// \ingroup ioda_internals_engines_hh
class IODA_DL HH_Type : public detail::Type_Backend {
public:
virtual ~HH_Type();
/// Handle to the underlying HDF5 type object (see Handles.h).
HH_hid_t handle;
HH_Type(HH_hid_t h);
size_t getSize() const final;
TypeClass getClass() const final;
void commitToBackend(Group &d, const std::string &name) const final;
bool isTypeSigned() const final;
bool isVariableLengthStringType() const final;
StringCSet getStringCSet() const final;
Type getBaseType() const final;
std::vector<Dimensions_t> getDimensions() const final;
};
} // namespace HH
} // namespace Engines
} // namespace detail
} // namespace ioda
/// @}
|
sho25/jackrabbit-oak
|
oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/DataStoreCacheUpgradeUtilsTest.java
|
<filename>oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/DataStoreCacheUpgradeUtilsTest.java
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|oak
operator|.
name|plugins
operator|.
name|blob
package|;
end_package
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|File
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|IOException
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Map
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|collect
operator|.
name|Maps
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|io
operator|.
name|Files
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|commons
operator|.
name|io
operator|.
name|FileUtils
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Before
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Rule
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Test
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|rules
operator|.
name|TemporaryFolder
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|oak
operator|.
name|plugins
operator|.
name|blob
operator|.
name|DataStoreCacheUpgradeUtils
operator|.
name|DOWNLOAD_DIR
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|oak
operator|.
name|plugins
operator|.
name|blob
operator|.
name|DataStoreCacheUpgradeUtils
operator|.
name|UPLOAD_STAGING_DIR
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertFalse
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertTrue
import|;
end_import
begin_comment
comment|/** * Tests for {@link DataStoreCacheUpgradeUtils} */
end_comment
begin_class
specifier|public
class|class
name|DataStoreCacheUpgradeUtilsTest
extends|extends
name|AbstractDataStoreCacheTest
block|{
annotation|@
name|Rule
specifier|public
name|TemporaryFolder
name|folder
init|=
operator|new
name|TemporaryFolder
argument_list|(
operator|new
name|File
argument_list|(
literal|"target"
argument_list|)
argument_list|)
decl_stmt|;
name|File
name|homeDir
decl_stmt|;
name|File
name|path
decl_stmt|;
name|File
name|pendingUploads
decl_stmt|;
annotation|@
name|Before
specifier|public
name|void
name|setup
parameter_list|()
throws|throws
name|IOException
block|{
name|homeDir
operator|=
name|folder
operator|.
name|getRoot
argument_list|()
expr_stmt|;
name|path
operator|=
name|folder
operator|.
name|newFolder
argument_list|(
literal|"repository"
argument_list|,
literal|"datastore"
argument_list|)
expr_stmt|;
name|pendingUploads
operator|=
operator|new
name|File
argument_list|(
name|homeDir
operator|+
literal|"/"
operator|+
name|DataStoreCacheUpgradeUtils
operator|.
name|UPLOAD_MAP
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeNoDownloads
parameter_list|()
throws|throws
name|Exception
block|{
name|setupUploads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|true
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|UPLOAD_STAGING_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeNoDownloadsDelPendingFileFalse
parameter_list|()
throws|throws
name|Exception
block|{
name|setupUploads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|true
argument_list|,
literal|false
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|UPLOAD_STAGING_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeMoveDownloadsFalse
parameter_list|()
throws|throws
name|Exception
block|{
name|setupUploads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|setupDownloads
argument_list|(
literal|"4444440"
argument_list|,
literal|"5555550"
argument_list|,
literal|"6666660"
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|false
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|UPLOAD_STAGING_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
name|assertFilesNoMove
argument_list|(
name|DOWNLOAD_DIR
argument_list|,
literal|"4444440"
argument_list|,
literal|"5555550"
argument_list|,
literal|"6666660"
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeNoUploads
parameter_list|()
throws|throws
name|Exception
block|{
name|setupDownloads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|true
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|DOWNLOAD_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeNoUploadMap
parameter_list|()
throws|throws
name|Exception
block|{
name|setupUploads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|FileUtils
operator|.
name|deleteQuietly
argument_list|(
name|pendingUploads
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|true
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|DOWNLOAD_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgrade
parameter_list|()
throws|throws
name|Exception
block|{
name|upgrade
argument_list|(
literal|true
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
specifier|public
name|void
name|upgradeDelPendingFileFalse
parameter_list|()
throws|throws
name|Exception
block|{
name|upgrade
argument_list|(
literal|false
argument_list|)
expr_stmt|;
block|}
specifier|private
name|void
name|upgrade
parameter_list|(
name|boolean
name|pendingFileDelete
parameter_list|)
throws|throws
name|Exception
block|{
name|setupUploads
argument_list|(
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
name|setupDownloads
argument_list|(
literal|"4444440"
argument_list|,
literal|"5555550"
argument_list|,
literal|"6666660"
argument_list|)
expr_stmt|;
name|DataStoreCacheUpgradeUtils
operator|.
name|upgrade
argument_list|(
name|homeDir
argument_list|,
name|path
argument_list|,
literal|true
argument_list|,
name|pendingFileDelete
argument_list|)
expr_stmt|;
name|assertFiles
argument_list|(
name|UPLOAD_STAGING_DIR
argument_list|,
literal|"1111110"
argument_list|,
literal|"2222220"
argument_list|,
literal|"3333330"
argument_list|)
expr_stmt|;
if|if
condition|(
name|pendingFileDelete
condition|)
block|{
name|assertFalse
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
else|else
block|{
name|assertTrue
argument_list|(
name|pendingUploads
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
name|assertFiles
argument_list|(
name|DOWNLOAD_DIR
argument_list|,
literal|"4444440"
argument_list|,
literal|"5555550"
argument_list|,
literal|"6666660"
argument_list|)
expr_stmt|;
block|}
specifier|private
name|void
name|setupUploads
parameter_list|(
name|String
modifier|...
name|ids
parameter_list|)
throws|throws
name|IOException
block|{
name|Map
argument_list|<
name|String
argument_list|,
name|Long
argument_list|>
name|pendingMap
init|=
name|Maps
operator|.
name|newHashMap
argument_list|()
decl_stmt|;
for|for
control|(
name|String
name|id
range|:
name|ids
control|)
block|{
name|File
name|f1
init|=
name|copyToFile
argument_list|(
name|randomStream
argument_list|(
name|Integer
operator|.
name|parseInt
argument_list|(
name|id
argument_list|)
argument_list|,
literal|4
operator|*
literal|1024
argument_list|)
argument_list|,
name|getFile
argument_list|(
name|id
argument_list|,
name|path
argument_list|)
argument_list|)
decl_stmt|;
name|pendingMap
operator|.
name|put
argument_list|(
name|getFileName
argument_list|(
name|id
argument_list|)
argument_list|,
name|System
operator|.
name|currentTimeMillis
argument_list|()
argument_list|)
expr_stmt|;
block|}
name|serializeMap
argument_list|(
name|pendingMap
argument_list|,
name|pendingUploads
argument_list|)
expr_stmt|;
block|}
specifier|private
name|void
name|setupDownloads
parameter_list|(
name|String
modifier|...
name|ids
parameter_list|)
throws|throws
name|IOException
block|{
for|for
control|(
name|String
name|id
range|:
name|ids
control|)
block|{
name|copyToFile
argument_list|(
name|randomStream
argument_list|(
name|Integer
operator|.
name|parseInt
argument_list|(
name|id
argument_list|)
argument_list|,
literal|4
operator|*
literal|1024
argument_list|)
argument_list|,
name|getFile
argument_list|(
name|id
argument_list|,
name|path
argument_list|)
argument_list|)
expr_stmt|;
block|}
block|}
specifier|private
name|void
name|assertFiles
parameter_list|(
name|String
name|moveFolder
parameter_list|,
name|String
modifier|...
name|ids
parameter_list|)
throws|throws
name|Exception
block|{
for|for
control|(
name|String
name|id
range|:
name|ids
control|)
block|{
name|File
name|file
init|=
name|getFile
argument_list|(
name|id
argument_list|,
name|path
argument_list|)
decl_stmt|;
name|assertFalse
argument_list|(
name|file
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
name|file
operator|=
name|getFile
argument_list|(
name|id
argument_list|,
operator|new
name|File
argument_list|(
name|path
argument_list|,
name|moveFolder
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|file
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|Files
operator|.
name|equal
argument_list|(
name|file
argument_list|,
name|copyToFile
argument_list|(
name|randomStream
argument_list|(
name|Integer
operator|.
name|parseInt
argument_list|(
name|id
argument_list|)
argument_list|,
literal|4
operator|*
literal|1024
argument_list|)
argument_list|,
name|folder
operator|.
name|newFile
argument_list|()
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
block|}
specifier|private
name|void
name|assertFilesNoMove
parameter_list|(
name|String
name|moveFolder
parameter_list|,
name|String
modifier|...
name|ids
parameter_list|)
throws|throws
name|Exception
block|{
for|for
control|(
name|String
name|id
range|:
name|ids
control|)
block|{
name|File
name|file
init|=
name|getFile
argument_list|(
name|id
argument_list|,
name|path
argument_list|)
decl_stmt|;
name|assertTrue
argument_list|(
name|file
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|Files
operator|.
name|equal
argument_list|(
name|file
argument_list|,
name|copyToFile
argument_list|(
name|randomStream
argument_list|(
name|Integer
operator|.
name|parseInt
argument_list|(
name|id
argument_list|)
argument_list|,
literal|4
operator|*
literal|1024
argument_list|)
argument_list|,
name|folder
operator|.
name|newFile
argument_list|()
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|file
operator|=
name|getFile
argument_list|(
name|id
argument_list|,
operator|new
name|File
argument_list|(
name|path
argument_list|,
name|moveFolder
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|file
operator|.
name|exists
argument_list|()
argument_list|)
expr_stmt|;
block|}
block|}
specifier|private
specifier|static
name|String
name|getFileName
parameter_list|(
name|String
name|name
parameter_list|)
block|{
return|return
name|name
operator|.
name|substring
argument_list|(
literal|0
argument_list|,
literal|2
argument_list|)
operator|+
literal|"/"
operator|+
name|name
operator|.
name|substring
argument_list|(
literal|2
argument_list|,
literal|4
argument_list|)
operator|+
literal|"/"
operator|+
name|name
return|;
block|}
block|}
end_class
end_unit
|
sarvekash/HackerRank_Solutions
|
ProjectEuler+/euler-0485.cpp
|
// ////////////////////////////////////////////////////////
// # Title
// Maximum number of divisors
//
// # URL
// https://projecteuler.net/problem=485
// http://euler.stephan-brumme.com/485/
//
// # Problem
// Let `d(n)` be the number of divisors of `n`.
// Let `M(n,k)` be the maximum value of `d(j)` for `n <= j <= n+k-1`.
// Let `S(u,k)` be the sum of `M(n,k)` for `1 <= n <= u-k+1`.
//
// You are given that `S(1000, 10) = 17176`.
//
// Find `S(100 000 000, 100 000)`.
//
// # Solved by
// <NAME>
// December 2017
//
// # Algorithm
// I needed to translate the short problem description into plain English (well, I'm German so actually it wasn't English ...):
// to find `S(1000, 10)` I have to create 990 blocks with 10 elements each:
// `\{ d(1), d(2), ..., d(10) \}`,
// `\{ d(2), d(3), ..., d(11) \}`,
// `\{ d(3), d(4), ..., d(12) \}`,
// ...
// `\{ d(991), d(992), ..., d(1000) \}`
// and add the maximum values of all those 990 blocks.
//
// My first step is to fill the container ''numDivisors'' with `d(1)`, `d(2)`, ..., `d(10^8)`.
// The maximum value is 768 so an ''unsigned short'' is sufficient - but it still requires 200 MByte RAM.
//
// I implemented two algorithms to compute the number of divisors of `n`:
// ''countDivisorsSlow'' is based on trial division: it divides `n` by every number between `1` and `sqrt(n)` and counts each divisor (and its partner `n/d`) whenever the remainder is zero.
// That's pretty fast if `n` is small (well, even `10^6` is almost okay) but much too slow for `10^8`.
//
// ''countDivisors'' needs only one second to perform the same task:
// if the number `n` is factorized into its prime factors `n = {p_1}^e_1 * {p_2}^e_2 * {p_3}^e_3 * ...`
// then the number of divisors is `d(n) = (e_1 + 1) * (e_2 + 1) * (e_3 + 1) * ...`
// My first step is to find all prime numbers below `10^8` (see below for a significant optimization / chapter "Note").
// Then all `d(n)` are initialized with 1 and:
// - iterate over all multiples `m` of `2^1 = 2`, multiply `d(m)` by `1+1=2`
// - iterate over all multiples `m` of `2^2 = 4`, multiply `d(m)` by `2+1=3` and divide by `1+1=2` (that's an undo of the previous step)
// - iterate over all multiples `m` of `2^3 = 8`, multiply `d(m)` by `3+1=4` and divide by `2+1=3` (that's an undo of the previous step)
// - iterate over all multiples `m` of `2^4 = 16`, multiply `d(m)` by `4+1=5` and divide by `3+1=4` (that's an undo of the previous step)
// - ... and so on, until `2^x > 10^8`
// - repeat the same procedure for all other primes 3, 5, 7, 11, ...
//
// Now that I have `d(n)`, I need to find the maximums of each blocks. I wrote two algorithms to find them:
// - the obvious ''bruteForce()'' algorithm with two nested loops has `10^8 * 10^5 = 10^13` iterations and is too slow.
// - a smarter algorithm can do the same job in just `10^8` iterations !
//
// In ''search()'', each iteration updates ''mostRecent[d(n)]'' with the current position ''n'' (and enlarges ''mostRecent'' if required).
// Zero is more or less a dummy element and the whole algorithm still works if I omit it but it simplifies index calculations a lot.
// Only once it causes minor problems: the maximum number is the size of ''mostRecent'' minus 1.
//
// For example:
//
// || 3 || 3 || 10 ||
// ||! n ++ d(n) ++ mostRecent ||
// || 0 ++ 0 ++ { 0 } ||
// || 1 ++ 1 ++ { 0,1 } ||
// || 2 ++ 2 ++ { 0,1,2 } ||
// || 3 ++ 2 ++ { 0,1,3 } ||
// || 4 ++ 3 ++ { 0,1,3,4 } ||
// || 5 ++ 2 ++ { 0,1,5,4 } ||
// || 6 ++ 4 ++ { 0,1,5,4,6 } ||
// || 7 ++ 2 ++ { 0,1,7,4,6 } ||
// || 8 ++ 4 ++ { 0,1,7,4,8 } ||
// || 9 ++ 3 ++ { 0,1,7,9,8 } ||
// || 10 ++ 4 ++ { 0,1,7,9,10 } ||
//
// If an element of ''mostRecent'' is too far away, that means if its value is more than ''blockSize'' away from the current position, then it becomes invalid.
// That's only important for the largest indices of ''mostRecent'': they will be removed, thus shrinking ''mostRecent''.
//
// # Note
// `d(n)` is a large number if:
// - `n` has many different prime factors or
// - those prime factors have large exponents
// - or both
// One of the largest values of `d(n)` is `d(73513440) = 768` because `73513440 = 2^5 * 3^3 * 5 * 7 * 11 * 13 * 17`
// such that `d(73513440) = (5+1) * (3+1) * (1+1) * (1+1) * (1+1) * (1+1) * (1+1) = 6 * 4 * 2 * 2 * 2 * 2 * 2 = 768`
//
// In conclusion, pretty much every maximum `d(n)` of a block has typically very small prime factors.
// Assuming that all prime factors of a maximum `d(n)` are less than `sqrt{10^8} = 10^4`, my code will still find the correct result for `S(10^8, 10^5)`.
// However, this "square-root" assumption isn't always true: it fails for `S(1000, 10)` (the problem is the small ''blockSize'').
//
// The less primes I use as prime factors in ''countDivisors'' the faster the code becomes (at the risk of being off, depending on ''blockSize'').
// I manually looked for the smallest "valid" value for ''primeLimit'' for `S(10^8, 10^5)` and it turned out to be 107.
// Using this parameter cuts down the execution time from 1.8 to 1.1 seconds.
//
// # Alternative
// The whole algorithm could be rewritten to process the data in chunks instead of all 100 million at once.
// Memory consumption will drop considerably and I expect performance to remain about the same (maybe a tiny bit slower).
// But the code size will grow considerably and become less readable.
#include <iostream>
#include <vector>
#include <algorithm>
#include <cmath>
// store the number of divisors for the first 100 million numbers
// (the maximum d(n) for n <= 10^8 is 768, see notes above, so 16 bits suffice)
typedef unsigned short Number; // => 200 MByte
std::vector<Number> numDivisors; // numDivisors[i] == d(i); index 0 is a dummy
// slow trial division
void countDivisorsSlow(unsigned int limit)
{
// zero has no divisors
numDivisors = { 0 };
// process all numbers 1..10^8
for (unsigned int current = 1; current <= limit; current++)
{
Number count = 0;
// trial division of all numbers <= sqrt(current)
for (unsigned int divisor = 1; divisor*divisor <= current; divisor++)
{
// divisible ?
if (current % divisor != 0)
continue;
count++; // one divisor if i^2 = x
if (divisor*divisor != current) // or two if not (it's i and x/i)
count++;
}
numDivisors.push_back(count);
}
}
// similar to a prime sieve, much faster than countDivisorsSlow
// Fills numDivisors[0..limit] with d(n). If primeLimit < limit the result can
// be slightly off for numbers whose prime factors exceed primeLimit (see the
// "Note" section above); primeLimit == 0 selects the accurate full sieve.
void countDivisors(unsigned int limit, unsigned int primeLimit = 0)
{
  numDivisors.resize(limit + 1, 1);
  // zero has no divisors
  numDivisors[0] = 0;
  // accurate algorithm by default
  if (primeLimit == 0)
    primeLimit = limit;

  // simple prime sieve (trial division because it has the shortest/most simple implementation)
  std::vector<unsigned int> primes = { 2 };
  for (unsigned int smallPrime = 3; smallPrime <= primeLimit; smallPrime += 2)
  {
    // find primes by trial division against all previously found primes
    bool isPrime = true;
    for (auto p : primes)
      if (smallPrime % p == 0)
      {
        isPrime = false;
        break;
      }
    // yes, found another prime
    if (isPrime)
      primes.push_back(smallPrime);
  }

  // multiply all "simple" multiples of each prime by 2
  // ... and multiply all multiples p^2 by 3 (undoing the factor 2)
  // ... and multiply all multiples p^3 by 4 (undoing the factor 3), etc.
  for (auto p : primes)
  {
    // "simple" multiples: each gains prime factor p at least once => d(n) *= 2
    for (auto i = p; i <= limit; i += p)
      numDivisors[i] *= 2;

    // multiples of p^2, p^3, ...
    // BUGFIX: use 64 bits here — p*p overflows a 32-bit unsigned for
    // p > 65535 (reachable when primeLimit defaults to limit), wrapping to a
    // small value so the while-loop would run and corrupt the counts
    unsigned long long power = (unsigned long long)p * p;
    unsigned int exponent = 2;
    while (power <= limit)
    {
      // undo the previous factor and multiply with the "better" factor
      for (auto i = (unsigned int)power; i <= limit; i += (unsigned int)power)
        numDivisors[i] = (numDivisors[i] / exponent) * (exponent + 1);
      // from p^2 to p^3 to p^4 ... (cannot overflow 64 bits before exceeding limit)
      power *= p;
      exponent++;
    }
  }
}
// enough to verify the example ... (you need to fill numDivisors first)
// O(limit * blockSize) reference implementation of S(limit, blockSize).
// NOTE(review): assumes blockSize <= limit + 1; otherwise the unsigned
// expression limit - blockSize + 1 wraps around — confirm callers guarantee it.
unsigned long long bruteForce(unsigned int limit, unsigned int blockSize)
{
  // for each window [from, from + blockSize - 1] ...
  unsigned long long result = 0;
  for (unsigned int from = 1; from <= limit - blockSize + 1; from++)
  {
    // ... find its maximum number of divisors
    Number maximum = numDivisors[from];
    for (unsigned int i = 1; i < blockSize; i++)
      maximum = std::max(maximum, numDivisors[from + i]);
    // and add it to the result
    //std::cout << maximum << " ";
    result += maximum;
  }
  return result;
}
// fast O(n) algorithm: a single pass iterating over all elements of numDivisors (you need to fill numDivisors first)
// Invariant: mostRecent[d] holds the most recent position i inside the current
// sliding window where numDivisors[i] == d; the highest valid index of
// mostRecent is therefore the window's maximum divisor count.
unsigned long long search(unsigned int limit, unsigned int blockSize)
{
  // store the most recent position a certain number of divisors was encountered
  // actually the maximum is 768 divisors (first seen at 73513440)
  std::vector<unsigned int> mostRecent;

  // seed with the first window (positions 0 .. blockSize-1; index 0 is the dummy element)
  for (unsigned int i = 0; i < blockSize; i++)
  {
    // new/updated maximum
    auto current = numDivisors[i];
    if (current >= mostRecent.size())
      mostRecent.resize(current + 1, 0);
    mostRecent[current] = i;
  }

  unsigned long long result = 0;
  for (unsigned int i = blockSize; i <= limit; i++)
  {
    // remove "old" maximums: trailing entries whose position fell out of the window
    auto tooFar = i - blockSize;
    while (!mostRecent.empty() && mostRecent.back() <= tooFar)
      mostRecent.pop_back();

    // new/updated maximum
    auto current = numDivisors[i];
    if (current >= mostRecent.size())
      mostRecent.resize(current + 1, 0);
    mostRecent[current] = i;

    // highest index is equal to size-1, i.e. the window's maximum d(n)
    result += mostRecent.size() - 1;
  }
  return result;
}
int main()
{
  // defaults match the original problem: S(10^8, 10^5)
  unsigned int limit = 100000000; // 10^8
  unsigned int blockSize = 100000; // 10^5
  // note: both defaults are overwritten by the live-test input
  std::cin >> limit >> blockSize;

  // compute number of divisors for all numbers up to limit
  unsigned int primeLimit = limit;
  // faster heuristic if possible, fails with small blockSize
  if (blockSize >= 100)
    primeLimit = sqrt(limit);
  // lowest limit for default input: I found it by trial'n'error
  if (limit == 100000000 && blockSize == 100000)
    primeLimit = 107;

  // compute number of divisors (might be off a little bit when primeLimit != limit)
  countDivisors(limit, primeLimit);

  // and now find S(limit, blockSize)
  //std::cout << bruteForce(limit, blockSize) << std::endl;
  std::cout << search(limit, blockSize) << std::endl;
  return 0;
}
|
yinziang/CMSProject
|
src/main/java/com/hy/dao/mapper/ImageTextMapper.java
|
package com.hy.dao.mapper;
import com.hy.domain.ImageText;
import java.util.List;
/**
 * Data-access mapper for {@code ImageText} records (presumably backed by
 * MyBatis XML/annotations — confirm against the mapper configuration).
 * By the usual mapper convention the {@code int} returns are affected row
 * counts — verify against the implementation.
 */
public interface ImageTextMapper {
    /** Deletes the record with the given primary key. */
    int deleteByPrimaryKey(Integer id);

    /** Inserts a new record. */
    int insert(ImageText record);

    /** Loads a single record by primary key. */
    ImageText selectByPrimaryKey(Integer id);

    /** Returns all records. */
    List<ImageText> selectAll();

    /** Updates the record identified by its primary key. */
    int updateByPrimaryKey(ImageText record);

    /** Returns all records with the given part id, ordered by sid. */
    List<ImageText> selectAllByPartIdOrderBySid(Integer partId);
}
|
ScottEllisNovatex/opendatacon
|
Code/Ports/SimPort/SimPortConf.h
|
/* opendatacon
*
* Copyright (c) 2014:
*
* DCrip3fJguWgVCLrZFfA7sIGgvx1Ou3fHfCxnrz4svAi
* yxeOtDhDCXf1Z4ApgXvX5ahqQmzRfJ2DoX8S05SqHA==
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* SimPortConf.h
*
* Created on: 2015-12-16
* Author: <NAME> <<EMAIL>>
*/
#ifndef SIMPORTCONF_H
#define SIMPORTCONF_H
#include "SimPortData.h"
#include "sqlite3/sqlite3.h"
#include <opendatacon/DataPortConf.h>
#include <opendatacon/IOTypes.h>
#include <json/json.h>
#include <memory>
using namespace odc;
// shared ownership of a prepared SQLite statement
using DB_STATEMENT = std::shared_ptr<sqlite3_stmt>;

// How event timestamps are interpreted; values are single bits so modes can be
// combined (bitwise operators enabled below via ENABLE_BITWISE).
enum class TimestampMode : uint8_t
{
	FIRST = 1,
	ABSOLUTE_T = 1<<1,
	FASTFORWARD = 1<<2,
	TOD = 1<<3
};

namespace odc
{
// generate bitwise |, &, ^ ... for the flag enum above
ENABLE_BITWISE(TimestampMode)
}
// Hide some of the code to make Logging cleaner
#define LOGTRACE(...) \
if (auto log = odc::spdlog_get("SimPort")) \
log->trace(__VA_ARGS__)
#define LOGDEBUG(...) \
if (auto log = odc::spdlog_get("SimPort")) \
log->debug(__VA_ARGS__)
#define LOGERROR(...) \
if (auto log = odc::spdlog_get("SimPort")) \
log->error(__VA_ARGS__)
#define LOGWARN(...) \
if (auto log = odc::spdlog_get("SimPort")) \
log->warn(__VA_ARGS__)
#define LOGINFO(...) \
if (auto log = odc::spdlog_get("SimPort")) \
log->info(__VA_ARGS__)
// Parsed configuration for a SimPort: owns the simulated point data
// (analogs, binaries, binary controls), prepared SQLite statements, feedback
// definitions and named timers, and exposes typed accessors for the port.
class SimPortConf: public DataPortConf
{
public:
	SimPortConf();

	// Parse the port's JSON configuration into this object.
	void ProcessElements(const Json::Value& json_root);

	// Prepared SQLite statements keyed by name (filled by m_ProcessSQLite3).
	std::unordered_map<std::string, DB_STATEMENT> GetDBStats() const;
	// How timestamps are interpreted (bitwise TimestampMode flags).
	TimestampMode TimestampHandling() const;
	void Name(const std::string& name);
	double DefaultStdDev() const;
	std::string HttpAddress() const;
	std::string HttpPort() const;
	std::string Version() const;
	double StdDev(std::size_t index) const;

	// Point/event accessors: overloads taking a value are setters, the
	// corresponding const overloads are getters.
	void Event(std::shared_ptr<odc::EventInfo> event);
	std::shared_ptr<odc::EventInfo> Event(odc::EventType type, std::size_t index) const;
	void SetLatestControlEvent(std::shared_ptr<odc::EventInfo> event, std::size_t index);
	void Payload(odc::EventType type, std::size_t index, double payload);
	double Payload(odc::EventType type, std::size_t index) const;
	double StartValue(odc::EventType type, std::size_t index) const;
	void ForcedState(odc::EventType type, std::size_t index, bool value);
	bool ForcedState(odc::EventType type, std::size_t index) const;
	void UpdateInterval(odc::EventType type, std::size_t index, std::size_t value);
	std::size_t UpdateInterval(odc::EventType type, std::size_t) const;

	// Snapshots of the simulated state (all points / selected indexes).
	Json::Value CurrentState() const;
	std::string CurrentState(odc::EventType type, std::vector<std::size_t>& indexes) const;

	// Named timer registry.
	void Timer(const std::string& name, ptimer_t ptr);
	ptimer_t Timer(const std::string& name) const;
	void CancelTimers();

	bool IsIndex(odc::EventType type, std::size_t index) const;
	std::vector<std::size_t> Indexes(odc::EventType type) const;
	std::vector<std::shared_ptr<BinaryFeedback>> BinaryFeedbacks(std::size_t index) const;
	std::shared_ptr<PositionFeedback> GetPositionFeedback(std::size_t index) const;

private:
	std::string m_name;
	TimestampMode m_timestamp_handling;
	std::unordered_map<std::string, DB_STATEMENT> m_db_stats;
	std::shared_ptr<SimPortData> m_pport_data;

	// JSON section parsers used by ProcessElements
	bool m_ParseIndexes(const Json::Value& data, std::size_t& start, std::size_t& stop) const;
	void m_ProcessAnalogs(const Json::Value& analogs);
	// parameter renamed from the original misspelling "binaires";
	// declaration-only parameter names may differ from the definition
	void m_ProcessBinaries(const Json::Value& binaries);
	void m_ProcessBinaryControls(const Json::Value& binary_controls);
	void m_ProcessSQLite3(const Json::Value& sqlite, std::size_t index);
	void m_ProcessFeedbackBinaries(const Json::Value& feedback_binaries, std::size_t index,
		std::size_t update_interval);
	void m_ProcessFeedbackPosition(const Json::Value& feedback_position, std::size_t index);
};
#endif // SIMPORTCONF_H
|
jerrylovepizza/JavaLearningmanual
|
project/小米商城/shopping/src/com/mylifes1110/java/dao/OrderDao.java
|
package com.mylifes1110.java.dao;
import com.mylifes1110.java.bean.Order;
import java.sql.SQLException;
import java.util.List;
/**
 * Data-access interface for orders. All methods propagate
 * {@link SQLException} from the underlying JDBC layer.
 */
public interface OrderDao {
    /** Returns all orders belonging to the given user id. */
    List<Order> selectOrderByUserId(int userId) throws SQLException;

    /** Persists a new order. */
    void insertOrder(Order order) throws SQLException;

    /** Loads the order identified by oid — presumably to read its amount; confirm against the implementation. */
    Order selectOrderMoney(String oid) throws SQLException;

    /** Updates the order identified by oid (which fields change depends on the implementation). */
    void updateOrder(String oid) throws SQLException;

    /** Loads the order identified by oid. */
    Order selectOrderByOid(String oid) throws SQLException;
}
|
nistefan/cmssw
|
FWCore/Framework/src/ESProxyFactoryProducer.cc
|
<filename>FWCore/Framework/src/ESProxyFactoryProducer.cc<gh_stars>1-10
// -*- C++ -*-
//
// Package: Framework
// Class : ESProxyFactoryProducer
//
// Implementation:
// <Notes on implementation>
//
// Author: <NAME>
// Created: Thu Apr 7 21:36:15 CDT 2005
//
// system include files
// user include files
#include "FWCore/Framework/interface/ESProxyFactoryProducer.h"
#include "FWCore/Framework/interface/ProxyFactoryBase.h"
#include "FWCore/Framework/interface/DataProxy.h"
#include "FWCore/Utilities/interface/Exception.h"
#include <algorithm>
#include <cassert>
//
// constants, enums and typedefs
//
using namespace edm::eventsetup;
namespace edm {
typedef std::multimap< EventSetupRecordKey, FactoryInfo > Record2Factories;
//
// static data member definitions
//
//
// constructors and destructor
//
// Default-construct with an empty factory registry.
ESProxyFactoryProducer::ESProxyFactoryProducer() : record2Factories_()
{
}

// ESProxyFactoryProducer::ESProxyFactoryProducer(const ESProxyFactoryProducer& rhs)
// {
//    // do actual copying here;
// }

// Declared noexcept(false), i.e. the destructor is explicitly allowed to throw.
ESProxyFactoryProducer::~ESProxyFactoryProducer() noexcept(false)
{
}
//
// assignment operators
//
// const ESProxyFactoryProducer& ESProxyFactoryProducer::operator=(const ESProxyFactoryProducer& rhs)
// {
// //An exception safe implementation is
// ESProxyFactoryProducer temp(rhs);
// swap(rhs);
//
// return *this;
// }
//
// member functions
//
// Instantiate one DataProxy from every factory registered for iRecord and hand
// the (key, proxy) pairs to the framework via iProxies. Factories whose
// makeProxy() returns null are silently skipped.
void
ESProxyFactoryProducer::registerProxies(const EventSetupRecordKey& iRecord,
                                        KeyedProxies& iProxies)
{
   typedef Record2Factories::iterator Iterator;
   // all factories registered under this record key
   std::pair< Iterator, Iterator > range = record2Factories_.equal_range(iRecord);
   for(Iterator it = range.first; it != range.second; ++it) {
      // transfer ownership from the unique_ptr returned by the factory
      std::shared_ptr<DataProxy> proxy(it->second.factory_->makeProxy().release());
      if(nullptr != proxy.get()) {
         iProxies.push_back(KeyedProxies::value_type((*it).second.key_,
                                                     proxy));
      }
   }
}
// Register iFactory to produce data of record type iRecord under the optional
// label iLabel. Throws cms::Exception("IdenticalProducts") if a factory with
// the same key was already registered for this record.
void
ESProxyFactoryProducer::registerFactoryWithKey(const EventSetupRecordKey& iRecord ,
                                               std::unique_ptr<ProxyFactoryBase> iFactory,
                                               const std::string& iLabel )
{
   if(nullptr == iFactory.get()) {
      // programming error: a null factory can never produce data
      // (message typo fixed: "Factor" -> "Factory")
      assert(false && "Factory pointer was null");
      ::exit(1);
   }

   usingRecordWithKey(iRecord);

   // take shared ownership of the factory
   std::shared_ptr<ProxyFactoryBase> temp(iFactory.release());
   FactoryInfo info(temp->makeKey(iLabel),
                    temp);

   // has this already been registered?
   std::pair<Record2Factories::const_iterator,Record2Factories::const_iterator> range =
      record2Factories_.equal_range(iRecord);
   if(range.second != std::find_if(range.first,range.second,
                                   [&info](const auto& r2f) {
                                      return r2f.second.key_ == info.key_;
                                   }
      ) ) {
      throw cms::Exception("IdenticalProducts")<<"Producer has been asked to produce "<<info.key_.type().name()
                                               <<" \""<<info.key_.name().value()<<"\" multiple times.\n Please modify the code.";
   }

   record2Factories_.insert(Record2Factories::value_type(iRecord,
                                                         std::move(info)));
}
// A new validity interval has begun for iRecordType: invalidate the proxies so
// their data is re-fetched on next access. The interval value itself is unused.
void
ESProxyFactoryProducer::newInterval(const EventSetupRecordKey& iRecordType,
                                    const ValidityInterval& /*iInterval*/)
{
   invalidateProxies(iRecordType);
}
//
// const member functions
//
//
// static member functions
//
}
|
Znigneering/CSCI-3154
|
graph-tool-2.27/src/graph/util/graph_search.hh
|
<gh_stars>0
// graph-tool -- a general graph modification and manipulation thingy
//
// Copyright (C) 2006-2018 <NAME> <<EMAIL>>
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 3
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef GRAPH_SEARCH_HH
#define GRAPH_SEARCH_HH
#include "graph_python_interface.hh"
#include "graph_util.hh"
#include "hash_map_wrap.hh"
#ifdef _OPENMP
#include <omp.h>
#include <boost/type_traits.hpp>
#endif
namespace graph_tool
{
using namespace std;
using namespace boost;
// sort sequences lexicographically
// Element-wise compare; if one vector is a prefix of the other, the shorter
// one compares <=. Implicitly inline (function template), so header-safe.
// NOTE(review): std::vector already provides operator<= in namespace std —
// confirm this graph_tool-namespace overload does not become ambiguous via
// ADL at the call sites that rely on it.
template <class ValueType>
bool operator<=(const vector<ValueType>& v1, const vector<ValueType>& v2)
{
    for (size_t i = 0; i < min(v1.size(), v2.size()); ++i)
    {
        if (v1[i] != v2[i])
            return (v1[i] <= v2[i]);
    }
    return (v1.size() <= v2.size());
}
// sort strings in alphabetical (ASCII) order
// "inline" added: this is a non-template function defined in a header, so
// without it every translation unit including this file emits its own
// definition — an ODR violation (duplicate-symbol link error).
inline bool operator<=(const std::string& s1, const std::string& s2)
{
    // compare character by character; decide at the first difference
    for (std::size_t i = 0; i < std::min(s1.size(), s2.size()); ++i)
    {
        if (s1[i] != s2[i])
            return (s1[i] <= s2[i]);
    }
    // one string is a prefix of the other: the shorter one sorts first
    return (s1.size() <= s2.size());
}
// find vertices which match a certain (inclusive) property range
// prange is a Python 2-tuple (lo, hi); matching vertices are appended to ret
// as PythonVertex wrappers. Appends happen under "omp critical", so the order
// of results is nondeterministic when the loop runs in parallel.
struct find_vertices
{
    template <class Graph, class DegreeSelector>
    void operator()(Graph& g, GraphInterface& gi, DegreeSelector deg,
                    python::tuple& prange, python::list& ret) const
    {
        typedef typename DegreeSelector::value_type value_type;
        pair<value_type,value_type> range;
        range.first = python::extract<value_type>(prange[0]);
        range.second = python::extract<value_type>(prange[1]);
#ifdef _OPENMP
        size_t __attribute__ ((unused)) nt = omp_get_num_threads();
        if (std::is_convertible<value_type,python::object>::value)
            nt = 1; // python is not thread-safe
#endif
        auto gp = retrieve_graph_view<Graph>(gi, g);
        // equality ranges are handled with == so that e.g. NaN-free float
        // semantics match an exact query
        bool is_eq = range.first == range.second;
        #pragma omp parallel if (num_vertices(g) > OPENMP_MIN_THRESH) num_threads(nt)
        parallel_vertex_loop_no_spawn
            (g,
             [&](auto v)
             {
                 value_type val = deg(v, g);
                 if ((is_eq && (val == range.first)) ||
                     (!is_eq && (range.first <= val && val <= range.second)))
                 {
                     PythonVertex<Graph> pv(gp, v);
                     auto& ret_ = ret; // work around clang
                     #pragma omp critical
                     {
                         ret_.append(pv);
                     }
                 }
             });
    }
};
// find edges which match a certain (inclusive) property range
// Like find_vertices, but for edges; for undirected graphs edge_set
// de-duplicates the two half-edges via the edge index.
// NOTE(review): edge_set is read and written inside the parallel loop without
// synchronization — this looks racy for undirected graphs when nt > 1;
// confirm against upstream graph-tool whether the loop is serialized there.
struct find_edges
{
    template <class Graph, class EdgeIndex, class EdgeProperty>
    void operator()(Graph& g, GraphInterface& gi, EdgeIndex eindex,
                    EdgeProperty prop, python::tuple& prange, python::list& ret)
        const
    {
        typedef typename property_traits<EdgeProperty>::value_type value_type;
        pair<value_type,value_type> range;
        range.first = python::extract<value_type>(prange[0]);
        range.second = python::extract<value_type>(prange[1]);
        gt_hash_set<size_t> edge_set;
#ifdef _OPENMP
        size_t __attribute__ ((unused)) nt = omp_get_num_threads();
        if (std::is_convertible<value_type,python::object>::value)
            nt = 1; // python is not thread-safe
#endif
        auto gp = retrieve_graph_view<Graph>(gi, g);
        bool is_eq = range.first == range.second;
        #pragma omp parallel if (num_vertices(g) > OPENMP_MIN_THRESH) \
            num_threads(nt)
        parallel_edge_loop_no_spawn
            (g,
             [&](auto e)
             {
                 // undirected: report each edge only once
                 if (!graph_tool::is_directed(g))
                 {
                     if (edge_set.find(eindex[e]) == edge_set.end())
                         edge_set.insert(eindex[e]);
                     else
                         return;
                 }
                 value_type val = get(prop, e);
                 if ((is_eq && (val == range.first)) ||
                     (!is_eq && (range.first <= val && val <= range.second)))
                 {
                     PythonEdge<Graph> pe(gp, e);
                     auto& ret_ = ret; // work around clang
                     #pragma omp critical
                     {
                         ret_.append(pe);
                     }
                 }
             });
    }
};
} // graph_tool namespace
#endif // GRAPH_SEARCH_HH
|
gsage/engine
|
PlugIns/SDL/src/SDLPlugin.cpp
|
<filename>PlugIns/SDL/src/SDLPlugin.cpp
/*
-----------------------------------------------------------------------------
This file is a part of Gsage engine
Copyright (c) 2014-2017 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "SDLPlugin.h"
#include "SDLWindowManager.h"
#include "SDLInputListener.h"
namespace Gsage {

  // identifier under which this plugin registers all of its facade hooks
  const std::string PLUGIN_NAME = "SDL";

  SDLPlugin::SDLPlugin()
  {
  }

  SDLPlugin::~SDLPlugin()
  {
  }

  // Plugin identifier reported to the facade.
  const std::string& SDLPlugin::getName() const
  {
    return PLUGIN_NAME;
  }

  // Register the SDL window manager and input factory with the engine facade.
  // Returns false (install fails) if the input factory cannot be registered.
  bool SDLPlugin::installImpl()
  {
    // TODO: get parameters from config somehow?
    mSDLCore.initialize(DataProxy(), mFacade);
    mFacade->registerWindowManager<SDLWindowManager>("SDL", &mSDLCore);
    SDLInputFactory* f = mFacade->registerInputFactory<SDLInputFactory>("SDL");
    if(!f) {
      return false;
    }
    f->setSDLCore(&mSDLCore);
    return true;
  }

  // Undo installImpl: unregister both facade hooks, then shut SDL down.
  void SDLPlugin::uninstallImpl()
  {
    mFacade->removeWindowManager("SDL");
    mFacade->removeInputFactory("SDL");
    mSDLCore.tearDown();
  }

  // This plugin exposes no Lua bindings.
  void SDLPlugin::setupLuaBindings()
  {
  }
}
// the single plugin instance owned by this shared library
Gsage::SDLPlugin* sdlPlugin = NULL;

// Plugin entry point called by the engine; returns false if the plugin is
// already started or installation fails.
extern "C" bool PluginExport dllStartPlugin(Gsage::GsageFacade* facade)
{
  if(sdlPlugin != NULL)
  {
    return false;
  }
  sdlPlugin = new Gsage::SDLPlugin();
  if(!facade->installPlugin(sdlPlugin))
  {
    // BUGFIX: roll back on failure — otherwise the stale non-null pointer
    // leaks the instance and makes every later dllStartPlugin call fail
    delete sdlPlugin;
    sdlPlugin = NULL;
    return false;
  }
  return true;
}
// Plugin exit point called by the engine. Idempotent: returns true when the
// plugin was never started. If uninstallation fails, the instance is kept
// alive (still registered with the facade) and false is returned.
extern "C" bool PluginExport dllStopPlugin(Gsage::GsageFacade* facade)
{
  if(sdlPlugin == NULL)
    return true;

  bool res = facade->uninstallPlugin(sdlPlugin);
  if(!res)
    return false;

  delete sdlPlugin;
  sdlPlugin = NULL;
  return true;
}
|
ImogenBits/algobattle
|
algobattle/battle_wrapper.py
|
<reponame>ImogenBits/algobattle<filename>algobattle/battle_wrapper.py
"""Base class for wrappers that execute a specific kind of battle.
The battle wrapper class is a base class for specific wrappers, which are
responsible for executing specific types of battle. They share the
characteristic that they are responsible for updating some match data during
their run, such that it contains the current state of the match.
"""
import logging
from abc import ABC, abstractmethod
logger = logging.getLogger('algobattle.battle_wrapper')
class BattleWrapper(ABC):
    """Base class for wrappers that execute a specific kind of battle."""

    @abstractmethod
    def wrapper(self, match, options: dict) -> None:
        """The main base method for a wrapper.

        In order to manage the execution of a match, the wrapper needs the match object and possibly
        some options that are specific to the individual battle wrapper.

        A wrapper should update the match.match_data dict during its run by calling
        the match.update_match_data method. This ensures that the callback
        functionality around the match_data dict is properly executed.

        It is assumed that the match.generating_team and match.solving_team are
        set before calling a wrapper.

        Parameters
        ----------
        match: Match
            The Match object on which the battle wrapper is to be executed on.
        options: dict
            Additional options for the wrapper.
        """
        raise NotImplementedError

    # BUGFIX: the original declared this abstract method without ``self`` and
    # without ``@staticmethod``, so calling it on an instance silently bound
    # the instance as ``match_data``. Declaring it a static method keeps
    # class-level calls working and makes instance-level calls correct too.
    @staticmethod
    @abstractmethod
    def calculate_points(match_data: dict, achievable_points: int) -> dict:
        """Calculate the number of achieved points, given results.

        As awarding points completely depends on the type of battle that
        was fought, each wrapper should implement a method that determines
        how to split up the achievable points among all teams, given
        the match_data.

        Parameters
        ----------
        match_data : dict
            dict containing the results of match.run().
        achievable_points : int
            Number of achievable points.

        Returns
        -------
        dict
            A mapping between team names and their achieved points.
            The format is {(team_x_name, team_y_name): points [...]} for each
            pair (x,y) for which there is an entry in match_data and points is a
            float value.
        """
        raise NotImplementedError

    def format_as_utf8(self, match_data: dict) -> str:
        """Format the match_data for the battle wrapper as a UTF-8 string.

        Default fallback implementation: subclasses that are ui-compatible
        should override this. The output should not exceed 80 characters,
        assuming the default of a battle of 5 rounds.

        Parameters
        ----------
        match_data : dict
            dict containing match data generated by match.run().

        Returns
        -------
        str
            A formatted string on the basis of the match_data.
        """
        formatted_output_string = ""
        formatted_output_string += 'Battles of type {} are currently not compatible with the ui.'.format(match_data['type'])
        formatted_output_string += 'Here is a dump of the match_data dict anyway:\n{}'.format(match_data)
        return formatted_output_string
|
Seitenbau/Sonferenz
|
sonferenz-web/src/main/java/de/bitnoise/sonferenz/web/pages/admin/tabs/LogOutputPanel.java
|
package de.bitnoise.sonferenz.web.pages.admin.tabs;
import static ch.qos.logback.core.CoreConstants.LINE_SEPARATOR;
import java.io.StringWriter;
import org.apache.wicket.Component;
import org.apache.wicket.markup.html.basic.Label;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.html.HTMLLayout;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggingEvent;
import ch.qos.logback.core.CoreConstants;
import ch.qos.logback.core.html.CssBuilder;
import ch.qos.logback.core.read.CyclicBufferAppender;
import de.bitnoise.sonferenz.web.app.KonferenzSession;
import de.bitnoise.sonferenz.web.component.panels.KonferenzTabPanel;
import de.bitnoise.sonferenz.web.pages.log.LoggingPage;
/**
 * Admin tab that renders the most recent logging events captured by logback's
 * {@code CyclicBufferAppender} as an HTML table built by {@link HTMLLayout}.
 * Non-admin sessions get an empty panel.
 */
public class LogOutputPanel extends KonferenzTabPanel
{
  /** Inline CSS handed to logback's {@link HTMLLayout} for the log table. */
  private final class MyCSS implements CssBuilder {
    @Override
    public void addCss(StringBuilder sbuf) {
      // consistently use the statically imported LINE_SEPARATOR
      // (the original mixed it with the equivalent CoreConstants.LINE_SEPARATOR)
      sbuf.append("<style type=\"text/css\">");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("table { margin-left: 0em; margin-right: 0em; border-left: 2px solid #AAA; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("TR.even { background: #FFFFFF; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("TR.odd { background: #EAEAEA; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("TR.warn TD.Level, TR.error TD.Level, TR.fatal TD.Level {font-weight: bold; color: #FF4040 }");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("TD { padding-right: 1ex; padding-left: 1ex; border-right: 2px solid #AAA; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("TD.Time, TD.Date { text-align: right; font-family: courier, monospace; font-size: smaller; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("TD.Thread { text-align: left; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("TD.Level { text-align: right; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("TD.Logger { text-align: left; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("TR.header { background: #596ED5; color: #FFF; font-weight: bold; font-size: larger; }");
      sbuf.append(LINE_SEPARATOR);
      sbuf
          .append("TD.Exception { background: #A2AEE8; font-family: courier, monospace;}");
      sbuf.append(LINE_SEPARATOR);
      sbuf.append("</style>");
      sbuf.append(LINE_SEPARATOR);
    }
  }

  public LogOutputPanel(String id)
  {
    super(id);
  }

  // NOTE(review): appears unused in this class — confirm before removing.
  private static final String USERID_MDC_KEY = "MDC";

  /** Name of the logback appender that buffers the recent events. */
  private static final String CYCLIC_BUFFER_APPENDER_NAME = "CYCLIC";

  // NOTE(review): logger is keyed to LoggingPage.class, not this class — verify
  // whether that is intentional before changing it.
  Logger logger = LoggerFactory.getLogger(LoggingPage.class);

  /** logback pattern describing which columns the HTML table shows. */
  static String PATTERN = "%d%thread%level%logger{25}%msg";

  @Override
  protected void onInitialize()
  {
    super.onInitialize();

    // configure the HTML renderer for the buffered events
    LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
    HTMLLayout layout = new HTMLLayout();
    layout.setContext(lc);
    layout.setPattern(PATTERN);
    layout.setCssBuilder(new MyCSS());
    // layout.setTitle("Last Logging Events");
    layout.start();

    LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
    CyclicBufferAppender<ILoggingEvent> cyclicBufferAppender = getCyclicBuffer(context);

    // only admins may see log output; everyone else gets an empty label
    String content = "";
    if (KonferenzSession.get().isAdmin())
    {
      content = content(layout, cyclicBufferAppender);
    }
    Component label = new Label("text", content)
        .setEscapeModelStrings(false); // content is pre-rendered HTML
    add(label);
  }

  /**
   * Looks up the cyclic buffer appender registered on the root logger, or null
   * if none is configured.
   */
  @SuppressWarnings("unchecked") // logback's getAppender returns the raw Appender type
  public CyclicBufferAppender<ILoggingEvent> getCyclicBuffer(
      LoggerContext context)
  {
    CyclicBufferAppender<ILoggingEvent> cyclicBufferAppender = (CyclicBufferAppender<ILoggingEvent>) context
        .getLogger(Logger.ROOT_LOGGER_NAME).getAppender(
            CYCLIC_BUFFER_APPENDER_NAME);
    return cyclicBufferAppender;
  }

  /**
   * Renders the buffered events as an HTML fragment, or a one-row message if
   * the buffer is missing or empty.
   */
  private String content(HTMLLayout layout,
      CyclicBufferAppender<ILoggingEvent> cyclicBufferAppender)
  {
    StringWriter output = new StringWriter();
    int count = -1;
    if (cyclicBufferAppender != null)
    {
      count = cyclicBufferAppender.getLength();
    }
    if (count == -1)
    {
      output.append("<tr><td>Failed to locate CyclicBuffer</td></tr>\r\n");
    }
    else if (count == 0)
    {
      output.append("<tr><td>No logging events to display</td></tr>\r\n");
    }
    else
    {
      output.append(layout.getFileHeader());
      output.append(layout.getPresentationHeader());
      LoggingEvent le;
      for (int i = 0; i < count; i++)
      {
        le = (LoggingEvent) cyclicBufferAppender.get(i);
        output.append(layout.doLayout(le) + "\r\n");
      }
      output.append(layout.getPresentationFooter());
      output.append(layout.getFileFooter());
    }
    return output.toString();
  }
}
|
shahvineet98/dagster
|
python_modules/libraries/dagster-gcp/dagster_gcp/dataproc/configs.py
|
<filename>python_modules/libraries/dagster-gcp/dagster_gcp/dataproc/configs.py
from dagster import Dict, Field, String
from .configs_dataproc_cluster import define_dataproc_cluster_config
from .configs_dataproc_job import define_dataproc_job_config
def define_dataproc_create_cluster_config():
cluster_name = Field(
String,
description='''Required. The cluster name. Cluster names within a project must be unique.
Names of deleted clusters can be reused.''',
is_optional=False,
)
return Field(
Dict(
fields={
'projectId': _define_project_id_config(),
'region': _define_region_config(),
'clusterName': cluster_name,
'cluster_config': define_dataproc_cluster_config(),
}
)
)
def define_dataproc_submit_job_config():
return Field(
Dict(
fields={
'job': define_dataproc_job_config(),
'projectId': _define_project_id_config(),
'region': _define_region_config(),
}
)
)
def _define_project_id_config():
return Field(
String,
description='''Required. Project ID for the project which the client acts on behalf of. Will
be passed when creating a dataset / job. If not passed, falls back to the default inferred
from the environment.''',
is_optional=False,
)
def _define_region_config():
return Field(String, is_optional=False)
|
gsmcwhirter/discord-bot-lib
|
discordapi/etf/helpers.go
|
package etf
import (
"encoding/binary"
"fmt"
"io"
"github.com/gsmcwhirter/go-util/v8/errors"
"github.com/gsmcwhirter/discord-bot-lib/v20/snowflake"
)
func writeLength16(b io.Writer, n int) error {
// assumes the Atom identifier byte has already been written
size, err := intToInt16Slice(n)
if err != nil {
return errors.Wrap(err, "couldn't marshal length")
}
_, err = b.Write(size)
return errors.Wrap(err, "could not write length")
}
func writeLength32(b io.Writer, n int) error {
size, err := intToInt32Slice(n)
if err != nil {
return errors.Wrap(err, "could not marshal length")
}
_, err = b.Write(size)
return errors.Wrap(err, "could not write length")
}
func marshalMapTo(b io.Writer, v []Element) error {
var err error
if len(v)%2 != 0 {
return errors.Wrap(ErrBadMarshalData, "bad parity on map list")
}
err = writeLength32(b, len(v)/2)
if err != nil {
return errors.Wrap(err, "couldn't marshal map length")
}
for i := 0; i < len(v); i += 2 {
if !v[i].Code.IsStringish() {
return errors.Wrap(ErrBadMarshalData, "bad map key")
}
err = v[i].MarshalTo(b)
if err != nil {
return errors.Wrap(err, "couldn't marshal map key")
}
err = v[i+1].MarshalTo(b)
if err != nil {
return errors.Wrap(err, "couldn't marshal map value")
}
}
return nil
}
func marshalListTo(b io.Writer, v []Element) error {
err := writeLength32(b, len(v))
if err != nil {
return errors.Wrap(err, "couldn't marshal list length")
}
for _, e := range v {
err = e.MarshalTo(b)
if err != nil {
return errors.Wrap(err, "couldn't marshal list value")
}
}
_, err = b.Write([]byte{byte(EmptyList)})
return errors.Wrap(err, "couldn't write trailing list byte")
}
func marshalBinaryTo(b io.Writer, v []byte) error {
err := writeLength32(b, len(v))
if err != nil {
return errors.Wrap(err, "couldn't marshal binary length")
}
_, err = b.Write(v)
return errors.Wrap(err, "couldn't marshal binary value")
}
// for Atom, String
func marshalStringTo(b io.Writer, v []byte) error {
err := writeLength16(b, len(v))
if err != nil {
return errors.Wrap(err, "couldn't marshal string length")
}
_, err = b.Write(v)
return errors.Wrap(err, "couldn't marshal string value")
}
// for SmallBig, LargeBig
func marshalInt64To(b io.Writer, v []byte) error {
var err error
if len(v) != 9 {
return errors.Wrap(ErrBadMarshalData, "not a int64 byte slice")
}
_, err = b.Write([]byte{byte(len(v) - 1)})
if err != nil {
return errors.Wrap(err, "couldn't marshal Int64 size")
}
_, err = b.Write(v)
return errors.Wrap(err, "couldn't marshal Int64 value")
}
func marshalInt32To(b io.Writer, v []byte) error {
var err error
if len(v) != 4 {
return errors.Wrap(ErrBadMarshalData, "not a int32 byte slice")
}
_, err = b.Write(v)
return errors.Wrap(err, "couldn't marshal Int32 value")
}
func marshalInt8To(b io.Writer, v []byte) error {
var err error
if len(v) != 1 {
return errors.Wrap(ErrBadMarshalData, "not a int8 byte slice")
}
_, err = b.Write(v)
return errors.Wrap(err, "couldn't marshal Int8 value")
}
func unmarshalSlice(raw []byte, numElements int) (uint32, []Element, error) {
var size int
var idx uint32
var deltaIdx uint32
var err error
e := make([]Element, numElements)
for i := 0; i < numElements; i++ {
e[i].Code = Code(raw[idx])
idx++
switch e[i].Code {
case Map:
size, err = int32SliceToInt(raw[idx : idx+4])
if err != nil {
return 0, nil, errors.Wrap(err, "could not read map length")
}
idx += 4
deltaIdx, e[i].Vals, err = unmarshalSlice(raw[idx:], size*2)
if err != nil {
return 0, nil, errors.Wrap(err, "could not unmarshal map")
}
idx += deltaIdx
case Atom, String:
size, err = int16SliceToInt(raw[idx : idx+2])
if err != nil {
return 0, nil, errors.Wrap(err, "could not read atom/string length")
}
idx += 2
e[i].Val = raw[idx : idx+uint32(size)]
idx += uint32(size)
case List:
size, err = int32SliceToInt(raw[idx : idx+4])
if err != nil {
return 0, nil, errors.Wrap(err, "coult not read list length")
}
idx += 4
deltaIdx, e[i].Vals, err = unmarshalSlice(raw[idx:], size)
if err != nil {
return 0, nil, err
}
idx += deltaIdx
if raw[idx] != byte(EmptyList) {
return 0, nil, ErrBadPayload
}
idx++
case Binary:
size, err = int32SliceToInt(raw[idx : idx+4])
if err != nil {
return 0, nil, errors.Wrap(err, "could not read binary length")
}
idx += 4
e[i].Val = raw[idx : idx+uint32(size)]
idx += uint32(size)
case Int32:
e[i].Val = raw[idx : idx+4]
idx += 4
case Int8: // small int
e[i].Val = raw[idx : idx+1]
idx++
case EmptyList:
case SmallBig:
size = int(raw[idx])
idx++
e[i].Val = raw[idx : idx+uint32(size)+1]
idx += uint32(size) + 1
// case LargeBig:
// size, err = Int32SliceToInt(raw[idx : idx+4])
// if err != nil {
// return 0, nil, errors.Wrap(err, "could not read largebig length")
// }
// idx += 4
// e[i].Val = raw[idx : idx+uint32(size)]
// idx += uint32(size)
default:
return 0, nil, errors.Wrap(ErrBadFieldType, fmt.Sprintf("type=%v", e[i].Code))
}
}
return idx, e, nil
}
func intToInt8Slice(v int) ([]byte, error) {
if v < 0 || v > 255 {
return nil, ErrOutOfBounds
}
return []byte{byte(v)}, nil
}
func int8SliceToInt(v []byte) (int, error) {
if len(v) != 1 {
return 0, ErrOutOfBounds
}
return int(v[0]), nil
}
func intToInt16Slice(v int) ([]byte, error) {
if v < 0 || v >= (1<<16) {
return nil, ErrOutOfBounds
}
size := make([]byte, 2)
binary.BigEndian.PutUint16(size, uint16(v))
return size, nil
}
func int16SliceToInt(v []byte) (int, error) {
if len(v) != 2 {
return 0, ErrOutOfBounds
}
return int(binary.BigEndian.Uint16(v)), nil
}
func intToInt32Slice(v int) ([]byte, error) {
if v < 0 || v >= (1<<32) {
return nil, ErrOutOfBounds
}
size := make([]byte, 4)
binary.BigEndian.PutUint32(size, uint32(v))
return size, nil
}
func int64ToInt64Slice(v int64) ([]byte, error) {
data := make([]byte, 9)
if v < 0 {
v = -v
data[0] = 1
}
binary.LittleEndian.PutUint64(data[1:], uint64(v))
return data, nil
}
func int32SliceToInt(v []byte) (int, error) {
if len(v) != 4 {
return 0, ErrOutOfBounds
}
return int(binary.BigEndian.Uint32(v)), nil
}
func intNSliceToInt64(v []byte) (int64, error) {
var newV []byte
if len(v) > 8 {
return 0, ErrOutOfBounds
}
if len(v) < 8 {
newV = make([]byte, 8)
copy(newV[8-len(v):], v)
} else {
newV = v
}
return int64(binary.LittleEndian.Uint64(newV)), nil
}
// ElementMapToElementSlice converts an string->Element map into a slice of Elements (kv pairs)
func ElementMapToElementSlice(m map[string]Element) ([]Element, error) {
e := make([]Element, 0, len(m)*2)
for k, v := range m {
el, err := NewBinaryElement([]byte(k))
if err != nil {
return nil, errors.Wrap(err, "could not create Element for key")
}
e = append(e, el, v)
}
return e, nil
}
// MapAndIDFromElement converts a Map element into a string->Element map and attempts to extract
// an id Snowflake from the "id" field
func MapAndIDFromElement(e Element) (map[string]Element, snowflake.Snowflake, error) {
eMap, err := e.ToMap()
if err != nil {
return eMap, 0, errors.Wrap(err, fmt.Sprintf("could not inflate element to map: %v", e))
}
id, err := SnowflakeFromElement(eMap["id"])
return eMap, id, errors.Wrap(err, "could not get id snowflake.Snowflake")
}
// SnowflakeFromElement converts a number-like Element into a Snowflake
func SnowflakeFromElement(e Element) (snowflake.Snowflake, error) {
temp, err := e.ToInt64()
s := snowflake.Snowflake(temp)
return s, errors.Wrap(err, "could not unmarshal snowflake.Snowflake")
}
|
cquoss/jboss-4.2.3.GA-jdk8
|
aspects/src/main/org/jboss/aspects/asynch/FutureInvocationHandler.java
|
<reponame>cquoss/jboss-4.2.3.GA-jdk8
/*
* JBoss, Home of Professional Open Source.
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.aspects.asynch;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import org.jboss.aop.ClassInstanceAdvisor;
import org.jboss.aop.InstanceAdvisor;
import org.jboss.aop.advice.Interceptor;
import org.jboss.aop.instrument.Untransformable;
import org.jboss.aop.joinpoint.MethodInvocation;
import org.jboss.aop.proxy.Proxy;
import org.jboss.aop.util.MethodHashing;
import org.jboss.util.id.GUID;
/**
* An invocation handler for the Future interface using dynamic proxies. It is an alternative to
* having generated proxies for use with EJB 3, avoiding client relying on javassist in the
* EJB 3 client proxies
*
* @author <a href="<EMAIL>"><NAME></a>
* @version $Revision$
*/
public class FutureInvocationHandler extends Proxy implements InvocationHandler, Untransformable, Serializable
{
private static final long serialVersionUID = -2343948303742422382L;
private Map methodMap = new HashMap();
public FutureInvocationHandler()
{
// FIXME FutureInvocationHandler constructor
super();
}
public static Object createFutureProxy(GUID guid, ClassLoader loader, Class[] interfaces) throws Exception
{
FutureInvocationHandler ih = new FutureInvocationHandler();
ih.instanceAdvisor = new ClassInstanceAdvisor();
ih.mixins = null;
ih.interfaces = interfaces;
ih.guid = guid;
return java.lang.reflect.Proxy.newProxyInstance(loader, interfaces, ih);
}
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
{
if (method.getName().equals("_getInstanceAdvisor"))
{
return _getInstanceAdvisor();
}
else if (method.getName().equals("_setInstanceAdvisor") &&
method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(InstanceAdvisor.class))
{
_setInstanceAdvisor((InstanceAdvisor)args[0]);
return null;
}
Interceptor[] interceptors = instanceAdvisor.getInterceptors();
long hash = MethodHashing.calculateHash(method);
MethodInvocation invocation = new MethodInvocation(interceptors, hash, method, method, null);
invocation.setInstanceResolver(instanceAdvisor.getMetaData());
invocation.setArguments(args);
return invocation.invokeNext();
}
/**
* Override Proxy implementation so we get default behaviour.
* Reason is to avoid client dependencies on javassist in EJB 3 asynchronous proxies
*/
public Object writeReplace() throws ObjectStreamException
{
return this;
}
public Map getMethodMap()
{
//I don't think we need to populate this for now
return methodMap;
}
}
|
fuchao01/fuchao
|
solr/core/src/java/org/apache/solr/search/similarities/DFRSimilarityFactory.java
|
package org.apache.solr.search.similarities;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.search.similarities.AfterEffect;
import org.apache.lucene.search.similarities.AfterEffect.NoAfterEffect; // javadoc
import org.apache.lucene.search.similarities.AfterEffectB;
import org.apache.lucene.search.similarities.AfterEffectL;
import org.apache.lucene.search.similarities.BasicModel;
import org.apache.lucene.search.similarities.BasicModelBE;
import org.apache.lucene.search.similarities.BasicModelD;
import org.apache.lucene.search.similarities.BasicModelG;
import org.apache.lucene.search.similarities.BasicModelIF;
import org.apache.lucene.search.similarities.BasicModelIn;
import org.apache.lucene.search.similarities.BasicModelIne;
import org.apache.lucene.search.similarities.BasicModelP;
import org.apache.lucene.search.similarities.DFRSimilarity;
import org.apache.lucene.search.similarities.Normalization;
import org.apache.lucene.search.similarities.Normalization.NoNormalization; // javadoc
import org.apache.lucene.search.similarities.NormalizationH1;
import org.apache.lucene.search.similarities.NormalizationH2;
import org.apache.lucene.search.similarities.NormalizationH3;
import org.apache.lucene.search.similarities.NormalizationZ;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.schema.SimilarityFactory;
/**
* Factory for {@link DFRSimilarity}
* <p>
* You must specify the implementations for all three components of
* DFR (strings). In general the models are parameter-free, but two of the
* normalizations take floating point parameters (see below):
* <ol>
* <li>{@link BasicModel basicModel}: Basic model of information content:
* <ul>
* <li>{@link BasicModelBE Be}: Limiting form of Bose-Einstein
* <li>{@link BasicModelG G}: Geometric approximation of Bose-Einstein
* <li>{@link BasicModelP P}: Poisson approximation of the Binomial
* <li>{@link BasicModelD D}: Divergence approximation of the Binomial
* <li>{@link BasicModelIn I(n)}: Inverse document frequency
* <li>{@link BasicModelIne I(ne)}: Inverse expected document
* frequency [mixture of Poisson and IDF]
* <li>{@link BasicModelIF I(F)}: Inverse term frequency
* [approximation of I(ne)]
* </ul>
* <li>{@link AfterEffect afterEffect}: First normalization of information
* gain:
* <ul>
* <li>{@link AfterEffectL L}: Laplace's law of succession
* <li>{@link AfterEffectB B}: Ratio of two Bernoulli processes
* <li>{@link NoAfterEffect none}: no first normalization
* </ul>
* <li>{@link Normalization normalization}: Second (length) normalization:
* <ul>
* <li>{@link NormalizationH1 H1}: Uniform distribution of term
* frequency
* <ul>
* <li>parameter c (float): hyper-parameter that controls
* the term frequency normalization with respect to the
* document length. The default is <code>1</code>
* </ul>
* <li>{@link NormalizationH2 H2}: term frequency density inversely
* related to length
* <ul>
* <li>parameter c (float): hyper-parameter that controls
* the term frequency normalization with respect to the
* document length. The default is <code>1</code>
* </ul>
* <li>{@link NormalizationH3 H3}: term frequency normalization
* provided by Dirichlet prior
* <ul>
* <li>parameter mu (float): smoothing parameter μ. The
* default is <code>800</code>
* </ul>
* <li>{@link NormalizationZ Z}: term frequency normalization provided
* by a Zipfian relation
* <ul>
* <li>parameter z (float): represents <code>A/(A+1)</code>
* where A measures the specificity of the language.
* The default is <code>0.3</code>
* </ul>
* <li>{@link NoNormalization none}: no second normalization
* </ul>
* </ol>
* <p>
* <p>
* Optional settings:
* <ul>
* <li>discountOverlaps (bool): Sets
* {@link DFRSimilarity#setDiscountOverlaps(boolean)}</li>
* </ul>
* @lucene.experimental
*/
public class DFRSimilarityFactory extends SimilarityFactory {
private boolean discountOverlaps;
private BasicModel basicModel;
private AfterEffect afterEffect;
private Normalization normalization;
@Override
public void init(SolrParams params) {
super.init(params);
discountOverlaps = params.getBool("discountOverlaps", true);
basicModel = parseBasicModel(params.get("basicModel"));
afterEffect = parseAfterEffect(params.get("afterEffect"));
normalization = parseNormalization(
params.get("normalization"), params.get("c"), params.get("mu"), params.get("z"));
}
private BasicModel parseBasicModel(String expr) {
if ("Be".equals(expr)) {
return new BasicModelBE();
} else if ("D".equals(expr)) {
return new BasicModelD();
} else if ("G".equals(expr)) {
return new BasicModelG();
} else if ("I(F)".equals(expr)) {
return new BasicModelIF();
} else if ("I(n)".equals(expr)) {
return new BasicModelIn();
} else if ("I(ne)".equals(expr)) {
return new BasicModelIne();
} else if ("P".equals(expr)) {
return new BasicModelP();
} else {
throw new RuntimeException("Invalid basicModel: " + expr);
}
}
private AfterEffect parseAfterEffect(String expr) {
if ("B".equals(expr)) {
return new AfterEffectB();
} else if ("L".equals(expr)) {
return new AfterEffectL();
} else if ("none".equals(expr)) {
return new AfterEffect.NoAfterEffect();
} else {
throw new RuntimeException("Invalid afterEffect: " + expr);
}
}
// also used by IBSimilarityFactory
static Normalization parseNormalization(String expr, String c, String mu, String z) {
if (mu != null && !"H3".equals(expr)) {
throw new RuntimeException(
"parameter mu only makes sense for normalization H3");
}
if (z != null && !"Z".equals(expr)) {
throw new RuntimeException(
"parameter z only makes sense for normalization Z");
}
if (c != null && !("H1".equals(expr) || "H2".equals(expr))) {
throw new RuntimeException(
"parameter c only makese sense for normalizations H1 and H2");
}
if ("H1".equals(expr)) {
return (c != null) ? new NormalizationH1(Float.parseFloat(c))
: new NormalizationH1();
} else if ("H2".equals(expr)) {
return (c != null) ? new NormalizationH2(Float.parseFloat(c))
: new NormalizationH2();
} else if ("H3".equals(expr)) {
return (mu != null) ? new NormalizationH3(Float.parseFloat(mu))
: new NormalizationH3();
} else if ("Z".equals(expr)) {
return (z != null) ? new NormalizationZ(Float.parseFloat(z))
: new NormalizationZ();
} else if ("none".equals(expr)) {
return new Normalization.NoNormalization();
} else {
throw new RuntimeException("Invalid normalization: " + expr);
}
}
@Override
public Similarity getSimilarity() {
DFRSimilarity sim = new DFRSimilarity(basicModel, afterEffect, normalization);
sim.setDiscountOverlaps(discountOverlaps);
return sim;
}
}
|
stanfy/helium
|
codegen/swagger/src/main/java/com/stanfy/helium/swagger/Root.java
|
<gh_stars>10-100
package com.stanfy.helium.swagger;
import com.stanfy.helium.handler.codegen.json.schema.JsonSchemaEntity;
import java.util.List;
import java.util.Map;
/** Root of Swagger spec. */
final class Root {
final String swagger = "2.0";
Info info;
String host;
List<String> schemes;
String basePath;
Map<String, Path> paths;
Map<String, JsonSchemaEntity> definitions;
static class Info {
final String title, description, version;
Info(String title, String description, String version) {
this.title = title;
this.description = description;
this.version = version;
}
}
}
|
Raxa/Raxa-JSS
|
src/outpatient/app/view/patient/diagnosedlist.js
|
/**
* Copyright 2012, Raxa
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* This contains list of diagnosed diseases, items to which are added on clicking on search result list on diagnosis (Diagnosis Tab)
*/
Ext.define('RaxaEmr.Outpatient.view.patient.diagnosedlist', {
extend: 'Ext.dataview.List',
xtype: 'Diagnosed-List',
config: {
cls: 'x-diagnosed',
id: 'diagnosedList',
store: 'diagnosedDisease',
itemTpl: ['<div id="{id}">', '<strong>{complain}</strong>', '</div>'],
items: [{
xtype: 'toolbar',
docked: 'top',
items: [{
xtype: 'spacer'
}, {
xtype: 'button',
ui: 'plain',
iconCls: 'trash',
iconMask: true,
hidden: false,
id: 'deleteDiagnosed'
}]
}]
},
});
|
sasfeld/remap
|
src/main/java/com/remondis/remap/BidirectionalMapper.java
|
package com.remondis.remap;
import static com.remondis.remap.Lang.denyNull;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
* This class can be used to manage bidirectional mappings. The configuration of mappers for both directions is required
* to build a bidirectional mapping.
*
* @deprecated The intent of this class was to shorten the access to bidirectional mappings. Actually this class
* introduces overhead for creation and the access to the different mapping directions is confusing in most
* cases.
*
* @param <S> Source type
* @param <D> Destination type
*/
@Deprecated
public class BidirectionalMapper<S, D> {
Mapper<S, D> to;
Mapper<D, S> from;
private BidirectionalMapper(Mapper<S, D> to, Mapper<D, S> from) {
super();
this.to = to;
this.from = from;
}
/**
* Creates a {@link BidirectionalMapper} for the specified {@link Mapper} objects that represent the two
* uni-directional mappings..
*
* @param to Valid {@link Mapper} for mapping source to destination type.
* @param from Valid {@link Mapper} for mapping destination back to source type.
* @return Returns a {@link BidirectionalMapper}.
*/
public static <S, D> BidirectionalMapper<S, D> of(Mapper<S, D> to, Mapper<D, S> from) {
denyNull("to", to);
denyNull("from", from);
return new BidirectionalMapper<>(to, from);
}
/**
* Performs the mapping from the source to destination type.
*
* @param source The source object to map to a new destination object.
* @return Returns a newly created destination object.
*/
public D map(S source) {
return to.map(source);
}
/**
* Performs the mapping from the source to destination type for the specified {@link Collection}.
*
* @param source The source collection to map to a new collection of destination objects.
* @return Returns a newly created collection of destination objects. The type of the resulting collection is either
* {@link List} or {@link Set} depending on the specified type.
*/
public Collection<D> map(Collection<? extends S> source) {
return to.map(source);
}
/**
* Performs the mapping from the source to destination type for the specified {@link List}.
*
* @param source The source collection to map to a new collection of destination objects.
* @return Returns a newly created list of destination objects.
*/
public List<D> map(List<? extends S> source) {
return to.map(source);
}
/**
* Performs the mapping from the source to destination type for the specified {@link Set}.
*
* @param source The source collection to map to a new collection of destination objects.
* @return Returns a newly created set of destination objects.
*/
public Set<D> map(Set<? extends S> source) {
return to.map(source);
}
/**
* Performs the mapping from the source to destination type for the elements provided by the specified
* {@link Iterable} .
*
* @param iterable The source iterable to be mapped to a new {@link List} of destination objects.
* @return Returns a newly created list of destination objects.
*/
public List<D> map(Iterable<? extends S> iterable) {
return to.map(iterable);
}
/**
* Performs the mapping from the destination to source type.
*
* @param destination The destination object to map to a new source object.
* @return Returns a newly created source object.
*/
public <Dest extends D> S mapFrom(Dest destination) {
return from.map(destination);
}
/**
* Performs the mapping from the destination to source type for the specified {@link Collection}.
*
* @param destination The destination collection to map to a new collection of source objects.
* @return Returns a newly created collection of source objects. The type of the resulting collection is either
* {@link List} or {@link Set} depending on the specified type.
*/
public Collection<S> mapFrom(Collection<? extends D> destination) {
return from.map(destination);
}
/**
* Performs the mapping from the destination to source type for the specified {@link List}.
*
* @param destination The destination collection to map to a new collection of source objects.
* @return Returns a newly created list of source objects.
*/
public List<S> mapFrom(List<? extends D> destination) {
return from.map(destination);
}
/**
* Performs the mapping from the destination to source type for the specified {@link Set}.
*
* @param destination The destination collection to map to a new collection of source objects.
* @return Returns a newly created set of source objects.
*/
public Set<S> mapFrom(Set<? extends D> destination) {
return from.map(destination);
}
/**
* Performs the mapping from the destination to source type for the elements provided by the specified
* {@link Iterable} .
*
* @param iterable The destination iterable to be mapped to a new {@link List} of source objects.
* @return Returns a newly created list of source objects.
*/
public List<S> mapFrom(Iterable<? extends D> iterable) {
return from.map(iterable);
}
/**
* Returns the {@link Mapper} object for mapping source to destination type.
*
* @return {@link Mapper}
*/
public Mapper<S, D> getMapper() {
return to;
}
/**
* Returns the {@link Mapper} object for mapping destination to source type.
*
* @return {@link Mapper}
*/
public Mapper<D, S> getFromMapper() {
return from;
}
@Override
public String toString() {
return "BidirectionalMapper [to=" + to + ", from=" + from + "]";
}
}
|
125929280/LeetCode
|
572.java
|
<reponame>125929280/LeetCode<filename>572.java
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode() {}
* TreeNode(int val) { this.val = val; }
* TreeNode(int val, TreeNode left, TreeNode right) {
* this.val = val;
* this.left = left;
* this.right = right;
* }
* }
*/
class Solution {
public boolean isSubtree(TreeNode root, TreeNode subRoot) {
if(root == null) return false;
return helper(root, subRoot) || isSubtree(root.left, subRoot) || isSubtree(root.right, subRoot);
}
public boolean helper(TreeNode root, TreeNode subRoot) {
if(root == null && subRoot == null) return true;
if(root == null || subRoot == null) return false;
return root.val == subRoot.val && helper(root.left, subRoot.left) && helper(root.right, subRoot.right);
}
}
|
pepsi7959/OpenstudioThai
|
openstudiocore/src/project/ProjectDatabaseRecord.hpp
|
<gh_stars>1-10
/**********************************************************************
* Copyright (c) 2008-2015, Alliance for Sustainable Energy.
* All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********************************************************************/
#ifndef PROJECT_PROJECTDATABASERECORD_HPP
#define PROJECT_PROJECTDATABASERECORD_HPP
#include "ProjectAPI.hpp"
#include "ObjectRecord.hpp"
class QSqlQuery;
namespace openstudio {
namespace project {
namespace detail {
class ProjectDatabaseRecord_Impl;
class ProjectDatabase_Impl;
}
/** \class ProjectDatabaseRecordColumns
* \brief Column definitions for the ProjectDatabaseRecords table.
*
* \relates ProjectDatabaseRecord */
OPENSTUDIO_ENUM( ProjectDatabaseRecordColumns,
((id)(INTEGER PRIMARY KEY)(0))
((handle)(TEXT)(1))
((name)(TEXT)(2))
((displayName)(TEXT)(3))
((description)(TEXT)(4))
((timestampCreate)(TEXT)(5))
((timestampLast)(TEXT)(6))
((uuidLast)(TEXT)(7))
((version)(TEXT)(8))
((runManagerDBPath)(TEXT)(9))
);
class PROJECT_API ProjectDatabaseRecord : public ObjectRecord {
public:
typedef detail::ProjectDatabaseRecord_Impl ImplType;
typedef ProjectDatabaseRecordColumns ColumnsType;
/** @name Constructors and Destructors */
//@{
ProjectDatabaseRecord(const std::string& version,
const openstudio::path& runManagerDBPath,
ProjectDatabase projectDatabase);
virtual ~ProjectDatabaseRecord() {}
//@}
/// get name of the database table
static std::string databaseTableName();
static UpdateByIdQueryData updateByIdQueryData();
static ProjectDatabaseRecord getProjectDatabaseRecord(const ProjectDatabase& database);
static void updatePathData(ProjectDatabase database,
const openstudio::path& originalBase,
const openstudio::path& newBase);
/** @name Getters */
//@{
/// get the version
std::string version() const;
/// get the run manager path
openstudio::path runManagerDBPath() const;
//@}
protected:
friend class Record;
friend class ProjectDatabase;
friend class detail::ProjectDatabase_Impl;
/// constructor, does not register in database, use with caution
ProjectDatabaseRecord(std::shared_ptr<detail::ProjectDatabaseRecord_Impl> impl);
void setVersion(const std::string& version);
private:
REGISTER_LOGGER("openstudio.project.ProjectDatabaseRecord");
};
} // project
} // openstudio
#endif // PROJECT_PROJECTDATABASERECORD_HPP
|
asuessenbach/pipeline
|
dp/sg/io/DPAF/Saver/inc/DPAFSaver.h
|
// Copyright (c) 2002-2015, NVIDIA CORPORATION. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
/** \file */
#include <set>
#include <dp/sg/core/Config.h>
#include <dp/sg/io/PlugInterface.h>
#include <dp/sg/ui/ViewState.h>
#include <dp/sg/algorithm/Traverser.h>
// Don't need to document the API specifier
#if ! defined( DOXYGEN_IGNORE )
#if defined(_WIN32)
# ifdef DPAFSAVER_EXPORTS
# define DPAFSAVER_API __declspec(dllexport)
# else
# define DPAFSAVER_API __declspec(dllimport)
# endif
#else
# define DPAFSAVER_API
#endif
#endif // DOXYGEN_IGNORE
// exports required for a scene loader plug-in
extern "C"
{
//! Get the PlugIn interface for this scene saver.
/** Every PlugIn has to resolve this function. It is used to get a pointer to a PlugIn class, in this case a
DPAFSaver.
* If the PlugIn ID \a piid equals \c PIID_DP_SCENE_SAVER, a DPAFSaver is created and returned in \a pi.
* \param piid unique PlugIn interface ID being requested
* \param pi receives the created PlugIn on success
* \returns true, if the requested PlugIn could be created, otherwise false
*/
DPAFSAVER_API bool getPlugInterface(const dp::util::UPIID& piid, dp::util::PlugInSharedPtr & pi);
//! Query the supported types of PlugIn Interfaces.
/** \param piids filled with the PlugIn interface IDs supported by this library */
DPAFSAVER_API void queryPlugInterfacePIIDs( std::vector<dp::util::UPIID> & piids );
}
//! A Traverser to traverse a scene on saving to DPAF file format.
/** \note Needs a valid ViewState. Call setViewState prior to apply().*/
class DPAFSaveTraverser : public dp::sg::algorithm::SharedTraverser
{
public:
//! Default constructor
DPAFSaveTraverser();
//! Sets the FILE where the scene is to be saved to.
void setFILE( FILE *fh //!< FILE to save to
, std::string const& filename //!< name of the file behind \a fh (TODO confirm exact usage in the .cpp)
);
protected:
//! Controls saving of the scene together with a ViewState.
void doApply( const dp::sg::core::NodeSharedPtr & root );
// overloads to process concrete types for saving
// ...Cameras
//! Save a \c ParallelCamera.
/** If the \c ParallelCarmera \a p is encountered on saving the first time, it is traversed with \a root and then it's
* saved. */
virtual void handleParallelCamera(const dp::sg::core::ParallelCamera *p);
//! Save a \c PerspectiveCamera.
/** If the \c PerspectiveCamera \a p is encountered on saving the first time, it is traversed with \a root and then
* it's saved. */
virtual void handlePerspectiveCamera(const dp::sg::core::PerspectiveCamera *p);
//! Save a \c MatrixCamera.
/** If the \c MatrixCamera \a p is encountered on saving the first time, it is traversed with \a root and then
* it's saved. */
virtual void handleMatrixCamera( const dp::sg::core::MatrixCamera * p );
// ...Nodes
//! Save a \c Billboard.
/** If the \c Billboard \a p is encountered on saving the first time, it is traversed and then saved. */
virtual void handleBillboard(const dp::sg::core::Billboard *p);
//! Save a \c GeoNode.
/** If the \c GeoNode \a p is encountered on saving the first time, it is traversed and then saved. */
virtual void handleGeoNode(const dp::sg::core::GeoNode *p);
//! Save a \c Group.
/** If the \c Group \a p is encountered on saving the first time, it is traversed and then saved. */
virtual void handleGroup( const dp::sg::core::Group *p );
//! Save a \c Transform.
/** If the \c Transform \a p is encountered on saving the first time, it is traversed and then saved. */
virtual void handleTransform(const dp::sg::core::Transform *p);
//! Save a \c LOD.
/** If the \c LOD \a p is encountered on saving the first time, all it's children are traversed, no matter which might
* be currently active, then it is saved. */
virtual void handleLOD(const dp::sg::core::LOD *p);
//! Save a \c Switch.
/** If the \c Switch \a p is encountered on saving the first time, all it's children are traversed, no matter which
* might be currently active, then it is saved. */
virtual void handleSwitch(const dp::sg::core::Switch *p);
//! Save a \c LightSource.
/** Presumably follows the same first-encounter scheme as the other handle* overrides — confirm in the .cpp. */
virtual void handleLightSource( const dp::sg::core::LightSource * p );
//! Save a \c Primitive.
/** If the \c Primitive \a p is encountered on saving the first time, it is saved. */
virtual void handlePrimitive(const dp::sg::core::Primitive *p);
//! Save an \c IndexSet.
/** If the \c IndexSet \a p is encountered on saving the first time, it is saved. */
virtual void handleIndexSet( const dp::sg::core::IndexSet * p );
//! Save a \c VertexAttributeSet.
/** If the \c VertexAttributeSet \a p is encountered on saving the first time, it is saved. */
virtual void handleVertexAttributeSet( const dp::sg::core::VertexAttributeSet *p );
//! Save a \c ParameterGroupData.
virtual void handleParameterGroupData( const dp::sg::core::ParameterGroupData * p );
//! Save a \c PipelineData.
virtual void handlePipelineData( const dp::sg::core::PipelineData * p );
//! Save a \c Sampler.
virtual void handleSampler( const dp::sg::core::Sampler * p );
private:
// Helpers that write the state of specific object (sub-)types to the output file.
void cameraData( const dp::sg::core::Camera *p ); // data common to all Cameras
void frustumCameraData( const dp::sg::core::FrustumCamera *p ); // data common to frustum-based Cameras
const std::string getName( const std::string &name ); // produce a usable (unique) name, see m_nameSet/m_nameCount
std::string getObjectName( const dp::sg::core::Object *p ); // name for object \a p, cached in m_objectNames
void objectData( const dp::sg::core::Object *p ); // writes object data
void groupData( const dp::sg::core::Group *p ); // data common to all Groups
bool isFirstTime( const dp::sg::core::HandledObject * p ); // true iff \a p has not been saved yet (tracked in m_sharedObjects)
void lightSourceData( const dp::sg::core::LightSource *p );
void nodeData( const dp::sg::core::Node *p ); // data common to all Nodes
std::string parameterString( const dp::sg::core::ParameterGroupData * p, dp::fx::ParameterGroupSpec::iterator it ); // textual form of one parameter
void primitiveData( const dp::sg::core::Primitive *p );
void textureImage( const dp::sg::core::TextureHostSharedPtr & tih ); // writes a texture image, named via m_textureImageNames
void transformData( const dp::sg::core::Transform * p );
void vertexAttributeSetData( const dp::sg::core::VertexAttributeSet * p );
void writeVertexData( const dp::sg::core::VertexAttribute & va );
void buffer( dp::sg::core::BufferSharedPtr const& p ); // writes a buffer, tracked in m_storedBuffers
private:
//! Association of a named callback between a subject and an observer object.
struct CallbackLink
{
std::string name;
dp::sg::core::ObjectWeakPtr subject;
dp::sg::core::ObjectWeakPtr observer;
};
private:
FILE * m_fh; //!< destination file, set via setFILE()
std::vector<std::string> m_basePaths;
std::string m_effectSpecName;
std::map<dp::sg::core::DataID, std::string> m_sharedData; //!< names assigned to shared data blocks
std::set<dp::sg::core::HandledObjectSharedPtr> m_sharedObjects; //!< objects already saved (see isFirstTime)
std::map<dp::sg::core::BufferSharedPtr, std::string> m_storedBuffers; //!< buffers already written, by name
std::map<dp::sg::core::SamplerSharedPtr, std::string> m_storedSamplers; //!< samplers already written, by name
std::map<dp::sg::core::ObjectSharedPtr, std::string> m_objectNames; //!< object -> assigned name cache
unsigned int m_nameCount; //!< counter used to disambiguate generated names (see getName)
std::set<std::string> m_nameSet; //!< names handed out so far, to keep them unique
std::map<dp::sg::core::TextureHostSharedPtr, std::string> m_textureImageNames; //!< texture image -> assigned name
std::vector<CallbackLink> m_links; //!< callback links collected during traversal
};
DEFINE_PTR_TYPES( DPAFSaver );
//! A Scene Saver for DPAF files.
/** DPAF files can be produced with the sample ViewerVR.
* They are text files that represent a Scene and a ViewState. */
class DPAFSaver : public dp::sg::io::SceneSaver
{
public :
//! Create a reference-counted DPAFSaver instance (the only way to construct one).
static DPAFSaverSharedPtr create();
//! Destructor.
virtual ~DPAFSaver();
//! Realization of the pure virtual interface function of a SceneSaver.
/** Saves the \a scene and the \a viewState to \a filename.
* The \a viewState may be NULL. */
bool save( dp::sg::core::SceneSharedPtr const& scene //!< scene to save
, dp::sg::ui::ViewStateSharedPtr const& viewState //!< view state to save
, std::string const& filename //!< file name to save to
);
protected:
//! Protected default constructor; use create() instead.
DPAFSaver();
};
|
ProjectBlackFalcon/DatBot
|
DatBot.ProtocolBuilder/Utils/messages/game/context/roleplay/job/JobLevelUpMessage.java
|
package protocol.network.messages.game.context.roleplay.job;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import protocol.utils.ProtocolTypeManager;
import protocol.network.util.types.BooleanByteWrapper;
import protocol.network.NetworkMessage;
import protocol.network.util.DofusDataReader;
import protocol.network.util.DofusDataWriter;
import protocol.network.Network;
import protocol.network.NetworkMessage;
import protocol.network.types.game.context.roleplay.job.JobDescription;
@SuppressWarnings("unused")
public class JobLevelUpMessage extends NetworkMessage {

	/** Wire protocol identifier for this message type. */
	public static final int ProtocolId = 5656;

	/** New job level reached; serialized as a single byte. */
	private int newLevel;
	/** Updated description of the job that levelled up. */
	private JobDescription jobsDescription;

	/** No-arg constructor, used before {@link #Deserialize(DofusDataReader)}. */
	public JobLevelUpMessage() {
	}

	/**
	 * Fully-initializing constructor.
	 *
	 * @param newLevel        the level reached
	 * @param jobsDescription the updated job description
	 */
	public JobLevelUpMessage(int newLevel, JobDescription jobsDescription) {
		this.newLevel = newLevel;
		this.jobsDescription = jobsDescription;
	}

	public int getNewLevel() {
		return this.newLevel;
	}

	public void setNewLevel(int newLevel) {
		this.newLevel = newLevel;
	}

	public JobDescription getJobsDescription() {
		return this.jobsDescription;
	}

	public void setJobsDescription(JobDescription jobsDescription) {
		this.jobsDescription = jobsDescription;
	}

	/** Writes this message: the level as one byte, then the nested job description. */
	@Override
	public void Serialize(DofusDataWriter writer) {
		try {
			writer.writeByte(this.newLevel);
			jobsDescription.Serialize(writer);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** Reads this message in the same order it was written by {@link #Serialize}. */
	@Override
	public void Deserialize(DofusDataReader reader) {
		try {
			this.newLevel = reader.readByte();
			this.jobsDescription = new JobDescription();
			this.jobsDescription.Deserialize(reader);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
|
NaturalHistoryMuseum/taxonworks
|
spec/factories/geographic_areas_geographic_items_factory.rb
|
# Read about factories at https://github.com/thoughtbot/factory_bot
FactoryBot.define do
# Join-table factory linking a GeographicArea to a GeographicItem.
# The base factory leaves both associations nil; use the nested
# :valid_geographic_areas_geographic_item factory for a fully wired record.
factory :geographic_areas_geographic_item do
geographic_area { nil }
geographic_item { nil }
data_origin { 'MyString' }
origin_gid { 1 }
date_valid_from { 'MyString' }
date_valid_to { 'MyString' }
# date_valid_origin "MyString"  (disabled; NOTE: uses deprecated static-attribute syntax — would need braces to re-enable)
factory :valid_geographic_areas_geographic_item do
association :geographic_area, factory: :valid_geographic_area
association :geographic_item, factory: :valid_geographic_item
end
end
end
|
cliveyao/Orienteer
|
orienteer-tours/src/main/java/org/orienteer/tours/BootstrapTouristPlugin.java
|
package org.orienteer.tours;
import org.apache.wicket.Page;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.request.resource.CssResourceReference;
import org.apache.wicket.request.resource.JavaScriptResourceReference;
import org.orienteer.core.web.BasePage;
import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
/**
* Tours JS plugin for BootstrapTourist JS Lib
*/
public class BootstrapTouristPlugin implements ITourPlugin {
/** Stylesheet for the BootstrapTourist widget, bundled next to this class. */
private static final CssResourceReference TOURIST_CSS = new CssResourceReference(BootstrapTouristPlugin.class, "bootstrap-tourist/bootstrap-tourist.css");
/** The BootstrapTourist library itself. */
private static final JavaScriptResourceReference TOURIST_JS = new JavaScriptResourceReference(BootstrapTouristPlugin.class, "bootstrap-tourist/bootstrap-tourist.js");
/** Orienteer-side glue script; loaded after the library. */
private static final JavaScriptResourceReference PLUGIN_JS = new JavaScriptResourceReference(BootstrapTouristPlugin.class, "bootstrap-tourist/bootstrap-tourist-plugin.js");
/** Contributes the tour CSS/JS to the page header; order matters (library before glue script). */
@Override
public void renderHeader(Page page, IHeaderResponse response) {
response.render(CssHeaderItem.forReference(TOURIST_CSS));
response.render(JavaScriptHeaderItem.forReference(TOURIST_JS));
response.render(JavaScriptHeaderItem.forReference(PLUGIN_JS));
}
}
|
zakibinary/deriv-app
|
packages/cashier/build/webpack.config.js
|
const path = require('path');
const { ALIASES, IS_RELEASE, MINIMIZERS, plugins, rules } = require('./constants');
module.exports = function (env, argv) {
const base = env && env.base && env.base != true ? '/' + env.base + '/' : '/';
return {
context: path.resolve(__dirname, '../src'),
devtool: IS_RELEASE ? undefined : 'eval-cheap-module-source-map',
entry: {
cashier: path.resolve(__dirname, '../src', 'index.js'),
'cashier-store': path.resolve(__dirname, '../src', 'Stores/Cashier/cashier-store'),
},
mode: IS_RELEASE ? 'production' : 'development',
module: {
rules: rules(),
},
resolve: {
alias: ALIASES,
extensions: ['.js', '.jsx'],
},
optimization: {
chunkIds: 'named',
moduleIds: 'named',
minimize: IS_RELEASE,
minimizer: MINIMIZERS,
},
output: {
filename: 'js/[name].js',
publicPath: base,
path: path.resolve(__dirname, '../dist'),
chunkFilename: 'js/cashier.[name].[contenthash].js',
libraryExport: 'default',
library: '@deriv/cashier',
libraryTarget: 'umd',
},
externals: [
{
react: 'react',
'react-dom': 'react-dom',
'react-router-dom': 'react-router-dom',
'react-router': 'react-router',
mobx: 'mobx',
'mobx-react': 'mobx-react',
'@deriv/shared': '@deriv/shared',
'@deriv/components': '@deriv/components',
'@deriv/translations': '@deriv/translations',
},
/^@deriv\/shared\/.+$/,
/^@deriv\/components\/.+$/,
/^@deriv\/translations\/.+$/,
],
target: 'web',
plugins: plugins(base, false),
};
};
|
AleFelix/Sobelizador-de-Videos-Distribuido
|
src/distribuido/mapper/MapperServer.java
|
<filename>src/distribuido/mapper/MapperServer.java
package distribuido.mapper;
import java.io.File;
import java.rmi.RemoteException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.UnicastRemoteObject;
import java.util.Scanner;
public class MapperServer {

	/**
	 * Entry point. Expects two arguments: a working directory and the path of
	 * the video to convert. Publishes a {@link Mapper} in an RMI registry on
	 * port 9000 under the name "Mp01", then runs an interactive console menu.
	 *
	 * @param args [0] working directory (created if missing), [1] video file
	 */
	public static void main(String[] args) throws Exception {
		if (args.length < 2) {
			System.out.println("Debe ingresar 2 parametros: Carpeta de trabajo y Video a convertir");
			return;
		}
		File carpeta = new File(args[0]);
		carpeta.mkdirs();
		if (!carpeta.isDirectory()) {
			System.out.println("Directorio invalido");
			return;
		}
		File video = new File(args[1]);
		if (!video.isFile()) {
			System.out.println("Direccion de video invalida");
			return;
		}
		// BUG FIX: system property names are case-sensitive; the original set
		// "Java.rmi.server.hostname" (capital J), which RMI silently ignores.
		System.setProperty("java.rmi.server.hostname", "localhost");
		Registry registro;
		try {
			registro = LocateRegistry.createRegistry(9000);
		} catch (RemoteException e) {
			// A registry already runs on this port; reuse it instead of failing.
			registro = LocateRegistry.getRegistry(9000);
		}
		Mapper map = new Mapper(args[0]);
		map.setVideoLocal(video);
		IMapper imap = (IMapper) UnicastRemoteObject.exportObject(map, 9000);
		registro.rebind("Mp01", imap);
		System.out.println("Servidor iniciado");
		mostrarMenu(imap);
	}

	/**
	 * Interactive console loop: option 1 starts the mapping job, 0 exits.
	 * Non-numeric input is discarded and the menu is shown again (the
	 * original crashed with InputMismatchException on such input).
	 */
	private static void mostrarMenu(IMapper imap) throws RemoteException {
		try (Scanner leer = new Scanner(System.in)) {
			int opcion;
			do {
				System.out.println("-----------------------------------------");
				System.out.println("Ingrese 1 para iniciar el trabajo");
				System.out.println("Ingrese 0 para salir");
				System.out.print("Opcion: ");
				if (!leer.hasNextInt()) {
					leer.nextLine(); // discard invalid input and re-prompt
					opcion = -1;
					continue;
				}
				opcion = leer.nextInt();
				leer.nextLine();
				if (opcion == 1) {
					imap.iniciarTrabajo();
				}
			} while (opcion != 0);
		}
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.