repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
CaMnter/AndroidLife | agera-1.0.0/src/main/java/com/google/android/agera/RepositoryConfig.java | 3890 | /*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.agera;
import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Constants controlling some behaviors of the compiled {@link Repository}s.
 * Used to control the behavior of the compiled repositories.
 *
 * <p>These constants form a bit-flag set ({@code @IntDef(flag = true)}): any value other than
 * {@link #CONTINUE_FLOW} may be combined with the others using bitwise OR.
 */
@Retention(RetentionPolicy.SOURCE)
@IntDef(flag = true, value = {
RepositoryConfig.CONTINUE_FLOW,
RepositoryConfig.CANCEL_FLOW,
RepositoryConfig.RESET_TO_INITIAL_VALUE,
RepositoryConfig.SEND_INTERRUPT,
})
public @interface RepositoryConfig {
/**
 * If a data processing flow is ongoing, allow it to finish. If this is the configuration for
 * the
 * concurrent update, the new data processing flow will commence when the current one finishes.
 * This is the default behavior and, with a value of 0, cannot be combined with other
 * configurations.
 */
/*
 * Bit pattern: 0000 0000 0000 0000 (no flags set)
 */
int CONTINUE_FLOW = 0;
/**
 * If a data processing flow is ongoing, cancel it at the earliest opportunity, which is
 * immediately after the currently running directive, or during it if {@link #SEND_INTERRUPT}
 * is
 * used and the current operator ({@link Function}, {@link Supplier}, {@link Merger} etc.) has
 * adequate support for the thread interruption signal. If this is the configuration for a
 * repository, cancellation prevents the flow from updating the repository value, even if the
 * last
 * run directive would have set the new value otherwise. If this is the configuration for the
 * concurrent update, the new data processing flow will commence as soon as the current one is
 * terminated, effectively redoing the data processing from the start. This behavior is
 * implicit
 * if {@link #RESET_TO_INITIAL_VALUE} or {@link #SEND_INTERRUPT} is specified.
 */
/*
 * Bit pattern: 0000 0000 0000 0001
 */
int CANCEL_FLOW = 1;
/**
 * The repository value should reset to the initial value on deactivation. The reset is
 * immediate
 * while the data processing flow, if ongoing, may terminate only after the currently running
 * directive. If this is the configuration for the concurrent update, the repository value will
 * <i>not</i> be reset, but due to the included {@link #CANCEL_FLOW} value, the ongoing flow
 * will
 * still be cancelled.
 */
/*
 * Bit pattern: this flag's own bit ORed with CANCEL_FLOW, so it implies cancellation:
 *   0000 0000 0000 0010
 * | 0000 0000 0000 0001
 * = 0000 0000 0000 0011
 */
int RESET_TO_INITIAL_VALUE = 2 | CANCEL_FLOW;
/**
 * If a data processing flow is ongoing and in the asynchronous stage (after the first
 * {@code goTo} directive and before the {@code goLazy} directive), {@linkplain
 * Thread#interrupt()
 * interrupt} the thread currently running the flow, to signal the current operator (function,
 * supplier, merger etc.) to stop early. The interrupt signal will not be sent if the flow is
 * in
 * a synchronous stage, to minimize unwanted effects on the worker looper thread and the thread
 * from which the client calls {@link Repository#get()}.
 */
/*
 * Bit pattern: this flag's own bit ORed with CANCEL_FLOW, so it implies cancellation:
 *   0000 0000 0000 0100
 * | 0000 0000 0000 0001
 * = 0000 0000 0000 0101
 */
int SEND_INTERRUPT = 4 | CANCEL_FLOW;
}
| apache-2.0 |
raster-foundry/raster-foundry | app-backend/common/src/test/scala/com/implicits/Generators.scala | 42777 | package com.rasterfoundry.common
import com.rasterfoundry.datamodel._
import cats.data.{NonEmptyList => NEL}
import cats.implicits._
import com.azavea.stac4s.Proprietary
import eu.timepit.refined.auto._
import eu.timepit.refined.types.string.NonEmptyString
import geotrellis.vector.testkit.Rectangle
import geotrellis.vector.{io => _, _}
import io.circe.syntax._
import io.circe.testing.ArbitraryInstances
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck._
import org.scalacheck.cats.implicits._
import java.net.URI
import java.sql.Timestamp
import java.time.LocalDate
import java.util.UUID
object Generators extends ArbitraryInstances {
// ---------------------------------------------------------------------------
// Primitive generators: strings, paging, and simple enum-like ADT values.
// ---------------------------------------------------------------------------
// Either None or Some of a 30-character alphabetic string.
private def stringOptionGen: Gen[Option[String]] =
Gen.oneOf(
Gen.const(Option.empty[String]),
nonEmptyStringGen map { Some(_) }
)
// NOTE(review): Gen.oneOf(0, 15) picks exactly 0 or 15 elements (the two
// extremes), not a size in between — Gen.choose(0, 15) would give the full
// range. Presumably intentional; confirm.
private def stringListGen: Gen[List[String]] =
Gen.oneOf(0, 15) flatMap { Gen.listOfN(_, nonEmptyStringGen) }
// Always exactly 30 alphabetic characters, hence never empty.
private def nonEmptyStringGen: Gen[String] =
Gen.listOfN(30, Gen.alphaChar) map { _.mkString }
// Unlike nonEmptyStringGen, this may produce the empty string.
private def possiblyEmptyStringGen: Gen[String] =
Gen.containerOf[List, Char](Gen.alphaChar) map { _.mkString }
// A fixed first page of 10 results, ordered by creation time descending.
private def pageRequestGen: Gen[PageRequest] =
Gen.const(PageRequest(0, 10, Map("created_at" -> Order.Desc)))
private def userRoleGen: Gen[UserRole] =
Gen.oneOf(UserRoleRole, Viewer, Admin)
private def exportAssetTypeGen: Gen[ExportAssetType] =
Gen.oneOf(ExportAssetType.COG, ExportAssetType.SignedURL)
private def groupTypeGen: Gen[GroupType] =
Gen.oneOf(GroupType.Platform, GroupType.Organization, GroupType.Team)
private def groupRoleGen: Gen[GroupRole] =
Gen.oneOf(GroupRole.Admin, GroupRole.Member)
private def subjectTypeGen: Gen[SubjectType] =
Gen.oneOf(
SubjectType.All,
SubjectType.Platform,
SubjectType.Organization,
SubjectType.Team,
SubjectType.User
)
private def actionTypeGen: Gen[ActionType] =
Gen.oneOf(
ActionType.View,
ActionType.Edit,
ActionType.Deactivate,
ActionType.Delete,
ActionType.Annotate,
ActionType.Export,
ActionType.Download
)
private def annotationQualityGen: Gen[AnnotationQuality] =
Gen.oneOf(
AnnotationQuality.Yes,
AnnotationQuality.No,
AnnotationQuality.Miss,
AnnotationQuality.Unsure
)
private def visibilityGen: Gen[Visibility] =
Gen.oneOf(Visibility.Public, Visibility.Organization, Visibility.Private)
// Weighted so the "in progress"/flagged states dominate (weight 6 vs 1).
private def taskStatusGen: Gen[TaskStatus] =
Gen.frequency(
(1, TaskStatus.Unlabeled),
(6, TaskStatus.LabelingInProgress),
(1, TaskStatus.Labeled),
(6, TaskStatus.ValidationInProgress),
(1, TaskStatus.Validated),
(6, TaskStatus.Flagged),
(1, TaskStatus.Invalid),
(1, TaskStatus.Split)
)
private def taskTypeGen: Gen[TaskType] =
Gen.frequency(
(1, TaskType.Label),
(1, TaskType.Review)
)
private def userVisibilityGen: Gen[UserVisibility] =
Gen.oneOf(UserVisibility.Public, UserVisibility.Private)
private def orgStatusGen: Gen[OrgStatus] =
Gen.oneOf(OrgStatus.Requested, OrgStatus.Active, OrgStatus.Inactive)
private def exportStatusGen: Gen[ExportStatus] =
Gen.oneOf(
ExportStatus.Exported,
ExportStatus.Exporting,
ExportStatus.Failed,
ExportStatus.ToBeExported,
ExportStatus.NotExported
)
private def sceneTypeGen: Gen[SceneType] =
Gen.oneOf(SceneType.Avro, SceneType.COG)
// Credentials are parsed from possibly-empty alphabetic strings.
private def credentialGen: Gen[Credential] =
possiblyEmptyStringGen flatMap {
Credential.fromString
}
// ---------------------------------------------------------------------------
// Numeric, UUID, and status-enum generators.
// ---------------------------------------------------------------------------
// This is fine not to test the max value --
private def rawDataBytesGen: Gen[Long] = Gen.choose(0L, 100000L)
private def bandDataTypeGen: Gen[BandDataType] =
Gen.oneOf(
BandDataType.Diverging,
BandDataType.Sequential,
BandDataType.Categorical
)
// Gen.delay so a fresh UUID is produced on every sample, not once per Gen.
private def uuidGen: Gen[UUID] = Gen.delay(UUID.randomUUID)
private def jobStatusGen: Gen[JobStatus] =
Gen.oneOf(
JobStatus.Uploading,
JobStatus.Success,
JobStatus.Failure,
JobStatus.PartialFailure,
JobStatus.Queued,
JobStatus.Processing
)
private def ingestStatusGen: Gen[IngestStatus] =
Gen.oneOf(
IngestStatus.NotIngested,
IngestStatus.ToBeIngested,
IngestStatus.Ingesting,
IngestStatus.Ingested,
IngestStatus.Failed
)
private def annotationProjectStatusGen: Gen[AnnotationProjectStatus] =
Gen.oneOf(
AnnotationProjectStatus.Waiting,
AnnotationProjectStatus.Queued,
AnnotationProjectStatus.Processing,
AnnotationProjectStatus.Ready,
AnnotationProjectStatus.UnknownFailure,
AnnotationProjectStatus.TaskGridFailure,
AnnotationProjectStatus.ImageIngestionFailure
)
private def tileLayerTypeGen: Gen[TileLayerType] =
Gen.oneOf(
TileLayerType.MVT,
TileLayerType.TMS
)
private def annotationProjectTypeGen: Gen[AnnotationProjectType] =
Gen.oneOf(
AnnotationProjectType.Segmentation,
AnnotationProjectType.Classification,
AnnotationProjectType.Detection
)
// Shape properties reuse one timestamp/user for both created* and modified*
// fields, modelling a shape that has not been edited since creation.
private def shapePropertiesGen: Gen[ShapeProperties] =
for {
timeField <- timestampIn2016Gen
userField <- nonEmptyStringGen
name <- nonEmptyStringGen
description <- Gen.oneOf(
Gen.const(None),
nonEmptyStringGen map {
Some(_)
}
)
} yield {
ShapeProperties(
timeField,
userField,
timeField,
userField,
name,
description
)
}
private def thumbnailSizeGen: Gen[ThumbnailSize] =
Gen.oneOf(ThumbnailSize.Small, ThumbnailSize.Large, ThumbnailSize.Square)
private def uploadStatusGen: Gen[UploadStatus] =
Gen.oneOf(
UploadStatus.Created,
UploadStatus.Uploading,
UploadStatus.Uploaded,
UploadStatus.Queued,
UploadStatus.Processing,
UploadStatus.Complete,
UploadStatus.Failed,
UploadStatus.Aborted
)
private def uploadTypeGen: Gen[UploadType] =
Gen.oneOf(
UploadType.Dropbox,
UploadType.S3,
UploadType.Local,
UploadType.Planet
)
private def fileTypeGen: Gen[FileType] =
Gen.oneOf(FileType.Geotiff, FileType.GeotiffWithMetadata)
// A one-day range starting on a random day in 2005-2019; day is capped at 28
// so every month is valid.
private def timeRangeGen: Gen[(LocalDate, LocalDate)] =
for {
year <- Gen.choose(2005, 2019)
month <- Gen.choose(1, 12)
day <- Gen.choose(1, 28)
} yield {
val start = LocalDate.of(year, month, day)
(start, start.plusDays(1))
}
// Generates a timestamp at the start of an arbitrary day in 2016; the day of
// month is capped at 28 so the date is valid in every month.
private def timestampIn2016Gen: Gen[Timestamp] =
  Gen.choose(1, 12) flatMap { month =>
    Gen.choose(1, 28) map { day =>
      Timestamp.valueOf(LocalDate.of(2016, month, day).atStartOfDay)
    }
  }
// generate up to a 50km/side polygon with bounds in EPSG:3857 bounds
// Built as a rectangle centred inside roughly +/-2e7 metres (the web-mercator
// valid extent), then normalised to its extent polygon.
private def polygonGen3857: Gen[Polygon] =
for {
width <- Gen.choose(100, 50000)
height <- Gen.choose(100, 50000)
centerX <- Gen.choose(-2e7, 2e7)
centerY <- Gen.choose(-2e7, 2e7)
} yield {
(Extent
.toPolygon(
(Rectangle()
.withWidth(width)
.withHeight(height)
.setCenter(Point(centerX, centerY))
.build(): Geometry).extent
))
}
// NOTE(review): Gen.oneOf(1, 2) yields exactly 1 or 2 polygons, never an
// in-between count (trivially the same here, but the pattern recurs below).
private def multiPolygonGen3857: Gen[MultiPolygon] =
for {
polygons <- Gen.oneOf(1, 2) flatMap {
Gen.listOfN[Polygon](_, polygonGen3857)
}
} yield MultiPolygon(polygons)
// Tags the multipolygon with SRID 3857 (web mercator).
private def projectedMultiPolygonGen3857: Gen[Projected[MultiPolygon]] =
multiPolygonGen3857 map { Projected(_, 3857) }
private def annotationGroupCreateGen: Gen[AnnotationGroup.Create] =
for {
name <- nonEmptyStringGen
defaultStyle <- Gen.const(Some(().asJson))
} yield { AnnotationGroup.Create(name, defaultStyle) }
// Fixed label vocabulary shared by annotation generators.
val labelValues = Seq("Car", "Human", "Apple")
private def annotationCreateGen: Gen[Annotation.Create] =
for {
owner <- Gen.const(None)
label <- Gen.oneOf(labelValues)
description <- nonEmptyStringGen map { Some(_) }
machineGenerated <- arbitrary[Option[Boolean]]
confidence <- Gen.choose(0.0f, 1.0f) map { Some(_) }
quality <- annotationQualityGen map { Some(_) }
geometry <- projectedMultiPolygonGen3857 map { Some(_) }
} yield {
Annotation.Create(
owner,
label,
description,
machineGenerated,
confidence,
quality,
geometry,
None
)
}
private def organizationCreateGen: Gen[Organization.Create] =
for {
name <- nonEmptyStringGen
visibility <- visibilityGen
orgStatus <- orgStatusGen
} yield Organization
.Create(name, UUID.randomUUID, Some(visibility), orgStatus)
private def organizationGen: Gen[Organization] =
organizationCreateGen map {
_.toOrganization(true)
}
private def shapeCreateGen: Gen[Shape.Create] =
for {
owner <- Gen.const(None)
name <- nonEmptyStringGen
description <- nonEmptyStringGen map { Some(_) }
geometry <- projectedMultiPolygonGen3857
} yield {
Shape.Create(owner, name, description, geometry)
}
private def shapeGeoJSONGen: Gen[Shape.GeoJSON] =
for {
id <- uuidGen
geometry <- projectedMultiPolygonGen3857
properties <- shapePropertiesGen
} yield {
Shape.GeoJSON(id, Some(geometry), properties)
}
private def userCreateGen: Gen[User.Create] =
for {
id <- uuidGen map { _.toString }
role <- userRoleGen
email <- nonEmptyStringGen
name <- nonEmptyStringGen
profileImageUri <- nonEmptyStringGen
} yield { User.Create(id, role, email, name, profileImageUri) }
// The same placeholder UUID is reused for both trailing JwtFields slots.
private def userJwtFieldsGen: Gen[User.JwtFields] =
for {
userCreate <- userCreateGen
placeholderUUID <- uuidGen
} yield {
User.JwtFields(
userCreate.id,
userCreate.email,
userCreate.name,
userCreate.profileImageUri,
placeholderUUID,
placeholderUUID
)
}
private def userGen: Gen[User] = userCreateGen map { _.toUser }
// Wavelength is a sorted 2-element range [low, high].
private def bandIdentifiedGen: Gen[Band.Identified] =
for {
name <- nonEmptyStringGen
number <- Gen.choose(1, 15)
wavelength <- Gen.listOfN(2, Gen.choose(1, 50000)) map { _.sorted }
imageId <- uuidGen
} yield { Band.Identified(None, imageId, name, number, wavelength) }
private def bandCreateGen: Gen[Band.Create] =
for {
name <- nonEmptyStringGen
number <- Gen.choose(1, 15)
wavelength <- Gen.listOfN(2, Gen.choose(1, 50000)) map { _.sorted }
} yield { Band.Create(name, number, wavelength) }
private def bandGen: Gen[Band] = bandIdentifiedGen map { _.toBand }
// ---------------------------------------------------------------------------
// Image / project generators.
// ---------------------------------------------------------------------------
private def singleBandOptionsParamsGen: Gen[SingleBandOptions.Params] =
for {
band <- Gen.choose(1, 15)
datatype <- bandDataTypeGen
colorBins <- Gen.choose(3, 17)
colorScheme <- Gen.const(().asJson)
legendOrientation <- nonEmptyStringGen
} yield {
SingleBandOptions.Params(
band,
datatype,
colorBins,
colorScheme,
legendOrientation
)
}
private def imageCreateGen: Gen[Image.Create] =
for {
rawDataBytes <- rawDataBytesGen
visibility <- visibilityGen
filename <- nonEmptyStringGen
sourceUri <- nonEmptyStringGen
scene <- uuidGen
imageMetadata <- Gen.const(().asJson)
owner <- stringOptionGen
resolutionMeters <- Gen.choose(0.25f, 1000f)
metadataFiles <- stringListGen
} yield Image.Create(
rawDataBytes,
visibility,
filename,
sourceUri,
scene,
imageMetadata,
owner,
resolutionMeters,
metadataFiles
)
// Same shape as Image.Create but with a generated band list; note the
// argument order differs from Image.Create (owner precedes scene here).
private def imageBandedGen: Gen[Image.Banded] =
for {
rawDataBytes <- rawDataBytesGen
visibility <- visibilityGen
filename <- nonEmptyStringGen
sourceUri <- nonEmptyStringGen
scene <- uuidGen
imageMetadata <- Gen.const(().asJson)
owner <- stringOptionGen
resolutionMeters <- Gen.choose(0.25f, 1000f)
metadataFiles <- stringListGen
bands <- Gen.listOf[Band.Create](bandCreateGen)
} yield Image.Banded(
rawDataBytes,
visibility,
filename,
sourceUri,
owner,
scene,
imageMetadata,
resolutionMeters,
metadataFiles,
bands
)
// The generated user is made the image's owner before conversion.
private def imageGen: Gen[Image] =
for {
imCreate <- imageCreateGen
user <- userGen
} yield imCreate.copy(owner = Some(user.id)).toImage(user)
private def projectCreateGen: Gen[Project.Create] =
for {
name <- nonEmptyStringGen
description <- nonEmptyStringGen
visibility <- visibilityGen
tileVisibility <- visibilityGen
isAOIProject <- arbitrary[Boolean]
aoiCadenceMillis <- Gen.choose(0L, 604800000L)
owner <- Gen.const(None)
tags <- stringListGen
isSingleBand <- arbitrary[Boolean]
singleBandOptions <- singleBandOptionsParamsGen map { Some(_) }
extras <- Gen.const(().asJson)
} yield {
Project.Create(
name,
description,
visibility,
tileVisibility,
isAOIProject,
aoiCadenceMillis,
owner,
tags,
isSingleBand,
singleBandOptions,
Some(extras)
)
}
private def projectGen: Gen[Project] =
for {
projCreate <- projectCreateGen
defaultLayerId <- uuidGen
user <- userGen
} yield {
projCreate.copy(owner = Some(user.id)).toProject(user, defaultLayerId)
}
// ---------------------------------------------------------------------------
// Scene generators. Optional filter fields are None 1 time in 11 and Some
// 10 times in 11 (Gen.frequency weights 1 vs 10).
// ---------------------------------------------------------------------------
private def sceneFilterFieldsGen: Gen[SceneFilterFields] =
for {
cloudCover <- Gen.frequency(
(1, None),
(
10,
Gen.choose(0.0f, 1.0f) map {
Some(_)
}
)
)
acquisitionDate <- Gen.frequency(
(1, None),
(
10,
timestampIn2016Gen map {
Some(_)
}
)
)
sunAzimuth <- Gen.frequency(
(1, None),
(
10,
Gen.choose(0f, 360f) map {
Some(_)
}
)
)
sunElevation <- Gen.frequency(
(1, None),
(
10,
Gen.choose(0f, 90f) map {
Some(_)
}
)
)
} yield {
SceneFilterFields(cloudCover, acquisitionDate, sunAzimuth, sunElevation)
}
private def sceneStatusFieldsGen: Gen[SceneStatusFields] =
for {
thumbnailStatus <- jobStatusGen
boundaryStatus <- jobStatusGen
ingestStatus <- ingestStatusGen
} yield { SceneStatusFields(thumbnailStatus, boundaryStatus, ingestStatus) }
// Thumbnails are square: the one sideLength is used for width and height.
private def thumbnailIdentifiedGen: Gen[Thumbnail.Identified] =
for {
id <- uuidGen map { Some(_) }
thumbnailSize <- thumbnailSizeGen
sideLength <- Gen.choose(200, 1000)
sceneId <- uuidGen
url <- nonEmptyStringGen
} yield {
Thumbnail.Identified(
id,
thumbnailSize,
sideLength,
sideLength,
sceneId,
url
)
}
private def thumbnailGen: Gen[Thumbnail] =
for {
thumbnailIdentified <- thumbnailIdentifiedGen
} yield { thumbnailIdentified.toThumbnail }
// NOTE(review): Gen.oneOf(1, 10) / Gen.oneOf(1, 2) pick one of the two listed
// counts, not a range between them — presumably intentional extremes.
private def sceneCreateGen: Gen[Scene.Create] =
for {
sceneId <- uuidGen map { Some(_) }
visibility <- Gen.const(Visibility.Private)
tags <- stringListGen
datasource <- uuidGen
sceneMetadata <- Gen.const(().asJson)
name <- nonEmptyStringGen
owner <- stringOptionGen
tileFootprint <- projectedMultiPolygonGen3857 map { Some(_) }
dataFootprint <- projectedMultiPolygonGen3857 map { Some(_) }
metadataFiles <- stringListGen
images <- Gen.oneOf(1, 10) flatMap { Gen.listOfN(_, imageBandedGen) }
thumbnails <- Gen.oneOf(1, 2) flatMap {
Gen.listOfN(_, thumbnailIdentifiedGen)
}
ingestLocation <- Gen.oneOf(
nonEmptyStringGen map { Some(_) },
Gen.const(None)
)
filterFields <- sceneFilterFieldsGen
statusFields <- sceneStatusFieldsGen
sceneType <- Gen.option(sceneTypeGen)
} yield {
Scene.Create(
sceneId,
visibility,
tags,
datasource,
sceneMetadata,
name,
owner,
tileFootprint,
dataFootprint,
metadataFiles,
images,
thumbnails,
ingestLocation,
filterFields,
statusFields,
sceneType
)
}
// ---------------------------------------------------------------------------
// Datasource, upload, layer-attribute, query-param, and team generators.
// ---------------------------------------------------------------------------
private def datasourceCreateGen: Gen[Datasource.Create] =
for {
name <- nonEmptyStringGen
visibility <- visibilityGen
owner <- Gen.const(None)
composites <- Gen.const(Map.empty[String, ColorComposite])
extras <- Gen.const(().asJson)
// bands gets a concrete nonsense type to make the implicits work
bands <- Gen.const(List.empty[Int].asJson)
licenseName <- Gen.oneOf(None, Some("GPL-3.0"))
} yield {
Datasource.Create(
name,
visibility,
owner,
composites,
extras,
bands,
licenseName
)
}
private def uploadCreateGen: Gen[Upload.Create] =
for {
uploadStatus <- uploadStatusGen
fileType <- fileTypeGen
uploadType <- uploadTypeGen
files <- stringListGen
datasource <- uuidGen
metadata <- Gen.const(().asJson)
owner <- Gen.const(None)
visibility <- visibilityGen
projectId <- Gen.const(None)
layerId <- Gen.const(None)
source <- Gen.oneOf(nonEmptyStringGen map { Some(_) }, Gen.const(None))
keepInSourceBucket <- Gen.const(None)
annotationProjectId <- Gen.const(None)
generateTasks <- Gen.const(false)
} yield {
Upload.Create(
uploadStatus,
fileType,
uploadType,
files,
datasource,
metadata,
owner,
visibility,
projectId,
layerId,
source,
keepInSourceBucket,
annotationProjectId,
generateTasks
)
}
private def geojsonUploadCreateGen: Gen[GeojsonUpload.Create] =
for {
uploadStatus <- uploadStatusGen
fileType <- fileTypeGen
uploadType <- uploadTypeGen
files <- stringListGen
keepFiles <- arbitrary[Boolean]
} yield {
GeojsonUpload.Create(
uploadStatus,
fileType,
uploadType,
files,
keepFiles
)
}
private def layerAttributeGen: Gen[LayerAttribute] =
for {
layerName <- nonEmptyStringGen
zoom <- Gen.choose(0, 30)
name <- nonEmptyStringGen
value <- Gen.const(().asJson)
} yield {
LayerAttribute(layerName, zoom, name, value)
}
// Ten attributes rewritten to share one layer name.
private def layerAttributesWithSameLayerNameGen: Gen[List[LayerAttribute]] =
for {
layerName <- nonEmptyStringGen
layerAttributes <- Gen.listOfN(10, layerAttributeGen)
} yield layerAttributes map { _.copy(layerName = layerName) }
// Query-parameter generators are constant defaults (no randomisation needed).
private def combinedSceneQueryParamsGen: Gen[CombinedSceneQueryParams] =
Gen.const(CombinedSceneQueryParams())
private def annotationQueryParametersGen: Gen[AnnotationQueryParameters] =
Gen.const(AnnotationQueryParameters())
private def projectSceneQueryParametersGen: Gen[ProjectSceneQueryParameters] =
Gen.const(ProjectSceneQueryParameters())
private def teamCreateGen: Gen[Team.Create] =
for {
orgId <- uuidGen
name <- nonEmptyStringGen
settings <- Gen.const(().asJson)
} yield Team.Create(orgId, name, settings)
private def teamGen: Gen[Team] =
for {
user <- userGen
teamCreate <- teamCreateGen
} yield {
teamCreate.toTeam(user)
}
private def userGroupRoleCreateGen: Gen[UserGroupRole.Create] =
for {
user <- userGen
groupType <- groupTypeGen
groupId <- uuidGen
groupRole <- groupRoleGen
} yield { UserGroupRole.Create(user.id, groupType, groupId, groupRole) }
// ---------------------------------------------------------------------------
// Platform and access-control generators.
// ---------------------------------------------------------------------------
private def platformPublicSettingsGen: Gen[Platform.PublicSettings] =
for {
emailSmtpUserName <- nonEmptyStringGen
emailSmtpHost <- nonEmptyStringGen
emailSmtpPort <- Gen.oneOf(25, 2525, 465, 587)
emailSmtpEncryption <- Gen.oneOf("ssl", "tls", "starttls")
emailIngestNotification <- arbitrary[Boolean]
emailExportNotification <- arbitrary[Boolean]
platformHost <- Gen.const(None)
emailFrom <- nonEmptyStringGen
emailFromDisplayName <- nonEmptyStringGen
emailSupport <- nonEmptyStringGen
} yield {
Platform.PublicSettings(
emailSmtpUserName,
emailSmtpHost,
emailSmtpPort,
emailSmtpEncryption,
emailIngestNotification,
emailExportNotification,
platformHost,
emailFrom,
emailFromDisplayName,
emailSupport
)
}
private def platformPrivateSettingsGen: Gen[Platform.PrivateSettings] =
for {
emailPassword <- nonEmptyStringGen
} yield { Platform.PrivateSettings(emailPassword) }
private def platformGen: Gen[Platform] =
for {
platformId <- uuidGen
platformName <- uuidGen map { _.toString }
publicSettings <- platformPublicSettingsGen
isActive <- arbitrary[Boolean]
defaultOrganizationId <- Gen.const(None)
privateSettings <- platformPrivateSettingsGen
} yield {
Platform(
platformId,
platformName,
publicSettings,
isActive,
defaultOrganizationId,
privateSettings
)
}
// The organization create is re-pointed at the generated platform's id so the
// triple is internally consistent.
private def userOrgPlatformGen
: Gen[(User.Create, Organization.Create, Platform)] =
for {
platform <- platformGen
orgCreate <- organizationCreateGen map {
_.copy(platformId = platform.id)
}
userCreate <- userCreateGen
} yield { (userCreate, orgCreate, platform) }
private def searchQueryParametersGen: Gen[SearchQueryParameters] =
for {
searchName <- possiblyEmptyStringGen
} yield { SearchQueryParameters(Some(searchName)) }
// SubjectType.All carries no subject id; all other subject types do.
private def objectAccessControlRuleGen: Gen[ObjectAccessControlRule] =
for {
subjectType <- subjectTypeGen
subjectId <- uuidGen
actionType <- actionTypeGen
} yield {
ObjectAccessControlRule(
subjectType,
subjectType match {
case SubjectType.All => None
case _ => Some(subjectId.toString)
},
actionType
)
}
// ---------------------------------------------------------------------------
// Tool, map-token, export, and project-layer generators.
// ---------------------------------------------------------------------------
private def toolCreateGen: Gen[Tool.Create] =
for {
title <- nonEmptyStringGen
description <- nonEmptyStringGen
requirements <- nonEmptyStringGen
license <- Gen.const(Option.empty[Int])
visibility <- visibilityGen
compatibleDataSources <- Gen.const(List.empty)
owner <- Gen.const(None)
stars <- Gen.const(9999.9f) // good tools only :sunglasses:
definition <- Gen.const(().asJson)
singleSource <- arbitrary[Boolean]
} yield {
Tool.Create(
title,
description,
requirements,
license,
visibility,
compatibleDataSources,
owner,
stars,
definition,
singleSource
)
}
private def toolRunCreateGen: Gen[ToolRun.Create] =
for {
name <- Gen.option(nonEmptyStringGen)
visibility <- visibilityGen
executionParameters <- Gen.const(().asJson)
owner <- Gen.const(None)
} yield {
ToolRun.Create(
name,
visibility,
None,
None,
None,
executionParameters,
owner
)
}
private def mapTokenCreateGen: Gen[MapToken.Create] =
nonEmptyStringGen map { name =>
MapToken.Create(name, None, None, None)
}
private def exportTypeGen: Gen[ExportType] =
Gen.oneOf(ExportType.Dropbox, ExportType.Local, ExportType.S3)
// Output CRS is pinned to 3857 and the destination URI is left empty.
private def exportOptionGen: Gen[ExportOptions] =
for {
mask: Option[Projected[MultiPolygon]] <-
projectedMultiPolygonGen3857 map {
Some(_)
}
resolution <- arbitrary[Int]
rasterSize <- arbitrary[Option[Int]]
crop <- arbitrary[Boolean]
raw <- arbitrary[Boolean]
bands <- arbitrary[Option[Seq[Int]]]
operation <- nonEmptyStringGen
} yield ExportOptions(
mask,
resolution,
crop,
raw,
bands,
rasterSize,
Some(3857),
new URI(""),
operation
)
private def exportCreateGen: Gen[Export.Create] =
for {
projectId <- Gen.const(None)
exportStatus <- exportStatusGen
exportType <- exportTypeGen
visibility <- visibilityGen
toolRunId <- Gen.const(None)
projectLayerId <- Gen.const(None)
exportOptions <- exportOptionGen
} yield {
Export.Create(
projectId,
exportStatus,
exportType,
visibility,
None,
toolRunId,
exportOptions.asJson,
projectLayerId
)
}
private def projectLayerCreateGen: Gen[ProjectLayer.Create] =
for {
name <- nonEmptyStringGen
projectId <- Gen.const(None)
colorGroupHex <- Gen.const("#ABCDEF")
smartLayerId <- Gen.const(None)
rangeStart <- Gen.const(None)
rangeEnd <- Gen.const(None)
geometry <- Gen.const(None)
isSingleBand <- Gen.const(false)
singleBandOptions <- Gen.const(None)
overviewsLocation <- Gen.const(None)
minZoomLevel <- Gen.const(None)
} yield {
ProjectLayer.Create(
name,
projectId,
colorGroupHex,
smartLayerId,
rangeStart,
rangeEnd,
geometry,
isSingleBand,
singleBandOptions,
overviewsLocation,
minZoomLevel
)
}
// ---------------------------------------------------------------------------
// Metric and task generators.
// ---------------------------------------------------------------------------
// .widen upcasts each Gen to the common MetricEvent supertype for oneOf.
private def metricEventGen: Gen[MetricEvent] =
Gen.oneOf(
projectMosaicEventGen.widen,
analysisEventGen.widen
)
private def projectMosaicEventGen: Gen[ProjectLayerMosaicEvent] =
for {
projectId <- uuidGen
projectLayerId <- uuidGen
projectOwner <- nonEmptyStringGen
referer <- nonEmptyStringGen
} yield ProjectLayerMosaicEvent(
projectId,
projectLayerId,
projectOwner,
referer
)
// projectId and projectLayerId are generated together so they are either
// both Some or both None.
private def analysisEventGen: Gen[AnalysisEvent] =
for {
(projectId, projectLayerId) <- Gen.oneOf(
(
uuidGen map { Some(_) },
uuidGen map {
Some(_)
}
).tupled,
Gen.const((None, None))
)
analysisId <- uuidGen
nodeId <- Gen.oneOf(
Gen.const(None),
uuidGen map { Some(_) }
)
analysisOwner <- nonEmptyStringGen
referer <- nonEmptyStringGen
} yield AnalysisEvent(
projectId,
projectLayerId,
analysisId,
nodeId,
analysisOwner,
referer
)
private def metricGen: Gen[Metric] =
for {
period <- timeRangeGen
metricEvent <- metricEventGen
value <- Gen.const(1)
requester <- nonEmptyStringGen
} yield { Metric(period, metricEvent, requester, value) }
// NOTE(review): status is a constant Unlabeled, so the Flagged branch below
// is currently dead code — kept so the invariant "Flagged implies a note"
// survives if the status generator is widened. Confirm intent.
private def taskPropertiesCreateGen: Gen[Task.TaskPropertiesCreate] =
for {
status <- Gen.const[TaskStatus](TaskStatus.Unlabeled)
annotationProjectId <- uuidGen
note <-
if (status == TaskStatus.Flagged) {
nonEmptyStringGen map { s =>
Some(NonEmptyString.unsafeFrom(s))
}
} else {
Gen.const(None)
}
taskType <- Gen.oneOf(
Gen.const(None),
taskTypeGen map { Some(_) }
)
reviews <- Gen.const(None)
reviewStatus <- Gen.const(None)
} yield {
Task.TaskPropertiesCreate(
status,
annotationProjectId,
note,
taskType,
None,
reviews,
reviewStatus
)
}
private def taskFeatureCreateGen: Gen[Task.TaskFeatureCreate] =
for {
properties <- taskPropertiesCreateGen
geometry <- projectedMultiPolygonGen3857
} yield { Task.TaskFeatureCreate(properties, geometry) }
private def taskFeatureCollectionCreateGen
: Gen[Task.TaskFeatureCollectionCreate] =
for {
features <- Gen.nonEmptyListOf(taskFeatureCreateGen)
} yield {
Task.TaskFeatureCollectionCreate(features = features)
}
private def taskGridCreatePropertiesGen: Gen[Task.TaskGridCreateProperties] =
for {
sizeMeters <- Gen.const(100000)
} yield {
Task.TaskGridCreateProperties(Some(sizeMeters))
}
private def taskGridFeatureCreateGen: Gen[Task.TaskGridFeatureCreate] =
for {
properties <- taskGridCreatePropertiesGen
geometry <- Gen.option(projectedMultiPolygonGen3857)
} yield {
Task.TaskGridFeatureCreate(properties, geometry)
}
// NOTE(review): Gen.oneOf(0, 5) yields exactly 0 or 5 statuses, not a range.
private def taskStatusListGen: Gen[List[TaskStatus]] =
Gen.oneOf(0, 5) flatMap { Gen.listOfN(_, taskStatusGen) }
/** Generates an optional non-empty list of export asset types.
  *
  * Previously this sampled a single ExportAssetType and built a list of N
  * copies of that one value (the sampled value was lifted into
  * Gen.nonEmptyListOf), so every element of a generated list was identical.
  * Generating directly from exportAssetTypeGen produces lists with varied
  * elements while keeping the same Gen[Option[NEL[ExportAssetType]]]
  * interface. Because the list is non-empty, NEL.fromList always yields Some.
  */
private def exportAssetTypeNelGen: Gen[Option[NEL[ExportAssetType]]] =
  for {
    exportAssetTypes <- Gen.nonEmptyListOf(exportAssetTypeGen)
  } yield (NEL.fromList(exportAssetTypes))
// A Flagged next-status must carry a note; every other status carries None.
private def taskNextStatusGen: Gen[TaskNextStatus] =
for {
nextStatus <- taskStatusGen
note <-
if (nextStatus == TaskStatus.Flagged) {
nonEmptyStringGen map { s =>
Some(NonEmptyString.unsafeFrom(s))
}
} else {
Gen.const(None)
}
} yield TaskNextStatus(nextStatus, note)
// ---------------------------------------------------------------------------
// STAC export and tile-layer generators. The *Tup values are tuples of Gens
// that mapN applies positionally to the corresponding case-class apply.
// ---------------------------------------------------------------------------
private val stacAnnotationExportGenTup =
(
nonEmptyStringGen,
Gen.const(StacExportLicense(Proprietary(), Some("http://example.com"))),
taskStatusListGen,
uuidGen
)
private val stacCampaignExportGenTup =
(
nonEmptyStringGen,
Gen.const(StacExportLicense(Proprietary(), Some("http://example.com"))),
taskStatusListGen,
exportAssetTypeNelGen,
uuidGen
)
private def stacAnnotationProjectExportGen
: Gen[StacExport.AnnotationProjectExport] =
stacAnnotationExportGenTup.mapN(StacExport.AnnotationProjectExport.apply)
private def stacCampaignExportGen: Gen[StacExport.CampaignExport] =
stacCampaignExportGenTup.mapN(StacExport.CampaignExport.apply)
private def stacExportQueryParametersGen: Gen[StacExportQueryParameters] =
Gen.const(StacExportQueryParameters())
private def tileLayerCreateGen: Gen[TileLayer.Create] =
(
nonEmptyStringGen,
nonEmptyStringGen,
Gen.option(arbitrary[Boolean]),
Gen.option(arbitrary[Boolean]),
tileLayerTypeGen,
Gen.option(
Gen.oneOf(
TileLayerQuality.Good,
TileLayerQuality.Better,
TileLayerQuality.Best
)
)
).mapN(TileLayer.Create.apply _)
private def labelClassCreateGen: Gen[AnnotationLabelClass.Create] =
(
nonEmptyStringGen,
Gen.const("#AB34DE"),
Gen.option(arbitrary[Boolean]),
Gen.option(arbitrary[Boolean]),
Gen.choose(0, 100),
Gen.option(
Gen.oneOf(
LabelGeomType.PointLabel,
LabelGeomType.PolygonLabel
)
),
Gen.option(nonEmptyStringGen)
).mapN(AnnotationLabelClass.Create.apply _)
private def labelClassGroupGen: Gen[AnnotationLabelClassGroup.Create] =
(
nonEmptyStringGen,
Gen.option(Gen.choose(0, 1000)),
Gen.listOfN(1, labelClassCreateGen)
).mapN(AnnotationLabelClassGroup.Create.apply _)
private def annotationProjectCreateGen: Gen[AnnotationProject.Create] =
(
nonEmptyStringGen,
annotationProjectTypeGen,
Gen.choose(1, 1000),
Gen.option(projectedMultiPolygonGen3857),
Gen.const(None),
Gen.const(None),
Gen.const(None),
tileLayerCreateGen map { List(_) },
Gen.listOfN(3, labelClassGroupGen),
annotationProjectStatusGen,
Gen.const(None),
// always generate a timestamp -- copying child project labels back requires one,
// and it doesn't hurt anything else to have one
Gen.const(Some(Timestamp.valueOf(LocalDate.now.atStartOfDay)))
).mapN(AnnotationProject.Create.apply _)
private def annotationLabelWithClassesCreateGen
: Gen[AnnotationLabelWithClasses.Create] =
(
projectedMultiPolygonGen3857,
Gen.const(Nil),
Gen.option(nonEmptyStringGen),
Gen.const(true),
Gen.option(uuidGen),
Gen.option(arbitrary[Int].map(_.toFloat))
).mapN(AnnotationLabelWithClasses.Create.apply _)
private def continentGen: Gen[Continent] =
Gen.oneOf(
Continent.Asia,
Continent.Africa,
Continent.Antarctica,
Continent.Australia,
Continent.Europe,
Continent.NorthAmerica,
Continent.SouthAmerica
)
private def campaignCreateGen: Gen[Campaign.Create] =
(
nonEmptyStringGen,
annotationProjectTypeGen,
Gen.option(nonEmptyStringGen),
Gen.option(nonEmptyStringGen),
Gen.option(nonEmptyStringGen),
Gen.option(nonEmptyStringGen),
Gen.option(uuidGen),
Gen.option(continentGen),
stringListGen,
Gen.option(nonEmptyStringGen),
Gen.const(Option.empty[String])
).mapN(Campaign.Create.apply _)
private def campaignCloneGen: Gen[Campaign.Clone] =
(
stringListGen,
arbitrary[Boolean],
arbitrary[Boolean]
).mapN(Campaign.Clone.apply _)
private def taskSessionCreateGen: Gen[TaskSession.Create] =
for {
taskSessionType <- Gen.oneOf(
TaskSessionType.LabelSession,
TaskSessionType.ValidateSession
)
} yield TaskSession.Create(
taskSessionType
)
  // Generator for task session completion payloads: a target status plus a
  // note that is always present (Some of a NonEmptyString). unsafeFrom is
  // safe here because nonEmptyStringGen never produces "".
  private def taskSessionCompleteGen: Gen[TaskSession.Complete] =
    for {
      toStatus <- taskStatusGen
      note <- nonEmptyStringGen map { s =>
        Some(NonEmptyString.unsafeFrom(s))
      }
    } yield TaskSession.Complete(
      toStatus,
      note
    )
  /** Implicit [[Arbitrary]] instances exposing the generators defined above
    * to ScalaCheck, so properties can summon them via `arbitrary[...]`.
    */
  object Implicits {
    implicit def arbCredential: Arbitrary[Credential] =
      Arbitrary {
        credentialGen
      }
    implicit def arbPageRequest: Arbitrary[PageRequest] =
      Arbitrary {
        pageRequestGen
      }
    implicit def arbCombinedSceneQueryParams
        : Arbitrary[CombinedSceneQueryParams] =
      Arbitrary {
        combinedSceneQueryParamsGen
      }
    implicit def arbProjectsceneQueryParameters
        : Arbitrary[ProjectSceneQueryParameters] =
      Arbitrary { projectSceneQueryParametersGen }
    implicit def arbAnnotationCreate: Arbitrary[Annotation.Create] =
      Arbitrary {
        annotationCreateGen
      }
    implicit def arbListAnnotationCreate: Arbitrary[List[Annotation.Create]] =
      Arbitrary {
        Gen.listOfN(10, arbitrary[Annotation.Create])
      }
    implicit def arbAnnotationGroupCreate: Arbitrary[AnnotationGroup.Create] =
      Arbitrary { annotationGroupCreateGen }
    implicit def arbOrganization: Arbitrary[Organization] =
      Arbitrary {
        organizationGen
      }
    implicit def arbExport: Arbitrary[Export.Create] =
      Arbitrary {
        exportCreateGen
      }
    implicit def arbOrganizationCreate: Arbitrary[Organization.Create] =
      Arbitrary { organizationCreateGen }
    implicit def arbUserCreate: Arbitrary[User.Create] =
      Arbitrary {
        userCreateGen
      }
    implicit def arbUser: Arbitrary[User] = Arbitrary { userGen }
    implicit def arbBand: Arbitrary[Band] = Arbitrary { bandGen }
    implicit def arbImage: Arbitrary[Image] = Arbitrary { imageGen }
    implicit def arbImageCreate: Arbitrary[Image.Create] =
      Arbitrary {
        imageCreateGen
      }
    implicit def arbImageBanded: Arbitrary[Image.Banded] =
      Arbitrary {
        imageBandedGen
      }
    implicit def arbProjectCreate: Arbitrary[Project.Create] =
      Arbitrary {
        projectCreateGen
      }
    implicit def arbProject: Arbitrary[Project] = Arbitrary { projectGen }
    implicit def arbSceneCreate: Arbitrary[Scene.Create] =
      Arbitrary {
        sceneCreateGen
      }
    implicit def arbShapeCreate: Arbitrary[Shape.Create] =
      Arbitrary {
        shapeCreateGen
      }
    implicit def arbShapeGeoJSON: Arbitrary[Shape.GeoJSON] =
      Arbitrary {
        shapeGeoJSONGen
      }
    implicit def arbListSceneCreate: Arbitrary[List[Scene.Create]] =
      Arbitrary {
        Gen.oneOf(
          // 11 is one more than the size of the pageRequest that we'll generate, so this allows
          // testing paging and counting correctly
          Gen.listOfN(11, sceneCreateGen),
          Gen.listOfN(7, sceneCreateGen),
          Gen.listOfN(0, sceneCreateGen)
        )
      }
    implicit def arbThumbnail: Arbitrary[Thumbnail] = Arbitrary { thumbnailGen }
    implicit def arbDatasourceCreate: Arbitrary[Datasource.Create] =
      Arbitrary {
        datasourceCreateGen
      }
    implicit def arbUploadCreate: Arbitrary[Upload.Create] =
      Arbitrary {
        uploadCreateGen
      }
    implicit def arbGeojsonUploadCreate: Arbitrary[GeojsonUpload.Create] =
      Arbitrary {
        geojsonUploadCreateGen
      }
    implicit def arbLayerAttribute: Arbitrary[LayerAttribute] =
      Arbitrary {
        layerAttributeGen
      }
    implicit def arbListLayerAttribute: Arbitrary[List[LayerAttribute]] =
      Arbitrary {
        layerAttributesWithSameLayerNameGen
      }
    implicit def arbTeamCreate: Arbitrary[Team.Create] =
      Arbitrary {
        teamCreateGen
      }
    implicit def arbTeam: Arbitrary[Team] = Arbitrary { teamGen }
    implicit def arbUserGroupRoleCreate: Arbitrary[UserGroupRole.Create] =
      Arbitrary { userGroupRoleCreateGen }
    implicit def arbGroupRoleCreate: Arbitrary[GroupRole] =
      Arbitrary {
        groupRoleGen
      }
    implicit def arbPlatform: Arbitrary[Platform] = Arbitrary { platformGen }
    implicit def arbUserOrgPlatform
        : Arbitrary[(User.Create, Organization.Create, Platform)] =
      Arbitrary {
        userOrgPlatformGen
      }
    implicit def arbUserJwtFields: Arbitrary[User.JwtFields] =
      Arbitrary {
        userJwtFieldsGen
      }
    implicit def arbUserVisibility: Arbitrary[UserVisibility] =
      Arbitrary {
        userVisibilityGen
      }
    implicit def arbSearchQueryParameters: Arbitrary[SearchQueryParameters] =
      Arbitrary { searchQueryParametersGen }
    implicit def arbObjectAccessControlRule
        : Arbitrary[ObjectAccessControlRule] =
      Arbitrary { objectAccessControlRuleGen }
    implicit def arbListObjectAccessControlRule
        : Arbitrary[List[ObjectAccessControlRule]] =
      Arbitrary {
        Gen.nonEmptyListOf[ObjectAccessControlRule](
          arbitrary[ObjectAccessControlRule]
        )
      }
    implicit def arbToolCreate: Arbitrary[Tool.Create] =
      Arbitrary { toolCreateGen }
    implicit def arbListToolCreate: Arbitrary[List[Tool.Create]] =
      Arbitrary {
        Gen.oneOf(
          Gen.listOfN(7, toolCreateGen),
          Gen.listOfN(0, toolCreateGen)
        )
      }
    implicit def arbToolRunCreate: Arbitrary[ToolRun.Create] =
      Arbitrary { toolRunCreateGen }
    implicit def arbMapTokenCreate: Arbitrary[MapToken.Create] =
      Arbitrary { mapTokenCreateGen }
    implicit def arbProjectLayerCreate: Arbitrary[ProjectLayer.Create] =
      Arbitrary { projectLayerCreateGen }
    implicit def arbProjectLayerCreateWithScenes
        : Arbitrary[List[(ProjectLayer.Create, List[Scene.Create])]] = {
      val tupGen = for {
        projectLayerCreate <- arbitrary[ProjectLayer.Create]
        sceneCreates <- arbitrary[List[Scene.Create]]
      } yield { (projectLayerCreate, sceneCreates) }
      Arbitrary { Gen.listOfN(5, tupGen) }
    }
    implicit def arbAnnotationQueryParameters
        : Arbitrary[AnnotationQueryParameters] =
      Arbitrary {
        annotationQueryParametersGen
      }
    implicit def arbMetric: Arbitrary[Metric] =
      Arbitrary {
        metricGen
      }
    implicit def arbNEL[T: Arbitrary]: Arbitrary[NEL[T]] =
      Arbitrary {
        for {
          h <- arbitrary[T]
          t <- arbitrary[List[T]]
        } yield { NEL(h, t) }
      }
    implicit def arbTaskStatus: Arbitrary[TaskStatus] =
      Arbitrary {
        taskStatusGen
      }
    implicit def arbTaskType: Arbitrary[TaskType] =
      Arbitrary {
        taskTypeGen
      }
    implicit def arbTaskFeatureCreate: Arbitrary[Task.TaskFeatureCreate] =
      Arbitrary {
        taskFeatureCreateGen
      }
    implicit def arbTaskFeatureCollectionCreate
        : Arbitrary[Task.TaskFeatureCollectionCreate] =
      Arbitrary {
        taskFeatureCollectionCreateGen
      }
    implicit def arbTaskGridFeatureCreate
        : Arbitrary[Task.TaskGridFeatureCreate] =
      Arbitrary {
        taskGridFeatureCreateGen
      }
    implicit def arbTaskPropertiesCreate: Arbitrary[Task.TaskPropertiesCreate] =
      Arbitrary {
        taskPropertiesCreateGen
      }
    implicit def arbStacAnnotationExport
        : Arbitrary[StacExport.AnnotationProjectExport] =
      Arbitrary { stacAnnotationProjectExportGen }
    implicit def arbStacCampaignExport: Arbitrary[StacExport.CampaignExport] =
      Arbitrary { stacCampaignExportGen }
    implicit def arbStacExportQueryParameters
        : Arbitrary[StacExportQueryParameters] =
      Arbitrary {
        stacExportQueryParametersGen
      }
    implicit def arbAnnotationProjectCreate
        : Arbitrary[AnnotationProject.Create] =
      Arbitrary {
        annotationProjectCreateGen
      }
    implicit def arbAnnotationLabelWithClassesCreate
        : Arbitrary[AnnotationLabelWithClasses.Create] =
      Arbitrary {
        annotationLabelWithClassesCreateGen
      }
    implicit def arbTileLayerCreate: Arbitrary[TileLayer.Create] =
      Arbitrary {
        tileLayerCreateGen
      }
    implicit def arbContinent: Arbitrary[Continent] =
      Arbitrary {
        continentGen
      }
    implicit def arbCampaignCreate: Arbitrary[Campaign.Create] =
      Arbitrary {
        campaignCreateGen
      }
    implicit def arbCampaignClone: Arbitrary[Campaign.Clone] =
      Arbitrary {
        campaignCloneGen
      }
    implicit def arbTaskSessionCreate: Arbitrary[TaskSession.Create] =
      Arbitrary {
        taskSessionCreateGen
      }
    implicit def arbTaskSessionComplete: Arbitrary[TaskSession.Complete] =
      Arbitrary {
        taskSessionCompleteGen
      }
    implicit def arbLabelClassCreate: Arbitrary[AnnotationLabelClass.Create] =
      Arbitrary { labelClassCreateGen }
    implicit def arbLabelClassGroup
        : Arbitrary[AnnotationLabelClassGroup.Create] =
      Arbitrary { labelClassGroupGen }
    implicit def arbTaskNextStatus: Arbitrary[TaskNextStatus] =
      Arbitrary { taskNextStatusGen }
  }
}
| apache-2.0 |
afamorim/Vortice | apps/vortice-core/src/main/java/com/vortice/core/util/DateUtil.java | 4854 | package com.vortice.core.util;
import java.sql.Timestamp;
import java.text.DateFormatSymbols;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import com.vortice.core.exception.AmbienteException;
/**
 * Singleton utility for converting between strings and date/time values
 * using the "dd/MM/yyyy" (and "dd/MM/yyyy HH:mm") patterns, plus a few
 * calendar helpers.
 *
 * NOTE(review): the lazy singleton initialization is not synchronized, so it
 * is not thread-safe; confirm whether this class is only used from a single
 * thread.
 */
public class DateUtil {

    // Lazily-created shared instance; see the getInstance overloads.
    private static DateUtil converter;
    // Locale used by the SimpleDateFormat-based conversions. May be null when
    // the instance was created via the no-arg getInstance().
    private Locale locale;

    private DateUtil(Locale locale){
        this.locale = locale;
    }

    private DateUtil(){}

    /**
     * Returns the shared converter, creating it with the given locale on
     * first use.
     *
     * NOTE(review): if the instance already exists, the locale argument is
     * silently ignored -- a later call with a different locale still gets the
     * old instance. Confirm whether callers depend on that.
     */
    public static DateUtil getInstance(Locale locale){
        if (converter==null){
            converter = new DateUtil(locale);
        }
        return converter;
    }

    /**
     * Returns the shared converter, creating it without a locale on first
     * use. NOTE(review): an instance created this way has a null locale, so
     * the convert* methods will NPE inside new DateFormatSymbols(locale) --
     * which is then swallowed and rethrown as AmbienteException.
     */
    public static DateUtil getInstance(){
        if (converter==null){
            converter = new DateUtil();
        }
        return converter;
    }

    /**
     * Parses a "dd/MM/yyyy" string into a Date.
     *
     * @throws AmbienteException if parsing fails
     */
    public Date convertStringToDate(String valor) throws AmbienteException{
        try{
            SimpleDateFormat dateFormatter = new SimpleDateFormat("dd/MM/yyyy",new DateFormatSymbols(locale));
            return dateFormatter.parse(valor);
        }catch (Exception e) {
            throw new AmbienteException(e);
        }
    }

    /**
     * Parses a "dd/MM/yyyy HH:mm" string into a Timestamp.
     *
     * @throws AmbienteException if parsing fails
     */
    public Timestamp convertStringToDateTime(String valor) throws AmbienteException{
        try{
            SimpleDateFormat dateFormatter = new SimpleDateFormat("dd/MM/yyyy HH:mm",new DateFormatSymbols(locale));
            return new Timestamp(dateFormatter.parse(valor).getTime());
        }catch (Exception e) {
            throw new AmbienteException(e);
        }
    }

    /**
     * Formats a Timestamp as "dd/MM/yyyy HH:mm".
     *
     * @throws AmbienteException if formatting fails
     */
    public String convertDateTimeToString(Timestamp valor) throws AmbienteException{
        try{
            SimpleDateFormat dateFormatter = new SimpleDateFormat("dd/MM/yyyy HH:mm",new DateFormatSymbols(locale));
            return dateFormatter.format(new Date(valor.getTime()));
        }catch (Exception e) {
            throw new AmbienteException(e);
        }
    }

    /**
     * Formats a Date as "dd/MM/yyyy".
     *
     * @throws AmbienteException if formatting fails
     */
    public String convertDateToString(Date valor) throws AmbienteException{
        try{
            SimpleDateFormat dateFormatter = new SimpleDateFormat("dd/MM/yyyy",new DateFormatSymbols(locale));
            return dateFormatter.format(valor);
        }catch (Exception e) {
            throw new AmbienteException(e);
        }
    }

    public void setLocale(Locale locale){
        this.locale = locale;
    }

    /**
     * Returns a new date advanced by one month.
     *
     * Relies on the Calendar being lenient: setting MONTH past December rolls
     * over into the next year. NOTE(review): for end-of-month inputs (e.g.
     * Jan 31) the lenient overflow yields early March rather than Feb 28 --
     * confirm this is the intended behavior (Calendar.add would clamp).
     *
     * @param data the starting date
     * @return a date one month later
     */
    public Date calcularProximoMes(Date data)
    {
        Calendar dataAtual = Calendar.getInstance();
        dataAtual.setTime(data);
        int indiceMes = dataAtual.get(Calendar.MONTH)+1;
        dataAtual.set(Calendar.MONTH,indiceMes);
        return dataAtual.getTime();
    }

    // Formats a month/year pair as "MM/yyyy" (day defaults to the 1st).
    public String formatarMesAnoNumero(Integer mes,Integer ano){
        Date date = convertToDate(null,mes,ano);
        return new SimpleDateFormat("MM/yyyy").format(date);
    }

    // Formats a month/year pair as "<full month name>/yyyy" using the default
    // locale (not this instance's locale field).
    public String formatarMesAnoDescricao(Integer mes,Integer ano){
        Date date = convertToDate(null,mes,ano);
        return new SimpleDateFormat("MMMM/yyyy").format(date);
    }

    /**
     * Builds a Date from day/month/year parts; a null day defaults to the
     * 1st. The month is 1-based (converted to Calendar's 0-based MONTH).
     * Time-of-day fields keep whatever "now" had when the Calendar was
     * created.
     */
    public Date convertToDate(Integer dia, Integer mes,Integer ano){
        Calendar c = Calendar.getInstance();
        if (dia==null)
            c.set(Calendar.DAY_OF_MONTH,1);
        else
            c.set(Calendar.DAY_OF_MONTH,dia.intValue());
        c.set(Calendar.MONTH,mes.intValue()-1);
        c.set(Calendar.YEAR,ano.intValue());
        return c.getTime();
    }

    /**
     * NOTE(review): this positions a Calendar at (epoch + millisecond
     * difference) and returns its DAY_OF_YEAR + 1. The result is offset by
     * the epoch's day-of-year, depends on the default timezone, and wraps for
     * spans of a year or more -- verify the intended semantics before
     * changing or reusing this.
     */
    public int subtractDays(Date dataInicial,Date dataFinal){
        long res = dataFinal.getTime()-dataInicial.getTime();
        Calendar c = Calendar.getInstance();
        c.setTimeInMillis(res);
        return c.get(Calendar.DAY_OF_YEAR)+1;
    }

    /**
     * Adds qtdDiasUteis business days to the given date, counting only
     * Monday-Friday (no holiday handling). Debug output is written to
     * System.out. Returns null if anything throws (the exception is
     * swallowed). The acmDias counter is incremented but never read.
     */
    public Date somaDiarUteis(Date data, int qtdDiasUteis){
        try {
            int diasUteis = 0;
            int acmDias = 0;
            Calendar calendario = Calendar.getInstance();
            calendario.setTime(data);
            SimpleDateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
            while (diasUteis < qtdDiasUteis){
                acmDias++;
                calendario.add(Calendar.DATE, 1);
                System.out.println("DATA " + dateFormat.format(calendario.getTime()));
                System.out.println("calendario.get(Calendar.DAY_OF_WEEK) " + calendario.get(Calendar.DAY_OF_WEEK));
                if (calendario.get(Calendar.DAY_OF_WEEK) != Calendar.SATURDAY &&
                        calendario.get(Calendar.DAY_OF_WEEK) != Calendar.SUNDAY){
                    diasUteis++;
                }else{
                    System.out.println("FINAL DE SEMANA");
                }
            }
            return calendario.getTime();
        } catch (Exception e) {
            return null;
        }
    }
}
jberezanski/choco | src/chocolatey/infrastructure/adapters/CustomString.cs | 1240 | // Copyright © 2017 - 2018 Chocolatey Software, Inc
// Copyright © 2011 - 2017 RealDimensions Software, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
//
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace chocolatey.infrastructure.adapters
{
    /// <summary>
    ///   Thin immutable wrapper around a <see cref="string" /> that converts
    ///   implicitly to and from <c>string</c>.
    /// </summary>
    public sealed class CustomString
    {
        private readonly string _value;

        public CustomString(string stringValue)
        {
            _value = stringValue;
        }

        /// <summary>Unwraps the underlying string value.</summary>
        public static implicit operator string(CustomString customString)
        {
            return customString._value;
        }

        /// <summary>Wraps a raw string in a <see cref="CustomString" />.</summary>
        public static implicit operator CustomString(string value)
        {
            return new CustomString(value);
        }
    }
}
| apache-2.0 |
igor-sfdc/aura | aura-components/src/test/components/uitest/tabset_Test/tabset_TestTest.js | 7077 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
({
    /**
     * Test making sure that getting element by Index works correctly
     */
    testGettingTabByTabIndex : {
        attributes : {"renderItem" : "basic"},
        test: [function(cmp) {
            this.activateElement(cmp, "Index");
        }, function(cmp){
            this.verifyNewlyActivatedElement("Index", "Dashboards");
        }]
    },
    /**
     * Test trying to get element by its tab name
     */
    testGettingTabByTabName : {
        attributes : {"renderItem" : "basic"},
        test: [function(cmp){
            this.activateElement(cmp, "Name");
        }, function(cmp){
            this.verifyNewlyActivatedElement("Name","Icon");
        }]
    },
    /**
     * Making sure that not have any tabs still works fine
     */
    testEmptyTab : {
        attributes : {"renderItem" : "noTabs"},
        test : function (cmp){
            var ulElem = cmp.find("noTabsTabSet").getElement().getElementsByTagName("ul")[0];
            var ulChldrn = this.ignoreComments(ulElem.children);
            $A.test.assertEquals(0, ulChldrn.length, "There should not be any tabs or errors present");
        }
    },
    /**
     * Verifying lazy rendering works as expected, With Lazy rendering we should only have a new section
     * when we click on a tab and activate
     */
    testLazyRendering : {
        attributes : {"renderItem" : "basic"},
        test : [function (cmp){
            /*
             * Get the active tab and verify that it matches the correct section
             * (in this case there should only be one section since we are loading things lazily)
             */
            var tabSet = cmp.find("tabset2").getElement();
            var section = tabSet.getElementsByTagName("section");
            $A.test.assertEquals(1, section.length, "Since we loading lazily we should only have one section on the page");
            //Verify that section and anchor aria-controled by id match
            this.matchSectionAndAnchor(cmp.find("chatter").get("v.title"), "tab 5 contents");
            //Click on the first item on the list
            var lis = tabSet.getElementsByTagName("li");
            $A.test.clickOrTouch(lis[6].children[0]);
        }, function (cmp){
            //Verify that the new active element is correct and its sections matches correctly
            this.matchSectionAndAnchor(cmp.find("dashboard").get("v.title"), "tab 7 contents");
        }]
    },
    /*************************************************************************************************************
     * HELPER FUNCTIONS
     ************************************************************************************************************/
    /**
     * Specifically for IE7/8 since grabbing all of the children from a parent element will include comments.
     * Returns a copy of the input collection with comment nodes (tagName "!") filtered out.
     */
    ignoreComments : function(elements){
        var elementArray = [];
        for(var i = 0; i < elements.length; i++){
            if(elements[i].tagName != "!"){
                // Bug fix: previously pushed from an undefined "chldrn" variable,
                // which threw a ReferenceError whenever a comment node was present.
                elementArray.push(elements[i]);
            }
        }
        return elementArray;
    },
    /**
     * Code extracted to be used to activate tab by name and by index
     */
    activateElement : function(cmp, activateBy){
        //Pressing button to activate predetermined tab
        cmp.find("activateBy"+activateBy).get("e.press").fire({});
        var tmpFunc = this.getElement;
        $A.test.addWaitFor(true, function(){
            return ($A.util.getText(tmpFunc("li", "tabItem uiTabItem active")[0]).indexOf("Chatter") < 0);
        });
    },
    /**
     * Verifying that the newly activated element is what we expect it to be
     */
    verifyNewlyActivatedElement : function (activateBy, text){
        //Get newly activated tab
        var element = this.getElement("li", "tabItem uiTabItem active");
        //Verify that there is only one tab active
        $A.test.assertEquals(element.length, 1, "There should only be one active tab");
        $A.test.assertNotUndefinedOrNull(element[0], "Finding an active element should not be null");
        var elmText = $A.util.getText(element[0]);
        $A.test.assertEquals(text, elmText, "Did not find the correct tab by its' "+activateBy.toLowerCase());
    },
    /**
     * Helper code verifying that we are looking at the correct items
     */
    matchSectionAndAnchor : function(tabText, bodyText){
        //Get Element in three different ways (1 way for ie7, 1 way for ie 8 and another way for all other browsers)
        var activeLi = this.getElement("li", "tabItem uiTabItem active");
        var activeSection = this.getElement("section", "tabBody uiTab active");
        $A.test.assertEquals(1, activeLi.length, "There should only be one active list element");
        $A.test.assertEquals(1, activeSection.length, "There should only be one active section element");
        //Grab the only elements
        activeLi = activeLi[0];
        activeSection = activeSection[0];
        var activeLiText = $A.util.getText(activeLi);
        var activeSectionText = $A.util.getText(activeSection);
        $A.test.assertEquals(tabText, activeLiText, "Text from the active tab, does not match what the text of the active tab should be");
        $A.test.assertTrue(activeSectionText.indexOf(bodyText) > -1, "Text from the active section, does not match what the text of the active section should be");
        //check to make sure the correct items are set
        var anchorAriaId = $A.util.getElementAttributeValue(activeLi.children[0], "aria-controls");
        var sectionId = $A.util.getElementAttributeValue(activeSection, "id");
        $A.test.assertEquals(anchorAriaId, sectionId, "Aria Anchor Id and section Id do not match");
    },
    /**
     * Extracted function so that we can use a more sophisticated way of getting the element by class.
     * Falls back to querySelectorAll (IE8) or a manual tag/class scan (IE7) when
     * $A.test.getElementByClass returns nothing.
     */
    getElement : function(elmTagName, classToUse){
        //All other browsers
        var activeElm = $A.test.getElementByClass(classToUse);
        //Custom way to get an element in ie8/7
        if($A.util.isUndefinedOrNull(activeElm)) {
            //IE8 custom way
            if(document.querySelectorAll){
                activeElm = document.querySelectorAll(elmTagName + "." +classToUse.replace(/ /g, "."));
            }
            //IE7 custom way
            else{
                activeElm = [];
                var elmArray = document.getElementsByTagName(elmTagName);
                var className = "";
                for(var i = 0; i< elmArray.length; i++){
                    className = $A.util.getElementAttributeValue(elmArray[i], "class");
                    if(!$A.util.isUndefinedOrNull(className) && className.indexOf(classToUse) > -1){
                        activeElm.push(elmArray[i]);
                    }
                }
            }
        }
        return activeElm;
    }
})
Yurik16/yuchuksin | chapter_002/src/main/java/ru/job4j/chess/Board.java | 2358 | package ru.job4j.chess;
import ru.job4j.chess.Exceptions.FigureNotFoundException;
import ru.job4j.chess.Exceptions.ImpossibleMoveException;
import ru.job4j.chess.Exceptions.OccupiedWayException;
import ru.job4j.chess.figures.AbstractFigure;
/**
 * Chess board holding the figures and validating moves.
 *
 * @author Yury Chuksin (chuksin.yury@gmail.com)
 * @since 30.04.2017.
 */
public class Board {
    /**
     * Figures currently placed on this board.
     */
    private AbstractFigure[] figures;

    /**
     * Constructor of Board.
     *
     * @param figures all figures at this board
     */
    Board(AbstractFigure[] figures) {
        this.figures = figures;
    }

    /**
     * Two dimension array with 64 cells.
     */
    private Cell[][] board = new Cell[8][8];

    /**
     * Initiate chess board.
     *
     * Bug fix: the previous loops ran indices 1..8 inclusive, which skipped
     * row/column 0, threw ArrayIndexOutOfBoundsException at index 8 on the
     * 8x8 array, and created cells with coordinates 2..9. Indices now run
     * 0..7 and cells get coordinates 1..8. (The method keeps its historical
     * misspelled name so existing callers still compile.)
     */
    public void initBorad() {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                board[i][j] = new Cell(i + 1, j + 1);
            }
        }
    }

    /**
     * Moves the figure standing on {@code source} to {@code dist}.
     *
     * @param source start cell
     * @param dist   destination cell
     * @return true when the move was performed, false when the figure cannot
     *         move that way
     * @throws ImpossibleMoveException can`t move like that
     * @throws FigureNotFoundException there is no figure at this cell
     * @throws OccupiedWayException    another figure on the way
     */
    boolean move(Cell source, Cell dist) throws ImpossibleMoveException, FigureNotFoundException, OccupiedWayException {
        AbstractFigure movingFigure = null;
        // Locate the figure standing on the source cell.
        for (AbstractFigure x : this.figures) {
            if (x.getFigurePosition().equals(source)) {
                movingFigure = x;
            }
        }
        if (movingFigure == null) {
            throw new FigureNotFoundException("There is no any figure at this Cell.");
        }
        if (movingFigure.isCorrectWay(dist)) {
            Cell[] resultCellArray = movingFigure.way(dist);
            // Every cell on the way must be free of other figures.
            for (AbstractFigure x : this.figures) {
                for (Cell cellFromWay : resultCellArray) {
                    if (x.getFigurePosition().equals(cellFromWay)) {
                        throw new OccupiedWayException("There is another figure on this way.");
                    }
                }
            }
            movingFigure.setFigurePosition(dist);
            return true;
        }
        return false;
    }
}
| apache-2.0 |
lkoskela/maven-build-utils | src/main/java/com/lassekoskela/maven/buildevents/BuildEventLog.java | 1479 | package com.lassekoskela.maven.buildevents;
import java.util.ArrayList;
import java.util.List;
import com.lassekoskela.maven.logging.Log;
/**
 * Collects timed build steps as they start and end, and produces duration
 * reports via {@link BuildEventLogReport}.
 */
public class BuildEventLog {
	// Sink for the generated report output.
	private final Log logger;
	// Completed build steps, in the order their end() calls arrived.
	private final List<BuildStep> steps;
	// The step opened by the most recent start() call.
	// NOTE(review): end() closes this step regardless of its own arguments,
	// so start/end pairs must arrive strictly sequentially from one thread;
	// calling end() before any start() throws NullPointerException. Confirm
	// that the Maven build-event callbacks guarantee this ordering.
	private BuildStep latestStep;

	public BuildEventLog(Log logger) {
		this.logger = logger;
		this.steps = new ArrayList<BuildStep>();
	}

	/** Records the beginning of a build step and starts its timer. */
	public void start(String project, String phase, String groupId,
			String artifactId, String goal) {
		latestStep = new BuildStep(project, phase, groupId, artifactId, goal);
		latestStep.start();
	}

	/**
	 * Stops the timer of the most recently started step and archives it.
	 * The parameters are currently unused (see the note on latestStep).
	 */
	public void end(String project, String phase, String groupId,
			String artifactId, String goal) {
		latestStep.end();
		steps.add(latestStep);
	}

	/** Emits the full duration report to the configured logger. */
	public void report() {
		createReport().report();
	}

	/** Total duration of all recorded steps, delegated to the report. */
	public long totalDuration() {
		return createReport().totalDuration();
	}

	public long totalDurationOfProject(String project) {
		return createReport().totalDurationOfProject(project);
	}

	public long totalDurationOfPhase(String phase) {
		return createReport().totalDurationOfPhase(phase);
	}

	public long totalDurationOfPhase(String project, String phase) {
		return createReport().totalDurationOfPhase(project, phase);
	}

	// Factory seam: protected so subclasses (presumably tests) can substitute
	// a different report implementation -- TODO confirm.
	protected BuildEventLogReport createReport(Log log, List<BuildStep> steps) {
		BuildEventLogReport report = new BuildEventLogReport(log);
		report.add(steps);
		return report;
	}

	private BuildEventLogReport createReport() {
		return createReport(logger, steps);
	}
}
| apache-2.0 |
d3sw/conductor | contribs/src/main/java/com/netflix/conductor/contribs/validation/ValidationTask.java | 5957 | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.netflix.conductor.contribs.validation;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.Task.Status;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.events.ScriptEvaluator;
import com.netflix.conductor.core.execution.WorkflowExecutor;
import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Singleton;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Conductor system task ("VALIDATION") that evaluates a set of named script
 * conditions against a payload and records per-condition results.
 *
 * Input parameters:
 * <ul>
 *   <li>payload     - object the conditions are evaluated against (required)</li>
 *   <li>conditions  - map of condition name to script expression (required, non-empty)</li>
 *   <li>reason      - optional overall failure message (defaults to "Payload validation failed")</li>
 *   <li>failOnFalse - when true (the default), any failed condition fails the task</li>
 * </ul>
 * Output: per-condition results under "conditions", plus "overallStatus" and
 * "overallReason".
 *
 * @author Oleksiy Lysak
 *
 */
@Singleton
public class ValidationTask extends WorkflowSystemTask {
    private static final Logger logger = LoggerFactory.getLogger(ValidationTask.class);
    private static final String REASON_PARAMETER = "reason";
    private static final String PAYLOAD_PARAMETER = "payload";
    private static final String CONDITIONS_PARAMETER = "conditions";

    public ValidationTask() {
        super("VALIDATION");
    }

    /**
     * Validates the task input, evaluates every condition against the
     * payload, and sets the task status:
     * COMPLETED when all conditions pass (or failOnFalse is false),
     * FAILED on missing/empty input or when a condition fails with
     * failOnFalse enabled. Evaluation errors are recorded as the error
     * message in place of a boolean result.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void start(Workflow workflow, Task task, WorkflowExecutor executor) throws Exception {
        Map<String, Object> taskInput = task.getInputData();
        Map<String, Object> taskOutput = task.getOutputData();
        Object payloadObj = taskInput.get(PAYLOAD_PARAMETER);
        logger.debug("Payload object is " + payloadObj);
        if(payloadObj == null) {
            task.setReasonForIncompletion("Missing '" + PAYLOAD_PARAMETER + "' in input parameters");
            task.setStatus(Task.Status.FAILED);
            return;
        }
        Map<String, String> conditionsObj = (Map<String, String>)taskInput.get(CONDITIONS_PARAMETER);
        logger.debug("Conditions object is " + conditionsObj);
        if (conditionsObj == null){
            task.setReasonForIncompletion("Missing '" + CONDITIONS_PARAMETER + "' in input parameters");
            task.setStatus(Task.Status.FAILED);
            return;
        } else if (conditionsObj.isEmpty()) {
            task.setReasonForIncompletion("'" + CONDITIONS_PARAMETER + "' input parameter is empty");
            task.setStatus(Task.Status.FAILED);
            return;
        }

        // Set the task status to complete at the begin
        task.setStatus(Status.COMPLETED);

        // Default is true. Will be set to false upon some condition fails
        AtomicBoolean overallStatus = new AtomicBoolean(true);

        // Go over all conditions and evaluate them
        conditionsObj.forEach((name, condition) -> {
            try {
                Boolean success = ScriptEvaluator.evalBool(condition, payloadObj);
                logger.debug("Evaluation resulted in " + success + " for " + name + "=" + condition);

                // Add condition evaluation result into output map
                addEvalResult(task, name, success);

                // Failed ?
                if (!success) {
                    // Set the over all status to false
                    overallStatus.set(false);
                }
            } catch (Exception ex) {
                logger.error("Evaluation failed for " + name + "=" + condition, ex);

                // Set the error message instead of false
                addEvalResult(task, name, ex.getMessage());

                // Set the over all status to false
                overallStatus.set(false);
            }
        });

        // Set the overall status to the output map
        taskOutput.put("overallStatus", overallStatus.get());

        // Get an additional configuration
        boolean failOnFalse = getFailOnFalse(task);

        // Build the overall reason
        String overallReason = (String)taskInput.get(REASON_PARAMETER);
        if (overallReason == null) {
            overallReason = "Payload validation failed";
        }

        // Set the overall reason to the output map
        taskOutput.put("overallReason", overallReason);

        // If overall status is false and we need to fail whole workflow
        if (!overallStatus.get() && failOnFalse) {
            task.setReasonForIncompletion(overallReason);
            task.setStatus(Status.FAILED);
        }
    }

    /**
     * Stores one condition's result under output["conditions"][condition],
     * lazily creating the nested map on first use.
     */
    @SuppressWarnings("unchecked")
    private void addEvalResult(Task task, String condition, Object result) {
        Map<String, Object> taskOutput = task.getOutputData();
        Map<String, Object> conditions = (Map<String, Object>)taskOutput.get(CONDITIONS_PARAMETER);
        if (conditions == null) {
            conditions = new HashMap<>();
            taskOutput.put(CONDITIONS_PARAMETER, conditions);
        }
        conditions.put(condition, result);
    }

    /**
     * Reads the optional "failOnFalse" input; accepts Boolean or String,
     * defaulting to true for anything else (including absent).
     */
    private boolean getFailOnFalse(Task task) {
        Object obj = task.getInputData().get("failOnFalse");
        if (obj instanceof Boolean) {
            return (Boolean)obj;
        } else if (obj instanceof String) {
            return Boolean.parseBoolean((String)obj);
        }
        return true;
    }

    /**
     * Always returns false -- presumably meaning this task does all its work
     * in start() and needs no re-execution; confirm against the
     * WorkflowSystemTask contract.
     */
    @Override
    public boolean execute(Workflow workflow, Task task, WorkflowExecutor executor) throws Exception {
        return false;
    }

    /** Marks the task CANCELED; no other cleanup is required. */
    @Override
    public void cancel(Workflow workflow, Task task, WorkflowExecutor executor) throws Exception {
        task.setStatus(Status.CANCELED);
    }
}
| apache-2.0 |
Chirojeugd-Vlaanderen/gap | tools/Chiro.Gap.FixAnomalies/Chiro.Gap.FixAnomalies/Program.LidRelatiesFixen.cs | 9819 | /*
Copyright 2015,2016 Chirojeugd-Vlaanderen vzw
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using Chiro.Cdf.Poco;
using Chiro.Cdf.ServiceHelper;
using Chiro.CiviCrm.Api;
using Chiro.CiviCrm.Api.DataContracts;
using Chiro.CiviCrm.Api.DataContracts.Requests;
using Chiro.Gap.FixAnomalies.Properties;
using Chiro.Gap.Poco.Context;
using Chiro.Gap.Poco.Model;
using Chiro.Gap.Sync;
using Chiro.Gap.SyncInterfaces;
namespace Chiro.Gap.FixAnomalies
{
partial class Program
{
        /// <summary>
        ///   Compares the active membership relations in CiviCRM with the
        ///   active members in GAP, then -- after interactive console
        ///   confirmation -- syncs members missing from CiviCRM and
        ///   unsubscribes members that only exist in CiviCRM.
        /// </summary>
        private static void LidRelatiesFixen(ServiceHelper serviceHelper, string apiKey, string siteKey)
        {
            Console.WriteLine(Resources.Program_Main_Opvragen_actieve_lidrelaties_CiviCRM_);

            // Avoid the CiviCRM API running over its memory limit by fetching
            // the members in blocks.
            var civiLeden = new List<string>();

            int offset = 0;
            bool finished = false;

            while (!finished)
            {
                var request = new BaseRequest
                {
                    ApiOptions = new ApiOptions {Limit = Properties.Settings.Default.LedenBlokGrootte, Offset = offset}
                };

                var civiResult =
                    serviceHelper.CallService<ICiviCrmApi, ApiResultStrings>(
                        svc => svc.ChiroDiagnosticsActieveLidRelaties(apiKey, siteKey, request));
                if (civiResult.IsError != 0)
                {
                    throw new ApplicationException(civiResult.ErrorMessage);
                }
                civiLeden.AddRange(from v in civiResult.Values select v[0]);
                offset += civiResult.Count;
                // A short (partial) block means we reached the end.
                finished = civiResult.Count < Properties.Settings.Default.LedenBlokGrootte;
            }
            Console.WriteLine(Resources.Program_Main_Dat_zijn_er__0__, civiLeden.Count);

            Console.WriteLine(Resources.Program_Main_Opvragen_actieve_leden_GAP__);
            var gapLeden = AlleActieveLeden();
            Console.WriteLine(Resources.Program_Main_Dat_zijn_er__0__, gapLeden.Count());

            var teBewarenLeden = OntbrekendInCiviZoeken(civiLeden, gapLeden);

            Console.WriteLine(Resources.Program_Main__0__leden_uit_GAP_niet_teruggevonden_in_CiviCRM_, teBewarenLeden.Count);

            // TODO: command line switch to avoid this interactive question.
            Console.Write(Resources.Program_Main_Meteen_syncen__);
            string input = Console.ReadLine();
            if (input.ToUpper() == "J" || input.ToUpper() == "Y")
            {
                LedenNaarCivi(teBewarenLeden, serviceHelper);
            }

            var uitTeSchrijvenLeden = TeVeelInCiviZoeken(civiLeden, gapLeden);
            Console.WriteLine(Resources.Program_Main__0__leden_uit_CiviCRM_niet_teruggevonden_in_GAP_, uitTeSchrijvenLeden.Count);

            // TODO: command line switch to avoid this interactive question.
            Console.Write(Resources.Program_Main_Uitschrijven_uit_Civi__);
            string input2 = Console.ReadLine();
            if (input2.ToUpper() == "J" || input2.ToUpper() == "Y")
            {
                LedenUitschrijvenCivi(uitTeSchrijvenLeden, serviceHelper);
            }
        }
private static int HuidigWerkJaar()
{
DateTime vandaag = DateTime.Now;
int werkjaar = vandaag.Month >= 9 ? vandaag.Year : vandaag.Year - 1;
return werkjaar;
}
        /// <summary>
        ///   Re-syncs the given GAP members to CiviCRM. A fresh data context
        ///   is opened to look each Lid up by id; a running counter is
        ///   written to the console as progress feedback.
        /// </summary>
        private static void LedenNaarCivi(List<LidInfo> teSyncen, ServiceHelper serviceHelper)
        {
            int counter = 0;
            var sync = new LedenSync(serviceHelper);
            using (var context = new ChiroGroepEntities())
            {
                var repositoryProvider = new RepositoryProvider(context);
                var ledenRepo = repositoryProvider.RepositoryGet<Lid>();

                foreach (var l in teSyncen)
                {
                    sync.Bewaren(ledenRepo.ByID(l.LidId));
                    Console.Write("{0} ", ++counter);
                }
            }
        }
private static void LedenUitschrijvenCivi(List<UitschrijfInfo> teSyncen, ServiceHelper serviceHelper)
{
int counter = 0;
var sync = new LedenSync(serviceHelper);
foreach (var l in teSyncen)
{
sync.Uitschrijven(l);
Console.Write("{0} ", ++counter);
}
}
        /// <summary>
        ///   Returns the GAP members (gapLeden) that do not occur in the
        ///   CiviCRM list (civiLeden), using a two-pointer walk over both
        ///   lists with an ordinal, case-insensitive comparison on the
        ///   stamnr/AD key.
        /// </summary>
        /// <remarks>
        ///   NOTE(review): this assumes both lists are sorted ascending on
        ///   the same key; unsorted input silently misclassifies entries --
        ///   confirm the callers' ordering guarantees.
        /// </remarks>
        private static List<LidInfo> OntbrekendInCiviZoeken(IList<string> civiLeden, IList<LidInfo> gapLeden)
        {
            int civiCounter = 0;
            int gapCounter = 0;
            var teSyncen = new List<LidInfo>();

            int aantalciviLeden = civiLeden.Count;

            // Normally the GAP members are sorted the same way as the Civi
            // members. Walk the GAP members, and check whether each one also
            // occurs among the Civi members.
            Console.WriteLine(Resources.Program_OntbrekendInCiviZoeken_Opzoeken_leden_in_GAP_maar_niet_in_CiviCRM_);

            while (gapCounter < gapLeden.Count && civiCounter < aantalciviLeden)
            {
                // Advance the Civi pointer past keys smaller than the current GAP key.
                while (civiCounter < aantalciviLeden && String.Compare(gapLeden[gapCounter].StamNrAdNr, civiLeden[civiCounter], StringComparison.OrdinalIgnoreCase) > 0)
                {
                    ++civiCounter;
                }
                // No exact match means this GAP member is missing from Civi.
                if (civiCounter < aantalciviLeden && String.Compare(gapLeden[gapCounter].StamNrAdNr, civiLeden[civiCounter], StringComparison.OrdinalIgnoreCase) != 0)
                {
                    teSyncen.Add(gapLeden[gapCounter]);
                    Console.WriteLine(gapLeden[gapCounter].StamNrAdNr);
                }
                ++gapCounter;
            }
            return teSyncen;
        }
/// <summary>
/// Finds members that exist in CiviCRM but not in GAP, so they can be
/// unsubscribed from Civi. Both lists are assumed to be sorted identically
/// (merge-join over sorted sequences).
/// </summary>
/// <param name="civiLeden">Sorted "stamnr;adnr" strings from CiviCRM.</param>
/// <param name="gapLeden">Sorted member records from GAP.</param>
/// <returns>Unsubscribe records for Civi members without a GAP counterpart.</returns>
private static List<UitschrijfInfo> TeVeelInCiviZoeken(IList<string> civiLeden, IList<LidInfo> gapLeden)
{
    int civiCounter = 0;
    int gapCounter = 0;
    var teSyncen = new List<UitschrijfInfo>();
    // Normally the members from GAP are sorted the same way as those from
    // Civi. Walk the Civi members and check whether they also occur in GAP.
    Console.WriteLine(Resources.Program_TeVeelInCiviZoeken_Opzoeken_leden_in_CiviCRM_maar_niet_in_GAP_);
    while (gapCounter < gapLeden.Count && civiCounter < civiLeden.Count)
    {
        // Skip GAP entries that sort strictly before the current Civi entry.
        while (gapCounter < gapLeden.Count && String.Compare(gapLeden[gapCounter].StamNrAdNr, civiLeden[civiCounter], StringComparison.OrdinalIgnoreCase) < 0)
        {
            ++gapCounter;
        }
        // No exact match means this Civi entry has no GAP counterpart.
        // NOTE(review): when the GAP list is exhausted mid-run, any remaining
        // Civi entries are never reported — confirm whether that is acceptable
        // (the lists are expected to cover the same sorted range).
        if (gapCounter < gapLeden.Count && String.Compare(gapLeden[gapCounter].StamNrAdNr, civiLeden[civiCounter], StringComparison.OrdinalIgnoreCase) != 0)
        {
            // Split the Civi output into stem number and AD number.
            string[] components = civiLeden[civiCounter].Split(';');
            // FIXME: National teams are not yet in GAP (#4055). We ignore them
            // based on a hard-coded, unclear condition (oh dear).
            // See also (#5644)
            if (components[0].Length == 8 && components[0].Substring(0, 3).ToUpper() != "NAT")
            {
                // Construct a fake member record to sync to Civi shortly.
                // We do not use the real member, because it is not guaranteed
                // to exist. And deleting members from Civi who may already
                // have received the February magazine does not seem like a
                // good idea.
                var l = new UitschrijfInfo
                {
                    AdNummer = int.Parse(components[1]),
                    StamNummer = components[0],
                    WerkJaar = HuidigWerkJaar(),
                    UitschrijfDatum = DateTime.Now
                };
                teSyncen.Add(l);
                Console.WriteLine(civiLeden[civiCounter]);
            }
        }
        ++civiCounter;
    }
    return teSyncen;
}
/// <summary>
/// Returns the stem-number/AD-number combinations of all active members.
/// Will be used for monitoring. (#4326, #4268)
/// </summary>
/// <returns>One entry per active member, sorted by the
/// stem-number/AD-number combination.</returns>
public static LidInfo[] AlleActieveLeden()
{
    // This would be better done with dependency injection. But it is and
    // remains a hack.
    using (var context = new ChiroGroepEntities())
    {
        var repositoryProvider = new RepositoryProvider(context);
        var repo = repositoryProvider.RepositoryGet<ActiefLid>();
        return repo.GetAll()
            .Select(lid => new LidInfo
            {
                StamNrAdNr = String.Format("{0};{1}", lid.Code.Trim(), lid.AdNummer),
                LidId = lid.LidID
            })
            .OrderBy(info => info.StamNrAdNr)
            .ToArray();
    }
}
}
}
| apache-2.0 |
sreeram-boyapati/VITProcedures | VITProc2/src/com/vitguide/customobjects/ProcedureObjects.java | 1542 | package com.vitguide.customobjects;
import java.io.Serializable;
import java.util.ArrayList;
/**
 * Serializable value object describing a single VIT procedure: the query it
 * answers, the responsible office, whether it applies to freshers and/or
 * clubs, and the ordered list of procedure steps.
 */
public class ProcedureObjects implements Serializable {

    private static final long serialVersionUID = -3458674522445274446L;

    // Field names deliberately keep their original (non-conventional) casing:
    // this class is Serializable with a pinned serialVersionUID, and renaming
    // fields would change the default serialized form.
    private String Office;
    // The user query this procedure answers.
    private String Query;
    // "Yes" when the procedure applies to clubs; any other value (or null) means no.
    private String Clubs;
    // "Yes" when the procedure applies to freshers; any other value (or null) means no.
    private String Freshers;
    private ArrayList<String> ProcedureSteps;

    /** Creates an empty procedure with no steps. */
    public ProcedureObjects() {
        ProcedureSteps = new ArrayList<String>();
    }

    /**
     * Creates a procedure for the given query with a pre-built step list.
     *
     * @param mQuery      the question this procedure answers
     * @param mProcedures the ordered steps; stored by reference, not copied
     */
    public ProcedureObjects(String mQuery, ArrayList<String> mProcedures) {
        Query = mQuery;
        ProcedureSteps = mProcedures;
    }

    public String getQuery() {
        return Query;
    }

    /**
     * @return the procedure steps copied into a fresh array
     */
    public String[] getProcedures() {
        return ProcedureSteps.toArray(new String[ProcedureSteps.size()]);
    }

    /**
     * @return the live backing list of steps (not a copy)
     */
    public ArrayList<String> getArrayProcedures() {
        return ProcedureSteps;
    }

    public void setClubs(String mClubs) {
        Clubs = mClubs;
    }

    public void setFreshers(String mFreshers) {
        Freshers = mFreshers;
    }

    public void setQuery(String mQuery) {
        Query = mQuery;
    }

    /** Appends one step to the procedure. */
    public void addProcedure(String mProcedure) {
        ProcedureSteps.add(mProcedure);
    }

    public String getOffice() {
        return Office;
    }

    public void setOffice(String mOffice) {
        Office = mOffice;
    }

    /**
     * @return true if the procedure applies to freshers; false when the flag
     *         is unset ({@code null}) or anything other than "Yes"
     */
    public boolean getFreshers() {
        // "Yes".equals(...) is null-safe; the original Freshers.equals("Yes")
        // threw a NullPointerException when the flag was never set.
        return "Yes".equals(Freshers);
    }

    /**
     * @return true if the procedure applies to clubs (null-safe)
     */
    public boolean getClubs() {
        return "Yes".equals(Clubs);
    }
}
| apache-2.0 |
gchq/stroom | stroom-core-shared/src/main/java/stroom/explorer/shared/ExplorerServiceRenameRequest.java | 1455 | /*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package stroom.explorer.shared;
import stroom.docref.DocRef;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Request payload for renaming an explorer document: identifies the document
 * by its {@link DocRef} and carries the new name to apply.
 * Immutable; (de)serialised with Jackson.
 */
@JsonInclude(Include.NON_NULL)
public class ExplorerServiceRenameRequest {

    // Reference to the document being renamed.
    @JsonProperty
    private final DocRef docRef;
    // The new name to give the document.
    @JsonProperty
    private final String docName;

    @JsonCreator
    public ExplorerServiceRenameRequest(@JsonProperty("docRef") final DocRef docRef,
                                        @JsonProperty("docName") final String docName) {
        this.docRef = docRef;
        this.docName = docName;
    }

    /** @return the reference of the document to rename */
    public DocRef getDocRef() {
        return docRef;
    }

    /** @return the new name for the document */
    public String getDocName() {
        return docName;
    }
}
| apache-2.0 |
mikedurbin/fcrepo-java-client | src/test/java/org/fcrepo/client/FcrepoClientAuthTest.java | 5153 | /**
* Copyright 2015 DuraSpace, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.client;
import static org.fcrepo.client.TestUtils.baseUrl;
import static org.fcrepo.client.TestUtils.rdfXml;
import static org.fcrepo.client.TestUtils.setField;
import static org.fcrepo.client.TestUtils.RDF_XML;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;
import static java.net.URI.create;
import java.io.IOException;
import java.net.URI;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicHeader;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* @author acoburn
*/
/**
 * Verifies that {@link FcrepoClient} performs a GET correctly for the various
 * authentication configurations (no host, explicit host, no password).
 *
 * @author acoburn
 */
@RunWith(MockitoJUnitRunner.class)
public class FcrepoClientAuthTest {

    private FcrepoClient testClient;

    @Mock
    private CloseableHttpClient mockHttpclient;

    @Mock
    private CloseableHttpResponse mockResponse;

    @Mock
    private StatusLine mockStatus;

    /** Credentials without an auth-host restriction. */
    @Test
    public void testAuthNoHost() throws IOException, FcrepoOperationFailedException {
        assertGetReturnsBody(new FcrepoClient("user", "pass", null, true));
    }

    /** Credentials scoped to an explicit auth host. */
    @Test
    public void testAuthWithHost() throws IOException, FcrepoOperationFailedException {
        assertGetReturnsBody(new FcrepoClient("user", "pass", "localhost", true));
    }

    /** A username without a password must still work. */
    @Test
    public void testAuthNoPassword() throws IOException, FcrepoOperationFailedException {
        assertGetReturnsBody(new FcrepoClient("user", null, null, true));
    }

    /**
     * Shared scenario for every auth variant: injects the mocked HTTP client
     * into the given {@link FcrepoClient}, performs a GET and asserts status,
     * content type, location and body. The three original tests were identical
     * except for the client construction, so the common body lives here.
     */
    private void assertGetReturnsBody(final FcrepoClient client)
            throws IOException, FcrepoOperationFailedException {
        final int status = 200;
        final URI uri = create(baseUrl);
        final ByteArrayEntity entity = new ByteArrayEntity(rdfXml.getBytes());

        testClient = client;
        setField(testClient, "httpclient", mockHttpclient);
        entity.setContentType(RDF_XML);
        doSetupMockRequest(RDF_XML, entity, status);

        final FcrepoResponse response = testClient.get(uri, RDF_XML, null);

        assertEquals(response.getUrl(), uri);
        assertEquals(response.getStatusCode(), status);
        assertEquals(response.getContentType(), RDF_XML);
        assertEquals(response.getLocation(), null);
        assertEquals(IOUtils.toString(response.getBody()), rdfXml);
    }

    /** Stubs the mocked HTTP client to answer with the given entity and status. */
    private void doSetupMockRequest(final String contentType, final ByteArrayEntity entity, final int status)
            throws IOException {
        final Header contentTypeHeader = new BasicHeader("Content-Type", contentType);
        final Header[] linkHeaders = new Header[]{};
        when(mockHttpclient.execute(any(HttpUriRequest.class))).thenReturn(mockResponse);
        when(mockResponse.getFirstHeader("Location")).thenReturn(null);
        when(mockResponse.getFirstHeader("Content-Type")).thenReturn(contentTypeHeader);
        when(mockResponse.getHeaders("Link")).thenReturn(linkHeaders);
        when(mockResponse.getEntity()).thenReturn(entity);
        when(mockResponse.getStatusLine()).thenReturn(mockStatus);
        when(mockStatus.getStatusCode()).thenReturn(status);
    }
}
| apache-2.0 |
yuananf/presto | presto-thrift-connector/src/main/java/com/facebook/presto/connector/thrift/ThriftPluginInfo.java | 1260 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.connector.thrift;
import com.facebook.presto.connector.thrift.api.PrestoThriftService;
import com.google.inject.Module;
import static com.facebook.presto.connector.thrift.location.ExtendedSimpleAddressSelectorBinder.extendedSimpleAddressSelector;
import static io.airlift.drift.client.guice.DriftClientBinder.driftClientBinder;
/**
 * Plugin metadata for the Presto Thrift connector: supplies the connector
 * name and the Guice module that wires the Drift client used to talk to
 * remote {@link PrestoThriftService} hosts.
 */
public class ThriftPluginInfo
{
    /**
     * @return the name under which this connector plugin is registered
     */
    public String getName()
    {
        return "presto-thrift";
    }

    /**
     * @return a Guice module binding a Drift client for
     *         {@link PrestoThriftService}, using the extended simple address
     *         selector to resolve service hosts
     */
    public Module getLocationModule()
    {
        return binder -> driftClientBinder(binder)
                .bindDriftClient(PrestoThriftService.class)
                .withAddressSelector(extendedSimpleAddressSelector());
    }
}
| apache-2.0 |
Neoskai/greycat | plugins/backup/src/main/java/greycat/backup/tools/StorageValueChunk.java | 5217 | /**
* Copyright 2017 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.backup.tools;
import greycat.Constants;
import greycat.Type;
import greycat.struct.Buffer;
import greycat.utility.Base64;
import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
/**
 * Decodes a single backed-up value entry (world / time / index / type /
 * value) from a greycat {@link Buffer}. Fields are Base64-encoded and
 * separated by {@link Constants#CHUNK_SEP}.
 *
 * @ignore ts
 */
public class StorageValueChunk {

    private long world;
    private long time;
    private int type;
    private Object value;
    private int index;

    /**
     * Parses a buffer of the form
     * {@code world SEP time SEP index SEP type SEP value} into a chunk.
     *
     * @param buffer the raw backup record
     * @return the decoded chunk
     */
    public static StorageValueChunk build(Buffer buffer) {
        StorageValueChunk tuple = new StorageValueChunk();
        long cursor = 0;
        long length = buffer.length();
        long previous = 0;
        int index = 0;
        while (cursor < length) {
            byte current = buffer.read(cursor);
            if (current == Constants.CHUNK_SEP) {
                switch (index) {
                    case 0:
                        tuple.world = Base64.decodeToLongWithBounds(buffer, previous, cursor);
                        break;
                    case 1:
                        tuple.time = Base64.decodeToLongWithBounds(buffer, previous, cursor);
                        break;
                    case 2:
                        tuple.index = Base64.decodeToIntWithBounds(buffer, previous, cursor);
                        break;
                    case 3:
                        tuple.type = Base64.decodeToIntWithBounds(buffer, previous, cursor);
                        break;
                    case 4:
                        tuple.value = valueFromBuffer(buffer, previous, cursor, tuple.type);
                        break;
                }
                index++;
                previous = cursor + 1;
            }
            cursor++;
        }
        // Collect the last (unterminated) field.
        switch (index) {
            case 0:
                tuple.world = Base64.decodeToLongWithBounds(buffer, previous, cursor);
                break;
            case 1:
                tuple.time = Base64.decodeToLongWithBounds(buffer, previous, cursor);
                break;
            case 2:
                tuple.index = Base64.decodeToIntWithBounds(buffer, previous, cursor);
                break;
            case 3:
                // NOTE(review): unlike the loop above, a trailing type field is
                // read as a raw byte rather than Base64-decoded — confirm
                // whether this asymmetry is intentional. Behaviour kept as-is.
                tuple.type = buffer.slice(previous, cursor)[0];
                break;
            case 4:
                tuple.value = valueFromBuffer(buffer, previous, cursor, tuple.type);
                break;
        }
        return tuple;
    }

    /**
     * Rebuilds a value from the buffer, given the bounds of the value's bytes.
     * Primitive types are Base64-encoded; anything else falls back to native
     * Java deserialization.
     *
     * @param buffer the complete buffer holding the value
     * @param begin  first index of the value in the buffer
     * @param end    end index of the value in the buffer
     * @param type   the greycat {@link Type} constant of the value
     * @return the rebuilt value, or {@code null} for a removal marker
     */
    public static Object valueFromBuffer(Buffer buffer, long begin, long end, int type) {
        switch (type) {
            case Type.STRING:
                return Base64.decodeToStringWithBounds(buffer, begin, end);
            case Type.BOOL:
                return buffer.slice(begin, end)[0] != 0;
            case Type.LONG:
                return Base64.decodeToLongWithBounds(buffer, begin, end);
            case Type.INT:
                return Base64.decodeToIntWithBounds(buffer, begin, end);
            case Type.DOUBLE:
                return Base64.decodeToDoubleWithBounds(buffer, begin, end);
            case Type.REMOVE:
                // A removal marker carries no payload.
                return null;
            default:
                // Fallback: the payload is a Java-serialized object.
                // (A leftover debug println of the unhandled type was removed.)
                return deserialize(buffer.slice(begin, end));
        }
    }

    /**
     * Deserializes a Java-serialized object.
     * <p>
     * SECURITY NOTE: native Java deserialization of untrusted input is
     * unsafe; only feed this method data produced by this backup pipeline.
     *
     * @param data the serialized bytes
     * @return the deserialized object, or {@code null} on failure
     */
    private static Object deserialize(byte[] data) {
        // try-with-resources closes the streams even when readObject throws;
        // the original leaked the ObjectInputStream on failure.
        try (ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream(data))) {
            return is.readObject();
        } catch (Exception e) {
            // Best-effort contract preserved: report and return null.
            e.printStackTrace();
            return null;
        }
    }

    /** @return the greycat {@link Type} constant of the stored value */
    public int type() {
        return type;
    }

    /** @return the decoded value; may be {@code null} for removals */
    public Object value() {
        return value;
    }

    /** @return the world id this entry belongs to */
    public long world() {
        return world;
    }

    /** @return the timestamp of this entry */
    public long time() {
        return time;
    }

    /** @return the attribute index of this entry */
    public int index() {
        return index;
    }

    @Override
    public String toString() {
        return "StorageValueChunk{" +
                "type=" + type +
                ", value=" + value +
                '}';
    }
}
| apache-2.0 |
mauricionr/TypeScript | tests/baselines/reference/contextualTyping34.js | 130 | //// [contextualTyping34.ts]
var foo = <{ id: number;}> ({id:4});
//// [contextualTyping34.js]
var foo = ({
id: 4
});
| apache-2.0 |
yy13003/Im001 | Hello/src/com/example/connection_adapter/Connection_adapter.java | 2855 | package com.example.connection_adapter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.example.hello.R;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
/**
 * ListView adapter for the conversation (chat) screen. Each row shows an
 * avatar, a nickname, the last message text and a timestamp.
 * <p>
 * NOTE(review): the {@code list} constructor parameter is currently ignored
 * (its assignment is commented out) and the adapter always displays the
 * hard-coded demo rows built by {@link #getData()} — confirm whether this is
 * leftover scaffolding.
 */
public class Connection_adapter extends BaseAdapter {
    // Conversation window adapter.
    @SuppressWarnings("unused")
    private Context context;
    private LayoutInflater inflater;
    // Row data: each map carries the keys IV_head, NatName, TVContent, Time.
    private List<Map<String, Object>> list;

    /**
     * Replaces {@link #list} with two hard-coded demo conversations.
     */
    public void getData() {
        list = new ArrayList<Map<String, Object>>();
        Map<String, Object> map1 = new HashMap<String, Object>();
        map1.put("IV_head", R.drawable.head);
        map1.put("NatName", "沙皮狗的忧伤");
        map1.put("TVContent", "你好啊");
        map1.put("Time", "19:11:04");
        list.add(map1);
        Map<String, Object> map2 = new HashMap<String, Object>();
        map2.put("IV_head", R.drawable.head);
        map2.put("NatName", "沙皮狗的悲伤");
        map2.put("TVContent", "我不好");
        map2.put("Time", "19:11:04");
        list.add(map2);
    }

    /**
     * @param context host context, used to obtain the LayoutInflater
     * @param list    row data — currently unused; demo data is shown instead
     */
    public Connection_adapter(Context context, List<Map<String, Object>> list) {
        getData();
        this.context = context;
        inflater = LayoutInflater.from(context);
        // this.list = list;
        // NOTE(review): the line above is disabled, so the caller-supplied
        // data never reaches the adapter — TODO confirm intent.
    }

    @Override
    public int getCount() {
        // Number of rows equals the number of demo entries.
        return list.size();
    }

    @Override
    public Object getItem(int arg0) {
        return list.get(arg0);
    }

    @Override
    public long getItemId(int arg0) {
        // Position doubles as the (stable-enough) row id.
        return arg0;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup arg2) {
        @SuppressWarnings("unused")
        final int selectId = position;
        ViewHolder holder;
        View view;
        if (convertView == null) {
            // First use: inflate the row layout and cache its child views.
            view = inflater.inflate(R.layout.activity_connection, null);
            holder = new ViewHolder();
            holder.IV_head = (ImageView) view.findViewById(R.id.IV_head);
            holder.NatName = (TextView) view.findViewById(R.id.NatName);
            holder.TVContent = (TextView) view.findViewById(R.id.TVContent);
            holder.Time = (TextView) view.findViewById(R.id.Time);
            view.setTag(holder);
        } else {
            // Recycled row: reuse the cached holder.
            view = convertView;
            holder = (ViewHolder) view.getTag();
        }
        list.get(position);
        System.out.println("查看List中是否有数据::list.get(position)" + position);
        holder.NatName.setText((String) list.get(position).get("NatName"));
        holder.TVContent.setText((String) list.get(position).get("TVContent"));
        holder.Time.setText((String) list.get(position).get("Time"));
        holder.IV_head.setImageResource((Integer) list.get(position).get(
                "IV_head"));
        return view;
    }

    // View cache for one list row (classic ViewHolder pattern).
    class ViewHolder {
        ImageView IV_head;
        TextView NatName, TVContent, Time;
    }
} | apache-2.0 |
FabianTerhorst/Isometric | lib/src/main/java/io/fabianterhorst/isometric/Shape.java | 6712 | package io.fabianterhorst.isometric;
import java.util.Arrays;
import java.util.Comparator;
/**
 * A shape in isometric 3D space: an ordered, mutable collection of
 * {@link Path}s. Transform methods returning a new {@code Shape} leave this
 * instance untouched; the {@code *Paths} variants mutate this shape in place.
 * <p>
 * Created by fabianterhorst on 31.03.17.
 */
public class Shape {

    private Path[] paths;

    /** Creates an empty shape; paths can be added later via {@link #push}. */
    public Shape() {
    }

    /**
     * Creates a shape backed by the given paths (stored by reference, not
     * copied).
     *
     * @param paths the paths making up this shape
     */
    public Shape(Path[] paths) {
        this.paths = paths;
    }

    /**
     * Appends a single path to this shape.
     *
     * @param path the path to append
     */
    public void push(Path path) {
        if (paths == null) {
            paths = new Path[0];
        }
        paths = add(path, paths);
    }

    /**
     * Replaces this shape's paths with the given array (stored by reference).
     *
     * @param paths the new backing array
     */
    public void setPaths(Path[] paths) {
        this.paths = paths;
    }

    /**
     * Returns a copy of {@code values} with {@code point} appended at the end.
     *
     * @param point  the path to append
     * @param values the existing paths
     * @return a new array of length {@code values.length + 1}
     */
    public static Path[] add(Path point, Path[] values) {
        Path[] anotherArray = Arrays.copyOf(values, values.length + 1);
        anotherArray[values.length] = point;
        return anotherArray;
    }

    /**
     * Concatenates two path arrays, {@code a} first.
     *
     * @param a the leading paths
     * @param b the trailing paths
     * @return a new array holding all of {@code a} followed by all of {@code b}
     */
    public static Path[] concat(Path[] a, Path[] b) {
        Path[] c = new Path[a.length + b.length];
        System.arraycopy(a, 0, c, 0, a.length);
        System.arraycopy(b, 0, c, a.length, b.length);
        return c;
    }

    /**
     * Adds the given paths to this shape. Note that the new paths are placed
     * in front of the existing ones (original behaviour, kept for
     * compatibility).
     *
     * @param paths the paths to add
     */
    public void push(Path[] paths) {
        if (this.paths == null) {
            this.paths = new Path[0];
        }
        this.paths = concat(paths, this.paths);
    }

    /**
     * @return the backing path array (not a copy); {@code null} when the shape
     *         was created empty and never populated
     */
    public Path[] getPaths() {
        return paths;
    }

    /** @return a new shape with every path translated by the given deltas */
    public Shape translate(double dx, double dy, double dz) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].translate(dx, dy, dz);
        }
        return new Shape(paths);
    }

    /** @return a new shape rotated around the X axis about {@code origin} */
    public Shape rotateX(Point origin, double angle) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].rotateX(origin, angle);
        }
        return new Shape(paths);
    }

    /** @return a new shape rotated around the Y axis about {@code origin} */
    public Shape rotateY(Point origin, double angle) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].rotateY(origin, angle);
        }
        return new Shape(paths);
    }

    /** @return a new shape rotated around the Z axis about {@code origin} */
    public Shape rotateZ(Point origin, double angle) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].rotateZ(origin, angle);
        }
        return new Shape(paths);
    }

    /** @return a new shape scaled about {@code origin} by per-axis factors */
    public Shape scale(Point origin, double dx, double dy, double dz) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].scale(origin, dx, dy, dz);
        }
        return new Shape(paths);
    }

    /** @return a new shape scaled about {@code origin} in x and y */
    public Shape scale(Point origin, double dx, double dy) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].scale(origin, dx, dy);
        }
        return new Shape(paths);
    }

    /** @return a new shape scaled uniformly about {@code origin} */
    public Shape scale(Point origin, double dx) {
        Path[] paths = new Path[this.paths.length];
        for (int i = 0; i < this.paths.length; i++) {
            paths[i] = this.paths[i].scale(origin, dx);
        }
        return new Shape(paths);
    }

    /** Scales this shape's paths in place by per-axis factors. */
    public void scalePaths(Point origin, double dx, double dy, double dz) {
        for (int i = 0, length = paths.length; i < length; i++) {
            paths[i] = paths[i].scale(origin, dx, dy, dz);
        }
    }

    /** Scales this shape's paths in place in x and y. */
    public void scalePaths(Point origin, double dx, double dy) {
        for (int i = 0, length = paths.length; i < length; i++) {
            paths[i] = paths[i].scale(origin, dx, dy);
        }
    }

    /** Scales this shape's paths in place uniformly. */
    public void scalePaths(Point origin, double dx) {
        for (int i = 0, length = paths.length; i < length; i++) {
            paths[i] = paths[i].scale(origin, dx);
        }
    }

    /** Translates this shape's paths in place by the given deltas. */
    public void translatePaths(double dx, double dy, double dz) {
        for (int i = 0, length = paths.length; i < length; i++) {
            paths[i] = paths[i].translate(dx, dy, dz);
        }
    }

    /**
     * Sorts this shape's paths in place from farthest to nearest (descending
     * depth) for painter's-algorithm rendering and returns them. Each depth is
     * computed once up front; the sort is a stable O(n log n) index sort,
     * replacing the previous accidental O(n^2) bubble sort while producing the
     * same ordering (ties keep their original relative order).
     *
     * @return this shape's paths, now ordered by descending depth
     */
    public Path[] orderedPaths() {
        final double[] depths = new double[paths.length];
        for (int i = 0; i < depths.length; i++) {
            depths[i] = paths[i].depth();
        }
        // Sort indices rather than paths so each depth is evaluated once.
        Integer[] order = new Integer[paths.length];
        for (int i = 0; i < order.length; i++) {
            order[i] = i;
        }
        Arrays.sort(order, new Comparator<Integer>() {
            @Override
            public int compare(Integer a, Integer b) {
                // Descending by depth; Arrays.sort is stable.
                return Double.compare(depths[b], depths[a]);
            }
        });
        Path[] sorted = new Path[paths.length];
        for (int i = 0; i < sorted.length; i++) {
            sorted[i] = paths[order[i]];
        }
        // Keep the original in-place contract: reorder the backing array.
        System.arraycopy(sorted, 0, paths, 0, paths.length);
        return this.paths;
    }

    /** Extrudes the path into a new shape of height 1. */
    public static Shape extrude(Path path) {
        return extrude(new Shape(), path, 1);
    }

    /** Extrudes the path into a new shape of the given height. */
    public static Shape extrude(Path path, double height) {
        return extrude(new Shape(), path, height);
    }

    /** Extrudes the path into {@code shape} with height 1. */
    public static Shape extrude(Shape shape, Path path) {
        return extrude(shape, path, 1);
    }

    /**
     * Builds a prism from {@code path}: a bottom face (reversed so it faces
     * outward), a top face raised by {@code height}, and one quad per edge.
     *
     * @param shape  the shape whose paths are replaced with the prism faces
     * @param path   the base outline
     * @param height the extrusion height along z
     * @return {@code shape}, for chaining
     */
    public static Shape extrude(Shape shape, Path path, double height) {
        Path topPath = path.translate(0, 0, height);
        int i;
        int length = path.points.length;
        Path[] paths = new Path[length + 2];
        /* Push the top and bottom faces, top face must be oriented correctly */
        paths[0] = path.reverse();
        paths[1] = topPath;
        /* Push each side face */
        Point[] points;
        for (i = 0; i < length; i++) {
            points = new Point[4];
            points[0] = topPath.points[i];
            points[1] = path.points[i];
            points[2] = path.points[(i + 1) % length];
            points[3] = topPath.points[(i + 1) % length];
            paths[i + 2] = new Path(points);
        }
        shape.setPaths(paths);
        return shape;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Shape)) return false;
        Shape shape = (Shape) o;
        return Arrays.equals(paths, shape.paths);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(paths);
    }
} | apache-2.0 |
zline/JsonPath | json-path/src/test/java/com/jayway/jsonpath/JsonOrgJsonProviderTest.java | 1284 | package com.jayway.jsonpath;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Test;
import java.util.List;
import java.util.Map;
import static com.jayway.jsonpath.JsonPath.using;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Exercises JsonPath with the org.json provider configuration: object reads,
 * scalar reads, filter expressions and mapping to plain Java collections.
 */
public class JsonOrgJsonProviderTest extends BaseTest {

    // Reading a sub-document yields the provider's native JSONObject.
    @Test
    public void an_object_can_be_read() {
        JSONObject book = using(JSON_ORG_CONFIGURATION).parse(JSON_DOCUMENT).read("$.store.book[0]");

        assertThat(book.get("author").toString()).isEqualTo("Nigel Rees");
    }

    // A leaf value can be read directly as a String.
    @Test
    public void a_property_can_be_read() {
        String category = using(JSON_ORG_CONFIGURATION).parse(JSON_DOCUMENT).read("$.store.book[0].category");

        assertThat(category).isEqualTo("reference");
    }

    // Filter predicates return the matching elements as a JSONArray.
    @Test
    public void a_filter_can_be_applied() {
        JSONArray fictionBooks = using(JSON_ORG_CONFIGURATION).parse(JSON_DOCUMENT).read("$.store.book[?(@.category == 'fiction')]");

        assertThat(fictionBooks.length()).isEqualTo(3);
    }

    // Results can be mapped onto standard Java collection types.
    @Test
    public void result_can_be_mapped_to_object() {
        List<Map<String, Object>> books = using(JSON_ORG_CONFIGURATION).parse(JSON_DOCUMENT).read("$.store.book", List.class);

        assertThat(books.size()).isEqualTo(4);
    }
}
| apache-2.0 |
CognizantQAHub/Cognizant-Intelligent-Test-Scripter | Engine/src/main/java/com/cognizant/cognizantits/engine/reporting/util/RDS.java | 5938 | /*
* Copyright 2014 - 2017 Cognizant Technology Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cognizant.cognizantits.engine.reporting.util;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
/**
*
*
*/
@SuppressWarnings("unchecked")
public class RDS {

    // Report data is emitted as a JavaScript assignment: var DATA=<json>;
    private final static String BEFORE = "var DATA=", AFTER = ";";

    /**
     * Creates an empty report node for a single test step.
     *
     * @param name step description; a placeholder is used when null
     * @return a JSON object with type "step" and an empty data object
     */
    public synchronized static JSONObject getNewStep(String name) {
        JSONObject step = new JSONObject();
        step.put(Step.TYPE, "step");
        step.put(Step.NAME, name == null ? "Description Not Given" : name);
        step.put(Step.DATA, new JSONObject());
        return step;
    }

    /**
     * Creates an empty report node for a test iteration (holds a list of
     * step entries and an overall status).
     *
     * @param name iteration label
     * @return a JSON object with type "iteration"
     */
    public synchronized static JSONObject getNewIteration(String name) {
        JSONObject iteration = new JSONObject();
        iteration.put(Step.TYPE, "iteration");
        iteration.put(Step.NAME, name);
        iteration.put(Step.DATA, new JSONArray());
        iteration.put(TestCase.STATUS, "");
        return iteration;
    }

    /**
     * Creates an empty report node for a reusable component invocation,
     * stamped with the current time.
     *
     * @param name reusable component name
     * @param desc human-readable description
     * @return a JSON object with type "reusable"
     */
    public synchronized static JSONObject getNewReusable(String name, String desc) {
        JSONObject reusable = new JSONObject();
        reusable.put(Step.TYPE, "reusable");
        reusable.put(Step.NAME, name);
        reusable.put(Step.DESCRIPTION, desc);
        reusable.put(Step.DATA, new JSONArray());
        reusable.put(TestCase.STATUS, "");
        reusable.put(Step.START_TIME, DateTimeUtils.DateTimeNow());
        return reusable;
    }

    /**
     * Writes the report data as a JavaScript file ({@code var DATA=<json>;})
     * so the HTML report can load it via a script tag.
     *
     * @param fileToWrite destination path
     * @param data        report payload
     */
    public synchronized static void writeToDataJS(String fileToWrite, JSONObject data) {
        writeToFile(fileToWrite, BEFORE + data.toString() + AFTER);
    }

    /**
     * Writes the given text to a file, overwriting any existing content.
     * IOExceptions are logged (SEVERE) and not propagated.
     *
     * @param fileToWrite destination path
     * @param data        text to write
     */
    public synchronized static void writeToFile(String fileToWrite, String data) {
        try (BufferedWriter bufwriter = new BufferedWriter(new FileWriter(fileToWrite))) {
            bufwriter.write(data);
        } catch (IOException ex) {
            Logger.getLogger(RDS.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    // JSON key names for the test-set level of the report.
    public class TestSet {
        public static final String PROJECT_NAME = "projectName";
        public static final String RELEASE_NAME = "releaseName";
        public static final String TESTSET_NAME = "testsetName";
        public static final String ITERATION_MODE = "iterationMode";
        public static final String RUN_CONFIG = "runConfiguration";
        public static final String MAX_THREADS = "maxThreads";
        public static final String START_TIME = "startTime";
        public static final String END_TIME = "endTime";
        public static final String EXE_TIME = "exeTime";
        public static final String NO_OF_TESTS = "noTests";
        public static final String NO_OF_PASS_TESTS = "nopassTests";
        public static final String NO_OF_FAIL_TESTS = "nofailTests";
        public static final String THEME = "theme";
        public static final String THEMES = "themes";
        public static final String TEST_RUN = "testRun";
        public static final String EXECUTIONS = "EXECUTIONS";
        public static final String BDD_STYLE = "bddReport";
        public static final String AXE_REPORT = "axeReport";
        public static final String PERF_REPORT = "perfReport";
    }

    // JSON key names for the test-case level of the report.
    public class TestCase {
        public static final String SCENARIO_NAME = "scenarioName";
        public static final String TESTCASE_NAME = "testcaseName";
        public static final String DESCRIPTION = "description";
        public static final String ITERATIONS = "iterations";
        public static final String ITERATION_TYPE = "iterationType";
        public static final String PLATFORM = "platform";
        public static final String B_VERSION = "bversion";
        public static final String START_TIME = "startTime";
        public static final String END_TIME = "endTime";
        public static final String EXE_TIME = "exeTime";
        public static final String NO_OF_TESTS = "noTests";
        public static final String NO_OF_PASS_TESTS = "nopassTests";
        public static final String NO_OF_FAIL_TESTS = "nofailTests";
        public static final String BROWSER = "browser";
        public static final String STATUS = "status";
        public static final String STEPS = "STEPS";
    }

    // JSON key names for individual steps (and their per-step data records).
    public class Step {
        public static final String NAME = "name";
        public static final String TYPE = "type";
        public static final String DATA = "data";
        public static final String DESCRIPTION = "description";
        public static final String START_TIME = "startTime";
        public static final String END_TIME = "endTime";

        // Key names for one executed step's result record.
        public class Data {
            public static final String STEP_NO = "stepno";
            public static final String STEP_NAME = "stepName";
            public static final String ACTION = "action";
            public static final String DESCRIPTION = "description";
            public static final String STATUS = "status";
            public static final String TIME_STAMP = "tStamp";
            public static final String LINK = "link";
            public static final String EXPECTED = "expected";
            public static final String ACTUAL = "actual";
            public static final String COMPARISION = "comparison";
            public static final String OBJECTS = "objects";
        }
    }
}
| apache-2.0 |
opencredo/opencredo-cloud-storage | cloud-storage-azure/src/main/java/org/opencredo/cloud/storage/azure/model/InputStreamBlob.java | 1695 | /* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencredo.cloud.storage.azure.model;
import java.io.InputStream;
import org.apache.http.HttpEntity;
import org.apache.http.entity.InputStreamEntity;
/**
* Blob abstraction for sending input stream to Azure cloud storage. This
* abstraction is also used to receive blob data from Azure cloud storage.
*
* @author Tomas Lukosius (tomas.lukosius@opencredo.com)
*
*/
public class InputStreamBlob extends Blob<InputStream> {

    /** Raw blob content; a one-shot stream that may only be consumed once. */
    private final InputStream data;

    /**
     * Creates a blob backed by an input stream.
     *
     * @param name blob name
     * @param data stream providing the blob's content
     */
    public InputStreamBlob(String name, InputStream data) {
        super(name);
        this.data = data;
    }

    /**
     * @return the underlying input stream
     * @see org.opencredo.cloud.storage.azure.model.Blob#getData()
     */
    @Override
    public InputStream getData() {
        return data;
    }

    /**
     * Wraps the stream in an HTTP entity for upload.
     *
     * The content length is reported as {@code -1} (unknown) so the HTTP
     * client streams the body (chunked transfer encoding). The previous
     * hard-coded length of {@code 0} declared an empty body, which
     * truncated every upload to zero bytes.
     *
     * @return entity streaming this blob's data
     * @see org.opencredo.cloud.storage.azure.model.Blob#createRequestBody()
     */
    @Override
    public HttpEntity createRequestBody() {
        // -1 == unknown length; InputStreamEntity then uses chunked encoding
        // instead of sending a zero-length body.
        return new InputStreamEntity(data, -1);
    }
}
| apache-2.0 |
tambora-org/dockerCRE | volumes/www/survey/application/libraries/PluginManager/PluginManager.php | 14270 | <?php
namespace ls\pluginmanager;
use \Yii;
use Plugin;
/**
* Factory for limesurvey plugin objects.
*/
class PluginManager extends \PluginManager {
    /**
     * Object containing any API that the plugins can use.
     * @var mixed $api The class name of the API class to load, or an
     * already constructed instance; init() replaces a class name with
     * an instance before any plugin is loaded.
     */
    public $api;

    /**
     * Array mapping guids to question object class names.
     * @var array
     */
    protected $guidToQuestion = array();

    /**
     * Loaded plugin instances, indexed by plugin id.
     * @var array
     */
    protected $plugins = array();

    /**
     * Yii path aliases scanned for plugins, in priority order.
     * @var array
     */
    protected $pluginDirs = array(
        'webroot.plugins', // User plugins
        'application.core.plugins' // Core plugins
    );

    /**
     * Instantiated storage objects, indexed by storage class name.
     * @var array
     */
    protected $stores = array();

    /**
     * Event subscriptions: event name => list of array($plugin, $method).
     * @var array
     */
    protected $subscriptions = array();

    /**
     * Creates the plugin manager: instantiates the configured API class
     * (when only a class name was given) and loads all active plugins.
     */
    public function init() {
        parent::init();
        if (!is_object($this->api)) {
            $class = $this->api;
            $this->api = new $class;
        }
        $this->loadPlugins();
    }

    /**
     * Return a list of installed plugins, but only if the files are still there
     *
     * This prevents errors when a plugin was installed but the files were removed
     * from the server.
     *
     * @return array Plugin records indexed by plugin id.
     */
    public function getInstalledPlugins()
    {
        $pluginModel = Plugin::model();
        $records = $pluginModel->findAll();

        $plugins = array();
        foreach ($records as $record) {
            // Only add plugins we can find
            if ($this->loadPlugin($record->name) !== false) {
                $plugins[$record->id] = $record;
            }
        }
        return $plugins;
    }

    /**
     * Return the status of a plugin (true = active, false = inactive or
     * not installed).
     *
     * @param string $sPluginName Plugin name
     * @return boolean
     */
    public function isPluginActive($sPluginName)
    {
        $pluginModel = Plugin::model();
        $record = $pluginModel->findByAttributes(array('name' => $sPluginName, 'active' => '1'));

        // findByAttributes() returns null when no active record matches.
        return $record !== null;
    }

    /**
     * Returns the storage instance of type $storageClass.
     * If needed initializes the storage object.
     *
     * Short class names are resolved relative to this namespace.
     *
     * @param string $storageClass
     * @return mixed Shared storage instance for $storageClass.
     */
    public function getStore($storageClass)
    {
        if (!class_exists($storageClass)
                && class_exists('ls\\pluginmanager\\' . $storageClass)) {
            $storageClass = 'ls\\pluginmanager\\' . $storageClass;
        }

        if (!isset($this->stores[$storageClass])) {
            $this->stores[$storageClass] = new $storageClass();
        }
        return $this->stores[$storageClass];
    }

    /**
     * This function returns an API object, exposing an API to each plugin.
     * In the current case this is the LimeSurvey API.
     * @return LimesurveyApi
     */
    public function getAPI()
    {
        return $this->api;
    }

    /**
     * Registers a plugin to be notified on some event.
     * @param iPlugin $plugin Reference to the plugin.
     * @param string $event Name of the event.
     * @param string $function Optional function of the plugin to be called;
     *                         defaults to a method named after the event.
     */
    public function subscribe(iPlugin $plugin, $event, $function = null)
    {
        if (!isset($this->subscriptions[$event])) {
            $this->subscriptions[$event] = array();
        }
        if (!$function) {
            $function = $event;
        }
        $subscription = array($plugin, $function);
        // Subscribe only if not yet subscribed.
        if (!in_array($subscription, $this->subscriptions[$event])) {
            $this->subscriptions[$event][] = $subscription;
        }
    }

    /**
     * Unsubscribes a plugin from an event.
     * @param iPlugin $plugin Reference to the plugin being unsubscribed.
     * @param string $event Name of the event. Use '*', to unsubscribe all events for the plugin.
     */
    public function unsubscribe(iPlugin $plugin, $event)
    {
        // Unsubscribe recursively.
        if ($event == '*') {
            // BUGFIX: iterate over the event NAMES (keys). The previous code
            // iterated over the subscription arrays (values) and passed those
            // arrays back into unsubscribe(), so '*' never actually worked.
            foreach (array_keys($this->subscriptions) as $eventName) {
                if ($eventName !== '*') { // guard against infinite recursion
                    $this->unsubscribe($plugin, $eventName);
                }
            }
        } elseif (isset($this->subscriptions[$event])) {
            foreach ($this->subscriptions[$event] as $index => $subscription) {
                if ($subscription[0] == $plugin) {
                    unset($this->subscriptions[$event][$index]);
                }
            }
        }
    }

    /**
     * This function dispatches an event to all registered plugins.
     * @param PluginEvent $event Object holding all event properties
     * @param string|array $target Optional name(s) of plugin(s) to fire the event on
     *
     * @return PluginEvent The (possibly modified) event after dispatch.
     */
    public function dispatchEvent(PluginEvent $event, $target = array())
    {
        $eventName = $event->getEventName();
        if (is_string($target)) {
            $target = array($target);
        }
        if (isset($this->subscriptions[$eventName])) {
            foreach ($this->subscriptions[$eventName] as $subscription) {
                if (!$event->isStopped()
                        && (empty($target) || in_array(get_class($subscription[0]), $target))) {
                    $subscription[0]->setEvent($event);
                    call_user_func($subscription);
                }
            }
        }

        return $event;
    }

    /**
     * Scans the plugin directory for plugins.
     * This function is not efficient so should only be used in the admin interface
     * that specifically deals with enabling / disabling plugins.
     *
     * @param boolean $forceReload Currently unused; kept for interface
     *                             compatibility.
     * @return array Plugin info arrays indexed by plugin name.
     */
    public function scanPlugins($forceReload = false)
    {
        $result = array();
        foreach ($this->pluginDirs as $pluginDir) {
            $currentDir = Yii::getPathOfAlias($pluginDir);
            if (is_dir($currentDir)) {
                foreach (new \DirectoryIterator($currentDir) as $fileInfo) {
                    if (!$fileInfo->isDot() && $fileInfo->isDir()) {
                        // Check if the base plugin file exists.
                        // Directory name Example must contain file ExamplePlugin.php.
                        $pluginName = $fileInfo->getFilename();
                        $file = Yii::getPathOfAlias($pluginDir . ".$pluginName.{$pluginName}") . ".php";
                        if (file_exists($file)) {
                            $result[$pluginName] = $this->getPluginInfo($pluginName, $pluginDir);
                        }
                    }
                }
            }
        }

        return $result;
    }

    /**
     * Gets the description of a plugin. The description is accessed via a
     * static function inside the plugin file.
     *
     * @param string $pluginClass The classname of the plugin
     * @param string|null $pluginDir Optional path alias to restrict the search to.
     * @return array|false Plugin info array, or false when the plugin file
     *                     cannot be found.
     */
    public function getPluginInfo($pluginClass, $pluginDir = null)
    {
        $result = array();
        $class = "{$pluginClass}";

        if (!class_exists($class, false)) {
            $found = false;

            if (!is_null($pluginDir)) {
                $dirs = array($pluginDir);
            } else {
                $dirs = $this->pluginDirs;
            }

            // BUGFIX: iterate $dirs here. The previous code iterated
            // $this->pluginDirs, silently ignoring the $pluginDir argument.
            foreach ($dirs as $pluginDir) {
                $file = Yii::getPathOfAlias($pluginDir . ".$pluginClass.{$pluginClass}") . ".php";
                if (file_exists($file)) {
                    Yii::import($pluginDir . ".$pluginClass.*");
                    $found = true;
                    break;
                }
            }

            if (!$found) {
                return false;
            }
        }

        $result['description'] = call_user_func(array($class, 'getDescription'));
        $result['pluginName'] = call_user_func(array($class, 'getName'));
        $result['pluginClass'] = $class;
        return $result;
    }

    /**
     * Returns the instantiated plugin
     *
     * @param string $pluginName
     * @param int $id Identifier used for identifying a specific plugin instance.
     * If omitted will return the first instantiated plugin with the given name.
     * @return iPlugin|null The plugin or null when missing
     */
    public function loadPlugin($pluginName, $id = null)
    {
        // If the id is not set we search for the plugin.
        if (!isset($id)) {
            foreach ($this->plugins as $plugin) {
                if (get_class($plugin) == $pluginName) {
                    return $plugin;
                }
            }
            // No instance of this plugin has been loaded yet.
            return null;
        } else {
            if ((!isset($this->plugins[$id]) || get_class($this->plugins[$id]) !== $pluginName)) {
                if ($this->getPluginInfo($pluginName) !== false) {
                    $this->plugins[$id] = new $pluginName($this, $id);
                    if (method_exists($this->plugins[$id], 'init')) {
                        $this->plugins[$id]->init();
                    }
                } else {
                    $this->plugins[$id] = null;
                }
            }
            return $this->plugins[$id];
        }
    }

    /**
     * Handles loading all active plugins
     *
     * Possible improvement would be to load them for a specific context.
     * For instance 'survey' for runtime or 'admin' for backend. This needs
     * some thinking before implementing.
     */
    public function loadPlugins()
    {
        // If DB version is less than 165 : plugins table doesn't exist. 175 updates it (boolean to integer for active).
        $dbVersion = \SettingGlobal::model()->find("stg_name=:name", array(':name' => 'DBVersion')); // Need table SettingGlobal, but settings from DB is set only in controller, not in App, see #11294
        if ($dbVersion && $dbVersion->stg_value >= 165) {
            $pluginModel = Plugin::model();
            $records = $pluginModel->findAllByAttributes(array('active' => 1));

            foreach ($records as $record) {
                $this->loadPlugin($record->name, $record->id);
            }
        } else {
            // Log it ? tracevar ?
        }
        $this->dispatchEvent(new PluginEvent('afterPluginLoad', $this)); // Allow plugins to do stuff after all plugins are loaded
    }

    /**
     * Get a list of question objects and load some information about them.
     * This registers the question object classes with Yii.
     *
     * @param boolean $forceReload Rebuild the cache even if already populated.
     * @return array GUID => question class info.
     */
    public function loadQuestionObjects($forceReload = false)
    {
        if (empty($this->guidToQuestion) || $forceReload) {
            $event = new PluginEvent('listQuestionPlugins');
            $this->dispatchEvent($event);

            foreach ($event->get('questionplugins', array()) as $pluginClass => $paths) {
                foreach ($paths as $path) {
                    Yii::import("webroot.plugins.$pluginClass.$path");
                    $parts = explode('.', $path);

                    // Get the class name.
                    $className = array_pop($parts);

                    // Get the GUID for the question object.
                    $guid = forward_static_call(array($className, 'getGUID'));

                    // Save the GUID-class mapping.
                    $this->guidToQuestion[$guid] = array(
                        'class' => $className,
                        'guid' => $guid,
                        'plugin' => $pluginClass,
                        'name' => $className::$info['name']
                    );
                }
            }
        }

        return $this->guidToQuestion;
    }

    /**
     * Construct a question object from a GUID.
     * @param string $guid
     * @param int $questionId
     * @param int $responseId
     * @return iQuestion|null Null when the GUID is unknown.
     */
    public function constructQuestionFromGUID($guid, $questionId = null, $responseId = null)
    {
        $this->loadQuestionObjects();
        if (isset($this->guidToQuestion[$guid])) {
            $questionClass = $this->guidToQuestion[$guid]['class'];
            $questionObject = new $questionClass($this->loadPlugin($this->guidToQuestion[$guid]['plugin']), $this->api, $questionId, $responseId);
            return $questionObject;
        }
    }
}
?>
| apache-2.0 |
QAZDER/WeatherUpdate | app/src/main/java/com/azder/weatherupdata/MainActivity.java | 758 | package com.azder.weatherupdata;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Skip this screen entirely when cached weather data already exists:
        // jump straight to the weather display and remove this activity from
        // the back stack.
        final SharedPreferences preferences =
                PreferenceManager.getDefaultSharedPreferences(this);
        final boolean hasCachedWeather = preferences.getString("weather", null) != null;

        if (hasCachedWeather) {
            startActivity(new Intent(this, WeatherActivity.class));
            finish();
        }
    }
}
| apache-2.0 |
shakamunyi/mesos | src/slave/containerizer/isolators/filesystem/linux.cpp | 29510 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <list>
#include <sstream>
#include <string>
#include <glog/logging.h>
#include <process/collect.hpp>
#include <process/metrics/metrics.hpp>
#include <stout/error.hpp>
#include <stout/foreach.hpp>
#include <stout/os.hpp>
#include <stout/path.hpp>
#include <stout/stringify.hpp>
#include <stout/strings.hpp>
#include <stout/os/shell.hpp>
#include "linux/fs.hpp"
#include "linux/ns.hpp"
#include "slave/paths.hpp"
#include "slave/containerizer/isolators/filesystem/linux.hpp"
using namespace process;
using std::list;
using std::ostringstream;
using std::string;
using mesos::slave::ContainerState;
using mesos::slave::ContainerLimitation;
using mesos::slave::ContainerPrepareInfo;
using mesos::slave::Isolator;
namespace mesos {
namespace internal {
namespace slave {
// Factory for the Linux filesystem isolator. Requires root (mount
// operations) and prepares the slave's work_dir as a shared mount in
// its own peer group before constructing the isolator process.
// Returns an Error on insufficient privileges or mount failure.
Try<Isolator*> LinuxFilesystemIsolatorProcess::create(
    const Flags& flags,
    const Owned<Provisioner>& provisioner)
{
  // NOTE: os::user() is a Result; both the error and the "no such
  // user" (none) cases are fatal here.
  Result<string> user = os::user();
  if (!user.isSome()) {
    return Error("Failed to determine user: " +
                 (user.isError() ? user.error() : "username not found"));
  }

  if (user.get() != "root") {
    return Error("LinuxFilesystemIsolator requires root privileges");
  }

  // Make slave's work_dir a shared mount so that when forking a child
  // process (with a new mount namespace), the child process does not
  // hold extra references to container's work directory mounts and
  // provisioner mounts (e.g., when using the bind backend) because
  // cleanup operations within work_dir can be propagated to all
  // container namespaces. See MESOS-3483 for more details.
  LOG(INFO) << "Making '" << flags.work_dir << "' a shared mount";

  Try<fs::MountInfoTable> table = fs::MountInfoTable::read();
  if (table.isError()) {
    return Error("Failed to get mount table: " + table.error());
  }

  // Look for an existing mount whose target is exactly work_dir.
  Option<fs::MountInfoTable::Entry> workDirMount;
  foreach (const fs::MountInfoTable::Entry& entry, table.get().entries) {
    // TODO(jieyu): Make sure 'flags.work_dir' is a canonical path.
    if (entry.target == flags.work_dir) {
      workDirMount = entry;
      break;
    }
  }

  // Do a self bind mount if needed. If the mount already exists, make
  // sure it is a shared mount of its own peer group.
  if (workDirMount.isNone()) {
    // NOTE: Instead of using fs::mount to perform the bind mount, we
    // use the shell command here because the syscall 'mount' does not
    // update the mount table (i.e., /etc/mtab). In other words, the
    // mount will not be visible if the operator types command
    // 'mount'. Since this mount will still be presented after all
    // containers and the slave are stopped, it's better to make it
    // visible. It's OK to use the blocking os::shell here because
    // 'create' will only be invoked during initialization.
    //
    // The --make-slave before --make-shared detaches the mount from
    // any pre-existing peer group before creating a new one.
    Try<string> mount = os::shell(
        "mount --bind %s %s && "
        "mount --make-slave %s && "
        "mount --make-shared %s",
        flags.work_dir.c_str(),
        flags.work_dir.c_str(),
        flags.work_dir.c_str(),
        flags.work_dir.c_str());

    if (mount.isError()) {
      return Error(
          "Failed to self bind mount '" + flags.work_dir +
          "' and make it a shared mount: " + mount.error());
    }
  } else {
    if (workDirMount.get().shared().isNone()) {
      // This is the case where the work directory mount is not a
      // shared mount yet (possibly due to slave crash while preparing
      // the work directory mount). It's safe to re-do the following.
      Try<string> mount = os::shell(
          "mount --make-slave %s && "
          "mount --make-shared %s",
          flags.work_dir.c_str(),
          flags.work_dir.c_str());

      if (mount.isError()) {
        return Error(
            "Failed to self bind mount '" + flags.work_dir +
            "' and make it a shared mount: " + mount.error());
      }
    } else {
      // We need to make sure that the shared mount is in its own peer
      // group. To check that, we need to get the parent mount.
      foreach (const fs::MountInfoTable::Entry& entry, table.get().entries) {
        if (entry.id == workDirMount.get().parent) {
          // If the work directory mount and its parent mount are in
          // the same peer group, we need to re-do the following
          // commands so that they are in different peer groups.
          if (entry.shared() == workDirMount.get().shared()) {
            Try<string> mount = os::shell(
                "mount --make-slave %s && "
                "mount --make-shared %s",
                flags.work_dir.c_str(),
                flags.work_dir.c_str());

            if (mount.isError()) {
              return Error(
                  "Failed to self bind mount '" + flags.work_dir +
                  "' and make it a shared mount: " + mount.error());
            }
          }

          break;
        }
      }
    }
  }

  Owned<MesosIsolatorProcess> process(
      new LinuxFilesystemIsolatorProcess(flags, provisioner));

  return new MesosIsolator(process);
}
// Constructs the isolator process, keeping references to the slave
// flags and the provisioner, and binding metrics to this process' PID.
LinuxFilesystemIsolatorProcess::LinuxFilesystemIsolatorProcess(
    const Flags& _flags,
    const Owned<Provisioner>& _provisioner)
  : flags(_flags),
    provisioner(_provisioner),
    metrics(PID<LinuxFilesystemIsolatorProcess>(this)) {}


LinuxFilesystemIsolatorProcess::~LinuxFilesystemIsolatorProcess() {}
// First stage of recovery after a slave restart: rebuilds the per-
// container 'infos' map from the host mount table for both known
// containers ('states') and orphans, cleans up mounts of unknown
// orphans, then chains into '_recover' for provisioner recovery.
Future<Nothing> LinuxFilesystemIsolatorProcess::recover(
    const list<ContainerState>& states,
    const hashset<ContainerID>& orphans)
{
  // Read the mount table in the host mount namespace to recover paths
  // to containers' work directories if their root filesystems are
  // changed. Method 'cleanup()' relies on this information to clean
  // up mounts in the host mount namespace for each container.
  Try<fs::MountInfoTable> table = fs::MountInfoTable::read();
  if (table.isError()) {
    return Failure("Failed to get mount table: " + table.error());
  }

  foreach (const ContainerState& state, states) {
    Owned<Info> info(new Info(state.directory()));

    // A mount whose source ('root') is the work directory means the
    // container had its sandbox bind mounted into a new rootfs;
    // remember where, so cleanup() can unmount it.
    foreach (const fs::MountInfoTable::Entry& entry, table.get().entries) {
      if (entry.root == info->directory) {
        info->sandbox = entry.target;
        break;
      }
    }

    infos.put(state.container_id(), info);
  }

  // Recover both known and unknown orphans by scanning the mount
  // table and finding those mounts whose roots are under slave's
  // sandbox root directory. Those mounts are container's work
  // directory mounts. Mounts from unknown orphans will be cleaned up
  // immediately. Mounts from known orphans will be cleaned up when
  // those known orphan containers are being destroyed by the slave.
  hashset<ContainerID> unknownOrphans;

  string sandboxRootDir = paths::getSandboxRootDir(flags.work_dir);

  foreach (const fs::MountInfoTable::Entry& entry, table.get().entries) {
    if (!strings::startsWith(entry.root, sandboxRootDir)) {
      continue;
    }

    // TODO(jieyu): Here, we retrieve the container ID by taking the
    // basename of 'entry.root'. This assumes that the slave's sandbox
    // root directory are organized according to the comments in the
    // beginning of slave/paths.hpp.
    ContainerID containerId;
    containerId.set_value(Path(entry.root).basename());

    if (infos.contains(containerId)) {
      continue;
    }

    Owned<Info> info(new Info(entry.root));

    // root != target means the sandbox was mounted somewhere else
    // (into a container rootfs); record that location.
    if (entry.root != entry.target) {
      info->sandbox = entry.target;
    }

    infos.put(containerId, info);

    // Remember all the unknown orphan containers.
    if (!orphans.contains(containerId)) {
      unknownOrphans.insert(containerId);
    }
  }

  // Cleanup mounts from unknown orphans.
  list<Future<Nothing>> futures;
  foreach (const ContainerID& containerId, unknownOrphans) {
    futures.push_back(cleanup(containerId));
  }

  return collect(futures)
    .then(defer(PID<LinuxFilesystemIsolatorProcess>(this),
                &LinuxFilesystemIsolatorProcess::_recover,
                states,
                orphans));
}
// Second stage of recovery: lets the provisioner recover its own
// state for the given containers/orphans and collapses its result
// into Nothing.
Future<Nothing> LinuxFilesystemIsolatorProcess::_recover(
    const list<ContainerState>& states,
    const hashset<ContainerID>& orphans)
{
  return provisioner->recover(states, orphans)
    .then([]() -> Future<Nothing> { return Nothing(); });
}
// Entry point of the prepare chain. Registers the container, then:
// no ContainerInfo -> __prepare directly (no rootfs); ContainerInfo
// without image -> _prepare (volumes may still need provisioning);
// otherwise provision the root filesystem image first and chain into
// _prepare with the provisioned rootfs path.
Future<Option<ContainerPrepareInfo>> LinuxFilesystemIsolatorProcess::prepare(
    const ContainerID& containerId,
    const ExecutorInfo& executorInfo,
    const string& directory,
    const Option<string>& user)
{
  if (infos.contains(containerId)) {
    return Failure("Container has already been prepared");
  }

  infos.put(containerId, Owned<Info>(new Info(directory)));

  if (!executorInfo.has_container()) {
    return __prepare(containerId, executorInfo, directory, user, None());
  }

  // Provision the root filesystem if needed.
  CHECK_EQ(executorInfo.container().type(), ContainerInfo::MESOS);

  if (!executorInfo.container().mesos().has_image()) {
    return _prepare(containerId, executorInfo, directory, user, None());
  }

  const Image& image = executorInfo.container().mesos().image();

  // lambda::_1 receives the provisioned rootfs path.
  return provisioner->provision(containerId, image)
    .then(defer(PID<LinuxFilesystemIsolatorProcess>(this),
                &LinuxFilesystemIsolatorProcess::_prepare,
                containerId,
                executorInfo,
                directory,
                user,
                lambda::_1));
}
// Provisions any image-backed volumes declared in ContainerInfo and
// rewrites each such volume's 'host_path' to the provisioned rootfs
// path, then chains into __prepare with the (mutated) executor info.
Future<Option<ContainerPrepareInfo>> LinuxFilesystemIsolatorProcess::_prepare(
    const ContainerID& containerId,
    const ExecutorInfo& executorInfo,
    const string& directory,
    const Option<string>& user,
    const Option<string>& rootfs)
{
  CHECK(executorInfo.has_container());
  CHECK_EQ(executorInfo.container().type(), ContainerInfo::MESOS);

  // We will provision the images specified in ContainerInfo::volumes
  // as well. We will mutate ContainerInfo::volumes to include the
  // paths to the provisioned root filesystems (by setting the
  // 'host_path') if the volume specifies an image as the source.
  //
  // NOTE: the copy is held in an Owned pointer so the 'volume'
  // pointers captured by the lambdas below stay valid until the
  // continuation that captures '_executorInfo' by value runs.
  Owned<ExecutorInfo> _executorInfo(new ExecutorInfo(executorInfo));
  list<Future<Nothing>> futures;

  for (int i = 0; i < _executorInfo->container().volumes_size(); i++) {
    Volume* volume = _executorInfo->mutable_container()->mutable_volumes(i);

    if (!volume->has_image()) {
      continue;
    }

    const Image& image = volume->image();

    futures.push_back(
        provisioner->provision(containerId, image)
          .then([volume](const string& path) -> Future<Nothing> {
            volume->set_host_path(path);
            return Nothing();
          }));
  }

  return collect(futures)
    .then([=]() -> Future<Option<ContainerPrepareInfo>> {
      return __prepare(containerId, *_executorInfo, directory, user, rootfs);
    });
}
// Final stage of prepare. Requests a new mount namespace, and when a
// rootfs is used, bind mounts the work directory into it as a shared
// mount (bind -> slave -> shared, in that order). Also generates the
// in-namespace isolation script and mounts persistent volumes via
// update().
Future<Option<ContainerPrepareInfo>> LinuxFilesystemIsolatorProcess::__prepare(
    const ContainerID& containerId,
    const ExecutorInfo& executorInfo,
    const string& directory,
    const Option<string>& user,
    const Option<string>& rootfs)
{
  CHECK(infos.contains(containerId));

  const Owned<Info>& info = infos[containerId];

  ContainerPrepareInfo prepareInfo;
  prepareInfo.set_namespaces(CLONE_NEWNS);

  if (rootfs.isSome()) {
    // If the container changes its root filesystem, we need to mount
    // the container's work directory into its root filesystem
    // (creating it if needed) so that the executor and the task can
    // access the work directory.
    //
    // NOTE: The mount of the work directory must be a shared mount in
    // the host filesystem so that any mounts underneath it will
    // propagate into the container's mount namespace. This is how we
    // can update persistent volumes for the container.

    // This is the mount point of the work directory in the root filesystem.
    const string sandbox = path::join(rootfs.get(), flags.sandbox_directory);

    // Save the path 'sandbox' which will be used in 'cleanup()'.
    info->sandbox = sandbox;

    if (!os::exists(sandbox)) {
      Try<Nothing> mkdir = os::mkdir(sandbox);
      if (mkdir.isError()) {
        return Failure(
            "Failed to create sandbox mount point at '" +
            sandbox + "': " + mkdir.error());
      }
    }

    LOG(INFO) << "Bind mounting work directory from '" << directory
              << "' to '" << sandbox << "' for container " << containerId;

    Try<Nothing> mount = fs::mount(
        directory,
        sandbox,
        None(),
        MS_BIND,
        NULL);

    if (mount.isError()) {
      return Failure(
          "Failed to mount work directory '" + directory +
          "' to '" + sandbox + "': " + mount.error());
    }

    // MS_SLAVE first detaches the bind mount from its original peer
    // group; MS_SHARED then puts it in a fresh peer group of its own.
    mount = fs::mount(
        None(),
        sandbox,
        None(),
        MS_SLAVE,
        NULL);

    if (mount.isError()) {
      return Failure(
          "Failed to mark sandbox '" + sandbox +
          "' as a slave mount: " + mount.error());
    }

    mount = fs::mount(
        None(),
        sandbox,
        None(),
        MS_SHARED,
        NULL);

    if (mount.isError()) {
      return Failure(
          "Failed to mark sandbox '" + sandbox +
          "' as a shared mount: " + mount.error());
    }

    prepareInfo.set_rootfs(rootfs.get());
  }

  // Prepare the commands that will be run in the container's mount
  // namespace right after forking the executor process. We use these
  // commands to mount those volumes specified in the container info
  // so that they don't pollute the host mount namespace.
  Try<string> _script = script(containerId, executorInfo, directory, rootfs);
  if (_script.isError()) {
    return Failure("Failed to generate isolation script: " + _script.error());
  }

  CommandInfo* command = prepareInfo.add_commands();
  command->set_value(_script.get());

  // Mount persistent volumes (if any) before returning.
  return update(containerId, executorInfo.resources())
    .then([prepareInfo]() -> Future<Option<ContainerPrepareInfo>> {
      return prepareInfo;
    });
}
// Generates the shell script executed inside the container's new
// mount namespace: makes '/' rslave, lazily unmounts other
// containers' leftover work-dir/volume mounts, and bind mounts each
// ContainerInfo volume to its resolved target. Returns Error when a
// volume is malformed or a required path is missing.
Try<string> LinuxFilesystemIsolatorProcess::script(
    const ContainerID& containerId,
    const ExecutorInfo& executorInfo,
    const string& directory,
    const Option<string>& rootfs)
{
  ostringstream out;
  out << "#!/bin/sh\n";
  out << "set -x -e\n";

  // Make sure mounts in the container mount namespace do not
  // propagate back to the host mount namespace.
  out << "mount --make-rslave /\n";

  // Try to unmount work directory mounts and persistent volume mounts
  // for other containers to release the extra references to them.
  // NOTE:
  // 1) This doesn't completely eliminate the race condition between
  //    this container copying mount table and other containers being
  //    cleaned up. This is instead a best-effort attempt.
  // 2) This script assumes that all the mounts the container needs
  //    under the slave work directory have its container ID in the
  //    path either for the mount source (e.g. sandbox self-bind mount)
  //    or the mount target (e.g. mounting sandbox into new rootfs).
  //
  // TODO(xujyan): This command may fail if --work_dir is not specified
  // with a real path as real paths are used in the mount table. It
  // doesn't work when the paths contain reserved characters such as
  // spaces either because such characters in mount info are encoded
  // in the escaped form (i.e. '\0xx').
  out << "grep -E '" << flags.work_dir << "/.+' /proc/self/mountinfo | "
      << "grep -v '" << containerId.value() << "' | "
      << "cut -d' ' -f5 | " // '-f5' is the mount target. See MountInfoTable.
      << "xargs --no-run-if-empty umount -l || "
      << "true \n"; // We mask errors in this command.

  if (!executorInfo.has_container()) {
    return out.str();
  }

  foreach (const Volume& volume, executorInfo.container().volumes()) {
    if (!volume.has_host_path()) {
      return Error("A volume misses 'host_path'");
    }

    // If both 'host_path' and 'container_path' are relative paths,
    // return an error because the user can just directly access the
    // volume in the work directory.
    if (!strings::startsWith(volume.host_path(), "/") &&
        !strings::startsWith(volume.container_path(), "/")) {
      return Error(
          "Both 'host_path' and 'container_path' of a volume are relative");
    }

    // Determine the source of the mount.
    string source;

    if (strings::startsWith(volume.host_path(), "/")) {
      source = volume.host_path();

      // An absolute path must already exist.
      if (!os::exists(source)) {
        return Error("Absolute host path does not exist");
      }
    } else {
      // Path is interpreted as relative to the work directory.
      source = path::join(directory, volume.host_path());

      // TODO(jieyu): We need to check that source resolves under the
      // work directory because a user can potentially use a container
      // path like '../../abc'.

      if (!os::exists(source)) {
        Try<Nothing> mkdir = os::mkdir(source);
        if (mkdir.isError()) {
          return Error(
              "Failed to create the source of the mount at '" +
              source + "': " + mkdir.error());
        }

        // TODO(idownes): Consider setting ownership and mode.
      }
    }

    // Determine the target of the mount.
    string target;

    if (strings::startsWith(volume.container_path(), "/")) {
      if (rootfs.isSome()) {
        target = path::join(rootfs.get(), volume.container_path());
      } else {
        target = volume.container_path();
      }

      // An absolute path must already exist. This is because we want
      // to avoid creating mount points outside the work directory in
      // the host filesystem or in the container filesystem root.
      if (!os::exists(target)) {
        return Error("Absolute container path does not exist");
      }

      // TODO(jieyu): We need to check that target resolves under
      // 'rootfs' because a user can potentially use a container path
      // like '/../../abc'.
    } else {
      if (rootfs.isSome()) {
        target = path::join(rootfs.get(),
                            flags.sandbox_directory,
                            volume.container_path());
      } else {
        target = path::join(directory, volume.container_path());
      }

      // TODO(jieyu): We need to check that target resolves under the
      // sandbox because a user can potentially use a container path
      // like '../../abc'.

      if (!os::exists(target)) {
        Try<Nothing> mkdir = os::mkdir(target);
        if (mkdir.isError()) {
          return Error(
              "Failed to create the target of the mount at '" +
              target + "': " + mkdir.error());
        }
      }
    }

    // TODO(jieyu): Consider the mode in the volume.
    out << "mount -n --rbind '" << source << "' '" << target << "'\n";
  }

  return out.str();
}
Future<Nothing> LinuxFilesystemIsolatorProcess::isolate(
    const ContainerID& containerId,
    pid_t pid)
{
  // Nothing to do here: the actual filesystem isolation is achieved
  // by unsharing the mount namespace when the executor is forked
  // (CLONE_NEWNS is requested in '__prepare').
  return Nothing();
}
Future<ContainerLimitation> LinuxFilesystemIsolatorProcess::watch(
    const ContainerID& containerId)
{
  // This isolator never imposes a limitation: hand back a default
  // (forever pending) future.
  Future<ContainerLimitation> limitation;
  return limitation;
}
Future<Nothing> LinuxFilesystemIsolatorProcess::update(
const ContainerID& containerId,
const Resources& resources)
{
// Mount persistent volumes. We do this in the host namespace and
// rely on mount propagation for them to be visible inside the
// container.
if (!infos.contains(containerId)) {
return Failure("Unknown container");
}
const Owned<Info>& info = infos[containerId];
Resources current = info->resources;
// We first remove unneeded persistent volumes.
foreach (const Resource& resource, current.persistentVolumes()) {
// This is enforced by the master.
CHECK(resource.disk().has_volume());
// Ignore absolute and nested paths.
const string& containerPath = resource.disk().volume().container_path();
if (strings::contains(containerPath, "/")) {
LOG(WARNING) << "Skipping updating mount for persistent volume "
<< resource << " of container " << containerId
<< " because the container path '" << containerPath
<< "' contains slash";
continue;
}
if (resources.contains(resource)) {
continue;
}
// Determine the target of the mount.
string target;
if (info->sandbox.isSome()) {
target = path::join(info->sandbox.get(), containerPath);
} else {
target = path::join(info->directory, containerPath);
}
LOG(INFO) << "Removing mount '" << target << "' for persistent volume "
<< resource << " of container " << containerId;
// The unmount will fail if the task/executor is still using files
// or directories under 'target'.
Try<Nothing> unmount = fs::unmount(target);
if (unmount.isError()) {
return Failure(
"Failed to unmount unneeded persistent volume at '" +
target + "': " + unmount.error());
}
// NOTE: This is a non-recursive rmdir.
Try<Nothing> rmdir = os::rmdir(target, false);
if (rmdir.isError()) {
return Failure(
"Failed to remove persistent volume mount point at '" +
target + "': " + rmdir.error());
}
}
// We then mount new persistent volumes.
foreach (const Resource& resource, resources.persistentVolumes()) {
// This is enforced by the master.
CHECK(resource.disk().has_volume());
// Ignore absolute and nested paths.
const string& containerPath = resource.disk().volume().container_path();
if (strings::contains(containerPath, "/")) {
LOG(WARNING) << "Skipping updating mount for persistent volume "
<< resource << " of container " << containerId
<< " because the container path '" << containerPath
<< "' contains slash";
continue;
}
if (current.contains(resource)) {
continue;
}
// Determine the source of the mount.
string source = paths::getPersistentVolumePath(
flags.work_dir,
resource.role(),
resource.disk().persistence().id());
// Set the ownership of the persistent volume to match that of the
// sandbox directory.
//
// NOTE: Currently, persistent volumes in Mesos are exclusive,
// meaning that if a persistent volume is used by one task or
// executor, it cannot be concurrently used by other task or
// executor. But if we allow multiple executors to use same
// persistent volume at the same time in the future, the ownership
// of the persistent volume may conflict here.
//
// TODO(haosdent): Consider letting the frameworks specify the
// user/group of the persistent volumes.
struct stat s;
if (::stat(info->directory.c_str(), &s) < 0) {
return Failure(
"Failed to get ownership for '" + info->directory +
"': " + strerror(errno));
}
LOG(INFO) << "Changing the ownership of the persistent volume at '"
<< source << "' with uid " << s.st_uid
<< " and gid " << s.st_gid;
Try<Nothing> chown = os::chown(s.st_uid, s.st_gid, source, true);
if (chown.isError()) {
return Failure(
"Failed to change the ownership of the persistent volume at '" +
source + "' with uid " + stringify(s.st_uid) +
" and gid " + stringify(s.st_gid) + ": " + chown.error());
}
// Determine the target of the mount.
string target;
if (info->sandbox.isSome()) {
target = path::join(info->sandbox.get(), containerPath);
} else {
target = path::join(info->directory, containerPath);
}
if (os::exists(target)) {
// NOTE: This is possible because 'info->resources' will be
// reset when slave restarts and recovers. When the slave calls
// 'containerizer->update' after the executor re-registers,
// we'll try to re-mount all the already mounted volumes.
// TODO(jieyu): Check the source of the mount matches the entry
// with the same target in the mount table if one can be found.
// If not, mount the persistent volume as we did below. This is
// possible because the slave could crash after it unmounts the
// volume but before it is able to delete the mount point.
} else {
Try<Nothing> mkdir = os::mkdir(target);
if (mkdir.isError()) {
return Failure(
"Failed to create persistent volume mount point at '" +
target + "': " + mkdir.error());
}
LOG(INFO) << "Mounting '" << source << "' to '" << target
<< "' for persistent volume " << resource
<< " of container " << containerId;
Try<Nothing> mount = fs::mount(source, target, None(), MS_BIND, NULL);
if (mount.isError()) {
return Failure(
"Failed to mount persistent volume from '" +
source + "' to '" + target + "': " + mount.error());
}
}
}
// Store the new resources.
info->resources = resources;
return Nothing();
}
// Returns resource usage statistics for the given container. This
// isolator does not gather any filesystem usage metrics, so it always
// returns a default-constructed (empty) ResourceStatistics.
Future<ResourceStatistics> LinuxFilesystemIsolatorProcess::usage(
    const ContainerID& containerId)
{
  // No-op, no usage gathered.
  return ResourceStatistics();
}
// Cleans up the host-side mounts created for the given container:
// first every persistent volume mounted under the container's
// sandbox/work directory, then the sandbox/work directory mount
// itself, and finally any provisioned root filesystem.
Future<Nothing> LinuxFilesystemIsolatorProcess::cleanup(
    const ContainerID& containerId)
{
  if (!infos.contains(containerId)) {
    // Nothing tracked for this container; treat cleanup as a no-op.
    VLOG(1) << "Ignoring cleanup request for unknown container: "
            << containerId;
    return Nothing();
  }

  const Owned<Info>& info = infos[containerId];

  // NOTE: We don't need to cleanup mounts in the container's mount
  // namespace because it's done automatically by the kernel when the
  // mount namespace is destroyed after the last process terminates.

  // The path to the container's work directory which is the parent of
  // all the persistent volume mounts.
  string sandbox;

  if (info->sandbox.isSome()) {
    sandbox = info->sandbox.get();
  } else {
    sandbox = info->directory;
  }

  // Drop the bookkeeping entry for this container before unmounting.
  infos.erase(containerId);

  // Cleanup the mounts for this container in the host mount
  // namespace, including container's work directory and all the
  // persistent volume mounts.
  Try<fs::MountInfoTable> table = fs::MountInfoTable::read();
  if (table.isError()) {
    return Failure("Failed to get mount table: " + table.error());
  }

  bool sandboxMountExists = false;

  foreach (const fs::MountInfoTable::Entry& entry, table.get().entries) {
    // NOTE: All persistent volumes are mounted at targets under the
    // container's work directory. We unmount all the persistent
    // volumes before unmounting the sandbox/work directory mount.
    if (entry.target == sandbox) {
      sandboxMountExists = true;
    } else if (strings::startsWith(entry.target, sandbox)) {
      LOG(INFO) << "Unmounting volume '" << entry.target
                << "' for container " << containerId;

      Try<Nothing> unmount = fs::unmount(entry.target);
      if (unmount.isError()) {
        return Failure(
            "Failed to unmount volume '" + entry.target +
            "': " + unmount.error());
      }
    }
  }

  if (!sandboxMountExists) {
    // This could happen if the container was not launched by this
    // isolator (e.g., slaves prior to 0.25.0), or the container did
    // not specify a root filesystem.
    LOG(INFO) << "Ignoring unmounting sandbox/work directory"
              << " for container " << containerId;
  } else {
    LOG(INFO) << "Unmounting sandbox/work directory '" << sandbox
              << "' for container " << containerId;

    Try<Nothing> unmount = fs::unmount(sandbox);
    if (unmount.isError()) {
      return Failure(
          "Failed to unmount sandbox/work directory '" + sandbox +
          "': " + unmount.error());
    }
  }

  // Destroy the provisioned root filesystems.
  return provisioner->destroy(containerId)
    .then([]() -> Future<Nothing> { return Nothing(); });
}
// Registers the isolator's metrics. The gauge is backed by a deferred
// call into the isolator actor so reads are dispatched to its context.
LinuxFilesystemIsolatorProcess::Metrics::Metrics(
    const PID<LinuxFilesystemIsolatorProcess>& isolator)
  : containers_new_rootfs(
      "containerizer/mesos/filesystem/containers_new_rootfs",
      defer(isolator, &LinuxFilesystemIsolatorProcess::_containers_new_rootfs))
{
  process::metrics::add(containers_new_rootfs);
}
// Unregisters the metrics added in the constructor.
LinuxFilesystemIsolatorProcess::Metrics::~Metrics()
{
  process::metrics::remove(containers_new_rootfs);
}
// Gauge callback: counts the tracked containers that have their own
// root filesystem (i.e. whose 'sandbox' field is set).
double LinuxFilesystemIsolatorProcess::_containers_new_rootfs()
{
  double count = 0.0;

  foreachvalue (const Owned<Info>& info, infos) {
    if (info->sandbox.isSome()) {
      ++count;
    }
  }

  return count;
}
} // namespace slave {
} // namespace internal {
} // namespace mesos {
| apache-2.0 |
project-mandolin/mandolin | mandolin-mx/src/main/scala/org/mitre/mandolin/mx/standalone/StandaloneMxNetOptimizer.scala | 2828 | package org.mitre.mandolin.mx.standalone
import org.mitre.mandolin.optimize.standalone.OnlineOptimizer
import org.mitre.mandolin.mx._
import org.mitre.mandolin.mlp.MMLPFactor
import ml.dmlc.mxnet.{Symbol, Context, Shape, NDArray, Uniform, Xavier}
import ml.dmlc.mxnet.optimizer.SGD
/**
* This follows the MMLPOptimizer but really just thinly wraps MxNet
* allowing for data conventions used by Mandolin and to ensure that standalone
* and Spark-based usage is consistent.
*/
object StandaloneMxNetOptimizer {

  // Mini-batch size shared by the example network and its input shapes.
  val batchSize = 64

  // hardcoded example: a 3-layer MLP (1000 -> 1000 -> 10) with softmax output.
  def getMlp: Symbol = {
    val data = Symbol.Variable("data")
    val fc1 = Symbol.FullyConnected(name = "fc1")()(Map("data" -> data, "num_hidden" -> 1000))
    val act1 = Symbol.Activation(name = "relu1")()(Map("data" -> fc1, "act_type" -> "relu"))
    val fc2 = Symbol.FullyConnected(name = "fc2")()(Map("data" -> act1, "num_hidden" -> 1000))
    val act2 = Symbol.Activation(name = "relu2")()(Map("data" -> fc2, "act_type" -> "relu"))
    val fc3 = Symbol.FullyConnected(name = "fc3")()(Map("data" -> act2, "num_hidden" -> 10))
    val mlp = Symbol.SoftmaxOutput(name = "softmax")()(Map("data" -> fc3))
    mlp
  }

  val uniInit = new Uniform(0.1f)
  val xavierInit = new Xavier(factorType = "in", magnitude = 2.32f)

  /**
   * Allocates zeroed NDArrays for every learnable parameter and auxiliary
   * state of `net` (everything except the declared inputs), fills them via
   * the Xavier initializer, and wraps them in a MxNetWeights.
   */
  def initializeParameters(net: Symbol, inputShapes: Map[String, Shape]) = {
    val (argShapes, _, auxShapes) = net.inferShape(inputShapes)
    val argNames = net.listArguments()
    val inputNames = inputShapes.keys
    val paramNames = argNames.toSet -- inputNames.toSet
    val auxNames = net.listAuxiliaryStates()
    val paramNameShapes = (argNames zip argShapes).filter { case (name, _) =>
      paramNames.contains(name)
    }
    val argParams = paramNameShapes.map { case (name, shape) =>
      (name, NDArray.zeros(shape))
    }.toMap
    val auxParams = (auxNames zip auxShapes).map { case (name, shape) =>
      (name, NDArray.zeros(shape))
    }.toMap
    // The second tuple element here is the allocated NDArray (not a shape);
    // the initializer fills it in place.
    argParams foreach { case (name, arr) => xavierInit(name, arr) }
    auxParams foreach { case (name, arr) => xavierInit(name, arr) }
    new MxNetWeights(argParams, auxParams, 1.0f)
  }

  /**
   * Builds an OnlineOptimizer over the example MLP with SGD updates,
   * evaluating on two CPU contexts. All hyper-parameters are fixed demo
   * values.
   */
  def getStandaloneOptimizer() = {
    val sgd = new SGD(learningRate = 0.1f)
    val updater = new MxNetOptimizer(sgd)
    val mlp = getMlp
    val evaluator = new MxNetGlpEvaluator(mlp, Array(Context.cpu(0), Context.cpu(1)), 784)
    val params = initializeParameters(mlp, Map("data" -> Shape(batchSize, 784), "softmax_label" -> Shape(batchSize)))
    val mxEpochs = 10
    val numSubEpochs = 1
    val workersPerPartition = 1
    val optDetails: Option[String] = None
    new OnlineOptimizer[MMLPFactor, MxNetWeights, MxNetLossGradient, MxNetOptimizer](params,
      evaluator, updater, mxEpochs, numSubEpochs, workersPerPartition, optDetails)
  }
}
| apache-2.0 |
webadvancedservicescom/magento | dev/tests/unit/testsuite/Magento/Framework/Model/Resource/Type/Db/ConnectionFactoryTest.php | 903 | <?php
/**
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
namespace Magento\Framework\Model\Resource\Type\Db;
class ConnectionFactoryTest extends \PHPUnit_Framework_TestCase
{
/**
* @var ConnectionFactory
*/
private $model;
protected function setUp()
{
$this->model = new ConnectionFactory(
$this->getMockForAbstractClass('\Magento\Framework\ObjectManagerInterface')
);
}
/**
* @param array $config
* @dataProvider dataProviderCreateNoActiveConfig
*/
public function testCreateNoActiveConfig($config)
{
$this->assertNull($this->model->create($config));
}
/**
* @return array
*/
public function dataProviderCreateNoActiveConfig()
{
return [
[[]],
[['value']],
[['active' => 0]],
];
}
}
| apache-2.0 |
flavoi/diventi | diventi/core/views.py | 705 | from django.shortcuts import render
from django.contrib import messages
from django.views.generic import ListView
from django.contrib.auth.mixins import UserPassesTestMixin
from diventi.blog.models import Article
class DiventiActionMixin:
    """Flash a success message when a create/update form validates.

    Subclasses must override ``success_msg`` with the message to show.
    """

    @property
    def success_msg(self):
        # Sentinel value: concrete views are expected to override this.
        return NotImplemented

    def form_valid(self, form):
        # Queue the configured message, then let the parent view finish
        # processing the valid form (save, redirect, ...).
        messages.success(self.request, self.success_msg)
        return super().form_valid(form)
class StaffRequiredMixin:
    """Restrict the access of a page to staff (admin) users only.

    Intended for use with Django's ``UserPassesTestMixin``: ``test_func``
    returns True only when the requesting user has ``is_staff`` set.
    """

    def test_func(self):
        # Fixed: stray dataset metadata (" | apache-2.0 |") had been fused
        # onto this line, breaking the expression.
        return self.request.user.is_staff
udoprog/tiny-serializer-java | tiny-serializer-core/src/main/java/eu/toolchain/serializer/collection/DefaultNavigableMapSerializer.java | 1307 | package eu.toolchain.serializer.collection;
import eu.toolchain.serializer.SerialReader;
import eu.toolchain.serializer.SerialWriter;
import eu.toolchain.serializer.Serializer;
import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import lombok.RequiredArgsConstructor;
/**
 * Serializes a {@code NavigableMap} as a size header followed by
 * alternating key/value entries, and deserializes back into a
 * {@code TreeMap}. Entry order on the wire follows the map's iteration
 * order; reads must consume key then value, matching the write order.
 */
@RequiredArgsConstructor
public class DefaultNavigableMapSerializer<K extends Comparable<?>, V>
    implements Serializer<NavigableMap<K, V>> {
    // Serializer for the entry-count prefix.
    private final Serializer<Integer> integer;
    // Serializer for map keys.
    private final Serializer<K> key;
    // Serializer for map values.
    private final Serializer<V> value;
    @Override
    public void serialize(SerialWriter buffer, NavigableMap<K, V> values) throws IOException {
        // Write the size first so deserialize knows how many entries follow.
        integer.serialize(buffer, values.size());
        for (final Map.Entry<K, V> entry : values.entrySet()) {
            key.serialize(buffer, entry.getKey());
            value.serialize(buffer, entry.getValue());
        }
    }
    @Override
    public NavigableMap<K, V> deserialize(SerialReader buffer) throws IOException {
        final int size = integer.deserialize(buffer);
        final NavigableMap<K, V> values = new TreeMap<>();
        for (int i = 0; i < size; ++i) {
            // Read in the same key-then-value order used by serialize().
            final K key = this.key.deserialize(buffer);
            final V value = this.value.deserialize(buffer);
            values.put(key, value);
        }
        return values;
    }
}
| apache-2.0 |
ST-DDT/CrazySpawnerAI | compatibility/craftbukkit/v1_6_R3/de/st_ddt/crazyspawner/craftbukkit/v1_6_R3/ai/CompatibilityProvider.java | 379 | package de.st_ddt.crazyspawner.craftbukkit.v1_6_R3.ai;
import de.st_ddt.crazyspawner.craftbukkit.v1_6_R3.entities.util.ai.ActionHelperImpl;
import de.st_ddt.crazyspawner.entities.util.ai.ActionHelperInterface;
public class CompatibilityProvider
{
static
{
ActionHelperInterface.ACTIONHELPERCLASSES.add(ActionHelperImpl.class);
}
private CompatibilityProvider()
{
}
}
| apache-2.0 |
LowieHuyghe/edmunds | tests/cache/drivers/testredis.py | 1260 |
from tests.testcase import TestCase
from werkzeug.contrib.cache import RedisCache
from edmunds.cache.drivers.redis import Redis
class TestRedis(TestCase):
    """
    Test the Redis
    """

    def test_redis(self):
        """
        Test that configuring the app's cache with the Redis driver yields
        a driver instance of both the Edmunds Redis class and the
        underlying werkzeug RedisCache.
        """

        # Write config declaring a single Redis cache instance with
        # default connection settings (localhost:6379, db 0).
        self.write_config([
            "from edmunds.cache.drivers.redis import Redis \n",
            "APP = { \n",
            "   'cache': { \n",
            "       'enabled': True, \n",
            "       'instances': [ \n",
            "           { \n",
            "               'name': 'redis',\n",
            "               'driver': Redis,\n",
            "               'host': 'localhost',\n",
            "               'port': 6379,\n",
            "               'password': None,\n",
            "               'db': 0,\n",
            "               'default_timeout': 300,\n",
            "               'key_prefix': None,\n",
            "           }, \n",
            "       ], \n",
            "   }, \n",
            "} \n",
        ])

        # Create app and resolve the configured cache driver.
        app = self.create_application()
        driver = app.cache()
        self.assert_is_instance(driver, Redis)
        self.assert_is_instance(driver, RedisCache)
| apache-2.0 |
metatron-app/metatron-discovery | discovery-server/src/main/java/app/metatron/discovery/domain/workbook/configurations/widget/shelf/LayerView.java | 5787 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.domain.workbook.configurations.widget.shelf;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.commons.lang3.StringUtils;
import java.io.Serializable;
import java.util.List;
import app.metatron.discovery.common.datasource.LogicalType;
import app.metatron.discovery.query.druid.Aggregation;
import app.metatron.discovery.query.druid.PostAggregation;
import app.metatron.discovery.query.druid.aggregations.CountAggregation;
import app.metatron.discovery.query.druid.aggregations.GenericSumAggregation;
import app.metatron.discovery.query.druid.aggregations.RelayAggregation;
import app.metatron.discovery.query.druid.postaggregations.ExprPostAggregator;
import app.metatron.discovery.util.EnumUtils;
/**
 * Polymorphic view configuration for a map layer. The concrete subtype is
 * selected by the external "type" property ("original", "abbr",
 * "clustering", "hash"); when absent, {@link OriginalLayerView} is used.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME,
    include = JsonTypeInfo.As.EXTERNAL_PROPERTY,
    property = "type",
    defaultImpl = LayerView.OriginalLayerView.class)
@JsonSubTypes({
    @JsonSubTypes.Type(value = LayerView.OriginalLayerView.class, name = "original"),
    @JsonSubTypes.Type(value = LayerView.AbbreviatedView.class, name = "abbr"),
    @JsonSubTypes.Type(value = LayerView.ClusteringLayerView.class, name = "clustering"),
    @JsonSubTypes.Type(value = LayerView.HashLayerView.class, name = "hash")
})
public interface LayerView extends Serializable {

  /** Whether this view requires aggregated (grouped) query results. */
  boolean needAggregation();

  /** Pass-through view: renders original rows without aggregation. */
  class OriginalLayerView implements LayerView {

    public OriginalLayerView() {
    }

    @Override
    public boolean needAggregation() {
      return false;
    }
  }

  /**
   * View that buckets geo points into spatial hash cells ("geohex" by
   * default) at a configurable precision (1..12, default 4).
   */
  class HashLayerView implements LayerView {

    String method;
    Integer precision;

    public HashLayerView() {
    }

    @JsonCreator
    public HashLayerView(@JsonProperty("method") String method,
                         @JsonProperty("precision") Integer precision) {
      this.method = StringUtils.isEmpty(method) ? "geohex" : method;
      if (precision == null) {
        this.precision = 4;
      } else {
        Preconditions.checkArgument(precision > 0 && precision < 13, "precision value must be between 1 and 12.");
        this.precision = precision;
      }
    }

    /** Builds the druid expression hashing a geo-point field's lat/lon. */
    public String toHashExpression(String fieldName) {
      List<String> pointKeyList = LogicalType.GEO_POINT.getGeoPointKeys();
      StringBuilder builder = new StringBuilder();
      builder.append("to_").append(method).append("(");
      builder.append(fieldName).append(".").append(pointKeyList.get(0)).append(",");
      builder.append(fieldName).append(".").append(pointKeyList.get(1)).append(",");
      builder.append(precision).append(")");
      return builder.toString();
    }

    /** Builds the expression converting a hash cell back to boundary WKT. */
    public String toWktExpression(String hashColumnName, String geoColumnName) {
      StringBuilder builder = new StringBuilder();
      builder.append(geoColumnName).append("=");
      builder.append(method).append("_to_boundary_wkt").append("(");
      builder.append(hashColumnName).append(")");
      return builder.toString();
    }

    /** Aggregations for centroid clustering: sum of lat, sum of lon, count. */
    @JsonIgnore
    public List<Aggregation> getClusteringAggregations(String fieldName) {
      List<String> pointKeyList = LogicalType.GEO_POINT.getGeoPointKeys();
      List<Aggregation> aggregations = Lists.newArrayList();
      aggregations.add(new GenericSumAggregation("SUM_LAT", null, fieldName + "." + pointKeyList.get(0), "double"));
      aggregations.add(new GenericSumAggregation("SUM_LON", null, fieldName + "." + pointKeyList.get(1), "double"));
      aggregations.add(new CountAggregation("count"));
      return aggregations;
    }

    /** Post-aggregation emitting the cluster centroid as a WKT POINT. */
    @JsonIgnore
    public List<PostAggregation> getClusteringPostAggregations(String geoName) {
      String expr = geoName + " = concat(\'POINT(\', SUM_LON/count, \' \' , SUM_LAT/count, \')\')";
      return Lists.newArrayList(new ExprPostAggregator(expr));
    }

    @Override
    public boolean needAggregation() {
      return true;
    }

    public String getMethod() {
      return method;
    }

    public Integer getPrecision() {
      return precision;
    }
  }

  /** Hash view that additionally relays one representative row per cell. */
  class AbbreviatedView extends HashLayerView implements LayerView {

    RelayAggregation.RelayType relayType;

    @JsonCreator
    public AbbreviatedView(@JsonProperty("method") String method,
                           @JsonProperty("precision") Integer precision,
                           @JsonProperty("relayType") String relayType) {
      super(method, precision);
      this.relayType = EnumUtils.getUpperCaseEnum(RelayAggregation.RelayType.class, relayType);
    }

    public RelayAggregation.RelayType getRelayType() {
      return relayType;
    }
  }

  /**
   * Hash view used for point clustering. Fixed: previously declared its
   * own {@code method}/{@code precision} fields that shadowed (and were
   * never read instead of) the parent's — they have been removed; the
   * inherited fields/getters carry the state.
   */
  class ClusteringLayerView extends HashLayerView implements LayerView {

    public ClusteringLayerView() {
    }

    @JsonCreator
    public ClusteringLayerView(@JsonProperty("method") String method,
                               @JsonProperty("precision") Integer precision) {
      super(method, precision);
    }
  }
}
| apache-2.0 |
GoogleCloudPlatform/elcarro-oracle-operator | oracle/controllers/configcontroller/config_controller_test.go | 3923 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package configcontroller
import (
"context"
"fmt"
"sync/atomic"
"testing"
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/GoogleCloudPlatform/elcarro-oracle-operator/oracle/api/v1alpha1"
"github.com/GoogleCloudPlatform/elcarro-oracle-operator/oracle/controllers/testhelpers"
)
var k8sClient client.Client
var k8sManager ctrl.Manager
// Define utility constants for object names and testing timeouts and intervals.
const (
Namespace = "default"
ConfigName = "test-config"
timeout = time.Second * 15
interval = time.Millisecond * 250
instanceCount = 3
)
// TestConfigController boots the functional test suite for the Config
// controller, registering a single ConfigReconciler against the shared
// test manager.
func TestConfigController(t *testing.T) {
	testhelpers.CdToRoot(t)
	testhelpers.RunFunctionalTestSuite(t, &k8sClient, &k8sManager,
		[]*runtime.SchemeBuilder{&v1alpha1.SchemeBuilder.SchemeBuilder},
		"Config controller",
		func() []testhelpers.Reconciler {
			return []testhelpers.Reconciler{
				&ConfigReconciler{
					Client: k8sManager.GetClient(),
					Log:    ctrl.Log.WithName("controllers").WithName("Config"),
					Scheme: k8sManager.GetScheme(),
					Images: map[string]string{"config": "config_image"},
				},
			}
		})
}
// Spec for the Config controller: creating a Config should patch one
// deployment per existing instance; reconciling a missing Config should
// succeed without error.
var _ = Describe("Config controller", func() {
	// Number of deployments the stubbed Patch has been asked to patch;
	// written/read atomically because the reconciler runs concurrently.
	var patchedDeploymentCount uint32
	config := &v1alpha1.Config{
		ObjectMeta: metav1.ObjectMeta{
			Namespace: Namespace,
			Name:      ConfigName,
		},
	}
	configObjectKey := client.ObjectKey{Namespace: Namespace, Name: ConfigName}
	BeforeEach(func() {
		// Stub the package-level Patch hook so no real patch is issued;
		// we only count invocations.
		Patch = func(reconciler *ConfigReconciler, ctx context.Context, object client.Object, patch client.Patch, option ...client.PatchOption) error {
			atomic.AddUint32(&patchedDeploymentCount, 1)
			return nil
		}
	})
	It("Should succeed when config exists", func() {
		createInstances()
		Expect(k8sClient.Create(context.Background(), config)).Should(Succeed())
		createdConfig := &v1alpha1.Config{}
		// Wait until the Config object is visible through the client.
		Eventually(func() bool {
			err := k8sClient.Get(context.Background(), configObjectKey, createdConfig)
			return err == nil
		}, timeout, interval).Should(BeTrue())
		Eventually(func() bool {
			// The Patch function is called once for each deployment that should be patched.
			return atomic.LoadUint32(&patchedDeploymentCount) == instanceCount
		}, timeout, interval).Should(BeTrue())
		Expect(k8sClient.Delete(context.Background(), config)).Should(Succeed())
	})
	It("Should succeed when config doesn't exist", func() {
		reconciler := ConfigReconciler{
			Client: k8sClient,
			Log:    ctrl.Log,
			Scheme: k8sManager.GetScheme(),
			Images: map[string]string{"config": "config_image"},
		}
		// Force the reconciler to run since there's no state change in this test spec that would cause it to run.
		_, err := reconciler.Reconcile(context.Background(), ctrl.Request{NamespacedName: configObjectKey})
		Expect(err).ToNot(HaveOccurred())
	})
})
// createInstances creates instanceCount Instance objects named
// "myinstance-<i>" in the test namespace, failing the spec if any
// creation errors.
func createInstances() {
	for i := 0; i < instanceCount; i++ {
		instance := &v1alpha1.Instance{
			ObjectMeta: metav1.ObjectMeta{
				Name:      fmt.Sprintf("myinstance-%d", i),
				Namespace: Namespace,
			},
			Spec: v1alpha1.InstanceSpec{
				CDBName: fmt.Sprintf("MYDB-%d", i),
			},
		}
		Expect(k8sClient.Create(context.Background(), instance)).Should(Succeed())
	}
}
| apache-2.0 |
stringbean/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/admin/IndexTemplateHttpTest.scala | 2465 | package com.sksamuel.elastic4s.requests.admin
import com.sksamuel.elastic4s.requests.common.RefreshPolicy
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.mockito.MockitoSugar
/**
 * Integration tests for index-template operations against a Docker-run
 * Elasticsearch: create, retrieve, invalid-parameter error handling, and
 * (ignored) template application to matching new indexes.
 */
class IndexTemplateHttpTest
  extends AnyWordSpec
    with MockitoSugar
    with Matchers
    with DockerTests {

  "create template" should {
    "be stored" in {
      client.execute {
        createIndexTemplate("brewery_template", "brew*").mappings(
          mapping().fields(
            textField("name"),
            doubleField("year_founded")
          )
        )
      }.await.result.acknowledged shouldBe true
    }
    "be retrievable" in {
      val resp = client.execute {
        getIndexTemplate("brewery_template")
      }.await
      resp.result.templateFor("brewery_template").indexPatterns shouldBe Seq("brew*")
      resp.result.templateFor("brewery_template").order shouldBe 0
    }
    "return error if the template has invalid parameters" in {
      // "test_analyzer" is undefined, so mapping parsing must fail.
      client.execute {
        createIndexTemplate("brewery_template", "brew*").mappings(
          mapping().fields(
            textField("name"),
            doubleField("year_founded") analyzer "test_analyzer"
          )
        )
      }.await.error.`type` shouldBe "mapper_parsing_exception"
    }
    "apply template to new indexes that match the pattern" ignore {
      // this should match the earlier template of brew*
      client.execute {
        createIndex("brewers")
      }.await
      client.execute {
        indexInto("brewers") fields(
          "name" -> "fullers",
          "year_founded" -> 1829
        ) refresh RefreshPolicy.Immediate
      }.await
      // check that the document was indexed
      client.execute {
        search("brewers") query termQuery("year_founded", 1829)
      }.await.result.totalHits shouldBe 1
      // the mapping for this index should match the template
      //      val properties = http.execute {
      //        getMapping("brewers" / "brands")
      //      }.await.propertiesFor("brewers" / "brands")
      //      val year_founded = properties("year_founded").asInstanceOf[util.Map[String, Any]]
      // note: this field would be long/int if the template wasn't applied, because we indexed an integer.
      // but the template should be applied to override it to a double
      //      year_founded.get("type") shouldBe "double"
    }
  }
}
| apache-2.0 |
roncohen/apm-server | processor/transaction/processor.go | 1232 | package transaction
import (
"encoding/json"
pr "github.com/elastic/apm-server/processor"
"github.com/elastic/beats/libbeat/beat"
"github.com/elastic/beats/libbeat/monitoring"
"github.com/santhosh-tekuri/jsonschema"
)
// Monitoring counters for the transaction processor, registered under
// the "apm-server.processor.transaction" namespace.
var (
	transactionMetrics = monitoring.Default.NewRegistry("apm-server.processor.transaction")
	transformations    = monitoring.NewInt(transactionMetrics, "transformations")
	validationCount    = monitoring.NewInt(transactionMetrics, "validation.count")
	validationError    = monitoring.NewInt(transactionMetrics, "validation.errors")
)

const (
	processorName = "transaction"
)

// Compiled JSON schema used to validate incoming transaction payloads.
var schema = pr.CreateSchema(transactionSchema, processorName)
// NewProcessor returns a transaction processor backed by the
// package-level compiled schema.
func NewProcessor() pr.Processor {
	return &processor{schema: schema}
}
// processor validates and transforms transaction payloads; it holds the
// JSON schema used by Validate.
type processor struct {
	schema *jsonschema.Schema
}
// Validate checks buf against the transaction schema, incrementing the
// validation counter on every call and the error counter on failure.
func (p *processor) Validate(buf []byte) error {
	validationCount.Inc()
	if err := pr.Validate(buf, p.schema); err != nil {
		validationError.Inc()
		return err
	}
	return nil
}
// Transform decodes buf into a payload and converts it to beat events,
// counting every transformation attempt. Returns the unmarshal error
// (with nil events) when decoding fails.
func (p *processor) Transform(buf []byte) ([]beat.Event, error) {
	transformations.Inc()
	var pa payload
	if err := json.Unmarshal(buf, &pa); err != nil {
		return nil, err
	}
	return pa.transform(), nil
}
// Name returns the processor identifier ("transaction").
func (p *processor) Name() string {
	return processorName
}
| apache-2.0 |
qalingo/qalingo-engine | apis/api-core/api-core-common/src/main/java/org/hoteia/qalingo/core/domain/CmsLink.java | 2994 | package org.hoteia.qalingo.core.domain;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Version;
import org.hoteia.qalingo.core.domain.impl.DomainEntity;
@Entity
@Table(name="TCMS_LINK")
public class CmsLink extends AbstractEntity<CmsLink> implements DomainEntity {
/**
* Generated UID
*/
private static final long serialVersionUID = -6464078133126013413L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "ID", nullable = false)
private Long id;
@Version
@Column(name = "VERSION", nullable = false) // , columnDefinition = "int(11) default 1"
private int version;
@Column(name = "NAME")
private String name;
@Column(name = "ALT")
private String alt;
@Column(name = "TYPE")
private String type;
@Column(name = "PARAMS")
private String params;
@Column(name = "EXTERNAL", nullable = false) // , columnDefinition = "tinyint(1) default 0"
private boolean external = false;
@Column(name = "FULL_URL_PATH")
private String fullUrlPath;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "DATE_CREATE")
private Date dateCreate;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "DATE_UPDATE")
private Date dateUpdate;
public CmsLink() {
this.dateCreate = new Date();
this.dateUpdate = new Date();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getAlt() {
return alt;
}
public void setAlt(String alt) {
this.alt = alt;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getParams() {
return params;
}
public void setParams(String params) {
this.params = params;
}
public boolean isExternal() {
return external;
}
public void setExternal(boolean external) {
this.external = external;
}
public String getFullUrlPath() {
return fullUrlPath;
}
public void setFullUrlPath(String fullUrlPath) {
this.fullUrlPath = fullUrlPath;
}
public Date getDateCreate() {
return dateCreate;
}
public void setDateCreate(Date dateCreate) {
this.dateCreate = dateCreate;
}
public Date getDateUpdate() {
return dateUpdate;
}
public void setDateUpdate(Date dateUpdate) {
this.dateUpdate = dateUpdate;
}
} | apache-2.0 |
teisun/SunmiUI | SunmiDesign/sunmiui/src/main/java/sunmi/sunmiui/utils/Adaptation.java | 2442 | package sunmi.sunmiui.utils;
import android.content.Context;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.WindowManager;
/**
* 一些常用的系统配置信息 获取自定义的配置信息 类
*/
public class Adaptation {
public static int screenHeight = 0;
public static int screenWidth = 0;
public static float screenDensity = 0;
public static int densityDpi = 0;
// public static int version = Integer.valueOf(android.os.Build.VERSION.SDK_INT);
public static final int SCREEN_9_16 = 1; // 9:16
public static final int SCREEN_3_4 = 2; // 3:4
public static final int SCREEN_4_3 = 3; // 4:3
public static final int SCREEN_16_9 = 4; // 16:9
public static int proportion = SCREEN_9_16;
// public static final float PROPORTION_9_16 = 0.56f; // 9:16
// public static final float PROPORTION_3_4 = 0.75f; // 3:4
// public static final float PROPORTION_4_3 = 1.33f; // 4:3
// public static final float PROPORTION_16_9 = 1.77f; // 16:9
public static final float AVERAGE1 = 0.655f; // (9:16+3:4)/2
public static final float AVERAGE2 = 1.04f; // (3:4+4:3)/2
public static final float AVERAGE3 = 1.55f; // (4:3+16:9)/2
public static void init(Context context) {
if (screenDensity == 0 || screenWidth == 0 || screenHeight == 0) {
DisplayMetrics dm = new DisplayMetrics();
WindowManager wm = (WindowManager) context
.getSystemService(Context.WINDOW_SERVICE);
wm.getDefaultDisplay().getMetrics(dm);
Adaptation.screenDensity = dm.density;
Adaptation.screenHeight = dm.heightPixels;
Adaptation.screenWidth = dm.widthPixels;
Adaptation.densityDpi = dm.densityDpi;
}
float proportionF = (float) screenWidth / (float) screenHeight;
if (proportionF <= AVERAGE1) {
proportion = SCREEN_9_16;
} else if (proportionF <= AVERAGE2) {
proportion = SCREEN_3_4;
} else if (proportionF <= AVERAGE3) {
proportion = SCREEN_4_3;
} else if (proportionF > AVERAGE3) {
proportion = SCREEN_16_9;
}
Log.i("SCREEN CONFIG", "screenHeight:" + screenHeight + ";screenWidth:"
+ screenWidth + ";screenDensity:" + screenDensity
+ ";densityDpi:" + densityDpi);
}
} | apache-2.0 |
zoneXcoding/Mineworld | src/main/java/org/terasology/rendering/shader/ShaderParametersGenericMesh.java | 1536 | /*
* Copyright 2012 Benjamin Glatzel <benjamin.glatzel@me.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.shader;
import static org.lwjgl.opengl.GL11.glBindTexture;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL13;
import org.terasology.asset.Assets;
import org.terasology.game.CoreRegistry;
import org.terasology.logic.LocalPlayer;
import org.terasology.rendering.assets.Texture;
/**
* Shader parameters for the Gel. Cube shader program.
*
* @author Benjamin Glatzel <benjamin.glatzel@me.com>
*/
public class ShaderParametersGenericMesh extends ShaderParametersBase {

    // Texture bound to texture unit 0 when the shader is applied.
    private Texture texture = Assets.getTexture("engine:mhead");

    @Override
    public void applyParameters(ShaderProgram program) {
        super.applyParameters(program);

        // Bind the mesh texture on unit 0 for the shader to sample.
        GL13.glActiveTexture(GL13.GL_TEXTURE0);
        glBindTexture(GL11.GL_TEXTURE_2D, texture.getId());

        // No color tint (white) and enable textured rendering.
        program.setFloat3("colorOffset", 1.0f, 1.0f, 1.0f);
        program.setInt("textured", 1);
    }
}
| apache-2.0 |
blamb102/jupyter-aws-console | src/app/service/cognito.service.ts | 8503 | import {Injectable, Inject} from "@angular/core";
import {DynamoDBService} from "./ddb.service";
import {environment} from "../../environments/environment";
/**
* Created by Vladimir Budilov
*/
declare var AWSCognito: any;
declare var AWS: any;
/** Callback for Cognito operations: receives an error/status message and the operation result. */
export interface CognitoCallback {
    cognitoCallback(message: string, result: any): void;
}

/** Callback notified with the login check outcome. */
export interface LoggedInCallback {
    isLoggedIn(message: string, loggedIn: boolean): void;
}

/** Generic callback pair: no-arg completion and single-result variants. */
export interface Callback {
    callback(): void;

    callbackWithParam(result: any): void;
}
/**
 * Thin wrapper around the global AWS / AWSCognito SDK objects: user-pool
 * construction, current-user lookup, and retrieval of the current session's
 * access / id / refresh tokens.
 *
 * All token getters report through `callback.callbackWithParam(...)`:
 * the token on success, or null when there is no user, the session lookup
 * fails, or the session is no longer valid.
 */
@Injectable()
export class CognitoUtil {
    public static _REGION = environment.region;
    public static _IDENTITY_POOL_ID = environment.identityPoolId;
    public static _USER_POOL_ID = environment.userPoolId;
    public static _CLIENT_ID = environment.clientId;
    public static _POOL_DATA = {
        UserPoolId: CognitoUtil._USER_POOL_ID,
        ClientId: CognitoUtil._CLIENT_ID
    };
    public static getAwsCognito(): any {
        return AWSCognito
    }
    getUserPool() {
        return new AWSCognito.CognitoIdentityServiceProvider.CognitoUserPool(CognitoUtil._POOL_DATA);
    }
    getCurrentUser() {
        return this.getUserPool().getCurrentUser();
    }
    getCognitoIdentity(): string {
        return AWS.config.credentials.identityId;
    }
    getAccessToken(callback: Callback): void {
        if (callback == null) {
            throw("CognitoUtil: callback in getAccessToken is null...returning");
        }
        if (this.getCurrentUser() != null)
            this.getCurrentUser().getSession(function (err, session) {
                if (err) {
                    console.log("CognitoUtil: Can't set the credentials:" + err);
                    callback.callbackWithParam(null);
                }
                else {
                    if (session.isValid()) {
                        callback.callbackWithParam(session.getAccessToken().getJwtToken());
                    } else {
                        // Fix: previously nothing was invoked on an invalid session,
                        // leaving the caller waiting forever. Report "no token".
                        callback.callbackWithParam(null);
                    }
                }
            });
        else
            callback.callbackWithParam(null);
    }
    getIdToken(callback: Callback): void {
        if (callback == null) {
            throw("CognitoUtil: callback in getIdToken is null...returning");
        }
        if (this.getCurrentUser() != null)
            this.getCurrentUser().getSession(function (err, session) {
                if (err) {
                    console.log("CognitoUtil: Can't set the credentials:" + err);
                    callback.callbackWithParam(null);
                }
                else {
                    if (session.isValid()) {
                        callback.callbackWithParam(session.getIdToken().getJwtToken());
                    } else {
                        console.log("CognitoUtil: Got the id token, but the session isn't valid");
                        // Fix: also notify the caller instead of only logging.
                        callback.callbackWithParam(null);
                    }
                }
            });
        else
            callback.callbackWithParam(null);
    }
    getRefreshToken(callback: Callback): void {
        if (callback == null) {
            throw("CognitoUtil: callback in getRefreshToken is null...returning");
        }
        if (this.getCurrentUser() != null)
            this.getCurrentUser().getSession(function (err, session) {
                if (err) {
                    console.log("CognitoUtil: Can't set the credentials:" + err);
                    callback.callbackWithParam(null);
                }
                else {
                    if (session.isValid()) {
                        callback.callbackWithParam(session.getRefreshToken());
                    } else {
                        // Fix: previously nothing was invoked on an invalid session.
                        callback.callbackWithParam(null);
                    }
                }
            });
        else
            callback.callbackWithParam(null);
    }
    // Forces a session lookup purely for its side effect; logs the outcome.
    refresh(): void {
        this.getCurrentUser().getSession(function (err, session) {
            if (err) {
                console.log("CognitoUtil: Can't set the credentials:" + err);
            }
            else {
                if (session.isValid()) {
                    console.log("CognitoUtil: refreshed successfully");
                } else {
                    console.log("CognitoUtil: refreshed but session is still not valid");
                }
            }
        });
    }
}
/**
 * Authenticates users against the Cognito user pool and, on success, trades
 * the returned id token for AWS credentials through the identity pool.
 */
@Injectable()
export class UserLoginService {
    constructor(public ddb: DynamoDBService, public cognitoUtil: CognitoUtil) {
    }
    /**
     * Runs the username/password authentication flow against the user pool.
     * Reports through callback.cognitoCallback(message, result): message is
     * null on success, result is null on failure. Returns nothing directly.
     */
    authenticate(username: string, password: string, callback: CognitoCallback) {
        console.log("UserLoginService: starting the authentication")
        // Need to provide placeholder keys unless unauthorised user access is enabled for user pool
        AWSCognito.config.update({accessKeyId: 'anything', secretAccessKey: 'anything'})
        let authenticationData = {
            Username: username,
            Password: password,
        };
        let authenticationDetails = new AWSCognito.CognitoIdentityServiceProvider.AuthenticationDetails(authenticationData);
        let userData = {
            Username: username,
            Pool: this.cognitoUtil.getUserPool()
        };
        console.log("UserLoginService: Params set...Authenticating the user");
        let cognitoUser = new AWSCognito.CognitoIdentityServiceProvider.CognitoUser(userData);
        console.log("UserLoginService: config is " + AWS.config);
        cognitoUser.authenticateUser(authenticationDetails, {
            onSuccess: function (result) {
                // Map the user-pool issuer URL to the fresh id token so the
                // identity pool can mint AWS credentials for this user.
                var logins = {}
                logins['cognito-idp.' + CognitoUtil._REGION + '.amazonaws.com/' + CognitoUtil._USER_POOL_ID] = result.getIdToken().getJwtToken();
                // Add the User's Id Token to the Cognito credentials login map.
                AWS.config.credentials = new AWS.CognitoIdentityCredentials({
                    IdentityPoolId: CognitoUtil._IDENTITY_POOL_ID,
                    Logins: logins
                });
                console.log("UserLoginService: set the AWS credentials - " + JSON.stringify(AWS.config.credentials));
                console.log("UserLoginService: set the AWSCognito credentials - " + JSON.stringify(AWSCognito.config.credentials));
                // Resolve the credentials (async) before signalling success.
                AWS.config.credentials.get(function (err) {
                    if (!err) {
                        callback.cognitoCallback(null, result);
                    } else {
                        callback.cognitoCallback(err.message, null);
                    }
                });
            },
            onFailure: function (err) {
                callback.cognitoCallback(err.message, null);
            },
        });
    }
    // Writes a "logout" audit entry to DynamoDB, then invalidates the
    // locally cached Cognito session.
    logout() {
        console.log("UserLoginService: Logging out");
        this.ddb.writeLogEntry("logout");
        this.cognitoUtil.getCurrentUser().signOut();
    }
    /**
     * Reports through the callback whether a valid (non-expired) session
     * exists for the locally cached user.
     * NOTE(review): on a session error this passes the Error object where the
     * LoggedInCallback interface declares a string message — confirm callers.
     */
    isAuthenticated(callback: LoggedInCallback) {
        if (callback == null)
            throw("UserLoginService: Callback in isAuthenticated() cannot be null");
        let cognitoUser = this.cognitoUtil.getCurrentUser();
        if (cognitoUser != null) {
            cognitoUser.getSession(function (err, session) {
                if (err) {
                    console.log("UserLoginService: Couldn't get the session: " + err, err.stack);
                    callback.isLoggedIn(err, false);
                }
                else {
                    console.log("UserLoginService: Session is " + session.isValid());
                    callback.isLoggedIn(err, session.isValid());
                }
            });
        } else {
            console.log("UserLoginService: can't retrieve the current user");
            callback.isLoggedIn("Can't retrieve the CurrentUser", false);
        }
    }
}
/**
 * Fetches the current user's Cognito profile attributes and hands them to the
 * supplied callback (null when there is no cached user).
 */
@Injectable()
export class UserParametersService {
    constructor(public cognitoUtil: CognitoUtil) {
    }

    getParameters(callback: Callback) {
        const cognitoUser = this.cognitoUtil.getCurrentUser();
        // No cached user: report "no attributes" immediately.
        if (cognitoUser == null) {
            callback.callbackWithParam(null);
            return;
        }
        cognitoUser.getSession(function (err, session) {
            if (err) {
                console.log("UserParametersService: Couldn't retrieve the user");
                return;
            }
            // A session exists; ask Cognito for the user's attribute list.
            cognitoUser.getUserAttributes(function (err, result) {
                if (err) {
                    console.log("UserParametersService: in getParameters: " + err);
                } else {
                    callback.callbackWithParam(result);
                }
            });
        });
    }
}
| apache-2.0 |
andrenpaes/killbill-adyen-plugin | src/main/java/org/killbill/billing/plugin/adyen/client/model/PaymentInfo.java | 16205 | /*
* Copyright 2014-2016 Groupon, Inc
* Copyright 2014-2016 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.adyen.client.model;
import java.util.Map;
import java.util.Objects;
public class PaymentInfo {
private Integer captureDelayHours;
private Integer installments;
private String contract;
private String shopperInteraction;
private String shopperStatement;
// 3D Secure
private Long threeDThreshold;
private String acceptHeader;
private String userAgent;
private String md;
private String paRes;
private String mpiDataDirectoryResponse;
private String mpiDataAuthenticationResponse;
private String mpiDataCavv;
private String mpiDataCavvAlgorithm;
private String mpiDataXid;
private String mpiDataEci;
private String mpiImplementationType;
private Map<String, String> mpiImplementationTypeValues;
private String termUrl;
// Billing Address
private String street;
private String houseNumberOrName;
private String city;
private String postalCode;
private String stateOrProvince;
private String country;
// Special fields
private String acquirer;
private String acquirerMID;
private String selectedBrand;
public Integer getCaptureDelayHours() {
return captureDelayHours;
}
public void setCaptureDelayHours(final Integer captureDelayHours) {
this.captureDelayHours = captureDelayHours;
}
public Integer getInstallments() {
return installments;
}
public void setInstallments(final Integer installments) {
this.installments = installments;
}
public String getContract() {
return contract;
}
public void setContract(final String contract) {
this.contract = contract;
}
public String getShopperInteraction() {
return shopperInteraction;
}
public void setShopperInteraction(final String shopperInteraction) {
this.shopperInteraction = shopperInteraction;
}
public String getShopperStatement() {
return shopperStatement;
}
public void setShopperStatement(final String shopperStatement) {
this.shopperStatement = shopperStatement;
}
public Long getThreeDThreshold() {
return threeDThreshold;
}
public void setThreeDThreshold(final Long threeDThreshold) {
this.threeDThreshold = threeDThreshold;
}
public String getAcceptHeader() {
return acceptHeader;
}
public void setAcceptHeader(final String acceptHeader) {
this.acceptHeader = acceptHeader;
}
public String getUserAgent() {
return userAgent;
}
public void setUserAgent(final String userAgent) {
this.userAgent = userAgent;
}
public String getMd() {
return md;
}
public void setMd(final String md) {
this.md = md;
}
public String getPaRes() {
return paRes;
}
public void setPaRes(final String paRes) {
this.paRes = paRes;
}
public String getMpiDataDirectoryResponse() {
return mpiDataDirectoryResponse;
}
public void setMpiDataDirectoryResponse(final String mpiDataDirectoryResponse) {
this.mpiDataDirectoryResponse = mpiDataDirectoryResponse;
}
public String getMpiDataAuthenticationResponse() {
return mpiDataAuthenticationResponse;
}
public void setMpiDataAuthenticationResponse(final String mpiDataAuthenticationResponse) {
this.mpiDataAuthenticationResponse = mpiDataAuthenticationResponse;
}
public String getMpiDataCavv() {
return mpiDataCavv;
}
public void setMpiDataCavv(final String mpiDataCavv) {
this.mpiDataCavv = mpiDataCavv;
}
public String getMpiDataCavvAlgorithm() {
return mpiDataCavvAlgorithm;
}
public void setMpiDataCavvAlgorithm(final String mpiDataCavvAlgorithm) {
this.mpiDataCavvAlgorithm = mpiDataCavvAlgorithm;
}
public String getMpiDataXid() {
return mpiDataXid;
}
public void setMpiDataXid(final String mpiDataXid) {
this.mpiDataXid = mpiDataXid;
}
public String getMpiDataEci() {
return mpiDataEci;
}
public void setMpiDataEci(final String mpiDataEci) {
this.mpiDataEci = mpiDataEci;
}
public String getMpiImplementationType() {
return mpiImplementationType;
}
public void setMpiImplementationType(final String mpiImplementationType) {
this.mpiImplementationType = mpiImplementationType;
}
public Map<String, String> getMpiImplementationTypeValues() {
return mpiImplementationTypeValues;
}
public void setMpiImplementationTypeValues(final Map<String, String> mpiImplementationTypeValues) {
this.mpiImplementationTypeValues = mpiImplementationTypeValues;
}
public String getTermUrl() {
return termUrl;
}
public void setTermUrl(final String termUrl) {
this.termUrl = termUrl;
}
public String getStreet() {
return street;
}
public void setStreet(final String street) {
this.street = street;
}
public String getHouseNumberOrName() {
return houseNumberOrName;
}
public void setHouseNumberOrName(final String houseNumberOrName) {
this.houseNumberOrName = houseNumberOrName;
}
public String getCity() {
return city;
}
public void setCity(final String city) {
this.city = city;
}
public String getPostalCode() {
return postalCode;
}
public void setPostalCode(final String postalCode) {
this.postalCode = postalCode;
}
public String getStateOrProvince() {
return stateOrProvince;
}
public void setStateOrProvince(final String stateOrProvince) {
this.stateOrProvince = stateOrProvince;
}
public String getCountry() {
return country;
}
public void setCountry(final String country) {
this.country = country;
}
public String getAcquirer() {
return acquirer;
}
public void setAcquirer(final String acquirer) {
this.acquirer = acquirer;
}
public String getAcquirerMID() {
return acquirerMID;
}
public void setAcquirerMID(final String acquirerMID) {
this.acquirerMID = acquirerMID;
}
public String getSelectedBrand() {
return selectedBrand;
}
public void setSelectedBrand(final String selectedBrand) {
this.selectedBrand = selectedBrand;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("PaymentInfo{");
sb.append("captureDelayHours=").append(captureDelayHours);
sb.append(", installments=").append(installments);
sb.append(", contract='").append(contract).append('\'');
sb.append(", shopperInteraction='").append(shopperInteraction).append('\'');
sb.append(", shopperStatement='").append(shopperStatement).append('\'');
sb.append(", threeDThreshold=").append(threeDThreshold);
sb.append(", acceptHeader='").append(acceptHeader).append('\'');
sb.append(", userAgent='").append(userAgent).append('\'');
sb.append(", md='").append(md).append('\'');
sb.append(", paRes='").append(paRes).append('\'');
sb.append(", mpiDataDirectoryResponse='").append(mpiDataDirectoryResponse).append('\'');
sb.append(", mpiDataAuthenticationResponse='").append(mpiDataAuthenticationResponse).append('\'');
sb.append(", mpiDataCavv='").append(mpiDataCavv).append('\'');
sb.append(", mpiDataCavvAlgorithm='").append(mpiDataCavvAlgorithm).append('\'');
sb.append(", mpiDataXid='").append(mpiDataXid).append('\'');
sb.append(", mpiDataEci='").append(mpiDataEci).append('\'');
sb.append(", mpiImplementationType='").append(mpiImplementationType).append('\'');
sb.append(", mpiImplementationTypeValues=").append(mpiImplementationTypeValues);
sb.append(", termUrl='").append(termUrl).append('\'');
sb.append(", street='").append(street).append('\'');
sb.append(", houseNumberOrName='").append(houseNumberOrName).append('\'');
sb.append(", city='").append(city).append('\'');
sb.append(", postalCode='").append(postalCode).append('\'');
sb.append(", stateOrProvince='").append(stateOrProvince).append('\'');
sb.append(", country='").append(country).append('\'');
sb.append(", acquirer='").append(acquirer).append('\'');
sb.append(", acquirerMID='").append(acquirerMID).append('\'');
sb.append(", selectedBrand='").append(selectedBrand).append('\'');
sb.append('}');
return sb.toString();
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final PaymentInfo that = (PaymentInfo) o;
if (captureDelayHours != null ? !captureDelayHours.equals(that.captureDelayHours) : that.captureDelayHours != null) {
return false;
}
if (installments != null ? !installments.equals(that.installments) : that.installments != null) {
return false;
}
if (contract != null ? !contract.equals(that.contract) : that.contract != null) {
return false;
}
if (shopperInteraction != null ? !shopperInteraction.equals(that.shopperInteraction) : that.shopperInteraction != null) {
return false;
}
if (shopperStatement != null ? !shopperStatement.equals(that.shopperStatement) : that.shopperStatement != null) {
return false;
}
if (threeDThreshold != null ? !threeDThreshold.equals(that.threeDThreshold) : that.threeDThreshold != null) {
return false;
}
if (acceptHeader != null ? !acceptHeader.equals(that.acceptHeader) : that.acceptHeader != null) {
return false;
}
if (userAgent != null ? !userAgent.equals(that.userAgent) : that.userAgent != null) {
return false;
}
if (md != null ? !md.equals(that.md) : that.md != null) {
return false;
}
if (paRes != null ? !paRes.equals(that.paRes) : that.paRes != null) {
return false;
}
if (mpiDataDirectoryResponse != null ? !mpiDataDirectoryResponse.equals(that.mpiDataDirectoryResponse) : that.mpiDataDirectoryResponse != null) {
return false;
}
if (mpiDataAuthenticationResponse != null ? !mpiDataAuthenticationResponse.equals(that.mpiDataAuthenticationResponse) : that.mpiDataAuthenticationResponse != null) {
return false;
}
if (mpiDataCavv != null ? !mpiDataCavv.equals(that.mpiDataCavv) : that.mpiDataCavv != null) {
return false;
}
if (mpiDataCavvAlgorithm != null ? !mpiDataCavvAlgorithm.equals(that.mpiDataCavvAlgorithm) : that.mpiDataCavvAlgorithm != null) {
return false;
}
if (mpiDataXid != null ? !mpiDataXid.equals(that.mpiDataXid) : that.mpiDataXid != null) {
return false;
}
if (mpiDataEci != null ? !mpiDataEci.equals(that.mpiDataEci) : that.mpiDataEci != null) {
return false;
}
if (mpiImplementationType != null ? !mpiImplementationType.equals(that.mpiImplementationType) : that.mpiImplementationType != null) {
return false;
}
if (mpiImplementationTypeValues != null ? !mpiImplementationTypeValues.equals(that.mpiImplementationTypeValues) : that.mpiImplementationTypeValues != null) {
return false;
}
if (termUrl != null ? !termUrl.equals(that.termUrl) : that.termUrl != null) {
return false;
}
if (street != null ? !street.equals(that.street) : that.street != null) {
return false;
}
if (houseNumberOrName != null ? !houseNumberOrName.equals(that.houseNumberOrName) : that.houseNumberOrName != null) {
return false;
}
if (city != null ? !city.equals(that.city) : that.city != null) {
return false;
}
if (postalCode != null ? !postalCode.equals(that.postalCode) : that.postalCode != null) {
return false;
}
if (stateOrProvince != null ? !stateOrProvince.equals(that.stateOrProvince) : that.stateOrProvince != null) {
return false;
}
if (country != null ? !country.equals(that.country) : that.country != null) {
return false;
}
if (acquirer != null ? !acquirer.equals(that.acquirer) : that.acquirer != null) {
return false;
}
if (acquirerMID != null ? !acquirerMID.equals(that.acquirerMID) : that.acquirerMID != null) {
return false;
}
return selectedBrand != null ? selectedBrand.equals(that.selectedBrand) : that.selectedBrand == null;
}
@Override
public int hashCode() {
int result = captureDelayHours != null ? captureDelayHours.hashCode() : 0;
result = 31 * result + (installments != null ? installments.hashCode() : 0);
result = 31 * result + (contract != null ? contract.hashCode() : 0);
result = 31 * result + (shopperInteraction != null ? shopperInteraction.hashCode() : 0);
result = 31 * result + (shopperStatement != null ? shopperStatement.hashCode() : 0);
result = 31 * result + (threeDThreshold != null ? threeDThreshold.hashCode() : 0);
result = 31 * result + (acceptHeader != null ? acceptHeader.hashCode() : 0);
result = 31 * result + (userAgent != null ? userAgent.hashCode() : 0);
result = 31 * result + (md != null ? md.hashCode() : 0);
result = 31 * result + (paRes != null ? paRes.hashCode() : 0);
result = 31 * result + (mpiDataDirectoryResponse != null ? mpiDataDirectoryResponse.hashCode() : 0);
result = 31 * result + (mpiDataAuthenticationResponse != null ? mpiDataAuthenticationResponse.hashCode() : 0);
result = 31 * result + (mpiDataCavv != null ? mpiDataCavv.hashCode() : 0);
result = 31 * result + (mpiDataCavvAlgorithm != null ? mpiDataCavvAlgorithm.hashCode() : 0);
result = 31 * result + (mpiDataXid != null ? mpiDataXid.hashCode() : 0);
result = 31 * result + (mpiDataEci != null ? mpiDataEci.hashCode() : 0);
result = 31 * result + (mpiImplementationType != null ? mpiImplementationType.hashCode() : 0);
result = 31 * result + (mpiImplementationTypeValues != null ? mpiImplementationTypeValues.hashCode() : 0);
result = 31 * result + (termUrl != null ? termUrl.hashCode() : 0);
result = 31 * result + (street != null ? street.hashCode() : 0);
result = 31 * result + (houseNumberOrName != null ? houseNumberOrName.hashCode() : 0);
result = 31 * result + (city != null ? city.hashCode() : 0);
result = 31 * result + (postalCode != null ? postalCode.hashCode() : 0);
result = 31 * result + (stateOrProvince != null ? stateOrProvince.hashCode() : 0);
result = 31 * result + (country != null ? country.hashCode() : 0);
result = 31 * result + (acquirer != null ? acquirer.hashCode() : 0);
result = 31 * result + (acquirerMID != null ? acquirerMID.hashCode() : 0);
result = 31 * result + (selectedBrand != null ? selectedBrand.hashCode() : 0);
return result;
}
}
| apache-2.0 |
dataplumber/nexus | data-access/tests/sizefromcass.py | 532 | """
Copyright (c) 2016 Jet Propulsion Laboratory,
California Institute of Technology. All rights reserved
"""
# Ad-hoc diagnostic script: connects to Cassandra via the NexusTiles
# CassandraProxy and prints the size (in bytes) of the blob stored for one
# hard-coded tile id.
# NOTE(review): Python 2 only (ConfigParser module name, print statement).
import pyximport
# pyximport.install() must run before the nexustiles import below so that any
# Cython sources it ships are compiled/loaded transparently on import.
pyximport.install()
import ConfigParser
import pkg_resources
from nexustiles.dao.CassandraProxy import CassandraProxy
# Read the datastore connection settings bundled with this test package.
config = ConfigParser.RawConfigParser()
config.readfp(pkg_resources.resource_stream(__name__, "config/datastores.ini"), filename='datastores.ini')
cass = CassandraProxy(config)
# Fetch the single tile and report the length of its serialized blob.
tiles = cass.fetch_nexus_tiles('d9b5afe3-bd7f-3824-ad8a-d8d3b364689c')
print len(tiles[0].tile_blob)
| apache-2.0 |
tgraf/cilium | pkg/service/store/logfields.go | 786 | // Copyright 2020 Authors of Cilium
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package store

import (
	"github.com/cilium/cilium/pkg/logging"
	"github.com/cilium/cilium/pkg/logging/logfields"
)

// log is the package-level logger; every entry is tagged with the "service"
// subsystem field so it can be filtered with the rest of the service logs.
var log = logging.DefaultLogger.WithField(logfields.LogSubsys, "service")
| apache-2.0 |
googleapis/google-api-nodejs-client | src/apis/blogger/v2.ts | 55798 | // Copyright 2020 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/class-name-casing */
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable @typescript-eslint/no-empty-interface */
/* eslint-disable @typescript-eslint/no-namespace */
/* eslint-disable no-irregular-whitespace */
import {
OAuth2Client,
JWT,
Compute,
UserRefreshClient,
BaseExternalAccountClient,
GaxiosPromise,
GoogleConfigurable,
createAPIRequest,
MethodOptions,
StreamMethodOptions,
GlobalOptions,
GoogleAuth,
BodyResponseCallback,
APIRequestContext,
} from 'googleapis-common';
import {Readable} from 'stream';
export namespace blogger_v2 {
export interface Options extends GlobalOptions {
version: 'v2';
}
interface StandardParameters {
/**
* Auth client or API Key for the request
*/
auth?:
| string
| OAuth2Client
| JWT
| Compute
| UserRefreshClient
| BaseExternalAccountClient
| GoogleAuth;
/**
* V1 error format.
*/
'$.xgafv'?: string;
/**
* OAuth access token.
*/
access_token?: string;
/**
* Data format for response.
*/
alt?: string;
/**
* JSONP
*/
callback?: string;
/**
* Selector specifying which fields to include in a partial response.
*/
fields?: string;
/**
* API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
*/
key?: string;
/**
* OAuth 2.0 token for the current user.
*/
oauth_token?: string;
/**
* Returns response with indentations and line breaks.
*/
prettyPrint?: boolean;
/**
* Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
*/
quotaUser?: string;
/**
* Legacy upload protocol for media (e.g. "media", "multipart").
*/
uploadType?: string;
/**
* Upload protocol for media (e.g. "raw", "multipart").
*/
upload_protocol?: string;
}
/**
* Blogger API v3
*
* The Blogger API provides access to posts, comments and pages of a Blogger blog.
*
* @example
* ```js
* const {google} = require('googleapis');
* const blogger = google.blogger('v2');
* ```
*/
export class Blogger {
context: APIRequestContext;
blogs: Resource$Blogs;
comments: Resource$Comments;
pages: Resource$Pages;
posts: Resource$Posts;
users: Resource$Users;
constructor(options: GlobalOptions, google?: GoogleConfigurable) {
this.context = {
_options: options || {},
google,
};
this.blogs = new Resource$Blogs(this.context);
this.comments = new Resource$Comments(this.context);
this.pages = new Resource$Pages(this.context);
this.posts = new Resource$Posts(this.context);
this.users = new Resource$Users(this.context);
}
}
export interface Schema$Blog {
/**
* The JSON custom meta-data for the Blog.
*/
customMetaData?: string | null;
/**
* The description of this blog. This is displayed underneath the title.
*/
description?: string | null;
/**
* The identifier for this resource.
*/
id?: string | null;
/**
* The kind of this entry. Always blogger#blog.
*/
kind?: string | null;
/**
* The locale this Blog is set to.
*/
locale?: {country?: string; language?: string; variant?: string} | null;
/**
* The name of this blog. This is displayed as the title.
*/
name?: string | null;
/**
* The container of pages in this blog.
*/
pages?: {selfLink?: string; totalItems?: number} | null;
/**
* The container of posts in this blog.
*/
posts?: {
items?: Schema$Post[];
selfLink?: string;
totalItems?: number;
} | null;
/**
* RFC 3339 date-time when this blog was published.
*/
published?: string | null;
/**
* The API REST URL to fetch this resource from.
*/
selfLink?: string | null;
/**
* The status of the blog.
*/
status?: string | null;
/**
* RFC 3339 date-time when this blog was last updated.
*/
updated?: string | null;
/**
* The URL where this blog is published.
*/
url?: string | null;
}
export interface Schema$BlogList {
/**
* Admin level list of blog per-user information.
*/
blogUserInfos?: Schema$BlogUserInfo[];
/**
* The list of Blogs this user has Authorship or Admin rights over.
*/
items?: Schema$Blog[];
/**
* The kind of this entity. Always blogger#blogList.
*/
kind?: string | null;
}
export interface Schema$BlogPerUserInfo {
/**
* ID of the Blog resource.
*/
blogId?: string | null;
/**
* True if the user has Admin level access to the blog.
*/
hasAdminAccess?: boolean | null;
/**
* The kind of this entity. Always blogger#blogPerUserInfo.
*/
kind?: string | null;
/**
* The Photo Album Key for the user when adding photos to the blog.
*/
photosAlbumKey?: string | null;
/**
* Access permissions that the user has for the blog (ADMIN, AUTHOR, or READER).
*/
role?: string | null;
/**
* ID of the User.
*/
userId?: string | null;
}
export interface Schema$BlogUserInfo {
/**
* The Blog resource.
*/
blog?: Schema$Blog;
/**
* Information about a User for the Blog.
*/
blog_user_info?: Schema$BlogPerUserInfo;
/**
* The kind of this entity. Always blogger#blogUserInfo.
*/
kind?: string | null;
}
export interface Schema$Comment {
/**
* The author of this Comment.
*/
author?: {
displayName?: string;
id?: string;
image?: {url?: string};
url?: string;
} | null;
/**
* Data about the blog containing this comment.
*/
blog?: {id?: string} | null;
/**
* The actual content of the comment. May include HTML markup.
*/
content?: string | null;
/**
* The identifier for this resource.
*/
id?: string | null;
/**
* Data about the comment this is in reply to.
*/
inReplyTo?: {id?: string} | null;
/**
* The kind of this entry. Always blogger#comment.
*/
kind?: string | null;
/**
* Data about the post containing this comment.
*/
post?: {id?: string} | null;
/**
* RFC 3339 date-time when this comment was published.
*/
published?: string | null;
/**
* The API REST URL to fetch this resource from.
*/
selfLink?: string | null;
/**
* The status of the comment (only populated for admin users).
*/
status?: string | null;
/**
* RFC 3339 date-time when this comment was last updated.
*/
updated?: string | null;
}
export interface Schema$CommentList {
/**
* Etag of the response.
*/
etag?: string | null;
/**
* The List of Comments for a Post.
*/
items?: Schema$Comment[];
/**
* The kind of this entry. Always blogger#commentList.
*/
kind?: string | null;
/**
* Pagination token to fetch the next page, if one exists.
*/
nextPageToken?: string | null;
/**
* Pagination token to fetch the previous page, if one exists.
*/
prevPageToken?: string | null;
}
export interface Schema$Page {
/**
* The author of this Page.
*/
author?: {
displayName?: string;
id?: string;
image?: {url?: string};
url?: string;
} | null;
/**
* Data about the blog containing this Page.
*/
blog?: {id?: string} | null;
/**
* The body content of this Page, in HTML.
*/
content?: string | null;
/**
* Etag of the resource.
*/
etag?: string | null;
/**
* The identifier for this resource.
*/
id?: string | null;
/**
* The kind of this entity. Always blogger#page.
*/
kind?: string | null;
/**
* RFC 3339 date-time when this Page was published.
*/
published?: string | null;
/**
* The API REST URL to fetch this resource from.
*/
selfLink?: string | null;
/**
* The status of the page for admin resources (either LIVE or DRAFT).
*/
status?: string | null;
/**
* The title of this entity. This is the name displayed in the Admin user interface.
*/
title?: string | null;
/**
* RFC 3339 date-time when this Page was last updated.
*/
updated?: string | null;
/**
* The URL that this Page is displayed at.
*/
url?: string | null;
}
export interface Schema$PageList {
/**
* Etag of the response.
*/
etag?: string | null;
/**
* The list of Pages for a Blog.
*/
items?: Schema$Page[];
/**
* The kind of this entity. Always blogger#pageList.
*/
kind?: string | null;
/**
* Pagination token to fetch the next page, if one exists.
*/
nextPageToken?: string | null;
}
export interface Schema$Post {
/**
* The author of this Post.
*/
author?: {
displayName?: string;
id?: string;
image?: {url?: string};
url?: string;
} | null;
/**
* Data about the blog containing this Post.
*/
blog?: {id?: string} | null;
/**
* The content of the Post. May contain HTML markup.
*/
content?: string | null;
/**
* The JSON meta-data for the Post.
*/
customMetaData?: string | null;
/**
* Etag of the resource.
*/
etag?: string | null;
/**
* The identifier of this Post.
*/
id?: string | null;
/**
* Display image for the Post.
*/
images?: Array<{url?: string}> | null;
/**
* The kind of this entity. Always blogger#post.
*/
kind?: string | null;
/**
* The list of labels this Post was tagged with.
*/
labels?: string[] | null;
/**
* The location for geotagged posts.
*/
location?: {
lat?: number;
lng?: number;
name?: string;
span?: string;
} | null;
/**
* RFC 3339 date-time when this Post was published.
*/
published?: string | null;
/**
* Comment control and display setting for readers of this post.
*/
readerComments?: string | null;
/**
* The container of comments on this Post.
*/
replies?: {
items?: Schema$Comment[];
selfLink?: string;
totalItems?: string;
} | null;
/**
* The API REST URL to fetch this resource from.
*/
selfLink?: string | null;
/**
* Status of the post. Only set for admin-level requests.
*/
status?: string | null;
/**
* The title of the Post.
*/
title?: string | null;
/**
* The title link URL, similar to atom's related link.
*/
titleLink?: string | null;
/**
* RFC 3339 date-time when this Post was last updated.
*/
updated?: string | null;
/**
* The URL where this Post is displayed.
*/
url?: string | null;
}
export interface Schema$PostList {
/**
* Etag of the response.
*/
etag?: string | null;
/**
* The list of Posts for this Blog.
*/
items?: Schema$Post[];
/**
* The kind of this entity. Always blogger#postList.
*/
kind?: string | null;
/**
* Pagination token to fetch the next page, if one exists.
*/
nextPageToken?: string | null;
/**
* Pagination token to fetch the previous page, if one exists.
*/
prevPageToken?: string | null;
}
export interface Schema$User {
/**
* Profile summary information.
*/
about?: string | null;
/**
* The container of blogs for this user.
*/
blogs?: {selfLink?: string} | null;
/**
* The timestamp of when this profile was created, in seconds since epoch.
*/
created?: string | null;
/**
* The display name.
*/
displayName?: string | null;
/**
* The identifier for this User.
*/
id?: string | null;
/**
* The kind of this entity. Always blogger#user.
*/
kind?: string | null;
/**
* This user's locale
*/
locale?: {country?: string; language?: string; variant?: string} | null;
/**
* The API REST URL to fetch this resource from.
*/
selfLink?: string | null;
/**
* The user's profile page.
*/
url?: string | null;
}
  /**
   * Blogger v2 "blogs" resource: read-only access to blogs (`get`, `list`).
   */
  export class Resource$Blogs {
    context: APIRequestContext;
    constructor(context: APIRequestContext) {
      this.context = context;
    }
    /**
     * Gets a blog by id.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.blogs.get({
     *     blogId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "customMetaData": "my_customMetaData",
     *   //   "description": "my_description",
     *   //   "id": "my_id",
     *   //   "kind": "my_kind",
     *   //   "locale": {},
     *   //   "name": "my_name",
     *   //   "pages": {},
     *   //   "posts": {},
     *   //   "published": "my_published",
     *   //   "selfLink": "my_selfLink",
     *   //   "status": "my_status",
     *   //   "updated": "my_updated",
     *   //   "url": "my_url"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    get(
      params: Params$Resource$Blogs$Get,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    get(
      params?: Params$Resource$Blogs$Get,
      options?: MethodOptions
    ): GaxiosPromise<Schema$Blog>;
    get(
      params: Params$Resource$Blogs$Get,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    get(
      params: Params$Resource$Blogs$Get,
      options: MethodOptions | BodyResponseCallback<Schema$Blog>,
      callback: BodyResponseCallback<Schema$Blog>
    ): void;
    get(
      params: Params$Resource$Blogs$Get,
      callback: BodyResponseCallback<Schema$Blog>
    ): void;
    get(callback: BodyResponseCallback<Schema$Blog>): void;
    get(
      paramsOrCallback?:
        | Params$Resource$Blogs$Get
        | BodyResponseCallback<Schema$Blog>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$Blog>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$Blog>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$Blog> | GaxiosPromise<Readable> {
      // Disambiguate the overloads: the caller may have omitted params
      // and/or options, so shift the arguments into their real roles.
      let params = (paramsOrCallback || {}) as Params$Resource$Blogs$Get;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as get(callback): neither params nor options were given.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Blogs$Get;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as get(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL (e.g. for tests or proxies).
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // The regex collapses duplicate slashes from the URL join,
            // but not the "//" that follows the protocol's ":".
            url: (rootUrl + '/v2/blogs/{blogId}').replace(/([^:]\/)\/+/g, '$1'),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId'],
        pathParams: ['blogId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$Blog>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$Blog>(parameters);
      }
    }
    /**
     * Lists blogs by user id, possibly filtered.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.blogs.list({
     *     userId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "blogUserInfos": [],
     *   //   "items": [],
     *   //   "kind": "my_kind"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    list(
      params: Params$Resource$Blogs$List,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    list(
      params?: Params$Resource$Blogs$List,
      options?: MethodOptions
    ): GaxiosPromise<Schema$BlogList>;
    list(
      params: Params$Resource$Blogs$List,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    list(
      params: Params$Resource$Blogs$List,
      options: MethodOptions | BodyResponseCallback<Schema$BlogList>,
      callback: BodyResponseCallback<Schema$BlogList>
    ): void;
    list(
      params: Params$Resource$Blogs$List,
      callback: BodyResponseCallback<Schema$BlogList>
    ): void;
    list(callback: BodyResponseCallback<Schema$BlogList>): void;
    list(
      paramsOrCallback?:
        | Params$Resource$Blogs$List
        | BodyResponseCallback<Schema$BlogList>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$BlogList>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$BlogList>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$BlogList> | GaxiosPromise<Readable> {
      // Disambiguate the overloads (see get() above for the pattern).
      let params = (paramsOrCallback || {}) as Params$Resource$Blogs$List;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as list(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Blogs$List;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as list(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/users/{userId}/blogs').replace(
              /([^:]\/)\/+/g,
              '$1'
            ),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['userId'],
        pathParams: ['userId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$BlogList>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$BlogList>(parameters);
      }
    }
  }
  /**
   * Request parameters for `blogs.get`.
   */
  export interface Params$Resource$Blogs$Get extends StandardParameters {
    /**
     * The ID of the blog to fetch. Required by the request.
     */
    blogId?: string;
  }
  /**
   * Request parameters for `blogs.list`.
   */
  export interface Params$Resource$Blogs$List extends StandardParameters {
    /**
     * The ID of the user whose blogs are listed. Required by the request.
     */
    userId?: string;
  }
  /**
   * Blogger v2 "comments" resource: read-only access to post comments
   * (`get`, `list`).
   */
  export class Resource$Comments {
    context: APIRequestContext;
    constructor(context: APIRequestContext) {
      this.context = context;
    }
    /**
     * Gets a comment by blog id, post id and comment id.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.comments.get({
     *     blogId: 'placeholder-value',
     *
     *     commentId: 'placeholder-value',
     *
     *     postId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "author": {},
     *   //   "blog": {},
     *   //   "content": "my_content",
     *   //   "id": "my_id",
     *   //   "inReplyTo": {},
     *   //   "kind": "my_kind",
     *   //   "post": {},
     *   //   "published": "my_published",
     *   //   "selfLink": "my_selfLink",
     *   //   "status": "my_status",
     *   //   "updated": "my_updated"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    get(
      params: Params$Resource$Comments$Get,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    get(
      params?: Params$Resource$Comments$Get,
      options?: MethodOptions
    ): GaxiosPromise<Schema$Comment>;
    get(
      params: Params$Resource$Comments$Get,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    get(
      params: Params$Resource$Comments$Get,
      options: MethodOptions | BodyResponseCallback<Schema$Comment>,
      callback: BodyResponseCallback<Schema$Comment>
    ): void;
    get(
      params: Params$Resource$Comments$Get,
      callback: BodyResponseCallback<Schema$Comment>
    ): void;
    get(callback: BodyResponseCallback<Schema$Comment>): void;
    get(
      paramsOrCallback?:
        | Params$Resource$Comments$Get
        | BodyResponseCallback<Schema$Comment>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$Comment>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$Comment>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$Comment> | GaxiosPromise<Readable> {
      // Disambiguate the overloads: params and/or options may be missing.
      let params = (paramsOrCallback || {}) as Params$Resource$Comments$Get;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as get(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Comments$Get;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as get(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (
              rootUrl + '/v2/blogs/{blogId}/posts/{postId}/comments/{commentId}'
            ).replace(/([^:]\/)\/+/g, '$1'),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId', 'postId', 'commentId'],
        pathParams: ['blogId', 'commentId', 'postId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$Comment>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$Comment>(parameters);
      }
    }
    /**
     * Lists comments.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.comments.list({
     *     blogId: 'placeholder-value',
     *
     *     fetchBodies: 'placeholder-value',
     *
     *     maxResults: 'placeholder-value',
     *
     *     pageToken: 'placeholder-value',
     *
     *     postId: 'placeholder-value',
     *
     *     startDate: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "etag": "my_etag",
     *   //   "items": [],
     *   //   "kind": "my_kind",
     *   //   "nextPageToken": "my_nextPageToken",
     *   //   "prevPageToken": "my_prevPageToken"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    list(
      params: Params$Resource$Comments$List,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    list(
      params?: Params$Resource$Comments$List,
      options?: MethodOptions
    ): GaxiosPromise<Schema$CommentList>;
    list(
      params: Params$Resource$Comments$List,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    list(
      params: Params$Resource$Comments$List,
      options: MethodOptions | BodyResponseCallback<Schema$CommentList>,
      callback: BodyResponseCallback<Schema$CommentList>
    ): void;
    list(
      params: Params$Resource$Comments$List,
      callback: BodyResponseCallback<Schema$CommentList>
    ): void;
    list(callback: BodyResponseCallback<Schema$CommentList>): void;
    list(
      paramsOrCallback?:
        | Params$Resource$Comments$List
        | BodyResponseCallback<Schema$CommentList>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$CommentList>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$CommentList>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$CommentList> | GaxiosPromise<Readable> {
      // Disambiguate the overloads (same pattern as get() above).
      let params = (paramsOrCallback || {}) as Params$Resource$Comments$List;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as list(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Comments$List;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as list(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (
              rootUrl + '/v2/blogs/{blogId}/posts/{postId}/comments'
            ).replace(/([^:]\/)\/+/g, '$1'),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId', 'postId'],
        pathParams: ['blogId', 'postId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$CommentList>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$CommentList>(parameters);
      }
    }
  }
  /**
   * Request parameters for `comments.get`.
   */
  export interface Params$Resource$Comments$Get extends StandardParameters {
    /**
     * The ID of the blog containing the comment. Required by the request.
     */
    blogId?: string;
    /**
     * The ID of the comment to fetch. Required by the request.
     */
    commentId?: string;
    /**
     * The ID of the post the comment belongs to. Required by the request.
     */
    postId?: string;
  }
  /**
   * Request parameters for `comments.list`.
   */
  export interface Params$Resource$Comments$List extends StandardParameters {
    /**
     * The ID of the blog to list comments from. Required by the request.
     */
    blogId?: string;
    /**
     * Whether comment bodies are included in the response — presumed from the
     * parameter name; confirm against the Blogger API reference.
     */
    fetchBodies?: boolean;
    /**
     * Maximum number of comments to return per page.
     */
    maxResults?: number;
    /**
     * Pagination token from a previous response's nextPageToken/prevPageToken.
     */
    pageToken?: string;
    /**
     * The ID of the post to list comments from. Required by the request.
     */
    postId?: string;
    /**
     * Earliest comment date to fetch — presumably an RFC 3339 timestamp like
     * the schema's other date fields; confirm against the API reference.
     */
    startDate?: string;
  }
  /**
   * Blogger v2 "pages" resource: read-only access to blog pages
   * (`get`, `list`).
   */
  export class Resource$Pages {
    context: APIRequestContext;
    constructor(context: APIRequestContext) {
      this.context = context;
    }
    /**
     * Gets a page by blog id and page id.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.pages.get({
     *     blogId: 'placeholder-value',
     *
     *     pageId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "author": {},
     *   //   "blog": {},
     *   //   "content": "my_content",
     *   //   "etag": "my_etag",
     *   //   "id": "my_id",
     *   //   "kind": "my_kind",
     *   //   "published": "my_published",
     *   //   "selfLink": "my_selfLink",
     *   //   "status": "my_status",
     *   //   "title": "my_title",
     *   //   "updated": "my_updated",
     *   //   "url": "my_url"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    get(
      params: Params$Resource$Pages$Get,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    get(
      params?: Params$Resource$Pages$Get,
      options?: MethodOptions
    ): GaxiosPromise<Schema$Page>;
    get(
      params: Params$Resource$Pages$Get,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    get(
      params: Params$Resource$Pages$Get,
      options: MethodOptions | BodyResponseCallback<Schema$Page>,
      callback: BodyResponseCallback<Schema$Page>
    ): void;
    get(
      params: Params$Resource$Pages$Get,
      callback: BodyResponseCallback<Schema$Page>
    ): void;
    get(callback: BodyResponseCallback<Schema$Page>): void;
    get(
      paramsOrCallback?:
        | Params$Resource$Pages$Get
        | BodyResponseCallback<Schema$Page>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$Page>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$Page>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$Page> | GaxiosPromise<Readable> {
      // Disambiguate the overloads: params and/or options may be missing.
      let params = (paramsOrCallback || {}) as Params$Resource$Pages$Get;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as get(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Pages$Get;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as get(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/blogs/{blogId}/pages/{pageId}').replace(
              /([^:]\/)\/+/g,
              '$1'
            ),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId', 'pageId'],
        pathParams: ['blogId', 'pageId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$Page>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$Page>(parameters);
      }
    }
    /**
     * Lists pages.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.pages.list({
     *     blogId: 'placeholder-value',
     *
     *     fetchBodies: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "etag": "my_etag",
     *   //   "items": [],
     *   //   "kind": "my_kind",
     *   //   "nextPageToken": "my_nextPageToken"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    list(
      params: Params$Resource$Pages$List,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    list(
      params?: Params$Resource$Pages$List,
      options?: MethodOptions
    ): GaxiosPromise<Schema$PageList>;
    list(
      params: Params$Resource$Pages$List,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    list(
      params: Params$Resource$Pages$List,
      options: MethodOptions | BodyResponseCallback<Schema$PageList>,
      callback: BodyResponseCallback<Schema$PageList>
    ): void;
    list(
      params: Params$Resource$Pages$List,
      callback: BodyResponseCallback<Schema$PageList>
    ): void;
    list(callback: BodyResponseCallback<Schema$PageList>): void;
    list(
      paramsOrCallback?:
        | Params$Resource$Pages$List
        | BodyResponseCallback<Schema$PageList>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$PageList>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$PageList>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$PageList> | GaxiosPromise<Readable> {
      // Disambiguate the overloads (same pattern as get() above).
      let params = (paramsOrCallback || {}) as Params$Resource$Pages$List;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as list(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Pages$List;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as list(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/blogs/{blogId}/pages').replace(
              /([^:]\/)\/+/g,
              '$1'
            ),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId'],
        pathParams: ['blogId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$PageList>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$PageList>(parameters);
      }
    }
  }
  /**
   * Request parameters for `pages.get`.
   */
  export interface Params$Resource$Pages$Get extends StandardParameters {
    /**
     * The ID of the blog containing the page. Required by the request.
     */
    blogId?: string;
    /**
     * The ID of the page to fetch. Required by the request.
     */
    pageId?: string;
  }
  /**
   * Request parameters for `pages.list`.
   */
  export interface Params$Resource$Pages$List extends StandardParameters {
    /**
     * The ID of the blog to list pages from. Required by the request.
     */
    blogId?: string;
    /**
     * Whether page bodies are included in the response — presumed from the
     * parameter name; confirm against the Blogger API reference.
     */
    fetchBodies?: boolean;
  }
  /**
   * Blogger v2 "posts" resource: read-only access to blog posts
   * (`get`, `list`).
   */
  export class Resource$Posts {
    context: APIRequestContext;
    constructor(context: APIRequestContext) {
      this.context = context;
    }
    /**
     * Gets a post by blog id and post id
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.posts.get({
     *     blogId: 'placeholder-value',
     *
     *     postId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "author": {},
     *   //   "blog": {},
     *   //   "content": "my_content",
     *   //   "customMetaData": "my_customMetaData",
     *   //   "etag": "my_etag",
     *   //   "id": "my_id",
     *   //   "images": [],
     *   //   "kind": "my_kind",
     *   //   "labels": [],
     *   //   "location": {},
     *   //   "published": "my_published",
     *   //   "readerComments": "my_readerComments",
     *   //   "replies": {},
     *   //   "selfLink": "my_selfLink",
     *   //   "status": "my_status",
     *   //   "title": "my_title",
     *   //   "titleLink": "my_titleLink",
     *   //   "updated": "my_updated",
     *   //   "url": "my_url"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    get(
      params: Params$Resource$Posts$Get,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    get(
      params?: Params$Resource$Posts$Get,
      options?: MethodOptions
    ): GaxiosPromise<Schema$Post>;
    get(
      params: Params$Resource$Posts$Get,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    get(
      params: Params$Resource$Posts$Get,
      options: MethodOptions | BodyResponseCallback<Schema$Post>,
      callback: BodyResponseCallback<Schema$Post>
    ): void;
    get(
      params: Params$Resource$Posts$Get,
      callback: BodyResponseCallback<Schema$Post>
    ): void;
    get(callback: BodyResponseCallback<Schema$Post>): void;
    get(
      paramsOrCallback?:
        | Params$Resource$Posts$Get
        | BodyResponseCallback<Schema$Post>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$Post>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$Post>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$Post> | GaxiosPromise<Readable> {
      // Disambiguate the overloads: params and/or options may be missing.
      let params = (paramsOrCallback || {}) as Params$Resource$Posts$Get;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as get(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Posts$Get;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as get(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/blogs/{blogId}/posts/{postId}').replace(
              /([^:]\/)\/+/g,
              '$1'
            ),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId', 'postId'],
        pathParams: ['blogId', 'postId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$Post>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$Post>(parameters);
      }
    }
    /**
     * Lists posts.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.posts.list({
     *     blogId: 'placeholder-value',
     *
     *     fetchBodies: 'placeholder-value',
     *
     *     maxResults: 'placeholder-value',
     *
     *     pageToken: 'placeholder-value',
     *
     *     startDate: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "etag": "my_etag",
     *   //   "items": [],
     *   //   "kind": "my_kind",
     *   //   "nextPageToken": "my_nextPageToken",
     *   //   "prevPageToken": "my_prevPageToken"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    list(
      params: Params$Resource$Posts$List,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    list(
      params?: Params$Resource$Posts$List,
      options?: MethodOptions
    ): GaxiosPromise<Schema$PostList>;
    list(
      params: Params$Resource$Posts$List,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    list(
      params: Params$Resource$Posts$List,
      options: MethodOptions | BodyResponseCallback<Schema$PostList>,
      callback: BodyResponseCallback<Schema$PostList>
    ): void;
    list(
      params: Params$Resource$Posts$List,
      callback: BodyResponseCallback<Schema$PostList>
    ): void;
    list(callback: BodyResponseCallback<Schema$PostList>): void;
    list(
      paramsOrCallback?:
        | Params$Resource$Posts$List
        | BodyResponseCallback<Schema$PostList>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$PostList>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$PostList>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$PostList> | GaxiosPromise<Readable> {
      // Disambiguate the overloads (same pattern as get() above).
      let params = (paramsOrCallback || {}) as Params$Resource$Posts$List;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as list(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Posts$List;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as list(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/blogs/{blogId}/posts').replace(
              /([^:]\/)\/+/g,
              '$1'
            ),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['blogId'],
        pathParams: ['blogId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$PostList>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$PostList>(parameters);
      }
    }
  }
  /**
   * Request parameters for `posts.get`.
   */
  export interface Params$Resource$Posts$Get extends StandardParameters {
    /**
     * The ID of the blog containing the post. Required by the request.
     */
    blogId?: string;
    /**
     * The ID of the post to fetch. Required by the request.
     */
    postId?: string;
  }
  /**
   * Request parameters for `posts.list`.
   */
  export interface Params$Resource$Posts$List extends StandardParameters {
    /**
     * The ID of the blog to list posts from. Required by the request.
     */
    blogId?: string;
    /**
     * Whether post bodies are included in the response — presumed from the
     * parameter name; confirm against the Blogger API reference.
     */
    fetchBodies?: boolean;
    /**
     * Maximum number of posts to return per page.
     */
    maxResults?: number;
    /**
     * Pagination token from a previous response's nextPageToken/prevPageToken.
     */
    pageToken?: string;
    /**
     * Earliest post date to fetch — presumably an RFC 3339 timestamp like the
     * schema's `published` field; confirm against the API reference.
     */
    startDate?: string;
  }
  /**
   * Blogger v2 "users" resource: read-only access to user profiles (`get`).
   */
  export class Resource$Users {
    context: APIRequestContext;
    constructor(context: APIRequestContext) {
      this.context = context;
    }
    /**
     * Gets a user by user id.
     * @example
     * ```js
     * // Before running the sample:
     * // - Enable the API at:
     * //   https://console.developers.google.com/apis/api/blogger.googleapis.com
     * // - Login into gcloud by running:
     * //   `$ gcloud auth application-default login`
     * // - Install the npm module by running:
     * //   `$ npm install googleapis`
     *
     * const {google} = require('googleapis');
     * const blogger = google.blogger('v2');
     *
     * async function main() {
     *   const auth = new google.auth.GoogleAuth({
     *     // Scopes can be specified either as an array or as a single, space-delimited string.
     *     scopes: ['https://www.googleapis.com/auth/blogger'],
     *   });
     *
     *   // Acquire an auth client, and bind it to all future calls
     *   const authClient = await auth.getClient();
     *   google.options({auth: authClient});
     *
     *   // Do the magic
     *   const res = await blogger.users.get({
     *     userId: 'placeholder-value',
     *   });
     *   console.log(res.data);
     *
     *   // Example response
     *   // {
     *   //   "about": "my_about",
     *   //   "blogs": {},
     *   //   "created": "my_created",
     *   //   "displayName": "my_displayName",
     *   //   "id": "my_id",
     *   //   "kind": "my_kind",
     *   //   "locale": {},
     *   //   "selfLink": "my_selfLink",
     *   //   "url": "my_url"
     *   // }
     * }
     *
     * main().catch(e => {
     *   console.error(e);
     *   throw e;
     * });
     *
     * ```
     *
     * @param params - Parameters for request
     * @param options - Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param callback - Optional callback that handles the response.
     * @returns A promise if used with async/await, or void if used with a callback.
     */
    get(
      params: Params$Resource$Users$Get,
      options: StreamMethodOptions
    ): GaxiosPromise<Readable>;
    get(
      params?: Params$Resource$Users$Get,
      options?: MethodOptions
    ): GaxiosPromise<Schema$User>;
    get(
      params: Params$Resource$Users$Get,
      options: StreamMethodOptions | BodyResponseCallback<Readable>,
      callback: BodyResponseCallback<Readable>
    ): void;
    get(
      params: Params$Resource$Users$Get,
      options: MethodOptions | BodyResponseCallback<Schema$User>,
      callback: BodyResponseCallback<Schema$User>
    ): void;
    get(
      params: Params$Resource$Users$Get,
      callback: BodyResponseCallback<Schema$User>
    ): void;
    get(callback: BodyResponseCallback<Schema$User>): void;
    get(
      paramsOrCallback?:
        | Params$Resource$Users$Get
        | BodyResponseCallback<Schema$User>
        | BodyResponseCallback<Readable>,
      optionsOrCallback?:
        | MethodOptions
        | StreamMethodOptions
        | BodyResponseCallback<Schema$User>
        | BodyResponseCallback<Readable>,
      callback?:
        | BodyResponseCallback<Schema$User>
        | BodyResponseCallback<Readable>
    ): void | GaxiosPromise<Schema$User> | GaxiosPromise<Readable> {
      // Disambiguate the overloads: params and/or options may be missing.
      let params = (paramsOrCallback || {}) as Params$Resource$Users$Get;
      let options = (optionsOrCallback || {}) as MethodOptions;
      if (typeof paramsOrCallback === 'function') {
        // Called as get(callback): no params or options supplied.
        callback = paramsOrCallback;
        params = {} as Params$Resource$Users$Get;
        options = {};
      }
      if (typeof optionsOrCallback === 'function') {
        // Called as get(params, callback): options were omitted.
        callback = optionsOrCallback;
        options = {};
      }
      // Per-call override of the API root URL.
      const rootUrl = options.rootUrl || 'https://blogger.googleapis.com/';
      const parameters = {
        options: Object.assign(
          {
            // Collapse duplicate slashes introduced by the URL join.
            url: (rootUrl + '/v2/users/{userId}').replace(/([^:]\/)\/+/g, '$1'),
            method: 'GET',
          },
          options
        ),
        params,
        requiredParams: ['userId'],
        pathParams: ['userId'],
        context: this.context,
      };
      if (callback) {
        // Callback style: fire the request and return void.
        createAPIRequest<Schema$User>(
          parameters,
          callback as BodyResponseCallback<unknown>
        );
      } else {
        // Promise style.
        return createAPIRequest<Schema$User>(parameters);
      }
    }
  }
/**
 * Request parameters for `users.get`.
 */
export interface Params$Resource$Users$Get extends StandardParameters {
  /**
   * Identifier of the user to fetch; interpolated into the
   * `/v2/users/{userId}` request path and required by the request
   * (presumably a Blogger user ID — confirm against the API reference).
   */
  userId?: string;
}
}
| apache-2.0 |
liviutudor/MySQLMultiLineString | mysql_multiline_form.php | 1367 | <?php
//settings
// Target of the form POST; defaults to this script so the page round-trips
// onto itself unless the including page overrides $form_action.
global $form_action;
if($form_action == '' ) {
    $form_action = "mysql_multiline_form.php";
}
// Textarea height (rows), default 10.
global $rows;
if( !$rows ) {
    $rows = 10;
}
// Textarea width (columns), default 80.
global $columns;
if( !$columns ) {
    $columns = 80;
}
// Whether empty lines should be removed during conversion; defaults to true.
// NOTE(review): because the check is falsy-based, an explicit "false" is
// overwritten back to true here — confirm whether that is intended.
global $eliminate_empty;
if( !$eliminate_empty ) {
    $eliminate_empty = true;
}
//convert function
// Convert a multi-line string into a quoted MySQL string literal split across
// source lines: single quotes are doubled and each newline becomes a literal
// "\n" followed by a close-quote / newline / re-open-quote, so the statement
// stays readable when pasted into a script.
function safeMySQLMultiline( $txt ) {
    // Bug fix: $eliminate_empty is configured at file scope; without this
    // "global" declaration the variable was always null inside the function,
    // so the empty-line branch below could never run.
    global $eliminate_empty;

    // Get rid of empty lines first, if configured (the file-scope default
    // is true). Lines containing only whitespace count as empty.
    if( $eliminate_empty ) {
        $lines = preg_split( "/\r?\n/", $txt );
        $lines = array_filter( $lines, function( $line ) { return trim( $line ) !== ""; } );
        $txt = implode( "\n", $lines );
    }

    $txt = str_replace( "'", "''", $txt );    // escape single quotes for MySQL
    $txt = preg_replace( "/\r/i", "", $txt ); // normalize CRLF -> LF
    // Newline -> literal "\n" + quote, real newline, re-open quote.
    $txt = preg_replace( "/\n/i", "\\\\\\\\n' \r'", $txt);
    //$txt = preg_replace( "/\t/i", "' \\\\\\\\t'", $txt);
    $txt = "'" . $txt . "'";
    // stripslashes collapses the doubled backslashes added above.
    return stripslashes($txt);
}
?>
<form action="<?php echo htmlspecialchars($form_action, ENT_QUOTES); ?>" method="POST">
    <label for="mysql_value">Multi-line value</label>
    <br/>
    <textarea id="mysql_value" name="mysql_value" rows="<?php echo (int)$rows; ?>" cols="<?php echo (int)$columns; ?>">
<?php
// Echo the submitted value back so the user keeps their input after the
// round-trip. htmlspecialchars() stops the value from breaking out of the
// <textarea> (XSS); isset() avoids an undefined-index notice on the first,
// non-POST visit. The browser un-escapes entities inside <textarea>, so the
// displayed text is unchanged.
if( isset($_POST["mysql_value"]) && $_POST["mysql_value"] != "" ) {
    echo htmlspecialchars(stripslashes($_POST["mysql_value"]), ENT_QUOTES);
}
?>
</textarea>
    <br/><br/>
    <label for="converted_value">Converted value</label><br/>
    <textarea id="converted_value" name="converted_value" rows="<?php echo (int)$rows; ?>" cols="<?php echo (int)$columns; ?>">
<?php
// Same escaping for the converted output.
if( isset($_POST["mysql_value"]) && $_POST["mysql_value"] != "" ) {
    echo htmlspecialchars(safeMySQLMultiline($_POST["mysql_value"]), ENT_QUOTES);
}
?>
</textarea>
    <br/>
    <input type="submit" />
</form>
| apache-2.0 |
DigitalPlatform/dp2 | dp2Catalog/XmlDetailForm.cs | 14023 | using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Xml;
using System.IO;
using DigitalPlatform.Xml;
using DigitalPlatform;
namespace dp2Catalog
{
/// <summary>
/// MDI child form that displays one XML record fetched through a linked
/// search window (<see cref="ISearchForm"/>): an editable/indented XML view,
/// a read-only browser view, and an optional "origin data" page showing the
/// raw bytes, their text rendering, database name and record-syntax OID.
/// </summary>
public partial class XmlDetailForm : Form
{
    bool m_bDisplayOriginPage = true; // whether to display the raw-data tab page

    /// <summary>
    /// Controls whether the "origin data" tab is shown; setting it adds or
    /// removes tabPage_originData from tabControl_main.
    /// </summary>
    public bool DisplayOriginPage
    {
        get
        {
            return this.m_bDisplayOriginPage;
        }
        set
        {
            this.m_bDisplayOriginPage = value;
            if (value == false)
            {
                // TODO: there is a memory leak here; needs improvement
                if (this.tabControl_main.TabPages.IndexOf(this.tabPage_originData) != -1)
                    this.tabControl_main.TabPages.Remove(this.tabPage_originData);
            }
            else
            {
                if (this.tabControl_main.TabPages.IndexOf(this.tabPage_originData) == -1)
                    this.tabControl_main.TabPages.Add(this.tabPage_originData);
            }
        }
    }

    // Private window messages used to defer work until after the form's
    // handle exists (posted from Load, handled in DefWndProc).
    const int WM_LOADSIZE = API.WM_USER + 201;
    const int WM_VERIFY_DATA = API.WM_USER + 204;
    const int WM_FILL_MARCEDITOR_SCRIPT_MENU = API.WM_USER + 205;

    public LoginInfo LoginInfo = new LoginInfo();
    public MainForm MainForm = null;
    DigitalPlatform.Stop stop = null; // stop handle registered with MainForm.stopManager
    public ISearchForm LinkedSearchForm = null; // search window this form navigates through
    DigitalPlatform.OldZ3950.Record CurrentRecord = null;
    Encoding CurrentEncoding = Encoding.GetEncoding(936); // code page 936 (Simplified Chinese GBK)
    public string AutoDetectedMarcSyntaxOID = "";
    byte[] CurrentTimestamp = null;

    // path used when saving the record
    /// <summary>
    /// Save path of the current record; backed by textBox_savePath.
    /// </summary>
    public string SavePath
    {
        get
        {
            return this.textBox_savePath.Text;
        }
        set
        {
            this.textBox_savePath.Text = value;
        }
    }

    public XmlDetailForm()
    {
        InitializeComponent();
    }

    // Form Load: register the stop handle, populate the encoding combo,
    // restore the indent preference, and post WM_LOADSIZE so the window
    // geometry is restored after creation completes.
    private void XmlDetailForm_Load(object sender, EventArgs e)
    {
        stop = new DigitalPlatform.Stop();
        stop.Register(MainForm.stopManager, true); // associate with the container
        Global.FillEncodingList(this.comboBox_originDataEncoding,
            false);
        this.NeedIndentXml = this.MainForm.AppInfo.GetBoolean(
            "xmldetailform",
            "need_indent_xml",
            true);
        API.PostMessage(this.Handle, WM_LOADSIZE, 0, 0);
    }

    private void XmlDetailForm_FormClosing(object sender, FormClosingEventArgs e)
    {
    }

    // Form Closed: detach the stop handle and persist window/preference state.
    private void XmlDetailForm_FormClosed(object sender, FormClosedEventArgs e)
    {
        if (stop != null) // detach
        {
            stop.Unregister(); // disassociate from the container
            stop = null;
        }
        SaveSize();
        if (this.MainForm != null && this.MainForm.AppInfo != null)
        {
            this.MainForm.AppInfo.SetBoolean(
                "xmldetailform",
                "need_indent_xml",
                this.NeedIndentXml);
        }
    }

    /// <summary>
    /// Navigate relative to the record currently shown, using the path in
    /// textBox_tempRecPath to locate the record within the linked search
    /// window's result set.
    /// </summary>
    /// <param name="strDirection">"prev", "current" or "next".</param>
    /// <param name="bForceFull">Pass-through: request the full element set.</param>
    /// <returns>0 on success, -1 on error (a message box is shown).</returns>
    public int LoadRecord(string strDirection,
        bool bForceFull = false)
    {
        string strError = "";
        if (this.LinkedSearchForm == null)
        {
            strError = "没有关联的检索窗";
            goto ERROR1;
        }
        string strPath = this.textBox_tempRecPath.Text;
        if (String.IsNullOrEmpty(strPath) == true)
        {
            strError = "路径为空";
            goto ERROR1;
        }
        // split the path into its parts: protocol, result-set name, index
        string strProtocol = "";
        string strResultsetName = "";
        string strIndex = "";
        int nRet = MarcDetailForm.ParsePath(strPath,
            out strProtocol,
            out strResultsetName,
            out strIndex,
            out strError);
        if (nRet == -1)
        {
            strError = "解析路径 '" + strPath + "' 字符串过程中发生错误: " + strError;
            goto ERROR1;
        }
        // The search window must still be on the same protocol and result
        // set the path was built from, otherwise the index is meaningless.
        if (strProtocol != this.LinkedSearchForm.CurrentProtocol)
        {
            strError = "检索窗的协议已经发生改变";
            goto ERROR1;
        }
        if (strResultsetName != this.LinkedSearchForm.CurrentResultsetPath)
        {
            strError = "结果集已经发生改变";
            goto ERROR1;
        }
        // Displayed paths are 1-based; indexes into the result set are 0-based.
        int index = 0;
        index = Convert.ToInt32(strIndex) - 1;
        if (strDirection == "prev")
        {
            index--;
            if (index < 0)
            {
                strError = "到头";
                goto ERROR1;
            }
        }
        else if (strDirection == "current")
        {
        }
        else if (strDirection == "next")
        {
            index++;
        }
        else
        {
            strError = "不能识别的strDirection参数值 '" + strDirection + "'";
            goto ERROR1;
        }
        return LoadRecord(this.LinkedSearchForm, index, bForceFull);
    ERROR1:
        MessageBox.Show(this, strError);
        return -1;
    }

    // Load an XML record
    /// <summary>
    /// Fetch record <paramref name="index"/> (0-based) from
    /// <paramref name="searchform"/> and populate all views of this form.
    /// </summary>
    /// <returns>0 on success, -1 on error (a message box is shown).</returns>
    public int LoadRecord(ISearchForm searchform,
        int index,
        bool bForceFullElementSet = false)
    {
        string strError = "";
        string strMARC = "";
        this.LinkedSearchForm = searchform;
        this.SavePath = "";
        DigitalPlatform.OldZ3950.Record record = null;
        Encoding currentEncoding = null;
        this.CurrentRecord = null;
        byte[] baTimestamp = null;
        string strSavePath = "";
        string strOutStyle = "";
        LoginInfo logininfo = null;
        long lVersion = 0;
        string strXmlFragment = "";
        string strParameters = "hilight_browse_line";
        if (bForceFullElementSet == true)
            strParameters += ",force_full";
        int nRet = searchform.GetOneRecord(
            "xml",
            index, // soon to be deprecated
            "index:" + index.ToString(),
            strParameters, // true,
            out strSavePath,
            out strMARC,
            out strXmlFragment,
            out strOutStyle,
            out baTimestamp,
            out lVersion,
            out record,
            out currentEncoding,
            out logininfo,
            out strError);
        if (nRet == -1)
            goto ERROR1;
        this.LoginInfo = logininfo;
        this.CurrentTimestamp = baTimestamp;
        this.SavePath = strSavePath;
        this.CurrentEncoding = currentEncoding;
        // normalize bare 0x0a: LF-only line ends become CRLF for the textbox
        strMARC = strMARC.Replace("\r", "");
        strMARC = strMARC.Replace("\n", "\r\n");
        // load into the XML editor
        // this.textBox_xml.Text = strMARC;
        this.PlainText = strMARC; // auto-indents when enabled
        this.textBox_xml.Select(0, 0);
        // load into the read-only web browser control via a temp file
        {
            string strTempFileName = MainForm.DataDir + "\\xml.xml";
            // SUTRS records are plain text, so use a .txt extension instead
            if (record.m_strSyntaxOID == "1.2.840.10003.5.101")
                strTempFileName = MainForm.DataDir + "\\xml.txt";
            using (Stream stream = File.Create(strTempFileName))
            {
                // write the xml content
                byte[] buffer = Encoding.UTF8.GetBytes(strMARC);
                stream.Write(buffer, 0, buffer.Length);
            }
            this.webBrowser_xml.Navigate(strTempFileName);
        }
        this.CurrentRecord = record;
        if (this.CurrentRecord != null && this.DisplayOriginPage == true)
        {
            // load into the binary editor
            this.binaryEditor_originData.SetData(
                this.CurrentRecord.m_baRecord);
            // load the raw text rendering
            nRet = this.SetOriginText(this.CurrentRecord.m_baRecord,
                this.CurrentEncoding,
                out strError);
            if (nRet == -1)
            {
                this.textBox_originData.Text = strError;
            }
            // database name
            this.textBox_originDatabaseName.Text = this.CurrentRecord.m_strDBName;
            // record syntax OID
            this.textBox_originMarcSyntaxOID.Text = this.CurrentRecord.m_strSyntaxOID;
        }
        // build the 1-based display path: protocol:resultset/index
        string strPath = searchform.CurrentProtocol + ":"
            + searchform.CurrentResultsetPath
            + "/" + (index + 1).ToString();
        this.textBox_tempRecPath.Text = strPath;
        this.textBox_xml.Focus();
        return 0;
    ERROR1:
        MessageBox.Show(this, strError);
        return -1;
    }

    /// <summary>
    /// Render the raw record bytes as text. When <paramref name="encoding"/>
    /// is null the encoding selected in the combo box is used; otherwise the
    /// combo box is updated to reflect the given encoding.
    /// </summary>
    /// <returns>0 on success, -1 on error.</returns>
    int SetOriginText(byte[] baOrigin,
        Encoding encoding,
        out string strError)
    {
        strError = "";
        if (encoding == null)
        {
            int nRet = this.MainForm.GetEncoding(this.comboBox_originDataEncoding.Text,
                out encoding,
                out strError);
            if (nRet == -1)
                return -1;
        }
        else
        {
            this.comboBox_originDataEncoding.Text = GetEncodingForm.GetEncodingName(this.CurrentEncoding);
        }
        this.textBox_originData.Text = encoding.GetString(baOrigin);
        return 0;
    }

    // Should XML be indented when loaded into the textbox?
    // Backed by the toolbar toggle button's checked state.
    bool NeedIndentXml
    {
        get
        {
            return this.toolStripButton_indentXmlText.Checked;
        }
        set
        {
            this.toolStripButton_indentXmlText.Checked = value;
        }
    }

    /// <summary>
    /// Text of the XML editor. Setting it applies indentation via
    /// DomUtil.GetIndentXml when NeedIndentXml is on; if indenting fails
    /// (the value may not be XML) the raw text is used as-is.
    /// </summary>
    public string PlainText
    {
        get
        {
            return this.textBox_xml.Text;
        }
        set
        {
            if (this.NeedIndentXml == false)
            {
                this.textBox_xml.Text = value;
                return;
            }
            string strError = "";
            string strOutXml = "";
            int nRet = DomUtil.GetIndentXml(value,
                out strOutXml,
                out strError);
            if (nRet == -1)
            {
                // may not actually be XML; fall back to the raw text
                this.textBox_xml.Text = value;
                return;
            }
            this.textBox_xml.Text = strOutXml;
        }
    }

    // Toolbar toggle: when switched on, re-indent the current editor content.
    private void toolStripButton_indentXmlText_Click(object sender, EventArgs e)
    {
        if (this.toolStripButton_indentXmlText.Checked == true)
        {
            string strError = "";
            string strOutXml = "";
            int nRet = DomUtil.GetIndentXml(this.textBox_xml.Text,
                out strOutXml,
                out strError);
            if (nRet == -1)
            {
                MessageBox.Show(this, strError);
                return;
            }
            this.textBox_xml.Text = strOutXml;
        }
    }

    // Enable only the navigation buttons that make sense for this form.
    private void XmlDetailForm_Activated(object sender, EventArgs e)
    {
        MainForm.toolButton_prev.Enabled = true;
        MainForm.toolButton_next.Enabled = true;
        MainForm.toolButton_nextBatch.Enabled = false;
        MainForm.toolButton_getAllRecords.Enabled = false;
    }

    // Handle the deferred WM_LOADSIZE message posted from Load.
    protected override void DefWndProc(ref Message m)
    {
        switch (m.Msg)
        {
            case WM_LOADSIZE:
                LoadSize();
                return;
        }
        base.DefWndProc(ref m);
    }

    /// <summary>
    /// Restore window size, splitter position and active tab from AppInfo.
    /// </summary>
    public void LoadSize()
    {
        // restore window size state
        MainForm.AppInfo.LoadMdiChildFormStates(this,
            "mdi_form_state",
            SizeStyle.All,
            MainForm.DefaultMdiWindowWidth,
            MainForm.DefaultMdiWindowHeight);
        // restore splitContainer_originDataMain state
        int nValue = MainForm.AppInfo.GetInt(
            "xmldetailform",
            "splitContainer_originDataMain",
            -1);
        if (nValue != -1)
            this.splitContainer_originDataMain.SplitterDistance = nValue;
        try
        {
            this.tabControl_main.SelectedIndex = this.MainForm.AppInfo.GetInt(
                "xmldetailform",
                "active_page",
                0);
        }
        catch
        {
        }
    }

    /// <summary>
    /// Persist window size, splitter position and active tab to AppInfo.
    /// </summary>
    public void SaveSize()
    {
        if (this.MainForm != null && this.MainForm.AppInfo != null)
        {
            MainForm.AppInfo.SaveMdiChildFormStates(this,
                "mdi_form_state");
            // save splitContainer_originDataMain state
            MainForm.AppInfo.SetInt(
                "xmldetailform",
                "splitContainer_originDataMain",
                this.splitContainer_originDataMain.SplitterDistance);
            this.MainForm.AppInfo.SetInt(
                "xmldetailform",
                "active_page",
                this.tabControl_main.SelectedIndex);
        }
    }
}
} | apache-2.0 |
balazssimon/meta-cs | src/Main/MetaDslx.CodeAnalysis.Common/Compilation/CompilationOptions.cs | 31095 | // Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis.PooledObjects;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis
{
/// <summary>
/// Represents compilation options common to C# and VB.
/// </summary>
public abstract class CompilationOptions
{
/// <summary>
/// The kind of assembly generated when emitted.
/// </summary>
public OutputKind OutputKind { get; protected set; }
/// <summary>
/// Name of the primary module, or null if a default name should be used.
/// </summary>
/// <remarks>
/// The name usually (but not necessarily) includes an extension, e.g. "MyModule.dll".
///
/// If <see cref="ModuleName"/> is null the actual name written to metadata
/// is derived from the name of the compilation (<see cref="Compilation.AssemblyName"/>)
/// by appending a default extension for <see cref="OutputKind"/>.
/// </remarks>
public string ModuleName { get; protected set; }
/// <summary>
/// The full name of a global implicit class (script class). This class implicitly encapsulates top-level statements,
/// type declarations, and member declarations. Could be a namespace qualified name.
/// </summary>
public string ScriptClassName { get; protected set; }
/// <summary>
/// The full name of a type that declares static Main method. Must be a valid non-generic namespace-qualified name.
/// Null if any static Main method is a candidate for an entry point.
/// </summary>
public string MainTypeName { get; protected set; }
// Note that we avoid using default(ImmutableArray<byte>) for unspecified value since
// such value is currently not serializable by JSON serializer.
/// <summary>
/// Specifies public key used to generate strong name for the compilation assembly, or empty if not specified.
/// </summary>
/// <remarks>
/// If specified the values of <see cref="CryptoKeyFile"/> and <see cref="CryptoKeyContainer"/>
/// must be null. If <see cref="PublicSign"/> is true the assembly is marked as fully signed
/// but only signed with the public key (aka "OSS signing").
/// </remarks>
public ImmutableArray<byte> CryptoPublicKey { get; protected set; }
/// <summary>
/// The name of the file containing the public and private keys to use to generate strong name of the
/// compilation assembly and to sign it.
/// </summary>
/// <remarks>
/// <para>
/// To sign the output supply either one of <see cref="CryptoKeyFile"/> or <see cref="CryptoKeyContainer"/>.
/// but not both. If both are specified <see cref="CryptoKeyContainer"/> is ignored.
/// </para>
/// <para>
/// If <see cref="PublicSign" /> is also set, <see cref="CryptoKeyFile"/> must be the absolute
/// path to key file.
/// </para>
/// </remarks>
public string CryptoKeyFile { get; protected set; }
/// <summary>
/// The CSP container containing the key with which to sign the output.
/// </summary>
/// <remarks>
/// <para>
/// To sign the output supply either one of <see cref="CryptoKeyFile"/> or <see cref="CryptoKeyContainer"/>.
/// but not both. If both are specified <see cref="CryptoKeyContainer"/> is ignored.
/// </para>
/// <para>
/// This setting is obsolete and only supported on Microsoft Windows platform.
/// Use <see cref="CryptoPublicKey"/> to generate assemblies with strong name and
/// a signing tool (Microsoft .NET Framework Strong Name Utility (sn.exe) or equivalent) to sign them.
/// </para>
/// </remarks>
public string CryptoKeyContainer { get; protected set; }
/// <summary>
/// Mark the compilation assembly as delay-signed.
/// </summary>
/// <remarks>
/// If true the resulting assembly is marked as delay signed.
///
/// If false and <see cref="CryptoPublicKey"/>, <see cref="CryptoKeyFile"/>, or <see cref="CryptoKeyContainer"/> is specified
/// or attribute System.Reflection.AssemblyKeyFileAttribute or System.Reflection.AssemblyKeyNameAttribute is applied to the
/// compilation assembly in source the resulting assembly is signed accordingly to the specified values/attributes.
///
/// If null the semantics is specified by the value of attribute System.Reflection.AssemblyDelaySignAttribute
/// applied to the compilation assembly in source. If the attribute is not present the value defaults to "false".
/// </remarks>
public bool? DelaySign { get; protected set; }
/// <summary>
/// Mark the compilation assembly as fully signed, but only sign with the public key.
/// </summary>
/// <remarks>
/// <para>
/// If true, the assembly is marked as signed, but is only signed with the public key.
/// </para>
/// <para>
/// The key must be provided through either an absolute path in <see cref="CryptoKeyFile"/>
/// or directly via <see cref="CryptoPublicKey" />.
/// </para>
/// </remarks>
public bool PublicSign { get; protected set; }
/// <summary>
/// Whether bounds checking on integer arithmetic is enforced by default or not.
/// </summary>
public bool CheckOverflow { get; protected set; }
/// <summary>
/// Specifies which version of the common language runtime (CLR) can run the assembly.
/// </summary>
public Platform Platform { get; protected set; }
/// <summary>
/// Specifies whether or not optimizations should be performed on the output IL.
/// This is independent of whether or not PDB information is generated.
/// </summary>
public OptimizationLevel OptimizationLevel { get; protected set; }
/// <summary>
/// Global warning report option
/// </summary>
public ReportDiagnostic GeneralDiagnosticOption { get; protected set; }
/// <summary>
/// Global warning level (from 0 to 4).
/// </summary>
public int WarningLevel { get; protected set; }
/// <summary>
/// Specifies whether building compilation may use multiple threads.
/// </summary>
public bool ConcurrentBuild { get; protected set; }
/// <summary>
/// Specifies whether the compilation should be deterministic.
/// </summary>
public bool Deterministic { get; protected set; }
/// <summary>
/// Used for time-based version generation when <see cref="System.Reflection.AssemblyVersionAttribute"/> contains a wildcard.
/// If equal to default(<see cref="DateTime"/>) the actual current local time will be used.
/// </summary>
public DateTime CurrentLocalTime { get; protected set; }
/// <summary>
/// Emit mode that favors debuggability.
/// </summary>
public bool DebugPlusMode { get; protected set; }
/// <summary>
/// Specifies whether to import members with accessibility other than public or protected by default.
/// Default value is <see cref="MetadataImportOptions.Public"/>. The value specified is not going to
/// affect correctness of analysis performed by compilers because all members needed for correctness
/// are going to be imported regardless. This setting can force compilation to import members that it
/// normally doesn't.
/// </summary>
public MetadataImportOptions MetadataImportOptions { get; protected set; }
/// <summary>
/// Apply additional disambiguation rules during resolution of referenced assemblies.
/// </summary>
public bool ReferencesSupersedeLowerVersions { get; protected set; }
/// <summary>
/// Modifies the incoming diagnostic, for example escalating its severity, or discarding it (returning null) based on the compilation options.
/// </summary>
/// <param name="diagnostic"></param>
/// <returns>The modified diagnostic, or null</returns>
public abstract Diagnostic FilterDiagnostic(Diagnostic diagnostic);
/// <summary>
/// Warning report option for each warning.
/// </summary>
public ImmutableDictionary<string, ReportDiagnostic> SpecificDiagnosticOptions { get; protected set; }
/// <summary>
/// Whether diagnostics suppressed in source, i.e. <see cref="Diagnostic.IsSuppressed"/> is true, should be reported.
/// </summary>
public bool ReportSuppressedDiagnostics { get; protected set; }
/// <summary>
/// Resolves paths to metadata references specified in source via #r directives.
/// Null if the compilation can't contain references to metadata other than those explicitly passed to its factory (such as #r directives in sources).
/// </summary>
public MetadataReferenceResolver MetadataReferenceResolver { get; protected set; }
/// <summary>
/// Gets the resolver for resolving XML document references for the compilation.
/// Null if the compilation is not allowed to contain XML file references, such as XML doc comment include tags and permission sets stored in an XML file.
/// </summary>
public XmlReferenceResolver XmlReferenceResolver { get; protected set; }
/// <summary>
/// Gets the resolver for resolving source document references for the compilation.
/// Null if the compilation is not allowed to contain source file references, such as #line pragmas and #load directives.
/// </summary>
public SourceReferenceResolver SourceReferenceResolver { get; protected set; }
/// <summary>
/// Provides strong name and signature the source assembly.
/// Null if assembly signing is not supported.
/// </summary>
public StrongNameProvider StrongNameProvider { get; protected set; }
/// <summary>
/// Used to compare assembly identities. May implement unification and portability policies specific to the target platform.
/// <see cref="AssemblyIdentityComparer.Default"/> if not specified.
/// </summary>
public AssemblyIdentityComparer AssemblyIdentityComparer { get; protected set; }
// Lazily-computed validation diagnostics for this option set; populated by
// ValidateOptions (invoked from the constructor's lazy initializer) and
// surfaced through the Errors property.
private readonly Lazy<ImmutableArray<Diagnostic>> _lazyErrors;
// Expects correct arguments.
// Assigns every option verbatim (with small normalizations noted inline);
// option validation is deferred to the lazy initializer below and exposed
// via the Errors property.
internal CompilationOptions(
    OutputKind outputKind,
    bool reportSuppressedDiagnostics,
    string moduleName,
    string mainTypeName,
    string scriptClassName,
    string cryptoKeyContainer,
    string cryptoKeyFile,
    ImmutableArray<byte> cryptoPublicKey,
    bool? delaySign,
    bool publicSign,
    OptimizationLevel optimizationLevel,
    bool checkOverflow,
    Platform platform,
    ReportDiagnostic generalDiagnosticOption,
    int warningLevel,
    ImmutableDictionary<string, ReportDiagnostic> specificDiagnosticOptions,
    bool concurrentBuild,
    bool deterministic,
    DateTime currentLocalTime,
    bool debugPlusMode,
    XmlReferenceResolver xmlReferenceResolver,
    SourceReferenceResolver sourceReferenceResolver,
    MetadataReferenceResolver metadataReferenceResolver,
    AssemblyIdentityComparer assemblyIdentityComparer,
    StrongNameProvider strongNameProvider,
    MetadataImportOptions metadataImportOptions,
    bool referencesSupersedeLowerVersions)
{
    this.OutputKind = outputKind;
    this.ModuleName = moduleName;
    this.MainTypeName = mainTypeName;
    // Null script class name falls back to the well-known default.
    this.ScriptClassName = scriptClassName ?? WellKnownMemberNames.DefaultScriptClassName;
    this.CryptoKeyContainer = cryptoKeyContainer;
    // Empty key file path is normalized to null.
    this.CryptoKeyFile = string.IsNullOrEmpty(cryptoKeyFile) ? null : cryptoKeyFile;
    // Default array is normalized to empty (see serialization note above).
    this.CryptoPublicKey = cryptoPublicKey.NullToEmpty();
    this.DelaySign = delaySign;
    this.CheckOverflow = checkOverflow;
    this.Platform = platform;
    this.GeneralDiagnosticOption = generalDiagnosticOption;
    this.WarningLevel = warningLevel;
    this.SpecificDiagnosticOptions = specificDiagnosticOptions;
    this.ReportSuppressedDiagnostics = reportSuppressedDiagnostics;
    this.OptimizationLevel = optimizationLevel;
    this.ConcurrentBuild = concurrentBuild;
    this.Deterministic = deterministic;
    this.CurrentLocalTime = currentLocalTime;
    this.DebugPlusMode = debugPlusMode;
    this.XmlReferenceResolver = xmlReferenceResolver;
    this.SourceReferenceResolver = sourceReferenceResolver;
    this.MetadataReferenceResolver = metadataReferenceResolver;
    this.StrongNameProvider = strongNameProvider;
    // Null comparer falls back to the default identity comparer.
    this.AssemblyIdentityComparer = assemblyIdentityComparer ?? AssemblyIdentityComparer.Default;
    this.MetadataImportOptions = metadataImportOptions;
    this.ReferencesSupersedeLowerVersions = referencesSupersedeLowerVersions;
    this.PublicSign = publicSign;
    // Validation runs lazily, at most once, when Errors is first read.
    _lazyErrors = new Lazy<ImmutableArray<Diagnostic>>(() =>
    {
        var builder = ArrayBuilder<Diagnostic>.GetInstance();
        ValidateOptions(builder);
        return builder.ToImmutableAndFree();
    });
}
/// <summary>
/// This condition has to include all options the Assembly Manager depends on when binding references.
/// In addition, the assembly name is determined based upon output kind. It is special for netmodules.
/// Can't reuse when file resolver or identity comparers change.
/// Can reuse even if StrongNameProvider changes. When resolving a cyclic reference only the simple name is considered, not the strong name.
/// </summary>
/// <param name="other">The option set to compare against.</param>
/// <returns>True when the reference manager built for these options may be reused for <paramref name="other"/>.</returns>
public virtual bool CanReuseCompilationReferenceManager(CompilationOptions other)
{
    // Only netmodule-ness of the output kind matters here, not the exact kind.
    return this.MetadataImportOptions == other.MetadataImportOptions
        && this.ReferencesSupersedeLowerVersions == other.ReferencesSupersedeLowerVersions
        && this.OutputKind.IsNetModule() == other.OutputKind.IsNetModule()
        && object.Equals(this.XmlReferenceResolver, other.XmlReferenceResolver)
        && object.Equals(this.MetadataReferenceResolver, other.MetadataReferenceResolver)
        && object.Equals(this.AssemblyIdentityComparer, other.AssemblyIdentityComparer);
}
/// <summary>
/// Gets the source language ("C#" or "Visual Basic").
/// </summary>
public abstract string Language { get; }
/// <summary>
/// True when Edit and Continue is available, i.e. the compilation is built
/// at the Debug optimization level.
/// </summary>
public bool EnableEditAndContinue => OptimizationLevel == OptimizationLevel.Debug;
/// <summary>
/// Returns true for the accepted PE file alignments: the powers of two
/// from 512 through 8192 (512, 1024, 2048, 4096, 8192).
/// </summary>
internal static bool IsValidFileAlignment(int value)
{
    // A value in [512, 8192] with a single bit set is exactly one of the
    // five alignments the original switch enumerated.
    return value >= 512 && value <= 8192 && (value & (value - 1)) == 0;
}
/// <summary>
/// Creates a new options instance with the specified general diagnostic option.
/// </summary>
public CompilationOptions WithGeneralDiagnosticOption(ReportDiagnostic value)
{
    return CommonWithGeneralDiagnosticOption(value);
}
/// <summary>
/// Creates a new options instance with the specified diagnostic-specific options.
/// </summary>
public CompilationOptions WithSpecificDiagnosticOptions(ImmutableDictionary<string, ReportDiagnostic> value)
{
    return CommonWithSpecificDiagnosticOptions(value);
}
/// <summary>
/// Creates a new options instance with the specified diagnostic-specific options.
/// </summary>
public CompilationOptions WithSpecificDiagnosticOptions(IEnumerable<KeyValuePair<string, ReportDiagnostic>> value)
{
    return CommonWithSpecificDiagnosticOptions(value);
}
/// <summary>
/// Creates a new options instance with the specified suppressed diagnostics reporting option.
/// </summary>
public CompilationOptions WithReportSuppressedDiagnostics(bool value)
{
    return CommonWithReportSuppressedDiagnostics(value);
}
/// <summary>
/// Creates a new options instance with the concurrent build property set accordingly.
/// </summary>
public CompilationOptions WithConcurrentBuild(bool concurrent)
{
    return CommonWithConcurrentBuild(concurrent);
}
/// <summary>
/// Creates a new options instance with the deterministic property set accordingly.
/// </summary>
public CompilationOptions WithDeterministic(bool deterministic)
{
    return CommonWithDeterministic(deterministic);
}
/// <summary>
/// Creates a new options instance with the specified output kind.
/// </summary>
public CompilationOptions WithOutputKind(OutputKind kind)
{
    return CommonWithOutputKind(kind);
}
/// <summary>
/// Creates a new options instance with the specified platform.
/// </summary>
public CompilationOptions WithPlatform(Platform platform)
{
    return CommonWithPlatform(platform);
}
/// <summary>
/// Creates a new options instance with the specified public sign setting.
/// </summary>
public CompilationOptions WithPublicSign(bool publicSign) => CommonWithPublicSign(publicSign);
/// <summary>
/// Creates a new options instance with optimizations enabled or disabled.
/// </summary>
public CompilationOptions WithOptimizationLevel(OptimizationLevel value)
{
    return CommonWithOptimizationLevel(value);
}
/// <summary>
/// Creates a new options instance with the specified XML reference resolver.
/// </summary>
public CompilationOptions WithXmlReferenceResolver(XmlReferenceResolver resolver)
{
    return CommonWithXmlReferenceResolver(resolver);
}
/// <summary>
/// Creates a new options instance with the specified source reference resolver.
/// </summary>
public CompilationOptions WithSourceReferenceResolver(SourceReferenceResolver resolver)
{
    return CommonWithSourceReferenceResolver(resolver);
}
/// <summary>
/// Creates a new options instance with the specified metadata reference resolver.
/// </summary>
public CompilationOptions WithMetadataReferenceResolver(MetadataReferenceResolver resolver)
{
    return CommonWithMetadataReferenceResolver(resolver);
}
/// <summary>
/// Creates a new options instance with the specified assembly identity comparer.
/// </summary>
public CompilationOptions WithAssemblyIdentityComparer(AssemblyIdentityComparer comparer)
{
    return CommonWithAssemblyIdentityComparer(comparer);
}
/// <summary>
/// Creates a new options instance with the specified strong name provider.
/// </summary>
public CompilationOptions WithStrongNameProvider(StrongNameProvider provider)
{
    return CommonWithStrongNameProvider(provider);
}
/// <summary>
/// Creates a new options instance with the specified module name.
/// </summary>
public CompilationOptions WithModuleName(string moduleName)
{
    return CommonWithModuleName(moduleName);
}
/// <summary>
/// Creates a new options instance with the specified main type name.
/// </summary>
public CompilationOptions WithMainTypeName(string mainTypeName)
{
    return CommonWithMainTypeName(mainTypeName);
}
/// <summary>
/// Creates a new options instance with the specified script class name.
/// </summary>
public CompilationOptions WithScriptClassName(string scriptClassName)
{
    return CommonWithScriptClassName(scriptClassName);
}
/// <summary>
/// Creates a new options instance with the specified crypto key container.
/// </summary>
public CompilationOptions WithCryptoKeyContainer(string cryptoKeyContainer)
{
    return CommonWithCryptoKeyContainer(cryptoKeyContainer);
}
/// <summary>
/// Creates a new options instance with the specified crypto key file.
/// </summary>
public CompilationOptions WithCryptoKeyFile(string cryptoKeyFile)
{
    return CommonWithCryptoKeyFile(cryptoKeyFile);
}
/// <summary>
/// Creates a new options instance with the specified crypto public key.
/// </summary>
public CompilationOptions WithCryptoPublicKey(ImmutableArray<byte> cryptoPublicKey)
{
    return CommonWithCryptoPublicKey(cryptoPublicKey);
}
/// <summary>
/// Creates a new options instance with the specified delay-sign setting.
/// </summary>
public CompilationOptions WithDelaySign(bool? delaySign)
{
    return CommonWithDelaySign(delaySign);
}
/// <summary>
/// Creates a new options instance with overflow checking enabled or disabled.
/// </summary>
public CompilationOptions WithOverflowChecks(bool checkOverflow)
{
    return CommonWithCheckOverflow(checkOverflow);
}
/// <summary>
/// Creates a new options instance with the specified metadata import options.
/// </summary>
public CompilationOptions WithMetadataImportOptions(MetadataImportOptions value) => CommonWithMetadataImportOptions(value);
// Language-specific implementations of the With* factory methods above;
// each derived options type returns an updated copy of itself.
protected abstract CompilationOptions CommonWithConcurrentBuild(bool concurrent);
protected abstract CompilationOptions CommonWithDeterministic(bool deterministic);
protected abstract CompilationOptions CommonWithOutputKind(OutputKind kind);
protected abstract CompilationOptions CommonWithPlatform(Platform platform);
protected abstract CompilationOptions CommonWithPublicSign(bool publicSign);
protected abstract CompilationOptions CommonWithOptimizationLevel(OptimizationLevel value);
protected abstract CompilationOptions CommonWithXmlReferenceResolver(XmlReferenceResolver resolver);
protected abstract CompilationOptions CommonWithSourceReferenceResolver(SourceReferenceResolver resolver);
protected abstract CompilationOptions CommonWithMetadataReferenceResolver(MetadataReferenceResolver resolver);
protected abstract CompilationOptions CommonWithAssemblyIdentityComparer(AssemblyIdentityComparer comparer);
protected abstract CompilationOptions CommonWithStrongNameProvider(StrongNameProvider provider);
protected abstract CompilationOptions CommonWithGeneralDiagnosticOption(ReportDiagnostic generalDiagnosticOption);
protected abstract CompilationOptions CommonWithSpecificDiagnosticOptions(ImmutableDictionary<string, ReportDiagnostic> specificDiagnosticOptions);
protected abstract CompilationOptions CommonWithSpecificDiagnosticOptions(IEnumerable<KeyValuePair<string, ReportDiagnostic>> specificDiagnosticOptions);
protected abstract CompilationOptions CommonWithReportSuppressedDiagnostics(bool reportSuppressedDiagnostics);
protected abstract CompilationOptions CommonWithModuleName(string moduleName);
protected abstract CompilationOptions CommonWithMainTypeName(string mainTypeName);
protected abstract CompilationOptions CommonWithScriptClassName(string scriptClassName);
protected abstract CompilationOptions CommonWithCryptoKeyContainer(string cryptoKeyContainer);
protected abstract CompilationOptions CommonWithCryptoKeyFile(string cryptoKeyFile);
protected abstract CompilationOptions CommonWithCryptoPublicKey(ImmutableArray<byte> cryptoPublicKey);
protected abstract CompilationOptions CommonWithDelaySign(bool? delaySign);
protected abstract CompilationOptions CommonWithCheckOverflow(bool checkOverflow);
protected abstract CompilationOptions CommonWithMetadataImportOptions(MetadataImportOptions value);
/// <summary>
/// Performs validation of options compatibilities and generates diagnostics if needed
/// </summary>
protected abstract void ValidateOptions(ArrayBuilder<Diagnostic> builder);
// Shared (language-independent) validation of the signing-related options;
// appends a diagnostic for every inconsistent combination found.
internal void ValidateOptions(ArrayBuilder<Diagnostic> builder, CommonMessageProvider messageProvider)
{
    // A directly supplied public key is mutually exclusive with both the
    // key file and the key container options.
    if (!CryptoPublicKey.IsEmpty)
    {
        if (CryptoKeyFile != null)
        {
            builder.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_MutuallyExclusiveOptions,
                Location.None, nameof(CryptoPublicKey), nameof(CryptoKeyFile)));
        }
        if (CryptoKeyContainer != null)
        {
            builder.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_MutuallyExclusiveOptions,
                Location.None, nameof(CryptoPublicKey), nameof(CryptoKeyContainer)));
        }
    }
    // Public signing requires an absolute key file path (if one is given)
    // and cannot be combined with a key container or delay signing.
    if (PublicSign)
    {
        if (CryptoKeyFile != null && !PathUtilities.IsAbsolute(CryptoKeyFile))
        {
            builder.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_OptionMustBeAbsolutePath,
                Location.None, nameof(CryptoKeyFile)));
        }
        if (CryptoKeyContainer != null)
        {
            builder.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_MutuallyExclusiveOptions,
                Location.None, nameof(PublicSign), nameof(CryptoKeyContainer)));
        }
        if (DelaySign == true)
        {
            builder.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_MutuallyExclusiveOptions,
                Location.None, nameof(PublicSign), nameof(DelaySign)));
        }
    }
}
/// <summary>
/// Errors collection related to an incompatible set of compilation options.
/// Lazily computed on first access.
/// </summary>
public ImmutableArray<Diagnostic> Errors => _lazyErrors.Value;
public abstract override bool Equals(object obj);

// Compares every language-agnostic option; the language-specific Equals
// overrides call this and then compare their own additional fields.
protected bool EqualsHelper(CompilationOptions other)
{
    if (object.ReferenceEquals(other, null))
    {
        return false;
    }

    // NOTE: StringComparison.Ordinal is used for type name comparisons, even for VB. That's because
    // a change in the canonical case should still change the option.
    bool equal =
        this.CheckOverflow == other.CheckOverflow &&
        this.ConcurrentBuild == other.ConcurrentBuild &&
        this.Deterministic == other.Deterministic &&
        this.CurrentLocalTime == other.CurrentLocalTime &&
        this.DebugPlusMode == other.DebugPlusMode &&
        string.Equals(this.CryptoKeyContainer, other.CryptoKeyContainer, StringComparison.Ordinal) &&
        string.Equals(this.CryptoKeyFile, other.CryptoKeyFile, StringComparison.Ordinal) &&
        this.CryptoPublicKey.SequenceEqual(other.CryptoPublicKey) &&
        this.DelaySign == other.DelaySign &&
        this.GeneralDiagnosticOption == other.GeneralDiagnosticOption &&
        string.Equals(this.MainTypeName, other.MainTypeName, StringComparison.Ordinal) &&
        this.MetadataImportOptions == other.MetadataImportOptions &&
        this.ReferencesSupersedeLowerVersions == other.ReferencesSupersedeLowerVersions &&
        string.Equals(this.ModuleName, other.ModuleName, StringComparison.Ordinal) &&
        this.OptimizationLevel == other.OptimizationLevel &&
        this.OutputKind == other.OutputKind &&
        this.Platform == other.Platform &&
        this.ReportSuppressedDiagnostics == other.ReportSuppressedDiagnostics &&
        string.Equals(this.ScriptClassName, other.ScriptClassName, StringComparison.Ordinal) &&
        // Diagnostic option maps are compared entry-wise (key and severity).
        this.SpecificDiagnosticOptions.SequenceEqual(other.SpecificDiagnosticOptions, (left, right) => (left.Key == right.Key) && (left.Value == right.Value)) &&
        this.WarningLevel == other.WarningLevel &&
        object.Equals(this.MetadataReferenceResolver, other.MetadataReferenceResolver) &&
        object.Equals(this.XmlReferenceResolver, other.XmlReferenceResolver) &&
        object.Equals(this.SourceReferenceResolver, other.SourceReferenceResolver) &&
        object.Equals(this.StrongNameProvider, other.StrongNameProvider) &&
        object.Equals(this.AssemblyIdentityComparer, other.AssemblyIdentityComparer) &&
        this.PublicSign == other.PublicSign;

    return equal;
}
public abstract override int GetHashCode();

// Hashes the same language-agnostic option set that EqualsHelper compares
// (keep the two member lists in sync), folded through nested Hash.Combine.
protected int GetHashCodeHelper()
{
    return Hash.Combine(this.CheckOverflow,
           Hash.Combine(this.ConcurrentBuild,
           Hash.Combine(this.Deterministic,
           Hash.Combine(this.CurrentLocalTime.GetHashCode(),
           Hash.Combine(this.DebugPlusMode,
           Hash.Combine(this.CryptoKeyContainer != null ? StringComparer.Ordinal.GetHashCode(this.CryptoKeyContainer) : 0,
           Hash.Combine(this.CryptoKeyFile != null ? StringComparer.Ordinal.GetHashCode(this.CryptoKeyFile) : 0,
           Hash.Combine(Hash.CombineValues(this.CryptoPublicKey, 16),
           Hash.Combine((int)this.GeneralDiagnosticOption,
           Hash.Combine(this.MainTypeName != null ? StringComparer.Ordinal.GetHashCode(this.MainTypeName) : 0,
           Hash.Combine((int)this.MetadataImportOptions,
           Hash.Combine(this.ReferencesSupersedeLowerVersions,
           Hash.Combine(this.ModuleName != null ? StringComparer.Ordinal.GetHashCode(this.ModuleName) : 0,
           Hash.Combine((int)this.OptimizationLevel,
           Hash.Combine((int)this.OutputKind,
           Hash.Combine((int)this.Platform,
           Hash.Combine(this.ReportSuppressedDiagnostics,
           Hash.Combine(this.ScriptClassName != null ? StringComparer.Ordinal.GetHashCode(this.ScriptClassName) : 0,
           Hash.Combine(Hash.CombineValues(this.SpecificDiagnosticOptions),
           Hash.Combine(this.WarningLevel,
           Hash.Combine(this.MetadataReferenceResolver,
           Hash.Combine(this.XmlReferenceResolver,
           Hash.Combine(this.SourceReferenceResolver,
           Hash.Combine(this.StrongNameProvider,
           Hash.Combine(this.AssemblyIdentityComparer,
           Hash.Combine(this.PublicSign, 0))))))))))))))))))))))))));
}
/// <summary>Value equality via the overridden <see cref="Equals(object)"/>; null-safe.</summary>
public static bool operator ==(CompilationOptions left, CompilationOptions right) =>
    object.Equals(left, right);
/// <summary>Negation of the equality operator; null-safe.</summary>
public static bool operator !=(CompilationOptions left, CompilationOptions right) =>
    !object.Equals(left, right);
}
}
| apache-2.0 |
zpao/buck | test/com/facebook/buck/android/MergeAssetsIntegrationTest.java | 2977 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import static org.junit.Assert.assertFalse;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class MergeAssetsIntegrationTest {
  @Rule public TemporaryPaths tmpFolder = new TemporaryPaths();

  private ProjectWorkspace workspace;

  @Before
  public void setUp() throws Exception {
    workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "android_multi_cell_resource", tmpFolder);
    workspace.setUp();
    AssumeAndroidPlatform.get(workspace).assumeSdkIsAvailable();
  }

  /** Reads the list_of_outputs.txt produced by building //:list_outputs. */
  private String readListOfOutputs() throws Exception {
    return workspace.getFileContents(
        "home/"
            + BuildTargetPaths.getGenPath(
                workspace.getProjectFileSystem(),
                BuildTargetFactory.newInstance("//:list_outputs"),
                "%s")
            + "/list_of_outputs.txt");
  }

  /** Asserts that no listed apk entry contains a ".." path segment. */
  private static void assertNoRelativePaths(String unzipOutput) {
    for (String line : unzipOutput.split("\n")) {
      assertFalse(
          String.format(
              "Apk entries should only contain normalized paths (Found '%s')", line.trim()),
          line.contains(".."));
    }
  }

  @Test
  public void testNoRelativePathsInOutputWithoutEmbeddedCells() throws Exception {
    workspace.runBuckCommand(workspace.getPath("home"), "build", ":list_outputs").assertSuccess();
    // Previously this test duplicated the output-reading code and asserted
    // without a diagnostic message; both tests now share the helpers above.
    assertNoRelativePaths(readListOfOutputs());
  }

  @Test
  public void testNoRelativePathsInOutputWithEmbeddedCells() throws Exception {
    workspace
        .runBuckCommand(
            workspace.getPath("home"),
            "build",
            ":list_outputs",
            "--config",
            "project.embedded_cell_buck_out_enabled=true")
        .assertSuccess();
    assertNoRelativePaths(readListOfOutputs());
  }
}
| apache-2.0 |
mifos/1.4.x | testFramework/src/main/java/org/mifos/test/acceptance/framework/util/AcceptanceDatabaseTestUtils.java | 1847 | /*
* Copyright (c) 2005-2009 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.test.acceptance.framework.util;
import java.io.IOException;
import java.io.StringReader;
import java.sql.SQLException;
import org.dbunit.DatabaseUnitException;
import org.dbunit.database.DatabaseDataSourceConnection;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSet;
import org.dbunit.operation.DatabaseOperation;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
public class AcceptanceDatabaseTestUtils {

    /**
     * Removes all rows from the given table by performing a DbUnit
     * CLEAN_INSERT with an empty data set that mentions only that table.
     *
     * @param tableName  name of the table to clear
     * @param dataSource data source pointing at the target database
     */
    public void deleteDataFromTable(String tableName, DriverManagerDataSource dataSource) throws IOException, DataSetException, SQLException, DatabaseUnitException {
        String emptyTableXml = "<dataset><" + tableName + "/></dataset>";
        IDataSet emptyDataSet = new FlatXmlDataSet(new StringReader(emptyTableXml));
        IDatabaseConnection databaseConnection = new DatabaseDataSourceConnection(dataSource);
        DatabaseOperation.CLEAN_INSERT.execute(databaseConnection, emptyDataSet);
    }
}
| apache-2.0 |
rashmithajajur/PrivateCloud-master | src/main/java/com/privatecloud/users/dao/DBConnection.java | 923 | package com.privatecloud.users.dao;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
 * Lazily-initialized singleton holding a single JDBC connection to the
 * CMPE283 MySQL database.
 */
public class DBConnection {
    static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
    static final String DB_URL = "jdbc:mysql://localhost/CMPE283";
    // Database credentials.
    // SECURITY NOTE(review): credentials are hard-coded in source; consider
    // loading them from configuration or environment variables instead.
    static final String USER = "root";
    static final String PASS = "root";

    // Shared connection; remains null if the driver or connection fails.
    public Connection connection = null;

    private static DBConnection instance;

    private DBConnection() {
        try {
            Class.forName(JDBC_DRIVER);
            connection = DriverManager.getConnection(DB_URL, USER, PASS);
        } catch (ClassNotFoundException e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the shared instance, creating it on first use.
     *
     * <p>BUG FIX: the lazy initialization was not thread-safe — two threads
     * racing through the null check could each create an instance (and a
     * database connection). The method is now synchronized.
     */
    public static synchronized DBConnection getInstance() {
        if (instance == null) {
            instance = new DBConnection();
        }
        return instance;
    }
}
| apache-2.0 |
asaaki/yyid.rs | examples/release-tag.rs | 262 | // Awesome simple way to generate a tag based on current cargo package version
fn main() {
    // Build the signed-tag command for the current crate version and print it.
    let version = env!("CARGO_PKG_VERSION");
    let command = format!(
        r#"git tag --sign --create-reflog --message "Release v{0}" v{0}"#,
        version
    );
    println!("{}", command);
}
| apache-2.0 |
saga810203/jfw | jfw-core/src/main/java/org/jfw/core/code/webmvc/handler/RemoveSessionAttributeHandler.java | 628 | package org.jfw.core.code.webmvc.handler;
import org.jfw.core.code.generator.annotations.webmvc.RemoveSessionAttribute;
import org.jfw.core.code.webmvc.Handler;
public class RemoveSessionAttributeHandler extends Handler{
    /**
     * Emits generated code that removes the annotated attribute from the HTTP
     * session before the handler method body runs.
     *
     * @param sb buffer receiving the generated Java source
     */
    @Override
    public void appendBeforCode(StringBuilder sb) {
        RemoveSessionAttribute sa = this.getMethodAnnotation(RemoveSessionAttribute.class);
        // Skip when the annotation is absent or the method's return type is
        // void. NOTE(review): skipping for void returns looks inverted for a
        // "remove attribute" side effect — confirm the intended condition.
        if (null == sa || (this.getCmcg().getMethod().getReturnType() == void.class)) return;
        this.getCmcg().readSession();
        // BUG FIX: the generated call was misspelled "removeAttriubte", which
        // would emit code calling a nonexistent method; the session API name
        // is "removeAttribute".
        sb.append("session.removeAttribute(\"").append(sa.value().trim()).append("\");\r\n");
    }
}
| apache-2.0 |
xushaomin/appleframework | apple-commons/src/main/java/com/appleframework/tools/lang/hash/MurmurHash.java | 8442 | package com.appleframework.tools.lang.hash;
import com.appleframework.tools.util.CharsetUtil;
import com.appleframework.tools.util.StrUtil;
import java.io.Serializable;
import java.nio.charset.Charset;
/**
 * Murmur3 32-bit, 64-bit and 128-bit hash implementations.<br>
 * This implementation comes from:
 * https://github.com/xlturing/Simhash4J/blob/master/src/main/java/bee/simhash/main/Murmur3.java
 *
 * <p>
 * 32-bit Java port of https://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp#94 <br>
 * 128-bit Java port of https://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp#255
 * </p>
 *
 * @author cruise.xu,Simhash4J
 * @since 4.3.3
 */
public class MurmurHash implements Serializable{
    private static final long serialVersionUID = 1L;

    // Constants for 32 bit variant
    private static final int C1_32 = 0xcc9e2d51;
    private static final int C2_32 = 0x1b873593;
    private static final int R1_32 = 15;
    private static final int R2_32 = 13;
    private static final int M_32 = 5;
    private static final int N_32 = 0xe6546b64;

    // Constants for 128 bit variant
    private static final long C1 = 0x87c37b91114253d5L;
    private static final long C2 = 0x4cf5ad432745937fL;
    private static final int R1 = 31;
    private static final int R2 = 27;
    private static final int R3 = 33;
    private static final int M = 5;
    private static final int N1 = 0x52dce729;
    private static final int N2 = 0x38495ab5;

    // Seed used when the caller does not supply one.
    private static final int DEFAULT_SEED = 0;
    // Charset used to turn CharSequence input into bytes.
    private static final Charset DEFAULT_CHARSET = CharsetUtil.CHARSET_UTF_8;

    /**
     * Computes the Murmur3 32-bit hash of the given text (encoded with
     * {@link #DEFAULT_CHARSET}).
     *
     * @param data the data to hash
     * @return the 32-bit hash value
     */
    public static int hash32(CharSequence data) {
        return hash32(StrUtil.bytes(data, DEFAULT_CHARSET));
    }

    /**
     * Computes the Murmur3 32-bit hash of the given bytes with the default seed.
     *
     * @param data the data to hash
     * @return the 32-bit hash value
     */
    public static int hash32(byte[] data) {
        return hash32(data, data.length, DEFAULT_SEED);
    }

    /**
     * Computes the Murmur3 32-bit hash.
     *
     * @param data   the data to hash
     * @param length number of bytes of {@code data} to consume
     * @param seed   hash seed (0 by default)
     * @return the 32-bit hash value
     */
    public static int hash32(byte[] data, int length, int seed) {
        int hash = seed;
        final int nblocks = length >> 2;

        // body: process the input in 4-byte little-endian blocks
        for (int i = 0; i < nblocks; i++) {
            int i_4 = i << 2;
            int k = (data[i_4] & 0xff) //
                    | ((data[i_4 + 1] & 0xff) << 8) //
                    | ((data[i_4 + 2] & 0xff) << 16) //
                    | ((data[i_4 + 3] & 0xff) << 24);

            // mix functions
            k *= C1_32;
            k = Integer.rotateLeft(k, R1_32);
            k *= C2_32;
            hash ^= k;
            hash = Integer.rotateLeft(hash, R2_32) * M_32 + N_32;
        }

        // tail: remaining 1-3 bytes (cases intentionally fall through)
        int idx = nblocks << 2;
        int k1 = 0;
        switch (length - idx) {
            case 3:
                k1 ^= data[idx + 2] << 16;
                // intentional fall-through
            case 2:
                k1 ^= data[idx + 1] << 8;
                // intentional fall-through
            case 1:
                k1 ^= data[idx];

                // mix functions
                k1 *= C1_32;
                k1 = Integer.rotateLeft(k1, R1_32);
                k1 *= C2_32;
                hash ^= k1;
        }

        // finalization (avalanche)
        hash ^= length;
        hash ^= (hash >>> 16);
        hash *= 0x85ebca6b;
        hash ^= (hash >>> 13);
        hash *= 0xc2b2ae35;
        hash ^= (hash >>> 16);

        return hash;
    }

    /**
     * Computes the Murmur3 64-bit hash of the given text (encoded with
     * {@link #DEFAULT_CHARSET}).
     *
     * @param data the data to hash
     * @return the 64-bit hash value
     */
    public static long hash64(CharSequence data) {
        return hash64(StrUtil.bytes(data, DEFAULT_CHARSET));
    }

    /**
     * Murmur3 64-bit variant with the default seed.<br>
     * This is essentially MSB 8 bytes of Murmur3 128-bit variant.
     *
     * @param data the data to hash
     * @return the 64-bit hash value
     */
    public static long hash64(byte[] data) {
        return hash64(data, data.length, DEFAULT_SEED);
    }

    /**
     * Murmur3 64-bit variant.<br>
     * This is essentially MSB 8 bytes of Murmur3 128-bit variant.
     *
     * @param data   the data to hash
     * @param length number of bytes of {@code data} to consume
     * @param seed   hash seed (0 by default)
     * @return the 64-bit hash value
     */
    public static long hash64(byte[] data, int length, int seed) {
        long hash = seed;
        final int nblocks = length >> 3;

        // body: process the input in 8-byte little-endian blocks
        for (int i = 0; i < nblocks; i++) {
            final int i8 = i << 3;
            long k = ((long) data[i8] & 0xff) //
                    | (((long) data[i8 + 1] & 0xff) << 8) //
                    | (((long) data[i8 + 2] & 0xff) << 16) //
                    | (((long) data[i8 + 3] & 0xff) << 24) //
                    | (((long) data[i8 + 4] & 0xff) << 32)//
                    | (((long) data[i8 + 5] & 0xff) << 40) //
                    | (((long) data[i8 + 6] & 0xff) << 48) //
                    | (((long) data[i8 + 7] & 0xff) << 56);

            // mix functions
            k *= C1;
            k = Long.rotateLeft(k, R1);
            k *= C2;
            hash ^= k;
            hash = Long.rotateLeft(hash, R2) * M + N1;
        }

        // tail: remaining 1-7 bytes (cases intentionally fall through)
        long k1 = 0;
        int tailStart = nblocks << 3;
        switch (length - tailStart) {
            case 7:
                k1 ^= ((long) data[tailStart + 6] & 0xff) << 48;
            case 6:
                k1 ^= ((long) data[tailStart + 5] & 0xff) << 40;
            case 5:
                k1 ^= ((long) data[tailStart + 4] & 0xff) << 32;
            case 4:
                k1 ^= ((long) data[tailStart + 3] & 0xff) << 24;
            case 3:
                k1 ^= ((long) data[tailStart + 2] & 0xff) << 16;
            case 2:
                k1 ^= ((long) data[tailStart + 1] & 0xff) << 8;
            case 1:
                k1 ^= ((long) data[tailStart] & 0xff);
                k1 *= C1;
                k1 = Long.rotateLeft(k1, R1);
                k1 *= C2;
                hash ^= k1;
        }

        // finalization
        hash ^= length;
        hash = fmix64(hash);

        return hash;
    }

    /**
     * Computes the Murmur3 128-bit hash of the given text (encoded with
     * {@link #DEFAULT_CHARSET}).
     *
     * @param data the data to hash
     * @return the hash value (2 longs)
     */
    public static long[] hash128(CharSequence data) {
        return hash128(StrUtil.bytes(data, DEFAULT_CHARSET));
    }

    /**
     * Murmur3 128-bit variant with the default seed.
     *
     * @param data the data to hash
     * @return the hash value (2 longs)
     */
    public static long[] hash128(byte[] data) {
        return hash128(data, data.length, DEFAULT_SEED);
    }

    /**
     * Murmur3 128-bit variant.
     *
     * @param data   the data to hash
     * @param length number of bytes of {@code data} to consume
     * @param seed   hash seed (0 by default)
     * @return the hash value (2 longs)
     */
    public static long[] hash128(byte[] data, int length, int seed) {
        long h1 = seed;
        long h2 = seed;
        final int nblocks = length >> 4;

        // body: process the input in 16-byte blocks (two little-endian longs)
        for (int i = 0; i < nblocks; i++) {
            final int i16 = i << 4;
            long k1 = ((long) data[i16] & 0xff) //
                    | (((long) data[i16 + 1] & 0xff) << 8) //
                    | (((long) data[i16 + 2] & 0xff) << 16) //
                    | (((long) data[i16 + 3] & 0xff) << 24) //
                    | (((long) data[i16 + 4] & 0xff) << 32) //
                    | (((long) data[i16 + 5] & 0xff) << 40) //
                    | (((long) data[i16 + 6] & 0xff) << 48) //
                    | (((long) data[i16 + 7] & 0xff) << 56);

            long k2 = ((long) data[i16 + 8] & 0xff) //
                    | (((long) data[i16 + 9] & 0xff) << 8) //
                    | (((long) data[i16 + 10] & 0xff) << 16) //
                    | (((long) data[i16 + 11] & 0xff) << 24) //
                    | (((long) data[i16 + 12] & 0xff) << 32) //
                    | (((long) data[i16 + 13] & 0xff) << 40) //
                    | (((long) data[i16 + 14] & 0xff) << 48) //
                    | (((long) data[i16 + 15] & 0xff) << 56);

            // mix functions for k1
            k1 *= C1;
            k1 = Long.rotateLeft(k1, R1);
            k1 *= C2;
            h1 ^= k1;
            h1 = Long.rotateLeft(h1, R2);
            h1 += h2;
            h1 = h1 * M + N1;

            // mix functions for k2
            k2 *= C2;
            k2 = Long.rotateLeft(k2, R3);
            k2 *= C1;
            h2 ^= k2;
            h2 = Long.rotateLeft(h2, R1);
            h2 += h1;
            h2 = h2 * M + N2;
        }

        // tail: remaining 1-15 bytes (cases intentionally fall through;
        // bytes 9-15 feed k2, bytes 1-8 feed k1)
        long k1 = 0;
        long k2 = 0;
        int tailStart = nblocks << 4;
        switch (length - tailStart) {
            case 15:
                k2 ^= (long) (data[tailStart + 14] & 0xff) << 48;
            case 14:
                k2 ^= (long) (data[tailStart + 13] & 0xff) << 40;
            case 13:
                k2 ^= (long) (data[tailStart + 12] & 0xff) << 32;
            case 12:
                k2 ^= (long) (data[tailStart + 11] & 0xff) << 24;
            case 11:
                k2 ^= (long) (data[tailStart + 10] & 0xff) << 16;
            case 10:
                k2 ^= (long) (data[tailStart + 9] & 0xff) << 8;
            case 9:
                k2 ^= data[tailStart + 8] & 0xff;
                k2 *= C2;
                k2 = Long.rotateLeft(k2, R3);
                k2 *= C1;
                h2 ^= k2;
            case 8:
                k1 ^= (long) (data[tailStart + 7] & 0xff) << 56;
            case 7:
                k1 ^= (long) (data[tailStart + 6] & 0xff) << 48;
            case 6:
                k1 ^= (long) (data[tailStart + 5] & 0xff) << 40;
            case 5:
                k1 ^= (long) (data[tailStart + 4] & 0xff) << 32;
            case 4:
                k1 ^= (long) (data[tailStart + 3] & 0xff) << 24;
            case 3:
                k1 ^= (long) (data[tailStart + 2] & 0xff) << 16;
            case 2:
                k1 ^= (long) (data[tailStart + 1] & 0xff) << 8;
            case 1:
                k1 ^= data[tailStart] & 0xff;
                k1 *= C1;
                k1 = Long.rotateLeft(k1, R1);
                k1 *= C2;
                h1 ^= k1;
        }

        // finalization
        h1 ^= length;
        h2 ^= length;

        h1 += h2;
        h2 += h1;

        h1 = fmix64(h1);
        h2 = fmix64(h2);

        h1 += h2;
        h2 += h1;

        return new long[] { h1, h2 };
    }

    // Final 64-bit avalanche mix (fmix64 from the reference implementation).
    private static long fmix64(long h) {
        h ^= (h >>> 33);
        h *= 0xff51afd7ed558ccdL;
        h ^= (h >>> 33);
        h *= 0xc4ceb9fe1a85ec53L;
        h ^= (h >>> 33);
        return h;
    }
}
| apache-2.0 |
wolfdog007/aruzhev | chapter_002/src/main/java/ru/job4j/battlegame/units/human/Crossbowman.java | 2215 | package ru.job4j.battlegame.units.human;
import ru.job4j.battlegame.Squad;
import ru.job4j.battlegame.units.AbstractUnit;
import java.util.Random;
/**
* CrossBowman.
*
* @author Ruzhev Alexander
* @since on 24.06.2017.
*/
public class Crossbowman extends AbstractUnit {
    /** Display name of this unit. */
    private static final String NAME = "human crossbow";
    /** Base damage of the melee attack. */
    private double damage = 3;
    /** Base damage of the ranged (crossbow) attack. */
    private double rangeDamage = 5;

    /** Creates a crossbowman with the standard name. */
    public Crossbowman() {
        super(NAME);
    }

    /**
     * First action: ranged crossbow attack.
     *
     * @param unit attacked unit
     */
    @Override
    public void firstAction(AbstractUnit unit) {
        attack(unit, rangeDamage, "attack with crossbow");
    }

    /**
     * Second action: melee attack.
     *
     * @param unit attacked unit
     */
    @Override
    public void secondAction(AbstractUnit unit) {
        attack(unit, damage, "attacks");
    }

    /**
     * Applies an attack: damage is boosted by 1.5x when improved and divided
     * by the current disease factor, then disease progresses if active.
     */
    private void attack(AbstractUnit target, double baseDamage, String message) {
        double boosted = getImproved() ? baseDamage * 1.5 : baseDamage;
        double realDamage = boosted / getDisease();
        printToConsole(target, realDamage, message);
        target.toDamage(realDamage);
        if (getDisease() != 1) {
            toDisease();
        }
    }

    /**
     * Picks a random enemy and, with roughly even odds, performs either the
     * ranged or the melee attack against it.
     *
     * @param lightSquad ally squad
     * @param darkSquad  enemy squad
     */
    @Override
    public void randomAction(Squad lightSquad, Squad darkSquad) {
        Random random = new Random();
        boolean useRanged = random.nextInt(50) < 25;
        AbstractUnit target = darkSquad.getSquad()[random.nextInt(darkSquad.getSquad().length)];
        if (useRanged) {
            firstAction(target);
        } else {
            secondAction(target);
        }
    }

    /**
     * Creates a fresh crossbowman.
     *
     * @return new clone
     */
    @Override
    public AbstractUnit clone() {
        return new Crossbowman();
    }
}
| apache-2.0 |
micrometer-metrics/micrometer | micrometer-core/src/main/java/io/micrometer/core/instrument/binder/BaseUnits.java | 2134 | /*
* Copyright 2017 VMware, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.core.instrument.binder;
/**
* Base units constants for convenience.
*
* @author Johnny Lim
*/
/**
 * Base unit name constants for convenience; values are the canonical
 * lower-case unit strings used when registering meters.
 */
public final class BaseUnits {

    /** Bytes ({@code "bytes"}). */
    public static final String BYTES = "bytes";
    /** Rows ({@code "rows"}). */
    public static final String ROWS = "rows";
    /** Tasks ({@code "tasks"}). */
    public static final String TASKS = "tasks";
    /** Threads ({@code "threads"}). */
    public static final String THREADS = "threads";
    /** Classes ({@code "classes"}). */
    public static final String CLASSES = "classes";
    /** Buffers ({@code "buffers"}). */
    public static final String BUFFERS = "buffers";
    /** Events ({@code "events"}). */
    public static final String EVENTS = "events";
    /** Files ({@code "files"}). */
    public static final String FILES = "files";
    /** Sessions ({@code "sessions"}). */
    public static final String SESSIONS = "sessions";
    /** Milliseconds ({@code "ms"}). */
    public static final String MILLISECONDS = "ms";
    /** Messages ({@code "messages"}). */
    public static final String MESSAGES = "messages";
    /** Connections ({@code "connections"}). */
    public static final String CONNECTIONS = "connections";
    /** Operations ({@code "operations"}). */
    public static final String OPERATIONS = "operations";
    /** Percent ({@code "percent"}). */
    public static final String PERCENT = "percent";
    /**
     * Objects ({@code "objects"}).
     * @since 1.6.0
     */
    public static final String OBJECTS = "objects";

    // Constants holder; not instantiable.
    private BaseUnits() {
    }
}
| apache-2.0 |
Eduworks/decals-ui | decals-ui/src/com/eduworks/decals/ui/client/pagebuilder/screen/DsTeacherHomeScreen.java | 1009 | package com.eduworks.decals.ui.client.pagebuilder.screen;
import com.eduworks.decals.ui.client.DsSession;
import com.eduworks.decals.ui.client.handler.DsHeaderHandler;
import com.eduworks.decals.ui.client.pagebuilder.DecalsScreen;
import com.eduworks.gwt.client.pagebuilder.PageAssembler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.HTML;
/**
*
* Stub for the Teacher home screen
*
* @author Eduworks Corporation
*
*/
public class DsTeacherHomeScreen extends DecalsScreen {
/**
 * Renders the teacher home template, reveals the teacher panel, and fills the
 * header with the signed-in user's name and email.
 */
@Override
public void display() {
    // Load the teacher home HTML template and assemble the page contents.
    PageAssembler.ready(new HTML(getTemplates().getTeacherHomePanel().getText()));
    PageAssembler.buildContents();
    // Make the "teacher" element visible.
    DOM.getElementById("teacher").setAttribute("style", "display:block;");
    // Populate the shared header from the current session's user.
    DsHeaderHandler dhh = new DsHeaderHandler(getDispatcher());
    dhh.setUpHeader(DsSession.getUser().getFirstName(), DsSession.getUser().getEmailAddress());
}
/** No-op: this screen requires no cleanup when it loses focus. */
@Override
public void lostFocus() {}
} | apache-2.0 |
natefinch/claymud | world/player_test.go | 33 | package world
// Intentionally empty for now: placeholder for player tests.
// (Removed the empty `import ()` and `var ()` declarations, which were dead
// code flagged by linters.)
| apache-2.0 |
joy-inc/core-http | src/main/java/com/joy/http/volley/Network.java | 1054 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.joy.http.volley;
import com.joy.http.JoyError;
/**
 * An interface for performing requests.
 */
public interface Network {
    /**
     * Performs the specified request synchronously, returning its response.
     *
     * @param request Request to process
     * @return A {@link NetworkResponse} with data and caching metadata; will never be null
     * @throws JoyError on errors
     */
    NetworkResponse performRequest(Request<?> request) throws JoyError;
}
| apache-2.0 |
Primechain/yobichain | yobichain-web/create_user_process.php | 1904 | <?php
include_once("primechain_functions/error_reporting.php");
include_once("primechain_functions/crud_engine.php");
include_once("primechain_functions/blockchain_engine.php");
include_once("primechain_functions/helperFunctions.php");
include_once("classes/classes.php");

$crud_engine = new crudEngine();
$blockchain_engine = new blockchainEngine();

try
{
    $user = new User();
    $user_email = htmlspecialchars($_POST['user_email'], ENT_QUOTES);

    // Does a user with this email address exist in the database?
    $doesEmailExist = $crud_engine->doesEmailExist($user_email);
    if ($doesEmailExist === true) {
        header("location:create_user.php?role_code=create_user&msg=4");
        // BUG FIX: header() does not stop the script. Without exit, execution
        // continued past this redirect and the duplicate user was still
        // inserted below.
        exit;
    }

    // Copy the sanitized POST fields that match User properties.
    foreach ($_POST as $key => $value) {
        if (property_exists($user, $key) && !empty($value) && !is_null($value)) {
            $user->{$key} = htmlspecialchars($value, ENT_QUOTES);
        }
    }
    $user->random = $crud_engine->random_str(40);

    $crud_engine->beginTransaction();

    // Insert new user details into database.
    if (!$crud_engine->createNewUser($user))
        throw new Exception("128", 1);
    $user->user_id = $crud_engine->getUserIdFromUserEmail($user->user_email);

    // Send activation email to newly created user.
    include_once("primechain_functions/sendgridemail/notification_grid.php");
    $notificationEngine = new notificationEngine();
    // NOTE(review): $orgnName is not defined in this script — presumably set
    // by one of the included files; confirm it is always available here.
    $sendActivationEmail = $notificationEngine->sendActivationEmail($user, $orgnName);

    $crud_engine->commit();
    header("location:create_user.php?msg=3");
    exit;
}
catch (Exception $e)
{
    // Roll back the transaction and redirect with an error code: numeric
    // exception messages are application error codes, anything else is an
    // internal error (logged, reported as code 56).
    $crud_engine->rollBack();
    $ex_msg = $e->getMessage();
    $error_redirect_url_prefix = "create_user.php?msg=";
    if (is_numeric($ex_msg)) {
        header("location:" . $error_redirect_url_prefix . $ex_msg);
    }
    else {
        error_log("internal error: " . $ex_msg);
        header("location:" . $error_redirect_url_prefix . "56");
    }
    exit;
}
?> | apache-2.0 |
google/sling | sling/task/frames.cc | 4605 | // Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "sling/task/frames.h"
#include "sling/base/logging.h"
#include "sling/frame/encoder.h"
#include "sling/frame/decoder.h"
#include "sling/frame/object.h"
#include "sling/frame/reader.h"
#include "sling/frame/serialization.h"
#include "sling/frame/store.h"
#include "sling/frame/wire.h"
#include "sling/stream/file.h"
#include "sling/stream/memory.h"
#include "sling/task/task.h"
namespace sling {
namespace task {
// One-time setup before any messages arrive: builds and freezes the commons
// store, binds names, runs the subclass Startup hook, and initializes the
// per-task statistics counters.
void FrameProcessor::Start(Task *task) {
  // Create commons store.
  commons_ = new Store();

  // Load commons store from file.
  for (Binding *binding : task->GetInputs("commons")) {
    LoadStore(binding->resource()->name(), commons_);
  }

  // Get output channel (optional).
  output_ = task->GetSink("output");

  // Bind names.
  InitCommons(task);
  CHECK(names_.Bind(commons_));

  // Start up processor.
  Startup(task);

  // Freeze commons store (it is shared read-only by all per-message stores).
  commons_->Freeze();

  // Update statistics for common store.
  MemoryUsage usage;
  commons_->GetMemoryUsage(&usage, true);
  task->GetCounter("commons_memory")->Increment(usage.memory_used());
  task->GetCounter("commons_handles")->Increment(usage.used_handles());
  task->GetCounter("commons_symbols")->Increment(usage.num_symbols());
  task->GetCounter("commons_gcs")->Increment(usage.num_gcs);
  task->GetCounter("commons_gctime")->Increment(usage.gc_time);

  // Get counters for frame stores.
  frame_memory_ = task->GetCounter("frame_memory");
  frame_handles_ = task->GetCounter("frame_handles");
  frame_symbols_ = task->GetCounter("frame_symbols");
  frame_gcs_ = task->GetCounter("frame_gcs");
  frame_gctime_ = task->GetCounter("frame_gctime");
}
// Handles one incoming message: decodes it into a frame in a per-message
// store layered on the commons, dispatches to Process(), records store
// statistics, and takes ownership of (deletes) the message.
void FrameProcessor::Receive(Channel *channel, Message *message) {
  // Create store for frame.
  Store store(commons_);

  // Decode frame from message.
  Frame frame = DecodeMessage(&store, message);
  CHECK(frame.valid());

  // Process frame.
  Process(message->key(), frame);

  // Update statistics.
  MemoryUsage usage;
  store.GetMemoryUsage(&usage, true);
  frame_memory_->Increment(usage.memory_used());
  frame_handles_->Increment(usage.used_handles());
  frame_symbols_->Increment(usage.num_symbols());
  frame_gcs_->Increment(usage.num_gcs);
  frame_gctime_->Increment(usage.gc_time);

  // Delete input message.
  delete message;
}
// Teardown after all messages have been processed: gives the subclass a
// chance to flush, then releases the commons store.
void FrameProcessor::Done(Task *task) {
  // Flush output.
  Flush(task);

  // Delete commons store.
  delete commons_;
  commons_ = nullptr;
}
// Sends the object on the output channel under the given key (deep encoding).
// Requires an "output" sink to have been configured.
void FrameProcessor::Output(Text key, const Object &value) {
  CHECK(output_ != nullptr);
  output_->Send(CreateMessage(key, value));
}
// Sends the frame on the output channel keyed by its id (deep encoding).
void FrameProcessor::Output(const Frame &value) {
  CHECK(output_ != nullptr);
  output_->Send(CreateMessage(value));
}
// Like Output(key, value) but encodes in the encoder's shallow mode.
void FrameProcessor::OutputShallow(Text key, const Object &value) {
  CHECK(output_ != nullptr);
  output_->Send(CreateMessage(key, value, true));
}
// Like Output(frame) but encodes in the encoder's shallow mode.
void FrameProcessor::OutputShallow(const Frame &value) {
  CHECK(output_ != nullptr);
  output_->Send(CreateMessage(value, true));
}
// Default no-op implementations of the subclass hooks; override as needed.
void FrameProcessor::InitCommons(Task *task) {}
void FrameProcessor::Startup(Task *task) {}
void FrameProcessor::Process(Slice key, const Frame &frame) {}
void FrameProcessor::Flush(Task *task) {}
// Serializes the object into a new heap-allocated message with the given key.
// When |shallow| is true, the encoder's shallow mode is enabled (see
// Encoder::set_shallow for the exact encoding semantics). Caller owns the
// returned message.
Message *CreateMessage(Text key, const Object &object, bool shallow) {
  ArrayOutputStream stream;
  Output output(&stream);
  Encoder encoder(object.store(), &output);
  encoder.set_shallow(shallow);
  encoder.Encode(object);
  output.Flush();
  return new Message(Slice(key.data(), key.size()), stream.data());
}
// Convenience overload: serializes the frame keyed by its own id.
Message *CreateMessage(const Frame &frame, bool shallow) {
  return CreateMessage(frame.Id(), frame, shallow);
}
// Decodes a message payload into a frame in the given store. The first byte
// selects the format: WIRE_BINARY_MARKER means the binary wire encoding,
// anything else is parsed as the textual frame syntax.
Frame DecodeMessage(Store *store, Message *message) {
  ArrayInputStream stream(message->value().data(), message->value().size());
  Input input(&stream);
  if (input.Peek() == WIRE_BINARY_MARKER) {
    Decoder decoder(store, &input);
    return decoder.Decode().AsFrame();
  } else {
    Reader reader(store, &input);
    return reader.Read().AsFrame();
  }
}
} // namespace task
} // namespace sling
| apache-2.0 |
TakahikoKawasaki/Android-OpenGL-Friendly-API | src/com/neovisionaries/android/opengl/impl/GLESImpl11.java | 6776 | /*
* Copyright (C) 2012 Neo Visionaries Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.neovisionaries.android.opengl.impl;
import java.nio.Buffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import android.opengl.GLES11;
import com.neovisionaries.android.opengl.GLES;
/**
* An implementation of {@link GLES} interface for OpenGL ES 1.1.
*
* @author Takahiko Kawasaki
*
* @see <a href="http://developer.android.com/reference/android/opengl/GLES11.html">android.opengl.GLES11</a>
*/
public class GLESImpl11 extends GLESImplBase
{
/** Returns the OpenGL ES major version this class implements: 1 (ES 1.1). */
@Override
public int getMajorVersion()
{
    // OpenGL ES 1.1.
    return 1;
}
/** Delegates to {@link GLES11#glActiveTexture(int)}. */
@Override
public void glActiveTexture(int textureUnit)
{
    GLES11.glActiveTexture(textureUnit);
}
@Override
public void glBindBuffer(int vertexBufferType, int vertexBufferId)
{
GLES11.glBindBuffer(vertexBufferType, vertexBufferId);
}
@Override
public void glBindTexture(int textureType, int textureId)
{
GLES11.glBindTexture(textureType, textureId);
}
@Override
public void glBlendFunc(int sourceFactor, int destinationFactor)
{
GLES11.glBlendFunc(sourceFactor, destinationFactor);
}
@Override
public void glBufferData(int vertexBufferType, int size, Buffer data, int vertexBufferUsage)
{
GLES11.glBufferData(vertexBufferType, size, data, vertexBufferUsage);
}
@Override
public void glBufferSubData(int vertexBufferType, int offset, int size, Buffer data)
{
GLES11.glBufferSubData(vertexBufferType, offset, size, data);
}
@Override
public void glClear(int mask)
{
GLES11.glClear(mask);
}
@Override
public void glClearColor(float red, float green, float blue, float alpha)
{
GLES11.glClearColor(red, green, blue, alpha);
}
@Override
public void glClearDepthf(float depth)
{
GLES11.glClearDepthf(depth);
}
@Override
public void glClearStencil(int stencil)
{
GLES11.glClearStencil(stencil);
}
@Override
public void glColorMask(boolean red, boolean green, boolean blue, boolean alpha)
{
GLES11.glColorMask(red, green, blue, alpha);
}
@Override
public void glDeleteBuffers(int count, int[] vertexBufferIds, int offset)
{
GLES11.glDeleteBuffers(count, vertexBufferIds, offset);
}
@Override
public void glDeleteTextures(int count, int[] textureIds, int offset)
{
GLES11.glDeleteTextures(count, textureIds, offset);
}
@Override
public void glDepthMask(boolean writable)
{
GLES11.glDepthMask(writable);
}
@Override
public void glDisable(int capability)
{
GLES11.glDisable(capability);
}
@Override
public void glDrawArrays(int mode, int first, int count)
{
GLES11.glDrawArrays(mode, first, count);
}
@Override
public void glDrawElements(int mode, int count, int type, Buffer indices)
{
GLES11.glDrawElements(mode, count, type, indices);
}
@Override
public void glDrawElements(int mode, int count, int type, int offset)
{
GLES11.glDrawElements(mode, count, type, offset);
}
@Override
public void glEnable(int capability)
{
GLES11.glEnable(capability);
}
@Override
public void glFinish()
{
GLES11.glFinish();
}
@Override
public void glFlush()
{
GLES11.glFlush();
}
@Override
public void glGenBuffers(int vertexBufferType, int[] vertexBufferIds, int offset)
{
GLES11.glGenBuffers(vertexBufferType, vertexBufferIds, offset);
}
@Override
public void glGenTextures(int textureType, int[] textureIds, int offset)
{
GLES11.glGenTextures(textureType, textureIds, offset);
}
@Override
public void glGetBooleanv(int parameterId, IntBuffer params)
{
GLES11.glGetBooleanv(parameterId, params);
}
@Override
public void glGetBooleanv(int parameterId, boolean[] params, int offset)
{
GLES11.glGetBooleanv(parameterId, params, offset);
}
@Override
public int glGetError()
{
return GLES11.glGetError();
}
@Override
public void glGetFloatv(int parameterId, FloatBuffer params)
{
GLES11.glGetFloatv(parameterId, params);
}
@Override
public void glGetFloatv(int parameterId, float[] params, int offset)
{
GLES11.glGetFloatv(parameterId, params, offset);
}
@Override
public void glGetIntegerv(int parameterId, IntBuffer params)
{
GLES11.glGetIntegerv(parameterId, params);
}
@Override
public void glGetIntegerv(int parameterId, int[] params, int offset)
{
GLES11.glGetIntegerv(parameterId, params, offset);
}
@Override
public String glGetString(int parameterId)
{
return GLES11.glGetString(parameterId);
}
@Override
public boolean glIsEnabled(int capability)
{
return GLES11.glIsEnabled(capability);
}
@Override
public void glStencilFunc(int comparisonOperator, int referenceValue, int mask)
{
GLES11.glStencilFunc(comparisonOperator, referenceValue, mask);
}
@Override
public void glStencilMask(int stencilMask)
{
GLES11.glStencilMask(stencilMask);
}
@Override
public void glStencilOp(int stencilFail, int depthFail, int pass)
{
GLES11.glStencilOp(stencilFail, depthFail, pass);
}
@Override
public void glTexParameteri(int textureType, int parameterId, int parameterValue)
{
GLES11.glTexParameteri(textureType, parameterId, parameterValue);
}
@Override
public void glViewport(int x, int y, int width, int height)
{
GLES11.glViewport(x, y, width, height);
}
}
| apache-2.0 |
dhgarrette/2015-ccg-parsing | src/main/scala/dhg/ccg/tag/learn/EmHmm.scala | 31283 |
package dhg.ccg.tag.learn
import scala.annotation.tailrec
import scala.collection.breakOut
import scala.math.abs
import scala.math.exp
import scala.math.log
import dhg.util._
import dhg.util.FastMathUtil._
import dhg.ccg.prob.ConditionalLogProbabilityDistribution
import dhg.ccg.tag._
import dhg.ccg.tagdict.TagDictionary
import dhg.ccg.tagdict.SimpleTagDictionary
/**
 * Type-supervised training: only raw sentences and a tag dictionary are
 * available. Transition and emission distributions are initialized from the
 * raw data, then handed to the semi-supervised learner.
 */
class SimpleTypeSupervisedTaggerTrainer[Tag](
  learner: SemisupervisedTaggerTrainer[Tag],
  trInitializer: TransitionInitializer[Tag], emInitializer: EmissionInitializer[Tag])
  extends TypeSupervisedTaggerTrainer[Tag] {

  override def typesupTrain(rawSentences: Vector[Vector[Word]], initialTagdict: TagDictionary[Tag]): Tagger[Tag] = {
    // Extend the dictionary so every raw token is a known word.
    val dict = initialTagdict.withWords(rawSentences.flatten.toSet)
    println("Make Transition Distributions")
    val trDist = trInitializer.fromRaw(rawSentences, dict)
    println("Make Emission Distributions")
    val emDist = emInitializer.fromRaw(rawSentences, dict)
    learner.train(rawSentences, dict, trDist, emDist)
  }

  override final def toString = f"SimpleTypeSupervisedTaggerTrainer($learner, $trInitializer, $emInitializer)"
}
/**
 * Noisily-supervised training: labeled sentences are available but the labels
 * may be wrong. Initial distributions are estimated from the pooled noisy and
 * gold data, then refined by the semi-supervised learner (gold kept separate).
 */
class SimpleNoisilySupervisedTaggerTrainer[Tag](
  learner: SemisupervisedTaggerTrainer[Tag],
  transitionDistributioner: TransitionDistributioner[Tag], emissionDistributioner: EmissionDistributioner[Tag])
  extends NoisilySupervisedTaggerTrainer[Tag] {

  override def noisySupTrainWithSomeGold(noisilyLabeledSentences: Vector[Vector[(Word, Tag)]], goldLabeledSentences: Vector[Vector[(Word, Tag)]], initialTagdict: TagDictionary[Tag]): Tagger[Tag] = {
    // Pool the noisy and gold data once; it is reused for both distributions.
    val allLabeled = noisilyLabeledSentences ++ goldLabeledSentences
    val allTokens = allLabeled.flatten
    val dict = initialTagdict
      .withWords(allTokens.map(_._1).toSet)
      .withTags(allTokens.map(_._2).toSet)
    val trDist = transitionDistributioner(allLabeled, dict)
    val emDist = emissionDistributioner(allLabeled, dict)
    // The learner sees the noisy sentences as unlabeled; gold labels are kept.
    learner.trainWithSomeGold(noisilyLabeledSentences.map(_.map(_._1)), goldLabeledSentences, dict, trDist, emDist)
  }

  override final def toString = f"SimpleNoisilySupervisedTaggerTrainer($learner, $transitionDistributioner, $emissionDistributioner)"
}
/**
 * Base class for semi-supervised HMM tagger trainers.
 *
 * Handles all the index bookkeeping shared by soft-EM and hard-EM:
 * words and tags are mapped to dense integer indices, where by convention
 * index 0 is the sentence-start word/tag and index 1 is the sentence-end
 * word/tag; actual vocabulary starts at index 2. Subclasses implement
 * `doTrain`, which runs the optimization loop over the indexed data.
 *
 * alphaT/alphaE scale the prior (initial) transition/emission distributions
 * when they are mixed with the gold counts to form pseudo-count priors.
 */
abstract class SemisupervisedHmmTaggerTrainer[Tag](
  transitionDistributioner: TransitionDistributioner[Tag],
  emissionDistributioner: EmissionDistributioner[Tag],
  alphaT: Double, alphaE: Double)
  extends SemisupervisedTaggerTrainer[Tag] {

  final override def trainWithTagsetsAndSomeGold(
    rawSentencesWithTokenTags: Vector[Vector[(Word, Set[Tag])]], goldLabeledSentences: Vector[Vector[(Word, Tag)]], initialTagdict: TagDictionary[Tag],
    transitions: ConditionalLogProbabilityDistribution[Tag, Tag], emissions: ConditionalLogProbabilityDistribution[Tag, Word]) = {
    val rawSentences = rawSentencesWithTokenTags.map(_.map(_._1))
    val tagdict = initialTagdict.withWords(rawSentences.flatten.toSet)

    // Index layout: position 0 = start word/tag, 1 = end word/tag, 2.. = data.
    val allActualWords = rawSentences.flatten.toSet.toVector
    val allWords = tagdict.startWord +: tagdict.endWord +: allActualWords
    val allTdTags = allActualWords.flatMap(tagdict).distinct // filter out tags that are not used by any word
    val allTags = tagdict.startTag +: tagdict.endTag +: allTdTags
    val numWords = allWords.size
    val numTags = allTags.size
    println(f"raw tokens = ${rawSentences.flatten.size} (${rawSentences.size} sentences)")
    println("numWords = " + numWords)
    println("numTags = " + numTags)
    println
    val wordIndex = allWords.zipWithIndex.toMap
    val tagIndex = allTags.zipWithIndex.toMap

    // td(w) = tag indices licensed for word index w (tag dictionary).
    // Start/end words map only to the start/end tags.
    val td: Array[Array[Int]] = Array(0) +: Array(1) +: {
      val fullTagsetSet = (2 until numTags).toArray
      allWords.drop(2).map { w =>
        val tdws = tagdict(w)
        if (tdws.size < numTags - 2) // incomplete set of tags
          tdws.map(tagIndex)(breakOut): Array[Int]
        else // complete set of tags
          fullTagsetSet // re-use the same array
      }(breakOut): Array[Array[Int]]
    }
    // rtd(t) = word indices that tag index t may emit (reverse tag dictionary).
    val rtd: Array[Array[Int]] = Array(0) +: Array(1) +: {
      allTags.drop(2).map { t =>
        allWords.zipWithIndex.drop(2).collect { case (w, wi) if tagdict(w)(t) => wi }(breakOut): Array[Int]
      }(breakOut): Array[Array[Int]]
    }

    println("Make Indexed Distributions")
    // NOTE: the distribution is queried as transitions(to, from), so row t1 of
    // logInitialTr holds p(t2 | t1) in log space.
    val logInitialTr: Array[Array[Double]] = Array.tabulate(numTags) { t1 => Array.tabulate(numTags) { t2 => transitions(allTags(t2), allTags(t1)).logValue } }
    val logInitialEm: Array[Array[Double]] = Array.tabulate(numTags) { t => Array.tabulate(numWords) { w => emissions(allWords(w), allTags(t)).logValue } }

    println(f"Make Prior Counts (from the ${goldLabeledSentences.size} gold labeled sentences)")
    // Gold counts; pairs with out-of-vocabulary words/tags are silently skipped
    // (tagIndex.get / wordIndex.get return None for unknowns).
    val trGoldCounts: Array[Array[Double]] = Array.fill(numTags) { new Array[Double](numTags) }
    for {
      (a, b) <- goldLabeledSentences.flatMap(s => (tagdict.startTag +: s.map(_._2) :+ tagdict.endTag).sliding2)
      ai <- tagIndex.get(a)
      bi <- tagIndex.get(b)
    } { trGoldCounts(ai)(bi) += 1 }
    val emGoldCounts: Array[Array[Double]] = Array.fill(numTags) { new Array[Double](numWords) }
    for {
      (w, t) <- goldLabeledSentences.flatten
      ti <- tagIndex.get(t)
      wi <- wordIndex.get(w)
    } { emGoldCounts(ti)(wi) += 1 }

    // Indexed sentences, wrapped in start/end sentinels; each token carries its
    // (sorted) array of allowed tag indices.
    val sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])] = rawSentencesWithTokenTags.map { sentWithTokenTags =>
      val (s, tokenTags) = sentWithTokenTags.unzip
      val sent = (tagdict.startWord +: s :+ tagdict.endWord).map(wordIndex).toArray
      val tokTags = (Set(tagdict.startTag) +: tokenTags :+ Set(tagdict.endTag)).map(_.map(tagIndex).toArray.sorted).toArray
      (sent, tokTags)
    }

    // Prepare for re-use later: alpha*prior + (gold counts), to be added to
    // data counts each iteration. The start/start and end/end emission cells
    // are zeroed so the sentinels carry no prior mass.
    val alphaPriorTr: Array[Array[Double]] = Array.tabulate(numTags, numTags) { (a, b) => alphaT * math.exp(logInitialTr(a)(b)) + trGoldCounts(a)(b) }
    val alphaPriorEm: Array[Array[Double]] = Array.tabulate(numTags, numWords) { (t, w) => alphaE * math.exp(logInitialEm(t)(w)) + emGoldCounts(t)(w) }; alphaPriorEm(0)(0) = 0.0; alphaPriorEm(1)(1) = 0.0

    println("Start Training")
    val (trExpectedLogCounts, emExpectedLogCounts) = doTrain(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorTr, alphaPriorEm, logInitialTr, logInitialEm)

    // Map the indexed expected counts back to Tag/Word keyed maps and hand
    // them to the distributioners to build the final smoothed distributions.
    val trExpectedCountsUnindexed =
      (0 until numTags).map(k1 => allTags(k1) ->
        (0 until numTags).map(k2 => allTags(k2) ->
          (if (k1 < 2 && k2 < 2) LogDouble.zero else new LogDouble(trExpectedLogCounts(k1)(k2)))).toMap).toMap +
        (tagdict.endTag -> Map[Tag, LogDouble]())
    val tagCountsUnindexed = trExpectedCountsUnindexed.mapVals(_.values.sum) + (tagdict.endTag -> trExpectedCountsUnindexed.values.map(_.getOrElse(tagdict.endTag, LogDouble.zero)).sum)
    val emLearnedTr = transitionDistributioner.make(trExpectedCountsUnindexed, tagCountsUnindexed, tagdict)
    val emExpectedCountsUnindexed =
      (2 until numTags).map(t => allTags(t) ->
        rtd(t).map(w => allWords(w) ->
          new LogDouble(emExpectedLogCounts(t)(w))).toMap).toMap +
        (tagdict.startTag -> Map(tagdict.startWord -> LogDouble.one)) + (tagdict.endTag -> Map(tagdict.endWord -> LogDouble.one))
    val emLearnedEm = emissionDistributioner.make(emExpectedCountsUnindexed, tagdict)
    new HmmTagger(emLearnedTr, emLearnedEm, tagdict)
  }

  /**
   * Run the training loop over the indexed data.
   *
   * @return expected transition and emission LOG counts (numTags x numTags
   *         and numTags x numWords respectively).
   */
  def doTrain(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorTr: Array[Array[Double]], alphaPriorEm: Array[Array[Double]],
    logInitialTr: Array[Array[Double]], logInitialEm: Array[Array[Double]]): //
    (Array[Array[Double]], Array[Array[Double]])

  /**
   * Convert, IN-PLACE, counts matrices into conditional
   * probability distribution matrices (in log space).
   */
  protected[this] final def convertCountsToProbabilities(
    trCounts: Array[Array[Double]], emCounts: Array[Array[Double]],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]]): Unit = {
    // newLogTr
    // 1. Divide by sum (to get probability)
    // 2. Log
    var k1 = 0
    while (k1 < numTags) {
      normalizeAndLog(trCounts(k1), numTags)
      k1 += 1
    }
    // newLogEm: normalize each tag row, but only over the words that the tag
    // may emit (rtd); rows 0 and 1 (start/end sentinels) are skipped.
    var k = 2
    while (k < numTags) {
      val rtdK = rtd(k)
      val rtdKLen = rtdK.length
      activeNormalizeAndLog(emCounts(k), rtdK, rtdKLen)
      k += 1
    }
    // At this point the "counts" are actually log probabilities!!
  }

  /**
   * Convert, IN-PLACE, LOG counts matrices into conditional
   * probability distribution matrices (still in log space).
   */
  protected[this] final def convertLogCountsToProbabilities(
    trLogCounts: Array[Array[Double]], emLogCounts: Array[Array[Double]],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]]): Unit = {
    // newLogTr
    // 1. Divide by sum (to get probability)
    // 2. Log
    var k1 = 0
    while (k1 < numTags) {
      logNormalize(trLogCounts(k1), numTags)
      k1 += 1
    }
    // newLogEm: sentinel emissions are pinned to log(1)=0 before normalizing
    // the remaining rows over their licensed words only.
    emLogCounts(0)(0) = 0.0
    emLogCounts(1)(1) = 0.0
    var k = 2
    while (k < numTags) {
      val rtdK = rtd(k)
      val rtdKLen = rtdK.length
      activeLogNormalize(emLogCounts(k), rtdK, rtdKLen)
      k += 1
    }
    // At this point the "counts" are actually log probabilities!!
  }
}
/**
 * Shared base for the EM-style HMM trainers (soft and hard EM); provides
 * matrix allocation and carries the common hyper-parameters.
 */
abstract class EmHmmTaggerTrainer[Tag](
  maxIterations: Int,
  transitionDistributioner: TransitionDistributioner[Tag],
  emissionDistributioner: EmissionDistributioner[Tag],
  alphaT: Double, alphaE: Double,
  convergence: Double)
  extends SemisupervisedHmmTaggerTrainer[Tag](transitionDistributioner, emissionDistributioner, alphaT, alphaE) {

  /** Allocate a zeroed wLen x numTags matrix (one distinct row per position). */
  protected[this] final def makeMatrix(wLen: Int, numTags: Int) =
    Array.fill(wLen)(new Array[Double](numTags))
}
/**
 * Soft (standard) EM training for an HMM tagger: each iteration runs the
 * forward-backward algorithm over every sentence to accumulate expected
 * transition/emission counts (in log space), then renormalizes them into the
 * next iteration's model. Iterates until `maxIterations` or until the average
 * per-sentence log-probability improves by less than `convergence`.
 */
class SoftEmHmmTaggerTrainer[Tag](
  maxIterations: Int,
  transitionDistributioner: TransitionDistributioner[Tag],
  emissionDistributioner: EmissionDistributioner[Tag],
  alphaT: Double = 0.0, alphaE: Double = 0.0,
  convergence: Double = 1e-10)
  extends EmHmmTaggerTrainer[Tag](maxIterations, transitionDistributioner, emissionDistributioner, alphaT, alphaE, convergence) {

  /**
   * @return: Transition and Emission expected log counts
   */
  final override def doTrain(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorTr: Array[Array[Double]], alphaPriorEm: Array[Array[Double]],
    logInitialTr: Array[Array[Double]], logInitialEm: Array[Array[Double]]) = {
    // Priors are accumulated in log space throughout soft EM.
    val alphaPriorLogTr = alphaPriorTr.map(_.map(log))
    val alphaPriorLogEm = alphaPriorEm.map(_.map(log))
    val (expectedTrLogCounts, expectedEmLogCounts) = iterate(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorLogTr, alphaPriorLogEm, logInitialTr, logInitialEm, 1, Double.NegativeInfinity)
    (expectedTrLogCounts, expectedEmLogCounts)
  }

  /**
   * One EM iteration per recursive call; terminates on max iterations,
   * divergence (asserts), or convergence of the average log-probability.
   *
   * @return: Transition and Emission expected log counts
   */
  @tailrec private[this] final def iterate(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorLogTr: Array[Array[Double]], alphaPriorLogEm: Array[Array[Double]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]],
    iteration: Int, prevAvgLogProb: Double //
    ): (Array[Array[Double]], Array[Array[Double]]) = {
    val startTime = System.currentTimeMillis()
    val (expectedTrLogCounts, expectedEmLogCounts, avgLogProb) = reestimate(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorLogTr, alphaPriorLogEm, logTr, logEm)
    println(f"iteration ${(iteration + ":").padRight(4)} ${(System.currentTimeMillis() - startTime) / 1000.0}%.3f sec   avgLogProb=${(avgLogProb + ",").padRight(22)} avgProb=${exp(avgLogProb)}")
    if (iteration >= maxIterations) {
      println(f"MAX ITERATIONS REACHED")
      (expectedTrLogCounts, expectedEmLogCounts)
    }
    else if (avgLogProb < prevAvgLogProb) {
      // Soft EM should increase the likelihood monotonically; a drop is a bug.
      println(f"DIVERGENCE!")
      assert(false, "DIVERGENCE!")
      (expectedTrLogCounts, expectedEmLogCounts)
    }
    else if (avgLogProb - prevAvgLogProb < convergence) {
      println(f"CONVERGENCE (${avgLogProb - prevAvgLogProb} < $convergence)")
      (expectedTrLogCounts, expectedEmLogCounts)
    }
    else {
      convertLogCountsToProbabilities(expectedTrLogCounts, expectedEmLogCounts, numWords, numTags, rtd)
      // At this point the "counts" are actually log probabilities!!
      iterate(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorLogTr, alphaPriorLogEm, expectedTrLogCounts, expectedEmLogCounts, iteration + 1, avgLogProb)
    }
  }

  /**
   * E-step: start each count matrix from the (log) priors, then add the
   * expectations from every sentence via forward-backward.
   *
   * @return (expected transition log counts, expected emission log counts,
   *         average per-sentence log probability)
   */
  private[this] final def reestimate(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorLogTr: Array[Array[Double]], alphaPriorLogEm: Array[Array[Double]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]] //
    ) = {
    val expectedTrLogCounts: Array[Array[Double]] = {
      val data = new Array[Array[Double]](numTags)
      var i = 0; while (i < numTags) { val a = new Array[Double](numTags); System.arraycopy(alphaPriorLogTr(i), 0, a, 0, numTags); data(i) = a; i += 1 }
      data
    }
    val expectedEmLogCounts: Array[Array[Double]] = {
      val data = new Array[Array[Double]](numTags)
      var i = 0; while (i < numTags) { val a = new Array[Double](numWords); System.arraycopy(alphaPriorLogEm(i), 0, a, 0, numWords); data(i) = a; i += 1 }
      data
    }
    var logProbSum = 0.0
    for ((s, stags) <- sentsWithTokenTags.seq) {
      logProbSum += contributeExpectations(expectedTrLogCounts, expectedEmLogCounts, s, stags, numWords, numTags, rtd, logTr, logEm)
    }
    (expectedTrLogCounts, expectedEmLogCounts, logProbSum / sentsWithTokenTags.size)
  }

  /*
   * Forward-Backward on one sentence. Returns the sentence log-probability
   * and adds its expected counts into the shared accumulators.
   */
  private[this] final def contributeExpectations(
    expectedTrLogCounts: Array[Array[Double]],
    expectedEmLogCounts: Array[Array[Double]],
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]]): Double = {
    assert(w.head == 0 && w.last == 1) // sentence must be wrapped in start/end sentinels
    val logFwd = calculateForward(w, tokenTags, numWords, numTags, logTr, logEm)
    val logBkd = calculateBackward(w, tokenTags, numWords, numTags, logTr, logEm)
    // Total sentence probability, read off both ends; they must agree.
    val logFwdP = logFwd.last(1)
    val logBkdP = logBkd.head(0)
    assert(abs(logFwdP - logBkdP) < 1e-10, f"$logFwdP != $logBkdP")
    contributeExpectedTrCounts(expectedTrLogCounts, w, tokenTags, numWords, numTags, logTr, logEm, logFwd, logFwdP, logBkd, logBkdP)
    contributeExpectedEmCounts(expectedEmLogCounts, w, tokenTags, numWords, numTags, logTr, logEm, logFwd, logFwdP, logBkd, logBkdP)
    logFwdP
  }

  /** Forward pass: logFwd(i)(k) = log p(w(1..i), tag(i)=k). */
  private[this] final def calculateForward(
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]]) = {
    val logFwd = makeMatrix(w.length, numTags)
    // For temporary storage
    val logValueArray = new Array[Double](numTags)
    //logFwd(0)(0) = 0.0
    var i = 1
    while (i < w.length) {
      val curLogFwd = logFwd(i)
      val prevLogFwd = logFwd(i - 1)
      val curW = w(i)
      val curWKs = tokenTags(i)
      val curWKsLen = curWKs.length
      val prevKs = tokenTags(i - 1)
      val prevKsLen = prevKs.length
      assert(prevKsLen > 0, f"prevKsLen = $prevKsLen;  td(${w(i - 1)}) = ${tokenTags(i - 1).toVector}") // TODO: REMOVE
      var j = 0
      while (j < curWKsLen) {
        val k = curWKs(j)
        var l = 0
        while (l < prevKsLen) {
          val k1 = prevKs(l)
          val v = logTr(k1)(k) + prevLogFwd(k1)
          logValueArray(l) = v
          l += 1
        }
        curLogFwd(k) = logSum(logValueArray, prevKsLen) + logEm(k)(curW)
        j += 1
      }
      i += 1
    }
    logFwd
  }

  /** Backward pass: logBkd(i)(k) = log p(w(i+1..n) | tag(i)=k). */
  private[this] final def calculateBackward(
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]]) = {
    val logBkd = makeMatrix(w.length, numTags)
    // For temporary storage
    val logValueArray = new Array[Double](numTags)
    //logBkd(w.length-1)(0) = 0.0
    var i = w.length - 2
    while (i >= 0) {
      val curLogBkd = logBkd(i)
      val nextLogBkd = logBkd(i + 1)
      val curW = w(i)
      val curWKs = tokenTags(i)
      val curWKsLen = curWKs.length
      val nextW = w(i + 1)
      val nextKs = tokenTags(i + 1)
      val nextKsLen = nextKs.length
      var j = 0
      while (j < curWKsLen) {
        val k = curWKs(j)
        var l = 0
        while (l < nextKsLen) {
          val k2 = nextKs(l)
          assert(!logTr(k)(k2).isNegInfinity, f"logTr($k)($k2) is infinite") // TODO: REMOVE
          assert(!logEm(k2)(nextW).isNegInfinity, f"logEm($k2)($nextW) is infinite") // TODO: REMOVE
          logValueArray(l) = logTr(k)(k2) + logEm(k2)(nextW) + nextLogBkd(k2)
          l += 1
        }
        curLogBkd(k) = logSum(logValueArray, nextKsLen)
        j += 1
      }
      i -= 1
    }
    logBkd
  }

  /** Accumulate expected transition counts: fwd * tr * em * bkd / p(sentence). */
  private[this] final def contributeExpectedTrCounts(
    expectedTrLogCounts: Array[Array[Double]],
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]],
    logFwd: Array[Array[Double]], logFwdP: Double,
    logBkd: Array[Array[Double]], logBkdP: Double) = {
    var i = 0
    while (i < w.length - 1) {
      val curW = w(i)
      val curWKs = tokenTags(i)
      val curWKsLen = curWKs.length
      val nextW = w(i + 1)
      val nextWKs = tokenTags(i + 1)
      val nextWKsLen = nextWKs.length
      var j = 0
      while (j < curWKsLen) {
        val k1 = curWKs(j)
        val exLogTrK1 = expectedTrLogCounts(k1)
        var l = 0
        while (l < nextWKsLen) {
          val k2 = nextWKs(l)
          val logEx = logFwd(i)(k1) + logTr(k1)(k2) + logEm(k2)(nextW) + logBkd(i + 1)(k2)
          exLogTrK1(k2) = logSum(exLogTrK1(k2), logEx - logFwdP)
          l += 1
        }
        j += 1
      }
      i += 1
    }
  }

  /** Accumulate expected emission counts: fwd * bkd / p(sentence), skipping sentinels. */
  private[this] final def contributeExpectedEmCounts(
    expectedEmLogCounts: Array[Array[Double]],
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]],
    logFwd: Array[Array[Double]], logFwdP: Double,
    logBkd: Array[Array[Double]], logBkdP: Double) = {
    var i = 1
    val wLen = w.length - 1
    while (i < wLen) {
      val curW = w(i)
      val curWKs = tokenTags(i)
      val curWKsLen = curWKs.length
      var j = 0
      while (j < curWKsLen) {
        val k = curWKs(j)
        val logEx = logFwd(i)(k) + logBkd(i)(k)
        val exLogEmK = expectedEmLogCounts(k)
        exLogEmK(curW) = logSum(exLogEmK(curW), logEx - logFwdP)
        assert(!expectedEmLogCounts(k)(curW).isNegInfinity, f"expectedEmLogCounts($k)($curW) is infinite;  logEx=$logEx;  logEx-logFwdP=${logEx - logFwdP};  curWKs=${curWKs.mkString("[", ",", "]")}") // TODO: REMOVE
        j += 1
      }
      i += 1
    }
  }

  override final def toString = f"SoftEmHmmTaggerTrainer($maxIterations, $transitionDistributioner, $emissionDistributioner, alphaT=${alphaT}%2f, alphaE=${alphaE}%2f)"
}
/**
 * Hard EM (Viterbi EM) training for an HMM tagger: each iteration tags every
 * sentence with its single best (Viterbi) path under the current model and
 * accumulates plain counts from that path, then renormalizes. Unlike soft EM,
 * divergence is tolerated (hard EM is not guaranteed monotone), so the
 * divergence assert is disabled.
 */
class HardEmHmmTaggerTrainer[Tag](
  maxIterations: Int,
  transitionDistributioner: TransitionDistributioner[Tag],
  emissionDistributioner: EmissionDistributioner[Tag],
  alphaT: Double = 0.0, alphaE: Double = 0.0,
  convergence: Double = 1e-10)
  extends EmHmmTaggerTrainer[Tag](maxIterations, transitionDistributioner, emissionDistributioner, alphaT, alphaE, convergence) {

  /**
   * @return: Transition and Emission expected counts (converted to log space
   *          at the end, as required by the base class contract)
   */
  final override def doTrain(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorTr: Array[Array[Double]], alphaPriorEm: Array[Array[Double]],
    logInitialTr: Array[Array[Double]], logInitialEm: Array[Array[Double]]) = {
    val (expectedTrCounts, expectedEmCounts) = iterate(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorTr, alphaPriorEm, logInitialTr, logInitialEm, 1, Double.NegativeInfinity)
    val expectedTrLogCounts = expectedTrCounts.map(_.map(log))
    val expectedEmLogCounts = expectedEmCounts.map(_.map(log))
    (expectedTrLogCounts, expectedEmLogCounts)
  }

  /**
   * One Viterbi-EM iteration per recursive call; terminates on max
   * iterations, divergence, or convergence of the average log-probability.
   *
   * @return: Transition and Emission expected counts
   */
  @tailrec private[this] final def iterate(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    rtd: Array[Array[Int]],
    alphaPriorTr: Array[Array[Double]], alphaPriorEm: Array[Array[Double]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]],
    iteration: Int, prevAvgLogProb: Double //
    ): (Array[Array[Double]], Array[Array[Double]]) = {
    val startTime = System.currentTimeMillis()
    val (expectedTrCounts, expectedEmCounts, avgLogProb) = reestimate(sentsWithTokenTags, numWords, numTags, alphaPriorTr, alphaPriorEm, logTr, logEm)
    println(f"iteration ${(iteration + ":").padRight(4)} ${(System.currentTimeMillis() - startTime) / 1000.0}%.3f sec   avgLogProb=${(avgLogProb + ",").padRight(22)} avgProb=${exp(avgLogProb)}")
    if (iteration >= maxIterations) {
      println(f"MAX ITERATIONS REACHED")
      (expectedTrCounts, expectedEmCounts)
    }
    else if (avgLogProb < prevAvgLogProb) {
      // Hard EM may legitimately dip; just stop instead of asserting.
      println(f"DIVERGENCE!")
      //assert(false, "DIVERGENCE!")
      (expectedTrCounts, expectedEmCounts)
    }
    else if (avgLogProb - prevAvgLogProb < convergence) {
      println(f"CONVERGENCE (${avgLogProb - prevAvgLogProb} < $convergence)")
      (expectedTrCounts, expectedEmCounts)
    }
    else {
      convertCountsToProbabilities(expectedTrCounts, expectedEmCounts, numWords, numTags, rtd)
      // At this point the "counts" are actually log probabilities!!
      iterate(sentsWithTokenTags, numWords, numTags, rtd, alphaPriorTr, alphaPriorEm, expectedTrCounts, expectedEmCounts, iteration + 1, avgLogProb)
    }
  }

  /**
   * E-step (hard): start each count matrix from the priors, then add one
   * count per position along each sentence's Viterbi path.
   *
   * @return (transition counts, emission counts, average per-sentence
   *         Viterbi-path log probability)
   */
  private[this] final def reestimate(
    sentsWithTokenTags: Vector[(Array[Int], Array[Array[Int]])],
    numWords: Int, numTags: Int,
    alphaPriorTr: Array[Array[Double]], alphaPriorEm: Array[Array[Double]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]] //
    ) = {
    val expectedTrCounts: Array[Array[Double]] = {
      val data = new Array[Array[Double]](numTags)
      var i = 0; while (i < numTags) { val a = new Array[Double](numTags); System.arraycopy(alphaPriorTr(i), 0, a, 0, numTags); data(i) = a; i += 1 }
      data
    }
    val expectedEmCounts: Array[Array[Double]] = {
      val data = new Array[Array[Double]](numTags)
      var i = 0; while (i < numTags) { val a = new Array[Double](numWords); System.arraycopy(alphaPriorEm(i), 0, a, 0, numWords); data(i) = a; i += 1 }
      data
    }
    var logProbSum = 0.0
    for ((s, stags) <- sentsWithTokenTags.seq) {
      logProbSum += contributeExpectations(expectedTrCounts, expectedEmCounts, s, stags, numWords, numTags, logTr, logEm)
    }
    (expectedTrCounts, expectedEmCounts, logProbSum / sentsWithTokenTags.size)
  }

  /** Viterbi-decode one sentence and count along the best path. */
  private[this] final def contributeExpectations(
    expectedTrCounts: Array[Array[Double]],
    expectedEmCounts: Array[Array[Double]],
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]]): Double = {
    assert(w.head == 0 && w.last == 1) // sentence must be wrapped in start/end sentinels
    val (logP, backpointers) = calculateForwardViterbi(w, tokenTags, numWords, numTags, logTr, logEm)
    contributeBackwardTagging(w, tokenTags, numWords, numTags, backpointers, logTr, logEm, expectedTrCounts, expectedEmCounts)
    logP
  }

  /**
   * Forward Viterbi pass.
   *
   * @return (log probability of the best path ending at the end sentinel,
   *         backpointer table: backpointers(i)(k) = best previous tag)
   */
  private[this] final def calculateForwardViterbi(
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    logTr: Array[Array[Double]], logEm: Array[Array[Double]]) = {
    val wLen = w.length
    val logViterbiTable = makeMatrix(w.length, numTags)
    val backpointers = new Array[Array[Int]](wLen); { var i = 0; while (i < wLen) { backpointers(i) = new Array[Int](numTags); i += 1 } }
    //logViterbiTable(0)(0) = 0.0
    var i = 1
    while (i < w.length) {
      val curViterbi = logViterbiTable(i)
      val prevViterbi = logViterbiTable(i - 1)
      val curW = w(i)
      val curWKs = tokenTags(i)
      val curWKsLen = curWKs.length
      val curBack = backpointers(i)
      val prevKs = tokenTags(i - 1)
      val prevKsLen = prevKs.length
      assert(prevKsLen > 0, f"prevKsLen = $prevKsLen;  td(${w(i - 1)}) = ${tokenTags(i - 1).toVector}") // TODO: REMOVE
      var j = 0
      while (j < curWKsLen) {
        val k = curWKs(j)
        var bestK1 = -1
        var maxScore = Double.NegativeInfinity
        var l = 0
        while (l < prevKsLen) {
          val k1 = prevKs(l)
          val score = logTr(k1)(k) + prevViterbi(k1)
          if (score > maxScore) {
            bestK1 = k1
            maxScore = score
          }
          l += 1
        }
        curViterbi(k) = maxScore + logEm(k)(curW)
        curBack(k) = bestK1
        j += 1
      }
      i += 1
    }
    (logViterbiTable.last(1), backpointers)
  }

  /** Follow backpointers from the end sentinel, counting tr/em along the path. */
  private[this] final def contributeBackwardTagging(
    w: Array[Int], tokenTags: Array[Array[Int]],
    numWords: Int, numTags: Int,
    backpointers: Array[Array[Int]],
    logTr: Array[Array[Double]], logEm: Array[Array[Double]],
    expectedTrCounts: Array[Array[Double]],
    expectedEmCounts: Array[Array[Double]]): Unit = {
    var i = w.length - 2
    var nextTag = 1 // end sentinel tag
    while (i > 0) {
      val currTag = backpointers(i + 1)(nextTag)
      expectedTrCounts(currTag)(nextTag) += 1
      expectedEmCounts(currTag)(w(i)) += 1
      nextTag = currTag
      i -= 1
    }
    // Transition out of the start sentinel (tag 0).
    expectedTrCounts(0)(nextTag) += 1
  }

  /**
   * Debug helper: print the Viterbi table (one row per tag), showing a cell
   * only when tag k is licensed for token i. (Fixed: the licensing test used
   * to compare the word index w(i) against the token's tag set; it must test
   * the tag index k, as the analogous debug dumps elsewhere in this file do.)
   */
  private[this] def drawViterbiTable(
    w: Array[Int], tokenTags: Array[Array[Int]],
    logViterbiTable: Array[Array[Double]],
    backpointers: Array[Array[Int]]): Unit = {
    for ((currV, k) <- logViterbiTable.transpose.zipWithIndex) {
      println(currV.zipWithIndex.map {
        case (v, i) =>
          (if (tokenTags(i).contains(k)) f"${backpointers(i)(k)} <- ${exp(v)}%.5f"
          else "").padLeft(5 + 8)
      }.mkString(" "))
    }
    println
  }

  override final def toString = f"HardEmHmmTaggerTrainer($maxIterations, $transitionDistributioner, $emissionDistributioner, alphaT=${alphaT}%2f, alphaE=${alphaE}%2f)"
}
| apache-2.0 |
greenify/msa | src/model/SeqCollection.js | 4339 | import Sequence from "./Sequence";
import FeatureCol from "./FeatureCol";
const Collection = require("backbone-thin").Collection;
// Backbone collection of aligned sequences, with optional per-sequence
// feature annotations and a designated reference sequence.
// NOTE(review): this module uses a global `_` (underscore/lodash) that is not
// imported here — confirm the build provides it.
const SeqCollection = Collection.extend({

  model: Sequence,

  constructor: function(seqs, g) {
    Collection.apply(this, arguments);
    this.g = g;
    // Any structural change invalidates the cached max length and requires
    // re-binding features to the (possibly new) sequences.
    this.on( "add reset remove", (() => {
      // invalidate cache
      this.lengthCache = null;
      return this._bindSeqsWithFeatures();
    }), this
    );
    // use the first seq as reference as default
    this.on("reset", () => {
      return this._autoSetRefSeq();
    });
    this._autoSetRefSeq();
    this.lengthCache = null;
    this.features = {};
    return this;
  },

  // gives the max length of all sequences
  // (cached; invalidated on add/reset/remove)
  getMaxLength: function() {
    if (this.models.length === 0) { return 0; }
    if (this.lengthCache === null) {
      this.lengthCache = this.max(function(seq) { return seq.get("seq").length; }).get("seq").length;
    }
    return this.lengthCache;
  },

  // gets the previous model
  // @param endless [boolean] for the first element
  // true: returns the last element, false: returns undefined
  prev: function(model, endless) {
    let index = this.indexOf(model) - 1;
    if (index < 0 && endless) { index = this.length - 1; }
    return this.at(index);
  },

  // gets the next model
  // @param endless [boolean] for the last element
  // true: returns the first element, false: returns undefined
  next: function(model, endless) {
    let index = this.indexOf(model) + 1;
    if (index === this.length && endless) { index = 0; }
    return this.at(index);
  },

  // @returns n [int] number of hidden columns until n
  // (each hidden sequence encountered extends the scan range by one)
  calcHiddenSeqs: function(n) {
    let nNew = n;
    for (let i = 0; 0 < nNew ? i <= nNew : i >= nNew; 0 < nNew ? i++ : i--) {
      if (this.at(i).get("hidden")) {
        nNew++;
      }
    }
    return nNew - n;
  },

  // you can add features independent to the current seqs as they may be added
  // later (lagging connection)
  // sequence - feature binding is based on id
  addFeatures: function(features) {
    // Unwrap the {config, seqs} envelope and apply configured fill colors.
    if ((features.config != null)) {
      const obj = features;
      features = features.seqs;
      if ((obj.config.colors != null)) {
        const colors = obj.config.colors;
        _.each(features, function(seq) {
          return _.each(seq, function(val) {
            if ((colors[val.feature] != null)) {
              return val.fillColor = colors[val.feature];
            }
          });
        });
      }
    }
    // we might already have features
    if (_.isEmpty(this.features)) {
      // replace (no existent features)
      this.features = features;
    } else {
      // merge
      _.each(features, (val, key) => {
        if (!this.features.hasOwnProperty(key)) {
          return this.features[key] = val;
        } else {
          return this.features[key] = _.union(this.features[key], val);
        }
      });
    }
    // rehash
    return this._bindSeqsWithFeatures();
  },

  // adds features to a sequence
  // does it silenty without triggering an event
  _bindSeqWithFeatures: function(seq) {
    // TODO: probably we don't always want to bind to name
    // BUG FIX: guard the lookup — removeAllFeatures() deletes `this.features`,
    // after which any add/reset/remove event would otherwise crash here with a
    // TypeError reading a property of undefined.
    var features = this.features && this.features[seq.attributes.name];
    if (features) {
      // do silently to avoid triggering to many events
      seq.attributes.features = new FeatureCol(features);
      seq.attributes.features.assignRows();
      seq.attributes.height = seq.attributes.features.getCurrentHeight() + 1;
    }
  },

  // rehash the sequence feature binding
  _bindSeqsWithFeatures: function() {
    return this.each((seq) => this._bindSeqWithFeatures(seq));
  },

  // removes all features from the cache (not from the seqs)
  removeAllFeatures: function() {
    return delete this.features;
  },

  // designate the first sequence as reference when one exists
  _autoSetRefSeq: function() {
    if (this.length > 0) {
      return this.at(0).set("ref", true);
    }
  },

  // sets a sequence (e.g. BLAST start or consensus seq) as reference
  setRef: function(seq) {
    const obj = this.get(seq);
    this.each(function(s) {
      if (seq.cid) {
        if (obj.cid === s.cid) {
          return s.set("ref", true);
        } else {
          return s.set("ref", false);
        }
      }
    });
    this.g.config.set("hasRef", true);
    return this.trigger("change:reference", seq);
  }
});
export default SeqCollection;
| apache-2.0 |
bitjjj/HFTracker | HFTrackerDataCrawler/cron/NewsCronJobTest.js | 232 | var cronJob = require("cron").CronJob,
FundNewsENCrawler = require("../FundNewsENCrawler");
//new cronJob('* * 20 * * *', function () {
new FundNewsENCrawler("0").run();
new FundNewsENCrawler("1").run();
//}, null, true); | apache-2.0 |
realityforge/arez | processor/src/test/fixtures/input/com/example/component_ref/other/BaseProtectedAccessComponentRefModel.java | 230 | package com.example.component_ref.other;
import arez.Component;
import arez.annotations.ComponentRef;
/**
 * Processor-test fixture: a base class in a different package exposing a
 * {@code protected} abstract {@link ComponentRef} accessor, used to verify
 * that the annotation processor handles protected cross-package access.
 */
public abstract class BaseProtectedAccessComponentRefModel
{
  /** Accessor through which the generated subclass supplies the {@link Component}. */
  @ComponentRef
  protected abstract Component getComponent();
}
| apache-2.0 |
kamalmarhubi/bazel | src/main/java/com/google/devtools/build/skyframe/CyclesReporter.java | 4251 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.events.EventHandler;
/**
 * A utility for custom reporting of errors from cycles in the Skyframe graph. This class is
* stateful in order to differentiate between new cycles and cycles that have already been
* reported (do not reuse the instances or cache the results as it could end up printing
* inconsistent information or leak memory). It treats two cycles as the same if they contain the
* same {@link SkyKey}s in the same order, but perhaps with different starting points. See
* {@link CycleDeduper} for more information.
*/
public class CyclesReporter {

  /**
   * Interface for reporting custom information about a single cycle.
   */
  public interface SingleCycleReporter {

    /**
     * Reports the given cycle and returns {@code true}, or returns {@code false} if this
     * {@link SingleCycleReporter} doesn't know how to report the cycle.
     *
     * @param topLevelKey the top level key that transitively depended on the cycle
     * @param cycleInfo the cycle
     * @param alreadyReported whether the cycle has already been reported to the
     *        {@link CyclesReporter}.
     * @param eventHandler the eventHandler to which to report the error
     */
    boolean maybeReportCycle(SkyKey topLevelKey, CycleInfo cycleInfo, boolean alreadyReported,
        EventHandler eventHandler);
  }

  private final ImmutableList<SingleCycleReporter> cycleReporters;
  private final CycleDeduper<SkyKey> cycleDeduper = new CycleDeduper<>();

  /**
   * Constructs a {@link CyclesReporter} that delegates to the given {@link SingleCycleReporter}s,
   * in the given order, to report custom information about cycles.
   */
  public CyclesReporter(SingleCycleReporter... cycleReporters) {
    this.cycleReporters = ImmutableList.copyOf(cycleReporters);
  }

  /**
   * Reports the given cycles, differentiating between cycles that have already been reported.
   *
   * @param cycles The {@code Iterable} of cycles.
   * @param topLevelKey This key represents the top level value key that returned cycle errors.
   * @param eventHandler the eventHandler to which to report the error
   */
  public void reportCycles(Iterable<CycleInfo> cycles, SkyKey topLevelKey,
      EventHandler eventHandler) {
    Preconditions.checkNotNull(eventHandler);
    for (CycleInfo cycleInfo : cycles) {
      // A false result from seen() is treated as "already reported"
      // (logic preserved from the original, just collapsed into one expression).
      boolean alreadyReported = !cycleDeduper.seen(cycleInfo.getCycle());
      boolean successfullyReported = false;
      for (SingleCycleReporter cycleReporter : cycleReporters) {
        if (cycleReporter.maybeReportCycle(topLevelKey, cycleInfo, alreadyReported, eventHandler)) {
          successfullyReported = true;
          break;
        }
      }
      // FIX: build the (expensive) diagnostic message lazily. The original used
      // Preconditions.checkState(cond, printArbitraryCycle(...)), which evaluated
      // printArbitraryCycle() for every cycle even when a reporter handled it.
      // The thrown exception type (IllegalStateException) and message are unchanged.
      if (!successfullyReported) {
        throw new IllegalStateException(
            printArbitraryCycle(topLevelKey, cycleInfo, alreadyReported));
      }
    }
  }

  /** Builds a fallback diagnostic dump of the cycle when no reporter handled it. */
  private String printArbitraryCycle(SkyKey topLevelKey, CycleInfo cycleInfo,
      boolean alreadyReported) {
    StringBuilder cycleMessage = new StringBuilder()
        .append("topLevelKey: ").append(topLevelKey).append("\n")
        .append("alreadyReported: ").append(alreadyReported).append("\n")
        .append("path to cycle:\n");
    for (SkyKey skyKey : cycleInfo.getPathToCycle()) {
      cycleMessage.append(skyKey).append("\n");
    }
    cycleMessage.append("cycle:\n");
    for (SkyKey skyKey : cycleInfo.getCycle()) {
      cycleMessage.append(skyKey).append("\n");
    }
    return cycleMessage.toString();
  }
}
| apache-2.0 |
mirkosertic/Bytecoder | classlib/bytecoder.web/src/main/java/de/mirkosertic/bytecoder/api/web/HTMLWebGLCanvasElement.java | 362 | package de.mirkosertic.bytecoder.api.web;
import de.mirkosertic.bytecoder.api.OpaqueProperty;
import de.mirkosertic.bytecoder.api.web.webgl.WebGLRenderingContext;
/**
 * Opaque wrapper for an HTML {@code <canvas>} element used as a WebGL drawing surface.
 */
public interface HTMLWebGLCanvasElement extends HTMLElement {
  /** Canvas width in pixels (the DOM {@code width} property). */
  @OpaqueProperty
  int width();
  /** Canvas height in pixels (the DOM {@code height} property). */
  @OpaqueProperty
  int height();
  /** Returns the rendering context for the given context id (e.g. {@code "webgl"}). */
  WebGLRenderingContext getContext(String context);
}
| apache-2.0 |
cipriancraciun/mosaic-java-benchmarks | benchmarks-core/src/main/java/eu/mosaic_cloud/benchmarks/tools/ThreadBundle.java | 1575 |
package eu.mosaic_cloud.benchmarks.tools;
import java.util.concurrent.ConcurrentSkipListSet;
/**
 * Groups worker threads so they can be started, asked to stop, and joined as a
 * unit. Worker threads subclass the inner {@link Thread} class, which registers
 * itself with the enclosing bundle on construction.
 */
public final class ThreadBundle {

    public ThreadBundle() {
        super();
        this.threads = new ConcurrentSkipListSet<ThreadBundle.Thread>();
    }

    /**
     * Polls each bundled thread with a 1 ms timed join, retrying on interrupt
     * (and propagating the interrupt to the workers).
     *
     * NOTE(review): join(1) waits at most 1 ms per thread, so this method can
     * return while workers are still running — confirm whether an untimed
     * join() was intended.
     */
    public final void join() {
        while (true) {
            try {
                for (final Thread thread : this.threads) {
                    thread.join(1);
                }
                break;
            } catch (final InterruptedException exception) {
                for (final Thread thread : this.threads) {
                    thread.interrupt();
                }
            }
        }
    }

    /** Signals every bundled thread to stop cooperatively (see {@link Thread#shouldRun()}). */
    public final void shouldStop() {
        for (final Thread thread : this.threads) {
            thread.shouldStop();
        }
    }

    /** Starts every bundled thread. */
    public final void start() {
        for (final Thread thread : this.threads) {
            thread.start();
        }
    }

    // Package-visible (was annotated /*private*/): inner Thread instances
    // register themselves here from their constructor.
    /*private*/final ConcurrentSkipListSet<Thread> threads;

    /**
     * Base class for bundle workers. Subclasses should loop on
     * {@link #shouldRun()} so {@link #shouldStop()} can stop them cooperatively.
     */
    public abstract class Thread extends java.lang.Thread implements Comparable<Thread> {

        protected Thread() {
            super();
            ThreadBundle.this.threads.add(this);
            this.shouldStop = false;
        }

        /**
         * Orders threads by identity hash so they can live in the skip-list set.
         * NOTE(review): identity hashes are not unique — a collision makes two
         * distinct threads compare equal, and the set silently drops one.
         */
        @Override
        public final int compareTo(final Thread thread) {
            return Integer.compare(System.identityHashCode(this), System.identityHashCode(thread));
        }

        /** Requests cooperative shutdown; visible to the worker via the volatile flag. */
        public final void shouldStop() {
            this.shouldStop = true;
        }

        @Override
        public final synchronized void start() {
            super.start();
        }

        /** True while the worker should keep running; checked from the worker's own thread. */
        protected final boolean shouldRun() {
            return (!this.shouldStop);
        }

        // FIX: volatile — this flag is written by the controlling thread (shouldStop())
        // and read by the worker thread (shouldRun()); without volatile there is no
        // guarantee the worker ever observes the update.
        private volatile boolean shouldStop;
    }
}
| apache-2.0 |
FrenzelGmbH/company-website | modules/purchase/views/purchase-order/index.php | 2369 | <?php
use yii\helpers\Html;
use yii\helpers\Url;
use yii\grid\GridView;
use yiidhtmlx\Grid;
use yii\widgets\Block;
use kartik\widgets\SideNav;
/**
* @var yii\web\View $this
* @var yii\data\ActiveDataProvider $dataProvider
* @var app\modules\parties\models\PartySearch $searchModel
*/
$this->title = 'Purchase Order';
$this->params['breadcrumbs'][] = $this->title;
//all that has to do with the grid
$target = Url::to(array('/purchase/purchase-order/view','id'=>''));
$gridURL = Url::to(['/purchase/purchase-order/dhtmlxgrid','un'=> date('Ymd')]);
$gridJS = <<<GRIDJS
function doOnRowSelect(id,ind) {
window.location = "$target"+id;
};
function doOnFilterStart(indexes,values){
$.ajax("$gridURL&search="+values).
success(function(data){
dhtmlxPurchaseOrderGrid.clearAll();
dhtmlxPurchaseOrderGrid.parse(data,"json");
}
);
}
GRIDJS;
$this->registerJs($gridJS);
?>
<?php Block::begin(array('id'=>'sidebar')); ?>
<?php
echo SideNav::widget([
'type' => SideNav::TYPE_DEFAULT,
'heading' => Yii::t('app','Purchase Order Options'),
'items' =>
[
[
'url' => ['/site/index'],
'label' => 'Home',
'icon' => 'home'
],
['label' => Yii::t('app','Create'), 'icon'=>'plus', 'url'=>['create']]
]
]);
?>
<?php Block::end(); ?>
<div class="purchase-order-index">
<?php // echo $this->render('_search', ['model' => $searchModel]); ?>
<?php
echo Grid::widget(
[
'clientOptions'=>[
'parent' => 'PurchaseOrderGrid',
'image_path' => Yii::$app->AssetManager->getBundle('yiidhtmlx\GridObjectAsset')->baseUrl."/dhtmlxGrid/imgs/",
'auto_height' => false,
'auto_width' => true,
'smart' => true,
'skin' => "dhx_terrace",
'columns' => [
['label'=>'id','width'=>'40','type'=>'ro'],
['label'=>[Yii::t('app','Order ID'),'#text_filter'],'type'=>'ro','width'=>'150'],
['label'=>[Yii::t('app','Ordered By')],'type'=>'ed'],
],
],
'enableSmartRendering' => true,
'options'=>[
'id' => 'PurchaseOrderGrid',
'height' => '500px',
],
'clientDataOptions'=>[
'type'=>'json',
'url'=> $gridURL
],
'clientEvents'=>[
'onRowDblClicked'=>'doOnRowSelect',
'onEnter' => 'doOnRowSelect',
'onFilterStart' => 'doOnFilterStart'
]
]
);
?>
</div>
| apache-2.0 |
akjava/gwt-three.js-test | src/com/akjava/gwt/three/client/java/utils/TextureExportUtils.java | 1948 | package com.akjava.gwt.three.client.java.utils;
import com.akjava.gwt.three.client.gwt.renderers.WebGLContext;
import com.akjava.gwt.three.client.js.renderers.WebGLRenderTarget;
import com.akjava.gwt.three.client.js.renderers.WebGLRenderer;
import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.CanvasElement;
/**
 * Helpers for exporting the pixel contents of a WebGL render target as a data URL.
 */
public class TextureExportUtils {
/**
 * Reads back the given render target and encodes it as a data URL.
 *
 * @param renderer the renderer whose GL context owns the texture
 * @param renderTarget the render target to read; its dimensions define the output size
 * @param canvas optional 2D canvas to reuse for encoding; may be null, in which case a
 *        temporary canvas sized to the render target is created
 * @return a data URL (format per {@code canvas.toDataURL()}, PNG by default)
 */
public static final String toDataUrl(WebGLRenderer renderer,WebGLRenderTarget renderTarget,Canvas canvas){
	int w= renderTarget.getWidth();
	int h= renderTarget.getHeight();
	CanvasElement element=canvas!=null?canvas.getCanvasElement():null;
	return toDataUrl(renderer.getContext(), renderTarget.getWebglTexture(),w,h, element);
}
/**
 * JSNI implementation: wraps the texture in a temporary framebuffer, reads the pixels
 * with {@code gl.readPixels}, copies them onto a 2D canvas and returns its data URL.
 *
 * NOTE(review): the readback is not vertically flipped — WebGL's origin is bottom-left
 * while canvas ImageData is top-left, so the image may come out upside down; confirm
 * callers expect this orientation.
 * NOTE(review): when a canvas {@code c} is supplied its width/height are NOT resized to
 * the texture dimensions — verify callers pass a correctly sized canvas.
 */
public static final native String toDataUrl(WebGLContext gl, JavaScriptObject texture, int width, int height,CanvasElement c)/*-{
// Create a framebuffer backed by the texture
var framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
// Read the contents of the framebuffer
var data = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.deleteFramebuffer(framebuffer);
// Create a 2D canvas to store the result
var canvas;
if(c){
canvas=c;
}else{
canvas= document.createElement('canvas');
canvas.width = width;
canvas.height = height;
}
var context = canvas.getContext('2d');
// Copy the pixels to a 2D canvas
var imageData = context.createImageData(width, height);
imageData.data.set(data);
context.putImageData(imageData, 0, 0);
return canvas.toDataURL();
}-*/;
}
| apache-2.0 |
orientechnologies/orientdb | core/src/main/java/com/orientechnologies/orient/core/sql/functions/OIndexableSQLFunction.java | 6531 | /*
*
* * Copyright 2015 OrientDB LTD (info(at)orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientdb.com
*
*/
package com.orientechnologies.orient.core.sql.functions;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.sql.parser.OBinaryCompareOperator;
import com.orientechnologies.orient.core.sql.parser.OExpression;
import com.orientechnologies.orient.core.sql.parser.OFromClause;
/**
* This interface represents SQL functions whose implementation can rely on an index. If used in a
* WHERE condition, this kind of function can be invoked to retrieve target records from an
* underlying structure, like an index
*
* @author Luigi Dell'Aquila (l.dellaquila-(at)-orientdb.com)
*/
public interface OIndexableSQLFunction extends OSQLFunction {

  /**
   * returns all the entries belonging to the target that match the binary condition where this
   * function appears
   *
   * @param target the query target
   * @param operator the operator after the function, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the operator is >
   * @param rightValue the value that has to be compared to the function result, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the right value is 4
   * @param ctx the command context for this query
   * @param args the function arguments, eg. in <code>select from Foo where myFunction(name) > 4
   *     </code> the arguments are [name]
   * @return an iterable of records that match the condition; null means that the execution could
   *     not be performed for some reason.
   */
  public Iterable<OIdentifiable> searchFromTarget(
      OFromClause target,
      OBinaryCompareOperator operator,
      Object rightValue,
      OCommandContext ctx,
      OExpression... args);

  /**
   * estimates the number of entries returned by searchFromTarget() with these parameters
   *
   * @param target the query target
   * @param operator the operator after the function, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the operator is >
   * @param rightValue the value that has to be compared to the function result, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the right value is 4
   * @param ctx the command context for this query
   * @param args the function arguments, eg. in <code>select from Foo where myFunction(name) > 4
   *     </code> the arguments are [name]
   * @return an estimation of how many entries will be returned by searchFromTarget() with these
   *     parameters, -1 if the estimation cannot be done
   */
  public long estimate(
      OFromClause target,
      OBinaryCompareOperator operator,
      Object rightValue,
      OCommandContext ctx,
      OExpression... args);

  /**
   * checks if the function can be used even on single records, not as an indexed function (even if
   * the index does not exist at all)
   *
   * @param target the query target
   * @param operator the operator after the function, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the operator is >
   * @param rightValue the value that has to be compared to the function result, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the right value is 4
   * @param ctx the command context for this query
   * @param args the function arguments, eg. in <code>select from Foo where myFunction(name) > 4
   *     </code> the arguments are [name]
   * @return true if the function can be calculated without the index. False otherwise
   */
  public boolean canExecuteInline(
      OFromClause target,
      OBinaryCompareOperator operator,
      Object rightValue,
      OCommandContext ctx,
      OExpression... args);

  /**
   * Checks if this function can be used to fetch data from this target and with these arguments
   * (eg. if the index exists on this target and it's defined on these fields)
   *
   * @param target the query target
   * @param operator the operator after the function, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the operator is >
   * @param rightValue the value that has to be compared to the function result, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the right value is 4
   * @param ctx the command context for this query
   * @param args the function arguments, eg. in <code>select from Foo where myFunction(name) > 4
   *     </code> the arguments are [name]
   * @return True if the function can be used to fetch from an index. False otherwise
   */
  public boolean allowsIndexedExecution(
      OFromClause target,
      OBinaryCompareOperator operator,
      Object rightValue,
      OCommandContext ctx,
      OExpression... args);

  /**
   * Checks if this function should be called even if the method {@link #searchFromTarget} is
   * executed.
   *
   * @param target the query target
   * @param operator the operator after the function, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the operator is >
   * @param rightValue the value that has to be compared to the function result, eg. in <code>
   *     select from Foo where myFunction(name) > 4</code> the right value is 4
   * @param ctx the command context for this query
   * @param args the function arguments, eg. in <code>select from Foo where myFunction(name) > 4
   *     </code> the arguments are [name]
   * @return True if this function should be called even if the method {@link #searchFromTarget} is
   *     executed. False otherwise
   */
  public boolean shouldExecuteAfterSearch(
      OFromClause target,
      OBinaryCompareOperator operator,
      Object rightValue,
      OCommandContext ctx,
      OExpression... args);
}
| apache-2.0 |
wuyinlei/MyHearts | app/src/main/java/ruolan/com/myhearts/entity/GroupDetailBean.java | 8824 | package ruolan.com.myhearts.entity;
import java.io.Serializable;
import java.util.List;
/**
* Created by wuyinlei on 2016/11/6.
*/
/**
 * Response bean for the group-detail API endpoint.
 *
 * NOTE(review): field names mirror the JSON keys of the server response (most
 * numeric values arrive as strings) — do not rename fields without updating
 * the JSON (de)serialization mapping.
 *
 * Created by wuyinlei on 2016/11/6.
 */
public class GroupDetailBean implements Serializable{

    // Response envelope. Sample: errorCode=0, errorStr="success",
    // resultCount=25, score=0, balance=0, results={group payload}.
    private int errorCode;
    private String errorStr;
    private int resultCount;
    private int score;
    private int balance;

    // Group payload; see ResultsBean for the per-field sample values.
    private ResultsBean results;

    public int getErrorCode() {
        return errorCode;
    }

    public void setErrorCode(int errorCode) {
        this.errorCode = errorCode;
    }

    public String getErrorStr() {
        return errorStr;
    }

    public void setErrorStr(String errorStr) {
        this.errorStr = errorStr;
    }

    public int getResultCount() {
        return resultCount;
    }

    public void setResultCount(int resultCount) {
        this.resultCount = resultCount;
    }

    public int getScore() {
        return score;
    }

    public void setScore(int score) {
        this.score = score;
    }

    public int getBalance() {
        return balance;
    }

    public void setBalance(int balance) {
        this.balance = balance;
    }

    public ResultsBean getResults() {
        return results;
    }

    public void setResults(ResultsBean results) {
        this.results = results;
    }

    /**
     * Group payload. Sample values (from a real response): id="269",
     * ownerId="7301", name="我是小三", member="169", createdDate=1427201143
     * (Unix seconds), latitude/longitude as strings, needConfirm/joined/
     * isBlocked as "0"/"1" flags, memberAvatars as a list of avatar URLs.
     */
    public static class ResultsBean {
        private String id;
        private String ownerId;
        private String ownerName;
        private String ownerAvatar;
        private String ownerIsConsultant;
        private String name;
        private String icon;
        private String gallery;
        private String slogan;
        private String subject;
        private String maxMember;
        private String member;
        private int createdDate;
        private String latitude;
        private String longitude;
        private String location;
        private String catgId;
        private String needConfirm;
        private String pending;
        private String pendingCnt;
        private String distance;
        private String joined;
        private String isBlocked;
        private String gotyeGroupId;
        private List<String> memberAvatars;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getOwnerId() {
            return ownerId;
        }

        public void setOwnerId(String ownerId) {
            this.ownerId = ownerId;
        }

        public String getOwnerName() {
            return ownerName;
        }

        public void setOwnerName(String ownerName) {
            this.ownerName = ownerName;
        }

        public String getOwnerAvatar() {
            return ownerAvatar;
        }

        public void setOwnerAvatar(String ownerAvatar) {
            this.ownerAvatar = ownerAvatar;
        }

        public String getOwnerIsConsultant() {
            return ownerIsConsultant;
        }

        public void setOwnerIsConsultant(String ownerIsConsultant) {
            this.ownerIsConsultant = ownerIsConsultant;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getIcon() {
            return icon;
        }

        public void setIcon(String icon) {
            this.icon = icon;
        }

        public String getGallery() {
            return gallery;
        }

        public void setGallery(String gallery) {
            this.gallery = gallery;
        }

        public String getSlogan() {
            return slogan;
        }

        public void setSlogan(String slogan) {
            this.slogan = slogan;
        }

        public String getSubject() {
            return subject;
        }

        public void setSubject(String subject) {
            this.subject = subject;
        }

        public String getMaxMember() {
            return maxMember;
        }

        public void setMaxMember(String maxMember) {
            this.maxMember = maxMember;
        }

        public String getMember() {
            return member;
        }

        public void setMember(String member) {
            this.member = member;
        }

        public int getCreatedDate() {
            return createdDate;
        }

        public void setCreatedDate(int createdDate) {
            this.createdDate = createdDate;
        }

        public String getLatitude() {
            return latitude;
        }

        public void setLatitude(String latitude) {
            this.latitude = latitude;
        }

        public String getLongitude() {
            return longitude;
        }

        public void setLongitude(String longitude) {
            this.longitude = longitude;
        }

        public String getLocation() {
            return location;
        }

        public void setLocation(String location) {
            this.location = location;
        }

        public String getCatgId() {
            return catgId;
        }

        public void setCatgId(String catgId) {
            this.catgId = catgId;
        }

        public String getNeedConfirm() {
            return needConfirm;
        }

        public void setNeedConfirm(String needConfirm) {
            this.needConfirm = needConfirm;
        }

        public String getPending() {
            return pending;
        }

        public void setPending(String pending) {
            this.pending = pending;
        }

        public String getPendingCnt() {
            return pendingCnt;
        }

        public void setPendingCnt(String pendingCnt) {
            this.pendingCnt = pendingCnt;
        }

        public String getDistance() {
            return distance;
        }

        public void setDistance(String distance) {
            this.distance = distance;
        }

        public String getJoined() {
            return joined;
        }

        public void setJoined(String joined) {
            this.joined = joined;
        }

        public String getIsBlocked() {
            return isBlocked;
        }

        public void setIsBlocked(String isBlocked) {
            this.isBlocked = isBlocked;
        }

        public String getGotyeGroupId() {
            return gotyeGroupId;
        }

        public void setGotyeGroupId(String gotyeGroupId) {
            this.gotyeGroupId = gotyeGroupId;
        }

        public List<String> getMemberAvatars() {
            return memberAvatars;
        }

        public void setMemberAvatars(List<String> memberAvatars) {
            this.memberAvatars = memberAvatars;
        }
    }
}
| apache-2.0 |
hufsm/PocketHub | app/src/main/java/com/github/pockethub/ui/gist/CreateGistTask.java | 2750 | /*
* Copyright (c) 2015 PocketHub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pockethub.ui.gist;
import android.accounts.Account;
import android.app.Activity;
import android.util.Log;
import com.alorma.github.sdk.bean.dto.response.Gist;
import com.alorma.github.sdk.bean.dto.response.GistFile;
import com.alorma.github.sdk.bean.dto.response.GistFilesMap;
import com.alorma.github.sdk.services.gists.PublishGistClient;
import com.github.pockethub.R;
import com.github.pockethub.ui.ProgressDialogTask;
import com.github.pockethub.util.ToastUtils;
/**
* Task to create a {@link Gist}
*/
public class CreateGistTask extends ProgressDialogTask<Gist> {
private static final String TAG = "CreateGistTask";
private final String description;
private final boolean isPublic;
private final String name;
private final String content;
/**
* Create task that creates a {@link Gist}
*
* @param activity
* @param description
* @param isPublic
* @param name
* @param content
*/
public CreateGistTask(Activity activity, String description,
boolean isPublic, String name, String content) {
super(activity);
this.description = description;
this.isPublic = isPublic;
this.name = name;
this.content = content;
}
@Override
public Gist run(Account account) throws Exception {
Gist gist = new Gist();
gist.description = description;
gist.isPublic = isPublic;
GistFile file = new GistFile();
file.content = content;
file.filename = name;
GistFilesMap map = new GistFilesMap();
map.put(name, file);
gist.files = map;
return new PublishGistClient(gist).observable().toBlocking().first();
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
Log.d(TAG, "Exception creating Gist", e);
ToastUtils.show((Activity) getContext(), e.getMessage());
}
/**
* Create the {@link Gist} with the configured values
*/
public void create() {
showIndeterminate(R.string.creating_gist);
execute();
}
}
| apache-2.0 |
steveKapturowski/tensorflow-rl | algorithms/cem_actor_learner.py | 3543 | # -*- coding: utf-8 -*-
import numpy as np
import utils.logger
import tensorflow as tf
from networks.policy_v_network import PolicyNetwork
from policy_based_actor_learner import BaseA3CLearner
logger = utils.logger.getLogger('cross_entropy_actor_learner')
class CEMLearner(BaseA3CLearner):
'''
Implementation of Cross-Entropy Method, Useful as a baseline for simple environments
'''
def __init__(self, args):
super(CEMLearner, self).__init__(args)
policy_conf = {'name': 'local_learning_{}'.format(self.actor_id),
'input_shape': self.input_shape,
'num_act': self.num_actions,
'args': args}
self.local_network = args.network(policy_conf)
self.num_params = np.sum([
np.prod(v.get_shape().as_list())
for v in self.local_network.params])
logger.info('Parameter count: {}'.format(self.num_params))
self.mu = np.zeros(self.num_params)
self.sigma = np.ones(self.num_params)
self.num_samples = args.episodes_per_batch
self.num_epochs = args.num_epochs
if self.is_master():
var_list = self.local_network.params
self.saver = tf.train.Saver(var_list=var_list, max_to_keep=3,
keep_checkpoint_every_n_hours=2)
    def choose_next_action(self, state):
        # Delegate action selection to the policy network; callers unpack the
        # result as (action, policy_distribution) — see the `a, pi = ...` use in train().
        return self.local_network.get_action(self.session, state)
def sample_theta(self, N):
return self.mu + np.random.randn(N, self.num_params)*self.sigma
def update_sample_distribution(self, population, rewards, keep_ratio=0.25, noise=0.01):
num_to_keep = int(keep_ratio * len(population))
elite = np.array(rewards).argsort()[-num_to_keep:]
self.mu = population[elite].mean(axis=0)
self.sigma = population[elite].std(axis=0) + noise
return np.array(rewards)[elite].mean(), population[elite]
def train(self):
consecutive_successes = 0
for epoch in xrange(self.num_epochs):
episode_rewards = list()
population = self.sample_theta(self.num_samples)
for theta in population:
self.assign_vars(self.local_network, theta)
s = self.emulator.get_initial_state()
total_episode_reward = 0.0
episode_over = False
while not episode_over:
a, pi = self.choose_next_action(s)
s, reward, episode_over = self.emulator.next(a)
total_episode_reward += reward
episode_rewards.append(total_episode_reward)
population_mean_reward = np.array(episode_rewards).mean()
elite_mean_reward, elite_set = self.update_sample_distribution(
population, episode_rewards, noise=.1/(1+epoch))
logger.info('Epoch {} / Population Mean {} / Elite Mean {}'.format(
epoch+1, population_mean_reward, elite_mean_reward))
if self.emulator.env.spec.reward_threshold and elite_mean_reward > self.emulator.env.spec.reward_threshold:
consecutive_successes += 1
else:
consecutive_successes = 0
#if the elite set is consistently good enough we'll stop updating the parameters
if consecutive_successes > 10:
logger.info('switching to testing mode...')
episode_rewards = list()
for _ in xrange(200):
i = np.random.choice(elite_set.shape[0])
self.assign_vars(self.local_network, elite_set[i])
s = self.emulator.get_initial_state()
total_episode_reward = 0.0
episode_over = False
while not episode_over:
a, pi = self.choose_next_action(s)
s, reward, episode_over = self.emulator.next(a)
total_episode_reward += reward
episode_rewards.append(total_episode_reward)
logger.info('Average Test Reward: {}'.format(np.array(episode_rewards).mean()))
return
| apache-2.0 |
sciage/FinalProject | app/src/main/java/in/beacandid/app/beacandid/ui/base/MvpView.java | 1362 | /*
* Copyright (C) 2017 MINDORKS NEXTGEN PRIVATE LIMITED
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://mindorks.com/license/apache-v2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package in.beacandid.app.beacandid.ui.base;
/**
* Created by janisharali on 27/01/17.
*/
import android.support.annotation.StringRes;
/**
 * Base interface that any class that wants to act as a View in the MVP (Model View Presenter)
 * pattern must implement. Generally this interface will be extended by a more specific interface
 * that then usually will be implemented by an Activity or Fragment.
 */
public interface MvpView {

    /** Shows a progress indicator while a long-running operation is in flight. */
    void showLoading();

    /** Hides the progress indicator shown by {@link #showLoading()}. */
    void hideLoading();

    /** Opens the appropriate activity when the authentication token has expired. */
    void openActivityOnTokenExpire();

    /** Displays an error identified by a string resource. */
    void onError(@StringRes int resId);

    /** Displays an error with a literal message. */
    void onError(String message);

    /** Displays an informational message. */
    void showMessage(String message);

    /** Displays an informational message identified by a string resource. */
    void showMessage(@StringRes int resId);

    /** @return {@code true} if the device currently has network connectivity */
    boolean isNetworkConnected();

    /** Hides the soft keyboard if it is visible. */
    void hideKeyboard();
}
| apache-2.0 |
reportportal/service-api | src/main/java/com/epam/ta/reportportal/core/launch/cluster/CreateClusterHandler.java | 891 | /*
* Copyright 2021 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.core.launch.cluster;
import com.epam.ta.reportportal.core.analyzer.auto.client.model.cluster.ClusterData;
/**
 * Handler responsible for persisting launch cluster data.
 *
 * @author <a href="mailto:ivan_budayeu@epam.com">Ivan Budayeu</a>
 */
public interface CreateClusterHandler {

    /**
     * Stores the provided cluster data (produced by the auto-analyzer, per the
     * {@link ClusterData} model's package).
     *
     * @param clusterData clusters to persist
     */
    void create(ClusterData clusterData);
}
| apache-2.0 |
GoogleCloudPlatform/professional-services | tools/lambda-compat/cmd/lambda-compat/main.go | 5028 | package main
/*
Copyright 2022 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"encoding/json"
"os"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"cloud.google.com/go/compute/metadata"
lambdacompat "github.com/GoogleCloudPlatform/professional-services/tools/lambda-compat"
)
// regionMap maps a GCP region to a geographically close AWS region so the
// compatibility layer can report a plausible AWS region to Lambda code.
// It can be overridden at runtime via the REGION_MAP environment variable
// (see main).
var regionMap map[string]string = map[string]string{
	"europe-north1":           "eu-north-1",     // Europe (Stockholm) -> Hamina, Finland
	"asia-south1":             "ap-south-1",     // Asia Pacific (Mumbai) -> Mumbai, India
	"asia-south2":             "ap-south-1",     // Asia Pacific (Mumbai) -> Delhi, India
	"asia-east1":              "ap-east-1",      // Asia Pacific (Hong Kong) -> Changhua, Taiwan
	"asia-east2":              "ap-east-1",      // Asia Pacific (Hong Kong) -> Hong Kong
	"europe-central2":         "eu-central-1",   // Europe (Frankfurt) -> Warsaw, Poland
	"europe-west1":            "eu-west-3",      // Europe (Paris) -> St. Ghislain, Belgium
	"europe-west2":            "eu-west-2",      // Europe (London) -> London, United Kingdom
	"europe-west3":            "eu-central-1",   // Europe (Frankfurt) -> Frankfurt, Germany
	"europe-west4":            "eu-central-1",   // Europe (Frankfurt) -> Eemshaven, Netherlands
	"asia-northeast2":         "ap-northeast-3", // Asia Pacific (Osaka) -> Osaka, Japan
	"asia-northeast3":         "ap-northeast-2", // Asia Pacific (Seoul) -> Seoul, South Korea
	"asia-northeast1":         "ap-northeast-1", // Asia Pacific (Tokyo) -> Tokyo, Japan
	"southamerica-east1":      "sa-east-1",      // South America (São Paolo) -> São Paolo, Brazil
	"southamerica-west1":      "sa-east-1",      // South America (São Paolo) -> Santiago, Chile
	"northamerica-northeast1": "ca-central-1",   // Canada (Central) -> Montreal, Canada
	"northamerica-northeast2": "ca-central-1",   // Canada (Central) -> Toronto, Canada
	"asia-southeast1":         "ap-southeast-1", // Asia Pacific (Singapore) -> Singapore
	"australia-southeast1":    "ap-southeast-2", // Asia Pacific (Sydney) -> Sydney, Australia
	"australia-southeast2":    "ap-southeast-2", // Asia Pacific (Sydney) -> Melbourne, Australia
	"us-east1":                "us-east-1",      // US East (N. Virginia) -> Virginia, United States
	"us-east4":                "us-east-1",      // US East (N. Virginia) -> Virginia, United States
	"us-central1":             "us-east-2",      // US East (Ohio) -> Iowa, United States
	"us-west1":                "us-west-2",      // US West (Oregon) -> Oregon, United States
	"us-west2":                "us-west-1",      // US West (N. California) -> California, United States
	"us-west3":                "us-west-1",      // US West (N. California) -> Utah, United States
	"us-west4":                "us-west-1",      // US West (N. California) -> Nevada, United States
}
func main() {
if len(os.Args) < 2 {
log.Fatal().Msg("No command specified!")
}
region := os.Getenv("REGION")
projectNum := os.Getenv("PROJECT_NUMBER")
service := os.Getenv("K_SERVICE")
audience := os.Getenv("OIDC_AUDIENCE")
roleArn := os.Getenv("AWS_ROLE_ARN")
jsonTransform := os.Getenv("JSON_TRANSFORM")
zerolog.SetGlobalLevel(zerolog.InfoLevel)
if os.Getenv("DEBUG") != "" {
zerolog.SetGlobalLevel(zerolog.DebugLevel)
}
var err error
// Retrieve details from metadata endpoint if on GCP
if metadata.OnGCE() {
mc := metadata.NewClient(nil)
region, err = mc.Get("region")
if err != nil {
log.Fatal().Err(err)
}
projectNum, err = mc.NumericProjectID()
if err != nil {
log.Fatal().Err(err)
}
region, err = mc.InstanceAttributeValue("region")
if err != nil {
log.Fatal().Err(err)
}
}
// Just some reasonable defaults
if projectNum == "" {
projectNum = "1234567890"
}
if service == "" {
service = "custom-function"
}
if region == "" {
region = "europe-west4"
}
// Override regions
overrideRegionMap := os.Getenv("REGION_MAP")
if overrideRegionMap != "" {
err := json.Unmarshal([]byte(overrideRegionMap), ®ionMap)
if err != nil {
log.Fatal().Err(err)
}
}
if overrideRegion, ok := regionMap[region]; ok {
region = overrideRegion
}
log.Info().Str("service", service).Str("projectNumber", projectNum).Str("region", region).Str("audience", audience).Str("roleArn", roleArn).Msg("Starting Lambda server")
server := lambdacompat.NewLambdaCompatServer(os.Args[1:len(os.Args)], 8080, region, projectNum, service, audience, roleArn, jsonTransform)
err = server.Start()
if err != nil {
log.Fatal().Err(err)
}
}
| apache-2.0 |
nonamejx/qltiemchung | qltiemchung/src/com/qltiemchung/controller/admin/VacxinThemServlet.java | 1566 | package com.qltiemchung.controller.admin;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.qltiemchung.model.bean.MessageBundle;
import com.qltiemchung.model.bo.LoaiVacxinBO;
import com.qltiemchung.utils.MyUtils;
/**
 * Servlet backing the "add vaccine" admin page: loads the available vaccine
 * types and forwards to {@code /admin/vacxin-them.jsp}.
 */
@WebServlet("/VacxinThemServlet")
public class VacxinThemServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#HttpServlet()
     */
    public VacxinThemServlet() {
        super();
    }

    /**
     * GET is handled identically to POST.
     *
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    /**
     * Loads the localized message bundle and the full list of vaccine types,
     * then forwards to the add-vaccine JSP.
     *
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        MessageBundle bundle = MyUtils.getMessageBundle(request);
        request.setAttribute("DanhSachLoaiVacxin", new LoaiVacxinBO().getTatCaLoaiVacxin());
        MyUtils.putMessageBundle(request, bundle);
        MyUtils.forward(getServletContext(), request, response, "/admin/vacxin-them.jsp");
    }
}
| apache-2.0 |
omsmith/node-jwk-to-pem | src/jwk-to-pem.js | 526 | 'use strict';
var ec = require('./ec'),
rsa = require('./rsa');
function jwkToBuffer (jwk) {
if ('object' !== typeof jwk || null === jwk) {
throw new TypeError('Expected "jwk" to be an Object');
}
var kty = jwk.kty;
if ('string' !== typeof kty) {
throw new TypeError('Expected "jwk.kty" to be a String');
}
switch (kty) {
case 'EC': {
return ec(jwk);
}
case 'RSA': {
return rsa(jwk);
}
default: {
throw new Error('Unsupported key type "' + kty + '"');
}
}
}
module.exports = jwkToBuffer;
| apache-2.0 |
googleads/googleads-php-lib | src/Google/AdsApi/AdManager/v202111/PauseProposalLineItems.php | 750 | <?php
namespace Google\AdsApi\AdManager\v202111;
/**
 * This file was generated from WSDL. DO NOT EDIT.
 *
 * Ad Manager action that pauses proposal line items, recording the reason
 * for the pause.
 */
class PauseProposalLineItems extends \Google\AdsApi\AdManager\v202111\ProposalLineItemAction
{

    /**
     * @var string $reason the reason recorded on the paused proposal line items
     */
    protected $reason = null;

    /**
     * @param string $reason reason for pausing (may be null)
     */
    public function __construct($reason = null)
    {
        $this->reason = $reason;
    }

    /**
     * @return string
     */
    public function getReason()
    {
        return $this->reason;
    }

    /**
     * @param string $reason
     * @return \Google\AdsApi\AdManager\v202111\PauseProposalLineItems this instance, for chaining
     */
    public function setReason($reason)
    {
        $this->reason = $reason;
        return $this;
    }
}
| apache-2.0 |
ys305751572/shouye | src/main/java/com/smallchill/api/function/modal/Message.java | 4797 | package com.smallchill.api.function.modal;
import com.smallchill.api.function.meta.other.MessageAction;
import org.apache.commons.lang3.StringUtils;
import org.beetl.sql.core.annotatoin.Table;
import com.smallchill.core.annotation.BindID;
import com.smallchill.core.base.model.BaseModel;
import javax.persistence.Column;
/**
 * Message entity mapped to table {@code tb_message}.
 *
 * <p>A message is sent from {@code fromId} to {@code toId} (a user or an
 * organization, see {@link #getReceiveType()}) and may carry up to four
 * optional actions, each stored as a {@code "actionName|actionUrl"} string
 * (see {@link #addMessageAction(MessageAction)}).</p>
 *
 * Generated by yesong.
 * 2016-10-27 17:24:13
 */
@Table(name = "tb_message")
@BindID(name = "id")
@SuppressWarnings("serial")
public class Message extends BaseModel {

    @Column(name = "id")
    private Integer id;

    // Sender id.
    @Column(name = "from_id")
    private Integer fromId;

    // Recipient id (user or organization, depending on receiveType).
    @Column(name = "to_id")
    private Integer toId;

    @Column(name = "label")
    private String label = "";

    @Column(name = "title")
    private String title;

    @Column(name = "content")
    private String content;

    @Column(name = "replaces")
    private String replaces = "";

    // Action slots, each encoded as "actionName|actionUrl"; empty when unused.
    @Column(name = "action1")
    private String action1 = "";

    @Column(name = "action2")
    private String action2 = "";

    @Column(name = "action3")
    private String action3 = "";

    @Column(name = "action4")
    private String action4 = "";

    // Receive type: 1 = user, 2 = organization
    @Column(name = "receive_type")
    private Integer receiveType;

    // Send type: 1 = in-app message, 2 = SMS, 3 = scrolling announcement
    @Column(name = "send_type")
    private Integer sendType;

    // Scheduled send time
    @Column(name = "send_time")
    private Long sendTime;

    // Organization messages: 1 = single send, 2 = mass send
    @Column(name = "send_mass")
    private Integer sendMass;

    // Organization messages: send date
    @Column(name = "send_date")
    private Long sendDate;

    @Column(name = "create_time")
    private Long createTime;

    /**
     * Stores the given action in the first empty action slot.
     *
     * <p>Bug fix: the original implementation used four independent
     * {@code if} blocks, so a single call wrote the same action into every
     * blank slot instead of just the first one. The else-if chain below
     * fills exactly one slot per call; if all four are occupied the action
     * is silently dropped, as before.</p>
     */
    public void addMessageAction(MessageAction ma) {
        String combined = combinationMa(ma);
        if (StringUtils.isBlank(action1)) {
            action1 = combined;
        } else if (StringUtils.isBlank(action2)) {
            action2 = combined;
        } else if (StringUtils.isBlank(action3)) {
            action3 = combined;
        } else if (StringUtils.isBlank(action4)) {
            action4 = combined;
        }
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    /** Encodes an action as {@code "actionName|actionUrl"}. */
    private String combinationMa(MessageAction ma) {
        return ma.getActionName() + "|" + ma.getActionUrl();
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Integer getFromId() {
        return fromId;
    }

    public void setFromId(Integer fromId) {
        this.fromId = fromId;
    }

    public Integer getToId() {
        return toId;
    }

    public void setToId(Integer toId) {
        this.toId = toId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getReplaces() {
        return replaces;
    }

    public void setReplaces(String replaces) {
        this.replaces = replaces;
    }

    public String getAction1() {
        return action1;
    }

    public void setAction1(String action1) {
        this.action1 = action1;
    }

    public String getAction2() {
        return action2;
    }

    public void setAction2(String action2) {
        this.action2 = action2;
    }

    public String getAction3() {
        return action3;
    }

    public void setAction3(String action3) {
        this.action3 = action3;
    }

    public String getAction4() {
        return action4;
    }

    public void setAction4(String action4) {
        this.action4 = action4;
    }

    public Integer getReceiveType() {
        return receiveType;
    }

    public void setReceiveType(Integer receiveType) {
        this.receiveType = receiveType;
    }

    public Integer getSendType() {
        return sendType;
    }

    public void setSendType(Integer sendType) {
        this.sendType = sendType;
    }

    public Long getSendTime() {
        return sendTime;
    }

    public void setSendTime(Long sendTime) {
        this.sendTime = sendTime;
    }

    public Long getSendDate() {
        return sendDate;
    }

    public void setSendDate(Long sendDate) {
        this.sendDate = sendDate;
    }

    public Integer getSendMass() {
        return sendMass;
    }

    public void setSendMass(Integer sendMass) {
        this.sendMass = sendMass;
    }
}
| apache-2.0 |
cdapio/sql-pipelines | sql-data-pipeline/src/main/java/io/cdap/pipeline/sql/app/core/SQLConfig.java | 1545 | /*
* Copyright © 2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.pipeline.sql.app.core;
import io.cdap.cdap.etl.proto.v2.ETLConfig;
import java.util.HashMap;
import java.util.HashSet;
import javax.annotation.Nullable;
/**
 * Represents an SQL configuration object.
 *
 * Currently, the SQLConfig object represents a direct extension of {@link ETLConfig}, but future configuration
 * variables may be added.
 */
public class SQLConfig extends ETLConfig {

    // Schedule expression for the pipeline; may be null.
    // NOTE(review): format (cron vs. interval) is not determined by this class — confirm with the scheduler.
    private final String schedule;

    // Path to a service account credentials file; may be null.
    private final String serviceAccountPath;

    /**
     * For compilation purposes: delegates to {@link ETLConfig} with empty
     * stage/connection sets and default flags, keeping only the two
     * SQL-specific fields.
     *
     * @param schedule pipeline schedule expression, or null
     * @param serviceAccountPath path to service account credentials, or null
     */
    public SQLConfig(String schedule, String serviceAccountPath) {
        super(new HashSet<>(), new HashSet<>(), null, null, null,
            false, false, 0, new HashMap<>());
        this.schedule = schedule;
        this.serviceAccountPath = serviceAccountPath;
    }

    /** @return the schedule expression, or null if unset */
    @Nullable
    public String getSchedule() {
        return schedule;
    }

    /** @return the service account credentials path, or null if unset */
    @Nullable
    public String getServiceAccountPath() {
        return serviceAccountPath;
    }
}
| apache-2.0 |
moosbusch/xbCDWALite | src/edu/getty/cdwa/cdwaLite/DescriptiveNoteSetDocument.java | 14581 | /*
* Copyright 2013 Gunnar Kappei.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.getty.cdwa.cdwaLite;
/**
 * A document containing one descriptiveNoteSet(@http://www.getty.edu/CDWA/CDWALite) element.
 *
 * This is a complex type.
 *
 * <p>NOTE: this interface is XMLBeans-generated binding code (note the
 * schema type-system handle below); it should be regenerated from the
 * schema rather than edited by hand.</p>
 */
public interface DescriptiveNoteSetDocument extends org.apache.xmlbeans.XmlObject
{
    public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
        org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(DescriptiveNoteSetDocument.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s1F643FAF4399D1206A04583D585DB128").resolveHandle("descriptivenoteset590bdoctype");

    /**
     * Gets the "descriptiveNoteSet" element
     */
    edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet getDescriptiveNoteSet();

    /**
     * Sets the "descriptiveNoteSet" element
     */
    void setDescriptiveNoteSet(edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet descriptiveNoteSet);

    /**
     * Appends and returns a new empty "descriptiveNoteSet" element
     */
    edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet addNewDescriptiveNoteSet();

    /**
     * An XML descriptiveNoteSet(@http://www.getty.edu/CDWA/CDWALite).
     *
     * This is a complex type.
     */
    public interface DescriptiveNoteSet extends org.apache.xmlbeans.XmlObject
    {
        public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
            org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(DescriptiveNoteSet.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s1F643FAF4399D1206A04583D585DB128").resolveHandle("descriptivenoteset384belemtype");

        /**
         * Gets the "descriptiveNote" element
         */
        java.lang.String getDescriptiveNote();

        /**
         * Gets (as xml) the "descriptiveNote" element
         */
        org.apache.xmlbeans.XmlString xgetDescriptiveNote();

        /**
         * True if has "descriptiveNote" element
         */
        boolean isSetDescriptiveNote();

        /**
         * Sets the "descriptiveNote" element
         */
        void setDescriptiveNote(java.lang.String descriptiveNote);

        /**
         * Sets (as xml) the "descriptiveNote" element
         */
        void xsetDescriptiveNote(org.apache.xmlbeans.XmlString descriptiveNote);

        /**
         * Unsets the "descriptiveNote" element
         */
        void unsetDescriptiveNote();

        /**
         * Gets a List of "sourceDescriptiveNote" elements
         */
        java.util.List<java.lang.String> getSourceDescriptiveNoteList();

        /**
         * Gets array of all "sourceDescriptiveNote" elements
         * @deprecated
         */
        @Deprecated
        java.lang.String[] getSourceDescriptiveNoteArray();

        /**
         * Gets ith "sourceDescriptiveNote" element
         */
        java.lang.String getSourceDescriptiveNoteArray(int i);

        /**
         * Gets (as xml) a List of "sourceDescriptiveNote" elements
         */
        java.util.List<org.apache.xmlbeans.XmlString> xgetSourceDescriptiveNoteList();

        /**
         * Gets (as xml) array of all "sourceDescriptiveNote" elements
         * @deprecated
         */
        @Deprecated
        org.apache.xmlbeans.XmlString[] xgetSourceDescriptiveNoteArray();

        /**
         * Gets (as xml) ith "sourceDescriptiveNote" element
         */
        org.apache.xmlbeans.XmlString xgetSourceDescriptiveNoteArray(int i);

        /**
         * Returns number of "sourceDescriptiveNote" element
         */
        int sizeOfSourceDescriptiveNoteArray();

        /**
         * Sets array of all "sourceDescriptiveNote" element
         */
        void setSourceDescriptiveNoteArray(java.lang.String[] sourceDescriptiveNoteArray);

        /**
         * Sets ith "sourceDescriptiveNote" element
         */
        void setSourceDescriptiveNoteArray(int i, java.lang.String sourceDescriptiveNote);

        /**
         * Sets (as xml) array of all "sourceDescriptiveNote" element
         */
        void xsetSourceDescriptiveNoteArray(org.apache.xmlbeans.XmlString[] sourceDescriptiveNoteArray);

        /**
         * Sets (as xml) ith "sourceDescriptiveNote" element
         */
        void xsetSourceDescriptiveNoteArray(int i, org.apache.xmlbeans.XmlString sourceDescriptiveNote);

        /**
         * Inserts the value as the ith "sourceDescriptiveNote" element
         */
        void insertSourceDescriptiveNote(int i, java.lang.String sourceDescriptiveNote);

        /**
         * Appends the value as the last "sourceDescriptiveNote" element
         */
        void addSourceDescriptiveNote(java.lang.String sourceDescriptiveNote);

        /**
         * Inserts and returns a new empty value (as xml) as the ith "sourceDescriptiveNote" element
         */
        org.apache.xmlbeans.XmlString insertNewSourceDescriptiveNote(int i);

        /**
         * Appends and returns a new empty value (as xml) as the last "sourceDescriptiveNote" element
         */
        org.apache.xmlbeans.XmlString addNewSourceDescriptiveNote();

        /**
         * Removes the ith "sourceDescriptiveNote" element
         */
        void removeSourceDescriptiveNote(int i);

        /**
         * A factory class with static methods for creating instances
         * of this type.
         */
        public static final class Factory
        {
            public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet newInstance() {
                return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }

            public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet newInstance(org.apache.xmlbeans.XmlOptions options) {
                return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument.DescriptiveNoteSet) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }

            private Factory() { } // No instance of this class allowed
        }
    }

    /**
     * A factory class with static methods for creating instances
     * of this type.
     */
    public static final class Factory
    {
        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument newInstance() {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument newInstance(org.apache.xmlbeans.XmlOptions options) {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }

        /** @param xmlAsString the string value to parse */
        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }

        /** @param file the file from which to load an xml document */
        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }

        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }

        /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
        @Deprecated
        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }

        /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
        @Deprecated
        public static edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
            return (edu.getty.cdwa.cdwaLite.DescriptiveNoteSetDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }

        /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
        @Deprecated
        public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
            return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }

        /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
        @Deprecated
        public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
            return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }

        private Factory() { } // No instance of this class allowed
    }
}
| apache-2.0 |
mmerce/python | bigml/tests/create_batch_prediction_steps.py | 10291 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import json
import requests
import csv
import traceback
from datetime import datetime
from .world import world, res_filename, logged_wait
from nose.tools import eq_, ok_, assert_less
from bigml.api import HTTP_CREATED
from bigml.api import FINISHED
from bigml.api import FAULTY
from bigml.api import get_status
from bigml.io import UnicodeReader
from .read_batch_prediction_steps import (i_get_the_batch_prediction,
i_get_the_batch_centroid, i_get_the_batch_anomaly_score)
#@step(r'I create a batch prediction for the dataset with the model$')
def i_create_a_batch_prediction(step):
    """Create a batch prediction for the current dataset and model.

    Asserts the API answered HTTP 201 and stores the created resource in
    ``world`` for later steps.
    """
    dataset = world.dataset.get('resource')
    model = world.model.get('resource')
    resource = world.api.create_batch_prediction(model, dataset)
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_prediction = resource['object']
    # Track the id so teardown code can delete the resource.
    world.batch_predictions.append(resource['resource'])
#@step(r'I create a batch prediction for the dataset with the ensemble and "(.*)"$')
def i_create_a_batch_prediction_ensemble(step, params=None):
    """Create a batch prediction for the current dataset using the ensemble.

    ``params`` holds optional creation arguments (defaults to no extras).
    Asserts HTTP 201 and stores the created resource in ``world``.
    """
    if params is None:
        params = {}
    dataset = world.dataset.get('resource')
    ensemble = world.ensemble.get('resource')
    resource = world.api.create_batch_prediction(ensemble, dataset, params)
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_prediction = resource['object']
    # Track the id so teardown code can delete the resource.
    world.batch_predictions.append(resource['resource'])
#@step(r'I wait until the batch prediction status code is either (\d) or (-\d) less than (\d+)')
def wait_until_batch_prediction_status_code_is(step, code1, code2, secs):
    """Poll the batch prediction until its status is ``code1`` or ``code2``.

    ``logged_wait`` sleeps between polls and enforces the ``secs`` budget
    (scaled by ``world.delta``).  Finally asserts the status reached
    ``code1``, the expected terminal code.
    """
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    i_get_the_batch_prediction(step, world.batch_prediction['resource'])
    status = get_status(world.batch_prediction)
    count = 0
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        count += 1
        logged_wait(start, delta, count, "batchprediction")
        i_get_the_batch_prediction(step, world.batch_prediction['resource'])
        status = get_status(world.batch_prediction)
    # Consistent with the anomaly-score waiter: report the wait budget on failure.
    eq_(status['code'], int(code1), msg="%s seconds waited." % delta)
#@step(r'I wait until the batch centroid status code is either (\d) or (-\d) less than (\d+)')
def wait_until_batch_centroid_status_code_is(step, code1, code2, secs):
    """Poll the batch centroid until its status is ``code1`` or ``code2``.

    ``logged_wait`` sleeps between polls and enforces the ``secs`` budget
    (scaled by ``world.delta``).  Finally asserts the status reached
    ``code1``, the expected terminal code.
    """
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    i_get_the_batch_centroid(step, world.batch_centroid['resource'])
    status = get_status(world.batch_centroid)
    count = 0
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        count += 1
        logged_wait(start, delta, count, "batchcentroid")
        i_get_the_batch_centroid(step, world.batch_centroid['resource'])
        status = get_status(world.batch_centroid)
    # Consistent with the anomaly-score waiter: report the wait budget on failure.
    eq_(status['code'], int(code1), msg="%s seconds waited." % delta)
#@step(r'I wait until the batch anomaly score status code is either (\d) or (-\d) less than (\d+)')
def wait_until_batch_anomaly_score_status_code_is(step, code1, code2, secs):
    """Poll the batch anomaly score until its status is code1 or code2."""
    start = datetime.utcnow()
    delta = int(secs) * world.delta
    i_get_the_batch_anomaly_score(step, world.batch_anomaly_score['resource'])
    status = get_status(world.batch_anomaly_score)
    retries = 0
    while status['code'] not in (int(code1), int(code2)):
        retries += 1
        logged_wait(start, delta, retries, "batchanomalyscore")
        i_get_the_batch_anomaly_score(step,
                                      world.batch_anomaly_score['resource'])
        status = get_status(world.batch_anomaly_score)
    eq_(status['code'], int(code1), msg="%s seconds waited." % delta)
#@step(r'I wait until the batch prediction is ready less than (\d+)')
def the_batch_prediction_is_finished_in_less_than(step, secs):
    # Delegate to the generic waiter: stop on FINISHED (ok) or FAULTY (error).
    wait_until_batch_prediction_status_code_is(step, FINISHED, FAULTY, secs)
#@step(r'I wait until the batch centroid is ready less than (\d+)')
def the_batch_centroid_is_finished_in_less_than(step, secs):
    # Delegate to the generic waiter: stop on FINISHED (ok) or FAULTY (error).
    wait_until_batch_centroid_status_code_is(step, FINISHED, FAULTY, secs)
#@step(r'I wait until the batch anomaly score is ready less than (\d+)')
def the_batch_anomaly_score_is_finished_in_less_than(step, secs):
    # Delegate to the generic waiter: stop on FINISHED (ok) or FAULTY (error).
    wait_until_batch_anomaly_score_status_code_is(step, FINISHED, FAULTY, secs)
#@step(r'I download the created predictions file to "(.*)"')
def i_download_predictions_file(step, filename):
    """Download the batch prediction output into the local results file."""
    destination = res_filename(filename)
    file_object = world.api.download_batch_prediction(
        world.batch_prediction, filename=destination)
    ok_(file_object is not None)
    world.output = file_object
#@step(r'I download the created centroid file to "(.*)"')
def i_download_centroid_file(step, filename):
    """Download the batch centroid output into the local results file."""
    destination = res_filename(filename)
    file_object = world.api.download_batch_centroid(
        world.batch_centroid, filename=destination)
    ok_(file_object is not None)
    world.output = file_object
#@step(r'I download the created anomaly score file to "(.*)"')
def i_download_anomaly_score_file(step, filename):
    """Download the batch anomaly score output into the local results file."""
    destination = res_filename(filename)
    file_object = world.api.download_batch_anomaly_score(
        world.batch_anomaly_score, filename=destination)
    ok_(file_object is not None)
    world.output = file_object
def check_rows(prediction_rows, test_rows):
    """Compare each generated row against the matching expected row.

    Rows must have the same number of fields.  Any field containing a
    decimal point is rounded (in both rows) to the number of decimals of
    the shorter value before comparing, so tiny precision differences do
    not fail the check.  Non-numeric fields that merely contain a dot
    fall through the ``ValueError`` guard and are compared verbatim.
    """
    row_num = 0
    for row in prediction_rows:
        check_row = next(test_rows)
        row_num += 1
        eq_(len(check_row), len(row))
        for index in range(len(row)):
            dot = row[index].find(".")
            if dot > 0:
                try:
                    # Round to the coarser precision of the two values.
                    decs = min(len(row[index]), len(check_row[index])) - dot - 1
                    row[index] = round(float(row[index]), decs)
                    check_row[index] = round(float(check_row[index]), decs)
                except ValueError:
                    pass
            eq_(check_row[index], row[index],
                "Got: %s/ Expected: %s in line %s" % (row, check_row, row_num))
#@step(r'the batch prediction file is like "(.*)"')
def i_check_predictions(step, check_file):
    # Compare the downloaded output (world.output) against the reference
    # CSV shipped with the tests, row by row.
    with UnicodeReader(world.output) as prediction_rows:
        with UnicodeReader(res_filename(check_file)) as test_rows:
            check_rows(prediction_rows, test_rows)
#@step(r'the batch centroid file is like "(.*)"')
def i_check_batch_centroid(step, check_file):
    # Centroid output uses the same CSV comparison as predictions.
    i_check_predictions(step, check_file)
#@step(r'the batch anomaly score file is like "(.*)"')
def i_check_batch_anomaly_score(step, check_file):
    # Anomaly score output uses the same CSV comparison as predictions.
    i_check_predictions(step, check_file)
#@step(r'I check the batch centroid is ok')
def i_check_batch_centroid_is_ok(step):
    # world.api.ok presumably polls until the resource finishes — see bigml.api.
    ok_(world.api.ok(world.batch_centroid))
#@step(r'I check the batch anomaly score is ok')
def i_check_batch_anomaly_score_is_ok(step):
    # world.api.ok presumably polls until the resource finishes — see bigml.api.
    ok_(world.api.ok(world.batch_anomaly_score))
#@step(r'I create a batch centroid for the dataset$')
def i_create_a_batch_prediction_with_cluster(step):
    """Create a batch centroid from the current cluster and dataset."""
    resource = world.api.create_batch_centroid(
        world.cluster.get('resource'), world.dataset.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_centroid = resource['object']
    world.batch_centroids.append(resource['resource'])
#@step(r'I create a batch anomaly score$')
def i_create_a_batch_prediction_with_anomaly(step):
    """Create a batch anomaly score from the current detector and dataset."""
    resource = world.api.create_batch_anomaly_score(
        world.anomaly.get('resource'), world.dataset.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_anomaly_score = resource['object']
    world.batch_anomaly_scores.append(resource['resource'])
#@step(r'I create a linear batch prediction$')
def i_create_a_linear_batch_prediction(step):
    """Create a batch prediction from the current linear regression."""
    resource = world.api.create_batch_prediction(
        world.linear_regression.get('resource'), world.dataset.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_prediction = resource['object']
    world.batch_predictions.append(resource['resource'])
#@step(r'I create a source from the batch prediction$')
def i_create_a_source_from_batch_prediction(step):
    """Create a new source out of the current batch prediction output."""
    resource = world.api.source_from_batch_prediction(
        world.batch_prediction.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.source = resource['object']
    world.sources.append(resource['resource'])
#@step(r'I create a batch prediction for the dataset with the logistic regression$')
def i_create_a_batch_prediction_logistic_model(step):
    """Create a batch prediction from the current logistic regression."""
    resource = world.api.create_batch_prediction(
        world.logistic_regression.get('resource'),
        world.dataset.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_prediction = resource['object']
    world.batch_predictions.append(resource['resource'])
#@step(r'I create a batch prediction for the dataset with the fusion$')
def i_create_a_batch_prediction_fusion(step):
    """Create a batch prediction from the current fusion model."""
    resource = world.api.create_batch_prediction(
        world.fusion.get('resource'), world.dataset.get('resource'))
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.batch_prediction = resource['object']
    world.batch_predictions.append(resource['resource'])
| apache-2.0 |
spaceshipsamurai/samurai-auth | server/controllers/eve/charactersController.js | 2437 | var mongoose = require('mongoose'),
KeyManager = require('../../services/eve/key-service'),
CharacterService = require('../../services/eve/character-service')(),
User = mongoose.model('User'),
Character = mongoose.model('Character'),
async = require('async');
module.exports = function() {
    // List every user's primary character, with the character document and
    // its API key populated; only primaries whose key status is 'Valid'
    // survive the filter.
    var listPrimaries = function(req, res, next) {
        User.find({}, 'primary')
            .populate('primary')
            .exec(function(err, docs){
                if(err) return next({ msg: err, tags: ['characters', 'listPrimaries', 'mongo']});
                // The key is a reference on the character document, so a
                // second populate pass resolves primary.key.
                User.populate(docs, { path: 'primary.key', model: 'Key'}, function(err, users){
                    var filtered = users.filter(function(user){
                        return user.primary && user.primary.key && user.primary.key.status === 'Valid';
                    });
                    return res.json(filtered);
                });
            });
    };
    // Mark the character in the URL (:cid) as the requesting user's primary.
    var updatePrimaryCharacter = function(req, res, next) {
        Character.findOne({ _id: req.params.cid, user: req.user._id })
            .populate('user', 'primary')
            .exec(function(err, character){
                if(err) return next({ msg: err, tags: ['characters', 'auth', 'mongo', 'update-primary']});
                // Character missing or not owned by this user: reply success
                // without changing anything (intentional no-op).
                if(!character) return res.json({ msg: 'success' });
                character.user.primary = req.params.cid;
                character.user.save(function(err){
                    if(err) return next({ msg: err, tags: ['characters', 'auth', 'mongo', 'update-primary']});
                    return res.json({ msg: 'success' })
                });
            });
    };
    // Return only the user's characters affiliated to an Alliance/Coalition.
    var getAffiliated = function(req, res, next) {
        CharacterService.find({ user: req.user._id }).then(function(characters){
            async.filter(characters, function(character, cb){
                CharacterService.getAffiliation(character._id).then(function(aff){
                    // Truth-test callback (async v1 style: cb(bool), no error arg).
                    cb(aff === 'Alliance' || aff === 'Coalition');
                }).catch(function(err) {
                    // NOTE(review): on failure next(err) fires but cb is never
                    // invoked, so async.filter never completes and the final
                    // res.json below is skipped — confirm and fix upstream.
                    return next(err);
                });
            }, function(results){
                return res.json(results);
            });
        }, function(err) {
            return next(err);
        });
    };
    return {
        updatePrimaryCharacter: updatePrimaryCharacter,
        listPrimaries: listPrimaries,
        getAffiliated: getAffiliated
    }
};
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-kafkaconnect/src/main/java/com/amazonaws/services/kafkaconnect/model/AutoScaling.java | 11005 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kafkaconnect.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Specifies how the connector scales.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kafkaconnect-2021-09-14/AutoScaling" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AutoScaling implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The maximum number of workers allocated to the connector.
     * </p>
     */
    private Integer maxWorkerCount;
    /**
     * <p>
     * The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are 1,2,4,8.
     * </p>
     */
    private Integer mcuCount;
    /**
     * <p>
     * The minimum number of workers allocated to the connector.
     * </p>
     */
    private Integer minWorkerCount;
    /**
     * <p>
     * The scale-in policy for the connector.
     * </p>
     */
    private ScaleInPolicy scaleInPolicy;
    /**
     * <p>
     * The scale-out policy for the connector.
     * </p>
     */
    private ScaleOutPolicy scaleOutPolicy;
    /**
     * <p>
     * The maximum number of workers allocated to the connector.
     * </p>
     *
     * @param maxWorkerCount
     *        The maximum number of workers allocated to the connector.
     */
    public void setMaxWorkerCount(Integer maxWorkerCount) {
        this.maxWorkerCount = maxWorkerCount;
    }
    /**
     * <p>
     * The maximum number of workers allocated to the connector.
     * </p>
     *
     * @return The maximum number of workers allocated to the connector.
     */
    public Integer getMaxWorkerCount() {
        return this.maxWorkerCount;
    }
    /**
     * <p>
     * The maximum number of workers allocated to the connector.
     * </p>
     *
     * @param maxWorkerCount
     *        The maximum number of workers allocated to the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AutoScaling withMaxWorkerCount(Integer maxWorkerCount) {
        setMaxWorkerCount(maxWorkerCount);
        return this;
    }
    /**
     * <p>
     * The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are 1,2,4,8.
     * </p>
     *
     * @param mcuCount
     *        The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are
     *        1,2,4,8.
     */
    public void setMcuCount(Integer mcuCount) {
        this.mcuCount = mcuCount;
    }
    /**
     * <p>
     * The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are 1,2,4,8.
     * </p>
     *
     * @return The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are
     *         1,2,4,8.
     */
    public Integer getMcuCount() {
        return this.mcuCount;
    }
    /**
     * <p>
     * The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are 1,2,4,8.
     * </p>
     *
     * @param mcuCount
     *        The number of microcontroller units (MCUs) allocated to each connector worker. The valid values are
     *        1,2,4,8.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AutoScaling withMcuCount(Integer mcuCount) {
        setMcuCount(mcuCount);
        return this;
    }
    /**
     * <p>
     * The minimum number of workers allocated to the connector.
     * </p>
     *
     * @param minWorkerCount
     *        The minimum number of workers allocated to the connector.
     */
    public void setMinWorkerCount(Integer minWorkerCount) {
        this.minWorkerCount = minWorkerCount;
    }
    /**
     * <p>
     * The minimum number of workers allocated to the connector.
     * </p>
     *
     * @return The minimum number of workers allocated to the connector.
     */
    public Integer getMinWorkerCount() {
        return this.minWorkerCount;
    }
    /**
     * <p>
     * The minimum number of workers allocated to the connector.
     * </p>
     *
     * @param minWorkerCount
     *        The minimum number of workers allocated to the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AutoScaling withMinWorkerCount(Integer minWorkerCount) {
        setMinWorkerCount(minWorkerCount);
        return this;
    }
    /**
     * <p>
     * The scale-in policy for the connector.
     * </p>
     *
     * @param scaleInPolicy
     *        The scale-in policy for the connector.
     */
    public void setScaleInPolicy(ScaleInPolicy scaleInPolicy) {
        this.scaleInPolicy = scaleInPolicy;
    }
    /**
     * <p>
     * The scale-in policy for the connector.
     * </p>
     *
     * @return The scale-in policy for the connector.
     */
    public ScaleInPolicy getScaleInPolicy() {
        return this.scaleInPolicy;
    }
    /**
     * <p>
     * The scale-in policy for the connector.
     * </p>
     *
     * @param scaleInPolicy
     *        The scale-in policy for the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AutoScaling withScaleInPolicy(ScaleInPolicy scaleInPolicy) {
        setScaleInPolicy(scaleInPolicy);
        return this;
    }
    /**
     * <p>
     * The scale-out policy for the connector.
     * </p>
     *
     * @param scaleOutPolicy
     *        The scale-out policy for the connector.
     */
    public void setScaleOutPolicy(ScaleOutPolicy scaleOutPolicy) {
        this.scaleOutPolicy = scaleOutPolicy;
    }
    /**
     * <p>
     * The scale-out policy for the connector.
     * </p>
     *
     * @return The scale-out policy for the connector.
     */
    public ScaleOutPolicy getScaleOutPolicy() {
        return this.scaleOutPolicy;
    }
    /**
     * <p>
     * The scale-out policy for the connector.
     * </p>
     *
     * @param scaleOutPolicy
     *        The scale-out policy for the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AutoScaling withScaleOutPolicy(ScaleOutPolicy scaleOutPolicy) {
        setScaleOutPolicy(scaleOutPolicy);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getMaxWorkerCount() != null)
            sb.append("MaxWorkerCount: ").append(getMaxWorkerCount()).append(",");
        if (getMcuCount() != null)
            sb.append("McuCount: ").append(getMcuCount()).append(",");
        if (getMinWorkerCount() != null)
            sb.append("MinWorkerCount: ").append(getMinWorkerCount()).append(",");
        if (getScaleInPolicy() != null)
            sb.append("ScaleInPolicy: ").append(getScaleInPolicy()).append(",");
        if (getScaleOutPolicy() != null)
            sb.append("ScaleOutPolicy: ").append(getScaleOutPolicy());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof AutoScaling == false)
            return false;
        AutoScaling other = (AutoScaling) obj;
        if (other.getMaxWorkerCount() == null ^ this.getMaxWorkerCount() == null)
            return false;
        if (other.getMaxWorkerCount() != null && other.getMaxWorkerCount().equals(this.getMaxWorkerCount()) == false)
            return false;
        if (other.getMcuCount() == null ^ this.getMcuCount() == null)
            return false;
        if (other.getMcuCount() != null && other.getMcuCount().equals(this.getMcuCount()) == false)
            return false;
        if (other.getMinWorkerCount() == null ^ this.getMinWorkerCount() == null)
            return false;
        if (other.getMinWorkerCount() != null && other.getMinWorkerCount().equals(this.getMinWorkerCount()) == false)
            return false;
        if (other.getScaleInPolicy() == null ^ this.getScaleInPolicy() == null)
            return false;
        if (other.getScaleInPolicy() != null && other.getScaleInPolicy().equals(this.getScaleInPolicy()) == false)
            return false;
        if (other.getScaleOutPolicy() == null ^ this.getScaleOutPolicy() == null)
            return false;
        if (other.getScaleOutPolicy() != null && other.getScaleOutPolicy().equals(this.getScaleOutPolicy()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getMaxWorkerCount() == null) ? 0 : getMaxWorkerCount().hashCode());
        hashCode = prime * hashCode + ((getMcuCount() == null) ? 0 : getMcuCount().hashCode());
        hashCode = prime * hashCode + ((getMinWorkerCount() == null) ? 0 : getMinWorkerCount().hashCode());
        hashCode = prime * hashCode + ((getScaleInPolicy() == null) ? 0 : getScaleInPolicy().hashCode());
        hashCode = prime * hashCode + ((getScaleOutPolicy() == null) ? 0 : getScaleOutPolicy().hashCode());
        return hashCode;
    }
    @Override
    public AutoScaling clone() {
        try {
            return (AutoScaling) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kafkaconnect.model.transform.AutoScalingMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| apache-2.0 |
mariusj/org.openntf.domino | domino/externals/javassist/src/main/java/javassist/bytecode/SignatureAttribute.java | 35773 | /*
* Javassist, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package javassist.bytecode;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Map;
import java.util.ArrayList;
import javassist.CtClass;
/**
* <code>Signature_attribute</code>.
*/
public class SignatureAttribute extends AttributeInfo {
    /**
     * The name of this attribute <code>"Signature"</code>.
     */
    public static final String tag = "Signature";
    // Constructs the attribute by reading its payload from the class file
    // stream (delegated to AttributeInfo).
    SignatureAttribute(ConstPool cp, int n, DataInputStream in)
        throws IOException
    {
        super(cp, n, in);
    }
    /**
     * Constructs a <code>Signature</code> attribute.
     *
     * @param cp a constant pool table.
     * @param signature the signature represented by this attribute.
     */
    public SignatureAttribute(ConstPool cp, String signature) {
        super(cp, tag);
        int index = cp.addUtf8Info(signature);
        byte[] bvalue = new byte[2];
        bvalue[0] = (byte)(index >>> 8);    // big-endian u2 signature_index
        bvalue[1] = (byte)index;
        set(bvalue);
    }
    /**
     * Returns the generic signature indicated by <code>signature_index</code>.
     *
     * @see #toClassSignature(String)
     * @see #toMethodSignature(String)
     * @see #toFieldSignature(String)
     */
    public String getSignature() {
        return getConstPool().getUtf8Info(ByteArray.readU16bit(get(), 0));
    }
    /**
     * Sets <code>signature_index</code> to the index of the given generic signature,
     * which is added to a constant pool.
     *
     * @param sig new signature.
     * @since 3.11
     */
    public void setSignature(String sig) {
        int index = getConstPool().addUtf8Info(sig);
        ByteArray.write16bit(index, info, 0);
    }
    /**
     * Makes a copy. Class names are replaced according to the
     * given <code>Map</code> object.
     *
     * @param newCp the constant pool table used by the new copy.
     * @param classnames pairs of replaced and substituted
     * class names.
     */
    public AttributeInfo copy(ConstPool newCp, Map classnames) {
        // NOTE(review): classnames is not consulted here; renaming appears to
        // be handled separately via renameClass() — confirm intended.
        return new SignatureAttribute(newCp, getSignature());
    }
    // Renames a single class occurrence inside this attribute's signature.
    void renameClass(String oldname, String newname) {
        String sig = renameClass(getSignature(), oldname, newname);
        setSignature(sig);
    }
    // Renames every class occurrence listed in the map (old name -> new name).
    void renameClass(Map classnames) {
        String sig = renameClass(getSignature(), classnames);
        setSignature(sig);
    }
    // Convenience wrapper: builds a one-entry map and delegates.
    static String renameClass(String desc, String oldname, String newname) {
        Map map = new java.util.HashMap();
        map.put(oldname, newname);
        return renameClass(desc, map);
    }
    // Scans the signature string for class references of the form
    // "L<name>;" (possibly containing a "<...>" type-argument section) and
    // substitutes names found in the map.  Returns the original string
    // unchanged when no substitution occurred.
    static String renameClass(String desc, Map map) {
        if (map == null)
            return desc;
        StringBuilder newdesc = new StringBuilder();
        int head = 0;       // start of the not-yet-copied portion of desc
        int i = 0;
        for (;;) {
            int j = desc.indexOf('L', i);
            if (j < 0)
                break;
            StringBuilder nameBuf = new StringBuilder();
            int k = j;
            char c;
            try {
                // Collect the class name up to ';', keeping any nested
                // "<...>" type-argument text as part of the key.
                while ((c = desc.charAt(++k)) != ';') {
                    nameBuf.append(c);
                    if (c == '<') {
                        while ((c = desc.charAt(++k)) != '>')
                            nameBuf.append(c);
                        nameBuf.append(c);
                    }
                }
            }
            catch (IndexOutOfBoundsException e) { break; }  // malformed tail: stop scanning
            i = k + 1;
            String name = nameBuf.toString();
            String name2 = (String)map.get(name);
            if (name2 != null) {
                newdesc.append(desc.substring(head, j));
                newdesc.append('L');
                newdesc.append(name2);
                newdesc.append(c);
                head = i;
            }
        }
        if (head == 0)
            return desc;    // nothing replaced: avoid building a copy
        else {
            int len = desc.length();
            if (head < len)
                newdesc.append(desc.substring(head, len));
            return newdesc.toString();
        }
    }
    // True while c can still be part of a class name token.
    private static boolean isNamePart(int c) {
        return c != ';' && c != '<';
    }
    // Mutable parse position over a signature string; error() is a helper
    // declared elsewhere in this file.
    static private class Cursor {
        int position = 0;
        // Returns the index of ch at/after position, advancing past it;
        // throws BadBytecode when ch is absent.
        int indexOf(String s, int ch) throws BadBytecode {
            int i = s.indexOf(ch, position);
            if (i < 0)
                throw error(s);
            else {
                position = i + 1;
                return i;
            }
        }
    }
    /**
     * Class signature.
     */
    public static class ClassSignature {
        TypeParameter[] params;
        ClassType superClass;
        ClassType[] interfaces;
        /**
         * Constructs a class signature.  Null arguments default to: no type
         * parameters, <code>java.lang.Object</code> as the super class, and
         * no interfaces.
         *
         * @param params type parameters.
         * @param superClass the super class.
         * @param interfaces the interface types.
         */
        public ClassSignature(TypeParameter[] params, ClassType superClass, ClassType[] interfaces) {
            this.params = params == null ? new TypeParameter[0] : params;
            this.superClass = superClass == null ? ClassType.OBJECT : superClass;
            this.interfaces = interfaces == null ? new ClassType[0] : interfaces;
        }
        /**
         * Constructs a class signature.
         *
         * @param p type parameters.
         */
        public ClassSignature(TypeParameter[] p) {
            this(p, null, null);
        }
        /**
         * Returns the type parameters.
         *
         * @return a zero-length array if the type parameters are not specified.
         */
        public TypeParameter[] getParameters() {
            return params;
        }
        /**
         * Returns the super class.
         */
        public ClassType getSuperClass() { return superClass; }
        /**
         * Returns the super interfaces.
         *
         * @return a zero-length array if the super interfaces are not specified.
         */
        public ClassType[] getInterfaces() { return interfaces; }
        /**
         * Returns the string representation (Java-source-like, e.g.
         * <code>&lt;T&gt; extends Base implements I</code>).
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer();
            TypeParameter.toString(sbuf, params);
            sbuf.append(" extends ").append(superClass);
            if (interfaces.length > 0) {
                sbuf.append(" implements ");
                Type.toString(sbuf, interfaces);
            }
            return sbuf.toString();
        }
        /**
         * Returns the encoded string representing the method type signature.
         */
        public String encode() {
            StringBuffer sbuf = new StringBuffer();
            if (params.length > 0) {
                sbuf.append('<');
                for (int i = 0; i < params.length; i++)
                    params[i].encode(sbuf);
                sbuf.append('>');
            }
            superClass.encode(sbuf);
            for (int i = 0; i < interfaces.length; i++)
                interfaces[i].encode(sbuf);
            return sbuf.toString();
        }
    }
    /**
     * Method type signature.
     */
    public static class MethodSignature {
        TypeParameter[] typeParams;
        Type[] params;
        Type retType;
        ObjectType[] exceptions;
        /**
         * Constructs a method type signature.  Any parameter can be null
         * to represent <code>void</code> or nothing.
         *
         * @param tp type parameters.
         * @param params parameter types.
         * @param ret a return type, or null if the return type is <code>void</code>.
         * @param ex exception types.
         */
        public MethodSignature(TypeParameter[] tp, Type[] params, Type ret, ObjectType[] ex) {
            typeParams = tp == null ? new TypeParameter[0] : tp;
            this.params = params == null ? new Type[0] : params;
            retType = ret == null ? new BaseType("void") : ret;
            exceptions = ex == null ? new ObjectType[0] : ex;
        }
        /**
         * Returns the formal type parameters.
         *
         * @return a zero-length array if the type parameters are not specified.
         */
        public TypeParameter[] getTypeParameters() { return typeParams; }
        /**
         * Returns the types of the formal parameters.
         *
         * @return a zero-length array if no formal parameter is taken.
         */
        public Type[] getParameterTypes() { return params; }
        /**
         * Returns the type of the returned value.
         */
        public Type getReturnType() { return retType; }
        /**
         * Returns the types of the exceptions that may be thrown.
         *
         * @return a zero-length array if exceptions are never thrown or
         * the exception types are not parameterized types or type variables.
         */
        public ObjectType[] getExceptionTypes() { return exceptions; }
        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer();
            TypeParameter.toString(sbuf, typeParams);
            sbuf.append(" (");
            Type.toString(sbuf, params);
            sbuf.append(") ");
            sbuf.append(retType);
            if (exceptions.length > 0) {
                sbuf.append(" throws ");
                Type.toString(sbuf, exceptions);
            }
            return sbuf.toString();
        }
        /**
         * Returns the encoded string representing the method type signature.
         */
        public String encode() {
            StringBuffer sbuf = new StringBuffer();
            if (typeParams.length > 0) {
                sbuf.append('<');
                for (int i = 0; i < typeParams.length; i++)
                    typeParams[i].encode(sbuf);
                sbuf.append('>');
            }
            sbuf.append('(');
            for (int i = 0; i < params.length; i++)
                params[i].encode(sbuf);
            sbuf.append(')');
            retType.encode(sbuf);
            if (exceptions.length > 0)
                for (int i = 0; i < exceptions.length; i++) {
                    sbuf.append('^');   // '^' introduces each throws-clause type
                    exceptions[i].encode(sbuf);
                }
            return sbuf.toString();
        }
    }
    /**
     * Formal type parameters.
     *
     * @see TypeArgument
     */
    public static class TypeParameter {
        String name;
        ObjectType superClass;
        ObjectType[] superInterfaces;
        // Builds a parameter whose name is the [nb, ne) slice of sig.
        TypeParameter(String sig, int nb, int ne, ObjectType sc, ObjectType[] si) {
            name = sig.substring(nb, ne);
            superClass = sc;
            superInterfaces = si;
        }
        /**
         * Constructs a <code>TypeParameter</code> representing a type parameter
         * like <code><T extends ... ></code>.
         *
         * @param name parameter name.
         * @param superClass an upper bound class-type (or null).
         * @param superInterfaces an upper bound interface-type (or null).
         */
        public TypeParameter(String name, ObjectType superClass, ObjectType[] superInterfaces) {
            this.name = name;
            this.superClass = superClass;
            if (superInterfaces == null)
                this.superInterfaces = new ObjectType[0];
            else
                this.superInterfaces = superInterfaces;
        }
        /**
         * Constructs a <code>TypeParameter</code> representing a type parameter
         * like <code><T></code>.
         *
         * @param name parameter name.
         */
        public TypeParameter(String name) {
            this(name, null, null);
        }
        /**
         * Returns the name of the type parameter.
         */
        public String getName() {
            return name;
        }
        /**
         * Returns the class bound of this parameter.
         */
        public ObjectType getClassBound() { return superClass; }
        /**
         * Returns the interface bound of this parameter.
         *
         * @return a zero-length array if the interface bound is not specified.
         */
        public ObjectType[] getInterfaceBound() { return superInterfaces; }
        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer(getName());
            if (superClass != null)
                sbuf.append(" extends ").append(superClass.toString());
            int len = superInterfaces.length;
            if (len > 0) {
                for (int i = 0; i < len; i++) {
                    if (i > 0 || superClass != null)
                        sbuf.append(" & ");
                    else
                        sbuf.append(" extends ");
                    sbuf.append(superInterfaces[i].toString());
                }
            }
            return sbuf.toString();
        }
        // Renders "<p1, p2, ...>" into sbuf.
        static void toString(StringBuffer sbuf, TypeParameter[] tp) {
            sbuf.append('<');
            for (int i = 0; i < tp.length; i++) {
                if (i > 0)
                    sbuf.append(", ");
                sbuf.append(tp[i]);
            }
            sbuf.append('>');
        }
        // Emits "name:ClassBound(:InterfaceBound)*"; an absent class bound
        // defaults to java.lang.Object.
        void encode(StringBuffer sb) {
            sb.append(name);
            if (superClass == null)
                sb.append(":Ljava/lang/Object;");
            else {
                sb.append(':');
                superClass.encode(sb);
            }
            for (int i = 0; i < superInterfaces.length; i++) {
                sb.append(':');
                superInterfaces[i].encode(sb);
            }
        }
    }
/**
* Type argument.
*
* @see TypeParameter
*/
public static class TypeArgument {
ObjectType arg;
char wildcard;
TypeArgument(ObjectType a, char w) {
arg = a;
wildcard = w;
}
/**
* Constructs a <code>TypeArgument</code>.
* A type argument is <code><String></code>, <code><int[]></code>,
* or a type variable <code><T></code>, etc.
*
* @param t a class type, an array type, or a type variable.
*/
public TypeArgument(ObjectType t) {
this(t, ' ');
}
/**
* Constructs a <code>TypeArgument</code> representing <code><?></code>.
*/
public TypeArgument() {
this(null, '*');
}
/**
* A factory method constructing a <code>TypeArgument</code> with an upper bound.
* It represents <code><? extends ... ></code>
*
* @param t an upper bound type.
*/
public static TypeArgument subclassOf(ObjectType t) {
return new TypeArgument(t, '+');
}
/**
* A factory method constructing a <code>TypeArgument</code> with an lower bound.
* It represents <code><? super ... ></code>
*
* @param t an lower bbound type.
*/
public static TypeArgument superOf(ObjectType t) {
return new TypeArgument(t, '-');
}
/**
* Returns the kind of this type argument.
*
* @return <code>' '</code> (not-wildcard), <code>'*'</code> (wildcard), <code>'+'</code> (wildcard with
* upper bound), or <code>'-'</code> (wildcard with lower bound).
*/
public char getKind() { return wildcard; }
/**
* Returns true if this type argument is a wildcard type
* such as <code>?</code>, <code>? extends String</code>, or <code>? super Integer</code>.
*/
public boolean isWildcard() { return wildcard != ' '; }
/**
* Returns the type represented by this argument
* if the argument is not a wildcard type. Otherwise, this method
* returns the upper bound (if the kind is '+'),
* the lower bound (if the kind is '-'), or null (if the upper or lower
* bound is not specified).
*/
public ObjectType getType() { return arg; }
/**
* Returns the string representation.
*/
public String toString() {
if (wildcard == '*')
return "?";
String type = arg.toString();
if (wildcard == ' ')
return type;
else if (wildcard == '+')
return "? extends " + type;
else
return "? super " + type;
}
// Appends the JVM signature encoding of a type-argument list, including
// the surrounding angle brackets, to sb.
static void encode(StringBuffer sb, TypeArgument[] args) {
sb.append('<');
int n = args.length;
for (int i = 0; i < n; ++i) {
TypeArgument ta = args[i];
if (ta.isWildcard())
sb.append(ta.wildcard); // '*', '+', or '-' prefix
ObjectType bound = ta.getType();
if (bound != null)
bound.encode(sb);
}
sb.append('>');
}
}
/**
* Primitive types and object types.
*/
public static abstract class Type {
// Appends the JVM signature encoding of this type to sb.
abstract void encode(StringBuffer sb);
// Appends the string forms of ts to sbuf, separated by ", ".
static void toString(StringBuffer sbuf, Type[] ts) {
for (int i = 0; i < ts.length; i++) {
if (i > 0)
sbuf.append(", ");
sbuf.append(ts[i]);
}
}
}
/**
* Primitive types.
*/
public static class BaseType extends Type {
char descriptor; // JVM descriptor character, e.g. 'I' for int, 'V' for void
BaseType(char c) { descriptor = c; }
/**
* Constructs a <code>BaseType</code>.
*
* @param typeName <code>void</code>, <code>int</code>, ...
*/
public BaseType(String typeName) {
this(Descriptor.of(typeName).charAt(0));
}
/**
* Returns the descriptor representing this primitive type.
*
* @see javassist.bytecode.Descriptor
*/
public char getDescriptor() { return descriptor; }
/**
* Returns the <code>CtClass</code> representing this
* primitive type.
*
* @deprecated the method name is misspelled; use {@link #getCtclass()}.
*/
public CtClass getCtlass() {
return getCtclass(); // kept for backward compatibility
}
/**
* Returns the <code>CtClass</code> representing this
* primitive type.
*/
public CtClass getCtclass() {
return Descriptor.toPrimitiveClass(descriptor);
}
/**
* Returns the string representation.
*/
public String toString() {
return Descriptor.toClassName(Character.toString(descriptor));
}
void encode(StringBuffer sb) {
sb.append(descriptor);
}
}
/**
* Class types, array types, and type variables.
* This class is also used for representing a field type.
*/
public static abstract class ObjectType extends Type {
/**
* Returns the encoded string representing the object type signature.
*/
public String encode() {
StringBuffer sb = new StringBuffer();
encode(sb); // delegates to the subclass's signature encoder
return sb.toString();
}
}
/**
* Class types.
*/
public static class ClassType extends ObjectType {
String name; // fully-qualified class name using '.' separators
TypeArgument[] arguments; // type arguments, or null if not parameterized
// Parser-side factory: chooses the nested variant when the class has an
// already-parsed declaring (parent) class.
static ClassType make(String s, int b, int e,
TypeArgument[] targs, ClassType parent) {
if (parent == null)
return new ClassType(s, b, e, targs);
else
return new NestedClassType(s, b, e, targs, parent);
}
// Builds the name from a slice [begin, end) of the raw signature,
// converting the JVM's '/' package separators to '.'.
ClassType(String signature, int begin, int end, TypeArgument[] targs) {
name = signature.substring(begin, end).replace('/', '.');
arguments = targs;
}
/**
* A class type representing <code>java.lang.Object</code>.
*/
// NOTE(review): this public field is mutable; consider declaring it final.
public static ClassType OBJECT = new ClassType("java.lang.Object", null);
/**
* Constructs a <code>ClassType</code>. It represents
* the name of a non-nested class.
*
* @param className a fully qualified class name.
* @param args type arguments or null.
*/
public ClassType(String className, TypeArgument[] args) {
name = className;
arguments = args;
}
/**
* Constructs a <code>ClassType</code>. It represents
* the name of a non-nested class.
*
* @param className a fully qualified class name.
*/
public ClassType(String className) {
this(className, null);
}
/**
* Returns the class name.
*/
public String getName() {
return name;
}
/**
* Returns the type arguments.
*
* @return null if no type arguments are given to this class.
*/
public TypeArgument[] getTypeArguments() { return arguments; }
/**
* If this class is a member of another class, returns the
* class in which this class is declared.
*
* @return null if this class is not a member of another class.
*/
public ClassType getDeclaringClass() { return null; } // overridden by NestedClassType
/**
* Returns the string representation, e.g.
* <code>Outer.Inner<String></code>.
*/
public String toString() {
StringBuffer sbuf = new StringBuffer();
ClassType parent = getDeclaringClass();
if (parent != null)
sbuf.append(parent.toString()).append('.'); // prefix declaring class
sbuf.append(name);
if (arguments != null) {
sbuf.append('<');
int n = arguments.length;
for (int i = 0; i < n; i++) {
if (i > 0)
sbuf.append(", ");
sbuf.append(arguments[i].toString());
}
sbuf.append('>');
}
return sbuf.toString();
}
// Encodes as a JVM class type signature: 'L' <name> [type args] ';'.
void encode(StringBuffer sb) {
sb.append('L');
encode2(sb);
sb.append(';');
}
// Encodes the name part; nested classes are joined with '$' and the
// package separators are converted back to '/'.
void encode2(StringBuffer sb) {
ClassType parent = getDeclaringClass();
if (parent != null) {
parent.encode2(sb);
sb.append('$');
}
sb.append(name.replace('.', '/'));
if (arguments != null)
TypeArgument.encode(sb, arguments);
}
}
/**
* Nested class types.
*/
public static class NestedClassType extends ClassType {
ClassType parent; // the class declaring this nested class
// Parser-side constructor; see ClassType.make.
NestedClassType(String s, int b, int e,
TypeArgument[] targs, ClassType p) {
super(s, b, e, targs);
parent = p;
}
/**
* Constructs a <code>NestedClassType</code>.
*
* @param parent the class surrounding this class type.
* @param className a simple class name. It does not include
* a package name or a parent's class name.
* @param args type parameters or null.
*/
public NestedClassType(ClassType parent, String className, TypeArgument[] args) {
super(className, args);
this.parent = parent;
}
/**
* Returns the class that declares this nested class.
* This nested class is a member of that declaring class.
*/
public ClassType getDeclaringClass() { return parent; }
}
/**
* Array types.
*/
public static class ArrayType extends ObjectType {
int dim; // number of array dimensions
Type componentType; // element type of the innermost component
/**
* Constructs an <code>ArrayType</code>.
*
* @param d dimension.
* @param comp the component type.
*/
public ArrayType(int d, Type comp) {
dim = d;
componentType = comp;
}
/**
* Returns the dimension of the array.
*/
public int getDimension() { return dim; }
/**
* Returns the component type.
*/
public Type getComponentType() {
return componentType;
}
/**
* Returns the string representation, e.g. <code>int[][]</code>.
*/
public String toString() {
StringBuffer sbuf = new StringBuffer(componentType.toString());
for (int i = 0; i < dim; i++)
sbuf.append("[]");
return sbuf.toString();
}
// Encodes as one '[' per dimension followed by the component type.
void encode(StringBuffer sb) {
for (int i = 0; i < dim; i++)
sb.append('[');
componentType.encode(sb);
}
}
/**
* Type variables.
*/
public static class TypeVariable extends ObjectType {
String name; // the variable name, e.g. "T"
// Parser-side constructor: takes the name from a slice of the signature.
TypeVariable(String sig, int begin, int end) {
name = sig.substring(begin, end);
}
/**
* Constructs a <code>TypeVariable</code>.
*
* @param name the name of a type variable.
*/
public TypeVariable(String name) {
this.name = name;
}
/**
* Returns the variable name.
*/
public String getName() {
return name;
}
/**
* Returns the string representation.
*/
public String toString() {
return name;
}
// Encodes as a JVM type-variable signature: 'T' <name> ';'.
void encode(StringBuffer sb) {
sb.append('T').append(name).append(';');
}
}
/**
* Parses the given signature string as a class signature.
*
* @param sig the signature obtained from the <code>SignatureAttribute</code>
* of a <code>ClassFile</code>.
* @return a tree-like data structure representing a class signature. It provides
* convenient accessor methods.
* @throws BadBytecode thrown when a syntactical error is found.
* @see #getSignature()
* @since 3.5
*/
public static ClassSignature toClassSignature(String sig) throws BadBytecode {
try {
return parseSig(sig);
}
catch (IndexOutOfBoundsException e) {
// A truncated signature makes the parser run off the end of the
// string; report it as malformed bytecode.
throw error(sig);
}
}
/**
* Parses the given signature string as a method type signature.
*
* @param sig the signature obtained from the <code>SignatureAttribute</code>
* of a <code>MethodInfo</code>.
* @return a tree-like data structure representing a method signature. It provides
* convenient accessor methods.
* @throws BadBytecode thrown when a syntactical error is found.
* @see #getSignature()
* @since 3.5
*/
public static MethodSignature toMethodSignature(String sig) throws BadBytecode {
try {
return parseMethodSig(sig);
}
catch (IndexOutOfBoundsException e) {
// Running off the end of the string means the signature is malformed.
throw error(sig);
}
}
/**
* Parses the given signature string as a field type signature.
*
* @param sig the signature string obtained from the <code>SignatureAttribute</code>
* of a <code>FieldInfo</code>.
* @return the field type signature.
* @throws BadBytecode thrown when a syntactical error is found.
* @see #getSignature()
* @since 3.5
*/
public static ObjectType toFieldSignature(String sig) throws BadBytecode {
try {
// dontThrow=false: a field signature must start with 'L', 'T', or '['.
return parseObjectType(sig, new Cursor(), false);
}
catch (IndexOutOfBoundsException e) {
throw error(sig);
}
}
/**
* Parses the given signature string as a type signature.
* The type signature is either the field type signature or a base type
* descriptor including <code>void</code> type.
*
* @param sig the signature string to parse.
* @return the parsed type signature.
* @throws BadBytecode thrown when a syntactical error is found.
* @since 3.18
*/
public static Type toTypeSignature(String sig) throws BadBytecode {
try {
return parseType(sig, new Cursor());
}
catch (IndexOutOfBoundsException e) {
throw error(sig);
}
}
// Parses a full class signature: optional type parameters, the
// superclass, then any number of superinterfaces (each starting 'L').
private static ClassSignature parseSig(String sig)
throws BadBytecode, IndexOutOfBoundsException
{
Cursor cur = new Cursor();
TypeParameter[] tp = parseTypeParams(sig, cur);
ClassType superClass = parseClassType(sig, cur);
int sigLen = sig.length();
ArrayList ifArray = new ArrayList();
while (cur.position < sigLen && sig.charAt(cur.position) == 'L')
ifArray.add(parseClassType(sig, cur));
ClassType[] ifs
= (ClassType[])ifArray.toArray(new ClassType[ifArray.size()]);
return new ClassSignature(tp, superClass, ifs);
}
// Parses a method signature: optional type parameters, '(' parameter
// types ')', the return type, and zero or more '^'-prefixed thrown types.
private static MethodSignature parseMethodSig(String sig)
throws BadBytecode
{
Cursor cur = new Cursor();
TypeParameter[] tp = parseTypeParams(sig, cur);
if (sig.charAt(cur.position++) != '(')
throw error(sig);
ArrayList params = new ArrayList();
while (sig.charAt(cur.position) != ')') {
Type t = parseType(sig, cur);
params.add(t);
}
cur.position++; // skip the ')'
Type ret = parseType(sig, cur);
int sigLen = sig.length();
ArrayList exceptions = new ArrayList();
while (cur.position < sigLen && sig.charAt(cur.position) == '^') {
cur.position++;
ObjectType t = parseObjectType(sig, cur, false);
if (t instanceof ArrayType)
throw error(sig); // arrays cannot be thrown
exceptions.add(t);
}
Type[] p = (Type[])params.toArray(new Type[params.size()]);
ObjectType[] ex = (ObjectType[])exceptions.toArray(new ObjectType[exceptions.size()]);
return new MethodSignature(tp, p, ret, ex);
}
// Parses an optional '<...>' list of formal type parameters. Each
// parameter is a name followed by a ':' class bound (possibly empty)
// and any number of ':' interface bounds.
private static TypeParameter[] parseTypeParams(String sig, Cursor cur)
throws BadBytecode
{
ArrayList typeParam = new ArrayList();
if (sig.charAt(cur.position) == '<') {
cur.position++;
while (sig.charAt(cur.position) != '>') {
int nameBegin = cur.position;
// NOTE(review): Cursor.indexOf is not visible here; presumably it
// returns the index of the next ':' and advances the cursor past
// it -- confirm against the Cursor definition.
int nameEnd = cur.indexOf(sig, ':');
// dontThrow=true: the class bound may be empty ("::" case).
ObjectType classBound = parseObjectType(sig, cur, true);
ArrayList ifBound = new ArrayList();
while (sig.charAt(cur.position) == ':') {
cur.position++;
ObjectType t = parseObjectType(sig, cur, false);
ifBound.add(t);
}
TypeParameter p = new TypeParameter(sig, nameBegin, nameEnd,
classBound, (ObjectType[])ifBound.toArray(new ObjectType[ifBound.size()]));
typeParam.add(p);
}
cur.position++; // skip the '>'
}
return (TypeParameter[])typeParam.toArray(new TypeParameter[typeParam.size()]);
}
// Parses one object type at the cursor: 'L' class type, 'T' type
// variable, or '[' array. When dontThrow is true, any other character
// yields null (used for optional class bounds) instead of an error.
private static ObjectType parseObjectType(String sig, Cursor c, boolean dontThrow)
throws BadBytecode
{
int i;
int begin = c.position;
switch (sig.charAt(begin)) {
case 'L' :
return parseClassType2(sig, c, null);
case 'T' :
// NOTE(review): Cursor.indexOf presumably advances the cursor past
// the ';' terminating the variable name -- confirm.
i = c.indexOf(sig, ';');
return new TypeVariable(sig, begin + 1, i);
case '[' :
return parseArray(sig, c);
default :
if (dontThrow)
return null;
else
throw error(sig);
}
}
// Parses a class type at the cursor; the signature must start with 'L'
// here, otherwise the whole signature is rejected as malformed.
private static ClassType parseClassType(String sig, Cursor c)
throws BadBytecode
{
if (sig.charAt(c.position) != 'L')
throw error(sig);
return parseClassType2(sig, c, null);
}
// Parses the body of a class type signature (after the leading 'L' or a
// '$' nested-class separator). Recurses for each nested-class segment,
// threading the already-built declaring class through 'parent'.
private static ClassType parseClassType2(String sig, Cursor c, ClassType parent)
throws BadBytecode
{
int start = ++c.position;
char t;
do {
t = sig.charAt(c.position++);
} while (t != '$' && t != '<' && t != ';');
int end = c.position - 1; // name is sig[start, end)
TypeArgument[] targs;
if (t == '<') {
targs = parseTypeArgs(sig, c);
t = sig.charAt(c.position++); // character following the '>'
}
else
targs = null;
ClassType thisClass = ClassType.make(sig, start, end, targs, parent);
// NOTE(review): the name-scanning loop above stops only at '$', '<',
// and ';', so '.' can only be seen here as the character following a
// type-argument list -- confirm this matches the signature grammar.
if (t == '$' || t == '.') {
c.position--; // let the recursive call re-consume the separator
return parseClassType2(sig, c, thisClass);
}
else
return thisClass;
}
// Parses a '<...>' type-argument list (the cursor is just past the '<').
// '*' is an unbounded wildcard; '+'/'-' prefix bounded wildcards; any
// other character starts a plain object type, so it is pushed back.
private static TypeArgument[] parseTypeArgs(String sig, Cursor c) throws BadBytecode {
ArrayList args = new ArrayList();
char t;
while ((t = sig.charAt(c.position++)) != '>') {
TypeArgument ta;
if (t == '*' )
ta = new TypeArgument(null, '*');
else {
if (t != '+' && t != '-') {
t = ' '; // plain argument: kind is ' '
c.position--; // un-consume; the char belongs to the type
}
ta = new TypeArgument(parseObjectType(sig, c, false), t);
}
args.add(ta);
}
return (TypeArgument[])args.toArray(new TypeArgument[args.size()]);
}
// Parses an array type: counts the run of '[' characters to get the
// dimension, then parses the component type.
private static ObjectType parseArray(String sig, Cursor c) throws BadBytecode {
int dim = 1;
while (sig.charAt(++c.position) == '[')
dim++;
return new ArrayType(dim, parseType(sig, c));
}
// Parses either an object type or, failing that (dontThrow=true yields
// null), a single-character base type descriptor such as 'I' or 'V'.
private static Type parseType(String sig, Cursor c) throws BadBytecode {
Type t = parseObjectType(sig, c, true);
if (t == null)
t = new BaseType(sig.charAt(c.position++));
return t;
}
// Builds the exception reported for any syntactically invalid signature.
private static BadBytecode error(String sig) {
return new BadBytecode("bad signature: " + sig);
}
}
| apache-2.0 |
djacobsmeyer/bloc-jams-angular | app/scripts/app.js | 14229 | var blocjams = angular.module('blocjams', ['ui.router']);
// --- Module-level player state shared by the SongPlayer factory ---
var currentAlbum = null; // album object whose songs are currently listed
// NOTE(review): clickHandler assigns this from data-song-number (presumably
// 1-based), while nextSong/previousSong pass 0-based indexes to setSong --
// confirm the intended base.
var currentlyPlayingSongNumber = null;
var currentSongFromAlbum = null; // song object from currentAlbum.songs
var currentSoundFile = null; // buzz.sound instance for the loaded song
var currentVolume = 80; // volume percentage, 0-100
// HTML snippets swapped into song rows and the player bar.
var playButtonTemplate = '<a class="album-song-button"><span class="ion-play"></span></a>';
var pauseButtonTemplate = '<a class="album-song-button"><span class="ion-pause"></span></a>';
var playerBarPlayButton = '<span class="ion-play"></span>';
var playerBarPauseButton = '<span class="ion-pause"></span>';
// Angular filter formatting a duration in seconds as "m:ss",
// e.g. 73.4 -> "1:13", 65 -> "1:05".
blocjams.filter('songTime', function() {
return function(input) {
var totalSeconds = Math.floor(Number.parseFloat(input));
var minutes = Math.floor(totalSeconds / 60);
var seconds = totalSeconds % 60;
// Zero-pad the seconds component to two digits.
var padded = (seconds < 10 ? '0' : '') + seconds;
return minutes + ':' + padded;
};
});
blocjams.factory('SongPlayer', ['SeekBarService', function(SeekBarService) {
return {
/**
 * Builds a jQuery <tr> for one song, wiring click (play/pause toggle)
 * and hover (show/hide play button) handlers.
 * @param {number} songNumber - 1-based track number shown in the row.
 * @param {string} songName - song title.
 * @param {number} songLength - duration in seconds.
 * @returns {jQuery} the constructed row.
 */
createSongRow: function(songNumber, songName, songLength) {
var template =
'<tr class="album-view-song-item">'
+ '  <td class="song-item-number" data-song-number="' + songNumber + '">' + songNumber + '</td>'
+ '  <td class="song-item-title">' + songName + '</td>'
// NOTE(review): filterTimeCode is not defined on this object; presumably
// the songTime filter was meant -- confirm.
+ '  <td class="song-item-duration">' + this.filterTimeCode(songLength) + '</td>'
+ '</tr>'
;
var $row = $(template);
var clickHandler = function() {
var songNumber = parseInt($(this).attr('data-song-number'));
if (currentlyPlayingSongNumber !== null) {
// Revert to song number for currently playing song because user started playing new song.
// NOTE(review): getSongNumberCell is not defined in this file's visible
// scope -- confirm it exists globally.
var currentlyPlayingCell = getSongNumberCell(currentlyPlayingSongNumber);
currentlyPlayingCell.html(currentlyPlayingSongNumber);
}
if (currentlyPlayingSongNumber !== songNumber) {
// NOTE(review): inside this jQuery handler `this` is the clicked DOM
// element, not the SongPlayer object, so this.setSong is undefined and
// will throw -- confirm and capture the service in a closure variable.
this.setSong(songNumber);
currentSoundFile.play();
// NOTE(review): updateSeekBarWhileSongPlays is a method of
// SeekBarService, not a free function -- confirm this resolves.
updateSeekBarWhileSongPlays()
$(this).html(pauseButtonTemplate);
currentSongFromAlbum = currentAlbum.songs[songNumber - 1];
var $volumeFill = $('.volume .fill');
var $volumeThumb = $('.volume .thumb');
$volumeFill.width(currentVolume + '%');
$volumeThumb.css({left: currentVolume + '%'});
} else if (currentlyPlayingSongNumber === songNumber) {
if (currentSoundFile.isPaused()) {
$(this).html(pauseButtonTemplate);
$('.main-controls .play-pause').html(playerBarPauseButton);
currentSoundFile.play();
updateSeekBarWhileSongPlays()
} else {
$(this).html(playButtonTemplate);
$('.main-controls .play-pause').html(playerBarPlayButton);
currentSoundFile.pause();
}
}
// NOTE(review): updatePlayerBarSong is not defined in the visible scope.
updatePlayerBarSong(currentSongFromAlbum, currentAlbum);
};
// Show a play button when hovering a row that is not currently playing.
var onHover = function(event) {
var songNumberCell = $(this).find('.song-item-number');
var songNumber = parseInt(songNumberCell.attr('data-song-number'));
if (songNumber !== currentlyPlayingSongNumber) {
songNumberCell.html(playButtonTemplate);
}
};
// Restore the plain track number when the pointer leaves the row.
var offHover = function(event) {
var songNumberCell = $(this).find('.song-item-number');
var songNumber = parseInt(songNumberCell.attr('data-song-number'));
if (songNumber !== currentlyPlayingSongNumber) {
songNumberCell.html(songNumber);
}
};
$row.find('.song-item-number').click(clickHandler);
// #2
$row.hover(onHover, offHover);
// #3
return $row;
},
setCurrentAlbum: function(album) {
currentAlbum = album
// var $albumTitle = $('.album-view-title');
// var $albumArtist = $('.album-view-artist');
// var $albumReleaseInfo = $('.album-view-release-info');
// var $albumImage = $('.album-cover-art');
// var $albumSongList = $('.album-view-song-list');
//
// $albumTitle.text(album.name);
// $albumArtist.text(album.artist);
// $albumReleaseInfo.text(album.year + ' ' + album.label);
// $albumImage.attr('src', album.albumArtUrl);
//
// $albumSongList.empty();
//
// for (i = 0; i < album.songs.length; i++) {
// var $newRow = this.createSongRow(i + 1, album.songs[i].name, album.songs[i].length);
// $albumSongList.append($newRow);
// }
},
/**
 * Stops any loaded sound, records the new song, creates a buzz sound
 * file for it, starts playback and syncs the seek bar and volume.
 * @param {number} songNumber - see index-base note below.
 */
setSong: function(songNumber) {
// assigns currentlyPlayingSongNumber and currentSongFromAlbum a new value based on the new song number
if (currentSoundFile) {
currentSoundFile.stop();
}
currentlyPlayingSongNumber = parseInt(songNumber);
// NOTE(review): this indexes songs[songNumber] (0-based), but
// createSongRow's clickHandler passes a 1-based number and indexes
// songs[songNumber - 1] itself, while nextSong/previousSong pass a
// 0-based index -- the two call sites disagree; confirm the intended
// base before changing either.
currentSongFromAlbum = currentAlbum.songs[songNumber];
currentSoundFile = new buzz.sound(currentSongFromAlbum.audioUrl, {
formats: [ 'mp3' ],
preload: true
});
currentSoundFile.play()
SeekBarService.updateSeekBarWhileSongPlays();
this.setVolume(currentVolume);
SeekBarService.setTotalTimeInPlayerBar(currentSongFromAlbum.length)
},
nextSong: function() {
var getLastSongNumber= function(index) {
return index == 0 ? currentAlbum.songs.length : index;
};
var trackIndex = function(album, song) {
return album.songs.indexOf(song);
};
var currentSongIndex = trackIndex(currentAlbum, currentSongFromAlbum);
// Note that we're _incrementing_ the song here
currentSongIndex++;
if (currentSongIndex >= currentAlbum.songs.length) {
currentSongIndex = 0;
}
// Set a new current song
this.setSong(currentSongIndex)
currentSongFromAlbum = currentAlbum.songs[currentSongIndex];
//
// // Update the Player Bar information
// $('.currently-playing .song-name').text(currentSongFromAlbum.name);
// $('.currently-playing .artist-name').text(currentAlbum.artist);
// $('.currently-playing .artist-song-mobile').text(currentSongFromAlbum.name + " - " + currentAlbum.name);
// $('.main-controls .play-pause').html(playerBarPauseButton);
//
// var lastSongNumber = getLastSongNumber(currentSongIndex);
// var $nextSongNumberCell = getSongNumberCell(currentlyPlayingSongNumber);
// var $lastSongNumberCell = getSongNumberCell(lastSongNumber);
//
// $nextSongNumberCell.html(pauseButtonTemplate);
// $lastSongNumberCell.html(lastSongNumber);
},
previousSong: function() {
var getLastSongNumber = function(index) {
return index == (currentAlbum.songs.length - 1) ? 1 : index + 2;
};
var trackIndex = function(album, song) {
return album.songs.indexOf(song);
};
var currentSongIndex = trackIndex(currentAlbum, currentSongFromAlbum);
// Note that we're _decrementing_ the index here
currentSongIndex--;
if (currentSongIndex < 0) {
currentSongIndex = currentAlbum.songs.length - 1;
}
// Set a new current song
this.setSong(currentSongIndex)
currentSongFromAlbum = currentAlbum.songs[currentSongIndex];
//
// // Update the Player Bar information
// $('.currently-playing .song-name').text(currentSongFromAlbum.name);
// $('.currently-playing .artist-name').text(currentAlbum.artist);
// $('.currently-playing .artist-song-mobile').text(currentSongFromAlbum.name + " - " + currentAlbum.name);
// $('.main-controls .play-pause').html(playerBarPauseButton);
//
// var lastSongNumber = getLastSongNumber(currentSongIndex);
// var $previousSongNumberCell = getSongNumberCell(currentlyPlayingSongNumber);
// var $lastSongNumberCell = getSongNumberCell(lastSongNumber);
//
// $previousSongNumberCell.html(pauseButtonTemplate);
// $lastSongNumberCell.html(lastSongNumber);
},
playPause: function() {
if (currentSoundFile.isPaused() == false) {
currentSoundFile.pause();
} else if (currentSoundFile.isPaused() == true) {
currentSoundFile.play();
};
},
/**
 * Sets the playback volume on the loaded sound file, if any.
 * @param {number} volume - volume percentage, 0-100.
 */
setVolume: function(volume) {
if (currentSoundFile) {
currentSoundFile.setVolume(volume);
}
},
getVolume: function(volume) {
if (currentSoundFile) {
var currentVolume = currentSoundFile.volume;
console.log(currentVolume)
}
},
/**
 * Jumps playback of the loaded sound file to the given position.
 * @param {number} time - target position in seconds.
 */
seek: function(time) {
if (currentSoundFile) {
currentSoundFile.setTime(time);
}
},
getCurrentSongTime: function() {
if (currentSoundFile) {
var currentTime = currentSoundFile.getTime;
}
},
}
}]);
// Service handling seek-bar rendering and the player-bar time labels.
// NOTE(review): .service() normally expects a constructor; this works only
// because a constructor returning an object overrides `this` -- .factory()
// would express the intent more directly.
blocjams.service('SeekBarService', ['songTimeFilter', function(songTimeFilter) {
return {
// Writes the formatted current position into the player bar.
setCurrentTimeInPlayerBar: function(currentTime) {
var $currentTimeElement = $('.seek-control .current-time');
$currentTimeElement.text(songTimeFilter(currentTime));
},
// Writes the formatted total song length into the player bar.
setTotalTimeInPlayerBar: function(totalTime) {
var $currentTimeElement = $('.seek-control .total-time');
$currentTimeElement.text(songTimeFilter(totalTime));
},
// Moves a seek bar's fill and thumb to the given ratio, clamped to
// [0, 1]. (A copy of this helper also exists in the slider directive.)
updateSeekPercentage: function($seekBar, seekBarFillRatio) {
var offsetXPercent = seekBarFillRatio * 100;
offsetXPercent = Math.max(0, offsetXPercent);
offsetXPercent = Math.min(100, offsetXPercent);
var percentageString = offsetXPercent + '%';
$seekBar.find('.fill').width(percentageString);
$seekBar.find('.thumb').css({left: percentageString});
},
// Binds to the module-global currentSoundFile's timeupdate event and
// keeps the duration slider and the time label in sync.
updateSeekBarWhileSongPlays: function() {
var self = this;
if (currentSoundFile) {
currentSoundFile.bind('timeupdate', function(event) {
// Inside the buzz event handler, `this` is the sound file.
var currentTime = this.getTime();
var songlength = this.getDuration();
var seekBarFillRatio = this.getTime() / this.getDuration();
var $seekBar = $("slider[control-type='duration'] .seek-bar");
self.updateSeekPercentage($seekBar, seekBarFillRatio);
self.setCurrentTimeInPlayerBar((currentTime));
});
}
}
}
}]);
// <slider> element directive: renders a draggable seek bar used both for
// playback position (control-type="duration") and volume
// (control-type="volume").
// NOTE(review): the DI annotation lists 'SongPlayer' but the factory
// function declares no parameter; the injected value is silently dropped
// and SongPlayer is instead obtained via the controller -- confirm and
// remove the stale annotation entry.
blocjams.directive('slider', ['SongPlayer', function() {
return {
templateUrl: 'app/templates/slider.html',
restrict: 'E',
link: function(scope, element, attributes){
// Moves the fill and thumb to the given ratio, clamped to [0, 1].
// (Duplicate of SeekBarService.updateSeekPercentage.)
var updateSeekPercentage = function($seekBar, seekBarFillRatio) {
var offsetXPercent = seekBarFillRatio * 100;
offsetXPercent = Math.max(0, offsetXPercent);
offsetXPercent = Math.min(100, offsetXPercent);
var percentageString = offsetXPercent + '%';
$seekBar.find('.fill').width(percentageString);
$seekBar.find('.thumb').css({left: percentageString});
};
// Drag handling: while the thumb is held, track the pointer and either
// seek the song or change the volume depending on control-type.
element.find('.thumb').mousedown(function(event) {
var $seekBar = element.find(".seek-bar");
$(document).bind('mousemove.thumb', function(event){
var offsetX = event.pageX - $seekBar.offset().left;
var barWidth = $seekBar.width();
var seekBarFillRatio = (offsetX / barWidth) ;
// NOTE(review): leftover debug logging on every mousemove.
console.log(attributes.controlType)
if(attributes.controlType == "duration") {
scope.SongPlayer.seek(seekBarFillRatio * currentSoundFile.getDuration());
} else if(attributes.controlType == "volume") {
scope.SongPlayer.setVolume(seekBarFillRatio * 100);
}
updateSeekPercentage($seekBar, seekBarFillRatio);
});
// Release the document-level handlers when the drag ends.
$(document).bind('mouseup.thumb', function() {
$(document).unbind('mousemove.thumb');
$(document).unbind('mouseup.thumb');
});
});
},
controller: function($scope, SongPlayer){
$scope.SongPlayer = SongPlayer;
}
}
}]);
// Route configuration (ui-router): landing page, album collection, and a
// single-album view. Uses hashbang URLs; html5Mode is intentionally left
// disabled below.
// NOTE(review): albumPicasso and albumMarconi are not defined in this
// file's visible scope; presumably they are globals from a fixtures
// script -- confirm the load order.
blocjams.config(function($stateProvider, $locationProvider) {
// $locationProvider.html5Mode({
//   enabled: true,
//   requireBase: false
// });
$stateProvider.state('landing', {
url: '/landing',
controller: function($scope){
$scope.tagline = 'Turn the music up!';
},
controllerAs: 'landingCtrl',
templateUrl: 'app/templates/landing.html'
});
$stateProvider.state('collection', {
url: '/collection',
controller: function($scope){
$scope.albums = [albumPicasso, albumMarconi]
},
templateUrl: 'app/templates/collection.html'
});
$stateProvider.state('album', {
url: '/album',
controller: function($scope, SongPlayer, SeekBarService){
// The album view is hard-wired to albumPicasso for now.
SongPlayer.setCurrentAlbum(albumPicasso)
$scope.songPlayer = SongPlayer;
$scope.seekBarService = SeekBarService;
// $scope.SongTimeFilter = SongTimeFilter;
$scope.album = albumPicasso
$scope.songs = albumPicasso.songs
},
templateUrl: 'app/templates/album.html'
});
});
| apache-2.0 |
pczhaoyun/obtainfo | blog/forms.py | 2004 | """Forms for Zinnia admin"""
from django.contrib import admin
from django import forms
from django.db.models import ManyToOneRel
from django.db.models import ManyToManyRel
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from django.contrib.admin.widgets import RelatedFieldWidgetWrapper
from mptt.forms import TreeNodeChoiceField
from zinnia.models.entry import Entry
from zinnia.models.category import Category
from zinnia.admin.widgets import MPTTFilteredSelectMultiple
from zinnia.admin.fields import MPTTModelMultipleChoiceField
class CustomEntryAdminForm(forms.ModelForm):
    """
    Form for Entry's Admin: a tree-aware category selector plus extra
    validation for entries marked as featured.
    """
    categories = MPTTModelMultipleChoiceField(
        label=_('Categories'), required=False,
        queryset=Category.objects.all(),
        widget=MPTTFilteredSelectMultiple(_('categories'), False,
                                          attrs={'rows': '10'}))

    def __init__(self, *args, **kwargs):
        super(CustomEntryAdminForm, self).__init__(*args, **kwargs)
        # Wrap the category widget so the admin renders its related-object
        # controls (the green "add" button) next to it.
        rel = ManyToManyRel(Category, 'id')
        # NOTE(review): ModelForm does not define admin_site; it is
        # presumably attached to the form class externally by the admin
        # integration -- confirm before reusing this form elsewhere.
        self.fields['categories'].widget = RelatedFieldWidgetWrapper(
            self.fields['categories'].widget, rel, self.admin_site)
        # Pre-select the current site for new entries.
        self.fields['sites'].initial = [Site.objects.get_current()]

    def clean(self):
        """Require title, comment and image whenever 'featured' is set."""
        cleaned_data = super(CustomEntryAdminForm, self).clean()
        # Truthiness test instead of '== True'; BooleanField data is True,
        # False or None here.
        if cleaned_data.get('featured'):
            required = ('featured_short_title',
                        'featured_short_comment',
                        'featured_image')
            if not all(cleaned_data.get(name) for name in required):
                raise forms.ValidationError(
                    "when featured=True, we must set short title and comment and image")
        return cleaned_data

    class Meta:
        """
        CustomEntryAdminForm's Meta.
        """
        model = Entry
        fields = forms.ALL_FIELDS
pulcy/quark | providers/cluster_member.go | 1716 | // Copyright (c) 2016 Pulcy.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package providers
import (
	"bytes"
	"fmt"
)
// ClusterMember holds the identity and addresses of a single machine
// participating in a cluster.
type ClusterMember struct {
ClusterID string // ID of the cluster this is a member of (/etc/pulcy/cluster-id)
MachineID string // ID of the machine (/etc/machine-id)
ClusterIP string // IP address of the instance used for all private communication in the cluster
PrivateHostIP string // IP address of the host on the private network (can be ClusterIP)
EtcdProxy bool // If set, this member is an ETCD proxy
}
// ClusterMemberList is a list of cluster members.
type ClusterMemberList []ClusterMember
// Render formats the member list as one "machine-id=cluster-ip[ option...]"
// line per member. Options are " etcd-proxy" for proxy members and
// " private-host-ip=<ip>" when the host IP differs from the cluster IP.
// A bytes.Buffer replaces the previous repeated string concatenation,
// which copied the whole accumulated result on every iteration.
func (cml ClusterMemberList) Render() string {
	var buf bytes.Buffer
	for _, cm := range cml {
		options := ""
		if cm.EtcdProxy {
			options += " etcd-proxy"
		}
		if cm.PrivateHostIP != "" && cm.ClusterIP != cm.PrivateHostIP {
			options += " private-host-ip=" + cm.PrivateHostIP
		}
		fmt.Fprintf(&buf, "%s=%s%s\n", cm.MachineID, cm.ClusterIP, options)
	}
	return buf.String()
}
// Find returns the member whose ClusterIP equals the ClusterIP of the
// given instance, or a masked NotFoundError when no member matches.
func (cml ClusterMemberList) Find(instance ClusterInstance) (ClusterMember, error) {
for _, cm := range cml {
if cm.ClusterIP == instance.ClusterIP {
return cm, nil
}
}
return ClusterMember{}, maskAny(NotFoundError)
}
| apache-2.0 |
gaaiatinc/spa-framework | webpack.config.js | 1655 | const resolve = require("path").resolve;
const webpack = require("webpack");
// Name under which the bundle exposes its UMD global, and the output file.
var libraryName = "SPAFramework";
var outputFile = libraryName + ".js";
// Webpack build configuration: bundles libraryFacade(.jsx) into a single
// minified UMD library named SPAFramework.
module.exports = {
// configuration
name: "jsx bundling",
entry: {
libFacade: resolve(__dirname, "./libraryFacade")
},
output: {
path: resolve(__dirname),
filename: outputFile,
library: libraryName,
// UMD target: usable from AMD, CommonJS and as a browser global.
libraryTarget: "umd",
umdNamedDefine: true
},
context: resolve(__dirname),
externals: {
// Don't bundle these npm packages; resolve them from the listed
// global variables supplied by the host page instead.
"react": "React",
"react-dom": "ReactDOM",
"react-dom/server": "ReactDOMServer"
},
resolve: {
extensions: [".js", ".jsx"]
},
module: {
rules: [
{
// Transpile .js/.jsx sources with Babel (env + react presets).
test: /\.jsx?$/,
exclude: /(node_modules|bower_components)/,
use: [
{
loader: "babel-loader",
options: {
presets: ["env", "react"]
}
}
]
}
]
},
plugins: [
// Inline a production NODE_ENV so React dead-code paths are dropped.
new webpack.DefinePlugin({
"process.env.NODE_ENV": JSON.stringify("production")
}),
new webpack.optimize.UglifyJsPlugin({comments: false, mangle: true, compress: false})
]
};
// config
// .plugins
// .push(new webpack.optimize.UglifyJsPlugin({
// compressor: {
// screw_ie8: true,
// warnings: false
// }
// }));
| apache-2.0 |
JinBuHanLin/eshow-android | eshow_framwork/src/cn/google/gson/TypeAdapterFactory.java | 6574 | /*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.google.gson;
import cn.google.gson.reflect.TypeToken;
/**
* Creates type adapters for set of related types. Type adapter factories are
* most useful when several types share similar structure in their JSON form.
*
* <h3>Example: Converting enums to lowercase</h3>
* In this example, we implement a factory that creates type adapters for all
* enums. The type adapters will write enums in lowercase, despite the fact
* that they're defined in {@code CONSTANT_CASE} in the corresponding Java
* model: <pre> {@code
*
* public class LowercaseEnumTypeAdapterFactory implements TypeAdapterFactory {
* public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
* Class<T> rawType = (Class<T>) type.getRawType();
* if (!rawType.isEnum()) {
* return null;
* }
*
* final Map<String, T> lowercaseToConstant = new HashMap<String, T>();
* for (T constant : rawType.getEnumConstants()) {
* lowercaseToConstant.put(toLowercase(constant), constant);
* }
*
* return new TypeAdapter<T>() {
* public void write(JsonWriter out, T value) throws IOException {
* if (value == null) {
* out.nullValue();
* } else {
* out.value(toLowercase(value));
* }
* }
*
* public T read(JsonReader reader) throws IOException {
* if (reader.peek() == JsonToken.NULL) {
* reader.nextNull();
* return null;
* } else {
* return lowercaseToConstant.get(reader.nextString());
* }
* }
* };
* }
*
* private String toLowercase(Object o) {
* return o.toString().toLowerCase(Locale.US);
* }
* }
* }</pre>
*
* <p>Type adapter factories select which types they provide type adapters
* for. If a factory cannot support a given type, it must return null when
* that type is passed to {@link #create}. Factories should expect {@code
* create()} to be called on them for many types and should return null for
* most of those types. In the above example the factory returns null for
* calls to {@code create()} where {@code type} is not an enum.
*
* <p>A factory is typically called once per type, but the returned type
* adapter may be used many times. It is most efficient to do expensive work
* like reflection in {@code create()} so that the type adapter's {@code
* read()} and {@code write()} methods can be very fast. In this example the
* mapping from lowercase name to enum value is computed eagerly.
*
* <p>As with type adapters, factories must be <i>registered</i> with a {@link
* GsonBuilder} for them to take effect: <pre> {@code
*
* GsonBuilder builder = new GsonBuilder();
* builder.registerTypeAdapterFactory(new LowercaseEnumTypeAdapterFactory());
* ...
* Gson gson = builder.create();
* }</pre>
* If multiple factories support the same type, the factory registered earlier
* takes precedence.
*
* <h3>Example: composing other type adapters</h3>
* In this example we implement a factory for Guava's {@code Multiset}
* collection type. The factory can be used to create type adapters for
* multisets of any element type: the type adapter for {@code
* Multiset<String>} is different from the type adapter for {@code
* Multiset<URL>}.
*
* <p>The type adapter <i>delegates</i> to another type adapter for the
* multiset elements. It figures out the element type by reflecting on the
* multiset's type token. A {@code Gson} is passed in to {@code create} for
* just this purpose: <pre> {@code
*
* public class MultisetTypeAdapterFactory implements TypeAdapterFactory {
* public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> typeToken) {
* Type type = typeToken.getType();
* if (typeToken.getRawType() != Multiset.class
* || !(type instanceof ParameterizedType)) {
* return null;
* }
*
* Type elementType = ((ParameterizedType) type).getActualTypeArguments()[0];
* TypeAdapter<?> elementAdapter = gson.getAdapter(TypeToken.get(elementType));
* return (TypeAdapter<T>) newMultisetAdapter(elementAdapter);
* }
*
* private <E> TypeAdapter<Multiset<E>> newMultisetAdapter(
* final TypeAdapter<E> elementAdapter) {
* return new TypeAdapter<Multiset<E>>() {
* public void write(JsonWriter out, Multiset<E> value) throws IOException {
* if (value == null) {
* out.nullValue();
* return;
* }
*
* out.beginArray();
* for (Multiset.Entry<E> entry : value.entrySet()) {
* out.value(entry.getCount());
* elementAdapter.write(out, entry.getElement());
* }
* out.endArray();
* }
*
* public Multiset<E> read(JsonReader in) throws IOException {
* if (in.peek() == JsonToken.NULL) {
* in.nextNull();
* return null;
* }
*
* Multiset<E> result = LinkedHashMultiset.create();
* in.beginArray();
* while (in.hasNext()) {
* int count = in.nextInt();
* E element = elementAdapter.read(in);
* result.add(element, count);
* }
* in.endArray();
* return result;
* }
* };
* }
* }
* }</pre>
* Delegating from one type adapter to another is extremely powerful; it's
* the foundation of how Gson converts Java objects and collections. Whenever
* possible your factory should retrieve its delegate type adapter in the
* {@code create()} method; this ensures potentially-expensive type adapter
* creation happens only once.
*
* @since 2.1
*/
public interface TypeAdapterFactory {
  /**
   * Returns a type adapter for {@code type}, or null if this factory doesn't
   * support {@code type}.
   *
   * @param gson the active {@code Gson} instance; factories use it to look up
   *     delegate adapters for component types (see the class example above)
   * @param type the type token describing the type the returned adapter must
   *     handle
   * @return a type adapter for {@code type}, or {@code null} if this factory
   *     does not support {@code type}
   */
  <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type);
}
| apache-2.0 |
zakyalvan/spring-envers-audit | spring-envers-audit-core/src/main/java/com/innovez/core/audit/repository/DefaultRevisionMetadata.java | 955 | package com.innovez.core.audit.repository;
import org.joda.time.DateTime;
import org.springframework.data.history.RevisionMetadata;
import org.springframework.util.Assert;
import com.innovez.core.audit.entity.RevisionInfoEntity;
/**
* Default implementation of class which hold meta information of revision.
*
* @author zakyalvan
*/
public class DefaultRevisionMetadata implements RevisionMetadata<Long> {
private final RevisionInfoEntity revisionInfoEntity;
public DefaultRevisionMetadata(RevisionInfoEntity revisionInfoEntity) {
Assert.notNull(revisionInfoEntity, "Revision info entity should not be null");
this.revisionInfoEntity = revisionInfoEntity;
}
public Long getRevisionNumber() {
return revisionInfoEntity.getRevision();
}
public DateTime getRevisionDate() {
return new DateTime(revisionInfoEntity.getTimestamp());
}
@SuppressWarnings("unchecked")
public <T> T getDelegate() {
return (T) revisionInfoEntity;
}
} | apache-2.0 |
LindaLawton/Google-Dotnet-Samples | Samples/DoubleClick Search API/v2/ConversionSample.cs | 13764 | // Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/)
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0
// Template File Name: methodTemplate.tt
// Build date: 2017-10-08
// C# generater version: 1.0.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// About
//
// Unoffical sample for the Doubleclicksearch v2 API for C#.
// This sample is designed to be used with the Google .Net client library. (https://github.com/google/google-api-dotnet-client)
//
// API Description: Reports and modifies your advertising data in DoubleClick Search (for example, campaigns, ad groups, keywords, and conversions).
// API Documentation Link https://developers.google.com/doubleclick-search/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/Doubleclicksearch/v2/rest
//
//------------------------------------------------------------------------------
// Installation
//
// This sample code uses the Google .Net client library (https://github.com/google/google-api-dotnet-client)
//
// NuGet package:
//
// Location: https://www.nuget.org/packages/Google.Apis.Doubleclicksearch.v2/
// Install Command: PM> Install-Package Google.Apis.Doubleclicksearch.v2
//
//------------------------------------------------------------------------------
using Google.Apis.Doubleclicksearch.v2;
using Google.Apis.Doubleclicksearch.v2.Data;
using System;
namespace GoogleSamplecSharpSample.Doubleclicksearchv2.Methods
{
public static class ConversionSample
{
public class ConversionGetOptionalParms
{
/// Numeric ID of the ad group.
public string AdGroupId { get; set; }
/// Numeric ID of the ad.
public string AdId { get; set; }
/// Numeric ID of the campaign.
public string CampaignId { get; set; }
/// Numeric ID of the criterion.
public string CriterionId { get; set; }
}
/// <summary>
/// Retrieves a list of conversions from a DoubleClick Search engine account.
/// Documentation https://developers.google.com/doubleclicksearch/v2/reference/conversion/get
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Doubleclicksearch service.</param>
/// <param name="agencyId">Numeric ID of the agency.</param>
/// <param name="advertiserId">Numeric ID of the advertiser.</param>
/// <param name="engineAccountId">Numeric ID of the engine account.</param>
/// <param name="endDate">Last date (inclusive) on which to retrieve conversions. Format is yyyymmdd.</param>
/// <param name="rowCount">The number of conversions to return per call.</param>
/// <param name="startDate">First date (inclusive) on which to retrieve conversions. Format is yyyymmdd.</param>
/// <param name="startRow">The 0-based starting index for retrieving conversions results.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>ConversionListResponse</returns>
public static ConversionList Get(DoubleclicksearchService service, string agencyId, string advertiserId, string engineAccountId, int? endDate, int? rowCount, int? startDate, int? startRow, ConversionGetOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (agencyId == null)
throw new ArgumentNullException(agencyId);
if (advertiserId == null)
throw new ArgumentNullException(advertiserId);
if (engineAccountId == null)
throw new ArgumentNullException(engineAccountId);
if (endDate == null)
throw new ArgumentNullException(endDate);
if (rowCount == null)
throw new ArgumentNullException(rowCount);
if (startDate == null)
throw new ArgumentNullException(startDate);
if (startRow == null)
throw new ArgumentNullException(startRow);
// Building the initial request.
var request = service.Conversion.Get(agencyId, advertiserId, engineAccountId, endDate, rowCount, startDate, startRow);
// Applying optional parameters to the request.
request = (ConversionResource.GetRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request Conversion.Get failed.", ex);
}
}
/// <summary>
/// Inserts a batch of new conversions into DoubleClick Search.
/// Documentation https://developers.google.com/doubleclicksearch/v2/reference/conversion/insert
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Doubleclicksearch service.</param>
/// <param name="body">A valid Doubleclicksearch v2 body.</param>
/// <returns>ConversionListResponse</returns>
public static ConversionList Insert(DoubleclicksearchService service, ConversionList body)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
// Make the request.
return service.Conversion.Insert(body).Execute();
}
catch (Exception ex)
{
throw new Exception("Request Conversion.Insert failed.", ex);
}
}
/// <summary>
/// Updates a batch of conversions in DoubleClick Search. This method supports patch semantics.
/// Documentation https://developers.google.com/doubleclicksearch/v2/reference/conversion/patch
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Doubleclicksearch service.</param>
/// <param name="advertiserId">Numeric ID of the advertiser.</param>
/// <param name="agencyId">Numeric ID of the agency.</param>
/// <param name="endDate">Last date (inclusive) on which to retrieve conversions. Format is yyyymmdd.</param>
/// <param name="engineAccountId">Numeric ID of the engine account.</param>
/// <param name="rowCount">The number of conversions to return per call.</param>
/// <param name="startDate">First date (inclusive) on which to retrieve conversions. Format is yyyymmdd.</param>
/// <param name="startRow">The 0-based starting index for retrieving conversions results.</param>
/// <param name="body">A valid Doubleclicksearch v2 body.</param>
/// <returns>ConversionListResponse</returns>
public static ConversionList Patch(DoubleclicksearchService service, string advertiserId, string agencyId, int? endDate, string engineAccountId, int? rowCount, int? startDate, int? startRow, ConversionList body)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
if (advertiserId == null)
throw new ArgumentNullException(advertiserId);
if (agencyId == null)
throw new ArgumentNullException(agencyId);
if (endDate == null)
throw new ArgumentNullException(endDate);
if (engineAccountId == null)
throw new ArgumentNullException(engineAccountId);
if (rowCount == null)
throw new ArgumentNullException(rowCount);
if (startDate == null)
throw new ArgumentNullException(startDate);
if (startRow == null)
throw new ArgumentNullException(startRow);
// Make the request.
return service.Conversion.Patch(body, advertiserId, agencyId, endDate, engineAccountId, rowCount, startDate, startRow).Execute();
}
catch (Exception ex)
{
throw new Exception("Request Conversion.Patch failed.", ex);
}
}
/// <summary>
/// Updates a batch of conversions in DoubleClick Search.
/// Documentation https://developers.google.com/doubleclicksearch/v2/reference/conversion/update
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Doubleclicksearch service.</param>
/// <param name="body">A valid Doubleclicksearch v2 body.</param>
/// <returns>ConversionListResponse</returns>
public static ConversionList Update(DoubleclicksearchService service, ConversionList body)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
// Make the request.
return service.Conversion.Update(body).Execute();
}
catch (Exception ex)
{
throw new Exception("Request Conversion.Update failed.", ex);
}
}
/// <summary>
/// Updates the availabilities of a batch of floodlight activities in DoubleClick Search.
/// Documentation https://developers.google.com/doubleclicksearch/v2/reference/conversion/updateAvailability
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Doubleclicksearch service.</param>
/// <param name="body">A valid Doubleclicksearch v2 body.</param>
/// <returns>UpdateAvailabilityResponseResponse</returns>
public static UpdateAvailabilityResponse UpdateAvailability(DoubleclicksearchService service, UpdateAvailabilityRequest body)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
// Make the request.
return service.Conversion.UpdateAvailability(body).Execute();
}
catch (Exception ex)
{
throw new Exception("Request Conversion.UpdateAvailability failed.", ex);
}
}
}
public static class SampleHelpers
{
/// <summary>
/// Using reflection to apply optional parameters to the request.
///
/// If the optonal parameters are null then we will just return the request as is.
/// </summary>
/// <param name="request">The request. </param>
/// <param name="optional">The optional parameters. </param>
/// <returns></returns>
public static object ApplyOptionalParms(object request, object optional)
{
if (optional == null)
return request;
System.Reflection.PropertyInfo[] optionalProperties = (optional.GetType()).GetProperties();
foreach (System.Reflection.PropertyInfo property in optionalProperties)
{
// Copy value from optional parms to the request. They should have the same names and datatypes.
System.Reflection.PropertyInfo piShared = (request.GetType()).GetProperty(property.Name);
if (property.GetValue(optional, null) != null) // TODO Test that we do not add values for items that are null
piShared.SetValue(request, property.GetValue(optional, null), null);
}
return request;
}
}
} | apache-2.0 |
RokuHodo/Twitch-Bot | Messages/IRCMessage.cs | 4119 | using System;
using System.Collections.Generic;
using TwitchBot.Debugger;
using TwitchBot.Enums.Debugger;
using TwitchBot.Enums.Extensions;
using TwitchBot.Extensions;
namespace TwitchBot.Messages
{
    /// <summary>
    /// Parses a raw IRC line (as sent by Twitch chat) into its tags, prefix,
    /// command, and parameters (middle + trailing), per the IRCv3 message format.
    /// </summary>
    class IRCMessage
    {
        // IRCv3 tags (key -> value); empty when the line carried no "@" tag prefix.
        public Dictionary<string, string> tags { get; set; }
        // Sender/server prefix, i.e. the text before the command.
        public string prefix { get; set; }
        // The IRC command (e.g. PRIVMSG, PING).
        public string command { get; set; }
        // Everything after the command, unparsed.
        public string parameters { get; set; }
        // Space-separated parameters before the ":" trailing marker.
        public string[] middle;
        // Space-separated words of the trailing part (after ":"); empty if none.
        public string[] trailing;

        /// <summary>
        /// Parses <paramref name="irc_message"/> eagerly; all properties are
        /// populated by the time the constructor returns.
        /// </summary>
        public IRCMessage(string irc_message)
        {
            string irc_message_no_tags = string.Empty;
            tags = GetTags(irc_message, out irc_message_no_tags);
            prefix = GetPrefix(irc_message_no_tags);
            command = GetCommand(irc_message_no_tags);
            // NOTE(review): this passes the full message (tags included), not
            // irc_message_no_tags; GetParameters takes text after the command
            // string, which presumably never appears inside the tag block —
            // confirm against real Twitch traffic.
            parameters = GetParameters(command, irc_message, out middle, out trailing);
        }
        #region Parser functions
        /// <summary>
        /// Searches for tags attached to the irc message and extracts any that exist as a dictionary.
        /// Also outputs the message with the tag block stripped off.
        /// </summary>
        private Dictionary<string, string> GetTags(string irc_message, out string irc_message_no_tags)
        {
            Dictionary<string, string> tags = new Dictionary<string, string>();
            //irc message only contains tags when it is preceded with "@"
            if (!irc_message.StartsWith("@"))
            {
                irc_message_no_tags = irc_message;
                return tags;
            }
            //tags exist between "@" and the first space
            string tags_extracted = irc_message.TextBetween('@', ' ');
            //tags are delimited by ";"
            string[] tags_extracted_array = tags_extracted.StringToArray<string>(';'),
                    tags_array_temp;
            foreach (string tag in tags_extracted_array)
            {
                //each tag is a "key=value" pair
                tags_array_temp = tag.StringToArray<string>('=');
                try
                {
                    //there should never be a situation where this fails, but just in case
                    tags[tags_array_temp[0]] = tags_array_temp[1];
                }
                catch (Exception exception)
                {
                    DebugBot.Error(DebugMethod.GET, "tag", DebugError.NORMAL_EXCEPTION);
                    DebugBot.PrintLine(nameof(exception), exception.Message);
                }
            }
            //cut off the tags to make handling the message later easier
            irc_message_no_tags = irc_message.TextAfter(" ");
            return tags;
        }
        /// <summary>
        /// Gets the prefix of the irc message. The irc message passed must have no tags attached.
        /// </summary>
        public string GetPrefix(string irc_message)
        {
            return irc_message.TextBefore(" ");
        }
        /// <summary>
        /// Gets the irc message command (the second space-separated token). The irc message passed must have no tags attached.
        /// </summary>
        private string GetCommand(string irc_message)
        {
            return irc_message.TextBetween(' ', ' ');
        }
        /// <summary>
        /// Gets the parameters after the irc command and parses the middle and trailing parts of the message.
        /// The trailing part, when present, starts at the first ":".
        /// </summary>
        private string GetParameters(string irc_command, string irc_message, out string[] middle, out string[] trailing)
        {
            string parameters = irc_message.TextAfter(irc_command).RemovePadding(Padding.Left);
            //check to see if there is trailing
            if (parameters.IndexOf(":") != -1)
            {
                middle = parameters.TextBefore(":").RemovePadding(Padding.Both).StringToArray<string>(' ');
                trailing = parameters.TextAfter(":").RemovePadding(Padding.Both).StringToArray<string>(' ');
            }
            else
            {
                //no ":" marker: everything is middle, trailing stays empty
                middle = parameters.StringToArray<string>(' ');
                trailing = new string[0];
            }
            return parameters;
        }
        #endregion
    }
}
| apache-2.0 |
monoman/NugetCracker | Nuget/src/CommandLine/Commands/HelpCommand.cs | 8076 | using System;
using System.ComponentModel.Composition;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
using NuGet.Common;
namespace NuGet.Commands
{
[Export(typeof(HelpCommand))]
[Command(typeof(NuGetResources), "help", "HelpCommandDescription", AltName = "?", MaxArgs = 1,
UsageSummaryResourceName = "HelpCommandUsageSummary", UsageDescriptionResourceName = "HelpCommandUsageDescription",
UsageExampleResourceName = "HelpCommandUsageExamples")]
public class HelpCommand : Command
{
private readonly string _commandExe;
private readonly ICommandManager _commandManager;
private readonly string _helpUrl;
private readonly string _productName;
private string CommandName
{
get
{
if (Arguments != null && Arguments.Count > 0)
{
return Arguments[0];
}
return null;
}
}
[Option(typeof(NuGetResources), "HelpCommandAll")]
public bool All { get; set; }
[Option(typeof(NuGetResources), "HelpCommandMarkdown")]
public bool Markdown { get; set; }
[ImportingConstructor]
public HelpCommand(ICommandManager commandManager)
: this(commandManager, Assembly.GetExecutingAssembly().GetName().Name, Assembly.GetExecutingAssembly().GetName().Name, CommandLineConstants.NuGetDocsCommandLineReference)
{
}
[SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings", MessageId = "3#",
Justification = "We don't use the Url for anything besides printing, so it's ok to represent it as a string.")]
public HelpCommand(ICommandManager commandManager, string commandExe, string productName, string helpUrl)
{
_commandManager = commandManager;
_commandExe = commandExe;
_productName = productName;
_helpUrl = helpUrl;
}
public override void ExecuteCommand()
{
if (!String.IsNullOrEmpty(CommandName))
{
ViewHelpForCommand(CommandName);
}
else if (All && Markdown)
{
ViewMarkdownHelp();
}
else if (All)
{
ViewHelpForAllCommands();
}
else
{
ViewHelp();
}
}
public void ViewHelp()
{
Console.WriteLine("{0} Version: {1}", _productName, this.GetType().Assembly.GetName().Version);
Console.WriteLine("usage: {0} <command> [args] [options] ", _commandExe);
Console.WriteLine("Type '{0} help <command>' for help on a specific command.", _commandExe);
Console.WriteLine();
Console.WriteLine("Available commands:");
Console.WriteLine();
var commands = from c in _commandManager.GetCommands()
orderby c.CommandAttribute.CommandName
select c.CommandAttribute;
// Padding for printing
int maxWidth = commands.Max(c => c.CommandName.Length + GetAltText(c.AltName).Length);
foreach (var command in commands)
{
PrintCommand(maxWidth, command);
}
if (_helpUrl != null)
{
Console.WriteLine();
Console.WriteLine("For more information, visit {0}", _helpUrl);
}
}
private void PrintCommand(int maxWidth, CommandAttribute commandAttribute)
{
// Write out the command name left justified with the max command's width's padding
Console.Write(" {0, -" + maxWidth + "} ", GetCommandText(commandAttribute));
// Starting index of the description
int descriptionPadding = maxWidth + 4;
Console.PrintJustified(descriptionPadding, commandAttribute.Description);
}
private static string GetCommandText(CommandAttribute commandAttribute)
{
return commandAttribute.CommandName + GetAltText(commandAttribute.AltName);
}
public void ViewHelpForCommand(string commandName)
{
ICommand command = _commandManager.GetCommand(commandName);
CommandAttribute attribute = command.CommandAttribute;
Console.WriteLine("usage: {0} {1} {2}", _commandExe, attribute.CommandName, attribute.UsageSummary);
Console.WriteLine();
if (!String.IsNullOrEmpty(attribute.AltName))
{
Console.WriteLine("alias: {0}", attribute.AltName);
Console.WriteLine();
}
Console.WriteLine(attribute.Description);
Console.WriteLine();
if (attribute.UsageDescription != null)
{
int padding = 5;
Console.PrintJustified(padding, attribute.UsageDescription);
Console.WriteLine();
}
var options = _commandManager.GetCommandOptions(command);
if (options.Count > 0)
{
Console.WriteLine("options:");
Console.WriteLine();
// Get the max option width. +2 for showing + against multivalued properties
int maxOptionWidth = options.Max(o => o.Value.Name.Length) + 2;
// Get the max altname option width
int maxAltOptionWidth = options.Max(o => (o.Key.AltName ?? String.Empty).Length);
foreach (var o in options)
{
Console.Write(" -{0, -" + (maxOptionWidth + 2) + "}", o.Value.Name +
(TypeHelper.IsMultiValuedProperty(o.Value) ? " +" : String.Empty));
Console.Write(" {0, -" + (maxAltOptionWidth + 4) + "}", GetAltText(o.Key.AltName));
Console.PrintJustified((10 + maxAltOptionWidth + maxOptionWidth), o.Key.Description);
}
if (_helpUrl != null)
{
Console.WriteLine();
Console.WriteLine("For more information, visit {0}", _helpUrl);
}
Console.WriteLine();
}
}
private void ViewHelpForAllCommands()
{
var commands = from c in _commandManager.GetCommands()
orderby c.CommandAttribute.CommandName
select c.CommandAttribute;
TextInfo info = CultureInfo.CurrentCulture.TextInfo;
foreach (var command in commands)
{
Console.WriteLine(info.ToTitleCase(command.CommandName) + " Command");
ViewHelpForCommand(command.CommandName);
}
}
/// <summary>
/// Prints help for all commands in markdown format.
/// </summary>
private void ViewMarkdownHelp()
{
var commands = from c in _commandManager.GetCommands()
orderby c.CommandAttribute.CommandName
select c;
foreach (var command in commands)
{
var template = new HelpCommandMarkdownTemplate
{
CommandAttribute = command.CommandAttribute,
Options = from item in _commandManager.GetCommandOptions(command)
select new { Name = item.Value.Name, Description = item.Key.Description }
};
Console.WriteLine(template.TransformText());
}
}
private static string GetAltText(string altNameText)
{
if (String.IsNullOrEmpty(altNameText))
{
return String.Empty;
}
return String.Format(CultureInfo.CurrentCulture, " ({0})", altNameText);
}
}
}
| apache-2.0 |
markcowl/azure-sdk-for-net | sdk/testcommon/Azure.Graph.Rbac/src/Generated/Models/RequiredResourceAccess.cs | 5084 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Azure.Core;
namespace Azure.Graph.Rbac.Models
{
    /// <summary> Specifies the set of OAuth 2.0 permission scopes and app roles under the specified resource that an application requires access to. The specified OAuth 2.0 permission scopes may be requested by client applications (through the requiredResourceAccess collection) when calling a resource application. The requiredResourceAccess property of the Application entity is a collection of RequiredResourceAccess. </summary>
    public partial class RequiredResourceAccess : IDictionary<string, object>
    {
        /// <summary> Initializes a new instance of RequiredResourceAccess. </summary>
        /// <param name="resourceAccess"> The list of OAuth2.0 permission scopes and app roles that the application requires from the specified resource. </param>
        /// <exception cref="ArgumentNullException"> <paramref name="resourceAccess"/> is null. </exception>
        public RequiredResourceAccess(IEnumerable<ResourceAccess> resourceAccess)
        {
            if (resourceAccess == null)
            {
                throw new ArgumentNullException(nameof(resourceAccess));
            }
            ResourceAccess = resourceAccess.ToList();
            AdditionalProperties = new ChangeTrackingDictionary<string, object>();
        }
        /// <summary> Initializes a new instance of RequiredResourceAccess. </summary>
        /// <param name="resourceAccess"> The list of OAuth2.0 permission scopes and app roles that the application requires from the specified resource. </param>
        /// <param name="resourceAppId"> The unique identifier for the resource that the application requires access to. This should be equal to the appId declared on the target resource application. </param>
        /// <param name="additionalProperties"> Additional properties not covered by the declared members; exposed through the IDictionary implementation below. </param>
        internal RequiredResourceAccess(IList<ResourceAccess> resourceAccess, string resourceAppId, IDictionary<string, object> additionalProperties)
        {
            ResourceAccess = resourceAccess;
            ResourceAppId = resourceAppId;
            AdditionalProperties = additionalProperties;
        }
        /// <summary> The list of OAuth2.0 permission scopes and app roles that the application requires from the specified resource. </summary>
        public IList<ResourceAccess> ResourceAccess { get; }
        /// <summary> The unique identifier for the resource that the application requires access to. This should be equal to the appId declared on the target resource application. </summary>
        public string ResourceAppId { get; set; }
        /// <summary> Backing dictionary for undeclared properties; every IDictionary member of this class delegates to it. </summary>
        internal IDictionary<string, object> AdditionalProperties { get; }
        /// <inheritdoc />
        public IEnumerator<KeyValuePair<string, object>> GetEnumerator() => AdditionalProperties.GetEnumerator();
        /// <inheritdoc />
        IEnumerator IEnumerable.GetEnumerator() => AdditionalProperties.GetEnumerator();
        /// <inheritdoc />
        public bool TryGetValue(string key, out object value) => AdditionalProperties.TryGetValue(key, out value);
        /// <inheritdoc />
        public bool ContainsKey(string key) => AdditionalProperties.ContainsKey(key);
        /// <inheritdoc />
        public ICollection<string> Keys => AdditionalProperties.Keys;
        /// <inheritdoc />
        public ICollection<object> Values => AdditionalProperties.Values;
        /// <inheritdoc />
        int ICollection<KeyValuePair<string, object>>.Count => AdditionalProperties.Count;
        /// <inheritdoc />
        public void Add(string key, object value) => AdditionalProperties.Add(key, value);
        /// <inheritdoc />
        public bool Remove(string key) => AdditionalProperties.Remove(key);
        /// <inheritdoc />
        bool ICollection<KeyValuePair<string, object>>.IsReadOnly => AdditionalProperties.IsReadOnly;
        /// <inheritdoc />
        void ICollection<KeyValuePair<string, object>>.Add(KeyValuePair<string, object> value) => AdditionalProperties.Add(value);
        /// <inheritdoc />
        bool ICollection<KeyValuePair<string, object>>.Remove(KeyValuePair<string, object> value) => AdditionalProperties.Remove(value);
        /// <inheritdoc />
        bool ICollection<KeyValuePair<string, object>>.Contains(KeyValuePair<string, object> value) => AdditionalProperties.Contains(value);
        /// <inheritdoc />
        void ICollection<KeyValuePair<string, object>>.CopyTo(KeyValuePair<string, object>[] destination, int offset) => AdditionalProperties.CopyTo(destination, offset);
        /// <inheritdoc />
        void ICollection<KeyValuePair<string, object>>.Clear() => AdditionalProperties.Clear();
        /// <inheritdoc />
        public object this[string key]
        {
            get => AdditionalProperties[key];
            set => AdditionalProperties[key] = value;
        }
    }
}
| apache-2.0 |
magnetsystems/message-smack | smack-extensions/src/main/java/org/jivesoftware/smackx/pubsub/EmbeddedPacketExtension.java | 1918 | /**
*
* Copyright the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.pubsub;
import java.util.List;
import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.packet.PacketExtension;
import org.jivesoftware.smack.util.PacketParserUtils;
/**
* This interface defines {@link PacketExtension} implementations that contain other
* extensions. This effectively extends the idea of an extension within one of the
* top level {@link Stanza} types to consider any embedded element to be an extension
* of its parent. This more easily enables the usage of some of Smacks parsing
* utilities such as {@link PacketParserUtils#parsePacketExtension(String, String, org.xmlpull.v1.XmlPullParser)} to be used
* to parse any element of the XML being parsed.
*
* <p>Top level extensions have only one element, but they can have multiple children, or
* their children can have multiple children. This interface is a way of allowing extensions
* to be embedded within one another as a partial or complete one to one mapping of extension
* to element.
*
* @author Robin Collier
*/
public interface EmbeddedPacketExtension extends PacketExtension
{
	/**
	 * Get the list of embedded {@link PacketExtension} objects.
	 *
	 * @return the embedded {@link PacketExtension} objects; implementations
	 *     determine whether the list may be empty or modifiable
	 */
	List<PacketExtension> getExtensions();
}
| apache-2.0 |
citrix-openstack-build/oslo.concurrency | tests/unit/test_lockutils.py | 16651 | # Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import fcntl
import multiprocessing
import os
import shutil
import sys
import tempfile
import threading
import time
from oslo.config import cfg
from oslotest import base as test_base
import six
from six import moves
from oslo.concurrency.fixture import lockutils as fixtures
from oslo.concurrency import lockutils
from oslo.concurrency.openstack.common.fixture import config
class LockTestCase(test_base.BaseTestCase):
    def setUp(self):
        """Install the oslo.config fixture so each test can set lock_path."""
        super(LockTestCase, self).setUp()
        # self.config(...) lets individual tests override config options
        # (notably lock_path); the fixture restores them on tearDown.
        self.config = self.useFixture(config.Config()).config
def test_synchronized_wrapped_function_metadata(self):
@lockutils.synchronized('whatever', 'test-')
def foo():
"""Bar."""
pass
self.assertEqual(foo.__doc__, 'Bar.', "Wrapped function's docstring "
"got lost")
self.assertEqual(foo.__name__, 'foo', "Wrapped function's name "
"got mangled")
    def test_lock_acquire_release_file_lock(self):
        """While the parent holds the fcntl lock, forked children must fail
        to acquire it; once released, at least one child must succeed.
        """
        lock_dir = tempfile.mkdtemp()
        lock_file = os.path.join(lock_dir, 'lock')
        lock = lockutils._FcntlLock(lock_file)

        def try_lock():
            # Runs in a forked child process. Exit status encodes the result:
            # 1 = child acquired the lock, 0 = acquisition failed (IOError).
            lock.release()  # child co-owns it before fork
            try:
                my_lock = lockutils._FcntlLock(lock_file)
                my_lock.lockfile = open(lock_file, 'w')
                my_lock.trylock()
                my_lock.unlock()
                os._exit(1)
            except IOError:
                os._exit(0)

        def attempt_acquire(count):
            # Fork `count` children and return how many acquired the lock
            # (the sum of their exit codes).
            children = []
            for i in range(count):
                child = multiprocessing.Process(target=try_lock)
                child.start()
                children.append(child)
            exit_codes = []
            for child in children:
                child.join()
                exit_codes.append(child.exitcode)
            return sum(exit_codes)

        self.assertTrue(lock.acquire())
        try:
            # Lock held by the parent: no child may acquire it.
            acquired_children = attempt_acquire(10)
            self.assertEqual(0, acquired_children)
        finally:
            lock.release()

        try:
            # Lock released: at least one child must acquire it.
            acquired_children = attempt_acquire(5)
            self.assertNotEqual(0, acquired_children)
        finally:
            try:
                shutil.rmtree(lock_dir)
            except IOError:
                pass
    def test_lock_internally(self):
        """We can lock across multiple threads."""
        # Snapshot the semaphore registry so we can detect leaks afterwards.
        saved_sem_num = len(lockutils._semaphores)
        seen_threads = list()

        def f(_id):
            # Each thread appends its id 10 times while holding the lock, so a
            # correct lock yields 10 consecutive identical entries per thread.
            with lockutils.lock('testlock2', 'test-', external=False):
                for x in range(10):
                    seen_threads.append(_id)

        threads = []
        for i in range(10):
            thread = threading.Thread(target=f, args=(i,))
            threads.append(thread)
            thread.start()

        for thread in threads:
            thread.join()

        self.assertEqual(len(seen_threads), 100)
        # Looking at the seen threads, split it into chunks of 10, and verify
        # that the last 9 match the first in each chunk.
        for i in range(10):
            for j in range(9):
                self.assertEqual(seen_threads[i * 10],
                                 seen_threads[i * 10 + 1 + j])

        # The registry must return to its original size once all locks are idle.
        self.assertEqual(saved_sem_num, len(lockutils._semaphores),
                         "Semaphore leak detected")
def test_nested_synchronized_external_works(self):
"""We can nest external syncs."""
tempdir = tempfile.mkdtemp()
try:
self.config(lock_path=tempdir)
sentinel = object()
@lockutils.synchronized('testlock1', 'test-', external=True)
def outer_lock():
@lockutils.synchronized('testlock2', 'test-', external=True)
def inner_lock():
return sentinel
return inner_lock()
self.assertEqual(sentinel, outer_lock())
finally:
if os.path.exists(tempdir):
shutil.rmtree(tempdir)
    def _do_test_lock_externally(self):
        """We can lock across multiple processes."""
        def lock_files(handles_dir):
            # Runs in a forked child: while holding the shared external
            # lock, flock() 50 private files; any contention means another
            # process is inside the critical section at the same time.
            with lockutils.lock('external', 'test-', external=True):
                # Open some files we can use for locking
                handles = []
                for n in range(50):
                    path = os.path.join(handles_dir, ('file-%s' % n))
                    handles.append(open(path, 'w'))
                # Loop over all the handles and try locking the file
                # without blocking, keep a count of how many files we
                # were able to lock and then unlock. If the lock fails
                # we get an IOError and bail out with bad exit code
                count = 0
                for handle in handles:
                    try:
                        fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
                        count += 1
                        fcntl.flock(handle, fcntl.LOCK_UN)
                    except IOError:
                        # Non-zero exit tells the parent the lock failed.
                        os._exit(2)
                    finally:
                        handle.close()
                # Check if we were able to open all files
                self.assertEqual(50, count)
        handles_dir = tempfile.mkdtemp()
        try:
            children = []
            # Fork 50 competing child processes; each must exit 0.
            for n in range(50):
                pid = os.fork()
                if pid:
                    children.append(pid)
                else:
                    try:
                        lock_files(handles_dir)
                    finally:
                        # Child never returns into the test runner.
                        os._exit(0)
            for child in children:
                (pid, status) = os.waitpid(child, 0)
                if pid:
                    self.assertEqual(0, status)
        finally:
            if os.path.exists(handles_dir):
                shutil.rmtree(handles_dir, ignore_errors=True)
def test_lock_externally(self):
lock_dir = tempfile.mkdtemp()
self.config(lock_path=lock_dir)
try:
self._do_test_lock_externally()
finally:
if os.path.exists(lock_dir):
shutil.rmtree(lock_dir, ignore_errors=True)
def test_lock_externally_lock_dir_not_exist(self):
lock_dir = tempfile.mkdtemp()
os.rmdir(lock_dir)
self.config(lock_path=lock_dir)
try:
self._do_test_lock_externally()
finally:
if os.path.exists(lock_dir):
shutil.rmtree(lock_dir, ignore_errors=True)
def test_synchronized_with_prefix(self):
lock_name = 'mylock'
lock_pfix = 'mypfix-'
foo = lockutils.synchronized_with_prefix(lock_pfix)
@foo(lock_name, external=True)
def bar(dirpath, pfix, name):
return True
lock_dir = tempfile.mkdtemp()
self.config(lock_path=lock_dir)
self.assertTrue(bar(lock_dir, lock_pfix, lock_name))
def test_synchronized_without_prefix(self):
lock_dir = tempfile.mkdtemp()
self.config(lock_path=lock_dir)
@lockutils.synchronized('lock', external=True)
def test_without_prefix():
# We can't check much
pass
try:
test_without_prefix()
finally:
if os.path.exists(lock_dir):
shutil.rmtree(lock_dir, ignore_errors=True)
def test_synchronized_prefix_without_hypen(self):
lock_dir = tempfile.mkdtemp()
self.config(lock_path=lock_dir)
@lockutils.synchronized('lock', 'hypen', True)
def test_without_hypen():
# We can't check much
pass
try:
test_without_hypen()
finally:
if os.path.exists(lock_dir):
shutil.rmtree(lock_dir, ignore_errors=True)
    def test_contextlock(self):
        """Internal locks yield semaphores; external locks yield
        InterProcessLock instances."""
        lock_dir = tempfile.mkdtemp()
        self.config(lock_path=lock_dir)
        try:
            # Note(flaper87): Lock is not external, which means
            # a semaphore will be yielded
            with lockutils.lock("test") as sem:
                # Py2 exposes the semaphore type as threading._Semaphore.
                if six.PY2:
                    self.assertTrue(isinstance(sem, threading._Semaphore))
                else:
                    self.assertTrue(isinstance(sem, threading.Semaphore))
                # NOTE(flaper87): Lock is external so an InterProcessLock
                # will be yielded.
                with lockutils.lock("test2", external=True) as lock:
                    self.assertTrue(lock.exists())
                with lockutils.lock("test1",
                                    external=True) as lock1:
                    self.assertTrue(isinstance(lock1,
                                               lockutils.InterProcessLock))
        finally:
            if os.path.exists(lock_dir):
                shutil.rmtree(lock_dir, ignore_errors=True)
    def test_contextlock_unlocks(self):
        """Exiting a lock context releases the lock for reuse."""
        lock_dir = tempfile.mkdtemp()
        self.config(lock_path=lock_dir)
        sem = None
        try:
            with lockutils.lock("test") as sem:
                if six.PY2:
                    self.assertTrue(isinstance(sem, threading._Semaphore))
                else:
                    self.assertTrue(isinstance(sem, threading.Semaphore))
                with lockutils.lock("test2", external=True) as lock:
                    self.assertTrue(lock.exists())
                # NOTE(flaper87): Lock should be free
                with lockutils.lock("test2", external=True) as lock:
                    self.assertTrue(lock.exists())
                # NOTE(flaper87): Lock should be free
                # but semaphore should already exist.
                # NOTE(review): this re-acquires "test" while apparently
                # still inside the outer "test" block — confirm the
                # intended nesting (upstream dedents this with-block).
                with lockutils.lock("test") as sem2:
                    self.assertEqual(sem, sem2)
        finally:
            if os.path.exists(lock_dir):
                shutil.rmtree(lock_dir, ignore_errors=True)
def test_remove_lock_external_file(self):
lock_name = 'mylock'
lock_pfix = 'mypfix-remove-lock-test-'
lock_dir = tempfile.mkdtemp()
self.config(lock_path=lock_dir)
lockutils.remove_external_lock_file(lock_name, lock_pfix)
for ent in os.listdir(lock_dir):
self.assertRaises(OSError, ent.startswith, lock_pfix)
if os.path.exists(lock_dir):
shutil.rmtree(lock_dir, ignore_errors=True)
    def test_no_slash_in_b64(self):
        """A lock name whose encoded digest contains '/' must still work;
        a '/' leaking into the lock file name would break file creation."""
        # base64(sha1(foobar)) has a slash in it
        with lockutils.lock("foobar"):
            pass
class BrokenLock(lockutils._FileLock):
    """A file lock whose trylock() always fails with a chosen errno.

    Used to exercise error handling in the lock acquisition path.
    """
    def __init__(self, name, errno_code):
        super(BrokenLock, self).__init__(name)
        # errno attached to the IOError raised by trylock().
        self.errno_code = errno_code
    def unlock(self):
        # Nothing is ever actually locked, so unlocking is a no-op.
        pass
    def trylock(self):
        # Always fail, simulating an unacquirable lock.
        err = IOError()
        err.errno = self.errno_code
        raise err
class FileBasedLockingTestCase(test_base.BaseTestCase):
    """Tests for file-based (external) locking behavior."""
    def setUp(self):
        super(FileBasedLockingTestCase, self).setUp()
        # Fresh directory per test so lock files cannot collide.
        self.lock_dir = tempfile.mkdtemp()
    def test_lock_file_exists(self):
        """The lock file must exist on disk while the lock is held."""
        lock_file = os.path.join(self.lock_dir, 'lock-file')
        @lockutils.synchronized('lock-file', external=True,
                                lock_path=self.lock_dir)
        def foo():
            self.assertTrue(os.path.exists(lock_file))
        foo()
    def test_bad_acquire(self):
        """A lock that always fails to acquire raises ThreadError."""
        lock_file = os.path.join(self.lock_dir, 'lock')
        lock = BrokenLock(lock_file, errno.EBUSY)
        self.assertRaises(threading.ThreadError, lock.acquire)
    def test_interprocess_lock(self):
        """trylock() in the parent fails while a child holds the lock."""
        lock_file = os.path.join(self.lock_dir, 'processlock')
        pid = os.fork()
        if pid:
            # Make sure the child grabs the lock first
            start = time.time()
            while not os.path.exists(lock_file):
                if time.time() - start > 5:
                    self.fail('Timed out waiting for child to grab lock')
                time.sleep(0)
            lock1 = lockutils.InterProcessLock('foo')
            lock1.lockfile = open(lock_file, 'w')
            # The child holds the flock, so a non-blocking attempt fails.
            self.assertRaises(IOError, lock1.trylock)
        else:
            try:
                lock2 = lockutils.InterProcessLock('foo')
                lock2.lockfile = open(lock_file, 'w')
                lock2.trylock()
            finally:
                # NOTE(bnemec): This is racy, but I don't want to add any
                # synchronization primitives that might mask a problem
                # with the one we're trying to test here.
                time.sleep(.5)
                os._exit(0)
    def test_interthread_external_lock(self):
        """An external lock also serializes threads within one process."""
        call_list = []
        @lockutils.synchronized('foo', external=True, lock_path=self.lock_dir)
        def foo(param):
            """Simulate a long-running threaded operation."""
            call_list.append(param)
            # NOTE(bnemec): This is racy, but I don't want to add any
            # synchronization primitives that might mask a problem
            # with the one we're trying to test here.
            time.sleep(.5)
            call_list.append(param)
        def other(param):
            foo(param)
        thread = threading.Thread(target=other, args=('other',))
        thread.start()
        # Make sure the other thread grabs the lock
        start = time.time()
        while not os.path.exists(os.path.join(self.lock_dir, 'foo')):
            if time.time() - start > 5:
                self.fail('Timed out waiting for thread to grab lock')
            time.sleep(0)
        thread1 = threading.Thread(target=other, args=('main',))
        thread1.start()
        thread1.join()
        thread.join()
        # Each holder's two appends must be adjacent: no interleaving.
        self.assertEqual(call_list, ['other', 'other', 'main', 'main'])
    def test_non_destructive(self):
        """Taking a lock on an existing file must not truncate it."""
        lock_file = os.path.join(self.lock_dir, 'not-destroyed')
        with open(lock_file, 'w') as f:
            f.write('test')
        with lockutils.lock('not-destroyed', external=True,
                            lock_path=self.lock_dir):
            with open(lock_file) as f:
                self.assertEqual(f.read(), 'test')
class LockutilsModuleTestCase(test_base.BaseTestCase):
    """Tests around OSLO_LOCK_PATH environment handling."""

    def setUp(self):
        super(LockutilsModuleTestCase, self).setUp()
        # Remember the caller's value so tearDown can restore it.
        self.old_env = os.environ.get('OSLO_LOCK_PATH')

    def tearDown(self):
        if self.old_env is None:
            # The variable did not exist before the test; drop it if a test
            # created it.  pop() avoids the KeyError the old
            # ``del os.environ['OSLO_LOCK_PATH']`` raised when the variable
            # was never set at all.
            os.environ.pop('OSLO_LOCK_PATH', None)
        else:
            os.environ['OSLO_LOCK_PATH'] = self.old_env
        super(LockutilsModuleTestCase, self).tearDown()

    def _lock_path_conf_test(self, lock_dir):
        """Verify lock dir and lock file exist; runs in a child process.

        Exits with a distinct non-zero status on each failure so the
        parent can detect which check failed from the exit code.
        """
        cfg.CONF.unregister_opts(lockutils.util_opts)
        lockutils_ = moves.reload_module(lockutils)
        with lockutils_.lock('test-lock', external=True):
            if not os.path.exists(lock_dir):
                os._exit(2)
            if not os.path.exists(os.path.join(lock_dir, 'test-lock')):
                os._exit(3)

    def test_main(self):
        """lockutils.main() must export a usable OSLO_LOCK_PATH to the
        wrapped command."""
        script = '\n'.join([
            'import os',
            'lock_path = os.environ.get("OSLO_LOCK_PATH")',
            'assert lock_path is not None',
            'assert os.path.isdir(lock_path)',
        ])
        argv = ['', sys.executable, '-c', script]
        retval = lockutils.main(argv)
        self.assertEqual(retval, 0, "Bad OSLO_LOCK_PATH has been set")
class TestLockFixture(test_base.BaseTestCase):
    """Tests for the LockFixture test helper."""
    def setUp(self):
        super(TestLockFixture, self).setUp()
        self.config = self.useFixture(config.Config()).config
        self.tempdir = tempfile.mkdtemp()
    def _check_in_lock(self):
        # The external lock must still be held (file present) at this point.
        self.assertTrue(self.lock.exists())
    def tearDown(self):
        # tearDown runs while the fixture still holds the lock -- exactly
        # the guarantee LockFixture is supposed to provide.
        self._check_in_lock()
        super(TestLockFixture, self).tearDown()
    def test_lock_fixture(self):
        # Setup lock fixture to test that teardown is inside the lock
        self.config(lock_path=self.tempdir)
        fixture = fixtures.LockFixture('test-lock')
        self.useFixture(fixture)
        self.lock = fixture.lock
| apache-2.0 |
richard-strauss-werke/rsw-oxygen-framework | src/main/java/com/aerhard/oxygen/framework/inplace/InplaceButton.java | 10766 | package com.aerhard.oxygen.framework.inplace;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.net.MalformedURLException;
import java.net.URL;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.text.BadLocationException;
import org.apache.log4j.Logger;
import ro.sync.annotations.api.API;
import ro.sync.annotations.api.APIType;
import ro.sync.annotations.api.SourceType;
import ro.sync.ecss.extensions.api.AuthorAccess;
import ro.sync.ecss.extensions.api.CursorType;
import ro.sync.ecss.extensions.api.access.AuthorUtilAccess;
import ro.sync.ecss.extensions.api.editor.AbstractInplaceEditor;
import ro.sync.ecss.extensions.api.editor.AuthorInplaceContext;
import ro.sync.ecss.extensions.api.editor.EditingEvent;
import ro.sync.ecss.extensions.api.editor.InplaceEditorArgumentKeys;
import ro.sync.ecss.extensions.api.editor.InplaceRenderer;
import ro.sync.ecss.extensions.api.editor.RendererLayoutInfo;
import ro.sync.ecss.extensions.api.node.AuthorElement;
import ro.sync.exml.view.graphics.Dimension;
import ro.sync.exml.view.graphics.Font;
import ro.sync.exml.view.graphics.Point;
import ro.sync.exml.view.graphics.Rectangle;
/**
* Superclass of {@link EditButton}; based on the oXygen SDK
*
* @author Costi
* @author Adriana
* @author Alexander Erhard (adjustments)
*/
@API(type = APIType.EXTENDABLE, src = SourceType.PUBLIC)
public class InplaceButton extends AbstractInplaceEditor implements InplaceRenderer {
    /**
     * Logger for logging.
     */
    private static final Logger LOGGER = Logger.getLogger(InplaceButton.class.getName());
    /**
     * <code>true</code> if the platform is Eclipse.
     */
    private static final Boolean IS_ECLIPSE = Boolean.valueOf(System
            .getProperty("com.oxygenxml.is.eclipse.plugin"));
    /**
     * <code>true</code> if the platform is Windows.
     */
    private static final boolean IS_WIN32 = System.getProperty("os.name")
            .toUpperCase().startsWith("WIN");
    /**
     * The vertical gap of the panel layout.
     */
    private final static int VGAP = 0;
    /**
     * The horizontal gap of the panel layout.
     */
    private final static int HGAP = 5;
    /**
     * Browse URL button.
     */
    private final JButton nameBtn;
    /**
     * URL chooser panel.
     */
    private final JPanel nameChooserPanel;
    /**
     * <code>true</code> if we are during browse. Suppresses the focus-lost
     * commit while {@link #performAction} is running.
     */
    private boolean isBrowsing = false;
    /**
     * Author Util. Set lazily in {@link #prepareComponents}.
     */
    private AuthorUtilAccess utilAccess;
    /**
     * The default font, captured from the button at construction time and
     * restored when the context supplies no font.
     */
    private final java.awt.Font defaultFont;
    /**
     * Constructor. Builds the button-in-panel UI and wires the ESC key and
     * focus-lost handling.
     */
    public InplaceButton() {
        nameChooserPanel = new JPanel(new BorderLayout(HGAP, VGAP));
        nameBtn = new JButton();
        if (IS_WIN32) {
            // WE ARE ON WINDOWS
            getNameBtn().setBorder(BorderFactory.createEmptyBorder(3, 6, 3, 5));
        }
        nameChooserPanel.add(getNameBtn(), BorderLayout.CENTER);
        nameChooserPanel.setOpaque(false);
        getNameBtn().addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    // ESC must cancel the edit.
                    e.consume();
                    cancelEditing();
                }
            }
        });
        FocusListener focusListener = new FocusAdapter() {
            @Override
            public void focusLost(FocusEvent e) {
                if (e.getOppositeComponent() != getNameBtn()
                        // && e.getOppositeComponent() != urlTextField
                        && !isBrowsing) {
                    // The focus is outside the components of this editor.
                    fireEditingStopped(new EditingEvent(getNameBtn().getText(),
                            e.getOppositeComponent() == null));
                }
            }
        };
        getNameBtn().addFocusListener(focusListener);
        defaultFont = getNameBtn().getFont();
    }
    /**
     * @see ro.sync.ecss.extensions.api.Extension#getDescription()
     */
    @Override
    public String getDescription() {
        return "A sample implementation that provides a browse button associated with a text field.";
    }
    // /////////////////////////// RENDERER METHODS //////////////////////
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceRenderer#getRendererComponent(ro.sync.ecss.extensions.api.editor.AuthorInplaceContext)
     */
    @Override
    public Object getRendererComponent(AuthorInplaceContext context) {
        // The renderer will be reused so we must make sure it's properly
        // initialized.
        prepareComponents(context, false);
        return nameChooserPanel;
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceRenderer#getRenderingInfo(ro.sync.ecss.extensions.api.editor.AuthorInplaceContext)
     */
    @Override
    public RendererLayoutInfo getRenderingInfo(AuthorInplaceContext context) {
        // The renderer will be reused so we must make sure it's properly
        // initialized.
        prepareComponents(context, false);
        return computeRenderingInfo(context);
    }
    /**
     * Compute the dimension of the editor.
     *
     * @param context
     *            The current context.
     *
     * @return Layout information.
     */
    private RendererLayoutInfo computeRenderingInfo(AuthorInplaceContext context) {
        final java.awt.Dimension preferredSize = getNameBtn()
                .getPreferredSize();
        int width = (int) (HGAP + getNameBtn().getPreferredSize().getWidth());
        // Get height correction
        int correction = 0;
        if (IS_ECLIPSE) {
            // When using the renderer for Eclipse, MAC OS with just an icon,
            // the SWING button is smaller than the SWT and when imposing the
            // size to
            // the SWT one the SWT button looks bad.
            correction = 5;
        }
        return new RendererLayoutInfo(getNameBtn().getBaseline(
                preferredSize.width, preferredSize.height), new Dimension(
                width, preferredSize.height + correction));
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceRenderer#getTooltipText(ro.sync.ecss.extensions.api.editor.AuthorInplaceContext,
     *      int, int)
     */
    @Override
    public String getTooltipText(AuthorInplaceContext context, int x, int y) {
        // The renderer will be reused so we must make sure it's properly
        // initialized.
        prepareComponents(context, false);
        return "Zum Ändern klicken.";
    }
    /**
     * Hook invoked when the button is pressed (see the action listener
     * installed in {@link #getEditorComponent}). No-op by default; subclasses
     * override it to plug in their editing behavior.
     *
     * @param context
     *            The current editing context.
     * @param authorAccess
     *            Access to Author-mode functionality.
     */
    public void performAction(AuthorInplaceContext context,
            AuthorAccess authorAccess) {
    }
    // /////////////////////////// EDITOR METHODS //////////////////////
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#getEditorComponent(ro.sync.ecss.extensions.api.editor.AuthorInplaceContext,
     *      ro.sync.exml.view.graphics.Rectangle,
     *      ro.sync.exml.view.graphics.Point)
     */
    @Override
    public Object getEditorComponent(final AuthorInplaceContext context,
            Rectangle allocation, Point mouseLocation) {
        prepareComponents(context, true);
        final AuthorAccess authorAccess = context.getAuthorAccess();
        getNameBtn().addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Guard the focus-lost listener against committing while
                // the action is in progress.
                isBrowsing = true;
                try {
                    performAction(context, authorAccess);
                } finally {
                    isBrowsing = false;
                }
            }
        });
        return nameChooserPanel;
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#getScrollRectangle()
     */
    @Override
    public Rectangle getScrollRectangle() {
        return null;
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#requestFocus()
     */
    @Override
    public void requestFocus() {
        getNameBtn().requestFocus();
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#getValue()
     */
    @Override
    public Object getValue() {
        String text = getNameBtn().getText();
        try {
            // If the text parses as a URL, strip any embedded credentials.
            URL clearedURL = utilAccess.removeUserCredentials(new URL(text));
            return clearedURL.toExternalForm();
        } catch (MalformedURLException e) {
            // Not a URL: fall back to the raw button text.
            LOGGER.error(e, e);
            return text;
        }
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#stopEditing()
     */
    @Override
    public void stopEditing() {
        stopEditing(false);
    }
    /**
     * Stops editing.
     *
     * @param onEnter
     *            <code>true</code> when triggered by ENTER, in which case the
     *            next edit location is requested instead of committing the
     *            current text.
     */
    public void stopEditing(boolean onEnter) {
        String text = getNameBtn().getText();
        if (onEnter) {
            fireNextEditLocationRequested();
        } else {
            fireEditingStopped(new EditingEvent(text));
        }
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceEditor#cancelEditing()
     */
    @Override
    public void cancelEditing() {
        fireEditingCanceled();
    }
    /**
     * Sets the button label from the text content of the edited element,
     * falling back to a placeholder when the element has no content.
     *
     * @param context
     *            The current context.
     */
    public void setBtnText(AuthorInplaceContext context) {
        AuthorElement element = context.getElem();
        String elementContent = null;
        try {
            elementContent = element.getTextContent();
        } catch (BadLocationException e) {
            LOGGER.error(e, e);
        }
        if (elementContent == null || elementContent.isEmpty()) {
            elementContent = "[wählen]";
        }
        getNameBtn().setText(elementContent);
    }
    /**
     * Prepare UI components.
     *
     * @param context
     *            The current context.
     * @param forEditing
     *            <code>true</code> when preparing for editing rather than
     *            plain rendering.
     */
    private void prepareComponents(AuthorInplaceContext context,
            boolean forEditing) {
        utilAccess = context.getAuthorAccess().getUtilAccess();
        // NOTE(review): return value is discarded and Map.get has no side
        // effects — looks like leftover code; confirm whether the color
        // argument was meant to be applied.
        context.getArguments().get(InplaceEditorArgumentKeys.PROPERTY_COLOR);
        setBtnText(context);
        // // We don't want an UNDO to reset the initial text.
        // UndoManager undoManager = (UndoManager)
        // urlTextField.getDocument().getProperty(UNDO_MANAGER_PROPERTY);
        // if (undoManager != null) {
        // undoManager.discardAllEdits();
        // }
        Font font = (Font) context.getArguments().get(
                InplaceEditorArgumentKeys.FONT);
        if (font != null) {
            java.awt.Font currentFont = new java.awt.Font(font.getName(),
                    font.getStyle(), font.getSize());
            getNameBtn().setFont(currentFont);
        } else {
            getNameBtn().setFont(defaultFont);
        }
        Point relMousePos = context.getRelativeMouseLocation();
        boolean rollover = false;
        if (relMousePos != null) {
            RendererLayoutInfo renderInfo = computeRenderingInfo(context);
            nameChooserPanel.setSize(renderInfo.getSize().width,
                    renderInfo.getSize().height);
            // Unless we do the layout we can't determine the component under
            // the mouse.
            nameChooserPanel.doLayout();
            Component componentAt = nameChooserPanel.getComponentAt(
                    relMousePos.x, relMousePos.y);
            rollover = componentAt == getNameBtn();
        }
        getNameBtn().getModel().setRollover(rollover);
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceRenderer#getCursorType(ro.sync.ecss.extensions.api.editor.AuthorInplaceContext,
     *      int, int)
     */
    @Override
    public CursorType getCursorType(AuthorInplaceContext context, int x, int y) {
        return CursorType.CURSOR_NORMAL;
    }
    /**
     * @see ro.sync.ecss.extensions.api.editor.InplaceRenderer#getCursorType(int,
     *      int)
     */
    @Override
    public CursorType getCursorType(int x, int y) {
        return null;
    }
    /**
     * @return The button used both as renderer and as editor component.
     */
    public JButton getNameBtn() {
        return nameBtn;
    }
} | apache-2.0 |
llohellohe/httpclient | httpclient/src/main/java/org/apache/http/impl/auth/BasicScheme.java | 6781 | /*
* ====================================================================
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.auth;
import org.apache.http.annotation.NotThreadSafe;
import org.apache.commons.codec.binary.Base64;
import org.apache.http.Header;
import org.apache.http.HttpRequest;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.ChallengeState;
import org.apache.http.auth.ContextAwareAuthScheme;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.AUTH;
import org.apache.http.auth.InvalidCredentialsException;
import org.apache.http.auth.MalformedChallengeException;
import org.apache.http.auth.params.AuthParams;
import org.apache.http.message.BufferedHeader;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.CharArrayBuffer;
import org.apache.http.util.EncodingUtils;
/**
* Basic authentication scheme as defined in RFC 2617.
* <p>
* The following parameters can be used to customize the behavior of this
* class:
* <ul>
* <li>{@link org.apache.http.auth.params.AuthPNames#CREDENTIAL_CHARSET}</li>
* </ul>
*
* @since 4.0
*/
@NotThreadSafe
public class BasicScheme extends RFC2617Scheme {

    /** Whether the basic authentication process is complete */
    private boolean complete;

    /**
     * Creates an instance of <tt>BasicScheme</tt> with the given challenge
     * state.
     *
     * @since 4.2
     */
    public BasicScheme(final ChallengeState challengeState) {
        super(challengeState);
        this.complete = false;
    }

    public BasicScheme() {
        this(null);
    }

    /**
     * Returns textual designation of the basic authentication scheme.
     *
     * @return <code>basic</code>
     */
    public String getSchemeName() {
        return "basic";
    }

    /**
     * Processes the Basic challenge.
     *
     * @param header the challenge header
     *
     * @throws MalformedChallengeException is thrown if the authentication challenge
     * is malformed
     */
    @Override
    public void processChallenge(
            final Header header) throws MalformedChallengeException {
        super.processChallenge(header);
        // Basic has no multi-step handshake: seeing a challenge completes it.
        this.complete = true;
    }

    /**
     * Tests if the Basic authentication process has been completed.
     *
     * @return <tt>true</tt> if Basic authorization has been processed,
     *   <tt>false</tt> otherwise.
     */
    public boolean isComplete() {
        return this.complete;
    }

    /**
     * Returns <tt>false</tt>. Basic authentication scheme is request based.
     *
     * @return <tt>false</tt>.
     */
    public boolean isConnectionBased() {
        return false;
    }

    /**
     * @deprecated (4.2) Use {@link ContextAwareAuthScheme#authenticate(Credentials, HttpRequest, org.apache.http.protocol.HttpContext)}
     */
    @Deprecated
    public Header authenticate(
            final Credentials credentials, final HttpRequest request) throws AuthenticationException {
        return authenticate(credentials, request, new BasicHttpContext());
    }

    /**
     * Produces basic authorization header for the given set of {@link Credentials}.
     *
     * @param credentials The set of credentials to be used for authentication
     * @param request The request being authenticated
     * @throws InvalidCredentialsException if authentication credentials are not
     *   valid or not applicable for this authentication scheme
     * @throws AuthenticationException if authorization string cannot
     *   be generated due to an authentication failure
     *
     * @return a basic authorization string
     */
    @Override
    public Header authenticate(
            final Credentials credentials,
            final HttpRequest request,
            final HttpContext context) throws AuthenticationException {
        if (credentials == null) {
            throw new IllegalArgumentException("Credentials may not be null");
        }
        if (request == null) {
            throw new IllegalArgumentException("HTTP request may not be null");
        }
        // Charset for encoding the user:password pair, taken from the
        // request parameters (AuthPNames.CREDENTIAL_CHARSET).
        String charset = AuthParams.getCredentialCharset(request.getParams());
        return authenticate(credentials, charset, isProxy());
    }

    /**
     * Returns a basic <tt>Authorization</tt> header value for the given
     * {@link Credentials} and charset.
     *
     * @param credentials The credentials to encode.
     * @param charset The charset to use for encoding the credentials
     * @param proxy Whether to emit a proxy (<tt>Proxy-Authorization</tt>)
     *   or an origin-server (<tt>Authorization</tt>) response header.
     *
     * @return a basic authorization header
     */
    public static Header authenticate(
            final Credentials credentials,
            final String charset,
            boolean proxy) {
        if (credentials == null) {
            throw new IllegalArgumentException("Credentials may not be null");
        }
        if (charset == null) {
            throw new IllegalArgumentException("charset may not be null");
        }
        StringBuilder tmp = new StringBuilder();
        tmp.append(credentials.getUserPrincipal().getName());
        tmp.append(":");
        // NOTE(review): a missing password is serialized as the literal
        // string "null"; kept for backward compatibility — confirm intended.
        tmp.append((credentials.getPassword() == null) ? "null" : credentials.getPassword());
        byte[] base64password = Base64.encodeBase64(
                EncodingUtils.getBytes(tmp.toString(), charset), false);
        CharArrayBuffer buffer = new CharArrayBuffer(32);
        if (proxy) {
            buffer.append(AUTH.PROXY_AUTH_RESP);
        } else {
            buffer.append(AUTH.WWW_AUTH_RESP);
        }
        buffer.append(": Basic ");
        buffer.append(base64password, 0, base64password.length);
        return new BufferedHeader(buffer);
    }

}
| apache-2.0 |
lcostantino/healing-os | healing/openstack/common/apiclient/base.py | 16398 | # Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack Foundation
# Copyright 2012 Grid Dynamics
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
# E1102: %s is not callable
# pylint: disable=E1102
import abc
import copy
import six
from six.moves.urllib import parse
from healing.openstack.common.apiclient import exceptions
from healing.openstack.common.gettextutils import _
from healing.openstack.common import strutils
def getid(obj):
    """Return the identifier of ``obj``.

    Abstracts the common pattern of allowing both an object or an object's
    ID (UUID) as a parameter when dealing with relationships: a
    Resource-like object yields its truthy ``uuid`` attribute, or failing
    that its ``id`` attribute; anything else is returned unchanged.
    """
    uuid = getattr(obj, 'uuid', None)
    if uuid:
        return uuid
    return getattr(obj, 'id', obj)
# TODO(aababilov): call run_hooks() in HookableMixin's child classes
class HookableMixin(object):
    """Mixin so classes can register and run hooks.

    Hooks are keyed by an arbitrary string type (e.g.
    '__pre_parse_args__') and stored on the class, so registration is
    shared across instances.
    """
    _hooks_map = {}

    @classmethod
    def add_hook(cls, hook_type, hook_func):
        """Register ``hook_func`` under ``hook_type``.

        :param cls: class that registers hooks
        :param hook_type: hook type, e.g., '__pre_parse_args__'
        :param hook_func: hook function
        """
        cls._hooks_map.setdefault(hook_type, []).append(hook_func)

    @classmethod
    def run_hooks(cls, hook_type, *args, **kwargs):
        """Invoke every hook registered under ``hook_type``.

        :param cls: class that registers hooks
        :param hook_type: hook type, e.g., '__pre_parse_args__'
        :param args: positional args passed to every hook function
        :param kwargs: keyword args passed to every hook function
        """
        for hook_func in cls._hooks_map.get(hook_type) or []:
            hook_func(*args, **kwargs)
class BaseManager(HookableMixin):
    """Basic manager type providing common operations.

    Managers interact with a particular type of API (servers, flavors, images,
    etc.) and provide CRUD operations for them.
    """
    # Subclasses set this to the Resource subclass they manage.
    resource_class = None

    def __init__(self, client):
        """Initializes BaseManager with `client`.

        :param client: instance of BaseClient descendant for HTTP requests
        """
        super(BaseManager, self).__init__()
        self.client = client

    def _list(self, url, response_key, obj_class=None, json=None):
        """List the collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param obj_class: class for constructing the returned objects
            (self.resource_class will be used by default)
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :returns: list of ``obj_class`` instances built from the response
        """
        if json:
            body = self.client.post(url, json=json).json()
        else:
            body = self.client.get(url).json()

        if obj_class is None:
            obj_class = self.resource_class

        data = body[response_key]
        # NOTE(ja): keystone returns values as list as {'values': [ ... ]}
        # unlike other services which just return the list...
        try:
            data = data['values']
        except (KeyError, TypeError):
            pass

        # Falsy entries (e.g. None) are skipped.
        return [obj_class(self, res, loaded=True) for res in data if res]

    def _get(self, url, response_key):
        """Get an object from collection.

        :param url: a partial URL, e.g., '/servers'
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'server'
        :returns: a single ``resource_class`` instance
        """
        body = self.client.get(url).json()
        return self.resource_class(self, body[response_key], loaded=True)

    def _head(self, url):
        """Retrieve request headers for an object.

        :param url: a partial URL, e.g., '/servers'
        :returns: True when the server answered 204 (No Content)
        """
        resp = self.client.head(url)
        return resp.status_code == 204

    def _post(self, url, json, response_key, return_raw=False):
        """Create an object.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :param return_raw: flag to force returning raw JSON instead of
            Python object of self.resource_class
        """
        body = self.client.post(url, json=json).json()
        if return_raw:
            return body[response_key]
        return self.resource_class(self, body[response_key])

    def _put(self, url, json=None, response_key=None):
        """Update an object with PUT method.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        :returns: a ``resource_class`` instance, or None when the server
            sends no body back
        """
        resp = self.client.put(url, json=json)
        # PUT requests may not return a body
        if resp.content:
            body = resp.json()
            if response_key is not None:
                return self.resource_class(self, body[response_key])
            else:
                return self.resource_class(self, body)

    def _patch(self, url, json=None, response_key=None):
        """Update an object with PATCH method.

        :param url: a partial URL, e.g., '/servers'
        :param json: data that will be encoded as JSON and passed in POST
            request (GET will be sent by default)
        :param response_key: the key to be looked up in response dictionary,
            e.g., 'servers'
        """
        body = self.client.patch(url, json=json).json()
        if response_key is not None:
            return self.resource_class(self, body[response_key])
        else:
            return self.resource_class(self, body)

    def _delete(self, url):
        """Delete an object.

        :param url: a partial URL, e.g., '/servers/my-server'
        :returns: the raw response object from the client
        """
        return self.client.delete(url)
@six.add_metaclass(abc.ABCMeta)
class ManagerWithFind(BaseManager):
    """Manager with additional `find()`/`findall()` methods."""

    @abc.abstractmethod
    def list(self):
        pass

    def find(self, **kwargs):
        """Find a single item with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side.

        :raises exceptions.NotFound: when nothing matches
        :raises exceptions.NoUniqueMatch: when several items match
        """
        matches = self.findall(**kwargs)
        if not matches:
            msg = _("No %(name)s matching %(args)s.") % {
                'name': self.resource_class.__name__,
                'args': kwargs
            }
            raise exceptions.NotFound(msg)
        if len(matches) > 1:
            raise exceptions.NoUniqueMatch()
        return matches[0]

    def findall(self, **kwargs):
        """Find all items with attributes matching ``**kwargs``.

        This isn't very efficient: it loads the entire list then filters on
        the Python side.  Items lacking any of the requested attributes are
        silently skipped.
        """
        found = []
        criteria = kwargs.items()
        for candidate in self.list():
            try:
                if all(getattr(candidate, attr) == value
                       for (attr, value) in criteria):
                    found.append(candidate)
            except AttributeError:
                continue
        return found
class CrudManager(BaseManager):
    """Base manager class for manipulating entities.

    Children of this class are expected to define a `collection_key` and
    `key`.

    - `collection_key`: Usually a plural noun by convention (e.g.
      `entities`); used to refer collections in both URL's (e.g.
      `/v3/entities`) and JSON objects containing a list of member
      resources (e.g. `{'entities': [{}, {}, {}]}`).
    - `key`: Usually a singular noun by convention (e.g. `entity`); used to
      refer to an individual member of the collection.
    """
    collection_key = None
    key = None

    def build_url(self, base_url=None, **kwargs):
        """Builds a resource URL for the given kwargs.

        Given an example collection where `collection_key = 'entities'` and
        `key = 'entity'`, the following URL's could be generated.

        By default, the URL will represent a collection of entities, e.g.::

            /entities

        If kwargs contains an `entity_id`, then the URL will represent a
        specific member, e.g.::

            /entities/{entity_id}

        :param base_url: if provided, the generated URL will be appended to it
        """
        url = base_url if base_url is not None else ''

        url += '/%s' % self.collection_key

        # do we have a specific entity?
        entity_id = kwargs.get('%s_id' % self.key)
        if entity_id is not None:
            url += '/%s' % entity_id

        return url

    def _filter_kwargs(self, kwargs):
        """Drop null values and handle ids.

        Entries whose value is None are removed; entries whose value is a
        Resource are replaced by a '<name>_id' entry holding the resource id.
        """
        for key, ref in six.iteritems(kwargs.copy()):
            if ref is None:
                kwargs.pop(key)
            else:
                if isinstance(ref, Resource):
                    kwargs.pop(key)
                    kwargs['%s_id' % key] = getid(ref)
        return kwargs

    def create(self, **kwargs):
        """Create a new member of the collection from ``kwargs``."""
        kwargs = self._filter_kwargs(kwargs)
        return self._post(
            self.build_url(**kwargs),
            {self.key: kwargs},
            self.key)

    def get(self, **kwargs):
        """Fetch the member identified by ``<key>_id`` in ``kwargs``."""
        kwargs = self._filter_kwargs(kwargs)
        return self._get(
            self.build_url(**kwargs),
            self.key)

    def head(self, **kwargs):
        """Issue a HEAD request against the member/collection URL."""
        kwargs = self._filter_kwargs(kwargs)
        return self._head(self.build_url(**kwargs))

    def list(self, base_url=None, **kwargs):
        """List the collection.

        :param base_url: if provided, the generated URL will be appended to it
        """
        kwargs = self._filter_kwargs(kwargs)

        return self._list(
            '%(base_url)s%(query)s' % {
                'base_url': self.build_url(base_url=base_url, **kwargs),
                'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
            },
            self.collection_key)

    def put(self, base_url=None, **kwargs):
        """Update an element.

        :param base_url: if provided, the generated URL will be appended to it
        """
        kwargs = self._filter_kwargs(kwargs)

        return self._put(self.build_url(base_url=base_url, **kwargs))

    def update(self, **kwargs):
        """Update the member selected by ``<key>_id`` using PATCH.

        The id entry itself is stripped from the request body.
        """
        kwargs = self._filter_kwargs(kwargs)
        params = kwargs.copy()
        params.pop('%s_id' % self.key)

        return self._patch(
            self.build_url(**kwargs),
            {self.key: params},
            self.key)

    def delete(self, **kwargs):
        """Delete the member identified by ``<key>_id`` in ``kwargs``."""
        kwargs = self._filter_kwargs(kwargs)

        return self._delete(
            self.build_url(**kwargs))

    def find(self, base_url=None, **kwargs):
        """Find a single item with attributes matching ``**kwargs``.

        :param base_url: if provided, the generated URL will be appended to it
        :raises exceptions.NotFound: when nothing matches
        :raises exceptions.NoUniqueMatch: when more than one item matches
        """
        kwargs = self._filter_kwargs(kwargs)

        rl = self._list(
            '%(base_url)s%(query)s' % {
                'base_url': self.build_url(base_url=base_url, **kwargs),
                'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
            },
            self.collection_key)
        num = len(rl)

        if num == 0:
            msg = _("No %(name)s matching %(args)s.") % {
                'name': self.resource_class.__name__,
                'args': kwargs
            }
            raise exceptions.NotFound(404, msg)
        elif num > 1:
            # Instantiate the exception explicitly for consistency with
            # ManagerWithFind.find(); raising the bare class relied on
            # implicit instantiation.
            raise exceptions.NoUniqueMatch()
        else:
            return rl[0]
class Extension(HookableMixin):
    """Descriptor for a client extension module.

    Scans the wrapped module for supported hook callables and for a
    BaseManager subclass to expose as ``manager_class``.
    """
    SUPPORTED_HOOKS = ('__pre_parse_args__', '__post_parse_args__')
    manager_class = None

    def __init__(self, name, module):
        super(Extension, self).__init__()
        self.name = name
        self.module = module
        self._parse_extension_module()

    def _parse_extension_module(self):
        """Register hook members and locate the module's manager class."""
        self.manager_class = None
        for member_name, member in self.module.__dict__.items():
            if member_name in self.SUPPORTED_HOOKS:
                self.add_hook(member_name, member)
                continue
            try:
                # issubclass() raises TypeError for non-class members.
                if issubclass(member, BaseManager):
                    self.manager_class = member
            except TypeError:
                pass

    def __repr__(self):
        return "<Extension '%s'>" % self.name
class Resource(object):
    """Base class for OpenStack resources (tenant, user, etc.).

    This is pretty much just a bag for attributes.
    """

    # Subclasses set HUMAN_ID = True to expose a slugified ``human_id``
    # derived from the attribute named by NAME_ATTR.
    HUMAN_ID = False
    NAME_ATTR = 'name'

    def __init__(self, manager, info, loaded=False):
        """Populate and bind to a manager.

        :param manager: BaseManager object
        :param info: dictionary representing resource attributes
        :param loaded: prevent lazy-loading if set to True
        """
        self.manager = manager
        self._info = info
        self._add_details(info)
        self._loaded = loaded

    def __repr__(self):
        # Show all public attributes except the bound manager, sorted for
        # stable output.
        reprkeys = sorted(k
                          for k in self.__dict__.keys()
                          if k[0] != '_' and k != 'manager')
        info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
        return "<%s %s>" % (self.__class__.__name__, info)

    @property
    def human_id(self):
        """Human-readable ID which can be used for bash completion.

        Returns a slug of the NAME_ATTR attribute, or None when that
        attribute is absent or HUMAN_ID is disabled.
        """
        if self.NAME_ATTR in self.__dict__ and self.HUMAN_ID:
            return strutils.to_slug(getattr(self, self.NAME_ATTR))
        return None

    def _add_details(self, info):
        # Mirror every entry of ``info`` as an instance attribute and keep
        # the ``_info`` cache in sync.
        for (k, v) in six.iteritems(info):
            try:
                setattr(self, k, v)
                self._info[k] = v
            except AttributeError:
                # In this case we already defined the attribute on the class
                pass

    def __getattr__(self, k):
        # Invoked only when normal attribute lookup fails.
        if k not in self.__dict__:
            #NOTE(bcwaldon): disallow lazy-loading if already loaded once
            if not self.is_loaded():
                self.get()
                # Retry once after lazy-loading; get() marks the resource
                # loaded, so a second miss raises AttributeError below.
                return self.__getattr__(k)

            raise AttributeError(k)
        else:
            return self.__dict__[k]

    def get(self):
        """Support for lazy loading details.

        Some clients, such as novaclient have the option to lazy load the
        details, details which can be loaded with this function.
        """
        # set_loaded() first ... so if we have to bail, we know we tried.
        self.set_loaded(True)
        if not hasattr(self.manager, 'get'):
            return

        new = self.manager.get(self.id)
        if new:
            self._add_details(new._info)

    def __eq__(self, other):
        # Not comparable to non-Resource objects.
        if not isinstance(other, Resource):
            return NotImplemented
        # two resources of different types are not equal
        if not isinstance(other, self.__class__):
            return False
        # Prefer id comparison when both sides expose one; otherwise fall
        # back to comparing the raw attribute dictionaries.
        if hasattr(self, 'id') and hasattr(other, 'id'):
            return self.id == other.id
        return self._info == other._info

    def is_loaded(self):
        # True once details have been populated (or loaded=True was passed).
        return self._loaded

    def set_loaded(self, val):
        self._loaded = val

    def to_dict(self):
        """Return a deep copy of the raw resource attributes."""
        return copy.deepcopy(self._info)
| apache-2.0 |
ipublishing-osp/esnavi | aeskit/src/main/java/jp/co/ipublishing/aeskit/alert/models/Alert.java | 2188 | /*
* Copyright 2015 iPublishing Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.ipublishing.aeskit.alert.models;
import android.support.annotation.NonNull;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import jp.co.ipublishing.aeskit.alert.models.figure.Figure;
/**
 * Alert information interface.
 */
public interface Alert extends Serializable {
    /**
     * Returns the alert level.
     *
     * @return the alert level
     */
    @NonNull
    AlertLevel getLevel();

    /**
     * Returns the name of the area where the alert occurred.
     *
     * @return the name of the affected area
     */
    @NonNull
    String getArea();

    /**
     * Returns the subject of the alert message.
     *
     * @return the subject of the alert message
     */
    @NonNull
    String getMessageTitle();

    /**
     * Returns the body of the alert message.
     *
     * @return the body of the alert message
     */
    @NonNull
    String getMessageBody();

    /**
     * Returns the body of the headline.
     *
     * @return the body of the alert headline
     */
    @NonNull
    String getHeadlineBody();

    /**
     * Returns the time at which the alert occurred.
     *
     * @return the time of the alert
     */
    @NonNull
    Date getTime();

    /**
     * Returns the hazard areas.
     *
     * @return the list of hazard-area figures
     */
    @NonNull
    List<Figure> getWarningAreas();

    /**
     * Returns whether this alert represents an evacuation situation.
     *
     * @return {@code true} if this is an evacuation situation; {@code false} otherwise
     */
    boolean isEvacuationSituation();
}
| apache-2.0 |