max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
|---|---|---|
1,444
|
package mage.cards.t;
import java.util.UUID;
import mage.abilities.Ability;
import mage.abilities.common.SimpleActivatedAbility;
import mage.abilities.costs.common.SacrificeTargetCost;
import mage.abilities.costs.common.TapSourceCost;
import mage.abilities.effects.common.turn.AddExtraTurnControllerEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.Zone;
import mage.filter.common.FilterControlledArtifactPermanent;
import mage.target.common.TargetControlledPermanent;
/**
*
* @author jeffwadsworth
*/
/**
 * Time Sieve, a {U}{B} artifact.
 *
 * <p>{T}, Sacrifice five artifacts: Take an extra turn after this one.
 *
 * @author jeffwadsworth
 */
public final class TimeSieve extends CardImpl {

    public TimeSieve(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.ARTIFACT}, "{U}{B}");

        // {tap}, Sacrifice five artifacts: Take an extra turn after this one.
        TargetControlledPermanent sacrificeTarget = new TargetControlledPermanent(
                5, 5, new FilterControlledArtifactPermanent("five artifacts"), true);
        Ability ability = new SimpleActivatedAbility(
                Zone.BATTLEFIELD, new AddExtraTurnControllerEffect(), new TapSourceCost());
        ability.addCost(new SacrificeTargetCost(sacrificeTarget));
        this.addAbility(ability);
    }

    private TimeSieve(final TimeSieve card) {
        super(card);
    }

    @Override
    public TimeSieve copy() {
        return new TimeSieve(this);
    }
}
| 435
|
892
|
<reponame>github/advisory-database
{
"schema_version": "1.2.0",
"id": "GHSA-fcvh-7h6p-x524",
"modified": "2022-05-17T03:17:05Z",
"published": "2022-05-17T03:17:05Z",
"aliases": [
"CVE-2015-4537"
],
"details": "Lockbox in EMC Documentum D2 before 4.5 uses a hardcoded passphrase when a server lacks a D2.Lockbox file, which makes it easier for remote authenticated users to decrypt admin tickets by locating this passphrase in a decompiled D2 JAR archive.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2015-4537"
},
{
"type": "WEB",
"url": "http://seclists.org/bugtraq/2015/Aug/117"
},
{
"type": "WEB",
"url": "http://www.securitytracker.com/id/1033345"
}
],
"database_specific": {
"cwe_ids": [
"CWE-200"
],
"severity": "LOW",
"github_reviewed": false
}
}
| 424
|
416
|
<filename>sfm-reflect/src/main/java/org/simpleflatmapper/reflect/meta/IndexedColumn.java
package org.simpleflatmapper.reflect.meta;
/**
 * A column addressed by a numeric index, optionally carrying the textual
 * property used for the index, a matcher for the remaining sub-property path,
 * a match score, and whether the match was only partial.
 */
public class IndexedColumn {

    private final int indexValue;
    private final String indexProperty;
    private final PropertyNameMatcher subPropertyNameMatcher;
    private final int score;

    /** True when the index was only partially matched. */
    public final boolean partial;

    /** Convenience constructor: empty index property, non-partial match. */
    public IndexedColumn(int indexValue, PropertyNameMatcher subPropertyNameMatcher, int score) {
        this(indexValue, "", subPropertyNameMatcher, score, false);
    }

    public IndexedColumn(int indexValue, String indexProperty, PropertyNameMatcher subPropertyNameMatcher, int score, boolean partial) {
        this.indexValue = indexValue;
        this.indexProperty = indexProperty;
        this.subPropertyNameMatcher = subPropertyNameMatcher;
        this.score = score;
        this.partial = partial;
    }

    public int getIndexValue() {
        return indexValue;
    }

    public String getIndexProperty() {
        return indexProperty;
    }

    public PropertyNameMatcher getSubPropertyNameMatcher() {
        return subPropertyNameMatcher;
    }

    public int getScore() {
        return score;
    }

    /**
     * Returns a copy whose index is shifted down by {@code firstElementOffset},
     * or {@code this} unchanged when the offset is zero.
     */
    public IndexedColumn alignTo(int firstElementOffset) {
        if (firstElementOffset == 0) {
            return this;
        }
        return new IndexedColumn(indexValue - firstElementOffset, indexProperty, subPropertyNameMatcher, score, partial);
    }
}
| 480
|
4,391
|
# This sample tests that an unbound variable that is generated in
# a function does not propagate beyond that function to callers.
# NOTE: the "should generate an error" lines below are intentional --
# this is a type-checker conformance sample; do not "fix" the unbound
# names or the reveal_type() expectations.
from typing import Literal


def func1():
    # This should generate an error
    return a


# This should not.
b = func1()
tb1: Literal["Unknown"] = reveal_type(b)


def func2(val: int):
    if val < 3:
        return val
    # This should generate an error
    return a


# This should not.
c = func2(36)
tc1: Literal["int | Unknown"] = reveal_type(c)
| 165
|
892
|
<filename>advisories/unreviewed/2022/05/GHSA-622x-gpp9-6rc3/GHSA-622x-gpp9-6rc3.json
{
"schema_version": "1.2.0",
"id": "GHSA-622x-gpp9-6rc3",
"modified": "2022-05-13T01:24:43Z",
"published": "2022-05-13T01:24:43Z",
"aliases": [
"CVE-2016-5340"
],
"details": "The is_ashmem_file function in drivers/staging/android/ashmem.c in a certain Qualcomm Innovation Center (QuIC) Android patch for the Linux kernel 3.x mishandles pointer validation within the KGSL Linux Graphics Module, which allows attackers to bypass intended access restrictions by using the /ashmem string as the dentry name.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2016-5340"
},
{
"type": "WEB",
"url": "https://source.codeaurora.org/quic/la/kernel/msm-3.10/commit/?id=06e51489061e5473b4e2035c79dcf7c27a6f75a6"
},
{
"type": "WEB",
"url": "https://www.codeaurora.org/invalid-path-check-ashmem-memory-file-cve-2016-5340"
},
{
"type": "WEB",
"url": "http://source.android.com/security/bulletin/2016-10-01.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/92374"
},
{
"type": "WEB",
"url": "http://www.securitytracker.com/id/1036763"
}
],
"database_specific": {
"cwe_ids": [
"CWE-20"
],
"severity": "HIGH",
"github_reviewed": false
}
}
| 755
|
930
|
<filename>src/point_cloud.cpp
// Copyright 2017-2019, <NAME> and the Polyscope contributors. http://polyscope.run.
#include "polyscope/point_cloud.h"
#include "polyscope/file_helpers.h"
#include "polyscope/pick.h"
#include "polyscope/polyscope.h"
#include "polyscope/render/engine.h"
#include "polyscope/point_cloud_color_quantity.h"
#include "polyscope/point_cloud_scalar_quantity.h"
#include "polyscope/point_cloud_vector_quantity.h"
#include "imgui.h"
#include <fstream>
#include <iostream>
using std::cout;
using std::endl;
namespace polyscope {
// Initialize statics
// Initialize statics
const std::string PointCloud::structureTypeName = "Point Cloud";

// Constructor
// Takes ownership of the given point positions; point color, radius, and
// material are persisted settings keyed off this structure's unique prefix.
PointCloud::PointCloud(std::string name, std::vector<glm::vec3> points_)
    : QuantityStructure<PointCloud>(name, structureTypeName), points(std::move(points_)),
      pointColor(uniquePrefix() + "#pointColor", getNextUniqueColor()),
      pointRadius(uniquePrefix() + "#pointRadius", relativeValue(0.005)),
      material(uniquePrefix() + "#material", "clay") {
  cullWholeElements.setPassive(true);
}

// Helper to set uniforms
// Pushes the camera / viewport / radius uniforms shared by every point-cloud
// shader program (draw and pick alike).
void PointCloud::setPointCloudUniforms(render::ShaderProgram& p) {
  glm::mat4 P = view::getCameraPerspectiveMatrix();
  glm::mat4 Pinv = glm::inverse(P);

  p.setUniform("u_invProjMatrix", glm::value_ptr(Pinv));
  p.setUniform("u_viewport", render::engine->getCurrentViewport());

  if (pointRadiusQuantityName != "" && !pointRadiusQuantityAutoscale) {
    // special case: per-point radii are used verbatim, so ignore the radius uniform
    p.setUniform("u_pointRadius", 1.);
  } else {
    // common case
    p.setUniform("u_pointRadius", pointRadius.get().asAbsolute());
  }
}
void PointCloud::draw() {
  if (!isEnabled()) {
    return;
  }

  // If there is no dominant quantity, then this class is responsible for drawing points
  if (dominantQuantity == nullptr) {

    // Ensure we have prepared buffers (programs are built lazily)
    if (program == nullptr) {
      prepare();
    }

    // Set program uniforms
    setStructureUniforms(*program);
    setPointCloudUniforms(*program);
    program->setUniform("u_baseColor", pointColor.get());

    // Draw the actual point cloud
    program->draw();
  }

  // Draw the quantities
  for (auto& x : quantities) {
    x.second->draw();
  }
}

// Render this cloud into the pick buffer so individual points can be selected.
void PointCloud::drawPick() {
  if (!isEnabled()) {
    return;
  }

  // Ensure we have prepared buffers
  if (pickProgram == nullptr) {
    preparePick();
  }

  // Set uniforms
  setStructureUniforms(*pickProgram);
  setPointCloudUniforms(*pickProgram);

  pickProgram->draw();
}

// If no quantity is coloring the points, build the default draw program which
// shades every point with the base color. (Quantities that dominate provide
// their own program.)
void PointCloud::prepare() {
  if (dominantQuantity != nullptr) {
    return;
  }

  program = render::engine->requestShader("RAYCAST_SPHERE", addPointCloudRules({"SHADE_BASECOLOR"}));
  render::engine->setMaterial(*program, material.get());

  // Fill out the geometry data for the program
  fillGeometryBuffers(*program);
}
void PointCloud::preparePick() {
// Request pick indices
size_t pickCount = points.size();
size_t pickStart = pick::requestPickBufferRange(this, pickCount);
// Create a new pick program
pickProgram = render::engine->requestShader("RAYCAST_SPHERE", addPointCloudRules({"SPHERE_PROPAGATE_COLOR"}, true),
render::ShaderReplacementDefaults::Pick);
// Fill color buffer with packed point indices
std::vector<glm::vec3> pickColors;
for (size_t i = pickStart; i < pickStart + pickCount; i++) {
glm::vec3 val = pick::indToVec(i);
pickColors.push_back(pick::indToVec(i));
}
// Store data in buffers
fillGeometryBuffers(*pickProgram);
pickProgram->setAttribute("a_color", pickColors);
}
// Assemble the shader rule list for point-cloud programs: structure-level
// rules always apply; point-cloud-specific rules (variable radius, sphere
// culling) are appended only when requested.
std::vector<std::string> PointCloud::addPointCloudRules(std::vector<std::string> initRules, bool withPointCloud) {
  std::vector<std::string> rules = addStructureRules(initRules);

  if (withPointCloud) {
    if (!pointRadiusQuantityName.empty()) {
      rules.push_back("SPHERE_VARIABLE_SIZE");
    }
    if (wantsCullPosition()) {
      rules.push_back("SPHERE_CULLPOS_FROM_CENTER");
    }
  }

  return rules;
}
// helper
std::vector<double> PointCloud::resolvePointRadiusQuantity() {
PointCloudScalarQuantity* sizeScalarQ = nullptr;
PointCloudQuantity* sizeQ = getQuantity(pointRadiusQuantityName);
if (sizeQ != nullptr) {
sizeScalarQ = dynamic_cast<PointCloudScalarQuantity*>(sizeQ);
if (sizeScalarQ == nullptr) {
polyscope::error("Cannot populate point size from quantity [" + name + "], it is not a scalar quantity");
}
} else {
polyscope::error("Cannot populate point size from quantity [" + name + "], it does not exist");
}
std::vector<double> sizes;
if (sizeScalarQ == nullptr) {
// we failed to resolve above; populate with dummy data so we can continue processing
std::vector<double> ones(nPoints(), 1.);
sizes = ones;
} else {
sizes = sizeScalarQ->values;
}
// clamp to nonnegative and autoscale (if requested)
double max = 0;
for (double& x : sizes) {
if (!(x > 0)) x = 0; // ensure all nonnegative
max = std::fmax(max, x);
}
if (max == 0) max = 1e-6;
if (pointRadiusQuantityAutoscale) {
for (double& x : sizes) {
x /= max;
}
}
return sizes;
}
// Upload per-point geometry (positions, plus radii when a radius quantity is
// active) to the given shader program.
void PointCloud::fillGeometryBuffers(render::ShaderProgram& p) {
  p.setAttribute("a_position", points);

  if (pointRadiusQuantityName != "") {
    // Resolve the quantity
    std::vector<double> pointRadiusQuantityVals = resolvePointRadiusQuantity();
    p.setAttribute("a_pointRadius", pointRadiusQuantityVals);
  }
}

// Geometry changed: drop cached programs so buffers are rebuilt on next draw.
void PointCloud::geometryChanged() { refresh(); }

// Build the ImGui readout shown when a single point is picked: its index,
// its position, and each quantity's value at that point.
void PointCloud::buildPickUI(size_t localPickID) {
  ImGui::TextUnformatted(("#" + std::to_string(localPickID) + " ").c_str());
  ImGui::SameLine();
  ImGui::TextUnformatted(to_string(points[localPickID]).c_str());

  ImGui::Spacing();
  ImGui::Spacing();
  ImGui::Spacing();

  ImGui::Indent(20.);

  // Build GUI to show the quantities
  // NOTE(review): ImGui::Columns(2) is never reset to Columns(1) before
  // returning -- presumably the caller restores column state; confirm.
  ImGui::Columns(2);
  ImGui::SetColumnWidth(0, ImGui::GetWindowWidth() / 3);
  for (auto& x : quantities) {
    x.second->buildPickUI(localPickID);
  }

  ImGui::Indent(-20.);
}
// Structure UI: point count, base color picker, and radius slider.
void PointCloud::buildCustomUI() {
  ImGui::Text("# points: %lld", static_cast<long long int>(points.size()));
  if (ImGui::ColorEdit3("Point color", &pointColor.get()[0], ImGuiColorEditFlags_NoInputs)) {
    // route through the setter so persistence and redraw logic run
    setPointColor(getPointColor());
  }
  ImGui::SameLine();
  ImGui::PushItemWidth(70);
  if (ImGui::SliderFloat("Radius", pointRadius.get().getValuePtr(), 0.0, .1, "%.5f", 3.)) {
    pointRadius.manuallyChanged();
    requestRedraw();
  }
  ImGui::PopItemWidth();
}

// Options menu: pick a scalar quantity to drive per-point radius, and choose
// the render material.
void PointCloud::buildCustomOptionsUI() {
  if (ImGui::BeginMenu("Variable Radius")) {
    if (ImGui::MenuItem("none", nullptr, pointRadiusQuantityName == "")) clearPointRadiusQuantity();
    ImGui::Separator();
    // offer every scalar quantity as a radius source
    for (auto& q : quantities) {
      PointCloudScalarQuantity* scalarQ = dynamic_cast<PointCloudScalarQuantity*>(q.second.get());
      if (scalarQ != nullptr) {
        if (ImGui::MenuItem(scalarQ->name.c_str(), nullptr, pointRadiusQuantityName == scalarQ->name))
          setPointRadiusQuantity(scalarQ);
      }
    }
    ImGui::EndMenu();
  }

  if (render::buildMaterialOptionsGui(material.get())) {
    material.manuallyChanged();
    setMaterial(material.get()); // trigger the other updates that happen on set()
  }
}
double PointCloud::lengthScale() {
// TODO cache
// Measure length scale as twice the radius from the center of the bounding box
auto bound = boundingBox();
glm::vec3 center = 0.5f * (std::get<0>(bound) + std::get<1>(bound));
double lengthScale = 0.0;
for (glm::vec3& rawP : points) {
glm::vec3 p = glm::vec3(objectTransform.get() * glm::vec4(rawP, 1.0));
lengthScale = std::max(lengthScale, (double)glm::length2(p - center));
}
return 2 * std::sqrt(lengthScale);
}
std::tuple<glm::vec3, glm::vec3> PointCloud::boundingBox() {
glm::vec3 min = glm::vec3{1, 1, 1} * std::numeric_limits<float>::infinity();
glm::vec3 max = -glm::vec3{1, 1, 1} * std::numeric_limits<float>::infinity();
for (glm::vec3& rawP : points) {
glm::vec3 p = glm::vec3(objectTransform.get() * glm::vec4(rawP, 1.0));
min = componentwiseMin(min, p);
max = componentwiseMax(max, p);
}
return std::make_tuple(min, max);
}
std::string PointCloud::typeName() { return structureTypeName; }

// Drop cached draw/pick programs (rebuilt lazily on next draw) and refresh
// all attached quantities.
void PointCloud::refresh() {
  program.reset();
  pickProgram.reset();
  QuantityStructure<PointCloud>::refresh(); // call base class version, which refreshes quantities
}

// === Set point size from a scalar quantity
void PointCloud::setPointRadiusQuantity(PointCloudScalarQuantity* quantity, bool autoScale) {
  setPointRadiusQuantity(quantity->name, autoScale);
}

// Select the named scalar quantity as the per-point radius source. When
// autoScale is true values are normalized to max 1 before use.
void PointCloud::setPointRadiusQuantity(std::string name, bool autoScale) {
  pointRadiusQuantityName = name;
  pointRadiusQuantityAutoscale = autoScale;

  resolvePointRadiusQuantity(); // do it once, just so we fail fast if it doesn't exist

  refresh(); // TODO this is a bit overkill
}

// Stop sizing points by a quantity; revert to the uniform radius setting.
void PointCloud::clearPointRadiusQuantity() {
  pointRadiusQuantityName = "";
  refresh();
}
// === Quantities

// Quantity default methods
PointCloudQuantity::PointCloudQuantity(std::string name_, PointCloud& pointCloud_, bool dominates_)
    : Quantity<PointCloud>(name_, pointCloud_, dominates_) {}

// Default no-op; concrete quantities override to add rows to the pick UI.
void PointCloudQuantity::buildInfoGUI(size_t pointInd) {}
// === Quantity adders
// Each adder allocates the concrete quantity, registers it with this
// structure via addQuantity (which takes ownership), and returns it.

PointCloudColorQuantity* PointCloud::addColorQuantityImpl(std::string name, const std::vector<glm::vec3>& colors) {
  PointCloudColorQuantity* q = new PointCloudColorQuantity(name, colors, *this);
  addQuantity(q);
  return q;
}

PointCloudScalarQuantity* PointCloud::addScalarQuantityImpl(std::string name, const std::vector<double>& data,
                                                            DataType type) {
  PointCloudScalarQuantity* q = new PointCloudScalarQuantity(name, data, *this, type);
  addQuantity(q);
  return q;
}

PointCloudParameterizationQuantity* PointCloud::addParameterizationQuantityImpl(std::string name,
                                                                               const std::vector<glm::vec2>& param,
                                                                               ParamCoordsType type) {
  PointCloudParameterizationQuantity* q =
      new PointCloudParameterizationQuantity(name, param, type, ParamVizStyle::CHECKER, *this);
  addQuantity(q);
  return q;
}

PointCloudParameterizationQuantity*
PointCloud::addLocalParameterizationQuantityImpl(std::string name, const std::vector<glm::vec2>& param,
                                                 ParamCoordsType type) {
  PointCloudParameterizationQuantity* q =
      new PointCloudParameterizationQuantity(name, param, type, ParamVizStyle::LOCAL_CHECK, *this);
  addQuantity(q);
  return q;
}

PointCloudVectorQuantity* PointCloud::addVectorQuantityImpl(std::string name, const std::vector<glm::vec3>& vectors,
                                                            VectorType vectorType) {
  PointCloudVectorQuantity* q = new PointCloudVectorQuantity(name, vectors, *this, vectorType);
  addQuantity(q);
  return q;
}
// === Setters and getters
// Setters write through the persistence-aware members and request a redraw;
// they return `this` so calls can be chained.

PointCloud* PointCloud::setPointColor(glm::vec3 newVal) {
  pointColor = newVal;
  polyscope::requestRedraw();
  return this;
}
glm::vec3 PointCloud::getPointColor() { return pointColor.get(); }

PointCloud* PointCloud::setMaterial(std::string m) {
  material = m;
  refresh(); // material change requires rebuilding shader programs
  requestRedraw();
  return this;
}
std::string PointCloud::getMaterial() { return material.get(); }

PointCloud* PointCloud::setPointRadius(double newVal, bool isRelative) {
  pointRadius = ScaledValue<float>(newVal, isRelative);
  polyscope::requestRedraw();
  return this;
}
double PointCloud::getPointRadius() { return pointRadius.get().asAbsolute(); }
} // namespace polyscope
| 4,306
|
496
|
# -*- coding: utf-8 -*-
import moment
from .Config import g_conf
class Metadata(dict):
    """Metadata of a post or page, parsed from its front matter.

    Behaves as a plain dict whose keys are the normalized metadata fields
    (title, slug, date, layout, status, author, banner, excerpt, path,
    showfull, comment, tags, categories).
    """

    def __init__(self, fr):
        # BUGFIX: the original called dict.__init__({}) which initializes a
        # throwaway empty dict instead of this instance; initialize self.
        super().__init__()
        self["title"] = str(fr.get("title", ""))
        # slug defaults to the title when not given explicitly
        self["slug"] = str(fr.get("slug", self["title"]))
        self["date"] = moment.date(str(fr.get("date", ""))).locale(g_conf.locale)
        self["layout"] = str(fr.get("layout", "post"))
        self["status"] = str(fr.get("status", "publish"))
        self["author"] = str(fr.get("author", ""))
        self["banner"] = str(fr.get("banner", ""))
        self["excerpt"] = str(fr.get("excerpt", ""))
        self["path"] = ""
        self["showfull"] = bool(fr.get("showfull", False))
        self["comment"] = bool(fr.get("comment", True))
        # parse the included tags (unordered); `or []` guards against an
        # explicit null in the front matter
        self["tags"] = fr.get("tags", []) or []
        # parse the included categories (ordered)
        self["categories"] = fr.get("categories", []) or []
        if len(self["categories"]) == 0:
            self["categories"].append('Default')
| 490
|
2,177
|
<gh_stars>1000+
/**
*
*/package org.nutz.mvc.filter;
| 26
|
930
|
<filename>scripts/txtool/txtool/__init__.py
from util import *
from check import *
from compile import *
from tx_count import get_transaction_count
# NOTE(review): __all__ lists module-style names ('util', 'check', 'compile',
# 'generate_account', 'tx_count') that do not all match the names imported
# above (e.g. get_transaction_count is imported but not exported) -- confirm
# the intended public API against callers.
__all__ = ['util', 'check', 'compile', 'generate_account', 'tx_count']
| 71
|
6,210
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.net;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.utils.vint.VIntCoding;
import sun.misc.Unsafe;
import static org.apache.cassandra.net.MessagingService.VERSION_40;
import static org.apache.cassandra.utils.MonotonicClock.approxTime;
/**
 * Generates test {@link Message}s whose payloads are deterministic functions of the message id
 * and a seed. The first 9 payload bytes encode the id (8 bytes, native order via Unsafe) and an
 * "info" byte so they can be recovered and validated on the receiving side.
 *
 * <p>Not thread-safe: each generator owns a single {@link Random} that is re-seeded per id.
 */
abstract class MessageGenerator
{
    final long seed;
    final Random random;

    private MessageGenerator(long seed)
    {
        this.seed = seed;
        this.random = new Random();
    }

    /**
     * Builds the common message envelope for the given id. The RNG is re-seeded with
     * {@code id ^ seed} so generation is reproducible per id. Progressively rarer random masks
     * select progressively shorter expiries, so a small fraction of messages expire quickly.
     */
    Message.Builder<Object> builder(long id)
    {
        random.setSeed(id ^ seed);
        long now = approxTime.now();
        int expiresInMillis;
        int expiryMask = random.nextInt();
        if (0 == (expiryMask & 0xffff)) expiresInMillis = 2;
        else if (0 == (expiryMask & 0xfff)) expiresInMillis = 10;
        else if (0 == (expiryMask & 0xff)) expiresInMillis = 100;
        else if (0 == (expiryMask & 0xf)) expiresInMillis = 1000;
        else expiresInMillis = 60 * 1000;
        long expiresInNanos = TimeUnit.MILLISECONDS.toNanos((expiresInMillis / 2) + random.nextInt(expiresInMillis / 2));
        return Message.builder(Verb._TEST_2, null)
                      .withId(id)
                      .withCreatedAt(now)
                      .withExpiresAt(now + expiresInNanos); // don't expire for now
    }

    public int uniformInt(int limit)
    {
        return random.nextInt(limit);
    }

    // generate a Message<?> with the provided id and with both id and info encoded in its payload
    abstract Message<?> generate(long id, byte info);

    abstract MessageGenerator copy();

    /** Generates payloads with uniformly distributed sizes in [minSize, maxSize). */
    static final class UniformPayloadGenerator extends MessageGenerator
    {
        final int minSize;
        final int maxSize;
        final byte[] fillWithBytes;

        UniformPayloadGenerator(long seed, int minSize, int maxSize)
        {
            super(seed);
            // at least 9 bytes are needed for the id + info header
            this.minSize = Math.max(9, minSize);
            this.maxSize = Math.max(9, maxSize);
            this.fillWithBytes = new byte[32];
            random.setSeed(seed);
            random.nextBytes(fillWithBytes);
        }

        Message<?> generate(long id, byte info)
        {
            Message.Builder<Object> builder = builder(id);
            // BUGFIX: Random.nextInt(0) throws IllegalArgumentException; when
            // minSize == maxSize the payload size is exactly minSize
            int sizeRange = maxSize - minSize;
            byte[] payload = new byte[minSize + (sizeRange > 0 ? random.nextInt(sizeRange) : 0)];
            ByteBuffer wrapped = ByteBuffer.wrap(payload);
            setId(payload, id);
            payload[8] = info;
            wrapped.position(9);
            // fill the rest of the payload with the seeded filler pattern
            while (wrapped.hasRemaining())
                wrapped.put(fillWithBytes, 0, Math.min(fillWithBytes.length, wrapped.remaining()));
            builder.withPayload(payload);
            return builder.build();
        }

        MessageGenerator copy()
        {
            return new UniformPayloadGenerator(seed, minSize, maxSize);
        }
    }

    /** Reads the encoded id back out of a payload's first 8 bytes. */
    static long getId(byte[] payload)
    {
        return unsafe.getLong(payload, BYTE_ARRAY_BASE_OFFSET);
    }

    /** Reads the info byte back out of a payload. */
    static byte getInfo(byte[] payload)
    {
        return payload[8];
    }

    private static void setId(byte[] payload, long id)
    {
        unsafe.putLong(payload, BYTE_ARRAY_BASE_OFFSET, id);
    }

    /** Decoded header of a generated message: payload length, id, and info byte. */
    static class Header
    {
        public final int length;
        public final long id;
        public final byte info;

        Header(int length, long id, byte info)
        {
            this.length = length;
            this.id = id;
            this.info = info;
        }

        /**
         * Reads the remainder of the payload, reconstructing the 9-byte prefix from this header
         * so the result matches what the generator produced.
         */
        public byte[] read(DataInputPlus in, int length, int messagingVersion) throws IOException
        {
            byte[] result = new byte[Math.max(9, length)];
            setId(result, id);
            result[8] = info;
            in.readFully(result, 9, Math.max(0, length - 9));
            return result;
        }
    }

    /**
     * Reads a payload header. Pre-4.0 lengths are fixed 32-bit ints; 4.0+ uses unsigned vints.
     * The id was written via Unsafe in native byte order, so it is byte-reversed on
     * little-endian hosts to recover the original value.
     */
    static Header readHeader(DataInputPlus in, int messagingVersion) throws IOException
    {
        int length = messagingVersion < VERSION_40
                     ? in.readInt()
                     : (int) in.readUnsignedVInt();
        long id = in.readLong();
        if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN)
            id = Long.reverseBytes(id);
        byte info = in.readByte();
        return new Header(length, id, info);
    }

    /** Writes the payload length using the wire format of the given messaging version. */
    static void writeLength(byte[] payload, DataOutputPlus out, int messagingVersion) throws IOException
    {
        if (messagingVersion < VERSION_40)
            out.writeInt(payload.length);
        else
            out.writeUnsignedVInt(payload.length);
    }

    /** Serialized size of the payload including its length prefix. */
    static long serializedSize(byte[] payload, int messagingVersion)
    {
        return payload.length + (messagingVersion < VERSION_40 ? 4 : VIntCoding.computeUnsignedVIntSize(payload.length));
    }

    private static final Unsafe unsafe;
    static
    {
        try
        {
            Field field = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
            field.setAccessible(true);
            unsafe = (sun.misc.Unsafe) field.get(null);
        }
        catch (Exception e)
        {
            throw new AssertionError(e);
        }
    }
    private static final long BYTE_ARRAY_BASE_OFFSET = unsafe.arrayBaseOffset(byte[].class);
}
| 2,577
|
3,301
|
<gh_stars>1000+
package com.alibaba.alink.params.classification;
/**
 * Parameters of the logistic regression predictor.
 *
 * <p>All parameters are inherited from {@link LinearModelMapperParams}; this interface exists
 * only to give the predictor its own parameter type.
 */
public interface LogisticRegressionPredictParams<T> extends LinearModelMapperParams <T> {
}
| 64
|
4,816
|
<filename>include/retdec/fileformat/types/visual_basic/visual_basic_structures.h
/**
* @file include/retdec/fileformat/types/visual_basic/visual_basic_structures.h
* @brief Visual basic metadata structures.
* @copyright (c) 2017 Avast Software, licensed under the MIT license
*/
#ifndef RETDEC_FILEFORMAT_TYPES_VISUAL_BASIC_VISUAL_BASIC_STRUCTURES_H
#define RETDEC_FILEFORMAT_TYPES_VISUAL_BASIC_VISUAL_BASIC_STRUCTURES_H
#include <string>
namespace retdec {
namespace fileformat {
constexpr std::size_t VBHEADER_SIGNATURE = 0x21354256;  ///< "VB5!" as a little-endian dword
constexpr std::size_t VB_MAX_STRING_LEN = 100;          ///< cap when reading embedded strings

/// Kind of an external-table entry (imported vs. internal reference).
enum class VBExternTableEntryType
{
	internal = 0x6,
	external = 0x7
};

struct VBHeader
{
	std::uint32_t signature;              ///< "VB5!" signature
	std::uint16_t runtimeBuild;           ///< runtime flag
	std::uint8_t languageDLL[14];         ///< language DLL
	std::uint8_t backupLanguageDLL[14];   ///< backup language DLL
	std::uint16_t runtimeDLLVersion;      ///< version of the runtime DLL
	std::uint32_t LCID1;                  ///< primary LCID
	std::uint32_t LCID2;                  ///< backup LCID
	std::uint32_t subMainAddr;            ///< sub main code address
	std::uint32_t projectInfoAddr;        ///< ProjectInfo address
	std::uint32_t MDLIntObjsFlags;        ///< VB control flags for IDs < 32
	std::uint32_t MDLIntObjsFlags2;       ///< VB control flags for IDs > 32
	std::uint32_t threadFlags;            ///< thread flags
	std::uint32_t nThreads;               ///< number of threads to support in pool
	std::uint16_t nForms;                 ///< number of forms in this application
	std::uint16_t nExternals;             ///< number of external OCX components
	std::uint32_t nThunks;                ///< number of thunks to create
	std::uint32_t GUITableAddr;           ///< GUITable address
	std::uint32_t externalTableAddr;      ///< ExternalTable address
	std::uint32_t COMRegisterDataAddr;    ///< ComRegisterData address
	std::uint32_t projExeNameOffset;      ///< offset to the string containing EXE filename
	std::uint32_t projDescOffset;         ///< offset to the string containing project's description
	std::uint32_t helpFileOffset;         ///< offset to the string containing name of the Help file
	std::uint32_t projNameOffset;         ///< offset to the string containing project's name

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(signature) + sizeof(runtimeBuild) + sizeof(languageDLL) +
			sizeof(backupLanguageDLL) + sizeof(runtimeDLLVersion) + sizeof(LCID1) +
			sizeof(LCID2) + sizeof(subMainAddr) + sizeof(projectInfoAddr) +
			sizeof(MDLIntObjsFlags) + sizeof(MDLIntObjsFlags2) + sizeof(threadFlags) +
			sizeof(nThreads) + sizeof(nForms) + sizeof(nExternals) +
			sizeof(nThunks) + sizeof(GUITableAddr) + sizeof(externalTableAddr) +
			sizeof(COMRegisterDataAddr) + sizeof(projExeNameOffset) + sizeof(projDescOffset) +
			sizeof(helpFileOffset) + sizeof(projNameOffset);
	}
};

struct VBProjInfo
{
	std::uint32_t version;              ///< 5.00 in Hex (0x1F4), version
	std::uint32_t objectTableAddr;      ///< Object table address
	std::uint32_t null;                 ///< unused value after compilation
	std::uint32_t codeStartAddr;        ///< start of code address
	std::uint32_t codeEndAddr;          ///< end of code address
	std::uint32_t dataSize;             ///< size of VB object structures
	std::uint32_t threadSpaceAddr;      ///< address of address of thread object
	std::uint32_t exHandlerAddr;        ///< VBA exception handler address
	std::uint32_t nativeCodeAddr;       ///< .DATA section address
	std::uint8_t pathInformation[528];  ///< path and id string, <SP6
	std::uint32_t externalTableAddr;    ///< External table address
	std::uint32_t nExternals;           ///< number of external OCX components

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(version) + sizeof(objectTableAddr) + sizeof(null)
			+ sizeof(codeStartAddr) + sizeof(codeEndAddr) + sizeof(dataSize)
			+ sizeof(threadSpaceAddr) + sizeof(exHandlerAddr) + sizeof(nativeCodeAddr)
			+ sizeof(pathInformation) + sizeof(externalTableAddr) + sizeof(nExternals);
	}
};

struct VBObjectTable
{
	std::uint32_t null1;                 ///< null
	std::uint32_t execCOMAddr;           ///< VB Project Exec COM address
	std::uint32_t projecInfo2Addr;       ///< Project Info 2 address
	std::uint32_t reserved;              ///< reserved
	std::uint32_t null2;                 ///< null
	std::uint32_t projectObjectAddr;     ///< In-memory data address
	std::uint8_t objectGUID[16];         ///< Object table GUID
	std::uint16_t flagsCompileState;     ///< internal compilation flags
	std::uint16_t nObjects;              ///< number of objects
	std::uint16_t nCompiledObjects;      ///< number of compiled objects
	std::uint16_t nUsedObjects;          ///< number of used objects
	std::uint32_t objectDescriptorsAddr; ///< object descriptors address
	std::uint32_t IDE1;                  ///< IDE1 internals
	std::uint32_t IDE2;                  ///< IDE2 internals
	std::uint32_t IDE3;                  ///< IDE3 internals
	std::uint32_t projectNameAddr;       ///< project name address
	std::uint32_t LCID1;                 ///< LCID
	std::uint32_t LCID2;                 ///< backup LCID
	std::uint32_t IDE4;                  ///< IDE4 internals
	std::uint32_t templateVesion;        ///< template version

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(null1) + sizeof(execCOMAddr) + sizeof(projecInfo2Addr) + sizeof(reserved) +
			sizeof(null2) + sizeof(projectObjectAddr) + sizeof(objectGUID) + sizeof(flagsCompileState) +
			sizeof(nObjects) + sizeof(nCompiledObjects) + sizeof(nUsedObjects) +
			sizeof(objectDescriptorsAddr) + sizeof(IDE1) + sizeof(IDE2) + sizeof(IDE3) +
			sizeof(projectNameAddr) + sizeof(LCID1) + sizeof(LCID2) + sizeof(IDE4) +
			sizeof(templateVesion);
	}
};

struct VBPublicObjectDescriptor
{
	std::uint32_t objectInfoAddr;     ///< Object info of this object address
	std::uint32_t reserved;           ///< reserved
	std::uint32_t publicBytesAddr;    ///< public integers address
	std::uint32_t staticBytesAddr;    ///< static integers address
	std::uint32_t modulePublicAddr;   ///< public DATA section address
	std::uint32_t moduleStaticAddr;   ///< static DATA section address
	std::uint32_t objectNameAddr;     ///< object name address
	std::uint32_t nMethods;           ///< number of methods
	std::uint32_t methodNamesAddr;    ///< method names array address
	std::uint32_t staticVarsCopyAddr; ///< static variables copy destination offset
	std::uint32_t objectType;         ///< object type flags
	std::uint32_t null;               ///< null

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(objectInfoAddr) + sizeof(reserved) + sizeof(publicBytesAddr) +
			sizeof(staticBytesAddr) + sizeof(modulePublicAddr) + sizeof(moduleStaticAddr) +
			sizeof(objectNameAddr) + sizeof(nMethods) + sizeof(methodNamesAddr) +
			sizeof(staticVarsCopyAddr) + sizeof(objectType) + sizeof(null);
	}
};

struct VBExternTableEntry
{
	std::uint32_t type;           ///< import type (see VBExternTableEntryType)
	std::uint32_t importDataAddr; ///< import data address

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return sizeof(type) + sizeof(importDataAddr);
	}
};

struct VBExternTableEntryData
{
	std::uint32_t moduleNameAddr; ///< module name address
	std::uint32_t apiNameAddr;    ///< api name address

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return sizeof(moduleNameAddr) + sizeof(apiNameAddr);
	}
};

struct VBCOMRData
{
	std::uint32_t regInfoOffset;  ///< Registration Info offset
	std::uint32_t projNameOffset; ///< Project/TypeLib name offset
	std::uint32_t helpFileOffset; ///< Help file offset
	std::uint32_t projDescOffset; ///< Project description offset
	std::uint8_t projCLSID[16];   ///< Project/TypeLib CLSID
	std::uint32_t projTlbLCID;    ///< TypeLib library LCID
	std::uint16_t unknown;        ///< unknown
	std::uint16_t tlbVerMajor;    ///< TypeLib major version
	std::uint16_t tlbVerMinor;    ///< TypeLib minor version

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(regInfoOffset) + sizeof(projNameOffset) + sizeof(helpFileOffset) +
			sizeof(projDescOffset) + sizeof(projCLSID) + sizeof(projTlbLCID) +
			sizeof(unknown) + sizeof(tlbVerMajor) + sizeof(tlbVerMinor);
	}
};

struct VBCOMRInfo
{
	std::uint32_t ifInfoOffset;       ///< Offset to COM Interface Info
	std::uint32_t objNameOffset;      ///< Offset to object name
	std::uint32_t objDescOffset;      ///< Offset to object description
	std::uint32_t instancing;         ///< Instancing mode
	std::uint32_t objID;              ///< Object ID within project
	std::uint8_t objCLSID[16];        ///< Object CLSID
	std::uint32_t isInterfaceFlag;    ///< Specifies whether Interface CLSID is valid
	std::uint32_t ifCLSIDOffset;      ///< Interface CLSID
	std::uint32_t eventCLSIDOffset;   ///< Event CLSID
	std::uint32_t hasEvents;          ///< Specifies whether Event CLSID is valid
	std::uint32_t olemicsFlags;       ///< Status
	std::uint8_t classType;           ///< Class Type
	std::uint8_t objectType;          ///< Object Type
	std::uint16_t toolboxBitmap32;    ///< Control Bitmap ID in toolbox
	std::uint16_t defaultIcon;        ///< Minimized icon of control window
	std::uint16_t isDesignerFlag;     ///< Specifies whether Designer Data offset is valid
	std::uint32_t designerDataOffset; ///< Offset to Designer Data

	/// On-disk size of the structure (sum of member sizes, no padding).
	static std::size_t structureSize()
	{
		return
			sizeof(ifInfoOffset) + sizeof(objNameOffset) + sizeof(objDescOffset) +
			sizeof(instancing) + sizeof(objID) + sizeof(objCLSID) + sizeof(isInterfaceFlag) +
			sizeof(ifCLSIDOffset) + sizeof(eventCLSIDOffset) + sizeof(hasEvents) +
			sizeof(olemicsFlags) + sizeof(classType) + sizeof(objectType) +
			sizeof(toolboxBitmap32) + sizeof(defaultIcon) + sizeof(isDesignerFlag) +
			sizeof(designerDataOffset);
	}
};
} // namespace fileformat
} // namespace retdec
#endif
| 4,731
|
340
|
<reponame>cloudnoize/concord-bft<filename>bftengine/src/bftengine/ClientsManager.hpp<gh_stars>100-1000
// Concord
//
// Copyright (c) 2018-2021 VMware, Inc. All Rights Reserved.
//
// This product is licensed to you under the Apache 2.0 license (the "License"). You may not use this product except in
// compliance with the Apache 2.0 License.
//
// This product may include a number of subcomponents with separate copyright notices and license terms. Your use of
// these subcomponents is subject to the terms and conditions of the subcomponent's license, as noted in the LICENSE
// file.
#pragma once
#include "PrimitiveTypes.hpp"
#include "TimeUtils.hpp"
#include "bftengine/ReservedPagesClient.hpp"
#include "Metrics.hpp"
#include "IPendingRequest.hpp"
#include "bftengine/IKeyExchanger.hpp"
#include "PersistentStorage.hpp"
#include "ReplicaSpecificInfoManager.hpp"
#include <map>
#include <set>
#include <unordered_map>
#include <memory>
#include <queue>
namespace bftEngine {
class IStateTransfer;
namespace impl {
class ClientReplyMsg;
class ClientRequestMsg;
// Keeps track of Client IDs, public keys, and pending requests and replies. Supports saving and loading client public
// keys and pending reply messages to the reserved pages mechanism.
//
// Not thread-safe.
class ClientsManager : public ResPagesClient<ClientsManager>, public IPendingRequest, public IClientPublicKeyStore {
public:
// As preconditions to this constructor:
// - The ReplicaConfig singleton (i.e. ReplicaConfig::instance()) must be initialized with the relevant
// configuration.
// - The reserved pages mechanism must be initialized and usable.
// - The global logger CL_MNGR must be initialized.
// Behavior is undefined if any of these preconditions are not met. Behavior is also undefined if proxyClients,
// externalClients, and internalClients are all empty.
// Additionally, all current and future behavior of the constructed ClientsManager object becomes undefined if any of
// the following conditions occur:
// - The reserved pages mechanism stops being usable.
// - The concordMetrics::Component object referenced by metrics is destroyed.
// - The global logger CL_MNGR is destroyed.
ClientsManager(const std::set<NodeIdType>& proxyClients,
const std::set<NodeIdType>& externalClients,
const std::set<NodeIdType>& internalClients,
concordMetrics::Component& metrics);
ClientsManager(std::shared_ptr<PersistentStorage> ps,
const std::set<NodeIdType>& proxyClients,
const std::set<NodeIdType>& externalClients,
const std::set<NodeIdType>& internalClients,
concordMetrics::Component& metrics);
uint32_t numberOfRequiredReservedPages() const { return clientIds_.size() * reservedPagesPerClient_; }
// Loads any available client public keys and client reply records from the reserved pages. Saves any client public
// keys loaded from the reserved pages to the KeyExchangeManager singleton. Automatically deletes the oldest reply
// record for a client if a reply message is found in the reserved pages for that client but the ClientsManager
// already has a number of reply records for that client equalling or exceeding the maximum client batch size that
// was configured at the time of this ClientManager's construction (or 1 if client batching was disabled). If the
// ClientsManager already has existing reply records matching the client ID and sequence number of a reply found in
// the reserved pages, the existing record will be overwritten. Automatically deletes any request records for a given
// client with sequence numbers less than or equal to the sequence number of a reply to that client found in the
// reserved pages. As a precondition to this function, the KeyExchangeManager singleton
// (KeyExchangeManager::instance()) must be fully initialized and usable. Behavior is undefined if it is not, and is
// also undefined if the applicable reserved pages contain malformed data.
void loadInfoFromReservedPages();
// Replies
// Returns true if clientId belongs to a valid client and this ClientsManager currently has a record for a reply to
// that client with ID reqSeqNum. Returns false otherwise.
// TODO(GG): make sure that ReqId is based on time (and ignore requests with time that does
// not make sense (too high) - this will prevent some potential attacks)
bool hasReply(NodeIdType clientId, ReqId reqSeqNum) const;
bool isValidClient(NodeIdType clientId) const { return clientIds_.find(clientId) != clientIds_.end(); }
// First, if this ClientsManager has a number of reply records for the given clientId equalling or exceeding the
// maximum client batch size configured at the time of this ClientManager's construction (or 1 if client batching was
// not enabled), deletes the oldest such record. Then, a ClientReplyMsg is allocated with the given sequence number
// and payload, and a copy of the message is saved to the reserved pages (overwriting any existing reply for clientId
// in the reserved pages), and this ClientManager adds a record for this reply (potentially replacing any existing
// record for the given sequence number). Returns the allocated ClientReplyMsg. Behavior is undefined for all of the
// following cases:
// - clientId does not belong to a valid client.
// - The number of reply records this ClientsManager has for the given client is above the maximum even after the
// oldest one is deleted.
// - The size of the allocated reply message exceeds the maximum reply size that was configured at the time of this
// ClientsManager's construction.
std::unique_ptr<ClientReplyMsg> allocateNewReplyMsgAndWriteToStorage(NodeIdType clientId,
ReqId requestSeqNum,
uint16_t currentPrimaryId,
char* reply,
uint32_t replyLength,
uint32_t rsiLength);
// Loads a client reply message from the reserved pages, and allocates and returns a ClientReplyMsg containing the
// loaded message. Returns a null pointer if the configuration recorded at the time of this ClientManager's
// construction enabled client batching with a maximum batch size greater than 1 and the message loaded from the
// reserved pages has a sequence number not matching requestSeqNum. Behavior is undefined for all of the following
// cases:
// - clientId does not belong to a valid client.
// - The reserved pages do not contain client reply message data of the expected format for clientId.
// - The configuration recorded at the time of this ClientsManager's construction did not enable client batching or
// enabled it with a maximum batch size of 1, but the sequence number of the reply loaded from the reserved pages
// does not match requestSeqNum.
std::unique_ptr<ClientReplyMsg> allocateReplyFromSavedOne(NodeIdType clientId,
ReqId requestSeqNum,
uint16_t currentPrimaryId);
// Requests
// Returns true if there is a valid client with ID clientId and this ClientsManager currently has a recorded request
// with ID reqSeqNum from that client; otherwise returns false.
bool isClientRequestInProcess(NodeIdType clientId, ReqId reqSeqNum) const;
// Returns true IFF there is no pending requests for clientId, and reqSeqNum can become the new pending request, that
// is, if all of the following are true:
// - clientId belongs to a valid client.
// - The number of requests this ClientsManager currently has recorded for that client is not exactly equal to the
// maximum client batch size configured at the time of this ClientsManager's construction (or 1 if client batching
// was not enabled).
// - This ClientsManager does not already have any request or reply associated with that client recorded with ID
// matching reqSeqNum.
// otherwise returns false.
bool canBecomePending(NodeIdType clientId, ReqId reqSeqNum) const;
// Returns true if there is a valid client with ID clientId, this ClientsManager currently has a recorded request with
// ID reqSeqNum from that client, and that request has not been marked as committed; otherwise returns false.
bool isPending(NodeIdType clientId, ReqId reqSeqNum) const override;
// Adds a record for the request with reqSeqNum from the client with the given clientId (if a record for that request
// does not already exist). Behavior is undefined if clientId does not belong to a valid client.
void addPendingRequest(NodeIdType clientId, ReqId reqSeqNum, const std::string& cid);
// Mark a request with ID reqSequenceNum that this ClientsManager currently has recorded as committed (does nothing if
// there is no existing record for reqSequenceNum). Behavior is undefined if clientId does not belong to a valid
// client.
void markRequestAsCommitted(NodeIdType clientId, ReqId reqSequenceNum);
// Removes the current request record from the given client with the greatest sequence number if both of the following
// are true:
// - That greatest sequence number is greater than reqSequenceNum.
// - There is no current request record for the client with ID clientId and sequence number reqSequenceNum.
// - The number of requests this ClientsManager currently has recorded for the given client is exactly equal to the
// global system constant maxNumOfRequestsInBatch (note this is not the same quantity as the maximum configured
// client batch size).
// Does nothing otherwise. Behavior is undefined if clientId does not belong to a valid client.
void removeRequestsOutOfBatchBounds(NodeIdType clientId, ReqId reqSequenceNum);
// If clientId belongs to a valid client and this ClientsManager currently has a request recorded with reqSeqNum,
// removes the record for that request. Does nothing otherwise.
void removePendingForExecutionRequest(NodeIdType clientId, ReqId reqSeqNum);
// Removes all request records this ClientsManager currently has.
void clearAllPendingRequests();
// Finds the request recorded by this ClientsManager at the earliest time (ignoring requests marked as committed),
// writes its CID to the reference cid, and returns what that earliest time was. Writes an empty string to cid and
// returns bftEngine::impl::MaxTime if this ClientsManager does not currently have records for any non-committed
// requests.
Time infoOfEarliestPendingRequest(std::string& cid) const;
// Log a message for each request not marked as committed that this ClientsManager currently has a record for created
// at a time more than threshold milliseconds before currTime. As a precondition to this function, the global logger
// VC_LOG must be initialized. Behavior is undefined if it is not.
void logAllPendingRequestsExceedingThreshold(const int64_t threshold, const Time& currTime) const;
// Deletes the reply to clientId this ClientsManager currently has a record for made at the earliest time. If this
// ClientsManager has any reply records to the given clientId, but none of those records have a record time, the one
// for the earliest request sequence number will be deleted. Does nothing if this ClientsManager has no reply records
// to the given clientId. Behavior is undefined if clientId does not belong to a valid client.
void deleteOldestReply(NodeIdType clientId);
bool isInternal(NodeIdType clientId) const { return internalClients_.find(clientId) != internalClients_.end(); }
// Sets/updates a client public key and persist it to the reserved pages. Behavior is undefined in the following
// cases:
// - The given NodeIdType parameter is not the ID of a valid client.
// - The given public key does not fit in a single reserved page under ClientsManager's implementation.
void setClientPublicKey(NodeIdType, const std::string& key, concord::util::crypto::KeyFormat) override;
// General
static uint32_t reservedPagesPerClient(const uint32_t& sizeOfReservedPage, const uint32_t& maxReplySize);
protected:
uint32_t getReplyFirstPageId(NodeIdType clientId) const { return getKeyPageId(clientId) + 1; }
uint32_t getKeyPageId(NodeIdType clientId) const {
return (clientId - *clientIds_.cbegin()) * reservedPagesPerClient_;
}
const ReplicaId myId_;
std::string scratchPage_;
uint32_t reservedPagesPerClient_;
struct RequestInfo {
RequestInfo() : time(MinTime) {}
RequestInfo(Time t, const std::string& c) : time(t), cid(c) {}
Time time;
std::string cid;
bool committed = false;
};
struct ClientInfo {
std::map<ReqId, RequestInfo> requestsInfo;
std::map<ReqId, Time> repliesInfo; // replyId to replyTime
std::pair<std::string, concord::util::crypto::KeyFormat> pubKey;
};
std::set<NodeIdType> proxyClients_;
std::set<NodeIdType> externalClients_;
std::set<NodeIdType> internalClients_;
std::set<NodeIdType> clientIds_;
std::unordered_map<NodeIdType, ClientInfo> clientsInfo_;
const uint32_t maxReplySize_;
const uint16_t maxNumOfReqsPerClient_;
concordMetrics::Component& metrics_;
concordMetrics::CounterHandle metric_reply_inconsistency_detected_;
concordMetrics::CounterHandle metric_removed_due_to_out_of_boundaries_;
std::unique_ptr<RsiDataManager> rsiManager_;
};
} // namespace impl
} // namespace bftEngine
| 4,058
|
1,144
|
package de.metas.payment.paypal;
import java.net.URL;
import org.adempiere.ad.trx.api.ITrxManager;
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.service.IClientDAO;
import org.compiere.model.I_C_Order;
import org.springframework.stereotype.Service;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.paypal.orders.AmountWithBreakdown;
import com.paypal.orders.ApplicationContext;
import com.paypal.orders.Order;
import com.paypal.orders.OrderRequest;
import com.paypal.orders.PurchaseUnitRequest;
import com.paypal.payments.Capture;
import de.metas.currency.Amount;
import de.metas.document.engine.DocStatus;
import de.metas.document.engine.IDocument;
import de.metas.document.engine.IDocumentBL;
import de.metas.email.EMail;
import de.metas.email.MailService;
import de.metas.email.mailboxes.ClientEMailConfig;
import de.metas.email.mailboxes.Mailbox;
import de.metas.email.templates.MailTemplateId;
import de.metas.email.templates.MailTextBuilder;
import de.metas.money.MoneyService;
import de.metas.order.IOrderDAO;
import de.metas.order.OrderId;
import de.metas.payment.paypal.client.PayPalClientExecutionContext;
import de.metas.payment.paypal.client.PayPalClientService;
import de.metas.payment.paypal.client.PayPalErrorResponse;
import de.metas.payment.paypal.client.PayPalOrder;
import de.metas.payment.paypal.client.PayPalOrderExternalId;
import de.metas.payment.paypal.client.PayPalOrderId;
import de.metas.payment.paypal.client.PayPalOrderService;
import de.metas.payment.paypal.config.PayPalConfig;
import de.metas.payment.reservation.PaymentReservation;
import de.metas.payment.reservation.PaymentReservationCapture;
import de.metas.payment.reservation.PaymentReservationId;
import de.metas.payment.reservation.PaymentReservationRepository;
import de.metas.ui.web.WebuiURLs;
import de.metas.util.Services;
import lombok.NonNull;
/*
* #%L
* de.metas.payment.paypal
* %%
* Copyright (C) 2019 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
* PayPal Service Facade
*/
@Service
public class PayPal
{
//
// services:
private final PayPalOrderService paypalOrderService;
private final PayPalClientService paypalClient;
private final PaymentReservationRepository paymentReservationRepo;
private final MailService mailService;
private final MoneyService moneyService;
//
private final IClientDAO clientsRepo = Services.get(IClientDAO.class);
private final ITrxManager trxManager = Services.get(ITrxManager.class);
private final IOrderDAO ordersRepo = Services.get(IOrderDAO.class);
@VisibleForTesting
public static final String MAIL_VAR_ApproveURL = "ApproveURL";
@VisibleForTesting
public static final String MAIL_VAR_Amount = "Amount";
@VisibleForTesting
public static final String MAIL_VAR_SalesOrderDocumentNo = "SalesOrderDocumentNo";
public PayPal(
@NonNull final PayPalOrderService paypalOrderService,
@NonNull final PayPalClientService paypalClient,
@NonNull final PaymentReservationRepository paymentReservationRepo,
@NonNull final MailService mailService,
@NonNull final MoneyService moneyService)
{
this.paypalOrderService = paypalOrderService;
this.paypalClient = paypalClient;
this.paymentReservationRepo = paymentReservationRepo;
this.mailService = mailService;
this.moneyService = moneyService;
}
private PayPalOrder updatePayPalOrderFromAPI(@NonNull final PayPalOrderExternalId externalId)
{
final PayPalOrder paypalOrder = paypalOrderService.getByExternalId(externalId);
return updatePayPalOrderFromAPI(paypalOrder);
}
public void updatePayPalOrderFromAPI(@NonNull final PayPalOrderId id)
{
PayPalOrder paypalOrder = paypalOrderService.getById(id);
paypalOrder = updatePayPalOrderFromAPI(paypalOrder);
updateReservationFromPaypalOrder(paypalOrder);
}
private PayPalOrder updatePayPalOrderFromAPI(final PayPalOrder paypalOrder)
{
final PayPalClientResponse<Order, PayPalErrorResponse> response = paypalClient.getAPIOrderById(
paypalOrder.getExternalId(),
PayPalClientExecutionContext.builder()
.paymentReservationId(paypalOrder.getPaymentReservationId())
.internalPayPalOrderId(paypalOrder.getId())
.build());
if (response.isOK())
{
return paypalOrderService.save(paypalOrder.getId(), response.getResult());
}
else
{
final PayPalErrorResponse error = response.getError();
if (error.isResourceNotFound())
{
return paypalOrderService.markRemoteDeleted(paypalOrder.getId());
}
else
{
throw response.toException();
}
}
}
public boolean hasActivePaypalOrder(@NonNull final PaymentReservationId reservationId)
{
return paypalOrderService.getByReservationIdIfExists(reservationId).isPresent();
}
public void createPayPalOrderAndRequestPayerApproval(@NonNull final PaymentReservationId reservationId)
{
final PaymentReservation reservation = paymentReservationRepo.getById(reservationId);
createPayPalOrderAndRequestPayerApproval(reservation);
}
public void createPayPalOrderAndRequestPayerApproval(@NonNull final PaymentReservation reservation)
{
//
// Make sure there is no other paypal order
final PayPalOrder existingPaypalOrder = paypalOrderService.getByReservationIdIfExists(reservation.getId()).orElse(null);
if (existingPaypalOrder != null)
{
throw new AdempiereException("A paypal order already exists: " + existingPaypalOrder.getId());
}
PayPalOrder paypalOrder = paypalOrderService.create(reservation.getId());
final PayPalConfig config = paypalClient.getConfig();
final OrderRequest apiRequest = toAPIOrderRequest(reservation, config);
final Order apiOrder = paypalClient.createOrder(
apiRequest,
createPayPalClientExecutionContext(reservation, paypalOrder));
paypalOrder = paypalOrderService.save(paypalOrder.getId(), apiOrder);
final URL payerApproveUrl = paypalOrder.getPayerApproveUrl();
sendPayerApprovalRequestEmail(reservation, payerApproveUrl, config.getOrderApproveMailTemplateId());
}
private OrderRequest toAPIOrderRequest(
@NonNull final PaymentReservation reservation,
@NonNull final PayPalConfig config)
{
final String webuiFrontendUrl = WebuiURLs.newInstance().getFrontendURL();
final String approveCallbackUrl = config.getOrderApproveCallbackUrl(webuiFrontendUrl);
return new OrderRequest()
.intent("AUTHORIZE")
.applicationContext(new ApplicationContext()
.returnUrl(approveCallbackUrl)
.cancelUrl(approveCallbackUrl))
.purchaseUnits(ImmutableList.of(
new PurchaseUnitRequest()
.amount(toAmountWithBreakdown(reservation.getAmount()))));
}
private AmountWithBreakdown toAmountWithBreakdown(final de.metas.money.Money money)
{
final Amount amount = moneyService.toAmount(money);
return new AmountWithBreakdown()
.value(amount.getAsBigDecimal().toPlainString())
.currencyCode(amount.getCurrencyCode().toThreeLetterCode());
}
private static PayPalClientExecutionContext createPayPalClientExecutionContext(
@NonNull final PaymentReservation reservation,
@NonNull final PayPalOrder paypalOrder)
{
return PayPalClientExecutionContext.builder()
.paymentReservationId(reservation.getId())
.salesOrderId(reservation.getSalesOrderId())
.internalPayPalOrderId(paypalOrder.getId())
.build();
}
private static PayPalClientExecutionContext createPayPalClientExecutionContext(
@NonNull final PaymentReservationCapture capture,
@NonNull final PayPalOrder paypalOrder)
{
return PayPalClientExecutionContext.builder()
.paymentReservationId(capture.getReservationId())
.paymentReservationCaptureId(capture.getId())
//
.salesOrderId(capture.getSalesOrderId())
.salesInvoiceId(capture.getSalesInvoiceId())
.paymentId(capture.getPaymentId())
//
.internalPayPalOrderId(paypalOrder.getId())
.build();
}
public void sendPayerApprovalRequestEmail(final PayPalOrderId payPalOrderId)
{
final PayPalOrder paypalOrder = paypalOrderService.getById(payPalOrderId);
final PaymentReservationId reservationId = paypalOrder.getPaymentReservationId();
final PaymentReservation reservation = paymentReservationRepo.getById(reservationId);
sendPayerApprovalRequestEmail(
reservation,
paypalOrder.getPayerApproveUrl(),
paypalClient.getConfig().getOrderApproveMailTemplateId());
}
private void sendPayerApprovalRequestEmail(
@NonNull final PaymentReservation reservation,
@NonNull final URL payerApproveUrl,
@NonNull final MailTemplateId mailTemplateId)
{
final MailTextBuilder mailTextBuilder = mailService.newMailTextBuilder(mailTemplateId);
mailTextBuilder.bpartnerContact(reservation.getPayerContactId());
mailTextBuilder.customVariable(MAIL_VAR_ApproveURL, payerApproveUrl.toExternalForm());
mailTextBuilder.customVariable(MAIL_VAR_Amount, moneyService.toTranslatableString(reservation.getAmount()));
final I_C_Order salesOrder = ordersRepo.getById(reservation.getSalesOrderId());
mailTextBuilder.customVariable(MAIL_VAR_SalesOrderDocumentNo, salesOrder.getDocumentNo());
final Mailbox mailbox = findMailbox(reservation);
final EMail email = mailService.createEMail(mailbox,
reservation.getPayerEmail(),
mailTextBuilder.getMailHeader(),
mailTextBuilder.getFullMailText(),
mailTextBuilder.isHtml());
trxManager.runAfterCommit(() -> mailService.send(email));
}
private Mailbox findMailbox(@NonNull final PaymentReservation reservation)
{
final ClientEMailConfig tenantEmailConfig = clientsRepo.getEMailConfigById(reservation.getClientId());
return mailService.findMailBox(tenantEmailConfig, reservation.getOrgId());
}
public void authorizePayPalReservation(@NonNull final PaymentReservationId reservationId)
{
PayPalOrder paypalOrder = paypalOrderService.getByReservationId(reservationId);
paypalOrder = updatePayPalOrderFromAPI(paypalOrder);
final PaymentReservation reservation = updateReservationFromPaypalOrder(paypalOrder);
authorizePayPalOrder(reservation);
}
public PaymentReservation onOrderApprovedByPayer(@NonNull final PayPalOrderExternalId apiOrderId)
{
final PaymentReservation reservation = updateReservationFromAPIOrder(apiOrderId);
if (reservation.getStatus().isApprovedByPayer())
{
authorizePayPalOrder(reservation);
}
return reservation;
}
private PaymentReservation updateReservationFromAPIOrder(@NonNull final PayPalOrderExternalId apiOrderId)
{
final PayPalOrder paypalOrder = updatePayPalOrderFromAPI(apiOrderId);
return updateReservationFromPaypalOrder(paypalOrder);
}
private PaymentReservation updateReservationFromPaypalOrder(@NonNull final PayPalOrder paypalOrder)
{
final PaymentReservationId reservationId = paypalOrder.getPaymentReservationId();
final PaymentReservation reservation = paymentReservationRepo.getById(reservationId);
updateReservationFromPayPalOrderNoSave(reservation, paypalOrder);
paymentReservationRepo.save(reservation);
return reservation;
}
public void authorizePayPalOrder(final PaymentReservation reservation)
{
reservation.getStatus().assertApprovedByPayer();
PayPalOrder paypalOrder = paypalOrderService.getByReservationId(reservation.getId());
final Order apiOrder = paypalClient.authorizeOrder(
paypalOrder.getExternalId(),
createPayPalClientExecutionContext(reservation, paypalOrder));
paypalOrder = paypalOrderService.save(paypalOrder.getId(), apiOrder);
if (!paypalOrder.isAuthorized())
{
throw new AdempiereException("Not authorized: " + paypalOrder);
}
reservation.changeStatusTo(paypalOrder.getStatus().toPaymentReservationStatus());
paymentReservationRepo.save(reservation);
completeSalesOrder(reservation.getSalesOrderId());
}
private void completeSalesOrder(@NonNull final OrderId salesOrderId)
{
final IOrderDAO ordersRepo = Services.get(IOrderDAO.class);
final I_C_Order order = ordersRepo.getById(salesOrderId);
final DocStatus orderDocStatus = DocStatus.ofCode(order.getDocStatus());
if (orderDocStatus.isWaitingForPayment())
{
Services.get(IDocumentBL.class).processEx(order, IDocument.ACTION_WaitComplete);
ordersRepo.save(order);
}
}
public void processCapture(
@NonNull final PaymentReservation reservation,
@NonNull final PaymentReservationCapture capture)
{
reservation.getStatus().assertCompleted();
PayPalOrder paypalOrder = paypalOrderService.getByReservationId(capture.getReservationId());
final Boolean finalCapture = null;
final Capture apiCapture = paypalClient.captureOrder(
paypalOrder.getAuthorizationId(),
moneyService.toAmount(capture.getAmount()),
finalCapture,
createPayPalClientExecutionContext(capture, paypalOrder));
paypalOrder = updatePayPalOrderFromAPI(paypalOrder.getExternalId());
updateReservationFromPayPalOrderNoSave(reservation, paypalOrder);
}
private static void updateReservationFromPayPalOrderNoSave(
@NonNull final PaymentReservation reservation,
@NonNull final PayPalOrder payPalOrder)
{
reservation.changeStatusTo(payPalOrder.getStatus().toPaymentReservationStatus());
}
}
| 4,362
|
417
|
package de.delbrueg.predictor.position;
import javax.vecmath.Point2d;
import de.delbrueg.math.Circle;
import de.delbrueg.math.Line;
import de.janquadflieg.mrracer.Utils;
/**
* This is the resurrection of CenterCircleTargetPositionPredictor.
* Now its better because we use the step_size given from the plan module, so
* at least the communication between the 2 modules is exact!
*
* ignores time_step and speed, just uses the given targetTrackPosition!
*
* @author Tim
*/
public class ExactCenterCircleTargetPositionPredictor extends TargetPositionPredictor{
@Override
public Point2d predictFuturePosition(final double target_position_weight) {
debugString = "";
// calc step size along center circle
Circle cc = data.circles.getCenterCircle();
double cr = cc.getRadius(); // circle radius
final double step_size = data.targetTrackPosition.getY(); // in meter
debugString += "| " + Utils.dTS(cr);
Line cur_drive_line = new Line(data.position, data.direction);
boolean rightCurve = !cur_drive_line.isPointOnTheLeft(cc.getCenter());
Circle.RotationDirection rot_dir = rightCurve ?
Circle.RotationDirection.clockwise :
Circle.RotationDirection.counter_clockwise;
Point2d nearestPointOnCenterCircle = cc.getNearestPointOnCircle(data.position);
Point2d nullPosTarget = cc.goAlong(nearestPointOnCenterCircle,rot_dir,step_size);
// in = -1, center = 0, out = 1
double inOutTargetPosition = (cc.getCenter().x > 0 ? -1 : 1) * (data.targetTrackPosition.getX());
debugString += "| "+Utils.dTS(inOutTargetPosition);
// calc target point (with target position)
Point2d targetPoint = cc.getScaled(1+inOutTargetPosition*data.trackWidth/(2*cr)).getNearestPointOnCircle(nullPosTarget);
debugString += "| ";
return targetPoint;
}
}
| 715
|
1,056
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.gradle.execute;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.modules.gradle.api.execute.GradleExecConfiguration;
/**
* Internal API to manipulate visible configurations. Exposed from project's Lookup to decouple UI
* and service layers.
* @author sdedic
*/
public interface ProjectConfigurationUpdater {
// incidentally matches ProjectConfigurationProvider.getConfigurations singnature
/**
* @return current list of configurations. Must include {@link GradleExecConfiguration#DEFAULT}.
*/
public @NonNull Collection<? extends GradleExecConfiguration> getConfigurations();
/**
* Shared configurations. Does not return fixed configurations that have not been customized.
* @return list of customized shared configurations.
*/
public @NonNull Collection<GradleExecConfiguration> getSharedConfigurations();
/**
* @return list of private configurations.
*/
public @NonNull Collection<GradleExecConfiguration> getPrivateConfigurations();
/**
* Returns fixed configurations defined by plugin code. Must include {@link GradleExecConfiguration#DEFAULT}.
* @return fixed configurations.
*/
public @NonNull Collection<GradleExecConfiguration> getFixedConfigurations();
/**
* Updates configuration definitions. Sets the customized shared and private configs to the supplied lists.
* Any configuration not present in the list(s) will be removed. Configuration can be made shared or private by
* moving it between the two lists.
*
* @param sharedConfigs shared configurations
* @param privateConfigs private configurations
* @throws IOException on I/O error
*/
public void setConfigurations(@NonNull List<GradleExecConfiguration> sharedConfigs, @NonNull List<GradleExecConfiguration> privateConfigs) throws IOException;
}
| 762
|
921
|
//-*****************************************************************************
//
// Copyright (c) 2009-2012,
// Sony Pictures Imageworks Inc. and
// Industrial Light & Magic, a division of Lucasfilm Entertainment Company Ltd.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Sony Pictures Imageworks, nor
// Industrial Light & Magic, nor the names of their contributors may be used
// to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//-*****************************************************************************
#ifndef Alembic_AbcGeom_IPolyMesh_h
#define Alembic_AbcGeom_IPolyMesh_h
#include <Alembic/Util/Export.h>
#include <Alembic/AbcGeom/Foundation.h>
#include <Alembic/AbcGeom/SchemaInfoDeclarations.h>
#include <Alembic/AbcGeom/IFaceSet.h>
#include <Alembic/AbcGeom/IGeomParam.h>
#include <Alembic/AbcGeom/IGeomBase.h>
namespace Alembic {
namespace AbcGeom {
namespace ALEMBIC_VERSION_NS {
//-*****************************************************************************
class ALEMBIC_EXPORT IPolyMeshSchema
    : public IGeomBaseSchema<PolyMeshSchemaInfo>
{
public:
    //! A Sample bundles the per-sample data of a polygon mesh:
    //! positions, topology (face indices/counts), optional velocities,
    //! and the self bounds.
    class Sample
    {
    public:
        typedef Sample this_type;

        // Users don't ever create this data directly.
        Sample() { reset(); }

        Abc::P3fArraySamplePtr getPositions() const { return m_positions; }
        Abc::V3fArraySamplePtr getVelocities() const { return m_velocities; }
        Abc::Int32ArraySamplePtr getFaceIndices() const { return m_indices; }
        Abc::Int32ArraySamplePtr getFaceCounts() const { return m_counts; }
        Abc::Box3d getSelfBounds() const { return m_selfBounds; }

        //! A sample is usable only when positions and the topology arrays
        //! are all present; velocities are optional.
        bool valid() const
        {
            return m_positions && m_indices && m_counts;
        }

        //! Return this sample to the empty, default state.
        void reset()
        {
            m_positions.reset();
            m_velocities.reset();
            m_indices.reset();
            m_counts.reset();
            m_selfBounds.makeEmpty();
        }

        ALEMBIC_OPERATOR_BOOL( valid() );

    protected:
        friend class IPolyMeshSchema;
        Abc::P3fArraySamplePtr m_positions;
        Abc::V3fArraySamplePtr m_velocities;
        Abc::Int32ArraySamplePtr m_indices;
        Abc::Int32ArraySamplePtr m_counts;
        Abc::Box3d m_selfBounds;
    };

    //-*************************************************************************
    // POLY MESH SCHEMA
    //-*************************************************************************
public:
    //! By convention we always define this_type in AbcGeom classes.
    //! Used by unspecified-bool-type conversion below
    typedef IPolyMeshSchema this_type;

    //-*************************************************************************
    // CONSTRUCTION, DESTRUCTION, ASSIGNMENT
    //-*************************************************************************

    //! The default constructor creates an empty IPolyMeshSchema.
    //! FIX: m_faceSetsLoaded was previously left uninitialized here, so
    //! faceset accessors on a default-constructed schema read an
    //! indeterminate bool.
    IPolyMeshSchema()
      : m_faceSetsLoaded( false )
    {}

    //! This constructor creates a new mesh reader.
    //! The first argument is the parent ICompoundProperty, from which the
    //! error handler policy for is derived. The second argument is the name
    //! of the ICompoundProperty that contains this schemas properties. The
    //! remaining optional arguments can be used to override the
    //! ErrorHandlerPolicy and to specify schema interpretation matching.
    IPolyMeshSchema( const ICompoundProperty &iParent,
                     const std::string &iName,
                     const Abc::Argument &iArg0 = Abc::Argument(),
                     const Abc::Argument &iArg1 = Abc::Argument() )
      : IGeomBaseSchema<PolyMeshSchemaInfo>( iParent, iName, iArg0, iArg1 )
      , m_faceSetsLoaded( false )
    {
        init( iArg0, iArg1 );
    }

    //! This constructor wraps an existing ICompoundProperty as the mesh
    //! reader, and the error handler policy is derived from it.
    //! The remaining optional arguments can be used to override the
    //! ErrorHandlerPolicy and to specify schema interpretation matching.
    IPolyMeshSchema( const ICompoundProperty &iProp,
                     const Abc::Argument &iArg0 = Abc::Argument(),
                     const Abc::Argument &iArg1 = Abc::Argument() )
      : IGeomBaseSchema<PolyMeshSchemaInfo>( iProp, iArg0, iArg1 )
      , m_faceSetsLoaded( false )
    {
        init( iArg0, iArg1 );
    }

    //! Copy constructor.  Deterministically initializes the faceset cache
    //! flag before delegating to operator= (defined out of line).
    IPolyMeshSchema(const IPolyMeshSchema& iCopy)
      : IGeomBaseSchema<PolyMeshSchemaInfo>()
      , m_faceSetsLoaded( false )
    {
        *this = iCopy;
    }

    const IPolyMeshSchema & operator=(const IPolyMeshSchema & rhs);

    //! Return the number of samples contained in the property.
    //! This can be any number, including zero.
    //! This returns the number of samples that were written, independently
    //! of whether or not they were constant.
    size_t getNumSamples() const
    { return m_positionsProperty.getNumSamples(); }

    //! Return the topological variance.
    //! This indicates how the mesh may change.
    MeshTopologyVariance getTopologyVariance() const;

    //! Ask if we're constant - no change in value amongst samples,
    //! regardless of the time sampling.
    bool isConstant() const { return getTopologyVariance() == kConstantTopology; }

    //! Time information.
    //! Any of the properties could be the bearer of the time
    //! sampling information, which otherwise defaults to Identity.
    AbcA::TimeSamplingPtr getTimeSampling() const
    {
        if ( m_positionsProperty.valid() )
        {
            return m_positionsProperty.getTimeSampling();
        }
        else
        {
            return getObject().getArchive().getTimeSampling( 0 );
        }
    }

    //-*************************************************************************
    //! Fill oSample with the data at the selected sample index/time.
    //! Velocities are read only when the property exists and has samples.
    void get( Sample &oSample,
              const Abc::ISampleSelector &iSS = Abc::ISampleSelector() ) const
    {
        ALEMBIC_ABC_SAFE_CALL_BEGIN( "IPolyMeshSchema::get()" );

        m_positionsProperty.get( oSample.m_positions, iSS );
        m_indicesProperty.get( oSample.m_indices, iSS );
        m_countsProperty.get( oSample.m_counts, iSS );

        m_selfBoundsProperty.get( oSample.m_selfBounds, iSS );

        if ( m_velocitiesProperty && m_velocitiesProperty.getNumSamples() > 0 )
        {
            m_velocitiesProperty.get( oSample.m_velocities, iSS );
        }

        // Could error check here.

        ALEMBIC_ABC_SAFE_CALL_END();
    }

    //! Convenience wrapper around get() that returns the sample by value.
    Sample getValue( const Abc::ISampleSelector &iSS = Abc::ISampleSelector() ) const
    {
        Sample smp;
        get( smp, iSS );
        return smp;
    }

    IV2fGeomParam getUVsParam() const
    {
        return m_uvsParam;
    }

    IN3fGeomParam getNormalsParam() const
    {
        return m_normalsParam;
    }

    Abc::IInt32ArrayProperty getFaceCountsProperty() const
    {
        return m_countsProperty;
    }

    Abc::IInt32ArrayProperty getFaceIndicesProperty() const
    {
        return m_indicesProperty;
    }

    Abc::IP3fArrayProperty getPositionsProperty() const
    {
        return m_positionsProperty;
    }

    Abc::IV3fArrayProperty getVelocitiesProperty() const
    {
        return m_velocitiesProperty;
    }

    //-*************************************************************************
    // ABC BASE MECHANISMS
    // These functions are used by Abc to deal with errors, rewrapping,
    // and so on.
    //-*************************************************************************

    //! Reset returns this function set to an empty, default
    //! state.
    //! FIX: now also drops any cached FaceSets so a reset schema is
    //! truly empty (previously m_faceSets/m_faceSetsLoaded were kept).
    void reset()
    {
        m_positionsProperty.reset();
        m_velocitiesProperty.reset();
        m_indicesProperty.reset();
        m_countsProperty.reset();

        m_uvsParam.reset();
        m_normalsParam.reset();

        m_faceSets.clear();
        m_faceSetsLoaded = false;

        IGeomBaseSchema<PolyMeshSchemaInfo>::reset();
    }

    //! Valid returns whether this function set is
    //! valid.
    bool valid() const
    {
        return ( IGeomBaseSchema<PolyMeshSchemaInfo>::valid() &&
                 m_positionsProperty.valid() &&
                 m_indicesProperty.valid() &&
                 m_countsProperty.valid() );
    }

    // FaceSet related
    //! Appends the names of any FaceSets for this PolyMesh.
    void getFaceSetNames (std::vector <std::string> & oFaceSetNames);
    IFaceSet getFaceSet( const std::string &iFaceSetName );
    bool hasFaceSet( const std::string &iFaceSetName );

    //! unspecified-bool-type operator overload.
    //! ...
    ALEMBIC_OVERRIDE_OPERATOR_BOOL( IPolyMeshSchema::valid() );

protected:
    void init( const Abc::Argument &iArg0,
               const Abc::Argument &iArg1 );

    Abc::IP3fArrayProperty m_positionsProperty;
    Abc::IV3fArrayProperty m_velocitiesProperty;
    Abc::IInt32ArrayProperty m_indicesProperty;
    Abc::IInt32ArrayProperty m_countsProperty;

    IV2fGeomParam m_uvsParam;
    IN3fGeomParam m_normalsParam;

    // FaceSets, this starts as empty until client
    // code attempts to access facesets.
    bool m_faceSetsLoaded;
    std::map <std::string, IFaceSet> m_faceSets;
    Alembic::Util::mutex m_faceSetsMutex;
    void loadFaceSetNames();
};
//-*****************************************************************************
typedef Abc::ISchemaObject<IPolyMeshSchema> IPolyMesh;
typedef Util::shared_ptr< IPolyMesh > IPolyMeshPtr;
} // End namespace ALEMBIC_VERSION_NS
using namespace ALEMBIC_VERSION_NS;
} // End namespace AbcGeom
} // End namespace Alembic
#endif
| 3,994
|
721
|
<reponame>gaybro8777/nucleus
# Copyright 2018 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes that provide the interface for writing genomics data.
`GenomicsWriter` defines the core API supported by writers, and is subclassed
directly or indirectly (via `DispatchingGenomicsWriter`) for all concrete
implementations.
`TFRecordWriter` is an implementation of the `GenomicsWriter` API for reading
`TFRecord` files. This is usable for all data types when writing data as
serialized protocol buffers.
`DispatchingGenomicsWriter` is an abstract class defined for convenience on top
of `GenomicsWriter` that supports writing to either the native file format or to
`TFRecord` files of the corresponding protocol buffer used to encode data of
that file type. The output format chosen is dependent upon the filename to which
the data are being written.
Concrete implementations for individual file types (e.g. BED, SAM, VCF, etc.)
reside in type-specific modules in this package. A general example of the write
functionality is shown below.
```python
# options is a writer-specific set of options.
options = ...
# records is an iterable of protocol buffers of the specific data type.
records = ...
with GenomicsWriterSubClass(output_path, options) as writer:
for proto in records:
writer.write(proto)
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import errno
from absl import logging
from nucleus.io.python import tfrecord_writer
class GenomicsWriter(object):
  """Abstract base class for writing genomics data.

  A GenomicsWriter only has one method, write, which writes a single
  protocol buffer to a file.  Subclasses implement `write` and `__exit__`;
  the base class supplies `__enter__` so writers work as context managers.
  """

  # NOTE(review): this Python 2-style metaclass declaration has no effect
  # under Python 3 (where the metaclass goes in the class header), so
  # @abc.abstractmethod is not enforced at instantiation time on Python 3.
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def write(self, proto):
    """Writes proto to the file.

    Args:
      proto: A protocol buffer.
    """

  def __enter__(self):
    """Enter a `with` block."""
    return self

  @abc.abstractmethod
  def __exit__(self, unused_type, unused_value, unused_traceback):
    """Exit a `with` block. Typically, this will close the file."""
class TFRecordWriter(GenomicsWriter):
  """A GenomicsWriter that appends serialized protos to a TFRecord file.

  Example usage:
    writer = TFRecordWriter('/tmp/my_output.tfrecord.gz')
    for record in records:
      writer.write(record)

  Note that TFRecord files do not need to be wrapped in a "with" block.
  """

  def __init__(self, output_path, header=None, compression_type=None):
    """Initializer.

    Args:
      output_path: str. The output path to which the records are written.
      header: An optional header for the particular data type. This can be
        useful for file types that have logical headers where some operations
        depend on that header information (e.g. VCF using its headers to
        determine type information of annotation fields).
      compression_type: Either 'ZLIB', 'GZIP', '' (uncompressed), or
        None. If None, the compression type is guessed from output_path's
        suffix.

    Raises:
      IOError: if there was any problem opening output_path for writing.
    """
    super(TFRecordWriter, self).__init__()
    self.header = header
    if compression_type is None:
      # Only a '.gz' suffix implies compression; everything else is plain.
      compression_type = 'GZIP' if output_path.endswith('.gz') else ''
    writer = tfrecord_writer.TFRecordWriter.from_file(output_path,
                                                      compression_type)
    if writer is None:
      raise IOError(errno.EIO, 'Error opening %s for writing' % output_path)
    self._writer = writer

  def write(self, proto):
    """Serializes proto and appends it to the TFRecord file."""
    self._writer.write(proto.SerializeToString())

  def __exit__(self, exit_type, exit_value, exit_traceback):
    self.close()

  def close(self):
    """Explicitly closes the underlying writer."""
    self._writer.close()
class DispatchingGenomicsWriter(GenomicsWriter):
  """A GenomicsWriter that dispatches based on the file extension.

  If '.tfrecord' is present in the filename, a TFRecordWriter is used.
  Otherwise, a native writer is.

  Sub-classes of DispatchingGenomicsWriter must define a _native_writer()
  method.
  """

  def __init__(self, output_path, **kwargs):
    """Initializer.

    Args:
      output_path: str. The output path to which the records are written.
      **kwargs: k=v named args. Keyword arguments used to instantiate the
        native writer, if applicable.
    """
    super(DispatchingGenomicsWriter, self).__init__()
    self.header = kwargs.get('header')
    use_tfrecord = '.tfrecord' in output_path
    self._writer = (
        TFRecordWriter(output_path, header=self.header)
        if use_tfrecord
        else self._native_writer(output_path, **kwargs))
    logging.info('Writing %s with %s',
                 output_path, self._writer.__class__.__name__)
    self._post_init_hook()

  @abc.abstractmethod
  def _native_writer(self, output_path, **kwargs):
    """Returns a GenomicsWriter for writing the records `natively`.

    Args:
      output_path: The path to write the records to.
      **kwargs: Zero or more keyword arguments.

    Returns:
      A GenomicsWriter.
    """

  def write(self, proto):
    """Forwards proto to the concrete underlying writer."""
    self._writer.write(proto)

  def __exit__(self, exit_type, exit_value, exit_traceback):
    self._writer.__exit__(exit_type, exit_value, exit_traceback)

  def _post_init_hook(self):
    """Hook for subclasses to run code at the end of __init__."""
| 1,858
|
320
|
import random
import string
def randomword(length: int) -> str:
    """Build a pseudo-random word of `length` lowercase ASCII letters."""
    alphabet = string.ascii_lowercase
    chars = [random.choice(alphabet) for _ in range(length)]
    return "".join(chars)
| 69
|
357
|
<filename>vmdir/include/vmdircommon_schema.h
/*
* Copyright © 2016 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#ifndef __VMDIR_COMMON_SCHEMA_H__
#define __VMDIR_COMMON_SCHEMA_H__
#ifdef __cplusplus
extern "C" {
#endif
#include <ldap_schema.h>
// Attribute-type usage categories; names mirror the LDAP attributeTypes
// USAGE keywords (userApplications, directoryOperation, ...).
typedef enum
{
    VDIR_LDAP_USER_APPLICATIONS_ATTRIBUTE = 0,
    VDIR_LDAP_DIRECTORY_OPERATION_ATTRIBUTE,
    VDIR_LDAP_DISTRIBUTED_OPERATION_ATTRIBUTE,
    VDIR_LDAP_DSA_OPERATION_ATTRIBUTE
} VDIR_LDAP_ATTRIBUTE_TYPE_USAGE;

// Object-class kinds (structural / abstract / auxiliary).
// Note: deliberately starts at 1, unlike the usage enum above.
typedef enum
{
    VDIR_LDAP_STRUCTURAL_CLASS = 1,
    VDIR_LDAP_ABSTRACT_CLASS,
    VDIR_LDAP_AUXILIARY_CLASS
} VDIR_LDAP_OBJECT_CLASS_TYPE;

// Parsed attribute-type definition.  pSource is the underlying
// libldap-parsed structure; the remaining fields are convenience views
// plus VMware-specific index configuration.
typedef struct _VDIR_LDAP_ATTRIBUTE_TYPE
{
    LDAPAttributeType* pSource;
    PSTR pszName;
    PSTR pszOid;
    PSTR pszDesc;
    PSTR* ppszAliases;      // NULL-terminated list of alternate names
    PSTR pszSyntaxOid;
    BOOLEAN bSingleValue;
    BOOLEAN bCollective;
    BOOLEAN bNoUserMod;
    BOOLEAN bObsolete;
    VDIR_LDAP_ATTRIBUTE_TYPE_USAGE usage;
    // index configuration attributes
    DWORD dwSearchFlags;
    PSTR* ppszUniqueScopes;
} VDIR_LDAP_ATTRIBUTE_TYPE, *PVDIR_LDAP_ATTRIBUTE_TYPE;

// Parsed object-class definition (name, OID, superclass, MUST/MAY lists).
typedef struct _VDIR_LDAP_OBJECT_CLASS
{
    LDAPObjectClass* pSource;
    PSTR pszName;
    PSTR pszOid;
    PSTR pszDesc;
    PSTR pszSup;            // superior (parent) object class name
    PSTR* ppszMust;
    PSTR* ppszMay;
    BOOLEAN bObsolete;
    VDIR_LDAP_OBJECT_CLASS_TYPE type;
} VDIR_LDAP_OBJECT_CLASS, *PVDIR_LDAP_OBJECT_CLASS;

// Parsed DIT content rule (per-class MUST/MAY/NOT plus auxiliary classes).
typedef struct _VDIR_LDAP_CONTENT_RULE
{
    LDAPContentRule* pSource;
    PSTR pszName;
    PSTR pszOid;
    PSTR* ppszMust;
    PSTR* ppszMay;
    PSTR* ppszNot;
    PSTR* ppszAux;
    BOOLEAN bObsolete;
} VDIR_LDAP_CONTENT_RULE, *PVDIR_LDAP_CONTENT_RULE;

// DIT structure rule — currently only the name is carried over.
typedef struct _VDIR_LDAP_STRUCTURE_RULE
{
    LDAPStructureRule* pSource;
    PSTR pszName;
    // TODO
} VDIR_LDAP_STRUCTURE_RULE, *PVDIR_LDAP_STRUCTURE_RULE;

// Name form — currently only the name is carried over.
typedef struct _VDIR_LDAP_NAME_FORM
{
    LDAPNameForm* pSource;
    PSTR pszName;
    // TODO
} VDIR_LDAP_NAME_FORM, *PVDIR_LDAP_NAME_FORM;

// Hash maps keyed by definition name (presumably — TODO confirm against
// the .c implementations).
typedef PLW_HASHMAP PVDIR_LDAP_ATTRIBUTE_TYPE_MAP;
typedef PLW_HASHMAP PVDIR_LDAP_OBJECT_CLASS_MAP;
typedef PLW_HASHMAP PVDIR_LDAP_CONTENT_RULE_MAP;
typedef PLW_HASHMAP PVDIR_LDAP_STRUCTURE_RULE_MAP;
typedef PLW_HASHMAP PVDIR_LDAP_NAME_FORM_MAP;

// A complete in-memory LDAP schema: one map per definition category.
typedef struct _VDIR_LDAP_SCHEMA
{
    PVDIR_LDAP_ATTRIBUTE_TYPE_MAP attributeTypes;
    PVDIR_LDAP_OBJECT_CLASS_MAP objectClasses;
    PVDIR_LDAP_CONTENT_RULE_MAP contentRules;
    PVDIR_LDAP_STRUCTURE_RULE_MAP structureRules;
    PVDIR_LDAP_NAME_FORM_MAP nameForms;
} VDIR_LDAP_SCHEMA, *PVDIR_LDAP_SCHEMA;

// Make sure the enum starts at 0 and has no gaps (values may be used as
// indices elsewhere).
typedef enum
{
    MOD_OP_ADD = 0,
    MOD_OP_DELETE,
    MOD_OP_REPLACE
} VDIR_LDAP_MOD_OP;

// A single LDAP modification: operation + attribute type + value list.
typedef struct _VDIR_LDAP_MOD
{
    VDIR_LDAP_MOD_OP op;
    PSTR pszType;
    PVMDIR_STRING_LIST pVals;
} VDIR_LDAP_MOD, *PVDIR_LDAP_MOD;

typedef PLW_HASHMAP PVDIR_LDAP_MOD_MAP;

// Pending modifications for one schema entry (identified by CN/DN).
typedef struct _VDIR_LDAP_SCHEMA_OBJECT_DIFF
{
    PSTR pszCN;
    PSTR pszDN;
    PVDIR_LDAP_MOD_MAP mods;
} VDIR_LDAP_SCHEMA_OBJECT_DIFF, *PVDIR_LDAP_SCHEMA_OBJECT_DIFF;

// Result of diffing two schemas: linked lists of per-entry diffs
// (presumably VDIR_LDAP_SCHEMA_OBJECT_DIFF elements — TODO confirm),
// split by add vs. modify for attributes and classes.
typedef struct _VDIR_LDAP_SCHEMA_DIFF
{
    PVDIR_LINKED_LIST attrToAdd;
    PVDIR_LINKED_LIST attrToModify;
    PVDIR_LINKED_LIST classToAdd;
    PVDIR_LINKED_LIST classToModify;
} VDIR_LDAP_SCHEMA_DIFF, *PVDIR_LDAP_SCHEMA_DIFF;
// copy.c
DWORD
VmDirLdapSchemaCopy(
PVDIR_LDAP_SCHEMA pOrgSchema,
PVDIR_LDAP_SCHEMA* ppCopySchema
);
DWORD
VmDirLdapAtDeepCopy(
PVDIR_LDAP_ATTRIBUTE_TYPE pOrgAt,
PVDIR_LDAP_ATTRIBUTE_TYPE* ppCopyAt
);
DWORD
VmDirLdapOcDeepCopy(
PVDIR_LDAP_OBJECT_CLASS pOrgOc,
PVDIR_LDAP_OBJECT_CLASS* ppCopyOc
);
DWORD
VmDirLdapCrDeepCopy(
PVDIR_LDAP_CONTENT_RULE pOrgCr,
PVDIR_LDAP_CONTENT_RULE* ppCopyCr
);
DWORD
VmDirLdapSrDeepCopy(
PVDIR_LDAP_STRUCTURE_RULE pOrgSr,
PVDIR_LDAP_STRUCTURE_RULE* ppCopySr
);
DWORD
VmDirLdapNfDeepCopy(
PVDIR_LDAP_NAME_FORM pOrgNf,
PVDIR_LDAP_NAME_FORM* ppCopyNf
);
// def.c
DWORD
VmDirLdapAtCreate(
LDAPAttributeType* pSource,
PVDIR_LDAP_ATTRIBUTE_TYPE* ppAt
);
DWORD
VmDirLdapOcCreate(
LDAPObjectClass* pSource,
PVDIR_LDAP_OBJECT_CLASS* ppOc
);
DWORD
VmDirLdapCrCreate(
LDAPContentRule* pSource,
PVDIR_LDAP_CONTENT_RULE* ppCr
);
DWORD
VmDirLdapSrCreate(
LDAPStructureRule* pSource,
PVDIR_LDAP_STRUCTURE_RULE* ppSr
);
DWORD
VmDirLdapNfCreate(
LDAPNameForm* pSource,
PVDIR_LDAP_NAME_FORM* ppNf
);
VOID
VmDirFreeLdapAt(
PVDIR_LDAP_ATTRIBUTE_TYPE pAt
);
VOID
VmDirFreeLdapOc(
PVDIR_LDAP_OBJECT_CLASS pOc
);
VOID
VmDirFreeLdapCr(
PVDIR_LDAP_CONTENT_RULE pCr
);
VOID
VmDirFreeLdapSr(
PVDIR_LDAP_STRUCTURE_RULE pSr
);
VOID
VmDirFreeLdapNf(
PVDIR_LDAP_NAME_FORM pNf
);
// diff.c
DWORD
VmDirLdapSchemaGetDiff(
PVDIR_LDAP_SCHEMA pOldSchema,
PVDIR_LDAP_SCHEMA pNewSchema,
PVDIR_LDAP_SCHEMA_DIFF* ppSchemaDiff
);
VOID
VmDirFreeLdapSchemaDiff(
PVDIR_LDAP_SCHEMA_DIFF pSchemaDiff
);
// file.c
DWORD
VmDirGetDefaultSchemaFile(
PSTR* ppszSchemaFile
);
DWORD
VmDirReadSchemaFile(
PCSTR pszSchemaFilePath,
PVMDIR_STRING_LIST* ppAtStrList,
PVMDIR_STRING_LIST* ppOcStrList,
PVMDIR_STRING_LIST* ppCrStrList,
PVMDIR_STRING_LIST* ppIdxStrList
);
// load.c
DWORD
VmDirLdapSchemaLoadStrLists(
PVDIR_LDAP_SCHEMA pSchema,
PVMDIR_STRING_LIST pAtStrList,
PVMDIR_STRING_LIST pOcStrList,
PVMDIR_STRING_LIST pCrStrList,
PVMDIR_STRING_LIST pIdxStrList
);
DWORD
VmDirLdapSchemaLoadFile(
PVDIR_LDAP_SCHEMA pSchema,
PCSTR pszSchemaFilePath
);
DWORD
VmDirLdapSchemaLoadRemoteSchema(
PVDIR_LDAP_SCHEMA pSchema,
LDAP* pLd
);
// merge.c
DWORD
VmDirLdapSchemaMerge(
PVDIR_LDAP_SCHEMA pOldSchema,
PVDIR_LDAP_SCHEMA pNewSchema,
PVDIR_LDAP_SCHEMA* ppMergedSchema
);
// parse.c
DWORD
VmDirLdapAtParseStr(
PCSTR pcszStr,
PVDIR_LDAP_ATTRIBUTE_TYPE* ppAt
);
DWORD
VmDirLdapOcParseStr(
PCSTR pcszStr,
PVDIR_LDAP_OBJECT_CLASS* ppOc
);
DWORD
VmDirLdapCrParseStr(
PCSTR pcszStr,
PVDIR_LDAP_CONTENT_RULE* ppCr
);
DWORD
VmDirLdapSrParseStr(
PCSTR pcszStr,
PVDIR_LDAP_STRUCTURE_RULE* ppSr
);
DWORD
VmDirLdapNfParseStr(
PCSTR pcszStr,
PVDIR_LDAP_NAME_FORM* ppNf
);
DWORD
VmDirLdapIdxParseStr(
PCSTR pcszStr,
PSTR* ppszAtName,
PBOOLEAN pbGlobalUniq
);
DWORD
VmDirLdapAtParseLDAPEntry(
LDAP* pLd,
LDAPMessage* pEntry,
PVDIR_LDAP_ATTRIBUTE_TYPE* ppAt
);
DWORD
VmDirLdapOcParseLDAPEntry(
LDAP* pLd,
LDAPMessage* pEntry,
PVDIR_LDAP_OBJECT_CLASS* ppOc
);
DWORD
VmDirLdapCrParseLDAPEntry(
LDAP* pLd,
LDAPMessage* pEntry,
PVDIR_LDAP_CONTENT_RULE* ppCr
);
DWORD
VmDirLdapAtToStr(
PVDIR_LDAP_ATTRIBUTE_TYPE pAt,
PSTR* ppszStr
);
DWORD
VmDirLdapOcToStr(
PVDIR_LDAP_OBJECT_CLASS pOc,
PSTR* ppszStr
);
DWORD
VmDirLdapCrToStr(
PVDIR_LDAP_CONTENT_RULE pCr,
PSTR* ppszStr
);
DWORD
VmDirLdapSrToStr(
PVDIR_LDAP_STRUCTURE_RULE pSr,
PSTR* ppszStr
);
DWORD
VmDirLdapNfToStr(
PVDIR_LDAP_NAME_FORM pNf,
PSTR* ppszStr
);
// patch.c
DWORD
VmDirPatchRemoteSchemaObjects(
LDAP* pLd,
PVDIR_LDAP_SCHEMA_DIFF pSchemaDiff
);
// resolve.c
DWORD
VmDirLdapOcResolveSup(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_OBJECT_CLASS pOc
);
// schema.c
DWORD
VmDirLdapSchemaInit(
PVDIR_LDAP_SCHEMA* ppSchema
);
DWORD
VmDirLdapSchemaAddAt(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_ATTRIBUTE_TYPE pAt
);
DWORD
VmDirLdapSchemaAddOc(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_OBJECT_CLASS pOc
);
DWORD
VmDirLdapSchemaAddCr(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_CONTENT_RULE pCr
);
DWORD
VmDirLdapSchemaAddSr(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_STRUCTURE_RULE pSr
);
DWORD
VmDirLdapSchemaAddNf(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_NAME_FORM pNf
);
DWORD
VmDirLdapSchemaAddIdx(
PVDIR_LDAP_SCHEMA pSchema,
PCSTR pszAtName,
BOOLEAN bGlobalUniq
);
DWORD
VmDirLdapSchemaResolveAndVerifyAll(
PVDIR_LDAP_SCHEMA pSchema
);
DWORD
VmDirLdapSchemaRemoveNoopData(
PVDIR_LDAP_SCHEMA pSchema
);
BOOLEAN
VmDirLdapSchemaIsEmpty(
PVDIR_LDAP_SCHEMA pSchema
);
VOID
VmDirFreeLdapSchema(
PVDIR_LDAP_SCHEMA pSchema
);
// verify.c
DWORD
VmDirLdapAtVerify(
PVDIR_LDAP_ATTRIBUTE_TYPE pAt
);
DWORD
VmDirLdapOcVerify(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_OBJECT_CLASS pOc
);
DWORD
VmDirLdapCrVerify(
PVDIR_LDAP_SCHEMA pSchema,
PVDIR_LDAP_CONTENT_RULE pCRDesc
);
//////////////////////////////////////
// Legacy support structs/functions //
//////////////////////////////////////
typedef struct _VDIR_LEGACY_SCHEMA
{
PLW_HASHMAP pAtDefStrMap;
PLW_HASHMAP pOcDefStrMap;
PLW_HASHMAP pCrDefStrMap;
PVDIR_LDAP_SCHEMA pSchema;
} VDIR_LEGACY_SCHEMA, *PVDIR_LEGACY_SCHEMA;
typedef struct _VDIR_LEGACY_SCHEMA_MOD
{
PVMDIR_STRING_LIST pDelAt;
PVMDIR_STRING_LIST pAddAt;
PVMDIR_STRING_LIST pDelOc;
PVMDIR_STRING_LIST pAddOc;
PVMDIR_STRING_LIST pDelCr;
PVMDIR_STRING_LIST pAddCr;
} VDIR_LEGACY_SCHEMA_MOD, *PVDIR_LEGACY_SCHEMA_MOD;
// legacy/legacyload.c
DWORD
VmDirLegacySchemaLoadRemoteSchema(
PVDIR_LEGACY_SCHEMA pLegacySchema,
LDAP* pLd
);
// legacy/legacypatch.c
DWORD
VmDirPatchRemoteSubSchemaSubEntry(
LDAP* pLd,
PVDIR_LDAP_SCHEMA pNewSchema
);
// legacy/legacyschema.c
DWORD
VmDirLegacySchemaInit(
PVDIR_LEGACY_SCHEMA* ppLegacySchema
);
VOID
VmDirFreeLegacySchema(
PVDIR_LEGACY_SCHEMA pLegacySchema
);
// legacy/legacyschemamod.c
DWORD
VmDirLegacySchemaModInit(
PVDIR_LEGACY_SCHEMA_MOD* ppLegacySchemaMod
);
DWORD
VmDirLegacySchemaModPopulate(
PVDIR_LEGACY_SCHEMA_MOD pLegacySchemaMod,
PVDIR_LEGACY_SCHEMA pLegacySchema,
PVDIR_LDAP_SCHEMA pNewSchema
);
VOID
VmDirFreeLegacySchemaMod(
PVDIR_LEGACY_SCHEMA_MOD pLegacySchemaMod
);
// legacy/legacyutil.c
DWORD
VmDirLdapSearchSubSchemaSubEntry(
LDAP* pLd,
LDAPMessage** ppResult,
LDAPMessage** ppEntry
);
DWORD
VmDirFixLegacySchemaDefSyntaxErr(
PSTR pszDef,
PSTR* ppszFixedDef
);
#ifdef __cplusplus
}
#endif
#endif /* __VMDIR_COMMON_SCHEMA_H__ */
| 7,290
|
311
|
<filename>test-ports/default/lang/python37/files/patch-Modules_posixmodule.c
# Add closefrom(2) support
# https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=242274
# https://bugs.python.org/issue38061
# TODO: Upstream
--- Modules/posixmodule.c.orig 2019-10-14 22:32:36 UTC
+++ Modules/posixmodule.c
@@ -7810,8 +7810,16 @@ os_closerange_impl(PyObject *module, int fd_low, int f
int i;
Py_BEGIN_ALLOW_THREADS
_Py_BEGIN_SUPPRESS_IPH
- for (i = Py_MAX(fd_low, 0); i < fd_high; i++)
- close(i);
+ fd_low = Py_MAX(fd_low, 0);
+#ifdef __FreeBSD__
+ if (fd_high >= sysconf(_SC_OPEN_MAX)) {
+ closefrom(fd_low);
+ } else
+#endif
+ {
+ for (i = fd_low; i < fd_high; i++)
+ close(i);
+ }
_Py_END_SUPPRESS_IPH
Py_END_ALLOW_THREADS
Py_RETURN_NONE;
| 409
|
6,098
|
package water.api.schemas3;
import water.Iced;
import water.api.API;
/**
 * REST schema for a "remove object" request: identifies the keyed object to
 * delete and whether the removal should cascade.
 */
public class RemoveV3 extends RequestSchemaV3<Iced, RemoveV3> {

  /** Key of the object to be removed. */
  @API(help="Object to be removed.")
  public KeyV3 key;

  /** When true, removal cascades down the object tree (input-only field). */
  @API(help="If true, removal operation will cascade down the object tree.", direction = API.Direction.INPUT)
  public boolean cascade;
}
| 107
|
471
|
package com.dtflys.test.http.model;
import com.alibaba.fastjson.annotation.JSONField;
/**
 * Test model whose JSON key is capitalized ("Username"); the fastjson
 * {@code @JSONField} annotation on the getter binds the property to that
 * exact key during serialization/deserialization.
 */
public class JsonTestUser2 {

    // Field name deliberately capitalized to mirror the JSON key.
    private String Username;

    @JSONField(name = "Username")
    public String getUsername() {
        return Username;
    }

    public void setUsername(String username) {
        Username = username;
    }
}
| 123
|
2,542
|
<filename>src/prod/src/ServiceModel/ReplicasHealthEvaluation.h
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#pragma once
namespace ServiceModel
{
    // Health evaluation emitted when replicas of a partition are unhealthy.
    // Carries the child evaluations for the unhealthy replicas, the total
    // number of replicas evaluated, and the policy threshold that applied.
    class ReplicasHealthEvaluation
        : public HealthEvaluationWithChildrenBase
    {
        DENY_COPY(ReplicasHealthEvaluation)

    public:
        ReplicasHealthEvaluation();

        // aggregatedHealthState: rolled-up health state for the replicas.
        // unhealthyEvaluations: per-replica evaluations (moved in).
        // totalCount: total number of replicas considered.
        // maxPercentUnhealthyReplicasPerPartition: policy threshold,
        //   presumably a 0-100 percentage — see health policy docs.
        ReplicasHealthEvaluation(
            FABRIC_HEALTH_STATE aggregatedHealthState,
            HealthEvaluationList && unhealthyEvaluations,
            ULONG totalCount,
            BYTE maxPercentUnhealthyReplicasPerPartition);

        ReplicasHealthEvaluation(ReplicasHealthEvaluation && other) = default;
        ReplicasHealthEvaluation & operator = (ReplicasHealthEvaluation && other) = default;

        virtual ~ReplicasHealthEvaluation();

        __declspec(property(get=get_MaxPercentUnhealthyReplicasPerPartition)) BYTE MaxPercentUnhealthyReplicasPerPartition;
        BYTE get_MaxPercentUnhealthyReplicasPerPartition() const { return maxPercentUnhealthyReplicasPerPartition_; }

        __declspec(property(get=get_TotalCount)) ULONG TotalCount;
        ULONG get_TotalCount() const { return totalCount_; }

        // Builds the human-readable description_ (defined out of line).
        virtual void SetDescription() override;

        // Conversion to/from the public FABRIC_HEALTH_EVALUATION COM struct.
        Common::ErrorCode ToPublicApi(
            __in Common::ScopedHeap & heap,
            __out FABRIC_HEALTH_EVALUATION & publicHealthEvaluation) const;
        Common::ErrorCode FromPublicApi(
            FABRIC_HEALTH_EVALUATION const & publicHealthEvaluation);

        FABRIC_FIELDS_06(kind_, description_, unhealthyEvaluations_, maxPercentUnhealthyReplicasPerPartition_, totalCount_, aggregatedHealthState_);

        BEGIN_JSON_SERIALIZABLE_PROPERTIES()
            SERIALIZABLE_PROPERTY_CHAIN()
            SERIALIZABLE_PROPERTY(Constants::UnhealthyEvaluations, unhealthyEvaluations_)
            SERIALIZABLE_PROPERTY(Constants::MaxPercentUnhealthyReplicasPerPartition, maxPercentUnhealthyReplicasPerPartition_)
            SERIALIZABLE_PROPERTY(Constants::TotalCount, totalCount_);
        END_JSON_SERIALIZABLE_PROPERTIES()

        BEGIN_DYNAMIC_SIZE_ESTIMATION()
            DYNAMIC_SIZE_ESTIMATION_CHAIN()
            DYNAMIC_SIZE_ESTIMATION_MEMBER(unhealthyEvaluations_)
        END_DYNAMIC_SIZE_ESTIMATION()

    private:
        BYTE maxPercentUnhealthyReplicasPerPartition_;
        ULONG totalCount_;
    };

    DEFINE_HEALTH_EVALUATION_ACTIVATOR( ReplicasHealthEvaluation, FABRIC_HEALTH_EVALUATION_KIND_REPLICAS )
}
| 1,044
|
72,551
|
<gh_stars>1000+
@import Foundation;
// Outcome a scheduled background-activity block reports when it completes.
typedef NS_ENUM(NSInteger, BackgroundActivityResult) {
    BackgroundActivityResultFinished = 1,
    BackgroundActivityResultDeferred = 2,
};

// Callback the scheduled block invokes to report its result.
typedef void (^BackgroundActivityCompletionHandler)(BackgroundActivityResult result);

// Schedules a block of background work; the block receives a completion
// handler it must call with the activity's result.
// NOTE(review): threading/queue guarantees are not visible here — confirm
// with the implementation before relying on a particular callback thread.
@interface BackgroundActivityScheduler : NSObject

- (void)scheduleWithBlock:(void (^)(BackgroundActivityCompletionHandler completionHandler))block;

@end
| 115
|
14,668
|
<gh_stars>1000+
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "android_webview/browser/aw_download_manager_delegate.h"
#include "android_webview/browser/aw_content_browser_client.h"
#include "android_webview/browser/aw_contents_client_bridge.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/web_contents.h"
namespace android_webview {
AwDownloadManagerDelegate::AwDownloadManagerDelegate() = default;
AwDownloadManagerDelegate::~AwDownloadManagerDelegate() = default;

// Hands every download off to the embedding app via AwContentsClientBridge
// rather than Chromium's default download machinery.  Always returns true,
// i.e. the download is always "intercepted" — it is silently dropped when
// there is no WebContents or no client bridge.
// NOTE(review): request_origin and is_transient are unused in this override.
bool AwDownloadManagerDelegate::InterceptDownloadIfApplicable(
    const GURL& url,
    const std::string& user_agent,
    const std::string& content_disposition,
    const std::string& mime_type,
    const std::string& request_origin,
    int64_t content_length,
    bool is_transient,
    content::WebContents* web_contents) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  if (!web_contents)
    return true;

  AwContentsClientBridge* client =
      AwContentsClientBridge::FromWebContents(web_contents);
  if (!client)
    return true;

  // Prefer the WebContents' UA override; otherwise the provided UA, and as
  // a last resort the process-wide default.
  std::string aw_user_agent =
      web_contents->GetUserAgentOverride().ua_string_override;
  if (aw_user_agent.empty()) {
    // use default user agent if nothing is provided
    aw_user_agent = user_agent.empty() ? GetUserAgent() : user_agent;
  }
  client->NewDownload(url, aw_user_agent, content_disposition, mime_type,
                      content_length);
  return true;
}
} // namespace android_webview
| 564
|
785
|
<gh_stars>100-1000
package kafka.streams.table.join;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.web.client.RestTemplate;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(SpringRunner.class)
@SpringBootTest(
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
/**
 * End-to-end test: produces a DomainEvent to the embedded Kafka topic
 * "foobar" and then asserts the event is visible through the application's
 * HTTP endpoint ({@code /events}).
 */
public class KafkaStreamsAggregateSampleTests {

    @ClassRule
    public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "foobar");

    private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();

    @Autowired
    StreamsBuilderFactoryBean streamsBuilderFactoryBean;

    @LocalServerPort
    int randomServerPort;

    @Before
    public void before() {
        // Don't wait for Kafka Streams to shut down between tests.
        streamsBuilderFactoryBean.setCloseTimeout(0);
    }

    @BeforeClass
    public static void setUp() {
        // Point the binder at the embedded broker before the context starts.
        System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
    }

    @AfterClass
    public static void tearDown() {
        System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
    }

    @Test
    public void testKafkaStreamsWordCountProcessor() throws Exception {
        Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
        ObjectMapper mapper = new ObjectMapper();
        Serde<DomainEvent> domainEventSerde = new JsonSerde<>(DomainEvent.class, mapper);
        senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, domainEventSerde.serializer().getClass());
        DefaultKafkaProducerFactory<String, DomainEvent> pf = new DefaultKafkaProducerFactory<>(senderProps);
        try {
            KafkaTemplate<String, DomainEvent> template = new KafkaTemplate<>(pf, true);
            template.setDefaultTopic("foobar");

            DomainEvent ddEvent = new DomainEvent();
            ddEvent.setBoardUuid("12345");
            ddEvent.setEventType("create-domain-event");

            template.sendDefault("", ddEvent);
            // NOTE(review): fixed 1s sleep to let the streams topology process
            // the record — can be flaky on slow CI; consider polling instead.
            Thread.sleep(1000);

            // Query the application's REST endpoint and assert the event
            // round-tripped through the topology.
            RestTemplate restTemplate = new RestTemplate();
            String fooResourceUrl
                    = "http://localhost:" + randomServerPort + "/events";
            ResponseEntity<String> response
                    = restTemplate.getForEntity(fooResourceUrl, String.class);
            assertThat(response.getBody()).contains("create-domain-event");
        }
        finally {
            // Always release producer resources, even on assertion failure.
            pf.destroy();
        }
    }
}
| 1,142
|
1,444
|
<reponame>FateRevoked/mage<filename>Mage.Tests/src/test/java/org/mage/test/cards/copy/DeceiverOfFormTest.java
package org.mage.test.cards.copy;
import mage.constants.PhaseStep;
import mage.constants.Zone;
import mage.filter.Filter;
import org.junit.Test;
import org.mage.test.serverside.base.CardTestPlayerBase;
/**
*
* @author LevelX2
*
*
*/
public class DeceiverOfFormTest extends CardTestPlayerBase {

    /**
     * When creatures copy the revealed creature, they do not return to their
     * original state at the end of turn
     */
    @Test
    public void testCopyEndsEndOfTurn() {
        // Vigilance
        addCard(Zone.LIBRARY, playerA, "Affa Protector", 1); // 1/4
        // At the beginning of combat on your turn, reveal the top card of your library.
        // If a creature card is revealed this way, you may have creatures you control other than Deceiver of Form becomes copies of that card until end of turn.
        // You may put that card on the bottom of your library.
        addCard(Zone.BATTLEFIELD, playerA, "Deceiver of Form", 1);
        addCard(Zone.BATTLEFIELD, playerA, "Silvercoat Lion", 3);

        // The three lions become "Affa Protector" copies for the turn, so the
        // attack declarations below reference that name.
        attack(1, playerA, "Deceiver of Form");
        attack(1, playerA, "Affa Protector");
        attack(1, playerA, "Affa Protector");
        attack(1, playerA, "Affa Protector");

        skipInitShuffling();

        setStopAt(2, PhaseStep.PRECOMBAT_MAIN);
        execute();

        assertPermanentCount(playerA, "Deceiver of Form", 1);
        // By turn 2 the copy effect must have worn off: lions are 2/2 lions again.
        assertPermanentCount(playerA, "Silvercoat Lion", 3);
        assertPowerToughness(playerA, "Silvercoat Lion", 2, 2, Filter.ComparisonScope.All);

        assertLife(playerA, 20); // attacker takes no damage
        assertLife(playerB, 9);  // 20 - 11 combat damage (Deceiver 8 + 3 copies at 1 power each)
    }
}
| 655
|
1,104
|
<reponame>Zorghts/roll20-character-sheets
{
"html": "thesprawl_sheet.html",
"css": "thesprawl_style.css",
"authors": "<NAME>, <NAME>",
"roll20userid": "209786, 95207",
"preview": "Screenshot1.png",
"instructions": "A modified version of the base Sprawl character sheet by <NAME>. This version includes elements necessary to run the 'Darkening Alley' & 'Touched Prime' supplements.",
"legacy": true
}
| 139
|
923
|
import argparse
import random
import torch
from torch import nn, optim
from torch.utils.data import DataLoader
from torchvision.transforms import transforms
from dataset import LiverDataset
from unet import Unet
# 是否使用cuda
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
x_transforms = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
])
# mask只需要转换为tensor
y_transforms = transforms.ToTensor()
def train_model(model, criterion, optimizer, dataload, num_epochs=5):
    """Run a standard supervised training loop.

    For every epoch: iterate over ``dataload``, compute the loss with
    ``criterion``, backpropagate, and step ``optimizer``.  After each epoch
    the model's ``state_dict`` is written to ``weights_<epoch>.pth``.

    Inputs and targets are moved to the module-level ``device`` before the
    forward pass.  Returns the trained model.
    """
    total_samples = len(dataload.dataset)
    batches_per_epoch = (total_samples - 1) // dataload.batch_size + 1
    for epoch in range(num_epochs):
        print('Epoch {}/{}'.format(epoch, num_epochs - 1))
        print('-' * 10)
        running_loss = 0.0
        step = 0
        for x, y in dataload:
            step += 1
            inputs = x.to(device)
            labels = y.to(device)
            optimizer.zero_grad()              # reset accumulated gradients
            outputs = model(inputs)            # forward pass
            loss = criterion(outputs, labels)
            loss.backward()                    # backward pass
            optimizer.step()                   # parameter update
            running_loss += loss.item()
            print("%d/%d,train_loss:%0.3f" %
                  (step, batches_per_epoch, loss.item()))
        print("epoch %d loss:%0.3f" % (epoch, running_loss / step))
        torch.save(model.state_dict(), 'weights_%d.pth' % epoch)
    return model
#训练模型
def train(args):
    """Build a U-Net and train it on the liver dataset.

    Uses BCE-with-logits loss and Adam with default hyper-parameters; the
    batch size comes from the parsed CLI arguments.
    """
    model = Unet(3, 1).to(device)
    criterion = nn.BCEWithLogitsLoss()
    optimizer = optim.Adam(model.parameters())
    dataset = LiverDataset("data/train",
                           transform=x_transforms,
                           target_transform=y_transforms)
    loader = DataLoader(dataset,
                        batch_size=args.batch_size,
                        shuffle=True,
                        num_workers=4)
    train_model(model, criterion, optimizer, loader)
#显示模型的输出结果
def test(args):
    """Visualize model predictions on the training images.

    Loads the checkpoint given by ``--ckpt`` onto the CPU, runs the model
    over the dataset one image at a time, and both displays and saves each
    predicted mask under ``./results/``.
    """
    model = Unet(3, 1)  # CPU evaluation; weights are mapped to CPU below
    model.load_state_dict(torch.load(args.ckpt, map_location='cpu'))
    liver_dataset = LiverDataset("data/train",
                                 transform=x_transforms,
                                 target_transform=y_transforms)
    dataloaders = DataLoader(liver_dataset, batch_size=1)
    model.eval()
    import matplotlib.pyplot as plt
    import os
    plt.ion()
    # savefig below fails if the output directory does not exist yet.
    os.makedirs("./results", exist_ok=True)
    with torch.no_grad():
        for x, _ in dataloaders:
            y = model(x)
            # BUG FIX: torch's `shape` is an attribute, not a method; the
            # original `y.shape()` raised TypeError on the first batch.
            print("y的shape", y.shape)
            img_y = torch.squeeze(y).numpy()
            plt.imshow(img_y)
            # NOTE(review): random names in [0, 100] can collide and overwrite
            # earlier outputs — consider an increasing counter instead.
            plt.savefig("./results/output_%d.jpg" % random.randint(0, 100))
            plt.pause(0.01)
    plt.show()
if __name__ == '__main__':
    # CLI: `python <script> train` or `python <script> test [--ckpt PATH]`.
    # BUG FIX: the parser was constructed twice; the first instance was
    # silently discarded. Build it once.
    parse = argparse.ArgumentParser()
    parse.add_argument("action", type=str, help="train or test")
    parse.add_argument("--batch_size", type=int, default=1)
    parse.add_argument("--ckpt",
                       type=str,
                       default="./weights_19.pth",
                       help="the path of model weight file")
    args = parse.parse_args()
    if args.action == "train":
        train(args)
    elif args.action == "test":
        test(args)
| 1,714
|
1,615
|
/**
* Created by MomoLuaNative.
* Copyright (c) 2019, Momo Group. All rights reserved.
*
* This source code is licensed under the MIT.
* For the full copyright and license information,please view the LICENSE file in the root directory of this source tree.
*/
package org.luaj.vm2;
import android.util.LongSparseArray;
import com.immomo.mlncore.MLNCore;
import java.lang.ref.SoftReference;
/**
 * Created by Xiong.Fangyu on 2019-08-28
 *
 * Userdata (Java objects exposed to Lua) used to be stored on the native
 * side, which consumed entries in JNI's global reference table; with many
 * virtual machines (pages) alive at once that table easily overflowed.
 *
 * The cache therefore lives on the Java side instead: every object gets a
 * unique long id, and native code looks the object up by id to invoke the
 * corresponding methods. Assumes a single VM never creates more than
 * {@link Long#MAX_VALUE} objects.
 */
class UserdataCache {
    /**
     * Next available id for a Java object; 0 is reserved to mean "not cached".
     */
    private volatile long cacheLong = 1;
    /**
     * Strongly-referenced userdata cache, keyed by id.
     */
    private final LongSparseArray<LuaUserdata> cache;
    /**
     * Softly-referenced userdata that were removed from {@link #cache}.
     * Only populated when {@code MLNCore.UserdataCacheType == TYPE_REMOVE_CACHE}.
     */
    private LongSparseArray<SoftReference<LuaUserdata>> removedCache;
    /**
     * Set once in {@link #onDestroy()}; afterwards no new entries are accepted.
     */
    private boolean destroyed = false;

    UserdataCache() {
        cache = new LongSparseArray<LuaUserdata>(100);
    }

    /**
     * Registers a freshly constructed userdata and assigns its unique id.
     * Must be called right after the userdata is initialized; a second call
     * on the same object (id already non-zero) is a no-op.
     *
     * @see LuaUserdata
     */
    void put(LuaUserdata ud) {
        if (destroyed)
            return;
        if (ud.id != 0)  // already registered
            return;
        ud.id = cacheLong++;
        cache.put(ud.id, ud);
    }

    /**
     * Looks up a cached userdata by id.
     * Intended to be called from native code; Java code normally does not call this.
     *
     * @see Globals#__getUserdata(long, long)
     *
     * @param id id assigned in {@link #put(LuaUserdata)}
     * @return the cached userdata, or null when not found
     */
    LuaUserdata get(long id) {
        LuaUserdata ret = cache.get(id);
        if (ret != null)
            return ret;
        if (MLNCore.UserdataCacheType == MLNCore.TYPE_REMOVE_CACHE) {
            // Fall back to the soft-reference cache of removed entries.
            SoftReference<LuaUserdata> ref = removedCache != null ? removedCache.get(id) : null;
            ret = ref != null ? ref.get() : null;
            // NOTE(review): onNullGet is invoked on a *soft-cache hit* in DEBUG
            // builds, despite its name suggesting a null lookup — presumably a
            // debug hook for resurrected objects; confirm against MLNCore.
            if (ret != null && MLNCore.DEBUG) {
                return MLNCore.onNullGet(id, ret);
            }
            return ret;
        }
        return null;
    }

    /**
     * Drops the cache entry when a userdata is collected by Lua GC.
     *
     * @param ud        the collected userdata
     * @param finalized true when called from Java finalization: the entry is
     *                  removed unconditionally, without soft-caching
     */
    void onUserdataGc(LuaUserdata ud, boolean finalized) {
        if (finalized) {
            cache.remove(ud.id);
            return;
        }
        switch (MLNCore.UserdataCacheType) {
            case MLNCore.TYPE_REMOVE:
                // Drop the strong reference entirely.
                cache.remove(ud.id);
                break;
            case MLNCore.TYPE_REMOVE_CACHE:
                // Downgrade the strong reference to a soft one so the object
                // may still be resurrected by get() until the JVM reclaims it.
                cache.remove(ud.id);
                if (removedCache == null) {
                    removedCache = new LongSparseArray<>(50);
                }
                removedCache.put(ud.id, new SoftReference<LuaUserdata>(ud));
                break;
            default:
                break;
        }
    }

    /**
     * Called exactly once when the virtual machine is destroyed: notifies
     * every cached userdata of the Lua GC and clears both caches.
     */
    void onDestroy() {
        destroyed = true;
        for (int i = 0, l = cache.size(); i < l; i ++) {
            cache.valueAt(i).__onLuaGc();
        }
        cache.clear();
        if (removedCache != null) {
            removedCache.clear();
        }
    }
}
| 1,835
|
877
|
# Re-export the shim unconditionally; QT_AVAILABLE is assumed to come from
# .shim's star export (it probes for a usable Qt binding) — confirm there.
from .shim import *
# Qt-dependent helpers are only exported when a Qt binding was found.
if QT_AVAILABLE:
    from .util import *
    from .waitbox import WaitBox
| 37
|
304
|
<filename>apps/app4_links/views.py
# coding: utf-8
"""
We will use most views from the previous app.
"""
from django.shortcuts import render
from django.urls import reverse
def index(request):
    """Render the app4 index page with a link built via reverse().

    reverse() takes a route name (and optionally URL parameters) and returns
    the concrete URL derived from the project's urls.py — here it resolves
    the route named 'prefix' to "/app4/prefix/".
    """
    prefix_url = reverse('prefix')
    context = {'first_link': prefix_url}
    return render(request, 'app4_index.html', context)
| 157
|
578
|
<filename>src/gfx-pbr/PipelinePbr.h
// Copyright (c) 2019 <NAME> <EMAIL>
// This software is provided 'as-is' under the zlib License, see the LICENSE.txt file.
// This notice and the license may not be removed or altered from any source distribution.
#pragma once
#ifndef TWO_MODULES
#include <stl/table.h>
#include <gfx/Pipeline.h>
#endif
#include <gfx-pbr/Forward.h>
namespace two
{
    // Draw block producing the PBR geometry passes; owns the two default
    // geometry materials.
    export_ class refl_ TWO_GFX_EXPORT BlockGeometry : public DrawBlock
    {
    public:
        BlockGeometry(GfxSystem& gfx);
        ~BlockGeometry();

        virtual void init_block() override;

        virtual void begin_render(Render& render) override;

        virtual void options(Render& render, const DrawElement& element, ProgramVersion& program) const final;
        virtual void submit(Render& render, const Pass& pass) const final;
        virtual void submit(Render& render, const DrawElement& element, const Pass& pass) const final;

        // Fallback materials. NOTE(review): the twosided variant is presumably
        // for geometry drawn without back-face culling — confirm in the .cpp.
        Material* m_material = nullptr;
        Material* m_material_twosided = nullptr;
    };

    // ---- Individual render passes ----
    export_ TWO_GFX_PBR_EXPORT func_ void begin_pbr_render(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_gi_probes(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_shadowmaps(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_shadow(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_opaque(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_alpha(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_geometry(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_lights(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_voxel_gi(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_lightmap(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_begin_post(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_post_auto(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void pass_post_effects(GfxSystem& gfx, Render& render, DofBlur& dof, Glow& glow, Tonemap& tonemap, BCS& bcs);

    // ---- Full renderers composed from the passes above ----
    export_ TWO_GFX_PBR_EXPORT func_ void render_pbr_forward(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void render_pbr_deferred(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void render_shadow(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void render_voxel(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void render_lightmap(GfxSystem& gfx, Render& render);
    export_ TWO_GFX_PBR_EXPORT func_ void render_reflection(GfxSystem& gfx, Render& render);

    // ---- Scene gathering helpers feeding the passes ----
    export_ TWO_GFX_PBR_EXPORT void gather_gi_probes(Scene& scene, vector<GIProbe*>& gi_probes);
    export_ TWO_GFX_PBR_EXPORT void gather_lightmaps(Scene& scene, vector<LightmapAtlas*>& atlases);
    export_ TWO_GFX_PBR_EXPORT void gather_reflection_probes(Scene& scene, vector<ReflectionProbe*>& reflection_probes);

    // Registers the PBR pipeline (forward by default, deferred on request).
    export_ TWO_GFX_PBR_EXPORT func_ void pipeline_pbr(GfxSystem& gfx, Renderer& pipeline, bool deferred = false);

    namespace gfx
    {
        export_ TWO_GFX_PBR_EXPORT func_ void setup_pipeline_pbr(GfxSystem& gfx);
    }
}
| 1,190
|
580
|
/**
* Copyright 2012, Big Switch Networks, Inc.
* Originally created by <NAME>, Big Switch Networks
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.core.web;
import java.util.Map;
import org.restlet.resource.Get;
import org.restlet.resource.ServerResource;
import net.floodlightcontroller.core.IFloodlightProviderService;
/**
 * REST resource exposing the summary counters registered by all modules.
 *
 * @author shudongz
 */
public class ControllerSummaryResource extends ServerResource {
    @Get("json")
    public Map<String, Object> retrieve() {
        // Look up the provider service registered in the Restlet context.
        String serviceName = IFloodlightProviderService.class.getCanonicalName();
        IFloodlightProviderService provider =
                (IFloodlightProviderService) getContext().getAttributes().get(serviceName);
        return provider.getControllerInfo("summary");
    }
}
| 431
|
2,603
|
/* ----------------------------------------------------------------------------
* SAM Software Package License
* ----------------------------------------------------------------------------
* Copyright (c) 2014, Atmel Corporation
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the disclaimer below.
*
* Atmel's name may not be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
* DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ----------------------------------------------------------------------------
*/
/**
* \file
*
* \section Purpose
*
* Interface for configuration the Analog-to-Digital Converter (DACC) peripheral.
*
* \section Usage
*
* -# Configurate the pins for DACC
* -# Initialize the DACC with DACC_Initialize().
* -# Select the active channel using DACC_EnableChannel()
* -# Start the conversion with DACC_StartConversion()
* -# Wait the end of the conversion by polling status with DACC_GetStatus()
* -# Finally, get the converted data using DACC_GetConvertedData()
*
*/
#ifndef _DAC_DMA_
#define _DAC_DMA_
/*----------------------------------------------------------------------------
* Headers
*----------------------------------------------------------------------------*/
#include "chip.h"
#include <stdint.h>
#include <assert.h>
#ifdef __cplusplus
extern "C" {
#endif
/*----------------------------------------------------------------------------
* Types
*----------------------------------------------------------------------------*/
/** DAC transfer complete callback: receives the DAC channel number and the
    user argument registered in DacCmd::pArgument. */
typedef void (*DacCallback)( uint8_t, void* ) ;

/** \brief Dac Transfer Request prepared by the application upper layer.
 *
 * This structure is sent to the DAC_SendCommand function to start the transfer.
 * At the end of the transfer, the callback is invoked by the interrupt handler.
 */
typedef struct
{
    /** Pointer to the Tx data. */
    uint8_t *pTxBuff;
    /** Tx size in bytes. */
    uint16_t TxSize;
    /** Tx loop back. */
    uint16_t loopback;
    /** DACC channel*/
    uint8_t dacChannel;
    /** Callback function invoked at the end of transfer. */
    DacCallback callback;
    /** Callback arguments. */
    void *pArgument;
} DacCmd ;

/** Driver state associated with one DAC port. The semaphore field guards
    against concurrent access by multiple client applications. */
typedef struct
{
    /** Pointer to DAC Hardware registers */
    Dacc* pDacHw ;
    /** Current SpiCommand being processed */
    DacCmd *pCurrentCommand ;
    /** Pointer to DMA driver */
    sXdmad* pXdmad ;
    /** DACC Id as defined in the product datasheet */
    uint8_t dacId ;
    /** Mutual exclusion semaphore. */
    volatile int8_t semaphore ;
} DacDma;

/*------------------------------------------------------------------------------
 *         Definitions
 *------------------------------------------------------------------------------*/
/* Return codes for the Dac_* functions. */
#define DAC_OK          0   /* operation accepted */
#define DAC_ERROR       1   /* generic failure */
#define DAC_ERROR_LOCK  2   /* driver busy: semaphore already taken */
/* DAC output channel indices. */
#define DACC_CHANNEL_0 0
#define DACC_CHANNEL_1 1

/*------------------------------------------------------------------------------
 *         Exported functions
 *------------------------------------------------------------------------------*/
/* Binds the driver state to a DACC instance and the XDMA driver. */
extern uint32_t Dac_ConfigureDma( DacDma *pDacd ,
                                  Dacc *pDacHw ,
                                  uint8_t DacId,
                                  sXdmad *pXdmad );
/* Starts a DMA transfer described by pCommand; returns DAC_OK or DAC_ERROR_LOCK. */
extern uint32_t Dac_SendData( DacDma *pDacd, DacCmd *pCommand);

/*------------------------------------------------------------------------------
 *         Macros function of register access
 *------------------------------------------------------------------------------*/
/* Thin wrappers over the DACC register interface; each takes a Dacc* pointer. */
#define DACC_SoftReset(pDACC)                 ((pDACC)->DACC_CR = DACC_CR_SWRST)
#define DACC_CfgModeReg(pDACC, mode)          { (pDACC)->DACC_MR = (mode); }
#define DACC_GetModeReg(pDACC)                ((pDACC)->DACC_MR)
#define DACC_CfgTrigger(pDACC, mode)          { (pDACC)->DACC_TRIGR = (mode); }
#define DACC_EnableChannel(pDACC, channel)    {(pDACC)->DACC_CHER = (1 << (channel));}
#define DACC_DisableChannel(pDACC, channel)   {(pDACC)->DACC_CHDR = (1 << (channel));}
#define DACC_EnableIt(pDACC, mode)            {(pDACC)->DACC_IER = (mode);}
#define DACC_DisableIt(pDACC, mode)           {(pDACC)->DACC_IDR = (mode);}
#define DACC_GetStatus(pDACC)                 ((pDACC)->DACC_ISR)
#define DACC_GetChannelStatus(pDACC)          ((pDACC)->DACC_CHSR)
#define DACC_GetInterruptMaskStatus(pDACC)    ((pDACC)->DACC_IMR)
#ifdef __cplusplus
}
#endif
#endif /* #ifndef _DAC_DMA_ */
| 1,979
|
2,231
|
<reponame>SwampertX/FStar<filename>examples/low-level/old/huffman/ocaml/huffman_ffi.c<gh_stars>1000+
#include <caml/memory.h>
#include <caml/fail.h>
#include "camlstack.h"
#include "stack.h"
static value empty_string = (value)0;
/*
 * Allocate a 6-field Huffman tree node on the ML stack.
 *
 * v1..v5 become fields 0..4; field 5 is a shared 1-byte stack string.
 * Fields 2..4 whose values are neither immediates nor stack pointers are
 * reported to the stack allocator as heap pointers (indices collected in
 * heap_fields) so the allocator can track them.
 */
CAMLprim value stack_mknode(value v1, value v2, value v3, value v4, value v5)
{
    CAMLparam5 (v1, v2, v3, v4, v5);
    int heap_fields[3];
    int nheap = 0;

    /* Lazily create the shared empty string used for field 5. */
    if (empty_string == (value)0)
        empty_string = stack_caml_alloc_string(1);

    if (!(Is_long(v2) || is_stack_pointer((void *)v2)))
        heap_fields[nheap++] = 2;
    if (!(Is_long(v3) || is_stack_pointer((void *)v3)))
        heap_fields[nheap++] = 3;
    if (!(Is_long(v4) || is_stack_pointer((void *)v4)))
        heap_fields[nheap++] = 4;

    value node = stack_caml_alloc_tuple(6, nheap, heap_fields);
    if (node == (value)0)
        caml_failwith ("Huffman.mknode");
    else {
        Field(node, 0) = v1;
        Field(node, 1) = v2;
        Field(node, 2) = v3;
        Field(node, 3) = v4;
        Field(node, 4) = v5;
        Field(node, 5) = empty_string;
        CAMLreturn(node);
    }
}
| 507
|
459
|
<reponame>dimitri-justeau/choco-solver
/*
* This file is part of choco-solver, http://choco-solver.org/
*
* Copyright (c) 2021, IMT Atlantique. All rights reserved.
*
* Licensed under the BSD 4-clause license.
*
* See LICENSE file in the project root for full license information.
*/
package org.chocosolver.solver.variables.view.graph;
import org.chocosolver.solver.variables.DirectedGraphVar;
import org.chocosolver.solver.variables.Variable;
import org.chocosolver.solver.variables.view.GraphView;
import org.chocosolver.util.objects.graphs.DirectedGraph;
/**
 * Abstract base for graph views exposing a directed graph over a set of
 * observed variables.
 *
 * @author <NAME>
 * @since 31/03/2021
 */
public abstract class DirectedGraphView<V extends Variable> extends GraphView<V, DirectedGraph> implements DirectedGraphVar {

    /**
     * Creates a directed graph view over the given variables.
     *
     * @param name      name of the view
     * @param variables observed variables
     */
    protected DirectedGraphView(String name, V[] variables) {
        super(name, variables);
    }

    /** Always true: this view is directed by construction. */
    @Override
    public boolean isDirected() {
        return true;
    }
}
| 389
|
412
|
package temp;
/** Minimal sample class exposing a fixed integer value. */
class A
{
    /** @return the constant 123456 */
    int toint()
    {
        final int value = 123456;
        return value;
    }
}
| 32
|
892
|
<reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-f8wp-q2v8-9hc2",
"modified": "2022-01-27T00:03:11Z",
"published": "2022-01-20T00:00:23Z",
"aliases": [
"CVE-2021-33912"
],
"details": "libspf2 before 1.2.11 has a four-byte heap-based buffer overflow that might allow remote attackers to execute arbitrary code (via an unauthenticated e-mail message from anywhere on the Internet) with a crafted SPF DNS record, because of incorrect sprintf usage in SPF_record_expand_data in spf_expand.c. The vulnerable code may be part of the supply chain of a site's e-mail infrastructure (e.g., with additional configuration, Exim can use libspf2; the Postfix web site links to unofficial patches for use of libspf2 with Postfix; older versions of spfquery relied on libspf2) but most often is not.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-33912"
},
{
"type": "WEB",
"url": "https://github.com/shevek/libspf2/tree/8131fe140704eaae695e76b5cd09e39bd1dd220b"
},
{
"type": "WEB",
"url": "https://lists.debian.org/debian-lts-announce/2022/01/msg00015.html"
},
{
"type": "WEB",
"url": "https://nathanielbennett.com/blog/libspf2-cve-jan-2022-disclosure"
}
],
"database_specific": {
"cwe_ids": [
"CWE-787"
],
"severity": "CRITICAL",
"github_reviewed": false
}
}
| 623
|
637
|
<gh_stars>100-1000
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., <NAME>,
* <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson;
import hudson.model.Hudson;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.recipes.LocalData;
import org.jvnet.hudson.test.recipes.Recipe;
import java.io.File;
import java.net.URL;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* @author <NAME>
*/
public class ClassicPluginStrategyTest extends HudsonTestCase {

    @Override
    protected void setUp() throws Exception {
        // Use a PluginManager local to this test class; newHudson() below
        // installs a customized manager.
        useLocalPluginManager = true;
        super.setUp();
    }

    @Override
    protected Hudson newHudson() throws Exception {
        File home = homeLoader.allocate();
        for (Recipe.Runner r : recipes) {
            r.decorateHome(this,home);
        }
        // Custom manager that additionally loads the detached plugins bundled
        // in the war, so the tests below can exercise them.
        LocalPluginManager pluginManager = new LocalPluginManager(home) {
            @Override
            protected Collection<String> loadBundledPlugins() {
                // Overriding so we can force loading of the detached plugins for testing
                Set<String> names = new LinkedHashSet<>();
                names.addAll(loadPluginsFromWar("/WEB-INF/plugins"));
                names.addAll(loadPluginsFromWar("/WEB-INF/detached-plugins"));
                return names;
            }
        };
        return new Hudson(home, createWebServer(), pluginManager);
    }

    /**
     * Test finding resources via DependencyClassLoader.
     */
    @LocalData
    public void testDependencyClassLoader() throws Exception {
        // Test data has: foo3 depends on foo2,foo1; foo2 depends on foo1
        // (thus findResources from foo3 can find foo1 resources via 2 dependency paths)
        PluginWrapper p = jenkins.getPluginManager().getPlugin("foo3");
        String res;

        // In the current impl, the dependencies are the parent ClassLoader so resources
        // are found there before checking the plugin itself. Adjust the expected results
        // below if this is ever changed to check the plugin first.
        Enumeration<URL> en = p.classLoader.getResources("test-resource");
        for (int i = 0; en.hasMoreElements(); i++) {
            res = en.nextElement().toString();
            // Expect the two dependency hits (foo1/foo2) before foo3's own copy.
            if (i < 2)
                assertTrue("In current impl, " + res + "should be foo1 or foo2",
                        res.contains("/foo1/") || res.contains("/foo2/"));
            else
                assertTrue("In current impl, " + res + "should be foo3", res.contains("/foo3/"));
        }
        // Single-resource lookup must also resolve through the dependencies first.
        res = p.classLoader.getResource("test-resource").toString();
        assertTrue("In current impl, " + res + " should be foo1 or foo2",
                res.contains("/foo1/") || res.contains("/foo2/"));
    }

    /**
     * Test finding resources via DependencyClassLoader.
     * Check transitive dependency exclude disabled plugins
     */
    @LocalData
    @Issue("JENKINS-18654")
    public void testDisabledDependencyClassLoader() throws Exception {
        PluginWrapper p = jenkins.getPluginManager().getPlugin("foo4");

        Enumeration<URL> en = p.classLoader.getResources("test-resource");
        for (int i = 0; en.hasMoreElements(); i++) {
            String res = en.nextElement().toString();
            // Only foo4's own resource may appear; its disabled dependency
            // must not contribute any hits.
            if (i == 0)
                assertTrue("expected foo4, found "+res , res.contains("/foo4/"));
            else
                fail("disabled dependency should not be included");
        }
    }

    /**
     * Test finding resources under masking.
     * "foo1" plugin contains attribute of Mask-Classes: org.apache.http.
     */
    @LocalData
    @Issue("JENKINS-27289")
    public void testMaskResourceClassLoader() throws Exception {
        PluginWrapper pw = jenkins.getPluginManager().getPlugin("foo1");
        // Because org.apache.http is masked, the class must come from the
        // plugin's own copy rather than from core/parent loaders.
        Class<?> clazz = pw.classLoader.loadClass("org.apache.http.impl.io.SocketInputBuffer");
        ClassLoader cl = clazz.getClassLoader();
        URL url = cl.getResource("org/apache/http/impl/io/SocketInputBuffer.class");
        assertNotNull(url);
        assertTrue("expected to find the class from foo1 plugin", url.toString().contains("plugins/foo1"));
    }
}
| 2,003
|
569
|
<filename>app/src/main/java/com/anarchy/classifyview/ContentActivity.java
package com.anarchy.classifyview;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.helper.ItemTouchHelper;
import com.anarchy.classifyview.core.BaseFragment;
import com.anarchy.classifyview.sample.demonstrate.DemonstrateFragment;
import com.anarchy.classifyview.sample.ireader.IReaderMockFragment;
import com.anarchy.classifyview.sample.layoutmanager.LayoutManagerFragment;
import com.anarchy.classifyview.sample.normal.NormalFragment;
import com.anarchy.classifyview.sample.normalfolder.NormalFolderFragment;
import com.anarchy.classifyview.sample.viewpager.ViewPagerFragment;
/**
* <p/>
* Date: 16/6/12 09:40
* Author: <EMAIL>
* <p/>
*/
public class ContentActivity extends AppCompatActivity {
@SuppressWarnings("unchecked")
private Class<? extends Fragment>[] mClasses = new Class[]{NormalFragment.class,
DemonstrateFragment.class, ViewPagerFragment.class, LayoutManagerFragment.class,
NormalFolderFragment.class,IReaderMockFragment.class};//,
private int position;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.content_main);
position = getIntent().getIntExtra(MainActivity.EXTRA_POSITION, 0);
try {
getSupportFragmentManager().beginTransaction().add(R.id.container, mClasses[position].newInstance()).commit();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
@Override
public void onBackPressed() {
Fragment fragment = getSupportFragmentManager().findFragmentById(R.id.container);
if (!(fragment instanceof BaseFragment && ((BaseFragment) fragment).onBackPressed())) {
super.onBackPressed();
}
}
}
| 757
|
14,668
|
<reponame>zealoussnow/chromium
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "printing/printer_status.h"
namespace printing {

// Special members are defaulted out-of-line; PrinterStatus itself is
// declared in printing/printer_status.h (included above).
PrinterStatus::PrinterStatus() = default;

PrinterStatus::PrinterStatus(const PrinterStatus& other) = default;

PrinterStatus::~PrinterStatus() = default;

}  // namespace printing
| 129
|
3,897
|
<reponame>pradeep-gr/mbed-os5-onsemi
/**
* @file
* @brief Registers, Bit Masks and Bit Positions for the PMU module.
*/
/* ****************************************************************************
* Copyright (C) 2016 Maxim Integrated Products, Inc., All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES
* OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Maxim Integrated
* Products, Inc. shall not be used except as stated in the Maxim Integrated
* Products, Inc. Branding Policy.
*
* The mere transfer of this software does not imply any licenses
* of trade secrets, proprietary technology, copyrights, patents,
* trademarks, maskwork rights, or any other form of intellectual
* property whatsoever. Maxim Integrated Products, Inc. retains all
* ownership rights.
*
* $Date: 2016-10-10 19:24:21 -0500 (Mon, 10 Oct 2016) $
* $Revision: 24667 $
*
**************************************************************************** */
/* Define to prevent redundant inclusion */
#ifndef _PMU_H_
#define _PMU_H_
/* **** Includes **** */
#include "pmu_regs.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* @ingroup periphlibs
* @defgroup pmuGroup Peripheral Management Unit
* @brief Peripheral Management Unit (PMU) Interface.
* @{
*/
/**
* Enum type for the clock scale used for the PMU timeout clock.
*/
/* Values alias the MXC_V_PMU_CFG_PS_SEL_* register-field encodings, so they
   can be written into the PMU CFG register directly. */
typedef enum {
    PMU_PS_SEL_DISABLE = MXC_V_PMU_CFG_PS_SEL_DISABLE,      /**< Timeout disabled */
    PMU_PS_SEL_DIV_2_8 = MXC_V_PMU_CFG_PS_SEL_DIV_2_8,      /**< Timeout clk = PMU clock / 2^8 = 256 */
    PMU_PS_SEL_DIV_2_16 = MXC_V_PMU_CFG_PS_SEL_DIV_2_16,    /**< Timeout clk = PMU clock / 2^16 = 65536 */
    PMU_PS_SEL_DIV_2_24 = MXC_V_PMU_CFG_PS_SEL_DIV_2_24     /**< Timeout clk = PMU clock / 2^24 = 16777216 */
}pmu_ps_sel_t;

/**
 * Enumeration type for the number of clk ticks for the timeout duration.
 * Values alias the MXC_V_PMU_CFG_TO_SEL_* register-field encodings.
 */
typedef enum {
    PMU_TO_SEL_TICKS_4 = MXC_V_PMU_CFG_TO_SEL_TICKS_4,      /**< timeout = 4 * Timeout clk period */
    PMU_TO_SEL_TICKS_8 = MXC_V_PMU_CFG_TO_SEL_TICKS_8,      /**< timeout = 8 * Timeout clk period */
    PMU_TO_SEL_TICKS_16 = MXC_V_PMU_CFG_TO_SEL_TICKS_16,    /**< timeout = 16 * Timeout clk period */
    PMU_TO_SEL_TICKS_32 = MXC_V_PMU_CFG_TO_SEL_TICKS_32,    /**< timeout = 32 * Timeout clk period */
    PMU_TO_SEL_TICKS_64 = MXC_V_PMU_CFG_TO_SEL_TICKS_64,    /**< timeout = 64 * Timeout clk period */
    PMU_TO_SEL_TICKS_128 = MXC_V_PMU_CFG_TO_SEL_TICKS_128,  /**< timeout = 128 * Timeout clk period */
    PMU_TO_SEL_TICKS_256 = MXC_V_PMU_CFG_TO_SEL_TICKS_256,  /**< timeout = 256 * Timeout clk period */
    PMU_TO_SEL_TICKS_512 = MXC_V_PMU_CFG_TO_SEL_TICKS_512   /**< timeout = 512 * Timeout clk period */
}pmu_to_sel_t;
/*
* The macros like the one below are designed to help build static PMU programs
* as arrays of 32bit words.
*/
#define PMU_IS(interrupt, stop) ((!!interrupt) << PMU_INT_POS) | ((!!stop) << PMU_STOP_POS)
/*
* Structure type to build a PMU Move Op Code.
*/
typedef struct pmu_move_des_t {
uint32_t op_code : 3; /* 0x0 */
uint32_t interrupt : 1;
uint32_t stop : 1;
uint32_t read_size : 2;
uint32_t read_inc : 1;
uint32_t write_size : 2;
uint32_t write_inc : 1;
uint32_t cont : 1;
uint32_t length : 20;
uint32_t write_address;
uint32_t read_address;
} pmu_move_des_t;
#define PMU_MOVE(i, s, rs, ri, ws, wi, c, length, wa, ra) \
(PMU_MOVE_OP | PMU_IS(i,s) | ((rs & 3) << PMU_MOVE_READS_POS) | ((!!ri) << PMU_MOVE_READI_POS) | \
((ws & 3) << PMU_MOVE_WRITES_POS) | ((!!wi) << PMU_MOVE_WRITEI_POS) | ((!!c) << PMU_MOVE_CONT_POS) | ((length & 0xFFFFF) << PMU_MOVE_LEN_POS)), wa, ra
/* new_value = value | (old_value & ~ mask) */
typedef struct pmu_write_des_t {
    uint32_t op_code : 3; /* 0x1 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t : 3; /* reserved */
    uint32_t write_method : 4;
    uint32_t : 20; /* reserved */
    uint32_t write_address;
    uint32_t value;
    uint32_t mask; /* selects which bits of the old value are preserved (see formula above) */
} pmu_write_des_t;
/* Expands to the four words of a Write descriptor: control word, address, value, mask. */
#define PMU_WRITE(i, s, wm, a, v, m) (PMU_WRITE_OP | PMU_IS(i,s) | ((wm & 0xF) << PMU_WRITE_METHOD_POS)), a, v, m
typedef struct pmu_wait_des_t {
    uint32_t op_code : 3; /* 0x2 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t wait : 1;
    uint32_t sel : 1;
    uint32_t : 25; /* reserved */
    uint32_t mask1;
    uint32_t mask2;
    uint32_t wait_count;
} pmu_wait_des_t;
/* Expands to the four words of a Wait descriptor; the wait bit is set automatically when cnt > 0. */
#define PMU_WAIT(i, s, sel, m1, m2, cnt) (PMU_WAIT_OP | PMU_IS(i,s) | ((cnt>0)?(1<<PMU_WAIT_WAIT_POS):0) | ((!!sel) << PMU_WAIT_SEL_POS)), \
    m1, m2, cnt
typedef struct pmu_jump_des_t {
    uint32_t op_code : 3; /* 0x3 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t : 27; /* reserved */
    uint32_t address; /* jump target */
} pmu_jump_des_t;
/* Expands to the two words of a Jump descriptor: control word, target address. */
#define PMU_JUMP(i, s, a) (PMU_JUMP_OP | PMU_IS(i,s)), a
typedef struct pmu_loop_des_t {
    uint32_t op_code : 3; /* 0x4 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t sel_counter : 1; /* selects loop counter 0 or 1 (values set via PMU_SetCounter()) */
    uint32_t : 26; /* reserved */
    uint32_t address; /* loop target */
} pmu_loop_des_t;
/* Expands to the two words of a Loop descriptor: control word, target address. */
#define PMU_LOOP(i, s, c, a) (PMU_LOOP_OP | PMU_IS(i,s) | ((!!c) << PMU_LOOP_SEL_COUNTER_POS)), a
typedef struct pmu_poll_des_t {
    uint32_t op_code : 3; /* 0x5 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t : 2; /* reserved */
    uint32_t and : 1;
    uint32_t : 24; /* reserved */
    uint32_t poll_addr;
    uint32_t data;
    uint32_t mask;
    uint32_t poll_interval;
} pmu_poll_des_t;
/* Expands to the five words of a Poll descriptor: control word, address, data, mask, interval. */
#define PMU_POLL(i, s, a, adr, d, m, per) (PMU_POLL_OP | PMU_IS(i,s) | ((!!a) << PMU_POLL_AND_POS)), adr, d, m, per
typedef struct pmu_branch_des_t {
    uint32_t op_code : 3; /* 0x6 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t : 2; /* reserved */
    uint32_t and : 1;
    uint32_t type : 3; /* branch comparison type (3-bit field, masked in PMU_BRANCH) */
    uint32_t : 21; /* reserved */
    uint32_t poll_addr;
    uint32_t data;
    uint32_t mask;
    uint32_t address; /* branch target */
} pmu_branch_des_t;
/* Expands to the five words of a Branch descriptor: control word, address, data, mask, target. */
#define PMU_BRANCH(i, s, a, t, adr, d, m, badr) \
    (PMU_BRANCH_OP | PMU_IS(i,s) | ((!!a) << PMU_BRANCH_AND_POS)| ((t & 7) << PMU_BRANCH_TYPE_POS)), adr, d, m, badr
typedef struct pmu_transfer_des_t {
    uint32_t op_code : 3; /* 0x7 */
    uint32_t interrupt : 1;
    uint32_t stop : 1;
    uint32_t read_size : 2;
    uint32_t read_inc : 1;
    uint32_t write_size : 2;
    uint32_t write_inc : 1;
    uint32_t : 1; /* reserved */
    uint32_t tx_length : 20;
    uint32_t write_address; /* destination address */
    uint32_t read_address; /* source address */
    uint32_t int_mask : 25; /* valid int_mask is from 0 - 24 */
    uint32_t : 1; /* reserved */
    uint32_t burst_size : 6;
} pmu_transfer_des_t;
/* Expands to the four words of a Transfer descriptor: control word, write/read addresses, mask+burst word. */
#define PMU_TRANSFER(i, s, rs, ri, ws, wi, l, wa, ra, imsk, b) \
    (PMU_TRANSFER_OP | PMU_IS(i,s) | ((rs & 3) << PMU_TX_READS_POS) | ((!!ri) << PMU_TX_READI_POS) | \
    ((ws & 3) << PMU_TX_WRITES_POS) | ((!!wi) << PMU_TX_WRITEI_POS) | ((l & 0xFFFFF) << PMU_TX_LEN_POS)), wa, ra, \
    ((imsk) | ((b & 0x3F) << PMU_TX_BS_POS))
/**
 * Callback function type for the PMU.
 * @details The callback function signature is:
 * @code
 *    void callback(int status);
 * @endcode
 * @p pmu_status - The callback function argument is a status bit
 * indicating the status of the PMU program. The callback function
 * will be called for every opcode that has the interrupt bit set.
 * If NULL is passed to PMU_Start(), the channel interrupt will not
 * be enabled and no callback occurs.
 */
typedef void (*pmu_callback)(int pmu_status);
/**
 * @brief Start a PMU program on a channel
 *
 * @param[in] channel The channel number to start the PMU program.
 * @param[in] program_address A pointer to the first opcode of the PMU program
 *            (e.g. an array built with the PMU_MOVE/PMU_WRITE/... macros above).
 * @param[in] callback A pointer to the callback function or NULL. See pmu_callback() for details.
 *
 * @return #E_NO_ERROR if everything is successful, error if unsuccessful.
 */
int PMU_Start(unsigned int channel, const void *program_address, pmu_callback callback);
/**
 * @brief Set a loop counter value on a channel
 * @param channel Channel number to set the value on
 * @param counter_num Counter number for the channel (0 or 1)
 * @param value Loop count value
 * @returns #E_NO_ERROR if everything is successful, error if unsuccessful.
 */
int PMU_SetCounter(unsigned int channel, unsigned int counter_num, uint16_t value);
/**
 * @brief Stop a running channel. This will clear the enable bit on the channel
 *        and stop the running PMU program at the current opcode. The callback
 *        function is not called.
 * @param channel Channel to stop
 */
void PMU_Stop(unsigned int channel);
/**
 * @brief Function to handle PMU interrupts. This function can be called from
 *        the PMU interrupt service routine, or periodically from the
 *        application if interrupts are not enabled. Dispatches any registered
 *        channel callbacks.
 */
void PMU_Handler(void);
/**
 * @brief Set the AHB bus operation timeout on a channel
 * @param channel Selected PMU channel
 * @param timeoutClkScale Clk scale use for timeout clk (see pmu_ps_sel_t)
 * @param timeoutTicks Number of ticks for timeout duration (see pmu_to_sel_t)
 * @returns #E_NO_ERROR if everything is successful, error if unsuccessful.
 */
int PMU_SetTimeout(unsigned int channel, pmu_ps_sel_t timeoutClkScale, pmu_to_sel_t timeoutTicks);
/**
 * @brief Gets the PMU channel's flags
 * @param channel Selected PMU channel
 * @return 0 = flags not set, non-zero = flags
 */
uint32_t PMU_GetFlags(unsigned int channel);
/**
 * @brief Clear the PMU channel's flags based on the mask
 * @param channel Selected PMU channel
 * @param mask bits of the flags to clear
 */
void PMU_ClearFlags(unsigned int channel, unsigned int mask);
/**
 * @brief Determines if the PMU channel is running
 * @param channel Selected PMU channel
 * @return 0 - channel is off
 * @return non-zero = channel is running
 */
uint32_t PMU_IsActive(unsigned int channel);
/**@} end of group pmuGroup*/
#ifdef __cplusplus
}
#endif
#endif /* _PMU_H_ */
| 4,799
|
641
|
/*********************************************************************
* SEGGER Microcontroller GmbH & Co. KG *
* Solutions for real time microcontroller applications *
**********************************************************************
* *
* (c) 1996 - 2015 SEGGER Microcontroller GmbH & Co. KG *
* *
* Internet: www.segger.com Support: <EMAIL> *
* *
**********************************************************************
** emWin V5.30 - Graphical user interface for embedded applications **
All Intellectual Property rights in the Software belongs to SEGGER.
emWin is protected by international copyright laws.
This file has been licensed to Infineon Technologies AG, a german
company at the address Am Campeon 1-12 in 85579 Neubiberg, Germany and
is sublicensed and distributed by Infineon Technologies AG in
accordance with the DAVE (TM) 3 Software License Agreement to be used
for and with Infineon's Cortex-M0, M0+ and M4 based 32-bit microcon-
troller products only.
Full source code is available at: www.segger.com
We appreciate your understanding and fairness.
----------------------------------------------------------------------
Licensing information
Licensor: SEGGER Microcontroller GmbH & Co. KG
Licensed to: Infineon Technologies AG, Am Campeon 1-12, 85579 Neubiberg
Licensed SEGGER software: emWin
License number: GUI-00324
License model: Buyout SRC [Buyout Source Code License]
Licensed product: -
Licensed platform: Infineon's Cortex M0, M0+, M4 based 32-bit microcontroller products
Licensed number of seats: -
----------------------------------------------------------------------
File : HEADER_Private.h
Purpose : Private HEADER include
--------------------END-OF-HEADER-------------------------------------
*/
#ifndef HEADER_PRIVATE_H
#define HEADER_PRIVATE_H
#include "HEADER.h"
#include "WIDGET.h"
#include "WM.h"
#include "GUI_ARRAY.h"
#if GUI_WINSUPPORT
/*********************************************************************
*
* Object definition
*
**********************************************************************
*/
/* One column of a HEADER widget. */
typedef struct {
  int Width; /* column width in pixels */
  I16 Align; /* text alignment flags */
  WM_HMEM hDrawObj; /* optional draw-object handle, set via HEADER__SetDrawObj() */
  char acText[1]; /* first byte of the column text; presumably over-allocated to hold the full string — TODO confirm allocation site */
} HEADER_COLUMN;
/* Skinning hook: custom draw routine for the skinned variant. */
typedef struct {
  WIDGET_DRAW_ITEM_FUNC * pfDrawSkin;
} HEADER_SKIN_PRIVATE;
/* Visual properties of a HEADER widget. */
typedef struct {
  const GUI_FONT * pFont;
  GUI_COLOR BkColor;
  GUI_COLOR TextColor;
  GUI_COLOR ArrowColor; /* color of the sort-direction indicator */
  HEADER_SKIN_PRIVATE SkinPrivate;
} HEADER_PROPS;
/* Full widget object; Widget must stay the first member (emWin widget layout). */
typedef struct {
  WIDGET Widget;
  HEADER_PROPS Props;
  WIDGET_SKIN const * pWidgetSkin;
  GUI_ARRAY Columns; /* array of HEADER_COLUMN items */
  int CapturePosX; /* x position at capture start (column drag/resize) */
  int CaptureItem; /* index of the captured column */
  int ScrollPos;
  int Sel; /* selected column, or negative for none — TODO confirm sentinel */
  int DirIndicatorColumn; /* column showing the direction indicator */
  int DirIndicatorReverse;
  unsigned Fixed;
  U8 DragLimit;
} HEADER_Obj;
/*********************************************************************
*
* Private (module internal) data
*
**********************************************************************
*/
/* Module-internal shared state (defined in the HEADER implementation files). */
extern HEADER_PROPS HEADER__DefaultProps; /* defaults applied to newly created headers */
extern const GUI_CURSOR * HEADER__pDefaultCursor;
extern int HEADER__DefaultBorderH;
extern int HEADER__DefaultBorderV;
extern const WIDGET_SKIN HEADER__SkinClassic;
extern WIDGET_SKIN HEADER__Skin;
extern WIDGET_SKIN const * HEADER__pSkinDefault;
/*********************************************************************
*
* Macros for internal use
*
**********************************************************************
*/
/* With full debug checks enabled, tag each object so handle lookups can be validated. */
#if GUI_DEBUG_LEVEL >= GUI_DEBUG_LEVEL_CHECK_ALL
  #define HEADER_INIT_ID(p) (p->Widget.DebugId = HEADER_ID)
#else
  #define HEADER_INIT_ID(p)
#endif
/* Handle -> object lock: checked variant in debug builds, raw GUI_LOCK_H otherwise. */
#if GUI_DEBUG_LEVEL >= GUI_DEBUG_LEVEL_CHECK_ALL
  HEADER_Obj * HEADER_LockH(HEADER_Handle h);
  #define HEADER_LOCK_H(h)   HEADER_LockH(h)
#else
  #define HEADER_LOCK_H(h)   (HEADER_Obj *)GUI_LOCK_H(h)
#endif
/* Attaches a draw object to the column at Index of the given header. */
void HEADER__SetDrawObj(HEADER_Handle hObj, unsigned Index, GUI_DRAW_HANDLE hDrawObj);
#endif // GUI_WINSUPPORT
#endif // Avoid multiple inclusion
| 1,910
|
1,089
|
/*
Copyright 2011, 2015 Pixplicity, Larva Labs LLC and Google, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Sharp is heavily based on prior work. It was originally forked from
https://github.com/pents90/svg-android
And changes from other forks have been consolidated:
https://github.com/b2renger/svg-android
https://github.com/mindon/svg-android
https://github.com/josefpavlik/svg-android
*/
package com.pixplicity.sharp.imageviewdemo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.jsibbold.zoomage.ZoomageView;
import com.pixplicity.sharp.OnSvgElementListener;
import com.pixplicity.sharp.Sharp;
import com.pixplicity.sharp.SharpDrawable;
import com.pixplicity.sharp.SharpPicture;
import java.util.Random;
/**
 * Demo activity that loads an SVG with Sharp and shows it in a zoomable
 * image view. The options menu toggles between drawing the SVG as a
 * scalable Picture-backed drawable and rasterizing it to a Bitmap first;
 * the button re-renders the SVG with randomized fill colors.
 */
public class SvgDemoActivity extends AppCompatActivity {

    private ZoomageView mImageView;
    private Button mButton;
    private Sharp mSvg;
    // When true, the SVG is rendered into a Bitmap before display instead of
    // being shown as a (scalable) SharpDrawable.
    private boolean mRenderBitmap = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_svg_demo);
        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        mImageView = findViewById(R.id.iv_image);
        mButton = findViewById(R.id.bt_button);
        Sharp.setLogLevel(Sharp.LOG_LEVEL_INFO);
        mSvg = Sharp.loadResource(getResources(), R.raw.cartman);
        // If you want to load typefaces from assets:
        // .withAssets(getAssets());
        // If you want to load an SVG from assets:
        //mSvg = Sharp.loadAsset(getAssets(), "cartman.svg");
        mButton.setOnClickListener(v -> reloadSvg(true));
        reloadSvg(false);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Use the Activity's own inflater rather than constructing a new one.
        getMenuInflater().inflate(R.menu.main, menu);
        menu.findItem(R.id.action_render_bitmap).setOnMenuItemClickListener(menuItem -> {
            mRenderBitmap = !menuItem.isChecked();
            menuItem.setChecked(mRenderBitmap);
            reloadSvg(false);
            return true;
        });
        return true;
    }

    /**
     * Re-renders the SVG and updates the image view and button icon.
     *
     * @param changeColor when true, the fills of the "shirt", "hat" and
     *                    "pants" elements are replaced with random colors.
     */
    private void reloadSvg(final boolean changeColor) {
        // One RNG for the whole pass instead of allocating a new Random for
        // every matching SVG element.
        final Random random = new Random();
        mSvg.setOnElementListener(new OnSvgElementListener() {
            @Override
            public void onSvgStart(@NonNull Canvas canvas,
                                   @Nullable RectF bounds) {
            }

            @Override
            public void onSvgEnd(@NonNull Canvas canvas,
                                 @Nullable RectF bounds) {
            }

            @Override
            public <T> T onSvgElement(@Nullable String id,
                                      @NonNull T element,
                                      @Nullable RectF elementBounds,
                                      @NonNull Canvas canvas,
                                      @Nullable RectF canvasBounds,
                                      @Nullable Paint paint) {
                // Only recolor fill paints of the three named body parts.
                if (changeColor && paint != null && paint.getStyle() == Paint.Style.FILL &&
                        ("shirt".equals(id) || "hat".equals(id) || "pants".equals(id))) {
                    paint.setColor(Color.argb(255, random.nextInt(256),
                            random.nextInt(256), random.nextInt(256)));
                }
                return element;
            }

            @Override
            public <T> void onSvgElementDrawn(@Nullable String id,
                                              @NonNull T element,
                                              @NonNull Canvas canvas,
                                              @Nullable Paint paint) {
            }
        });
        mSvg.getSharpPicture(new Sharp.PictureCallback() {
            @Override
            public void onPictureReady(SharpPicture picture) {
                Drawable drawable = picture.getDrawable();
                if (mRenderBitmap) {
                    // Create a bitmap with a size that is somewhat arbitrarily determined by SharpDrawable.
                    // This will no doubt look bad when scaled up, so perhaps a different dimension would be used in practice.
                    // Guard against non-positive intrinsic sizes before allocating.
                    int width = Math.max(1, drawable.getIntrinsicWidth());
                    int height = Math.max(1, drawable.getIntrinsicHeight());
                    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                    // Draw SharpDrawable onto this bitmap
                    Canvas canvas = new Canvas(bitmap);
                    drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
                    drawable.draw(canvas);
                    BitmapDrawable bitmapDrawable = new BitmapDrawable(getResources(), bitmap);
                    // You could do some bitmap operations here that aren't supported by Picture
                    //bitmapDrawable.setColorFilter(Color.RED, PorterDuff.Mode.MULTIPLY);
                    //bitmapDrawable.setAlpha(100);
                    // Use the BitmapDrawable instead of the SharpDrawable
                    drawable = bitmapDrawable;
                } else {
                    SharpDrawable.prepareView(mImageView);
                }
                mImageView.setImageDrawable(drawable);
                // We don't want to use the same drawable, as we're specifying a custom size; therefore
                // we call createDrawable() instead of getDrawable()
                int iconSize = getResources().getDimensionPixelSize(R.dimen.icon_size);
                mButton.setCompoundDrawables(
                        picture.createDrawable(mButton, iconSize),
                        null, null, null);
            }
        });
    }
}
| 3,276
|
634
|
<filename>zentral/contrib/monolith/migrations/0029_auto_20170919_1521.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-09-19 15:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Monolith 0029: drop ``Printer.info`` and update ``Printer.name``."""

    # Must be applied after 0028.
    dependencies = [
        ('monolith', '0028_auto_20170919_1416'),
    ]
    operations = [
        # Remove the ``info`` column from the printer table.
        migrations.RemoveField(
            model_name='printer',
            name='info',
        ),
        # Re-declare ``name`` (max_length 128) with a help text for the admin UI.
        migrations.AlterField(
            model_name='printer',
            name='name',
            field=models.CharField(help_text='display name of the printer', max_length=128),
        ),
    ]
| 301
|
1,056
|
<filename>ide/subversion/test/qa-functional/src/org/netbeans/test/subversion/operators/MergeTwoRepoOperator.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.test.subversion.operators;
import org.netbeans.jemmy.operators.*;
/** Class implementing all necessary methods for handling "Merge AnagramGame to..." NbDialog.
*
* @author peter
* @version 1.0
*/
public class MergeTwoRepoOperator extends JDialogOperator {

    /**
     * Creates new MergeTwoRepoOperator that can handle it.
     * Attaches to the dialog whose title contains "Merge".
     */
    public MergeTwoRepoOperator() {
        super("Merge");
    }

    // Lazily-initialized component operators. Components without a caption are
    // located by index into the dialog's component tree, so the indices below
    // are tightly coupled to the dialog layout.
    private JLabelOperator _lblSecondRepositoryFolder;
    private JLabelOperator _lblStartingWithRevision;
    private JLabelOperator _lblEndingWithRevision;
    private JLabelOperator _lblFirstRepositoryFolder;
    private JLabelOperator _lblEmptyMeansRepositoryHEAD;
    private JComboBoxOperator _cboMergeFrom;
    private JComboBoxOperator _cboRepository1;
    private JTextFieldOperator _txtStartRevision;
    private JButtonOperator _btSearch;
    private JTextFieldOperator _txtEndRevision;
    private JButtonOperator _btSearch2;
    private JLabelOperator _lblEmptyMeansRepositoryHEAD2;
    private JButtonOperator _btBrowse1;
    private JButtonOperator _btBrowse2;
    private JLabelOperator _lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders;
    private JLabelOperator _lblMergeFrom;
    private JComboBoxOperator _cboRepository2;
    private JLabelOperator _lblJLabel;
    private JTextFieldOperator _txtRepositoryFolder1;
    private JTextFieldOperator _txtCurrentFolder;
    private JTextFieldOperator _txtRepositoryFolder2;
    private JLabelOperator _lblPreview;
    private JButtonOperator _btMerge;
    private JButtonOperator _btCancel;
    private JButtonOperator _btHelp;

    //******************************
    // Subcomponents definition part
    //******************************

    /** Tries to find "Second Repository Folder:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblSecondRepositoryFolder() {
        if (_lblSecondRepositoryFolder==null) {
            _lblSecondRepositoryFolder = new JLabelOperator(this, "Second Repository Folder");
        }
        return _lblSecondRepositoryFolder;
    }

    /** Tries to find "Starting with Revision:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblStartingWithRevision() {
        if (_lblStartingWithRevision==null) {
            _lblStartingWithRevision = new JLabelOperator(this, "Starting");
        }
        return _lblStartingWithRevision;
    }

    /** Tries to find "Ending with Revision:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblEndingWithRevision() {
        if (_lblEndingWithRevision==null) {
            _lblEndingWithRevision = new JLabelOperator(this, "Ending");
        }
        return _lblEndingWithRevision;
    }

    /** Tries to find "First Repository Folder:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblFirstRepositoryFolder() {
        if (_lblFirstRepositoryFolder==null) {
            _lblFirstRepositoryFolder = new JLabelOperator(this, "First Repository Folder");
        }
        return _lblFirstRepositoryFolder;
    }

    /** Tries to find "(empty means repository HEAD)" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblEmptyMeansRepositoryHEAD() {
        if (_lblEmptyMeansRepositoryHEAD==null) {
            _lblEmptyMeansRepositoryHEAD = new JLabelOperator(this, "(empty means repository HEAD)");
        }
        return _lblEmptyMeansRepositoryHEAD;
    }

    /** Tries to find null JComboBox in this dialog (third combo box, index 2).
     * @return JComboBoxOperator
     */
    public JComboBoxOperator cboMergeFrom() {
        if (_cboMergeFrom==null) {
            _cboMergeFrom = new JComboBoxOperator(this, 2);
        }
        return _cboMergeFrom;
    }

    /** Tries to find null JComboBox in this dialog (second combo box, index 1).
     * @return JComboBoxOperator
     */
    public JComboBoxOperator cboRepository1() {
        if (_cboRepository1==null) {
            _cboRepository1 = new JComboBoxOperator(this, 1);
        }
        return _cboRepository1;
    }

    /** Tries to find null JTextField in this dialog (index 2).
     * @return JTextFieldOperator
     */
    public JTextFieldOperator txtStartRevision() {
        if (_txtStartRevision==null) {
            _txtStartRevision = new JTextFieldOperator(this, 2);
        }
        return _txtStartRevision;
    }

    /** Tries to find "Search..." JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btSearch() {
        if (_btSearch==null) {
            _btSearch = new JButtonOperator(this, "Search...");
        }
        return _btSearch;
    }

    /** Tries to find null JTextField in this dialog (index 3).
     * @return JTextFieldOperator
     */
    public JTextFieldOperator txtEndRevision() {
        if (_txtEndRevision==null) {
            _txtEndRevision = new JTextFieldOperator(this, 3);
        }
        return _txtEndRevision;
    }

    /** Tries to find the second "Search..." JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btSearch2() {
        if (_btSearch2==null) {
            _btSearch2 = new JButtonOperator(this, "Search...", 1);
        }
        return _btSearch2;
    }

    /** Tries to find the second "(empty means repository HEAD)" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblEmptyMeansRepositoryHEAD2() {
        if (_lblEmptyMeansRepositoryHEAD2==null) {
            _lblEmptyMeansRepositoryHEAD2 = new JLabelOperator(this, "(empty means repository HEAD)", 1);
        }
        return _lblEmptyMeansRepositoryHEAD2;
    }

    /** Tries to find "Browse..." JButton in this dialog.
     * NOTE(review): folder-1 uses the second "Browse" button (index 1) while
     * folder-2 uses the first — looks swapped; confirm against the dialog layout.
     * @return JButtonOperator
     */
    public JButtonOperator btBrowseRepositoryFolder1() {
        if (_btBrowse1==null) {
            _btBrowse1 = new JButtonOperator(this, "Browse", 1);
        }
        return _btBrowse1;
    }

    /** Tries to find "Browse..." JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btBrowseRepositoryFolder2() {
        if (_btBrowse2==null) {
            _btBrowse2 = new JButtonOperator(this, "Browse");
        }
        return _btBrowse2;
    }

    /** Tries to find "Merge into local folder changes between two repository folders." JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders() {
        if (_lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders==null) {
            _lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders = new JLabelOperator(this, "Merge into local folder changes between two repository folders.");
        }
        return _lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders;
    }

    /** Tries to find "Merge from:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblMergeFrom() {
        if (_lblMergeFrom==null) {
            _lblMergeFrom = new JLabelOperator(this, "Merge from:");
        }
        return _lblMergeFrom;
    }

    /** Tries to find null JComboBox in this dialog (first combo box).
     * @return JComboBoxOperator
     */
    public JComboBoxOperator cboRepository2() {
        if (_cboRepository2==null) {
            _cboRepository2 = new JComboBoxOperator(this);
        }
        return _cboRepository2;
    }

    /** Tries to find null JLabel in this dialog (index 8).
     * @return JLabelOperator
     */
    public JLabelOperator lblJLabel() {
        if (_lblJLabel==null) {
            _lblJLabel = new JLabelOperator(this, 8);
        }
        return _lblJLabel;
    }

    /** Tries to find null JTextField in this dialog (index 4).
     * @return JTextFieldOperator
     */
    public JTextFieldOperator txtRepositoryFolder1() {
        if (_txtRepositoryFolder1==null) {
            _txtRepositoryFolder1 = new JTextFieldOperator(this, 4);
        }
        return _txtRepositoryFolder1;
    }

    /** Tries to find null JTextField in this dialog (index 5).
     * @return JTextFieldOperator
     */
    public JTextFieldOperator txtCurrentFolder() {
        if (_txtCurrentFolder==null) {
            _txtCurrentFolder = new JTextFieldOperator(this, 5);
        }
        return _txtCurrentFolder;
    }

    /** Tries to find null JTextField in this dialog (index 6).
     * @return JTextFieldOperator
     */
    public JTextFieldOperator txtRepositoryFolder2() {
        if (_txtRepositoryFolder2==null) {
            _txtRepositoryFolder2 = new JTextFieldOperator(this, 6);
        }
        return _txtRepositoryFolder2;
    }

    /** Tries to find "Preview:" JLabel in this dialog.
     * @return JLabelOperator
     */
    public JLabelOperator lblPreview() {
        if (_lblPreview==null) {
            _lblPreview = new JLabelOperator(this, "Preview:");
        }
        return _lblPreview;
    }

    /** Tries to find "Merge" JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btMerge() {
        if (_btMerge==null) {
            _btMerge = new JButtonOperator(this, "Merge");
        }
        return _btMerge;
    }

    /** Tries to find "Cancel" JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btCancel() {
        if (_btCancel==null) {
            _btCancel = new JButtonOperator(this, "Cancel");
        }
        return _btCancel;
    }

    /** Tries to find "Help" JButton in this dialog.
     * @return JButtonOperator
     */
    public JButtonOperator btHelp() {
        if (_btHelp==null) {
            _btHelp = new JButtonOperator(this, "Help");
        }
        return _btHelp;
    }

    //****************************************
    // Low-level functionality definition part
    //****************************************

    /**
     * returns selected item for cboMergeFrom
     *
     * @return String item
     */
    public String getSelectedMergeFrom() {
        return cboMergeFrom().getSelectedItem().toString();
    }

    /**
     * selects item for cboMergeFrom
     *
     * @param item String item
     */
    public void selectMergeFrom(String item) {
        cboMergeFrom().selectItem(item);
    }

    /**
     * types text for cboMergeFrom
     *
     * @param text String text
     */
    public void setMergeFrom(String text) {
        cboMergeFrom().clearText();
        cboMergeFrom().typeText(text);
    }

    /**
     * returns selected item for cboRepository1
     *
     * @return String item
     */
    public String getSelectedRepositoryFolder1() {
        return cboRepository1().getSelectedItem().toString();
    }

    /** Returns the text currently in the editable field of cboRepository1. */
    public String getRepositoryFolder1() {
        return cboRepository1().getEditor().getItem().toString();
    }

    /**
     * selects item for cboRepository1
     *
     * @param item String item
     */
    public void selectRepositoryFolder1(String item) {
        cboRepository1().selectItem(item);
    }

    /**
     * types text for cboRepository1
     *
     * @param text String text
     */
    public void setRepositoryFolder1(String text) {
        cboRepository1().clearText();
        cboRepository1().typeText(text);
    }

    /**
     * gets text for txtStartRevision
     *
     * @return String text
     */
    public String getStartRevision() {
        return txtStartRevision().getText();
    }

    /**
     * sets text for txtStartRevision
     *
     * @param text String text
     */
    public void setStartRevision(String text) {
        txtStartRevision().clearText();
        txtStartRevision().typeText(text);
    }

    /** clicks on "Search..." JButton
     */
    public void search() {
        btSearch().push();
    }

    /**
     * gets text for txtEndRevision
     *
     * @return String text
     */
    public String getEndRevision() {
        return txtEndRevision().getText();
    }

    /**
     * sets text for txtEndRevision
     *
     * @param text String text
     */
    public void setEndRevision(String text) {
        txtEndRevision().clearText();
        txtEndRevision().typeText(text);
    }

    /** clicks on the second "Search..." JButton
     */
    public void search2() {
        btSearch2().push();
    }

    /** clicks on "Browse..." JButton (non-blocking) and returns the opened browser.
     */
    public RepositoryBrowserOperator browseRepositoryFolder1() {
        btBrowseRepositoryFolder1().pushNoBlock();
        return new RepositoryBrowserOperator();
    }

    /** clicks on "Browse..." JButton (non-blocking) and returns the opened browser.
     */
    public RepositoryBrowserOperator browseRepositoryFolder2() {
        btBrowseRepositoryFolder2().pushNoBlock();
        return new RepositoryBrowserOperator();
    }

    /**
     * returns selected item for cboRepository2
     *
     * @return String item
     */
    public String getSelectedRepositoryFolder2() {
        return cboRepository2().getSelectedItem().toString();
    }

    /** Returns the text currently in the editable field of cboRepository2. */
    public String getRepositoryFolder2() {
        return cboRepository2().getEditor().getItem().toString();
    }

    /**
     * selects item for cboRepository2
     *
     * @param item String item
     */
    public void selectRepositoryFolder2(String item) {
        cboRepository2().selectItem(item);
    }

    /**
     * types text for cboRepository2
     *
     * @param text String text
     */
    public void setRepositoryFolder2(String text) {
        cboRepository2().clearText();
        cboRepository2().typeText(text);
    }

    /**
     * gets text for txtRepositoryFolder1
     *
     * @return String text
     */
    public String getTxtRepositoryFolder1() {
        return txtRepositoryFolder1().getText();
    }

    /**
     * gets text for txtCurrentFolder
     *
     * @return String text
     */
    public String getTxtCurrentFolder() {
        return txtCurrentFolder().getText();
    }

    /**
     * gets text for txtRepositoryFolder2
     *
     * @return String text
     */
    public String getTxtRepositoryFolder2() {
        return txtRepositoryFolder2().getText();
    }

    /** clicks on "Merge" JButton
     */
    public void merge() {
        btMerge().push();
    }

    /** clicks on "Cancel" JButton
     */
    public void cancel() {
        btCancel().push();
    }

    /** clicks on "Help" JButton
     */
    public void help() {
        btHelp().push();
    }

    //*****************************************
    // High-level functionality definition part
    //*****************************************

    /**
     * Performs verification of MergeTwoRepoOperator by accessing all its components.
     */
    public void verify() {
        lblSecondRepositoryFolder();
        lblStartingWithRevision();
        lblEndingWithRevision();
        lblFirstRepositoryFolder();
        lblEmptyMeansRepositoryHEAD();
        cboMergeFrom();
        cboRepository1();
        txtStartRevision();
        btSearch();
        txtEndRevision();
        btSearch2();
        lblEmptyMeansRepositoryHEAD2();
        btBrowseRepositoryFolder1();
        btBrowseRepositoryFolder2();
        lblMergeIntoLocalFolderChangesBetweenTwoRepositoryFolders();
        lblMergeFrom();
        cboRepository2();
        lblJLabel();
        txtRepositoryFolder1();
        txtCurrentFolder();
        txtRepositoryFolder2();
        lblPreview();
        btMerge();
        btCancel();
        btHelp();
    }
}
| 6,570
|
6,044
|
<filename>cme/protocols/mssql/db_navigator.py
from cme.helpers.misc import validate_ntlm
from cme.cmedb import DatabaseNavigator
class navigator(DatabaseNavigator):
def display_creds(self, creds):
data = [['CredID', 'Admin On', 'CredType', 'Domain', 'UserName', 'Password']]
for cred in creds:
credID = cred[0]
domain = cred[1]
username = cred[2]
password = cred[3]
credtype = cred[4]
# pillaged_from = cred[5]
links = self.db.get_admin_relations(userID=credID)
data.append([credID, str(len(links)) + ' Host(s)', credtype, domain, username, password])
self.print_table(data, title='Credentials')
def display_hosts(self, hosts):
data = [['HostID', 'Admins', 'IP', 'Hostname', 'Domain', 'OS', 'DB Instances']]
for host in hosts:
hostID = host[0]
ip = host[1]
hostname = host[2]
domain = host[3]
os = host[4]
instances = host[5]
links = self.db.get_admin_relations(hostID=hostID)
data.append([hostID, str(len(links)) + ' Cred(s)', ip, hostname, domain, os, instances])
self.print_table(data, title='Hosts')
def do_hosts(self, line):
filterTerm = line.strip()
if filterTerm == "":
hosts = self.db.get_computers()
self.display_hosts(hosts)
else:
hosts = self.db.get_computers(filterTerm=filterTerm)
if len(hosts) > 1:
self.display_hosts(hosts)
elif len(hosts) == 1:
data = [['HostID', 'IP', 'Hostname', 'Domain', 'OS']]
hostIDList = []
for host in hosts:
hostID = host[0]
hostIDList.append(hostID)
ip = host[1]
hostname = host[2]
domain = host[3]
os = host[4]
data.append([hostID, ip, hostname, domain, os])
self.print_table(data, title='Host(s)')
data = [['CredID', 'CredType', 'Domain', 'UserName', 'Password']]
for hostID in hostIDList:
links = self.db.get_admin_relations(hostID=hostID)
for link in links:
linkID, credID, hostID = link
creds = self.db.get_credentials(filterTerm=credID)
for cred in creds:
credID = cred[0]
domain = cred[1]
username = cred[2]
password = <PASSWORD>[3]
credtype = cred[4]
# pillaged_from = cred[5]
data.append([credID, credtype, domain, username, password])
self.print_table(data, title='Credential(s) with Admin Access')
def do_creds(self, line):
filterTerm = line.strip()
if filterTerm == "":
creds = self.db.get_credentials()
self.display_creds(creds)
elif filterTerm.split()[0].lower() == "add":
args = filterTerm.split()[1:]
if len(args) == 3:
domain, username, password = args
if validate_ntlm(password):
self.db.add_credential("hash", domain, username, password)
else:
self.db.add_credential("plaintext", domain, username, password)
else:
print("[!] Format is 'add domain username password")
return
elif filterTerm.split()[0].lower() == "remove":
args = filterTerm.split()[1:]
if len(args) != 1:
print("[!] Format is 'remove <credID>'")
return
else:
self.db.remove_credentials(args)
self.db.remove_links(credIDs=args)
elif filterTerm.split()[0].lower() == "plaintext":
creds = self.db.get_credentials(credtype="plaintext")
self.display_creds(creds)
elif filterTerm.split()[0].lower() == "hash":
creds = self.db.get_credentials(credtype="hash")
self.display_creds(creds)
else:
creds = self.db.get_credentials(filterTerm=filterTerm)
data = [['CredID', 'CredType', 'Domain', 'UserName', 'Password']]
credIDList = []
for cred in creds:
credID = cred[0]
credIDList.append(credID)
credType = cred[1]
domain = cred[2]
username = cred[3]
password = cred[4]
data.append([credID, credType, domain, username, password])
self.print_table(data, title='Credential(s)')
data = [['HostID', 'IP', 'Hostname', 'Domain', 'OS']]
for credID in credIDList:
links = self.db.get_admin_relations(userID=credID)
for link in links:
linkID, credID, hostID = link
hosts = self.db.get_computers(hostID)
for host in hosts:
hostID = host[0]
ip = host[1]
hostname = host[2]
domain = host[3]
os = host[4]
data.append([hostID, ip, hostname, domain, os])
self.print_table(data, title='Admin Access to Host(s)')
def complete_hosts(self, text, line, begidx, endidx):
    "Tab-complete 'hosts' commands."
    commands = ["add", "remove"]
    # Everything typed after the command name so far.
    mline = line.partition(' ')[2]
    # cmd passes only the last token as `text`; trim the already-typed prefix
    # so the returned completions line up with it.
    offs = len(mline) - len(text)
    return [s[offs:] for s in commands if s.startswith(mline)]
def complete_creds(self, text, line, begidx, endidx):
    "Tab-complete 'creds' commands."
    candidates = ("add", "remove", "hash", "plaintext")
    # Everything typed after the command name so far.
    typed = line.partition(' ')[2]
    # cmd hands us only the last token as `text`; strip the prefix that is
    # already on the line so completions align with it.
    trim = len(typed) - len(text)
    matches = []
    for word in candidates:
        if word.startswith(typed):
            matches.append(word[trim:])
    return matches
| 3,338
|
13,111
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.config.group.openapi;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import org.apache.skywalking.oap.server.library.util.StringFormatGroup;
/**
 * Endpoint grouping rules derived from OpenAPI definitions.
 *
 * <p>Endpoint names are resolved in two steps: an exact-match lookup per service
 * ({@code directLookup}) and, failing that, regex-based rules bucketed by the
 * endpoint's HTTP method plus first path segment ({@code groupedRules}).
 */
public class EndpointGroupingRule4Openapi {
    /** serviceName -> (endpointName -> endpointGroupName): exact-match lookups. */
    private final Map<String, Map<String, String>> directLookup = new HashMap<>();

    /** serviceName -> (rulesGroupKey -> rules): regex rules bucketed by first path segment. */
    @Getter
    private final Map<String, Map<String, StringFormatGroup>> groupedRules = new HashMap<>();

    /** Registers an exact endpoint-name to group-name mapping for a service. */
    void addDirectLookup(String serviceName, String endpointName, String endpointGroupName) {
        directLookup.computeIfAbsent(serviceName, name -> new HashMap<>())
                    .put(endpointName, endpointGroupName);
    }

    /** Registers a regex rule; rules are bucketed by the regex's method + first path segment. */
    void addGroupedRule(String serviceName, String endpointGroupName, String ruleRegex) {
        String rulesGroupKey = getGroupedRulesKey(ruleRegex);
        groupedRules.computeIfAbsent(serviceName, name -> new HashMap<>())
                    .computeIfAbsent(rulesGroupKey, name -> new StringFormatGroup())
                    .addRule(endpointGroupName, ruleRegex);
    }

    /**
     * Resolves the group name for an endpoint: exact lookup first, then the regex
     * bucket matching the endpoint's first path segment.
     *
     * @return a matching result with the group name, or a non-matching result
     *         echoing the original endpoint name when no rule applies
     */
    public StringFormatGroup.FormatResult format(String service, String endpointName) {
        Map<String, String> endpointNameLookup = directLookup.get(service);
        if (endpointNameLookup != null) {
            String groupName = endpointNameLookup.get(endpointName);
            if (groupName != null) {
                return new StringFormatGroup.FormatResult(true, groupName, endpointName);
            }
        }

        Map<String, StringFormatGroup> rules = groupedRules.get(service);
        if (rules != null) {
            final StringFormatGroup stringFormatGroup = rules.get(getGroupedRulesKey(endpointName));
            if (stringFormatGroup != null) {
                return stringFormatGroup.format(endpointName);
            }
        }

        return new StringFormatGroup.FormatResult(false, endpointName, endpointName);
    }

    /** Sorts the rule buckets of every registered service. */
    void sortRulesAll() {
        groupedRules.keySet().forEach(this::sortRulesByService);
    }

    /** Sorts all rule buckets of one service, most specific pattern first. */
    void sortRulesByService(String serviceName) {
        Map<String, StringFormatGroup> rules = groupedRules.get(serviceName);
        if (rules != null) {
            rules.values().forEach(
                group -> group.sortRules(new EndpointGroupingRule4Openapi.EndpointGroupingRulesComparator()));
        }
    }

    /**
     * Bucket key: HTTP method plus first path segment,
     * e.g. {@code "GET:/products/([^/]+)"} -> {@code "GET:/products"}.
     */
    String getGroupedRulesKey(String string) {
        String[] ss = string.split("/");
        if (ss.length == 1) { //eg. POST:/
            return ss[0] + "/";
        }
        if (ss.length > 1) {
            return ss[0] + "/" + ss[1];
        }
        return "/";
    }

    /**
     * Orders patterns so that those with fewer path variables come first and,
     * for an equal variable count, the longer (more literal, hence more
     * specific) pattern comes first.
     */
    static class EndpointGroupingRulesComparator implements Comparator<StringFormatGroup.PatternRule> {
        /** Escaped form of the path-variable regex fragment {@code ([^/]+)}. */
        private static final String VAR_PATTERN = "\\(\\[\\^\\/\\]\\+\\)";

        @Override
        public int compare(final StringFormatGroup.PatternRule rule1, final StringFormatGroup.PatternRule rule2) {
            String pattern1 = rule1.getPattern().pattern();
            String pattern2 = rule2.getPattern().pattern();

            int varCompare = Integer.compare(getPatternVarsCount(pattern1), getPatternVarsCount(pattern2));
            if (varCompare != 0) {
                return varCompare;
            }
            // Equal variable count: longer pattern sorts first (descending length).
            return Integer.compare(getPatternLength(pattern2), getPatternLength(pattern1));
        }

        private int getPatternVarsCount(String pattern) {
            // Pad with commas so split() also counts variables at the string edges.
            return ",".concat(pattern).concat(",").split(VAR_PATTERN).length - 1;
        }

        private int getPatternLength(String pattern) {
            // Collapse each variable to a single char so only literal text is measured.
            return pattern.replaceAll(VAR_PATTERN, "#").length();
        }
    }
}
| 1,787
|
440
|
{
"test_on_lines_delete": [
{
"input": "hello-world.txt",
"expected": "hello-world_on_lines_ending_with_o_or_d_delete_o.txt",
"command": ["on", "lines", "ending", "with", "(o|d)", "delete", "o"]
}
],
"test_on_lines_replace": [
{
"input": "hello-world.txt",
"expected": "hello-world_on_lines_ending_with_hello_replace_o_with_().txt",
"command": ["on", "lines", "ending", "with", "hello", "replace", "o", "with", "()"]
}
],
"test_on_lines_prepend": [
{
"input": "hello-world.txt",
"expected": "hello-world_on_lines_ending_with_Hello_prepend_world_with_O.txt",
"command": ["on", "lines", "ending", "with", "Hello", "prepend", "world", "with", "O"]
}
],
"test_on_lines_append": [
{
"input": "hello-world.txt",
"expected": "hello-world_on_lines_ending_with_Hello_append_l_with_l.txt",
"command": ["on", "lines", "ending", "with", "Hello", "append", "l", "with", "l"]
}
],
"test_on_lines_wrap": [
{
"input": "hello-world.txt",
"expected": "hello-world_on_lines_ending_with_d_wrap_o_with_!.txt",
"command": ["on", "lines", "ending", "with", "d", "wrap", "o", "with", "!"]
}
]
}
| 555
|
9,156
|
<reponame>turtlequeue/pulsar
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl.schema.generic;
import org.apache.pulsar.client.api.schema.Field;
import org.apache.pulsar.client.api.schema.GenericRecord;
import org.apache.pulsar.client.api.schema.GenericSchema;
import org.apache.pulsar.client.impl.schema.AbstractStructSchema;
import org.apache.pulsar.common.schema.SchemaInfo;
import java.util.List;
/**
 * A minimal abstract generic schema representation that supports
 * non-Avro-based generic schemas.
 */
abstract class AbstractGenericSchema extends AbstractStructSchema<GenericRecord> implements GenericSchema<GenericRecord> {

    // Field descriptors of this schema; expected to be populated by concrete subclasses.
    protected List<Field> fields;

    // the flag controls whether to use the provided schema as reader schema
    // to decode the messages. In `AUTO_CONSUME` mode, setting this flag to `false`
    // allows decoding the messages using the schema associated with the messages.
    protected final boolean useProvidedSchemaAsReaderSchema;

    protected AbstractGenericSchema(SchemaInfo schemaInfo,
                                    boolean useProvidedSchemaAsReaderSchema) {
        super(schemaInfo);
        this.useProvidedSchemaAsReaderSchema = useProvidedSchemaAsReaderSchema;
    }

    @Override
    public List<Field> getFields() {
        return fields;
    }
}
| 635
|
348
|
{"nom":"Saint-Laurent-du-Var","circ":"6ème circonscription","dpt":"Alpes-Maritimes","inscrits":23185,"abs":14222,"votants":8963,"blancs":936,"nuls":0,"exp":8027,"res":[{"nuance":"LR","nom":"<NAME>","voix":4578},{"nuance":"MDM","nom":"<NAME>","voix":3449}]}
| 106
|
1,425
|
<filename>gremlin-core/src/test/java/org/apache/tinkerpop/gremlin/process/traversal/strategy/verification/ComputerVerificationStrategyTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.strategy.verification;
import org.apache.tinkerpop.gremlin.process.computer.traversal.step.map.TraversalVertexProgramStep;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Translator;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.translator.GroovyTranslator;
import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.util.EmptyTraversal;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.max;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.min;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.out;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.outE;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.sum;
import static org.junit.Assert.fail;
/**
 * Parameterized test that checks which traversals {@link ComputerVerificationStrategy}
 * accepts or rejects when running under a graph computer.
 *
 * @author <NAME> (http://markorodriguez.com)
 */
@RunWith(Parameterized.class)
public class ComputerVerificationStrategyTest {

    // Used only to render a readable traversal description in failure messages.
    private static final Translator.ScriptTranslator translator = GroovyTranslator.of("__");

    @Parameterized.Parameters(name = "{0}")
    public static Iterable<Object[]> data() {
        return Arrays.asList(new Object[][]{
                // illegal
                {__.where(__.out().values("name")), false},
                {__.local(out().out()), false},
                // legal
                {__.values("age").union(max(), min(), sum()), true},
                {__.count().sum(), true},
                {__.where("a", P.eq("b")).out(), true},
                {__.where(__.and(outE("knows"), outE("created"))).values("name"), true},
        });
    }

    // The traversal under test.
    @Parameterized.Parameter(value = 0)
    public Traversal.Admin traversal;

    // Whether the strategy is expected to accept the traversal.
    @Parameterized.Parameter(value = 1)
    public boolean legal;

    @Test
    public void shouldBeVerifiedIllegal() {
        final String repr = translator.translate(traversal.getBytecode()).getScript();
        final TraversalStrategies strategies = new DefaultTraversalStrategies();
        strategies.addStrategies(ComputerVerificationStrategy.instance());
        // Pretend the traversal is a child of a vertex program step so the
        // strategy treats it as computer-bound.
        this.traversal.asAdmin().setParent(new TraversalVertexProgramStep(EmptyTraversal.instance(), EmptyTraversal.instance())); // trick it
        this.traversal.asAdmin().setStrategies(strategies);
        try {
            this.traversal.asAdmin().applyStrategies();
            if (!this.legal)
                fail("The traversal should not be allowed: " + repr);
        } catch (final VerificationException ise) {
            if (this.legal)
                fail("The traversal should be allowed: " + repr);
        }
    }
}
| 1,454
|
336
|
<gh_stars>100-1000
from .tasks import task_deindex_instance, task_index_instance
def handler_factory_deindex_instance(search_model):
    """Build a signal handler that queues async de-indexing of the saved instance.

    NOTE(review): `search_model` is accepted but unused in the handler body —
    presumably kept for registration symmetry; confirm with the caller.
    """
    def handler_deindex_instance(sender, **kwargs):
        obj = kwargs['instance']
        meta = obj._meta
        task_deindex_instance.apply_async(
            kwargs={
                'app_label': meta.app_label,
                'model_name': meta.model_name,
                'object_id': obj.pk,
            }
        )

    return handler_deindex_instance
def handler_index_instance(sender, **kwargs):
    """Queue async (re)indexing of the instance carried by the signal."""
    obj = kwargs['instance']
    meta = obj._meta
    task_index_instance.apply_async(
        kwargs={
            'app_label': meta.app_label,
            'model_name': meta.model_name,
            'object_id': obj.pk,
        }
    )
def handler_factory_index_instance_m2m(model):
    """Build an m2m_changed handler that re-indexes both ends of the relation.

    The handler fires only for 'post_add'/'post_remove' actions and only when
    `model` is one of the two related models; it then queues indexing of the
    triggering instance and of every related object in `pk_set`.
    """
    def handler_index_instance_m2m(sender, **kwargs):
        instance = kwargs['instance']
        if kwargs.get('action') not in ('post_add', 'post_remove'):
            return

        related_model = kwargs['model']
        if model != instance._meta.model and model != related_model._meta.model:
            return

        # Re-index the side of the relation the signal fired on ...
        task_index_instance.apply_async(
            kwargs={
                'app_label': instance._meta.app_label,
                'model_name': instance._meta.model_name,
                'object_id': instance.pk,
            }
        )
        # ... and every object on the other side that was added/removed.
        for pk in kwargs['pk_set']:
            task_index_instance.apply_async(
                kwargs={
                    'app_label': related_model._meta.app_label,
                    'model_name': related_model._meta.model_name,
                    'object_id': pk,
                }
            )

    return handler_index_instance_m2m
| 976
|
381
|
<gh_stars>100-1000
package com.tngtech.jgiven.example.projects.testng;
import com.tngtech.jgiven.Stage;
import com.tngtech.jgiven.annotation.ScenarioState;
/**
 * JGiven "when" stage: consumes the shared {@code message} produced by a
 * previous stage and publishes {@code result} for the following stage.
 */
class WhenStage extends Stage<WhenStage> {

    // Provided by a preceding stage; must be present before handle_message() runs.
    @ScenarioState(required = true)
    String message;

    // Published for a subsequent ("then") stage to assert on.
    @ScenarioState
    String result;

    public void handle_message() {
        result = message + " TestNG!";
    }
}
| 141
|
1,056
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.lib.lexer;
import java.util.List;
import org.netbeans.api.lexer.PartType;
import org.netbeans.api.lexer.TokenId;
import org.netbeans.lib.editor.util.ArrayUtilities;
import org.netbeans.spi.lexer.LexerInput;
import org.netbeans.lib.lexer.token.AbstractToken;
import org.netbeans.lib.lexer.token.JoinToken;
import org.netbeans.lib.lexer.token.PartToken;
import org.netbeans.spi.lexer.TokenPropertyProvider;
/**
* Lexer input operation over multiple joined sections (embedded token lists).
* <br/>
* It produces regular tokens (to be added directly into ETL represented by
* {@link #activeTokenList()} and also special {@link #JoinToken} instances
* in case a token spans boundaries of multiple ETLs.
* <br/>
* It can either work over JoinTokenList directly or, during a modification,
* it simulates that certain token lists are already removed/added to underlying token list.
* <br/>
*
* {@link #recognizedTokenLastInTokenList()} gives information whether the lastly
* produced token ends right at boundary of the activeTokenList.
*
* @author <NAME>
* @version 1.00
*/
public class JoinLexerInputOperation<T extends TokenId> extends LexerInputOperation<T> {

    // Backing text of the whole input source; part tokens index directly into it.
    CharSequence inputSourceText;

    private TokenListText readText; // For servicing read()

    // Lazily created wider view used by readExisting() for offsets outside
    // readText's current bounds.
    private TokenListText readExistingText;

    /**
     * Token list in which the last recognized token started.
     */
    private EmbeddedTokenList<?,T> activeTokenList;

    /**
     * Index of activeTokenList in JTL.
     */
    private int activeTokenListIndex;

    /**
     * End offset of the active token list.
     */
    private int activeTokenListEndOffset;

    /**
     * Real token's start offset used to derive the token's offset in ETL.
     * Since tokenStartOffset is affected by TokenListList.readOffsetShift
     * it cannot be used for this purpose.
     */
    private int realTokenStartOffset;

    private boolean recognizedTokenJoined; // Whether recognized token will consist of parts

    // Number of token lists advanced past since the counter was last cleared.
    private int skipTokenListCount;

    public JoinLexerInputOperation(JoinTokenList<T> joinTokenList, int relexJoinIndex, Object lexerRestartState,
            int activeTokenListIndex, int relexOffset
    ) {
        super(joinTokenList, relexJoinIndex, lexerRestartState);
        this.inputSourceText = joinTokenList.inputSourceText();
        this.activeTokenListIndex = activeTokenListIndex;
        tokenStartOffset = relexOffset;
        readOffset = relexOffset;
    }

    public final void init() {
        // Following code uses tokenList() method overriden in MutableJoinLexerInputOperation
        // so the following code would fail when placed in constructor since the constructor of MJLIO would not yet run.
        fetchActiveTokenList();
        // readOffset contains relex-offset. Skip empty parts (ETLs) to obtain
        // correct start offset of first lexed token
        readText = new TokenListText(activeTokenListIndex);
        // Assign realTokenStartOffset after fetchActiveTokenList() since it would overwrite it
        realTokenStartOffset = readOffset;
    }

    /**
     * Get active ETL into which the last produced token should be added.
     * For join tokens there is an ETL into which a last part of JT should be added.
     */
    public EmbeddedTokenList<?,T> activeTokenList() {
        return activeTokenList;
    }

    /**
     * Get index of active ETL into which the last produced token should be added.
     * For join tokens there is an index of the last ETL into which a last part of JT should be added.
     */
    public int activeTokenListIndex() {
        return activeTokenListIndex;
    }

    public int skipTokenListCount() {
        return skipTokenListCount;
    }

    public void clearSkipTokenListCount() {
        skipTokenListCount = 0;
    }

    /**
     * True if the last returned token is last in {@link #activeTokenList()}.
     * For join tokens this applies to the last part of join token.
     */
    public boolean recognizedTokenLastInTokenList() {
        // realTokenStartOffset is set to the end of last recognized token
        return (realTokenStartOffset == activeTokenListEndOffset);
    }

    @Override
    public int lastTokenEndOffset() {
        return realTokenStartOffset;
    }

    @Override
    public int read(int offset) { // index >= 0 is guaranteed by contract
        return readText.read(offset);
    }

    @Override
    public char readExisting(int offset) {
        if (readText.isInBounds(offset)) {
            return readText.inBoundsChar(offset);
        }
        if (readExistingText == null) {
            readExistingText = new TokenListText(readText);
        }
        return readExistingText.existingChar(offset);
    }

    @Override
    public void assignTokenLength(int tokenLength) {
        super.assignTokenLength(tokenLength);
        // Check whether activeTokenList needs to be changed due to various flags
        if (recognizedTokenLastInTokenList()) { // Advance to next token list
            // Since this is done when recognizing a next token it should be ok when recognizing
            // last token in the last ETL (it should not go beyond last ETL).
            do {
                // Cannot check activeTokenList.joinInfo == null since for token list updater
                // the lexing does not directly modify the ETLs.
                skipTokenListCount++;
                activeTokenListIndex++;
                fetchActiveTokenList();
            } while (realTokenStartOffset == activeTokenListEndOffset); // Skip empty ETLs
        }
        // Advance to end of currently recognized token
        realTokenStartOffset += tokenLength;
        // Joined token past ETL's boundary
        recognizedTokenJoined = (realTokenStartOffset > activeTokenListEndOffset);
    }

    // Re-reads activeTokenList's boundaries; NOTE: resets realTokenStartOffset
    // to the ETL's start offset as a side effect.
    private void fetchActiveTokenList() {
        activeTokenList = tokenList(activeTokenListIndex);
        activeTokenList.updateModCount();
        realTokenStartOffset = activeTokenList.startOffset();
        activeTokenListEndOffset = activeTokenList.endOffset();
    }

    public EmbeddedTokenList<?,T> tokenList(int tokenListIndex) { // Also used by JoinTokenListChange
        return ((JoinTokenList<T>) tokenList).tokenList(tokenListIndex);
    }

    protected int tokenListCount() {
        return ((JoinTokenList<T>) tokenList).tokenListCount();
    }

    protected void fillTokenData(AbstractToken<T> token) {
        if (!recognizedTokenJoined) {
            // Subtract tokenLength since this is already advanced to end of token
            token.setRawOffset(realTokenStartOffset - tokenLength);
        }
    }

    @Override
    protected boolean isFlyTokenAllowed() {
        return super.isFlyTokenAllowed() && !recognizedTokenJoined;
    }

    @Override
    protected AbstractToken<T> createDefaultTokenInstance(T id) {
        if (recognizedTokenJoined) {
            return createJoinToken(id, null, PartType.COMPLETE);
        } else { // Regular case
            return super.createDefaultTokenInstance(id);
        }
    }

    @Override
    protected AbstractToken<T> createPropertyTokenInstance(T id,
    TokenPropertyProvider<T> propertyProvider, PartType partType) {
        if (recognizedTokenJoined) {
            return createJoinToken(id, propertyProvider, partType);
        } else { // Regular case
            return super.createPropertyTokenInstance(id, propertyProvider, partType);
        }
    }

    // Builds a JoinToken spanning multiple ETLs out of per-ETL PartTokens;
    // advances activeTokenList(Index) to the ETL holding the last part.
    private AbstractToken<T> createJoinToken(T id,
    TokenPropertyProvider<T> propertyProvider, PartType partType) {
        // Create join token
        // realTokenStartOffset is already advanced by tokenLength so first decrease it
        realTokenStartOffset -= tokenLength;
        WrapTokenId<T> wid = wrapTokenIdCache.plainWid(id);
        JoinToken<T> joinToken = new JoinToken<T>(wid, tokenLength, propertyProvider, partType);
        int joinPartCountEstimate = readText.tokenListIndex - activeTokenListIndex + 1;
        @SuppressWarnings("unchecked")
        PartToken<T>[] parts = new PartToken[joinPartCountEstimate];
        int partLength = activeTokenListEndOffset - realTokenStartOffset;
        PartToken<T> partToken = new PartToken<T>(wid, partLength, propertyProvider, PartType.START, joinToken, 0, 0);
        partToken.setRawOffset(realTokenStartOffset); // realTokenStartOffset already decreased by tokenLength
        parts[0] = partToken;
        int partIndex = 1;
        int partTextOffset = partLength; // Length of created parts so far
        int firstPartTokenListIndex = activeTokenListIndex;
        do {
            activeTokenListIndex++;
            fetchActiveTokenList();
            // realTokenStartOffset set to start activeTokenList
            PartType partPartType;
            // Attempt total ETL's length as partLength
            partLength = activeTokenListEndOffset - realTokenStartOffset;
            if (partLength == 0) {
                continue;
            }
            if (partTextOffset + partLength >= tokenLength) { // Last part
                partLength = tokenLength - partTextOffset;
                // If the partType of the join token is not complete then this will be PartType.MIDDLE
                partPartType = (partType == PartType.START) ? PartType.MIDDLE : PartType.END;
            } else { // Non-last part
                partPartType = PartType.MIDDLE;
            }
            partToken = new PartToken<T>(wid, partLength, propertyProvider, partPartType, joinToken, partIndex, partTextOffset);
            // realTokenStartOffset still points to start of activeTokenList
            partToken.setRawOffset(realTokenStartOffset); // ETL.startOffset() will be subtracted upon addition to ETL
            partTextOffset += partLength;
            parts[partIndex++] = partToken;
        } while (partTextOffset < tokenLength);
        // Update realTokenStartOffset which pointed to start of activeTokenList
        realTokenStartOffset += partLength;
        // Check that the array does not have any extra items
        if (partIndex < parts.length) {
            @SuppressWarnings("unchecked")
            PartToken<T>[] tmp = new PartToken[partIndex];
            System.arraycopy(parts, 0, tmp, 0, partIndex);
            parts = tmp;
        }
        List<PartToken<T>> partList = ArrayUtilities.unmodifiableList(parts);
        joinToken.setJoinedParts(partList, activeTokenListIndex - firstPartTokenListIndex);
        // joinToken.setTokenList() makes no sense - JoinTokenList instances are temporary
        // joinToken.setRawOffset() makes no sense - offset taken from initial part
        return joinToken;
    }

    /**
     * Class for reading of text of subsequent ETLs - it allows to see their text
     * as a consecutive character sequence (inputSourceText is used as a backing char sequence)
     * with an increasing readIndex (it's not decremented after token's recognition).
     */
    final class TokenListText {

        int tokenListIndex;

        int tokenListStartOffset;

        int tokenListEndOffset;

        /**
         * A constant added to readOffset to allow a smoothly increasing reading offset
         * when reading through multiple ETLs with gaps among them.
         * Its value is zero for the first ETL at relexOffset. When going to next TL it should be increased by
         * (tokenList(n+1).getStartOffset() - tokenList(n).getEndOffset()) and similarly
         * for backward move among token lists.
         */
        int readOffsetShift;

        TokenListText(int tokenListIndex) {
            this.tokenListIndex = tokenListIndex;
            EmbeddedTokenList<?,T> etl = tokenList(tokenListIndex);
            etl.updateModCount();
            tokenListStartOffset = etl.startOffset();
            tokenListEndOffset = etl.endOffset();
            readOffsetShift = 0;
        }

        // Copy constructor: snapshot of another text view's position.
        TokenListText(TokenListText text) {
            this.tokenListIndex = text.tokenListIndex;
            this.tokenListStartOffset = text.tokenListStartOffset;
            this.tokenListEndOffset = text.tokenListEndOffset;
            this.readOffsetShift = text.readOffsetShift;
        }

        /**
         * Read next char or return EOF.
         */
        int read(int offset) {
            offset += readOffsetShift;
            if (offset < tokenListEndOffset) {
                if (offset >= tokenListStartOffset) {
                    return inputSourceText.charAt(offset);
                } else { // Goto previous
                    while (true) { // Char should exist
                        offset -= movePreviousTokenList();
                        if (offset >= tokenListStartOffset) { // ETL might be empty
                            return inputSourceText.charAt(offset);
                        }
                    }
                }
            } else { // offset >= tokenListEndOffset
                while (tokenListIndex + 1 < tokenListCount()) {
                    offset += moveNextTokenList();
                    if (offset < tokenListEndOffset) { // ETL might be empty
                        return inputSourceText.charAt(offset);
                    }
                }
                return LexerInput.EOF;
            }
        }

        /**
         * Check whether currently set text covers the given relative index.
         *
         * @param index index in the same metrics as readIndex.
         * @return whether the given index is within current bounds.
         */
        boolean isInBounds(int offset) {
            offset += readOffsetShift;
            return offset >= tokenListStartOffset && offset < tokenListEndOffset;
        }

        /**
         * Get char that was previously verified to be within bounds.
         */
        char inBoundsChar(int offset) {
            offset += readOffsetShift;
            return inputSourceText.charAt(offset);
        }

        char existingChar(int offset) {
            offset += readOffsetShift;
            if (offset < tokenListStartOffset) {
                while (true) { // Char should exist
                    offset -= movePreviousTokenList();
                    if (offset >= tokenListStartOffset) { // ETL might be empty
                        return inputSourceText.charAt(offset);
                    }
                }
            } else if (offset >= tokenListEndOffset) {
                while (true) { // Char should exist
                    offset += moveNextTokenList();
                    if (offset < tokenListEndOffset) { // ETL might be empty
                        return inputSourceText.charAt(offset);
                    }
                }
            }
            // Index within current bounds
            return inputSourceText.charAt(offset);
        }

        private int movePreviousTokenList() {
            tokenListIndex--;
            EmbeddedTokenList etl = tokenList(tokenListIndex);
            etl.updateModCount();
            tokenListEndOffset = etl.endOffset();
            // Decrease offset shift by the size of gap between ETLs
            int shift = tokenListStartOffset - tokenListEndOffset;
            readOffsetShift -= shift;
            // Also shift given offset value
            tokenListStartOffset = etl.startOffset();
            return shift;
        }

        private int moveNextTokenList() {
            tokenListIndex++;
            EmbeddedTokenList etl = tokenList(tokenListIndex);
            etl.updateModCount();
            tokenListStartOffset = etl.startOffset();
            // Increase offset shift by the size of gap between ETLs
            int shift = tokenListStartOffset - tokenListEndOffset;
            readOffsetShift += shift;
            // Also shift given offset value
            tokenListEndOffset = etl.endOffset();
            return shift;
        }

        @Override
        public String toString() {
            return "tlInd=" + tokenListIndex + ", <" + tokenListStartOffset + "," + // NOI18N
            tokenListEndOffset + ">"; // NOI18N
        }

    }

    @Override
    public String toString() {
        return super.toString() + ", realTokenStartOffset=" + realTokenStartOffset + // NOI18N
        ", activeTokenListIndex=" + activeTokenListIndex + // NOI18N
        ", activeTokenListEndOffset=" + activeTokenListEndOffset; // NOI18N
    }

}
| 6,794
|
407
|
<reponame>iuskye/SREWorks
package com.alibaba.tesla.authproxy.model.mapper;
import com.alibaba.tesla.authproxy.model.RoleMenuRelDO;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
 * <p>Title: RoleMenuRelMapper.java</p>
 * <p>Description: Data-access interface for role-menu relations.</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Company: alibaba </p>
 *
 * @author <EMAIL>
 * @version 1.0
 * @date 2017-05-03
 */
@Mapper
public interface RoleMenuRelMapper {

    /**
     * Delete the role-menu relations belonging to the given application.
     *
     * @param appId application ID
     * @return number of affected rows
     */
    int deleteByApp(String appId);

    /**
     * Batch-insert role-menu relations.
     *
     * @param roleMenuRels relations to insert
     * @return number of affected rows
     */
    int batchInsert(List<RoleMenuRelDO> roleMenuRels);
}
| 408
|
14,668
|
<filename>chrome/browser/extensions/blocklist_unittest.cc
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/blocklist.h"
#include "base/bind.h"
#include "base/run_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "chrome/browser/extensions/blocklist_state_fetcher.h"
#include "chrome/browser/extensions/fake_safe_browsing_database_manager.h"
#include "chrome/browser/extensions/test_blocklist.h"
#include "chrome/browser/extensions/test_blocklist_state_fetcher.h"
#include "chrome/browser/extensions/test_extension_prefs.h"
#include "content/public/test/browser_task_environment.h"
#include "extensions/browser/extension_prefs.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace extensions {
namespace {
// Test fixture providing ExtensionPrefs backed by the current task runner.
class BlocklistTest : public testing::Test {
 public:
  BlocklistTest() : test_prefs_(base::ThreadTaskRunnerHandle::Get()) {}

 protected:
  ExtensionPrefs* prefs() { return test_prefs_.prefs(); }

  // Registers a fake extension under |id| and returns its generated id.
  std::string AddExtension(const std::string& id) {
    return test_prefs_.AddExtension(id)->id();
  }

 private:
  content::BrowserTaskEnvironment task_environment_;
  TestExtensionPrefs test_prefs_;
};

// Callback helper: copies the value reported by an async API into |*to|.
template <typename T>
void Assign(T* to, const T& from) {
  *to = from;
}
} // namespace
// GetMalwareIDs must return only ids that were both requested and
// blocklisted; other blocklisted extensions are excluded.
TEST_F(BlocklistTest, OnlyIncludesRequestedIDs) {
  std::string a = AddExtension("a");
  std::string b = AddExtension("b");
  std::string c = AddExtension("c");

  Blocklist blocklist(prefs());
  TestBlocklist tester(&blocklist);
  tester.SetBlocklistState(a, BLOCKLISTED_MALWARE, false);
  tester.SetBlocklistState(b, BLOCKLISTED_MALWARE, false);
  EXPECT_EQ(BLOCKLISTED_MALWARE, tester.GetBlocklistState(a));
  EXPECT_EQ(BLOCKLISTED_MALWARE, tester.GetBlocklistState(b));
  EXPECT_EQ(NOT_BLOCKLISTED, tester.GetBlocklistState(c));

  // Request only {a, c}: b is blocklisted but must not appear in the result.
  std::set<std::string> blocklisted_ids;
  blocklist.GetMalwareIDs(
      {a, c}, base::BindOnce(&Assign<std::set<std::string>>, &blocklisted_ids));
  base::RunLoop().RunUntilIdle();

  EXPECT_EQ((std::set<std::string>{a}), blocklisted_ids);
}
// Blocklist state must track the Safe Browsing database manager's
// enabled/disabled state and its update notifications.
TEST_F(BlocklistTest, SafeBrowsing) {
  std::string a = AddExtension("a");

  Blocklist blocklist(prefs());
  TestBlocklist tester(&blocklist);
  tester.DisableSafeBrowsing();

  EXPECT_EQ(NOT_BLOCKLISTED, tester.GetBlocklistState(a));

  tester.SetBlocklistState(a, BLOCKLISTED_MALWARE, false);
  // The manager is still disabled at this point, so it won't be blocklisted.
  EXPECT_EQ(NOT_BLOCKLISTED, tester.GetBlocklistState(a));

  tester.EnableSafeBrowsing();
  tester.NotifyUpdate();
  base::RunLoop().RunUntilIdle();
  // Now it should be.
  EXPECT_EQ(BLOCKLISTED_MALWARE, tester.GetBlocklistState(a));

  tester.Clear(true);
  // Safe browsing blocklist empty, now enabled.
  EXPECT_EQ(NOT_BLOCKLISTED, tester.GetBlocklistState(a));
}
// Test getting different blocklist states from Blocklist.
TEST_F(BlocklistTest, GetBlocklistStates) {
  Blocklist blocklist(prefs());
  TestBlocklist tester(&blocklist);

  std::string a = AddExtension("a");
  std::string b = AddExtension("b");
  std::string c = AddExtension("c");
  std::string d = AddExtension("d");
  std::string e = AddExtension("e");

  tester.SetBlocklistState(a, BLOCKLISTED_MALWARE, false);
  tester.SetBlocklistState(b, BLOCKLISTED_SECURITY_VULNERABILITY, false);
  tester.SetBlocklistState(c, BLOCKLISTED_CWS_POLICY_VIOLATION, false);
  tester.SetBlocklistState(d, BLOCKLISTED_POTENTIALLY_UNWANTED, false);

  // Two overlapping queries: each must report states only for its own ids.
  Blocklist::BlocklistStateMap states_abc;
  Blocklist::BlocklistStateMap states_bcd;
  blocklist.GetBlocklistedIDs(
      {a, b, c, e},
      base::BindOnce(&Assign<Blocklist::BlocklistStateMap>, &states_abc));
  blocklist.GetBlocklistedIDs(
      {b, c, d, e},
      base::BindOnce(&Assign<Blocklist::BlocklistStateMap>, &states_bcd));
  base::RunLoop().RunUntilIdle();

  EXPECT_EQ(BLOCKLISTED_MALWARE, states_abc[a]);
  EXPECT_EQ(BLOCKLISTED_SECURITY_VULNERABILITY, states_abc[b]);
  EXPECT_EQ(BLOCKLISTED_CWS_POLICY_VIOLATION, states_abc[c]);
  EXPECT_EQ(BLOCKLISTED_SECURITY_VULNERABILITY, states_bcd[b]);
  EXPECT_EQ(BLOCKLISTED_CWS_POLICY_VIOLATION, states_bcd[c]);
  EXPECT_EQ(BLOCKLISTED_POTENTIALLY_UNWANTED, states_bcd[d]);
  // The non-blocklisted extension must not appear at all.
  EXPECT_EQ(0U, states_abc.count(e));
  EXPECT_EQ(0U, states_bcd.count(e));

  // A repeat query for already-known states must be served from cache
  // without issuing new fetch requests.
  int old_request_count = tester.fetcher()->request_count();
  Blocklist::BlocklistStateMap states_ad;
  blocklist.GetBlocklistedIDs(
      {a, d, e},
      base::BindOnce(&Assign<Blocklist::BlocklistStateMap>, &states_ad));
  base::RunLoop().RunUntilIdle();
  EXPECT_EQ(BLOCKLISTED_MALWARE, states_ad[a]);
  EXPECT_EQ(BLOCKLISTED_POTENTIALLY_UNWANTED, states_ad[d]);
  EXPECT_EQ(0U, states_ad.count(e));
  EXPECT_EQ(old_request_count, tester.fetcher()->request_count());
}
// Test both Blocklist and BlocklistStateFetcher by requesting the blocklist
// states, sending fake requests and parsing the responses.
TEST_F(BlocklistTest, FetchBlocklistStates) {
  Blocklist blocklist(prefs());
  // Fake SafeBrowsing database that will report 'a' and 'b' as unsafe.
  scoped_refptr<FakeSafeBrowsingDatabaseManager> blocklist_db(
      new FakeSafeBrowsingDatabaseManager(true));
  Blocklist::ScopedDatabaseManagerForTest scoped_blocklist_db(blocklist_db);
  std::string a = AddExtension("a");
  std::string b = AddExtension("b");
  std::string c = AddExtension("c");
  blocklist_db->Enable();
  blocklist_db->SetUnsafe(a, b);
  // Prepare real fetcher; the tester wraps it to serve canned verdicts.
  BlocklistStateFetcher* fetcher = new BlocklistStateFetcher();
  TestBlocklistStateFetcher fetcher_tester(fetcher);
  blocklist.SetBlocklistStateFetcherForTest(fetcher);
  fetcher_tester.SetBlocklistVerdict(
      a, ClientCRXListInfoResponse_Verdict_CWS_POLICY_VIOLATION);
  fetcher_tester.SetBlocklistVerdict(
      b, ClientCRXListInfoResponse_Verdict_POTENTIALLY_UNWANTED);
  Blocklist::BlocklistStateMap states;
  blocklist.GetBlocklistedIDs(
      {a, b, c},
      base::BindOnce(&Assign<Blocklist::BlocklistStateMap>, &states));
  base::RunLoop().RunUntilIdle();
  // Two fetchers should be created, one per extension the DB flagged unsafe.
  EXPECT_TRUE(fetcher_tester.HandleFetcher(a));
  EXPECT_TRUE(fetcher_tester.HandleFetcher(b));
  // Parsed verdicts map to the corresponding blocklist states; 'c' was never
  // reported unsafe so it has no state.
  EXPECT_EQ(BLOCKLISTED_CWS_POLICY_VIOLATION, states[a]);
  EXPECT_EQ(BLOCKLISTED_POTENTIALLY_UNWANTED, states[b]);
  EXPECT_EQ(0U, states.count(c));
  Blocklist::BlocklistStateMap cached_states;
  blocklist.GetBlocklistedIDs(
      {a, b, c},
      base::BindOnce(&Assign<Blocklist::BlocklistStateMap>, &cached_states));
  base::RunLoop().RunUntilIdle();
  // No new fetchers: the second query must be answered from cache.
  EXPECT_FALSE(fetcher_tester.HandleFetcher(c));
  EXPECT_EQ(BLOCKLISTED_CWS_POLICY_VIOLATION, cached_states[a]);
  EXPECT_EQ(BLOCKLISTED_POTENTIALLY_UNWANTED, cached_states[b]);
  EXPECT_EQ(0U, cached_states.count(c));
}
} // namespace extensions
| 2,521
|
1,127
|
//
// KSWebSocketConfigure.h
// ZeroShare
//
// Created by saeipi on 2018/8/29.
// Copyright © 2018年 saeipi. All rights reserved.
//
#import <Foundation/Foundation.h>
// WebSocket connection lifecycle states.
typedef NS_ENUM(NSInteger,KSWebSocketStatus) {
    KSWebSocketStatusNone = 0,   // Default / closed
    KSWebSocketStatusConnecting, // Connecting
    KSWebSocketStatusConnected,  // Connected successfully
    KSWebSocketStatusConnectFail,// Error occurred / connection failed
    KSWebSocketStatusLoseConnect // Connection closed deliberately by us
};
// Coarse network reachability status (Reachability-style codes).
typedef NS_ENUM(NSInteger, KSNetworkReachabilityStatus) {
    KSNetworkReachabilityStatusUnknown = -1,      // Not yet determined
    KSNetworkReachabilityStatusNotReachable = 0,  // No connectivity
    KSNetworkReachabilityStatusReachableWAN = 1,  // Reachable via WAN
};
// Mutable configuration/state bag shared by the WebSocket client:
// connection parameters, reconnect bookkeeping, and current status.
@interface KSWebSocketConfigure : NSObject
/**
 Maximum number of reconnect attempts; defaults to 10.
 */
@property (nonatomic, assign) int maxResetCount;
/**
 Current reconnect attempt counter.
 */
@property (nonatomic, assign) int resetCount;
/**
 Elapsed reconnect timer value.
 */
@property (nonatomic, assign) int reconnectTime;
/**
 Server address (URL string).
 */
@property (nonatomic, copy ) NSString *urlString;
/**
 Whether to reconnect automatically.
 */
@property (nonatomic, assign) BOOL isAutoConnect;
/**
 Whether a connection has been initiated.
 */
@property (nonatomic, assign) BOOL isConnect;
/**
 Whether the network is currently reachable.
 */
@property (nonatomic, assign) BOOL isReachable;
/**
 Current WebSocket connection status.
 */
@property (nonatomic, assign) KSWebSocketStatus status;
@property (nonatomic, assign) KSNetworkReachabilityStatus networkStatus;
/**
 Authorization token.
 */
@property (nonatomic, copy ) NSString *authorToken;
/**
 Heartbeat interval (units not specified here -- presumably seconds; confirm).
 */
@property (nonatomic, assign) int heartbeatTime;
/**
 Reconnect interval (same unit as heartbeatTime).
 */
@property (nonatomic, assign) int intervalTime;
@end
| 929
|
678
|
<reponame>bzxy/cydia
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/WebKit.framework/WebKit
*/
#import <WebKit/XXUnknownSuperclass.h>
@class WebNodeHighlight, WebView;
/* Class-dump-generated declaration: hidden WebKit helper that appears to
 * manage node highlighting for the Web Inspector (based on its method
 * names); offsets/addresses refer to the scanned binary. */
__attribute__((visibility("hidden")))
@interface WebNodeHighlighter : XXUnknownSuperclass {
@private
	WebView *_inspectedWebView; // 4 = 0x4 -- view being inspected (not owned here; confirm)
	WebNodeHighlight *_currentHighlight; // 8 = 0x8 -- active highlight, if any
}
- (id)initWithInspectedWebView:(id)inspectedWebView; // 0x8a59
- (void)dealloc; // 0x875d1
- (void)highlightNode:(id)node; // 0x874fd
- (void)hideHighlight; // 0x874a5
- (void)didAttachWebNodeHighlight:(id)highlight; // 0x87481
- (void)willDetachWebNodeHighlight:(id)highlight; // 0x87461
@end
| 279
|
347
|
<filename>backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/GetVmCheckpointByIdQuery.java<gh_stars>100-1000
package org.ovirt.engine.core.bll;
import javax.inject.Inject;
import org.ovirt.engine.core.bll.context.EngineContext;
import org.ovirt.engine.core.common.businessentities.VmCheckpoint;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.dao.VmCheckpointDao;
/**
 * Query returning a single VM checkpoint by id, with its disk list
 * populated. Returns {@code null} when no checkpoint with that id exists.
 */
public class GetVmCheckpointByIdQuery<P extends IdQueryParameters> extends QueriesCommandBase<P> {

    @Inject
    private VmCheckpointDao vmCheckpointDao;

    public GetVmCheckpointByIdQuery(P parameters, EngineContext engineContext) {
        super(parameters, engineContext);
    }

    @Override
    protected void executeQueryCommand() {
        VmCheckpoint checkpoint = vmCheckpointDao.get(getParameters().getId());
        if (checkpoint == null) {
            // Unknown id: the query result is simply null.
            setReturnValue(null);
            return;
        }
        // Attach the disks included in this checkpoint before returning it.
        checkpoint.setDisks(vmCheckpointDao.getDisksByCheckpointId(checkpoint.getId()));
        setReturnValue(checkpoint);
    }
}
| 391
|
389
|
/***************************** Include Files *******************************/
#include "ddynclk.h"
#include "xil_types.h"
/* Back-ported from Linux driver */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
#define CLK_BIT_WEDGE 13
#define CLK_BIT_NOCOUNT 12
/* This value is used to signal an error */
#define ERR_CLKCOUNTCALC 0xFFFFFFFF
#define ERR_CLKDIVIDER (1 << CLK_BIT_WEDGE | 1 << CLK_BIT_NOCOUNT)
#define DYNCLK_DIV_1_REGMASK 0x1041
/* 25 MHz (125 KHz / 5) */
#define DYNCLK_DEFAULT_FREQ 125000
#define MMCM_FREQ_VCOMIN 600000
#define MMCM_FREQ_VCOMAX 1200000
#define MMCM_FREQ_PFDMIN 10000
#define MMCM_FREQ_PFDMAX 450000
#define MMCM_FREQ_OUTMIN 4000
#define MMCM_FREQ_OUTMAX 800000
#define MMCM_DIV_MAX 106
#define MMCM_FB_MIN 2
#define MMCM_FB_MAX 64
#define MMCM_CLKDIV_MAX 128
#define MMCM_CLKDIV_MIN 1
/************************** Type Definitions ***************************/
/*
 * Raw register image pushed to the dynamic-clock peripheral
 * (one field per DDYNCLK_* register; see dglnt_dynclk_write_reg()).
 */
struct dglnt_dynclk_reg {
	uint32_t clk0L;        /* CLKOUT0 counter, low word */
	uint32_t clkFBL;       /* feedback counter, low word */
	uint32_t clkFBH_clk0H; /* high words (always 0 here) */
	uint32_t divclk;       /* input divider */
	uint32_t lockL;        /* LOCK settings, low word */
	uint32_t fltr_lockH;   /* FILTER bits + LOCK high byte */
};
/* One candidate MMCM configuration and the output frequency it yields. */
struct dglnt_dynclk_mode {
	uint32_t freq;    /* resulting output frequency (KHz) */
	uint32_t fbmult;  /* feedback multiplier */
	uint32_t clkdiv;  /* output (CLKOUT0) divider */
	uint32_t maindiv; /* input (DIVCLK) divider */
};
/************************** Constant Definitions ***************************/
/*
 * MMCM LOCK configuration bits, indexed by (feedback multiplier - 1).
 * NOTE(review): these look like the standard Xilinx MMCM dynamic
 * reconfiguration tables (XAPP888) -- confirm before editing; do not
 * modify values by hand.
 */
static const uint64_t lock_lookup[64] = {
	0b0011000110111110100011111010010000000001,
	0b0011000110111110100011111010010000000001,
	0b0100001000111110100011111010010000000001,
	0b0101101011111110100011111010010000000001,
	0b0111001110111110100011111010010000000001,
	0b1000110001111110100011111010010000000001,
	0b1001110011111110100011111010010000000001,
	0b1011010110111110100011111010010000000001,
	0b1100111001111110100011111010010000000001,
	0b1110011100111110100011111010010000000001,
	0b1111111111111000010011111010010000000001,
	0b1111111111110011100111111010010000000001,
	0b1111111111101110111011111010010000000001,
	0b1111111111101011110011111010010000000001,
	0b1111111111101000101011111010010000000001,
	0b1111111111100111000111111010010000000001,
	0b1111111111100011111111111010010000000001,
	0b1111111111100010011011111010010000000001,
	0b1111111111100000110111111010010000000001,
	0b1111111111011111010011111010010000000001,
	0b1111111111011101101111111010010000000001,
	0b1111111111011100001011111010010000000001,
	0b1111111111011010100111111010010000000001,
	0b1111111111011001000011111010010000000001,
	0b1111111111011001000011111010010000000001,
	0b1111111111010111011111111010010000000001,
	0b1111111111010101111011111010010000000001,
	0b1111111111010101111011111010010000000001,
	0b1111111111010100010111111010010000000001,
	0b1111111111010100010111111010010000000001,
	0b1111111111010010110011111010010000000001,
	0b1111111111010010110011111010010000000001,
	0b1111111111010010110011111010010000000001,
	0b1111111111010001001111111010010000000001,
	0b1111111111010001001111111010010000000001,
	0b1111111111010001001111111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001,
	0b1111111111001111101011111010010000000001
};
/*
 * Low 10 FILTER bits per (feedback multiplier - 1); merged into
 * fltr_lockH by dglnt_dynclk_find_reg().
 */
static const uint32_t filter_lookup_low[64] = {
	0b0001011111,
	0b0001010111,
	0b0001111011,
	0b0001011011,
	0b0001101011,
	0b0001110011,
	0b0001110011,
	0b0001110011,
	0b0001110011,
	0b0001001011,
	0b0001001011,
	0b0001001011,
	0b0010110011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001010011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0001100011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010010011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011,
	0b0010100011
};
/************************** Function Definitions ***************************/
/*
 * Encode a clock-divider value (1..128) into the MMCM counter format:
 * low/high phase times in bits [5:0]/[11:6], plus the EDGE (wedge) bit
 * for odd dividers.  Returns ERR_CLKDIVIDER for out-of-range input and
 * DYNCLK_DIV_1_REGMASK for the special divide-by-1 case.
 */
static uint32_t dglnt_dynclk_divider(uint32_t divide)
{
	uint32_t highTime;
	uint32_t lowTime;
	uint32_t output;

	if (divide < 1 || divide > 128)
		return ERR_CLKDIVIDER;
	if (divide == 1)
		return DYNCLK_DIV_1_REGMASK;

	highTime = divide / 2;
	/* Low phase absorbs the extra cycle when the divider is odd. */
	lowTime = divide - highTime;
	/* Odd dividers additionally need the wedge (edge) bit set. */
	output = (divide & 0x1) ? (1 << CLK_BIT_WEDGE) : 0;
	output |= lowTime & 0x03F;
	output |= (highTime << 6) & 0xFC0;
	return output;
}
/*
 * Expand a divider encoding into the full counter register layout used
 * by the CLKOUT0/feedback registers: low 12 bits verbatim plus the
 * wedge/no-count flags replicated into bits [23:22].  Returns
 * ERR_CLKCOUNTCALC when the divider itself is invalid.
 */
static uint32_t dglnt_dynclk_count_calc(uint32_t divide)
{
	uint32_t div_reg = dglnt_dynclk_divider(divide);

	if (div_reg == ERR_CLKDIVIDER)
		return ERR_CLKCOUNTCALC;
	return (div_reg & 0xFFF) | ((div_reg << 10) & 0x00C00000);
}
/*
 * Translate an MMCM mode (feedback multiplier, output divider, input
 * divider) into the raw register image for the peripheral.
 * Returns 0 on success, XST_FAILURE when any value cannot be encoded.
 */
static int dglnt_dynclk_find_reg(struct dglnt_dynclk_reg *regValues,
		struct dglnt_dynclk_mode *clkParams)
{
	/* Feedback multiplier must be within the MMCM's supported range. */
	if ((clkParams->fbmult < 2) || clkParams->fbmult > 64)
		return XST_FAILURE;
	regValues->clk0L = dglnt_dynclk_count_calc(clkParams->clkdiv);
	if (regValues->clk0L == ERR_CLKCOUNTCALC)
		return XST_FAILURE;
	regValues->clkFBL = dglnt_dynclk_count_calc(clkParams->fbmult);
	if (regValues->clkFBL == ERR_CLKCOUNTCALC)
		return XST_FAILURE;
	/* High words are unused by this driver. */
	regValues->clkFBH_clk0H = 0;
	regValues->divclk = dglnt_dynclk_divider(clkParams->maindiv);
	if (regValues->divclk == ERR_CLKDIVIDER)
		return XST_FAILURE;
	/* LOCK/FILTER bits come from lookup tables indexed by fbmult-1. */
	regValues->lockL = (uint32_t)(lock_lookup[clkParams->fbmult - 1] &
			0xFFFFFFFF);
	regValues->fltr_lockH = (uint32_t)((lock_lookup[clkParams->fbmult - 1] >>
			32) & 0x000000FF);
	regValues->fltr_lockH |= ((filter_lookup_low[clkParams->fbmult - 1] <<
			16) & 0x03FF0000);
	return 0;
}
/* Push a complete register image to the peripheral at |baseaddr|. */
static void dglnt_dynclk_write_reg(struct dglnt_dynclk_reg *regValues,
		uintptr_t baseaddr)
{
	DDynClk_WriteReg(baseaddr, (DDYNCLK_CLK_L), regValues->clk0L);
	DDynClk_WriteReg(baseaddr, (DDYNCLK_FB_L), regValues->clkFBL);
	DDynClk_WriteReg(baseaddr, (DDYNCLK_FB_H_CLK_H), regValues->clkFBH_clk0H);
	DDynClk_WriteReg(baseaddr, (DDYNCLK_DIV), regValues->divclk);
	DDynClk_WriteReg(baseaddr, (DDYNCLK_LOCK_L), regValues->lockL);
	DDynClk_WriteReg(baseaddr, (DDYNCLK_FLTR_LOCK_H), regValues->fltr_lockH);
}
/*
 * Search the MMCM parameter space (input divider x feedback multiplier,
 * with a derived output divider) for the combination whose output is
 * closest to |freq|.  All frequencies are in KHz.  Fills |bestPick| and
 * returns the best achievable frequency, or 0 when |parentFreq| is 0.
 */
static uint32_t dglnt_dynclk_find_mode(uint32_t freq, uint32_t parentFreq,
		struct dglnt_dynclk_mode *bestPick)
{
	uint32_t bestError = MMCM_FREQ_OUTMAX;
	uint32_t curError;
	uint32_t curClkMult;
	uint32_t curFreq;
	uint32_t divVal;
	uint32_t curFb, curClkDiv;
	uint32_t minFb = 0;
	uint32_t maxFb = 0;
	uint32_t curDiv = 1;
	uint32_t maxDiv;
	int freq_found = 0;

	bestPick->freq = 0;
	if (parentFreq == 0)
		return 0;
	/* Clamp the request to the achievable output range; the minimum
	 * frequency is actually dictated by VCOmin. */
	if (freq < MMCM_FREQ_OUTMIN)
		freq = MMCM_FREQ_OUTMIN;
	if (freq > MMCM_FREQ_OUTMAX)
		freq = MMCM_FREQ_OUTMAX;
	/* Keep the phase-frequency-detector input within spec. */
	if (parentFreq > MMCM_FREQ_PFDMAX)
		curDiv = 2;
	maxDiv = parentFreq / MMCM_FREQ_PFDMIN;
	if (maxDiv > MMCM_DIV_MAX)
		maxDiv = MMCM_DIV_MAX;
	while (curDiv <= maxDiv && !freq_found) {
		/* Feedback bounds that keep the VCO within spec for this
		 * input divider. */
		minFb = curDiv * DIV_ROUND_UP(MMCM_FREQ_VCOMIN, parentFreq);
		maxFb = curDiv * (MMCM_FREQ_VCOMAX / parentFreq);
		if (maxFb > MMCM_FB_MAX)
			maxFb = MMCM_FB_MAX;
		if (minFb < MMCM_FB_MIN)
			minFb = MMCM_FB_MIN;
		divVal = curDiv * freq;
		/*
		 * This multiplier is used to find the best clkDiv value for
		 * each FB value; scaled by 1000 for fixed-point rounding.
		 */
		curClkMult = ((parentFreq * 1000) + (divVal / 2)) / divVal;
		curFb = minFb;
		while (curFb <= maxFb && !freq_found) {
			/* Round-to-nearest output divider, then clamp. */
			curClkDiv = ((curClkMult * curFb) + 500) / 1000;
			if (curClkDiv > MMCM_CLKDIV_MAX)
				curClkDiv = MMCM_CLKDIV_MAX;
			if (curClkDiv < MMCM_CLKDIV_MIN)
				curClkDiv = MMCM_CLKDIV_MIN;
			curFreq = (((parentFreq * curFb) / curDiv) / curClkDiv);
			/* Track the candidate with the smallest |error|. */
			if (curFreq >= freq)
				curError = curFreq - freq;
			else
				curError = freq - curFreq;
			if (curError < bestError) {
				bestError = curError;
				bestPick->clkdiv = curClkDiv;
				bestPick->fbmult = curFb;
				bestPick->maindiv = curDiv;
				bestPick->freq = curFreq;
			}
			/* Exact match: stop searching early. */
			if (!curError)
				freq_found = 1;
			curFb++;
		}
		curDiv++;
	}
	return bestPick->freq;
}
/*
 * Set the control register's enable bit and busy-wait until the status
 * register reads non-zero.  A no-op unless the driver is initialized and
 * a frequency has been programmed.
 * NOTE(review): the wait is unbounded -- if the status never asserts this
 * spins forever; confirm whether a timeout is needed.
 */
static int dglnt_dynclk_enable(DDynClk* drv_inst)
{
	unsigned int clock_state;
	if (drv_inst->IsReady == XIL_COMPONENT_IS_READY && drv_inst->freq) {
		DDynClk_WriteReg(drv_inst->Config.BaseAddress, DDYNCLK_CTRL, 1);
		do {
			clock_state = DDynClk_ReadReg(drv_inst->Config.BaseAddress,
					DDYNCLK_STATUS);
		} while (!clock_state);
	}
	return 0;
}
/* Clear the control register, disabling the dynamic clock output. */
static void dglnt_dynclk_disable(DDynClk* drv_inst)
{
	DDynClk_WriteReg(drv_inst->Config.BaseAddress, DDYNCLK_CTRL, 0);
}
/*
 * Reconfigure the MMCM so the output clock is as close as possible to
 * |rate| Hz, derived from |parent_rate| Hz.  Returns 0 on success (or
 * when |rate| already matches the programmed frequency) and XST_FAILURE
 * when either rate is 0, no valid MMCM mode exists, or the register
 * encoding fails.
 */
static int dglnt_dynclk_set_rate(DDynClk* drv_inst,
		unsigned long rate, unsigned long parent_rate)
{
	struct dglnt_dynclk_reg clkReg;
	struct dglnt_dynclk_mode clkMode;

	if (parent_rate == 0 || rate == 0)
		return XST_FAILURE;
	/* Already running at the requested frequency: nothing to do. */
	if (rate == drv_inst->freq)
		return 0;
	/*
	 * Convert from Hz to KHz, then multiply by five to account for
	 * BUFR division (/200 == /1000 * 5, rounded to nearest).
	 */
	rate = (rate + 100) / 200;
	/* Convert from Hz to KHz, rounded to nearest. */
	parent_rate = (parent_rate + 500) / 1000;
	if (!dglnt_dynclk_find_mode(rate, parent_rate, &clkMode))
		return XST_FAILURE;
	/*
	 * Encode the chosen mode into a register image.
	 * BUG FIX: the return value was previously ignored, so an
	 * unencodable mode could push a partially-uninitialized register
	 * image to the hardware.
	 */
	if (dglnt_dynclk_find_reg(&clkReg, &clkMode) != 0)
		return XST_FAILURE;
	/*
	 * Write to the PLL dynamic configuration registers to configure it
	 * with the calculated parameters.
	 */
	dglnt_dynclk_write_reg(&clkReg, drv_inst->Config.BaseAddress);
	/* Record the programmed frequency back in Hz (KHz * 5 BUFR -> *200). */
	drv_inst->freq = clkMode.freq * 200;
	/* Restart the clock so the new configuration takes effect. */
	dglnt_dynclk_disable(drv_inst);
	dglnt_dynclk_enable(drv_inst);
	return 0;
}
/*
 * Initialize a DDynClk driver instance against the given register base.
 * Reads the reference clock frequency from the peripheral's read-only
 * register and fails (XST_FAILURE) if it reads back as 0.
 * NOTE(review): memset/memcpy need <string.h>; presumably pulled in via
 * ddynclk.h -- confirm.
 */
int DDynClk_CfgInitialize(DDynClk *InstancePtr, DDynClk_Config *CfgPtr,
		UINTPTR EffectiveAddr)
{
	/* Verify arguments */
	Xil_AssertNonvoid(InstancePtr != NULL);
	Xil_AssertNonvoid(CfgPtr != NULL);
	Xil_AssertNonvoid((u32 *)EffectiveAddr != NULL);
	/* Setup the instance */
	memset((void *)InstancePtr, 0, sizeof(DDynClk));
	memcpy((void *)&(InstancePtr->Config), (const void *)CfgPtr,
			sizeof(DDynClk_Config));
	InstancePtr->Config.BaseAddress = EffectiveAddr;
	// Read the reference clock frequency from RO register (maybe a Config parameter would be better?)
	InstancePtr->RefClkFreqHz = DDynClk_ReadReg(EffectiveAddr, DDYNCLK_REF_CLK_FREQ);
	if (InstancePtr->RefClkFreqHz == 0)
		return XST_FAILURE;
	/* Set the flag to indicate the driver is ready */
	InstancePtr->IsReady = (u32)(XIL_COMPONENT_IS_READY);
	return XST_SUCCESS;
}
/*
 * Public wrapper: enable the dynamic clock output.
 * Returns XST_SUCCESS when the internal enable reports 0, XST_FAILURE
 * otherwise.
 */
int DDynClk_Enable(DDynClk *InstancePtr)
{
	return (dglnt_dynclk_enable(InstancePtr) == 0) ? XST_SUCCESS : XST_FAILURE;
}
/*
 * Public wrapper: disable the dynamic clock output.
 * Always returns XST_SUCCESS (the underlying register write cannot fail).
 */
int DDynClk_Disable(DDynClk *InstancePtr)
{
	dglnt_dynclk_disable(InstancePtr);
	return XST_SUCCESS;
}
/*
 * Public wrapper: program the output clock to approximately RateInHz,
 * derived from the reference clock read at initialization time.
 */
int DDynClk_SetRate(DDynClk *InstancePtr, uint32_t RateInHz)
{
	int status = dglnt_dynclk_set_rate(InstancePtr, RateInHz,
			InstancePtr->RefClkFreqHz);
	return (status == 0) ? XST_SUCCESS : XST_FAILURE;
}
| 5,297
|
763
|
<filename>projects/batfish/src/main/java/org/batfish/representation/cisco/IpBgpPeerGroup.java
package org.batfish.representation.cisco;
import javax.annotation.Nullable;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.Prefix;
import org.batfish.datamodel.Prefix6;
/**
 * A leaf BGP peer group for a single IPv4 neighbor, keyed by the
 * neighbor's IP address.
 */
public class IpBgpPeerGroup extends LeafBgpPeerGroup {

  /** Neighbor address. Made final: it is set once and never reassigned. */
  private final Ip _ip;

  public IpBgpPeerGroup(Ip ip) {
    _ip = ip;
  }

  public Ip getIp() {
    return _ip;
  }

  /** The group's name is simply the textual form of the neighbor IP. */
  @Override
  public String getName() {
    return _ip.toString();
  }

  /** The IPv4 neighbor prefix derived from {@code _ip}. */
  @Override
  public Prefix getNeighborPrefix() {
    return _ip.toPrefix();
  }

  /** This is an IPv4 peer group, so there is no IPv6 neighbor prefix. */
  @Nullable
  @Override
  public Prefix6 getNeighborPrefix6() {
    return null;
  }
}
| 275
|
4,234
|
<gh_stars>1000+
#pragma once

#include <memory>

#include <mbgl/util/size.hpp>
#include <mbgl/gfx/backend.hpp>
namespace mbgl {
namespace gfx {
class RendererBackend;
} // namespace gfx
} // namespace mbgl
struct GLFWwindow;
// Abstract bridge between a GLFW window and a concrete mbgl rendering
// backend; instances are produced by the Create() factory below.
class GLFWBackend {
public:
    explicit GLFWBackend() = default;
    // Non-copyable: a backend is tied to a specific window/context.
    GLFWBackend(const GLFWBackend&) = delete;
    GLFWBackend& operator=(const GLFWBackend&) = delete;
    virtual ~GLFWBackend() = default;

    // Builds the platform-selected backend implementation for |window|.
    static std::unique_ptr<GLFWBackend> Create(GLFWwindow* window, bool capFrameRate) {
        return mbgl::gfx::Backend::Create<GLFWBackend, GLFWwindow*, bool>(window, capFrameRate);
    }

    // Access to the underlying renderer backend.
    virtual mbgl::gfx::RendererBackend& getRendererBackend() = 0;
    // Current framebuffer size, and notification of size changes.
    virtual mbgl::Size getSize() const = 0;
    virtual void setSize(mbgl::Size) = 0;
};
| 301
|
777
|
<reponame>google-ar/chromium
#!/usr/bin/python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Compute global objects.
Global objects are defined by interfaces with [Global] or [PrimaryGlobal] on
their definition: http://heycam.github.io/webidl/#Global
Design document: http://www.chromium.org/developers/design-documents/idl-build
"""
# pylint: disable=relative-import
import optparse
import os
import sys
from utilities import get_file_contents
from utilities import get_interface_extended_attributes_from_idl
from utilities import idl_filename_to_interface_name
from utilities import read_file_to_list
from utilities import read_pickle_files
from utilities import write_pickle_file
# Extended attributes that mark an interface as a global object
# (http://heycam.github.io/webidl/#Global).
GLOBAL_EXTENDED_ATTRIBUTES = frozenset([
    'Global',
    'PrimaryGlobal',
])
def parse_options():
    """Parses command-line arguments.

    Returns:
        (options, args) where args holds the single output pickle filename.
    Exits via parser.error() when required arguments are missing.
    """
    parser = optparse.OptionParser(
        usage='Usage: %prog [options] [GlobalObjects.pickle]')
    parser.add_option('--idl-files-list', help='file listing IDL files')
    parser.add_option('--global-objects-component-files', action='append',
                      help='optionally preceeded input pickle filename.')
    options, args = parser.parse_args()

    # --idl-files-list is mandatory.
    if options.idl_files_list is None:
        parser.error('Must specify a file listing IDL files using --idl-files-list.')
    # Default to no precomputed component pickles.
    if options.global_objects_component_files is None:
        options.global_objects_component_files = []
    # Exactly one positional argument: the output pickle path.
    if len(args) != 1:
        parser.error('Must specify an output pickle filename as an argument')
    return options, args
def dict_union(dicts):
    """Merges an iterable of dicts into a single dict.

    Later dicts win on key collisions, matching dict.update() semantics.
    Uses d.items() instead of the Python-2-only d.iteritems() so the
    script also runs under Python 3.
    """
    return {k: v for d in dicts for k, v in d.items()}
def idl_file_to_global_names(idl_filename):
    """Returns global names, if any, for an IDL file.

    If the [Global] or [PrimaryGlobal] extended attribute is declared with an
    identifier list argument, then those identifiers are the interface's global
    names; otherwise, the interface has a single global name, which is the
    interface's identifier (http://heycam.github.io/webidl/#Global).
    """
    interface_name = idl_filename_to_interface_name(idl_filename)
    full_path = os.path.realpath(idl_filename)
    idl_file_contents = get_file_contents(full_path)
    extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
    # Iterating a dict yields its keys on both Python 2 and 3, unlike the
    # Python-2-only dict.iterkeys() used previously.
    global_keys = GLOBAL_EXTENDED_ATTRIBUTES.intersection(extended_attributes)
    if not global_keys:
        return
    if len(global_keys) > 1:
        raise ValueError('The [Global] and [PrimaryGlobal] extended attributes '
                         'MUST NOT be declared on the same interface.')
    global_key = next(iter(global_keys))
    global_value = extended_attributes[global_key]
    if global_value:
        # [Global=(A,B)] or [Global=A]: strip parens and split the list.
        return global_value.strip('()').split(',')
    # Bare [Global]: the interface's own identifier is its global name.
    return [interface_name]
def idl_files_to_interface_name_global_names(idl_files):
    """Yields (interface_name, global_names) pairs found in IDL files.

    Files whose interface declares no global names are skipped.
    """
    for filename in idl_files:
        name = idl_filename_to_interface_name(filename)
        names = idl_file_to_global_names(filename)
        if names:
            yield name, names
################################################################################
def main():
    """Merges component pickles with global names parsed from IDL files and
    writes the combined interface-name -> global-names map to the output
    pickle."""
    options, args = parse_options()
    output_global_objects_filename = args.pop()
    # Start from any previously-computed per-component results.
    interface_name_global_names = dict_union(
        existing_interface_name_global_names
        for existing_interface_name_global_names
        in read_pickle_files(options.global_objects_component_files))

    # Input IDL files are passed in a file, due to OS command line length
    # limits. This is generated at GYP time, which is ok b/c files are static.
    idl_files = read_file_to_list(options.idl_files_list)
    interface_name_global_names.update(
        idl_files_to_interface_name_global_names(idl_files))

    write_pickle_file(output_global_objects_filename,
                      interface_name_global_names)


if __name__ == '__main__':
    sys.exit(main())
| 1,456
|
1,080
|
<filename>ext/rugged/rugged_diff_hunk.c
/*
* Copyright (C) the Rugged contributors. All rights reserved.
*
* This file is part of Rugged, distributed under the MIT license.
* For full terms see the included LICENSE file.
*/
#include "rugged.h"
extern VALUE rb_cRuggedDiff;
VALUE rb_cRuggedDiffHunk;
/*
 * Builds a Rugged::Diff::Hunk Ruby object for hunk |hunk_idx| of the
 * patch owned by |owner|.  All hunk attributes are copied into Ruby
 * ivars up front; line contents are fetched lazily by each_line.
 */
VALUE rugged_diff_hunk_new(VALUE owner, size_t hunk_idx, const git_diff_hunk *hunk, size_t lines_in_hunk)
{
	VALUE rb_hunk = rb_class_new_instance(0, NULL, rb_cRuggedDiffHunk);
	rugged_set_owner(rb_hunk, owner);
	rb_iv_set(rb_hunk, "@header", rb_str_new(hunk->header, hunk->header_len));
	rb_iv_set(rb_hunk, "@line_count", INT2FIX(lines_in_hunk));
	rb_iv_set(rb_hunk, "@hunk_index", INT2FIX(hunk_idx));
	rb_iv_set(rb_hunk, "@old_start", INT2FIX(hunk->old_start));
	rb_iv_set(rb_hunk, "@old_lines", INT2FIX(hunk->old_lines));
	rb_iv_set(rb_hunk, "@new_start", INT2FIX(hunk->new_start));
	rb_iv_set(rb_hunk, "@new_lines", INT2FIX(hunk->new_lines));
	return rb_hunk;
}
/*
* call-seq:
* hunk.each_line { |line| } -> self
* hunk.each_line -> Enumerator
*
* If given a block, yields each line that is part of the current hunk.
*
* If no block is given, an enumerator is returned instead.
*/
static VALUE rb_git_diff_hunk_each_line(VALUE self)
{
	git_patch *patch;
	int error = 0, l, lines_count, hunk_idx;

	/* Without a block, return an Enumerator instead. */
	RETURN_ENUMERATOR(self, 0, 0);
	/* The owner set in rugged_diff_hunk_new() wraps the git_patch. */
	Data_Get_Struct(rugged_owner(self), git_patch, patch);
	lines_count = FIX2INT(rb_iv_get(self, "@line_count"));
	hunk_idx = FIX2INT(rb_iv_get(self, "@hunk_index"));
	/* Yield each line of the hunk; stop on the first libgit2 error. */
	for (l = 0; l < lines_count; ++l) {
		const git_diff_line *line;
		error = git_patch_get_line_in_hunk(&line, patch, hunk_idx, l);
		if (error) break;
		rb_yield(rugged_diff_line_new(line));
	}
	/* Raise a Rugged exception if libgit2 reported an error. */
	rugged_exception_check(error);
	return self;
}
/*
 * Defines Rugged::Diff::Hunk: an Enumerable over diff lines with reader
 * attributes mirroring libgit2's git_diff_hunk fields.
 */
void Init_rugged_diff_hunk(void)
{
	rb_cRuggedDiffHunk = rb_define_class_under(rb_cRuggedDiff, "Hunk", rb_cObject);
	rb_include_module(rb_cRuggedDiffHunk, rb_mEnumerable);
	/* #each and #each_line share the same implementation. */
	rb_define_method(rb_cRuggedDiffHunk, "each", rb_git_diff_hunk_each_line, 0);
	rb_define_method(rb_cRuggedDiffHunk, "each_line", rb_git_diff_hunk_each_line, 0);
	/* Read-only attributes populated by rugged_diff_hunk_new(). */
	rb_define_attr(rb_cRuggedDiffHunk, "header", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "line_count", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "hunk_index", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "old_start", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "old_lines", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "new_start", 1, 0);
	rb_define_attr(rb_cRuggedDiffHunk, "new_lines", 1, 0);
	/* count/size are aliases for line_count. */
	rb_define_alias(rb_cRuggedDiffHunk, "count", "line_count");
	rb_define_alias(rb_cRuggedDiffHunk, "size", "line_count");
}
| 1,146
|
1,062
|
/**
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mr4c.testing;
import com.google.mr4c.algorithm.Algorithm;
import com.google.mr4c.algorithm.AlgorithmBase;
import com.google.mr4c.algorithm.AlgorithmContext;
import com.google.mr4c.algorithm.AlgorithmData;
import com.google.mr4c.algorithm.AlgorithmSchema;
import com.google.mr4c.algorithm.AlgorithmType;
import com.google.mr4c.config.algorithm.AlgorithmConfig;
import com.google.mr4c.dataset.DataFile;
import com.google.mr4c.dataset.Dataset;
import com.google.mr4c.keys.DataKey;
import com.google.mr4c.keys.DataKeyDimension;
import com.google.mr4c.keys.DataKeyElement;
import com.google.mr4c.keys.DataKeyFactory;
import com.google.mr4c.keys.Keyspace;
import com.google.mr4c.keys.KeyspaceDimension;
import com.google.mr4c.metadata.MetadataArray;
import com.google.mr4c.metadata.MetadataField;
import com.google.mr4c.metadata.MetadataMap;
import com.google.mr4c.metadata.PrimitiveType;
import com.google.mr4c.sources.AbstractDatasetSource;
import com.google.mr4c.sources.BytesDataFileSink;
import com.google.mr4c.sources.CustomExecutionSource;
import com.google.mr4c.sources.DataFileSink;
import com.google.mr4c.sources.DatasetSource;
import com.google.mr4c.sources.DatasetSource.WriteMode;
import com.google.mr4c.sources.ExecutionSource;
import com.google.mr4c.util.CombinatoricUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Properties;
import org.apache.commons.lang3.ArrayUtils;
import static org.junit.Assert.*;
/**
* Class to manage the creation of a complete set of data for an algorithm run.
* It uses simple methods to derive data from keys and dataset names.
*/
public class TestDataManager {
// Registered input/output sources, keyed by dataset name.
private Map<String,InputDatasetSource> m_inputs = new HashMap<String,InputDatasetSource>();
private Map<String,OutputDatasetSource> m_outputs = new HashMap<String,OutputDatasetSource>();
// Dimensions added via addDimension(); m_keys/m_exeSrc are populated in readyToTest().
private Keyspace m_keyspace = new Keyspace();
private Set<DataKey> m_keys;
private CustomExecutionSource m_exeSrc;
// for each key, can derive ALL associated data as follows:
// 1. Make a label: dataset_name + key.toString()
// 2. label.getBytes() becomes the file data
// 3. metadata for the key could be the string value, hash, length, array of bytes, etc.
/** Registers an input dataset; its content is derived from its name. */
public void addInputDataset(String name) {
    m_inputs.put(name, new InputDatasetSource(name));
}

/** Registers an output dataset; expected content is derived from its name. */
public void addOutputDataset(String name) {
    m_outputs.put(name, new OutputDatasetSource(name));
}

/** Adds a keyspace dimension with the given element names. */
public void addDimension(String dimName, Collection<String> elementNames) {
    m_keyspace.addKeyspaceDimension(generateKeyspaceDimension(dimName, elementNames));
}
/**
* Call this after adding all dimensions and datasets to generate the keys and keyspaces.
*/
/**
 * Call this after adding all dimensions and datasets to generate the keys
 * and wire up the execution source.  Builds a JAVA-type TestAlgorithm,
 * registers every input source as-is, and initializes each output source
 * so it knows its expected content.
 */
public void readyToTest() {
    m_keys = generateKeys(m_keyspace);
    Algorithm algo = new TestAlgorithm();
    AlgorithmConfig algoConfig = new AlgorithmConfig("test", "testing", AlgorithmType.JAVA, TestAlgorithm.class.getName());
    algo.setAlgorithmConfig(algoConfig);
    algo.init();
    m_exeSrc = new CustomExecutionSource(algo);
    for ( String name : m_inputs.keySet() ) {
        m_exeSrc.addInputSource(name, m_inputs.get(name));
    }
    for ( String name : m_outputs.keySet() ) {
        OutputDatasetSource src = m_outputs.get(name);
        src.init();
        m_exeSrc.addOutputSource(name, src);
    }
}
/** The execution source assembled by readyToTest(). */
public ExecutionSource getExecutionSource() {
    return m_exeSrc;
}

/** Every key combination generated from the configured dimensions. */
public Set<DataKey> getKeys() {
    return m_keys;
}

public Keyspace getKeyspace() {
    return m_keyspace;
}

public DatasetSource getInputDatasetSource(String name) {
    return m_inputs.get(name);
}

public DatasetSource getOutputDatasetSource(String name) {
    return m_outputs.get(name);
}
/**
 * Checks that these write modes, and only these modes, were called on the
 * named output dataset.
 */
public void assertWriteCalled(String name, WriteMode ... modes) {
    m_outputs.get(name).assertWriteCalled(modes);
}

/** Asserts the serialized dataset written to {@code name} matches expectations. */
public void assertSerializedContentCorrect(String name) {
    m_outputs.get(name).assertSerializedContentCorrect();
}

/** Asserts the file content written to {@code name} matches expectations. */
public void assertFileContentCorrect(String name) throws IOException {
    m_outputs.get(name).assertFileContentCorrect();
}

/** Whether copyToFinal() was invoked on the named output dataset. */
public boolean copyToFinalCalled(String name) {
    return m_outputs.get(name).m_copy;
}
/** Builds a dataset whose files and metadata are derived from name + keys. */
public static Dataset buildDataset(String name, Set<DataKey> keys) {
    Dataset dataset = new Dataset();
    populateDataset(dataset, name, keys);
    return dataset;
}

/** Adds a derived file and metadata entry for every key. */
public static void populateDataset(Dataset dataset, String name, Set<DataKey> keys) {
    for ( DataKey key : keys ) {
        dataset.addFile(key, buildFile(name,key));
        dataset.addMetadata(key, buildMetadata(name,key));
    }
}

/** File content is simply the label bytes ("name_key") as text/plain. */
public static DataFile buildFile(String name, DataKey key) {
    String label = toLabel(name,key);
    return new DataFile(label.getBytes(), "text/plain");
}

/** Metadata holds the label, its length, and its raw bytes. */
public static MetadataMap buildMetadata(String name, DataKey key) {
    String label = toLabel(name,key);
    MetadataMap metamap = new MetadataMap();
    metamap.getMap().put("label", new MetadataField(label, PrimitiveType.STRING));
    metamap.getMap().put("length", new MetadataField(label.length(), PrimitiveType.INTEGER));
    metamap.getMap().put("bytes", new MetadataArray(ArrayUtils.toObject(label.getBytes()), PrimitiveType.BYTE));
    return metamap;
}
/** Derives the canonical label for a (dataset, key) pair: "name_key". */
public static String toLabel(String name, DataKey key) {
    return name + "_" + key.toString();
}
/** Builds a KeyspaceDimension from a dimension name and its element names. */
public static KeyspaceDimension generateKeyspaceDimension(String dimName, Collection<String> elementNames) {
    DataKeyDimension dim = new DataKeyDimension(dimName);
    KeyspaceDimension ksd = new KeyspaceDimension(dim);
    for ( String eleName : elementNames ) {
        ksd.addElement(new DataKeyElement(eleName, dim));
    }
    return ksd;
}
/** Generates every key in the keyspace: the cross-product of all dimensions. */
public static Set<DataKey> generateKeys(Keyspace keyspace) {
    Set<DataKey> keys = new HashSet<DataKey>();
    // Collect each dimension's elements, then take every combination.
    List<Collection<DataKeyElement>> inputs = new ArrayList<Collection<DataKeyElement>>();
    for ( DataKeyDimension dim : keyspace.getDimensions() ) {
        KeyspaceDimension ksd = keyspace.getKeyspaceDimension(dim);
        inputs.add(ksd.getElements());
    }
    List<List<DataKeyElement>> outputs = CombinatoricUtils.everyCombination(inputs);
    for ( List<DataKeyElement> keyElements : outputs ) {
        keys.add(DataKeyFactory.newKey(keyElements));
    }
    return keys;
}
/**
 * Read-only source whose dataset/file content is derived from the dataset
 * name and the shared key set.  All write paths throw so tests fail fast
 * if an input is ever written to.
 */
private class InputDatasetSource extends AbstractDatasetSource {

    private String m_name;

    private InputDatasetSource(String name) {
        m_name = name;
    }

    public Dataset readDataset() throws IOException {
        return buildDataset(m_name, m_keys);
    }

    public void writeDataset(Dataset dataset) throws IOException {
        throw new IOException("Writing to input dataset source");
    }

    public void writeDataset(Dataset dataset, WriteMode writeMode) throws IOException {
        throw new IOException("Writing to input dataset source");
    }

    public DataFileSink getDataFileSink(DataKey key) throws IOException {
        throw new IOException("Writing to input dataset source");
    }

    public DataFile findDataFile(DataKey key) throws IOException {
        return buildFile(m_name, key);
    }

    public void copyToFinal() throws IOException {
        throw new IOException("Writing to input dataset source");
    }

    public String getDescription() {
        return String.format("Test input dataset source [%s]", m_name);
    }
}
    /**
     * Write-only dataset source used to capture and verify test output. The
     * expected dataset is generated up front (init()); whatever the code under
     * test writes is captured in m_written and the per-key byte sinks, then
     * compared by the assert* helpers. All read operations throw.
     */
    private class OutputDatasetSource extends AbstractDatasetSource {
        private String m_name;       // dataset name
        private Dataset m_expected;  // dataset the test expects to be written
        private Dataset m_written;   // serialized dataset actually written
        // Captured file bytes, one in-memory sink per data key.
        private Map<DataKey,BytesDataFileSink> m_sinks = new HashMap<DataKey,BytesDataFileSink>();
        private boolean m_copy=false;  // whether copyToFinal() has already been called
        // Modes writeDataset() has been called with, used to reject double writes.
        private Set<WriteMode> m_writeCalls = new HashSet<WriteMode>();
        private OutputDatasetSource(String name) {
            m_name = name;
        }
        /** Builds the expected dataset; called once the shared key set is known. */
        private void init() {
            m_expected = buildDataset(m_name, m_keys);
        }
        public Dataset readDataset() throws IOException {
            throw new IOException("Reading from output dataset source");
        }
        public void writeDataset(Dataset dataset) throws IOException {
            writeDataset(dataset, WriteMode.ALL);
        }
        /** Captures the written dataset and/or its files, rejecting duplicate writes. */
        public void writeDataset(Dataset dataset, WriteMode writeMode) throws IOException {
            validateMode(writeMode);
            m_writeCalls.add(writeMode);
            boolean writeFiles = writeMode!=WriteMode.SERIALIZED_ONLY;
            boolean writeSerialized = writeMode!=WriteMode.FILES_ONLY;
            if ( writeSerialized ) {
                m_written = dataset;
            }
            if ( writeFiles ) {
                // Route every file through getDataFileSink so bytes are captured per key.
                for ( DataKey key : dataset.getAllFileKeys() ) {
                    DataFile file = dataset.getFile(key);
                    DataFileSink sink = getDataFileSink(key);
                    sink.writeFile(file.getBytes());
                }
            }
        }
        /** Fails the test if the same dataset is written twice (ALL overlaps both partial modes). */
        private void validateMode(WriteMode writeMode) {
            boolean twice = writeMode==WriteMode.ALL && !m_writeCalls.isEmpty() || m_writeCalls.contains(writeMode);
            String msg = "Tried to write twice to dataset " + m_name;
            assertFalse(msg, twice);
        }
        /** Returns (creating on demand) the in-memory sink capturing this key's file bytes. */
        public DataFileSink getDataFileSink(DataKey key) throws IOException {
            BytesDataFileSink sink = m_sinks.get(key);
            if ( sink==null ) {
                sink = new BytesDataFileSink();
                m_sinks.put(key, sink);
            }
            return sink;
        }
        public DataFile findDataFile(DataKey key) throws IOException {
            throw new IOException("Reading from output dataset source");
        }
        /** Records finalization; throws if the dataset is finalized twice. */
        public void copyToFinal() throws IOException {
            if ( m_copy ) {
                throw new IllegalStateException("Tried to write the same dataset twice");
            }
            m_copy=true;
        }
        public String getDescription() {
            return String.format("Test output dataset source [%s]", m_name);
        }
        /** Asserts writeDataset was called with each of the given modes. */
        void assertWriteCalled(WriteMode ... modes) {
            List<WriteMode> modeList = Arrays.asList(modes);
            assertTrue(String.format("Called modes %s on dataset %s", modeList, m_name), m_writeCalls.containsAll(modeList));
        }
        /** Asserts the serialized dataset matches expectations (file bytes checked separately). */
        void assertSerializedContentCorrect() {
            assertTrue( String.format("Serialized content for dataset %s", m_name), m_expected.equalsIgnoreFileContent(m_written));
        }
        /** Asserts every expected file's bytes were captured and match exactly. */
        void assertFileContentCorrect() throws IOException {
            for ( DataKey key : m_expected.getAllFileKeys() ) {
                DataFile file = m_expected.getFile(key);
                BytesDataFileSink sink = m_sinks.get(key);
                String msg = String.format("File content for key %s in dataset %s", key, m_name);
                assertNotNull(msg, sink);
                assertTrue(msg, Arrays.equals(file.getBytes(), sink.getWrittenBytes()));
            }
        }
    }
private class TestAlgorithm extends AlgorithmBase {
public void init() {
System.out.println("Called init!!!!!");
AlgorithmSchema schema = new AlgorithmSchema();
for ( String input : m_inputs.keySet() ) {
schema.addInputDataset(input);
}
for ( String output : m_outputs.keySet() ) {
schema.addOutputDataset(output);
}
for ( DataKeyDimension dim : m_keyspace.getDimensions() ) {
schema.addExpectedDimension(dim);
}
setAlgorithmSchema(schema);
}
public void cleanup() {}
public void execute(AlgorithmData data, AlgorithmContext context) throws IOException {
for ( String name : m_outputs.keySet() ) {
Dataset dataset = data.getOutputDataset(name);
populateDataset(dataset, name, m_keys);
}
}
public Collection<File> getRequiredFiles() {
return Collections.emptyList();
}
public Collection<File> getGeneratedLogFiles() {
return Collections.<File>emptySet();
}
}
}
| 4,201
|
442
|
#include <ros/ros.h>
#include <std_msgs/Int32.h> // <1>
// Publishes an incrementing integer on the "counter" topic at 1 Hz.
int main(int argc, char **argv) {
  ros::init(argc, argv, "count_publisher");  // register this node with the ROS master
  ros::NodeHandle node;
  ros::Publisher pub = node.advertise<std_msgs::Int32>("counter", 10); // <2> queue depth of 10 messages
  ros::Rate rate(1); // <3> 1 Hz publish rate
  int count = 0;
  while (ros::ok()) { // <4> loop until the node is shut down
    std_msgs::Int32 msg; // <5> message carrying the current count
    msg.data = count;
    pub.publish(msg); // <6>
    ++count;
    rate.sleep(); // <7> sleep the remainder of the 1-second period
  }
  return 0; // <8>
}
| 216
|
14,668
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/domain_reliability/dispatcher.h"
#include <memory>
#include <utility>
#include "base/bind.h"
#include "base/callback.h"
#include "base/timer/timer.h"
#include "components/domain_reliability/util.h"
namespace domain_reliability {
struct DomainReliabilityDispatcher::Task {
Task(base::OnceClosure closure,
std::unique_ptr<MockableTime::Timer> timer,
base::TimeDelta min_delay,
base::TimeDelta max_delay);
Task(Task&& other);
Task& operator=(Task&& other);
~Task();
base::OnceClosure closure;
std::unique_ptr<MockableTime::Timer> timer;
base::TimeDelta min_delay;
base::TimeDelta max_delay;
bool eligible;
};
DomainReliabilityDispatcher::Task::Task(
base::OnceClosure closure,
std::unique_ptr<MockableTime::Timer> timer,
base::TimeDelta min_delay,
base::TimeDelta max_delay)
: closure(std::move(closure)),
timer(std::move(timer)),
min_delay(min_delay),
max_delay(max_delay),
eligible(false) {}
DomainReliabilityDispatcher::Task::Task(Task&& other) = default;
DomainReliabilityDispatcher::Task& DomainReliabilityDispatcher::Task::operator=(
Task&& other) = default;
DomainReliabilityDispatcher::Task::~Task() = default;
DomainReliabilityDispatcher::DomainReliabilityDispatcher(MockableTime* time)
: time_(time) {}
DomainReliabilityDispatcher::~DomainReliabilityDispatcher() = default;
// Schedules |closure| to run at some point between |min_delay| and
// |max_delay| from now. A negative |max_delay| runs (and deletes) the task
// synchronously; a negative |min_delay| makes the task eligible right away;
// otherwise the task waits out |min_delay| before becoming eligible.
void DomainReliabilityDispatcher::ScheduleTask(base::OnceClosure closure,
                                               base::TimeDelta min_delay,
                                               base::TimeDelta max_delay) {
  DCHECK(closure);
  // Would be DCHECK_LE, but you can't << a TimeDelta.
  DCHECK(min_delay <= max_delay);
  std::unique_ptr<Task> owned_task = std::make_unique<Task>(
      std::move(closure), time_->CreateTimer(), min_delay, max_delay);
  Task* task = owned_task.get();
  // |tasks_| owns the Task; the raw |task| pointer stays valid until
  // RunAndDeleteTask erases the owning entry.
  tasks_.insert(std::move(owned_task));
  if (max_delay.InMicroseconds() < 0)
    RunAndDeleteTask(task);
  else if (min_delay.InMicroseconds() < 0)
    MakeTaskEligible(task);
  else
    MakeTaskWaiting(task);
}
void DomainReliabilityDispatcher::RunEligibleTasks() {
// Move all eligible tasks to a separate set so that eligible_tasks_.erase in
// RunAndDeleteTask won't erase elements out from under the iterator. (Also
// keeps RunEligibleTasks from running forever if a task adds a new, already-
// eligible task that does the same, and so on.)
std::set<Task*> tasks;
tasks.swap(eligible_tasks_);
for (auto* task : tasks) {
DCHECK(task);
DCHECK(task->eligible);
RunAndDeleteTask(task);
}
}
void DomainReliabilityDispatcher::RunAllTasksForTesting() {
std::set<Task*> tasks;
for (auto& task : tasks_)
tasks.insert(task.get());
for (auto* task : tasks) {
DCHECK(task);
RunAndDeleteTask(task);
}
}
void DomainReliabilityDispatcher::MakeTaskWaiting(Task* task) {
DCHECK(task);
DCHECK(!task->eligible);
DCHECK(!task->timer->IsRunning());
task->timer->Start(
FROM_HERE, task->min_delay,
base::BindOnce(&DomainReliabilityDispatcher::MakeTaskEligible,
base::Unretained(this), task));
}
void
DomainReliabilityDispatcher::MakeTaskEligible(Task* task) {
DCHECK(task);
DCHECK(!task->eligible);
task->eligible = true;
eligible_tasks_.insert(task);
task->timer->Start(
FROM_HERE, task->max_delay - task->min_delay,
base::BindOnce(&DomainReliabilityDispatcher::RunAndDeleteTask,
base::Unretained(this), task));
}
// Runs the task's closure, removes it from the eligible set (if present),
// then destroys it by erasing its owning entry from |tasks_|.
void DomainReliabilityDispatcher::RunAndDeleteTask(Task* task) {
  DCHECK(task);
  DCHECK(task->closure);
  std::move(task->closure).Run();
  if (task->eligible)
    eligible_tasks_.erase(task);
  auto it = tasks_.find(task);
  DCHECK(it != tasks_.end());
  tasks_.erase(it);
}
} // namespace domain_reliability
| 1,494
|
1,355
|
<reponame>PavelBlend/fluid-engine-dev
// Copyright (c) 2019 <NAME>
//
// I am making my contributions/submissions to this project solely in my
// personal capacity and am not conveying any rights to any intellectual
// property of any third parties.
#include "marching_cubes.h"
#include "pybind11_utils.h"
#include <jet/marching_cubes.h>
#include <jet/scalar_grid3.h>
namespace py = pybind11;
using namespace jet;
void addMarchingCubes(pybind11::module& m) {
m.def("marchingCubes",
[](ScalarGrid3Ptr grid, py::object gridSize, py::object origin,
double isoValue, int bndClose,
int bndConnectivity) -> TriangleMesh3Ptr {
auto mesh = TriangleMesh3::builder().makeShared();
marchingCubes(grid->constDataAccessor(),
objectToVector3D(gridSize),
objectToVector3D(origin), mesh.get(), isoValue,
bndClose, bndConnectivity);
return mesh;
},
R"pbdoc(
Computes marching cubes and extract triangle mesh from grid.
)pbdoc");
}
| 495
|
11,393
|
package com.interview.tree;
/**
* Date 05/04/2016
* @author <NAME>
*
* Lowest common ancestor in binary search tree.
*
* Time complexity O(height of tree)
* Space complexity O(height of tree)
*
* https://leetcode.com/problems/lowest-common-ancestor-of-a-binary-search-tree/
*/
public class LowestCommonAncestoryBinarySearchTree {
    /**
     * Returns the lowest common ancestor of values {@code p} and {@code q} in a
     * binary search tree. Walks down from the root: while the current node's
     * value lies strictly outside the [min(p,q), max(p,q)] range, both targets
     * are on the same side, so descend toward them; the first node inside the
     * range is the LCA. Assumes both values are present in the tree.
     *
     * @param root root of the BST
     * @param p    first value
     * @param q    second value
     * @return the lowest common ancestor node
     */
    public Node lowestCommonAncestor(Node root, int p, int q) {
        final int lo = Math.min(p, q);
        final int hi = Math.max(p, q);
        Node current = root;
        while (current.data > hi || current.data < lo) {
            current = current.data > hi ? current.left : current.right;
        }
        return current;
    }
}
| 279
|
2,350
|
import os
import os.path as osp
import re
import torch
from torch_geometric.data import InMemoryDataset, download_url, extract_gz
from torch_geometric.utils import from_smiles
class MoleculeNet(InMemoryDataset):
r"""The `MoleculeNet <http://moleculenet.ai/datasets-1>`_ benchmark
collection from the `"MoleculeNet: A Benchmark for Molecular Machine
Learning" <https://arxiv.org/abs/1703.00564>`_ paper, containing datasets
from physical chemistry, biophysics and physiology.
All datasets come with the additional node and edge features introduced by
the `Open Graph Benchmark <https://ogb.stanford.edu/docs/graphprop/>`_.
Args:
root (string): Root directory where the dataset should be saved.
name (string): The name of the dataset (:obj:`"ESOL"`,
:obj:`"FreeSolv"`, :obj:`"Lipo"`, :obj:`"PCBA"`, :obj:`"MUV"`,
:obj:`"HIV"`, :obj:`"BACE"`, :obj:`"BBPB"`, :obj:`"Tox21"`,
:obj:`"ToxCast"`, :obj:`"SIDER"`, :obj:`"ClinTox"`).
transform (callable, optional): A function/transform that takes in an
:obj:`torch_geometric.data.Data` object and returns a transformed
version. The data object will be transformed before every access.
(default: :obj:`None`)
pre_transform (callable, optional): A function/transform that takes in
an :obj:`torch_geometric.data.Data` object and returns a
transformed version. The data object will be transformed before
being saved to disk. (default: :obj:`None`)
pre_filter (callable, optional): A function that takes in an
:obj:`torch_geometric.data.Data` object and returns a boolean
value, indicating whether the data object should be included in the
final dataset. (default: :obj:`None`)
"""
url = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/{}'
# Format: name: [display_name, url_name, csv_name, smiles_idx, y_idx]
names = {
'esol': ['ESOL', 'delaney-processed.csv', 'delaney-processed', -1, -2],
'freesolv': ['FreeSolv', 'SAMPL.csv', 'SAMPL', 1, 2],
'lipo': ['Lipophilicity', 'Lipophilicity.csv', 'Lipophilicity', 2, 1],
'pcba': ['PCBA', 'pcba.csv.gz', 'pcba', -1,
slice(0, 128)],
'muv': ['MUV', 'muv.csv.gz', 'muv', -1,
slice(0, 17)],
'hiv': ['HIV', 'HIV.csv', 'HIV', 0, -1],
'bace': ['BACE', 'bace.csv', 'bace', 0, 2],
'bbbp': ['BBPB', 'BBBP.csv', 'BBBP', -1, -2],
'tox21': ['Tox21', 'tox21.csv.gz', 'tox21', -1,
slice(0, 12)],
'toxcast':
['ToxCast', 'toxcast_data.csv.gz', 'toxcast_data', 0,
slice(1, 618)],
'sider': ['SIDER', 'sider.csv.gz', 'sider', 0,
slice(1, 28)],
'clintox': ['ClinTox', 'clintox.csv.gz', 'clintox', 0,
slice(1, 3)],
}
def __init__(self, root, name, transform=None, pre_transform=None,
pre_filter=None):
self.name = name.lower()
assert self.name in self.names.keys()
super().__init__(root, transform, pre_transform, pre_filter)
self.data, self.slices = torch.load(self.processed_paths[0])
@property
def raw_dir(self):
return osp.join(self.root, self.name, 'raw')
@property
def processed_dir(self):
return osp.join(self.root, self.name, 'processed')
@property
def raw_file_names(self):
return f'{self.names[self.name][2]}.csv'
@property
def processed_file_names(self):
return 'data.pt'
def download(self):
url = self.url.format(self.names[self.name][1])
path = download_url(url, self.raw_dir)
if self.names[self.name][1][-2:] == 'gz':
extract_gz(path, self.raw_dir)
os.unlink(path)
def process(self):
with open(self.raw_paths[0], 'r') as f:
dataset = f.read().split('\n')[1:-1]
dataset = [x for x in dataset if len(x) > 0] # Filter empty lines.
data_list = []
for line in dataset:
line = re.sub(r'\".*\"', '', line) # Replace ".*" strings.
line = line.split(',')
smiles = line[self.names[self.name][3]]
ys = line[self.names[self.name][4]]
ys = ys if isinstance(ys, list) else [ys]
ys = [float(y) if len(y) > 0 else float('NaN') for y in ys]
y = torch.tensor(ys, dtype=torch.float).view(1, -1)
data = from_smiles(smiles)
data.y = y
if self.pre_filter is not None and not self.pre_filter(data):
continue
if self.pre_transform is not None:
data = self.pre_transform(data)
data_list.append(data)
torch.save(self.collate(data_list), self.processed_paths[0])
def __repr__(self) -> str:
return f'{self.names[self.name][0]}({len(self)})'
| 2,343
|
370
|
<gh_stars>100-1000
/*
* Copyright (c) 2015-2018 Dubalu LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include "geometry.h"
class MultiPoint;
class Point : public Geometry {
friend class MultiPoint;
Cartesian p;
public:
template <typename T, typename = std::enable_if_t<std::is_same<Cartesian, std::decay_t<T>>::value>>
explicit Point(T&& point)
: Geometry(Type::POINT),
p(std::forward<T>(point))
{
p.normalize();
}
Point(Point&& point) noexcept
: Geometry(std::move(point)),
p(std::move(point.p)) { }
Point(const Point& point)
: Geometry(point),
p(point.p) { }
~Point() = default;
Point& operator=(Point&& point) noexcept {
Geometry::operator=(std::move(point));
p = std::move(point.p);
return *this;
}
Point& operator=(const Point& point) {
Geometry::operator=(point);
p = point.p;
return *this;
}
bool operator<(const Point& point) const noexcept {
return p < point.p;
}
bool operator==(const Point& point) const noexcept {
return p == point.p;
}
const Cartesian& getCartesian() const noexcept {
return p;
}
std::string toWKT() const override {
std::string wkt("POINT");
wkt.append(to_string());
return wkt;
}
std::string to_string() const override {
char result[40];
const auto geodetic = p.toGeodetic();
snprintf(result, 40, "(%.7f %.7f %.7f)",
std::get<1>(geodetic),
std::get<0>(geodetic),
std::get<2>(geodetic)
);
return std::string(result);
}
std::vector<std::string> getTrixels(bool, double) const override {
return { HTM::getTrixelName(p) };
}
std::vector<range_t> getRanges(bool, double) const override {
auto id = HTM::getId(p);
return { range_t(id, id) };
}
std::vector<Cartesian> getCentroids() const override {
return std::vector<Cartesian>({ p });
}
};
| 979
|
2,329
|
<filename>testdata/src/main/java/data/SimpleClassCaller.java
package data;
public class SimpleClassCaller {
public void m() {
SimpleClass sc = new SimpleClass();
sc.foo();
}
}
| 64
|
336
|
#include "library.h"
#undef IMPORT
#define IMPORT(local, remote, ret, args) ret local args
IMPORT(Test1, , void, ()) {}
IMPORT(Test2, , void, ()) {}
IMPORT(Test3, , void, ()) {}
IMPORT(Test4, , void, ()) {}
IMPORT(PutPixel, BIOS::LCD::PutPixel, void, (int a, int b, ui16 c)) {}
IMPORT(Print, BIOS::LCD::Print, int, (int a, int b, ui16 c, ui16 d, const char* e)) {return 0;}
IMPORT(GetKeys, BIOS::KEY::GetKeys, ui16, ()) {return 0;}
IMPORT(Printf, BIOS::LCD::Printf, int, (int x, int y, unsigned short clrf, unsigned short clrb, const char * format, ...) ){return 0;}
| 234
|
16,461
|
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import "ABI42_0_0RNSVGLength.h"
#ifndef ABI42_0_0RNSVGPropHelper_h
#define ABI42_0_0RNSVGPropHelper_h
@interface ABI42_0_0RNSVGPropHelper : NSObject
+ (CGFloat) fromRelativeWithNSString:(NSString *)length
relative:(CGFloat)relative
fontSize:(CGFloat)fontSize;
+ (CGFloat) fromRelative:(ABI42_0_0RNSVGLength*)length
relative:(CGFloat)relative
fontSize:(CGFloat)fontSize;
+ (CGFloat)fromRelative:(ABI42_0_0RNSVGLength*)length
relative:(CGFloat)relative;
@end
#endif
| 330
|
953
|
/*! @file */
/*
Copyright (C) 2021, Sakura Editor Organization
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented;
you must not claim that you wrote the original software.
If you use this software in a product, an acknowledgment
in the product documentation would be appreciated but is
not required.
2. Altered source versions must be plainly marked as such,
and must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*/
#include "StdAfx.h"
#include "CConvert_CodeAutoToSjis.h"
#include "charset/CCodeFactory.h"
#include "charset/CCodeMediator.h"
/*!
コンストラクタ
*/
CConvert_CodeAutoToSjis::CConvert_CodeAutoToSjis(const SEncodingConfig& sEncodingConfig) noexcept
: m_sEncodingConfig(sEncodingConfig)
{
}
/*!
	Character-code conversion: auto-detect -> SJIS
	@date 2014/02/10 Moca Added F_CODECNV_AUTO2SJIS.
*/
bool CConvert_CodeAutoToSjis::DoConvert(CNativeW* pcData)
{
	// Convert to Shift-JIS first (conversion errors are ignored)
	const auto bin = CCodeFactory::CreateCodeBase(CODE_SJIS)->UnicodeToCode(*pcData);
	// Auto-detect the character encoding of the resulting byte sequence
	CCodeMediator m(m_sEncodingConfig);
	ECodeType eCodeType = m.CheckKanjiCode(reinterpret_cast<const char*>(bin.data()), bin.length());
	// Create the CCodeBase used for the conversion, based on the detected encoding
	std::unique_ptr<CCodeBase> pcCodeBase;
	switch (eCodeType)
	{
	case CODE_JIS:
		// JIS conversion uses the mode that also performs base64 decoding
		pcCodeBase = std::unique_ptr<CCodeBase>(CCodeFactory::CreateCodeBase(CODE_JIS, true));
		break;
	case CODE_EUC:
	case CODE_UNICODE:
	case CODE_UNICODEBE:
	case CODE_UTF8:
	case CODE_UTF7:
		// Other supported conversions
		pcCodeBase = CCodeFactory::CreateCodeBase(eCodeType);
		break;
	default:
		// Unsupported detections are treated as failure (no conversion performed)
		return false;
	}
	// Convert back to Unicode using the detected encoding (conversion errors are ignored)
	*pcData = pcCodeBase->CodeToUnicode(bin);
	return true;
}
| 974
|
778
|
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
#if !defined(KRATOS_DISTRIBUTE_LOAD_ON_SURFACE_PROCESS_H_INCLUDED )
#define KRATOS_DISTRIBUTE_LOAD_ON_SURFACE_PROCESS_H_INCLUDED
// System includes
// External includes
// Project includes
#include "includes/define.h"
#include "processes/process.h"
#include "includes/model_part.h"
#include "includes/kratos_parameters.h"
namespace Kratos {
///@name Kratos Globals
///@{
///@name Kratos Classes
///@{
/// Process to create the animated Eigenvectors
/** This process distributes a load on surface load conditions belonging to a modelpart.
* The load is distributed according to the surface area.
*/
class KRATOS_API(STRUCTURAL_MECHANICS_APPLICATION) DistributeLoadOnSurfaceProcess : public Process
{
public:
///@name Type Definitions
///@{
///@}
///@name Pointer Definitions
/// Pointer definition of DistributeLoadOnSurfaceProcess
KRATOS_CLASS_POINTER_DEFINITION(DistributeLoadOnSurfaceProcess);
typedef std::size_t SizeType;
///@}
///@name Life Cycle
///@{
DistributeLoadOnSurfaceProcess(ModelPart& rModelPart,
Parameters Parameters);
///@}
///@name Operations
///@{
void ExecuteInitializeSolutionStep() override;
///@}
///@name Input and output
///@{
/// Turn back information as a string.
virtual std::string Info() const override {
return "DistributeLoadOnSurfaceProcess";
}
/// Print information about this object.
void PrintInfo(std::ostream& rOStream) const override {
rOStream << "DistributeLoadOnSurfaceProcess";
}
/// Print object's data.
void PrintData(std::ostream& rOStream) const override {
}
///@}
private:
///@name Member Variables
///@{
ModelPart& mrModelPart;
Parameters mParameters;
///@}
///@name Private Operations
///@{
///@}
}; // Class DistributeLoadOnSurfaceProcess
///@}
} // namespace Kratos.
#endif // KRATOS_DISTRIBUTE_LOAD_ON_SURFACE_PROCESS_H_INCLUDED defined
| 913
|
5,169
|
<gh_stars>1000+
{
"name": "GJHGallop",
"version": "0.0.12",
"summary": "GJHGallop is add pods from Gallop",
"description": "TODO: Add long description of the pod here.",
"homepage": "https://github.com/JohnYuGit/GJHGallop",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"JohnYuGit": "<EMAIL>"
},
"source": {
"git": "https://github.com/JohnYuGit/GJHGallop.git",
"tag": "0.0.12"
},
"platforms": {
"ios": "8.0"
},
"source_files": [
"GJHGallop/*.{h,m}",
"GJHGallop/AsyncDisplay/*.{h,m}",
"GJHGallop/Constraint/*.{h,m}",
"GJHGallop/CoreText/*.{h,m}",
"GJHGallop/UI/HTMLParser/*.{h,m}",
"GJHGallop/UI/LWAlertView/*.{h,m}",
"GJHGallop/UI/LWImageBrowser/*.{h,m}",
"GJHGallop/Utils/*.{h,m}"
],
"dependencies": {
"SDWebImage": [
]
},
"libraries": "xml2",
"requires_arc": true,
"xcconfig": {
"HEADER_SEARCH_PATHS": "/usr/include/libxml2"
}
}
| 488
|
14,668
|
<filename>base/memory/memory_pressure_monitor.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/memory/memory_pressure_monitor.h"
#include "base/check.h"
#include "base/metrics/histogram.h"
#include "base/notreached.h"
namespace base {
namespace {
MemoryPressureMonitor* g_monitor = nullptr;
} // namespace
// Registers this instance as the process-wide monitor; only one instance may
// exist at a time (enforced by the DCHECK on the file-local g_monitor).
MemoryPressureMonitor::MemoryPressureMonitor() {
  DCHECK(!g_monitor);
  g_monitor = this;
}
// Unregisters the process-wide monitor on destruction.
MemoryPressureMonitor::~MemoryPressureMonitor() {
  DCHECK(g_monitor);
  g_monitor = nullptr;
}
// static
// Returns the process-wide monitor, or nullptr if none is alive.
MemoryPressureMonitor* MemoryPressureMonitor::Get() {
  return g_monitor;
}
} // namespace base
| 241
|
897
|
def sort012(arr, n):
    """Return the first ``n`` elements of ``arr`` (each 0, 1 or 2) in sorted order.

    Counting-sort variant of the Dutch-national-flag problem: one pass to
    count each value, then rebuild the result. Time O(n); extra space O(1)
    besides the returned list (the original built three intermediate lists).

    Args:
        arr: sequence of ints, each expected to be 0, 1 or 2.
        n: number of leading elements of ``arr`` to sort.

    Returns:
        A new sorted list of those ``n`` elements.
    """
    counts = [0, 0, 0]
    for i in range(n):
        counts[arr[i]] += 1  # arr[i] must be 0, 1 or 2
    return [0] * counts[0] + [1] * counts[1] + [2] * counts[2]
arr = list(map(int,input("Enter the Number:\n").split()))
n = len(arr)
print(sort012(arr,n))
'''
Time Complexity is O(n)
Space Complexity is O(1)
Input:
Enter the Number:
0 2 1 2 0
Output:
0 0 1 2 2
Explanation:
0s 1s and 2s are segregated
into ascending order.
'''
| 286
|
825
|
<reponame>jiangkang/Hummer<filename>iOS/Hummer/Classes/Core/Manager/ImageLoader/HMURLConvertible.h
//
// HMURLConvertible.h
// Hummer
//
// Created by didi on 2020/11/16.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@protocol HMURLConvertible <NSObject>
- (nullable NSURL *)hm_asUrl;
- (nullable NSURL *)hm_asFileUrl;
- (nullable NSString *)hm_asString;
@end
NS_ASSUME_NONNULL_END
| 168
|
19,127
|
<reponame>pmesnier/openssl<filename>crypto/evp/legacy_mdc2.c
/*
* Copyright 2015-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
/*
* MDC2 low level APIs are deprecated for public use, but still ok for
* internal use.
*/
#include "internal/deprecated.h"
#include <openssl/mdc2.h>
#include "crypto/evp.h"
#include "legacy_meth.h"
IMPLEMENT_LEGACY_EVP_MD_METH(mdc2, MDC2)
static const EVP_MD mdc2_md = {
NID_mdc2,
NID_mdc2WithRSA,
MDC2_DIGEST_LENGTH,
0,
EVP_ORIG_GLOBAL,
LEGACY_EVP_MD_METH_TABLE(mdc2_init, mdc2_update, mdc2_final, NULL,
MDC2_BLOCK),
};
const EVP_MD *EVP_mdc2(void)
{
return &mdc2_md;
}
| 405
|
577
|
//
// ArticleCell.h
// PlainReader
//
// Created by guojiubo on 14-3-28.
// Copyright (c) 2014年 guojiubo. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PRArticle.h"
#import "PRTableViewCell.h"
@interface PRArticleCell : PRTableViewCell
@property (nonatomic, weak) PRArticle *article;
@end
| 117
|
1,826
|
<reponame>niklasf/flexmark-java
package com.vladsch.flexmark.ext.xwiki.macros;
import com.vladsch.flexmark.ext.xwiki.macros.internal.MacroBlockParser;
import com.vladsch.flexmark.ext.xwiki.macros.internal.MacroInlineParser;
import com.vladsch.flexmark.ext.xwiki.macros.internal.MacroNodeRenderer;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.data.DataKey;
import com.vladsch.flexmark.util.data.MutableDataHolder;
import org.jetbrains.annotations.NotNull;
/**
* Extension for macros
* <p>
* Create it with {@link #create()} and then configure it on the builders
* <p>
* The parsed macros text is turned into {@link Macro} nodes.
*/
// TODO: Rename this class to XWikiMacroExtension
public class MacroExtension implements Parser.ParserExtension, HtmlRenderer.HtmlRendererExtension {
final public static DataKey<Boolean> ENABLE_INLINE_MACROS = new DataKey<>("ENABLE_INLINE_MACROS", true);
final public static DataKey<Boolean> ENABLE_BLOCK_MACROS = new DataKey<>("ENABLE_BLOCK_MACROS", true);
final public static DataKey<Boolean> ENABLE_RENDERING = new DataKey<>("ENABLE_RENDERING", false);
private MacroExtension() {
}
@Override
public void rendererOptions(@NotNull MutableDataHolder options) {
}
@Override
public void extend(@NotNull HtmlRenderer.Builder htmlRendererBuilder, @NotNull String rendererType) {
htmlRendererBuilder.nodeRendererFactory(new MacroNodeRenderer.Factory());
}
public static MacroExtension create() {
return new MacroExtension();
}
@Override
public void parserOptions(MutableDataHolder options) {
}
@Override
public void extend(Parser.Builder parserBuilder) {
if (ENABLE_BLOCK_MACROS.get(parserBuilder)) parserBuilder.customBlockParserFactory(new MacroBlockParser.Factory());
if (ENABLE_INLINE_MACROS.get(parserBuilder)) parserBuilder.customInlineParserExtensionFactory(new MacroInlineParser.Factory());
}
}
| 715
|
905
|
<filename>malwareconfig/decoders/Xtreme.py
from malwareconfig import crypto
from malwareconfig.common import Decoder
from malwareconfig.common import string_printable
from binascii import hexlify, unhexlify
# temp imports
import re
from struct import unpack
class Xtreme(Decoder):
decoder_name = "Xtreme"
decoder__version = 1
decoder_author = "@kevthehermit"
decoder_description = "Xtreme decoder for 2.9, 3.1, 3.2, 3.5"
def __init__(self):
self.config = {}
    def get_unicode_string(self, buf, pos):
        """Read a double-NUL-terminated UTF-16LE-style string from ``buf`` at ``pos``.

        Walks byte by byte until a 0x00 0x00 pair is seen, then strips the
        interleaved NUL bytes and returns the remaining text. Returns ``None``
        only when nothing at all was appended (empty slice at ``pos``).

        NOTE(review): if the second byte of the terminator would fall exactly
        at the end of ``buf``, ``buf[pos+i+1]`` can raise IndexError -- confirm
        callers always pass buffers carrying a full double-NUL terminator.
        """
        out = ''
        for i in range(len(buf[pos:])):
            if buf[pos+i] == 0 and buf[pos+i+1] == 0:
                out += '\x00'
                break
            out += chr(buf[pos+i])
        if out == '':
            return None
        else:
            return out.replace('\x00','')
    def get_config(self):
        '''
        Main entry point: locates the encrypted "XTREME" PE resource,
        RC4-decrypts it, and dispatches to a version-specific parser chosen by
        the decrypted blob's length. The per-version parsers (v29/v32/v35)
        store their result in ``self.config`` themselves.
        '''
        raw_config = {}  # NOTE(review): never used below -- candidate for removal
        file_data = self.file_info.file_data  # NOTE(review): read but unused here
        # Get Resource
        res_data = self.file_info.pe_resource_id(b'X\x00T\x00R\x00E\x00M\x00E\x00')
        # Return no resource
        if not res_data:
            print(" [-] No Config Resource Found")
            return # Check what we do for a negative result
        key = '<KEY>'  # RC4 key (redacted in this copy of the source)
        decrypted_config = crypto.decrypt_arc4(key, res_data)
        # 1.3.x - Not implemented yet.
        if len(decrypted_config) == 0xe10:
            config_data = None
        # 2.9.x - Not a stable extract.
        elif len(decrypted_config) == 0x1390 or len(decrypted_config) == 0x1392:
            config_data = self.v29(decrypted_config)
        # 3.1 & 3.2
        elif len(decrypted_config) == 0x5Cc:
            config_data = self.v32(decrypted_config)
        # 3.5
        elif len(decrypted_config) == 0x7f0:
            config_data = self.v35(decrypted_config)
        else:
            config_data = None
        # NOTE(review): config_data is always None (the v* parsers return None
        # and assign self.config directly) and this method returns nothing --
        # callers must read self.config afterwards.
def v29(self, rawConfig):
config_data = {}
config_data["ID"] = self.get_unicode_string(rawConfig, 0x9e0)
config_data["Group"] = self.get_unicode_string(rawConfig, 0xa5a)
config_data["Version"] = self.get_unicode_string(rawConfig, 0xf2e)
config_data["Mutex"] = self.get_unicode_string(rawConfig, 0xfaa)
config_data["Install Dir"] = self.get_unicode_string(rawConfig, 0xb50)
config_data["Install Name"] = self.get_unicode_string(rawConfig, 0xad6)
config_data["HKLM"] = self.get_unicode_string(rawConfig, 0xc4f)
config_data["HKCU"] = self.get_unicode_string(rawConfig, 0xcc8)
config_data["Custom Reg Key"] = self.get_unicode_string(rawConfig, 0xdc0)
config_data["Custom Reg Name"] = self.get_unicode_string(rawConfig, 0xe3a)
config_data["Custom Reg Value"] = self.get_unicode_string(rawConfig, 0xa82)
config_data["ActiveX Key"] = self.get_unicode_string(rawConfig, 0xd42)
config_data["Injection"] = self.get_unicode_string(rawConfig, 0xbd2)
config_data["FTP Server"] = self.get_unicode_string(rawConfig, 0x111c)
config_data["FTP UserName"] = self.get_unicode_string(rawConfig, 0x1210)
config_data["FTP Password"] = self.get_unicode_string(rawConfig, 0x128a)
config_data["FTP Folder"] = self.get_unicode_string(rawConfig, 0x1196)
config_data["Domain1"] = str(self.get_unicode_string(rawConfig, 0x50)+":"+str(unpack("<I", rawConfig[0:4])[0]))
config_data["Domain2"] = str(self.get_unicode_string(rawConfig, 0xca)+":"+str(unpack("<I", rawConfig[4:8])[0]))
config_data["Domain3"] = str(self.get_unicode_string(rawConfig, 0x144)+":"+str(unpack("<I", rawConfig[8:12])[0]))
config_data["Domain4"] = str(self.get_unicode_string(rawConfig, 0x1be)+":"+str(unpack("<I", rawConfig[12:16])[0]))
config_data["Domain5"] = str(self.get_unicode_string(rawConfig, 0x238)+":"+str(unpack("<I", rawConfig[16:20])[0]))
config_data["Domain6"] = str(self.get_unicode_string(rawConfig, 0x2b2)+":"+str(unpack("<I", rawConfig[20:24])[0]))
config_data["Domain7"] = str(self.get_unicode_string(rawConfig, 0x32c)+":"+str(unpack("<I", rawConfig[24:28])[0]))
config_data["Domain8"] = str(self.get_unicode_string(rawConfig, 0x3a6)+":"+str(unpack("<I", rawConfig[28:32])[0]))
config_data["Domain9"] = str(self.get_unicode_string(rawConfig, 0x420)+":"+str(unpack("<I", rawConfig[32:36])[0]))
config_data["Domain10"] = str(self.get_unicode_string(rawConfig, 0x49a)+":"+str(unpack("<I", rawConfig[36:40])[0]))
config_data["Domain11"] = str(self.get_unicode_string(rawConfig, 0x514)+":"+str(unpack("<I", rawConfig[40:44])[0]))
config_data["Domain12"] = str(self.get_unicode_string(rawConfig, 0x58e)+":"+str(unpack("<I", rawConfig[44:48])[0]))
config_data["Domain13"] = str(self.get_unicode_string(rawConfig, 0x608)+":"+str(unpack("<I", rawConfig[48:52])[0]))
config_data["Domain14"] = str(self.get_unicode_string(rawConfig, 0x682)+":"+str(unpack("<I", rawConfig[52:56])[0]))
config_data["Domain15"] = str(self.get_unicode_string(rawConfig, 0x6fc)+":"+str(unpack("<I", rawConfig[56:60])[0]))
config_data["Domain16"] = str(self.get_unicode_string(rawConfig, 0x776)+":"+str(unpack("<I", rawConfig[60:64])[0]))
config_data["Domain17"] = str(self.get_unicode_string(rawConfig, 0x7f0)+":"+str(unpack("<I", rawConfig[64:68])[0]))
config_data["Domain18"] = str(self.get_unicode_string(rawConfig, 0x86a)+":"+str(unpack("<I", rawConfig[68:72])[0]))
config_data["Domain19"] = str(self.get_unicode_string(rawConfig, 0x8e4)+":"+str(unpack("<I", rawConfig[72:76])[0]))
config_data["Domain20"] = str(self.get_unicode_string(rawConfig, 0x95e)+":"+str(unpack("<I", rawConfig[76:80])[0]))
self.config = config_data
def v32(self, rawConfig):
config_data = {}
config_data["ID"] = self.get_unicode_string(rawConfig, 0x1b4)
config_data["Group"] = self.get_unicode_string(rawConfig, 0x1ca)
config_data["Version"] = self.get_unicode_string(rawConfig, 0x2bc)
config_data["Mutex"] = self.get_unicode_string(rawConfig, 0x2d4)
config_data["Install Dir"] = self.get_unicode_string(rawConfig, 0x1f8)
config_data["Install Name"] = self.get_unicode_string(rawConfig, 0x1e2)
config_data["HKLM"] = self.get_unicode_string(rawConfig, 0x23a)
config_data["HKCU"] = self.get_unicode_string(rawConfig, 0x250)
config_data["ActiveX Key"] = self.get_unicode_string(rawConfig, 0x266)
config_data["Injection"] = self.get_unicode_string(rawConfig, 0x216)
config_data["FTP Server"] = self.get_unicode_string(rawConfig, 0x35e)
config_data["FTP UserName"] = self.get_unicode_string(rawConfig, 0x402)
config_data["FTP Password"] = self.get_unicode_string(rawConfig, 0x454)
config_data["FTP Folder"] = self.get_unicode_string(rawConfig, 0x3b0)
config_data["Domain1"] = str(self.get_unicode_string(rawConfig, 0x14)+":"+str(unpack("<I", rawConfig[0:4])[0]))
config_data["Domain2"] = str(self.get_unicode_string(rawConfig, 0x66)+":"+str(unpack("<I", rawConfig[4:8])[0]))
config_data["Domain3"] = str(self.get_unicode_string(rawConfig, 0xb8)+":"+str(unpack("<I", rawConfig[8:12])[0]))
config_data["Domain4"] = str(self.get_unicode_string(rawConfig, 0x10a)+":"+str(unpack("<I", rawConfig[12:16])[0]))
config_data["Domain5"] = str(self.get_unicode_string(rawConfig, 0x15c)+":"+str(unpack("<I", rawConfig[16:20])[0]))
config_data["Msg Box Title"] = self.get_unicode_string(rawConfig, 0x50c)
config_data["Msg Box Text"] = self.get_unicode_string(rawConfig, 0x522)
self.config = config_data
def v35(self, config_raw):
config_data = {}
config_data['ID'] = self.get_unicode_string(config_raw, 0x1b4)
config_data['Group'] = self.get_unicode_string(config_raw, 0x1ca)
config_data['Version'] = self.get_unicode_string(config_raw, 0x2d8)
config_data['Mutex'] = self.get_unicode_string(config_raw, 0x2f0)
config_data['Install Dir'] = self.get_unicode_string(config_raw, 0x1f8)
config_data['Install Name'] = self.get_unicode_string(config_raw, 0x1e2)
config_data['HKLM'] = self.get_unicode_string(config_raw, 0x23a)
config_data['HKCU'] = self.get_unicode_string(config_raw, 0x250)
config_data['ActiveX Key'] = self.get_unicode_string(config_raw, 0x266)
config_data['Injection'] = self.get_unicode_string(config_raw, 0x216)
config_data['FTP Server'] = self.get_unicode_string(config_raw, 0x380)
config_data['FTP UserName'] = self.get_unicode_string(config_raw, 0x422)
config_data['FTP Password'] = self.get_unicode_string(config_raw, 0x476)
config_data['FTP Folder'] = self.get_unicode_string(config_raw, 0x3d2)
config_data['Domain1'] = str(self.get_unicode_string(config_raw, 0x14)+':'+str(unpack('<I', config_raw[0:4])[0]))
config_data['Domain2'] = str(self.get_unicode_string(config_raw, 0x66)+':'+str(unpack('<I', config_raw[4:8])[0]))
config_data['Domain3'] = str(self.get_unicode_string(config_raw, 0xb8)+':'+str(unpack('<I', config_raw[8:12])[0]))
config_data['Domain4'] = str(self.get_unicode_string(config_raw, 0x10a)+':'+str(unpack('<I', config_raw[12:16])[0]))
config_data['Domain5'] = str(self.get_unicode_string(config_raw, 0x15c)+':'+str(unpack('<I', config_raw[16:20])[0]))
config_data['Msg Box Title'] = self.get_unicode_string(config_raw, 0x52c)
config_data['Msg Box Text'] = self.get_unicode_string(config_raw, 0x542)
self.config = config_data
| 4,439
|
1,993
|
<gh_stars>1000+
/*
* Copyright 2013-2020 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package brave.handler;
import brave.Tracing;
import brave.TracingCustomizer;
import brave.propagation.TraceContext;
/**
 * Legacy hook invoked when a span is finished.
 *
 * @since 5.4
 * @deprecated Since 5.12 use {@link SpanHandler#end(TraceContext, MutableSpan, Cause)} with {@link
 * Cause#FINISHED}
 */
@Deprecated public abstract class FinishedSpanHandler extends SpanHandler {
  /**
   * A handler that does nothing. Use instead of comparing against {@code null} references.
   *
   * @since 5.4
   * @deprecated Since 5.12 use {@link SpanHandler#NOOP}
   */
  public static final FinishedSpanHandler NOOP = new FinishedSpanHandler() {
    @Override public boolean handle(TraceContext context, MutableSpan span) {
      return true;
    }

    @Override public String toString() {
      return "NoopFinishedSpanHandler{}";
    }
  };

  /**
   * Invoked for each finished (or flushed) span. Returning {@code false} drops the span.
   *
   * @since 5.4
   * @deprecated Since 5.12 use {@link SpanHandler#end(TraceContext, MutableSpan, Cause)} with
   * {@link Cause#FINISHED}
   */
  public abstract boolean handle(TraceContext context, MutableSpan span);

  /**
   * When {@code true}, orphaned spans are also passed to {@link #handle}.
   *
   * @since 5.7
   * @deprecated Since 5.12 use {@link SpanHandler#end(TraceContext, MutableSpan, Cause)} with
   * {@link Cause#ORPHANED}
   */
  public boolean supportsOrphans() {
    return false;
  }

  /**
   * @since 5.4
   * @deprecated Since 5.12, set {@link Tracing.Builder#alwaysSampleLocal()}. Tip: the same {@link
   * TracingCustomizer} that {@linkplain Tracing.Builder#addSpanHandler(SpanHandler) adds this
   * handler} can also set {@link Tracing.Builder#alwaysSampleLocal()}.
   */
  @Deprecated public boolean alwaysSampleLocal() {
    return false;
  }

  // Adapts the legacy handle()/supportsOrphans() contract onto SpanHandler.end().
  @Override public boolean end(TraceContext context, MutableSpan span, Cause cause) {
    if (cause == Cause.FLUSHED || cause == Cause.FINISHED) {
      return handle(context, span);
    }
    if (cause == Cause.ORPHANED) {
      // Orphans are only delivered when the subclass opted in via supportsOrphans().
      return !supportsOrphans() || handle(context, span);
    }
    assert false : "Bug!: missing state handling for " + cause;
    return true;
  }
}
| 848
|
2,151
|
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/geolocation/geolocation_service_impl.h"
#include "content/browser/permissions/permission_controller_impl.h"
#include "content/public/browser/permission_type.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/common/content_features.h"
#include "third_party/blink/public/mojom/feature_policy/feature_policy.mojom.h"
namespace content {
// Per-binding state tracker. Stores |permission_controller| unowned and
// starts with no permission request in flight.
GeolocationServiceImplContext::GeolocationServiceImplContext(
    PermissionControllerImpl* permission_controller)
    : permission_controller_(permission_controller),
      request_id_(PermissionController::kNoPendingOperation),
      weak_factory_(this) {}
// NOTE(review): nothing is canceled here; an in-flight permission request
// simply outlives the context and its callback is dropped via the WeakPtr
// bound in RequestPermission() -- confirm this is the intended teardown.
GeolocationServiceImplContext::~GeolocationServiceImplContext() {
}
// Requests the GEOLOCATION permission for the frame's last committed origin
// and invokes |callback| with the result. Only one request may be pending per
// context; a second concurrent request is reported as a bad mojo message.
void GeolocationServiceImplContext::RequestPermission(
    RenderFrameHost* render_frame_host,
    bool user_gesture,
    const base::Callback<void(blink::mojom::PermissionStatus)>& callback) {
  if (request_id_ != PermissionController::kNoPendingOperation) {
    mojo::ReportBadMessage(
        "GeolocationService client may only create one Geolocation at a "
        "time.");
    return;
  }
  request_id_ = permission_controller_->RequestPermission(
      PermissionType::GEOLOCATION, render_frame_host,
      render_frame_host->GetLastCommittedOrigin().GetURL(), user_gesture,
      // The completion callback is bound through a WeakPtr, so it is silently
      // dropped if this context is destroyed before the request resolves.
      base::Bind(&GeolocationServiceImplContext::HandlePermissionStatus,
                 weak_factory_.GetWeakPtr(), std::move(callback)));
}
// Completion handler: clears the in-flight marker so a new request may start,
// then forwards |permission_status| to the original caller.
void GeolocationServiceImplContext::HandlePermissionStatus(
    const base::Callback<void(blink::mojom::PermissionStatus)>& callback,
    blink::mojom::PermissionStatus permission_status) {
  request_id_ = PermissionController::kNoPendingOperation;
  callback.Run(permission_status);
}
// All three dependencies are stored unowned and DCHECKed non-null; they are
// presumed to outlive this service -- TODO confirm ownership in the embedder.
GeolocationServiceImpl::GeolocationServiceImpl(
    device::mojom::GeolocationContext* geolocation_context,
    PermissionControllerImpl* permission_controller,
    RenderFrameHost* render_frame_host)
    : geolocation_context_(geolocation_context),
      permission_controller_(permission_controller),
      render_frame_host_(render_frame_host) {
  DCHECK(geolocation_context);
  DCHECK(permission_controller);
  DCHECK(render_frame_host);
}
// Default teardown; bindings and contexts are released with |binding_set_|.
GeolocationServiceImpl::~GeolocationServiceImpl() {}
// Binds a new client, pairing it with a fresh context so each binding can
// track its own single pending permission request.
void GeolocationServiceImpl::Bind(
    blink::mojom::GeolocationServiceRequest request) {
  binding_set_.AddBinding(
      this, std::move(request),
      std::make_unique<GeolocationServiceImplContext>(permission_controller_));
}
// Entry point for clients wanting a device::mojom::Geolocation. Gated on
// feature policy, then on a (possibly asynchronous) permission request.
void GeolocationServiceImpl::CreateGeolocation(
    mojo::InterfaceRequest<device::mojom::Geolocation> request,
    bool user_gesture) {
  // With feature-policy-driven permissions enabled, a frame lacking the
  // "geolocation" feature has its request silently dropped.
  if (base::FeatureList::IsEnabled(features::kUseFeaturePolicyForPermissions) &&
      !render_frame_host_->IsFeatureEnabled(
          blink::mojom::FeaturePolicyFeature::kGeolocation)) {
    return;
  }
  binding_set_.dispatch_context()->RequestPermission(
      render_frame_host_, user_gesture,
      // There is an assumption here that the GeolocationServiceImplContext will
      // outlive the GeolocationServiceImpl.
      base::Bind(&GeolocationServiceImpl::CreateGeolocationWithPermissionStatus,
                 base::Unretained(this), base::Passed(&request)));
}
// Completes CreateGeolocation once the permission decision arrives: only a
// GRANTED status results in a bound Geolocation; otherwise |request| is
// simply dropped.
void GeolocationServiceImpl::CreateGeolocationWithPermissionStatus(
    device::mojom::GeolocationRequest request,
    blink::mojom::PermissionStatus permission_status) {
  if (permission_status != blink::mojom::PermissionStatus::GRANTED)
    return;
  geolocation_context_->BindGeolocation(std::move(request));
}
} // namespace content
| 1,267
|
460
|
<reponame>heatedcpu/webkettle
package org.flhy.ext.job.steps;
import com.mxgraph.model.mxCell;
import com.mxgraph.util.mxUtils;
import org.flhy.ext.core.PropsUI;
import org.flhy.ext.job.step.AbstractJobEntry;
import org.flhy.ext.utils.JSONArray;
import org.flhy.ext.utils.JSONObject;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.util.List;
@Component("COLUMNS_EXIST")
@Scope("prototype")
public class JobEntryColumnsExist extends AbstractJobEntry {

    /**
     * Populates a Kettle "columns exist" job entry from the graph cell's
     * attributes (connection, schema, table, and a JSON "fields" list).
     */
    @Override
    public void decode(JobEntryInterface jobEntry, mxCell cell, List<DatabaseMeta> databases, IMetaStore metaStore) throws Exception {
        org.pentaho.di.job.entries.columnsexist.JobEntryColumnsExist entry =
                (org.pentaho.di.job.entries.columnsexist.JobEntryColumnsExist) jobEntry;
        entry.setDatabase(DatabaseMeta.findDatabase(databases, cell.getAttribute("connection")));
        entry.setSchemaname(cell.getAttribute("schemaname"));
        entry.setTablename(cell.getAttribute("tablename"));

        // "fields" is a JSON array of {"field": <column name>} objects.
        JSONArray fields = JSONArray.fromObject(cell.getAttribute("fields"));
        String[] arguments = new String[fields.size()];
        for (int index = 0; index < arguments.length; index++) {
            arguments[index] = fields.getJSONObject(index).optString("field");
        }
        entry.setArguments(arguments);
    }

    /**
     * Serializes the job entry back into a DOM element, mirroring
     * {@link #decode}'s attribute layout.
     */
    @Override
    public Element encode(JobEntryInterface jobEntry) throws Exception {
        Document doc = mxUtils.createDocument();
        Element element = doc.createElement(PropsUI.JOB_JOBENTRY_NAME);
        org.pentaho.di.job.entries.columnsexist.JobEntryColumnsExist entry =
                (org.pentaho.di.job.entries.columnsexist.JobEntryColumnsExist) jobEntry;

        DatabaseMeta databaseMeta = entry.getDatabase();
        element.setAttribute("connection", databaseMeta == null ? "" : databaseMeta.getName());
        element.setAttribute("schemaname", entry.getSchemaname());
        element.setAttribute("tablename", entry.getTablename());

        JSONArray fields = new JSONArray();
        String[] arguments = entry.getArguments();
        if (arguments != null) {
            for (String argument : arguments) {
                JSONObject field = new JSONObject();
                field.put("field", argument);
                fields.add(field);
            }
        }
        element.setAttribute("fields", fields.toString());
        return element;
    }
}
| 916
|
2,180
|
/*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.crawler.util;
import java.util.Map;
import org.archive.io.warc.WARCWriter;
import org.archive.modules.CoreAttributeConstants;
import org.archive.modules.CrawlURI;
import org.archive.modules.revisit.IdenticalPayloadDigestRevisit;
import org.archive.modules.revisit.ServerNotModifiedRevisit;
import org.archive.util.ArchiveUtils;
import org.archive.util.Histotable;
/**
 * Tallies crawled bytes and URL counts, bucketed by whether a fetch was
 * novel, a payload-digest duplicate, server-not-modified, or another kind
 * of revisit. Also tracks bytes/URLs actually written to WARC records.
 */
public class CrawledBytesHistotable extends Histotable<String>
implements CoreAttributeConstants {
    private static final long serialVersionUID = 7923431123239026213L;

    // Byte-tally keys.
    public static final String NOTMODIFIED = "notModified";
    public static final String DUPLICATE = "dupByHash";
    public static final String OTHERDUPLICATE = "otherDup";
    public static final String NOVEL = "novel";
    // URL-count keys, parallel to the byte-tally keys above.
    public static final String NOTMODIFIEDCOUNT = "notModifiedCount";
    public static final String DUPLICATECOUNT = "dupByHashCount";
    public static final String OTHERDUPLICATECOUNT = "otherDupCount";
    public static final String NOVELCOUNT = "novelCount";
    // total size of warc response and resource record payloads (includes http
    // headers, does not include warc record headers)
    public static final String WARC_NOVEL_CONTENT_BYTES = "warcNovelContentBytes";
    public static final String WARC_NOVEL_URLS = "warcNovelUrls";

    public CrawledBytesHistotable() {
        super();
    }

    /**
     * Adds one finished fetch to the tallies, classifying it by its revisit
     * profile (not-modified, identical digest, other revisit, or novel) and
     * folding in any per-record WARC stats attached to the URI.
     */
    @SuppressWarnings("unchecked")
    public void accumulate(CrawlURI curi) {
        if (curi.getRevisitProfile() instanceof ServerNotModifiedRevisit) {
            tally(NOTMODIFIED, curi.getContentSize());
            tally(NOTMODIFIEDCOUNT,1);
        } else if (curi.getRevisitProfile() instanceof IdenticalPayloadDigestRevisit) {
            tally(DUPLICATE,curi.getContentSize());
            tally(DUPLICATECOUNT,1);
        } else if (curi.getRevisitProfile() != null) {
            tally(OTHERDUPLICATE, curi.getContentSize());
            tally(OTHERDUPLICATECOUNT, 1);
        } else {
            tally(NOVEL,curi.getContentSize());
            tally(NOVELCOUNT,1);
        }

        Map<String,Map<String,Long>> warcStats = (Map<String,Map<String,Long>>) curi.getData().get(A_WARC_STATS);
        if (warcStats != null) {
            tally(WARC_NOVEL_CONTENT_BYTES,
                    WARCWriter.getStat(warcStats, "response", "contentBytes")
                    + WARCWriter.getStat(warcStats, "resource", "contentBytes"));
            tally(WARC_NOVEL_URLS,
                    WARCWriter.getStat(warcStats, "response", "numRecords")
                    + WARCWriter.getStat(warcStats, "resource", "numRecords"));
        }
    }

    /**
     * Human-readable one-line summary, e.g.
     * "1.2 GiB crawled (900 MiB novel, 300 MiB dupByHash)".
     */
    public String summary() {
        StringBuilder sb = new StringBuilder();
        sb.append(ArchiveUtils.formatBytesForDisplay(getTotalBytes()));
        sb.append(" crawled (");
        sb.append(ArchiveUtils.formatBytesForDisplay(get(NOVEL)));
        sb.append(" novel");
        if(get(DUPLICATE)!=null) {
            sb.append(", ");
            sb.append(ArchiveUtils.formatBytesForDisplay(get(DUPLICATE)));
            sb.append(" ");
            sb.append(DUPLICATE);
        }
        if(get(NOTMODIFIED)!=null) {
            sb.append(", ");
            sb.append(ArchiveUtils.formatBytesForDisplay(get(NOTMODIFIED)));
            sb.append(" ");
            sb.append(NOTMODIFIED);
        }
        if(get(OTHERDUPLICATE)!=null) {
            sb.append(", ");
            sb.append(ArchiveUtils.formatBytesForDisplay(get(OTHERDUPLICATE)));
            sb.append(" ");
            // Fixed: previously appended OTHERDUPLICATECOUNT ("otherDupCount"),
            // which labeled a byte figure with the URL-count key, inconsistent
            // with the DUPLICATE and NOTMODIFIED branches above.
            sb.append(OTHERDUPLICATE);
        }
        sb.append(")");
        return sb.toString();
    }

    // NOTE(review): unlike summary(), these two assume every tally key is
    // present; whether Histotable.get defaults missing keys is not visible
    // here -- confirm before relying on them early in a crawl.
    public long getTotalBytes() {
        return get(NOVEL) + get(DUPLICATE) + get(NOTMODIFIED) + get(OTHERDUPLICATE);
    }

    public long getTotalUrls() {
        return get(NOVELCOUNT) + get(DUPLICATECOUNT) + get(NOTMODIFIEDCOUNT) + get(OTHERDUPLICATECOUNT);
    }
}
| 1,950
|
892
|
<reponame>github/advisory-database<gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-4pgc-f487-53cr",
"modified": "2022-05-13T01:53:07Z",
"published": "2022-05-13T01:53:07Z",
"aliases": [
"CVE-2018-6552"
],
"details": "Apport does not properly handle crashes originating from a PID namespace allowing local users to create certain files as root which an attacker could leverage to perform a denial of service via resource exhaustion, possibly gain root privileges, or escape from containers. The is_same_ns() function returns True when /proc/<global pid>/ does not exist in order to indicate that the crash should be handled in the global namespace rather than inside of a container. However, the portion of the data/apport code that decides whether or not to forward a crash to a container does not always replace sys.argv[1] with the value stored in the host_pid variable when /proc/<global pid>/ does not exist which results in the container pid being used in the global namespace. This flaw affects versions 2.20.8-0ubuntu4 through 2.20.9-0ubuntu7, 2.20.7-0ubuntu3.7, 2.20.7-0ubuntu3.8, 2.20.1-0ubuntu2.15 through 2.20.1-0ubuntu2.17, and 2.14.1-0ubuntu3.28.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2018-6552"
},
{
"type": "WEB",
"url": "https://usn.ubuntu.com/3664-2/"
},
{
"type": "WEB",
"url": "https://usn.ubuntu.com/usn/usn-3664-1"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
}
| 678
|
435
|
{
"category": "DjangoCon 2014",
"copyright_text": "",
"description": "",
"duration": 1466,
"id": 4795,
"language": "eng",
"quality_notes": "",
"recorded": "2014-09-16",
"related_urls": [
"http://amara.org/v/FOPU/"
],
"slug": "geo-django-geo-beyond-the-django",
"speakers": [
"<NAME>"
],
"summary": "Have you gone through the comprehensive GeoDjango docs, but wondered\nwhere to go next? Are you curious about how you can combine the power\nGeoDjango with other community-built tools? Do you want to create pretty\nmaps in Python? If so, you are in the right place. Learn about GeoDjango\nand Geographic Information Systems and navigate beyond the docs into the\nexciting GIS technology landscape.\n\nHelp us caption & translate this video!\n\nhttp://amara.org/v/FOPU/\n",
"tags": [],
"thumbnail_url": "https://i.ytimg.com/vi/mUhinowr3RY/maxresdefault.jpg",
"title": "Geo+Django: Geo beyond the Django",
"videos": [
{
"length": 0,
"type": "youtube",
"url": "https://www.youtube.com/watch?v=mUhinowr3RY"
}
]
}
| 411
|
369
|
<reponame>bitigchi/MuditaOS
// Copyright (c) 2017-2021, Mudita <NAME>.o.o. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#pragma once
#include "DatabaseModel.hpp"
#include "Application.hpp"
#include "ListItemProvider.hpp"
#include "Common/Query.hpp"
#include "Interface/ThreadRecord.hpp"
#include <vector>
#include <module-db/Interface/ContactRecord.hpp>
// Row data for a threads list: a message thread plus the contact record and
// phone number it was resolved against.
struct ThreadListStruct
{
    std::shared_ptr<ThreadRecord> thread;   // the conversation thread itself
    std::shared_ptr<ContactRecord> contact; // resolved contact; presumably may be null -- TODO confirm
    std::shared_ptr<utils::PhoneNumber::View> number; // peer phone number
    // Aggregating constructor; shares ownership of all three records.
    ThreadListStruct(std::shared_ptr<ThreadRecord> thread,
                     std::shared_ptr<ContactRecord> contact,
                     std::shared_ptr<utils::PhoneNumber::View> number)
        : thread(thread), contact(contact), number(number)
    {}
};
// Shared base for thread-list data providers: couples DatabaseModel row
// storage of ThreadListStruct with the gui::ListItemProvider interface.
class BaseThreadsRecordModel : public app::DatabaseModel<ThreadListStruct>, public gui::ListItemProvider
{
  public:
    BaseThreadsRecordModel() = delete; // an owning application is required
    BaseThreadsRecordModel(app::ApplicationCommon *app);
    // Number of records available to the list view.
    unsigned int requestRecordsCount() override;
    // Replaces the cached records; presumably returns success -- TODO confirm.
    bool updateRecords(std::vector<ThreadListStruct> records) override;
    // Accessor for the owning application (member inherited from a base class).
    app::ApplicationCommon *getApplication(void)
    {
        return application;
    };
};
| 451
|
1,319
|
<reponame>zhangyimi/Research
# *_*coding:utf-8 *_*
"""
import
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import json
import logging
import paddle.fluid as fluid
import six
from nets.common.transformer_encoder import encoder, two_stream_encoder, pre_process_layer, gelu
from nets.common.ernie_encoder import ErnieModel
class ErnieGenModel(object):
    """
    ERNIE-GEN encoder network.

    Builds a transformer encoder graph (optionally two-stream: context plus
    query stream) for generation tasks on top of paddle.fluid layers.
    """
    def __init__(self,
                 src_ids,
                 position_ids,
                 sentence_ids,
                 input_mask,
                 config,
                 use_fp16=False,
                 role_ids=None,
                 turn_ids=None,
                 weight_sharing=True,
                 task_type="normal",
                 two_stream=False,
                 decoding=False,
                 gather_idx=None):
        """
        :param src_ids: token id tensor(s); a pair when two_stream is True.
        :param position_ids: position id tensor(s).
        :param sentence_ids: sentence/segment id tensor(s).
        :param input_mask: attention mask tensor(s).
        :param config: hyper-parameter container supporting .get()/.has().
        :param use_fp16: cast embeddings and attention bias to float16.
        :param role_ids: dialogue role ids (used only when task_type=="dialog").
        :param turn_ids: dialogue turn ids (used only when task_type=="dialog").
        :param weight_sharing: share output softmax with word embeddings.
        :param task_type: "normal" or "dialog".
        :param two_stream: build the two-stream (context + query) encoder.
        :param decoding: allocate per-layer k/v caches for incremental decode.
        :param gather_idx: gather indices for incremental decoding.
        """
        self._hidden_size = config.get('hidden_size', 768)
        self._emb_size = config.get('emb_size', self._hidden_size)
        self._n_layer = config.get('num_hidden_layers', 12)
        self._n_head = config.get('num_attention_heads', 12)
        self._voc_size = config.get('vocab_size', 30522)
        self._max_position_seq_len = config.get('max_position_embeddings', 512)
        self._param_share = config.get('param_share', "normal")
        self._pre_encoder_cmd = config.get('pre_encoder_cmd', "nd")
        self._preprocess_cmd = config.get('preprocess_cmd', "")
        self._postprocess_cmd = config.get('postprocess_cmd', "dan")
        self._epsilon = config.get('epsilon', 1e-05)
        self._emb_mapping_in = config.get('emb_mapping_in', False)
        self._n_layer_per_block = config.get('n_layer_per_block', 1)
        if config.has('sent_type_vocab_size'):
            self._sent_types = config['sent_type_vocab_size']
        else:
            self._sent_types = config.get('type_vocab_size', 2)

        self._hidden_act = config.get('hidden_act', 'gelu')
        self._prepostprocess_dropout = config.get('hidden_dropout_prob', 0.1)
        self._attention_dropout = config.get('attention_probs_dropout_prob', 0.1)
        self._weight_sharing = weight_sharing

        self._word_emb_name = "word_embedding"
        self._pos_emb_name = "pos_embedding"
        self._sent_emb_name = "sent_embedding"
        self._dtype = "float16" if use_fp16 else "float32"
        self._emb_dtype = "float32"

        self._task_type = task_type
        self._is_dialogue_task = (task_type == "dialog")
        if self._is_dialogue_task:
            self._role_type_size = config["role_type_size"]
            self._turn_type_size = config["turn_type_size"]
            self._role_emb_name = "role_embedding"
            self._turn_emb_name = "turn_embedding"
        self._two_stream = two_stream

        if decoding:
            # Empty (length-0) k/v caches per layer, grown during decoding.
            self.caches = [{
                "k":
                fluid.layers.fill_constant_batch_size_like(
                    input=src_ids,
                    shape=[-1, 0, self._hidden_size],
                    dtype=self._dtype,
                    value=0),
                "v":
                fluid.layers.fill_constant_batch_size_like(
                    input=src_ids,
                    shape=[-1, 0, self._hidden_size],
                    dtype=self._dtype,
                    value=0),
            } for i in range(self._n_layer)]
        else:
            self.caches = None

        # Initialize all weights by truncated normal initializer, and all biases
        # will be initialized by constant zero by default.
        self._param_initializer = fluid.initializer.TruncatedNormal(
            scale=config.get('initializer_range', 0.02))

        self._build_model(src_ids, position_ids, sentence_ids,
                          input_mask, gather_idx, role_ids, turn_ids)

    def _gen_input(self, src_ids, position_ids, sentence_ids, input_mask,
                   role_ids=None, turn_ids=None):
        """
        Sum word/position plus either segment (normal task) or role/turn
        (dialogue task) embeddings, and build the per-head attention bias
        from the input mask.

        :return: (embedding output, n-head self-attention bias)
        """
        # padding id in vocabulary must be set to 0
        emb_out = fluid.layers.embedding(
            input=src_ids,
            size=[self._voc_size, self._emb_size],
            dtype=self._emb_dtype,
            param_attr=fluid.ParamAttr(
                name=self._word_emb_name, initializer=self._param_initializer),
            is_sparse=False)

        position_emb_out = fluid.layers.embedding(
            input=position_ids,
            size=[self._max_position_seq_len, self._emb_size],
            dtype=self._emb_dtype,
            param_attr=fluid.ParamAttr(
                name=self._pos_emb_name, initializer=self._param_initializer))

        if not self._is_dialogue_task:
            sent_emb_out = fluid.layers.embedding(
                sentence_ids,
                size=[self._sent_types, self._emb_size],
                dtype=self._emb_dtype,
                param_attr=fluid.ParamAttr(
                    name=self._sent_emb_name, initializer=self._param_initializer))
            emb_out = emb_out + position_emb_out + sent_emb_out
        else:
            role_emb_out = fluid.layers.embedding(
                input=role_ids,
                size=[self._role_type_size, self._emb_size],
                dtype=self._emb_dtype,
                param_attr=fluid.ParamAttr(
                    name=self._role_emb_name, initializer=self._param_initializer))
            turn_emb_out = fluid.layers.embedding(
                input=turn_ids,
                size=[self._turn_type_size, self._emb_size],
                dtype=self._emb_dtype,
                param_attr=fluid.ParamAttr(
                    name=self._turn_emb_name, initializer=self._param_initializer))
            emb_out = emb_out + position_emb_out + role_emb_out + turn_emb_out

        emb_out = pre_process_layer(
            emb_out,
            self._pre_encoder_cmd,
            self._prepostprocess_dropout,
            name="pre_encoder",
            epsilon=self._epsilon)
        if self._emb_mapping_in:
            emb_out = fluid.layers.fc(input=emb_out,
                                      num_flatten_dims=2,
                                      size=self._hidden_size,
                                      param_attr=fluid.ParamAttr(
                                          name='emb_hidden_mapping',
                                          initializer=self._param_initializer),
                                      bias_attr='emb_hidden_mapping_bias')

        # BUGFIX: was `self._dtype is "float16"` -- identity comparison on a
        # string literal is implementation-dependent; compare by value.
        if self._dtype == "float16":
            emb_out = fluid.layers.cast(x=emb_out, dtype=self._dtype)
            input_mask = fluid.layers.cast(x=input_mask, dtype=self._dtype)

        self_attn_mask = input_mask
        # Turn the {0,1} mask into a large negative bias on masked positions.
        self_attn_mask = fluid.layers.scale(
            x=self_attn_mask, scale=1e4, bias=-1.0, bias_after_scale=False)
        n_head_self_attn_mask = fluid.layers.stack(
            x=[self_attn_mask] * self._n_head, axis=1)
        n_head_self_attn_mask.stop_gradient = True

        return emb_out, n_head_self_attn_mask

    def encode(self, src_ids, position_ids, sentence_ids,
               input_mask, gather_idx=None, remove_mask=False,
               role_ids=None, turn_ids=None):
        """transformer encode: runs the (two-stream) encoder and returns the
        final hidden states (query-stream output in the two-stream case)."""
        if self._two_stream:
            # Inputs arrive as (context, query) pairs.
            emb_out, n_head_self_attn_mask = self._gen_input(src_ids[0],
                position_ids[0], sentence_ids[0], input_mask[0],
                role_ids=role_ids[0], turn_ids=turn_ids[0])
            g_emb_out, n_head_query_attn_mask = self._gen_input(src_ids[1],
                position_ids[1], sentence_ids[1], input_mask[1],
                role_ids=role_ids[1], turn_ids=turn_ids[1])

            self._enc_out_context, self._enc_out_query, self._checkpoints = two_stream_encoder(
                enc_input_context=emb_out,
                enc_input_query=g_emb_out,
                attn_bias_context=n_head_self_attn_mask,
                attn_bias_query=n_head_query_attn_mask,
                n_layer=self._n_layer,
                n_head=self._n_head,
                d_key=self._hidden_size // self._n_head,
                d_value=self._hidden_size // self._n_head,
                d_model=self._hidden_size,
                d_inner_hid=self._hidden_size * 4,
                prepostprocess_dropout=self._prepostprocess_dropout,
                attention_dropout=self._attention_dropout,
                relu_dropout=0,
                hidden_act=self._hidden_act,
                preprocess_cmd=self._preprocess_cmd,
                postprocess_cmd=self._postprocess_cmd,
                param_initializer=self._param_initializer,
                name='encoder',
                param_share=self._param_share,
                epsilon=self._epsilon,
                n_layer_per_block=self._n_layer_per_block)
            enc_out = self._enc_out_query
        else:
            emb_out, n_head_self_attn_mask = self._gen_input(src_ids,
                position_ids, sentence_ids, input_mask,
                role_ids=role_ids, turn_ids=turn_ids)
            enc_out, self._checkpoints = encoder(
                enc_input=emb_out,
                attn_bias=n_head_self_attn_mask,
                n_layer=self._n_layer,
                n_head=self._n_head,
                d_key=self._hidden_size // self._n_head,
                d_value=self._hidden_size // self._n_head,
                d_model=self._hidden_size,
                d_inner_hid=self._hidden_size * 4,
                prepostprocess_dropout=self._prepostprocess_dropout,
                attention_dropout=self._attention_dropout,
                relu_dropout=0,
                hidden_act=self._hidden_act,
                preprocess_cmd=self._preprocess_cmd,
                postprocess_cmd=self._postprocess_cmd,
                param_initializer=self._param_initializer,
                name='encoder',
                param_share=self._param_share,
                epsilon=self._epsilon,
                n_layer_per_block=self._n_layer_per_block,
                caches=self.caches,
                gather_idx=gather_idx,
                remove_mask=remove_mask)
        if self._dtype == "float16":
            # Work continues in float32 downstream of the encoder.
            enc_out = fluid.layers.cast(
                x=enc_out, dtype=self._emb_dtype)
        return enc_out

    def _build_model(self, src_ids, position_ids, sentence_ids,
                     input_mask, gather_idx=None, role_ids=None, turn_ids=None):
        # Builds the graph once at construction and caches the encoder output.
        self._enc_out = self.encode(src_ids, position_ids, sentence_ids,
                                    input_mask, gather_idx, remove_mask=False,
                                    role_ids=role_ids, turn_ids=turn_ids)

    def get_sequence_output(self):
        """
        :return: final per-token hidden states from the encoder.
        """
        return self._enc_out

    def get_pooled_output(self):
        """Get the first feature of each sequence for classification"""
        next_sent_feat = fluid.layers.slice(
            input=self._enc_out, axes=[1], starts=[0], ends=[1])
        next_sent_feat = fluid.layers.fc(
            input=next_sent_feat,
            size=self._hidden_size,
            act="tanh",
            param_attr=fluid.ParamAttr(
                name="pooled_fc.w_0", initializer=self._param_initializer),
            bias_attr="pooled_fc.b_0")
        return next_sent_feat
| 6,141
|
456
|
<reponame>erlong16/react-native<gh_stars>100-1000
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "NodeStateWrapper.h"
#include <fbjni/fbjni.h>
#include <react/jni/ReadableNativeMap.h>
using namespace facebook::jni;
namespace facebook {
namespace react {
// fbjni factory: creates the C++ half of the Java hybrid object.
jni::local_ref<NodeStateWrapper::jhybriddata> NodeStateWrapper::initHybrid(
    jni::alias_ref<jclass>) {
  return makeCxxInstance();
}
// Snapshots the current state as a folly::dynamic and wraps it in a
// ReadableNativeMap for the Java side.
jni::local_ref<ReadableNativeMap::jhybridobject> NodeStateWrapper::getState() {
  folly::dynamic map = state_->getDynamic();
  local_ref<ReadableNativeMap::jhybridobject> readableNativeMap =
      ReadableNativeMap::newObjectCxxArgs(map);
  return readableNativeMap;
}
// Applies a state update coming from Java. Note |map| is consumed.
void NodeStateWrapper::updateState(ReadableNativeMap *map) {
  // Get folly::dynamic from map
  auto dynamicMap = map->consume();
  // Set state
  state_->updateState(dynamicMap);
}
// Registers this hybrid's native methods with JNI.
void NodeStateWrapper::registerNatives() {
  registerHybrid({
      makeNativeMethod("getState", NodeStateWrapper::getState),
      makeNativeMethod("updateState", NodeStateWrapper::updateState),
  });
}
} // namespace react
} // namespace facebook
| 411
|
1,109
|
<reponame>qiquanzhijia/gryphon<gh_stars>1000+
import functools
from delorean import Delorean
from gryphon.lib.logger import get_logger
logger = get_logger(__name__)
from gryphon.lib.session import get_a_memcache_connection
def _localcache_get(localcache, key):
    """Return the cached value for `key`, or None when absent or expired."""
    cached = localcache.get(key)
    if cached:
        value, expires_at = cached
        if Delorean().epoch <= expires_at:
            return value
        logger.debug('Localcache for %s expired' % key)
    return None
def _localcache_set(localcache, key, value, time):
    """Store `value` under `key` with a time-to-live of `time` seconds."""
    expires_at = Delorean().epoch + time
    logger.debug("Setting localcache %s" % expires_at)
    localcache[key] = [value, expires_at]
def cache_me(time=1200, ignore_self=False):
    """
    Decorator that caches the result of a method for the specified time in seconds.

    Lookups go through a per-function local dict first, then a shared memcache
    connection (opened once, when the function is decorated). Memcache hits are
    promoted into the local cache.

    NOTE(review): falsy results (0, '', None, ...) never count as cache hits,
    so they are recomputed on every call -- confirm that is intended.

    Use it as:
    @cache_me(time=1200) # 20min
    def functionToCache(arguments):
        ...
    """
    def decorator(function):
        # Per-function local cache, also exposed as function.localcache.
        localcache = function.localcache = {}
        memcache = get_a_memcache_connection()
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            if ignore_self:
                # Drop the bound instance so keys are shared across instances.
                key_args = args[1:]
            else:
                key_args = args
            # NOTE(review): keys built via str() can collide for arguments with
            # ambiguous reprs -- confirm callers only pass simple values.
            key = '%s%s%s' % (function.__name__, str(key_args), str(kwargs))
            key = key.replace(" ", "_") # Memcache doesn't allow spaces in keys
            # try localcache first
            local_value = _localcache_get(localcache, key)
            logger.debug('Localcache lookup for %s, found? %s', key, local_value != None)
            if local_value:
                return local_value
            # then try memcache
            memcache_value = memcache.get(key)
            logger.debug('Memcache lookup for %s, found? %s', key, memcache_value != None)
            if memcache_value:
                # Promote into the local tier so the next hit skips memcache.
                _localcache_set(localcache, key, memcache_value, time)
                return memcache_value
            # otherwise calculate it
            calculated_value = function(*args, **kwargs)
            memcache.set(key, calculated_value, time=time)
            _localcache_set(localcache, key, calculated_value, time)
            return calculated_value
        return wrapper
    return decorator
| 987
|
524
|
{
"title": "Datastore Admin",
"defaultService": "datastoreadmin\/readme",
"services": [
{
"title": "Overview",
"type": "datastoreadmin\/readme"
}, {
"title": "DatastoreAdminClient",
"type": "datastoreadmin\/v1/datastoreadminclient"
}
],
"pattern": "datastoreadmin\/\\w{1,}"
}
| 188
|
348
|
{"nom":"<NAME>","circ":"1ère circonscription","dpt":"Oise","inscrits":126,"abs":46,"votants":80,"blancs":0,"nuls":0,"exp":80,"res":[{"nuance":"LR","nom":"M. <NAME>","voix":45},{"nuance":"REM","nom":"<NAME>","voix":13},{"nuance":"FN","nom":"Mme <NAME>","voix":12},{"nuance":"RDG","nom":"Mme <NAME>","voix":4},{"nuance":"FI","nom":"<NAME>","voix":3},{"nuance":"EXG","nom":"M. <NAME>","voix":1},{"nuance":"ECO","nom":"<NAME>","voix":1},{"nuance":"COM","nom":"M. <NAME>","voix":1},{"nuance":"DLF","nom":"Mme <NAME>","voix":0},{"nuance":"EXD","nom":"Mme <NAME>","voix":0},{"nuance":"DIV","nom":"M. <NAME>","voix":0}]}
| 252
|
5,865
|
<filename>common/src/test/java/com/thoughtworks/go/domain/ArtifactPlanTest.java
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain;
import com.thoughtworks.go.config.ArtifactTypeConfigs;
import com.thoughtworks.go.config.BuildArtifactConfig;
import com.thoughtworks.go.config.PluggableArtifactConfig;
import com.thoughtworks.go.config.TestArtifactConfig;
import com.thoughtworks.go.work.DefaultGoPublisher;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static com.thoughtworks.go.domain.packagerepository.ConfigurationPropertyMother.create;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class ArtifactPlanTest {
    // Scratch directory tree (testFolder/src) used by the publishing test;
    // created fresh before each test and deleted afterwards.
    private File testFolder;
    private File srcFolder;

    @BeforeEach
    public void setUp() {
        testFolder = new File("test.com");
        srcFolder = new File(testFolder, "src");
        srcFolder.mkdirs();
    }

    @AfterEach
    public void tearDown() {
        // Quiet delete: ignores a missing folder or transient IO failures.
        FileUtils.deleteQuietly(testFolder);
    }

    @Test
    public void shouldPublishArtifacts() {
        final DefaultGoPublisher publisher = mock(DefaultGoPublisher.class);
        final ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "src", "dest");

        artifactPlan.publishBuiltInArtifacts(publisher, testFolder);

        // The plan should resolve "src" relative to the working folder and
        // upload it to the configured destination.
        verify(publisher).upload(new File(testFolder, "src"), "dest");
    }

    @Test
    public void shouldIgnoreIdAndBuildIdAsPartOfEqualAndHashCodeCheck() {
        final ArtifactPlan installer_1 = new ArtifactPlan(ArtifactPlanType.file, "src", "dest");
        installer_1.setId(100);
        installer_1.setBuildId(1000);

        final ArtifactPlan installer_2 = new ArtifactPlan(ArtifactPlanType.file, "src", "dest");
        installer_2.setId(200);
        installer_2.setBuildId(2000);

        // Plans that differ only in persistence ids must compare equal in
        // both directions (equals is symmetric)...
        assertTrue(installer_1.equals(installer_2));
        assertTrue(installer_2.equals(installer_1));
        // ...and, per the equals/hashCode contract this test's name promises
        // to cover, equal objects must also produce equal hash codes.
        assertThat(installer_1.hashCode(), is(installer_2.hashCode()));
    }

    @Test
    public void shouldNormalizePath() {
        // Windows-style backslashes are normalized to forward slashes.
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "folder\\src", "folder\\dest");
        assertThat(artifactPlan.getSrc(), is("folder/src"));
        assertThat(artifactPlan.getDest(), is("folder/dest"));
    }

    @Test
    public void shouldProvideAppendFilePathToDest() {
        // The path fragment matched by the wildcard portion ("a/b") is
        // appended under the destination.
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "test/**/*/a.log", "logs");
        assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
                new File("pipelines/pipelineA/test/a/b/a.log")), is("logs/a/b"));
    }

    @Test
    public void shouldProvideAppendFilePathToDestWhenUsingDoubleStart() {
        // With a leading "**" the whole matched subtree ("test/a/b") is kept.
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "**/*/a.log", "logs");
        assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
                new File("pipelines/pipelineA/test/a/b/a.log")), is("logs/test/a/b"));
    }

    @Test
    public void shouldProvideAppendFilePathToDestWhenPathProvidedAreSame() {
        // A fully literal source pattern adds nothing below the destination.
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "test/a/b/a.log", "logs");
        assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
                new File("pipelines/pipelineA/test/b/a.log")), is("logs"));
    }

    @Test
    public void shouldProvideAppendFilePathToDestWhenUsingSingleStarToMatchFile() {
        // A single "*" that only matches the file name adds no subdirectory.
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "test/a/b/*.log", "logs");
        assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
                new File("pipelines/pipelineA/test/a/b/a.log")), is("logs"));
    }

    @Test
    public void shouldProvideAppendFilePathToDestWhenPathMatchingAtTheRoot() {
        ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "*.jar", "logs");
        assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
                new File("pipelines/pipelineA/a.jar")), is("logs"));
    }

    @Test
    public void shouldTrimThePath() {
        // Trailing whitespace in src/dest is stripped on construction.
        assertThat(new ArtifactPlan(ArtifactPlanType.file, "pkg ", "logs "),
                is(new ArtifactPlan(ArtifactPlanType.file, "pkg", "logs")));
    }

    @Test
    public void toArtifactPlans_shouldConvertArtifactConfigsToArtifactPlanList() {
        final PluggableArtifactConfig artifactConfig = new PluggableArtifactConfig("id", "storeId", create("Foo", true, "Bar"));
        final ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs(Arrays.asList(
                new BuildArtifactConfig("source", "destination"),
                new TestArtifactConfig("test-source", "test-destination"),
                artifactConfig
        ));

        final List<ArtifactPlan> artifactPlans = ArtifactPlan.toArtifactPlans(artifactTypeConfigs);

        // Each config type maps to the corresponding plan type; the pluggable
        // config is carried as its JSON representation.
        assertThat(artifactPlans, containsInAnyOrder(
                new ArtifactPlan(ArtifactPlanType.file, "source", "destination"),
                new ArtifactPlan(ArtifactPlanType.unit, "test-source", "test-destination"),
                new ArtifactPlan(artifactConfig.toJSON())
        ));
    }

    @Test
    public void shouldConvertPluggableArtifactConfigToArtifactPlans() {
        final PluggableArtifactConfig artifactConfig = new PluggableArtifactConfig("ID", "StoreID", create("Foo", true, "Bar"), create("Baz", false, "Car"));

        final ArtifactPlan artifactPlan = new ArtifactPlan(artifactConfig);

        // The converted plan is "external" and exposes id, storeId and a
        // nested configuration map (three top-level entries in total).
        assertThat(artifactPlan.getArtifactPlanType(), is(ArtifactPlanType.external));
        assertThat(artifactPlan.getPluggableArtifactConfiguration().size(), is(3));
        assertThat(artifactPlan.getPluggableArtifactConfiguration(), hasEntry("id", "ID"));
        assertThat(artifactPlan.getPluggableArtifactConfiguration(), hasEntry("storeId", "StoreID"));

        // Unchecked cast: the "configuration" entry is known by construction
        // to be a Map<String, String> of plugin properties.
        final Map<String, String> configuration = (Map<String, String>) artifactPlan.getPluggableArtifactConfiguration().get("configuration");

        assertThat(configuration.size(), is(2));
        assertThat(configuration, hasEntry("Foo", "Bar"));
        assertThat(configuration, hasEntry("Baz", "Car"));
    }
}
| 2,579
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.