repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
Psychoanalytic-Electronic-Publishing/OpenPubArchive-Content-Server
|
app/tests/testOpasFilesupport.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import localsecrets
import pathlib
import opasFileSupport
from unitTestConfig import base_plus_endpoint_encoded, headers
class TestOpasFileSupport(unittest.TestCase):
    """
    Tests of opasFileSupport.FlexFileSystem against either the local file
    system or S3, depending on the localsecrets configuration.

    Note: tests are performed in alphabetical order, hence the function naming
    with forced order in the names.
    """

    def test_0_Find(self):
        """find() locates a known XML file by name (AWS/S3 config only)."""
        if localsecrets.S3_KEY is not None:  # test AWS
            print("S3 FS tests")
            fs = opasFileSupport.FlexFileSystem(root=localsecrets.XML_ORIGINALS_PATH)
            filename = "ADPSA.001.0007A(bEXP_ARCH1).XML"
            ret_val = fs.find(filename)
            print(ret_val)
            self.assertIsNotNone(ret_val, f"find() returned None for {filename}")
            self.assertIn(filename, ret_val)

    def test_0_exists(self):
        """exists() reports True for a known XML file (AWS/S3 config only).

        Fixed: this test was a verbatim copy of test_0_Find and called
        fs.find() instead of fs.exists(), so exists() was never exercised.
        """
        if localsecrets.S3_KEY is not None:  # test AWS
            print("S3 FS tests")
            fs = opasFileSupport.FlexFileSystem(root=localsecrets.XML_ORIGINALS_PATH)
            filename = "ADPSA.001.0007A(bEXP_ARCH1).XML"
            ret_val = fs.exists(filespec=filename, path=localsecrets.XML_ORIGINALS_PATH)
            print(ret_val)
            self.assertTrue(ret_val, f"exists() was False for {filename}")

    def test_1_fetch_file_info(self):
        """fileinfo() reports a plausible size for a known file."""
        # Uses S3 when localsecrets is configured for it, else the local FS.
        fs = opasFileSupport.FlexFileSystem(key=localsecrets.S3_KEY,
                                            secret=localsecrets.S3_SECRET,
                                            root=localsecrets.XML_ORIGINALS_PATH)
        filename = "PEPTOPAUTHVS.001.0021A(bEXP_ARCH1).XML"
        filespec = fs.find(filename)
        ret = fs.fileinfo(filespec=filespec)
        self.assertGreaterEqual(ret.filesize, 16719)

    def test_0_get_filespec(self):
        """fullfilespec() builds the expected full path for a known image."""
        if localsecrets.S3_KEY is not None:  # test AWS
            print("S3 FS tests")
            fs = opasFileSupport.FlexFileSystem(key=localsecrets.S3_KEY,
                                                secret=localsecrets.S3_SECRET,
                                                root=localsecrets.IMAGE_SOURCE_PATH)
            ret = fs.fullfilespec(filespec="IJAPS.016.0181A.FIG002.jpg",
                                  path=localsecrets.IMAGE_SOURCE_PATH)
            self.assertEqual(ret, 'pep-web-files/doc/g/IJAPS.016.0181A.FIG002.jpg')
        else:
            print("Local FS tests")
            fs = opasFileSupport.FlexFileSystem(root=localsecrets.XML_ORIGINALS_PATH)
            # Example: fs.fullfilespec(filespec="pep.css", path="embedded-graphics")
            # returns 'pep-graphics/embedded-graphics/pep.css'.
            # (Fixed: that example result used to sit here as a stray no-op
            # string-literal statement.)
            ret = fs.fullfilespec(filespec="IJAPS.016.0181A.FIG002.jpg",
                                  path=localsecrets.IMAGE_SOURCE_PATH)
            # NOTE(review): hard-coded Windows path — only valid with the
            # original dev machine's localsecrets layout; confirm.
            self.assertEqual(ret, 'X:\\_PEPA1\\g\\IJAPS.016.0181A.FIG002.jpg')

    def test_2_exists(self):
        """exists() distinguishes a real image file from a missing one."""
        fs = opasFileSupport.FlexFileSystem(root=localsecrets.IMAGE_SOURCE_PATH)
        ret = fs.exists(filespec="IJAPS.016.0181A.FIG002.jpg", path=localsecrets.IMAGE_SOURCE_PATH)
        self.assertTrue(ret)
        ret = fs.exists(filespec="IJAPS.016.0181A.FIG002B.jpg", path=localsecrets.IMAGE_SOURCE_PATH)
        self.assertFalse(ret)

    def test_3_get_download_filename(self):
        """get_download_filename() returns a path containing the filespec."""
        fs = opasFileSupport.FlexFileSystem(root=localsecrets.PDF_ORIGINALS_PATH)
        filespec = "AIM.026.0021A.pdf"
        ret = fs.get_download_filename(filespec=filespec, path=localsecrets.PDF_ORIGINALS_PATH)
        print(ret)
        self.assertIn(filespec, ret)

    def test_4_get_image_filename(self):
        """get_image_filename() works both with and without an explicit path."""
        # Root must be set for the image dir if it is not the default root.
        fs = opasFileSupport.FlexFileSystem(root=localsecrets.IMAGE_SOURCE_PATH)
        filespec = "AIM.036.0275A.FIG001"
        ret = fs.get_image_filename(filespec=filespec, path=localsecrets.IMAGE_SOURCE_PATH)
        print(ret)
        self.assertIn(filespec, ret)
        ret = fs.get_image_filename(filespec=filespec)
        print(ret)
        self.assertIn(filespec, ret)

    def test_5_get_image_len(self):
        """get_image_binary() returns at least the known image size."""
        fs = opasFileSupport.FlexFileSystem(root=localsecrets.IMAGE_SOURCE_PATH)
        filespec = "AIM.036.0275A.FIG001"
        img_bin = fs.get_image_binary(filespec=filespec, path=localsecrets.IMAGE_SOURCE_PATH)
        image_len = len(img_bin)
        print(image_len)
        self.assertGreaterEqual(image_len, 26038)

    def test_6_get_file_contents(self):
        """get_file_contents() returns at least the known content length."""
        fs = opasFileSupport.FlexFileSystem(root=localsecrets.XML_ORIGINALS_PATH)
        filespec = "ADPSA.001.0007A(bEXP_ARCH1).XML"
        content = fs.get_file_contents(filespec=filespec, path=localsecrets.XML_ORIGINALS_PATH)
        content_len = len(content)
        print(content_len)
        self.assertGreaterEqual(content_len, 691)

    def test_7_get_matching_filenames(self):
        """get_matching_filelist() honors regex, date and max_items filters."""
        pat = r"(.*?)\((bEXP_ARCH1|bSeriesTOC)\)\.(xml|XML)$"
        fs = opasFileSupport.FlexFileSystem(key=localsecrets.S3_KEY,
                                            secret=localsecrets.S3_SECRET,
                                            root=localsecrets.XML_ORIGINALS_PATH)
        root = pathlib.Path(localsecrets.XML_ORIGINALS_PATH)
        testfullpath = root / "_PEPCurrent/IJP/"
        # Nothing in this folder was revised after the cutoff date.
        matchlist = fs.get_matching_filelist(path=testfullpath, filespec_regex=pat,
                                             revised_after_date="2022-09-04")
        print(len(matchlist))
        self.assertEqual(len(matchlist), 0)
        # Without the date filter there are many matches.
        matchlist = fs.get_matching_filelist(path=testfullpath, filespec_regex=pat)
        print(len(matchlist))
        self.assertGreaterEqual(len(matchlist), 100)
        # max_items caps the returned list.  (Fixed: this check used to be
        # duplicated verbatim.)
        matchlist = fs.get_matching_filelist(path=testfullpath, filespec_regex=pat, max_items=20)
        print(len(matchlist))
        self.assertEqual(len(matchlist), 20)

    # function removed 2021-05-05
    #res = opasFileSupport.get_s3_matching_files(subpath_tomatch="_PEPArchive/BAP/.*\.xml", after_revised_date="2020-09-01")
    #res = opasFileSupport.get_s3_matching_files(subpath_tomatch="_PEPCurrent/.*\.xml")
if __name__ == '__main__':
    # exit=False keeps unittest.main() from calling sys.exit(), so the
    # completion message below is actually reachable (it never printed before).
    unittest.main(exit=False)
    print("Tests Complete.")
|
ehrbase/ehrbase_client_library
|
client/src/test/java/org/ehrbase/client/classgenerator/examples/geccoserologischerbefundcomposition/definition/BefundObservation.java
|
<gh_stars>1-10
/*
* Copyright (c) 2022 vitasystems GmbH and Hannover Medical School.
*
* This file is part of project openEHR_SDK
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehrbase.client.classgenerator.examples.geccoserologischerbefundcomposition.definition;
import com.nedap.archie.rm.archetyped.FeederAudit;
import com.nedap.archie.rm.datastructures.Cluster;
import com.nedap.archie.rm.generic.PartyProxy;
import java.time.temporal.TemporalAccessor;
import java.util.List;
import javax.annotation.processing.Generated;
import org.ehrbase.client.annotations.Archetype;
import org.ehrbase.client.annotations.Choice;
import org.ehrbase.client.annotations.Entity;
import org.ehrbase.client.annotations.Path;
import org.ehrbase.client.classgenerator.interfaces.EntryEntity;
import org.ehrbase.client.classgenerator.shareddefinition.Language;
@Entity
@Archetype("openEHR-EHR-OBSERVATION.laboratory_test_result.v1")
@Generated(
        value = "org.ehrbase.client.classgenerator.ClassGenerator",
        date = "2021-05-19T16:20:30.040760700+02:00",
        comments = "https://github.com/ehrbase/openEHR_SDK Version: null")
public class BefundObservation implements EntryEntity {
    // NOTE: generated code — regenerate via the ClassGenerator rather than
    // hand-editing when the template changes.

    /** Path: GECCO_Serologischer Befund/Befund/origin */
    @Path("/data[at0001]/origin|value")
    private TemporalAccessor originValue;

    /**
     * Path: .../Labor, welches den Untersuchungsauftrag annimmt.
     * The laboratory that received the test request and carries overall
     * responsibility for reporting, even when other labs perform parts of
     * the work; it may run the test itself or refer it to another lab.
     */
    @Path("/protocol[at0004]/items[at0017]")
    private Cluster laborWelchesDenUntersuchungsauftragAnnimmt;

    /**
     * Path: .../Details der Testanforderung/Anforderung.
     * Name of the originally requested test; used when the requested test
     * differs from the test the lab actually performed.
     */
    @Path("/protocol[at0004]/items[at0094]/items[at0106 and name/value='Anforderung']/value|defining_code")
    private AnforderungDefiningCode anforderungDefiningCode;

    /**
     * Path: .../Details der Testanforderung/Einsender.
     * Details about the clinician or department that requested the result.
     */
    @Path("/protocol[at0004]/items[at0094]/items[at0090]")
    private Cluster einsender;

    /**
     * Path: .../Details der Testanforderung/Verteilerliste.
     * Further clinicians or organisations that need a copy of the results;
     * informational only — the primary recipient is expected to act on them.
     */
    @Path("/protocol[at0004]/items[at0094]/items[at0035]")
    private List<Cluster> verteilerliste;

    /**
     * Path: .../Test Details.
     * Structured details about methodology, device or interpretation,
     * e.g. "details of the ELISA/nephelometry".
     */
    @Path("/protocol[at0004]/items[at0110]")
    private List<Cluster> testDetails;

    /**
     * Path: .../Erweiterung.
     * Extension slot for local content or mappings to other reference
     * models (e.g. FHIR or CIMI).
     */
    @Path("/protocol[at0004]/items[at0117]")
    private List<Cluster> erweiterung;

    /** Path: GECCO_Serologischer Befund/Befund/subject */
    @Path("/subject")
    private PartyProxy subject;

    /** Path: GECCO_Serologischer Befund/Befund/language */
    @Path("/language")
    private Language language;

    /** Path: GECCO_Serologischer Befund/Befund/feeder_audit */
    @Path("/feeder_audit")
    private FeederAudit feederAudit;

    /**
     * Path: .../Jedes Ereignis.
     * Any point-in-time or interval event, defined in the template or at
     * runtime.
     */
    @Path("/data[at0001]/events[at0002]")
    @Choice
    private List<BefundJedesEreignisChoice> jedesEreignis;

    /* Generated accessors — one getter/setter pair per field above. */

    public void setOriginValue(TemporalAccessor originValue) {
        this.originValue = originValue;
    }

    public TemporalAccessor getOriginValue() {
        return this.originValue;
    }

    public void setLaborWelchesDenUntersuchungsauftragAnnimmt(Cluster laborWelchesDenUntersuchungsauftragAnnimmt) {
        this.laborWelchesDenUntersuchungsauftragAnnimmt = laborWelchesDenUntersuchungsauftragAnnimmt;
    }

    public Cluster getLaborWelchesDenUntersuchungsauftragAnnimmt() {
        return this.laborWelchesDenUntersuchungsauftragAnnimmt;
    }

    public void setAnforderungDefiningCode(AnforderungDefiningCode anforderungDefiningCode) {
        this.anforderungDefiningCode = anforderungDefiningCode;
    }

    public AnforderungDefiningCode getAnforderungDefiningCode() {
        return this.anforderungDefiningCode;
    }

    public void setEinsender(Cluster einsender) {
        this.einsender = einsender;
    }

    public Cluster getEinsender() {
        return this.einsender;
    }

    public void setVerteilerliste(List<Cluster> verteilerliste) {
        this.verteilerliste = verteilerliste;
    }

    public List<Cluster> getVerteilerliste() {
        return this.verteilerliste;
    }

    public void setTestDetails(List<Cluster> testDetails) {
        this.testDetails = testDetails;
    }

    public List<Cluster> getTestDetails() {
        return this.testDetails;
    }

    public void setErweiterung(List<Cluster> erweiterung) {
        this.erweiterung = erweiterung;
    }

    public List<Cluster> getErweiterung() {
        return this.erweiterung;
    }

    public void setSubject(PartyProxy subject) {
        this.subject = subject;
    }

    public PartyProxy getSubject() {
        return this.subject;
    }

    public void setLanguage(Language language) {
        this.language = language;
    }

    public Language getLanguage() {
        return this.language;
    }

    public void setFeederAudit(FeederAudit feederAudit) {
        this.feederAudit = feederAudit;
    }

    public FeederAudit getFeederAudit() {
        return this.feederAudit;
    }

    public void setJedesEreignis(List<BefundJedesEreignisChoice> jedesEreignis) {
        this.jedesEreignis = jedesEreignis;
    }

    public List<BefundJedesEreignisChoice> getJedesEreignis() {
        return this.jedesEreignis;
    }
}
|
ShengFQ/DCNet
|
app/src/main/java/com/bandary/dcnet/utils/WifiConfHelper.java
|
<filename>app/src/main/java/com/bandary/dcnet/utils/WifiConfHelper.java
/*
* WifiConfHelper
* @author shengfq
* @since 2015-01-30
* @version 2.0
*
* */
package com.bandary.dcnet.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import android.content.Context;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.net.wifi.WpsInfo;
import android.util.Log;
import com.bandary.dcnet.R;
/**
 * Helper around the Android Wi-Fi stack: scans for access points and exposes
 * the scan results, plus the security type and signal level of each network.
 */
public class WifiConfHelper {

    private final String TAG = "WifiConfHelper";

    // Application context used to obtain the WifiManager system service.
    private Context mContext;

    // Latest scan results; one ScanResult per visible access point.
    private List<ScanResult> mResults;

    /**
     * Private: instances are obtained through {@link #getInstance(Context)}.
     * An initial scan is started immediately.
     */
    private WifiConfHelper(Context context) {
        // Keep the application context, not the caller's (possibly Activity)
        // context, so the static singleton cannot leak an Activity.
        this.mContext = context.getApplicationContext();
        this.mResults = loadScanResult();
    }

    // Lazily created singleton instance.
    private static WifiConfHelper instance;

    /** Return the single shared WifiConfHelper. */
    public static WifiConfHelper getInstance(Context context) {
        if (instance == null) {
            instance = new WifiConfHelper(context);
        }
        return instance;
    }

    /** Obtain the system WifiManager service. */
    private WifiManager getWifiManagerService() {
        return (WifiManager) mContext.getSystemService(Context.WIFI_SERVICE);
    }

    /**
     * Info about the currently connected access point.
     * NOTE(review): when not connected, getConnectionInfo() may describe a
     * disconnected state — callers should check before use.
     */
    private WifiInfo getWifiInfo() {
        WifiManager wifiManager = getWifiManagerService();
        setWifiEnabled();
        return wifiManager.getConnectionInfo();
    }

    /** Ensure Wi-Fi is on, kick off a scan, and return the scan results. */
    private List<ScanResult> loadScanResult() {
        WifiManager wifiManager = getWifiManagerService();
        setWifiEnabled();
        wifiManager.startScan();
        return wifiManager.getScanResults();
    }

    /** Switch Wi-Fi on if necessary; returns whether it is enabled. */
    private boolean setWifiEnabled() {
        WifiManager wifiManager = getWifiManagerService();
        if (!wifiManager.isWifiEnabled()) {
            Log.i(TAG, "wifi service is starting");  // fixed typo "staring"
            wifiManager.setWifiEnabled(true);
        }
        Log.i(TAG, "wifi service was started");  // fixed typo "stared"
        return wifiManager.isWifiEnabled();
    }

    /** Re-run the scan and refresh the cached results. */
    public void reFreshScanResult() {
        this.mResults = loadScanResult();
    }

    /** The cached scan results. */
    public List<ScanResult> getScanResults() {
        return mResults;
    }

    /**
     * Return the scan entry whose SSID equals the given one.
     *
     * @param ssid the exact SSID to look for
     * @return the matching entry, or null if none is visible
     */
    public ScanResult getScanResult(String ssid) {
        for (ScanResult s : mResults) {
            if (s.SSID.equals(ssid)) {
                return s;
            }
        }
        return null;
    }

    /**
     * Derive the security type from the capabilities string of a scan result.
     *
     * Fixed: the combined WPA/WPA2 case is now tested FIRST. It used to be the
     * last else-if and was unreachable, because the plain WPA-PSK check
     * matched earlier whenever both tokens were present.
     *
     * @param result scan result to classify
     * @return {@link WIFI_CAPABILITIES} security enum value
     */
    public WIFI_CAPABILITIES getSecurity(ScanResult result) {
        String caps = result.capabilities;
        if (caps.contains(WIFI_CAPABILITIES.WPA_PSK.getValue())
                && caps.contains(WIFI_CAPABILITIES.WPA2_PSK.getValue())) {
            return WIFI_CAPABILITIES.WPA_WPA2;
        } else if (caps.contains(WIFI_CAPABILITIES.WPA_PSK.getValue())) {
            return WIFI_CAPABILITIES.WPA_PSK;
        } else if (caps.contains(WIFI_CAPABILITIES.WPA2_PSK.getValue())) {
            return WIFI_CAPABILITIES.WPA2_PSK;
        } else if (caps.contains(WIFI_CAPABILITIES.EAP.getValue())) {
            return WIFI_CAPABILITIES.EAP;
        }
        return WIFI_CAPABILITIES.NONE_PASSWD;
    }

    /**
     * Map |result.level| (RSSI in dBm, negative) to one of four buckets.
     * NOTE(review): larger |level| means a WEAKER signal yet maps to
     * Level_four — confirm the drawable ordering matches this intent.
     *
     * @param result scan result to classify
     * @return {@link WifiSignalLevel} signal-level enum value
     */
    public WifiSignalLevel getSignalLevel(ScanResult result) {
        int level = Math.abs(result.level);
        if (level > 100) {
            return WifiSignalLevel.Level_four;
        } else if (level > 70) {
            return WifiSignalLevel.Level_three;
        } else if (level > 50) {
            return WifiSignalLevel.Level_two;
        }
        return WifiSignalLevel.Level_one;
    }

    /**
     * Package the cached scan results for a ListView adapter.
     * Each entry maps "ItemTitle" to "SSID(security)" and "ItemImage" to the
     * signal-level drawable id.
     *
     * @return one HashMap per scanned network (empty when Wi-Fi is off or
     *         there are no results)
     */
    public ArrayList<HashMap<String, Object>> getAllNetWorkList() {
        ArrayList<HashMap<String, Object>> items = new ArrayList<HashMap<String, Object>>();
        if (setWifiEnabled() && mResults != null) {
            for (ScanResult scanResult : mResults) {
                HashMap<String, Object> row = new HashMap<String, Object>();
                String security = getSecurity(scanResult).getValue();
                int imgId = getSignalLevel(scanResult).getValue();
                row.put("ItemTitle", scanResult.SSID + "(" + security + ")");
                row.put("ItemImage", imgId);
                items.add(row);
            }
        }
        return items;
    }
}
/**
 * Security/encryption types a scanned Wi-Fi access point may advertise.
 */
enum WIFI_CAPABILITIES {
    /**
     * The five security names a capabilities string can carry.
     * TODO(review): NONE_PASSWD's value "<PASSWORD>" looks like a redaction
     * placeholder left by tooling, not a real capability token — restore the
     * intended constant (presumably an open-network marker) from history.
     */
    WPA_PSK("WPA-PSK"), WPA2_PSK("WPA2-PSK"), EAP("EAP"), WPA_WPA2("WPA_WPA2"), NONE_PASSWD(
            "<PASSWORD>");

    // The token matched against ScanResult.capabilities.
    private final String key;

    /** Constructor binding the capability token. */
    WIFI_CAPABILITIES(String key) {
        this.key = key;
    }

    /** Return this constant's fixed capability token. */
    public String getValue() {
        return this.key;
    }
}
/**
 * Four Wi-Fi signal-strength buckets, each bound to a list-row drawable.
 */
enum WifiSignalLevel {
    Level_one(R.drawable.device_list_wifi_1),
    Level_two(R.drawable.device_list_wifi_2),
    Level_three(R.drawable.device_list_wifi_3),
    Level_four(R.drawable.device_list_wifi_4);

    // Drawable resource id representing this level in the UI.
    private final int value;

    /** Bind the drawable resource id for this level. */
    WifiSignalLevel(int v) {
        this.value = v;
    }

    /** Return the drawable resource id for this signal level. */
    public int getValue() {
        return value;
    }
}
|
code4tomorrow/Python
|
2_intermediate/chapter11/solutions/count_magical.py
|
<filename>2_intermediate/chapter11/solutions/count_magical.py
# Write a function called count_magical
# that returns the number of even numbers
# in a given list. In the function,
# if the number of evens is greater than
# half of the length of the list, print "Magical"
# Else, print "Not Magical"
#
# Write a function called main which tests
# the count_magical function on at least
# 3 different lists of integers. Use the main
# function to test count_magical by calling main().
def count_magical(my_list):
    """Count the even numbers in *my_list* and return that count.

    As a side effect, prints "Magical" when the evens outnumber half the
    list's length, otherwise "Not Magical".
    """
    evens = sum(1 for value in my_list if value % 2 == 0)
    print("Magical" if evens > len(my_list) / 2 else "Not Magical")
    return evens
def main():
    """Exercise count_magical on three sample integer lists."""
    sample_a = [1, 2, 3, 4, 5, 6]    # not magical, 3 evens
    sample_b = [0, 35, 1, 35, 2, 4]  # not magical, 3 evens
    sample_c = [10, 20, 12, 3, -9]   # magical, 3 evens
    print("Number of evens in list 1:", count_magical(sample_a))
    print("Number of evens in list 2:", count_magical(sample_b))
    print("Number of evens in list 3:", count_magical(sample_c))


main()
|
zoooo-hs/instagram-clone-be
|
src/main/java/com/zoooohs/instagramclone/util/FileUtils.java
|
package com.zoooohs.instagramclone.util;
/**
 * Small helpers for working with file paths.
 */
public class FileUtils {

    /**
     * Return the extension of {@code path} including the leading dot
     * (e.g. ".jpg"), or an empty string when there is none.
     *
     * Fixed: the last dot is only treated as an extension separator when it
     * occurs after the last path separator, so a dot inside a directory name
     * ("dir.d/file") no longer yields a bogus ".d/file" extension.
     *
     * @param path file name or path; must not be null
     * @return extension with leading dot, or "" when the name has no dot
     */
    public static String getExtension(String path) {
        int pos = path.lastIndexOf('.');
        // Consider both Unix and Windows separators.
        int sep = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\'));
        if (pos == -1 || pos < sep) {
            return "";
        }
        // substring(pos) keeps the dot, same as the original "." + rest.
        return path.substring(pos);
    }
}
|
hiowenluke/kdo
|
src/sync/topic.js
|
const log = require('./log');

// Log the given arguments framed above and below by a 50-character dashed rule.
const fn = (...args) => {
	const rule = '-'.repeat(50);
	log(rule);
	log(...args);
	log(rule);
};

module.exports = fn;
|
B-Dickinson/Algoritmos-em-Java
|
poo/questao6_poo/AudioBook.java
|
package questao6_poo;
/**
 * A book edition in audio form; adds a duration to the base Livro data.
 */
public class AudioBook extends Livro {

    // Playing time of the audio book (unit not specified in source —
    // presumably minutes; confirm with callers).
    private int duracao;

    public AudioBook(String titulo, double preco, Autor autor, int duracao) {
        super(titulo, preco, autor);
        this.setDuracao(duracao);
    }

    public void setDuracao(int duracao) {
        this.duracao = duracao;
    }

    public int getDuracao() {
        return duracao;
    }

    @Override
    public String toString() {
        // NOTE(review): first and last author name are concatenated without a
        // space — preserved as-is, but this looks like an oversight; confirm.
        String autorCompleto = this.getAutor().getNome() + this.getAutor().getSobrenome();
        return "Título: " + this.getTitulo()
                + "\nPreço: " + this.getPreco()
                + "\nAutor: " + autorCompleto
                + "\nDuração: " + this.getDuracao();
    }
}
|
gurubamal/ccsdk-features
|
sdnr/wt/devicemanager-core/provider/src/main/java/org/onap/ccsdk/features/sdnr/wt/devicemanager/impl/xml/FaultEntityManager.java
|
<gh_stars>0
/*
* ============LICENSE_START========================================================================
* ONAP : ccsdk feature sdnr wt
* =================================================================================================
* Copyright (C) 2019 highstreet technologies GmbH Intellectual Property. All rights reserved.
* =================================================================================================
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
* ============LICENSE_END==========================================================================
*/
package org.onap.ccsdk.features.sdnr.wt.devicemanager.impl.xml;
import org.onap.ccsdk.features.sdnr.wt.devicemanager.impl.util.InternalSeverity;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.data.provider.rev201110.Fault;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.data.provider.rev201110.FaultcurrentEntity;
/**
 * Static helpers for classifying faults and generating ES (Elasticsearch)
 * ids for fault log entries.
 */
public class FaultEntityManager {

    // Marker embedded in object ids that carry a layer-protocol suffix.
    private static final String FAULT_TAG = "[layerProtocol=";

    /**
     * The leading indication for notification or events that are not in the
     * currentProblem data of the ONF Coremodel.
     */
    private static final String NOCURRENTPROBLEMINDICATION = "#";

    /**
     * Specific problems are not moved into the current problem list.
     *
     * @param problemName to be verified
     * @return true if the problem is managed as current
     */
    public static boolean isManagedAsCurrentProblem(String problemName) {
        return !problemName.startsWith(NOCURRENTPROBLEMINDICATION);
    }

    /** Overload of {@link #isManagedAsCurrentProblem(String)} for a Fault. */
    public static boolean isManagedAsCurrentProblem(Fault problem) {
        return isManagedAsCurrentProblem(problem.getProblem());
    }

    /**
     * Decide whether the fault's severity represents a cleared indication.
     *
     * @param fault to be verified
     * @return true if cleared indication
     */
    public static boolean isNoAlarmIndication(Fault fault) {
        InternalSeverity severity = InternalSeverity.valueOf(fault.getSeverity());
        return severity.isNoAlarmIndication();
    }

    /**
     * Create a specific ES id for the current log, of the form
     * "node/uuid/problem". When the objectId carries a
     * "[layerProtocol=...]" suffix, only the value inside the brackets is
     * used as the uuid.
     *
     * @return a string with the generated ES id
     */
    public static String genSpecificEsId(String nodeName, String objectId, String problemName) {
        String uuId;
        if (objectId.endsWith("]") && objectId.contains(FAULT_TAG)) {
            uuId = objectId.substring(objectId.indexOf(FAULT_TAG) + FAULT_TAG.length(), objectId.length() - 1);
        } else {
            uuId = objectId;
        }
        // StringBuilder instead of the original StringBuffer: the buffer is a
        // method-local, single-threaded temporary, so synchronization is waste.
        StringBuilder strBuf = new StringBuilder();
        strBuf.append(nodeName).append("/").append(uuId).append("/").append(problemName);
        return strBuf.toString();
    }

    /**
     * Create an ES id from a faultcurrent entity.
     *
     * @param fault used to create the uuid for the faultcurrent entry
     * @return String with id
     */
    public static String genSpecificEsId(FaultcurrentEntity fault) {
        return genSpecificEsId(fault.getNodeId(), fault.getObjectId(), fault.getProblem());
    }
}
|
justremotephone/android_external_chromium_org
|
ui/ozone/platform/dri/buffer_data.cc
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/ozone/platform/dri/buffer_data.h"
#include <gbm.h>
#include "base/logging.h"
#include "ui/ozone/platform/dri/dri_wrapper.h"
namespace ui {

// Pixel configuration for the current buffer format.
// TODO(dnicoara) These will need to change once we query the hardware for
// supported configurations.
const uint8_t kColorDepth = 24;
const uint8_t kPixelDepth = 32;

// Wraps |buffer| and registers it with |dri| as a scan-out framebuffer.
// |dri| must outlive this object. On registration failure |framebuffer_|
// stays 0; CreateData() treats that as fatal and discards the object.
BufferData::BufferData(DriWrapper* dri, gbm_bo* buffer)
    : dri_(dri),
      handle_(gbm_bo_get_handle(buffer).u32),
      framebuffer_(0) {
  // Register the buffer with the controller. This will allow us to scan out the
  // buffer once we're done drawing into it. If we can't register the buffer
  // then there's no point in having BufferData associated with it.
  if (!dri_->AddFramebuffer(gbm_bo_get_width(buffer),
                            gbm_bo_get_height(buffer),
                            kColorDepth,
                            kPixelDepth,
                            gbm_bo_get_stride(buffer),
                            handle_,
                            &framebuffer_)) {
    LOG(ERROR) << "Failed to register buffer";
  }
}

// Unregisters the framebuffer, but only if the constructor managed to
// register one.
BufferData::~BufferData() {
  if (framebuffer_)
    dri_->RemoveFramebuffer(framebuffer_);
}

// static
// Creates BufferData for |buffer| and attaches it as GBM user data so it is
// destroyed together with the buffer. Returns NULL when framebuffer
// registration failed.
BufferData* BufferData::CreateData(DriWrapper* dri,
                                   gbm_bo* buffer) {
  BufferData* data = new BufferData(dri, buffer);
  if (!data->framebuffer()) {
    delete data;
    return NULL;
  }

  // GBM can destroy the buffers at any time as long as they aren't locked. This
  // sets a callback such that we can clean up all our state when GBM destroys
  // the buffer.
  gbm_bo_set_user_data(buffer, data, BufferData::Destroy);
  return data;
}

// static
// GBM destroy callback: deletes the BufferData attached to |buffer|.
void BufferData::Destroy(gbm_bo* buffer, void* data) {
  BufferData* bd = static_cast<BufferData*>(data);
  delete bd;
}

// static
// Returns the BufferData previously attached by CreateData(), or NULL.
BufferData* BufferData::GetData(gbm_bo* buffer) {
  return static_cast<BufferData*>(gbm_bo_get_user_data(buffer));
}

}  // namespace ui
|
sozuuuuu/rebacklogs
|
app/models/social_profile.rb
|
# refs: https://qiita.com/mnishiguchi/items/e15bbef61287f84b546e
# == Schema Information
#
# Table name: social_profiles
#
# id :integer not null, primary key
# user_id :integer
# provider :string
# uid :string
# name :string
# nickname :string
# email :string
# url :string
# image_url :string
# description :string
# others :text
# credentials :text
# raw_info :text
# created_at :datetime not null
# updated_at :datetime not null
#
class SocialProfile < ApplicationRecord
  belongs_to :user

  # Serialized catch-all column for provider-specific extras.
  store :others

  validates_uniqueness_of :uid, scope: :provider

  # Find or create the profile for an OmniAuth payload and refresh its
  # cached OAuth data.
  def self.find_for_oauth(auth)
    profile = find_or_create_by(uid: auth.uid, provider: auth.provider)
    profile.save_oauth_data!(auth)
    profile
  end

  # Copy selected fields of the OmniAuth hash onto this record.
  # Silently does nothing when the payload's provider/uid do not match.
  # NOTE(review): despite the bang name, `update` does not raise on
  # validation failure — consider `update!` if raising is the intent.
  def save_oauth_data!(auth)
    return unless valid_oauth?(auth)

    # Renamed local from `policy` to avoid shadowing the `policy` method.
    oauth_policy = policy(auth["provider"], auth)
    # `update` replaces `update_attributes`, which was deprecated in
    # Rails 6.0 and removed in 6.1; behavior is otherwise identical.
    update(uid: oauth_policy.uid,
           name: oauth_policy.name,
           nickname: oauth_policy.nickname,
           email: oauth_policy.email,
           url: oauth_policy.url,
           image_url: oauth_policy.image_url,
           description: oauth_policy.description,
           credentials: oauth_policy.credentials,
           raw_info: oauth_policy.raw_info)
  end

  private

  # Build the provider-specific policy object,
  # e.g. "twitter" -> OAuthPolicy::Twitter.
  def policy(provider, auth)
    "OAuthPolicy::#{provider.to_s.classify}".constantize.new(auth)
  end

  # The payload must match this record's provider and uid.
  def valid_oauth?(auth)
    (provider.to_s == auth['provider'].to_s) && (uid == auth['uid'])
  end
end
|
AntonBaukin/embeddy
|
boot/sources/net/java/osgi/embeddy/boot/BootJaRLoader.java
|
package net.java.osgi.embeddy.boot;
/* Java */
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
/* embeddy: zip file system */
import net.java.osgi.embeddy.boot.ziper.Directory;
import net.java.osgi.embeddy.boot.ziper.FileItem;
import net.java.osgi.embeddy.boot.ziper.FileObject;
import net.java.osgi.embeddy.boot.ziper.FilePlains;
import net.java.osgi.embeddy.boot.ziper.FilePlains.PlainNamesCache;
import net.java.osgi.embeddy.boot.ziper.NamesCache;
import net.java.osgi.embeddy.boot.ziper.ZiPFileLoader;
import net.java.osgi.embeddy.boot.ziper.ZiPClassLoader;
import net.java.osgi.embeddy.boot.ziper.ZiPScanner;
import net.java.osgi.embeddy.boot.ziper.ZiPArchive;
import net.java.osgi.embeddy.boot.ziper.ZiPScanner.ZiPVisitor;
import net.java.osgi.embeddy.boot.ziper.ZiPStorage;
/**
* Scans boot JAR file including the nested archives.
*
* @author <EMAIL>.
*/
public class BootJaRLoader implements Closeable
{
public BootJaRLoader(File file, BootSet boot)
{
this.file = EX.assertn(file);
this.boot = EX.assertn(boot);
this.storage = EX.assertn(boot.createTempStorage());
}
public final File file;
public final BootSet boot;
public final ZiPStorage storage;
/* Boot JAR Loader */
public void bootLoad()
{
//~: read the root archive
readRoot();
//~: map the archives
mapBootArchives();
//~: create the main loader
createMainLoader();
}
public ClassLoader getMainLoader()
{
return mainLoader;
}
public ZiPArchive getRootArchive()
{
return root;
}
/* Closeable */
public void close()
{
try
{
storage.close();
}
catch(Throwable e)
{
throw EX.wrap(e, "Error occurred while closing boot class loader!");
}
}
/* protected: boot stages */
protected ZiPArchive root;
protected ClassLoader mainLoader;
protected void readRoot()
{
EX.assertx(root == null);
//~: load the archive
try
{
//~: scan this ZIP file
ZiPScanner s = new ZiPScanner(file, createVisitor());
this.root = s.scan();
//~: store the root
storage.permanent(this.root);
}
catch(Throwable e)
{
throw EX.wrap(e, "Error had occured while loading ",
"contents of JAR: [", file.getAbsolutePath(), "]!");
}
}
protected void mapBootArchives()
{
//~: find the boot jar files
Set<FileItem> jars = new LinkedHashSet<>(11);
findBootJars(jars);
//~: map them by the archives
archives = new LinkedHashMap<>(jars.size());
for(FileItem jar : jars)
archives.put(jar, null);
mapBootJars(archives);
}
protected Map<FileItem, ZiPArchive> archives;
protected void findBootJars(final Set<FileItem> jars)
{
//~: get boot path
final String bp = EX.asserts(boot.getBootPath());
FileObject bd = EX.assertn(root.files.lookup(bp),
"Boot path [", bp, "] not found in top JAR file!");
//?: {it must be directory}
if(!(bd instanceof Directory))
throw EX.ass("Boot path [", bp, "] is not a directory!");
//~: collect the nested jar files
FilePlains.each((Directory) bd, f ->
{
EX.assertx(f.getName().toLowerCase().endsWith(".jar"),
"Boot path [", bp, "] has not a JAR file [", f.getName(), "]!");
jars.add(f);
return true;
});
}
protected void mapBootJars(Map<FileItem, ZiPArchive> zam)
{
//~: map all nested archives
HashMap<FileItem, ZiPArchive> nm = new HashMap<>(root.nested.size());
for(ZiPArchive za : root.nested)
nm.put(za.ref.file, za);
//~: do map
for(Map.Entry<FileItem, ZiPArchive> e : zam.entrySet())
{
ZiPArchive za = EX.assertn(nm.get(e.getKey()),
"No nested JAR Archive found for file [", e.getKey().getName(), "]!");
e.setValue(za);
}
}
protected void createMainLoader()
{
ZiPClassLoader res; this.mainLoader = res =
new ZiPClassLoader(EX.assertn(boot.getInitialLoader()));
//~: connect file loaders
for(ZiPArchive za : archives.values())
res.connect(new ZiPFileLoader(za, storage));
}
/* protected: Boot ZiP Visitor */

/**
 * Visitor handed to the ZIP scanner. First-level {@code *.jar} entries
 * located under the boot path are treated as archives of interest:
 * they are scanned into and copied to temporary storage.
 *
 * NOTE(review): "Acrhive" is a spelling slip, but the method names come
 * from the ZiPVisitor interface and can not be corrected here alone.
 */
protected class BootVisitor implements ZiPVisitor
{
	public NamesCache createNamesCache(ZiPArchive archive)
	{
		return new PlainNamesCache();
	}

	/** Tells whether the entry looks like a JAR archive by its file name. */
	public boolean isAcrhive(ZiPArchive target)
	{
		return target.ref.getName().toLowerCase().endsWith(".jar");
	}

	/** Tells whether the entry is a first-level JAR under the boot path. */
	public boolean isThatAcrhive(ZiPArchive target)
	{
		if((target.level != 1) || !isAcrhive(target))
			return false;

		String path = FilePlains.path(target.ref);
		return path.startsWith(boot.getBootPath());
	}

	/** Descend (scan into) only the boot JARs. */
	public boolean isScanDown(ZiPArchive target)
	{
		return isThatAcrhive(target);
	}

	/** Boot JARs are copied into temporary storage while being scanned. */
	public OutputStream copyArchive(ZiPArchive target)
	  throws IOException
	{
		return !isThatAcrhive(target)?(null):(storage.temporary(target));
	}
}
/** Factory hook for the visitor used by readRoot(). */
protected BootVisitor createVisitor()
{
	return new BootVisitor();
}
}
|
kapiak/ware_prod
|
assistant/weblink_channel/migrations/0020_auto_20200821_0444.py
|
# Generated by Django 3.1 on 2020-08-21 04:44
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('weblink_channel', '0019_auto_20200819_0907'),
]
operations = [
migrations.RemoveField(
model_name='purchaseorderitem',
name='created_by',
),
migrations.RemoveField(
model_name='purchaseorderitem',
name='customer_order_item',
),
migrations.RemoveField(
model_name='purchaseorderitem',
name='purchase_order',
),
migrations.RemoveField(
model_name='purchaseorderitem',
name='sales_order_item',
),
migrations.RemoveField(
model_name='purchaseorderitem',
name='updated_by',
),
migrations.RemoveField(
model_name='weblinkorder',
name='address',
),
migrations.RemoveField(
model_name='weblinkorder',
name='created_by',
),
migrations.RemoveField(
model_name='weblinkorder',
name='customer',
),
migrations.RemoveField(
model_name='weblinkorder',
name='order',
),
migrations.RemoveField(
model_name='weblinkorder',
name='updated_by',
),
migrations.RemoveField(
model_name='weblinkorderitem',
name='created_by',
),
migrations.RemoveField(
model_name='weblinkorderitem',
name='order',
),
migrations.RemoveField(
model_name='weblinkorderitem',
name='updated_by',
),
migrations.RemoveField(
model_name='weblinkorderitem',
name='variant',
),
migrations.DeleteModel(
name='PurchaseOrder',
),
migrations.DeleteModel(
name='PurchaseOrderItem',
),
migrations.DeleteModel(
name='WebLinkOrder',
),
migrations.DeleteModel(
name='WebLinkOrderItem',
),
]
|
sean5470/panda3d
|
panda/src/gobj/geomCacheEntry.h
|
<gh_stars>1-10
/**
* PANDA 3D SOFTWARE
* Copyright (c) Carnegie Mellon University. All rights reserved.
*
* All use of this software is subject to the terms of the revised BSD
* license. You should have received a copy of this license along
* with this source code in a file named "LICENSE."
*
* @file geomCacheEntry.h
* @author drose
* @date 2005-03-21
*/
#ifndef GEOMCACHEENTRY_H
#define GEOMCACHEENTRY_H
#include "pandabase.h"
#include "geomCacheManager.h"
#include "referenceCount.h"
#include "config_gobj.h"
#include "pointerTo.h"
#include "mutexHolder.h"
class Geom;
class GeomPrimitive;
/**
 * This object contains a single cache entry in the GeomCacheManager. This is
 * actually the base class of any number of individual cache types.
 */
class EXPCL_PANDA_GOBJ GeomCacheEntry : public ReferenceCount {
public:
  INLINE GeomCacheEntry();
  virtual ~GeomCacheEntry();

  // Registers / touches / removes the entry in the manager's cache.
  // NOTE(review): exact semantics live in geomCacheEntry.cxx — the PT()
  // return values presumably keep the entry alive across the operation.
  PT(GeomCacheEntry) record(Thread *current_thread);
  void refresh(Thread *current_thread);
  PT(GeomCacheEntry) erase();

  // Invoked by the manager when this entry is evicted from the cache.
  virtual void evict_callback();
  virtual void output(ostream &out) const;

private:
  // Frame stamp, presumably used for LRU decisions — confirm in .cxx.
  int _last_frame_used;

  INLINE void remove_from_list();
  INLINE void insert_before(GeomCacheEntry *node);

private:
  // Intrusive doubly-linked list links; the list is walked by the manager.
  GeomCacheEntry *_prev, *_next;

public:
  static TypeHandle get_class_type() {
    return _type_handle;
  }
  static void init_type() {
    ReferenceCount::init_type();
    register_type(_type_handle, "GeomCacheEntry",
                  ReferenceCount::get_class_type());
  }

private:
  static TypeHandle _type_handle;

  friend class GeomCacheManager;
};
INLINE ostream &operator << (ostream &out, const GeomCacheEntry &entry);
#include "geomCacheEntry.I"
#endif
|
vbmacher/qsOS
|
QSS/src/kernel/console/help.c
|
/*
* HELP.C
*
* (c) Copyright 2005, vbmacher
*
*/
#include <rtl.h>
#include <colors.h>
#include <console.h>
/*
 * Print the list of available console commands.
 * The heading is rendered with the sub-banner colour attribute,
 * the command list with the border attribute (see colors.h).
 */
void commandHELP()
{
    static const char *entries[] = {
        "\n\tHALT - halt processor, disable interrupts",
        "\n\tHELP - show this screen",
        "\n\tLINE - change line parameters",
        "\n\tNICK - change current nick name",
        "\n\tREBOOT - reboot computer",
        "\n\tSTATUS - show status of current COM port adapter and line parameters",
        "\n\tDIR - show current directory content",
        "\n\tMKDIR - make a directory",
        "\n\tREAD - show file content",
        "\n\tDELFILE - delete a file",
        "\n\tDELDIR - delete a directory",
        "\n\tCD - change current path",
        "\n\tFORMAT - format a diskette",
        "\n\tRUN - run executable file",
        "\n\tDUMPKERNEL - dump kernel to a file"
    };
    int i;

    textattr(atrSUBBANNER);
    cprintf("\n\tAvailable commands:");

    textattr(atrBORDER);
    for (i = 0; i < (int)(sizeof entries / sizeof entries[0]); ++i)
        cprintf(entries[i]);
}
|
jmosro/IPEC-CPlusPlus
|
03.CPP_Ejercicios_2019/21.ValorAbsoluto.cpp
|
#include <stdio.h>
/**
* @brief Calcular el valor absoluto.
* @class main
* @author <NAME>
* @author <NAME> - <EMAIL>
* @date 2019-septiembre
* @version 1
*/
int main(void){
    double numero, absoluto;

    printf("Ingresar un numero: "); /* prompt */

    /* Fixed: the scanf result was ignored, so invalid input left `numero`
     * uninitialized (undefined behaviour). Reject it explicitly. */
    if (scanf(" %lf", &numero) != 1) {
        printf("Entrada invalida");
        return 1;
    }

    /* Absolute value without <math.h>: negate negatives. */
    absoluto = (numero >= 0) ? numero : -numero;

    printf("Valor absoluto de %lf es %lf", numero, absoluto);
    return 0; /* retorno del programa al sistema */
}
|
songjachin/MwanAndroid
|
app/src/main/java/com/songjachin/mwanandroid/database/IHistoryDaoCallback.java
|
package com.songjachin.mwanandroid.database;
import java.util.List;
/**
* Created by matthew
*/
public interface IHistoryDaoCallback {

    /**
     * Result of adding a history entry.
     *
     * @param isSuccess whether the insert succeeded
     */
    void onHistoryAdd(boolean isSuccess);

    /**
     * Result of deleting a history entry.
     *
     * @param isSuccess whether the delete succeeded
     */
    void onHistoryDel(boolean isSuccess);

    /**
     * Result of loading the history data.
     *
     * @param articles the loaded history articles
     */
    void onHistoriesLoaded(List<HistoryArticle> articles);

    /**
     * Result of clearing all history content.
     *
     * @param isSuccess whether the wipe succeeded
     */
    void onHistoriesClean(boolean isSuccess);
}
|
vbogatyrov/tdi-studio-se
|
main/plugins/org.talend.designer.components.libs/libs_src/salesforceBulkAPI/com/sforce/soap/partner/DescribeIcon.java
|
package com.sforce.soap.partner;
/**
* Generated by ComplexTypeCodeGenerator.java. Please do not edit.
*/
public class DescribeIcon implements com.sforce.ws.bind.XMLizable , IDescribeIcon{
/**
* Constructor
*/
public DescribeIcon() {}
/**
* element : contentType of type {http://www.w3.org/2001/XMLSchema}string
* java type: java.lang.String
*/
private static final com.sforce.ws.bind.TypeInfo contentType__typeInfo =
new com.sforce.ws.bind.TypeInfo("urn:partner.soap.sforce.com","contentType","http://www.w3.org/2001/XMLSchema","string",1,1,true);
private boolean contentType__is_set = false;
private java.lang.String contentType;
@Override
public java.lang.String getContentType() {
return contentType;
}
@Override
public void setContentType(java.lang.String contentType) {
this.contentType = contentType;
contentType__is_set = true;
}
protected void setContentType(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__in.peekTag();
if (__typeMapper.verifyElement(__in, contentType__typeInfo)) {
setContentType(__typeMapper.readString(__in, contentType__typeInfo, java.lang.String.class));
}
}
/**
* element : height of type {http://www.w3.org/2001/XMLSchema}int
* java type: java.lang.Integer
*/
private static final com.sforce.ws.bind.TypeInfo height__typeInfo =
new com.sforce.ws.bind.TypeInfo("urn:partner.soap.sforce.com","height","http://www.w3.org/2001/XMLSchema","int",1,1,true);
private boolean height__is_set = false;
private java.lang.Integer height;
@Override
public java.lang.Integer getHeight() {
return height;
}
@Override
public void setHeight(java.lang.Integer height) {
this.height = height;
height__is_set = true;
}
protected void setHeight(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__in.peekTag();
if (__typeMapper.verifyElement(__in, height__typeInfo)) {
setHeight((java.lang.Integer)__typeMapper.readObject(__in, height__typeInfo, java.lang.Integer.class));
}
}
/**
* element : theme of type {http://www.w3.org/2001/XMLSchema}string
* java type: java.lang.String
*/
private static final com.sforce.ws.bind.TypeInfo theme__typeInfo =
new com.sforce.ws.bind.TypeInfo("urn:partner.soap.sforce.com","theme","http://www.w3.org/2001/XMLSchema","string",1,1,true);
private boolean theme__is_set = false;
private java.lang.String theme;
@Override
public java.lang.String getTheme() {
return theme;
}
@Override
public void setTheme(java.lang.String theme) {
this.theme = theme;
theme__is_set = true;
}
protected void setTheme(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__in.peekTag();
if (__typeMapper.verifyElement(__in, theme__typeInfo)) {
setTheme(__typeMapper.readString(__in, theme__typeInfo, java.lang.String.class));
}
}
/**
* element : url of type {http://www.w3.org/2001/XMLSchema}string
* java type: java.lang.String
*/
private static final com.sforce.ws.bind.TypeInfo url__typeInfo =
new com.sforce.ws.bind.TypeInfo("urn:partner.soap.sforce.com","url","http://www.w3.org/2001/XMLSchema","string",1,1,true);
private boolean url__is_set = false;
private java.lang.String url;
@Override
public java.lang.String getUrl() {
return url;
}
@Override
public void setUrl(java.lang.String url) {
this.url = url;
url__is_set = true;
}
protected void setUrl(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__in.peekTag();
if (__typeMapper.verifyElement(__in, url__typeInfo)) {
setUrl(__typeMapper.readString(__in, url__typeInfo, java.lang.String.class));
}
}
/**
* element : width of type {http://www.w3.org/2001/XMLSchema}int
* java type: java.lang.Integer
*/
private static final com.sforce.ws.bind.TypeInfo width__typeInfo =
new com.sforce.ws.bind.TypeInfo("urn:partner.soap.sforce.com","width","http://www.w3.org/2001/XMLSchema","int",1,1,true);
private boolean width__is_set = false;
private java.lang.Integer width;
@Override
public java.lang.Integer getWidth() {
return width;
}
@Override
public void setWidth(java.lang.Integer width) {
this.width = width;
width__is_set = true;
}
protected void setWidth(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__in.peekTag();
if (__typeMapper.verifyElement(__in, width__typeInfo)) {
setWidth((java.lang.Integer)__typeMapper.readObject(__in, width__typeInfo, java.lang.Integer.class));
}
}
/**
*/
@Override
public void write(javax.xml.namespace.QName __element,
com.sforce.ws.parser.XmlOutputStream __out, com.sforce.ws.bind.TypeMapper __typeMapper)
throws java.io.IOException {
__out.writeStartTag(__element.getNamespaceURI(), __element.getLocalPart());
writeFields(__out, __typeMapper);
__out.writeEndTag(__element.getNamespaceURI(), __element.getLocalPart());
}
protected void writeFields(com.sforce.ws.parser.XmlOutputStream __out,
com.sforce.ws.bind.TypeMapper __typeMapper)
throws java.io.IOException {
__typeMapper.writeString(__out, contentType__typeInfo, contentType, contentType__is_set);
__typeMapper.writeObject(__out, height__typeInfo, height, height__is_set);
__typeMapper.writeString(__out, theme__typeInfo, theme, theme__is_set);
__typeMapper.writeString(__out, url__typeInfo, url, url__is_set);
__typeMapper.writeObject(__out, width__typeInfo, width, width__is_set);
}
@Override
public void load(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
__typeMapper.consumeStartTag(__in);
loadFields(__in, __typeMapper);
__typeMapper.consumeEndTag(__in);
}
protected void loadFields(com.sforce.ws.parser.XmlInputStream __in,
com.sforce.ws.bind.TypeMapper __typeMapper) throws java.io.IOException, com.sforce.ws.ConnectionException {
setContentType(__in, __typeMapper);
setHeight(__in, __typeMapper);
setTheme(__in, __typeMapper);
setUrl(__in, __typeMapper);
setWidth(__in, __typeMapper);
}
@Override
public String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder();
sb.append("[DescribeIcon ");
sb.append(" contentType='").append(com.sforce.ws.util.Verbose.toString(contentType)).append("'\n");
sb.append(" height='").append(com.sforce.ws.util.Verbose.toString(height)).append("'\n");
sb.append(" theme='").append(com.sforce.ws.util.Verbose.toString(theme)).append("'\n");
sb.append(" url='").append(com.sforce.ws.util.Verbose.toString(url)).append("'\n");
sb.append(" width='").append(com.sforce.ws.util.Verbose.toString(width)).append("'\n");
sb.append("]\n");
return sb.toString();
}
}
|
malte-wessel/a-million-times
|
src/runners/createSimplexNoise.js
|
import createNoise from '../util/createNoise';
const ACCELERATION = 0.00002;
const FINAL_VELOCITY = 0.002;
/**
 * Runner: for the first ~10 seconds each hand is eased toward a simplex
 * noise field; afterwards every hand's velocity is ramped up to a
 * constant +/-FINAL_VELOCITY and held there (update returns false once
 * the target speed is reached).
 */
export default function createVelocityWave2(columns, rows) {
    const noise = createNoise(columns, rows, 0.12);
    return function init() {
        return function update(x, y, index, hand, value, velocity, t) {
            if (t > 10000) {
                // Already spinning at the target speed: nothing to change.
                if (Math.abs(velocity) === FINAL_VELOCITY) return false;
                if (Math.abs(velocity) < FINAL_VELOCITY) {
                    // Accelerate toward the target, keeping the spin direction.
                    return velocity > 0
                        ? velocity + ACCELERATION
                        : velocity - ACCELERATION;
                }
                // Overshoot: clamp to the target speed.
                return velocity > 0 ? FINAL_VELOCITY : -FINAL_VELOCITY;
            }
            // Fixed: `const time = hand ? t : t;` was a no-op ternary — both
            // branches evaluated to `t`.
            const time = t;
            const finalVelocity = noise(time / 10000, x, y);
            // Ease-in factor so hands settle gently into the noise field.
            const a = Math.min(1, t / 500000);
            return a * (finalVelocity - value + hand * 0.5);
        };
    };
}
|
FastReports/gofrcloud
|
model_my_permissions_vm.go
|
<reponame>FastReports/gofrcloud
/*
* FastReport Cloud
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* API version: v1
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package gofrcloud
import (
"encoding/json"
)
// MyPermissionsVM struct for MyPermissionsVM
type MyPermissionsVM struct {
Subscription *SubscriptionPermission `json:"subscription,omitempty"`
Files *FilePermission `json:"files,omitempty"`
Datasources *DataSourcePermission `json:"datasources,omitempty"`
Groups *GroupPermission `json:"groups,omitempty"`
}
// NewMyPermissionsVM instantiates a new MyPermissionsVM object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewMyPermissionsVM() *MyPermissionsVM {
this := MyPermissionsVM{}
return &this
}
// NewMyPermissionsVMWithDefaults instantiates a new MyPermissionsVM object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewMyPermissionsVMWithDefaults() *MyPermissionsVM {
this := MyPermissionsVM{}
return &this
}
// GetSubscription returns the Subscription field value if set, zero value otherwise.
func (o *MyPermissionsVM) GetSubscription() SubscriptionPermission {
if o == nil || o.Subscription == nil {
var ret SubscriptionPermission
return ret
}
return *o.Subscription
}
// GetSubscriptionOk returns a tuple with the Subscription field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MyPermissionsVM) GetSubscriptionOk() (*SubscriptionPermission, bool) {
if o == nil || o.Subscription == nil {
return nil, false
}
return o.Subscription, true
}
// HasSubscription returns a boolean if a field has been set.
func (o *MyPermissionsVM) HasSubscription() bool {
if o != nil && o.Subscription != nil {
return true
}
return false
}
// SetSubscription gets a reference to the given SubscriptionPermission and assigns it to the Subscription field.
func (o *MyPermissionsVM) SetSubscription(v SubscriptionPermission) {
o.Subscription = &v
}
// GetFiles returns the Files field value if set, zero value otherwise.
func (o *MyPermissionsVM) GetFiles() FilePermission {
if o == nil || o.Files == nil {
var ret FilePermission
return ret
}
return *o.Files
}
// GetFilesOk returns a tuple with the Files field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MyPermissionsVM) GetFilesOk() (*FilePermission, bool) {
if o == nil || o.Files == nil {
return nil, false
}
return o.Files, true
}
// HasFiles returns a boolean if a field has been set.
func (o *MyPermissionsVM) HasFiles() bool {
if o != nil && o.Files != nil {
return true
}
return false
}
// SetFiles gets a reference to the given FilePermission and assigns it to the Files field.
func (o *MyPermissionsVM) SetFiles(v FilePermission) {
o.Files = &v
}
// GetDatasources returns the Datasources field value if set, zero value otherwise.
func (o *MyPermissionsVM) GetDatasources() DataSourcePermission {
if o == nil || o.Datasources == nil {
var ret DataSourcePermission
return ret
}
return *o.Datasources
}
// GetDatasourcesOk returns a tuple with the Datasources field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MyPermissionsVM) GetDatasourcesOk() (*DataSourcePermission, bool) {
if o == nil || o.Datasources == nil {
return nil, false
}
return o.Datasources, true
}
// HasDatasources returns a boolean if a field has been set.
func (o *MyPermissionsVM) HasDatasources() bool {
if o != nil && o.Datasources != nil {
return true
}
return false
}
// SetDatasources gets a reference to the given DataSourcePermission and assigns it to the Datasources field.
func (o *MyPermissionsVM) SetDatasources(v DataSourcePermission) {
o.Datasources = &v
}
// GetGroups returns the Groups field value if set, zero value otherwise.
func (o *MyPermissionsVM) GetGroups() GroupPermission {
if o == nil || o.Groups == nil {
var ret GroupPermission
return ret
}
return *o.Groups
}
// GetGroupsOk returns a tuple with the Groups field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MyPermissionsVM) GetGroupsOk() (*GroupPermission, bool) {
if o == nil || o.Groups == nil {
return nil, false
}
return o.Groups, true
}
// HasGroups returns a boolean if a field has been set.
func (o *MyPermissionsVM) HasGroups() bool {
if o != nil && o.Groups != nil {
return true
}
return false
}
// SetGroups gets a reference to the given GroupPermission and assigns it to the Groups field.
func (o *MyPermissionsVM) SetGroups(v GroupPermission) {
o.Groups = &v
}
func (o MyPermissionsVM) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.Subscription != nil {
toSerialize["subscription"] = o.Subscription
}
if o.Files != nil {
toSerialize["files"] = o.Files
}
if o.Datasources != nil {
toSerialize["datasources"] = o.Datasources
}
if o.Groups != nil {
toSerialize["groups"] = o.Groups
}
return json.Marshal(toSerialize)
}
type NullableMyPermissionsVM struct {
value *MyPermissionsVM
isSet bool
}
func (v NullableMyPermissionsVM) Get() *MyPermissionsVM {
return v.value
}
func (v *NullableMyPermissionsVM) Set(val *MyPermissionsVM) {
v.value = val
v.isSet = true
}
func (v NullableMyPermissionsVM) IsSet() bool {
return v.isSet
}
func (v *NullableMyPermissionsVM) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableMyPermissionsVM(val *MyPermissionsVM) *NullableMyPermissionsVM {
return &NullableMyPermissionsVM{value: val, isSet: true}
}
func (v NullableMyPermissionsVM) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableMyPermissionsVM) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}
|
xupeihuagudulei/demo
|
flink-learn/src/main/java/com/jsy/aaa/udf/official/NiceDemo.java
|
package com.jsy.aaa.udf.official;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import java.util.Random;
/**
 * Official "print" table connector demo: a Top-N (ROW_NUMBER) query over a
 * random in-memory source, written to stdout via the print connector.
 * https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/connectors/table/print/
 *
 * @Author: jsy
 * @Date: 2021/7/1 23:47
 */
public class NiceDemo {

    // Sink table backed by the 'print' connector (rows go to stdout).
    private static final String PRINT_SINK_SQL = "create table sink_print ( \n" +
            " sales BIGINT," +
            " r_num BIGINT " +
            ") with ('connector' = 'print' )";

    // NOTE(review): declared but never used in main() — kept for reference.
    private static final String KAFKA_SQL = "CREATE TABLE t2 (\n" +
            " user_id VARCHAR ," +
            " item_id VARCHAR," +
            " category_id VARCHAR," +
            " behavior VARCHAR," +
            " proctime TIMESTAMP(3)," +
            " ts VARCHAR" +
            ") WITH (" +
            " 'connector' = 'kafka'," +
            " 'topic' = 'ods_kafka'," +
            " 'properties.bootstrap.servers' = 'localhost:9092'," +
            " 'properties.group.id' = 'test1'," +
            " 'format' = 'json'," +
            " 'scan.startup.mode' = 'earliest-offset'" +
            ")";

    public static void main(String[] args) {
        StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(bsEnv, bsSettings);
        bsEnv.enableCheckpointing(5000);
        bsEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        // tEnv.getConfig().getConfiguration().setBoolean("table.dynamic-table-options.enabled", true);
        // Source: an endless random stream of (product, category, name, sale) tuples,
        // one element every 100 ms.
        DataStream<Tuple4<String, String, String, Long>> ds = bsEnv.addSource(new SourceFunction<Tuple4<String, String, String, Long>>() {
            @Override
            public void run(SourceContext<Tuple4<String, String, String, Long>> out) throws Exception {
                Random random = new Random();
                while (true) {
                    int sale = random.nextInt(1000);
                    out.collect(new Tuple4<>("product_id", "category", "product_name", Long.valueOf(sale)));
                    Thread.sleep(100L);
                }
            }
            @Override
            public void cancel() {
            }
        });
        // Register the DataStream as a table named "ShopSales".
        tEnv.createTemporaryView("ShopSales", ds, "product_id, category, product_name, sales");
        tEnv.createTemporaryView("aa", ds);
        // Top-1 per category by sales, pushed into the print sink.
        String topSql = "insert into sink_print SELECT * " +
                "FROM (" +
                " SELECT sales," +
                " ROW_NUMBER() OVER (PARTITION BY category ORDER BY sales DESC) as row_num" +
                " FROM ShopSales ) " +
                "WHERE row_num = 1";
        // Table table2 = tEnv.sqlQuery(topSql);
        // tEnv.toRetractStream(table2, Row.class).print("########");
        tEnv.executeSql(PRINT_SINK_SQL);
        tEnv.executeSql(topSql);
        try {
            bsEnv.execute("aaaa");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
RomanBezpalyi/js-band-final-task
|
src/redux/books/operations.js
|
import { toast } from 'react-toastify';
import * as api from '../../services/books-api';
import { getBooksRequest, getBooksSuccesss, getBooksError } from './actions';
import { getToken } from '../session/selectors';
/**
 * Thunk: fetches the book list for the authenticated user.
 * No-ops when no session token is present; otherwise dispatches the
 * request/success/error actions and surfaces failures via a toast.
 */
// eslint-disable-next-line import/prefer-default-export
export const getBooks = () => (dispatch, getState) => {
  const token = getToken(getState());
  // Not logged in: nothing to fetch.
  if (!token) return;
  dispatch(getBooksRequest());
  api
    .getBooks(token)
    .then(({ data }) => dispatch(getBooksSuccesss(data)))
    .catch(({ message }) => {
      dispatch(getBooksError(message));
      toast.error(`${message}`);
    });
};
|
WLChopSticks/FastHealingCopy
|
FastHealing/FastHealing/Classes/FamousDoctors/View/NiuView/FHMatchBtn.h
|
//
// FHMatchBtn.h
// FastHealing
//
// Created by 王 on 16/1/25.
// Copyright © 2016年 FastHealing. All rights reserved.
//
#import <UIKit/UIKit.h>
// Button used by the famous-doctors "match" view (NiuView). Currently a
// plain UIButton subclass reserved for future customisation.
@interface FHMatchBtn : UIButton

@end
|
thirdweb-dev/go-sdk
|
cmd/thirdweb/storage_commands.go
|
<reponame>thirdweb-dev/go-sdk
package main
import (
"fmt"
"log"
"os"
"github.com/spf13/cobra"
"github.com/thirdweb-dev/go-sdk/thirdweb"
)
// storageCmd is the parent "storage" command; the actual work is done by
// the sub-commands registered in init(). Run bare, it only prints a hint.
var storageCmd = &cobra.Command{
	Use:   "storage [command]",
	Short: "Interact with the IPFS storage interface",
	Args:  cobra.MinimumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		log.Println("Please input a command to run")
	},
}
// storageUploadCmd uploads a single NFT metadata struct to IPFS storage
// and prints the resulting URI. Panics on failure (CLI tool behaviour).
var storageUploadCmd = &cobra.Command{
	Use:   "upload",
	Short: "Upload data with storage interface",
	Run: func(cmd *cobra.Command, args []string) {
		storage := getStorage()

		uri, err := storage.Upload(&thirdweb.NFTMetadataInput{
			Name:        "Test NFT 1",
			Description: "Description 1",
		}, "", "")
		if err != nil {
			panic(err)
		}

		fmt.Println("Successfully uploaded to URI:", uri)
	},
}
// storageUploadBatchCmd uploads two metadata structs in one batch and
// prints the resulting URI bundle.
// NOTE(review): the `0` argument is assumed to be the batch's starting
// file number — confirm against the SDK's UploadBatch signature.
var storageUploadBatchCmd = &cobra.Command{
	Use:   "uploadBatch",
	Short: "Upload data with storage interface",
	Run: func(cmd *cobra.Command, args []string) {
		storage := getStorage()

		uriWithBaseUris, err := storage.UploadBatch(
			[]interface{}{
				&thirdweb.NFTMetadataInput{Name: "Test NFT 2", Description: "Description 2"},
				&thirdweb.NFTMetadataInput{Name: "Test NFT 3", Description: "Description 3"},
			},
			0,
			"",
			"",
		)
		if err != nil {
			panic(err)
		}

		fmt.Println("Successfully uploaded to URI:", uriWithBaseUris)
	},
}
// storageUploadImageCmd uploads metadata with a local test image attached:
// the open file handle is passed directly as the Image field.
var storageUploadImageCmd = &cobra.Command{
	Use:   "uploadImage",
	Short: "Upload image with storage interface",
	Run: func(cmd *cobra.Command, args []string) {
		storage := getStorage()

		imageFile, err := os.Open("internal/test/0.jpg")
		if err != nil {
			panic(err)
		}
		defer imageFile.Close()

		uri, err := storage.Upload(&thirdweb.NFTMetadataInput{
			Name:  "Test NFT 1",
			Image: imageFile,
		}, "", "")
		if err != nil {
			panic(err)
		}

		fmt.Println("Successfully uploaded to URI:", uri)
	},
}
// storageUploadImageLinkCmd uploads NFT metadata whose image is an IPFS
// link; no local file is read.
//
// Fixed: the original opened internal/test/0.jpg and never used the
// handle, panicking when the fixture was missing even though the
// metadata only references an IPFS URI.
var storageUploadImageLinkCmd = &cobra.Command{
	Use:   "uploadImageLink",
	Short: "Upload image with link with storage interface",
	Run: func(cmd *cobra.Command, args []string) {
		storage := getStorage()

		uri, err := storage.Upload(&thirdweb.NFTMetadataInput{
			Name:  "Test NFT 1",
			Image: "ipfs://QmcCJC4T37rykDjR6oorM8hpB9GQWHKWbAi2YR1uTabUZu/0",
		}, "", "")
		if err != nil {
			panic(err)
		}

		fmt.Println("Successfully uploaded to URI:", uri)
	},
}
// init wires all upload sub-commands under the parent storage command.
func init() {
	storageCmd.AddCommand(storageUploadCmd)
	storageCmd.AddCommand(storageUploadBatchCmd)
	storageCmd.AddCommand(storageUploadImageCmd)
	storageCmd.AddCommand(storageUploadImageLinkCmd)
}
|
araneforseti/heimdallr-viz
|
vendor/cache/ruby/2.6.0/gems/jaro_winkler-1.5.1/ext/jaro_winkler/extconf.rb
|
<filename>vendor/cache/ruby/2.6.0/gems/jaro_winkler-1.5.1/ext/jaro_winkler/extconf.rb
# frozen_string_literal: true
# The C extension is only buildable on CRuby (MRI); other engines
# (JRuby, TruffleRuby, ...) fall back to the pure Ruby implementation.
if RUBY_ENGINE == 'ruby'
  require 'mkmf'
  # The C sources rely on C99 features.
  $CFLAGS << ' -std=c99 '
  create_makefile('jaro_winkler/jaro_winkler_ext')
else
  # Emit a stub Makefile whose `install` target just prints a notice,
  # so `gem install` still succeeds without the native extension.
  open('Makefile', 'wb') do |file|
    file.write <<-MAKEFILE
.PHONY: install
install:
\techo "Cannot compile C Extension, fall back to pure Ruby version."
clean:
    MAKEFILE
  end
end
|
Bhuvanesh1208/ruby2.6.1
|
lib/ruby/2.6.0/optionparser.rb
|
# frozen_string_literal: false
# Compatibility shim: `require 'optionparser'` loads the canonical
# 'optparse' library.
require_relative 'optparse'
|
Dagon0577/DagonParser
|
src/main/java/parser/ast/fragment/tableref/TableReference.java
|
<filename>src/main/java/parser/ast/fragment/tableref/TableReference.java
package parser.ast.fragment.tableref;
import parser.ast.AST;
/**
* @author Dagon0577
* @date 2020/7/14
*/
public interface TableReference extends AST {
    // Relative precedence levels used when nesting table references.
    int PRECEDENCE_REFS = 0;
    int PRECEDENCE_JOIN = 1;
    int PRECEDENCE_FACTOR = 2;

    /** Returns one of the PRECEDENCE_* constants for this reference. */
    int getPrecedence();

    /** Detaches and returns the last condition element — TODO confirm exact semantics in implementers. */
    Object removeLastConditionElement();

    /** True when this reference denotes a single table (no join). */
    boolean isSingleTable();

    /** Replaces child node {@code from} with {@code to}; returns whether a replacement happened. */
    boolean replace(AST from, AST to);

    /** Removes the given schema qualifier — presumably from contained identifiers; returns whether anything changed. */
    boolean removeSchema(byte[] schema);
}
|
hwpplayers/nanoutils
|
lib/concat/index.js
|
<reponame>hwpplayers/nanoutils
import curry2 from '../_internal/_curry2'
/**
 * Curried concat: joins two arrays or two strings; any other operand
 * combination yields null.
 */
export default curry2(function concat(left, right) {
  const bothArrays = Array.isArray(left) && Array.isArray(right)
  const bothStrings = typeof left === 'string' && typeof right === 'string'

  if (bothArrays || bothStrings) {
    return left.concat(right)
  }
  return null
})
|
van-de-bugs/http-server
|
src/main/java/com/debugs/server/http/config/ResponseType.java
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.debugs.server.http.config;
/**
*
* @author <NAME> <<EMAIL>>
*/
public enum ResponseType {
    // Supported response serialization formats for the HTTP server.
    JSON, XML
}
|
dvt32/cpp-journey
|
Java/CodeForces/Password Check.java
|
// https://codeforces.com/problemset/problem/411/A
import java.util.Scanner;
public class Solution {

    /**
     * CodeForces 411A: a password is "Correct" when it is at least five
     * characters long and contains an upper-case letter, a lower-case
     * letter and a digit; otherwise it is "Too weak".
     */
    public static void main(String[] args) {
        Scanner reader = new Scanner(System.in);
        String password = reader.nextLine();

        boolean strong = password.length() >= 5 && hasAllCharClasses(password);

        System.out.println(strong ? "Correct" : "Too weak");

        // Close scanner
        reader.close();
    }

    /** True when the string holds at least one upper-case letter, one lower-case letter and one digit. */
    private static boolean hasAllCharClasses(String s) {
        boolean upper = false, lower = false, digit = false;
        for (char c : s.toCharArray()) {
            upper = upper || Character.isUpperCase(c);
            lower = lower || Character.isLowerCase(c);
            digit = digit || Character.isDigit(c);
            if (upper && lower && digit) {
                return true;
            }
        }
        return false;
    }
}
|
oskaremilsson/ctrl.name
|
src/pages/Playlists/Playlists.js
|
<reponame>oskaremilsson/ctrl.name<filename>src/pages/Playlists/Playlists.js
import React, { useEffect, useState } from "react";
import { useSelector, useDispatch } from "react-redux";
import { selectors, actions } from "shared/stores";
import {
makeStyles,
Box,
List,
ListSubheader,
Divider,
CircularProgress,
Avatar,
} from "@material-ui/core";
import { ToggleButton, ToggleButtonGroup } from "@material-ui/lab";
import PlaylistListItem from "shared/components/PlaylistListItem";
import FloatingCurrentMe from "shared/components/FloatingCurrentMe";
import spotify from "utils/spotify";
// Local styles: the playlist list picks up the theme's default background.
const useStyles = makeStyles((theme) => ({
  list: {
    backgroundColor: theme.palette.background.default,
  },
}));

const { getPlaylists, getCurrentMe, getMe, getMeAccessToken } = selectors;
/**
 * Playlists page.
 *
 * Shows the playlists of the logged-in user ("me") or of the currently
 * controlled user, loading Spotify paging results incrementally and
 * caching the complete list per user id in the redux store.
 */
export default function Playlists() {
  const dispatch = useDispatch();
  const classes = useStyles();

  // Session/user state from the store.
  const currentMe = useSelector((state) => getCurrentMe(state));
  const me = useSelector((state) => getMe(state));
  const meAccessToken = useSelector((state) => getMeAccessToken(state));
  const storedPlaylists = useSelector((state) => getPlaylists(state));

  // selectedPlaylists holds the user id whose playlists are shown.
  const [selectedPlaylists, setSelectedPlaylists] = useState(undefined);
  const [loadMore, setLoadMore] = useState(false);
  const [playlists, setPlaylists] = useState([]);
  // nextQuery is Spotify's paging cursor ("next" URL) or undefined.
  const [nextQuery, setNextQuery] = useState(undefined);
  const [allLoaded, setAllLoaded] = useState(false);

  const meAvatarAlt = (me && me.id) || "me";
  const meAvatarImg = me?.images[0]?.url;
  const currentMeAvatarAlt = currentMe?.id || "current";
  const currentMeAvatarImg = currentMe?.images[0]?.url;

  // List header text depends on whose playlists are displayed.
  // NOTE(review): when the controlled user's display name equals their id,
  // the title falls back to the selected id — confirm this label is intended.
  let listTitle = "My playlists";
  if (selectedPlaylists !== me?.id) {
    listTitle = `${currentMe?.display_name}'s playlists`;
    if (currentMe?.display_name === currentMe?.id) {
      listTitle = `ctrl.${selectedPlaylists}'s playlists`;
    }
  }

  // Default the selection to the logged-in user once known.
  useEffect(() => {
    if (me) {
      setSelectedPlaylists(me.id);
    }
  }, [me]);

  // Serve cached playlists when present; otherwise trigger loading.
  useEffect(() => {
    if (storedPlaylists && selectedPlaylists) {
      if (!storedPlaylists[selectedPlaylists]) {
        setLoadMore(true);
      } else {
        setPlaylists(storedPlaylists[selectedPlaylists]);
      }
    }
  }, [selectedPlaylists, storedPlaylists]);

  // Fetch one page at a time, re-arming loadMore while Spotify returns a
  // "next" cursor. The mounted flag avoids state updates after unmount.
  useEffect(() => {
    let mounted = true;
    if (meAccessToken && loadMore && me && selectedPlaylists) {
      const user =
        selectedPlaylists === me.id ? "me" : `users/${selectedPlaylists}`;
      spotify(meAccessToken, nextQuery)
        .get(nextQuery || `${user}/playlists`)
        .then((res) => {
          if (mounted) {
            setLoadMore(false);
            setPlaylists((playlists) => playlists.concat(res.data.items));
            setNextQuery(res.data.next);
            if (res.data.next) {
              setLoadMore(true);
            } else {
              setAllLoaded(true);
            }
          }
        })
        .catch((_) => {
          console.log("error");
        });
    }
    return () => (mounted = false);
  }, [meAccessToken, playlists, nextQuery, loadMore, me, selectedPlaylists]);

  // Once every page arrived, persist the full list in the store cache.
  useEffect(() => {
    if (selectedPlaylists && allLoaded && playlists && storedPlaylists) {
      setAllLoaded(false);
      dispatch(
        actions.setPlaylists({
          ...storedPlaylists,
          ...{ [selectedPlaylists]: playlists },
        })
      );
    }
  }, [dispatch, selectedPlaylists, allLoaded, playlists, storedPlaylists]);

  return (
    <Box marginTop={2}>
      <Box display="flex" justifyContent="center">
        {currentMe && me && currentMe?.id !== me?.id && (
          <ToggleButtonGroup
            value={selectedPlaylists}
            exclusive
            onChange={(_, value) => {
              if (value !== null) {
                setSelectedPlaylists(value);
                setPlaylists([]);
                setNextQuery(undefined);
              }
            }}
            aria-label="playlist selector"
          >
            <ToggleButton value={me.id} aria-label="my playlists">
              <Avatar alt={meAvatarAlt} src={meAvatarImg} />
            </ToggleButton>
            <ToggleButton value={currentMe.id} aria-label="currentMe playlists">
              <Avatar alt={currentMeAvatarAlt} src={currentMeAvatarImg} />
            </ToggleButton>
          </ToggleButtonGroup>
        )}
      </Box>
      <List className={classes.list}>
        <ListSubheader>{listTitle}</ListSubheader>
        {playlists &&
          playlists.map((playlist, i) => (
            <Box key={playlist.id + i}>
              <PlaylistListItem playlist={playlist} />
              <Divider />
            </Box>
          ))}
      </List>
      {(loadMore || (playlists && playlists.length < 1)) && (
        <Box display="flex" justifyContent="center">
          <CircularProgress />
        </Box>
      )}
      <FloatingCurrentMe />
    </Box>
  );
}
|
DyncKathline/FilePicker
|
filePicker/src/main/java/com/kathline/library/util/PermissionUtil.java
|
package com.kathline.library.util;
import android.Manifest;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import android.text.TextUtils;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentActivity;
import androidx.fragment.app.FragmentManager;
import com.kathline.library.R;
import java.util.ArrayList;
import java.util.List;
/**
* PermissionUtil.getInstance().with(this).requestPermissions()
*/
/**
 * Runtime-permission helper.
 * <p>
 * Usage: {@code PermissionUtil.getInstance().with(this).requestPermissions(perms, listener)}.
 * A headless, retained fragment is attached to the host activity/fragment to
 * receive {@code onRequestPermissionsResult} callbacks.
 * <p>
 * NOTE(review): the singleton keeps only the most recent host's fragment, so
 * calling {@link #requestPermissions} before {@link #with} throws an NPE, and
 * interleaved requests from two hosts will target the last-bound one.
 */
public class PermissionUtil {
    private static final String TAG = "PermissionsUtil";
    // Headless fragment bound by the most recent with(...) call.
    private PermissionFragment fragment;
    private static PermissionUtil mInstance;
    /** Double-checked-locking singleton accessor. */
    public static PermissionUtil getInstance() {
        if(mInstance == null) {
            synchronized (PermissionUtil.class) {
                if(mInstance == null) {
                    mInstance = new PermissionUtil();
                }
            }
        }
        return mInstance;
    }
    /** Bind the helper to an activity host; call before {@link #requestPermissions}. */
    public PermissionUtil with(@NonNull FragmentActivity activity) {
        fragment = getPermissionsFragment(activity);
        return this;
    }
    /** Bind the helper to a fragment host; call before {@link #requestPermissions}. */
    public PermissionUtil with(@NonNull Fragment fragmentX) {
        fragment = getPermissionsFragment(fragmentX);
        return this;
    }
    /**
     * Show the "permission denied" dialog with the default (Chinese) message.
     * User-facing strings are deliberately left untouched.
     */
    public void showDialogTips(Context context, List<String> permission, DialogInterface.OnClickListener onDenied) {
        showDialogTips(context, String.format("您拒绝了相关权限,无法正常使用本功能。请前往 设置->应用管理->%s->权限管理中启用 %s 权限",
                context.getString(R.string.app_name),
                listToString(permission)
        ), permission, onDenied);
    }
    /**
     * Show a non-cancelable dialog explaining that permissions were denied.
     * The positive button jumps to the most specific settings page for the
     * denied permissions; the negative button invokes the caller's listener.
     *
     * @param context    UI context for the dialog
     * @param msg        custom message, or null/empty for the default text
     * @param permission the permissions that were denied
     * @param onDenied   invoked when the user dismisses via the negative button
     */
    public void showDialogTips(final Context context, String msg, final List<String> permission, DialogInterface.OnClickListener onDenied) {
        String message = TextUtils.isEmpty(msg) ? String.format("您拒绝了相关权限,无法正常使用本功能。请前往 设置->应用管理->%s->权限管理中启用 %s 权限", context.getString(R.string.app_name), listToString(permission)) : msg;
        android.app.AlertDialog alertDialog = new AlertDialog.Builder(context).setTitle("权限被禁用").setMessage(message).setCancelable(false)
                .setNegativeButton("返回", onDenied)
                .setPositiveButton("去设置", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        Intent intent = PermissionUtil.PermissionSettingPage.getSmartPermissionIntent(context, permission);
                        // NEW_TASK is required because the dialog context may not be an Activity.
                        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        context.startActivity(intent);
                    }
                }).create();
        alertDialog.show();
    }
    /** Join permission names with commas for display in the dialog. */
    public static String listToString(List<String> list) {
        StringBuilder builder = new StringBuilder();
        int size = list.size();
        for (int i = 0; i < size; i++) {
            if (i < size - 1) {
                builder.append(list.get(i)).append(",");
            } else {
                builder.append(list.get(i));
            }
        }
        return builder.toString();
    }
    /** Find or create the headless fragment on the activity's fragment manager. */
    private PermissionFragment getPermissionsFragment(FragmentActivity activity) {
        PermissionFragment fragment = (PermissionFragment) activity.getSupportFragmentManager().findFragmentByTag(TAG);
        boolean isNewInstance = fragment == null;
        if (isNewInstance) {
            fragment = new PermissionFragment();
            FragmentManager fragmentManager = activity.getSupportFragmentManager();
            fragmentManager
                    .beginTransaction()
                    .add(fragment, TAG)
                    .commitNow();
        }
        return fragment;
    }
    /** Find or create the headless fragment on the host fragment's child manager. */
    private PermissionFragment getPermissionsFragment(Fragment fragmentX) {
        PermissionFragment fragment = (PermissionFragment) fragmentX.getChildFragmentManager().findFragmentByTag(TAG);
        boolean isNewInstance = fragment == null;
        if (isNewInstance) {
            fragment = new PermissionFragment();
            FragmentManager fragmentManager = fragmentX.getChildFragmentManager();
            fragmentManager
                    .beginTransaction()
                    .add(fragment, TAG)
                    .commitNow();
        }
        return fragment;
    }
    /**
     * Request runtime permissions. Must be preceded by a {@link #with} call.
     *
     * @param permissions the permissions to request
     * @param listener    result callback
     */
    public void requestPermissions(String[] permissions, PermissionListener listener) {
        fragment.setListener(listener);
        fragment.requestPermissions(permissions);
    }
    /** Result callback for a permission request. */
    public interface PermissionListener {
        /** All requested permissions were granted. */
        void onGranted();
        /** Denied and rationale can no longer be shown ("Don't ask again"). */
        void onDenied(List<String> deniedPermission);
        /** Denied, but the system still allows showing a rationale. */
        void onShouldShowRationale(List<String> deniedPermission);
    }
    /** Headless fragment that performs the actual request and relays results. */
    public static class PermissionFragment extends Fragment {
        /** Request code used for the permission request. */
        private static final int PERMISSIONS_REQUEST_CODE = 1;
        /** Callback to relay results to. */
        private PermissionListener listener;
        public void setListener(PermissionListener listener) {
            this.listener = listener;
        }
        @Override
        public void onCreate(@Nullable Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // Survive configuration changes so an in-flight request is not lost.
            setRetainInstance(true);
        }
        /**
         * Request the given permissions, skipping any that are already granted.
         *
         * @param permissions permissions to request
         */
        public void requestPermissions(@NonNull String[] permissions) {
            List<String> requestPermissionList = new ArrayList<>();
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                // Collect every permission that has not been granted yet.
                for (String permission : permissions) {
                    if (ContextCompat.checkSelfPermission(getContext(), permission) != PackageManager.PERMISSION_GRANTED) {
                        requestPermissionList.add(permission);
                    }
                }
                if (requestPermissionList.isEmpty()) {
                    // Everything already granted.
                    permissionAllGranted();
                } else {
                    // Ask the system for the missing ones.
                    requestPermissions(requestPermissionList.toArray(new String[requestPermissionList.size()]), PERMISSIONS_REQUEST_CODE);
                }
            }else {
                // Pre-M: permissions are granted at install time.
                permissionAllGranted();
            }
        }
        /**
         * Relay the system's permission result to the listener.
         *
         * @param requestCode  must match the code used when requesting
         * @param permissions  the requested permissions
         * @param grantResults grant result for each permission
         */
        @Override
        public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            if (requestCode != PERMISSIONS_REQUEST_CODE) {
                return;
            }
            if (grantResults.length > 0) {
                List<String> deniedPermissionList = new ArrayList<>();
                for (int i = 0; i < grantResults.length; i++) {
                    if (grantResults[i] != PackageManager.PERMISSION_GRANTED) {
                        deniedPermissionList.add(permissions[i]);
                    }
                }
                if (deniedPermissionList.isEmpty()) {
                    // Everything granted.
                    permissionAllGranted();
                } else {
                    // If any denied permission can still show a rationale, the
                    // user has NOT ticked "Don't ask again" — report that case.
                    for (String deniedPermission : deniedPermissionList) {
                        boolean flag = shouldShowRequestPermissionRationale(deniedPermission);
                        if (flag) {
                            permissionShouldShowRationale(deniedPermissionList);
                            return;
                        }
                    }
                    // Otherwise the denial is permanent.
                    permissionHasDenied(deniedPermissionList);
                }
            }
        }
        /** All permissions granted. */
        private void permissionAllGranted() {
            if (listener != null) {
                listener.onGranted();
            }
        }
        /**
         * Some permissions were permanently denied.
         *
         * @param deniedList the denied permissions
         */
        private void permissionHasDenied(List<String> deniedList) {
            if (listener != null) {
                listener.onDenied(deniedList);
            }
        }
        /**
         * Some permissions were denied but a rationale may still be shown.
         *
         * @param deniedList the denied permissions
         */
        private void permissionShouldShowRationale(List<String> deniedList) {
            if (listener != null) {
                listener.onShouldShowRationale(deniedList);
            }
        }
    }
    /**
     * Whether the device runs Android 11 (API 30) or newer.
     */
    static boolean isAndroid11() {
        // FIX: was `SDK_INT >= 11` (Android 3.0, Honeycomb), which made this
        // method true on every modern device and broke all Android-11 gating
        // below. 30 == Build.VERSION_CODES.R; the literal keeps the class
        // compilable against older compile SDKs (as the original comment hinted).
        return Build.VERSION.SDK_INT >= 30;
    }
    /**
     * Whether the device runs Android 10 (API 29) or newer.
     */
    static boolean isAndroid10() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
    }
    /**
     * Whether the device runs Android 9 (API 28) or newer.
     */
    static boolean isAndroid9() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.P;
    }
    /**
     * Whether the device runs Android 8.0 (API 26) or newer.
     */
    static boolean isAndroid8() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
    }
    /**
     * Whether the device runs Android 7.0 (API 24) or newer.
     */
    static boolean isAndroid7() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
    }
    /**
     * Whether the device runs Android 6.0 (API 23) or newer.
     */
    static boolean isAndroid6() {
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
    }
    /**
     * Whether the permission list contains any "special" permission.
     */
    static boolean containsSpecialPermission(List<String> permissions) {
        if (permissions == null || permissions.isEmpty()) {
            return false;
        }
        for (String permission : permissions) {
            if (isSpecialPermission(permission)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Whether a single permission is a "special" permission (granted via a
     * dedicated settings page rather than the runtime dialog).
     */
    static boolean isSpecialPermission(String permission) {
        return Permission.MANAGE_EXTERNAL_STORAGE.equals(permission) ||
                Permission.REQUEST_INSTALL_PACKAGES.equals(permission) ||
                Permission.SYSTEM_ALERT_WINDOW.equals(permission) ||
                Permission.NOTIFICATION_SERVICE.equals(permission) ||
                Permission.WRITE_SETTINGS.equals(permission);
    }
    /** String constants for the special permissions handled by this helper. */
    public static class Permission {
        /** All-files storage access (special; Android 11+). */
        public static final String MANAGE_EXTERNAL_STORAGE = "android.permission.MANAGE_EXTERNAL_STORAGE";
        /** Install-unknown-apps (special; Android 8.0+). */
        public static final String REQUEST_INSTALL_PACKAGES = "android.permission.REQUEST_INSTALL_PACKAGES";
        /** Notification access (special; Android 6.0+; no manifest entry required). */
        public static final String NOTIFICATION_SERVICE = "android.permission.NOTIFICATION_SERVICE";
        /** Draw-over-other-apps overlay (special; Android 6.0+). */
        public static final String SYSTEM_ALERT_WINDOW = "android.permission.SYSTEM_ALERT_WINDOW";
        /** Modify system settings (special; Android 6.0+). */
        public static final String WRITE_SETTINGS = "android.permission.WRITE_SETTINGS";
    }
    /** Builds settings-page intents appropriate for the denied permissions. */
    public static class PermissionSettingPage {
        /**
         * Pick the most specific settings page for the given denied permissions,
         * falling back to the app-details page.
         */
        public static Intent getSmartPermissionIntent(Context context, List<String> deniedPermissions) {
            // No special permission involved: the generic app-details page suffices.
            if (deniedPermissions == null || deniedPermissions.isEmpty() || !PermissionUtil.containsSpecialPermission(deniedPermissions)) {
                return PermissionSettingPage.getApplicationDetailsIntent(context);
            }
            // Exactly one denied permission: jump straight to its dedicated page.
            if (deniedPermissions.size() == 1) {
                String permission = deniedPermissions.get(0);
                if (Permission.MANAGE_EXTERNAL_STORAGE.equals(permission)) {
                    return getStoragePermissionIntent(context);
                }
                if (Permission.REQUEST_INSTALL_PACKAGES.equals(permission)) {
                    return getInstallPermissionIntent(context);
                }
                if (Permission.SYSTEM_ALERT_WINDOW.equals(permission)) {
                    return getWindowPermissionIntent(context);
                }
                if (Permission.NOTIFICATION_SERVICE.equals(permission)) {
                    return getNotifyPermissionIntent(context);
                }
                if (Permission.WRITE_SETTINGS.equals(permission)) {
                    return getSettingPermissionIntent(context);
                }
                return getApplicationDetailsIntent(context);
            }
            // Android 11 storage trio (MANAGE + READ + WRITE) maps to the
            // all-files-access page.
            if (PermissionUtil.isAndroid11() && deniedPermissions.size() == 3 &&
                    (deniedPermissions.contains(Permission.MANAGE_EXTERNAL_STORAGE) &&
                            deniedPermissions.contains(Manifest.permission.READ_EXTERNAL_STORAGE) &&
                            deniedPermissions.contains(Manifest.permission.WRITE_EXTERNAL_STORAGE))) {
                return getStoragePermissionIntent(context);
            }
            return PermissionSettingPage.getApplicationDetailsIntent(context);
        }
        /**
         * Intent for the application-details settings page.
         */
        static Intent getApplicationDetailsIntent(Context context) {
            Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
            intent.setData(Uri.parse("package:" + context.getPackageName()));
            return intent;
        }
        /**
         * Intent for the install-unknown-apps settings page.
         */
        static Intent getInstallPermissionIntent(Context context) {
            Intent intent = null;
            if (PermissionUtil.isAndroid8()) {
                intent = new Intent(Settings.ACTION_MANAGE_UNKNOWN_APP_SOURCES);
                intent.setData(Uri.parse("package:" + context.getPackageName()));
            }
            if (intent == null || !areActivityIntent(context, intent)) {
                intent = getApplicationDetailsIntent(context);
            }
            return intent;
        }
        /**
         * Intent for the overlay (draw-over-apps) settings page.
         */
        static Intent getWindowPermissionIntent(Context context) {
            Intent intent = null;
            if (PermissionUtil.isAndroid6()) {
                intent = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION);
                // On Android 11 the package URI must NOT be set: it has no effect
                // and has been reported to crash on Android 11 TV emulators.
                // https://developer.android.google.cn/reference/android/provider/Settings#ACTION_MANAGE_OVERLAY_PERMISSION
                if (!PermissionUtil.isAndroid11()) {
                    intent.setData(Uri.parse("package:" + context.getPackageName()));
                }
            }
            if (intent == null || !areActivityIntent(context, intent)) {
                intent = getApplicationDetailsIntent(context);
            }
            return intent;
        }
        /**
         * Intent for the app-notification settings page.
         */
        static Intent getNotifyPermissionIntent(Context context) {
            Intent intent = null;
            if (PermissionUtil.isAndroid8()) {
                intent = new Intent(Settings.ACTION_APP_NOTIFICATION_SETTINGS);
                intent.putExtra(Settings.EXTRA_APP_PACKAGE, context.getPackageName());
                //intent.putExtra(Settings.EXTRA_CHANNEL_ID, context.getApplicationInfo().uid);
            }
            if (intent == null || !areActivityIntent(context, intent)) {
                intent = getApplicationDetailsIntent(context);
            }
            return intent;
        }
        /**
         * Intent for the modify-system-settings page.
         */
        static Intent getSettingPermissionIntent(Context context) {
            Intent intent = null;
            if (PermissionUtil.isAndroid6()) {
                intent = new Intent(Settings.ACTION_MANAGE_WRITE_SETTINGS);
                intent.setData(Uri.parse("package:" + context.getPackageName()));
            }
            if (intent == null || !areActivityIntent(context, intent)) {
                intent = getApplicationDetailsIntent(context);
            }
            return intent;
        }
        /**
         * Intent for the all-files-access (storage) settings page.
         */
        static Intent getStoragePermissionIntent(Context context) {
            Intent intent = null;
            if (PermissionUtil.isAndroid11()) {
                // Literal action string keeps this compilable pre-API-30:
                // Settings.ACTION_MANAGE_APP_ALL_FILES_ACCESS_PERMISSION
                intent = new Intent("android.settings.MANAGE_APP_ALL_FILES_ACCESS_PERMISSION");
                intent.setData(Uri.parse("package:" + context.getPackageName()));
            }
            if (intent == null || !areActivityIntent(context, intent)) {
                intent = getApplicationDetailsIntent(context);
            }
            return intent;
        }
        /**
         * Whether some activity can handle this intent.
         */
        private static boolean areActivityIntent(Context context, Intent intent) {
            return !context.getPackageManager().queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY).isEmpty();
        }
    }
}
|
Brucehanyf/python_tutorial
|
base/day05.py
|
<reponame>Brucehanyf/python_tutorial
# Dictionaries: Python's key/value mapping (similar to a "map" elsewhere).
# Define a dictionary.
alien_0 = {'color': 'green', 'point': 5}
# Access values by key.
print(alien_0['color'])
print(alien_0['point'])
new_point = alien_0['point']
print('you just have ' + str(new_point) + 'points')
# Add key/value pairs.
alien_0['x_position'] = 1
# FIX: the key was misspelled 'y_positon'.
alien_0['y_position'] = 10
print(alien_0)
# Create an empty dictionary and populate it.
alien_1 = {}
alien_1['color'] = 'Blue'
alien_1['points'] = 109
print(alien_1)
# Modify existing values.
alien_1['color'] = 'Red'
alien_1['points'] = 10
print(alien_1)
# Delete a key/value pair.
del alien_1['color']
print(alien_1)
favorite_language = {
    'jen': 'python',
    'sarah': 'c',
    'bob': 'rubby',
    'phil': 'python'
}
print("bob favorite language is " + favorite_language['bob'])
# Iterating over dictionaries.
user_0 = {
    'username': 'User',
    'age': 10,
    'gender': 'girl'
}
# Iterate over key/value pairs.
for key, val in user_0.items():
    print(str(key) + str(val))
# Iterate over all keys explicitly.
for key in user_0.keys():
    print(key)
# Iterating a dict directly also yields its keys.
for name in user_0:
    print(name)
# Iterate over the keys in sorted order.
for name in sorted(user_0.keys()):
    print(name.title() + ' thank you!')
# Iterate over all values.
for value in user_0.values():
    print('value: ' + str(value))
# Nesting: a list of dictionaries.
alies_s = [alien_1, alien_0]
print(alies_s)
alies_s = []
for alien in range(30):
    new_alien = {'color': 'red', 'point': alien, 'speed': 'slow'}
    alies_s.append(new_alien)
print(alies_s)
# A list stored inside a dictionary.
pizz = {
    'crust': 'thick',
    'toppings': ['mushroom', 'extra_cheese']
}
favorite_languages = {
    'jen': ['python', 'java'],
    'sarch': ['c', 'c##'],
    'edward': ['rubby', 'jumlia'],
    'bob': ['js', 'go']
}
for name, languages in favorite_languages.items():
    for language in languages:
        print(language)
# A dictionary stored inside a dictionary.
users = {
    'alice': {
        'first': 'alice',
        'last': 'A',
        'location': 'BJ'
    },
    'bruce': {
        'first': 'bruce',
        'last': 'B',
        'location': 'SZ'
    },
    'Cherry': {
        'first': 'cherry',
        'last': 'C',
        'location': 'GZ'
    }
}
for user, user_info in users.items():
    print('user info: ' + user_info['first'] + user_info['last'] + user_info['location'])
|
miketweaver/thorium
|
src/components/views/Targeting/coordinates.js
|
<reponame>miketweaver/thorium
import React, {Component} from "react";
import {Row, Col} from "helpers/reactstrap";
import Keypad from "../Navigation/keypad";
import gql from "graphql-tag.macro";
// Return true when the two coordinate objects differ on any of x/y/z.
// Nullish arguments are treated as empty coordinates.
function compareCoord(coordA = {}, coordB = {}) {
  const a = coordA || {};
  const b = coordB || {};
  return a.x !== b.x || a.y !== b.y || a.z !== b.z;
}
export default class Coordinates extends Component {
constructor(props) {
super(props);
this.state = {
calculatedTarget: props.targeting.calculatedTarget || {},
selectedField: null,
enteredTarget: props.targeting.enteredTarget || {},
};
}
componentDidUpdate(prevProps) {
if (
compareCoord(
this.props.targeting.calculatedTarget,
prevProps.targeting.calculatedTarget,
) ||
compareCoord(
this.props.targeting.enteredTarget,
prevProps.targeting.enteredTarget,
)
) {
this.setState({
calculatedTarget: this.props.targeting.calculatedTarget || {},
enteredTarget: this.props.targeting.enteredTarget || {},
});
}
}
keydown = e => {
let key;
let enteredTarget = Object.assign({}, this.state.enteredTarget);
let selectedField = this.state.selectedField;
if (selectedField === null) {
selectedField = "x";
enteredTarget = {
x: "",
y: "",
z: "",
};
}
if (e.which) {
// Empty
} else {
key = e.toString();
}
if (
enteredTarget[selectedField] === null ||
enteredTarget[selectedField] === undefined
)
enteredTarget[selectedField] = "";
if (key === "." && enteredTarget[selectedField].indexOf(".") > -1) return;
enteredTarget[selectedField] += key;
this.setState({
enteredTarget,
selectedField,
});
};
clear = () => {
const {enteredTarget, selectedField} = this.state;
if (
selectedField === null ||
enteredTarget[selectedField] === null ||
enteredTarget[selectedField] === ""
) {
this.setState({
enteredTarget: {},
selectedField: null,
});
return;
}
enteredTarget[selectedField] = enteredTarget[selectedField].slice(
0,
enteredTarget[selectedField].length - 1,
);
this.setState({
enteredTarget,
});
};
enter = () => {
const {enteredTarget, selectedField} = this.state;
if (selectedField === null) {
this.setState({
selectedField: "x",
});
return;
}
if (
enteredTarget[selectedField] === null ||
enteredTarget[selectedField] === ""
) {
return;
}
if (
selectedField === "z" &&
enteredTarget[selectedField] !== null &&
enteredTarget[selectedField] !== ""
) {
this.inputTarget();
return;
}
if (selectedField === "x") {
this.setState({
selectedField: "y",
});
return;
}
if (selectedField === "y") {
this.setState({
selectedField: "z",
});
return;
}
};
inputTarget = () => {
const {enteredTarget} = this.state;
const mutation = gql`
mutation SetEnteredTarget(
$id: ID!
$coordinates: StringCoordinatesInput!
) {
setTargetingEnteredTarget(id: $id, coordinates: $coordinates)
}
`;
const variables = {
id: this.props.targeting.id,
coordinates: enteredTarget,
};
this.props.client.mutate({
mutation,
variables,
});
this.setState({
selectedField: null,
});
};
render() {
const {enteredTarget, selectedField, calculatedTarget} = this.state;
return (
<Row style={{justifyContent: "space-around"}}>
<Col style={{maxWidth: "20vw"}}>
<h4>Keypad</h4>
<Keypad
margin
keydown={this.keydown}
clear={this.clear}
enter={this.enter}
/>
</Col>
<Col style={{maxWidth: "20vw"}}>
<Row>
<Col>
<h4>Calculated Coordinates</h4>
<div className="coordinate-box">
X: <div>{calculatedTarget.x || ""}</div>
</div>
<div className="coordinate-box">
Y: <div>{calculatedTarget.y || ""}</div>
</div>
<div className="coordinate-box">
Z: <div>{calculatedTarget.z || ""}</div>
</div>
</Col>
</Row>
<Row className="current-coordinates">
<Col>
<h4>Current Coordinates</h4>
<div className="coordinate-box">
X:{" "}
<div className={`${selectedField === "x" ? "selected" : ""}`}>
{enteredTarget.x}
</div>
</div>
<div className="coordinate-box">
Y:{" "}
<div className={`${selectedField === "y" ? "selected" : ""}`}>
{enteredTarget.y}
</div>
</div>
<div className="coordinate-box">
Z:{" "}
<div className={`${selectedField === "z" ? "selected" : ""}`}>
{enteredTarget.z}
</div>
</div>
</Col>
</Row>
</Col>
</Row>
);
}
}
|
kshitij1234/Chemisty-Department-Website
|
chemdeptsite/models.py
|
import os
from django.db import models
# from django.core.exceptions import ValidationError
#
# def validate_only_one_instance(obj):
# model = obj.__class__
# if (model.objects.count() > 0 and
# obj.id != model.objects.get().id):
# raise ValidationError("Can only create 1 %s instance. Delete previous or modify it." % model.__name__)
def get_image_path(instance, filename):
    """Return the static-image path for *filename*, grouped by model class name.

    Used as the ``upload_to`` callable for ImageFields; Django calls it with
    (model instance, original filename).
    """
    parts = ("chemdeptsite", "static", "images", type(instance).__name__, filename)
    return os.path.join(*parts)
class HeadsDesk(models.Model):
    """Head-of-department message with an optional portrait image.

    NOTE(review): apparently intended to be a single-instance model — see the
    commented-out ``clean``/``validate_only_one_instance`` hook below.
    """
    name = models.CharField(max_length=100, blank=False)  # need for alt tag of image
    picture = models.ImageField(upload_to=get_image_path, blank=True, null=True)
    message = models.TextField()
    def save(self, *args, **kwargs):
        """Delete any replaced image file from storage before saving."""
        # object is possibly being updated, if so, clean up.
        self.remove_on_image_update()
        return super(HeadsDesk, self).save(*args, **kwargs)
    def remove_on_image_update(self):
        """If this save replaces an existing image, delete the old file."""
        try:
            # is the object in the database yet?
            obj = HeadsDesk.objects.get(pk=self.pk)
        except HeadsDesk.DoesNotExist:
            # object is not in db, nothing to worry about
            return
        # is the save due to an update of the actual image file?
        if obj.picture and self.picture and obj.picture != self.picture:
            # delete the old image file from the storage in favor of the new file
            obj.picture.delete()
    def get_image_url(self):
        """Return the picture URL with its leading prefix stripped.

        NOTE(review): the magic slice ``[19:]`` presumably removes a fixed
        storage-path prefix — confirm against the project's MEDIA/static
        settings before changing.
        """
        return str(self.picture.url)[19:]
    def __str__(self):
        # Admin/display representation.
        return "Entry : " + str(self.id)
    # def clean(self):
    #     validate_only_one_instance(self)
    class Meta:
        verbose_name_plural = "HeadsDesk"
class NoticeBoard(models.Model):
    """A dated notice shown on the department notice board."""
    title = models.CharField(max_length=300, blank=False)  # headline (required)
    body = models.CharField(max_length=300, blank=True, null=True)  # optional detail text
    date = models.DateTimeField()  # when the notice was posted
    def __str__(self):
        # Admin/display representation.
        return self.title
    class Meta:
        verbose_name_plural = "NoticeBoard"
class News(models.Model):
    """A dated news entry with free-form details."""
    title = models.CharField(max_length=300, blank=False)  # headline (required)
    date = models.DateTimeField()  # publication date
    more_info = models.TextField()  # full article / details
    def __str__(self):
        # Admin/display representation.
        return self.title
    class Meta:
        verbose_name_plural = "News"
class QuickLinks(models.Model):
    """A titled external link shown in the quick-links section."""
    title = models.CharField(max_length=300, blank=False)  # link label (required)
    link = models.URLField()  # target URL
    date = models.DateTimeField()  # when the link was added
    def __str__(self):
        # Admin/display representation.
        return self.title
    class Meta:
        verbose_name_plural = "QuickLinks"
class Facilities(models.Model):
    """A department facility with an optional illustrative image.

    NOTE(review): the save/remove_on_image_update/get_image_url trio
    duplicates HeadsDesk — a shared abstract base or mixin would remove
    the duplication.
    """
    name = models.CharField(max_length=500, blank=False)  # facility name (required)
    text = models.TextField(max_length=2000, blank=True)  # optional description
    picture = models.ImageField(upload_to=get_image_path, blank=True, null=True)
    def save(self, *args, **kwargs):
        """Delete any replaced image file from storage before saving."""
        # object is possibly being updated, if so, clean up.
        self.remove_on_image_update()
        return super(Facilities, self).save(*args, **kwargs)
    def remove_on_image_update(self):
        """If this save replaces an existing image, delete the old file."""
        try:
            # is the object in the database yet?
            obj = Facilities.objects.get(pk=self.pk)
        except Facilities.DoesNotExist:
            # object is not in db, nothing to worry about
            return
        # is the save due to an update of the actual image file?
        if obj.picture and self.picture and obj.picture != self.picture:
            # delete the old image file from the storage in favor of the new file
            obj.picture.delete()
    def get_image_url(self):
        """Return the picture URL with its leading prefix stripped.

        NOTE(review): magic slice ``[19:]`` — same assumption as
        HeadsDesk.get_image_url; confirm before changing.
        """
        return str(self.picture.url)[19:]
    def __str__(self):
        # Admin/display representation.
        return self.name
    class Meta:
        verbose_name_plural = "Facilities"
|
RedisLabsModules/redisbench-admin
|
tests/test_metrics.py
|
# BSD 3-Clause License
#
# Copyright (c) 2021., Redis Labs Modules
# All rights reserved.
#
import json
import os
import yaml
from redistimeseries.client import Client
from redisbench_admin.run.common import merge_default_and_config_metrics
from redisbench_admin.run.metrics import extract_results_table, collect_redis_metrics
def test_extract_results_table():
    """Smoke-test extract_results_table against the checked-in fixture files."""
    config_path = "./tests/test_data/redis-benchmark-full-suite-1Mkeys-100B.yml"
    results_path = (
        "./tests/test_data/results/"
        "oss-standalone-2021-07-23-16-15-12-71d4528-redis-benchmark-full-suite-1Mkeys-100B.json"
    )
    with open(config_path, "r") as yml_file:
        benchmark_config = yaml.safe_load(yml_file)
    # Only the merged metrics list matters here; the time-metric path is unused.
    _, metrics = merge_default_and_config_metrics(benchmark_config, None, None)
    with open(results_path, "r") as json_file:
        results_dict = json.load(json_file)
    extract_results_table(metrics, results_dict)
def test_collect_redis_metrics():
    """Exercise collect_redis_metrics against a live RedisTimeSeries datasink."""
    rts_host = os.getenv("RTS_DATASINK_HOST", None)
    rts_port = 16379
    # Fail fast when the datasink host env var is missing (as the original did).
    assert rts_host is not None
    rts = Client(port=rts_port, host=rts_host)
    rts.redis.ping()
    _, metrics_arr, overall_metrics = collect_redis_metrics([rts.redis])
    assert len(metrics_arr) == 1
    single = metrics_arr[0]
    # Exactly the two metric families, cpu and memory.
    assert len(single.keys()) == 2
    assert "cpu" in single
    assert "memory" in single
    assert "allocator_active" in single["memory"]
    allocator_active = single["memory"]["allocator_active"]
    assert allocator_active == overall_metrics["memory_allocator_active"]
    # Aggregating the same connection twice doubles the summed metric.
    _, _, overall_metrics = collect_redis_metrics([rts.redis, rts.redis])
    assert (2 * allocator_active) == overall_metrics["memory_allocator_active"]
|
granitic/bookmark-service
|
src/test/java/com/example/bookmark/security/unit/brokenauthentication/PasswordSecurityRequirementTests.java
|
package com.example.bookmark.security.unit.brokenauthentication;
import com.example.bookmark.service.User;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Ensure ASVS V2.1 Password Security Requirements are met.
*/
@DisplayName("V2.1 Password Security Requirements")
@TestMethodOrder(OrderAnnotation.class)
public class PasswordSecurityRequirementTests {
static Validator validator;
@BeforeAll
static void init() {
ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
validator = factory.getValidator();
}
@DisplayName("2.1.1 Verify that user set passwords are at least 12 characters in length")
@Order(1)
@Test
void verifyPasswordsAreAtLeast12CharactersInLength() {
User user = createUserWithPassword("<PASSWORD>");
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).hasSize(1);
assertThat(violations.iterator().next().getMessage()).isEqualTo("Password must be 12 or more characters in length.");
}
@DisplayName("2.1.2 Verify that passwords 64 characters or longer are permitted but may" +
"be no longer than 128 characters")
@Order(2)
@Test
void verifyPasswordsOf64CharactersOrLongerArePermitted() {
User user = createUserWithPassword("<PASSWORD>" +
"jksnsn_okuhj:juiklasjldfjdhjkhaljlafiuioaioakljlklhsah");
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).isEmpty();
user = createUserWithPassword("<PASSWORD>" +
"jksnsn_okuhj:juiklasjldfjdhjkhaljlafiuioaioakljlklhsahjshskdkdjdhdndhdhdudjdndmmdkdldidjdjdzhdujlsj" +
"kjsaldasioioeuoueqiouieoqrkjlkjklajklj");
violations = validator.validate(user);
assertThat(violations).hasSize(1);
assertThat(violations.iterator().next().getMessage()).isEqualTo("Password must be no more than 128 characters in length.");
}
@DisplayName("2.1.3 Verify that password truncation is not performed")
@Order(3)
@Test
void verifyPasswordTruncationIsNotPerformed() {
String password = "<PASSWORD>" +
"jksnsn_okuhj:juiklasjldfjdhjkhaljlafiuioaioakljlklhsah";
User user = createUserWithPassword(password);
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).isEmpty();
assertThat(user.getPassword()).isEqualTo(password);
}
@DisplayName("2.1.4 Verify that any printable Unicode character is permitted")
@Order(4)
@Test
void verifyAnyPrintableUnicodeCharacterIsPermitted() {
String emoji = "\\ud83d\\udc3b";
String password = "<PASSWORD>!\"§$%&/()=?!_-:.;,öäü@€" + emoji;
User user = createUserWithPassword(password);
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).isEmpty();
assertThat(user.getPassword()).isEqualTo(password);
}
@DisplayName("2.1.5 Verify users can change their password")
@Order(5)
@Disabled("Can only be tested in integration test")
@Test
void verifyUsersCanChangePassword() {
}
@DisplayName("2.1.6 Verify password change requires the user's current and new password")
@Order(6)
@Disabled("Can only be tested in integration test")
@Test
void verifyPasswordChangeRequiresOldAndNewPassword() {
}
@DisplayName("2.1.7 Verify that passwords are checked against a set of breached passwords")
@Order(7)
@ParameterizedTest
@ValueSource(strings = {"1234567Password", "<PASSWORD>", "<PASSWORD>", "<PASSWORD>", "<PASSWORD>"})
void verifyPasswordsAreCheckedAgainstBreachedPasswords(String password) {
User user = createUserWithPassword(password);
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).hasSize(1);
assertThat(violations.iterator().next().getMessage()).contains(
"Password contains the dictionary word");
}
@DisplayName("2.1.8 Verify that a password strength meter is provided")
@Order(8)
@Disabled("This should be tested on the frontend")
@Test
void verifyPasswordsStrengthMeterIsProvided() {
}
@DisplayName("2.1.9 Verify that there are no password composition rules limiting the type" +
"of characters permitted. There should be no requirement for upper or" +
"lower case or numbers or special characters")
@Order(9)
@ParameterizedTest
@ValueSource(strings = {"hajanamanahaolakaiajkal", "!$/()(&%=)(%$§$$&/&%&((/", "NHJMNGGGHJKUZTTIKKKLLO", "18278625363898653"})
void verifyThereIsNoPasswordCompositionRulesLimiting(String password) {
User user = createUserWithPassword(password);
Set<ConstraintViolation<User>> violations = validator.validate(user);
assertThat(violations).isEmpty();
assertThat(user.getPassword()).isEqualTo(password);
}
@DisplayName("2.1.10 Verify that there are no periodic credential rotation or password history requirements.")
@Order(10)
@Disabled("This should be tested in an integration test")
@Test
void verifyNoPeriodicCredentialRotationIsRequired() {
}
@DisplayName("2.1.11 Verify that \"paste\" functionality, browser password helpers, and " +
"external password managers are permitted.")
@Order(11)
@Disabled("This should be tested in frontend")
@Test
void verifyPasteFunctionalityIsPermitted() {
}
// FIX: the original @DisplayName joined "entire" + "masked" with no space,
// producing "the entiremasked password" in test reports.
@DisplayName("2.1.12 Verify that the user can choose to either temporarily view the entire " +
        "masked password, or temporarily view the last typed character of the password")
@Order(12)
@Disabled("This should be tested in frontend")
@Test
void verifyTemporaryViewPassword() {
    // Intentionally empty: password reveal is a UI behaviour (see @Disabled).
}
/**
 * Builds a throwaway user whose only varying field is the password; every
 * other field is a fixed placeholder so tests exercise password validation
 * in isolation.
 */
private User createUserWithPassword(String password) {
    String id = UUID.randomUUID().toString();
    List<String> roles = List.of("USER");
    return new User(id, "firstname", "lastname", password, "<EMAIL>", roles);
}
}
|
austxcodemonkey/geode-native
|
tests/cpp/fwklib/UDPIpc.cpp
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "UDPIpc.hpp"
#include <sstream>
#include <string>
#include <vector>
#include "FwkStrCvt.hpp"
#include "GsRandom.hpp"
#include "config.h"
namespace apache {
namespace geode {
namespace client {
namespace testframework {
// Send a minimal ACK_REQUEST datagram to `who`; returns send()'s result.
// (send() may additionally wait for an acknowledgement — see needToAck().)
bool UDPMessage::ping(ACE_SOCK_Dgram& io, ACE_INET_Addr& who) {
  clear();
  setCmd(ACK_REQUEST);
  setSender(who);
  return send(io);
}
// Render the header fields and (optionally truncated) payload for logging.
// max > 0 limits how many payload bytes are included; max <= 0 includes all.
std::string UDPMessage::dump(int32_t max) {
  char buf[1024];
  std::string dmp("Dump of message parts: ");
  // snprintf instead of sprintf: bounds the write so a long cmdString() can
  // never overflow the stack buffer.
  snprintf(buf, sizeof(buf),
           "\nTag: %u\nCmd: %u %s\nId: %u\nLength: %u\nMessage:\n",
           m_hdr.tag, m_hdr.cmd, cmdString(m_hdr.cmd), m_hdr.id,
           ntohl(m_hdr.length));
  dmp += buf;
  if (!m_msg.empty()) {
    if (max > 0) {
      dmp += m_msg.substr(0, max);
    } else {
      dmp += m_msg;
    }
  } else {
    dmp += "No Message.";
  }
  dmp += "\nEnd of Dump";
  return dmp;
}
// Receive one datagram into this message, validating the framing header.
// Returns false on timeout (default 2s when `timeout` is null); throws
// FwkException on malformed input; replies with an ACK when required.
// FIX: the recv()-allocated iovec buffer previously leaked on every
// exception path — ownership is now taken immediately and released once
// on all exits.
bool UDPMessage::receiveFrom(ACE_SOCK_Dgram& io,
                             const ACE_Time_Value* timeout) {
  bool ok = true;
  ACE_Time_Value wait(2);
  if (!timeout) {
    timeout = &wait;
  }
  iovec buffs;
  int32_t red = static_cast<int32_t>(io.recv(&buffs, m_sender, 0, timeout));
  if (red < 0) {
    if (errno != ETIME) {
      FWKEXCEPTION("UDPMessage::receiveFrom: Failed, errno: " << errno);
    }
    return false;  // timeout: nothing received, not an error
  }
  // recv() allocated buffs.iov_base; take ownership so every exit below
  // releases it exactly once.
  char* base = reinterpret_cast<char*>(buffs.iov_base);
  uint32_t len = buffs.iov_len;
  if (len < UDP_HEADER_SIZE) {  // We must at least have a header
    delete[] base;
    FWKEXCEPTION("UDPMessage::receiveFrom: Failed, header length: " << len);
  }
  clear();
  memcpy(&m_hdr, base, UDP_HEADER_SIZE);
  if (m_hdr.tag != UDP_MSG_TAG) {
    delete[] base;
    FWKEXCEPTION("UDPMessage::receiveFrom: Failed, invalid tag: " << m_hdr.tag);
  }
  char* ptr = base + UDP_HEADER_SIZE;
  len -= UDP_HEADER_SIZE;
  uint32_t sent = ntohl(m_hdr.length);  // payload length travels in network order
  if (sent != len) {
    delete[] base;
    FWKEXCEPTION("UDPMessage::receiveFrom: Failed, expected "
                 << sent << " bytes, received " << len);
  }
  if (len > 0) {
    m_msg = std::string(ptr, len);
  }
  delete[] base;
  // FWKINFO( "UDPMessage::receiveFrom: " << dump( 50 ) );;
  if (needToAck()) {
    UDPMessage ack(ACK);
    ok = (ok && ack.sendTo(io, m_sender));
  }
  return ok;
}
// Address this message to `who` and transmit it via send().
bool UDPMessage::sendTo(ACE_SOCK_Dgram& io, ACE_INET_Addr& who) {
  setSender(who);
  return send(io);
}
// Transmit header + optional payload as one datagram to m_sender. The
// header's length field carries the payload size in network byte order.
// When the command requires an acknowledgement, blocks waiting for the ACK.
bool UDPMessage::send(ACE_SOCK_Dgram& io) {
  bool ok = true;
  int32_t tot = 0;
  int32_t vcnt = 1;  // number of iovec entries actually used
  iovec buffs[2];
  m_hdr.length = 0;
  buffs[0].iov_base = reinterpret_cast<char*>(&m_hdr);
  buffs[0].iov_len = UDP_HEADER_SIZE;
  tot += UDP_HEADER_SIZE;
  if (!m_msg.empty()) {
    auto len = static_cast<uint32_t>(m_msg.size());
    m_hdr.length = htonl(len);  // payload length in network byte order
    buffs[1].iov_base = const_cast<char*>(m_msg.c_str());
    buffs[1].iov_len = len;
    vcnt = 2;
    tot += len;
  }
  // FWKINFO( "UDPMessage::send: " << dump( 50 ) );;
  int32_t sent = static_cast<int32_t>(io.send(buffs, vcnt, m_sender));
  if (sent < 0) {
    FWKEXCEPTION("UDPMessage::send: Failed, errno: " << errno);
  }
  if (sent != tot) {
    // Short send is reported but not retried.
    ok = false;
    FWKSEVERE("UDPMessage::send: Failed to completely send, " << sent << ", "
              << tot);
  }
  if (needToAck()) {
    UDPMessage ack;
    ok = (ok && ack.receiveFrom(io));  // wait for the peer's ACK
  }
  return ok;
}
// Bind a local UDP endpoint on a randomly probed port, then handshake with
// the server: ping it, request a dedicated worker address (ADDR_REQUEST),
// verify the returned address with a ping, and switch m_server to it.
// FIX: the port-probing loop never decremented `tries`, so a persistent
// open() failure looped forever; also replaced the needless heap-allocated
// ACE_INET_Addr with a stack object.
UDPMessageClient::UDPMessageClient(std::string server)
    : m_server(server.c_str()) {
  int32_t result = -1;
  int32_t tries = 100;
  ACE_INET_Addr client;
  while ((result < 0) && (tries-- > 0)) {
    uint32_t port = GsRandom::random(1111u, 31111u) + tries;
    client.set(port, "localhost");
    result = m_io.open(client);
  }
  if (result < 0) {
    FWKEXCEPTION("Client failed to open io, " << errno);
  }
  const ACE_Time_Value timeout(20);
  UDPMessage msg;
  tries = 3;
  bool pingMsg = false;
  while (!pingMsg && tries-- > 0) {
    try {
      pingMsg = msg.ping(m_io, m_server);
    } catch (...) {
      continue;  // transient failure: retry the ping
    }
  }
  if (pingMsg) {
    msg.setSender(m_server);
    bool connectionOK = false;
    tries = 10;
    while (!connectionOK && (--tries > 0)) {
      try {
        msg.clear();
        msg.setCmd(ADDR_REQUEST);
        if (msg.sendTo(m_io, m_server)) {
          if (msg.receiveFrom(m_io, &timeout)) {
            std::string newConn = msg.what();
            ACE_INET_Addr conn(newConn.c_str());
            if (msg.ping(m_io, conn)) {  // We have a working addr
              m_server = conn;
              connectionOK = true;
              tries = 0;
            }
          }
        }
      } catch (...) {
        continue;  // retry the whole request/receive/ping round
      }
    }
    if (!connectionOK) {
      FWKEXCEPTION(
          "UDPMessageClient failed to establish connection to server.");
    }
  } else {
    FWKEXCEPTION("Failed to contact " << server);
  }
}
// Worker loop. The designated listener thread services ADDR_REQUESTs by
// handing out worker addresses round-robin; every other thread does the
// same and additionally forwards payload-bearing messages to the inbound
// queue. Runs until *m_run is cleared; errors are logged, not propagated.
int32_t Receiver::doTask() {
  auto msg = std::unique_ptr<UDPMessage>(new UDPMessage());
  UDPMessage cmsg;
  try {
    while (*m_run) {
      if (isListener()) {
        cmsg.clear();
        ACE_Time_Value wait(30);  // Timeout is relative time.
        if (cmsg.receiveFrom(*m_io, &wait)) {
          if (cmsg.getCmd() == ADDR_REQUEST) {
            // Rotate the address list so successive requests are spread
            // across the worker endpoints round-robin.
            auto&& addr = m_addrs.front();
            m_addrs.pop_front();
            m_addrs.push_back(addr);
            cmsg.clear();
            cmsg.setMessage(addr);
            cmsg.setCmd(ADDR_RESPONSE);
            cmsg.send(*m_io);
          }
        }
      } else {
        msg->clear();
        ACE_Time_Value timeout(2);
        if (msg->receiveFrom(
                *m_io, &timeout)) {  // Timeout is relative time, send ack.
          if (msg->getCmd() == ADDR_REQUEST) {
            auto&& addr = m_addrs.front();
            m_addrs.pop_front();
            m_addrs.push_back(addr);
            cmsg.clear();
            cmsg.setMessage(addr);
            cmsg.setCmd(ADDR_RESPONSE);
            cmsg.send(*m_io);
          }
          if (msg->length() > 0) {
            // Transfer ownership of the received message to the queue and
            // start over with a fresh one.
            m_queues->putInbound(msg.release());
            msg.reset(new UDPMessage());
          }
        }
      }
    }
  } catch (FwkException& ex) {
    FWKSEVERE("Receiver::doTask() caught exception: " << ex.what());
  } catch (...) {
    FWKSEVERE("Receiver::doTask() caught unknown exception");
  }
  return 0;
}
// Open this thread's UDP endpoint. Exactly one thread wins m_mutex and
// becomes the listener on m_basePort; every other thread probes successive
// ports and, on success, publishes its "host:port" string into m_addrs.
// The advertised host can be overridden via the GF_FQDN environment variable.
void Receiver::initialize() {
  int32_t tries = 100;
  uint16_t port = m_basePort;
  int32_t lockResult = m_mutex.tryacquire();
  int32_t result = -1;
  if (lockResult != -1) {  // The listener thread
    ACE_INET_Addr addr(port, "localhost");
    m_listener = ACE_Thread::self();
    result = m_io->open(addr);
  } else {
    while ((result < 0) && (--tries > 0)) {
      // m_offset increments per attempt, so concurrent threads probe
      // distinct ports above the base port.
      port += ++m_offset;
      ACE_INET_Addr addr(port, "localhost");
      result = m_io->open(addr);
      if (result == 0) {
        char hbuff[256];
        char* hst = &hbuff[0];
        char* fqdn = ACE_OS::getenv("GF_FQDN");
        if (fqdn) {
          hst = fqdn;
        } else {
          addr.get_host_name(hbuff, 255);
        }
        char buff[1024];
        sprintf(buff, "%s:%u", hst, port);
        m_addrs.push_back(buff);
      }
    }
  }
  if (result < 0) {
    FWKEXCEPTION("Server failed to open io, " << errno << ", on port " << port);
  }
}
// Single-threaded receiver loop: answers ADDR_REQUEST probes with this
// receiver's advertised address and queues every payload-bearing message
// for the inbound consumer. Runs until *m_run is cleared.
int32_t STReceiver::doTask() {
  auto msg = std::unique_ptr<UDPMessage>(new UDPMessage());
  try {
    while (*m_run) {
      msg->clear();
      ACE_Time_Value timeout(2);  // Timeout is relative time
      if (msg->receiveFrom(m_io, &timeout)) {
        if (msg->getCmd() == ADDR_REQUEST) {
          msg->clear();
          msg->setMessage(m_addr);
          msg->setCmd(ADDR_RESPONSE);
          msg->send(m_io);
        } else {
          if (msg->length() > 0) {
            // Transfer ownership of the message to the inbound queue and
            // start over with a fresh one.
            m_queues->putInbound(msg.release());
            msg.reset(new UDPMessage());
          }
        }
      }
    }
  } catch (FwkException& ex) {
    FWKSEVERE("STReceiver::doTask() caught exception: " << ex.what());
  } catch (...) {
    FWKSEVERE("STReceiver::doTask() caught unknown exception");
  }
  return 0;
}
// Bind the receiver's socket on m_basePort and record its advertised
// "host:port" string (host overridable via the GF_FQDN environment variable).
void STReceiver::initialize() {
  int32_t result = -1;
  ACE_INET_Addr addr(m_basePort, "localhost");
  result = m_io.open(addr);
  if (result == 0) {
    char hbuff[256];
    char* hst = &hbuff[0];
    char* fqdn = ACE_OS::getenv("GF_FQDN");
    if (fqdn) {
      hst = fqdn;
    } else {
      addr.get_host_name(hbuff, 255);
    }
    char buff[1024];
    sprintf(buff, "%s:%u", hst, m_basePort);
    m_addr = buff;
  }
  if (result < 0) {
    FWKEXCEPTION("STReceiver::initialize failed to open io, "
                 << errno << ", on port " << m_basePort);
  }
}
// Drain the outbound queue, transmitting each message and reclaiming it.
// Runs until *m_run is cleared; failures are logged, never propagated.
int32_t Responder::doTask() {
  try {
    while (*m_run) {
      UDPMessage* outbound = m_queues->getOutbound();
      if (!outbound) {
        continue;
      }
      outbound->send(*m_io);
      delete outbound;
    }
  } catch (FwkException& ex) {
    FWKSEVERE("Responder::doTask() caught exception: " << ex.what());
  } catch (...) {
    FWKSEVERE("Responder::doTask() caught unknown exception");
  }
  return 0;
}
void Responder::initialize() {
int32_t result = -1;
int32_t tries = 100;
while ((result < 0) && (--tries > 0)) {
uint16_t port = ++m_offset + 111 + m_basePort;
result = m_io->open(ACE_INET_Addr(port, "localhost"));
if (result < 0) {
FWKWARN("Server failed to open io, " << errno << ", on port " << port);
}
}
if (result < 0) {
FWKEXCEPTION("Server failed to open io, " << errno);
}
}
} // namespace testframework
} // namespace client
} // namespace geode
} // namespace apache
|
FruitsBerriesMelons123/CommonLibSSE
|
src/RE/TESWorldSpace.cpp
|
<gh_stars>1-10
#include "RE/TESWorldSpace.h"
namespace RE
{
	// Reports whether this worldspace has max-height data available.
	// NOTE(review): `maxHeightData != 0` reads like a null/zero check on a
	// pointer or handle — confirm the field's declared type before changing
	// the comparison style.
	bool TESWorldSpace::HasMaxHeightData() const
	{
		return maxHeightData != 0;
	}
}
|
larryRishi/algorithm004-05
|
Week 5/id_200/leetCode-1143-200.java
|
//1143. Longest Common Subsequence (最长公共子序列)
/**
 * LeetCode 1143 — Longest Common Subsequence.
 *
 * Bottom-up DP: dp[i][j] is the LCS length of text1[0..i) and text2[0..j).
 * O(m*n) time and space.
 *
 * FIXES: the original declared "lass Solution" (missing 'c' — does not
 * compile) and explicitly zero-filled the dp array, which Java already
 * guarantees for newly allocated int arrays.
 */
class Solution {
    public int longestCommonSubsequence(String text1, String text2) {
        int m = text1.length();
        int n = text2.length();
        int[][] dp = new int[m + 1][n + 1]; // zero-initialized by the JVM
        for (int i = 1; i <= m; i++) {
            for (int j = 1; j <= n; j++) {
                if (text1.charAt(i - 1) == text2.charAt(j - 1)) {
                    dp[i][j] = dp[i - 1][j - 1] + 1;
                } else {
                    dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);
                }
            }
        }
        return dp[m][n];
    }
}
|
Travmatth/21sh
|
libftprintf/libft/io/ft_putendl_fd.c
|
<reponame>Travmatth/21sh
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_putendl_fd.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: tmatthew <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2018/04/16 13:57:51 by tmatthew #+# #+# */
/* Updated: 2018/05/19 18:28:32 by tmatthew ### ########.fr */
/* */
/* ************************************************************************** */
#include "../libft.h"
/*
** ft_putendl_fd
** Description Outputs the string s to the file descriptor fd followed by a
** ’\n’.
** Param. #1 The string to output.
** Param. #2 The file descriptor.
** Return value None.
** Libc functions write(2).
*/
/*
** Write s to fd, then a single newline; silently ignores a NULL string.
*/
void	ft_putendl_fd(char const *s, int fd)
{
	size_t	len;

	if (!s)
		return ;
	len = ft_strlen(s);
	write(fd, s, len);
	write(fd, "\n", 1);
}
|
liu010203xiang/mygit
|
app/src/main/java/com/unipad/brain/home/iview/BaseView.java
|
package com.unipad.brain.home.iview;
import android.content.Context;
/**
 * Base contract implemented by views in the home module.
 * (Comments translated from Chinese.)
 */
public interface BaseView {
    Context getContext(); // obtain the context

    void request(String jsonStr, int flag); // delivers an API response result

    void showToast(String checkResult); // Toast the client-side login verification result

    /**
     * A request is in progress (with progress reporting).
     *
     * @param total
     *            total progress
     * @param current
     *            current progress
     * @param isUploading
     *            whether an upload is in progress
     */
    void loadingDialog(long total, long current, boolean isUploading);

    /**
     * @param isOpen
     *            true to open, false to close
     */
    void showDialog(boolean isOpen);
}
|
eflexsystems/ember-cp-validations
|
tests/unit/validators/inline-test.js
|
import { module, test } from 'qunit';
import { setupTest } from 'ember-qunit';
// Unit coverage for the `inline` validator: asserts that the user-supplied
// `validate` option is invoked with the validator instance as `this` and
// receives the other options (here `foo: 'bar'`).
module('Unit | Validator | inline', function (hooks) {
  setupTest(hooks);

  test('it works', function (assert) {
    assert.expect(2);

    const validator = this.owner.factoryFor('validator:inline').create({
      options: {
        foo: 'bar',
        validate(value, options) {
          // `this` must be the validator itself, not the options hash.
          assert.deepEqual(this, validator, 'Context is preserved');
          assert.deepEqual(options.foo, 'bar', 'It receives options');
        },
      },
    });

    validator.validate('foo', validator.options.toObject());
  });
});
|
myckhel/gfx-panel
|
resources/js/redux/surveyDetail/actions.js
|
import {
SURVEY_GET_DETAILS,
SURVEY_GET_DETAILS_SUCCESS,
SURVEY_GET_DETAILS_ERROR,
SURVEY_DELETE_QUESTION,
SURVEY_SAVE
} from 'Constants/actionTypes';
// Plain FSA-style action creators for the survey-detail slice.

export const getSurveyDetail = () => {
  return { type: SURVEY_GET_DETAILS };
};

export const getSurveyDetailSuccess = (items) => {
  return { type: SURVEY_GET_DETAILS_SUCCESS, payload: items };
};

export const getSurveyDetailError = (error) => {
  return { type: SURVEY_GET_DETAILS_ERROR, payload: error };
};

export const deleteSurveyQuestion = (questionId, survey) => {
  return { type: SURVEY_DELETE_QUESTION, payload: { questionId, survey } };
};

export const saveSurvey = (survey) => {
  return { type: SURVEY_SAVE, payload: survey };
};
|
bobvawter/iaido
|
pkg/testing/capture.go
|
// Copyright 2020 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package testing
import (
"context"
"io"
"log"
"net"
"github.com/bobvawter/iaido/pkg/loop"
)
// Capture is a trivial server that will accept connections and capture the bytes.
//
// Note that access to dest is not synchronized and may be written to
// from multiple goroutines simultaneously.
//
// The returned address and loop.Option let the caller wire the server into a
// loop; the listener is closed once ctx is done.
func Capture(ctx context.Context, dest io.Writer) (net.Addr, loop.Option, error) {
	l, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		return nil, nil, err
	}
	// Tie the listener's lifetime to the context.
	go func() {
		<-ctx.Done()
		_ = l.Close()
	}()
	tcp := l.(*net.TCPListener)
	opt := loop.WithHandler(tcp, func(ctx context.Context, conn net.Conn) error {
		log.Printf("recording data from %s", conn.RemoteAddr())
		// Copy until the peer closes; bytes land in dest unsynchronized.
		_, err := io.Copy(dest, conn)
		return err
	})
	return tcp.Addr(), opt, nil
}
|
rakhi2001/ecom7
|
Python3/1015.py
|
__________________________________________________________________________________________________
sample 32 ms submission
class Solution:
    def smallestRepunitDivByK(self, k: int) -> int:
        """Return the length of the smallest repunit (1, 11, 111, ...)
        divisible by k, or -1 when no repunit is.

        Works on remainders only: the length-L repunit's remainder mod k is
        (prev * 10 + 1) % k, so at most k lengths need checking (pigeonhole).
        """
        # A repunit ends in 1, so it can never be divisible by 2 or 5.
        if k % 2 == 0 or k % 5 == 0:
            return -1
        remainder = 0
        for length in range(1, k + 1):
            remainder = (remainder * 10 + 1) % k
            if remainder == 0:
                return length
        return -1
__________________________________________________________________________________________________
sample 13128 kb submission
class Solution:
    def smallestRepunitDivByK(self, K: int) -> int:
        """Return the length of the smallest repunit (1, 11, 111, ...)
        divisible by K, or -1 when no repunit is.

        Improvements over the original: also rejects multiples of 5 up
        front (a repunit ends in 1, so it is never divisible by 2 or 5),
        and bounds the search by K instead of a hard-coded 10**5 + 1 —
        by the pigeonhole principle a divisible repunit, if it exists,
        has length at most K. Results are unchanged; the search just
        stops sooner.
        """
        if K % 2 == 0 or K % 5 == 0:
            return -1
        remainder = 1 % K  # remainder of the length-1 repunit (handles K == 1)
        for length in range(1, K + 1):
            if remainder == 0:
                return length
            remainder = (remainder * 10 + 1) % K
        return -1
__________________________________________________________________________________________________
|
mikeydub/go-gallery
|
service/persist/mongodb/history.go
|
<reponame>mikeydub/go-gallery<gh_stars>1-10
package mongodb
import (
"context"
"github.com/mikeydub/go-gallery/service/persist"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
// historyColName is the MongoDB collection holding ownership histories.
var historyColName = "history"

// HistoryRepository is a repository that stores collections in a MongoDB database
type HistoryRepository struct {
	historiesStorage *storage // wrapper around the "history" collection
}

// NewHistoryRepository creates a new instance of the collection mongo repository
func NewHistoryRepository(mgoClient *mongo.Client) *HistoryRepository {
	return &HistoryRepository{
		historiesStorage: newStorage(mgoClient, 0, galleryDBName, historyColName),
	}
}

// Upsert inserts or replaces the ownership history document keyed by the
// given NFT id ("nft_id"), stamping pHistory.NFTID before writing.
func (h *HistoryRepository) Upsert(pCtx context.Context, pNFTID persist.DBID, pHistory persist.OwnershipHistory) error {
	pHistory.NFTID = pNFTID
	if _, err := h.historiesStorage.upsert(pCtx, bson.M{"nft_id": pNFTID}, pHistory); err != nil {
		return err
	}
	return nil
}
|
dyna-mis/Hilabeling
|
src/core/qgsdatadefinedsizelegend.cpp
|
/***************************************************************************
qgsdatadefinedsizelegend.cpp
--------------------------------------
Date : June 2017
Copyright : (C) 2017 by <NAME>
Email : <EMAIL>
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgsdatadefinedsizelegend.h"
#include "qgsproperty.h"
#include "qgspropertytransformer.h"
#include "qgssymbollayerutils.h"
#include "qgsxmlutils.h"
// Copy constructor: deep-copies the owned symbol and transformer (clone /
// new) so the two legends never share mutable state.
QgsDataDefinedSizeLegend::QgsDataDefinedSizeLegend( const QgsDataDefinedSizeLegend &other )
  : mType( other.mType )
  , mTitleLabel( other.mTitleLabel )
  , mSizeClasses( other.mSizeClasses )
  , mSymbol( other.mSymbol.get() ? other.mSymbol->clone() : nullptr )
  , mSizeScaleTransformer( other.mSizeScaleTransformer.get() ? new QgsSizeScaleTransformer( *other.mSizeScaleTransformer ) : nullptr )
  , mVAlign( other.mVAlign )
  , mFont( other.mFont )
  , mTextColor( other.mTextColor )
  , mTextAlignment( other.mTextAlignment )
{
}
// Copy assignment: mirrors the copy constructor's deep copies; guarded
// against self-assignment.
QgsDataDefinedSizeLegend &QgsDataDefinedSizeLegend::operator=( const QgsDataDefinedSizeLegend &other )
{
  if ( this != &other )
  {
    mType = other.mType;
    mTitleLabel = other.mTitleLabel;
    mSizeClasses = other.mSizeClasses;
    mSymbol.reset( other.mSymbol.get() ? other.mSymbol->clone() : nullptr );
    mSizeScaleTransformer.reset( other.mSizeScaleTransformer.get() ? new QgsSizeScaleTransformer( *other.mSizeScaleTransformer ) : nullptr );
    mVAlign = other.mVAlign;
    mFont = other.mFont;
    mTextColor = other.mTextColor;
    mTextAlignment = other.mTextAlignment;
  }
  return *this;
}
// Sets the prototype marker symbol used for legend entries; takes ownership.
void QgsDataDefinedSizeLegend::setSymbol( QgsMarkerSymbol *symbol )
{
  mSymbol.reset( symbol );
}

// Returns the prototype marker symbol; ownership stays with the legend.
QgsMarkerSymbol *QgsDataDefinedSizeLegend::symbol() const
{
  return mSymbol.get();
}

// Sets the value-to-size transformer; takes ownership (null allowed).
void QgsDataDefinedSizeLegend::setSizeScaleTransformer( QgsSizeScaleTransformer *transformer )
{
  mSizeScaleTransformer.reset( transformer );
}

// Returns the size-scale transformer (may be null); ownership retained.
QgsSizeScaleTransformer *QgsDataDefinedSizeLegend::sizeScaleTransformer() const
{
  return mSizeScaleTransformer.get();
}
// Configure this legend from a layer's marker symbol and its data-defined
// size property: clone the symbol (with the data-defined size stripped),
// adopt the property's size-scale transformer, default the title to the
// driving expression/field, and auto-generate size classes when none exist.
// FIX: removed a redundant mSizeClasses.clear() — the branch is only entered
// when mSizeClasses.isEmpty() is already true.
void QgsDataDefinedSizeLegend::updateFromSymbolAndProperty( const QgsMarkerSymbol *symbol, const QgsProperty &ddSize )
{
  mSymbol.reset( symbol->clone() );
  mSymbol->setDataDefinedSize( QgsProperty() ); // original symbol may have had data-defined size associated

  const QgsSizeScaleTransformer *sizeTransformer = dynamic_cast< const QgsSizeScaleTransformer * >( ddSize.transformer() );
  mSizeScaleTransformer.reset( sizeTransformer ? sizeTransformer->clone() : nullptr );

  if ( mTitleLabel.isEmpty() )
    mTitleLabel = ddSize.propertyType() == QgsProperty::ExpressionBasedProperty ? ddSize.expressionString() : ddSize.field();

  // automatically generate classes if no classes are defined
  if ( sizeTransformer && mSizeClasses.isEmpty() )
  {
    const auto prettyBreaks { QgsSymbolLayerUtils::prettyBreaks( sizeTransformer->minValue(), sizeTransformer->maxValue(), 4 ) };
    for ( double v : prettyBreaks )
    {
      mSizeClasses << SizeClass( v, QString::number( v ) );
    }
  }
}
// Builds the legend entries: an optional title row, then either one item
// embedding these settings (collapsed mode) or one sized item per size
// class (separated mode).
QgsLegendSymbolList QgsDataDefinedSizeLegend::legendSymbolList() const
{
  QgsLegendSymbolList lst;
  if ( !mTitleLabel.isEmpty() )
  {
    QgsLegendSymbolItem title( nullptr, mTitleLabel, QString() );
    lst << title;
  }

  if ( mType == LegendCollapsed )
  {
    // Single placeholder item; the renderer draws the collapsed legend from
    // the embedded copy of these settings.
    QgsLegendSymbolItem i;
    i.setDataDefinedSizeLegendSettings( new QgsDataDefinedSizeLegend( *this ) );
    lst << i;
    return lst;
  }
  else if ( mType == LegendSeparated )
  {
    const auto constMSizeClasses = mSizeClasses;
    for ( const SizeClass &cl : constMSizeClasses )
    {
      QgsLegendSymbolItem si( mSymbol.get(), cl.label, QString() );
      // The item clones the symbol; resize the clone to this class's size.
      QgsMarkerSymbol *s = static_cast<QgsMarkerSymbol *>( si.symbol() );
      s->setSize( cl.size );
      lst << si;
    }
  }
  return lst;
}
// Render — or only measure, when the context has no painter — the collapsed
// legend: nested symbols of decreasing size with callout lines to their
// labels. When non-null, outputSize receives the total rendered size and
// labelXOffset the x where labels start. No-op (zeroed outputs) unless the
// legend is collapsed, has classes and a symbol.
void QgsDataDefinedSizeLegend::drawCollapsedLegend( QgsRenderContext &context, QSize *outputSize, int *labelXOffset ) const
{
  if ( mType != LegendCollapsed || mSizeClasses.isEmpty() || !mSymbol )
  {
    if ( outputSize )
      *outputSize = QSize();
    if ( labelXOffset )
      *labelXOffset = 0;
    return;
  }

  // parameters that could be configurable
  double hLengthLineMM = 2;     // extra horizontal space to be occupied by callout line
  double hSpaceLineTextMM = 1;  // horizontal space between end of the line and start of the text

  std::unique_ptr<QgsMarkerSymbol> s( mSymbol->clone() );

  QList<SizeClass> classes = mSizeClasses;

  // optionally scale size values if transformer is defined
  if ( mSizeScaleTransformer )
  {
    for ( SizeClass &cls : classes )
      cls.size = mSizeScaleTransformer->size( cls.size );
  }

  // make sure we draw bigger symbols first
  std::sort( classes.begin(), classes.end(), []( const SizeClass & a, const SizeClass & b ) { return a.size > b.size; } );

  int hLengthLine = std::round( context.convertToPainterUnits( hLengthLineMM, QgsUnitTypes::RenderMillimeters ) );
  int hSpaceLineText = std::round( context.convertToPainterUnits( hSpaceLineTextMM, QgsUnitTypes::RenderMillimeters ) );
  int dpm = std::round( context.scaleFactor() * 1000 );  // scale factor = dots per millimeter

  // get font metrics - we need a temporary image just to get the metrics right for the given DPI
  QImage tmpImg( QSize( 1, 1 ), QImage::Format_ARGB32_Premultiplied );
  tmpImg.setDotsPerMeterX( dpm );
  tmpImg.setDotsPerMeterY( dpm );
  QFontMetrics fm( mFont, &tmpImg );
  int textHeight = fm.height();
  int leading = fm.leading();
  int minTextDistY = textHeight + leading;

  //
  // determine layout of the rendered elements
  //

  // find out how wide the text will be
  int maxTextWidth = 0;
  for ( const SizeClass &c : qgis::as_const( classes ) )
  {
    // NOTE(review): QFontMetrics::width() is deprecated in newer Qt in
    // favour of horizontalAdvance() — keep in sync with the Qt baseline.
    int w = fm.width( c.label );
    if ( w > maxTextWidth )
      maxTextWidth = w;
  }
  // add extra width needed to handle varying rendering of font weight
  maxTextWidth += 1;

  // find out size of the largest symbol
  double largestSize = classes.at( 0 ).size;
  int outputLargestSize = std::round( context.convertToPainterUnits( largestSize, s->sizeUnit(), s->sizeMapUnitScale() ) );

  // find out top Y coordinate for individual symbol sizes
  QList<int> symbolTopY;
  for ( const SizeClass &c : qgis::as_const( classes ) )
  {
    int outputSymbolSize = std::round( context.convertToPainterUnits( c.size, s->sizeUnit(), s->sizeMapUnitScale() ) );
    switch ( mVAlign )
    {
      case AlignCenter:
        symbolTopY << std::round( outputLargestSize / 2 - outputSymbolSize / 2 );
        break;
      case AlignBottom:
        symbolTopY << std::round( outputLargestSize - outputSymbolSize );
        break;
    }
  }

  // determine Y coordinate of texts: ideally they should be at the same level as symbolTopY
  // but we need to avoid overlapping texts, so adjust the vertical positions
  int middleIndex = 0;  // classes.count() / 2;  // will get the ideal position
  QList<int> textCenterY;
  int lastY = symbolTopY[middleIndex];
  textCenterY << lastY;
  for ( int i = middleIndex + 1; i < classes.count(); ++i )
  {
    int symbolY = symbolTopY[i];
    if ( symbolY - lastY < minTextDistY )
      symbolY = lastY + minTextDistY;  // push the label down to avoid overlap
    textCenterY << symbolY;
    lastY = symbolY;
  }

  int textTopY = textCenterY.first() - textHeight / 2;
  int textBottomY = textCenterY.last() + textHeight / 2;
  int totalTextHeight = textBottomY - textTopY;

  int fullWidth = outputLargestSize + hLengthLine + hSpaceLineText + maxTextWidth;
  int fullHeight = std::max( static_cast< int >( std::round( outputLargestSize ) ) - textTopY, totalTextHeight );

  if ( outputSize )
    *outputSize = QSize( fullWidth, fullHeight );
  if ( labelXOffset )
    *labelXOffset = outputLargestSize + hLengthLine + hSpaceLineText;

  if ( !context.painter() )
    return;  // only layout

  //
  // drawing
  //

  QPainter *p = context.painter();
  p->save();
  p->translate( 0, -textTopY );

  // draw symbols first so that they do not cover
  for ( const SizeClass &c : qgis::as_const( classes ) )
  {
    s->setSize( c.size );
    int outputSymbolSize = std::round( context.convertToPainterUnits( c.size, s->sizeUnit(), s->sizeMapUnitScale() ) );
    double tx = ( outputLargestSize - outputSymbolSize ) / 2;
    p->save();
    switch ( mVAlign )
    {
      case AlignCenter:
        p->translate( tx, ( outputLargestSize - outputSymbolSize ) / 2 );
        break;
      case AlignBottom:
        p->translate( tx, outputLargestSize - outputSymbolSize );
        break;
    }
    s->drawPreviewIcon( p, QSize( outputSymbolSize, outputSymbolSize ) );
    p->restore();
  }

  p->setPen( mTextColor );
  p->setFont( mFont );

  int i = 0;
  for ( const SizeClass &c : qgis::as_const( classes ) )
  {
    // line from symbol to the text
    p->drawLine( outputLargestSize / 2, symbolTopY[i], outputLargestSize + hLengthLine, textCenterY[i] );

    // draw label
    QRect rect( outputLargestSize + hLengthLine + hSpaceLineText, textCenterY[i] - textHeight / 2,
                maxTextWidth, textHeight );
    p->drawText( rect, mTextAlignment, c.label );
    i++;
  }
  p->restore();
}
// Renders the collapsed legend into a padded, antialiased QImage at the
// context's scale factor; returns a null QImage when this legend is not a
// drawable collapsed legend. The context's painter is swapped in and
// restored around the draw.
QImage QgsDataDefinedSizeLegend::collapsedLegendImage( QgsRenderContext &context, const QColor &backgroundColor, double paddingMM ) const
{
  if ( mType != LegendCollapsed || mSizeClasses.isEmpty() || !mSymbol )
    return QImage();

  // find out the size first
  QSize contentSize;
  drawCollapsedLegend( context, &contentSize );

  int padding = std::round( context.convertToPainterUnits( paddingMM, QgsUnitTypes::RenderMillimeters ) );
  int dpm = std::round( context.scaleFactor() * 1000 );  // scale factor = dots per millimeter

  QImage img( contentSize.width() + padding * 2, contentSize.height() + padding * 2, QImage::Format_ARGB32_Premultiplied );
  img.setDotsPerMeterX( dpm );
  img.setDotsPerMeterY( dpm );
  img.fill( backgroundColor );

  QPainter painter( &img );
  painter.setRenderHint( QPainter::Antialiasing, true );
  painter.translate( padding, padding );  // so we do not need to care about padding at all

  // now do the rendering
  QPainter *oldPainter = context.painter();
  context.setPainter( &painter );
  drawCollapsedLegend( context );
  context.setPainter( oldPainter );

  painter.end();
  return img;
}
// Factory: deserializes legend settings from the given DOM element (the
// counterpart of writeXml()). Returns nullptr for a null element; otherwise
// the caller takes ownership of the returned object.
QgsDataDefinedSizeLegend *QgsDataDefinedSizeLegend::readXml( const QDomElement &elem, const QgsReadWriteContext &context )
{
  if ( elem.isNull() )
    return nullptr;
  QgsDataDefinedSizeLegend *ddsLegend = new QgsDataDefinedSizeLegend;
  ddsLegend->setLegendType( elem.attribute( QStringLiteral( "type" ) ) == QLatin1String( "collapsed" ) ? LegendCollapsed : LegendSeparated );
  ddsLegend->setVerticalAlignment( elem.attribute( QStringLiteral( "valign" ) ) == QLatin1String( "center" ) ? AlignCenter : AlignBottom );
  ddsLegend->setTitle( elem.attribute( QStringLiteral( "title" ) ) );

  QDomElement elemSymbol = elem.firstChildElement( QStringLiteral( "symbol" ) );
  if ( !elemSymbol.isNull() )
  {
    ddsLegend->setSymbol( QgsSymbolLayerUtils::loadSymbol<QgsMarkerSymbol>( elemSymbol, context ) );
  }

  QgsSizeScaleTransformer *transformer = nullptr;
  QDomElement elemTransformer = elem.firstChildElement( QStringLiteral( "transformer" ) );
  if ( !elemTransformer.isNull() )
  {
    transformer = new QgsSizeScaleTransformer;
    transformer->loadVariant( QgsXmlUtils::readVariant( elemTransformer ) );
  }
  // setSizeScaleTransformer takes ownership (null clears any existing one).
  ddsLegend->setSizeScaleTransformer( transformer );

  QDomElement elemTextStyle = elem.firstChildElement( QStringLiteral( "text-style" ) );
  if ( !elemTextStyle.isNull() )
  {
    QDomElement elemFont = elemTextStyle.firstChildElement( QStringLiteral( "font" ) );
    if ( !elemFont.isNull() )
    {
      ddsLegend->setFont( QFont( elemFont.attribute( QStringLiteral( "family" ) ), elemFont.attribute( QStringLiteral( "size" ) ).toInt(),
                                 elemFont.attribute( QStringLiteral( "weight" ) ).toInt(), elemFont.attribute( QStringLiteral( "italic" ) ).toInt() ) );
    }
    ddsLegend->setTextColor( QgsSymbolLayerUtils::decodeColor( elemTextStyle.attribute( QStringLiteral( "color" ) ) ) );
    ddsLegend->setTextAlignment( static_cast<Qt::AlignmentFlag>( elemTextStyle.attribute( QStringLiteral( "align" ) ).toInt() ) );
  }

  QDomElement elemClasses = elem.firstChildElement( QStringLiteral( "classes" ) );
  if ( !elemClasses.isNull() )
  {
    QList<SizeClass> classes;
    QDomElement elemClass = elemClasses.firstChildElement( QStringLiteral( "class" ) );
    while ( !elemClass.isNull() )
    {
      classes << SizeClass( elemClass.attribute( QStringLiteral( "size" ) ).toDouble(), elemClass.attribute( QStringLiteral( "label" ) ) );
      elemClass = elemClass.nextSiblingElement();
    }
    ddsLegend->setClasses( classes );
  }
  return ddsLegend;
}
// Serializes the legend settings as attributes and child elements of `elem`
// (the counterpart of readXml()): type/valign/title attributes, then the
// symbol, transformer, text style and size classes as children.
void QgsDataDefinedSizeLegend::writeXml( QDomElement &elem, const QgsReadWriteContext &context ) const
{
  QDomDocument doc = elem.ownerDocument();

  elem.setAttribute( QStringLiteral( "type" ), mType == LegendCollapsed ? "collapsed" : "separated" );
  elem.setAttribute( QStringLiteral( "valign" ), mVAlign == AlignCenter ? "center" : "bottom" );
  elem.setAttribute( QStringLiteral( "title" ), mTitleLabel );

  if ( mSymbol )
  {
    QDomElement elemSymbol = QgsSymbolLayerUtils::saveSymbol( QStringLiteral( "source" ), mSymbol.get(), doc, context );
    elem.appendChild( elemSymbol );
  }

  if ( mSizeScaleTransformer )
  {
    QDomElement elemTransformer = QgsXmlUtils::writeVariant( mSizeScaleTransformer->toVariant(), doc );
    elemTransformer.setTagName( QStringLiteral( "transformer" ) );
    elem.appendChild( elemTransformer );
  }

  QDomElement elemFont = doc.createElement( QStringLiteral( "font" ) );
  elemFont.setAttribute( QStringLiteral( "family" ), mFont.family() );
  elemFont.setAttribute( QStringLiteral( "size" ), mFont.pointSize() );
  elemFont.setAttribute( QStringLiteral( "weight" ), mFont.weight() );
  elemFont.setAttribute( QStringLiteral( "italic" ), mFont.italic() );

  QDomElement elemTextStyle = doc.createElement( QStringLiteral( "text-style" ) );
  elemTextStyle.setAttribute( QStringLiteral( "color" ), QgsSymbolLayerUtils::encodeColor( mTextColor ) );
  elemTextStyle.setAttribute( QStringLiteral( "align" ), static_cast<int>( mTextAlignment ) );
  elemTextStyle.appendChild( elemFont );
  elem.appendChild( elemTextStyle );

  if ( !mSizeClasses.isEmpty() )
  {
    QDomElement elemClasses = doc.createElement( QStringLiteral( "classes" ) );
    for ( const SizeClass &sc : qgis::as_const( mSizeClasses ) )
    {
      QDomElement elemClass = doc.createElement( QStringLiteral( "class" ) );
      elemClass.setAttribute( QStringLiteral( "size" ), sc.size );
      elemClass.setAttribute( QStringLiteral( "label" ), sc.label );
      elemClasses.appendChild( elemClass );
    }
    elem.appendChild( elemClasses );
  }
}
|
heweixi/overlord
|
platform/api/model/config.go
|
<reponame>heweixi/overlord<gh_stars>1-10
package model
import (
"fmt"
"overlord/pkg/log"
)
// ServerConfig is apiserver's config, decoded from TOML.
type ServerConfig struct {
	Listen   string                `toml:"listen"`
	Etcd     string                `toml:"etcd"`
	Versions []*VersionConfig      `toml:"versions"`
	Monitor  *MonitorConfig        `toml:"monitor"`
	Cluster  *DefaultClusterConfig `toml:"cluster"`
	*log.Config
}

// DefaultClusterConfig is the config used to write into cluster
type DefaultClusterConfig struct {
	DialTimeout   int  `toml:"dial_timeout"`
	ReadTimeout   int  `toml:"read_timeout"`
	WriteTimeout  int  `toml:"write_timeout"`
	NodeConns     int  `toml:"node_connections"`
	PingFailLimit int  `toml:"ping_fail_limit"`
	PingAutoEject bool `toml:"ping_auto_eject"`
}

// MonitorConfig types
type MonitorConfig struct {
	URL     string `toml:"url"`
	Panel   string `toml:"panel"`
	NameVar string `toml:"name_var"`
	// in url is `orgId`
	OrgID int `toml:"org_id"`
}

// Href builds the monitoring dashboard URL for the given cluster name,
// e.g. "<url>/<panel>?orgId=<org>&var-<name_var>=<cname>".
func (mc *MonitorConfig) Href(cname string) string {
	return fmt.Sprintf("%s/%s?orgId=%d&var-%s=%s", mc.URL, mc.Panel, mc.OrgID, mc.NameVar, cname)
}

// VersionConfig is the config for used version
type VersionConfig struct {
	CacheType string   `toml:"cache_type"`
	Versions  []string `toml:"versions"`
}
|
sofimrtn/Gestion_Hospital_IPS
|
IPS-GestionHospital/src/persistence/DataCita.java
|
package persistence;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import alb.util.jdbc.Jdbc;
import business.dto.CitaDto;
public class DataCita extends DataManager {
    // SQL used by this DAO. NOTE(review): SQL_UPDATE_CITA writes columns
    // (procedimientos, antecedentes, prescripciones) that SQL_INSERT_CITA
    // never sets — presumably they are populated only via updates; confirm
    // against the schema.
    private static final String SQL_SELECT_CITA = "Select idcita, urgente, idpaciente, idempleado, sala, fechainicio, fechafin from cita";
    private static final String SQL_SELECT_CITA_BY_IDEMPLEADO = "Select idcita, urgente, idpaciente, idempleado, sala, fechainicio, fechafin from cita where idempleado = ?";
    private static final String SQL_INSERT_CITA = "Insert into cita (urgente, idpaciente, idempleado, sala, fechainicio, fechafin)" + "values (?, ?, ?, ?, ?, ?)";
    private static final String SQL_DELETE_CITA = "Delete from cita where idcita=?";
    private static final String SQL_UPDATE_CITA = "Update cita set urgente=?, fechainicio=?,"
            + " fechafin=?, idpaciente=?, idempleado=?, sala=?,procedimientos=?, antecedentes=?, prescripciones=? where idcita=?";
    private static final String SQL_UPDATE_PROC = "Update cita set procedimientos=? where idcita=?";
    private static final String SQL_UPDATE_SINTOMAS = "Update cita set sintomas=? where idcita=?";
    //(VIC) TIRO DEL UPDATE_CITA-->
    // private static final String SQL_UPDATE_PRESCRIPCIONES= "Update cita set prescripciones=? where idcita=?";
    // private static final String SQL_UPDATE_ANTECEDENTES= "Update cita set antecedentes=? where idcita=?";
    private static final String SQL_SELECT_CITA_BY_ID = "Select * from cita where idcita=?";
    private static final String SQL_SELECT_CITA_BY_IDPACIENTE = "Select * from cita where idpaciente=?";
public List<CitaDto> list() {
List<CitaDto> citas = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_SELECT_CITA);
rs = st.executeQuery();
citas = new ArrayList<>();
while (rs.next()) {
CitaDto cita = new CitaDto();
cita.id = rs.getInt(1);
cita.urgente = rs.getBoolean(2);
cita.idPaciente = rs.getInt(3);
cita.idEmpleado = rs.getInt(4);
cita.sala = rs.getString(5);
cita.fechainicio = rs.getTimestamp(6);
cita.fechafin = rs.getTimestamp(7);
citas.add(cita);
}
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
return citas;
}
public List<CitaDto> listCitasByidEmpleado(int id) {
List<CitaDto> citas = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_SELECT_CITA_BY_IDEMPLEADO);
st.setInt(1, id);
rs = st.executeQuery();
citas = new ArrayList<>();
while (rs.next()) {
CitaDto cita = new CitaDto();
cita.id = rs.getInt(1);
cita.urgente = rs.getBoolean(2);
cita.idPaciente = rs.getInt(3);
cita.idEmpleado = rs.getInt(4);
cita.sala = rs.getString(5);
cita.fechainicio = rs.getTimestamp(6);
cita.fechafin = rs.getTimestamp(7);
citas.add(cita);
}
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
return citas;
}
public void add(CitaDto cita) {
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_INSERT_CITA);
st.setBoolean(1, cita.urgente);
st.setInt(2, cita.idPaciente);
st.setInt(3, cita.idEmpleado);
st.setString(4, cita.sala);
st.setTimestamp(5, new Timestamp(cita.fechainicio.getTime()));
st.setTimestamp(6, new Timestamp(cita.fechafin.getTime()));
st.executeUpdate();
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
}
public void delete(CitaDto cita) {
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_DELETE_CITA);
st.setInt(1, cita.id);
st.executeUpdate();
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
}
public void update(CitaDto cita) {
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_UPDATE_CITA);
st.setBoolean(1, cita.urgente);
st.setTimestamp(2, new Timestamp(cita.fechainicio.getTime()));
st.setTimestamp(3, new Timestamp(cita.fechafin.getTime()));
st.setInt(4, cita.idPaciente);
st.setInt(5, cita.idEmpleado);
st.setString(6, cita.sala);
st.setString(7, cita.procedimientos);
st.setString(8, cita.antecedentes);
st.setString(9,cita.prescripcion);
st.setInt(10, cita.id);
st.executeUpdate();
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
}
public void updateProc(CitaDto cita) {
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_UPDATE_PROC);
st.setString(1, cita.procedimientos);
st.setInt(2,cita.id);
st.executeUpdate();
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
}
public void updateSintomas(CitaDto cita) {
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_UPDATE_SINTOMAS);
st.setString(1, cita.sintomas);
st.setInt(2,cita.id);
st.executeUpdate();
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
}
public List<CitaDto> listCitaById(int id) {
List<CitaDto> citas = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_SELECT_CITA_BY_ID);
st.setInt(1, id);
rs = st.executeQuery();
citas = new ArrayList<>();
while (rs.next()) {
CitaDto cita = new CitaDto();
cita.id = rs.getInt(1);
cita.urgente = rs.getBoolean(2);
cita.idPaciente = rs.getInt(3);
cita.idEmpleado = rs.getInt(4);
cita.sala = rs.getString(5);
cita.fechainicio = rs.getTimestamp(6);
cita.fechafin = rs.getTimestamp(7);
cita.antecedentes =rs.getString(8);
cita.sintomas= rs.getString(9);
cita.prescripcion= rs.getString(10);
cita.procedimientos=rs.getString(11);
citas.add(cita);
}
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
return citas;
}
public List<CitaDto> listCitasByIdPaciente(int id) {
List<CitaDto> citas = null;
PreparedStatement st = null;
ResultSet rs = null;
try {
st = getConexion().prepareStatement(SQL_SELECT_CITA_BY_IDPACIENTE);
st.setInt(1, id);
rs = st.executeQuery();
citas = new ArrayList<>();
while (rs.next()) {
CitaDto cita = new CitaDto();
cita.id = rs.getInt(1);
cita.urgente = rs.getBoolean(2);
cita.idPaciente = rs.getInt(3);
cita.idEmpleado = rs.getInt(4);
cita.sala = rs.getString(5);
cita.fechainicio = rs.getTimestamp(6);
cita.fechafin = rs.getTimestamp(7);
cita.antecedentes =rs.getString(8);
cita.sintomas= rs.getString(9);
cita.prescripcion= rs.getString(10);
cita.procedimientos=rs.getString(11);
citas.add(cita);
}
} catch (SQLException e) {
e.printStackTrace();
} finally {
Jdbc.close(rs, st);
}
return citas;
}
}
|
lebretr/pocNG2
|
ressources/scripts/postinstall.js
|
/**
 * Runs ressources/scripts/postinstall.sh, but only when the OS is Linux
 * and either NODE_SERVEUR === "TRUE" or NODE_ENV === "production".
 * Otherwise prints instructions explaining how to enable it.
 */
var osType = require('os').type();
var nodeServeur = process.env.NODE_SERVEUR;
var nodeEnv = process.env.NODE_ENV;

console.log('OS: ' + osType);
console.log('nodeServeur: ' + nodeServeur);

if (osType === 'Linux' && (nodeServeur === "TRUE" || nodeEnv === "production")) {
    console.log('Droit execution pour ./ressources/scripts/postinstall.sh');
    // rwxrw-r--. Fix: 0o764 — the legacy octal literal 0764 is a syntax
    // error in strict mode / ES modules.
    require('fs').chmodSync('./ressources/scripts/postinstall.sh', 0o764);
    console.log('Execution de ./ressources/scripts/postinstall.sh');
    require('child_process').execFile('./ressources/scripts/postinstall.sh', function (error, stdout, stderr) {
        console.log(stdout);
        console.error(stderr);
        if (error) {
            // Fix: throw a real Error (the original threw a bare string,
            // which loses the stack trace and the underlying cause).
            throw new Error('postinstall.sh failed: ' + error.message);
        }
    });
} else {
    console.log('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!');
    console.log('ATTENTION: SI CET ENVIRONNEMENT N\'EST PAS UN SERVEUR ALORS VOUS N\'AVEZ RIEN A FAIRE');
    console.log('DANS LE CAS CONTRAIRE, VEUILLEZ POSITIONNER LA VARIABLE D\'ENVIRONNEMENT NODE_ENV À production POUR EXECUTER LE SCRIPT postinstall.sh');
    console.log('export NODE_ENV="production"');
    console.log('AJOUTER CETTE LIGNE DE COMMANDE DANS LE FICHIER .profile DE VOTRE HOME POUR RENDRE LA MODIFICATION PERMANENTE');
    console.log('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!');
}
|
77loopin/ray
|
python/ray/experimental/workflow/tests/test_workflow_manager.py
|
<reponame>77loopin/ray
import time
import pytest
import ray
from ray.experimental import workflow
from filelock import FileLock
def test_workflow_manager_simple(workflow_start_regular):
    """With no workflows started, listing is empty and unknown ids raise."""
    assert workflow.list_all() == []
    with pytest.raises(ValueError):
        workflow.get_status("X")
def test_workflow_manager(workflow_start_regular, tmp_path):
    """End-to-end check of workflow listing, status, resume and cancel.

    Launches 100 lock-gated workflows; even-numbered ones fail while a
    flag file exists, so exactly half fail on the first run. Then verifies
    list_all()/get_status() across RUNNING/FAILED/SUCCESSFUL/CANCELED,
    plus resume(), cancel() and resume_all().
    """
    # For sync between jobs
    tmp_file = str(tmp_path / "lock")
    lock = FileLock(tmp_file)
    lock.acquire()
    # For sync between jobs
    flag_file = tmp_path / "flag"
    flag_file.touch()

    @workflow.step
    def long_running(i):
        # Blocks until the outer lock is released; even-numbered steps
        # fail while the flag file exists (reproducible until unlink()).
        lock = FileLock(tmp_file)
        with lock.acquire():
            pass
        if i % 2 == 0:
            if flag_file.exists():
                raise ValueError()
        return 100

    outputs = [
        long_running.step(i).run_async(workflow_id=str(i)) for i in range(100)
    ]
    # Test list all, it should list all jobs running
    all_tasks = workflow.list_all()
    assert len(all_tasks) == 100
    all_tasks_running = workflow.list_all(workflow.RUNNING)
    assert dict(all_tasks) == dict(all_tasks_running)
    assert workflow.get_status("0") == "RUNNING"

    # Release lock and make sure all tasks finished
    lock.release()
    for o in outputs:
        try:
            r = ray.get(o)
        except Exception:
            # Even-numbered workflows are expected to fail here.
            continue
        assert 100 == r
    all_tasks_running = workflow.list_all(workflow.WorkflowStatus.RUNNING)
    assert len(all_tasks_running) == 0

    # Half of them failed and half succeed
    failed_jobs = workflow.list_all("FAILED")
    assert len(failed_jobs) == 50
    finished_jobs = workflow.list_all("SUCCESSFUL")
    assert len(finished_jobs) == 50
    all_tasks_status = workflow.list_all({
        workflow.WorkflowStatus.SUCCESSFUL, workflow.WorkflowStatus.FAILED,
        workflow.WorkflowStatus.RUNNING
    })
    assert len(all_tasks_status) == 100
    assert failed_jobs == [(k, v) for (k, v) in all_tasks_status
                           if v == workflow.WorkflowStatus.FAILED]
    assert finished_jobs == [(k, v) for (k, v) in all_tasks_status
                             if v == workflow.WorkflowStatus.SUCCESSFUL]

    # Test get_status
    assert workflow.get_status("0") == "FAILED"
    assert workflow.get_status("1") == "SUCCESSFUL"
    lock.acquire()
    r = workflow.resume("0")
    assert workflow.get_status("0") == workflow.RUNNING
    # Remove the failure trigger so the resumed workflow can succeed.
    flag_file.unlink()
    lock.release()
    assert 100 == ray.get(r)
    assert workflow.get_status("0") == workflow.SUCCESSFUL

    # Test cancel
    lock.acquire()
    workflow.resume("2")
    assert workflow.get_status("2") == workflow.RUNNING
    workflow.cancel("2")
    assert workflow.get_status("2") == workflow.CANCELED

    # Now resume_all
    # 48 = 50 originally-failed minus "0" (already resumed) and "2" (canceled).
    resumed = workflow.resume_all(include_failed=True)
    assert len(resumed) == 48
    lock.release()
    assert [ray.get(o) for (_, o) in resumed] == [100] * 48
@pytest.mark.parametrize(
    "workflow_start_regular", [{
        "num_cpus": 4
    }], indirect=True)
def test_actor_manager(workflow_start_regular, tmp_path):
    """Virtual-actor status reporting: readonly calls leave the workflow
    SUCCESSFUL, while writer calls flip it to RUNNING until they finish."""
    lock_file = tmp_path / "lock"

    @workflow.virtual_actor
    class LockCounter:
        # Counter whose every access is gated on a FileLock, so the test
        # can hold the actor "busy" from the outside.
        def __init__(self, lck):
            self.counter = 0
            self.lck = lck

        @workflow.virtual_actor.readonly
        def val(self):
            with FileLock(self.lck):
                return self.counter

        def incr(self):
            with FileLock(self.lck):
                self.counter += 1
            return self.counter

        # Explicit (de)serialization of actor state for checkpointing.
        def __getstate__(self):
            return (self.lck, self.counter)

        def __setstate__(self, state):
            self.lck, self.counter = state

    actor = LockCounter.get_or_create("counter", str(lock_file))
    ray.get(actor.ready())
    lock = FileLock(lock_file)
    lock.acquire()
    assert [("counter", workflow.SUCCESSFUL)] == workflow.list_all()
    v = actor.val.run_async()
    # Readonly function won't make the workflow running
    assert [("counter", workflow.SUCCESSFUL)] == workflow.list_all()
    lock.release()
    assert ray.get(v) == 0
    # Writer function would make the workflow running
    lock.acquire()
    v = actor.incr.run_async()
    time.sleep(2)  # give the writer time to start and block on the lock
    assert [("counter", workflow.RUNNING)] == workflow.list_all()
    lock.release()
    assert ray.get(v) == 1
if __name__ == "__main__":
    # Allow running this test module directly: delegate to pytest and
    # propagate its exit code.
    import sys

    exit_code = pytest.main(["-v", __file__])
    sys.exit(exit_code)
|
moutainhigh/primo
|
primo-generator-mock-test/primo-generator-mock-test-demo/src/main/java/wiki/primo/generator/primogeneratormocktestdemo/mapper/UserTemplateMapper.java
|
<reponame>moutainhigh/primo
package wiki.primo.generator.primogeneratormocktestdemo.mapper;
import wiki.primo.generator.primogeneratormocktestdemo.entity.UserTemplate;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * MyBatis-Plus mapper interface for {@code UserTemplate}; inherits the
 * generic CRUD operations from {@code BaseMapper}, so no methods are
 * declared here.
 * </p>
 *
 * @author chenhx
 * @since 2020-12-29
 */
public interface UserTemplateMapper extends BaseMapper<UserTemplate> {
}
|
uberpixel/Rayne
|
Modules/Newton/RNNewtonInit.cpp
|
<filename>Modules/Newton/RNNewtonInit.cpp
//
//  RNNewtonInit.cpp
//  Rayne-Newton
//
//  Copyright 2018 by Überpixel. All rights reserved.
//  Unauthorized use is punishable by torture, mutilation, and vivisection.
//

#include <Rayne.h>

// Registers this plugin with the Rayne engine under its reverse-DNS
// identifier. NOTE(review): RNModule is presumably Rayne's module-export
// macro that emits the loader entry points — confirm against Rayne headers.
RNModule(RayneNewton, "net.uberpixel.rayne.newton")
|
pulibrary/figgy
|
app/indexers/ephemera_box_indexer.rb
|
<gh_stars>10-100
# frozen_string_literal: true
# Contributes Solr title fields for EphemeraBox resources; all other
# resource types index nothing from this indexer.
class EphemeraBoxIndexer
  delegate :query_service, to: :metadata_adapter
  attr_reader :resource

  def initialize(resource:)
    @resource = resource
  end

  # @return [Hash] Solr document fragment; empty unless the resource
  #   is an EphemeraBox.
  def to_solr
    return {} unless resource.is_a?(::EphemeraBox)
    %w[title_tesim title_ssim title_tsim].to_h { |field| [field, [decorated.title]] }
  end

  private

  # Memoized decorator for the indexed resource.
  def decorated
    @decorated ||= resource.decorate
  end
end
|
wuyueandrew/GraphScope
|
research/query_service/ir/compiler/src/test/java/com/alibaba/graphscope/integration/ldbc/LdbcQueryTest.java
|
package com.alibaba.graphscope.integration.ldbc;
import com.alibaba.graphscope.gremlin.plugin.traversal.IrCustomizedTraversal;
import com.google.common.collect.Sets;
import org.apache.tinkerpop.gremlin.process.AbstractGremlinProcessTest;
import org.apache.tinkerpop.gremlin.process.traversal.Order;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.Column;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
public abstract class LdbcQueryTest extends AbstractGremlinProcessTest {
    // One abstract factory per LDBC interactive complex query under test;
    // concrete subclasses supply the actual Gremlin traversal for each.
    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_1_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_2_test();

    public abstract Traversal<Vertex, Vertex> get_ldbc_3_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_4_test();

    public abstract Traversal<Vertex, Map<Object, Long>> get_ldbc_5_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_6_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_7_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_8_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_9_test();

    public abstract Traversal<Vertex, Map<String, Object>> get_ldbc_11_test();

    public abstract Traversal<Vertex, Map<Object, Long>> get_ldbc_12_test();
    // Runs LDBC query 1 and compares each result row, in order, against
    // the golden strings below (stringified bindings).
    @Test
    public void run_ldbc_1_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_1_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        List<String> expected =
                Arrays.asList(
                        "{p=2, a={firstName=[Chau], lastName=[Nguyen], id=[4848]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Do], id=[9101]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Nguyen], id=[2199023265573]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Loan], id=[6597069771031]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Nguyen], id=[8796093031224]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Ho], id=[10995116285282]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Loan], id=[13194139544258]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Ha], id=[15393162793500]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Ho], id=[19791209303405]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Nguyen], id=[26388279068635]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Loan], id=[26388279076217]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Nguyen], id=[28587302322743]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Ho], id=[28587302323020]}}",
                        "{p=2, a={firstName=[Chau], lastName=[Ho], id=[32985348842021]}}",
                        "{p=3, a={firstName=[Chau], lastName=[Loan], id=[10995116284332]}}",
                        "{p=3, a={firstName=[Chau], lastName=[Ha], id=[15393162789090]}}",
                        "{p=3, a={firstName=[Chau], lastName=[Nguyen], id=[26388279072379]}}",
                        "{p=3, a={firstName=[Chau], lastName=[Ho], id=[32985348840129]}}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
    // Runs LDBC query 2 and compares each result row, in order, against
    // the golden strings below (stringified bindings).
    @Test
    public void run_ldbc_2_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_2_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        List<String> expected =
                Arrays.asList(
                        "{p={lastName=[Khan], firstName=[Kunal], id=[30786325587937]},"
                                + " m={id=[2061587339072], creationDate=[20120803072025654],"
                                + " content=[fine]}}",
                        "{p={lastName=[Rao], firstName=[Abhishek], id=[8796093031885]},"
                                + " m={id=[2061587080174], creationDate=[20120803070839866],"
                                + " content=[About <NAME>, aham for the inteAbout Ernest"
                                + " Hemingway, which he wrote FAbo]}}",
                        "{p={lastName=[Khan], firstName=[John], id=[8796093029267]},"
                                + " m={id=[2061585035664], creationDate=[20120803070658893],"
                                + " content=[About Amitabh Bachchan, l. In additAbout <NAME>,"
                                + " en Yi-hsiunAbout Denzel Wash]}}",
                        "{p={lastName=[Khan], firstName=[John], id=[8796093029267]},"
                                + " m={imageFile=[], id=[2061587033910],"
                                + " creationDate=[20120803070442531], content=[About Abbas I of Persia,"
                                + " took back land from the Portuguese and the Mughals. Abbas was a"
                                + " great builder and moved his kingdom's capital from Qazvin to"
                                + " Isfahan. In his later years, the shah became suspicious of his own"
                                + " so]}}",
                        "{p={lastName=[Khan], firstName=[Arjun], id=[30786325579845]},"
                                + " m={id=[2061589148849], creationDate=[20120803065019796],"
                                + " content=[thanks]}}",
                        "{p={lastName=[Rao], firstName=[Abhishek], id=[8796093031885]},"
                                + " m={id=[2061590844627], creationDate=[20120803064241072],"
                                + " content=[About <NAME>, he Great Books and seminar method."
                                + " It haAbout Edward Elga]}}",
                        "{p={lastName=[Khan], firstName=[John], id=[8796093029267]},"
                                + " m={id=[2061585035666], creationDate=[20120803064146457],"
                                + " content=[great]}}",
                        "{p={lastName=[Bolier], firstName=[Albert], id=[8796093031407]},"
                                + " m={id=[2061588995483], creationDate=[20120803063619558],"
                                + " content=[thx]}}",
                        "{p={lastName=[Rao], firstName=[Abhishek], id=[8796093031885]},"
                                + " m={id=[2061587080596], creationDate=[20120803062639721],"
                                + " content=[About Walt Whitman, n was concernAbout At Fillmore East,"
                                + " 500 Greatest About Italy, Coun]}}",
                        "{p={lastName=[Khan], firstName=[Shweta], id=[7725]},"
                                + " m={id=[2061588924543], creationDate=[20120803061904810],"
                                + " content=[great]}}",
                        "{p={lastName=[Rao], firstName=[Arjun], id=[10995116279390]},"
                                + " m={id=[2061585663942], creationDate=[20120803061330637],"
                                + " content=[About William the Conqueror, ed, but William was able to"
                                + " put them dowAbout Anne]}}",
                        "{p={lastName=[Garcia], firstName=[Isabel], id=[10995116286316]},"
                                + " m={id=[2061590620731], creationDate=[20120803060002996],"
                                + " content=[maybe]}}",
                        "{p={lastName=[Khan], firstName=[Arjun], id=[24189255811940]},"
                                + " m={id=[2061586499110], creationDate=[20120803053430934],"
                                + " content=[About <NAME>, t, literary critic, biAbout Francis"
                                + " Bacon, proper methodology ]}}",
                        "{p={lastName=[Zhang], firstName=[Yang], id=[13194139535025]},"
                                + " m={id=[2061588568513], creationDate=[20120803050920738],"
                                + " content=[About <NAME>, edom and human rights seemAbout"
                                + " <NAME>, and E]}}",
                        "{p={lastName=[Kumar], firstName=[Deepak], id=[17592186053700]},"
                                + " m={id=[2061588703136], creationDate=[20120803045119016],"
                                + " content=[great]}}",
                        "{p={lastName=[Garcia], firstName=[Isabel], id=[10995116286316]},"
                                + " m={id=[2061587017186], creationDate=[20120803043821969],"
                                + " content=[ok]}}",
                        "{p={lastName=[Rao], firstName=[Arjun], id=[10995116279390]},"
                                + " m={id=[2061584412526], creationDate=[20120803043634096],"
                                + " content=[great]}}",
                        "{p={lastName=[Zhang], firstName=[Yang], id=[13194139535025]},"
                                + " m={id=[2061589148863], creationDate=[20120803034926704],"
                                + " content=[ok]}}",
                        "{p={lastName=[Rao], firstName=[Arjun], id=[10995116279390]},"
                                + " m={id=[2061585353358], creationDate=[20120803033714516],"
                                + " content=[About <NAME>, ended more than fiAbout <NAME>,"
                                + " grams. In 1957,]}}",
                        "{p={lastName=[Rao], firstName=[Abhishek], id=[8796093031885]},"
                                + " m={id=[2061587080731], creationDate=[20120803033107843],"
                                + " content=[About Augustine of Hippo, ity of God, distincAbout Harold"
                                + " Arlen, , a numbe]}}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
    // Runs LDBC query 3; expects exactly one vertex with a known id.
    @Test
    public void run_ldbc_3_test() {
        Traversal<Vertex, Vertex> traversal = this.get_ldbc_3_test();
        this.printTraversalForm(traversal);
        // V[72066390130957625]: id: 8796093029689, firstName: Eun-Hye, lastName: Yoon
        Assert.assertEquals(72066390130957625L, traversal.next().id());
        Assert.assertFalse(traversal.hasNext());
    }
    // Runs LDBC query 4 and compares each result row, in order, against
    // the golden strings below.
    @Test
    public void run_ldbc_4_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_4_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        List<String> expected =
                Arrays.asList(
                        "{postCount=3, tagName=Ehud_Olmert}",
                        "{postCount=1, tagName=Be-Bop-A-Lula}",
                        "{postCount=1, tagName=Kingdom_of_Sardinia}",
                        "{postCount=1, tagName=The_Singles:_The_First_Ten_Years}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
    // Runs LDBC query 5; the whole result is one Map, compared by its
    // string form (relies on the map's deterministic ordering).
    @Test
    public void run_ldbc_5_test() {
        Traversal<Vertex, Map<Object, Long>> traversal = this.get_ldbc_5_test();
        this.printTraversalForm(traversal);
        String expected =
                "{v[288230788468638061]=2, v[288231613102324388]=2, v[288231887980212232]=2,"
                        + " v[288230788468574546]=1, v[288230788468598862]=1, v[288230925907600292]=1,"
                        + " v[288230925907605637]=1, v[288231338224392113]=1, v[288231475663357917]=1,"
                        + " v[288231475663417090]=1, v[288231750541328016]=1, v[288231887980204174]=1,"
                        + " v[288231887981252988]=1, v[288232162858188293]=1, v[288232162858200268]=1,"
                        + " v[288232300297113293]=1, v[288232300297120851]=1, v[288232437736032472]=1,"
                        + " v[288232437736038238]=1}";
        Map<Object, Long> result = traversal.next();
        Assert.assertEquals(expected, result.toString());
        Assert.assertFalse(traversal.hasNext());
    }
    // Runs LDBC query 6 and compares each result row, in order, against
    // the golden strings below.
    @Test
    public void run_ldbc_6_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_6_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        List<String> expected =
                Arrays.asList(
                        "{keys=Tom_Gehrels, values=28}",
                        "{keys=Sammy_Sosa, values=9}",
                        "{keys=Charles_Dickens, values=5}",
                        "{keys=Genghis_Khan, values=5}",
                        "{keys=I<KEY>, values=5}",
                        "{keys=Marc_Gicquel, values=5}",
                        "{keys=Freddie_Mercury, values=4}",
                        "{keys=Peter_Hain, values=4}",
                        "{keys=Robert_Fripp, values=4}",
                        "{keys=Boris_Yeltsin, values=3}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
    // Runs LDBC query 7 and compares each result row, in order, against
    // the golden strings below (stringified bindings).
    @Test
    public void run_ldbc_7_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_7_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        List<String> expected =
                Arrays.asList(
                        "{likedate=20120912143240024, liker={firstName=[Jean-Pierre],"
                                + " lastName=[Kanam], id=[17592186049473]},"
                                + " message={id=[2199024319581], content=[About <NAME>, Holy Roman"
                                + " Emperor, rmation. In addition to thAbout <NAME>]}}",
                        "{likedate=20120911131917097, liker={firstName=[Ajuma], lastName=[Leakey],"
                                + " id=[32985348842700]}, message={id=[1786707240991], content=[About"
                                + " Costa Rica, a to the southeast, the Pacific Ocean to theAbout"
                                + " Napoleon III, pula]}}",
                        "{likedate=20120910053218016, liker={firstName=[Mohamed], lastName=[Wong],"
                                + " id=[32985348835903]}, message={id=[1786706525707], content=[About"
                                + " <NAME>, strong voice and imposing height. He has performed"
                                + " ro]}}",
                        "{likedate=20120905054751531, liker={firstName=[Alfonso],"
                                + " lastName=[Gonzalez], id=[32985348842314]},"
                                + " message={id=[1786707314163], content=[About Anne, Queen of Great"
                                + " Britain, the resAbout <NAME>, ed in MaAbout <NAME>]}}",
                        "{likedate=20120904190103709, liker={firstName=[Sirak], lastName=[Dego],"
                                + " id=[32985348835356]}, message={id=[1511833096614], content=[About"
                                + " <NAME>, (Hofoper). DAbout <NAME>, list politicaAbout"
                                + " <NAME>, a ]}}",
                        "{likedate=20120904154553202, liker={firstName=[Charles], lastName=[Bona],"
                                + " id=[32985348837885]}, message={id=[1786707240991], content=[About"
                                + " Costa Rica, a to the southeast, the Pacific Ocean to theAbout"
                                + " Napoleon III, pula]}}",
                        "{likedate=20120904131816796, liker={firstName=[Aditya], lastName=[Khan],"
                                + " id=[32985348841531]}, message={id=[1786707314163], content=[About"
                                + " Anne, Queen of Great Britain, the resAbout <NAME>, ed in"
                                + " MaAbout Frank Zappa]}}",
                        "{likedate=20120904114300885, liker={firstName=[John], lastName=[Kumar],"
                                + " id=[32985348836287]}, message={id=[1786706617430], content=[About"
                                + " <NAME>, iner whose film, telAbout <NAME>, ions. He"
                                + " has als]}}",
                        "{likedate=20120902221041145, liker={firstName=[Ivan], lastName=[Santiago],"
                                + " id=[32985348841493]}, message={id=[1786707240991], content=[About"
                                + " Costa Rica, a to the southeast, the Pacific Ocean to theAbout"
                                + " Napoleon III, pula]}}",
                        "{likedate=20120902190352859, liker={firstName=[Baruch], lastName=[Dego],"
                                + " id=[4139]}, message={id=[2061584849006], content=[About Left"
                                + " Behind, game Left Behind: Eternal Forces and its sequels, Left ]}}",
                        "{likedate=20120902133650353, liker={firstName=[Hossein],"
                                + " lastName=[Karimi], id=[32985348842048]},"
                                + " message={id=[2061584849006], content=[About Left Behind, game Left"
                                + " Behind: Eternal Forces and its sequels, Left ]}}",
                        "{likedate=20120901170918823, liker={firstName=[Megumi], lastName=[Suzuki],"
                                + " id=[28587302326663]}, message={id=[1511830609010], content=[About"
                                + " <NAME>, nt relievers in major league history. PAbout Naima,"
                                + " an ]}}",
                        "{likedate=20120901130728341, liker={firstName=[Deepak], lastName=[Bose],"
                                + " id=[24189255812226]}, message={id=[2061584849006], content=[About"
                                + " Left Behind, game Left Behind: Eternal Forces and its sequels, Left"
                                + " ]}}",
                        "{likedate=20120901113114136, liker={firstName=[Zeki], lastName=[Arikan],"
                                + " id=[19791209300402]}, message={id=[2061584849006], content=[About"
                                + " Left Behind, game Left Behind: Eternal Forces and its sequels, Left"
                                + " ]}}",
                        "{likedate=20120901095813249, liker={firstName=[Manuel], lastName=[Cosio],"
                                + " id=[28587302332758]}, message={id=[2061584849006], content=[About"
                                + " Left Behind, game Left Behind: Eternal Forces and its sequels, Left"
                                + " ]}}",
                        "{likedate=20120901072309081, liker={firstName=[Jacques],"
                                + " lastName=[Arnaud], id=[15393162795133]},"
                                + " message={id=[2061584849006], content=[About Left Behind, game Left"
                                + " Behind: Eternal Forces and its sequels, Left ]}}",
                        "{likedate=20120831225619539, liker={firstName=[Ching], lastName=[Hoang],"
                                + " id=[32985348833559]}, message={id=[1236951518889], content=[About"
                                + " Pope Paul VI, cese, yet denying hiAbout Bono, tress reduction. It"
                                + " About Lou Reed, an American rock muAbout ]}}",
                        "{likedate=20120831154258335, liker={firstName=[<NAME>],"
                                + " lastName=[Akbar], id=[17592186053137]},"
                                + " message={id=[2061584849006], content=[About Left Behind, game Left"
                                + " Behind: Eternal Forces and its sequels, Left ]}}",
                        "{likedate=20120831135801287, liker={firstName=[Ismail], lastName=[Aziz],"
                                + " id=[10995116282290]}, message={id=[2061584849006], content=[About"
                                + " Left Behind, game Left Behind: Eternal Forces and its sequels, Left"
                                + " ]}}",
                        "{likedate=20120831122429543, liker={firstName=[Ali], lastName=[Lo],"
                                + " id=[8796093029002]}, message={id=[2061584849006], content=[About"
                                + " Left Behind, game Left Behind: Eternal Forces and its sequels, Left"
                                + " ]}}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
    // Runs LDBC query 8; unlike the other tests this comparison is
    // order-insensitive (expected is a Set, membership is checked per row).
    @Test
    public void run_ldbc_8_test() {
        Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_8_test();
        this.printTraversalForm(traversal);
        int counter = 0;
        Set<String> expected =
                Sets.newHashSet(
                        "{comment={id=[1786706501264], creationDate=[20120508164718581],"
                                + " content=[About Denmark, ual struggle for control of the Baltic Sea;"
                                + " before the digging of the Kiel]}, commenter={firstName=[Isabel],"
                                + " lastName=[Garcia], id=[10995116286316]}}",
                        "{comment={id=[1786706501272], creationDate=[20120417052950802],"
                                + " content=[About <NAME>, ia. Scholars have also"
                                + " suggesteAbout Hong Kong, en described as E]},"
                                + " commenter={firstName=[Abhishek], lastName=[Rao],"
                                + " id=[8796093031885]}}",
                        "{comment={id=[1786711226851], creationDate=[20120421111140819],"
                                + " content=[About <NAME>, and factories – About <NAME>, d"
                                + " a pioneer of roAbout Samu]}, commenter={firstName=[Arif],"
                                + " lastName=[Lesmana], id=[5976]}}",
                        "{comment={id=[1786711226855], creationDate=[20120421005529694],"
                                + " content=[About <NAME>, ohn Rhys-Davies (About Dog Man"
                                + " Star, it is considered Ab]}, commenter={firstName=[Jan],"
                                + " lastName=[Anton], id=[10995116288583]}}",
                        "{comment={id=[1924145454735], creationDate=[20120509045720143],"
                                + " content=[cool]}, commenter={firstName=[Deepak], lastName=[Kumar],"
                                + " id=[4398046513018]}}",
                        "{comment={id=[1374389640939], creationDate=[20111019210610461],"
                                + " content=[duh]}, commenter={firstName=[Abhishek], lastName=[Rao],"
                                + " id=[8796093031885]}}",
                        "{comment={id=[1786711226853], creationDate=[20120421210719836],"
                                + " content=[roflol]}, commenter={firstName=[Eddie], lastName=[Garcia],"
                                + " id=[5330]}}",
                        "{comment={id=[1374389640931], creationDate=[20111019163439495],"
                                + " content=[no way!]}, commenter={firstName=[Abhishek],"
                                + " lastName=[Rao], id=[8796093031885]}}",
                        "{comment={id=[1786711226854], creationDate=[20120421030229805],"
                                + " content=[About <NAME>, ic, literaryAbout Thomas"
                                + " Edison, dern industrA]}, commenter={firstName=[Mads],"
                                + " lastName=[Haugland], id=[13194139543018]}}",
                        "{comment={id=[2061584551353], creationDate=[20120902192046400],"
                                + " content=[ok]}, commenter={firstName=[John], lastName=[Iyar],"
                                + " id=[28587302328223]}}",
                        "{comment={id=[1786706501274], creationDate=[20120416184458353],"
                                + " content=[duh]}, commenter={firstName=[John], lastName=[Wilson],"
                                + " id=[1490]}}",
                        "{comment={id=[1786711226865], creationDate=[20120421221926988],"
                                + " content=[About Euripides, ives that wereAbout <NAME>-Davies, d"
                                + " the voices oAbout Henry D]}, commenter={firstName=[Antonio],"
                                + " lastName=[Garcia], id=[13194139539603]}}",
                        "{comment={id=[2061584551352], creationDate=[20120903111223692],"
                                + " content=[maybe]}, commenter={firstName=[Ashin], lastName=[Karat],"
                                + " id=[17592186044810]}}",
                        "{comment={id=[1786706501276], creationDate=[20120417004630904],"
                                + " content=[About <NAME>, <NAME>, RichaAbout Tony"
                                + " Blair, our Prime Mi]}, commenter={firstName=[Deepak],"
                                + " lastName=[Kumar], id=[4398046513018]}}",
                        "{comment={id=[1374389640938], creationDate=[20111019193909069],"
                                + " content=[cool]}, commenter={firstName=[Albert], lastName=[Bolier],"
                                + " id=[8796093031407]}}",
                        "{comment={id=[1374389640946], creationDate=[20111020022520841],"
                                + " content=[ok]}, commenter={firstName=[Isabel], lastName=[Garcia],"
                                + " id=[10995116286316]}}",
                        "{comment={id=[2061584551354], creationDate=[20120902192731040],"
                                + " content=[About Lil Wayne, m. L<NAME> released his debut rock"
                                + " album, Rebirth, in 2]}, commenter={firstName=[Farrukh],"
                                + " lastName=[Znaimer], id=[17592186048413]}}",
                        "{comment={id=[1511830965950], creationDate=[20111221171052493], content=[I"
                                + " see]}, commenter={firstName=[Rahul], lastName=[Reddy],"
                                + " id=[19791209303129]}}",
                        "{comment={id=[1511828737702], creationDate=[20111206230226884],"
                                + " content=[thanks]}, commenter={firstName=[K.], lastName=[Sharma],"
                                + " id=[9773]}}",
                        "{comment={id=[1374389640934], creationDate=[20111019225544527],"
                                + " content=[About Desiderius Erasmus, ined committed to reforming"
                                + " theAbout George Washington, his wea]},"
                                + " commenter={firstName=[Yang], lastName=[Zhang],"
                                + " id=[13194139535025]}}");
        while (traversal.hasNext()) {
            Map<String, Object> bindings = traversal.next();
            Assert.assertTrue(expected.contains(bindings.toString()));
            ++counter;
        }
        // Also assert the traversal produced exactly as many rows as expected.
        Assert.assertEquals(expected.size(), (long) counter);
    }
/**
 * Runs the LDBC query-9 traversal (recent posts of friends and friends-of-friends,
 * see {@code get_ldbc_9_test}) and checks the bindings stream back in exactly the
 * expected order: each result is compared position-by-position against the fixture
 * list, unlike the membership-only check used by the query-8 test above.
 */
@Test
public void run_ldbc_9_test() {
    Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_9_test();
    this.printTraversalForm(traversal);
    int counter = 0;
    // Fixture: the 20 expected {post, friends} bindings, newest post first.
    List<String> expected =
            Arrays.asList(
                    "{post={id=[1511829711860], creationDate=[20111216235809425],"
                            + " content=[About Augustine of Hippo, osopher and theologian from"
                            + " Roman Africa. About Che Gueva]}, friends={lastName=[Wang],"
                            + " firstName=[Xiaolu], id=[2199023260919]}}",
                    "{post={id=[1511830666887], creationDate=[20111216235709064],"
                            + " content=[good]}, friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}",
                    "{post={imageFile=[], id=[1511831473649], creationDate=[20111216235526728],"
                            + " content=[About Ho Chi Minh, ource Consulting, Economist"
                            + " Intelligence Unit and ECA International, Ho Chi Minh City is ranked"
                            + " 132 on the list of]}, friends={lastName=[Brown], firstName=[Jack],"
                            + " id=[10995116288703]}}",
                    "{post={id=[1511834999905], creationDate=[20111216235442124],"
                            + " content=[yes]}, friends={lastName=[Hosseini], firstName=[Hossein],"
                            + " id=[19791209310913]}}",
                    "{post={id=[1511828923913], creationDate=[20111216235355410],"
                            + " content=[ok]}, friends={lastName=[Khan], firstName=[Kiran],"
                            + " id=[8796093029365]}}",
                    "{post={id=[1511829478450], creationDate=[20111216235154542],"
                            + " content=[dia.org American Samoa competed at the 2004 Summer"
                            + " Olympics in Athens, Greece.About Ame]},"
                            + " friends={lastName=[Amenábar], firstName=[Carlos],"
                            + " id=[10995116279387]}}",
                    "{post={id=[1511829728931], creationDate=[20111216234926822],"
                            + " content=[cool]}, friends={lastName=[Khan], firstName=[Babar],"
                            + " id=[8796093030398]}}",
                    "{post={id=[1511828864535], creationDate=[20111216234741888],"
                            + " content=[LOL]}, friends={lastName=[Codreanu], firstName=[Victor],"
                            + " id=[17592186047200]}}",
                    "{post={imageFile=[], id=[1511829728929], creationDate=[20111216234716228],"
                            + " content=[About <NAME>, is views were more or less formed"
                            + " during three perioAbout Aristotle, odern advent ]},"
                            + " friends={lastName=[Kazadi], firstName=[<NAME>],"
                            + " id=[8796093026337]}}",
                    "{post={id=[1511829863227], creationDate=[20111216234401470],"
                            + " content=[ok]}, friends={lastName=[David], firstName=[Mihai],"
                            + " id=[17592186045238]}}",
                    "{post={id=[1511828832823], creationDate=[20111216234140954],"
                            + " content=[About <NAME>, ecretary of State in the"
                            + " administratioAbout Luxembo]}, friends={lastName=[Donati],"
                            + " firstName=[Giuseppe], id=[8796093028051]}}",
                    "{post={id=[1511831505173], creationDate=[20111216233920753], content=[no"
                            + " way!]}, friends={lastName=[Chen], firstName=[Lin], id=[9850]}}",
                    "{post={id=[1511829861734], creationDate=[20111216233654529],"
                            + " content=[thx]}, friends={lastName=[Redl], firstName=[Eva],"
                            + " id=[21990232557420]}}",
                    "{post={id=[1511831310812], creationDate=[20111216233627372],"
                            + " content=[About <NAME>, notable songs throughout About Rudy"
                            + " Giuliani, t to run and remain activAbout <NAME>, the]},"
                            + " friends={lastName=[Xu], firstName=[Anson], id=[17592186049298]}}",
                    "{post={id=[1511835042602], creationDate=[20111216233450452], content=[no"
                            + " way!]}, friends={lastName=[Abouba], firstName=[Hamani],"
                            + " id=[15393162791608]}}",
                    "{post={imageFile=[photo1511833666259.jpg], id=[1511833666259],"
                            + " creationDate=[20111216233439099], content=[]},"
                            + " friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}",
                    "{post={imageFile=[photo1511833666258.jpg], id=[1511833666258],"
                            + " creationDate=[20111216233438099], content=[]},"
                            + " friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}",
                    "{post={imageFile=[photo1511833666257.jpg], id=[1511833666257],"
                            + " creationDate=[20111216233437099], content=[]},"
                            + " friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}",
                    "{post={imageFile=[photo1511833666256.jpg], id=[1511833666256],"
                            + " creationDate=[20111216233436099], content=[]},"
                            + " friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}",
                    "{post={imageFile=[photo1511833666255.jpg], id=[1511833666255],"
                            + " creationDate=[20111216233435099], content=[]},"
                            + " friends={lastName=[Yamada], firstName=[Prince],"
                            + " id=[2199023260291]}}");
    // Ordered comparison: result i must equal expected.get(i).
    while (traversal.hasNext()) {
        Map<String, Object> bindings = traversal.next();
        Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
        ++counter;
    }
    // Also assert the traversal produced exactly as many results as expected.
    Assert.assertEquals(expected.size(), (long) counter);
}
/**
 * Runs the LDBC query-11 traversal (friends' past jobs at companies in Laos,
 * see {@code get_ldbc_11_test}) and checks results arrive in exactly the
 * expected order (ascending by work-start year per the query's ordering).
 */
@Test
public void run_ldbc_11_test() {
    Traversal<Vertex, Map<String, Object>> traversal = this.get_ldbc_11_test();
    this.printTraversalForm(traversal);
    int counter = 0;
    // Fixture: the 10 expected {works, orgname, friends} bindings.
    List<String> expected =
            Arrays.asList(
                    "{works=2002, orgname=Lao_Airlines, friends={lastName=[Pham],"
                            + " firstName=[Eve-<NAME>], id=[6597069767125]}}",
                    "{works=2002, orgname=Lao_Airlines, friends={lastName=[Hafez],"
                            + " firstName=[Atef], id=[28587302330691]}}",
                    "{works=2004, orgname=Lao_Airlines, friends={lastName=[Vorachith],"
                            + " firstName=[Cy], id=[5869]}}",
                    "{works=2005, orgname=Lao_Air, friends={lastName=[Vang], firstName=[Mee],"
                            + " id=[8796093022909]}}",
                    "{works=2005, orgname=Lao_Airlines, friends={lastName=[Charoenpura],"
                            + " firstName=[Jetsada], id=[10995116285549]}}",
                    "{works=2006, orgname=Lao_Airlines, friends={lastName=[Anwar],"
                            + " firstName=[A.], id=[24189255815555]}}",
                    "{works=2007, orgname=Lao_Air, friends={lastName=[Li], firstName=[Ben],"
                            + " id=[2199023266276]}}",
                    "{works=2007, orgname=Lao_Airlines, friends={lastName=[Sysavanh],"
                            + " firstName=[Pao], id=[8796093027636]}}",
                    "{works=2008, orgname=Lao_Air, friends={lastName=[Vongvichit],"
                            + " firstName=[Mee], id=[1259]}}",
                    "{works=2009, orgname=Lao_Air, friends={lastName=[Achiou], firstName=[Ali],"
                            + " id=[2199023258003]}}");
    // Ordered, position-by-position comparison.
    while (traversal.hasNext()) {
        Map<String, Object> bindings = traversal.next();
        Assert.assertTrue(bindings.toString().equals(expected.get(counter)));
        ++counter;
    }
    Assert.assertEquals(expected.size(), (long) counter);
}
/**
 * Runs the LDBC query-12 traversal (per-friend count of comments replying to
 * posts tagged with type BasketballPlayer, see {@code get_ldbc_12_test}) and
 * checks that the single grouped-count map matches the fixture exactly and
 * that no further result follows.
 */
@Test
public void run_ldbc_12_test() {
    Traversal<Vertex, Map<Object, Long>> t = this.get_ldbc_12_test();
    this.printTraversalForm(t);
    // The traversal collapses everything into one ordered count map.
    Map<Object, Long> counts = t.next();
    Assert.assertEquals(
            "{v[72061992084440954]=5, v[72066390130957203]=5, v[72070788177462961]=5,"
                    + " v[72057594037929426]=4, v[72057594037935661]=4, v[72066390130959821]=4,"
                    + " v[72066390130959343]=3, v[72068589154214252]=2, v[72068589154207326]=1,"
                    + " v[72081783293739876]=1}",
            counts.toString());
    Assert.assertFalse(t.hasNext());
}
/**
 * Concrete Gremlin traversal definitions for the LDBC query tests above.
 * Several queries cast to {@code IrCustomizedTraversal} to use a range-string
 * overload of {@code both("m..n", label)} followed by {@code endV()} —
 * presumably a variable-length path expansion over KNOWS edges (m to n hops);
 * confirm against the IrCustomizedTraversal API.
 */
public static class Traversals extends LdbcQueryTest {

    /** LDBC 1: persons named "Chau" within 1..4 KNOWS hops, ordered by path length. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_1_test() {
        return ((IrCustomizedTraversal)
                        g.V().hasLabel("PERSON")
                                .has("id", 30786325583618L)
                                .both("1..4", "KNOWS")
                                .as("p"))
                .endV()
                .has("id", P.neq(30786325583618L))
                .has("firstName", P.eq("Chau"))
                .as("a")
                .order()
                .by(__.select("p").by("~len"), Order.asc)
                .dedup()
                .order()
                .by(__.select("p").by("~len"), Order.asc)
                .by(__.select("a").by("id"))
                .by(__.select("a").by("lastName"))
                .limit(20)
                .select("a", "p")
                .by(__.valueMap("id", "firstName", "lastName"))
                .by("~len");
    }

    /** LDBC 2: recent messages (up to a cutoff date) created by direct friends. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_2_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 17592186044810L)
                .both("KNOWS")
                .as("p")
                .in("HASCREATOR")
                .has("creationDate", P.lte(20120803072025654L))
                .order()
                .by("creationDate", Order.desc)
                .by("id", Order.asc)
                .limit(20)
                .as("m")
                .select("p", "m")
                .by(__.valueMap("id", "firstName", "lastName"))
                .by(__.valueMap("id", "imageFile", "creationDate", "content"));
    }

    /** LDBC 3: friends (1-2 hops) located outside Laos/United_States who posted
     *  from both Laos and Scotland within the date window. */
    @Override
    public Traversal<Vertex, Vertex> get_ldbc_3_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 17592186055119L)
                .union(__.both("KNOWS"), __.both("KNOWS").both("KNOWS"))
                .dedup()
                .where(__.out("ISLOCATEDIN").has("name", P.without("Laos", "United_States")))
                .where(
                        __.in("HASCREATOR")
                                .has(
                                        "creationDate",
                                        P.gt(20110601000000000L).and(P.lt(20110713000000000L)))
                                .out("ISLOCATEDIN")
                                .has("name", P.eq("Laos").or(P.eq("Scotland")))
                                .values("name")
                                .dedup()
                                // both country names must appear
                                .count()
                                .is(2))
                .order()
                .by(
                        __.in("HASCREATOR")
                                .has(
                                        "creationDate",
                                        P.gt(20110601000000000L).and(P.lt(20110713000000000L)))
                                .out("ISLOCATEDIN")
                                .has("name", "Laos")
                                .count(),
                        Order.desc)
                .by("id", Order.asc)
                .limit(20);
    }

    /** LDBC 4: tags on friends' posts in the window that did NOT appear on their
     *  earlier posts, counted and ranked. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_4_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 15393162790846L)
                .as("person")
                .both("KNOWS")
                .in("HASCREATOR")
                .hasLabel("POST")
                .as("post")
                .has("creationDate", P.gte(20120801000000000L).and(P.lt(20120830000000000L)))
                .out("HASTAG")
                .as("tag")
                .select("person")
                // exclude tags already used on posts from before the window
                .not(
                        __.both("KNOWS")
                                .in("HASCREATOR")
                                .hasLabel("POST")
                                .has("creationDate", P.lt(20120801000000000L))
                                .out("HASTAG")
                                .where(P.eq("tag")))
                .select("tag")
                .groupCount()
                .order()
                .by(__.select(Column.values), Order.desc)
                .by(__.select(Column.keys).values("name"), Order.asc)
                .limit(10)
                .select(Column.keys)
                .values("name")
                .as("tagName")
                .select(Column.values)
                .as("postCount")
                .select("tagName", "postCount");
    }

    /** LDBC 5: forums that friends (1..2 hops) joined after the cutoff and in
     *  which those same friends created posts, ranked by post count. */
    @Override
    public Traversal<Vertex, Map<Object, Long>> get_ldbc_5_test() {
        return ((IrCustomizedTraversal)
                        g.V().hasLabel("PERSON")
                                .has("id", 21990232560302L)
                                .both("1..3", "KNOWS"))
                .endV()
                .dedup()
                .as("p")
                .inE("HASMEMBER")
                .has("joinDate", P.gt(20120901000000000L))
                .outV()
                .as("forum")
                .out("CONTAINEROF")
                .hasLabel("POST")
                .out("HASCREATOR")
                .where(P.eq("p"))
                .select("forum")
                .groupCount()
                .order()
                .by(__.select(Column.values), Order.desc)
                .by(__.select(Column.keys).values("id"), Order.asc)
                .limit(20);
    }

    /** LDBC 6: co-occurring tags on friends' posts that are tagged "Angola". */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_6_test() {
        return g.V().hasId(72088380363511554L)
                .union(__.both("KNOWS"), __.both("KNOWS").both("KNOWS"))
                .dedup()
                .has("id", P.neq(30786325583618L))
                .in("HASCREATOR")
                .hasLabel("POST")
                .as("_t")
                .out("HASTAG")
                .has("name", P.eq("Angola"))
                .select("_t")
                .dedup()
                .out("HASTAG")
                .has("name", P.neq("Angola"))
                .groupCount()
                .order()
                .by(__.select(Column.values), Order.desc)
                .by(__.select(Column.keys).values("name"), Order.asc)
                .limit(10)
                .select(Column.keys)
                .values("name")
                .as("keys")
                .select(Column.values)
                .as("values")
                .select("keys", "values");
    }

    /** LDBC 7: most recent likes on the person's messages, with the liker. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_7_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 17592186053137L)
                .in("HASCREATOR")
                .as("message")
                .inE("LIKES")
                .as("like")
                .values("creationDate")
                .as("likedate")
                .select("like")
                .outV()
                .as("liker")
                .order()
                .by(__.select("likedate"), Order.desc)
                .by("id", Order.asc)
                .limit(20)
                .select("message", "likedate", "liker")
                .by(__.valueMap("id", "content", "imageFile"))
                .by()
                .by(__.valueMap("id", "firstName", "lastName"));
    }

    /** LDBC 8: most recent comments replying to the person's messages. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_8_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 17592186044810L)
                .in("HASCREATOR")
                .in("REPLYOF")
                .hasLabel("COMMENT")
                .as("comment")
                .order()
                .by("creationDate", Order.desc)
                .by("id", Order.asc)
                .limit(20)
                .out("HASCREATOR")
                .as("commenter")
                .select("commenter", "comment")
                .by(__.valueMap("id", "firstName", "lastName"))
                .by(__.valueMap("creationDate", "id", "content"));
    }

    /** LDBC 9: recent messages (before the cutoff) by friends within 1..2 hops. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_9_test() {
        return ((IrCustomizedTraversal)
                        g.V().hasLabel("PERSON")
                                .has("id", 13194139542834L)
                                .both("1..3", "KNOWS"))
                .endV()
                .dedup()
                .has("id", P.neq(13194139542834L))
                .as("friends")
                .in("HASCREATOR")
                .has("creationDate", P.lt(20111217000000000L))
                .as("post")
                .order()
                .by("creationDate", Order.desc)
                .by("id", Order.asc)
                .limit(20)
                .select("friends", "post")
                .by(__.valueMap("id", "firstName", "lastName"))
                .by(__.valueMap("id", "content", "imageFile", "creationDate"));
    }

    /** LDBC 11: friends' jobs started before 2010 at companies located in Laos. */
    @Override
    public Traversal<Vertex, Map<String, Object>> get_ldbc_11_test() {
        return ((IrCustomizedTraversal)
                        g.V().hasLabel("PERSON")
                                .has("id", 30786325583618L)
                                .as("root")
                                .both("1..3", "KNOWS"))
                .endV()
                .dedup()
                .has("id", P.neq(30786325583618L))
                .as("friends")
                .outE("WORKAT")
                .has("workFrom", P.lt(2010))
                .as("startWork")
                .values("workFrom")
                .as("works")
                .select("startWork")
                .inV()
                .as("comp")
                .values("name")
                .as("orgname")
                .select("comp")
                .out("ISLOCATEDIN")
                .has("name", "Laos")
                .select("friends")
                .order()
                .by(__.select("works"), Order.asc)
                .by("id", Order.asc)
                .by(__.select("orgname"), Order.desc)
                .limit(10)
                .select("friends", "orgname", "works")
                .by(__.valueMap("id", "firstName", "lastName"))
                .by()
                .by();
    }

    /** LDBC 12: per-friend count of comments replying to posts whose tag has
     *  type BasketballPlayer, ranked by count. */
    @Override
    public Traversal<Vertex, Map<Object, Long>> get_ldbc_12_test() {
        return g.V().hasLabel("PERSON")
                .has("id", 17592186044810L)
                .both("KNOWS")
                .as("friend")
                .in("HASCREATOR")
                .hasLabel("COMMENT")
                .where(
                        __.out("REPLYOF")
                                .hasLabel("POST")
                                .out("HASTAG")
                                .out("HASTYPE")
                                .has("name", P.eq("BasketballPlayer")))
                .select("friend")
                .groupCount()
                .order()
                .by(__.select(Column.values), Order.desc)
                .by(__.select(Column.keys).values("id"), Order.asc)
                .limit(20);
    }
}
}
|
PeterDaveHello/eden
|
modules/tests/person/__init__.py
|
from search_person import *
from create_person import *
|
gonglu666/gt_im_sdk
|
src/com/minxing/client/utils/StringUtil.java
|
<gh_stars>0
package com.minxing.client.utils;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
public class StringUtil {
public static String convertContentToHtml(String content){
content = content.replaceAll("(\r\n|\n)", "<br/>");
content = content.replaceAll(" ", " ");
return content;
}
public static String pathDecode(String path) throws UnsupportedEncodingException{
if(path != null){
path = path.replace("+", "%2b");
path = URLDecoder.decode(path,"UTF-8");
}
return path;
}
//图文内容里带\n \r \r\n 会造成图文消息结构混乱,导致客户端crash、web显示消息异常
public static String convertContent(String content){
if(content!=null){
// content = content.replaceAll("(\n)", "\\\\n");
// content = content.replaceAll("(\r)", "\\\\r");
toUnicode(content);
}
return content;
}
private static String toUnicode(String str) {
StringBuffer unicode = new StringBuffer();
for (int i = 0; i < str.length(); i++) {
char c = str.charAt(i); // 取出每一个字符
unicode.append("\\u" + Integer.toHexString(c));// 转换为unicode
}
return unicode.toString();
}
final protected static char[] hexArray = "0123456789abcdef".toCharArray();
public static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
public static boolean isEmpty(String str) {
return str == null || str.length() == 0;
}
public static boolean isNotEmpty(String str) {
return !isEmpty(str);
}
}
|
mevans845/healthcare
|
deploy/templates/data_project.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configures a data project with storage and logging.
For details and usage, see deploy/README.md.
"""
# Map from prefixes used by IAM to those used in BigQuery.
_IAM_TO_BIGQUERY_MEMBER = {
'user': 'userByEmail',
'group': 'groupByEmail',
'domain': 'domain',
# BigQuery treats serviceAccounts as users.
'serviceAccount': 'userByEmail',
}
DEFAULT_CUSTOM_ROLE_STAGE = 'GA'
def _get_bigquery_access_for_role(role_name, members):
"""Converts role and IAM style members to BigQuery style ACL."""
access_for_role = []
for member in members:
if(member == 'allAuthenticatedUsers'):
access_for_role.append({
'role': role_name,
'specialGroup': member
})
else:
member_type, member_name = member.split(':')
access_for_role.append({
'role': role_name,
_IAM_TO_BIGQUERY_MEMBER[member_type]: member_name
})
return access_for_role
def generate_config(context):
  """Generate Deployment Manager configuration.

  Builds the full resource list for a data-hosting project: custom IAM roles,
  project-level IAM bindings, audit-log storage (local or remote), a BigQuery
  log sink, data BigQuery datasets and GCS buckets, optional Pub/Sub topic and
  subscription, logs-based security metrics, and data-access audit logging.

  Args:
    context: Deployment Manager context; reads context.env['project'] and the
        schema-defined fields of context.properties.

  Returns:
    A dict with a single 'resources' key, as Deployment Manager expects.

  Raises:
    ValueError: if neither or both of local_audit_logs / remote_audit_logs are
        given, or if data buckets are requested without a logs bucket.
  """
  project_id = context.env['project']

  # Exactly one audit-log destination must be configured.
  if ('local_audit_logs' in context.properties) == (
      'remote_audit_logs' in context.properties):
    raise ValueError('Must specify local_audit_logs or remote_audit_logs but '
                     'not both.')
  use_local_logs = 'local_audit_logs' in context.properties
  has_organization = context.properties['has_organization']

  resources = []

  # Custom roles
  custom_roles = context.properties.get('custom_roles', [])
  for role in custom_roles:
    name = role['name']
    permissions = role['permissions']
    # Description and title default to the role name when omitted.
    description = role.get('description', name)
    title = role.get('title', name)
    resources.append({
        'name': name,
        'type': 'gcp-types/iam-v1:projects.roles',
        'properties': {
            'parent': 'projects/' + project_id,
            'roleId': name,
            'role': {
                'title': title,
                'description': description,
                'stage': DEFAULT_CUSTOM_ROLE_STAGE,
                'includedPermissions': permissions,
            }
        }
    })

  # Set project-level IAM roles. Adding owners and auditors roles, and removing
  # the single-owner. Non-organization projects cannot have a owner group, so
  # use projectIamAdmin instead.
  if has_organization:
    owners_group_role = 'roles/owner'
  else:
    owners_group_role = 'roles/resourcemanager.projectIamAdmin'
  project_bindings = {
      owners_group_role: ['group:' + context.properties['owners_group']],
      'roles/iam.securityReviewer': [
          'group:' + context.properties['auditors_group']],
  }
  if 'editors_group' in context.properties:
    project_bindings['roles/editor'] = [
        'group:' + context.properties['editors_group']]
  # Merge in additional permissions, which may include the above roles.
  for additional in context.properties.get(
      'additional_project_permissions', []):
    for role in additional['roles']:
      project_bindings[role] = (
          project_bindings.get(role, []) + additional['members'])
  # sorted() keeps the patch deterministic across deployments.
  policy_patch = {
      'add': [{'role': role, 'members': members}
              for role, members in sorted(project_bindings.items())]
  }
  if has_organization and 'remove_owner_user' in context.properties:
    policy_patch['remove'] = [{
        'role': 'roles/owner',
        'members': ['user:' + context.properties['remove_owner_user']],
    }]
  # get-then-patch pair: the patch references the fetched policy by $(ref...).
  get_iam_policy_name = 'set-project-bindings-get-iam-policy'
  resources.extend([{
      'name': get_iam_policy_name,
      'action': ('gcp-types/cloudresourcemanager-v1:'
                 'cloudresourcemanager.projects.getIamPolicy'),
      'properties': {
          'resource': project_id,
      },
      'metadata': {
          'runtimePolicy': ['UPDATE_ALWAYS'],
      },
  }, {
      'name': 'set-project-bindings-patch-iam-policy',
      'action': ('gcp-types/cloudresourcemanager-v1:'
                 'cloudresourcemanager.projects.setIamPolicy'),
      'properties': {
          'resource': project_id,
          'policy': '$(ref.' + get_iam_policy_name + ')',
          'gcpIamPolicyPatch': policy_patch,
      },
  }])

  # Create a logs GCS bucket and BigQuery dataset, or get the names of the
  # remote bucket and dataset.
  previous_gcs_bucket = None
  logs_bucket_id = None
  if use_local_logs:
    logs_gcs_bucket = context.properties['local_audit_logs'].get(
        'logs_gcs_bucket')
    # Logs GCS bucket is only needed if there are data GCS buckets.
    if logs_gcs_bucket:
      logs_bucket_id = project_id + '-logs'
      # Create the local GCS bucket to hold logs.
      resources.append({
          'name': logs_bucket_id,
          'type': 'storage.v1.bucket',
          'properties': {
              'location': logs_gcs_bucket['location'],
              'storageClass': logs_gcs_bucket['storage_class'],
              # Log objects are auto-deleted after ttl_days.
              'lifecycle': {
                  'rule': [{
                      'action': {
                          'type': 'Delete'
                      },
                      'condition': {
                          'age': logs_gcs_bucket['ttl_days'],
                          'isLive': True,
                      },
                  }],
              },
          },
          'accessControl': {
              'gcpIamPolicy': {
                  'bindings': [
                      {
                          'role':
                              'roles/storage.admin',
                          'members': [
                              'group:' + context.properties['owners_group']
                          ],
                      },
                      {
                          'role':
                              'roles/storage.objectViewer',
                          'members': [
                              'group:' + context.properties['auditors_group']
                          ],
                      },
                      {
                          'role': 'roles/storage.objectCreator',
                          'members': [
                              'group:<EMAIL>'],
                      },
                  ],
              },
          },
      })
      previous_gcs_bucket = logs_bucket_id
    # Get name of local BigQuery dataset to hold audit logs.
    # This dataset will need to be created after running this deployment
    dataset_id = 'audit_logs'
    log_sink_destination = ('bigquery.googleapis.com/projects/' +
                            project_id + '/datasets/' + dataset_id)
  else:
    logs_bucket_id = context.properties['remote_audit_logs'].get(
        'logs_gcs_bucket_name')
    log_sink_destination = (
        'bigquery.googleapis.com/projects/' +
        context.properties['remote_audit_logs']['audit_logs_project_id'] +
        '/datasets/' +
        context.properties['remote_audit_logs']['logs_bigquery_dataset_id'])

  # Create a logs metric sink of audit logs to a BigQuery dataset. This also
  # creates a service account that must be given WRITER access to the dataset.
  log_sink_name = 'audit-logs-to-bigquery'
  resources.append({
      'name': log_sink_name,
      'type': 'logging.v2.sink',
      'properties': {
          'sink': log_sink_name,
          'destination': log_sink_destination,
          'filter': 'logName:"logs/cloudaudit.googleapis.com"',
          'uniqueWriterIdentity': True,
      },
  })

  # BigQuery dataset(s) to hold actual data. Create serially to avoid exceeding
  # API quota.
  previous_bq_update = None
  for bq_dataset in context.properties.get('bigquery_datasets', []):
    ds_name = bq_dataset['name']
    bq_create_resource = {
        'name': 'create-big-query-dataset-' + ds_name,
        'type': 'bigquery.v2.dataset',
        'properties': {
            'datasetReference': {
                'datasetId': ds_name,
            },
            'location': bq_dataset['location'],
        },
    }
    # Chain each create on the previous dataset's patch (serialization).
    if previous_bq_update:
      bq_create_resource['metadata'] = {'dependsOn': [previous_bq_update]}
    resources.append(bq_create_resource)
    add_permissions = bq_dataset.get('additional_dataset_permissions', {})
    access_list = [{
        'role': 'OWNER',
        'groupByEmail': context.properties['owners_group']
    }]
    for reader in context.properties.get('data_readonly_groups', []):
      access_list.append({
          'role': 'READER',
          'groupByEmail': reader
      })
    for writer in context.properties.get('data_readwrite_groups', []):
      access_list.append({
          'role': 'WRITER',
          'groupByEmail': writer
      })
    access_list += (
        _get_bigquery_access_for_role(
            'OWNER', add_permissions.get('owners', [])) +
        _get_bigquery_access_for_role(
            'WRITER', add_permissions.get('readwrite', [])) +
        _get_bigquery_access_for_role(
            'READER', add_permissions.get('readonly', [])))
    # Update permissions for the dataset. This also removes the deployment
    # manager service account's access.
    previous_bq_update = 'update-big-query-dataset-' + ds_name
    resources.append({
        'name': previous_bq_update,
        'action': 'gcp-types/bigquery-v2:bigquery.datasets.patch',
        'properties': {
            'projectId': project_id,
            'datasetId': ds_name,
            'access': access_list,
        },
        'metadata': {
            'dependsOn': ['create-big-query-dataset-' + ds_name],
        },
    })

  # GCS bucket(s) to hold actual data. Create serially to avoid exceeding API
  # quota.
  default_bucket_owners = ['group:' + context.properties['owners_group']]
  default_bucket_readwrite = [
      'group:' + readwrite
      for readwrite in context.properties.get('data_readwrite_groups', [])]
  default_bucket_readonly = [
      'group:' + readonly
      for readonly in context.properties.get('data_readonly_groups', [])]
  for data_bucket in context.properties.get('data_buckets', []):
    # Data buckets write access logs into the logs bucket, so one must exist.
    if not logs_bucket_id:
      raise ValueError('Logs GCS bucket must be provided for data buckets.')
    bucket_roles = []
    add_permissions = data_bucket.get('additional_bucket_permissions', {})
    bucket_roles.append({
        'role': 'roles/storage.admin',
        'members': default_bucket_owners + add_permissions.get('owners', [])
    })
    bucket_roles.append({
        'role': 'roles/storage.objectAdmin',
        'members': default_bucket_readwrite + add_permissions.get(
            'readwrite', [])
    })
    bucket_roles.append({
        'role': 'roles/storage.objectViewer',
        'members': default_bucket_readonly + add_permissions.get('readonly', [])
    })
    bucket_roles.append({
        'role': 'roles/storage.objectCreator',
        'members': add_permissions.get('writeonly', [])
    })
    # Drop roles that ended up with no members.
    bindings = [role for role in bucket_roles if role['members']]
    data_bucket_id = project_id + data_bucket['name_suffix']
    data_bucket_resource = {
        'name': data_bucket_id,
        'type': 'storage.v1.bucket',
        'properties': {
            'location': data_bucket['location'],
            'storageClass': data_bucket['storage_class'],
            'logging': {
                'logBucket': logs_bucket_id,
            },
            'versioning': {
                'enabled': True,
            },
        },
        'accessControl': {
            'gcpIamPolicy': {
                'bindings': bindings,
            },
        },
    }
    # Chain on the previously created bucket (serialization, see above).
    if previous_gcs_bucket:
      data_bucket_resource['metadata'] = {
          'dependsOn': [previous_gcs_bucket],
      }
    resources.append(data_bucket_resource)
    previous_gcs_bucket = data_bucket_id

    # Create a logs-based metric for unexpected users, if a list of expected
    # users is provided.
    if 'expected_users' in data_bucket:
      # %% escapes the literal % in the URL-encoded log name.
      unexpected_access_filter = (
          'resource.type=gcs_bucket AND '
          'logName=projects/%(project_id)s/logs/'
          'cloudaudit.googleapis.com%%2Fdata_access AND '
          'protoPayload.resourceName=projects/_/buckets/%(bucket_id)s AND '
          'protoPayload.authenticationInfo.principalEmail!=(%(exp_users)s)') % {
              'project_id': project_id,
              'bucket_id': data_bucket_id,
              'exp_users': (' AND '.join(data_bucket['expected_users']))
          }
      resources.append({
          'name': 'unexpected-access-' + data_bucket_id,
          'type': 'logging.v2.metric',
          'properties': {
              'metric': 'unexpected-access-' + data_bucket_id,
              'description':
                  'Count of unexpected data access to ' + data_bucket_id + '.',
              'filter': unexpected_access_filter,
              'metricDescriptor': {
                  'metricKind': 'DELTA',
                  'valueType': 'INT64',
                  'unit': '1',
                  'labels': [{
                      'key': 'user',
                      'valueType': 'STRING',
                      'description': 'Unexpected user',
                  }],
              },
              'labelExtractors': {
                  'user':
                      'EXTRACT(protoPayload.authenticationInfo.principalEmail)'
              },
          },
      })

  # Create a Pub/Sub topic for the Cloud Healthcare service account to publish
  # to, with a subscription for the readwrite group.
  if 'pubsub' in context.properties:
    pubsub_config = context.properties['pubsub']
    topic_name = pubsub_config['topic']
    publisher_account = pubsub_config['publisher_account']
    resources.append({
        'name': topic_name,
        'type': 'pubsub.v1.topic',
        'properties': {
            'topic': topic_name,
        },
        'accessControl': {
            'gcpIamPolicy': {
                'bindings': [
                    {
                        'role': 'roles/pubsub.publisher',
                        'members': [
                            'serviceAccount:' + publisher_account
                        ],
                    },
                ],
            },
        },
    })
    resources.append({
        'name': pubsub_config['subscription'],
        'type': 'pubsub.v1.subscription',
        'properties': {
            'subscription': pubsub_config['subscription'],
            'topic': 'projects/{}/topics/{}'.format(project_id, topic_name),
            'ackDeadlineSeconds': pubsub_config['ack_deadline_sec']
        },
        'accessControl': {
            'gcpIamPolicy': {
                'bindings': [
                    {
                        'role': 'roles/pubsub.editor',
                        'members': [
                            'group:' + writer for writer in context.properties[
                                'data_readwrite_groups']
                        ],
                    },
                ],
            },
        },
        'metadata': {
            'dependsOn': [topic_name],
        },
    })

  # Create Logs-based metrics for IAM policy changes.
  policy_change_filter = ('protoPayload.methodName="SetIamPolicy" OR\n'
                          'protoPayload.methodName:".setIamPolicy"')
  resources.append({
      'name': 'iam-policy-change-count',
      'type': 'logging.v2.metric',
      'properties': {
          'metric': 'iam-policy-change-count',
          'description': 'Count of IAM policy changes.',
          'filter': policy_change_filter,
          'metricDescriptor': {
              'metricKind': 'DELTA',
              'valueType': 'INT64',
              'unit': '1',
              'labels': [{
                  'key': 'user',
                  'valueType': 'STRING',
                  'description': 'Unexpected user',
              }],
          },
          'labelExtractors': {
              'user': 'EXTRACT(protoPayload.authenticationInfo.principalEmail)'
          },
      },
  })

  # Create Logs-based metrics for GCS bucket permission changes.
  bucket_change_filter = """
      resource.type=gcs_bucket AND
      protoPayload.serviceName=storage.googleapis.com AND
      (protoPayload.methodName=storage.setIamPermissions OR
       protoPayload.methodName=storage.objects.update)"""
  resources.append({
      'name': 'bucket-permission-change-count',
      'type': 'logging.v2.metric',
      'properties': {
          'metric': 'bucket-permission-change-count',
          'description': 'Count of GCS permissions changes.',
          'filter': bucket_change_filter,
          'metricDescriptor': {
              'metricKind': 'DELTA',
              'valueType': 'INT64',
              'unit': '1',
              'labels': [{
                  'key': 'user',
                  'valueType': 'STRING',
                  'description': 'Unexpected user',
              }],
          },
          'labelExtractors': {
              'user': 'EXTRACT(protoPayload.authenticationInfo.principalEmail)'
          },
      },
  })

  # Create Logs-based metrics for Bigquery permission changes.
  bigquery_change_filter = ('resource.type="bigquery_resource" AND\n'
                            'protoPayload.methodName="datasetservice.update"')
  resources.append({
      'name': 'bigquery-settings-change-count',
      'type': 'logging.v2.metric',
      'properties': {
          'metric': 'bigquery-settings-change-count',
          'description': 'Count of bigquery permission changes.',
          'filter': bigquery_change_filter,
          'metricDescriptor': {
              'metricKind': 'DELTA',
              'valueType': 'INT64',
              'unit': '1',
              'labels': [{
                  'key': 'user',
                  'valueType': 'STRING',
                  'description': 'Unexpected user',
              }],
          },
          'labelExtractors': {
              'user': 'EXTRACT(protoPayload.authenticationInfo.principalEmail)'
          },
      },
  })

  # Enable data-access logging. UPDATE_ALWAYS is added to metadata to get a new
  # etag each time.
  resources.extend([{
      'name': 'audit-configs-get-iam-etag',
      'action': ('gcp-types/cloudresourcemanager-v1:'
                 'cloudresourcemanager.projects.getIamPolicy'),
      'properties': {
          'resource': project_id,
      },
      'metadata': {
          # Must run after the bindings patch so the etag reflects it.
          'dependsOn': ['set-project-bindings-patch-iam-policy'],
          'runtimePolicy': ['UPDATE_ALWAYS'],
      },
  }, {
      'name': 'audit-configs-patch-iam-policy',
      'action': ('gcp-types/cloudresourcemanager-v1:'
                 'cloudresourcemanager.projects.setIamPolicy'),
      'properties': {
          'resource': project_id,
          'policy': {
              'etag': '$(ref.audit-configs-get-iam-etag.etag)',
              'auditConfigs': [{
                  'auditLogConfigs': [
                      {'logType': 'ADMIN_READ'},
                      {'logType': 'DATA_WRITE'},
                      {'logType': 'DATA_READ'},
                  ],
                  'service': 'allServices',
              }],
          },
          'updateMask': 'auditConfigs,etag',
      },
      'metadata': {
          'dependsOn': ['audit-configs-get-iam-etag'],
      },
  }])
  return {'resources': resources}
|
qinweizhao/qwz-mall
|
mall-backend/mall-service/mall-service-system/src/main/java/com/qinweizhao/system/controller/SysLogininforController.java
|
package com.qinweizhao.system.controller;
import com.qinweizhao.common.core.utils.poi.ExcelUtil;
import com.qinweizhao.common.core.web.controller.BaseController;
import com.qinweizhao.common.core.web.domain.AjaxResult;
import com.qinweizhao.common.core.web.page.TableDataInfo;
import com.qinweizhao.common.security.annotation.InnerAuth;
import com.qinweizhao.common.security.annotation.RequiresPermissions;
import com.qinweizhao.component.log.annotation.Log;
import com.qinweizhao.component.log.enums.BusinessType;
import com.qinweizhao.system.api.domain.SysLogininfor;
import com.qinweizhao.system.service.ISysLogininforService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletResponse;
import java.util.List;

/**
 * REST controller for system sign-in (login) records.
 * Thin delegation layer over {@link ISysLogininforService}; permission checks
 * and operation logging are handled declaratively via annotations.
 *
 * @author ruoyi
 */
@RestController
@RequestMapping("/logininfor")
public class SysLogininforController extends BaseController {

    @Autowired
    private ISysLogininforService logininforService;

    /**
     * Returns a paged list of login records matching the given filter.
     *
     * @param logininfor filter criteria bound from request parameters
     * @return paged table data
     */
    @RequiresPermissions("system:logininfor:list")
    @GetMapping("/list")
    public TableDataInfo list(SysLogininfor logininfor) {
        startPage();
        List<SysLogininfor> list = logininforService.selectLogininforList(logininfor);
        return getDataTable(list);
    }

    /**
     * Exports login records matching the filter as an Excel file written to the response.
     *
     * @param response   servlet response the spreadsheet is streamed to
     * @param logininfor filter criteria bound from request parameters
     */
    @Log(title = "登录日志", businessType = BusinessType.EXPORT)
    @RequiresPermissions("system:logininfor:export")
    @PostMapping("/export")
    public void export(HttpServletResponse response, SysLogininfor logininfor) {
        List<SysLogininfor> list = logininforService.selectLogininforList(logininfor);
        ExcelUtil<SysLogininfor> util = new ExcelUtil<SysLogininfor>(SysLogininfor.class);
        util.exportExcel(response, list, "登录日志");
    }

    /**
     * Deletes the login records with the given ids.
     *
     * @param infoIds ids of the records to delete (path segment, comma separated)
     * @return success/failure result
     */
    @RequiresPermissions("system:logininfor:remove")
    @Log(title = "登录日志", businessType = BusinessType.DELETE)
    @DeleteMapping("/{infoIds}")
    public AjaxResult remove(@PathVariable Long[] infoIds) {
        return toAjax(logininforService.deleteLogininforByIds(infoIds));
    }

    /**
     * Removes all login records.
     *
     * @return success result
     */
    @RequiresPermissions("system:logininfor:remove")
    @Log(title = "登录日志", businessType = BusinessType.DELETE)
    @DeleteMapping("/clean")
    public AjaxResult clean() {
        logininforService.cleanLogininfor();
        return AjaxResult.success();
    }

    /**
     * Records a new login entry; callable only by trusted internal services
     * (enforced by {@code @InnerAuth}).
     *
     * @param logininfor the record to insert
     * @return success/failure result
     */
    @InnerAuth
    @PostMapping
    public AjaxResult add(@RequestBody SysLogininfor logininfor) {
        return toAjax(logininforService.insertLogininfor(logininfor));
    }
}
|
azkarmoulana/pycon
|
symposion/social_auth/views.py
|
from django.http import HttpResponseRedirect
from django.views.generic.list import ListView
from django.utils.translation import ugettext as _
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required
from account.mixins import LoginRequiredMixin
from social_auth.decorators import dsa_view
from social_auth.models import UserSocialAuth
from social_auth.utils import backend_setting
from social_auth.views import DEFAULT_REDIRECT
class SocialAuths(LoginRequiredMixin, ListView):
    """List the social-auth associations belonging to the current user."""

    model = UserSocialAuth

    def get_queryset(self):
        # Restrict the default queryset to the requesting user's own rows.
        base_qs = super(SocialAuths, self).get_queryset()
        return base_qs.filter(user=self.request.user)
@login_required
@dsa_view()
def disconnect(request, backend, association_id=None):
    """Detach one social-auth association from the logged-in user.

    Refuses to remove the last association when the account has no usable
    password, since that would leave the user unable to log in at all.
    """
    association_count = request.user.social_auth.count()
    redirect_to = (request.REQUEST.get(REDIRECT_FIELD_NAME, '')
                   or backend_setting(backend, 'SOCIAL_AUTH_DISCONNECT_REDIRECT_URL')
                   or DEFAULT_REDIRECT)
    if not request.user.has_usable_password() and association_count <= 1:
        messages.error(request, _("Cannot remove the only Social Account without first setting a Password or adding another Social Account."))
        return HttpResponseRedirect(redirect_to)
    association = request.user.social_auth.get(pk=association_id)
    backend.disconnect(request.user, association_id)
    display_uid = (association.extra_data.get("display", association.uid)
                   if association.extra_data is not None else association.uid)
    messages.success(request, _("Removed the %(provider)s account '%(uid)s'.") % {
        "provider": association.provider,
        "uid": display_uid,
    })
    return HttpResponseRedirect(redirect_to)
|
zealoussnow/chromium
|
ios/chrome/browser/crash_report/features.h
|
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef IOS_CHROME_BROWSER_CRASH_REPORT_FEATURES_H_
#define IOS_CHROME_BROWSER_CRASH_REPORT_FEATURES_H_

#include "base/feature_list.h"

// Feature flag controlling the Crashpad crash handler on iOS.
extern const base::Feature kCrashpadIOS;

// Feature flag for MetricKit-based crash reporting.
extern const base::Feature kMetrickitCrashReport;

// Feature flag for synthetic crash reports for Unexplained Termination
// Events (UTE); see EnableSyntheticCrashReportsForUte() below.
extern const base::Feature kSyntheticCrashReportsForUte;

// Returns true if kSyntheticCrashReportsForUte and kLogBreadcrumbs features are
// both enabled. There is not much value in uploading Synthetic Crash Reports
// without Breadcrumbs.
bool EnableSyntheticCrashReportsForUte();

#endif  // IOS_CHROME_BROWSER_CRASH_REPORT_FEATURES_H_
|
yanzv/indra_world
|
scripts/dec2020_compositional.py
|
import os
import sys
import glob
import tqdm
import pickle
import logging
from indra_world.corpus import Corpus
from indra_world.assembly.operations import *
from indra_world.sources.dart import process_reader_outputs
from indra.pipeline import AssemblyPipeline
logger = logging.getLogger('dec2020_compositional')
HERE = os.path.dirname(os.path.abspath(__file__))
# December experiment: reader output versions to load from DART storage,
# keyed by grounding mode ('flat' vs 'compositional') and reader name.
reader_versions = {'flat':
                       {'cwms': '2020.10.22',
                        'hume': 'r2020_10_26_2.flat',
                        # Note that this just matches the version on the
                        # bioexp machine dart drive and was manually renamed
                        # On DART, these entries appear as 1.1 and can only
                        # be differentiated by date.
                        'sofia': '1.1_old',
                        'eidos': '1.0.3'},
                   'compositional':
                       {'cwms': '2020.10.22',
                        'hume': 'r2020_10_28.compositional',
                        'sofia': '1.1',
                        'eidos': '1.0.3'}}

# Root folder under which the DART reader outputs are mounted.
DART_STORAGE = '/dart'
def load_reader_outputs(reader_versions, dart_storage=DART_STORAGE):
    """Load raw reader output files from DART storage.

    Parameters
    ----------
    reader_versions : dict
        Mapping of reader name to the output version (subfolder) to load.
    dart_storage : str
        Root folder under which outputs are laid out as
        <root>/<reader>/<version>/<doc_id>. Defaults to DART_STORAGE,
        preserving the previous hard-coded behavior.

    Returns
    -------
    dict
        Two-level dict: reader name -> document id -> raw output string.
    """
    logger.info('Loading outputs based on %s' % str(reader_versions))
    reader_outputs = {}
    for reader, version in reader_versions.items():
        logger.info('Loading %s/%s' % (reader, version))
        reader_outputs[reader] = {}
        reader_folder = os.path.join(dart_storage, reader, version)
        fnames = glob.glob('%s/*' % reader_folder)
        logger.info('Found %d files' % len(fnames))
        for fname in tqdm.tqdm(fnames):
            # The document id is simply the file name within the folder.
            doc_id = os.path.basename(fname)
            # Read as UTF-8 explicitly: the default locale encoding is
            # platform-dependent and reader outputs are text documents.
            with open(fname, 'r', encoding='utf-8') as fh:
                reader_outputs[reader][doc_id] = fh.read()
    return reader_outputs
if __name__ == '__main__':
    corpus_id = 'compositional_dec2020'
    logger.info('Processing reader output...')
    # Load raw compositional reader outputs from DART and turn them into
    # INDRA Statements tagged with this corpus id.
    reader_outputs = load_reader_outputs(reader_versions['compositional'])
    stmts = process_reader_outputs(reader_outputs, corpus_id)
    '''
    stmts = []
    for reader in reader_versions['compositional']:
        logger.info('Loading %s' % reader)
        if os.path.exists('compositional_dec2020_%s_raw.pkl' % reader):
            with open('compositional_dec2020_%s_raw.pkl' % reader, 'rb') as fh:
                stmts += pickle.load(fh)
    '''
    logger.info('Got a total of %s statements' % len(stmts))
    # Run the December 2020 compositional assembly pipeline over the raw
    # statements using the JSON pipeline config from the service resources.
    assembly_config_file = os.path.join(
        HERE, os.pardir, 'indra_wm_service', 'resources',
        'assembly_compositional_december2020.json')
    pipeline = AssemblyPipeline.from_json_file(assembly_config_file)
    assembled_stmts = pipeline.run(stmts)
    # Document count for this corpus, hard-coded for the metadata record.
    num_docs = 44591
    meta_data = {
        'corpus_id': corpus_id,
        'description': 'Compositional grounding assembly for the December '
                       '2020 documents.',
        'display_name': 'Compositional grounding assembly December 2020',
        'readers': list(reader_versions['compositional'].keys()),
        'assembly': {
            'level': 'grounding_location',
            'grounding_threshold': 0.6,
        },
        'num_statements': len(assembled_stmts),
        'num_documents': num_docs
    }
    # Package raw + assembled statements into a Corpus and upload to S3.
    corpus = Corpus(corpus_id=corpus_id,
                    statements=assembled_stmts,
                    raw_statements=stmts,
                    meta_data=meta_data)
    corpus.s3_put()
|
Yinkozi/R8
|
src/main/java/com/android/tools/r8/BaseCompilerCommandParser.java
|
// Copyright (c) 2018, the R8 project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
package com.android.tools.r8;
import com.android.tools.r8.AssertionsConfiguration.AssertionTransformation;
import com.android.tools.r8.origin.Origin;
import com.android.tools.r8.utils.ExceptionDiagnostic;
import com.android.tools.r8.utils.StringDiagnostic;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.function.Consumer;
/**
 * Shared command-line parsing helpers for compiler commands: positive
 * integer flag values, the --force-*-assertions flag family, and library
 * arguments that may point at a JDK/JRE home.
 */
public class BaseCompilerCommandParser<
    C extends BaseCompilerCommand, B extends BaseCompilerCommand.Builder<C, B>> {

  protected static final String MIN_API_FLAG = "--min-api";
  protected static final String THREAD_COUNT_FLAG = "--thread-count";

  // Help text for the assertion-transformation flags.
  static final Iterable<String> ASSERTIONS_USAGE_MESSAGE =
      Arrays.asList(
          " --force-enable-assertions[:[<class name>|<package name>...]]",
          " --force-ea[:[<class name>|<package name>...]]",
          " # Forcefully enable javac generated assertion code.",
          " --force-disable-assertions[:[<class name>|<package name>...]]",
          " --force-da[:[<class name>|<package name>...]]",
          " # Forcefully disable javac generated assertion code. This",
          " # is the default handling of javac assertion code when",
          " # generating DEX file format.",
          " --force-passthrough-assertions[:[<class name>|<package name>...]]",
          " --force-pa[:[<class name>|<package name>...]]",
          " # Don't change javac generated assertion code. This",
          " # is the default handling of javac assertion code when",
          " # generating class file format.");

  // Help text for --thread-count.
  static final Iterable<String> THREAD_COUNT_USAGE_MESSAGE =
      Arrays.asList(
          " " + THREAD_COUNT_FLAG + " <number of threads>",
          " # Number of threads to use for compilation. If not specified",
          " # the number will be based on heuristics taking the number",
          " # of cores into account.");

  /**
   * Parse {@code argument} as a strictly positive integer value for
   * {@code flag}. On success the value is handed to {@code setter}; on a
   * non-numeric or non-positive value a diagnostic is reported through
   * {@code errorConsumer} and the setter is not called.
   */
  public static void parsePositiveIntArgument(
      Consumer<Diagnostic> errorConsumer,
      String flag,
      String argument,
      Origin origin,
      Consumer<Integer> setter) {
    int value;
    try {
      value = Integer.parseInt(argument);
    } catch (NumberFormatException e) {
      errorConsumer.accept(
          new StringDiagnostic("Invalid argument to " + flag + ": " + argument, origin));
      return;
    }
    if (value < 1) {
      errorConsumer.accept(
          new StringDiagnostic("Invalid argument to " + flag + ": " + argument, origin));
      return;
    }
    setter.accept(value);
  }

  // A scope ending in "..." denotes a package subtree rather than a class.
  private static String PACKAGE_ASSERTION_POSTFIX = "...";

  /**
   * Register an assertion transformation on the builder for the given
   * scope: a null scope means "all code", a "..."-suffixed scope selects a
   * package, anything else is treated as a class name.
   */
  private void addAssertionTransformation(
      B builder, AssertionTransformation transformation, String scope) {
    if (scope == null) {
      builder.addAssertionsConfiguration(
          b -> b.setTransformation(transformation).setScopeAll().build());
    } else {
      assert scope.length() > 0;
      if (scope.endsWith(PACKAGE_ASSERTION_POSTFIX)) {
        builder.addAssertionsConfiguration(
            b ->
                b.setTransformation(transformation)
                    .setScopePackage(
                        scope.substring(0, scope.length() - PACKAGE_ASSERTION_POSTFIX.length()))
                    .build());
      } else {
        builder.addAssertionsConfiguration(
            b -> b.setTransformation(transformation).setScopeClass(scope).build());
      }
    }
  }

  /**
   * Try to interpret {@code arg} as one of the --force-*-assertions flags,
   * optionally followed by ":<class or package scope>".
   *
   * @return true when the argument was recognized and consumed
   */
  boolean tryParseAssertionArgument(B builder, String arg, Origin origin) {
    String FORCE_ENABLE_ASSERTIONS = "--force-enable-assertions";
    String FORCE_EA = "--force-ea";
    String FORCE_DISABLE_ASSERTIONS = "--force-disable-assertions";
    String FORCE_DA = "--force-da";
    String FORCE_PASSTHROUGH_ASSERTIONS = "--force-passthrough-assertions";
    String FORCE_PA = "--force-pa";
    AssertionTransformation transformation = null;
    String remaining = null;
    // Prefix matching: none of the short forms is a prefix of a long form,
    // so the order of these tests is safe.
    if (arg.startsWith(FORCE_ENABLE_ASSERTIONS)) {
      transformation = AssertionTransformation.ENABLE;
      remaining = arg.substring(FORCE_ENABLE_ASSERTIONS.length());
    } else if (arg.startsWith(FORCE_EA)) {
      transformation = AssertionTransformation.ENABLE;
      remaining = arg.substring(FORCE_EA.length());
    } else if (arg.startsWith(FORCE_DISABLE_ASSERTIONS)) {
      transformation = AssertionTransformation.DISABLE;
      remaining = arg.substring(FORCE_DISABLE_ASSERTIONS.length());
    } else if (arg.startsWith(FORCE_DA)) {
      transformation = AssertionTransformation.DISABLE;
      remaining = arg.substring(FORCE_DA.length());
    } else if (arg.startsWith(FORCE_PASSTHROUGH_ASSERTIONS)) {
      transformation = AssertionTransformation.PASSTHROUGH;
      remaining = arg.substring(FORCE_PASSTHROUGH_ASSERTIONS.length());
    } else if (arg.startsWith(FORCE_PA)) {
      transformation = AssertionTransformation.PASSTHROUGH;
      remaining = arg.substring(FORCE_PA.length());
    }
    if (transformation != null) {
      if (remaining.length() == 0) {
        // Bare flag: apply to all code.
        addAssertionTransformation(builder, transformation, null);
        return true;
      } else {
        // Anything after the flag must be ":<non-empty scope>".
        if (remaining.length() == 1 || remaining.charAt(0) != ':') {
          return false;
        }
        String classOrPackageScope = remaining.substring(1);
        // Reject JVM-internal name syntax (descriptors, arrays, slashes).
        if (classOrPackageScope.contains(";")
            || classOrPackageScope.contains("[")
            || classOrPackageScope.contains("/")) {
          builder.error(
              new StringDiagnostic("Illegal assertion scope: " + classOrPackageScope, origin));
        }
        addAssertionTransformation(builder, transformation, remaining.substring(1));
        return true;
      }
    } else {
      return false;
    }
  }

  /**
   * This method must match the lookup in
   * {@link com.android.tools.r8.JdkClassFileProvider#fromJdkHome}.
   */
  private static boolean isJdkHome(Path home) {
    Path jrtFsJar = home.resolve("lib").resolve("jrt-fs.jar");
    if (Files.exists(jrtFsJar)) {
      return true;
    }
    // JDK has rt.jar in jre/lib/rt.jar.
    Path rtJar = home.resolve("jre").resolve("lib").resolve("rt.jar");
    if (Files.exists(rtJar)) {
      return true;
    }
    // JRE has rt.jar in lib/rt.jar.
    rtJar = home.resolve("lib").resolve("rt.jar");
    if (Files.exists(rtJar)) {
      return true;
    }
    return false;
  }

  /**
   * Add a library argument to the builder: a JDK/JRE home directory is
   * wrapped in a class-file provider, any other path is added as a file.
   */
  static void addLibraryArgument(BaseCommand.Builder builder, Origin origin, String arg) {
    Path path = Paths.get(arg);
    if (isJdkHome(path)) {
      try {
        builder
            .addLibraryResourceProvider(JdkClassFileProvider.fromJdkHome(path));
      } catch (IOException e) {
        builder.error(new ExceptionDiagnostic(e, origin));
      }
    } else {
      builder.addLibraryFiles(path);
    }
  }
}
|
kohlschuetter/dumbo
|
dumbo-jabsorb/src/main/java/org/jabsorb/serializer/response/CircularReferenceHandler.java
|
/*
* jabsorb - a Java to JavaScript Advanced Object Request Broker
* http://www.jabsorb.org
*
* Copyright 2007-2009 The jabsorb team
*
* based on original code from
* JSON-RPC-Java - a JSON-RPC to Java Bridge with dynamic invocation
*
* Copyright Metaparadigm Pte. Ltd. 2004.
* <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jabsorb.serializer.response;
import java.util.List;
import org.jabsorb.serializer.MarshallException;
/**
 * Allows circular references to be signalled when found in Java code.
 *
 * @author <NAME>
 */
public interface CircularReferenceHandler
{
  /**
   * Signals that a circular reference was found.
   *
   * @param originalLocation The location where the object first appeared
   * @param ref The reference from the current location where it next
   *          appeared
   * @param java The object which is referred to from within itself.
   * @return The object to put in the place of the circular reference in the
   *         JSONObject
   * @throws MarshallException May be thrown if a circular reference is found
   *           and cannot be handled
   */
  public Object circularReferenceFound(List<Object> originalLocation,
      Object ref, Object java) throws MarshallException;
}
|
kikikos/openairinterface5g
|
cmake_targets/basic_simulator/enb/CMakeFiles/RRC_Rel14/DL-DCCH-MessageType.c
|
<reponame>kikikos/openairinterface5g
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "EUTRA-RRC-Definitions"
* found in "/home/user/openairinterface5g/openair2/RRC/LTE/MESSAGES/asn1c/ASN1_files/lte-rrc-14.7.0.asn1"
* `asn1c -pdu=all -fcompound-names -gen-PER -no-gen-OER -no-gen-example -D /home/user/openairinterface5g/cmake_targets/basic_simulator/enb/CMakeFiles/RRC_Rel14`
*/
#include "DL-DCCH-MessageType.h"
/* NOTE(review): asn1c-generated table; do not hand-edit logic, regenerate
 * from the ASN.1 source instead. */
/* PER constraint for the inner c1 CHOICE index (4 bits, 16 alternatives). */
static asn_per_constraints_t asn_PER_type_c1_constr_2 CC_NOTUSED = {
	{ APC_CONSTRAINED, 4, 4, 0, 15 } /* (0..15) */,
	{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
	0, 0 /* No PER value map */
};
/* PER constraint for the outer CHOICE (c1 vs. messageClassExtension). */
asn_per_constraints_t asn_PER_type_DL_DCCH_MessageType_constr_1 CC_NOTUSED = {
	{ APC_CONSTRAINED, 1, 1, 0, 1 } /* (0..1) */,
	{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
	0, 0 /* No PER value map */
};
/*
 * asn1c-generated member table for the c1 CHOICE: one entry per DL-DCCH
 * message alternative; the context tag number doubles as the CHOICE index.
 */
static asn_TYPE_member_t asn_MBR_c1_2[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.csfbParametersResponseCDMA2000),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_CSFBParametersResponseCDMA2000,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"csfbParametersResponseCDMA2000"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.dlInformationTransfer),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_DLInformationTransfer,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"dlInformationTransfer"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.handoverFromEUTRAPreparationRequest),
		(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_HandoverFromEUTRAPreparationRequest,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"handoverFromEUTRAPreparationRequest"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.mobilityFromEUTRACommand),
		(ASN_TAG_CLASS_CONTEXT | (3 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_MobilityFromEUTRACommand,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"mobilityFromEUTRACommand"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.rrcConnectionReconfiguration),
		(ASN_TAG_CLASS_CONTEXT | (4 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_RRCConnectionReconfiguration,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rrcConnectionReconfiguration"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.rrcConnectionRelease),
		(ASN_TAG_CLASS_CONTEXT | (5 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_RRCConnectionRelease,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rrcConnectionRelease"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.securityModeCommand),
		(ASN_TAG_CLASS_CONTEXT | (6 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_SecurityModeCommand,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"securityModeCommand"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.ueCapabilityEnquiry),
		(ASN_TAG_CLASS_CONTEXT | (7 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_UECapabilityEnquiry,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ueCapabilityEnquiry"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.counterCheck),
		(ASN_TAG_CLASS_CONTEXT | (8 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_CounterCheck,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"counterCheck"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.ueInformationRequest_r9),
		(ASN_TAG_CLASS_CONTEXT | (9 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_UEInformationRequest_r9,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ueInformationRequest-r9"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.loggedMeasurementConfiguration_r10),
		(ASN_TAG_CLASS_CONTEXT | (10 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_LoggedMeasurementConfiguration_r10,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"loggedMeasurementConfiguration-r10"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.rnReconfiguration_r10),
		(ASN_TAG_CLASS_CONTEXT | (11 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_RNReconfiguration_r10,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rnReconfiguration-r10"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.rrcConnectionResume_r13),
		(ASN_TAG_CLASS_CONTEXT | (12 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_RRCConnectionResume_r13,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rrcConnectionResume-r13"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.spare3),
		(ASN_TAG_CLASS_CONTEXT | (13 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_NULL,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"spare3"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.spare2),
		(ASN_TAG_CLASS_CONTEXT | (14 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_NULL,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"spare2"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType__c1, choice.spare1),
		(ASN_TAG_CLASS_CONTEXT | (15 << 2)),
		-1, /* IMPLICIT tag at current level */
		&asn_DEF_NULL,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"spare1"
		},
};
/* Tag-to-member lookup for the c1 CHOICE (generated; tag == member index). */
static const asn_TYPE_tag2member_t asn_MAP_c1_tag2el_2[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* csfbParametersResponseCDMA2000 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* dlInformationTransfer */
    { (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 }, /* handoverFromEUTRAPreparationRequest */
    { (ASN_TAG_CLASS_CONTEXT | (3 << 2)), 3, 0, 0 }, /* mobilityFromEUTRACommand */
    { (ASN_TAG_CLASS_CONTEXT | (4 << 2)), 4, 0, 0 }, /* rrcConnectionReconfiguration */
    { (ASN_TAG_CLASS_CONTEXT | (5 << 2)), 5, 0, 0 }, /* rrcConnectionRelease */
    { (ASN_TAG_CLASS_CONTEXT | (6 << 2)), 6, 0, 0 }, /* securityModeCommand */
    { (ASN_TAG_CLASS_CONTEXT | (7 << 2)), 7, 0, 0 }, /* ueCapabilityEnquiry */
    { (ASN_TAG_CLASS_CONTEXT | (8 << 2)), 8, 0, 0 }, /* counterCheck */
    { (ASN_TAG_CLASS_CONTEXT | (9 << 2)), 9, 0, 0 }, /* ueInformationRequest-r9 */
    { (ASN_TAG_CLASS_CONTEXT | (10 << 2)), 10, 0, 0 }, /* loggedMeasurementConfiguration-r10 */
    { (ASN_TAG_CLASS_CONTEXT | (11 << 2)), 11, 0, 0 }, /* rnReconfiguration-r10 */
    { (ASN_TAG_CLASS_CONTEXT | (12 << 2)), 12, 0, 0 }, /* rrcConnectionResume-r13 */
    { (ASN_TAG_CLASS_CONTEXT | (13 << 2)), 13, 0, 0 }, /* spare3 */
    { (ASN_TAG_CLASS_CONTEXT | (14 << 2)), 14, 0, 0 }, /* spare2 */
    { (ASN_TAG_CLASS_CONTEXT | (15 << 2)), 15, 0, 0 } /* spare1 */
};
/* Runtime specifics for the c1 CHOICE (offsets of context/discriminant). */
static asn_CHOICE_specifics_t asn_SPC_c1_specs_2 = {
	sizeof(struct DL_DCCH_MessageType__c1),
	offsetof(struct DL_DCCH_MessageType__c1, _asn_ctx),
	offsetof(struct DL_DCCH_MessageType__c1, present),
	sizeof(((struct DL_DCCH_MessageType__c1 *)0)->present),
	asn_MAP_c1_tag2el_2,
	16, /* Count of tags in the map */
	0, 0,
	-1 /* Extensions start */
};
/* Type descriptor for the inner c1 CHOICE. */
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_c1_2 = {
	"c1",
	"c1",
	&asn_OP_CHOICE,
	0, /* No effective tags (pointer) */
	0, /* No effective tags (count) */
	0, /* No tags (pointer) */
	0, /* No tags (count) */
	{ 0, &asn_PER_type_c1_constr_2, CHOICE_constraint },
	asn_MBR_c1_2,
	16, /* Elements count */
	&asn_SPC_c1_specs_2 /* Additional specs */
};
/* Tags, specifics and descriptor for the (empty) messageClassExtension
 * SEQUENCE placeholder. */
static const ber_tlv_tag_t asn_DEF_messageClassExtension_tags_19[] = {
	(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
static asn_SEQUENCE_specifics_t asn_SPC_messageClassExtension_specs_19 = {
	sizeof(struct DL_DCCH_MessageType__messageClassExtension),
	offsetof(struct DL_DCCH_MessageType__messageClassExtension, _asn_ctx),
	0, /* No top level tags */
	0, /* No tags in the map */
	0, 0, 0, /* Optional elements (not needed) */
	-1, /* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_messageClassExtension_19 = {
	"messageClassExtension",
	"messageClassExtension",
	&asn_OP_SEQUENCE,
	asn_DEF_messageClassExtension_tags_19,
	sizeof(asn_DEF_messageClassExtension_tags_19)
		/sizeof(asn_DEF_messageClassExtension_tags_19[0]) - 1, /* 1 */
	asn_DEF_messageClassExtension_tags_19, /* Same as above */
	sizeof(asn_DEF_messageClassExtension_tags_19)
		/sizeof(asn_DEF_messageClassExtension_tags_19[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	0, 0, /* No members */
	&asn_SPC_messageClassExtension_specs_19 /* Additional specs */
};
/* Member table for the top-level DL-DCCH-MessageType CHOICE:
 * c1 (EXPLICIT) or messageClassExtension. */
asn_TYPE_member_t asn_MBR_DL_DCCH_MessageType_1[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType, choice.c1),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		+1, /* EXPLICIT tag at current level */
		&asn_DEF_c1_2,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"c1"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct DL_DCCH_MessageType, choice.messageClassExtension),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		0,
		&asn_DEF_messageClassExtension_19,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"messageClassExtension"
		},
};
/* Tag-to-member lookup and runtime specifics for the top-level CHOICE. */
static const asn_TYPE_tag2member_t asn_MAP_DL_DCCH_MessageType_tag2el_1[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* c1 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* messageClassExtension */
};
asn_CHOICE_specifics_t asn_SPC_DL_DCCH_MessageType_specs_1 = {
	sizeof(struct DL_DCCH_MessageType),
	offsetof(struct DL_DCCH_MessageType, _asn_ctx),
	offsetof(struct DL_DCCH_MessageType, present),
	sizeof(((struct DL_DCCH_MessageType *)0)->present),
	asn_MAP_DL_DCCH_MessageType_tag2el_1,
	2, /* Count of tags in the map */
	0, 0,
	-1 /* Extensions start */
};
/* Public type descriptor for DL-DCCH-MessageType. */
asn_TYPE_descriptor_t asn_DEF_DL_DCCH_MessageType = {
	"DL-DCCH-MessageType",
	"DL-DCCH-MessageType",
	&asn_OP_CHOICE,
	0, /* No effective tags (pointer) */
	0, /* No effective tags (count) */
	0, /* No tags (pointer) */
	0, /* No tags (count) */
	{ 0, &asn_PER_type_DL_DCCH_MessageType_constr_1, CHOICE_constraint },
	asn_MBR_DL_DCCH_MessageType_1,
	2, /* Elements count */
	&asn_SPC_DL_DCCH_MessageType_specs_1 /* Additional specs */
};
|
lucasfts/no-waste
|
no-waste-api/src/models/userSettings.js
|
const mongoose = require('mongoose');

// Links a user to their settings document by id.
const userSettingsSchema = new mongoose.Schema({
  userId: { type: String, required: true },
  settingsId: { type: String, required: true },
});

module.exports = mongoose.model('UserSettings', userSettingsSchema);
|
vlstus/GoF-Patterns
|
src/main/java/org/study/creational/factorymethod/logistic/AbstractLogistics.java
|
<gh_stars>0
package org.study.creational.factorymethod.logistic;
import org.study.creational.factorymethod.transport.Transport;
/**
 * Factory Method pattern: base logistics class whose subclasses decide
 * which {@link Transport} to create via {@link #getTransport()}.
 */
public abstract class AbstractLogistics implements Logistics {

    /** Factory method: subclasses return the concrete transport to use. */
    protected abstract Transport getTransport();

    /** Ships using whatever transport the subclass provides. */
    public void transport() {
        System.out.println("Transporting shipment with " + getTransport());
    }
}
|
we-code-this/channel-zero-api
|
seeds/production/promos.js
|
<filename>seeds/production/promos.js
// Idempotent seed for the `promos` table: the fixture rows are inserted only
// when the sentinel 'DigTheDig' promo is not already present.
exports.seed = function(knex, Promise) {
  return knex('promos')
    .select()
    .where({
      name: 'DigTheDig',
    })
    .then(function(rows) {
      // Presence of the sentinel row means the seed already ran.
      if (rows.length === 0) {
        return knex('promos').insert([
          {
            id: 1,
            user_id: 1,
            name: '<NAME>',
            url: '#',
            filename: 'madurgency.svg',
            location: 'horizontal',
            published: true,
            created_at: '2019-01-19 12:00:00',
          },
          {
            id: 2,
            user_id: 1,
            name: '<NAME>',
            url: '#',
            filename: 'the-age.svg',
            location: 'horizontal',
            published: true,
            created_at: '2019-01-19 12:01:00',
          },
          {
            id: 3,
            user_id: 1,
            name: '<NAME>',
            url: '#',
            filename: 'enemy-books.svg',
            location: 'horizontal',
            published: true,
            created_at: '2019-01-19 12:02:00',
          },
          {
            id: 4,
            user_id: 1,
            name: '<NAME>',
            url: '#',
            filename: 'spitburg-pirates.svg',
            location: 'horizontal',
            published: true,
            created_at: '2019-01-19 12:03:00',
          },
          {
            id: 5,
            user_id: 1,
            name: 'HipHopGods',
            url: '#',
            filename: 'hiphopgods.svg',
            location: 'horizontal',
            published: true,
            created_at: '2019-01-19 12:04:00',
          },
          {
            id: 6,
            user_id: 1,
            name: 'Spitifly',
            url: '#',
            filename: 'spitifly.svg',
            location: 'vertical',
            published: true,
            created_at: '2019-01-19 12:06:00',
          },
          {
            id: 7,
            user_id: 1,
            name: 'Rapstation.com',
            url: 'https://rapstation.com',
            filename: 'rapstation.svg',
            location: 'vertical',
            published: true,
            created_at: '2019-01-19 12:05:00',
          },
          {
            id: 8,
            user_id: 1,
            name: 'Spitslam',
            url: 'https://slamjamz.com',
            filename: 'spitslam.svg',
            location: 'vertical',
            published: true,
            created_at: '2019-01-19 12:06:00',
          },
          {
            id: 9,
            user_id: 1,
            name: 'DigTheDig',
            url: '/dig',
            filename: 'dig-the-dig.svg',
            location: 'vertical',
            published: true,
            created_at: '2019-01-19 12:07:00',
          },
        ]);
      }
    });
};
|
youzhengjie9/cloud-mall
|
cloud-mall-commons/src/main/java/com/boot/feign/system/fallback/CouponsRecordFallbackFeign.java
|
package com.boot.feign.system.fallback;
import com.boot.feign.system.fallback.impl.CouponsRecordFallbackFeignImpl;
import com.boot.pojo.CouponsRecord;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * OpenFeign client for coupon-record queries against the cloud-mall-system
 * service; falls back to {@link CouponsRecordFallbackFeignImpl} on failure.
 */
@Component
@FeignClient(value = "cloud-mall-system",fallback = CouponsRecordFallbackFeignImpl.class)
public interface CouponsRecordFallbackFeign {

    /**
     * Page through a user's coupon records filtered by use type and the
     * given time value (nowtime — assumed to be a formatted timestamp;
     * confirm against the service implementation).
     */
    @ResponseBody
    @GetMapping(path = "/feign/couponsRecord/selectCouponsRecordByUserIdAndLimit/{page}/{size}/{userid}/{usetype}/{nowtime}")
    public List<CouponsRecord> selectCouponsRecordByUserIdAndLimit(@PathVariable("page") int page,
                                                                   @PathVariable("size") int size,
                                                                   @PathVariable("userid") long userid,
                                                                   @PathVariable("usetype") int usetype,
                                                                   @PathVariable("nowtime") String nowtime);

    /** Fetch a single coupon record by coupon id, user id and use type. */
    @ResponseBody
    @GetMapping(path = "/feign/couponsRecord/selectCouponsRecord/{couponsid}/{userid}/{usetype}")
    public CouponsRecord selectCouponsRecord(@PathVariable("couponsid") long couponsid,
                                             @PathVariable("userid") long userid,
                                             @PathVariable("usetype") int usetype);
}
|
yashgolwala/Software_Measurement_Team_M
|
ProjectSourceCode/Apache Commons Math v3.5/src/test/java/org/apache/commons/math3/optimization/linear/SimplexTableauTest.java
|
<reponame>yashgolwala/Software_Measurement_Team_M<filename>ProjectSourceCode/Apache Commons Math v3.5/src/test/java/org/apache/commons/math3/optimization/linear/SimplexTableauTest.java<gh_stars>0
version https://git-lfs.github.com/spec/v1
oid sha256:56484096394c83148939310308b2968f215afc942cf00a3ebd5b53a5b80e86a3
size 5150
|
joansmith1/ontrack
|
ontrack-extension/ontrack-extension-git/src/main/java/net/ontrack/extension/git/GitCommitNotFoundException.java
|
<filename>ontrack-extension/ontrack-extension-git/src/main/java/net/ontrack/extension/git/GitCommitNotFoundException.java<gh_stars>0
package net.ontrack.extension.git;
import net.sf.jstring.support.CoreException;
/**
 * Thrown when a Git commit with the given name cannot be found.
 */
public class GitCommitNotFoundException extends CoreException {

    /**
     * @param name identifier of the commit that could not be found
     */
    public GitCommitNotFoundException(String name) {
        super(name);
    }
}
|
skafle/cloudata
|
src/examples/first_examples/org/cloudata/examples/first/JdbcExample.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudata.examples.first;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.DecimalFormat;
/**
 * End-to-end example of using Cloudata through its JDBC driver: lists the
 * existing tables, creates a new one, then demonstrates insert, select and
 * delete statements against it.
 *
 * @author jindolk
 *
 */
public class JdbcExample {
    public static void main(String[] args) throws Exception {
        if(args.length < 2) {
            System.out.println("Usage:java JdbcExample <url> <table_name>");
            System.out.println(" url: jdbc:cloudata://zookeepr_hostname:zookeeper_port:clusterId/cloudata_service_name?user=test&charset=EUC-KR&filesystem=hdfs://hdfshost:50070");
            System.exit(0);
        }
        String url = args[0];
        String tableName = args[1];
        // Register the Cloudata JDBC driver before asking DriverManager for
        // a connection.
        Class.forName("org.cloudata.jdbc.CloudataDriver");
        Connection conn = DriverManager.getConnection(url);
        Statement stmt = null;
        ResultSet rs = null;
        try {
            boolean exist = false;
            //print table list
            rs = conn.getMetaData().getTables(null, null, null, null);
            System.out.println("--------------- Table -----------------");
            while(rs.next()) {
                System.out.println(rs.getString("Name") + "," + rs.getString("Owner") + "," + rs.getString("Version"));
                if(rs.getString(1).equals(tableName)) {
                    exist = true;
                }
            }
            rs.close();
            System.out.println("---------------------------------------");
            if(exist) {
                // NOTE(review): System.exit here skips the finally block, so
                // the connection is not closed explicitly on this path (the
                // JVM exit reclaims it).
                System.out.println(tableName + " already exists");
                System.exit(0);
            }
            stmt = conn.createStatement();
            //create table
            stmt.executeUpdate("create table " + tableName + " (col1, col2) version = 0");
            //insert
            insert(tableName, conn);
            //select
            select(tableName, conn);
            //delete
            delete(tableName, conn);
            System.out.println("----------------------------------------------");
            //select
            select(tableName, conn);
        } finally {
            // Close in reverse acquisition order; JDBC close() is tolerated
            // here even if rs was already closed above.
            if(rs != null) {
                rs.close();
            }
            if(stmt != null) {
                stmt.close();
            }
            if(conn != null) {
                conn.close();
            }
        }
    }

    /** Demonstrates deletes: a whole column, a single cell, a whole row. */
    private static void delete(String tableName, Connection conn) throws SQLException {
        Statement stmt = null;
        try {
            stmt = conn.createStatement();
            DecimalFormat df = new DecimalFormat("0000000000");
            String rowKey = df.format(1);
            stmt.executeUpdate("delete col1 from " + tableName + " where rowkey = '" + rowKey + "'");
            rowKey = df.format(2);
            stmt.executeUpdate("delete col1 from " + tableName + " where rowkey = '" + rowKey + "' and col1 = 'ck1_2'");
            rowKey = df.format(5);
            stmt.executeUpdate("delete * from " + tableName + " where rowkey = '" + rowKey + "'");
        } finally {
            if(stmt != null) {
                stmt.close();
            }
        }
    }

    /**
     * Inserts 10 rows (zero-padded numeric row keys), each with three
     * (key, value) cells in both col1 and col2.
     */
    private static void insert(String tableName, Connection conn) throws SQLException {
        Statement stmt = null;
        try {
            stmt = conn.createStatement();
            DecimalFormat df = new DecimalFormat("0000000000");
            for(int i = 0; i < 10; i++) {
                String rowKey = df.format(i);
                for(int j = 1; j <= 3; j++) {
                    String col1Key = "ck1_" + j;
                    String col1Value = "value1_" + j;
                    String col2Key = "ck2_" + j;
                    String col2Value = "value2_" + j;
                    stmt.executeUpdate("insert into " + tableName + " values ( " +
                        "('" + col1Key + "','" + col1Value + "')," +
                        "('" + col2Key + "','" + col2Value + "') " +
                        ") where rowkey = '" + rowKey + "'");
                }
            }
        } finally {
            if(stmt != null) {
                stmt.close();
            }
        }
    }

    /** Selects and prints every row, cell value and timestamp in the table. */
    private static void select(String tableName, Connection conn)
        throws SQLException {
        ResultSet rs = null;
        Statement stmt = null;
        try {
            stmt = conn.createStatement();
            rs = stmt.executeQuery("select * from " + tableName);
            int count = 0;
            while(rs.next()) {
                ResultSetMetaData rsmd = rs.getMetaData();
                int colCount = rsmd.getColumnCount();
                // The row key is exposed as an extra trailing column.
                System.out.print("Row:" + rs.getString(colCount + 1) + ",");
                for(int i = 1; i <= colCount; i++) {
                    byte[] value = rs.getBytes(i);
                    String valueStr = value == null ? "null" : new String(value);
                    System.out.print(rsmd.getColumnName(i) + "[" + rs.getString(i) + ":" + valueStr + ":" + rs.getTimestamp(i) + "]");
                    if(i < colCount) {
                        System.out.print(", ");
                    }
                }
                System.out.print("\n");
                count++;
            }
            System.out.println(count + " values selected");
        } finally {
            if(rs != null) {
                rs.close();
            }
            if(stmt != null) {
                stmt.close();
            }
        }
    }
}
|
IMCN-UvA/imcn-imaging
|
nl/uva/imcn/structures/Octree.java
|
package nl.uva.imcn.structures;
import java.io.*;
import java.util.*;
import nl.uva.imcn.utilities.*;
/**
*
* This class handles octrees and perform simple operations
*
* Implementation loosely adapted from:
* "Simple and efficient traversal methods for quadtrees and octrees"
* <NAME> and <NAME>, MERL.
*
* (the elaborate binary operations used to speed up the system are not
* necessarily used here)
*
* @version April 2006
* @author <NAME>
*
*
*/
public class Octree {
    private OctreeCell root;            // top cell, at level ROOT_LEVEL
    private static int LEVELS;          // total number of levels (leaves are level 0)
    private static int ROOT_LEVEL;      // level of the root = LEVELS-1
    private static float SCALE;         // grid extent at leaf level: 2^ROOT_LEVEL
    // coordinate indices into float[] point arrays
    private static final byte X=0;
    private static final byte Y=1;
    private static final byte Z=2;
    private static final boolean debug = false;
    private static final boolean verbose = false;

    /** Builds an (empty) octree bookkeeping structure with lv_ levels. */
    public Octree(int lv_) {
        LEVELS = lv_;
        ROOT_LEVEL = lv_-1;
        SCALE = (float)Math.pow(2.0,ROOT_LEVEL);
    }
    /**
     * traversal from a root cell to a leaf cell using x,y,z.
     * <p>
     * The leaf cell is returned in cell
     */
    public OctreeCell traverseToLeaf(OctreeCell init, int x, int y, int z) {
        OctreeCell cell = init;
        while (cell.children!=null) {
            // half-size of the current cell = size of its children
            int cs = 1 << (cell.level-1);
            if (debug) System.out.print("["+cell.x+","+cell.y+","+cell.z+","+cs+"]");
            // child index encodes the octant: bit0 = x half, bit1 = y half, bit2 = z half
            int childIndex = 0;
            if (x>=cell.x+cs) childIndex += 1;
            if (y>=cell.y+cs) childIndex += 2;
            if (z>=cell.z+cs) childIndex += 4;
            cell = cell.children[childIndex];
        }
        if (debug) System.out.print("["+cell.x+","+cell.y+","+cell.z+"]");
        return cell;
    }
    /**
     * traversal from a root cell to a child cell using x,y,z down to a certain level.
     * <p>
     * The child cell is returned in cell, and can be a leaf cell if the level is too low.
     */
    public OctreeCell traverseToLevel(OctreeCell init, int x, int y, int z, int level) {
        OctreeCell cell = init;
        // number of branching steps still allowed before reaching 'level'
        int n = cell.level - level;
        while (cell.children!=null && n>0) {
            n--;
            int cs = 1 << (cell.level-1);
            if (debug) System.out.print("["+cell.x+","+cell.y+","+cell.z+","+cs+"]");
            int childIndex = 0;
            if (x>=cell.x+cs) childIndex += 1;
            if (y>=cell.y+cs) childIndex += 2;
            if (z>=cell.z+cs) childIndex += 4;
            cell = cell.children[childIndex];
        }
        if (debug) System.out.print("["+cell.x+","+cell.y+","+cell.z+"]");
        return cell;
    }
    /**
     * locate the leaf cell containing point p.
     * <p>
     * The point coordinates are in [0,1].
     */
    public OctreeCell locateCell(float[] p) {
        // determine the x,y,z codes for the point position
        int x = (int)(p[X]*SCALE);
        int y = (int)(p[Y]*SCALE);
        int z = (int)(p[Z]*SCALE);
        // follow the branching from root to leaf
        OctreeCell cell = root;
        cell = traverseToLeaf(cell,x,y,z);
        return cell;
    }
    /** Same as {@link #locateCell(float[])} with unpacked coordinates in [0,1]. */
    public OctreeCell locateCell(float px, float py, float pz) {
        // determine the x,y,z codes for the point position
        int x = (int)(px*SCALE);
        int y = (int)(py*SCALE);
        int z = (int)(pz*SCALE);
        // follow the branching from root to leaf
        OctreeCell cell = root;
        cell = traverseToLeaf(cell,x,y,z);
        return cell;
    }
    /** locate the left neighbor of same of larger size */
    public OctreeCell getXminusNeighbor(OctreeCell cell) {
        // if no left neighbor
        if (cell.x == 0) return null;
        else {
            // determine the smallest common ancestor: first ancestor whose
            // x origin differs from the cell's (i.e. that extends further left)
            OctreeCell parentCell = cell;
            while (parentCell.x==cell.x) {
                parentCell = parentCell.parent;
                if (parentCell.parent==null) break;
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x-1, cell.y, cell.z, cell.level);
            return parentCell;
        }
    }
    /** locate the right neighbor of same of larger size */
    public OctreeCell getXplusNeighbor(OctreeCell cell) {
        // get location of smallest possible right neighbor
        int cellSize = 1 << cell.level;
        if (debug) System.out.print("["+cell.x+","+cell.y+","+cell.z+","+cellSize+"]");
        // if no right neighbor
        if ( (cell.x+cellSize >= SCALE) || (cell.parent==null) ) {
            if (debug) System.out.print("no neighbor\n");
            return null;
        } else {
            // determine the smallest common ancestor: climb while the ancestor
            // shares the cell's x origin is NOT the case here -- we climb while
            // it differs (right neighbors live under an ancestor aligned with cell.x)
            OctreeCell parentCell = cell.parent;
            if (debug) System.out.print("["+parentCell.x+","+parentCell.y+","+parentCell.z+"]");
            while (parentCell.x!=cell.x) {
                if (parentCell.parent==null) break;
                else parentCell = parentCell.parent;
                if (verbose) System.out.print("["+parentCell.x+","+parentCell.y+","+parentCell.z+"]");
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x+cellSize, cell.y, cell.z, cell.level);
            if (debug) System.out.print("["+parentCell.x+","+parentCell.y+","+parentCell.z+"]");
            if (debug) System.out.print("\n");
            return parentCell;
        }
    }
    /** locate the anterior (y-) neighbor of same or larger size */
    public OctreeCell getYminusNeighbor(OctreeCell cell) {
        // if no left neighbor
        if (cell.y == 0) return null;
        else {
            // determine the smallest common ancestor
            OctreeCell parentCell = cell;
            while (parentCell.y==cell.y) {
                parentCell = parentCell.parent;
                if (parentCell.parent==null) break;
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x, cell.y-1, cell.z, cell.level);
            return parentCell;
        }
    }
    /** locate the posterior (y+) neighbor of same or larger size */
    public OctreeCell getYplusNeighbor(OctreeCell cell) {
        // get location of smallest possible right neighbor
        int cellSize = 1 << cell.level;
        // if no right neighbor
        if ( (cell.y+cellSize >= SCALE) || (cell.parent==null) ) return null;
        else {
            // determine the smallest common ancestor
            OctreeCell parentCell = cell.parent;
            while (parentCell.y!=cell.y) {
                if (parentCell.parent==null) break;
                else parentCell = parentCell.parent;
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x, cell.y+cellSize, cell.z, cell.level);
            return parentCell;
        }
    }
    /** locate the inferior (z-) neighbor of same or larger size */
    public OctreeCell getZminusNeighbor(OctreeCell cell) {
        // if no left neighbor
        if (cell.z == 0) return null;
        else {
            // determine the smallest common ancestor
            OctreeCell parentCell = cell;
            while (parentCell.z==cell.z) {
                parentCell = parentCell.parent;
                if (parentCell.parent==null) break;
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x, cell.y, cell.z-1, cell.level);
            return parentCell;
        }
    }
    /** locate the superior (z+) neighbor of same or larger size */
    public OctreeCell getZplusNeighbor(OctreeCell cell) {
        // get location of smallest possible right neighbor
        int cellSize = 1 << cell.level;
        // if no right neighbor
        if ( (cell.z+cellSize >= SCALE) || (cell.parent==null) ) return null;
        else {
            // determine the smallest common ancestor
            OctreeCell parentCell = cell.parent;
            while (parentCell.z!=cell.z) {
                if (parentCell.parent==null) break;
                else parentCell = parentCell.parent;
            }
            // start from smallest ancestor and follow branching
            parentCell = traverseToLevel(parentCell, cell.x, cell.y, cell.z+cellSize, cell.level);
            return parentCell;
        }
    }
    /**
     * create a new octree from the given image.
     * <p>
     * It fills the tree with the image at finest scale
     * (if the image is bigger, it gets cropped, if it's smaller it's padded with 0)
     */
    public void createFromImage(float[] img, int nx, int ny, int nz) {
        // create the full tree recursively
        root = create(0,0,0, ROOT_LEVEL, 0, null);
        // set the image values at level 0, sweeping via neighbor links
        // rather than re-descending from the root for every voxel
        OctreeCell cellX,cellY,cellZ;
        cellX = traverseToLeaf(root,0,0,0);
        for (int x=0;(x<nx && cellX!=null);x++) {
            cellY = cellX;
            for (int y=0;(y<ny && cellY!=null);y++) {
                cellZ = cellY;
                for (int z=0;(z<nz && cellZ!=null);z++) {
                    cellZ = traverseToLeaf(cellZ,0,0,0);
                    cellZ.value = img[x+nx*y+nx*ny*z];
                    // NOTE(review): for a leaf at level 0 this is 1<<-1, which
                    // Java evaluates as 1<<31 (negative), so the condition below
                    // is always true and the sweep always advances -- correct in
                    // effect for unit leaves, but fragile; confirm intent.
                    int cs = 1 << (cellZ.level-1);
                    if (z+1>=cellZ.z+cs) cellZ = getZplusNeighbor(cellZ);
                }
                int cs = 1 << (cellY.level-1);
                if (y+1>=cellY.y+cs) cellY = getYplusNeighbor(cellY);
            }
            int cs = 1 << (cellX.level-1);
            if (x+1>=cellX.x+cs) cellX = getXplusNeighbor(cellX);
        }
        return;
    }
    /**
     * create a new octree from the given image.
     * <p>
     * It fills the tree with the image at the scale defined by level
     * (lower levels of the tree are trimmed)
     */
    public void createFromImage(float[] img, int nx, int ny, int nz, int level) {
        // create the full tree recursively
        root = create(0,0,0, ROOT_LEVEL, level, null);
        int incr = 1 << level;  // voxel stride at the requested level
        // set the image values at level 0
        OctreeCell cellX,cellY,cellZ;
        cellX = traverseToLeaf(root,0,0,0);
        for (int x=0;(x<nx && cellX!=null);x+=incr) {
            cellY = cellX;
            for (int y=0;(y<ny && cellY!=null);y+=incr) {
                cellZ = cellY;
                for (int z=0;(z<nz && cellZ!=null);z+=incr) {
                    cellZ = traverseToLeaf(cellZ,0,0,0);
                    // NOTE(review): precedence makes this x/incr + (nx*y)/incr +
                    // (nx*ny*z)/incr, which only matches a downsampled image if
                    // its row/slice strides are still nx and nx*ny -- verify the
                    // intended layout of img at this level.
                    cellZ.value = img[x/incr + nx*y/incr + nx*ny*z/incr];
                    int cs = 1 << (cellZ.level-1);
                    if (z+incr>=cellZ.z+cs) cellZ = getZplusNeighbor(cellZ);
                }
                int cs = 1 << (cellY.level-1);
                if (y+incr>=cellY.y+cs) cellY = getYplusNeighbor(cellY);
            }
            int cs = 1 << (cellX.level-1);
            if (x+incr>=cellX.x+cs) cellX = getXplusNeighbor(cellX);
        }
        return;
    }
    /** for testing: works only with full octree */
    public void createFromImageInReverse(float[] img, int nx, int ny, int nz) {
        // create the full tree recursively
        root = create(0,0,0, ROOT_LEVEL, 0, null);
        // set the image values at level 0, sweeping from the far corner back
        OctreeCell cellX,cellY,cellZ;
        cellX = traverseToLeaf(root,(int)(SCALE-1),(int)(SCALE-1),(int)(SCALE-1));
        for (int x=(int)(SCALE-1);x>=0 && cellX!=null;x--) {
            cellY = cellX;
            for (int y=(int)(SCALE-1);y>=0 && cellY!=null;y--) {
                cellZ = cellY;
                for (int z=(int)(SCALE-1);z>=0 && cellZ!=null;z--) {
                    // pad with zeros outside the image bounds
                    if ( (x<nx) && (y<ny) && (z<nz) )
                        cellZ.value = img[x+nx*y+nx*ny*z];
                    else
                        cellZ.value = 0.0f;
                    cellZ = getZminusNeighbor(cellZ);
                }
                cellY = getYminusNeighbor(cellY);
            }
            cellX = getXminusNeighbor(cellX);
        }
        return;
    }
    /** for testing: fills the tree by descending from the root for every voxel (slow) */
    public void createFromImageRootToLeaf(float[] img, int nx, int ny, int nz) {
        // create the full tree recursively
        root = create(0,0,0, ROOT_LEVEL, 0, null);
        // set the image values at level 0
        OctreeCell cell;
        for (int x=0;x<nx;x++) for (int y=0;y<ny;y++) for (int z=0;z<nz;z++) {
            if ( (x<SCALE) && (y<SCALE) && (z<SCALE) ) {
                if (debug) System.out.print("("+x+","+y+","+z+")->");
                cell = root;
                cell = traverseToLeaf(cell,x,y,z);
                if (debug) System.out.print("\n");
                cell.value = img[x+nx*y+nx*ny*z];
            }
        }
        return;
    }
    /**
     * create an empty octree fully unfolded
     * <p>
     * All values are 0
     */
    public void createBlank() { root = create(0,0,0, ROOT_LEVEL, 0, null); }
    /** recursive octree building.
     * <p>
     * The octree is fully propagated from current location and level to minlevel.
     */
    private OctreeCell create(int x, int y, int z, int lv, int minlv, OctreeCell parent) {
        OctreeCell cell = new OctreeCell(x,y,z,lv);
        cell.parent = parent;
        if (lv>minlv) {
            lv--;
            // cell size
            int cs = 1 << lv;
            cell.children = new OctreeCell[8];
            // create the tree along proper pattern: child index bits are
            // bit0 = +x, bit1 = +y, bit2 = +z (must match traverseToLeaf)
            cell.children[0] = create(x, y, z, lv, minlv, cell);
            cell.children[1] = create(x+cs, y, z, lv, minlv, cell);
            cell.children[2] = create(x, y+cs, z, lv, minlv, cell);
            cell.children[4] = create(x, y, z+cs, lv, minlv, cell);
            cell.children[3] = create(x+cs, y+cs, z, lv, minlv, cell);
            cell.children[5] = create(x+cs, y, z+cs, lv, minlv, cell);
            cell.children[6] = create(x, y+cs, z+cs, lv, minlv, cell);
            cell.children[7] = create(x+cs, y+cs, z+cs, lv, minlv, cell);
        }
        return cell;
    }
    /** compute the necessary octree level for storing an image of nx,ny,nz dimensions */
    public static int findMinimumLevel(int nx, int ny, int nz) {
        int level = 1;
        int size = 1;
        // double the grid size until it covers the largest dimension
        while ( (size<nx) || (size<ny) || (size<nz) ) {
            size = 2*size;
            level++;
        }
        return level;
    }
    /** bring the octree back into an image.
     * <p>
     * Assumes no particular structure for the tree: if might be slow.
     * (however it sweeps regularly rather than going back to the root)
     */
    public float[] exportToImage(int nx, int ny, int nz) {
        float[] img = new float[nx*ny*nz];
        OctreeCell cellX,cellY,cellZ;
        cellX = traverseToLeaf(root,0,0,0);
        for (int x=0;(x<nx && cellX!=null);x++) {
            cellY = cellX;
            for (int y=0;(y<ny && cellY!=null);y++) {
                cellZ = cellY;
                for (int z=0;(z<nz && cellZ!=null);z++) {
                    cellZ = traverseToLeaf(cellZ,x,y,z);
                    img[x+nx*y+nx*ny*z] = cellZ.value;
                    // NOTE(review): here cs = 1 << level (full cell size),
                    // whereas createFromImage uses 1 << (level-1); this version
                    // handles merged (pruned) cells of size > 1 correctly.
                    int cs = 1 << (cellZ.level);
                    if (z+1>=cellZ.z+cs) cellZ = getZplusNeighbor(cellZ);
                }
                int cs = 1 << (cellY.level);
                if (y+1>=cellY.y+cs) cellY = getYplusNeighbor(cellY);
            }
            int cs = 1 << (cellX.level);
            if (x+1>=cellX.x+cs) cellX = getXplusNeighbor(cellX);
        }
        return img;
    }
    /** prints the octree scale into an image.
     * <p>
     * Assumes no particular structure for the tree: if might be slow.
     * (however it sweeps regularly rather than going back to the root)
     */
    public float[] exportScaleToImage(int nx, int ny, int nz) {
        float[] img = new float[nx*ny*nz];
        OctreeCell cellX,cellY,cellZ;
        cellX = traverseToLeaf(root,0,0,0);
        for (int x=0;(x<nx && cellX!=null);x++) {
            cellY = cellX;
            for (int y=0;(y<ny && cellY!=null);y++) {
                cellZ = cellY;
                for (int z=0;(z<nz && cellZ!=null);z++) {
                    cellZ = traverseToLeaf(cellZ,x,y,z);
                    // store the cell's level instead of its value
                    img[x+nx*y+nx*ny*z] = cellZ.level;
                    int cs = 1 << (cellZ.level);
                    if (z+1>=cellZ.z+cs) cellZ = getZplusNeighbor(cellZ);
                }
                int cs = 1 << (cellY.level);
                if (y+1>=cellY.y+cs) cellY = getYplusNeighbor(cellY);
            }
            int cs = 1 << (cellX.level);
            if (x+1>=cellX.x+cs) cellX = getXplusNeighbor(cellX);
        }
        return img;
    }
    /** bring the octree back into an image.
     * <p>
     * Go back to the root every time (slow, for debug)
     */
    public float[] exportToImageRootToLeaf(int nx, int ny, int nz) {
        float[] img = new float[nx*ny*nz];
        for (int x=0;x<nx;x++) for (int y=0;y<ny;y++) for (int z=0;z<nz;z++) {
            if ( (x<SCALE) && (y<SCALE) && (z<SCALE) ) {
                OctreeCell cell = traverseToLeaf(root,x,y,z);
                img[x+nx*y+nx*ny*z] = cell.value;
            } else {
                // outside the tree extent: pad with zero
                img[x+nx*y+nx*ny*z] = 0.0f;
            }
        }
        return img;
    }
    /** display the octree structure.
     * <p>
     * Starts from cell and explores the entire tree below it.
     */
    private String display(OctreeCell cell) {
        if (cell.children==null) {
            return "("+cell.x+","+cell.y+","+cell.z+","+cell.level+")"+cell.value+"\n";
        } else {
            String info = "("+cell.x+","+cell.y+","+cell.z+","+cell.level+")";
            //String info = "";
            for (int n=0;n<8;n++) {
                info += display(cell.children[n]);
            }
            return info;
        }
    }
    /** display the octree structure from the root node.
     */
    public String display() {
        String info = "Octree: ";
        info += display(root);
        return info;
    }
    /** prune the octree to group similar values.
     * <p>
     * Remove child cells of value less than dist apart from their mean.
     */
    public void pruneToDistance(float dist) {
        float mean;
        // (an earlier neighbor-sweep implementation of this method was
        //  removed here; it was fully commented out and never compiled)
        // bottom-up: merge level-l leaves into their level-(l+1) parent
        // when all 8 children are leaves within dist of their mean
        for (int l=0;l<ROOT_LEVEL;l++) {
            for (int x=0;x<SCALE;x++) for (int y=0;y<SCALE;y++) for (int z=0;z<SCALE;z++) {
                OctreeCell cell = (traverseToLeaf(root,x,y,z)).parent;
                if (cell!=null && cell.level==l+1) {
                    // check if leaf parent for all
                    boolean allleaf = true;
                    for (int n=0;n<8;n++) if (cell.children[n].children!=null) allleaf = false;
                    if (allleaf) {
                        // compute mean cell value
                        mean = 0.0f;
                        for (int n=0;n<8;n++) {
                            mean += (cell.children[n]).value/8.0f;
                        }
                        // compare to value
                        boolean merge = true;
                        for (int n=0;n<8;n++) {
                            if (Numerics.abs(mean-cell.children[n].value) > dist) {
                                merge = false;
                                break;
                            }
                        }
                        // merge: the parent becomes a leaf holding the mean
                        if (merge) {
                            cell.value = mean;
                            if (debug) System.out.print("merge:["+cell.x+","+cell.y+","+cell.z+","+cell.level+":"+cell.value+"]\n");
                            for (int n=0;n<8;n++) cell.children[n] = null;
                            cell.children = null;
                        }
                    }
                }
            }
        }
    }
}
|
youssefhoummad/downloader
|
src/one_dl_ui.py
|
<reponame>youssefhoummad/downloader
import os
from datetime import datetime
import threading
import time
import ntpath
import tkinter as tk
from tkinter import ttk
from pySmartDL import SmartDL
try:
from .win10toast import ToastNotifier
from .imageLabel import ImageLabel
from .buttons import *
from .constants import *
from .utils import *
except:
from win10toast import ToastNotifier
from imageLabel import ImageLabel
from buttons import *
from constants import *
from utils import *
toaster = ToastNotifier()
class DownloadItem(tk.Frame):
    """One row of the downloads list.

    Shows an icon, the file name, progress, speed, size and ETA for a
    single pySmartDL download, plus pause/resume and cancel buttons.
    The widgets are only built (init_UI) once the download object starts
    reporting progress; see start().
    """

    def __init__(self, parent, dl):
        """Create the row.

        parent -- containing tk widget; its background color is reused
        dl     -- pySmartDL.SmartDL instance (created, not yet started)
        """
        tk.Frame.__init__(self, parent)
        self.config(bg=parent['bg'])
        self.parent = parent
        # BUG FIX: the download object was never stored (the assignment was
        # commented out), yet start(), pause/cancel and the status/extension/
        # filename properties all read self.dl_object.
        self.dl_object = dl
        # StringVars backing the labels built in init_UI().
        self.progress = tk.StringVar()
        self.progress.set('0%')
        self.speed = tk.StringVar()
        self.speed.set('-- kB/s')
        self.size = tk.StringVar()
        self.size.set('-- MB')
        self.time = tk.StringVar()
        self.time.set('-m -s')

    def init_UI(self):
        """Build and pack the row's widgets (called once from start())."""
        _f = tk.Frame(self, bg=self['bg'])
        _f.pack(ipady=3)
        # Keep a reference to the image on self so tkinter doesn't GC it.
        self.ico = get_icon(self.extension)
        self._icon = tk.Label(_f, text="", image=self.ico, bg=self['bg'])
        self._name = tk.Label(_f, text=self.filename, bg=self['bg'], width=26, anchor='nw')
        self._progress = tk.Label(_f, textvariable=self.progress, bg=self['bg'], width=8)
        self._speed = tk.Label(_f, textvariable=self.speed, bg=self['bg'], width=10)
        self._size = tk.Label(_f, textvariable=self.size, bg=self['bg'], width=10)
        self._time = tk.Label(_f, textvariable=self.time, bg=self['bg'], width=12)
        self._pause = ButtonPause(_f, command=self.pause_download)
        self._cancel = ButtonCancel(_f, command=self.cancel_download)
        self._icon.pack(side='left', padx=(5,0))
        self._name.pack(side='left', padx=(5, 0))
        self._progress.pack(side='left', padx=(5, 0))
        self._speed.pack(side='left', padx=(5, 0))
        self._size.pack(side='left', padx=(5, 0))
        self._time.pack(side='left', padx=(5, 0))
        self._pause.pack(side='left', padx=(5, 0))
        self._cancel.pack(side='left', padx=(5, 10))
        _line = ImageLabel(self, image_path=r'.\img\line.png', width=500, height=10)
        _line.pack()

    def start(self):
        """Start the download and a background thread updating the UI."""
        @threaded
        def start_downloading():
            try:
                self.dl_object.start()
            except Exception as e:
                print(e)
                # BUG FIX: was `raise 'Download Faild'` -- raising a str is
                # itself a TypeError in Python 3.
                raise RuntimeError('Download failed') from e

        @threaded
        def show_progress():
            # Busy-wait until the download object can report progress.
            # NOTE(review): these spin loops burn CPU; a short sleep or an
            # event would be gentler -- kept as-is to preserve timing behavior.
            while not self.dl_object: continue
            while not hasattr(self.dl_object, 'get_speed'): continue
            self.init_UI()
            while not self.dl_object.isFinished():
                try:
                    self.speed.set(self.dl_object.get_speed(human=True))
                    self.time.set(self.dl_object.get_eta(human=True).replace(' minute,', 'm').replace(' minutes,', 'm').replace(' seconds', 's'))
                    self.size.set(self.dl_object.get_dl_size(human=True))
                    self.progress.set(f'{self.dl_object.get_progress():.0%}')
                except Exception as e:
                    print(e)
                time.sleep(0.2)
                self.update_idletasks()
            if self.dl_object.isFinished():
                self.speed.set('Finished')
                self.time.set(str(self.dl_object.get_dl_time(human=True)).replace(' minute,', 'm').replace(' minutes,', 'm').replace(' seconds', 's'))
                self.size.set(self.dl_object.get_final_filesize(human=True))
                self.progress.set('100%')
                # Repurpose the pause button as an "open folder" button.
                self._pause.config(images=[r'.\img\points.tif', r'.\img\pointsHover.tif', r'.\img\pointsPress.tif'])
                self._pause.command = lambda : open_path(self.dl_object.get_dest(), DEST)
                self.update_idletasks()
                # show system notification
                toaster.show_toast("download completed",
                                   f"{self.filename} downloaded",
                                   icon_path=APP_ICO,
                                   callback_on_click=lambda a=None:open_path(self.dl_object.get_dest(), DEST),
                                   duration=10,
                                   threaded=True)

        start_downloading()
        show_progress()

    def cancel_download(self):
        """Stop the transfer (if any) and remove this row from the UI."""
        if self.dl_object:
            self.dl_object.stop()
        self.destroy()

    def pause_download(self):
        """Pause the transfer and turn the button into a resume button."""
        self.dl_object.pause()
        self._pause.config(images=PLAY_IMAGES)
        self._pause.command = self.resume_download

    def resume_download(self):
        """Resume the transfer and turn the button back into a pause button."""
        self.dl_object.resume()
        self._pause.command = self.pause_download
        self._pause.config(images=PAUSE_IMAGES)

    @property
    def status(self):
        """pySmartDL status string, or None if there is no download object."""
        if self.dl_object:
            return self.dl_object.get_status()
        return None

    @property
    def extension(self):
        """File extension (including the dot) of the destination path."""
        dest = self.dl_object.get_dest()
        return os.path.splitext(dest)[1]

    @property
    def filename(self):
        """Base name of the destination path (handles trailing separators)."""
        head, tail = ntpath.split(self.dl_object.get_dest())
        return tail or ntpath.basename(head)
# if __name__ == "__main__":
# root = tk.Tk()
# root.config(bg="white")
# # url = 'http://www.ovh.net/files/100Mio.dat'
# url = 'https://github.com/iTaybb/pySmartDL/raw/master/test/7za920.zip'
# dl = SmartDL(url, DEST)
# item1 = DownloadItem(root, dl_object=dl)
# # item2 = DownloadItem(root)
# item1.pack()
# # item2.pack()
# root.mainloop()
|
bobbyz007/NaiveChat
|
itstack-naive-chat-server/itstack-naive-chat-server-agreement/src/main/java/org/itstack/naive/chat/protocol/login/dto/ChatRecordDto.java
|
package org.itstack.naive.chat.protocol.login.dto;
import java.util.Date;
/**
* 博 客:http://bugstack.cn
* 公众号:bugstack虫洞栈 | 沉淀、分享、成长,让自己和他人都能有所收获!
* create by 小傅哥 on @2020
*/
/**
 * DTO carrying one chat-history record exchanged at login time.
 */
public class ChatRecordDto {
    private String talkId;        // conversation (dialog box) ID
    private String userId;        // user ID (self or friend)
    private String userNickName;  // user nickname (used in group chat)
    private String userHead;      // user avatar (used in group chat)
    private Integer msgUserType;  // message sender type: 0 = self, 1 = friend
    private String msgContent;    // message content
    private Integer msgType;      // message type: 0 = text, 1 = built-in emoticon
    private Date msgDate;         // message timestamp
    public String getTalkId() {
        return talkId;
    }
    public void setTalkId(String talkId) {
        this.talkId = talkId;
    }
    public String getUserId() {
        return userId;
    }
    public void setUserId(String userId) {
        this.userId = userId;
    }
    public String getUserNickName() {
        return userNickName;
    }
    public void setUserNickName(String userNickName) {
        this.userNickName = userNickName;
    }
    public String getUserHead() {
        return userHead;
    }
    public void setUserHead(String userHead) {
        this.userHead = userHead;
    }
    public Integer getMsgUserType() {
        return msgUserType;
    }
    public void setMsgUserType(Integer msgUserType) {
        this.msgUserType = msgUserType;
    }
    public String getMsgContent() {
        return msgContent;
    }
    public void setMsgContent(String msgContent) {
        this.msgContent = msgContent;
    }
    public Integer getMsgType() {
        return msgType;
    }
    public void setMsgType(Integer msgType) {
        this.msgType = msgType;
    }
    public Date getMsgDate() {
        return msgDate;
    }
    public void setMsgDate(Date msgDate) {
        this.msgDate = msgDate;
    }
}
|
ut-osa/nightcore
|
src/gateway/engine_connection.h
|
<filename>src/gateway/engine_connection.h
#pragma once
#include "base/common.h"
#include "common/uv.h"
#include "common/protocol.h"
#include "common/stat.h"
#include "utils/appendable_buffer.h"
#include "server/io_worker.h"
#include "server/connection_base.h"
namespace faas {
namespace gateway {
class Server;
// A TCP connection from one engine node to the gateway. Each connection is
// identified by the engine's node_id plus a per-node conn_id, and is driven
// by libuv callbacks on the owning IOWorker's loop.
class EngineConnection final : public server::ConnectionBase {
public:
    // Connection type ids start at kBaseTypeId and are offset by node_id so
    // each engine node maps to a distinct type.
    static constexpr int kBaseTypeId = 2;
    static int type_id(uint16_t node_id) { return kBaseTypeId + node_id; }
    // initial_data: bytes already read from the socket before this object
    // took over (e.g. received together with the handshake).
    EngineConnection(Server* server, uint16_t node_id, uint16_t conn_id,
                     std::span<const char> initial_data);
    ~EngineConnection();
    uint16_t node_id() const { return node_id_; }
    uint16_t conn_id() const { return conn_id_; }
    uv_stream_t* InitUVHandle(uv_loop_t* uv_loop) override;
    void Start(server::IOWorker* io_worker) override;
    void ScheduleClose() override;
    // Sends a gateway message followed by its payload on this connection.
    void SendMessage(const protocol::GatewayMessage& message, std::span<const char> payload);
private:
    // Lifecycle: kCreated -> kRunning (Start) -> kClosing (ScheduleClose) -> kClosed.
    enum State { kCreated, kRunning, kClosing, kClosed };
    Server* server_;
    uint16_t node_id_;
    uint16_t conn_id_;
    server::IOWorker* io_worker_;
    State state_;
    uv_tcp_t uv_tcp_handle_;
    std::string log_header_;       // prefix for log lines from this connection
    utils::AppendableBuffer read_buffer_;  // accumulates bytes until a full message
    // Parses complete GatewayMessages out of read_buffer_.
    void ProcessGatewayMessages();
    DECLARE_UV_ALLOC_CB_FOR_CLASS(BufferAlloc);
    DECLARE_UV_READ_CB_FOR_CLASS(RecvData);
    DECLARE_UV_WRITE_CB_FOR_CLASS(DataSent);
    DECLARE_UV_CLOSE_CB_FOR_CLASS(Close);
    DISALLOW_COPY_AND_ASSIGN(EngineConnection);
};
} // namespace gateway
} // namespace faas
|
lukflug/postman
|
src/main/java/me/srgantmoomoo/postman/module/modules/player/ChatWatermark.java
|
<gh_stars>0
package me.srgantmoomoo.postman.module.modules.player;
import java.util.ArrayList;
import org.lwjgl.input.Keyboard;
import me.srgantmoomoo.api.event.events.PacketEvent;
import me.srgantmoomoo.api.util.Refrence;
import me.srgantmoomoo.postman.Main;
import me.srgantmoomoo.postman.module.Category;
import me.srgantmoomoo.postman.module.Module;
import me.zero.alpine.listener.EventHandler;
import me.zero.alpine.listener.Listener;
import net.minecraft.network.play.client.CPacketChatMessage;
import net.minecraftforge.common.MinecraftForge;
/**
 * Appends a small-caps watermark (the client name) to every outgoing chat
 * message, except commands.
 */
public class ChatWatermark extends Module {
    public ChatWatermark() {
        // BUG FIX: the description was copy-pasted from a storage-ESP module
        // ("draws esp around storage blocks"); it now describes this module.
        super ("chatWatermark", "appends a watermark to sent chat messages", Keyboard.KEY_NONE, Category.PLAYER);
        this.addSettings();
    }

    /**
     * Setup hook kept for API compatibility; this module needs no setup.
     * (A local list of separator strings used to be built here but was
     * never read anywhere, so it has been removed.)
     */
    public void setup(){
    }

    /** Rewrites outgoing chat packets before they leave the client. */
    @EventHandler
    private final Listener<PacketEvent.Send> listener = new Listener<>(event -> {
        if (event.getPacket() instanceof CPacketChatMessage){
            String original = ((CPacketChatMessage) event.getPacket()).getMessage();
            // Never watermark commands.
            if (original.startsWith("/") || original.startsWith("."))
                return;
            // U+300B is the closing double angle bracket used as separator.
            String watermarked = original + " \u300b" + toUnicode(Refrence.NAME);
            // Chat packets longer than 255 chars are rejected by the server.
            if (watermarked.length() > 255) return;
            ((CPacketChatMessage) event.getPacket()).message = watermarked;
        }
    });

    public void onEnable(){
        Main.EVENT_BUS.subscribe(this);
    }

    public void onDisable(){
        Main.EVENT_BUS.unsubscribe(this);
    }

    /**
     * Converts a string to its lowercase small-caps Unicode look-alikes.
     *
     * @param s input text (case-insensitive)
     * @return the small-caps rendition of s
     */
    public String toUnicode(String s){
        return s.toLowerCase()
                .replace("a", "\u1d00")
                .replace("b", "\u0299")
                .replace("c", "\u1d04")
                .replace("d", "\u1d05")
                .replace("e", "\u1d07")
                .replace("f", "\ua730")
                .replace("g", "\u0262")
                .replace("h", "\u029c")
                .replace("i", "\u026a")
                .replace("j", "\u1d0a")
                .replace("k", "\u1d0b")
                .replace("l", "\u029f")
                .replace("m", "\u1d0d")
                .replace("n", "\u0274")
                .replace("o", "\u1d0f")
                .replace("p", "\u1d18")
                .replace("q", "\u01eb")
                .replace("r", "\u0280")
                .replace("s", "\ua731")
                .replace("t", "\u1d1b")
                .replace("u", "\u1d1c")
                .replace("v", "\u1d20")
                .replace("w", "\u1d21")
                .replace("x", "\u02e3")
                .replace("y", "\u028f")
                .replace("z", "\u1d22");
    }
}
|
socialsoftware/edition
|
microfrontend/src/microfrontends/about/pages/Acknowledgements_en.js
|
import React from 'react'
const Acknowledgements_en = () => {
return (
<div>
<h3>Sponsors</h3>
<p>
The <em>LdoD Archive</em> is a collaborative digital archive sponsored
by the Foundation for Science and Technology (FCT), and supported by
the Centre for Portuguese Literature at the University of Coimbra
(CLP), the <em>Instituto de Engenharia de Sistemas e Computadores,
Investigação e Desenvolvimento em Lisboa</em> (INESC-ID), the National
Library of Portugal (BNP), and the PhD Programme in Materialities of
Literature (MATLIT).
</p>
<p>
The <em>LdoD Archive</em> was developed under the research project “No Problem Has a Solution:
A Digital Archive of the <em>Book of Disquiet</em>,” coordinated by <NAME>
(PTDC/CLE-LLI/118713/2010). Project funded by the Foundation for Science and Technology (FCT),
and co-funded by the European Regional Development Fund (FEDER), through Axis I of the Competitiveness
Factors Operational Program (POFC) of the National Strategic Framework (QREN)—European Union
(COMPETE: FCOMP-01-0124-FEDER-019715). Additional national funds by the Foundation for Science and
Technology (FCT), under the “Plurianual Funding—Unit 759” projects: “PEst-OE/ELT/UI0759/2013” y
“PEst-OE/ELT/UI0759/2014”.
</p>
<p> </p>
<h3>Collaboration</h3>
<p>
During its development period, the <em>LdoD Archive</em> benefited from
the collaboration of several individuals and institutions. Our
gratitude is due, first and foremost, to the editors Jeró<NAME>, <NAME>, and <NAME>, for consulting as
specialists in the editorial problems of the <em>Book of Disquiet</em>.
Our team of project consultants included three leading digital humanities
scholars whose critical insight contributed to the advancement of our
ideas: <NAME>, <NAME>, and <NAME> have
generously shared their expertise and provided continuing support and inspiration.
We also want to thank <NAME> for his contribution to the initial definition of
the TEI schema. The <em>LdoD Archive</em> further acknowledges the collaboration of the
Archives of Contemporary Portuguese Culture (National Library of
Portugal), the Public Library and Regional Archive of Ponta Delgada
(Regional Directorate for Culture of the Government of the Azores), and
the Information and Communication Systems and Infrastructures
Management Service (SGSIIC) at the University of Coimbra. We are also
grateful to all beta users who, in September 2014 and May 2017,
participated in usability tests of the functionalities and web
design of the <em>LdoD Archive</em>.
</p>
<p>
These acknowledgements are extended to the organizers and participants
of national and international seminars and conferences, in which the
various conceptual and technical components of the <em>LdoD
Archive</em> were presented, namely in universities and research centres in
Portugal, Australia, Cyprus, Italy, France, Spain, Colombia, United
States of America, Sweden, and Greece. The interaction with different
communities of researchers (digital humanities, computer science,
information science, textual encoding, textual criticism, Pessoa
studies, electronic literature) is reflected in the final results. We
highlight the following presentations:
</p>
<ul>
<h4>2012</h4>
<ul>
<li>University of Coimbra, Centre for Portuguese Literature
(CLP): Colloquium “Estranhar Pessoa com as Materialidades da
Literatura” May 25, 2012; coords. Manuel Portela and Osvaldo
Manuel Silvestre)</li>
</ul>
<h4>2013</h4>
<ul>
<li>New University of Lisbon, School of Social and Human Sciences
(FCSH): VI Seminar “<a
href="http://elab.fcsh.unl.pt/actividades/estranhar-pessoa-vi-seminario"
target="new">Assuntos Materiais</a>,” organized by the Project
“Estranhar Pessoa” (Februrary 7, 2013; coords. António
M. Feijó and <NAME>)
</li>
<li>University of Western Sydney, School of Humanities and
Communication Arts: Symposium “Surface Tensions: Literature in
the Database”, organized by the Project “<a
href="https://www.westernsydney.edu.au/writing_and_society/research/past_research_projects/creative_nation_writers_and_writing_in_the_new_media_culture"
target="new">Creative Nation: Writers and Writing in the New
Media Culture</a>” (June 10, 2013; coords. Anna Gibbs and Maria
Angel)
</li>
<li>University of Cyprus: “<a
href="https://ecscw2013.cs.ucy.ac.cy/index.php" target="new">ECSCW
2013: European Conference on Computer-Supported Cooperative Work</a>”
(September 21-25, 2013; coord. <NAME>)
</li>
<li>University of Rome La Sapienza: “<a
href="http://digilab2.let.uniroma1.it/teiconf2013/" target="new">The
Linked TEI: Text Encoding in the Web</a>,” organized by DIGILAB,
Università La Sapienza, and Text Encoding Initiative
Consortium (October 2-5, 2013; coords. Fabio Ciotti and Arianna
Ciula)
</li>
<li>Instituto Universitário da Maia, <span
id="DeltaPlaceHolderMain">Centre for Research in Technologies
and Intermedia Studies </span>(CITEI): Symposium “Desafios e
Oportunidades da Edição Digital” (November 7,
2013; coords. C&<NAME> and Isabel Rio Novo)
</li>
<li>École Normale Supérieure, Paris, Institut des
textes et manuscrits modernes (ITEM): “<a
href="https://textualscholarship.files.wordpress.com/2015/04/programme-ests-paris-conference-2013.pdf"
target="new">Variance in Textual Scholarship and Genetic
Criticism/ La variance en philologie et dans la critique
génétique</a>,” 10th Conference of the European
Society for Textual Scholarship (November 22-24, 2013; coords. Dirk
<NAME> and <NAME>)
</li>
</ul>
<h4>2014</h4>
<ul>
<li>University of Coimbra, Biblioteca Geral: International
Congress “<a
href="http://www.uc.pt/bguc/500anos/Congresso_internacional"
target="new">A Biblioteca da Universidade: Permanências e
Metamorfoses</a>” (January 16-18, 2014; coord. <NAME>
Bernardes)
</li>
<li>Calouste Gulbenkian Foundation: International conference
“<a href="http://estranharpessoa.com/programa" target="new">O
<NAME></a>,” organized by the Project
“Estranhar Pessoa,” Laboratory of Advanced Literary
Studies (ELAB), Network of Philosophy and Literature, Institute of
Philosophy of Language (IFL) and Program in Theory of Literature
(March 6-8, 2014; coords. Antó<NAME>ó and <NAME>
Baptista)
</li>
<li>Los Andes University, Bogotá: International conference
“<a
href="https://ilusionymaterialidad.wordpress.com/programa-2/"
target="new">Ilusión y materialidad de los archivos literarios</a>,
”
organized by Universidad de los Andes, Instituto Caro y Cuervo and
Biblioteca Luis Ángel Arango (May 6-8, 2014; coord. Jerónimo
Pizarro)
</li>
<li>University of Salamanca: Seminar Series “Arcádia
Babélica”, organized by the Philology Department of the
University of Salamanca (June 20, 2014; coord. <NAME>)</li>
<li>Northwestern University, Evanston, Illinois: International
conference “<a href="http://tei.northwestern.edu/" target="new">Decoding
the Encoded</a>,” organized by the Text Encoding Initiative
Consortium (October 22-24, 2014; coord. <NAME>)
</li>
</ul>
<h4>2015</h4>
<ul>
<li>University of Grenoble: Symposium “<a
href="http://www.nedimah.eu/reports/toward-new-social-contract-between-publishers-and-editors"
target="new">Toward a New Social Contract between Publishers and
Editors</a>,” organized by the Network for Digital Methods in the
Arts and Humanities—NeDiMAH (European Science Foundation), Maison
des Sciences de l’Homme and Université de Grenoble (January
26, 2015; coords. <NAME> and <NAME>)
</li>
<li>University of Rome La Sapienza: Symposium “<a
href="http://www.disp.let.uniroma1.it/archivionotizie/ecd/dce-edizioni-confronto/comparing-editions"
target="new">Edizioni Critiche Digitali: Edizioni a Confronto /
Digital Critical Editions: Comparing Editions</a>,” organized by
the Dipartimento di Studi Greco-Latini, Italiani, Scenico-Musicali,
Universitá la Sapienza, Roma (March 27, 2105; coords. Paola
Italia and <NAME>)
</li>
<li>University of Georgia, Athens, GA, Wilson Center for
Humanities and Arts: Symposium “<a
href="https://willson.uga.edu/event/textual-machines-a-spring-symposium-exhibit/"
target="new">Textual Machines</a>” (April 17-18, 2015; coord.
<NAME>)
</li>
<li>University of Coimbra, Centre for Portuguese Literature
(CLP): International conference “<a
href="https://eld2015.wordpress.com/programme/" target="new">Digital
Literary Studies | Estudos Literários Digitais</a>” (May
14-15, 2015; coord. <NAME>)
</li>
<li>University of Lisbon, Institute for Scoial Sciences (ICS):
Colloquium “Cultura e Digital em Portugal em 2015” (June
17, 2015; coords. José Luí<NAME>, João
<NAME> and <NAME>)</li>
<li>University of Gothemburg, Center for Digital Humanities: <a
href="http://cdh.hum.gu.se/Aktuellt/e/?eventId=2355210788"
target="new">Seminar</a> (September 24, 2015; coord. <NAME>)
</li>
<li>New University of Lisbon, School of Social and Human Sciences
(FCSH): International congress “<a
href="https://congressohdpt.wordpress.com/programa/" target="new">Humanidades
Digitais em Portugal: Construir Pontes e Quebrar Barreiras na Era
Digital</a>” (October 8-9, 2015; coord. <NAME>)
</li>
<li>University of Coimbra, Centre for Social and Cultural History
(CHSC): International conference “<a
href="http://ahlist.org/conferences/2015-ahlist-coimbra/program/"
target="new">Consilience and Inclusion: Scientific and Cultural
Encounters</a>,” organized by the Association of History,
Literature, Science, and Technology (November 19-21, 2015; coord.
Yonsoo Kim)
</li>
<li>University of Coimbra, School of Economics: Colloquium
“<a
href="https://www.uc.pt/feuc/noticias/2015/novembro15/20151123"
target="new">On/Off: Navegando pelas Culturas Digitais,
Tecnologia e Conhecimento</a>,” organized by the “Art,
Culture and Communication” Section of the Portuguese
Association of Sociology (November 26, 2015; coords. Claudino
Ferreira and <NAME>)
</li>
</ul>
<h4>2016</h4>
<ul>
<li>Aristotle University of Thessaloniki, Department of American
Studies: Seminar (January 28, 2016; coord. Tatiani Raptzikou)</li>
<li>University of Maryland, College Park, Maryland Institute for
Technology in the Humanities (MITH): Research seminar (March 29,
2016; coord. Neil Fraistat)</li>
<li>University of Maryland, College Park, English Department,
Center for Comparative and Literary Studies (CCLS): <a
href="http://www.english.umd.edu/events/23271" target="new">Research
Talk</a> (April 14, 2016; coord. Orrin Wang)
</li>
<li>Rochester Institute of Technology, Rochester, NY, Digital
Humanities and Social Sciences Program and School of Media Sciences:
Lecture (April 19, 2016; coord. <NAME>)</li>
<li>University of Pisa, Informatica Umanistica: <a
href="http://www.labcd.unipi.it/seminari/silvestre-a-digital-archive-of-fernando-pessoa/"
target="new">Seminario di Cultura Digitale</a> (December 7, 2016;
coord. Enrica Salvatori)
</li>
</ul>
<h4>2017</h4>
<ul>
<li>University of Lisbon, School of Arts and Humanities: <a
href="http://www.letras.ulisboa.pt/pt/agenda/conferencia-reimaginar-a-edicao-digital-no-arquivo-livro-do-desassossego"
target="new">Lecture for the Program in Textual Criticism</a>
(January 24, 2017; coords. Esperança Cardeira, <NAME>
and Joã<NAME>ísio)
</li>
<li>University of California, Los Angeles, Graduate School of
Education and Information Studies: <a
href="https://is.gseis.ucla.edu/research/colloquium/" target="new">Colloquium:
Breslauer lecture series</a> (February 2, 2017; coord. <NAME>)
</li>
<li>Calouste Gulbenkian Foundation: <a
href="http://casafernandopessoa.cm-lisboa.pt/fileadmin/CASA_FERNANDO_PESSOA/AF_CFP_Congresso_Internacional_FP_2017_Programa_Digital_V3.pdf"
target="new">IV Congresso Internacional Fernando Pessoa</a>,
organizaed by <NAME> (February 9-11, 2017; coord. Clara
Riso)
</li>
<li>Fernando Pessoa University, Porto: International conference
“<a href="https://conference.eliterature.org/2017/conference"
target="new">ELO 2017: Affiliations, Communities, Translations</a>”
(July 18-22, 2017; coords. <NAME> and <NAME>)
</li>
<li>Complutense University of Madrid, School of Education:
International Symposium of Digital Humanities of the South “<a
href="https://www.ucm.es/leethi/esc-programa" target="new">Escritura
Creativa Digital y Colecciones Digitales</a>,” organized by the
research group LEETHI-Literaturas Españolas y Europeas del
Texto al Hipermedia (September 13-15, 2017; coord. <NAME>)
</li>
</ul>
</ul>
<p> </p>
<h3>Used Tools</h3>
<ul>
<li><a href="http://annotatorjs.org/">Annotator</a></li>
<li><a href="http://getbootstrap.com/">Bootstrap</a></li>
<li><a href="https://github.com/wenzhixin/bootstrap-table">Bootstrap Table</a></li>
<li><a href="https://eclipse.org/">Eclipse</a></li>
<li><a href="https://fenix-framework.github.io/">Fénix Framework</a></li>
<li><a href="https://github.com/">GitHub</a></li>
<li><a href="https://www.java.com/">Java</a></li>
<li><a href="http://www.oracle.com/technetwork/java/javaee/jsp/index.html">JavaServer Pages</a></li>
<li><a href="http://jblas.org/">jblas</a></li>
<li><a href="http://jmeter.apache.org/">JMeter</a></li>
<li><a href="https://jquery.com/">jQuery</a></li>
<li><a href="http://junit.org/">JUnit</a></li>
<li><a href="https://lucene.apache.org/">Lucene</a></li>
<li><a href="http://mallet.cs.umass.edu/">Mallet</a></li>
<li><a href="https://maven.apache.org/">Maven</a></li>
<li><a href="https://www.mysql.com/">MySQL</a></li>
<li><a href="http://openseadragon.github.io">OpenSeadragon</a></li>
<li><a href="https://www.oxygenxml.com/">Oxygen</a></li>
<li><a href="http://github.com/mleibman/slickgrid">SlickGrid</a></li>
<li><a href="https://projects.spring.io/spring-boot/">Spring Boot</a></li>
<li><a href="http://projects.spring.io/spring-social/">Spring Social</a></li>
<li><a href="https://tomcat.apache.org/">Tomcat</a></li>
</ul>
<p> </p>
<p>[updated 18-08-2017]</p>
<p> </p>
</div>
)
}
export default Acknowledgements_en
|
micaste/symphony-api-client-java
|
symphony-bdk-legacy/symphony-api-client-java/src/main/java/model/RoomProperties.java
|
<reponame>micaste/symphony-api-client-java<filename>symphony-bdk-legacy/symphony-api-client-java/src/main/java/model/RoomProperties.java
package model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.List;
/**
 * Jackson-deserialized view of a Symphony chat room's properties.
 * Unknown JSON fields are ignored so this model stays forward-compatible
 * with newer API versions. Plain bean: fields, getters, setters only.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class RoomProperties {
    private String name;               // room display name
    private String description;        // free-text room description
    private User creatorUser;          // user that created the room
    private Long createdDate;          // creation timestamp (presumably epoch millis -- TODO confirm against API)
    private Boolean external;          // whether the room is external
    private Boolean crossPod;          // whether the room spans pods
    private Boolean isPublic;          // whether the room is public
    private Boolean copyProtected;     // whether the room is copy-protected
    private Boolean readOnly;          // whether the room is read-only
    private Boolean discoverable;      // whether the room is discoverable in search
    private Boolean membersCanInvite;  // whether members may invite others
    private List<Keyword> keywords;    // keyword/tag pairs attached to the room
    private Boolean canViewHistory;    // whether history is visible to members

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public User getCreatorUser() {
        return creatorUser;
    }

    public void setCreatorUser(User creatorUser) {
        this.creatorUser = creatorUser;
    }

    public Long getCreatedDate() {
        return createdDate;
    }

    public void setCreatedDate(Long createdDate) {
        this.createdDate = createdDate;
    }

    public Boolean getExternal() {
        return external;
    }

    public void setExternal(Boolean external) {
        this.external = external;
    }

    public Boolean getCrossPod() {
        return crossPod;
    }

    public void setCrossPod(Boolean crossPod) {
        this.crossPod = crossPod;
    }

    // Accessor pair intentionally named getPublic/setPublic (not getIsPublic)
    // to match the JSON property "public".
    public Boolean getPublic() {
        return isPublic;
    }

    public void setPublic(Boolean isPublic) {
        this.isPublic = isPublic;
    }

    public Boolean getCopyProtected() {
        return copyProtected;
    }

    public void setCopyProtected(Boolean copyProtected) {
        this.copyProtected = copyProtected;
    }

    public Boolean getReadOnly() {
        return readOnly;
    }

    public void setReadOnly(Boolean readOnly) {
        this.readOnly = readOnly;
    }

    public Boolean getDiscoverable() {
        return discoverable;
    }

    public void setDiscoverable(Boolean discoverable) {
        this.discoverable = discoverable;
    }

    public Boolean getMembersCanInvite() {
        return membersCanInvite;
    }

    public void setMembersCanInvite(Boolean membersCanInvite) {
        this.membersCanInvite = membersCanInvite;
    }

    public List<Keyword> getKeywords() {
        return keywords;
    }

    public void setKeywords(List<Keyword> keywords) {
        this.keywords = keywords;
    }

    public Boolean getCanViewHistory() {
        return canViewHistory;
    }

    public void setCanViewHistory(Boolean canViewHistory) {
        this.canViewHistory = canViewHistory;
    }
}
|
jianjustin/awesome-leetcode-algorithm
|
src/main/java/org/awesome/leetcode/algorithm/arrays/YuanQuanZhongZuiHouShengXiaDeShuZiLcof.java
|
package org.awesome.leetcode.algorithm.arrays;
/**
 * 剑指 Offer 62 — last number remaining in a circle (the Josephus problem):
 * numbers 0..n-1 stand in a circle; repeatedly remove the m-th number and
 * continue counting from the next one; return the sole survivor.
 *
 * Fixed: the original array simulation never wrapped the index modulo n
 * ({@code m%n+begin-1} walks past the array end — for n=5, m=3 it throws
 * ArrayIndexOutOfBoundsException on the second round), and its final loop
 * returned {@code arr[i]} (always 0) instead of the index {@code i}.
 */
public class YuanQuanZhongZuiHouShengXiaDeShuZiLcof {

    /**
     * Returns the last remaining number, using the classic O(n)/O(1)
     * Josephus recurrence: f(1) = 0, f(i) = (f(i-1) + m) mod i.
     *
     * @param n circle size, n >= 1
     * @param m count-off step, m >= 1
     * @return the surviving number in 0..n-1
     */
    public int lastRemaining(int n, int m) {
        int survivor = 0; // survivor's position in a circle of size 1
        for (int size = 2; size <= n; size++) {
            // Growing the circle from size-1 to size shifts the survivor's
            // position forward by m, wrapped into the new circle.
            survivor = (survivor + m) % size;
        }
        return survivor;
    }

    public static void main(String[] args) {
        // Expected output: 3
        System.out.println((new YuanQuanZhongZuiHouShengXiaDeShuZiLcof()).lastRemaining(5, 3));
    }
}
|
hunshikan/corant
|
corant-modules/corant-modules-jpa/corant-modules-jpa-hibernate-ogm/src/main/java/org/corant/modules/jpa/hibernate/ogm/type/BigDecimalType.java
|
package org.corant.modules.jpa.hibernate.ogm.type;
import java.math.BigDecimal;
import org.hibernate.MappingException;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.ogm.type.impl.AbstractGenericBasicType;
import org.hibernate.type.descriptor.java.BigDecimalTypeDescriptor;
/**
 * corant-modules-jpa-hibernate-ogm
 *
 * Hibernate OGM basic type mapping {@link BigDecimal}: pairs the OGM
 * grid-side descriptor with Hibernate's standard Java-side
 * {@code BigDecimalTypeDescriptor}.
 *
 * @author sushuaihao 2019/8/14
 * @since
 */
public class BigDecimalType extends AbstractGenericBasicType<BigDecimal> {

  // Shared stateless singleton; the type holds no per-use state.
  public static final BigDecimalType INSTANCE = new BigDecimalType();

  private static final long serialVersionUID = 4457726737615145243L;

  public BigDecimalType() {
    super(BigDecimalGridTypeDescriptor.INSTANCE, BigDecimalTypeDescriptor.INSTANCE);
  }

  // A BigDecimal occupies exactly one datastore column.
  @Override
  public int getColumnSpan(Mapping mapping) throws MappingException {
    return 1;
  }

  // NOTE(review): returning null leaves this type without a registry name;
  // confirm this is intentional (the type appears to be used only via
  // INSTANCE, never looked up by name).
  @Override
  public String getName() {
    return null;
  }
}
|
xiivler/smo-practice
|
include/nn/atk.h
|
#pragma once
#include "types.h"
namespace nn { namespace atk {

// Reverse-engineered binding of nn::atk::SoundArchive. Only the two label/id
// lookup entry points are declared; the implementations live in the game's
// own binary.
class SoundArchive
{
public:
    // Returns the string label for a sound item id.
    const char* GetItemLabel(u32 id) const;
    // Returns the sound item id for a string label.
    u32 GetItemId(char const* label) const;
};

class SoundActor // Inherits SoundStartable, size: 0x7C
{
public:
    virtual ~SoundActor();
    // Opaque padding so sizeof(SoundActor) matches the real object:
    // 0x7C total minus 0x4 for the vtable pointer (assumes a 32-bit-style
    // 4-byte vtable slot -- TODO confirm target ABI).
    u8 data[0x7C-0x4];
};

} }
|
Claudio5/GyroDraw
|
app/src/main/java/ch/epfl/sweng/GyroDraw/MainActivity.java
|
package ch.epfl.sweng.GyroDraw;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
import android.widget.Toast;
import com.google.firebase.FirebaseApp;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import ch.epfl.sweng.GyroDraw.auth.LoginActivity;
import ch.epfl.sweng.GyroDraw.firebase.FbDatabase;
import ch.epfl.sweng.GyroDraw.firebase.OnSuccessValueEventListener;
import ch.epfl.sweng.GyroDraw.home.HomeActivity;
import ch.epfl.sweng.GyroDraw.utils.GlideUtils;
import ch.epfl.sweng.GyroDraw.utils.network.ConnectivityWrapper;
/**
 * Class representing the first page shown to the user upon first app launch.
 *
 * Shows a loading screen while Firebase resolves the current user; if a
 * signed-in, known user is found, redirects to the home flow, otherwise
 * displays the main layout with the login button.
 */
public class MainActivity extends NoBackPressActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
        // Loading screen first: the Firebase lookup below is asynchronous.
        setContentView(R.layout.activity_loading_screen);
        GlideUtils.startDotsWaitingAnimation(this);
        GlideUtils.startBackgroundAnimation(this);
        FirebaseApp.initializeApp(this);
        FirebaseAuth auth = FirebaseAuth.getInstance();
        // Only attempt auto-login when there is both a cached Firebase user
        // and network connectivity; otherwise fall through to the login UI.
        if (auth.getCurrentUser() != null && ConnectivityWrapper.isOnline(this)) {
            FbDatabase.getUserByEmail(auth.getCurrentUser().getEmail(),
                    new OnSuccessValueEventListener() {
                        @Override
                        public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                            handleRedirection(dataSnapshot);
                        }
                    });
        } else {
            displayMainLayout();
        }
    }

    /**
     * Checks if account exists and redirects to {@link HomeActivity}.
     * Else shows the MainActivity.
     *
     * @param dataSnapshot the snapshot containing the response to evaluate
     */
    @VisibleForTesting
    public void handleRedirection(DataSnapshot dataSnapshot) {
        if (dataSnapshot.exists()) {
            // Known account: hydrate the local account from Firebase, then
            // let handleUserStatus() (inherited) decide where to navigate,
            // surfacing any problem in the error TextView.
            cloneAccountFromFirebase(dataSnapshot);
            TextView errorMessage = findViewById(
                    R.id.errorMessage);
            errorMessage.setTypeface(typeMuro);
            handleUserStatus(errorMessage);
        } else {
            displayMainLayout();
        }
    }

    // Swaps in the main layout with the login button; tapping it launches
    // LoginActivity when online, or shows a toast when offline.
    private void displayMainLayout() {
        setContentView(R.layout.activity_main);
        GlideUtils.startBackgroundAnimation(this);
        findViewById(R.id.login_button).setOnClickListener(
                new OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        if (ConnectivityWrapper.isOnline(getApplicationContext())) {
                            launchActivity(LoginActivity.class);
                            finish();
                        } else {
                            Toast.makeText(getApplicationContext(), "No internet connection",
                                    Toast.LENGTH_SHORT).show();
                        }
                    }
                });
    }
}
|
matthewberends/Software
|
src/software/gui/widgets/zoomable_qgraphics_view.h
|
#pragma once
#include <QtGui/QKeyEvent>
#include <QtGui/QWheelEvent>
#include <QtWidgets/QGraphicsView>
/**
 * A custom QGraphicsView that allows zooming with the mousewheel
 */
class ZoomableQGraphicsView : public QGraphicsView
{
    Q_OBJECT

public:
    explicit ZoomableQGraphicsView(QWidget *parent = 0);

protected slots:
    // Overrides QGraphicsView::wheelEvent to scale the view instead of
    // scrolling it.
    void wheelEvent(QWheelEvent *event);

private:
    // Per-wheel-notch scale factors: slightly asymmetric (1.04 vs 0.96),
    // so repeated in/out zooms do not return to exactly the same scale.
    const double zoom_in_scaling_factor  = 1.04;
    const double zoom_out_scaling_factor = 0.96;
};
|
aayushkapoor206/whatshot
|
node_modules/ionic-framework/dist/src/es5/system/ionic/animations/ios-transition.js
|
// GENERATED CODE: Babel/SystemJS (ES5) transpilation of the ES6
// `IOSTransition` animation class. The `_get`, `_classCallCheck` and
// `_inherits` helpers below are standard Babel runtime shims — do not edit
// this file by hand; change the ES6 source instead.
System.register('ionic/animations/ios-transition', ['./animation'], function (_export) {
    'use strict';
    var Animation, DURATION, EASING, OPACITY, TRANSLATEX, OFF_RIGHT, OFF_LEFT, CENTER, OFF_OPACITY, SHOW_BACK_BTN_CSS, IOSTransition;
    var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
    function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
    function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
    return {
        setters: [function (_animation) {
            Animation = _animation.Animation;
        }],
        execute: function () {
            // Transition tuning constants for the iOS-style push/pop animation.
            DURATION = 550;
            EASING = 'cubic-bezier(0.36,0.66,0.04,1)';
            OPACITY = 'opacity';
            TRANSLATEX = 'translateX';
            OFF_RIGHT = '99.5%';
            OFF_LEFT = '-33%';
            CENTER = '0%';
            OFF_OPACITY = 0.8;
            SHOW_BACK_BTN_CSS = 'show-back-button';
            IOSTransition = (function (_Animation) {
                _inherits(IOSTransition, _Animation);
                function IOSTransition(enteringView, leavingView, opts) {
                    _classCallCheck(this, IOSTransition);
                    _get(Object.getPrototypeOf(IOSTransition.prototype), 'constructor', this).call(this, null, opts);
                    this.duration(DURATION);
                    this.easing(EASING);
                    // what direction is the transition going
                    var backDirection = opts.direction === 'back';
                    // do they have navbars?
                    var enteringHasNavbar = enteringView.hasNavbar();
                    var leavingHasNavbar = leavingView && leavingView.hasNavbar();
                    var enteringPage = new Animation(enteringView.pageRef());
                    enteringPage.before.addClass('show-page');
                    this.add(enteringPage);
                    // entering content
                    var enteringContent = new Animation(enteringView.contentRef());
                    this.add(enteringContent);
                    if (backDirection) {
                        // entering content, back direction
                        enteringContent.fromTo(TRANSLATEX, OFF_LEFT, CENTER).fromTo(OPACITY, OFF_OPACITY, 1);
                    } else {
                        // entering content, forward direction
                        enteringContent.fromTo(TRANSLATEX, OFF_RIGHT, CENTER).fromTo(OPACITY, 1, 1);
                    }
                    if (enteringHasNavbar) {
                        // entering page has a navbar
                        var enteringNavBar = new Animation(enteringView.navbarRef());
                        this.add(enteringNavBar);
                        var enteringTitle = new Animation(enteringView.titleRef());
                        var enteringNavbarItems = new Animation(enteringView.navbarItemRefs());
                        var enteringNavbarBg = new Animation(enteringView.navbarBgRef());
                        var enteringBackButton = new Animation(enteringView.backBtnRef());
                        enteringNavBar.add(enteringTitle).add(enteringNavbarItems).add(enteringNavbarBg).add(enteringBackButton);
                        enteringNavBar.before.addClass('show-navbar');
                        enteringTitle.fadeIn();
                        enteringNavbarItems.fadeIn();
                        // set properties depending on direction
                        if (backDirection) {
                            // entering navbar, back direction
                            enteringTitle.fromTo(TRANSLATEX, OFF_LEFT, CENTER);
                            if (enteringView.enableBack()) {
                                // back direction, entering page has a back button
                                enteringBackButton.before.addClass(SHOW_BACK_BTN_CSS).fadeIn();
                            }
                        } else {
                            // entering navbar, forward direction
                            enteringTitle.fromTo(TRANSLATEX, OFF_RIGHT, CENTER);
                            if (leavingHasNavbar) {
                                // entering navbar, forward direction, and there's a leaving navbar
                                // should just fade in, no sliding
                                enteringNavbarBg.fromTo(TRANSLATEX, CENTER, CENTER).fadeIn();
                            } else {
                                // entering navbar, forward direction, and there's no leaving navbar
                                // should just slide in, no fading in
                                enteringNavbarBg.fromTo(TRANSLATEX, OFF_RIGHT, CENTER).fromTo(OPACITY, 1, 1);
                            }
                            if (enteringView.enableBack()) {
                                // forward direction, entering page has a back button
                                enteringBackButton.before.addClass(SHOW_BACK_BTN_CSS).fadeIn();
                                var enteringBackBtnText = new Animation(enteringView.backBtnTextRef());
                                enteringBackBtnText.fromTo(TRANSLATEX, '100px', '0px');
                                enteringNavBar.add(enteringBackBtnText);
                            } else {
                                enteringBackButton.before.removeClass(SHOW_BACK_BTN_CSS);
                            }
                        }
                    }
                    // setup leaving view
                    if (leavingView) {
                        // leaving content
                        var leavingContent = new Animation(leavingView.contentRef());
                        this.add(leavingContent);
                        if (backDirection) {
                            // leaving content, back direction
                            leavingContent.fromTo(TRANSLATEX, CENTER, '100%').fromTo(OPACITY, 1, 1);
                        } else {
                            // leaving content, forward direction
                            leavingContent.fromTo(TRANSLATEX, CENTER, OFF_LEFT).fromTo(OPACITY, 1, OFF_OPACITY);
                        }
                        if (leavingHasNavbar) {
                            // leaving page has a navbar
                            var leavingNavBar = new Animation(leavingView.navbarRef());
                            var leavingBackButton = new Animation(leavingView.backBtnRef());
                            var leavingTitle = new Animation(leavingView.titleRef());
                            var leavingNavbarItems = new Animation(leavingView.navbarItemRefs());
                            var leavingNavbarBg = new Animation(leavingView.navbarBgRef());
                            leavingNavBar.add(leavingBackButton).add(leavingTitle).add(leavingNavbarItems).add(leavingNavbarBg);
                            this.add(leavingNavBar);
                            // fade out leaving navbar items
                            leavingBackButton.fadeOut();
                            leavingTitle.fadeOut();
                            leavingNavbarItems.fadeOut();
                            if (backDirection) {
                                // leaving navbar, back direction
                                leavingTitle.fromTo(TRANSLATEX, CENTER, '100%');
                                if (enteringHasNavbar) {
                                    // leaving navbar, back direction, and there's an entering navbar
                                    // should just fade out, no sliding
                                    leavingNavbarBg.fromTo(TRANSLATEX, CENTER, CENTER).fadeOut();
                                } else {
                                    // leaving navbar, back direction, and there's no entering navbar
                                    // should just slide out, no fading out
                                    leavingNavbarBg.fromTo(TRANSLATEX, CENTER, '100%').fromTo(OPACITY, 1, 1);
                                }
                                var leavingBackBtnText = new Animation(leavingView.backBtnTextRef());
                                leavingBackBtnText.fromTo(TRANSLATEX, CENTER, 300 + 'px');
                                leavingNavBar.add(leavingBackBtnText);
                            } else {
                                // leaving navbar, forward direction
                                leavingTitle.fromTo(TRANSLATEX, CENTER, OFF_LEFT);
                            }
                        }
                    }
                }
                return IOSTransition;
            })(Animation);
            // Make the transition available under the 'ios-transition' key.
            Animation.register('ios-transition', IOSTransition);
        }
    };
});
|
navikt/fp-formidling
|
domenetjenester/brevbestiller/src/test/java/no/nav/foreldrepenger/melding/datamapper/DatamapperTestUtil.java
|
<filename>domenetjenester/brevbestiller/src/test/java/no/nav/foreldrepenger/melding/datamapper/DatamapperTestUtil.java<gh_stars>1-10
package no.nav.foreldrepenger.melding.datamapper;
import static org.mockito.Mockito.when;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.Optional;
import java.util.UUID;
import org.mockito.Mockito;
import no.nav.foreldrepenger.melding.behandling.Behandling;
import no.nav.foreldrepenger.melding.behandling.BehandlingType;
import no.nav.foreldrepenger.melding.datamapper.konfig.BrevParametere;
import no.nav.foreldrepenger.melding.dokumentdata.DokumentAdresse;
import no.nav.foreldrepenger.melding.dokumentdata.DokumentData;
import no.nav.foreldrepenger.melding.dokumentdata.DokumentFelles;
import no.nav.foreldrepenger.melding.dokumentdata.DokumentFelles.Kopi;
import no.nav.foreldrepenger.melding.fagsak.FagsakYtelseType;
import no.nav.foreldrepenger.melding.geografisk.Språkkode;
import no.nav.foreldrepenger.melding.hendelser.DokumentHendelse;
import no.nav.foreldrepenger.melding.kodeverk.kodeverdi.DokumentMalType;
import no.nav.foreldrepenger.melding.typer.Saksnummer;
public class DatamapperTestUtil {
public static final String SØKERS_NAVN = "<NAME>";
public static final String SØKERS_FNR = "11111111111";
public static final String VERGES_NAVN = "<NAME>";
public static final String VERGES_FNR = "99999999999";
public static final String SAKSNUMMER = "123456";
public static final String FRITEKST = "FRITEKST";
public static final Period SVARFRIST = Period.ofWeeks(6);
private static final int KLAGEFRIST = 14;
private static final int KLAGEFRIST_INNSYN = 14;
private static final Period SØK_ANTALL_UKER = Period.ofWeeks(6);
private static BrevParametere brevParametere = new BrevParametere(KLAGEFRIST, KLAGEFRIST_INNSYN, SVARFRIST, SØK_ANTALL_UKER);
public static BrevParametere getBrevParametere() {
return brevParametere;
}
public static DokumentFelles getDokumentFelles() {
DokumentFelles dokumentFelles = Mockito.mock(DokumentFelles.class);
when(dokumentFelles.getSakspartNavn()).thenReturn(SØKERS_NAVN);
when(dokumentFelles.getSakspartPersonStatus()).thenReturn("ANNET");
return dokumentFelles;
}
public static DokumentFelles lagStandardDokumentFelles(DokumentData dokumentdata) {
return lagStandardDokumentFelles(dokumentdata, null, false);
}
public static DokumentFelles lagStandardDokumentFelles(DokumentData dokumentdata, Kopi kopi, boolean tilVerge) {
DokumentAdresse dokumentAdresse = new DokumentAdresse.Builder()
.medAdresselinje1("Adresse 1")
.medPostNummer("0491")
.medPoststed("OSLO")
.medMottakerNavn(SØKERS_NAVN)
.build();
return DokumentFelles.builder(dokumentdata)
.medAutomatiskBehandlet(Boolean.TRUE)
.medDokumentDato(LocalDate.now())
.medKontaktTelefonNummer("22222222")
.medMottakerAdresse(dokumentAdresse)
.medNavnAvsenderEnhet("NAV Familie og pensjonsytelser")
.medPostadresse(dokumentAdresse)
.medReturadresse(dokumentAdresse)
.medMottakerId(tilVerge ? VERGES_FNR : SØKERS_FNR)
.medMottakerNavn(tilVerge ? VERGES_NAVN : SØKERS_NAVN)
.medSaksnummer(new Saksnummer(SAKSNUMMER))
.medSakspartId(SØKERS_FNR)
.medSakspartNavn(SØKERS_NAVN)
.medErKopi(kopi != null ? Optional.of(kopi) : null)
.medMottakerType(DokumentFelles.MottakerType.PERSON)
.medSpråkkode(Språkkode.NB)
.medSakspartPersonStatus("ANNET")
.build();
}
public static DokumentData lagStandardDokumentData(DokumentMalType dokumentMalType) {
return DokumentData.builder()
.medDokumentMalType(dokumentMalType)
.medBehandlingUuid(UUID.randomUUID())
.medBestillingType("B")
.medBestiltTid(LocalDateTime.now())
.build();
}
public static DokumentHendelse.Builder lagStandardHendelseBuilder() {
return DokumentHendelse.builder()
.medBestillingUuid(UUID.randomUUID())
.medBehandlingUuid(UUID.randomUUID())
.medFritekst(FRITEKST)
.medYtelseType(FagsakYtelseType.FORELDREPENGER);
}
/**
 * Standard hendelse builder for svangerskapspenger (SVP). Identical to
 * {@link #lagStandardHendelseBuilder()} except for the ytelse type.
 */
public static DokumentHendelse.Builder lagStandardHendelseSVPBuilder() {
    // Reuse the standard builder and only override the ytelse type, so the
    // two fixtures cannot drift apart.
    return lagStandardHendelseBuilder()
            .medYtelseType(FagsakYtelseType.SVANGERSKAPSPENGER);
}
/** A fully built standard hendelse (foreldrepenger variant). */
public static DokumentHendelse standardDokumenthendelse() {
    return lagStandardHendelseBuilder().build();
}
/** Builder for a standard first-time-application (førstegangssøknad) behandling in bokmål. */
public static Behandling.Builder standardBehandlingBuilder() {
    return Behandling.builder()
            .medUuid(UUID.randomUUID())
            .medBehandlingType(BehandlingType.FØRSTEGANGSSØKNAD)
            .medSpråkkode(Språkkode.NB);
}
/** A fully built standard behandling. */
public static Behandling standardBehandling() {
    return standardBehandlingBuilder().build();
}
}
|
RobotLocomotion/drake-python3.7
|
solvers/test/rotation_constraint_test.cc
|
#include "drake/solvers/rotation_constraint.h"
#include <random>
#include <gtest/gtest.h>
#include "drake/common/symbolic.h"
#include "drake/common/test_utilities/eigen_matrix_compare.h"
#include "drake/math/random_rotation.h"
#include "drake/math/rotation_matrix.h"
#include "drake/solvers/mathematical_program.h"
#include "drake/solvers/mosek_solver.h"
#include "drake/solvers/solve.h"
using Eigen::Vector3d;
using Eigen::Matrix3d;
using drake::symbolic::Expression;
using std::sqrt;
namespace drake {
namespace solvers {
namespace {
// Adds to *prog the objective  min σ  subject to  σ ≥ ‖R − R_desired‖_F:
// a least-squares pull of the decision matrix R toward R_desired, written as
// a Lorentz-cone constraint on the Frobenius norm of the error.
void AddObjective(MathematicalProgram* prog,
                  const Eigen::Ref<const MatrixDecisionVariable<3, 3>>& R,
                  const Eigen::Ref<const Matrix3d>& R_desired) {
  const auto R_error = R - R_desired;
  // sigma >= |error|_2
  MatrixDecisionVariable<1, 1> sigma =
      prog->NewContinuousVariables<1, 1>("sigma");
  // trace(R_errorᵀ * R_error) = sum_{i,j} R_error(i,j)²
  // NOTE(review): the 1E-15 is the third argument of this
  // AddLorentzConeConstraint overload — presumably a PSD-check tolerance for
  // the quadratic expression; confirm against the Drake API docs.
  prog->AddLorentzConeConstraint(
      sigma(0), (R_error.transpose() * R_error).trace(), 1E-15);
  // min sigma
  prog->AddCost(sigma(0));
}
// Iterates over possible setting of the RPY limits flag, and for each setting
// evaluates a mesh of points within those limits. This test confirms that
// of the rotation matrices generated from rotations with those limits are
// still feasible after the RPY limits constraints have been applied.
// Parameterized fixture: the int parameter is the bitmask of
// RollPitchYawLimits flags exercised by the test body.
class TestRpyLimitsFixture : public ::testing::TestWithParam<int> {
 public:
  DRAKE_NO_COPY_NO_MOVE_NO_ASSIGN(TestRpyLimitsFixture)

  TestRpyLimitsFixture() = default;
};
TEST_P(TestRpyLimitsFixture, TestRpyLimits) {
  // Bitmask of RollPitchYawLimits flags under test.
  const int limits = GetParam();

  // Add brace scope to avoid reflowing all of this code.
  {
    MathematicalProgram prog;
    auto Rvar = NewRotationMatrixVars(&prog);
    AddBoundingBoxConstraintsImpliedByRollPitchYawLimits(
        &prog, Rvar, static_cast<RollPitchYawLimits>(limits));
    auto bb_constraints = prog.bounding_box_constraints();

    // Bounds are loose, so just test that feasible points are indeed feasible.
    // Derive the roll/pitch/yaw sampling ranges from the same flags handed to
    // the constraint generator; a 0-to-PI flag takes precedence over the
    // corresponding -PI/2-to-PI/2 flag for the lower bound.
    const double rmin =
        (limits & kRoll_0_to_PI)
            ? 0
            : (limits & kRoll_NegPI_2_to_PI_2) ? -M_PI_2 : -M_PI;
    const double rmax = (limits & kRoll_NegPI_2_to_PI_2) ? M_PI_2 : M_PI;
    const double pmin =
        (limits & kPitch_0_to_PI)
            ? 0
            : (limits & kPitch_NegPI_2_to_PI_2) ? -M_PI_2 : -M_PI;
    const double pmax = (limits & kPitch_NegPI_2_to_PI_2) ? M_PI_2 : M_PI;
    const double ymin = (limits & kYaw_0_to_PI)
                            ? 0
                            : (limits & kYaw_NegPI_2_to_PI_2) ? -M_PI_2 : -M_PI;
    const double ymax = (limits & kYaw_NegPI_2_to_PI_2) ? M_PI_2 : M_PI;

    // Mesh the admissible rpy box at PI/6 resolution and check every sampled
    // rotation matrix against every generated bounding-box constraint.
    for (double roll = rmin; roll <= rmax; roll += M_PI / 6) {
      for (double pitch = pmin; pitch <= pmax; pitch += M_PI / 6) {
        for (double yaw = ymin; yaw <= ymax; yaw += M_PI / 6) {
          const drake::math::RollPitchYaw<double> rpy(roll, pitch, yaw);
          Matrix3d R = rpy.ToMatrix3ViaRotationMatrix();
          // View R's nine entries as the flat decision-variable vector.
          Eigen::Map<Eigen::Matrix<double, 9, 1>> vecR(R.data(), R.size());
          prog.SetInitialGuessForAllVariables(vecR);

          for (const auto& b : bb_constraints) {
            const Eigen::VectorXd x = prog.EvalBindingAtInitialGuess(b);
            const Eigen::VectorXd& lb = b.evaluator()->lower_bound();
            const Eigen::VectorXd& ub = b.evaluator()->upper_bound();
            for (int i = 0; i < x.size(); i++) {
              EXPECT_GE(x(i), lb(i));
              EXPECT_LE(x(i), ub(i));
            }
          }
        }
      }
    }
  }
}
// Exercise limit bitmasks 2, 4, 6, ..., 126. NOTE(review): step 2 keeps bit 0
// permanently clear — presumably that bit is not a meaningful limit flag;
// confirm against the RollPitchYawLimits enum.
INSTANTIATE_TEST_SUITE_P(
    RotationTest, TestRpyLimitsFixture,
    ::testing::Range(1 << 1, 1 << 7, 2));
// Sets up and solves an optimization:
// <pre>
// min_R sum_{i,j} |R(i,j) - R_desired(i,j)|^2
// </pre>
// where the columans (and rows) of R_desired are outside the unit ball.
// Confirms that the SpectralPSD constraint results in a matrix with columns
// and rows of unit length (or less), and that the actual PSD constraint (typed
// in a very different way here) was satisfied.
GTEST_TEST(RotationTest, TestSpectralPsd) {
  MathematicalProgram prog;
  auto Rvar = NewRotationMatrixVars(&prog);

  // R_desired is outside the unit ball.
  AddObjective(&prog, Rvar, 2 * Eigen::Matrix<double, 3, 3>::Ones());
  AddRotationMatrixSpectrahedralSdpConstraint(&prog, Rvar);
  MathematicalProgramResult result = Solve(prog);
  ASSERT_TRUE(result.is_success());

  Matrix3d R = result.GetSolution(Rvar);

  // The relaxation must keep every row and column inside the unit ball.
  double tol = 1e-6;
  EXPECT_LE(R.col(0).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.col(1).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.col(2).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.row(0).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.row(1).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.row(2).lpNorm<2>(), 1 + tol);

  // Check eq 10 in https://arxiv.org/pdf/1403.4914.pdf
  // (the 4x4 matrix U built from R must be positive semidefinite).
  Eigen::Matrix4d U;
  // clang-format off
  // NOLINTNEXTLINE(whitespace/comma)
  U << 1 - R(0, 0) - R(1, 1) + R(2, 2), R(0, 2) + R(2, 0), R(0, 1) - R(1, 0),
      R(1, 2) + R(2, 1),
      // NOLINTNEXTLINE(whitespace/comma)
      R(0, 2) + R(2, 0), 1 + R(0, 0) - R(1, 1) - R(2, 2), R(1, 2) - R(2, 1),
      R(0, 1) + R(1, 0),
      // NOLINTNEXTLINE(whitespace/comma)
      R(0, 1) - R(1, 0), R(1, 2) - R(2, 1), 1 + R(0, 0) + R(1, 1) + R(2, 2),
      R(2, 0) - R(0, 2),
      // NOLINTNEXTLINE(whitespace/comma)
      R(1, 2) + R(2, 1), R(0, 1) + R(1, 0), R(2, 0) - R(0, 2), 1 - R(0, 0)
      + R(1, 1) - R(2, 2);
  // clang-format on

  // PSD of the (symmetric) U: all eigenvalues must be nonnegative, up to tol.
  const Eigen::Array4d lambda_mag{U.eigenvalues().array().real()};
  for (int i = 0; i < 4; i++) EXPECT_GE(lambda_mag(i), -tol);
}
// Sets up and solves an optimization:
// <pre>
// min_R sum_{i,j} |R(i,j) - R_desired(i,j)|^2
// </pre>
// where the columns (and rows) of R_desired are outside the unit ball.
// Confirms that the Orthonormal SOCP constraints result in a solution matrix
// with columns and rows of unit length or less, and that the specific
// orthogonality relaxation implemented by the routine is satisfied.
GTEST_TEST(RotationTest, TestOrthonormal) {
  MathematicalProgram prog;
  auto Rvar = NewRotationMatrixVars(&prog);

  // R_desired is outside the unit ball.
  AddObjective(&prog, Rvar, 2 * Eigen::Matrix<double, 3, 3>::Ones());
  AddRotationMatrixOrthonormalSocpConstraint(&prog, Rvar);
  MathematicalProgramResult result = Solve(prog);
  ASSERT_TRUE(result.is_success());

  Matrix3d R = result.GetSolution(Rvar);

  double tol = 1e-4;
  // Columns must have unit length or less, and each column pair (u, v) must
  // satisfy the SOCP orthogonality relaxation  2|u·v| <= 2 - |u|² - |v|².
  EXPECT_LE(R.col(0).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.col(1).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.col(2).lpNorm<2>(), 1 + tol);
  EXPECT_LE(2 * std::abs(R.col(0).dot(R.col(1))),
            2 - R.col(0).dot(R.col(0)) - R.col(1).dot(R.col(1)) + tol);
  EXPECT_LE(2 * std::abs(R.col(1).dot(R.col(2))),
            2 - R.col(1).dot(R.col(1)) - R.col(2).dot(R.col(2)) + tol);
  EXPECT_LE(2 * std::abs(R.col(0).dot(R.col(2))),
            2 - R.col(0).dot(R.col(0)) - R.col(2).dot(R.col(2)) + tol);

  // Same checks for the rows. Bug fix: the right-hand sides for the (1,2) and
  // (0,2) row pairs previously reused rows 0 and 1 (a copy-paste of the first
  // check) instead of the norms of the pair actually being tested.
  EXPECT_LE(R.row(0).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.row(1).lpNorm<2>(), 1 + tol);
  EXPECT_LE(R.row(2).lpNorm<2>(), 1 + tol);
  EXPECT_LE(2 * std::abs(R.row(0).dot(R.row(1))),
            2 - R.row(0).dot(R.row(0)) - R.row(1).dot(R.row(1)) + tol);
  EXPECT_LE(2 * std::abs(R.row(1).dot(R.row(2))),
            2 - R.row(1).dot(R.row(1)) - R.row(2).dot(R.row(2)) + tol);
  EXPECT_LE(2 * std::abs(R.row(0).dot(R.row(2))),
            2 - R.row(0).dot(R.row(0)) - R.row(2).dot(R.row(2)) + tol);
}
} // namespace
} // namespace solvers
} // namespace drake
// Custom main (instead of linking gtest_main) so the MOSEK license can be
// acquired once for the whole binary.
int main(int argc, char** argv) {
  // Ensure that we have the MOSEK license for the entire duration of this test,
  // so that we do not have to release and re-acquire the license for every
  // test.
  auto mosek_license = drake::solvers::MosekSolver::AcquireLicense();
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
|
wh00sh/Tanda-DAPP
|
node_modules/mdi-material-ui/DishwasherAlert.js
|
<gh_stars>0
"use strict";

// Auto-generated icon module ("DishwasherAlert", Material Design Icons):
// wraps the raw SVG path data with the shared createIcon factory and
// re-exports the resulting component as the CommonJS default export.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _createIcon = _interopRequireDefault(require("./util/createIcon"));

// Babel interop helper: normalizes ES-module vs plain CJS default exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }

var _default = (0, _createIcon["default"])('M16 2H4C2.9 2 2 2.9 2 4V20C2 21.11 2.9 22 4 22H16C17.11 22 18 21.11 18 20V4C18 2.9 17.11 2 16 2M8 4C8.55 4 9 4.45 9 5S8.55 6 8 6 7 5.55 7 5 7.45 4 8 4M5 4C5.55 4 6 4.45 6 5S5.55 6 5 6 4 5.55 4 5 4.45 4 5 4M16 20H4V8H16V20M12.67 15.33C12.69 16.03 12.41 16.71 11.91 17.21C10.86 18.26 9.15 18.27 8.09 17.21C7.59 16.71 7.31 16.03 7.33 15.33C7.4 14.62 7.63 13.94 8 13.33C8.37 12.5 8.81 11.73 9.33 11L10 10C11.79 12.59 12.67 14.36 12.67 15.33M20 15H22V17H20V15M20 7H22V13H20V7Z');

exports["default"] = _default;
|
DBCDK/fcrepo-3.5-patched
|
fcrepo-server/src/main/java/org/fcrepo/server/test/ClearSQLDatabase.java
|
package org.fcrepo.server.test;
import org.fcrepo.server.Server;
import org.fcrepo.server.utilities.rebuild.SQLRebuilder;
/**
* Clears the SQL database post-install. Can be called by system test scripts to
* make sure that the configured SQL database is empty after installing fedora,
* but before running it for the first time.
*
* @author <NAME>
* @version $Id$
*/
public class ClearSQLDatabase {

    public static void main(String[] args) {
        try {
            // Wire the rebuilder to the server's configuration, then wipe
            // every existing table so a fresh install starts from empty.
            final SQLRebuilder rebuilder = new SQLRebuilder();
            rebuilder.setServerConfiguration(Server.getConfig());
            rebuilder.init();
            rebuilder.blankExistingTables();
        } catch (Exception e) {
            // Installation scripts treat any failure here as fatal.
            throw new RuntimeException(e);
        }
    }
}
|
anytimefitness/oa-oauth
|
lib/omniauth/strategies/oauth/qzone.rb
|
require 'omniauth/oauth'
require 'multi_json'
module OmniAuth
module Strategies
#
# Authenticate to qzone (QQ) via OAuth and retrieve basic
# user information.
#
# Usage:
#
# use OmniAuth::Strategies::Qzone, 'consumerkey', 'consumersecret'
#
class Qzone < OmniAuth::Strategies::OAuth
  # Initialize the middleware with Qzone's OAuth 1.0 endpoints; Qzone also
  # requires the consumer key to be echoed as an authorize parameter.
  def initialize(app, consumer_key=nil, consumer_secret=nil, options={}, &block)
    client_options = {
      :access_token_path => '/oauth/qzoneoauth_access_token',
      :authorize_path => '/oauth/qzoneoauth_authorize',
      :http_method => :get,
      :request_token_path => '/oauth/qzoneoauth_request_token',
      :scheme => :query_string,
      :site => 'http://openapi.qzone.qq.com',
    }
    options[:authorize_params] = {:oauth_consumer_key => consumer_key}
    super(app, :qzone, consumer_key, consumer_secret, client_options, options, &block)
  end

  # HACK: Qzone uses a non-standard parameter, oauth_vericode, which must be
  # appended to the access-token path when present in the callback request.
  def consumer_options
    @consumer_options[:access_token_path] = '/oauth/qzoneoauth_access_token?oauth_vericode=' + request['oauth_vericode'] if request['oauth_vericode']
    @consumer_options
  end

  # Force the session's callback_confirmed flag before the standard callback
  # runs — presumably because Qzone omits oauth_callback_confirmed from its
  # request-token response; confirm against the provider docs.
  def callback_phase
    session['oauth'][name.to_s]['callback_confirmed'] = true
    super
  end

  # Merge Qzone's uid and normalized user info into the standard auth hash.
  def auth_hash
    ui = user_info
    OmniAuth::Utils.deep_merge(super,
      {
        'uid' => ui['uid'],
        'user_info' => ui,
        'extra' => {
          'user_hash' => user_hash,
        },
      }
    )
  end

  # Normalized subset of the raw profile; uid is the OpenID that arrived with
  # the access token.
  def user_info
    user_hash = self.user_hash
    {
      'uid' => @access_token.params[:openid],
      'nickname' => user_hash['nickname'],
      'name' => user_hash['nickname'],
      'image' => user_hash['figureurl'],
      'urls' => {
        'figureurl_1' => user_hash['figureurl_1'],
        'figureurl_2' => user_hash['figureurl_2'],
      },
    }
  end

  # Raw profile JSON fetched from Qzone's REST API, memoized per instance.
  def user_hash
    @user_hash ||= MultiJson.decode(@access_token.get("/user/get_user_info?format=json&openid=#{@access_token.params[:openid]}").body)
  end
end
end
end
|
matjaz99/MyTestProjects
|
javase/src/main/java/si/matjazcerkvenik/test/javase/callback/example4/Callable.java
|
package si.matjazcerkvenik.test.javase.callback.example4;
/**
 * Minimal callback contract for example 4: implementors register with a
 * callee, which invokes {@link #callBackMethod()} to notify them.
 */
public interface Callable {

    /** Invoked by the callee as the callback notification. */
    public void callBackMethod();
}
|
hepyu/lcp-java
|
open-lcp-core-framework/src/main/java/com/open/lcp/core/framework/api/listener/CommandListener.java
|
<gh_stars>1-10
package com.open.lcp.core.framework.api.listener;
import com.open.lcp.core.api.command.CommandContext;
/**
* 命令侦听:不支持动态变更
*
* @author
*/
/**
 * Command listener. Dynamic (runtime) changes to the listened command set
 * are not supported.
 *
 * @author
 */
public interface CommandListener {

    /**
     * Returns the commands to listen for (case-insensitive). Read only once
     * at first load and never refreshed afterwards.
     *
     * @return the command names this listener is interested in
     */
    public String[] getCommands();

    /**
     * Called before a command executes. Use this hook to adjust the data the
     * command will execute with.
     *
     * @param ctx request context of the incoming message
     */
    public void beforeExec(CommandContext ctx);

    /**
     * Called after a command has executed. Use this hook to inspect the
     * outcome of the execution.
     *
     * @param ctx           request context of the incoming message
     * @param execBeginTime time at which the command invocation started
     * @param code          result error code; -1 when the outcome is unknown
     * @param data          result payload object
     * @param ext           extra information, typically version-upgrade info
     */
    public void afterExec(CommandContext ctx, long execBeginTime, int code, Object data, Object ext);
}
|
skkuse-adv/2019Fall_team2
|
analysis/reverse-engineering/decompile-fitts-20191031-2200/sources/kr/co/popone/fitts/feature/recommend/personalization/PersonalizedRecommendActivity$updatePostUi$manager$1$1.java
|
package kr.co.popone.fitts.feature.recommend.personalization;
import androidx.recyclerview.widget.GridLayoutManager.SpanSizeLookup;
/**
 * Decompiled grid span lookup for the personalized-recommend post grid:
 * every fifth item (1-based position) spans 2 columns, all others span 1.
 */
public final class PersonalizedRecommendActivity$updatePostUi$manager$1$1 extends SpanSizeLookup {
    PersonalizedRecommendActivity$updatePostUi$manager$1$1() {
    }

    public int getSpanSize(int i) {
        // i is zero-based, so (i + 1) % 5 == 0 selects positions 5, 10, 15, ...
        return (i + 1) % 5 == 0 ? 2 : 1;
    }
}
|
Fork-World/meecrowave
|
meecrowave-core/src/test/java/org/apache/meecrowave/ContextInProxiedInstancesTest.java
|
<filename>meecrowave-core/src/test/java/org/apache/meecrowave/ContextInProxiedInstancesTest.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.meecrowave;
import org.apache.tomcat.util.http.fileupload.util.Streams;
import org.junit.Test;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.context.Dependent;
import javax.enterprise.context.Initialized;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.CDI;
import javax.servlet.ServletContext;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.URL;
import static org.junit.Assert.assertEquals;
public class ContextInProxiedInstancesTest {

    /**
     * Boots an embedded Meecrowave scanning only this test's package and
     * verifies that JAX-RS {@code @Context} field injection works both for
     * normal-scoped (CDI-proxied) beans and for {@code @Dependent}
     * (unproxied) beans.
     */
    @Test
    public void fields() throws IOException {
        try (final Meecrowave meecrowave = new Meecrowave(new Meecrowave.Builder()
                .randomHttpPort()
                .includePackages(ContextInProxiedInstancesTest.class.getName())).bake()) {
            // proxies can use @Context
            try (final InputStream stream = new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/app").openStream()) {
                assertEquals("app", Streams.asString(stream, "UTF-8"));
            }
            try (final InputStream stream = new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/req").openStream()) {
                assertEquals("req", Streams.asString(stream, "UTF-8"));
            }
            // not proxied can also
            try (final InputStream stream = new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/dep").openStream()) {
                assertEquals("dep", Streams.asString(stream, "UTF-8"));
            }
            assertEquals(Dep.class, CDI.current().select(Dep.class).get().getClass()); // ensure it is not proxied but injection works (thanks CXF)
        }
    }

    /** Application-scoped endpoint: proxied by CDI, yet @Context must still inject. */
    @Path("app")
    @ApplicationScoped
    public static class App {
        @Context
        private UriInfo uri;

        // Observing @Initialized forces eager instantiation outside any request.
        public void init(@Observes @Initialized(ApplicationScoped.class) final ServletContext sc) {
            // init without a Message
        }

        @GET
        public String get() {
            return uri.getPath();
        }
    }

    /** Request-scoped endpoint — also proxied. */
    @Path("req")
    @RequestScoped
    public static class Req {
        @Context
        private UriInfo uri;

        @GET
        public String get() {
            return uri.getPath();
        }
    }

    /** Dependent endpoint — not proxied; also produces a request-scoped REST API bean. */
    @Path("dep")
    @Dependent
    public static class Dep {
        @Context
        private UriInfo uri;

        @GET
        public String get() {
            return uri.getPath();
        }

        @Produces
        @RequestScoped
        public MyRestApi createMyApi() {
            return new MyRestApi() {
                @Override
                public String get() {
                    return null;
                }

                @Override
                public void close() throws Exception {
                }
            };
        }
    }

    // NOTE(review): Serializable/AutoCloseable supertypes are presumably here
    // to exercise normal-scoped proxying of such interfaces — confirm.
    @Path("myapi")
    public interface MyRestApi extends Serializable, AutoCloseable {
        @GET
        String get();
    }
}
|
wangdingfeng/J2EEFAST
|
fast-web/src/main/java/com/j2eefast/web/legao/mapper/LTableMapper.java
|
/*
* All content copyright http://www.j2eefast.com, unless
* otherwise indicated. All rights reserved.
* No deletion without permission
*/
package com.j2eefast.web.legao.mapper;
import com.j2eefast.web.legao.entity.LTableEntity;
import java.util.List;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.j2eefast.web.legao.vo.LTableVo;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
/**
* l_table Mapper接口
* @author: polaris_wang
* @date 2021-12-21 15:12:50
*/
/**
 * MyBatis-Plus mapper for the {@code l_table} table.
 *
 * @author polaris_wang
 * @date 2021-12-21 15:12:50
 */
public interface LTableMapper extends BaseMapper<LTableEntity> {

    /**
     * Custom paged query.
     *
     * @param page         paging request
     * @param lTableEntity filter entity
     * @param sql_filter   extra SQL filter fragment appended by the caller
     */
    Page<LTableVo> findPage(IPage<LTableEntity> page,
                            @Param("lTable") LTableEntity lTableEntity,
                            @Param("sql_filter") String sql_filter);

    /**
     * Looks up a single record by primary key.
     *
     * @param id primary key
     * @return the matching entity, or null when absent
     */
    LTableEntity findLTableById(@Param("id") Long id);

    /**
     * Lists all records matching the given filter entity.
     *
     * @param lTableEntity filter object
     * @return matching entities
     */
    List<LTableEntity> findList(LTableEntity lTableEntity);

    /**
     * Counts rows with the given table number.
     * NOTE(review): the WHERE clause keeps rows with del_flag != 0 —
     * presumably 0 marks deleted rows in this schema; confirm.
     *
     * @param num table number
     * @return number of matching rows
     */
    @Select("SELECT count(id) FROM l_table WHERE del_flag !=0 AND table_num=#{num}")
    int findByNum(@Param("num") Integer num);
}
|
Easonrust/leetcode_journey
|
problem/10.regular-expression-matching.java
|
<reponame>Easonrust/leetcode_journey<filename>problem/10.regular-expression-matching.java
/*
* @lc app=leetcode id=10 lang=java
*
* [10] Regular Expression Matching
*/
// @lc code=start
class Solution {
    /**
     * Recursive regular-expression matcher supporting '.' (any single
     * character) and '*' (zero or more of the preceding element). Matches the
     * whole text against the whole pattern.
     */
    public boolean isMatch(String text, String pattern) {
        // An exhausted pattern matches only exhausted text.
        if (pattern.isEmpty()) {
            return text.isEmpty();
        }
        // Does the first pattern element match the first text character?
        boolean firstMatch = !text.isEmpty()
                && (pattern.charAt(0) == text.charAt(0) || pattern.charAt(0) == '.');
        if (pattern.length() >= 2 && pattern.charAt(1) == '*') {
            // "x*" either matches zero occurrences (skip both pattern chars),
            // or — when the head matched — consumes one text character and
            // keeps the same pattern.
            return isMatch(text, pattern.substring(2))
                    || (firstMatch && isMatch(text.substring(1), pattern));
        }
        // No '*': the heads must match, then recurse on both tails.
        return firstMatch && isMatch(text.substring(1), pattern.substring(1));
    }
}
// @lc code=end
|
Lewes/ecssweb
|
feedback/migrations/0007_auto_20180728_0930.py
|
<gh_stars>1-10
# Generated by Django 2.0.5 on 2018-07-28 08:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: redefines the choice set of FeedbackAuditLog.action."""

    # Must follow the migration that introduced FeedbackAuditLog.
    dependencies = [
        ('feedback', '0006_feedbackauditlog'),
    ]

    operations = [
        migrations.AlterField(
            model_name='feedbackauditlog',
            name='action',
            # NOTE(review): 'delet' looks like a typo for 'delete', but it is
            # the value stored in the database; correcting it needs a new
            # follow-up migration, not an edit to this historical one.
            field=models.CharField(choices=[('create', 'created'), ('edit', 'edited'), ('delet', 'deleted')], max_length=20),
        ),
    ]
|
linyingzhen/btnew
|
src/person/prize/Detail/_Attention.js
|
<filename>src/person/prize/Detail/_Attention.js<gh_stars>0
/**
* const prefixCls = 'style-101574';
* const images = '/static/images/src/person/prize/Detail';
* @Author: lyz0720
* @Date: 2018-09-14 10:38:23
* @Last Modified by: czy0729
* @Last Modified time: 2018-11-07 11:03:43
* @Path bt_mb_new /src/person/prize/Detail/index.js
*/
import React from 'react';
import PropTypes from 'prop-types';
import { observer } from '@';
import { Flex, Icon } from '@components';
import Styles from '@styles';
const prefixCls = 'style-101574';

/**
 * Red notice bar on the prize-detail page telling the user which Wangwang ID
 * must be used for the purchase. The ID is read from the shared `$` store's
 * 'detail' state (key `ww`, falling back to '-').
 */
const _Attention = (props, { $ }) => {
  const { ww = '-' } = $.getState('detail');
  return (
    <Flex className={prefixCls} justify="center">
      <Icon className="t-26 t-event" type="information-circle-fill" />
      <p className="t-24 l-34 ml-xs">
        <span className="t-void">请务必使用此旺旺ID“</span>
        <span className="t-event">{ww}</span>
        <span className="t-void">”进行购买</span>
      </p>
      <style jsx global>{`
        .style-101574 {
          padding: 0.16rem ${Styles.wind};
          background: ${Styles.color_danger};
        }
      `}</style>
    </Flex>
  );
};

// The shared store `$` arrives via legacy React context.
_Attention.contextTypes = {
  $: PropTypes.object
};

export default observer(_Attention);
|
mattbrun/derby-login
|
lib/middleware/sessionMiddleware.js
|
module.exports = function(req, res, next) {
var self = this;
var model = req.getModel();
// Generate new userId if absent and put it to session and model
var userId = req.session.userId;
if (!userId) userId = req.session.userId = model.id();
model.set('_session.userId', userId);
var isAuthenticated = req.isAuthenticated();
// _session.loggedIn is the main way to distinguish auth and not auth states
if (isAuthenticated) model.set('_session.loggedIn', true);
// Request hook
self.request(req, res, userId, isAuthenticated, next);
};
|
Pirate5946/techframework
|
src/main/java/advance/datastructureandalgorithm/tree/FindSumPath.java
|
package advance.datastructureandalgorithm.tree;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
/**
* @ClassName FindSumPath
* @Descrption TODO
* @Author lt
* @Date 2019/6/8 16:48
* @Version 1.0
**/
public class FindSumPath {

    /** Demo: builds a small tree and prints every root-to-leaf path summing to 6. */
    public static void main(String[] args) {
        TreeNode root = new TreeNode(1);
        TreeNode leave21 = new TreeNode(2);
        root.left = leave21;
        TreeNode leave22 = new TreeNode(3);
        root.right = leave22;
        TreeNode leave31 = new TreeNode(4);
        leave21.left = leave31;
        TreeNode leave41 = new TreeNode(5);
        leave31.left = leave41;
        ArrayList<ArrayList<Integer>> allPath = findAllPath(root, 6);
        System.out.println(allPath);
    }

    /**
     * Collects every root-to-leaf path whose node values sum to {@code target}.
     *
     * @param root   tree root (may be null)
     * @param target required path sum
     * @return all matching paths, sorted longest-first
     */
    public static ArrayList<ArrayList<Integer>> findAllPath(TreeNode root, int target) {
        ArrayList<ArrayList<Integer>> allPath = new ArrayList<>();
        if (root == null) {
            return allPath;
        }
        findPath(root, allPath, new ArrayList<Integer>(), target);
        Collections.sort(allPath, new Comparator<ArrayList<Integer>>() {
            @Override
            public int compare(ArrayList<Integer> o1, ArrayList<Integer> o2) {
                // Longer paths sort before shorter ones.
                return Integer.compare(o2.size(), o1.size());
            }
        });
        return allPath;
    }

    /**
     * Depth-first search with explicit backtracking: a single shared
     * {@code path} list is extended on the way down and trimmed on the way
     * up, and only snapshots of complete matching paths are stored.
     *
     * Fixes over the previous version: (1) the {@code target < root.val}
     * prune is gone — it silently discarded valid paths in trees containing
     * negative values; (2) the fragile scheme of sharing {@code path} with
     * the left subtree while copying {@code path2} for the right is replaced
     * by uniform add/recurse/remove backtracking.
     */
    private static void findPath(TreeNode node, ArrayList<ArrayList<Integer>> allPath,
                                 ArrayList<Integer> path, int target) {
        path.add(node.val);
        if (node.left == null && node.right == null) {
            if (target == node.val) {
                allPath.add(new ArrayList<>(path)); // snapshot, not the live list
            }
        } else {
            if (node.left != null) {
                findPath(node.left, allPath, path, target - node.val);
            }
            if (node.right != null) {
                findPath(node.right, allPath, path, target - node.val);
            }
        }
        path.remove(path.size() - 1); // backtrack
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.