text stringlengths 1 1.05M |
|---|
package com.solofeed.tchernocraft.tileentity;
import com.solofeed.tchernocraft.Tchernocraft;
import com.solofeed.tchernocraft.util.ReflectionUtils;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.fml.common.registry.GameRegistry;
import org.apache.commons.lang3.StringUtils;
import java.util.Set;
/**
 * Tchernocraft's tile entity handler.
 *
 * <p>Scans {@link #TILE_ENTITIES_LOCATION} for classes annotated with
 * {@link TchernocraftTileEntity} and registers each one with Forge's
 * {@link GameRegistry} under a mod-qualified id.</p>
 */
public final class TileEntityHandler {
    /** Package containing the tile entity classes to register. */
    private static final String TILE_ENTITIES_LOCATION = "com.solofeed.tchernocraft.tileentity.tileentities";

    /** Utility class: instantiation is forbidden. */
    private TileEntityHandler() {
        throw new UnsupportedOperationException("TileEntityHandler constructor must never be called");
    }

    /**
     * Registers all annotated tile entities found in {@link #TILE_ENTITIES_LOCATION}.
     */
    public static void registerTileEntities() {
        Tchernocraft.LOGGER.info("Registering tile entities ...");
        Set<Class<?>> tileEntityClasses = ReflectionUtils.getClasses(TILE_ENTITIES_LOCATION, TchernocraftTileEntity.class);
        for (Class<?> tClass : tileEntityClasses) {
            register(tClass.asSubclass(TileEntity.class));
        }
        Tchernocraft.LOGGER.info("All tile entities registered !");
    }

    /**
     * Registers a single tile entity under the id declared by its annotation.
     *
     * @param tClass tile entity class to push into the registry
     * @throws IllegalArgumentException when the class lacks the annotation or declares a blank id
     */
    private static void register(Class<? extends TileEntity> tClass) {
        TchernocraftTileEntity annotation = tClass.getAnnotation(TchernocraftTileEntity.class);
        if (annotation == null || StringUtils.isBlank(annotation.id())) {
            // Name the offending class so the failure is actionable; also fixes
            // the "instanciating" typo of the original message.
            throw new IllegalArgumentException(
                    "Error while instantiating tile entity " + tClass.getName()
                            + ": missing or blank @TchernocraftTileEntity id");
        }
        String id = Tchernocraft.MOD_ID + ":" + annotation.id();
        GameRegistry.registerTileEntity(tClass, id);
    }
}
|
/******************************************************************************
Course videos: https://www.red-gate.com/hub/university/courses/t-sql/tsql-for-beginners
Course scripts: https://litknd.github.io/TSQLBeginners
Introducing SELECTs and Aliasing
SAMPLE SOLUTIONS
*****************************************************************************/
/* Doorstop: severity 20 terminates the connection, so accidentally running
   the whole file stops here instead of executing every batch below. */
RAISERROR(N'Did you mean to run the whole thing?', 20, 1) WITH LOG;
GO
/* ======================================================================
Homework - WITH SOLUTIONS
====================================================================== */
USE WideWorldImporters;
GO
/*
Q1
Write a query that SELECTS all the rows from Application.People
Return all columns in the table
Use a "worst practice" to SELECT every column in the table
GO
*/
--Talk through the query
SELECT *
FROM Application.People;
GO
/********************************************************************************
Discussion:
Look at the execution plan (CTRL+M for "actual" execution plan)
Execution plans are a map of how the query is run behind the scenes
You don't need to use these while you are learning TSQL!
I will show them sometimes when discussing solutions simply to talk about how queries work
And maybe someday you will use these, seeing them as you learn makes them less mysterious
What are those Compute Scalars for?
And how can we learn more about them?
*******************************************************************************/
--Can we see what's in those Compute Scalars?
EXEC sp_help 'Application.People';
GO
SELECT name,
definition,
is_persisted
FROM sys.computed_columns AS cc
WHERE cc.object_id = OBJECT_ID('Application.People');
GO
--An advanced aside about persisted computed columns
--Compare with and without the trace flag (176 is 2016+)
--To nerd out on this, see <NAME>'s article:
--https://sqlperformance.com/2017/05/sql-plan/properly-persisted-computed-columns
SELECT SearchName
FROM Application.People
OPTION (QUERYTRACEON 176);
GO
/*
Q2
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
GO
*/
SELECT FullName,
PreferredName,
EmailAddress AS [Email]
FROM Application.People;
GO
--Look at the plan. Why is it different from the plan when we were doing "select *"?
--Big picture takeaway: selecting only the columns you need changes how the query is run
--This can help performance in many ways, especially as your queries grow more complex
/*
Q3
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where Email has not been entered (NULL)
GO
*/
--IS NULL is the correct way to test for missing values
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE EmailAddress IS NULL;
GO
/********************************************************************************
Quick quiz:
Why does this not return any rows?
Look at the plan.
Is there a way to make it return rows?
*******************************************************************************/
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE EmailAddress = NULL;
GO
--If you change the ANSI_NULLS setting to OFF,
--The equality comparison works the same way as the IS NULL syntax
--But don't do this! It is deprecated.
--Run these queries as a single batch (in Azure Data Studio it resets your sessions ANSI_NULL setting automatically after execution!)
SET ANSI_NULLS OFF;
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE EmailAddress IS NULL;
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE EmailAddress = NULL; /* Will only match NULLs with ANSI_NULLS set to OFF (deprecated) */
--Back to the right setting:
SET ANSI_NULLS ON;
GO
/*
Q4
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName is Agrita
GO
*/
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName = N'Agrita';
GO
/********************************************************************************
Discussion: What's with that N?
Look at the data type for PreferredName
N'Agrita' indicates unicode / NVARCHAR data type for that string
Sometimes data type mismatches can make huge differences in performance
*******************************************************************************/
EXEC sp_help 'Application.People';
GO
/*
Q5
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName starts with the letter A
GO
*/
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE N'A%';
GO
/*
Q6
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName starts with the LOWERCASE letter 'a'
GO
*/
--Try this, does it work?
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE 'a%';
GO
--Column level collation
EXEC sp_help 'Application.People';
GO
--Latin1_General_100_CI_AS
--My instance level collation
SELECT SERVERPROPERTY('collation');
GO
--SQL_Latin1_General_CP1_CS_AS
--Decoding a collation
SELECT name,
description
FROM fn_helpcollations()
WHERE name = 'SQL_Latin1_General_CP1_CS_AS';
GO
--You can specify collation in a query
--If we want to run a case sensitive comparison...
--Look at the plan - what is it having to do to accomplish this?
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName COLLATE SQL_Latin1_General_CP1_CS_AS LIKE N'a%';
GO
/*
Q7
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName contains 'y' or 'Y' anywhere in the string
AND the email address contains a space
Order the results by EmailAddress Ascending
GO
*/
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE N'%y%' /* Since the column is case-insensitive, I don't need COLLATE */
AND EmailAddress LIKE N'% %'
ORDER BY EmailAddress ASC;
GO
--You can order by a column alias
--ASC is the default and is not required to be stated
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE N'%y%'
AND EmailAddress LIKE N'% %'
ORDER BY Email;
GO
--Although you can use a number in ORDER BY
--which refers to column position, this is an anti-pattern
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE N'%y%'
AND EmailAddress LIKE N'% %'
ORDER BY 3;
GO
/*
Q8
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY two columns:
FullName
The length (number of characters in) the FullName column,
as calculated by the LEN() SQL Server function
https://docs.microsoft.com/en-us/sql/t-sql/functions/len-transact-sql?view=sql-server-2017
alias as: Len Full Name
Order the results by the length of FullName, Descending
Return only 10 rows
Do NOT use SET ROWCOUNT -- instead do everything in a single TSQL statement
GO
*/
SELECT TOP 10
FullName,
LEN(FullName) AS [Len Full Name]
FROM Application.People
ORDER BY LEN(FullName) DESC;
GO
--This syntax is SQL Server 2012+
SELECT FullName,
LEN(FullName) AS [Len Full Name]
FROM Application.People
ORDER BY LEN(FullName) DESC OFFSET 0 ROWS FETCH NEXT 10 ROWS ONLY;
GO
--Look at the execution plans
--The way you write your syntax doesn't dictate how the query is executed behind the scenes
--The database engine may choose the same "plan" to execute two queries written in different ways!
/*
Q9
Write a query that SELECTS all the rows from Application.People
Just like Q8...
Return rows for ONLY two columns:
FullName
The length (number of characters in) the FullName column,
as calculated by the LEN() SQL Server function
https://docs.microsoft.com/en-us/sql/t-sql/functions/len-transact-sql?view=sql-server-2017
alias as: Len Full Name
Order the results by the length of FullName, Descending
Return only 10 rows
EXCEPT this time...
Return rows ONLY #11 - 20 (as ordered by description above)
Do NOT use the TOP keyword, do not use ROW_NUMBER(), and do not use SET ROWCOUNT
GO
*/
SELECT FullName,
LEN(FullName) AS [Len Full Name]
FROM Application.People
ORDER BY LEN(FullName) DESC /* repeats function in ORDER BY */
OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY;
GO
SELECT FullName,
LEN(FullName) AS [Len Full Name]
FROM Application.People
ORDER BY [Len Full Name] DESC /* uses column name in ORDER BY */
OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY;
GO
SELECT FullName,
LEN(FullName) AS [Len Full Name]
FROM Application.People
ORDER BY 2 DESC /* Uses column numeric position in ORDER BY (tacky!) */
OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY;
GO
--Discussion: why would you ever need to do a query like this in the real world?
/********************************************************************************
Discussion....
Will this syntax work?
Why, or why not?
(Hint: the WHERE clause is evaluated before SELECT aliases exist.)
*******************************************************************************/
SELECT FullName,
PreferredName,
EmailAddress AS Email
FROM Application.People
WHERE PreferredName LIKE N'%y%'
AND Email LIKE N'% %'
ORDER BY PreferredName;
GO
|
import numpy as np
import ctypes
def generate_heatmap(data_points, image_size):
    """Render a heatmap for ``data_points`` via the native ``heatmap_gen`` library.

    Args:
        data_points: Sequence of points; converted to a contiguous float32
            array and handed to the C++ code as a ``float*``.
        image_size: ``(width, height)`` of the output image in pixels.

    Returns:
        numpy.ndarray: float32 array of shape ``(height, width)``.
    """
    # ascontiguousarray guarantees a C-contiguous float32 buffer even when the
    # caller passes a strided/sliced ndarray; data_array must stay referenced
    # for the duration of the call so the buffer is not garbage-collected.
    data_array = np.ascontiguousarray(data_points, dtype=np.float32)
    data_ptr = data_array.ctypes.data_as(ctypes.POINTER(ctypes.c_float))
    num_points = len(data_points)

    heatmap_lib = ctypes.CDLL('./heatmap_gen.so')
    # Declare the full prototype so ctypes validates argument types instead of
    # silently applying its default conversions.
    heatmap_lib.generate_heatmap.argtypes = [
        ctypes.POINTER(ctypes.c_float), ctypes.c_int, ctypes.c_int, ctypes.c_int]
    heatmap_lib.generate_heatmap.restype = np.ctypeslib.ndpointer(
        dtype=np.float32, shape=(image_size[1], image_size[0]))
    return heatmap_lib.generate_heatmap(data_ptr, num_points, image_size[0], image_size[1])
package com.jira.client.web.config;
import com.jira.client.web.model.properties.AgileProperties;
import com.jira.client.web.model.properties.AutoTestProperties;
import com.jira.client.web.model.properties.IamProperties;
import com.jira.client.web.model.properties.OAuthProperties;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Type-safe configuration holder bound to properties under the {@code steam.*}
 * prefix: a host string plus nested oauth / agile / iam / autoTest sections.
 * Lombok generates getters, setters and {@code toString()}.
 *
 * @author XIAXINYU3
 * @date 2020/12/3
 */
@Getter
@Setter
@Component
@ToString
@ConfigurationProperties(prefix = "steam")
public class SteamProperties {
    // Bound from steam.host. NOTE(review): presumably the base host of the
    // target service — confirm against application.yml.
    private String host;
    // Nested property groups, bound from steam.oauth.*, steam.agile.*,
    // steam.iam.* and steam.auto-test.* respectively.
    private OAuthProperties oauth;
    private AgileProperties agile;
    private IamProperties iam;
    private AutoTestProperties autoTest;
}
|
<reponame>yinfuquan/spring-boot-examples
package com.yin.springboot.mybatis.server;

import java.util.List;

import com.yin.springboot.mybatis.domain.OmsOrderItem;

/**
 * CRUD service contract for {@link OmsOrderItem} rows. All {@code int}
 * returns are affected-row counts, following the usual MyBatis convention.
 */
public interface OmsOrderItemService {
    /** Deletes the row with the given primary key; returns the affected-row count. */
    int deleteByPrimaryKey(Long id);

    /** Inserts the record with all columns; returns the affected-row count. */
    int insert(OmsOrderItem record);

    /** Inserts the record, or updates it when it already exists. */
    int insertOrUpdate(OmsOrderItem record);

    /** Like {@link #insertOrUpdate}, but only non-null fields participate. */
    int insertOrUpdateSelective(OmsOrderItem record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(OmsOrderItem record);

    /** Loads a row by primary key, or null when absent. */
    OmsOrderItem selectByPrimaryKey(Long id);

    /** Updates only the non-null fields of the record, matched by primary key. */
    int updateByPrimaryKeySelective(OmsOrderItem record);

    /** Updates all columns of the record, matched by primary key. */
    int updateByPrimaryKey(OmsOrderItem record);

    /** Updates every record in the list; returns the affected-row count. */
    int updateBatch(List<OmsOrderItem> list);

    /** Inserts every record in the list in one batch; returns the affected-row count. */
    int batchInsert(List<OmsOrderItem> list);
}
|
import subprocess
import sys
def build_and_upload_package():
    """Build sdist/wheel distributions and upload them to PyPI via twine.

    Prints a success message when both steps complete; failures are caught
    and reported on stdout rather than propagated to the caller.
    """
    commands = (
        # Step 1: create source distribution and wheel distribution.
        [sys.executable, "setup.py", "sdist", "bdist_wheel"],
        # Step 2: upload distribution files to PyPI using twine.
        [sys.executable, "-m", "twine", "upload", "dist/*"],
    )
    try:
        for command in commands:
            subprocess.run(command, check=True)
    except subprocess.CalledProcessError as error:
        print(f"Error occurred: {error}")
    except Exception as error:
        print(f"Unexpected error occurred: {error}")
    else:
        print("Package build and upload successful.")
# Allow running this module directly as a build-and-release script.
if __name__ == "__main__":
    build_and_upload_package()
<reponame>liimur/IRCClientiOS
/*
* Copyright (C) 2004-2009 <NAME> <EMAIL>
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*/
/*
 * Adds `fd` to the fd_set and keeps `*maxfd` equal to the highest
 * descriptor seen so far (as required by select()).
 */
static void libirc_add_to_set (int fd, fd_set *set, int * maxfd)
{
    FD_SET (fd, set);

    if ( fd > *maxfd )
        *maxfd = fd;
}
#if defined (ENABLE_DEBUG)
/*
 * Debug helper: writes `length` bytes of `buf` to stdout, preceded by
 * "`prefix`: ". Bytes are emitted verbatim (no escaping) and no trailing
 * newline is added. Compiled only when ENABLE_DEBUG is defined.
 */
static void libirc_dump_data (const char * prefix, const char * buf, unsigned int length)
{
    printf ("%s: ", prefix);
    for ( ; length > 0; length -- )
        printf ("%c", *buf++);
}
#endif
/*
 * Scans `buf` for a CR-LF pair (\x0D\x0A) separating two lines.
 * Returns the offset just past the pair, or 0 when no pair is found.
 */
static int libirc_findcrlf (const char * buf, int length)
{
    int i;

    for ( i = 1; i < length; i++ )
    {
        if ( buf[i - 1] == 0x0D && buf[i] == 0x0A )
            return i + 1;
    }

    return 0;
}
/*
 * Finds the first CR or LF in `buf`, NUL-terminates the line there, and
 * returns the offset of the start of the next line (consuming one optional
 * second CR/LF character, matching the original bounds check). Returns 0
 * when no line terminator is present.
 */
static int libirc_findcrorlf (char * buf, int length)
{
    int pos = 0;

    while ( pos < length )
    {
        char ch = buf[pos];

        if ( ch != 0x0D && ch != 0x0A )
        {
            pos++;
            continue;
        }

        /* Terminate the current line in place. */
        buf[pos] = '\0';
        pos++;

        /* Swallow a paired CR/LF if one follows within bounds. */
        if ( pos < (length - 1)
            && (buf[pos] == 0x0D || buf[pos] == 0x0A) )
            pos++;

        return pos;
    }

    return 0;
}
/*
 * Built-in handler for incoming CTCP requests (PING / VERSION / FINGER /
 * TIME). Replies are sent back to the requesting nick; unrecognized
 * requests are ignored. `event` is part of the generic handler signature
 * and is unused here.
 */
static void libirc_event_ctcp_internal (irc_session_t * session, const char * event, const char * origin, const char ** params, unsigned int count)
{
    char nickbuf[128], textbuf[256];

    /* Guard against a missing origin or an empty parameter list: the code
     * below dereferences params[0] unconditionally. */
    if ( !origin || count == 0 || !params || !params[0] )
        return;

    irc_target_get_nick (origin, nickbuf, sizeof(nickbuf));

    if ( strstr (params[0], "PING") == params[0] )
    {
        /* Echo the whole PING payload back. */
        irc_cmd_ctcp_reply (session, nickbuf, params[0]);
    }
    else if ( !strcmp (params[0], "VERSION") )
    {
        unsigned int high, low;
        irc_get_version (&high, &low);
        /* snprintf (not sprintf) so the reply can never overflow textbuf. */
        snprintf (textbuf, sizeof(textbuf), "VERSION libirc by <NAME> ver.%d.%d", high, low);
        irc_cmd_ctcp_reply (session, nickbuf, textbuf);
    }
    else if ( !strcmp (params[0], "FINGER") )
    {
        /* username/realname originate from user configuration and may be
         * arbitrarily long; snprintf truncates instead of smashing the
         * 256-byte stack buffer the original sprintf could overflow. */
        snprintf (textbuf, sizeof(textbuf), "FINGER %s (%s) Idle 0 seconds",
                  session->username ? session->username : "nobody",
                  session->realname ? session->realname : "noname");
        irc_cmd_ctcp_reply (session, nickbuf, textbuf);
    }
    else if ( !strcmp (params[0], "TIME") )
    {
        time_t now = time(0);

#if defined (ENABLE_THREADS) && defined (HAVE_LOCALTIME_R)
        struct tm tmtmp, *ltime = localtime_r (&now, &tmtmp);
#else
        struct tm * ltime = localtime (&now);
#endif

        strftime (textbuf, sizeof(textbuf), "%a %b %e %H:%M:%S %Z %Y", ltime);
        irc_cmd_ctcp_reply (session, nickbuf, textbuf);
    }
}
|
<filename>src/main/java/com/went/core/erabatis/phantom/FieldSource.java
package com.went.core.erabatis.phantom;

/**
 * <p>Title: FieldSource</p>
 * <p>Description: table field information — a marker interface with no
 * members; {@code T} is the entity type whose field metadata an
 * implementation represents.</p>
 * <p>Copyright: Shanghai era Information of management platform 2017</p>
 *
 * @author <NAME>
 * @version 1.0
 * <pre>History: 2017/10/21 <NAME> Create </pre>
 */
public interface FieldSource<T> {
}
|
module.exports = (Bluebird, logger) => {
function warningThen(onFulfilled, onRejected) {
if(!logger.active){
return super.then(onFulfilled, onRejected);
}
if(typeof onFulfilled !== "function" && onFulfilled !== null) { // explicit `then(null, handler)` case
try { throw new Error(); } catch (e) { // get stack
console.warn(" Warning: .then's onFulfilled only accepts functions, got ", onFulfilled, e);
}
}
if(typeof onRejected !== "function") {
try { throw new Error(); } catch (e) { // get stack
console.warn(" Warning: .then's onRejected only accepts functions, got ", onRejected, e);
}
}
return super.then(onFulfilled, onRejected);
}
return warningThen;
}; |
#!/bin/bash
# Fix: the original shebang was `#!bin/bash` (relative path), which fails
# unless the script is run from /. Redirect all three standard streams to
# the system console so the banner is visible without a controlling tty.
exec 0<>/dev/console 1<>/dev/console 2<>/dev/console
# Quoted heredoc delimiter: the banner is emitted verbatim, no expansion.
cat <<'msgend'
<byYonasProduction>
<Winter is coming>
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
msgend
# Hold the banner on screen briefly before continuing boot.
sleep 5
echo " continuing...."
|
<filename>SysView_Driver/SysList.hpp
#pragma once
void* __cdecl operator new(size_t size, POOL_TYPE pool, ULONG tag);
void __cdecl operator delete(void* p, unsigned __int64);
typedef class CSysList
{
enum TAG
{
PROCESS = 0x555,
MODULE,
DRIVER,
BLACKLIST,
THREAD
};
protected:
static PLIST_PROCESS ProcessHead;
static PLIST_PROCESS ProcessLast;
static PLIST_THREAD ThreadHead;
static PLIST_THREAD ThreadLast;
static PLIST_MODULE ModuleHead;
static PLIST_MODULE ModuleLast;
static PLIST_BLACKLIST BlacklistHead;
static PLIST_BLACKLIST BlacklistLast;
static PLIST_DRIVER DriverHead;
static PLIST_DRIVER DriverLast;
CSysList();
~CSysList();
static void* Alloc(size_t, POOL_TYPE pool, ULONG Tag);
static void Free(void* p, ULONG Tag);
static ULONG GetCount(USHORT ID);
static void* GetHead(USHORT ID);
static void* GetLast(USHORT ID);
template <typename T>
static void SetHead(USHORT ID, T p);
template <typename X>
static void SetLast(USHORT ID, X p);
public:
static void* Get(ULONG index, USHORT ID);
static void Erease(USHORT ID);
static void Insert(void* pEntry, USHORT ID);
static void Remove(void* Entry, USHORT ID);
}SYSLIST, *PSYSLIST;
|
def find_substring(string, substring):
    """Return True if ``substring`` occurs anywhere in ``string``, else False.

    Args:
        string: The text to search.
        substring: The text to look for.

    Returns:
        bool: ``True`` when ``substring`` is found (the empty substring is
        always found), ``False`` otherwise.
    """
    # `in` already evaluates to a bool; no explicit if/else needed.
    return substring in string
#!/bin/bash
#####################################
# Author: Sebastiaan Tammer
# Version: v1.0.0
# Date: 2018-09-09
# Description: Show of the capabilities of an interactive script.
# Usage: ./interactive.sh
#####################################

# Gather the three story ingredients from the user.
read -p "Name a fictional character: " character_name
read -p "Name an actual location: " location
read -p "What's your favorite food? " food

# Assemble the story first, then print it.
story="Recently, ${character_name} was seen in ${location} eating ${food}!"
echo "${story}"
<filename>tests/test_version.py
from .context import sol
#============================ defines ===============================
#============================ fixtures ==============================
#============================ helpers ===============================
#============================ tests =================================
def test_version():
    # sol.version() must expose the version as a 4-component list.
    result = sol.version()
    assert type(result) == list
    assert len(result) == 4
|
/*******************************************************************************
* Copyright 2020 Regents of the University of California. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be found in the LICENSE.txt file at the root of the project.
******************************************************************************/
package edu.cavsat.springboot.controller;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import javax.validation.Valid;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import edu.cavsat.model.bean.CAvSATConstraint;
import edu.cavsat.model.bean.DBEnvironment;
import edu.cavsat.model.bean.Query;
import edu.cavsat.model.bean.SQLQuery;
import edu.cavsat.model.bean.Schema;
import edu.cavsat.model.bean.Stats;
import edu.cavsat.model.logic.AnswersComputer;
import edu.cavsat.model.logic.AnswersComputerAgg;
import edu.cavsat.model.logic.CAvSATInitializer;
import edu.cavsat.model.logic.CAvSATInitializerAggSQL;
import edu.cavsat.model.logic.CAvSATInitializerSQL;
import edu.cavsat.model.logic.EncoderForPrimaryKeysAggSQL;
import edu.cavsat.model.logic.EncoderForPrimaryKeysSQL;
import edu.cavsat.model.logic.ProblemParser;
import edu.cavsat.model.logic.QueryAnalyser;
import edu.cavsat.util.CAvSATSQLQueries;
import edu.cavsat.util.Constants;
import edu.cavsat.util.DBUtil;
import edu.cavsat.util.ExecCommand;
import edu.cavsat.util.MSSQLServerImpl;
import lombok.Data;
/**
* @author Akhil
*
*/
@RestController
@RequestMapping("/api")
public class CavsatController {
private Connection con;
private BufferedWriter wr;
    /** Default constructor: progress/timing output goes to stdout. */
    public CavsatController() {
        super();
        this.wr = new BufferedWriter(new OutputStreamWriter(System.out));
    }

    /**
     * Constructor with a caller-supplied writer for progress/timing output.
     *
     * @param wr destination for timing/log lines
     */
    public CavsatController(BufferedWriter wr) {
        super();
        this.wr = wr;
    }
    /**
     * Parses the posted schema/query and returns the query analysis as a JSON
     * string.
     *
     * @param dbEnvWithInput connection info, schema name, query syntax and language
     * @return 200 with the analysis JSON; an empty 200 body when analysis throws
     */
    @PostMapping("/get-query-analysis")
    public ResponseEntity<?> getGraph(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
        DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
        Schema schema = ProblemParser.parseSchema(dbEnv, dbEnvWithInput.schemaName);
        SQLQuery sqlQuery = ProblemParser.parseSQLQuery(dbEnvWithInput.querySyntax, schema);
        // The logical query is parsed from the aggregate-free SQL form.
        Query query = ProblemParser.parseQuery(sqlQuery.getSQLSyntaxWithoutAggregates(), schema,
                dbEnvWithInput.queryLanguage);
        try {
            QueryAnalyser qa = new QueryAnalyser();
            String jsonData = qa.analyseQuery(query, sqlQuery, schema);
            return ResponseEntity.ok(jsonData);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return ResponseEntity.ok().build();
    }
@PostMapping("/check-jdbc-connection")
public ResponseEntity<?> getSchemas(@Valid @RequestBody DBEnvironment dbEnv) {
DBEnvironment responseDbEnv = dbEnv;
responseDbEnv.setSchemas(new ArrayList<String>());
String url = DBUtil.constructConnectionURL(dbEnv, "");
Connection con = null;
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
try {
con = DriverManager.getConnection(url, dbEnv.getUsername(), dbEnv.getPassword());
if (con != null) {
ResultSet schemas = con.prepareStatement(sqlQueriesImpl.getSchemasQuery()).executeQuery();
while (schemas.next())
responseDbEnv.getSchemas().add(schemas.getString(1));
con.close();
return ResponseEntity.ok(new ObjectMapper().writeValueAsString(responseDbEnv));
}
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
} catch (SQLException | JsonProcessingException e) {
System.out.println(e);
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
}
@PostMapping("/get-cavsat-constraints")
public ResponseEntity<?> getCavSATConstraints(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
DBEnvironment currDbEnv = dbEnvWithInput.dbEnv;
String url = DBUtil.constructConnectionURL(currDbEnv, dbEnvWithInput.schemaName);
Connection con = null;
try {
con = DriverManager.getConnection(url, currDbEnv.getUsername(), currDbEnv.getPassword());
if (con != null) {
ResultSet rs = con.prepareStatement(Constants.CAvSAT_GET_CONSTRAINTS_QUERY).executeQuery();
List<CAvSATConstraint> constraints = new ArrayList<CAvSATConstraint>();
while (rs.next())
constraints.add(new CAvSATConstraint(rs.getInt(1), rs.getString(2), rs.getString(3)));
con.close();
return ResponseEntity.ok(new ObjectMapper().writeValueAsString(constraints));
}
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
} catch (SQLException | JsonProcessingException e) {
e.printStackTrace();
return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
}
@PostMapping("/prepare-cavsat-tables")
public ResponseEntity<?> prepareCAvSATTables(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
Schema schema = ProblemParser.parseSchema(dbEnv, dbEnvWithInput.schemaName);
String url = DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName);
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
try {
Connection con = DriverManager.getConnection(url, dbEnv.getUsername(), dbEnv.getPassword());
CAvSATInitializer init = new CAvSATInitializer(sqlQueriesImpl);
init.prepareCAvSATTables(schema, con);
con.close();
} catch (SQLException e) {
e.printStackTrace();
}
return ResponseEntity.ok().build();
}
@PostMapping("/get-database-preview")
public ResponseEntity<?> getDatabasePreview(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
Schema schema = ProblemParser.parseSchema(dbEnv, dbEnvWithInput.schemaName);
String url = DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName);
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
try {
Connection con = DriverManager.getConnection(url, dbEnv.getUsername(), dbEnv.getPassword());
String jsonData = sqlQueriesImpl.getDatabasePreviewAsJSON(schema, con, 10);
con.close();
return ResponseEntity.ok(jsonData);
} catch (SQLException | JsonProcessingException e) {
e.printStackTrace();
}
return ResponseEntity.ok().build();
}
    /**
     * Runs the SAT-based CAvSAT pipeline for the posted query and returns its
     * answers. Lazily opens (and caches in the {@code con} field) a JDBC
     * connection, drops stale intermediate tables, then dispatches on whether
     * the query aggregates. Total wall-clock time is appended to {@code wr}.
     *
     * @param dbEnvWithInput connection info, schema name and query syntax
     * @return response produced by the aggregate or SPJ handler
     * @throws IOException if writing the timing line to {@code wr} fails
     */
    @PostMapping("/run-sat-module")
    public ResponseEntity<?> runSATModule(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) throws IOException {
        DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
        Schema schema = ProblemParser.parseSchema(dbEnv, dbEnvWithInput.schemaName);
        SQLQuery sqlQuery = ProblemParser.parseSQLQuery(dbEnvWithInput.querySyntax, schema);
        CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
        try {
            // Reuse a previously opened connection when one is cached.
            if (con == null)
                con = DriverManager.getConnection(DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName),
                        dbEnv.getUsername(), dbEnv.getPassword());
            // Clear intermediate tables left over from a previous run.
            dropTables(sqlQueriesImpl, sqlQuery, true);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        ResponseEntity<?> re;
        long time = System.currentTimeMillis();
        // System.out.println("SAT solving start at " + new
        // Timestamp(System.currentTimeMillis()));
        if (sqlQuery.isAggregate()) {
            re = handleAggQueryViaSAT(schema, sqlQuery);
        } else {
            re = handleSPJQueryViaSAT(schema, sqlQuery);
        }
        time = System.currentTimeMillis() - time;
        wr.append("Completely done in " + Long.toString(time) + "\n\n");
        return re;
    }
/**
 * Computes GLB/LUB bounds of the consistent answer to an aggregation query
 * via partial MaxSAT solving. Scalar queries (no GROUP BY) produce a single
 * bounds pair; grouped queries produce one pair per consistent group. The
 * results are inserted into the aggregate final-answers table and a JSON
 * preview of that table is returned.
 *
 * @param schema   schema of the (possibly inconsistent) database
 * @param sqlQuery the aggregation query to evaluate
 * @return JSON payload with the preview and timing, or 204 on failure
 */
private ResponseEntity<?> handleAggQueryViaSAT(Schema schema, SQLQuery sqlQuery) {
ObjectMapper mapper = new ObjectMapper();
ObjectNode node = mapper.createObjectNode();
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
Map<String, Long> evalTimeData = new LinkedHashMap<String, Long>();
PreparedStatement psInsert;
ResultSet rsSelect;
double[] bounds = null;
boolean underlyingConsAns = false;
try {
if (sqlQuery.getGroupingAttributes().isEmpty()) {
// Scalar aggregation: first check whether the underlying Boolean SPJ
// query is consistently true; bounds are only computed in that case.
SQLQuery underlyingCQ = sqlQuery.getQueryWithoutAggregates();
handleSPJQueryViaSAT(schema, underlyingCQ);
rsSelect = con
.prepareStatement(sqlQueriesImpl.getNumberOfRows(Constants.CAvSAT_ANS_FROM_CONS_TABLE_NAME))
.executeQuery(); // Boolean SPJ query's consistent answer is stored in ans_from_cons table, and
// final_answers table is never built
rsSelect.next();
if (rsSelect.getInt(1) != 0) {
underlyingConsAns = true;
dropTables(sqlQueriesImpl, underlyingCQ, true);
bounds = handleScalarAggQuery(schema, sqlQuery);
psInsert = con.prepareStatement(sqlQueriesImpl.insertIntoFinalAnswersAggTable(
new ArrayList<String>(Arrays.asList(Constants.BOOL_CONS_ANSWER_COLUMN_NAME,
Constants.GLB_COLUMN_NAME, Constants.LUB_COLUMN_NAME))));
psInsert.setDouble(2, bounds[0]);
psInsert.setDouble(3, bounds[1]);
} else {
psInsert = con.prepareStatement(sqlQueriesImpl.insertIntoFinalAnswersAggTable(
Collections.singletonList(Constants.BOOL_CONS_ANSWER_COLUMN_NAME)));
}
// NOTE(review): the INSERT statements above are prepared before this
// CREATE TABLE executes — confirm the driver tolerates preparing
// against a not-yet-existing table.
con.prepareStatement(sqlQueriesImpl
.createFinalAnswersAggTable(Collections.singletonList(Constants.BOOL_CONS_ANSWER_COLUMN_NAME)))
.execute();
psInsert.setString(1, underlyingConsAns ? "1" : "0");
psInsert.executeUpdate();
} else {
// Grouped aggregation: compute the consistent groups first, then run
// one scalar-aggregation instance per group by constraining WHERE.
SQLQuery underlyingCQ = sqlQuery.getQueryWithoutAggregates();
SQLQuery groupWiseCQ = sqlQuery.getQueryWithoutGroupBy();
List<String> answerAttributes = new ArrayList<String>(sqlQuery.getGroupingAttributes());
answerAttributes.addAll(Arrays.asList(Constants.GLB_COLUMN_NAME, Constants.LUB_COLUMN_NAME));
handleSPJQueryViaSAT(schema, underlyingCQ);
// Column names are the grouping attributes stripped of their table prefix.
con.prepareStatement(sqlQueriesImpl.createFinalAnswersAggTable(sqlQuery.getGroupingAttributes().stream()
.map(a -> a.split("\\.")[1]).collect(Collectors.toList()))).execute();
rsSelect = con.prepareStatement("SELECT * FROM " + Constants.CAvSAT_FINAL_ANSWERS_TABLE_NAME)
.executeQuery();
psInsert = con.prepareStatement(sqlQueriesImpl.insertIntoFinalAnswersAggTable(answerAttributes));
// NOTE(review): only the first 10 groups are processed (hard-coded cap).
int topK = 10, nGroups = 0;
while (rsSelect.next() && (topK-- > 0)) {
nGroups++;
dropTables(sqlQueriesImpl, underlyingCQ, false);
// Restrict the per-group query to the current group's key values.
groupWiseCQ.setWhereConditions(new ArrayList<String>(sqlQuery.getWhereConditions()));
for (String attribute : underlyingCQ.getSelect()) {
groupWiseCQ.getWhereConditions()
.add(attribute + " = '" + rsSelect.getString(attribute.split("\\.")[1]) + "'");
}
bounds = handleScalarAggQuery(schema, groupWiseCQ);
for (int i = 0; i < sqlQuery.getGroupingAttributes().size(); i++)
psInsert.setString(i + 1, rsSelect.getString(i + 1));
psInsert.setDouble(sqlQuery.getGroupingAttributes().size() + 1, bounds[0]);
psInsert.setDouble(sqlQuery.getGroupingAttributes().size() + 2, bounds[1]);
psInsert.addBatch();
}
wr.append("nGroups: " + nGroups + "\n");
psInsert.executeBatch();
}
String jsonData = sqlQueriesImpl.getTablePreviewAsJSON(Constants.CAvSAT_AGG_FINAL_ANSWERS_TABLE_NAME, con,
Constants.PREVIEW_ROW_COUNT);
node.set("jsonDataPreview", mapper.readValue(jsonData, ObjectNode.class));
node.set("runningTimeAnalysis",
wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time Analysis"));
// NOTE(review): row counts are hard-coded to 1 regardless of actual results.
node.put("totalRowCount", 1);
node.put("previewRowCount", 1);
node.put("approach", "Partial MaxSAT Solving");
return ResponseEntity.ok(mapper.writeValueAsString(node));
} catch (SQLException | IOException e) {
e.printStackTrace();
return ResponseEntity.status(HttpStatus.NO_CONTENT).build();
}
}
/**
 * Dispatches a scalar (non-grouped) aggregation query to the handler for its
 * aggregate function, returning the {GLB, LUB} bounds of the consistent answer.
 * Only the first aggregate function of the query is considered.
 *
 * @param schema   schema of the database instance
 * @param sqlQuery the aggregation query
 * @return a two-element array {glb, lub}, or null if the aggregate function
 *         is unsupported (AVG) or unrecognized
 */
private double[] handleScalarAggQuery(Schema schema, SQLQuery sqlQuery) {
    switch (sqlQuery.getAggFunctions().get(0).toLowerCase()) {
    case "count":
        return handleCountQuery(schema, sqlQuery);
    case "sum":
        return handleSumQuery(schema, sqlQuery);
    case "min":
        return handleMinMaxQueryItr(schema, sqlQuery, true);
    case "max":
        return handleMinMaxQueryItr(schema, sqlQuery, false);
    case "avg": // accept both the SQL keyword and the spelled-out form
    case "average":
        System.out.println("Average function is not supported");
        return null;
    default:
        // Previously an unknown function fell through and returned null
        // silently; log it so the caller's NPE is diagnosable.
        System.out.println("Unrecognized aggregate function: " + sqlQuery.getAggFunctions().get(0));
        return null;
    }
}
/**
 * Computes GLB/LUB bounds for a scalar COUNT query.
 * The instance is encoded as a weighted partial MaxSAT formula: one solver run
 * yields the GLB (falsified-clause count), and a MinSAT-to-MaxSAT re-encoding
 * yields the LUB. Both bounds are then shifted by the aggregate obtained from
 * the consistent part of the database.
 *
 * @return {glb, lub}, or null if solving or database access fails
 */
private double[] handleCountQuery(Schema schema, SQLQuery sqlQuery) {
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
CAvSATInitializerAggSQL init = new CAvSATInitializerAggSQL(sqlQueriesImpl);
int glb = Integer.MIN_VALUE, lub = Integer.MAX_VALUE;
try {
EncoderForPrimaryKeysAggSQL encoder = new EncoderForPrimaryKeysAggSQL(schema, con, sqlQueriesImpl);
AnswersComputerAgg computer = new AnswersComputerAgg();
// Materialize the consistent answers, witnesses, and relevant facts.
init.createAnsFromCons(sqlQuery, schema, con);
init.createWitnesses(sqlQuery, schema, con);
init.createRelevantTables(sqlQuery, schema, con);
init.attachSequentialFactIDsToRelevantTables(sqlQuery, con);
// Alpha clauses come from key-equal groups; beta clauses encode COUNT.
encoder.createAlphaClauses(sqlQuery, true, Constants.FORMULA_FILE_NAME);
encoder.createBetaClausesForCount(sqlQuery, Constants.FORMULA_FILE_NAME);
encoder.writeFinalFormulaFile(false, true, Constants.FORMULA_FILE_NAME, Constants.FORMULA_FILE_NAME, wr);
long glbtime = AnswersComputerAgg.runSolver(Constants.MAXSAT_COMMAND, Constants.FORMULA_FILE_NAME,
Constants.SAT_OUTPUT_FILE_NAME);
wr.append("GLB time: " + Long.toString(glbtime) + "\n");
ExecCommand.writeSolverAnalysis(Constants.SAT_OUTPUT_FILE_NAME, wr);
// GLB = falsified clauses of the optimal MaxSAT assignment.
glb = computer.getFalsifiedClausesCount(Constants.FORMULA_FILE_NAME,
ExecCommand.readOutput(Constants.SAT_OUTPUT_FILE_NAME));
// Re-encode so the same MaxSAT solver yields the LUB.
encoder.encodeWPMinSATtoWPMaxSAT(wr);
long lubtime = AnswersComputerAgg.runSolver(Constants.MAXSAT_COMMAND,
Constants.MIN_TO_MAX_ENCODED_FORMULA_FILE_NAME, Constants.SAT_OUTPUT_FILE_NAME);
wr.append("LUB time: " + Long.toString(lubtime) + "\n");
ExecCommand.writeSolverAnalysis(Constants.SAT_OUTPUT_FILE_NAME, wr);
lub = computer.getFalsifiedClausesCount(Constants.FORMULA_FILE_NAME,
ExecCommand.readOutput(Constants.SAT_OUTPUT_FILE_NAME));
// Add the contribution of the consistent part of the database.
ResultSet rsSelect = con.prepareStatement(sqlQueriesImpl.getConsAnsAgg()).executeQuery();
rsSelect.next();
double ansFromCons = rsSelect.getDouble(1);
glb += ansFromCons;
lub += ansFromCons;
return new double[] { glb, lub };
} catch (SQLException | IOException e) {
e.printStackTrace();
return null;
}
}
/*
* private ResponseEntity<?> handleMinMaxQuery(Schema schema, SQLQuery sqlQuery,
* boolean min) { ObjectMapper mapper = new ObjectMapper(); ObjectNode node =
* mapper.createObjectNode(); CAvSATSQLQueries sqlQueriesImpl = new
* MSSQLServerImpl(); CAvSATInitializerAggSQL init = new
* CAvSATInitializerAggSQL(sqlQueriesImpl); int glb = Integer.MIN_VALUE, lub =
* Integer.MAX_VALUE, bound1, bound2; try { EncoderForPrimaryKeysAggSQL encoder
* = new EncoderForPrimaryKeysAggSQL(schema, con, Constants.FORMULA_FILE_NAME,
* sqlQueriesImpl);
*
* init.createAnsFromCons(sqlQuery, schema, con); init.createWitnesses(sqlQuery,
* schema, con); init.createRelevantTables(sqlQuery, schema, con);
* init.attachSequentialFactIDsToRelevantTables(sqlQuery, con);
* encoder.createAlphaClauses(sqlQuery, true);
* encoder.createBetaClausesForMinMax(sqlQuery, min);
*
* encoder.writeFinalFormulaFile(true, true);
* AnswersComputerAgg.runSolver(Constants.MAXSAT_COMMAND,
* Constants.FORMULA_FILE_NAME); bound1 =
* AnswersComputerAgg.computeDifficultBoundMinMax(Constants.FORMULA_FILE_NAME,
* ExecCommand.readOutput(Constants.SAT_OUTPUT_FILE_NAME)); bound2 =
* AnswersComputerAgg.computeEasyBoundMinMax(sqlQuery, con); lub = min ? bound1
* : bound2; glb = min ? bound2 : bound1; return
* ResponseEntity.ok(mapper.writeValueAsString(node)); } catch (SQLException |
* IOException e) { e.printStackTrace(); return
* ResponseEntity.status(HttpStatus.NOT_FOUND).build(); } }
*/
/**
 * Computes GLB/LUB bounds for a scalar MIN or MAX query.
 * One bound (the "easy" one) comes from SQL; the other ("difficult") bound is
 * currently a placeholder — see the NOTE below. Both are then tightened with
 * the aggregate over the consistent part of the database.
 *
 * @param min true for MIN, false for MAX
 * @return {glb, lub}, or null if solving or database access fails
 */
private double[] handleMinMaxQueryItr(Schema schema, SQLQuery sqlQuery, boolean min) {
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
CAvSATInitializerAggSQL init = new CAvSATInitializerAggSQL(sqlQueriesImpl);
double glb = Integer.MIN_VALUE, lub = Integer.MAX_VALUE, bound1, bound2;
try {
EncoderForPrimaryKeysAggSQL encoder = new EncoderForPrimaryKeysAggSQL(schema, con, sqlQueriesImpl);
init.createAnsFromCons(sqlQuery, schema, con);
init.createWitnesses(sqlQuery, schema, con);
init.createRelevantTables(sqlQuery, schema, con);
init.attachSequentialFactIDsToRelevantTables(sqlQuery, con);
encoder.createAlphaClauses(sqlQuery, true, Constants.FORMULA_FILE_NAME);
encoder.writeFinalFormulaFile(false, false, Constants.FORMULA_FILE_NAME, Constants.FORMULA_FILE_NAME, wr);
// bound1 = encoder.computeDifficultBoundMinMaxItr(sqlQuery, min);
// NOTE(review): the iterative "difficult" bound is disabled (commented out
// above) and replaced with a placeholder, so one side of the interval is
// effectively unbounded — restore the computation before relying on it.
bound1 = Integer.MAX_VALUE;
bound2 = AnswersComputerAgg.computeEasyBoundMinMax(sqlQuery, con);
// For MIN the difficult bound is the LUB; for MAX it is the GLB.
lub = min ? bound1 : bound2;
glb = min ? bound2 : bound1;
// Tighten both bounds with the aggregate from the consistent part.
ResultSet rsSelect = con.prepareStatement(sqlQueriesImpl.getConsAnsAgg()).executeQuery();
rsSelect.next();
double ansFromCons = rsSelect.getDouble(1);
if (min) {
glb = Double.min(glb, ansFromCons);
lub = Double.min(lub, ansFromCons);
} else {
glb = Double.max(glb, ansFromCons);
lub = Double.max(lub, ansFromCons);
}
return new double[] { glb, lub };
} catch (SQLException | IOException e) {
e.printStackTrace();
return null;
}
}
/**
 * Computes GLB/LUB bounds for a scalar SUM query.
 * NOTE(review): this handler is incomplete — the actual bound extraction is
 * commented out below and the returned bounds are hard-coded debug values
 * (900 / 2200). Do not trust its output until the commented computeSum /
 * consistent-part additions are restored.
 *
 * @return {glb, lub} (currently placeholder values), or null on failure
 */
private double[] handleSumQuery(Schema schema, SQLQuery sqlQuery) {
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
CAvSATInitializerAggSQL init = new CAvSATInitializerAggSQL(sqlQueriesImpl);
double glb = Integer.MIN_VALUE, lub = Integer.MAX_VALUE;
try {
EncoderForPrimaryKeysAggSQL encoder = new EncoderForPrimaryKeysAggSQL(schema, con, sqlQueriesImpl);
init.createAnsFromCons(sqlQuery, schema, con);
init.createWitnesses(sqlQuery, schema, con);
init.createRelevantTables(sqlQuery, schema, con);
init.attachSequentialFactIDsToRelevantTables(sqlQuery, con);
encoder.createAlphaClauses(sqlQuery, true, Constants.FORMULA_FILE_NAME);
encoder.createBetaClausesForSum(sqlQuery, true, Constants.FORMULA_FILE_NAME);
encoder.writeFinalFormulaFile(true, true, Constants.FORMULA_FILE_NAME, Constants.FORMULA_FILE_NAME, wr);
long glbtime = AnswersComputerAgg.runSolver(Constants.MAXSAT_COMMAND, Constants.FORMULA_FILE_NAME,
Constants.SAT_OUTPUT_FILE_NAME);
wr.append("GLB time: " + Long.toString(glbtime) + "\n");
ExecCommand.writeSolverAnalysis(Constants.SAT_OUTPUT_FILE_NAME, wr);
// glb =
// AnswersComputerAgg.computeSum(encoder.getWitnessesQueryForSum(sqlQuery),
// Constants.FORMULA_FILE_NAME,
// ExecCommand.readOutput(Constants.SAT_OUTPUT_FILE_NAME), con);
encoder.encodeWPMinSATtoWPMaxSAT(wr);
long lubtime = AnswersComputerAgg.runSolver(Constants.MAXSAT_COMMAND,
Constants.MIN_TO_MAX_ENCODED_FORMULA_FILE_NAME, Constants.SAT_OUTPUT_FILE_NAME);
wr.append("LUB time: " + Long.toString(lubtime) + "\n");
ExecCommand.writeSolverAnalysis(Constants.SAT_OUTPUT_FILE_NAME, wr);
// ResultSet rsSelect =
// con.prepareStatement(sqlQueriesImpl.getConsAnsAgg()).executeQuery();
// rsSelect.next();
// double ansFromCons = rsSelect.getDouble(1);
// glb += ansFromCons;
// lub += ansFromCons;
// NOTE(review): hard-coded placeholder bounds — replace with the real
// solver-derived values (see the commented-out code above).
glb = 900;
lub = 2200;
return new double[] { glb, lub };
} catch (SQLException | IOException e) {
e.printStackTrace();
return null;
}
}
/**
 * Computes the consistent answers to a select-project-join query via SAT.
 * Boolean queries (empty SELECT) short-circuit as soon as the consistent part
 * of the database already yields "true"; otherwise inconsistent potential
 * answers are eliminated with a SAT solver and survivors are written to the
 * final-answers table. Timing is appended to the shared writer 'wr'.
 *
 * NOTE(review): the JSON payload-building code is commented out throughout,
 * so on success this currently returns an empty JSON object.
 *
 * @return 200 with a (currently empty) JSON body, or an empty 200 on failure
 */
private ResponseEntity<?> handleSPJQueryViaSAT(Schema schema, SQLQuery sqlQuery) {
CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
ObjectMapper mapper = new ObjectMapper();
ObjectNode node = mapper.createObjectNode();
// Map<String, Long> evalTimeData = new LinkedHashMap<String, Long>();
long start, globalStart;
try {
CAvSATInitializerSQL init = new CAvSATInitializerSQL(sqlQueriesImpl);
AnswersComputer computer = new AnswersComputer(con);
EncoderForPrimaryKeysSQL encoder = new EncoderForPrimaryKeysSQL(schema, con, Constants.FORMULA_FILE_NAME,
sqlQueriesImpl);
start = System.currentTimeMillis();
globalStart = start;
// Phase 1: answers derivable from the consistent part alone.
init.createAnsFromConsNew(sqlQuery, schema, con);
// evalTimeData.put("Time to compute answers from the consistent part of the
// database (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
// Boolean query already true on the consistent part: no solving needed.
if (sqlQuery.getSelect().isEmpty() && init.checkBooleanConsAnswer(con)) {
long totalEvaluationTime = System.currentTimeMillis() - globalStart;
wr.append("Total " + totalEvaluationTime + " ms\n");
// evalTimeData.put("Total Evaluation Time (ms)", totalEvaluationTime);
// node.put("totalEvaluationTime", totalEvaluationTime);
// String jsonData =
// sqlQueriesImpl.getTablePreviewAsJSON(Constants.CAvSAT_ANS_FROM_CONS_TABLE_NAME,
// con,
// Constants.PREVIEW_ROW_COUNT);
// node.set("jsonDataPreview", mapper.readValue(jsonData, ObjectNode.class));
// node.set("runningTimeAnalysis",
// wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time
// Analysis"));
// node.put("totalRowCount", 1);
// node.put("previewRowCount", 1);
// node.put("approach", "Consistent part of the DB");
return ResponseEntity.ok(mapper.writeValueAsString(node));
}
// Phase 2: build witnesses and relevant facts, attach fact IDs.
init.createWitnesses(sqlQuery, schema, con);
// evalTimeData.put("Time to compute minimal witnesses to the query (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
init.createRelevantTables(sqlQuery, schema, con);
// evalTimeData.put("Time to compute relevant facts (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
init.attachSequentialFactIDsToRelevantTables(sqlQuery, con);
// evalTimeData.put("Time to attach FactIDs to the relevant facts (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
// Phase 3: encode as SAT — alpha clauses from key-equal groups,
// beta clauses from minimal witnesses.
encoder.createAlphaClausesOpt(sqlQuery);
// evalTimeData.put("Time to create positive clauses from key-equal groups
// (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
encoder.createBetaClausesOpt(sqlQuery);
// evalTimeData.put("Time to create negative clauses from minimal witnesses
// (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
String infinity;
// Phase 4: solve. Boolean queries get a single solver call; open queries
// go through potential-answer elimination.
if (sqlQuery.getSelect().size() == 0) {
infinity = encoder.writeFinalFormulaFile(Constants.FORMULA_FILE_NAME, false, wr);
Stats stats = computer.computeBooleanAnswer(Constants.FORMULA_FILE_NAME, "MaxHS");
ExecCommand.writeSolverAnalysis(Constants.SAT_OUTPUT_FILE_NAME, wr);
long totalEvaluationTime = System.currentTimeMillis() - globalStart;
wr.append("Total " + totalEvaluationTime + " ms\n");
// evalTimeData.put("Total Evaluation Time (ms)", totalEvaluationTime);
// node.put("totalEvaluationTime", totalEvaluationTime);
// String jsonData;
// UNSAT means the query is consistently true; record it in the table.
if (!stats.isSolved())
con.prepareStatement("insert into " + Constants.CAvSAT_ANS_FROM_CONS_TABLE_NAME + " values (1)")
.execute();
// jsonData =
// sqlQueriesImpl.getTablePreviewAsJSON(Constants.CAvSAT_ANS_FROM_CONS_TABLE_NAME,
// con,
// stats.isSolved() ? 0 : 1);
// node.put("totalRowCount", stats.isSolved() ? 0 : 1);
// node.put("previewRowCount", stats.isSolved() ? 0 : 1);
// node.set("jsonDataPreview", mapper.readValue(jsonData, ObjectNode.class));
// node.set("runningTimeAnalysis",
// wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time
// Analysis"));
// node.put("approach", "SAT Solving");
return ResponseEntity.ok(mapper.writeValueAsString(node));
} else {
infinity = encoder.writeFinalFormulaFile(Constants.FORMULA_FILE_NAME, true, wr);
}
// evalTimeData.put("Time to write the clauses to a DIMAC file (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
long satTime = computer.eliminatePotentialAnswersInMemory(Constants.FORMULA_FILE_NAME, infinity, wr);
// evalTimeData.put("Time to eliminate inconsistent potential answers (ms)",
// System.currentTimeMillis() - start);
// evalTimeData.put("Total SAT-solving time (ms)", satTime);
wr.append("SAT: " + satTime + " ms\n");
// start = System.currentTimeMillis();
// Phase 5: persist the surviving (consistent) answers.
computer.buildFinalAnswers(sqlQueriesImpl);
// evalTimeData.put("Time to write the final consistent answers to a table
// (ms)",
// System.currentTimeMillis() - start);
// start = System.currentTimeMillis();
// int totalRowCount =
// computer.getRowCount(Constants.CAvSAT_FINAL_ANSWERS_TABLE_NAME,
// sqlQueriesImpl);
// String jsonData =
// sqlQueriesImpl.getTablePreviewAsJSON(Constants.CAvSAT_FINAL_ANSWERS_TABLE_NAME,
// con,
// Constants.PREVIEW_ROW_COUNT);
long totalEvaluationTime = System.currentTimeMillis() - globalStart;
// evalTimeData.put("Total Evaluation Time (ms)", totalEvaluationTime);
wr.append("Total " + totalEvaluationTime + " ms\n");
// node.put("totalEvaluationTime", totalEvaluationTime);
// node.set("jsonDataPreview", mapper.readValue(jsonData, ObjectNode.class));
// node.set("runningTimeAnalysis",
// wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time
// Analysis"));
// node.put("totalRowCount", totalRowCount);
// node.put("previewRowCount",
// totalRowCount < Constants.PREVIEW_ROW_COUNT ? totalRowCount :
// Constants.PREVIEW_ROW_COUNT);
// node.put("approach", "Partial MaxSAT Solving");
// System.out.println("SAT solving end at " + new
// Timestamp(System.currentTimeMillis()));
return ResponseEntity.ok(mapper.writeValueAsString(node));
} catch (SQLException | IOException e) {
e.printStackTrace();
}
return ResponseEntity.ok().build();
}
/*
* @PostMapping("/run-sat-module-unopt") ResponseEntity<?>
* runSATModuleUnOpt(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
* System.out.println("SAT solving UnOpt start at " + new
* Timestamp(System.currentTimeMillis())); DBEnvironment dbEnv =
* dbEnvWithInput.dbEnv; Schema schema = ProblemParser.parseSchema(dbEnv,
* dbEnvWithInput.schemaName); SQLQuery sqlQuery =
* ProblemParser.parseSQLQuery(dbEnvWithInput.querySyntax, schema);
* CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl(); ObjectMapper mapper
* = new ObjectMapper(); ObjectNode node = mapper.createObjectNode(); long
* start, globalStart; try { if (con == null) con =
* DriverManager.getConnection(DBUtil.constructConnectionURL(dbEnv,
* dbEnvWithInput.schemaName), dbEnv.getUsername(), dbEnv.getPassword());
* dropTables(sqlQueriesImpl, sqlQuery); AnswersComputer computer = new
* AnswersComputer(con); EncoderForPrimaryKeysSQL encoder = new
* EncoderForPrimaryKeysSQL(schema, con, Constants.FORMULA_FILE_NAME,
* sqlQueriesImpl); Map<String, Long> evalTimeData = new LinkedHashMap<String,
* Long>(); start = System.currentTimeMillis(); globalStart = start;
*
* encoder.createAlphaClausesUnOpt(sqlQuery);
* evalTimeData.put("Time to create positive clauses from key-equal groups (ms)"
* , System.currentTimeMillis() - start); start = System.currentTimeMillis();
*
* encoder.createBetaClausesUnOpt(sqlQuery); evalTimeData.
* put("Time to create negative clauses from minimal witnesses (ms)",
* System.currentTimeMillis() - start); start = System.currentTimeMillis();
*
* String infinity = encoder.writeFinalFormulaFile(Constants.FORMULA_FILE_NAME,
* false); evalTimeData.put("Time to write the clauses to a DIMAC file (ms)",
* System.currentTimeMillis() - start); start = System.currentTimeMillis();
*
* long satTime =
* computer.eliminatePotentialAnswersUnOptInMemory(Constants.FORMULA_FILE_NAME,
* infinity);
* evalTimeData.put("Time to eliminate inconsistent potential answers (ms)",
* System.currentTimeMillis() - start);
* evalTimeData.put("Total SAT-solving time (ms)", satTime); start =
* System.currentTimeMillis();
*
* computer.buildFinalAnswersUnOpt(sqlQueriesImpl);
* evalTimeData.put("Time to write the final consistent answers to a table (ms)"
* , System.currentTimeMillis() - start); start = System.currentTimeMillis();
*
* int totalRowCount =
* computer.getRowCount(Constants.CAvSAT_FINAL_ANSWERS_TABLE_NAME,
* sqlQueriesImpl); String jsonData =
* sqlQueriesImpl.getTablePreviewAsJSON(Constants.
* CAvSAT_FINAL_ANSWERS_TABLE_NAME, con, Constants.PREVIEW_ROW_COUNT); long
* totalEvaluationTime = System.currentTimeMillis() - globalStart;
* evalTimeData.put("Total Evaluation Time (ms)", totalEvaluationTime);
*
* node.put("totalEvaluationTime", totalEvaluationTime);
* node.set("jsonDataPreview", mapper.readValue(jsonData, ObjectNode.class));
* node.set("runningTimeAnalysis",
* wrapAttributeValueDataForBootstrapTable(evalTimeData,
* "Running Time Analysis")); node.put("totalRowCount", totalRowCount);
* node.put("previewRowCount", totalRowCount < Constants.PREVIEW_ROW_COUNT ?
* totalRowCount : Constants.PREVIEW_ROW_COUNT); node.put("approach",
* "Partial MaxSAT Solving"); System.out.println("SAT solving end at " + new
* Timestamp(System.currentTimeMillis())); return
* ResponseEntity.ok(mapper.writeValueAsString(node)); } catch (SQLException |
* IOException e) { e.printStackTrace(); } return ResponseEntity.ok().build(); }
*/
/**
 * Evaluates a user-supplied ConQuer SQL rewriting directly against the
 * database and wraps the result preview, row counts, and timing into a
 * JSON response.
 */
@PostMapping("/run-conquer-rewriting")
public ResponseEntity<?> runConQuerSQLRewriting(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
    System.out.println("ConQuer start at " + new Timestamp(System.currentTimeMillis()));
    DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
    CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode node = mapper.createObjectNode();
    Map<String, Long> evalTimeData = new TreeMap<String, Long>();
    int previewRowCount = 100;
    try {
        // Lazily open the shared connection on first use.
        if (con == null)
            con = DriverManager.getConnection(DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName),
                    dbEnv.getUsername(), dbEnv.getPassword());
        AnswersComputer computer = new AnswersComputer(con);
        long start = System.currentTimeMillis();
        String jsonData = computer.computeSQLQueryAnswers(dbEnvWithInput.conQuerSQLRewriting, sqlQueriesImpl,
                previewRowCount);
        long runningTime = System.currentTimeMillis() - start;
        evalTimeData.put("Time to run ConQuer SQL Rewriting (ms)", runningTime);
        // The total row count rides along inside the result JSON; pull it out
        // so the remainder is purely the preview payload.
        ObjectNode preview = mapper.readValue(jsonData, ObjectNode.class);
        int totalRowCount = preview.get("rowCount").asInt(-1);
        preview.remove("rowCount");
        node.set("jsonDataPreview", preview);
        node.put("totalEvaluationTime", runningTime);
        node.put("totalRowCount", totalRowCount);
        node.put("previewRowCount", Math.min(totalRowCount, Constants.PREVIEW_ROW_COUNT));
        node.set("runningTimeAnalysis",
                wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time Analysis"));
        node.put("approach", "ConQuer SQL Rewriting");
        System.out.println("ConQuer end at " + new Timestamp(System.currentTimeMillis()));
        return ResponseEntity.ok(mapper.writeValueAsString(node));
    } catch (SQLException | IOException e) {
        e.printStackTrace();
    }
    return ResponseEntity.ok().build();
}
/**
 * Evaluates a pre-computed Koutris-Wijsen SQL rewriting of the query directly
 * against the database and packages the preview, counts, and timing as JSON.
 */
@PostMapping("/run-kw-rewriting")
public ResponseEntity<?> runKWSQLRewriting(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
    System.out.println("KW start at " + new Timestamp(System.currentTimeMillis()));
    DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
    CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode node = mapper.createObjectNode();
    Map<String, Long> evalTimeData = new TreeMap<String, Long>();
    int previewRowCount = 100;
    try {
        // Lazily open the shared connection on first use.
        if (con == null)
            con = DriverManager.getConnection(DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName),
                    dbEnv.getUsername(), dbEnv.getPassword());
        AnswersComputer computer = new AnswersComputer(con);
        long start = System.currentTimeMillis();
        String jsonData = computer.computeSQLQueryAnswers(dbEnvWithInput.kwSQLRewriting, sqlQueriesImpl,
                previewRowCount);
        long runningTime = System.currentTimeMillis() - start;
        evalTimeData.put("Time to run Koutris-Wijsen SQL Rewriting (ms)", runningTime);
        // Extract the total row count from the result JSON; what remains is
        // the preview payload.
        ObjectNode preview = mapper.readValue(jsonData, ObjectNode.class);
        int totalRowCount = preview.get("rowCount").asInt(-1);
        preview.remove("rowCount");
        node.set("jsonDataPreview", preview);
        node.put("totalEvaluationTime", runningTime);
        node.put("totalRowCount", totalRowCount);
        node.put("previewRowCount", Math.min(totalRowCount, Constants.PREVIEW_ROW_COUNT));
        node.set("runningTimeAnalysis",
                wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time Analysis"));
        node.put("approach", "Koutris-Wijsen SQL Rewriting");
        System.out.println("KW end at " + new Timestamp(System.currentTimeMillis()));
        return ResponseEntity.ok(mapper.writeValueAsString(node));
    } catch (SQLException | IOException e) {
        e.printStackTrace();
    }
    return ResponseEntity.ok().build();
}
/**
 * Runs the user's query as-is over the (possibly inconsistent) database to
 * obtain its potential answers, returning a JSON preview with timing info.
 * Returns 404 if evaluation fails.
 */
@PostMapping("/compute-potential-answers")
public ResponseEntity<?> computePotentialAnswers(@Valid @RequestBody DBEnvWithInput dbEnvWithInput) {
    long overallStart = System.currentTimeMillis();
    DBEnvironment dbEnv = dbEnvWithInput.dbEnv;
    Schema schema = ProblemParser.parseSchema(dbEnv, dbEnvWithInput.schemaName);
    SQLQuery sqlQuery = ProblemParser.parseSQLQuery(dbEnvWithInput.querySyntax, schema);
    String url = DBUtil.constructConnectionURL(dbEnv, dbEnvWithInput.schemaName);
    CAvSATSQLQueries sqlQueriesImpl = new MSSQLServerImpl();
    Map<String, Long> evalTimeData = new TreeMap<String, Long>();
    int previewRowCount = 100;
    try {
        // Lazily open the shared connection on first use.
        if (con == null)
            con = DriverManager.getConnection(url, dbEnv.getUsername(), dbEnv.getPassword());
        AnswersComputer computer = new AnswersComputer(con);
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode node = mapper.createObjectNode();
        long queryStart = System.currentTimeMillis();
        // A Boolean query has an empty SELECT list; project a constant so the
        // result set is non-empty exactly when the query holds.
        if (sqlQuery.getSelect().isEmpty())
            sqlQuery.getSelect().add("1 AS " + Constants.BOOL_CONS_ANSWER_COLUMN_NAME);
        String jsonData = computer.computeSQLQueryAnswers(sqlQuery.getSQLSyntax(), sqlQueriesImpl, previewRowCount);
        // The total row count rides inside the result JSON; separate it out.
        ObjectNode preview = mapper.readValue(jsonData, ObjectNode.class);
        int totalRowCount = preview.get("rowCount").asInt(-1);
        preview.remove("rowCount");
        long totalEvaluationTime = System.currentTimeMillis() - queryStart;
        evalTimeData.put("Time to compute potential answers (ms)", totalEvaluationTime);
        node.set("jsonDataPreview", preview);
        node.put("totalEvaluationTime", totalEvaluationTime);
        node.put("totalRowCount", totalRowCount);
        node.put("previewRowCount", Math.min(totalRowCount, Constants.PREVIEW_ROW_COUNT));
        node.set("runningTimeAnalysis",
                wrapAttributeValueDataForBootstrapTable(evalTimeData, "Running Time Analysis"));
        wr.append("Pot: " + (System.currentTimeMillis() - overallStart) + " ms\n");
        return ResponseEntity.ok(mapper.writeValueAsString(node));
    } catch (SQLException | IOException e) {
        e.printStackTrace();
    }
    return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
/**
 * Best-effort cleanup of all scratch tables created by a previous CAvSAT run:
 * consistent-answer, witness, relevant-fact, potential-answer, and
 * final-answer tables. Failures are logged and ignored.
 *
 * @param sqlQueriesImpl  dialect-specific SQL generator
 * @param query           query whose FROM relations determine the per-relation
 *                        relevant tables to drop
 * @param dropFinalAnsAgg whether to also drop the aggregate final-answers table
 */
private void dropTables(CAvSATSQLQueries sqlQueriesImpl, SQLQuery query, boolean dropFinalAnsAgg) {
    List<String> tables = new ArrayList<String>(Arrays.asList(
            Constants.CAvSAT_ANS_FROM_CONS_TABLE_NAME,
            Constants.CAvSAT_WITNESSES_TABLE_NAME));
    // One relevant-facts table per relation referenced by the query.
    for (String relationName : query.getFrom())
        tables.add(Constants.CAvSAT_RELEVANT_TABLE_PREFIX + relationName);
    // NOTE: the original dropped CAvSAT_UNOPT_DISTINCT_POTENTIAL_ANS twice;
    // each table is listed exactly once here.
    tables.addAll(Arrays.asList(
            Constants.CAvSAT_RELEVANT_DISTINCT_POTENTIAL_ANS_TABLE_NAME,
            Constants.CAvSAT_UNOPT_DISTINCT_POTENTIAL_ANS_TABLE_NAME,
            Constants.CAvSAT_WITNESSES_WITH_FACTID_TABLE_NAME,
            Constants.CAvSAT_UNOPT_WITNESSES_WITH_FACTID_TABLE_NAME,
            Constants.CAvSAT_ALL_DISTINCT_POTENTIAL_ANS_TABLE_NAME,
            "CAVSAT_CONSISTENT_PVARS",
            Constants.CAvSAT_FINAL_ANSWERS_TABLE_NAME));
    if (dropFinalAnsAgg)
        tables.add(Constants.CAvSAT_AGG_FINAL_ANSWERS_TABLE_NAME);
    try {
        for (String table : tables)
            con.prepareStatement(sqlQueriesImpl.getDropTableQuery(table)).execute();
    } catch (SQLException e) {
        // Cleanup is best-effort; a failed drop must not abort the request.
        e.printStackTrace();
    }
}
/**
 * Wraps an attribute/value map into the JSON shape expected by the frontend's
 * bootstrap table: {"name": ..., "columns": [...], "data": [...]} with two
 * columns ("Attribute", "Value") and one data row per map entry.
 *
 * @param map       ordered attribute-name -> value pairs to display
 * @param tableName display name of the table
 * @return the assembled JSON object
 */
private ObjectNode wrapAttributeValueDataForBootstrapTable(Map<String, Long> map, String tableName) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode node = mapper.createObjectNode();
    // Column metadata: dataField keys must match the row field names below.
    ArrayNode columns = mapper.createArrayNode();
    ObjectNode columnMeta = mapper.createObjectNode();
    columnMeta.put("dataField", "attr");
    columnMeta.put("text", "Attribute");
    columns.add(columnMeta);
    columnMeta = mapper.createObjectNode();
    columnMeta.put("dataField", "value");
    columnMeta.put("text", "Value");
    columns.add(columnMeta);
    // One row per map entry, preserving the map's iteration order.
    ArrayNode data = mapper.createArrayNode();
    for (Map.Entry<String, Long> entry : map.entrySet()) {
        ObjectNode row = mapper.createObjectNode();
        row.put("attr", entry.getKey());
        row.put("value", entry.getValue());
        data.add(row);
    }
    // The original set "name" and "columns" twice (before and after the loop);
    // setting each field once yields identical JSON. 'set' is used instead of
    // 'putPOJO' since the children are already JsonNodes.
    node.put("name", tableName);
    node.set("columns", columns);
    node.set("data", data);
    return node;
}
/**
 * Request payload shared by the evaluation endpoints: the target database
 * environment plus the inputs for a specific evaluation approach. Lombok
 * {@code @Data} generates getters, setters, equals/hashCode, and toString.
 */
@Data
public static class DBEnvWithInput {
// Connection parameters of the target database server.
private DBEnvironment dbEnv;
// Name of the schema (database) to operate on.
private String schemaName;
// The query to evaluate, written in the language given by queryLanguage.
private String querySyntax;
// Language identifier for querySyntax.
private String queryLanguage;
// Pre-computed ConQuer SQL rewriting of the query (used by /run-conquer-rewriting).
private String conQuerSQLRewriting;
// Pre-computed Koutris-Wijsen SQL rewriting of the query (used by /run-kw-rewriting).
private String kwSQLRewriting;
}
}
|
class RecruitmentManager:
    """Tracks recruitment contacts grouped by country."""

    def __init__(self):
        # Countries in the order their first contact was registered.
        self.countries_of_recruitment = []
        # Maps country name -> list of {'name': ..., 'email': ...} dicts.
        self.contacts = {}

    def add_contact(self, country, name, email):
        """Register a contact, creating the country entry on first use."""
        if country not in self.contacts:
            self.countries_of_recruitment.append(country)
            self.contacts[country] = []
        self.contacts[country].append({'name': name, 'email': email})

    def get_contacts(self, country):
        """Return the contact list for ``country``, or [] if unknown."""
        return self.contacts.get(country, [])
# Example usage: populate the manager and show lookups for known and
# unknown countries.
manager = RecruitmentManager()
for _country, _name, _email in [
    ('USA', 'John Doe', 'john@example.com'),
    ('USA', 'Jane Smith', 'jane@example.com'),
    ('Canada', 'Alex Johnson', 'alex@example.com'),
]:
    manager.add_contact(_country, _name, _email)

print(manager.get_contacts('USA'))     # both USA contacts, insertion order
print(manager.get_contacts('Canada'))  # single Canadian contact
print(manager.get_contacts('UK'))      # unknown country -> []
/*******************************************************************************
* Copyright 2016
* Ubiquitous Knowledge Processing (UKP) Lab
 * Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.main;
import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDescription;
import static org.apache.uima.fit.factory.CollectionReaderFactory.createReaderDescription;
import static org.apache.uima.fit.factory.ExternalResourceFactory.createExternalResourceDescription;
import static org.apache.uima.fit.pipeline.SimplePipeline.runPipeline;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.fit.factory.AggregateBuilder;
import org.apache.uima.resource.ExternalResourceDescription;
import de.tudarmstadt.ukp.dkpro.core.api.resources.ResourceObjectProviderBase;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
//import de.tudarmstadt.ukp.dkpro.core.io.xmi.XmiWriter;
import de.tudarmstadt.ukp.dkpro.core.matetools.MateLemmatizer;
import de.tudarmstadt.ukp.dkpro.core.matetools.MatePosTagger;
import de.tudarmstadt.ukp.dkpro.core.opennlp.OpenNlpPosTagger;
import de.tudarmstadt.ukp.dkpro.core.opennlp.OpenNlpSegmenter;
import de.tudarmstadt.ukp.dkpro.core.snowball.SnowballStemmer;
import de.tudarmstadt.ukp.dkpro.core.stopwordremover.StopWordRemover;
import de.tudarmstadt.ukp.dkpro.core.languagetool.LanguageToolLemmatizer;
import de.tudarmstadt.ukp.dkpro.core.languagetool.LanguageToolSegmenter;
import de.tudarmstadt.ukp.dkpro.wsd.annotator.WSDAnnotatorContextPOS;
import de.tudarmstadt.ukp.dkpro.wsd.annotator.WSDAnnotatorIndividualPOS;
import de.tudarmstadt.ukp.dkpro.wsd.lesk.resource.WSDResourceSimplifiedLesk;
import de.tudarmstadt.ukp.dkpro.wsd.lesk.util.normalization.NoNormalization;
import de.tudarmstadt.ukp.dkpro.wsd.lesk.util.overlap.PairedOverlap;
import de.tudarmstadt.ukp.dkpro.wsd.resource.WSDResourceIndividualPOS;
import de.tudarmstadt.ukp.dkpro.wsd.si.uby.resource.UbySenseInventoryResource;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.annotator.OwnDictionaryAnnotator;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.annotator.SynonymAnnotator;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.annotator.SemanticFieldAnnotator;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.annotator.WSDItemAnnotator;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.lesk.util.tokenization.EnglishStopLemmatizer;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.lesk.util.tokenization.GermanStopLemmatizer;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.lesk.util.tokenization.TokenCleaner;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.reader.CthReader;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.writer.SQLWriter;
import de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.wsd.algorithm.MostFrequentSenseBaseline;
import de.tudarmstadt.ukp.uby.resource.UbyResource;
/**
 * Main-Class. Starts the UIMA pipeline: reads cuneiform (CTH) documents,
 * runs segmentation, POS tagging, lemmatization, stop-word removal,
 * dictionary/synonym/semantic-field annotation and word sense disambiguation
 * (most-frequent-sense baseline plus simplified Lesk against UBY), and
 * finally writes the results via {@code SQLWriter}.
 *
 * @author <NAME>
 *
 */
public class RunPipeline {
    /**
     * Path to header/title file.
     */
    public static final String HEADER_FILE_PATH = "src/test/resources/headers.txt";
    /**
     * The Output Destination.
     */
    public static final String OUTPUT_DESTINATION = "target/";
    /**
     * The Location of the Input-Files.
     */
    public static final String INPUT_LOCATION = "src/test/resources/docs/";
    /**
     * The Location of the Text-File which contains the files to be ignored.
     * Empty string means: no files are skipped.
     */
    public static final String ALREADY_PROCESSED = "";
    /**
     * Location of the alternative writing dictionary file.
     */
    public static final String ALTERNATIVE_WRITING_DATASET = "src/main/resources/alternativeSpellingDict.txt";
    /**
     * Location of the hypernyms dictionary file.
     */
    public static final String HYPERNYMS_DATASET = "src/main/resources/hypernymDict.txt";
    /**
     * The Language of the Input-Files. Supported Languages: German (de),
     * English (en), French (fr), Italian (it)
     */
    public static final String INPUT_FILE_LANGUAGE = "de";
    /**
     * Location of the language specific stop-word list; English list for "en",
     * German list otherwise.
     */
    public static final String STOP_WORD_LIST_LOCATION = (INPUT_FILE_LANGUAGE.equals("en")) ? "src/main/resources/stopwords/stoplist_en.txt" : "src/main/resources/stopwords/stoplist_de.txt" ;
    /**
     * The version of the transliteration which should be mapped to the translation.
     * For instance you can use "--" for the Master-Transliteration, "-het" for
     * Hittite and "-akk" for the Akkadian transliteration.
     */
    public static final String TRANSLITERATION_VERSION = "--";
    /**
     * The name of the transliteration view.
     */
    public static final String TRANSLITERATION_VIEW_NAME = "_transliteration";

    /* MySQL database holding the UBY lexical resource */
    public static final String DB_URL = "jdbc:mysql://localhost:3306/uby_open_0_7_0";
    public static final String DB_DRIVER = "com.mysql.jdbc.Driver";
    public static final String DB_DRIVER_NAME = "mysql";
    public static final String DB_USERNAME = "root";
    public static final String DB_PASSWORD = "";

    /* H2 embedded database (alternative UBY backend, kept for reference) */
    // public static final String DB_URL =
    // "jdbc:h2:file:C:/Users/DAR/Downloads/ubymedium070";
    // public static final String DB_DRIVER = "org.h2.Driver";
    // public static final String DB_DRIVER_NAME = "h2";
    // public static final String DB_USERNAME = "sa";
    // public static final String DB_PASSWORD = "";

    /**
     * Builds the aggregate analysis engine and runs the pipeline.
     *
     * @param args unused
     * @throws Exception if reading, processing or writing fails
     */
    public static void main(String[] args) throws Exception {
        AggregateBuilder builder = new AggregateBuilder();

        // Second View for the Transliteration
        //builder.add(tokenizer, CAS.NAME_DEFAULT_SOFA, OdtParser.TRANSLITERATION_VIEW_NAME);

        // Description of the reader component
        CollectionReaderDescription reader = createReaderDescription(CthReader.class,
                CthReader.PARAM_HEADER_FILE_PATH, HEADER_FILE_PATH,
                CthReader.PARAM_SOURCE_LOCATION, INPUT_LOCATION,
                CthReader.PARAM_LANGUAGE, INPUT_FILE_LANGUAGE,
                CthReader.PARAM_TRANSLITERATION_VERSION, TRANSLITERATION_VERSION,
                CthReader.PARAM_TRANSLITERATION_VIEW_NAME, TRANSLITERATION_VIEW_NAME,
                CthReader.PARAM_PROCESSED_FILE, ALREADY_PROCESSED);

        // Preprocessing: German/English use OpenNLP segmentation/POS tagging
        // and the Mate lemmatizer; other languages use LanguageTool components.
        if (INPUT_FILE_LANGUAGE.matches("de|en")) {
            AnalysisEngineDescription tokenizer = createEngineDescription(OpenNlpSegmenter.class);
            builder.add(tokenizer);
            AnalysisEngineDescription tokenCleaner = createEngineDescription(TokenCleaner.class);
            builder.add(tokenCleaner);
            AnalysisEngineDescription posTagger = createEngineDescription(OpenNlpPosTagger.class);
            builder.add(posTagger);
            AnalysisEngineDescription lemmatizer = createEngineDescription(MateLemmatizer.class);
            builder.add(lemmatizer);
        } else {
            AnalysisEngineDescription tokenizer = createEngineDescription(LanguageToolSegmenter.class);
            builder.add(tokenizer);
            AnalysisEngineDescription tokenCleaner = createEngineDescription(TokenCleaner.class);
            builder.add(tokenCleaner);
            // Italian falls back to OpenNLP; other non-de/en languages use Mate.
            AnalysisEngineDescription posTagger = (INPUT_FILE_LANGUAGE.equals("it")) ? createEngineDescription(OpenNlpPosTagger.class) : createEngineDescription(MatePosTagger.class);
            builder.add(posTagger);
            AnalysisEngineDescription lemmatizer = createEngineDescription(LanguageToolLemmatizer.class);
            builder.add(lemmatizer);
        }

        // Project-specific dictionaries: alternative spellings and hypernyms.
        AnalysisEngineDescription ownDictAnnotator = createEngineDescription(OwnDictionaryAnnotator.class,
                OwnDictionaryAnnotator.PARAM_ALTERNATIVE_SPELLING_MODEL_LOCATION, ALTERNATIVE_WRITING_DATASET,
                OwnDictionaryAnnotator.PARAM_HYPERNYM_MODEL_LOCATION, HYPERNYMS_DATASET);
        builder.add(ownDictAnnotator);

        // WSD components are only wired up for English and German.
        if (INPUT_FILE_LANGUAGE.matches("en|de")) {
            AnalysisEngineDescription stopWordRemover = createEngineDescription(StopWordRemover.class,
                    StopWordRemover.PARAM_MODEL_LOCATION,
                    new String[] { STOP_WORD_LIST_LOCATION });
            builder.add(stopWordRemover);
            AnalysisEngineDescription wsdItemAnnotator = createEngineDescription(WSDItemAnnotator.class,
                    WSDItemAnnotator.PARAM_LANGUAGE, INPUT_FILE_LANGUAGE);
            builder.add(wsdItemAnnotator);

            // Create a UBY resource
            ExternalResourceDescription ubyResource = createExternalResourceDescription(UbyResource.class,
                    UbyResource.PARAM_MODEL_LOCATION, ResourceObjectProviderBase.NOT_REQUIRED, UbyResource.PARAM_URL,
                    DB_URL, UbyResource.PARAM_DRIVER, DB_DRIVER, UbyResource.PARAM_DIALECT, DB_DRIVER_NAME,
                    UbyResource.PARAM_USERNAME, DB_USERNAME, UbyResource.PARAM_PASSWORD, DB_PASSWORD);

            // Create a UBY Sense Inventory
            final String ubyInventoryName = "ubySenseInventory";
            ExternalResourceDescription ubyInventory = createExternalResourceDescription(
                    UbySenseInventoryResource.class, UbySenseInventoryResource.PARAM_UBY_DATABASE_URL, DB_URL,
                    UbySenseInventoryResource.PARAM_UBY_JDBC_DRIVER_CLASS, DB_DRIVER,
                    UbySenseInventoryResource.PARAM_UBY_DB_VENDOR, DB_DRIVER_NAME,
                    UbySenseInventoryResource.PARAM_UBY_USER, DB_USERNAME,
                    UbySenseInventoryResource.PARAM_UBY_PASSWORD, DB_PASSWORD,
                    UbySenseInventoryResource.PARAM_SENSE_INVENTORY_NAME, ubyInventoryName);

            // Create a resource for the most frequent sense baseline
            ExternalResourceDescription mfsBaselineResource = createExternalResourceDescription(
                    WSDResourceIndividualPOS.class, WSDResourceIndividualPOS.SENSE_INVENTORY_RESOURCE, ubyInventory,
                    WSDResourceIndividualPOS.DISAMBIGUATION_METHOD, MostFrequentSenseBaseline.class.getName());

            // Create an annotator for the MFS baseline
            AnalysisEngineDescription mfsBaseline = createEngineDescription(WSDAnnotatorIndividualPOS.class,
                    WSDAnnotatorIndividualPOS.WSD_ALGORITHM_RESOURCE, mfsBaselineResource,
                    WSDAnnotatorIndividualPOS.PARAM_DISAMBIGUATION_METHOD_NAME, "MFS");
            builder.add(mfsBaseline);

            // Simplified Lesk with a language-specific stop-lemmatizer.
            String tokenizationStrategyClassName = (INPUT_FILE_LANGUAGE.equals("de"))
                    ? GermanStopLemmatizer.class.getName() : EnglishStopLemmatizer.class.getName();
            ExternalResourceDescription simplifiedLeskResource = createExternalResourceDescription(
                    WSDResourceSimplifiedLesk.class, WSDResourceSimplifiedLesk.SENSE_INVENTORY_RESOURCE, ubyInventory,
                    WSDResourceSimplifiedLesk.PARAM_NORMALIZATION_STRATEGY, NoNormalization.class.getName(),
                    WSDResourceSimplifiedLesk.PARAM_OVERLAP_STRATEGY, PairedOverlap.class.getName(),
                    WSDResourceSimplifiedLesk.PARAM_TOKENIZATION_STRATEGY, tokenizationStrategyClassName);
            AnalysisEngineDescription simplifiedLesk = createEngineDescription(WSDAnnotatorContextPOS.class,
                    WSDAnnotatorContextPOS.WSD_METHOD_CONTEXT, simplifiedLeskResource,
                    WSDAnnotatorContextPOS.PARAM_CONTEXT_ANNOTATION, Sentence.class.getName(),
                    WSDAnnotatorContextPOS.PARAM_NORMALIZE_CONFIDENCE, true,
                    WSDAnnotatorContextPOS.PARAM_DISAMBIGUATION_METHOD_NAME, "Lesk");
            builder.add(simplifiedLesk);

            //Synonyms
            AnalysisEngineDescription synonymsAnnotator = createEngineDescription(SynonymAnnotator.class,
                    SynonymAnnotator.RES_UBY, ubyResource);
            builder.add(synonymsAnnotator);

            //Semantic Fields
            AnalysisEngineDescription semanticFieldAnnotator = createEngineDescription(SemanticFieldAnnotator.class,
                    SemanticFieldAnnotator.RES_UBY, ubyResource);
            builder.add(semanticFieldAnnotator);
        }

        //Snowball Stemmer
        AnalysisEngineDescription stemmer = createEngineDescription(SnowballStemmer.class, SnowballStemmer.PARAM_LANGUAGE, INPUT_FILE_LANGUAGE);
        builder.add(stemmer);

        // WRITER
        AnalysisEngineDescription sqlWriter = createEngineDescription(SQLWriter.class,
                SQLWriter.PARAM_ALTERNATIVE_SPELLING_MODEL_LOCATION, ALTERNATIVE_WRITING_DATASET,
                SQLWriter.PARAM_HYPERNYM_MODEL_LOCATION, HYPERNYMS_DATASET, SQLWriter.PARAM_TARGET_LOCATION,
                OUTPUT_DESTINATION);
        builder.add(sqlWriter);

        // Alternative XMI Writer for debugging purposes
        // AnalysisEngineDescription writer = createEngineDescription(XmiWriter.class, XmiWriter.PARAM_TARGET_LOCATION, OUTPUT_DESTINATION);
        // builder.add(writer);

        // Run the Pipeline!
        runPipeline(reader, builder.createAggregateDescription());
    }
}
import {
Access,
Accesses,
Engine,
Field,
FieldSubType,
FieldType,
GeneratorResult,
Input,
Model,
StringVariations,
Template,
} from '@hapify/generator/dist/interfaces';
// Export types
export { Access, Engine, FieldSubType, FieldType, Input };

/** Alias interface over the generator's StringVariations type. */
export interface IStringVariants extends StringVariations {}
/** Alias interface over the generator's GeneratorResult type. */
export interface IGeneratorResult extends GeneratorResult {}
/** Alias interface over the generator's Accesses type. */
export interface IAccesses extends Accesses {}
/** Generator Model, extended with optional notes and typed fields. */
export interface IModel extends Model {
	/** The model's notes */
	notes?: string;
	/** The fields of the model */
	fields: IField[];
}
/** Generator Field, extended with optional notes. */
export interface IField extends Field {
	/** The field's notes */
	notes?: string;
}
/** Alias interface over the generator's Template type. */
export interface ITemplate extends Template {}
|
//
// RAPThreadDataSource.h
// redditAPI
//
// Created by Woudini on 2/27/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Configuration blocks invoked per dequeued cell: one for the topic row,
// one for a comment row (which additionally receives its index path).
typedef void (^TableViewCellTopicBlock)(id cell, id item);
typedef void (^TableViewCellCommentBlock)(id cell, id item, id indexPath);

/**
 * Table view data source/delegate for a Reddit thread. Holds the items and
 * delegates per-cell configuration to the blocks passed at init time.
 */
@interface RAPThreadDataSource : NSObject <UITableViewDataSource, UITableViewDelegate>

- (id)initWithItems:(NSArray *)anItems
     cellIdentifier:(NSString *)aCellIdentifier
     topicCellBlock:(TableViewCellTopicBlock)aTopicCellBlock
   commentCellBlock:(TableViewCellCommentBlock)aCommentCellBlock;
// Returns the backing item for the given index path.
- (id)itemAtIndexPath:(NSIndexPath *)indexPath;

@end
|
from kafka import KafkaProducer
import time
def process_temperature_message(message: bytes, output_topic: str) -> None:
    """Parse a raw temperature reading, alarm on high values, and republish.

    The payload is expected to be UTF-8 text of the form
    ``"<id> <temperature>"`` (two space-separated tokens) -- inferred from
    the parsing below; confirm against the producing side.
    """
    # NOTE(review): a new KafkaProducer is constructed on every call; if this
    # runs once per message, consider reusing a module-level producer.
    producer = KafkaProducer(bootstrap_servers=['127.0.0.1:9092'])
    s = message.decode('utf-8')
    temp = s.split(' ')
    # Second token is the temperature value.
    temperature = int(temp[1])
    print("Fire alarm algorithm receiving", s)
    if temperature > 200:
        print('Fire Alarm: Temperature exceeds 200')
    # NOTE(review): the reading is republished unconditionally, not only when
    # the alarm fires -- confirm this is intentional.
    msg = f"{temp[0]} {temperature}"
    producer.send(output_topic, bytes(msg, "utf-8"))
    producer.flush()
    # Presumably throttles processing to one message per 5 seconds -- confirm.
    time.sleep(5)
import java.util.Random;
/**
 * Demo program: prints a uniformly distributed random integer in [1, 50].
 */
public class RandomNumberBetween
{
    /**
     * Returns a uniformly distributed random integer in the inclusive range
     * [min, max]. Extracted from main so the range logic is reusable and
     * testable with a seeded Random.
     *
     * @param min lower bound (inclusive)
     * @param max upper bound (inclusive); must be {@code >= min}
     * @param random the source of randomness
     * @return a value v with {@code min <= v <= max}
     */
    static int randomBetween(int min, int max, Random random)
    {
        // nextInt(bound) yields [0, bound), so shift by min to cover [min, max].
        return min + random.nextInt(max - min + 1);
    }

    public static void main(String[] args)
    {
        Random random = new Random();
        int randomNumber = randomBetween(1, 50, random);
        System.out.println("Generated random number between 1 and 50 is: " + randomNumber);
    }
}
<filename>src/templates/help.js
// Help text shown in the editor. This is a runtime string (rendered to the
// user), so its layout is intentional. Fixed a mojibake apostrophe
// ("Donโt" -> "Don't") introduced by a UTF-8 encoding mix-up.
export default `
/*
ShaderScribble
===========
by Surma (twitter.com/DasSurma)
Options:
--------
- name=<name>: Load scratchpad with the
given name
- norun: Don't start the rendering loop
- help: Discard any stored data and show this
- boilerplate: Discard any stored data and
load a semi-sane boilerplate
- flip: Split horizontally
Example:
https://shaderscribble.surma.technology/#norun,boilerplate,flip
GLSL Cheat sheet:
-----------------
https://www.khronos.org/files/webgl20-reference-guide.pdf
-------------------
Code at github.com/surma/shaderscribble
Apache 2.0
*/
`;
|
<reponame>szab100/secmgr
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.ntlmssp;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import junit.framework.TestCase;
/**
 * Unit tests to ensure that NTLM messages are properly encoded.
 *
 * <p>Covers range validation and little-endian byte layout of the 8-, 16-
 * and 32-bit writers, raw byte writing, and payload encoding.
 */
public final class NtlmMessageEncoderTest extends TestCase {
  /**
   * Byte length of an encoded payload header: two 16-bit fields followed by
   * one 32-bit field (see {@link #checkPayloadHeader}).
   */
  private static final int PAYLOAD_HEADER_LENGTH = 8;
  private static final byte[] EMPTY = new byte[0];
  /** Reference byte sequence shared by several encoding tests. */
  private static final byte[] EXPECTED =
      new byte[] { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 };

  /** write8 must accept unsigned 8-bit values [0, 255] and reject the rest. */
  public void testWrite8Ranges() {
    tryGoodRangeWrite8(0);
    tryGoodRangeWrite8(1);
    tryGoodRangeWrite8(127);
    tryGoodRangeWrite8(128);
    tryGoodRangeWrite8(255);
    tryBadRangeWrite8(256);
    tryBadRangeWrite8(-1);
    tryBadRangeWrite8(-128);
    tryBadRangeWrite8(-129);
  }

  /** Asserts that write8 accepts {@code n} without throwing. */
  private void tryGoodRangeWrite8(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    try {
      encoder.write8(n);
    } catch (UnsupportedEncodingException e) {
      fail("Method should not have signalled exception");
    }
  }

  /** Asserts that write8 rejects the out-of-range value {@code n}. */
  private void tryBadRangeWrite8(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    try {
      encoder.write8(n);
      fail("Method should have signalled exception");
    } catch (UnsupportedEncodingException e) {
      // pass
    }
  }

  /** write16 must accept unsigned 16-bit values [0, 0xffff] and reject the rest. */
  public void testWrite16Ranges() {
    tryGoodRangeWrite16(0);
    tryGoodRangeWrite16(1);
    tryGoodRangeWrite16(0x7fff);
    tryGoodRangeWrite16(0x8000);
    tryGoodRangeWrite16(0xffff);
    tryBadRangeWrite16(0x10000);
    tryBadRangeWrite16(-1);
    tryBadRangeWrite16(-0x7fff);
    tryBadRangeWrite16(-0x8000);
    tryBadRangeWrite16(-0x8001);
  }

  /** Asserts that write16 accepts {@code n} without throwing. */
  private void tryGoodRangeWrite16(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    try {
      encoder.write16(n);
    } catch (UnsupportedEncodingException e) {
      fail("Method should not have signalled exception");
    }
  }

  /** Asserts that write16 rejects the out-of-range value {@code n}. */
  private void tryBadRangeWrite16(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    try {
      encoder.write16(n);
      fail("Method should have signalled exception");
    } catch (UnsupportedEncodingException e) {
      // pass
    }
  }

  /**
   * write32 accepts any int value (no range check is possible: all 2^32 bit
   * patterns are valid); note 0x80000000 and 0xffffffff are negative ints.
   */
  public void testWrite32Ranges() {
    tryGoodRangeWrite32(0);
    tryGoodRangeWrite32(1);
    tryGoodRangeWrite32(0x7fffffff);
    tryGoodRangeWrite32(0x80000000);
    tryGoodRangeWrite32(0xffffffff);
    tryGoodRangeWrite32(-1);
    tryGoodRangeWrite32(-0x7fffffff);
    tryGoodRangeWrite32(-0x80000000);
  }

  /** Asserts that write32 accepts {@code n} (it declares no checked exception). */
  private void tryGoodRangeWrite32(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.write32(n);
  }

  /** A written 8-bit value must come back as exactly one byte. */
  public void testWrite8Encoding()
      throws UnsupportedEncodingException {
    tryWrite8Encoding(0);
    tryWrite8Encoding(1);
    tryWrite8Encoding(0x7f);
    tryWrite8Encoding(0x80);
    tryWrite8Encoding(0xff);
  }

  private void tryWrite8Encoding(int n)
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.write8(n);
    byte[] bytes = encoder.getBytes();
    assertEquals(1, bytes.length);
    // Mask to compare as unsigned.
    assertEquals(n, bytes[0] & 0xff);
  }

  /** A written 16-bit value must be encoded little-endian in two bytes. */
  public void testWrite16Encoding()
      throws UnsupportedEncodingException {
    tryWrite16Encoding(0);
    tryWrite16Encoding(1);
    tryWrite16Encoding(0x7fff);
    tryWrite16Encoding(0x8000);
    tryWrite16Encoding(0xffff);
  }

  private void tryWrite16Encoding(int n)
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.write16(n);
    byte[] bytes = encoder.getBytes();
    assertEquals(2, bytes.length);
    // Little-endian: low byte first.
    assertEquals(n & 0xff, bytes[0] & 0xff);
    assertEquals((n >> 8) & 0xff, bytes[1] & 0xff);
  }

  /** A written 32-bit value must be encoded little-endian in four bytes. */
  public void testWrite32Encoding() {
    tryWrite32Encoding(0);
    tryWrite32Encoding(1);
    tryWrite32Encoding(0x7fffffff);
    tryWrite32Encoding(0x80000000);
    tryWrite32Encoding(0xffffffff);
  }

  private void tryWrite32Encoding(int n) {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.write32(n);
    byte[] bytes = encoder.getBytes();
    assertEquals(4, bytes.length);
    // Little-endian: least significant byte first.
    assertEquals(n & 0xff, bytes[0] & 0xff);
    assertEquals((n >> 8) & 0xff, bytes[1] & 0xff);
    assertEquals((n >> 16) & 0xff, bytes[2] & 0xff);
    assertEquals((n >> 24) & 0xff, bytes[3] & 0xff);
  }

  /** Mixed-width writes must concatenate into the expected byte sequence. */
  public void testWriteIntCombo()
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.write8(0x01);
    encoder.write16(0x0302);
    encoder.write32(0x07060504);
    byte[] encoded = encoder.getBytes();
    assertTrue(Arrays.equals(EXPECTED, encoded));
  }

  /** writeBytes(null) must write nothing. */
  public void testWriteBytesNull() {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writeBytes(null);
    byte[] encoded = encoder.getBytes();
    assertEquals(0, encoded.length);
  }

  /** writeBytes with an empty array must write nothing. */
  public void testWriteBytesEmpty() {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writeBytes(EMPTY);
    byte[] encoded = encoder.getBytes();
    assertEquals(0, encoded.length);
  }

  /** writeBytes must copy the input verbatim. */
  public void testWriteBytesSimple() {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writeBytes(EXPECTED);
    byte[] encoded = encoder.getBytes();
    assertTrue(Arrays.equals(EXPECTED, encoded));
  }

  /** A null payload must produce just a header describing zero bytes. */
  public void testWritePayloadNull()
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writePayload(null);
    byte[] encoded = encoder.getBytes();
    assertEquals(PAYLOAD_HEADER_LENGTH, encoded.length);
    checkPayloadHeader(0, 0, PAYLOAD_HEADER_LENGTH, encoded, 0);
  }

  /** An empty payload must be encoded the same way as a null payload. */
  public void testWritePayloadEmpty()
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writePayload(EMPTY);
    byte[] encoded = encoder.getBytes();
    assertEquals(PAYLOAD_HEADER_LENGTH, encoded.length);
    checkPayloadHeader(0, 0, PAYLOAD_HEADER_LENGTH, encoded, 0);
  }

  /** A payload must be encoded as header (length/length/offset) then data. */
  public void testWritePayloadSimple()
      throws UnsupportedEncodingException {
    NtlmMessageEncoder encoder = NtlmMessageEncoder.make();
    encoder.writePayload(EXPECTED);
    byte[] encoded = encoder.getBytes();
    assertEquals(PAYLOAD_HEADER_LENGTH + EXPECTED.length, encoded.length);
    checkPayloadHeader(EXPECTED.length, EXPECTED.length, PAYLOAD_HEADER_LENGTH, encoded, 0);
    checkBytes(EXPECTED, encoded, PAYLOAD_HEADER_LENGTH);
  }

  /**
   * Verifies a payload header at {@code offset}: {@code j} and {@code k} as
   * little-endian 16-bit values, then {@code l} as a little-endian 32-bit value.
   */
  private void checkPayloadHeader(int j, int k, int l, byte[] actual, int offset) {
    assertEquals(j & 0xff, actual[offset++]);
    assertEquals((j >> 8) & 0xff, actual[offset++]);
    assertEquals(k & 0xff, actual[offset++]);
    assertEquals((k >> 8) & 0xff, actual[offset++]);
    assertEquals(l & 0xff, actual[offset++]);
    assertEquals((l >> 8) & 0xff, actual[offset++]);
    assertEquals((l >> 16) & 0xff, actual[offset++]);
    assertEquals((l >> 24) & 0xff, actual[offset++]);
  }

  /** Verifies that {@code actual} contains {@code expected} starting at {@code offset}. */
  private void checkBytes(byte[] expected, byte[] actual, int offset) {
    for (byte b : expected) {
      assertEquals(b, actual[offset++]);
    }
  }
}
|
#!/usr/bin/env bash
set -eu -o pipefail

# try to test all possible different cases:
# - cloned repo vs. archive
# - tags vs. no tags in history
# - tagged commit vs. sometime after
# - on branch tip vs. detached head

# Formatting overrides consumed by version.sh itself.
export ALWAYS_LONG_VERSION="y"
export REVISION_SEPARATOR=" r"
export HASH_SEPARATOR=" "
export DIRTY_MARKER=" dirty"

# Print the version string twice for visual comparison: once via a
# `git archive` export (export-subst path) and once from the live checkout.
compare() {
    echo -e "\033[32m> $1\033[0m"
    git archive HEAD | tar -x --to-stdout version.sh | bash -
    bash version.sh 2>/dev/null
}

pwd=$PWD
# Default to testing ./version.sh when no script paths are given as args.
for script in "${@-version.sh}"; do
    echo -e "\033[1;31m--- TESTING: $script ---\033[0m"

    # create a temporary directory and init a repository
    tmp=$(mktemp -d -p /tmp version.sh-test-XXXXXXX)
    cd "$tmp"
    git init . >/dev/null
    git checkout -b main -q
    git config --local advice.detachedHead false >/dev/null
    git config --local user.name testscript
    git config --local user.email "git@$HOSTNAME"

    # copy the script and create a few commits
    cp "$pwd/$script" version.sh
    # export-subst makes `git archive` expand $Format:...$ placeholders.
    echo "version.sh export-subst" > .gitattributes
    git add version.sh .gitattributes
    git commit -m init >/dev/null
    git commit --allow-empty -m one >/dev/null
    git commit --allow-empty -m two >/dev/null
    git commit --allow-empty -m three >/dev/null
    git log --pretty=oneline | cat

    # begin comparisons
    compare "no tags, branch tip"
    git checkout -q HEAD~1
    compare "no tags, detached HEAD"
    git checkout -q main
    git tag -a -m tagged 0.1 HEAD~2
    compare "tagged ~2, branch tip"
    git checkout -q HEAD~1
    compare "tagged ~1, detached HEAD"
    git checkout -q HEAD~1
    compare "tagged, detached on tag"
    git checkout -q main
    git tag -a -m tagged 0.2 HEAD
    compare "tagged, branch tip"
    # Appending a newline makes the worktree dirty without changing behavior.
    echo >> version.sh
    compare "tagged, branch tip, dirty"

    # clean up before next script
    cd /tmp && rm -rf "$tmp"
done
|
// Test bootstrap: wire Enzyme to the React 16 adapter, then eagerly require
// every module under ./tests so the bundler includes all test files.
import { configure } from 'enzyme'
import Adapter from 'enzyme-adapter-react-16'

configure({adapter: new Adapter()})

// Webpack's require.context: recursively collect all .js files in ./tests
// and require each one (context itself is the require function).
let context = require.context('./tests', true, /\.js$/)
context.keys().forEach(context)
import * as State from '../system/state';
import * as Util from '../system/util';
import * as Graphics from '../graphics';
// Placeholder: convert a Tiled editor export into the internal map data
// format. Not implemented yet -- currently returns undefined, but it is
// only reachable when isTiledEditorMap is true (which is hard-coded false).
function convertDataFromTiledEditor (data) {
    // do stuff
}
/**
 * A tile map whose layers are pre-rendered to off-screen canvases so each
 * layer can be drawn with a single call per frame.
 */
export default class TileMap {
    // True once load() has completed and layer textures are cached.
    loaded = false;
    // One pre-rendered Graphics.Texture per map layer (filled by load()).
    layers = [ ];
    // Index into `layers` of the layer named by properties.eventLayer.
    playLayer = 0;

    /**
     * Whether `data` is a raw Tiled editor export needing conversion.
     * Currently hard-coded to false (conversion not implemented).
     */
    get isTiledEditorMap ( ) {
        return false;
    }

    /**
     * @param data - map definition object, or (TODO, see load()) the path
     *     of a JSON file to load it from.
     */
    constructor ( data ) {
        this.data = data;
    }

    /**
     * Pre-renders one layer onto an off-screen canvas and wraps the result
     * in a texture.
     *
     * @param tileset - tileset atlas image, drawable by drawImage()
     * @param layer - layer object with a `tiles` array; each tile carries
     *     destination x/y and a `src` offset into the atlas
     * @returns a Graphics.Texture backed by the cached canvas
     */
    cacheLayer ( tileset, layer ) {
        const { width, height, gridSize } = this.data.properties;
        const cachedLayer = document.createElement( 'canvas' );
        const ctx = cachedLayer.getContext( '2d' );
        cachedLayer.width = width;
        cachedLayer.height = height;
        for ( let i = 0, count = layer.tiles.length; i < count; i++ ) {
            const tile = layer.tiles[ i ];
            // Copy one gridSize x gridSize cell from the atlas to the map.
            ctx.drawImage( tileset,
                tile.src.x, tile.src.y, gridSize, gridSize,
                tile.x, tile.y, gridSize, gridSize );
        }
        return new Graphics.Texture( cachedLayer );
    }

    /**
     * Loads the tileset, caches every layer, and resolves the play layer.
     * @returns this (for chaining)
     */
    async load ( ) {
        if ( typeof this.data === 'string' ) {
            // TODO Util.load JSON file
        }
        if ( this.isTiledEditorMap ) {
            this.data = convertDataFromTiledEditor( this.data );
        }
        const { layers, events, blocks, properties } = this.data;
        const { width, height, gridSize, eventLayer } = properties;
        // findIndex replaces the original indexOf(find(...)) round-trip;
        // identical result, including -1 when no layer matches eventLayer.
        const playLayer = layers.findIndex( x => x.name === eventLayer );
        const tileset = await Util.load( properties.tileset );
        const tileLayers = layers.map( layer => this.cacheLayer( tileset, layer ) );
        this.playLayer = playLayer;
        this.layers = tileLayers;
        this.loaded = true;
        return this;
    }

    /** Lazily loads the map, then installs it as the active map. */
    async use ( ) {
        if ( !this.loaded ) {
            await this.load( );
        }
        State.internal.map = this;
    }

    /** Per-frame: clip every cached layer to the current camera viewport. */
    update ( time ) {
        const { camera, canvas } = State.internal;
        for ( let i = 0, count = this.layers.length; i < count; i++ ) {
            this.layers[ i ].clip = {
                x: camera.x,
                y: camera.y,
                w: canvas.width,
                h: canvas.height
            };
        }
    }
}
<filename>lang/py/pylib/code/math/math_gamma.py
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 <NAME>. All rights reserved.
#
"""Factorial
"""
#end_pymotw_header
# NOTE: Python 2 code (print statement, comma-style except clause).
import math

# math.gamma() raises ValueError at the poles of the gamma function
# (zero and the negative integers); the 0 below exercises the error path,
# the non-integer values the success path.
for i in [ 0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6 ]:
    try:
        print '{:2.1f} {:6.2f}'.format(i, math.gamma(i))
    except ValueError, err:
        print 'Error computing gamma(%s):' % i, err
|
# Train the AttentionLSTM model using ./configs/attention_lstm.txt for 10
# epochs, validating every epoch and logging every 10 intervals.
python train.py --model-name="AttentionLSTM" --config=./configs/attention_lstm.txt --epoch-num=10 \
    --valid-interval=1 --log-interval=10
|
#!/usr/bin/env bats -*- bats -*-
# shellcheck disable=SC2096
#
# Tests for podman build
#
load helpers
# Smoke test: build an image that bakes in a random file, then confirm the
# file's content survives into a running container.
@test "podman build - basic test" {
    rand_filename=$(random_string 20)
    rand_content=$(random_string 50)

    tmpdir=$PODMAN_TMPDIR/build-test
    mkdir -p $tmpdir
    dockerfile=$tmpdir/Dockerfile
    cat >$dockerfile <<EOF
FROM $IMAGE
RUN apk add nginx
RUN echo $rand_content > /$rand_filename
EOF

    # The 'apk' command can take a long time to fetch files; bump timeout
    PODMAN_TIMEOUT=240 run_podman build -t build_test --format=docker $tmpdir
    is "$output" ".*COMMIT" "COMMIT seen in log"

    run_podman run --rm build_test cat /$rand_filename
    is "$output" "$rand_content" "reading generated file in image"

    run_podman rmi -f build_test
}
# Exercise the buildx compatibility entry point, plus Docker-compatible
# handling of the odd string-form "VOLUME ['/etc/foo', '/etc/bar']" syntax.
@test "podman buildx - basic test" {
    rand_filename=$(random_string 20)
    rand_content=$(random_string 50)

    tmpdir=$PODMAN_TMPDIR/build-test
    mkdir -p $tmpdir
    dockerfile=$tmpdir/Dockerfile
    cat >$dockerfile <<EOF
FROM $IMAGE
RUN echo $rand_content > /$rand_filename
VOLUME /a/b/c
VOLUME ['/etc/foo', '/etc/bar']
EOF

    run_podman buildx build --load -t build_test --format=docker $tmpdir
    is "$output" ".*COMMIT" "COMMIT seen in log"

    run_podman run --rm build_test cat /$rand_filename
    is "$output" "$rand_content" "reading generated file in image"

    # Make sure the volumes are created at surprising yet Docker-compatible
    # destinations (see bugzilla.redhat.com/show_bug.cgi?id=2014149).
    run_podman run --rm build_test find /[ /etc/bar\] -print
    is "$output" "/\[
/\[/etc
/\[/etc/foo,
/etc/bar]" "weird VOLUME gets converted to directories with brackets and comma"

    # Now confirm that each volume got a unique device ID
    run_podman run --rm build_test stat -c '%D' / /a /a/b /a/b/c /\[ /\[/etc /\[/etc/foo, /etc /etc/bar\]
    # First, the non-volumes should all be the same...
    is "${lines[0]}" "${lines[1]}" "devnum( / ) = devnum( /a )"
    is "${lines[0]}" "${lines[2]}" "devnum( / ) = devnum( /a/b )"
    is "${lines[0]}" "${lines[4]}" "devnum( / ) = devnum( /[ )"
    is "${lines[0]}" "${lines[5]}" "devnum( / ) = devnum( /[etc )"
    is "${lines[0]}" "${lines[7]}" "devnum( / ) = devnum( /etc )"
    is "${lines[6]}" "${lines[8]}" "devnum( /[etc/foo, ) = devnum( /etc/bar] )"
    # ...then, each volume should be different
    if [[ "${lines[0]}" = "${lines[3]}" ]]; then
        die "devnum( / ) (${lines[0]}) = devnum( volume0 ) (${lines[3]}) -- they should differ"
    fi
    if [[ "${lines[0]}" = "${lines[6]}" ]]; then
        die "devnum( / ) (${lines[0]}) = devnum( volume1 ) (${lines[6]}) -- they should differ"
    fi
    # FIXME: is this expected? I thought /a/b/c and /[etc/foo, would differ
    is "${lines[3]}" "${lines[6]}" "devnum( volume0 ) = devnum( volume1 )"

    run_podman rmi -f build_test
}
# Same smoke test as above, but feeding the Containerfile on stdin (-f -).
@test "podman build test -f -" {
    rand_filename=$(random_string 20)
    rand_content=$(random_string 50)

    tmpdir=$PODMAN_TMPDIR/build-test
    mkdir -p $tmpdir
    containerfile=$PODMAN_TMPDIR/Containerfile
    cat >$containerfile <<EOF
FROM $IMAGE
RUN apk add nginx
RUN echo $rand_content > /$rand_filename
EOF

    # The 'apk' command can take a long time to fetch files; bump timeout
    PODMAN_TIMEOUT=240 run_podman build -t build_test -f - --format=docker $tmpdir < $containerfile
    is "$output" ".*COMMIT" "COMMIT seen in log"

    run_podman run --rm build_test cat /$rand_filename
    is "$output" "$rand_content" "reading generated file in image"

    run_podman rmi -f build_test
}
# An unknown --runtime-flag must be passed through to the OCI runtime and
# surface as a build failure mentioning the bogus flag.
@test "podman build - global runtime flags test" {
    skip_if_remote "--runtime-flag flag not supported for remote"

    rand_content=$(random_string 50)

    tmpdir=$PODMAN_TMPDIR/build-test
    run mkdir -p $tmpdir
    containerfile=$tmpdir/Containerfile
    cat >$containerfile <<EOF
FROM $IMAGE
RUN echo $rand_content
EOF

    # Exit status 1 expected: the runtime rejects the unknown flag.
    run_podman 1 --runtime-flag invalidflag build -t build_test $tmpdir
    is "$output" ".*invalidflag" "failed when passing undefined flags to the runtime"
}
# A nonexistent OCI runtime must be rejected, whether named on the CLI or
# configured via containers.conf.
@test "podman build - set runtime" {
    skip_if_remote "--runtime flag not supported for remote"
    # Test on the CLI and via containers.conf

    tmpdir=$PODMAN_TMPDIR/build-test
    run mkdir -p $tmpdir
    containerfile=$tmpdir/Containerfile
    # NOTE(review): $rand_content is never set in this test, so the RUN line
    # expands to a bare "echo" -- harmless here, but confirm it's intended.
    cat >$containerfile <<EOF
FROM $IMAGE
RUN echo $rand_content
EOF

    # Exit status 125 expected: podman itself fails before running anything.
    run_podman 125 --runtime=idonotexist build -t build_test $tmpdir
    is "$output" ".*\"idonotexist\" not found.*" "failed when passing invalid OCI runtime via CLI"

    containersconf=$tmpdir/containers.conf
    cat >$containersconf <<EOF
[engine]
runtime="idonotexist"
EOF

    CONTAINERS_CONF="$containersconf" run_podman 125 build -t build_test $tmpdir
    is "$output" ".*\"idonotexist\" not found.*" "failed when passing invalid OCI runtime via containers.conf"
}
# Regression from v1.5.0. This test passes fine in v1.5.0, fails in 1.6
# (#3920): a modified ADD tarball must invalidate the layer cache.
@test "podman build - cache (#3920)" {
    # Make an empty test directory, with a subdirectory used for tar
    tmpdir=$PODMAN_TMPDIR/build-test
    mkdir -p $tmpdir/subtest || die "Could not mkdir $tmpdir/subtest"

    echo "This is the ORIGINAL file" > $tmpdir/subtest/myfile1
    run tar -C $tmpdir -cJf $tmpdir/myfile.tar.xz subtest

    cat >$tmpdir/Dockerfile <<EOF
FROM $IMAGE
ADD myfile.tar.xz /
EOF

    # One of: ADD myfile /myfile or COPY . .
    run_podman build -t build_test -f $tmpdir/Dockerfile $tmpdir
    is "$output" ".*COMMIT" "COMMIT seen in log"
    # First build: nothing should be cached.
    if [[ "$output" =~ "Using cache" ]]; then
        is "$output" "[no instance of 'Using cache']" "no cache used"
    fi
    # Last line of build output is the image ID; keep it for cleanup.
    iid=${lines[-1]}

    run_podman run --rm build_test cat /subtest/myfile1
    is "$output" "This is the ORIGINAL file" "file contents, first time"

    # Step 2: Recreate the tarfile, with new content. Rerun podman build.
    echo "This is a NEW file" >| $tmpdir/subtest/myfile2
    run tar -C $tmpdir -cJf $tmpdir/myfile.tar.xz subtest

    run_podman build -t build_test -f $tmpdir/Dockerfile $tmpdir
    is "$output" ".*COMMIT" "COMMIT seen in log"

    # Since the tarfile is modified, podman SHOULD NOT use a cached layer.
    if [[ "$output" =~ "Using cache" ]]; then
        is "$output" "[no instance of 'Using cache']" "no cache used"
    fi

    # Pre-buildah-1906, this fails with ENOENT because the tarfile was cached
    run_podman run --rm build_test cat /subtest/myfile2
    is "$output" "This is a NEW file" "file contents, second time"

    run_podman rmi -f build_test $iid
}
# ADD must accept a URL source; COPY must reject one.
@test "podman build - URLs" {
    tmpdir=$PODMAN_TMPDIR/build-test
    mkdir -p $tmpdir

    cat >$tmpdir/Dockerfile <<EOF
FROM $IMAGE
ADD https://github.com/containers/podman/blob/main/README.md /tmp/
EOF

    run_podman build -t add_url $tmpdir
    # stat fails (and thus the test) if the downloaded file is missing.
    run_podman run --rm add_url stat /tmp/README.md
    run_podman rmi -f add_url

    # Now test COPY. That should fail.
    sed -i -e 's/ADD/COPY/' $tmpdir/Dockerfile
    run_podman 125 build -t copy_url $tmpdir
    is "$output" ".*error building at STEP .*: source can't be a URL for COPY"
}
@test "podman build - workdir, cmd, env, label" {
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
# Random workdir, and multiple random strings to verify command & env
workdir=/$(random_string 10)
s_echo=$(random_string 15)
s_env1=$(random_string 20)
s_env2=$(random_string 25)
s_env3=$(random_string 30)
s_env4=$(random_string 40)
# Label name: make sure it begins with a letter! jq barfs if you
# try to ask it for '.foo.<N>xyz', i.e. any string beginning with digit
label_name=l$(random_string 8)
label_value=$(random_string 12)
# #8679: Create a secrets directory, and mount it in the container
# (can only test locally; podman-remote has no --default-mounts-file opt)
MOUNTS_CONF=
secret_contents="ceci nest pas un secret"
CAT_SECRET="echo $secret_contents"
if ! is_remote; then
mkdir $tmpdir/secrets
echo $tmpdir/secrets:/run/secrets > $tmpdir/mounts.conf
secret_filename=secretfile-$(random_string 20)
secret_contents=shhh-$(random_string 30)-shhh
echo $secret_contents >$tmpdir/secrets/$secret_filename
MOUNTS_CONF=--default-mounts-file=$tmpdir/mounts.conf
CAT_SECRET="cat /run/secrets/$secret_filename"
fi
# For --dns-search: a domain that is unlikely to exist
local nosuchdomain=nx$(random_string 10).net
# Command to run on container startup with no args
cat >$tmpdir/mycmd <<EOF
#!/bin/sh
PATH=/usr/bin:/bin
pwd
echo "\$1"
printenv | grep MYENV | sort | sed -e 's/^MYENV.=//'
$CAT_SECRET
EOF
# For overriding with --env-file; using multiple files confirms that
# the --env-file option is cumulative, not last-one-wins.
cat >$PODMAN_TMPDIR/env-file1 <<EOF
MYENV3=$s_env3
http_proxy=http-proxy-in-env-file
EOF
cat >$PODMAN_TMPDIR/env-file2 <<EOF
https_proxy=https-proxy-in-env-file
EOF
# Build args: one explicit (foo=bar), one implicit (foo)
local arg_implicit_value=implicit_$(random_string 15)
local arg_explicit_value=explicit_$(random_string 15)
# NOTE: it's important to not create the workdir.
# Podman will make sure to create a missing workdir
# if needed. See #9040.
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
ARG arg_explicit
ARG arg_implicit
LABEL $label_name=$label_value
WORKDIR $workdir
# Test for #7094 - chowning of invalid symlinks
RUN mkdir -p /a/b/c
RUN ln -s /no/such/nonesuch /a/b/c/badsymlink
RUN ln -s /bin/mydefaultcmd /a/b/c/goodsymlink
RUN touch /a/b/c/myfile
RUN chown -h 1:2 /a/b/c/badsymlink /a/b/c/goodsymlink && chown -h 4:5 /a/b/c/myfile
VOLUME /a/b/c
# Test for environment passing and override
ENV MYENV1=$s_env1
ENV MYENV2 this-should-be-overridden-by-env-host
ENV MYENV3 this-should-be-overridden-by-env-file
ENV MYENV4 this-should-be-overridden-by-cmdline
ENV http_proxy http-proxy-in-image
ENV ftp_proxy ftp-proxy-in-image
ADD mycmd /bin/mydefaultcmd
RUN chmod 755 /bin/mydefaultcmd
RUN chown 2:3 /bin/mydefaultcmd
RUN $CAT_SECRET
RUN echo explicit-build-arg=\$arg_explicit
RUN echo implicit-build-arg=\$arg_implicit
CMD ["/bin/mydefaultcmd","$s_echo"]
RUN cat /etc/resolv.conf
EOF
# The goal is to test that a missing value will be inherited from
# environment - but that can't work with remote, so for simplicity
# just make it explicit in that case too.
local build_arg_implicit="--build-arg arg_implicit"
if is_remote; then
build_arg_implicit+="=$arg_implicit_value"
fi
# cd to the dir, so we test relative paths (important for podman-remote)
cd $PODMAN_TMPDIR
export arg_explicit="THIS SHOULD BE OVERRIDDEN BY COMMAND LINE!"
export arg_implicit=${arg_implicit_value}
run_podman ${MOUNTS_CONF} build \
--build-arg arg_explicit=${arg_explicit_value} \
$build_arg_implicit \
--dns-search $nosuchdomain \
-t build_test -f build-test/Containerfile build-test
local iid="${lines[-1]}"
if [[ $output =~ missing.*build.argument ]]; then
die "podman did not see the given --build-arg(s)"
fi
# Make sure 'podman build' had the secret mounted
is "$output" ".*$secret_contents.*" "podman build has /run/secrets mounted"
# --build-arg should be set, both via 'foo=bar' and via just 'foo' ($foo)
is "$output" ".*explicit-build-arg=${arg_explicit_value}" \
"--build-arg arg_explicit=explicit-value works"
is "$output" ".*implicit-build-arg=${arg_implicit_value}" \
"--build-arg arg_implicit works (inheriting from environment)"
is "$output" ".*search $nosuchdomain" \
"--dns-search added to /etc/resolv.conf"
if is_remote; then
ENVHOST=""
else
ENVHOST="--env-host"
fi
# Run without args - should run the above script. Verify its output.
export MYENV2="$s_env2"
export MYENV3="env-file-should-override-env-host!"
run_podman ${MOUNTS_CONF} run --rm \
--env-file=$PODMAN_TMPDIR/env-file1 \
--env-file=$PODMAN_TMPDIR/env-file2 \
${ENVHOST} \
-e MYENV4="$s_env4" \
build_test
is "${lines[0]}" "$workdir" "container default command: pwd"
is "${lines[1]}" "$s_echo" "container default command: output from echo"
is "${lines[2]}" "$s_env1" "container default command: env1"
if is_remote; then
is "${lines[3]}" "this-should-be-overridden-by-env-host" "podman-remote does not send local environment"
else
is "${lines[3]}" "$s_env2" "container default command: env2"
fi
is "${lines[4]}" "$s_env3" "container default command: env3 (from envfile)"
is "${lines[5]}" "$s_env4" "container default command: env4 (from cmdline)"
is "${lines[6]}" "$secret_contents" \
"Contents of /run/secrets/$secret_filename in container"
# Proxies - environment should override container, but not env-file
http_proxy=http-proxy-from-env ftp_proxy=ftp-proxy-from-env \
run_podman run --rm \
--env-file=$PODMAN_TMPDIR/env-file1 \
--env-file=$PODMAN_TMPDIR/env-file2 \
build_test \
printenv http_proxy https_proxy ftp_proxy
is "${lines[0]}" "http-proxy-in-env-file" "env-file overrides env"
is "${lines[1]}" "https-proxy-in-env-file" "env-file sets proxy var"
if is_remote; then
is "${lines[2]}" "ftp-proxy-in-image" "podman-remote does not send local environment"
else
is "${lines[2]}" "ftp-proxy-from-env" "ftp-proxy is passed through"
fi
# test that workdir is set for command-line commands also
run_podman run --rm build_test pwd
is "$output" "$workdir" "pwd command in container"
# Determine buildah version, so we can confirm it gets into Labels
# Multiple --format options confirm command-line override (last one wins)
run_podman info --format '{{.Ignore}}' --format '{{ .Host.BuildahVersion }}'
is "$output" "[1-9][0-9.-]\+" ".Host.BuildahVersion is reasonable"
buildah_version=$output
# Confirm that 'podman inspect' shows the expected values
# FIXME: can we rely on .Env[0] being PATH, and the rest being in order??
run_podman image inspect build_test
# (Assert that output is formatted, not a one-line blob: #8011)
if [[ "${#lines[*]}" -lt 10 ]]; then
die "Output from 'image inspect' is only ${#lines[*]} lines; see #8011"
fi
tests="
Env[1] | MYENV1=$s_env1
Env[2] | MYENV2=this-should-be-overridden-by-env-host
Env[3] | MYENV3=this-should-be-overridden-by-env-file
Env[4] | MYENV4=this-should-be-overridden-by-cmdline
Cmd[0] | /bin/mydefaultcmd
Cmd[1] | $s_echo
WorkingDir | $workdir
Labels.$label_name | $label_value
"
# FIXME: 2021-02-24: Fixed in buildah #3036; re-enable this once podman
# vendors in a newer buildah!
# Labels.\"io.buildah.version\" | $buildah_version
parse_table "$tests" | while read field expect; do
actual=$(jq -r ".[0].Config.$field" <<<"$output")
dprint "# actual=<$actual> expect=<$expect}>"
is "$actual" "$expect" "jq .Config.$field"
done
# Bad symlink in volume. Prior to #7094, well, we wouldn't actually
# get here because any 'podman run' on a volume that had symlinks,
# be they dangling or valid, would barf with
# Error: chown <mountpath>/_data/symlink: ENOENT
run_podman run --rm build_test stat -c'%u:%g:%N' /a/b/c/badsymlink
is "$output" "1:2:'/a/b/c/badsymlink' -> '/no/such/nonesuch'" \
"bad symlink to nonexistent file is chowned and preserved"
run_podman run --rm build_test stat -c'%u:%g:%N' /a/b/c/goodsymlink
is "$output" "1:2:'/a/b/c/goodsymlink' -> '/bin/mydefaultcmd'" \
"good symlink to existing file is chowned and preserved"
run_podman run --rm build_test stat -c'%u:%g' /bin/mydefaultcmd
is "$output" "2:3" "target of symlink is not chowned"
run_podman run --rm build_test stat -c'%u:%g:%N' /a/b/c/myfile
is "$output" "4:5:/a/b/c/myfile" "file in volume is chowned"
# Hey, as long as we have an image with lots of layers, let's
# confirm that 'image tree' works as expected
run_podman image tree build_test
is "${lines[0]}" "Image ID: ${iid:0:12}" \
"image tree: first line"
is "${lines[1]}" "Tags: \[localhost/build_test:latest]" \
"image tree: second line"
is "${lines[2]}" "Size: [0-9.]\+[kM]B" \
"image tree: third line"
is "${lines[3]}" "Image Layers" \
"image tree: fourth line"
is "${lines[4]}" ".* ID: [0-9a-f]\{12\} Size: .* Top Layer of: \[$IMAGE]" \
"image tree: first layer line"
is "${lines[-1]}" ".* ID: [0-9a-f]\{12\} Size: .* Top Layer of: \[localhost/build_test:latest]" \
"image tree: last layer line"
# FIXME: 'image tree --whatrequires' does not work via remote
if ! is_remote; then
run_podman image tree --whatrequires $IMAGE
is "${lines[-1]}" \
".*ID: .* Top Layer of: \\[localhost/build_test:latest\\]" \
"'image tree --whatrequires' shows our built image"
fi
# Clean up
run_podman rmi -f build_test
}
# Build an image whose Containerfile COPYs the entire context, and verify
# that files matched by the ignore file are excluded while files saved by
# negation patterns (!...) survive into the image.
@test "podman build - COPY with ignore" {
local tmpdir=$PODMAN_TMPDIR/build-test-$(random_string 10)
mkdir -p $tmpdir/subdir{1,2}
# Create a bunch of files. Declare this as an array to avoid duplication
# because we iterate over that list below, checking for each file.
# A leading "-" indicates that the file SHOULD NOT exist in the built image
#
# Weird side effect of Buildah 3486, relating to subdirectories and
# wildcard patterns. See that PR for details, it's way too confusing
# to explain in a comment.
local -a files=(
-test1 -test1.txt
test2 test2.txt
subdir1/sub1 subdir1/sub1.txt
-subdir1/sub2 -subdir1/sub2.txt
subdir1/sub3 subdir1/sub3.txt
-subdir2/sub1 -subdir2/sub1.txt
-subdir2/sub2 -subdir2/sub2.txt
-subdir2/sub3 -subdir2/sub3.txt
this-file-does-not-match-anything-in-ignore-file
comment
)
for f in ${files[@]}; do
# The magic '##-' strips off the '-' prefix
echo "$f" > $tmpdir/${f##-}
done
# Directory that doesn't exist in the image; COPY should create it
local newdir=/newdir-$(random_string 12)
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
COPY ./ $newdir/
EOF
# Run twice: first with a custom --ignorefile, then with a default one.
# This ordering is deliberate: if we were to run with .dockerignore
# first, and forget to rm it, and then run with --ignorefile, _and_
# there was a bug in podman where --ignorefile was a NOP (eg #9570),
# the test might pass because of the existence of .dockerfile.
for ignorefile in ignoreme-$(random_string 5) .dockerignore; do
# Patterns to ignore. Mostly copied from buildah/tests/bud/dockerignore
cat >$tmpdir/$ignorefile <<EOF
# comment
test*
!test2*
subdir1
subdir2
!*/sub1*
!subdir1/sub3*
EOF
# Build an image. For .dockerignore
# (--ignorefile is only passed for the non-default file name)
local -a ignoreflag
unset ignoreflag
if [[ $ignorefile != ".dockerignore" ]]; then
ignoreflag="--ignorefile $tmpdir/$ignorefile"
fi
run_podman build -t build_test ${ignoreflag} $tmpdir
# Delete the ignore file! Otherwise, in the next iteration of the loop,
# we could end up with an existing .dockerignore that invisibly
# takes precedence over --ignorefile
rm -f $tmpdir/$ignorefile
# It would be much more readable, and probably safer, to iterate
# over each file, running 'podman run ... ls -l $f'. But each podman run
# takes a second or so, and we are mindful of each second.
run_podman run --rm build_test find $newdir -type f
for f in ${files[@]}; do
if [[ $f =~ ^- ]]; then
f=${f##-}
if [[ $output =~ $f ]]; then
die "File '$f' found in image; it should have been ignored via $ignorefile"
fi
else
is "$output" ".*$newdir/$f" \
"File '$f' should exist in container (no match in $ignorefile)"
fi
done
# Clean up
run_podman rmi -f build_test
done
}
# Regression test for #9867
# Make sure that if you exclude everything in context dir, that
# the Containerfile/Dockerfile in the context dir are used
@test "podman build with ignore '*'" {
local tmpdir=$PODMAN_TMPDIR/build-test-$(random_string 10)
mkdir -p $tmpdir
cat >$tmpdir/Containerfile <<EOF
FROM scratch
EOF
cat >$tmpdir/.dockerignore <<EOF
*
EOF
# Even though the ignore file excludes everything, the build must still
# pick up the Containerfile from the context dir (regression: #9867).
run_podman build -t build_test $tmpdir
# Rename Containerfile to Dockerfile
mv $tmpdir/Containerfile $tmpdir/Dockerfile
run_podman build -t build_test $tmpdir
# Rename Dockerfile to foofile
mv $tmpdir/Dockerfile $tmpdir/foofile
# With neither default name present the build must fail (exit 125)...
run_podman 125 build -t build_test $tmpdir
is "$output" ".*Dockerfile: no such file or directory"
# ...but an explicit -f pointing at the renamed file must work.
run_podman build -t build_test -f $tmpdir/foofile $tmpdir
# Clean up
run_podman rmi -f build_test
}
# Build from a Containerfile supplied entirely on stdin ('-' context) and
# verify both the build log and the resulting image's WORKDIR.
@test "podman build - stdin test" {
# Random workdir, and random string to verify build output
workdir=/$(random_string 10)
random_echo=$(random_string 15)
PODMAN_TIMEOUT=240 run_podman build -t build_test - << EOF
FROM $IMAGE
RUN mkdir $workdir
WORKDIR $workdir
RUN /bin/echo $random_echo
EOF
is "$output" ".*COMMIT" "COMMIT seen in log"
is "$output" ".*STEP .*: RUN /bin/echo $random_echo"
run_podman run --rm build_test pwd
is "$output" "$workdir" "pwd command in container"
run_podman rmi -f build_test
}
# #8092 - podman build should not gobble stdin (Fixes: #8066)
# Regression test for #8092: 'podman build' in the middle of a pipeline
# must not consume stdin that belongs to commands running after it.
@test "podman build - does not gobble stdin that does not belong to it" {
random1=random1-$(random_string 12)
random2=random2-$(random_string 15)
random3=random3-$(random_string 12)
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
RUN echo x${random2}y
EOF
# This is a little rococo, bear with me please. #8092 fixed a bug
# in which 'podman build' would slurp up any input in the pipeline.
# Not a problem in a contrived example such as the one below, but
# definitely a problem when running commands in a pipeline to bash:
# all commands after 'podman build' would silently be ignored.
# In the test below, prior to #8092, the 'sed' would not get
# any input, and we would never see $random3 in the output.
# And, we use 'sed' to massage $random3 just on the remote
# chance that podman itself could pass stdin through.
results=$(echo $random3 | (
echo $random1
run_podman build -t build_test $tmpdir
sed -e 's/^/a/' -e 's/$/z/'
))
# First simple test: confirm that we see the piped-in string, as
# massaged by sed. This fails in 287edd4e2, the commit before #8092.
# We do this before the thorough test (below) because, should it
# fail, the diagnostic is much clearer and easier to understand.
is "$results" ".*a${random3}z" "stdin remains after podman-build"
# More thorough test: verify all the required strings in order.
# This is unlikely to fail, but it costs us nothing and could
# catch a regression somewhere else.
# FIXME: podman-remote output differs from local: #8342 (spurious ^M)
# FIXME: podman-remote output differs from local: #8343 (extra SHA output)
remote_extra=""
if is_remote; then remote_extra=".*";fi
expect="${random1}
.*
STEP 1/2: FROM $IMAGE
STEP 2/2: RUN echo x${random2}y
x${random2}y${remote_extra}
COMMIT build_test${remote_extra}
--> [0-9a-f]\{11\}
Successfully tagged localhost/build_test:latest
[0-9a-f]\{64\}
a${random3}z"
is "$results" "$expect" "Full output from 'podman build' pipeline"
run_podman rmi -f build_test
}
# Exercise --layers and the BUILDAH_LAYERS environment variable, verifying
# which combinations reuse the layer cache. The command-line flag must
# always win over the environment variable.
@test "podman build --layers test" {
rand_content=$(random_string 50)
tmpdir=$PODMAN_TMPDIR/build-test
# Plain mkdir (not 'run mkdir'): a failure here must abort the test
# immediately instead of being silently captured by 'run'. This also
# matches every other test in this file.
mkdir -p $tmpdir
containerfile=$tmpdir/Containerfile
cat >$containerfile <<EOF
FROM $IMAGE
RUN echo $rand_content
EOF
# Build twice to make sure second time uses cache
run_podman build -t build_test $tmpdir
if [[ "$output" =~ "Using cache" ]]; then
is "$output" "[no instance of 'Using cache']" "no cache used"
fi
run_podman build -t build_test $tmpdir
is "$output" ".*cache" "used cache"
run_podman build -t build_test --layers=true $tmpdir
is "$output" ".*cache" "used cache"
run_podman build -t build_test --layers=false $tmpdir
if [[ "$output" =~ "Using cache" ]]; then
is "$output" "[no instance of 'Using cache']" "no cache used"
fi
# BUILDAH_LAYERS env var alone behaves like --layers=false ...
BUILDAH_LAYERS=false run_podman build -t build_test $tmpdir
if [[ "$output" =~ "Using cache" ]]; then
is "$output" "[no instance of 'Using cache']" "no cache used"
fi
# ... but an explicit --layers flag overrides it, in both directions.
BUILDAH_LAYERS=false run_podman build -t build_test --layers=1 $tmpdir
is "$output" ".*cache" "used cache"
BUILDAH_LAYERS=1 run_podman build -t build_test --layers=false $tmpdir
if [[ "$output" =~ "Using cache" ]]; then
is "$output" "[no instance of 'Using cache']" "no cache used"
fi
run_podman rmi -a --force
}
# Caveat lector: this test was mostly copy-pasted from buildah in #9275.
# It's not entirely clear what it's testing, or if the 'mount' section is
# necessary.
@test "build with copy-from referencing the base image" {
target=derived
target_mt=derived-mt
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
# NOTE(review): containerfile1 is written but never built below --
# only containerfile2 is used; presumably kept from the buildah
# original (#9275). Confirm before deleting.
containerfile1=$tmpdir/Containerfile1
cat >$containerfile1 <<EOF
FROM $IMAGE AS build
RUN rm -f /etc/issue
USER 1001
COPY --from=$IMAGE /etc/issue /test/
EOF
containerfile2=$tmpdir/Containerfile2
cat >$containerfile2 <<EOF
FROM $IMAGE AS test
RUN rm -f /etc/alpine-release
FROM quay.io/libpod/alpine AS final
COPY --from=$IMAGE /etc/alpine-release /test/
EOF
# Before the build, $IMAGE's base image should not be present
local base_image=quay.io/libpod/alpine:latest
run_podman 1 image exists $base_image
# Build once single-threaded and once with parallel jobs; results
# must be identical (checked via file counts below).
run_podman build --jobs 1 -t ${target} -f ${containerfile2} ${tmpdir}
run_podman build --no-cache --jobs 4 -t ${target_mt} -f ${containerfile2} ${tmpdir}
# After the build, the base image should exist
run_podman image exists $base_image
# (can only test locally; podman-remote has no image mount command)
# (can also only test as root; mounting under rootless podman is too hard)
# We perform the test as a conditional, not a 'skip', because there's
# value in testing the above 'build' commands even remote & rootless.
if ! is_remote && ! is_rootless; then
run_podman image mount ${target}
root_single_job=$output
run_podman image mount ${target_mt}
root_multi_job=$output
# Check that both the version with --jobs 1 and --jobs=N have the same number of files
nfiles_single=$(find $root_single_job -type f | wc -l)
nfiles_multi=$(find $root_multi_job -type f | wc -l)
run_podman image umount ${target_mt}
run_podman image umount ${target}
is "$nfiles_single" "$nfiles_multi" \
"Number of files (--jobs=1) == (--jobs=4)"
# Make sure the number is reasonable
test "$nfiles_single" -gt 50
fi
# Clean up
run_podman rmi ${target_mt} ${target} ${base_image}
run_podman image prune -f
}
# --pull-never must be a no-op when the base image is local, and must fail
# (without attempting a pull) when it is not.
@test "podman build --pull-never" {
local tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
# First, confirm that --pull-never is a NOP if image exists locally
local random_string=$(random_string 15)
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
RUN echo $random_string
EOF
run_podman build -t build_test --pull-never $tmpdir
is "$output" ".*$random_string" "pull-never is OK if image already exists"
run_podman rmi build_test
# Now try an image that does not exist locally nor remotely
cat >$tmpdir/Containerfile <<EOF
FROM quay.io/libpod/nosuchimage:nosuchtag
RUN echo $random_string
EOF
run_podman 125 build -t build_test --pull-never $tmpdir
is "$output" \
".*Error: error creating build container: quay.io/libpod/nosuchimage:nosuchtag: image not known" \
"--pull-never fails with expected error message"
}
# --logfile must capture the build output (checked via the COMMIT line).
@test "podman build --logfile test" {
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
tmpbuilddir=$tmpdir/build
mkdir -p $tmpbuilddir
dockerfile=$tmpbuilddir/Dockerfile
cat >$dockerfile <<EOF
FROM $IMAGE
EOF
run_podman build -t build_test --format=docker --logfile=$tmpdir/logfile $tmpbuilddir
# Inspect the log file itself, not podman's stdout
run cat $tmpdir/logfile
is "$output" ".*COMMIT" "COMMIT seen in log"
run_podman rmi -f build_test
}
# --security-opt label=level:... must apply the requested SELinux level to
# RUN steps (verified by reading the process label from inside the build).
@test "podman build check_label" {
skip_if_no_selinux
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
tmpbuilddir=$tmpdir/build
mkdir -p $tmpbuilddir
dockerfile=$tmpbuilddir/Dockerfile
cat >$dockerfile <<EOF
FROM $IMAGE
RUN cat /proc/self/attr/current
EOF
run_podman build -t build_test --security-opt label=level:s0:c3,c4 --format=docker $tmpbuilddir
is "$output" ".*s0:c3,c4COMMIT" "label setting level"
run_podman rmi -f build_test
}
# Seccomp filtering and --ulimit must apply to RUN steps; they are read
# back from /proc inside the build.
@test "podman build check_seccomp_ulimits" {
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
tmpbuilddir=$tmpdir/build
mkdir -p $tmpbuilddir
dockerfile=$tmpbuilddir/Dockerfile
cat >$dockerfile <<EOF
FROM $IMAGE
RUN grep Seccomp: /proc/self/status |awk '{ print \$1\$2 }'
RUN grep "Max open files" /proc/self/limits |awk '{ print \$4":"\$5 }'
EOF
run_podman build --ulimit nofile=101:102 -t build_test $tmpbuilddir
is "$output" ".*Seccomp:2" "setting seccomp"
is "$output" ".*101:102" "setting ulimits"
run_podman rmi -f build_test
# seccomp=unconfined must yield Seccomp mode 0 inside RUN
run_podman build -t build_test --security-opt seccomp=unconfined $tmpbuilddir
is "$output" ".*Seccomp:0" "setting seccomp"
run_podman rmi -f build_test
}
# --authfile pointing at a nonexistent path must fail up front (exit 125).
@test "podman build --authfile bogus test" {
run_podman 125 build --authfile=/tmp/bogus - <<< "from scratch"
is "$output" ".*/tmp/bogus: no such file or directory"
}
# COPY must preserve hardlinks across the whole context, including
# nested subdirectories (checked by comparing inode numbers).
@test "podman build COPY hardlinks " {
tmpdir=$PODMAN_TMPDIR/build-test
subdir=$tmpdir/subdir
subsubdir=$subdir/subsubdir
mkdir -p $subsubdir
dockerfile=$tmpdir/Dockerfile
cat >$dockerfile <<EOF
FROM $IMAGE
COPY . /test
EOF
# Hardlink the Dockerfile itself at three different depths
ln $dockerfile $tmpdir/hardlink1
ln $dockerfile $subdir/hardlink2
ln $dockerfile $subsubdir/hardlink3
run_podman build -t build_test $tmpdir
run_podman run --rm build_test stat -c '%i' /test/Dockerfile
dinode=$output
run_podman run --rm build_test stat -c '%i' /test/hardlink1
is "$output" "$dinode" "COPY hardlinks work"
run_podman run --rm build_test stat -c '%i' /test/subdir/hardlink2
is "$output" "$dinode" "COPY hardlinks work"
run_podman run --rm build_test stat -c '%i' /test/subdir/subsubdir/hardlink3
is "$output" "$dinode" "COPY hardlinks work"
run_podman rmi -f build_test
}
# Exercise every flavor of -f: relative to context, absolute, outside the
# context dir, with no context arg, and given multiple times.
@test "podman build -f test" {
tmpdir=$PODMAN_TMPDIR/build-test
subdir=$tmpdir/subdir
mkdir -p $subdir
containerfile1=$tmpdir/Containerfile1
cat >$containerfile1 <<EOF
FROM scratch
copy . /tmp
EOF
# Second Containerfile lives OUTSIDE the build context
containerfile2=$PODMAN_TMPDIR/Containerfile2
cat >$containerfile2 <<EOF
FROM $IMAGE
EOF
# Relative -f resolves against the context dir, so only file1 is found
run_podman build -t build_test -f Containerfile1 $tmpdir
run_podman 125 build -t build_test -f Containerfile2 $tmpdir
is "$output" ".*Containerfile2: no such file or directory" "Containerfile2 should not exist"
run_podman build -t build_test -f $containerfile1 $tmpdir
run_podman build -t build_test -f $containerfile2 $tmpdir
run_podman build -t build_test -f $containerfile1
run_podman build -t build_test -f $containerfile2
# With multiple -f options, the last one wins
run_podman build -t build_test -f $containerfile1 -f $containerfile2 $tmpdir
is "$output" ".*$IMAGE" "Containerfile2 is also passed to server"
run_podman rmi -f build_test
}
# When .dockerignore excludes 'subdir', an explicit COPY of it must fail
# the build -- proving the ignore file was actually honored.
@test "podman build .dockerignore failure test" {
tmpdir=$PODMAN_TMPDIR/build-test
subdir=$tmpdir/subdir
mkdir -p $subdir
cat >$tmpdir/.dockerignore <<EOF
*
subdir
!*/sub1*
EOF
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
COPY ./ ./
COPY subdir ./
EOF
run_podman 125 build -t build_test $tmpdir
is "$output" ".*Error: error building at STEP \"COPY subdir ./\"" ".dockerignore was ignored"
}
# When both ignore files exist, .containerignore must take precedence over
# .dockerignore: test2 is excluded, test1 (listed only in .dockerignore)
# must survive into the image.
@test "podman build .containerignore and .dockerignore test" {
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir
touch $tmpdir/test1 $tmpdir/test2
cat >$tmpdir/.containerignore <<EOF
test2*
EOF
cat >$tmpdir/.dockerignore <<EOF
test1*
EOF
cat >$tmpdir/Containerfile <<EOF
FROM $IMAGE
COPY ./ /tmp/test/
RUN ls /tmp/test/
EOF
run_podman build -t build_test $tmpdir
is "$output" ".*test1" "test1 should exists in the final image"
}
# Files copied from the build context must be owned by root (0:0) inside
# the image, regardless of their ownership on the host.
@test "podman build build context ownership" {
tmpdir=$PODMAN_TMPDIR/build-test
subdir=$tmpdir/subdir
mkdir -p $subdir
touch $tmpdir/empty-file.txt
if is_remote && ! is_rootless ; then
# TODO: set this file's owner to a UID:GID that will not be mapped
# in the context where the remote server is running, which generally
# requires us to be root (or running with more mapped IDs) on the
# client, but not root (or running with fewer mapped IDs) on the
# remote server
# 4294967292:4294967292 (0xfffffffc:0xfffffffc) isn't that, but
# it will catch errors where a remote server doesn't apply the right
# default as it copies content into the container
chown 4294967292:4294967292 $tmpdir/empty-file.txt
fi
# The RUN cmp step fails the build if the ownership is not 0:0
cat >$tmpdir/Dockerfile <<EOF
FROM $IMAGE
COPY empty-file.txt .
RUN echo 0:0 | tee expected.txt
RUN stat -c "%u:%g" empty-file.txt | tee actual.txt
RUN cmp expected.txt actual.txt
EOF
run_podman build -t build_test $tmpdir
}
# The build context path may itself be a symlink to a directory; the build
# must follow it and find the Dockerfile inside the target.
@test "podman build build context is a symlink to a directory" {
tmpdir=$PODMAN_TMPDIR/build-test
mkdir -p $tmpdir/target
ln -s target $tmpdir/link
echo FROM $IMAGE > $tmpdir/link/Dockerfile
echo RUN echo hello >> $tmpdir/link/Dockerfile
run_podman build -t build_test $tmpdir/link
}
function teardown() {
# A timeout or other error in 'build' can leave behind stale images
# that podman can't even see and which will cascade into subsequent
# test failures. Try a last-ditch force-rm in cleanup, ignoring errors.
# ('?' tells run_podman to accept any exit status.)
run_podman '?' rm -t 0 -a -f
run_podman '?' rmi -f build_test
# Many of the tests above leave interim layers behind. Clean them up.
run_podman '?' image prune -f
basic_teardown
}
# vim: filetype=sh
|
// Capitalize the first letter of every space-separated word in `str`,
// leaving the remainder of each word untouched. Runs of spaces are
// preserved, because empty tokens round-trip through split/join.
function capitalizeFirstLetters(str) {
  const words = str.split(" ");
  const capitalized = words.map(
    (word) => word.charAt(0).toUpperCase() + word.slice(1)
  );
  return capitalized.join(" ");
}
// given string
let str = "the quick brown fox";
// Capitalize each word and print the result.
let result = capitalizeFirstLetters(str);
console.log(result);
// Output: The Quick Brown Fox |
#!/bin/bash
# Update the secure-core workspace: run `git pull` in each known layer
# checkout located next to this script's parent directory.
#
# Abort on the first failure instead of silently continuing.
set -eu

S="${BASH_SOURCE[0]}"
# $(...) is preferred over legacy backticks and nests cleanly.
D=$(dirname "$S")
SECURE_CORE_ROOT="$(cd "$D"/.. && pwd)"

for dir in poky meta-openembedded meta-secure-core; do
    # Run in a subshell so the cd does not leak into this shell; `&&`
    # ensures a failed cd skips the pull instead of pulling whatever
    # repository we happen to be sitting in (the original `cd ...; git
    # pull` would do exactly that).
    (cd "$SECURE_CORE_ROOT/$dir" && git pull)
done
|
<gh_stars>0
import React from "react";
import { RingView } from "./RingView";
import { RingSize } from "./model/RingSize";
import { RingColor } from "./model/RingColor";
export const TestRingPage = () => {
return (
<div style={{ width: "300px", height: "300px" }}>
<RingView size={RingSize.SMALL} color={RingColor.COLOR_2} />
</div>
);
};
|
#!/bin/sh
# Launch one training run of the "coarse classes" model with a fixed set
# of hyper-parameters, teeing the output into a matching log directory.
gpu=7
cr=24
kr=6
dp=28
wd=6
# Log directory encodes every hyper-parameter so runs never collide.
mkdir -p "logs/model/coarse/all/crop${cr}/kernel${kr}/depth${dp}/width${wd}/"
# NOTE(review): the trailing pipe feeds tee, so the shell's exit status is
# tee's, not python's -- a crashed training run still exits 0. Confirm
# whether callers rely on the exit code.
python -u src/train.py --gpu $gpu \
--coarse_classes \
--crop_size $cr --kernel_size $kr \
--depth $dp --width_factor $wd |
tee "logs/model/coarse/all/crop${cr}/kernel${kr}/depth${dp}/width${wd}/model_coarse_all_crop${cr}_kernel${kr}_depth${dp}_width${wd}.log"
|
<filename>chest/windows/core/src/main/java/net/community/chest/win32/core/serial/ObjectNullMultiple256Record.java
/*
*
*/
package net.community.chest.win32.core.serial;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StreamCorruptedException;
import net.community.chest.CoVariantReturn;
import net.community.chest.io.encode.ElementEncoder;
import net.community.chest.lang.PubliclyCloneable;
import net.community.chest.win32.core.DataFormatConverter;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Feb 19, 2013 3:59:05 PM
*
*/
public class ObjectNullMultiple256Record extends SerializationRecord
        implements PubliclyCloneable<ObjectNullMultiple256Record>,
                   ElementEncoder<ObjectNullMultiple256Record> {
    private static final long serialVersionUID = 1996923559485206152L;

    // Number of consecutive null object references this record stands for.
    // Serialized as a single unsigned byte, so the effective range is 0-255.
    private short _nullCount;

    /** Creates an empty record of type {@code ObjectNullMultiple256}. */
    public ObjectNullMultiple256Record ()
    {
        super(RecordTypeEnumeration.ObjectNullMultiple256);
    }

    /**
     * Creates a record and populates it from the given stream.
     * @param in The {@link InputStream} to read the record data from
     * @throws IOException If failed to read, or if {@link #read(InputStream)}
     * returned an instance other than {@code this}
     */
    public ObjectNullMultiple256Record (InputStream in) throws IOException
    {
        super(RecordTypeEnumeration.ObjectNullMultiple256);
        Object result=read(in);
        if (result != this)
            throw new StreamCorruptedException("Mismatched read data instance");
    }

    /** @return Number of consecutive null references represented (0-255) */
    public short getNullCount ()
    {
        return _nullCount;
    }

    public void setNullCount (short nullCount)
    {
        _nullCount = nullCount;
    }

    @Override
    @CoVariantReturn
    public ObjectNullMultiple256Record read (InputStream in) throws IOException
    {
        return getClass().cast(super.read(in));
    }

    @Override
    public void readRecordData (InputStream in) throws IOException
    {
        // The count is encoded as one unsigned byte - hence the short holder
        setNullCount(DataFormatConverter.readUnsignedByte(in));
        logInternal("Count=" + getNullCount());
    }

    @Override
    public void writeRecordData (OutputStream out) throws IOException
    {
        DataFormatConverter.writeUnsignedByte(out, getNullCount());
    }

    @Override
    public ObjectNullMultiple256Record clone () throws CloneNotSupportedException
    {
        return getClass().cast(super.clone());
    }

    @Override
    public int hashCode ()
    {
        return super.hashCode() + getNullCount();
    }

    @Override
    public boolean equals (Object obj)
    {
        // Cheap identity short-circuit before the full super comparison
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        // Cast assumed safe once super.equals accepted obj - same
        // assumption the original code made
        return getNullCount() == ((ObjectNullMultiple256Record) obj).getNullCount();
    }

    @Override
    public String toString ()
    {
        return super.toString() + ";count=" + getNullCount();
    }
}
|
<reponame>joshuarubin/zb<gh_stars>100-1000
package object
import (
"bufio"
"bytes"
"fmt"
"io"
stdioutil "io/ioutil"
"srcd.works/go-git.v4/plumbing"
"srcd.works/go-git.v4/plumbing/storer"
"srcd.works/go-git.v4/utils/ioutil"
)
// Tag represents an annotated tag object. It points to a single git object of
// any type, but tags typically are applied to commit or blob objects. It
// provides a reference that associates the target with a tag name. It also
// contains meta-information about the tag, including the tagger, tag date and
// message.
//
// https://git-scm.com/book/en/v2/Git-Internals-Git-References#Tags
type Tag struct {
// Hash of the tag object itself (not of the tagged object).
Hash plumbing.Hash
// Name of the tag.
Name string
// Tagger is the signature recorded in the tag header.
Tagger Signature
// Message is the free-form text following the header.
Message string
// TargetType is the object type of Target.
TargetType plumbing.ObjectType
// Target is the hash of the tagged object.
Target plumbing.Hash

// s is the storer the tag was decoded from; used to resolve Target.
s storer.EncodedObjectStorer
}
// GetTag looks the tag object up by hash in the given storer and decodes
// it into a *Tag.
func GetTag(s storer.EncodedObjectStorer, h plumbing.Hash) (*Tag, error) {
	encoded, lookupErr := s.EncodedObject(plumbing.TagObject, h)
	if lookupErr != nil {
		return nil, lookupErr
	}

	return DecodeTag(s, encoded)
}
// DecodeTag decodes an encoded object into a *Tag and associates it to the
// given object storer.
//
// (The previous doc comment said "*Commit" - a copy-paste error; this
// function decodes and returns a *Tag.)
func DecodeTag(s storer.EncodedObjectStorer, o plumbing.EncodedObject) (*Tag, error) {
	t := &Tag{s: s}
	if err := t.Decode(o); err != nil {
		return nil, err
	}

	return t, nil
}
// ID returns the object ID of the tag, not the object that the tag references.
// The returned value will always match the current value of Tag.Hash
// (which Decode sets from the encoded object's hash).
//
// ID is present to fulfill the Object interface.
func (t *Tag) ID() plumbing.Hash {
return t.Hash
}
// Type returns the type of object. It always returns plumbing.TagObject,
// regardless of the type of the tagged target.
//
// Type is present to fulfill the Object interface.
func (t *Tag) Type() plumbing.ObjectType {
return plumbing.TagObject
}
// Decode transforms a plumbing.EncodedObject into a Tag struct.
//
// The header is parsed line by line ("object", "type", "tag", "tagger");
// everything after the first blank line is taken verbatim as the message.
func (t *Tag) Decode(o plumbing.EncodedObject) (err error) {
if o.Type() != plumbing.TagObject {
return ErrUnsupportedObject
}

t.Hash = o.Hash()

reader, err := o.Reader()
if err != nil {
return err
}
// Named return lets the deferred CheckClose surface a Close error.
defer ioutil.CheckClose(reader, &err)

r := bufio.NewReader(reader)
for {
// NOTE: ':=' declares a loop-local err, distinct from the named return.
line, err := r.ReadSlice('\n')
if err != nil && err != io.EOF {
return err
}

line = bytes.TrimSpace(line)
if len(line) == 0 {
break // Start of message
}

split := bytes.SplitN(line, []byte{' '}, 2)
switch string(split[0]) {
case "object":
t.Target = plumbing.NewHash(string(split[1]))
case "type":
t.TargetType, err = plumbing.ParseObjectType(string(split[1]))
if err != nil {
return err
}
case "tag":
t.Name = string(split[1])
case "tagger":
// NOTE(review): split aliases bufio's internal buffer (ReadSlice);
// assumes Signature.Decode copies what it keeps - confirm.
t.Tagger.Decode(split[1])
}

// Header ended exactly at EOF: no blank line, hence no message.
if err == io.EOF {
return nil
}
}

data, err := stdioutil.ReadAll(r)
if err != nil {
return err
}
t.Message = string(data)

return nil
}
// Encode transforms a Tag into a plumbing.EncodedObject.
//
// The result uses a NAMED return value: the deferred CheckClose writes a
// Close error into err, and with the previous unnamed signature that
// assignment landed on a dead local, silently losing writer-close errors.
// (Decode already used the named-return form.)
func (t *Tag) Encode(o plumbing.EncodedObject) (err error) {
	o.SetType(plumbing.TagObject)
	w, err := o.Writer()
	if err != nil {
		return err
	}
	defer ioutil.CheckClose(w, &err)

	if _, err = fmt.Fprintf(w,
		"object %s\ntype %s\ntag %s\ntagger ",
		t.Target.String(), t.TargetType.Bytes(), t.Name); err != nil {
		return err
	}

	if err = t.Tagger.Encode(w); err != nil {
		return err
	}

	// Blank line separates the header from the message.
	if _, err = fmt.Fprint(w, "\n\n"); err != nil {
		return err
	}

	if _, err = fmt.Fprint(w, t.Message); err != nil {
		return err
	}

	return err
}
// Commit returns the commit pointed to by the tag. If the tag points to a
// different type of object ErrUnsupportedObject will be returned.
func (t *Tag) Commit() (*Commit, error) {
	if t.TargetType != plumbing.CommitObject {
		return nil, ErrUnsupportedObject
	}

	encoded, lookupErr := t.s.EncodedObject(plumbing.CommitObject, t.Target)
	if lookupErr != nil {
		return nil, lookupErr
	}

	return DecodeCommit(t.s, encoded)
}
// Tree returns the tree pointed to by the tag. If the tag points to a commit
// object the tree of that commit will be returned. If the tag does not point
// to a commit or tree object ErrUnsupportedObject will be returned.
func (t *Tag) Tree() (*Tree, error) {
	if t.TargetType == plumbing.CommitObject {
		// Resolve the commit first, then take its tree.
		commit, err := t.Commit()
		if err != nil {
			return nil, err
		}
		return commit.Tree()
	}

	if t.TargetType == plumbing.TreeObject {
		return GetTree(t.s, t.Target)
	}

	return nil, ErrUnsupportedObject
}
// Blob returns the blob pointed to by the tag. If the tag points to a
// different type of object ErrUnsupportedObject will be returned.
func (t *Tag) Blob() (*Blob, error) {
	if t.TargetType == plumbing.BlobObject {
		return GetBlob(t.s, t.Target)
	}

	return nil, ErrUnsupportedObject
}
// Object resolves and decodes the tagged object, whatever its type.
func (t *Tag) Object() (Object, error) {
	encoded, lookupErr := t.s.EncodedObject(t.TargetType, t.Target)
	if lookupErr != nil {
		return nil, lookupErr
	}

	return DecodeObject(t.s, encoded)
}
// String returns the meta information contained in the tag as a formatted
// string.
func (t *Tag) String() string {
// Error deliberately ignored: on failure obj is nil, and
// objectAsString's default case turns that into an empty trailer.
obj, _ := t.Object()

return fmt.Sprintf(
"%s %s\nTagger: %s\nDate: %s\n\n%s\n%s",
plumbing.TagObject, t.Name, t.Tagger.String(), t.Tagger.When.Format(DateFormat),
t.Message, objectAsString(obj),
)
}
// TagIter provides an iterator for a set of tags.
type TagIter struct {
// Embedded iterator that supplies the raw encoded objects.
storer.EncodedObjectIter
// s is used to decode each object and resolve tag targets.
s storer.EncodedObjectStorer
}
// NewTagIter returns a TagIter for the given object storer and underlying
// object iterator.
//
// The returned TagIter will automatically skip over non-tag objects.
//
// NOTE(review): Next (below) simply calls DecodeTag, which errors on
// non-tag objects rather than skipping them; the "skip" claim above
// appears to rely on the caller pre-filtering the underlying iterator.
// Confirm.
func NewTagIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *TagIter {
return &TagIter{iter, s}
}
// Next advances the iterator and returns the next tag. When the set is
// exhausted, io.EOF is returned.
func (iter *TagIter) Next() (*Tag, error) {
	encoded, err := iter.EncodedObjectIter.Next()
	if err != nil {
		return nil, err
	}

	return DecodeTag(iter.s, encoded)
}
// ForEach calls cb for every tag in this iter until either an error
// happens or the end of the set is reached. If cb returns ErrStop the
// iteration stops without reporting an error. The iterator is closed.
func (iter *TagIter) ForEach(cb func(*Tag) error) error {
	return iter.EncodedObjectIter.ForEach(func(encoded plumbing.EncodedObject) error {
		tag, decodeErr := DecodeTag(iter.s, encoded)
		if decodeErr != nil {
			return decodeErr
		}

		return cb(tag)
	})
}
// objectAsString renders the tagged object when it is a commit; any other
// object kind (including nil) yields an empty string.
func objectAsString(obj Object) string {
	if commit, ok := obj.(*Commit); ok {
		return commit.String()
	}
	return ""
}
|
import React from 'react';
import ReactDOM from 'react-dom';
import { Table } from 'react-bootstrap';
// Static demo data rendered by UsersTable below; `name` doubles as the
// React list key, so names must stay unique.
const users = [
{
name: 'John Doe',
job: 'Web Developer',
age: 31,
city: 'Boston',
},
{
name: 'Jane Smith',
job: 'Data Scientist',
age: 27,
city: 'New York',
},
{
name: 'Dave Williams',
job: 'Engineer',
age: 38,
city: 'Los Angeles',
},
];
const UsersTable = () => {
return (
<Table>
<thead>
<tr>
<th>Name</th>
<th>Job</th>
<th>Age</th>
<th>City</th>
</tr>
</thead>
<tbody>
{users.map(user => (
<tr key={user.name}>
<td>{user.name}</td>
<td>{user.job}</td>
<td>{user.age}</td>
<td>{user.city}</td>
</tr>
))}
</tbody>
</Table>
)
};
ReactDOM.render(<UsersTable />, document.getElementById('root')); |
<reponame>tdrv90/freeCodeCamp
/*
https://www.freecodecamp.org/learn/javascript-algorithms-and-data-structures/intermediate-algorithm-scripting/sum-all-numbers-in-a-range
We'll pass you an array of two numbers. Return the sum of those two numbers
plus the sum of all the numbers between them.
The lowest number will not always come first.
For example, sumAll([4,1]) should return 10 because sum of all the numbers
between 1 and 4 (both inclusive) is 10.
(1) sumAll([1, 4]) should return a number.
(2) sumAll([1, 4]) should return 10.
(3) sumAll([4, 1]) should return 10.
(4) sumAll([5, 10]) should return 45.
(5) sumAll([10, 5]) should return 45.
*/
/**
 * Sums every integer in the inclusive range spanned by the two numbers
 * in `arr`, in either order, e.g. sumAll([4, 1]) === 10.
 *
 * Uses the arithmetic-series formula (lo + hi) * count / 2 instead of a
 * loop, so it is O(1) and avoids duplicating the min/max swap logic.
 *
 * @param {number[]} arr - two numbers (order irrelevant)
 * @returns {number} sum of all integers from min(arr) to max(arr) inclusive
 */
function sumAll(arr) {
  const lo = Math.min(+arr[0], +arr[1]);
  const hi = Math.max(+arr[0], +arr[1]);
  return ((lo + hi) * (hi - lo + 1)) / 2;
}
// Smoke checks mirroring the freeCodeCamp test cases.
console.log(sumAll([1, 4])); // 10
console.log(sumAll([10, 5])); // 45
/**
 * Postal address of a student.
 * Field names follow Brazilian address conventions
 * (CEP is the Brazilian postal code).
 */
export class Address {
  public_place: string;   // street / avenue name
  house_number: string;
  complement: string;     // apartment, suite, etc.
  neighborhood: string;
  cep: string;            // postal code
  city: string;
  state: string;
}
/** Course a student can be enrolled in. */
export class Course {
  id: number;
  name: string;
  dateRegister: Date;   // when the course was registered
  workload: string;     // e.g. total hours; stored as free text
}
/**
 * Student record with an embedded course and address.
 * `course` and `address` are eagerly initialized so template bindings
 * like student.address.city never hit an undefined object.
 */
export class Student {
  id: number;
  name: string;
  cpf: string;      // Brazilian taxpayer id
  email: string;
  phone: string;
  course = new Course();
  address = new Address();
}
|
<filename>public/javascripts/controllers/RecordCtrl.js
// RecordCtrl drives one user's turn: it records microphone audio with
// Recorder.js while running Chrome's webkitSpeechRecognition in parallel,
// then saves the audio + transcript and notifies other players via sockets.
onceUpon.controller('RecordCtrl', function RecordCtrl($scope, SentencesFactory,
  SocketFactory, PlaybackFactory, $http, $timeout, Modernizr) {

  // only SocketFactory needs to be exposed to controller scope for template
  $scope.SocketFactory = SocketFactory;

  // Recorder, context, and recognition objects must be scoped to the whole controller
  // NOTE(review): these bare property reads are no-ops; the properties are
  // actually created later in gumSuccess()/initRecognize(). Kept as
  // declarations of intent only.
  $scope.rec;
  $scope.context;
  $scope.mediaStreamSource;
  $scope.recognition;

  // Keep track of current sentence values and state
  $scope.interim;
  $scope.final = null;
  $scope.recognizing = false;
  $scope.timeRemaining = null;

  // CSS class for the record button based on whose turn it is and
  // whether recognition is currently running.
  $scope.getButtonClass = function() {
    if (SocketFactory.userPosition === 0 && !$scope.recognizing) {
      return "ready";
    } else if (SocketFactory.userPosition === 0 && $scope.recognizing) {
      return "recording";
    } else if (SocketFactory.userPosition !== 0) {
      return "waiting";
    }
  }

  // Begin a recording turn: stop playback, arm the abort timer, and start
  // both the audio recorder and speech recognition.
  $scope.start = function() {
    PlaybackFactory.stopAll(); // stop all playback when recording starts
    $scope.startTimer();
    $scope.rec.record();
    $scope.recognition.start();
  }

  // Stop capture and persist the recording + transcript.
  $scope.save = function() {
    $scope.rec.stop();
    $scope.recognition.stop();
    // Factory will do the actual work of saving the recording
    // We pass it the recorder object to do so
    SentencesFactory.saveSentence($scope.rec, $scope.text);
  }

  // If after 5s of recording we haven't gotten anything, turn it off.
  // NOTE(review): an earlier comment said 15s but the $timeout below is
  // 5000 ms — confirm which duration is intended.
  $scope.startTimer = function() {
    $timeout(function() {
      if (!$scope.final) {
        $scope.recognizing = false;
        $scope.rec.stop();
        $scope.recognition.stop();
        SocketFactory.abortRecording();
        $scope.interim = null;
        $scope.final = null;
      }
    }, 5000);
  }

  // Initialize Speech Recognition object and handlers
  var initRecognize = function() {
    if (!('webkitSpeechRecognition' in window)) {
      console.log("Your browser does not support speech recognition. Please use the latest version of Google Chrome.");
    } else {
      $scope.recognition = new webkitSpeechRecognition();
      // We want to see the interim results
      $scope.recognition.interimResults = true;
      // Don't continue speech recognition if user pauses
      // Because as it is now, they have one opportunity to record
      $scope.recognition.continuous = false;
      // Using American English for now
      $scope.recognition.lang = 'en-US';

      // Do these things when speech recognition is enabled
      $scope.recognition.onstart = function() {
        $scope.recognizing = true;
        SocketFactory.beginRecording();
        // Every custom event handler needs to apply its scope
        $scope.$apply();
      };

      // Do these things when the user has finished talking
      $scope.recognition.onresult = function (event) {
        // Get index of this sentence relative to all the sentence events
        // recorded while the user has been on this page
        var sentenceIndex = event.resultIndex;
        // Get sentence from transcript of the most current interim results
        var sentence = event.results[sentenceIndex][0].transcript;
        // Display interim results
        if (!event.results[sentenceIndex].isFinal) {
          $scope.interim = sentence;
          SocketFactory.updateText(sentence);
          $scope.$apply();
        } else {
          $scope.final = sentence;
          // Set the text to this sentence transcription
          // and save all to the db
          $scope.text = sentence;
          $scope.save();
          // Send a socket message to the server to tell everyone that this
          // user has finished recording
          SocketFactory.endRecording();
          // We've got a final result, clear the interim results.
          $scope.interim = null;
          $scope.final = null;
          // Every custom handler needs to apply its scope
          $scope.$apply();
        }
      };

      $scope.recognition.onerror = function(event) {
        console.log("speech recognition error:" + event.error);
        if (event.error === "not-allowed") {
          console.log("Speech recognition not allowed");
        } else {
          console.log("Other speech recognition error");
        }
        // Every custom event handler needs to apply its scope
        $scope.$apply();
      };

      $scope.recognition.onend = function() {
        $scope.recognizing = false;
        // Here's a hack to re-start recognition after the preset time limit
        // Disabled for now because we are using a short input window
        // recognition.start();
        // Every custom event handler needs to apply its scope
        $scope.$apply();
      };
    }
  }

  // getUserMedia success and error callbacks
  var gumSuccess = function (stream) {
    // Support various implementations of AudioContext
    $scope.context = new (window.AudioContext || window.webkitAudioContext)();
    $scope.mediaStreamSource = $scope.context.createMediaStreamSource(stream);
    // Single-channel recorder; mono is enough for speech.
    $scope.rec = new Recorder($scope.mediaStreamSource, {numChannels:1});
  }

  var gumError = function (err) {
    console.log('The following getUserMedia error occured: ' + err);
  }

  // Initialize microphone when this partial is loaded
  angular.element(document).ready(function() {
    // only load microphone if we have both gUM and speech recognition
    if (Modernizr.getusermedia && Modernizr.speechrecognition) {
      // Support multiple browser implementations of getUserMedia
      navigator.getUserMedia = (navigator.getUserMedia ||
                                navigator.webkitGetUserMedia ||
                                navigator.mozGetUserMedia ||
                                navigator.msGetUserMedia ||
                                navigator.mediaDevices.getUserMedia);
      navigator.getUserMedia(
        {audio:true, video: false},
        gumSuccess, gumError
      );
      // Initialize the speech recognition object,
      // but don't start recognition yet
      // We start that along with recording when user presses button
      initRecognize();
    }
  });
});
|
<reponame>Banuba/beauty-android-java<filename>app/src/main/assets/bnb-resources/effects/Makeup/modules/hair/avg-color/accumulate/copy.frag.js<gh_stars>0
'use strict';
// Auto-generated asset shim: exports the path of the copy fragment shader
// so effect code can require() it. Do not edit the string by hand.
const fragmentShader = "modules/hair/avg-color/accumulate/copy.frag";
exports.default = fragmentShader;
|
#
# Copyright (c) 2000, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# @test
# @bug 4348213
# @summary Verify that deserialization allows an incoming class descriptor
# representing a class in the unnamed package to be resolved to a
# local class with the same name in a named package, and vice-versa.
# TESTJAVA must point at the JDK under test; without it we cannot run.
if [ "${TESTJAVA}" = "" ]
then
    echo "TESTJAVA not set. Test cannot execute. Failed."
    exit 1
fi

# Default the source dir to the current directory when the harness
# does not provide one.
if [ "${TESTSRC}" = "" ]
then
    TESTSRC="."
fi

# Echo commands and abort on the first failure.
set -ex

# Compile the fixtures into the scratch dir, then run the actual test.
${TESTJAVA}/bin/javac -d . ${TESTSRC}/A.java ${TESTSRC}/Test.java
${TESTJAVA}/bin/java Test
|
import React, { Fragment } from 'react';
import Typography from '@material-ui/core/Typography';
import {
List, ListItem, ListItemText, Divider
} from '@material-ui/core';
// Renders a flat list of comments; each entry shows the comment text with
// the author's username and creation time underneath, separated by dividers.
// NOTE(review): the array index is used as the React key — fine for an
// append-only list, but reordering/deleting comments would cause stale
// renders; prefer a stable comment id if one exists on `data`.
const CommentList = (props) => {
  const {
    comments
  } = props;
  return (
    <List >
      {comments.map((data, i) => (
        <Fragment key={i}>
          <ListItem>
            <ListItemText primary={data.comment} secondary={
              <Fragment>
                <Typography component="span" variant="body2" color="textPrimary">
                  {data.user.username}
                </Typography>
                {' '}{data.createdAt}
              </Fragment>
            }/>
          </ListItem>
          <Divider variant="fullWidth" component="li" />
        </Fragment>
      ))}
    </List>
  );
};
|
#! /usr/bin/env bash
# Regression test: exercises quex's --plot flag with and without a working
# graphviz 'dot' on PATH, plus with a missing --plot argument.
bug=1936707
if [[ $1 == "--hwut-info" ]]; then
    echo "sphericalcow: $bug 0.24.10 --plot flag doesnt seem to work"
    exit
fi
# Remember where we started so we can return at the end.
tmp=`pwd`
# (*) Create a screwed-up version of 'dot' the graphviz package.
echo "#! /usr/bin/env bash" > $bug/dot
# NOTE: The 'VIZ' is written weirdly so that quex doesn't find the keyword 'Graphviz'
# or similar variations.
echo "echo \"I am a screwed-up version of 'dot' (GraphVIZ)\"" >> $bug/dot
chmod u+x $bug/dot
cd $bug/
# (1) First, let's try with the 'good' dot program
echo "(1) Run with 'dot' available"
quex --cbm -i error.qx -o Simple --plot svg
echo
# (2) Second, Let's screw the PATH variable so that quex finds the screwed 'dot'
#     application in the bug's directory:
echo "(2) Run without 'dot' available"
export PATH=`pwd`:$PATH
quex --cbm -i error.qx -o Simple --plot svg
echo
# (3) Run again with the mandatory argument to --plot omitted.
echo "(3) Run with missing command line argument"
quex --cbm -i error.qx -o Simple --plot
echo
# cleansening
rm -rf Simple Simple.cpp Simple-token_ids Simplism dot X.svg
cd $tmp
|
#include <iostream>
#include <stdlib.h>
using namespace std;
// Exercise body: the student fills in six boolean literals.
// NOTE(review): as written, t2=false fails the harness's "Boolean 2" check,
// and f3=true contradicts the instruction that f1..f3 be false. Also note
// the harness below (DO NOT AMEND) applies the same `if (!b)` check to all
// six values, which conflicts with the instruction that f1..f3 be false —
// confirm the intended semantics with the exercise author before "fixing".
void your_code() {
    // Make your boolean statement!
    // Make below three statement to be true
    bool t1 = true;
    bool t2 = false;
    bool t3 = true;

    // Make below three statement to be false
    bool f1 = false;
    bool f2 = false;
    bool f3 = true;

    // DO NOT AMEND THIS CLAUSE
    test_pass_fail(t1, t2, t3, f1, f2, f3);
}
/* DO NOT AMEND THE CODE BELOW */
// Prints FAIL and aborts the whole program immediately.
void if_fail() {
    cout << "FAIL" << endl;
    exit(EXIT_FAILURE);
}
// Prints the per-check pass marker; execution continues.
void if_pass() {
    cout << "pass" << endl;
}
// Harness: checks each value in turn and exits at the first falsy one.
// Parameters are int (implicit bool->int conversion from the caller).
// NOTE(review): b4..b6 use the same `if (!b)` test as b1..b3, so the
// harness only passes when ALL six values are truthy — see the review note
// on your_code() about the apparent contradiction with the instructions.
void test_pass_fail(int b1, int b2, int b3, int b4, int b5, int b6) {
    cout << "Boolean 1: ";
    if (!b1) {
        if_fail();
    }
    if_pass();
    cout << "Boolean 2: ";
    if (!b2) {
        if_fail();
    }
    if_pass();
    cout << "Boolean 3: ";
    if (!b3) {
        if_fail();
    }
    if_pass();
    cout << "Boolean 4: ";
    if (!b4) {
        if_fail();
    }
    if_pass();
    cout << "Boolean 5: ";
    if (!b5) {
        if_fail();
    }
    if_pass();
    cout << "Boolean 6: ";
    if (!b6) {
        if_fail();
    }
    if_pass();
}
// Entry point: runs the student's answers through the harness.
int main() {
    your_code();
    return 0;
}
|
package transport // package github.com/justanotherorganization/justanotherbotkit/transport
import "github.com/justanotherorganization/justanotherbotkit/transport/internal/proto"
type (
	// Event wraps a pb.BaseEvent up with its accompanied transport, giving
	// handlers both the event payload and the transport it arrived on.
	// NOTE(review): this file imports ".../internal/proto" without an
	// explicit alias yet references the package as `pb` — confirm the
	// proto package declares `package pb`, otherwise the import needs an
	// alias (`import pb "...internal/proto"`).
	Event struct {
		*pb.BaseEvent
		Transport
	}
)
|
alter table comment add content varchar(1024) null; |
// Socket.io client handlers. These mutate the page-level globals
// `ships` and `data` and kick off the render loop via animFrame().
// NOTE(review): `ships`, `data`, `decodeBinary`, and `animFrame` are
// defined elsewhere in the page — confirmed only by usage here.
socket.on('rooms', function(msg) {
  console.log(msg);
});

// Mark the named player's ship as disconnected so rendering can react.
socket.on('player-disconnected', function(msg) {
  ships[msg].disconnected = true;
  console.log("player " + msg + " disconnected");
});

// Binary game-state frames are decoded by the page's decodeBinary helper.
socket.on('binary-data', function(msg) {
  decodeBinary(msg);
});

// First full snapshot from the server; receiving it starts the game loop.
socket.on('init-data', function(msg) {
  data = msg;
  // start game
  animFrame();
});

// Full ship roster replaces the local one when someone joins.
socket.on('new-ship-in-room', function(msg) {
  ships = msg;
});
|
# _*_ coding: utf-8 _*_
"""
Created by lr on 2019/08/30.
"""
from functools import wraps
from flask import request
from werkzeug.contrib.cache import SimpleCache
__author__ = 'lr'
'''
class Limiter(object):
cache = SimpleCache()
def limited(self, callback):
self.limited_callback = callback
return callback
def limit(self, key='', key_func=None, time_delta=60):
def decorator(f):
key_prefix = "limiter/"
@wraps(f)
def wrapper(*args, **kwargs):
# global cache
full_key = key_prefix + key_func() if key_func else key
value = Limiter.cache.get(full_key)
if not value:
Limiter.cache.set(full_key, time_delta, timeout=time_delta)
return f(*args, **kwargs)
else:
return self.limited_callback()
return wrapper
return decorator
'''
cache = SimpleCache()
def cached(timeout=5 * 60, key='cached_{}_{}'):
    '''Flask view-cache decorator backed by the module-level SimpleCache.

    :param timeout: cache lifetime in seconds
    :param key: format template for the cache key, filled with the request
        path and a canonical suffix built from the request arguments
    :return: decorator that returns the cached value when present,
        otherwise calls the view and stores its result
    '''
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Merge query-string and JSON-body arguments into one dict;
            # body keys win on collision (same as the original dict merge).
            query_args = dict(request.args.to_dict())
            body_args = request.get_json(silent=True) or {}
            req_args = {**query_args, **body_args}
            # Sort items so the cache key is independent of argument order
            # (previously the key depended on dict insertion order, so the
            # same logical request could produce different keys).
            suffix = ''.join(
                '&{}={}'.format(k, v) for k, v in sorted(req_args.items())
            )
            cache_key = key.format(request.path, suffix)
            value = cache.get(cache_key)
            if value is None:
                value = f(*args, **kwargs)
                cache.set(cache_key, value, timeout=timeout)
            return value
        return decorated_function
    return decorator
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#

# Uncomment a feed source
# Strips the leading '#' from any commented-out 'helloworld' feed line.
sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default

# Add a feed source
# echo 'src-git helloworld https://github.com/fw876/helloworld' >>feeds.conf.default
# echo 'src-git passwall https://github.com/xiaorouji/openwrt-passwall' >>feeds.conf.default
echo 'src-git small8 https://github.com/kenzok8/small-package' >>feeds.conf.default
|
package com.went.core.erabatis.component.condition;
import com.went.core.erabatis.phantom.ChainCondition;
import com.went.core.erabatis.phantom.Condition;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* <p>Title: </p>
* <p>Description: </p>
* <p>Copyright: Shanghai Batchsight GMP Information of management platform, Inc. Copyright(c) 2017</p>
*
* @author <NAME>
* @version 1.0
* <pre>History: 2018/2/8 <NAME> Create </pre>
*/
public class And implements ChainCondition<And> {

    /** Sub-conditions joined with logical AND. */
    private List<Condition> conditions;

    /** When true, the whole conjunction is negated: NOT (c1 AND c2 ...). */
    private boolean not = false;

    @Override
    public List<Condition> getConditions() {
        return conditions;
    }

    @Override
    public void setConditions(List<Condition> conditions) {
        this.conditions = conditions;
    }

    @Override
    public boolean isNot() {
        return not;
    }

    @Override
    public void setNot(boolean not) {
        this.not = not;
    }

    /**
     * Builds a conjunction from the given conditions.
     *
     * @param conditions conditions to AND together; copied into a mutable
     *                   {@link LinkedList} so callers can keep appending
     */
    public And(Condition... conditions) {
        // Copy constructor replaces the original create-then-addAll two-step.
        setConditions(new LinkedList<>(Arrays.asList(conditions)));
    }

    /**
     * Builds a conjunction backed directly by the supplied list.
     *
     * @param conditions conditions to AND together; stored as-is, not copied
     */
    public And(List<Condition> conditions) {
        setConditions(conditions);
    }
}
|
#!/bin/bash
# Joshua Meyer (2017)
# USAGE:
#
#    ./run.sh <data_dir> <num_iters_mono> <tot_gauss_mono> <num_iters_tri> \
#             <tot_gauss_tri> <num_leaves_tri> <exp_dir> <num_processors>
#
# INPUT:
#
# input_dir/
# lexicon.txt
# lexicon_nosil.txt
# phones.txt
# task.arpabo
# transcripts
#
# audio_dir/
# utterance1.wav
# utterance2.wav
# utterance3.wav
# .
# .
# utteranceN.wav
#
# config_dir/
# mfcc.conf
# topo_orig.proto
#
#
# OUTPUT:
#
# exp_dir
# feat_dir
# data_dir
#
# Job dispatcher and stage toggles (1 = run stage, 0 = skip).
cmd=utils/run.pl
train_monophones=1
train_triphones=1
adapt_models=0
save_model=0

# All eight positional arguments are mandatory.
if [ "$#" -ne 8 ]; then
    echo "ERROR: $0"
    echo "missing args"
    exit 1
fi

data_dir=$1
num_iters_mono=$2
tot_gauss_mono=$3
num_iters_tri=$4
tot_gauss_tri=$5
num_leaves_tri=$6
exp_dir=$7
num_processors=$8
# Stage 1: flat-start monophone training, then forced alignment of the
# training data with the resulting model.
if [ "$train_monophones" -eq "1" ]; then
    printf "\n####===========================####\n";
    printf "#### BEGIN TRAINING MONOPHONES ####\n";
    printf "####===========================####\n\n";
    printf "#### Train Monophones ####\n";
    steps/train_mono.sh \
        --cmd "$cmd" \
        --nj $num_processors \
        --num-iters $num_iters_mono \
        --totgauss $tot_gauss_mono \
        --beam 6 \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/monophones \
        || printf "\n####\n#### ERROR: train_mono.sh \n####\n\n" \
        || exit 1;
    # Print a summary of the trained model (num pdfs, gaussians, ...).
    ../../../src/gmmbin/gmm-info ${exp_dir}/monophones/final.mdl
    printf "#### Align Monophones ####\n";
    steps/align_si.sh \
        --cmd "$cmd" \
        --nj $num_processors \
        --boost-silence 1.25 \
        --beam 10 \
        --retry-beam 40 \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/monophones \
        ${exp_dir}/monophones_aligned \
        || printf "\n####\n#### ERROR: align_si.sh \n####\n\n" \
        || exit 1;
    printf "\n####===========================####\n";
    printf "#### END TRAINING MONOPHONES ####\n";
    printf "####===========================####\n\n";
fi
# Stage 2: delta-feature triphone training seeded from the monophone
# alignments, followed by re-alignment with the triphone model.
if [ "$train_triphones" -eq "1" ]; then
    printf "\n####==========================####\n";
    printf "#### BEGIN TRAINING TRIPHONES ####\n";
    printf "####==========================####\n\n";
    printf "### Train Triphones ###\n"
    steps/train_deltas.sh \
        --cmd "$cmd" \
        --num-iters $num_iters_tri \
        --beam 10 \
        $num_leaves_tri \
        $tot_gauss_tri \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/monophones_aligned \
        ${exp_dir}/triphones \
        || printf "\n####\n#### ERROR: train_deltas.sh \n####\n\n" \
        || exit 1;
    ../../../src/gmmbin/gmm-info ${exp_dir}/triphones/final.mdl
    printf "### Align Triphones ###\n"
    steps/align_si.sh \
        --cmd "$cmd" \
        --nj $num_processors \
        --boost-silence 1.25 \
        --beam 10 \
        --retry-beam 40 \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/triphones \
        ${exp_dir}/triphones_aligned \
        || printf "\n####\n#### ERROR: align_si.sh \n####\n\n" \
        || exit 1;
    printf "\n####========================####\n";
    printf "#### END TRAINING TRIPHONES ####\n";
    printf "####========================####\n\n";
fi
# Stage 3 (off by default): speaker adaptation — LDA+MLLT transform
# training, then speaker-adaptive training (SAT/fMLLR), each followed
# by re-alignment.
if [ "$adapt_models" -eq "1" ]; then
    printf "\n####==========================####\n";
    printf "#### BEGIN SPEAKER ADAPTATION ####\n";
    printf "####==========================####\n\n";
    printf "### Begin LDA + MLLT Triphones ###\n"
    steps/train_lda_mllt.sh \
        --cmd "$cmd" \
        --splice-opts "--left-context=3 --right-context=3" \
        $num_leaves_tri \
        $tot_gauss_tri \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/triphones_aligned \
        ${exp_dir}/triphones_lda_mllt \
        || printf "\n####\n#### ERROR: train_lda_mllt.sh \n####\n\n" \
        || exit 1;
    ../../../src/gmmbin/gmm-info ${exp_dir}/triphones_lda_mllt/final.mdl
    printf "### Align LDA + MLLT Triphones ###\n"
    steps/align_si.sh \
        --cmd "$cmd" \
        --nj $num_processors \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/triphones_lda_mllt \
        ${exp_dir}/triphones_lda_mllt_aligned \
        || printf "\n####\n#### ERROR: align_si.sh \n####\n\n" \
        || exit 1;
    printf "\n####===========================####\n";
    printf "#### BEGIN TRAINING SAT (fMLLR) ####\n";
    printf "####============================####\n\n";
    printf "### Train LDA + MLLT + SAT Triphones ###\n"
    steps/train_sat.sh \
        --cmd "$cmd" \
        $num_leaves_tri \
        $tot_gauss_tri \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/triphones_lda_mllt_aligned \
        ${exp_dir}/triphones_lda_mllt_sat \
        || printf "\n####\n#### ERROR: train_sat.sh \n####\n\n" \
        || exit 1;
    ../../../src/gmmbin/gmm-info ${exp_dir}/triphones_lda_mllt_sat/final.mdl
    printf "### Align LDA + MLLT + SAT Triphones ###\n"
    steps/align_fmllr.sh \
        --cmd "$cmd" \
        --nj $num_processors \
        ${data_dir}/train \
        ${data_dir}/lang \
        ${exp_dir}/triphones_lda_mllt_sat \
        ${exp_dir}/triphones_lda_mllt_sat_aligned \
        || printf "\n####\n#### ERROR: align_si.sh \n####\n\n" \
        || exit 1;
fi
# Stage 4 (off by default): package the trained model for reuse.
# NOTE(review): this section references $corpus_name and $run, which are
# never defined in this script (args are data_dir/exp_dir/etc.), and uses
# data_${corpus_name}/exp_${corpus_name} paths that don't match the arg
# names above — this stage looks stale/broken as-is; confirm before
# enabling save_model=1.
if [ "$save_model" -eq "1" ]; then
    # Copy all necessary files to use new LM with this acoustic model
    # and only necessary files to save space
    cp data_${corpus_name} ${corpus_name}_${run}
    # delete unneeded files
    rm -rf ${corpus_name}_${run}/train ${corpus_name}_${run}/test ${corpus_name}_${run}/lang_decode
    # copy acoustic model and decision tree to new dir
    mkdir ${corpus_name}_${run}/model
    cp exp_${corpus_name}/triphones/final.mdl ${corpus_name}_${run}/model/final.mdl
    cp exp_${corpus_name}/triphones/tree ${corpus_name}_${run}/model/tree
    tar -zcvf ${corpus_name}_${run}.tar.gz ${corpus_name}_${run}
    # clean up
    rm -rf ${corpus_name}_${run}
    # move for storage
    mkdir compressed_experiments
    mv ${corpus_name}_${run}.tar.gz compressed_experiments/${corpus_name}_${run}.tar.gz
fi

exit;
|
<reponame>dailave/oqs
/*
* $Id$
*
* Copyright 2006-2008 <NAME>. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opoo.oqs;
/**
 * Exposes the project's version and build metadata, read from the package
 * manifest at class-load time.
 *
 * <p>All accessors return {@code null} when the corresponding manifest
 * attribute is unavailable (e.g. when classes are not loaded from a jar).
 *
 * @author <NAME>(<EMAIL>)
 * @version 1.0
 */
public class Oqs {

    /** Package descriptor backing all metadata queries; attributes may be absent. */
    private static final Package PKG = Oqs.class.getPackage();

    /**
     * Pre-rendered info banner, built once since the metadata never changes.
     * Uses StringBuilder: no synchronization is needed for a static initializer.
     */
    private static final String OQS_INFO = new StringBuilder()
        .append("***************************************************************")
        .append("\n* ").append(Oqs.getImplementationTitle()).append(" - ").append(Oqs.getDescription())
        .append("\n* Version : ").append(Oqs.getImplementationVersion())
        .append("\n* License : Apache License Version 2.0")
        .append("\n* Copyright 2006-2008 <NAME>. All rights reserved.")
        .append("\n***************************************************************")
        .toString();

    /** @return the Implementation-Vendor manifest attribute, or {@code null}. */
    public static String getImplementationVendor() {
        return (PKG != null ? PKG.getImplementationVendor() : null);
    }

    /** @return the Implementation-Title manifest attribute, or {@code null}. */
    public static String getImplementationTitle() {
        return (PKG != null ? PKG.getImplementationTitle() : null);
    }

    /** @return the Implementation-Version manifest attribute, or {@code null}. */
    public static String getImplementationVersion() {
        return (PKG != null ? PKG.getImplementationVersion() : null);
    }

    /** @return the Specification-Version manifest attribute, or {@code null}. */
    public static String getSpecificationVersion() {
        return (PKG != null ? PKG.getSpecificationVersion() : null);
    }

    /** @return the Specification-Vendor manifest attribute, or {@code null}. */
    public static String getSpecificationVendor() {
        return (PKG != null ? PKG.getSpecificationVendor() : null);
    }

    /** @return the Specification-Title manifest attribute, or {@code null}. */
    public static String getSpecificationTitle() {
        return (PKG != null ? PKG.getSpecificationTitle() : null);
    }

    /** @return the project's one-line description; never {@code null}. */
    public static String getDescription() {
        return "Simple O/R Mapping & JDBC Extensions";
    }

    /** @return the pre-rendered multi-line info banner. */
    public static String getOqsInfo() {
        return OQS_INFO;
    }
}
|
#!/bin/bash
# Number of tests
total=8
###############################################################################################################################
clear
f_banner
echo -e "${BLUE}Uses recon-ng, Traceroute, wafw00f and Whatweb.${NC}"
echo
echo -e "${BLUE}[*] Acquire API keys for maximum results with recon-ng.${NC}"
echo
echo $medium
echo
echo "Usage"
echo
echo "Domain: target.com"
echo
echo $medium
echo
echo -n "Domain: "
read domain
# Check for no answer
if [[ -z $domain ]]; then
f_error
fi
if [ ! -d $home/data/$domain ]; then
cp -R $discover/report/ $home/data/$domain
sed -i "s/#COMPANY#/$company/" $home/data/$domain/index.htm
sed -i "s/#DOMAIN#/$domain/" $home/data/$domain/index.htm
sed -i "s/#DATE#/$rundate/" $home/data/$domain/index.htm
fi
echo
echo $medium
echo
###############################################################################################################################
echo " Sub-domains (1/$total)"
# Brute-force subdomains with dnsrecon, using whichever wordlist path
# exists on this distro (Kali vs PTF layout).
if [ -f /usr/share/dnsrecon/namelist.txt ]; then
    dnsrecon -d $domain -D /usr/share/dnsrecon/namelist.txt -f -t brt > tmp
fi
# PTF
if [ -f /pentest/intelligence-gathering/dnsrecon/namelist.txt ]; then
    dnsrecon -d $domain -D /pentest/intelligence-gathering/dnsrecon/namelist.txt -f -t brt > tmp
fi

# Keep only host/IP pairs for this domain, dropping status noise.
grep $domain tmp | grep -v "$domain\." | egrep -v '(Performing|Records Found|xxx)' | sed 's/\[\*\] //g; s/^[ \t]*//' | awk '{print $2,$3}' | column -t | sort -u > sub-dnsrecon
# Filter placeholder/bogon entries and lowercase the names.
egrep -v '(\[|.nat.|1.1.1.1|6.9.6.9|127.0.0.1)' sub-dnsrecon | tr '[A-Z]' '[a-z]' | column -t | sort -u | awk '$2 !~ /[a-z]/' > subdomains

# Merge with any subdomains found on previous runs, then rebuild the page.
if [ -e $home/data/$domain/data/subdomains.htm ]; then
    cat $home/data/$domain/data/subdomains.htm subdomains | grep -v "<" | grep -v "$domain\." | column -t | sort -u > subdomains-combined
    cp $discover/report/data/subdomains.htm $home/data/$domain/data/subdomains.htm
    cat subdomains-combined >> $home/data/$domain/data/subdomains.htm
    echo "</pre>" >> $home/data/$domain/data/subdomains.htm
fi

# Collect every discovered IP into 'hosts'.
# NOTE(review): $sip is presumably an IP-aware sort command defined by the
# framework — confirm against the main discover script.
awk '{print $3}' records > tmp
awk '{print $2}' sub-dnsrecon >> tmp
grep -E '[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}' tmp | egrep -v '(-|=|:|1.1.1.1|6.9.6.9|127.0.0.1)' | grep -v [a-z] | $sip > hosts
###############################################################################################################################
echo " Zone Transfer (2/$total)"
# Attempt an AXFR against the domain's name servers and strip status lines.
dnsrecon -d $domain -t axfr > tmp
egrep -v '(Checking for|Failed|filtered|No answer|NS Servers|Removing|reset|TCP Open|Testing NS)' tmp | sed 's/^....//g; /^$/d' > zonetransfer
echo

echo "Web Application Firewall (3/$total)"
# wafw00f header output (first 16 lines) is discarded; keep the findings.
wafw00f -a http://www.$domain > tmp 2>/dev/null
sed '1,16d' tmp > waf
echo

echo "Traceroute"
# Three traceroute flavors: UDP (default), ICMP echo (-I), TCP SYN (-T).
echo " UDP (4/$total)"
echo "UDP" > tmp
traceroute $domain | awk -F" " '{print $1,$2,$3}' >> tmp
echo >> tmp
echo "ICMP ECHO" >> tmp
echo " ICMP ECHO (5/$total)"
traceroute -I $domain | awk -F" " '{print $1,$2,$3}' >> tmp
echo >> tmp
echo "TCP SYN" >> tmp
echo " TCP SYN (6/$total)"
traceroute -T $domain | awk -F" " '{print $1,$2,$3}' >> tmp
grep -v 'traceroute' tmp > tmp2
# Remove blank lines from end of file
awk '/^[[:space:]]*$/{p++;next} {for(i=0;i<p;i++){printf "\n"}; p=0; print}' tmp2 > ztraceroute
echo

echo "Whatweb (~5 min) (7/$total)"
# Fingerprint every known subdomain with whatweb.
grep -v '<' $home/data/$domain/data/subdomains.htm | awk '{print $1}' > tmp
whatweb -i tmp --color=never --no-errors > tmp2 2>/dev/null
# Find lines that start with http, and insert a line after
sort tmp2 | sed '/^http/a\ ' > tmp3
# Cleanup
cat tmp3 | sed 's/,/\n/g; s/\[200 OK\]/\n\[200 OK\]\n/g; s/\[301 Moved Permanently\]/\n\[301 Moved Permanently\]\n/g; s/\[302 Found\]/\n\[302 Found\]\n/g; s/\[404 Not Found\]/\n\[404 Not Found\]\n/g' | egrep -v '(Unassigned|UNITED STATES)' | sed 's/^[ \t]*//' | cat -s | more > whatweb
# Pull addresses on this domain out of the whatweb results.
grep '@' whatweb | sed 's/Email//g; s/\[//g; s/\]//g' | tr '[A-Z]' '[a-z]' | grep "@$domain" | grep -v 'hosting' | cut -d ' ' -f2 | sort -u > emails
rm tmp*
# Remove all empty files
find . -type f -empty -exec rm "{}" \;
echo
###############################################################################################################################
echo "recon-ng (8/$total)"
# Build a recon-ng resource script scoped to this domain and run it.
echo "marketplace install all" > active.rc
echo "workspaces load $domain" >> active.rc
cat $discover/resource/recon-ng-active.rc >> active.rc
sed -i "s/yyy/$domain/g" active.rc
# NOTE(review): $CWD is not a standard shell variable (PWD is) — confirm
# the framework exports it before this script runs.
recon-ng -r $CWD/active.rc
###############################################################################################################################
# Assemble the plain-text summary (zreport), then append each artifact to
# its HTML page under $home/data/$domain/data/.
echo "Summary" > zreport
echo $short >> zreport
echo > tmp

# Per-artifact counts plus a listing; each block is optional.
if [ -e emails ]; then
    emailcount=$(wc -l emails | cut -d ' ' -f1)
    echo "Emails $emailcount" >> zreport
    echo "Emails ($emailcount)" >> tmp
    echo $short >> tmp
    cat emails >> tmp
    echo >> tmp
fi

if [ -e hosts ]; then
    hostcount=$(wc -l hosts | cut -d ' ' -f1)
    echo "Hosts $hostcount" >> zreport
    echo "Hosts ($hostcount)" >> tmp
    echo $short >> tmp
    cat hosts >> tmp
    echo >> tmp
fi

if [ -e subdomains ]; then
    subdomaincount=$(wc -l subdomains | cut -d ' ' -f1)
    echo "Subdomains $subdomaincount" >> zreport
    echo "Subdomains ($subdomaincount)" >> tmp
    echo $long >> tmp
    cat subdomains >> tmp
    echo >> tmp
fi

cat tmp >> zreport

echo "Web Application Firewall" >> zreport
echo $long >> zreport
cat waf >> zreport
echo >> zreport
echo "Traceroute" >> zreport
echo $long >> zreport
cat ztraceroute >> zreport
echo >> zreport
echo "Zone Transfer" >> zreport
echo $long >> zreport
cat zonetransfer >> zreport
echo >> zreport
echo "Whatweb" >> zreport
echo $long >> zreport
cat whatweb >> zreport

# Append each section to its HTML page, closing the <pre> block each time.
cat zreport >> $home/data/$domain/data/active-recon.htm
echo "</pre>" >> $home/data/$domain/data/active-recon.htm
cat ztraceroute >> $home/data/$domain/data/traceroute.htm
echo "</pre>" >> $home/data/$domain/data/traceroute.htm
cat waf >> $home/data/$domain/data/waf.htm
echo "</pre>" >> $home/data/$domain/data/waf.htm
cat whatweb >> $home/data/$domain/data/whatweb.htm
echo "</pre>" >> $home/data/$domain/data/whatweb.htm
cat zonetransfer >> $home/data/$domain/data/zonetransfer.htm
echo "</pre>" >> $home/data/$domain/data/zonetransfer.htm

# Merge newly found emails/hosts into the existing pages, deduplicated.
if [[ -e $home/data/$domain/data/emails.htm && -e emails ]]; then
    cat $home/data/$domain/data/emails.htm emails | grep -v '<' | sort -u > tmp-new-emails
    cat $home/data/$domain/data/emails.htm | grep '<' > tmp-new-page
    mv tmp-new-page $home/data/$domain/data/emails.htm
    cat tmp-new-emails >> $home/data/$domain/data/emails.htm
    echo "</pre>" >> $home/data/$domain/data/emails.htm
fi

if [[ -e $home/data/$domain/data/hosts.htm && -e hosts ]]; then
    cat $home/data/$domain/data/hosts.htm hosts | grep -v '<' | $sip > tmp-new-hosts
    cat $home/data/$domain/data/hosts.htm | grep '<' > tmp-new-page
    mv tmp-new-page $home/data/$domain/data/hosts.htm
    cat tmp-new-hosts >> $home/data/$domain/data/hosts.htm
    echo "</pre>" >> $home/data/$domain/data/hosts.htm
fi

# Archive raw tool output for later inspection; then clean up.
mv active.rc emails hosts sub* waf whatweb z* /tmp/subdomains-active $home/data/$domain/tools/active/ 2>/dev/null
rm tmp*

echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The supporting data folder is located at ${YELLOW}$home/data/$domain/${NC}\n"
$web $home/data/$domain/index.htm &
echo
echo
|
#!/bin/bash
# Drives the analysis pipeline for one benchmark:
#   $1 = benchmark name (required)
#   $2 = "p" to wipe cached data and re-run preprocessing, else skipped
#   $3 = "ns" to extract features without splitting
benchmark=$1
preprocess=$2
split=$3

if [ -z "${benchmark}" ]; then
    echo "benchmark is unset or set to the empty string"
    exit 1;
fi

if [ -z "${preprocess}" ]; then
    echo "No preprocessing"
    preprocess="np"
fi

if [ "${preprocess}" = "p" ]; then
    # Read the *_DIR values out of settings.py, stripping comments,
    # quotes, and whitespace.
    datasets=$(cat ./settings.py | grep "DATASETS_DIR" | cut -d '=' -f 2 | cut -d '#' -f 1 | tr -d \'\" | tr -d '[:space:]')
    bcs=$(cat ./settings.py | grep "BCS_DIR" | cut -d '=' -f 2 | cut -d '#' -f 1 | tr -d \'\" | tr -d '[:space:]')
    data=$(cat ./settings.py | grep "DATA_DIR" | cut -d '=' -f 2 | cut -d '#' -f 1 | tr -d \'\" | tr -d '[:space:]')
    echo "Removing dataset folder of $benchmark"
    rm -rf "$data/$datasets/$benchmark"
    echo "Removing IR folder of $benchmark"
    rm -rf "$data/$bcs/$benchmark"
    # Rebuild bitcode, PDGs, and alias analysis, then extract features.
    python __init__.py -p=$benchmark -a=BC
    python __init__.py -p=$benchmark -a=PDG
    python __init__.py -p=$benchmark -a=AS
    if [ "${split}" = "ns" ]; then
        python __init__.py -p=$benchmark -a=FE -ft=afs_NN
        python __init__.py -p=$benchmark -a=FE -ft=afs.bb1_NN
    else
        python __init__.py -p=$benchmark -a=FE -ft=afs_NN -s=True
        python __init__.py -p=$benchmark -a=FE -ft=afs.bb1_NN -s=True
    fi
fi

# Clustering runs happen regardless of whether preprocessing was redone.
python __init__.py -p=$benchmark -a=MC -cf=afs_NN,afs_G2v -ca=cc_0.95,cc_0.98 -sc=online
python __init__.py -p=$benchmark -a=MC -cf=afs.bb1_NN,afs.bb1_G2v -ca=cc_0.95,cc_0.98 -sc=online
|
<gh_stars>1-10
# Log formatter that emits one JSON object per line with syslog-style
# severity names (e.g. ERROR -> "err").
class Logger::SimpleJsonFormatter < Logger::Formatter
  Format = "[%s] [%s]: %s\n"

  # Ruby Logger severity -> syslog/graylog level name.
  SEVERITY_MAP = {
    "DEBUG" => "debug",
    "ERROR" => "err",
    "WARN" => "warning",
    "INFO" => "info",
    "FATAL" => "crit"
  }

  # strftime pattern for timestamps; nil means epoch seconds ("%s").
  attr_accessor :datetime_format

  def initialize
    @datetime_format = nil
  end

  # Renders one log record as a single JSON line.
  def call(severity, time, progname, msg)
    JSON.dump({time: format_datetime(time), level: map_severity(severity), full_message: msg2str(msg)}) + "\n"
  end

  protected

  # Unknown severities fall back to "info".
  # Hash#fetch with a default replaces the keys.include? ternary:
  # one lookup instead of a scan plus a lookup.
  def map_severity(severity)
    SEVERITY_MAP.fetch(severity, "info")
  end

  def format_datetime(time)
    if @datetime_format.nil?
      time.strftime("%s")
    else
      time.strftime(@datetime_format)
    end
  end

  # Stringifies a message; exceptions include class and backtrace.
  def msg2str(msg)
    case msg
    when ::String
      msg
    when ::Exception
      ("#{ msg.message } (#{ msg.class })\n" <<
        (msg.backtrace || []).join("\n"))
    else
      msg.inspect
    end
  end
end
# Logger subclass that always emits JSON lines via SimpleJsonFormatter.
class JsonLikeLogger < Logger
  def initialize(*args)
    super(*args)
    self.formatter = Logger::SimpleJsonFormatter.new
  end
end
# Rails buffered logger whose underlying logfile is opened through
# JsonLikeLogger, so buffered output is also JSON.
# NOTE(review): ActiveSupport::BufferedLogger was removed in Rails 4.1 —
# confirm the target Rails version still provides it.
class BufferedJsonLogger < ActiveSupport::BufferedLogger
  def open_logfile(log)
    JsonLikeLogger.new log
  end
end
|
#!/usr/bin/env bash
set -e -u -o pipefail

declare -r SCRIPT_NAME=$(basename "$0")
declare -r SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)

# log LEVEL MSG... : prints "LEVEL: MSG" to stdout.
log() {
  local level=$1; shift
  echo -e "$level: $@"
}

err() {
  log "ERROR" "$@" >&2
}

info() {
  log "INFO" "$@"
}

# die CODE MSG... : prints MSG via err and exits with CODE.
die() {
  local code=$1; shift
  # Fix: the original had a stray second `shift` after capturing "$@",
  # which fails (and aborts under `set -e`) when no args remain.
  local msg="$@"
  err "$msg"
  exit "$code"
}

usage() {
  local msg="$1"
  cat <<-EOF
	Error: $msg
	USAGE:
	  $SCRIPT_NAME CATALOG_VERSION DEST_DIR VERSION
	Example:
	  $SCRIPT_NAME release-v0.7 deploy/resources/v0.7.0 0.7.0
	EOF
  exit 1
}
#declare -r CATALOG_VERSION="release-v0.7"

# Upstream catalogs the ClusterTasks are pulled from.
declare -r TEKTON_CATALOG="https://raw.githubusercontent.com/openshift/tektoncd-catalog"
declare -r TEKTON_CATALOG_TASKS=(
  s2i
  openshift-client
  buildah
)

declare -r OPENSHIFT_CATALOG="https://raw.githubusercontent.com/openshift/pipelines-catalog"
# s2i builder variants, one per language runtime.
declare -r OPENSHIFT_CATALOG_TASKS=(
  s2i-go
  s2i-java-8
  s2i-java-11
  s2i-python-3
  s2i-nodejs
  s2i-perl
  s2i-php
  s2i-ruby
)
# download_task TASK_PATH TASK_URL
# Fetches TASK_URL, rewrites the manifest into a ClusterTask, and writes it
# to TASK_PATH. Returns non-zero if the URL does not exist.
download_task() {
  local task_path="$1"; shift
  local task_url="$1"; shift

  # Fix: report this call's own arguments instead of the caller's loop
  # variable `$t`, which this function cannot rely on being set.
  info "downloading ... $task_path from $task_url"

  # validate url
  curl --output /dev/null --silent --head --fail "$task_url" || return 1

  cat <<-EOF > "$task_path"
	# auto generated by script/update-tasks.sh
	# DO NOT EDIT: use the script instead
	# source: $task_url
	#
	---
	$(curl -sLf "$task_url" |
	  sed -e 's|^kind: Task|kind: ClusterTask|g' \
	      -e "s|^\(\s\+\)workingdir:\(.*\)|\1workingDir:\2|g" )
	EOF

  # NOTE: helps when the original and the generated need to compared
  # curl -sLf "$task_url" -o "$task_path.orig"
}
# get_tasks DEST_DIR VERSION CATALOG CATALOG_VERSION TASKS_ARRAY_NAME
# Downloads every task named in the given array from the catalog into
# DEST_DIR/<task>/ and writes a version-suffixed copy alongside each one.
get_tasks() {
  local dest_dir="$1"; shift
  # Dots become dashes so the version can appear in k8s resource names.
  local version="${1//./-}"; shift
  local catalog="$1"; shift
  local catalog_version="$1"; shift
  # NOTE: receives array by its name
  local catalog_tasks_ref="$1[@]"; shift
  local tasks=("${!catalog_tasks_ref}")
  info "Downloading tasks from catalog $catalog to $dest_dir directory"
  for t in ${tasks[@]} ; do
    # task filenames do not follow a naming convention,
    # some are taskname.yaml while others are taskname-task.yaml
    # so, try both before failing
    local task_url="$catalog/$catalog_version/$t/${t}-task.yaml"
    local task_alt_url="$catalog/$catalog_version/$t/${t}.yaml"
    mkdir -p "$dest_dir/$t/"
    local task_path="$dest_dir/$t/$t-task.yaml"
    download_task "$task_path" "$task_url" ||
      download_task "$task_path" "$task_alt_url" ||
      die 1 "Failed to download $t"
    create_version "$task_path" "$t" "$version" ||
      die 1 "failed to convert $t to $t-$version"
  done
}
# create_version TASK_PATH TASK VERSION
# Writes a copy of TASK_PATH as <task>-<version>-task.yaml with the task's
# `name:` field suffixed by -<version> (e.g. s2i -> s2i-0-7-0).
create_version() {
  local task_path="$1"; shift
  local task="$1"; shift
  local version="$1"; shift
  local task_version_path="$(dirname $task_path)/$task-$version-task.yaml"
  sed \
    -e "s|^\(\s\+name:\)\s\+\($task\)|\1 \2-$version|g" \
    $task_path > "$task_version_path"
}
# main CATALOG_VERSION DEST_DIR VERSION
# Validates arguments, prepares the clustertasks directory, and downloads
# both the Tekton and OpenShift catalog task sets.
main() {
  local catalog_version=${1:-''}
  [[ -z "$catalog_version" ]] && usage "missing catalog_version"
  shift

  local dest_dir=${1:-''}
  [[ -z "$dest_dir" ]] && usage "missing destination directory"
  shift

  local version=${1:-''}
  [[ -z "$version" ]] && usage "missing task_version"
  shift

  mkdir -p "$dest_dir" || die 1 "failed to create ${dest_dir}"

  dest_dir="$dest_dir/addons/clustertasks"
  # Fix: the error message referenced the undefined ${catalog_dir}.
  mkdir -p "$dest_dir" || die 1 "failed to create catalog dir ${dest_dir}"

  get_tasks "$dest_dir" "$version" \
    "$TEKTON_CATALOG" "$catalog_version" TEKTON_CATALOG_TASKS
  get_tasks "$dest_dir" "$version" \
    "$OPENSHIFT_CATALOG" "$catalog_version" OPENSHIFT_CATALOG_TASKS
  return $?
}

main "$@"
|
<filename>trclib/TrcColor.java
/*
* Copyright (c) 2020 Titan Robotics Club (http://www.titanrobotics.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package TrcCommonLib.trclib;
/**
* This class implements a platform independent color object. The color object describes color in either RGB or HSV
* formats. It also provides methods to convert between the two formats.
*/
/**
 * Platform independent color object holding the same color in two forms:
 * normalized RGB and HSV. Conversion happens once at construction.
 */
public class TrcColor
{
    private final double[] rgb;
    private final double[] hsv;

    /**
     * Creates a color from 8-bit RGB components, clipping each channel to
     * [0, 255] and storing it normalized to [0.0, 1.0].
     *
     * @param red specifies the red value (0-255).
     * @param green specifies the green value (0-255).
     * @param blue specifies the blue value (0-255).
     */
    public TrcColor(int red, int green, int blue)
    {
        rgb = new double[3];
        rgb[0] = TrcUtil.clipRange(red, 0, 255)/255.0;
        rgb[1] = TrcUtil.clipRange(green, 0, 255)/255.0;
        rgb[2] = TrcUtil.clipRange(blue, 0, 255)/255.0;
        // Cache the HSV form so the getters are O(1).
        hsv = rgbToHsv(rgb);
    }   //TrcColor

    /**
     * @return normalized red component (0.0-1.0).
     */
    public double getRed()
    {
        return rgb[0];
    }   //getRed

    /**
     * @return normalized green component (0.0-1.0).
     */
    public double getGreen()
    {
        return rgb[1];
    }   //getGreen

    /**
     * @return normalized blue component (0.0-1.0).
     */
    public double getBlue()
    {
        return rgb[2];
    }   //getBlue

    /**
     * @return defensive copy of the normalized RGB triple (0.0-1.0 each).
     */
    public double[] getRGB()
    {
        return rgb.clone();
    }   //getRGB

    /**
     * @return HSV hue (degrees, 0-360).
     */
    public double getHue()
    {
        return hsv[0];
    }   //getHue

    /**
     * @return HSV saturation.
     */
    public double getSaturation()
    {
        return hsv[1];
    }   //getSaturation

    /**
     * @return HSV value (brightness).
     */
    public double getValue()
    {
        return hsv[2];
    }   //getValue

    /**
     * @return defensive copy of the {hue, saturation, value} triple.
     */
    public double[] getHSV()
    {
        return hsv.clone();
    }   //getHSV

    /**
     * Converts normalized RGB components into HSV.
     *
     * @param rgb specifies normalized red, green and blue values (0.0-1.0).
     * @return array of {hue, saturation, value}.
     */
    public double[] rgbToHsv(double... rgb)
    {
        double min = Math.min(Math.min(rgb[0], rgb[1]), rgb[2]);
        double max = Math.max(Math.max(rgb[0], rgb[1]), rgb[2]);
        double delta = max - min;
        double hue, sat, value;

        value = max;
        if (delta < 0.00001)
        {
            // Gray scale: hue/saturation are meaningless, use 0 by convention.
            hue = sat = 0.0;
        }
        else if (max == 0.0)
        {
            // Pure black; hue is undefined.
            sat = 0.0;
            hue = Double.NaN;
        }
        else
        {
            sat = delta/max;
            // Hue sector depends on which channel dominates.
            if (rgb[0] == max)
            {
                hue = (rgb[1] - rgb[2])/delta;
            }
            else if (rgb[1] == max)
            {
                hue = 2.0 + (rgb[2] - rgb[0])/delta;
            }
            else
            {
                hue = 4.0 + (rgb[0] - rgb[1])/delta;
            }
            // Scale sectors to degrees and normalize into [0, 360).
            hue *= 60.0;
            if (hue < 0.0)
            {
                hue += 360.0;
            }
        }

        return new double[]{hue, sat, value};
    }   //rgbToHsv

}   //class TrcColor
|
#!/bin/bash
#PBS -V
#PBS -N no_w_CNRM_F2_85
#PBS -l nodes=1:ppn=1,walltime=2:00:00
#PBS -l mem=40gb
#PBS -q fast
#PBS -k o
##PBS -j oe
#PBS -e /home/hnoorazar/analog_codes/04_analysis/parallel/quick/error/E_no_w_CNRM_F2_85
#PBS -o /home/hnoorazar/analog_codes/04_analysis/parallel/quick/error/O_no_w_CNRM_F2_85
#PBS -m abe

echo
echo We are in the $PWD directory
echo
cd /home/hnoorazar/analog_codes/04_analysis/parallel/quick
echo
echo We are now in $PWD.
echo
module purge
# Load R
module load udunits/2.2.20
module load libxml2/2.9.4
module load gdal/2.1.2_gcc proj/4.9.2
module load gcc/7.3.0 r/3.5.1/gcc/7.3.0
# NOTE(review): the three loads below repeat modules loaded just above —
# harmless, but could be pruned.
module load gcc/7.3.0
module load r/3.5.1/gcc/7.3.0
module load r/3.5.1

# County counts for RCP8.5 / CNRM-CM5 / 2051-2075, no-precip variant.
Rscript --vanilla ./no_no_count_counties_quick.R rcp85 no_precip 1 w_gen3 CNRM-CM5 _2051_2075

echo
echo "----- DONE -----"
echo
exit 0
|
<reponame>AlexChachanashviliOss/Decimated
package io.github.achacha.decimated.timeprovider;
/**
 * {@link TimeProvider} implementation backed by the system wall clock.
 */
public class TimeProviderSystem implements TimeProvider {

    /**
     * @return current wall-clock time in milliseconds since the Unix epoch
     */
    @Override
    public long getMillis() {
        return System.currentTimeMillis();
    }
}
|
TERMUX_PKG_HOMEPAGE=http://kubernetes.io
TERMUX_PKG_DESCRIPTION="Kubernetes.io client binary"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_MAINTAINER="Leonid Plyushch <leonid.plyushch@gmail.com>"
TERMUX_PKG_VERSION=1.16.2
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL=https://dl.k8s.io/v$TERMUX_PKG_VERSION/kubernetes-src.tar.gz
TERMUX_PKG_SHA256=21d884b67abd1182958313474a40678ba8f3713e6b6f520401e42c02ba6ea302

# Upstream ships a plain source tarball, so download and unpack it manually
# instead of using the default extraction step.
termux_step_extract_package() {
	mkdir -p "$TERMUX_PKG_CACHEDIR"
	mkdir -p "$TERMUX_PKG_SRCDIR"
	termux_download "$TERMUX_PKG_SRCURL" "$TERMUX_PKG_CACHEDIR"/kubernetes-src.tar.gz \
		"$TERMUX_PKG_SHA256"
	tar xf "$TERMUX_PKG_CACHEDIR"/kubernetes-src.tar.gz \
		-C "$TERMUX_PKG_SRCDIR"
}

# Build only the kubectl command with the Termux Go toolchain.
termux_step_make() {
	termux_setup_golang
	# Needed to generate manpages.
	#(
	#	export GOPATH="$TERMUX_PKG_BUILDDIR/host"
	#	unset GOOS GOARCH CGO_LDFLAGS
	#	unset CC CXX CFLAGS CXXFLAGS LDFLAGS
	#	cd "$TERMUX_PKG_SRCDIR"
	#	./hack/update-generated-docs.sh
	#)
	export GOPATH="$TERMUX_PKG_BUILDDIR/target"
	#chmod +w "$TERMUX_PKG_SRCDIR"/_output
	#rm -rf "$TERMUX_PKG_SRCDIR"/_output
	cd "$TERMUX_PKG_SRCDIR"/cmd/kubectl
	go build .
}

# Install the binary; man-page installation is left disabled (see above).
termux_step_make_install() {
	install -Dm700 "$TERMUX_PKG_SRCDIR"/cmd/kubectl/kubectl \
		"$TERMUX_PREFIX"/bin/kubectl
	#mkdir -p "$TERMUX_PREFIX"/share/man/man1
	#cp -f "$TERMUX_PKG_SRCDIR"/docs/man/man1/kubectl-*.1 \
	#	"$TERMUX_PREFIX"/share/man/man1/
}
|
#!/bin/bash
# Set up the CMS software environment and grid credentials, then run the
# AOD re-reco progress bookkeeping scripts.
source /cvmfs/cms.cern.ch/cmsset_default.sh
export SCRAM_ARCH=slc6_amd64_gcc700
export SSL_CERT_DIR=/etc/grid-security/certificates
export X509_USER_PROXY=/home/tuos/x509up_u126986
cd /scratch/tuos/trigger/CMSSW_10_3_0_pre5/src/rerecoMonitor/AODProgress
eval `scramv1 runtime -sh`

# Timestamp only used by the commented-out raw-data step below.
dateAndTime=$(date +"%Y%m%d_%H%M%S")
#echo "running getrawfromdas.sh"
#fileName_getrawfromdas="output_getrawfromdas_$dateAndTime.txt"
#./getrawfromdas.sh > "$fileName_getrawfromdas"

echo "running getaodfromdas.sh"
fileName_getaodfromdas="output_getaodfromdas.txt"
./getaodfromdas.sh > "$fileName_getaodfromdas"

echo "running readAODTotal.sh"
fileName_progress="output_rereco_events.txt"
# Appends (>>) so event counts accumulate across runs.
./readAODTotal.sh >> "$fileName_progress"
|
from ..Core.commands import Commands
from ..Core.registers import Registers
from ..Runtime.base import Base
from ..Runtime.atoi import Atoi
from .write import Write
class Read(Base):
    # Class-level flag: the read.asm runtime source is linked in at most
    # once per process, no matter how many Read instances are created.
    is_loaded = False

    def __init__(self, compiler):
        """Load the 'read' runtime routine (once) plus its 'write' dependency."""
        Base.__init__(self, compiler)
        if Read.is_loaded:
            return
        self.load('read.asm', 'read')
        Read.is_loaded = True
        # 'read' emits via 'write', so make sure it is loaded too.
        Write(compiler)

    def call(self):
        """Emit code: call 'read', print "> " and push the parsed int value."""
        self.compiler.code.add(Commands.CALL, ['read'])
        # Ensures the atoi runtime routine is loaded before it is called below.
        Atoi(self.compiler)
        # 62/32 are ASCII '>' and ' ' — emits a "> " marker via 'write'.
        # NOTE(review): emitted after the 'read' call; confirm order is intended.
        self.compiler.code.add(Commands.MOV, [Registers.EAX, 62])
        self.compiler.code.add(Commands.CALL, ['write'])
        self.compiler.code.add(Commands.MOV, [Registers.EAX, 32])
        self.compiler.code.add(Commands.CALL, ['write'])
        # Convert the read string to an integer and leave it on the stack.
        self.compiler.code.add(Commands.CALL, ['atoi'])
        self.compiler.code.add(Commands.PUSH, Registers.EAX)
|
import {
BadRequestException,
Body,
Controller,
Get,
Logger,
NotFoundException,
Post,
Redirect,
Render,
Res,
UseFilters,
UseGuards,
UseInterceptors,
} from '@nestjs/common'
import { Response } from 'express'
import { CurrentUser } from '../common/decorators/current-user.decorator'
import { JwtAuthGuard } from './jwt/jwt.guard'
import { UsersService } from './users.service'
import { UserLogInDTO } from './dtos/user-login.dto'
import { UserDTO } from './dtos/user.dto'
// import { UserRegisterDTO } from './dtos/user-register.dto'
import { OnlyAdminInterceptor } from '../common/interceptors/only-admin.interceptor'
import { InjectRepository } from '@nestjs/typeorm'
import { UserEntity } from './users.entity'
import { Repository } from 'typeorm'
import { HttpApiExceptionFilter } from '../common/exceptions/http-api-exception.filter'
@Controller()
export class UsersController {
  private readonly logger = new Logger(UsersController.name)

  constructor(
    private readonly usersService: UsersService,
    @InjectRepository(UserEntity)
    private readonly usersRepository: Repository<UserEntity>,
  ) {}

  // @Post('users')
  // @UseFilters(new HttpApiExceptionFilter())
  // async signUp(@Body() body: UserRegisterDTO) {
  //   return this.usersService.registerUser(body)
  // }

  // GET /login — renders the login page; already-authenticated users get a
  // 404 (the Korean message reads "login complete").
  @Render('pages/login')
  @Get('login')
  @UseGuards(JwtAuthGuard)
  async getLogIn(
    @CurrentUser() currentUser: UserDTO,
    @Res({ passthrough: true }) response: Response,
  ) {
    if (currentUser) throw new NotFoundException('로그인 완료')
    return { title: 'amamov | login' }
  }

  // POST /login — delegates credential check / cookie issuance to the
  // service, then redirects home.
  @Post('login')
  @Redirect('/')
  async logIn(
    @Body() body: UserLogInDTO,
    @Res({ passthrough: true }) response: Response,
  ) {
    await this.usersService.logIn(body, response)
  }

  // GET /logout — drops the JWT cookie and redirects home.
  @Get('logout')
  @Redirect('/')
  async logOut(@Res({ passthrough: true }) response: Response) {
    response.clearCookie('jwt')
  }

  // GET /users/v1/update — admin-only profile edit page, pre-filled with
  // the current user's bio.
  @Render('pages/user-update')
  @Get('users/v1/update')
  @UseGuards(JwtAuthGuard)
  @UseInterceptors(new OnlyAdminInterceptor())
  async getUserUpdatePage(@CurrentUser() currentUser: UserDTO) {
    return {
      title: 'amamov | profile update',
      initialValue: currentUser.bio || '',
    }
  }

  // POST /users/v1/update — admin-only; persists the submitted bio and
  // converts persistence failures into 400 responses.
  @Post('users/v1/update')
  @UseGuards(JwtAuthGuard)
  @UseInterceptors(new OnlyAdminInterceptor())
  @UseFilters(new HttpApiExceptionFilter())
  async updateUserUpdatePage(
    @CurrentUser() currentUser: UserDTO,
    @Body('contents') bio: string,
  ) {
    const user = await this.usersService.findUserById(currentUser.id)
    user.bio = bio
    try {
      await this.usersRepository.save(user)
    } catch (error) {
      this.logger.error(error)
      throw new BadRequestException(error)
    }
  }
}
|
// Test driver for the SumOfTwoIntegers solution.
import test from './SumOfTwoIntegers.js';

// Minimal binary-tree node (not used by this driver).
function TreeNode(val) {
  this.val = val;
  this.left = this.right = null;
}

// Minimal linked-list node (not used by this driver).
function ListNode(val) {
  this.val = val;
  this.next = null;
}

// Debug helper: unsigned 32-bit binary string of a number
// (>>> 0 coerces negatives to their two's-complement bit pattern).
function dec2bin(dec) {
  return (dec >>> 0).toString(2);
}

// Expected: 3 (-2 + 5).
console.dir(test(-2,5));
|
'use strict'

// Attribute directive rendering the sidenav partial and initializing
// Materialize's slide-out behavior on the collapse button.
// NOTE(review): AuthService is injected but unused in this link function —
// confirm it is needed (e.g. by the template) before removing.
angular.module('xentinels').directive('sidenav', ["AuthService", function(AuthService) {
    return {
        restrict: 'A',
        templateUrl: 'app/partials/sidenav.html',
        link: function($scope, $element, $attrs) {
            // Wire up Materialize side navigation.
            $(".button-collapse").sideNav();
        }
    };
}]);
|
#!/usr/bin/env bash
#this pastes together the input features from the real data
#with the RF class probabilities for both fl & nonfl ("5 class")
#only use the probabilities from here on
#e.g. SRR1163655.sorted.bam.bed.rl.nX3.minX2.mq.rm.sr.snps.ot.gc.umap.ed.td.logsX3.sm.sdX2.lmX4.lsX10
#or features.full as a symlink to the above (input to the prediction)

# Path to the full feature file (first CLI argument).
features_f=$1
# Common prefix of the RF probability output files.
p='features.full.just_features'

#paste $features_f ${p}.nonfl.2_class_out ${p}.fl.2_class_out ${p}.nonfl.4_class_out ${p}.fl.4_class_out | bgzip > features.classes.bgz
# Quote the user-supplied path so it survives word splitting/globbing.
paste "$features_f" "${p}.nonfl.5_class_out" "${p}.fl.5_class_out" | bgzip > features.classes.bgz
# Index by sequence (col 1) and start/end (cols 2-3) for region queries.
tabix -s1 -b2 -e3 features.classes.bgz
|
const httpStatus = require('http-status');
const APIError = require('../helpers/APIError');
const config = require('../../config/config');
// Hard-coded demo account.
// NOTE(review): plaintext credentials and the shared static config token
// are not safe for production use — confirm this is dev-only.
const user = {
  username: 'popgram',
  password: '<PASSWORD>'
};

/**
 * POST /login handler: returns the config token and username when the
 * submitted credentials match the demo account; otherwise forwards a
 * 401 APIError to the error middleware.
 */
function login(req, res, next) {
  if (req.body.username === user.username && req.body.password === user.password) {
    return res.json({
      token: config.token,
      username: user.username
    });
  }
  const err = new APIError('Authentication error', httpStatus.UNAUTHORIZED, true);
  return next(err);
}

module.exports = { login };
|
<reponame>valkirilov/fmi-rsa<filename>src/Point.java
/**
 * A simple class which represents a Point in the 2D plane.
 *
 * @author valentin
 */
public class Point {

    private long x;
    private long y;

    /**
     * Creates a point at the given coordinates.
     *
     * @param x horizontal coordinate
     * @param y vertical coordinate
     */
    Point(long x, long y) {
        this.x = x;
        this.y = y;
    }

    /** @return the horizontal coordinate */
    long getX() {
        return x;
    }

    /** @return the vertical coordinate */
    long getY() {
        return y;
    }

    /** @param x the new horizontal coordinate */
    void setX(long x) {
        this.x = x;
    }

    /** @param y the new vertical coordinate */
    void setY(long y) {
        this.y = y;
    }
}
|
<gh_stars>1-10
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Ads
    module GoogleAds
      module V8
        module Enums
          # Container for enum describing possible policy topic entry types.
          class PolicyTopicEntryTypeEnum
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # The possible policy topic entry types.
            # NOTE: numeric values are fixed by the upstream proto and are not
            # contiguous (3 is unused here).
            module PolicyTopicEntryType
              # No value has been specified.
              UNSPECIFIED = 0

              # The received value is not known in this version.
              #
              # This is a response-only value.
              UNKNOWN = 1

              # The resource will not be served.
              PROHIBITED = 2

              # The resource will not be served under some circumstances.
              LIMITED = 4

              # The resource cannot serve at all because of the current targeting
              # criteria.
              FULLY_LIMITED = 8

              # May be of interest, but does not limit how the resource is served.
              DESCRIPTIVE = 5

              # Could increase coverage beyond normal.
              BROADENING = 6

              # Constrained for all targeted countries, but may serve in other countries
              # through area of interest.
              AREA_OF_INTEREST_ONLY = 7
            end
          end
        end
      end
    end
  end
end
|
#!/bin/bash
set -e

# NOTE(review): TIMEOUT is defined but never passed to golangci-lint
# (e.g. via --timeout) — confirm whether it should be wired in.
TIMEOUT=300s

# Lint each package path given on the command line with a fixed linter set;
# set -e aborts on the first failing package.
for i in $@; do
    CGO_ENABLED=0 golangci-lint run -e format_gen --disable-all \
    -Egofmt \
    -Egovet \
    -Egolint \
    -Egoimports \
    -Eineffassign \
    -Eerrcheck \
    -Edeadcode \
    -Emisspell \
    -Egocyclo \
    -Estaticcheck \
    -Egosimple \
    -Estructcheck \
    -Etypecheck \
    -Eunused \
    -Evarcheck \
    -Eunconvert \
    -Emaligned \
    -Eprealloc \
    -Estylecheck \
    $i && echo "ok\t$i"
    # NOTE(review): plain `echo` does not expand \t (prints it literally);
    # use printf or `echo -e` if a real tab is intended.
done
|
#!/usr/bin/env bash
# Build the minimal bundle, then run go.sh on the three underexposed
# bad-quality test images.
yarn build:minimal && ./go.sh "static/testData/badQuality/underexposed_1.jpg" "static/testData/badQuality/underexposed_2.jpg" "static/testData/badQuality/underexposed_3.jpg"
|
<reponame>thekevinscott/ml-classifier<gh_stars>100-1000
import * as tf from '@tensorflow/tfjs';
// Maps a class label to its numeric class index.
export interface IClasses {
  [index: string]: number;
}

// export enum DataType {
//   TRAIN = "train",
//   EVAL = "eval",
// };

// Label->index mapping plus per-key image tensors.
// NOTE(review): the index signature declares IImageData while `classes` is
// IClasses — confirm this intentionally relies on loose checking.
export interface IData {
  classes: IClasses;
  [index: string]: IImageData;
}

// Input tensors (xs) and one-hot labels (ys) for a batch of images.
export interface IImageData {
  xs?: tf.Tensor;
  ys?: tf.Tensor2D;
}

// All collected xs/ys tensors together with the class mapping.
export interface ICollectedData {
  classes: IClasses;
  xs?: tf.Tensor;
  ys?: tf.Tensor2D;
}

// Open-ended hyperparameter bag; the two common knobs are typed explicitly.
export interface IParams {
  [index: string]: any;
  batchSize?: number;
  epochs?: number;
};

// Any JS typed-array view accepted as raw tensor input.
export type TypedArray = Int8Array | Uint8Array | Int16Array | Uint16Array | Int32Array | Uint32Array | Uint8ClampedArray | Float32Array | Float64Array;

// Model sources and lifecycle callbacks accepted by the classifier.
export interface IArgs {
  pretrainedModel?: string | tf.Model;
  trainingModel?: tf.Model | Function;
  // trainingModel?: tf.Model | (data: IImageData, classes: number, params: IParams) => tf.Model;
  onLoadStart?: Function;
  onLoadComplete?: Function;
  onAddDataStart?: Function;
  onAddDataComplete?: Function;
  onClearDataStart?: Function;
  onClearDataComplete?: Function;
  onTrainStart?: Function;
  onTrainComplete?: Function;
  onPredictComplete?: Function;
  onPredictStart?: Function;
  onEvaluateStart?: Function;
  onEvaluateComplete?: Function;
  onSaveStart?: Function;
  onSaveComplete?: Function;
}
|
/**
 * Resolves a wildcard import statement to a .js file path relative to the
 * importing file. If the resolved path contains a known module-type segment
 * (immutable/mutable/causal/hashing/literals), the path is truncated right
 * after that segment.
 *
 * Fixes: the original never stripped the quotes from the module specifier
 * and concatenated "./model/..." verbatim, so the documented example
 * actually produced "/src'./model/immutable'.js"-style garbage.
 */
function resolveModulePath(importStatement, currentFilePath) {
    // 'immutable' must precede 'mutable' since the latter is a substring.
    const moduleTypes = ['immutable', 'mutable', 'causal', 'hashing', 'literals'];
    // Drop the import keyword, surrounding whitespace and the quotes.
    let modulePath = importStatement.replace('import * from', '').trim().replace(/^['"]|['"]$/g, '');
    // A leading "./" is relative to the importing file's directory.
    if (modulePath.startsWith('./')) {
        modulePath = modulePath.slice(1);
    }
    const baseDir = currentFilePath.split('/').slice(0, -1).join('/');
    const resolvedPath = baseDir + modulePath + '.js';
    for (const type of moduleTypes) {
        const typeIndex = resolvedPath.indexOf(type);
        if (typeIndex !== -1) {
            // Truncate right after the module-type segment.
            return resolvedPath.slice(0, typeIndex + type.length) + '.js';
        }
    }
    return resolvedPath;
}

// Example usage
console.log(resolveModulePath("import * from './model/immutable'", "/src/main.js")); // Output: "/src/model/immutable.js"
import os
from bs4 import BeautifulSoup
import requests
from urllib.parse import urlparse
# Destination for C-extension package folders inside the kolibri dist tree.
DIST_CEXT = os.path.join(
    os.path.dirname(os.path.realpath(os.path.dirname(__file__))),
    "kolibri",
    "dist",
    "cext",
)
# PEP 503 "simple" index URLs recognized in scraped links.
PYPI_DOWNLOAD = "https://pypi.python.org/simple/"
PIWHEEL_DOWNLOAD = "https://www.piwheels.org/simple/"


def process_html_file(html_file_path):
    """Scan an HTML file for links into the PyPI/piwheels simple indexes and
    create a directory under DIST_CEXT for each linked package.

    NOTE(review): only directories are created here — no download happens in
    this function; confirm downloading is handled elsewhere.
    """
    with open(html_file_path, 'r') as file:
        html_content = file.read()

    soup = BeautifulSoup(html_content, 'html.parser')
    links = soup.find_all('a', href=True)

    for link in links:
        url = link['href']
        if url.startswith(PYPI_DOWNLOAD) or url.startswith(PIWHEEL_DOWNLOAD):
            # Last path component of the URL names the package/artifact.
            package_name = os.path.basename(urlparse(url).path)
            package_dir = os.path.join(DIST_CEXT, package_name)
            os.makedirs(package_dir, exist_ok=True)
package academy.devonline.java.home_section001_classes.methods_dyna_array.dyna_array_contains;

/**
 * Manual smoke test for {@code DynaArray#contains}: fills the array with
 * 0..3 and prints the membership result (rendered via {@code onString})
 * for both present and absent values.
 */
public class DynaArrayTest {

    public static void main(String[] args) {
        DynaArray dynaArray = new DynaArray();
        dynaArray.add(0);
        dynaArray.add(1);
        dynaArray.add(2);
        dynaArray.add(3);
        // Values that were added...
        System.out.println(dynaArray.onString(dynaArray.contains(0)));
        System.out.println(dynaArray.onString(dynaArray.contains(2)));
        // ...and values outside the stored range.
        System.out.println(dynaArray.onString(dynaArray.contains(100)));
        System.out.println(dynaArray.onString(dynaArray.contains(-3)));
    }
}
|
This deep learning model can be implemented using a three-part architecture to recognize the sentiment of a sentence:
1. A preprocessing layer which will process the input sentence and extract features that can be used as inputs to the model. This can include tokenization to create word embeddings, lemmatization to normalize words, and stop-word removal to eliminate unnecessary words from the sentence.
2. An encoder layer which will encode the input sentence into a numerical representation. This can be done using a recurrent neural network (RNN) or a Long Short-Term Memory (LSTM) network.
3. An output layer which will use the generated numerical representation to classify the sentiment into one of two categories: positive or negative. This can be done using a simple binary classification algorithm such as logistic regression.
<gh_stars>0
/**
* CertManager is based on [jetstack's cert-manager](https://github.com/jetstack/cert-manager) helm chart.
*
* @module "@kloudlib/cert-manager"
* @packageDocumentation
*
* @example
* ```typescript
* import { CertManager } from '@kloudlib/cert-manager';
*
* new CertManger('cert-manager', {
* useStagingACME: true,
* acme: {
* email: '<EMAIL>',
* },
* });
* ```
*/
import * as pulumi from '@pulumi/pulumi';
import * as k8s from '@pulumi/kubernetes';
import * as abstractions from '@kloudlib/abstractions';
export interface CertManagerInputs {
  // Kubernetes provider to deploy with; falls back to the ambient provider.
  provider?: k8s.Provider;
  // Namespace to install cert-manager (and the ClusterIssuer metadata) into.
  namespace?: pulumi.Input<string>;
  /**
   * the helm chart version
   */
  version?: string;
  /**
   * if true then the staging ACME api will be used
   * rather than the production ACME api.
   * defaults to true
   */
  useStagingACME?: boolean;
  /**
   * configure acme settings
   */
  acme?: {
    /**
     * the email that letsencrypt reminders will be sent to
     */
    email?: string;
  };
}

export interface CertManagerOutputs {
  /**
   * Helm metadata (chart name, version and repo used for the install)
   */
  meta: pulumi.Output<abstractions.HelmMeta>;
}
/**
 * Installs the cert-manager helm chart and a cluster-wide ACME
 * ClusterIssuer wired up as the ingress-shim default issuer.
 *
 * @noInheritDoc
 */
export class CertManager extends pulumi.ComponentResource implements CertManagerOutputs {
  readonly meta: pulumi.Output<abstractions.HelmMeta>;

  constructor(name: string, props?: CertManagerInputs, opts?: pulumi.CustomResourceOptions) {
    super('kloudlib:CertManager', name, props, opts);

    const certIssuerName = `${name}-cert-issuer`;

    this.meta = pulumi.output<abstractions.HelmMeta>({
      chart: 'cert-manager',
      version: props?.version ?? 'v0.15.2',
      repo: 'https://charts.jetstack.io',
    });

    // Note: cert manager requires manual installation of
    // custom resource definitions. This has been done above.
    // When upgrading cert manager these CRDs will generally
    // also require updating. Please follow the online documentation
    // when updating cert manager closely.
    // https://github.com/jetstack/cert-manager/tree/master/deploy
    const certManager = new k8s.helm.v3.Chart(
      name,
      {
        namespace: props?.namespace,
        version: this.meta.version,
        chart: this.meta.chart,
        fetchOpts: {
          repo: this.meta.repo,
        },
        values: {
          installCRDs: true,
          // Route ingress-shim-issued certificates through the
          // ClusterIssuer created below.
          ingressShim: {
            defaultIssuerKind: 'ClusterIssuer',
            defaultIssuerName: certIssuerName,
          },
        },
      },
      {
        parent: this,
        providers: props?.provider
          ? {
              kubernetes: props?.provider,
            }
          : {},
      }
    );

    // Cluster-wide ACME issuer; staging is the default unless
    // useStagingACME is explicitly false.
    const certIssuer = new k8s.apiextensions.CustomResource(
      certIssuerName,
      {
        apiVersion: 'cert-manager.io/v1alpha2',
        kind: 'ClusterIssuer',
        metadata: {
          name: certIssuerName,
          namespace: props?.namespace,
        },
        spec: {
          acme: {
            server:
              props?.useStagingACME === false
                ? 'https://acme-v02.api.letsencrypt.org/directory'
                : 'https://acme-staging-v02.api.letsencrypt.org/directory',
            email: props?.acme?.email,
            http01: {},
            // NOTE(review): the key secret is named 'letsencrypt-production'
            // even when the staging server is selected — confirm intended.
            privateKeySecretRef: {
              name: 'letsencrypt-production',
            },
            solvers: [
              {
                http01: {
                  ingress: {
                    class: 'nginx',
                  },
                },
              },
            ],
          },
        },
      },
      {
        parent: this,
        provider: props?.provider,
        dependsOn: [certManager],
      }
    );
  }
}
|
def combine_strings(str1, str2):
    """Return the concatenation of ``str1`` followed by ``str2``.

    Uses ``+``, so any pair of values supporting concatenation
    (str, list, tuple, ...) behaves the same as for strings.
    """
    combined = str1 + str2
    return combined
<gh_stars>0
package vectorwing.farmersdelight.common.block;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.world.damagesource.DamageSource;
import net.minecraft.world.entity.Entity;
import net.minecraft.world.entity.LivingEntity;
import net.minecraft.world.item.context.BlockPlaceContext;
import net.minecraft.world.level.BlockGetter;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.LevelAccessor;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.SimpleWaterloggedBlock;
import net.minecraft.world.level.block.SoundType;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.StateDefinition;
import net.minecraft.world.level.block.state.properties.BlockStateProperties;
import net.minecraft.world.level.block.state.properties.BooleanProperty;
import net.minecraft.world.level.material.FluidState;
import net.minecraft.world.level.material.Fluids;
import net.minecraft.world.level.material.Material;
import net.minecraft.world.phys.Vec3;
import net.minecraft.world.phys.shapes.CollisionContext;
import net.minecraft.world.phys.shapes.VoxelShape;
import javax.annotation.Nullable;
@SuppressWarnings("deprecation")
public class SafetyNetBlock extends Block implements SimpleWaterloggedBlock
{
	public static final BooleanProperty WATERLOGGED = BlockStateProperties.WATERLOGGED;
	// Thin horizontal slab at mid-block height (y = 8..9 of 16).
	protected static final VoxelShape SHAPE = Block.box(0.0D, 8.0D, 0.0D, 16.0D, 9.0D, 16.0D);

	public SafetyNetBlock() {
		super(Block.Properties.of(Material.CLOTH_DECORATION).strength(0.2F).sound(SoundType.WOOL));
		// Default to the non-waterlogged state.
		this.registerDefaultState(this.getStateDefinition().any().setValue(WATERLOGGED, false));
	}

	@Override
	protected void createBlockStateDefinition(StateDefinition.Builder<Block, BlockState> builder) {
		builder.add(WATERLOGGED);
	}

	// Waterlog automatically when placed inside a water source.
	@Nullable
	public BlockState getStateForPlacement(BlockPlaceContext context) {
		FluidState fluid = context.getLevel().getFluidState(context.getClickedPos());
		return this.defaultBlockState().setValue(WATERLOGGED, fluid.getType() == Fluids.WATER);
	}

	@Override
	public BlockState updateShape(BlockState stateIn, Direction facing, BlockState facingState, LevelAccessor worldIn, BlockPos currentPos, BlockPos facingPos) {
		// Keep the contained water ticking while waterlogged.
		if (stateIn.getValue(WATERLOGGED)) {
			worldIn.scheduleTick(currentPos, Fluids.WATER, Fluids.WATER.getTickDelay(worldIn));
		}
		return super.updateShape(stateIn, facing, facingState, worldIn, currentPos, facingPos);
	}

	@Override
	public FluidState getFluidState(BlockState state) {
		return state.getValue(WATERLOGGED) ? Fluids.WATER.getSource(false) : super.getFluidState(state);
	}

	@Override
	public VoxelShape getShape(BlockState state, BlockGetter worldIn, BlockPos pos, CollisionContext context) {
		return SHAPE;
	}

	@Override
	public void fallOn(Level worldIn, BlockState state, BlockPos pos, Entity entityIn, float fallDistance) {
		if (entityIn.isSuppressingBounce()) {
			super.fallOn(worldIn, state, pos, entityIn, fallDistance);
		} else {
			// Damage multiplier 0.0F => the net negates fall damage entirely.
			entityIn.causeFallDamage(fallDistance, 0.0F, DamageSource.FALL);
		}
	}

	@Override
	public void updateEntityAfterFallOn(BlockGetter worldIn, Entity entityIn) {
		if (entityIn.isSuppressingBounce()) {
			super.updateEntityAfterFallOn(worldIn, entityIn);
		} else {
			this.bounceEntity(entityIn);
		}
	}

	// Reflects downward motion upward, damped by an entity-dependent factor
	// (0.6 for living entities, 0.8 otherwise).
	private void bounceEntity(Entity entityIn) {
		Vec3 vec3d = entityIn.getDeltaMovement();
		if (vec3d.y < 0.0D) {
			double entityWeightOffset = entityIn instanceof LivingEntity ? 0.6D : 0.8D;
			entityIn.setDeltaMovement(vec3d.x, -vec3d.y * entityWeightOffset, vec3d.z);
		}
	}
}
|
<gh_stars>0
// Demo class holding three option-derived properties.
class SomeClass {
  constructor(someOptions) {
    // Copy the three known options onto the instance, in declaration order.
    const { prop1, prop2, prop3 } = someOptions;
    this.prop1 = prop1;
    this.prop2 = prop2;
    this.prop3 = prop3;
  }

  // Prints a fixed identification message.
  someFunc() {
    console.log("This is some func of some class");
  }

  // Class-level tag identifying the type.
  static type = "SOME_CLASS";
}
// Demo subclass adding a fourth property and a computed accessor pair.
class SomeExtendClass extends SomeClass {
  constructor(someOptions) {
    super(someOptions);
    const { prop4 } = someOptions;
    this.prop4 = prop4;
  }

  // Extends the parent's message with an extra line of output.
  someFunc() {
    super.someFunc();
    console.log("Some text");
  }

  // Subclass-only behavior; reports prop4.
  anotherFunc() {
    console.log("This is some func of some extend class");
    console.log(`This is prop4: ${this.prop4}`);
  }

  static type = "SOME_EXTEND_CLASS";

  // Reads prop2 shifted by a fixed offset of 100.
  get prop2Calc() {
    return this.prop2 + 100;
  }

  // Writes straight through to prop2 (no offset on assignment).
  set prop2Calc(value) {
    this.prop2 = value;
  }
}
// This is some object.
const someObject = {
  prop1: "Property One",
  prop2: 1,
  prop3: function() {
    console.log(this.prop1);
  }
}

// This is some object to extend class.
const someExtendObject = {
  prop1: "Property Two",
  prop2: 2,
  prop3: function() {
    console.log(this.prop1);
  },
  prop4: "New property"
}

// Exercise the base class: construction, instance method, static tag.
const someClass = new SomeClass(someObject);
console.log(someClass);
someClass.someFunc();
console.log(SomeClass.type);

// Exercise the subclass, including the prop2Calc getter/setter pair
// (getter adds 100; setter writes prop2 directly).
const someExtendClass = new SomeExtendClass(someExtendObject);
console.log(someExtendClass);
someExtendClass.someFunc();
someExtendClass.anotherFunc();
console.log(someExtendClass.prop2Calc);
someExtendClass.prop2 = 11;
console.log(someExtendClass.prop2Calc);
console.log(SomeExtendClass.type);
|
package com.service;
import com.entity.User;
import java.util.List;
import java.util.Map;
/**
 * Service-layer contract for user lookup and session handling.
 * (Idiom fix: dropped the redundant {@code public} modifiers — interface
 * members are implicitly public.)
 */
public interface UserService {

    /** Fetches a user by primary key. */
    User getUser(long userId);

    /** Fetches the user matching the given credentials. */
    User getUser(String username, String userpass);

    /** NOTE(review): name/contract unclear from this file — confirm what is "set" and returned. */
    String setUser();

    /** Returns the users matching the given filter parameters. */
    List<User> getlist(Map<String, Object> map);
}
|
$(function (jQuery) {
    // Initialize the .slides carousel: autoplay with dot navigation and
    // without prev/next arrows.
    let slides = $('.slides');
    $(slides).slick({
        autoplay: true,
        dots: true,
        arrows: false,
        // Render each pagination dot; slick passes (slider, index).
        // Fix: removed leftover debug console.log calls that fired once
        // per dot on every init.
        customPaging: function (slider, i) {
            return "<button class='button-dot'>"
        }
    });
}(jQuery));
|
#!/bin/bash -vx
# CI lint step: runs the repo's lint:report npm script and records the
# checkstyle-format result location for the publish stage.
source $OKTA_HOME/$REPO/scripts/setup.sh

export TEST_SUITE_TYPE="checkstyle"
export TEST_RESULT_FILE_DIR="${REPO}/build2/reports/lint"

if ! npm run lint:report; then
  echo "lint failed! Exiting..."
  exit ${TEST_FAILURE}
fi

# Hand the suite type and result directory to the CI harness.
echo $TEST_SUITE_TYPE > $TEST_SUITE_TYPE_FILE
echo $TEST_RESULT_FILE_DIR > $TEST_RESULT_FILE_DIR_FILE
exit $PUBLISH_TYPE_AND_RESULT_DIR;
|
import { PrismaClientKnownRequestError } from '@prisma/client/runtime';
import { PrismaError } from './PrismaError';
/**
 * Wraps Prisma known-request error code P2013 ("missing required argument")
 * into the application's PrismaError hierarchy with a fixed message.
 */
export class PrismaP2013Error extends PrismaError {
  constructor(originalError: PrismaClientKnownRequestError) {
    super(originalError, 'Missing the required argument');
  }
}
|
import {registry} from '@jahia/ui-extender';

// import register from './ContentEditorExtensions.register';

// Register the content-editor extensions callback at app init (priority 20).
// The registration module is loaded lazily via dynamic import so it is only
// fetched when the callback actually runs.
registry.add('callback', 'contentEditorExtensions', {
    targets: ['jahiaApp-init:20'],
    // callback: register
    callback: () => import('./ContentEditorExtensions.register')
});
|
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "EulerAngles.hpp"
namespace openstudio {
/// Default constructor: all three angles zero (no rotation).
EulerAngles::EulerAngles() : m_storage(3, 0.0) {}
/// Constructor from the three angles, stored in the order psi, theta, phi.
/// NOTE(review): the angle unit (radians vs degrees) is not established in
/// this file — confirm against EulerAngles.hpp.
EulerAngles::EulerAngles(double psi, double theta, double phi) : m_storage(3) {
m_storage[0] = psi;
m_storage[1] = theta;
m_storage[2] = phi;
}
/// Copy constructor: duplicates the underlying 3-element storage.
EulerAngles::EulerAngles(const EulerAngles& other) : m_storage(other.m_storage) {}
/// Returns psi (first stored angle).
double EulerAngles::psi() const {
return m_storage[0];
}
/// Returns theta (second stored angle).
double EulerAngles::theta() const {
return m_storage[1];
}
/// Returns phi (third stored angle).
double EulerAngles::phi() const {
return m_storage[2];
}
/// Streams the angles as "[psi, theta, phi]".
std::ostream& operator<<(std::ostream& os, const EulerAngles& angles) {
os << "[" << angles.psi() << ", " << angles.theta() << ", " << angles.phi() << "]";
return os;
}
}  // namespace openstudio
|
def extract_view_names(url_patterns: list) -> dict:
    """Group URL patterns by the view name they map to.

    Args:
        url_patterns: iterable of ``(pattern, view_name)`` pairs.

    Returns:
        dict mapping each view name to the list of its patterns, preserving
        first-seen order of both names and patterns.
    """
    view_names_dict: dict = {}
    for pattern, view_name in url_patterns:
        # setdefault replaces the original's explicit membership check.
        view_names_dict.setdefault(view_name, []).append(pattern)
    return view_names_dict
#!/bin/bash -ex
# System test: verify the calico policy controller syncs k8s namespaces into
# etcd, including after repeated apiserver outages.
# Utilities.
# Print the first network IP of the named docker container.
function get_container_ip {
docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $1
}
# Create a k8s namespace via the raw apiserver REST endpoint (insecure, test-only).
function create_namespace {
name=$1
curl -k -H "Content-Type: application/yaml" -XPOST --data-binary @- https://172.17.0.3:6443/api/v1/namespaces <<EOF
apiVersion: v1
kind: Namespace
metadata:
name: ${name}
EOF
sleep 1
}
ETCD_IP=`get_container_ip st-etcd`
K8S_IP=`get_container_ip st-apiserver`
# Run policy controller.
docker rm -f calico-policy-controller || true
sleep 2
docker run --detach --name=calico-policy-controller \
-e K8S_API=https://${K8S_IP}:6443 \
-e K8S_INSECURE_SKIP_TLS_VERIFY=true \
-e ETCD_ENDPOINTS=http://${ETCD_IP}:2379 \
calico/kube-policy-controller
sleep 2
# Create a namespace.
NS_NAME=chocolate
create_namespace ${NS_NAME}
# Check for that namespace in etcd (grep fails the script if it is missing).
docker exec st-etcd etcdctl ls --recursive /calico | grep ${NS_NAME}
# Outage loop: restart the apiserver ten times and confirm the controller
# re-syncs a freshly created namespace after each recovery.
for n in `seq 0 9`; do
# Stop k8s API
make stop-k8s-apiserver
# Wait 60 seconds
sleep 60
# Start k8s API and etcd
make run-k8s-apiserver
# Wait 20 seconds
sleep 20
# Create k8s namespace
create_namespace testns${n}
# Check for that namespace in etcd.
docker exec st-etcd etcdctl ls --recursive /calico | grep testns${n}
done
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Start a one-shot listener on port 8888 that logs traffic to 'outserver1'
# and answers through the delay-server.sh helper (runs in the background).
nc -l 8888 -o outserver1 -c "sh ./delay-server.sh" &
# Give the listener a moment to bind before the client connects.
sleep 1
# Exercise the delayed chunked response over HTTP/2; $1 is the target port.
nghttp -vv https://127.0.0.1:${1}/delay-chunked-response
|
#!/usr/bin/env bash
# Point the current shell's docker CLI at Minikube's internal docker daemon
# by evaluating the DOCKER_* exports that 'minikube docker-env' prints.
eval $(minikube docker-env)
echo "Your shell is now configured to talk to Minikube, enjoy!"
|
class Grouping:
    """Maintains lists of indices grouped into keyed buckets."""

    def __init__(self):
        # bucket_index -> list of indices, insertion order preserved.
        self.buckets = {}

    def add_indices(self, bucket_index, indices):
        """Append ``indices`` to the bucket, creating it on first use."""
        self.buckets.setdefault(bucket_index, []).extend(indices)

    def remove_indices(self, bucket_index, indices):
        """Remove every occurrence of each given index from the bucket.

        A missing bucket is a no-op. A set provides O(1) membership tests
        instead of the original's O(len(indices)) scan per element.
        """
        if bucket_index in self.buckets:
            to_remove = set(indices)
            self.buckets[bucket_index] = [
                idx for idx in self.buckets[bucket_index] if idx not in to_remove
            ]

    def get_indices(self, bucket_index):
        """Return the bucket's index list, or ``[]`` for an unknown bucket."""
        return self.buckets.get(bucket_index, [])
#!/bin/bash

# Print the absolute directory containing this script file.
#
# Uses ${BASH_SOURCE[0]} instead of $0 so the result is correct even when this
# file is *sourced* and the function is later invoked interactively — exactly
# the limitation the original comment warned about ($0 is the caller's name in
# that case). Every expansion is quoted so paths with spaces survive.
get_relative_path(){
    echo "$(dirname "$(realpath "${BASH_SOURCE[0]}")")"
}

get_relative_path
package com.java.study.answer.zuo.dadvanced.advanced_class_05;
import java.util.Arrays;
/**
 * Minimum gold to slay all dragons, two variants:
 * <ul>
 *   <li>{@link #minGold1}: each dragon is slain by a single knight.</li>
 *   <li>{@link #minGold2}: a dragon may be slain by a team of knights.</li>
 * </ul>
 * A knight of power p costs p gold; Integer.MAX_VALUE means "impossible".
 */
public class Code_03_Min_Gold {

    /**
     * Greedy: for every dragon pick the cheapest single knight whose power is
     * at least the dragon's power. NOTE: sorts {@code knights} in place.
     *
     * @param knights powers (= costs) of the available knights
     * @param dragons powers of the dragons to slay
     * @return minimum total gold, or Integer.MAX_VALUE if some dragon is unbeatable
     */
    public static int minGold1(int[] knights, int[] dragons) {
        Arrays.sort(knights);
        int res = 0;
        for (int i = 0; i < dragons.length; i++) {
            int cost = getMaxLeftmost(knights, dragons[i]);
            if (cost == Integer.MAX_VALUE) {
                return Integer.MAX_VALUE;
            }
            res += cost;
        }
        return res;
    }

    /**
     * Binary search for the smallest element of {@code sortedArr} that is
     * greater than or equal to {@code dragon}.
     *
     * @param sortedArr array sorted in non-decreasing order
     * @param dragon    the threshold value
     * @return that element, or Integer.MAX_VALUE when every element is smaller
     */
    public static int getMaxLeftmost(int[] sortedArr, int dragon) {
        int L = 0;
        int R = sortedArr.length - 1;
        int index = -1;
        while (L <= R) {
            // Overflow-safe midpoint; the original (L + R) / 2 can overflow int.
            int mid = L + ((R - L) >> 1);
            if (sortedArr[mid] < dragon) {
                L = mid + 1;
            } else {
                index = mid;
                R = mid - 1;
            }
        }
        return index == -1 ? Integer.MAX_VALUE : sortedArr[index];
    }

    /**
     * Team variant via a subset-sum style DP over knight-power totals.
     * After the suffix-minimization step, dp[j] is the cheapest achievable
     * combined knight power that is at least j, so the same binary search as
     * minGold1 applies. All knight values must be positive.
     *
     * @param knights powers of the knights (all positive)
     * @param dragons powers of the dragons to slay
     * @return minimum total gold, or Integer.MAX_VALUE if some dragon is unbeatable
     */
    public static int minGold2(int[] knights, int[] dragons) {
        int sum = 0;
        for (int i = 0; i < knights.length; i++) {
            sum += knights[i];
        }
        // dp[j] = minimum cost found so far to reach a combined power of exactly j.
        int[] dp = new int[sum + 1];
        for (int i = 1; i <= sum; i++) {
            dp[i] = Integer.MAX_VALUE;
        }
        dp[knights[0]] = knights[0];
        for (int i = 1; i < knights.length; i++) {
            for (int j = 1; j <= sum; j++) {
                if (j - knights[i] >= 0
                        && dp[j - knights[i]] < Integer.MAX_VALUE) {
                    dp[j] = Math.min(dp[j], dp[j - knights[i]] + knights[i]);
                }
            }
        }
        // Suffix-minimize so dp[j] = cheapest cost of ANY power total >= j
        // (this also makes dp non-decreasing, as getMaxLeftmost requires).
        for (int i = dp.length - 2; i >= 0; i--) {
            dp[i] = Math.min(dp[i], dp[i + 1]);
        }
        int res = 0;
        for (int i = 0; i < dragons.length; i++) {
            int cost = getMaxLeftmost(dp, dragons[i]);
            if (cost == Integer.MAX_VALUE) {
                return Integer.MAX_VALUE;
            }
            res += cost;
        }
        return res;
    }

    /** Debug helper: prints the dp array, showing MAX_VALUE slots as "X". */
    public static void printArray(int[] dp) {
        for (int i = 0; i < dp.length; i++) {
            System.out.print((dp[i] == Integer.MAX_VALUE ? "X" : dp[i]) + " ");
        }
        System.out.println();
    }

    /** Smoke test comparing both variants on the same input. */
    public static void main(String[] args) {
        int[] knights1 = { 2, 10, 5 };
        int[] dragons1 = { 3, 8, 6 };
        System.out.println(minGold1(knights1, dragons1));

        int[] knights2 = { 2, 10, 5 };
        int[] dragons2 = { 3, 8, 6 };
        System.out.println(minGold2(knights2, dragons2));
    }
}
|
package com.company;
/**
 * Immutable pairing of a URL string with the crawl depth it was found at.
 */
public class URLDepthPair {

    // Final fields: a pair never changes after construction.
    private final String URL;
    private final int depth;

    /**
     * @param URL   the address this pair records
     * @param depth the crawl depth at which the URL was discovered
     */
    public URLDepthPair(String URL, int depth) {
        this.URL = URL;
        this.depth = depth;
    }

    /** @return the crawl depth */
    public int getDepth() { return depth; }

    /** @return the recorded URL */
    public String getURL() { return URL; }

    /** Human-readable form, e.g. {@code depth: 2 URL: [http://x]}. */
    @Override
    public String toString() {
        return "depth: " + depth + " URL: [" + URL + "]";
    }
}
|
package com.attributestudios.wolfarmor.client.renderer.entity.layer;
import com.attributestudios.wolfarmor.WolfArmorMod;
import com.attributestudios.wolfarmor.api.util.Capabilities;
import com.attributestudios.wolfarmor.api.util.Resources;
import com.attributestudios.wolfarmor.api.IWolfArmorCapability;
import com.attributestudios.wolfarmor.client.model.ModelWolfBackpack;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.entity.RenderLiving;
import net.minecraft.client.renderer.entity.layers.LayerRenderer;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import javax.annotation.Nonnull;
/**
 * A layer renderer that draws the backpack model on wolves whose wolf-armor
 * capability reports an equipped chest.
 */
@SideOnly(Side.CLIENT)
public class LayerWolfBackpack implements LayerRenderer<EntityWolf> {
    //region Fields

    /** Backpack model instance reused for every rendered wolf; set once in the constructor. */
    private final ModelWolfBackpack modelWolfBackpack;
    /** Parent renderer used for model attributes and texture binding. */
    private final RenderLiving renderer;

    //endregion Fields

    //region Constructors

    /**
     * Creates a new layer renderer for armored wolf backpacks.
     *
     * @param renderer The parent renderer.
     */
    public LayerWolfBackpack(@Nonnull RenderLiving renderer) {
        this.renderer = renderer;
        this.modelWolfBackpack = new ModelWolfBackpack(0.0F);
    }

    //endregion Constructors

    //region Public / Protected Methods

    /**
     * Renders the backpack layer. Skipped entirely when chest rendering is
     * disabled in the mod config or the wolf's capability reports no chest.
     * Invisible wolves are drawn in a translucent blended pass instead of the
     * normal opaque one.
     *
     * @param entityWolf      The wolf to render.
     * @param limbSwing       The entity's limb swing progress.
     * @param limbSwingAmount The entity's limb swing progress amount.
     * @param partialTicks    Fraction of the current tick, for animation interpolation.
     * @param ageInTicks      The entity's age.
     * @param netHeadYaw      The yaw of the entity's head.
     * @param headPitch       The pitch of the entity's head.
     * @param scale           The scale at which to render the layer.
     */
    @SuppressWarnings("ConstantConditions")
    @Override
    public void doRenderLayer(@Nonnull EntityWolf entityWolf,
                              float limbSwing,
                              float limbSwingAmount,
                              float partialTicks,
                              float ageInTicks,
                              float netHeadYaw,
                              float headPitch,
                              float scale) {
        if (!WolfArmorMod.getConfiguration().getIsWolfChestRenderEnabled()) {
            return;
        }

        IWolfArmorCapability wolfArmor = entityWolf.getCapability(Capabilities.CAPABILITY_WOLF_ARMOR, null);

        if (wolfArmor != null && wolfArmor.getHasChest()) {
            this.modelWolfBackpack.setModelAttributes(renderer.getMainModel());
            this.modelWolfBackpack.setLivingAnimations(entityWolf, limbSwing, limbSwingAmount, partialTicks);
            this.renderer.bindTexture(Resources.TEXTURE_WOLF_BACKPACK);

            GlStateManager.color(1, 1, 1, 1);

            if (!entityWolf.isInvisible()) {
                this.modelWolfBackpack.render(entityWolf, limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scale);
            } else {
                // Ghost pass: 15% alpha, depth writes off while alpha-blending.
                GlStateManager.pushMatrix();
                {
                    GlStateManager.color(1, 1, 1, 0.15F);
                    GlStateManager.depthMask(false);
                    {
                        GlStateManager.enableBlend();
                        {
                            GlStateManager.blendFunc(GlStateManager.SourceFactor.SRC_ALPHA,
                                    GlStateManager.DestFactor.ONE_MINUS_SRC_ALPHA);
                            this.modelWolfBackpack.render(entityWolf,
                                    limbSwing,
                                    limbSwingAmount,
                                    ageInTicks,
                                    netHeadYaw,
                                    headPitch,
                                    scale);
                        }
                        GlStateManager.disableBlend();
                    }
                    GlStateManager.depthMask(true);
                }
                GlStateManager.popMatrix();
            }
        }
    }

    //endregion Public / Protected Methods

    //region Accessors / Mutators

    /**
     * Whether or not textures should be combined.
     *
     * @return false.
     */
    @Override
    public boolean shouldCombineTextures() {
        return false;
    }

    //endregion Accessors / Mutators
}
|
import React, { useState, useEffect } from 'react'
import { pingUrl } from '../utils'
import { ipfsGateway, ipfsNodeUri } from '../../site.config'
import styles from './Status.module.css'
/**
 * Renders a colored status dot for either the IPFS gateway or the IPFS node,
 * polling the corresponding endpoint every 10 seconds.
 */
export default function Status({ type }: { type: string }) {
  const [isOnline, setIsOnline] = useState(false)
  const [isLoading, setIsLoading] = useState(true)

  useEffect(() => {
    // Defined inside the effect so it always sees the current `type`; renamed
    // from `ping` to stop the function shadowing its own result variable.
    async function checkStatus() {
      const url =
        type === 'gateway'
          ? `${ipfsGateway}/ipfs/QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG/readme`
          : `${ipfsNodeUri}/api/v0/id`
      const reachable = await pingUrl(url)
      setIsLoading(false)
      // No manual `!==` guard needed: React bails out when state is unchanged
      // (the original guard also compared against a stale `isOnline` closure).
      setIsOnline(reachable)
    }

    checkStatus()
    const timer = setInterval(checkStatus, 10000) // re-check every 10 sec

    // Only clear the interval: calling state setters in the unmount cleanup
    // (as the original did) is a no-op that triggers a React warning.
    return () => clearInterval(timer)
  }, [type])

  const classes = isLoading
    ? styles.loading
    : isOnline
    ? styles.online
    : styles.status

  return (
    <span
      className={classes}
      title={isLoading ? 'Checking...' : isOnline ? 'Online' : 'Offline'}
    />
  )
}
|
def run_game_loop(gameplay):
    """Drive the main pygame loop for ``gameplay``, capped at 60 FPS.

    Polls events (requesting a quit on window close), feeds the current key
    state to the gameplay object, advances and renders it each frame, and
    shuts pygame down once the gameplay reports it is no longer running.
    """
    frame_clock = pygame.time.Clock()
    while gameplay.is_running():
        # Only window-close events matter here; everything else is ignored.
        for evt in pygame.event.get():
            if evt.type != pygame.QUIT:
                continue
            gameplay.quit_game()
        pressed = pygame.key.get_pressed()
        gameplay.handle_input(pressed)
        gameplay.update_game_state()
        gameplay.render_game_screen()
        frame_clock.tick(60)  # cap the frame rate at 60 FPS
    pygame.quit()
#!/bin/bash
# https://github.com/Hyy2001X/AutoBuild-Actions
# AutoBuild Module by Hyy2001
# AutoBuild Actions
# Build constants: the author tag appended to version strings, and the
# fallback device profile used when .config names no target device.
Diy_Core() {
Author=Hyy2001
Default_Device=d-team_newifi-d2
}
# Pre-feeds-update customization: enable the helloworld feed, drop local
# replacement files into the tree, and check out the extra packages.
Diy-Part1() {
# Un-comment the helloworld feed entry if the feeds file exists.
[ -e feeds.conf.default ] && sed -i "s/#src-git helloworld/src-git helloworld/g" feeds.conf.default
[ ! -d package/lean ] && mkdir -p package/lean
# Copy customized files (from Customize/) over their in-tree counterparts.
Replace_File mac80211.sh package/kernel/mac80211/files/lib/wifi
Replace_File system package/base-files/files/etc/config
Replace_File AutoUpdate.sh package/base-files/files/bin
Replace_File banner package/base-files/files/etc
# Fetch third-party packages: ExtraPackages <proto> <dest dir> <name> <repo> [branch]
ExtraPackages svn network/services dnsmasq https://github.com/openwrt/openwrt/trunk/package/network/services
ExtraPackages svn network/services dropbear https://github.com/openwrt/openwrt/trunk/package/network/services
# ExtraPackages svn network/services ppp https://github.com/openwrt/openwrt/trunk/package/network/services
ExtraPackages svn network/services hostapd https://github.com/openwrt/openwrt/trunk/package/network/services
# ExtraPackages svn kernel mt76 https://github.com/openwrt/openwrt/trunk/package/kernel
ExtraPackages git lean luci-app-autoupdate https://github.com/Hyy2001X main
ExtraPackages git lean luci-theme-argon https://github.com/jerrykuku 18.06
ExtraPackages git other luci-app-argon-config https://github.com/jerrykuku master
ExtraPackages git other luci-app-adguardhome https://github.com/Hyy2001X master
ExtraPackages svn other luci-app-smartdns https://github.com/project-openwrt/openwrt/trunk/package/ntlf9t
ExtraPackages svn other smartdns https://github.com/project-openwrt/openwrt/trunk/package/ntlf9t
ExtraPackages git other OpenClash https://github.com/vernesong master
ExtraPackages git other luci-app-serverchan https://github.com/tty228 master
ExtraPackages svn other luci-app-socat https://github.com/project-openwrt/openwrt/trunk/package/lienol
# [UPX compression] ExtraPackages git other openwrt-upx https://github.com/Hyy2001X master
# [App filter] ExtraPackages git OAF openwrt-OpenAppFilter https://github.com/Lienol master
# [AdGuardHome core] ExtraPackages svn other AdGuardHome https://github.com/project-openwrt/openwrt/trunk/package/ntlf9t
}
# Post-feeds-install customization: patch feed files and brand the build
# (banner / default-settings version string / openwrt_info file).
Diy-Part2() {
GET_TARGET_INFO
Replace_File mwan3 package/feeds/packages/mwan3/files/etc/config
# Extend the ssr-plus redirected-port list 143 -> 143,25,5222
# (presumably IMAP/SMTP/XMPP — confirm against the init script).
sed -i 's/143/143,25,5222/' package/feeds/helloworld/luci-app-ssr-plus/root/etc/init.d/shadowsocksr
# ExtraPackages svn feeds/packages mwan3 https://github.com/openwrt/packages/trunk/net
echo "Author: $Author"
echo "Openwrt Version: $Openwrt_Version"
echo "AutoUpdate Version: $AutoUpdate_Version"
echo "Router: $TARGET_PROFILE"
# Append the author and build date to the Lede version shown on the device.
sed -i "s?$Lede_Version?$Lede_Version Compiled by $Author [$Display_Date]?g" $Default_File
echo "$Openwrt_Version" > package/base-files/files/etc/openwrt_info
sed -i "s?Openwrt?Openwrt $Openwrt_Version / AutoUpdate $AutoUpdate_Version?g" package/base-files/files/etc/banner
}
# Post-build step: move the sysupgrade image to bin/Firmware under the
# AutoBuild naming scheme and write its MD5/SHA256 into a .detail file.
Diy-Part3() {
GET_TARGET_INFO
Default_Firmware=openwrt-$TARGET_BOARD-$TARGET_SUBTARGET-$TARGET_PROFILE-squashfs-sysupgrade.bin
AutoBuild_Firmware=AutoBuild-$TARGET_PROFILE-Lede-${Openwrt_Version}.bin
AutoBuild_Detail=AutoBuild-$TARGET_PROFILE-Lede-${Openwrt_Version}.detail
mkdir -p bin/Firmware
echo "Firmware: $AutoBuild_Firmware"
mv bin/targets/$TARGET_BOARD/$TARGET_SUBTARGET/$Default_Firmware bin/Firmware/$AutoBuild_Firmware
echo "[$(date "+%H:%M:%S")] Calculating MD5 and SHA256 ..."
Firmware_MD5=$(md5sum bin/Firmware/$AutoBuild_Firmware | cut -d ' ' -f1)
Firmware_SHA256=$(sha256sum bin/Firmware/$AutoBuild_Firmware | cut -d ' ' -f1)
echo -e "MD5: $Firmware_MD5\nSHA256: $Firmware_SHA256"
touch bin/Firmware/$AutoBuild_Detail
echo -e "\nMD5:$Firmware_MD5\nSHA256:$Firmware_SHA256" >> bin/Firmware/$AutoBuild_Detail
}
# Derive build metadata (version strings, target board/subtarget/profile)
# from the checked-out tree and .config. Falls back to $Default_Device when
# .config names no device profile.
GET_TARGET_INFO() {
Diy_Core
[ -e $GITHUB_WORKSPACE/Openwrt.info ] && . $GITHUB_WORKSPACE/Openwrt.info
# Line 6 of AutoUpdate.sh carries its Version="..." string.
AutoUpdate_Version=$(awk 'NR==6' package/base-files/files/bin/AutoUpdate.sh | awk -F '[="]+' '/Version/{print $2}')
Default_File="package/lean/default-settings/files/zzz-default-settings"
# Lede release tag of the form R<maj>.<min>.<patch> inside default-settings.
Lede_Version=$(egrep -o "R[0-9]+\.[0-9]+\.[0-9]+" $Default_File)
Openwrt_Version="$Lede_Version-$Compile_Date"
# Extract the device name from CONFIG_TARGET_*_DEVICE_<name>=y in .config.
TARGET_PROFILE=$(egrep -o "CONFIG_TARGET.*DEVICE.*=y" .config | sed -r 's/.*DEVICE_(.*)=y/\1/')
[ -z "$TARGET_PROFILE" ] && TARGET_PROFILE="$Default_Device"
TARGET_BOARD=$(awk -F '[="]+' '/TARGET_BOARD/{print $2}' .config)
TARGET_SUBTARGET=$(awk -F '[="]+' '/TARGET_SUBTARGET/{print $2}' .config)
}
# Fetch an external package into package/$PKG_DIR via git or svn, retrying
# up to 3 times on checkout failure.
#   $1 proto (git|svn)   $2 dest dir under package/   $3 package name
#   $4 repo base URL     $5 branch (git only)
ExtraPackages() {
	PKG_PROTO=$1
	PKG_DIR=$2
	PKG_NAME=$3
	REPO_URL=$4
	REPO_BRANCH=$5
	# BUGFIX: the original ran `[ -d package/$PKG_DIR ] && mkdir -p ...`,
	# i.e. it only created the directory when it ALREADY existed. mkdir -p
	# is idempotent, so create it unconditionally.
	mkdir -p "package/$PKG_DIR"
	# Drop any stale copies left by previous runs.
	[ -d "package/$PKG_DIR/$PKG_NAME" ] && rm -rf "package/$PKG_DIR/$PKG_NAME"
	[ -d "$PKG_NAME" ] && rm -rf "$PKG_NAME"
	Retry_Times=3
	while [ ! -e "$PKG_NAME/Makefile" ]
	do
		echo "[$(date "+%H:%M:%S")] Checking out package [$PKG_NAME] ..."
		case $PKG_PROTO in
		git)
			git clone -b "$REPO_BRANCH" "$REPO_URL/$PKG_NAME" "$PKG_NAME" > /dev/null 2>&1
		;;
		svn)
			svn checkout "$REPO_URL/$PKG_NAME" "$PKG_NAME" > /dev/null 2>&1
		esac
		# A Makefile (or at least a README) means the checkout succeeded.
		if [ -e "$PKG_NAME/Makefile" ] || [ -e $PKG_NAME/README* ];then
			echo "[$(date "+%H:%M:%S")] Package [$PKG_NAME] is detected!"
			mv "$PKG_NAME" "package/$PKG_DIR"
			break
		else
			[ "$Retry_Times" -lt 1 ] && echo "[$(date "+%H:%M:%S")] Skip check out package [$PKG_NAME] ..." && break
			echo "[$(date "+%H:%M:%S")] [Error] [$Retry_Times] Checkout failed,retry in 3s ..."
			Retry_Times=$(($Retry_Times - 1))
			rm -rf "$PKG_NAME" > /dev/null 2>&1
			sleep 3
		fi
	done
}
# Move a customized file or folder from $GITHUB_WORKSPACE/Customize into the
# OpenWrt tree, replacing any existing copy.
#   $1 file/folder name under Customize/
#   $2 destination dir, relative to openwrt/
#   $3 optional new name for the file at the destination
Replace_File() {
	FILE_NAME=$1
	PATCH_DIR=$GITHUB_WORKSPACE/openwrt/$2
	FILE_RENAME=$3
	[ ! -d "$PATCH_DIR" ] && mkdir -p "$PATCH_DIR"
	if [ -f "$GITHUB_WORKSPACE/Customize/$FILE_NAME" ];then
		# (The original re-tested -e inside this branch; -f already implies it,
		# so the redundant check and its dead else-branch were removed.)
		echo "[$(date "+%H:%M:%S")] Customize File [$FILE_NAME] is detected!"
		if [ -z "$FILE_RENAME" ];then
			[ -e "$PATCH_DIR/$FILE_NAME" ] && rm -f "$PATCH_DIR/$FILE_NAME"
			mv -f "$GITHUB_WORKSPACE/Customize/$FILE_NAME" "$PATCH_DIR/$FILE_NAME"
		else
			# BUGFIX: the original tested $PATCH_DIR/$FILE_NAME here but removed
			# $PATCH_DIR/$3 — check and remove the RENAMED destination instead.
			[ -e "$PATCH_DIR/$FILE_RENAME" ] && rm -f "$PATCH_DIR/$FILE_RENAME"
			mv -f "$GITHUB_WORKSPACE/Customize/$FILE_NAME" "$PATCH_DIR/$FILE_RENAME"
		fi
	else
		if [ -d "$GITHUB_WORKSPACE/Customize/$FILE_NAME" ];then
			echo "[$(date "+%H:%M:%S")] Customize Folder [$FILE_NAME] is detected !"
			mv -f "$GITHUB_WORKSPACE/Customize/$FILE_NAME" "$PATCH_DIR"
		else
			echo "[$(date "+%H:%M:%S")] Customize Folder [$FILE_NAME] is not detected,skip move ..."
		fi
	fi
}
|
#!/usr/bin/env sh
# Container entrypoint: abort on any error.
set -e
# If the 'ping' group exists, replace it with a 'docker' group at $DOCKER_GID
# and create the unprivileged 'hypso' user in it (presumably so the user's
# group matches the mounted docker socket's GID — confirm with the image docs).
if [ $(getent group ping) ]; then
delgroup ping
addgroup -g $DOCKER_GID docker
adduser -D -G docker hypso
fi
# Drop privileges and hand PID 1 to the application JVM.
exec su-exec hypso java -jar /home/hypso/hypso.jar
<gh_stars>1-10
package depth_first_search;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
 * Baekjoon 16964: DFS Special Judge — checks whether a given node sequence
 * is a valid DFS visiting order of a tree rooted at node 1 (0 internally).
 *
 * @author exponential-e
 * @see https://www.acmicpc.net/problem/16964/
 */
public class Boj16964 {
// tree[v] = adjacency list of node v (nodes are 0-indexed internally)
private static ArrayList<Integer>[] tree;
// visit[v] = DFS-tree parent of v; -2 = not yet visited, -1 marks the root
private static int[] visit;
// seq = the candidate DFS visiting order to validate (0-indexed)
private static int[] seq;
private static int N;
/**
* Reads the tree edges and the candidate order from stdin, runs a DFS from
* node 0 to record each node's parent, then prints 1 if the order is a
* valid DFS order and 0 otherwise.
*/
public static void main(String[] args) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
N = Integer.parseInt(br.readLine());
tree = new ArrayList[N];
visit = new int[N];
for(int i = 0; i < N; i++) {
tree[i] = new ArrayList<>();
visit[i] = -2;
}
// Read the N-1 undirected edges (input is 1-indexed; shift to 0-indexed).
int loop = N - 1;
while(loop-- > 0) {
StringTokenizer st = new StringTokenizer(br.readLine());
int node1 = Integer.parseInt(st.nextToken()) - 1;
int node2 = Integer.parseInt(st.nextToken()) - 1;
tree[node1].add(node2);
tree[node2].add(node1);
}
seq = new int[N];
StringTokenizer st = new StringTokenizer(br.readLine());
for(int i = 0; i < N; i++) {
seq[i] = Integer.parseInt(st.nextToken()) - 1;
}
visit[0] = -1;
dfs(0);
System.out.println(makeSPJ());
}
/**
* Validates the sequence with an explicit stack holding the current
* ancestor chain: each next node's recorded parent must still be on the
* stack (completed subtrees are popped off first); otherwise the order
* cannot be produced by any DFS.
*
* @return 1 if {@code seq} is a valid DFS order starting at node 0, else 0
*/
private static int makeSPJ() {
if(seq[0] != 0) return 0;
ArrayDeque<Integer> stack = new ArrayDeque<>();
for(int i = 0; i < N; i++) {
while(!stack.isEmpty()) {
int parent = stack.peek();
if (parent == visit[seq[i]]) break; // contains parent ?
if (stack.size() == 1) return 0; // !contains
stack.pop();
}
stack.push(seq[i]);
}
return 1;
}
// Recursive DFS from `current` recording each node's parent in visit[].
// NOTE(review): recursion depth is O(N); very deep trees could overflow the
// call stack — confirm the judge's limits tolerate this.
private static void dfs(int current) {
for(int next: tree[current]) {
if(visit[next] != -2) continue;
visit[next] = current;
dfs(next);
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.