text
stringlengths 1
1.05M
|
|---|
<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Created by alex on 6/7/17.
 */
// Compiled style-entry module (see sourceMappingURL below): pulls the global
// app stylesheet, the bootstrap package entry, and a Sass partial into the
// bundle purely for their side effects — nothing is exported.
require("./app/css/app-styles.css");
require("bootstrap");
require("./app/css/_sassy.sass");
//# sourceMappingURL=styles.js.map
|
#!/bin/sh
# Container entrypoint: collect the nameserver IPs from /etc/resolv.conf into a
# single space-separated list, write an nginx "resolver" include file, then run
# nginx in the foreground.
#
# Bug fix: the original used tr '\\n', which (after shell quoting) tells tr to
# translate the characters '\' and 'n' — not newlines — so multiple nameservers
# stayed on separate lines. tr '\n' correctly joins them with spaces.
NAMESERVER=$(awk '/nameserver/{print $2}' /etc/resolv.conf | tr '\n' ' ')
RESOLVER_CONFIG="/etc/nginx/conf.d/resolver.conf"
echo "Got nameserver $NAMESERVER from resolv.conf"
echo "Writing include file at $RESOLVER_CONFIG"
echo "resolver $NAMESERVER;" > "$RESOLVER_CONFIG"
nginx -g 'daemon off;'
|
#!/bin/bash
# Wrapper for adb. With zero or one connected device — or an explicit
# "devices"/"-s" invocation — it forwards the command line unchanged. With
# several devices it prompts for which one to target and injects "-s <id>".
ADB="$ANDROID_HOME/platform-tools/adb"
# Run adb devices once, in the event the adb server hasn't been started yet.
"$ADB" devices > /dev/null 2>&1
# Grab the IDs of all the connected devices / emulators
# (drop the "List of devices attached" header and the trailing blank line).
IDS=($("$ADB" devices | sed '1,1d' | sed '$d' | cut -f 1 | sort))
NUMIDS=${#IDS[@]}
# Zero or one device, or a sub-command that needs no selection: forward the
# arguments verbatim. "$@" (quoted) preserves arguments containing spaces,
# which the original unquoted $@ silently re-split.
if [[ 0 -eq "$NUMIDS" ]]; then
    "$ADB" "$@"
    exit 0
elif [[ 1 -eq "$NUMIDS" ]]; then
    "$ADB" "$@"
    exit 0
elif [[ "devices" == "$1" ]]; then
    "$ADB" "$@"
    exit 0
elif [[ "-s" == "$1" ]]; then
    "$ADB" "$@"
    exit 0
fi
# Multiple devices: collect the model name and platform version for each ID so
# the user can tell them apart. Querying each device directly by its ID
# replaces the original's redundant "adb devices | grep | xargs" round-trip.
declare -a MODEL_NAMES
declare -a PLATFORM_VERSIONS
for (( x=0; x < NUMIDS; x++ )); do
    MODEL_NAMES[x]=$("$ADB" -s "${IDS[$x]}" shell cat /system/build.prop | grep "ro.product.model" | cut -d "=" -f 2 | tr -d ' \r\t\n')
    PLATFORM_VERSIONS[x]=$("$ADB" -s "${IDS[$x]}" shell cat /system/build.prop | grep "ro.build.version.release" | cut -d "=" -f 2 | tr -d ' \r\t\n')
done
echo "Multiple devices detected, please select one"
for (( x=0; x < NUMIDS; x++ )); do
    # $((x+1)) replaces the deprecated $[x+1] arithmetic form.
    echo -e "$((x+1)): ${IDS[x]}\t\t${PLATFORM_VERSIONS[x]}\t\t${MODEL_NAMES[x]}"
done
echo -n "> "
read USER_CHOICE
# Validate the choice is a number AND within range; the original accepted any
# number and indexed the array with it, targeting the wrong (or no) device.
if [[ $USER_CHOICE =~ ^[0-9]+$ ]] && (( USER_CHOICE >= 1 && USER_CHOICE <= NUMIDS )); then
    echo "executing following command:"
    echo "    adb -s ${IDS[$USER_CHOICE-1]} $*"
    "$ADB" -s "${IDS[$USER_CHOICE-1]}" "$@"
else
    echo "You must enter a number between 1 and $NUMIDS"
fi
|
<reponame>osak/mikutterd<filename>core/boot/shell/spec.rb
# -*- coding: utf-8 -*-
# Automatic spec file generation
require "fileutils"
require 'ripper'
miquire :core, "userconfig"
# Quick-and-dirty automatic dependency resolution.
# (Original author's note: don't rely on this too much — parsing Ruby is a pain.)
# Scans a plugin's source tokens via Ripper and records, into the attached spec
# hash, which other plugins the source appears to depend on.
class Depend < Ripper::Filter
  attr_reader :spec
  def initialize(*args)
    @last_const = ""
    super end
  # Attach the spec hash that detected dependencies are written into.
  # Returns self so the call can be chained: Depend.new(src).set_spec(spec).parse
  def set_spec(spec)
    @spec = spec
    self end
  # ':' that opens a symbol literal; flags the following ident as a symbol.
  def on_symbeg(tok, f)
    :symbol end
  def on_ident(tok, f)
    if f == :symbol
      on_ex_symbol(tok)
    else
      on_ex_ident(tok) end end
  # Constant reference. For "Gtk::TimeLine" the events arrive as
  # on_const(Gtk), on_op(::), on_const(TimeLine), so nested constant names
  # are accumulated in @last_const before being matched.
  def on_const(tok, f)
    if f == :op
      @last_const += '::' + tok
    else
      @last_const = tok end
    case @last_const
    when /\AG[td]k\Z/ # Use of the Gtk/Gdk classes implies the gtk plugin.
      depend :gtk
    when /\APlugin::(\w+)/ # Plugin::Foo is a camel-cased plugin slug, so depend on that plugin.
      depend $1.gsub(/([a-z])([A-Z])/, '\1_\2').downcase.to_sym end end
  def on_op(tok, f)
    case tok
    when "::"
      :op end end
  # Symbol token; no dependency can currently be inferred from a bare symbol.
  def on_ex_symbol(tok)
  end
  # Identifier (variable/method-like token).
  # Detecting the command DSL deliberately does NOT add a dependency on the
  # command plugin: it only defines the basic mikutter commands and is not part
  # of the command mechanism itself. Condition classes that come from the
  # command plugin are caught by on_const instead.
  def on_ex_ident(tok)
    case tok
    when /tab|timeline|nativewidget/ # UI-ish words suggest a dependency on gui.
      depend :gui
    when 'profiletab' # profiletab additionally depends on profile.
      depend :gui
      depend :profile
    when 'settings' # Start of the settings DSL -> settings plugin.
      depend :settings
    end end
  # Register that this spec depends on _slug_ (skipping self-dependencies and
  # duplicates).
  # ==== Args
  # [slug] the plugin being depended on (Symbol)
  def depend(slug)
    if spec['slug'].to_sym != slug and not spec["depends"]["plugin"].include?(slug.to_s)
      spec["depends"]["plugin"] << slug.to_s end end
end
# Generate (or upgrade) the .mikutter.yml spec file for the plugin in _dir_.
# Reads an existing spec (or the legacy "spec" file), interactively fills in
# missing fields via STDIN, auto-detects plugin dependencies from the plugin's
# main source file with Depend, and writes the result back as YAML.
def spec_generate(dir)
  specfile = File.join(dir, ".mikutter.yml")
  legacy_specfile = File.join(dir, "spec")
  spec = if FileTest.exist?(specfile)
           YAML.load_file(specfile)
         elsif FileTest.exist?(legacy_specfile)
           YAML.load_file(legacy_specfile)
         else
           # No spec yet: seed a minimal one, crediting the logged-in user.
           user = UserConfig[:verify_credentials] || {}
           idname = user[:idname]
           {"slug" => File.basename(dir).to_sym, "depends" => {"mikutter" => Environment::VERSION.to_s}, "version" => "1.0", "author" => idname} end
  slug = spec["slug"].to_sym
  basefile = File.join(dir, "#{slug}.rb")
  unless FileTest.exist? basefile
    puts "file #{basefile} notfound. select plugin slug."
    expects = Dir.glob(File.join(dir, "*.rb")).map{ |filename| File.basename(filename, '.rb') }
    if expects.empty?
      puts "please create #{basefile}."
    end
    expects.each_with_index{ |filename, index|
      # Bug fix: the candidate filename was not interpolated into the menu line.
      puts "[#{index}] #{filename}"
    }
    print "input number or slug [q:quit, s:skip]> "
    number = STDIN.gets.chomp
    case number
    # Bug fix: the unanchored /q/i and /s/i matched any input *containing* the
    # letter (e.g. a slug like "queue" aborted); anchor to the whole answer.
    when /\Aq\Z/i
      abort
    when /\As\Z/i
      return
    when /\A[0-9]+\Z/
      slug = expects[number.to_i].to_sym
    else
      slug = number.to_sym end
    spec["slug"] = slug
    basefile = File.join(dir, "#{slug}.rb") end
  source = File.open(basefile){ |io| io.read }
  if not spec.has_key?("name")
    print "#{slug}: name> "
    spec["name"] = STDIN.gets.chomp end
  if not spec.has_key?("description")
    print "#{slug}: description> "
    spec["description"] = STDIN.gets.chomp end
  # Ensure the depends structure exists before the scanner appends to it.
  spec["depends"] = {"version" => "1.0", "plugin" => []} if not spec.has_key?("depends")
  spec["depends"]["plugin"] = [] if not spec["depends"].has_key?("plugin")
  depend = Depend.new(source).set_spec(spec)
  depend.parse
  content = YAML.dump(depend.spec)
  File.open(specfile, "w"){ |io| io.write content }
  puts content
end
# Command-line entry point.
#   mikutter.rb spec <dir>        -> generate a spec for one plugin directory
#   mikutter.rb spec all <parent> -> generate specs for every subdirectory of <parent>
target = ARGV[1]
if target == "all"
  unless ARGV[2]
    puts "directory is not specified."
    puts "usage: mikutter.rb spec all directory"
    exit end
  Dir.glob(File.join(ARGV[2], "*/")).each{ |dir|
    spec_generate(dir) }
else
  unless ARGV[1]
    puts "directory is not specified."
    puts "usage: mikutter.rb spec directory"
    exit end
  spec_generate(target)
end
|
# Add boot script
# sudo cp /tmp/boot.sh /var/lib/cloud/scripts/per-boot/boot.sh
# sudo chmod 744 /var/lib/cloud/scripts/per-boot/boot.sh
# Install MongoDB Libraries
# Register the MongoDB 5.0 yum repo staged under /tmp, install the pinned
# server package (MONGO_VERSION must be set by the caller/provisioner —
# TODO confirm it is exported before this runs), then apply the staged config.
sudo cp /tmp/mongodb-org-5.0.repo /etc/yum.repos.d/mongodb-org-5.0.repo
sudo yum install -y mongodb-org-${MONGO_VERSION}
sudo cp /tmp/mongod.conf /etc/mongod.conf
|
<gh_stars>10-100
from unittest import TestCase
from altimeter.core.config import (
AWSConfig,
InvalidConfigException,
ScanConfig,
)
class TestScanConfig(TestCase):
    """Tests for building a ScanConfig from plain keyword arguments."""

    def test_from_dict(self):
        # ScanConfig is expected to normalize the list inputs into tuples.
        built = ScanConfig(
            accounts=["123", "456"],
            regions=["us-west-2", "us-west-1"],
            scan_sub_accounts=False,
            preferred_account_scan_regions=["us-east-1", "us-west-2"],
        )
        self.assertTupleEqual(built.accounts, ("123", "456"))
        self.assertTupleEqual(built.regions, ("us-west-2", "us-west-1"))
        self.assertEqual(built.scan_sub_accounts, False)
        self.assertTupleEqual(
            built.preferred_account_scan_regions, ("us-east-1", "us-west-2")
        )
class TestConfig(TestCase):
    """Tests for building the top-level AWSConfig from a nested mapping."""

    def test_from_dict(self):
        # Nested "scan" section, kept separate for readability.
        scan_section = {
            "accounts": ("1234",),
            "regions": (),
            "scan_sub_accounts": False,
            "preferred_account_scan_regions": (
                "us-west-1",
                "us-west-2",
                "us-east-1",
                "us-east-2",
            ),
        }
        built = AWSConfig(
            artifact_path="/tmp/altimeter_single_account",
            pruner_max_age_min=4320,
            graph_name="alti",
            accessor={"multi_hop_accessors": [], "credentials_cache": {"cache": {}}},
            concurrency={"max_account_scan_threads": 1, "max_svc_scan_threads": 64},
            scan=scan_section,
            neptune=None,
        )
        self.assertIsNone(built.neptune)
        self.assertEqual(built.pruner_max_age_min, 4320)
|
package org.apache.ddlutils.task;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.StringTokenizer;
import org.apache.commons.collections.set.ListOrderedSet;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.ddlutils.io.PrettyPrintingXmlWriter;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
/**
 * A simple helper task that dumps information about a database using JDBC.
 *
 * @version $Revision: 289996 $
 * @ant.task name="dumpMetadata"
 */
public class DumpMetadataTask extends Task
{
    /** Methods that are filtered when enumerating the properties. */
    private static final String[] IGNORED_PROPERTY_METHODS = { "getConnection", "getCatalogs", "getSchemas" };
    /** The data source to use for accessing the database. */
    private BasicDataSource _dataSource;
    /** The file to write the dump to; <code>null</code> means standard output (see execute). */
    private File _outputFile = null;
    /** The encoding of the XML output file. */
    private String _outputEncoding = "UTF-8";
    // The "%" defaults below are JDBC match-all patterns for the corresponding
    // DatabaseMetaData lookups.
    /** The database catalog(s) to read. */
    private String _catalogPattern = "%";
    /** The database schema(s) to read. */
    private String _schemaPattern = "%";
    /** The pattern for reading all tables. */
    private String _tablePattern = "%";
    /** The pattern for reading all procedures. */
    private String _procedurePattern = "%";
    /** The pattern for reading all columns. */
    private String _columnPattern = "%";
    /** The tables types to read; <code>null</code> or an empty list means that we shall read every type. */
    private String[] _tableTypes = null;
    /** Whether to read tables. */
    private boolean _dumpTables = true;
    /** Whether to read procedures. */
    private boolean _dumpProcedures = true;
    /**
     * Adds the data source to use for accessing the database. Only one data
     * source is kept: configuring another one replaces the previous value.
     *
     * @param dataSource The data source
     */
    public void addConfiguredDatabase(BasicDataSource dataSource)
    {
        _dataSource = dataSource;
    }
    /**
     * Specifies the output file to which the database metadata is written to.
     * If left unset, the dump is written to standard output instead (see execute).
     *
     * @param outputFile The output file
     * @ant.required
     */
    public void setOutputFile(File outputFile)
    {
        _outputFile = outputFile;
    }
    /**
     * Specifies the encoding of the output file.
     *
     * @param encoding The encoding
     * @ant.not-required Per default, <code>UTF-8</code> is used.
     */
    public void setOutputEncoding(String encoding)
    {
        // No validation here; an unsupported encoding surfaces later when the
        // XML writer is constructed in execute().
        _outputEncoding = encoding;
    }
/**
* Sets the catalog pattern used when accessing the database.
*
* @param catalogPattern The catalog pattern
* @ant.not-required Per default, no specific catalog is used (value <code>%</code>).
*/
public void setCatalogPattern(String catalogPattern)
{
_catalogPattern = ((catalogPattern == null) || (catalogPattern.length() == 0) ? null : catalogPattern);
}
/**
* Sets the schema pattern used when accessing the database.
*
* @param schemaPattern The schema pattern
* @ant.not-required Per default, no specific schema is used (value <code>%</code>).
*/
public void setSchemaPattern(String schemaPattern)
{
_schemaPattern = ((schemaPattern == null) || (schemaPattern.length() == 0) ? null : schemaPattern);
}
/**
* Specifies the table to be processed. For details see {@link DatabaseMetaData#getTables(String, String, String, String[])}.
*
* @param tablePattern The table pattern
* @ant.not-required By default, all tables are read (value <code>%</code>).
*/
public void setTablePattern(String tablePattern)
{
_tablePattern = ((tablePattern == null) || (tablePattern.length() == 0) ? null : tablePattern);
}
/**
* Specifies the procedures to be processed. For details and typical table types see {@link DatabaseMetaData#getProcedures(String, String, String)}.
*
* @param procedurePattern The procedure pattern
* @ant.not-required By default, all procedures are read (value <code>%</code>).
*/
public void setProcedurePattern(String procedurePattern)
{
_procedurePattern = ((procedurePattern == null) || (procedurePattern.length() == 0) ? null : procedurePattern);
}
/**
* Specifies the columns to be processed. For details and typical table types see {@link DatabaseMetaData#getColumns(String, String, String, String)}.
*
* @param columnPattern The column pattern
* @ant.not-required By default, all columns are read (value <code>%</code>).
*/
public void setColumnPattern(String columnPattern)
{
_columnPattern = ((columnPattern == null) || (columnPattern.length() == 0) ? null : columnPattern);
}
/**
* Specifies the table types to be processed. For details and typical table types see {@link DatabaseMetaData#getTables(String, String, String, String[])}.
*
* @param tableTypes The table types to read
* @ant.not-required By default, all types of tables are read.
*/
public void setTableTypes(String tableTypes)
{
ArrayList types = new ArrayList();
if (tableTypes != null)
{
StringTokenizer tokenizer = new StringTokenizer(tableTypes, ",");
while (tokenizer.hasMoreTokens())
{
String token = tokenizer.nextToken().trim();
if (token.length() > 0)
{
types.add(token);
}
}
}
_tableTypes = (String[])types.toArray(new String[types.size()]);
}
    /**
     * Specifies whether procedures shall be read from the database.
     *
     * @param readProcedures <code>true</code> if procedures shall be read
     * @ant.not-required By default, procedures are read.
     */
    public void setDumpProcedures(boolean readProcedures)
    {
        // Gates the dumpProcedures(...) step performed in dumpMetaData.
        _dumpProcedures = readProcedures;
    }
    /**
     * Specifies whether tables shall be read from the database.
     *
     * @param readTables <code>true</code> if tables shall be read
     * @ant.not-required By default, tables are read.
     */
    public void setDumpTables(boolean readTables)
    {
        // Gates the dumpTables(...) step performed in dumpMetaData.
        _dumpTables = readTables;
    }
    /**
     * {@inheritDoc}
     */
    public void execute() throws BuildException
    {
        // Without a data source there is nothing to dump; treat as a no-op.
        if (_dataSource == null)
        {
            log("No data source specified, so there is nothing to do.", Project.MSG_INFO);
            return;
        }
        Connection connection = null;
        OutputStream output = null;
        try
        {
            connection = _dataSource.getConnection();
            // No output file configured -> dump to stdout (and never close it below).
            if (_outputFile == null)
            {
                output = System.out;
            }
            else
            {
                output = new FileOutputStream(_outputFile);
            }
            PrettyPrintingXmlWriter xmlWriter = new PrettyPrintingXmlWriter(output, _outputEncoding);
            xmlWriter.writeDocumentStart();
            xmlWriter.writeElementStart(null, "metadata");
            xmlWriter.writeAttribute(null, "driverClassName", _dataSource.getDriverClassName());
            dumpMetaData(xmlWriter, connection.getMetaData());
            // NOTE(review): there is no explicit writeElementEnd() for "metadata";
            // presumably writeDocumentEnd() closes any open elements — confirm
            // against PrettyPrintingXmlWriter.
            xmlWriter.writeDocumentEnd();
        }
        catch (Exception ex)
        {
            // Any failure aborts the build with the original cause attached.
            throw new BuildException(ex);
        }
        finally
        {
            if (connection != null)
            {
                try
                {
                    connection.close();
                }
                catch (SQLException ex)
                {}
            }
            // Only close the stream when we opened it ourselves (never System.out).
            if ((_outputFile != null) && (output != null))
            {
                try
                {
                    output.close();
                }
                catch (IOException ex)
                {}
            }
        }
    }
    /**
     * Dumps the database meta data into XML elements under the current element in the given writer.
     * Scalar metadata properties are discovered reflectively; structured data
     * (catalogs/schemas, tables, procedures) is handled by dedicated methods.
     *
     * @param xmlWriter The XML writer to write to
     * @param metaData  The meta data to write
     */
    private void dumpMetaData(PrettyPrintingXmlWriter xmlWriter, DatabaseMetaData metaData) throws NoSuchMethodException,
                                                                                                   IllegalAccessException,
                                                                                                   InvocationTargetException,
                                                                                                   SQLException
    {
        // We rather iterate over the methods because most metadata properties
        // do not follow the bean naming standard
        Method[] methods = metaData.getClass().getMethods();
        Set filtered = new HashSet(Arrays.asList(IGNORED_PROPERTY_METHODS));
        for (int idx = 0; idx < methods.length; idx++)
        {
            // only no-arg methods that return something and that are not defined in Object
            // we also filter certain methods
            // NOTE(review): getReturnType() never returns null (void methods yield
            // Void.TYPE), so that clause is effectively always true.
            if ((methods[idx].getParameterTypes().length == 0) &&
                (methods[idx].getReturnType() != null) &&
                (Object.class != methods[idx].getDeclaringClass()) &&
                !filtered.contains(methods[idx].getName()))
            {
                dumpProperty(xmlWriter, metaData, methods[idx]);
            }
        }
        // Structured sections follow the scalar properties.
        dumpCatalogsAndSchemas(xmlWriter, metaData);
        if (_dumpTables)
        {
            dumpTables(xmlWriter, metaData);
        }
        if (_dumpProcedures)
        {
            dumpProcedures(xmlWriter, metaData);
        }
    }
/**
* Dumps the property represented by the given method in the current element in the given writer.
*
* @param xmlWriter The XML writer to write to
* @param obj The instance we're working on
* @param propGetter The method for accessing the property
*/
private void dumpProperty(PrettyPrintingXmlWriter xmlWriter, Object obj, Method propGetter)
{
try
{
addProperty(xmlWriter, getPropertyName(propGetter.getName()), propGetter.invoke(obj, null));
}
catch (Throwable ex)
{
log("Could not dump property "+propGetter.getName()+": "+ex.getStackTrace(), Project.MSG_ERR);
}
}
/**
* Adds a property to the current element in the given xml writer, either as an attribute (primitive value or
* string) or as a sub element.
*
* @param xmlWriter The XML writer to write to
* @param name The name of the property
* @param value The value of the property
*/
private void addProperty(PrettyPrintingXmlWriter xmlWriter, String name, Object value)
{
if (value != null)
{
if (value.getClass().isArray())
{
addArrayProperty(xmlWriter, name, (Object[])value);
}
else if (value.getClass().isPrimitive() || (value instanceof String))
{
xmlWriter.writeAttribute(null, name, value.toString());
}
else if (value instanceof ResultSet)
{
addResultSetProperty(xmlWriter, name, (ResultSet)value);
}
}
}
/**
* Adds a property that is represented as an array to the current element in the given xml writer.
*
* @param xmlWriter The XML writer to write to
* @param name The name of the property
* @param values The values of the property
*/
private void addArrayProperty(PrettyPrintingXmlWriter xmlWriter, String name, Object[] values)
{
String propName = name;
if (propName.endsWith("s"))
{
propName = propName.substring(0, propName.length() - 1);
}
xmlWriter.writeElementStart(null, propName + "s");
for (int idx = 0; idx < values.length; idx++)
{
addProperty(xmlWriter, "value", values[idx]);
}
xmlWriter.writeElementEnd();
}
    /**
     * Adds a property that is represented as a result set to the current element in the
     * given xml writer: a plural wrapper element containing one singular element per row,
     * with one attribute/sub-element per result-set column.
     *
     * @param xmlWriter The XML writer to write to
     * @param name      The name of the property
     * @param result    The values of the property as a result set
     */
    private void addResultSetProperty(PrettyPrintingXmlWriter xmlWriter, String name, ResultSet result)
    {
        // Derive the singular per-row element name from the plural property name.
        String propName = name;
        if (propName.endsWith("s"))
        {
            propName = propName.substring(0, propName.length() - 1);
        }
        try
        {
            ResultSetMetaData metaData = result.getMetaData();
            xmlWriter.writeElementStart(null, propName + "s");
            try
            {
                while (result.next())
                {
                    xmlWriter.writeElementStart(null, propName);
                    try
                    {
                        // JDBC result-set columns are 1-based.
                        for (int idx = 1; idx <= metaData.getColumnCount(); idx++)
                        {
                            Object value = result.getObject(idx);
                            addProperty(xmlWriter, metaData.getColumnLabel(idx), value);
                        }
                    }
                    finally
                    {
                        // Close the row element even if reading a column failed.
                        xmlWriter.writeElementEnd();
                    }
                }
            }
            finally
            {
                // Close the wrapper element even if iteration failed.
                xmlWriter.writeElementEnd();
            }
        }
        catch (SQLException ex)
        {
            log("Could not read the result set metadata: "+ex.getStackTrace(), Project.MSG_ERR);
        }
    }
/**
* Derives the property name from the given method name.
*
* @param methodName The method name
* @return The property name
*/
private String getPropertyName(String methodName)
{
if (methodName.startsWith("get"))
{
if (Character.isLowerCase(methodName.charAt(4)))
{
return Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4);
}
else
{
return methodName.substring(3);
}
}
else if (methodName.startsWith("is"))
{
if (Character.isLowerCase(methodName.charAt(3)))
{
return Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3);
}
else
{
return methodName.substring(2);
}
}
else
{
return methodName;
}
}
    /**
     * Defines an interface for a callback that retrieves a specific result set from the metadata, and
     * also writes rows to a given xml writer as well as handles errors. Implementations in this
     * class are anonymous classes that log errors and let the dump continue.
     */
    private static interface ResultSetXmlOperation
    {
        /**
         * Returns the result set to work on.
         *
         * @return The result set
         */
        public ResultSet getResultSet() throws SQLException;
        /**
         * Writes the row currently maintained by the given result set to the given xml writer.
         *
         * @param xmlWriter The xml writer to write to
         * @param result    The row to write
         */
        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException;
        /**
         * Handles the given exception.
         *
         * @param ex The sql exception
         */
        public void handleError(SQLException ex);
    }
    /**
     * Helper method that performs the given operation: obtains the operation's result set,
     * optionally opens a wrapping element, feeds every row to the operation, and guarantees
     * that both the wrapping element and the result set are closed afterwards.
     *
     * @param xmlWriter The xml writer that the operation shall write to
     * @param name      The name of the xml element surrounding the operation's output;
     *                  may be null, in which case no wrapping element is written
     * @param op        The operation
     */
    private void performResultSetXmlOperation(PrettyPrintingXmlWriter xmlWriter, String name, ResultSetXmlOperation op)
    {
        ResultSet result = null;
        try
        {
            result = op.getResultSet();
            if (name != null)
            {
                xmlWriter.writeElementStart(null, name);
            }
            try
            {
                while (result.next())
                {
                    op.handleRow(xmlWriter, result);
                }
            }
            finally
            {
                // Always close the wrapping element, even if a row handler threw.
                if (name != null)
                {
                    xmlWriter.writeElementEnd();
                }
            }
        }
        catch (SQLException ex)
        {
            // SQL problems are delegated to the operation; the dump continues.
            op.handleError(ex);
        }
        finally
        {
            // The result set must be closed regardless of success or failure.
            if (result != null)
            {
                try
                {
                    result.close();
                }
                catch (SQLException ex)
                {
                    log("Could not close a result set: " + ex.getStackTrace(), Project.MSG_ERR);
                }
            }
        }
    }
    /**
     * Dumps the catalogs and schemas of the database as two flat lists of
     * name-only elements.
     *
     * @param xmlWriter The xml writer to write to
     * @param metaData  The database meta data
     */
    private void dumpCatalogsAndSchemas(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData)
    {
        // <catalogs><catalog name="..."/>...</catalogs>
        performResultSetXmlOperation(xmlWriter, "catalogs", new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getCatalogs();
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                String catalogName = result.getString("TABLE_CAT");
                // Skip null/empty catalog names.
                if ((catalogName != null) && (catalogName.length() > 0))
                {
                    xmlWriter.writeElementStart(null, "catalog");
                    xmlWriter.writeAttribute(null, "name", catalogName);
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the catalogs from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
        // <schemas><schema name="..."/>...</schemas>
        performResultSetXmlOperation(xmlWriter, "schemas", new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getSchemas();
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                String schemaName = result.getString("TABLE_SCHEM");
                // Skip null/empty schema names.
                if ((schemaName != null) && (schemaName.length() > 0))
                {
                    xmlWriter.writeElementStart(null, "schema");
                    xmlWriter.writeAttribute(null, "name", schemaName);
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the schemas from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
    }
    /**
     * Dumps all tables: first the table types the database supports, then one
     * &lt;table&gt; element per matching table including its columns, primary keys,
     * version columns, foreign keys and indexes.
     *
     * @param xmlWriter The xml writer to write to
     * @param metaData  The database metadata
     */
    private void dumpTables(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData)
    {
        // First we need the list of supported table types
        final ArrayList tableTypeList = new ArrayList();
        performResultSetXmlOperation(xmlWriter, "tableTypes", new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getTableTypes();
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                String tableType = result.getString("TABLE_TYPE");
                // Remember the type for the fallback below while also dumping it.
                tableTypeList.add(tableType);
                xmlWriter.writeElementStart(null, "tableType");
                xmlWriter.writeAttribute(null, "name", tableType);
                xmlWriter.writeElementEnd();
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the table types from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
        // If no explicit types were configured via setTableTypes, read every
        // type the database reports.
        final String[] tableTypesToRead;
        if ((_tableTypes == null) || (_tableTypes.length == 0))
        {
            tableTypesToRead = (String[])tableTypeList.toArray(new String[tableTypeList.size()]);
        }
        else
        {
            tableTypesToRead = _tableTypes;
        }
        performResultSetXmlOperation(xmlWriter, "tables", new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getTables(_catalogPattern, _schemaPattern, _tablePattern, tableTypesToRead);
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                Set columns = getColumnsInResultSet(result);
                String tableName = result.getString("TABLE_NAME");
                if ((tableName != null) && (tableName.length() > 0))
                {
                    String catalog = result.getString("TABLE_CAT");
                    String schema = result.getString("TABLE_SCHEM");
                    log("Reading table " + ((schema != null) && (schema.length() > 0) ? schema + "." : "") + tableName, Project.MSG_INFO);
                    xmlWriter.writeElementStart(null, "table");
                    xmlWriter.writeAttribute(null, "name", tableName);
                    if (catalog != null)
                    {
                        xmlWriter.writeAttribute(null, "catalog", catalog);
                    }
                    if (schema != null)
                    {
                        xmlWriter.writeAttribute(null, "schema", schema);
                    }
                    // NOTE(review): addStringAttribute is defined elsewhere in this
                    // class; presumably it only writes when the named column is in
                    // the "columns" set — confirm against its implementation.
                    addStringAttribute(xmlWriter, "type", result, columns, "TABLE_TYPE");
                    addStringAttribute(xmlWriter, "remarks", result, columns, "REMARKS");
                    addStringAttribute(xmlWriter, "typeName", result, columns, "TYPE_NAME");
                    addStringAttribute(xmlWriter, "typeCatalog", result, columns, "TYPE_CAT");
                    addStringAttribute(xmlWriter, "typeSchema", result, columns, "TYPE_SCHEM");
                    addStringAttribute(xmlWriter, "identifierColumn", result, columns, "SELF_REFERENCING_COL_NAME");
                    addStringAttribute(xmlWriter, "identifierGeneration", result, columns, "REF_GENERATION");
                    // Nested per-table details.
                    dumpColumns(xmlWriter, metaData, catalog, schema, tableName);
                    dumpPKs(xmlWriter, metaData, catalog, schema, tableName);
                    dumpVersionColumns(xmlWriter, metaData, catalog, schema, tableName);
                    dumpFKs(xmlWriter, metaData, catalog, schema, tableName);
                    dumpIndexes(xmlWriter, metaData, catalog, schema, tableName);
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the tables from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
    }
    /**
     * Dumps the columns of the indicated table, one &lt;column&gt; element per row of
     * {@link DatabaseMetaData#getColumns(String, String, String, String)}.
     *
     * @param xmlWriter   The xml writer to write to
     * @param metaData    The database metadata
     * @param catalogName The catalog name
     * @param schemaName  The schema name
     * @param tableName   The table name
     */
    private void dumpColumns(PrettyPrintingXmlWriter xmlWriter,
                             final DatabaseMetaData metaData,
                             final String catalogName,
                             final String schemaName,
                             final String tableName) throws SQLException
    {
        // No wrapping element (name == null): <column> elements are written
        // directly under the current <table> element.
        performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getColumns(catalogName, schemaName, tableName, _columnPattern);
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                Set columns = getColumnsInResultSet(result);
                String columnName = result.getString("COLUMN_NAME");
                if ((columnName != null) && (columnName.length() > 0))
                {
                    xmlWriter.writeElementStart(null, "column");
                    xmlWriter.writeAttribute(null, "name", columnName);
                    addIntAttribute(xmlWriter, "typeCode", result, columns, "DATA_TYPE");
                    addStringAttribute(xmlWriter, "type", result, columns, "TYPE_NAME");
                    addIntAttribute(xmlWriter, "size", result, columns, "COLUMN_SIZE");
                    addIntAttribute(xmlWriter, "digits", result, columns, "DECIMAL_DIGITS");
                    addIntAttribute(xmlWriter, "precision", result, columns, "NUM_PREC_RADIX");
                    // NULLABLE is a tri-state int code; map it to true/false/unknown.
                    if (columns.contains("NULLABLE"))
                    {
                        try
                        {
                            switch (result.getInt("NULLABLE"))
                            {
                                case DatabaseMetaData.columnNoNulls:
                                    xmlWriter.writeAttribute(null, "nullable", "false");
                                    break;
                                case DatabaseMetaData.columnNullable:
                                    xmlWriter.writeAttribute(null, "nullable", "true");
                                    break;
                                default:
                                    xmlWriter.writeAttribute(null, "nullable", "unknown");
                                    break;
                            }
                        }
                        catch (SQLException ex)
                        {
                            log("Could not read the NULLABLE value for colum '" + columnName + "' of table '" + tableName + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                        }
                    }
                    addStringAttribute(xmlWriter, "remarks", result, columns, "REMARKS");
                    addStringAttribute(xmlWriter, "defaultValue", result, columns, "COLUMN_DEF");
                    addIntAttribute(xmlWriter, "maxByteLength", result, columns, "CHAR_OCTET_LENGTH");
                    addIntAttribute(xmlWriter, "index", result, columns, "ORDINAL_POSITION");
                    // IS_NULLABLE is the string ("yes"/"no"/other) variant of the same info.
                    if (columns.contains("IS_NULLABLE"))
                    {
                        try
                        {
                            String value = result.getString("IS_NULLABLE");
                            if ("no".equalsIgnoreCase(value))
                            {
                                xmlWriter.writeAttribute(null, "isNullable", "false");
                            }
                            else if ("yes".equalsIgnoreCase(value))
                            {
                                xmlWriter.writeAttribute(null, "isNullable", "true");
                            }
                            else
                            {
                                xmlWriter.writeAttribute(null, "isNullable", "unknown");
                            }
                        }
                        catch (SQLException ex)
                        {
                            log("Could not read the IS_NULLABLE value for colum '" + columnName + "' of table '" + tableName + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                        }
                    }
                    // NOTE(review): "SCOPE_CATLOG" looks misspelled but matches the
                    // column name used by older JDBC getColumns() definitions — keep as-is.
                    addStringAttribute(xmlWriter, "refCatalog", result, columns, "SCOPE_CATLOG");
                    addStringAttribute(xmlWriter, "refSchema", result, columns, "SCOPE_SCHEMA");
                    addStringAttribute(xmlWriter, "refTable", result, columns, "SCOPE_TABLE");
                    addShortAttribute(xmlWriter, "sourceTypeCode", result, columns, "SOURCE_DATA_TYPE");
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the colums for table '" + tableName + "' from the result set: "+ex.getStackTrace(), Project.MSG_ERR);
            }
        });
    }
    /**
     * Dumps the primary key columns of the indicated table, one &lt;primaryKey&gt;
     * element per key column.
     *
     * @param xmlWriter   The xml writer to write to
     * @param metaData    The database metadata
     * @param catalogName The catalog name
     * @param schemaName  The schema name
     * @param tableName   The table name
     */
    private void dumpPKs(PrettyPrintingXmlWriter xmlWriter,
                         final DatabaseMetaData metaData,
                         final String catalogName,
                         final String schemaName,
                         final String tableName) throws SQLException
    {
        // No wrapping element (name == null): written directly under <table>.
        performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getPrimaryKeys(catalogName, schemaName, tableName);
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                Set columns = getColumnsInResultSet(result);
                String columnName = result.getString("COLUMN_NAME");
                if ((columnName != null) && (columnName.length() > 0))
                {
                    xmlWriter.writeElementStart(null, "primaryKey");
                    xmlWriter.writeAttribute(null, "column", columnName);
                    addStringAttribute(xmlWriter, "name", result, columns, "PK_NAME");
                    addShortAttribute(xmlWriter, "sequenceNumberInPK", result, columns, "KEY_SEQ");
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the primary keys for table '" + tableName + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
    }
    /**
     * Dumps the versioned (auto-updating) columns of the indicated table, one
     * &lt;versionedColumn&gt; element per row of
     * {@link DatabaseMetaData#getVersionColumns(String, String, String)}.
     *
     * @param xmlWriter   The xml writer to write to
     * @param metaData    The database metadata
     * @param catalogName The catalog name
     * @param schemaName  The schema name
     * @param tableName   The table name
     */
    private void dumpVersionColumns(PrettyPrintingXmlWriter xmlWriter,
                                    final DatabaseMetaData metaData,
                                    final String catalogName,
                                    final String schemaName,
                                    final String tableName) throws SQLException
    {
        // No wrapping element (name == null): written directly under <table>.
        performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
        {
            public ResultSet getResultSet() throws SQLException
            {
                return metaData.getVersionColumns(catalogName, schemaName, tableName);
            }
            public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
            {
                Set columns = getColumnsInResultSet(result);
                String columnName = result.getString("COLUMN_NAME");
                if ((columnName != null) && (columnName.length() > 0))
                {
                    xmlWriter.writeElementStart(null, "versionedColumn");
                    xmlWriter.writeAttribute(null, "column", columnName);
                    addIntAttribute(xmlWriter, "typeCode", result, columns, "DATA_TYPE");
                    addStringAttribute(xmlWriter, "type", result, columns, "TYPE_NAME");
                    addIntAttribute(xmlWriter, "size", result, columns, "BUFFER_LENGTH");
                    addIntAttribute(xmlWriter, "precision", result, columns, "COLUMN_SIZE");
                    addShortAttribute(xmlWriter, "scale", result, columns, "DECIMAL_DIGITS");
                    // PSEUDO_COLUMN is a tri-state code; map it to a readable label.
                    if (columns.contains("PSEUDO_COLUMN"))
                    {
                        try
                        {
                            switch (result.getShort("PSEUDO_COLUMN"))
                            {
                                case DatabaseMetaData.versionColumnPseudo:
                                    xmlWriter.writeAttribute(null, "columnType", "pseudo column");
                                    break;
                                case DatabaseMetaData.versionColumnNotPseudo:
                                    xmlWriter.writeAttribute(null, "columnType", "real column");
                                    break;
                                default:
                                    xmlWriter.writeAttribute(null, "columnType", "unknown");
                                    break;
                            }
                        }
                        catch (SQLException ex)
                        {
                            log("Could not read the PSEUDO_COLUMN value for versioned colum '" + columnName + "' of table '" + tableName + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                        }
                    }
                    xmlWriter.writeElementEnd();
                }
            }
            public void handleError(SQLException ex)
            {
                log("Could not read the versioned columns for table '" + tableName + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
            }
        });
    }
/**
* Dumps the foreign key columns of the indicated table to other tables.
*
* @param xmlWriter The xml writer to write to
* @param metaData The database metadata
* @param catalogName The catalog name
* @param schemaName The schema name
* @param tableName The table name
*/
private void dumpFKs(PrettyPrintingXmlWriter xmlWriter,
                     final DatabaseMetaData metaData,
                     final String catalogName,
                     final String schemaName,
                     final String tableName) throws SQLException
{
    performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
    {
        public ResultSet getResultSet() throws SQLException
        {
            return metaData.getImportedKeys(catalogName, schemaName, tableName);
        }
        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
        {
            Set columns = getColumnsInResultSet(result);

            xmlWriter.writeElementStart(null, "foreignKey");
            addStringAttribute(xmlWriter, "name", result, columns, "FK_NAME");
            addStringAttribute(xmlWriter, "primaryKeyName", result, columns, "PK_NAME");
            addStringAttribute(xmlWriter, "column", result, columns, "PKCOLUMN_NAME");
            addStringAttribute(xmlWriter, "foreignCatalog", result, columns, "FKTABLE_CAT");
            addStringAttribute(xmlWriter, "foreignSchema", result, columns, "FKTABLE_SCHEM");
            addStringAttribute(xmlWriter, "foreignTable", result, columns, "FKTABLE_NAME");
            addStringAttribute(xmlWriter, "foreignColumn", result, columns, "FKCOLUMN_NAME");
            addShortAttribute(xmlWriter, "sequenceNumberInFK", result, columns, "KEY_SEQ");
            // What happens to the FK when the referenced PK is updated
            if (columns.contains("UPDATE_RULE"))
            {
                try
                {
                    switch (result.getShort("UPDATE_RULE"))
                    {
                        case DatabaseMetaData.importedKeyNoAction:
                            xmlWriter.writeAttribute(null, "updateRule", "no action");
                            break;
                        case DatabaseMetaData.importedKeyCascade:
                            xmlWriter.writeAttribute(null, "updateRule", "cascade PK change");
                            break;
                        case DatabaseMetaData.importedKeySetNull:
                            xmlWriter.writeAttribute(null, "updateRule", "set FK to NULL");
                            break;
                        case DatabaseMetaData.importedKeySetDefault:
                            xmlWriter.writeAttribute(null, "updateRule", "set FK to default");
                            break;
                        default:
                            xmlWriter.writeAttribute(null, "updateRule", "unknown");
                            break;
                    }
                }
                catch (SQLException ex)
                {
                    // getMessage() instead of getStackTrace(): concatenating the array
                    // only prints its identity hash
                    log("Could not read the UPDATE_RULE value for a foreign key of table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                }
            }
            // What happens to the FK when the referenced PK is deleted
            if (columns.contains("DELETE_RULE"))
            {
                try
                {
                    switch (result.getShort("DELETE_RULE"))
                    {
                        case DatabaseMetaData.importedKeyNoAction:
                        case DatabaseMetaData.importedKeyRestrict:
                            xmlWriter.writeAttribute(null, "deleteRule", "no action");
                            break;
                        case DatabaseMetaData.importedKeyCascade:
                            xmlWriter.writeAttribute(null, "deleteRule", "cascade PK change");
                            break;
                        case DatabaseMetaData.importedKeySetNull:
                            xmlWriter.writeAttribute(null, "deleteRule", "set FK to NULL");
                            break;
                        case DatabaseMetaData.importedKeySetDefault:
                            xmlWriter.writeAttribute(null, "deleteRule", "set FK to default");
                            break;
                        default:
                            xmlWriter.writeAttribute(null, "deleteRule", "unknown");
                            break;
                    }
                }
                catch (SQLException ex)
                {
                    log("Could not read the DELETE_RULE value for a foreign key of table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                }
            }
            // Whether constraint evaluation can be deferred until commit
            if (columns.contains("DEFERRABILITY"))
            {
                try
                {
                    switch (result.getShort("DEFERRABILITY"))
                    {
                        case DatabaseMetaData.importedKeyInitiallyDeferred:
                            xmlWriter.writeAttribute(null, "deferrability", "initially deferred");
                            break;
                        case DatabaseMetaData.importedKeyInitiallyImmediate:
                            xmlWriter.writeAttribute(null, "deferrability", "immediately deferred");
                            break;
                        case DatabaseMetaData.importedKeyNotDeferrable:
                            xmlWriter.writeAttribute(null, "deferrability", "not deferred");
                            break;
                        default:
                            xmlWriter.writeAttribute(null, "deferrability", "unknown");
                            break;
                    }
                }
                catch (SQLException ex)
                {
                    log("Could not read the DEFERRABILITY value for a foreign key of table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                }
            }
            xmlWriter.writeElementEnd();
        }
        public void handleError(SQLException ex)
        {
            log("Could not determine the foreign keys for table '" + tableName + "': " + ex.getMessage(), Project.MSG_ERR);
        }
    });
}
/**
* Dumps the indexes of the indicated table.
*
* @param xmlWriter The xml writer to write to
* @param metaData The database metadata
* @param catalogName The catalog name
* @param schemaName The schema name
* @param tableName The table name
*/
private void dumpIndexes(PrettyPrintingXmlWriter xmlWriter,
                         final DatabaseMetaData metaData,
                         final String catalogName,
                         final String schemaName,
                         final String tableName) throws SQLException
{
    performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
    {
        public ResultSet getResultSet() throws SQLException
        {
            return metaData.getIndexInfo(catalogName, schemaName, tableName, false, false);
        }
        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
        {
            Set columns = getColumnsInResultSet(result);

            xmlWriter.writeElementStart(null, "index");
            addStringAttribute(xmlWriter, "name", result, columns, "INDEX_NAME");
            addBooleanAttribute(xmlWriter, "nonUnique", result, columns, "NON_UNIQUE");
            addStringAttribute(xmlWriter, "indexCatalog", result, columns, "INDEX_QUALIFIER");
            if (columns.contains("TYPE"))
            {
                try
                {
                    switch (result.getShort("TYPE"))
                    {
                        case DatabaseMetaData.tableIndexStatistic:
                            xmlWriter.writeAttribute(null, "type", "table statistics");
                            break;
                        case DatabaseMetaData.tableIndexClustered:
                            xmlWriter.writeAttribute(null, "type", "clustered");
                            break;
                        case DatabaseMetaData.tableIndexHashed:
                            xmlWriter.writeAttribute(null, "type", "hashed");
                            break;
                        case DatabaseMetaData.tableIndexOther:
                            xmlWriter.writeAttribute(null, "type", "other");
                            break;
                        default:
                            xmlWriter.writeAttribute(null, "type", "unknown");
                            break;
                    }
                }
                catch (SQLException ex)
                {
                    // getMessage() instead of getStackTrace(): concatenating the array
                    // only prints its identity hash
                    log("Could not read the TYPE value for an index of table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                }
            }
            addStringAttribute(xmlWriter, "column", result, columns, "COLUMN_NAME");
            addShortAttribute(xmlWriter, "sequenceNumberInIndex", result, columns, "ORDINAL_POSITION");
            if (columns.contains("ASC_OR_DESC"))
            {
                try
                {
                    String value = result.getString("ASC_OR_DESC");

                    if ("A".equalsIgnoreCase(value))
                    {
                        xmlWriter.writeAttribute(null, "sortOrder", "ascending");
                    }
                    else if ("D".equalsIgnoreCase(value))
                    {
                        xmlWriter.writeAttribute(null, "sortOrder", "descending");
                    }
                    else
                    {
                        xmlWriter.writeAttribute(null, "sortOrder", "unknown");
                    }
                }
                catch (SQLException ex)
                {
                    log("Could not read the ASC_OR_DESC value for an index of table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                }
            }
            addIntAttribute(xmlWriter, "cardinality", result, columns, "CARDINALITY");
            addIntAttribute(xmlWriter, "pages", result, columns, "PAGES");
            addStringAttribute(xmlWriter, "filter", result, columns, "FILTER_CONDITION");
            // Close the "index" element; this was missing in the original code which
            // left every index element unclosed (all sibling dump methods close theirs)
            xmlWriter.writeElementEnd();
        }
        public void handleError(SQLException ex)
        {
            log("Could not read the indexes for table '" + tableName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
        }
    });
}
/**
* Dumps all procedures.
*
* @param xmlWriter The xml writer to write to
* @param metaData The database metadata
*/
private void dumpProcedures(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData) throws SQLException
{
    performResultSetXmlOperation(xmlWriter, "procedures", new ResultSetXmlOperation()
    {
        public ResultSet getResultSet() throws SQLException
        {
            return metaData.getProcedures(_catalogPattern, _schemaPattern, _procedurePattern);
        }
        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
        {
            Set    columns       = getColumnsInResultSet(result);
            String procedureName = result.getString("PROCEDURE_NAME");

            // Skip rows without a usable procedure name
            if ((procedureName != null) && (procedureName.length() > 0))
            {
                String catalog = result.getString("PROCEDURE_CAT");
                String schema  = result.getString("PROCEDURE_SCHEM");

                log("Reading procedure " + ((schema != null) && (schema.length() > 0) ? schema + "." : "") + procedureName, Project.MSG_INFO);
                xmlWriter.writeElementStart(null, "procedure");
                xmlWriter.writeAttribute(null, "name", procedureName);
                if (catalog != null)
                {
                    xmlWriter.writeAttribute(null, "catalog", catalog);
                }
                if (schema != null)
                {
                    xmlWriter.writeAttribute(null, "schema", schema);
                }
                addStringAttribute(xmlWriter, "remarks", result, columns, "REMARKS");
                if (columns.contains("PROCEDURE_TYPE"))
                {
                    try
                    {
                        switch (result.getShort("PROCEDURE_TYPE"))
                        {
                            case DatabaseMetaData.procedureReturnsResult:
                                xmlWriter.writeAttribute(null, "type", "returns result");
                                break;
                            case DatabaseMetaData.procedureNoResult:
                                xmlWriter.writeAttribute(null, "type", "doesn't return result");
                                break;
                            case DatabaseMetaData.procedureResultUnknown:
                                xmlWriter.writeAttribute(null, "type", "may return result");
                                break;
                            default:
                                xmlWriter.writeAttribute(null, "type", "unknown");
                                break;
                        }
                    }
                    catch (SQLException ex)
                    {
                        // getMessage() instead of getStackTrace(): concatenating the
                        // array only prints its identity hash
                        log("Could not read the PROCEDURE_TYPE value for the procedure '" + procedureName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                    }
                }
                // "%" matches any catalog/schema when dumping the procedure's columns
                dumpProcedure(xmlWriter, metaData, "%", "%", procedureName);
                xmlWriter.writeElementEnd();
            }
        }
        public void handleError(SQLException ex)
        {
            log("Could not read the procedures from the result set: " + ex.getMessage(), Project.MSG_ERR);
        }
    });
}
/**
* Dumps the contents of the indicated procedure.
*
* @param xmlWriter The xml writer to write to
* @param metaData The database metadata
* @param catalogName The catalog name
* @param schemaName The schema name
* @param procedureName The procedure name
*/
private void dumpProcedure(PrettyPrintingXmlWriter xmlWriter,
                           final DatabaseMetaData metaData,
                           final String catalogName,
                           final String schemaName,
                           final String procedureName) throws SQLException
{
    performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation()
    {
        public ResultSet getResultSet() throws SQLException
        {
            return metaData.getProcedureColumns(catalogName, schemaName, procedureName, _columnPattern);
        }
        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException
        {
            Set    columns    = getColumnsInResultSet(result);
            String columnName = result.getString("COLUMN_NAME");

            // Skip rows without a usable column name
            if ((columnName != null) && (columnName.length() > 0))
            {
                xmlWriter.writeElementStart(null, "column");
                xmlWriter.writeAttribute(null, "name", columnName);
                if (columns.contains("COLUMN_TYPE"))
                {
                    try
                    {
                        switch (result.getShort("COLUMN_TYPE"))
                        {
                            case DatabaseMetaData.procedureColumnIn:
                                xmlWriter.writeAttribute(null, "type", "in parameter");
                                break;
                            case DatabaseMetaData.procedureColumnInOut:
                                xmlWriter.writeAttribute(null, "type", "in/out parameter");
                                break;
                            case DatabaseMetaData.procedureColumnOut:
                                xmlWriter.writeAttribute(null, "type", "out parameter");
                                break;
                            case DatabaseMetaData.procedureColumnReturn:
                                xmlWriter.writeAttribute(null, "type", "return value");
                                break;
                            case DatabaseMetaData.procedureColumnResult:
                                xmlWriter.writeAttribute(null, "type", "result column in ResultSet");
                                break;
                            default:
                                xmlWriter.writeAttribute(null, "type", "unknown");
                                break;
                        }
                    }
                    catch (SQLException ex)
                    {
                        // getMessage() instead of getStackTrace(): concatenating the
                        // array only prints its identity hash
                        log("Could not read the COLUMN_TYPE value for the column '" + columnName + "' of procedure '" + procedureName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                    }
                }
                addIntAttribute(xmlWriter, "typeCode", result, columns, "DATA_TYPE");
                addStringAttribute(xmlWriter, "type", result, columns, "TYPE_NAME");
                addIntAttribute(xmlWriter, "length", result, columns, "LENGTH");
                addIntAttribute(xmlWriter, "precision", result, columns, "PRECISION");
                // Attribute was mistakenly named "short" (copy-paste of the helper name);
                // "scale" matches the SCALE result-set column and the naming used for
                // DECIMAL_DIGITS in dumpVersionColumns
                addShortAttribute(xmlWriter, "scale", result, columns, "SCALE");
                addShortAttribute(xmlWriter, "radix", result, columns, "RADIX");
                if (columns.contains("NULLABLE"))
                {
                    try
                    {
                        switch (result.getInt("NULLABLE"))
                        {
                            case DatabaseMetaData.procedureNoNulls:
                                xmlWriter.writeAttribute(null, "nullable", "false");
                                break;
                            case DatabaseMetaData.procedureNullable:
                                xmlWriter.writeAttribute(null, "nullable", "true");
                                break;
                            default:
                                xmlWriter.writeAttribute(null, "nullable", "unknown");
                                break;
                        }
                    }
                    catch (SQLException ex)
                    {
                        log("Could not read the NULLABLE value for the column '" + columnName + "' of procedure '" + procedureName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
                    }
                }
                addStringAttribute(xmlWriter, "remarks", result, columns, "REMARKS");
            }
        }
        public void handleError(SQLException ex)
        {
            log("Could not read the columns for procedure '" + procedureName + "' from the result set: " + ex.getMessage(), Project.MSG_ERR);
        }
    });
}
/**
* If the result set contains the indicated column, extracts its value and sets an attribute at the given element.
*
* @param xmlWriter The xml writer to write to
* @param attrName The name of the attribute to write
* @param result The result set
* @param columns The columns in the result set
* @param columnName The name of the column in the result set
*/
private void addStringAttribute(PrettyPrintingXmlWriter xmlWriter, String attrName, ResultSet result, Set columns, String columnName) throws SQLException
{
    // Only attempt the read if the result set actually has this column
    if (columns.contains(columnName))
    {
        try
        {
            xmlWriter.writeAttribute(null, attrName, result.getString(columnName));
        }
        catch (SQLException ex)
        {
            // getMessage() instead of getStackTrace(): concatenating the array only
            // prints its identity hash, not the trace
            log("Could not read the value from result set column " + columnName + ":" + ex.getMessage(), Project.MSG_ERR);
        }
    }
}
/**
* If the result set contains the indicated column, extracts its int value and sets an attribute at the given element.
*
* @param xmlWriter The xml writer to write to
* @param attrName The name of the attribute to write
* @param result The result set
* @param columns The columns in the result set
* @param columnName The name of the column in the result set
*/
private void addIntAttribute(PrettyPrintingXmlWriter xmlWriter, String attrName, ResultSet result, Set columns, String columnName) throws SQLException
{
    // Only attempt the read if the result set actually has this column
    if (columns.contains(columnName))
    {
        try
        {
            xmlWriter.writeAttribute(null, attrName, String.valueOf(result.getInt(columnName)));
        }
        catch (SQLException ex)
        {
            // A few databases do not comply with the jdbc spec and return a string (or null),
            // so lets try this just in case; note that this getString call may itself throw
            // an SQLException which then propagates to the caller
            String value = result.getString(columnName);

            if (value != null)
            {
                try
                {
                    // Integer.valueOf instead of the deprecated Integer(String) constructor
                    xmlWriter.writeAttribute(null, attrName, Integer.valueOf(value).toString());
                }
                catch (NumberFormatException parseEx)
                {
                    // getMessage() instead of getStackTrace(): concatenating the array
                    // only prints its identity hash
                    log("Could not parse the value from result set column " + columnName + ":" + ex.getMessage(), Project.MSG_ERR);
                }
            }
        }
    }
}
/**
* If the result set contains the indicated column, extracts its short value and sets an attribute at the given element.
*
* @param xmlWriter The xml writer to write to
* @param attrName The name of the attribute to write
* @param result The result set
* @param columns The columns in the result set
* @param columnName The name of the column in the result set
*/
private void addShortAttribute(PrettyPrintingXmlWriter xmlWriter, String attrName, ResultSet result, Set columns, String columnName) throws SQLException
{
    // Only attempt the read if the result set actually has this column
    if (columns.contains(columnName))
    {
        try
        {
            xmlWriter.writeAttribute(null, attrName, String.valueOf(result.getShort(columnName)));
        }
        catch (SQLException ex)
        {
            // A few databases do not comply with the jdbc spec and return a string (or null),
            // so lets try strings this just in case; note that this getString call may itself
            // throw an SQLException which then propagates to the caller
            String value = result.getString(columnName);

            if (value != null)
            {
                try
                {
                    // Short.valueOf instead of the deprecated Short(String) constructor
                    xmlWriter.writeAttribute(null, attrName, Short.valueOf(value).toString());
                }
                catch (NumberFormatException parseEx)
                {
                    // getMessage() instead of getStackTrace(): concatenating the array
                    // only prints its identity hash
                    log("Could not parse the value from result set column " + columnName + ":" + ex.getMessage(), Project.MSG_ERR);
                }
            }
        }
    }
}
/**
* If the result set contains the indicated column, extracts its boolean value and sets an attribute at the given element.
*
* @param xmlWriter The xml writer to write to
* @param attrName The name of the attribute to write
* @param result The result set
* @param columns The columns in the result set
* @param columnName The name of the column in the result set
*/
private void addBooleanAttribute(PrettyPrintingXmlWriter xmlWriter, String attrName, ResultSet result, Set columns, String columnName) throws SQLException
{
    // Only attempt the read if the result set actually has this column
    if (columns.contains(columnName))
    {
        try
        {
            xmlWriter.writeAttribute(null, attrName, String.valueOf(result.getBoolean(columnName)));
        }
        catch (SQLException ex)
        {
            // A few databases do not comply with the jdbc spec and return a string (or null),
            // so lets try strings this just in case; note that this getString call may itself
            // throw an SQLException which then propagates to the caller
            String value = result.getString(columnName);

            if (value != null)
            {
                try
                {
                    // Boolean.valueOf instead of the deprecated Boolean(String) constructor;
                    // NOTE(review): Boolean parsing never throws NumberFormatException, so
                    // the catch below is effectively dead code kept for structural symmetry
                    xmlWriter.writeAttribute(null, attrName, Boolean.valueOf(value).toString());
                }
                catch (NumberFormatException parseEx)
                {
                    log("Could not parse the value from result set column " + columnName + ":" + ex.getMessage(), Project.MSG_ERR);
                }
            }
        }
    }
}
/**
* Determines the columns that are present in the given result set.
*
* @param resultSet The result set
* @return The columns
*/
private Set getColumnsInResultSet(ResultSet resultSet) throws SQLException
{
    // Collect the upper-cased column names while preserving their order
    ResultSetMetaData metaData    = resultSet.getMetaData();
    int               columnCount = metaData.getColumnCount();
    ListOrderedSet    columnNames = new ListOrderedSet();

    for (int columnIdx = 1; columnIdx <= columnCount; columnIdx++)
    {
        columnNames.add(metaData.getColumnName(columnIdx).toUpperCase());
    }
    return columnNames;
}
}
|
<gh_stars>100-1000
// @ts-ignore
import Tap from 'tap';
import { makeSpectacle } from '../../src';
import { loadEvents } from '../utils';
import { InMemoryOpticContextBuilder } from '../../src/in-memory';
import * as OpticEngine from '../../../optic-engine-wasm/build';
import { generateOpenApi } from '../../src/openapi';

// Each entry pairs a test name with the recorded spec-events file it replays.
const specs = [
  {
    name: 'generate OpenAPI 3.0.1',
    file: './test/specs/mark-req-nested-field-optional.json',
  },
  {
    name: 'handle contributions',
    file: './test/specs/contributions.json',
  },
];

for (const { name, file } of specs) {
  Tap.test(name, async (test) => {
    // Rebuild an in-memory spectacle instance from the recorded events,
    // generate the OpenAPI document, and compare it against the snapshot.
    const events = loadEvents(file);
    const opticContext = await InMemoryOpticContextBuilder.fromEvents(OpticEngine, events);
    const spectacle = await makeSpectacle(opticContext);
    const results = await generateOpenApi(spectacle);
    test.matchSnapshot(results);
  });
}
|
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed so the run is
# reproducible:
#   -d 207        : dieharder test number to run -- TODO confirm which test
#                   207 maps to in the installed dieharder version
#   -g 59         : generator number to test (see `dieharder -g -1` for the list)
#   -S 1643896274 : fixed RNG seed (looks like a Unix timestamp)
dieharder -d 207 -g 59 -S 1643896274
|
package io.opensphere.analysis.listtool.view;
import java.awt.Color;
import java.awt.Point;
import java.io.Serializable;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javax.swing.DefaultRowSorter;
import javax.swing.SwingWorker;
import javax.swing.table.TableModel;
import org.apache.log4j.Logger;
import org.jdesktop.swingx.sort.TableSortController;
import io.opensphere.analysis.listtool.model.ListToolTableModel;
import io.opensphere.core.model.DoubleRange;
import io.opensphere.core.util.Utilities;
/**
* The Class CustomTableSortControllerExtension.
*
* Helps to more efficiently retrieve the data that will be sorted by making a
* single query to the cache and also using map to prevent repetitive data
* retrieves on non-primary sort columns.
*/
public class CustomTableSortControllerExtension extends TableSortController<TableModel>
{
    /** The Constant LOGGER. */
    private static final Logger LOGGER = Logger.getLogger(CustomTableSortControllerExtension.class);

    /**
     * Sentinel stored in the cache map in place of null values, so that a cached
     * null can be distinguished from "not cached yet" (Map.get returns null for both).
     */
    private static Object ourNullProxy = new Object();

    /** The table model. */
    private final ListToolTableModel myTableModel;

    /** The my sort wrapper. */
    private final CustomTableRowSorterModelWrapper mySortWrapper;

    /** Whether the sorter is busy. */
    private final BooleanProperty myIsBusy = new SimpleBooleanProperty(false);

    /**
     * Instantiates a new custom table sort controller extension.
     *
     * @param model the model
     */
    public CustomTableSortControllerExtension(ListToolTableModel model)
    {
        super(model);
        myTableModel = model;
        mySortWrapper = new CustomTableRowSorterModelWrapper();
        setModelWrapper(mySortWrapper);
    }

    /**
     * Initialize. Call this after setting this as the row sorter because
     * JXTable will set this stuff itself.
     */
    public void init()
    {
        // Install the same comparator on every column; it dispatches on the
        // runtime type of the cell values
        for (int column = 0; column < myTableModel.getColumnCount(); ++column)
        {
            setComparator(column, new ListToolComparator());
        }
    }

    @Override
    public void sort()
    {
        // Drop any stale cached cell values before sorting
        mySortWrapper.clearCache();
        try
        {
            super.sort();
        }
        catch (IndexOutOfBoundsException e)
        {
            // Ground this exception.
            if (LOGGER.isTraceEnabled())
            {
                LOGGER.trace("Received IndexOutOfBoundsException on super.sort()");
            }
            // NOTE(review): logging at error level contradicts the "ground this
            // exception" intent above -- confirm whether this should be trace-only
            LOGGER.error(e);
        }
    }

    @Override
    public void toggleSortOrder(final int column)
    {
        myIsBusy.set(true);
        // Prefetch the full column's data on a background thread (a single bulk
        // query) before delegating the actual sort back to the EDT in done()
        SwingWorker<Void, Void> worker = new SwingWorker<Void, Void>()
        {
            @Override
            protected Void doInBackground()
            {
                List<?> columnData = myTableModel.getColumnValues(column);
                mySortWrapper.setColumnData(column, columnData);
                return null;
            }

            @Override
            protected void done()
            {
                try
                {
                    // Surface any exception thrown in doInBackground()
                    get();
                }
                catch (InterruptedException | ExecutionException e)
                {
                    LOGGER.error(e, e);
                }
                try
                {
                    CustomTableSortControllerExtension.super.toggleSortOrder(column);
                    mySortWrapper.clearCache();
                }
                finally
                {
                    myIsBusy.set(false);
                }
            }
        };
        worker.execute();
    }

    /**
     * Gets the property for when the sorter is busy.
     *
     * @return the property for when the sorter is busy
     */
    public BooleanProperty isBusyProperty()
    {
        return myIsBusy;
    }

    /**
     * The Class CustomTableRowSorterModelWrapper.
     *
     * Serves cell values to the sorter: the primary sort column comes from the
     * bulk-fetched column data, every other lookup goes through a per-cell cache
     * backed by the table model.
     */
    private class CustomTableRowSorterModelWrapper extends DefaultRowSorter.ModelWrapper<TableModel, Integer>
    {
        /** The my column data. */
        private List<?> myColumnData;

        /** The primary column index. */
        private int myPrimaryColumnIndex;

        /** The point to object map. */
        private final Map<Point, Object> myPointToObjectMap = new HashMap<>();

        /**
         * Clear cache.
         */
        public void clearCache()
        {
            myPointToObjectMap.clear();
            myPrimaryColumnIndex = -1;
            myColumnData = null;
        }

        @Override
        public int getColumnCount()
        {
            return myTableModel != null ? myTableModel.getColumnCount() : 0;
        }

        @Override
        public Integer getIdentifier(int row)
        {
            return Integer.valueOf(row);
        }

        @Override
        public TableModel getModel()
        {
            return myTableModel;
        }

        @Override
        public int getRowCount()
        {
            return myTableModel != null ? myTableModel.getRowCount() : 0;
        }

        @Override
        public Object getValueAt(int row, int column)
        {
            Object value = null;
            // Fast path: the primary sort column was bulk-fetched up front
            if (column == myPrimaryColumnIndex && myColumnData != null && row < myColumnData.size())
            {
                value = myColumnData.get(row);
            }
            else
            {
                // Slow path: per-cell cache, storing ourNullProxy for null values so a
                // null result is not re-fetched on every comparison
                Point p = new Point(row, column);
                value = myPointToObjectMap.get(p);
                if (value == null)
                {
                    value = myTableModel.getValueAt(row, column);
                    myPointToObjectMap.put(p, value == null ? ourNullProxy : value);
                }
            }
            // Unwrap the null sentinel before handing the value to the sorter
            return Utilities.sameInstance(value, ourNullProxy) ? null : value;
        }

        /**
         * Sets the column data.
         *
         * @param index the index
         * @param data the data
         */
        public void setColumnData(int index, List<?> data)
        {
            myPrimaryColumnIndex = index;
            myColumnData = data;
        }
    }

    /**
     * The list tool comparator.
     *
     * Orders Comparables directly, DoubleRanges by max-then-min, and Colors by
     * hue; anything else compares as equal (which yields an arbitrary but
     * stable-enough order for display purposes).
     */
    private static class ListToolComparator implements Comparator<Object>, Serializable
    {
        /** serialVersionUID. */
        private static final long serialVersionUID = 1L;

        @SuppressWarnings({ "unchecked", "rawtypes" })
        @Override
        public int compare(Object o1, Object o2)
        {
            // NOTE(review): this raw compareTo can throw ClassCastException when the
            // two values are Comparables of different types -- confirm the model
            // guarantees homogeneous columns
            if (o1 instanceof Comparable && o2 instanceof Comparable)
            {
                return ((Comparable)o1).compareTo(o2);
            }
            if (o1 instanceof DoubleRange && o2 instanceof DoubleRange)
            {
                return ((DoubleRange)o1).compareMaxThenMin((DoubleRange)o2);
            }
            if (o1 instanceof Color && o2 instanceof Color)
            {
                Color c1 = (Color)o1;
                Color c2 = (Color)o2;
                // Compare colors by hue component only
                float h1 = Color.RGBtoHSB(c1.getRed(), c1.getGreen(), c1.getBlue(), null)[0];
                float h2 = Color.RGBtoHSB(c2.getRed(), c2.getGreen(), c2.getBlue(), null)[0];
                return Float.compare(h1, h2);
            }
            return 0;
        }
    }
}
|
# Read a word and print it with every letter of "CAMBRIDGE" removed.
word = input()
forbidden = 'CAMBRIDGE'
ans = ''.join(ch for ch in word if ch not in forbidden)
print(ans)
|
#!/bin/bash
# Launch BFT-SMaRt ordering replica #3 in the background.
# Ensure the data and log directories exist before redirecting output into them.
mkdir -p data
mkdir -p data/logs
# Remove any stale view file from a previous run so the replica starts fresh
if [ -f ./config/currentView ] ; then
rm ./config/currentView
fi
# Start replica 3 detached; stdout and stderr both go to the log file
/opt/gopath/src/github.com/hyperledger/hyperledger-bftsmart-orderering/startReplica.sh 3 > data/logs/replica-3.success 2>&1 &
|
const _shortner = (text) => {
if (text && text.length > 500) {
return text.substring(0, 500) + "..."
} else {
return text ? text : "No description"
}
}
module.exports = {
_shortner
}
|
<gh_stars>0
from django.contrib import admin
from .models import Vocabularies
# Expose the Vocabularies model in the Django admin using the default ModelAdmin.
admin.site.register(Vocabularies)
|
/// Render a SQL `VALUES (...)` fragment from the given column expressions,
/// joined by a comma and a space.
fn format_column_values(columns: Vec<&str>) -> String {
    let joined = columns.join(", ");
    let mut out = String::with_capacity(joined.len() + "VALUES ()".len());
    out.push_str("VALUES (");
    out.push_str(&joined);
    out.push(')');
    out
}
|
import React from 'react'
import { userSettingsAreaRoutes } from '../../../user/settings/routes'
import { UserSettingsAreaRoute } from '../../../user/settings/UserSettingsArea'
import { SHOW_BUSINESS_FEATURES } from '../../dotcom/productSubscriptions/features'
import { authExp } from '../../site-admin/SiteAdminAuthenticationProvidersPage'
// Lazily loaded route components; each React.lazy wrapper re-exports the named
// export as the default export expected by React.lazy.
const UserSettingsExternalAccountsPage = React.lazy(async () => ({
    default: (await import('./UserSettingsExternalAccountsPage')).UserSettingsExternalAccountsPage,
}))
const UserSubscriptionsEditProductSubscriptionPage = React.lazy(async () => ({
    default: (await import('../productSubscriptions/UserSubscriptionsEditProductSubscriptionPage'))
        .UserSubscriptionsEditProductSubscriptionPage,
}))
const UserSubscriptionsNewProductSubscriptionPage = React.lazy(async () => ({
    default: (await import('../productSubscriptions/UserSubscriptionsNewProductSubscriptionPage'))
        .UserSubscriptionsNewProductSubscriptionPage,
}))
const UserSubscriptionsProductSubscriptionPage = React.lazy(async () => ({
    default: (await import('../productSubscriptions/UserSubscriptionsProductSubscriptionPage'))
        .UserSubscriptionsProductSubscriptionPage,
}))
const UserSubscriptionsProductSubscriptionsPage = React.lazy(async () => ({
    default: (await import('../productSubscriptions/UserSubscriptionsProductSubscriptionsPage'))
        .UserSubscriptionsProductSubscriptionsPage,
}))
// Enterprise user-settings routes: the open-source routes plus enterprise-only
// pages, each gated by a `condition` feature flag. NOTE: array order matters --
// '/subscriptions/new' is declared before '/subscriptions/:subscriptionUUID'
// so that "new" is not captured as a subscription UUID.
export const enterpriseUserSettingsAreaRoutes: ReadonlyArray<UserSettingsAreaRoute> = [
    ...userSettingsAreaRoutes,
    {
        path: '/external-accounts',
        exact: true,
        // tslint:disable-next-line:jsx-no-lambda
        render: props => <UserSettingsExternalAccountsPage {...props} />,
        condition: () => authExp,
    },
    {
        path: '/subscriptions/new',
        exact: true,
        render: props => <UserSubscriptionsNewProductSubscriptionPage {...props} />,
        condition: () => SHOW_BUSINESS_FEATURES,
    },
    {
        path: '/subscriptions/:subscriptionUUID',
        exact: true,
        render: props => <UserSubscriptionsProductSubscriptionPage {...props} />,
        condition: () => SHOW_BUSINESS_FEATURES,
    },
    {
        path: '/subscriptions/:subscriptionUUID/edit',
        exact: true,
        render: props => <UserSubscriptionsEditProductSubscriptionPage {...props} />,
        condition: () => SHOW_BUSINESS_FEATURES,
    },
    {
        path: '/subscriptions',
        exact: true,
        render: props => <UserSubscriptionsProductSubscriptionsPage {...props} />,
        condition: () => SHOW_BUSINESS_FEATURES,
    },
]
|
package com.hadas.krzysztof.session;
import com.hadas.krzysztof.utils.RestHelper;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.request.HttpRequest;
import com.mashape.unirest.request.HttpRequestWithBody;
/**
 * Represents an authenticated Tidal API session and builds Unirest requests
 * that carry the session id and country code.
 */
public class Session {
    /** Header carrying the Tidal session id on every request. */
    private static final String TIDAL_SESSION_HEADER = "X-Tidal-SessionId";
    /** Query-string parameter name for the session's country code. */
    private static final String COUNTRY_CODE = "countryCode";
    /** Base URL of the Tidal REST API. */
    public static final String API_URL = "https://api.tidalhifi.com/v1/";

    private String countryCode;
    private String sessionId;
    private String userId;

    /**
     * Logs in with username/password and deserializes the response into a Session.
     *
     * NOTE(review): "<PASSWORD>" looks like a sanitized placeholder for the real
     * X-Tidal-Token API token -- confirm and replace before use.
     *
     * @param username the Tidal account username
     * @param password the Tidal account password
     * @param restHelper helper that executes the request and deserializes the response
     * @return the authenticated session
     */
    public static Session login(String username, String password, RestHelper restHelper) {
        HttpResponse<JsonNode> jsonResponse = restHelper.executeRequest(Unirest.post(API_URL + "login/username")
                .header("X-Tidal-Token", "<PASSWORD>")
                .field("username", username)
                .field("password", password));
        return restHelper.checkAndDeserialize(jsonResponse, Session.class);
    }

    /** Convenience overload that uses a fresh {@link RestHelper}. */
    public static Session login(String username, String password) {
        RestHelper restHelper = new RestHelper();
        return login(username, password, restHelper);
    }

    /** Builds a GET request for the given API path with session header and country code. */
    public HttpRequest get(String url) {
        return Unirest.get(API_URL + url)
                .header(TIDAL_SESSION_HEADER, sessionId)
                .queryString(COUNTRY_CODE, countryCode);
    }

    /** Builds a DELETE request for the given API path with session header and country code. */
    public HttpRequest delete(String url) {
        return Unirest.delete(API_URL + url)
                .header(TIDAL_SESSION_HEADER, sessionId)
                .queryString(COUNTRY_CODE, countryCode);
    }

    /** Builds a POST request for the given API path with session header and country code. */
    public HttpRequestWithBody post(String url) {
        return Unirest.post(API_URL + url)
                .header(TIDAL_SESSION_HEADER, sessionId)
                .queryString(COUNTRY_CODE, countryCode);
    }

    public String getCountryCode() {
        return countryCode;
    }

    public void setCountryCode(String countryCode) {
        this.countryCode = countryCode;
    }

    public String getSessionId() {
        return sessionId;
    }

    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }
}
|
<filename>src/cal/diagnosis-response.ts<gh_stars>1-10
/** Response payload of a diagnosis/health-check endpoint. */
export interface DiagnosisResponse {
    /** Name of the service reporting its health. */
    name: string;
    /** Version of the service. */
    version: string;
    timestamp: string; // ISO 8601
    /** Results of the individual checks that were run. */
    checks: CheckResult[];
}
/** Outcome of a single diagnostic check. */
export interface CheckResult {
    /** Identifier of the check. */
    name: string;
    /** Human-readable description of what the check verifies. */
    desc: string;
    /** True if the check passed. */
    result: boolean;
}
|
// Copyright (C) 2019 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package vici
import (
"bytes"
"reflect"
"testing"
)
// Shared fixtures for the encode/decode round-trip tests below: each gold
// packet is paired with its expected wire encoding.
var (
	goldNamedPacket = &packet{
		ptype: pktCmdRequest,
		name:  "install",
		msg: &Message{
			keys: []string{"child", "ike"},
			data: map[string]interface{}{
				"child": "test-CHILD_SA",
				"ike":   "test-IKE_SA",
			},
		},
	}

	goldNamedPacketBytes = []byte{
		// Packet type
		0,
		// Length of "install"
		7,
		// "install" in bytes
		105, 110, 115, 116, 97, 108, 108,
		// Encoded message bytes
		3, 5, 99, 104, 105, 108, 100, 0, 13, 116, 101, 115, 116,
		45, 67, 72, 73, 76, 68, 95, 83, 65, 3, 3, 105, 107, 101,
		0, 11, 116, 101, 115, 116, 45, 73, 75, 69, 95, 83, 65,
	}

	goldUnnamedPacket = &packet{
		ptype: pktCmdResponse,
		msg: &Message{
			keys: []string{"success", "errmsg"},
			data: map[string]interface{}{
				"success": "no",
				"errmsg":  "failed to install CHILD_SA",
			},
		},
	}

	goldUnnamedPacketBytes = []byte{
		// Packet type
		1,
		// Encoded message bytes
		3, 7, 115, 117, 99, 99, 101, 115, 115, 0, 2, 110, 111, 3, 6,
		101, 114, 114, 109, 115, 103, 0, 26, 102, 97, 105, 108, 101,
		100, 32, 116, 111, 32, 105, 110, 115, 116, 97, 108, 108, 32,
		67, 72, 73, 76, 68, 95, 83, 65,
	}
)
// TestPacketParse verifies that packet.parse decodes both the named
// (command request) and unnamed (command response) gold byte streams into
// packets equal to the gold reference values.
func TestPacketParse(t *testing.T) {
	named := &packet{}
	if err := named.parse(goldNamedPacketBytes); err != nil {
		t.Fatalf("Error parsing packet: %v", err)
	}
	if !reflect.DeepEqual(named, goldNamedPacket) {
		t.Fatalf("Parsed named packet does not equal gold packet.\nExpected: %v\nReceived: %v", goldNamedPacket, named)
	}

	unnamed := &packet{}
	if err := unnamed.parse(goldUnnamedPacketBytes); err != nil {
		t.Fatalf("Error parsing packet: %v", err)
	}
	if !reflect.DeepEqual(unnamed, goldUnnamedPacket) {
		t.Fatalf("Parsed unnamed packet does not equal gold packet.\nExpected: %v\nReceived: %v", goldUnnamedPacket, unnamed)
	}
}
func TestPacketBytes(t *testing.T) {
b, err := goldNamedPacket.bytes()
if err != nil {
t.Fatalf("Unexpected error getting packet bytes: %v", err)
}
if !bytes.Equal(b, goldNamedPacketBytes) {
t.Fatalf("Encoded packet does not equal gold bytes.\nExpected: %v\nReceived: %v", goldNamedPacketBytes, b)
}
b, err = goldUnnamedPacket.bytes()
if err != nil {
t.Fatalf("Unexpected error getting packet bytes: %v", err)
}
if !bytes.Equal(b, goldUnnamedPacketBytes) {
t.Fatalf("Encoded packet does not equal gold bytes.\nExpected: %v\nReceived: %v", goldUnnamedPacketBytes, b)
}
}
|
<gh_stars>0
var ADLmappingGraph = (function () {
var self = {}
self.initMappedProperties = function () {
    // Reset the mapping state to a clean slate: no column->property
    // mappings and an empty property model.
    self.mappedProperties = {
        mappings: {},
        model: {}
    }
}
// Visual defaults (vis.js node shape/color) per node category.
// NOTE(review): the trailing ',' chains this with the next assignment via
// the comma operator; ';' was probably intended -- behavior is unchanged.
self.attrs = {
    table: {shape: "ellipse", color: "grey"},
    column: {shape: "box", color: "#9edae5"},
    literal: {shape: "ellipse", color: "#c2f8f3"},
},
/**
 * Draw (or add) the vis.js node for a mapped column.
 *
 * @param columnName key into ADLmappings.currentMappedColumns; also used
 *                   as the node id
 * @param color      fill color for the node
 * @param position   optional {x, y}; when given the node is pinned there
 */
self.drawNode = function (columnName, color, position) {
    var columnObj = ADLmappings.currentMappedColumns[columnName]
    var existingNodes = visjsGraph.getExistingIdsMap()
    var visjsData = {nodes: [], edges: []}

    // label suffix listing the column's type(s), one per line
    var typeStr = "";
    if (Array.isArray(columnObj.types)) {
        columnObj.types.forEach(function (item, index) {
            if (index > 0)
                typeStr += "\n"
            typeStr += item.type_label
        })
    } else if (columnObj.types) {
        // FIX: original read `typeObj.data.label`, but `typeObj` is not
        // defined anywhere in this scope and would throw a ReferenceError.
        // Use the single (non-array) type descriptor instead.
        // TODO(review): confirm the field name against callers.
        typeStr = columnObj.types.type_label
    }

    if (!existingNodes[columnName]) {
        var node = {
            id: columnName,
            label: columnName + "\n" + typeStr + "",
            data: columnObj,
            shape: ADLmappingGraph.attrs["column"].shape,
            color: color,
        }
        if (position) {
            // pin the node where the caller asked for it
            node.x = position.x;
            node.y = position.y;
            node.fixed = {x: true, y: true};
        }
        visjsData.nodes.push(node);
    }

    if (!visjsGraph.data || !visjsGraph.data.nodes) {
        // first draw: create the graph with selection / context-menu hooks
        var options = {
            selectNodeFn: function (node, event) {
                MainController.UI.hidePopup("graphPopupDiv")
                if (node)
                    self.currentNode = node;
            },
            onRightClickFn: self.graphActions.showGraphPopupMenu,
            keepNodePositionOnDrag: 1
        }
        visjsGraph.draw("ADLmappings_graph", visjsData, options)
    } else {
        visjsGraph.data.nodes.add(visjsData.nodes)
        visjsGraph.data.edges.add(visjsData.edges)
    }

    // refit the viewport once vis.js has integrated the new node
    setTimeout(function () {
        visjsGraph.network.fit()
    }, 300)
}
self.graphActions = {
// Context-menu handler: builds and shows the right-click popup for either
// an edge (delete) or a node (subject/object selection).
showGraphPopupMenu: function (node, point, e) {
    // anchor the popup relative to the graph container
    var top = $("#ADLmappings_graph").position().top
    point.y += top
    var html = "";
    if (node.from) {//edge
        self.currentEdge = node;
        html = " <span class=\"popupMenuItem\" onclick=\"ADLmappingGraph.graphActions.deleteProperty();\"> delete Property</span>"
    } else {
        self.currentNode = node;
        // NOTE(review): `node` was already dereferenced above (node.from),
        // so this falsy-node guard can never trigger -- confirm intent.
        if (!node)
            MainController.UI.hidePopup("graphPopupDiv")
        html = " <span class=\"popupMenuItem\" onclick=\"ADLmappingGraph.graphActions.isPropertySubject();\"> is property subject</span>" +
            "<span class=\"popupMenuItem\" onclick=\"ADLmappingGraph.graphActions.isPropertyObject();\"> is property object</span>"
    }
    $("#graphPopupDiv").html(html);
    MainController.UI.showPopup(point, "graphPopupDiv")
},
deleteProperty: function () {
delete self.mappedProperties.mappings[self.currentEdge.id]
visjsGraph.data.edges.remove(self.currentEdge.id)
},
isPropertySubject: function () {
self.currentAssociation = {
subject: self.currentNode
}
},
// Completes an association started by isPropertySubject(): records the
// current node as object, then opens the property-selection dialog.
isPropertyObject: function () {
    // a subject must have been chosen first
    if (!self.currentAssociation)
        return;
    self.currentAssociation.object = self.currentNode;
    if (!self.currentAssociation.subject || !self.currentAssociation.object)
        return alert("select subject and object Nodes")
    // build "type / type / ..." labels for both ends of the association
    var subjectLabel = "";
    self.currentAssociation.subject.data.types.forEach(function (item, index) {
        if (index > 0)
            subjectLabel += " / "
        subjectLabel += item.type_label
    })
    var objectLabel = "";
    self.currentAssociation.object.data.types.forEach(function (item, index) {
        if (index > 0)
            objectLabel += " / "
        objectLabel += item.type_label
    })
    /* var html = "MAPPING ASSOCIATION<br>" +
    "<table>" +
    "<tr><td class='td_title'>Subject</td><td class='td_value>" + self.currentAssociation.subject.data.columnId + "</td><td class='td_value'>" + subjectLabel + "</td></tr>" +
    "<tr><td class='td_title'>Object</td><td class='td_value>" + self.currentAssociation.object.data.columnId + "</td><td class='td_value>" + objectLabel + "</td></tr>" +
    "<tr><td class='td_title'>Property</td><td colspan='2' class='td_value></td><td class='td_value> <span id='ADLMapping_graphPropertySpan' style='font-weight:bold'>select a property...</span></td></tr>" +
    "</table>" +
    "</div>" +
    "<button onclick='ADLmappingGraph.graphActions.setAssociation()'>OK</button>" +
    "<button onclick='ADLmappingGraph.graphActions.cancelAssociation()'>Cancel</button>"
    $("#ADLmappings_Tabs").tabs("option", "active", 3);
    $("#mainDialogDiv").html(html);*/
    $("#mainDialogDiv").load("snippets/ADL/ADLPropertyassocationDialog.html");
    // populate the dialog after the snippet has loaded
    setTimeout(function () {
        $("#ADLMapping_graphAssociationSubjectSpan").html(self.currentAssociation.subject.data.columnId + "->" + subjectLabel)
        $("#ADLMapping_graphAssociationObjectSpan").html(self.currentAssociation.object.data.columnId + "->" + objectLabel)
        ADLmappings.displayPropertiesTree("ADLmappingPropertiesTree")
        // debounced search box wired to the jstree search plugin
        var to2 = false;
        $('#ADLmappings_propertiesSearchTree').keyup(function () {
            if (to2) {
                clearTimeout(to2);
            }
            to2 = setTimeout(function () {
                var searchString = $("#ADLmappings_propertiesSearchTree").val();
                var xx = $('#ADLmappingPropertiesTree').jstree(true)
                $('#ADLmappingPropertiesTree').jstree(true).search(searchString);
            }, 250);
        });
    })
    $("#mainDialogDiv").dialog("open")
    ADLmappingGraph.isAssigningProperty = true
    // NOTE(review): empty deferred callback -- appears to be dead code
    setTimeout(function () {
    })
},
cancelAssociation: function () {
$("#mainDialogDiv").dialog("close")
},
// Records a subject--predicate--object mapping and adds the matching edge
// (and literal object node, if new) to the graph.
setAssociation: function (property, association) {
    // default to the property selected in the jstree dialog
    if (!property) {
        property = $("#ADLmappingPropertiesTree").jstree(true).get_selected(true)
        if (property && property.length > 0)
            property = property[0]
    }
    if (!property)
        return alert("select a property")
    if (!association)
        association = self.currentAssociation
    var existingNodes = visjsGraph.getExistingIdsMap()
    var visjsData = {nodes: [], edges: []}
    // edge id encodes subject, predicate and object
    var edgeId = association.subject.data.columnId + "_" + property.data.id + "_" + association.object.data.columnId
    self.mappedProperties.mappings[edgeId] = {
        subject: association.subject.data.columnId,
        predicate: property.data.id,
        object: association.object.data.columnId,
    }
    // remember the property's tree ancestry for later model export
    self.mappedProperties.model[property.data.id] = {parents: property.parents, label: property.data.label}
    if (!existingNodes[edgeId]) {
        visjsData.edges.push({
            id: edgeId,
            from: association.subject.data.columnId,
            to: association.object.data.columnId,
            label: property.data.label,
            length: 300,
            arrows: {
                to: {
                    enabled: true,
                    type: "arrow",
                    scaleFactor: 0.5
                },
            },
        })
        if (!existingNodes[association.object.data.columnId]) {
            visjsData.nodes.push({
                id: association.object.data.columnId,
                label: association.object.data.columnId,
                data: association.object.data.columnId,
                shape: ADLmappingGraph.attrs["literal"].shape,
                color: ADLmappingGraph.attrs["literal"].color,
            })
        }
        // NOTE(review): the draw/update/fit logic below is inside the
        // `!existingNodes[edgeId]` guard, so re-adding an existing edge
        // never refreshes the graph -- confirm this is intentional.
        if (!visjsGraph.data || !visjsGraph.data.nodes) {
            var options = {
                onclickFn: function (node, event) {
                    return;
                    // unreachable below -- kept from original
                    if (node)
                        self.currentNode = node;
                },
                onRightClickFn: self.graphActions.showGraphPopupMenu,
                keepNodePositionOnDrag: 1,
            }
            visjsGraph.draw("ADLmappings_graph", visjsData, options)
        } else {
            visjsGraph.data.nodes.add(visjsData.nodes)
            visjsGraph.data.edges.add(visjsData.edges)
        }
        visjsGraph.network.fit()
        /* setTimeout(function(){
        visjsGraph.network.moveTo({
        position: {x:-500, y:-500}
        });
        },200)
        visjsGraph.network.fit()*/
    }
    ADLmappingGraph.isAssigningProperty = false;
    $("#mainDialogDiv").dialog("close")
}
,
// Removes a column's node from the graph (edges are left to vis.js).
removeNode: function (column) {
    visjsGraph.data.nodes.remove(column)
}
}
return self;
})()
|
<reponame>nihei9/vartan
package spec
import (
"strings"
"testing"
verr "github.com/nihei9/vartan/error"
)
// TestLexer_Run drives the grammar lexer over a table of sources and checks
// either the exact token stream or the expected syntax error.
func TestLexer_Run(t *testing.T) {
	// Expectation builders. All use a fixed dummy position because
	// testToken compares only kind, text, and num (not positions).
	idTok := func(text string) *token {
		return newIDToken(text, newPosition(1, 0))
	}
	termPatTok := func(text string) *token {
		return newTerminalPatternToken(text, newPosition(1, 0))
	}
	strTok := func(text string) *token {
		return newStringLiteralToken(text, newPosition(1, 0))
	}
	symTok := func(kind tokenKind) *token {
		return newSymbolToken(kind, newPosition(1, 0))
	}
	posTok := func(num int) *token {
		return newPositionToken(num, newPosition(1, 0))
	}
	invalidTok := func(text string) *token {
		return newInvalidToken(text, newPosition(1, 0))
	}
	tests := []struct {
		caption string
		src     string
		tokens  []*token
		err     error
	}{
		{
			caption: "the lexer can recognize all kinds of tokens",
			src:     `id"terminal"'string':|;#()$1...#%`,
			tokens: []*token{
				idTok("id"),
				termPatTok("terminal"),
				strTok(`string`),
				symTok(tokenKindColon),
				symTok(tokenKindOr),
				symTok(tokenKindSemicolon),
				symTok(tokenKindTreeNodeOpen),
				symTok(tokenKindTreeNodeClose),
				posTok(1),
				symTok(tokenKindExpantion),
				symTok(tokenKindDirectiveMarker),
				symTok(tokenKindMetaDataMarker),
				newEOFToken(),
			},
		},
		{
			caption: "the lexer can recognize keywords",
			src:     `fragment`,
			tokens: []*token{
				symTok(tokenKindKWFragment),
				newEOFToken(),
			},
		},
		{
			caption: "the lexer can recognize character sequences and escape sequences in a terminal",
			src:     `"abc\"\\"`,
			tokens: []*token{
				termPatTok(`abc"\\`),
				newEOFToken(),
			},
		},
		{
			caption: "the lexer can recognize character sequences and escape sequences in a string literal",
			src:     `'.*+?|()[\'\\'`,
			tokens: []*token{
				strTok(`.*+?|()['\`),
				newEOFToken(),
			},
		},
		{
			caption: "a pattern must include at least one character",
			src:     `""`,
			err:     synErrEmptyPattern,
		},
		{
			caption: "a string must include at least one character",
			src:     `''`,
			err:     synErrEmptyString,
		},
		{
			caption: "the lexer can recognize newlines and combine consecutive newlines into one",
			src:     "\u000A | \u000D | \u000D\u000A | \u000A\u000A \u000D\u000D \u000D\u000A\u000D\u000A",
			tokens: []*token{
				symTok(tokenKindNewline),
				symTok(tokenKindOr),
				symTok(tokenKindNewline),
				symTok(tokenKindOr),
				symTok(tokenKindNewline),
				symTok(tokenKindOr),
				symTok(tokenKindNewline),
				newEOFToken(),
			},
		},
		{
			caption: "the lexer ignores line comments",
			src: `
// This is the first comment.
foo
// This is the second comment.
// This is the third comment.
bar // This is the fourth comment.
`,
			tokens: []*token{
				symTok(tokenKindNewline),
				idTok("foo"),
				symTok(tokenKindNewline),
				idTok("bar"),
				symTok(tokenKindNewline),
				newEOFToken(),
			},
		},
		{
			caption: "identifiers beginning with an underscore are not allowed because they are used only auto-generated identifiers",
			src:     `_abc`,
			err:     synErrAutoGenID,
		},
		{
			caption: "an unclosed terminal is not a valid token",
			src:     `"abc`,
			err:     synErrUnclosedTerminal,
		},
		{
			caption: "an incompleted terminal in a pattern is not a valid token",
			src:     `"\`,
			err:     synErrIncompletedEscSeq,
		},
		{
			caption: "an unclosed string is not a valid token",
			src:     `'abc`,
			err:     synErrUnclosedString,
		},
		{
			caption: "an incompleted terminal in a string is not a valid token",
			src:     `'\`,
			err:     synErrIncompletedEscSeq,
		},
		{
			caption: "a position must be greater than or equal to 1",
			src:     `$0`,
			err:     synErrZeroPos,
		},
		{
			caption: "the lexer can recognize valid tokens following an invalid token",
			src:     `abc!!!def`,
			tokens: []*token{
				idTok("abc"),
				invalidTok("!!!"),
				idTok("def"),
				newEOFToken(),
			},
		},
		{
			caption: "the lexer skips white spaces",
			// \u0009: HT
			// \u0020: SP
			src: "a\u0009b\u0020c",
			tokens: []*token{
				idTok("a"),
				idTok("b"),
				idTok("c"),
				newEOFToken(),
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.caption, func(t *testing.T) {
			l, err := newLexer(strings.NewReader(tt.src))
			if err != nil {
				t.Fatal(err)
			}
			// pull tokens until EOF or a lexing error, comparing as we go
			n := 0
			for {
				var tok *token
				tok, err = l.next()
				if err != nil {
					break
				}
				testToken(t, tok, tt.tokens[n])
				n++
				if tok.kind == tokenKindEOF {
					break
				}
			}
			if tt.err != nil {
				// the error must be a SpecError wrapping the expected cause
				synErr, ok := err.(*verr.SpecError)
				if !ok {
					t.Fatalf("unexpected error; want: %v, got: %v", tt.err, err)
				}
				if tt.err != synErr.Cause {
					t.Fatalf("unexpected error; want: %v, got: %v", tt.err, synErr.Cause)
				}
			} else {
				if err != nil {
					t.Fatalf("unexpected error; want: %v, got: %v", tt.err, err)
				}
			}
		})
	}
}
// testToken fails the test unless tok matches expected on kind, text, and
// num; positions are deliberately not compared.
func testToken(t *testing.T, tok, expected *token) {
	t.Helper()
	same := tok.kind == expected.kind &&
		tok.text == expected.text &&
		tok.num == expected.num
	if !same {
		t.Fatalf("unexpected token; want: %+v, got: %+v", expected, tok)
	}
}
|
<gh_stars>1-10
# File: D (Python 2.4)
from pirates.instance import DistributedInstanceWorld
class DistributedMiniGameWorld(DistributedInstanceWorld.DistributedInstanceWorld):
    # Instance world hosting a mini game (decompiled Python 2.4 source).

    def __init__(self, cr):
        DistributedInstanceWorld.DistributedInstanceWorld.__init__(self, cr)
        # when True, turnOn() also activates the world grid for the avatar
        self._turnOnWorldGrid = True

    def generate(self):
        DistributedInstanceWorld.DistributedInstanceWorld.generate(self)

    def announceGenerate(self):
        DistributedInstanceWorld.DistributedInstanceWorld.announceGenerate(self)

    def turnOn(self, av = None):
        # base class is deliberately turned on without an avatar; grid
        # activation for `av` is handled below
        DistributedInstanceWorld.DistributedInstanceWorld.turnOn(self, None)
        if self.worldGrid and av and self._turnOnWorldGrid:
            self.worldGrid.turnOn(av)
        self._turnOnIslands()

    def turnOff(self, cacheIslands = []):
        # NOTE(review): mutable default argument; safe only if the base
        # class never mutates the list -- confirm
        DistributedInstanceWorld.DistributedInstanceWorld.turnOff(self, cacheIslands)

    def addWorldInterest(self, area = None):
        DistributedInstanceWorld.DistributedInstanceWorld.addWorldInterest(self, area)
        if area:
            area.turnOn(localAvatar)

    def removeWorldInterest(self, area = None):
        # NOTE(review): when `area` is None this condition raises
        # AttributeError evaluating `area.gridVisContext`; the decompiler
        # likely inverted a condition such as
        # `if area and not area.gridVisContext: area = None` -- confirm
        # against the original bytecode before changing.
        if not area and area.gridVisContext:
            area = None
        DistributedInstanceWorld.DistributedInstanceWorld.removeWorldInterest(self, area)
|
<filename>src/javascript/lib/nej/util/cache/cache.js
/*
* ------------------------------------------
* 缓存管理基类实现文件
* @version 1.0
* @author genify(<EMAIL>)
* ------------------------------------------
*/
/** @module util/cache/cache */
NEJ.define([
    'base/global',
    'base/klass',
    'base/util',
    'util/event',
    './storage.js'
],function(NEJ,_k,_u,_t,_j,_p,_o,_f,_r){
    var _pro,
        // per-page-load unique key under which the shared cache is stashed
        // on each subclass constructor ('-l' suffix = in-flight requests)
        _ckey = 'dat-'+(+new Date);
/**
* 缓存对象基类
*
* 脚本举例
* ```javascript
* NEJ.define([
* 'base/klass',
* 'util/ajax/xdr',
* 'util/cache/cache'
* ],function(_k,_j,_t,_p){
* var _pro;
*
* _p._$$CacheCustom = _k._$klass();
* _pro = _p._$$CacheCustom._$extend(_t._$$CacheAbstract);
*
* // 取缓存数据,先从内存中取,没有从服务器上取
* _pro._$getDataInCache = function(_key){
* this.__setDataInCache(_key,_value);
* };
*
* // 取数据
* _pro._$getData = function(_key){
* var _data = this._$getDataInCache(_key);
* // 数据已在缓存中
* if (_data!=null){
* this._$dispatchEvent('ondataload',{
* key:_key
* });
* return;
* }
* // 从服务器端载入数据
* // rkey为请求唯一标识,可以是URL,也可以是某种算法的结果
* var _rkey = this.__doGenReqKey(_key),
* _callback = this._$dispatchEvent._$bind(
* this,'ondataload',{key:_key}
* );
* if (!this.__doQueueRequest(_rkey,_callback)){
* _j._$request({
* onload:function(_data){
* // 缓存数据
* this.__setDataInCache(_key,_data);
* // 触发队列中同请求的回调逻辑
* this.__doCallbackRequest(_rkey);
* }._$bind(this)
* });
* }
* };
*
* return _p;
* });
* ```
*
* 脚本举例
* ```javascript
* NEJ.define([
* '/path/to/custom/cache.js'
* ],function(_p){
* // 使用Cache
* var _cache = _p._$$CacheCustom._$allocate({
* ondataload:function(_event){
* // get data in cache
* var _data = this._$getDataInCache(_event.key);
* // TODO
* }
* });
* // 第一个请求
* _cache._$getData('a');
* // 第二个请求
* _cache._$getData('b');、
* // 不会发请求,直接走缓存
* _cache._$getData('a');
* });
* ```
*
* @class module:util/cache/cache._$$CacheAbstract
* @extends module:util/event._$$EventTarget
*
* @param {Object} config - 配置参数
*/
_p._$$CacheAbstract = _k._$klass();
_pro = _p._$$CacheAbstract._$extend(_t._$$EventTarget);
/**
 * Initializer.
 *
 * Lazily creates a cache shared by ALL instances of the same constructor
 * (stored on the constructor under the session-unique key `_ckey`), plus
 * a nested map under `_ckey + '-l'` tracking in-flight request callbacks
 * for request de-duplication.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__init
 * @return {Void}
 */
_pro.__init = function(){
    this.__super();
    // all instances of one subclass share a single cache object
    this.__cache = this.constructor[_ckey];
    if (!this.__cache){
        this.__cache = {};
        // request loading information
        this.__cache[_ckey+'-l'] = {};
        this.constructor[_ckey] = this.__cache;
    }
};
/**
 * Destructor: unhooks this instance's queued request callbacks BEFORE the
 * parent event-target teardown runs, so callbacks bound to a destroyed
 * instance are never invoked.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__destroy
 * @return {Void}
 */
_pro.__destroy = function(){
    this.__doClearReqFromQueue();
    this.__super();
};
/**
 * Read a value from the in-memory cache.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__getDataInCache
 * @param {String} arg0 - cache key
 * @return {Variable} cached data (undefined when absent)
 */
_pro.__getDataInCache = function(_key){
    return this.__cache[_key];
};
/**
 * Store a value in the in-memory cache.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__setDataInCache
 * @param {String} arg0 - cache key
 * @param {Variable} arg1 - data to cache
 * @return {Void}
 */
_pro.__setDataInCache = function(_key,_value){
    this.__cache[_key] = _value;
};
/**
 * Read a value from the in-memory cache, seeding it with a default when
 * absent (the `== null` check treats both null and undefined as missing).
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__getDataInCacheWithDefault
 * @param {String} arg0 - cache key
 * @param {Variable} arg1 - default value
 * @return {Variable} cached or default data
 */
_pro.__getDataInCacheWithDefault = function(_key,_default){
    var _data = this.__getDataInCache(_key);
    if (_data==null){
        _data = _default;
        this.__setDataInCache(_key,_data);
    }
    return _data;
};
/**
 * Delete cached data; with no key, clears the entire in-memory cache
 * except the internal request-queue slot.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__delDataInCache
 * @param {String} arg0 - cache key (optional)
 * @return {Void}
 */
_pro.__delDataInCache = function(_key){
    if (_key!=null){
        delete this.__cache[_key];
        return;
    }
    // no key: remove every entry but keep the request-loading bookkeeping
    _u._$loop(
        this.__cache,function(_item,_key){
            if (_key!=(_ckey+'-l')){
                this.__delDataInCache(_key);
            }
        },this
    );
};
/**
 * Delete data from local storage.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__delDataInStorage
 * @param {String} arg0 - storage key
 * @return {String} value returned by the storage helper
 */
_pro.__delDataInStorage = function(_key){
    return _j._$delDataInStorage(_key);
};
/**
 * Read data from local storage.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__getDataInStorage
 * @param {String} arg0 - storage key
 * @return {String} stored data
 */
_pro.__getDataInStorage = function(_key){
    return _j._$getDataInStorage(_key);
};
/**
 * Write data to local storage.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__setDataInStorage
 * @param {String} arg0 - storage key
 * @param {Variable} arg1 - data to store
 * @return {Void}
 */
_pro.__setDataInStorage = function(_key,_value){
    _j._$setDataInStorage(_key,_value);
};
/**
 * Read local data (memory, then storage), seeding both layers with a
 * default when the key is absent.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__getDataLocalWithDefault
 * @param {String} arg0 - key
 * @param {Variable} arg1 - default value
 * @return {Variable} data
 */
_pro.__getDataLocalWithDefault = function(_key,_default){
    var _data = this.__getDataLocal(_key);
    if (_data==null){
        _data = _default;
        this.__setDataLocal(_key,_data);
    }
    return _data;
};
/**
 * Read local data: check the in-memory cache first, then local storage;
 * a storage hit is written back to memory.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__getDataLocal
 * @param {String} arg0 - key
 * @return {Variable} data
 */
_pro.__getDataLocal = function(_key){
    // get from memory
    var _data = this.__getDataInCache(_key);
    if (_data!=null){
        return _data;
    }
    // get from storage
    _data = this.__getDataInStorage(_key);
    if (_data!=null){
        this.__setDataInCache(_key,_data);
    }
    return _data;
};
/**
 * Write data to both the in-memory cache and local storage.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__setDataLocal
 * @param {String} arg0 - key
 * @param {Variable} arg1 - data
 * @return {Void}
 */
_pro.__setDataLocal = function(_key,_value){
    this.__setDataInStorage(_key,_value);
    this.__setDataInCache(_key,_value);
};
/**
 * Delete local data from memory and storage; with no key, clears all
 * entries except the internal request-queue slot.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__delDataLocal
 * @param {String} arg0 - key (optional)
 * @return {Void}
 */
_pro.__delDataLocal = function(_key){
    if (_key!=null){
        delete this.__cache[_key];
        _j._$delDataInStorage(_key);
        return;
    }
    _u._$loop(
        this.__cache,function(_item,_key){
            if (_key!=(_ckey+'-l')){
                this.__delDataLocal(_key);
            }
        },this
    );
};
/**
 * Clear all locally cached data (memory and storage) for this cache.
 *
 * Example:
 * ```javascript
 * cache._$clearDataLocal();
 * ```
 *
 * @method module:util/cache/cache._$$CacheAbstract#_$clearDataLocal
 * @return {Void}
 */
_pro._$clearDataLocal = function(){
    this.__delDataLocal();
};
/**
 * Fire every callback queued for a request key, then clear the queue
 * entry. Arguments after the key are forwarded to each callback. A
 * throwing callback is logged (rethrown in DEBUG builds) so it cannot
 * block the remaining callbacks.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__doCallbackRequest
 * @param {String} arg0 - request key
 * @return {Void}
 */
_pro.__doCallbackRequest = function(_key){
    var _data = this.__cache[_ckey+'-l'],
        _args = _r.slice.call(arguments,1);
    _u._$forEach(
        _data[_key],function(_callback){
            try{
                _callback.apply(this,_args);
            }catch(ex){
                // ignore
                if (DEBUG) throw ex;
                console.error(ex.message);
                console.error(ex.stack);
            }
        }
    );
    delete _data[_key];
};
/**
 * Queue a callback for a request key, de-duplicating identical requests:
 * only the caller that sees a `false` return should actually issue the
 * network request; later callers just wait for __doCallbackRequest.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__doQueueRequest
 * @param {String} arg0 - request key
 * @param {Function} arg1 - callback to run when the request completes
 * @return {Boolean} whether an identical request is already in flight
 */
_pro.__doQueueRequest = function(_key,_callback){
    // remember what this instance queued so __destroy can unhook it
    if (!this.__qtmp){
        this.__qtmp = [];
    }
    this.__qtmp.push({
        key:_key,
        callback:_callback
    });
    // check request queue list
    _callback = _callback||_f;
    var _list = this.__cache[_ckey+'-l'][_key];
    if (!_list){
        _list = [_callback];
        this.__cache[_ckey+'-l'][_key] = _list;
        return !1;
    }
    _list.push(_callback);
    return !0;
};
/**
 * Remove this instance's queued callbacks from the shared request queue.
 *
 * Called on destroy so callbacks bound to a dead instance are never
 * invoked. When a request's callback list becomes empty (or no longer
 * exists), the whole queue entry is cleared so the request can be
 * re-issued later.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__doClearReqFromQueue
 * @return {Void}
 */
_pro.__doClearReqFromQueue = function(){
    _u._$forEach(this.__qtmp,function(it){
        var _xlist = this.__cache[_ckey+'-l'][it.key];
        _u._$reverseEach(
            _xlist,function(item,index,list){
                if (item===it.callback){
                    // FIX: `list.splice(index)` truncated the list from
                    // `index` to the end, silently dropping OTHER queued
                    // callbacks; remove exactly the one matching entry.
                    list.splice(index,1);
                }
            }
        );
        if (!_xlist||!_xlist.length){
            this.__doClearReqQueue(it.key);
        }
    },this);
    delete this.__qtmp;
};
/**
 * Drop the whole callback queue for a request key.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__doClearReqQueue
 * @param {String} arg0 - request key
 * @return {Void}
 */
_pro.__doClearReqQueue = function(_key){
    delete this.__cache[_ckey+'-l'][_key];
};
/**
 * Check whether a list already holds every element of a fragment.
 *
 * @protected
 * @method module:util/cache/cache._$$CacheAbstract#__hasFragment
 * @param {Array} arg0 - list
 * @param {Number} arg1 - offset of the fragment
 * @param {Number} arg2 - count; 0 means the whole list (default 0)
 * @return {Boolean} whether the fragment is fully present
 */
_pro.__hasFragment = function(_list,_offset,_limit){
    if (!_list) return !1;
    _offset = parseInt(_offset)||0;
    _limit = parseInt(_limit)||0;
    if (!_limit){
        // whole-list check only succeeds once the list is fully loaded
        if (!_list.loaded){
            return !1;
        }
        _limit = _list.length;
    }
    // length is list total number once loaded; clamp the check to the tail
    if (!!_list.loaded){
        _limit = Math.min(_limit,_list.length-_offset);
    }
    for(var i=0;i<_limit;i++){
        if (_list[_offset+i]===undefined){
            return !1;
        }
    }
    return !0;
};
/**
 * Public check that a cached list fragment is completely filled.
 *
 * @method module:util/cache/cache._$$CacheAbstract#_$isFragmentFilled
 * @param {Array} arg0 - list cache key
 * @param {Number} arg1 - offset
 * @param {Number} arg2 - count; 0 means the whole list (default 0)
 * @return {Boolean} whether the fragment is fully present
 */
_pro._$isFragmentFilled = function(_key,_offset,_limit){
    return this.__hasFragment(
        this._$getListInCache(_key),
        _offset,_limit
    );
};
// legacy namespace compatibility (nej.ut._$$Cache)
if (CMPT){
    NEJ.P('nej.ut')._$$Cache = _p._$$CacheAbstract;
}
return _p;
});
|
/**
 * Extract a semantic version (x.y.z) from a declaration of the form
 * `const version = '1.2.3'` inside the given source text.
 *
 * @param {string} codeSnippet - source text to scan
 * @returns {string} the captured version, or "Version number not found"
 */
function extractVersion(codeSnippet) {
    const found = /const version = '(\d+\.\d+\.\d+)'/.exec(codeSnippet);
    return found ? found[1] : "Version number not found";
}
// Smoke test: the snippet embeds a version declaration among other lines.
// Test the function with the provided example
const codeSnippet = "dgmartin/design-tokens\n/* istanbul ignore file */\nconst version = '6.3.1'\nexport default version";
console.log(extractVersion(codeSnippet)); // Output: "6.3.1"
|
/*
* rest_client_not_implemented.h
*
* Created on: 22 Oct 2015
* Author: dhsmith
*/
#ifndef REST_CLIENT_NOT_IMPLEMENTED_H_
#define REST_CLIENT_NOT_IMPLEMENTED_H_
#include <exception>
#include <string>
#include "cli_exception.h"
namespace fts3
{
namespace cli
{
/**
 * Exception class used when the required functionality has not been
 * implemented in this (C++) REST client.
 *
 * tryFallback() returns true -- presumably so callers can retry the
 * operation through an alternative client; confirm with cli_exception's
 * contract.
 */
class rest_client_not_implemented : public cli_exception
{

public:
    /// Constructs with the generic "not implemented" message.
    rest_client_not_implemented() : cli_exception("Not implemented in this REST client") {}

    /// Constructs with the generic message plus caller-supplied detail.
    rest_client_not_implemented(std::string const & msg) : cli_exception("Not implemented in this REST client: " + msg) {}

    virtual bool tryFallback() const
    {
        return true;
    }
};
}
}
#endif /* REST_CLIENT_NOT_IMPLEMENTED_H_ */
|
<gh_stars>0
require "simple_discovery/version"
require "simple_discovery/announcer"
require "simple_discovery/browser"
# Namespace for the simple service-discovery announcer and browser.
module Discovery
  # Network port used for discovery traffic.
  # NOTE(review): assumed from the constant name; confirm against the
  # Announcer/Browser implementations.
  PORT = 2512
end
|
#!/bin/bash
# Pass in name and status
function die { echo $1: status $2 ; exit $2; }
# Run the transition test configuration for each scenario index 0..16.
# Same behaviour as the original 17 unrolled lines: each cmsRun executes in
# a subshell and the script aborts via die on the first failure, with an
# identical failure message.
for i in {0..16}; do
  (cmsRun ${LOCAL_TEST_DIR}/transition_test_cfg.py $i ) || die "Failure running cmsRun transition_test_cfg.py $i" $?
done
|
import React from 'react';
import ReactModal from 'react-modal';
import { FaCalendarAlt, FaMapMarker, FaAngleRight, FaTimes } from 'react-icons/fa';
import Img from "gatsby-image"
import './experienceItem.scss';
// Card for a single experience entry: a clickable thumbnail that opens a
// modal with the full image, title, period, location, and bullet details.
export default class extends React.Component {
    constructor(props) {
        super(props);
        // modal starts closed
        this.state = {
            showModal: false
        }
        this.handleOpenModal = this.handleOpenModal.bind(this);
        this.handleCloseModal = this.handleCloseModal.bind(this);
    }
    componentDidMount() {
        // react-modal needs the app root to manage accessibility attributes
        ReactModal.setAppElement('#main');
    }
    handleOpenModal () {
        this.setState({ showModal: true });
    }
    handleCloseModal () {
        this.setState({ showModal: false });
    }
    render() {
        const { model } = this.props;
        // debugger;
        // NOTE(review): contentLabel below still reads "onRequestClose
        // Example" -- looks like leftover sample text; confirm intended.
        return (
            <div className='item elevatable'>
                <span onClick={this.handleOpenModal}>
                    <Img
                        fluid={model.optImg.childImageSharp.fluid}
                        alt={model.alt}
                        className='itmImage'
                    />
                </span>
                <ReactModal
                    isOpen={this.state.showModal}
                    contentLabel="onRequestClose Example"
                    onRequestClose={this.handleCloseModal}
                    className="Modal"
                    overlayClassName="Overlay"
                >
                    {/* <img src={model.image} alt={model.alt} /> */}
                    <Img
                        fluid={model.optImg.childImageSharp.fluid}
                        alt={model.alt}
                        className='itmImage'
                    />
                    <FaTimes className='closeBtn' onClick={this.handleCloseModal}/>
                    {this.title(model)}
                    <div className='detail'><FaCalendarAlt/>{model.period}</div>
                    {model.location ? <div className='detail'><FaMapMarker/>{model.location}</div> : null}
                    <div className='bullets'>
                        {model.bullets.map((el, ix) => <div className='detail' key={ix}><FaAngleRight/><span dangerouslySetInnerHTML={{__html: el}}></span></div>)}
                    </div>
                </ReactModal>
            </div>
        );
    }
    title(model) {
        // "Title @ Vendor" when a role title exists, vendor-only otherwise
        if (model.title) {
            return <h3>{model.title} <br/>@<br/> {model.vendor}</h3>
        } else {
            return <h3>{model.vendor}</h3>
        }
    }
}
|
#ifndef parent_window_h
#define parent_window_h
#include <QWidget>
#include "checksum_data.h"
#include "checksum_window.h"
/**
 * Top-level widget that creates the checksum window and holds the data it
 * displays.
 */
class parent_window : public QWidget
{
    Q_OBJECT

public:
    parent_window();

private:
    checksum_window *window;  // child checksum window -- presumably owned; confirm in the .cpp
    checksum_data data;       // backing data shown by the checksum window
};
#endif
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.exceptions;
import static com.google.common.base.Preconditions.checkNotNull;
import org.sosy_lab.cpachecker.core.interfaces.ConfigurableProgramAnalysis;
/**
* Exception for cases when a class implements an interface like
* {@link ConfigurableProgramAnalysis}, but fails to conform to some additional
* semantic condition.
*/
public class InvalidComponentException extends CPAException {

  private static final long serialVersionUID = 3018467878727210858L;

  /**
   * @param cls the offending implementation class
   * @param componentType human-readable name of the expected component kind
   * @param msg description of the violated condition
   */
  public InvalidComponentException(Class<?> cls, String componentType, String msg) {
    super(cls.getCanonicalName() + " is not a valid " + checkNotNull(componentType) + ": " + checkNotNull(msg));
  }

  /**
   * @param cpa the offending implementation class
   * @param componentType human-readable name of the expected component kind
   * @param cause underlying failure; its message (or class name when the
   *     message is null) becomes part of this exception's message
   */
  public InvalidComponentException(Class<?> cpa, String componentType, Throwable cause) {
    super(cpa.getCanonicalName() + " is not a valid " + checkNotNull(componentType) + ": "
        + (cause.getMessage() != null ? cause.getMessage() : cause.getClass().getSimpleName()), cause);
  }
}
|
import { setupApp, teardownApp } from './firestore.setup';
import './helpers';
// Firestore security-rules tests. setupApp boots an emulator app with an
// optional auth context and seed data; toAllow/toDeny are custom matchers
// registered by './helpers'.
// NOTE(review): the expect(...) calls below are not awaited; confirm the
// custom matchers settle the promises before the test completes.
describe('firestore', () => {
    let db;
    afterEach(async () => {
        await teardownApp();
    });
    describe('default rules', () => {
        let ref;
        beforeEach(async () => {
            db = await setupApp();
            ref = db.collection('some-nonexistent-collection');
        });
        it('fail when reading/writing an unauthorized collection', () => {
            expect(ref.get()).toDeny();
            expect(ref.add({})).toDeny();
        });
    });
    describe('feedback rules', () => {
        let colRef;
        let docRef;
        const mockFeedback = {
            contentRating: 5,
            styleRating: 5,
            comment: '',
        };
        // seed: one session with feedback docs owned by uids '1' and '2'
        const mockData = {
            'sessions/1': {
                title: 'Awesome Stuff',
            },
            'sessions/1/feedback/1': mockFeedback,
            'sessions/1/feedback/2': mockFeedback,
        };
        describe('when not authenticated', () => {
            beforeEach(async () => {
                db = await setupApp(undefined, mockData);
                colRef = db.collection('sessions/1/feedback');
                docRef = colRef.doc('1');
            });
            it('fail when reading/writing an unauthorized collection', () => {
                expect(colRef.get()).toDeny();
                expect(colRef.add({})).toDeny();
                expect(docRef.get()).toDeny();
                expect(docRef.update({})).toDeny();
                expect(docRef.delete()).toDeny();
            });
        });
        describe('when authenticated', () => {
            let ownDocRef;
            beforeEach(async () => {
                // uid '2' matches feedback document id '2'
                db = await setupApp({ uid: '2' }, mockData);
                colRef = db.collection('sessions/1/feedback');
                docRef = colRef.doc('1');
                ownDocRef = colRef.doc('2');
            });
            it('fail on other documents', () => {
                expect(colRef.get()).toDeny();
                expect(colRef.add(mockFeedback)).toDeny();
                expect(docRef.get()).toDeny();
                expect(docRef.update({})).toDeny();
                expect(docRef.delete()).toDeny();
            });
            it('can interact with own documents', () => {
                expect(ownDocRef.get()).toAllow();
                expect(ownDocRef.update({})).toAllow();
                expect(ownDocRef.delete()).toAllow();
            });
            it('data validation', () => {
                expect(ownDocRef.update({})).toAllow();
                // payloads the rules should reject (out-of-range ratings,
                // null or >256-char comment) -- rules not visible here
                [
                    { contentRating: -1 },
                    { contentRating: 11 },
                    { styleRating: -1 },
                    { styleRating: 11 },
                    { comment: null },
                    { comment: 'c'.repeat(257) },
                ].forEach((data) => {
                    expect(ownDocRef.update(data)).toDeny();
                });
            });
        });
    });
});
//# sourceMappingURL=firestore.rules.test.js.map
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/core/src/main/java/io/opensphere/core/model/BoundingBox.java
package io.opensphere.core.model;
import io.opensphere.core.math.Vector3d;
/**
 * Interface for an object that models a rectangle in 2D or a box in 3D.
 *
 * @param <T> position type of the bounding box corners.
 */
public interface BoundingBox<T extends Position> extends Quadrilateral<T>
{
    /**
     * Returns true if this bounding box contains another bounding box.
     *
     * @param otherBox the box to check for containment in this bounding box
     * @return true if contains, false if not.
     */
    boolean contains(BoundingBox<T> otherBox);

    /**
     * Get the depth of this bounding box. Units are implementation-dependent.
     *
     * @return The depth.
     */
    double getDepth();

    /**
     * Get the height of this bounding box. Units are implementation-dependent.
     *
     * @return The height.
     */
    double getHeight();

    /**
     * Get the lower-left corner of the box.
     *
     * @return The lower-left corner.
     */
    T getLowerLeft();

    /**
     * Get the lower-right corner of the box.
     *
     * @return The lower-right corner.
     */
    T getLowerRight();

    /**
     * Determine the offset of this box inside another box. (0,0,0) is the
     * lower-left-back corner of the outer box and (1,1,1) is the
     * upper-right-front corner.
     *
     * @param outerBox The outer box.
     * @return The coordinates relative to the outer box.
     */
    Vector3d getOffset(BoundingBox<T> outerBox);

    /**
     * Get the percentage this is from the lowest corner to the highest corner.
     *
     * @param position Position within the box.
     * @return a vector containing the x and y percentage offset into the box.
     */
    Vector3d getOffsetPercent(Position position);

    /**
     * Get the upper-left corner of the box.
     *
     * @return The upper-left corner.
     */
    T getUpperLeft();

    /**
     * Get the upper-right corner of the box.
     *
     * @return The upper-right corner.
     */
    T getUpperRight();

    /**
     * Get the width of this bounding box. Units are implementation-dependent.
     *
     * @return The width.
     */
    double getWidth();

    /**
     * Returns a {@link BoundingBox} that represents the envelope (bounding
     * rectangle) of the intersection of two bounding boxes. If there is no
     * intersection it will return null.
     *
     * If one of the two bounding boxes is null it will return the envelope of
     * the non-null box. If both boxes are null it will return null.
     *
     * @param otherBox the {@link BoundingBox} to intersect with
     * @return the envelope (rectangle) of the intersection of the two bounding
     *         boxes, or null if no intersection.
     * @throws UnsupportedOperationException if not valid for this type.
     */
    BoundingBox<T> intersection(BoundingBox<T> otherBox);

    /**
     * Returns true if this bounding box intersects another bounding box.
     * <p>
     * Intersection is defined like
     * {@link com.vividsolutions.jts.geom.Geometry#intersects(com.vividsolutions.jts.geom.Geometry)}
     * , and as such, includes the case where two geometries touch but do not
     * overlap.
     *
     * @param otherBox to check for intersection.
     * @return true if it intersects, false if not.
     * @throws UnsupportedOperationException if not valid for this type.
     */
    boolean intersects(BoundingBox<T> otherBox);

    /**
     * Provide a simple string with minimal data.
     *
     * @return Simple string.
     */
    String toSimpleString();

    /**
     * Returns the envelope (bounding rectangle) for the union of two bounding
     * boxes. If both boxes are null, returns null. If one of the bounding boxes
     * are null returns the envelope of the non-null box.
     *
     * @param otherBox the {@link BoundingBox} to union with
     * @return the {@link BoundingBox} the resultant envelope (bounding
     *         rectangle) of the union or null.
     * @throws UnsupportedOperationException if not valid for this type.
     */
    BoundingBox<T> union(BoundingBox<T> otherBox);
}
|
<gh_stars>1-10
/// <reference types="react" />
/**
 * Appends the ownerState object to the props, merging with the existing one if necessary.
 *
 * @param elementType Type of the element that owns the `existingProps`. If the element is a DOM node, `ownerState` are not applied.
 * @param existingProps Props of the element.
 * @param ownerState Owner-state object to merge into the returned props.
 * @returns The props with `ownerState` merged in (unchanged for DOM element types).
 */
export default function appendOwnerState(elementType: React.ElementType, existingProps: Record<string, any>, ownerState: object): Record<string, any>;
|
import requests
from bs4 import BeautifulSoup

# Fetch the HTML from the webpage.
# A timeout keeps the request from hanging forever, and raise_for_status()
# fails fast on HTTP error responses instead of silently parsing an error page.
page = requests.get('https://example.com', timeout=10)
page.raise_for_status()
soup = BeautifulSoup(page.text, 'html.parser')

# Isolate the product containers.
product_list = soup.find_all('div', class_='product')

# Extract name/price/description from each product container.
# NOTE(review): assumes every product div contains name/price/description
# children; .find() returns None otherwise and .text would raise
# AttributeError — confirm against the real page markup.
products = []
for product in product_list:
    name = product.find('div', class_='name').text.strip()
    price = product.find('div', class_='price').text.strip()
    description = product.find('div', class_='description').text.strip()
    products.append({
        'name': name,
        'price': price,
        'description': description
    })

# Print the product list
print(products)
|
const mongoose = require('mongoose');

// Mongoose schema for a Record document: a required title and description.
const RecordSchema = new mongoose.Schema({
    title: {
        type: String,
        required: true
    },
    description: {
        type: String,
        required: true
    }
});

const Record = mongoose.model('Record', RecordSchema);

// NOTE(review): module.exports is reassigned to the model here, which detaches
// the bare `exports` alias — any additional exports added later in this file
// must be attached to module.exports itself or they will not be visible to
// require() callers.
module.exports = Record;
// create record
// BUG FIX: attach to module.exports instead of the bare `exports` alias.
// `module.exports = Record` above replaced the exported object, so a plain
// `exports.createRecord = ...` would never be visible to require() callers.
module.exports.createRecord = (req, res) => {
    // Build the new document from the request body; schema validation
    // (required title/description) happens in save().
    const record = new Record({
        title: req.body.title,
        description: req.body.description
    });
    record
        .save()
        .then(data => {
            // Respond with the persisted document (includes _id).
            res.send(data);
        })
        .catch(err => {
            res.status(500).send({
                message: err.message || 'Some error occurred while creating the Record.'
            });
        });
};
// get all records
// BUG FIX: attach to module.exports instead of the bare `exports` alias.
// `module.exports = Record` above replaced the exported object, so a plain
// `exports.getAllRecords = ...` would never be visible to require() callers.
module.exports.getAllRecords = (req, res) => {
    Record.find()
        .then(records => {
            res.send(records);
        })
        .catch(err => {
            res.status(500).send({
                message: err.message || 'Some error occurred while retrieving records.'
            });
        });
};
// update record
// BUG FIX: attach to module.exports instead of the bare `exports` alias.
// `module.exports = Record` above replaced the exported object, so a plain
// `exports.updateRecord = ...` would never be visible to require() callers.
module.exports.updateRecord = (req, res) => {
    Record.findByIdAndUpdate(
        req.params.recordId,
        {
            title: req.body.title,
            description: req.body.description
        },
        { new: true } // return the updated document rather than the original
    )
        .then(record => {
            if (!record) {
                return res.status(404).send({
                    message: 'Record not found with id ' + req.params.recordId
                });
            }
            res.send(record);
        })
        .catch(err => {
            // A malformed ObjectId is reported as "not found" rather than a 500.
            if (err.kind === 'ObjectId') {
                return res.status(404).send({
                    message: 'Record not found with id ' + req.params.recordId
                });
            }
            return res.status(500).send({
                message:
                    'Error updating record with id ' +
                    req.params.recordId
            });
        });
};
// delete record
// BUG FIX: attach to module.exports instead of the bare `exports` alias.
// `module.exports = Record` above replaced the exported object, so a plain
// `exports.deleteRecord = ...` would never be visible to require() callers.
module.exports.deleteRecord = (req, res) => {
    Record.findByIdAndRemove(req.params.recordId)
        .then(record => {
            if (!record) {
                return res.status(404).send({
                    message: 'Record not found with id ' + req.params.recordId
                });
            }
            res.send({ message: 'Record deleted successfully!' });
        })
        .catch(err => {
            // Treat malformed ids and driver "NotFound" errors as 404s.
            if (err.kind === 'ObjectId' || err.name === 'NotFound') {
                return res.status(404).send({
                    message: 'Record not found with id ' + req.params.recordId
                });
            }
            return res.status(500).send({
                message: 'Could not delete record with id ' + req.params.recordId
            });
        });
};
|
#!/bin/bash
# Benchmark encode/decode for several RS(data, parity) configurations of
# xorslp_ec. Deduplicated: iterate data-block counts instead of repeating
# the same three-line stanza per configuration.
XORSLP_EC=../target/release/xorslp_ec

set -eu

cargo build --release --features 2048block

for i in 4 3 2; do
    for k in 8 9 10; do
        echo "< RS($k, $i) >"
        $XORSLP_EC --data-block $k --parity-block $i --enc-dec
        echo "</ RS($k, $i) >"
        echo ""
    done
done
|
from imageai.Classification.Custom import ClassificationModelTrainer
import tensorflow as tf
import os

# This will disable the gpu
# os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))

model_trainer = ClassificationModelTrainer()
model_trainer.setModelTypeAsDenseNet121()
# Raw string: avoids invalid escape sequences (\m, \d) in the Windows path,
# which emit DeprecationWarnings and are fragile if the path changes.
model_trainer.setDataDirectory(r"E:\memelon\doge-training\doges")
# Train a 3-class classifier for 10 epochs with batch size 16.
model_trainer.trainModel(num_objects=3, num_experiments=10, enhance_data=False, batch_size=16, show_network_summary=True)
|
#!/bin/sh
# Copy the built frontend assets into the running tms-finder-ue container's
# nginx web root (the trailing /. copies the directory contents, not the dir).
docker cp ./dist/. tms-finder-ue:/usr/share/nginx/html
|
<reponame>appigram/windmill-react-ui
import React from 'react';
export interface HelperTextProps extends React.HTMLAttributes<HTMLSpanElement> {
    /**
     * Defines the color of the helper text (the same as with Input, Select, etc.)
     */
    valid?: boolean;
}
/**
 * Helper text rendered below form controls; forwards its ref to the
 * underlying span element.
 */
declare const HelperText: React.ForwardRefExoticComponent<HelperTextProps & React.RefAttributes<HTMLSpanElement>>;
export default HelperText;
//# sourceMappingURL=HelperText.d.ts.map
|
/**
 * Walk a sequence of U/D/L/R movement commands from the origin and return
 * the final position as the string "x,y". Unrecognized characters are ignored.
 */
function finalPosition($commands) {
    // Current coordinates, starting at the origin.
    $x = 0;
    $y = 0;

    $length = strlen($commands);
    for ($i = 0; $i < $length; $i++) {
        switch ($commands[$i]) {
            case 'U':
                $y++;
                break;
            case 'D':
                $y--;
                break;
            case 'L':
                $x--;
                break;
            case 'R':
                $x++;
                break;
        }
    }

    // Final position formatted as "x,y".
    return "$x,$y";
}

// Test the function with an example input
echo finalPosition("UURRDDLL"); // Output: "0,0"
|
require "spec_helper"

# Integration specs for Hatchet's git-based deploy helpers.
describe "GitAppTest" do
  it "can deploy git app to the main branch" do
    # allow_failure: the fixture app is expected to fail its build; the spec
    # only asserts that the intentional error surfaces in the deploy output.
    Hatchet::GitApp.new("lock_fail_main", allow_failure: true).deploy do |app|
      expect(app.output).to match("INTENTIONAL ERROR")
    end
  end

  it "returns the correct branch name on circle CI" do
    # CIRCLE_BRANCH is only set on CircleCI workers; skip elsewhere.
    skip("only runs on circle") unless ENV["CIRCLE_BRANCH"]
    expect(Hatchet.git_branch).to eq(ENV["CIRCLE_BRANCH"])
  end
end
|
import time

from win10toast import ToastNotifier


def set_reminder():
    """Prompt for a reminder message and a delay in minutes, then show a
    Windows toast notification after that delay.

    Blocks the whole program with time.sleep() until the reminder fires.
    Raises ValueError if the entered delay is not a number.
    """
    rem = input("Enter your reminder message: ")  # input() already returns a str
    print("In how many minutes?")
    local_time = float(input())  # Prompt user for time in minutes
    local_time *= 60  # Convert time to seconds
    print('I will remind you for ' + rem + ' in ' + str(local_time) + ' seconds ')
    time.sleep(local_time)  # Pause program for specified time
    toaster = ToastNotifier()
    # duration=10: keep the toast on screen for ten seconds.
    toaster.show_toast(rem, "Here is your reminder!", duration=10)


set_reminder()  # Call the function to set a reminder
|
<form>
  <!-- name attributes are required: without them the fields are omitted from
       the form submission entirely. type="email" adds built-in validation. -->
  <label for="name">Name:</label>
  <input type="text" id="name" name="name">
  <label for="email">Email:</label>
  <input type="email" id="email" name="email">
  <input type="submit" value="Submit">
</form>
|
package vcs.citydb.wfs.kvp;
import net.opengis.fes._2.AbstractQueryExpressionType;
import net.opengis.fes._2.FilterType;
import net.opengis.fes._2.SortByType;
import net.opengis.wfs._2.ParameterType;
import net.opengis.wfs._2.PropertyName;
import net.opengis.wfs._2.QueryType;
import net.opengis.wfs._2.StoredQueryType;
import org.citygml4j.builder.jaxb.CityGMLBuilder;
import org.citygml4j.model.module.citygml.CityGMLModule;
import org.citygml4j.model.module.citygml.CityGMLModuleType;
import vcs.citydb.wfs.config.WFSConfig;
import vcs.citydb.wfs.exception.KVPParseException;
import vcs.citydb.wfs.exception.WFSException;
import vcs.citydb.wfs.exception.WFSExceptionCode;
import vcs.citydb.wfs.kvp.parser.*;
import vcs.citydb.wfs.util.xml.NamespaceFilter;
import javax.xml.bind.JAXBElement;
import javax.xml.validation.Schema;
import java.util.*;
/**
 * Parses WFS 2.0 KVP (key-value-pair) request parameters into JAXB query
 * expressions: either ad hoc {@code QueryType} queries (TYPENAMES/FILTER/BBOX
 * etc.) or a single {@code StoredQueryType} (STOREDQUERY_ID plus free-form
 * parameters).
 */
public class QueryExpressionReader {
    private final net.opengis.wfs._2.ObjectFactory wfsFactory;
    private final net.opengis.fes._2.ObjectFactory fesFactory;
    private final net.opengis.gml.ObjectFactory gmlFactory;
    private final Schema wfsSchema;
    private final CityGMLBuilder cityGMLBuilder;
    private final WFSConfig wfsConfig;

    public QueryExpressionReader(net.opengis.wfs._2.ObjectFactory wfsFactory,
            net.opengis.fes._2.ObjectFactory fesFactory,
            net.opengis.gml.ObjectFactory gmlFactory,
            Schema wfsSchema,
            CityGMLBuilder cityGMLBuilder,
            WFSConfig wfsConfig) {
        this.wfsFactory = wfsFactory;
        this.fesFactory = fesFactory;
        this.gmlFactory = gmlFactory;
        this.wfsSchema = wfsSchema;
        this.cityGMLBuilder = cityGMLBuilder;
        this.wfsConfig = wfsConfig;
    }

    /**
     * Parses the KVP parameter map into a list of query expressions.
     *
     * @param parameters raw KVP parameters of the request
     * @param operationName operation the parameters belong to (used in error reports)
     * @param namespaceFilter namespace context for parsing qualified names
     * @param allowMultipleQueries whether more than one ad hoc query is permitted
     * @return the parsed ad hoc or stored query expressions
     * @throws WFSException if the parameters are inconsistent or fail to parse
     */
    public List<JAXBElement<? extends AbstractQueryExpressionType>> read(Map<String, String> parameters, String operationName, NamespaceFilter namespaceFilter, boolean allowMultipleQueries) throws WFSException {
        List<JAXBElement<? extends AbstractQueryExpressionType>> queries = new ArrayList<>();
        try {
            // ensure mutual exclusivity
            checkMutualExclusivity(parameters, operationName, KVPConstants.FILTER, KVPConstants.RESOURCE_ID, KVPConstants.BBOX);
            List<List<String>> typeNames = null;
            List<FilterType> resourceIds = null;
            if (parameters.containsKey(KVPConstants.TYPE_NAMES))
                typeNames = new ValueListParser<>(new StringParser()).parse(KVPConstants.TYPE_NAMES, parameters.get(KVPConstants.TYPE_NAMES));
            if (parameters.containsKey(KVPConstants.RESOURCE_ID))
                resourceIds = new ResourceIdParser(fesFactory).parse(KVPConstants.RESOURCE_ID, parameters.get(KVPConstants.RESOURCE_ID));
            // derive implicit type names from RESOURCE_ID when TYPENAMES is absent
            typeNames = checkTypeNames(typeNames, resourceIds);
            if (typeNames != null) {
                if (!allowMultipleQueries && typeNames.size() > 1)
                    throw new WFSException(WFSExceptionCode.OPERATION_PARSING_FAILED, "The request may only take a single ad hoc query expression or stored query expression.");
                // ad-hoc query: all per-query parameter lists must align with TYPENAMES
                checkAlignedListSize(typeNames.size(), parameters, operationName,
                        KVPConstants.RESOURCE_ID, KVPConstants.ALIASES, KVPConstants.SRS_NAME, KVPConstants.PROPERTY_NAME, KVPConstants.FILTER, KVPConstants.FILTER_LANGUAGE, KVPConstants.SORT_BY);
                List<List<String>> aliases = null;
                List<String> srsNames = null;
                List<List<PropertyName>> propertyNames = null;
                List<FilterType> filters = null;
                List<String> filterLanguages;
                List<SortByType> sortBys = null;
                if (parameters.containsKey(KVPConstants.STOREDQUERY_ID))
                    throw new WFSException(WFSExceptionCode.OPERATION_PARSING_FAILED, "The request may either contain ad hoc query expression(s) or a stored query expression but not both.");
                if (parameters.containsKey(KVPConstants.ALIASES))
                    aliases = new ValueListParser<>(new StringParser()).parse(KVPConstants.ALIASES, parameters.get(KVPConstants.ALIASES));
                if (parameters.containsKey(KVPConstants.SRS_NAME))
                    srsNames = new SingleValueListParser<>(new StringParser()).parse(KVPConstants.SRS_NAME, parameters.get(KVPConstants.SRS_NAME));
                if (parameters.containsKey(KVPConstants.PROPERTY_NAME))
                    propertyNames = new ValueListParser<>(new PropertyNameParser(namespaceFilter)).parse(KVPConstants.PROPERTY_NAME, parameters.get(KVPConstants.PROPERTY_NAME));
                if (parameters.containsKey(KVPConstants.FILTER_LANGUAGE)) {
                    // only the default filter language is supported
                    filterLanguages = new SingleValueListParser<>(new StringParser()).parse(KVPConstants.FILTER_LANGUAGE, parameters.get(KVPConstants.FILTER_LANGUAGE));
                    for (String filterLanguage : filterLanguages) {
                        if (!KVPConstants.DEFAULT_FILTER_LANGUAGE.equals(filterLanguage))
                            throw new WFSException(WFSExceptionCode.INVALID_PARAMETER_VALUE, "Only the language '" + KVPConstants.DEFAULT_FILTER_LANGUAGE + "' is supported for filter expressions.", KVPConstants.FILTER_LANGUAGE);
                    }
                }
                if (parameters.containsKey(KVPConstants.FILTER))
                    filters = new SingleValueListParser<>(new FilterParser(namespaceFilter, wfsSchema, cityGMLBuilder, wfsConfig)).parse(KVPConstants.FILTER, parameters.get(KVPConstants.FILTER));
                if (parameters.containsKey(KVPConstants.SORT_BY))
                    sortBys = new SortByParser().parse(KVPConstants.SORT_BY, parameters.get(KVPConstants.SORT_BY));
                FilterType bbox = null;
                if (parameters.containsKey(KVPConstants.BBOX))
                    bbox = new BBoxParser(fesFactory, gmlFactory).parse(KVPConstants.BBOX, parameters.get(KVPConstants.BBOX));
                // build one QueryType per TYPENAMES entry
                for (int i = 0; i < typeNames.size(); i++) {
                    QueryType query = new QueryType();
                    query.getTypeNames().addAll(typeNames.get(i));
                    if (aliases != null)
                        query.getAliases().addAll(aliases.get(i));
                    if (srsNames != null)
                        query.setSrsName(srsNames.get(i));
                    // projection
                    if (propertyNames != null) {
                        List<JAXBElement<?>> jaxbElements = new ArrayList<>();
                        for (PropertyName propertyName : propertyNames.get(i))
                            jaxbElements.add(wfsFactory.createPropertyName(propertyName));
                        query.setAbstractProjectionClause(jaxbElements);
                    }
                    // selection (RESOURCE_ID, BBOX and FILTER are mutually exclusive; see above)
                    if (resourceIds != null)
                        query.setAbstractSelectionClause(fesFactory.createAbstractSelectionClause(resourceIds.get(i)));
                    else if (bbox != null)
                        query.setAbstractSelectionClause(fesFactory.createAbstractSelectionClause(bbox));
                    else if (filters != null)
                        query.setAbstractSelectionClause(fesFactory.createAbstractSelectionClause(filters.get(i)));
                    // sorting
                    if (sortBys != null)
                        query.setAbstractSortingClause(fesFactory.createSortBy(sortBys.get(i)));
                    queries.add(wfsFactory.createQuery(query));
                }
            } else {
                // stored query
                String storedQueryId = null;
                if (parameters.containsKey(KVPConstants.STOREDQUERY_ID))
                    storedQueryId = new StringParser().parse(KVPConstants.STOREDQUERY_ID, parameters.get(KVPConstants.STOREDQUERY_ID));
                if (storedQueryId == null)
                    throw new WFSException(WFSExceptionCode.MISSING_PARAMETER_VALUE, "The query request lacks the mandatory parameter " + KVPConstants.STOREDQUERY_ID + ".");
                StoredQueryType storedQuery = new StoredQueryType();
                storedQuery.setId(storedQueryId);
                // every non-reserved KVP parameter becomes a stored query parameter
                for (String key : parameters.keySet()) {
                    if (KVPConstants.PARAMETERS.contains(key))
                        continue;
                    ParameterType parameter = new ParameterType();
                    parameter.setName(key);
                    parameter.setContent(Arrays.asList(new Object[]{parameters.get(key)}));
                    storedQuery.getParameter().add(parameter);
                }
                queries.add(wfsFactory.createStoredQuery(storedQuery));
            }
        } catch (KVPParseException e) {
            throw new WFSException(WFSExceptionCode.INVALID_PARAMETER_VALUE, e.getMessage(), e.getParameter(), e.getCause());
        }
        return queries;
    }

    /**
     * Throws if more than one of the given parameter keys is present.
     */
    private void checkMutualExclusivity(Map<String, String> parameters, String operationName, String... keys) throws WFSException {
        String found = null;
        for (String key : keys) {
            if (parameters.containsKey(key)) {
                if (found == null)
                    found = key;
                else throw new WFSException(WFSExceptionCode.OPERATION_PARSING_FAILED, "The parameters " + found + " and " + key + " are mutually exclusive.", operationName);
            }
        }
    }

    /**
     * When TYPENAMES is absent but RESOURCE_ID is given, synthesizes one
     * schema-element(core:_CityObject) type name per resource id, as the WFS
     * spec requires a type name per query.
     */
    private List<List<String>> checkTypeNames(List<List<String>> typeNames, List<FilterType> resourceIds) {
        if (typeNames == null && resourceIds != null) {
            typeNames = new ArrayList<>(resourceIds.size());
            for (int i = 0; i < resourceIds.size(); i++) {
                CityGMLModule core = wfsConfig.getFeatureTypes().getDefaultVersion().getCityGMLModule(CityGMLModuleType.CORE);
                typeNames.add(Collections.singletonList("schema-element(" + core.getNamespacePrefix() + ":_CityObject)"));
            }
        }
        return typeNames;
    }

    /**
     * Throws if any of the given ()-delimited parameter lists does not have
     * exactly {@code size} entries (i.e. is not aligned with TYPENAMES).
     */
    private void checkAlignedListSize(int size, Map<String, String> parameters, String operationName, String... keys) throws WFSException {
        for (String key : keys) {
            if (parameters.containsKey(key)) {
                String[] lists = parameters.get(key).split(KVPConstants.LIST_DELIMITER);
                if (lists.length != size)
                    throw new WFSException(WFSExceptionCode.OPERATION_PARSING_FAILED, "The query uses parameter lists whose sizes are not aligned.", operationName);
            }
        }
    }
}
|
<html>
<head>
<title>Timer</title>
<script>
// Counts down from 30:00, updating the #timer element once per second.
function displayTimer() {
  let minutes = 30;
  let seconds = 0;

  // get the timer element
  const timerElement = document.getElementById("timer");

  // set the interval to subtract one second
  const intervalId = setInterval(() => {
    seconds -= 1;
    if (seconds < 0) {
      minutes -= 1;
      seconds = 59;
    }
    if (minutes < 0) {
      minutes = 0;
      seconds = 0;
      // BUG FIX: stop ticking once the countdown reaches 00:00 instead of
      // leaving the interval running forever.
      clearInterval(intervalId);
    }
    // BUG FIX: zero-pad seconds so e.g. 29:5 renders as 29:05.
    timerElement.innerHTML = minutes + ":" + String(seconds).padStart(2, "0");
  }, 1000);
}
</script>
</head>
<body onload="displayTimer()">
<div id="timer">30:00</div>
</body>
</html>
|
#!/bin/bash
# Archive one or more student media directories in the requested format and
# write md5 checksums for the produced archive part(s).

usage() {
    echo
    echo "USAGE: $0 <archive_format> <student_media_dir>[,<student_media_dir>,...]"
    echo
    echo " Archive Formats:"
    echo " 7z -7zip with LZMA compression split into 2G files"
    echo " 7zma2 -7zip with LZMA2 compression split into 2G files"
    echo " 7zcopy -7zip with no compression split into 2G files"
    echo " tar -tar archive with no compression"
    echo " tgz -gzip compressed tar archive"
    echo " tbz -bzip2 compressed tar archive"
    echo " txz -xz compressed tar archive"
    echo
}

# Map the requested format to an archiver command line and file extension.
case "${1}" in
    7z)
        ARCHIVE_CMD="7z a -t7z -m0=LZMA -mmt=on -v2g"
        ARCHIVE_EXT="7z"
        ;;
    7zma2)
        ARCHIVE_CMD="7z a -t7z -m0=LZMA2 -mmt=on -v2g"
        ARCHIVE_EXT="7z"
        ;;
    7zcopy)
        ARCHIVE_CMD="7z a -t7z -mx=0 -v2g"
        ARCHIVE_EXT="7z"
        ;;
    tar)
        ARCHIVE_CMD="tar cvf"
        ARCHIVE_EXT="tar"
        ;;
    tar.gz|tgz)
        ARCHIVE_CMD="tar czvf"
        ARCHIVE_EXT="tgz"
        ;;
    tar.bz2|tbz)
        ARCHIVE_CMD="tar cjvf"
        ARCHIVE_EXT="tbz"
        ;;
    tar.xz|txz)
        ARCHIVE_CMD="tar cJvf"
        ARCHIVE_EXT="txz"
        ;;
    *)
        usage
        # exit non-zero so callers can detect the bad invocation
        exit 1
        ;;
esac

if [ -z "${2}" ]
then
    echo "ERROR: No student media directories were provided."
    exit 1
else
    # The directory argument is a comma-separated list; process each entry.
    for SM_DIR in $(echo "${2}" | sed 's/,/ /g')
    do
        # BUG FIX: test the current entry (SM_DIR), not the raw ${2} list —
        # the original check always failed for multi-directory invocations.
        if ! [ -d "${SM_DIR}" ]
        then
            echo "ERROR: The provided student media directory doesn't appear to exist."
            echo "Skipping ..."
        else
            echo "---------------------------------------------------------------------"
            echo "COMMAND: ${ARCHIVE_CMD} ${SM_DIR}.${ARCHIVE_EXT} ${SM_DIR}"
            echo
            # ARCHIVE_CMD is intentionally unquoted so it word-splits into the
            # archiver command plus its options.
            ${ARCHIVE_CMD} "${SM_DIR}.${ARCHIVE_EXT}" "${SM_DIR}"
            echo
            echo "COMMAND: md5sum ${SM_DIR}.${ARCHIVE_EXT}* > ${SM_DIR}.${ARCHIVE_EXT}.md5sums"
            echo
            md5sum "${SM_DIR}.${ARCHIVE_EXT}"* > "${SM_DIR}.${ARCHIVE_EXT}.md5sums"
        fi
    done
fi
|
#!/bin/bash
# BUG FIX: shebang was /bin/sh but the script uses the bashism
# ${BASH_SOURCE[0]}, which is undefined under a POSIX sh.
# Build/package script for the DataTables AutoFill extension; normally invoked
# via the main DataTables build script (which exports DT_BUILD).

DT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../.."

# First argument is either "debug" or the output directory (then $2 may be "debug").
if [ "$1" = "debug" ]; then
    DEBUG="debug"
else
    OUT_DIR=$1
    DEBUG=$2
fi

# If not run from DataTables build script, redirect to there
if [ -z "$DT_BUILD" ]; then
    cd $DT_DIR/build
    ./make.sh extension AutoFill $DEBUG
    cd -
    exit
fi

# Change into script's own dir
cd $(dirname $0)

DT_SRC=$(dirname $(dirname $(pwd)))
DT_BUILT="${DT_SRC}/built/DataTables"
. $DT_SRC/build/include.sh

# Copy CSS
rsync -r css $OUT_DIR
css_frameworks autoFill $OUT_DIR/css

# Copy JS
rsync -r js $OUT_DIR
js_compress $OUT_DIR/js/dataTables.autoFill.js
js_frameworks autoFill $OUT_DIR/js

# Copy Types (replace any stale types directory wholesale)
if [ -d $OUT_DIR/types ]; then
    rm -r $OUT_DIR/types
fi
mkdir $OUT_DIR/types

if [ -d types/ ]; then
    cp types/* $OUT_DIR/types
else
    if [ -f types.d.ts ]; then
        cp types.d.ts $OUT_DIR/types
    fi
fi

# Copy and build examples
rsync -r examples $OUT_DIR
examples_process $OUT_DIR/examples

# Readme and license
cp Readme.md $OUT_DIR
cp License.txt $OUT_DIR
|
<reponame>astrionic/advent-of-code-2020
package astrionic.adventofcode2020.solutions.day15
import astrionic.adventofcode2020.framework.AdventSolution
import scala.collection.mutable
// Advent of Code 2020, day 15: the Van Eck / "memory game" sequence.
object Day15 extends AdventSolution {

  override def solvePart1(input: String): String = solve(input, 2020)

  // Takes about 250 times as long as part 1 (around 9 seconds on my current machine)
  override def solvePart2(input: String): String = solve(input, 30000000)

  // Comma-separated integers; non-numeric tokens are silently dropped.
  def parseInput(input: String): List[Int] =
    input.split(',').flatMap(_.toIntOption).toList

  def solve(input: String, turns: Int): String = {
    val startingNumbers = parseInput(input)

    // lastSpoken(n) == the turn on which n was most recently said, excluding
    // the number said on the previous turn (tracked separately in `previous`).
    val lastSpoken = mutable.Map.empty[Int, Int]
    startingNumbers.init.zipWithIndex.foreach { case (number, index) =>
      lastSpoken(number) = index + 1
    }

    var previous = startingNumbers.last
    var turn = startingNumbers.length + 1
    while (turn <= turns) {
      // 0 if `previous` was new; otherwise the gap since it was last spoken.
      val current = lastSpoken.get(previous).fold(0)(seenAt => (turn - 1) - seenAt)
      lastSpoken(previous) = turn - 1
      previous = current
      turn += 1
    }
    previous.toString
  }
}
|
#!/bin/bash
# Print the test banner, then run the hello binary from the current directory.
STR="Test 1 "
# BUG FIX: quote the expansion so whitespace in STR is preserved verbatim
# (unquoted $STR is word-split, collapsing/dropping the trailing space).
echo "$STR"
./hello
|
require 'spec_helper'

# Specs for the DarkFinger::MigrationConstants cop: migrations may only
# reference constants that are declared in the migration file itself (or are
# whitelisted "top level system constants").
describe RuboCop::Cop::DarkFinger::MigrationConstants do
  let(:config) { RuboCop::Config.new }

  # Runs the cop against the given source string and returns its offenses.
  def offenses_for(source)
    cop = described_class.new(config)
    processed_source = parse_source(source)
    _investigate(cop, processed_source)
    cop.offenses
  end

  def expect_no_offenses_for(source)
    expect(
      offenses_for(source)
    ).to be_empty
  end

  it 'returns no violations when no constants are used' do
    source = <<-EOS
class FooMigration < ActiveRecord::Migration[5.1]
def up
end
end
    EOS
    expect_no_offenses_for(source)
  end

  it 'returns an error if an unknown and undeclared constant is sent a message' do
    source = <<-EOS
class FooMigration < ActiveRecord::Migration[5.1]
def up
SomeModel.all.each do
# stuff
end
end
end
    EOS
    offenses = offenses_for(source)
    expect(offenses.size).to eq(1)
    expect(offenses.first.message).to match(%Q(Undeclared constant: "SomeModel"))
  end

  it 'does not return errors when using classes that are declared in the file' do
    source = <<-EOS
class SomeModel < ActiveRecord::Base; end
class FooMigration < ActiveRecord::Migration[5.1]
def up
SomeModel.all.each do
# stuff
end
end
end
    EOS
    expect_no_offenses_for(source)
  end

  it 'does not return errors when using modules that are declared in the file' do
    source = <<-EOS
module Foo
module Bar
class Baz < ActiveRecord::Base; end
end
end
class FooMigration < ActiveRecord::Migration[5.1]
def up
Foo::Bar::Baz.first
end
end
    EOS
    expect_no_offenses_for(source)
  end

  it 'does not return errors when using constants that are declared in the file' do
    source = <<-EOS
SOME_CONSTANT="foobar"
class FooMigration < ActiveRecord::Migration[5.1]
def up
puts SOME_CONSTANT
end
end
    EOS
    expect_no_offenses_for(source)
  end

  # Core classes like YAML/File/Array/Hash are always allowed.
  it 'does not return errors when using "top level system constants"' do
    source = <<-EOS
class FooMigration < ActiveRecord::Migration[5.1]
def up
YAML.load_file("foo")
File.read("foo")
Array[:lol]
Hash.new(:foo)
end
end
    EOS
    expect_no_offenses_for(source)
  end
end
|
<gh_stars>1-10
var fontSize = 15;
var width = 1400;
var height = 750;

// d3 (v3) tree layout sized to the drawing area (160px reserved for labels).
var tree = d3.layout.tree()
    .size([height, width - 160]);

// Horizontal tree: project (x, y) -> (y, x).
var diagonal = d3.svg.diagonal()
    .projection(function(d) { return [d.y, d.x]; });

var svg = d3.select("body").append("svg")
    .attr("width", width)
    .attr("height", height)
    .append("g")
    .attr("transform", "translate(40,0)"); // translate everything to the right in order to not cut off text

d3.json("progress.json", function(error, json) {
    var nodes = tree.nodes(json),
        links = tree.links(nodes);

    var link = svg.selectAll("path.link")
        .data(links)
        .enter().append("path")
        .attr("class", "link")
        .attr("d", diagonal);

    // Clicking a node cycles its state: unset -> 1 (red) -> 2 (yellow) -> 0 (green).
    // (Leftover console.log debug statement removed.)
    var node = svg.selectAll("g.node")
        .data(nodes)
        .enter().append("g")
        .attr("class", "node")
        .attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; })
        .on('click', function(d, i) {
            if (d.selected == 2) {
                d.selected = 0;
                d3.select(this).select("rect").attr("stroke", "green");
            } else if (d.selected == 1) {
                d.selected = 2;
                d3.select(this).select("rect").attr("stroke", "yellow");
            } else {
                d.selected = 1;
                d3.select(this).select("rect").attr("stroke", "red");
            }
        });

    // Rounded rectangle sized to (roughly) fit the label text.
    node.append("rect")
        .attr("x", function(d) { return fontSize * -0.45 * d.name.length; })
        .attr("y", -1 * fontSize) // roughly center around text
        .attr("width", function(d) { return fontSize * 0.65 * d.name.length; })
        .attr("height", 2 * fontSize)
        .attr("rx", 10)
        .attr("ry", 10)
        .attr("stroke", "black");

    node.append("text")
        .attr("font-size", fontSize + "px")
        .attr("dx", function(d) { return -2 * d.name.length; })
        .attr("dy", 3) // centered vertically
        .attr("text-anchor", "middle")
        .text(function(d) { return d.name; });
});

d3.select(self.frameElement).style("height", height + "px");
|
#!/bin/bash
# Downloads a version of Bochs patched to be used with Pintos and builds and installs two variants
# of it to /usr/local.
set -e

# Work in a throwaway temp dir that is removed on exit (even on failure, via set -e + trap).
TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
cd "$TMPDIR"

# Fetch and unpack the pre-patched Bochs 2.2.6 source tarball.
wget -O "bochs-2.2.6-pintos-unix-patched.tar.gz" "https://drive.google.com/uc?export=download&id=1nPFKg5XxicgxRF4GeyZj1G4QoiOR5du3"
tar xzf "bochs-2.2.6-pintos-unix-patched.tar.gz"
cd "bochs-2.2.6"

# From pintos src/misc/bochs-2.2.6-build.sh
DSTDIR=/usr/local
CFGOPTS="--with-x --with-x11 --with-term --with-nogui --prefix=$DSTDIR --enable-cpu-level=6"

# Variant 1: plain Bochs with a GDB stub, installed as "bochs".
echo Compiling Plain
( mkdir plain &&
  cd plain &&
  ../configure $CFGOPTS --enable-gdb-stub &&
  make &&
  make install
)

# Variant 2: Bochs with the built-in debugger, installed as "bochs-dbg".
echo Compiling With DBG
( mkdir with-dbg &&
  cd with-dbg &&
  pwd &&
  ../configure --enable-debugger $CFGOPTS &&
  make &&
  cp bochs "$DSTDIR/bin/bochs-dbg"
)

echo Successfully installed Bochs
|
# Generated by Django 3.2.6 on 2021-09-02 10:44
from django.db import migrations


class Migration(migrations.Migration):
    # Auto-generated migration: updates only the Meta options (default
    # ordering and human-readable names) of the catalog Index model.
    # No database schema change is performed.

    dependencies = [
        ("catalog", "0016_add_gin_gist"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="index",
            options={
                "ordering": ("external_name",),
                "verbose_name": "Index",
                "verbose_name_plural": "Indexes",
            },
        ),
    ]
|
/*!
 * urllib-sync - request.js
 * Copyright(c) Alibaba Group Holding Limited.
 * Author: busi.hyy <<EMAIL>>
 */

'use strict';

/**
 * Module dependencies.
 */

var utility = require('utility');
var urllib = require('urllib');
var path = require('path');
var util = require('util');
var fs = require('fs');
var os = require('os');

// Decode the base64-encoded JSON request description passed as argv[2]:
// { url, args }. Any decode/parse failure aborts with exit code 1.
var input = {};
try {
  input = utility.base64decode(process.argv[2] || '');
  input = JSON.parse(input);
} catch (err) {
  console.error(err.message);
  process.exit(1);
}

urllib.request(input.url, input.args, function (err, data, res) {
  if (err) {
    console.error(err.message);
    process.exit(1);
  }
  // Unique temp-file name per invocation: "<pid>:<timestamp>".
  var name = util.format('%s:%s', process.pid, Date.now());
  var type = 'buffer';
  if (data && typeof data === 'object' && !Buffer.isBuffer(data)) {
    type = 'json';
    data = JSON.stringify(data);
  } else if (typeof data === 'string') {
    type = 'string';
  }
  var filepath = path.join(os.tmpdir(), name);
  // if need to writeFile
  if ((res.statusCode / 100 | 0) === 2 && input.args.writeFile) {
    type = 'file';
    filepath = input.args.writeFile;
  }
  fs.writeFileSync(filepath, data);
  // BUG FIX: use a distinct name for the reply object instead of re-declaring
  // `var res`, which shadowed/clobbered the response parameter used above.
  var result = {
    path: filepath,
    type: type,
    status: res.statusCode,
    headers: res.headers
  };
  console.log(JSON.stringify(result));
  process.exit(0);
});
|
<filename>tests/java/org/pantsbuild/testing/EasyMockTestTest.java
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.pantsbuild.testing;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.collect.ImmutableList;
import com.google.common.reflect.TypeToken;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Test;
import static org.easymock.EasyMock.expectLastCall;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Tests for {@code EasyMockTest}'s type-safe mock creation helpers, covering
 * both the plain {@code Class} overload and the {@code TypeToken}-based
 * variant for parameterized types.
 */
public class EasyMockTestTest extends EasyMockTest {

    /**
     * Verifies a mocked Runnable records expectations and replays correctly.
     */
    private void assertSimplyParametrizedMockWorks(Runnable mockRunnable) {
        final AtomicBoolean ran = new AtomicBoolean(false);
        // Record phase: expect run() and install an answer that flips the flag.
        mockRunnable.run();
        expectLastCall().andAnswer(new IAnswer<Void>() {
            @Override public Void answer() {
                ran.set(true);
                return null;
            }
        });
        control.replay();
        // Replay phase: the answer should execute.
        mockRunnable.run();
        assertTrue(ran.get());
    }

    @Test
    public void testSimplyParametrizedMockViaOverload() {
        assertSimplyParametrizedMockWorks(createMock(Runnable.class));
    }

    @Test
    public void testSimplyParametrizedMock() {
        assertSimplyParametrizedMockWorks(createMock(new TypeToken<Runnable>() { }));
    }

    @Test
    public void testNestedParametrizedMock() {
        // TypeToken preserves the nested generic type List<List<String>>.
        List<List<String>> list = createMock(new TypeToken<List<List<String>>>() { });
        EasyMock.expect(list.get(0)).andReturn(ImmutableList.of("jake"));
        control.replay();
        assertEquals(ImmutableList.of("jake"), list.get(0));
    }
}
|
<reponame>marcinbunsch/things-client
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Smoke test: referencing each public constant raises NameError if the
# library failed to define/load it, so the lambda doubles as a load check.
describe "Things" do
  it "should load appropriate classes and modules" do
    lambda do
      Things::App
      Things::Todo
      Things::List
      Things::Status
      Things::Area
      Things::Project
      Things::Tag
      Things::Person
    end.should_not raise_error(NameError)
  end
end
|
// Count the vowels in str, case-insensitively.
function countVowels(str) {
  const vowels = "aeiou";
  return [...str].reduce(
    (total, ch) => (vowels.includes(ch.toLowerCase()) ? total + 1 : total),
    0
  );
}
|
package gov.cms.bfd.pipeline.rda.grpc;
import com.codahale.metrics.MetricRegistry;
import gov.cms.bfd.model.rda.PreAdjMcsClaim;
import gov.cms.mpsm.rda.v1.McsClaimChange;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * PipelineJob requires that the class of the job be used to define the PipelineJobType. This class
 * is a simple wrapper to ensure that PreAdjMcsClaim processing has a unique PipelineJobType value
 * based on its class.
 */
public class RdaMcsClaimLoadJob
    extends AbstractRdaLoadJob<McsClaimChange, RdaChange<PreAdjMcsClaim>> {
  private static final Logger LOGGER = LoggerFactory.getLogger(RdaMcsClaimLoadJob.class);

  /**
   * @param config common RDA load job configuration
   * @param sourceFactory factory producing the source of MCS claim changes
   * @param sinkFactory factory producing the sink that persists the changes
   * @param appMetrics registry used to report job metrics
   */
  public RdaMcsClaimLoadJob(
      Config config,
      Callable<RdaSource<McsClaimChange, RdaChange<PreAdjMcsClaim>>> sourceFactory,
      Callable<RdaSink<McsClaimChange, RdaChange<PreAdjMcsClaim>>> sinkFactory,
      MetricRegistry appMetrics) {
    super(config, sourceFactory, sinkFactory, appMetrics, LOGGER);
  }
}
|
// Doxygen-generated navigation data for Catch::Generators::MapGenerator.
// Each entry is [display name, target HTML page#anchor, children (null)].
// Do not edit by hand — regenerated by Doxygen.
var classCatch_1_1Generators_1_1MapGenerator =
[
    [ "MapGenerator", "classCatch_1_1Generators_1_1MapGenerator.html#a525c7eaf53ad220ee7add534aff2522c", null ],
    [ "get", "classCatch_1_1Generators_1_1MapGenerator.html#a199d377afba00519f202c59b4b488235", null ],
    [ "next", "classCatch_1_1Generators_1_1MapGenerator.html#aa07e2f12d38ae060c30cc30d9dc236c5", null ]
];
|
// SAPUI5 module: thin REST wrapper around the event service endpoints.
sap.ui.define([
  "../util/Api",
  "../util/RestClient",
], function (Api, RestClient) {
  "use strict";
  var eventApi = Api.define("restService", {
    /**
     * Get code list by its name
     *
     * @param {string} name Code list name
     * @returns {Promise<CodeInfo[]>} Code list
     */
    getCodeListByName: function (name) {
      return RestClient.get(
        this.createUrl("/events/codeLists/" + name)
      );
    },
    /**
     * Report event with payload
     *
     * @param {string} eventType Event type
     * @param {object} payload Payload of the report
     * @returns {Promise<any>} Result of reporting
     */
    report: function (eventType, payload) {
      return RestClient.post(
        this.createUrl("/events/" + eventType),
        payload
      );
    },
  });
  return eventApi;
});
|
# vim: set ts=4 sw=4 et:
# Execute a build-phase function with live output and a persistent log.
#   $1 func     - shell function to run
#   $2 desc     - human readable description for the status message
#   $3 funcname - name used for the log file and the ERR trap (defaults to $1)
# A FIFO + background tee pair captures the function's combined
# stdout/stderr into ${XBPS_STATEDIR}/<pkg>_<cross>_<funcname>.log while
# still streaming it to the terminal.  The previous ERR trap is saved and
# restored so run_func can nest safely.
run_func() {
    local func="$1" desc="$2" funcname="$3" restoretrap= logpipe= logfile= teepid=
    : ${funcname:=$func}
    logpipe=$(mktemp -u -p ${XBPS_STATEDIR} ${pkgname}_${XBPS_CROSS_BUILD}_XXXXXXXX.logpipe) || exit 1
    logfile=${XBPS_STATEDIR}/${pkgname}_${XBPS_CROSS_BUILD}_${funcname}.log
    msg_normal "${pkgver:-xbps-src}: running ${desc:-${func}} ...\n"
    # -E makes the ERR trap fire inside functions/subshells too.
    set -E
    restoretrap=$(trap -p ERR)
    trap 'error_func $funcname $LINENO' ERR
    mkfifo "$logpipe"
    tee "$logfile" < "$logpipe" &
    teepid=$!
    $func &>"$logpipe"
    wait $teepid
    rm "$logpipe"
    eval "$restoretrap"
    set +E
}
# cd into the package's work source directory (and into build_wrksrc below
# it, when set), aborting the build if either is inaccessible.
ch_wrksrc() {
    cd "$wrksrc" || msg_error "$pkgver: cannot access wrksrc directory [$wrksrc]\n"
    if [ -n "$build_wrksrc" ]; then
        cd $build_wrksrc || \
            msg_error "$pkgver: cannot access build_wrksrc directory [$build_wrksrc]\n"
    fi
}
# Runs the {pre,do,post}_X triplet for one build step, interleaved with the
# pre-/do-/post- package hooks.
#   $1 step_name      - step to run (e.g. configure, build, install)
#   $2 optional_step  - non-empty if a missing do_<step>() is not fatal
#   $3 skip_post_hook - non-empty to skip the post-<step> hooks
# do_<step>() may come from the template itself or be provided by the
# package's build_style script.
run_step() {
    local step_name="$1" optional_step="$2" skip_post_hook="$3"
    ch_wrksrc
    run_pkg_hooks "pre-$step_name"
    # Run pre_* Phase
    if declare -f "pre_$step_name" >/dev/null; then
        ch_wrksrc
        run_func "pre_$step_name"
    fi
    ch_wrksrc
    # Run do_* Phase
    if declare -f "do_$step_name" >/dev/null; then
        run_func "do_$step_name"
    elif [ -n "$build_style" ]; then
        if [ -r $XBPS_BUILDSTYLEDIR/${build_style}.sh ]; then
            . $XBPS_BUILDSTYLEDIR/${build_style}.sh
            if declare -f "do_$step_name" >/dev/null; then
                run_func "do_$step_name"
            elif [ ! "$optional_step" ]; then
                msg_error "$pkgver: cannot find do_$step_name() in $XBPS_BUILDSTYLEDIR/${build_style}.sh!\n"
            fi
        else
            msg_error "$pkgver: cannot find build style $XBPS_BUILDSTYLEDIR/${build_style}.sh!\n"
        fi
    elif [ ! "$optional_step" ]; then
        msg_error "$pkgver: cannot find do_$step_name()!\n"
    fi
    # Run do_ phase hooks
    run_pkg_hooks "do-$step_name"
    # Run post_* Phase
    if declare -f "post_$step_name" >/dev/null; then
        ch_wrksrc
        run_func "post_$step_name"
    fi
    if ! [ "$skip_post_hook" ]; then
        ch_wrksrc
        run_pkg_hooks "post-$step_name"
    fi
}
# ERR-trap handler installed by run_func: reports the failing command and a
# pseudo-backtrace built from FUNCNAME/BASH_SOURCE/BASH_LINENO, then aborts.
#   $1 - name of the function whose trap fired
#   $2 - line number reported by the trap
error_func() {
    local err=$?
    local src=
    local i=
    [ -n "$1" -a -n "$2" ] || exit 1;
    msg_red "$pkgver: $1: '${BASH_COMMAND}' exited with $err\n"
    # Walk the call stack (index 0 is error_func itself) up to the function
    # that installed the trap.
    for ((i=1;i<${#FUNCNAME[@]};i++)); do
        src=${BASH_SOURCE[$i]}
        src=${src#$XBPS_DISTDIR/}
        msg_red " in ${FUNCNAME[$i]}() at $src:${BASH_LINENO[$i-1]}\n"
        [ "${FUNCNAME[$i]}" = "$1" ] && break;
    done
    exit 1
}
# Remove the temporary masterdir (when one was created for this run) and
# terminate with the given exit status (default 0).
exit_and_cleanup() {
    local _status="${1:-0}"

    if [ -n "$XBPS_TEMP_MASTERDIR" ] && [ "$XBPS_TEMP_MASTERDIR" != "1" ]; then
        rm -rf "$XBPS_TEMP_MASTERDIR"
    fi
    exit "$_status"
}
# Print an error message ("=> ERROR: ...") to stderr, in bold red unless
# NOCOLORS is set.  All arguments are forwarded to printf.
msg_red() {
    if [ -z "$NOCOLORS" ]; then
        printf >&2 "\033[1m\033[31m"
    fi
    printf >&2 "=> ERROR: $@"
    if [ -z "$NOCOLORS" ]; then
        printf >&2 "\033[m"
    fi
}
# Like msg_red but without the "=> ERROR: " prefix; used for raw red
# output outside the chroot.
msg_red_nochroot() {
    [ -n "$NOCOLORS" ] || printf >&2 "\033[1m\033[31m"
    printf >&2 "$@"
    [ -n "$NOCOLORS" ] || printf >&2 "\033[m"
}
# Print an error and abort the build — unless XBPS_INFORMATIVE_RUN is set,
# in which case only the message is printed (dry-run style).
msg_error() {
    msg_red "$@"
    [ -n "$XBPS_INFORMATIVE_RUN" ] || exit 1
}
# Print a warning ("=> WARNING: ...") to stderr, in bold yellow unless
# NOCOLORS is set.
msg_warn() {
    # warn messages in bold/yellow
    [ -n "$NOCOLORS" ] || printf >&2 "\033[1m\033[33m"
    printf >&2 "=> WARNING: $@"
    [ -n "$NOCOLORS" ] || printf >&2 "\033[m"
}
# Same output as msg_warn; kept as a separate name for calls made outside
# the chroot.
msg_warn_nochroot() {
    [ -n "$NOCOLORS" ] || printf >&2 "\033[1m\033[33m"
    printf >&2 "=> WARNING: $@"
    [ -n "$NOCOLORS" ] || printf >&2 "\033[m"
}
# Print a status message ("=> ...") to stdout in bold (unless NOCOLORS);
# suppressed entirely when XBPS_QUIET is set.
msg_normal() {
    if [ -z "$XBPS_QUIET" ]; then
        # normal messages in bold
        [ -n "$NOCOLORS" ] || printf "\033[1m"
        printf "=> $@"
        [ -n "$NOCOLORS" ] || printf "\033[m"
    fi
}
# Continue a status line: like msg_normal but without the "=> " prefix and
# not silenced by XBPS_QUIET.
msg_normal_append() {
    [ -n "$NOCOLORS" ] || printf "\033[1m"
    printf "$@"
    [ -n "$NOCOLORS" ] || printf "\033[m"
}
# Compute the effective build options for the current package.
# Sources of truth, in increasing precedence: build_options_default,
# XBPS_PKG_OPTIONS (comma separated, "~opt" disables), and the per-package
# XBPS_PKG_OPTIONS_<pkgname> override.  For each enabled option "foo" this
# exports build_option_foo=1; disabled options are unset.  The template is
# re-sourced once so option-conditional variables take effect, and
# PKG_BUILD_OPTIONS is exported as an alphabetically sorted summary.
set_build_options() {
    local f j pkgopts _pkgname
    local -A options
    if [ -z "$build_options" ]; then
        return 0
    fi
    for f in ${build_options}; do
        # Sanitize pkgname so it is a valid shell variable name component.
        _pkgname=${pkgname//[^A-Za-z0-9_]/_}
        eval pkgopts="\$XBPS_PKG_OPTIONS_${_pkgname}"
        if [ -z "$pkgopts" -o "$pkgopts" = "" ]; then
            pkgopts=${XBPS_PKG_OPTIONS}
        fi
        # Options are comma separated; "~opt" disables, "opt" enables.
        OIFS="$IFS"; IFS=','
        for j in ${pkgopts}; do
            case "$j" in
            "$f") options[$j]=1 ;;
            "~$f") options[${j#\~}]=0 ;;
            esac
        done
        IFS="$OIFS"
    done
    # Anything not explicitly set falls back to the template defaults.
    for f in ${build_options_default}; do
        [[ -z "${options[$f]}" ]] && options[$f]=1
    done
    # Prepare final options.
    for f in ${!options[@]}; do
        if [[ ${options[$f]} -eq 1 ]]; then
            eval export build_option_${f}=1
        else
            eval unset build_option_${f}
        fi
    done
    # Re-read pkg template to get conditional vars (guarded so the
    # recursive call below happens exactly once).
    if [ -z "$XBPS_BUILD_OPTIONS_PARSED" ]; then
        source_file $XBPS_SRCPKGDIR/$pkgname/template
        XBPS_BUILD_OPTIONS_PARSED=1
        unset PKG_BUILD_OPTIONS
        set_build_options
        unset XBPS_BUILD_OPTIONS_PARSED
        return 0
    fi
    # Sort pkg build options alphabetically.
    export PKG_BUILD_OPTIONS=$(
        for f in ${build_options}; do
            [[ "${options[$f]}" -eq 1 ]] || printf '~'
            printf '%s\n' "$f"
        done | sort | tr -s '\n' ' '
    )
}
# Source a shell snippet if it exists and is readable; silently succeed
# otherwise.  Aborts the build only when sourcing the file itself fails.
source_file() {
    local _file="$1"

    [ -f "$_file" ] && [ -r "$_file" ] || return 0
    source "$_file" || msg_error "xbps-src: failed to read $_file!\n"
}
# Source and run every hook under ${XBPS_COMMONDIR}/hooks/<phase>/ in glob
# order.  Each hook script must define a hook() function, which is executed
# via run_func under the log name <phase>_<hookname>.
run_pkg_hooks() {
    local phase="$1" hookn f
    # Forget any hook() left behind by a previously sourced script.
    eval unset -f hook
    for f in ${XBPS_COMMONDIR}/hooks/${phase}/*.sh; do
        [ ! -r $f ] && continue
        hookn=${f##*/}
        hookn=${hookn%.sh}
        . $f
        run_func hook "$phase hook: $hookn" ${phase}_${hookn}
    done
}
# Remove every *_package() function from the current shell environment,
# so a previously set up package cannot leak subpackage definitions.
unset_package_funcs() {
    local _decl _flag _fname

    # `typeset -F` prints one "declare -f name" line per function.
    while read -r _decl _flag _fname; do
        case "$_fname" in
        *_package) unset -f "$_fname" ;;
        esac
    done <<< "$(typeset -F)"
}
# Print the byte order ("le"/"be") for an xbps arch string such as
# "x86_64-musl"; any "-<libc>" suffix is ignored.  Unknown CPUs print nothing.
get_endian() {
    local _cpu="${1%-*}"

    case "$_cpu" in
    aarch64|armv5tel|armv6l|armv7l|i686|mipsel*|ppc*le|x86_64)
        echo "le" ;;
    mips*|ppc*)
        echo "be" ;;
    esac
}
# Print the C library for an xbps arch string: the "-<libc>" suffix when
# present (e.g. "x86_64-musl" -> "musl"), otherwise "glibc".
get_libc() {
    local _base="${1%-*}"

    if [ "$_base" != "$1" ]; then
        echo "${1#${_base}-}"
    else
        echo "glibc"
    fi
}
# Print the native word size ("32"/"64") for an xbps arch string; any
# "-<libc>" suffix is ignored.  Unknown CPUs print nothing.
get_wordsize() {
    local _cpu="${1%-*}"

    case "$_cpu" in
    aarch64|ppc64*|x86_64)
        echo "64" ;;
    armv5tel|armv6l|armv7l|i686|mipsel*|mips*|ppc*)
        echo "32" ;;
    esac
}
# Print "yes" for CPUs that lack native 8-byte atomic operations; prints
# nothing otherwise.  The "-<libc>" suffix of the arch string is ignored.
get_no_atomic8() {
    local _cpu="${1%-*}"

    case "$_cpu" in
    armv5tel|armv6l|mips*|ppcle|ppc)
        echo "yes" ;;
    esac
}
# List the subpackages of the current template: every defined *_package()
# function, printed one name per line with the suffix stripped.
get_subpkgs() {
    local _decl _flag _fname

    # `typeset -F` prints one "declare -f name" line per function.
    while read -r _decl _flag _fname; do
        case "$_fname" in
        *_package) echo "${_fname%_package}" ;;
        esac
    done <<< "$(typeset -F)"
}
# Prepare the full build environment for one package.
#   $1 pkg           - package (or -32bit subpackage) to set up
#   $2 cross         - cross target triplet config name, empty for native
#   $3 show_problems - "ignore-problems" bypasses nocross/broken/restricted
# Side effects: sources the template and environment snippets, exports the
# XBPS_*_XCMD command wrappers, toolchain/compiler flags, wrksrc/DESTDIR
# paths and build options.  Exits early when the binary package already
# exists, or aborts on broken/nocross/restricted templates.
setup_pkg() {
    local pkg="$1" cross="$2" show_problems="$3"
    local basepkg val _vars f dbgflags arch extrarepo
    [ -z "$pkg" ] && return 1
    basepkg=${pkg%-32bit}
    # Start with a sane environment
    unset -v PKG_BUILD_OPTIONS XBPS_CROSS_CFLAGS XBPS_CROSS_CXXFLAGS XBPS_CROSS_FFLAGS XBPS_CROSS_CPPFLAGS XBPS_CROSS_LDFLAGS XBPS_TARGET_QEMU_MACHINE
    unset -v subpackages run_depends build_depends host_build_depends
    unset_package_funcs
    if [ -n "$cross" ]; then
        # Cross build: load the cross profile and point every xbps command
        # at the target arch/rootfs.
        source_file $XBPS_CROSSPFDIR/${cross}.sh
        _vars="TARGET_MACHINE CROSS_TRIPLET CROSS_CFLAGS CROSS_CXXFLAGS TARGET_QEMU_MACHINE"
        for f in ${_vars}; do
            eval val="\$XBPS_$f"
            if [ -z "$val" ]; then
                echo "ERROR: XBPS_$f is not defined!"
                exit 1
            fi
        done
        export XBPS_CROSS_BASE=/usr/$XBPS_CROSS_TRIPLET
        export XBPS_TARGET_QEMU_MACHINE
        XBPS_INSTALL_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE $XBPS_INSTALL_CMD -c /host/repocache-$XBPS_TARGET_MACHINE -r $XBPS_CROSS_BASE"
        XBPS_QUERY_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE $XBPS_QUERY_CMD -c /host/repocache-$XBPS_TARGET_MACHINE -r $XBPS_CROSS_BASE"
        XBPS_RECONFIGURE_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE $XBPS_RECONFIGURE_CMD -r $XBPS_CROSS_BASE"
        XBPS_REMOVE_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE $XBPS_REMOVE_CMD -r $XBPS_CROSS_BASE"
        XBPS_RINDEX_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE $XBPS_RINDEX_CMD"
        XBPS_UHELPER_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE xbps-uhelper -r $XBPS_CROSS_BASE"
        XBPS_CHECKVERS_XCMD="env XBPS_TARGET_ARCH=$XBPS_TARGET_MACHINE xbps-checkvers -r $XBPS_CROSS_BASE"
    else
        # Native build: plain commands, no cross variables.
        export XBPS_TARGET_MACHINE=${XBPS_ARCH:-$XBPS_MACHINE}
        unset XBPS_CROSS_BASE XBPS_CROSS_LDFLAGS XBPS_CROSS_FFLAGS
        unset XBPS_CROSS_CFLAGS XBPS_CROSS_CXXFLAGS XBPS_CROSS_CPPFLAGS
        unset XBPS_CROSS_RUSTFLAGS XBPS_CROSS_RUST_TARGET
        XBPS_INSTALL_XCMD="$XBPS_INSTALL_CMD"
        XBPS_QUERY_XCMD="$XBPS_QUERY_CMD"
        XBPS_RECONFIGURE_XCMD="$XBPS_RECONFIGURE_CMD"
        XBPS_REMOVE_XCMD="$XBPS_REMOVE_CMD"
        XBPS_RINDEX_XCMD="$XBPS_RINDEX_CMD"
        XBPS_UHELPER_XCMD="$XBPS_UHELPER_CMD"
        XBPS_CHECKVERS_XCMD="$XBPS_CHECKVERS_CMD"
    fi
    # Derived host/target properties used by templates.
    export XBPS_ENDIAN=$(get_endian ${XBPS_MACHINE})
    export XBPS_TARGET_ENDIAN=$(get_endian ${XBPS_TARGET_MACHINE})
    export XBPS_LIBC=$(get_libc ${XBPS_MACHINE})
    export XBPS_TARGET_LIBC=$(get_libc ${XBPS_TARGET_MACHINE})
    export XBPS_WORDSIZE=$(get_wordsize ${XBPS_MACHINE})
    export XBPS_TARGET_WORDSIZE=$(get_wordsize ${XBPS_TARGET_MACHINE})
    export XBPS_NO_ATOMIC8=$(get_no_atomic8 ${XBPS_MACHINE})
    export XBPS_TARGET_NO_ATOMIC8=$(get_no_atomic8 ${XBPS_TARGET_MACHINE})
    export XBPS_INSTALL_XCMD XBPS_QUERY_XCMD XBPS_RECONFIGURE_XCMD \
        XBPS_REMOVE_XCMD XBPS_RINDEX_XCMD XBPS_UHELPER_XCMD
    # Source all subpkg environment setup snippets first ...
    for f in ${XBPS_COMMONDIR}/environment/setup-subpkg/*.sh; do
        source_file "$f"
    done
    # ... then all sourcepkg environment setup snippets.
    for f in ${XBPS_COMMONDIR}/environment/setup/*.sh; do
        source_file "$f"
    done
    if [ ! -f ${XBPS_SRCPKGDIR}/${basepkg}/template ]; then
        msg_error "xbps-src: unexistent file: ${XBPS_SRCPKGDIR}/${basepkg}/template\n"
    fi
    # CROSS_BUILD must be visible while the template is sourced so it can
    # set conditional variables.
    if [ -n "$cross" ]; then
        export CROSS_BUILD="$cross"
        source_file ${XBPS_SRCPKGDIR}/${basepkg}/template
    else
        unset CROSS_BUILD
        source_file ${XBPS_SRCPKGDIR}/${basepkg}/template
    fi
    # Check if required vars weren't set.
    _vars="pkgname version short_desc revision homepage license"
    for f in ${_vars}; do
        eval val="\$$f"
        if [ -z "$val" -o -z "$f" ]; then
            msg_error "\"$f\" not set on $pkgname template.\n"
        fi
    done
    # Check if version is valid ('-' and '_' are reserved separators in
    # pkgver strings).
    case "$version" in
    *-*) msg_error "version contains invalid character: -\n";;
    *_*) msg_error "version contains invalid character: _\n";;
    esac
    case "$version" in
    *[0-9]*) : good ;;
    *) msg_error "version must contain at least one digit.\n";;
    esac
    # Check if base-chroot is already installed.
    if [ -z "$bootstrap" -a -z "$CHROOT_READY" -a "z$show_problems" != "zignore-problems" ]; then
        msg_red "${pkg} is not a bootstrap package and cannot be built without it.\n"
        msg_error "Please install bootstrap packages and try again.\n"
    fi
    sourcepkg="${pkgname}"
    if [ -z "$subpackages" ]; then
        subpackages="$(get_subpkgs)"
    fi
    # A symlinked srcpkg dir means $pkg is a subpackage of $basepkg.
    if [ -h $XBPS_SRCPKGDIR/$basepkg ]; then
        # Source all subpkg environment setup snippets.
        for f in ${XBPS_COMMONDIR}/environment/setup-subpkg/*.sh; do
            source_file "$f"
        done
        pkgname=$pkg
        if ! declare -f ${basepkg}_package >/dev/null; then
            msg_error "$pkgname: missing ${basepkg}_package() function!\n"
        fi
    fi
    pkgver="${pkg}-${version}_${revision}"
    # If build_style() unset, a do_install() function must be defined.
    if [ -z "$build_style" ]; then
        # Check that at least do_install() is defined.
        if ! declare -f do_install >/dev/null; then
            msg_error "$pkgver: missing do_install() function!\n"
        fi
    fi
    FILESDIR=$XBPS_SRCPKGDIR/$sourcepkg/files
    PATCHESDIR=$XBPS_SRCPKGDIR/$sourcepkg/patches
    DESTDIR=$XBPS_DESTDIR/$XBPS_CROSS_TRIPLET/${sourcepkg}-${version}
    PKGDESTDIR=$XBPS_DESTDIR/$XBPS_CROSS_TRIPLET/${pkg}-${version}
    if [ -n "$disable_parallel_build" -o -z "$XBPS_MAKEJOBS" ]; then
        XBPS_MAKEJOBS=1
    fi
    makejobs="-j$XBPS_MAKEJOBS"
    # strip whitespaces to make " noarch " valid too.
    if [ "${archs// /}" = "noarch" ]; then
        arch="noarch"
    else
        arch="$XBPS_TARGET_MACHINE"
    fi
    # Skip the build entirely when the repo already has this exact pkgver.
    if [ -n "$XBPS_BINPKG_EXISTS" ]; then
        local _binpkgver="$($XBPS_QUERY_XCMD -R -ppkgver $pkgver 2>/dev/null)"
        if [ "$_binpkgver" = "$pkgver" ]; then
            if [ -z "$XBPS_DEPENDENCY" ]; then
                local _repo="$($XBPS_QUERY_XCMD -R -prepository $pkgver 2>/dev/null)"
                msg_normal "xbps-src: $pkgver: found ($XBPS_TARGET_MACHINE) ($_repo)\n"
            fi
            exit_and_cleanup
        fi
    fi
    if [ -z "$XBPS_DEBUG_PKGS" -o "$repository" = "nonfree" ]; then
        nodebug=yes
    fi
    # -g is required to build -dbg packages.
    if [ -z "$nodebug" ]; then
        dbgflags="-g"
    fi
    # build profile is used always in order to expose the host triplet,
    # but the compiler flags from it are only used when not crossing
    if [ -z "$CHROOT_READY" ]; then
        source_file ${XBPS_COMMONDIR}/build-profiles/bootstrap.sh
    else
        source_file ${XBPS_COMMONDIR}/build-profiles/${XBPS_MACHINE}.sh
    fi
    set_build_options
    # Compose the final flags: global + cross + template-provided.
    export CFLAGS="$XBPS_CFLAGS $XBPS_CROSS_CFLAGS $CFLAGS $dbgflags"
    export CXXFLAGS="$XBPS_CXXFLAGS $XBPS_CROSS_CXXFLAGS $CXXFLAGS $dbgflags"
    export FFLAGS="$XBPS_FFLAGS $XBPS_CROSS_FFLAGS $FFLAGS"
    export CPPFLAGS="$XBPS_CPPFLAGS $XBPS_CROSS_CPPFLAGS $CPPFLAGS"
    export LDFLAGS="$XBPS_LDFLAGS $XBPS_CROSS_LDFLAGS $LDFLAGS"
    # Toolchain used for helper binaries that must run on the build host.
    export BUILD_CC="cc"
    export BUILD_CXX="c++"
    export BUILD_CPP="cpp"
    export BUILD_FC="gfortran"
    export BUILD_LD="ld"
    export BUILD_CFLAGS="$XBPS_CFLAGS"
    export BUILD_CXXFLAGS="$XBPS_CXXFLAGS"
    export BUILD_CPPFLAGS="$XBPS_CPPFLAGS"
    export BUILD_LDFLAGS="$XBPS_LDFLAGS"
    export BUILD_FFLAGS="$XBPS_FFLAGS"
    export CC_FOR_BUILD="cc"
    export CXX_FOR_BUILD="g++"
    export CPP_FOR_BUILD="cpp"
    export FC_FOR_BUILD="gfortran"
    export LD_FOR_BUILD="ld"
    export PKG_CONFIG_FOR_BUILD="/usr/bin/pkg-config"
    export CFLAGS_FOR_BUILD="$XBPS_CFLAGS"
    export CXXFLAGS_FOR_BUILD="$XBPS_CXXFLAGS"
    export CPPFLAGS_FOR_BUILD="$XBPS_CPPFLAGS"
    export LDFLAGS_FOR_BUILD="$XBPS_LDFLAGS"
    export FFLAGS_FOR_BUILD="$XBPS_FFLAGS"
    if [ -n "$cross" ]; then
        # Regular tools names
        export CC="${XBPS_CROSS_TRIPLET}-gcc"
        export CXX="${XBPS_CROSS_TRIPLET}-c++"
        export CPP="${XBPS_CROSS_TRIPLET}-cpp"
        export FC="${XBPS_CROSS_TRIPLET}-gfortran"
        export GCC="$CC"
        export LD="${XBPS_CROSS_TRIPLET}-ld"
        export AR="${XBPS_CROSS_TRIPLET}-ar"
        export AS="${XBPS_CROSS_TRIPLET}-as"
        export RANLIB="${XBPS_CROSS_TRIPLET}-ranlib"
        export STRIP="${XBPS_CROSS_TRIPLET}-strip"
        export OBJDUMP="${XBPS_CROSS_TRIPLET}-objdump"
        export OBJCOPY="${XBPS_CROSS_TRIPLET}-objcopy"
        export NM="${XBPS_CROSS_TRIPLET}-nm"
        export READELF="${XBPS_CROSS_TRIPLET}-readelf"
        export PKG_CONFIG="${XBPS_CROSS_TRIPLET}-pkg-config"
        # Target tools
        export CC_target="$CC"
        export CXX_target="$CXX"
        export CPP_target="$CPP"
        export GCC_target="$GCC"
        export FC_target="$FC"
        export LD_target="$LD"
        export AR_target="$AR"
        export AS_target="$AS"
        export RANLIB_target="$RANLIB"
        export STRIP_target="$STRIP"
        export OBJDUMP_target="$OBJDUMP"
        export OBJCOPY_target="$OBJCOPY"
        export NM_target="$NM"
        export READELF_target="$READELF"
        # Target flags
        export CFLAGS_target="$CFLAGS"
        export CXXFLAGS_target="$CXXFLAGS"
        export CPPFLAGS_target="$CPPFLAGS"
        export LDFLAGS_target="$LDFLAGS"
        # Host tools
        export CC_host="cc"
        export CXX_host="g++"
        export CPP_host="cpp"
        export GCC_host="$CC_host"
        export FC_host="gfortran"
        export LD_host="ld"
        export AR_host="ar"
        export AS_host="as"
        export RANLIB_host="ranlib"
        export STRIP_host="strip"
        export OBJDUMP_host="objdump"
        export OBJCOPY_host="objcopy"
        export NM_host="nm"
        export READELF_host="readelf"
        # Host flags
        export CFLAGS_host="$XBPS_CFLAGS"
        export CXXFLAGS_host="$XBPS_CXXFLAGS"
        export CPPFLAGS_host="$XBPS_CPPFLAGS"
        export LDFLAGS_host="$XBPS_LDFLAGS"
        # Rust flags which are passed to rustc
        export RUSTFLAGS="$XBPS_CROSS_RUSTFLAGS"
        # Rust target, which differs from our triplets
        export RUST_TARGET="$XBPS_CROSS_RUST_TARGET"
        # Rust build, which is the host system, may also differ
        export RUST_BUILD="$XBPS_RUST_TARGET"
    else
        # Target flags from build-profile
        export CFLAGS="$XBPS_TARGET_CFLAGS $CFLAGS"
        export CXXFLAGS="$XBPS_TARGET_CXXFLAGS $CXXFLAGS"
        export FFLAGS="$XBPS_TARGET_FFLAGS $FFLAGS"
        export CPPFLAGS="$XBPS_TARGET_CPPFLAGS $CPPFLAGS"
        export LDFLAGS="$XBPS_TARGET_LDFLAGS $LDFLAGS"
        # Tools
        export CC="cc"
        export CXX="g++"
        export CPP="cpp"
        export GCC="$CC"
        export FC="gfortran"
        export LD="ld"
        export AR="ar"
        export AS="as"
        export RANLIB="ranlib"
        export STRIP="strip"
        export OBJDUMP="objdump"
        export OBJCOPY="objcopy"
        export NM="nm"
        export READELF="readelf"
        export PKG_CONFIG="pkg-config"
        export RUST_TARGET="$XBPS_RUST_TARGET"
        export RUST_BUILD="$XBPS_RUST_TARGET"
        # Unset cross evironment variables
        unset CC_target CXX_target CPP_target GCC_target FC_target LD_target AR_target AS_target
        unset RANLIB_target STRIP_target OBJDUMP_target OBJCOPY_target NM_target READELF_target
        unset CFLAGS_target CXXFLAGS_target CPPFLAGS_target LDFLAGS_target
        unset CC_host CXX_host CPP_host GCC_host FC_host LD_host AR_host AS_host
        unset RANLIB_host STRIP_host OBJDUMP_host OBJCOPY_host NM_host READELF_host
        unset CFLAGS_host CXXFLAGS_host CPPFLAGS_host LDFLAGS_host
        unset RUSTFLAGS
    fi
    # Setup some specific package vars.
    if [ -z "$wrksrc" ]; then
        wrksrc="$XBPS_BUILDDIR/${sourcepkg}-${version}"
    else
        wrksrc="$XBPS_BUILDDIR/$wrksrc"
    fi
    # Refuse to continue for nocross/broken/restricted templates unless
    # the caller asked to ignore problems.
    if [ "$cross" -a "$nocross" -a "$show_problems" != "ignore-problems" ]; then
        msg_red "$pkgver: cannot be cross compiled, exiting...\n"
        msg_red "$pkgver: $nocross\n"
        exit 2
    elif [ "$broken" -a "$show_problems" != "ignore-problems" ]; then
        msg_red "$pkgver: cannot be built, it's currently broken; see the build log:\n"
        msg_red "$pkgver: $broken\n"
        exit 2
    fi
    if [ -n "$restricted" -a -z "$XBPS_ALLOW_RESTRICTED" -a "$show_problems" != "ignore-problems" ]; then
        msg_red "$pkgver: does not allow redistribution of sources/binaries (restricted license).\n"
        msg_red "If you really need this software, run 'echo XBPS_ALLOW_RESTRICTED=yes >> etc/conf'\n"
        exit 2
    fi
    export XBPS_STATEDIR="${XBPS_BUILDDIR}/.xbps-${sourcepkg}"
    export XBPS_WRAPPERDIR="${XBPS_STATEDIR}/wrappers"
    mkdir -p $XBPS_STATEDIR $XBPS_WRAPPERDIR
    source_file $XBPS_COMMONDIR/environment/build-style/${build_style}.sh
    # Source all build-helper files that are defined
    for f in $build_helper; do
        if [ ! -r $XBPS_BUILDHELPERDIR/${f}.sh ]; then
            msg_error "$pkgver: cannot find build helper $XBPS_BUILDHELPERDIR/${f}.sh!\n"
        fi
        . $XBPS_BUILDHELPERDIR/${f}.sh
    done
}
|
package main
import (
"fmt"
"strconv"
)
// Demonstrates Go's primitive data types: booleans, integer bit
// operations (printed in base 2), complex numbers, strings and runes.
func main() {
	fmt.Println("Learning Primitive Datatypes")
	var a bool = true
	fmt.Printf("%v , %T\n", a, a)
	// bit operators
	var (
		b int64 = 10
		c int64 = 3
	)
	fmt.Println("b = " + strconv.FormatInt(b, 2))
	fmt.Println("c = " + strconv.FormatInt(c, 2))
	fmt.Println("b & c = " + strconv.FormatInt(b&c, 2))   // AND
	fmt.Println("b | c = " + strconv.FormatInt(b|c, 2))   // OR
	fmt.Println("b ^ c = " + strconv.FormatInt(b^c, 2))   // XOR
	fmt.Println("b &^ c = " + strconv.FormatInt(b&^c, 2)) // AND NOT (bit clear), not NOR
	fmt.Println("b << 3 = " + strconv.FormatInt(b<<3, 2)) // left shift
	fmt.Println("b >> 3 = " + strconv.FormatInt(b>>3, 2)) // right shift
	// dealing with complex numbers
	var n complex64 = 45 + 34i
	var m complex128 = complex(45, 65)
	fmt.Printf("%v , %T\n", real(n), real(n))
	fmt.Printf("%v , %T\n", imag(n), imag(n))
	fmt.Printf("%v , %T\n", n, n)
	fmt.Printf("%v , %T\n", imag(m), imag(m))
	fmt.Printf("%v , %T\n", real(m), real(m))
	fmt.Printf("%v , %T\n", m, m)
	// dealing with strings
	// strings in Go are any UTF8 characters
	s := "strings"
	s2 := ",another strings"
	by := []byte(s)
	fmt.Printf("%v , %T\n", s, s)       // string
	fmt.Printf("%v , %T\n", s+s2, s+s2) // string
	fmt.Printf("%v , %T\n", by, by)     // []uint8 (byte slice)
	// s[2] = t // strings are immutable
	fmt.Printf("%v , %T\n", s[2], s[2]) // uint8 — indexing yields a byte, not a char
	// dealing with rune
	// rune in Go are any UTF32 characters
	r := 'a'  // rune
	r2 := "a" // string
	fmt.Printf("%v , %T\n", r, r)   // 97 , int32
	fmt.Printf("%v , %T\n", r2, r2) // a , string
	// Summary
	// 1. Numeric Types
	// ---> Signed integers
	// int has varying sizes but min 32 bits
	// 8 bit (int8) through 64 bit (int64)
	// ---> Unsigned integers (can store larger magnitudes since no sign bit)
	// 8 bit (byte and uint8) through 64 bit (uint64)
	// can't mix types in arithmetic: uint8 + uint32 won't compile
	// ---> Floating point Numbers
	// 32 and 64 bit versions
	// ---> complex numbers
	// zero value 0 + 0i
	// come in 64 and 128 bit version
	// 2. Text Types
	// ---> strings
	// UTF8 , immutable
	// can be converted to bytes
	// ---> Rune
	// UTF32 , Alias for int32
}
|
#!/bin/sh
# List locally cached (mobile) user accounts via macOS Directory Services:
# users whose AuthenticationAuthority contains "LocalCachedUser".
userList=$(dscl . list /Users AuthenticationAuthority | awk '$2~/LocalCachedUser/ {print $1}')

echo "Listing account and home directory for the following users..."
for account in $userList ; do
    echo "$account"
done
|
#!/bin/bash
# Script to automate cryptocurrency mining using Docker

# Source configuration from hellminer.conf (defines IMAGE_NAME, THREADS,
# SOLVERS, POOL_HOST, POOL_PORT, PAYOUT_ADDRESS and optional WORKER_NAME).
. ./hellminer.conf

# Check for the presence of Docker
DOCKER=$(which docker)
if [ -z "${DOCKER}" ]; then
    echo "ERROR: Docker does not seem to be installed. Please download and install Docker CE as outlined on https://docs.docker.com/engine/install/."
    exit 1
else
    # Append worker name to payout address if specified
    if [ -n "${WORKER_NAME}" ]; then
        PAYOUT_ADDRESS="${PAYOUT_ADDRESS}.${WORKER_NAME}"
    fi
    # Run Docker container with specified parameters.  Expansions are
    # quoted so config values containing whitespace or glob characters
    # cannot be word-split into extra arguments.
    "${DOCKER}" run --name "${IMAGE_NAME}" --rm -it "${IMAGE_NAME}" \
        --cpu="${THREADS}" --instances="${SOLVERS}" \
        -c "${POOL_HOST}:${POOL_PORT}" -u "${PAYOUT_ADDRESS}" -p x
fi
# EOF
|
# Compiler flags for the armv7l hard-float build profile.
XBPS_CFLAGS="-O2 -pipe -fstack-protector -march=armv7-a -mfpu=vfpv3 -mfloat-abi=hard"
# C++ inherits the same optimization/ABI flags as C.
XBPS_CXXFLAGS="$XBPS_CFLAGS"
# Target triplet for this profile.
# NOTE(review): -mfloat-abi=hard normally pairs with a *eabihf triplet;
# confirm "musleabi" (soft-float suffix) is intentional here.
XBPS_TRIPLET="armv7l-unknown-linux-musleabi"
|
#!/bin/bash
# Tag the current commit as v<package version> and push tags.
# Refuses to run outside CI so release tags are only created by the pipeline.
if [ "$CI" != "true" ]; then
    echo ""
    echo "Can only use the tag release script on CI"
    echo ""
    exit 1
fi

# Version string read from package.json by a helper script.
PACKAGE_VERSION=$(node ./scripts/getPackageVersion.js)
# Prints "true"/"false" depending on whether the tag already exists.
TAG_EXISTS=$(./scripts/tag_exists.sh v$PACKAGE_VERSION)

if [[ $TAG_EXISTS == "false" ]]; then
    git tag v$PACKAGE_VERSION
    # NOTE(review): the two variables below are computed but never used in
    # this script — confirm whether a later pipeline step consumes them or
    # whether they can be removed.
    PACKAGE_MINOR_VERSION=$(node ./scripts/getMinorPackageVersion.js)
    PACKAGE_MAJOR_VERSION=$(node ./scripts/getMajorPackageVersion.js)
    git push origin --tags
fi
|
/*
* Copyright 2018 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import {
getAllNeighbors,
getDistance,
getRange,
getReachable,
createCoordinate,
} from './hex-utils';
// Tests for the cube-coordinate hex grid helpers.  Coordinates are cube
// triples [x, y, z]; for valid hex coordinates x + y + z === 0.
test('neighbors of origo', () => {
  const coordinate = createCoordinate([0, 0, 0]);
  const result = getAllNeighbors(coordinate);
  // The six adjacent hexes, in the order the helper emits them.
  const expectedNeighbors = [
    [1, -1, 0],
    [1, 0, -1],
    [0, 1, -1],
    [0, -1, 1],
    [-1, 1, 0],
    [-1, 0, 1],
  ].map(createCoordinate);
  expect(result).toEqual(expectedNeighbors);
});
// Same six offsets translated by the non-origin coordinate.
test('neighbors of (1, 0, -1)', () => {
  const coordinate = createCoordinate([1, 0, -1]);
  const result = getAllNeighbors(coordinate);
  const expectedNeighbors = [
    [2, -1, -1],
    [2, 0, -2],
    [1, 1, -2],
    [1, -1, 0],
    [0, 1, -1],
    [0, 0, 0],
  ].map(createCoordinate);
  expect(result).toEqual(expectedNeighbors);
});
test('distance between neighbors', () => {
  const origo = createCoordinate([0, 0, 0]);
  const neighbor = createCoordinate([1, 0, -1]);
  const result = getDistance(origo, neighbor);
  expect(result).toEqual(1);
});
test('distance between non-neighbors', () => {
  const origo = createCoordinate([0, 0, 0]);
  const nonNeighbor = createCoordinate([3, 3, -6]);
  const result = getDistance(origo, nonNeighbor);
  expect(result).toEqual(6);
});
// Range 1 around the origin: the origin itself plus its six neighbors,
// in sorted order.
test('range from origo', () => {
  const origo = createCoordinate([0, 0, 0]);
  const result = getRange(origo, 1);
  expect(result).toEqual(
    [
      [-1, 0, 1],
      [-1, 1, 0],
      [0, -1, 1],
      [0, 0, 0],
      [0, 1, -1],
      [1, -1, 0],
      [1, 0, -1],
    ].map(createCoordinate)
  );
});
// Range 2 around an off-origin center: 19 hexes (1 + 6 + 12).
test('range not from origo', () => {
  const origo = createCoordinate([2, -3, 1]);
  const result = getRange(origo, 2);
  expect(result).toEqual(
    [
      [0, -3, 3],
      [0, -2, 2],
      [0, -1, 1],
      [1, -4, 3],
      [1, -3, 2],
      [1, -2, 1],
      [1, -1, 0],
      [2, -5, 3],
      [2, -4, 2],
      [2, -3, 1],
      [2, -2, 0],
      [2, -1, -1],
      [3, -5, 2],
      [3, -4, 1],
      [3, -3, 0],
      [3, -2, -1],
      [4, -5, 1],
      [4, -4, 0],
      [4, -3, -1],
    ].map(createCoordinate)
  );
});
// Movement range 3 from the origin with the listed hexes blocked.
test('getReachable', () => {
  const origo = createCoordinate([0, 0, 0]);
  const result = getReachable(
    origo,
    3,
    [
      [0, 1, -1],
      [1, 0, -1],
      [2, -1, -1],
      [0, -1, 1],
      [-1, 0, 1],
      [-2, 2, 0],
    ].map(createCoordinate)
  );
  expect(result).toEqual(
    [
      [0, 0, 0],
      [1, -1, 0],
      [-1, 1, 0],
      [2, -2, 0],
      [1, -2, 1],
      [-1, 2, -1],
      [-2, 1, 1],
      [3, -3, 0],
      [3, -2, -1],
      [2, -3, 1],
      // NOTE(review): [2, -3, 1] appears twice in this expected list —
      // confirm getReachable really yields a duplicate entry.
      [2, -3, 1],
      [1, -3, 2],
      [0, -2, 2],
      [0, 2, -2],
      [-1, 3, -2],
      [-2, 3, -1],
      [-2, 0, 2],
      [-3, 2, 1],
      [-3, 1, 2],
    ].map(createCoordinate)
  );
});
|
import React from "react";
import { Row, Statistic, Progress } from "antd";
// Dashboard header: four headline statistics plus a progress bar showing
// the current activity as a fraction of `total`.
//
// Props:
//   rp_activity  - RP activity in MBq (RP presumably radiopharmaceutical —
//                  confirm with domain owner)
//   mesure_time  - measurement timestamp (Date-constructible); the spelling
//                  is part of the external prop API, keep as-is
//   rp_vol       - RP volume in ml
//   rp_half_life - RP half-life in minutes
//   now          - object carrying total_activity_now; the progress bar is
//                  rendered only when it is non-empty
//   total        - reference activity used as 100% of the progress bar
const HeaderStatistics = ({
  rp_activity,
  mesure_time,
  rp_vol,
  rp_half_life,
  now,
  total,
}) => {
  return (
    <>
      <Row>
        <Statistic
          title="RP Activity"
          suffix="MBq"
          value={rp_activity}
          style={{ margin: 10 }}
        />
        <Statistic
          title="Measure Time"
          value={new Date(mesure_time).toLocaleTimeString("en-GB", {
            hour: "2-digit",
            minute: "2-digit",
          })}
          style={{ margin: 10 }}
        />
        <Statistic
          title="RP Volume"
          suffix="ml"
          value={rp_vol}
          style={{ margin: 10 }}
        />
        <Statistic
          title="RP Half Life"
          suffix="min"
          value={rp_half_life}
          style={{ margin: 10 }}
        />
      </Row>
      <Row>
        {now && Object.keys(now).length !== 0 ? (
          <Progress
            style={{ paddingRight: 20 }}
            strokeColor={{
              "0%": "red",
              "100%": "green",
            }}
            percent={(now.total_activity_now / total) * 100}
            format={(percent) => `${((percent * total) / 100).toFixed(0)} MBq`}
          />
        ) : null}
      </Row>
    </>
  );
};
export default HeaderStatistics;
|
package io.opensphere.core.projection;
import org.apache.log4j.Logger;
import io.opensphere.core.math.Vector3d;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LineType;
import io.opensphere.core.model.Tessera;
import io.opensphere.core.model.TesseraList;
import io.opensphere.core.util.lang.Pair;
/**
* A geographic projection which is mutable.
*/
public abstract class AbstractMutableGeographicProjection extends AbstractGeographicProjection
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(AbstractMutableGeographicProjection.class);

    /** Message for trying to illegally access a mutable projection. */
    protected static final String MUTABLE_PROJECTION_MSG = "Cannot project using non-snapshot mutable projection for terrain based positions.";

    /**
     * Delegates to the immutable snapshot unless either endpoint is
     * terrain-relative, in which case projecting through the mutable
     * projection is illegal.
     * NOTE(review): this overload throws UnsupportedOperationException for
     * terrain positions while every other method in this class only logs
     * and returns null — confirm whether the inconsistency is intentional.
     */
    @Override
    public Pair<Tessera<GeographicPosition>, ProjectionCursor> convertLineToModel(GeographicPosition start,
            GeographicPosition end, LineType type, Vector3d modelCenter)
    {
        if (start.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && end.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertLineToModel(start, end, type, modelCenter);
        }
        throw new UnsupportedOperationException(MUTABLE_PROJECTION_MSG);
    }

    /**
     * Cursor-based variant; delegates to the snapshot for non-terrain
     * positions, otherwise logs and returns null.
     */
    @Override
    public Pair<Tessera<GeographicPosition>, ProjectionCursor> convertLineToModel(ProjectionCursor start, GeographicPosition end,
            LineType type, Vector3d modelCenter)
    {
        if (start.getVertex().getCoordinates().getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && end.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertLineToModel(start, end, type, modelCenter);
        }
        LOGGER.error(MUTABLE_PROJECTION_MSG);
        return null;
    }

    /**
     * Delegates to the snapshot when all four vertices are non-terrain,
     * otherwise logs and returns null.
     */
    @Override
    public TesseraList<? extends GeographicProjectedTesseraVertex> convertQuadToModel(GeographicPosition vert1,
            GeographicPosition vert2, GeographicPosition vert3, GeographicPosition vert4, Vector3d modelCenter)
    {
        if (vert1.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && vert2.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && vert3.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && vert4.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertQuadToModel(vert1, vert2, vert3, vert4, modelCenter);
        }
        LOGGER.error(MUTABLE_PROJECTION_MSG);
        return null;
    }

    /**
     * Delegates to the snapshot for non-terrain positions, otherwise logs
     * and returns null.
     */
    @Override
    public Vector3d convertToModel(GeographicPosition inPos, Vector3d modelCenter)
    {
        if (inPos.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertToModel(inPos, modelCenter);
        }
        LOGGER.error(MUTABLE_PROJECTION_MSG);
        return null;
    }

    /**
     * Delegates to the snapshot for non-terrain reference levels,
     * otherwise logs and returns null.
     */
    @Override
    public GeographicPosition convertToPosition(Vector3d inPos, ReferenceLevel altReference)
    {
        if (altReference != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertToPosition(inPos, altReference);
        }
        LOGGER.error(MUTABLE_PROJECTION_MSG);
        return null;
    }

    /**
     * Delegates to the snapshot when all three vertices are non-terrain,
     * otherwise logs and returns null.
     */
    @Override
    public TesseraList<? extends GeographicProjectedTesseraVertex> convertTriangleToModel(GeographicPosition vert1,
            GeographicPosition vert2, GeographicPosition vert3, Vector3d modelCenter)
    {
        if (vert1.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && vert2.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN
                && vert3.getLatLonAlt().getAltitudeReference() != ReferenceLevel.TERRAIN)
        {
            return getSnapshot().convertTriangleToModel(vert1, vert2, vert3, modelCenter);
        }
        LOGGER.error(MUTABLE_PROJECTION_MSG);
        return null;
    }
}
|
# Termux build recipe for GNU Autoconf (generator of portable configure scripts).
TERMUX_PKG_HOMEPAGE=https://www.gnu.org/software/autoconf/autoconf.html
TERMUX_PKG_DESCRIPTION="Creator of shell scripts to configure source code packages"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_VERSION=2.69
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/autoconf/autoconf-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=64ebcec9f8ac5b2487125a86a7760d2591ac9e1d3dbd59489633f9de62a57684
TERMUX_PKG_DEPENDS="m4, make, perl"
# Pure shell/perl package, so one build serves every architecture.
TERMUX_PKG_PLATFORM_INDEPENDENT=yes
# Rewrite the hard-coded /bin/sh in the bundled m4 macros to Termux's prefix.
termux_step_post_extract_package () {
	perl -p -i -e "s|/bin/sh|$TERMUX_PREFIX/bin/sh|" lib/*/*.m4
}
# After packaging, point the installed scripts at Termux's m4 and shell.
termux_step_post_massage () {
	perl -p -i -e "s|/usr/bin/m4|$TERMUX_PREFIX/bin/m4|" bin/*
	perl -p -i -e "s|CONFIG_SHELL-/bin/sh|CONFIG_SHELL-$TERMUX_PREFIX/bin/sh|" bin/autoconf
}
|
#!/usr/bin/env bash
##############################################################################
# diff-example.sh
#
# Custom diff command which compares two files while skipping their leading
# lines.
#
# NOTE(review): `tail -n +N` prints starting AT line N, i.e. it skips the
# first N-1 lines of each file — to skip exactly N lines, pass N+1 as the
# first argument. Confirm which behavior callers expect.
#
# Usage:
# bash diff-example.sh n input-file-1 input-file-2
##############################################################################
n1=$1
n2=$n1
input1=$2
input2=$3
diff -q <(tail -n +"${n1}" "${input1}") <(tail -n +"${n2}" "${input2}")
|
# Prompt until the user supplies a valid integer. int() raises ValueError on
# non-numeric input; the original version printed a message but then left
# inputNumber unbound, so any later use would raise NameError. Re-prompting
# guarantees inputNumber always holds a valid int afterwards.
while True:
    try:
        inputNumber = int(input("Please enter a number: "))
        break
    except ValueError:
        print("Please enter a valid number")
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase script: copies built/vendored
# frameworks into the app bundle, strips invalid architectures and re-signs.
# NOTE(review): `set -o pipefail` and the array below are bash features even
# though the shebang is /bin/sh — this relies on sh being bash-compatible.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path to the .framework, relative or absolute. Resolution order:
# BUILT_PRODUCTS_DIR/$1, BUILT_PRODUCTS_DIR/basename($1), then $1 itself.
# NOTE(review): if none of the three paths is readable, `source` stays unset
# and the rsync below will fail under `set -u` — confirm that is acceptable.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare binary layout when there is no .framework bundle.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle. Copies it into DERIVED_FILES_DIR,
# strips invalid architectures, then moves it on to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # strip_invalid_archs reports success via the STRIP_BINARY_RETVAL global.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path of the bundle/binary to sign. No-op unless an identity is set and
# code signing is both required and allowed by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled the command is backgrounded with '&';
    # the main script waits on all of them at the end.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: path to a fat binary. Removes every slice whose architecture is not in
# $ARCHS. Sets STRIP_BINARY_RETVAL to 1 on success, 0 when the binary has no
# architecture in common with the build (nothing stripped, warning emitted).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Per-configuration framework installation (generated by CocoaPods; both
# configurations currently embed the same framework).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/XRBase/XRBase.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/XRBase/XRBase.framework"
fi
# Wait for any backgrounded codesign jobs started by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
# Provisioning script: disables SELinux, enables bridge netfilter, installs
# Rundeck + Ansible and starts rundeckd bound to 10.200.0.10:8080.
# NOTE(review): SELinux is disabled both immediately and across reboots —
# confirm this is acceptable for the target environment.
setenforce 0
sed -i 's/^SELINUX=.*/SELINUX=disabled/g' /etc/selinux/config
# Let bridged traffic traverse iptables/ip6tables/arptables.
modprobe br_netfilter
echo "net.bridge.bridge-nf-call-iptables=1" | sudo tee -a /etc/sysctl.conf
echo "net.bridge.bridge-nf-call-ip6tables=1" | sudo tee -a /etc/sysctl.conf
echo "net.bridge.bridge-nf-call-arptables=1" | sudo tee -a /etc/sysctl.conf
sysctl -p
yum update -y
yum install -y vim
rpm -Uvh http://repo.rundeck.org/latest.rpm
yum install -y rundeck java
yum install -y epel-release
yum install -y ansible
cd /etc/rundeck
RUNDECK_PORT="8080"
# Rewrite the default localhost URL to the host's address and chosen port.
sed -i "s/localhost:4440/10.200.0.10:$RUNDECK_PORT/g" rundeck-config.properties
echo "server.port=${RUNDECK_PORT}" >> rundeck-config.properties
service rundeckd start
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { ModalModule } from 'ng2-bootstrap';
import { PaginationModule } from 'ng2-bootstrap';
import { SharedModule } from './shared.module';
import { CommentTableComponent } from '../manage/comment-table/comment-table.component';
import { PostTableComponent } from '../manage/post-table/post-table.component';
// Feature module bundling the comment/post management tables together with
// the common/forms modules and ng2-bootstrap modal + pagination support.
// The table components and their supporting modules are re-exported so
// importers get everything needed to render them.
@NgModule({
  imports:[
    CommonModule,
    FormsModule,
    SharedModule,
    ModalModule.forRoot(),
    PaginationModule.forRoot()
  ],
  declarations:[
    CommentTableComponent,
    PostTableComponent
  ],
  exports:[
    CommonModule,
    FormsModule,
    ModalModule,
    PaginationModule,
    CommentTableComponent,
    PostTableComponent
  ]
})
export class PostSharedModule {
}
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.runtime.manager.impl.deploy;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.persistence.EntityManagerFactory;
import org.kie.api.KieServices;
import org.kie.api.executor.ExecutorService;
import org.kie.api.runtime.manager.RuntimeManager;
import org.kie.api.task.TaskService;
import org.kie.internal.runtime.Cacheable;
import org.kie.internal.runtime.conf.ObjectModel;
import org.kie.internal.runtime.manager.InternalRuntimeManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@code ObjectModel} resolver for EJB environments. Extends the reflection
 * based resolver with support for well-known context parameter names (entity
 * manager factory, runtime manager, task service, ...) and {@code jndi:}
 * prefixed parameters that are looked up from the naming context.
 */
public class EjbObjectModelResolver extends ReflectionObjectModelResolver {

    private static final Logger logger = LoggerFactory.getLogger(EjbObjectModelResolver.class);

    /** Identifier under which this resolver is registered. */
    public static final String ID = "ejb";

    // Maps well-known context parameter names to the interface type used for
    // constructor lookup instead of the runtime class of the context value.
    private Map<String, Class<?>> knownContextParamMapping = new HashMap<String, Class<?>>();

    public EjbObjectModelResolver() {
        knownContextParamMapping.put("entityManagerFactory", EntityManagerFactory.class);
        knownContextParamMapping.put("runtimeManager", RuntimeManager.class);
        // NOTE(review): "kieSession" maps to KieServices.class here — confirm
        // this is intentional rather than a KieSession type.
        knownContextParamMapping.put("kieSession", KieServices.class);
        knownContextParamMapping.put("taskService", TaskService.class);
        knownContextParamMapping.put("executorService", ExecutorService.class);
        knownContextParamMapping.put("classLoader", ClassLoader.class);
    }

    /**
     * Creates (or returns from the runtime manager cache) an instance described
     * by the given object model. Parameters may themselves be object models,
     * names of context parameters, {@code jndi:} lookups or plain strings.
     *
     * @param model object model describing the class and constructor parameters
     * @param cl class loader used to resolve classes
     * @param contextParams context values that parameters may reference by name
     * @return the created (or cached) instance
     * @throws IllegalArgumentException when the instance cannot be created or a
     *         JNDI lookup fails
     */
    @Override
    public Object getInstance(ObjectModel model, ClassLoader cl, Map<String, Object> contextParams) {
        Class<?> clazz = getClassObject(model.getIdentifier(), cl);
        Object instance = null;
        InternalRuntimeManager manager = null;
        // Reuse a previously created instance when the runtime manager caches it.
        if (contextParams.containsKey("runtimeManager")) {
            manager = (InternalRuntimeManager) contextParams.get("runtimeManager");
            instance = manager.getCacheManager().get(clazz.getName());
            if (instance != null) {
                return instance;
            }
        }
        if (model.getParameters() == null || model.getParameters().isEmpty()) {
            logger.debug("About to create instance of {} with no arg constructor", model.getIdentifier());
            // no parameters then use no arg constructor
            try {
                instance = clazz.newInstance();
            } catch (Exception e) {
                throw new IllegalArgumentException("Unable to create instance (no arg constructor) of type "
                        + model.getIdentifier() + " due to " + e.getMessage(), e);
            }
        } else {
            logger.debug("About to create instance of {} with {} parameters", model.getIdentifier(), model.getParameters().size());
            // process parameter instances
            Class<?>[] parameterTypes = new Class<?>[model.getParameters().size()];
            Object[] paramInstances = new Object[model.getParameters().size()];
            int index = 0;
            for (Object param : model.getParameters()) {
                if (param instanceof ObjectModel) {
                    // Nested object model: resolve it recursively.
                    logger.debug("Parameter is of type ObjectModel (id: {}), trying to create instance based on that model",
                            ((ObjectModel) param).getIdentifier());
                    Class<?> paramclazz = getClassObject(((ObjectModel) param).getIdentifier(), cl);
                    parameterTypes[index] = paramclazz;
                    paramInstances[index] = getInstance(((ObjectModel) param), cl, contextParams);
                } else {
                    if (contextParams.containsKey(param)) {
                        // Fixed typo in the log message ("Parametr ... parametr").
                        logger.debug("Parameter references context parameter with name {}", param);
                        Object contextValue = contextParams.get(param);
                        Class<?> paramClass = contextValue.getClass();
                        // Prefer the well-known interface type for constructor lookup.
                        if (knownContextParamMapping.containsKey(param)) {
                            paramClass = knownContextParamMapping.get(param);
                        }
                        parameterTypes[index] = paramClass;
                        paramInstances[index] = contextValue;
                    } else {
                        if (param.toString().startsWith("jndi:")) {
                            logger.debug("Parameter is jndi lookup type - {}", param);
                            // remove the jndi: prefix
                            String lookupName = param.toString().substring(5);
                            try {
                                Object jndiObject = InitialContext.doLookup(lookupName);
                                parameterTypes[index] = Object.class;
                                paramInstances[index] = jndiObject;
                            } catch (NamingException e) {
                                throw new IllegalArgumentException("Unable to look up object from jndi using name " + lookupName, e);
                            }
                        } else {
                            logger.debug("Parameter is simple type (string) - {}", param);
                            parameterTypes[index] = param.getClass();
                            paramInstances[index] = param;
                        }
                    }
                }
                index++;
            }
            try {
                logger.debug("Creating instance of class {} with parameter types {} and parameter instances {}",
                        clazz, parameterTypes, paramInstances);
                Constructor<?> constructor = clazz.getConstructor(parameterTypes);
                instance = constructor.newInstance(paramInstances);
            } catch (Exception e) {
                // Arrays.toString prints the actual parameter types; plain array
                // concatenation printed only the array's identity hash code.
                throw new IllegalArgumentException("Unable to create instance (" + Arrays.toString(parameterTypes) + " constructor) of type "
                        + model.getIdentifier() + " due to " + e.getMessage(), e);
            }
        }
        logger.debug("Created instance : {}", instance);
        // Cache the new instance for reuse when it supports caching.
        if (manager != null && instance instanceof Cacheable) {
            manager.getCacheManager().add(instance.getClass().getName(), instance);
        }
        return instance;
    }

    @Override
    public boolean accept(String resolverId) {
        if (ID.equals(resolverId)) {
            return true;
        }
        logger.debug("Resolver id {} is not accepted by {}", resolverId, this.getClass());
        return false;
    }
}
|
<reponame>FenixFinance/types
// Canonical identifiers for the cross-chain bridge integrations known here.
export enum BridgeTool {
  nxtp = 'nxtp',
  hop = 'hop',
  anyswap = 'anyswap',
  cbridge = 'cbridge',
  horizon = 'horizon',
}
// Static metadata describing a bridge: display name, logo and related URLs.
export interface Bridge {
  key: BridgeTool
  name: string
  logoURI: string
  bridgeUrl?: string
  discordUrl?: string
  supportUrl?: string
  docsUrl?: string
  explorerUrl?: string
}
// One metadata entry per BridgeTool value; commented-out fields mark URLs
// that were not available when the entry was added.
export const supportedBridges: Array<Bridge> = [
  {
    key: BridgeTool.nxtp,
    name: 'Connext',
    logoURI:
      'https://raw.githubusercontent.com/fenixfinance/types/main/src/assets/icons/bridges/connext.png',
    bridgeUrl: 'https://xpollinate.io/',
    discordUrl: 'https://chat.connext.network/',
    supportUrl:
      'https://www.notion.so/connext/Connext-NXTP-Support-19a357ebabdd4e888cfcd138fe3e4644',
    docsUrl: 'https://docs.connext.network/',
    explorerUrl: 'https://connextscan.io/',
  },
  {
    key: BridgeTool.hop,
    name: 'Hop',
    logoURI:
      'https://raw.githubusercontent.com/fenixfinance/types/main/src/assets/icons/bridges/hop.png',
    bridgeUrl: 'https://app.hop.exchange/',
    discordUrl: 'https://discord.gg/PwCF88emV4',
    supportUrl: 'https://help.hop.exchange/hc/en-us',
    docsUrl: 'https://docs.hop.exchange/',
    explorerUrl: 'https://explorer.hop.exchange/mainnet/',
  },
  {
    key: BridgeTool.anyswap,
    name: 'Multichain (AnySwap)',
    logoURI:
      'https://raw.githubusercontent.com/fenixfinance/types/main/src/assets/icons/bridges/anyswap.png',
    bridgeUrl: 'https://app.multichain.org/',
    // discordUrl: '',
    supportUrl: 'https://multichain.zendesk.com/hc/en-us',
    docsUrl: 'https://docs.multichain.org/',
    explorerUrl: 'https://anyswap.net/',
  },
  {
    key: BridgeTool.cbridge,
    name: 'cBRIDGE',
    logoURI:
      'https://raw.githubusercontent.com/fenixfinance/types/main/src/assets/icons/bridges/cbridge.png',
    bridgeUrl: 'https://cbridge.celer.network/',
    discordUrl: 'https://discord.com/invite/uGx4fjQ',
    supportUrl: 'https://form.typeform.com/to/Q4LMjUaK',
    docsUrl: 'https://cbridge-docs.celer.network/',
    // explorerUrl: '',
    // analyticsUrl: 'https://cbridge-analytics.celer.network/',
  },
  {
    key: BridgeTool.horizon,
    name: 'Horizon',
    logoURI:
      'https://raw.githubusercontent.com/fenixfinance/types/main/src/assets/icons/bridges/horizon.png',
    bridgeUrl: 'https://bridge.harmony.one/',
    discordUrl: 'https://harmony.one/discord',
    supportUrl: 'https://bridge.harmony.one/help',
    docsUrl: 'https://docs.harmony.one/home/general/horizon-bridge',
    explorerUrl: 'https://bridge.harmony.one/explorer',
  },
]
|
<gh_stars>100-1000
package com.github.messenger4j.send.message.template.receipt;
import static java.util.Optional.empty;
import java.util.Optional;
import lombok.EqualsAndHashCode;
import lombok.NonNull;
import lombok.ToString;
/**
 * Immutable receipt summary: the mandatory grand total plus optional
 * subtotal, tax and shipping amounts. Created via the static factories.
 *
 * @author <NAME>
 * @since 1.0.0
 */
@ToString
@EqualsAndHashCode
public final class Summary {
  private final float totalCost;
  private final Optional<Float> subtotal;
  private final Optional<Float> totalTax;
  private final Optional<Float> shippingCost;

  private Summary(float total, Optional<Float> sub, Optional<Float> tax, Optional<Float> shipping) {
    this.totalCost = total;
    this.subtotal = sub;
    this.totalTax = tax;
    this.shippingCost = shipping;
  }

  /** Creates a summary carrying only the mandatory total cost. */
  public static Summary create(float totalCost) {
    return create(totalCost, empty(), empty(), empty());
  }

  /** Creates a summary with the total cost plus the optional amounts (none may be null). */
  public static Summary create(
      float totalCost,
      @NonNull Optional<Float> subtotal,
      @NonNull Optional<Float> totalTax,
      @NonNull Optional<Float> shippingCost) {
    return new Summary(totalCost, subtotal, totalTax, shippingCost);
  }

  public float totalCost() { return totalCost; }

  public Optional<Float> subtotal() { return subtotal; }

  public Optional<Float> totalTax() { return totalTax; }

  public Optional<Float> shippingCost() { return shippingCost; }
}
|
<gh_stars>1-10
// Author : XuBenHao
// Version : 1.0.0
// Mail : <EMAIL>
// Copyright : XuBenHao 2020 - 2030
//
#ifndef MYSQL_AGENT_MYSQLAGENT_H
#define MYSQL_AGENT_MYSQLAGENT_H
#include "header.h"
// Wrapper around the MySQL C client API holding one connection handle and one
// prepared-statement handle.
class MySqlAgent
{
public:
public:
    // Takes host/user/password buffers with explicit lengths; presumably
    // copied into the fixed 100-byte members below — confirm inputs are
    // length-checked against that limit.
    MySqlAgent(
        char* pStrHost_,
        int nHostLen_,
        char* pStrUser_,
        int nUserLen_,
        char* pStrPassword_,
        int nPasswordLen_);
    // Opens the connection using the stored credentials.
    void Connect();
    ~MySqlAgent();
    // no use parameter bind to set sql and get result
    // for no parameter bind and no returning result sql
    bool Query(
        char* pContent_);
    // Result-set helpers for the plain (non-prepared) query path.
    bool StoreResult(MYSQL_RES** ppRes_);
    void FieldSeek(MYSQL_RES* pRes_, int nIndex_);
    int NumFields(MYSQL_RES* pRes_);
    MYSQL_FIELD* FetchField(MYSQL_RES* pRes_);
    MYSQL_ROW FetchRow(MYSQL_RES* pRes_);
    int NumRows(MYSQL_RES* pRes_);
    void FreeResult(MYSQL_RES* pRes_);
    // use parameter bind to set parameter and get result
    // 1.prepare
    // 2.bind parameter
    // 3.execute
    // 4.bind_result
    // 5.store result
    // 6.fetch
    // 7.free result
    bool Prepare(
        char* pContent_,
        int nLength_);
    bool BindParameter(MYSQL_BIND* pBinds_);
    bool Execute();
    bool BindResult(MYSQL_BIND* pBinds_);
    bool StoreResult();
    int Fetch();
    void FreeResult();
private:
    // Fixed-size credential storage used by Connect().
    char m_strHost[100];
    char m_strUser[100];
    char m_strPassword[100];
    MYSQL *m_pConnection;
    MYSQL_STMT *m_pStmt;
};
#endif // MYSQLAGENT_H
|
#!/bin/bash
#
# Copyright 2017 Istio Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Script to configure and start the Istio sidecar.
set -e
# Load optional config variables
ISTIO_SIDECAR_CONFIG=${ISTIO_SIDECAR_CONFIG:-/var/lib/istio/envoy/sidecar.env}
if [[ -r ${ISTIO_SIDECAR_CONFIG} ]]; then
  # shellcheck disable=SC1090
  . "$ISTIO_SIDECAR_CONFIG"
fi
# Set defaults
ISTIO_BIN_BASE=${ISTIO_BIN_BASE:-/usr/local/bin}
ISTIO_LOG_DIR=${ISTIO_LOG_DIR:-/var/log/istio}
ISTIO_CFG=${ISTIO_CFG:-/var/lib/istio}
# Default kube config for istio components
# TODO: use different configs, with different service accounts, for ca/pilot/mixer
KUBECONFIG=${ISTIO_CFG}/kube.config
# TODO: use separate user for ca
# When started as root, drop privileges and run the CA as istio-proxy with
# stdout/stderr redirected to the Istio log directory; otherwise run it
# directly in the foreground as the current user.
if [ "$(id -u)" = "0" ] ; then
  exec su -s /bin/bash -c "${ISTIO_BIN_BASE}/istio_ca --self-signed-ca --kube-config ${KUBECONFIG} 2> ${ISTIO_LOG_DIR}/istio_ca.err.log > ${ISTIO_LOG_DIR}/istio_ca.log" istio-proxy
else
  "${ISTIO_BIN_BASE}/istio_ca" --self-signed-ca --kube-config "${KUBECONFIG}"
fi
|
def do_GET(self):
"""
Call on a GET request and parses the URL parameters of the request.
It then calls the GET() method.
"""
parsed_url = urlparse(self.path)
query_params = parse_qs(parsed_url.query)
# Assuming GET() method is implemented elsewhere
# Call the GET method with the parsed parameters
result = GET(query_params)
# Assuming self.respond() method is implemented elsewhere
# Respond with the result and content type
self.respond(result.encode(), "text/html")
|
#!/bin/sh
# Pushes the regenerated ATT&CK data file back to the repository from a
# Travis CI build, committing as the bot account configured below.
setup_git() {
  git config --global user.email "45767933+joshswimlane@users.noreply.github.com"
  git config --global user.name "joshswimlane"
}
# Stage and commit the generated data file, tagging the commit with the CI build number.
commit_website_files() {
  git add generated_attck_data.json
  git commit --message "Travis build: $TRAVIS_BUILD_NUMBER"
}
# NOTE(review): --force push to master can discard upstream commits — confirm intended.
upload_files() {
  git push origin master --quiet --force
}
setup_git
commit_website_files
upload_files
|
def check_duplicates(A):
    """Return True if any element of A appears more than once, else False.

    Single pass with a membership set; stops at the first duplicate, exactly
    like an explicit loop with an early return.
    """
    observed = set()
    # set.add() returns None (falsy), so the `or` arm only records the value;
    # any() yields True the first time a value is already in `observed`.
    return any(x in observed or observed.add(x) for x in A)
|
import React from 'react';
import { Grid } from '@material-ui/core';
import stock1 from '../../../assets/images/stock-photos/stock-6.jpg';
import stock2 from '../../../assets/images/stock-photos/stock-7.jpg';
// Marketing/preview section: a 2x2 grid alternating stock imagery with
// light and dark promo panels for the admin template, each panel showing
// the same headline, blurb and component/application/widget counters.
export default function LivePreviewExample() {
  // Static markup only — the anchors swallow clicks via preventDefault.
  return (
    <>
      <div className="mb-spacing-6">
        <Grid container spacing={0}>
          <Grid item xl={6} className="overflow-hidden d-flex">
            <a
              href="#/"
              onClick={(e) => e.preventDefault()}
              className="d-block w-100 hover-scale-rounded">
              <img src={stock1} className="img-fit-container" alt="..." />
            </a>
          </Grid>
          <Grid item xl={6} className="bg-white d-flex align-items-center">
            <div className="p-4 p-lg-5">
              <h1 className="display-4 font-weight-bold mb-4">
                Bamburgh React Admin Dashboard with Material-UI PRO
              </h1>
              <p className="font-size-xl text-black-50">
                Premium admin template powered by the most popular UI components
                framework available for React: Material-UI. Features hundreds of
                examples making web development fast and easy. Start from one of
                the individual apps included or from the general dashboard and
                build beautiful scalable applications and presentation websites.
              </p>
              <div className="d-flex text-black-50 font-size-lg align-items-center flex-wrap mt-4">
                <div className="pr-5">
                  <div className="display-4 font-weight-bold text-success pb-2">
                    +300
                  </div>
                  <div>components</div>
                </div>
                <div className="pr-5">
                  <div className="display-4 font-weight-bold text-success pb-2">
                    +10
                  </div>
                  <div>applications</div>
                </div>
                <div className="pr-5">
                  <div className="display-4 font-weight-bold text-success pb-2">
                    +90
                  </div>
                  <div>widgets</div>
                </div>
              </div>
            </div>
          </Grid>
          <Grid
            item
            xl={6}
            className="bg-second d-flex align-items-center text-white">
            <div className="p-4 text-center p-lg-5">
              <h1 className="display-4 font-weight-bold mb-4">
                Bamburgh React Admin Dashboard with Material-UI PRO
              </h1>
              <p className="font-size-xl text-white-50">
                Premium admin template powered by the most popular UI components
                framework available for React: Material-UI. Features hundreds of
                examples making web development fast and easy. Start from one of
                the individual apps included or from the general dashboard and
                build beautiful scalable applications and presentation websites.
              </p>
              <div className="d-flex text-white-50 font-size-lg justify-content-center align-items-center flex-wrap mt-4">
                <div className="px-3 text-left">
                  <div className="display-4 font-weight-bold text-warning pb-2">
                    +300
                  </div>
                  <div>components</div>
                </div>
                <div className="px-3 text-left">
                  <div className="display-4 font-weight-bold text-warning pb-2">
                    +10
                  </div>
                  <div>applications</div>
                </div>
                <div className="px-3 text-left">
                  <div className="display-4 font-weight-bold text-warning pb-2">
                    +90
                  </div>
                  <div>widgets</div>
                </div>
              </div>
            </div>
          </Grid>
          <Grid item xl={6} className="overflow-hidden d-flex">
            <a
              href="#/"
              onClick={(e) => e.preventDefault()}
              className="d-block w-100 hover-scale-rounded">
              <img src={stock2} className="img-fit-container" alt="..." />
            </a>
          </Grid>
        </Grid>
      </div>
    </>
  );
}
|
<reponame>ttiurani/extendedmind
/* Copyright 2013-2016 Extended Mind Technologies Oy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
function TagEditorController($q, $rootScope, $scope, TagsService) {
// INITIALIZING
if (angular.isFunction($scope.registerFeatureEditorAboutToCloseCallback))
$scope.registerFeatureEditorAboutToCloseCallback(tagEditorAboutToClose, 'TagEditorController');
var parentPickerOpen;
// VISIBILITY
$scope.showTagEditorComponent = function(componentName) {
switch (componentName) {
case 'collapsible':
return $scope.collapsibleOpen && !$scope.isPropertyInDedicatedEdit();
case 'lessMore':
return $scope.tag.created && !$scope.isPropertyInDedicatedEdit();
case 'parentPicker':
return parentPickerOpen;
}
};
$scope.showTagProperty = function(propertyName) {
switch (propertyName) {
case 'parent':
return !$scope.tagIsParent($scope.tag) && !$scope.isPropertyInDedicatedEdit();
}
};
$scope.showTagAction = function(actionName){
switch (actionName){
case 'favoriteContext':
return $scope.tag.trans.tagType === 'context' &&
$scope.showEditorProperty('title') &&
$scope.isPersonalData();
}
};
$scope.collapsibleOpen = false;
$scope.toggleCollapsible = function() {
$scope.collapsibleOpen = !$scope.collapsibleOpen;
};
// PARENT PICKER
$scope.openParentPicker = function() {
parentPickerOpen = true;
};
$scope.closeParentPicker = function() {
parentPickerOpen = false;
};
$scope.closeParentPickerAndSetParentToTag = function(tag, parentTag) {
function doCloseAndSave() {
$scope.closeParentPicker();
tag.trans.parent = parentTag;
}
if (!parentTag.trans.uuid) {
// Parent tag is new, save it first. Close parent picker on error saving new parent.
$scope.saveTag(parentTag).then(doCloseAndSave, $scope.closeParentPicker);
}
else {
doCloseAndSave();
}
};
$scope.closeParentPickerAndClearParentFromTag = function(tag, parentTag) {
$scope.closeParentPicker();
if (tag.trans.parent === parentTag){
tag.trans.parent = undefined;
}
};
$scope.tagIsParent = function(tag) {
var tags = $scope.getTagsArray('all');
for (var i = 0; i < tags.length; i++) {
if (tags[i].trans.parent && tags[i].trans.parent.trans.uuid === tag.trans.uuid) {
return true;
}
}
return false;
};
$scope.getParentTagArray = function() {
if ($scope.tag.trans.tagType === 'context'){
return $scope.getTagsArray('contextsParentless', {owner: $scope.tag.trans.owner});
}else if ($scope.tag.trans.tagType === 'keyword'){
return $scope.getTagsArray('keywordsParentless', {owner: $scope.tag.trans.owner});
}
};
$scope.getNewParentTag = function() {
if ($scope.tag.trans.tagType === 'context'){
return TagsService.getNewTag({tagType: 'context'}, $scope.tag.trans.owner);
}else if ($scope.tag.trans.tagType === 'keyword'){
return TagsService.getNewTag({tagType: 'keyword'}, $scope.tag.trans.owner);
}
};
$scope.getTagParentText = function(tag) {
if (!tag.trans.parent || tag.trans.parent.trans.deleted){
return 'select parent ' + tag.trans.tagType + '\u2026';
}else if (tag.trans.tagType === 'keyword'){
return '#' + tag.trans.parent.trans.title;
}else if (tag.trans.tagType === 'context'){
return '@' + tag.trans.parent.trans.title;
}
};
function isSubEditorOpenInTagEditor(){
return parentPickerOpen ;
}
$scope.registerIsSubEditorOpenCondition(isSubEditorOpenInTagEditor);
// SAVING, DELETING
function saveTagInEdit() {
$scope.deferEdit().then(function() {
if ($scope.tag.trans.tagType === 'context'){
$scope.saveContext($scope.tag);
}else if ($scope.tag.trans.tagType === 'keyword'){
$scope.saveKeyword($scope.tag);
}
});
}
var deleting = false;
$scope.deleteTagInEdit = function() {
deleting = true;
var deferredDelete;
if ($scope.tag.trans.tagType === 'context'){
deferredDelete = $scope.deleteContext($scope.tag);
}else if ($scope.tag.trans.tagType === 'keyword'){
deferredDelete = $scope.deleteKeyword($scope.tag);
}
deferredDelete.then(function(){
$scope.closeEditor();
});
};
$scope.undeleteTagInEdit = function() {
if ($scope.tag.trans.tagType === 'context'){
$scope.undeleteContext($scope.tag);
}else if ($scope.tag.trans.tagType === 'keyword'){
$scope.undeleteKeyword($scope.tag);
}
};
$scope.endTagEdit = function() {
$scope.closeEditor();
};
function tagEditorAboutToClose() {
if (angular.isFunction($scope.unregisterEditorAboutToCloseCallback))
$scope.unregisterEditorAboutToCloseCallback('TagEditorController');
if ($scope.isEdited($scope.tag) && !$scope.tag.trans.deleted){
saveTagInEdit();
} else if (deleting){
$scope.swipeToContextsAndReset();
deleting = false;
} else {
TagsService.resetTag($scope.tag);
}
}
// TITLE HANDLING
var gotoTitleCallback;
$scope.gotoTagTitle = function() {
if (typeof gotoTitleCallback === 'function') gotoTitleCallback();
};
$scope.registerGotoTagTitleCallback = function(callback) {
gotoTitleCallback = callback;
};
// TITLEBAR
$scope.tagTitlebarTextKeyDown = function (keydownEvent) {
$scope.handleBasicTitlebarKeydown(keydownEvent, $scope.item);
// Return
if (event.keyCode === 13){
if($scope.titlebarHasText()) {
// Enter in editor saves, no line breaks allowed
$scope.closeEditor();
saveTagInEdit();
}
event.preventDefault();
event.stopPropagation();
}
};
$scope.getPrefix = function(tag) {
if (tag.trans.title && tag.trans.title.length) {
if (tag.trans.tagType === 'context')
return '\u0040'; // @ (commercial at)
else if (tag.trans.tagType === 'keyword')
return '\u0023'; // # (number sign)
}
};
$scope.getTagPropertyNameInEdit = function() {
var propertyName = $scope.getPropertyNameInEdit();
if (!propertyName) {
if (parentPickerOpen) {
propertyName = 'parent';
}
}
return propertyName;
};
// FAVORITING
// Toggles the favorite status of the context in edit.
$scope.clickFavorite = function() {
  if ($scope.isFavoriteContext($scope.tag)){
    $scope.unfavoriteContext($scope.tag);
  }else{
    $scope.favoriteContext($scope.tag);
  }
};
// WATCH FOR CHANGES
// Installs a digest-cycle watch that autosaves the tag; returns the
// deregistration function.
function setTagWatch(){
  return $scope.$watch(function() {
    // Autosave on every tick. Function is debounced so it can be called every digest
    if (!$scope.isAutoSavingPrevented()) $scope.autoSave($scope.tag);
  });
}
var clearTagWatch = setTagWatch();
// REINITIALIZING
// Re-arms the autosave watch and clears save status when the editor is
// reused for another tag. (Renamed from reinitializeNoteEditor: the old
// name was a copy/paste leftover from the note editor controller.)
function reinitializeTagEditor(){
  clearTagWatch();
  clearTagWatch = setTagWatch();
  $scope.resetSaveStatus();
}
$scope.registerReinitializeEditorCallback(reinitializeTagEditor);
// CLEAN UP
// On scope teardown: stop the autosave watch and unregister editor callbacks.
$scope.$on('$destroy', function() {
  clearTagWatch();
  if (angular.isFunction($scope.unregisterReinitializeEditorCallback))
    $scope.unregisterReinitializeEditorCallback();
  if (angular.isFunction($scope.unregisterEditorAboutToCloseCallback)){
    $scope.unregisterEditorAboutToCloseCallback();
  }
});
}
// Minification-safe DI annotations, then controller registration.
TagEditorController['$inject'] = ['$q', '$rootScope', '$scope', 'TagsService'];
angular.module('em.main').controller('TagEditorController', TagEditorController);
|
def permutations(arr):
    """Return all permutations of ``arr`` as a list of lists.

    The empty sequence has exactly one permutation, the empty list
    (the old code returned ``[]`` for it). The base case also copies the
    input instead of aliasing it, so callers can't mutate results by
    accident.
    """
    if len(arr) <= 1:
        return [list(arr)]
    result = []
    for i, item in enumerate(arr):
        # Exclude position i by slicing rather than list.remove(), which
        # always drops the *first* equal element.
        rest = list(arr[:i]) + list(arr[i + 1:])
        for per in permutations(rest):
            result.append([item] + per)
    return result
# Demo: prints the 6 permutations of [1, 2, 3].
print(permutations([1, 2, 3]))
|
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
public class EntityNameExtractor
{
    /// <summary>
    /// Scans a code snippet for modelBuilder.Entity("Name") calls and
    /// returns the quoted entity names in order of appearance.
    /// </summary>
    public List<string> ExtractEntityNames(string codeSnippet)
    {
        // Capture the first string argument of each modelBuilder.Entity("...") call.
        string pattern = @"modelBuilder\.Entity\(""(.*?)""";
        var entityNames = new List<string>();
        foreach (Match m in Regex.Matches(codeSnippet, pattern))
        {
            entityNames.Add(m.Groups[1].Value);
        }
        return entityNames;
    }
}
|
<filename>voltcraft/__init__.py
"""voltcraft python module"""
# Resolve the installed package version from generated metadata; fall back to
# a placeholder when running from a source tree without an installed build.
try:
    from voltcraft._version import version as __version__
except ImportError:
    __version__ = "not-installed"
__author__ = "<NAME>"
|
<filename>src/test/java/io/bdrc/xmltoldmigration/MigrationTest.java
package io.bdrc.xmltoldmigration;
import static io.bdrc.libraries.LangStrings.EWTS_TAG;
import static io.bdrc.xmltoldmigration.MigrationHelpers.OUTPUT_STTL;
import static io.bdrc.xmltoldmigration.MigrationHelpers.OUTPUT_TRIG;
import static io.bdrc.libraries.Models.BDA;
import static io.bdrc.libraries.Models.BDR;
import static io.bdrc.libraries.Models.setPrefixes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.validation.Validator;
import javax.xml.xpath.XPathExpressionException;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.github.jsonldjava.core.JsonLdError;
import io.bdrc.ewtsconverter.EwtsConverter;
import io.bdrc.xmltoldmigration.helpers.ExceptionHelper;
import io.bdrc.xmltoldmigration.helpers.ImageListTranslation;
import io.bdrc.xmltoldmigration.helpers.SymetricNormalization;
import io.bdrc.xmltoldmigration.xml2files.CommonMigration;
import io.bdrc.xmltoldmigration.xml2files.EtextBodyMigration;
import io.bdrc.xmltoldmigration.xml2files.EtextMigration;
import io.bdrc.xmltoldmigration.xml2files.EtextMigration.EtextInfos;
import io.bdrc.xmltoldmigration.xml2files.ImagegroupMigration;
import io.bdrc.xmltoldmigration.xml2files.OutlineMigration;
import io.bdrc.xmltoldmigration.xml2files.PersonMigration;
import io.bdrc.xmltoldmigration.xml2files.ProductMigration;
import io.bdrc.xmltoldmigration.xml2files.PubinfoMigration;
import io.bdrc.xmltoldmigration.xml2files.WorkMigration;
import io.bdrc.xmltoldmigration.xml2files.WorkMigration.WorkModelInfo;
/**
* Unit test for simple App.
*/
public class MigrationTest
{
    // Root of the XML fixtures and expected TTL files used by these tests.
    final static String TESTDIR = "src/test/";
    // Shared EWTS (Extended Wylie) to Unicode converter.
    public static final EwtsConverter converter = new EwtsConverter();
    // Configure migration globals once for the whole test run.
    @BeforeClass
    public static void init() throws NoSuchAlgorithmException {
        SymetricNormalization.normalizeOneDirection(true, false);
        WorkMigration.splitItems = false;
        EtextMigration.testMode = true;
    }
    // Release shared resources and dump pending migration additions.
    @AfterClass
    public static void close() {
        CommonMigration.speller.close();
        ExceptionHelper.closeAll();
        System.out.println("finishing with the following triples to be added:");
        System.out.println(SymetricNormalization.triplesToAdd.toString());
        System.out.println("and the following person events to be added in places:");
        System.out.println(PersonMigration.placeEvents.toString());
    }
    // Flushes the shared migration log so output is visible after each test.
    public void flushLog() {
        try {
            MigrationHelpers.logWriter.flush();
        } catch (IOException e) {
            // Log flushing is best-effort; just report the failure.
            e.printStackTrace();
        }
    }
    // Prints the statements present in only one of two models, to help debug
    // failed isomorphism assertions.
    public void showDifference (Model src, Model dst) {
        Model plus = dst.difference(src);
        Model minus = src.difference(dst);
        System.out.println("plus:");
        System.out.println(plus.toString());
        System.out.println("minus:");
        System.out.println(minus.toString());
    }
    // Converts an EWTS string to Unicode, logging the result and any warnings.
    public String toUnicode(String s, List<String>conversionWarnings) {
        String convertedValue = converter.toUnicode(s, conversionWarnings, true);
        System.out.println("converting \""+s+"\" into "+convertedValue);
        if (conversionWarnings.size() > 0) {
            System.out.println("with warnings: "+String.join(", ", conversionWarnings));
        }
        return convertedValue;
    }
public void assertAbstractTitle(final String title, final String expected, final Model m) {
final Literal origL = m.createLiteral(title, "bo-x-ewts");
final Literal translformedL = CommonMigration.abstractTitle(origL, m);
System.out.println(translformedL.getString());
assert(expected.equals(translformedL.getString()));
}
    // Exercises title abstraction on common prefixes/suffixes seen in the data.
    @Test
    public void testAbstractTitle() {
        Model m = ModelFactory.createDefaultModel();
        assertAbstractTitle("test bzhugs so", "test/", m);
        assertAbstractTitle("test bzhugs so/", "test/", m);
        // NOTE(review): the line below duplicates the previous case — possibly
        // meant to cover a different input; confirm with the author.
        assertAbstractTitle("test bzhugs so/", "test/", m);
        assertAbstractTitle("(ya) yang bzlog 'phyong /", "yang bzlog 'phyong /", m);
        assertAbstractTitle("(ya)_gsang ba sbas ston las/_sman gyi gzhung shes yig chung /(phyi/_kha/_85)", "gsang ba sbas ston las/_sman gyi gzhung shes yig chung /", m);
        assertAbstractTitle("(ya)bla ma'i rnal 'byor zab mo nyams len gnad kyi snying po/", "bla ma'i rnal 'byor zab mo nyams len gnad kyi snying po/", m);
        assertAbstractTitle("4)_nyams ldan sogs kha shas kyi rtsom dpe/", "nyams ldan sogs kha shas kyi rtsom dpe/", m);
        assertAbstractTitle("ka1)_bshad bya gzhung gi rang bzhin ngos bzung ba/", "bshad bya gzhung gi rang bzhin ngos bzung ba/", m);
    }
@Test
public void testGetLiteral() {
try {
System.out.println("testing getLiteral");
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
Document doc = builder.newDocument();
Element elem = doc.createElementNS("http://www.tbrc.org/models/work#", "w:title");
elem.setTextContent("maṅgalatthadīpanī aṭṭhakathāmaṅgalasūtra");
elem.setAttribute("lang", "pāli");
elem.setAttribute("encoding", "kmfemc");
Model model = ModelFactory.createDefaultModel();
String dflt = EWTS_TAG;
String propHint = "title";
String RID = "W1FEMC010006";
Literal lit = CommonMigration.getLiteral(elem, dflt, model, propHint, RID, RID);
assertTrue("pi-x-kmfemc".equals(lit.getLanguage()));
} catch (Exception ex) {
ex.printStackTrace();
}
}
    // Migrates the W1FEMC010006 work fixture and compares against expected TTL.
    @Test
    public void testW1FEMC010006() throws IOException {
        System.out.println("testing W1FEMC010006");
        Document doc = MigrationHelpers.documentFromFileName(TESTDIR+"xml/W1FEMC010006.xml");
        Model fromXml = mergeModelInfoList(WorkMigration.MigrateWork(doc));
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/W1FEMC010006.ttl");
        //
        // // ==== TEMP DEBUG ====
        // fromXml.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testW1FEMC010006-fromXml.ttl"), "TTL");
        // correctModel.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testW1FEMC010006-correctModel.ttl"), "TTL");
        //
        //fromXml.write(System.out, "TTL");
        assertTrue( fromXml.isIsomorphicWith(correctModel) );
    }
    // Same as above for the W1FEMC020013 work fixture.
    @Test
    public void testW1FEMC020013() throws IOException {
        System.out.println("testing W1FEMC020013");
        Document doc = MigrationHelpers.documentFromFileName(TESTDIR+"xml/W1FEMC020013.xml");
        Model fromXml = mergeModelInfoList(WorkMigration.MigrateWork(doc));
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/W1FEMC020013.ttl");
        //
        // // ==== TEMP DEBUG ====
        // fromXml.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testW1FEMC020013-fromXml.ttl"), "TTL");
        // correctModel.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testW1FEMC020013-correctModel.ttl"), "TTL");
        //
        //fromXml.write(System.out, "TTL");
        assertTrue( fromXml.isIsomorphicWith(correctModel) );
    }
    // Checks normalization of treasuryoflives/tbrc URLs and RID extraction.
    @Test
    public void testUrlNormalization() {
        assertTrue(CommonMigration.normalizeToLUrl("http://treasuryoflives.org/biographies/abc").equals("https://www.treasuryoflives.org/biographies/abc"));
        assertTrue(CommonMigration.normalizeToLUrl("http://beta.treasuryoflives.org/biographies/abc").equals("https://www.treasuryoflives.org/biographies/abc"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://tbrc.org/#library_work_Object-W00EGS1016761").equals("W00EGS1016761"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://tbrc.org/link?RID=O2DB102429|O2DB1024292DB102470$W21634").equals("O2DB1024292DB102470"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://www.tbrc.org/link/?RID=O1KG4084|O1KG40841KG4095$W1KG3381#library_work_Object-O1KG4084|O1KG40841KG4095$W1KG3381").equals("O1KG40841KG4095"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://mercury.tbrc.org/link?RID=O3LS12537|O3LS125373LS13489$W8039").equals("O3LS125373LS13489"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://tbrc.org/?locale=bo#library_work_Object-W1PD107999").equals("W1PD107999"));
        assertTrue(CommonMigration.getRIDFromTbrcUrl("http://tbrc.org/link/?RID=T1CZ28#library_topic_Object-T1CZ28").equals("T1CZ28"));
    }
    // Checks whitespace collapsing (all Unicode space classes) and Tibetan
    // body-text normalization.
    @Test
    public void testNormalize() {
        assertTrue(CommonMigration.normalizeString("").equals(""));
        String allWhiteSpaces = " 1 \u0009 2 \n 3 \u000C 4 \r 5 \u0020 6 \u0085 7 \u00A0 8 \u1680 9 \u180E 10 \u2000 11 \u2001 12 \u2002 13 \u2003 14 \u2004 15 \u2005 16 \u2006 17 \u2007 18 \u2008 19 \u2009 20 \u200A 21 \u2028 22 \u2029 23 \u202F 24 \u205F 25 \u3000 26 \0 27 ";
        assertTrue(CommonMigration.normalizeString(allWhiteSpaces).equals("1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27"));
        assertTrue(EtextBodyMigration.normalizeString("ས་ཏ ེ་ དབུ་རུ་ ང་བ་ཡ ི ས། ཁམས་ག", "", "", true, "").equals("ས་ཏེ་དབུ་རུ་ ང་བ་ཡིས། ཁམས་ག"));
    }
    // Migrates person fixture P1331 (validating XML) and compares to expected TTL.
    @Test
    public void testP1331() throws IOException
    {
        System.out.println("testing P1331");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/P1331.xml");
        Validator validator = MigrationHelpers.getValidatorFor("person");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "person");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/P1331.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates person fixture P1583.
    @Test
    public void testP1583() throws IOException
    {
        System.out.println("testing P1583");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/P1583.xml");
        Validator validator = MigrationHelpers.getValidatorFor("person");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "person");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/P1583.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates place fixture G488; note the fixture deliberately fails schema
    // validation but must still migrate correctly.
    @Test
    public void testG488() throws JsonGenerationException, JsonLdError, IOException
    {
        System.out.println("testing G488");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/G488.xml");
        Validator validator = MigrationHelpers.getValidatorFor("place");
        assertFalse(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "place");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/G488.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates place fixture G844 (also intentionally invalid XML).
    @Test
    public void testG844() throws JsonGenerationException, JsonLdError, IOException
    {
        System.out.println("testing G844");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/G844.xml");
        Validator validator = MigrationHelpers.getValidatorFor("place");
        assertFalse(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "place");
        //fromXml.write(System.out, "TTL");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/G844.ttl");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates a product fixture of type "subscriber".
    @Test
    public void testSubscriber() throws IOException
    {
        System.out.println("testing product (subscriber)");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/PR99NCUL01.xml");
        Validator validator = MigrationHelpers.getValidatorFor("product");
        assertTrue(CommonMigration.documentValidates(d, validator));
        String prType = ProductMigration.getType(d);
        assertTrue(prType.contentEquals("subscriber"));
        Model fromXml = ProductMigration.MigrateSubscriber(d);
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/PR99NCUL01.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
@Test
public void testCollection() throws IOException
{
System.out.println("testing product (subscriber)");
Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/PR1CTC17.xml");
Validator validator = MigrationHelpers.getValidatorFor("product");
assertTrue(CommonMigration.documentValidates(d, validator));
String prType = ProductMigration.getType(d);
assertTrue(prType.contentEquals("collection"));
Model fromXml = ProductMigration.MigrateCollection(d);
Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/PR1CTC17.ttl");
//fromXml.write(System.out, "TTL");
assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
flushLog();
}
    // Migrates the corporation fixture and compares to expected TTL.
    @Test
    public void testCorporation() throws IOException
    {
        System.out.println("testing corporation");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/CorporationTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("corporation");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "corporation");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/CorporationTest.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
@Test
public void testWork() throws JsonLdError, JsonParseException, IOException
{
System.out.println("testing work");
Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/WorkTestFPL.xml");
Validator validator = MigrationHelpers.getValidatorFor("work");
assertFalse(CommonMigration.documentValidates(d, validator));
Model fromXml = mergeModelInfoList(WorkMigration.MigrateWork(d));
//fromXml.write(System.out, "TTL");
Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/WorkTestFPL.ttl");
//MigrationHelpers.modelToOutputStream(fromXml, System.out, "work", MigrationHelpers.OUTPUT_STTL, "");
//showDifference(fromXml, correctModel);
fromXml.write(System.out, "TTL");
assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
flushLog();
}
    // Merges a list of models into a single model with standard prefixes.
    public static Model mergeModelList(List<Model> list) {
        Model res = ModelFactory.createDefaultModel();
        setPrefixes(res);
        for (Model m : list) {
            if (m != null)
                res.add(m);
        }
        return res;
    }
    // Merges the models backing a list of resources into one model.
    public static Model mergeResources(List<Resource> list) {
        Model res = ModelFactory.createDefaultModel();
        setPrefixes(res);
        for (Resource r : list) {
            if (r != null)
                res.add(r.getModel());
        }
        return res;
    }
    // Merges the models carried by a list of WorkModelInfo wrappers.
    public static Model mergeModelInfoList(List<WorkModelInfo> list) {
        Model res = ModelFactory.createDefaultModel();
        setPrefixes(res);
        for (WorkModelInfo mi : list) {
            if (mi != null && mi.m != null)
                res.add(mi.m);
        }
        return res;
    }
    // Migrates the outline fixture and compares to expected TTL.
    @Test
    public void testOutline() throws JsonParseException, IOException, JsonLdError
    {
        System.out.println("testing outline");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/OutlineTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("outline");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = mergeModelInfoList(OutlineMigration.MigrateOutline(d));
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/OutlineTest.ttl");
        //showDifference(fromXml, correctModel);
        //fromXml.write(System.out, "TURTLE");
        //
        // // ==== TEMP DEBUG ====
        // fromXml.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testOutline-fromXml.ttl"), "TTL");
        // correctModel.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testOutline-correctModel.ttl"), "TTL");
        //
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
@Test
public void testPubinfo() throws JsonLdError, JsonParseException, IOException
{
System.out.println("testing pubinfo");
WorkMigration.splitItems = false;
Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/PubinfoTest.xml");
//assertTrue(CommonMigration.documentValidates(d, pubinfoValidator));
Model fromXml = mergeModelList(PubinfoMigration.MigratePubinfo(d));
Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/PubinfoTest.ttl");
fromXml.write(System.out, "TTL");
assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
flushLog();
}
    // Migrates the office fixture.
    @Test
    public void testOffice() throws IOException
    {
        System.out.println("testing office");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/OfficeTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("office");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "office");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/OfficeTest.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates the topic fixture.
    @Test
    public void testTopic() throws IOException
    {
        System.out.println("testing topic");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/TopicTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("topic");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "topic");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/TopicTest.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates the taxonomy fixture (validated against the outline schema).
    @Test
    public void testTaxonomy() throws IOException
    {
        System.out.println("testing taxonomy");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/TaxonomyTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("outline");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "taxonomy");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/TaxonomyTest.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    //@Test
    // we don't migrate scanrequests anymore
    public void testScanrequest() throws IOException
    {
        System.out.println("testing scanrequest");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/ScanrequestTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("scanrequest");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "scanrequest");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/ScanrequestTest.ttl");
        //MigrationHelpers.modelToOutputStream(fromXml, System.out, "item", MigrationHelpers.OUTPUT_STTL);
        //assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates a withdrawn record (no schema validation is applicable).
    @Test
    public void testWithdrawn() throws IOException
    {
        System.out.println("testing withdrawn record");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/withdrawn-test.xml");
        Model fromXml = MigrationHelpers.migrateWithdrawn(d, "office");
        //fromXml.write(System.out, "TTL");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/withdrawn-test.ttl");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates the imagegroup fixture.
    @Test
    public void testImagegroup() throws IOException
    {
        System.out.println("testing imagegroup");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/ImagegroupTest.xml");
        Validator validator = MigrationHelpers.getValidatorFor("imagegroup");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "imagegroup");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/ImagegroupTest.ttl");
        //fromXml.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Migrates lineage fixture L8LS14115.
    @Test
    public void testL8LS14115() throws IOException
    {
        System.out.println("testing lineage");
        Document d = MigrationHelpers.documentFromFileName(TESTDIR+"xml/L8LS14115.xml");
        Validator validator = MigrationHelpers.getValidatorFor("lineage");
        assertTrue(CommonMigration.documentValidates(d, validator));
        Model fromXml = MigrationHelpers.xmlToRdf(d, "lineage");
        Model correctModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/L8LS14115.ttl");
        //fromXml.write(System.out, "TTL");
        //
        // // ==== TEMP DEBUG ====
        // fromXml.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testL8LS14115-fromXml.ttl"), "TTL");
        // correctModel.write(new FileWriter("/Users/chris/BUDA/NEW_MIGRATION_TESTING/testL8LS14115-correctModel.ttl"), "TTL");
        //
        assertTrue( MigrationHelpers.isSimilarTo(fromXml, correctModel) );
        flushLog();
    }
    // Checks expansion of compact image-list notation ("file:count" runs,
    // optionally with a page-selection string) into file→page-number maps.
    @Test
    public void testImageList() {
        System.out.println("testing image list");
        Map<String,Integer> imageNums = ImageListTranslation.getImageNums("49050001.tif:3", null);
        Map<String,Integer> expected = new HashMap<>();
        expected.put("49050001.tif", 1);
        expected.put("49050002.tif", 2);
        expected.put("49050003.tif", 3);
        assertEquals(expected, imageNums);
        imageNums = ImageListTranslation.getImageNums("49050025.tif:3", null);
        expected = new HashMap<>();
        expected.put("49050025.tif", 1);
        expected.put("49050026.tif", 2);
        expected.put("49050027.tif", 3);
        assertEquals(expected, imageNums);
        // With a selection string, page numbers follow the file numbering and
        // skip the excluded pages.
        imageNums = ImageListTranslation.getImageNums("49050025.tif:2|49050028.tif:2", "1-24,27");
        expected = new HashMap<>();
        expected.put("49050025.tif", 25);
        expected.put("49050026.tif", 26);
        expected.put("49050028.tif", 28);
        expected.put("49050029.tif", 29);
        assertEquals(expected, imageNums);
    }
    // End-to-end etext migration: checks the produced etext/item models, the
    // extracted text content, the RTF-artifact regex, and a second run with
    // pagination kept.
    @Test
    public void testEtext() throws XPathExpressionException, IOException
    {
        System.out.println("testing etext");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Model itemModel = ModelFactory.createDefaultModel();
        MigrationHelpers.setPrefixes(itemModel, "item");
        EtextInfos ei = EtextMigration.migrateOneEtext(TESTDIR+"xml/EtextTest.xml", true, out, false, itemModel, true, BDA+"CP001", BDR+"PR0ET001");
        String computedContent = new String( out.toByteArray(), StandardCharsets.UTF_8 );
        assertTrue(ei.eInstanceId.equals("IE1CZ2485"));
        assertTrue(ei.etextId.equals("UT1CZ2485_001_0000"));
        //MigrationHelpers.modelToOutputStream(ei.etextModel, System.out, "etext", MigrationHelpers.OUTPUT_STTL, ei.etextId);
        //MigrationHelpers.modelToOutputStream(itemModel, System.out, "item", MigrationHelpers.OUTPUT_STTL, ei.itemId);
        //System.out.println(computedContent);
        Model correctEtextModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/EtextTest-etext.ttl");
        Model correctItemModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/EtextTest-item.ttl");
        String correctContent = new String(Files.readAllBytes(Paths.get(TESTDIR+"ttl/EtextTest-content.txt")));
        //ei.etextModel.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(ei.etextModel, correctEtextModel) );
        //itemModel.write(System.out, "TTL");
        assertTrue( MigrationHelpers.isSimilarTo(itemModel, correctItemModel) );
        assertTrue(computedContent.equals(correctContent.trim()));
        // The RTF-artifact pattern must not match plain page numbers but must
        // catch RTF field residue.
        assertFalse(EtextBodyMigration.rtfP.matcher(" 9 ").find());
        assertTrue(EtextBodyMigration.rtfP.matcher("1$0000270").find());
        assertTrue(EtextBodyMigration.rtfP.matcher("PAGE -PAGE 2--PAGE 1-").find());
        assertTrue(EtextBodyMigration.rtfP.matcher("PAGE \\* MERGEFORMAT 2").find());
        // test with different options:
        out = new ByteArrayOutputStream();
        itemModel = ModelFactory.createDefaultModel();
        MigrationHelpers.setPrefixes(itemModel, "item");
        ei = EtextMigration.migrateOneEtext(TESTDIR+"xml/EtextTest.xml", false, out, false, itemModel, true, BDA+"CP001", BDR+"PR0ET001");
        computedContent = new String( out.toByteArray(), StandardCharsets.UTF_8 );
        //System.out.println(computedContent);
        // this one is a bit bogus because it adds spaces in line milestones, but in real life data there is no lines when we must
        // no keep the pagination
        correctContent = new String(Files.readAllBytes(Paths.get(TESTDIR+"ttl/EtextTest-content-noPages.txt")));
        assertTrue(computedContent.equals(correctContent.trim()));
        flushLog();
    }
    // Resolves a volume number from a migrated imagegroup item model.
    @Test
    public void testEtextReadItem() throws XPathExpressionException, IOException
    {
        Model itemModel = MigrationHelpers.modelFromFileName(TESTDIR+"ttl/ImagegroupTest.ttl");
        int foundVol = EtextMigration.getVolumeNumber("4158", itemModel, null);
        assertTrue(foundVol == 1);
    }
    // Exercises qcdate parsing on the many date formats found in the data.
    // NOTE(review): this only prints results, it asserts nothing — consider
    // pinning expected values.
    @Test
    public void testQcDate() throws XPathExpressionException, IOException
    {
        Model m = ModelFactory.createDefaultModel();
        System.out.println(ImagegroupMigration.qcdateToXsdDate("2003", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("06-17-2004", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("06-17-04", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("17-06-04", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("05/14/02", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("06.03", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("10", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("8/17/05", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("7.2003", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("3/13/2002", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("04-03", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("-7-6-4", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("6-15-005", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("122902", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("2-6-4", m));
        System.out.println(ImagegroupMigration.qcdateToXsdDate("7--6-04", m));
        System.out.println(CommonMigration.oldstyleRIDsP.matcher("W22084").matches());
        System.out.println(CommonMigration.oldstyleRIDsP.matcher("W3CN1317").matches());
        System.out.println(CommonMigration.oldstyleRIDsP.matcher("R8LS12811").matches());
    }
    // Checks recognition of placeholder publication values ("s.l.", "[n.d.]"…).
    @Test
    public void testIsEmptyPubValue() throws XPathExpressionException, IOException
    {
        assertTrue(PubinfoMigration.isEmptyPubValue("s.l.]"));
        assertTrue(PubinfoMigration.isEmptyPubValue("s.l."));
        assertTrue(PubinfoMigration.isEmptyPubValue( "[n.d.]"));
        assertTrue(PubinfoMigration.isEmptyPubValue( "[ s. n. ]"));
        assertTrue(PubinfoMigration.isEmptyPubValue( "[s. n]"));
        assertFalse(PubinfoMigration.isEmptyPubValue("s.w.tashigangpa/"));
    }
}
|
<reponame>1aurabrown/ervell
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import styled from 'styled-components';
import StickyBreadcrumbPath from 'react/components/UI/StickyBreadcrumbPath';
import WithCurrentRoute from 'react/hocs/WithCurrentRoute';
import WithLoginStatus from 'react/hocs/WithLoginStatus';
// Container for breadcrumb option links.
const Options = styled.div`
`;

const Option = styled.a`
  display: block;

  /* Bug fix: the pseudo-class was written ":not(first-child)" (missing
     colon), which parses as a type selector that never matches, so the
     de-emphasized color was never applied. */
  &:last-child:not(:first-child) {
    color: ${x => x.theme.colors.gray.regular};

    &:hover {
      color: ${x => x.theme.colors.gray.semiBold};
    }
  }
`;
// Breadcrumb content for the feed routes: current section plus a link to
// switch to Explore.
const FeedOptions = (
  <Options>
    <div>Feed</div>
    <Option href="/explore">Explore</Option>
  </Options>
);

// Breadcrumb content for the explore routes; the link back to the feed is
// only rendered for logged-in users.
const ExploreOptions = ({ isLoggedIn }) => (
  <Options>
    <div>Explore</div>
    {isLoggedIn &&
      <Option href="/feed">Feed</Option>
    }
  </Options>
);

ExploreOptions.propTypes = {
  isLoggedIn: PropTypes.bool.isRequired,
};
class HomeBreadcrumb extends Component {
static propTypes = {
currentRoute: PropTypes.shape({
href: PropTypes.string.isRequired,
}).isRequired,
isLoggedIn: PropTypes.bool.isRequired,
}
render() {
const { currentRoute: { pathname }, isLoggedIn } = this.props;
const stuckChildren = (
<StickyBreadcrumbPath.Crumb>
{{
'/': 'Feed',
'/feed': 'Feed',
'/explore': 'Explore',
'/explore/channels': 'Explore',
'/explore/blocks': 'Explore',
'/notifications': 'Notifications',
}[pathname]}
</StickyBreadcrumbPath.Crumb>
);
return (
<StickyBreadcrumbPath stuckChildren={stuckChildren}>
<StickyBreadcrumbPath.Crumb>
{{
'/': FeedOptions,
'/feed': FeedOptions,
'/explore': ExploreOptions({ isLoggedIn }),
'/explore/channels': ExploreOptions({ isLoggedIn }),
'/explore/blocks': ExploreOptions({ isLoggedIn }),
'/notifications': 'Notifications',
}[pathname]}
</StickyBreadcrumbPath.Crumb>
</StickyBreadcrumbPath>
);
}
}
// Compose HOCs: login status and current route are injected as props.
export default WithLoginStatus(WithCurrentRoute(HomeBreadcrumb));
|
<filename>moisturizer/utils.py
# Basic JSON-like leaf types; their use is not visible in this chunk —
# presumably consumed elsewhere in the module (verify before removing).
PRIMITIVES = [int, bool, float, str, dict, list, type(None)]
def flatten_dict(nested, separator='.'):
    """Flatten a nested dict into a single level.

    Nested keys are joined with ``separator``; e.g. ``{'a': {'b': 1}}``
    becomes ``{'a.b': 1}``.
    """
    def items():
        for key, value in nested.items():
            if isinstance(value, dict):
                # Bug fix: propagate the custom separator into the recursive
                # call; previously nested levels always used the default '.'.
                for subkey, subvalue in flatten_dict(value, separator).items():
                    yield "{}{}{}".format(key,
                                          separator,
                                          subkey), subvalue
            else:
                yield key, value
    return dict(items())
def unflatten_dict(flatten, separator='.'):
    """Inverse of flatten_dict: expand separator-joined keys into nested dicts."""
    result = {}
    for compound_key, value in flatten.items():
        *parents, leaf = compound_key.split(separator)
        node = result
        # Walk/create the chain of parent dicts, then set the leaf value.
        for parent in parents:
            node = node.setdefault(parent, {})
        node[leaf] = value
    return result
|
<filename>track_oracle/file_formats/track_mitre_xml/file_format_mitre_xml.h
/*ckwg +5
* Copyright 2012-2016 by Kitware, Inc. All Rights Reserved. Please refer to
* KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
* Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
*/
#ifndef INCL_FILE_FORMAT_MITRE_BOX_XML_H
#define INCL_FILE_FORMAT_MITRE_BOX_XML_H
#include <vital/vital_config.h>
#include <track_oracle/file_formats/track_mitre_xml/track_mitre_xml_export.h>
#include <track_oracle/file_formats/file_format_base.h>
#include <track_oracle/file_formats/track_mitre_xml/track_mitre_xml.h>
namespace kwiver {
namespace track_oracle {
// File-format plugin for MITRE VIRAT query-track XML files (read-only).
class TRACK_MITRE_XML_EXPORT file_format_mitre_xml: public file_format_base
{
public:
  // Registers the format id/description and the "*.xml" filename glob.
  file_format_mitre_xml(): file_format_base( TF_MITRE_BOX_XML, "MITRE VIRAT query tracks" )
  {
    this->globs.push_back( "*.xml" );
  }
  virtual ~file_format_mitre_xml() {}
  // This format only supports reading from files.
  virtual int supported_operations() const { return FF_READ_FILE; }
  // return a dynamically-allocated instance of the schema
  virtual track_base_impl* schema_instance() const { return new track_mitre_xml_type(); }
  // Inspect the file and return true if it is of this format
  virtual bool inspect_file( const std::string& fn ) const;
  using file_format_base::read;
  // read tracks from the file
  virtual bool read( const std::string& fn,
                     track_handle_list_type& tracks ) const;
};
} // ...track_oracle
} // ...kwiver
#endif
|
package mim.auth.service.configuration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Security-related bean definitions for the auth service.
 */
@Configuration
public class SecurityConfiguration {
/**
 * Password hashing strategy used when storing and verifying credentials.
 * BCrypt embeds a per-hash random salt and an adjustable work factor.
 *
 * @return a BCrypt-backed {@link PasswordEncoder}
 */
@Bean
public PasswordEncoder passwordEncoder() {
return new BCryptPasswordEncoder();
}
}
|
#!/usr/bin/env bash
# Make a user-local Node.js installation (~/.node) visible on PATH/NODE_PATH,
# and append Homebrew's global node_modules directory when brew has node.

# .node bin: user-local node binaries take precedence over system ones.
if [ -d "$HOME/.node/bin" ]; then
export PATH="$HOME/.node/bin:$PATH"
fi
# .node node_modules: make user-local global modules resolvable by require().
if [ -d "$HOME/.node/lib/node_modules" ]; then
export NODE_PATH="$HOME/.node/lib/node_modules:$NODE_PATH"
fi
# If Homebrew has NOT installed npm, you should supplement
# your NODE_PATH with the npm module folder:
# /usr/local/lib/node_modules
# NOTE(review): the guard below actually fires when brew HAS a node install
# (`brew ls --versions node` prints non-empty) — confirm the intent matches
# the comment above.
if type "brew" &> /dev/null && [ "$(brew ls --versions node)" ]; then
export NODE_PATH="/usr/local/lib/node_modules:$NODE_PATH"
fi
|
# MIT License
# Copyright (c) 2020 Synergy Lab | Georgia Institute of Technology
# Author: William Won (william.won@gatech.edu)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# `DEFAULT_TEST_MODULE` defined in `DEFAULT_TEST_DIR/DEFAULT_TEST_MODULE.bsv` will be compiled and tested.
# Directory containing the Bluespec SystemVerilog testbenches.
DEFAULT_TEST_DIR="testbench"
# Default testbench module (expects testbench/AdderTest.bsv).
DEFAULT_TEST_MODULE="AdderTest"
|
<gh_stars>1-10
import React, {useEffect, useState} from "react";
import Workshop from "./Workshop";
import axios from "./AxiosInterceptor";
import {Button, Form, Table} from "react-bootstrap";
// Lists all workshops in a table and, for admin users, renders a form
// to create a new workshop.
// Props: `user` — current user (may be undefined); admin check uses user.role.
const WorkshopsList = props => {
// Workshops fetched from the API (and appended to on successful creation).
const [workshops, setWorkshops] = useState([]);
// Controlled inputs for the "create workshop" form below.
const [workshopTitle, setWorkshopTitle] = useState("");
const [workshopDescription, setWorkshopDescription] = useState("");
const [workshopPerfUnity, setWorkshopPerfUnity] = useState("");
const [workshopIntensityUnity, setWorkshopIntensityUnity] = useState("");
// POST the new workshop; on success, clear the form and append the created
// item (with the server-assigned id) to the local list.
// NOTE(review): uses the `workshops` value captured at render time — if two
// adds race, the later spread could drop an item; consider the functional
// setWorkshops(prev => ...) form.
const handleAddWorkshop = event => {
event.preventDefault();
axios.post(`api/workshops`,
{
title: workshopTitle,
description: workshopDescription,
perfUnity: workshopPerfUnity,
intensityUnity: workshopIntensityUnity
})
.then(response => {
console.log(response.data);
setWorkshopTitle("");
setWorkshopPerfUnity("");
setWorkshopIntensityUnity("");
setWorkshopDescription("");
setWorkshops([...workshops, {id: response.data.id, title: workshopTitle, description: workshopDescription, perfUnity: workshopPerfUnity, intensityUnity: workshopIntensityUnity}])
}, error => console.log(error));
}
// Initial load: fetch the workshop list once on mount.
useEffect(() => {
const fetchData = async () => {
await axios.get('api/workshops')
.then(response => {
setWorkshops(response.data)
}, error => console.log(error));
}
fetchData();
}, []);
return (
<div className="container">
<h1>Nos ateliers</h1>
<Table striped bordered hover>
<thead className="table-dark">
<tr>
<th scope="col">Image</th>
<th scope="col">Titre</th>
</tr>
</thead>
<tbody>
{workshops.map(currentWorkshop => <Workshop key={currentWorkshop.id} workshop={currentWorkshop}/>)}
</tbody>
</Table>
{props.user === undefined || props.user.role !== "ROLE_ADMIN" ? (
<></>
) :
(
<>
<Form>
<Form.Group controlId="formWorkshopTitle">
<Form.Label>Titre</Form.Label>
<Form.Control size="lg" type="text" value={workshopTitle} onChange={event => setWorkshopTitle(event.target.value)}/>
</Form.Group>
<Form.Group controlId="formWorkshopDescription">
<Form.Label>Description</Form.Label>
<Form.Control as="textarea" rows={10} cols={50} value={workshopDescription} onChange={event => setWorkshopDescription(event.target.value)}/>
</Form.Group>
<Form.Group controlId="formWorkshopPerfUnity">
<Form.Label>Unité de performance</Form.Label>
<Form.Control size="lg" type="text" value={workshopPerfUnity} onChange={event => setWorkshopPerfUnity(event.target.value)}/>
</Form.Group>
<Form.Group controlId="formWorkshopIntensityUnity">
<Form.Label>Unité d'intensité</Form.Label>
<Form.Control size="lg" type="text" value={workshopIntensityUnity} onChange={event => setWorkshopIntensityUnity(event.target.value)}/>
</Form.Group>
<br/>
<Button className="input-submit" onClick={handleAddWorkshop}>Valider</Button>
</Form>
</>
)
}
</div>
)
}
export default WorkshopsList;
|
def max_list(list):
    """Return the largest element of a non-empty sequence.

    :param list: sequence of mutually comparable values.
        (Name kept for backward compatibility with keyword callers,
        even though it shadows the builtin.)
    :returns: the maximum element.
    :raises ValueError: if the sequence is empty (previously this crashed
        with an unhelpful IndexError from ``list[0]``).
    """
    if not list:
        raise ValueError("max_list() arg is an empty sequence")
    largest = list[0]
    # Start after the first element; the original compared it against itself.
    for item in list[1:]:
        if item > largest:
            largest = item
    return largest
# Quick demo: find the maximum of a small mixed-sign sample.
sample_values = [15, -9, 58]
print("Max number in list is : ", max_list(sample_values))
|
<gh_stars>0
package eu.le_tian.iConsoleOS.data;
import androidx.room.Entity;
import androidx.room.PrimaryKey;
/**
 * Room entity representing one exercise session, stored in the "Exercise" table.
 * Getters/setters are required by Room for column mapping — keep them as-is.
 */
@Entity(tableName = "Exercise")
public class Exercise {
// Auto-generated primary key.
@PrimaryKey(autoGenerate = true)
private long exerciseID;
// Session start timestamp. NOTE(review): stored as a long epoch value —
// units (ms vs s) and timezone are not visible here; confirm against writers.
private long exStartDateTime;
// Session end timestamp (same representation as exStartDateTime).
private long exStopDateTime;
// Name of the exercise profile used for this session.
private String exProfileName;
// Foreign-key-style reference to the owning user's id.
private long userParentID;
public Exercise(long exerciseID, long exStartDateTime, long exStopDateTime, String exProfileName, long userParentID) {
this.exerciseID = exerciseID;
this.exStartDateTime = exStartDateTime;
this.exStopDateTime = exStopDateTime;
this.exProfileName = exProfileName;
this.userParentID = userParentID;
}
public long getExerciseID() {
return exerciseID;
}
public void setExerciseID(long exerciseID) {
this.exerciseID = exerciseID;
}
public long getExStartDateTime() {
return exStartDateTime;
}
public void setExStartDateTime(long exStartDateTime) {
this.exStartDateTime = exStartDateTime;
}
public long getExStopDateTime() {
return exStopDateTime;
}
public void setExStopDateTime(long exStopDateTime) {
this.exStopDateTime = exStopDateTime;
}
public String getExProfileName() {
return exProfileName;
}
public void setExProfileName(String exProfileName) {
this.exProfileName = exProfileName;
}
public long getUserParentID() {
return userParentID;
}
public void setUserParentID(long userParentID) {
this.userParentID = userParentID;
}
}
|
#include <stdio.h>
#include <string.h>  /* bug fix: strlen was used without this include */

/* Read one line from stdin and echo every lowercase vowel it contains
 * (uppercase vowels are deliberately not matched, as in the original). */
int main() {
    char sentence[100];

    /* Input sentence */
    printf("Enter a sentence: ");
    /* Bug fixes: the original format "%[^\n]s" had a stray trailing 's'
     * (the scanset conversion is just "%[^\n]"), and lacked a field width,
     * allowing a buffer overflow. %99 bounds the read to 99 chars + NUL.
     * Also check the return value: on EOF/failure the buffer would
     * otherwise be read uninitialized (undefined behavior). */
    if (scanf("%99[^\n]", sentence) != 1) {
        sentence[0] = '\0';
    }

    /* Hoist strlen out of the loop condition (it was recomputed every
     * iteration) and use size_t to match strlen's return type. */
    size_t len = strlen(sentence);
    for (size_t i = 0; i < len; i++) {
        /* If a vowel is detected, print it. */
        if (sentence[i] == 'a' || sentence[i] == 'e' || sentence[i] == 'i' ||
            sentence[i] == 'o' || sentence[i] == 'u')
            printf("%c", sentence[i]);
    }
    printf("\n");
    return 0;
}
|
package com.cjy.flb.activity;
import android.app.Activity;
import android.os.Handler;
import android.os.Message;
import android.view.View;
import android.widget.Button;
import com.cjy.flb.R;
import com.cjy.flb.utils.MHttpUtils;
import com.cjy.flb.utils.SharedPreUtil;
import com.cjy.flb.utils.ToastUtil;
import java.lang.ref.WeakReference;
/**
 * Screen for pairing/adding a "Flb" medicine box. Pressing the button starts a
 * 60-second countdown thread that ticks once per second and polls the server
 * every 5th second; the static Handler routes results back to the UI thread.
 */
public class AddFlbActivity extends BaseActivity implements View.OnClickListener {
private Button btnAddFlb;
// Remaining countdown seconds; reset to 60 when a retry is allowed.
private int count = 60;
private Thread threadTime;
private final AddFlbHandler mHandler = new AddFlbHandler(this);
// Static Handler holding only a WeakReference to the Activity, so a pending
// message cannot leak the Activity after it is destroyed.
static class AddFlbHandler extends Handler {
WeakReference<Activity> weakReference;
public AddFlbHandler(Activity activity) {
weakReference = new WeakReference<>(activity);
}
@Override
public void handleMessage(Message msg) {
AddFlbActivity activity = (AddFlbActivity) weakReference.get();
if (activity != null) {
// Message codes: 100 = success, 200 = countdown tick, 400 = error,
// 500 = reset/retry. NOTE(review): 100/400 presumably posted by
// MHttpUtils.addFromFlb — confirm against that class.
switch (msg.what) {
case 100:
ToastUtil.showShort(activity.getString(R.string.add_box_ok));
activity.threadTime.interrupt();
break;
case 200:
activity.countdownDisplay();
break;
case 400:
ToastUtil.showShort(activity.getString(R.string.add_box_error));
break;
case 500:
activity.setRepeat(); // reset so the user can retry
break;
default:
break;
}
}
super.handleMessage(msg);
}
}
// One countdown tick: poll the server every 5 seconds and update the
// button label with the remaining time.
private void countdownDisplay() {
String mId = SharedPreUtil.getString("FlbID");
String mSn = SharedPreUtil.getString("FlbSN");
if (count % 5 == 0) {
MHttpUtils.addFromFlb(mId, mSn, context, mHandler);
}
btnAddFlb.setText(getString(R.string.add_box_connection) + count + "s");
count--;
}
// Restore the button and counter so the user can try pairing again.
private void setRepeat() {
try {
btnAddFlb.setText(getString(R.string.add_box_retry));
btnAddFlb.setEnabled(true);
count = 60;
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void initView() {
setContentView(R.layout.activity_add_flb);
initBar(getString(R.string.add_medicine_box), false, false);
}
@Override
public void initData() {
btnAddFlb = (Button) findViewById(R.id.btn_addFlb);
}
@Override
public void initListener() {
btnAddFlb.setOnClickListener(this);
}
// Button click: start (or restart) the one-second tick thread and disable
// the button until the countdown finishes or succeeds.
@Override
public void onClick(View v) {
threadTime = new Thread() {
@Override
public void run() {
try {
while (!isInterrupted() && count > 0) {
mHandler.sendEmptyMessage(200);
sleep(1000);
}
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
// Always request a reset when the loop ends, however it ended.
mHandler.sendEmptyMessage(500);
}
}
};
threadTime.setPriority(10);
threadTime.start();
btnAddFlb.setEnabled(false);
}
@Override
protected void onDestroy() {
super.onDestroy();
// Stop the countdown thread so it does not outlive the Activity.
if (threadTime != null)
threadTime.interrupt();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.