import discord
from discord.http import Route
async def post_command(client, command, guild_id: int = None):
if guild_id:
r = Route('POST', f'/applications/{client.application_id}/guilds/{guild_id}/commands')
else:
r = Route('POST', f'/applications/{client.application_id}/commands')
return await client.http.request(r, json=command.to_dict())
async def patch_existing_command(client, old, new):
if old.guild_specific:
r = Route('PATCH', f'/applications/{old.application_id}/guilds/{old.guild_id}/commands/{old.id}')
else:
r = Route('PATCH', f'/applications/{old.application_id}/commands/{old.id}')
return await client.http.request(r, json=new.to_dict())
async def fetch_any_command(client, command_id: int, guild_id: int = None):
if guild_id:
r = Route('GET', f'/applications/{client.application_id}/guilds/{guild_id}/commands/{command_id}')
else:
r = Route('GET', f'/applications/{client.application_id}/commands/{command_id}')
return await client.http.request(r)
async def fetch_global_commands(client):
r = Route('GET', f'/applications/{client.application_id}/commands')
return await client.http.request(r)
async def fetch_guild_commands(client, guild_id: int):
r = Route('GET', f'/applications/{client.application_id}/guilds/{guild_id}/commands')
return await client.http.request(r)
async def fetch_overwrites(client, command_id: int, guild_id: int):
r = Route('GET', f'/applications/{client.application_id}/guilds/{guild_id}/commands/{command_id}/permissions')
return await client.http.request(r)
async def put_overwrites(client, command_id: int, guild_id: int, overwrites: dict):
r = Route('PUT',
f'/applications/{client.application_id}/guilds/{guild_id}/commands/{command_id}/permissions')
return await client.http.request(r, json=overwrites)
async def delete_command(client, command_id: int, guild_id: int = None):
if guild_id:
r = Route('DELETE', f'/applications/{client.application_id}/guilds/{guild_id}/commands/{command_id}')
else:
r = Route('DELETE', f'/applications/{client.application_id}/commands/{command_id}')
return await client.http.request(r)
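# Hedged usage sketch for the route helpers above: it assumes a connected
# discord.py Client whose application_id is populated, and a hypothetical
# `command` object exposing a to_dict() payload.
async def sync_command_example(client, command, guild_id: int = None):
    # Create (or overwrite) the command, then read it back to verify
    created = await post_command(client, command, guild_id=guild_id)
    return await fetch_any_command(client, int(created["id"]), guild_id=guild_id)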
|
<gh_stars>1-10
var _;
_ = Uint16Array.length;
_ = Uint16Array.name;
_ = Uint16Array.prototype;
_ = Uint16Array.BYTES_PER_ELEMENT;
new Uint16Array();
|
def string_separator(text, separator):
    # Split the string on whitespace
    words = text.split()
    # Join the words back together with the separator between them
    # (the original appended a trailing separator after the last word)
    return separator.join(words)
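# Example usage (assumes the join semantics above):
print(string_separator("a b c", ","))  # -> a,b,c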
|
"""
Create a program to replace all instances of a given substring in a string.
"""
def replace_substring(string, substring, replacement):
    # str.replace swaps every occurrence of the substring, even inside words
    return string.replace(substring, replacement)

# Test replace_substring()
print(replace_substring("one two one", "one", "1"))  # -> 1 two 1
|
<reponame>jcottobboni/inventorymaster
class AddlocationToProduct < ActiveRecord::Migration
def change
add_column :inventorymaster_products, :location_id, :integer
end
end
|
class TVShowDatabase:
def __init__(self, api_key):
self.api = TVShowDatabaseAPI(api_key)
def search_by_title(self, title):
try:
return self.api.search_by_title(title)
except Exception as e:
return f"Error occurred during title search: {e}"
def search_by_year(self, year):
try:
return self.api.search_by_year(year)
except Exception as e:
return f"Error occurred during year search: {e}"
def get_tvshow_info(self, title):
try:
return self.api.get_tvshow_info(title)
except Exception as e:
return f"Error occurred while retrieving TV show info: {e}"
class TVShowDatabaseAPI:
def __init__(self, api_key):
self.api_key = api_key
def search_by_title(self, title):
# Simulate querying TV show database by title
# Replace with actual API call
return ["TV Show 1", "TV Show 2", "TV Show 3"]
def search_by_year(self, year):
# Simulate querying TV show database by year
# Replace with actual API call
return ["TV Show 4", "TV Show 5"]
def get_tvshow_info(self, title):
# Simulate retrieving detailed information about a specific TV show
# Replace with actual API call
return {"title": title, "genre": "Action", "rating": 8.5}
|
import { SCREEN_BREAKPOINTS } from './constants';
export const isMobileScreen = () => screen.width < SCREEN_BREAKPOINTS.XS;
|
monetdbd create mydbfarm
monetdbd start mydbfarm
monetdb create voc
monetdb release voc
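# Hedged Python follow-up, assuming the pymonetdb driver is installed and
# the 'voc' database created above is running with default credentials.
import pymonetdb

conn = pymonetdb.connect(username="monetdb", password="monetdb",
                         hostname="localhost", database="voc")
cur = conn.cursor()
cur.execute("SELECT 42")
print(cur.fetchone())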
|
num1 = float(input("Enter your first number: "))
num2 = float(input("Enter your second number: "))
op = input("Enter the operator : ")
if op == "+":
result = num1 + num2
elif op == "-":
result = num1 - num2
elif op == "*":
result = num1 * num2
elif op == "/":
result = num1 / num2
print("The result is : ", result)
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os import remove, close, mkdir
from os.path import exists, join
from tempfile import mkstemp, mkdtemp
from shutil import rmtree
from unittest import TestCase, main
from six import StringIO
from future import standard_library
from functools import partial
from operator import itemgetter
import pandas as pd
from qiita_core.util import qiita_test_checker
import qiita_db as qdb
with standard_library.hooks():
import configparser
@qiita_test_checker()
class TestMakeStudyFromCmd(TestCase):
def setUp(self):
qdb.study.StudyPerson.create(
'SomeDude', '<EMAIL>', 'some',
'111 fake street', '111-121-1313')
qdb.user.User.create('<EMAIL>', 'password')
self.config1 = CONFIG_1
self.config2 = CONFIG_2
def test_make_study_from_cmd(self):
fh = StringIO(self.config1)
qdb.commands.load_study_from_cmd('<EMAIL>', 'newstudy', fh)
sql = ("select study_id from qiita.study where email = %s and "
"study_title = %s")
study_id = self.conn_handler.execute_fetchone(sql, ('<EMAIL>',
'newstudy'))
self.assertTrue(study_id is not None)
fh2 = StringIO(self.config2)
with self.assertRaises(configparser.NoOptionError):
qdb.commands.load_study_from_cmd('<EMAIL>', 'newstudy2', fh2)
@qiita_test_checker()
class TestLoadArtifactFromCmd(TestCase):
def setUp(self):
self.artifact_count = qdb.util.get_count('qiita.artifact')
self.fp_count = qdb.util.get_count('qiita.filepath')
self.files_to_remove = []
def tearDown(self):
for fp in self.files_to_remove:
if exists(fp):
remove(fp)
def test_load_artifact_from_cmd_error(self):
with self.assertRaises(ValueError):
qdb.commands.load_artifact_from_cmd(
["fp1", "fp2"], ["preprocessed_fasta"], "Demultiplexed",
parents=[1], dflt_params_id=10,
required_params='{"input_data": 1}')
with self.assertRaises(ValueError):
qdb.commands.load_artifact_from_cmd(
["fp1"], ["preprocessed_fasta"], "Demultiplexed",
parents=[1, 2], dflt_params_id=10)
def test_load_artifact_from_cmd_root(self):
fd, forward_fp = mkstemp(suffix='_forward.fastq.gz')
close(fd)
self.files_to_remove.append(forward_fp)
fd, reverse_fp = mkstemp(suffix='_reverse.fastq.gz')
close(fd)
self.files_to_remove.append(reverse_fp)
fd, barcodes_fp = mkstemp(suffix='_barcodes.fastq.gz')
close(fd)
self.files_to_remove.append(barcodes_fp)
fps = [forward_fp, reverse_fp, barcodes_fp]
for fp in fps:
with open(fp, 'w') as f:
f.write('\n')
ftypes = ['raw_forward_seqs', 'raw_reverse_seqs', 'raw_barcodes']
metadata = pd.DataFrame.from_dict(
{'SKB8.640193': {'center_name': 'ANL',
'primer': 'GTGCCAGCMGCCGCGGTAA',
'barcode': 'GTCCGCAAGTTA',
'run_prefix': "s_G1_L001_sequences",
'platform': 'ILLUMINA',
'instrument_model': 'Illumina MiSeq',
'library_construction_protocol': 'AAAA',
'experiment_design_description': 'BBBB'}},
orient='index', dtype=str)
pt = qdb.metadata_template.prep_template.PrepTemplate.create(
metadata, qdb.study.Study(1), "16S")
obs = qdb.commands.load_artifact_from_cmd(
fps, ftypes, 'FASTQ', prep_template=pt.id)
self.files_to_remove.extend([fp for _, fp, _ in obs.filepaths])
self.assertEqual(obs.id, self.artifact_count + 1)
self.assertTrue(
qdb.util.check_count('qiita.filepath', self.fp_count + 5))
def test_load_artifact_from_cmd_processed(self):
fd, file1 = mkstemp()
close(fd)
self.files_to_remove.append(file1)
fd, file2 = mkstemp()
close(fd)
self.files_to_remove.append(file2)
fps = [file1, file2]
ftypes = ['preprocessed_fasta', 'preprocessed_fastq']
for fp in fps:
with open(fp, 'w') as f:
f.write("\n")
obs = qdb.commands.load_artifact_from_cmd(
fps, ftypes, 'Demultiplexed', parents=[1], dflt_params_id=1,
required_params='{"input_data": 1}',
optional_params='{"min_per_read_length_fraction": 0.80}')
self.files_to_remove.extend([fp for _, fp, _ in obs.filepaths])
self.assertEqual(obs.id, self.artifact_count + 1)
self.assertTrue(
qdb.util.check_count('qiita.filepath', self.fp_count + 2))
def test_load_artifact_from_cmd_biom(self):
fd, otu_table_fp = mkstemp(suffix='_otu_table.biom')
close(fd)
self.files_to_remove.append(otu_table_fp)
fps = [otu_table_fp]
ftypes = ['biom']
for fp in fps:
with open(fp, 'w') as f:
f.write("\n")
obs = qdb.commands.load_artifact_from_cmd(
fps, ftypes, 'BIOM', parents=[3], dflt_params_id=10,
required_params='{"input_data": 3}')
self.files_to_remove.extend([fp for _, fp, _ in obs.filepaths])
self.assertEqual(obs.id, self.artifact_count + 1)
self.assertTrue(
qdb.util.check_count('qiita.filepath', self.fp_count + 1))
@qiita_test_checker()
class TestLoadSampleTemplateFromCmd(TestCase):
def setUp(self):
# Create a sample template file
self.st_contents = SAMPLE_TEMPLATE
# create a new study to attach the sample template
info = {
"timeseries_type_id": 1,
"metadata_complete": True,
"mixs_compliant": True,
"number_samples_collected": 4,
"number_samples_promised": 4,
"study_alias": "TestStudy",
"study_description": "Description of a test study",
"study_abstract": "No abstract right now...",
"emp_person_id": qdb.study.StudyPerson(2),
"principal_investigator_id": qdb.study.StudyPerson(3),
"lab_person_id": qdb.study.StudyPerson(1)
}
self.study = qdb.study.Study.create(
qdb.user.User('<EMAIL>'), "Test study", info)
def test_load_sample_template_from_cmd(self):
"""Correctly adds a sample template to the DB"""
fh = StringIO(self.st_contents)
st = qdb.commands.load_sample_template_from_cmd(fh, self.study.id)
self.assertEqual(st.id, self.study.id)
@qiita_test_checker()
class TestLoadPrepTemplateFromCmd(TestCase):
def setUp(self):
self.pt_contents = PREP_TEMPLATE
def test_load_prep_template_from_cmd(self):
"""Correctly adds a prep template to the DB"""
fh = StringIO(self.pt_contents)
st = qdb.commands.load_prep_template_from_cmd(fh, 1, '18S')
self.assertEqual(st.id, 3)
@qiita_test_checker()
class TestLoadParametersFromCmd(TestCase):
def setUp(self):
fd, self.fp = mkstemp(suffix='_params.txt')
close(fd)
fd, self.fp_wrong = mkstemp(suffix='_params.txt')
close(fd)
with open(self.fp, 'w') as f:
f.write(PARAMETERS)
with open(self.fp_wrong, 'w') as f:
f.write(PARAMETERS_ERROR)
self.files_to_remove = [self.fp, self.fp_wrong]
def tearDown(self):
for fp in self.files_to_remove:
if exists(fp):
remove(fp)
@qiita_test_checker()
class TestPatch(TestCase):
def setUp(self):
self.patches_dir = mkdtemp()
self.py_patches_dir = join(self.patches_dir, 'python_patches')
mkdir(self.py_patches_dir)
patch2_fp = join(self.patches_dir, '2.sql')
patch10_fp = join(self.patches_dir, '10.sql')
with open(patch2_fp, 'w') as f:
f.write("CREATE TABLE qiita.patchtest2 (testing integer);\n")
f.write("INSERT INTO qiita.patchtest2 VALUES (1);\n")
f.write("INSERT INTO qiita.patchtest2 VALUES (9);\n")
with open(patch10_fp, 'w') as f:
f.write("CREATE TABLE qiita.patchtest10 (testing integer);\n")
def tearDown(self):
rmtree(self.patches_dir)
        # The tests in this class are tightly tied to the state of the
        # database, so we make an exception and reset the DB after each test
        qdb.environment_manager.drop_and_rebuild_tst_database()
def _check_patchtest2(self, exists=True):
if exists:
assertion_fn = self.assertTrue
else:
assertion_fn = self.assertFalse
obs = self.conn_handler.execute_fetchone(
"""SELECT EXISTS(SELECT * FROM information_schema.tables
WHERE table_name = 'patchtest2')""")[0]
assertion_fn(obs)
if exists:
exp = [[1], [9]]
obs = self.conn_handler.execute_fetchall(
"""SELECT * FROM qiita.patchtest2 ORDER BY testing""")
self.assertEqual(obs, exp)
def _check_patchtest10(self):
obs = self.conn_handler.execute_fetchone(
"""SELECT EXISTS(SELECT * FROM information_schema.tables
WHERE table_name = 'patchtest10')""")[0]
self.assertTrue(obs)
exp = []
obs = self.conn_handler.execute_fetchall(
"""SELECT * FROM qiita.patchtest10""")
self.assertEqual(obs, exp)
def _assert_current_patch(self, patch_to_check):
current_patch = self.conn_handler.execute_fetchone(
"""SELECT current_patch FROM settings""")[0]
self.assertEqual(current_patch, patch_to_check)
def test_unpatched(self):
"""Test patching from unpatched state"""
# Reset the settings table to the unpatched state
self.conn_handler.execute(
"""UPDATE settings SET current_patch = 'unpatched'""")
self._assert_current_patch('unpatched')
qdb.environment_manager.patch(self.patches_dir)
self._check_patchtest2()
self._check_patchtest10()
self._assert_current_patch('10.sql')
def test_skip_patch(self):
"""Test patching from a patched state"""
self.conn_handler.execute(
"""UPDATE settings SET current_patch = '2.sql'""")
self._assert_current_patch('2.sql')
        # If the system tried to apply patch 2.sql again, this would error
qdb.environment_manager.patch(self.patches_dir)
self._assert_current_patch('10.sql')
self._check_patchtest10()
# Since we "tricked" the system, patchtest2 should not exist
self._check_patchtest2(exists=False)
def test_nonexistent_patch(self):
"""Test case where current patch does not exist"""
self.conn_handler.execute(
"""UPDATE settings SET current_patch = 'nope.sql'""")
self._assert_current_patch('nope.sql')
with self.assertRaises(RuntimeError):
qdb.environment_manager.patch(self.patches_dir)
def test_python_patch(self):
# Write a test python patch
patch10_py_fp = join(self.py_patches_dir, '10.py')
with open(patch10_py_fp, 'w') as f:
f.write(PY_PATCH)
# Reset the settings table to the unpatched state
self.conn_handler.execute(
"""UPDATE settings SET current_patch = 'unpatched'""")
self._assert_current_patch('unpatched')
qdb.environment_manager.patch(self.patches_dir)
obs = self.conn_handler.execute_fetchall(
"""SELECT testing FROM qiita.patchtest10""")
exp = [[1], [100]]
self.assertEqual(obs, exp)
self._assert_current_patch('10.sql')
@qiita_test_checker()
class TestUpdateArtifactFromCmd(TestCase):
def setUp(self):
fd, seqs_fp = mkstemp(suffix='_seqs.fastq')
close(fd)
fd, barcodes_fp = mkstemp(suffix='_barcodes.fastq')
close(fd)
self.filepaths = [seqs_fp, barcodes_fp]
self.checksums = []
for fp in sorted(self.filepaths):
with open(fp, 'w') as f:
f.write("%s\n" % fp)
self.checksums.append(qdb.util.compute_checksum(fp))
self.filepaths_types = ["raw_forward_seqs", "raw_barcodes"]
self._clean_up_files = [seqs_fp, barcodes_fp]
self.uploaded_files = qdb.util.get_files_from_uploads_folders("1")
        # The files for Artifact 1 don't exist, so create them
for _, fp, _ in qdb.artifact.Artifact(1).filepaths:
with open(fp, 'w') as f:
f.write('\n')
self._clean_up_files.append(fp)
def tearDown(self):
new_uploaded_files = qdb.util.get_files_from_uploads_folders("1")
new_files = set(new_uploaded_files).difference(self.uploaded_files)
path_builder = partial(
join, qdb.util.get_mountpoint("uploads")[0][1], '1')
self._clean_up_files.extend(
[path_builder(fp) for _, fp, _ in new_files])
for f in self._clean_up_files:
if exists(f):
remove(f)
def test_update_artifact_from_cmd_error(self):
with self.assertRaises(ValueError):
qdb.commands.update_artifact_from_cmd(
self.filepaths[1:], self.filepaths_types, 1)
with self.assertRaises(ValueError):
qdb.commands.update_artifact_from_cmd(
self.filepaths, self.filepaths_types[1:], 1)
def test_update_artifact_from_cmd(self):
artifact = qdb.commands.update_artifact_from_cmd(
self.filepaths, self.filepaths_types, 1)
for _, fp, _ in artifact.filepaths:
self._clean_up_files.append(fp)
for obs, exp in zip(sorted(artifact.filepaths, key=itemgetter(1)),
self.checksums):
self.assertEqual(qdb.util.compute_checksum(obs[1]), exp)
CONFIG_1 = """[required]
timeseries_type_id = 1
metadata_complete = True
mixs_compliant = True
principal_investigator = SomeDude, <EMAIL>, some
reprocess = False
study_alias = 'test study'
study_description = 'test study description'
study_abstract = 'study abstract'
efo_ids = 1,2,3,4
[optional]
number_samples_collected = 50
number_samples_promised = 25
lab_person = SomeDude, <EMAIL>, some
funding = 'funding source'
vamps_id = vamps_id
"""
CONFIG_2 = """[required]
timeseries_type_id = 1
metadata_complete = True
principal_investigator = SomeDude, <EMAIL>, some
reprocess = False
study_alias = 'test study'
study_description = 'test study description'
study_abstract = 'study abstract'
efo_ids = 1,2,3,4
[optional]
number_samples_collected = 50
number_samples_promised = 25
lab_person = SomeDude, <EMAIL>, some
funding = 'funding source'
vamps_id = vamps_id
"""
SAMPLE_TEMPLATE = (
"sample_name\trequired_sample_info_status\tcollection_timestamp\t"
"sample_type\tphysical_specimen_remaining\tphysical_specimen_location\t"
"dna_extracted\thost_subject_id\tTreatment\tDOB\tlatitude\tlongitude"
"\ttaxon_id\tscientific_name\tDescription\n"
"PC.354\treceived\t06/18/14 16:44:00\ttype_1\tTrue\tLocation_1\tTrue\t"
"HS_ID_PC.354\tControl\t20061218\t1.88401499993\t56.0003871552\t"
"9606\thomo sapiens\tControl_mouse_I.D._354\n"
"PC.593\treceived\t06/18/14 16:44:00\ttype_1\tTrue\tLocation_1\tTrue\t"
"HS_ID_PC.593\tControl\t20071210\t35.4079458313\t83.2595338611\t"
"9606\thomo sapiens\tControl_mouse_I.D._593\n"
"PC.607\treceived\t06/18/14 16:44:00\ttype_1\tTrue\tLocation_1\tTrue\t"
"HS_ID_PC.607\tFast\t20071112\t18.3175615444\t91.3713989729\t"
"9606\thomo sapiens\tFasting_mouse_I.D._607\n"
"PC.636\treceived\t06/18/14 16:44:00\ttype_1\tTrue\tLocation_1\tTrue\t"
"HS_ID_PC.636\tFast\t20080116\t31.0856060708\t4.16781143893\t"
"9606\thomo sapiens\tFasting_mouse_I.D._636")
PREP_TEMPLATE = (
'sample_name\tbarcode\tcenter_name\tcenter_project_name\t'
'description_prep\tebi_submission_accession\temp_status\tprimer\t'
'run_prefix\tstr_column\tplatform\tlibrary_construction_protocol\t'
'experiment_design_description\tinstrument_model\n'
'SKB7.640196\tCCTCTGAGAGCT\tANL\tTest Project\tskb7\tNone\tEMP\t'
'GTGCCAGCMGCCGCGGTAA\tts_G1_L001_sequences\tValue for sample 3\tA\tB\tC\t'
'Illumina MiSeq\n'
'SKB8.640193\tGTCCGCAAGTTA\tANL\tTest Project\tskb8\tNone\tEMP\t'
'GTGCCAGCMGCCGCGGTAA\tts_G1_L001_sequences\tValue for sample 1\tA\tB\tC\t'
'Illumina MiSeq\n'
'SKD8.640184\tCGTAGAGCTCTC\tANL\tTest Project\tskd8\tNone\tEMP\t'
'GTGCCAGCMGCCGCGGTAA\tts_G1_L001_sequences\tValue for sample 2\tA\tB\tC\t'
'Illumina MiSeq\n')
PY_PATCH = """
from qiita_db.study import Study
from qiita_db.sql_connection import TRN
study = Study(1)
with TRN:
sql = "INSERT INTO qiita.patchtest10 (testing) VALUES (%s)"
TRN.add(sql, [[study.id], [study.id*100]], many=True)
TRN.execute()
"""
PARAMETERS = """max_bad_run_length\t3
min_per_read_length_fraction\t0.75
sequence_max_n\t0
rev_comp_barcode\tFalse
rev_comp_mapping_barcodes\tFalse
rev_comp\tFalse
phred_quality_threshold\t3
barcode_type\thamming_8
max_barcode_errors\t1.5
"""
PARAMETERS_ERROR = """max_bad_run_length\t3\tmin_per_read_length_fraction\t0.75
sequence_max_n\t0
rev_comp_barcode\tFalse
rev_comp_mapping_barcodes\tFalse
rev_comp\tFalse
phred_quality_threshold\t3
barcode_type\thamming_8
max_barcode_errors\t1.5
"""
if __name__ == "__main__":
main()
|
<reponame>hazardousparticle/logiG510_LEDcontrol
#include "hidapi_mod.h"
#include "Logi510.h"
#include <iostream>
using namespace std;
//delay in ms between color cycles
#define SPEED 100
//button Q to break the loop
#define QUIT_KEY 0x51
HANDLE dev_handle = NULL;
int main(int argc, char* argv[])
{
dev_handle = kb_device_open(LOGI_510_VID, LOGI_510_PID);
if (NotValidHandle(dev_handle))//check if g510 found
{
cout << "No G510 keyboard found." << endl;
//if not check for a g110
dev_handle = kb_device_open(LOGI_510_VID, LOGI_110_PID);
if (NotValidHandle(dev_handle))
{
cout << "No G110 keyboard found." << endl;
return 0;
}
else
{
cout << "TODO: modify LOGI_510_COLOR_CHANGE_CMD according to g110" << endl;
cout << "G110 device found but currently not supported" << endl;
return 0;
}
}
Color *c1 = getL510_LEDColor(dev_handle);
unsigned char r = 0;
unsigned char g = 0;
unsigned char b = 0;
c1->separate(r, g, b);
printf_s("Read LED color: red=%02X, green=%02X, blue=%02X\r\n", r, g, b);
//FreeConsole();
Color c = Color(r,g,b);
HsvColor hsv;
hsv.s = (unsigned char)0xff;
hsv.v = (unsigned char)0xff;
hsv.h = (unsigned char)0;
while (1)
{
HsvToRgb(hsv, c);
setL510_LEDColor(dev_handle, &c);
hsv.h++;
if (hsv.h >= 0xff)
{
hsv.h = 0;
}
Sleep(SPEED);
//signal to quit
if (GetAsyncKeyState(QUIT_KEY))
{
cout << "Signal to quit..." << endl;
break;
}
}
setL510_LEDColor(dev_handle, c1);
delete c1;
c1 = nullptr;
kb_device_close(dev_handle);
return 0;
}
|
<reponame>savvasth96/fructose
package fwcd.fructose.swing;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
/**
 * Convenience class to allow lambda-implementations of {@link DocumentListener}.
*
* @author Fredrik
*
*/
@FunctionalInterface
public interface DocChangeListener extends DocumentListener {
void onChange(DocumentEvent e);
@Override
default void insertUpdate(DocumentEvent e) {
onChange(e);
}
@Override
default void removeUpdate(DocumentEvent e) {
onChange(e);
}
@Override
default void changedUpdate(DocumentEvent e) {
onChange(e);
}
}
|
#!/bin/bash
# shellcheck disable=SC1091
source functions.sh && init
set -o nounset
# defaults
# shellcheck disable=SC2207
disks=($(lsblk -dno name -e1,7,11 | sed 's|^|/dev/|' | sort))
stimer=$(date +%s)
# Look for active MD arrays
# shellcheck disable=SC2207
mdarrays=($(awk '/md/ {print $4}' /proc/partitions))
if ((${#mdarrays[*]} != 0)); then
for mdarray in "${mdarrays[@]}"; do
echo "MD array: $mdarray"
mdadm --stop "/dev/$mdarray"
# sometimes --remove fails, according to manpages seems we
# don't need it / are doing it wrong
mdadm --remove "/dev/$mdarray" || :
done
fi
# Wipe the filesystem and clear block on each block device
for bd in "${disks[@]}"; do
sgdisk -Z "$bd" &
done
for bd in "${disks[@]}"; do
    # -n makes wait return as soon as any job finishes, returning its exit status.
    # Without -n, wait only reports the exit status of the last exited process.
wait -n
done
if [[ -d /sys/firmware/efi ]]; then
for bootnum in $(efibootmgr | sed -n '/^Boot[0-9A-F]/ s|Boot\([0-9A-F]\{4\}\).*|\1|p'); do
efibootmgr -Bb "$bootnum"
done
fi
echo "Disk wipe finished."
## End installation
etimer=$(date +%s)
echo -e "${BYELLOW}Clean time: $((etimer - stimer))${NC}"
|
#!/usr/bin/env bash
# i=0;
# IFS=$'\n'
# for line in `env`; do
# VAR=`echo $line | sed 's/\([A-Za-z_1-9]*\)=.*$/\1/'`
# if [ "x$VAR" == "x_" ] || [ "x$VAR" == "xSHLVL" ]; then
# continue;
# fi
# ENV_VAR[$i]="$VAR";
# VAR_VAL[$i]=`echo $line | sed 's/\([A-Za-z_1-9]*\)=\(.*\)$/\2/'`
# i=$[$i+1];
# done
ENV_VAR=(`env | sed -n 's|^\([A-Z]*\)=[a-zA-Z0-9/]\{1,\}$|\1|p' | egrep -v SHLVL`)
I=0
while [ $I -lt ${#ENV_VAR[@]} ]; do
eval "VAL=\$${ENV_VAR[$I]}"
VAR_VAL[$I]=$VAL
I=$[$I+1]
done
cat <<EOF
<?xml version="1.0"?>
<oval_definitions xmlns:oval-def="http://oval.mitre.org/XMLSchema/oval-definitions-5" xmlns:oval="http://oval.mitre.org/XMLSchema/oval-common-5" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ind-def="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent" xmlns:unix-def="http://oval.mitre.org/XMLSchema/oval-definitions-5#unix" xmlns:lin-def="http://oval.mitre.org/XMLSchema/oval-definitions-5#linux" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5" xsi:schemaLocation="http://oval.mitre.org/XMLSchema/oval-definitions-5#unix unix-definitions-schema.xsd http://oval.mitre.org/XMLSchema/oval-definitions-5#independent independent-definitions-schema.xsd http://oval.mitre.org/XMLSchema/oval-definitions-5#linux linux-definitions-schema.xsd http://oval.mitre.org/XMLSchema/oval-definitions-5 oval-definitions-schema.xsd http://oval.mitre.org/XMLSchema/oval-common-5 oval-common-schema.xsd">
<generator>
<oval:product_name>environmentvariable</oval:product_name>
<oval:product_version>1.0</oval:product_version>
<oval:schema_version>5.4</oval:schema_version>
<oval:timestamp>2008-03-31T00:00:00-00:00</oval:timestamp>
</generator>
<definitions>
<definition class="compliance" version="1" id="oval:1:def:1"> <!-- comment="false" -->
<metadata>
<title></title>
<description></description>
</metadata>
<criteria>
<criteria operator="OR">
<criterion test_ref="oval:1:tst:1"/>
EOF
I=0
while [ $I -lt ${#ENV_VAR[@]} ]; do
cat <<EOF
<criterion test_ref="oval:1:tst:$[I+2]"/>
EOF
I=$[$I+1]
done
cat <<EOF
</criteria>
</criteria>
</definition>
</definitions>
<tests>
<!-- DEFAULT STATELESS OBJECT -->
<!-- check="all" -->
<environmentvariable_test version="1" id="oval:1:tst:1" check="at least one" comment="false" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<object object_ref="oval:1:obj:1"/>
<state state_ref="oval:1:ste:1"/>
</environmentvariable_test>
EOF
I=0
while [ $I -lt ${#ENV_VAR[@]} ]; do
cat <<EOF
<environmentvariable_test version="1" id="oval:1:tst:$[I+2]" check="at least one" comment="false" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<object object_ref="oval:1:obj:2"/>
<state state_ref="oval:1:ste:$[I+2]"/>
</environmentvariable_test>
EOF
I=$[$I+1]
done
cat <<EOF
</tests>
<objects>
<environmentvariable_object version="1" id="oval:1:obj:1" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<name operation="not equal">${ENV_VAR[1]}</name>
</environmentvariable_object>
<environmentvariable_object version="1" id="oval:1:obj:2" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<name operation="pattern match">_</name>
</environmentvariable_object>
</objects>
<states>
<environmentvariable_state version="1" id="oval:1:ste:1" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<name>${ENV_VAR[1]}</name>
<value>${VAR_VAL[1]}</value>
</environmentvariable_state>
EOF
I=0
while [ $I -lt ${#ENV_VAR[@]} ]; do
cat <<EOF
<environmentvariable_state version="1" id="oval:1:ste:$[$I+2]" xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#independent">
<name>${ENV_VAR[$I]}</name>
<value>${VAR_VAL[$I]}</value>
</environmentvariable_state>
EOF
I=$[$I+1]
done
cat <<EOF
</states>
</oval_definitions>
EOF
exit $(( ${#ENV_VAR[@]} + 1 ))
|
#!/usr/bin/env bash
{{!
Template adapted from here:
https://github.com/chriskempson/base16-builder/blob/master/templates/gnome-terminal/dark.sh.erb
}}
# Base16 Rosé Pine - Gnome Terminal color scheme install script
# Emilia Dunfelt <sayhi@dunfelt.se>
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Rosé Pine 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-rose-pine-256"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
dset() {
local key="$1"; shift
local val="$1"; shift
if [[ "$type" == "string" ]]; then
val="'$val'"
fi
"$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# Because dconf still doesn't have "append"
dlist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
# Check that uuidgen is available
type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed. Aborting!"; exit 1; }
[[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
if which "$UUIDGEN" > /dev/null 2>&1; then
PROFILE_SLUG=`uuidgen`
fi
if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
else
DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
fi
DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
# Copy existing settings from default profile
$DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
# Add new copy to list of profiles
dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
# Update profile values with theme options
dset visible-name "'$PROFILE_NAME'"
dset palette "['#191724', '#e2e1e7', '#ebbcba', '#f6c177', '#9ccfd8', '#c4a7e7', '#31748f', '#e0def4', '#555169', '#e2e1e7', '#ebbcba', '#f6c177', '#9ccfd8', '#c4a7e7', '#31748f', '#c5c3ce']"
dset background-color "'#191724'"
dset foreground-color "'#e0def4'"
dset bold-color "'#e0def4'"
dset bold-color-same-as-fg "true"
dset cursor-colors-set "true"
dset cursor-background-color "'#e0def4'"
dset cursor-foreground-color "'#191724'"
dset use-theme-colors "false"
dset use-theme-background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
exit 0
fi
fi
# Fallback for Gnome 2 and early Gnome 3
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
gset() {
local type="$1"; shift
local key="$1"; shift
local val="$1"; shift
"$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
glist_append() {
local type="$1"; shift
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "$val"
} | head -c-1 | tr "\n" ,
)"
"$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
gset string visible_name "$PROFILE_NAME"
gset string palette "#191724:#e2e1e7:#ebbcba:#f6c177:#9ccfd8:#c4a7e7:#31748f:#e0def4:#555169:#e2e1e7:#ebbcba:#f6c177:#9ccfd8:#c4a7e7:#31748f:#c5c3ce"
gset string background_color "#191724"
gset string foreground_color "#e0def4"
gset string bold_color "#e0def4"
gset bool bold_color_same_as_fg "true"
gset bool cursor_colors_set "true"
gset string cursor_background_color "#e0def4"
gset string cursor_foreground_color "#191724"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
def is_prime(n):
    # 0, 1 and negative numbers are not prime
    if n < 2:
        return False
    # Checking divisors up to sqrt(n) is sufficient
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return False
    return True
number = 22
if is_prime(number):
print(f"{number} is a prime number")
else:
print(f"{number} is not a prime number")
|
from __future__ import absolute_import
# import scipy.io as sio
import os
import matplotlib.pyplot as plt
###########################################
# ML and AI Procedures for FTIR/Raman Spectroscopy
#
#
#
###########################################
import numpy as np
# python 2.7 from sklearn.cross_validation import train_test_split
# from tqdm import tqdm
plt.style.use("ggplot")
###########################################
from pathlib import Path
MODELPATH = Path("openvibspec/models").absolute()
def prepare_dat2train(x, y, testsize=0.2, random=0, normalising="l2"):
"""
    FUNCTION PREPARING DATA FOR USE IN A RANDOM FOREST CLASSIFIER:
    Preparation includes splitting the data into a training and a test set and normalising it with sklearn's 'preprocessing.normalize'.
    Parameters
    ----------
    x : numpy array (2D)
        spectroscopic data with 2 dimensions: x*y, spectra = shape()
    y : numpy array with int (1D)
        classes per point (as int): x*y = shape(), e.g. output of kmeans or a similar clustering approach
    testsize : float
        fraction of the data used for testing. This value has to be below 1; e.g. '0.2'
        indicates that 20% of the data is used for testing, while 80% is used to fit (train) the model
    random : int
        seed for the 'random_state'
    normalising : str
        norm passed to sklearn 'preprocessing.normalize' (default 'l2')
    Returns
    -------
    X_train : numpy array (2D) of shape x*y, spectra = shape
        selected data points, a fraction equal to (1 - testsize)
    y_train : numpy array (1D)
        Corresponding classes to 'X_train'
    X_test : numpy array (2D) of shape x*y, spectra = shape
        selected data points, a fraction equal to testsize
    y_test : numpy array (1D)
        Corresponding classes to 'X_test'
"""
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
# X_normalized = preprocessing.normalize(X, norm='l2')
X_train, X_test, y_train, y_test = train_test_split(
np.nan_to_num(preprocessing.normalize(x, norm=normalising)),
y,
test_size=float(testsize),
random_state=int(random),
)
# sc = StandardScaler()
# X_train = sc.fit_transform(X_train)
# X_test = sc.transform(X_test)
return X_train, X_test, y_train, y_test
def randomforest_train(x, y, trees=20, jobs=2, random=0, out=1, save_file_path=str()):
# n_estimators=20,
# n_samples=250,
# n_features=4,
# n_informative=2,
# n_redundant=0,
# random_state=0,
# shuffle=False,
# n_jobs=2,
"""
Parameters
----------
    x : numpy array (2D)
        X_train from prepare_dat2train() with 2 dimensions: x*y, spectra = shape()
    y : numpy array (1D)
        classes corresponding to 'x': x*y = shape()
trees : int
Number of Trees used in the construction of the tree ensemble
jobs : int
The number of jobs to run in parallel
random: int
Controls both the randomness of the bootstrapping of the samples used when building trees
save_file_path: str() as filename
    Returns
    -------
    rf : sklearn RandomForestClassifier
        the fitted random forest model (also dumped to 'save_file_path' via joblib)
"""
from sklearn.ensemble import RandomForestClassifier
clf = RandomForestClassifier(
n_estimators=int(trees), n_jobs=int(jobs), random_state=int(random), verbose=out
)
rf = clf.fit(x, y)
filename = save_file_path
from joblib import dump
dump(rf, filename)
# return x,y ,clf, yhat
return rf
def randomforest_load_eval(x, y, rf, norm=True, report=True, normalising="l2"):
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
from joblib import load
from sklearn import preprocessing
clf = load(str(rf))
if norm == True:
preds = clf.predict(preprocessing.normalize(x, norm=str(normalising)))
else:
preds = clf.predict(x)
if report == True:
print("CLASSIFICATION REPORT")
print(classification_report(y, preds))
print("CONFUSION MATRIX")
print(confusion_matrix(y, preds))
print("ACCURACY SCORE:")
print(accuracy_score(y, preds))
return preds
else:
return preds
# return x
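# Hedged end-to-end sketch of the random forest helpers above, assuming
# `spectra` (n_pixels x n_wavenumbers) and integer `labels` exist;
# 'rf.joblib' is just an illustrative file name.
def _rf_pipeline_example(spectra, labels):
    X_train, X_test, y_train, y_test = prepare_dat2train(spectra, labels, testsize=0.2)
    randomforest_train(X_train, y_train, trees=20, save_file_path="rf.joblib")
    # norm=False because prepare_dat2train already normalised the data
    return randomforest_load_eval(X_test, y_test, "rf.joblib", norm=False)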
def dec_bound_plot(X_train, X_test, y_train, y_test, forest):
"""
This function provides the decision boundary plot.
In case of a multidimensional Input Classifier you need to specify the certain features.
Now every feature except the first 2 features are filtered
TODO: Specify Filter-List
"""
import matplotlib.pyplot as plt
from mlxtend.plotting import plot_decision_regions
# from sklearn.decomposition import PCA
X_combined = np.vstack((X_train, X_test))
y_combined = np.hstack((y_train, y_test))
# pca = PCA(n_components = 2)
# X_train2 = pca.fit_transform(X_combined)
#
# plot_decision_regions function takes "forest" as classifier
#
feature_values = {i: 1 for i in range(2, X_combined.shape[1])}
feature_width = {i: 1 for i in range(2, X_combined.shape[1])}
fig, ax = plt.subplots(figsize=(7, 7))
plot_decision_regions(
X_combined,
y_combined,
clf=forest,
# feature_index=[0,2],
filler_feature_values=feature_values,
filler_feature_ranges=feature_width,
res=0.02,
legend=2,
ax=ax,
)
# plt.xlabel('petal length [cm]')
# plt.ylabel('petal width [cm]')
# plt.legend(loc='upper left')
# plt.tight_layout()
plt.show()
return
# ------------------------------------------------------
#
# Features from RF
#
# ------------------------------------------------------
# TODO: install
# def get_rf_features(rf, wvn, X_test=None, y_test=None, method=1):
# """
#
#
# """
# if method==1:
# plt.barh(wvn, rf.feature_importances_)
# plt.show()
#
# if method==2:
# perm_importance = permutation_importance(rf, X_test, y_test)
# plt.barh(wvn, perm_importance.importances_mean)
# plt.show()
#
#
# if method==3:
# explainer = shap.TreeExplainer(rf)
# shap_values = explainer.shap_values(X_test)
# shap.summary_plot(shap_values, X_test)#, plot_type="bar")
#
# return
#
def kmeans(x, c=4, jobs=2, out=15):
"""
Wrapper of the scikit learn Kmeans implementation
Kmeans: standard clustering approach to vector quantization.
https://projecteuclid.org/euclid.bsmsp/1200512992
Parameters
----------
    x : numpy array (2D)
        spectroscopic data with 2 dimensions: x*y, spectra = shape()
    c : int
        number of putative centroids
    jobs : int
        The number of jobs to run in parallel (passed to KMeans n_jobs)
    out : int
        verbosity level; provides information about the progress
Returns
-------
y : numpy array (1D) of shape x*y
selected data points with cluster affiliation / Corresponding classes
"""
from sklearn.cluster import KMeans
kmeans = KMeans(n_clusters=c, n_jobs=jobs, verbose=out)
kmeans.fit(np.nan_to_num(x))
y = kmeans.predict(np.nan_to_num(x))
return y
def hca(x):
return x
def pca(x, pc):
from sklearn.decomposition import PCA
pca = PCA(n_components=pc)
pca.fit(np.nan_to_num(x))
p = pca.transform(np.nan_to_num(x))
return p
def pca_all(x, pc):
""" """
from sklearn.decomposition import PCA
pca = PCA(n_components=pc)
pca.fit(np.nan_to_num(x))
p = pca.transform(np.nan_to_num(x))
vr = pca.explained_variance_ratio_
print("Explained Variance based on N PCs =", vr)
cov = pca.get_covariance()
it = pca.inverse_transform(p)
scores = pca.score_samples(x)
return p, vr, cov, it, scores
def plot_pca(p, vr):
import matplotlib.pyplot as plt
plt.style.use("ggplot")
plt.scatter(p[:, 1], p[:, 2], color=["r"])
plt.scatter(p[:, 0], p[:, 1], color=["b"])
plt.show()
plt.plot(np.cumsum(vr))
plt.xlabel("number of components")
plt.ylabel("cumulative explained variance")
plt.show()
return
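# Hedged usage sketch for the PCA helpers above, assuming `spectra` is a
# 2D numpy array (n_pixels x n_wavenumbers).
def _pca_example(spectra):
    p, vr, cov, it, scores = pca_all(spectra, pc=5)
    plot_pca(p, vr)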
def transform_gt(pic):
"""
you need to specify the given ground truth as an image with the following axes:
pic.shape = x,y,z
"""
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from keras.preprocessing.image import load_img
from collections import Counter
label_encoder = LabelEncoder()
img = load_img(pic)
inp = np.asarray(img)
x, y, z = inp.shape
inp2 = inp.reshape(x * y, z)
yl = inp2.tolist()
new_A = map(tuple, yl)
final_count = Counter(new_A)
a = list(final_count)
pair = {}
for i, j in enumerate(a):
pair[j] = i
for i, pixel in enumerate(yl):
if tuple(pixel) in list(pair.keys()):
yl[i] = pair[tuple(pixel)]
integer_encoded = label_encoder.fit_transform(yl)
onehot_encoder = OneHotEncoder(sparse=False)
integer_encoded = integer_encoded.reshape(len(integer_encoded), 1)
onehot_encoded = onehot_encoder.fit_transform(integer_encoded)
return final_count, pair, onehot_encoded
####################################################################################################
####################################################################################################
# PRETRAINED DEEP NEURAL NETWORKS
####################################################################################################
class DeepLearn:
"""
Deep learning based procedures for the use in spectroscopy.
Here you can find pretrained deep neural networks, which can be used for classification / RMieS-Correction, or further training.
To ensure a smooth use of your data, the spectroscopic specifications of the used training data are shown below:
Attributes of the raw data:
    - Spectroscopic data recording was performed on an Agilent Cary 620/670 FTIR Imaging System with a 128x128 pixel MCT (Mercury Cadmium Telluride) FPA for whole-slide imaging.
    - Data recording with 128 scans results in a wavenumber range of 950 to 3700 cm^-1.
    - With a spectral resolution of ca. 4 cm^-1 this resulted in 1428 datapoints on the z-axis.
    - Each pixel has an edge length of 5.65 µm.
    - Per detector field this results in a FOV of ca. 715x715 µm^2.
Data sources:
Tissue was formaldehyde-fixed paraffin-embedded (FFPE) from human colon.
- https://www.biomax.us/CO1002b
- https://www.biomax.us/tissue-arrays/Colon/CO722
You can find further information on data collection in:
[1] https://pubs.rsc.org/en/content/articlelanding/fd/2016/c5fd00157a#!divAbstract
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------
Components of the class are the following methods:
DeepLearn.net()
This method returns the basic structure of the used deep neural networks in form of a graph.
For further use, this structure was bound to the python interfaces of TensorFlow and Keras to allow a permanent integration in most modern workflows.
It is currently divided into two main classes. First, the spectral classification and second the RMieS-correction of FTIR spectral data, using a fast deep learning algorithm.
Specifications of the used training data:
    We used complete uncorrected FTIR data in the fingerprint region, between wavenumbers of 950 and 1800 cm^-1.
The groundtruth was based on the segmentation of the used random forest from [1].
These in turn were created from a multi-step process of pre-segmentation of RMieS-corrected spectra and pathologist annotation.
    With regard to the learning behaviour of the deep neural networks, it could be shown that no new classifier has to be built;
    instead, the existing networks can be reused via transfer learning for a variety of applications, while the number of false positives is significantly reduced. [2]
The data from groundtruth was processed under the following conditions:
- Agilent Resolution Pro Software.
    - Fourier Transformation using Mertz phase correction.
- Blackman-Harris-4-term apodization and zero filling of 2.
Specifications for own use:
The spectral data must be available as 2d-numpy array which is structured as follows:
x_data = x_axis*y_axis, z_axis
It is important for the application to observe the data points on the z-axis
    The classification ( dl.net(x_data, classify=True) ) of the individual RMieS-uncorrected spectra (semantic segmentation) is carried
    out on the first 450 wavenumbers between 950 and 1800 cm^-1.
    The correction ( dl.net(x_data, miecorr=True) ) of the raw data is done on the first 909 wavenumbers between 950 and 2300 cm^-1.
Examples:
import openvibspec.ml_ftir as ovml
dl = ovml.DeepLearn()
x_pred, model = dl.net(x_data[:,:450],classify=True)
x_corr, model = dl.net(x_data[:,:909], miecorr=True)
Args:
x_data(numpy array):
        classify=False(bool): if True, uses the input data (x_data) to predict the 19 previously learned classes on uncorrected FTIR spectra of human colon tissue
        miecorr=False(bool): if True, uses the input data (x_data) to predict the regression of the RMieS-correction function based on Bassan
References:
[2] Classification of (RMieS) uncorrected FTIR spectra with deep neural networks.
https://academic.oup.com/bioinformatics/article-abstract/36/1/287/5521621
[3] Deep neural networks for the correction of RMie Scattering in FTIR data.
https://arxiv.org/abs/2002.07681
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------
DeepLearn.transfer()
The transfer function is based on using the data representations discovered by the existing networks for faster learning on new data.
For example, the networks trained on ffpe can be used to create classification networks for other tissues and their laboratory preparation with significantly less data.
For further informations regarding the theoretical part of this procedure, please see reference [2].
Besides the spectral data a groundtruth as label is needed for the transfer learning.
Models and weights are automatically saved in the working directory in *h5 and *json format using following naming convention:
model_ptMLP_MieReg_%d-%m-%Y_%I-%M-%S_%p
Examples:
import openvibspec.ml_ftir as ovml
dl = ovml.DeepLearn()
dl.transfer(x_data[:5,:909],y_data, batch=10, train_epochs=10, miecorr=True, trainable=False)
dl.transfer(x_data[:5,:909],x_data_corrected[:5,:909], batch=10, train_epochs=10, miecorr=True, trainable=False)
Args:
x_data(numpy array): 2D array shape(x_axis*y_axis, z_axis)
y_data(numpy array): label vector with classes assigned as numbers from 1 to n
batch(int): number of examples per batch
train_epochs(int): number of iterations per training
add_l(list of int()): possible list for adding layers
        classify=True(bool): classification mode
        miecorr=True(bool): regression mode
        trainable=False(bool): if trainable=True, allows adjusting the already loaded weights of the pretrained networks
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------
DeepLearn.load_and_predict()
This function allows to load and use the trained network which was saved under DeepLearn.transfer()
Examples:
import openvibspec.ml_ftir as ovml
dl = ovml.DeepLearn()
a = dl.load_and_predict(x_new_data[:,:450],'model_ptMLP_class_DATE_TIME')
Args:
x_new_data(numpy array): 2D array shape(x_axis*y_axis, z_axis)
model_ptMLP_class_DATE_TIME(str): model_ptMLP_MieReg_* or model_ptMLP_class_*
"""
def net(
self, x, classify=False, miecorr=False, predict=False, train=False, show=False
):
import keras
from keras.models import model_from_json
# import tensorflow as tf
# import tensorflow.compat.v1 as tf
# tf.disable_v2_behavior()
"""
####################################################################################################
# DETERMINE WICH MODEL PARAMETERS YOU WANT TO USE
# CLASSIFY == TRUE GIVES THE MODEL TRAINED TO CLASSIFY ALL CELLUAR COMPONENTS BASED ON SPECTRA
# BETWEEN 950-1800 WVN
#
# MIECORR == TRUE GIVES THE CORRESPONDING NEURAL NETWORK FOR PERFORMING EFFICIENT RMIE-CORRECTION
# ON FFPE-BASED TISSUE SPECTRA
#
####################################################################################################
"""
############# TODO
#
# CURRENTLY THE loaded_model INSTANCE IS EQUIPPED WITH DROPOUT LAYERS
        # SINCE THE ORIGINAL MODEL WAS BASED ON THEANO THEY CAN ONLY BE USED IN A MONTE-CARLO-DROPOUT WAY
#
# THIS SHOULD BE IMPLEMENTED AS 2ND CASE
#
#
#############
if classify == True:
if x.shape[1] != 450:
raise ValueError(
"This is a classification problem: Your spectral data needs 450 datapoints in WVN range of 950-1800 1/cm"
)
json_file = open(
os.path.join(str(MODELPATH) + "/model_weights_classification.json"), "r"
)
loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
if show == True:
print(loaded_model.summary())
loaded_model.load_weights(
os.path.join(str(MODELPATH) + "/model_weights_classification.best.hdf5")
)
print("Loaded model from disk")
model2 = keras.Sequential(
[
loaded_model.layers[0],
loaded_model.layers[1],
loaded_model.layers[3],
loaded_model.layers[5],
loaded_model.layers[7],
loaded_model.layers[9],
loaded_model.layers[11],
loaded_model.layers[13],
]
)
# model = loaded_model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
from sklearn.preprocessing import normalize
trX = normalize(x, axis=1, norm="l2")
return model2.predict(trX)
if miecorr == True:
if x.shape[1] != 909:
raise ValueError(
"This is a regression problem: Your spectral data needs 909 datapoints in WVN range of 950-2300 1/cm"
)
####################################################################################################
# THIS MODEL NEEDS THE FIRST 909 WVN. RANGE FROM 950-2300 WVN 1/cm
#
#
#
####################################################################################################x
json_file = open(
os.path.join(str(MODELPATH) + "/model_weights_regression.json"), "r"
)
loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
if show == True:
print(loaded_model.summary())
loaded_model.load_weights(
os.path.join(str(MODELPATH) + "/model_weights_regression.best.hdf5")
)
print("Loaded model from disk")
loaded_model.compile(loss="mean_squared_error", optimizer="adam")
from sklearn.preprocessing import normalize
trX = normalize(x, axis=1, norm="l2")
return loaded_model.predict(trX)
def transfer(
self,
x,
y,
batch,
train_epochs,
add_l=[],
classify=False,
miecorr=False,
trainable=False,
):
import keras
from keras.models import model_from_json
from keras.models import Sequential
from datetime import datetime
from sklearn.preprocessing import normalize
"""
ALL PARTS OF THE TRANSFER-LEARNING NETWORKS ON FTIR SPECTROSCOPIC DATA
"""
trX = normalize(x, axis=1, norm="l2")
# def onehot(y):
# import keras
# from keras.utils import np_utils
#
# c = np.max(y) + 1
#
# y1hot = np_utils.to_categorical(y, num_classes=c)
#
# return(y1hot)
def add_layer():
from keras.layers import Dense
from keras.models import Model
# yoh = onehot(y)
yoh = y
sm = int(yoh.shape[1])
print("training on", sm, "classes")
json_file = open(
os.path.join(str(MODELPATH) + "/model_weights_classification.json"), "r"
)
loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights(
os.path.join(str(MODELPATH) + "/model_weights_classification.best.hdf5")
)
if trainable == False:
for layer in loaded_model.layers:
layer.trainable = False
else:
for layer in loaded_model.layers:
layer.trainable = True
if not add_l:
model2 = keras.Sequential(
[
loaded_model.layers[0],
loaded_model.layers[1],
loaded_model.layers[3],
loaded_model.layers[5],
loaded_model.layers[7],
loaded_model.layers[9],
loaded_model.layers[11],
loaded_model.layers[13],
]
)
preds = Dense(sm, name="newlast", activation="softmax")(
model2.layers[-1].output
)
model2 = Model(inputs=model2.input, outputs=preds)
model2.compile(
loss="categorical_crossentropy",
optimizer="rmsprop",
metrics=["accuracy"],
)
                history = model2.fit(trX, yoh, batch_size=batch, epochs=train_epochs)
                print(model2.summary())
                # Alias to the common name so the save code below works in both branches
                model = model2
if add_l:
def add_2_model(add_l):
base = Model(
inputs=loaded_model.input,
outputs=loaded_model.layers[-1].output,
)
model = Sequential()
model.add(base)
model.add(Dense(add_l[0], input_dim=450, activation="relu"))
for layer_size in add_l[1:]:
model.add(Dense(layer_size, activation="relu"))
model.add(Dense(sm, activation="softmax"))
return model
model = add_2_model(add_l)
model.compile(
loss="categorical_crossentropy",
optimizer="rmsprop",
metrics=["accuracy"],
)
history = model.fit(trX, yoh, batch_size=batch, epochs=train_epochs)
print(model.summary())
dtstr = datetime.now().strftime("%d-%m-%Y_%I-%M-%S_%p")
model_json = model.to_json()
with open("model_ptMLP_class_" + dtstr + ".json", "w") as json_file:
json_file.write(model_json)
model.save_weights("model_model_ptMLP_class_" + dtstr + ".h5")
print(
"Saved model to disk to", "model_model_ptMLP_class_" + dtstr + ".json"
)
print("and weights to")
print("Saved model to disk to", "model_model_ptMLP_class_" + dtstr + ".h5")
###########################PLOTTING##########################
history_dict = history.history
history_dict.keys()
a = np.array(history_dict["acc"])
print(a.shape)
l = np.array(history_dict["loss"])
e = range(1, len(a) + 1)
plt.plot(e, a, "bo", color="red", label="Acc Training")
plt.plot(e, l, "b", label="Loss Training")
plt.xlabel("Epochs")
plt.legend()
plt.savefig("model.pdf")
return (model, history_dict)
def simple_val_of_data(x, y):
from sklearn.model_selection import train_test_split
from random import randrange
from sklearn.preprocessing import normalize
trX = normalize(x, axis=1, norm="l2")
seed = randrange(999)
print("used random seed was", seed)
x_train, x_test, y_train, y_test = train_test_split(
trX, y, test_size=0.4, random_state=seed
)
return x_train, x_test, y_train, y_test
def train_layer():
sm = int(y.shape[1])
json_filer = open(
os.path.join(str(MODELPATH) + "/model_weights_regression.json"), "r"
)
loaded_model_jsonr = json_filer.read()
loaded_modelr = model_from_json(loaded_model_jsonr)
loaded_modelr.load_weights(
os.path.join(str(MODELPATH) + "/model_weights_regression.best.hdf5")
)
if trainable == False:
for layer in loaded_modelr.layers:
layer.trainable = False
else:
for layer in loaded_modelr.layers:
layer.trainable = True
loaded_modelr.compile(loss="mean_squared_error", optimizer="adam")
history = loaded_modelr.fit(x, y, batch_size=batch, epochs=train_epochs)
dtstr = datetime.now().strftime("%d-%m-%Y_%I-%M-%S_%p")
print(loaded_modelr.summary())
model_json = loaded_modelr.to_json()
with open("model_ptMLP_MieReg_" + dtstr + ".json", "w") as json_file:
json_file.write(model_json)
loaded_modelr.save_weights("model_model_ptMLP_MieReg_" + dtstr + ".h5")
print(
"Saved model to disk to", "model_model_ptMLP_MieReg_" + dtstr + ".json"
)
print("and weights to")
print("Saved model to disk to", "model_model_ptMLP_MieReg_" + dtstr + ".h5")
return
if classify == True:
if x.shape[1] != 450:
raise ValueError(
"This is a classification problem: x needs to be 450 datapoints in WVN range of 950-1800 1/cm"
)
mod, h = add_layer()
if miecorr == True:
if y.shape[1] != x.shape[1]:
raise ValueError(
"This is a regression problem: x and y need 909 datapoints in WVN range of 950-2300 1/cm"
)
train_layer()
# def gan_ir_upsample(self,lst_class, lst_ir):
# def gan_ir_upsample(lst_class, lst_ir):
# def gan_ir_upsample(path_class, path_ir):
def gan_ir_upsample(self, path_class, path_ir):
"""
INPUT:
        path_class: path to a directory containing groundtruth maps (*.mat files)
        path_ir: path to a directory containing IR images (*.mat files)
        OUTPUT:
            generated higher-resolution images, written to 'higher_res<N>.mat'
"""
import matplotlib.pyplot as plt
import numpy as np
import os
import re
import scipy.io as sio
import tensorflow as tf
# import torchvision as tv
def convert_data_2_tensor(lst_class, lst_ir, BATCH_SIZE):
"""Convert images from numpy array to an TensorFlow dataset object."""
tensor_class = tf.convert_to_tensor(lst_class)
tensor_ir = tf.convert_to_tensor(lst_ir)
dataset = tf.data.Dataset.from_tensor_slices((tensor_class, tensor_ir))
dataset = dataset.batch(BATCH_SIZE)
return dataset
def read_data_2_array_np(path_to_class, path_to_ir):
"""Read data from filesystem to array and return the array lists.
            The returned array lists are further processed by 'convert_data_2_tensor'.
"""
lst_class = []
for file_path in sorted_nicely(os.listdir(path_to_class)):
img_class = sio.loadmat(path_to_class + "/" + file_path)
img_class = img_class["multi"].transpose((2, 0, 1))
img_class = img_class.astype(dtype=np.float32)
img_class = np.divide(img_class, 255)
lst_class.append(img_class[(1, 2, 3), :, :])
lst_ir = []
for file_path in sorted_nicely(os.listdir(path_to_ir)):
img_ir = sio.loadmat(path_to_ir + "/" + file_path)
img_ir = img_ir["xx"].transpose((2, 0, 1))
img_ir = img_ir.astype(dtype=np.float64)
lst_ir.append(img_ir[0:1, :, :])
return lst_class, lst_ir
def show_img_mat(path):
"""Print image as a matplot-plot."""
fig = plt.figure()
data = sio.loadmat(path)
x = data["gen"]
x = x - x.min() # shift/normalize x_min to be 0 and x_max to be 1
x = x / x.max()
plt.title(path)
plt.imshow(x[0][0], cmap="binary")
plt.show()
def sorted_nicely(l):
"""Sort the given iterable in the way that humans expect."""
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split("([0-9]+)", key)]
return sorted(l, key=alphanum_key)
def gan(
lst_class_files,
lst_ir_files,
MODEL_LOAD_PATH=str(MODELPATH),
BATCH_SIZE_TEST=1,
):
# ---------------------------------------
# CHANGE MODEL DIRECTORY TO GLOBAL VAR.
# ---------------------------------------
"""Run GeneratorUNet (Main Method)."""
            # Convert the data to a TensorFlow dataset
dataset = convert_data_2_tensor(
lst_class_files, lst_ir_files, BATCH_SIZE_TEST
)
# Read saved TensorFlow model (pb file)
generator = tf.keras.models.load_model(
filepath=MODEL_LOAD_PATH,
)
loop_count = 1
iterator = iter(dataset)
for img_class, img_ir in iterator:
img_class = tf.transpose(img_class, perm=(0, 2, 3, 1))
img_ir = tf.transpose(img_ir, perm=(0, 2, 3, 1))
_out_img = generator((img_ir, img_class))
_out_img_mat = tf.make_ndarray(tf.make_tensor_proto(_out_img))
_out_img_mat = np.transpose(_out_img_mat, [0, 3, 1, 2])
sio.savemat(
"higher_res{}.mat".format(loop_count), dict({"gen": _out_img_mat})
)
# ------------------------------------------------------------
            # TODO change to internal OpenVibSpec format instead of *.mat
# ------------------------------------------------------------
# show_img_mat('out/tf_generated_imgs/{}.mat'.format(loop_count))
loop_count = loop_count + 1
# if __name__ == "__main__":
# Read data to an list and convert 2 array
lst_class_files_np, lst_ir_files_np = read_data_2_array_np(
str(path_class), str(path_ir)
)
gan(lst_class_files_np, lst_ir_files_np)
return
def ocsvm(x, noise=0.03, g=0.001, show=False):
"""
Outlier detection for use in spectroscopy.
https://papers.nips.cc/paper/1999/file/8725fb777f25776ffa9076e44fcfd776-Paper.pdf
One can also use it for novelty detection, as Schoelkopf had in mind
Parameters
----------
x : numpy array (2D)
classes per point (as int) with 2 dimensions: x*y,spectra = shape(), e.g. output of kmeans or similiar clsutering approach
noise : float
estimate about the given noise
Returns
-------
anom: numpy array (2D) of shape x*y,spectra = shape
selected data points which fall under the assumption of "bad" data points
filt_dat: numpy array (2D)
selected data points which fall under the assumption of "good" data points aka filtered data
TODO:
Adjusting the axis label and legend of the plot!
"""
    import matplotlib.pyplot as plt
    import numpy as np
    from sklearn.svm import OneClassSVM
svm = OneClassSVM(kernel="rbf", gamma=g, nu=noise)
if len(x.shape) == 2:
svm.fit(x)
pred = svm.predict(x)
a_indx = np.where(pred == -1)
n_indx = np.where(pred == 1)
anom = x[a_indx]
filt_dat = x[n_indx]
    else:
        raise ValueError("Spectra must be in a 2D array: n = axis 0, spectra = axis 1")
    if show:
ax = plt.gca()
ax.invert_xaxis()
plt.style.use("ggplot")
plt.plot(filt_dat.T, color="blue", label="Normal")
plt.plot(anom.T, color="orange", label="Anomalies")
# plt.legend()
plt.show()
# plt.savefig(name)
# plt.clf()
return anom, filt_dat
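# Minimal usage sketch for ocsvm (illustrative values; the random `spectra`
# array stands in for real (n_points, n_channels) data):
#
#     import numpy as np
#     spectra = np.random.rand(1000, 909)
#     anomalies, filtered = ocsvm(spectra, noise=0.03, g=0.001)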
# --------------------------------------------------
# --------------------------------------------------
# LOAD AND SAVE MODELS SKLEARN
# --------------------------------------------------
# --------------------------------------------------
def save_model(model, name="model", timestamp=None):
    import joblib  # sklearn.externals.joblib was removed in scikit-learn 0.23; use the standalone joblib package
from datetime import datetime
ts = datetime.now().timestamp()
st = datetime.fromtimestamp(ts).strftime("%Y_%m_%d_%H:%M:%S")
    if timestamp:
joblib.dump(model, name + st)
else:
joblib.dump(model, name)
return
def load_model(model):
    import joblib  # sklearn.externals.joblib was removed in scikit-learn 0.23; use the standalone joblib package
m = joblib.load(model)
return m
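# Minimal usage sketch (assumes `clf` is any fitted scikit-learn estimator):
#
#     save_model(clf, name="ocsvm_filter")
#     clf_restored = load_model("ocsvm_filter")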
|
<filename>src/components/HeyRecruiter/index.js
import React, { useState } from 'react'
import { Link } from 'gatsby'
const HeyRecruiter = () => {
const [folded, setFolded] = useState(false)
return (
<div
className="p-3 shadow"
style={{
position: 'fixed',
bottom: '20px',
right: '20px',
textAlign: 'right',
backgroundColor: 'rgba(255,255,255,0.85)',
zIndex: '2147483647',
borderRadius: '8px',
}}
>
{folded === false ? (
<>
<button
type="button"
className="btn-close"
aria-label="Close"
style={{
position: 'absolute',
left: '8px',
top: '8px',
}}
onClick={() => setFolded(true)}
></button>
<h3>
<span role="img" aria-label="Hey recruiter!">
🙋🏽♂️
</span>{' '}
Hey recruiter!
</h3>
<ul className="mb-0">
<li
style={{
display: 'inline-block',
}}
>
<a
href="https://www.linkedin.com/in/davidleiva-fe/"
target="_blank"
rel="noreferrer"
className="btn btn-sm d-flex align-items-center mx-1 mb-2 rounded-pill Mya text-white"
style={{
background: '#0e76a8',
lineHeight: '16px',
}}
>
<i className="align-middle icon icon-logo-linkedin me-1"></i>
<span>Linkedin profile</span>
</a>
</li>
<li
style={{
display: 'inline-block',
}}
>
<Link
to="https://drive.google.com/file/d/1J6KjhY-_8f2ThcR7bDjV_e52QU1VfJMM/view?usp=sharing"
target="_blank"
download
>
<button
type="submit"
className="btn btn-sm d-flex align-items-center btn-outline-primary mx-1 mb-2 rounded-pill MyButton"
style={{ lineHeight: '16px' }}
>
<i className="icon icon-ic_file_download_48px me-1"></i>
<span>Download CV</span>
</button>
</Link>
</li>
<br />
<li
style={{
display: 'inline-block',
}}
>
<a
href="tel:+34653071080"
className="btn btn-sm d-flex align-items-center btn-outline-primary mx-1 rounded-pill Mya"
style={{ lineHeight: '16px' }}
>
<i className="align-middle icon icon-phone-call me-1"></i>
<span className="d-none d-md-inline">
+34653071080
</span>
</a>
</li>
<li
style={{
display: 'inline-block',
}}
>
<a
target="_blank"
href="mailto:<EMAIL>"
className="btn btn-sm d-flex align-items-center btn-outline-primary mx-1 rounded-pill MyButton"
style={{ lineHeight: '16px' }}
>
<i className="align-middle icon icon-email-84 me-1 d-none d-inline-md"></i>
<span className="d-none d-md-inline">
<EMAIL>
</span>
<i className="align-middle icon icon-email-84 d-inline d-none-md"></i>
</a>
</li>
</ul>
</>
) : (
<button
className="bg-transparent border-0"
style={{
lineHeight: '0px',
transform: 'rotate(270deg)',
}}
onClick={() => setFolded(false)}
>
<i
className="icon icon-minimal-right"
style={{ opacity: 0.5 }}
></i>
</button>
)}
</div>
)
}
export default HeyRecruiter
|
public class Circle {
private double radius;
// Constructor
public Circle(double radius) {
this.radius = radius;
}
// Getters
public double getRadius() {
return radius;
}
public double getCircumference() {
return 2 * Math.PI * radius;
}
public double getArea() {
return Math.PI * radius * radius;
}
}
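// Minimal usage sketch (illustrative radius value):
//
//     Circle c = new Circle(2.0);
//     System.out.println(c.getCircumference()); // ~12.566
//     System.out.println(c.getArea());          // ~12.566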
|
<filename>src/main/java/cn/chenlichao/wmi4j/SWbemLastError.java
/*
* Copyright 2014-2014 <NAME>
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.chenlichao.wmi4j;
import org.jinterop.dcom.impls.automation.IJIDispatch;
/**
* todo implement
* Created by chenlichao on 14-7-28.
*/
public class SWbemLastError extends AbstractSecurityScriptingObject {
SWbemLastError(IJIDispatch dispatch) {
super(dispatch);
}
/**
* Returns an {@link SWbemPropertySet} object that is a collection of the properties for the current class or instance.
* This property is read-only.
*
* @return An {@link SWbemPropertySet} object that is a collection of the properties for the current class or instance.
* @throws WMIException
*/
public SWbemPropertySet getProperties() throws WMIException {
return getProperty(SWbemPropertySet.class, "Properties_");
}
/**
* Get the variant value of the specified WMI property.
* @param propertyName Name of the property.
* @return The variant value of the specified WMI property.
* @throws WMIException
*/
public WMIVariant getPropertyByName(String propertyName) throws WMIException {
return getProperties().item(propertyName).getValue();
}
}
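// Minimal usage sketch (assumes an SWbemLastError instance named lastError;
// "Description" is an illustrative WMI property name):
//
//     WMIVariant desc = lastError.getPropertyByName("Description");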
|
<filename>routes/index.js
const router = require("express").Router();
// config express-validator for body
const { body } = require("express-validator/check");
// Controllers
const projectsController = require('../controllers/projects_controller');
const tasksController = require('../controllers/tasks_controller');
const usersController = require('../controllers/users_controller');
const authController = require('../controllers/auth_controller');
// Routing
module.exports = function() {
// index
router.get("/", authController.userAuthenticated, projectsController.projectsIndex);
// new projects
router.get("/nuevo-proyecto", authController.userAuthenticated, projectsController.projectsNewForm);
router.post("/nuevo-proyecto",
authController.userAuthenticated,
body('name')
.trim()
.escape(),
projectsController.projectsNewSubmit
);
// show projects
router.get("/proyectos/:url", authController.userAuthenticated, projectsController.projectsShow);
// update project
router.get("/proyecto/editar/:id", authController.userAuthenticated, projectsController.projectsEditForm);
router.post("/nuevo-proyecto/:id",
authController.userAuthenticated,
body('name')
.trim()
.escape(),
projectsController.projectsEditSubmit
);
// delete project
router.delete("/proyectos/:url", authController.userAuthenticated, projectsController.projectsDelete);
// add task
router.post("/proyectos/:url", authController.userAuthenticated, tasksController.tasksNewSubmit);
// update task
router.patch("/tareas/:id", authController.userAuthenticated, tasksController.tasksChangeStatus);
// delete task
router.delete("/tareas/:id", authController.userAuthenticated, tasksController.tasksDelete);
// create account
router.get("/crear-cuenta", usersController.usersNewForm);
router.post("/crear-cuenta", usersController.usersNewSubmit);
router.get("/confirmar/:email", usersController.accountConfirm);
// sign in
router.get('/iniciar-sesion', usersController.usersSignIn);
router.post('/iniciar-sesion', authController.authenticateUser);
// close session
router.get('/cerrar-sesion', authController.closeSession);
// recover password
router.get('/recuperar-password', usersController.recoverPasswordForm)
router.post('/recuperar-password', authController.sendToken);
router.get('/recuperar-password/:token', authController.resetPasswordForm);
router.post('/recuperar-password/:token', authController.resetPasswordSubmit);
return router;
};
|
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#include "astra_context.hpp"
#include "astra_context_impl.hpp"
#include "astra_create_stream_proxy.hpp"
#include "astra_cxx_compatibility.hpp"
#include <astra_core/capi/astra_streamservice_proxy.h>
#include <astra_core_api.h>
namespace astra {
context::context()
: impl_(astra::make_unique<context_impl>()),
proxy_(create_stream_proxy(this))
{}
context::~context() {}
astra_status_t context::initialize()
{
astra_api_set_proxy(proxy());
return impl_->initialize();
}
astra_status_t context::terminate()
{
return impl_->terminate();
}
astra_streamservice_proxy_t* context::proxy()
{
return proxy_.get();
}
astra_status_t context::streamset_open(const char* connectionString,
astra_streamsetconnection_t& streamSet)
{
return impl_->streamset_open(connectionString, streamSet);
}
astra_status_t context::streamset_close(astra_streamsetconnection_t& streamSet)
{
return impl_->streamset_close(streamSet);
}
astra_status_t context::reader_create(astra_streamsetconnection_t streamSet,
astra_reader_t& reader)
{
return impl_->reader_create(streamSet, reader);
}
astra_status_t context::reader_destroy(astra_reader_t& reader)
{
return impl_->reader_destroy(reader);
}
astra_status_t context::reader_get_stream(astra_reader_t reader,
astra_stream_type_t type,
astra_stream_subtype_t subtype,
astra_streamconnection_t& connection)
{
return impl_->reader_get_stream(reader, type, subtype, connection);
}
astra_status_t context::stream_get_description(astra_streamconnection_t connection,
astra_stream_desc_t* description)
{
return impl_->stream_get_description(connection, description);
}
astra_status_t context::stream_start(astra_streamconnection_t connection)
{
return impl_->stream_start(connection);
}
astra_status_t context::stream_stop(astra_streamconnection_t connection)
{
return impl_->stream_stop(connection);
}
astra_status_t context::reader_open_frame(astra_reader_t reader,
int timeoutMillis,
astra_reader_frame_t& frame)
{
return impl_->reader_open_frame(reader, timeoutMillis, frame);
}
astra_status_t context::reader_close_frame(astra_reader_frame_t& frame)
{
return impl_->reader_close_frame(frame);
}
astra_status_t context::reader_register_frame_ready_callback(astra_reader_t reader,
astra_frame_ready_callback_t callback,
void* clientTag,
astra_reader_callback_id_t& callbackId)
{
return impl_->reader_register_frame_ready_callback(reader, callback, clientTag, callbackId);
}
astra_status_t context::reader_unregister_frame_ready_callback(astra_reader_callback_id_t& callbackId)
{
return impl_->reader_unregister_frame_ready_callback(callbackId);
}
astra_status_t context::reader_get_frame(astra_reader_frame_t frame,
astra_stream_type_t type,
astra_stream_subtype_t subtype,
astra_frame_t*& subFrame)
{
return impl_->reader_get_frame(frame, type, subtype, subFrame);
}
astra_status_t context::stream_set_parameter(astra_streamconnection_t connection,
astra_parameter_id parameterId,
size_t inByteLength,
astra_parameter_data_t inData)
{
return impl_->stream_set_parameter(connection, parameterId, inByteLength, inData);
}
astra_status_t context::stream_get_parameter(astra_streamconnection_t connection,
astra_parameter_id parameterId,
size_t& resultByteLength,
astra_result_token_t& token)
{
return impl_->stream_get_parameter(connection, parameterId, resultByteLength, token);
}
astra_status_t context::stream_get_result(astra_streamconnection_t connection,
astra_result_token_t token,
size_t dataByteLength,
astra_parameter_data_t dataDestination)
{
return impl_->stream_get_result(connection, token, dataByteLength, dataDestination);
}
astra_status_t context::stream_invoke(astra_streamconnection_t connection,
astra_command_id commandId,
size_t inByteLength,
astra_parameter_data_t inData,
size_t& resultByteLength,
astra_result_token_t& token)
{
return impl_->stream_invoke(connection, commandId, inByteLength, inData, resultByteLength, token);
}
astra_status_t context::temp_update()
{
return impl_->temp_update();
}
astra_status_t context::notify_host_event(astra_event_id id, const void* data, size_t dataSize)
{
return impl_->notify_host_event(id, data, dataSize);
}
}
|
<filename>src/main/java/com/alipay/api/response/AlipayAssetPointVoucherprodBenefittemplateSettleResponse.java
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.asset.point.voucherprod.benefittemplate.settle response.
*
* @author auto create
* @since 1.0, 2021-07-13 10:41:56
*/
public class AlipayAssetPointVoucherprodBenefittemplateSettleResponse extends AlipayResponse {
private static final long serialVersionUID = 2231429348347547299L;
/**
	 * Asset id, i.e. the id of the settled benefit template
*/
@ApiField("asset_id")
private String assetId;
/**
	 * Funding bill serial id, the settlement serial number of the benefit settlement platform
*/
@ApiField("bill_no")
private String billNo;
/**
	 * The amount actually settled
*/
@ApiField("settle_amount")
private String settleAmount;
/**
	 * Current settlement status. I: settlement accepted, funds not yet transferred; S: funds transfer complete; C: funds transfer failed
*/
@ApiField("status")
private String status;
public void setAssetId(String assetId) {
this.assetId = assetId;
}
public String getAssetId( ) {
return this.assetId;
}
public void setBillNo(String billNo) {
this.billNo = billNo;
}
public String getBillNo( ) {
return this.billNo;
}
public void setSettleAmount(String settleAmount) {
this.settleAmount = settleAmount;
}
public String getSettleAmount( ) {
return this.settleAmount;
}
public void setStatus(String status) {
this.status = status;
}
public String getStatus( ) {
return this.status;
}
}
|
<filename>ansible/roles/logger-service/files/db/refresh_summary_breakdown_source.sql
-- This stored procedure is used to (re)populate the "event_summary_breakdown_reason_entity_source"
-- summary table (via temporary record-count and event-count tables)
-- from all existing log information
delimiter $$
DROP PROCEDURE IF EXISTS `logger`.`refresh_summary_breakdown_source`;
CREATE DEFINER=`logger_user`@`%` PROCEDURE `refresh_summary_breakdown_source`()
BEGIN
-- ############ Create temporary tables #################
SET SESSION TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source;
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source_record_counts;
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source_event_counts;
CREATE TABLE `fix_event_summary_breakdown_reason_entity_source` (
`month` varchar(255) NOT NULL,
`log_event_type_id` int(11) NOT NULL,
`log_reason_type_id` int(11) NOT NULL default '-1',
`entity_uid` varchar(255) NOT NULL,
`log_source_type_id` int(11) NOT NULL default '-1',
`number_of_events` bigint(20) NOT NULL,
`record_count` bigint(20) NOT NULL,
PRIMARY KEY (`month`,`log_event_type_id`,`log_reason_type_id`,`entity_uid`, `log_source_type_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE `fix_event_summary_breakdown_reason_entity_source_record_counts` (
`month` varchar(255) NOT NULL,
`log_event_type_id` int(11) NOT NULL,
`log_reason_type_id` int(11) NOT NULL default '-1',
`entity_uid` varchar(255) NOT NULL,
`log_source_type_id` int(11) NOT NULL default '-1',
`record_count` bigint(20) NOT NULL,
PRIMARY KEY (`month`,`log_event_type_id`,`log_reason_type_id`,`entity_uid`, `log_source_type_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
CREATE TABLE `fix_event_summary_breakdown_reason_entity_source_event_counts` (
`month` varchar(255) NOT NULL,
`log_event_type_id` int(11) NOT NULL,
`log_reason_type_id` int(11) NOT NULL default '-1',
`entity_uid` varchar(255) NOT NULL,
`log_source_type_id` int(11) NOT NULL default '-1',
`event_count` bigint(20) NOT NULL,
PRIMARY KEY (`month`,`log_event_type_id`,`log_reason_type_id`,`entity_uid`, `log_source_type_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
-- ############ Populate reason summarys #################
-- record source count sums
insert into fix_event_summary_breakdown_reason_entity_source_record_counts
select month, log_event_type_id, IFNULL(log_reason_type_id, -1) AS reason, entity_uid, IFNULL(log_source_type_id, -1), SUM(record_count) from logger.log_event le
inner join logger.log_detail ld ON ld.log_event_id=le.id
group by month, log_event_type_id, reason, entity_uid, log_source_type_id;
-- event source count sums
insert into fix_event_summary_breakdown_reason_entity_source_event_counts
select month, log_event_type_id, IFNULL(log_reason_type_id, -1) AS reason, entity_uid, IFNULL(log_source_type_id, -1) as source, COUNT(log_event_id) from logger.log_event le
inner join logger.log_detail ld ON ld.log_event_id=le.id
group by month, log_event_type_id, reason, entity_uid, log_source_type_id;
-- join into single table
insert into fix_event_summary_breakdown_reason_entity_source
select rc.month, rc.log_event_type_id, rc.log_reason_type_id, rc.entity_uid, rc.log_source_type_id, ec.event_count, rc.record_count
from logger.fix_event_summary_breakdown_reason_entity_source_record_counts rc
inner join logger.fix_event_summary_breakdown_reason_entity_source_event_counts ec
ON rc.month=ec.month AND rc.log_event_type_id=ec.log_event_type_id AND rc.log_reason_type_id=ec.log_reason_type_id AND rc.log_source_type_id=ec.log_source_type_id AND rc.entity_uid=ec.entity_uid;
-- ############ Swap new and old #################
drop table IF EXISTS old_event_summary_breakdown_reason_entity_source;
RENAME table event_summary_breakdown_reason_entity_source TO old_event_summary_breakdown_reason_entity_source;
RENAME table fix_event_summary_breakdown_reason_entity_source TO event_summary_breakdown_reason_entity_source;
-- drop temporary tables
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source_record_counts;
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source_event_counts;
drop table IF EXISTS fix_event_summary_breakdown_reason_entity_source;
END$$
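delimiter ;
-- Minimal usage sketch: repopulate the summary table on demand
-- (assumes the connected account has EXECUTE rights on the procedure):
-- CALL refresh_summary_breakdown_source();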
|
<filename>front/tingke-manage-system/src/api/admin/diary.js
import request from '../../utils/request';
export default {
    //Diary CRUD operations
selectDiary(page,limit,condition) {
return request({
url: `/admin/acl-diary/selectAllDiary/${page}/${limit}`,
method: 'post',
data: condition
});
},
selectDiaryById(id){
return request({
url: `/admin/acl-diary/selectDiaryById/${id}`,
method: 'get'
});
},
addDiary(diary){
return request({
url: `/admin/acl-diary/addDiary`,
method: 'post',
data: diary
});
},
editDiary(diary){
return request({
url: `/admin/acl-diary/editDiary`,
method: 'post',
data: diary
});
},
deleteDiary(id){
return request({
url: `/admin/acl-diary/deleteDiary/${id}`,
method: 'get'
});
},
editDiaryPublish(id,isPublish){
return request({
url: `/admin/acl-diary/editDiaryPublish/${id}/${isPublish}`,
method: 'get'
});
}
}
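// Minimal usage sketch (illustrative page/limit values; the import path
// depends on where this module is called from):
//
//     import diaryApi from '@/api/admin/diary';
//     diaryApi.selectDiary(1, 10, { title: '' }).then(res => console.log(res));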
|
#!/bin/bash
#set -ex
if [ -z "${1}" ]; then
echo "Missing argument. Format:"
echo " ${0} key1:value1[,keyX:valueX,...] [-f]"
exit 1
fi
if [ -f /var/lib/dcos/mesos-slave-common ]; then
if [ "${2}" != "-f" ]; then
echo "mesos-slave-common exists. Use -f to overwrite."
exit 1
fi
fi
# Example: hospital:kansas
sudo su -c "echo MESOS_ATTRIBUTES=${1} > /var/lib/dcos/mesos-slave-common"
sudo rm -rf /var/lib/mesos/slave/meta/slaves/latest
sudo systemctl restart dcos-mesos-slave.service
echo "Attributes set:"
echo ${1}
|
/*
* The MIT License
*
* Copyright 2016 - <NAME>.
* http://www.SimonSinding.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package initials;
/**
*
* @author thesinding
*/
public class Initials {
public static void main(String[] args) {
String[] text = new String[9]; // Define a String array
text[0] = "0111110000111110";
text[1] = "1100011001100011";
text[2] = "1000001001000001";
text[3] = "1100000001100000";
text[4] = "0110000000110000";
text[5] = "0011110000011110";
text[6] = "1000011001000011";
text[7] = "1100011001100011";
text[8] = "0111110000111110";
        for(int y = 0; y < text.length; y++){ // Iterate over the rows of the String array
            for(int x = 0; x < text[y].length(); x++){ // Iterate over the characters in the current row
                int temp = Character.getNumericValue(text[y].charAt(x)); // Temporarily store the numeric value of the character
                if(temp == 1){
                    System.out.print('*'); // Print '*' if the bit is 1
} else {
System.out.print(' ');
}
}
System.out.print('\n');
}
}
}
|
package br.com.sefaz.dao;
import br.com.sefaz.model.Telefone;
//Inherits from DaoGeneric to reuse the generic CRUD operations for Telefone
public class DaoTelefones<E> extends DaoGeneric<Telefone> {
}
|
#include <iostream>
#include <algorithm> // for sorting
using namespace std;
bool checkAnagram(string s1, string s2)
{
// If both strings are of different length, they
// cannot be anagrams of each other
if (s1.length() != s2.length())
return false;
// Sort both strings
sort(s1.begin(), s1.end());
sort(s2.begin(), s2.end());
// Compare sorted strings
    for (size_t i = 0; i < s1.length(); i++)
if (s1[i] != s2[i])
return false;
return true;
}
int main()
{
string str1="heart";
string str2="earth";
if (checkAnagram(str1, str2))
cout << "The strings are anagrams.";
else
cout << "The strings are not anagrams.";
return 0;
}
|
#!/bin/bash
# Stop and remove all Docker containers
docker stop $(docker ps -aq)
docker rm $(docker ps -aq)
# Remove all dangling (<none>) images and prune unused volumes
docker rmi $(docker images | grep none | awk ' { print $3 }')
docker volume prune -f
|
#! /bin/bash -eu
export SVM_DOCKER_IMAGE=alphasentaurii/spacekit:svm
# export CAL_BASE_IMAGE="stsci/hst-pipeline:CALDP_drizzlecats_CAL_rc6"
export CAL_BASE_IMAGE="stsci/hst-pipeline:latest"
docker build -f Dockerfile -t ${SVM_DOCKER_IMAGE} --build-arg CAL_BASE_IMAGE="${CAL_BASE_IMAGE}" .
|
#!/bin/sh
# Author: bougyman <tj@rubyists.com>
# License: MIT
# This utility adds helper commands for administering runit services
set -e
commands="sv-list svls sv-find sv-enable sv-disable sv-start sv-stop sv-restart"
# Locate the service in the user's $SVDIR or /etc/sv
find_service() {
service=$1
svdir=$(svdir 2>/dev/null)
if [ "x$service" != "x" ];then
if [ -L $svdir/$service ];then
location=$(readlink -f $svdir/$service)
fi
fi
if [ "x$location" != "x" ];then
echo $location
else
if [ -d /etc/sv/$service ];then
echo /etc/sv/$service
elif [ -d "$svdir/../sv/$service" ];then
echo "$svdir/../sv/$service"
elif [ -d "$svdir/../Service/$service" ];then
echo "$svdir/../Service/$service"
elif [ -d "$svdir/../Services/$service" ];then
echo "$svdir/../Services/$service"
fi
fi
}
# Resolve the service directory: the user's $SVDIR, or a system default (/var/service, /service, or /etc/service)
svdir() {
if [ -z $SVDIR ];then
#echo "using /service" >&2
if [ -d /var/service ];then
svdir=/var/service
elif [ -d /service ];then
svdir=/service
elif [ -d /etc/service ];then
svdir=/etc/service
else
echo "No service directory found" 1>&2
exit 127
fi
else
#echo "using $SVDIR" >&2
if [ -d "$SVDIR" ];then
svdir=$SVDIR
else
echo "No service directory found" 1>&2
exit 127
fi
fi
echo $svdir
}
# Add sudo if we don't own the directory in question
check_owner() {
lndir=$1
if [ ! -w $lndir ];then
echo "sudo "
fi
}
# Symlink a service (from find_service's path to `svdir`/$service)
enable() {
echo "Enabling $1" >&2
service=$1
svdir=$(find_service $service)
if [ -z "$svdir" -o ! -d "$svdir" ];then
echo "No such service '$service'" >&2
exit 1
fi
ln_dir=$(svdir)
if [ -L "$ln_dir/$service" ];then
echo "Service already enabled!" >&2
echo " $(sv s $ln_dir/$service)" >&2
exit 1
fi
$(check_owner $ln_dir) ln -s $svdir $ln_dir
}
# Remove a symlink of a service (from find_service's path to `svdir`/$service)
disable() {
echo "Disabling $1" >&2
service=$1
ln_dir=$(svdir)
if [ ! -L "$ln_dir/$service" ];then
echo "Service not enabled!" >&2
exit 1
fi
$(check_owner $ln_dir) rm $ln_dir/$service
}
# Generic list, of one service or all
list() {
svdir=$(svdir)
if [ ! -z "$1" ];then
$(check_owner $svdir) sv s "$svdir/"$1
else
echo "Listing All Services"
$(check_owner $svdir) sv s "$svdir/"*
fi
}
make_links() {
me="$0"
echo $me
here="$( cd "$(dirname "$me" )" && pwd )"
for link in $commands;do
[ -L "$here/$link" ] || ln -s "$me" "$here/$link"
done
}
# Usage
usage() {
cmd=$1
case "$cmd" in
sv-enable) echo "sv-enable <service> - Enable a service and start it (will restart on boots)";;
sv-disable) echo "sv-disable <service> - Disable a service from starting (also stop the service)";;
sv-stop) echo "sv-stop <service> - Stop a service (will come back on reboot)";;
sv-start) echo "sv-start <service> - Start a stopped service";;
sv-restart) echo "sv-restart <service> - Restart a running service";;
svls) echo "svls [<service>] - Show list of services (Default: all services, pass a service name to see just one)";;
sv-find) echo "sv-find <service> - Find a service, if it exists";;
sv-list) echo "sv-list - List available services";;
make-links) echo "Make symlinks for the individual commands";;
commands) echo "Valid Commands: ${commands} make-links"
echo "use command -h for help";;
*) echo "Invalid command (${commands})";;
esac
}
# Start main program
cmd=$(basename $0) # Get the command
if [ "$cmd" = "sv-helper" ] || [ "$cmd" = "sv-helper.sh" ];then
cmd=$1
if [ "x${cmd}" = "x" ];then
cmd="commands"
else
shift
fi
fi
# help
while getopts h options
do
case $options in
h) echo $(usage $cmd)
exit;;
esac
done
svc=$(find_service $@)
case "$cmd" in
enable|sv-enable) enable $@;;
disable|sv-disable) disable $@;;
start|sv-start) $(check_owner $svc) sv u $svc;;
restart|sv-restart) $(check_owner $svc) sv t $svc;;
stop|sv-stop) $(check_owner $svc) sv d $svc;;
ls|svls) list $@;;
make-links) make_links;;
find|sv-find) find_service $@;;
list|sv-list) find $(find_service) -maxdepth 1 -mindepth 1 -type d -exec basename {} \;|sort|tr "\n" " ";echo ;;
*) usage commands;;
esac
|
package chylex.hee.item;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import chylex.hee.HardcoreEnderExpansion;
import chylex.hee.entity.fx.FXHelper;
import chylex.hee.entity.item.EntityItemEndPowder;
import chylex.hee.item.base.ItemAbstractCustomEntity;
import chylex.hee.mechanics.enhancements.EnhancementRegistry;
import chylex.hee.mechanics.enhancements.IEnhanceableTile;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.Pos.PosMutable;
import chylex.hee.system.logging.Log;
public class ItemEndPowder extends ItemAbstractCustomEntity{
@Override
public void onUpdate(ItemStack is, World world, Entity entity, int slot, boolean isHeld){
if (isHeld && world.isRemote && entity == HardcoreEnderExpansion.proxy.getClientSidePlayer() && ((EntityPlayer)entity).openContainer == ((EntityPlayer)entity).inventoryContainer){
final byte maxDist = 8;
Random rand = world.rand;
PosMutable mpos = new PosMutable();
for(int attempt = 0; attempt < 600; attempt++){
mpos.set(entity).move(rand.nextInt(maxDist*2+1)-maxDist, rand.nextInt(maxDist*2+1)-maxDist, rand.nextInt(maxDist*2+1)-maxDist);
Block block = mpos.getBlock(world);
if (block != Blocks.air && EnhancementRegistry.canEnhanceBlock(mpos.getBlock(world))){
FXHelper.create("portalbig").pos(mpos).fluctuatePos(0.65D).fluctuateMotion(0.02D).paramSingle(0.075F+rand.nextFloat()*0.05F).spawn(world.rand, 4);
}
}
}
}
@Override
public ItemStack onItemRightClick(ItemStack is, World world, EntityPlayer player){
player.openGui(HardcoreEnderExpansion.instance, 4, world, 0, -1, 0);
return is;
}
@Override
public boolean onItemUse(ItemStack is, EntityPlayer player, World world, int x, int y, int z, int side, float hitX, float hitY, float hitZ){
Pos pos = Pos.at(x, y, z);
if (EnhancementRegistry.canEnhanceBlock(pos.getBlock(world))){
if (!(pos.getTileEntity(world) instanceof IEnhanceableTile)){
Item prevItem = Item.getItemFromBlock(pos.getBlock(world));
Item newItem = EnhancementRegistry.getItemTransformation(prevItem);
if (newItem instanceof ItemBlock){
pos.setBlock(world, ((ItemBlock)newItem).field_150939_a);
if (!(pos.getTileEntity(world) instanceof IEnhanceableTile)){
Log.reportedError("Failed converting $0 to enhanceable tile ($1 <-> $2)!", prevItem, newItem, pos.getTileEntity(world));
return false;
}
}
}
player.openGui(HardcoreEnderExpansion.instance, 4, world, x, y, z);
return true;
}
return false;
}
@Override
protected EntityItem createEntityItem(World world, double x, double y, double z, ItemStack is){
return new EntityItemEndPowder(world, x, y, z, is);
}
}
|
#include "ObjectPrueba.h"
#ifndef FILE_H
#define FILE_H
typedef enum fmode
{
None = 0,
Read,
Write,
Append,
ReadUpdate,
WriteUpdate,
AppendUpdate
} FileMode;
typedef struct FileStreamPrivate FileStreamPrivate;
typedef struct file
{
Object object;
private(FileStream);
FILE *(*Get)(void);
void (*StreamOpen)(struct string *, FileMode);
void (*StreamClose)(void);
void (*StreamFlush)(void);
char (*GetChar)(void);
struct string *(*GetString)(struct string *, size_t);
void (*PutChar)(char);
void (*PutString)(struct string *);
#define File(args...) overload(File, args);
void *(*File0)(void);
void *(*File1)(const struct file *, FileMode);
//void (*Dispose)(void); -- Required if interface was given
} FileStream;
/* Public */
extern FILE *FileStream_Get(FileStream * const );
extern void FileStream_StreamOpen(struct string *, FileMode, FileStream * const);
extern void FileStream_StreamClose(FileStream * const);
extern void FileStream_StreamFlush(FileStream * const);
extern char FileStream_GetChar(FileStream * const);
extern struct string *FileStream_GetString(struct string *, size_t, FileStream * const);
extern void FileStream_PutChar(char, FileStream * const);
extern void FileStream_PutString(struct string *, FileStream * const);
/* Ctor */
extern void *FileStream_File0(FileStream *);
extern void *FileStream_File1(FileStream *, FileMode, FileStream *);
#endif /* FILE_H */
|
const jwt = require('jsonwebtoken');
const { config } = require('../config');
class AuthService {
static async refreshUserToken(bearer) {
try {
const refreshToken = bearer.replace('Bearer ', '');
const { authJwtSecret, authJwtRefreshTokenSecret } = config;
const payload = jwt.verify(refreshToken, authJwtRefreshTokenSecret);
delete payload.iat;
delete payload.exp;
const token = jwt.sign(payload, authJwtSecret, { expiresIn: '15m' });
return token;
} catch (error) {
throw new Error(error);
}
}
}
module.exports = AuthService;
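// Minimal usage sketch (illustrative; assumes an Express handler that receives
// the refresh token in the Authorization header):
//
//     const token = await AuthService.refreshUserToken(req.headers.authorization);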
|
#!/usr/bin/env bash
set -euo pipefail
source tools/activate_python.sh
PYTHONPATH="${PYTHONPATH:-}:$(pwd)/tools/s3prl"
export PYTHONPATH
python="coverage run --append"
cwd=$(pwd)
#### Make sure chainer-independent ####
python3 -m pip uninstall -y chainer
# [ESPnet2] test asr recipe
cd ./egs2/mini_an4/asr1
echo "==== [ESPnet2] ASR ==="
./run.sh --stage 1 --stop-stage 1
feats_types="raw fbank_pitch"
token_types="bpe char"
for t in ${feats_types}; do
./run.sh --stage 2 --stop-stage 4 --feats-type "${t}" --python "${python}"
done
for t in ${token_types}; do
./run.sh --stage 5 --stop-stage 5 --token-type "${t}" --python "${python}"
done
for t in ${feats_types}; do
for t2 in ${token_types}; do
echo "==== feats_type=${t}, token_types=${t2} ==="
./run.sh --ngpu 0 --stage 6 --stop-stage 13 --skip-upload false --feats-type "${t}" --token-type "${t2}" \
--asr-args "--max_epoch=1" --lm-args "--max_epoch=1" --python "${python}"
done
done
echo "==== feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --ngpu 0 --stage 10 --stop-stage 13 --skip-upload false --feats-type "raw" --token-type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --python "${python}" \
--asr-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1"
echo "==== use_streaming, feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --use_streaming true --ngpu 0 --stage 6 --stop-stage 13 --skip-upload false --feats-type "raw" --token-type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --python "${python}" \
--asr-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1 --encoder=contextual_block_transformer --decoder=transformer
--encoder_conf block_size=40 --encoder_conf hop_size=16 --encoder_conf look_ahead=16"
if python3 -c "import k2" &> /dev/null; then
echo "==== use_k2, num_paths > nll_batch_size, feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --num_paths 500 --nll_batch_size 20 --use_k2 true --ngpu 0 --stage 12 --stop-stage 13 --skip-upload false --feats-type "raw" --token-type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --python "${python}" \
--asr-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1"
echo "==== use_k2, num_paths == nll_batch_size, feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --num_paths 20 --nll_batch_size 20 --use_k2 true --ngpu 0 --stage 12 --stop-stage 13 --skip-upload false --feats-type "raw" --token-type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --python "${python}" \
--asr-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1"
fi
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
cd "${cwd}"
# [ESPnet2] test tts recipe
cd ./egs2/mini_an4/tts1
echo "==== [ESPnet2] TTS ==="
./run.sh --ngpu 0 --stage 1 --stop-stage 8 --skip-upload false --train-args "--max_epoch 1" --python "${python}"
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
# [ESPnet2] test gan-tts recipe
# NOTE(kan-bayashi): pytorch 1.4 - 1.6 works but 1.6 has a problem with CPU,
# so we test this recipe using only pytorch > 1.6 here.
# See also: https://github.com/pytorch/pytorch/issues/42446
if python3 -c 'import torch as t; from packaging.version import parse as L; assert L(t.__version__) > L("1.6")' &> /dev/null; then
./run.sh --fs 22050 --tts_task gan_tts --feats_extract linear_spectrogram --feats_normalize none --inference_model latest.pth \
--ngpu 0 --stop-stage 8 --skip-upload false --train-args "--num_iters_per_epoch 1 --max_epoch 1" --python "${python}"
rm -rf exp dump data
fi
cd "${cwd}"
# [ESPnet2] test enh recipe
if python -c 'import torch as t; from packaging.version import parse as L; assert L(t.__version__) >= L("1.2.0")' &> /dev/null; then
cd ./egs2/mini_an4/enh1
echo "==== [ESPnet2] ENH ==="
./run.sh --stage 1 --stop-stage 1 --python "${python}"
feats_types="raw"
for t in ${feats_types}; do
echo "==== feats_type=${t} ==="
./run.sh --ngpu 0 --stage 2 --stop-stage 10 --skip-upload false --feats-type "${t}" --spk-num 1 --enh-args "--max_epoch=1" --python "${python}"
done
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
cd "${cwd}"
fi
# [ESPnet2] test ssl1 recipe
if python3 -c "import fairseq" &> /dev/null; then
cd ./egs2/mini_an4/ssl1
echo "==== [ESPnet2] SSL1/HUBERT ==="
./run.sh --ngpu 0 --stage 1 --stop-stage 7 --feats-type "raw" --token_type "word" --skip-upload false --pt-args "--max_epoch=1" --pretrain_start_iter 0 --pretrain_stop_iter 1 --python "${python}"
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
cd "${cwd}"
fi
# [ESPnet2] test enh_asr1 recipe
if python -c 'import torch as t; from packaging.version import parse as L; assert L(t.__version__) >= L("1.2.0")' &> /dev/null; then
cd ./egs2/mini_an4/enh_asr1
echo "==== [ESPnet2] ENH_ASR ==="
./run.sh --ngpu 0 --stage 0 --stop-stage 15 --skip-upload_hf false --feats-type "raw" --spk-num 1 --enh_asr_args "--max_epoch=1 --enh_separator_conf num_spk=1" --python "${python}"
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
cd "${cwd}"
fi
# [ESPnet2] test st recipe
cd ./egs2/mini_an4/st1
echo "==== [ESPnet2] ST ==="
./run.sh --stage 1 --stop-stage 1
feats_types="raw fbank_pitch"
token_types="bpe char"
for t in ${feats_types}; do
./run.sh --stage 2 --stop-stage 4 --feats-type "${t}" --python "${python}"
done
for t in ${token_types}; do
./run.sh --stage 5 --stop-stage 5 --tgt_token_type "${t}" --src_token_type "${t}" --python "${python}"
done
for t in ${feats_types}; do
for t2 in ${token_types}; do
echo "==== feats_type=${t}, token_types=${t2} ==="
./run.sh --ngpu 0 --stage 6 --stop-stage 13 --skip-upload false --feats-type "${t}" --tgt_token_type "${t2}" --src_token_type "${t2}" \
--st-args "--max_epoch=1" --lm-args "--max_epoch=1" --inference_args "--beam_size 5" --python "${python}"
done
done
echo "==== feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --ngpu 0 --stage 10 --stop-stage 13 --skip-upload false --feats-type "raw" --tgt_token_type "bpe" --src_token_type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --inference_args "--beam_size 5" --python "${python}" \
--st-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1"
echo "==== use_streaming, feats_type=raw, token_types=bpe, model_conf.extract_feats_in_collect_stats=False, normalize=utt_mvn ==="
./run.sh --use_streaming true --ngpu 0 --stage 6 --stop-stage 13 --skip-upload false --feats-type "raw" --tgt_token_type "bpe" --src_token_type "bpe" \
--feats_normalize "utterance_mvn" --lm-args "--max_epoch=1" --inference_args "--beam_size 5" --python "${python}" \
--st-args "--model_conf extract_feats_in_collect_stats=false --max_epoch=1 --encoder=contextual_block_transformer --decoder=transformer
--encoder_conf block_size=40 --encoder_conf hop_size=16 --encoder_conf look_ahead=16"
# Remove generated files in order to reduce the disk usage
rm -rf exp dump data
cd "${cwd}"
# [ESPnet2] Validate configuration files
echo "<blank>" > dummy_token_list
echo "==== [ESPnet2] Validation configuration files ==="
if python3 -c 'import torch as t; from packaging.version import parse as L; assert L(t.__version__) >= L("1.8.0")' &> /dev/null; then
for f in egs2/*/asr1/conf/train_asr*.yaml; do
if [ "$f" == "egs2/fsc/asr1/conf/train_asr.yaml" ]; then
if ! python3 -c "import s3prl" > /dev/null; then
continue
fi
fi
${python} -m espnet2.bin.asr_train --config "${f}" --iterator_type none --dry_run true --output_dir out --token_list dummy_token_list
done
for f in egs2/*/asr1/conf/train_lm*.yaml; do
${python} -m espnet2.bin.lm_train --config "${f}" --iterator_type none --dry_run true --output_dir out --token_list dummy_token_list
done
for f in egs2/*/tts1/conf/train*.yaml; do
${python} -m espnet2.bin.tts_train --config "${f}" --iterator_type none --normalize none --dry_run true --output_dir out --token_list dummy_token_list
done
for f in egs2/*/enh1/conf/train*.yaml; do
${python} -m espnet2.bin.enh_train --config "${f}" --iterator_type none --dry_run true --output_dir out
done
for f in egs2/*/ssl1/conf/train*.yaml; do
${python} -m espnet2.bin.hubert_train --config "${f}" --iterator_type none --normalize none --dry_run true --output_dir out --token_list dummy_token_list
done
for f in egs2/*/enh_asr1/conf/train_enh_asr*.yaml; do
${python} -m espnet2.bin.enh_s2t_train --config "${f}" --iterator_type none --dry_run true --output_dir out --token_list dummy_token_list
done
fi
# These files must be same each other.
for base in cmd.sh conf/slurm.conf conf/queue.conf conf/pbs.conf; do
file1=
for f in egs2/*/*/"${base}"; do
if [ -z "${file1}" ]; then
file1="${f}"
fi
diff "${file1}" "${f}" || { echo "Error: ${file1} and ${f} differ: To solve: for f in egs2/*/*/${base}; do cp egs2/TEMPLATE/asr1/${base} \${f}; done" ; exit 1; }
done
done
echo "==== [ESPnet2] test setup.sh ==="
for d in egs2/TEMPLATE/*; do
if [ -d "${d}" ]; then
d="${d##*/}"
egs2/TEMPLATE/"$d"/setup.sh egs2/test/"${d}"
fi
done
echo "=== report ==="
coverage combine egs2/*/*/.coverage
coverage report
coverage xml
|
def quick_sort(arr):
if not arr:
return []
pivot = arr[0]
left = [x for x in arr[1:] if x <= pivot]
right = [x for x in arr[1:] if x > pivot]
return quick_sort(left) + [pivot] + quick_sort(right)
arr = [5, 2, 3, 1, 9]
sorted_arr = quick_sort(arr)
print(sorted_arr) # output: [1, 2, 3, 5, 9]
|
<gh_stars>1-10
package io.github.rcarlosdasilva.weixin.model.builder;
import java.util.Calendar;
import com.google.common.base.Preconditions;
import io.github.rcarlosdasilva.weixin.common.dictionary.MessageType;
import io.github.rcarlosdasilva.weixin.core.exception.InvalidNotificationResponseTypeException;
import io.github.rcarlosdasilva.weixin.model.notification.NotificationMeta;
import io.github.rcarlosdasilva.weixin.model.notification.NotificationResponsePlaintext;
/**
 * Builder for replies to WeChat push notifications
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class NotificationResponseBuilder {
private NotificationMeta meta;
private NotificationResponsePlaintext response;
private boolean noResponse = false;
public NotificationResponseBuilder() {
this.response = new NotificationResponsePlaintext();
}
/**
   * Create a builder for a reply to a WeChat push notification.
   *
   * @param notificationMeta
   *          the basic content of the push notification.
   * @return the builder
*/
public NotificationResponseBuilder with(NotificationMeta notificationMeta) {
Preconditions.checkNotNull(notificationMeta);
NotificationResponseBuilder builder = new NotificationResponseBuilder();
builder.meta = notificationMeta;
return builder;
}
/**
   * Reply with "success" by default (no message body).
   *
   * <p>
   * If the server cannot guarantee to process and reply within five seconds, it must reply as follows:<br>
   * 1. reply "success" directly (recommended) <br>
   * 2. reply with an empty string (a string of zero byte length, not an XML body whose content field is empty)
*
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseNothing() {
this.noResponse = true;
return this;
}
/**
   * Reply with a text message.
   *
   * @param content
   *          the message content (line breaks inside content are rendered as such by the WeChat client)
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseText(String content) {
this.response.setType(MessageType.TEXT);
this.response.getInfo().setContent(content);
return this;
}
/**
   * Reply with an image message.
   *
   * @param mediaId
   *          the id obtained by uploading the multimedia file through the material-management API.
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseImage(String mediaId) {
this.response.setType(MessageType.IMAGE);
this.response.getInfo().setMediaId(mediaId);
return this;
}
/**
   * Reply with a voice message.
   *
   * @param mediaId
   *          the id obtained by uploading the multimedia file through the material-management API.
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseVoice(String mediaId) {
this.response.setType(MessageType.VOICE);
this.response.getInfo().setMediaId(mediaId);
return this;
}
/**
   * Reply with a video message.
   *
   * @param mediaId
   *          the id obtained by uploading the multimedia file through the material-management API.
   * @param title
   *          the title of the video message
   * @param description
   *          the description of the video message
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseVideo(String mediaId, String title,
String description) {
this.response.setType(MessageType.VIDEO);
this.response.getInfo().setMediaId(mediaId);
this.response.getInfo().setTitle(title);
this.response.getInfo().setDescription(description);
return this;
}
/**
   * Reply with a music message.
   *
   * @param mediaThumbId
   *          the media id of the thumbnail, obtained by uploading the multimedia file through the material-management API
   * @param title
   *          the music title
   * @param description
   *          the music description
   * @param musicUrl
   *          the music link
   * @param musicHqUrl
   *          the high-quality music link, preferred for playback in a Wi-Fi environment
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseMusic(String mediaThumbId, String title,
String description, String musicUrl, String musicHqUrl) {
this.response.setType(MessageType.MUSIC);
this.response.getInfo().setTitle(title);
this.response.getInfo().setDescription(description);
this.response.getInfo().setMediaThumbId(mediaThumbId);
this.response.getInfo().setUrl(musicUrl);
this.response.getInfo().setOtherUrl(musicHqUrl);
return this;
}
/**
   * Reply with a news (rich media) message.
   * <p>
   * Adds a single article to the news message; if there are several articles, call this method multiple times.
   *
   * @param title
   *          the title of the article
   * @param description
   *          the description of the article
   * @param url
   *          the link opened when the article is clicked
   * @param picUrl
   *          the image link; JPG and PNG are supported, with best results at 360*200 for the large image and 200*200 for the small image
* @return {@link NotificationResponseBuilder}
*/
public NotificationResponseBuilder responseNewsOneOf(String title, String description, String url,
String picUrl) {
this.response.setType(MessageType.NEWS_EXTERNAL);
this.response.addInfo(title, description, url, picUrl);
return this;
}
/**
   * Build the WeChat push response model.
*
* @return {@link NotificationResponsePlaintext}
*/
public NotificationResponsePlaintext build() {
if (this.noResponse) {
return null;
}
if (this.response.getType() == null) {
throw new InvalidNotificationResponseTypeException();
}
this.response.setFromUser(this.meta.getToUser());
this.response.setToUser(this.meta.getFromUser());
this.response.setTime(Calendar.getInstance().getTimeInMillis());
return this.response;
}
}
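// Minimal usage sketch (assumes `meta` was parsed from an incoming push notification;
// the reply text is illustrative):
//
//     NotificationResponsePlaintext reply =
//         new NotificationResponseBuilder().with(meta).responseText("hello").build();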
|
<filename>lib/commands/listCommand.js<gh_stars>10-100
var Promise = require("bluebird");
var yargs = require('yargs');
var _ = require('lodash');
var Table = require('cli-table2');
var colors = require('colors');
var bittrexPromise = require('../bittrex-promise');
module.exports = listCommand;
function listCommand(argv){
var baseCurrency = argv.market.toUpperCase();
bittrexPromise.getMarketSummaries().then(function(markets){
var zzz = markets.map(function(market){
var uuu = market.MarketName.split('-');
var last = market.Last;
var prev = market.PrevDay;
var percentChange = (last - prev)/prev*100.0;
return {
price: last,
percentChange: percentChange,
baseVolume: market.BaseVolume,
baseCurrency: uuu[0],
tradeCurrency: uuu[1]
};
});
var lists = _.groupBy(zzz, x=> x.baseCurrency);
var list = lists[baseCurrency];
var xxx = _.orderBy(list,function(market){
if(argv.c){
return market.tradeCurrency;
} else if(argv.r){
return market.price;
} else if(argv.v){
return market.baseVolume;
} else if(argv.p){
return market.percentChange;
} else {
return market.baseVolume;
}
},'desc');
var table = new Table({head: ['Coin'.cyan, 'Rate'.cyan,'Volume'.cyan, '% Change'.cyan]});
var n = argv.n || xxx.length;
for(var x = 0; x < n; x++){
var market = xxx[x];
var key = market.baseCurrency + '-' + market.tradeCurrency;
var pc = market.percentChange || 0;
var pc2 = '';
if(pc > 0){
pc2 = pc.toFixed(2).green;
} else {
pc2 = pc.toFixed(2).red;
}
table.push([key, market.price.toFixed(8), parseFloat(market.baseVolume.toFixed(3)), pc2]);
}
//print the table
console.log(table.toString());
});
}
|
#!/bin/bash
set -ev
TAGS=$1
export CGO_CFLAGS_ALLOW=".*"
export CGO_LDFLAGS_ALLOW=".*"
export CGO_LDFLAGS="-Wl,--dynamic-linker=/lib64/ld-linux-x86-64.so.2"
DIRS="common lcore eal ring mempool memzone port"
echo "Testing $TAGS"
for subdir in $DIRS; do
go test -tags $TAGS github.com/yerden/go-dpdk/$subdir
done
|
<gh_stars>1-10
//
// ESPUDPSocketClient.h
// EspTouchDemo
//
// Created by fby on 4/13/15.
// Copyright (c) 2015 fby. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface ESPUDPSocketClient : NSObject
- (void) close;
- (void) interrupt;
/**
* send the data by UDP
*
* @param bytesArray2
* the array of datas to be sent
* @param targetHostName
* the host name of target, e.g. 192.168.1.101
* @param port
* the port of target
* @param interval
* the milliseconds to between each UDP sent
*/
- (void) sendDataWithBytesArray2: (NSArray *) bytesArray2 ToTargetHostName: (NSString *)targetHostName WithPort: (int) port
andInterval: (long) interval;
/**
* send the data by UDP
*
* @param bytesArray2
* the data to be sent
* @param offset
* the offset which data to be sent
* @param count
* the count of the data
* @param targetHostName
* the host name of target, e.g. 192.168.1.101
* @param port
* the port of target
* @param interval
* the milliseconds to between each UDP sent
*/
- (void) sendDataWithBytesArray2: (NSArray *) bytesArray2 Offset: (NSUInteger) offset Count: (NSUInteger) count ToTargetHostName: (NSString *)targetHostName WithPort: (int) port
andInterval: (long) interval;
@end
|
sudo curl -L https://yt-dl.org/downloads/latest/youtube-dl -o /usr/local/bin/youtube-dl
# sudo wget https://yt-dl.org/downloads/latest/youtube-dl -O /usr/local/bin/youtube-dl
sudo chmod a+rx /usr/local/bin/youtube-dl
|
def product(lst):
    if not lst:
        # the empty product is conventionally 1
        return 1
    if len(lst) == 1:
        return lst[0]
    else:
        return lst[0] * product(lst[1:])
print(product([1, 5, 8, 10]))
|
<filename>Starwars/battle.h
#ifndef JNP4_BATTLE_H
#define JNP4_BATTLE_H
#include <iostream>
#include <type_traits>
#include <cassert>
#include <tuple>
#include <array>
#include <cmath>
#include <algorithm>
#include "rebelfleet.h"
#include "imperialfleet.h"
template<typename T, T i, T t1>
static constexpr T root(){
if constexpr(i*i > t1) {
return i;
}
else {
return root<T, i+1, t1>();
}
}
template<typename T, T t0, T t1, typename...S>
class SpaceBattle{
private:
T timeElapsed;
int rebelStrength;
int imperialStrength;
std::tuple<S...> battlingShips;
std::array<T, root<T, 0, t1>()> attackWindows;
public:
SpaceBattle(S... ship) : battlingShips(ship...) {
rebelStrength = 0;
imperialStrength = 0;
attackWindows = calcWindows();
countForces(battlingShips);
timeElapsed = t0;
assert(t0 < t1 && t0 >= 0);
}
template<size_t n = 0, typename ...xs>
constexpr void countForces(std::tuple<xs...> &t) {
if constexpr (n < sizeof...(xs)) {
if (std::tuple_element_t<n, std::tuple<xs...>>::isImperial()){
if (std::get<n>(t).getShield() != 0) imperialStrength++;
}
else{
if (std::get<n>(t).getShield() != 0) rebelStrength++;
}
countForces<n + 1, xs...>(t);
}
}
constexpr static ::std::array<T, root<T, 0, t1>()>calcWindows() {
static_assert(std::is_integral<T>::value && t0 <= t1 && t0 >= 0);
std::array<T, root<T, 0, t1>()> gaps{0};
T current = 0;
while(current < root<T, 0, t1>()){
gaps[current] = pow(current, 2);
current++;
}
return gaps;
}
template<size_t n = 0, typename ...xs, typename I>
constexpr void getRebelShips(std::tuple<xs...> &t, ImperialStarship<I> &imperial) {
if constexpr (n < sizeof...(xs)) {
if constexpr (!std::tuple_element_t<n, std::tuple<xs...>>::isImperial()){
auto &ship = std::get<n>(t);
if(ship.getShield() != 0 && imperial.getShield() != 0){
attack(imperial, ship);
if(ship.getShield() == 0) rebelStrength--;
if(imperial.getShield() == 0) imperialStrength--;
}
}
getRebelShips<n + 1, xs...>(t, imperial);
}
}
template<size_t n = 0, typename ...xs>
constexpr void getImperialShips(std::tuple<xs...> &t) {
if constexpr (n < sizeof...(xs)) {
if constexpr (std::tuple_element_t<n, std::tuple<xs...>>::isImperial()){
auto &ship = std::get<n>(t);
if(ship.getShield() != 0) {
getRebelShips<0, xs...>(t, ship);
}
}
getImperialShips<n + 1, xs...>(t);
}
}
int countRebelFleet() {
return rebelStrength;
}
int countImperialFleet() {
return imperialStrength;
}
void fight(){
getImperialShips(battlingShips);
}
void tick(T timeStep) {
if(imperialStrength == 0 && rebelStrength == 0) std::cout << "DRAW\n";
if(imperialStrength == 0 && rebelStrength != 0) std::cout << "REBELLION WON\n";
if(imperialStrength != 0 && rebelStrength == 0) std::cout << "IMPERIUM WON\n";
bool intoBattle = std::find(std::begin(attackWindows), std::end(attackWindows), timeElapsed) != std::end(attackWindows);
if(intoBattle) fight();
timeElapsed = (timeElapsed + timeStep) % (t1 + 1);
}
};
#endif
|
<filename>scripts/taller3.js
const resetButton = document.getElementById("btn-reset");
resetButton.addEventListener("click", reset);
function reset() {
const inputAmount = document.getElementById("input-amount");
const inputInterest = document.getElementById("input-interest");
const inputTime = document.getElementById("input-time");
const inputTypeInterest = document.getElementById("type-interest");
const textResultado = document.getElementById("answer");
inputAmount.value = "";
inputInterest.value = "";
inputTime.value = "";
inputTypeInterest.value = "simple";
textResultado.innerHTML = "Final investment";
}
// Logic :D
function mostrarMontoInversion() {
const inputAmount = document.getElementById("input-amount");
const inputInterest = document.getElementById("input-interest");
const inputTime = document.getElementById("input-time");
const inputTypeInterest = document.getElementById("type-interest");
const textResultado = document.getElementById("answer");
let montoValue = inputAmount.value;
let interesValue = inputInterest.value;
let tiempoValue = inputTime.value;
let tipoInteresValue = inputTypeInterest.value;
if (montoValue !== "" && interesValue !== "" && tiempoValue !== "") {
let resultado = 0;
if (tipoInteresValue === "simple") {
resultado = investmentWithSimpleInterest(
montoValue,
interesValue,
tiempoValue
);
resultado = redondeoAlCentesimo(resultado);
textResultado.innerHTML = `Your investment with initial capital of $${montoValue} and simple annual interest of ${interesValue}% over ${tiempoValue} years will be <span>$${resultado}</span>.`;
}
if (tipoInteresValue === "compound") {
resultado = investmentWithCompoundInterest(
montoValue,
interesValue,
tiempoValue
);
resultado = redondeoAlCentesimo(resultado);
textResultado.innerHTML = `Your investment with initial capital of $${montoValue} and compound annual interest of ${interesValue}% over ${tiempoValue} years will be <span>$${resultado}</span>`;
}
} else {
textResultado.innerHTML = `Missing data ❗❗`;
}
}
|
<reponame>wuximing/dsshop
/**
 * @fileoverview Determine whether a point lies inside a polygon
* @author <EMAIL>
*/
// Ray-casting hit test for polygons; reference: https://blog.csdn.net/WilliamSun0122/article/details/77994526
var tolerance = 1e-6;
// Three-state sign function: determines the ordering of two doubles (via their difference) within eps precision
function dcmp(x) {
if (Math.abs(x) < tolerance) {
return 0;
}
return x < 0 ? -1 : 1;
}
// Check whether point q lies on the segment between p1 and p2
function onSegment(p1, p2, q) {
if ((q[0] - p1[0]) * (p2[1] - p1[1]) === (p2[0] - p1[0]) * (q[1] - p1[1]) &&
Math.min(p1[0], p2[0]) <= q[0] &&
q[0] <= Math.max(p1[0], p2[0]) &&
Math.min(p1[1], p2[1]) <= q[1] &&
q[1] <= Math.max(p1[1], p2[1])) {
return true;
}
return false;
}
// Point-in-polygon test for point P using the ray-casting method
export default function isInPolygon(points, x, y) {
var isHit = false;
var n = points.length;
if (n <= 2) {
        // With fewer than 3 points, the SVG shape is not rendered and cannot be picked
return false;
}
for (var i = 0; i < n; i++) {
var p1 = points[i];
var p2 = points[(i + 1) % n];
if (onSegment(p1, p2, [x, y])) {
            // The point lies on one of the polygon's edges
return true;
}
        // The first condition checks min(p1[1],p2[1]) < y <= max(p1[1],p2[1])
        // The second checks that the tested point lies to the left of the intersection of the ray with the edge
if (dcmp(p1[1] - y) > 0 !== dcmp(p2[1] - y) > 0 &&
dcmp(x - ((y - p1[1]) * (p1[0] - p2[0])) / (p1[1] - p2[1]) - p1[0]) < 0) {
isHit = !isHit;
}
}
return isHit;
}
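// Example (sketch): hit-testing a unit square with the function above.
// isInPolygon([[0, 0], [1, 0], [1, 1], [0, 1]], 0.5, 0.5) // true  (inside)
// isInPolygon([[0, 0], [1, 0], [1, 1], [0, 1]], 2, 2)     // false (outside)
// isInPolygon([[0, 0], [1, 0], [1, 1], [0, 1]], 1, 0.5)   // true  (on an edge)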
//# sourceMappingURL=polygon.js.map
|
class BinarySearchTree:
'''
A binary search tree is a Tree Data Structure
in which each node has at most two children which
are referred to as the left child and the right child.
'''
#defining a constructor to initialize the root node of a BST
def __init__(self, value):
self.value = value
self.left = None
self.right = None
#method to insert a node at its correct position in BST
def insert(self, value):
#compare the new element with the root node
if self.value > value:
#if the new element is smaller than root node,
#it has to be inserted in left subtree
if self.left is None:
self.left = BinarySearchTree(value)
else:
self.left.insert(value)
else:
#if the new element is greater than root node,
#it has to be inserted in right subtree
if self.right is None:
self.right = BinarySearchTree(value)
else:
self.right.insert(value)
#method to search an element in the tree
def search(self, value):
if self.value == value:
return True
#search in left subtree
if value < self.value:
if self.left is None:
return False
return self.left.search(value)
#search in right subtree
if value > self.value:
if self.right is None:
return False
return self.right.search(value)
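# Minimal usage sketch of the class above:
if __name__ == "__main__":
    root = BinarySearchTree(50)
    for value in (30, 70, 20, 40, 60, 80):
        root.insert(value)
    print(root.search(60))  # True
    print(root.search(25))  # False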
|
#!/bin/bash
##########################################################################################################################################################################################
#- Purpose: Script used to install pre-requisites, deploy/undeploy service, start/stop service, test service
#- Parameters are:
#- [-a] action - value: login, install, deploy, deploycosmos, undeploy, buildcontainer, deploycontainer, integration
#- [-e] Stop on Error - by default false
#- [-s] Silent mode - by default false
#- [-c] configuration file - the environment file used by the tool (configuration/.default.env by default)
#
# executable
###########################################################################################################################################################################################
set -u
#repoRoot="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
parent_path=$(
cd "$(dirname "${BASH_SOURCE[0]}")/../../"
pwd -P
)
# Read variables in configuration file
SCRIPTS_DIRECTORY=`dirname $0`
source "$SCRIPTS_DIRECTORY"/common.sh
# container version (current date)
export APP_VERSION=$(date +"%y%m%d.%H%M%S")
# container internal HTTP port
export APP_PORT=80
# webapp prefix
export AZURE_APP_PREFIX="mini01"
#######################################################
#- function used to print out script usage
#######################################################
function usage() {
echo
echo "Arguments:"
echo -e " -a Sets MTS Tool action {login, install, deploy, undeploy, buildcontainer, deploycontainer, integration}"
echo -e " -c Sets the MTS Tool configuration file"
echo -e " -e Sets the stop on error (false by defaut)"
echo -e " -e Sets Silent mode installation or deployment (false by defaut)"
echo
echo "Example:"
echo -e " bash ./mtstool.sh -a install "
echo -e " bash ./mtstool.sh -a deploy -c .mtstool.env -e true -s true"
}
action=
configuration_file="$(dirname "${BASH_SOURCE[0]}")/../../configuration/.default.env"
stoperror=false
silentmode=false
while getopts "a:c:e:s:hq" opt; do
case $opt in
a) action=$OPTARG ;;
c) configuration_file=$OPTARG ;;
e) stoperror=$OPTARG ;;
s) silentmode=$OPTARG ;;
:)
echo "Error: -${OPTARG} requires a value"
exit 1
;;
*)
usage
exit 1
;;
esac
done
# Validation
if [[ $# -eq 0 || -z $action || -z $configuration_file ]]; then
echo "Required parameters are missing"
usage
exit 1
fi
if [[ ! $action == login && ! $action == install && ! $action == deploycontainer && ! $action == buildcontainer && ! $action == deploy && ! $action == deploycosmos && ! $action == undeploy && ! $action == integration ]]; then
echo "Required action is missing, values: login, install, deploy, deploycosmos, undeploy, deploycontainer, buildcontainer, integration"
usage
exit 1
fi
# colors for formatting the output
YELLOW='\033[1;33m'
GREEN='\033[1;32m'
RED='\033[0;31m'
BLUE='\033[1;34m'
NC='\033[0m' # No Color
# check if configuration file is set
if [[ -z $configuration_file ]]; then
configuration_file="$(dirname "${BASH_SOURCE[0]}")/../../configuration/.default.env"
fi
# get Azure Subscription and Tenant Id if already connected
AZURE_SUBSCRIPTION_ID=$(az account show --query id --output tsv 2> /dev/null) || true
AZURE_TENANT_ID=$(az account show --query tenantId -o tsv 2> /dev/null) || true
# sign in if no subscription or tenant is available yet
if [[ -z ${AZURE_SUBSCRIPTION_ID} || -z ${AZURE_TENANT_ID} ]]; then
printError "Connection to Azure required, launching 'az login'"
printMessage "Login..."
azLogin
checkLoginAndSubscription
printMessage "Login done"
AZURE_SUBSCRIPTION_ID=$(az account show --query id --output tsv 2> /dev/null) || true
AZURE_TENANT_ID=$(az account show --query tenantId -o tsv 2> /dev/null) || true
fi
AZURE_APP_PREFIX="mini$(shuf -i 1000-9999 -n 1)"
# Check if configuration file exists
if [[ ! -f "$configuration_file" ]]; then
cat > "$configuration_file" << EOF
AZURE_REGION="eastus2"
AZURE_APP_PREFIX=${AZURE_APP_PREFIX}
AZURE_SUBSCRIPTION_ID=${AZURE_SUBSCRIPTION_ID}
AZURE_TENANT_ID=${AZURE_TENANT_ID}
EOF
fi
if [[ $configuration_file ]]; then
if [ ! -f "$configuration_file" ]; then
printError "$configuration_file does not exist."
exit 1
fi
set -o allexport
source "$configuration_file"
set +o allexport
else
printWarning "No env. file specified. Using environment variables."
fi
if [[ "${action}" == "install" ]] ; then
printMessage "Installing pre-requisite"
printProgress "Installing azure cli"
MTS_TEMPDIR=$(mktemp -d -t env-XXXXXXXXXX)
curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
az config set extension.use_dynamic_install=yes_without_prompt
printProgress "Installing ffmpeg"
sudo apt-get -y update
sudo apt-get -y install ffmpeg
sudo apt-get -y install jq
    sudo apt-get -y install dnsutils # provides the dig command
printProgress "Installing .Net 6.0 SDK "
wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O "${MTS_TEMPDIR}"/packages-microsoft-prod.deb
sudo dpkg -i "${MTS_TEMPDIR}"/packages-microsoft-prod.deb
sudo apt-get update
sudo apt-get install -y apt-transport-https
sudo apt-get install -y dotnet-sdk-6.0
printProgress "Installing Typescript and node services "
npm install -g typescript
tsc --version
npm install -g webpack
npm i --save-dev @types/jquery
npm install -g http-server
npm install -g forever
printMessage "Installing pre-requisites done"
exit 0
fi
if [[ "${action}" == "login" ]] ; then
printMessage "Login..."
azLogin
checkLoginAndSubscription
printMessage "Login done"
exit 0
fi
if [[ "${action}" == "deploy" ]] ; then
printMessage "Deploying the infrastructure..."
# Check Azure connection
printProgress "Check Azure connection for subscription: '$AZURE_SUBSCRIPTION_ID'"
azLogin
checkError
# Deploy infrastructure image
printMessage "Deploy infrastructure subscription: '$AZURE_SUBSCRIPTION_ID' region: '$AZURE_REGION' prefix: '$AZURE_APP_PREFIX' sku: 'B2' No Cosmos"
deployAzureInfrastructure $AZURE_SUBSCRIPTION_ID $AZURE_REGION $AZURE_APP_PREFIX "B2" "0"
printMessage "Azure Container Registry DNS name: ${ACR_LOGIN_SERVER}"
printMessage "Azure Web App Url: ${WEB_APP_SERVER}"
printMessage "Azure function Url: ${FUNCTION_SERVER}"
printMessage "Cosmos DB Service: ${COSMOS_DB_SERVICE_NAME}"
printMessage "Storage Account Name: ${STORAGE_ACCOUNT_NAME}"
printMessage "Deploying the infrastructure done"
exit 0
fi
if [[ "${action}" == "deploycosmos" ]] ; then
printMessage "Deploying the infrastructure with Cosmos..."
# Check Azure connection
printProgress "Check Azure connection for subscription: '$AZURE_SUBSCRIPTION_ID'"
azLogin
checkError
# Deploy infrastructure image
printMessage "Deploy infrastructure subscription: '$AZURE_SUBSCRIPTION_ID' region: '$AZURE_REGION' prefix: '$AZURE_APP_PREFIX' sku: 'B2' with Cosmos"
deployAzureInfrastructure $AZURE_SUBSCRIPTION_ID $AZURE_REGION $AZURE_APP_PREFIX "B2" "1"
printMessage "Azure Container Registry DNS name: ${ACR_LOGIN_SERVER}"
printMessage "Azure Web App Url: ${WEB_APP_SERVER}"
printMessage "Azure function Url: ${FUNCTION_SERVER}"
printMessage "Cosmos DB Service: ${COSMOS_DB_SERVICE_NAME}"
printMessage "Storage Account Name: ${STORAGE_ACCOUNT_NAME}"
printMessage "Deploying the infrastructure done"
exit 0
fi
if [[ "${action}" == "undeploy" ]] ; then
printMessage "Undeploying the infrastructure..."
# Check Azure connection
printProgress "Check Azure connection for subscription: '$AZURE_SUBSCRIPTION_ID'"
azLogin
checkError
undeployAzureInfrastructure $AZURE_SUBSCRIPTION_ID $AZURE_APP_PREFIX
printMessage "Undeploying the infrastructure done"
exit 0
fi
if [[ "${action}" == "deploycontainer" ]] ; then
printMessage "Deploying the containers in the infrastructure..."
# Check Azure connection
printProgress "Check Azure connection for subscription: '$AZURE_SUBSCRIPTION_ID'"
azLogin
checkError
    # Read the environment variables
getDeploymentVariables "${AZURE_APP_PREFIX}"
# get latest image version
latest_dotnet_version=$(getLatestImageVersion "${ACR_NAME}" "be-dotnet-web-api")
if [ -z "${latest_dotnet_version}" ]; then
latest_dotnet_version=$APP_VERSION
fi
printProgress "Latest version to deploy: '$latest_dotnet_version'"
# deploy be-dotnet-web-api
printProgress "Deploy image be-dotnet-web-api:${latest_dotnet_version} from Azure Container Registry ${ACR_LOGIN_SERVER}"
deployWebAppContainer "$AZURE_SUBSCRIPTION_ID" "$AZURE_APP_PREFIX" "functionapp" "$FUNCTION_NAME" "${ACR_LOGIN_SERVER}" "${ACR_NAME}" "be-dotnet-web-api" "latest" "${latest_dotnet_version}" "${APP_PORT}"
printProgress "Checking role assignment 'Storage Table Data Contributor' between '${FUNCTION_NAME}' and Storage '${STORAGE_ACCOUNT_NAME}' "
resourcegroup="rg${AZURE_APP_PREFIX}"
WebAppMsiPrincipalId=$(az functionapp show -n "${FUNCTION_NAME}" -g "${resourcegroup}" -o json | jq -r .identity.principalId)
WebAppMsiAcrPullAssignmentCount=$(az role assignment list --assignee "${WebAppMsiPrincipalId}" --scope /subscriptions/"${AZURE_SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" | jq -r 'select(.[].roleDefinitionName=="Storage Table Data Contributor") | length')
if [ "$WebAppMsiAcrPullAssignmentCount" != "1" ];
then
printProgress "Assigning 'Storage Table Data Contributor' role assignment on scope ${STORAGE_ACCOUNT_NAME}..."
        az role assignment create --assignee-object-id "$WebAppMsiPrincipalId" --assignee-principal-type ServicePrincipal --scope /subscriptions/"${AZURE_SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" --role "Storage Table Data Contributor"
fi
# get latest image version
latest_webapp_version=$(getLatestImageVersion "${ACR_NAME}" "fe-ts-web-app")
if [ -z "${latest_webapp_version}" ]; then
latest_webapp_version=$APP_VERSION
fi
# deploy fe-ts-web-app
printProgress "Deploy image fe-ts-web-app:${latest_webapp_version} from Azure Container Registry ${ACR_LOGIN_SERVER}"
deployWebAppContainer "$AZURE_SUBSCRIPTION_ID" "$AZURE_APP_PREFIX" "webapp" "$WEB_APP_NAME" "${ACR_LOGIN_SERVER}" "${ACR_NAME}" "fe-ts-web-app" "latest" "${latest_webapp_version}" "${APP_PORT}"
#printProgress "Checking role assignment 'Storage Blob Data Contributor' between '${WEB_APP_NAME}' and Storage '${STORAGE_ACCOUNT_NAME}' "
#resourcegroup="rg${AZURE_APP_PREFIX}"
#WebAppMsiPrincipalId=$(az webapp show -n "${WEB_APP_NAME}" -g "${resourcegroup}" -o json | jq -r .identity.principalId)
#WebAppMsiAcrPullAssignmentCount=$(az role assignment list --assignee "${WebAppMsiPrincipalId}" --scope /subscriptions/"${AZURE_SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" | jq -r 'select(.[].roleDefinitionName=="Storage Blob Data Contributor") | length')
#if [ "$WebAppMsiAcrPullAssignmentCount" != "1" ];
#then
# printProgress "Assigning 'Storage Blob Data Contributor' role assignment on scope ${STORAGE_ACCOUNT_NAME}..."
# az role assignment create --assignee-object-id "$WebAppMsiPrincipalId" --assignee-principal-type ServicePrincipal --scope /subscriptions/"${SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" --role "Storage Blob Data Contributor"
#fi
printProgress "Checking role assignment 'Storage Blob Data Contributor' between current user and Storage '${STORAGE_ACCOUNT_NAME}' "
resourcegroup="rg${AZURE_APP_PREFIX}"
WebAppMsiPrincipalId=$(az ad signed-in-user show --query objectId --output tsv )
WebAppMsiAcrPullAssignmentCount=$(az role assignment list --assignee "${WebAppMsiPrincipalId}" --scope /subscriptions/"${AZURE_SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" | jq -r 'select(.[].roleDefinitionName=="Storage Blob Data Contributor") | length')
if [ "$WebAppMsiAcrPullAssignmentCount" != "1" ];
then
printProgress "Assigning 'Storage Blob Data Contributor' role assignment on scope ${STORAGE_ACCOUNT_NAME}..."
        az role assignment create --assignee-object-id "$WebAppMsiPrincipalId" --assignee-principal-type User --scope /subscriptions/"${AZURE_SUBSCRIPTION_ID}"/resourceGroups/"${resourcegroup}"/providers/Microsoft.Storage/storageAccounts/"${STORAGE_ACCOUNT_NAME}" --role "Storage Blob Data Contributor"
fi
# Test services
# Test be-dotnet-web-api
dotnet_rest_api_url="https://${FUNCTION_SERVER}/version"
printProgress "Testing be-dotnet-web-api url: $dotnet_rest_api_url expected version: ${latest_dotnet_version}"
result=$(checkWebUrl "${dotnet_rest_api_url}" "${latest_dotnet_version}" 420)
if [[ $result != "true" ]]; then
printError "Error while testing be-dotnet-web-api"
else
printMessage "Testing be-dotnet-web-api successful"
fi
# Test web-app
node_web_app_url="https://${WEB_APP_SERVER}/config.json"
printProgress "Testing node_web_app_url url: $node_web_app_url expected version: ${latest_webapp_version}"
result=$(checkWebUrl "${node_web_app_url}" "${latest_webapp_version}" 420)
if [[ $result != "true" ]]; then
printError "Error while testing node_web_app_url"
else
printMessage "Testing node_web_app_url successful"
fi
printMessage "Deploying the containers in the infrastructure done"
exit 0
fi
if [[ "${action}" == "buildcontainer" ]] ; then
printMessage "Building the containers..."
# Check Azure connection
printProgress "Check Azure connection for subscription: '$AZURE_SUBSCRIPTION_ID'"
azLogin
checkError
    # Read the environment variables
getDeploymentVariables "${AZURE_APP_PREFIX}"
# Create or update application
echo "Check if Application 'sp-${AZURE_APP_PREFIX}-app' exists"
cmd="az ad app list --query \"[?displayName=='sp-${AZURE_APP_PREFIX}-app'].appId\" --output tsv"
printProgress "$cmd"
appId=$(eval "$cmd") || true
if [[ -z ${appId} ]] ; then
# Create application
#cmd="az ad app create --reply-urls \"https://webapp${AZURE_APP_PREFIX}.azurewebsites.net/\" --oauth2-allow-implicit-flow \"true\" --display-name \"sp-${AZURE_APP_PREFIX}-app\" --required-resource-access \"[{\\\"resourceAppId\\\": \\\"00000003-0000-0000-c000-000000000000\\\",\\\"resourceAccess\\\": [{\\\"id\\\": \\\"e1fe6dd8-ba31-4d61-89e7-88639da4683d\\\",\\\"type\\\": \\\"Scope\\\"}]},{\\\"resourceAppId\\\": \\\"e406a681-f3d4-42a8-90b6-c2b029497af1\\\",\\\"resourceAccess\\\": [{\\\"id\\\": \\\"03e0da56-190b-40ad-a80c-ea378c433f7f\\\",\\\"type\\\": \\\"Scope\\\"}]}]\" | jq -r \".appId\" "
cmd="az ad app create --oauth2-allow-implicit-flow \"true\" --display-name \"sp-${AZURE_APP_PREFIX}-app\" --required-resource-access \"[{\\\"resourceAppId\\\": \\\"00000003-0000-0000-c000-000000000000\\\",\\\"resourceAccess\\\": [{\\\"id\\\": \\\"e1fe6dd8-ba31-4d61-89e7-88639da4683d\\\",\\\"type\\\": \\\"Scope\\\"}]},{\\\"resourceAppId\\\": \\\"e406a681-f3d4-42a8-90b6-c2b029497af1\\\",\\\"resourceAccess\\\": [{\\\"id\\\": \\\"03e0da56-190b-40ad-a80c-ea378c433f7f\\\",\\\"type\\\": \\\"Scope\\\"}]}]\" | jq -r \".appId\" "
printProgress "$cmd"
appId=$(eval "$cmd")
# Get application objectId
cmd="az ad app list --query \"[?displayName=='sp-${AZURE_APP_PREFIX}-app'].objectId\" --output tsv"
printProgress "$cmd"
objectId=$(eval "$cmd") || true
if [[ ! -z ${objectId} ]] ; then
cmd="az rest --method PATCH --uri \"https://graph.microsoft.com/v1.0/applications/$objectId\" \
--headers \"Content-Type=application/json\" \
--body \"{\\\"spa\\\":{\\\"redirectUris\\\":[\\\"https://webapp${AZURE_APP_PREFIX}.azurewebsites.net/\\\"]},\\\"identifierUris\\\":[\\\"api://${appId}\\\"]}\""
printProgress "$cmd"
eval "$cmd"
else
printError "Error while creating application sp-${AZURE_APP_PREFIX}-app can't get objectId"
exit 1
fi
fi
echo "Application Id: ${appId} for name: 'sp-${AZURE_APP_PREFIX}-app'"
# Build dotnet-api docker image
echo "Update file: ./src/be-dotnet-web-api/appsettings.json"
cmd="cat ./src/be-dotnet-web-api/appsettings.json | jq -r '.AzureAd.ClientId = \"${appId}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.json"
eval "$cmd"
cmd="cat ./src/be-dotnet-web-api/appsettings.Development.json | jq -r '.AzureAd.ClientId = \"${appId}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.Development.json"
eval "$cmd"
cmd="cat ./src/be-dotnet-web-api/appsettings.json | jq -r '.AzureAd.TenantId = \"${AZURE_TENANT_ID}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.json"
eval "$cmd"
cmd="cat ./src/be-dotnet-web-api/appsettings.Development.json | jq -r '.AzureAd.TenantId = \"${AZURE_TENANT_ID}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.Development.json"
eval "$cmd"
cmd="cat ./src/be-dotnet-web-api/appsettings.json | jq -r '.StorageAccount = \"${STORAGE_ACCOUNT_NAME}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.json"
eval "$cmd"
cmd="cat ./src/be-dotnet-web-api/appsettings.Development.json | jq -r '.StorageAccount = \"${STORAGE_ACCOUNT_NAME}\"' > tmp.$$.json && mv tmp.$$.json ./src/be-dotnet-web-api/appsettings.Development.json"
eval "$cmd"
# Build dotnet-api docker image
printMessage "Building dotnet-rest-api container version:${APP_VERSION} port: ${APP_PORT}"
buildWebAppContainer "${ACR_LOGIN_SERVER}" "./src/be-dotnet-web-api" "be-dotnet-web-api" "${APP_VERSION}" "latest" ${APP_PORT}
printMessage "Building fe-ts-web-app container version:${APP_VERSION} port: ${APP_PORT}"
# Update version in HTML package
echo "Update file: ./src/fe-ts-web-app/src/config.json"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.version = \"${APP_VERSION}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.clientId = \"${appId}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.tokenAPIRequest.scopes = [\"api://${appId}/user_impersonation\" ]' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.authority = \"https://login.microsoftonline.com/${AZURE_TENANT_ID}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.tenantId = \"${AZURE_TENANT_ID}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.redirectUri = \"https://${WEB_APP_SERVER}/\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.storageAccountName = \"${STORAGE_ACCOUNT_NAME}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.storageContainerName = \"${STORAGE_ACCOUNT_INPUT_CONTAINER}\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.storageSASToken = \"\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
cmd="cat ./src/fe-ts-web-app/src/config.json | jq -r '.apiEndpoint = \"https://${FUNCTION_SERVER}/\"' > tmp.$$.json && mv tmp.$$.json ./src/fe-ts-web-app/src/config.json"
eval "$cmd"
# build web app
pushd ./src/fe-ts-web-app
npm install
tsc --build tsconfig.json
webpack --config webpack.config.min.js
popd
buildWebAppContainer "${ACR_LOGIN_SERVER}" "./src/fe-ts-web-app" "fe-ts-web-app" "${APP_VERSION}" "latest" ${APP_PORT}
checkError
printMessage "Building the containers done"
exit 0
fi
if [[ "${action}" == "integration" ]] ; then
printMessage "Testing all the services deployed..."
printMessage "Testing all the services done"
exit 0
fi
|
package slacknotifications.teamcity.settings;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import jetbrains.buildServer.serverSide.SBuildServer;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.junit.Test;
import slacknotifications.SlackNotificationProxyConfig;
public class SlackNotificationMainSettingsTest {
SBuildServer server = mock(SBuildServer.class);
Integer proxyPort = 8080;
String proxyHost = "myproxy.mycompany.com";
String defaultChannel = "#my-channel";
String teamName = "myteam";
String token = "<PASSWORD>";
String iconUrl = "http://www.myicon.com/icon.gif";
String botName = "Team City";
@Test
public void TestFullConfig(){
SlackNotificationMainSettings whms = new SlackNotificationMainSettings(server);
whms.register();
whms.readFrom(getFullConfigElement());
String proxy = whms.getProxyForUrl("http://something.somecompany.com");
SlackNotificationProxyConfig whpc = whms.getProxyConfigForUrl("http://something.somecompany.com");
assertTrue(proxy.equals(this.proxyHost));
assertTrue(whpc.getProxyHost().equals(this.proxyHost ));
assertTrue(whpc.getProxyPort().equals(this.proxyPort));
assertTrue(whms.getDefaultChannel().equals(this.defaultChannel));
assertTrue(whms.getTeamName().equals(this.teamName));
assertTrue(whms.getToken().equals(this.token));
assertTrue(whms.getIconUrl().equals(this.iconUrl));
assertTrue(whms.getBotName().equals(this.botName));
assertTrue(whms.getShowBuildAgent());
assertTrue(whms.getShowElapsedBuildTime());
assertFalse(whms.getShowCommits());
assertEquals(15, whms.getMaxCommitsToDisplay());
}
@Test
public void TestEmptyDefaultsConfig(){
SlackNotificationMainSettings whms = new SlackNotificationMainSettings(server);
whms.register();
whms.readFrom(getEmptyDefaultsConfigElement());
String proxy = whms.getProxyForUrl("http://something.somecompany.com");
SlackNotificationProxyConfig whpc = whms.getProxyConfigForUrl("http://something.somecompany.com");
assertTrue(proxy.equals(this.proxyHost));
assertTrue(whpc.getProxyHost().equals(this.proxyHost ));
assertTrue(whpc.getProxyPort().equals(this.proxyPort));
assertTrue(whms.getDefaultChannel().equals(this.defaultChannel));
assertTrue(whms.getTeamName().equals(this.teamName));
assertTrue(whms.getToken().equals(this.token));
assertTrue(whms.getIconUrl().equals(this.iconUrl));
assertTrue(whms.getBotName().equals(this.botName));
assertNull(whms.getShowBuildAgent());
assertNull(whms.getShowElapsedBuildTime());
assertTrue(whms.getShowCommits());
assertEquals(5, whms.getMaxCommitsToDisplay());
}
private Element getFullConfigElement(){
return getElement("src/test/resources/main-config-full.xml");
}
private Element getEmptyDefaultsConfigElement(){
return getElement("src/test/resources/main-config-empty-defaults.xml");
}
private Element getElement(String filePath){
SAXBuilder builder = new SAXBuilder();
builder.setIgnoringElementContentWhitespace(true);
try {
Document doc = builder.build(filePath);
return doc.getRootElement();
} catch (JDOMException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
|
import run_script from "../run-script";
const { shell } = window.require('electron')
const { remote } = window.require('electron')
const app = remote.app;
const dialog = remote.dialog
const WIN = remote.getCurrentWindow()
var fs = window.require('fs');
const options = {
title: "Save",
defaultPath: app.getPath('desktop'),
buttonLabel: "Save",
filters: [
{ name: 'C++ source files', extensions: ['cpp', 'cc', 'cxx', 'c++', 'cp'] },
{ name: 'C source files', extensions: ['c'] },
{ name: 'Header files', extensions: ['h', 'hpp', 'rh', 'hh'] },
{ name: 'Resource files', extensions: ['rc'] },
{ name: 'All Files', extensions: ['*'] }
]
}
export function saveAs(data, callback) {
dialog.showSaveDialog(WIN, options).then((result) => {
if (result.filePath === undefined) {
return;
}
save(result.filePath, data, callback)
}).catch(err => console.log('there was an error saving the file: ' + err))
}
export function open(callback) {
const options = {
title: "Open",
properties: ['multiSelections'],
defaultPath: app.getPath('desktop'),
buttonLabel: "Open",
filters: [
{ name: 'C++ source files', extensions: ['cpp', 'cc', 'cxx', 'c++', 'cp'] },
{ name: 'C source files', extensions: ['c'] },
{ name: 'Header files', extensions: ['h', 'hpp', 'rh', 'hh'] },
{ name: 'Resource files', extensions: ['rc'] },
{ name: 'All Files', extensions: ['*'] }
]
}
dialog.showOpenDialog(WIN, options).then((result) => {
if (result.filePaths === undefined) {
return
};
if (typeof callback === 'function') {
for (var file of result.filePaths) {
callback(file.split("\\").pop().toString(), fs.readFileSync(file.toString()).toString(), file.toString())
}
}
}).catch(err => console.log(err))
}
function readDirectory(path) {
let files = []
fs.readdirSync(path).forEach(file => {
if (fs.existsSync(path + "\\" + file) && fs.lstatSync(path + "\\" + file).isDirectory()) {
files.push({ title: file, files: readDirectory(path + "\\" + file) })
} else {
files.push(path + "\\" + file.toString())
}
});
return files
}
export function openDirectory(callback) {
const options = {
title: "Open Directory",
defaultPath: app.getPath('desktop'),
buttonLabel: "Open",
properties: ['openDirectory']
}
dialog.showOpenDialog(WIN, options).then((result) => {
if (result.filePaths === undefined) {
return
};
var files = readDirectory(result.filePaths[0])
callback(files)
}).catch(err => console.log(err))
}
export function save(path, data, callback) {
fs.writeFile(path, data, (err) => {
if (err) {
console.log("An error ocurred creating the file: " + err.message)
return
}
if (typeof callback === 'function')
callback(path);
});
}
export function execute(path) {
shell.openItem(path)
// run_script("start", [path], null, write);
}
export function compileAndRun({path, data, callback, append, openTerminal}) {
if (path === undefined) {
saveAs(data, (path) => {
if (typeof callback === 'function')
callback(path)
run_script("g++", [path, "-o", path.replace(".cpp", ".exe"), null, append], () => execute(path.replace(".cpp", ".exe")), append, openTerminal)
})
} else {
save(path, data, (path) => {
if (typeof callback === 'function')
callback(path)
run_script("g++", [path, "-o", path.replace(".cpp", ".exe"), null, append], () => execute(path.replace(".cpp", ".exe")), append, openTerminal)
})
}
}
export function run(path, data, callback, write, openTerminal) {
if (path === undefined) {
saveAs(data, (path) => {
if (typeof callback === 'function')
callback(path)
run_script("g++", [path, "-o", path.replace(".cpp", ".exe"), null, write], () => execute(path.replace(".cpp", ".exe")), write, openTerminal)
})
} else {
execute(path.replace(".cpp", ".exe"))
}
}
// export function compile(path, data, callback, write) {
// if (path === undefined) {
// saveAs(data, (path) => {
// if (typeof callback === 'function')
// callback(path)
// run_script("g++", [path, "-o", path.replace(".cpp", ".exe")], null, write)
// })
// } else {
// save(path, data, (path) => {
// if (typeof callback === 'function')
// callback(path)
// run_script("g++", [path, "-o", path.replace(".cpp", ".exe")], null, write)
// })
// }
// }
export function compile({ path, data, callback, append, openTerminal }) {
if (path === undefined) {
saveAs(data, (path) => {
if (typeof callback === 'function')
callback(path)
run_script("g++", [path, "-o", path.replace(".cpp", ".exe")], null, append, openTerminal)
})
} else {
save(path, data, (path) => {
if (typeof callback === 'function')
callback(path)
run_script("g++", [path, "-o", path.replace(".cpp", ".exe")], null, append, openTerminal)
})
}
}
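// Usage sketch (hypothetical paths, assuming a renderer process where these
// helpers are imported and `log` is an output callback):
// compile({ path: "C:\\work\\main.cpp", data: sourceText, append: log, openTerminal: false })
// compileAndRun({ path: undefined, data: sourceText, callback: setPath, append: log, openTerminal: true })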
|
package com.publicissapient.camunda.service;
import java.util.logging.Logger;
import com.publicissapient.camunda.model.Order;
import com.publicissapient.camunda.utility.CommonUtility;
import org.camunda.bpm.engine.delegate.BpmnError;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
/**
* This class should/can be replaced with a standalone microservice.
*
* @author <NAME> (<EMAIL>)
*/
public class OrderAnalyzerDelegate implements JavaDelegate {
private final static Logger LOGGER = Logger.getLogger("OrderAnalyzerDelegate");
public void execute(DelegateExecution execution) throws Exception {
String businessKey = execution.getProcessBusinessKey();
boolean isOrderValid = (Boolean) execution.getVariable("isOrderValidDecision");
LOGGER.info("Process business Key '" + businessKey + "'...");
LOGGER.info("Is Order Valid: '" + execution.getVariable("isOrderValidDecision") + "'...");
if (!isOrderValid) {
execution.setVariable("isOrderValid", false);
execution.setVariable("error", "GENERAL_CONFIG_VALIDATION_FAILURE");
throw new BpmnError("VALIDATION_FAILURE");
}
Order order = new Order();
order.setOrderId((String) execution.getVariable("orderId"));
order.setOrderType((String) execution.getVariable("orderType"));
order.setDestination((String) execution.getVariable("destination"));
order.setEmailId((String) execution.getVariable("emailId"));
order.setNumberOfItems((Integer) execution.getVariable("numberOfItems"));
order.setOrderValid(true);
LOGGER.info(order.toString());
String orderJson = CommonUtility.convertToJSONString(order);
execution.setVariable("order", orderJson);
}
}
|
<filename>dbManager/src/main/java/sword/langbook3/android/models/AgentDetails.java
package sword.langbook3.android.models;
import sword.collections.ImmutableHashSet;
import sword.collections.ImmutableList;
import sword.collections.ImmutableSet;
import sword.langbook3.android.db.ImmutableCorrelation;
import sword.langbook3.android.db.ImmutableCorrelationArray;
public final class AgentDetails<AlphabetId, CorrelationId, BunchId, RuleId> {
public final ImmutableSet<BunchId> targetBunches;
public final ImmutableSet<BunchId> sourceBunches;
public final ImmutableSet<BunchId> diffBunches;
public final ImmutableCorrelation<AlphabetId> startMatcher;
public final ImmutableCorrelationArray<AlphabetId> startAdder;
public final ImmutableList<CorrelationId> startAdderCorrelationIds;
public final ImmutableCorrelation<AlphabetId> endMatcher;
public final ImmutableCorrelationArray<AlphabetId> endAdder;
public final ImmutableList<CorrelationId> endAdderCorrelationIds;
public final RuleId rule;
public AgentDetails(ImmutableSet<BunchId> targetBunches, ImmutableSet<BunchId> sourceBunches, ImmutableSet<BunchId> diffBunches,
ImmutableCorrelation<AlphabetId> startMatcher, ImmutableCorrelationArray<AlphabetId> startAdder, ImmutableList<CorrelationId> startAdderCorrelationIds,
ImmutableCorrelation<AlphabetId> endMatcher, ImmutableCorrelationArray<AlphabetId> endAdder, ImmutableList<CorrelationId> endAdderCorrelationIds, RuleId rule) {
if (startMatcher == null) {
startMatcher = ImmutableCorrelation.empty();
}
if (startAdder == null) {
startAdder = ImmutableCorrelationArray.empty();
}
if (startAdderCorrelationIds == null) {
startAdderCorrelationIds = ImmutableList.empty();
}
if (startAdder.size() != startAdderCorrelationIds.size()) {
throw new IllegalArgumentException();
}
if (endMatcher == null) {
endMatcher = ImmutableCorrelation.empty();
}
if (endAdder == null) {
endAdder = ImmutableCorrelationArray.empty();
}
if (endAdderCorrelationIds == null) {
endAdderCorrelationIds = ImmutableList.empty();
}
if (endAdder.size() != endAdderCorrelationIds.size()) {
throw new IllegalArgumentException();
}
if (startMatcher.equalCorrelation(startAdder.concatenateTexts()) && endMatcher.equalCorrelation(endAdder.concatenateTexts())) {
if (targetBunches.isEmpty()) {
throw new IllegalArgumentException();
}
rule = null;
}
else if (rule == null) {
throw new IllegalArgumentException();
}
if (sourceBunches == null) {
sourceBunches = ImmutableHashSet.empty();
}
if (diffBunches == null) {
diffBunches = ImmutableHashSet.empty();
}
if (!sourceBunches.filter(diffBunches::contains).isEmpty()) {
throw new IllegalArgumentException();
}
if (sourceBunches.contains(null)) {
throw new IllegalArgumentException();
}
if (diffBunches.contains(null)) {
throw new IllegalArgumentException();
}
this.targetBunches = targetBunches;
this.sourceBunches = sourceBunches;
this.diffBunches = diffBunches;
this.startMatcher = startMatcher;
this.startAdder = startAdder;
this.startAdderCorrelationIds = startAdderCorrelationIds;
this.endMatcher = endMatcher;
this.endAdder = endAdder;
this.endAdderCorrelationIds = endAdderCorrelationIds;
this.rule = rule;
}
public boolean modifyCorrelations() {
return rule != null;
}
}
|
<reponame>srabraham/MMM-Jast<gh_stars>0
class JastUtils {
static getCurrentValue(stock, exchangeData) {
let currentValue = "-";
if (stock.current) {
currentValue = stock.current;
if (
exchangeData &&
stock.tradeCurrency &&
stock.displayCurrency &&
stock.tradeCurrency !== stock.displayCurrency
) {
const exchange = exchangeData.find(
(exchange) =>
exchange.from === stock.tradeCurrency &&
exchange.to === stock.displayCurrency
);
if (exchange) {
currentValue = currentValue * exchange.rate;
}
}
currentValue = currentValue.toFixed(2);
}
return currentValue;
}
static getCurrency(stock, exchangeData, config) {
let currency = config.defaultCurrency;
if (stock.displayCurrency) {
const exchange = exchangeData.find(
(exchange) =>
exchange.from === stock.tradeCurrency &&
exchange.to === stock.displayCurrency
);
if (exchange) {
currency = stock.displayCurrency;
} else if (stock.tradeCurrency) {
currency = stock.tradeCurrency;
}
}
return currency;
}
static getStockChange(stock) {
if (stock.current && stock.last) {
return (((stock.current - stock.last) / stock.last) * 100).toFixed(1);
} else {
return 0;
}
}
static getDepotGrowth(config, exchangeData) {
let growth = 0;
let errors = false;
config.stocks.forEach((stock) => {
if (stock.current && stock.last) {
let change =
stock.current * stock.quantity - stock.last * stock.quantity;
if (
stock.tradeCurrency &&
stock.tradeCurrency !== config.defaultCurrency
) {
          // convert the change into the configured default currency;
          // a stock already traded in the default currency needs no conversion
          const exchange = exchangeData.find(
            (exchange) =>
              exchange.from === stock.tradeCurrency &&
              exchange.to === config.defaultCurrency
          );
          if (exchange) {
            change = change * exchange.rate;
          } else {
            errors = true;
          }
        }
growth = growth + change;
}
});
return { value: growth.toFixed(2), errors };
}
}
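// Example (sketch): a stock that moved from 100 to 103 yields
// JastUtils.getStockChange({ current: 103, last: 100 }) === "3.0"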
|
#!/bin/bash
mkdir -p download
# Download all PDF pages from the download page
wget --directory-prefix=./download --accept=pdf --mirror --level=0 --no-parent --no-directories https://web.archive.org/web/20191123111549/https://www.yourhome.gov.au/downloads
# Download the print sample to get nice cover and intro pages
wget --continue --directory-prefix=./download https://web.archive.org/web/20160910093317/http://canprint.com.au/yourhome/files/assets/common/downloads/publication.pdf
# Generate the document
# Some PDF files may arrive damaged; pdflatex will fail with an error if that happens. Re-download any damaged files manually.
pdflatex YourHome.tex
# Run it through ghostscript which produces a much smaller document
gs -dBATCH -dNOPAUSE -q -sDEVICE=pdfwrite -sOutputFile=YourHome_`date +%Y-%m-%d`.pdf YourHome.pdf
|
#!/usr/bin/env bash
if [ -z "$1" ]; then
echo "Builds a Docker image and publishes it with 'beta' tag"
echo "Usage: ./build.sh <directory> [--cache]"
echo ""
echo "The --cache argument instructs build to use Docker layer cache."
echo "Use with caution, cached layers might become outdated."
exit
fi
# fail on any error
set -e
DIR="$1"
NO_CACHE=""
if [ "${2}" = "--cache" ]; then
echo "WARNING: Using Docker layer cache, the resulting image might be outdated!"
else
NO_CACHE="--no-cache"
fi
docker build --pull --tag apify/actor-${DIR}:beta --tag apify/actor-${DIR}:latest ${NO_CACHE} ./${DIR}/
echo "Running image for test"
docker run apify/actor-${DIR}:beta
echo "Pushing image to Docker Hub"
docker push apify/actor-${DIR}:beta
echo "Docker image was built and published as apify/actor-${DIR}:beta"
echo "Now test the image and then publish it with 'latest' tag by running:"
echo " docker push apify/actor-${DIR}:latest"
|
#!/usr/bin/env bash
set -ex
cd tests/unit
../singlerod/short/build/install/bin/unittests
|
<filename>model/base.go
package model
import (
"fmt"
"sync"
"time"
"github.com/axiaoxin-com/logging"
"go.uber.org/zap"
"gorm.io/driver/mysql"
"gorm.io/gorm"
"gorm.io/gorm/schema"
c "github.com/skrbox/ioseek/pkg/conf"
. "github.com/skrbox/ioseek/pkg/log"
)
var (
DB *gorm.DB
once sync.Once
mysqlTmpl = `%s@tcp(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local`
)
func init() {
once.Do(func() {
var driver gorm.Dialector
if *c.DBHostPort != "" {
L.Infof("初始化数据连接: %s", *c.DBHostPort)
driver = mysql.Open(fmt.Sprintf(mysqlTmpl, *c.DBUserPass, *c.DBHostPort, *c.DBDatabase))
}
db, err := gorm.Open(driver, &gorm.Config{
Logger: logging.NewGormLogger(zap.DebugLevel, zap.DebugLevel, time.Millisecond*500),
CreateBatchSize: 100,
NowFunc: func() time.Time {
return time.Now().Local()
},
NamingStrategy: schema.NamingStrategy{
SingularTable: true,
NoLowerCase: true,
},
})
if err != nil {
panic(err)
}
DB = db
})
}
type meta struct {
UUID string `gorm:"primaryKey"`
CreatedAt time.Time
UpdatedAt time.Time
DeletedAt gorm.DeletedAt `gorm:"index"`
}
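// Usage sketch: embed meta in a concrete model of this package and let GORM
// create the table. Bookmark is a hypothetical example, not part of the real schema:
//
//	type Bookmark struct {
//		meta
//		URL string
//	}
//
//	func migrateBookmarks() error {
//		return DB.AutoMigrate(&Bookmark{})
//	}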
|
def validate_isbn_10(isbn):
    """This function takes an ISBN-10 code and returns True if the code is valid and False otherwise"""
    # Convert the code to a list of digit values; a trailing 'X' stands for 10
    isbn_digits = [10 if x.upper() == 'X' else int(x)
                   for x in isbn if x.isdigit() or x.upper() == 'X']
    if len(isbn_digits) != 10:
        return False
    # Calculate the check digit: weighted sum of the first nine digits, modulo 11
    verify = 0
    for i in range(9):
        verify += isbn_digits[i] * (i + 1)
    verify = verify % 11
    # Compare the calculated check digit with the given one
    return verify == isbn_digits[-1]
# Driver Program
isbn = "0-306-40615-2"
print(validate_isbn_10(isbn)) # Prints True
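# More checks (the second code deliberately has a wrong check digit):
print(validate_isbn_10("0-8044-2957-X")) # Prints True ('X' stands for 10)
print(validate_isbn_10("0-306-40615-3")) # Prints False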
|
# Core Django imports
from django.db.models import Count
from django.shortcuts import render, get_object_or_404, redirect
from django.core.mail import send_mail
from django.core.paginator import (Paginator, EmptyPage, PageNotAnInteger)
from django.contrib.postgres.search import SearchVector, SearchQuery, SearchRank
from django.contrib import messages
# app imports
from posts.models import Post, Comment
from posts.forms import EmailPostForm, CommentForm, SearchForm
# third-party packages
from taggit.models import Tag
def post_home(request):
"""
Function view to render the home page
"""
template_name = "posts/home.html"
context = {'section': 'home'}
return render(request, template_name, context)
def post_about(request):
"""
Function view to render the about page
"""
template_name = "posts/about.html"
context = {'section': 'about'}
return render(request, template_name, context)
def post_list(request, subject=None, tag_slug=None):
"""
Function view to render posts on the basis of
subject or tags passed to the url
"""
posts = Post.published.all()
if subject:
if not subject == 'all':
posts = Post.published.filter(subject=subject)
elif tag_slug:
tag = get_object_or_404(Tag, slug=tag_slug)
posts = posts.filter(tags__in=[tag])
## pagination
paginator = Paginator(posts, 2) # 2 posts in each page
page = request.GET.get('page')
try:
posts = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver the first page
posts = paginator.page(1)
except EmptyPage:
# If page is out of range, deliver the last page
posts = paginator.page(paginator.num_pages)
paginate = len(posts)
template_name = "posts/list.html"
context = {
'section': 'list',
'subject': subject,
'tag_slug': tag_slug,
'object_list': posts,
'paginate': paginate,
'page': page,
}
return render(request, template_name, context)
def post_detail(request, year, month, day, post_slug):
"""
Function view for a single post
"""
post = get_object_or_404(Post, title_slug=post_slug,
status='published',
created__year=year,
created__month=month,
created__day=day)
# List of active comments for this post
comments = post.postcomments.filter(active=True)
new_comment = None
if request.method == 'POST':
# a comment was posted
comment_form = CommentForm(data=request.POST)
if comment_form.is_valid():
body = request.POST.get('body')
            # create() builds the comment and saves it to the database
            new_comment = Comment.objects.create(
                post=post, user=request.user, body=body
            )
messages.success(request, 'Comment added successfully!')
return redirect(post.get_absolute_url())
else:
messages.error(request, 'Invalid form submission')
else:
comment_form = CommentForm()
## similar posts based on tags
# flat means give single values and not tuples
post_tags_ids = post.tags.values_list('id', flat=True)
similar_posts = Post.published.filter(tags__in=post_tags_ids)\
.exclude(id=post.id)
# use the Count function to generate the same_tags field
# which contains number of tags shared with all of the tags queried
similar_posts = similar_posts.annotate(same_tags=Count('tags'))\
.order_by('-same_tags', '-created')[:4]
# display the latest posts based on no. of shared tags
return render(request, template_name='posts/detail.html',\
context={'post': post,
'comments': comments,
'new_comment': new_comment,
'comment_form': comment_form,
'similar_posts': similar_posts} )
def post_search(request):
"""
View that renders a form for users to search for posts in our database
"""
form = SearchForm()
query = None
results = []
if 'query' in request.GET:
form = SearchForm(request.GET)
if form.is_valid():
query = form.cleaned_data['query']
# give more weight to title than description
# A = 1.0, B = 0.4, C = 0.2, D = 0.1
search_vector = SearchVector('title', weight='A') \
+ SearchVector('description', weight='B')
# using SearchQuery so that words provided are passed through a
# stemming algorithm, before looking for matching terms
search_query = SearchQuery(query)
# rank results based on relevancy
search_rank = SearchRank(search_vector, search_query)
results = Post.published.\
annotate(rank=search_rank)\
.filter(rank__gte=0.2).order_by('-rank')
# display, descendingly, only the matches that have
# ranks greater than 0.2
return render(request, template_name='posts/search.html',\
context = { 'section': 'search', 'form': form,
'query': query, 'results': results })
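# Shell sketch of the ranking above, assuming a post titled "Django tips" exists:
# >>> vector = SearchVector('title', weight='A') + SearchVector('description', weight='B')
# >>> Post.published.annotate(rank=SearchRank(vector, SearchQuery('django')))\
# ...     .filter(rank__gte=0.2).order_by('-rank')
# <QuerySet [<Post: Django tips>]>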
def post_share(request, post_id):
"""
Function view to handle the email form and send an email
"""
# retrieve post by id
post = get_object_or_404(Post, id=post_id, status='published')
sent = False
if request.method == 'POST':
# if a form was submitted
form = EmailPostForm(data=request.POST)
if form.is_valid():
# get a dict of valid fields and their value
clean_data = form.cleaned_data
# use the value inside clean_data to build our email
post_url = request.build_absolute_uri(post.get_absolute_url())
subject = f"{clean_data['name']} recommends you check out {post.title}"
message = f"Check out {post.title} at {post_url}\n\n"\
f"{clean_data['name']}\'s comments: {clean_data['comments']}"
send_mail(subject, message, f"{clean_data['to']}", [clean_data['to']])
sent = True
else:
form = EmailPostForm()
return render(request, template_name='posts/share.html',\
context = { 'section': 'share', 'post': post, 'form': form, 'sent': sent })
|
package com.univocity.envlp.wallet;
import com.univocity.cardano.wallet.addresses.*;
import com.univocity.envlp.wallet.persistence.dao.*;
import com.univocity.envlp.wallet.persistence.model.*;
import org.testng.annotations.*;
import java.util.*;
import static org.testng.Assert.*;
public class WalletServiceTest extends BaseTest {
@Test
public void testCreateNewWallet() {
String seed = service.generateSeed();
WalletSnapshot wallet = createNewWallet("wallet from seed", seed);
assertNotNull(wallet);
assertNotNull(wallet.getCreatedAt());
assertNotNull(wallet.getId());
assertEquals(wallet.getName(), "wallet from seed");
assertEquals(wallet.getAccounts().size(), 1);
assertNotNull(wallet.getAccounts().get(0L));
seed = AddressManagerTest.seed;
wallet = createNewWallet("myWallet", seed);
assertEquals(wallet.getAccounts().get(0L), AddressManagerTest.publicRootKey_0);
}
@Test(dependsOnMethods = "testCreateNewWallet")
public void testAddAccountFromSeed() {
WalletSnapshot wallet = service.getWalletByName("myWallet");
service.addAccountFromSeed(wallet, AddressManagerTest.seed, 10);
assertEquals(wallet.getAccounts().get(10L), AddressManagerTest.publicRootKey_10);
wallet = service.getWalletByName("myWallet");
assertEquals(wallet.getAccounts().get(10L), AddressManagerTest.publicRootKey_10);
assertEquals(wallet.getAccounts().size(), 2);
}
@Test(dependsOnMethods = "testAddAccountFromSeed")
public void testAddAccountsFromSeed() {
WalletSnapshot wallet = service.getWalletByName("myWallet");
assertEquals(wallet.getAccounts().get(0L), AddressManagerTest.publicRootKey_0);
assertEquals(wallet.getAccounts().get(10L), AddressManagerTest.publicRootKey_10);
service.addAccountFromSeed(wallet, AddressManagerTest.seed, 5); //add
service.addAccountFromSeed(wallet, AddressManagerTest.seed, 11); //add
service.addAccountsFromSeed(wallet, AddressManagerTest.seed, 10); //add 10 more
assertEquals(wallet.getAccounts().get(0L), AddressManagerTest.publicRootKey_0);
assertEquals(wallet.getAccounts().get(10L), AddressManagerTest.publicRootKey_10);
assertEquals(wallet.getAccounts().size(), 14);
for (long i = 0; i < 14; i++) {
assertNotNull(wallet.getAccounts().get(i));
}
}
@Test(dependsOnMethods = "testAddAccountsFromSeed")
public void testGetPaymentAddress() {
WalletSnapshot wallet = service.getWalletByName("myWallet");
String address0_0 = service.getPaymentAddress(wallet, 0, 0);
assertNotNull(address0_0);
String address0_0Again = service.getPaymentAddress(wallet, 0, 0);
assertNotNull(address0_0Again);
assertEquals(address0_0, address0_0Again);
//account wasn't created, return null.
assertNull(service.getPaymentAddress(wallet, 9999, 0));
}
@Test
public void testAllocateNextPaymentAddress() {
String seed = service.generateSeed();
WalletSnapshot wallet = createNewWallet("randomWallet2", seed);
//no other accounts registered, will allocate to default account 0
String payment1 = service.allocateNextPaymentAddress(wallet);
assertNotNull(payment1);
String payment2 = service.allocateNextPaymentAddress(wallet);
assertNotNull(payment2);
assertNotEquals(payment1, payment2);
assertEquals(payment1, service.getPaymentAddress(wallet, 0, 0));
assertEquals(payment2, service.getPaymentAddress(wallet, 0, 1));
//Once account is added, will allocate from any account other than 0.
wallet = service.addAccountFromSeed(wallet, seed, 23);
String payment3 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment3, service.getPaymentAddress(wallet, 23, 0));
String payment4 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment4, service.getPaymentAddress(wallet, 23, 1));
String payment5 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment5, service.getPaymentAddress(wallet, 23, 2));
//Added another account
wallet = service.addAccountFromSeed(wallet, seed, 3);
String payment6 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment6, service.getPaymentAddress(wallet, 3, 0));
String payment7 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment7, service.getPaymentAddress(wallet, 3, 1));
String payment8 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment8, service.getPaymentAddress(wallet, 3, 2));
//will cycle through accounts (not using the default 0)
String payment9 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment9, service.getPaymentAddress(wallet, 3, 3));
String payment10 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment10, service.getPaymentAddress(wallet, 23, 3));
String payment11 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment11, service.getPaymentAddress(wallet, 3, 4));
String payment12 = service.allocateNextPaymentAddress(wallet);
assertEquals(payment12, service.getPaymentAddress(wallet, 23, 4));
}
@Test
public void testAllocateNextPaymentAddressFromAccount() {
String seed = service.generateSeed();
WalletSnapshot wallet = createNewWallet("randomWallet1", seed);
String payment1 = service.allocateNextPaymentAddress(wallet, 0);
assertNotNull(payment1);
String payment2 = service.allocateNextPaymentAddress(wallet, 0);
assertNotNull(payment2);
assertNotEquals(payment1, payment2);
assertEquals(payment1, service.getPaymentAddress(wallet, 0, 0));
assertEquals(payment2, service.getPaymentAddress(wallet, 0, 1));
}
@Test(dependsOnMethods = "testAllocateNextPaymentAddressFromAccount")
public void testGetAddressesForDefaultAccount() {
WalletSnapshot wallet = service.getWalletByName("randomWallet1");
List<AddressAllocation> addresses = service.getAddressesForDefaultAccount(wallet);
assertEquals(addresses.size(), 2);
for (AddressAllocation address : addresses) {
assertNotNull(address.getPaymentAddress());
assertEquals(address.getAccountIndex(), 0);
assertEquals(address.getWalletId(), wallet.getId());
}
//most recent address first
assertEquals(addresses.get(0).getDerivationIndex(), 1);
assertEquals(addresses.get(1).getDerivationIndex(), 0);
}
}
|
<filename>D3dTiles/src/Primitives/Primitive.cpp
#include "stdafx.h"
#include "D3dTiles/Primitives/Primitive.h"
namespace TileEngine {
Primitive::~Primitive() {}
} // namespace TileEngine
|
import Audio1 from '../Assets/Audio/bensound-allthat.mp3';
import Audio2 from '../Assets/Audio/bensound-countryboy.mp3';
import Audio3 from '../Assets/Audio/bensound-evolution.mp3';
import Audio4 from '../Assets/Audio/bensound-highoctane.mp3';
import Audio5 from '../Assets/Audio/bensound-hipjazz.mp3';
import SongArt1 from '../Assets/Images/aic_dirt.jpg';
import SongArt2 from '../Assets/Images/matt_corby.jpg';
import SongArt3 from '../Assets/Images/chris_cornell.jpg';
import SongArt4 from '../Assets/Images/chris_cornell2.jpg';
import SongArt5 from '../Assets/Images/led_zeppelin.jpg';
const Songs = [
{name: 'All That', artist: 'Bensound', file: Audio1, songPic: SongArt1, duration: '2:25', id: "first"},
{name: 'Country Boy', artist: 'Bensound', file: Audio2, songPic: SongArt2, duration: '3:27', id: "second"},
{name: 'Evolution', artist: 'Bensound', file: Audio3, songPic: SongArt3, duration: '2:45', id: "third"},
{name: 'High Octane', artist: 'Bensound', file: Audio4, songPic: SongArt4, duration: '2:35', id: "fourth"},
{name: 'Hip Jazz', artist: 'Bensound', file: Audio5, songPic: SongArt5, duration: '2:43', id: "fifth"}
];
export default Songs;
|
/*
* Copyright (C) 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gilecode.yagson.stream;
import java.io.IOException;
import java.io.Writer;
/**
 * A writer used to limit the JSON output to the specified number of characters. If the JSON output
 * length exceeds the limit, the output is stripped to match the limit and {@link OutputLimitExceededException}
* is thrown to interrupt the serialization process.
* <p/>
* <b>NOTE: This writer is NOT THREAD-SAFE</b>
*
* @author <NAME>
*/
public class LimitedWriter extends Writer {
protected final Writer delegate;
protected final long charsLimit;
protected long charsCount;
public LimitedWriter(Writer delegate, long charsLimit) {
this.delegate = delegate;
this.charsLimit = charsLimit;
}
@Override
public void write(char[] cbuf, int off, int len) throws IOException, OutputLimitExceededException {
long newCount = charsCount + len;
if (newCount > charsLimit) {
len -= (newCount - charsLimit);
charsCount = charsLimit;
if (len > 0) {
delegate.write(cbuf, off, len);
}
throwLimitExceededException();
return;
} else {
charsCount = newCount;
delegate.write(cbuf, off, len);
}
}
protected void throwLimitExceededException() throws OutputLimitExceededException {
throw new OutputLimitExceededException();
}
@Override
public void flush() throws IOException {
delegate.flush();
}
@Override
public void close() throws IOException {
delegate.close();
}
}
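// Usage sketch (gson here is any serializer that accepts a Writer; the catch
// assumes OutputLimitExceededException, thrown by write() above, is catchable
// as declared in this package):
//
//   StringWriter buffer = new StringWriter();
//   LimitedWriter limited = new LimitedWriter(buffer, 1024);
//   try {
//       gson.toJson(someObject, limited);
//   } catch (OutputLimitExceededException e) {
//       // buffer now holds at most the first 1024 characters of the JSON
//   }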
|
#!/bin/sh
flask db upgrade
|
import { readOnlyRatingStar } from './readOnlyRatingStar';
export function ratingSummary(rating){
return `<div class="rating-summary">
<div class="">
<div class="pull-left">${readOnlyRatingStar(rating)}</div>
<div class="pull-left"> 3.7 </div>
<div class="pull-left"> (29) </div>
<div class="pull-left"> <a href="#">write a review</a> </div>
</div>
</div>`;
}
function ratingSummaryPopHover(rating){
  // embed the read-only rating summary inside the popover container
  // (calling ratingSummaryPopHover here again would recurse forever)
  return `
  <div class="rt-summary-popover">
  ${ratingSummary(rating)}
  </div>`;
}
|
#!/bin/bash
sudo apt install build-essential git pkg-config libgl1-mesa-dev libpthread-stubs0-dev libjpeg-dev libxml2-dev libpng-dev libtiff5-dev libgdal-dev libpoppler-dev libdcmtk-dev libgstreamer1.0-dev libgtk2.0-dev libcairo2-dev libpoppler-glib-dev libxrandr-dev libxinerama-dev curl cmake
git clone https://github.com/esmini/esmini.git esmini
cd esmini
git checkout b772909dae9205aaacccd2692dc42599888afa57 # 1st Feb 2021 = esmini 2.1.5 (build 1108)
mkdir build
cd build
cmake ../ -DUSE_OSG=true -DCMAKE_BUILD_TYPE=Release
make -j4 install
cd ../../
cp ./esmini/build/EnvironmentSimulator/Libraries/esminiRMLib/libesminiRMLib.so pyesmini/libesminiRMLib.so
cp ./esmini/build/EnvironmentSimulator/Libraries/esminiLib/libesminiLib.so pyesmini/libesminiLib.so
cp -r esmini/resources/ .
python3 tests/pyesmini_tests.py
python3 tests/pyesminiRM_tests.py
|
<filename>KS2.cpp
#include<bits/stdc++.h>
using namespace std;
int main(){
unsigned long long t,n,ans,m,i,j;
cin>>t;
for(i=0;i<t;i++){
ans=0;
cin>>n;
m=n;
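        // compute the digit sum of n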
for(j=0;n>0;j++){
ans+=n%10;
n/=10;
}
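        // append the single digit that makes the digit sum divisible by 10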
for(j=0;j<=9;j++){
if((ans+j)%10==0){
m=m*10+j;
break;
}
}
cout<<m<<endl;
}
return 0;
}
|
/**
* @param {String} type
* @param {Object} attributes
* @param {...any} children
*/
export function h (type, attributes, ...children) {
const el = document.createElement(type)
for (const key in attributes) {
if (key === 'style' && Array.isArray(attributes[key])) {
el.setAttribute(key, attributes[key].join(';'))
} else {
el.setAttribute(key, attributes[key])
}
}
children.forEach(child => {
if (child instanceof window.HTMLElement) {
el.append(child)
} else {
el.append(document.createTextNode(child))
}
})
return el
}
/**
* @param {HTMLElement} elem
* @param {String} name
* @param {?boolean} bool
*/
export function toggleClass (elem, name, bool) {
if (elem === null) {
return
}
if (typeof bool === 'undefined') {
bool = !elem.classList.contains(name)
}
if (bool) {
if (!elem.classList.contains(name)) {
elem.classList.add(name)
}
} else {
if (elem.classList.contains(name)) {
elem.classList.remove(name)
}
}
}
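// Usage sketch:
// const card = h('div', { class: 'card', style: ['color: red', 'padding: 4px'] },
//   h('span', {}, 'hello'), ' world')
// document.body.append(card)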
|
#!/bin/sh
args="$@"
echo "OpenCV installation..."
#PROJECT_PATH=echo ${args[0]}
# Save current working directory
cwd=$(pwd)
cd "$cwd" || exit
#Specify OpenCV version
cvVersion="4.5.2"
# Clean build directories
rm -rf opencv/build
rm -rf opencv_contrib/build
# Create directory for installation
mkdir lib-emscripten
mkdir lib-emscripten/opencv
git clone https://github.com/opencv/opencv.git
cd opencv || exit
git checkout $cvVersion
cd ..
# git clone https://github.com/opencv/opencv_contrib.git
# cd opencv_contrib
# git checkout $cvVersion
# cd ..
cd opencv || exit
mkdir build
cd build || exit
emscriptenToolChainFile=""
case "$OSTYPE" in
linux*) emscriptenToolChainFile=/usr/lib/emscripten/cmake/Modules/Platform/Emscripten.cmake ;;
darwin*) emscriptenToolChainFile=/usr/local/Cellar/emscripten/2.0.25/libexec/cmake/Modules/Platform/Emscripten.cmake ;;
msys*) echo "windows" ;;
solaris*) echo "solaris" ;;
bsd*) echo "bsd" ;;
*) echo "unknown" ;;
esac
# add this if extra modules needed -D OPENCV_EXTRA_MODULES_PATH=path to opencv_contrib modules \
# -D CMAKE_PREFIX_PATH="$PROJECT_PATH" \
# -D CMAKE_MODULE_PATH="$PROJECT_PATH"/lib/cmake/opencv4 \
cmake -D CMAKE_BUILD_TYPE=RELEASE \
-D CMAKE_INSTALL_PREFIX="$cwd"/lib-emscripten/opencv \
-D CMAKE_TOOLCHAIN_FILE="$emscriptenToolChainFile" \
-D ENABLE_PIC=FALSE \
-D CPU_BASELINE='' \
-D CPU_DISPATCH='' \
-D CV_TRACE=OFF \
-D BUILD_SHARED_LIBS=OFF \
-D BUILD_PROTOBUF=OFF \
-D WITH_PROTOBUF=OFF \
-D WITH_1394=OFF \
-D WITH_ADE=OFF \
-D WITH_VTK=OFF \
-D WITH_EIGEN=OFF \
-D WITH_FFMPEG=OFF \
-D WITH_GSTREAMER=OFF \
-D WITH_GTK=OFF \
-D WITH_GTK_2_X=OFF \
-D WITH_IPP=OFF \
-D WITH_JASPER=OFF \
-D WITH_JPEG=OFF \
-D WITH_WEBP=OFF \
-D WITH_OPENEXR=OFF \
-D WITH_OPENGL=OFF \
-D WITH_OPENVX=OFF \
-D WITH_OPENNI=OFF \
-D WITH_OPENNI2=OFF \
-D WITH_PNG=OFF \
-D WITH_TBB=OFF \
-D WITH_TIFF=OFF \
-D WITH_V4L=OFF \
-D WITH_OPENCL=OFF \
-D WITH_OPENCL_SVM=OFF \
-D WITH_OPENCLAMDFFT=OFF \
-D WITH_OPENCLAMDBLAS=OFF \
-D WITH_GPHOTO2=OFF \
-D WITH_LAPACK=OFF \
-D WITH_ITT=OFF \
-D WITH_QUIRC=OFF \
-D BUILD_ZLIB=ON \
-D BUILD_opencv_core=ON \
-D BUILD_opencv_apps=OFF \
-D BUILD_opencv_calib3d=OFF \
-D BUILD_opencv_dnn=OFF \
-D BUILD_opencv_features2d=ON \
-D BUILD_opencv_flann=ON \
-D BUILD_opencv_gapi=OFF \
-D BUILD_opencv_ml=OFF \
-D BUILD_opencv_photo=OFF \
-D BUILD_opencv_imgcodecs=OFF \
-D BUILD_opencv_shape=OFF \
-D BUILD_opencv_videoio=OFF \
-D BUILD_opencv_videostab=OFF \
-D BUILD_opencv_highgui=OFF \
-D BUILD_opencv_superres=OFF \
-D BUILD_opencv_stitching=OFF \
-D BUILD_opencv_java=OFF \
-D BUILD_opencv_js=OFF \
-D BUILD_opencv_python2=OFF \
-D BUILD_opencv_python3=OFF \
-D BUILD_EXAMPLES=OFF \
-D BUILD_PACKAGE=OFF \
-D BUILD_TESTS=OFF \
-D BUILD_PERF_TESTS=OFF \
-D BUILD_DOCS=OFF \
-D WITH_PTHREADS_PF=ON \
-D CV_ENABLE_INTRINSICS=ON \
-D BUILD_WASM_INTRIN_TESTS=OFF \
-D CMAKE_C_FLAGS="-s WASM=1 -s USE_PTHREADS=0 -fsigned-char -W -Wall -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -Wformat -Werror=format-security -Wmissing-declarations -Wmissing-prototypes -Wstrict-prototypes -Wundef -Winit-self -Wpointer-arith -Wshadow -Wsign-promo -Wuninitialized -Winconsistent-missing-override -Wno-delete-non-virtual-dtor -Wno-unnamed-type-template-args -Wno-comment -Wno-deprecated-enum-enum-conversion -Wno-deprecated-anon-enum-enum-conversion -fdiagnostics-show-option -pthread -Qunused-arguments -ffunction-sections -fdata-sections -fvisibility=hidden -fvisibility-inlines-hidden -O3 -DNDEBUG -DNDEBUG -msimd128" \
-D CMAKE_CXX_FLAGS="-s WASM=1 -s USE_PTHREADS=0 -fsigned-char -W -Wall -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -Wformat -Werror=format-security -Wmissing-declarations -Wmissing-prototypes -Wstrict-prototypes -Wundef -Winit-self -Wpointer-arith -Wshadow -Wsign-promo -Wuninitialized -Winconsistent-missing-override -Wno-delete-non-virtual-dtor -Wno-unnamed-type-template-args -Wno-comment -Wno-deprecated-enum-enum-conversion -Wno-deprecated-anon-enum-enum-conversion -fdiagnostics-show-option -pthread -Qunused-arguments -ffunction-sections -fdata-sections -fvisibility=hidden -fvisibility-inlines-hidden -O3 -DNDEBUG -DNDEBUG -msimd128" ..
make
make install
#export OpenCV_DIR="$cwd"/lib/opencv/lib/cmake/opencv4/OpenCVConfig.cmake
[[ ":$PATH:" != *":$cwd/lib-emscripten/opencv/lib/cmake/opencv4/OpenCVConfig.cmake:"* ]] && PATH="$cwd/lib-emscripten/opencv/lib/cmake/opencv4/OpenCVConfig.cmake:${PATH}"
echo "$PATH"
cd "$cwd" || exit
|
#!/bin/bash
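# Run a single dieharder test (-d 201) against generator number 16 (-g),
# with a fixed seed (-S) so the run is reproducible.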
dieharder -d 201 -g 16 -S 1486470799
|
package main
import (
"encoding/json"
"flag"
"github.com/go-openapi/spec"
"github.com/jackmanlabs/errors"
"go/build"
"log"
"os"
"runtime/pprof"
"strings"
)
var (
// Command-line parameters
pkgPath *string = flag.String("pkg", "", "The main package of your application.")
profilePath *string = flag.String("profile", "", "The path where you'd like to store profiling results.")
	ignore      *string = flag.String("ignore", "", "The comma separated package paths that you want to ignore.")
naming *string = flag.String("naming", "full", "One of 'full', 'partial', or 'simple' to describe the amount of the package path on the resulting JSON models.")
)
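// Example invocation (illustrative):
//
//	go run . -pkg github.com/you/yourapi -naming simple -ignore "github.com/you/yourapi/internal"
//
// The generated Swagger spec is written to stdout as indented JSON.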
var (
// Global variables
// Normally, I don't like global variables. The fact is, however, that if we
// were to pass these three things around, it would get very tedious very
// fast. This is not a multi-threaded program, and we've been careful to
// avoid modifying maps during iterations.
pkgInfos map[string]PackageInfo = make(map[string]PackageInfo)
srcPath string
ignoredPackages []string = make([]string, 0)
)
func main() {
flag.Parse()
if *profilePath != "" {
f, err := os.Create(*profilePath)
if err != nil {
log.Fatal(errors.Stack(err))
}
pprof.StartCPUProfile(f)
defer pprof.StopCPUProfile()
}
if *pkgPath == "" {
flag.Usage()
log.Fatal("Package path is required.")
}
if !(*naming == "full" || *naming == "partial" || *naming == "simple") {
flag.Usage()
log.Fatal("Unrecognized value provided for naming convention: " + *naming)
}
ignores := strings.Split(*ignore, ",")
for _, i := range ignores {
if i != "" {
ignoredPackages = append(ignoredPackages, i)
}
}
var err error
// Determine the source path of the package specified.
srcPath, err = getPackageSourceDir(*pkgPath)
if err != nil {
log.Fatal(errors.Stack(err))
}
// Which packages need to be analyzed? Get a list of all pkgInfos.
pkgInfos, err = getPackageInfoRecursive(*pkgPath)
if err != nil {
log.Fatal(errors.Stack(err))
}
// What comments need to be parsed?
// Find all comments that could conceivably have our tags in them.
packageCommentBlocks := make(map[string][]string, 0)
for importPath := range pkgInfos {
//log.Print("Scanning package for comments: ", importPath)
newBlocks, err := getCommentBlocks(importPath)
if err != nil {
log.Fatal(errors.Stack(err))
}
packageCommentBlocks[importPath] = newBlocks
}
// Now, let's check all of the comment blocks we found for tags, parsing them as necessary.
var (
apiCommentBlocks []string = make([]string, 0)
operationCommentBlocks map[string][]string = make(map[string][]string, 0)
tagCommentBlocks []string = make([]string, 0)
)
for importPath, commentBlocks := range packageCommentBlocks {
newApiCommentBlocks := detectApiCommentBlocks(commentBlocks)
//jlog.Log(newApiCommentBlocks)
apiCommentBlocks = append(apiCommentBlocks, newApiCommentBlocks...)
newOperationCommentBlocks := detectOperationComments(commentBlocks)
// We need to know the package so we know where to look for the types.
operationCommentBlocks[importPath] = newOperationCommentBlocks
		newTagCommentBlocks := detectTagComments(commentBlocks) // tag-detection helper (name assumed)
tagCommentBlocks = append(tagCommentBlocks, newTagCommentBlocks...)
}
// Let's turn our detected comments into our internal, intermediate types.
var (
apiIntermediate ApiIntermediate // There's only one.
operationIntermediates []OperationIntermediate = make([]OperationIntermediate, 0)
tagIntermediates []TagIntermediate = make([]TagIntermediate, 0)
)
// This function takes all API comment blocks, as they should all condense into a single API description.
apiIntermediate = intermediatateApi(apiCommentBlocks)
for importPath, commentBlocks := range operationCommentBlocks {
for _, commentBlock := range commentBlocks {
// This only scrapes the information found in the comment block.
// It doesn't do any further processing.
operationIntermediate := intermediatateOperation(commentBlock)
// We need this for later.
operationIntermediate.PackagePath = importPath
operationIntermediates = append(operationIntermediates, operationIntermediate)
}
}
for _, commentBlock := range tagCommentBlocks {
newTagIntermediates := intermediatateTags(commentBlock)
tagIntermediates = append(tagIntermediates, newTagIntermediates...)
}
// I really don't like the way this is done.
// TODO: Make this more functional.
defStore, err := deriveDefinitionsFromOperations(operationIntermediates)
if err != nil {
log.Fatal(errors.Stack(err))
}
// Transform the extractions above and combine them into a single Swagger Spec.
var swagger *spec.Swagger = swaggerizeApi(apiIntermediate)
swagger.Paths = swaggerizeOperations(operationIntermediates)
swagger.Tags = swaggerizeTags(tagIntermediates)
swagger.Definitions = swaggerizeDefinitions(defStore)
enc := json.NewEncoder(os.Stdout)
enc.SetIndent("", "\t")
err = enc.Encode(swagger)
if err != nil {
log.Fatal(errors.Stack(err))
}
}
func getPackageSourceDir(pkgPath string) (string, error) {
var (
bpkg *build.Package
err error
)
// I should hope there's an easier way of resolving the source path.
srcDirs := build.Default.SrcDirs()
for _, srcDir := range srcDirs {
bpkg, err = build.Import(pkgPath, srcDir, 0)
if err == nil {
break
}
}
if err != nil {
return "", errors.Stack(err)
}
return bpkg.Dir, nil
}
|
const withTypescript = (config) => {
// Implementation for withTypescript
return {
...config,
// Add TypeScript configuration here
}
}
const withSass = (config) => {
// Implementation for withSass
return {
...config,
// Add Sass configuration here
}
}
const compose = (...functions) => {
return (initialConfig) => {
return functions.reduceRight((acc, fn) => fn(acc), initialConfig)
}
}
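// compose applies right-to-left, so compose(withTypescript, withSass)(cfg)
// is equivalent to withTypescript(withSass(cfg)).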
const config = compose(
withTypescript,
withSass
)({})
module.exports = config
|
#!/bin/bash
JBROWSE_BUILD_MIN=${JBROWSE_BUILD_MIN:=1}
# check the exit status of the command, and print the last bit of the log if it fails
done_message () {
if [ $? == 0 ]; then
log_echo " done."
if [ "x$1" != "x" ]; then
echo $1;
fi
else
echo " failed. See setup.log file for error messages." $2;
if [[ "x$3" != "x" ]]; then
echo "setup cannot continue, aborting.";
tail -200 setup.log;
exit 1;
fi
fi
}
# echoes both to the console, and to setup.log
# adds extra carriage returns in setup.log for readability.
log_echo () {
echo $@
echo >> setup.log
echo $@ >> setup.log
echo >> setup.log
}
check_node () {
set +e
node_executable=$(which node)
npm_executable=$(which npm)
if ! [ -x "$node_executable" ] ; then
nodejs_executable=$(which nodejs)
if ! [ -x "$nodejs_executable" ] ; then
echo "No 'node' executable found. JBrowse expects node version 6 or later. Please install an updated version of node.js by following the instructions appropriate for your system https://nodejs.org/en/download/package-manager/";
exit 1
else
echo "Creating an alias 'node' for 'nodejs'"
node_executable="$nodejs_executable"
fi
fi
set -e
if ! [ -x "$npm_executable" ] ; then
echo "No 'npm' executable found. JBrowse expects npm version 3 or later. Please install an updated version of node.js by following the instructions appropriate for your system https://nodejs.org/en/download/package-manager/";
exit 1
fi
NODE_VERSION=`$node_executable -v`
NODE_MAJOR_VERSION=`$node_executable -v | cut -dv -f2 | cut -d. -f1`
NODE_MINOR_VERSION=`$node_executable -v | cut -d. -f2`
NPM_VERSION=`$npm_executable -v`
NPM_MAJOR_VERSION=`$npm_executable -v | cut -d. -f1`
if [[ $NODE_MAJOR_VERSION -lt 6 ]]; then
echo "node $NODE_VERSION found, but node version 6 or later must be installed. Please install an updated version of node.js by following the instructions appropriate for your system https://nodejs.org/en/download/package-manager/";
exit 1
fi
if [[ $NPM_MAJOR_VERSION -lt 3 ]]; then
echo "npm $NPM_VERSION found, but npm version 3 or later must be installed. Please install an updated version of node.js by following the instructions appropriate for your system https://nodejs.org/en/download/package-manager/";
exit 1
fi
echo "Node $NODE_VERSION installed at $node_executable with npm $NPM_VERSION";
}
# we are starting a new setup. clear the log file
rm -f setup.log
# log information about this system
log_echo -n "Gathering system information ..."
(
echo '============== System information ====';
set -x;
lsb_release -a;
uname -a;
sw_vers;
grep MemTotal /proc/meminfo;
echo; echo;
) >>setup.log 2>&1;
done_message "" ""
# check Mac OS version
SUPPRESS_BIODB_TO_JSON=0
sw_vers >& /dev/null;
if [ $? -eq 0 ]; then
product_version=`sw_vers -productVersion`;
have_db=`perl -MConfig=myconfig -e 'print myconfig' | grep -- -ldb`
if [[ $product_version =~ ^10.13 && x$have_db = 'x' ]]; then
SUPPRESS_BIODB_TO_JSON=1;
log_echo;
log_echo ===============================================================
log_echo "** MacOS High Sierra with broken system Perl detected. **";
log_echo "biodb-to-json.pl does not work on MacOS High Sierra with the stock system Perl.";
log_echo "The setup will not run biodb-to-json.pl for its sample data: Volvox and Yeast.";
log_echo "To re-enable formatting on your High Sierra machine, install a Perl with a working BerkeleyDB."
log_echo;
log_echo "If you use Homebrew, an easy way to install a working Perl would be:"
log_echo;
log_echo " brew install berkeley-db; brew install --build-from-source perl"
log_echo;
log_echo "Then delete the external perl libraries and run setup.sh again:"
log_echo;
log_echo " rm -rf extlibs/; ./setup.sh"
log_echo;
log_echo ===============================================================
log_echo;
fi
fi
log_echo "NOTE: Legacy scripts wig-to-json.pl and bam-to-json.pl have been removed from setup. Their functionality has been superseded by add-bam-track.pl and add-bw-track.pl. If you require the old versions, please use JBrowse 1.12.3 or earlier."
# if we are running in a development build, then run npm install and run the webpack build.
if [ -f "src/JBrowse/Browser.js" ]; then
log_echo -n "Installing node.js dependencies and building with webpack ..."
(
set -e
check_node
[[ -f node_modules/.bin/yarn ]] || npm install yarn
node_modules/.bin/yarn install
JBROWSE_BUILD_MIN=$JBROWSE_BUILD_MIN node_modules/.bin/yarn build
) >>setup.log 2>&1;
done_message "" "" "FAILURE NOT ALLOWED"
else
log_echo "Minimal release, skipping node and Webpack build (note: this version will not allow using plugins. Use a github clone or a dev version of JBrowse to use plugins"
fi
log_echo -n "Installing Perl prerequisites ..."
if ! ( perl -MExtUtils::MakeMaker -e 1 >/dev/null 2>&1); then
log_echo;
log_echo "WARNING: Your Perl installation does not seem to include a complete set of core modules. Attempting to cope with this, but if installation fails please make sure that at least ExtUtils::MakeMaker is installed. For most users, the best way to do this is to use your system's package manager: apt, yum, fink, homebrew, or similar.";
fi;
( set -x;
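# cpanm is deliberately invoked several times: the early attempts are allowed
# to fail transiently, and only the final invocation (run under set -e) aborts setup.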
bin/cpanm -v --notest -l extlib/ Bio::Perl@1.7.2 < /dev/null;
bin/cpanm -v --notest -l extlib/ Bio::Perl@1.7.2 < /dev/null;
set -e
bin/cpanm -v --notest -l extlib/ Bio::Perl@1.7.2 < /dev/null;
set -x;
bin/cpanm -v --notest -l extlib/ --installdeps . < /dev/null;
bin/cpanm -v --notest -l extlib/ --installdeps . < /dev/null;
set -e;
bin/cpanm -v --notest -l extlib/ --installdeps . < /dev/null;
) >>setup.log 2>&1;
done_message "" "As a first troubleshooting step, make sure development libraries and header files for GD, Zlib, and libpng are installed and try again.";
log_echo
log_echo -n "Formatting Volvox example data ...";
( set -e;
set -x;
# format volvox
rm -rf sample_data/json/volvox;
bin/prepare-refseqs.pl --fasta docs/tutorial/data_files/volvox.fa --out sample_data/json/volvox;
if [ $SUPPRESS_BIODB_TO_JSON -eq 1 ]; then
echo "Not running biodb-to-json.pl for Volvox";
else
bin/biodb-to-json.pl -v --conf docs/tutorial/conf_files/volvox.json --out sample_data/json/volvox;
fi
cat \
docs/tutorial/data_files/volvox_microarray.bw.conf \
docs/tutorial/data_files/volvox_sine.bw.conf \
docs/tutorial/data_files/volvox-sorted.bam.conf \
docs/tutorial/data_files/volvox-sorted.bam.coverage.conf \
docs/tutorial/data_files/volvox-paired.bam.conf \
docs/tutorial/data_files/volvox.vcf.conf \
docs/tutorial/data_files/volvox_fromconfig.conf \
docs/tutorial/data_files/volvox.gff3.conf \
docs/tutorial/data_files/volvox.gtf.conf \
docs/tutorial/data_files/volvox.sort.gff3.gz.conf \
docs/tutorial/data_files/volvox.sort.gff3.gz.htmlfeatures.conf \
docs/tutorial/data_files/volvox.sort.bed.gz.conf \
docs/tutorial/data_files/gvcf.vcf.gz.conf \
docs/tutorial/data_files/bookmarks.conf \
docs/tutorial/data_files/volvox.subsubparts.gff3.conf \
docs/tutorial/data_files/volvox-long-reads.fastq.sorted.bam.conf \
docs/tutorial/data_files/volvox-long-reads.fastq.sorted.cram.conf \
docs/tutorial/data_files/volvox.bb.conf \
docs/tutorial/data_files/volvox-sorted.cram.conf \
docs/tutorial/data_files/volvox-sv.bam.conf \
docs/tutorial/data_files/volvox-sv.cram.conf \
>> sample_data/json/volvox/tracks.conf
bin/add-json.pl '{ "dataset_id": "volvox", "include": [ "../../raw/volvox/functions.conf" ] }' sample_data/json/volvox/trackList.json
bin/add-json.pl '{ "dataset_id": "volvox", "plugins": [ "HideTrackLabels", "NeatCanvasFeatures", "NeatHTMLFeatures" ] }' sample_data/json/volvox/trackList.json
bin/flatfile-to-json.pl --bed docs/tutorial/data_files/volvox_segment.bed --out sample_data/json/volvox --trackLabel ChromHMM --trackType CanvasFeatures --clientConfig '{"color": "{chromHMM}", "strandArrow": false}' --config '{"displayMode": "collapsed", "enableCollapsedMouseover": true, "category": "Miscellaneous" }';
bin/generate-names.pl --safeMode -v --out sample_data/json/volvox;
mkdir -p sample_data/raw;
if [ ! -e sample_data/raw/volvox ]; then
ln -s ../../docs/tutorial/data_files sample_data/raw/volvox;
fi;
ln -sf ../../docs/tutorial/conf_files/volvox.json sample_data/raw/;
touch sample_data/json/volvox/successfully_run;
) >>setup.log 2>&1
done_message "To see the volvox example data, browse to http://your.jbrowse.root/index.html?data=sample_data/json/volvox.";
log_echo
log_echo -n "Formatting Yeast example data ...";
( set -e;
set -x;
# format yeast
rm -rf sample_data/json/yeast/;
bin/prepare-refseqs.pl --fasta sample_data/raw/yeast_scaffolds/chr1.fa.gz --fasta sample_data/raw/yeast_scaffolds/chr2.fa.gzip --out sample_data/json/yeast/;
gunzip -c sample_data/raw/yeast_scaffolds/chr1.fa.gz sample_data/raw/yeast_scaffolds/chr2.fa.gzip > sample_data/raw/yeast_chr1+2/yeast.fa;
if [ $SUPPRESS_BIODB_TO_JSON -eq 1 ]; then
echo "Not running biodb-to-json.pl for Yeast";
else
bin/biodb-to-json.pl --conf sample_data/raw/yeast.json --out sample_data/json/yeast/;
fi
bin/add-json.pl '{ "dataset_id": "yeast" }' sample_data/json/yeast/trackList.json
bin/add-json.pl '{ "dataset_id": "yeast", "plugins": [ "NeatHTMLFeatures","NeatCanvasFeatures","HideTrackLabels" ] }' sample_data/json/yeast/trackList.json
bin/generate-names.pl --dir sample_data/json/yeast/;
) >>setup.log 2>&1;
done_message "To see the yeast example data, browse to http://your.jbrowse.root/index.html?data=sample_data/json/yeast.";
|
<filename>modules/api-system/common/models/system-user/user.roles.js
'use strict'
module.exports = function(SystemUser) {
const Role = SystemUser.app.models.SystemRole
const RoleMapping = SystemUser.app.models.SystemRoleMapping
const findUserRoleMapping = (userId, roleId) =>
RoleMapping.findOne({
where: {
roleId,
principalId: userId,
principalType: RoleMapping.USER,
},
})
const upsertUserRoleMapping = (id, userId, roleId) =>
RoleMapping.upsert({
id: id || null,
roleId: roleId,
principalId: userId,
principalType: RoleMapping.USER,
})
const removeUserRoleMapping = (userId, roleId) =>
RoleMapping.deleteAll({
roleId: roleId,
principalId: userId,
principalType: RoleMapping.USER,
})
/**
*
   * Retrieves the names of all the roles in the system
   * @returns {Promise<string[]>} the names of all system roles
*/
const getSystemRoleNames = () =>
Role.find().then(roles => roles.map(role => role.name))
/**
* Find a role with a given name
* @param roleName the name of the role to search
* @returns {object} the role or a rejected promise if not found
*/
const findRoleByName = roleName =>
Role.findOne({ where: { name: roleName } }).then(
role =>
role
? role
: Promise.reject(
new Error(`Unable to find role with name ${roleName}`)
)
)
/**
* Verifies that user has a certain role
* @param {String} userId The ID of the user
* @param {String} roleId The ID of the Role
   * @returns {Promise<(string|boolean)>} the id of the roleMapping, or false if the role is not assigned
*/
const hasRole = (userId, roleId) =>
findUserRoleMapping(userId, roleId)
.then(roleMapping => (roleMapping ? roleMapping.id : false))
.catch(err =>
Promise.reject(
`Error getting role ${roleId} assignment from user ${userId}. ${err.message}`
)
)
/**
* Add a role to a user
* @param {String} userId The ID of the user
* @param {String} roleId The ID of the Role
   * @returns {Promise} resolves once the role mapping has been upserted
*/
const addUserRole = (userId, roleId) =>
hasRole(userId, roleId)
.then(roleMappingId =>
upsertUserRoleMapping(roleMappingId, userId, roleId)
)
.catch(err =>
Promise.reject(
`Error adding role ${roleId} to user ${userId}. ${err.message}`
)
)
/**
* Remove a role from a user
* @param {String} userId The ID of the user
* @param {String} roleId The ID of the Role
   * @returns {Promise} resolves once the role mapping has been removed
*/
const removeUserRole = (userId, roleId) =>
hasRole(userId, roleId)
.then(() => removeUserRoleMapping(userId, roleId))
.catch(err =>
Promise.reject(
`Error removing role ${roleId} from user ${userId}. ${err.message}`
)
)
/**
* Add a role to the current user
* @param {String} roleName The name of the Role
* @returns {Boolean} True if successful
*/
SystemUser.prototype.addRole = function addRole(roleName) {
return findRoleByName(roleName).then(role => addUserRole(this.id, role.id))
}
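  // Example (illustrative):
  //   user.addRole('admin').then(() => user.info()).then(console.log)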
/**
* Remove a role from the current user
* @param {String} roleName The name of the Role
* @returns {Boolean} True if successful
*/
SystemUser.prototype.removeRole = function removeRole(roleName) {
return findRoleByName(roleName).then(role =>
removeUserRole(this.id, role.id)
)
}
/**
* Get the names of the assigned roles for the current user
* @returns {string[]} array of role names
*/
SystemUser.prototype.getUserRoleNames = function getUserRoleNames() {
return this.roles.getAsync().map(userRole => userRole.name)
}
/**
* Create a map of assigned and unassigned roles
* @param {string[]} systemRoles array of System role names
* @param {string[]} userRoles array of User assigned roles names
*/
const getRoleAssignment = (systemRoles, userRoles) => ({
assigned: systemRoles.filter(name => userRoles.includes(name)),
unassigned: systemRoles.filter(name => !userRoles.includes(name)),
})
/**
* Get the roles for this SystemUser.
* @returns {Object} A map of all roles of this user
*/
SystemUser.prototype.info = function info() {
return Promise.all([getSystemRoleNames(), this.getUserRoleNames()])
.then(([systemRoles, userRoles]) =>
getRoleAssignment(systemRoles, userRoles)
)
.then(roles => ({ user: this, roles }))
}
}
|
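# Start MySQL with a generated root password, read that password back from the
# container logs, run httpd and nginx alongside it, then stop all three containers.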
docker container run -d -p 3306:3306 --name db -e MYSQL_RANDOM_ROOT_PASSWORD=yes mysql
docker container logs db | grep 'GENERATED ROOT PASSWORD'
docker container run -d --name webserver -p 8080:80 httpd
docker container run -d --name proxy -p 80:80 nginx
docker container stop proxy db webserver
|
<gh_stars>1-10
import xml.etree.ElementTree
import numpy as np
from nexusutils.coordinatetransformer import CoordinateTransformer
import logging
from nexusutils.utils import normalise, find_rotation_axis_and_angle_between_vectors
import itertools
import uuid
logger = logging.getLogger("NeXus_Utils")
class NotFoundInIDFError(Exception):
pass
class UnknownPixelShapeError(Exception):
pass
class IDFParser:
"""
Parses Mantid IDF files
"""
def __init__(self, idf_file):
"""
:param idf_file: IDF file name or object
"""
self.root = xml.etree.ElementTree.parse(idf_file).getroot()
self.ns = {"d": "http://www.mantidproject.org/IDF/1.0"}
self.__get_defaults()
# Our root should be the instrument
assert self.root.tag == "{" + self.ns["d"] + "}instrument"
def get_instrument_name(self):
"""
Returns the name of the instrument
:return: Instrument name
"""
return self.root.get("name")
def get_source_name(self):
"""
Returns the name of the source or None if no source is found
:return: Source name or None if not found
"""
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "Source":
return xml_type.get("name")
return None
def get_source_position(self):
"""
Returns the source position as an x,y,z coord list
:return: The source position as a list
"""
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "Source":
for xml_source_component in self.root.findall("d:component", self.ns):
if xml_source_component.get("type") == xml_type.get("name"):
location_type = xml_source_component.find("d:location", self.ns)
location = self.__get_vector(location_type, top_level=True)
if location is not None:
return location
else:
return np.array([0.0, 0.0, 0.0])
raise NotFoundInIDFError("Source tag not found in IDF")
def get_sample_position(self):
"""
Find the sample position as an x,y,z coord list
:return: The sample position as a list
"""
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "SamplePos":
for xml_sample_component in self.root.findall("d:component", self.ns):
if xml_sample_component.get("type") == xml_type.get("name"):
location_type = xml_sample_component.find("d:location", self.ns)
location = self.__get_vector(location_type, top_level=True)
if location is not None:
return location
else:
return np.array([0.0, 0.0, 0.0])
raise NotFoundInIDFError("SamplePos tag not found in IDF")
def get_rectangular_detectors(self):
"""
Get detector banks information from a Mantid IDF file for RectangularDetector panels
:returns A generator which yields details of each detector bank found in the instrument file
"""
# Look for detector bank definition
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "rectangular_detector":
pixel_name = xml_type.get("type")
pixel_shape = self.__get_pixel_shape(self.root, pixel_name)
bank_type_name = xml_type.get("name")
x_pixel_offset_1d = self.__get_1d_pixel_offsets("x", xml_type)
y_pixel_offset_1d = self.__get_1d_pixel_offsets("y", xml_type)
x_pixel_offset, y_pixel_offset = np.meshgrid(
x_pixel_offset_1d, y_pixel_offset_1d
)
z_pixel_offset = np.zeros_like(x_pixel_offset)
offsets = np.stack(
(x_pixel_offset, y_pixel_offset, z_pixel_offset), axis=-1
)
yield from self.find_rectangular_detector_components(
bank_type_name,
offsets,
pixel_name,
pixel_shape,
x_pixel_offset,
y_pixel_offset,
self.root,
)
for xml_top_level_type in self.root.findall("d:type", self.ns):
yield from self.find_rectangular_detector_components(
bank_type_name,
offsets,
pixel_name,
pixel_shape,
x_pixel_offset,
y_pixel_offset,
xml_top_level_type,
)
def find_rectangular_detector_components(
self,
bank_type_name,
offsets,
pixel_name,
pixel_shape,
x_pixel_offset,
y_pixel_offset,
root_type,
):
for component in root_type.findall("d:component", self.ns):
if component.get("type") == bank_type_name:
location = component.find("d:location", self.ns)
detector_numbers = self.__get_rectangular_detector_ids(
component, len(x_pixel_offset), len(y_pixel_offset)
)
detector_name = component.find("d:location", self.ns).get("name")
if detector_name is None:
detector_name = bank_type_name
det_bank_info = {
"name": detector_name,
"pixel": {"name": pixel_name, "shape": pixel_shape},
"offsets": offsets,
"idlist": detector_numbers,
"sub_components": [
bank_type_name
], # allows use of links in builder
"location": self.__get_vector(location, top_level=True),
"orientation": self.__parse_facing_element(component),
}
yield det_bank_info
@staticmethod
def __get_rectangular_detector_ids(component, x_pixels, y_pixels):
idstart = component.get("idstart")
idstart = int(idstart) if idstart is not None else 1
idstep = component.get("idstep")
idstep = int(idstep) if idstep is not None else 1
idfillbyfirst = component.get("idfillbyfirst")
idfillbyfirst = idfillbyfirst if idfillbyfirst is not None else "y"
idstepbyrow = component.get("idstepbyrow")
idstepbyrow = int(idstepbyrow) if idstepbyrow is not None else 1
if idfillbyfirst == "x":
x_2d, y_2d = np.mgrid[
0 : x_pixels * idstep : idstep, 0 : y_pixels * idstepbyrow : idstepbyrow
]
else:
x_2d, y_2d = np.mgrid[
0 : x_pixels * idstepbyrow : idstepbyrow, 0 : y_pixels * idstep : idstep
]
return (x_2d + y_2d) + idstart
def __get_vector(self, xml_point, top_level=False):
"""
Get a numpy array vector from an IDF vector element
:param xml_point: The xml element defining the vector
:param top_level: If true this vector is relative to the coord system origin, not a parent component
:return: Numpy array of the vector
"""
vector = self.__get_vector_without_transforming(xml_point)
if vector is not None:
return self.transform.get_nexus_coordinates(vector, top_level)
return None
def __get_vector_without_transforming(self, xml_point):
x = xml_point.get("x")
y = xml_point.get("y")
z = xml_point.get("z")
if [x, y, z] == [None, None, None]:
# No cartesian axes, maybe there are spherical?
r = xml_point.get("r")
t = xml_point.get("t")
p = xml_point.get("p")
if [r, t, p] == [None, None, None]:
logger.debug("No x,y,z or r,t,p values found in IDFParser.__get_vector")
return None
vector = np.array(
[self.__none_to_zero(r), self.__none_to_zero(t), self.__none_to_zero(p)]
).astype(float)
vector = self.transform.spherical_to_cartesian(vector)
else:
vector = np.array(
[self.__none_to_zero(x), self.__none_to_zero(y), self.__none_to_zero(z)]
).astype(float)
return vector
def __get_pixel_names_and_shapes(self):
pixels = []
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "detector":
name = xml_type.get("name")
pixels.append({"name": name, "shape": self.__get_shape(xml_type)})
return pixels
def __get_detector_offsets(self, xml_type, top_level=False):
"""
Gets list of locations from a detector component
:param xml_type: Component of a detector containing location or locations elements
:return: List of locations for this component
"""
detector_offsets = []
for child in xml_type:
if child.tag == "{" + self.ns["d"] + "}location":
detector_offsets.append(self.__get_vector(child, top_level=top_level))
elif child.tag == "{" + self.ns["d"] + "}locations":
n_locations = int(child.get("n-elements"))
locations = [
np.array([0.0] * n_locations).astype(float),
np.array([0.0] * n_locations).astype(float),
np.array([0.0] * n_locations).astype(float),
]
for axis_number, axis in enumerate(["x", "y", "z"]):
if child.get(axis):
if child.get(axis + "-end"):
locations[axis_number] = np.linspace(
start=float(child.get(axis)),
stop=float(child.get(axis + "-end")),
num=int(child.get("n-elements")),
)
else:
locations[axis_number] = np.array(
[float(child.get(axis))] * int(child.get("n-elements"))
).astype(float)
for n in range(n_locations):
detector_offsets.append(
np.array(
[locations[0][n], locations[1][n], locations[2][n]]
).astype(float)
)
return detector_offsets
def get_detectors(self):
"""
Get detector information from the IDF
:return: List of detector dictionaries
"""
pixels = self.__get_pixel_names_and_shapes()
components = []
for pixel in pixels:
searched_already = list()
self.__collect_detector_components(
components, pixel["name"], searched_already
)
top_level_detector_names = self.__find_top_level_detector_names(components)
self.__fix_top_level_components(components, top_level_detector_names)
detectors = self.__collate_detector_info(
pixels, components, top_level_detector_names
)
return detectors
@staticmethod
def __fix_top_level_components(components, top_level_detector_names):
"""
For some reason IDFs often have a superfluous top level component which only links the detector to an idlist
and does not contain a location element. Here we combine the top level component with its subcomponent to
create a new top level component with all the necessary metadata.
"""
delete_components = []
for component in components:
if component["name"] in top_level_detector_names:
if component["locations"][0][0] is None:
# We'll have to combine this with its subcomponent
if len(component["sub_components"]) != 1:
raise NotFoundInIDFError(
"Top level detector component has no location defined and does not have one "
"sub component to use the location of."
)
subcomponent_name = component["sub_components"][0]
subcomponent = next(
(
component
for component in components
if component["name"] == subcomponent_name
),
None,
)
top_level_detector_names.add(subcomponent_name)
top_level_detector_names.remove(component["name"])
subcomponent["idlist"] = component["idlist"]
delete_components.append(component["name"])
if subcomponent["locations"][0][0] is None:
subcomponent["locations"][0][0] = np.array([0.0, 0.0, 0.0])
components[:] = [
component
for component in components
if not component["name"] in delete_components
]
def __collate_detector_info(self, pixels, components, top_level_detector_names):
detectors = list()
# Components where we don't need to calculate offsets or we have already calculated the offsets
pixel_names = {pixel["name"] for pixel in pixels}
component_names_offsets_known = set(pixel_names)
all_component_names = {component["name"] for component in components}
all_component_names.update(component_names_offsets_known)
while component_names_offsets_known != all_component_names:
# Propagate pixel name up through components too,
# if we get a component with multiple pixel types then raise an error,
# eventually can deal with this by creating NXdetector_modules.
for component in components:
# If we know the offsets of all of this component's sub-components
# then we can calculate the offsets for it.
sub_component_names = component["sub_components"]
if set(sub_component_names).issubset(component_names_offsets_known):
# Get offset lists of the sub components
sub_component_offsets = []
for sub_comp_index, sub_component_name in enumerate(
sub_component_names
):
if sub_component_name in pixel_names:
sub_component_offsets.append([np.array([0.0, 0.0, 0.0])])
component["pixels"].append(sub_component_name)
else:
component["pixels"].extend(
self.__get_component_pixels(
components, sub_component_name
)
)
sub_component_offsets.append(
self.__get_component_offsets(
components, sub_component_name
)
)
if not self.__all_elements_equal(component["pixels"]):
raise NotImplementedError(
component["name"]
+ " has multiple pixel types, need to implement treating "
"its sub-components as NXdetector_modules"
)
if component["name"] in top_level_detector_names:
component["offsets"] = list(
itertools.chain.from_iterable(sub_component_offsets)
)
pixel_name = component["pixels"][0]
pixel = next(
(pixel for pixel in pixels if pixel["name"] == pixel_name),
None,
)
component["pixel"] = pixel
component["location"] = component["locations"][0][0]
detectors.append(component)
else:
component["offsets"] = []
for sub_comp_index, offset_list in enumerate(
sub_component_offsets
):
component["offsets"].extend(
self.__calculate_new_offsets(
offset_list, component["locations"][sub_comp_index]
)
)
component_names_offsets_known.add(component["name"])
return detectors
@staticmethod
def __all_elements_equal(input_list):
"""
Check all elements of the input list are equal
:param input_list: List, are all its elements equal?
:return: Bool result
"""
return input_list.count(input_list[0]) == len(input_list)
@staticmethod
def __get_component_offsets(components, component_name):
locations = next(
(
component["offsets"]
for component in components
if component["name"] == component_name
),
None,
)
return locations
@staticmethod
def __get_component_pixels(components, component_name):
pixel = next(
(
component["pixels"]
for component in components
if component["name"] == component_name
),
None,
)
return pixel
def __get_id_list(self, idname):
idlist = []
for xml_idlist in self.root.findall("d:idlist", self.ns):
if xml_idlist.get("idname") == idname:
for xml_id in xml_idlist.findall("d:id", self.ns):
if xml_id.get("start") is not None:
idlist += list(
range(int(xml_id.get("start")), int(xml_id.get("end")) + 1)
)
elif xml_id.get("val") is not None:
idlist.append(int(xml_id.get("val")))
else:
raise NotFoundInIDFError(
'Could not find IDs in idlist called "' + idname + '"'
)
return idlist
@staticmethod
def __find_top_level_detector_names(components):
sub_component_names = set()
component_names = set()
for component in components:
component_names.add(component["name"])
for sub_component in component["sub_components"]:
sub_component_names.add(sub_component)
# Component in component_names but not in sub_component names is a top level detector component
top_level_detector_component_names = component_names - sub_component_names
return top_level_detector_component_names
@staticmethod
def __calculate_new_offsets(old_offsets, new_offsets):
offsets = []
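        # Broadcasting: each new (1, 3) offset translates the whole (n, 3) stack
        # of old offsets, yielding n shifted copies per new offset.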
for new_offset in new_offsets:
# apply as a translation to each old offset
offsets.extend(old_offsets + np.expand_dims(new_offset, 1).T)
return offsets
def __collect_detector_components(self, components, search_type, searched_already):
if search_type in searched_already:
return
searched_already.append(search_type)
for xml_type in self.root.findall("d:type", self.ns):
for xml_component in xml_type.findall("d:component", self.ns):
if xml_component.get("type") == search_type:
name = xml_type.get("name")
self.__append_component(
name, xml_component, components, search_type, searched_already
)
for xml_top_component in self.root.findall("d:component", self.ns):
if xml_top_component.get("type") == search_type:
name = xml_top_component.get("name")
if name is None:
name = str(uuid.uuid4())
self.__append_component(
name,
xml_top_component,
components,
search_type,
searched_already,
top_level=True,
)
def __append_component(
self,
name,
xml_component,
components,
search_type,
searched_already,
top_level=False,
):
offsets = self.__get_detector_offsets(xml_component, top_level=top_level)
component = next(
(component for component in components if component["name"] == name), None
)
if component is not None:
component["sub_components"].append(search_type)
idlist = xml_component.get("idlist")
if idlist is not None:
component["idlist"] = idlist
component["locations"].append(offsets)
else:
idlist = xml_component.get("idlist")
if idlist is not None:
orientation = self.__parse_facing_element(xml_component)
components.append(
{
"name": name,
"sub_components": [search_type],
"locations": [offsets],
"idlist": self.__get_id_list(idlist),
"orientation": orientation,
"pixels": [],
}
)
else:
orientation = self.__parse_facing_element(xml_component)
components.append(
{
"name": name,
"sub_components": [search_type],
"locations": [offsets],
"orientation": orientation,
"pixels": [],
}
)
self.__collect_detector_components(components, name, searched_already)
def __rotation_list(self, rot, rotations):
if rot is not None:
axis = np.array(
[rot.get("axis-x"), rot.get("axis-y"), rot.get("axis-z")]
).astype(float)
if all([np.isnan(x) for x in axis]):
axis = np.array(
[0.0, 0.0, 1.0]
) # This is how mantid defines angle only definition (around z)
rotation = {"angle": float(rot.get("val")), "axis": axis}
rotations.append(rotation)
rotations = self.__rotation_list(rot.find("d:rot", self.ns), rotations)
return rotations
def __parse_facing_element(self, xml_component):
location_type = xml_component.find("d:location", self.ns)
orientation = None
if location_type is not None:
location = self.__get_vector(location_type)
facing_type = location_type.find("d:facing", self.ns)
rot = location_type.find("d:rot", self.ns)
if facing_type is not None:
facing_point = self.__get_vector(facing_type)
vector_to_face_point = facing_point - location
axis, angle = find_rotation_axis_and_angle_between_vectors(
vector_to_face_point, np.array([0, 0, -1.0])
)
orientation = {"axis": axis, "angle": np.rad2deg(angle)}
elif rot is not None:
orientation = self.__rotation_list(rot, [])
return orientation
def __get_pixel_shape(self, xml_root, type_name):
for xml_type in xml_root.findall("d:type", self.ns):
if xml_type.get("name") == type_name and (
xml_type.get("is") == "detector" or xml_type.get("is") == "Detector"
):
return self.__get_shape(xml_type)
return None
def __get_shape(self, xml_type):
cuboid = xml_type.find("d:cuboid", self.ns)
cylinder = xml_type.find("d:cylinder", self.ns)
if cuboid is not None:
return self.__parse_cuboid(cuboid)
elif cylinder is not None:
return self.__parse_cylinder(cylinder)
else:
if len(list(xml_type)) != 0:
raise UnknownPixelShapeError()
def __parse_cuboid(self, cuboid_xml):
"""
Get details NeXus needs to describe a cuboid
:param cuboid_xml: The xml element describing the cuboid
:return: A dictionary containing dimensions of the cuboid
"""
left_front_bottom = self.__get_vector(
cuboid_xml.find("d:left-front-bottom-point", self.ns)
)
left_front_top = self.__get_vector(
cuboid_xml.find("d:left-front-top-point", self.ns)
)
left_back_bottom = self.__get_vector(
cuboid_xml.find("d:left-back-bottom-point", self.ns)
)
right_front_bottom = self.__get_vector(
cuboid_xml.find("d:right-front-bottom-point", self.ns)
)
# Assume x pixel size is left to right
left_to_right = right_front_bottom - left_front_bottom
x_pixel_size = np.sqrt(np.dot(left_to_right, left_to_right))
# Assume y pixel size is front to back
front_to_back = left_back_bottom - left_front_bottom
y_pixel_size = np.sqrt(np.dot(front_to_back, front_to_back))
# Assume thickness is top to bottom
top_to_bottom = left_front_top - left_front_bottom
thickness = np.sqrt(np.dot(top_to_bottom, top_to_bottom))
return {
"shape": "cuboid",
"x_pixel_size": x_pixel_size,
"y_pixel_size": y_pixel_size,
"thickness": thickness,
}
def __parse_cylinder(self, cylinder_xml):
"""
Get details NeXus needs to describe a cylinder
:param cylinder_xml: The xml element describing the cylinder
:return: A dictionary containing dimensions of the cylinder
"""
axis, axis_mag = normalise(
self.__get_vector(cylinder_xml.find("d:axis", self.ns))
)
radius = float(cylinder_xml.find("d:radius", self.ns).get("val"))
height = float(cylinder_xml.find("d:height", self.ns).get("val"))
return {"shape": "cylinder", "height": height, "radius": radius, "axis": axis}
@staticmethod
def __get_1d_pixel_offsets(dimension_name, xml_type):
step = float(xml_type.get(dimension_name + "step"))
pixels = int(xml_type.get(dimension_name + "pixels"))
start = float(xml_type.get(dimension_name + "start"))
stop = start + (step * (pixels - 1))
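        # e.g. start=-0.1, step=0.05, pixels=5 -> offsets -0.1, -0.05, 0.0, 0.05, 0.1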
return np.linspace(start, stop, pixels)
def __get_structured_detector_typenames(self):
names = []
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "StructuredDetector":
names.append(xml_type.get("name"))
return names
def get_structured_detectors(self):
"""
Returns details for all components which are StructuredDetectors
:return:
"""
structured_detector_names = self.__get_structured_detector_typenames()
if not structured_detector_names:
return None
location = {}
rotation = {}
for xml_type in self.root.findall("d:component", self.ns):
if xml_type.get("type") in structured_detector_names:
for location_type in xml_type:
location = self.__get_vector(location_type, top_level=True)
angle = location_type.get("rot")
if angle is not None:
rotation = self.__rotation_list(angle, [])
else:
rotation = None
yield {
"id_start": int(xml_type.get("idstart")),
"X_id_step": int(xml_type.get("idstepbyrow")),
"Y_id_step": int(xml_type.get("idstep")),
"name": xml_type.get("name"),
"type_name": xml_type.get("type"),
"location": location,
"orientation": rotation,
}
def get_monitors(self):
"""
Get monitor information from the IDF
:return: List of monitor dictionaries, list of monitor type names
"""
all_monitor_type_names, monitor_types = self.__get_monitor_types()
# Now look for components with one of these types, they'll be grouped in another element
# Add them to a list, NB order matters for id assignment
monitors = []
for xml_type in self.root.findall("d:type", self.ns):
type_contains_monitors = False
for xml_component in xml_type.findall("d:component", self.ns):
type_name = xml_component.get("type")
if type_name in all_monitor_type_names:
type_contains_monitors = True
for xml_location in xml_component.findall("d:location", self.ns):
monitors.append(
{
"name": xml_location.get("name"),
"location": self.__get_vector(
xml_location, top_level=True
),
"type_name": type_name,
"id": None,
}
)
if type_contains_monitors:
id_list = self.__get_monitor_idlist(xml_type.get("name"))
self.__assign_ids(monitors, id_list)
return monitors, monitor_types
@staticmethod
def __assign_ids(components, id_list):
"""
Assign an id from id_list to each id-less component dictionary in components list in order
:param components: List of dictionaries, dictionary should have id key, assign an id to it if None
:param id_list: List of ids to assign
"""
next_id = 0
for component in components:
if component["id"] is None:
component["id"] = id_list[next_id]
next_id += 1
def __get_monitor_idlist(self, type_name):
for xml_component in self.root.findall("d:component", self.ns):
if xml_component.get("type") == type_name:
location_xml = xml_component.find("d:location", self.ns)
                if location_xml is not None:
if len(location_xml.attrib) > 0:
raise NotImplementedError(
"dealing with location in __get_monitor_idlist is not implemented yet"
)
idlist_name = xml_component.get("idlist")
idlist = self.__get_id_list(idlist_name)
return idlist
def __get_monitor_types(self):
monitor_types = []
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "monitor":
name = xml_type.get("name")
monitor_types.append(
{"name": name, "shape": self.__get_shape(xml_type)}
)
all_monitor_type_names = [monitor["name"] for monitor in monitor_types]
return all_monitor_type_names, monitor_types
def __get_defaults(self):
angle_units = self.__get_default_units()
self.__get_default_coord_systems(angle_units)
def __get_default_coord_systems(self, angle_units):
xml_defaults = self.root.find("d:defaults", self.ns)
nexus_x = "x"
nexus_y = "y"
nexus_z = "z"
        if xml_defaults is not None:
# Default "location" element is undocumented in
# http://docs.mantidproject.org/nightly/concepts/InstrumentDefinitionFile.html
# but it seems to define the zero axis for the spherical coordinate system
xml_coord_map = xml_defaults.find("d:location", self.ns)
            if xml_coord_map is not None:
if not [
float(xml_coord_map.get("r")),
float(xml_coord_map.get("t")),
float(xml_coord_map.get("p")),
float(xml_coord_map.get("ang")),
float(xml_coord_map.get("x")),
float(xml_coord_map.get("y")),
float(xml_coord_map.get("z")),
] == [0, 0, 0, 0, 0, 0, 1]:
raise NotImplementedError(
"Dealing with spherical coordinate systems where the zero"
"axis is not along the z axis is not yet implemented"
)
xml_ref_frame = xml_defaults.find("d:reference-frame", self.ns)
xml_along_beam = xml_ref_frame.find("d:along-beam", self.ns)
xml_up = xml_ref_frame.find("d:pointing-up", self.ns)
if xml_along_beam is None or xml_up is None:
raise NotFoundInIDFError(
'Expected "along-beam" and "pointing-up" to be specified '
"in the default reference frame in the IDF"
)
nexus_y = xml_up.get("axis")
nexus_z = xml_along_beam.get("axis")
handedness = "right"
xml_handedness = xml_ref_frame.find("d:handedness", self.ns)
            if xml_handedness is not None:
handedness = xml_handedness.get("val")
def is_negative(direction):
return direction[0] == "-"
def flip_axis(nexus_a):
return "-" + nexus_a if not is_negative(nexus_a) else nexus_a[1:]
unsigned_yz_list = [
nexus_y[1:] if is_negative(nexus_y) else nexus_y,
nexus_z[1:] if is_negative(nexus_z) else nexus_z,
]
# Assuming right-handedness
if unsigned_yz_list == ["y", "z"]:
nexus_x = "x"
elif unsigned_yz_list == ["z", "y"]:
nexus_x = "-x"
elif unsigned_yz_list == ["x", "y"]:
nexus_x = "-z"
elif unsigned_yz_list == ["y", "x"]:
nexus_x = "z"
elif unsigned_yz_list == ["x", "z"]:
nexus_x = "y"
elif unsigned_yz_list == ["z", "x"]:
nexus_x = "-y"
else:
raise RuntimeError(
"Unexpected yz list in IDFParser.__get_default_coord_systems"
)
if is_negative(nexus_y) ^ is_negative(nexus_z):
nexus_x = flip_axis(nexus_x)
if handedness == "left":
nexus_x = flip_axis(nexus_x)
self.transform = CoordinateTransformer(
angles_in_degrees=(angle_units == "deg"),
nexus_coords=[nexus_x, nexus_y, nexus_z],
origin=self.__get_idf_sample_position(),
)
def __get_idf_sample_position(self):
"""
The sample position for the NeXus file is the origin,
this method gets the sample position in the coordinate system of the IDF
:return: Numpy array of the sample position
"""
sample_position = None
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("is") == "SamplePos":
for xml_sample_component in self.root.findall("d:component", self.ns):
if xml_sample_component.get("type") == xml_type.get("name"):
location_type = xml_sample_component.find("d:location", self.ns)
if location_type is not None:
sample_position = self.__get_vector_without_transforming(
location_type
)
if sample_position is None:
sample_position = np.array([0.0, 0.0, 0.0])
return sample_position
def __get_default_units(self):
self.length_units = "m"
self.angle_units = "deg"
xml_defaults = self.root.find("d:defaults", self.ns)
        if xml_defaults is not None:
xml_default_length = xml_defaults.find("d:length", self.ns)
idf_length_units = xml_default_length.get("unit")
# Prefer SI unit abbreviation if we can
if idf_length_units.lower() in ["meter", "metre", "meters", "metres", "m"]:
self.length_units = "m"
else:
self.length_units = idf_length_units
xml_default_angle = xml_defaults.find("d:angle", self.ns)
idf_angle_units = xml_default_angle.get("unit")
if idf_angle_units.lower() in ["deg", "degree", "degrees"]:
self.angle_units = "deg"
elif idf_angle_units.lower() in ["rad", "radian", "radians"]:
self.angle_units = "rad"
else:
raise ValueError(
f"Unexpected default unit for angles in IDF file: {idf_angle_units}"
)
return self.angle_units
def get_length_units(self):
return self.length_units
def get_angle_units(self):
return self.angle_units
def get_structured_detector_vertices(self, type_name):
"""
Looks for type definition for a StructuredDetector with the specified name and returns an array of vertices
:param type_name: The name of a StructuredDetector type definition
:return: Numpy array of vertex coordinates
"""
for xml_type in self.root.findall("d:type", self.ns):
if xml_type.get("name") == type_name:
x_pixels = int(xml_type.get("xpixels"))
y_pixels = int(xml_type.get("ypixels"))
vertices = np.zeros((x_pixels + 1, y_pixels + 1, 3))
vertex_number_x = 0
vertex_number_y = 0
for vertex in xml_type:
vertices[vertex_number_x, vertex_number_y, :] = self.__get_vector(
vertex
)
vertex_number_x += 1
if vertex_number_x > x_pixels:
# We've filled a row, move to the next one
vertex_number_x = 0
vertex_number_y += 1
return vertices
return None
@staticmethod
def __none_to_zero(x):
return 0 if x is None else x
|
#!/bin/bash
# This function takes no arguments
# It tries to determine the name of this file in a programmatic way.
function _get_sourced_filename() {
if [ -n "${BASH_SOURCE[0]}" ]; then
basename "${BASH_SOURCE[0]}"
elif [ -n "${(%):-%x}" ]; then
# in zsh use prompt-style expansion to introspect the same information
# see http://stackoverflow.com/questions/9901210/bash-source0-equivalent-in-zsh
basename "${(%):-%x}"
else
echo "UNKNOWN FILE"
fi
}
# The arguments to this are:
# 1. activation nature {activate|deactivate}
# 2. toolchain nature {build|host|ccc}
# 3. machine (should match -dumpmachine)
# 4. prefix (including any final -)
# 5+ program (or environment var comma value)
# The format for 5+ is name{,,value}. If value is specified,
# then name is taken to be an environment variable; otherwise
# it is taken to be a program, whose full path under
# ${CONDA_PREFIX}/bin is resolved during activation. The original
# value is stored in the environment variable CONDA_BACKUP_NAME.
# For deactivation, the distinction is irrelevant, as in all
# cases NAME simply gets reset to CONDA_BACKUP_NAME. It is
# a fatal error if a program is identified but not present.
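# Example (illustrative, mirroring the call at the bottom of this file):
#   _tc_activation activate host @CHOST@ @CHOST@- \
#       c++ g++ "CXXFLAGS,-O2"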
function _tc_activation() {
local act_nature=$1; shift
local tc_nature=$1; shift
local tc_machine=$1; shift
local tc_prefix=$1; shift
local thing
local newval
local from
local to
local pass
if [ "${act_nature}" = "activate" ]; then
from=""
to="CONDA_BACKUP_"
else
from="CONDA_BACKUP_"
to=""
fi
for pass in check apply; do
for thing in $tc_nature,$tc_machine "$@"; do
case "${thing}" in
*,*)
newval=$(echo "${thing}" | sed "s,^[^\,]*\,\(.*\),\1,")
thing=$(echo "${thing}" | sed "s,^\([^\,]*\)\,.*,\1,")
;;
*)
newval="${CONDA_PREFIX}/bin/${tc_prefix}${thing}"
if [ ! -x "${newval}" -a "${pass}" = "check" ]; then
echo "ERROR: This cross-compiler package contains no program ${newval}"
return 1
fi
;;
esac
if [ "${pass}" = "apply" ]; then
thing=$(echo ${thing} | tr 'a-z+-' 'A-ZX_')
eval oldval="\$${from}$thing"
if [ -n "${oldval}" ]; then
eval export "${to}'${thing}'=\"${oldval}\""
else
eval unset '${to}${thing}'
fi
if [ -n "${newval}" ]; then
eval export "'${from}${thing}=${newval}'"
else
eval unset '${from}${thing}'
fi
fi
done
done
return 0
}
# When people are using conda-build, assume that adding rpath during build, and pointing at
# the host env's includes and libs is helpful default behavior
if [ "${CONDA_BUILD:-0}" = "1" ]; then
CXXFLAGS_USED="@CXXFLAGS@ -isystem ${PREFIX}/include -fdebug-prefix-map=${SRC_DIR}=/usr/local/src/conda/${PKG_NAME}-${PKG_VERSION} -fdebug-prefix-map=${PREFIX}=/usr/local/src/conda-prefix"
DEBUG_CXXFLAGS_USED="@DEBUG_CXXFLAGS@ -isystem ${PREFIX}/include -fdebug-prefix-map=${SRC_DIR}=/usr/local/src/conda/${PKG_NAME}-${PKG_VERSION} -fdebug-prefix-map=${PREFIX}=/usr/local/src/conda-prefix"
else
CXXFLAGS_USED="@CXXFLAGS@ -isystem ${CONDA_PREFIX}/include"
DEBUG_CXXFLAGS_USED="@DEBUG_CXXFLAGS@ -isystem ${CONDA_PREFIX}/include"
fi
if [ "${CONDA_BUILD:-0}" = "1" ]; then
if [ -f /tmp/old-env-$$.txt ]; then
rm -f /tmp/old-env-$$.txt || true
fi
env > /tmp/old-env-$$.txt
fi
_tc_activation \
deactivate host @CHOST@ @CHOST@- \
c++ g++ \
"CXXFLAGS,${CXXFLAGS:-${CXXFLAGS_USED}}" \
"DEBUG_CXXFLAGS,${DEBUG_CXXFLAGS:-${DEBUG_CXXFLAGS_USED}}"
if [ $? -ne 0 ]; then
echo "ERROR: $(_get_sourced_filename) failed, see above for details"
#exit 1
else
if [ "${CONDA_BUILD:-0}" = "1" ]; then
if [ -f /tmp/new-env-$$.txt ]; then
rm -f /tmp/new-env-$$.txt || true
fi
env > /tmp/new-env-$$.txt
echo "INFO: $(_get_sourced_filename) made the following environmental changes:"
diff -U 0 -rN /tmp/old-env-$$.txt /tmp/new-env-$$.txt | tail -n +4 | grep "^-.*\|^+.*" | grep -v "CONDA_BACKUP_" | sort
rm -f /tmp/old-env-$$.txt /tmp/new-env-$$.txt || true
fi
fi
|
#!/bin/bash
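# Poll the database port until it accepts connections; curl exits with code
# 6 (could not resolve host) or 7 (connection refused) while MySQL is still starting up.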
res=6
while [ $res -eq 6 -o $res -eq 7 ]
do
sleep 1
curl -s $DATA_DATABASE_HOST:3306
res=$?
done
python Product/Database/DBConn.py
python Product/RecommendationManager/run_recommendation.py
|
#!/bin/zsh
MY_PATH="`dirname \"$0\"`"
source=$1
csv=$2
output_dir=$source.annotated
ann_type='latest'
n=4
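# Export annotated PLYs in parallel: column 1 of each CSV row is passed to
# the exporter as --id ({1}), with $n concurrent jobs and a log file per id.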
mkdir -p $output_dir/plys
mkdir -p $output_dir/logs
parallel --colsep=',' -j $n --eta "node --max-old-space-size=6000 $MY_PATH/../export-annotated-ply.js --id {1} --source $source --ann_type $ann_type --output_dir $output_dir/plys >& $output_dir/logs/{1}.export.log" :::: $csv
|
#include <iostream>
using namespace std;
int main()
{
// Defining array
int arr[] = {12, 18, 4, 9, 14, 28};
int n = sizeof(arr)/sizeof(arr[0]);
// Count variable
int count = 0;
for (int i = 0; i < n; i++) {
// Check for even numbers
if (arr[i] % 2 == 0)
count++;
}
cout << "Number of even numbers in the given array: " << count << endl;
return 0;
}
|
require 'test_helper'
# Validations for when there is no carriage
module ValidationsWhenNotHasCarriage
extend ActiveSupport::Concern
included do
before { subject.stubs(:have_carriage?).returns(false) }
it { wont validate_presence_of(:veiculo) }
it { wont_validate_have_one :veiculo, BrNfe.veiculo_product_class, :invalid_veiculo }
it { wont validate_presence_of(:identificacao_balsa) }
it { wont validate_presence_of(:identificacao_vagao) }
end
end
describe BrNfe::Product::Nfe::Transporte::Base do
subject { FactoryGirl.build(:product_transporte_base) }
let(:veiculo) { FactoryGirl.build(:product_transporte_veiculo) }
let(:volume) { FactoryGirl.build(:product_transporte_volume) }
describe "Alias attributes" do
it { must_have_alias_attribute :modFrete, :modalidade_frete }
it { must_have_alias_attribute :vServ, :retencao_valor_sevico }
it { must_have_alias_attribute :vBCRet, :retencao_base_calculo_icms }
it { must_have_alias_attribute :pICMSRet, :retencao_aliquota }
it { must_have_alias_attribute :vICMSRet, :retencao_valor_icms }
it { must_have_alias_attribute :CFOP, :retencao_cfop }
it { must_have_alias_attribute :cMunFG, :retencao_codigo_municipio }
it { must_have_alias_attribute :veicTransp, :veiculo, BrNfe.veiculo_product_class.new }
it { must_have_alias_attribute :balsa, :identificacao_balsa }
it { must_have_alias_attribute :vagao, :identificacao_vagao }
it { must_have_alias_attribute :vol, :volumes, [BrNfe.volume_transporte_product_class.new] }
it { must_have_alias_attribute :transporta, :transportador, BrNfe.transportador_product_class.new }
end
describe "#default_values" do
		it '#modalidade_frete should default to 9' do
subject.class.new.modalidade_frete.must_equal 9
end
		it '#forma_transporte should default to :veiculo' do
subject.class.new.forma_transporte.must_equal :veiculo
end
end
describe '#Validations' do
it { must validate_inclusion_of(:modalidade_frete).in_array([0, '0', 1, '1', 2, '2', 9, '9']) }
it { must validate_inclusion_of(:forma_transporte).in_array([:veiculo, :balsa, :vagao]) }
it { must validate_presence_of(:forma_transporte) }
		describe 'ICMS withholding' do
			context "when retencao_icms? is true" do
before { subject.stubs(:retencao_icms?).returns(true) }
it { must validate_presence_of(:retencao_codigo_municipio) }
it { must validate_presence_of(:retencao_cfop) }
it { must validate_numericality_of(:retencao_base_calculo_icms).allow_nil }
it { must validate_numericality_of(:retencao_aliquota).is_less_than(100).allow_nil }
it { must validate_numericality_of(:retencao_valor_icms).allow_nil }
end
context "quando retencao_icms? for false" do
before { subject.stubs(:retencao_icms?).returns(false) }
it { wont validate_presence_of(:retencao_codigo_municipio) }
it { wont validate_presence_of(:retencao_cfop) }
it { wont validate_numericality_of(:retencao_base_calculo_icms) }
it { wont validate_numericality_of(:retencao_aliquota) }
it { wont validate_numericality_of(:retencao_valor_icms) }
end
end
end
describe '#retencao_icms? method' do
it "deve retornar true se o valor setado em retencao_valor_sevico for maior que zero" do
subject.retencao_valor_sevico = 0.1
subject.retencao_icms?.must_equal true
subject.retencao_valor_sevico = 20
subject.retencao_icms?.must_equal true
end
it "deve retornar false se o valor setado em retencao_valor_sevico for nil, zero ou menor" do
subject.retencao_valor_sevico = nil
subject.retencao_icms?.must_equal false
subject.retencao_valor_sevico = 0.0
subject.retencao_icms?.must_equal false
subject.retencao_valor_sevico = -1
subject.retencao_icms?.must_equal false
end
end
describe 'Quando a forma_transporte for :veiculo' do
before { subject.forma_transporte = :veiculo }
it { must_have_one(:veiculo,
BrNfe.veiculo_product_class,
{placa: 'LOG', rntc: 'NR', uf: "SP"}
)}
context "e houver frete" do
before { subject.stubs(:have_carriage?).returns(true) }
it { must validate_presence_of(:veiculo) }
it { must_validate_have_one :veiculo, BrNfe.veiculo_product_class, :invalid_veiculo }
it { wont validate_presence_of(:identificacao_balsa) }
it { wont validate_presence_of(:identificacao_vagao) }
end
context "e não houver frete" do
include ValidationsWhenNotHasCarriage
end
end
describe 'Quando a forma_transporte for :balsa' do
before { subject.forma_transporte = :balsa }
context "e houver frete" do
before { subject.stubs(:have_carriage?).returns(true) }
it { must validate_presence_of(:identificacao_balsa) }
it { wont validate_presence_of(:veiculo) }
it { wont validate_presence_of(:identificacao_vagao) }
it { wont_validate_have_one :veiculo, BrNfe.veiculo_product_class, :invalid_veiculo }
end
context "e não houver frete" do
include ValidationsWhenNotHasCarriage
end
end
describe 'Quando a forma_transporte for :vagao' do
before { subject.forma_transporte = :vagao }
context "e houver frete" do
before { subject.stubs(:have_carriage?).returns(true) }
it { must validate_presence_of(:identificacao_vagao) }
it { wont validate_presence_of(:identificacao_balsa) }
it { wont validate_presence_of(:veiculo) }
it { wont_validate_have_one :veiculo, BrNfe.veiculo_product_class, :invalid_veiculo }
end
context "e não houver frete" do
include ValidationsWhenNotHasCarriage
end
end
describe '#reboques' do
it { must_validate_length_has_many(:reboques, BrNfe.veiculo_product_class, {maximum: 5}) }
it { must_validates_has_many(:reboques, BrNfe.veiculo_product_class, :invalid_reboque) }
it { must_have_many(:reboques, BrNfe.veiculo_product_class, {placa: 'XXL9999', rntc: '223'}) }
end
describe '#volumes' do
it { must_validates_has_many(:volumes, BrNfe.volume_transporte_product_class, :invalid_volume) }
it { must_have_many(:volumes, BrNfe.volume_transporte_product_class, {marca: 'QUIPO', quantidade: 223}) }
end
describe '#CÁLCULOS AUTOMÁTICOS' do
describe '#retencao_valor_icms' do
it "deve calcular o valor a partir dos atributos 'retencao_base_calculo_icms' e 'retencao_aliquota' se estiver nil " do
subject.retencao_valor_icms = nil
subject.assign_attributes(retencao_base_calculo_icms: 150.0, retencao_aliquota: 5.5)
subject.retencao_valor_icms.must_equal 8.25
subject.assign_attributes(retencao_base_calculo_icms: 1_000.0, retencao_aliquota: 10)
subject.retencao_valor_icms.must_equal 100.0
subject.assign_attributes(retencao_base_calculo_icms: nil, retencao_aliquota: nil)
subject.retencao_valor_icms.must_equal 0.0
end
it "deve manter o valor setado manualmente mesmo que o calculo entre os atributos 'retencao_base_calculo_icms' e 'retencao_aliquota' sejam diferentes" do
subject.retencao_valor_icms = 57.88
subject.assign_attributes(retencao_base_calculo_icms: 150.0, retencao_aliquota: 5.5)
subject.retencao_valor_icms.must_equal 57.88
subject.assign_attributes(retencao_base_calculo_icms: 1_000.0, retencao_aliquota: 10)
subject.retencao_valor_icms.must_equal 57.88
end
end
end
describe '#transportador' do
it { must_have_one(:transportador,
BrNfe.transportador_product_class,
{nome_fantasia: 'LOG', razao_social: 'NR', endereco_uf: "SP"}
)}
it { must_validate_have_one(:transportador, BrNfe.transportador_product_class, :invalid_transportador) }
end
end
|
#!/usr/bin/env bash
SCRIPT=`realpath $0`
SCRIPTPATH=`dirname $SCRIPT`
export PYTHONPATH=$SCRIPTPATH
# OPTIONS="--continue-on-collection-errors --doctest-modules"
pytest -vs apps/tests/
pytest -vs snippets/tests/
pytest -vs tests/
|
TERMUX_PKG_HOMEPAGE=https://nodejs.org/
TERMUX_PKG_DESCRIPTION="Open Source, cross-platform JavaScript runtime environment"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Yaksh Bariya <yakshbari4@gmail.com>"
TERMUX_PKG_VERSION=16.14.2
TERMUX_PKG_SRCURL=https://nodejs.org/dist/v${TERMUX_PKG_VERSION}/node-v${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=e922e215cc68eb5f94d33e8a0b61e2c863b7731cc8600ab955d3822da90ff8d1
# Note that we do not use a shared libuv to avoid an issue with the Android
# linker, which does not use symbols of linked shared libraries when resolving
# symbols on dlopen(). See https://github.com/termux/termux-packages/issues/462.
#
# Node.js 16.x does not support `NODE_OPTIONS=--openssl-legacy-provider` option.
# See https://github.com/termux/termux-packages/issues/9266. Please revert back
# to depending on openssl (instead of openssl-1.1) when migrating to next LTS.
TERMUX_PKG_DEPENDS="libc++, openssl-1.1, c-ares, libicu, zlib"
TERMUX_PKG_CONFLICTS="nodejs, nodejs-current"
TERMUX_PKG_BREAKS="nodejs-dev"
TERMUX_PKG_REPLACES="nodejs-current, nodejs-dev"
TERMUX_PKG_SUGGESTS="clang, make, pkg-config, python"
TERMUX_PKG_PROVIDES="nodejs"
TERMUX_PKG_RM_AFTER_INSTALL="lib/node_modules/npm/html lib/node_modules/npm/make.bat share/systemtap lib/dtrace"
TERMUX_PKG_BUILD_IN_SRC=true
TERMUX_PKG_HOSTBUILD=true
termux_step_post_get_source() {
# Prevent caching of host build:
rm -Rf $TERMUX_PKG_HOSTBUILD_DIR
}
termux_step_host_build() {
local ICU_VERSION=70.1
local ICU_TAR=icu4c-${ICU_VERSION//./_}-src.tgz
local ICU_DOWNLOAD=https://github.com/unicode-org/icu/releases/download/release-${ICU_VERSION//./-}/$ICU_TAR
termux_download \
$ICU_DOWNLOAD\
$TERMUX_PKG_CACHEDIR/$ICU_TAR \
8d205428c17bf13bb535300669ed28b338a157b1c01ae66d31d0d3e2d47c3fd5
tar xf $TERMUX_PKG_CACHEDIR/$ICU_TAR
cd icu/source
if [ "$TERMUX_ARCH_BITS" = 32 ]; then
./configure --prefix $TERMUX_PKG_HOSTBUILD_DIR/icu-installed \
--disable-samples \
--disable-tests \
--build=i686-pc-linux-gnu "CFLAGS=-m32" "CXXFLAGS=-m32" "LDFLAGS=-m32"
else
./configure --prefix $TERMUX_PKG_HOSTBUILD_DIR/icu-installed \
--disable-samples \
--disable-tests
fi
make -j $TERMUX_MAKE_PROCESSES install
}
termux_step_configure() {
local DEST_CPU
if [ $TERMUX_ARCH = "arm" ]; then
DEST_CPU="arm"
elif [ $TERMUX_ARCH = "i686" ]; then
DEST_CPU="ia32"
elif [ $TERMUX_ARCH = "aarch64" ]; then
DEST_CPU="arm64"
elif [ $TERMUX_ARCH = "x86_64" ]; then
DEST_CPU="x64"
else
termux_error_exit "Unsupported arch '$TERMUX_ARCH'"
fi
export GYP_DEFINES="host_os=linux"
export CC_host=gcc
export CXX_host=g++
export LINK_host=g++
LDFLAGS+=" -ldl"
local _SHARED_OPENSSL_INCLUDES=$TERMUX_PREFIX/include
local _SHARED_OPENSSL_LIBPATH=$TERMUX_PREFIX/lib
if [ "${TERMUX_PKG_VERSION%%.*}" != "16" ]; then
termux_error_exit 'Please migrate to using openssl (instead of openssl-1.1).'
else
_SHARED_OPENSSL_INCLUDES=$TERMUX_PREFIX/include/openssl-1.1
_SHARED_OPENSSL_LIBPATH=$TERMUX_PREFIX/lib/openssl-1.1
LDFLAGS="-Wl,-rpath=$_SHARED_OPENSSL_LIBPATH $LDFLAGS"
fi
# See note above TERMUX_PKG_DEPENDS why we do not use a shared libuv.
./configure \
--prefix=$TERMUX_PREFIX \
--dest-cpu=$DEST_CPU \
--dest-os=android \
--shared-cares \
--shared-openssl \
--shared-openssl-includes=$_SHARED_OPENSSL_INCLUDES \
--shared-openssl-libpath=$_SHARED_OPENSSL_LIBPATH \
--shared-zlib \
--with-intl=system-icu \
--cross-compiling
export LD_LIBRARY_PATH=$TERMUX_PKG_HOSTBUILD_DIR/icu-installed/lib
perl -p -i -e "s@LIBS := \\$\\(LIBS\\)@LIBS := -L$TERMUX_PKG_HOSTBUILD_DIR/icu-installed/lib -lpthread -licui18n -licuuc -licudata -ldl -lz@" \
$TERMUX_PKG_SRCDIR/out/tools/v8_gypfiles/mksnapshot.host.mk \
$TERMUX_PKG_SRCDIR/out/tools/v8_gypfiles/torque.host.mk \
$TERMUX_PKG_SRCDIR/out/tools/v8_gypfiles/bytecode_builtins_list_generator.host.mk \
$TERMUX_PKG_SRCDIR/out/tools/v8_gypfiles/v8_libbase.host.mk \
$TERMUX_PKG_SRCDIR/out/tools/v8_gypfiles/gen-regexp-special-case.host.mk
}
termux_step_create_debscripts() {
cat <<- EOF > ./postinst
#!$TERMUX_PREFIX/bin/sh
npm config set foreground-scripts true
EOF
}
|
package net.community.chest.lang.math;
import java.util.Comparator;
/**
* Copyright 2007 as per GPLv2
*
* @param <V> Type of compared value
* @author <NAME>.
* @since Jun 10, 2007 2:59:29 PM
*/
public interface NumbersComparator<V extends Number & Comparable<V>> extends Comparator<V> {
/**
* @return {@link Class} of {@link Number}-s being compared
*/
Class<V> getNumbersClass ();
}
|
import { __assign } from "tslib";
import { each, isArray, deepMix } from '@antv/util';
import BBox from '../../../util/bbox';
var LABEL_MARGIN = 4;
var MatrixLegend = /** @class */ (function () {
function MatrixLegend(cfg) {
this.destroyed = false;
this.dataSlides = {};
this.interactiveEvents = {};
var defaultOptions = this.getDefaultOptions();
this.options = deepMix({}, defaultOptions, cfg);
this.view = this.options.view;
this.afterRender = true;
this.init();
}
MatrixLegend.prototype.init = function () {
var _this = this;
this.layout = this.getLayout();
this.width = this.options.width ? this.options.width : this.getDefaultWidth();
this.height = this.options.height ? this.options.height : this.getDefaultHeight();
var plotContainer = this.options.plot.container;
if (this.container) {
this.container.remove();
}
this.container = plotContainer.addGroup();
this.view.on('beforerender', function () {
_this.clear();
_this.options.plot.canvas.draw();
});
};
MatrixLegend.prototype.render = function () {
var scales = this.view.geometries[0].scales;
var colorField = this.options.plot.options.colorField;
this.colorScale = scales[colorField];
var _a = this.colorScale, min = _a.min, max = _a.max;
var color = this.options.plot.options.color;
if (this.layout === 'horizontal') {
this.renderHorizontal(min, max, color);
}
else {
this.renderVertical(min, max, color);
}
this.legendLayout();
this.addInteraction();
};
MatrixLegend.prototype.hide = function () {
this.container.set('visible', false);
this.options.plot.canvas.draw();
};
MatrixLegend.prototype.show = function () {
this.container.set('visible', true);
this.options.plot.canvas.draw();
};
MatrixLegend.prototype.clear = function () {
if (this.container) {
this.container.clear();
}
};
MatrixLegend.prototype.destroy = function () {
if (this.container) {
this.container.remove();
}
this.offEvent();
this.destroyed = true;
};
MatrixLegend.prototype.getBBox = function () {
var origin_bbox = this.container.getBBox();
return new BBox(this.x, this.y, origin_bbox.width, origin_bbox.height);
};
MatrixLegend.prototype.renderVertical = function (min, max, colors) {
var _this = this;
var valueStep = (max - min) / (colors.length - 1);
var colorStep = 1 / (colors.length - 1);
var tickStep = this.height / (colors.length - 1);
var gradientColor = 'l(90)';
each(colors, function (c, index) {
var stepNum = colorStep * index;
gradientColor += stepNum + ":" + c + " ";
});
this.container.addShape('rect', {
attrs: {
x: 0,
y: 0,
width: this.width,
height: this.height,
fill: gradientColor,
},
name: 'legend',
});
// draw tick and label
each(colors, function (c, index) {
// tick
var step = tickStep * index;
_this.container.addShape('path', {
attrs: __assign({ path: [
['M', 0, step],
['L', _this.width, step],
] }, _this.options.ticklineStyle),
});
// value
var value = Math.round(valueStep * index);
_this.container.addShape('text', {
attrs: __assign({ text: value, textAlign: 'left', textBaseline: 'middle', x: _this.width + LABEL_MARGIN, y: step }, _this.options.text.style),
name: 'legend-label',
});
});
//anchor
var tri_width = 10;
var tri_height = 14;
var tri_path = [['M', -tri_width, -tri_height / 2], ['L', 0, 0], ['L', -tri_width, tri_height / 2], ['Z']];
this.anchor = this.container.addShape('path', {
attrs: __assign({ path: tri_path }, this.options.anchorStyle),
});
this.anchor.set('visible', false);
};
MatrixLegend.prototype.renderHorizontal = function (min, max, colors) {
var _this = this;
var valueStep = (max - min) / (colors.length - 1);
var colorStep = 1 / (colors.length - 1);
var tickStep = this.width / (colors.length - 1);
var gradientColor = 'l(0)';
each(colors, function (c, index) {
var stepNum = colorStep * index;
gradientColor += stepNum + ":" + c + " ";
});
this.container.addShape('rect', {
attrs: {
x: 0,
y: 0,
width: this.width,
height: this.height,
fill: gradientColor,
},
name: 'legend',
});
// draw tick and label
each(colors, function (c, index) {
// tick
var step = tickStep * index;
_this.container.addShape('path', {
attrs: __assign({ path: [
['M', step, 0],
['L', step, _this.height],
] }, _this.options.ticklineStyle),
name: 'legend-label',
});
// value
var value = Math.round(valueStep * index);
_this.container.addShape('text', {
attrs: __assign({ text: value, textAlign: 'center', textBaseline: 'top', x: step, y: _this.height + LABEL_MARGIN }, _this.options.text.style),
});
});
//anchor
var tri_width = 14;
var tri_height = 10;
var tri_path = [['M', 0, 0], ['L', -tri_width / 2, -tri_height], ['L', tri_width / 2, -tri_height], ['Z']];
this.anchor = this.container.addShape('path', {
attrs: __assign({ path: tri_path }, this.options.anchorStyle),
});
this.anchor.set('visible', false);
};
MatrixLegend.prototype.getLayout = function () {
var positions = this.options.position.split('-');
this.position = positions[0];
if (positions[0] === 'left' || positions[0] === 'right') {
return 'vertical';
}
return 'horizontal';
};
MatrixLegend.prototype.getDefaultWidth = function () {
if (this.layout === 'horizontal') {
var width = this.view.coordinateBBox.width;
return width;
}
return 10;
};
MatrixLegend.prototype.getDefaultHeight = function () {
if (this.layout === 'vertical') {
var height = this.view.coordinateBBox.height;
return height;
}
return 10;
};
MatrixLegend.prototype.legendLayout = function () {
var _this = this;
var panelRange = this.view.coordinateBBox;
var bleeding = this.options.plot.getPlotTheme().bleeding;
if (isArray(bleeding)) {
each(bleeding, function (it, index) {
if (typeof bleeding[index] === 'function') {
bleeding[index] = bleeding[index](_this.options.plot.options);
}
});
}
var bbox = this.container.getBBox();
var x = 0;
var y = 0;
var positions = this.options.position.split('-');
var plotWidth = this.options.plot.width;
var plotHeight = this.options.plot.height;
        // determine x first
if (positions[0] === 'left') {
x = bleeding[3];
}
else if (positions[0] === 'right') {
x = plotWidth - bleeding[1] - bbox.width;
}
else if (positions[1] === 'center') {
// default
if (this.width === panelRange.width) {
x = panelRange.x;
}
else {
x = (plotWidth - bbox.width) / 2;
}
}
else if (positions[1] === 'left') {
x = bleeding[3];
}
else if (positions[1] === 'right') {
x = this.options.plot.width - bleeding[1] - bbox.width;
}
        // then determine y
if (positions[0] === 'bottom') {
y = plotHeight - bleeding[2] - bbox.height;
}
else if (positions[0] === 'top') {
y = this.getTopPosition(bleeding);
}
else if (positions[1] === 'center') {
// default
if (this.height === panelRange.height) {
y = panelRange.y;
}
else {
            // user-specified height; center it within the plot
y = (plotHeight - bbox.height) / 2;
}
}
else if (positions[1] === 'top') {
y = bleeding[0];
}
else if (positions[1] === 'bottom') {
y = plotHeight - bleeding[2] - bbox.height;
}
this.x = x;
this.y = y;
this.container.translate(x, y);
};
MatrixLegend.prototype.getDefaultOptions = function () {
return {
text: {
style: {
fontSize: 12,
fill: 'rgba(0, 0, 0, 0.45)',
},
},
ticklineStyle: {
lineWidth: 1,
stroke: 'rgba(0, 0, 0, 0.8)',
},
anchorStyle: {
fill: 'rgba(0,0,0,0.5)',
},
triggerOn: 'mousemove',
};
};
MatrixLegend.prototype.addInteraction = function () {
var _this = this;
var geomType;
if (this.options.plot.options.shapeType === 'rect') {
geomType = 'polygon';
}
else {
geomType = 'point';
}
var eventName = geomType + ":" + this.options.triggerOn;
//const labelEventName = `label:${this.options.triggerOn}`;
var field = this.options.plot.options.colorField;
var _a = this.colorScale, min = _a.min, max = _a.max;
var geomEventHandler = function (ev) {
var value = ev.data.data[field];
var ratio = (value - min) / (max - min);
_this.moveAnchor(ratio);
};
this.view.on(eventName, geomEventHandler);
this.interactiveEvents[eventName] = {
target: this.view,
handler: geomEventHandler,
};
/*this.view.on(labelEventName, (ev) => {
const value = ev.data[field];
const ratio = (value - min) / (max - min);
this.moveAnchor(ratio);
});*/
var mouseleaveHandler = function () {
_this.anchor.set('visible', false);
};
this.options.plot.canvas.on('mouseleave', mouseleaveHandler);
this.interactiveEvents.mouseleave = {
target: this.options.plot.canvas,
handler: mouseleaveHandler,
};
};
MatrixLegend.prototype.moveAnchor = function (ratio) {
this.anchor.set('visible', true);
if (this.layout === 'vertical') {
var pos = this.height * ratio;
var ulMatrix = [1, 0, 0, 0, 1, 0, 0, 0, 1];
ulMatrix[7] = pos;
this.anchor.stopAnimate();
this.anchor.animate({
matrix: ulMatrix,
}, 400, 'easeLinear');
}
else {
var pos = this.width * ratio;
var ulMatrix = [1, 0, 0, 0, 1, 0, 0, 0, 1];
ulMatrix[6] = pos;
this.anchor.stopAnimate();
this.anchor.animate({
matrix: ulMatrix,
}, 400, 'easeLinear');
}
};
MatrixLegend.prototype.getTopPosition = function (bleeding) {
if (this.options.plot.description) {
var bbox = this.options.plot.description.getBBox();
return bbox.maxY + 10;
}
else if (this.options.plot.title) {
var bbox = this.options.plot.title.getBBox();
return bbox.maxY + 10;
}
return bleeding[0];
};
MatrixLegend.prototype.offEvent = function () {
each(this.interactiveEvents, function (event, key) {
var target = event.target, handler = event.handler;
target.off(key, handler);
});
};
return MatrixLegend;
}());
export default MatrixLegend;
//# sourceMappingURL=legend.js.map
|
// Package dynamodbquery queries objects from Amazon DynamoDB
package dynamodbquery
import (
"bytes"
"encoding/json"
"fmt"
"reflect"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/dynamodb"
"github.com/TIBCOSoftware/flogo-lib/core/activity"
"github.com/TIBCOSoftware/flogo-lib/logger"
)
// Constants used by the code to represent the input and outputs of the JSON structure
const (
ivAwsAccessKeyID = "awsAccessKeyID"
ivAwsSecretAccessKey = "awsSecretAccessKey"
ivAwsRegion = "awsRegion"
ivDynamoDBTableName = "dynamoDBTableName"
ivDynamoDBKeyConditionExpression = "dynamoDBKeyConditionExpression"
ivDynamoDBExpressionAttributes = "dynamoDBExpressionAttributes"
ivDynamoDBFilterExpression = "dynamoDBFilterExpression"
ivDynamoDBIndexName = "dynamoDBIndexName"
ovResult = "result"
ovScannedCount = "scannedCount"
ovConsumedCapacity = "consumedCapacity"
)
// log is the default package logger
var log = logger.GetLogger("activity-dynamodbquery")
// MyActivity is a stub for your Activity implementation
type MyActivity struct {
metadata *activity.Metadata
}
// NewActivity creates a new activity
func NewActivity(metadata *activity.Metadata) activity.Activity {
return &MyActivity{metadata: metadata}
}
// Metadata implements activity.Activity.Metadata
func (a *MyActivity) Metadata() *activity.Metadata {
return a.metadata
}
// ExpressionAttribute is a structure representing the JSON payload for the expression syntax
type ExpressionAttribute struct {
Name string
Value string
}
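// Example (assumed) JSON payload for dynamoDBExpressionAttributes when it is
// supplied as a string; each entry pairs a placeholder name with its value:
//   [{"Name": ":id", "Value": "1234"}, {"Name": ":type", "Value": "order"}]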
// Eval implements activity.Activity.Eval
func (a *MyActivity) Eval(context activity.Context) (done bool, err error) {
// Get the inputs
awsRegion := context.GetInput(ivAwsRegion).(string)
dynamoDBTableName := context.GetInput(ivDynamoDBTableName).(string)
dynamoDBKeyConditionExpression := context.GetInput(ivDynamoDBKeyConditionExpression).(string)
dynamoDBExpressionAttributes := context.GetInput(ivDynamoDBExpressionAttributes)
dynamoDBFilterExpression := context.GetInput(ivDynamoDBFilterExpression).(string)
dynamoDBIndexName := context.GetInput(ivDynamoDBIndexName).(string)
// AWS Credentials, only if needed
var awsAccessKeyID, awsSecretAccessKey = "", ""
if context.GetInput(ivAwsAccessKeyID) != nil {
awsAccessKeyID = context.GetInput(ivAwsAccessKeyID).(string)
}
if context.GetInput(ivAwsSecretAccessKey) != nil {
awsSecretAccessKey = context.GetInput(ivAwsSecretAccessKey).(string)
}
// Create a session with Credentials only if they are set
var awsSession *session.Session
if awsAccessKeyID != "" && awsSecretAccessKey != "" {
// Create new credentials using the accessKey and secretKey
awsCredentials := credentials.NewStaticCredentials(awsAccessKeyID, awsSecretAccessKey, "")
// Create a new session with AWS credentials
awsSession = session.Must(session.NewSession(&aws.Config{
Credentials: awsCredentials,
Region: aws.String(awsRegion),
}))
} else {
// Create a new session without AWS credentials
awsSession = session.Must(session.NewSession(&aws.Config{
Region: aws.String(awsRegion),
}))
}
// Create a new login to the DynamoDB service
dynamoService := dynamodb.New(awsSession)
// Construct the expression attributes
var expressionAttributes []ExpressionAttribute
v := reflect.ValueOf(dynamoDBExpressionAttributes)
	switch v.Kind() {
	case reflect.String:
		// A JSON string was supplied; decode it into the attribute array
		if err := json.Unmarshal([]byte(dynamoDBExpressionAttributes.(string)), &expressionAttributes); err != nil {
			log.Errorf("Error while unmarshalling expression attributes [%s]", err)
			return false, err
		}
	case reflect.Slice, reflect.Map:
		// Slices and maps are converted below
	default:
		log.Errorf("Unknown type [%s]", reflect.TypeOf(dynamoDBExpressionAttributes).String())
		return true, fmt.Errorf("Unknown type [%s]", reflect.TypeOf(dynamoDBExpressionAttributes).String())
	}
// Construct the expression attributes
if reflect.TypeOf(dynamoDBExpressionAttributes).Kind() == reflect.Map {
expressionAttributes = buildExpressionAttributesArray(dynamoDBExpressionAttributes.(map[string]interface{}))
} else if reflect.TypeOf(dynamoDBExpressionAttributes).Kind() == reflect.Slice {
tempArray := dynamoDBExpressionAttributes.([]interface{})
for _, element := range tempArray {
expressionAttributes = append(expressionAttributes, buildExpressionAttributesArray(element.(map[string]interface{}))...)
}
}
log.Infof("%v", expressionAttributes)
expressionAttributeMap := make(map[string]*dynamodb.AttributeValue)
for _, attribute := range expressionAttributes {
expressionAttributeMap[attribute.Name] = &dynamodb.AttributeValue{S: aws.String(attribute.Value)}
}
// Construct the DynamoDB query
var queryInput = &dynamodb.QueryInput{}
if dynamoDBFilterExpression == "" {
if dynamoDBIndexName == "" {
queryInput = &dynamodb.QueryInput{
TableName: aws.String(dynamoDBTableName),
KeyConditionExpression: aws.String(dynamoDBKeyConditionExpression),
ExpressionAttributeValues: expressionAttributeMap,
ReturnConsumedCapacity: aws.String("TOTAL"),
}
} else {
queryInput = &dynamodb.QueryInput{
TableName: aws.String(dynamoDBTableName),
IndexName: aws.String(dynamoDBIndexName),
KeyConditionExpression: aws.String(dynamoDBKeyConditionExpression),
ExpressionAttributeValues: expressionAttributeMap,
ReturnConsumedCapacity: aws.String("TOTAL"),
}
}
} else {
if dynamoDBIndexName == "" {
queryInput = &dynamodb.QueryInput{
TableName: aws.String(dynamoDBTableName),
KeyConditionExpression: aws.String(dynamoDBKeyConditionExpression),
ExpressionAttributeValues: expressionAttributeMap,
FilterExpression: aws.String(dynamoDBFilterExpression),
ReturnConsumedCapacity: aws.String("TOTAL"),
}
} else {
queryInput = &dynamodb.QueryInput{
TableName: aws.String(dynamoDBTableName),
IndexName: aws.String(dynamoDBIndexName),
KeyConditionExpression: aws.String(dynamoDBKeyConditionExpression),
ExpressionAttributeValues: expressionAttributeMap,
FilterExpression: aws.String(dynamoDBFilterExpression),
ReturnConsumedCapacity: aws.String("TOTAL"),
}
}
}
// Prepare and execute the DynamoDB query
var queryOutput, err1 = dynamoService.Query(queryInput)
if err1 != nil {
log.Errorf("Error while executing query [%s]", err1)
} else {
result := make([]map[string]interface{}, len(queryOutput.Items))
// Loop over the result items and build a new map structure from it
for index, element := range queryOutput.Items {
dat := make(map[string]interface{})
for key, value := range element {
if value.N != nil {
actual := *value.N
dat[key] = actual
}
if value.S != nil {
actual := *value.S
dat[key] = actual
}
}
result[index] = dat
}
// Set the output value in the context
sc := *queryOutput.ScannedCount
context.SetOutput(ovScannedCount, sc)
cc := *queryOutput.ConsumedCapacity.CapacityUnits
context.SetOutput(ovConsumedCapacity, cc)
// Create a JSON representation from the result
jsonString, _ := json.Marshal(result)
var resultinterface interface{}
d := json.NewDecoder(bytes.NewReader(jsonString))
d.UseNumber()
		if err = d.Decode(&resultinterface); err != nil {
			log.Errorf("Error while decoding query result [%s]", err)
			return false, err
		}
f := map[string]interface{}{"results": resultinterface}
context.SetOutput(ovResult, f)
}
// Complete the activity
return true, nil
}
func buildExpressionAttributesArray(attribs map[string]interface{}) []ExpressionAttribute {
	var expressionAttributes []ExpressionAttribute
	// NOTE: this pairs consecutive map values as Name/Value. Go map iteration
	// order is not guaranteed, so this is only reliable for single-pair input;
	// multi-attribute payloads should use the JSON-string or slice form.
	attribValues := make([]string, 0, len(attribs))
	for _, v := range attribs {
		log.Infof("----[%s]", v.(string))
		attribValues = append(attribValues, v.(string))
	}
	for i := 0; i+1 < len(attribValues); i += 2 {
		expressionAttributes = append(expressionAttributes, ExpressionAttribute{Name: attribValues[i], Value: attribValues[i+1]})
	}
	return expressionAttributes
}
|
#!/bin/bash
# Copyright 2020 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# NOTE: The integration scripts deliberately do not check to
# make sure that the test protos have been regenerated.
# It is intentional that older versions of the .pb.go files
# are checked in to ensure that they continue to function.
#
# Versions used:
# protoc: v3.9.1
# protoc-gen-go: v1.3.2
for X in $(find . -name "*.proto" | sed "s|^\./||"); do
protoc -I$(pwd) --go_out=paths=source_relative:. $X
done
|
export { default } from './form';
export { FormMessageType, Rule, FormValue, Field } from './form';
|
"use strict";
import { Injectable } from "@angular/core";
import { Http, Headers, Response, RequestOptions } from "@angular/http";
import { Observable } from "rxjs/Observable";
// Side-effect imports for the RxJS 5 "patch" operator style used below
import "rxjs/add/observable/of";
import "rxjs/add/observable/throw";
import "rxjs/add/operator/map";
import "rxjs/add/operator/catch";
export class Ad {
constructor(
public id: string = "",
public title: string = "",
public category: string = "",
public desc: string = "",
public photoMain: string = "",
public photos: any[] = [],
public city: string = "",
public price: number = 0,
public owner: string = "",
public approved: boolean = false,
public dateCreated: Date = new Date,
public dateValid: Date = new Date) { }
} // class Ad
@Injectable()
export class AdsService {
private adsUrl: string =
window.location.protocol + "//" +
window.location.hostname +
((window.location.port === "80")
? ("")
: (":" + window.location.port)) + "/api/ads";
public count: number;
private token: string;
constructor(private http: Http) {
this.count = 0;
this.token = JSON.parse(sessionStorage.getItem("token"));
}
public getAds(startIndex: number = 0, count: number = 0): Observable<any> {
let headers = new Headers({ "Authorization": "Bearer " + this.token });
let options = new RequestOptions({ headers: headers });
let urlToUse: string;
if (count) {
urlToUse = `${this.adsUrl}/${startIndex}/${count}`;
} else {
urlToUse = this.adsUrl;
console.log(`Using ${this.adsUrl}`);
}
return this.http.get(urlToUse, options)
.map(
res => {
let data: any = this.extractData(res);
this.count = data.count ? <number>data.count : 0;
return !count ? data : data.ads;
})
.catch(this.handleError);
} // getAds()
public getMyAds(id: string): Observable<Ad[]> {
let headers = new Headers({ "Authorization": "Bearer " + this.token, "Content-Type": "application/json" });
let options = new RequestOptions({ headers: headers });
let body = "{\"user\":\"" + id + "\"}";
return this.http.post(this.adsUrl + "/list", body, options)
.map(this.extractData)
.catch(this.handleError);
} // getMyAds()
public getAd(id: string): Observable<Ad> {
if (!id) {
            console.log(`getAd was called with a bad id argument: ${id}`);
            return Observable.of(new Ad());
}
let headers = new Headers({ "Authorization": "Bearer " + this.token });
let options = new RequestOptions({ headers: headers });
return this.http.get(this.adsUrl + "/" + id, options)
.map((response: Response) => {
let adData = this.extractData(response);
if (adData._id) {
adData.id = adData._id;
return adData;
} else {
                    return new Ad();
}
})
.catch(this.handleError);
} // getAd()
public postAd(ad: Ad): Observable<Ad> {
if (!ad) {
            console.log(`postAd was called with a bad ad argument: ${JSON.stringify(ad)}`);
            return Observable.of(new Ad());
}
let headers = new Headers({ "Authorization": "Bearer " + this.token, "Content-Type": "application/json" });
let options: RequestOptions = new RequestOptions({ headers: headers });
let body: string = JSON.stringify(ad);
console.log(`SERVICE URL: ${this.adsUrl}`);
console.log(`SERVICE POST: ${body}`);
return this.http.post(this.adsUrl, body, options) // returns Observable<Response>
.map(this.extractData) // success
.catch(this.handleError); // error
} // postAd()
public putAd(ad: Ad): Observable<Ad> {
if (!ad || !ad.id) {
            console.log(`putAd was called with a bad ad argument: ${JSON.stringify(ad)}`);
            return Observable.of(new Ad());
}
let headers = new Headers({ "Authorization": "Bearer " + this.token, "Content-Type": "application/json" });
let options = new RequestOptions({ headers: headers });
let body: string = JSON.stringify(ad);
return this.http.put(this.adsUrl + "/" + ad.id, body, options)
.map((response: Response) => {
let adData = this.extractData(response);
if (adData._id) {
adData.id = adData._id;
console.log(`Service returned: ${JSON.stringify(adData)}`);
return adData;
} else {
                    return new Ad();
}
})
.catch(this.handleError);
} // putAd()
public deleteAd(id: string): Observable<boolean> {
if (!id) {
            console.log(`deleteAd was called with a bad id argument: ${id}`);
            return Observable.of(false);
}
let headers = new Headers({ "Authorization": "Bearer " + this.token });
let options = new RequestOptions({ headers: headers });
return this.http.delete(this.adsUrl + "/" + id, options)
.map((response: Response) => {
return this.extractData(response)._id ? true : false;
})
.catch((error) => {
                this.handleError(error).map((errMsg) => console.error("deleteAd Error: " + errMsg));
                return Observable.of(false);
});
} // deleteAd()
private extractData(res: Response) {
let body = res.json();
return body.data || {};
} // extractData()
private handleError (error: any): Observable<string> {
let errMsg = (error.message)
? error.message
: (error.status)
? `${error.status} - ${error.statusText}`
: "Server Error";
console.error(errMsg);
return Observable.throw(errMsg);
} // handleError()
} // class AdsService
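// Hypothetical usage from a component that injects AdsService:
//   this.adsService.getAds(0, 10).subscribe((ads) => this.ads = ads);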
|
import { Key } from '../../Any/Key';
import { _Pick as _OPick } from '../Pick';
import { List } from '../../List/List';
import { Tail } from '../../List/Tail';
import { BuiltIn } from '../../Misc/BuiltIn';
import { _ListOf } from '../ListOf';
/**
* @hidden
*/
declare type PickAt<O, Path extends List<Key>> = [
] extends Path ? O : O extends BuiltIn ? O : O extends List ? _ListOf<{
[K in keyof _OPick<O, Path[0]>]: PickAt<O[K], Tail<Path>>;
}> : O extends object ? {
[K in keyof _OPick<O, Path[0]>]: PickAt<O[K], Tail<Path>>;
} : O;
/**
* Extract out of `O` the fields at `Path`
* @param O to extract from
* @param Path to be followed
* @returns [[Object]]
 * @example
 * ```ts
 * // A sketch (types assumed, not taken from the library docs):
 * // type O  = { a: { b: string; c: number } }
 * // type O1 = Pick<O, ['a', 'b']> // { a: { b: string } }
 * ```
*/
export declare type Pick<O extends object, Path extends List<Key>> = Path extends unknown ? PickAt<O, Path> : never;
export {};
|
// Copyright 2020 <NAME>
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
#include <boost/describe.hpp>
#include <boost/mp11.hpp>
#include <boost/json.hpp>
#include <type_traits>
namespace app
{
template<class T> void extract( boost::json::object const & obj, char const * name, T & value )
{
value = boost::json::value_to<T>( obj.at( name ) );
}
template<class T,
class D1 = boost::describe::describe_members<T,
boost::describe::mod_public | boost::describe::mod_protected>,
class D2 = boost::describe::describe_members<T, boost::describe::mod_private>,
class En = std::enable_if_t<boost::mp11::mp_empty<D2>::value> >
T tag_invoke( boost::json::value_to_tag<T> const&, boost::json::value const& v )
{
auto const& obj = v.as_object();
T t{};
boost::mp11::mp_for_each<D1>([&](auto D){
extract( obj, D.name, t.*D.pointer );
});
return t;
}
struct A
{
int x;
int y;
};
BOOST_DESCRIBE_STRUCT(A, (), (x, y))
} // namespace app
#include <iostream>
int main()
{
boost::json::value jv{ { "x", 1 }, { "y", 2 } };
std::cout << "jv: " << jv << std::endl;
auto a = boost::json::value_to<app::A>( jv );
std::cout << "a: { " << a.x << ", " << a.y << " }" << std::endl;
}
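// Expected output (Boost.JSON compact serialization):
//   jv: {"x":1,"y":2}
//   a: { 1, 2 }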
|
const moment = require('moment-timezone');
let date = moment().tz("America/Los_Angeles").format('h:mm:ss a');
let date2 = moment().tz("Asia/Tokyo").format('h:mm:ss a');
let date3 = moment().tz("Europe/London").format('h:mm:ss a');
console.log('Current time in Los Angeles: ', date);
console.log('Current time in Tokyo: ', date2);
console.log('Current time in London: ', date3);
|
/**
 * Math utilities
*/
/**
 * Get the distance between two points
 * @method getDistance
 * @param {{x,y}} A point A
 * @param {{x,y}} B point B
 * @return {Float} distance
*/
function getDistance(A, B) {
return Math.sqrt(Math.pow(A.x - B.x, 2) + Math.pow(A.y - B.y, 2));
}
/**
 * 2D vector
*/
class Vec2 {
constructor(x = 0, y = 0) {
this.x = x;
this.y = y;
        this.info = {}; // carry generation metadata directly on the vector
}
    // magnitude (length)
getMod() {
return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2));
}
    // opposite (negated) vector
getOpp() {
return new Vec2(-this.x, -this.y);
}
    // perpendicular vector (clockwise)
getPerp() {
if (this.isZero()) {
            console.warn('taking the perpendicular of a zero vector, something might be wrong');
}
return new Vec2(-this.y, this.x);
}
    // scalar multiplication
product(n) {
return new Vec2(this.x * n, this.y * n);
}
mult(n) {
return new Vec2(this.x * n, this.y * n);
}
    // dot product
dot(b) {
return this.x * b.x + this.y * b.y;
}
    // perp-dot product (dot of this vector's perpendicular with b)
perpDot(b) {
let res = this.getPerp().dot(b);
        // force the result positive
// if (res < 0) {
// res = -res;
// }
return res
}
    // vector addition
add(b) {
return new Vec2(this.x + b.x, this.y + b.y);
}
    // vector subtraction
sub(b) {
return new Vec2(this.x - b.x, this.y - b.y);
}
    // equality check
    equal(b) {
        return this.x === b.x && this.y === b.y;
}
    // set components
set(x, y) {
this.x = x;
this.y = y;
}
    // normalize to a unit vector
    unit() {
        let mod = this.getMod();
        // console.log('mod', mod);
        if (mod === 0) {
            throw new Error('cannot normalize a zero vector: ' + JSON.stringify(this));
}
return this.mult(1/mod);
}
    // zero-vector check
isZero() {
return this.x === 0 && this.y === 0;
}
    // rotate around point o by deg (radians)
    // - rotation goes from +x toward +y; if the y axis points down, this is clockwise
rotate(deg, o) {
let len = this.sub(o).getMod();
// console.log(len, this.x, this.y);
let α = Math.atan2(this.y - o.y, this.x - o.x);
let β = α + deg;
// console.log(α, len * Math.cos(β), len * Math.sin(β));
this.x = o.x + len * Math.cos(β);
this.y = o.y + len * Math.sin(β);
}
}
// vector triple product
// @NOTE A x B x A conveniently yields the normal of A facing B, but it may be the zero
// vector, which needs a separate check. If any normal will do (direction irrelevant),
// A.getPerp() never returns the zero vector.
function vecTripleProduct (a, b, c) {
// a * b * c = -a(c . b) + b(c . a)
    // console.log('triple product', a, b, c, b.product(c.dot(a)).sub(a.product(c.dot(b))));
// console.log('#001', c.dot(a), b.product(c.dot(a)), c.dot(b), a.product(c.dot(b)));
return b.mult(c.dot(a)).sub(a.mult(c.dot(b)));
}
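// Example: with a = AB, b = AO, c = AB this returns AO*(AB·AB) - AB*(AB·AO),
// i.e. the component of AO perpendicular to AB (an edge normal pointing toward O).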
// get the support point of the Minkowski difference along direction dir
function support(shapeA, shapeB, dir) {
let pA = shapeA.getFarthest(dir);
let pB = shapeB.getFarthest(dir.getOpp());
// console.log('support: ', dir, pA, pB, pA.sub(pB));
let support = pA.sub(pB);
support.info.pair = {
A: pA,
B: pB,
}
// console.log(support);
return support;
}
// point-to-line distance
function getDisPointLine(p, a, b) {
    // line direction vector
    let ab = b.sub(a);
    // vector from the point to endpoint a
    let pa = a.sub(p);
    // unit perpendicular from the point to the line
    let n = vecTripleProduct(ab, pa, ab);
    if (n.isZero()) { return 0; }
    n = n.unit();
    // distance: projection of pa onto the unit perpendicular
    let d = n.dot(pa);
return d;
}
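// e.g. getDisPointLine(new Vec2(0, 2), new Vec2(-1, 0), new Vec2(1, 0)) === 2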
export {
getDistance,
Vec2,
vecTripleProduct,
support,
getDisPointLine,
};
// export default {
// getDistance,
// Vec2,
// vecTripleProduct,
// }
|
/***************************************************************************
* Copyright (c) <NAME>, <NAME>, <NAME> and *
* <NAME> *
* Copyright (c) QuantStack *
* Copyright (c) <NAME> *
* *
* Distributed under the terms of the BSD 3-Clause License. *
* *
* The full license is in the file LICENSE, distributed with this software. *
****************************************************************************/
#ifndef XSIMD_GENERIC_MEMORY_HPP
#define XSIMD_GENERIC_MEMORY_HPP
#include <algorithm>
#include <complex>
#include <stdexcept>
#include "../../types/xsimd_batch_constant.hpp"
#include "./xsimd_generic_details.hpp"
namespace xsimd
{
template <class batch_type, typename batch_type::value_type... Values>
struct batch_constant;
namespace kernel
{
using namespace types;
// extract_pair
template <class A, class T>
inline batch<T, A> extract_pair(batch<T, A> const& self, batch<T, A> const& other, std::size_t i, requires_arch<generic>) noexcept
{
constexpr std::size_t size = batch<T, A>::size;
assert(0 <= i && i < size && "index in bounds");
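            // e.g. for size == 4 and i == 1 the loop below fills
            // { other[1], other[2], other[3], self[0] }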
alignas(A::alignment()) T self_buffer[size];
self.store_aligned(self_buffer);
alignas(A::alignment()) T other_buffer[size];
other.store_aligned(other_buffer);
alignas(A::alignment()) T concat_buffer[size];
for (std::size_t j = 0; j < (size - i); ++j)
{
concat_buffer[j] = other_buffer[i + j];
if (j < i)
{
concat_buffer[size - 1 - j] = self_buffer[i - 1 - j];
}
}
return batch<T, A>::load_aligned(concat_buffer);
}
// gather
namespace detail
{
template <class T, class U, class B>
using sizes_match_t = typename std::enable_if<sizeof(T) == sizeof(U), B>::type;
template <class T, class U, class B>
using sizes_mismatch_t = typename std::enable_if<sizeof(T) != sizeof(U), B>::type;
template <typename T, typename A, typename U, typename V, size_t I>
inline batch<T, A> gather(U const* src, batch<V, A> const& index,
::xsimd::detail::index_sequence<I>) noexcept
{
return insert(batch<T, A> {}, static_cast<T>(src[index.get(I)]),
::xsimd::index<I>());
}
template <typename T, typename A, typename U, typename V, size_t I0, size_t I1, size_t... Is>
inline batch<T, A>
gather(U const* src, batch<V, A> const& index,
::xsimd::detail::index_sequence<I0, I1, Is...>) noexcept
{
const auto test = gather<T, A>(
src, index, ::xsimd::detail::index_sequence<I1, Is...>());
return insert(test, static_cast<T>(src[index.get(I0)]),
::xsimd::index<I0>());
}
} // namespace detail
template <typename A, typename T, typename U, typename V>
inline typename detail::sizes_mismatch_t<T, U, batch<T, A>>
gather(U const* src, batch<V, A> const& index,
kernel::requires_arch<generic>) noexcept
{
static_assert(batch<T, A>::size == batch<V, A>::size,
"Index and destination sizes must match");
return detail::gather<T, A>(
src, index,
::xsimd::detail::make_index_sequence<xsimd::batch<T, A>::size>());
}
template <typename A, typename T, typename U, typename V>
inline typename detail::sizes_match_t<T, U, batch<T, A>>
gather(U const* src, batch<V, A> const& index,
kernel::requires_arch<generic>) noexcept
{
static_assert(batch<T, A>::size == batch<V, A>::size,
"Index and destination sizes must match");
const auto dst = detail::gather<U, A>(
src, index,
::xsimd::detail::make_index_sequence<batch<U, A>::size>());
return batch_cast<T>(dst);
}
// insert
template <class A, class T, size_t I>
inline batch<T, A> insert(batch<T, A> const& self, T val, index<I>, requires_arch<generic>) noexcept
{
struct index_mask
{
static constexpr bool get(size_t index, size_t /* size*/)
{
return index != I;
}
};
batch<T, A> tmp(val);
return select(make_batch_bool_constant<batch<T, A>, index_mask>(), self, tmp);
}
// load_aligned
namespace detail
{
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_aligned(T_in const* mem, convert<T_out>, requires_arch<generic>, with_fast_conversion) noexcept
{
using batch_type_in = batch<T_in, A>;
using batch_type_out = batch<T_out, A>;
return fast_cast(batch_type_in::load_aligned(mem), batch_type_out(), A {});
}
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_aligned(T_in const* mem, convert<T_out>, requires_arch<generic>, with_slow_conversion) noexcept
{
static_assert(!std::is_same<T_in, T_out>::value, "there should be a direct load for this type combination");
using batch_type_out = batch<T_out, A>;
alignas(A::alignment()) T_out buffer[batch_type_out::size];
std::copy(mem, mem + batch_type_out::size, std::begin(buffer));
return batch_type_out::load_aligned(buffer);
}
}
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_aligned(T_in const* mem, convert<T_out> cvt, requires_arch<generic>) noexcept
{
return detail::load_aligned<A>(mem, cvt, A {}, detail::conversion_type<A, T_in, T_out> {});
}
// load_unaligned
namespace detail
{
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_unaligned(T_in const* mem, convert<T_out>, requires_arch<generic>, with_fast_conversion) noexcept
{
using batch_type_in = batch<T_in, A>;
using batch_type_out = batch<T_out, A>;
return fast_cast(batch_type_in::load_unaligned(mem), batch_type_out(), A {});
}
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_unaligned(T_in const* mem, convert<T_out> cvt, requires_arch<generic>, with_slow_conversion) noexcept
{
static_assert(!std::is_same<T_in, T_out>::value, "there should be a direct load for this type combination");
return load_aligned<A>(mem, cvt, generic {}, with_slow_conversion {});
}
}
template <class A, class T_in, class T_out>
inline batch<T_out, A> load_unaligned(T_in const* mem, convert<T_out> cvt, requires_arch<generic>) noexcept
{
return detail::load_unaligned<A>(mem, cvt, generic {}, detail::conversion_type<A, T_in, T_out> {});
}
// scatter
namespace detail
{
template <typename T, typename A, typename U, typename V, size_t I>
inline void scatter(batch<T, A> const& src, U* dst,
batch<V, A> const& index,
::xsimd::detail::index_sequence<I>) noexcept
{
dst[index.get(I)] = static_cast<U>(src.get(I));
}
template <typename T, typename A, typename U, typename V, size_t I0, size_t I1, size_t... Is>
inline void
scatter(batch<T, A> const& src, U* dst, batch<V, A> const& index,
::xsimd::detail::index_sequence<I0, I1, Is...>) noexcept
{
dst[index.get(I0)] = static_cast<U>(src.get(I0));
kernel::detail::scatter<T, A, U, V>(
src, dst, index, ::xsimd::detail::index_sequence<I1, Is...>());
}
} // namespace detail
template <typename A, typename T, typename U, typename V>
inline typename detail::sizes_mismatch_t<T, U, void>
scatter(batch<T, A> const& src, U* dst,
batch<V, A> const& index,
kernel::requires_arch<generic>) noexcept
{
static_assert(batch<T, A>::size == batch<V, A>::size,
"Source and index sizes must match");
kernel::detail::scatter<T, A, U, V>(
src, dst, index,
::xsimd::detail::make_index_sequence<batch<T, A>::size>());
}
template <typename A, typename T, typename U, typename V>
inline typename detail::sizes_match_t<T, U, void>
scatter(batch<T, A> const& src, U* dst,
batch<V, A> const& index,
kernel::requires_arch<generic>) noexcept
{
static_assert(batch<T, A>::size == batch<V, A>::size,
"Source and index sizes must match");
const auto tmp = batch_cast<U>(src);
kernel::detail::scatter<U, A, U, V>(
tmp, dst, index,
::xsimd::detail::make_index_sequence<batch<T, A>::size>());
}
// store
template <class T, class A>
inline void store(batch_bool<T, A> const& self, bool* mem, requires_arch<generic>) noexcept
{
using batch_type = batch<T, A>;
constexpr auto size = batch_bool<T, A>::size;
alignas(A::alignment()) T buffer[size];
kernel::store_aligned<A>(&buffer[0], batch_type(self), A {});
for (std::size_t i = 0; i < size; ++i)
mem[i] = bool(buffer[i]);
}
// store_aligned
template <class A, class T_in, class T_out>
inline void store_aligned(T_out* mem, batch<T_in, A> const& self, requires_arch<generic>) noexcept
{
static_assert(!std::is_same<T_in, T_out>::value, "there should be a direct store for this type combination");
alignas(A::alignment()) T_in buffer[batch<T_in, A>::size];
store_aligned(&buffer[0], self);
std::copy(std::begin(buffer), std::end(buffer), mem);
}
// store_unaligned
template <class A, class T_in, class T_out>
inline void store_unaligned(T_out* mem, batch<T_in, A> const& self, requires_arch<generic>) noexcept
{
static_assert(!std::is_same<T_in, T_out>::value, "there should be a direct store for this type combination");
return store_aligned<A>(mem, self, generic {});
}
// swizzle
template <class A, class T, class ITy, ITy... Vs>
inline batch<std::complex<T>, A> swizzle(batch<std::complex<T>, A> const& self, batch_constant<batch<ITy, A>, Vs...> mask, requires_arch<generic>) noexcept
{
return { swizzle(self.real(), mask), swizzle(self.imag(), mask) };
}
namespace detail
{
template <class A, class T>
inline batch<std::complex<T>, A> load_complex(batch<T, A> const& /*hi*/, batch<T, A> const& /*lo*/, requires_arch<generic>) noexcept
{
static_assert(std::is_same<T, void>::value, "load_complex not implemented for the required architecture");
}
template <class A, class T>
inline batch<T, A> complex_high(batch<std::complex<T>, A> const& /*src*/, requires_arch<generic>) noexcept
{
static_assert(std::is_same<T, void>::value, "complex_high not implemented for the required architecture");
}
template <class A, class T>
inline batch<T, A> complex_low(batch<std::complex<T>, A> const& /*src*/, requires_arch<generic>) noexcept
{
static_assert(std::is_same<T, void>::value, "complex_low not implemented for the required architecture");
}
}
// load_complex_aligned
template <class A, class T_out, class T_in>
inline batch<std::complex<T_out>, A> load_complex_aligned(std::complex<T_in> const* mem, convert<std::complex<T_out>>, requires_arch<generic>) noexcept
{
using real_batch = batch<T_out, A>;
T_in const* buffer = reinterpret_cast<T_in const*>(mem);
real_batch hi = real_batch::load_aligned(buffer),
lo = real_batch::load_aligned(buffer + real_batch::size);
return detail::load_complex(hi, lo, A {});
}
// load_complex_unaligned
template <class A, class T_out, class T_in>
inline batch<std::complex<T_out>, A> load_complex_unaligned(std::complex<T_in> const* mem, convert<std::complex<T_out>>, requires_arch<generic>) noexcept
{
using real_batch = batch<T_out, A>;
T_in const* buffer = reinterpret_cast<T_in const*>(mem);
real_batch hi = real_batch::load_unaligned(buffer),
lo = real_batch::load_unaligned(buffer + real_batch::size);
return detail::load_complex(hi, lo, A {});
}
// store_complex_aligned
template <class A, class T_out, class T_in>
inline void store_complex_aligned(std::complex<T_out>* dst, batch<std::complex<T_in>, A> const& src, requires_arch<generic>) noexcept
{
using real_batch = batch<T_in, A>;
real_batch hi = detail::complex_high(src, A {});
real_batch lo = detail::complex_low(src, A {});
T_out* buffer = reinterpret_cast<T_out*>(dst);
lo.store_aligned(buffer);
hi.store_aligned(buffer + real_batch::size);
}
        // store_complex_unaligned
template <class A, class T_out, class T_in>
inline void store_complex_unaligned(std::complex<T_out>* dst, batch<std::complex<T_in>, A> const& src, requires_arch<generic>) noexcept
{
using real_batch = batch<T_in, A>;
real_batch hi = detail::complex_high(src, A {});
real_batch lo = detail::complex_low(src, A {});
T_out* buffer = reinterpret_cast<T_out*>(dst);
lo.store_unaligned(buffer);
hi.store_unaligned(buffer + real_batch::size);
}
}
}
#endif
|
//*********************************************************************************
//
// Copyright(c) 2016 Carnegie Mellon University. All Rights Reserved.
// Copyright(c) 2016-2017 <NAME> All Rights Reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//*********************************************************************************
package edu.cmu.xprize.listener;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import cmu.xprize.comp_logging.CErrorManager;
import cmu.xprize.util.IReadyListener;
import edu.cmu.pocketsphinx.LogMath;
import edu.cmu.pocketsphinx.Segment;
/**
* This is the base Listener type
*/
public class ListenerBase {
private IReadyListener tutorRoot;
/**
* our modified SpeechRecognizer object wrapping the pocketsphinx decoder
*/
protected SpeechRecognizer recognizer;
static protected ListenerAssets assets; // created in init phase -
protected String captureLabel = ""; // label for capture, logging files
protected boolean IS_LOGGING = false;
protected File configFile; // config file to use, null => default
protected File modelsDir; // saved model directory
protected LogMath logMath; // needed for creating Fsgs
private String acousticModel = LCONST.KIDS; // LCONST.KIDS | LCONST.ADULT
protected String userID; // User ID
    // to work around a pocketsphinx timing bug: when recognizing continuously across silent pauses,
    // hyp words from speech segments before a pause have their reported frame times changed.
    // We use this to save the original pre-pause results.
protected HeardWord[] prePauseResult = null; // saved results from utterances prior to pause
protected BufferedWriter bw = null; // for writing language model files
protected IAsrEventListener eventListener; // where to send client notification callbacks
protected static final String SENTENCE_SEARCH = "sentence"; // label for our search in decoder
protected static final String JSGF_SEARCH = "jsgf_search"; // label for our search in decoder
// This is used to map language "Features" to the associated dictionary filenames
// Dictionary files are located in the assets/sync/models/lm
// Note: on Android these are case sensitive filenames
//
static private HashMap<String, String> dictMap = new HashMap<String, String>();
static {
dictMap.put("LANG_EN", "CMU07A-CAPS.DIC");
dictMap.put("LANG_SW", "SWAHILI.DIC");
}
static private boolean isReady = false;
static private String TAG = "ListenerBase";
public ListenerBase() {
}
/**
* construct Listener using default pocketsphinx config settings
*
* @param userID -- string identifying the user. will be prepended to capture files
*/
public ListenerBase(String userID) {
this.userID = userID;
configFile = null;
// decoder setup deferred until init() call.
}
/**
* construct Listener to setup decoder from a pocketsphinx config file. For path arguments config file must contain
* absolute paths on the Android device.
*
* @param userID -- string identifying the user. will be prepended to capture files
* @param config -- file of pocketsphinx config settings
*/
public ListenerBase(String userID, File config) {
this.userID = userID;
configFile = config;
// decoder setup deferred until init() call.
}
/**
* Initialize the listener
*
* @param langFTR -- application context for locating resources and external storage
*/
public void setLanguage(String langFTR) {
// Configure the phonetic rules that will be used by the decoder
// TODO: Need to make phoneme lang rules dynamic so we may have multiple recognizers
//
Phoneme.setTargetLanguage(langFTR);
// initialize recognizer for our task
//
setupRecognizer(assets.getExternalDir(), configFile, dictMap.get(langFTR));
}
/**
* Utility method to initialize the listener assets folder
*
* @param callback
*/
public void configListener(IReadyListener callback) {
tutorRoot = callback;
new listenerConfigTask().execute((Context) callback);
}
/**
* Construct and initialize the speech recognizer
*/
protected void setupRecognizer(File assetsDir, File configFile, String langDictionary) {
try {
// save path to modelsDir for use when finding fsgs
modelsDir = new File(assetsDir, "models");
// if caller specified a configFile, take parameters from that.
// In this config file must specify *all* non-default pocketsphinx parameters
if (configFile != null) {
recognizer = SpeechRecognizerSetup.setupFromFile(configFile).getRecognizer();
} else { // init using default config parameters
switch(acousticModel) {
case LCONST.KIDS:
// create pocketsphinx SpeechRecognizer using the SpeechRecognizerSetup factory method
recognizer = SpeechRecognizerSetup.defaultSetup()
// our pronunciation dictionary
.setDictionary(new File(modelsDir, "lm/" + langDictionary))
// our acoustic model
.setAcousticModel(new File(modelsDir, "hmm/en-con-ind"))
// this automatically logs raw audio to the specified directory:
.setRawLogDir(assetsDir)
.setBoolean("-verbose", true) // maximum log output
.setFloat("-samprate", 16000f)
.setInteger("-nfft", 512)
.setInteger("-frate", 100)
.setFloat("-lowerf", 50f)
.setFloat("-upperf", 6800f)
.setBoolean("-dither", true)
.setInteger("-nfilt", 40)
.setInteger("-ncep", 13)
.setString("-agc", "none")
.setFloat("-ascale", 1f) // 20 in default
.setBoolean("-backtrace", true) // no in default
.setDouble("-beam", 1e-80) // 1e-48 in default
.setBoolean("-bestpath", false) // yes in default
// .setString("-cmn", "current")
.setString("-cmn", "prior")
.setBoolean("-compallsen", false)
.setBoolean("-dictcase", false)
.setFloat("-fillprob", 1e-2f) // 1e-8 in default
.setBoolean("-fwdflat", false) // yes in default
.setInteger("-latsize", 5000)
.setFloat("-lpbeam", 1e-5f) // 1e-40 in default
.setDouble("-lponlybeam", 7e-29) //
.setFloat("-lw", 10f) // 6.5 in default
.setInteger("-maxhmmpf", 1500) // 10000 in default
//.setInteger("-maxnewoov", 5000) // 20 in default
.setDouble("-pbeam", 1e-80) // 1e-48 in default
.setFloat("-pip", 1f)
.setBoolean("-remove_noise", true) // yes in default
.setBoolean("-remove_silence", true) // yes in default
.setFloat("-silprob", 1f) // 0.005 in default
.setInteger("-topn", 4)
.setDouble("-wbeam", 1e-60) // 7e-29 in default
.setFloat("-wip", 1f) // 0.65 in default
.getRecognizer();
break;
case LCONST.ADULT:
// create pocketsphinx SpeechRecognizer using the SpeechRecognizerSetup factory method
recognizer = SpeechRecognizerSetup.defaultSetup()
// our pronunciation dictionary
//.setDictionary(new File(modelsDir, "lm/CMU07A-CAPS.DIC"))
.setDictionary(new File(modelsDir, "lm/" + langDictionary))
// our acoustic model
.setAcousticModel(new File(modelsDir, "hmm/en-us-semi"))
// this automatically logs raw audio to the specified directory:
.setRawLogDir(assetsDir)
/* can't get sphinx logfile on Android, log messages go to LogCat facility instead
.setString("-logfn", new File(assetsDir, logName).getPath())
*/
.setBoolean("-verbose", true) // maximum log output
// a few other settings we might want to experiment with:
// threshold for voice activity detection:
.setFloat("-vad_threshold", LCONST.VAD_THRESHOLD) // default 2.0
// other vad parameters:
// .setInteger("vad_postspeech", 50) // default 50 (centiseconds)
// .setInteger("vad_prespeech", 10) // default 10 (centiseconds)
// .setFloat("-silprob", 0.005f) // default 0.005
.setFloat("-fillprob", LCONST.FILLPROB) // default 1e-8f
// .setFloat("-wip", 0.65f) // default 0.65
.getRecognizer();
break;
}
}
// save a log math object to use when constructing FsgModels.
logMath = new LogMath();
}
catch (Exception e) {
CErrorManager.logEvent(TAG, "Recognizer configuration error: ", e, false);
}
}
/**
* Moves new assets to an external folder so the Sphinx code can access it.
*
*/
class listenerConfigTask extends AsyncTask<Context, Void, Boolean> {
@Override
protected void onPreExecute() {
}
@Override
protected Boolean doInBackground(Context... params) {
boolean result = false;
try {
// sync assets from resources to filesystem via ListenerAssets class
// This takes a modest but noticeable amount of time
//
assets = new ListenerAssets(params[0]);
assets.syncAssets();
result = true;
} catch (IOException e) {
// TODO: Manage exceptions
Log.d("ASR", "init Failed: " + e);
result = false;
}
return result;
}
@Override
protected void onPostExecute(Boolean result) {
isReady = result;
tutorRoot.onServiceReady("ASR", isReady? 1:0);
}
}
    public void listenFor(String[] wordsToHear, int startWord){}           // no-op stub
    public void listenForSentence(String[] wordsToHear, int startWord){}   // no-op stub
    public void updateNextWordIndex(int next){}                            // no-op stub
/**
* used by tutor root to test service availability
* @return true once asset sync and recognizer initialization have completed
*/
public boolean isReady() {
return isReady;
}
/**
* Stop the listener. Will send final hypothesis event
*/
public void stop() {
if (recognizer != null)
recognizer.stop();
}
/**
* Cancel the listener. Does not send final hypothesis event
*/
public void cancel() {
if (recognizer != null)
recognizer.cancel();
}
/**
* Attach event listener to receive notification callbacks
*/
public void setEventListener(IAsrEventListener callbackSink) {
eventListener = callbackSink;
}
public void setPauseListener(boolean pauseListener) {
if (recognizer != null)
recognizer.setPauseRecognizer(pauseListener);
}
/**
* return whether or not the listener is alive and actively listening.
* @return true if the recognizer exists and is actively listening
*/
public boolean isListening() {
return (recognizer != null)? recognizer.isListening(): false;
}
    public void reInitializeListener(boolean restartListener) {
        if (recognizer != null)
            recognizer.setRestartListener(restartListener);
    }
    public void configTimedEvent(int eventType, long newTimeout) {
        if (recognizer != null)
            recognizer.configTimedEvent(eventType, newTimeout);
    }
    public void resetTimedEvent(int eventType) {
        if (recognizer != null)
            recognizer.resetTimedEvent(eventType);
    }
    public void configStaticEvent(int eventType) {
        if (recognizer != null)
            recognizer.configStaticEvent(eventType);
    }
    public void resetStaticEvent(int eventType) {
        if (recognizer != null)
            recognizer.resetStaticEvent(eventType);
    }
/**
* get the path to the capture file for given utterance label
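     * e.g. an utterance label "utt-0001" (illustrative label) maps to &lt;rawLogDir&gt;/utt-0001.wav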
*/
public File getCaptureFile(String utteranceLabel) {
return new File(recognizer.rawLogDir, utteranceLabel + ".wav");
}
public void deleteLogFiles() {
if (recognizer == null)
return;
new File(recognizer.rawLogDir, captureLabel + "-log.txt").delete();
new File(recognizer.rawLogDir, captureLabel + ".raw").delete();
}
/**
* class used to hold info about heard words in recognition results.
*/
public static class HeardWord {
/**
* hypothesis word text as in dictionary (upper case) without pronunciation tag
*/
public String hypWord;
/**
* 0-based index of aligned sentence word, -1 if none
*/
public int iSentenceWord; // index of aligned sentence word, -1 if none
/**
* degree of match to sentence word coded as follows
*/
public int matchLevel;
/**
* default value: no information
*/
public static final int MATCH_UNKNOWN = 0;
/**
* heard wrong word
*/
public static final int MATCH_MISCUE = 1;
/**
* heard truncated prefix of word
*/
public static final int MATCH_TRUNCATION = 2;
/**
* heard exact match
*/
public static final int MATCH_EXACT = 3;
/**
* start time of word, milliseconds since epoch
*/
public long startTime;
/**
* end time of word, milliseconds since epoch
*/
public long endTime;
/**
* start of word in centiseconds since utterance start
*/
public long startFrame;
/**
* end of word in centiseconds since utterance start
*/
public long endFrame;
/**
* start time of utterance, ms since epoch
*/
public long utteranceStartTime;
/**
* utterance ID used for capture file
*/
public String utteranceId;
/**
* ms of silence that preceded word
*/
public int silence;
/**
* ms from end of reading of previous sentence word to start of this one
*/
public int latency;
public HeardWord(String asrWord) {
hypWord = asrWordText(asrWord); // strip any pronunciation tags
iSentenceWord = -1;
matchLevel = MATCH_UNKNOWN;
startTime = -1;
endTime = -1;
startFrame = -1;
endFrame = -1;
utteranceStartTime = -1;
utteranceId = "";
silence = -1;
latency = -1;
}
/**
* return word text stripped from possible parenthesized alternate pronunciation tag in sphinx result words
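     * e.g. "READ(2)" -&gt; "READ"; sphinx marks alternate pronunciations with a parenthesized index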
*/
protected static String asrWordText(String taggedWord) {
int iParen = taggedWord.indexOf('(');
return (iParen >= 0) ? taggedWord.substring(0, iParen) : taggedWord;
}
private static final String[] fieldsToPrint = {"hypWord", "iSentenceWord", "matchLevel", "startTime", "endTime", "utteranceStartTime", "utteranceId", "silence", "latency"};
public String toString() {
StringBuilder msg = new StringBuilder();
msg.append("{");
for (String fieldName : fieldsToPrint) {
try {
Field field = this.getClass().getDeclaredField(fieldName);
msg.append(fieldName);
msg.append(": ");
msg.append(field.get(this));
msg.append(", ");
} catch (NoSuchFieldException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
msg.delete(msg.length() - 2, msg.length());
msg.append("}");
return msg.toString();
}
}
/**
* utility function to convert text string into canonical-format word array
*
* @param text -- text string including punctuation
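     * e.g. "Once upon a time, they left." -&gt; ["ONCE", "UPON", "A", "TIME", "THEY", "LEFT"]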
*/
public static String[] textToWords(String text) {
// TODO: strip word-final or -initial apostrophes as in James' or 'cause.
// Currently assuming hyphenated expressions split into two Asr words.
return text.replace('-', ' ').replaceAll("['.!?,:;\"\\(\\)]", " ").toUpperCase(Locale.US).trim().split("\\s+");
}
/***** Logging */
/**
* get the path to the hypothesis log file for given utterance label
*/
protected File getHypLogFile(String utteranceLabel) {
// store it alongside the captured audio file
return new File(recognizer.rawLogDir, utteranceLabel + "-log.txt");
}
/**
* get time stamp string for current time in milliseconds
*/
protected String timestampMillis() {
return new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss.SSS", Locale.US).format(new Date(System.currentTimeMillis()));
}
// announce the start of hypothesis logging; the log file itself is created on first write in logHyp().
protected void beginHypLog() {
Log.i("beginHypLog", "starting hypothesis log");
}
// log a partial hypothesis
protected void logHyp(String timestamp, String hyp, List<Segment> segments, HeardWord[] heardWords) {
try {
File hypLog = getHypLogFile(captureLabel);
BufferedWriter bw = new BufferedWriter(new FileWriter(hypLog.getPath(), true)); // appends
// write out both the raw result with pocketsphinx times for debugging, and
// then the adjusted times we have computed for comparison with offline results
bw.write("## FROM GET PARTIAL RESULT:\n"); // as in reading tutor
bw.write(" TIME: " + timestamp + "\n");
bw.write(" DECODER OUTPUT: " + hyp + "\n");
bw.write(" RAW SEGMENTS:\n");
for (Segment s : segments) {
bw.write(s.getWord() + " " + s.getStartFrame() + " " + s.getEndFrame() + "\n");
}
bw.write(" SEGMENTATION:\n");
for (HeardWord hw : heardWords) {
bw.write(hw.hypWord + " " + hw.startFrame + " " + hw.endFrame + "\n");
}
bw.write("\n");
bw.close();
} catch (Exception e) {
Log.e("logHyp", "Error writing hypothesis log file " + e.getMessage());
}
}
}
|
#!/bin/bash
if [ "$#" -ne 2 ]; then
echo "Usage: $0 [input-list] [team-id]"
exit 1
fi
INPUT=$1
TEAM=$2
if [ ! -f "$INPUT" ]; then
echo "$INPUT file not found"
exit 99
fi
USERNAME=""
PASSWORD=""
OLDIFS=$IFS
IFS=,
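# each line of the input list is expected to be: template,datastore,folder,vmprefix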
while read template datastore folder vmprefix; do
    python2.7 ../scripts/clone_vm.py --host cdr-vcenter1.cse.buffalo.edu \
        -u "$USERNAME" -p "$PASSWORD" \
        --datastore-name "$datastore" --template "$template" \
        --vm-folder "$folder" --no-power-on --cluster MAIN \
        --vm-name "${vmprefix}${TEAM}";
done < "$INPUT"
IFS=$OLDIFS
|
#!/bin/bash
# ******************************
# Author:
# Lokesh Jindal
# April 2015
# lokeshjindal15@cs.wisc.edu
# ******************************
# Description:
# This script checks how many gem5 sims have finished gracefully ("m5_exit")
# and how many NPB benchmarks have completed gracefully ("benchmark completed")
# It then checks that both counts are equal
# Further, it checks how many condor jobs are still running
# and then checks that the number of jobs running/completed satisfies the constraints
# Usage: ./condor_sim_staus.sh rundir/NEHALEM_NPB_L10_B160_J1600/ 45 1 iris-21 iris-22
# This script takes 5 arguments:
# 1. directory which contains subdirectories of different benchmark runs
# 2. number of condor jobs initially launched - used to satisfy the checks
# 3. should use both machines (1) or only the first one (0)
# 4. condor machine 1
# 5. condor machine 2
# This script also uses another script called try.sh whose contents are listed at the end of this script
# Tip: You might want to
#   change the username in try.sh
#   change the strings used to grep for graceful exit of gem5
#   change the strings used to grep for graceful completion of benchmarks
# number of condor jobs per benchmark (6 in our case)
CONDOR_JOBS_PER_BMARK=6
echo "condor_jobs per benchmark = $CONDOR_JOBS_PER_BMARK"
directory=$1
launched_cond_jobs=$2
use_machine2=$3
condor_machine1=$4
cwd=`pwd`
cd $directory
echo "running check_sim_status in dir $directory ..."
find . -name gem5sim.out | xargs grep 'Exiting @ tick.*because m5_exit instruction encountered'
num_exits=`find . -name gem5sim.out | xargs grep 'Exiting @ tick.*because m5_exit instruction encountered' | wc -l`
find . -name system.terminal | xargs grep -i '.. benchmark.*completed'
bcmplts=`find . -name system.terminal | xargs grep -i '.. benchmark.*completed' | wc -l`
echo "num_exits=$num_exits and bcmplts=$bcmplts"
if [ "$num_exits" != "$bcmplts" ]
then
echo "*****DANGER num_exits=$num_exits NOT EQUAL TO bcmplts=$bcmplts *****"
else
echo "num_exits=$num_exits equals bcmplts=$bcmplts"
fi
cd $cwd
echo "running condor_q on $condor_machine1 ..."
jobs_running1=$(ssh $condor_machine1 'bash -s' < try.sh)
if [ "$use_machine2" == 1 ]
then
condor_machine2=$5
echo "running condor_q on $condor_machine2 ..."
jobs_running2=$(ssh $condor_machine2 'bash -s' < try.sh)
else
echo "Warn: using only 1 condor machine"
jobs_running2=0
fi
echo "jobs_running1=$jobs_running1"
echo "jobs_running2=$jobs_running2"
jobs_running=$(($jobs_running1 + $jobs_running2))
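# expected jobs still running = launched - (completed benchmarks * jobs per benchmark),
# e.g. (hypothetical numbers) 45 launched, 3 complete at 6 jobs each -> 45 - 18 = 27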
exp_jobs=$(($launched_cond_jobs - $(($bcmplts * $CONDOR_JOBS_PER_BMARK))))
if [ "$jobs_running" != "$exp_jobs" ]
then
echo "*****DANGER jobs_running=$jobs_running NOT EQUAL TO exp_jobs=$exp_jobs *****"
else
echo "jobs_running=$jobs_running equals exp_jobs=$exp_jobs"
echo "Things seem fine!"
fi
echo "done!"
###################################################################################################
# contents of try.sh
# ##################
# # !/bin/bash
#
# echo "hello" > try.log
#
# #condor
# export CONDOR_CONFIG="/mnt/condor/etc/condor_config"
# export PATH="/mnt/condor/bin:/mnt/condor/sbin:$PATH"
#
# export PATH="$PATH:/condor/bin"
#
# #which condor_q
#
# condor_q | grep ljindal | wc -l
####################################################################################################
|
function mode(array) {
let count = {};
let maxEl = array[0], maxCount = 1;
for(let i = 0; i < array.length; i++) {
let el = array[i];
if(count[el] == null)
count[el] = 1;
else
count[el]++;
if(count[el] > maxCount)
{
maxEl = el;
maxCount = count[el];
}
}
return maxEl;
}
console.log(mode([3, 5, 4, 4, 1, 1, 2, 3])); // 4
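// Ties go to whichever element reaches the highest count first:
console.log(mode([7, 9, 9, 7])); // 9 (9 reaches a count of 2 before 7 does)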
|
<reponame>zonesgame/StendhalArcClient<filename>core/src/games/stendhal/client/gui/chattext/ChatCache.java
/***************************************************************************
* (C) Copyright 2003-2015 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.chattext;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.LinkedList;
import java.util.ListIterator;
import games.stendhal.common.MathHelper;
import marauroa.common.Logger;
class ChatCache {
private final static Logger logger = Logger.getLogger(ChatCache.class);
private final String chatCacheFile;
private int current;
ChatCache(final String chatLogFile) {
this.chatCacheFile = chatLogFile;
}
	private final LinkedList<String> lines = new LinkedList<String>();
	public LinkedList<String> getLines() {
		return lines;
	}
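	/**
	 * Load previously saved chat lines from the cache file, if one exists.
	 */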
void loadChatCache() {
if (chatCacheFile == null) {
return;
}
try {
final File chatfile = new File(chatCacheFile);
if (chatfile.exists()) {
final FileInputStream fis = new FileInputStream(chatfile);
final BufferedReader br = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
try {
String line = null;
while (null != (line = br.readLine())) {
lines.add(line);
}
} finally {
br.close();
}
fis.close();
}
setCurrent(lines.size());
} catch (final IOException e) {
logger.error(e, e);
}
}
/**
* Save the contents of the cache.
*/
void save() {
if (chatCacheFile == null) {
return;
}
try {
new File(chatCacheFile).getParentFile().mkdirs();
final PrintStream ps = new PrintStream(chatCacheFile, "UTF-8");
/*
* Keep size of chat.log in a reasonable size.
*/
while (lines.size() > 200) {
lines.removeFirst();
}
final ListIterator<String> iterator = lines.listIterator();
while (iterator.hasNext()) {
ps.println(iterator.next());
}
ps.close();
} catch (final IOException ex) {
logger.error(ex, ex);
}
}
void setCurrent(final int current) {
this.current = current;
}
int getCurrent() {
return current;
}
void addlinetoCache(final String text) {
getLines().add(text);
setCurrent(getLines().size());
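		// cap the in-memory history at 50 entries, shifting the cursor down to match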
if (getLines().size() > 50) {
getLines().removeFirst();
setCurrent((getCurrent() - 1));
}
}
String current() {
return getLines().get(current);
}
boolean hasNext() {
return lines.size() > current;
}
boolean hasPrevious() {
return current > 1;
}
String previous() {
current = Math.max(current - 1, 0);
if (!lines.isEmpty()) {
return current();
}
return "";
}
String next() {
current = MathHelper.clamp(current + 1, 0, lines.size() - 1);
if (!lines.isEmpty()) {
return current();
}
return "";
}
}
|
#ifndef H_LINGO_PAGE_POINT_MAPPER
#define H_LINGO_PAGE_POINT_MAPPER
#include <lingo/platform/constexpr.hpp>
#include <lingo/page/result.hpp>
#include <lingo/page/intermediate.hpp>
#include <type_traits>
#define LINGO_POINT_MAPPER_TYPEDEFS \
using source_page_type = SourcePage; \
using destination_page_type = DestinationPage; \
using source_point_type = typename source_page_type::point_type; \
using destination_point_type = typename destination_page_type::point_type; \
using result_type = map_result<destination_point_type>
namespace lingo
{
namespace page
{
// Default implementation uses an intermediate code page
template <typename SourcePage, typename DestinationPage, typename IntermediatePage = typename intermediate<SourcePage, DestinationPage>::type, typename Enable = void>
struct point_mapper
{
LINGO_POINT_MAPPER_TYPEDEFS;
static LINGO_CONSTEXPR14 result_type map(source_point_type source_point) noexcept
{
const auto to_intermediate_result = source_page_type::template map_to<IntermediatePage>(source_point);
if (to_intermediate_result.error != error::error_code::success)
{
return { {}, to_intermediate_result.error };
}
return destination_page_type::template map_from<IntermediatePage>(to_intermediate_result.point);
}
};
// No conversion is needed when the source and destination pages are the same
template <typename SourcePage, typename DestinationPage, typename IntermediatePage>
struct point_mapper<SourcePage, DestinationPage, IntermediatePage,
typename std::enable_if<
std::is_same<SourcePage, DestinationPage>::value>::type>
{
LINGO_POINT_MAPPER_TYPEDEFS;
static LINGO_CONSTEXPR14 result_type map(source_point_type source_point) noexcept
{
return { source_point, error::error_code::success };
}
};
// When the source page is the same as the intermediate page we only need to convert the destination
template <typename SourcePage, typename DestinationPage, typename IntermediatePage>
struct point_mapper<SourcePage, DestinationPage, IntermediatePage,
typename std::enable_if<
!std::is_same<SourcePage, DestinationPage>::value &&
std::is_same<SourcePage, IntermediatePage>::value>::type>
{
LINGO_POINT_MAPPER_TYPEDEFS;
static LINGO_CONSTEXPR14 result_type map(source_point_type source_point) noexcept
{
return destination_page_type::template map_from<IntermediatePage>(source_point);
}
};
// When the destination page is the same as the intermediate page we only need to convert the source
template <typename SourcePage, typename DestinationPage, typename IntermediatePage>
struct point_mapper<SourcePage, DestinationPage, IntermediatePage,
typename std::enable_if<
!std::is_same<SourcePage, DestinationPage>::value &&
std::is_same<DestinationPage, IntermediatePage>::value>::type>
{
LINGO_POINT_MAPPER_TYPEDEFS;
static LINGO_CONSTEXPR14 result_type map(source_point_type source_point) noexcept
{
return source_page_type::template map_to<IntermediatePage>(source_point);
}
};
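		// Usage sketch -- the page type names below are hypothetical placeholders,
		// not actual lingo pages:
		//   using mapper = point_mapper<some_source_page, some_destination_page>;
		//   auto result = mapper::map(source_point);
		//   if (result.error == error::error_code::success) { /* use result.point */ }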
}
}
#undef LINGO_POINT_MAPPER_TYPEDEFS
#endif
|